Line data Source code
1 : /* C++-specific tree lowering bits; see also c-gimplify.cc and gimple.cc.
2 :
3 : Copyright (C) 2002-2026 Free Software Foundation, Inc.
4 : Contributed by Jason Merrill <jason@redhat.com>
5 :
6 : This file is part of GCC.
7 :
8 : GCC is free software; you can redistribute it and/or modify it under
9 : the terms of the GNU General Public License as published by the Free
10 : Software Foundation; either version 3, or (at your option) any later
11 : version.
12 :
13 : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 : WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 : for more details.
17 :
18 : You should have received a copy of the GNU General Public License
19 : along with GCC; see the file COPYING3. If not see
20 : <http://www.gnu.org/licenses/>. */
21 :
22 : #include "config.h"
23 : #include "system.h"
24 : #include "coretypes.h"
25 : #include "target.h"
26 : #include "basic-block.h"
27 : #include "cp-tree.h"
28 : #include "gimple.h"
29 : #include "predict.h"
30 : #include "stor-layout.h"
31 : #include "tree-iterator.h"
32 : #include "gimplify.h"
33 : #include "c-family/c-ubsan.h"
34 : #include "stringpool.h"
35 : #include "attribs.h"
36 : #include "asan.h"
37 : #include "gcc-rich-location.h"
38 : #include "memmodel.h"
39 : #include "tm_p.h"
40 : #include "output.h"
41 : #include "file-prefix-map.h"
42 : #include "cgraph.h"
43 : #include "omp-general.h"
44 : #include "opts.h"
45 : #include "gcc-urlifier.h"
46 : #include "contracts.h" // build_contract_check ()
47 :
/* Keep track of forward references to immediate-escalating functions in
   case they become consteval.  This vector contains ADDR_EXPRs and
   PTRMEM_CSTs; it also stores FUNCTION_DECLs that had an escalating
   function call in them, to check that they can be evaluated to a constant,
   and immediate-escalating functions that may become consteval.
   Allocated lazily by remember_escalating_expr; rooted for GC via GTY.  */
static GTY(()) hash_set<tree> *deferred_escalating_exprs;
54 :
55 : static void
56 21302426 : remember_escalating_expr (tree t)
57 : {
58 21302426 : if (uses_template_parms (t))
59 : /* Templates don't escalate, and cp_fold_immediate can get confused by
60 : other template trees in the function body (c++/115986). */
61 : return;
62 21302426 : if (!deferred_escalating_exprs)
63 18671 : deferred_escalating_exprs = hash_set<tree>::create_ggc (37);
64 21302426 : deferred_escalating_exprs->add (t);
65 : }
66 :
/* Flags for cp_fold and cp_fold_r.  */

enum fold_flags {
  ff_none = 0,
  /* Whether we're being called from cp_fold_function.  */
  ff_genericize = 1 << 0,
  /* Whether we're folding a point where we know we're
     definitely not in a manifestly constant-evaluated
     context.  */
  ff_mce_false = 1 << 1,
  /* Whether we're only folding non-ODR usages of constants.
     This happens before saving the constexpr funcdef, so
     we should do as little other folding as possible.
     Mutually exclusive with ff_mce_false.  */
  ff_only_non_odr = 1 << 2,
};

/* A bitmask of the fold_flags bits above.  */
using fold_flags_t = int;
85 :
86 121765328 : struct cp_fold_data
87 : {
88 : hash_set<tree> pset;
89 : fold_flags_t flags;
90 187416820 : cp_fold_data (fold_flags_t flags): flags (flags)
91 : {
92 187416820 : gcc_checking_assert (!(flags & ff_mce_false)
93 : || !(flags & ff_only_non_odr));
94 187416820 : }
95 : };
96 :
/* Forward declarations of the walk_tree callbacks and folding helpers
   defined later in this file.  */

static tree cp_genericize_r (tree *, int *, void *);
static tree cp_fold_r (tree *, int *, void *);
static void cp_genericize_tree (tree*, bool);
static tree cp_fold (tree, fold_flags_t);
static tree cp_fold_immediate_r (tree *, int *, void *);
104 :
105 : /* Genericize a TRY_BLOCK. */
106 :
107 : static void
108 17272 : genericize_try_block (tree *stmt_p)
109 : {
110 17272 : tree body = TRY_STMTS (*stmt_p);
111 17272 : tree cleanup = TRY_HANDLERS (*stmt_p);
112 :
113 17272 : *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
114 17272 : }
115 :
116 : /* Genericize a HANDLER by converting to a CATCH_EXPR. */
117 :
118 : static void
119 20295 : genericize_catch_block (tree *stmt_p)
120 : {
121 20295 : tree type = HANDLER_TYPE (*stmt_p);
122 20295 : tree body = HANDLER_BODY (*stmt_p);
123 :
124 : /* FIXME should the caught type go in TREE_TYPE? */
125 20295 : *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
126 20295 : }
127 :
128 : /* A terser interface for building a representation of an exception
129 : specification. */
130 :
131 : static tree
132 5782 : build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
133 : {
134 5782 : tree t;
135 :
136 : /* FIXME should the allowed types go in TREE_TYPE? */
137 5782 : t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
138 5782 : append_to_statement_list (failure, &EH_FILTER_FAILURE (t));
139 :
140 5782 : t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
141 5782 : append_to_statement_list (body, &TREE_OPERAND (t, 0));
142 :
143 5782 : return t;
144 : }
145 :
146 : /* Genericize an EH_SPEC_BLOCK by converting it to a
147 : TRY_CATCH_EXPR/EH_FILTER_EXPR pair. */
148 :
149 : static void
150 5782 : genericize_eh_spec_block (tree *stmt_p)
151 : {
152 5782 : tree body = EH_SPEC_STMTS (*stmt_p);
153 5782 : tree allowed = EH_SPEC_RAISES (*stmt_p);
154 5782 : tree failure = build_call_n (call_unexpected_fn, 1, build_exc_ptr ());
155 :
156 5782 : *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
157 5782 : suppress_warning (*stmt_p);
158 5782 : suppress_warning (TREE_OPERAND (*stmt_p, 1));
159 5782 : }
160 :
161 : /* Return the first non-compound statement in STMT. */
162 :
163 : tree
164 13826316 : first_stmt (tree stmt)
165 : {
166 21207311 : switch (TREE_CODE (stmt))
167 : {
168 5531463 : case STATEMENT_LIST:
169 5531463 : if (tree_statement_list_node *p = STATEMENT_LIST_HEAD (stmt))
170 3951169 : return first_stmt (p->stmt);
171 1580294 : return void_node;
172 :
173 3429826 : case BIND_EXPR:
174 3429826 : return first_stmt (BIND_EXPR_BODY (stmt));
175 :
176 : default:
177 : return stmt;
178 : }
179 : }
180 :
/* Genericize an IF_STMT by turning it into a COND_EXPR.  Also handles
   the special lowerings for vacuous-init, consteval and constexpr if,
   and diagnoses contradictory likely/unlikely attributes.  */

static void
genericize_if_stmt (tree *stmt_p)
{
  tree stmt, cond, then_, else_;
  location_t locus = EXPR_LOCATION (*stmt_p);

  stmt = *stmt_p;
  cond = IF_COND (stmt);
  then_ = THEN_CLAUSE (stmt);
  else_ = ELSE_CLAUSE (stmt);

  /* If the first statement of each arm is a PREDICT_EXPR with the same
     hot/cold predictor, the [[likely]]/[[unlikely]] annotations are
     contradictory; warn with a location covering both.  */
  if (then_ && else_)
    {
      tree ft = first_stmt (then_);
      tree fe = first_stmt (else_);
      br_predictor pr;
      if (TREE_CODE (ft) == PREDICT_EXPR
	  && TREE_CODE (fe) == PREDICT_EXPR
	  && (pr = PREDICT_EXPR_PREDICTOR (ft)) == PREDICT_EXPR_PREDICTOR (fe)
	  && (pr == PRED_HOT_LABEL || pr == PRED_COLD_LABEL))
	{
	  gcc_rich_location richloc (EXPR_LOC_OR_LOC (ft, locus));
	  richloc.add_range (EXPR_LOC_OR_LOC (fe, locus));
	  warning_at (&richloc, OPT_Wattributes,
		      "both branches of %<if%> statement marked as %qs",
		      pr == PRED_HOT_LABEL ? "likely" : "unlikely");
	}
    }

  /* The condition is known zero here (asserted below); jump over THEN_
     with an artificial goto/label pair so THEN_ stays in the tree but is
     skipped at run time.  */
  if (IF_STMT_VACUOUS_INIT_P (stmt))
    {
      gcc_checking_assert (integer_zerop (cond));
      gcc_checking_assert (!else_ || !TREE_SIDE_EFFECTS (else_));
      tree lab = create_artificial_label (UNKNOWN_LOCATION);
      VACUOUS_INIT_LABEL_P (lab) = 1;
      tree goto_expr = build_stmt (UNKNOWN_LOCATION, GOTO_EXPR, lab);
      tree label_expr = build_stmt (UNKNOWN_LOCATION, LABEL_EXPR, lab);
      if (TREE_CODE (then_) == STATEMENT_LIST)
	{
	  /* Splice the goto before and the label after the existing list.  */
	  tree_stmt_iterator i = tsi_start (then_);
	  tsi_link_before (&i, goto_expr, TSI_CONTINUE_LINKING);
	  i = tsi_last (then_);
	  tsi_link_after (&i, label_expr, TSI_CONTINUE_LINKING);
	  stmt = then_;
	}
      else
	{
	  /* Build a fresh list: goto; THEN_; label.  */
	  stmt = NULL_TREE;
	  append_to_statement_list (goto_expr, &stmt);
	  append_to_statement_list (then_, &stmt);
	  append_to_statement_list (label_expr, &stmt);
	}
      *stmt_p = stmt;
      return;
    }

  if (!then_)
    then_ = build_empty_stmt (locus);
  if (!else_)
    else_ = build_empty_stmt (locus);

  /* consteval if has been verified not to have the then_/else_ blocks
     entered by gotos/case labels from elsewhere, and as then_ block
     can contain unfolded immediate function calls, we have to discard
     the then_ block regardless of whether else_ has side-effects or not.  */
  if (IF_STMT_CONSTEVAL_P (stmt))
    {
      if (block_may_fallthru (then_))
	stmt = build3 (COND_EXPR, void_type_node, boolean_false_node,
		       void_node, else_);
      else
	stmt = else_;
    }
  else if (IF_STMT_CONSTEXPR_P (stmt))
    /* For if constexpr, only the selected arm survives.  */
    stmt = integer_nonzerop (cond) ? then_ : else_;
  /* ??? This optimization doesn't seem to belong here, but removing it
     causes -Wreturn-type regressions (e.g. 107310).  */
  else if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
    stmt = then_;
  else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
    stmt = else_;
  else
    stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
  protected_set_expr_location_if_unset (stmt, locus);
  *stmt_p = stmt;
}
269 :
270 : /* Hook into the middle of gimplifying an OMP_FOR node. */
271 :
272 : static enum gimplify_status
273 45263 : cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
274 : {
275 45263 : tree for_stmt = *expr_p;
276 45263 : gimple_seq seq = NULL;
277 :
278 : /* Protect ourselves from recursion. */
279 45263 : if (OMP_FOR_GIMPLIFYING_P (for_stmt))
280 : return GS_UNHANDLED;
281 21028 : OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;
282 :
283 21028 : gimplify_and_add (for_stmt, &seq);
284 21028 : gimple_seq_add_seq (pre_p, seq);
285 :
286 21028 : OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;
287 :
288 21028 : return GS_ALL_DONE;
289 : }
290 :
291 : /* Gimplify an EXPR_STMT node. */
292 :
293 : static void
294 3886125 : gimplify_expr_stmt (tree *stmt_p)
295 : {
296 3886125 : tree stmt = EXPR_STMT_EXPR (*stmt_p);
297 :
298 3886125 : if (stmt == error_mark_node)
299 : stmt = NULL;
300 :
301 : /* Gimplification of a statement expression will nullify the
302 : statement if all its side effects are moved to *PRE_P and *POST_P.
303 :
304 : In this case we will not want to emit the gimplified statement.
305 : However, we may still want to emit a warning, so we do that before
306 : gimplification. */
307 3882193 : if (stmt && warn_unused_value)
308 : {
309 306718 : if (!TREE_SIDE_EFFECTS (stmt))
310 : {
311 0 : if (!IS_EMPTY_STMT (stmt)
312 6846 : && !VOID_TYPE_P (TREE_TYPE (stmt))
313 6846 : && !warning_suppressed_p (stmt, OPT_Wunused_value))
314 0 : warning (OPT_Wunused_value, "statement with no effect");
315 : }
316 : else
317 299872 : warn_if_unused_value (stmt, input_location);
318 : }
319 :
320 3886125 : if (stmt == NULL_TREE)
321 3932 : stmt = alloc_stmt_list ();
322 :
323 3886125 : *stmt_p = stmt;
324 3886125 : }
325 :
/* Gimplify initialization from an AGGR_INIT_EXPR.  *EXPR_P is an
   INIT_EXPR (see the INIT_EXPR case of cp_gimplify_expr); its RHS may
   be a TARGET_EXPR or a COMPOUND_EXPR chain ending in an
   AGGR_INIT_EXPR/VEC_INIT_EXPR whose slot is redirected to the LHS.  */

static void
cp_gimplify_init_expr (tree *expr_p)
{
  tree from = TREE_OPERAND (*expr_p, 1);
  tree to = TREE_OPERAND (*expr_p, 0);
  tree t;

  if (TREE_CODE (from) == TARGET_EXPR)
    if (tree init = TARGET_EXPR_INITIAL (from))
      {
	/* Make sure that we expected to elide this temporary.  But also allow
	   gimplify_modify_expr_rhs to elide temporaries of trivial type.  */
	gcc_checking_assert (TARGET_EXPR_ELIDING_P (from)
			     || !TREE_ADDRESSABLE (TREE_TYPE (from)));
	if (target_expr_needs_replace (from))
	  {
	    /* If this was changed by cp_genericize_target_expr, we need to
	       walk into it to replace uses of the slot.  */
	    replace_decl (&init, TARGET_EXPR_SLOT (from), to);
	    *expr_p = init;
	    return;
	  }
	else
	  /* Otherwise just initialize directly from the initializer.  */
	  from = init;
      }

  /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
     inside the TARGET_EXPR.  */
  for (t = from; t; )
    {
      tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;

      /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
	 replace the slot operand with our target.

	 Should we add a target parm to gimplify_expr instead?  No, as in this
	 case we want to replace the INIT_EXPR.  */
      if (TREE_CODE (sub) == AGGR_INIT_EXPR
	  || TREE_CODE (sub) == VEC_INIT_EXPR)
	{
	  if (TREE_CODE (sub) == AGGR_INIT_EXPR)
	    AGGR_INIT_EXPR_SLOT (sub) = to;
	  else
	    VEC_INIT_EXPR_SLOT (sub) = to;
	  *expr_p = from;

	  /* The initialization is now a side-effect, so the container can
	     become void.  */
	  if (from != sub)
	    TREE_TYPE (from) = void_type_node;
	}

      /* Handle aggregate NSDMI.  */
      replace_placeholders (sub, to);

      /* Advance along the COMPOUND_EXPR chain; SUB == T means we reached
	 the last (non-compound) element.  */
      if (t == sub)
	break;
      else
	t = TREE_OPERAND (t, 1);
    }

}
390 :
391 : /* Gimplify a MUST_NOT_THROW_EXPR. */
392 :
393 : static enum gimplify_status
394 562821 : gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
395 : {
396 562821 : tree stmt = *expr_p;
397 562821 : tree temp = voidify_wrapper_expr (stmt, NULL);
398 562821 : tree body = TREE_OPERAND (stmt, 0);
399 562821 : gimple_seq try_ = NULL;
400 562821 : gimple_seq catch_ = NULL;
401 562821 : gimple *mnt;
402 :
403 562821 : gimplify_and_add (body, &try_);
404 562821 : mnt = gimple_build_eh_must_not_throw (call_terminate_fn);
405 562821 : gimple_seq_add_stmt_without_update (&catch_, mnt);
406 562821 : mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);
407 :
408 562821 : gimple_seq_add_stmt_without_update (pre_p, mnt);
409 562821 : if (temp)
410 : {
411 33 : *expr_p = temp;
412 33 : return GS_OK;
413 : }
414 :
415 562788 : *expr_p = NULL;
416 562788 : return GS_ALL_DONE;
417 : }
418 :
/* Return TRUE if an operand (OP) of a given TYPE being copied is
   really just an empty class copy.

   Check that the operand has a simple form so that TARGET_EXPRs and
   non-empty CONSTRUCTORs get reduced properly, and we leave the
   return slot optimization alone because it isn't a copy.
   CODE is the tree code of the copy (e.g. MODIFY_EXPR).  */

bool
simple_empty_class_p (tree type, tree op, tree_code code)
{
  /* A COMPOUND_EXPR copies whatever its second operand yields.  */
  if (TREE_CODE (op) == COMPOUND_EXPR)
    return simple_empty_class_p (type, TREE_OPERAND (op, 1), code);
  if (SIMPLE_TARGET_EXPR_P (op)
      && TYPE_HAS_TRIVIAL_DESTRUCTOR (type))
    /* The TARGET_EXPR is itself a simple copy, look through it.  */
    return simple_empty_class_p (type, TARGET_EXPR_INITIAL (op), code);

  if (TREE_CODE (op) == PARM_DECL
      && TREE_ADDRESSABLE (TREE_TYPE (op)))
    {
      tree fn = DECL_CONTEXT (op);
      if (DECL_THUNK_P (fn)
	  || lambda_static_thunk_p (fn))
	/* In a thunk, we pass through invisible reference parms, so this isn't
	   actually a copy.  */
	return false;
    }

  /* OP must be in one of the simple forms below, must not be a clobber,
     and TYPE must be genuinely empty (ignoring the vptr).  */
  return
    (TREE_CODE (op) == EMPTY_CLASS_EXPR
     || code == MODIFY_EXPR
     || is_gimple_lvalue (op)
     || INDIRECT_REF_P (op)
     || (TREE_CODE (op) == CONSTRUCTOR
	 && CONSTRUCTOR_NELTS (op) == 0)
     || (TREE_CODE (op) == CALL_EXPR
	 && !CALL_EXPR_RETURN_SLOT_OPT (op)))
    && !TREE_CLOBBER_P (op)
    && is_really_empty_class (type, /*ignore_vptr*/true);
}
459 :
460 : /* Returns true if evaluating E as an lvalue has side-effects;
461 : specifically, a volatile lvalue has TREE_SIDE_EFFECTS, but it doesn't really
462 : have side-effects until there is a read or write through it. */
463 :
464 : static bool
465 2539080 : lvalue_has_side_effects (tree e)
466 : {
467 2539080 : if (!TREE_SIDE_EFFECTS (e))
468 : return false;
469 55727 : while (handled_component_p (e))
470 : {
471 4866 : if (TREE_CODE (e) == ARRAY_REF
472 4866 : && TREE_SIDE_EFFECTS (TREE_OPERAND (e, 1)))
473 : return true;
474 3228 : e = TREE_OPERAND (e, 0);
475 : }
476 50861 : if (DECL_P (e))
477 : /* Just naming a variable has no side-effects. */
478 : return false;
479 34060 : else if (INDIRECT_REF_P (e))
480 : /* Similarly, indirection has no side-effects. */
481 33926 : return TREE_SIDE_EFFECTS (TREE_OPERAND (e, 0));
482 : else
483 : /* For anything else, trust TREE_SIDE_EFFECTS. */
484 134 : return TREE_SIDE_EFFECTS (e);
485 : }
486 :
487 : /* Return true if FN is an immediate-escalating function. */
488 :
489 : bool
490 182911704 : immediate_escalating_function_p (tree fn)
491 : {
492 182911704 : if (!fn || !flag_immediate_escalation)
493 : return false;
494 :
495 182911379 : gcc_checking_assert (TREE_CODE (fn) == FUNCTION_DECL);
496 :
497 182911379 : if (DECL_IMMEDIATE_FUNCTION_P (fn))
498 : return false;
499 :
500 : /* An immediate-escalating function is
501 : -- the call operator of a lambda that is not declared with the consteval
502 : specifier */
503 185824811 : if (LAMBDA_FUNCTION_P (fn))
504 : return true;
505 : /* -- a defaulted function that is not declared with the
506 : consteval specifier */
507 181223450 : if (DECL_DEFAULTED_FN (fn))
508 : return true;
509 : /* -- a function that results from the instantiation of a templated entity
510 : defined with the constexpr specifier. */
511 174112358 : return is_instantiation_of_constexpr (fn);
512 : }
513 :
514 : /* Return true if FN is an immediate-escalating function that has not been
515 : checked for escalating expressions.. */
516 :
517 : static bool
518 182894721 : unchecked_immediate_escalating_function_p (tree fn)
519 : {
520 182894721 : return (immediate_escalating_function_p (fn)
521 182894721 : && !DECL_ESCALATION_CHECKED_P (fn));
522 : }
523 :
524 : /* Promote FN to an immediate function, including its clones. */
525 :
526 : void
527 15644 : promote_function_to_consteval (tree fn)
528 : {
529 15644 : SET_DECL_IMMEDIATE_FUNCTION_P (fn);
530 15644 : DECL_ESCALATION_CHECKED_P (fn) = true;
531 15644 : tree clone;
532 24832 : FOR_EACH_CLONE (clone, fn)
533 : {
534 9188 : SET_DECL_IMMEDIATE_FUNCTION_P (clone);
535 9188 : DECL_ESCALATION_CHECKED_P (clone) = true;
536 : }
537 15644 : }
538 :
/* A wrapper around cp_fold_immediate_r.  Return a non-null tree if
   we found a non-constant immediate function, or taking the address
   of an immediate function.  DECL is used as current_function_decl for
   the duration of the walk (restored on return).  */

tree
cp_fold_immediate (tree *tp, mce_value manifestly_const_eval,
		   tree decl /*= current_function_decl*/)
{
  /* consteval (and thus immediate-function checking) only exists in
     C++20 and later.  */
  if (cxx_dialect <= cxx17)
    return NULL_TREE;

  temp_override<tree> cfd (current_function_decl, decl);

  fold_flags_t flags = ff_none;
  if (manifestly_const_eval == mce_false)
    flags |= ff_mce_false;

  cp_fold_data data (flags);
  int save_errorcount = errorcount;
  tree r = cp_walk_tree (tp, cp_fold_immediate_r, &data, NULL);
  /* If the walk emitted errors, report failure even when it did not
     return a specific offending tree.  */
  if (errorcount > save_errorcount)
    return integer_one_node;
  return r;
}
563 :
564 : /* Maybe say that FN (a function decl with DECL_IMMEDIATE_FUNCTION_P set)
565 : was initially not an immediate function, but was promoted to one because
566 : its body contained an immediate-escalating expression or conversion. */
567 :
568 : static void
569 457 : maybe_explain_promoted_consteval (location_t loc, tree fn)
570 : {
571 457 : if (DECL_ESCALATION_CHECKED_P (fn))
572 : {
573 : /* See if we can figure out what made the function consteval. */
574 126 : tree x = cp_fold_immediate (&DECL_SAVED_TREE (fn), mce_unknown, NULL_TREE);
575 126 : if (x)
576 99 : inform (cp_expr_loc_or_loc (x, loc),
577 : "%qD was promoted to an immediate function because its "
578 : "body contains an immediate-escalating expression %qE", fn, x);
579 : else
580 27 : inform (loc, "%qD was promoted to an immediate function", fn);
581 : }
582 457 : }
583 :
584 : /* Gimplify *EXPR_P as rvalue into an expression that can't be modified
585 : by expressions with side-effects in other operands. */
586 :
587 : static enum gimplify_status
588 36464 : gimplify_to_rvalue (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
589 : bool (*gimple_test_f) (tree))
590 : {
591 36464 : enum gimplify_status t
592 36464 : = gimplify_expr (expr_p, pre_p, post_p, gimple_test_f, fb_rvalue);
593 36464 : if (t == GS_ERROR)
594 : return GS_ERROR;
595 36461 : else if (is_gimple_variable (*expr_p) && TREE_CODE (*expr_p) != SSA_NAME)
596 3008 : *expr_p = get_initialized_tmp_var (*expr_p, pre_p);
597 : return t;
598 : }
599 :
/* Like gimplify_arg, but if ORDERED is set (which should be set if
   any of the arguments this argument is sequenced before has
   TREE_SIDE_EFFECTS set), make sure expressions with is_gimple_reg_type type
   are gimplified into SSA_NAME or a fresh temporary and for
   non-is_gimple_reg_type we don't optimize away TARGET_EXPRs.  */

static enum gimplify_status
cp_gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
		 bool ordered)
{
  enum gimplify_status t;
  if (ordered
      && !is_gimple_reg_type (TREE_TYPE (*arg_p))
      && TREE_CODE (*arg_p) == TARGET_EXPR)
    {
      /* gimplify_arg would strip away the TARGET_EXPR, but
	 that can mean we don't copy the argument and some following
	 argument with side-effect could modify it.  */
      protected_set_expr_location (*arg_p, call_location);
      return gimplify_expr (arg_p, pre_p, NULL, is_gimple_lvalue, fb_either);
    }
  else
    {
      t = gimplify_arg (arg_p, pre_p, call_location);
      if (t == GS_ERROR)
	return GS_ERROR;
      /* Copy the variable into a fresh temporary so that side-effects
	 of later arguments can't modify the value evaluated here.  */
      else if (ordered
	       && is_gimple_reg_type (TREE_TYPE (*arg_p))
	       && is_gimple_variable (*arg_p)
	       && TREE_CODE (*arg_p) != SSA_NAME
	       /* No need to force references into register, references
		  can't be modified.  */
	       && !TYPE_REF_P (TREE_TYPE (*arg_p))
	       /* And this can't be modified either.  */
	       && *arg_p != current_class_ptr)
	*arg_p = get_initialized_tmp_var (*arg_p, pre_p);
      return t;
    }

}
640 :
/* Emit a decl = {CLOBBER(bob)}; stmt before DECL_EXPR or first
   TARGET_EXPR gimplification for -flifetime-dse=2.  DECL is the
   variable; the clobber (if any) is appended to PRE_P.  */

static void
maybe_emit_clobber_object_begin (tree decl, gimple_seq *pre_p)
{
  /* Only for automatic variables that are nontrivially initialized but
     not yet fully initialized here.  */
  if (VAR_P (decl)
      && auto_var_p (decl)
      && TREE_TYPE (decl) != error_mark_node
      && DECL_NONTRIVIALLY_INITIALIZED_P (decl)
      /* Don't do it if it is fully initialized.  */
      && DECL_INITIAL (decl) == NULL_TREE
      && !DECL_HAS_VALUE_EXPR_P (decl)
      && !OPAQUE_TYPE_P (TREE_TYPE (decl))
      /* Nor going to have decl = .DEFERRED_INIT (...); added.  */
      && (flag_auto_var_init == AUTO_INIT_UNINITIALIZED
	  || lookup_attribute ("uninitialized", DECL_ATTRIBUTES (decl))
	  || lookup_attribute ("indeterminate", DECL_ATTRIBUTES (decl))))
    {
      /* Only class (or array-of-class) objects with actual storage get
	 the begin-of-lifetime clobber.  */
      tree eltype = strip_array_types (TREE_TYPE (decl));
      if (RECORD_OR_UNION_TYPE_P (eltype)
	  && !is_empty_class (eltype))
	{
	  tree clobber
	    = build_clobber (TREE_TYPE (decl), CLOBBER_OBJECT_BEGIN);
	  gimple *g = gimple_build_assign (decl, clobber);
	  gimple_set_location (g, DECL_SOURCE_LOCATION (decl));
	  gimple_seq_add_stmt_without_update (pre_p, g);
	}
    }
}
672 :
673 : /* Do C++-specific gimplification. Args are as for gimplify_expr. */
674 :
675 : int
676 168198962 : cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
677 : {
678 168198962 : int saved_stmts_are_full_exprs_p = 0;
679 168198962 : location_t loc = cp_expr_loc_or_input_loc (*expr_p);
680 168198962 : enum tree_code code = TREE_CODE (*expr_p);
681 168198962 : enum gimplify_status ret;
682 :
683 168198962 : if (STATEMENT_CODE_P (code))
684 : {
685 3929474 : saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
686 7858948 : current_stmt_tree ()->stmts_are_full_exprs_p
687 3929474 : = STMT_IS_FULL_EXPR_P (*expr_p);
688 : }
689 :
690 168198962 : switch (code)
691 : {
692 339720 : case AGGR_INIT_EXPR:
693 339720 : simplify_aggr_init_expr (expr_p);
694 339720 : ret = GS_OK;
695 339720 : break;
696 :
697 0 : case VEC_INIT_EXPR:
698 0 : {
699 0 : *expr_p = expand_vec_init_expr (NULL_TREE, *expr_p,
700 : tf_warning_or_error);
701 :
702 0 : cp_fold_data data (ff_genericize | ff_mce_false);
703 0 : cp_walk_tree (expr_p, cp_fold_r, &data, NULL);
704 0 : cp_genericize_tree (expr_p, false);
705 0 : copy_if_shared (expr_p);
706 0 : ret = GS_OK;
707 0 : }
708 0 : break;
709 :
710 18783 : case THROW_EXPR:
711 : /* FIXME communicate throw type to back end, probably by moving
712 : THROW_EXPR into ../tree.def. */
713 18783 : *expr_p = TREE_OPERAND (*expr_p, 0);
714 18783 : ret = GS_OK;
715 18783 : break;
716 :
717 562821 : case MUST_NOT_THROW_EXPR:
718 562821 : ret = gimplify_must_not_throw_expr (expr_p, pre_p);
719 562821 : break;
720 :
721 : /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
722 : LHS of an assignment might also be involved in the RHS, as in bug
723 : 25979. */
724 11372486 : case INIT_EXPR:
725 11372486 : cp_gimplify_init_expr (expr_p);
726 11372486 : if (TREE_CODE (*expr_p) != INIT_EXPR)
727 : return GS_OK;
728 : /* Fall through. */
729 15259637 : case MODIFY_EXPR:
730 11264980 : modify_expr_case:
731 15259637 : {
732 : /* If the back end isn't clever enough to know that the lhs and rhs
733 : types are the same, add an explicit conversion. */
734 15259637 : tree op0 = TREE_OPERAND (*expr_p, 0);
735 15259637 : tree op1 = TREE_OPERAND (*expr_p, 1);
736 :
737 15259637 : if (!error_operand_p (op0)
738 15259637 : && !error_operand_p (op1)
739 15259618 : && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
740 15256573 : || TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
741 15262691 : && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
742 9 : TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
743 9 : TREE_TYPE (op0), op1);
744 :
745 15259628 : else if (simple_empty_class_p (TREE_TYPE (op0), op1, code))
746 : {
747 195393 : while (TREE_CODE (op1) == TARGET_EXPR)
748 : /* We're disconnecting the initializer from its target,
749 : don't create a temporary. */
750 9228 : op1 = TARGET_EXPR_INITIAL (op1);
751 :
752 : /* Remove any copies of empty classes. Also drop volatile
753 : variables on the RHS to avoid infinite recursion from
754 : gimplify_expr trying to load the value. */
755 186165 : if (TREE_SIDE_EFFECTS (op1))
756 : {
757 14114 : if (TREE_THIS_VOLATILE (op1)
758 0 : && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
759 0 : op1 = build_fold_addr_expr (op1);
760 :
761 14114 : suppress_warning (op1, OPT_Wunused_result);
762 14114 : gimplify_and_add (op1, pre_p);
763 : }
764 186165 : gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
765 : is_gimple_lvalue, fb_lvalue);
766 186165 : *expr_p = TREE_OPERAND (*expr_p, 0);
767 186165 : if (code == RETURN_EXPR && REFERENCE_CLASS_P (*expr_p))
768 : /* Avoid 'return *<retval>;' */
769 6 : *expr_p = TREE_OPERAND (*expr_p, 0);
770 : }
771 : /* P0145 says that the RHS is sequenced before the LHS.
772 : gimplify_modify_expr gimplifies the RHS before the LHS, but that
773 : isn't quite strong enough in two cases:
774 :
775 : 1) gimplify.cc wants to leave a CALL_EXPR on the RHS, which would
776 : mean it's evaluated after the LHS.
777 :
778 : 2) the value calculation of the RHS is also sequenced before the
779 : LHS, so for scalar assignment we need to preevaluate if the
780 : RHS could be affected by LHS side-effects even if it has no
781 : side-effects of its own. We don't need this for classes because
782 : class assignment takes its RHS by reference. */
783 15073463 : else if (flag_strong_eval_order > 1
784 13707090 : && TREE_CODE (*expr_p) == MODIFY_EXPR
785 2539080 : && lvalue_has_side_effects (op0)
786 15109017 : && (TREE_CODE (op1) == CALL_EXPR
787 28650 : || (SCALAR_TYPE_P (TREE_TYPE (op1))
788 22642 : && !TREE_CONSTANT (op1))))
789 20019 : TREE_OPERAND (*expr_p, 1) = get_initialized_tmp_var (op1, pre_p);
790 : }
791 : ret = GS_OK;
792 : break;
793 :
794 82367 : case EMPTY_CLASS_EXPR:
795 : /* We create an empty CONSTRUCTOR with RECORD_TYPE. */
796 82367 : *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
797 82367 : ret = GS_OK;
798 82367 : break;
799 :
800 0 : case BASELINK:
801 0 : *expr_p = BASELINK_FUNCTIONS (*expr_p);
802 0 : ret = GS_OK;
803 0 : break;
804 :
805 17272 : case TRY_BLOCK:
806 17272 : genericize_try_block (expr_p);
807 17272 : ret = GS_OK;
808 17272 : break;
809 :
810 20295 : case HANDLER:
811 20295 : genericize_catch_block (expr_p);
812 20295 : ret = GS_OK;
813 20295 : break;
814 :
815 5782 : case EH_SPEC_BLOCK:
816 5782 : genericize_eh_spec_block (expr_p);
817 5782 : ret = GS_OK;
818 5782 : break;
819 :
820 0 : case USING_STMT:
821 0 : gcc_unreachable ();
822 :
823 0 : case FOR_STMT:
824 0 : case WHILE_STMT:
825 0 : case DO_STMT:
826 0 : case SWITCH_STMT:
827 0 : case CONTINUE_STMT:
828 0 : case BREAK_STMT:
829 0 : gcc_unreachable ();
830 :
831 45263 : case OMP_FOR:
832 45263 : case OMP_SIMD:
833 45263 : case OMP_DISTRIBUTE:
834 45263 : case OMP_LOOP:
835 45263 : case OMP_TASKLOOP:
836 45263 : case OMP_TILE:
837 45263 : case OMP_UNROLL:
838 45263 : ret = cp_gimplify_omp_for (expr_p, pre_p);
839 45263 : break;
840 :
841 3886125 : case EXPR_STMT:
842 3886125 : gimplify_expr_stmt (expr_p);
843 3886125 : ret = GS_OK;
844 3886125 : break;
845 :
846 0 : case UNARY_PLUS_EXPR:
847 0 : {
848 0 : tree arg = TREE_OPERAND (*expr_p, 0);
849 0 : tree type = TREE_TYPE (*expr_p);
850 0 : *expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
851 : : arg;
852 0 : ret = GS_OK;
853 : }
854 0 : break;
855 :
856 : case CALL_EXPR:
857 : ret = GS_OK;
858 : /* At this point any function that takes/returns a consteval-only
859 : expression is a problem. */
860 21479790 : for (int i = 0; i < call_expr_nargs (*expr_p); ++i)
861 13093385 : if (check_out_of_consteval_use (CALL_EXPR_ARG (*expr_p, i)))
862 5 : ret = GS_ERROR;
863 8386405 : if (consteval_only_p (TREE_TYPE (*expr_p)))
864 1 : ret = GS_ERROR;
865 8386405 : if (flag_strong_eval_order == 2
866 7879485 : && CALL_EXPR_FN (*expr_p)
867 7548523 : && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p)
868 15183869 : && cp_get_callee_fndecl_nofold (*expr_p) == NULL_TREE)
869 : {
870 36464 : tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
871 36464 : enum gimplify_status t
872 36464 : = gimplify_to_rvalue (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
873 : is_gimple_call_addr);
874 36464 : if (t == GS_ERROR)
875 : ret = GS_ERROR;
876 : /* GIMPLE considers most pointer conversion useless, but for
877 : calls we actually care about the exact function pointer type. */
878 36461 : else if (TREE_TYPE (CALL_EXPR_FN (*expr_p)) != fnptrtype)
879 8970 : CALL_EXPR_FN (*expr_p)
880 17940 : = build1 (NOP_EXPR, fnptrtype, CALL_EXPR_FN (*expr_p));
881 : }
882 8386405 : if (!CALL_EXPR_FN (*expr_p))
883 : /* Internal function call. */;
884 8032907 : else if (CALL_EXPR_REVERSE_ARGS (*expr_p))
885 : {
886 : /* This is a call to a (compound) assignment operator that used
887 : the operator syntax; gimplify the RHS first. */
888 45890 : gcc_assert (call_expr_nargs (*expr_p) == 2);
889 45890 : gcc_assert (!CALL_EXPR_ORDERED_ARGS (*expr_p));
890 45890 : enum gimplify_status t
891 45890 : = cp_gimplify_arg (&CALL_EXPR_ARG (*expr_p, 1), pre_p, loc,
892 45890 : TREE_SIDE_EFFECTS (CALL_EXPR_ARG (*expr_p, 0)));
893 45890 : if (t == GS_ERROR)
894 : ret = GS_ERROR;
895 : }
896 7987017 : else if (CALL_EXPR_ORDERED_ARGS (*expr_p))
897 : {
898 : /* Leave the last argument for gimplify_call_expr, to avoid problems
899 : with __builtin_va_arg_pack(). */
900 205093 : int nargs = call_expr_nargs (*expr_p) - 1;
901 205093 : int last_side_effects_arg = -1;
902 407192 : for (int i = nargs; i > 0; --i)
903 222233 : if (TREE_SIDE_EFFECTS (CALL_EXPR_ARG (*expr_p, i)))
904 : {
905 : last_side_effects_arg = i;
906 : break;
907 : }
908 436945 : for (int i = 0; i < nargs; ++i)
909 : {
910 231852 : enum gimplify_status t
911 231852 : = cp_gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc,
912 : i < last_side_effects_arg);
913 231852 : if (t == GS_ERROR)
914 0 : ret = GS_ERROR;
915 : }
916 : }
917 7781924 : else if (flag_strong_eval_order
918 7781924 : && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p))
919 : {
920 : /* If flag_strong_eval_order, evaluate the object argument first. */
921 7218630 : tree fntype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
922 7218630 : if (INDIRECT_TYPE_P (fntype))
923 7218627 : fntype = TREE_TYPE (fntype);
924 7218630 : tree decl = cp_get_callee_fndecl_nofold (*expr_p);
925 : /* We can't just rely on 'decl' because virtual function callees
926 : are expressed as OBJ_TYPE_REF. Note that the xobj memfn check
927 : will also hold for calls of the form (&A::f)(a, ...) which does
928 : not require such sequencing, though it's allowed under
929 : "indeterminately sequenced". */
930 7218630 : if (TREE_CODE (fntype) == METHOD_TYPE
931 7218630 : || (decl && DECL_LANG_SPECIFIC (decl)
932 3384640 : && DECL_XOBJ_MEMBER_FUNCTION_P (decl)))
933 : {
934 3822565 : int nargs = call_expr_nargs (*expr_p);
935 3822565 : bool side_effects = false;
936 5332313 : for (int i = 1; i < nargs; ++i)
937 1883970 : if (TREE_SIDE_EFFECTS (CALL_EXPR_ARG (*expr_p, i)))
938 : {
939 : side_effects = true;
940 : break;
941 : }
942 3822565 : enum gimplify_status t
943 3822565 : = cp_gimplify_arg (&CALL_EXPR_ARG (*expr_p, 0), pre_p, loc,
944 : side_effects);
945 3822565 : if (t == GS_ERROR)
946 : ret = GS_ERROR;
947 : }
948 : }
949 8386405 : if (ret != GS_ERROR)
950 : {
951 8386396 : tree decl = cp_get_callee_fndecl_nofold (*expr_p);
952 8386396 : if (!decl)
953 : break;
954 7991493 : if (fndecl_built_in_p (decl, BUILT_IN_FRONTEND))
955 4 : switch (DECL_FE_FUNCTION_CODE (decl))
956 : {
957 0 : case CP_BUILT_IN_IS_CONSTANT_EVALUATED:
958 0 : *expr_p = boolean_false_node;
959 0 : break;
960 0 : case CP_BUILT_IN_SOURCE_LOCATION:
961 0 : *expr_p
962 0 : = fold_builtin_source_location (*expr_p);
963 0 : break;
964 0 : case CP_BUILT_IN_IS_CORRESPONDING_MEMBER:
965 0 : *expr_p
966 0 : = fold_builtin_is_corresponding_member
967 0 : (EXPR_LOCATION (*expr_p), call_expr_nargs (*expr_p),
968 : &CALL_EXPR_ARG (*expr_p, 0));
969 0 : break;
970 0 : case CP_BUILT_IN_IS_POINTER_INTERCONVERTIBLE_WITH_CLASS:
971 0 : *expr_p
972 0 : = fold_builtin_is_pointer_inverconvertible_with_class
973 0 : (EXPR_LOCATION (*expr_p), call_expr_nargs (*expr_p),
974 : &CALL_EXPR_ARG (*expr_p, 0));
975 0 : break;
976 0 : case CP_BUILT_IN_EH_PTR_ADJUST_REF:
977 0 : error_at (EXPR_LOCATION (*expr_p),
978 : "%qs used outside of constant expressions",
979 : "__builtin_eh_ptr_adjust_ref");
980 0 : *expr_p = void_node;
981 0 : break;
982 4 : case CP_BUILT_IN_CURRENT_EXCEPTION:
983 4 : case CP_BUILT_IN_UNCAUGHT_EXCEPTIONS:
984 4 : {
985 4 : const char *name
986 : = (DECL_FE_FUNCTION_CODE (decl)
987 : == CP_BUILT_IN_CURRENT_EXCEPTION
988 4 : ? "current_exception" : "uncaught_exceptions");
989 4 : tree newdecl = lookup_qualified_name (std_node, name);
990 4 : if (error_operand_p (newdecl))
991 0 : *expr_p = build_zero_cst (TREE_TYPE (*expr_p));
992 4 : else if (TREE_CODE (newdecl) != FUNCTION_DECL
993 4 : || !same_type_p (TREE_TYPE (TREE_TYPE (newdecl)),
994 : TREE_TYPE (TREE_TYPE (decl)))
995 8 : || (TYPE_ARG_TYPES (TREE_TYPE (newdecl))
996 4 : != void_list_node))
997 : {
998 0 : error_at (EXPR_LOCATION (*expr_p),
999 : "unexpected %<std::%s%> declaration",
1000 : name);
1001 0 : *expr_p = build_zero_cst (TREE_TYPE (*expr_p));
1002 : }
1003 : else
1004 4 : *expr_p = build_call_expr_loc (EXPR_LOCATION (*expr_p),
1005 : newdecl, 0);
1006 : break;
1007 : }
1008 0 : case CP_BUILT_IN_IS_STRING_LITERAL:
1009 0 : *expr_p
1010 0 : = fold_builtin_is_string_literal (EXPR_LOCATION (*expr_p),
1011 0 : call_expr_nargs (*expr_p),
1012 : &CALL_EXPR_ARG (*expr_p,
1013 : 0));
1014 0 : break;
1015 0 : case CP_BUILT_IN_CONSTEXPR_DIAG:
1016 0 : *expr_p = void_node;
1017 0 : break;
1018 : default:
1019 : break;
1020 : }
1021 7991489 : else if (fndecl_built_in_p (decl, BUILT_IN_CLZG, BUILT_IN_CTZG))
1022 33 : ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p,
1023 : post_p);
1024 : else
1025 : /* All consteval functions should have been processed by now. */
1026 7991456 : gcc_checking_assert (!immediate_invocation_p (decl));
1027 : }
1028 : break;
1029 :
1030 630032 : case TARGET_EXPR:
1031 : /* A TARGET_EXPR that expresses direct-initialization should have been
1032 : elided by cp_gimplify_init_expr. */
1033 630032 : gcc_checking_assert (!TARGET_EXPR_DIRECT_INIT_P (*expr_p));
1034 : /* Likewise, but allow extra temps of trivial type so that
1035 : gimplify_init_ctor_preeval can materialize subobjects of a CONSTRUCTOR
1036 : on the rhs of an assignment, as in constexpr-aggr1.C. */
1037 630032 : gcc_checking_assert (!TARGET_EXPR_ELIDING_P (*expr_p)
1038 : || !TREE_ADDRESSABLE (TREE_TYPE (*expr_p)));
1039 630032 : if (flag_lifetime_dse > 1
1040 629904 : && TARGET_EXPR_INITIAL (*expr_p)
1041 1248435 : && VOID_TYPE_P (TREE_TYPE (TARGET_EXPR_INITIAL (*expr_p))))
1042 233820 : maybe_emit_clobber_object_begin (TARGET_EXPR_SLOT (*expr_p), pre_p);
1043 : ret = GS_UNHANDLED;
1044 : break;
1045 :
1046 3 : case PTRMEM_CST:
1047 3 : *expr_p = cplus_expand_constant (*expr_p);
1048 3 : if (TREE_CODE (*expr_p) == PTRMEM_CST)
1049 : ret = GS_ERROR;
1050 : else
1051 19629984 : ret = GS_OK;
1052 : break;
1053 :
1054 1121880 : case DECL_EXPR:
1055 1121880 : if (flag_lifetime_dse > 1)
1056 1121652 : maybe_emit_clobber_object_begin (DECL_EXPR_DECL (*expr_p), pre_p);
1057 : ret = GS_UNHANDLED;
1058 : break;
1059 :
1060 1178518 : case RETURN_EXPR:
1061 1178518 : if (TREE_OPERAND (*expr_p, 0)
1062 1178518 : && (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == INIT_EXPR
1063 12536 : || TREE_CODE (TREE_OPERAND (*expr_p, 0)) == MODIFY_EXPR))
1064 : {
1065 1115004 : expr_p = &TREE_OPERAND (*expr_p, 0);
1066 : /* Avoid going through the INIT_EXPR case, which can
1067 : degrade INIT_EXPRs into AGGR_INIT_EXPRs. */
1068 1115004 : goto modify_expr_case;
1069 : }
1070 : /* Fall through. */
1071 :
1072 137715071 : default:
1073 137715071 : ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
1074 137715071 : break;
1075 : }
1076 :
1077 : /* Restore saved state. */
1078 168091456 : if (STATEMENT_CODE_P (code))
1079 3929474 : current_stmt_tree ()->stmts_are_full_exprs_p
1080 3929474 : = saved_stmts_are_full_exprs_p;
1081 :
1082 : return ret;
1083 : }
1084 :
1085 : bool
1086 1925667209 : is_invisiref_parm (const_tree t)
1087 : {
1088 1804858496 : return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
1089 1968290396 : && DECL_BY_REFERENCE (t));
1090 : }
1091 :
1092 : /* A stable comparison routine for use with splay trees and DECLs. */
1093 :
1094 : static int
1095 62002 : splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
1096 : {
1097 62002 : tree a = (tree) xa;
1098 62002 : tree b = (tree) xb;
1099 :
1100 62002 : return DECL_UID (a) - DECL_UID (b);
1101 : }
1102 :
/* OpenMP context during genericization.  One of these is pushed for each
   parallel/task region; omp_cxx_notice_variable consults the chain to
   decide whether a referenced variable is implicitly firstprivate.  */

struct cp_genericize_omp_taskreg
{
  /* True for a parallel region, false for a task region; the outer-region
     walk in omp_cxx_notice_variable stops at the innermost parallel.  */
  bool is_parallel;
  /* True if variables not otherwise determined default to shared here.  */
  bool default_shared;
  /* Enclosing parallel/task region, or NULL if this is the outermost.  */
  struct cp_genericize_omp_taskreg *outer;
  /* Per-region map from variable DECL to its OMP_CLAUSE_DEFAULT_* sharing
     flag (see omp_cxx_notice_variable); ordered by DECL_UID -- presumably
     via splay_tree_compare_decl_uid above, confirm at the creation site.  */
  splay_tree variables;
};
1112 :
1113 : /* Return true if genericization should try to determine if
1114 : DECL is firstprivate or shared within task regions. */
1115 :
1116 : static bool
1117 118969 : omp_var_to_track (tree decl)
1118 : {
1119 118969 : tree type = TREE_TYPE (decl);
1120 118969 : if (is_invisiref_parm (decl))
1121 537 : type = TREE_TYPE (type);
1122 118432 : else if (TYPE_REF_P (type))
1123 4186 : type = TREE_TYPE (type);
1124 143912 : while (TREE_CODE (type) == ARRAY_TYPE)
1125 24943 : type = TREE_TYPE (type);
1126 118969 : if (type == error_mark_node || !CLASS_TYPE_P (type))
1127 : return false;
1128 13912 : if (VAR_P (decl) && CP_DECL_THREAD_LOCAL_P (decl))
1129 : return false;
1130 13909 : if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
1131 : return false;
1132 : return true;
1133 : }
1134 :
/* Note DECL use in OpenMP region OMP_CTX during genericization.  On the
   first use of DECL in a region, decide whether it is implicitly shared
   or firstprivate there and record that in OMP_CTX->variables; for
   firstprivate class-type variables, instantiate the copy ctor and dtor
   now, since it is too late to do so during gimplification.  */

static void
omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
{
  splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
					 (splay_tree_key) decl);
  if (n == NULL)
    {
      /* First use of DECL in this region.  */
      int flags = OMP_CLAUSE_DEFAULT_SHARED;
      /* Propagate the use outward first, so lookups below see it.  */
      if (omp_ctx->outer)
	omp_cxx_notice_variable (omp_ctx->outer, decl);
      if (!omp_ctx->default_shared)
	{
	  struct cp_genericize_omp_taskreg *octx;

	  /* Walk outward, up to and including the innermost parallel:
	     if any enclosing region already treats DECL as not shared,
	     it is firstprivate here.  */
	  for (octx = omp_ctx->outer; octx; octx = octx->outer)
	    {
	      n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
	      if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
		{
		  flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
		  break;
		}
	      if (octx->is_parallel)
		break;
	    }
	  /* No enclosing parallel at all: parameters and function-local
	     automatic variables are firstprivate.  */
	  if (octx == NULL
	      && (TREE_CODE (decl) == PARM_DECL
		  || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
		      && DECL_CONTEXT (decl) == current_function_decl)))
	    flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
	  if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
	    {
	      /* DECL is implicitly determined firstprivate in
		 the current task construct.  Ensure copy ctor and
		 dtor are instantiated, because during gimplification
		 it will be already too late.  */
	      tree type = TREE_TYPE (decl);
	      /* Same type stripping as omp_var_to_track.  */
	      if (is_invisiref_parm (decl))
		type = TREE_TYPE (type);
	      else if (TYPE_REF_P (type))
		type = TREE_TYPE (type);
	      while (TREE_CODE (type) == ARRAY_TYPE)
		type = TREE_TYPE (type);
	      get_copy_ctor (type, tf_none);
	      get_dtor (type, tf_none);
	    }
	}
      /* Record the determination for this region.  */
      splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
    }
}
1187 :
1188 : /* True if any of the element initializers in CTOR are TARGET_EXPRs that are
1189 : not expected to elide, e.g. because unsafe_copy_elision_p is true. */
1190 :
1191 : static bool
1192 134616 : any_non_eliding_target_exprs (tree ctor)
1193 : {
1194 609990 : for (const constructor_elt &e : *CONSTRUCTOR_ELTS (ctor))
1195 : {
1196 475377 : if (TREE_CODE (e.value) == TARGET_EXPR
1197 475377 : && !TARGET_EXPR_ELIDING_P (e.value))
1198 : return true;
1199 : }
1200 : return false;
1201 : }
1202 :
1203 : /* If we might need to clean up a partially constructed object, break down the
1204 : CONSTRUCTOR with split_nonconstant_init. Also expand VEC_INIT_EXPR at this
1205 : point. If initializing TO with FROM is non-trivial, overwrite *REPLACE with
1206 : the result. */
1207 :
1208 : static void
1209 80299156 : cp_genericize_init (tree *replace, tree from, tree to, vec<tree,va_gc>** flags)
1210 : {
1211 80299156 : tree init = NULL_TREE;
1212 80299156 : if (TREE_CODE (from) == VEC_INIT_EXPR)
1213 1069 : init = expand_vec_init_expr (to, from, tf_warning_or_error, flags);
1214 80298087 : else if (TREE_CODE (from) == CONSTRUCTOR
1215 4666770 : && TREE_SIDE_EFFECTS (from)
1216 80433924 : && ((flag_exceptions
1217 135778 : && TYPE_HAS_NONTRIVIAL_DESTRUCTOR (TREE_TYPE (from)))
1218 134616 : || any_non_eliding_target_exprs (from)))
1219 : {
1220 1224 : to = cp_stabilize_reference (to);
1221 1224 : replace_placeholders (from, to);
1222 1224 : init = split_nonconstant_init (to, from);
1223 : }
1224 :
1225 2293 : if (init)
1226 : {
1227 2293 : if (*replace == from)
1228 : /* Make cp_gimplify_init_expr call replace_decl on this
1229 : TARGET_EXPR_INITIAL. */
1230 673 : init = fold_convert (void_type_node, init);
1231 2293 : *replace = init;
1232 : }
1233 80299156 : }
1234 :
1235 : /* For an INIT_EXPR, replace the INIT_EXPR itself. */
1236 :
1237 : static void
1238 59555674 : cp_genericize_init_expr (tree *stmt_p)
1239 : {
1240 59555674 : iloc_sentinel ils = EXPR_LOCATION (*stmt_p);
1241 59555674 : tree to = TREE_OPERAND (*stmt_p, 0);
1242 59555674 : tree from = TREE_OPERAND (*stmt_p, 1);
1243 7144232 : if (SIMPLE_TARGET_EXPR_P (from)
1244 : /* Return gets confused if we clobber its INIT_EXPR this soon. */
1245 65171966 : && TREE_CODE (to) != RESULT_DECL)
1246 306214 : from = TARGET_EXPR_INITIAL (from);
1247 59555674 : cp_genericize_init (stmt_p, from, to, nullptr);
1248 59555674 : }
1249 :
1250 : /* For a TARGET_EXPR, change the TARGET_EXPR_INITIAL. We will need to use
1251 : replace_decl later when we know what we're initializing. */
1252 :
1253 : static void
1254 20743482 : cp_genericize_target_expr (tree *stmt_p)
1255 : {
1256 20743482 : iloc_sentinel ils = EXPR_LOCATION (*stmt_p);
1257 20743482 : tree slot = TARGET_EXPR_SLOT (*stmt_p);
1258 20743482 : vec<tree, va_gc> *flags = make_tree_vector ();
1259 20743482 : cp_genericize_init (&TARGET_EXPR_INITIAL (*stmt_p),
1260 20743482 : TARGET_EXPR_INITIAL (*stmt_p), slot, &flags);
1261 20743482 : gcc_assert (!DECL_INITIAL (slot));
1262 62230478 : for (tree f : flags)
1263 : {
1264 : /* Once initialization is complete TARGET_EXPR_CLEANUP becomes active, so
1265 : disable any subobject cleanups. */
1266 32 : tree d = build_disable_temp_cleanup (f);
1267 32 : auto &r = TARGET_EXPR_INITIAL (*stmt_p);
1268 32 : r = add_stmt_to_compound (r, d);
1269 : }
1270 20743482 : release_tree_vector (flags);
1271 20743482 : }
1272 :
1273 : /* Similar to if (target_expr_needs_replace) replace_decl, but TP is the
1274 : TARGET_EXPR_INITIAL, and this also updates *_SLOT. We need this extra
1275 : replacement when cp_folding TARGET_EXPR to preserve the invariant that
1276 : AGGR_INIT_EXPR_SLOT agrees with the enclosing TARGET_EXPR_SLOT. */
1277 :
1278 : static bool
1279 126 : maybe_replace_decl (tree *tp, tree decl, tree replacement)
1280 : {
1281 126 : if (!*tp || !VOID_TYPE_P (TREE_TYPE (*tp)))
1282 : return false;
1283 : tree t = *tp;
1284 46 : while (TREE_CODE (t) == COMPOUND_EXPR)
1285 0 : t = TREE_OPERAND (t, 1);
1286 46 : if (TREE_CODE (t) == AGGR_INIT_EXPR)
1287 46 : replace_decl (&AGGR_INIT_EXPR_SLOT (t), decl, replacement);
1288 0 : else if (TREE_CODE (t) == VEC_INIT_EXPR)
1289 0 : replace_decl (&VEC_INIT_EXPR_SLOT (t), decl, replacement);
1290 : else
1291 0 : replace_decl (tp, decl, replacement);
1292 : return true;
1293 : }
1294 :
/* Genericization context, threaded through the genericization tree walk.  */

struct cp_genericize_data
{
  /* Set of trees already visited by the walk -- presumably to avoid
     reprocessing; confirm against cp_genericize_r (outside this chunk).  */
  hash_set<tree> *p_set;
  /* Stack of enclosing BIND_EXPRs, innermost last -- TODO confirm usage
     at the walk callback.  */
  auto_vec<tree> bind_expr_stack;
  /* Innermost enclosing OpenMP parallel/task region, or NULL.  */
  struct cp_genericize_omp_taskreg *omp_ctx;
  /* NOTE(review): looks like the innermost enclosing try block, if any --
     verify against the TRY_BLOCK handling in the walk callback.  */
  tree try_block;
  /* True while inside a region where sanitization is suppressed --
     TODO confirm which attribute/pragma sets this.  */
  bool no_sanitize_p;
  /* Whether the walk should rewrite uses of invisible-reference parms
     (see is_invisiref_parm) -- TODO confirm at the caller.  */
  bool handle_invisiref_parm_p;
};
1306 :
1307 : /* Emit an error about taking the address of an immediate function.
1308 : EXPR is the whole expression; DECL is the immediate function. */
1309 :
1310 : static void
1311 63 : taking_address_of_imm_fn_error (tree expr, tree decl)
1312 : {
1313 63 : auto_diagnostic_group d;
1314 63 : const location_t loc = (TREE_CODE (expr) == PTRMEM_CST
1315 63 : ? PTRMEM_CST_LOCATION (expr)
1316 63 : : EXPR_LOCATION (expr));
1317 63 : error_at (loc, "taking address of an immediate function %qD", decl);
1318 63 : maybe_explain_promoted_consteval (loc, decl);
1319 63 : }
1320 :
1321 : /* Build up an INIT_EXPR to initialize the object of a constructor call that
1322 : has been folded to a constant value. CALL is the CALL_EXPR for the
1323 : constructor call; INIT is the value. */
1324 :
1325 : static tree
1326 356 : cp_build_init_expr_for_ctor (tree call, tree init)
1327 : {
1328 356 : tree a = CALL_EXPR_ARG (call, 0);
1329 356 : if (is_dummy_object (a))
1330 : return init;
1331 356 : const bool return_this = targetm.cxx.cdtor_returns_this ();
1332 356 : const location_t loc = EXPR_LOCATION (call);
1333 356 : if (return_this)
1334 0 : a = cp_save_expr (a);
1335 356 : tree s = build_fold_indirect_ref_loc (loc, a);
1336 356 : init = cp_build_init_expr (s, init);
1337 356 : if (return_this)
1338 : {
1339 0 : init = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (call), init,
1340 0 : fold_convert_loc (loc, TREE_TYPE (call), a));
1341 0 : suppress_warning (init);
1342 : }
1343 : return init;
1344 : }
1345 :
1346 : /* For every DECL_EXPR check if it declares a consteval-only variable and
1347 : if so, overwrite it with a no-op. The point here is not to leak
1348 : consteval-only variables into the middle end. */
1349 :
1350 : static tree
1351 428244 : wipe_consteval_only_r (tree *stmt_p, int *, void *)
1352 : {
1353 428244 : if (TREE_CODE (*stmt_p) == DECL_EXPR)
1354 : {
1355 761 : tree d = DECL_EXPR_DECL (*stmt_p);
1356 761 : if (VAR_P (d) && consteval_only_p (d))
1357 : /* Wipe the DECL_EXPR so that it doesn't get into gimple. */
1358 3 : *stmt_p = void_node;
1359 : }
1360 428244 : return NULL_TREE;
1361 : }
1362 :
/* A walk_tree callback for cp_fold_function and cp_fully_fold_init to handle
   immediate functions: evaluate immediate invocations, diagnose invalid
   uses of immediate functions, and record calls to functions that might
   still be promoted to consteval.  DATA_ points to a cp_fold_data.
   Returning a non-NULL tree stops the walk (walk_tree convention);
   setting *WALK_SUBTREES to 0 skips STMT_P's subtrees.  */

static tree
cp_fold_immediate_r (tree *stmt_p, int *walk_subtrees, void *data_)
{
  auto data = static_cast<cp_fold_data *>(data_);
  tree stmt = *stmt_p;
  /* The purpose of this is not to emit errors for mce_unknown.  */
  const tsubst_flags_t complain = (data->flags & ff_mce_false
				   ? tf_error : tf_none);
  const tree_code code = TREE_CODE (stmt);

  /* No need to look into types or unevaluated operands.
     NB: This affects cp_fold_r as well.  */
  if (TYPE_P (stmt)
      || unevaluated_p (code)
      /* We do not use in_immediate_context here because it checks
	 more than is desirable, e.g., sk_template_parms.  */
      || cp_unevaluated_operand
      || (current_function_decl
	  && DECL_IMMEDIATE_FUNCTION_P (current_function_decl)))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  /* Most invalid uses of consteval-only types should have been already
     detected at this point.  And the valid ones won't be needed
     anymore.  */
  if (flag_reflection
      && complain
      && (data->flags & ff_genericize)
      && TREE_CODE (stmt) == STATEMENT_LIST)
    for (tree s : tsi_range (stmt))
      if (check_out_of_consteval_use (s))
	*stmt_p = void_node;

  /* DECL is the function whose address is taken or which is called;
     CALL_P distinguishes a call from an address-taking.  */
  tree decl = NULL_TREE;
  bool call_p = false;

  /* We are looking for &fn or fn().  */
  switch (code)
    {
    case DECL_EXPR:
      /* Clear consteval-only DECL_EXPRs.  */
      if (flag_reflection)
	{
	  tree d = DECL_EXPR_DECL (stmt);
	  if (VAR_P (d) && consteval_only_p (d))
	    *stmt_p = void_node;
	}
      break;
    case CALL_EXPR:
    case AGGR_INIT_EXPR:
      if (tree fn = cp_get_callee (stmt))
	if (TREE_CODE (fn) != ADDR_EXPR || ADDR_EXPR_DENOTES_CALL_P (fn))
	  decl = cp_get_fndecl_from_callee (fn, /*fold*/false);
      call_p = true;
      break;
    case PTRMEM_CST:
      decl = PTRMEM_CST_MEMBER (stmt);
      break;
    case ADDR_EXPR:
      if (!ADDR_EXPR_DENOTES_CALL_P (stmt))
	decl = TREE_OPERAND (stmt, 0);
      break;
    case IF_STMT:
      if (IF_STMT_CONSTEVAL_P (stmt))
	{
	  /* For a consteval if, only the else branch survives to runtime;
	     walk only it (plus a DECL_EXPR sweep of the then branch).  */
	  if (!data->pset.add (stmt))
	    {
	      cp_walk_tree (&ELSE_CLAUSE (stmt), cp_fold_immediate_r, data_,
			    nullptr);
	      if (flag_reflection)
		/* Check & clear consteval-only DECL_EXPRs even here,
		   because we wouldn't be walking this subtree otherwise.  */
		cp_walk_tree (&THEN_CLAUSE (stmt), wipe_consteval_only_r,
			      data_, nullptr);
	    }
	  *walk_subtrees = 0;
	  return NULL_TREE;
	}
      /* FALLTHRU */
    default:
      /* Not a form we care about; skip subtrees we've already seen.  */
      if (data->pset.add (stmt))
	*walk_subtrees = 0;
      return NULL_TREE;
    }

  if (!decl || TREE_CODE (decl) != FUNCTION_DECL)
    return NULL_TREE;

  /* Fully escalate once all templates have been instantiated.  What we're
     calling is not a consteval function but it may become one.  This
     requires recursing; DECL may be promoted to consteval because it
     contains an escalating expression E, but E itself may have to be
     promoted first, etc.  */
  if (at_eof > 1 && unchecked_immediate_escalating_function_p (decl))
    {
      /* Set before the actual walk to avoid endless recursion.  */
      DECL_ESCALATION_CHECKED_P (decl) = true;
      /* We're only looking for the first escalating expression.  Let us not
	 walk more trees than necessary, hence mce_unknown.  */
      cp_fold_immediate (&DECL_SAVED_TREE (decl), mce_unknown, decl);
    }

  /* [expr.const]p16 "An expression or conversion is immediate-escalating if
     it is not initially in an immediate function context and it is either
     -- an immediate invocation that is not a constant expression and is not
     a subexpression of an immediate invocation."

     If we are in an immediate-escalating function, the immediate-escalating
     expression or conversion makes it an immediate function.  So STMT does
     not need to produce a constant expression.  */
  if (DECL_IMMEDIATE_FUNCTION_P (decl))
    {
      tree e = cxx_constant_value (stmt, tf_none);
      if (e == error_mark_node)
	{
	  /* This takes care of, e.g.,
	       template <typename T>
	       constexpr int f(T t)
	       {
		 return id(t);
	       }
	     where id (consteval) causes f<int> to be promoted.  */
	  if (immediate_escalating_function_p (current_function_decl))
	    promote_function_to_consteval (current_function_decl);
	  else if (complain & tf_error)
	    {
	      if (call_p)
		{
		  auto_diagnostic_group d;
		  location_t loc = cp_expr_loc_or_input_loc (stmt);
		  error_at (loc, "call to consteval function %qE is "
			    "not a constant expression", stmt);
		  /* Explain why it's not a constant expression.  */
		  *stmt_p = cxx_constant_value (stmt, complain);
		  maybe_explain_promoted_consteval (loc, decl);
		}
	      else if (!data->pset.add (stmt))
		{
		  taking_address_of_imm_fn_error (stmt, decl);
		  *stmt_p = build_zero_cst (TREE_TYPE (stmt));
		}
	      /* If we're giving hard errors, continue the walk rather than
		 bailing out after the first error.  */
	      return NULL_TREE;
	    }
	  *walk_subtrees = 0;
	  return stmt;
	}
      /* If we called a consteval function and it evaluated to a consteval-only
	 expression, it could be a problem if we are outside a manifestly
	 constant-evaluated context.  */
      else if ((data->flags & ff_genericize)
	       && check_out_of_consteval_use (e, complain))
	{
	  *stmt_p = void_node;
	  if (complain & tf_error)
	    return NULL_TREE;
	  else
	    {
	      *walk_subtrees = 0;
	      return stmt;
	    }
	}

      /* We've evaluated the consteval function call.  */
      if (call_p)
	{
	  if (code == CALL_EXPR && DECL_CONSTRUCTOR_P (decl))
	    *stmt_p = cp_build_init_expr_for_ctor (stmt, e);
	  else
	    *stmt_p = e;
	}
    }
  /* We've encountered a function call that may turn out to be consteval
     later.  Store its caller so that we can ensure that the call is
     a constant expression.  */
  else if (unchecked_immediate_escalating_function_p (decl))
    {
      /* Make sure we're not inserting new elements while walking
	 the deferred_escalating_exprs hash table; if we are, it's
	 likely that a function wasn't properly marked checked for
	 i-e expressions.  */
      gcc_checking_assert (at_eof <= 1);
      if (current_function_decl)
	remember_escalating_expr (current_function_decl);
      /* auto p = &f<int>; in the global scope won't be ensconced in
	 a function we could store for later at this point.  (If there's
	 no c_f_d at this point and we're dealing with a call, we should
	 see the call when cp_fold_function __static_i_and_d.)  */
      else if (!call_p)
	remember_escalating_expr (stmt);
    }

  return NULL_TREE;
}
1563 :
1564 : /* A walk_tree helper to replace constant-initialized references in an
1565 : OMP_CLAUSE with the the declaration that they refer to. Such refs
1566 : will have been folded out in the body by cp_fold_non_odr_use_1 and
1567 : so we need to follow suit to prevent confusion. */
1568 :
1569 : static tree
1570 95231 : cp_fold_omp_clause_refs_r (tree *expr_p, int *walk_subtrees, void */*data*/)
1571 : {
1572 95231 : tree expr = *expr_p;
1573 :
1574 95231 : if (TYPE_P (expr))
1575 : {
1576 0 : *walk_subtrees = 0;
1577 0 : return NULL_TREE;
1578 : }
1579 :
1580 95231 : if (DECL_P (expr))
1581 : {
1582 52333 : *walk_subtrees = 0;
1583 :
1584 52333 : if (decl_constant_var_p (expr)
1585 52333 : && TYPE_REF_P (TREE_TYPE (expr)))
1586 : {
1587 532 : tree init = maybe_constant_value (expr);
1588 532 : if (TREE_CONSTANT (init))
1589 532 : *expr_p = tree_strip_nop_conversions (init);
1590 : }
1591 : }
1592 :
1593 : return NULL_TREE;
1594 : }
1595 :
/* Perform any pre-gimplification folding of C++ front end trees to
   GENERIC.  A walk_tree callback; DATA_ points to a cp_fold_data.
   Note: The folding of non-omp cases is something to move into
   the middle-end.  As for now we have most foldings only on GENERIC
   in fold-const, we need to perform this before transformation to
   GIMPLE-form.

   ??? This is algorithmically weird because walk_tree works in pre-order, so
   we see outer expressions before inner expressions.  This isn't as much of an
   issue because cp_fold recurses into subexpressions in many cases, but then
   walk_tree walks back into those subexpressions again.  We avoid the
   resulting complexity problem by caching the result of cp_fold, but it's
   inelegant.  */

static tree
cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
{
  cp_fold_data *data = (cp_fold_data*)data_;
  tree stmt = *stmt_p;
  enum tree_code code = TREE_CODE (stmt);

  /* Fold STMT in place; the cache in cp_fold keeps this idempotent.  */
  *stmt_p = stmt = cp_fold (*stmt_p, data->flags);

  if (data->pset.add (stmt))
    {
      /* Don't walk subtrees of stmts we've already walked once, otherwise
	 we can have exponential complexity with e.g. lots of nested
	 SAVE_EXPRs or TARGET_EXPRs.  cp_fold uses a cache and will return
	 always the same tree, which the first time cp_fold_r has been
	 called on it had the subtrees walked.  */
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  /* cp_fold may have changed the code; re-fetch it.  */
  code = TREE_CODE (stmt);
  switch (code)
    {
      tree x;
      int i, n;
    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_LOOP:
    case OMP_TASKLOOP:
    case OMP_TILE:
    case OMP_UNROLL:
    case OACC_LOOP:
      /* Walk the loop's parts explicitly, visiting only the operands of
	 the cond/incr vectors that are safe to fold, then skip the
	 generic subtree walk.  */
      cp_walk_tree (&OMP_FOR_BODY (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_INIT (stmt), cp_fold_r, data, NULL);
      x = OMP_FOR_COND (stmt);
      if (x && TREE_CODE_CLASS (TREE_CODE (x)) == tcc_comparison)
	{
	  cp_walk_tree (&TREE_OPERAND (x, 0), cp_fold_r, data, NULL);
	  cp_walk_tree (&TREE_OPERAND (x, 1), cp_fold_r, data, NULL);
	}
      else if (x && TREE_CODE (x) == TREE_VEC)
	{
	  /* Collapsed loop: one comparison per level; fold only the
	     bound (operand 1), not the iteration variable.  */
	  n = TREE_VEC_LENGTH (x);
	  for (i = 0; i < n; i++)
	    {
	      tree o = TREE_VEC_ELT (x, i);
	      if (o && TREE_CODE_CLASS (TREE_CODE (o)) == tcc_comparison)
		cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
	    }
	}
      x = OMP_FOR_INCR (stmt);
      if (x && TREE_CODE (x) == TREE_VEC)
	{
	  n = TREE_VEC_LENGTH (x);
	  for (i = 0; i < n; i++)
	    {
	      tree o = TREE_VEC_ELT (x, i);
	      if (o && TREE_CODE (o) == MODIFY_EXPR)
		o = TREE_OPERAND (o, 1);
	      if (o && (TREE_CODE (o) == PLUS_EXPR || TREE_CODE (o) == MINUS_EXPR
			|| TREE_CODE (o) == POINTER_PLUS_EXPR))
		{
		  cp_walk_tree (&TREE_OPERAND (o, 0), cp_fold_r, data, NULL);
		  cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
		}
	    }
	}
      cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_fold_r, data, NULL);
      *walk_subtrees = 0;
      return NULL_TREE;

    case OMP_CLAUSE:
      if ((data->flags & ff_only_non_odr)
	  && omp_clause_num_ops[OMP_CLAUSE_CODE (stmt)] >= 1
	  && OMP_CLAUSE_CODE (stmt) >= OMP_CLAUSE_PRIVATE
	  && OMP_CLAUSE_CODE (stmt) <= OMP_CLAUSE__SCANTEMP_
	  && OMP_CLAUSE_DECL (stmt))
	{
	  /* Replace constant-initialized reference decls in the clause to
	     match what cp_fold_non_odr_use_1 did in the body (see
	     cp_fold_omp_clause_refs_r).  */
	  tree *decl = &OMP_CLAUSE_DECL (stmt);
	  cp_walk_tree (decl, cp_fold_omp_clause_refs_r, NULL, NULL);
	  if (TREE_CODE (*decl) == ADDR_EXPR
	      && DECL_P (TREE_OPERAND (*decl, 0)))
	    *decl = TREE_OPERAND (*decl, 0);
	  data->pset.add (*decl);
	}
      break;

    case IF_STMT:
      if (IF_STMT_CONSTEVAL_P (stmt))
	{
	  /* Don't walk THEN_CLAUSE (stmt) for consteval if.  IF_COND is always
	     boolean_false_node.  */
	  cp_walk_tree (&ELSE_CLAUSE (stmt), cp_fold_r, data, NULL);
	  cp_walk_tree (&IF_SCOPE (stmt), cp_fold_r, data, NULL);
	  *walk_subtrees = 0;
	  return NULL_TREE;
	}
      break;

    /* cp_genericize_{init,target}_expr are only for genericize time; they're
       here rather than in cp_genericize to avoid problems with the invisible
       reference transition.  */
    case INIT_EXPR:
      if (data->flags & ff_genericize)
	cp_genericize_init_expr (stmt_p);
      break;

    case TARGET_EXPR:
      /* Try to constant-fold the initializer first.  */
      if (!flag_no_inline)
	if (tree &init = TARGET_EXPR_INITIAL (stmt))
	  {
	    tree folded = maybe_constant_init (init, TARGET_EXPR_SLOT (stmt),
					       (data->flags & ff_mce_false
						? mce_false : mce_unknown));
	    if (folded != init && TREE_CONSTANT (folded))
	      init = folded;
	  }

      /* This needs to happen between the constexpr evaluation (which wants
	 pre-generic trees) and fold (which wants the cp_genericize_init
	 transformations).  */
      if (data->flags & ff_genericize)
	cp_genericize_target_expr (stmt_p);

      if (tree &init = TARGET_EXPR_INITIAL (stmt))
	{
	  cp_walk_tree (&init, cp_fold_r, data, NULL);
	  cp_walk_tree (&TARGET_EXPR_CLEANUP (stmt), cp_fold_r, data, NULL);
	  *walk_subtrees = 0;
	  /* Folding might replace e.g. a COND_EXPR with a TARGET_EXPR; in
	     that case, strip it in favor of this one.  */
	  if (TREE_CODE (init) == TARGET_EXPR)
	    {
	      tree sub = TARGET_EXPR_INITIAL (init);
	      maybe_replace_decl (&sub, TARGET_EXPR_SLOT (init),
				  TARGET_EXPR_SLOT (stmt));
	      init = sub;
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}
1759 :
1760 : /* Fold ALL the trees! FIXME we should be able to remove this, but
1761 : apparently that still causes optimization regressions. */
1762 :
1763 : void
1764 64694196 : cp_fold_function (tree fndecl)
1765 : {
1766 : /* By now all manifestly-constant-evaluated expressions will have
1767 : been constant-evaluated already if possible, so we can safely
1768 : pass ff_mce_false. */
1769 64694196 : cp_fold_data data (ff_genericize | ff_mce_false);
1770 : /* Do cp_fold_immediate_r in separate whole IL walk instead of during
1771 : cp_fold_r, as otherwise expressions using results of immediate functions
1772 : might not be folded as cp_fold is called on those before cp_fold_r is
1773 : called on their argument. */
1774 64694196 : if (cxx_dialect >= cxx20)
1775 : {
1776 62749895 : cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_immediate_r,
1777 : &data, NULL);
1778 62749895 : data.pset.empty ();
1779 : }
1780 64694196 : cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &data, NULL);
1781 :
1782 : /* This is merely an optimization: if FNDECL has no i-e expressions,
1783 : we'll not save c_f_d, and we can safely say that FNDECL will not
1784 : be promoted to consteval. */
1785 64694196 : if (deferred_escalating_exprs
1786 64694196 : && !deferred_escalating_exprs->contains (current_function_decl))
1787 49737966 : DECL_ESCALATION_CHECKED_P (fndecl) = true;
1788 64694196 : }
1789 :
1790 : /* Fold any non-ODR usages of constant variables in FNDECL. This occurs
1791 : before saving the constexpr fundef, so do as little other folding
1792 : as possible. */
1793 :
1794 : void
1795 65651492 : cp_fold_function_non_odr_use (tree fndecl)
1796 : {
1797 65651492 : cp_fold_data data (ff_only_non_odr);
1798 65651492 : cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &data, NULL);
1799 65651492 : }
1800 :
1801 : /* We've stashed immediate-escalating functions. Now see if they indeed
1802 : ought to be promoted to consteval. */
1803 :
1804 : void
1805 96703 : process_and_check_pending_immediate_escalating_fns ()
1806 : {
1807 : /* This will be null for -fno-immediate-escalation. */
1808 96703 : if (!deferred_escalating_exprs)
1809 : return;
1810 :
1811 23389970 : for (auto e : *deferred_escalating_exprs)
1812 11685711 : if (TREE_CODE (e) == FUNCTION_DECL && !DECL_ESCALATION_CHECKED_P (e))
1813 8127883 : cp_fold_immediate (&DECL_SAVED_TREE (e), mce_false, e);
1814 :
1815 : /* We've escalated every function that could have been promoted to
1816 : consteval. Check that we are not taking the address of a consteval
1817 : function. */
1818 23390086 : for (auto e : *deferred_escalating_exprs)
1819 : {
1820 11685711 : if (TREE_CODE (e) == FUNCTION_DECL)
1821 11685595 : continue;
1822 116 : tree decl = (TREE_CODE (e) == PTRMEM_CST
1823 116 : ? PTRMEM_CST_MEMBER (e)
1824 116 : : TREE_OPERAND (e, 0));
1825 232 : if (DECL_IMMEDIATE_FUNCTION_P (decl))
1826 12 : taking_address_of_imm_fn_error (e, decl);
1827 : }
1828 :
1829 18664 : deferred_escalating_exprs = nullptr;
1830 : }
1831 :
1832 : /* Turn SPACESHIP_EXPR EXPR into GENERIC. */
1833 :
1834 232439 : static tree genericize_spaceship (tree expr)
1835 : {
1836 232439 : iloc_sentinel s (cp_expr_location (expr));
1837 232439 : tree type = TREE_TYPE (expr);
1838 232439 : tree op0 = TREE_OPERAND (expr, 0);
1839 232439 : tree op1 = TREE_OPERAND (expr, 1);
1840 232439 : return genericize_spaceship (input_location, type, op0, op1);
1841 232439 : }
1842 :
1843 : /* If EXPR involves an anonymous VLA type, prepend a DECL_EXPR for that type
1844 : to trigger gimplify_type_sizes; otherwise a cast to pointer-to-VLA confuses
1845 : the middle-end (c++/88256). If EXPR is a DECL, use add_stmt and return
1846 : NULL_TREE; otherwise return a COMPOUND_STMT of the DECL_EXPR and EXPR. */
1847 :
1848 : tree
1849 134809248 : predeclare_vla (tree expr)
1850 : {
1851 134809248 : tree type = TREE_TYPE (expr);
1852 134809248 : if (type == error_mark_node)
1853 : return expr;
1854 134809148 : if (is_typedef_decl (expr))
1855 134809148 : type = DECL_ORIGINAL_TYPE (expr);
1856 :
1857 : /* We need to strip pointers for gimplify_type_sizes. */
1858 134809148 : tree vla = type;
1859 205943739 : while (POINTER_TYPE_P (vla))
1860 : {
1861 73612570 : if (TYPE_NAME (vla))
1862 : return expr;
1863 71134591 : vla = TREE_TYPE (vla);
1864 : }
1865 69278719 : if (vla == type || TYPE_NAME (vla)
1866 132874081 : || !variably_modified_type_p (vla, NULL_TREE))
1867 132330966 : return expr;
1868 :
1869 203 : tree decl = build_decl (input_location, TYPE_DECL, NULL_TREE, vla);
1870 203 : DECL_ARTIFICIAL (decl) = 1;
1871 203 : TYPE_NAME (vla) = decl;
1872 203 : tree dexp = build_stmt (input_location, DECL_EXPR, decl);
1873 203 : if (DECL_P (expr))
1874 : {
1875 5 : add_stmt (dexp);
1876 5 : return NULL_TREE;
1877 : }
1878 : else
1879 : {
1880 198 : expr = build2 (COMPOUND_EXPR, type, dexp, expr);
1881 198 : return expr;
1882 : }
1883 : }
1884 :
1885 : /* Perform any pre-gimplification lowering of C++ front end trees to
1886 : GENERIC. */
1887 :
1888 : static tree
1889 1747367813 : cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
1890 : {
1891 1767809826 : tree stmt = *stmt_p;
1892 1767809826 : struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
1893 1767809826 : hash_set<tree> *p_set = wtd->p_set;
1894 :
1895 : /* If in an OpenMP context, note var uses. */
1896 1767809826 : if (UNLIKELY (wtd->omp_ctx != NULL)
1897 594956 : && (VAR_P (stmt)
1898 : || TREE_CODE (stmt) == PARM_DECL
1899 : || TREE_CODE (stmt) == RESULT_DECL)
1900 1767917355 : && omp_var_to_track (stmt))
1901 12338 : omp_cxx_notice_variable (wtd->omp_ctx, stmt);
1902 :
1903 : /* Don't dereference parms in a thunk, pass the references through. */
1904 71683707 : if ((TREE_CODE (stmt) == CALL_EXPR && call_from_lambda_thunk_p (stmt))
1905 1839430360 : || (TREE_CODE (stmt) == AGGR_INIT_EXPR && AGGR_INIT_FROM_THUNK_P (stmt)))
1906 : {
1907 63373 : *walk_subtrees = 0;
1908 63373 : return NULL;
1909 : }
1910 :
1911 : /* Dereference invisible reference parms. */
1912 1767746453 : if (wtd->handle_invisiref_parm_p && is_invisiref_parm (stmt))
1913 : {
1914 1242933 : *stmt_p = convert_from_reference (stmt);
1915 1242933 : p_set->add (*stmt_p);
1916 1242933 : *walk_subtrees = 0;
1917 1242933 : return NULL;
1918 : }
1919 :
1920 : /* Map block scope extern declarations to visible declarations with the
1921 : same name and type in outer scopes if any. */
1922 1766503520 : if (VAR_OR_FUNCTION_DECL_P (stmt) && DECL_LOCAL_DECL_P (stmt))
1923 20798 : if (tree alias = DECL_LOCAL_DECL_ALIAS (stmt))
1924 : {
1925 20798 : if (alias != error_mark_node)
1926 : {
1927 20795 : *stmt_p = alias;
1928 20795 : TREE_USED (alias) |= TREE_USED (stmt);
1929 : }
1930 20798 : *walk_subtrees = 0;
1931 20798 : return NULL;
1932 : }
1933 :
1934 1766482722 : if (TREE_CODE (stmt) == INTEGER_CST
1935 201671134 : && TYPE_REF_P (TREE_TYPE (stmt))
1936 155 : && (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
1937 1766482723 : && !wtd->no_sanitize_p)
1938 : {
1939 1 : ubsan_maybe_instrument_reference (stmt_p);
1940 1 : if (*stmt_p != stmt)
1941 : {
1942 1 : *walk_subtrees = 0;
1943 1 : return NULL_TREE;
1944 : }
1945 : }
1946 :
1947 : /* Other than invisiref parms, don't walk the same tree twice. */
1948 1766482721 : if (p_set->contains (stmt))
1949 : {
1950 349656073 : *walk_subtrees = 0;
1951 349656073 : return NULL_TREE;
1952 : }
1953 :
1954 1416826648 : if ((TREE_CODE (stmt) == VAR_DECL
1955 : || TREE_CODE (stmt) == PARM_DECL
1956 : || TREE_CODE (stmt) == RESULT_DECL)
1957 153160415 : && DECL_HAS_VALUE_EXPR_P (stmt)
1958 : /* Walk DECL_VALUE_EXPR mainly for benefit of xobj lambdas so that we
1959 : adjust any invisiref object parm uses within the capture proxies.
1960 : TODO: For GCC 17 do this walking unconditionally. */
1961 652401 : && current_function_decl
1962 652401 : && DECL_XOBJ_MEMBER_FUNCTION_P (current_function_decl)
1963 896 : && LAMBDA_FUNCTION_P (current_function_decl))
1964 : {
1965 442 : tree ve = DECL_VALUE_EXPR (stmt);
1966 442 : cp_walk_tree (&ve, cp_genericize_r, data, NULL);
1967 442 : SET_DECL_VALUE_EXPR (stmt, ve);
1968 : }
1969 :
1970 1416826648 : switch (TREE_CODE (stmt))
1971 : {
1972 114918455 : case ADDR_EXPR:
1973 114918455 : if (is_invisiref_parm (TREE_OPERAND (stmt, 0)))
1974 : {
1975 : /* If in an OpenMP context, note var uses. */
1976 522546 : if (UNLIKELY (wtd->omp_ctx != NULL)
1977 522546 : && omp_var_to_track (TREE_OPERAND (stmt, 0)))
1978 412 : omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
1979 522546 : *stmt_p = fold_convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
1980 522546 : *walk_subtrees = 0;
1981 : }
1982 : break;
1983 :
1984 42589198 : case RETURN_EXPR:
1985 42589198 : if (TREE_OPERAND (stmt, 0))
1986 : {
1987 41962648 : if (error_operand_p (TREE_OPERAND (stmt, 0))
1988 41962648 : && warn_return_type)
1989 : /* Suppress -Wreturn-type for this function. */
1990 12 : suppress_warning (current_function_decl, OPT_Wreturn_type);
1991 :
1992 41962648 : if (is_invisiref_parm (TREE_OPERAND (stmt, 0)))
1993 : /* Don't dereference an invisiref RESULT_DECL inside a
1994 : RETURN_EXPR. */
1995 560 : *walk_subtrees = 0;
1996 41962648 : if (RETURN_EXPR_LOCAL_ADDR_P (stmt))
1997 : {
1998 : /* Don't return the address of a local variable. */
1999 166 : tree *p = &TREE_OPERAND (stmt, 0);
2000 332 : while (TREE_CODE (*p) == COMPOUND_EXPR)
2001 0 : p = &TREE_OPERAND (*p, 0);
2002 166 : if (TREE_CODE (*p) == INIT_EXPR)
2003 : {
2004 166 : tree op = TREE_OPERAND (*p, 1);
2005 166 : tree new_op = build2 (COMPOUND_EXPR, TREE_TYPE (op), op,
2006 166 : build_zero_cst (TREE_TYPE (op)));
2007 166 : TREE_OPERAND (*p, 1) = new_op;
2008 : }
2009 : }
2010 : }
2011 : break;
2012 :
2013 82301 : case OMP_CLAUSE:
2014 82301 : switch (OMP_CLAUSE_CODE (stmt))
2015 : {
2016 2587 : case OMP_CLAUSE_LASTPRIVATE:
2017 : /* Don't dereference an invisiref in OpenMP clauses. */
2018 2587 : if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
2019 : {
2020 53 : *walk_subtrees = 0;
2021 53 : if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
2022 48 : cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
2023 : cp_genericize_r, data, NULL);
2024 : }
2025 : break;
2026 2084 : case OMP_CLAUSE_PRIVATE:
2027 : /* Don't dereference an invisiref in OpenMP clauses. */
2028 2084 : if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
2029 8 : *walk_subtrees = 0;
2030 2076 : else if (wtd->omp_ctx != NULL)
2031 : {
2032 : /* Private clause doesn't cause any references to the
2033 : var in outer contexts, avoid calling
2034 : omp_cxx_notice_variable for it. */
2035 584 : struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
2036 584 : wtd->omp_ctx = NULL;
2037 584 : cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
2038 : data, NULL);
2039 584 : wtd->omp_ctx = old;
2040 584 : *walk_subtrees = 0;
2041 : }
2042 : break;
2043 6052 : case OMP_CLAUSE_SHARED:
2044 6052 : case OMP_CLAUSE_FIRSTPRIVATE:
2045 6052 : case OMP_CLAUSE_COPYIN:
2046 6052 : case OMP_CLAUSE_COPYPRIVATE:
2047 6052 : case OMP_CLAUSE_INCLUSIVE:
2048 6052 : case OMP_CLAUSE_EXCLUSIVE:
2049 : /* Don't dereference an invisiref in OpenMP clauses. */
2050 6052 : if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
2051 87 : *walk_subtrees = 0;
2052 : break;
2053 8448 : case OMP_CLAUSE_REDUCTION:
2054 8448 : case OMP_CLAUSE_IN_REDUCTION:
2055 8448 : case OMP_CLAUSE_TASK_REDUCTION:
2056 : /* Don't dereference an invisiref in reduction clause's
2057 : OMP_CLAUSE_DECL either. OMP_CLAUSE_REDUCTION_{INIT,MERGE}
2058 : still needs to be genericized. */
2059 8448 : if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
2060 : {
2061 38 : *walk_subtrees = 0;
2062 38 : if (OMP_CLAUSE_REDUCTION_INIT (stmt))
2063 38 : cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
2064 : cp_genericize_r, data, NULL);
2065 38 : if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
2066 38 : cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
2067 : cp_genericize_r, data, NULL);
2068 : }
2069 : break;
2070 : default:
2071 : break;
2072 : }
2073 : break;
2074 :
2075 : /* Due to the way voidify_wrapper_expr is written, we don't get a chance
2076 : to lower this construct before scanning it, so we need to lower these
2077 : before doing anything else. */
2078 5522839 : case CLEANUP_STMT:
2079 5522839 : *stmt_p = build2_loc (EXPR_LOCATION (stmt),
2080 5522839 : CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
2081 : : TRY_FINALLY_EXPR,
2082 : void_type_node,
2083 5522839 : CLEANUP_BODY (stmt),
2084 5522839 : CLEANUP_EXPR (stmt));
2085 5522839 : break;
2086 :
2087 20441396 : case IF_STMT:
2088 20441396 : genericize_if_stmt (stmt_p);
2089 : /* *stmt_p has changed, tail recurse to handle it again. */
2090 20441396 : return cp_genericize_r (stmt_p, walk_subtrees, data);
2091 :
2092 : /* COND_EXPR might have incompatible types in branches if one or both
2093 : arms are bitfields. Fix it up now. */
2094 18599556 : case COND_EXPR:
2095 18599556 : {
2096 18599556 : tree type_left
2097 18599556 : = (TREE_OPERAND (stmt, 1)
2098 18599556 : ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
2099 : : NULL_TREE);
2100 18599556 : tree type_right
2101 18599556 : = (TREE_OPERAND (stmt, 2)
2102 18599556 : ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
2103 : : NULL_TREE);
2104 18599556 : if (type_left
2105 18599589 : && !useless_type_conversion_p (TREE_TYPE (stmt),
2106 33 : TREE_TYPE (TREE_OPERAND (stmt, 1))))
2107 : {
2108 30 : TREE_OPERAND (stmt, 1)
2109 30 : = fold_convert (type_left, TREE_OPERAND (stmt, 1));
2110 30 : gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
2111 : type_left));
2112 : }
2113 18599556 : if (type_right
2114 18599573 : && !useless_type_conversion_p (TREE_TYPE (stmt),
2115 17 : TREE_TYPE (TREE_OPERAND (stmt, 2))))
2116 : {
2117 17 : TREE_OPERAND (stmt, 2)
2118 17 : = fold_convert (type_right, TREE_OPERAND (stmt, 2));
2119 17 : gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
2120 : type_right));
2121 : }
2122 : }
2123 : break;
2124 :
2125 24518848 : case BIND_EXPR:
2126 24518848 : if (UNLIKELY (wtd->omp_ctx != NULL))
2127 : {
2128 27006 : tree decl;
2129 33294 : for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
2130 6288 : if (VAR_P (decl)
2131 6240 : && !DECL_EXTERNAL (decl)
2132 12528 : && omp_var_to_track (decl))
2133 : {
2134 586 : splay_tree_node n
2135 586 : = splay_tree_lookup (wtd->omp_ctx->variables,
2136 : (splay_tree_key) decl);
2137 586 : if (n == NULL)
2138 586 : splay_tree_insert (wtd->omp_ctx->variables,
2139 : (splay_tree_key) decl,
2140 586 : TREE_STATIC (decl)
2141 : ? OMP_CLAUSE_DEFAULT_SHARED
2142 : : OMP_CLAUSE_DEFAULT_PRIVATE);
2143 : }
2144 : }
2145 24518848 : if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
2146 : {
2147 : /* The point here is to not sanitize static initializers. */
2148 3385 : bool no_sanitize_p = wtd->no_sanitize_p;
2149 3385 : wtd->no_sanitize_p = true;
2150 3385 : for (tree decl = BIND_EXPR_VARS (stmt);
2151 6593 : decl;
2152 3208 : decl = DECL_CHAIN (decl))
2153 3208 : if (VAR_P (decl)
2154 2805 : && TREE_STATIC (decl)
2155 3284 : && DECL_INITIAL (decl))
2156 12 : cp_walk_tree (&DECL_INITIAL (decl), cp_genericize_r, data, NULL);
2157 3385 : wtd->no_sanitize_p = no_sanitize_p;
2158 : }
2159 24518848 : if (flag_reflection)
2160 : /* Wipe consteval-only vars from BIND_EXPR_VARS and BLOCK_VARS. */
2161 443102 : for (tree *p = &BIND_EXPR_VARS (stmt); *p; )
2162 : {
2163 217194 : if (VAR_P (*p) && consteval_only_p (*p))
2164 : {
2165 398 : if (BIND_EXPR_BLOCK (stmt)
2166 398 : && *p == BLOCK_VARS (BIND_EXPR_BLOCK (stmt)))
2167 200 : BLOCK_VARS (BIND_EXPR_BLOCK (stmt)) = DECL_CHAIN (*p);
2168 398 : *p = DECL_CHAIN (*p);
2169 398 : continue;
2170 : }
2171 216796 : p = &DECL_CHAIN (*p);
2172 : }
2173 24518848 : wtd->bind_expr_stack.safe_push (stmt);
2174 24518848 : cp_walk_tree (&BIND_EXPR_BODY (stmt),
2175 : cp_genericize_r, data, NULL);
2176 24518848 : wtd->bind_expr_stack.pop ();
2177 24518848 : break;
2178 :
2179 617 : case ASSERTION_STMT:
2180 617 : case PRECONDITION_STMT:
2181 617 : case POSTCONDITION_STMT:
2182 617 : if (tree check = build_contract_check (stmt))
2183 : {
2184 617 : *stmt_p = check;
2185 617 : return cp_genericize_r (stmt_p, walk_subtrees, data);
2186 : }
2187 : /* If we didn't build a check, replace it with void_node so we don't
2188 : leak contracts into GENERIC. */
2189 0 : *stmt_p = void_node;
2190 0 : *walk_subtrees = 0;
2191 0 : break;
2192 :
2193 21547 : case USING_STMT:
2194 21547 : {
2195 21547 : tree block = NULL_TREE;
2196 :
2197 : /* Get the innermost inclosing GIMPLE_BIND that has a non NULL
2198 : BLOCK, and append an IMPORTED_DECL to its
2199 : BLOCK_VARS chained list. */
2200 21547 : if (wtd->bind_expr_stack.exists ())
2201 : {
2202 21547 : int i;
2203 21547 : for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
2204 21547 : if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
2205 : break;
2206 : }
2207 21547 : if (block)
2208 : {
2209 21547 : tree decl = TREE_OPERAND (stmt, 0);
2210 21547 : gcc_assert (decl);
2211 :
2212 21547 : if (undeduced_auto_decl (decl))
2213 : /* Omit from the GENERIC, the back-end can't handle it. */;
2214 : else
2215 : {
2216 21544 : tree using_directive = make_node (IMPORTED_DECL);
2217 21544 : TREE_TYPE (using_directive) = void_type_node;
2218 21544 : DECL_CONTEXT (using_directive) = current_function_decl;
2219 43088 : DECL_SOURCE_LOCATION (using_directive)
2220 21544 : = cp_expr_loc_or_input_loc (stmt);
2221 :
2222 21544 : IMPORTED_DECL_ASSOCIATED_DECL (using_directive) = decl;
2223 21544 : DECL_CHAIN (using_directive) = BLOCK_VARS (block);
2224 21544 : BLOCK_VARS (block) = using_directive;
2225 : }
2226 : }
2227 : /* The USING_STMT won't appear in GENERIC. */
2228 21547 : *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
2229 21547 : *walk_subtrees = 0;
2230 : }
2231 21547 : break;
2232 :
2233 24281828 : case DECL_EXPR:
2234 24281828 : if (TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
2235 : {
2236 : /* Using decls inside DECL_EXPRs are just dropped on the floor. */
2237 20310 : *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
2238 20310 : *walk_subtrees = 0;
2239 : }
2240 : else
2241 : {
2242 24261518 : tree d = DECL_EXPR_DECL (stmt);
2243 24261518 : if (VAR_P (d))
2244 48521556 : gcc_assert (CP_DECL_THREAD_LOCAL_P (d) == DECL_THREAD_LOCAL_P (d));
2245 : }
2246 : break;
2247 :
2248 11392 : case OMP_PARALLEL:
2249 11392 : case OMP_TASK:
2250 11392 : case OMP_TASKLOOP:
2251 11392 : {
2252 11392 : struct cp_genericize_omp_taskreg omp_ctx;
2253 11392 : tree c, decl;
2254 11392 : splay_tree_node n;
2255 :
2256 11392 : *walk_subtrees = 0;
2257 11392 : cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
2258 11392 : omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
2259 11392 : omp_ctx.default_shared = omp_ctx.is_parallel;
2260 11392 : omp_ctx.outer = wtd->omp_ctx;
2261 11392 : omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
2262 11392 : wtd->omp_ctx = &omp_ctx;
2263 27186 : for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2264 15794 : switch (OMP_CLAUSE_CODE (c))
2265 : {
2266 4788 : case OMP_CLAUSE_SHARED:
2267 4788 : case OMP_CLAUSE_PRIVATE:
2268 4788 : case OMP_CLAUSE_FIRSTPRIVATE:
2269 4788 : case OMP_CLAUSE_LASTPRIVATE:
2270 4788 : decl = OMP_CLAUSE_DECL (c);
2271 4788 : if (decl == error_mark_node || !omp_var_to_track (decl))
2272 : break;
2273 519 : n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
2274 519 : if (n != NULL)
2275 : break;
2276 1020 : splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
2277 510 : OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
2278 : ? OMP_CLAUSE_DEFAULT_SHARED
2279 : : OMP_CLAUSE_DEFAULT_PRIVATE);
2280 510 : if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE && omp_ctx.outer)
2281 91 : omp_cxx_notice_variable (omp_ctx.outer, decl);
2282 : break;
2283 1647 : case OMP_CLAUSE_DEFAULT:
2284 1647 : if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
2285 731 : omp_ctx.default_shared = true;
2286 : default:
2287 : break;
2288 : }
2289 11392 : if (TREE_CODE (stmt) == OMP_TASKLOOP)
2290 1001 : c_genericize_control_stmt (stmt_p, walk_subtrees, data,
2291 : cp_genericize_r, cp_walk_subtrees);
2292 : else
2293 10391 : cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
2294 11392 : wtd->omp_ctx = omp_ctx.outer;
2295 11392 : splay_tree_delete (omp_ctx.variables);
2296 : }
2297 11392 : break;
2298 :
2299 6916 : case OMP_TARGET:
2300 6916 : cfun->has_omp_target = true;
2301 6916 : break;
2302 :
2303 134459 : case TRY_BLOCK:
2304 134459 : {
2305 134459 : *walk_subtrees = 0;
2306 134459 : tree try_block = wtd->try_block;
2307 134459 : wtd->try_block = stmt;
2308 134459 : cp_walk_tree (&TRY_STMTS (stmt), cp_genericize_r, data, NULL);
2309 134459 : wtd->try_block = try_block;
2310 134459 : cp_walk_tree (&TRY_HANDLERS (stmt), cp_genericize_r, data, NULL);
2311 : }
2312 134459 : break;
2313 :
2314 24492042 : case MUST_NOT_THROW_EXPR:
2315 : /* MUST_NOT_THROW_COND might be something else with TM. */
2316 24492042 : if (MUST_NOT_THROW_COND (stmt) == NULL_TREE)
2317 : {
2318 24492024 : *walk_subtrees = 0;
2319 24492024 : tree try_block = wtd->try_block;
2320 24492024 : wtd->try_block = stmt;
2321 24492024 : cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
2322 24492024 : wtd->try_block = try_block;
2323 : }
2324 : break;
2325 :
2326 137084 : case THROW_EXPR:
2327 137084 : {
2328 137084 : location_t loc = location_of (stmt);
2329 137084 : if (warning_suppressed_p (stmt /* What warning? */))
2330 : /* Never mind. */;
2331 41145 : else if (wtd->try_block)
2332 : {
2333 10065 : if (TREE_CODE (wtd->try_block) == MUST_NOT_THROW_EXPR)
2334 : {
2335 18 : auto_diagnostic_group d;
2336 31 : if (warning_at (loc, OPT_Wterminate,
2337 : "%<throw%> will always call %<terminate%>")
2338 10 : && cxx_dialect >= cxx11
2339 36 : && DECL_DESTRUCTOR_P (current_function_decl))
2340 5 : inform (loc, "in C++11 destructors default to %<noexcept%>");
2341 18 : }
2342 : }
2343 : else
2344 : {
2345 103 : if (warn_cxx11_compat && cxx_dialect < cxx11
2346 206 : && DECL_DESTRUCTOR_P (current_function_decl)
2347 1 : && (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl))
2348 : == NULL_TREE)
2349 31081 : && (get_defaulted_eh_spec (current_function_decl)
2350 1 : == empty_except_spec))
2351 1 : warning_at (loc, OPT_Wc__11_compat,
2352 : "in C++11 this %<throw%> will call %<terminate%> "
2353 : "because destructors default to %<noexcept%>");
2354 : }
2355 : }
2356 : break;
2357 :
2358 39678288 : case CONVERT_EXPR:
2359 39678288 : gcc_checking_assert (!AGGREGATE_TYPE_P (TREE_TYPE (stmt)));
2360 39678288 : gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
2361 : break;
2362 :
2363 232439 : case SPACESHIP_EXPR:
2364 232439 : *stmt_p = genericize_spaceship (*stmt_p);
2365 232439 : break;
2366 :
2367 30862 : case PTRMEM_CST:
2368 : /* By the time we get here we're handing off to the back end, so we don't
2369 : need or want to preserve PTRMEM_CST anymore. */
2370 30862 : *stmt_p = cplus_expand_constant (stmt);
2371 30862 : *walk_subtrees = 0;
2372 30862 : break;
2373 :
2374 263127 : case MEM_REF:
2375 : /* For MEM_REF, make sure not to sanitize the second operand even
2376 : if it has reference type. It is just an offset with a type
2377 : holding other information. There is no other processing we
2378 : need to do for INTEGER_CSTs, so just ignore the second argument
2379 : unconditionally. */
2380 263127 : cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
2381 263127 : *walk_subtrees = 0;
2382 263127 : break;
2383 :
2384 112615232 : case NOP_EXPR:
2385 112615232 : *stmt_p = predeclare_vla (*stmt_p);
2386 :
2387 : /* Warn of new allocations that are not big enough for the target
2388 : type. */
2389 112615232 : if (warn_alloc_size
2390 1074127 : && TREE_CODE (TREE_OPERAND (stmt, 0)) == CALL_EXPR
2391 112680239 : && POINTER_TYPE_P (TREE_TYPE (stmt)))
2392 : {
2393 23951 : if (tree fndecl = get_callee_fndecl (TREE_OPERAND (stmt, 0)))
2394 23937 : if (DECL_IS_MALLOC (fndecl))
2395 : {
2396 1169 : tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (fndecl));
2397 1169 : tree alloc_size = lookup_attribute ("alloc_size", attrs);
2398 1169 : if (alloc_size)
2399 1167 : warn_for_alloc_size (EXPR_LOCATION (stmt),
2400 1167 : TREE_TYPE (TREE_TYPE (stmt)),
2401 1167 : TREE_OPERAND (stmt, 0), alloc_size);
2402 : }
2403 : }
2404 :
2405 112615232 : if (!wtd->no_sanitize_p
2406 112615227 : && sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT)
2407 112630415 : && TYPE_REF_P (TREE_TYPE (stmt)))
2408 2340 : ubsan_maybe_instrument_reference (stmt_p);
2409 : break;
2410 :
2411 71617011 : case CALL_EXPR:
2412 71617011 : if (!wtd->no_sanitize_p
2413 71617011 : && sanitize_flags_p ((SANITIZE_NULL
2414 : | SANITIZE_ALIGNMENT | SANITIZE_VPTR)))
2415 : {
2416 15644 : tree fn = CALL_EXPR_FN (stmt);
2417 15644 : if (fn != NULL_TREE
2418 9699 : && !error_operand_p (fn)
2419 9699 : && INDIRECT_TYPE_P (TREE_TYPE (fn))
2420 25343 : && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
2421 : {
2422 5434 : bool is_ctor
2423 5434 : = TREE_CODE (fn) == ADDR_EXPR
2424 5311 : && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
2425 16056 : && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
2426 5434 : if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT))
2427 4823 : ubsan_maybe_instrument_member_call (stmt, is_ctor);
2428 5434 : if (sanitize_flags_p (SANITIZE_VPTR) && !is_ctor)
2429 4603 : cp_ubsan_maybe_instrument_member_call (stmt);
2430 : }
2431 10210 : else if (fn == NULL_TREE
2432 5945 : && CALL_EXPR_IFN (stmt) == IFN_UBSAN_NULL
2433 4875 : && TREE_CODE (CALL_EXPR_ARG (stmt, 0)) == INTEGER_CST
2434 10216 : && TYPE_REF_P (TREE_TYPE (CALL_EXPR_ARG (stmt, 0))))
2435 6 : *walk_subtrees = 0;
2436 : }
2437 : /* Fall through. */
2438 76228775 : case AGGR_INIT_EXPR:
2439 : /* For calls to a multi-versioned function, overload resolution
2440 : returns the function with the highest target priority, that is,
2441 : the version that will checked for dispatching first. If this
2442 : version is inlinable, a direct call to this version can be made
2443 : otherwise the call should go through the dispatcher.
2444 : This is done at multiple_target.cc for target_version semantics. */
2445 76228775 : {
2446 76228775 : tree fn = cp_get_callee_fndecl_nofold (stmt);
2447 76228775 : if (TARGET_HAS_FMV_TARGET_ATTRIBUTE
2448 : && fn
2449 75158478 : && DECL_FUNCTION_VERSIONED (fn)
2450 76228907 : && (current_function_decl == NULL
2451 132 : || !targetm.target_option.can_inline_p
2452 132 : (current_function_decl, fn)))
2453 120 : if (tree dis = get_function_version_dispatcher (fn))
2454 : {
2455 120 : mark_versions_used (dis);
2456 120 : dis = build_address (dis);
2457 120 : if (TREE_CODE (stmt) == CALL_EXPR)
2458 117 : CALL_EXPR_FN (stmt) = dis;
2459 : else
2460 3 : AGGR_INIT_EXPR_FN (stmt) = dis;
2461 : }
2462 : }
2463 : break;
2464 :
2465 19502058 : case TARGET_EXPR:
2466 19502058 : if (TARGET_EXPR_INITIAL (stmt)
2467 19502058 : && TREE_CODE (TARGET_EXPR_INITIAL (stmt)) == CONSTRUCTOR
2468 23058197 : && CONSTRUCTOR_PLACEHOLDER_BOUNDARY (TARGET_EXPR_INITIAL (stmt)))
2469 166 : TARGET_EXPR_NO_ELIDE (stmt) = 1;
2470 : break;
2471 :
2472 660 : case TEMPLATE_ID_EXPR:
2473 660 : gcc_assert (concept_check_p (stmt));
2474 : /* Emit the value of the concept check. */
2475 660 : *stmt_p = evaluate_concept_check (stmt);
2476 660 : walk_subtrees = 0;
2477 660 : break;
2478 :
2479 4187 : case OMP_DISTRIBUTE:
2480 : /* Need to explicitly instantiate copy ctors on class iterators of
2481 : composite distribute parallel for. */
2482 4187 : if (OMP_FOR_INIT (*stmt_p) == NULL_TREE)
2483 : {
2484 3712 : tree *data[4] = { NULL, NULL, NULL, NULL };
2485 3712 : tree inner = walk_tree (&OMP_FOR_BODY (*stmt_p),
2486 : find_combined_omp_for, data, NULL);
2487 3712 : if (inner != NULL_TREE
2488 3678 : && TREE_CODE (inner) == OMP_FOR)
2489 : {
2490 4494 : for (int i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner)); i++)
2491 2829 : if (TREE_VEC_ELT (OMP_FOR_INIT (inner), i)
2492 2813 : && OMP_FOR_ORIG_DECLS (inner)
2493 2813 : && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner),
2494 : i)) == TREE_LIST
2495 2853 : && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner),
2496 : i)))
2497 : {
2498 12 : tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner), i);
2499 : /* Class iterators aren't allowed on OMP_SIMD, so the only
2500 : case we need to solve is distribute parallel for. */
2501 12 : gcc_assert (TREE_CODE (inner) == OMP_FOR
2502 : && data[1]);
2503 12 : tree orig_decl = TREE_PURPOSE (orig);
2504 12 : tree c, cl = NULL_TREE;
2505 12 : for (c = OMP_FOR_CLAUSES (inner);
2506 16 : c; c = OMP_CLAUSE_CHAIN (c))
2507 12 : if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
2508 5 : || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
2509 13 : && OMP_CLAUSE_DECL (c) == orig_decl)
2510 : {
2511 : cl = c;
2512 : break;
2513 : }
2514 12 : if (cl == NULL_TREE)
2515 : {
2516 4 : for (c = OMP_PARALLEL_CLAUSES (*data[1]);
2517 4 : c; c = OMP_CLAUSE_CHAIN (c))
2518 1 : if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
2519 1 : && OMP_CLAUSE_DECL (c) == orig_decl)
2520 : {
2521 : cl = c;
2522 : break;
2523 : }
2524 : }
2525 4 : if (cl)
2526 : {
2527 9 : orig_decl = require_complete_type (orig_decl);
2528 9 : tree inner_type = TREE_TYPE (orig_decl);
2529 9 : if (orig_decl == error_mark_node)
2530 0 : continue;
2531 9 : if (TYPE_REF_P (TREE_TYPE (orig_decl)))
2532 0 : inner_type = TREE_TYPE (inner_type);
2533 :
2534 9 : while (TREE_CODE (inner_type) == ARRAY_TYPE)
2535 0 : inner_type = TREE_TYPE (inner_type);
2536 9 : get_copy_ctor (inner_type, tf_warning_or_error);
2537 : }
2538 : }
2539 : }
2540 : }
2541 : /* FALLTHRU */
2542 :
2543 4721042 : case FOR_STMT:
2544 4721042 : case WHILE_STMT:
2545 4721042 : case DO_STMT:
2546 4721042 : case SWITCH_STMT:
2547 4721042 : case CONTINUE_STMT:
2548 4721042 : case BREAK_STMT:
2549 4721042 : case OMP_FOR:
2550 4721042 : case OMP_SIMD:
2551 4721042 : case OMP_LOOP:
2552 4721042 : case OMP_TILE:
2553 4721042 : case OMP_UNROLL:
2554 4721042 : case OACC_LOOP:
2555 : /* These cases are handled by shared code. */
2556 4721042 : c_genericize_control_stmt (stmt_p, walk_subtrees, data,
2557 : cp_genericize_r, cp_walk_subtrees);
2558 4721042 : break;
2559 :
2560 66831183 : case STATEMENT_LIST:
2561 : /* As above, handled by shared code. */
2562 66831183 : c_genericize_control_stmt (stmt_p, walk_subtrees, data,
2563 : cp_genericize_r, cp_walk_subtrees);
2564 : /* If a statement list is freed as part of genericisation it will be
2565 : pushed onto the top of a statement list cache stack. A subsequent
2566 : action can cause a new statement list to be required - and the one
2567 : just pushed will be returned. If that is marked as visited, it can
2568 : prevent a tail recursion from processing the 'new' statement list,
2569 : so we do not mark statement lists as visited. */
2570 66831183 : return NULL_TREE;
2571 73326 : break;
2572 :
2573 73326 : case BIT_CAST_EXPR:
2574 73326 : *stmt_p = build1_loc (EXPR_LOCATION (stmt), VIEW_CONVERT_EXPR,
2575 73326 : TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
2576 73326 : break;
2577 :
2578 20423088 : case MODIFY_EXPR:
2579 : /* Mark stores to parts of complex automatic non-addressable
2580 : variables as DECL_NOT_GIMPLE_REG_P for -O0. This can't be
2581 : done during gimplification. See PR119120. */
2582 20423088 : if ((TREE_CODE (TREE_OPERAND (stmt, 0)) == REALPART_EXPR
2583 20395677 : || TREE_CODE (TREE_OPERAND (stmt, 0)) == IMAGPART_EXPR)
2584 54833 : && !optimize
2585 462 : && DECL_P (TREE_OPERAND (TREE_OPERAND (stmt, 0), 0))
2586 20423182 : && is_gimple_reg (TREE_OPERAND (TREE_OPERAND (stmt, 0), 0)))
2587 50 : DECL_NOT_GIMPLE_REG_P (TREE_OPERAND (TREE_OPERAND (stmt, 0), 0)) = 1;
2588 : break;
2589 :
2590 800468090 : default:
2591 800468090 : if (IS_TYPE_OR_DECL_P (stmt))
2592 250336806 : *walk_subtrees = 0;
2593 : break;
2594 : }
2595 :
2596 1329553452 : p_set->add (*stmt_p);
2597 :
2598 1329553452 : return NULL;
2599 : }
2600 :
/* Lower C++ front end trees to GENERIC in T_P.  HANDLE_INVISIREF_PARM_P
   is propagated into the walk data and controls whether cp_genericize_r
   rewrites uses of invisible-reference parameters.  */

static void
cp_genericize_tree (tree* t_p, bool handle_invisiref_parm_p)
{
  struct cp_genericize_data wtd;

  /* Set up the per-walk state consumed by cp_genericize_r.  */
  wtd.p_set = new hash_set<tree>;
  wtd.bind_expr_stack.create (0);
  wtd.omp_ctx = NULL;
  wtd.try_block = NULL_TREE;
  wtd.no_sanitize_p = false;
  wtd.handle_invisiref_parm_p = handle_invisiref_parm_p;
  cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
  delete wtd.p_set;
  /* With -fsanitize=vptr, add checks to member accesses after lowering.  */
  if (sanitize_flags_p (SANITIZE_VPTR))
    cp_ubsan_instrument_member_accesses (t_p);
}
2619 :
/* If a function that should end with a return in non-void
   function doesn't obviously end with return, add ubsan
   instrumentation code to verify it at runtime.  If -fsanitize=return
   is not enabled, instrument __builtin_unreachable.  */

static void
cp_maybe_instrument_return (tree fndecl)
{
  /* Nothing to check for void functions, ctors/dtors, or when the target
     says falling off the end is fine for this function.  */
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
      || DECL_CONSTRUCTOR_P (fndecl)
      || DECL_DESTRUCTOR_P (fndecl)
      || !targetm.warn_func_return (fndecl))
    return;

  if (!sanitize_flags_p (SANITIZE_RETURN, fndecl)
      /* Don't add __builtin_unreachable () if not optimizing, it will not
	 improve any optimizations in that case, just break UB code.
	 Don't add it if -fsanitize=unreachable -fno-sanitize=return either,
	 UBSan covers this with ubsan_instrument_return above where sufficient
	 information is provided, while the __builtin_unreachable () below
	 if return sanitization is disabled will just result in hard to
	 understand runtime error without location.  */
      && ((!optimize && !flag_unreachable_traps)
	  || sanitize_flags_p (SANITIZE_UNREACHABLE, fndecl)))
    return;

  /* Walk down to the last statement of the function body, looking through
     BIND_EXPRs, cleanups, and statement lists (ignoring any trailing
     DEBUG_BEGIN_STMTs).  If that last statement is a RETURN_EXPR there is
     nothing to instrument.  */
  tree t = DECL_SAVED_TREE (fndecl);
  while (t)
    {
      switch (TREE_CODE (t))
	{
	case BIND_EXPR:
	  t = BIND_EXPR_BODY (t);
	  continue;
	case TRY_FINALLY_EXPR:
	case CLEANUP_POINT_EXPR:
	  t = TREE_OPERAND (t, 0);
	  continue;
	case STATEMENT_LIST:
	  {
	    /* Find the last non-debug statement in the list.  */
	    tree_stmt_iterator i = tsi_last (t);
	    while (!tsi_end_p (i))
	      {
		tree p = tsi_stmt (i);
		if (TREE_CODE (p) != DEBUG_BEGIN_STMT)
		  break;
		tsi_prev (&i);
	      }
	    if (!tsi_end_p (i))
	      {
		t = tsi_stmt (i);
		continue;
	      }
	  }
	  break;
	case RETURN_EXPR:
	  /* Function already ends in a return; nothing to do.  */
	  return;
	default:
	  break;
	}
      break;
    }
  if (t == NULL_TREE)
    return;
  tree *p = &DECL_SAVED_TREE (fndecl);
  if (TREE_CODE (*p) == BIND_EXPR)
    p = &BIND_EXPR_BODY (*p);

  /* Append either the UBSan runtime diagnostic or an unreachable marker
     at the end of the outermost statement list.  */
  location_t loc = DECL_SOURCE_LOCATION (fndecl);
  if (sanitize_flags_p (SANITIZE_RETURN, fndecl))
    t = ubsan_instrument_return (loc);
  else
    t = build_builtin_unreachable (BUILTINS_LOCATION);

  append_to_statement_list (t, p);
}
2696 :
/* Genericize the body of FNDECL: rewrite parameters and the return value
   that are passed by invisible reference, then lower the C++ trees to
   GENERIC via cp_genericize_tree and c_genericize.  */

void
cp_genericize (tree fndecl)
{
  tree t;

  /* Fix up the types of parms passed by invisible reference.  */
  for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_TYPE (t)))
      {
	/* If a function's arguments are copied to create a thunk,
	   then DECL_BY_REFERENCE will be set -- but the type of the
	   argument will be a pointer type, so we will never get
	   here.  */
	gcc_assert (!DECL_BY_REFERENCE (t));
	gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
	TREE_TYPE (t) = DECL_ARG_TYPE (t);
	DECL_BY_REFERENCE (t) = 1;
	TREE_ADDRESSABLE (t) = 0;
	relayout_decl (t);
      }

  /* Do the same for the return value.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
    {
      t = DECL_RESULT (fndecl);
      TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
      DECL_BY_REFERENCE (t) = 1;
      TREE_ADDRESSABLE (t) = 0;
      relayout_decl (t);
      if (DECL_NAME (t))
	{
	  /* Adjust DECL_VALUE_EXPR of the original var.  */
	  tree outer = outer_curly_brace_block (current_function_decl);
	  tree var;

	  /* Find the named-return-value variable whose value expr is the
	     RESULT_DECL and make it dereference the new reference.  */
	  if (outer)
	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
	      if (VAR_P (var)
		  && DECL_NAME (t) == DECL_NAME (var)
		  && DECL_HAS_VALUE_EXPR_P (var)
		  && DECL_VALUE_EXPR (var) == t)
		{
		  tree val = convert_from_reference (t);
		  SET_DECL_VALUE_EXPR (var, val);
		  break;
		}
	}
    }

  /* If we're a clone, the body is already GIMPLE.  */
  if (DECL_CLONED_FUNCTION_P (fndecl))
    return;

  /* Allow cp_genericize calls to be nested.  */
  bc_state_t save_state;
  save_bc_state (&save_state);

  /* We do want to see every occurrence of the parms, so we can't just use
     walk_tree's hash functionality.  */
  cp_genericize_tree (&DECL_SAVED_TREE (fndecl), true);

  cp_maybe_instrument_return (fndecl);

  /* Do everything else.  */
  c_genericize (fndecl);
  restore_bc_state (&save_state);
}
2764 :
/* Build code to apply FN to each member of ARG1 and ARG2.  FN may be
   NULL if there is in fact nothing to do.  ARG2 may be null if FN
   actually only takes one argument.  */

static tree
cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
{
  tree defparm, parm, t;
  int i = 0;
  int nargs;
  tree *argarray;

  if (fn == NULL)
    return NULL;

  nargs = list_length (DECL_ARGUMENTS (fn));
  argarray = XALLOCAVEC (tree, nargs);

  /* Skip the first parameter type (and the second when ARG2 is used) to
     reach any default arguments of FN.  */
  defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
  if (arg2)
    defparm = TREE_CHAIN (defparm);

  bool is_method = TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE;
  if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
    {
      /* For an array, build an element-wise loop:
	   p1 = &arg1[0]...[0]; [p2 = &arg2[0]...[0];]
	   lab: fn (p1[, p2], defaults...); p1++; [p2++;]
	   if (p1 != end1) goto lab;  */
      tree inner_type = TREE_TYPE (arg1);
      tree start1, end1, p1;
      tree start2 = NULL, p2 = NULL;
      tree ret = NULL, lab;

      start1 = arg1;
      start2 = arg2;
      /* Strip to the innermost element, building ARRAY_REFs of index 0.  */
      do
	{
	  inner_type = TREE_TYPE (inner_type);
	  start1 = build4 (ARRAY_REF, inner_type, start1,
			   size_zero_node, NULL, NULL);
	  if (arg2)
	    start2 = build4 (ARRAY_REF, inner_type, start2,
			     size_zero_node, NULL, NULL);
	}
      while (TREE_CODE (inner_type) == ARRAY_TYPE);
      start1 = build_fold_addr_expr_loc (input_location, start1);
      if (arg2)
	start2 = build_fold_addr_expr_loc (input_location, start2);

      /* One-past-the-end pointer for the loop exit test.  */
      end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
      end1 = fold_build_pointer_plus (start1, end1);

      p1 = create_tmp_var (TREE_TYPE (start1));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
      append_to_statement_list (t, &ret);

      if (arg2)
	{
	  p2 = create_tmp_var (TREE_TYPE (start2));
	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
	  append_to_statement_list (t, &ret);
	}

      /* Loop head label.  */
      lab = create_artificial_label (input_location);
      t = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (t, &ret);

      argarray[i++] = p1;
      if (arg2)
	argarray[i++] = p2;
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
	   parm = TREE_CHAIN (parm), i++)
	argarray[i] = convert_default_arg (TREE_VALUE (parm),
					   TREE_PURPOSE (parm), fn,
					   i - is_method, tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      if (MAYBE_CLASS_TYPE_P (TREE_TYPE (t)))
	t = build_cplus_new (TREE_TYPE (t), t, tf_warning_or_error);
      t = fold_convert (void_type_node, t);
      t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
      append_to_statement_list (t, &ret);

      /* Advance the element pointer(s) by one element.  */
      t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
      append_to_statement_list (t, &ret);

      if (arg2)
	{
	  t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
	  append_to_statement_list (t, &ret);
	}

      /* Loop back while P1 has not reached END1.  */
      t = build2 (NE_EXPR, boolean_type_node, p1, end1);
      t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
      append_to_statement_list (t, &ret);

      return ret;
    }
  else
    {
      /* Non-array case: a single call fn (&arg1[, &arg2], defaults...).  */
      argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
      if (arg2)
	argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
	   parm = TREE_CHAIN (parm), i++)
	argarray[i] = convert_default_arg (TREE_VALUE (parm),
					   TREE_PURPOSE (parm), fn,
					   i - is_method, tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      if (MAYBE_CLASS_TYPE_P (TREE_TYPE (t)))
	t = build_cplus_new (TREE_TYPE (t), t, tf_warning_or_error);
      t = fold_convert (void_type_node, t);
      return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
    }
}
2880 :
2881 : /* Return code to initialize DECL with its default constructor, or
2882 : NULL if there's nothing to do. */
2883 :
2884 : tree
2885 42509 : cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
2886 : {
2887 42509 : tree info = CP_OMP_CLAUSE_INFO (clause);
2888 42509 : tree ret = NULL;
2889 :
2890 42509 : if (info)
2891 1392 : ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);
2892 :
2893 42509 : return ret;
2894 : }
2895 :
2896 : /* Return code to initialize DST with a copy constructor from SRC. */
2897 :
2898 : tree
2899 12330 : cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
2900 : {
2901 12330 : tree info = CP_OMP_CLAUSE_INFO (clause);
2902 12330 : tree ret = NULL;
2903 :
2904 12330 : if (info)
2905 283 : ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
2906 283 : if (ret == NULL)
2907 12112 : ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
2908 :
2909 12330 : return ret;
2910 : }
2911 :
2912 : /* Similarly, except use an assignment operator instead. */
2913 :
2914 : tree
2915 12680 : cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
2916 : {
2917 12680 : tree info = CP_OMP_CLAUSE_INFO (clause);
2918 12680 : tree ret = NULL;
2919 :
2920 12680 : if (info)
2921 748 : ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
2922 748 : if (ret == NULL)
2923 11954 : ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
2924 :
2925 12680 : return ret;
2926 : }
2927 :
2928 : /* Return code to destroy DECL. */
2929 :
2930 : tree
2931 62496 : cxx_omp_clause_dtor (tree clause, tree decl)
2932 : {
2933 62496 : tree info = CP_OMP_CLAUSE_INFO (clause);
2934 62496 : tree ret = NULL;
2935 :
2936 62496 : if (info)
2937 1239 : ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);
2938 :
2939 62496 : return ret;
2940 : }
2941 :
2942 : /* True if OpenMP should privatize what this DECL points to rather
2943 : than the DECL itself. */
2944 :
2945 : bool
2946 926612 : cxx_omp_privatize_by_reference (const_tree decl)
2947 : {
2948 926612 : return (TYPE_REF_P (TREE_TYPE (decl))
2949 926612 : || is_invisiref_parm (decl));
2950 : }
2951 :
/* Return true if DECL is const qualified var having no mutable member.  */
bool
cxx_omp_const_qual_no_mutable (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (TYPE_REF_P (type))
    {
      /* Only look through the reference for invisible-reference parms;
	 other references are not handled here.  */
      if (!is_invisiref_parm (decl))
	return false;
      type = TREE_TYPE (type);

      if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
	{
	  /* NVR doesn't preserve const qualification of the
	     variable's type.  */
	  tree outer = outer_curly_brace_block (current_function_decl);
	  tree var;

	  /* Look up the same-named variable in the outermost block and
	     prefer its (possibly const-qualified) type.  */
	  if (outer)
	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
	      if (VAR_P (var)
		  && DECL_NAME (decl) == DECL_NAME (var)
		  && (TYPE_MAIN_VARIANT (type)
		      == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
		{
		  if (TYPE_READONLY (TREE_TYPE (var)))
		    type = TREE_TYPE (var);
		  break;
		}
	}
    }

  if (type == error_mark_node)
    return false;

  /* Variables with const-qualified type having no mutable member
     are predetermined shared.  */
  if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
    return true;

  return false;
}
2994 :
2995 : /* OMP_CLAUSE_DEFAULT_UNSPECIFIED unless OpenMP sharing attribute
2996 : of DECL is predetermined. */
2997 :
2998 : enum omp_clause_default_kind
2999 55342 : cxx_omp_predetermined_sharing_1 (tree decl)
3000 : {
3001 : /* Static data members are predetermined shared. */
3002 55342 : if (TREE_STATIC (decl))
3003 : {
3004 15125 : tree ctx = CP_DECL_CONTEXT (decl);
3005 15125 : if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
3006 : return OMP_CLAUSE_DEFAULT_SHARED;
3007 :
3008 15019 : if (c_omp_predefined_variable (decl))
3009 : return OMP_CLAUSE_DEFAULT_SHARED;
3010 : }
3011 :
3012 : /* this may not be specified in data-sharing clauses, still we need
3013 : to predetermined it firstprivate. */
3014 55191 : if (decl == current_class_ptr)
3015 113 : return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
3016 :
3017 : return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
3018 : }
3019 :
3020 : /* Likewise, but also include the artificial vars. We don't want to
3021 : disallow the artificial vars being mentioned in explicit clauses,
3022 : as we use artificial vars e.g. for loop constructs with random
3023 : access iterators other than pointers, but during gimplification
3024 : we want to treat them as predetermined. */
3025 :
3026 : enum omp_clause_default_kind
3027 34890 : cxx_omp_predetermined_sharing (tree decl)
3028 : {
3029 34890 : enum omp_clause_default_kind ret = cxx_omp_predetermined_sharing_1 (decl);
3030 34890 : if (ret != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
3031 : return ret;
3032 :
3033 : /* Predetermine artificial variables holding integral values, those
3034 : are usually result of gimplify_one_sizepos or SAVE_EXPR
3035 : gimplification. */
3036 34673 : if (VAR_P (decl)
3037 22784 : && DECL_ARTIFICIAL (decl)
3038 6994 : && INTEGRAL_TYPE_P (TREE_TYPE (decl))
3039 35191 : && !(DECL_LANG_SPECIFIC (decl)
3040 2 : && DECL_OMP_PRIVATIZED_MEMBER (decl)))
3041 : return OMP_CLAUSE_DEFAULT_SHARED;
3042 :
3043 : /* Similarly for typeinfo symbols. */
3044 34157 : if (VAR_P (decl) && DECL_ARTIFICIAL (decl) && DECL_TINFO_P (decl))
3045 57 : return OMP_CLAUSE_DEFAULT_SHARED;
3046 :
3047 : return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
3048 : }
3049 :
3050 : enum omp_clause_defaultmap_kind
3051 17367 : cxx_omp_predetermined_mapping (tree decl)
3052 : {
3053 : /* Predetermine artificial variables holding integral values, those
3054 : are usually result of gimplify_one_sizepos or SAVE_EXPR
3055 : gimplification. */
3056 17367 : if (VAR_P (decl)
3057 1637 : && DECL_ARTIFICIAL (decl)
3058 142 : && INTEGRAL_TYPE_P (TREE_TYPE (decl))
3059 17433 : && !(DECL_LANG_SPECIFIC (decl)
3060 6 : && DECL_OMP_PRIVATIZED_MEMBER (decl)))
3061 : return OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE;
3062 :
3063 17301 : if (c_omp_predefined_variable (decl))
3064 12 : return OMP_CLAUSE_DEFAULTMAP_TO;
3065 :
3066 : return OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED;
3067 : }
3068 :
/* Finalize an implicitly determined clause.  */

void
cxx_omp_finish_clause (tree c, gimple_seq *, bool /* openacc */)
{
  tree decl, inner_type;
  bool make_shared = false;

  /* Only firstprivate, private, and lastprivate-of-a-loop-IV clauses
     need finishing here.  */
  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE
      && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE
      && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LASTPRIVATE
	  || !OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)))
    return;

  decl = OMP_CLAUSE_DECL (c);
  decl = require_complete_type (decl);
  inner_type = TREE_TYPE (decl);
  if (decl == error_mark_node)
    make_shared = true;
  else if (TYPE_REF_P (TREE_TYPE (decl)))
    inner_type = TREE_TYPE (inner_type);

  /* We're interested in the base element, not arrays.  */
  while (TREE_CODE (inner_type) == ARRAY_TYPE)
    inner_type = TREE_TYPE (inner_type);

  /* Check for special function availability by building a call to one.
     Save the results, because later we won't be in the right context
     for making these queries.  */
  bool first = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE;
  bool last = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE;
  if (!make_shared
      && CLASS_TYPE_P (inner_type)
      && cxx_omp_create_clause_info (c, inner_type, !first, first, last,
				     true))
    make_shared = true;

  /* On failure (or error_mark_node decl), demote the clause to shared.  */
  if (make_shared)
    {
      OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
      OMP_CLAUSE_SHARED_FIRSTPRIVATE (c) = 0;
      OMP_CLAUSE_SHARED_READONLY (c) = 0;
    }
}
3113 :
3114 : tree
3115 32 : cxx_omp_finish_mapper_clauses (tree clauses)
3116 : {
3117 32 : return finish_omp_clauses (clauses, C_ORT_OMP);
3118 : }
3119 :
3120 : /* Return true if DECL's DECL_VALUE_EXPR (if any) should be
3121 : disregarded in OpenMP construct, because it is going to be
3122 : remapped during OpenMP lowering. SHARED is true if DECL
3123 : is going to be shared, false if it is going to be privatized. */
3124 :
3125 : bool
3126 663248 : cxx_omp_disregard_value_expr (tree decl, bool shared)
3127 : {
3128 663248 : if (shared)
3129 : return false;
3130 400949 : if (VAR_P (decl)
3131 379045 : && DECL_HAS_VALUE_EXPR_P (decl)
3132 9515 : && DECL_ARTIFICIAL (decl)
3133 9004 : && DECL_LANG_SPECIFIC (decl)
3134 408830 : && DECL_OMP_PRIVATIZED_MEMBER (decl))
3135 : return true;
3136 395536 : if (VAR_P (decl) && DECL_CONTEXT (decl) && is_capture_proxy (decl))
3137 : return true;
3138 : return false;
3139 : }
3140 :
/* Fold any non-ODR-usages of a constant variable in expression X.  */

static tree
cp_fold_non_odr_use_1 (tree x)
{
  /* Strip component references and conversions off X to find the
     underlying VAR_DECL, if any; bail out (returning X unchanged) on
     anything else.  */
  tree var = x;
  while (!VAR_P (var))
    switch (TREE_CODE (var))
      {
      case ARRAY_REF:
      case BIT_FIELD_REF:
      case COMPONENT_REF:
      case VIEW_CONVERT_EXPR:
      CASE_CONVERT:
	var = TREE_OPERAND (var, 0);
	break;

      case INDIRECT_REF:
	/* Only look through implicit dereferences of references.  */
	if (REFERENCE_REF_P (var))
	  var = TREE_OPERAND (var, 0);
	else
	  return x;
	break;

      default:
	return x;
      }

  /* Volatile or non-constant variables can't be folded away.  */
  if (TREE_THIS_VOLATILE (var)
      || !decl_constant_var_p (var))
    return x;

  /* We mustn't fold std::hardware_destructive_interference_size here
     so that maybe_warn_about_constant_value can complain if it's used
     in a manifestly constant-evaluated context.  */
  if (decl_in_std_namespace_p (var)
      && DECL_NAME (var)
      && id_equal (DECL_NAME (var), "hardware_destructive_interference_size"))
    return x;

  /* Replace X only when it evaluates to a constant.  */
  tree t = maybe_constant_value (x);
  return TREE_CONSTANT (t) ? t : x;
}
3184 :
/* Fold expression X which is used as an rvalue if RVAL is true.
   FLAGS modifies the folding; see the fold_flags_t values used below.  */

static tree
cp_fold_maybe_rvalue (tree x, bool rval, fold_flags_t flags)
{
  while (true)
    {
      /* In non-ODR-use mode, first try to replace a constant variable
	 use by its value.  */
      if (rval && (flags & ff_only_non_odr))
	x = cp_fold_non_odr_use_1 (x);
      x = cp_fold (x, flags);
      if (rval)
	{
	  x = mark_rvalue_use (x);
	  if (!(flags & ff_only_non_odr)
	      && DECL_P (x) && !TYPE_REF_P (TREE_TYPE (x)))
	    {
	      /* If a decl folds to its constant value, loop so the value
		 itself gets folded as well.  */
	      tree v = decl_constant_value (x);
	      if (v != x && v != error_mark_node)
		{
		  x = v;
		  continue;
		}
	    }
	}
      break;
    }
  return x;
}
3213 :
3214 : tree
3215 73078418 : cp_fold_maybe_rvalue (tree x, bool rval)
3216 : {
3217 73078418 : return cp_fold_maybe_rvalue (x, rval, ff_none);
3218 : }
3219 :
3220 : /* Fold expression X which is used as an rvalue. */
3221 :
3222 : static tree
3223 258563830 : cp_fold_rvalue (tree x, fold_flags_t flags)
3224 : {
3225 9643967 : return cp_fold_maybe_rvalue (x, true, flags);
3226 : }
3227 :
3228 : tree
3229 410637 : cp_fold_rvalue (tree x)
3230 : {
3231 410637 : return cp_fold_rvalue (x, ff_none);
3232 : }
3233 :
3234 : /* Fold any non-ODR used constants in an expression X which
3235 : is used as an rvalue if RVAL is true. */
3236 :
3237 : tree
3238 695569 : cp_fold_non_odr_use (tree x, bool rval)
3239 : {
3240 695569 : return cp_fold_maybe_rvalue (x, rval, ff_only_non_odr);
3241 : }
3242 :
/* Perform folding on expression X.  MANIFESTLY_CONST_EVAL says whether
   X is known (not) to be in a manifestly constant-evaluated context.  */

static tree
cp_fully_fold (tree x, mce_value manifestly_const_eval)
{
  /* Template-dependent trees can't be folded.  */
  if (processing_template_decl)
    return x;
  /* FIXME cp_fold ought to be a superset of maybe_constant_value so we don't
     have to call both.  */
  if (cxx_dialect >= cxx11)
    {
      x = maybe_constant_value (x, /*decl=*/NULL_TREE, manifestly_const_eval);
      /* Sometimes we are given a CONSTRUCTOR but the call above wraps it into
	 a TARGET_EXPR; undo that here.  */
      if (TREE_CODE (x) == TARGET_EXPR)
	x = TARGET_EXPR_INITIAL (x);
      else if (TREE_CODE (x) == VIEW_CONVERT_EXPR
	       && TREE_CODE (TREE_OPERAND (x, 0)) == CONSTRUCTOR
	       && TREE_TYPE (TREE_OPERAND (x, 0)) == TREE_TYPE (x))
	x = TREE_OPERAND (x, 0);
    }
  /* Record the constant-evaluation context in the fold flags so the
     result is cached separately (see get_fold_cache).  */
  fold_flags_t flags = ff_none;
  if (manifestly_const_eval == mce_false)
    flags |= ff_mce_false;
  return cp_fold_rvalue (x, flags);
}
3269 :
3270 : tree
3271 229637439 : cp_fully_fold (tree x)
3272 : {
3273 229637439 : return cp_fully_fold (x, mce_unknown);
3274 : }
3275 :
/* Likewise, but also fold recursively, which cp_fully_fold doesn't perform
   in some cases.  */

tree
cp_fully_fold_init (tree x)
{
  if (processing_template_decl)
    return x;
  /* Initializers are not manifestly constant-evaluated here.  */
  x = cp_fully_fold (x, mce_false);
  cp_fold_data data (ff_mce_false);
  if (cxx_dialect >= cxx20)
    {
      /* First walk for immediate (consteval) invocations, then reset the
	 visited set before the general folding walk.  */
      cp_walk_tree (&x, cp_fold_immediate_r, &data, NULL);
      data.pset.empty ();
    }
  cp_walk_tree (&x, cp_fold_r, &data, NULL);
  return x;
}
3294 :
3295 : /* c-common interface to cp_fold. If IN_INIT, this is in a static initializer
3296 : and certain changes are made to the folding done. Or should be (FIXME). We
3297 : never touch maybe_const, as it is only used for the C front-end
3298 : C_MAYBE_CONST_EXPR. */
3299 :
3300 : tree
3301 73078418 : c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/, bool lval)
3302 : {
3303 73078418 : return cp_fold_maybe_rvalue (x, !lval);
3304 : }
3305 :
3306 : static GTY((deletable)) hash_map<tree, tree> *fold_caches[3];
3307 :
3308 : /* Subroutine of cp_fold. Returns which fold cache to use according
3309 : to the given flags. We need multiple caches since the result of
3310 : folding may depend on which flags are used. */
3311 :
3312 : static hash_map<tree, tree> *&
3313 4521015512 : get_fold_cache (fold_flags_t flags)
3314 : {
3315 0 : if (flags & ff_mce_false)
3316 2037458634 : return fold_caches[2];
3317 2483556878 : else if (flags & ff_only_non_odr)
3318 2136239043 : return fold_caches[1];
3319 : else
3320 347317835 : return fold_caches[0];
3321 : }
3322 :
3323 : /* Dispose of the whole FOLD_CACHE. */
3324 :
3325 : void
3326 38559068 : clear_fold_cache (void)
3327 : {
3328 154236272 : for (auto& fold_cache : fold_caches)
3329 115677204 : if (fold_cache != NULL)
3330 138760524 : fold_cache->empty ();
3331 38559068 : }
3332 :
3333 : /* This function tries to fold an expression X.
3334 : To avoid combinatorial explosion, folding results are kept in fold_cache.
3335 : If X is invalid, we don't fold at all.
3336 : For performance reasons we don't cache expressions representing a
3337 : declaration or constant.
3338 : Function returns X or its folded variant. */
3339 :
3340 : static tree
3341 7345826188 : cp_fold (tree x, fold_flags_t flags)
3342 : {
3343 7345826188 : tree op0, op1, op2, op3;
3344 7345826188 : tree org_x = x, r = NULL_TREE;
3345 7345826188 : enum tree_code code;
3346 7345826188 : location_t loc;
3347 7345826188 : bool rval_ops = true;
3348 :
3349 7345826188 : if (!x || x == error_mark_node)
3350 : return x;
3351 :
3352 7339573689 : if (EXPR_P (x) && (!TREE_TYPE (x) || TREE_TYPE (x) == error_mark_node))
3353 : return x;
3354 :
3355 : /* Don't bother to cache DECLs or constants. */
3356 7339293691 : if (DECL_P (x) || CONSTANT_CLASS_P (x))
3357 : return x;
3358 :
3359 4521015512 : auto& fold_cache = get_fold_cache (flags);
3360 4521015512 : if (fold_cache == NULL)
3361 495556 : fold_cache = hash_map<tree, tree>::create_ggc (101);
3362 :
3363 4521015512 : if (tree *cached = fold_cache->get (x))
3364 : {
3365 : /* unshare_expr doesn't recurse into SAVE_EXPRs. If SAVE_EXPR's
3366 : argument has been folded into a tree invariant, make sure it is
3367 : unshared. See PR112727. */
3368 1041507959 : if (TREE_CODE (x) == SAVE_EXPR && *cached != x)
3369 85 : return unshare_expr (*cached);
3370 1041507874 : return *cached;
3371 : }
3372 :
3373 3479507553 : uid_sensitive_constexpr_evaluation_checker c;
3374 :
3375 3479507553 : code = TREE_CODE (x);
3376 3479507553 : switch (code)
3377 : {
3378 197778731 : case CLEANUP_POINT_EXPR:
3379 : /* Strip CLEANUP_POINT_EXPR if the expression doesn't have side
3380 : effects. */
3381 197778731 : r = cp_fold (TREE_OPERAND (x, 0), flags);
3382 197778731 : if (!TREE_SIDE_EFFECTS (r) && !(flags & ff_only_non_odr))
3383 2267268 : x = r;
3384 : break;
3385 :
3386 1561112 : case SIZEOF_EXPR:
3387 1561112 : x = fold_sizeof_expr (x);
3388 1561112 : break;
3389 :
3390 309046311 : case VIEW_CONVERT_EXPR:
3391 309046311 : rval_ops = false;
3392 : /* FALLTHRU */
3393 957149169 : case NON_LVALUE_EXPR:
3394 957149169 : CASE_CONVERT:
3395 :
3396 957149169 : if (VOID_TYPE_P (TREE_TYPE (x)))
3397 : {
3398 : /* This is just to make sure we don't end up with casts to
3399 : void from error_mark_node. If we just return x, then
3400 : cp_fold_r might fold the operand into error_mark_node and
3401 : leave the conversion in the IR. STRIP_USELESS_TYPE_CONVERSION
3402 : during gimplification doesn't like such casts.
3403 : Don't create a new tree if op0 != TREE_OPERAND (x, 0), the
3404 : folding of the operand should be in the caches and if in cp_fold_r
3405 : it will modify it in place. */
3406 87041936 : op0 = cp_fold (TREE_OPERAND (x, 0), flags);
3407 87041936 : if (op0 == error_mark_node)
3408 104 : x = error_mark_node;
3409 : break;
3410 : }
3411 :
3412 870107233 : loc = EXPR_LOCATION (x);
3413 870107233 : op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops, flags);
3414 :
3415 870107233 : if (op0 == error_mark_node)
3416 0 : x = error_mark_node;
3417 870107233 : else if (flags & ff_only_non_odr)
3418 : {
3419 322875844 : if (op0 != TREE_OPERAND (x, 0))
3420 6251284 : x = build1_loc (loc, code, TREE_TYPE (x), op0);
3421 322875844 : if (code == NOP_EXPR)
3422 168218922 : REINTERPRET_CAST_P (x) = REINTERPRET_CAST_P (org_x);
3423 : }
3424 547231389 : else if (code == CONVERT_EXPR
3425 43412147 : && SCALAR_TYPE_P (TREE_TYPE (x))
3426 590642578 : && op0 != void_node)
3427 : /* During parsing we used convert_to_*_nofold; re-convert now using the
3428 : folding variants, since fold() doesn't do those transformations. */
3429 39202360 : x = fold (convert (TREE_TYPE (x), op0));
3430 508029029 : else if (op0 != TREE_OPERAND (x, 0))
3431 135667099 : x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
3432 : else
3433 372361930 : x = fold (x);
3434 :
3435 : /* Conversion of an out-of-range value has implementation-defined
3436 : behavior; the language considers it different from arithmetic
3437 : overflow, which is undefined. */
3438 870107233 : if (TREE_CODE (op0) == INTEGER_CST
3439 870107233 : && TREE_OVERFLOW_P (x) && !TREE_OVERFLOW_P (op0))
3440 44 : TREE_OVERFLOW (x) = false;
3441 :
3442 : break;
3443 :
3444 245 : case EXCESS_PRECISION_EXPR:
3445 245 : op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops, flags);
3446 245 : if (op0 == error_mark_node)
3447 0 : x = error_mark_node;
3448 245 : else if (flags & ff_only_non_odr)
3449 : {
3450 65 : if (op0 != TREE_OPERAND (x, 0))
3451 0 : x = build1_loc (EXPR_LOCATION (x), code, TREE_TYPE (x), op0);
3452 : }
3453 : else
3454 180 : x = fold_convert_loc (EXPR_LOCATION (x), TREE_TYPE (x), op0);
3455 : break;
3456 :
3457 121356442 : case INDIRECT_REF:
3458 : /* We don't need the decltype(auto) obfuscation anymore. */
3459 121356442 : if (REF_PARENTHESIZED_P (x))
3460 : {
3461 718 : tree p = maybe_undo_parenthesized_ref (x);
3462 718 : if (p != x)
3463 0 : return cp_fold (p, flags);
3464 : }
3465 : /* When folding non-ODR usages of constants, we also want to
3466 : remove any constant-initialized references, even when
3467 : used as lvalues. */
3468 121356442 : if ((flags & ff_only_non_odr) && REFERENCE_REF_P (x))
3469 : {
3470 16236563 : op0 = cp_fold_non_odr_use_1 (TREE_OPERAND (x, 0));
3471 16236563 : if (op0 != TREE_OPERAND (x, 0))
3472 1511 : return convert_from_reference (cp_fold (op0, flags));
3473 : }
3474 121354931 : goto unary;
3475 :
3476 295279284 : case ADDR_EXPR:
3477 295279284 : loc = EXPR_LOCATION (x);
3478 295279284 : op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), false, flags);
3479 :
3480 : /* Cope with user tricks that amount to offsetof. */
3481 295279284 : if (op0 != error_mark_node
3482 295279284 : && !FUNC_OR_METHOD_TYPE_P (TREE_TYPE (op0))
3483 406272222 : && !(flags & ff_only_non_odr))
3484 : {
3485 61725775 : tree val = get_base_address (op0);
3486 61725775 : if (val
3487 61725775 : && INDIRECT_REF_P (val)
3488 24284007 : && COMPLETE_TYPE_P (TREE_TYPE (val))
3489 86009692 : && TREE_CONSTANT (TREE_OPERAND (val, 0)))
3490 : {
3491 264 : val = TREE_OPERAND (val, 0);
3492 264 : STRIP_NOPS (val);
3493 264 : val = maybe_constant_value (val);
3494 264 : if (TREE_CODE (val) == INTEGER_CST)
3495 127 : return fold_offsetof (op0, TREE_TYPE (x));
3496 : }
3497 : }
3498 295279157 : goto finish_unary;
3499 :
3500 : case REALPART_EXPR:
3501 : case IMAGPART_EXPR:
3502 161539226 : rval_ops = false;
3503 : /* FALLTHRU */
3504 161539226 : case CONJ_EXPR:
3505 161539226 : case FIX_TRUNC_EXPR:
3506 161539226 : case FLOAT_EXPR:
3507 161539226 : case NEGATE_EXPR:
3508 161539226 : case ABS_EXPR:
3509 161539226 : case ABSU_EXPR:
3510 161539226 : case BIT_NOT_EXPR:
3511 161539226 : case TRUTH_NOT_EXPR:
3512 161539226 : case FIXED_CONVERT_EXPR:
3513 161539226 : unary:
3514 :
3515 161539226 : loc = EXPR_LOCATION (x);
3516 161539226 : op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops, flags);
3517 :
3518 456818383 : finish_unary:
3519 456818383 : if (op0 == error_mark_node)
3520 0 : x = error_mark_node;
3521 456818383 : else if (op0 != TREE_OPERAND (x, 0))
3522 : {
3523 31166243 : if (flags & ff_only_non_odr)
3524 1321875 : x = build1_loc (loc, code, TREE_TYPE (x), op0);
3525 : else
3526 29844368 : x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
3527 31166243 : if (code == INDIRECT_REF
3528 10013057 : && (INDIRECT_REF_P (x) || TREE_CODE (x) == MEM_REF))
3529 : {
3530 10012930 : TREE_READONLY (x) = TREE_READONLY (org_x);
3531 10012930 : TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
3532 10012930 : TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
3533 : }
3534 : }
3535 425652140 : else if (!(flags & ff_only_non_odr))
3536 213132047 : x = fold (x);
3537 :
3538 456818383 : gcc_assert (TREE_CODE (x) != COND_EXPR
3539 : || !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (x, 0))));
3540 : break;
3541 :
3542 310738 : case UNARY_PLUS_EXPR:
3543 310738 : op0 = cp_fold_rvalue (TREE_OPERAND (x, 0), flags);
3544 310738 : if (op0 == error_mark_node)
3545 0 : x = error_mark_node;
3546 310738 : else if (flags & ff_only_non_odr)
3547 : {
3548 122883 : if (op0 != TREE_OPERAND (x, 0))
3549 12 : x = build1_loc (EXPR_LOCATION (x), code, TREE_TYPE (x), op0);
3550 : }
3551 : else
3552 187855 : x = fold_convert (TREE_TYPE (x), op0);
3553 : break;
3554 :
3555 189053621 : case POSTDECREMENT_EXPR:
3556 189053621 : case POSTINCREMENT_EXPR:
3557 189053621 : case INIT_EXPR:
3558 189053621 : case PREDECREMENT_EXPR:
3559 189053621 : case PREINCREMENT_EXPR:
3560 189053621 : case COMPOUND_EXPR:
3561 189053621 : case MODIFY_EXPR:
3562 189053621 : rval_ops = false;
3563 : /* FALLTHRU */
3564 392835569 : case POINTER_PLUS_EXPR:
3565 392835569 : case PLUS_EXPR:
3566 392835569 : case POINTER_DIFF_EXPR:
3567 392835569 : case MINUS_EXPR:
3568 392835569 : case MULT_EXPR:
3569 392835569 : case TRUNC_DIV_EXPR:
3570 392835569 : case CEIL_DIV_EXPR:
3571 392835569 : case FLOOR_DIV_EXPR:
3572 392835569 : case ROUND_DIV_EXPR:
3573 392835569 : case TRUNC_MOD_EXPR:
3574 392835569 : case CEIL_MOD_EXPR:
3575 392835569 : case ROUND_MOD_EXPR:
3576 392835569 : case RDIV_EXPR:
3577 392835569 : case EXACT_DIV_EXPR:
3578 392835569 : case MIN_EXPR:
3579 392835569 : case MAX_EXPR:
3580 392835569 : case LSHIFT_EXPR:
3581 392835569 : case RSHIFT_EXPR:
3582 392835569 : case LROTATE_EXPR:
3583 392835569 : case RROTATE_EXPR:
3584 392835569 : case BIT_AND_EXPR:
3585 392835569 : case BIT_IOR_EXPR:
3586 392835569 : case BIT_XOR_EXPR:
3587 392835569 : case TRUTH_AND_EXPR:
3588 392835569 : case TRUTH_ANDIF_EXPR:
3589 392835569 : case TRUTH_OR_EXPR:
3590 392835569 : case TRUTH_ORIF_EXPR:
3591 392835569 : case TRUTH_XOR_EXPR:
3592 392835569 : case LT_EXPR: case LE_EXPR:
3593 392835569 : case GT_EXPR: case GE_EXPR:
3594 392835569 : case EQ_EXPR: case NE_EXPR:
3595 392835569 : case UNORDERED_EXPR: case ORDERED_EXPR:
3596 392835569 : case UNLT_EXPR: case UNLE_EXPR:
3597 392835569 : case UNGT_EXPR: case UNGE_EXPR:
3598 392835569 : case UNEQ_EXPR: case LTGT_EXPR:
3599 392835569 : case RANGE_EXPR: case COMPLEX_EXPR:
3600 :
3601 392835569 : loc = EXPR_LOCATION (x);
3602 392835569 : op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops, flags);
3603 392835569 : bool clear_decl_read;
3604 392835569 : clear_decl_read = false;
3605 392835569 : if (code == MODIFY_EXPR
3606 48699838 : && (VAR_P (op0) || TREE_CODE (op0) == PARM_DECL)
3607 406267708 : && !DECL_READ_P (op0))
3608 : clear_decl_read = true;
3609 392835569 : op1 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 1),
3610 : code != COMPOUND_EXPR, flags);
3611 392835569 : if (clear_decl_read)
3612 729559 : DECL_READ_P (op0) = 0;
3613 :
3614 392835569 : if (flags & ff_only_non_odr)
3615 : {
3616 170972485 : if (op0 == error_mark_node || op1 == error_mark_node)
3617 24 : x = error_mark_node;
3618 170972461 : else if (op0 != TREE_OPERAND (x, 0) || op1 != TREE_OPERAND (x, 1))
3619 : {
3620 9652722 : if (code == INIT_EXPR && op1 != TREE_OPERAND (x, 1))
3621 3495580 : set_target_expr_eliding (op1);
3622 9652722 : x = build2_loc (loc, code, TREE_TYPE (x), op0, op1);
3623 : }
3624 : break;
3625 : }
3626 :
3627 : /* decltype(nullptr) has only one value, so optimize away all comparisons
3628 : with that type right away, keeping them in the IL causes troubles for
3629 : various optimizations. */
3630 221863084 : if (COMPARISON_CLASS_P (org_x)
3631 36110444 : && TREE_CODE (TREE_TYPE (op0)) == NULLPTR_TYPE
3632 221863111 : && TREE_CODE (TREE_TYPE (op1)) == NULLPTR_TYPE)
3633 : {
3634 27 : switch (code)
3635 : {
3636 12 : case EQ_EXPR:
3637 12 : x = constant_boolean_node (true, TREE_TYPE (x));
3638 12 : break;
3639 15 : case NE_EXPR:
3640 15 : x = constant_boolean_node (false, TREE_TYPE (x));
3641 15 : break;
3642 0 : default:
3643 0 : gcc_unreachable ();
3644 : }
3645 27 : return omit_two_operands_loc (loc, TREE_TYPE (x), x,
3646 27 : op0, op1);
3647 : }
3648 :
3649 221863057 : if (op0 == error_mark_node || op1 == error_mark_node)
3650 102 : x = error_mark_node;
3651 221862955 : else if (op0 != TREE_OPERAND (x, 0) || op1 != TREE_OPERAND (x, 1))
3652 152951599 : x = fold_build2_loc (loc, code, TREE_TYPE (x), op0, op1);
3653 : else
3654 68911356 : x = fold (x);
3655 :
3656 : /* This is only needed for -Wnonnull-compare and only if
3657 : TREE_NO_WARNING (org_x), but to avoid that option affecting code
3658 : generation, we do it always. */
3659 221863057 : if (COMPARISON_CLASS_P (org_x))
3660 : {
3661 36110417 : if (x == error_mark_node || TREE_CODE (x) == INTEGER_CST)
3662 : ;
3663 35038674 : else if (COMPARISON_CLASS_P (x))
3664 : {
3665 34193428 : if (warn_nonnull_compare
3666 34193428 : && warning_suppressed_p (org_x, OPT_Wnonnull_compare))
3667 122805 : suppress_warning (x, OPT_Wnonnull_compare);
3668 : }
3669 : /* Otherwise give up on optimizing these, let GIMPLE folders
3670 : optimize those later on. */
3671 845246 : else if (op0 != TREE_OPERAND (org_x, 0)
3672 845246 : || op1 != TREE_OPERAND (org_x, 1))
3673 : {
3674 844022 : x = build2_loc (loc, code, TREE_TYPE (org_x), op0, op1);
3675 844022 : if (warn_nonnull_compare
3676 844022 : && warning_suppressed_p (org_x, OPT_Wnonnull_compare))
3677 16 : suppress_warning (x, OPT_Wnonnull_compare);
3678 : }
3679 : else
3680 1224 : x = org_x;
3681 : }
3682 :
3683 : break;
3684 :
3685 9333229 : case VEC_COND_EXPR:
3686 9333229 : case COND_EXPR:
3687 9333229 : loc = EXPR_LOCATION (x);
3688 9333229 : op0 = cp_fold_rvalue (TREE_OPERAND (x, 0), flags);
3689 9333229 : op1 = cp_fold (TREE_OPERAND (x, 1), flags);
3690 9333229 : op2 = cp_fold (TREE_OPERAND (x, 2), flags);
3691 :
3692 9333229 : if (flags & ff_only_non_odr)
3693 : {
3694 3928360 : if (op0 == error_mark_node
3695 3928354 : || op1 == error_mark_node
3696 3928354 : || op2 == error_mark_node)
3697 6 : x = error_mark_node;
3698 3928354 : else if (op0 != TREE_OPERAND (x, 0)
3699 3854931 : || op1 != TREE_OPERAND (x, 1)
3700 7568619 : || op2 != TREE_OPERAND (x, 2))
3701 301431 : x = build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
3702 : break;
3703 : }
3704 :
3705 5404869 : if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
3706 : {
3707 19714 : warning_sentinel s (warn_int_in_bool_context);
3708 19714 : if (!VOID_TYPE_P (TREE_TYPE (op1)))
3709 19714 : op1 = cp_truthvalue_conversion (op1, tf_warning_or_error);
3710 19714 : if (!VOID_TYPE_P (TREE_TYPE (op2)))
3711 19693 : op2 = cp_truthvalue_conversion (op2, tf_warning_or_error);
3712 19714 : }
3713 5385155 : else if (VOID_TYPE_P (TREE_TYPE (x)))
3714 : {
3715 1736401 : if (TREE_CODE (op0) == INTEGER_CST)
3716 : {
3717 : /* If the condition is constant, fold can fold away
3718 : the COND_EXPR. If some statement-level uses of COND_EXPR
3719 : have one of the branches NULL, avoid folding crash. */
3720 279488 : if (!op1)
3721 0 : op1 = build_empty_stmt (loc);
3722 279488 : if (!op2)
3723 12 : op2 = build_empty_stmt (loc);
3724 : }
3725 : else
3726 : {
3727 : /* Otherwise, don't bother folding a void condition, since
3728 : it can't produce a constant value. */
3729 1456913 : if (op0 != TREE_OPERAND (x, 0)
3730 1374902 : || op1 != TREE_OPERAND (x, 1)
3731 2496292 : || op2 != TREE_OPERAND (x, 2))
3732 419791 : x = build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
3733 : break;
3734 : }
3735 : }
3736 :
3737 3947956 : if (op0 == error_mark_node
3738 3947956 : || op1 == error_mark_node
3739 3947942 : || op2 == error_mark_node)
3740 62 : x = error_mark_node;
3741 3947894 : else if (op0 != TREE_OPERAND (x, 0)
3742 1556827 : || op1 != TREE_OPERAND (x, 1)
3743 5168496 : || op2 != TREE_OPERAND (x, 2))
3744 2829340 : x = fold_build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
3745 : else
3746 1118554 : x = fold (x);
3747 :
3748 : /* A COND_EXPR might have incompatible types in branches if one or both
3749 : arms are bitfields. If folding exposed such a branch, fix it up. */
3750 3947956 : if (TREE_CODE (x) != code
3751 880251 : && x != error_mark_node
3752 4828145 : && !useless_type_conversion_p (TREE_TYPE (org_x), TREE_TYPE (x)))
3753 17040 : x = fold_convert (TREE_TYPE (org_x), x);
3754 :
3755 : break;
3756 :
3757 215062839 : case CALL_EXPR:
3758 215062839 : {
3759 215062839 : tree callee = get_callee_fndecl (x);
3760 :
3761 : /* "Inline" calls to std::move/forward and other cast-like functions
3762 : by simply folding them into a corresponding cast to their return
3763 : type. This is cheaper than relying on the middle end to do so, and
3764 : also means we avoid generating useless debug info for them at all.
3765 :
3766 : At this point the argument has already been converted into a
3767 : reference, so it suffices to use a NOP_EXPR to express the
3768 : cast. */
3769 215062839 : if ((OPTION_SET_P (flag_fold_simple_inlines)
3770 215062839 : ? flag_fold_simple_inlines
3771 215062585 : : !flag_no_inline)
3772 205776848 : && call_expr_nargs (x) == 1
3773 107435449 : && decl_in_std_namespace_p (callee)
3774 69761405 : && DECL_NAME (callee) != NULL_TREE
3775 284824244 : && (id_equal (DECL_NAME (callee), "move")
3776 68545891 : || id_equal (DECL_NAME (callee), "forward")
3777 67022934 : || id_equal (DECL_NAME (callee), "forward_like")
3778 67022872 : || id_equal (DECL_NAME (callee), "addressof")
3779 : /* This addressof equivalent is used heavily in libstdc++. */
3780 66695043 : || id_equal (DECL_NAME (callee), "__addressof")
3781 66324401 : || id_equal (DECL_NAME (callee), "to_underlying")
3782 66324397 : || id_equal (DECL_NAME (callee), "as_const")))
3783 : {
3784 3438635 : r = CALL_EXPR_ARG (x, 0);
3785 : /* These type-checks must be performed here, because invalid
3786 : definitions of these functions could fail to ensure those and
3787 : build_nop could misbehave. See PR122185. */
3788 3438635 : if (id_equal (DECL_NAME (callee), "to_underlying")
3789 3438635 : ? TREE_CODE (TREE_TYPE (r)) == ENUMERAL_TYPE
3790 4 : && INTEGRAL_TYPE_P (TREE_TYPE (x))
3791 6178791 : : INDIRECT_TYPE_P (TREE_TYPE (x))
3792 6178791 : && INDIRECT_TYPE_P (TREE_TYPE (r)))
3793 : {
3794 3438629 : r = build_nop (TREE_TYPE (x), r);
3795 3438629 : x = cp_fold (r, flags);
3796 : }
3797 : break;
3798 : }
3799 :
3800 211624204 : int sv = optimize, nw = sv;
3801 :
3802 : /* Some built-in function calls will be evaluated at compile-time in
3803 : fold (). Set optimize to 1 when folding __builtin_constant_p inside
3804 : a constexpr function so that fold_builtin_1 doesn't fold it to 0. */
3805 209403126 : if (callee && fndecl_built_in_p (callee) && !optimize
3806 1784741 : && DECL_IS_BUILTIN_CONSTANT_P (callee)
3807 34074 : && current_function_decl
3808 211658262 : && DECL_DECLARED_CONSTEXPR_P (current_function_decl))
3809 : nw = 1;
3810 :
3811 209403126 : if (callee && !(flags & ff_only_non_odr)
3812 332937498 : && fndecl_built_in_p (callee, BUILT_IN_FRONTEND))
3813 : {
3814 57248 : iloc_sentinel ils (EXPR_LOCATION (x));
3815 57248 : switch (DECL_FE_FUNCTION_CODE (callee))
3816 : {
3817 54460 : case CP_BUILT_IN_IS_CONSTANT_EVALUATED:
3818 : /* Defer folding __builtin_is_constant_evaluated unless
3819 : we know this isn't a manifestly constant-evaluated
3820 : context. */
3821 54460 : if (flags & ff_mce_false)
3822 27609 : x = boolean_false_node;
3823 : break;
3824 3 : case CP_BUILT_IN_SOURCE_LOCATION:
3825 3 : x = fold_builtin_source_location (x);
3826 3 : break;
3827 456 : case CP_BUILT_IN_IS_CORRESPONDING_MEMBER:
3828 912 : x = fold_builtin_is_corresponding_member
3829 456 : (EXPR_LOCATION (x), call_expr_nargs (x),
3830 : &CALL_EXPR_ARG (x, 0));
3831 456 : break;
3832 400 : case CP_BUILT_IN_IS_POINTER_INTERCONVERTIBLE_WITH_CLASS:
3833 800 : x = fold_builtin_is_pointer_inverconvertible_with_class
3834 400 : (EXPR_LOCATION (x), call_expr_nargs (x),
3835 : &CALL_EXPR_ARG (x, 0));
3836 400 : break;
3837 : default:
3838 : break;
3839 : }
3840 57248 : break;
3841 57248 : }
3842 :
3843 211566956 : bool changed = false;
3844 211566956 : int m = call_expr_nargs (x);
3845 523190470 : for (int i = 0; i < m; i++)
3846 : {
3847 311623514 : r = cp_fold (CALL_EXPR_ARG (x, i), flags);
3848 311623514 : if (r != CALL_EXPR_ARG (x, i))
3849 : {
3850 104555708 : if (r == error_mark_node)
3851 : {
3852 0 : x = error_mark_node;
3853 0 : break;
3854 : }
3855 104555708 : if (!changed)
3856 66872801 : x = copy_node (x);
3857 104555708 : CALL_EXPR_ARG (x, i) = r;
3858 104555708 : changed = true;
3859 : }
3860 : }
3861 : /* Don't fold away the function entirely if we're just folding
3862 : non-ODR-used variables. */
3863 211566956 : if (x == error_mark_node || (flags & ff_only_non_odr))
3864 : break;
3865 :
3866 122547484 : optimize = nw;
3867 122547484 : r = fold (x);
3868 122547484 : optimize = sv;
3869 :
3870 122547484 : if (TREE_CODE (r) != CALL_EXPR)
3871 : {
3872 2362021 : x = cp_fold (r, flags);
3873 2362021 : break;
3874 : }
3875 :
3876 120185463 : optimize = nw;
3877 :
3878 : /* Invoke maybe_constant_value for functions declared
3879 : constexpr and not called with AGGR_INIT_EXPRs.
3880 : TODO:
3881 : Do constexpr expansion of expressions where the call itself is not
3882 : constant, but the call followed by an INDIRECT_REF is. */
3883 118894025 : if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
3884 178219393 : && (!flag_no_inline
3885 2370022 : || lookup_attribute ("always_inline",
3886 2370022 : DECL_ATTRIBUTES (callee))))
3887 : {
3888 56183608 : mce_value manifestly_const_eval = mce_unknown;
3889 56183608 : if (flags & ff_mce_false)
3890 : /* Allow folding __builtin_is_constant_evaluated to false during
3891 : constexpr evaluation of this call. */
3892 43267352 : manifestly_const_eval = mce_false;
3893 56183608 : r = maybe_constant_value (x, /*decl=*/NULL_TREE,
3894 : manifestly_const_eval);
3895 : }
3896 120185463 : optimize = sv;
3897 :
3898 120185463 : if (TREE_CODE (r) != CALL_EXPR)
3899 : {
3900 7833496 : if (DECL_CONSTRUCTOR_P (callee))
3901 356 : r = cp_build_init_expr_for_ctor (x, r);
3902 3916748 : x = r;
3903 3916748 : break;
3904 : }
3905 :
3906 : break;
3907 : }
3908 :
3909 24377699 : case CONSTRUCTOR:
3910 24377699 : {
3911 24377699 : unsigned i;
3912 24377699 : constructor_elt *p;
3913 24377699 : vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (x);
3914 24377699 : vec<constructor_elt, va_gc> *nelts = NULL;
3915 102251280 : FOR_EACH_VEC_SAFE_ELT (elts, i, p)
3916 : {
3917 77873581 : tree op = cp_fold (p->value, flags);
3918 77873581 : if (op == error_mark_node)
3919 : {
3920 0 : x = error_mark_node;
3921 0 : vec_free (nelts);
3922 : break;
3923 : }
3924 77873581 : else if (op != p->value)
3925 : {
3926 1052296 : if (nelts == NULL)
3927 659529 : nelts = elts->copy ();
3928 1052296 : (*nelts)[i].value = op;
3929 : }
3930 : }
3931 24377699 : if (nelts)
3932 : {
3933 659529 : x = build_constructor (TREE_TYPE (x), nelts);
3934 659529 : CONSTRUCTOR_PLACEHOLDER_BOUNDARY (x)
3935 659529 : = CONSTRUCTOR_PLACEHOLDER_BOUNDARY (org_x);
3936 659529 : CONSTRUCTOR_MUTABLE_POISON (x)
3937 1319058 : = CONSTRUCTOR_MUTABLE_POISON (org_x);
3938 : }
3939 24377699 : if (VECTOR_TYPE_P (TREE_TYPE (x)))
3940 71311 : x = fold (x);
3941 : break;
3942 : }
3943 655669 : case TREE_VEC:
3944 655669 : {
3945 655669 : bool changed = false;
3946 655669 : int n = TREE_VEC_LENGTH (x);
3947 :
3948 1619629 : for (int i = 0; i < n; i++)
3949 : {
3950 963960 : tree op = cp_fold (TREE_VEC_ELT (x, i), flags);
3951 963960 : if (op != TREE_VEC_ELT (x, i))
3952 : {
3953 864 : if (!changed)
3954 821 : x = copy_node (x);
3955 864 : TREE_VEC_ELT (x, i) = op;
3956 864 : changed = true;
3957 : }
3958 : }
3959 : }
3960 :
3961 : break;
3962 :
3963 3113562 : case ARRAY_REF:
3964 3113562 : case ARRAY_RANGE_REF:
3965 :
3966 3113562 : loc = EXPR_LOCATION (x);
3967 3113562 : op0 = cp_fold (TREE_OPERAND (x, 0), flags);
3968 3113562 : op1 = cp_fold (TREE_OPERAND (x, 1), flags);
3969 3113562 : op2 = cp_fold (TREE_OPERAND (x, 2), flags);
3970 3113562 : op3 = cp_fold (TREE_OPERAND (x, 3), flags);
3971 :
3972 3113562 : if (op0 == error_mark_node
3973 3113562 : || op1 == error_mark_node
3974 3113562 : || op2 == error_mark_node
3975 3113562 : || op3 == error_mark_node)
3976 0 : x = error_mark_node;
3977 3113562 : else if (op0 != TREE_OPERAND (x, 0)
3978 2140348 : || op1 != TREE_OPERAND (x, 1)
3979 1655976 : || op2 != TREE_OPERAND (x, 2)
3980 4769538 : || op3 != TREE_OPERAND (x, 3))
3981 : {
3982 1457586 : x = build4_loc (loc, code, TREE_TYPE (x), op0, op1, op2, op3);
3983 1457586 : TREE_READONLY (x) = TREE_READONLY (org_x);
3984 1457586 : TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
3985 1457586 : TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
3986 : }
3987 :
3988 3113562 : if (!(flags & ff_only_non_odr))
3989 1815593 : x = fold (x);
3990 : break;
3991 :
3992 1512489 : case SAVE_EXPR:
3993 : /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
3994 : folding, evaluates to an invariant. In that case no need to wrap
3995 : this folded tree with a SAVE_EXPR. */
3996 1512489 : r = cp_fold (TREE_OPERAND (x, 0), flags);
3997 1512489 : if (tree_invariant_p (r))
3998 57 : x = r;
3999 : break;
4000 :
4001 10 : case REQUIRES_EXPR:
4002 10 : x = evaluate_requires_expr (x);
4003 10 : break;
4004 :
4005 : default:
4006 : return org_x;
4007 : }
4008 :
4009 2260509417 : if (EXPR_P (x) && TREE_CODE (x) == code)
4010 : {
4011 1879360638 : TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
4012 1879360638 : copy_warning (x, org_x);
4013 : }
4014 :
4015 2260509417 : if (!c.evaluation_restricted_p ())
4016 : {
4017 2260457012 : fold_cache->put (org_x, x);
4018 : /* Prevent that we try to fold an already folded result again. */
4019 2260457012 : if (x != org_x)
4020 720384314 : fold_cache->put (x, x);
4021 : }
4022 :
4023 : return x;
4024 : }
4025 :
4026 : /* Look up "hot", "cold", "likely" or "unlikely" in attribute list LIST. */
4027 :
4028 : tree
4029 296480526 : lookup_hotness_attribute (tree list)
4030 : {
 : /* Scan the attribute chain; stop at the first hotness attribute.  */
4031 296575207 : for (; list; list = TREE_CHAIN (list))
4032 : {
4033 1867292 : tree name = get_attribute_name (list);
4034 1867292 : if ((is_attribute_p ("hot", name)
4035 1867292 : || is_attribute_p ("cold", name)
4036 1867289 : || is_attribute_p ("likely", name)
4037 1311016 : || is_attribute_p ("unlikely", name))
 : /* Only attributes without a namespace qualifier match here.  */
4038 3639912 : && is_attribute_namespace_p ("", list))
4039 : break;
4040 : }
 : /* Return the TREE_LIST node of the attribute found, or NULL_TREE if
 : LIST contained no hotness attribute.  */
4041 296480526 : return list;
4042 : }
4043 :
4044 : /* Remove "hot", "cold", "likely" and "unlikely" attributes from LIST. */
4045 :
4046 : static tree
4047 1772602 : remove_hotness_attribute (tree list)
4048 : {
 : /* Walk with a pointer-to-pointer so matched nodes can be unlinked in
 : place without tracking a separate predecessor node.  */
4049 3545222 : for (tree *p = &list; *p; )
4050 : {
4051 1772620 : tree l = *p;
4052 1772620 : tree name = get_attribute_name (l);
4053 1772620 : if ((is_attribute_p ("hot", name)
4054 1772620 : || is_attribute_p ("cold", name)
4055 1772617 : || is_attribute_p ("likely", name)
4056 1216344 : || is_attribute_p ("unlikely", name))
 : /* As in lookup_hotness_attribute, only unqualified attributes
 : are considered.  */
4057 3545240 : && is_attribute_namespace_p ("", l))
4058 : {
 : /* Splice this node out of the chain and re-examine *p, which
 : now designates the successor.  */
4059 1772611 : *p = TREE_CHAIN (l);
4060 1772611 : continue;
4061 : }
4062 9 : p = &TREE_CHAIN (l);
4063 : }
 : /* Return the (possibly shortened) attribute chain.  */
4064 1772602 : return list;
4065 : }
4066 :
4067 : /* If [[likely]] or [[unlikely]] appear on this statement, turn it into a
4068 : PREDICT_EXPR. */
4069 :
4070 : tree
4071 294707942 : process_stmt_hotness_attribute (tree std_attrs, location_t attrs_loc)
4072 : {
4073 294707942 : if (std_attrs == error_mark_node)
4074 : return std_attrs;
4075 294707924 : if (tree attr = lookup_hotness_attribute (std_attrs))
4076 : {
 : /* "hot"/"likely" predict the branch taken; "cold"/"unlikely"
 : predict it not taken.  */
4077 1772602 : tree name = get_attribute_name (attr);
4078 1772602 : bool hot = (is_attribute_p ("hot", name)
4079 1772602 : || is_attribute_p ("likely", name));
4080 1772602 : tree pred = build_predict_expr (hot ? PRED_HOT_LABEL : PRED_COLD_LABEL,
4081 : hot ? TAKEN : NOT_TAKEN);
4082 1772602 : SET_EXPR_LOCATION (pred, attrs_loc);
4083 1772602 : add_stmt (pred);
 : /* Only the first hotness attribute takes effect; diagnose and
 : ignore any later one in the same attribute list.  */
4084 1772602 : if (tree other = lookup_hotness_attribute (TREE_CHAIN (attr)))
4085 : {
4086 9 : auto_urlify_attributes sentinel;
4087 9 : warning (OPT_Wattributes, "ignoring attribute %qE after earlier %qE",
4088 : get_attribute_name (other), name);
4089 9 : }
 : /* Strip all hotness attributes now that they are represented by
 : the PREDICT_EXPR.  */
4090 1772602 : std_attrs = remove_hotness_attribute (std_attrs);
4091 : }
 : /* Return the remaining attributes for normal processing.  */
4092 : return std_attrs;
4093 : }
4094 :
4095 : /* Build IFN_ASSUME internal call for assume condition ARG. */
4096 :
4097 : tree
4098 11002 : build_assume_call (location_t loc, tree arg)
4099 : {
 : /* Outside templates, wrap ARG in a cleanup point so temporaries
 : created while evaluating the condition are handled within it.  */
4100 11002 : if (!processing_template_decl)
4101 10931 : arg = fold_build_cleanup_point_expr (TREE_TYPE (arg), arg);
 : /* Emit the internal-function call IFN_ASSUME (ARG), of void type.  */
4102 11002 : return build_call_expr_internal_loc (loc, IFN_ASSUME, void_type_node,
4103 11002 : 1, arg);
4104 : }
4105 :
4106 : /* If [[assume (cond)]] appears on this statement, handle it. */
4107 :
4108 : tree
4109 234415897 : process_stmt_assume_attribute (tree std_attrs, tree statement,
4110 : location_t attrs_loc)
4111 : {
4112 234415897 : if (std_attrs == error_mark_node)
4113 : return std_attrs;
4114 234415879 : tree attr = lookup_attribute ("gnu", "assume", std_attrs);
4115 234415879 : if (!attr)
4116 : return std_attrs;
 : /* The next token after the assume attribute is not ';'. */
 : /* STATEMENT non-null means the attribute appertains to a real
 : statement rather than an empty one; warn and drop it.  */
4117 10934 : if (statement)
4118 : {
4119 12 : warning_at (attrs_loc, OPT_Wattributes,
4120 : "%<assume%> attribute not followed by %<;%>");
4121 12 : attr = NULL_TREE;
4122 : }
 : /* Process every [[assume]] occurrence in the list.  */
4123 21892 : for (; attr; attr = lookup_attribute ("gnu", "assume", TREE_CHAIN (attr)))
4124 : {
4125 10958 : tree args = TREE_VALUE (attr);
 : /* A pack expansion as the argument is ill-formed; suggest the
 : C++17 fold-expression alternative.  */
4126 10958 : if (args && PACK_EXPANSION_P (args))
4127 : {
4128 6 : auto_diagnostic_group d;
4129 6 : error_at (attrs_loc, "pack expansion of %qE attribute",
4130 : get_attribute_name (attr));
4131 6 : if (cxx_dialect >= cxx17)
4132 4 : inform (attrs_loc, "use fold expression in the attribute "
4133 : "argument instead");
4134 6 : continue;
4135 6 : }
 : /* Exactly one argument is required.  */
4136 10952 : int nargs = list_length (args);
4137 10952 : if (nargs != 1)
4138 : {
4139 42 : auto_diagnostic_group d;
4140 42 : error_at (attrs_loc, "wrong number of arguments specified for "
4141 : "%qE attribute", get_attribute_name (attr));
4142 42 : inform (attrs_loc, "expected %i, found %i", 1, nargs);
4143 42 : }
4144 : else
4145 : {
 : /* Contextually convert the condition to bool (deferred for
 : type-dependent expressions) and emit the IFN_ASSUME call.  */
4146 10910 : tree arg = TREE_VALUE (args);
4147 10910 : if (!type_dependent_expression_p (arg))
4148 10839 : arg = contextual_conv_bool (arg, tf_warning_or_error);
4149 10910 : if (error_operand_p (arg))
4150 18 : continue;
4151 10892 : finish_expr_stmt (build_assume_call (attrs_loc, arg));
4152 : }
4153 : }
 : /* Strip the handled assume attributes from the returned list.  */
4154 10934 : return remove_attribute ("gnu", "assume", std_attrs);
4155 : }
4157 :
4158 : /* Return the type std::source_location::__impl after performing
4159 : verification on it. */
4160 :
4161 : tree
4162 9517 : get_source_location_impl_type ()
4163 : {
 : /* Look up std::source_location; it must name a type.  */
4164 9517 : tree name = get_identifier ("source_location");
4165 9517 : tree decl = lookup_qualified_name (std_node, name);
4166 9517 : if (TREE_CODE (decl) != TYPE_DECL)
4167 : {
4168 6 : auto_diagnostic_group d;
4169 6 : if (decl == error_mark_node || TREE_CODE (decl) == TREE_LIST)
4170 3 : qualified_name_lookup_error (std_node, name, decl, input_location);
4171 : else
4172 3 : error ("%qD is not a type", decl);
4173 6 : return error_mark_node;
4174 6 : }
 : /* Look up the nested member type __impl; it must also name a type.  */
4175 9511 : name = get_identifier ("__impl");
4176 9511 : tree type = TREE_TYPE (decl);
4177 9511 : decl = lookup_qualified_name (type, name);
4178 9511 : if (TREE_CODE (decl) != TYPE_DECL)
4179 : {
4180 9 : auto_diagnostic_group d;
4181 9 : if (decl == error_mark_node || TREE_CODE (decl) == TREE_LIST)
4182 6 : qualified_name_lookup_error (type, name, decl, input_location);
4183 : else
4184 3 : error ("%qD is not a type", decl);
4185 9 : return error_mark_node;
4186 9 : }
4187 9502 : type = TREE_TYPE (decl);
4188 9502 : if (TREE_CODE (type) != RECORD_TYPE)
4189 : {
4190 3 : error ("%qD is not a class type", decl);
4191 3 : return error_mark_node;
4192 : }
4193 :
 : /* Verify __impl has exactly the four expected data members, with the
 : expected types, counting each accepted field in CNT.  */
4194 9499 : int cnt = 0;
4195 9499 : for (tree field = TYPE_FIELDS (type);
4196 47456 : (field = next_aggregate_field (field)) != NULL_TREE;
4197 37957 : field = DECL_CHAIN (field))
4198 : {
4199 37966 : if (DECL_NAME (field) != NULL_TREE)
4200 : {
4201 37966 : const char *n = IDENTIFIER_POINTER (DECL_NAME (field));
 : /* The two name members must be exactly const char *.  */
4202 37966 : if (strcmp (n, "_M_file_name") == 0
4203 28470 : || strcmp (n, "_M_function_name") == 0)
4204 : {
4205 18989 : if (TREE_TYPE (field) != const_string_type_node)
4206 : {
4207 3 : error ("%qD does not have %<const char *%> type", field);
4208 3 : return error_mark_node;
4209 : }
4210 18986 : cnt++;
4211 18986 : continue;
4212 : }
 : /* Line and column must have some integer type.  */
4213 18977 : else if (strcmp (n, "_M_line") == 0 || strcmp (n, "_M_column") == 0)
4214 : {
4215 18974 : if (TREE_CODE (TREE_TYPE (field)) != INTEGER_TYPE)
4216 : {
4217 3 : error ("%qD does not have integral type", field);
4218 3 : return error_mark_node;
4219 : }
4220 18971 : cnt++;
4221 18971 : continue;
4222 : }
4223 : }
 : /* Any unnamed or unexpected field disqualifies the type: reset CNT
 : so the check below fails, and stop scanning.  */
4224 : cnt = 0;
4225 : break;
4226 : }
4227 9490 : if (cnt != 4)
4228 : {
4229 9 : error ("%<std::source_location::__impl%> does not contain only "
4230 : "non-static data members %<_M_file_name%>, "
4231 : "%<_M_function_name%>, %<_M_line%> and %<_M_column%>");
4232 9 : return error_mark_node;
4233 : }
 : /* Hand back the const-qualified __impl type.  */
4234 9484 : return build_qualified_type (type, TYPE_QUAL_CONST);
4235 : }
4236 :
4237 : /* Type for source_location_table hash_set. */
4238 : struct GTY((for_user)) source_location_table_entry {
 : /* Resolved source location (macro expansion point) of the call.  */
4239 : location_t loc;
 : /* DECL_UID of the enclosing function, or -1U when there is none.  */
4240 : unsigned uid;
 : /* The cached static VAR_DECL holding the __impl object, or
 : error_mark_node while the slot is being created.  */
4241 : tree var;
4242 : };
4243 :
4244 : /* Traits class for function start hash maps below. */
4245 :
4246 : struct source_location_table_entry_hash
4247 : : ggc_remove <source_location_table_entry>
4248 : {
4249 : typedef source_location_table_entry value_type;
4250 : typedef source_location_table_entry compare_type;
4251 :
 : /* Hash on the (loc, uid) pair; VAR does not participate.  */
4252 : static hashval_t
4253 21948 : hash (const source_location_table_entry &ref)
4254 : {
4255 21948 : inchash::hash hstate (0);
4256 21948 : hstate.add_int (ref.loc);
4257 21948 : hstate.add_int (ref.uid);
4258 21948 : return hstate.end ();
4259 : }
4260 :
 : /* Two entries are equal when both location and function UID match.  */
4261 : static bool
4262 18126 : equal (const source_location_table_entry &ref1,
4263 : const source_location_table_entry &ref2)
4264 : {
4265 18126 : return ref1.loc == ref2.loc && ref1.uid == ref2.uid;
4266 : }
4267 :
 : /* Deleted slots are encoded as (UNKNOWN_LOCATION, -1U, NULL_TREE).  */
4268 : static void
4269 : mark_deleted (source_location_table_entry &ref)
4270 : {
4271 : ref.loc = UNKNOWN_LOCATION;
4272 : ref.uid = -1U;
4273 : ref.var = NULL_TREE;
4274 : }
4275 :
4276 : static const bool empty_zero_p = true;
4277 :
 : /* Empty slots are encoded as (UNKNOWN_LOCATION, 0, NULL_TREE).  */
4278 : static void
4279 0 : mark_empty (source_location_table_entry &ref)
4280 : {
4281 0 : ref.loc = UNKNOWN_LOCATION;
4282 0 : ref.uid = 0;
4283 0 : ref.var = NULL_TREE;
4284 : }
4285 :
4286 : static bool
4287 28122 : is_deleted (const source_location_table_entry &ref)
4288 : {
4289 28122 : return (ref.loc == UNKNOWN_LOCATION
4290 0 : && ref.uid == -1U
4291 28122 : && ref.var == NULL_TREE);
4292 : }
4293 :
4294 : static bool
4295 121668 : is_empty (const source_location_table_entry &ref)
4296 : {
4297 121668 : return (ref.loc == UNKNOWN_LOCATION
4298 61906 : && ref.uid == 0
4299 183574 : && ref.var == NULL_TREE);
4300 : }
4301 :
 : /* Precompiled-header support: forward to the GTY-generated walkers.  */
4302 : static void
4303 3 : pch_nx (source_location_table_entry &p)
4304 : {
4305 3 : extern void gt_pch_nx (source_location_table_entry &);
4306 3 : gt_pch_nx (p);
4307 3 : }
4308 :
4309 : static void
4310 3 : pch_nx (source_location_table_entry &p, gt_pointer_operator op, void *cookie)
4311 : {
4312 3 : extern void gt_pch_nx (source_location_table_entry *, gt_pointer_operator,
4313 : void *);
4314 3 : gt_pch_nx (&p, op, cookie);
4315 3 : }
4316 : };
4317 :
 : /* Lazily-created GC-managed table caching one static __impl variable per
 : (location, function) pair; see build_source_location_impl.  */
4318 : static GTY(()) hash_table <source_location_table_entry_hash>
4319 : *source_location_table;
4320 :
4321 : /* Build a std::source_location::__impl from a location_t. */
4322 :
4323 : tree
4324 5237 : build_source_location_impl (location_t loc, tree fndecl,
4325 : tree source_location_impl)
4326 : {
 : /* Create the cache table on first use.  */
4327 5237 : if (source_location_table == NULL)
4328 226 : source_location_table
4329 226 : = hash_table <source_location_table_entry_hash>::create_ggc (64);
 : /* Key the cache on the macro expansion point of LOC plus the UID of
 : FNDECL (or -1 when not in a function).  */
4330 5237 : const line_map_ordinary *map;
4331 5237 : source_location_table_entry entry;
4332 5237 : entry.loc
4333 5237 : = linemap_resolve_location (line_table, loc, LRK_MACRO_EXPANSION_POINT,
4334 : &map);
4335 5237 : entry.uid = fndecl ? DECL_UID (fndecl) : -1;
4336 5237 : entry.var = error_mark_node;
4337 5237 : source_location_table_entry *entryp
4338 5237 : = source_location_table->find_slot (entry, INSERT);
4339 :
 : /* Reuse a previously built variable for the same key.  */
4340 5237 : if (entryp->var)
4341 : return entryp->var;
4342 :
 : /* Build an internal, static, constexpr-initialized VAR_DECL of type
 : const std::source_location::__impl.  */
4343 3652 : tree var = build_decl (loc, VAR_DECL, generate_internal_label ("Lsrc_loc"),
4344 : source_location_impl);
4345 3652 : TREE_STATIC (var) = 1;
4346 3652 : TREE_PUBLIC (var) = 0;
4347 3652 : DECL_ARTIFICIAL (var) = 1;
4348 3652 : DECL_IGNORED_P (var) = 1;
4349 3652 : DECL_EXTERNAL (var) = 0;
4350 3652 : DECL_DECLARED_CONSTEXPR_P (var) = 1;
4351 3652 : DECL_INITIALIZED_BY_CONSTANT_EXPRESSION_P (var) = 1;
4352 3652 : layout_decl (var, 0);
4353 :
 : /* Build the CONSTRUCTOR for the four __impl members, matching fields
 : by name; the type was validated by get_source_location_impl_type.  */
4354 3652 : vec<constructor_elt, va_gc> *v = NULL;
4355 3652 : vec_alloc (v, 4);
4356 3652 : for (tree field = TYPE_FIELDS (source_location_impl);
4357 18260 : (field = next_aggregate_field (field)) != NULL_TREE;
4358 14608 : field = DECL_CHAIN (field))
4359 : {
4360 14608 : const char *n = IDENTIFIER_POINTER (DECL_NAME (field));
4361 14608 : tree val = NULL_TREE;
4362 14608 : if (strcmp (n, "_M_file_name") == 0)
4363 : {
4364 3652 : if (const char *fname = LOCATION_FILE (loc))
4365 : {
 : /* Honor -fmacro-prefix-map style remapping of the file name.  */
4366 3652 : fname = remap_macro_filename (fname);
4367 3652 : val = build_string_literal (fname);
4368 : }
4369 : else
4370 0 : val = build_string_literal ("");
4371 : }
4372 10956 : else if (strcmp (n, "_M_function_name") == 0)
4373 : {
4374 3652 : const char *name = "";
4375 :
4376 3652 : if (fndecl)
4377 : {
4378 : /* If this is a coroutine, we should get the name of the user
4379 : function rather than the actor we generate. */
4380 3447 : if (tree ramp = DECL_RAMP_FN (fndecl))
4381 12 : name = cxx_printable_name (ramp, 2);
4382 : else
4383 3435 : name = cxx_printable_name (fndecl, 2);
4384 : }
4385 :
4386 3652 : val = build_string_literal (name);
4387 : }
4388 7304 : else if (strcmp (n, "_M_line") == 0)
4389 3652 : val = build_int_cst (TREE_TYPE (field), LOCATION_LINE (loc));
4390 3652 : else if (strcmp (n, "_M_column") == 0)
4391 3652 : val = build_int_cst (TREE_TYPE (field), LOCATION_COLUMN (loc));
4392 : else
 : /* The type check guarantees no other field can appear.  */
4393 0 : gcc_unreachable ();
4394 14608 : CONSTRUCTOR_APPEND_ELT (v, field, val);
4395 : }
4396 :
 : /* Attach the constant initializer, emit the variable, and record it
 : in the cache before returning.  */
4397 3652 : tree ctor = build_constructor (source_location_impl, v);
4398 3652 : TREE_CONSTANT (ctor) = 1;
4399 3652 : TREE_STATIC (ctor) = 1;
4400 3652 : DECL_INITIAL (var) = ctor;
4401 3652 : varpool_node::finalize_decl (var);
4402 3652 : *entryp = entry;
4403 3652 : entryp->var = var;
4404 3652 : return var;
4405 : }
4406 :
4407 : /* Fold the __builtin_source_location () call T. */
4408 :
4409 : tree
4410 4622 : fold_builtin_source_location (const_tree t)
4411 : {
4412 4622 : gcc_assert (TREE_CODE (t) == CALL_EXPR);
4413 : /* TREE_TYPE (t) is const std::source_location::__impl* */
4414 4622 : tree source_location_impl = TREE_TYPE (TREE_TYPE (t));
4415 4622 : if (source_location_impl == error_mark_node)
4416 0 : return build_zero_cst (const_ptr_type_node);
4417 4622 : gcc_assert (CLASS_TYPE_P (source_location_impl)
4418 : && id_equal (TYPE_IDENTIFIER (source_location_impl), "__impl"));
4419 :
4420 4622 : location_t loc = EXPR_LOCATION (t);
4421 4622 : tree var = build_source_location_impl (loc, current_function_decl,
4422 : source_location_impl);
4423 4622 : return build_fold_addr_expr_with_type_loc (loc, var, TREE_TYPE (t));
4424 : }
4425 :
4426 : #include "gt-cp-cp-gimplify.h"
|