Line data Source code
1 : /* C++-specific tree lowering bits; see also c-gimplify.cc and gimple.cc.
2 :
3 : Copyright (C) 2002-2026 Free Software Foundation, Inc.
4 : Contributed by Jason Merrill <jason@redhat.com>
5 :
6 : This file is part of GCC.
7 :
8 : GCC is free software; you can redistribute it and/or modify it under
9 : the terms of the GNU General Public License as published by the Free
10 : Software Foundation; either version 3, or (at your option) any later
11 : version.
12 :
13 : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 : WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 : for more details.
17 :
18 : You should have received a copy of the GNU General Public License
19 : along with GCC; see the file COPYING3. If not see
20 : <http://www.gnu.org/licenses/>. */
21 :
22 : #include "config.h"
23 : #include "system.h"
24 : #include "coretypes.h"
25 : #include "target.h"
26 : #include "basic-block.h"
27 : #include "cp-tree.h"
28 : #include "gimple.h"
29 : #include "predict.h"
30 : #include "stor-layout.h"
31 : #include "tree-iterator.h"
32 : #include "gimplify.h"
33 : #include "c-family/c-ubsan.h"
34 : #include "stringpool.h"
35 : #include "attribs.h"
36 : #include "asan.h"
37 : #include "gcc-rich-location.h"
38 : #include "memmodel.h"
39 : #include "tm_p.h"
40 : #include "output.h"
41 : #include "file-prefix-map.h"
42 : #include "cgraph.h"
43 : #include "omp-general.h"
44 : #include "opts.h"
45 : #include "gcc-urlifier.h"
46 : #include "contracts.h" // build_contract_check ()
47 :
48 : /* Keep track of forward references to immediate-escalating functions in
49 : case they become consteval. This vector contains ADDR_EXPRs and
50 : PTRMEM_CSTs; it also stores FUNCTION_DECLs that had an escalating
51 : function call in them, to check that they can be evaluated to a constant,
52 : and immediate-escalating functions that may become consteval. */
53 : static GTY(()) hash_set<tree> *deferred_escalating_exprs;
54 :
55 : static void
56 28599828 : remember_escalating_expr (tree t)
57 : {
58 28599828 : if (uses_template_parms (t))
59 : /* Templates don't escalate, and cp_fold_immediate can get confused by
60 : other template trees in the function body (c++/115986). */
61 : return;
62 28599828 : if (!deferred_escalating_exprs)
63 18215 : deferred_escalating_exprs = hash_set<tree>::create_ggc (37);
64 28599828 : deferred_escalating_exprs->add (t);
65 : }
66 :
67 : /* Flags for cp_fold and cp_fold_r. */
68 :
enum fold_flags {
  ff_none = 0,
  /* Whether we're being called from cp_fold_function.  */
  ff_genericize = 1 << 0,
  /* Whether we're folding a point where we know we're
     definitely not in a manifestly constant-evaluated
     context.  */
  ff_mce_false = 1 << 1,
  /* Whether we're only folding non-ODR usages of constants.
     This happens before saving the constexpr funcdef, so
     we should do as little other folding as possible.
     Mutually exclusive with ff_mce_false.  */
  ff_only_non_odr = 1 << 2,
};

/* A bitmask of the fold_flags above; kept as a plain int so that flags
   can be combined with |.  */
using fold_flags_t = int;
85 :
/* Per-walk state passed to the cp_fold_r / cp_fold_immediate_r walkers.  */

struct cp_fold_data
{
  /* Tree nodes seen by this walk.  NOTE(review): appears to serve as the
     visited-set for the walk callbacks — confirm exact use in cp_fold_r.  */
  hash_set<tree> pset;
  /* The fold_flags in effect for this walk.  */
  fold_flags_t flags;
  cp_fold_data (fold_flags_t flags): flags (flags)
  {
    /* ff_mce_false and ff_only_non_odr are documented above as mutually
       exclusive; enforce that in checking builds.  */
    gcc_checking_assert (!(flags & ff_mce_false)
			 || !(flags & ff_only_non_odr));
  }
};
96 :
97 : /* Forward declarations. */
98 :
99 : static tree cp_genericize_r (tree *, int *, void *);
100 : static tree cp_fold_r (tree *, int *, void *);
101 : static void cp_genericize_tree (tree*, bool);
102 : static tree cp_fold (tree, fold_flags_t);
103 : static tree cp_fold_immediate_r (tree *, int *, void *);
104 :
105 : /* Genericize a TRY_BLOCK. */
106 :
107 : static void
108 17106 : genericize_try_block (tree *stmt_p)
109 : {
110 17106 : tree body = TRY_STMTS (*stmt_p);
111 17106 : tree cleanup = TRY_HANDLERS (*stmt_p);
112 :
113 17106 : *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
114 17106 : }
115 :
116 : /* Genericize a HANDLER by converting to a CATCH_EXPR. */
117 :
118 : static void
119 20127 : genericize_catch_block (tree *stmt_p)
120 : {
121 20127 : tree type = HANDLER_TYPE (*stmt_p);
122 20127 : tree body = HANDLER_BODY (*stmt_p);
123 :
124 : /* FIXME should the caught type go in TREE_TYPE? */
125 20127 : *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
126 20127 : }
127 :
128 : /* A terser interface for building a representation of an exception
129 : specification. */
130 :
131 : static tree
132 4682 : build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
133 : {
134 4682 : tree t;
135 :
136 : /* FIXME should the allowed types go in TREE_TYPE? */
137 4682 : t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
138 4682 : append_to_statement_list (failure, &EH_FILTER_FAILURE (t));
139 :
140 4682 : t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
141 4682 : append_to_statement_list (body, &TREE_OPERAND (t, 0));
142 :
143 4682 : return t;
144 : }
145 :
146 : /* Genericize an EH_SPEC_BLOCK by converting it to a
147 : TRY_CATCH_EXPR/EH_FILTER_EXPR pair. */
148 :
149 : static void
150 4682 : genericize_eh_spec_block (tree *stmt_p)
151 : {
152 4682 : tree body = EH_SPEC_STMTS (*stmt_p);
153 4682 : tree allowed = EH_SPEC_RAISES (*stmt_p);
154 4682 : tree failure = build_call_n (call_unexpected_fn, 1, build_exc_ptr ());
155 :
156 4682 : *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
157 4682 : suppress_warning (*stmt_p);
158 4682 : suppress_warning (TREE_OPERAND (*stmt_p, 1));
159 4682 : }
160 :
161 : /* Return the first non-compound statement in STMT. */
162 :
163 : tree
164 21460454 : first_stmt (tree stmt)
165 : {
166 31911660 : switch (TREE_CODE (stmt))
167 : {
168 8755924 : case STATEMENT_LIST:
169 8755924 : if (tree_statement_list_node *p = STATEMENT_LIST_HEAD (stmt))
170 5066583 : return first_stmt (p->stmt);
171 3689341 : return void_node;
172 :
173 5384623 : case BIND_EXPR:
174 5384623 : return first_stmt (BIND_EXPR_BODY (stmt));
175 :
176 : default:
177 : return stmt;
178 : }
179 : }
180 :
181 : /* Genericize an IF_STMT by turning it into a COND_EXPR. */
182 :
static void
genericize_if_stmt (tree *stmt_p)
{
  tree stmt, cond, then_, else_;
  location_t locus = EXPR_LOCATION (*stmt_p);

  stmt = *stmt_p;
  cond = IF_COND (stmt);
  then_ = THEN_CLAUSE (stmt);
  else_ = ELSE_CLAUSE (stmt);

  /* If both arms begin with a PREDICT_EXPR using the same hot/cold-label
     predictor, the user marked both branches as likely (or both as
     unlikely); that is contradictory, so warn.  */
  if (then_ && else_)
    {
      tree ft = first_stmt (then_);
      tree fe = first_stmt (else_);
      br_predictor pr;
      if (TREE_CODE (ft) == PREDICT_EXPR
	  && TREE_CODE (fe) == PREDICT_EXPR
	  && (pr = PREDICT_EXPR_PREDICTOR (ft)) == PREDICT_EXPR_PREDICTOR (fe)
	  && (pr == PRED_HOT_LABEL || pr == PRED_COLD_LABEL))
	{
	  gcc_rich_location richloc (EXPR_LOC_OR_LOC (ft, locus));
	  richloc.add_range (EXPR_LOC_OR_LOC (fe, locus));
	  warning_at (&richloc, OPT_Wattributes,
		      "both branches of %<if%> statement marked as %qs",
		      pr == PRED_HOT_LABEL ? "likely" : "unlikely");
	}
    }

  if (IF_STMT_VACUOUS_INIT_P (stmt))
    {
      /* The condition is known to be zero (asserted below); keep the
	 then_ block in the IR but branch around it with an artificial
	 label so it is not entered on the normal path.  NOTE(review):
	 see where IF_STMT_VACUOUS_INIT_P is set for how this form is
	 produced.  */
      gcc_checking_assert (integer_zerop (cond));
      gcc_checking_assert (!else_ || !TREE_SIDE_EFFECTS (else_));
      tree lab = create_artificial_label (UNKNOWN_LOCATION);
      VACUOUS_INIT_LABEL_P (lab) = 1;
      tree goto_expr = build_stmt (UNKNOWN_LOCATION, GOTO_EXPR, lab);
      tree label_expr = build_stmt (UNKNOWN_LOCATION, LABEL_EXPR, lab);
      if (TREE_CODE (then_) == STATEMENT_LIST)
	{
	  /* Splice into the existing list: goto lab; <then_>; lab:;  */
	  tree_stmt_iterator i = tsi_start (then_);
	  tsi_link_before (&i, goto_expr, TSI_CONTINUE_LINKING);
	  i = tsi_last (then_);
	  tsi_link_after (&i, label_expr, TSI_CONTINUE_LINKING);
	  stmt = then_;
	}
      else
	{
	  /* Otherwise build a fresh list around then_.  */
	  stmt = NULL_TREE;
	  append_to_statement_list (goto_expr, &stmt);
	  append_to_statement_list (then_, &stmt);
	  append_to_statement_list (label_expr, &stmt);
	}
      *stmt_p = stmt;
      return;
    }

  /* COND_EXPR wants non-null arms; substitute empty statements.  */
  if (!then_)
    then_ = build_empty_stmt (locus);
  if (!else_)
    else_ = build_empty_stmt (locus);

  /* consteval if has been verified not to have the then_/else_ blocks
     entered by gotos/case labels from elsewhere, and as then_ block
     can contain unfolded immediate function calls, we have to discard
     the then_ block regardless of whether else_ has side-effects or not.  */
  if (IF_STMT_CONSTEVAL_P (stmt))
    {
      if (block_may_fallthru (then_))
	stmt = build3 (COND_EXPR, void_type_node, boolean_false_node,
		       void_node, else_);
      else
	stmt = else_;
    }
  else if (IF_STMT_CONSTEXPR_P (stmt))
    /* if constexpr: the condition has already folded to a constant, so
       keep only the selected arm.  */
    stmt = integer_nonzerop (cond) ? then_ : else_;
  /* ??? This optimization doesn't seem to belong here, but removing it
     causes -Wreturn-type regressions (e.g. 107310).  */
  else if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
    stmt = then_;
  else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
    stmt = else_;
  else
    stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
  protected_set_expr_location_if_unset (stmt, locus);
  *stmt_p = stmt;
}
269 :
270 : /* Hook into the middle of gimplifying an OMP_FOR node. */
271 :
272 : static enum gimplify_status
273 45263 : cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
274 : {
275 45263 : tree for_stmt = *expr_p;
276 45263 : gimple_seq seq = NULL;
277 :
278 : /* Protect ourselves from recursion. */
279 45263 : if (OMP_FOR_GIMPLIFYING_P (for_stmt))
280 : return GS_UNHANDLED;
281 21028 : OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;
282 :
283 21028 : gimplify_and_add (for_stmt, &seq);
284 21028 : gimple_seq_add_seq (pre_p, seq);
285 :
286 21028 : OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;
287 :
288 21028 : return GS_ALL_DONE;
289 : }
290 :
291 : /* Gimplify an EXPR_STMT node. */
292 :
293 : static void
294 3894853 : gimplify_expr_stmt (tree *stmt_p)
295 : {
296 3894853 : tree stmt = EXPR_STMT_EXPR (*stmt_p);
297 :
298 3894853 : if (stmt == error_mark_node)
299 : stmt = NULL;
300 :
301 : /* Gimplification of a statement expression will nullify the
302 : statement if all its side effects are moved to *PRE_P and *POST_P.
303 :
304 : In this case we will not want to emit the gimplified statement.
305 : However, we may still want to emit a warning, so we do that before
306 : gimplification. */
307 3890928 : if (stmt && warn_unused_value)
308 : {
309 297051 : if (!TREE_SIDE_EFFECTS (stmt))
310 : {
311 0 : if (!IS_EMPTY_STMT (stmt)
312 6834 : && !VOID_TYPE_P (TREE_TYPE (stmt))
313 6834 : && !warning_suppressed_p (stmt, OPT_Wunused_value))
314 0 : warning (OPT_Wunused_value, "statement with no effect");
315 : }
316 : else
317 290217 : warn_if_unused_value (stmt, input_location);
318 : }
319 :
320 3894853 : if (stmt == NULL_TREE)
321 3925 : stmt = alloc_stmt_list ();
322 :
323 3894853 : *stmt_p = stmt;
324 3894853 : }
325 :
326 : /* Gimplify initialization from an AGGR_INIT_EXPR. */
327 :
static void
cp_gimplify_init_expr (tree *expr_p)
{
  /* *expr_p is an INIT_EXPR: TO is the object being initialized, FROM
     is its initializer.  */
  tree from = TREE_OPERAND (*expr_p, 1);
  tree to = TREE_OPERAND (*expr_p, 0);
  tree t;

  if (TREE_CODE (from) == TARGET_EXPR)
    if (tree init = TARGET_EXPR_INITIAL (from))
      {
	/* Make sure that we expected to elide this temporary.  But also allow
	   gimplify_modify_expr_rhs to elide temporaries of trivial type.  */
	gcc_checking_assert (TARGET_EXPR_ELIDING_P (from)
			     || !TREE_ADDRESSABLE (TREE_TYPE (from)));
	if (target_expr_needs_replace (from))
	  {
	    /* If this was changed by cp_genericize_target_expr, we need to
	       walk into it to replace uses of the slot.  */
	    replace_decl (&init, TARGET_EXPR_SLOT (from), to);
	    *expr_p = init;
	    return;
	  }
	else
	  /* Elide the temporary: initialize TO directly from the
	     TARGET_EXPR's initializer.  */
	  from = init;
      }

  /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
     inside the TARGET_EXPR.  */
  for (t = from; t; )
    {
      tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;

      /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
	 replace the slot operand with our target.

	 Should we add a target parm to gimplify_expr instead?  No, as in this
	 case we want to replace the INIT_EXPR.  */
      if (TREE_CODE (sub) == AGGR_INIT_EXPR
	  || TREE_CODE (sub) == VEC_INIT_EXPR)
	{
	  if (TREE_CODE (sub) == AGGR_INIT_EXPR)
	    AGGR_INIT_EXPR_SLOT (sub) = to;
	  else
	    VEC_INIT_EXPR_SLOT (sub) = to;
	  *expr_p = from;

	  /* The initialization is now a side-effect, so the container can
	     become void.  */
	  if (from != sub)
	    TREE_TYPE (from) = void_type_node;
	}

      /* Handle aggregate NSDMI.  */
      replace_placeholders (sub, to);

      /* Stop once T is no longer a COMPOUND_EXPR (SUB == T); otherwise
	 continue down the chain.  */
      if (t == sub)
	break;
      else
	t = TREE_OPERAND (t, 1);
    }

}
390 :
391 : /* Gimplify a MUST_NOT_THROW_EXPR. */
392 :
393 : static enum gimplify_status
394 551004 : gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
395 : {
396 551004 : tree stmt = *expr_p;
397 551004 : tree temp = voidify_wrapper_expr (stmt, NULL);
398 551004 : tree body = TREE_OPERAND (stmt, 0);
399 551004 : gimple_seq try_ = NULL;
400 551004 : gimple_seq catch_ = NULL;
401 551004 : gimple *mnt;
402 :
403 551004 : gimplify_and_add (body, &try_);
404 551004 : mnt = gimple_build_eh_must_not_throw (call_terminate_fn);
405 551004 : gimple_seq_add_stmt_without_update (&catch_, mnt);
406 551004 : mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);
407 :
408 551004 : gimple_seq_add_stmt_without_update (pre_p, mnt);
409 551004 : if (temp)
410 : {
411 33 : *expr_p = temp;
412 33 : return GS_OK;
413 : }
414 :
415 550971 : *expr_p = NULL;
416 550971 : return GS_ALL_DONE;
417 : }
418 :
419 : /* Return TRUE if an operand (OP) of a given TYPE being copied is
420 : really just an empty class copy.
421 :
422 : Check that the operand has a simple form so that TARGET_EXPRs and
423 : non-empty CONSTRUCTORs get reduced properly, and we leave the
424 : return slot optimization alone because it isn't a copy. */
425 :
426 : bool
427 19777229 : simple_empty_class_p (tree type, tree op, tree_code code)
428 : {
429 23254254 : if (TREE_CODE (op) == COMPOUND_EXPR)
430 177637 : return simple_empty_class_p (type, TREE_OPERAND (op, 1), code);
431 4740454 : if (SIMPLE_TARGET_EXPR_P (op)
432 26380096 : && TYPE_HAS_TRIVIAL_DESTRUCTOR (type))
433 : /* The TARGET_EXPR is itself a simple copy, look through it. */
434 3299388 : return simple_empty_class_p (type, TARGET_EXPR_INITIAL (op), code);
435 :
436 19777229 : if (TREE_CODE (op) == PARM_DECL
437 19777229 : && TREE_ADDRESSABLE (TREE_TYPE (op)))
438 : {
439 7 : tree fn = DECL_CONTEXT (op);
440 7 : if (DECL_THUNK_P (fn)
441 10 : || lambda_static_thunk_p (fn))
442 : /* In a thunk, we pass through invisible reference parms, so this isn't
443 : actually a copy. */
444 7 : return false;
445 : }
446 :
447 19777222 : return
448 19777222 : (TREE_CODE (op) == EMPTY_CLASS_EXPR
449 19777192 : || code == MODIFY_EXPR
450 16878044 : || is_gimple_lvalue (op)
451 13429009 : || INDIRECT_REF_P (op)
452 13058424 : || (TREE_CODE (op) == CONSTRUCTOR
453 2980403 : && CONSTRUCTOR_NELTS (op) == 0)
454 10361192 : || (TREE_CODE (op) == CALL_EXPR
455 2563650 : && !CALL_EXPR_RETURN_SLOT_OPT (op)))
456 11882595 : && !TREE_CLOBBER_P (op)
457 31104084 : && is_really_empty_class (type, /*ignore_vptr*/true);
458 : }
459 :
460 : /* Returns true if evaluating E as an lvalue has side-effects;
461 : specifically, a volatile lvalue has TREE_SIDE_EFFECTS, but it doesn't really
462 : have side-effects until there is a read or write through it. */
463 :
464 : static bool
465 2562519 : lvalue_has_side_effects (tree e)
466 : {
467 2562519 : if (!TREE_SIDE_EFFECTS (e))
468 : return false;
469 57385 : while (handled_component_p (e))
470 : {
471 4881 : if (TREE_CODE (e) == ARRAY_REF
472 4881 : && TREE_SIDE_EFFECTS (TREE_OPERAND (e, 1)))
473 : return true;
474 3228 : e = TREE_OPERAND (e, 0);
475 : }
476 52504 : if (DECL_P (e))
477 : /* Just naming a variable has no side-effects. */
478 : return false;
479 35703 : else if (INDIRECT_REF_P (e))
480 : /* Similarly, indirection has no side-effects. */
481 35569 : return TREE_SIDE_EFFECTS (TREE_OPERAND (e, 0));
482 : else
483 : /* For anything else, trust TREE_SIDE_EFFECTS. */
484 134 : return TREE_SIDE_EFFECTS (e);
485 : }
486 :
487 : /* Return true if FN is an immediate-escalating function. */
488 :
489 : bool
490 247961245 : immediate_escalating_function_p (tree fn)
491 : {
492 247961245 : if (!fn || !flag_immediate_escalation)
493 : return false;
494 :
495 247960920 : gcc_checking_assert (TREE_CODE (fn) == FUNCTION_DECL);
496 :
497 247960920 : if (DECL_IMMEDIATE_FUNCTION_P (fn))
498 : return false;
499 :
500 : /* An immediate-escalating function is
501 : -- the call operator of a lambda that is not declared with the consteval
502 : specifier */
503 255119688 : if (LAMBDA_FUNCTION_P (fn))
504 : return true;
505 : /* -- a defaulted function that is not declared with the
506 : consteval specifier */
507 242947345 : if (DECL_DEFAULTED_FN (fn))
508 : return true;
509 : /* -- a function that results from the instantiation of a templated entity
510 : defined with the constexpr specifier. */
511 234712792 : return is_instantiation_of_constexpr (fn);
512 : }
513 :
514 : /* Return true if FN is an immediate-escalating function that has not been
515 : checked for escalating expressions.  */
516 :
517 : static bool
518 247946975 : unchecked_immediate_escalating_function_p (tree fn)
519 : {
520 247946975 : return (immediate_escalating_function_p (fn)
521 247946975 : && !DECL_ESCALATION_CHECKED_P (fn));
522 : }
523 :
524 : /* Promote FN to an immediate function, including its clones. */
525 :
526 : void
527 13075 : promote_function_to_consteval (tree fn)
528 : {
529 13075 : SET_DECL_IMMEDIATE_FUNCTION_P (fn);
530 13075 : DECL_ESCALATION_CHECKED_P (fn) = true;
531 13075 : tree clone;
532 20947 : FOR_EACH_CLONE (clone, fn)
533 : {
534 7872 : SET_DECL_IMMEDIATE_FUNCTION_P (clone);
535 7872 : DECL_ESCALATION_CHECKED_P (clone) = true;
536 : }
537 13075 : }
538 :
539 : /* A wrapper around cp_fold_immediate_r. Return a non-null tree if
540 : we found a non-constant immediate function, or taking the address
541 : of an immediate function. */
542 :
543 : tree
544 16877422 : cp_fold_immediate (tree *tp, mce_value manifestly_const_eval,
545 : tree decl /*= current_function_decl*/)
546 : {
547 16877422 : if (cxx_dialect <= cxx17)
548 : return NULL_TREE;
549 :
550 16869963 : temp_override<tree> cfd (current_function_decl, decl);
551 :
552 16869963 : fold_flags_t flags = ff_none;
553 16869963 : if (manifestly_const_eval == mce_false)
554 9524813 : flags |= ff_mce_false;
555 :
556 16869963 : cp_fold_data data (flags);
557 16869963 : int save_errorcount = errorcount;
558 16869963 : tree r = cp_walk_tree (tp, cp_fold_immediate_r, &data, NULL);
559 16869963 : if (errorcount > save_errorcount)
560 49 : return integer_one_node;
561 : return r;
562 16869963 : }
563 :
564 : /* Maybe say that FN (a function decl with DECL_IMMEDIATE_FUNCTION_P set)
565 : was initially not an immediate function, but was promoted to one because
566 : its body contained an immediate-escalating expression or conversion. */
567 :
568 : static void
569 457 : maybe_explain_promoted_consteval (location_t loc, tree fn)
570 : {
571 457 : if (DECL_ESCALATION_CHECKED_P (fn))
572 : {
573 : /* See if we can figure out what made the function consteval. */
574 126 : tree x = cp_fold_immediate (&DECL_SAVED_TREE (fn), mce_unknown, NULL_TREE);
575 126 : if (x)
576 99 : inform (cp_expr_loc_or_loc (x, loc),
577 : "%qD was promoted to an immediate function because its "
578 : "body contains an immediate-escalating expression %qE", fn, x);
579 : else
580 27 : inform (loc, "%qD was promoted to an immediate function", fn);
581 : }
582 457 : }
583 :
584 : /* Gimplify *EXPR_P as rvalue into an expression that can't be modified
585 : by expressions with side-effects in other operands. */
586 :
587 : static enum gimplify_status
588 36481 : gimplify_to_rvalue (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
589 : bool (*gimple_test_f) (tree))
590 : {
591 36481 : enum gimplify_status t
592 36481 : = gimplify_expr (expr_p, pre_p, post_p, gimple_test_f, fb_rvalue);
593 36481 : if (t == GS_ERROR)
594 : return GS_ERROR;
595 36478 : else if (is_gimple_variable (*expr_p) && TREE_CODE (*expr_p) != SSA_NAME)
596 2876 : *expr_p = get_initialized_tmp_var (*expr_p, pre_p);
597 : return t;
598 : }
599 :
600 : /* Like gimplify_arg, but if ORDERED is set (which should be set if
601 : any of the arguments this argument is sequenced before has
602 : TREE_SIDE_EFFECTS set, make sure expressions with is_gimple_reg_type type
603 : are gimplified into SSA_NAME or a fresh temporary and for
604 : non-is_gimple_reg_type we don't optimize away TARGET_EXPRs. */
605 :
606 : static enum gimplify_status
607 4061806 : cp_gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
608 : bool ordered)
609 : {
610 4061806 : enum gimplify_status t;
611 4061806 : if (ordered
612 403761 : && !is_gimple_reg_type (TREE_TYPE (*arg_p))
613 4063638 : && TREE_CODE (*arg_p) == TARGET_EXPR)
614 : {
615 : /* gimplify_arg would strip away the TARGET_EXPR, but
616 : that can mean we don't copy the argument and some following
617 : argument with side-effect could modify it. */
618 1703 : protected_set_expr_location (*arg_p, call_location);
619 1703 : return gimplify_expr (arg_p, pre_p, NULL, is_gimple_lvalue, fb_either);
620 : }
621 : else
622 : {
623 4060103 : t = gimplify_arg (arg_p, pre_p, call_location);
624 4060103 : if (t == GS_ERROR)
625 : return GS_ERROR;
626 4060103 : else if (ordered
627 402058 : && is_gimple_reg_type (TREE_TYPE (*arg_p))
628 401929 : && is_gimple_variable (*arg_p)
629 211863 : && TREE_CODE (*arg_p) != SSA_NAME
630 : /* No need to force references into register, references
631 : can't be modified. */
632 129609 : && !TYPE_REF_P (TREE_TYPE (*arg_p))
633 : /* And this can't be modified either. */
634 4152124 : && *arg_p != current_class_ptr)
635 10503 : *arg_p = get_initialized_tmp_var (*arg_p, pre_p);
636 4060103 : return t;
637 : }
638 :
639 : }
640 :
641 : /* Emit a decl = {CLOBBER(bob)}; stmt before DECL_EXPR or first
642 : TARGET_EXPR gimplification for -flifetime-dse=2. */
643 :
644 : static void
645 1374173 : maybe_emit_clobber_object_begin (tree decl, gimple_seq *pre_p)
646 : {
647 1374173 : if (VAR_P (decl)
648 1373585 : && auto_var_p (decl)
649 1340655 : && TREE_TYPE (decl) != error_mark_node
650 1340646 : && DECL_NONTRIVIALLY_INITIALIZED_P (decl)
651 : /* Don't do it if it is fully initialized. */
652 824738 : && DECL_INITIAL (decl) == NULL_TREE
653 418127 : && !DECL_HAS_VALUE_EXPR_P (decl)
654 417842 : && !OPAQUE_TYPE_P (TREE_TYPE (decl))
655 : /* Nor going to have decl = .DEFERRED_INIT (...); added. */
656 1792015 : && (flag_auto_var_init == AUTO_INIT_UNINITIALIZED
657 61576 : || lookup_attribute ("uninitialized", DECL_ATTRIBUTES (decl))
658 61576 : || lookup_attribute ("indeterminate", DECL_ATTRIBUTES (decl))))
659 : {
660 356267 : tree eltype = strip_array_types (TREE_TYPE (decl));
661 356267 : if (RECORD_OR_UNION_TYPE_P (eltype)
662 356267 : && !is_empty_class (eltype))
663 : {
664 137745 : tree clobber
665 137745 : = build_clobber (TREE_TYPE (decl), CLOBBER_OBJECT_BEGIN);
666 137745 : gimple *g = gimple_build_assign (decl, clobber);
667 137745 : gimple_set_location (g, DECL_SOURCE_LOCATION (decl));
668 137745 : gimple_seq_add_stmt_without_update (pre_p, g);
669 : }
670 : }
671 1374173 : }
672 :
673 : /* Do C++-specific gimplification. Args are as for gimplify_expr. */
674 :
675 : int
676 168487383 : cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
677 : {
678 168487383 : int saved_stmts_are_full_exprs_p = 0;
679 168487383 : location_t loc = cp_expr_loc_or_input_loc (*expr_p);
680 168487383 : enum tree_code code = TREE_CODE (*expr_p);
681 168487383 : enum gimplify_status ret;
682 :
683 168487383 : if (STATEMENT_CODE_P (code))
684 : {
685 3936768 : saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
686 7873536 : current_stmt_tree ()->stmts_are_full_exprs_p
687 3936768 : = STMT_IS_FULL_EXPR_P (*expr_p);
688 : }
689 :
690 168487383 : switch (code)
691 : {
692 336152 : case AGGR_INIT_EXPR:
693 336152 : simplify_aggr_init_expr (expr_p);
694 336152 : ret = GS_OK;
695 336152 : break;
696 :
697 0 : case VEC_INIT_EXPR:
698 0 : {
699 0 : *expr_p = expand_vec_init_expr (NULL_TREE, *expr_p,
700 : tf_warning_or_error);
701 :
702 0 : cp_fold_data data (ff_genericize | ff_mce_false);
703 0 : cp_walk_tree (expr_p, cp_fold_r, &data, NULL);
704 0 : cp_genericize_tree (expr_p, false);
705 0 : copy_if_shared (expr_p);
706 0 : ret = GS_OK;
707 0 : }
708 0 : break;
709 :
710 18613 : case THROW_EXPR:
711 : /* FIXME communicate throw type to back end, probably by moving
712 : THROW_EXPR into ../tree.def. */
713 18613 : *expr_p = TREE_OPERAND (*expr_p, 0);
714 18613 : ret = GS_OK;
715 18613 : break;
716 :
717 551004 : case MUST_NOT_THROW_EXPR:
718 551004 : ret = gimplify_must_not_throw_expr (expr_p, pre_p);
719 551004 : break;
720 :
721 : /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
722 : LHS of an assignment might also be involved in the RHS, as in bug
723 : 25979. */
724 11427196 : case INIT_EXPR:
725 11427196 : cp_gimplify_init_expr (expr_p);
726 11427196 : if (TREE_CODE (*expr_p) != INIT_EXPR)
727 : return GS_OK;
728 : /* Fall through. */
729 15323177 : case MODIFY_EXPR:
730 11319915 : modify_expr_case:
731 15323177 : {
732 : /* If the back end isn't clever enough to know that the lhs and rhs
733 : types are the same, add an explicit conversion. */
734 15323177 : tree op0 = TREE_OPERAND (*expr_p, 0);
735 15323177 : tree op1 = TREE_OPERAND (*expr_p, 1);
736 :
737 15323177 : if (!error_operand_p (op0)
738 15323177 : && !error_operand_p (op1)
739 15323158 : && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
740 15320113 : || TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
741 15326231 : && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
742 9 : TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
743 9 : TREE_TYPE (op0), op1);
744 :
745 15323168 : else if (simple_empty_class_p (TREE_TYPE (op0), op1, code))
746 : {
747 190619 : while (TREE_CODE (op1) == TARGET_EXPR)
748 : /* We're disconnecting the initializer from its target,
749 : don't create a temporary. */
750 9119 : op1 = TARGET_EXPR_INITIAL (op1);
751 :
752 : /* Remove any copies of empty classes. Also drop volatile
753 : variables on the RHS to avoid infinite recursion from
754 : gimplify_expr trying to load the value. */
755 181500 : if (TREE_SIDE_EFFECTS (op1))
756 : {
757 13998 : if (TREE_THIS_VOLATILE (op1)
758 0 : && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
759 0 : op1 = build_fold_addr_expr (op1);
760 :
761 13998 : suppress_warning (op1, OPT_Wunused_result);
762 13998 : gimplify_and_add (op1, pre_p);
763 : }
764 181500 : gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
765 : is_gimple_lvalue, fb_lvalue);
766 181500 : *expr_p = TREE_OPERAND (*expr_p, 0);
767 181500 : if (code == RETURN_EXPR && REFERENCE_CLASS_P (*expr_p))
768 : /* Avoid 'return *<retval>;' */
769 6 : *expr_p = TREE_OPERAND (*expr_p, 0);
770 : }
771 : /* P0145 says that the RHS is sequenced before the LHS.
772 : gimplify_modify_expr gimplifies the RHS before the LHS, but that
773 : isn't quite strong enough in two cases:
774 :
775 : 1) gimplify.cc wants to leave a CALL_EXPR on the RHS, which would
776 : mean it's evaluated after the LHS.
777 :
778 : 2) the value calculation of the RHS is also sequenced before the
779 : LHS, so for scalar assignment we need to preevaluate if the
780 : RHS could be affected by LHS side-effects even if it has no
781 : side-effects of its own. We don't need this for classes because
782 : class assignment takes its RHS by reference. */
783 15141668 : else if (flag_strong_eval_order > 1
784 13813338 : && TREE_CODE (*expr_p) == MODIFY_EXPR
785 2562519 : && lvalue_has_side_effects (op0)
786 15178880 : && (TREE_CODE (op1) == CALL_EXPR
787 30305 : || (SCALAR_TYPE_P (TREE_TYPE (op1))
788 24283 : && !TREE_CONSTANT (op1))))
789 20622 : TREE_OPERAND (*expr_p, 1) = get_initialized_tmp_var (op1, pre_p);
790 : }
791 : ret = GS_OK;
792 : break;
793 :
794 81962 : case EMPTY_CLASS_EXPR:
795 : /* We create an empty CONSTRUCTOR with RECORD_TYPE. */
796 81962 : *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
797 81962 : ret = GS_OK;
798 81962 : break;
799 :
800 0 : case BASELINK:
801 0 : *expr_p = BASELINK_FUNCTIONS (*expr_p);
802 0 : ret = GS_OK;
803 0 : break;
804 :
805 17106 : case TRY_BLOCK:
806 17106 : genericize_try_block (expr_p);
807 17106 : ret = GS_OK;
808 17106 : break;
809 :
810 20127 : case HANDLER:
811 20127 : genericize_catch_block (expr_p);
812 20127 : ret = GS_OK;
813 20127 : break;
814 :
815 4682 : case EH_SPEC_BLOCK:
816 4682 : genericize_eh_spec_block (expr_p);
817 4682 : ret = GS_OK;
818 4682 : break;
819 :
820 0 : case USING_STMT:
821 0 : gcc_unreachable ();
822 :
823 0 : case FOR_STMT:
824 0 : case WHILE_STMT:
825 0 : case DO_STMT:
826 0 : case SWITCH_STMT:
827 0 : case CONTINUE_STMT:
828 0 : case BREAK_STMT:
829 0 : gcc_unreachable ();
830 :
831 45263 : case OMP_FOR:
832 45263 : case OMP_SIMD:
833 45263 : case OMP_DISTRIBUTE:
834 45263 : case OMP_LOOP:
835 45263 : case OMP_TASKLOOP:
836 45263 : case OMP_TILE:
837 45263 : case OMP_UNROLL:
838 45263 : ret = cp_gimplify_omp_for (expr_p, pre_p);
839 45263 : break;
840 :
841 3894853 : case EXPR_STMT:
842 3894853 : gimplify_expr_stmt (expr_p);
843 3894853 : ret = GS_OK;
844 3894853 : break;
845 :
846 0 : case UNARY_PLUS_EXPR:
847 0 : {
848 0 : tree arg = TREE_OPERAND (*expr_p, 0);
849 0 : tree type = TREE_TYPE (*expr_p);
850 0 : *expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
851 : : arg;
852 0 : ret = GS_OK;
853 : }
854 0 : break;
855 :
856 : case CALL_EXPR:
857 : ret = GS_OK;
858 : /* At this point any function that takes/returns a consteval-only
859 : expression is a problem. */
860 21349439 : for (int i = 0; i < call_expr_nargs (*expr_p); ++i)
861 13029954 : if (check_out_of_consteval_use (CALL_EXPR_ARG (*expr_p, i)))
862 5 : ret = GS_ERROR;
863 8319485 : if (consteval_only_p (TREE_TYPE (*expr_p)))
864 1 : ret = GS_ERROR;
865 8319485 : if (flag_strong_eval_order == 2
866 7845317 : && CALL_EXPR_FN (*expr_p)
867 7514603 : && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p)
868 15095244 : && cp_get_callee_fndecl_nofold (*expr_p) == NULL_TREE)
869 : {
870 36481 : tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
871 36481 : enum gimplify_status t
872 36481 : = gimplify_to_rvalue (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
873 : is_gimple_call_addr);
874 36481 : if (t == GS_ERROR)
875 : ret = GS_ERROR;
876 : /* GIMPLE considers most pointer conversion useless, but for
877 : calls we actually care about the exact function pointer type. */
878 36478 : else if (TREE_TYPE (CALL_EXPR_FN (*expr_p)) != fnptrtype)
879 8947 : CALL_EXPR_FN (*expr_p)
880 17894 : = build1 (NOP_EXPR, fnptrtype, CALL_EXPR_FN (*expr_p));
881 : }
882 8319485 : if (!CALL_EXPR_FN (*expr_p))
883 : /* Internal function call. */;
884 7966235 : else if (CALL_EXPR_REVERSE_ARGS (*expr_p))
885 : {
886 : /* This is a call to a (compound) assignment operator that used
887 : the operator syntax; gimplify the RHS first. */
888 46165 : gcc_assert (call_expr_nargs (*expr_p) == 2);
889 46165 : gcc_assert (!CALL_EXPR_ORDERED_ARGS (*expr_p));
890 46165 : enum gimplify_status t
891 46165 : = cp_gimplify_arg (&CALL_EXPR_ARG (*expr_p, 1), pre_p, loc,
892 46165 : TREE_SIDE_EFFECTS (CALL_EXPR_ARG (*expr_p, 0)));
893 46165 : if (t == GS_ERROR)
894 : ret = GS_ERROR;
895 : }
896 7920070 : else if (CALL_EXPR_ORDERED_ARGS (*expr_p))
897 : {
898 : /* Leave the last argument for gimplify_call_expr, to avoid problems
899 : with __builtin_va_arg_pack(). */
900 196371 : int nargs = call_expr_nargs (*expr_p) - 1;
901 196371 : int last_side_effects_arg = -1;
902 390294 : for (int i = nargs; i > 0; --i)
903 213624 : if (TREE_SIDE_EFFECTS (CALL_EXPR_ARG (*expr_p, i)))
904 : {
905 : last_side_effects_arg = i;
906 : break;
907 : }
908 419812 : for (int i = 0; i < nargs; ++i)
909 : {
910 223441 : enum gimplify_status t
911 223441 : = cp_gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc,
912 : i < last_side_effects_arg);
913 223441 : if (t == GS_ERROR)
914 0 : ret = GS_ERROR;
915 : }
916 : }
917 7723699 : else if (flag_strong_eval_order
918 7723699 : && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p))
919 : {
920 : /* If flag_strong_eval_order, evaluate the object argument first. */
921 7166070 : tree fntype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
922 7166070 : if (INDIRECT_TYPE_P (fntype))
923 7166067 : fntype = TREE_TYPE (fntype);
924 7166070 : tree decl = cp_get_callee_fndecl_nofold (*expr_p);
925 : /* We can't just rely on 'decl' because virtual function callees
926 : are expressed as OBJ_TYPE_REF. Note that the xobj memfn check
927 : will also hold for calls of the form (&A::f)(a, ...) which does
928 : not require such sequencing, though it's allowed under
929 : "indeterminately sequenced". */
930 7166070 : if (TREE_CODE (fntype) == METHOD_TYPE
931 7166070 : || (decl && DECL_LANG_SPECIFIC (decl)
932 3362644 : && DECL_XOBJ_MEMBER_FUNCTION_P (decl)))
933 : {
934 3792200 : int nargs = call_expr_nargs (*expr_p);
935 3792200 : bool side_effects = false;
936 5318537 : for (int i = 1; i < nargs; ++i)
937 1894833 : if (TREE_SIDE_EFFECTS (CALL_EXPR_ARG (*expr_p, i)))
938 : {
939 : side_effects = true;
940 : break;
941 : }
942 3792200 : enum gimplify_status t
943 3792200 : = cp_gimplify_arg (&CALL_EXPR_ARG (*expr_p, 0), pre_p, loc,
944 : side_effects);
945 3792200 : if (t == GS_ERROR)
946 : ret = GS_ERROR;
947 : }
948 : }
949 8319485 : if (ret != GS_ERROR)
950 : {
951 8319476 : tree decl = cp_get_callee_fndecl_nofold (*expr_p);
952 8319476 : if (!decl)
953 : break;
954 7924891 : if (fndecl_built_in_p (decl, BUILT_IN_FRONTEND))
955 0 : switch (DECL_FE_FUNCTION_CODE (decl))
956 : {
957 0 : case CP_BUILT_IN_IS_CONSTANT_EVALUATED:
958 0 : *expr_p = boolean_false_node;
959 0 : break;
960 0 : case CP_BUILT_IN_SOURCE_LOCATION:
961 0 : *expr_p
962 0 : = fold_builtin_source_location (*expr_p);
963 0 : break;
964 0 : case CP_BUILT_IN_IS_CORRESPONDING_MEMBER:
965 0 : *expr_p
966 0 : = fold_builtin_is_corresponding_member
967 0 : (EXPR_LOCATION (*expr_p), call_expr_nargs (*expr_p),
968 : &CALL_EXPR_ARG (*expr_p, 0));
969 0 : break;
970 0 : case CP_BUILT_IN_IS_POINTER_INTERCONVERTIBLE_WITH_CLASS:
971 0 : *expr_p
972 0 : = fold_builtin_is_pointer_inverconvertible_with_class
973 0 : (EXPR_LOCATION (*expr_p), call_expr_nargs (*expr_p),
974 : &CALL_EXPR_ARG (*expr_p, 0));
975 0 : break;
976 0 : case CP_BUILT_IN_EH_PTR_ADJUST_REF:
977 0 : error_at (EXPR_LOCATION (*expr_p),
978 : "%qs used outside of constant expressions",
979 : "__builtin_eh_ptr_adjust_ref");
980 0 : *expr_p = void_node;
981 0 : break;
982 0 : case CP_BUILT_IN_IS_STRING_LITERAL:
983 0 : *expr_p
984 0 : = fold_builtin_is_string_literal (EXPR_LOCATION (*expr_p),
985 0 : call_expr_nargs (*expr_p),
986 : &CALL_EXPR_ARG (*expr_p,
987 : 0));
988 0 : break;
989 0 : case CP_BUILT_IN_CONSTEXPR_DIAG:
990 0 : *expr_p = void_node;
991 0 : break;
992 : default:
993 : break;
994 : }
995 7924891 : else if (fndecl_built_in_p (decl, BUILT_IN_CLZG, BUILT_IN_CTZG))
996 33 : ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p,
997 : post_p);
998 : else
999 : /* All consteval functions should have been processed by now. */
1000 7924858 : gcc_checking_assert (!immediate_invocation_p (decl));
1001 : }
1002 : break;
1003 :
1004 618582 : case TARGET_EXPR:
1005 : /* A TARGET_EXPR that expresses direct-initialization should have been
1006 : elided by cp_gimplify_init_expr. */
1007 618582 : gcc_checking_assert (!TARGET_EXPR_DIRECT_INIT_P (*expr_p));
1008 : /* Likewise, but allow extra temps of trivial type so that
1009 : gimplify_init_ctor_preeval can materialize subobjects of a CONSTRUCTOR
1010 : on the rhs of an assignment, as in constexpr-aggr1.C. */
1011 618582 : gcc_checking_assert (!TARGET_EXPR_ELIDING_P (*expr_p)
1012 : || !TREE_ADDRESSABLE (TREE_TYPE (*expr_p)));
1013 618582 : if (flag_lifetime_dse > 1
1014 618454 : && TARGET_EXPR_INITIAL (*expr_p)
1015 1225538 : && VOID_TYPE_P (TREE_TYPE (TARGET_EXPR_INITIAL (*expr_p))))
1016 230573 : maybe_emit_clobber_object_begin (TARGET_EXPR_SLOT (*expr_p), pre_p);
1017 : ret = GS_UNHANDLED;
1018 : break;
1019 :
1020 3 : case PTRMEM_CST:
1021 3 : *expr_p = cplus_expand_constant (*expr_p);
1022 3 : if (TREE_CODE (*expr_p) == PTRMEM_CST)
1023 : ret = GS_ERROR;
1024 : else
1025 19696675 : ret = GS_OK;
1026 : break;
1027 :
1028 1143828 : case DECL_EXPR:
1029 1143828 : if (flag_lifetime_dse > 1)
1030 1143600 : maybe_emit_clobber_object_begin (DECL_EXPR_DECL (*expr_p), pre_p);
1031 : ret = GS_UNHANDLED;
1032 : break;
1033 :
1034 1169522 : case RETURN_EXPR:
1035 1169522 : if (TREE_OPERAND (*expr_p, 0)
1036 1169522 : && (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == INIT_EXPR
1037 12344 : || TREE_CODE (TREE_OPERAND (*expr_p, 0)) == MODIFY_EXPR))
1038 : {
1039 1104114 : expr_p = &TREE_OPERAND (*expr_p, 0);
1040 : /* Avoid going through the INIT_EXPR case, which can
1041 : degrade INIT_EXPRs into AGGR_INIT_EXPRs. */
1042 1104114 : goto modify_expr_case;
1043 : }
1044 : /* Fall through. */
1045 :
1046 138005265 : default:
1047 138005265 : ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
1048 138005265 : break;
1049 : }
1050 :
1051 : /* Restore saved state. */
1052 168380102 : if (STATEMENT_CODE_P (code))
1053 3936768 : current_stmt_tree ()->stmts_are_full_exprs_p
1054 3936768 : = saved_stmts_are_full_exprs_p;
1055 :
1056 : return ret;
1057 : }
1058 :
1059 : bool
1060 2529014951 : is_invisiref_parm (const_tree t)
1061 : {
1062 2385988352 : return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
1063 2577624330 : && DECL_BY_REFERENCE (t));
1064 : }
1065 :
1066 : /* A stable comparison routine for use with splay trees and DECLs. */
1067 :
1068 : static int
1069 62002 : splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
1070 : {
1071 62002 : tree a = (tree) xa;
1072 62002 : tree b = (tree) xb;
1073 :
1074 62002 : return DECL_UID (a) - DECL_UID (b);
1075 : }
1076 :
1077 : /* OpenMP context during genericization. */
1078 :
struct cp_genericize_omp_taskreg
{
  /* True for a parallel region, false for a task region (used to stop the
     enclosing-region search in omp_cxx_notice_variable).  */
  bool is_parallel;
  /* True if this region's default data-sharing is 'shared'.  */
  bool default_shared;
  /* Next enclosing region, or NULL for the outermost one.  */
  struct cp_genericize_omp_taskreg *outer;
  /* Map from DECL to its OMP_CLAUSE_DEFAULT_* sharing flags; presumably
     keyed via splay_tree_compare_decl_uid -- the tree's creation is not
     visible in this chunk.  */
  splay_tree variables;
};
1086 :
1087 : /* Return true if genericization should try to determine if
1088 : DECL is firstprivate or shared within task regions. */
1089 :
static bool
omp_var_to_track (tree decl)
{
  tree type = TREE_TYPE (decl);
  /* Look through invisible-reference parms and reference types to the
     underlying object type.  */
  if (is_invisiref_parm (decl))
    type = TREE_TYPE (type);
  else if (TYPE_REF_P (type))
    type = TREE_TYPE (type);
  /* For arrays, the element type is what matters.  */
  while (TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);
  /* Only class types are worth tracking: the point of tracking is to
     instantiate copy ctors/dtors early (see omp_cxx_notice_variable).  */
  if (type == error_mark_node || !CLASS_TYPE_P (type))
    return false;
  /* Thread-local variables are not tracked.  */
  if (VAR_P (decl) && CP_DECL_THREAD_LOCAL_P (decl))
    return false;
  /* Nor are variables whose data-sharing is already predetermined.  */
  if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    return false;
  return true;
}
1108 :
1109 : /* Note DECL use in OpenMP region OMP_CTX during genericization. */
1110 :
static void
omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
{
  splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
					 (splay_tree_key) decl);
  /* Only process DECL the first time it is seen in this region.  */
  if (n == NULL)
    {
      int flags = OMP_CLAUSE_DEFAULT_SHARED;
      /* Record the use in every enclosing region as well.  */
      if (omp_ctx->outer)
	omp_cxx_notice_variable (omp_ctx->outer, decl);
      if (!omp_ctx->default_shared)
	{
	  struct cp_genericize_omp_taskreg *octx;

	  /* Search enclosing regions up to and including the nearest
	     parallel: if some enclosing region already determined DECL
	     non-shared, it is firstprivate here too.  */
	  for (octx = omp_ctx->outer; octx; octx = octx->outer)
	    {
	      n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
	      if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
		{
		  flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
		  break;
		}
	      if (octx->is_parallel)
		break;
	    }
	  /* If no enclosing parallel was found, parameters and automatic
	     locals of the current function default to firstprivate.  */
	  if (octx == NULL
	      && (TREE_CODE (decl) == PARM_DECL
		  || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
		      && DECL_CONTEXT (decl) == current_function_decl)))
	    flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
	  if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
	    {
	      /* DECL is implicitly determined firstprivate in
		 the current task construct.  Ensure copy ctor and
		 dtor are instantiated, because during gimplification
		 it will be already too late.  */
	      tree type = TREE_TYPE (decl);
	      /* Same type-stripping as in omp_var_to_track.  */
	      if (is_invisiref_parm (decl))
		type = TREE_TYPE (type);
	      else if (TYPE_REF_P (type))
		type = TREE_TYPE (type);
	      while (TREE_CODE (type) == ARRAY_TYPE)
		type = TREE_TYPE (type);
	      get_copy_ctor (type, tf_none);
	      get_dtor (type, tf_none);
	    }
	}
      splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
    }
}
1161 :
1162 : /* True if any of the element initializers in CTOR are TARGET_EXPRs that are
1163 : not expected to elide, e.g. because unsafe_copy_elision_p is true. */
1164 :
1165 : static bool
1166 133333 : any_non_eliding_target_exprs (tree ctor)
1167 : {
1168 603894 : for (const constructor_elt &e : *CONSTRUCTOR_ELTS (ctor))
1169 : {
1170 470564 : if (TREE_CODE (e.value) == TARGET_EXPR
1171 470564 : && !TARGET_EXPR_ELIDING_P (e.value))
1172 : return true;
1173 : }
1174 : return false;
1175 : }
1176 :
1177 : /* If we might need to clean up a partially constructed object, break down the
1178 : CONSTRUCTOR with split_nonconstant_init. Also expand VEC_INIT_EXPR at this
1179 : point. If initializing TO with FROM is non-trivial, overwrite *REPLACE with
1180 : the result. */
1181 :
static void
cp_genericize_init (tree *replace, tree from, tree to, vec<tree,va_gc>** flags)
{
  tree init = NULL_TREE;
  if (TREE_CODE (from) == VEC_INIT_EXPR)
    /* Expand array initialization now; FLAGS collects temporaries whose
       cleanups must later be disabled (see cp_genericize_target_expr).  */
    init = expand_vec_init_expr (to, from, tf_warning_or_error, flags);
  else if (TREE_CODE (from) == CONSTRUCTOR
	   && TREE_SIDE_EFFECTS (from)
	   /* Break the CONSTRUCTOR up when an exception could require
	      destroying already-constructed subobjects, or when some
	      element's TARGET_EXPR will not elide.  */
	   && ((flag_exceptions
		&& TYPE_HAS_NONTRIVIAL_DESTRUCTOR (TREE_TYPE (from)))
	       || any_non_eliding_target_exprs (from)))
    {
      to = cp_stabilize_reference (to);
      replace_placeholders (from, to);
      init = split_nonconstant_init (to, from);
    }

  if (init)
    {
      if (*replace == from)
	/* Make cp_gimplify_init_expr call replace_decl on this
	   TARGET_EXPR_INITIAL.  */
	init = fold_convert (void_type_node, init);
      *replace = init;
    }
}
1208 :
1209 : /* For an INIT_EXPR, replace the INIT_EXPR itself. */
1210 :
static void
cp_genericize_init_expr (tree *stmt_p)
{
  /* Use the INIT_EXPR's location for any diagnostics emitted below.  */
  iloc_sentinel ils = EXPR_LOCATION (*stmt_p);
  tree to = TREE_OPERAND (*stmt_p, 0);
  tree from = TREE_OPERAND (*stmt_p, 1);
  if (SIMPLE_TARGET_EXPR_P (from)
      /* Return gets confused if we clobber its INIT_EXPR this soon.  */
      && TREE_CODE (to) != RESULT_DECL)
    /* Look through the TARGET_EXPR: we are initializing TO directly.  */
    from = TARGET_EXPR_INITIAL (from);
  cp_genericize_init (stmt_p, from, to, nullptr);
}
1223 :
1224 : /* For a TARGET_EXPR, change the TARGET_EXPR_INITIAL. We will need to use
1225 : replace_decl later when we know what we're initializing. */
1226 :
static void
cp_genericize_target_expr (tree *stmt_p)
{
  /* Use the TARGET_EXPR's location for any diagnostics.  */
  iloc_sentinel ils = EXPR_LOCATION (*stmt_p);
  tree slot = TARGET_EXPR_SLOT (*stmt_p);
  /* FLAGS receives cleanup guard variables created while expanding any
     VEC_INIT_EXPR inside the initializer (see cp_genericize_init).  */
  vec<tree, va_gc> *flags = make_tree_vector ();
  cp_genericize_init (&TARGET_EXPR_INITIAL (*stmt_p),
		      TARGET_EXPR_INITIAL (*stmt_p), slot, &flags);
  /* The slot is expected to carry no static initializer of its own.  */
  gcc_assert (!DECL_INITIAL (slot));
  for (tree f : flags)
    {
      /* Once initialization is complete TARGET_EXPR_CLEANUP becomes active, so
	 disable any subobject cleanups.  */
      tree d = build_disable_temp_cleanup (f);
      auto &r = TARGET_EXPR_INITIAL (*stmt_p);
      r = add_stmt_to_compound (r, d);
    }
  release_tree_vector (flags);
}
1246 :
1247 : /* Similar to if (target_expr_needs_replace) replace_decl, but TP is the
1248 : TARGET_EXPR_INITIAL, and this also updates *_SLOT. We need this extra
1249 : replacement when cp_folding TARGET_EXPR to preserve the invariant that
1250 : AGGR_INIT_EXPR_SLOT agrees with the enclosing TARGET_EXPR_SLOT. */
1251 :
1252 : static bool
1253 126 : maybe_replace_decl (tree *tp, tree decl, tree replacement)
1254 : {
1255 126 : if (!*tp || !VOID_TYPE_P (TREE_TYPE (*tp)))
1256 : return false;
1257 : tree t = *tp;
1258 46 : while (TREE_CODE (t) == COMPOUND_EXPR)
1259 0 : t = TREE_OPERAND (t, 1);
1260 46 : if (TREE_CODE (t) == AGGR_INIT_EXPR)
1261 46 : replace_decl (&AGGR_INIT_EXPR_SLOT (t), decl, replacement);
1262 0 : else if (TREE_CODE (t) == VEC_INIT_EXPR)
1263 0 : replace_decl (&VEC_INIT_EXPR_SLOT (t), decl, replacement);
1264 : else
1265 0 : replace_decl (tp, decl, replacement);
1266 : return true;
1267 : }
1268 :
1269 : /* Genericization context. */
1270 :
struct cp_genericize_data
{
  /* Set of nodes already visited by the genericize walk -- NOTE(review):
     the walker itself is outside this chunk; verify against it.  */
  hash_set<tree> *p_set;
  /* Presumably a stack of enclosing BIND_EXPRs -- TODO confirm against
     the walker that maintains it.  */
  auto_vec<tree> bind_expr_stack;
  /* Innermost enclosing OpenMP task/parallel region, or NULL.  */
  struct cp_genericize_omp_taskreg *omp_ctx;
  /* Enclosing try block, if any -- used outside this chunk.  */
  tree try_block;
  bool no_sanitize_p;
  bool handle_invisiref_parm_p;
};
1280 :
1281 : /* Emit an error about taking the address of an immediate function.
1282 : EXPR is the whole expression; DECL is the immediate function. */
1283 :
1284 : static void
1285 63 : taking_address_of_imm_fn_error (tree expr, tree decl)
1286 : {
1287 63 : auto_diagnostic_group d;
1288 63 : const location_t loc = (TREE_CODE (expr) == PTRMEM_CST
1289 63 : ? PTRMEM_CST_LOCATION (expr)
1290 63 : : EXPR_LOCATION (expr));
1291 63 : error_at (loc, "taking address of an immediate function %qD", decl);
1292 63 : maybe_explain_promoted_consteval (loc, decl);
1293 63 : }
1294 :
1295 : /* Build up an INIT_EXPR to initialize the object of a constructor call that
1296 : has been folded to a constant value. CALL is the CALL_EXPR for the
1297 : constructor call; INIT is the value. */
1298 :
static tree
cp_build_init_expr_for_ctor (tree call, tree init)
{
  tree a = CALL_EXPR_ARG (call, 0);
  /* No real object to initialize: the value itself suffices.  */
  if (is_dummy_object (a))
    return init;
  const bool return_this = targetm.cxx.cdtor_returns_this ();
  const location_t loc = EXPR_LOCATION (call);
  /* If the target's cdtors return 'this', A is evaluated twice below;
     make that safe.  */
  if (return_this)
    a = cp_save_expr (a);
  tree s = build_fold_indirect_ref_loc (loc, a);
  init = cp_build_init_expr (s, init);
  if (return_this)
    {
      /* Preserve the original call's value: perform the INIT_EXPR, then
	 yield the object pointer converted to the call's type.  */
      init = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (call), init,
			 fold_convert_loc (loc, TREE_TYPE (call), a));
      suppress_warning (init);
    }
  return init;
}
1319 :
1320 : /* For every DECL_EXPR check if it declares a consteval-only variable and
1321 : if so, overwrite it with a no-op. The point here is not to leak
1322 : consteval-only variables into the middle end. */
1323 :
1324 : static tree
1325 394516 : wipe_consteval_only_r (tree *stmt_p, int *, void *)
1326 : {
1327 394516 : if (TREE_CODE (*stmt_p) == DECL_EXPR)
1328 : {
1329 701 : tree d = DECL_EXPR_DECL (*stmt_p);
1330 701 : if (VAR_P (d) && consteval_only_p (d))
1331 : /* Wipe the DECL_EXPR so that it doesn't get into gimple. */
1332 3 : *stmt_p = void_node;
1333 : }
1334 394516 : return NULL_TREE;
1335 : }
1336 :
1337 : /* A walk_tree callback for cp_fold_function and cp_fully_fold_init to handle
1338 : immediate functions. */
1339 :
static tree
cp_fold_immediate_r (tree *stmt_p, int *walk_subtrees, void *data_)
{
  auto data = static_cast<cp_fold_data *>(data_);
  tree stmt = *stmt_p;
  /* The purpose of this is not to emit errors for mce_unknown.  */
  const tsubst_flags_t complain = (data->flags & ff_mce_false
				   ? tf_error : tf_none);
  const tree_code code = TREE_CODE (stmt);

  /* No need to look into types or unevaluated operands.
     NB: This affects cp_fold_r as well.  */
  if (TYPE_P (stmt)
      || unevaluated_p (code)
      /* We do not use in_immediate_context here because it checks
	 more than is desirable, e.g., sk_template_parms.  */
      || cp_unevaluated_operand
      || (current_function_decl
	  && DECL_IMMEDIATE_FUNCTION_P (current_function_decl)))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  /* Most invalid uses of consteval-only types should have been already
     detected at this point.  And the valid ones won't be needed
     anymore.  */
  if (flag_reflection
      && complain
      && (data->flags & ff_genericize)
      && TREE_CODE (stmt) == STATEMENT_LIST)
    for (tree s : tsi_range (stmt))
      if (check_out_of_consteval_use (s))
	*stmt_p = void_node;

  tree decl = NULL_TREE;
  bool call_p = false;

  /* We are looking for &fn or fn ().  */
  switch (code)
    {
    case DECL_EXPR:
      /* Clear consteval-only DECL_EXPRs.  */
      if (flag_reflection)
	{
	  tree d = DECL_EXPR_DECL (stmt);
	  if (VAR_P (d) && consteval_only_p (d))
	    *stmt_p = void_node;
	}
      break;
    case CALL_EXPR:
    case AGGR_INIT_EXPR:
      if (tree fn = cp_get_callee (stmt))
	if (TREE_CODE (fn) != ADDR_EXPR || ADDR_EXPR_DENOTES_CALL_P (fn))
	  decl = cp_get_fndecl_from_callee (fn, /*fold*/false);
      /* Even if we could not determine the callee, this is a call.  */
      call_p = true;
      break;
    case PTRMEM_CST:
      decl = PTRMEM_CST_MEMBER (stmt);
      break;
    case ADDR_EXPR:
      if (!ADDR_EXPR_DENOTES_CALL_P (stmt))
	decl = TREE_OPERAND (stmt, 0);
      break;
    case IF_STMT:
      if (IF_STMT_CONSTEVAL_P (stmt))
	{
	  /* Only walk each consteval if once.  */
	  if (!data->pset.add (stmt))
	    {
	      cp_walk_tree (&ELSE_CLAUSE (stmt), cp_fold_immediate_r, data_,
			    nullptr);
	      if (flag_reflection)
		/* Check & clear consteval-only DECL_EXPRs even here,
		   because we wouldn't be walking this subtree otherwise.  */
		cp_walk_tree (&THEN_CLAUSE (stmt), wipe_consteval_only_r,
			      data_, nullptr);
	    }
	  *walk_subtrees = 0;
	  return NULL_TREE;
	}
      /* FALLTHRU */
    default:
      if (data->pset.add (stmt))
	*walk_subtrees = 0;
      return NULL_TREE;
    }

  /* Only function decls are of interest from here on.  */
  if (!decl || TREE_CODE (decl) != FUNCTION_DECL)
    return NULL_TREE;

  /* Fully escalate once all templates have been instantiated.  What we're
     calling is not a consteval function but it may become one.  This
     requires recursing; DECL may be promoted to consteval because it
     contains an escalating expression E, but E itself may have to be
     promoted first, etc.  */
  if (at_eof > 1 && unchecked_immediate_escalating_function_p (decl))
    {
      /* Set before the actual walk to avoid endless recursion.  */
      DECL_ESCALATION_CHECKED_P (decl) = true;
      /* We're only looking for the first escalating expression.  Let us not
	 walk more trees than necessary, hence mce_unknown.  */
      cp_fold_immediate (&DECL_SAVED_TREE (decl), mce_unknown, decl);
    }

  /* [expr.const]p16 "An expression or conversion is immediate-escalating if
     it is not initially in an immediate function context and it is either
     -- an immediate invocation that is not a constant expression and is not
     a subexpression of an immediate invocation."

     If we are in an immediate-escalating function, the immediate-escalating
     expression or conversion makes it an immediate function.  So STMT does
     not need to produce a constant expression.  */
  if (DECL_IMMEDIATE_FUNCTION_P (decl))
    {
      /* An immediate invocation must evaluate to a constant.  */
      tree e = cxx_constant_value (stmt, tf_none);
      if (e == error_mark_node)
	{
	  /* This takes care of, e.g.,
	       template <typename T>
	       constexpr int f(T t)
	       {
		 return id(t);
	       }
	     where id (consteval) causes f<int> to be promoted.  */
	  if (immediate_escalating_function_p (current_function_decl))
	    promote_function_to_consteval (current_function_decl);
	  else if (complain & tf_error)
	    {
	      if (call_p)
		{
		  auto_diagnostic_group d;
		  location_t loc = cp_expr_loc_or_input_loc (stmt);
		  error_at (loc, "call to consteval function %qE is "
			    "not a constant expression", stmt);
		  /* Explain why it's not a constant expression.  */
		  *stmt_p = cxx_constant_value (stmt, complain);
		  maybe_explain_promoted_consteval (loc, decl);
		}
	      else if (!data->pset.add (stmt))
		{
		  taking_address_of_imm_fn_error (stmt, decl);
		  *stmt_p = build_zero_cst (TREE_TYPE (stmt));
		}
	      /* If we're giving hard errors, continue the walk rather than
		 bailing out after the first error.  */
	      return NULL_TREE;
	    }
	  *walk_subtrees = 0;
	  return stmt;
	}
      /* If we called a consteval function and it evaluated to a consteval-only
	 expression, it could be a problem if we are outside a manifestly
	 constant-evaluated context.  */
      else if ((data->flags & ff_genericize)
	       && check_out_of_consteval_use (e, complain))
	{
	  *stmt_p = void_node;
	  if (complain & tf_error)
	    return NULL_TREE;
	  else
	    {
	      *walk_subtrees = 0;
	      return stmt;
	    }
	}

      /* We've evaluated the consteval function call.  */
      if (call_p)
	{
	  if (code == CALL_EXPR && DECL_CONSTRUCTOR_P (decl))
	    /* For a ctor call, turn the value into an INIT_EXPR of the
	       object being constructed.  */
	    *stmt_p = cp_build_init_expr_for_ctor (stmt, e);
	  else
	    *stmt_p = e;
	}
    }
  /* We've encountered a function call that may turn out to be consteval
     later.  Store its caller so that we can ensure that the call is
     a constant expression.  */
  else if (unchecked_immediate_escalating_function_p (decl))
    {
      /* Make sure we're not inserting new elements while walking
	 the deferred_escalating_exprs hash table; if we are, it's
	 likely that a function wasn't properly marked checked for
	 i-e expressions.  */
      gcc_checking_assert (at_eof <= 1);
      if (current_function_decl)
	remember_escalating_expr (current_function_decl);
      /* auto p = &f<int>; in the global scope won't be ensconced in
	 a function we could store for later at this point.  (If there's
	 no c_f_d at this point and we're dealing with a call, we should
	 see the call when cp_fold_function __static_i_and_d.)  */
      else if (!call_p)
	remember_escalating_expr (stmt);
    }

  return NULL_TREE;
}
1537 :
1538 : /* A walk_tree helper to replace constant-initialized references in an
   OMP_CLAUSE with the declaration that they refer to.  Such refs
1540 : will have been folded out in the body by cp_fold_non_odr_use_1 and
1541 : so we need to follow suit to prevent confusion. */
1542 :
1543 : static tree
1544 95231 : cp_fold_omp_clause_refs_r (tree *expr_p, int *walk_subtrees, void */*data*/)
1545 : {
1546 95231 : tree expr = *expr_p;
1547 :
1548 95231 : if (TYPE_P (expr))
1549 : {
1550 0 : *walk_subtrees = 0;
1551 0 : return NULL_TREE;
1552 : }
1553 :
1554 95231 : if (DECL_P (expr))
1555 : {
1556 52333 : *walk_subtrees = 0;
1557 :
1558 52333 : if (decl_constant_var_p (expr)
1559 52333 : && TYPE_REF_P (TREE_TYPE (expr)))
1560 : {
1561 532 : tree init = maybe_constant_value (expr);
1562 532 : if (TREE_CONSTANT (init))
1563 532 : *expr_p = tree_strip_nop_conversions (init);
1564 : }
1565 : }
1566 :
1567 : return NULL_TREE;
1568 : }
1569 :
1570 : /* Perform any pre-gimplification folding of C++ front end trees to
1571 : GENERIC.
1572 : Note: The folding of non-omp cases is something to move into
1573 : the middle-end. As for now we have most foldings only on GENERIC
1574 : in fold-const, we need to perform this before transformation to
1575 : GIMPLE-form.
1576 :
1577 : ??? This is algorithmically weird because walk_tree works in pre-order, so
1578 : we see outer expressions before inner expressions. This isn't as much of an
1579 : issue because cp_fold recurses into subexpressions in many cases, but then
1580 : walk_tree walks back into those subexpressions again. We avoid the
1581 : resulting complexity problem by caching the result of cp_fold, but it's
1582 : inelegant. */
1583 :
static tree
cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
{
  cp_fold_data *data = (cp_fold_data*)data_;
  tree stmt = *stmt_p;
  enum tree_code code = TREE_CODE (stmt);

  /* Fold STMT itself first; folding may replace it entirely.  */
  *stmt_p = stmt = cp_fold (*stmt_p, data->flags);

  if (data->pset.add (stmt))
    {
      /* Don't walk subtrees of stmts we've already walked once, otherwise
	 we can have exponential complexity with e.g. lots of nested
	 SAVE_EXPRs or TARGET_EXPRs.  cp_fold uses a cache and will return
	 always the same tree, which the first time cp_fold_r has been
	 called on it had the subtrees walked.  */
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  /* Re-fetch the code: cp_fold above may have changed it.  */
  code = TREE_CODE (stmt);
  switch (code)
    {
      tree x;
      int i, n;
    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_LOOP:
    case OMP_TASKLOOP:
    case OMP_TILE:
    case OMP_UNROLL:
    case OACC_LOOP:
      /* Walk the pieces of an OMP loop by hand so that the iteration
	 variables themselves are left alone.  */
      cp_walk_tree (&OMP_FOR_BODY (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_INIT (stmt), cp_fold_r, data, NULL);
      x = OMP_FOR_COND (stmt);
      if (x && TREE_CODE_CLASS (TREE_CODE (x)) == tcc_comparison)
	{
	  cp_walk_tree (&TREE_OPERAND (x, 0), cp_fold_r, data, NULL);
	  cp_walk_tree (&TREE_OPERAND (x, 1), cp_fold_r, data, NULL);
	}
      else if (x && TREE_CODE (x) == TREE_VEC)
	{
	  n = TREE_VEC_LENGTH (x);
	  for (i = 0; i < n; i++)
	    {
	      tree o = TREE_VEC_ELT (x, i);
	      /* Only fold the bound (operand 1), not the iteration
		 variable.  */
	      if (o && TREE_CODE_CLASS (TREE_CODE (o)) == tcc_comparison)
		cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
	    }
	}
      x = OMP_FOR_INCR (stmt);
      if (x && TREE_CODE (x) == TREE_VEC)
	{
	  n = TREE_VEC_LENGTH (x);
	  for (i = 0; i < n; i++)
	    {
	      tree o = TREE_VEC_ELT (x, i);
	      if (o && TREE_CODE (o) == MODIFY_EXPR)
		o = TREE_OPERAND (o, 1);
	      if (o && (TREE_CODE (o) == PLUS_EXPR || TREE_CODE (o) == MINUS_EXPR
			|| TREE_CODE (o) == POINTER_PLUS_EXPR))
		{
		  cp_walk_tree (&TREE_OPERAND (o, 0), cp_fold_r, data, NULL);
		  cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
		}
	    }
	}
      cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_fold_r, data, NULL);
      *walk_subtrees = 0;
      return NULL_TREE;

    case OMP_CLAUSE:
      /* In the non-ODR-use-only fold, also fold references in clause
	 decls so the clauses agree with the folded body (see
	 cp_fold_omp_clause_refs_r).  */
      if ((data->flags & ff_only_non_odr)
	  && omp_clause_num_ops[OMP_CLAUSE_CODE (stmt)] >= 1
	  && OMP_CLAUSE_CODE (stmt) >= OMP_CLAUSE_PRIVATE
	  && OMP_CLAUSE_CODE (stmt) <= OMP_CLAUSE__SCANTEMP_
	  && OMP_CLAUSE_DECL (stmt))
	{
	  tree *decl = &OMP_CLAUSE_DECL (stmt);
	  cp_walk_tree (decl, cp_fold_omp_clause_refs_r, NULL, NULL);
	  if (TREE_CODE (*decl) == ADDR_EXPR
	      && DECL_P (TREE_OPERAND (*decl, 0)))
	    *decl = TREE_OPERAND (*decl, 0);
	  data->pset.add (*decl);
	}
      break;

    case IF_STMT:
      if (IF_STMT_CONSTEVAL_P (stmt))
	{
	  /* Don't walk THEN_CLAUSE (stmt) for consteval if.  IF_COND is always
	     boolean_false_node.  */
	  cp_walk_tree (&ELSE_CLAUSE (stmt), cp_fold_r, data, NULL);
	  cp_walk_tree (&IF_SCOPE (stmt), cp_fold_r, data, NULL);
	  *walk_subtrees = 0;
	  return NULL_TREE;
	}
      break;

      /* cp_genericize_{init,target}_expr are only for genericize time; they're
	 here rather than in cp_genericize to avoid problems with the invisible
	 reference transition.  */
    case INIT_EXPR:
      if (data->flags & ff_genericize)
	cp_genericize_init_expr (stmt_p);
      break;

    case TARGET_EXPR:
      /* Try constexpr-evaluating the initializer up front.  */
      if (!flag_no_inline)
	if (tree &init = TARGET_EXPR_INITIAL (stmt))
	  {
	    tree folded = maybe_constant_init (init, TARGET_EXPR_SLOT (stmt),
					       (data->flags & ff_mce_false
						? mce_false : mce_unknown));
	    if (folded != init && TREE_CONSTANT (folded))
	      init = folded;
	  }

      /* This needs to happen between the constexpr evaluation (which wants
	 pre-generic trees) and fold (which wants the cp_genericize_init
	 transformations).  */
      if (data->flags & ff_genericize)
	cp_genericize_target_expr (stmt_p);

      if (tree &init = TARGET_EXPR_INITIAL (stmt))
	{
	  cp_walk_tree (&init, cp_fold_r, data, NULL);
	  cp_walk_tree (&TARGET_EXPR_CLEANUP (stmt), cp_fold_r, data, NULL);
	  *walk_subtrees = 0;
	  /* Folding might replace e.g. a COND_EXPR with a TARGET_EXPR; in
	     that case, strip it in favor of this one.  */
	  if (TREE_CODE (init) == TARGET_EXPR)
	    {
	      tree sub = TARGET_EXPR_INITIAL (init);
	      maybe_replace_decl (&sub, TARGET_EXPR_SLOT (init),
				  TARGET_EXPR_SLOT (stmt));
	      init = sub;
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}
1733 :
1734 : /* Fold ALL the trees! FIXME we should be able to remove this, but
1735 : apparently that still causes optimization regressions. */
1736 :
1737 : void
1738 72095691 : cp_fold_function (tree fndecl)
1739 : {
1740 : /* By now all manifestly-constant-evaluated expressions will have
1741 : been constant-evaluated already if possible, so we can safely
1742 : pass ff_mce_false. */
1743 72095691 : cp_fold_data data (ff_genericize | ff_mce_false);
1744 : /* Do cp_fold_immediate_r in separate whole IL walk instead of during
1745 : cp_fold_r, as otherwise expressions using results of immediate functions
1746 : might not be folded as cp_fold is called on those before cp_fold_r is
1747 : called on their argument. */
1748 72095691 : if (cxx_dialect >= cxx20)
1749 : {
1750 70175919 : cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_immediate_r,
1751 : &data, NULL);
1752 70175919 : data.pset.empty ();
1753 : }
1754 72095691 : cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &data, NULL);
1755 :
1756 : /* This is merely an optimization: if FNDECL has no i-e expressions,
1757 : we'll not save c_f_d, and we can safely say that FNDECL will not
1758 : be promoted to consteval. */
1759 72095691 : if (deferred_escalating_exprs
1760 72095691 : && !deferred_escalating_exprs->contains (current_function_decl))
1761 54488879 : DECL_ESCALATION_CHECKED_P (fndecl) = true;
1762 72095691 : }
1763 :
1764 : /* Fold any non-ODR usages of constant variables in FNDECL. This occurs
1765 : before saving the constexpr fundef, so do as little other folding
1766 : as possible. */
1767 :
1768 : void
1769 73522837 : cp_fold_function_non_odr_use (tree fndecl)
1770 : {
1771 73522837 : cp_fold_data data (ff_only_non_odr);
1772 73522837 : cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &data, NULL);
1773 73522837 : }
1774 :
1775 : /* We've stashed immediate-escalating functions. Now see if they indeed
1776 : ought to be promoted to consteval. */
1777 :
1778 : void
1779 96437 : process_and_check_pending_immediate_escalating_fns ()
1780 : {
1781 : /* This will be null for -fno-immediate-escalation. */
1782 96437 : if (!deferred_escalating_exprs)
1783 : return;
1784 :
1785 28785464 : for (auto e : *deferred_escalating_exprs)
1786 14383686 : if (TREE_CODE (e) == FUNCTION_DECL && !DECL_ESCALATION_CHECKED_P (e))
1787 9514242 : cp_fold_immediate (&DECL_SAVED_TREE (e), mce_false, e);
1788 :
1789 : /* We've escalated every function that could have been promoted to
1790 : consteval. Check that we are not taking the address of a consteval
1791 : function. */
1792 28785580 : for (auto e : *deferred_escalating_exprs)
1793 : {
1794 14383686 : if (TREE_CODE (e) == FUNCTION_DECL)
1795 14383570 : continue;
1796 116 : tree decl = (TREE_CODE (e) == PTRMEM_CST
1797 116 : ? PTRMEM_CST_MEMBER (e)
1798 116 : : TREE_OPERAND (e, 0));
1799 232 : if (DECL_IMMEDIATE_FUNCTION_P (decl))
1800 12 : taking_address_of_imm_fn_error (e, decl);
1801 : }
1802 :
1803 18208 : deferred_escalating_exprs = nullptr;
1804 : }
1805 :
1806 : /* Turn SPACESHIP_EXPR EXPR into GENERIC. */
1807 :
1808 253416 : static tree genericize_spaceship (tree expr)
1809 : {
1810 253416 : iloc_sentinel s (cp_expr_location (expr));
1811 253416 : tree type = TREE_TYPE (expr);
1812 253416 : tree op0 = TREE_OPERAND (expr, 0);
1813 253416 : tree op1 = TREE_OPERAND (expr, 1);
1814 253416 : return genericize_spaceship (input_location, type, op0, op1);
1815 253416 : }
1816 :
1817 : /* If EXPR involves an anonymous VLA type, prepend a DECL_EXPR for that type
1818 : to trigger gimplify_type_sizes; otherwise a cast to pointer-to-VLA confuses
1819 : the middle-end (c++/88256). If EXPR is a DECL, use add_stmt and return
1820 : NULL_TREE; otherwise return a COMPOUND_STMT of the DECL_EXPR and EXPR. */
1821 :
1822 : tree
1823 178639117 : predeclare_vla (tree expr)
1824 : {
1825 178639117 : tree type = TREE_TYPE (expr);
1826 178639117 : if (type == error_mark_node)
1827 : return expr;
1828 178639017 : if (is_typedef_decl (expr))
1829 178639017 : type = DECL_ORIGINAL_TYPE (expr);
1830 :
1831 : /* We need to strip pointers for gimplify_type_sizes. */
1832 178639017 : tree vla = type;
1833 272099252 : while (POINTER_TYPE_P (vla))
1834 : {
1835 97431081 : if (TYPE_NAME (vla))
1836 : return expr;
1837 93460235 : vla = TREE_TYPE (vla);
1838 : }
1839 91263695 : if (vla == type || TYPE_NAME (vla)
1840 175266658 : || !variably_modified_type_p (vla, NULL_TREE))
1841 174667968 : return expr;
1842 :
1843 203 : tree decl = build_decl (input_location, TYPE_DECL, NULL_TREE, vla);
1844 203 : DECL_ARTIFICIAL (decl) = 1;
1845 203 : TYPE_NAME (vla) = decl;
1846 203 : tree dexp = build_stmt (input_location, DECL_EXPR, decl);
1847 203 : if (DECL_P (expr))
1848 : {
1849 5 : add_stmt (dexp);
1850 5 : return NULL_TREE;
1851 : }
1852 : else
1853 : {
1854 198 : expr = build2 (COMPOUND_EXPR, type, dexp, expr);
1855 198 : return expr;
1856 : }
1857 : }
1858 :
1859 : /* Perform any pre-gimplification lowering of C++ front end trees to
1860 : GENERIC. */
1861 :
1862 : static tree
1863 2291409568 : cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
1864 : {
1865 2322779532 : tree stmt = *stmt_p;
1866 2322779532 : struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
1867 2322779532 : hash_set<tree> *p_set = wtd->p_set;
1868 :
1869 : /* If in an OpenMP context, note var uses. */
1870 2322779532 : if (UNLIKELY (wtd->omp_ctx != NULL)
1871 594956 : && (VAR_P (stmt)
1872 : || TREE_CODE (stmt) == PARM_DECL
1873 : || TREE_CODE (stmt) == RESULT_DECL)
1874 2322887061 : && omp_var_to_track (stmt))
1875 12338 : omp_cxx_notice_variable (wtd->omp_ctx, stmt);
1876 :
1877 : /* Don't dereference parms in a thunk, pass the references through. */
1878 94264514 : if ((TREE_CODE (stmt) == CALL_EXPR && call_from_lambda_thunk_p (stmt))
1879 2416981486 : || (TREE_CODE (stmt) == AGGR_INIT_EXPR && AGGR_INIT_FROM_THUNK_P (stmt)))
1880 : {
1881 62755 : *walk_subtrees = 0;
1882 62755 : return NULL;
1883 : }
1884 :
1885 : /* Dereference invisible reference parms. */
1886 2322716777 : if (wtd->handle_invisiref_parm_p && is_invisiref_parm (stmt))
1887 : {
1888 1386067 : *stmt_p = convert_from_reference (stmt);
1889 1386067 : p_set->add (*stmt_p);
1890 1386067 : *walk_subtrees = 0;
1891 1386067 : return NULL;
1892 : }
1893 :
1894 : /* Map block scope extern declarations to visible declarations with the
1895 : same name and type in outer scopes if any. */
1896 2321330710 : if (VAR_OR_FUNCTION_DECL_P (stmt) && DECL_LOCAL_DECL_P (stmt))
1897 20259 : if (tree alias = DECL_LOCAL_DECL_ALIAS (stmt))
1898 : {
1899 20259 : if (alias != error_mark_node)
1900 : {
1901 20256 : *stmt_p = alias;
1902 20256 : TREE_USED (alias) |= TREE_USED (stmt);
1903 : }
1904 20259 : *walk_subtrees = 0;
1905 20259 : return NULL;
1906 : }
1907 :
1908 2321310451 : if (TREE_CODE (stmt) == INTEGER_CST
1909 285336210 : && TYPE_REF_P (TREE_TYPE (stmt))
1910 155 : && (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
1911 2321310452 : && !wtd->no_sanitize_p)
1912 : {
1913 1 : ubsan_maybe_instrument_reference (stmt_p);
1914 1 : if (*stmt_p != stmt)
1915 : {
1916 1 : *walk_subtrees = 0;
1917 1 : return NULL_TREE;
1918 : }
1919 : }
1920 :
1921 : /* Other than invisiref parms, don't walk the same tree twice. */
1922 2321310450 : if (p_set->contains (stmt))
1923 : {
1924 494586061 : *walk_subtrees = 0;
1925 494586061 : return NULL_TREE;
1926 : }
1927 :
1928 1826724389 : if ((TREE_CODE (stmt) == VAR_DECL
1929 : || TREE_CODE (stmt) == PARM_DECL
1930 : || TREE_CODE (stmt) == RESULT_DECL)
1931 183872726 : && DECL_HAS_VALUE_EXPR_P (stmt)
1932 : /* Walk DECL_VALUE_EXPR mainly for benefit of xobj lambdas so that we
1933 : adjust any invisiref object parm uses within the capture proxies.
1934 : TODO: For GCC 17 do this walking unconditionally. */
1935 2206829 : && current_function_decl
1936 2206829 : && DECL_XOBJ_MEMBER_FUNCTION_P (current_function_decl)
1937 872 : && LAMBDA_FUNCTION_P (current_function_decl))
1938 : {
1939 430 : tree ve = DECL_VALUE_EXPR (stmt);
1940 430 : cp_walk_tree (&ve, cp_genericize_r, data, NULL);
1941 430 : SET_DECL_VALUE_EXPR (stmt, ve);
1942 : }
1943 :
1944 1826724389 : switch (TREE_CODE (stmt))
1945 : {
1946 157467257 : case ADDR_EXPR:
1947 157467257 : if (is_invisiref_parm (TREE_OPERAND (stmt, 0)))
1948 : {
1949 : /* If in an OpenMP context, note var uses. */
1950 561232 : if (UNLIKELY (wtd->omp_ctx != NULL)
1951 561232 : && omp_var_to_track (TREE_OPERAND (stmt, 0)))
1952 412 : omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
1953 561232 : *stmt_p = fold_convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
1954 561232 : *walk_subtrees = 0;
1955 : }
1956 : break;
1957 :
1958 48963128 : case RETURN_EXPR:
1959 48963128 : if (TREE_OPERAND (stmt, 0))
1960 : {
1961 47782387 : if (error_operand_p (TREE_OPERAND (stmt, 0))
1962 47782387 : && warn_return_type)
1963 : /* Suppress -Wreturn-type for this function. */
1964 12 : suppress_warning (current_function_decl, OPT_Wreturn_type);
1965 :
1966 47782387 : if (is_invisiref_parm (TREE_OPERAND (stmt, 0)))
1967 : /* Don't dereference an invisiref RESULT_DECL inside a
1968 : RETURN_EXPR. */
1969 560 : *walk_subtrees = 0;
1970 47782387 : if (RETURN_EXPR_LOCAL_ADDR_P (stmt))
1971 : {
1972 : /* Don't return the address of a local variable. */
1973 166 : tree *p = &TREE_OPERAND (stmt, 0);
1974 332 : while (TREE_CODE (*p) == COMPOUND_EXPR)
1975 0 : p = &TREE_OPERAND (*p, 0);
1976 166 : if (TREE_CODE (*p) == INIT_EXPR)
1977 : {
1978 166 : tree op = TREE_OPERAND (*p, 1);
1979 166 : tree new_op = build2 (COMPOUND_EXPR, TREE_TYPE (op), op,
1980 166 : build_zero_cst (TREE_TYPE (op)));
1981 166 : TREE_OPERAND (*p, 1) = new_op;
1982 : }
1983 : }
1984 : }
1985 : break;
1986 :
1987 82301 : case OMP_CLAUSE:
1988 82301 : switch (OMP_CLAUSE_CODE (stmt))
1989 : {
1990 2587 : case OMP_CLAUSE_LASTPRIVATE:
1991 : /* Don't dereference an invisiref in OpenMP clauses. */
1992 2587 : if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1993 : {
1994 53 : *walk_subtrees = 0;
1995 53 : if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
1996 48 : cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
1997 : cp_genericize_r, data, NULL);
1998 : }
1999 : break;
2000 2084 : case OMP_CLAUSE_PRIVATE:
2001 : /* Don't dereference an invisiref in OpenMP clauses. */
2002 2084 : if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
2003 8 : *walk_subtrees = 0;
2004 2076 : else if (wtd->omp_ctx != NULL)
2005 : {
2006 : /* Private clause doesn't cause any references to the
2007 : var in outer contexts, avoid calling
2008 : omp_cxx_notice_variable for it. */
2009 584 : struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
2010 584 : wtd->omp_ctx = NULL;
2011 584 : cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
2012 : data, NULL);
2013 584 : wtd->omp_ctx = old;
2014 584 : *walk_subtrees = 0;
2015 : }
2016 : break;
2017 6052 : case OMP_CLAUSE_SHARED:
2018 6052 : case OMP_CLAUSE_FIRSTPRIVATE:
2019 6052 : case OMP_CLAUSE_COPYIN:
2020 6052 : case OMP_CLAUSE_COPYPRIVATE:
2021 6052 : case OMP_CLAUSE_INCLUSIVE:
2022 6052 : case OMP_CLAUSE_EXCLUSIVE:
2023 : /* Don't dereference an invisiref in OpenMP clauses. */
2024 6052 : if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
2025 87 : *walk_subtrees = 0;
2026 : break;
2027 8448 : case OMP_CLAUSE_REDUCTION:
2028 8448 : case OMP_CLAUSE_IN_REDUCTION:
2029 8448 : case OMP_CLAUSE_TASK_REDUCTION:
2030 : /* Don't dereference an invisiref in reduction clause's
2031 : OMP_CLAUSE_DECL either. OMP_CLAUSE_REDUCTION_{INIT,MERGE}
2032 : still needs to be genericized. */
2033 8448 : if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
2034 : {
2035 38 : *walk_subtrees = 0;
2036 38 : if (OMP_CLAUSE_REDUCTION_INIT (stmt))
2037 38 : cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
2038 : cp_genericize_r, data, NULL);
2039 38 : if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
2040 38 : cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
2041 : cp_genericize_r, data, NULL);
2042 : }
2043 : break;
2044 : default:
2045 : break;
2046 : }
2047 : break;
2048 :
2049 : /* Due to the way voidify_wrapper_expr is written, we don't get a chance
2050 : to lower this construct before scanning it, so we need to lower these
2051 : before doing anything else. */
2052 6058193 : case CLEANUP_STMT:
2053 6058193 : *stmt_p = build2_loc (EXPR_LOCATION (stmt),
2054 6058193 : CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
2055 : : TRY_FINALLY_EXPR,
2056 : void_type_node,
2057 6058193 : CLEANUP_BODY (stmt),
2058 6058193 : CLEANUP_EXPR (stmt));
2059 6058193 : break;
2060 :
2061 31369354 : case IF_STMT:
2062 31369354 : genericize_if_stmt (stmt_p);
2063 : /* *stmt_p has changed, tail recurse to handle it again. */
2064 31369354 : return cp_genericize_r (stmt_p, walk_subtrees, data);
2065 :
2066 : /* COND_EXPR might have incompatible types in branches if one or both
2067 : arms are bitfields. Fix it up now. */
2068 25694749 : case COND_EXPR:
2069 25694749 : {
2070 25694749 : tree type_left
2071 25694749 : = (TREE_OPERAND (stmt, 1)
2072 25694749 : ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
2073 : : NULL_TREE);
2074 25694749 : tree type_right
2075 25694749 : = (TREE_OPERAND (stmt, 2)
2076 25694749 : ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
2077 : : NULL_TREE);
2078 25694749 : if (type_left
2079 25694782 : && !useless_type_conversion_p (TREE_TYPE (stmt),
2080 33 : TREE_TYPE (TREE_OPERAND (stmt, 1))))
2081 : {
2082 30 : TREE_OPERAND (stmt, 1)
2083 30 : = fold_convert (type_left, TREE_OPERAND (stmt, 1));
2084 30 : gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
2085 : type_left));
2086 : }
2087 25694749 : if (type_right
2088 25694766 : && !useless_type_conversion_p (TREE_TYPE (stmt),
2089 17 : TREE_TYPE (TREE_OPERAND (stmt, 2))))
2090 : {
2091 17 : TREE_OPERAND (stmt, 2)
2092 17 : = fold_convert (type_right, TREE_OPERAND (stmt, 2));
2093 17 : gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
2094 : type_right));
2095 : }
2096 : }
2097 : break;
2098 :
2099 33512545 : case BIND_EXPR:
2100 33512545 : if (UNLIKELY (wtd->omp_ctx != NULL))
2101 : {
2102 27006 : tree decl;
2103 33294 : for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
2104 6288 : if (VAR_P (decl)
2105 6240 : && !DECL_EXTERNAL (decl)
2106 12528 : && omp_var_to_track (decl))
2107 : {
2108 586 : splay_tree_node n
2109 586 : = splay_tree_lookup (wtd->omp_ctx->variables,
2110 : (splay_tree_key) decl);
2111 586 : if (n == NULL)
2112 586 : splay_tree_insert (wtd->omp_ctx->variables,
2113 : (splay_tree_key) decl,
2114 586 : TREE_STATIC (decl)
2115 : ? OMP_CLAUSE_DEFAULT_SHARED
2116 : : OMP_CLAUSE_DEFAULT_PRIVATE);
2117 : }
2118 : }
2119 33512545 : if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
2120 : {
2121 : /* The point here is to not sanitize static initializers. */
2122 3384 : bool no_sanitize_p = wtd->no_sanitize_p;
2123 3384 : wtd->no_sanitize_p = true;
2124 3384 : for (tree decl = BIND_EXPR_VARS (stmt);
2125 6592 : decl;
2126 3208 : decl = DECL_CHAIN (decl))
2127 3208 : if (VAR_P (decl)
2128 2805 : && TREE_STATIC (decl)
2129 3284 : && DECL_INITIAL (decl))
2130 12 : cp_walk_tree (&DECL_INITIAL (decl), cp_genericize_r, data, NULL);
2131 3384 : wtd->no_sanitize_p = no_sanitize_p;
2132 : }
2133 33512545 : if (flag_reflection)
2134 : /* Wipe consteval-only vars from BIND_EXPR_VARS and BLOCK_VARS. */
2135 418724 : for (tree *p = &BIND_EXPR_VARS (stmt); *p; )
2136 : {
2137 205521 : if (VAR_P (*p) && consteval_only_p (*p))
2138 : {
2139 396 : if (BIND_EXPR_BLOCK (stmt)
2140 396 : && *p == BLOCK_VARS (BIND_EXPR_BLOCK (stmt)))
2141 194 : BLOCK_VARS (BIND_EXPR_BLOCK (stmt)) = DECL_CHAIN (*p);
2142 396 : *p = DECL_CHAIN (*p);
2143 396 : continue;
2144 : }
2145 205125 : p = &DECL_CHAIN (*p);
2146 : }
2147 33512545 : wtd->bind_expr_stack.safe_push (stmt);
2148 33512545 : cp_walk_tree (&BIND_EXPR_BODY (stmt),
2149 : cp_genericize_r, data, NULL);
2150 33512545 : wtd->bind_expr_stack.pop ();
2151 33512545 : break;
2152 :
2153 610 : case ASSERTION_STMT:
2154 610 : case PRECONDITION_STMT:
2155 610 : case POSTCONDITION_STMT:
2156 610 : if (tree check = build_contract_check (stmt))
2157 : {
2158 610 : *stmt_p = check;
2159 610 : return cp_genericize_r (stmt_p, walk_subtrees, data);
2160 : }
2161 : /* If we didn't build a check, replace it with void_node so we don't
2162 : leak contracts into GENERIC. */
2163 0 : *stmt_p = void_node;
2164 0 : *walk_subtrees = 0;
2165 0 : break;
2166 :
2167 87369 : case USING_STMT:
2168 87369 : {
2169 87369 : tree block = NULL_TREE;
2170 :
2171 : /* Get the innermost inclosing GIMPLE_BIND that has a non NULL
2172 : BLOCK, and append an IMPORTED_DECL to its
2173 : BLOCK_VARS chained list. */
2174 87369 : if (wtd->bind_expr_stack.exists ())
2175 : {
2176 87369 : int i;
2177 87369 : for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
2178 87369 : if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
2179 : break;
2180 : }
2181 87369 : if (block)
2182 : {
2183 87369 : tree decl = TREE_OPERAND (stmt, 0);
2184 87369 : gcc_assert (decl);
2185 :
2186 87369 : if (undeduced_auto_decl (decl))
2187 : /* Omit from the GENERIC, the back-end can't handle it. */;
2188 : else
2189 : {
2190 87366 : tree using_directive = make_node (IMPORTED_DECL);
2191 87366 : TREE_TYPE (using_directive) = void_type_node;
2192 87366 : DECL_CONTEXT (using_directive) = current_function_decl;
2193 174732 : DECL_SOURCE_LOCATION (using_directive)
2194 87366 : = cp_expr_loc_or_input_loc (stmt);
2195 :
2196 87366 : IMPORTED_DECL_ASSOCIATED_DECL (using_directive) = decl;
2197 87366 : DECL_CHAIN (using_directive) = BLOCK_VARS (block);
2198 87366 : BLOCK_VARS (block) = using_directive;
2199 : }
2200 : }
2201 : /* The USING_STMT won't appear in GENERIC. */
2202 87369 : *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
2203 87369 : *walk_subtrees = 0;
2204 : }
2205 87369 : break;
2206 :
2207 33133182 : case DECL_EXPR:
2208 33133182 : if (TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
2209 : {
2210 : /* Using decls inside DECL_EXPRs are just dropped on the floor. */
2211 20029 : *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
2212 20029 : *walk_subtrees = 0;
2213 : }
2214 : else
2215 : {
2216 33113153 : tree d = DECL_EXPR_DECL (stmt);
2217 33113153 : if (VAR_P (d))
2218 66224826 : gcc_assert (CP_DECL_THREAD_LOCAL_P (d) == DECL_THREAD_LOCAL_P (d));
2219 : }
2220 : break;
2221 :
2222 11392 : case OMP_PARALLEL:
2223 11392 : case OMP_TASK:
2224 11392 : case OMP_TASKLOOP:
2225 11392 : {
2226 11392 : struct cp_genericize_omp_taskreg omp_ctx;
2227 11392 : tree c, decl;
2228 11392 : splay_tree_node n;
2229 :
2230 11392 : *walk_subtrees = 0;
2231 11392 : cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
2232 11392 : omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
2233 11392 : omp_ctx.default_shared = omp_ctx.is_parallel;
2234 11392 : omp_ctx.outer = wtd->omp_ctx;
2235 11392 : omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
2236 11392 : wtd->omp_ctx = &omp_ctx;
2237 27186 : for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2238 15794 : switch (OMP_CLAUSE_CODE (c))
2239 : {
2240 4788 : case OMP_CLAUSE_SHARED:
2241 4788 : case OMP_CLAUSE_PRIVATE:
2242 4788 : case OMP_CLAUSE_FIRSTPRIVATE:
2243 4788 : case OMP_CLAUSE_LASTPRIVATE:
2244 4788 : decl = OMP_CLAUSE_DECL (c);
2245 4788 : if (decl == error_mark_node || !omp_var_to_track (decl))
2246 : break;
2247 519 : n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
2248 519 : if (n != NULL)
2249 : break;
2250 1020 : splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
2251 510 : OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
2252 : ? OMP_CLAUSE_DEFAULT_SHARED
2253 : : OMP_CLAUSE_DEFAULT_PRIVATE);
2254 510 : if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE && omp_ctx.outer)
2255 91 : omp_cxx_notice_variable (omp_ctx.outer, decl);
2256 : break;
2257 1647 : case OMP_CLAUSE_DEFAULT:
2258 1647 : if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
2259 731 : omp_ctx.default_shared = true;
2260 : default:
2261 : break;
2262 : }
2263 11392 : if (TREE_CODE (stmt) == OMP_TASKLOOP)
2264 1001 : c_genericize_control_stmt (stmt_p, walk_subtrees, data,
2265 : cp_genericize_r, cp_walk_subtrees);
2266 : else
2267 10391 : cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
2268 11392 : wtd->omp_ctx = omp_ctx.outer;
2269 11392 : splay_tree_delete (omp_ctx.variables);
2270 : }
2271 11392 : break;
2272 :
2273 6916 : case OMP_TARGET:
2274 6916 : cfun->has_omp_target = true;
2275 6916 : break;
2276 :
2277 132603 : case TRY_BLOCK:
2278 132603 : {
2279 132603 : *walk_subtrees = 0;
2280 132603 : tree try_block = wtd->try_block;
2281 132603 : wtd->try_block = stmt;
2282 132603 : cp_walk_tree (&TRY_STMTS (stmt), cp_genericize_r, data, NULL);
2283 132603 : wtd->try_block = try_block;
2284 132603 : cp_walk_tree (&TRY_HANDLERS (stmt), cp_genericize_r, data, NULL);
2285 : }
2286 132603 : break;
2287 :
2288 25506887 : case MUST_NOT_THROW_EXPR:
2289 : /* MUST_NOT_THROW_COND might be something else with TM. */
2290 25506887 : if (MUST_NOT_THROW_COND (stmt) == NULL_TREE)
2291 : {
2292 25506869 : *walk_subtrees = 0;
2293 25506869 : tree try_block = wtd->try_block;
2294 25506869 : wtd->try_block = stmt;
2295 25506869 : cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
2296 25506869 : wtd->try_block = try_block;
2297 : }
2298 : break;
2299 :
2300 135510 : case THROW_EXPR:
2301 135510 : {
2302 135510 : location_t loc = location_of (stmt);
2303 135510 : if (warning_suppressed_p (stmt /* What warning? */))
2304 : /* Never mind. */;
2305 40643 : else if (wtd->try_block)
2306 : {
2307 9979 : if (TREE_CODE (wtd->try_block) == MUST_NOT_THROW_EXPR)
2308 : {
2309 18 : auto_diagnostic_group d;
2310 31 : if (warning_at (loc, OPT_Wterminate,
2311 : "%<throw%> will always call %<terminate%>")
2312 10 : && cxx_dialect >= cxx11
2313 36 : && DECL_DESTRUCTOR_P (current_function_decl))
2314 5 : inform (loc, "in C++11 destructors default to %<noexcept%>");
2315 18 : }
2316 : }
2317 : else
2318 : {
2319 103 : if (warn_cxx11_compat && cxx_dialect < cxx11
2320 206 : && DECL_DESTRUCTOR_P (current_function_decl)
2321 1 : && (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl))
2322 : == NULL_TREE)
2323 30665 : && (get_defaulted_eh_spec (current_function_decl)
2324 1 : == empty_except_spec))
2325 1 : warning_at (loc, OPT_Wc__11_compat,
2326 : "in C++11 this %<throw%> will call %<terminate%> "
2327 : "because destructors default to %<noexcept%>");
2328 : }
2329 : }
2330 : break;
2331 :
2332 54134369 : case CONVERT_EXPR:
2333 54134369 : gcc_checking_assert (!AGGREGATE_TYPE_P (TREE_TYPE (stmt)));
2334 54134369 : gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
2335 : break;
2336 :
2337 253416 : case SPACESHIP_EXPR:
2338 253416 : *stmt_p = genericize_spaceship (*stmt_p);
2339 253416 : break;
2340 :
2341 30583 : case PTRMEM_CST:
2342 : /* By the time we get here we're handing off to the back end, so we don't
2343 : need or want to preserve PTRMEM_CST anymore. */
2344 30583 : *stmt_p = cplus_expand_constant (stmt);
2345 30583 : *walk_subtrees = 0;
2346 30583 : break;
2347 :
2348 357462 : case MEM_REF:
2349 : /* For MEM_REF, make sure not to sanitize the second operand even
2350 : if it has reference type. It is just an offset with a type
2351 : holding other information. There is no other processing we
2352 : need to do for INTEGER_CSTs, so just ignore the second argument
2353 : unconditionally. */
2354 357462 : cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
2355 357462 : *walk_subtrees = 0;
2356 357462 : break;
2357 :
2358 148019534 : case NOP_EXPR:
2359 148019534 : *stmt_p = predeclare_vla (*stmt_p);
2360 :
2361 : /* Warn of new allocations that are not big enough for the target
2362 : type. */
2363 148019534 : if (warn_alloc_size
2364 1035662 : && TREE_CODE (TREE_OPERAND (stmt, 0)) == CALL_EXPR
2365 148082728 : && POINTER_TYPE_P (TREE_TYPE (stmt)))
2366 : {
2367 22800 : if (tree fndecl = get_callee_fndecl (TREE_OPERAND (stmt, 0)))
2368 22786 : if (DECL_IS_MALLOC (fndecl))
2369 : {
2370 1087 : tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (fndecl));
2371 1087 : tree alloc_size = lookup_attribute ("alloc_size", attrs);
2372 1087 : if (alloc_size)
2373 1085 : warn_for_alloc_size (EXPR_LOCATION (stmt),
2374 1085 : TREE_TYPE (TREE_TYPE (stmt)),
2375 1085 : TREE_OPERAND (stmt, 0), alloc_size);
2376 : }
2377 : }
2378 :
2379 148019534 : if (!wtd->no_sanitize_p
2380 148019529 : && sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT)
2381 148034716 : && TYPE_REF_P (TREE_TYPE (stmt)))
2382 2338 : ubsan_maybe_instrument_reference (stmt_p);
2383 : break;
2384 :
2385 94181860 : case CALL_EXPR:
2386 94181860 : if (!wtd->no_sanitize_p
2387 94181860 : && sanitize_flags_p ((SANITIZE_NULL
2388 : | SANITIZE_ALIGNMENT | SANITIZE_VPTR)))
2389 : {
2390 15642 : tree fn = CALL_EXPR_FN (stmt);
2391 15642 : if (fn != NULL_TREE
2392 9698 : && !error_operand_p (fn)
2393 9698 : && INDIRECT_TYPE_P (TREE_TYPE (fn))
2394 25340 : && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
2395 : {
2396 5435 : bool is_ctor
2397 5435 : = TREE_CODE (fn) == ADDR_EXPR
2398 5312 : && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
2399 16059 : && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
2400 5435 : if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT))
2401 4824 : ubsan_maybe_instrument_member_call (stmt, is_ctor);
2402 5435 : if (sanitize_flags_p (SANITIZE_VPTR) && !is_ctor)
2403 4604 : cp_ubsan_maybe_instrument_member_call (stmt);
2404 : }
2405 10207 : else if (fn == NULL_TREE
2406 5944 : && CALL_EXPR_IFN (stmt) == IFN_UBSAN_NULL
2407 4874 : && TREE_CODE (CALL_EXPR_ARG (stmt, 0)) == INTEGER_CST
2408 10213 : && TYPE_REF_P (TREE_TYPE (CALL_EXPR_ARG (stmt, 0))))
2409 6 : *walk_subtrees = 0;
2410 : }
2411 : /* Fall through. */
2412 99977724 : case AGGR_INIT_EXPR:
2413 : /* For calls to a multi-versioned function, overload resolution
2414 : returns the function with the highest target priority, that is,
2415 : the version that will checked for dispatching first. If this
2416 : version is inlinable, a direct call to this version can be made
2417 : otherwise the call should go through the dispatcher.
2418 : This is done at multiple_target.cc for target_version semantics. */
2419 99977724 : {
2420 99977724 : tree fn = cp_get_callee_fndecl_nofold (stmt);
2421 99977724 : if (TARGET_HAS_FMV_TARGET_ATTRIBUTE
2422 : && fn
2423 98276114 : && DECL_FUNCTION_VERSIONED (fn)
2424 99977856 : && (current_function_decl == NULL
2425 132 : || !targetm.target_option.can_inline_p
2426 132 : (current_function_decl, fn)))
2427 120 : if (tree dis = get_function_version_dispatcher (fn))
2428 : {
2429 120 : mark_versions_used (dis);
2430 120 : dis = build_address (dis);
2431 120 : if (TREE_CODE (stmt) == CALL_EXPR)
2432 117 : CALL_EXPR_FN (stmt) = dis;
2433 : else
2434 3 : AGGR_INIT_EXPR_FN (stmt) = dis;
2435 : }
2436 : }
2437 : break;
2438 :
2439 27834885 : case TARGET_EXPR:
2440 27834885 : if (TARGET_EXPR_INITIAL (stmt)
2441 27834885 : && TREE_CODE (TARGET_EXPR_INITIAL (stmt)) == CONSTRUCTOR
2442 32013911 : && CONSTRUCTOR_PLACEHOLDER_BOUNDARY (TARGET_EXPR_INITIAL (stmt)))
2443 166 : TARGET_EXPR_NO_ELIDE (stmt) = 1;
2444 : break;
2445 :
2446 660 : case TEMPLATE_ID_EXPR:
2447 660 : gcc_assert (concept_check_p (stmt));
2448 : /* Emit the value of the concept check. */
2449 660 : *stmt_p = evaluate_concept_check (stmt);
2450 660 : walk_subtrees = 0;
2451 660 : break;
2452 :
2453 4187 : case OMP_DISTRIBUTE:
2454 : /* Need to explicitly instantiate copy ctors on class iterators of
2455 : composite distribute parallel for. */
2456 4187 : if (OMP_FOR_INIT (*stmt_p) == NULL_TREE)
2457 : {
2458 3712 : tree *data[4] = { NULL, NULL, NULL, NULL };
2459 3712 : tree inner = walk_tree (&OMP_FOR_BODY (*stmt_p),
2460 : find_combined_omp_for, data, NULL);
2461 3712 : if (inner != NULL_TREE
2462 3678 : && TREE_CODE (inner) == OMP_FOR)
2463 : {
2464 4494 : for (int i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner)); i++)
2465 2829 : if (TREE_VEC_ELT (OMP_FOR_INIT (inner), i)
2466 2813 : && OMP_FOR_ORIG_DECLS (inner)
2467 2813 : && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner),
2468 : i)) == TREE_LIST
2469 2853 : && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner),
2470 : i)))
2471 : {
2472 12 : tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner), i);
2473 : /* Class iterators aren't allowed on OMP_SIMD, so the only
2474 : case we need to solve is distribute parallel for. */
2475 12 : gcc_assert (TREE_CODE (inner) == OMP_FOR
2476 : && data[1]);
2477 12 : tree orig_decl = TREE_PURPOSE (orig);
2478 12 : tree c, cl = NULL_TREE;
2479 12 : for (c = OMP_FOR_CLAUSES (inner);
2480 16 : c; c = OMP_CLAUSE_CHAIN (c))
2481 12 : if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
2482 5 : || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
2483 13 : && OMP_CLAUSE_DECL (c) == orig_decl)
2484 : {
2485 : cl = c;
2486 : break;
2487 : }
2488 12 : if (cl == NULL_TREE)
2489 : {
2490 4 : for (c = OMP_PARALLEL_CLAUSES (*data[1]);
2491 4 : c; c = OMP_CLAUSE_CHAIN (c))
2492 1 : if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
2493 1 : && OMP_CLAUSE_DECL (c) == orig_decl)
2494 : {
2495 : cl = c;
2496 : break;
2497 : }
2498 : }
2499 4 : if (cl)
2500 : {
2501 9 : orig_decl = require_complete_type (orig_decl);
2502 9 : tree inner_type = TREE_TYPE (orig_decl);
2503 9 : if (orig_decl == error_mark_node)
2504 0 : continue;
2505 9 : if (TYPE_REF_P (TREE_TYPE (orig_decl)))
2506 0 : inner_type = TREE_TYPE (inner_type);
2507 :
2508 9 : while (TREE_CODE (inner_type) == ARRAY_TYPE)
2509 0 : inner_type = TREE_TYPE (inner_type);
2510 9 : get_copy_ctor (inner_type, tf_warning_or_error);
2511 : }
2512 : }
2513 : }
2514 : }
2515 : /* FALLTHRU */
2516 :
2517 6913997 : case FOR_STMT:
2518 6913997 : case WHILE_STMT:
2519 6913997 : case DO_STMT:
2520 6913997 : case SWITCH_STMT:
2521 6913997 : case CONTINUE_STMT:
2522 6913997 : case BREAK_STMT:
2523 6913997 : case OMP_FOR:
2524 6913997 : case OMP_SIMD:
2525 6913997 : case OMP_LOOP:
2526 6913997 : case OMP_TILE:
2527 6913997 : case OMP_UNROLL:
2528 6913997 : case OACC_LOOP:
2529 : /* These cases are handled by shared code. */
2530 6913997 : c_genericize_control_stmt (stmt_p, walk_subtrees, data,
2531 : cp_genericize_r, cp_walk_subtrees);
2532 6913997 : break;
2533 :
2534 78658388 : case STATEMENT_LIST:
2535 : /* As above, handled by shared code. */
2536 78658388 : c_genericize_control_stmt (stmt_p, walk_subtrees, data,
2537 : cp_genericize_r, cp_walk_subtrees);
2538 : /* If a statement list is freed as part of genericisation it will be
2539 : pushed onto the top of a statement list cache stack. A subsequent
2540 : action can cause a new statement list to be required - and the one
2541 : just pushed will be returned. If that is marked as visited, it can
2542 : prevent a tail recursion from processing the 'new' statement list,
2543 : so we do not mark statement lists as visited. */
2544 78658388 : return NULL_TREE;
2545 72567 : break;
2546 :
2547 72567 : case BIT_CAST_EXPR:
2548 72567 : *stmt_p = build1_loc (EXPR_LOCATION (stmt), VIEW_CONVERT_EXPR,
2549 72567 : TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
2550 72567 : break;
2551 :
2552 28902087 : case MODIFY_EXPR:
2553 : /* Mark stores to parts of complex automatic non-addressable
2554 : variables as DECL_NOT_GIMPLE_REG_P for -O0. This can't be
2555 : done during gimplification. See PR119120. */
2556 28902087 : if ((TREE_CODE (TREE_OPERAND (stmt, 0)) == REALPART_EXPR
2557 28874955 : || TREE_CODE (TREE_OPERAND (stmt, 0)) == IMAGPART_EXPR)
2558 54275 : && !optimize
2559 462 : && DECL_P (TREE_OPERAND (TREE_OPERAND (stmt, 0), 0))
2560 28902181 : && is_gimple_reg (TREE_OPERAND (TREE_OPERAND (stmt, 0), 0)))
2561 50 : DECL_NOT_GIMPLE_REG_P (TREE_OPERAND (TREE_OPERAND (stmt, 0), 0)) = 1;
2562 : break;
2563 :
2564 1019406721 : default:
2565 1019406721 : if (IS_TYPE_OR_DECL_P (stmt))
2566 314767245 : *walk_subtrees = 0;
2567 : break;
2568 : }
2569 :
2570 1716696037 : p_set->add (*stmt_p);
2571 :
2572 1716696037 : return NULL;
2573 : }
2574 :
2575 : /* Lower C++ front end trees to GENERIC in T_P. */
2576 :
2577 : static void
2578 54687412 : cp_genericize_tree (tree* t_p, bool handle_invisiref_parm_p)
2579 : {
2580 54687412 : struct cp_genericize_data wtd;
2581 :
2582 54687412 : wtd.p_set = new hash_set<tree>;
2583 54687412 : wtd.bind_expr_stack.create (0);
2584 54687412 : wtd.omp_ctx = NULL;
2585 54687412 : wtd.try_block = NULL_TREE;
2586 54687412 : wtd.no_sanitize_p = false;
2587 54687412 : wtd.handle_invisiref_parm_p = handle_invisiref_parm_p;
2588 54687412 : cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
2589 109374824 : delete wtd.p_set;
2590 54687412 : if (sanitize_flags_p (SANITIZE_VPTR))
2591 5680 : cp_ubsan_instrument_member_accesses (t_p);
2592 54687412 : }
2593 :
2594 : /* If a function that should end with a return in non-void
2595 : function doesn't obviously end with return, add ubsan
2596 : instrumentation code to verify it at runtime. If -fsanitize=return
2597 : is not enabled, instrument __builtin_unreachable. */
2598 :
static void
cp_maybe_instrument_return (tree fndecl)
{
  /* Nothing to do for void-returning functions, constructors/destructors
     (which have no user-visible return value), or when the target hook
     says a missing return is not worth warning/instrumenting here.  */
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
      || DECL_CONSTRUCTOR_P (fndecl)
      || DECL_DESTRUCTOR_P (fndecl)
      || !targetm.warn_func_return (fndecl))
    return;

  if (!sanitize_flags_p (SANITIZE_RETURN, fndecl)
      /* Don't add __builtin_unreachable () if not optimizing, it will not
	 improve any optimizations in that case, just break UB code.
	 Don't add it if -fsanitize=unreachable -fno-sanitize=return either,
	 UBSan covers this with ubsan_instrument_return above where sufficient
	 information is provided, while the __builtin_unreachable () below
	 if return sanitization is disabled will just result in hard to
	 understand runtime error without location.  */
      && ((!optimize && !flag_unreachable_traps)
	  || sanitize_flags_p (SANITIZE_UNREACHABLE, fndecl)))
    return;

  /* Walk down to the last real statement of the function body, looking
     through BIND_EXPRs, cleanup wrappers and statement lists, and skipping
     any trailing DEBUG_BEGIN_STMT markers.  */
  tree t = DECL_SAVED_TREE (fndecl);
  while (t)
    {
      switch (TREE_CODE (t))
	{
	case BIND_EXPR:
	  t = BIND_EXPR_BODY (t);
	  continue;
	case TRY_FINALLY_EXPR:
	case CLEANUP_POINT_EXPR:
	  t = TREE_OPERAND (t, 0);
	  continue;
	case STATEMENT_LIST:
	  {
	    tree_stmt_iterator i = tsi_last (t);
	    while (!tsi_end_p (i))
	      {
		tree p = tsi_stmt (i);
		if (TREE_CODE (p) != DEBUG_BEGIN_STMT)
		  break;
		tsi_prev (&i);
	      }
	    if (!tsi_end_p (i))
	      {
		t = tsi_stmt (i);
		continue;
	      }
	  }
	  break;
	case RETURN_EXPR:
	  /* The body already ends in a return; nothing to instrument.  */
	  return;
	default:
	  break;
	}
      break;
    }
  if (t == NULL_TREE)
    return;
  /* Append the instrumentation at the end of the outermost body.  */
  tree *p = &DECL_SAVED_TREE (fndecl);
  if (TREE_CODE (*p) == BIND_EXPR)
    p = &BIND_EXPR_BODY (*p);

  location_t loc = DECL_SOURCE_LOCATION (fndecl);
  if (sanitize_flags_p (SANITIZE_RETURN, fndecl))
    t = ubsan_instrument_return (loc);
  else
    t = build_builtin_unreachable (BUILTINS_LOCATION);

  append_to_statement_list (t, p);
}
2670 :
/* Lower the body of FNDECL from C++ trees to GENERIC: fix up parameters
   and return value passed by invisible reference, then genericize the
   saved tree and hand off to the shared c_genericize.  */

void
cp_genericize (tree fndecl)
{
  tree t;

  /* Fix up the types of parms passed by invisible reference.  */
  for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_TYPE (t)))
      {
	/* If a function's arguments are copied to create a thunk,
	   then DECL_BY_REFERENCE will be set -- but the type of the
	   argument will be a pointer type, so we will never get
	   here.  */
	gcc_assert (!DECL_BY_REFERENCE (t));
	gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
	TREE_TYPE (t) = DECL_ARG_TYPE (t);
	DECL_BY_REFERENCE (t) = 1;
	TREE_ADDRESSABLE (t) = 0;
	relayout_decl (t);
      }

  /* Do the same for the return value.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
    {
      t = DECL_RESULT (fndecl);
      TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
      DECL_BY_REFERENCE (t) = 1;
      TREE_ADDRESSABLE (t) = 0;
      relayout_decl (t);
      if (DECL_NAME (t))
	{
	  /* Adjust DECL_VALUE_EXPR of the original var.  The result decl
	     is named when the named-return-value optimization applied;
	     find the matching user variable in the outermost block and
	     make its value expression dereference the new reference.  */
	  tree outer = outer_curly_brace_block (current_function_decl);
	  tree var;

	  if (outer)
	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
	      if (VAR_P (var)
		  && DECL_NAME (t) == DECL_NAME (var)
		  && DECL_HAS_VALUE_EXPR_P (var)
		  && DECL_VALUE_EXPR (var) == t)
		{
		  tree val = convert_from_reference (t);
		  SET_DECL_VALUE_EXPR (var, val);
		  break;
		}
	}
    }

  /* If we're a clone, the body is already GIMPLE.  */
  if (DECL_CLONED_FUNCTION_P (fndecl))
    return;

  /* Allow cp_genericize calls to be nested.  */
  bc_state_t save_state;
  save_bc_state (&save_state);

  /* We do want to see every occurrence of the parms, so we can't just use
     walk_tree's hash functionality.  */
  cp_genericize_tree (&DECL_SAVED_TREE (fndecl), true);

  cp_maybe_instrument_return (fndecl);

  /* Do everything else.  */
  c_genericize (fndecl);
  restore_bc_state (&save_state);
}
2738 :
2739 : /* Build code to apply FN to each member of ARG1 and ARG2. FN may be
2740 : NULL if there is in fact nothing to do. ARG2 may be null if FN
2741 : actually only takes one argument. */
2742 :
static tree
cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
{
  tree defparm, parm, t;
  int i = 0;		/* Next free slot in ARGARRAY.  */
  int nargs;
  tree *argarray;

  if (fn == NULL)
    return NULL;

  nargs = list_length (DECL_ARGUMENTS (fn));
  argarray = XALLOCAVEC (tree, nargs);

  /* Skip the `this' parameter (and the second object parameter, if any)
     to find the first defaultable parameter.  */
  defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
  if (arg2)
    defparm = TREE_CHAIN (defparm);

  bool is_method = TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE;
  if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
    {
      /* Array operand: build an explicit loop applying FN to each
	 element, using pointer induction variables P1 (and P2).  */
      tree inner_type = TREE_TYPE (arg1);
      tree start1, end1, p1;
      tree start2 = NULL, p2 = NULL;
      tree ret = NULL, lab;

      /* Find the innermost element type and the address of the first
	 element of each array.  */
      start1 = arg1;
      start2 = arg2;
      do
	{
	  inner_type = TREE_TYPE (inner_type);
	  start1 = build4 (ARRAY_REF, inner_type, start1,
			   size_zero_node, NULL, NULL);
	  if (arg2)
	    start2 = build4 (ARRAY_REF, inner_type, start2,
			     size_zero_node, NULL, NULL);
	}
      while (TREE_CODE (inner_type) == ARRAY_TYPE);
      start1 = build_fold_addr_expr_loc (input_location, start1);
      if (arg2)
	start2 = build_fold_addr_expr_loc (input_location, start2);

      /* END1 is the one-past-the-end address of ARG1.  */
      end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
      end1 = fold_build_pointer_plus (start1, end1);

      p1 = create_tmp_var (TREE_TYPE (start1));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
      append_to_statement_list (t, &ret);

      if (arg2)
	{
	  p2 = create_tmp_var (TREE_TYPE (start2));
	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
	  append_to_statement_list (t, &ret);
	}

      /* Loop head label; the back-edge jump is emitted below.  */
      lab = create_artificial_label (input_location);
      t = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (t, &ret);

      argarray[i++] = p1;
      if (arg2)
	argarray[i++] = p2;
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
	   parm = TREE_CHAIN (parm), i++)
	argarray[i] = convert_default_arg (TREE_VALUE (parm),
					   TREE_PURPOSE (parm), fn,
					   i - is_method, tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      if (MAYBE_CLASS_TYPE_P (TREE_TYPE (t)))
	t = build_cplus_new (TREE_TYPE (t), t, tf_warning_or_error);
      t = fold_convert (void_type_node, t);
      t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
      append_to_statement_list (t, &ret);

      /* Advance the induction pointers by one element.  */
      t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
      append_to_statement_list (t, &ret);

      if (arg2)
	{
	  t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
	  append_to_statement_list (t, &ret);
	}

      /* Loop while P1 has not reached END1.  */
      t = build2 (NE_EXPR, boolean_type_node, p1, end1);
      t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
      append_to_statement_list (t, &ret);

      return ret;
    }
  else
    {
      /* Scalar operand: a single call on the addresses of the objects.  */
      argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
      if (arg2)
	argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
	   parm = TREE_CHAIN (parm), i++)
	argarray[i] = convert_default_arg (TREE_VALUE (parm),
					   TREE_PURPOSE (parm), fn,
					   i - is_method, tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      if (MAYBE_CLASS_TYPE_P (TREE_TYPE (t)))
	t = build_cplus_new (TREE_TYPE (t), t, tf_warning_or_error);
      t = fold_convert (void_type_node, t);
      return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
    }
}
2854 :
2855 : /* Return code to initialize DECL with its default constructor, or
2856 : NULL if there's nothing to do. */
2857 :
2858 : tree
2859 42509 : cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
2860 : {
2861 42509 : tree info = CP_OMP_CLAUSE_INFO (clause);
2862 42509 : tree ret = NULL;
2863 :
2864 42509 : if (info)
2865 1392 : ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);
2866 :
2867 42509 : return ret;
2868 : }
2869 :
2870 : /* Return code to initialize DST with a copy constructor from SRC. */
2871 :
2872 : tree
2873 12330 : cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
2874 : {
2875 12330 : tree info = CP_OMP_CLAUSE_INFO (clause);
2876 12330 : tree ret = NULL;
2877 :
2878 12330 : if (info)
2879 283 : ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
2880 283 : if (ret == NULL)
2881 12112 : ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
2882 :
2883 12330 : return ret;
2884 : }
2885 :
2886 : /* Similarly, except use an assignment operator instead. */
2887 :
2888 : tree
2889 12680 : cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
2890 : {
2891 12680 : tree info = CP_OMP_CLAUSE_INFO (clause);
2892 12680 : tree ret = NULL;
2893 :
2894 12680 : if (info)
2895 748 : ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
2896 748 : if (ret == NULL)
2897 11954 : ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
2898 :
2899 12680 : return ret;
2900 : }
2901 :
2902 : /* Return code to destroy DECL. */
2903 :
2904 : tree
2905 62496 : cxx_omp_clause_dtor (tree clause, tree decl)
2906 : {
2907 62496 : tree info = CP_OMP_CLAUSE_INFO (clause);
2908 62496 : tree ret = NULL;
2909 :
2910 62496 : if (info)
2911 1239 : ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);
2912 :
2913 62496 : return ret;
2914 : }
2915 :
2916 : /* True if OpenMP should privatize what this DECL points to rather
2917 : than the DECL itself. */
2918 :
2919 : bool
2920 935456 : cxx_omp_privatize_by_reference (const_tree decl)
2921 : {
2922 935456 : return (TYPE_REF_P (TREE_TYPE (decl))
2923 935456 : || is_invisiref_parm (decl));
2924 : }
2925 :
2926 : /* Return true if DECL is const qualified var having no mutable member. */
bool
cxx_omp_const_qual_no_mutable (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (TYPE_REF_P (type))
    {
      /* Only look through the reference for invisible-reference parms;
	 other references are not predetermined here.  */
      if (!is_invisiref_parm (decl))
	return false;
      type = TREE_TYPE (type);

      if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
	{
	  /* NVR doesn't preserve const qualification of the
	     variable's type.  Look up the user variable of the same
	     name in the outermost block and prefer its const-qualified
	     type if it has one.  */
	  tree outer = outer_curly_brace_block (current_function_decl);
	  tree var;

	  if (outer)
	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
	      if (VAR_P (var)
		  && DECL_NAME (decl) == DECL_NAME (var)
		  && (TYPE_MAIN_VARIANT (type)
		      == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
		{
		  if (TYPE_READONLY (TREE_TYPE (var)))
		    type = TREE_TYPE (var);
		  break;
		}
	}
    }

  if (type == error_mark_node)
    return false;

  /* Variables with const-qualified type having no mutable member
     are predetermined shared.  */
  if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
    return true;

  return false;
}
2968 :
2969 : /* OMP_CLAUSE_DEFAULT_UNSPECIFIED unless OpenMP sharing attribute
2970 : of DECL is predetermined. */
2971 :
2972 : enum omp_clause_default_kind
2973 55342 : cxx_omp_predetermined_sharing_1 (tree decl)
2974 : {
2975 : /* Static data members are predetermined shared. */
2976 55342 : if (TREE_STATIC (decl))
2977 : {
2978 15125 : tree ctx = CP_DECL_CONTEXT (decl);
2979 15125 : if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
2980 : return OMP_CLAUSE_DEFAULT_SHARED;
2981 :
2982 15019 : if (c_omp_predefined_variable (decl))
2983 : return OMP_CLAUSE_DEFAULT_SHARED;
2984 : }
2985 :
2986 : /* this may not be specified in data-sharing clauses, still we need
2987 : to predetermined it firstprivate. */
2988 55191 : if (decl == current_class_ptr)
2989 113 : return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
2990 :
2991 : return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
2992 : }
2993 :
2994 : /* Likewise, but also include the artificial vars. We don't want to
2995 : disallow the artificial vars being mentioned in explicit clauses,
2996 : as we use artificial vars e.g. for loop constructs with random
2997 : access iterators other than pointers, but during gimplification
2998 : we want to treat them as predetermined. */
2999 :
3000 : enum omp_clause_default_kind
3001 34890 : cxx_omp_predetermined_sharing (tree decl)
3002 : {
3003 34890 : enum omp_clause_default_kind ret = cxx_omp_predetermined_sharing_1 (decl);
3004 34890 : if (ret != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
3005 : return ret;
3006 :
3007 : /* Predetermine artificial variables holding integral values, those
3008 : are usually result of gimplify_one_sizepos or SAVE_EXPR
3009 : gimplification. */
3010 34673 : if (VAR_P (decl)
3011 22784 : && DECL_ARTIFICIAL (decl)
3012 6994 : && INTEGRAL_TYPE_P (TREE_TYPE (decl))
3013 35191 : && !(DECL_LANG_SPECIFIC (decl)
3014 2 : && DECL_OMP_PRIVATIZED_MEMBER (decl)))
3015 : return OMP_CLAUSE_DEFAULT_SHARED;
3016 :
3017 : /* Similarly for typeinfo symbols. */
3018 34157 : if (VAR_P (decl) && DECL_ARTIFICIAL (decl) && DECL_TINFO_P (decl))
3019 57 : return OMP_CLAUSE_DEFAULT_SHARED;
3020 :
3021 : return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
3022 : }
3023 :
3024 : enum omp_clause_defaultmap_kind
3025 17367 : cxx_omp_predetermined_mapping (tree decl)
3026 : {
3027 : /* Predetermine artificial variables holding integral values, those
3028 : are usually result of gimplify_one_sizepos or SAVE_EXPR
3029 : gimplification. */
3030 17367 : if (VAR_P (decl)
3031 1637 : && DECL_ARTIFICIAL (decl)
3032 142 : && INTEGRAL_TYPE_P (TREE_TYPE (decl))
3033 17433 : && !(DECL_LANG_SPECIFIC (decl)
3034 6 : && DECL_OMP_PRIVATIZED_MEMBER (decl)))
3035 : return OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE;
3036 :
3037 17301 : if (c_omp_predefined_variable (decl))
3038 12 : return OMP_CLAUSE_DEFAULTMAP_TO;
3039 :
3040 : return OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED;
3041 : }
3042 :
3043 : /* Finalize an implicitly determined clause. */
3044 :
void
cxx_omp_finish_clause (tree c, gimple_seq *, bool /* openacc */)
{
  tree decl, inner_type;
  bool make_shared = false;

  /* Only firstprivate, private, and lastprivate-on-a-loop-IV clauses
     need the special-member-function bookkeeping below.  */
  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE
      && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE
      && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LASTPRIVATE
	  || !OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)))
    return;

  decl = OMP_CLAUSE_DECL (c);
  decl = require_complete_type (decl);
  inner_type = TREE_TYPE (decl);
  /* An incomplete type makes privatization impossible; degrade the
     clause to shared below instead of ICEing later.  */
  if (decl == error_mark_node)
    make_shared = true;
  else if (TYPE_REF_P (TREE_TYPE (decl)))
    inner_type = TREE_TYPE (inner_type);

  /* We're interested in the base element, not arrays.  */
  while (TREE_CODE (inner_type) == ARRAY_TYPE)
    inner_type = TREE_TYPE (inner_type);

  /* Check for special function availability by building a call to one.
     Save the results, because later we won't be in the right context
     for making these queries.  */
  bool first = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE;
  bool last = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE;
  if (!make_shared
      && CLASS_TYPE_P (inner_type)
      && cxx_omp_create_clause_info (c, inner_type, !first, first, last,
				     true))
    make_shared = true;

  if (make_shared)
    {
      /* Rewrite the clause to shared, clearing flags that are only
	 meaningful on clauses created as shared.  */
      OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
      OMP_CLAUSE_SHARED_FIRSTPRIVATE (c) = 0;
      OMP_CLAUSE_SHARED_READONLY (c) = 0;
    }
}
3087 :
3088 : tree
3089 32 : cxx_omp_finish_mapper_clauses (tree clauses)
3090 : {
3091 32 : return finish_omp_clauses (clauses, C_ORT_OMP);
3092 : }
3093 :
3094 : /* Return true if DECL's DECL_VALUE_EXPR (if any) should be
3095 : disregarded in OpenMP construct, because it is going to be
3096 : remapped during OpenMP lowering. SHARED is true if DECL
3097 : is going to be shared, false if it is going to be privatized. */
3098 :
3099 : bool
3100 663248 : cxx_omp_disregard_value_expr (tree decl, bool shared)
3101 : {
3102 663248 : if (shared)
3103 : return false;
3104 400949 : if (VAR_P (decl)
3105 379045 : && DECL_HAS_VALUE_EXPR_P (decl)
3106 9515 : && DECL_ARTIFICIAL (decl)
3107 9004 : && DECL_LANG_SPECIFIC (decl)
3108 408830 : && DECL_OMP_PRIVATIZED_MEMBER (decl))
3109 : return true;
3110 395536 : if (VAR_P (decl) && DECL_CONTEXT (decl) && is_capture_proxy (decl))
3111 : return true;
3112 : return false;
3113 : }
3114 :
3115 : /* Fold any non-ODR-usages of a constant variable in expression X. */
3116 :
static tree
cp_fold_non_odr_use_1 (tree x)
{
  /* Strip component references, conversions, and reference
     dereferences to find the underlying variable; bail out (returning
     X untouched) at anything else.  */
  tree var = x;
  while (!VAR_P (var))
    switch (TREE_CODE (var))
      {
      case ARRAY_REF:
      case BIT_FIELD_REF:
      case COMPONENT_REF:
      case VIEW_CONVERT_EXPR:
      CASE_CONVERT:
	var = TREE_OPERAND (var, 0);
	break;

      case INDIRECT_REF:
	if (REFERENCE_REF_P (var))
	  var = TREE_OPERAND (var, 0);
	else
	  return x;
	break;

      default:
	return x;
      }

  /* Only fold non-volatile constant variables.  */
  if (TREE_THIS_VOLATILE (var)
      || !decl_constant_var_p (var))
    return x;

  /* We mustn't fold std::hardware_destructive_interference_size here
     so that maybe_warn_about_constant_value can complain if it's used
     in a manifestly constant-evaluated context.  */
  if (decl_in_std_namespace_p (var)
      && DECL_NAME (var)
      && id_equal (DECL_NAME (var), "hardware_destructive_interference_size"))
    return x;

  /* Use the folded form only if it actually is a constant.  */
  tree t = maybe_constant_value (x);
  return TREE_CONSTANT (t) ? t : x;
}
3158 :
3159 : /* Fold expression X which is used as an rvalue if RVAL is true. */
3160 :
static tree
cp_fold_maybe_rvalue (tree x, bool rval, fold_flags_t flags)
{
  while (true)
    {
      /* In non-ODR-use-only mode, first replace non-ODR uses of
	 constant variables before the general fold.  */
      if (rval && (flags & ff_only_non_odr))
	x = cp_fold_non_odr_use_1 (x);
      x = cp_fold (x, flags);
      if (rval)
	{
	  x = mark_rvalue_use (x);
	  if (!(flags & ff_only_non_odr)
	      && DECL_P (x) && !TYPE_REF_P (TREE_TYPE (x)))
	    {
	      /* If the decl folded to a constant initializer, loop
		 around to fold that value further.  */
	      tree v = decl_constant_value (x);
	      if (v != x && v != error_mark_node)
		{
		  x = v;
		  continue;
		}
	    }
	}
      break;
    }
  return x;
}
3187 :
3188 : tree
3189 80255079 : cp_fold_maybe_rvalue (tree x, bool rval)
3190 : {
3191 80255079 : return cp_fold_maybe_rvalue (x, rval, ff_none);
3192 : }
3193 :
3194 : /* Fold expression X which is used as an rvalue. */
3195 :
3196 : static tree
3197 294494875 : cp_fold_rvalue (tree x, fold_flags_t flags)
3198 : {
3199 10944046 : return cp_fold_maybe_rvalue (x, true, flags);
3200 : }
3201 :
3202 : tree
3203 400078 : cp_fold_rvalue (tree x)
3204 : {
3205 400078 : return cp_fold_rvalue (x, ff_none);
3206 : }
3207 :
3208 : /* Fold any non-ODR used constants in an expression X which
3209 : is used as an rvalue if RVAL is true. */
3210 :
3211 : tree
3212 875858 : cp_fold_non_odr_use (tree x, bool rval)
3213 : {
3214 875858 : return cp_fold_maybe_rvalue (x, rval, ff_only_non_odr);
3215 : }
3216 :
3217 : /* Perform folding on expression X. */
3218 :
3219 : static tree
3220 307037209 : cp_fully_fold (tree x, mce_value manifestly_const_eval)
3221 : {
3222 307037209 : if (processing_template_decl)
3223 : return x;
3224 : /* FIXME cp_fold ought to be a superset of maybe_constant_value so we don't
3225 : have to call both. */
3226 283150751 : if (cxx_dialect >= cxx11)
3227 : {
3228 282093949 : x = maybe_constant_value (x, /*decl=*/NULL_TREE, manifestly_const_eval);
3229 : /* Sometimes we are given a CONSTRUCTOR but the call above wraps it into
3230 : a TARGET_EXPR; undo that here. */
3231 282093949 : if (TREE_CODE (x) == TARGET_EXPR)
3232 1267086 : x = TARGET_EXPR_INITIAL (x);
3233 280826863 : else if (TREE_CODE (x) == VIEW_CONVERT_EXPR
3234 30335942 : && TREE_CODE (TREE_OPERAND (x, 0)) == CONSTRUCTOR
3235 280827050 : && TREE_TYPE (TREE_OPERAND (x, 0)) == TREE_TYPE (x))
3236 187 : x = TREE_OPERAND (x, 0);
3237 : }
3238 283150751 : fold_flags_t flags = ff_none;
3239 283150751 : if (manifestly_const_eval == mce_false)
3240 51659941 : flags |= ff_mce_false;
3241 283150751 : return cp_fold_rvalue (x, flags);
3242 : }
3243 :
3244 : tree
3245 255377268 : cp_fully_fold (tree x)
3246 : {
3247 255377268 : return cp_fully_fold (x, mce_unknown);
3248 : }
3249 :
3250 : /* Likewise, but also fold recursively, which cp_fully_fold doesn't perform
3251 : in some cases. */
3252 :
tree
cp_fully_fold_init (tree x)
{
  /* Template trees are not folded; they are instantiated first.  */
  if (processing_template_decl)
    return x;
  /* Initializers are never manifestly constant-evaluated here.  */
  x = cp_fully_fold (x, mce_false);
  cp_fold_data data (ff_mce_false);
  if (cxx_dialect >= cxx20)
    {
      /* Handle immediate (consteval) calls first, then reset the
	 visited set before the general recursive fold.  */
      cp_walk_tree (&x, cp_fold_immediate_r, &data, NULL);
      data.pset.empty ();
    }
  cp_walk_tree (&x, cp_fold_r, &data, NULL);
  return x;
}
3268 :
3269 : /* c-common interface to cp_fold. If IN_INIT, this is in a static initializer
3270 : and certain changes are made to the folding done. Or should be (FIXME). We
3271 : never touch maybe_const, as it is only used for the C front-end
3272 : C_MAYBE_CONST_EXPR. */
3273 :
3274 : tree
3275 80255079 : c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/, bool lval)
3276 : {
3277 80255079 : return cp_fold_maybe_rvalue (x, !lval);
3278 : }
3279 :
3280 : static GTY((deletable)) hash_map<tree, tree> *fold_caches[3];
3281 :
3282 : /* Subroutine of cp_fold. Returns which fold cache to use according
3283 : to the given flags. We need multiple caches since the result of
3284 : folding may depend on which flags are used. */
3285 :
3286 : static hash_map<tree, tree> *&
3287 5696608345 : get_fold_cache (fold_flags_t flags)
3288 : {
3289 0 : if (flags & ff_mce_false)
3290 2584745644 : return fold_caches[2];
3291 3111862701 : else if (flags & ff_only_non_odr)
3292 2710705342 : return fold_caches[1];
3293 : else
3294 401157359 : return fold_caches[0];
3295 : }
3296 :
3297 : /* Dispose of the whole FOLD_CACHE. */
3298 :
3299 : void
3300 44312941 : clear_fold_cache (void)
3301 : {
3302 177251764 : for (auto& fold_cache : fold_caches)
3303 132938823 : if (fold_cache != NULL)
3304 161552511 : fold_cache->empty ();
3305 44312941 : }
3306 :
3307 : /* This function tries to fold an expression X.
3308 : To avoid combinatorial explosion, folding results are kept in fold_cache.
3309 : If X is invalid, we don't fold at all.
3310 : For performance reasons we don't cache expressions representing a
3311 : declaration or constant.
3312 : Function returns X or its folded variant. */
3313 :
3314 : static tree
3315 9240980990 : cp_fold (tree x, fold_flags_t flags)
3316 : {
3317 9240980990 : tree op0, op1, op2, op3;
3318 9240980990 : tree org_x = x, r = NULL_TREE;
3319 9240980990 : enum tree_code code;
3320 9240980990 : location_t loc;
3321 9240980990 : bool rval_ops = true;
3322 :
3323 9240980990 : if (!x || x == error_mark_node)
3324 : return x;
3325 :
3326 9234360586 : if (EXPR_P (x) && (!TREE_TYPE (x) || TREE_TYPE (x) == error_mark_node))
3327 : return x;
3328 :
3329 : /* Don't bother to cache DECLs or constants. */
3330 9234085143 : if (DECL_P (x) || CONSTANT_CLASS_P (x))
3331 : return x;
3332 :
3333 5696608345 : auto& fold_cache = get_fold_cache (flags);
3334 5696608345 : if (fold_cache == NULL)
3335 486607 : fold_cache = hash_map<tree, tree>::create_ggc (101);
3336 :
3337 5696608345 : if (tree *cached = fold_cache->get (x))
3338 : {
3339 : /* unshare_expr doesn't recurse into SAVE_EXPRs. If SAVE_EXPR's
3340 : argument has been folded into a tree invariant, make sure it is
3341 : unshared. See PR112727. */
3342 1315977753 : if (TREE_CODE (x) == SAVE_EXPR && *cached != x)
3343 85 : return unshare_expr (*cached);
3344 1315977668 : return *cached;
3345 : }
3346 :
3347 4380630592 : uid_sensitive_constexpr_evaluation_checker c;
3348 :
3349 4380630592 : code = TREE_CODE (x);
3350 4380630592 : switch (code)
3351 : {
3352 254470337 : case CLEANUP_POINT_EXPR:
3353 : /* Strip CLEANUP_POINT_EXPR if the expression doesn't have side
3354 : effects. */
3355 254470337 : r = cp_fold (TREE_OPERAND (x, 0), flags);
3356 254470337 : if (!TREE_SIDE_EFFECTS (r) && !(flags & ff_only_non_odr))
3357 2493533 : x = r;
3358 : break;
3359 :
3360 1886884 : case SIZEOF_EXPR:
3361 1886884 : x = fold_sizeof_expr (x);
3362 1886884 : break;
3363 :
3364 368291992 : case VIEW_CONVERT_EXPR:
3365 368291992 : rval_ops = false;
3366 : /* FALLTHRU */
3367 1185651098 : case NON_LVALUE_EXPR:
3368 1185651098 : CASE_CONVERT:
3369 :
3370 1185651098 : if (VOID_TYPE_P (TREE_TYPE (x)))
3371 : {
3372 : /* This is just to make sure we don't end up with casts to
3373 : void from error_mark_node. If we just return x, then
3374 : cp_fold_r might fold the operand into error_mark_node and
3375 : leave the conversion in the IR. STRIP_USELESS_TYPE_CONVERSION
3376 : during gimplification doesn't like such casts.
3377 : Don't create a new tree if op0 != TREE_OPERAND (x, 0), the
3378 : folding of the operand should be in the caches and if in cp_fold_r
3379 : it will modify it in place. */
3380 114901561 : op0 = cp_fold (TREE_OPERAND (x, 0), flags);
3381 114901561 : if (op0 == error_mark_node)
3382 104 : x = error_mark_node;
3383 : break;
3384 : }
3385 :
3386 1070749537 : loc = EXPR_LOCATION (x);
3387 1070749537 : op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops, flags);
3388 :
3389 1070749537 : if (op0 == error_mark_node)
3390 0 : x = error_mark_node;
3391 1070749537 : else if (flags & ff_only_non_odr)
3392 : {
3393 407566124 : if (op0 != TREE_OPERAND (x, 0))
3394 9028155 : x = build1_loc (loc, code, TREE_TYPE (x), op0);
3395 407566124 : if (code == NOP_EXPR)
3396 211972521 : REINTERPRET_CAST_P (x) = REINTERPRET_CAST_P (org_x);
3397 : }
3398 663183413 : else if (code == CONVERT_EXPR
3399 57836612 : && SCALAR_TYPE_P (TREE_TYPE (x))
3400 721019103 : && op0 != void_node)
3401 : /* During parsing we used convert_to_*_nofold; re-convert now using the
3402 : folding variants, since fold() doesn't do those transformations. */
3403 52703109 : x = fold (convert (TREE_TYPE (x), op0));
3404 610480304 : else if (op0 != TREE_OPERAND (x, 0))
3405 165441132 : x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
3406 : else
3407 445039172 : x = fold (x);
3408 :
3409 : /* Conversion of an out-of-range value has implementation-defined
3410 : behavior; the language considers it different from arithmetic
3411 : overflow, which is undefined. */
3412 1070749537 : if (TREE_CODE (op0) == INTEGER_CST
3413 1070749537 : && TREE_OVERFLOW_P (x) && !TREE_OVERFLOW_P (op0))
3414 44 : TREE_OVERFLOW (x) = false;
3415 :
3416 : break;
3417 :
3418 245 : case EXCESS_PRECISION_EXPR:
3419 245 : op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops, flags);
3420 245 : if (op0 == error_mark_node)
3421 0 : x = error_mark_node;
3422 245 : else if (flags & ff_only_non_odr)
3423 : {
3424 65 : if (op0 != TREE_OPERAND (x, 0))
3425 0 : x = build1_loc (EXPR_LOCATION (x), code, TREE_TYPE (x), op0);
3426 : }
3427 : else
3428 180 : x = fold_convert_loc (EXPR_LOCATION (x), TREE_TYPE (x), op0);
3429 : break;
3430 :
3431 158180119 : case INDIRECT_REF:
3432 : /* We don't need the decltype(auto) obfuscation anymore. */
3433 158180119 : if (REF_PARENTHESIZED_P (x))
3434 : {
3435 718 : tree p = maybe_undo_parenthesized_ref (x);
3436 718 : if (p != x)
3437 0 : return cp_fold (p, flags);
3438 : }
3439 : /* When folding non-ODR usages of constants, we also want to
3440 : remove any constant-initialized references, even when
3441 : used as lvalues. */
3442 158180119 : if ((flags & ff_only_non_odr) && REFERENCE_REF_P (x))
3443 : {
3444 21960693 : op0 = cp_fold_non_odr_use_1 (TREE_OPERAND (x, 0));
3445 21960693 : if (op0 != TREE_OPERAND (x, 0))
3446 1504 : return convert_from_reference (cp_fold (op0, flags));
3447 : }
3448 158178615 : goto unary;
3449 :
3450 389273220 : case ADDR_EXPR:
3451 389273220 : loc = EXPR_LOCATION (x);
3452 389273220 : op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), false, flags);
3453 :
3454 : /* Cope with user tricks that amount to offsetof. */
3455 389273220 : if (op0 != error_mark_node
3456 389273220 : && !FUNC_OR_METHOD_TYPE_P (TREE_TYPE (op0))
3457 545032483 : && !(flags & ff_only_non_odr))
3458 : {
3459 85860581 : tree val = get_base_address (op0);
3460 85860581 : if (val
3461 85860581 : && INDIRECT_REF_P (val)
3462 29853510 : && COMPLETE_TYPE_P (TREE_TYPE (val))
3463 115714001 : && TREE_CONSTANT (TREE_OPERAND (val, 0)))
3464 : {
3465 261 : val = TREE_OPERAND (val, 0);
3466 261 : STRIP_NOPS (val);
3467 261 : val = maybe_constant_value (val);
3468 261 : if (TREE_CODE (val) == INTEGER_CST)
3469 127 : return fold_offsetof (op0, TREE_TYPE (x));
3470 : }
3471 : }
3472 389273093 : goto finish_unary;
3473 :
3474 : case REALPART_EXPR:
3475 : case IMAGPART_EXPR:
3476 201209018 : rval_ops = false;
3477 : /* FALLTHRU */
3478 201209018 : case CONJ_EXPR:
3479 201209018 : case FIX_TRUNC_EXPR:
3480 201209018 : case FLOAT_EXPR:
3481 201209018 : case NEGATE_EXPR:
3482 201209018 : case ABS_EXPR:
3483 201209018 : case ABSU_EXPR:
3484 201209018 : case BIT_NOT_EXPR:
3485 201209018 : case TRUTH_NOT_EXPR:
3486 201209018 : case FIXED_CONVERT_EXPR:
3487 201209018 : unary:
3488 :
3489 201209018 : loc = EXPR_LOCATION (x);
3490 201209018 : op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops, flags);
3491 :
3492 590482111 : finish_unary:
3493 590482111 : if (op0 == error_mark_node)
3494 0 : x = error_mark_node;
3495 590482111 : else if (op0 != TREE_OPERAND (x, 0))
3496 : {
3497 36891666 : if (flags & ff_only_non_odr)
3498 1763505 : x = build1_loc (loc, code, TREE_TYPE (x), op0);
3499 : else
3500 35128161 : x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
3501 36891666 : if (code == INDIRECT_REF
3502 15480853 : && (INDIRECT_REF_P (x) || TREE_CODE (x) == MEM_REF))
3503 : {
3504 15480726 : TREE_READONLY (x) = TREE_READONLY (org_x);
3505 15480726 : TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
3506 15480726 : TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
3507 : }
3508 : }
3509 553590445 : else if (!(flags & ff_only_non_odr))
3510 278084408 : x = fold (x);
3511 :
3512 590482111 : gcc_assert (TREE_CODE (x) != COND_EXPR
3513 : || !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (x, 0))));
3514 : break;
3515 :
3516 308216 : case UNARY_PLUS_EXPR:
3517 308216 : op0 = cp_fold_rvalue (TREE_OPERAND (x, 0), flags);
3518 308216 : if (op0 == error_mark_node)
3519 0 : x = error_mark_node;
3520 308216 : else if (flags & ff_only_non_odr)
3521 : {
3522 121913 : if (op0 != TREE_OPERAND (x, 0))
3523 12 : x = build1_loc (EXPR_LOCATION (x), code, TREE_TYPE (x), op0);
3524 : }
3525 : else
3526 186303 : x = fold_convert (TREE_TYPE (x), op0);
3527 : break;
3528 :
3529 231754551 : case POSTDECREMENT_EXPR:
3530 231754551 : case POSTINCREMENT_EXPR:
3531 231754551 : case INIT_EXPR:
3532 231754551 : case PREDECREMENT_EXPR:
3533 231754551 : case PREINCREMENT_EXPR:
3534 231754551 : case COMPOUND_EXPR:
3535 231754551 : case MODIFY_EXPR:
3536 231754551 : rval_ops = false;
3537 : /* FALLTHRU */
3538 473959590 : case POINTER_PLUS_EXPR:
3539 473959590 : case PLUS_EXPR:
3540 473959590 : case POINTER_DIFF_EXPR:
3541 473959590 : case MINUS_EXPR:
3542 473959590 : case MULT_EXPR:
3543 473959590 : case TRUNC_DIV_EXPR:
3544 473959590 : case CEIL_DIV_EXPR:
3545 473959590 : case FLOOR_DIV_EXPR:
3546 473959590 : case ROUND_DIV_EXPR:
3547 473959590 : case TRUNC_MOD_EXPR:
3548 473959590 : case CEIL_MOD_EXPR:
3549 473959590 : case ROUND_MOD_EXPR:
3550 473959590 : case RDIV_EXPR:
3551 473959590 : case EXACT_DIV_EXPR:
3552 473959590 : case MIN_EXPR:
3553 473959590 : case MAX_EXPR:
3554 473959590 : case LSHIFT_EXPR:
3555 473959590 : case RSHIFT_EXPR:
3556 473959590 : case LROTATE_EXPR:
3557 473959590 : case RROTATE_EXPR:
3558 473959590 : case BIT_AND_EXPR:
3559 473959590 : case BIT_IOR_EXPR:
3560 473959590 : case BIT_XOR_EXPR:
3561 473959590 : case TRUTH_AND_EXPR:
3562 473959590 : case TRUTH_ANDIF_EXPR:
3563 473959590 : case TRUTH_OR_EXPR:
3564 473959590 : case TRUTH_ORIF_EXPR:
3565 473959590 : case TRUTH_XOR_EXPR:
3566 473959590 : case LT_EXPR: case LE_EXPR:
3567 473959590 : case GT_EXPR: case GE_EXPR:
3568 473959590 : case EQ_EXPR: case NE_EXPR:
3569 473959590 : case UNORDERED_EXPR: case ORDERED_EXPR:
3570 473959590 : case UNLT_EXPR: case UNLE_EXPR:
3571 473959590 : case UNGT_EXPR: case UNGE_EXPR:
3572 473959590 : case UNEQ_EXPR: case LTGT_EXPR:
3573 473959590 : case RANGE_EXPR: case COMPLEX_EXPR:
3574 :
3575 473959590 : loc = EXPR_LOCATION (x);
3576 473959590 : op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops, flags);
3577 473959590 : bool clear_decl_read;
3578 473959590 : clear_decl_read = false;
3579 473959590 : if (code == MODIFY_EXPR
3580 66266117 : && (VAR_P (op0) || TREE_CODE (op0) == PARM_DECL)
3581 495342008 : && !DECL_READ_P (op0))
3582 : clear_decl_read = true;
3583 473959590 : op1 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 1),
3584 : code != COMPOUND_EXPR, flags);
3585 473959590 : if (clear_decl_read)
3586 880696 : DECL_READ_P (op0) = 0;
3587 :
3588 473959590 : if (flags & ff_only_non_odr)
3589 : {
3590 207790660 : if (op0 == error_mark_node || op1 == error_mark_node)
3591 24 : x = error_mark_node;
3592 207790636 : else if (op0 != TREE_OPERAND (x, 0) || op1 != TREE_OPERAND (x, 1))
3593 : {
3594 11115398 : if (code == INIT_EXPR && op1 != TREE_OPERAND (x, 1))
3595 4207429 : set_target_expr_eliding (op1);
3596 11115398 : x = build2_loc (loc, code, TREE_TYPE (x), op0, op1);
3597 : }
3598 : break;
3599 : }
3600 :
3601 : /* decltype(nullptr) has only one value, so optimize away all comparisons
3602 : with that type right away, keeping them in the IL causes troubles for
3603 : various optimizations. */
3604 266168930 : if (COMPARISON_CLASS_P (org_x)
3605 46583421 : && TREE_CODE (TREE_TYPE (op0)) == NULLPTR_TYPE
3606 266168957 : && TREE_CODE (TREE_TYPE (op1)) == NULLPTR_TYPE)
3607 : {
3608 27 : switch (code)
3609 : {
3610 12 : case EQ_EXPR:
3611 12 : x = constant_boolean_node (true, TREE_TYPE (x));
3612 12 : break;
3613 15 : case NE_EXPR:
3614 15 : x = constant_boolean_node (false, TREE_TYPE (x));
3615 15 : break;
3616 0 : default:
3617 0 : gcc_unreachable ();
3618 : }
3619 27 : return omit_two_operands_loc (loc, TREE_TYPE (x), x,
3620 27 : op0, op1);
3621 : }
3622 :
3623 266168903 : if (op0 == error_mark_node || op1 == error_mark_node)
3624 102 : x = error_mark_node;
3625 266168801 : else if (op0 != TREE_OPERAND (x, 0) || op1 != TREE_OPERAND (x, 1))
3626 181885438 : x = fold_build2_loc (loc, code, TREE_TYPE (x), op0, op1);
3627 : else
3628 84283363 : x = fold (x);
3629 :
3630 : /* This is only needed for -Wnonnull-compare and only if
3631 : TREE_NO_WARNING (org_x), but to avoid that option affecting code
3632 : generation, we do it always. */
3633 266168903 : if (COMPARISON_CLASS_P (org_x))
3634 : {
3635 46583394 : if (x == error_mark_node || TREE_CODE (x) == INTEGER_CST)
3636 : ;
3637 45350125 : else if (COMPARISON_CLASS_P (x))
3638 : {
3639 44315530 : if (warn_nonnull_compare
3640 44315530 : && warning_suppressed_p (org_x, OPT_Wnonnull_compare))
3641 122775 : suppress_warning (x, OPT_Wnonnull_compare);
3642 : }
3643 : /* Otherwise give up on optimizing these, let GIMPLE folders
3644 : optimize those later on. */
3645 1034595 : else if (op0 != TREE_OPERAND (org_x, 0)
3646 1034595 : || op1 != TREE_OPERAND (org_x, 1))
3647 : {
3648 1033369 : x = build2_loc (loc, code, TREE_TYPE (org_x), op0, op1);
3649 1033369 : if (warn_nonnull_compare
3650 1033369 : && warning_suppressed_p (org_x, OPT_Wnonnull_compare))
3651 16 : suppress_warning (x, OPT_Wnonnull_compare);
3652 : }
3653 : else
3654 1226 : x = org_x;
3655 : }
3656 :
3657 : break;
3658 :
3659 10635830 : case VEC_COND_EXPR:
3660 10635830 : case COND_EXPR:
3661 10635830 : loc = EXPR_LOCATION (x);
3662 10635830 : op0 = cp_fold_rvalue (TREE_OPERAND (x, 0), flags);
3663 10635830 : op1 = cp_fold (TREE_OPERAND (x, 1), flags);
3664 10635830 : op2 = cp_fold (TREE_OPERAND (x, 2), flags);
3665 :
3666 10635830 : if (flags & ff_only_non_odr)
3667 : {
3668 4393354 : if (op0 == error_mark_node
3669 4393348 : || op1 == error_mark_node
3670 4393348 : || op2 == error_mark_node)
3671 6 : x = error_mark_node;
3672 4393348 : else if (op0 != TREE_OPERAND (x, 0)
3673 4320637 : || op1 != TREE_OPERAND (x, 1)
3674 8505648 : || op2 != TREE_OPERAND (x, 2))
3675 327188 : x = build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
3676 : break;
3677 : }
3678 :
3679 6242476 : if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
3680 : {
3681 19486 : warning_sentinel s (warn_int_in_bool_context);
3682 19486 : if (!VOID_TYPE_P (TREE_TYPE (op1)))
3683 19486 : op1 = cp_truthvalue_conversion (op1, tf_warning_or_error);
3684 19486 : if (!VOID_TYPE_P (TREE_TYPE (op2)))
3685 19465 : op2 = cp_truthvalue_conversion (op2, tf_warning_or_error);
3686 19486 : }
3687 6222990 : else if (VOID_TYPE_P (TREE_TYPE (x)))
3688 : {
3689 1803775 : if (TREE_CODE (op0) == INTEGER_CST)
3690 : {
3691 : /* If the condition is constant, fold can fold away
3692 : the COND_EXPR. If some statement-level uses of COND_EXPR
3693 : have one of the branches NULL, avoid folding crash. */
3694 275900 : if (!op1)
3695 0 : op1 = build_empty_stmt (loc);
3696 275900 : if (!op2)
3697 12 : op2 = build_empty_stmt (loc);
3698 : }
3699 : else
3700 : {
3701 : /* Otherwise, don't bother folding a void condition, since
3702 : it can't produce a constant value. */
3703 1527875 : if (op0 != TREE_OPERAND (x, 0)
3704 1429554 : || op1 != TREE_OPERAND (x, 1)
3705 2627753 : || op2 != TREE_OPERAND (x, 2))
3706 430237 : x = build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
3707 : break;
3708 : }
3709 : }
3710 :
3711 4714601 : if (op0 == error_mark_node
3712 4714601 : || op1 == error_mark_node
3713 4714587 : || op2 == error_mark_node)
3714 62 : x = error_mark_node;
3715 4714539 : else if (op0 != TREE_OPERAND (x, 0)
3716 1957964 : || op1 != TREE_OPERAND (x, 1)
3717 6208361 : || op2 != TREE_OPERAND (x, 2))
3718 3321608 : x = fold_build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
3719 : else
3720 1392931 : x = fold (x);
3721 :
3722 : /* A COND_EXPR might have incompatible types in branches if one or both
3723 : arms are bitfields. If folding exposed such a branch, fix it up. */
3724 4714601 : if (TREE_CODE (x) != code
3725 861412 : && x != error_mark_node
3726 5575951 : && !useless_type_conversion_p (TREE_TYPE (org_x), TREE_TYPE (x)))
3727 16700 : x = fold_convert (TREE_TYPE (org_x), x);
3728 :
3729 : break;
3730 :
3731 270437974 : case CALL_EXPR:
3732 270437974 : {
3733 270437974 : tree callee = get_callee_fndecl (x);
3734 :
3735 : /* "Inline" calls to std::move/forward and other cast-like functions
3736 : by simply folding them into a corresponding cast to their return
3737 : type. This is cheaper than relying on the middle end to do so, and
3738 : also means we avoid generating useless debug info for them at all.
3739 :
3740 : At this point the argument has already been converted into a
3741 : reference, so it suffices to use a NOP_EXPR to express the
3742 : cast. */
3743 270437974 : if ((OPTION_SET_P (flag_fold_simple_inlines)
3744 270437974 : ? flag_fold_simple_inlines
3745 270437720 : : !flag_no_inline)
3746 261381990 : && call_expr_nargs (x) == 1
3747 135095223 : && decl_in_std_namespace_p (callee)
3748 84993661 : && DECL_NAME (callee) != NULL_TREE
3749 355431635 : && (id_equal (DECL_NAME (callee), "move")
3750 83199059 : || id_equal (DECL_NAME (callee), "forward")
3751 80515357 : || id_equal (DECL_NAME (callee), "forward_like")
3752 80515295 : || id_equal (DECL_NAME (callee), "addressof")
3753 : /* This addressof equivalent is used heavily in libstdc++. */
3754 80198689 : || id_equal (DECL_NAME (callee), "__addressof")
3755 79789766 : || id_equal (DECL_NAME (callee), "to_underlying")
3756 79789762 : || id_equal (DECL_NAME (callee), "as_const")))
3757 : {
3758 5205526 : r = CALL_EXPR_ARG (x, 0);
3759 : /* These type-checks must be performed here, because invalid
3760 : definitions of these functions could fail to ensure those and
3761 : build_nop could misbehave. See PR122185. */
3762 5205526 : if (id_equal (DECL_NAME (callee), "to_underlying")
3763 5205526 : ? TREE_CODE (TREE_TYPE (r)) == ENUMERAL_TYPE
3764 4 : && INTEGRAL_TYPE_P (TREE_TYPE (x))
3765 9685515 : : INDIRECT_TYPE_P (TREE_TYPE (x))
3766 9685515 : && INDIRECT_TYPE_P (TREE_TYPE (r)))
3767 : {
3768 5205520 : r = build_nop (TREE_TYPE (x), r);
3769 5205520 : x = cp_fold (r, flags);
3770 : }
3771 : break;
3772 : }
3773 :
3774 265232448 : int sv = optimize, nw = sv;
3775 :
3776 : /* Some built-in function calls will be evaluated at compile-time in
3777 : fold (). Set optimize to 1 when folding __builtin_constant_p inside
3778 : a constexpr function so that fold_builtin_1 doesn't fold it to 0. */
3779 261800399 : if (callee && fndecl_built_in_p (callee) && !optimize
3780 1767547 : && DECL_IS_BUILTIN_CONSTANT_P (callee)
3781 33184 : && current_function_decl
3782 265265616 : && DECL_DECLARED_CONSTEXPR_P (current_function_decl))
3783 : nw = 1;
3784 :
3785 261800399 : if (callee && !(flags & ff_only_non_odr)
3786 415401953 : && fndecl_built_in_p (callee, BUILT_IN_FRONTEND))
3787 : {
3788 55449 : iloc_sentinel ils (EXPR_LOCATION (x));
3789 55449 : switch (DECL_FE_FUNCTION_CODE (callee))
3790 : {
3791 52824 : case CP_BUILT_IN_IS_CONSTANT_EVALUATED:
3792 : /* Defer folding __builtin_is_constant_evaluated unless
3793 : we know this isn't a manifestly constant-evaluated
3794 : context. */
3795 52824 : if (flags & ff_mce_false)
3796 26791 : x = boolean_false_node;
3797 : break;
3798 3 : case CP_BUILT_IN_SOURCE_LOCATION:
3799 3 : x = fold_builtin_source_location (x);
3800 3 : break;
3801 456 : case CP_BUILT_IN_IS_CORRESPONDING_MEMBER:
3802 912 : x = fold_builtin_is_corresponding_member
3803 456 : (EXPR_LOCATION (x), call_expr_nargs (x),
3804 : &CALL_EXPR_ARG (x, 0));
3805 456 : break;
3806 400 : case CP_BUILT_IN_IS_POINTER_INTERCONVERTIBLE_WITH_CLASS:
3807 800 : x = fold_builtin_is_pointer_inverconvertible_with_class
3808 400 : (EXPR_LOCATION (x), call_expr_nargs (x),
3809 : &CALL_EXPR_ARG (x, 0));
3810 400 : break;
3811 : default:
3812 : break;
3813 : }
3814 55449 : break;
3815 55449 : }
3816 :
3817 265176999 : bool changed = false;
3818 265176999 : int m = call_expr_nargs (x);
3819 667193156 : for (int i = 0; i < m; i++)
3820 : {
3821 402016157 : r = cp_fold (CALL_EXPR_ARG (x, i), flags);
3822 402016157 : if (r != CALL_EXPR_ARG (x, i))
3823 : {
3824 126542435 : if (r == error_mark_node)
3825 : {
3826 0 : x = error_mark_node;
3827 0 : break;
3828 : }
3829 126542435 : if (!changed)
3830 82399601 : x = copy_node (x);
3831 126542435 : CALL_EXPR_ARG (x, i) = r;
3832 126542435 : changed = true;
3833 : }
3834 : }
3835 : /* Don't fold away the function entirely if we're just folding
3836 : non-ODR-used variables. */
3837 265176999 : if (x == error_mark_node || (flags & ff_only_non_odr))
3838 : break;
3839 :
3840 152031694 : optimize = nw;
3841 152031694 : r = fold (x);
3842 152031694 : optimize = sv;
3843 :
3844 152031694 : if (TREE_CODE (r) != CALL_EXPR)
3845 : {
3846 2922612 : x = cp_fold (r, flags);
3847 2922612 : break;
3848 : }
3849 :
3850 149109082 : optimize = nw;
3851 :
3852 : /* Invoke maybe_constant_value for functions declared
3853 : constexpr and not called with AGGR_INIT_EXPRs.
3854 : TODO:
3855 : Do constexpr expansion of expressions where the call itself is not
3856 : constant, but the call followed by an INDIRECT_REF is. */
3857 147191444 : if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
3858 226412193 : && (!flag_no_inline
3859 2310663 : || lookup_attribute ("always_inline",
3860 2310663 : DECL_ATTRIBUTES (callee))))
3861 : {
3862 75497982 : mce_value manifestly_const_eval = mce_unknown;
3863 75497982 : if (flags & ff_mce_false)
3864 : /* Allow folding __builtin_is_constant_evaluated to false during
3865 : constexpr evaluation of this call. */
3866 59227811 : manifestly_const_eval = mce_false;
3867 75497982 : r = maybe_constant_value (x, /*decl=*/NULL_TREE,
3868 : manifestly_const_eval);
3869 : }
3870 149109082 : optimize = sv;
3871 :
3872 149109082 : if (TREE_CODE (r) != CALL_EXPR)
3873 : {
3874 8553636 : if (DECL_CONSTRUCTOR_P (callee))
3875 356 : r = cp_build_init_expr_for_ctor (x, r);
3876 4276818 : x = r;
3877 4276818 : break;
3878 : }
3879 :
3880 : break;
3881 : }
3882 :
3883 30054176 : case CONSTRUCTOR:
3884 30054176 : {
3885 30054176 : unsigned i;
3886 30054176 : constructor_elt *p;
3887 30054176 : vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (x);
3888 30054176 : vec<constructor_elt, va_gc> *nelts = NULL;
3889 124327723 : FOR_EACH_VEC_SAFE_ELT (elts, i, p)
3890 : {
3891 94273547 : tree op = cp_fold (p->value, flags);
3892 94273547 : if (op == error_mark_node)
3893 : {
3894 0 : x = error_mark_node;
3895 0 : vec_free (nelts);
3896 : break;
3897 : }
3898 94273547 : else if (op != p->value)
3899 : {
3900 2011483 : if (nelts == NULL)
3901 1620422 : nelts = elts->copy ();
3902 2011483 : (*nelts)[i].value = op;
3903 : }
3904 : }
3905 30054176 : if (nelts)
3906 : {
3907 1620422 : x = build_constructor (TREE_TYPE (x), nelts);
3908 1620422 : CONSTRUCTOR_PLACEHOLDER_BOUNDARY (x)
3909 1620422 : = CONSTRUCTOR_PLACEHOLDER_BOUNDARY (org_x);
3910 1620422 : CONSTRUCTOR_MUTABLE_POISON (x)
3911 3240844 : = CONSTRUCTOR_MUTABLE_POISON (org_x);
3912 : }
3913 30054176 : if (VECTOR_TYPE_P (TREE_TYPE (x)))
3914 71311 : x = fold (x);
3915 : break;
3916 : }
3917 2767965 : case TREE_VEC:
3918 2767965 : {
3919 2767965 : bool changed = false;
3920 2767965 : int n = TREE_VEC_LENGTH (x);
3921 :
3922 6700958 : for (int i = 0; i < n; i++)
3923 : {
3924 3932993 : tree op = cp_fold (TREE_VEC_ELT (x, i), flags);
3925 3932993 : if (op != TREE_VEC_ELT (x, i))
3926 : {
3927 864 : if (!changed)
3928 821 : x = copy_node (x);
3929 864 : TREE_VEC_ELT (x, i) = op;
3930 864 : changed = true;
3931 : }
3932 : }
3933 : }
3934 :
3935 : break;
3936 :
3937 3297582 : case ARRAY_REF:
3938 3297582 : case ARRAY_RANGE_REF:
3939 :
3940 3297582 : loc = EXPR_LOCATION (x);
3941 3297582 : op0 = cp_fold (TREE_OPERAND (x, 0), flags);
3942 3297582 : op1 = cp_fold (TREE_OPERAND (x, 1), flags);
3943 3297582 : op2 = cp_fold (TREE_OPERAND (x, 2), flags);
3944 3297582 : op3 = cp_fold (TREE_OPERAND (x, 3), flags);
3945 :
3946 3297582 : if (op0 == error_mark_node
3947 3297582 : || op1 == error_mark_node
3948 3297582 : || op2 == error_mark_node
3949 3297582 : || op3 == error_mark_node)
3950 0 : x = error_mark_node;
3951 3297582 : else if (op0 != TREE_OPERAND (x, 0)
3952 2309344 : || op1 != TREE_OPERAND (x, 1)
3953 1748642 : || op2 != TREE_OPERAND (x, 2)
3954 5046224 : || op3 != TREE_OPERAND (x, 3))
3955 : {
3956 1548940 : x = build4_loc (loc, code, TREE_TYPE (x), op0, op1, op2, op3);
3957 1548940 : TREE_READONLY (x) = TREE_READONLY (org_x);
3958 1548940 : TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
3959 1548940 : TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
3960 : }
3961 :
3962 3297582 : if (!(flags & ff_only_non_odr))
3963 1922540 : x = fold (x);
3964 : break;
3965 :
3966 1709469 : case SAVE_EXPR:
3967 : /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
3968 : folding, evaluates to an invariant. In that case no need to wrap
3969 : this folded tree with a SAVE_EXPR. */
3970 1709469 : r = cp_fold (TREE_OPERAND (x, 0), flags);
3971 1709469 : if (tree_invariant_p (r))
3972 57 : x = r;
3973 : break;
3974 :
3975 10 : case REQUIRES_EXPR:
3976 10 : x = evaluate_requires_expr (x);
3977 10 : break;
3978 :
3979 : default:
3980 : return org_x;
3981 : }
3982 :
3983 2825661460 : if (EXPR_P (x) && TREE_CODE (x) == code)
3984 : {
3985 2369196122 : TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
3986 2369196122 : copy_warning (x, org_x);
3987 : }
3988 :
3989 2825661460 : if (!c.evaluation_restricted_p ())
3990 : {
3991 2825601673 : fold_cache->put (org_x, x);
3992 : /* Prevent that we try to fold an already folded result again. */
3993 2825601673 : if (x != org_x)
3994 867210264 : fold_cache->put (x, x);
3995 : }
3996 :
3997 : return x;
3998 : }
3999 :
4000 : /* Look up "hot", "cold", "likely" or "unlikely" in attribute list LIST. */
4001 :
4002 : tree
4003 291393778 : lookup_hotness_attribute (tree list)
4004 : {
4005 291487394 : for (; list; list = TREE_CHAIN (list))
4006 : {
4007 1843872 : tree name = get_attribute_name (list);
4008 1843872 : if ((is_attribute_p ("hot", name)
4009 1843872 : || is_attribute_p ("cold", name)
4010 1843869 : || is_attribute_p ("likely", name)
4011 1296357 : || is_attribute_p ("unlikely", name))
4012 3594137 : && is_attribute_namespace_p ("", list))
4013 : break;
4014 : }
4015 291393778 : return list;
4016 : }
4017 :
4018 : /* Remove "hot", "cold", "likely" and "unlikely" attributes from LIST. */
4019 :
4020 : static tree
4021 1750247 : remove_hotness_attribute (tree list)
4022 : {
4023 3500512 : for (tree *p = &list; *p; )
4024 : {
4025 1750265 : tree l = *p;
4026 1750265 : tree name = get_attribute_name (l);
4027 1750265 : if ((is_attribute_p ("hot", name)
4028 1750265 : || is_attribute_p ("cold", name)
4029 1750262 : || is_attribute_p ("likely", name)
4030 1202750 : || is_attribute_p ("unlikely", name))
4031 3500530 : && is_attribute_namespace_p ("", l))
4032 : {
4033 1750256 : *p = TREE_CHAIN (l);
4034 1750256 : continue;
4035 : }
4036 9 : p = &TREE_CHAIN (l);
4037 : }
4038 1750247 : return list;
4039 : }
4040 :
4041 : /* If [[likely]] or [[unlikely]] appear on this statement, turn it into a
4042 : PREDICT_EXPR. */
4043 :
4044 : tree
4045 289643549 : process_stmt_hotness_attribute (tree std_attrs, location_t attrs_loc)
4046 : {
4047 289643549 : if (std_attrs == error_mark_node)
4048 : return std_attrs;
4049 289643531 : if (tree attr = lookup_hotness_attribute (std_attrs))
4050 : {
4051 1750247 : tree name = get_attribute_name (attr);
4052 1750247 : bool hot = (is_attribute_p ("hot", name)
4053 1750247 : || is_attribute_p ("likely", name));
4054 1750247 : tree pred = build_predict_expr (hot ? PRED_HOT_LABEL : PRED_COLD_LABEL,
4055 : hot ? TAKEN : NOT_TAKEN);
4056 1750247 : SET_EXPR_LOCATION (pred, attrs_loc);
4057 1750247 : add_stmt (pred);
4058 1750247 : if (tree other = lookup_hotness_attribute (TREE_CHAIN (attr)))
4059 : {
4060 9 : auto_urlify_attributes sentinel;
4061 9 : warning (OPT_Wattributes, "ignoring attribute %qE after earlier %qE",
4062 : get_attribute_name (other), name);
4063 9 : }
4064 1750247 : std_attrs = remove_hotness_attribute (std_attrs);
4065 : }
4066 : return std_attrs;
4067 : }
4068 :
4069 : /* Build IFN_ASSUME internal call for assume condition ARG. */
4070 :
4071 : tree
4072 10790 : build_assume_call (location_t loc, tree arg)
4073 : {
4074 10790 : if (!processing_template_decl)
4075 10719 : arg = fold_build_cleanup_point_expr (TREE_TYPE (arg), arg);
4076 10790 : return build_call_expr_internal_loc (loc, IFN_ASSUME, void_type_node,
4077 10790 : 1, arg);
4078 : }
4079 :
/* If [[assume (cond)]] appears on this statement, handle it: validate each
   "assume" attribute found in STD_ATTRS (diagnosing pack expansions and
   wrong argument counts), convert the condition contextually to bool, and
   emit an IFN_ASSUME call for it as a statement.  STATEMENT is non-null
   when the attribute was not immediately followed by ';', which is
   diagnosed and suppresses the handling.  ATTRS_LOC is the location used
   for all diagnostics and for the emitted calls.  Returns STD_ATTRS with
   the "assume" attributes removed.  */

tree
process_stmt_assume_attribute (tree std_attrs, tree statement,
			       location_t attrs_loc)
{
  if (std_attrs == error_mark_node)
    return std_attrs;
  tree attr = lookup_attribute ("gnu", "assume", std_attrs);
  if (!attr)
    return std_attrs;
  /* The next token after the assume attribute is not ';'.  */
  if (statement)
    {
      warning_at (attrs_loc, OPT_Wattributes,
		  "%<assume%> attribute not followed by %<;%>");
      /* Clearing ATTR skips the loop below but still strips the
	 attributes at the end.  */
      attr = NULL_TREE;
    }
  /* There may be several assume attributes on one statement; process
     each in turn.  */
  for (; attr; attr = lookup_attribute ("gnu", "assume", TREE_CHAIN (attr)))
    {
      tree args = TREE_VALUE (attr);
      if (args && PACK_EXPANSION_P (args))
	{
	  /* [[assume (conds...)]] is ill-formed; suggest a fold
	     expression where the dialect allows one.  */
	  auto_diagnostic_group d;
	  error_at (attrs_loc, "pack expansion of %qE attribute",
		    get_attribute_name (attr));
	  if (cxx_dialect >= cxx17)
	    inform (attrs_loc, "use fold expression in the attribute "
		    "argument instead");
	  continue;
	}
      int nargs = list_length (args);
      if (nargs != 1)
	{
	  auto_diagnostic_group d;
	  error_at (attrs_loc, "wrong number of arguments specified for "
		    "%qE attribute", get_attribute_name (attr));
	  inform (attrs_loc, "expected %i, found %i", 1, nargs);
	}
      else
	{
	  tree arg = TREE_VALUE (args);
	  /* Contextually convert to bool now unless the condition is
	     still type-dependent (inside a template).  */
	  if (!type_dependent_expression_p (arg))
	    arg = contextual_conv_bool (arg, tf_warning_or_error);
	  if (error_operand_p (arg))
	    continue;
	  finish_expr_stmt (build_assume_call (attrs_loc, arg));
	}
    }
  return remove_attribute ("gnu", "assume", std_attrs);
}
4131 :
/* Return the type std::source_location::__impl after performing
   verification on it: it must be a class type whose non-static data
   members are exactly _M_file_name and _M_function_name (both
   'const char *') and _M_line and _M_column (both of integral type).
   On any failure an error is emitted and error_mark_node is returned;
   otherwise the const-qualified type is returned.  */

tree
get_source_location_impl_type ()
{
  /* Look up ::std::source_location; it must name a type.  */
  tree name = get_identifier ("source_location");
  tree decl = lookup_qualified_name (std_node, name);
  if (TREE_CODE (decl) != TYPE_DECL)
    {
      auto_diagnostic_group d;
      if (decl == error_mark_node || TREE_CODE (decl) == TREE_LIST)
	qualified_name_lookup_error (std_node, name, decl, input_location);
      else
	error ("%qD is not a type", decl);
      return error_mark_node;
    }
  /* Then its nested member __impl, which must also name a type.  */
  name = get_identifier ("__impl");
  tree type = TREE_TYPE (decl);
  decl = lookup_qualified_name (type, name);
  if (TREE_CODE (decl) != TYPE_DECL)
    {
      auto_diagnostic_group d;
      if (decl == error_mark_node || TREE_CODE (decl) == TREE_LIST)
	qualified_name_lookup_error (type, name, decl, input_location);
      else
	error ("%qD is not a type", decl);
      return error_mark_node;
    }
  type = TREE_TYPE (decl);
  if (TREE_CODE (type) != RECORD_TYPE)
    {
      error ("%qD is not a class type", decl);
      return error_mark_node;
    }

  /* Walk the aggregate fields, counting the four expected members and
     type-checking each.  CNT must end up exactly 4.  */
  int cnt = 0;
  for (tree field = TYPE_FIELDS (type);
       (field = next_aggregate_field (field)) != NULL_TREE;
       field = DECL_CHAIN (field))
    {
      if (DECL_NAME (field) != NULL_TREE)
	{
	  const char *n = IDENTIFIER_POINTER (DECL_NAME (field));
	  if (strcmp (n, "_M_file_name") == 0
	      || strcmp (n, "_M_function_name") == 0)
	    {
	      if (TREE_TYPE (field) != const_string_type_node)
		{
		  error ("%qD does not have %<const char *%> type", field);
		  return error_mark_node;
		}
	      cnt++;
	      continue;
	    }
	  else if (strcmp (n, "_M_line") == 0 || strcmp (n, "_M_column") == 0)
	    {
	      if (TREE_CODE (TREE_TYPE (field)) != INTEGER_TYPE)
		{
		  error ("%qD does not have integral type", field);
		  return error_mark_node;
		}
	      cnt++;
	      continue;
	    }
	}
      /* Any unnamed or unexpected field invalidates the type; reset the
	 count so the check below fails and diagnoses it.  */
      cnt = 0;
      break;
    }
  if (cnt != 4)
    {
      error ("%<std::source_location::__impl%> does not contain only "
	     "non-static data members %<_M_file_name%>, "
	     "%<_M_function_name%>, %<_M_line%> and %<_M_column%>");
      return error_mark_node;
    }
  return build_qualified_type (type, TYPE_QUAL_CONST);
}
4210 :
/* Type for source_location_table hash_set.  One entry per distinct
   (location, function) pair for which a std::source_location::__impl
   object has been created.  */
struct GTY((for_user)) source_location_table_entry {
  /* Location of the __builtin_source_location call, resolved to its
     macro expansion point.  */
  location_t loc;
  /* DECL_UID of the enclosing function, or -1U when there is none.  */
  unsigned uid;
  /* The static VAR_DECL holding the built __impl object.  */
  tree var;
};
4217 :
/* Hashing traits for the source_location_table hash table below.
   Entries are keyed on the (loc, uid) pair; VAR is payload only and does
   not participate in hashing or comparison, but it distinguishes the
   deleted marker (uid == -1U, var == NULL) from live entries.  */

struct source_location_table_entry_hash
  : ggc_remove <source_location_table_entry>
{
  typedef source_location_table_entry value_type;
  typedef source_location_table_entry compare_type;

  /* Hash on the resolved location and the function UID.  */
  static hashval_t
  hash (const source_location_table_entry &ref)
  {
    inchash::hash hstate (0);
    hstate.add_int (ref.loc);
    hstate.add_int (ref.uid);
    return hstate.end ();
  }

  /* Entries are equal when both key components match; VAR is ignored.  */
  static bool
  equal (const source_location_table_entry &ref1,
	 const source_location_table_entry &ref2)
  {
    return ref1.loc == ref2.loc && ref1.uid == ref2.uid;
  }

  static void
  mark_deleted (source_location_table_entry &ref)
  {
    ref.loc = UNKNOWN_LOCATION;
    ref.uid = -1U;
    ref.var = NULL_TREE;
  }

  /* Zero-initialized storage is a valid empty slot.  */
  static const bool empty_zero_p = true;

  static void
  mark_empty (source_location_table_entry &ref)
  {
    ref.loc = UNKNOWN_LOCATION;
    ref.uid = 0;
    ref.var = NULL_TREE;
  }

  static bool
  is_deleted (const source_location_table_entry &ref)
  {
    return (ref.loc == UNKNOWN_LOCATION
	    && ref.uid == -1U
	    && ref.var == NULL_TREE);
  }

  static bool
  is_empty (const source_location_table_entry &ref)
  {
    return (ref.loc == UNKNOWN_LOCATION
	    && ref.uid == 0
	    && ref.var == NULL_TREE);
  }

  /* PCH support; forwards to the GTY-generated walkers.  */
  static void
  pch_nx (source_location_table_entry &p)
  {
    extern void gt_pch_nx (source_location_table_entry &);
    gt_pch_nx (p);
  }

  static void
  pch_nx (source_location_table_entry &p, gt_pointer_operator op, void *cookie)
  {
    extern void gt_pch_nx (source_location_table_entry *, gt_pointer_operator,
			   void *);
    gt_pch_nx (&p, op, cookie);
  }
};
4291 :
4292 : static GTY(()) hash_table <source_location_table_entry_hash>
4293 : *source_location_table;
4294 :
/* Build a std::source_location::__impl object from a location_t.  LOC is
   the use site, FNDECL the enclosing function (may be NULL), and
   SOURCE_LOCATION_IMPL the already-verified __impl record type.  Returns
   a static, constant-initialized VAR_DECL of that type; results are
   memoized in source_location_table so repeated uses at the same
   (location, function) pair share one variable.  */

tree
build_source_location_impl (location_t loc, tree fndecl,
			    tree source_location_impl)
{
  if (source_location_table == NULL)
    source_location_table
      = hash_table <source_location_table_entry_hash>::create_ggc (64);
  const line_map_ordinary *map;
  source_location_table_entry entry;
  /* Key on the macro expansion point so all expansions of one macro use
     share a single object.  */
  entry.loc
    = linemap_resolve_location (line_table, loc, LRK_MACRO_EXPANSION_POINT,
				&map);
  entry.uid = fndecl ? DECL_UID (fndecl) : -1;
  /* Placeholder; replaced by the real VAR_DECL once built.  */
  entry.var = error_mark_node;
  source_location_table_entry *entryp
    = source_location_table->find_slot (entry, INSERT);

  /* Cache hit: reuse the previously built variable.  */
  if (entryp->var)
    return entryp->var;

  /* Build a new internal static constexpr variable of the __impl type.  */
  tree var = build_decl (loc, VAR_DECL, generate_internal_label ("Lsrc_loc"),
			 source_location_impl);
  TREE_STATIC (var) = 1;
  TREE_PUBLIC (var) = 0;
  DECL_ARTIFICIAL (var) = 1;
  DECL_IGNORED_P (var) = 1;
  DECL_EXTERNAL (var) = 0;
  DECL_DECLARED_CONSTEXPR_P (var) = 1;
  DECL_INITIALIZED_BY_CONSTANT_EXPRESSION_P (var) = 1;
  layout_decl (var, 0);

  /* Fill in the four members, matching them by name.  */
  vec<constructor_elt, va_gc> *v = NULL;
  vec_alloc (v, 4);
  for (tree field = TYPE_FIELDS (source_location_impl);
       (field = next_aggregate_field (field)) != NULL_TREE;
       field = DECL_CHAIN (field))
    {
      const char *n = IDENTIFIER_POINTER (DECL_NAME (field));
      tree val = NULL_TREE;
      if (strcmp (n, "_M_file_name") == 0)
	{
	  if (const char *fname = LOCATION_FILE (loc))
	    {
	      /* Honor -fmacro-prefix-map et al.  */
	      fname = remap_macro_filename (fname);
	      val = build_string_literal (fname);
	    }
	  else
	    val = build_string_literal ("");
	}
      else if (strcmp (n, "_M_function_name") == 0)
	{
	  const char *name = "";

	  if (fndecl)
	    {
	      /* If this is a coroutine, we should get the name of the user
		 function rather than the actor we generate.  */
	      if (tree ramp = DECL_RAMP_FN (fndecl))
		name = cxx_printable_name (ramp, 2);
	      else
		name = cxx_printable_name (fndecl, 2);
	    }

	  val = build_string_literal (name);
	}
      else if (strcmp (n, "_M_line") == 0)
	val = build_int_cst (TREE_TYPE (field), LOCATION_LINE (loc));
      else if (strcmp (n, "_M_column") == 0)
	val = build_int_cst (TREE_TYPE (field), LOCATION_COLUMN (loc));
      else
	/* get_source_location_impl_type rejected any other field.  */
	gcc_unreachable ();
      CONSTRUCTOR_APPEND_ELT (v, field, val);
    }

  tree ctor = build_constructor (source_location_impl, v);
  TREE_CONSTANT (ctor) = 1;
  TREE_STATIC (ctor) = 1;
  DECL_INITIAL (var) = ctor;
  varpool_node::finalize_decl (var);
  /* Record the finished variable in the memoization table.  */
  *entryp = entry;
  entryp->var = var;
  return var;
}
4380 :
4381 : /* Fold the __builtin_source_location () call T. */
4382 :
4383 : tree
4384 4521 : fold_builtin_source_location (const_tree t)
4385 : {
4386 4521 : gcc_assert (TREE_CODE (t) == CALL_EXPR);
4387 : /* TREE_TYPE (t) is const std::source_location::__impl* */
4388 4521 : tree source_location_impl = TREE_TYPE (TREE_TYPE (t));
4389 4521 : if (source_location_impl == error_mark_node)
4390 0 : return build_zero_cst (const_ptr_type_node);
4391 4521 : gcc_assert (CLASS_TYPE_P (source_location_impl)
4392 : && id_equal (TYPE_IDENTIFIER (source_location_impl), "__impl"));
4393 :
4394 4521 : location_t loc = EXPR_LOCATION (t);
4395 4521 : tree var = build_source_location_impl (loc, current_function_decl,
4396 : source_location_impl);
4397 4521 : return build_fold_addr_expr_with_type_loc (loc, var, TREE_TYPE (t));
4398 : }
4399 :
4400 : #include "gt-cp-cp-gimplify.h"
|