Line data Source code
1 : /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 : tree representation into the GIMPLE form.
3 : Copyright (C) 2002-2026 Free Software Foundation, Inc.
4 : Major work done by Sebastian Pop <s.pop@laposte.net>,
5 : Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
6 :
7 : This file is part of GCC.
8 :
9 : GCC is free software; you can redistribute it and/or modify it under
10 : the terms of the GNU General Public License as published by the Free
11 : Software Foundation; either version 3, or (at your option) any later
12 : version.
13 :
14 : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 : WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 : for more details.
18 :
19 : You should have received a copy of the GNU General Public License
20 : along with GCC; see the file COPYING3. If not see
21 : <http://www.gnu.org/licenses/>. */
22 :
23 : #include "config.h"
24 : #include "system.h"
25 : #include "coretypes.h"
26 : #include "backend.h"
27 : #include "target.h"
28 : #include "rtl.h"
29 : #include "tree.h"
30 : #include "memmodel.h"
31 : #include "tm_p.h"
32 : #include "gimple.h"
33 : #include "gimple-predict.h"
34 : #include "tree-pass.h" /* FIXME: only for PROP_gimple_any */
35 : #include "ssa.h"
36 : #include "cgraph.h"
37 : #include "tree-pretty-print.h"
38 : #include "diagnostic-core.h"
39 : #include "diagnostic.h" /* For errorcount. */
40 : #include "alias.h"
41 : #include "fold-const.h"
42 : #include "calls.h"
43 : #include "varasm.h"
44 : #include "stmt.h"
45 : #include "expr.h"
46 : #include "gimple-iterator.h"
47 : #include "gimple-fold.h"
48 : #include "tree-eh.h"
49 : #include "gimplify.h"
50 : #include "stor-layout.h"
51 : #include "print-tree.h"
52 : #include "tree-iterator.h"
53 : #include "tree-inline.h"
54 : #include "langhooks.h"
55 : #include "tree-cfg.h"
56 : #include "tree-ssa.h"
57 : #include "tree-hash-traits.h"
58 : #include "omp-general.h"
59 : #include "omp-low.h"
60 : #include "gimple-low.h"
61 : #include "gomp-constants.h"
62 : #include "splay-tree.h"
63 : #include "gimple-walk.h"
64 : #include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name */
65 : #include "builtins.h"
66 : #include "stringpool.h"
67 : #include "attribs.h"
68 : #include "asan.h"
69 : #include "dbgcnt.h"
70 : #include "omp-offload.h"
71 : #include "context.h"
72 : #include "tree-nested.h"
73 : #include "gcc-urlifier.h"
74 : #include "insn-config.h"
75 : #include "recog.h"
76 : #include "output.h"
77 : #include "gimplify_reg_info.h"
78 :
79 : /* Identifier for a basic condition, mapping it to other basic conditions of
80 : its Boolean expression. Basic conditions given the same uid (in the same
81 : function) are parts of the same ANDIF/ORIF expression. Used for condition
82 : coverage. */
83 : static unsigned nextconduid = 1;
84 :
85 : /* Annotated gconds so that basic conditions in the same expression map to
86 : the same uid. This is used for condition coverage. */
87 : static hash_map <tree, unsigned> *cond_uids;
88 :
89 : /* Get a fresh identifier for a new condition expression. This is used for
90 : condition coverage. */
91 : static unsigned
92 5800957 : next_cond_uid ()
93 : {
94 5800957 : return nextconduid++;
95 : }
96 :
/* Reset the condition uid to the value it should have when compiling a new
   function.  0 is already the default/untouched value, so start at non-zero.
   A valid and set id should always be > 0.  This is used for condition
   coverage.  */
static void
reset_cond_uid ()
{
  nextconduid = 1;
}
106 :
107 : /* Associate the condition STMT with the discriminator UID. STMTs that are
108 : broken down with ANDIF/ORIF from the same Boolean expression should be given
109 : the same UID; 'if (a && b && c) { if (d || e) ... } ...' should yield the
110 : { a: 1, b: 1, c: 1, d: 2, e: 2 } when gimplification is done. This is used
111 : for condition coverage. */
112 : static void
113 1216945 : tree_associate_condition_with_expr (tree stmt, unsigned uid)
114 : {
115 1216945 : if (!condition_coverage_flag)
116 : return;
117 :
118 527 : if (!cond_uids)
119 65 : cond_uids = new hash_map <tree, unsigned> ();
120 :
121 527 : cond_uids->put (stmt, uid);
122 : }
123 :
124 : /* Hash set of poisoned variables in a bind expr. */
125 : static hash_set<tree> *asan_poisoned_variables = NULL;
126 :
127 : /* Hash set of already-resolved calls to OpenMP "declare variant"
128 : functions. A call can resolve to the original function and
129 : we don't want to repeat the resolution multiple times. */
130 : static hash_set<tree> *omp_resolved_variant_calls = NULL;
131 :
/* Bit-flags recorded per variable by the gimplifier's OpenMP/OpenACC
   handling (see omp_add_variable).  The low bits classify the
   data-sharing of a variable; the remaining bits qualify those
   classifications.  */
enum gimplify_omp_var_data
{
  /* The variable was referenced in the region.  */
  GOVD_SEEN = 0x000001,
  /* The sharing was requested by an explicit clause.  */
  GOVD_EXPLICIT = 0x000002,
  GOVD_SHARED = 0x000004,
  GOVD_PRIVATE = 0x000008,
  GOVD_FIRSTPRIVATE = 0x000010,
  GOVD_LASTPRIVATE = 0x000020,
  GOVD_REDUCTION = 0x000040,
  GOVD_LOCAL = 0x00080,
  GOVD_MAP = 0x000100,
  GOVD_DEBUG_PRIVATE = 0x000200,
  GOVD_PRIVATE_OUTER_REF = 0x000400,
  GOVD_LINEAR = 0x000800,
  GOVD_ALIGNED = 0x001000,

  /* Flag for GOVD_MAP: don't copy back.  */
  GOVD_MAP_TO_ONLY = 0x002000,

  /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference.  */
  GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 0x004000,

  GOVD_MAP_0LEN_ARRAY = 0x008000,

  /* Flag for GOVD_MAP, if it is always, to or always, tofrom mapping.  */
  GOVD_MAP_ALWAYS_TO = 0x010000,

  /* Flag for shared vars that are or might be stored to in the region.  */
  GOVD_WRITTEN = 0x020000,

  /* Flag for GOVD_MAP, if it is a forced mapping.  */
  GOVD_MAP_FORCE = 0x040000,

  /* Flag for GOVD_MAP: must be present already.  */
  GOVD_MAP_FORCE_PRESENT = 0x080000,

  /* Flag for GOVD_MAP: only allocate.  */
  GOVD_MAP_ALLOC_ONLY = 0x100000,

  /* Flag for GOVD_MAP: only copy back.  */
  GOVD_MAP_FROM_ONLY = 0x200000,

  GOVD_NONTEMPORAL = 0x400000,

  /* Flag for GOVD_LASTPRIVATE: conditional modifier.  */
  GOVD_LASTPRIVATE_CONDITIONAL = 0x800000,

  GOVD_CONDTEMP = 0x1000000,

  /* Flag for GOVD_REDUCTION: inscan seen in {in,ex}clusive clause.  */
  GOVD_REDUCTION_INSCAN = 0x2000000,

  /* Flag for GOVD_FIRSTPRIVATE: OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT.  */
  GOVD_FIRSTPRIVATE_IMPLICIT = 0x4000000,

  /* Mask selecting the mutually-exclusive data-sharing classes above.  */
  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
			   | GOVD_LOCAL)
};
191 :
192 :
/* Kind of OpenMP/OpenACC region a gimplify_omp_ctx describes.  Distinct
   region kinds use distinct bits; ORed-in low bits distinguish variants
   (e.g. combined constructs) of the same base kind.  */
enum omp_region_type
{
  ORT_WORKSHARE = 0x00,
  ORT_TASKGROUP = 0x01,
  ORT_DISPATCH = 0x02,
  ORT_SIMD = 0x04,

  ORT_PARALLEL = 0x08,
  ORT_COMBINED_PARALLEL = ORT_PARALLEL | 1,

  ORT_TASK = 0x10,
  ORT_UNTIED_TASK = ORT_TASK | 1,
  ORT_TASKLOOP = ORT_TASK | 2,
  ORT_UNTIED_TASKLOOP = ORT_UNTIED_TASK | 2,

  ORT_TEAMS = 0x20,
  ORT_COMBINED_TEAMS = ORT_TEAMS | 1,
  ORT_HOST_TEAMS = ORT_TEAMS | 2,
  ORT_COMBINED_HOST_TEAMS = ORT_COMBINED_TEAMS | 2,

  /* Data region.  */
  ORT_TARGET_DATA = 0x40,

  /* Data region with offloading.  */
  ORT_TARGET = 0x80,
  ORT_COMBINED_TARGET = ORT_TARGET | 1,
  ORT_IMPLICIT_TARGET = ORT_TARGET | 2,

  /* OpenACC variants.  */
  ORT_ACC = 0x100,  /* A generic OpenACC region.  */
  ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA,  /* Data construct.  */
  ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET,  /* Parallel construct */
  ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 2,  /* Kernels construct.  */
  ORT_ACC_SERIAL = ORT_ACC | ORT_TARGET | 4,  /* Serial construct.  */
  ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 2,  /* Host data.  */

  /* Dummy OpenMP region, used to disable expansion of
     DECL_VALUE_EXPRs in taskloop pre body.  */
  ORT_NONE = 0x200
};
233 :
/* Gimplify hashtable helper: hash traits for the formal-temporary table
   (gimplify_ctx::temp_htab), keyed on elt_t entries.  NOTE(review): elt_t
   is declared elsewhere; from the uses in lookup_tmp_var it appears to
   pair a value tree with its temporary -- confirm at its definition.  */

struct gimplify_hasher : free_ptr_hash <elt_t>
{
  static inline hashval_t hash (const elt_t *);
  static inline bool equal (const elt_t *, const elt_t *);
};
241 :
/* Per-invocation state of the gimplifier, managed as a stack by
   push_gimplify_context / pop_gimplify_context.  */
struct gimplify_ctx
{
  /* Enclosing context; also links free structs in ctx_pool.  */
  struct gimplify_ctx *prev_context;

  /* GIMPLE_BINDs currently open (see gimple_push_bind_expr).  */
  vec<gbind *> bind_expr_stack;
  /* Chain of temporaries created in this context (gimple_add_tmp_var).  */
  tree temps;
  /* Cleanups seen while conditions > 0; flushed into the prequeue when
     the outermost condition is popped (gimple_pop_condition).  */
  gimple_seq conditional_cleanups;
  tree exit_label;
  tree return_temp;

  vec<tree> case_labels;
  hash_set<tree> *live_switch_vars;
  /* The formal temporary table.  Should this be persistent?  */
  hash_table<gimplify_hasher> *temp_htab;

  /* Nesting depth of COND_EXPRs; > 0 means we are in conditional context.  */
  int conditions;
  /* Whether new temporaries may be created directly as SSA names.  */
  unsigned into_ssa : 1;
  unsigned allow_rhs_cond_expr : 1;
  unsigned in_cleanup_point_expr : 1;
  unsigned keep_stack : 1;
  unsigned save_stack : 1;
  unsigned in_switch_expr : 1;
  unsigned in_handler_expr : 1;
};
266 :
/* Categories of variables for OpenMP defaultmap purposes; used to index
   gimplify_omp_ctx::defaultmap (values must stay contiguous from 0).  */
enum gimplify_defaultmap_kind
{
  GDMK_SCALAR,
  GDMK_SCALAR_TARGET, /* w/ Fortran's target attr, implicit mapping, only.  */
  GDMK_AGGREGATE,
  GDMK_ALLOCATABLE,
  GDMK_POINTER
};
275 :
/* Per-region state for gimplifying OpenMP/OpenACC constructs, stacked via
   outer_context (see new_omp_context / delete_omp_context).  */
struct gimplify_omp_ctx
{
  /* Enclosing region's context, or NULL at the outermost region.  */
  struct gimplify_omp_ctx *outer_context;
  /* Per-variable data-sharing info, keyed by DECL_UID order
     (splay_tree_compare_decl_uid); values carry gimplify_omp_var_data
     flags.  */
  splay_tree variables;
  hash_map<omp_name_type<tree>, tree> *implicit_mappers;
  hash_set<tree> *privatized_types;
  tree clauses;
  /* Iteration variables in an OMP_FOR.  */
  vec<tree> loop_iter_var;
  location_t location;
  enum omp_clause_default_kind default_kind;
  enum omp_region_type region_type;
  enum tree_code code;
  bool combined_loop;
  bool distribute;
  bool target_firstprivatize_array_bases;
  bool add_safelen1;
  bool order_concurrent;
  bool has_depend;
  bool in_for_exprs;
  bool in_call_args;
  /* Default mapping behavior per category, indexed by
     gimplify_defaultmap_kind; size must match the number of GDMK_*
     values.  */
  int defaultmap[5];
};
299 :
300 : static struct gimplify_ctx *gimplify_ctxp;
301 : static struct gimplify_omp_ctx *gimplify_omp_ctxp;
302 : static bool in_omp_construct;
303 :
304 : /* Forward declaration. */
305 : static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
306 : static hash_map<tree, tree> *oacc_declare_returns;
307 : static enum gimplify_status gimplify_expr (tree *, gimple_seq *, gimple_seq *,
308 : bool (*) (tree), fallback_t, bool);
309 : static void prepare_gimple_addressable (tree *, gimple_seq *);
310 :
/* Shorter alias name for the above function for use in gimplify.cc
   only.  Appends GS to *SEQ_P without updating def/use information,
   which does not exist yet during gimplification.  */

static inline void
gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
{
  gimple_seq_add_stmt_without_update (seq_p, gs);
}
319 :
320 : /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
321 : NULL, a new sequence is allocated. This function is
322 : similar to gimple_seq_add_seq, but does not scan the operands.
323 : During gimplification, we need to manipulate statement sequences
324 : before the def/use vectors have been constructed. */
325 :
326 : static void
327 9235172 : gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
328 : {
329 9235172 : gimple_stmt_iterator si;
330 :
331 9235172 : if (src == NULL)
332 4038138 : return;
333 :
334 5197034 : si = gsi_last (*dst_p);
335 5197034 : gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
336 : }
337 :
338 :
339 : /* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
340 : and popping gimplify contexts. */
341 :
342 : static struct gimplify_ctx *ctx_pool = NULL;
343 :
344 : /* Return a gimplify context struct from the pool. */
345 :
346 : static inline struct gimplify_ctx *
347 8858601 : ctx_alloc (void)
348 : {
349 8858601 : struct gimplify_ctx * c = ctx_pool;
350 :
351 8858601 : if (c)
352 8596068 : ctx_pool = c->prev_context;
353 : else
354 262533 : c = XNEW (struct gimplify_ctx);
355 :
356 8858601 : memset (c, '\0', sizeof (*c));
357 8858601 : return c;
358 : }
359 :
/* Put gimplify context C back into the pool, pushing it onto the free
   list headed by ctx_pool (prev_context doubles as the free-list link).  */

static inline void
ctx_free (struct gimplify_ctx *c)
{
  c->prev_context = ctx_pool;
  ctx_pool = c;
}
368 :
369 : /* Free allocated ctx stack memory. */
370 :
371 : void
372 230419 : free_gimplify_stack (void)
373 : {
374 230419 : struct gimplify_ctx *c;
375 :
376 463328 : while ((c = ctx_pool))
377 : {
378 232909 : ctx_pool = c->prev_context;
379 232909 : free (c);
380 : }
381 230419 : }
382 :
383 :
384 : /* Set up a context for the gimplifier. */
385 :
386 : void
387 8858601 : push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
388 : {
389 8858601 : struct gimplify_ctx *c = ctx_alloc ();
390 :
391 8858601 : c->prev_context = gimplify_ctxp;
392 8858601 : gimplify_ctxp = c;
393 8858601 : gimplify_ctxp->into_ssa = in_ssa;
394 8858601 : gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
395 8858601 : }
396 :
/* Tear down a context for the gimplifier.  If BODY is non-null, then
   put the temporaries into the outer BIND_EXPR.  Otherwise, put them
   in the local_decls.

   BODY is not a sequence, but the first tuple in a sequence.  */

void
pop_gimplify_context (gimple *body)
{
  struct gimplify_ctx *c = gimplify_ctxp;

  /* The bind-expr stack must be balanced when a context is popped.  */
  gcc_assert (c
	      && (!c->bind_expr_stack.exists ()
		  || c->bind_expr_stack.is_empty ()));
  c->bind_expr_stack.release ();
  gimplify_ctxp = c->prev_context;

  /* Hand this context's temporaries either to BODY's bind or to the
     function's local declarations.  */
  if (body)
    declare_vars (c->temps, body, false);
  else
    record_vars (c->temps);

  /* Drop the formal-temporary table and recycle the context struct.  */
  delete c->temp_htab;
  c->temp_htab = NULL;
  ctx_free (c);
}
423 :
424 : /* Push a GIMPLE_BIND tuple onto the stack of bindings. */
425 :
426 : static void
427 5936071 : gimple_push_bind_expr (gbind *bind_stmt)
428 : {
429 5936071 : gimplify_ctxp->bind_expr_stack.reserve (8);
430 5936071 : gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
431 5936071 : }
432 :
/* Pop the innermost GIMPLE_BIND off the stack of bindings (the value
   itself is discarded).  */

static void
gimple_pop_bind_expr (void)
{
  gimplify_ctxp->bind_expr_stack.pop ();
}
440 :
/* Return the innermost (most recently pushed) GIMPLE_BIND on the stack
   of bindings.  */

gbind *
gimple_current_bind_expr (void)
{
  return gimplify_ctxp->bind_expr_stack.last ();
}
448 :
/* Return (by value) the stack of bindings created during gimplification
   of the current context.  */

vec<gbind *>
gimple_bind_expr_stack (void)
{
  return gimplify_ctxp->bind_expr_stack;
}
456 :
/* Return true iff there is a COND_EXPR between us and the innermost
   CLEANUP_POINT_EXPR.  This info is used by gimple_push_cleanup.
   Equivalent to checking that gimple_push_condition has been called
   more often than gimple_pop_condition.  */

static bool
gimple_conditional_context (void)
{
  return gimplify_ctxp->conditions > 0;
}
465 :
466 : /* Note that we've entered a COND_EXPR. */
467 :
468 : static void
469 5801392 : gimple_push_condition (void)
470 : {
471 : #ifdef ENABLE_GIMPLE_CHECKING
472 5801392 : if (gimplify_ctxp->conditions == 0)
473 3816981 : gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
474 : #endif
475 5801392 : ++(gimplify_ctxp->conditions);
476 5801392 : }
477 :
478 : /* Note that we've left a COND_EXPR. If we're back at unconditional scope
479 : now, add any conditional cleanups we've seen to the prequeue. */
480 :
481 : static void
482 5801392 : gimple_pop_condition (gimple_seq *pre_p)
483 : {
484 5801392 : int conds = --(gimplify_ctxp->conditions);
485 :
486 5801392 : gcc_assert (conds >= 0);
487 5801392 : if (conds == 0)
488 : {
489 3816981 : gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
490 3816981 : gimplify_ctxp->conditional_cleanups = NULL;
491 : }
492 5801392 : }
493 :
494 : /* A stable comparison routine for use with splay trees and DECLs. */
495 :
496 : static int
497 18147456 : splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
498 : {
499 18147456 : tree a = (tree) xa;
500 18147456 : tree b = (tree) xb;
501 :
502 18147456 : return DECL_UID (a) - DECL_UID (b);
503 : }
504 :
505 : /* Create a new omp construct that deals with variable remapping. */
506 :
507 : static struct gimplify_omp_ctx *
508 139521 : new_omp_context (enum omp_region_type region_type)
509 : {
510 139521 : struct gimplify_omp_ctx *c;
511 :
512 139521 : c = XCNEW (struct gimplify_omp_ctx);
513 139521 : c->outer_context = gimplify_omp_ctxp;
514 139521 : c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
515 139521 : c->implicit_mappers = new hash_map<omp_name_type<tree>, tree>;
516 139521 : c->privatized_types = new hash_set<tree>;
517 139521 : c->location = input_location;
518 139521 : c->region_type = region_type;
519 139521 : if ((region_type & ORT_TASK) == 0)
520 133480 : c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
521 : else
522 6041 : c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
523 139521 : c->defaultmap[GDMK_SCALAR] = GOVD_MAP;
524 139521 : c->defaultmap[GDMK_SCALAR_TARGET] = GOVD_MAP;
525 139521 : c->defaultmap[GDMK_AGGREGATE] = GOVD_MAP;
526 139521 : c->defaultmap[GDMK_ALLOCATABLE] = GOVD_MAP;
527 139521 : c->defaultmap[GDMK_POINTER] = GOVD_MAP;
528 :
529 139521 : return c;
530 : }
531 :
532 : /* Destroy an omp construct that deals with variable remapping. */
533 :
534 : static void
535 138962 : delete_omp_context (struct gimplify_omp_ctx *c)
536 : {
537 138962 : splay_tree_delete (c->variables);
538 277924 : delete c->privatized_types;
539 277924 : delete c->implicit_mappers;
540 138962 : c->loop_iter_var.release ();
541 138962 : XDELETE (c);
542 138962 : }
543 :
544 : static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
545 : static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
546 :
/* Both gimplify the statement T and append it to *SEQ_P.  This function
   behaves exactly as gimplify_stmt, but you don't have to pass T as a
   reference.  */

void
gimplify_and_add (tree t, gimple_seq *seq_p)
{
  gimplify_stmt (&t, seq_p);
}
556 :
/* Gimplify statement T into sequence *SEQ_P, and return the first
   tuple in the sequence of generated tuples for this statement.
   Return NULL if gimplifying T produced no tuples.  */

static gimple *
gimplify_and_return_first (tree t, gimple_seq *seq_p)
{
  /* Remember the tail of *SEQ_P before gimplifying T, so we can find
     where T's output begins.  */
  gimple_stmt_iterator last = gsi_last (*seq_p);

  gimplify_and_add (t, seq_p);

  if (!gsi_end_p (last))
    {
      /* *SEQ_P was non-empty on entry: T's first tuple is the one right
	 after the previous tail.  */
      gsi_next (&last);
      return gsi_stmt (last);
    }
  else
    /* *SEQ_P started empty, so whatever it holds now came from T.  */
    return gimple_seq_first_stmt (*seq_p);
}
576 :
577 : /* Returns true iff T is a valid RHS for an assignment to an un-renamed
578 : LHS, or for a call argument. */
579 :
580 : static bool
581 236009 : is_gimple_mem_rhs (tree t)
582 : {
583 : /* If we're dealing with a renamable type, either source or dest must be
584 : a renamed variable. */
585 236009 : if (is_gimple_reg_type (TREE_TYPE (t)))
586 232644 : return is_gimple_val (t);
587 : else
588 3365 : return is_gimple_val (t) || is_gimple_lvalue (t);
589 : }
590 :
591 : /* Return true if T is a CALL_EXPR or an expression that can be
592 : assigned to a temporary. Note that this predicate should only be
593 : used during gimplification. See the rationale for this in
594 : gimplify_modify_expr. */
595 :
596 : static bool
597 97509260 : is_gimple_reg_rhs_or_call (tree t)
598 : {
599 71593523 : return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
600 97509260 : || TREE_CODE (t) == CALL_EXPR);
601 : }
602 :
603 : /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
604 : this predicate should only be used during gimplification. See the
605 : rationale for this in gimplify_modify_expr. */
606 :
607 : static bool
608 14195032 : is_gimple_mem_rhs_or_call (tree t)
609 : {
610 : /* If we're dealing with a renamable type, either source or dest must be
611 : a renamed variable. */
612 14195032 : if (is_gimple_reg_type (TREE_TYPE (t)))
613 10895906 : return is_gimple_val (t);
614 : else
615 3299126 : return (is_gimple_val (t)
616 1640129 : || is_gimple_lvalue (t)
617 1125867 : || (TREE_CODE (t) == CONSTRUCTOR && CONSTRUCTOR_NELTS (t) == 0)
618 4424603 : || TREE_CODE (t) == CALL_EXPR);
619 : }
620 :
621 : /* Create a temporary with a name derived from VAL. Subroutine of
622 : lookup_tmp_var; nobody else should call this function. */
623 :
624 : static inline tree
625 2134556 : create_tmp_from_val (tree val)
626 : {
627 : /* Drop all qualifiers and address-space information from the value type. */
628 2134556 : tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
629 2134556 : tree var = create_tmp_var (type, get_name (val));
630 2134556 : return var;
631 : }
632 :
/* Create a temporary to hold the value of VAL.  If IS_FORMAL, try to reuse
   an existing expression temporary.  If NOT_GIMPLE_REG, mark it as such.  */

static tree
lookup_tmp_var (tree val, bool is_formal, bool not_gimple_reg)
{
  tree ret;

  /* We cannot mark a formal temporary with DECL_NOT_GIMPLE_REG_P.  */
  gcc_assert (!is_formal || !not_gimple_reg);

  /* If not optimizing, never really reuse a temporary.  local-alloc
     won't allocate any variable that is used in more than one basic
     block, which means it will go into memory, causing much extra
     work in reload and final and poorer code generation, outweighing
     the extra memory allocation here.  */
  if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
    {
      ret = create_tmp_from_val (val);
      DECL_NOT_GIMPLE_REG_P (ret) = not_gimple_reg;
    }
  else
    {
      elt_t elt, *elt_p;
      elt_t **slot;

      /* Look VAL up in the per-context table of formal temporaries,
	 which is created lazily on first use.  */
      elt.val = val;
      if (!gimplify_ctxp->temp_htab)
	gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
      slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
      if (*slot == NULL)
	{
	  /* First occurrence of VAL: create a temporary and record the
	     (value, temp) pair in the table.  */
	  elt_p = XNEW (elt_t);
	  elt_p->val = val;
	  elt_p->temp = ret = create_tmp_from_val (val);
	  *slot = elt_p;
	}
      else
	{
	  /* Reuse the temporary previously created for an equal value.  */
	  elt_p = *slot;
	  ret = elt_p->temp;
	}
    }

  return ret;
}
679 :
/* Helper for get_formal_tmp_var and get_initialized_tmp_var.  Gimplify
   VAL into *PRE_P / *POST_P, create a temporary (an SSA name when
   ALLOW_SSA and the context permits, otherwise a VAR_DECL via
   lookup_tmp_var), emit its initialization into *PRE_P, and return the
   temporary.  */

static tree
internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
		      bool is_formal, bool allow_ssa, bool not_gimple_reg)
{
  tree t, mod;

  /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
     can create an INIT_EXPR and convert it into a GIMPLE_CALL below.  */
  gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
		 fb_rvalue);

  if (allow_ssa
      && gimplify_ctxp->into_ssa
      && is_gimple_reg_type (TREE_TYPE (val)))
    {
      /* Create the temporary directly as an (anonymous) SSA name.  */
      t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
      if (! gimple_in_ssa_p (cfun))
	{
	  /* Outside SSA form proper, carry over VAL's name (if any) for
	     more readable dumps.  */
	  const char *name = get_name (val);
	  if (name)
	    SET_SSA_NAME_VAR_OR_IDENTIFIER (t, create_tmp_var_name (name));
	}
    }
  else
    t = lookup_tmp_var (val, is_formal, not_gimple_reg);

  /* Build "t = val" and gimplify it into *PRE_P.  */
  mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));

  SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));

  /* gimplify_modify_expr might want to reduce this further.  */
  gimplify_and_add (mod, pre_p);
  ggc_free (mod);

  /* If we failed to gimplify VAL then we can end up with the temporary
     SSA name not having a definition.  In this case return a decl.  */
  if (TREE_CODE (t) == SSA_NAME && ! SSA_NAME_DEF_STMT (t))
    return lookup_tmp_var (val, is_formal, not_gimple_reg);

  return t;
}
723 :
/* Return a formal temporary variable initialized with VAL.  PRE_P is as
   in gimplify_expr.  Only use this function if:

   1) The value of the unfactored expression represented by VAL will not
      change between the initialization and use of the temporary, and
   2) The temporary will not be otherwise modified.

   For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
   and #2 means it is inappropriate for && temps.

   For other cases, use get_initialized_tmp_var instead.  */

tree
get_formal_tmp_var (tree val, gimple_seq *pre_p)
{
  /* is_formal = true, allow_ssa = true, not_gimple_reg = false.  */
  return internal_get_tmp_var (val, pre_p, NULL, true, true, false);
}
741 :
/* Return a temporary variable initialized with VAL.  PRE_P and POST_P
   are as in gimplify_expr.  Unlike get_formal_tmp_var, the temporary is
   never reused for another value.  */

tree
get_initialized_tmp_var (tree val, gimple_seq *pre_p,
			 gimple_seq *post_p /* = NULL */,
			 bool allow_ssa /* = true */)
{
  /* is_formal = false, not_gimple_reg = false.  */
  return internal_get_tmp_var (val, pre_p, post_p, false, allow_ssa, false);
}
752 :
/* Declare all the variables in VARS in SCOPE.  If DEBUG_INFO is true,
   generate debug info for them; otherwise don't.  GS must be a
   GIMPLE_BIND; VARS is a DECL_CHAIN list built newest-first and is
   reversed here before being attached.  */

void
declare_vars (tree vars, gimple *gs, bool debug_info)
{
  tree last = vars;
  if (last)
    {
      tree temps, block;

      gbind *scope = as_a <gbind *> (gs);

      /* VARS was built by prepending; restore declaration order.  */
      temps = nreverse (last);

      block = gimple_bind_block (scope);
      gcc_assert (!block || TREE_CODE (block) == BLOCK);
      if (!block || !debug_info)
	{
	  /* No debug info wanted (or no BLOCK): just prepend TEMPS to
	     the bind's variable chain.  NB: LAST is now the tail of
	     TEMPS after the nreverse above.  */
	  DECL_CHAIN (last) = gimple_bind_vars (scope);
	  gimple_bind_set_vars (scope, temps);
	}
      else
	{
	  /* We need to attach the nodes both to the BIND_EXPR and to its
	     associated BLOCK for debugging purposes.  The key point here
	     is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
	     is a subchain of the BIND_EXPR_VARS of the BIND_EXPR.  */
	  if (BLOCK_VARS (block))
	    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
	  else
	    {
	      gimple_bind_set_vars (scope,
				    chainon (gimple_bind_vars (scope), temps));
	      BLOCK_VARS (block) = temps;
	    }
	}
    }
}
792 :
793 : /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
794 : for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
795 : no such upper bound can be obtained. */
796 :
797 : static void
798 0 : force_constant_size (tree var)
799 : {
800 : /* The only attempt we make is by querying the maximum size of objects
801 : of the variable's type. */
802 :
803 0 : HOST_WIDE_INT max_size;
804 :
805 0 : gcc_assert (VAR_P (var));
806 :
807 0 : max_size = max_int_size_in_bytes (TREE_TYPE (var));
808 :
809 0 : gcc_assert (max_size >= 0);
810 :
811 0 : DECL_SIZE_UNIT (var)
812 0 : = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
813 0 : DECL_SIZE (var)
814 0 : = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
815 0 : }
816 :
817 : /* Push the temporary variable TMP into the current binding. */
818 :
819 : void
820 34201 : gimple_add_tmp_var_fn (struct function *fn, tree tmp)
821 : {
822 34201 : gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
823 :
824 : /* Later processing assumes that the object size is constant, which might
825 : not be true at this point. Force the use of a constant upper bound in
826 : this case. */
827 34201 : if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
828 0 : force_constant_size (tmp);
829 :
830 34201 : DECL_CONTEXT (tmp) = fn->decl;
831 34201 : DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
832 :
833 34201 : record_vars_into (tmp, fn->decl);
834 34201 : }
835 :
/* Push the temporary variable TMP into the current binding.  Depending
   on the current state this chains TMP onto the gimplify context's
   temporaries, the function's local_decls, or a nested function's
   outermost bind; within OpenMP regions it also records a data-sharing
   classification for TMP.  */

void
gimple_add_tmp_var (tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  if (gimplify_ctxp)
    {
      /* Chain TMP onto the context; pop_gimplify_context will declare it.  */
      DECL_CHAIN (tmp) = gimplify_ctxp->temps;
      gimplify_ctxp->temps = tmp;

      /* Mark temporaries local within the nearest enclosing parallel.  */
      if (gimplify_omp_ctxp)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  int flag = GOVD_LOCAL | GOVD_SEEN;
	  /* Walk outward past workshare/taskgroup/simd/acc regions to
	     find the context that should own TMP.  */
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_TASKGROUP
		     || ctx->region_type == ORT_SIMD
		     || ctx->region_type == ORT_ACC))
	    {
	      /* An addressable non-static temporary inside a simd region
		 must be privatized in that region instead.  */
	      if (ctx->region_type == ORT_SIMD
		  && TREE_ADDRESSABLE (tmp)
		  && !TREE_STATIC (tmp))
		{
		  if (TREE_CODE (DECL_SIZE_UNIT (tmp)) != INTEGER_CST)
		    ctx->add_safelen1 = true;
		  else if (ctx->in_for_exprs)
		    flag = GOVD_PRIVATE;
		  else
		    flag = GOVD_PRIVATE | GOVD_SEEN;
		  break;
		}
	      ctx = ctx->outer_context;
	    }
	  if (ctx)
	    omp_add_variable (ctx, tmp, flag);
	}
    }
  else if (cfun)
    record_vars (tmp);
  else
    {
      gimple_seq body_seq;

      /* This case is for nested functions.  We need to expose the locals
	 they create.  */
      body_seq = gimple_body (current_function_decl);
      declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
    }
}
898 :
899 :
900 :
901 : /* This page contains routines to unshare tree nodes, i.e. to duplicate tree
902 : nodes that are referenced more than once in GENERIC functions. This is
903 : necessary because gimplification (translation into GIMPLE) is performed
904 : by modifying tree nodes in-place, so gimplification of a shared node in a
905 : first context could generate an invalid GIMPLE form in a second context.
906 :
907 : This is achieved with a simple mark/copy/unmark algorithm that walks the
908 : GENERIC representation top-down, marks nodes with TREE_VISITED the first
909 : time it encounters them, duplicates them if they already have TREE_VISITED
910 : set, and finally removes the TREE_VISITED marks it has set.
911 :
912 : The algorithm works only at the function level, i.e. it generates a GENERIC
913 : representation of a function with no nodes shared within the function when
914 : passed a GENERIC function (except for nodes that are allowed to be shared).
915 :
916 : At the global level, it is also necessary to unshare tree nodes that are
917 : referenced in more than one function, for the same aforementioned reason.
918 : This requires some cooperation from the front-end. There are 2 strategies:
919 :
920 : 1. Manual unsharing. The front-end needs to call unshare_expr on every
921 : expression that might end up being shared across functions.
922 :
923 : 2. Deep unsharing. This is an extension of regular unsharing. Instead
924 : of calling unshare_expr on expressions that might be shared across
925 : functions, the front-end pre-marks them with TREE_VISITED. This will
926 : ensure that they are unshared on the first reference within functions
927 : when the regular unsharing algorithm runs. The counterpart is that
928 : this algorithm must look deeper than for manual unsharing, which is
929 : specified by LANG_HOOKS_DEEP_UNSHARING.
930 :
 931 :    If there are only a few specific cases of node sharing across functions, it is
932 : probably easier for a front-end to unshare the expressions manually. On the
933 : contrary, if the expressions generated at the global level are as widespread
934 : as expressions generated within functions, deep unsharing is very likely the
935 : way to go. */
936 :
937 : /* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
938 : These nodes model computations that must be done once. If we were to
939 : unshare something like SAVE_EXPR(i++), the gimplification process would
940 : create wrong code. However, if DATA is non-null, it must hold a pointer
941 : set that is used to unshare the subtrees of these nodes. */
942 :
943 : static tree
944 3107693974 : mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
945 : {
946 3107693974 : tree t = *tp;
947 3107693974 : enum tree_code code = TREE_CODE (t);
948 :
949 : /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
950 : copy their subtrees if we can make sure to do it only once. */
951 3107693974 : if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
952 : {
953 13818896 : if (data && !((hash_set<tree> *)data)->add (t))
954 : ;
955 : else
956 13818896 : *walk_subtrees = 0;
957 : }
958 :
959 : /* Stop at types, decls, constants like copy_tree_r. */
960 3093875078 : else if (TREE_CODE_CLASS (code) == tcc_type
961 : || TREE_CODE_CLASS (code) == tcc_declaration
962 3093875078 : || TREE_CODE_CLASS (code) == tcc_constant)
963 1905216446 : *walk_subtrees = 0;
964 :
965 : /* Cope with the statement expression extension. */
966 1188658632 : else if (code == STATEMENT_LIST)
967 : ;
968 :
969 : /* Leave the bulk of the work to copy_tree_r itself. */
970 : else
971 1188608997 : copy_tree_r (tp, walk_subtrees, NULL);
972 :
973 3107693974 : return NULL_TREE;
974 : }
975 :
976 : /* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
977 : If *TP has been visited already, then *TP is deeply copied by calling
978 : mostly_copy_tree_r. DATA is passed to mostly_copy_tree_r unmodified. */
979 :
980 : static tree
981 278085932 : copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
982 : {
983 278085932 : tree t = *tp;
984 278085932 : enum tree_code code = TREE_CODE (t);
985 :
986 : /* Skip types, decls, and constants. But we do want to look at their
987 : types and the bounds of types. Mark them as visited so we properly
988 : unmark their subtrees on the unmark pass. If we've already seen them,
989 : don't look down further. */
990 278085932 : if (TREE_CODE_CLASS (code) == tcc_type
991 : || TREE_CODE_CLASS (code) == tcc_declaration
992 278085932 : || TREE_CODE_CLASS (code) == tcc_constant)
993 : {
994 132279490 : if (TREE_VISITED (t))
995 81376126 : *walk_subtrees = 0;
996 : else
997 50903364 : TREE_VISITED (t) = 1;
998 : }
999 :
1000 : /* If this node has been visited already, unshare it and don't look
1001 : any deeper. */
1002 145806442 : else if (TREE_VISITED (t))
1003 : {
1004 1718268 : walk_tree (tp, mostly_copy_tree_r, data, NULL);
1005 1718268 : *walk_subtrees = 0;
1006 : }
1007 :
1008 : /* Otherwise, mark the node as visited and keep looking. */
1009 : else
1010 144088174 : TREE_VISITED (t) = 1;
1011 :
1012 278085932 : return NULL_TREE;
1013 : }
1014 :
1015 : /* Unshare most of the shared trees rooted at *TP. DATA is passed to the
1016 : copy_if_shared_r callback unmodified. */
1017 :
1018 : void
1019 8688069 : copy_if_shared (tree *tp, void *data)
1020 : {
1021 8688069 : walk_tree (tp, copy_if_shared_r, data, NULL);
1022 8688069 : }
1023 :
1024 : /* Unshare all the trees in the body of FNDECL, as well as in the bodies of
1025 : any nested functions. */
1026 :
1027 : static void
1028 2896023 : unshare_body (tree fndecl)
1029 : {
1030 2896023 : struct cgraph_node *cgn = cgraph_node::get (fndecl);
1031 : /* If the language requires deep unsharing, we need a pointer set to make
1032 : sure we don't repeatedly unshare subtrees of unshareable nodes. */
1033 2896023 : hash_set<tree> *visited
1034 2896023 : = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;
1035 :
1036 2896023 : copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
1037 2896023 : copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
1038 2896023 : copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);
1039 :
1040 2899354 : delete visited;
1041 :
1042 2896023 : if (cgn)
1043 5836880 : for (cgn = first_nested_function (cgn); cgn;
1044 25769 : cgn = next_nested_function (cgn))
1045 25769 : unshare_body (cgn->decl);
1046 2896023 : }
1047 :
1048 : /* Callback for walk_tree to unmark the visited trees rooted at *TP.
1049 : Subtrees are walked until the first unvisited node is encountered. */
1050 :
1051 : static tree
1052 278085506 : unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1053 : {
1054 278085506 : tree t = *tp;
1055 :
1056 : /* If this node has been visited, unmark it and keep looking. */
1057 278085506 : if (TREE_VISITED (t))
1058 194996007 : TREE_VISITED (t) = 0;
1059 :
1060 : /* Otherwise, don't look any deeper. */
1061 : else
1062 83089499 : *walk_subtrees = 0;
1063 :
1064 278085506 : return NULL_TREE;
1065 : }
1066 :
1067 : /* Unmark the visited trees rooted at *TP. */
1068 :
1069 : static inline void
1070 8688069 : unmark_visited (tree *tp)
1071 : {
1072 8688069 : walk_tree (tp, unmark_visited_r, NULL, NULL);
1073 8688069 : }
1074 :
1075 : /* Likewise, but mark all trees as not visited. */
1076 :
1077 : static void
1078 2896023 : unvisit_body (tree fndecl)
1079 : {
1080 2896023 : struct cgraph_node *cgn = cgraph_node::get (fndecl);
1081 :
1082 2896023 : unmark_visited (&DECL_SAVED_TREE (fndecl));
1083 2896023 : unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
1084 2896023 : unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));
1085 :
1086 2896023 : if (cgn)
1087 2918440 : for (cgn = first_nested_function (cgn);
1088 2918440 : cgn; cgn = next_nested_function (cgn))
1089 25769 : unvisit_body (cgn->decl);
1090 2896023 : }
1091 :
1092 : /* Unconditionally make an unshared copy of EXPR. This is used when using
1093 : stored expressions which span multiple functions, such as BINFO_VTABLE,
1094 : as the normal unsharing process can't tell that they're shared. */
1095 :
1096 : tree
1097 1682592704 : unshare_expr (tree expr)
1098 : {
1099 1682592704 : walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
1100 1682592704 : return expr;
1101 : }
1102 :
1103 : /* Worker for unshare_expr_without_location. */
1104 :
1105 : static tree
1106 11759654 : prune_expr_location (tree *tp, int *walk_subtrees, void *)
1107 : {
1108 11759654 : if (EXPR_P (*tp))
1109 6144217 : SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
1110 : else
1111 5615437 : *walk_subtrees = 0;
1112 11759654 : return NULL_TREE;
1113 : }
1114 :
1115 : /* Similar to unshare_expr but also prune all expression locations
1116 : from EXPR. */
1117 :
1118 : tree
1119 22601921 : unshare_expr_without_location (tree expr)
1120 : {
1121 22601921 : walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
1122 22601921 : if (EXPR_P (expr))
1123 3739720 : walk_tree (&expr, prune_expr_location, NULL, NULL);
1124 22601921 : return expr;
1125 : }
1126 :
1127 : /* Return the EXPR_LOCATION of EXPR, if it (maybe recursively) has
 1128 :    one, OR_ELSE otherwise.  The location of a STATEMENT_LIST
1129 : comprising at least one DEBUG_BEGIN_STMT followed by exactly one
1130 : EXPR is the location of the EXPR. */
1131 :
1132 : static location_t
1133 1161681 : rexpr_location (tree expr, location_t or_else = UNKNOWN_LOCATION)
1134 : {
1135 1161681 : if (!expr)
1136 : return or_else;
1137 :
1138 1161681 : if (EXPR_HAS_LOCATION (expr))
1139 808908 : return EXPR_LOCATION (expr);
1140 :
1141 352773 : if (TREE_CODE (expr) != STATEMENT_LIST)
1142 : return or_else;
1143 :
1144 0 : tree_stmt_iterator i = tsi_start (expr);
1145 :
1146 0 : bool found = false;
1147 0 : while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
1148 : {
1149 0 : found = true;
1150 0 : tsi_next (&i);
1151 : }
1152 :
1153 352773 : if (!found || !tsi_one_before_end_p (i))
1154 : return or_else;
1155 :
1156 0 : return rexpr_location (tsi_stmt (i), or_else);
1157 : }
1158 :
1159 : /* Return TRUE iff EXPR (maybe recursively) has a location; see
1160 : rexpr_location for the potential recursion. */
1161 :
1162 : static inline bool
1163 503119 : rexpr_has_location (tree expr)
1164 : {
1165 295930 : return rexpr_location (expr) != UNKNOWN_LOCATION;
1166 : }
1167 :
1168 :
1169 : /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
1170 : contain statements and have a value. Assign its value to a temporary
1171 : and give it void_type_node. Return the temporary, or NULL_TREE if
1172 : WRAPPER was already void. */
1173 :
 1174 : tree
 1175 20135844 : voidify_wrapper_expr (tree wrapper, tree temp)
 1176 : {
 1177 20135844 : tree type = TREE_TYPE (wrapper);
 1178 20135844 : if (type && !VOID_TYPE_P (type))
 1179 : {
 1180 : tree *p;
 1181 :
 1182 : /* Set p to point to the body of the wrapper. Loop until we find
 1183 : something that isn't a wrapper. Every wrapper we pass through is
 1184 : given void type and TREE_SIDE_EFFECTS, since its value now flows
 1185 : through the temporary instead. */
 1186 782276 : for (p = &wrapper; p && *p; )
 1187 : {
 1188 782276 : switch (TREE_CODE (*p))
 1189 : {
 1190 3144 : case BIND_EXPR:
 1191 3144 : TREE_SIDE_EFFECTS (*p) = 1;
 1192 3144 : TREE_TYPE (*p) = void_type_node;
 1193 : /* For a BIND_EXPR, the body is operand 1. */
 1194 3144 : p = &BIND_EXPR_BODY (*p);
 1195 3144 : break;
 1196 :
 1197 360919 : case CLEANUP_POINT_EXPR:
 1198 360919 : case TRY_FINALLY_EXPR:
 1199 360919 : case TRY_CATCH_EXPR:
 1200 360919 : TREE_SIDE_EFFECTS (*p) = 1;
 1201 360919 : TREE_TYPE (*p) = void_type_node;
 1202 360919 : p = &TREE_OPERAND (*p, 0);
 1203 360919 : break;
 1204 :
 1205 18316 : case STATEMENT_LIST:
 1206 18316 : {
 1207 18316 : tree_stmt_iterator i = tsi_last (*p);
 1208 18316 : TREE_SIDE_EFFECTS (*p) = 1;
 1209 18316 : TREE_TYPE (*p) = void_type_node;
 1210 : /* The value of a STATEMENT_LIST is its last statement;
 1211 : an empty list means there is no value to capture. */
 1212 18316 : p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
 1213 : }
 1214 18316 : break;
 1215 :
 1216 : case COMPOUND_EXPR:
 1217 : /* Advance to the last statement. Set all container types to
 1218 : void. */
 1219 64846 : for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
 1220 : {
 1221 32426 : TREE_SIDE_EFFECTS (*p) = 1;
 1222 32426 : TREE_TYPE (*p) = void_type_node;
 1223 : }
 1224 : break;
 1225 :
 1226 76 : case TRANSACTION_EXPR:
 1227 76 : TREE_SIDE_EFFECTS (*p) = 1;
 1228 76 : TREE_TYPE (*p) = void_type_node;
 1229 76 : p = &TRANSACTION_EXPR_BODY (*p);
 1230 76 : break;
 1231 :
 1232 367401 : default:
 1233 : /* Assume that any tree upon which voidify_wrapper_expr is
 1234 : directly called is a wrapper, and that its body is op0. */
 1235 367401 : if (p == &wrapper)
 1236 : {
 1237 33 : TREE_SIDE_EFFECTS (*p) = 1;
 1238 33 : TREE_TYPE (*p) = void_type_node;
 1239 33 : p = &TREE_OPERAND (*p, 0);
 1240 33 : break;
 1241 : }
 1242 367368 : goto out;
 1243 : }
 1244 : }
 1245 :
 1246 0 : out:
 1247 : /* Here P points at the innermost non-wrapper value expression,
 1248 : or is NULL when there is no value to capture. */
 1249 367368 : if (p == NULL || IS_EMPTY_STMT (*p))
 1250 : temp = NULL_TREE;
 1251 367368 : else if (temp)
 1252 : {
 1253 : /* The wrapper is on the RHS of an assignment that we're pushing
 1254 : down. */
 1255 1864 : gcc_assert (TREE_CODE (temp) == INIT_EXPR
 1256 : || TREE_CODE (temp) == MODIFY_EXPR);
 1257 1864 : TREE_OPERAND (temp, 1) = *p;
 1258 1864 : *p = temp;
 1259 : }
 1260 : else
 1261 : {
 1262 365504 : temp = create_tmp_var (type, "retval");
 1263 365504 : *p = build2 (INIT_EXPR, type, temp, *p);
 1264 : }
 1265 :
 1266 367368 : return temp;
 1267 : }
 1268 :
 1269 : return NULL_TREE;
 1270 : }
1265 :
1266 : /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1267 : a temporary through which they communicate. */
1268 :
1269 : static void
1270 7850 : build_stack_save_restore (gcall **save, gcall **restore)
1271 : {
1272 7850 : tree tmp_var;
1273 :
1274 15700 : *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
1275 7850 : tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
1276 7850 : gimple_call_set_lhs (*save, tmp_var);
1277 :
1278 7850 : *restore
1279 7850 : = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
1280 : 1, tmp_var);
1281 7850 : }
1282 :
1283 : /* Generate IFN_ASAN_MARK call that poisons shadow memory of the DECL
1284 : variable. */
1285 :
1286 : static tree
1287 437 : build_asan_poison_call_expr (tree decl)
1288 : {
1289 : /* Do not poison variables that have size equal to zero. */
1290 437 : tree unit_size = DECL_SIZE_UNIT (decl);
1291 437 : if (zerop (unit_size))
1292 : return NULL_TREE;
1293 :
1294 437 : tree base = build_fold_addr_expr (decl);
1295 :
1296 437 : return build_call_expr_internal_loc (UNKNOWN_LOCATION, IFN_ASAN_MARK,
1297 : void_type_node, 3,
1298 : build_int_cst (integer_type_node,
1299 : ASAN_MARK_POISON),
1300 : base, unit_size);
1301 : }
1302 :
1303 : /* Generate IFN_ASAN_MARK call that would poison or unpoison, depending
1304 : on POISON flag, shadow memory of a DECL variable. The call will be
1305 : put on location identified by IT iterator, where BEFORE flag drives
1306 : position where the stmt will be put. */
1307 :
1308 : static void
1309 4814 : asan_poison_variable (tree decl, bool poison, gimple_stmt_iterator *it,
1310 : bool before)
1311 : {
1312 4814 : tree unit_size = DECL_SIZE_UNIT (decl);
1313 4814 : tree base = build_fold_addr_expr (decl);
1314 :
1315 : /* Do not poison variables that have size equal to zero. */
1316 4814 : if (zerop (unit_size))
1317 4814 : return;
1318 :
1319 : /* It's necessary to have all stack variables aligned to ASAN granularity
1320 : bytes. */
1321 4800 : gcc_assert (!hwassist_sanitize_p () || hwassist_sanitize_stack_p ());
1322 4800 : unsigned shadow_granularity
1323 4800 : = (hwassist_sanitize_p ()
1324 4800 : ? HWASAN_TAG_GRANULE_SIZE : ASAN_SHADOW_GRANULARITY);
1325 4800 : if (DECL_ALIGN_UNIT (decl) <= shadow_granularity)
1326 4598 : SET_DECL_ALIGN (decl, BITS_PER_UNIT * shadow_granularity);
1327 :
1328 4800 : HOST_WIDE_INT flags = poison ? ASAN_MARK_POISON : ASAN_MARK_UNPOISON;
1329 :
1330 4800 : gimple *g
1331 4800 : = gimple_build_call_internal (IFN_ASAN_MARK, 3,
1332 4800 : build_int_cst (integer_type_node, flags),
1333 : base, unit_size);
1334 :
1335 4800 : if (before)
1336 2542 : gsi_insert_before (it, g, GSI_NEW_STMT);
1337 : else
1338 2258 : gsi_insert_after (it, g, GSI_NEW_STMT);
1339 : }
1340 :
1341 : /* Generate IFN_ASAN_MARK internal call that depending on POISON flag
1342 : either poisons or unpoisons a DECL. Created statement is appended
1343 : to SEQ_P gimple sequence. */
1344 :
1345 : static void
1346 4377 : asan_poison_variable (tree decl, bool poison, gimple_seq *seq_p)
1347 : {
1348 4377 : gimple_stmt_iterator it = gsi_last (*seq_p);
1349 4377 : bool before = false;
1350 :
1351 4377 : if (gsi_end_p (it))
1352 2327 : before = true;
1353 :
1354 4377 : asan_poison_variable (decl, poison, &it, before);
1355 4377 : }
1356 :
1357 : /* Sort pair of VAR_DECLs A and B by DECL_UID. */
1358 :
1359 : static int
1360 135 : sort_by_decl_uid (const void *a, const void *b)
1361 : {
1362 135 : const tree *t1 = (const tree *)a;
1363 135 : const tree *t2 = (const tree *)b;
1364 :
1365 135 : int uid1 = DECL_UID (*t1);
1366 135 : int uid2 = DECL_UID (*t2);
1367 :
1368 135 : if (uid1 < uid2)
1369 : return -1;
1370 54 : else if (uid1 > uid2)
1371 : return 1;
1372 : else
1373 0 : return 0;
1374 : }
1375 :
1376 : /* Generate IFN_ASAN_MARK internal call for all VARIABLES
1377 : depending on POISON flag. Created statement is appended
1378 : to SEQ_P gimple sequence. */
1379 :
1380 : static void
1381 1044671 : asan_poison_variables (hash_set<tree> *variables, bool poison, gimple_seq *seq_p)
1382 : {
1383 1044671 : unsigned c = variables->elements ();
1384 1044671 : if (c == 0)
1385 1044495 : return;
1386 :
1387 176 : auto_vec<tree> sorted_variables (c);
1388 :
1389 176 : for (hash_set<tree>::iterator it = variables->begin ();
1390 570 : it != variables->end (); ++it)
1391 197 : sorted_variables.safe_push (*it);
1392 :
1393 176 : sorted_variables.qsort (sort_by_decl_uid);
1394 :
1395 : unsigned i;
1396 : tree var;
1397 549 : FOR_EACH_VEC_ELT (sorted_variables, i, var)
1398 : {
1399 197 : asan_poison_variable (var, poison, seq_p);
1400 :
1401 : /* Add use_after_scope_memory attribute for the variable in order
1402 : to prevent re-written into SSA. */
1403 197 : if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
1404 197 : DECL_ATTRIBUTES (var)))
1405 90 : DECL_ATTRIBUTES (var)
1406 180 : = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE),
1407 : integer_one_node,
1408 90 : DECL_ATTRIBUTES (var));
1409 : }
1410 176 : }
1411 :
1412 : /* Gimplify a BIND_EXPR. Just voidify and recurse. */
1413 :
 1414 : static enum gimplify_status
 1415 5934815 : gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
 1416 : {
 1417 5934815 : tree bind_expr = *expr_p;
 1418 5934815 : bool old_keep_stack = gimplify_ctxp->keep_stack;
 1419 5934815 : bool old_save_stack = gimplify_ctxp->save_stack;
 1420 5934815 : tree t;
 1421 5934815 : gbind *bind_stmt;
 1422 5934815 : gimple_seq body, cleanup;
 1423 5934815 : gcall *stack_save;
 1424 5934815 : location_t start_locus = 0, end_locus = 0;
 1425 5934815 : tree ret_clauses = NULL;
 1426 :
 1427 : /* If the BIND_EXPR yields a value, make it void and capture the value
 1428 : in TEMP (returned at the end so the caller can use it). */
 1429 5934815 : tree temp = voidify_wrapper_expr (bind_expr, NULL);
 1430 :
 1431 : /* Mark variables seen in this bind expr. */
 1432 12659068 : for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
 1433 : {
 1434 6724253 : if (VAR_P (t))
 1435 : {
 1436 6095437 : struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
 1437 6095437 : tree attr;
 1438 :
 1439 : /* Lower the OpenMP 'allocate' directive for this local, when
 1440 : its "omp allocate" attribute is present. */
 1441 6095437 : if (flag_openmp
 1442 251114 : && !is_global_var (t)
 1443 242743 : && !TREE_STATIC (t)
 1444 242743 : && DECL_CONTEXT (t) == current_function_decl
 1445 242743 : && TREE_USED (t)
 1446 6336575 : && (attr = lookup_attribute ("omp allocate", DECL_ATTRIBUTES (t)))
 1447 : != NULL_TREE)
 1448 : {
 1449 76 : gcc_assert (!DECL_HAS_VALUE_EXPR_P (t));
 1450 76 : tree alloc = TREE_PURPOSE (TREE_VALUE (attr));
 1451 76 : tree align = TREE_VALUE (TREE_VALUE (attr));
 1452 : /* Allocate directives that appear in a target region must specify
 1453 : an allocator clause unless a requires directive with the
 1454 : dynamic_allocators clause is present in the same compilation
 1455 : unit. */
 1456 76 : bool missing_dyn_alloc = false;
 1457 76 : if (alloc == NULL_TREE
 1458 48 : && ((omp_requires_mask & OMP_REQUIRES_DYNAMIC_ALLOCATORS)
 1459 : == 0))
 1460 : {
 1461 : /* This comes too early for omp_discover_declare_target...,
 1462 : but should at least catch the most common cases. */
 1463 42 : missing_dyn_alloc
 1464 42 : = cgraph_node::get (current_function_decl)->offloadable;
 1465 42 : for (struct gimplify_omp_ctx *ctx2 = ctx;
 1466 48 : ctx2 && !missing_dyn_alloc; ctx2 = ctx2->outer_context)
 1467 6 : if (ctx2->code == OMP_TARGET)
 1468 2 : missing_dyn_alloc = true;
 1469 : }
 1470 42 : if (missing_dyn_alloc)
 1471 4 : error_at (DECL_SOURCE_LOCATION (t),
 1472 : "%<allocate%> directive for %qD inside a target "
 1473 : "region must specify an %<allocator%> clause", t);
 1474 : /* Skip for omp_default_mem_alloc (= 1),
 1475 : unless align is present. For C/C++, there should be always a
 1476 : statement list following if TREE_USED, except for, e.g., using
 1477 : this decl in a static_assert; in that case, only a single
 1478 : DECL_EXPR remains, which can be skipped here. */
 1479 72 : else if (!errorcount
 1480 57 : && (align != NULL_TREE
 1481 57 : || alloc == NULL_TREE
 1482 12 : || !integer_onep (alloc))
 1483 127 : && (lang_GNU_Fortran ()
 1484 27 : || (TREE_CODE (BIND_EXPR_BODY (bind_expr))
 1485 : != DECL_EXPR)))
 1486 : {
 1487 : /* Fortran might already use a pointer type internally;
 1488 : use that pointer except for type(C_ptr) and type(C_funptr);
 1489 : note that normal proc pointers are rejected. */
 1490 55 : tree type = TREE_TYPE (t);
 1491 55 : tree tmp, v;
 1492 55 : if (lang_GNU_Fortran ()
 1493 28 : && POINTER_TYPE_P (type)
 1494 8 : && TREE_TYPE (type) != void_type_node
 1495 61 : && TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE)
 1496 : {
 1497 6 : type = TREE_TYPE (type);
 1498 6 : v = t;
 1499 : }
 1500 : else
 1501 : {
 1502 49 : tmp = build_pointer_type (type);
 1503 49 : v = create_tmp_var (tmp, get_name (t));
 1504 49 : DECL_IGNORED_P (v) = 0;
 1505 49 : DECL_ATTRIBUTES (v)
 1506 49 : = tree_cons (get_identifier ("omp allocate var"),
 1507 : build_tree_list (NULL_TREE, t),
 1508 : remove_attribute ("omp allocate",
 1509 49 : DECL_ATTRIBUTES (t)));
 1510 49 : tmp = build_fold_indirect_ref (v);
 1511 49 : TREE_THIS_NOTRAP (tmp) = 1;
 1512 49 : SET_DECL_VALUE_EXPR (t, tmp);
 1513 49 : DECL_HAS_VALUE_EXPR_P (t) = 1;
 1514 : }
 1515 55 : tree sz = TYPE_SIZE_UNIT (type);
 1516 : /* The size to use in Fortran might not match TYPE_SIZE_UNIT;
 1517 : hence, for some decls, a size variable is saved in the
 1518 : attributes; use it, if available. */
 1519 55 : if (TREE_CHAIN (TREE_VALUE (attr))
 1520 28 : && TREE_CHAIN (TREE_CHAIN (TREE_VALUE (attr)))
 1521 61 : && TREE_PURPOSE (
 1522 : TREE_CHAIN (TREE_CHAIN (TREE_VALUE (attr)))))
 1523 : {
 1524 6 : sz = TREE_CHAIN (TREE_CHAIN (TREE_VALUE (attr)));
 1525 6 : sz = TREE_PURPOSE (sz);
 1526 : }
 1527 55 : if (alloc == NULL_TREE)
 1528 36 : alloc = build_zero_cst (ptr_type_node);
 1529 55 : if (align == NULL_TREE)
 1530 44 : align = build_int_cst (size_type_node, DECL_ALIGN_UNIT (t));
 1531 : else
 1532 11 : align = build_int_cst (size_type_node,
 1533 11 : MAX (tree_to_uhwi (align),
 1534 : DECL_ALIGN_UNIT (t)));
 1535 55 : location_t loc = DECL_SOURCE_LOCATION (t);
 1536 55 : tmp = builtin_decl_explicit (BUILT_IN_GOMP_ALLOC);
 1537 55 : tmp = build_call_expr_loc (loc, tmp, 3, align, sz, alloc);
 1538 55 : tmp = fold_build2_loc (loc, MODIFY_EXPR, TREE_TYPE (v), v,
 1539 55 : fold_convert (TREE_TYPE (v), tmp));
 1540 55 : gcc_assert (BIND_EXPR_BODY (bind_expr) != NULL_TREE);
 1541 : /* Ensure that either TREE_CHAIN (TREE_VALUE (attr) is set
 1542 : and GOMP_FREE added here or that DECL_HAS_VALUE_EXPR_P (t)
 1543 : is set, using in a condition much further below. */
 1544 61 : gcc_assert (DECL_HAS_VALUE_EXPR_P (t)
 1545 : || TREE_CHAIN (TREE_VALUE (attr)));
 1546 55 : if (TREE_CHAIN (TREE_VALUE (attr)))
 1547 : {
 1548 : /* Fortran is special as it does not have properly nest
 1549 : declarations in blocks. And as there is no
 1550 : initializer, there is also no expression to look for.
 1551 : Hence, the FE makes the statement list of the
 1552 : try-finally block available. We can put the GOMP_alloc
 1553 : at the top, unless an allocator or size expression
 1554 : requires to put it afterward; note that the size is
 1555 : always later in generated code; for strings, no
 1556 : size expr but still an expr might be available.
 1557 : As LTO does not handle a statement list, 'sl' has
 1558 : to be removed; done so by removing the attribute. */
 1559 28 : DECL_ATTRIBUTES (t)
 1560 28 : = remove_attribute ("omp allocate",
 1561 28 : DECL_ATTRIBUTES (t));
 1562 28 : tree sl = TREE_PURPOSE (TREE_CHAIN (TREE_VALUE (attr)));
 1563 28 : tree_stmt_iterator e = tsi_start (sl);
 1564 28 : tree needle = NULL_TREE;
 1565 28 : if (TREE_CHAIN (TREE_CHAIN (TREE_VALUE (attr))))
 1566 : {
 1567 6 : needle = TREE_CHAIN (TREE_CHAIN (TREE_VALUE (attr)));
 1568 6 : needle = (TREE_VALUE (needle) ? TREE_VALUE (needle)
 1569 : : sz);
 1570 : }
 1571 22 : else if (TREE_CHAIN (TREE_CHAIN (TREE_VALUE (attr))))
 1572 : needle = sz;
 1573 22 : else if (DECL_P (alloc) && DECL_ARTIFICIAL (alloc))
 1574 : needle = alloc;
 1575 :
 1576 10 : if (needle != NULL_TREE)
 1577 : {
 1578 49 : while (!tsi_end_p (e))
 1579 : {
 1580 49 : if (*e == needle
 1581 49 : || (TREE_CODE (*e) == MODIFY_EXPR
 1582 49 : && TREE_OPERAND (*e, 0) == needle))
 1583 : break;
 1584 39 : ++e;
 1585 : }
 1586 10 : gcc_assert (!tsi_end_p (e));
 1587 : }
 1588 28 : tsi_link_after (&e, tmp, TSI_SAME_STMT);
 1589 :
 1590 : /* As the cleanup is in BIND_EXPR_BODY, GOMP_free is added
 1591 : here; for C/C++ it will be added in the 'cleanup'
 1592 : section after gimplification. But Fortran already has
 1593 : a try-finally block. */
 1594 28 : sl = TREE_VALUE (TREE_CHAIN (TREE_VALUE (attr)));
 1595 28 : e = tsi_last (sl);
 1596 28 : tmp = builtin_decl_explicit (BUILT_IN_GOMP_FREE);
 1597 28 : tmp = build_call_expr_loc (EXPR_LOCATION (*e), tmp, 2, v,
 1598 : build_zero_cst (ptr_type_node));
 1599 28 : tsi_link_after (&e, tmp, TSI_SAME_STMT);
 1600 28 : tmp = build_clobber (TREE_TYPE (v), CLOBBER_STORAGE_END);
 1601 28 : tmp = fold_build2_loc (loc, MODIFY_EXPR, TREE_TYPE (v), v,
 1602 28 : fold_convert (TREE_TYPE (v), tmp));
 1603 28 : ++e;
 1604 28 : tsi_link_after (&e, tmp, TSI_SAME_STMT);
 1605 : }
 1606 : else
 1607 : {
 1608 27 : gcc_assert (TREE_CODE (BIND_EXPR_BODY (bind_expr))
 1609 : == STATEMENT_LIST);
 1610 27 : tree_stmt_iterator e;
 1611 27 : e = tsi_start (BIND_EXPR_BODY (bind_expr));
 1612 117 : while (!tsi_end_p (e))
 1613 : {
 1614 90 : if ((TREE_CODE (*e) == DECL_EXPR
 1615 60 : && TREE_OPERAND (*e, 0) == t)
 1616 123 : || (TREE_CODE (*e) == CLEANUP_POINT_EXPR
 1617 0 : && (TREE_CODE (TREE_OPERAND (*e, 0))
 1618 : == DECL_EXPR)
 1619 0 : && (TREE_OPERAND (TREE_OPERAND (*e, 0), 0)
 1620 : == t)))
 1621 : break;
 1622 63 : ++e;
 1623 : }
 1624 27 : gcc_assert (!tsi_end_p (e));
 1625 27 : tsi_link_before (&e, tmp, TSI_SAME_STMT);
 1626 : }
 1627 : }
 1628 : }
 1629 :
 1630 : /* Mark variable as local. */
 1631 6095437 : if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t))
 1632 : {
 1633 98812 : if (! DECL_SEEN_IN_BIND_EXPR_P (t)
 1634 98812 : || splay_tree_lookup (ctx->variables,
 1635 : (splay_tree_key) t) == NULL)
 1636 : {
 1637 98812 : int flag = GOVD_LOCAL;
 1638 98812 : if (ctx->region_type == ORT_SIMD
 1639 4374 : && TREE_ADDRESSABLE (t)
 1640 81 : && !TREE_STATIC (t))
 1641 : {
 1642 69 : if (TREE_CODE (DECL_SIZE_UNIT (t)) != INTEGER_CST)
 1643 2 : ctx->add_safelen1 = true;
 1644 : else
 1645 : flag = GOVD_PRIVATE;
 1646 : }
 1647 98812 : omp_add_variable (ctx, t, flag | GOVD_SEEN);
 1648 : }
 1649 : /* Static locals inside of target construct or offloaded
 1650 : routines need to be "omp declare target". */
 1651 98812 : if (TREE_STATIC (t))
 1652 1307 : for (; ctx; ctx = ctx->outer_context)
 1653 1152 : if ((ctx->region_type & ORT_TARGET) != 0)
 1654 : {
 1655 939 : if (!lookup_attribute ("omp declare target",
 1656 939 : DECL_ATTRIBUTES (t)))
 1657 : {
 1658 927 : tree id = get_identifier ("omp declare target");
 1659 927 : DECL_ATTRIBUTES (t)
 1660 927 : = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (t));
 1661 927 : varpool_node *node = varpool_node::get (t);
 1662 927 : if (node)
 1663 : {
 1664 33 : node->offloadable = 1;
 1665 33 : if (ENABLE_OFFLOADING && !DECL_EXTERNAL (t))
 1666 : {
 1667 : g->have_offload = true;
 1668 : if (!in_lto_p)
 1669 : vec_safe_push (offload_vars, t);
 1670 : }
 1671 : }
 1672 : }
 1673 : break;
 1674 : }
 1675 : }
 1676 :
 1677 6095437 : DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
 1678 :
 1679 6095437 : if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
 1680 1085 : cfun->has_local_explicit_reg_vars = true;
 1681 : }
 1682 : }
 1683 :
 1684 11869630 : bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
 1685 5934815 : BIND_EXPR_BLOCK (bind_expr));
 1686 5934815 : gimple_push_bind_expr (bind_stmt);
 1687 :
 1688 5934815 : gimplify_ctxp->keep_stack = false;
 1689 5934815 : gimplify_ctxp->save_stack = false;
 1690 :
 1691 : /* Gimplify the body into the GIMPLE_BIND tuple's body. */
 1692 5934815 : body = NULL;
 1693 5934815 : gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
 1694 5934815 : gimple_bind_set_body (bind_stmt, body);
 1695 :
 1696 : /* Source location wise, the cleanup code (stack_restore and clobbers)
 1697 : belongs to the end of the block, so propagate what we have. The
 1698 : stack_save operation belongs to the beginning of block, which we can
 1699 : infer from the bind_expr directly if the block has no explicit
 1700 : assignment. */
 1701 5934815 : if (BIND_EXPR_BLOCK (bind_expr))
 1702 : {
 1703 5803729 : end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
 1704 5803729 : start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
 1705 : }
 1706 5803729 : if (start_locus == 0)
 1707 5934815 : start_locus = EXPR_LOCATION (bind_expr);
 1708 :
 1709 5934815 : cleanup = NULL;
 1710 5934815 : stack_save = NULL;
 1711 :
 1712 : /* Add clobbers for all variables that go out of scope. */
 1713 12659068 : for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
 1714 : {
 1715 6724253 : if (VAR_P (t)
 1716 6095437 : && !is_global_var (t)
 1717 12604037 : && DECL_CONTEXT (t) == current_function_decl)
 1718 : {
 1719 5879784 : if (flag_openmp
 1720 242728 : && DECL_HAS_VALUE_EXPR_P (t)
 1721 1060 : && TREE_USED (t)
 1722 5880825 : && lookup_attribute ("omp allocate", DECL_ATTRIBUTES (t)))
 1723 : {
 1724 : /* For Fortran, TREE_CHAIN (TREE_VALUE (attr)) is set, which
 1725 : causes that the GOMP_free call is already added above;
 1726 : and "omp allocate" is removed from DECL_ATTRIBUTES. */
 1727 27 : tree v = TREE_OPERAND (DECL_VALUE_EXPR (t), 0);
 1728 27 : tree tmp = builtin_decl_explicit (BUILT_IN_GOMP_FREE);
 1729 27 : tmp = build_call_expr_loc (end_locus, tmp, 2, v,
 1730 : build_zero_cst (ptr_type_node));
 1731 27 : gimplify_and_add (tmp, &cleanup);
 1732 27 : gimple *clobber_stmt;
 1733 27 : tmp = build_clobber (TREE_TYPE (v), CLOBBER_STORAGE_END);
 1734 27 : clobber_stmt = gimple_build_assign (v, tmp);
 1735 27 : gimple_set_location (clobber_stmt, end_locus);
 1736 27 : gimplify_seq_add_stmt (&cleanup, clobber_stmt);
 1737 : }
 1738 5879784 : if (!DECL_HARD_REGISTER (t)
 1739 5878699 : && !TREE_THIS_VOLATILE (t)
 1740 5839828 : && !DECL_HAS_VALUE_EXPR_P (t)
 1741 : /* Only care for variables that have to be in memory. Others
 1742 : will be rewritten into SSA names, hence moved to the
 1743 : top-level. */
 1744 5739391 : && !is_gimple_reg (t)
 1745 7097383 : && flag_stack_reuse != SR_NONE)
 1746 : {
 1747 1214531 : tree clobber = build_clobber (TREE_TYPE (t), CLOBBER_STORAGE_END);
 1748 1214531 : gimple *clobber_stmt;
 1749 1214531 : clobber_stmt = gimple_build_assign (t, clobber);
 1750 1214531 : gimple_set_location (clobber_stmt, end_locus);
 1751 1214531 : gimplify_seq_add_stmt (&cleanup, clobber_stmt);
 1752 : }
 1753 :
 1754 5879784 : if (flag_openacc && oacc_declare_returns != NULL)
 1755 : {
 1756 207 : tree key = t;
 1757 207 : if (DECL_HAS_VALUE_EXPR_P (key))
 1758 : {
 1759 8 : key = DECL_VALUE_EXPR (key);
 1760 8 : if (INDIRECT_REF_P (key))
 1761 8 : key = TREE_OPERAND (key, 0);
 1762 : }
 1763 207 : tree *c = oacc_declare_returns->get (key);
 1764 207 : if (c != NULL)
 1765 : {
 1766 116 : if (ret_clauses)
 1767 64 : OMP_CLAUSE_CHAIN (*c) = ret_clauses;
 1768 :
 1769 116 : ret_clauses = unshare_expr (*c);
 1770 :
 1771 116 : oacc_declare_returns->remove (key);
 1772 :
 1773 116 : if (oacc_declare_returns->is_empty ())
 1774 : {
 1775 40 : delete oacc_declare_returns;
 1776 40 : oacc_declare_returns = NULL;
 1777 : }
 1778 : }
 1779 : }
 1780 : }
 1781 :
 1782 6724253 : if (asan_poisoned_variables != NULL
 1783 6724253 : && asan_poisoned_variables->contains (t))
 1784 : {
 1785 2090 : asan_poisoned_variables->remove (t);
 1786 2090 : asan_poison_variable (t, true, &cleanup);
 1787 : }
 1788 :
 1789 6724253 : if (gimplify_ctxp->live_switch_vars != NULL
 1790 6724253 : && gimplify_ctxp->live_switch_vars->contains (t))
 1791 55 : gimplify_ctxp->live_switch_vars->remove (t);
 1792 : }
 1793 :
 1794 : /* If the code both contains VLAs and calls alloca, then we cannot reclaim
 1795 : the stack space allocated to the VLAs. */
 1796 5934815 : if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack)
 1797 : {
 1798 7850 : gcall *stack_restore;
 1799 :
 1800 : /* Save stack on entry and restore it on exit. Add a try_finally
 1801 : block to achieve this. */
 1802 7850 : build_stack_save_restore (&stack_save, &stack_restore);
 1803 :
 1804 7850 : gimple_set_location (stack_save, start_locus);
 1805 7850 : gimple_set_location (stack_restore, end_locus);
 1806 :
 1807 7850 : gimplify_seq_add_stmt (&cleanup, stack_restore);
 1808 : }
 1809 :
 1810 5934815 : if (ret_clauses)
 1811 : {
 1812 52 : gomp_target *stmt;
 1813 52 : gimple_stmt_iterator si = gsi_start (cleanup);
 1814 :
 1815 52 : stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
 1816 : ret_clauses);
 1817 52 : gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
 1818 : }
 1819 :
 1820 : /* If any cleanup was generated, wrap the body and the cleanup in a
 1821 : GIMPLE_TRY_FINALLY, with the stack save (if any) ahead of it. */
 1821 5934815 : if (cleanup)
 1822 : {
 1823 743696 : gtry *gs;
 1824 743696 : gimple_seq new_body;
 1825 :
 1826 743696 : new_body = NULL;
 1827 743696 : gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
 1828 : GIMPLE_TRY_FINALLY);
 1829 :
 1830 743696 : if (stack_save)
 1831 7850 : gimplify_seq_add_stmt (&new_body, stack_save);
 1832 743696 : gimplify_seq_add_stmt (&new_body, gs);
 1833 743696 : gimple_bind_set_body (bind_stmt, new_body);
 1834 : }
 1835 :
 1836 : /* keep_stack propagates all the way up to the outermost BIND_EXPR. */
 1837 5934815 : if (!gimplify_ctxp->keep_stack)
 1838 5919082 : gimplify_ctxp->keep_stack = old_keep_stack;
 1839 5934815 : gimplify_ctxp->save_stack = old_save_stack;
 1840 :
 1841 5934815 : gimple_pop_bind_expr ();
 1842 :
 1843 5934815 : gimplify_seq_add_stmt (pre_p, bind_stmt);
 1844 :
 1845 5934815 : if (temp)
 1846 : {
 1847 1709 : *expr_p = temp;
 1848 1709 : return GS_OK;
 1849 : }
 1850 :
 1851 5933106 : *expr_p = NULL_TREE;
 1852 5933106 : return GS_ALL_DONE;
 1853 : }
1849 :
1850 : /* Maybe add early return predict statement to PRE_P sequence. */
1851 :
1852 : static void
1853 2233828 : maybe_add_early_return_predict_stmt (gimple_seq *pre_p)
1854 : {
1855 : /* If we are not in a conditional context, add PREDICT statement. */
1856 2233828 : if (gimple_conditional_context ())
1857 : {
1858 455034 : gimple *predict = gimple_build_predict (PRED_TREE_EARLY_RETURN,
1859 : NOT_TAKEN);
1860 455034 : gimplify_seq_add_stmt (pre_p, predict);
1861 : }
1862 2233828 : }
1863 :
1864 : /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1865 : GIMPLE value, it is assigned to a new temporary and the statement is
1866 : re-written to return the temporary.
1867 :
1868 : PRE_P points to the sequence where side effects that must happen before
1869 : STMT should be stored. */
1870 :
1871 : static enum gimplify_status
1872 2233834 : gimplify_return_expr (tree stmt, gimple_seq *pre_p)
1873 : {
1874 2233834 : greturn *ret;
1875 2233834 : tree ret_expr = TREE_OPERAND (stmt, 0);
1876 2233834 : tree result_decl, result;
1877 :
 : /* The front end already diagnosed this; fail quietly.  */
1878 2233834 : if (ret_expr == error_mark_node)
1879 : return GS_ERROR;
1880 :
 : /* "return;" or a return of the bare RESULT_DECL needs no temporary;
 : emit the GIMPLE_RETURN directly.  */
1881 2233828 : if (!ret_expr
1882 2134607 : || TREE_CODE (ret_expr) == RESULT_DECL)
1883 : {
1884 112555 : maybe_add_early_return_predict_stmt (pre_p);
1885 112555 : greturn *ret = gimple_build_return (ret_expr);
1886 112555 : copy_warning (ret, stmt);
1887 112555 : gimplify_seq_add_stmt (pre_p, ret);
1888 112555 : return GS_ALL_DONE;
1889 : }
1890 :
1891 2121273 : if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1892 : result_decl = NULL_TREE;
1893 2120952 : else if (TREE_CODE (ret_expr) == COMPOUND_EXPR)
1894 : {
1895 : /* Used in C++ for handling EH cleanup of the return value if a local
1896 : cleanup throws. Assume the front-end knows what it's doing. */
1897 5099 : result_decl = DECL_RESULT (current_function_decl);
1898 : /* But crash if we end up trying to modify ret_expr below. */
1899 5099 : ret_expr = NULL_TREE;
1900 : }
1901 : else
1902 : {
 : /* RET_EXPR is a MODIFY_EXPR/INIT_EXPR storing into the RESULT_DECL.  */
1903 2115853 : result_decl = TREE_OPERAND (ret_expr, 0);
1904 :
1905 : /* See through a return by reference. */
1906 2115853 : if (INDIRECT_REF_P (result_decl))
1907 41914 : result_decl = TREE_OPERAND (result_decl, 0);
1908 :
1909 2115853 : gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1910 : || TREE_CODE (ret_expr) == INIT_EXPR)
1911 : && TREE_CODE (result_decl) == RESULT_DECL);
1912 : }
1913 :
1914 : /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1915 : Recall that aggregate_value_p is FALSE for any aggregate type that is
1916 : returned in registers. If we're returning values in registers, then
1917 : we don't want to extend the lifetime of the RESULT_DECL, particularly
1918 : across another call. In addition, for those aggregates for which
1919 : hard_function_value generates a PARALLEL, we'll die during normal
1920 : expansion of structure assignments; there's special code in expand_return
1921 : to handle this case that does not exist in expand_expr. */
1922 5099 : if (!result_decl)
1923 : result = NULL_TREE;
1924 2120952 : else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1925 : {
 : /* For a variable-sized result, its size expressions must be
 : gimplified before the RESULT_DECL can be used.  */
1926 177326 : if (!poly_int_tree_p (DECL_SIZE (result_decl)))
1927 : {
1928 43 : if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
1929 0 : gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
1930 : /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1931 : should be effectively allocated by the caller, i.e. all calls to
1932 : this function must be subject to the Return Slot Optimization. */
1933 43 : gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
1934 43 : gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
1935 : }
1936 : result = result_decl;
1937 : }
 : /* All return statements in the function share one return temporary.  */
1938 1943626 : else if (gimplify_ctxp->return_temp)
1939 : result = gimplify_ctxp->return_temp;
1940 : else
1941 : {
1942 1441305 : result = create_tmp_reg (TREE_TYPE (result_decl));
1943 :
1944 : /* ??? With complex control flow (usually involving abnormal edges),
1945 : we can wind up warning about an uninitialized value for this. Due
1946 : to how this variable is constructed and initialized, this is never
1947 : true. Give up and never warn. */
1948 1441305 : suppress_warning (result, OPT_Wuninitialized);
1949 :
1950 1441305 : gimplify_ctxp->return_temp = result;
1951 : }
1952 :
1953 : /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1954 : Then gimplify the whole thing. */
1955 2121273 : if (result != result_decl)
1956 1943626 : TREE_OPERAND (ret_expr, 0) = result;
1957 :
1958 2121273 : gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1959 :
1960 2121273 : maybe_add_early_return_predict_stmt (pre_p);
1961 2121273 : ret = gimple_build_return (result);
1962 2121273 : copy_warning (ret, stmt);
1963 2121273 : gimplify_seq_add_stmt (pre_p, ret);
1964 :
1965 2121273 : return GS_ALL_DONE;
1966 : }
1967 :
1968 : /* Gimplify a variable-length array DECL. */
1969 :
1970 : static void
1971 8949 : gimplify_vla_decl (tree decl, gimple_seq *seq_p)
1972 : {
1973 : /* This is a variable-sized decl. Simplify its size and mark it
1974 : for deferred expansion. */
1975 8949 : tree t, addr, ptr_type;
1976 :
1977 8949 : gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
1978 8949 : gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);
1979 :
1980 : /* Don't mess with a DECL_VALUE_EXPR set by the front-end. */
1981 8949 : if (DECL_HAS_VALUE_EXPR_P (decl))
1982 : return;
1983 :
1984 : /* All occurrences of this decl in final gimplified code will be
1985 : replaced by indirection. Setting DECL_VALUE_EXPR does two
1986 : things: First, it lets the rest of the gimplifier know what
1987 : replacement to use. Second, it lets the debug info know
1988 : where to find the value. */
1989 8944 : ptr_type = build_pointer_type (TREE_TYPE (decl));
1990 8944 : addr = create_tmp_var (ptr_type, get_name (decl));
 : /* Keep ADDR around for debug info; DECL's location is expressed
 : through it via the DECL_VALUE_EXPR set below.  */
1991 8944 : DECL_IGNORED_P (addr) = 0;
1992 8944 : t = build_fold_indirect_ref (addr);
 : /* Mark the indirection as non-trapping.  */
1993 8944 : TREE_THIS_NOTRAP (t) = 1;
1994 8944 : SET_DECL_VALUE_EXPR (decl, t);
1995 8944 : DECL_HAS_VALUE_EXPR_P (decl) = 1;
1996 :
1997 8944 : t = build_alloca_call_expr (DECL_SIZE_UNIT (decl), DECL_ALIGN (decl),
1998 8944 : max_int_size_in_bytes (TREE_TYPE (decl)));
1999 : /* The call has been built for a variable-sized object. */
2000 8944 : CALL_ALLOCA_FOR_VAR_P (t) = 1;
2001 8944 : t = fold_convert (ptr_type, t);
2002 8944 : t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
2003 :
 : /* Emit the assignment of the alloca result to ADDR.  */
2004 8944 : gimplify_and_add (t, seq_p);
2005 :
2006 : /* Record the dynamic allocation associated with DECL if requested. */
2007 8944 : if (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC)
2008 0 : record_dynamic_alloc (decl);
2009 : }
2010 :
2011 : /* A helper function to be called via walk_tree. Mark all labels under *TP
2012 : as being forced. To be called for DECL_INITIAL of static variables. */
2013 :
2014 : static tree
2015 849943 : force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2016 : {
 : /* Do not walk into types.  */
2017 849943 : if (TYPE_P (*tp))
2018 0 : *walk_subtrees = 0;
2019 849943 : if (TREE_CODE (*tp) == LABEL_DECL)
2020 : {
 : /* A label referenced from a static initializer must be kept; also
 : record on the function that such a label exists.  */
2021 926 : FORCED_LABEL (*tp) = 1;
2022 926 : cfun->has_forced_label_in_static = 1;
2023 : }
2024 :
 : /* Returning NULL_TREE continues the walk over the whole tree.  */
2025 849943 : return NULL_TREE;
2026 : }
2027 :
2028 : /* Generate an initialization to automatic variable DECL based on INIT_TYPE.
2029 : Build a call to internal const function DEFERRED_INIT:
2030 : 1st argument: SIZE of the DECL;
2031 : 2nd argument: INIT_TYPE;
2032 : 3rd argument: NAME of the DECL;
2033 :
2034 : as LHS = DEFERRED_INIT (SIZE of the DECL, INIT_TYPE, NAME of the DECL). */
2035 :
2036 : static void
2037 110213 : gimple_add_init_for_auto_var (tree decl,
2038 : enum auto_init_type init_type,
2039 : gimple_seq *seq_p)
2040 : {
2041 110213 : gcc_assert (auto_var_p (decl));
2042 110213 : gcc_assert (init_type > AUTO_INIT_UNINITIALIZED);
2043 :
2044 110213 : const location_t loc = DECL_SOURCE_LOCATION (decl);
2045 110213 : tree decl_size = TYPE_SIZE_UNIT (TREE_TYPE (decl));
2046 110213 : tree init_type_node = build_int_cst (integer_type_node, (int) init_type);
2047 110213 : tree decl_name;
2048 :
2049 110213 : if (DECL_NAME (decl))
2050 62728 : decl_name = build_string_literal (DECL_NAME (decl))
2051 : else
2052 : {
 : /* Anonymous decls get a "D.<uid>" name; the buffer holds "D.",
 : the decimal digits of an unsigned int, and the terminator.  */
2053 47485 : char decl_name_anonymous[3 + (HOST_BITS_PER_INT + 2) / 3];
2054 47485 : sprintf (decl_name_anonymous, "D.%u", DECL_UID (decl));
2055 47485 : decl_name = build_string_literal (decl_name_anonymous);
2056 : }
2057 :
2058 110213 : tree call = build_call_expr_internal_loc (loc, IFN_DEFERRED_INIT,
2059 110213 : TREE_TYPE (decl), 3,
2060 : decl_size, init_type_node,
2061 : decl_name);
2062 :
 : /* Emit DECL = .DEFERRED_INIT (SIZE, INIT_TYPE, NAME).  */
2063 110213 : gimplify_assign (decl, call, seq_p);
2064 110213 : }
2065 :
2066 : /* Generate padding initialization for automatic variable DECL.
2067 : C guarantees that brace-init with fewer initializers than members
2068 : aggregate will initialize the rest of the aggregate as-if it were
2069 : static initialization. In turn static initialization guarantees
2070 : that padding is initialized to zero. So, we always initialize paddings
2071 : to zeroes regardless INIT_TYPE.
2072 : To do the padding initialization, we insert a call to
2073 : __builtin_clear_padding (&decl, 0, for_auto_init = true).
2074 : Note, we add an additional dummy argument for __builtin_clear_padding,
2075 : 'for_auto_init' to distinguish whether this call is for automatic
2076 : variable initialization or not.
2077 : */
2078 : static void
2079 114 : gimple_add_padding_init_for_auto_var (tree decl, bool is_vla,
2080 : gimple_seq *seq_p)
2081 : {
2082 114 : tree addr_of_decl = NULL_TREE;
2083 114 : tree fn = builtin_decl_explicit (BUILT_IN_CLEAR_PADDING);
2084 :
2085 114 : if (is_vla)
2086 : {
2087 : /* The temporary address variable for this vla should be
2088 : created in gimplify_vla_decl. */
2089 0 : gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
2090 0 : gcc_assert (INDIRECT_REF_P (DECL_VALUE_EXPR (decl)));
2091 0 : addr_of_decl = TREE_OPERAND (DECL_VALUE_EXPR (decl), 0);
2092 : }
2093 : else
2094 : {
 : /* Taking the variable's address; it can no longer be a register.  */
2095 114 : mark_addressable (decl);
2096 114 : addr_of_decl = build_fold_addr_expr (decl);
2097 : }
2098 :
 : /* The non-zero second argument is the 'for_auto_init' marker described
 : in the function comment above.  */
2099 114 : gimple *call = gimple_build_call (fn, 2, addr_of_decl,
2100 114 : build_one_cst (TREE_TYPE (addr_of_decl)));
2101 114 : gimplify_seq_add_stmt (seq_p, call);
2102 114 : }
2103 :
2104 : /* Return true if the DECL need to be automatically initialized by the
2105 : compiler. */
2106 : static bool
2107 4152105 : var_needs_auto_init_p (tree decl)
2108 : {
2109 4152105 : if (auto_var_p (decl)
2110 4058054 : && (TREE_CODE (decl) != VAR_DECL || !DECL_HARD_REGISTER (decl))
2111 4057349 : && flag_auto_var_init > AUTO_INIT_UNINITIALIZED
2112 205010 : && !lookup_attribute ("uninitialized", DECL_ATTRIBUTES (decl))
2113 205002 : && !lookup_attribute ("indeterminate", DECL_ATTRIBUTES (decl))
2114 204993 : && !OPAQUE_TYPE_P (TREE_TYPE (decl))
2115 4357098 : && !is_empty_type (TREE_TYPE (decl)))
2116 : return true;
2117 : return false;
2118 : }
2119 :
2120 : /* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
2121 : and initialization explicit. */
2122 :
2123 : static enum gimplify_status
2124 6498798 : gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
2125 : {
2126 6498798 : tree stmt = *stmt_p;
2127 6498798 : tree decl = DECL_EXPR_DECL (stmt);
2128 :
 : /* The DECL_EXPR itself is consumed here; nothing replaces it.  */
2129 6498798 : *stmt_p = NULL_TREE;
2130 :
2131 6498798 : if (TREE_TYPE (decl) == error_mark_node)
2132 : return GS_ERROR;
2133 :
 : /* Make sure the size expressions of the decl's type are gimplified.  */
2134 6498720 : if ((TREE_CODE (decl) == TYPE_DECL
2135 6456246 : || VAR_P (decl))
2136 12951918 : && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
2137 : {
2138 690089 : gimplify_type_sizes (TREE_TYPE (decl), seq_p);
2139 690089 : if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
2140 20908 : gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl)), seq_p);
2141 : }
2142 :
2143 : /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
2144 : in case its size expressions contain problematic nodes like CALL_EXPR. */
2145 6498720 : if (TREE_CODE (decl) == TYPE_DECL
2146 42474 : && DECL_ORIGINAL_TYPE (decl)
2147 6504041 : && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
2148 : {
2149 0 : gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
2150 0 : if (TREE_CODE (DECL_ORIGINAL_TYPE (decl)) == REFERENCE_TYPE)
2151 0 : gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl)), seq_p);
2152 : }
2153 :
2154 6498720 : if (VAR_P (decl) && !DECL_EXTERNAL (decl))
2155 : {
2156 6453198 : tree init = DECL_INITIAL (decl);
2157 6453198 : bool is_vla = false;
2158 : /* Check whether a decl has FE created VALUE_EXPR here BEFORE
2159 : gimplify_vla_decl creates VALUE_EXPR for a vla decl.
2160 : If the decl has VALUE_EXPR that was created by FE (usually
2161 : C++FE), it's a proxy variable, and FE already initialized
2162 : the VALUE_EXPR of it, we should not initialize it anymore. */
2163 6453198 : bool decl_had_value_expr_p = DECL_HAS_VALUE_EXPR_P (decl);
2164 :
 : /* Allocate dynamically when the size is not a compile-time constant,
 : or when -fstack-check=generic would reject a too-large static
 : allocation.  */
2165 6453198 : poly_uint64 size;
2166 6453198 : if (!poly_int_tree_p (DECL_SIZE_UNIT (decl), &size)
2167 6453198 : || (!TREE_STATIC (decl)
2168 6328720 : && flag_stack_check == GENERIC_STACK_CHECK
2169 234 : && maybe_gt (size,
2170 : (unsigned HOST_WIDE_INT) STACK_CHECK_MAX_VAR_SIZE)))
2171 : {
2172 8826 : gimplify_vla_decl (decl, seq_p);
2173 8826 : is_vla = true;
2174 : }
2175 :
 : /* Poison eligible addressable locals for ASan use-after-scope
 : detection; the matching unpoison is emitted when the enclosing
 : bind expression is closed.  */
2176 6453198 : if (asan_poisoned_variables
2177 4955 : && !is_vla
2178 4769 : && TREE_ADDRESSABLE (decl)
2179 2343 : && !TREE_STATIC (decl)
2180 2133 : && !DECL_HAS_VALUE_EXPR_P (decl)
2181 2098 : && DECL_ALIGN (decl) <= MAX_SUPPORTED_STACK_ALIGNMENT
2182 2098 : && dbg_cnt (asan_use_after_scope)
2183 2098 : && !gimplify_omp_ctxp
2184 : /* GNAT introduces temporaries to hold return values of calls in
2185 : initializers of variables defined in other units, so the
2186 : declaration of the variable is discarded completely. We do not
2187 : want to issue poison calls for such dropped variables. */
2188 6455288 : && (DECL_SEEN_IN_BIND_EXPR_P (decl)
2189 0 : || (DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)))
2190 : {
2191 2090 : asan_poisoned_variables->add (decl);
2192 2090 : asan_poison_variable (decl, false, seq_p);
2193 2090 : if (!DECL_ARTIFICIAL (decl) && gimplify_ctxp->live_switch_vars)
2194 55 : gimplify_ctxp->live_switch_vars->add (decl);
2195 : }
2196 :
2197 : /* Some front ends do not explicitly declare all anonymous
2198 : artificial variables. We compensate here by declaring the
2199 : variables, though it would be better if the front ends would
2200 : explicitly declare them. */
2201 6453198 : if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
2202 6453198 : && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
2203 16624 : gimple_add_tmp_var (decl);
2204 :
2205 6453198 : if (init && init != error_mark_node)
2206 : {
2207 3690286 : if (!TREE_STATIC (decl))
2208 : {
 : /* Turn the initializer into an explicit INIT_EXPR statement.  */
2209 3660322 : DECL_INITIAL (decl) = NULL_TREE;
2210 3660322 : init = build2 (INIT_EXPR, void_type_node, decl, init);
2211 3660322 : gimplify_and_add (init, seq_p);
2212 3660322 : ggc_free (init);
2213 : /* Clear TREE_READONLY if we really have an initialization. */
2214 3660322 : if (!DECL_INITIAL (decl)
2215 3660322 : && !omp_privatize_by_reference (decl))
2216 3643978 : TREE_READONLY (decl) = 0;
2217 : }
2218 : else
2219 : /* We must still examine initializers for static variables
2220 : as they may contain a label address. */
2221 29964 : walk_tree (&init, force_labels_r, NULL, NULL);
2222 : }
2223 : /* When there is no explicit initializer, if the user requested,
2224 : We should insert an artifical initializer for this automatic
2225 : variable. */
2226 2762912 : else if (var_needs_auto_init_p (decl)
2227 2762912 : && !decl_had_value_expr_p)
2228 : {
2229 67681 : gimple_add_init_for_auto_var (decl,
2230 : flag_auto_var_init,
2231 : seq_p);
2232 : /* The expanding of a call to the above .DEFERRED_INIT will apply
2233 : block initialization to the whole space covered by this variable.
2234 : As a result, all the paddings will be initialized to zeroes
2235 : for zero initialization and 0xFE byte-repeatable patterns for
2236 : pattern initialization.
2237 : In order to make the paddings as zeroes for pattern init, We
2238 : should add a call to __builtin_clear_padding to clear the
2239 : paddings to zero in compatible with CLANG.
2240 : We cannot insert this call if the variable is a gimple register
2241 : since __builtin_clear_padding will take the address of the
2242 : variable. As a result, if a long double/_Complex long double
2243 : variable will spilled into stack later, its padding is 0XFE. */
2244 67681 : if (flag_auto_var_init == AUTO_INIT_PATTERN
2245 183 : && !is_gimple_reg (decl)
2246 67778 : && clear_padding_type_may_have_padding_p (TREE_TYPE (decl)))
2247 74 : gimple_add_padding_init_for_auto_var (decl, is_vla, seq_p);
2248 : }
2249 : }
2250 :
2251 : return GS_ALL_DONE;
2252 : }
2253 :
2254 : /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
2255 : and replacing the LOOP_EXPR with goto, but if the loop contains an
2256 : EXIT_EXPR, we need to append a label for it to jump to. */
2257 :
2258 : static enum gimplify_status
2259 200047 : gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
2260 : {
 : /* Save the enclosing loop's exit label so nested loops nest properly.  */
2261 200047 : tree saved_label = gimplify_ctxp->exit_label;
2262 200047 : tree start_label = create_artificial_label (UNKNOWN_LOCATION);
2263 :
2264 200047 : gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
2265 :
2266 200047 : gimplify_ctxp->exit_label = NULL_TREE;
2267 :
2268 200047 : gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
2269 :
 : /* Close the loop by jumping back to its start label.  */
2270 200047 : gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
2271 :
 : /* If the body contained an EXIT_EXPR, the exit label was created while
 : gimplifying it; emit that label after the loop.  */
2272 200047 : if (gimplify_ctxp->exit_label)
2273 6342 : gimplify_seq_add_stmt (pre_p,
2274 3171 : gimple_build_label (gimplify_ctxp->exit_label));
2275 :
2276 200047 : gimplify_ctxp->exit_label = saved_label;
2277 :
2278 200047 : *expr_p = NULL;
2279 200047 : return GS_ALL_DONE;
2280 : }
2281 :
2282 : /* Gimplify a statement list onto a sequence. These may be created either
2283 : by an enlightened front-end, or by shortcut_cond_expr. */
2284 :
2285 : static enum gimplify_status
2286 8474342 : gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
2287 : {
 : /* If the list computes a value, TEMP is a temporary holding it.  */
2288 8474342 : tree temp = voidify_wrapper_expr (*expr_p, NULL);
2289 :
2290 8474342 : tree_stmt_iterator i = tsi_start (*expr_p);
2291 :
 : /* Gimplify each statement, unlinking it from the list as we go.  */
2292 54619364 : while (!tsi_end_p (i))
2293 : {
2294 37670680 : gimplify_stmt (tsi_stmt_ptr (i), pre_p);
2295 37670680 : tsi_delink (&i);
2296 : }
2297 :
 : /* When a value was produced, *EXPR_P becomes the temporary and the
 : caller must gimplify it further; otherwise we are done.  */
2298 8474342 : if (temp)
2299 : {
2300 15172 : *expr_p = temp;
2301 15172 : return GS_OK;
2302 : }
2303 :
2304 : return GS_ALL_DONE;
2305 : }
2306 :
2307 :
2308 : /* Emit warning for the unreachable statement STMT if needed.
2309 : Return the gimple itself when the warning is emitted, otherwise
2310 : return NULL. */
2311 : static gimple *
2312 181 : emit_warn_switch_unreachable (gimple *stmt)
2313 : {
 : /* Compiler-generated jumps to artificial labels are not user code.  */
2314 181 : if (gimple_code (stmt) == GIMPLE_GOTO
2315 50 : && TREE_CODE (gimple_goto_dest (stmt)) == LABEL_DECL
2316 231 : && DECL_ARTIFICIAL (gimple_goto_dest (stmt)))
2317 : /* Don't warn for compiler-generated gotos. These occur
2318 : in Duff's devices, for example. */
2319 : return NULL;
2320 : else
2321 131 : warning_at (gimple_location (stmt), OPT_Wswitch_unreachable,
2322 : "statement will never be executed");
 : /* The warning was emitted; hand STMT back to the caller.  */
2323 131 : return stmt;
2324 : }
2325 :
2326 : /* Callback for walk_gimple_seq. */
2327 :
2328 : static tree
2329 50434 : warn_switch_unreachable_and_auto_init_r (gimple_stmt_iterator *gsi_p,
2330 : bool *handled_ops_p,
2331 : struct walk_stmt_info *wi)
2332 : {
2333 50434 : gimple *stmt = gsi_stmt (*gsi_p);
2334 50434 : bool unreachable_issued = wi->info != NULL;
2335 :
2336 50434 : *handled_ops_p = true;
2337 50434 : switch (gimple_code (stmt))
2338 : {
2339 155 : case GIMPLE_TRY:
2340 : /* A compiler-generated cleanup or a user-written try block.
2341 : If it's empty, don't dive into it--that would result in
2342 : worse location info. */
2343 155 : if (gimple_try_eval (stmt) == NULL)
2344 : {
2345 3 : if (warn_switch_unreachable && !unreachable_issued)
2346 3 : wi->info = emit_warn_switch_unreachable (stmt);
2347 :
2348 : /* Stop when auto var init warning is not on. */
2349 3 : if (!warn_trivial_auto_var_init)
2350 3 : return integer_zero_node;
2351 : }
2352 : /* Fall through. */
2353 2561 : case GIMPLE_BIND:
2354 2561 : case GIMPLE_CATCH:
2355 2561 : case GIMPLE_EH_FILTER:
2356 2561 : case GIMPLE_TRANSACTION:
2357 : /* Walk the sub-statements. */
2358 2561 : *handled_ops_p = false;
2359 2561 : break;
2360 :
2361 : case GIMPLE_DEBUG:
2362 : /* Ignore these. We may generate them before declarations that
2363 : are never executed. If there's something to warn about,
2364 : there will be non-debug stmts too, and we'll catch those. */
2365 : break;
2366 :
2367 60 : case GIMPLE_ASSIGN:
2368 : /* See comment below in the GIMPLE_CALL case. */
2369 60 : if (flag_auto_var_init > AUTO_INIT_UNINITIALIZED
2370 20 : && gimple_assign_single_p (stmt)
2371 79 : && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
2372 : {
2373 6 : gimple *g = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));
2374 6 : if (gimple_call_internal_p (g, IFN_DEFERRED_INIT))
2375 : break;
2376 : }
2377 54 : goto do_default;
2378 :
2379 46572 : case GIMPLE_LABEL:
2380 : /* Stop till the first Label. */
2381 46572 : return integer_zero_node;
2382 62 : case GIMPLE_CALL:
2383 62 : if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
2384 : {
2385 8 : *handled_ops_p = false;
2386 8 : break;
2387 : }
2388 : /* Don't warn for compiler-generated initializations for
2389 : -ftrivial-auto-var-init for -Wswitch-unreachable. Though
2390 : do warn for -Wtrivial-auto-var-init.
2391 : There are 3 cases:
2392 : case 1: a call to .DEFERRED_INIT;
2393 : case 2: a call to __builtin_clear_padding with the 2nd argument is
2394 : present and non-zero;
2395 : case 3: a gimple assign store right after the call to .DEFERRED_INIT
2396 : that has the LHS of .DEFERRED_INIT as the RHS as following:
2397 : _1 = .DEFERRED_INIT (4, 2, &"i1"[0]);
2398 : i1 = _1.
2399 : case 3 is handled above in the GIMPLE_ASSIGN case. */
2400 54 : if (flag_auto_var_init > AUTO_INIT_UNINITIALIZED
2401 54 : && gimple_call_internal_p (stmt, IFN_DEFERRED_INIT))
2402 : {
2403 33 : if (warn_trivial_auto_var_init)
2404 : {
2405 : /* Get the variable name from the 3rd argument of call. */
2406 12 : tree var_name = gimple_call_arg (stmt, 2);
2407 12 : var_name = TREE_OPERAND (TREE_OPERAND (var_name, 0), 0);
2408 12 : const char *var_name_str = TREE_STRING_POINTER (var_name);
2409 :
2410 12 : warning_at (gimple_location (stmt), OPT_Wtrivial_auto_var_init,
2411 : "%qs cannot be initialized with "
2412 : "%<-ftrivial-auto-var_init%>", var_name_str);
2413 : }
2414 : break;
2415 : }
2416 21 : if (flag_auto_var_init > AUTO_INIT_UNINITIALIZED
2417 7 : && gimple_call_builtin_p (stmt, BUILT_IN_CLEAR_PADDING)
2418 24 : && (bool) TREE_INT_CST_LOW (gimple_call_arg (stmt, 1)))
2419 : break;
2420 : /* Fall through. */
2421 178 : default:
2422 178 : do_default:
2423 : /* check the first "real" statement (not a decl/lexical scope/...), issue
2424 : warning if needed. */
2425 178 : if (warn_switch_unreachable && !unreachable_issued)
2426 178 : wi->info = emit_warn_switch_unreachable (stmt);
2427 : /* Stop when auto var init warning is not on. */
2428 178 : if (!warn_trivial_auto_var_init)
2429 178 : return integer_zero_node;
2430 : break;
2431 : }
2432 : return NULL_TREE;
2433 : }
2434 :
2435 :
2436 : /* Possibly warn about unreachable statements between switch's controlling
2437 : expression and the first case. Also warn about -ftrivial-auto-var-init
2438 : cannot initialize the auto variable under such situation.
2439 : SEQ is the body of a switch expression. */
2440 :
2441 : static void
2442 50589 : maybe_warn_switch_unreachable_and_auto_init (gimple_seq seq)
2443 : {
2444 60 : if ((!warn_switch_unreachable && !warn_trivial_auto_var_init)
2445 : /* This warning doesn't play well with Fortran when optimizations
2446 : are on. */
2447 50529 : || lang_GNU_Fortran ()
2448 97515 : || seq == NULL)
2449 3833 : return;
2450 :
2451 46756 : struct walk_stmt_info wi;
2452 :
2453 46756 : memset (&wi, 0, sizeof (wi));
2454 46756 : walk_gimple_seq (seq, warn_switch_unreachable_and_auto_init_r, NULL, &wi);
2455 : }
2456 :
2457 :
2458 : /* A label entry that pairs label and a location. */
2459 : struct label_entry
2460 : {
 : /* The label.  */
2461 : tree label;
 : /* A source location to use when warning about this label.  */
2462 : location_t loc;
2463 : };
2464 :
2465 : /* Find LABEL in vector of label entries VEC. */
2466 :
2467 : static struct label_entry *
2468 11794 : find_label_entry (const auto_vec<struct label_entry> *vec, tree label)
2469 : {
2470 11794 : unsigned int i;
2471 11794 : struct label_entry *l;
2472 :
2473 22683 : FOR_EACH_VEC_ELT (*vec, i, l)
2474 19391 : if (l->label == label)
2475 : return l;
2476 : return NULL;
2477 : }
2478 :
2479 : /* Return true if LABEL, a LABEL_DECL, represents a case label
2480 : in a vector of labels CASES. */
2481 :
2482 : static bool
2483 15266 : case_label_p (const vec<tree> *cases, tree label)
2484 : {
2485 15266 : unsigned int i;
2486 15266 : tree l;
2487 :
2488 156568 : FOR_EACH_VEC_ELT (*cases, i, l)
2489 156242 : if (CASE_LABEL (l) == label)
2490 : return true;
2491 : return false;
2492 : }
2493 :
2494 : /* Find the last nondebug statement in a scope STMT. */
2495 :
2496 : static gimple *
2497 2032 : last_stmt_in_scope (gimple *stmt)
2498 : {
2499 3818 : if (!stmt)
2500 : return NULL;
2501 :
 : /* Last statement of sequence S that is neither a debug statement nor
 : a compiler-generated .DEFERRED_INIT call; NULL if there is none.  */
2502 6210 : auto last_stmt_in_seq = [] (gimple_seq s)
2503 : {
2504 2403 : gimple_seq_node n;
2505 2403 : for (n = gimple_seq_last (s);
2506 2408 : n && (is_gimple_debug (n)
2507 2395 : || (flag_auto_var_init > AUTO_INIT_UNINITIALIZED
2508 44 : && gimple_call_internal_p (n, IFN_DEFERRED_INIT)));
2509 5 : n = n->prev)
2510 7 : if (n == s)
2511 : return (gimple *) NULL;
2512 : return (gimple *) n;
2513 : };
2514 :
2515 3807 : switch (gimple_code (stmt))
2516 : {
2517 1332 : case GIMPLE_BIND:
2518 1332 : {
 : /* Recurse into the last statement of the bind body.  */
2519 1332 : gbind *bind = as_a <gbind *> (stmt);
2520 1332 : stmt = last_stmt_in_seq (gimple_bind_body (bind));
2521 1332 : return last_stmt_in_scope (stmt);
2522 : }
2523 :
2524 617 : case GIMPLE_TRY:
2525 617 : {
2526 617 : gtry *try_stmt = as_a <gtry *> (stmt);
2527 617 : stmt = last_stmt_in_seq (gimple_try_eval (try_stmt));
2528 617 : gimple *last_eval = last_stmt_in_scope (stmt);
 : /* When the protected sequence of a try/finally can fall through
 : (and does not end in an IFN_FALLTHROUGH marker), the cleanup's
 : last statement is the relevant one.  */
2529 617 : if (gimple_stmt_may_fallthru (last_eval)
2530 473 : && (last_eval == NULL
2531 470 : || !gimple_call_internal_p (last_eval, IFN_FALLTHROUGH))
2532 1083 : && gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
2533 : {
2534 454 : stmt = last_stmt_in_seq (gimple_try_cleanup (try_stmt));
2535 454 : return last_stmt_in_scope (stmt);
2536 : }
2537 : else
2538 : return last_eval;
2539 : }
2540 :
2541 0 : case GIMPLE_DEBUG:
2542 0 : gcc_unreachable ();
2543 :
2544 : default:
2545 : return stmt;
2546 : }
2547 : }
2548 :
2549 : /* Collect labels that may fall through into LABELS and return the statement
2550 : preceding another case label, or a user-defined label. Store a location
2551 : useful to give warnings at *PREVLOC (usually the location of the returned
2552 : statement or of its surrounding scope). */
2553 :
2554 : static gimple *
2555 19670 : collect_fallthrough_labels (gimple_stmt_iterator *gsi_p,
2556 : auto_vec <struct label_entry> *labels,
2557 : location_t *prevloc)
2558 : {
2559 19670 : gimple *prev = NULL;
2560 :
2561 19670 : *prevloc = UNKNOWN_LOCATION;
2562 96476 : do
2563 : {
2564 96476 : if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND)
2565 : {
2566 : /* Recognize the special GIMPLE_BIND added by gimplify_switch_expr,
2567 : which starts on a GIMPLE_SWITCH and ends with a break label.
2568 : Handle that as a single statement that can fall through. */
2569 1393 : gbind *bind = as_a <gbind *> (gsi_stmt (*gsi_p));
2570 1393 : gimple *first = gimple_seq_first_stmt (gimple_bind_body (bind));
2571 1393 : gimple *last = gimple_seq_last_stmt (gimple_bind_body (bind));
2572 1387 : if (last
2573 1387 : && gimple_code (first) == GIMPLE_SWITCH
2574 1489 : && gimple_code (last) == GIMPLE_LABEL)
2575 : {
2576 102 : tree label = gimple_label_label (as_a <glabel *> (last));
2577 102 : if (SWITCH_BREAK_LABEL_P (label))
2578 : {
2579 102 : prev = bind;
2580 102 : gsi_next (gsi_p);
2581 102 : continue;
2582 : }
2583 : }
2584 : }
2585 96374 : if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND
2586 96374 : || gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_TRY)
2587 : {
2588 : /* Nested scope. Only look at the last statement of
2589 : the innermost scope. */
2590 1415 : location_t bind_loc = gimple_location (gsi_stmt (*gsi_p));
2591 1415 : gimple *last = last_stmt_in_scope (gsi_stmt (*gsi_p));
2592 1415 : if (last)
2593 : {
2594 1407 : prev = last;
2595 : /* It might be a label without a location. Use the
2596 : location of the scope then. */
2597 1407 : if (!gimple_has_location (prev))
2598 616 : *prevloc = bind_loc;
2599 : }
2600 1415 : gsi_next (gsi_p);
2601 1415 : continue;
2602 1415 : }
2603 :
2604 : /* Ifs are tricky. */
2605 94959 : if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_COND)
2606 : {
2607 6969 : gcond *cond_stmt = as_a <gcond *> (gsi_stmt (*gsi_p));
2608 6969 : tree false_lab = gimple_cond_false_label (cond_stmt);
2609 6969 : location_t if_loc = gimple_location (cond_stmt);
2610 :
2611 : /* If we have e.g.
2612 : if (i > 1) goto <D.2259>; else goto D;
2613 : we can't do much with the else-branch. */
2614 6969 : if (!DECL_ARTIFICIAL (false_lab))
2615 : break;
2616 :
2617 : /* Go on until the false label, then one step back. */
2618 53216 : for (; !gsi_end_p (*gsi_p); gsi_next (gsi_p))
2619 : {
2620 53216 : gimple *stmt = gsi_stmt (*gsi_p);
2621 53216 : if (gimple_code (stmt) == GIMPLE_LABEL
2622 53216 : && gimple_label_label (as_a <glabel *> (stmt)) == false_lab)
2623 : break;
2624 : }
2625 :
2626 : /* Not found? Oops. */
2627 6969 : if (gsi_end_p (*gsi_p))
2628 : break;
2629 :
2630 : /* A dead label can't fall through. */
2631 6969 : if (!UNUSED_LABEL_P (false_lab))
2632 : {
2633 6922 : struct label_entry l = { false_lab, if_loc };
2634 6922 : labels->safe_push (l);
2635 : }
2636 :
2637 : /* Go to the last statement of the then branch. */
2638 6969 : gsi_prev (gsi_p);
2639 :
2640 : /* if (i != 0) goto <D.1759>; else goto <D.1760>;
2641 : <D.1759>:
2642 : <stmt>;
2643 : goto <D.1761>;
2644 : <D.1760>:
2645 : */
2646 6969 : if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_GOTO
2647 6969 : && !gimple_has_location (gsi_stmt (*gsi_p)))
2648 : {
2649 : /* Look at the statement before, it might be
2650 : attribute fallthrough, in which case don't warn. */
2651 1439 : gsi_prev (gsi_p);
2652 1439 : bool fallthru_before_dest
2653 1439 : = gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_FALLTHROUGH);
2654 1439 : gsi_next (gsi_p);
2655 1439 : tree goto_dest = gimple_goto_dest (gsi_stmt (*gsi_p));
2656 1439 : if (!fallthru_before_dest)
2657 : {
2658 1382 : struct label_entry l = { goto_dest, if_loc };
2659 1382 : labels->safe_push (l);
2660 : }
2661 : }
2662 : /* This case is about
2663 : if (1 != 0) goto <D.2022>; else goto <D.2023>;
2664 : <D.2022>:
2665 : n = n + 1; // #1
2666 : <D.2023>: // #2
2667 : <D.1988>: // #3
2668 : where #2 is UNUSED_LABEL_P and we want to warn about #1 falling
2669 : through to #3. So set PREV to #1. */
2670 5530 : else if (UNUSED_LABEL_P (false_lab))
2671 47 : prev = gsi_stmt (*gsi_p);
2672 :
2673 : /* And move back. */
2674 6969 : gsi_next (gsi_p);
2675 : }
2676 :
2677 94959 : tree lab;
2678 : /* Remember the last statement. Skip labels that are of no interest
2679 : to us. */
2680 94959 : if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2681 : {
2682 11576 : tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (*gsi_p)));
2683 11576 : if (find_label_entry (labels, label))
2684 70541 : prev = gsi_stmt (*gsi_p);
2685 : }
2686 83383 : else if (gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_ASAN_MARK))
2687 : ;
2688 83383 : else if (flag_auto_var_init > AUTO_INIT_UNINITIALIZED
2689 83383 : && gimple_call_internal_p (gsi_stmt (*gsi_p),
2690 : IFN_DEFERRED_INIT))
2691 : ;
2692 83383 : else if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_PREDICT)
2693 : ;
2694 82215 : else if (flag_auto_var_init > AUTO_INIT_UNINITIALIZED
2695 963 : && gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_GOTO
2696 190 : && (lab = gimple_goto_dest (gsi_stmt (*gsi_p)))
2697 190 : && TREE_CODE (lab) == LABEL_DECL
2698 82404 : && VACUOUS_INIT_LABEL_P (lab))
2699 : ;
2700 82215 : else if (!is_gimple_debug (gsi_stmt (*gsi_p)))
2701 70541 : prev = gsi_stmt (*gsi_p);
2702 94959 : gsi_next (gsi_p);
2703 : }
2704 96476 : while (!gsi_end_p (*gsi_p)
2705 : /* Stop if we find a case or a user-defined label. */
2706 192952 : && (gimple_code (gsi_stmt (*gsi_p)) != GIMPLE_LABEL
2707 20389 : || !gimple_has_location (gsi_stmt (*gsi_p))));
2708 :
2709 19670 : if (prev && gimple_has_location (prev))
2710 19217 : *prevloc = gimple_location (prev);
2711 19670 : return prev;
2712 : }
2713 :
2714 : /* Return true if the switch fallthrough warning should occur. LABEL is
2715 : the label statement that we're falling through to. */
2716 :
2717 : static bool
2718 15775 : should_warn_for_implicit_fallthrough (gimple_stmt_iterator *gsi_p, tree label)
2719 : {
       /* GSI is a local copy: all look-ahead below leaves the caller's
	  iterator *GSI_P untouched.  */
2720 15775 :   gimple_stmt_iterator gsi = *gsi_p;
2721 :
2722 :   /* Don't warn if the label is marked with a "falls through" comment. */
2723 15775 :   if (FALLTHROUGH_LABEL_P (label))
2724 :     return false;
2725 :
2726 :   /* Don't warn for non-case labels followed by a statement:
2727 :        case 0:
2728 : 	 foo ();
2729 :        label:
2730 : 	 bar ();
2731 :      as these are likely intentional. */
2732 15011 :   if (!case_label_p (&gimplify_ctxp->case_labels, label))
2733 :     {
2734 :       tree l;
       /* Skip further user labels and, under -ftrivial-auto-var-init,
	  artificial .DEFERRED_INIT marker calls, to see whether a real
	  statement follows this label.  */
2735 326 :       while (!gsi_end_p (gsi)
2736 326 : 	     && ((gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2737 255 : 		  && (l
2738 255 : 		      = gimple_label_label (as_a <glabel *> (gsi_stmt (gsi))))
2739 255 : 		  && !case_label_p (&gimplify_ctxp->case_labels, l))
2740 149 : 		 || (flag_auto_var_init > AUTO_INIT_UNINITIALIZED
2741 31 : 		     && gimple_call_internal_p (gsi_stmt (gsi),
2742 : 						IFN_DEFERRED_INIT))))
2743 169 : 	gsi_next_nondebug (&gsi);
2744 157 :       if (gsi_end_p (gsi) || gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
2745 : 	return false;
2746 :     }
2747 :
2748 :   /* Don't warn for terminated branches, i.e. when the subsequent case labels
2749 :      immediately breaks. */
       /* Restart the scan from the caller's position.  */
2750 14940 :   gsi = *gsi_p;
2751 :
2752 :   /* Skip all immediately following labels. */
2753 33341 :   while (!gsi_end_p (gsi)
2754 33341 : 	 && (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2755 15075 : 	     || gimple_code (gsi_stmt (gsi)) == GIMPLE_PREDICT
2756 14874 : 	     || (flag_auto_var_init > AUTO_INIT_UNINITIALIZED
2757 357 : 		 && gimple_call_internal_p (gsi_stmt (gsi),
2758 : 					    IFN_DEFERRED_INIT))))
2759 18401 :     gsi_next_nondebug (&gsi);
2760 :
2761 :   /* { ... something; default:; } */
2762 14940 :   if (gsi_end_p (gsi)
2763 :       /* { ... something; default: break; } or
2764 : 	 { ... something; default: goto L; } */
2765 14874 :       || gimple_code (gsi_stmt (gsi)) == GIMPLE_GOTO
2766 :       /* { ... something; default: return; } */
2767 28727 :       || gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
2768 :     return false;
2769 :
2770 :   return true;
2771 : }
2772 :
2773 : /* Callback for walk_gimple_seq. */
2774 :
2775 : static tree
2776 20548 : warn_implicit_fallthrough_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2777 : 			     struct walk_stmt_info *)
2778 : {
2779 20548 :   gimple *stmt = gsi_stmt (*gsi_p);
2780 :
2781 20548 :   *handled_ops_p = true;
2782 20548 :   switch (gimple_code (stmt))
2783 :     {
2784 228 :     case GIMPLE_TRY:
2785 228 :     case GIMPLE_BIND:
2786 228 :     case GIMPLE_CATCH:
2787 228 :     case GIMPLE_EH_FILTER:
2788 228 :     case GIMPLE_TRANSACTION:
2789 :       /* Walk the sub-statements. */
2790 228 :       *handled_ops_p = false;
2791 228 :       break;
2792 :
2793 :     /* Find a sequence of form:
2794 :
2795 :        GIMPLE_LABEL
2796 :        [...]
2797 :        <may fallthru stmt>
2798 :        GIMPLE_LABEL
2799 :
2800 :        and possibly warn. */
2801 :     case GIMPLE_LABEL:
2802 :       {
2803 : 	/* Found a label.  Skip all immediately following labels. */
2804 46297 : 	while (!gsi_end_p (*gsi_p)
2805 46297 : 	       && gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2806 26432 : 	  gsi_next_nondebug (gsi_p);
2807 :
2808 : 	/* There might be no more statements. */
	/* NOTE: a non-NULL return value terminates the walk_gimple_seq
	   traversal.  */
2809 19865 : 	if (gsi_end_p (*gsi_p))
2810 4083 : 	  return integer_zero_node;
2811 :
2812 : 	/* Vector of labels that fall through. */
2813 19670 : 	auto_vec <struct label_entry> labels;
2814 19670 : 	location_t prevloc;
2815 19670 : 	gimple *prev = collect_fallthrough_labels (gsi_p, &labels, &prevloc);
2816 :
2817 : 	/* There might be no more statements. */
2818 19670 : 	if (gsi_end_p (*gsi_p))
2819 3888 : 	  return integer_zero_node;
2820 :
2821 15782 : 	gimple *next = gsi_stmt (*gsi_p);
2822 15782 : 	tree label;
2823 : 	/* If what follows is a label, then we may have a fallthrough. */
2824 15782 : 	if (gimple_code (next) == GIMPLE_LABEL
2825 15782 : 	    && gimple_has_location (next)
2826 15782 : 	    && (label = gimple_label_label (as_a <glabel *> (next)))
2827 31564 : 	    && prev != NULL)
2828 : 	  {
2829 15775 : 	    struct label_entry *l;
2830 15775 : 	    bool warned_p = false;
2831 15775 : 	    auto_diagnostic_group d;
2832 15775 : 	    if (!should_warn_for_implicit_fallthrough (gsi_p, label))
2833 : 	      /* Quiet. */;
2834 13760 : 	    else if (gimple_code (prev) == GIMPLE_LABEL
2835 218 : 		     && (label = gimple_label_label (as_a <glabel *> (prev)))
2836 13978 : 		     && (l = find_label_entry (&labels, label)))
2837 198 : 	      warned_p = warning_at (l->loc, OPT_Wimplicit_fallthrough_,
2838 : 				     "this statement may fall through");
2839 13562 : 	    else if (!gimple_call_internal_p (prev, IFN_FALLTHROUGH)
2840 : 		     /* Try to be clever and don't warn when the statement
2841 : 			can't actually fall through. */
2842 13161 : 		     && gimple_stmt_may_fallthru (prev)
2843 13863 : 		     && prevloc != UNKNOWN_LOCATION)
2844 301 : 	      warned_p = warning_at (prevloc,
2845 301 : 				     OPT_Wimplicit_fallthrough_,
2846 : 				     "this statement may fall through");
2847 499 : 	    if (warned_p)
2848 499 : 	      inform (gimple_location (next), "here");
2849 :
2850 : 	    /* Mark this label as processed so as to prevent multiple
2851 : 	       warnings in nested switches. */
2852 15775 : 	    FALLTHROUGH_LABEL_P (label) = true;
2853 :
2854 : 	    /* So that next warn_implicit_fallthrough_r will start looking for
2855 : 	       a new sequence starting with this label. */
2856 15775 : 	    gsi_prev (gsi_p);
2857 15775 : 	  }
2858 3888 :       }
2859 15782 :       break;
2860 :     default:
2861 :       break;
2862 :     }
2863 :   return NULL_TREE;
2864 : }
2865 :
2866 : /* Warn when a switch case falls through. */
2867 :
2868 : static void
2869 50589 : maybe_warn_implicit_fallthrough (gimple_seq seq)
2870 : {
2871 50589 : if (!warn_implicit_fallthrough)
2872 46608 : return;
2873 :
2874 : /* This warning is meant for C/C++/ObjC/ObjC++ only. */
2875 3981 : if (!(lang_GNU_C ()
2876 1073 : || lang_GNU_CXX ()
2877 0 : || lang_GNU_OBJC ()))
2878 : return;
2879 :
2880 3981 : struct walk_stmt_info wi;
2881 3981 : memset (&wi, 0, sizeof (wi));
2882 3981 : walk_gimple_seq (seq, warn_implicit_fallthrough_r, NULL, &wi);
2883 : }
2884 :
2885 : /* Callback for walk_gimple_seq. */
2886 :
2887 : static tree
2888 3266544 : expand_FALLTHROUGH_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2889 : 		      struct walk_stmt_info *wi)
2890 : {
2891 3266544 :   gimple *stmt = gsi_stmt (*gsi_p);
2892 :
2893 3266544 :   *handled_ops_p = true;
2894 3266544 :   switch (gimple_code (stmt))
2895 :     {
2896 149557 :     case GIMPLE_TRY:
2897 149557 :     case GIMPLE_BIND:
2898 149557 :     case GIMPLE_CATCH:
2899 149557 :     case GIMPLE_EH_FILTER:
2900 149557 :     case GIMPLE_TRANSACTION:
2901 :       /* Walk the sub-statements. */
2902 149557 :       *handled_ops_p = false;
2903 149557 :       break;
2904 253525 :     case GIMPLE_CALL:
       /* WI->info points at a two-element location_t array set up by
	  expand_FALLTHROUGH: [0] flags a fallthrough marker seen at the
	  very end of the sequence, [1] is that marker's location.  */
2905 253525 :       static_cast<location_t *>(wi->info)[0] = UNKNOWN_LOCATION;
2906 253525 :       if (gimple_call_internal_p (stmt, IFN_FALLTHROUGH))
2907 : 	{
2908 3973 : 	  location_t loc = gimple_location (stmt);
	  /* Drop the marker; *GSI_P now denotes the following statement.  */
2909 3973 : 	  gsi_remove (gsi_p, true);
2910 3973 : 	  wi->removed_stmt = true;
2911 :
2912 : 	  /* nothrow flag is added by genericize_c_loop to mark fallthrough
2913 : 	     statement at the end of some loop's body.  Those should be
2914 : 	     always diagnosed, either because they indeed don't precede
2915 : 	     a case label or default label, or because the next statement
2916 : 	     is not within the same iteration statement. */
2917 3973 : 	  if ((stmt->subcode & GF_CALL_NOTHROW) != 0)
2918 : 	    {
2919 12 : 	      pedwarn (loc, 0, "attribute %<fallthrough%> not preceding "
2920 : 		       "a case label or default label");
2921 12 : 	      break;
2922 : 	    }
2923 :
2924 3961 : 	  if (gsi_end_p (*gsi_p))
2925 : 	    {
2926 32 : 	      static_cast<location_t *>(wi->info)[0] = BUILTINS_LOCATION;
2927 32 : 	      static_cast<location_t *>(wi->info)[1] = loc;
2928 32 : 	      break;
2929 : 	    }
2930 :
2931 3929 : 	  bool found = false;
2932 :
2933 3929 : 	  gimple_stmt_iterator gsi2 = *gsi_p;
2934 3929 : 	  stmt = gsi_stmt (gsi2);
2935 3929 : 	  tree lab;
2936 3929 : 	  if (flag_auto_var_init > AUTO_INIT_UNINITIALIZED
2937 832 : 	      && gimple_code (stmt) == GIMPLE_GOTO
2938 20 : 	      && (lab = gimple_goto_dest (stmt))
2939 20 : 	      && TREE_CODE (lab) == LABEL_DECL
2940 3949 : 	      && VACUOUS_INIT_LABEL_P (lab))
2941 : 	    {
2942 : 	      /* Handle for C++ artificial -ftrivial-auto-var-init=
2943 : 		 sequences.  Those look like:
2944 : 		 goto lab1;
2945 : 		 lab2:;
2946 : 		 v1 = .DEFERRED_INIT (...);
2947 : 		 v2 = .DEFERRED_INIT (...);
2948 : 		 lab3:;
2949 : 		 v3 = .DEFERRED_INIT (...);
2950 : 		 lab1:;
2951 : 		 In this case, a case/default label can be either in between
2952 : 		 the GIMPLE_GOTO and the corresponding GIMPLE_LABEL, if jumps
2953 : 		 from the switch condition to the case/default label cross
2954 : 		 vacuous initialization of some variables, or after the
2955 : 		 corresponding GIMPLE_LABEL, if those jumps don't cross
2956 : 		 any such initialization but there is an adjacent named label
2957 : 		 which crosses such initialization.  So, for the purpose of
2958 : 		 this function, just ignore the goto but until reaching the
2959 : 		 corresponding GIMPLE_LABEL allow also .DEFERRED_INIT
2960 : 		 calls. */
2961 5 : 	      gsi_next (&gsi2);
2962 : 	    }
2963 3924 : 	  else if (gimple_code (stmt) == GIMPLE_GOTO
2964 3924 : 		   && !gimple_has_location (stmt))
2965 : 	    {
2966 : 	      /* Go on until the artificial label. */
2967 57 : 	      tree goto_dest = gimple_goto_dest (stmt);
2968 414 : 	      for (; !gsi_end_p (gsi2); gsi_next (&gsi2))
2969 : 		{
2970 357 : 		  if (gimple_code (gsi_stmt (gsi2)) == GIMPLE_LABEL
2971 357 : 		      && gimple_label_label (as_a <glabel *> (gsi_stmt (gsi2)))
2972 : 			 == goto_dest)
2973 : 		    break;
2974 : 		}
2975 :
2976 : 	      /* Not found?  Stop. */
2977 57 : 	      if (gsi_end_p (gsi2))
2978 : 		break;
2979 :
2980 : 	      /* Look one past it. */
2981 57 : 	      gsi_next (&gsi2);
2982 : 	    }
2983 :
2984 : 	  /* We're looking for a case label or default label here. */
2985 4027 : 	  while (!gsi_end_p (gsi2))
2986 : 	    {
2987 4027 : 	      stmt = gsi_stmt (gsi2);
2988 4027 : 	      if (gimple_code (stmt) == GIMPLE_LABEL)
2989 : 		{
2990 3984 : 		  tree label = gimple_label_label (as_a <glabel *> (stmt));
2991 3984 : 		  if (gimple_has_location (stmt) && DECL_ARTIFICIAL (label))
2992 : 		    {
2993 : 		      found = true;
2994 : 		      break;
2995 : 		    }
2996 : 		}
2997 43 : 	      else if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
2998 : 		;
2999 43 : 	      else if (flag_auto_var_init > AUTO_INIT_UNINITIALIZED
3000 43 : 		       && gimple_call_internal_p (stmt, IFN_DEFERRED_INIT))
3001 : 		;
3002 43 : 	      else if (!is_gimple_debug (stmt))
3003 : 		/* Anything else is not expected. */
3004 : 		break;
3005 98 : 	      gsi_next (&gsi2);
3006 : 	    }
3007 3929 : 	  if (!found)
3008 23 : 	    pedwarn (loc, 0, "attribute %<fallthrough%> not preceding "
3009 : 		     "a case label or default label");
3010 : 	}
3011 :       break;
3012 2863462 :     default:
3013 2863462 :       static_cast<location_t *>(wi->info)[0] = UNKNOWN_LOCATION;
3014 2863462 :       break;
3015 :     }
3016 3266544 :   return NULL_TREE;
3017 : }
3018 :
3019 : /* Expand all FALLTHROUGH () calls in SEQ. */
3020 :
3021 : static void
3022 48908 : expand_FALLTHROUGH (gimple_seq *seq_p)
3023 : {
3024 48908 : auto_urlify_attributes sentinel;
3025 :
3026 48908 : struct walk_stmt_info wi;
3027 48908 : location_t loc[2];
3028 48908 : memset (&wi, 0, sizeof (wi));
3029 48908 : loc[0] = UNKNOWN_LOCATION;
3030 48908 : loc[1] = UNKNOWN_LOCATION;
3031 48908 : wi.info = (void *) &loc[0];
3032 48908 : walk_gimple_seq_mod (seq_p, expand_FALLTHROUGH_r, NULL, &wi);
3033 48908 : if (loc[0] != UNKNOWN_LOCATION)
3034 : /* We've found [[fallthrough]]; at the end of a switch, which the C++
3035 : standard says is ill-formed; see [dcl.attr.fallthrough]. */
3036 8 : pedwarn (loc[1], 0, "attribute %<fallthrough%> not preceding "
3037 : "a case label or default label");
3038 48908 : }
3039 :
3040 :
3041 : /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
3042 : branch to. */
3043 :
3044 : static enum gimplify_status
3045 50589 : gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
3046 : {
3047 50589 :   tree switch_expr = *expr_p;
3048 50589 :   gimple_seq switch_body_seq = NULL;
3049 50589 :   enum gimplify_status ret;
3050 50589 :   tree index_type = TREE_TYPE (switch_expr);
       /* Some front ends leave the SWITCH_EXPR untyped; fall back to the
	  type of the controlling expression.  */
3051 50589 :   if (index_type == NULL_TREE)
3052 11349 :     index_type = TREE_TYPE (SWITCH_COND (switch_expr));
3053 :
3054 50589 :   ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
3055 : 		       fb_rvalue);
3056 50589 :   if (ret == GS_ERROR || ret == GS_UNHANDLED)
3057 :     return ret;
3058 :
3059 50589 :   if (SWITCH_BODY (switch_expr))
3060 :     {
3061 50589 :       vec<tree> labels;
3062 50589 :       vec<tree> saved_labels;
3063 50589 :       hash_set<tree> *saved_live_switch_vars = NULL;
3064 50589 :       tree default_case = NULL_TREE;
3065 50589 :       gswitch *switch_stmt;
3066 :
3067 :       /* Save old labels, get new ones from body, then restore the old
3068 : 	 labels.  Save all the things from the switch body to append after. */
3069 50589 :       saved_labels = gimplify_ctxp->case_labels;
3070 50589 :       gimplify_ctxp->case_labels.create (8);
3071 :
3072 :       /* Do not create live_switch_vars if SWITCH_BODY is not a BIND_EXPR. */
3073 50589 :       saved_live_switch_vars = gimplify_ctxp->live_switch_vars;
3074 50589 :       tree_code body_type = TREE_CODE (SWITCH_BODY (switch_expr));
3075 50589 :       if (body_type == BIND_EXPR || body_type == STATEMENT_LIST)
3076 50307 : 	gimplify_ctxp->live_switch_vars = new hash_set<tree> (4);
3077 :       else
3078 282 : 	gimplify_ctxp->live_switch_vars = NULL;
3079 :
3080 50589 :       bool old_in_switch_expr = gimplify_ctxp->in_switch_expr;
3081 50589 :       gimplify_ctxp->in_switch_expr = true;
3082 :
3083 50589 :       gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
3084 :
3085 50589 :       gimplify_ctxp->in_switch_expr = old_in_switch_expr;
3086 50589 :       maybe_warn_switch_unreachable_and_auto_init (switch_body_seq);
3087 50589 :       maybe_warn_implicit_fallthrough (switch_body_seq);
3088 :       /* Only do this for the outermost GIMPLE_SWITCH. */
3089 50589 :       if (!gimplify_ctxp->in_switch_expr)
3090 48908 : 	expand_FALLTHROUGH (&switch_body_seq);
3091 :
3092 50589 :       labels = gimplify_ctxp->case_labels;
3093 50589 :       gimplify_ctxp->case_labels = saved_labels;
3094 :
3095 50589 :       if (gimplify_ctxp->live_switch_vars)
3096 : 	{
	  /* All switch-local variables must have been popped by now.  */
3097 50307 : 	  gcc_assert (gimplify_ctxp->live_switch_vars->is_empty ());
3098 50307 : 	  delete gimplify_ctxp->live_switch_vars;
3099 : 	}
3100 50589 :       gimplify_ctxp->live_switch_vars = saved_live_switch_vars;
3101 :
3102 50589 :       preprocess_case_label_vec_for_gimple (labels, index_type,
3103 : 					    &default_case);
3104 :
3105 50589 :       bool add_bind = false;
3106 50589 :       if (!default_case)
3107 : 	{
3108 14897 : 	  glabel *new_default;
3109 :
3110 14897 : 	  default_case
3111 14897 : 	    = build_case_label (NULL_TREE, NULL_TREE,
3112 : 				create_artificial_label (UNKNOWN_LOCATION));
3113 14897 : 	  if (old_in_switch_expr)
3114 : 	    {
3115 1078 : 	      SWITCH_BREAK_LABEL_P (CASE_LABEL (default_case)) = 1;
3116 1078 : 	      add_bind = true;
3117 : 	    }
3118 14897 : 	  new_default = gimple_build_label (CASE_LABEL (default_case));
3119 14897 : 	  gimplify_seq_add_stmt (&switch_body_seq, new_default);
3120 : 	}
3121 35692 :       else if (old_in_switch_expr)
3122 : 	{
3123 603 : 	  gimple *last = gimple_seq_last_stmt (switch_body_seq);
3124 603 : 	  if (last && gimple_code (last) == GIMPLE_LABEL)
3125 : 	    {
3126 338 : 	      tree label = gimple_label_label (as_a <glabel *> (last));
3127 338 : 	      if (SWITCH_BREAK_LABEL_P (label))
3128 50589 : 		add_bind = true;
3129 : 	    }
3130 : 	}
3131 :
3132 50589 :       switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
3133 : 					 default_case, labels);
3134 50589 :       gimple_set_location (switch_stmt, EXPR_LOCATION (switch_expr));
3135 :       /* For the benefit of -Wimplicit-fallthrough, if switch_body_seq
3136 : 	 ends with a GIMPLE_LABEL holding SWITCH_BREAK_LABEL_P LABEL_DECL,
3137 : 	 wrap the GIMPLE_SWITCH up to that GIMPLE_LABEL into a GIMPLE_BIND,
3138 : 	 so that we can easily find the start and end of the switch
3139 : 	 statement. */
3140 50589 :       if (add_bind)
3141 : 	{
3142 1415 : 	  gimple_seq bind_body = NULL;
3143 1415 : 	  gimplify_seq_add_stmt (&bind_body, switch_stmt);
3144 1415 : 	  gimple_seq_add_seq (&bind_body, switch_body_seq);
3145 1415 : 	  gbind *bind = gimple_build_bind (NULL_TREE, bind_body, NULL_TREE);
3146 1415 : 	  gimple_set_location (bind, EXPR_LOCATION (switch_expr));
3147 1415 : 	  gimplify_seq_add_stmt (pre_p, bind);
3148 : 	}
3149 :       else
3150 : 	{
3151 49174 : 	  gimplify_seq_add_stmt (pre_p, switch_stmt);
3152 49174 : 	  gimplify_seq_add_seq (pre_p, switch_body_seq);
3153 : 	}
3154 50589 :       labels.release ();
3155 :     }
3156 :   else
3157 0 :     gcc_unreachable ();
3158 :
3159 50589 :   return GS_ALL_DONE;
3160 : }
3161 :
3162 : /* Gimplify the LABEL_EXPR pointed to by EXPR_P. */
3163 :
3164 : static enum gimplify_status
3165 2540992 : gimplify_label_expr (tree *expr_p, gimple_seq *pre_p)
3166 : {
3167 2540992 : gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
3168 : == current_function_decl);
3169 :
3170 2540992 : tree label = LABEL_EXPR_LABEL (*expr_p);
3171 2540992 : glabel *label_stmt = gimple_build_label (label);
3172 2540992 : gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
3173 2540992 : gimplify_seq_add_stmt (pre_p, label_stmt);
3174 :
3175 2540992 : if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
3176 21 : gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
3177 : NOT_TAKEN));
3178 2540971 : else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
3179 11 : gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
3180 : TAKEN));
3181 :
3182 2540992 : return GS_ALL_DONE;
3183 : }
3184 :
3185 : /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
3186 :
3187 : static enum gimplify_status
3188 1044052 : gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
3189 : {
3190 1044052 : struct gimplify_ctx *ctxp;
3191 1044052 : glabel *label_stmt;
3192 :
3193 : /* Invalid programs can play Duff's Device type games with, for example,
3194 : #pragma omp parallel. At least in the C front end, we don't
3195 : detect such invalid branches until after gimplification, in the
3196 : diagnose_omp_blocks pass. */
3197 1044062 : for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
3198 1044062 : if (ctxp->case_labels.exists ())
3199 : break;
3200 :
3201 1044052 : tree label = CASE_LABEL (*expr_p);
3202 1044052 : label_stmt = gimple_build_label (label);
3203 1044052 : gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
3204 1044052 : ctxp->case_labels.safe_push (*expr_p);
3205 1044052 : gimplify_seq_add_stmt (pre_p, label_stmt);
3206 :
3207 1044052 : if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
3208 16 : gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
3209 : NOT_TAKEN));
3210 1044036 : else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
3211 20 : gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
3212 : TAKEN));
3213 :
3214 1044052 : return GS_ALL_DONE;
3215 : }
3216 :
3217 : /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
3218 : if necessary. */
3219 :
3220 : tree
3221 1786541 : build_and_jump (tree *label_p)
3222 : {
3223 1786541 : if (label_p == NULL)
3224 : /* If there's nowhere to jump, just fall through. */
3225 : return NULL_TREE;
3226 :
3227 1212552 : if (*label_p == NULL_TREE)
3228 : {
3229 695575 : tree label = create_artificial_label (UNKNOWN_LOCATION);
3230 695575 : *label_p = label;
3231 : }
3232 :
3233 1212552 : return build1 (GOTO_EXPR, void_type_node, *label_p);
3234 : }
3235 :
3236 : /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
3237 : This also involves building a label to jump to and communicating it to
3238 : gimplify_loop_expr through gimplify_ctxp->exit_label. */
3239 :
3240 : static enum gimplify_status
3241 3174 : gimplify_exit_expr (tree *expr_p)
3242 : {
3243 3174 : tree cond = TREE_OPERAND (*expr_p, 0);
3244 3174 : tree expr;
3245 :
3246 3174 : expr = build_and_jump (&gimplify_ctxp->exit_label);
3247 3174 : expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
3248 3174 : *expr_p = expr;
3249 :
3250 3174 : return GS_OK;
3251 : }
3252 :
3253 : /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
3254 : different from its canonical type, wrap the whole thing inside a
3255 : NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
3256 : type.
3257 :
3258 : The canonical type of a COMPONENT_REF is the type of the field being
3259 : referenced--unless the field is a bit-field which can be read directly
3260 : in a smaller mode, in which case the canonical type is the
3261 : sign-appropriate type corresponding to that mode. */
3262 :
3263 : static void
3264 18394499 : canonicalize_component_ref (tree *expr_p)
3265 : {
3266 18394499 :   tree expr = *expr_p;
3267 18394499 :   tree type;
3268 :
3269 18394499 :   gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
3270 :
       /* For integral accesses, get_unwidened may pick the narrower
	  sign-appropriate type in which a bit-field can be read directly;
	  otherwise the canonical type is simply the field's own type.  */
3271 18394499 :   if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
3272 7445509 :     type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
3273 :   else
3274 10948990 :     type = TREE_TYPE (TREE_OPERAND (expr, 1));
3275 :
3276 :   /* One could argue that all the stuff below is not necessary for
3277 :      the non-bitfield case and declare it a FE error if type
3278 :      adjustment would be needed. */
3279 18394499 :   if (TREE_TYPE (expr) != type)
3280 :     {
3281 : #ifdef ENABLE_TYPES_CHECKING
3282 769436 :       tree old_type = TREE_TYPE (expr);
3283 : #endif
3284 769436 :       int type_quals;
3285 :
3286 :       /* We need to preserve qualifiers and propagate them from
3287 : 	 operand 0. */
3288 769436 :       type_quals = TYPE_QUALS (type)
3289 769436 : 	| TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
3290 769436 :       if (TYPE_QUALS (type) != type_quals)
3291 769195 : 	type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
3292 :
3293 :       /* Set the type of the COMPONENT_REF to the underlying type. */
3294 769436 :       TREE_TYPE (expr) = type;
3295 :
3296 : #ifdef ENABLE_TYPES_CHECKING
3297 :       /* It is now a FE error, if the conversion from the canonical
3298 : 	 type to the original expression type is not useless. */
3299 769436 :       gcc_assert (useless_type_conversion_p (old_type, type));
3300 : #endif
3301 :     }
3302 18394499 : }
3303 :
3304 : /* If a NOP conversion is changing a pointer to array of foo to a pointer
3305 : to foo, embed that change in the ADDR_EXPR by converting
3306 : T array[U];
3307 : (T *)&array
3308 : ==>
3309 : &array[L]
3310 : where L is the lower bound. For simplicity, only do this for constant
3311 : lower bound.
3312 : The constraint is that the type of &array[L] is trivially convertible
3313 : to T *. */
3314 :
3315 : static void
3316 525031 : canonicalize_addr_expr (tree *expr_p)
3317 : {
3318 525031 : tree expr = *expr_p;
3319 525031 : tree addr_expr = TREE_OPERAND (expr, 0);
3320 525031 : tree datype, ddatype, pddatype;
3321 :
3322 : /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
3323 1049716 : if (!POINTER_TYPE_P (TREE_TYPE (expr))
3324 525031 : || TREE_CODE (addr_expr) != ADDR_EXPR)
3325 : return;
3326 :
3327 : /* The addr_expr type should be a pointer to an array. */
3328 346 : datype = TREE_TYPE (TREE_TYPE (addr_expr));
3329 346 : if (TREE_CODE (datype) != ARRAY_TYPE)
3330 : return;
3331 :
3332 : /* The pointer to element type shall be trivially convertible to
3333 : the expression pointer type. */
3334 28 : ddatype = TREE_TYPE (datype);
3335 28 : pddatype = build_pointer_type (ddatype);
3336 28 : if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
3337 : pddatype))
3338 : return;
3339 :
3340 : /* The lower bound and element sizes must be constant. */
3341 0 : if (!TYPE_SIZE_UNIT (ddatype)
3342 0 : || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
3343 0 : || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
3344 0 : || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
3345 : return;
3346 :
3347 : /* All checks succeeded. Build a new node to merge the cast. */
3348 0 : *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
3349 0 : TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
3350 : NULL_TREE, NULL_TREE);
3351 0 : *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
3352 :
3353 : /* We can have stripped a required restrict qualifier above. */
3354 0 : if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
3355 0 : *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
3356 : }
3357 :
3358 : /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
3359 : underneath as appropriate. */
3360 :
3361 : static enum gimplify_status
3362 12825356 : gimplify_conversion (tree *expr_p)
3363 : {
3364 12825356 : location_t loc = EXPR_LOCATION (*expr_p);
3365 12825356 : gcc_assert (CONVERT_EXPR_P (*expr_p));
3366 :
3367 : /* Then strip away all but the outermost conversion. */
3368 12825356 : STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
3369 :
3370 : /* And remove the outermost conversion if it's useless. */
3371 12825356 : if (tree_ssa_useless_type_conversion (*expr_p))
3372 0 : *expr_p = TREE_OPERAND (*expr_p, 0);
3373 :
3374 : /* If we still have a conversion at the toplevel,
3375 : then canonicalize some constructs. */
3376 12825356 : if (CONVERT_EXPR_P (*expr_p))
3377 : {
3378 12825356 : tree sub = TREE_OPERAND (*expr_p, 0);
3379 :
3380 : /* If a NOP conversion is changing the type of a COMPONENT_REF
3381 : expression, then canonicalize its type now in order to expose more
3382 : redundant conversions. */
3383 12825356 : if (TREE_CODE (sub) == COMPONENT_REF)
3384 271852 : canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
3385 :
3386 : /* If a NOP conversion is changing a pointer to array of foo
3387 : to a pointer to foo, embed that change in the ADDR_EXPR. */
3388 12553504 : else if (TREE_CODE (sub) == ADDR_EXPR)
3389 525031 : canonicalize_addr_expr (expr_p);
3390 : }
3391 :
3392 : /* If we have a conversion to a non-register type force the
3393 : use of a VIEW_CONVERT_EXPR instead. */
3394 12825356 : if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
3395 90 : *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
3396 90 : TREE_OPERAND (*expr_p, 0));
3397 :
3398 : /* Canonicalize CONVERT_EXPR to NOP_EXPR. */
3399 12825356 : if (TREE_CODE (*expr_p) == CONVERT_EXPR)
3400 229098 : TREE_SET_CODE (*expr_p, NOP_EXPR);
3401 :
3402 12825356 : return GS_OK;
3403 : }
3404 :
3405 : /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
3406 : DECL_VALUE_EXPR, and it's worth re-examining things. */
3407 :
3408 : static enum gimplify_status
3409 126213634 : gimplify_var_or_parm_decl (tree *expr_p)
3410 : {
3411 126213634 : tree decl = *expr_p;
3412 :
3413 : /* ??? If this is a local variable, and it has not been seen in any
3414 : outer BIND_EXPR, then it's probably the result of a duplicate
3415 : declaration, for which we've already issued an error. It would
3416 : be really nice if the front end wouldn't leak these at all.
3417 : Currently the only known culprit is C++ destructors, as seen
3418 : in g++.old-deja/g++.jason/binding.C.
3419 : Another possible culpit are size expressions for variably modified
3420 : types which are lost in the FE or not gimplified correctly. */
3421 126213634 : if (VAR_P (decl)
3422 98195567 : && !DECL_SEEN_IN_BIND_EXPR_P (decl)
3423 17080347 : && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
3424 126273560 : && decl_function_context (decl) == current_function_decl)
3425 : {
3426 75 : gcc_assert (seen_error ());
3427 : return GS_ERROR;
3428 : }
3429 :
3430 : /* When within an OMP context, notice uses of variables. */
3431 126213559 : if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
3432 : return GS_ALL_DONE;
3433 :
3434 : /* If the decl is an alias for another expression, substitute it now. */
3435 126207739 : if (DECL_HAS_VALUE_EXPR_P (decl))
3436 : {
3437 430947 : *expr_p = unshare_expr (DECL_VALUE_EXPR (decl));
3438 430947 : return GS_OK;
3439 : }
3440 :
3441 : return GS_ALL_DONE;
3442 : }
3443 :
3444 : /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
3445 :
3446 : static void
3447 110336291 : recalculate_side_effects (tree t)
3448 : {
3449 110336291 : enum tree_code code = TREE_CODE (t);
3450 110336291 : int len = TREE_OPERAND_LENGTH (t);
3451 110336291 : int i;
3452 :
3453 110336291 : switch (TREE_CODE_CLASS (code))
3454 : {
3455 665108 : case tcc_expression:
3456 665108 : switch (code)
3457 : {
3458 : case INIT_EXPR:
3459 : case MODIFY_EXPR:
3460 : case VA_ARG_EXPR:
3461 : case PREDECREMENT_EXPR:
3462 : case PREINCREMENT_EXPR:
3463 : case POSTDECREMENT_EXPR:
3464 : case POSTINCREMENT_EXPR:
3465 : /* All of these have side-effects, no matter what their
3466 : operands are. */
3467 : return;
3468 :
3469 : default:
3470 : break;
3471 : }
3472 : /* Fall through. */
3473 :
3474 110336289 : case tcc_comparison: /* a comparison expression */
3475 110336289 : case tcc_unary: /* a unary arithmetic expression */
3476 110336289 : case tcc_binary: /* a binary arithmetic expression */
3477 110336289 : case tcc_reference: /* a reference */
3478 110336289 : case tcc_vl_exp: /* a function call */
3479 110336289 : TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
3480 359452347 : for (i = 0; i < len; ++i)
3481 : {
3482 249116058 : tree op = TREE_OPERAND (t, i);
3483 249116058 : if (op && TREE_SIDE_EFFECTS (op))
3484 1290393 : TREE_SIDE_EFFECTS (t) = 1;
3485 : }
3486 : break;
3487 :
3488 : case tcc_constant:
3489 : /* No side-effects. */
3490 : return;
3491 :
3492 2 : default:
3493 2 : if (code == SSA_NAME)
3494 : /* No side-effects. */
3495 : return;
3496 0 : gcc_unreachable ();
3497 : }
3498 : }
3499 :
/* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
   node *EXPR_P.

     compound_lval
	   : min_lval '[' val ']'
	   | min_lval '.' ID
	   | compound_lval '[' val ']'
	   | compound_lval '.' ID

   This is not part of the original SIMPLE definition, which separates
   array and member references, but it seems reasonable to handle them
   together.  Also, this way we don't run into problems with union
   aliasing; gcc requires that for accesses through a union to alias, the
   union reference must be explicit, which was not always the case when we
   were splitting up array and member refs.

   PRE_P points to the sequence where side effects that must happen before
   *EXPR_P should be stored.

   POST_P points to the sequence where side effects that must happen after
   *EXPR_P should be stored.  */

static enum gimplify_status
gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			fallback_t fallback)
{
  tree *p;
  enum gimplify_status ret = GS_ALL_DONE, tret;
  int i;
  location_t loc = EXPR_LOCATION (*expr_p);
  tree expr = *expr_p;

  /* Create a stack of the subexpressions so later we can walk them in
     order from inner to outer.  */
  auto_vec<tree, 10> expr_stack;

  /* We can handle anything that get_inner_reference can deal with.  */
  for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
    {
    restart:
      /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs.  */
      if (TREE_CODE (*p) == INDIRECT_REF)
	*p = fold_indirect_ref_loc (loc, *p);

      if (handled_component_p (*p))
	;
      /* Expand DECL_VALUE_EXPR now.  In some cases that may expose
	 additional COMPONENT_REFs.  Restart the walk at the same node
	 since the replacement may itself be a handled component.  */
      else if ((VAR_P (*p) || TREE_CODE (*p) == PARM_DECL)
	       && gimplify_var_or_parm_decl (p) == GS_OK)
	goto restart;
      else
	break;

      expr_stack.safe_push (*p);
    }

  gcc_assert (expr_stack.length ());

  /* Now EXPR_STACK is a stack of pointers to all the refs we've
     walked through and P points to the innermost expression.

     Java requires that we elaborated nodes in source order.  That
     means we must gimplify the inner expression followed by each of
     the indices, in order.  But we can't gimplify the inner
     expression until we deal with any variable bounds, sizes, or
     positions in order to deal with PLACEHOLDER_EXPRs.

     The base expression may contain a statement expression that
     has declarations used in size expressions, so has to be
     gimplified before gimplifying the size expressions.

     So we do this in three steps.  First we deal with variable
     bounds, sizes, and positions, then we gimplify the base and
     ensure it is memory if needed, then we deal with the annotations
     for any variables in the components and any indices, from left
     to right.  */

  /* Step 1: record variable array bounds/element sizes and component
     offsets into the ref nodes (operands 2/3) when they are not
     compile-time invariants, walking outermost to innermost.  */
  bool need_non_reg = false;
  for (i = expr_stack.length () - 1; i >= 0; i--)
    {
      tree t = expr_stack[i];

      if (error_operand_p (TREE_OPERAND (t, 0)))
	return GS_ERROR;

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  /* Deal with the low bound and element type size and put them into
	     the ARRAY_REF.  If these values are set, they have already been
	     gimplified.  */
	  if (TREE_OPERAND (t, 2) == NULL_TREE)
	    {
	      tree low = unshare_expr (array_ref_low_bound (t));
	      if (!is_gimple_min_invariant (low))
		{
		  TREE_OPERAND (t, 2) = low;
		}
	    }

	  if (TREE_OPERAND (t, 3) == NULL_TREE)
	    {
	      tree elmt_size = array_ref_element_size (t);
	      if (!is_gimple_min_invariant (elmt_size))
		{
		  elmt_size = unshare_expr (elmt_size);
		  tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
		  tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));

		  /* Divide the element size by the alignment of the element
		     type (above).  */
		  elmt_size = size_binop_loc (loc, EXACT_DIV_EXPR,
					      elmt_size, factor);

		  TREE_OPERAND (t, 3) = elmt_size;
		}
	    }
	  need_non_reg = true;
	}
      else if (TREE_CODE (t) == COMPONENT_REF)
	{
	  /* Set the field offset into T and gimplify it.  */
	  if (TREE_OPERAND (t, 2) == NULL_TREE)
	    {
	      tree offset = component_ref_field_offset (t);
	      if (!is_gimple_min_invariant (offset))
		{
		  offset = unshare_expr (offset);
		  tree field = TREE_OPERAND (t, 1);
		  tree factor
		    = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);

		  /* Divide the offset by its alignment.  */
		  offset = size_binop_loc (loc, EXACT_DIV_EXPR,
					   offset, factor);

		  TREE_OPERAND (t, 2) = offset;
		}
	    }
	  need_non_reg = true;
	}
      else if (!is_gimple_reg_type (TREE_TYPE (t)))
	/* When the result of an operation, in particular a VIEW_CONVERT_EXPR
	   is a non-register type then require the base object to be a
	   non-register as well.  */
	need_non_reg = true;
    }

  /* Step 2 is to gimplify the base expression.  Make sure lvalue is set
     so as to match the min_lval predicate.  Failure to do so may result
     in the creation of large aggregate temporaries.  */
  tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
			fallback | fb_lvalue);
  ret = MIN (ret, tret);
  if (ret == GS_ERROR)
    return GS_ERROR;

  /* Step 2a: if we have component references we do not support on
     registers then make sure the base isn't a register.  Of course
     we can only do so if an rvalue is OK.  */
  if (need_non_reg && (fallback & fb_rvalue))
    prepare_gimple_addressable (p, pre_p);


  /* Step 3: gimplify size expressions and the indices and operands of
     ARRAY_REF.  During this loop we also remove any useless conversions.
     If we operate on a register also make sure to properly gimplify
     to individual operations.  */

  bool reg_operations = is_gimple_reg (*p);
  for (; expr_stack.length () > 0; )
    {
      /* Pop innermost-first, so indices are gimplified in source order.  */
      tree t = expr_stack.pop ();

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  gcc_assert (!reg_operations);

	  /* Gimplify the low bound and element type size.  */
	  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
				is_gimple_reg, fb_rvalue);
	  ret = MIN (ret, tret);

	  tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
				is_gimple_reg, fb_rvalue);
	  ret = MIN (ret, tret);

	  /* Gimplify the dimension.  */
	  tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
				is_gimple_val, fb_rvalue);
	  ret = MIN (ret, tret);
	}
      else if (TREE_CODE (t) == COMPONENT_REF)
	{
	  gcc_assert (!reg_operations);

	  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
				is_gimple_reg, fb_rvalue);
	  ret = MIN (ret, tret);
	}
      else if (reg_operations)
	{
	  tret = gimplify_expr (&TREE_OPERAND (t, 0), pre_p, post_p,
				is_gimple_val, fb_rvalue);
	  ret = MIN (ret, tret);
	}

      STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));

      /* The innermost expression P may have originally had
	 TREE_SIDE_EFFECTS set which would have caused all the outer
	 expressions in *EXPR_P leading to P to also have had
	 TREE_SIDE_EFFECTS set.  */
      recalculate_side_effects (t);
    }

  /* If the outermost expression is a COMPONENT_REF, canonicalize its type.  */
  if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
    {
      canonicalize_component_ref (expr_p);
    }

  expr_stack.release ();

  /* If nothing changed, *EXPR_P must still be the original tree.  */
  gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);

  return ret;
}
3728 :
/* Gimplify the self modifying expression pointed to by EXPR_P
   (++, --, +=, -=).

   PRE_P points to the list where side effects that must happen before
   *EXPR_P should be stored.

   POST_P points to the list where side effects that must happen after
   *EXPR_P should be stored.

   WANT_VALUE is nonzero iff we want to use the value of this expression
   in another expression.

   ARITH_TYPE is the type the computation should be performed in.  */

enum gimplify_status
gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			bool want_value, tree arith_type)
{
  enum tree_code code;
  tree lhs, lvalue, rhs, t1;
  gimple_seq post = NULL, *orig_post_p = post_p;
  bool postfix;
  enum tree_code arith_code;
  enum gimplify_status ret;
  location_t loc = EXPR_LOCATION (*expr_p);

  code = TREE_CODE (*expr_p);

  gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
	      || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);

  /* Prefix or postfix?  */
  if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
    /* Faster to treat as prefix if result is not used.  */
    postfix = want_value;
  else
    postfix = false;

  /* For postfix, make sure the inner expression's post side effects
     are executed after side effects from this expression.  They are
     collected in the local POST queue and appended to ORIG_POST_P at
     the end.  */
  if (postfix)
    post_p = &post;

  /* Add or subtract?  */
  if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
    arith_code = PLUS_EXPR;
  else
    arith_code = MINUS_EXPR;

  /* Gimplify the LHS into a GIMPLE lvalue.  */
  lvalue = TREE_OPERAND (*expr_p, 0);
  ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
  if (ret == GS_ERROR)
    return ret;

  /* Extract the operands to the arithmetic operation.  */
  lhs = lvalue;
  rhs = TREE_OPERAND (*expr_p, 1);

  /* For postfix operator, we evaluate the LHS to an rvalue and then use
     that as the result value and in the postqueue operation.  The saved
     pre-modification value in a temporary becomes the expression's
     result.  */
  if (postfix)
    {
      ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
      if (ret == GS_ERROR)
	return ret;

      lhs = get_initialized_tmp_var (lhs, pre_p);
    }

  /* For POINTERs increment, use POINTER_PLUS_EXPR.  Pointer arithmetic
     in GIMPLE only has "plus", so a decrement negates the offset.  */
  if (POINTER_TYPE_P (TREE_TYPE (lhs)))
    {
      rhs = convert_to_ptrofftype_loc (loc, rhs);
      if (arith_code == MINUS_EXPR)
	rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
      t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
    }
  else
    t1 = fold_convert (TREE_TYPE (*expr_p),
		       fold_build2 (arith_code, arith_type,
				    fold_convert (arith_type, lhs),
				    fold_convert (arith_type, rhs)));

  if (postfix)
    {
      gimplify_assign (lvalue, t1, pre_p);
      gimplify_seq_add_seq (orig_post_p, post);
      *expr_p = lhs;
      return GS_ALL_DONE;
    }
  else
    {
      /* Prefix form: the result is simply the assignment itself; let the
	 caller gimplify the new MODIFY_EXPR.  */
      *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
      return GS_OK;
    }
}
3826 :
3827 : /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
3828 :
3829 : static void
3830 80681079 : maybe_with_size_expr (tree *expr_p)
3831 : {
3832 80681079 : tree expr = *expr_p;
3833 80681079 : tree type = TREE_TYPE (expr);
3834 80681079 : tree size;
3835 :
3836 : /* If we've already wrapped this or the type is error_mark_node, we can't do
3837 : anything. */
3838 80681079 : if (TREE_CODE (expr) == WITH_SIZE_EXPR
3839 80680954 : || type == error_mark_node)
3840 : return;
3841 :
3842 : /* If the size isn't known or is a constant, we have nothing to do. */
3843 80680806 : size = TYPE_SIZE_UNIT (type);
3844 80680806 : if (!size || poly_int_tree_p (size))
3845 : return;
3846 :
3847 : /* Otherwise, make a WITH_SIZE_EXPR. */
3848 1675 : size = unshare_expr (size);
3849 1675 : size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
3850 1675 : *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
3851 : }
3852 :
3853 : /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P
3854 : Store any side-effects in PRE_P. CALL_LOCATION is the location of
3855 : the CALL_EXPR. If ALLOW_SSA is set the actual parameter may be
3856 : gimplified to an SSA name. */
3857 :
3858 : enum gimplify_status
3859 33260740 : gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
3860 : bool allow_ssa)
3861 : {
3862 33260740 : bool (*test) (tree);
3863 33260740 : fallback_t fb;
3864 :
3865 : /* In general, we allow lvalues for function arguments to avoid
3866 : extra overhead of copying large aggregates out of even larger
3867 : aggregates into temporaries only to copy the temporaries to
3868 : the argument list. Make optimizers happy by pulling out to
3869 : temporaries those types that fit in registers. */
3870 33260740 : if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
3871 : test = is_gimple_val, fb = fb_rvalue;
3872 : else
3873 : {
3874 1545959 : test = is_gimple_lvalue, fb = fb_either;
3875 : /* Also strip a TARGET_EXPR that would force an extra copy. */
3876 1545959 : if (TREE_CODE (*arg_p) == TARGET_EXPR)
3877 : {
3878 306094 : tree init = TARGET_EXPR_INITIAL (*arg_p);
3879 306094 : if (init
3880 306091 : && !VOID_TYPE_P (TREE_TYPE (init))
3881 : /* Currently, due to c++/116015, it is not desirable to
3882 : strip a TARGET_EXPR whose initializer is a {}. The
3883 : problem is that if we do elide it, we also have to
3884 : replace all the occurrences of the slot temporary in the
3885 : initializer with the temporary created for the argument.
3886 : But we do not have that temporary yet so the replacement
3887 : would be quite awkward and it might be needed to resort
3888 : back to a PLACEHOLDER_EXPR. Note that stripping the
3889 : TARGET_EXPR wouldn't help anyway, as gimplify_expr would
3890 : just allocate a temporary to store the CONSTRUCTOR into.
3891 : (FIXME PR116375.)
3892 :
3893 : See convert_for_arg_passing for the C++ code that marks
3894 : the TARGET_EXPR as eliding or not. */
3895 572958 : && TREE_CODE (init) != CONSTRUCTOR)
3896 239819 : *arg_p = init;
3897 : }
3898 : }
3899 :
3900 : /* If this is a variable sized type, we must remember the size. */
3901 33260740 : maybe_with_size_expr (arg_p);
3902 :
3903 : /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
3904 : /* Make sure arguments have the same location as the function call
3905 : itself. */
3906 33260740 : protected_set_expr_location (*arg_p, call_location);
3907 :
3908 : /* There is a sequence point before a function call. Side effects in
3909 : the argument list must occur before the actual call. So, when
3910 : gimplifying arguments, force gimplify_expr to use an internal
3911 : post queue which is then appended to the end of PRE_P. */
3912 33260740 : return gimplify_expr (arg_p, pre_p, NULL, test, fb, allow_ssa);
3913 : }
3914 :
3915 : /* Don't fold inside offloading or taskreg regions: it can break code by
3916 : adding decl references that weren't in the source. We'll do it during
3917 : omplower pass instead. */
3918 :
3919 : static bool
3920 58874286 : maybe_fold_stmt (gimple_stmt_iterator *gsi)
3921 : {
3922 58874286 : struct gimplify_omp_ctx *ctx;
3923 59401605 : for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
3924 1188921 : if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0)
3925 : return false;
3926 532268 : else if ((ctx->region_type & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
3927 : return false;
3928 : /* Delay folding of builtins until the IL is in consistent state
3929 : so the diagnostic machinery can do a better job. */
3930 58212684 : if (gimple_call_builtin_p (gsi_stmt (*gsi)))
3931 : return false;
3932 55870887 : return fold_stmt (gsi);
3933 : }
3934 :
3935 : static tree
3936 : expand_late_variant_directive (vec<struct omp_variant> all_candidates,
3937 : tree construct_context);
3938 :
3939 :
3940 : /* Helper function for calls to omp_dynamic_cond: find the current
3941 : enclosing block in the gimplification context. */
3942 : static tree
3943 359 : find_supercontext (void)
3944 : {
3945 359 : vec<gbind *>stack = gimple_bind_expr_stack ();
3946 686 : for (int i = stack.length () - 1; i >= 0; i++)
3947 : {
3948 327 : gbind *b = stack[i];
3949 327 : if (b->block)
3950 : return b->block;
3951 : }
3952 : return NULL_TREE;
3953 : }
3954 :
3955 : /* OpenMP: Handle the append_args and adjust_args clauses of
3956 : declare_variant for EXPR, which is a CALL_EXPR whose CALL_EXPR_FN
3957 : is the variant, within a dispatch construct with clauses DISPATCH_CLAUSES.
3958 : WANT_VALUE and POINTERIZE are as for expand_variant_call_expr.
3959 :
3960 : 'append_args' causes interop objects are added after the last regular
3961 : (nonhidden, nonvariadic) arguments of the variant function.
3962 : 'adjust_args' with need_device_{addr,ptr} converts the pointer target of
3963 : a pointer from a host to a device address. This uses either the default
3964 : device or the passed device number, which then sets the default device
3965 : address. */
3966 : static tree
3967 471 : modify_call_for_omp_dispatch (tree expr, tree dispatch_clauses,
3968 : bool want_value, bool pointerize)
3969 : {
3970 471 : location_t loc = EXPR_LOCATION (expr);
3971 471 : tree fndecl = get_callee_fndecl (expr);
3972 :
3973 : /* Skip processing if we don't get the expected call form. */
3974 471 : if (!fndecl)
3975 : return expr;
3976 :
3977 471 : tree init_code = NULL_TREE;
3978 471 : tree cleanup = NULL_TREE;
3979 471 : tree clobbers = NULL_TREE;
3980 471 : int nargs = call_expr_nargs (expr);
3981 471 : tree dispatch_device_num = NULL_TREE;
3982 471 : tree dispatch_interop = NULL_TREE;
3983 471 : tree dispatch_append_args = NULL_TREE;
3984 471 : int nfirst_args = 0;
3985 471 : tree dispatch_adjust_args_list
3986 471 : = lookup_attribute ("omp declare variant variant args",
3987 471 : DECL_ATTRIBUTES (fndecl));
3988 :
3989 471 : if (dispatch_adjust_args_list)
3990 : {
3991 389 : dispatch_adjust_args_list = TREE_VALUE (dispatch_adjust_args_list);
3992 389 : dispatch_append_args = TREE_CHAIN (dispatch_adjust_args_list);
3993 389 : if (TREE_PURPOSE (dispatch_adjust_args_list) == NULL_TREE
3994 389 : && TREE_VALUE (dispatch_adjust_args_list) == NULL_TREE)
3995 : dispatch_adjust_args_list = NULL_TREE;
3996 : }
3997 389 : if (dispatch_append_args)
3998 : {
3999 245 : nfirst_args = tree_to_shwi (TREE_PURPOSE (dispatch_append_args));
4000 245 : dispatch_append_args = TREE_VALUE (dispatch_append_args);
4001 : }
4002 471 : dispatch_device_num = omp_find_clause (dispatch_clauses, OMP_CLAUSE_DEVICE);
4003 471 : if (dispatch_device_num)
4004 287 : dispatch_device_num = OMP_CLAUSE_DEVICE_ID (dispatch_device_num);
4005 471 : dispatch_interop = omp_find_clause (dispatch_clauses, OMP_CLAUSE_INTEROP);
4006 471 : int nappend = 0, ninterop = 0;
4007 880 : for (tree t = dispatch_append_args; t; t = TREE_CHAIN (t))
4008 409 : nappend++;
4009 :
4010 : /* FIXME: error checking should be taken out of this function and
4011 : handled before any attempt at filtering or resolution happens.
4012 : Otherwise whether or not diagnostics appear is determined by
4013 : GCC internals, how good the front ends are at constant-folding,
4014 : the split between early/late resolution, etc instead of the code
4015 : as written by the user. */
4016 471 : if (dispatch_interop)
4017 861 : for (tree t = dispatch_interop; t; t = TREE_CHAIN (t))
4018 605 : if (OMP_CLAUSE_CODE (t) == OMP_CLAUSE_INTEROP)
4019 409 : ninterop++;
4020 471 : if (dispatch_interop && !dispatch_device_num)
4021 : {
4022 7 : gcc_checking_assert (ninterop > 1);
4023 7 : error_at (OMP_CLAUSE_LOCATION (dispatch_interop),
4024 : "the %<device%> clause must be present if the %<interop%> "
4025 : "clause has more than one list item");
4026 : }
4027 471 : if (nappend < ninterop)
4028 : {
4029 48 : error_at (OMP_CLAUSE_LOCATION (dispatch_interop),
4030 : "number of list items in %<interop%> clause (%d) "
4031 : "exceeds the number of %<append_args%> items (%d) for "
4032 : "%<declare variant%> candidate %qD", ninterop, nappend, fndecl);
4033 88 : inform (dispatch_append_args
4034 8 : ? EXPR_LOCATION (TREE_PURPOSE (dispatch_append_args))
4035 40 : : DECL_SOURCE_LOCATION (fndecl),
4036 : "%<declare variant%> candidate %qD declared here", fndecl);
4037 48 : ninterop = nappend;
4038 : }
4039 471 : if (dispatch_append_args)
4040 : {
4041 245 : tree *buffer = XALLOCAVEC (tree, nargs + nappend);
4042 245 : tree arg = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
4043 : /* Copy the first arguments; insert then the interop objects,
4044 : and then copy the rest (nargs - nfirst_args) args. */
4045 245 : int i;
4046 679 : for (i = 0; i < nfirst_args; i++)
4047 : {
4048 434 : arg = TREE_CHAIN (arg);
4049 434 : buffer[i] = CALL_EXPR_ARG (expr, i);
4050 : }
4051 : int j = ninterop;
4052 583 : for (tree t = dispatch_interop; t && j > 0; t = TREE_CHAIN (t))
4053 338 : if (OMP_CLAUSE_CODE (t) == OMP_CLAUSE_INTEROP)
4054 338 : buffer[i + --j] = OMP_CLAUSE_DECL (t);
4055 245 : gcc_checking_assert (j == 0);
4056 :
4057 : /* Do we need to create additional interop objects? */
4058 245 : if (ninterop < nappend)
4059 : {
4060 56 : if (dispatch_device_num == NULL_TREE)
4061 : /* Not remapping device number. */
4062 22 : dispatch_device_num = build_int_cst (integer_type_node,
4063 : GOMP_DEVICE_DEFAULT_OMP_61);
4064 56 : int nnew = nappend - ninterop;
4065 56 : tree nobjs = build_int_cst (integer_type_node, nnew);
4066 56 : tree a, t;
4067 :
4068 : /* Skip to the append_args clause for the first constructed
4069 : interop argument. */
4070 56 : tree apparg = dispatch_append_args;
4071 83 : for (j = 0; j < ninterop; j++)
4072 27 : apparg = TREE_CHAIN (apparg);
4073 :
4074 : /* omp_interop_t *objs[n]; */
4075 56 : tree objtype = build_pointer_type (pointer_sized_int_node);
4076 56 : t = build_array_type_nelts (objtype, nnew);
4077 56 : tree objs = create_tmp_var (t, "interopobjs");
4078 :
4079 : /* int target_tgtsync[n]; */
4080 56 : t = build_array_type_nelts (integer_type_node, nnew);
4081 56 : tree target_tgtsync = create_tmp_var (t, "tgt_tgtsync");
4082 :
4083 : /* Scan first to determine if we need a prefer_type array. */
4084 56 : tree prefer_type = NULL_TREE;
4085 56 : tree prefer_type_type = NULL_TREE;
4086 141 : for (j = ninterop, a = apparg; j < nappend; j++, a = TREE_CHAIN (a))
4087 64 : if (TREE_VALUE (a) != NULL_TREE)
4088 : {
4089 : /* const char *prefer_type[n]; */
4090 35 : t = build_qualified_type (char_type_node, TYPE_QUAL_CONST);
4091 35 : prefer_type_type = build_pointer_type (t);
4092 35 : t = build_array_type_nelts (prefer_type_type, nnew);
4093 35 : prefer_type = create_tmp_var (t, "pref_type");
4094 35 : break;
4095 : }
4096 :
4097 : /* Initialize the arrays, generating temp vars and clobbers for
4098 : the interop objects. (The constructed array holding the
4099 : pointers to these objects shouldn't need clobbering as there's
4100 : no reason for GOMP_interop to modify its contents.) */
4101 127 : for (j = ninterop, a = apparg; j < nappend; j++, a = TREE_CHAIN (a))
4102 : {
4103 : /* The allocated temporaries for the interop objects
4104 : have type omp_interop_t, which is an integer type that
4105 : can encode a pointer. */
4106 71 : tree objvar = create_tmp_var (pointer_sized_int_node, "interop");
4107 71 : buffer[i + j] = objvar;
4108 71 : TREE_ADDRESSABLE (objvar) = 1;
4109 : /* Generate a clobber for the temporary for when we're done
4110 : with it. */
4111 71 : tree c = build_clobber (pointer_sized_int_node,
4112 : CLOBBER_OBJECT_END);
4113 71 : c = build2 (MODIFY_EXPR, pointer_sized_int_node, objvar, c);
4114 71 : if (clobbers)
4115 15 : clobbers = build2 (COMPOUND_EXPR, TREE_TYPE (clobbers),
4116 : c, clobbers);
4117 : else
4118 : clobbers = c;
4119 :
4120 : /* objs[offset] = &objvar; */
4121 71 : tree offset = build_int_cst (integer_type_node, j - ninterop);
4122 71 : tree init = build4 (ARRAY_REF, objtype, objs, offset,
4123 : NULL_TREE, NULL_TREE);
4124 71 : init = build2 (MODIFY_EXPR, objtype, init,
4125 : build_fold_addr_expr (objvar));
4126 71 : if (init_code)
4127 15 : init_code = build2 (COMPOUND_EXPR, TREE_TYPE (init),
4128 : init_code, init);
4129 : else
4130 : init_code = init;
4131 :
4132 : /* target_tgtsync[offset] = tgt;
4133 : (Don't blame me, I didn't design the encoding of this
4134 : info into the dispatch interop clause data structure,
4135 : but the runtime wants a bit mask.) */
4136 71 : tree tree_tgt = TREE_OPERAND (TREE_PURPOSE (a), 0);
4137 71 : int tgt = 0;
4138 71 : if (TREE_PURPOSE (tree_tgt) == boolean_true_node)
4139 42 : tgt |= GOMP_INTEROP_TARGET;
4140 71 : if (TREE_VALUE (tree_tgt) == boolean_true_node)
4141 36 : tgt |= GOMP_INTEROP_TARGETSYNC;
4142 71 : init = build4 (ARRAY_REF, integer_type_node,
4143 : target_tgtsync, offset, NULL_TREE, NULL_TREE);
4144 71 : init = build2 (MODIFY_EXPR, integer_type_node, init,
4145 71 : build_int_cst (integer_type_node, tgt));
4146 71 : init_code = build2 (COMPOUND_EXPR, TREE_TYPE (init),
4147 : init_code, init);
4148 :
4149 71 : if (prefer_type)
4150 : {
4151 45 : tree pref = TREE_VALUE (a);
4152 45 : if (pref == NULL_TREE)
4153 6 : pref = null_pointer_node;
4154 : else
4155 39 : pref = build_fold_addr_expr (pref);
4156 45 : init = build4 (ARRAY_REF, prefer_type_type, prefer_type,
4157 : offset, NULL_TREE, NULL_TREE);
4158 45 : init = build2 (MODIFY_EXPR, prefer_type_type, init,
4159 : pref);
4160 45 : init_code = build2 (COMPOUND_EXPR, TREE_TYPE (init),
4161 : init_code, init);
4162 : }
4163 : }
4164 :
4165 56 : objs = build_fold_addr_expr (objs);
4166 56 : target_tgtsync = build_fold_addr_expr (target_tgtsync);
4167 56 : prefer_type = prefer_type ? build_fold_addr_expr (prefer_type)
4168 : : null_pointer_node;
4169 56 : tree fn = builtin_decl_explicit (BUILT_IN_GOMP_INTEROP);
4170 56 : tree create
4171 56 : = build_call_expr_loc (loc, fn, 11, dispatch_device_num,
4172 : nobjs, objs, target_tgtsync, prefer_type,
4173 : integer_zero_node, null_pointer_node,
4174 : integer_zero_node, null_pointer_node,
4175 : integer_zero_node, null_pointer_node);
4176 56 : if (init_code)
4177 56 : init_code = build2 (COMPOUND_EXPR, TREE_TYPE (create),
4178 : init_code, create);
4179 : else
4180 : init_code = create;
4181 :
4182 56 : cleanup
4183 56 : = build_call_expr_loc (loc, fn, 11, dispatch_device_num,
4184 : integer_zero_node, null_pointer_node,
4185 : null_pointer_node, null_pointer_node,
4186 : integer_zero_node, null_pointer_node,
4187 : nobjs, objs,
4188 : integer_zero_node, null_pointer_node);
4189 56 : if (clobbers)
4190 56 : cleanup = build2 (COMPOUND_EXPR, TREE_TYPE (clobbers),
4191 : cleanup, clobbers);
4192 : }
4193 :
4194 654 : for (j = 0; j < nappend; j++)
4195 : {
4196 : /* Fortran permits by-reference or by-value for the dummy arg
4197 : and by-value, by-reference, ptr by-reference as actual
4198 : argument. Handle this. */
4199 409 : tree obj = buffer[i + j]; // interop object
4200 409 : tree a2 = TREE_VALUE (arg); // parameter type
4201 766 : if (POINTER_TYPE_P (TREE_TYPE (obj))
4202 436 : && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (obj))))
4203 : {
4204 32 : tree t = TREE_TYPE (TREE_TYPE (obj));
4205 32 : gcc_checking_assert (INTEGRAL_TYPE_P (TREE_TYPE (t)));
4206 32 : obj = fold_build1 (INDIRECT_REF, t, obj);
4207 : }
4208 750 : if (POINTER_TYPE_P (TREE_TYPE (obj))
4209 420 : && INTEGRAL_TYPE_P (a2))
4210 : {
4211 43 : tree t = TREE_TYPE (TREE_TYPE (obj));
4212 43 : gcc_checking_assert (INTEGRAL_TYPE_P (t));
4213 43 : obj = fold_build1 (INDIRECT_REF, t, obj);
4214 : }
4215 497 : else if (INTEGRAL_TYPE_P (TREE_TYPE (obj))
4216 461 : && POINTER_TYPE_P (a2))
4217 : {
4218 18 : gcc_checking_assert (INTEGRAL_TYPE_P (TREE_TYPE (a2)));
4219 18 : obj = build_fold_addr_expr (obj);
4220 : }
4221 348 : else if (!INTEGRAL_TYPE_P (a2)
4222 348 : || !INTEGRAL_TYPE_P (TREE_TYPE (obj)))
4223 : {
4224 36 : tree t = TREE_TYPE (obj);
4225 36 : gcc_checking_assert (POINTER_TYPE_P (t)
4226 : && POINTER_TYPE_P (a2)
4227 : && INTEGRAL_TYPE_P (TREE_TYPE (t))
4228 : && INTEGRAL_TYPE_P (TREE_TYPE (a2)));
4229 : }
4230 409 : buffer[i + j] = obj;
4231 409 : arg = TREE_CHAIN (arg);
4232 : }
4233 245 : i += nappend;
4234 534 : for (j = nfirst_args; j < nargs; j++)
4235 289 : buffer[i++] = CALL_EXPR_ARG (expr, j);
4236 245 : nargs += nappend;
4237 245 : tree call = expr;
4238 490 : expr = build_call_array_loc (EXPR_LOCATION (expr), TREE_TYPE (call),
4239 245 : CALL_EXPR_FN (call), nargs, buffer);
4240 :
4241 : /* Copy all CALL_EXPR flags. */
4242 245 : CALL_EXPR_STATIC_CHAIN (expr) = CALL_EXPR_STATIC_CHAIN (call);
4243 245 : CALL_EXPR_TAILCALL (expr) = CALL_EXPR_TAILCALL (call);
4244 245 : CALL_EXPR_RETURN_SLOT_OPT (expr)
4245 245 : = CALL_EXPR_RETURN_SLOT_OPT (call);
4246 245 : CALL_FROM_THUNK_P (expr) = CALL_FROM_THUNK_P (call);
4247 245 : SET_EXPR_LOCATION (expr, EXPR_LOCATION (call));
4248 245 : CALL_EXPR_VA_ARG_PACK (expr) = CALL_EXPR_VA_ARG_PACK (call);
4249 : }
4250 :
4251 : /* Nothing to do for adjust_args? */
4252 471 : if (!dispatch_adjust_args_list || !TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
4253 260 : goto add_cleanup;
4254 :
4255 : /* Handle adjust_args. */
4256 966 : for (int i = 0; i < nargs; i++)
4257 : {
4258 755 : tree *arg_p = &CALL_EXPR_ARG (expr, i);
4259 :
4260 : /* Nothing to do if arg is constant null pointer. */
4261 755 : if (integer_zerop (*arg_p))
4262 6 : continue;
4263 :
4264 : bool need_device_ptr = false;
4265 : bool need_device_addr = false;
4266 2247 : for (int need_addr = 0; need_addr <= 1; need_addr++)
4267 2301 : for (tree arg = (need_addr
4268 1498 : ? TREE_VALUE (dispatch_adjust_args_list)
4269 1498 : : TREE_PURPOSE (dispatch_adjust_args_list));
4270 2301 : arg != NULL; arg = TREE_CHAIN (arg))
4271 : {
4272 1152 : if (TREE_VALUE (arg)
4273 1152 : && TREE_CODE (TREE_VALUE (arg)) == INTEGER_CST
4274 2304 : && wi::eq_p (i, wi::to_wide (TREE_VALUE (arg))))
4275 : {
4276 349 : if (need_addr)
4277 : need_device_addr = true;
4278 : else
4279 349 : need_device_ptr = true;
4280 : break;
4281 : }
4282 : }
4283 :
4284 749 : if (need_device_ptr || need_device_addr)
4285 : {
4286 740 : bool is_device_ptr = false;
4287 740 : bool has_device_addr = false;
4288 :
4289 740 : for (tree c = dispatch_clauses; c; c = TREE_CHAIN (c))
4290 : {
4291 454 : if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR
4292 454 : || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
4293 : {
4294 164 : tree decl1 = DECL_NAME (OMP_CLAUSE_DECL (c));
4295 164 : tree decl2 = tree_strip_nop_conversions (*arg_p);
4296 164 : if (TREE_CODE (decl2) == ADDR_EXPR)
4297 19 : decl2 = TREE_OPERAND (decl2, 0);
4298 164 : if (VAR_P (decl2) || TREE_CODE (decl2) == PARM_DECL)
4299 : {
4300 140 : decl2 = DECL_NAME (decl2);
4301 140 : if (decl1 == decl2
4302 203 : && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
4303 : {
4304 58 : if (need_device_addr)
4305 0 : warning_at (OMP_CLAUSE_LOCATION (c),
4306 0 : OPT_Wopenmp,
4307 : "%<is_device_ptr%> for %qD does"
4308 : " not imply %<has_device_addr%> "
4309 : "required for %<need_device_addr%>",
4310 0 : OMP_CLAUSE_DECL (c));
4311 : is_device_ptr = true;
4312 : break;
4313 : }
4314 82 : else if (decl1 == decl2)
4315 : {
4316 5 : if (need_device_ptr)
4317 10 : warning_at (OMP_CLAUSE_LOCATION (c),
4318 5 : OPT_Wopenmp,
4319 : "%<has_device_addr%> for %qD does"
4320 : " not imply %<is_device_ptr%> "
4321 : "required for %<need_device_ptr%>",
4322 5 : OMP_CLAUSE_DECL (c));
4323 : has_device_addr = true;
4324 : break;
4325 : }
4326 : }
4327 : }
4328 : }
4329 :
4330 349 : if ((need_device_ptr && !is_device_ptr)
4331 58 : || (need_device_addr && !has_device_addr))
4332 : {
4333 291 : if (dispatch_device_num == NULL_TREE)
4334 : {
4335 : // device_num = omp_get_default_device ()
4336 105 : tree fn
4337 105 : = builtin_decl_explicit (BUILT_IN_OMP_GET_DEFAULT_DEVICE);
4338 105 : tree call = build_call_expr (fn, 0);
4339 105 : dispatch_device_num = create_tmp_var_raw (TREE_TYPE (call));
4340 105 : tree init
4341 105 : = build4 (TARGET_EXPR, TREE_TYPE (call),
4342 : dispatch_device_num, call, NULL_TREE, NULL_TREE);
4343 105 : if (init_code)
4344 0 : init_code = build2 (COMPOUND_EXPR, TREE_TYPE (init),
4345 : init_code, init);
4346 : else
4347 : init_code = init;
4348 : }
4349 :
4350 : // We want to emit the following statement:
4351 : // mapped_arg = omp_get_mapped_ptr (arg,
4352 : // device_num)
4353 : // but arg has to be the actual pointer, not a
4354 : // reference or a conversion expression.
4355 291 : tree actual_ptr
4356 291 : = ((TREE_CODE (*arg_p) == ADDR_EXPR)
4357 291 : ? TREE_OPERAND (*arg_p, 0)
4358 38 : : *arg_p);
4359 291 : if (TREE_CODE (actual_ptr) == NOP_EXPR
4360 291 : && (TREE_CODE (TREE_TYPE (TREE_OPERAND (actual_ptr, 0)))
4361 : == REFERENCE_TYPE))
4362 : {
4363 6 : actual_ptr = TREE_OPERAND (actual_ptr, 0);
4364 6 : actual_ptr = build1 (INDIRECT_REF,
4365 6 : TREE_TYPE (actual_ptr),
4366 : actual_ptr);
4367 : }
4368 291 : tree fn = builtin_decl_explicit (BUILT_IN_OMP_GET_MAPPED_PTR);
4369 291 : tree mapped_arg = build_call_expr_loc (loc, fn, 2, actual_ptr,
4370 : dispatch_device_num);
4371 :
4372 291 : if (TREE_CODE (*arg_p) == ADDR_EXPR
4373 291 : || (TREE_CODE (TREE_TYPE (actual_ptr)) == REFERENCE_TYPE))
4374 47 : mapped_arg = build_fold_addr_expr (mapped_arg);
4375 244 : else if (TREE_CODE (*arg_p) == NOP_EXPR)
4376 35 : mapped_arg = build1 (NOP_EXPR, TREE_TYPE (*arg_p),
4377 : mapped_arg);
4378 291 : *arg_p = mapped_arg;
4379 : }
4380 : }
4381 : }
4382 :
4383 471 : add_cleanup:
4384 471 : if (cleanup)
4385 : {
4386 56 : tree result = NULL_TREE;
4387 56 : if (want_value && pointerize)
4388 : {
4389 0 : tree tmp = create_tmp_var (build_pointer_type (TREE_TYPE (expr)),
4390 : "cleanuptmp");
4391 0 : result = build_simple_mem_ref (tmp);
4392 0 : expr = build2 (INIT_EXPR, TREE_TYPE (tmp), tmp,
4393 : build_fold_addr_expr (expr));
4394 : }
4395 56 : else if (want_value)
4396 : {
4397 18 : tree tmp = create_tmp_var (TREE_TYPE (expr), "cleanuptmp");
4398 18 : result = tmp;
4399 18 : expr = build2 (INIT_EXPR, TREE_TYPE (tmp), tmp, expr);
4400 : }
4401 56 : if (init_code)
4402 56 : expr = build2 (COMPOUND_EXPR, TREE_TYPE (expr), init_code, expr);
4403 56 : expr = build2 (TRY_FINALLY_EXPR, void_type_node, expr, cleanup);
4404 :
4405 56 : if (result)
4406 18 : expr = build2 (COMPOUND_EXPR, TREE_TYPE (result), expr, result);
4407 : }
4408 415 : else if (init_code)
4409 105 : expr = build2 (COMPOUND_EXPR, TREE_TYPE (expr), init_code, expr);
4410 :
4411 : return expr;
4412 : }
4413 :
4414 : /* Helper function for gimplify_call_expr: handle "declare variant"
4415 : resolution and expansion of the CALL_EXPR EXPR. WANT_VALUE is true
4416 : if the result value of the call is needed; POINTERIZE is true if it
4417 : also needs to be pointerized. If OMP_DISPATCH_P is true, apply
4418 : associated transformations using DISPATCH_CLAUSES.
4419 : This function may return either the original call or some other
4420 : expression such as a conditional to select one of multiple calls.
4421 :
4422 : FIXME: this function is written to be independent of gimplifier internals
4423 : so that it could be moved to omp-general.cc and invoked from the
4424 : front ends instead, per PR115076. */
4425 :
static tree
expand_variant_call_expr (tree expr, bool want_value, bool pointerize,
			  bool omp_dispatch_p, tree dispatch_clauses)
{
  /* If we've already processed this call, stop now.  This can happen
     if the variant call resolves to the original function, or to
     a dynamic conditional that includes the default call to the original
     function.  */
  gcc_assert (omp_resolved_variant_calls != NULL);
  if (omp_resolved_variant_calls->contains (expr))
    return expr;

  tree fndecl = get_callee_fndecl (expr);
  tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (expr));
  location_t loc = EXPR_LOCATION (expr);
  tree construct_context = omp_get_construct_context ();
  /* Collect all declared variants for FNDECL, then narrow them to the
     ones whose selectors can be scored in the current context.  */
  vec<struct omp_variant> all_candidates
    = omp_declare_variant_candidates (fndecl, construct_context);
  gcc_assert (!all_candidates.is_empty ());
  vec<struct omp_variant> candidates
    = omp_get_dynamic_candidates (all_candidates, construct_context);

  /* If the variant call could be resolved now, build a nest of COND_EXPRs
     if there are dynamic candidates, and/or a new CALL_EXPR for each
     candidate call.  */
  if (!candidates.is_empty ())
    {
      int n = candidates.length ();
      tree tail = NULL_TREE;

      /* Walk backwards so that TAIL accumulates the "else" part of each
	 dynamic COND_EXPR; the last candidate is the static default.  */
      for (int i = n - 1; i >= 0; i--)
	{
	  if (tail)
	    gcc_assert (candidates[i].dynamic_selector);
	  else
	    gcc_assert (!candidates[i].dynamic_selector);
	  if (candidates[i].alternative == fndecl)
	    {
	      /* We should only get the original function back as the
		 default.  */
	      gcc_assert (!tail);
	      omp_resolved_variant_calls->add (expr);
	      tail = expr;
	    }
	  else
	    {
	      /* For the final static selector, we can re-use the old
		 CALL_EXPR and just replace the function, unless it may
		 need dispatch argument modification.  Otherwise,
		 make a copy of it.  */
	      tree thiscall = (tail || omp_dispatch_p
			       ? unshare_expr (expr) : expr);
	      CALL_EXPR_FN (thiscall) = build1 (ADDR_EXPR, fnptrtype,
						candidates[i].alternative);
	      if (omp_dispatch_p)
		thiscall = modify_call_for_omp_dispatch (thiscall,
							 dispatch_clauses,
							 want_value,
							 pointerize);
	      if (!tail)
		tail = thiscall;
	      else
		tail = build3 (COND_EXPR, TREE_TYPE (expr),
			       omp_dynamic_cond (candidates[i].selector,
						 find_supercontext ()),
			       thiscall, tail);
	    }
	}
      return tail;
    }

  /* If we couldn't resolve the variant call now, expand it into a loop using
     a switch and OMP_NEXT_VARIANT for dispatch.  The ompdevlow pass will
     handle OMP_NEXT_VARIANT expansion.  */
  else
    {
      /* If we need a usable return value, we need a temporary
	 and an assignment in each alternative.  This logic was borrowed
	 from gimplify_cond_expr.  */
      tree type = TREE_TYPE (expr);
      tree tmp = NULL_TREE, result = NULL_TREE;

      if (want_value)
	{
	  if (pointerize)
	    {
	      /* The value must live in memory or be addressable: hold a
		 pointer in the temporary and dereference it for RESULT.  */
	      type = build_pointer_type (type);
	      tmp = create_tmp_var (type, "iftmp");
	      result = build_simple_mem_ref_loc (loc, tmp);
	    }
	  else
	    {
	      tmp = create_tmp_var (type, "iftmp");
	      result = tmp;
	    }
	}

      /* Preprocess the all_candidates array so that the alternative field of
	 each element holds the actual function call expression and possible
	 assignment, instead of just the decl for the variant function.  */
      for (unsigned int i = 0; i < all_candidates.length (); i++)
	{
	  tree decl = all_candidates[i].alternative;
	  tree thiscall;

	  /* We need to turn the decl from the candidate into a function
	     call and possible assignment, and stuff that in
	     the directive seq of the gomp_variant.  */
	  if (decl == fndecl)
	    {
	      thiscall = expr;
	      omp_resolved_variant_calls->add (expr);
	    }
	  else
	    {
	      thiscall = unshare_expr (expr);
	      CALL_EXPR_FN (thiscall) = build1 (ADDR_EXPR, fnptrtype, decl);
	      if (omp_dispatch_p)
		thiscall = modify_call_for_omp_dispatch (thiscall,
							 dispatch_clauses,
							 want_value,
							 pointerize);
	    }
	  if (pointerize)
	    thiscall = build_fold_addr_expr_loc (loc, thiscall);
	  if (want_value)
	    thiscall = build2 (INIT_EXPR, type, tmp, thiscall);
	  all_candidates[i].alternative = thiscall;
	}

      /* Flag the function so the ompdevlow pass knows it must expand the
	 OMP_NEXT_VARIANT placeholders built below.  */
      cgraph_node::get (cfun->decl)->has_omp_variant_constructs = 1;
      tree expansion = expand_late_variant_directive (all_candidates,
						      construct_context);
      if (result)
	expansion = build2 (COMPOUND_EXPR, TREE_TYPE (result),
			    expansion, result);
      return expansion;
    }
}
4565 :
4566 : /* Wrapper around expand_variant_call_expr to interface with gimplifier
4567 : state. EXPR and OMP_DISPATCH_P are as for expand_variant_call_expr,
4568 : FALLBACK is used to compute the WANT_VALUE and POINTERIZE arguments. */
4569 : static tree
4570 1322 : gimplify_variant_call_expr (tree expr, fallback_t fallback,
4571 : bool omp_dispatch_p)
4572 : {
4573 1322 : tree type = TREE_TYPE (expr);
4574 1322 : bool want_value = (fallback != fb_none && !VOID_TYPE_P (type));
4575 669 : bool pointerize = false;
4576 : /* If the result value must be an lvalue or the result type must
4577 : live in memory, then we have to pointerize it if we need a temporary. */
4578 669 : if (want_value
4579 669 : && ((!(fallback & fb_rvalue) && (fallback & fb_lvalue))
4580 669 : || TREE_ADDRESSABLE (type)))
4581 0 : pointerize = true;
4582 :
4583 1834 : return expand_variant_call_expr (expr, want_value, pointerize,
4584 : omp_dispatch_p,
4585 : (omp_dispatch_p
4586 512 : ? gimplify_omp_ctxp->clauses
4587 1322 : : NULL_TREE));
4588 : }
4589 :
4590 :
4591 : /* Helper function for gimplify_call_expr, called via walk_tree.
4592 : Find used user labels. */
4593 :
4594 : static tree
4595 766 : find_used_user_labels (tree *tp, int *, void *)
4596 : {
4597 766 : if (TREE_CODE (*tp) == LABEL_EXPR
4598 15 : && !DECL_ARTIFICIAL (LABEL_EXPR_LABEL (*tp))
4599 15 : && DECL_NAME (LABEL_EXPR_LABEL (*tp))
4600 781 : && TREE_USED (LABEL_EXPR_LABEL (*tp)))
4601 15 : return *tp;
4602 : return NULL_TREE;
4603 : }
4604 :
4605 :
4606 : /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
4607 : WANT_VALUE is true if the result of the call is desired. */
4608 :
static enum gimplify_status
gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
{
  bool want_value = (fallback != fb_none);
  tree fndecl, parms, p, fnptrtype;
  enum gimplify_status ret;
  int i, nargs;
  gcall *call;
  bool builtin_va_start_p = false, omp_dispatch_p = false;
  location_t loc = EXPR_LOCATION (*expr_p);

  gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);

  /* For reliable diagnostics during inlining, it is necessary that
     every call_expr be annotated with file and line.  */
  if (! EXPR_HAS_LOCATION (*expr_p))
    SET_EXPR_LOCATION (*expr_p, input_location);

  /* Gimplify internal functions created in the FEs.  Internal calls have
     a null CALL_EXPR_FN.  */
  if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
    {
      enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
      if (ifn == IFN_GOMP_DISPATCH)
	{
	  /* Unwrap the real call from the dispatch marker and fall
	     through to normal processing with OMP_DISPATCH_P set.  */
	  gcc_assert (flag_openmp
		      && gimplify_omp_ctxp
		      && gimplify_omp_ctxp->code == OMP_DISPATCH);
	  *expr_p = CALL_EXPR_ARG (*expr_p, 0);
	  gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
	  if (! EXPR_HAS_LOCATION (*expr_p))
	    SET_EXPR_LOCATION (*expr_p, input_location);
	  omp_dispatch_p = true;
	}
      else
	{
	  if (want_value)
	    return GS_ALL_DONE;

	  nargs = call_expr_nargs (*expr_p);
	  auto_vec<tree> vargs (nargs);

	  if (ifn == IFN_ASSUME)
	    {
	      if (simple_condition_p (CALL_EXPR_ARG (*expr_p, 0)))
		{
		  /* If the [[assume (cond)]]; condition is simple
		     enough and can be evaluated unconditionally
		     without side-effects, expand it as
		     if (!cond) __builtin_unreachable ();  */
		  tree fndecl = builtin_decl_explicit (BUILT_IN_UNREACHABLE);
		  *expr_p
		    = build3 (COND_EXPR, void_type_node,
			      CALL_EXPR_ARG (*expr_p, 0), void_node,
			      build_call_expr_loc (EXPR_LOCATION (*expr_p),
						   fndecl, 0));
		  return GS_OK;
		}
	      /* If not optimizing, ignore the assumptions unless there
		 are used user labels in it.  */
	      if ((!optimize
		   && !walk_tree_without_duplicates (&CALL_EXPR_ARG (*expr_p,
								     0),
						     find_used_user_labels,
						     NULL))
		  || seen_error ())
		{
		  *expr_p = NULL_TREE;
		  return GS_ALL_DONE;
		}
	      /* Temporarily, until gimple lowering, transform
		 .ASSUME (cond);
		 into:
		 [[assume (guard)]]
		 {
		   guard = cond;
		 }
		 such that gimple lowering can outline the condition into
		 a separate function easily.  */
	      tree guard = create_tmp_var (boolean_type_node);
	      *expr_p = build2 (MODIFY_EXPR, void_type_node, guard,
				gimple_boolify (CALL_EXPR_ARG (*expr_p, 0)));
	      *expr_p = build3 (BIND_EXPR, void_type_node, NULL, *expr_p, NULL);
	      /* Gimplify the condition in its own context so it can later
		 be outlined wholesale.  */
	      push_gimplify_context ();
	      gimple_seq body = NULL;
	      gimple *g = gimplify_and_return_first (*expr_p, &body);
	      pop_gimplify_context (g);
	      g = gimple_build_assume (guard, body);
	      gimple_set_location (g, loc);
	      gimplify_seq_add_stmt (pre_p, g);
	      *expr_p = NULL_TREE;
	      return GS_ALL_DONE;
	    }
	  else if (ifn == IFN_UBSAN_BOUNDS
		   && nargs == 3
		   && integer_onep (CALL_EXPR_ARG (*expr_p, 0)))
	    {
	      /* If first argument is one, add TYPE_MAX_VALUE (TYPE_DOMAIN (t))
		 to 3rd argument and change first argument to 0.  This is
		 done by ubsan_instrument_bounds so that we can use the
		 max value from gimplify_type_sizes here instead of original
		 expression for VLAs.  */
	      tree type = TREE_TYPE (CALL_EXPR_ARG (*expr_p, 0));
	      CALL_EXPR_ARG (*expr_p, 0) = build_int_cst (type, 0);
	      gcc_assert (TREE_CODE (type) == POINTER_TYPE);
	      type = TREE_TYPE (type);
	      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
	      tree maxv = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	      gcc_assert (maxv);
	      tree arg3 = CALL_EXPR_ARG (*expr_p, 2);
	      CALL_EXPR_ARG (*expr_p, 2)
		= fold_build2 (PLUS_EXPR, TREE_TYPE (arg3), maxv, arg3);
	    }

	  /* Gimplify each argument and rebuild the call as a GIMPLE
	     internal-call statement appended to PRE_P.  */
	  for (i = 0; i < nargs; i++)
	    {
	      gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
			    EXPR_LOCATION (*expr_p));
	      vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
	    }

	  gcall *call = gimple_build_call_internal_vec (ifn, vargs);
	  gimple_call_set_nothrow (call, TREE_NOTHROW (*expr_p));
	  gimplify_seq_add_stmt (pre_p, call);
	  return GS_ALL_DONE;
	}
    }

  /* This may be a call to a builtin function.

     Builtin function calls may be transformed into different
     (and more efficient) builtin function calls under certain
     circumstances.  Unfortunately, gimplification can muck things
     up enough that the builtin expanders are not aware that certain
     transformations are still valid.

     So we attempt transformation/gimplification of the call before
     we gimplify the CALL_EXPR.  At this time we do not manage to
     transform all calls in the same manner as the expanders do, but
     we do transform most of them.  */
  fndecl = get_callee_fndecl (*expr_p);
  if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_BUILT_IN_ALLOCA:
	/* If the call has been built for a variable-sized object, then we
	   want to restore the stack level when the enclosing BIND_EXPR is
	   exited to reclaim the allocated space; otherwise, we precisely
	   need to do the opposite and preserve the latest stack level.  */
	if (CALL_ALLOCA_FOR_VAR_P (*expr_p))
	  gimplify_ctxp->save_stack = true;
	else
	  gimplify_ctxp->keep_stack = true;
	break;

      case BUILT_IN_VA_START:
	{
	  builtin_va_start_p = true;
	  if (call_expr_nargs (*expr_p) < 2)
	    {
	      error ("too few arguments to function %<va_start%>");
	      *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
	      return GS_OK;
	    }

	  if (fold_builtin_next_arg (*expr_p, true))
	    {
	      *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
	      return GS_OK;
	    }
	  break;
	}

      case BUILT_IN_EH_RETURN:
	cfun->calls_eh_return = true;
	break;

      case BUILT_IN_CLEAR_PADDING:
	if (call_expr_nargs (*expr_p) == 1)
	  {
	    /* Remember the original type of the argument in an internal
	       dummy second argument, as in GIMPLE pointer conversions are
	       useless.  Also mark this call as not for automatic
	       initialization in the internal dummy third argument.  */
	    p = CALL_EXPR_ARG (*expr_p, 0);
	    *expr_p
	      = build_call_expr_loc (EXPR_LOCATION (*expr_p), fndecl, 2, p,
				     build_zero_cst (TREE_TYPE (p)));
	    return GS_OK;
	  }
	break;

      default:
	;
      }
  if (fndecl && fndecl_built_in_p (fndecl))
    {
      tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
      if (new_tree && new_tree != *expr_p)
	{
	  /* There was a transformation of this call which computes the
	     same value, but in a more efficient way.  Return and try
	     again.  */
	  *expr_p = new_tree;
	  return GS_OK;
	}
    }

  /* Remember the original function pointer type.  */
  fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));

  /* Handle "declare variant" resolution and arglist processing.  */
  if (flag_openmp
      && fndecl
      && cfun
      && (cfun->curr_properties & PROP_gimple_any) == 0
      && !omp_has_novariants ()
      && lookup_attribute ("omp declare variant base",
			   DECL_ATTRIBUTES (fndecl)))
    {
      tree orig = *expr_p;
      *expr_p = gimplify_variant_call_expr (*expr_p, fallback,
					    omp_dispatch_p);

      /* This may resolve to the same call, or the call expr with just
	 the function replaced, in which case we should just continue to
	 gimplify it normally.  Otherwise, if we get something else back,
	 stop here and re-gimplify the whole replacement expr.  */
      if (*expr_p != orig)
	return GS_OK;
    }

  /* There is a sequence point before the call, so any side effects in
     the calling expression must occur before the actual call.  Force
     gimplify_expr to use an internal post queue.  */
  ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
		       is_gimple_call_addr, fb_rvalue);

  if (ret == GS_ERROR)
    return GS_ERROR;

  nargs = call_expr_nargs (*expr_p);

  /* Get argument types for verification.  */
  fndecl = get_callee_fndecl (*expr_p);
  parms = NULL_TREE;
  if (fndecl)
    parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  else
    parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));

  if (fndecl && DECL_ARGUMENTS (fndecl))
    p = DECL_ARGUMENTS (fndecl);
  else if (parms)
    p = parms;
  else
    p = NULL_TREE;
  /* Advance P past the named parameters; if it runs out before NARGS,
     the trailing arguments are unnamed variadic ones.  */
  for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
    ;

  /* If the last argument is __builtin_va_arg_pack () and it is not
     passed as a named argument, decrease the number of CALL_EXPR
     arguments and set instead the CALL_EXPR_VA_ARG_PACK flag.  */
  if (!p
      && i < nargs
      && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
    {
      tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
      tree last_arg_fndecl = get_callee_fndecl (last_arg);

      if (last_arg_fndecl
	  && fndecl_built_in_p (last_arg_fndecl, BUILT_IN_VA_ARG_PACK))
	{
	  tree call = *expr_p;

	  --nargs;
	  *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
					  CALL_EXPR_FN (call),
					  nargs, CALL_EXPR_ARGP (call));

	  /* Copy all CALL_EXPR flags, location and block, except
	     CALL_EXPR_VA_ARG_PACK flag.  */
	  CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
	  CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
	  CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
	    = CALL_EXPR_RETURN_SLOT_OPT (call);
	  CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
	  SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));

	  /* Set CALL_EXPR_VA_ARG_PACK.  */
	  CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
	}
    }

  /* If the call returns twice then after building the CFG the call
     argument computations will no longer dominate the call because
     we add an abnormal incoming edge to the call.  So do not use SSA
     vars there.  */
  bool returns_twice = call_expr_flags (*expr_p) & ECF_RETURNS_TWICE;


  /* Gimplify the function arguments.  */
  if (nargs > 0)
    {
      /* Process in the target's preferred push order so temporaries are
	 evaluated in the order arguments will be pushed.  */
      for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
	   PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
	   PUSH_ARGS_REVERSED ? i-- : i++)
	{
	  enum gimplify_status t;

	  /* Avoid gimplifying the second argument to va_start, which needs to
	     be the plain PARM_DECL.  */
	  if ((i != 1) || !builtin_va_start_p)
	    {
	      tree *arg_p = &CALL_EXPR_ARG (*expr_p, i);

	      if (gimplify_omp_ctxp && gimplify_omp_ctxp->code == OMP_DISPATCH)
		gimplify_omp_ctxp->in_call_args = true;
	      t = gimplify_arg (arg_p, pre_p, EXPR_LOCATION (*expr_p),
				!returns_twice);
	      if (gimplify_omp_ctxp && gimplify_omp_ctxp->code == OMP_DISPATCH)
		gimplify_omp_ctxp->in_call_args = false;

	      /* Record failure but keep gimplifying remaining arguments.  */
	      if (t == GS_ERROR)
		ret = GS_ERROR;
	    }
	}
    }

  /* Gimplify the static chain.  */
  if (CALL_EXPR_STATIC_CHAIN (*expr_p))
    {
      if (fndecl && !DECL_STATIC_CHAIN (fndecl))
	CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
      else
	{
	  enum gimplify_status t;
	  t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
			    EXPR_LOCATION (*expr_p), ! returns_twice);
	  if (t == GS_ERROR)
	    ret = GS_ERROR;
	}
    }

  /* Verify the function result.  */
  if (want_value && fndecl
      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
    {
      error_at (loc, "using result of function returning %<void%>");
      ret = GS_ERROR;
    }

  /* Try this again in case gimplification exposed something.  */
  if (ret != GS_ERROR)
    {
      tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);

      if (new_tree && new_tree != *expr_p)
	{
	  /* There was a transformation of this call which computes the
	     same value, but in a more efficient way.  Return and try
	     again.  */
	  *expr_p = new_tree;
	  return GS_OK;
	}
    }
  else
    {
      *expr_p = error_mark_node;
      return GS_ERROR;
    }

  /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
     decl.  This allows us to eliminate redundant or useless
     calls to "const" functions.  */
  if (TREE_CODE (*expr_p) == CALL_EXPR)
    {
      int flags = call_expr_flags (*expr_p);
      if (flags & (ECF_CONST | ECF_PURE)
	  /* An infinite loop is considered a side effect.  */
	  && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
	TREE_SIDE_EFFECTS (*expr_p) = 0;
    }

  /* If the value is not needed by the caller, emit a new GIMPLE_CALL
     and clear *EXPR_P.  Otherwise, leave *EXPR_P in its gimplified
     form and delegate the creation of a GIMPLE_CALL to
     gimplify_modify_expr.  This is always possible because when
     WANT_VALUE is true, the caller wants the result of this call into
     a temporary, which means that we will emit an INIT_EXPR in
     internal_get_tmp_var which will then be handled by
     gimplify_modify_expr.  */
  if (!want_value)
    {
      /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
	 have to do is replicate it as a GIMPLE_CALL tuple.  */
      gimple_stmt_iterator gsi;
      call = gimple_build_call_from_tree (*expr_p, fnptrtype);
      notice_special_calls (call);
      gimplify_seq_add_stmt (pre_p, call);
      gsi = gsi_last (*pre_p);
      maybe_fold_stmt (&gsi);
      *expr_p = NULL_TREE;
    }
  else
    /* Remember the original function type.  */
    CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
				     CALL_EXPR_FN (*expr_p));

  return ret;
}
5019 :
5020 : /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
5021 : rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
5022 :
5023 : TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
5024 : condition is true or false, respectively. If null, we should generate
5025 : our own to skip over the evaluation of this specific expression.
5026 :
5027 : LOCUS is the source location of the COND_EXPR.
5028 :
5029 : The condition_uid is a discriminator tag for condition coverage used to map
5030 : conditions to its corresponding full Boolean function.
5031 :
5032 : This function is the tree equivalent of do_jump.
5033 :
5034 : shortcut_cond_r should only be called by shortcut_cond_expr. */
5035 :
static tree
shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
		 location_t locus, unsigned condition_uid)
{
  tree local_label = NULL_TREE;
  tree t, expr = NULL;

  /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
     retain the shortcut semantics.  Just insert the gotos here;
     shortcut_cond_expr will append the real blocks later.  */
  if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
    {
      location_t new_locus;

      /* Turn if (a && b) into

	 if (a); else goto no;
	 if (b) goto yes; else goto no;
	 (no:) */

      /* If the caller supplied no "false" label, fall out to a local
	 one emitted at the end of this expansion.  */
      if (false_label_p == NULL)
	false_label_p = &local_label;

      /* Keep the original source location on the first 'if'.  */
      t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus,
			   condition_uid);
      append_to_statement_list (t, &expr);

      /* Set the source location of the && on the second 'if'.  */
      new_locus = rexpr_location (pred, locus);
      t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
			   new_locus, condition_uid);
      append_to_statement_list (t, &expr);
    }
  else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
    {
      location_t new_locus;

      /* Turn if (a || b) into

	 if (a) goto yes;
	 if (b) goto yes; else goto no;
	 (yes:) */

      /* Mirror of the && case: a missing "true" label becomes a local
	 label placed after both tests.  */
      if (true_label_p == NULL)
	true_label_p = &local_label;

      /* Keep the original source location on the first 'if'.  */
      t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus,
			   condition_uid);
      append_to_statement_list (t, &expr);

      /* Set the source location of the || on the second 'if'.  */
      new_locus = rexpr_location (pred, locus);
      t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
			   new_locus, condition_uid);
      append_to_statement_list (t, &expr);
    }
  else if (TREE_CODE (pred) == COND_EXPR
	   && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
	   && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
    {
      location_t new_locus;

      /* As long as we're messing with gotos, turn if (a ? b : c) into
	 if (a)
	   if (b) goto yes; else goto no;
	 else
	   if (c) goto yes; else goto no;

	 Don't do this if one of the arms has void type, which can happen
	 in C++ when the arm is throw.  */

      /* Keep the original source location on the first 'if'.  Set the source
	 location of the ? on the second 'if'.  */
      new_locus = rexpr_location (pred, locus);
      expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
		     shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
				      false_label_p, locus, condition_uid),
		     shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
				      false_label_p, new_locus,
				      condition_uid));
      tree_associate_condition_with_expr (expr, condition_uid);
    }
  else
    {
      /* Base case: a simple predicate becomes a two-way conditional
	 jump; build_and_jump creates labels on demand through the
	 label pointers.  */
      expr = build3 (COND_EXPR, void_type_node, pred,
		     build_and_jump (true_label_p),
		     build_and_jump (false_label_p));
      SET_EXPR_LOCATION (expr, locus);
      tree_associate_condition_with_expr (expr, condition_uid);
    }

  /* Emit the local fall-through label if one was needed above.  */
  if (local_label)
    {
      t = build1 (LABEL_EXPR, void_type_node, local_label);
      append_to_statement_list (t, &expr);
    }

  return expr;
}
5137 :
5138 : /* If EXPR is a GOTO_EXPR, return it. If it is a STATEMENT_LIST, skip
5139 : any of its leading DEBUG_BEGIN_STMTS and recurse on the subsequent
5140 : statement, if it is the last one. Otherwise, return NULL. */
5141 :
5142 : static tree
5143 11513674 : find_goto (tree expr)
5144 : {
5145 11838043 : if (!expr)
5146 : return NULL_TREE;
5147 :
5148 9898429 : if (TREE_CODE (expr) == GOTO_EXPR)
5149 : return expr;
5150 :
5151 7912037 : if (TREE_CODE (expr) != STATEMENT_LIST)
5152 : return NULL_TREE;
5153 :
5154 1030794 : tree_stmt_iterator i = tsi_start (expr);
5155 :
5156 1386995 : while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
5157 356201 : tsi_next (&i);
5158 :
5159 10461100 : if (!tsi_one_before_end_p (i))
5160 : return NULL_TREE;
5161 :
5162 324369 : return find_goto (tsi_stmt (i));
5163 : }
5164 :
5165 : /* Same as find_goto, except that it returns NULL if the destination
5166 : is not a LABEL_DECL. */
5167 :
5168 : static inline tree
5169 11513674 : find_goto_label (tree expr)
5170 : {
5171 11513674 : tree dest = find_goto (expr);
5172 13500066 : if (dest && TREE_CODE (GOTO_DESTINATION (dest)) == LABEL_DECL)
5173 1986281 : return dest;
5174 : return NULL_TREE;
5175 : }
5176 :
5177 :
5178 : /* Given a multi-term condition (ANDIF, ORIF), walk the predicate PRED and tag
5179 : every basic condition with CONDITION_UID. Two basic conditions share the
5180 : CONDITION_UID discriminator when they belong to the same predicate, which is
5181 : used by the condition coverage. Doing this as an explicit step makes for a
5182 : simpler implementation than weaving it into the splitting code as the
5183 : splitting code eventually calls the entry point gimplfiy_expr which makes
5184 : bookkeeping complicated. */
5185 : static void
5186 569885 : tag_shortcut_cond (tree pred, unsigned condition_uid)
5187 : {
5188 608591 : if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR
5189 608591 : || TREE_CODE (pred) == TRUTH_ORIF_EXPR)
5190 : {
5191 558732 : tree fst = TREE_OPERAND (pred, 0);
5192 558732 : tree lst = TREE_OPERAND (pred, 1);
5193 :
5194 558732 : if (TREE_CODE (fst) == TRUTH_ANDIF_EXPR
5195 558732 : || TREE_CODE (fst) == TRUTH_ORIF_EXPR)
5196 166951 : tag_shortcut_cond (fst, condition_uid);
5197 391781 : else if (TREE_CODE (fst) == COND_EXPR)
5198 244 : tree_associate_condition_with_expr (fst, condition_uid);
5199 :
5200 558732 : if (TREE_CODE (lst) == TRUTH_ANDIF_EXPR
5201 558732 : || TREE_CODE (lst) == TRUTH_ORIF_EXPR)
5202 : tag_shortcut_cond (lst, condition_uid);
5203 520026 : else if (TREE_CODE (lst) == COND_EXPR)
5204 444 : tree_associate_condition_with_expr (lst, condition_uid);
5205 : }
5206 569885 : }
5207 :
5208 : /* Given a conditional expression EXPR with short-circuit boolean
5209 : predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
5210 : predicate apart into the equivalent sequence of conditionals. CONDITION_UID
5211 : is the tag/discriminator for this EXPR - all basic conditions in the
5212 : expression will be given the same CONDITION_UID. */
5213 : static tree
5214 402934 : shortcut_cond_expr (tree expr, unsigned condition_uid)
5215 : {
5216 402934 : tree pred = TREE_OPERAND (expr, 0);
5217 402934 : tree then_ = TREE_OPERAND (expr, 1);
5218 402934 : tree else_ = TREE_OPERAND (expr, 2);
5219 402934 : tree true_label, false_label, end_label, t;
5220 402934 : tree *true_label_p;
5221 402934 : tree *false_label_p;
5222 402934 : bool emit_end, emit_false, jump_over_else;
              /* Whether each arm contains code that must actually be emitted;
                 arms without side effects can simply forward labels.  */
5223 402934 : bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
5224 402934 : bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
5225 :
              /* Tag every basic condition in PRED for condition coverage
                 before the predicate is torn apart below.  */
5226 402934 : tag_shortcut_cond (pred, condition_uid);
5227 :
5228 : /* First do simple transformations. */
5229 402934 : if (!else_se)
5230 : {
5231 : /* If there is no 'else', turn
5232 : if (a && b) then c
5233 : into
5234 : if (a) if (b) then c. */
5235 290492 : while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
5236 : {
5237 : /* Keep the original source location on the first 'if'. */
5238 50743 : location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
5239 50743 : TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
5240 : /* Set the source location of the && on the second 'if'. */
5241 50743 : if (rexpr_has_location (pred))
5242 49723 : SET_EXPR_LOCATION (expr, rexpr_location (pred));
5243 50743 : then_ = shortcut_cond_expr (expr, condition_uid);
5244 50743 : then_se = then_ && TREE_SIDE_EFFECTS (then_);
5245 50743 : pred = TREE_OPERAND (pred, 0);
5246 50743 : expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
5247 50743 : SET_EXPR_LOCATION (expr, locus);
5248 : }
5249 : }
5250 :
5251 402934 : if (!then_se)
5252 : {
5253 : /* If there is no 'then', turn
5254 : if (a || b); else d
5255 : into
5256 : if (a); else if (b); else d. */
5257 25219 : while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
5258 : {
5259 : /* Keep the original source location on the first 'if'. */
5260 794 : location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
5261 794 : TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
5262 : /* Set the source location of the || on the second 'if'. */
5263 794 : if (rexpr_has_location (pred))
5264 793 : SET_EXPR_LOCATION (expr, rexpr_location (pred));
5265 794 : else_ = shortcut_cond_expr (expr, condition_uid);
5266 794 : else_se = else_ && TREE_SIDE_EFFECTS (else_);
5267 794 : pred = TREE_OPERAND (pred, 0);
5268 794 : expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
5269 794 : SET_EXPR_LOCATION (expr, locus);
5270 : }
5271 : }
5272 :
5273 : /* The expr tree should also have the expression id set. */
5274 402934 : tree_associate_condition_with_expr (expr, condition_uid);
5275 :
5276 : /* If we're done, great. */
5277 402934 : if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
5278 402934 : && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
5279 96092 : return expr;
5280 :
5281 : /* Otherwise we need to mess with gotos. Change
5282 : if (a) c; else d;
5283 : to
5284 : if (a); else goto no;
5285 : c; goto end;
5286 : no: d; end:
5287 : and recursively gimplify the condition. */
5288 :
5289 306842 : true_label = false_label = end_label = NULL_TREE;
5290 :
5291 : /* If our arms just jump somewhere, hijack those labels so we don't
5292 : generate jumps to jumps. */
5293 :
5294 306842 : if (tree then_goto = find_goto_label (then_))
5295 : {
5296 4559 : true_label = GOTO_DESTINATION (then_goto);
5297 4559 : then_ = NULL;
5298 4559 : then_se = false;
5299 : }
5300 :
5301 306842 : if (tree else_goto = find_goto_label (else_))
5302 : {
5303 4152 : false_label = GOTO_DESTINATION (else_goto);
5304 4152 : else_ = NULL;
5305 4152 : else_se = false;
5306 : }
5307 :
5308 : /* If we aren't hijacking a label for the 'then' branch, it falls through. */
5309 306842 : if (true_label)
5310 : true_label_p = &true_label;
5311 : else
5312 302283 : true_label_p = NULL;
5313 :
5314 : /* The 'else' branch also needs a label if it contains interesting code. */
5315 306842 : if (false_label || else_se)
5316 : false_label_p = &false_label;
5317 : else
5318 : false_label_p = NULL;
5319 :
5320 : /* If there was nothing else in our arms, just forward the label(s). */
5321 148774 : if (!then_se && !else_se)
5322 8448 : return shortcut_cond_r (pred, true_label_p, false_label_p,
5323 12672 : EXPR_LOC_OR_LOC (expr, input_location), condition_uid);
5324 :
5325 : /* If our last subexpression already has a terminal label, reuse it. */
5326 302618 : if (else_se)
5327 158068 : t = expr_last (else_);
5328 144550 : else if (then_se)
5329 144550 : t = expr_last (then_);
5330 : else
5331 : t = NULL;
5332 302618 : if (t && TREE_CODE (t) == LABEL_EXPR)
5333 974 : end_label = LABEL_EXPR_LABEL (t);
5334 :
5335 : /* If we don't care about jumping to the 'else' branch, jump to the end
5336 : if the condition is false. */
5337 302618 : if (!false_label_p)
5338 144308 : false_label_p = &end_label;
5339 :
5340 : /* We only want to emit these labels if we aren't hijacking them. */
5341 302618 : emit_end = (end_label == NULL_TREE);
5342 302618 : emit_false = (false_label == NULL_TREE);
5343 :
5344 : /* We only emit the jump over the else clause if we have to--if the
5345 : then clause may fall through. Otherwise we can wind up with a
5346 : useless jump and a useless label at the end of gimplified code,
5347 : which will cause us to think that this conditional as a whole
5348 : falls through even if it doesn't. If we then inline a function
5349 : which ends with such a condition, that can cause us to issue an
5350 : inappropriate warning about control reaching the end of a
5351 : non-void function. */
5352 302618 : jump_over_else = block_may_fallthru (then_);
5353 :
5354 604613 : pred = shortcut_cond_r (pred, true_label_p, false_label_p,
5355 604613 : EXPR_LOC_OR_LOC (expr, input_location),
5356 : condition_uid);
5357 :
              /* Stitch the pieces together as a statement list: lowered
                 predicate, then-arm, optional jump over the else-arm, false
                 label, else-arm, and finally the end label.  */
5358 302618 : expr = NULL;
5359 302618 : append_to_statement_list (pred, &expr);
5360 :
5361 302618 : append_to_statement_list (then_, &expr);
5362 302618 : if (else_se)
5363 : {
5364 158068 : if (jump_over_else)
5365 : {
5366 155652 : tree last = expr_last (expr);
5367 155652 : t = build_and_jump (&end_label);
5368 155652 : if (rexpr_has_location (last))
5369 37333 : SET_EXPR_LOCATION (t, rexpr_location (last));
5370 155652 : append_to_statement_list (t, &expr);
5371 : }
5372 158068 : if (emit_false)
5373 : {
5374 158068 : t = build1 (LABEL_EXPR, void_type_node, false_label);
5375 158068 : append_to_statement_list (t, &expr);
5376 : }
5377 158068 : append_to_statement_list (else_, &expr);
5378 : }
5379 302618 : if (emit_end && end_label)
5380 : {
5381 298991 : t = build1 (LABEL_EXPR, void_type_node, end_label);
5382 298991 : append_to_statement_list (t, &expr);
5383 : }
5384 :
5385 302618 : return expr;
5386 : }
5387 :
5388 : /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
5389 :
5390 : tree
5391 16396810 : gimple_boolify (tree expr)
5392 : {
5393 16396810 : tree type = TREE_TYPE (expr);
5394 16396810 : location_t loc = EXPR_LOCATION (expr);
5395 :
              /* Recognize the pattern 'call () != 0', which may wrap a
                 __builtin_expect whose first argument should itself be
                 boolified.  */
5396 16396810 : if (TREE_CODE (expr) == NE_EXPR
5397 5656708 : && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
5398 17328725 : && integer_zerop (TREE_OPERAND (expr, 1)))
5399 : {
5400 480464 : tree call = TREE_OPERAND (expr, 0);
5401 480464 : tree fn = get_callee_fndecl (call);
5402 :
5403 : /* For __builtin_expect ((long) (x), y) recurse into x as well
5404 : if x is truth_value_p. */
5405 480464 : if (fn
5406 479493 : && fndecl_built_in_p (fn, BUILT_IN_EXPECT)
5407 609781 : && call_expr_nargs (call) == 2)
5408 : {
5409 129317 : tree arg = CALL_EXPR_ARG (call, 0);
5410 129317 : if (arg)
5411 : {
                      /* Strip the (long) cast the front end inserted so we
                         can see the truth value underneath.  */
5412 129317 : if (TREE_CODE (arg) == NOP_EXPR
5413 129317 : && TREE_TYPE (arg) == TREE_TYPE (call))
5414 60809 : arg = TREE_OPERAND (arg, 0);
5415 129317 : if (truth_value_p (TREE_CODE (arg)))
5416 : {
5417 106312 : arg = gimple_boolify (arg);
5418 106312 : CALL_EXPR_ARG (call, 0)
5419 212624 : = fold_convert_loc (loc, TREE_TYPE (call), arg);
5420 : }
5421 : }
5422 : }
5423 : }
5424 :
5425 16396810 : switch (TREE_CODE (expr))
5426 : {
5427 1220998 : case TRUTH_AND_EXPR:
5428 1220998 : case TRUTH_OR_EXPR:
5429 1220998 : case TRUTH_XOR_EXPR:
5430 1220998 : case TRUTH_ANDIF_EXPR:
5431 1220998 : case TRUTH_ORIF_EXPR:
5432 : /* Also boolify the arguments of truth exprs. */
5433 1220998 : TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
5434 : /* FALLTHRU */
5435 :
5436 1639928 : case TRUTH_NOT_EXPR:
5437 1639928 : TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
5438 :
5439 : /* These expressions always produce boolean results. */
5440 1639928 : if (TREE_CODE (type) != BOOLEAN_TYPE)
5441 306308 : TREE_TYPE (expr) = boolean_type_node;
5442 : return expr;
5443 :
              /* Loop annotations wrap their condition in operand 0; boolify
                 through the wrapper so the annotation survives.  */
5444 5236 : case ANNOTATE_EXPR:
5445 5236 : switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
5446 : {
5447 5236 : case annot_expr_ivdep_kind:
5448 5236 : case annot_expr_unroll_kind:
5449 5236 : case annot_expr_no_vector_kind:
5450 5236 : case annot_expr_vector_kind:
5451 5236 : case annot_expr_parallel_kind:
5452 5236 : case annot_expr_maybe_infinite_kind:
5453 5236 : TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
5454 5236 : if (TREE_CODE (type) != BOOLEAN_TYPE)
5455 3319 : TREE_TYPE (expr) = boolean_type_node;
5456 : return expr;
5457 0 : default:
5458 0 : gcc_unreachable ();
5459 : }
5460 :
5461 14751646 : default:
5462 14751646 : if (COMPARISON_CLASS_P (expr))
5463 : {
5464 : /* These expressions always produce boolean results. */
5465 12886645 : if (TREE_CODE (type) != BOOLEAN_TYPE)
5466 1574078 : TREE_TYPE (expr) = boolean_type_node;
5467 12886645 : return expr;
5468 : }
5469 : /* Other expressions that get here must have boolean values, but
5470 : might need to be converted to the appropriate mode. */
5471 1865001 : if (TREE_CODE (type) == BOOLEAN_TYPE)
5472 : return expr;
5473 201335 : return fold_convert_loc (loc, boolean_type_node, expr);
5474 : }
5475 : }
5476 :
5477 : /* Given a conditional expression *EXPR_P without side effects, gimplify
5478 : its operands. New statements are inserted to PRE_P. */
       /* Returns the weakest gimplify_status produced while gimplifying the
          condition and the two arms.  */
5479 :
5480 : static enum gimplify_status
5481 46029 : gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
5482 : {
5483 46029 : tree expr = *expr_p, cond;
5484 46029 : enum gimplify_status ret, tret;
5485 46029 : enum tree_code code;
5486 :
5487 46029 : cond = gimple_boolify (COND_EXPR_COND (expr));
5488 :
5489 : /* We need to handle && and || specially, as their gimplification
5490 : creates pure cond_expr, thus leading to an infinite cycle otherwise. */
       /* Both arms are side-effect free here, so the non-short-circuit
          TRUTH_AND/TRUTH_OR forms evaluate the same values.  */
5491 46029 : code = TREE_CODE (cond);
5492 46029 : if (code == TRUTH_ANDIF_EXPR)
5493 31 : TREE_SET_CODE (cond, TRUTH_AND_EXPR);
5494 45998 : else if (code == TRUTH_ORIF_EXPR)
5495 4637 : TREE_SET_CODE (cond, TRUTH_OR_EXPR);
5496 46029 : ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_val, fb_rvalue);
5497 46029 : COND_EXPR_COND (*expr_p) = cond;
5498 :
5499 46029 : tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
5500 : is_gimple_val, fb_rvalue);
5501 46029 : ret = MIN (ret, tret);
5502 46029 : tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
5503 : is_gimple_val, fb_rvalue);
5504 :
5505 46029 : return MIN (ret, tret);
5506 : }
5507 :
5508 : /* Return true if evaluating EXPR could trap.
5509 : EXPR is GENERIC, while tree_could_trap_p can be called
5510 : only on GIMPLE. */
5511 :
5512 : bool
5513 19266484 : generic_expr_could_trap_p (tree expr)
5514 : {
5515 19266484 : unsigned i, n;
5516 :
5517 19266484 : if (!expr || is_gimple_val (expr))
5518 7732228 : return false;
5519 :
5520 11534256 : if (!EXPR_P (expr) || tree_could_trap_p (expr))
5521 3226283 : return true;
5522 :
5523 8307973 : n = TREE_OPERAND_LENGTH (expr);
5524 18186348 : for (i = 0; i < n; i++)
5525 13546929 : if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
5526 : return true;
5527 :
5528 : return false;
5529 : }
5530 :
5531 : /* Associate the condition STMT with the discriminator UID. STMTs that are
5532 : broken down with ANDIF/ORIF from the same Boolean expression should be given
5533 : the same UID; 'if (a && b && c) { if (d || e) ... } ...' should yield the
5534 : { a: 1, b: 1, c: 1, d: 2, e: 2 } when gimplification is done. This is used
5535 : for condition coverage. */
5536 : static void
5537 5449995 : gimple_associate_condition_with_expr (struct function *fn, gcond *stmt,
5538 : unsigned uid)
5539 : {
5540 5449995 : if (!condition_coverage_flag)
5541 : return;
5542 :
5543 636 : if (!fn->cond_uids)
5544 135 : fn->cond_uids = new hash_map <gcond*, unsigned> ();
5545 :
5546 636 : fn->cond_uids->put (stmt, uid);
5547 : }
5548 :
5549 : /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
5550 : into
5551 :
5552 : if (p) if (p)
5553 : t1 = a; a;
5554 : else or else
5555 : t1 = b; b;
5556 : t1;
5557 :
5558 : The second form is used when *EXPR_P is of type void.
5559 :
5560 : PRE_P points to the list where side effects that must happen before
5561 : *EXPR_P should be stored. */
5562 :
5563 : static enum gimplify_status
5564 6217496 : gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
5565 : {
5566 6217496 : tree expr = *expr_p;
5567 6217496 : tree type = TREE_TYPE (expr);
5568 6217496 : location_t loc = EXPR_LOCATION (expr);
5569 6217496 : tree tmp, arm1, arm2;
5570 6217496 : enum gimplify_status ret;
5571 6217496 : tree label_true, label_false, label_cont;
5572 6217496 : bool have_then_clause_p, have_else_clause_p;
5573 6217496 : gcond *cond_stmt;
5574 6217496 : enum tree_code pred_code;
5575 6217496 : gimple_seq seq = NULL;
5576 :
5577 : /* If this COND_EXPR has a value, copy the values into a temporary within
5578 : the arms. */
5579 6217496 : if (!VOID_TYPE_P (type))
5580 : {
5581 415340 : tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
5582 415340 : tree result;
5583 :
5584 : /* If either an rvalue is ok or we do not require an lvalue, create the
5585 : temporary. But we cannot do that if the type is addressable. */
5586 415340 : if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
5587 415333 : && !TREE_ADDRESSABLE (type))
5588 : {
5589 415333 : if (gimplify_ctxp->allow_rhs_cond_expr
5590 : /* If either branch has side effects or could trap, it can't be
5591 : evaluated unconditionally. */
5592 46029 : && !TREE_SIDE_EFFECTS (then_)
5593 46029 : && !generic_expr_could_trap_p (then_)
5594 46029 : && !TREE_SIDE_EFFECTS (else_)
5595 461362 : && !generic_expr_could_trap_p (else_))
5596 46029 : return gimplify_pure_cond_expr (expr_p, pre_p);
5597 :
5598 369304 : tmp = create_tmp_var (type, "iftmp");
5599 369304 : result = tmp;
5600 : }
5601 :
5602 : /* Otherwise, only create and copy references to the values. */
5603 : else
5604 : {
              /* The value is addressable or an lvalue is required: work with
                 a pointer-to-TYPE temporary instead of a TYPE temporary.  */
5605 7 : type = build_pointer_type (type);
5606 :
5607 7 : if (!VOID_TYPE_P (TREE_TYPE (then_)))
5608 7 : then_ = build_fold_addr_expr_loc (loc, then_);
5609 :
5610 7 : if (!VOID_TYPE_P (TREE_TYPE (else_)))
5611 7 : else_ = build_fold_addr_expr_loc (loc, else_);
5612 :
5613 7 : expr
5614 7 : = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
5615 :
5616 7 : tmp = create_tmp_var (type, "iftmp");
5617 7 : result = build_simple_mem_ref_loc (loc, tmp);
5618 : }
5619 :
5620 : /* Build the new then clause, `tmp = then_;'. But don't build the
5621 : assignment if the value is void; in C++ it can be if it's a throw. */
5622 369311 : if (!VOID_TYPE_P (TREE_TYPE (then_)))
5623 287751 : TREE_OPERAND (expr, 1) = build2 (INIT_EXPR, type, tmp, then_);
5624 :
5625 : /* Similarly, build the new else clause, `tmp = else_;'. */
5626 369311 : if (!VOID_TYPE_P (TREE_TYPE (else_)))
5627 358684 : TREE_OPERAND (expr, 2) = build2 (INIT_EXPR, type, tmp, else_);
5628 :
5629 369311 : TREE_TYPE (expr) = void_type_node;
5630 369311 : recalculate_side_effects (expr);
5631 :
5632 : /* Move the COND_EXPR to the prequeue. */
5633 369311 : gimplify_stmt (&expr, pre_p);
5634 :
5635 369311 : *expr_p = result;
5636 369311 : return GS_ALL_DONE;
5637 : }
5638 :
5639 : /* Remove any COMPOUND_EXPR so the following cases will be caught. */
5640 5816631 : STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
5641 5802156 : if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
5642 80701 : gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);
5643 :
5644 : /* Make sure the condition has BOOLEAN_TYPE. */
5645 5802156 : TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
5646 :
5647 : /* Break apart && and || conditions. */
5648 5802156 : if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
5649 5802156 : || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
5650 : {
5651 351397 : expr = shortcut_cond_expr (expr, next_cond_uid ());
5652 :
5653 351397 : if (expr != *expr_p)
5654 : {
5655 351397 : *expr_p = expr;
5656 :
5657 : /* We can't rely on gimplify_expr to re-gimplify the expanded
5658 : form properly, as cleanups might cause the target labels to be
5659 : wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
5660 : set up a conditional context. */
5661 351397 : gimple_push_condition ();
5662 351397 : gimplify_stmt (expr_p, &seq);
5663 351397 : gimple_pop_condition (pre_p);
5664 351397 : gimple_seq_add_seq (pre_p, seq);
5665 :
5666 351397 : return GS_ALL_DONE;
5667 : }
5668 : }
5669 :
5670 : /* Now do the normal gimplification. */
5671 :
5672 : /* Gimplify condition. */
5673 5450759 : ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL,
5674 : is_gimple_condexpr_for_cond, fb_rvalue);
5675 5450759 : if (ret == GS_ERROR)
5676 : return GS_ERROR;
5677 5449995 : gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
5678 :
5679 5449995 : gimple_push_condition ();
5680 :
5681 5449995 : have_then_clause_p = have_else_clause_p = false;
       /* If an arm is just a goto, hijack its destination label so the
          GIMPLE_COND branches there directly.  */
5682 5449995 : label_true = find_goto_label (TREE_OPERAND (expr, 1));
5683 5449995 : if (label_true
5684 1263813 : && DECL_CONTEXT (GOTO_DESTINATION (label_true)) == current_function_decl
5685 : /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
5686 : have different locations, otherwise we end up with incorrect
5687 : location information on the branches. */
5688 6713808 : && (optimize
5689 223276 : || !EXPR_HAS_LOCATION (expr)
5690 196481 : || !rexpr_has_location (label_true)
5691 64969 : || EXPR_LOCATION (expr) == rexpr_location (label_true)))
5692 : {
5693 1262661 : have_then_clause_p = true;
5694 1262661 : label_true = GOTO_DESTINATION (label_true);
5695 : }
5696 : else
5697 4187334 : label_true = create_artificial_label (UNKNOWN_LOCATION);
5698 5449995 : label_false = find_goto_label (TREE_OPERAND (expr, 2));
5699 5449995 : if (label_false
5700 713757 : && DECL_CONTEXT (GOTO_DESTINATION (label_false)) == current_function_decl
5701 : /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
5702 : have different locations, otherwise we end up with incorrect
5703 : location information on the branches. */
5704 6163752 : && (optimize
5705 100326 : || !EXPR_HAS_LOCATION (expr)
5706 99449 : || !rexpr_has_location (label_false)
5707 111 : || EXPR_LOCATION (expr) == rexpr_location (label_false)))
5708 : {
5709 713658 : have_else_clause_p = true;
5710 713658 : label_false = GOTO_DESTINATION (label_false);
5711 : }
5712 : else
5713 4736337 : label_false = create_artificial_label (UNKNOWN_LOCATION);
5714 :
       /* Reuse the discriminator recorded when the predicate was split by
          shortcut_cond_expr (condition coverage), else allocate a new one.  */
5715 5449995 : unsigned cond_uid = 0;
5716 5449995 : if (cond_uids)
5717 455 : if (unsigned *v = cond_uids->get (expr))
5718 435 : cond_uid = *v;
5719 435 : if (cond_uid == 0)
5720 5449560 : cond_uid = next_cond_uid ();
5721 :
5722 5449995 : gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
5723 : &arm2);
5724 5449995 : cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
5725 : label_false);
5726 5449995 : gimple_set_location (cond_stmt, EXPR_LOCATION (expr));
5727 5449995 : gimple_associate_condition_with_expr (cfun, cond_stmt, cond_uid);
5728 5449995 : copy_warning (cond_stmt, COND_EXPR_COND (expr));
5729 5449995 : gimplify_seq_add_stmt (&seq, cond_stmt);
5730 5449995 : gimple_stmt_iterator gsi = gsi_last (seq);
5731 5449995 : maybe_fold_stmt (&gsi);
5732 :
5733 5449995 : label_cont = NULL_TREE;
5734 5449995 : if (!have_then_clause_p)
5735 : {
5736 : /* For if (...) {} else { code; } put label_true after
5737 : the else block. */
5738 4187334 : if (TREE_OPERAND (expr, 1) == NULL_TREE
5739 196469 : && !have_else_clause_p
5740 4188101 : && TREE_OPERAND (expr, 2) != NULL_TREE)
5741 : {
5742 : /* For if (0) {} else { code; } tell -Wimplicit-fallthrough
5743 : handling that label_cont == label_true can be only reached
5744 : through fallthrough from { code; }. */
5745 767 : if (integer_zerop (COND_EXPR_COND (expr)))
5746 0 : UNUSED_LABEL_P (label_true) = 1;
5747 : label_cont = label_true;
5748 : }
5749 : else
5750 : {
5751 4186567 : bool then_side_effects
5752 4186567 : = (TREE_OPERAND (expr, 1)
5753 4186567 : && TREE_SIDE_EFFECTS (TREE_OPERAND (expr, 1)));
5754 4186567 : gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
5755 4186567 : have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
5756 : /* For if (...) { code; } else {} or
5757 : if (...) { code; } else goto label; or
5758 : if (...) { code; return; } else { ... }
5759 : label_cont isn't needed. */
5760 4186567 : if (!have_else_clause_p
5761 3989725 : && TREE_OPERAND (expr, 2) != NULL_TREE
5762 6964317 : && gimple_seq_may_fallthru (seq))
5763 : {
5764 1919461 : gimple *g;
5765 1919461 : label_cont = create_artificial_label (UNKNOWN_LOCATION);
5766 :
5767 : /* For if (0) { non-side-effect-code } else { code }
5768 : tell -Wimplicit-fallthrough handling that label_cont can
5769 : be only reached through fallthrough from { code }. */
5770 1919461 : if (integer_zerop (COND_EXPR_COND (expr)))
5771 : {
5772 72984 : UNUSED_LABEL_P (label_true) = 1;
5773 72984 : if (!then_side_effects)
5774 11410 : UNUSED_LABEL_P (label_cont) = 1;
5775 : }
5776 :
5777 1919461 : g = gimple_build_goto (label_cont);
5778 :
5779 : /* GIMPLE_COND's are very low level; they have embedded
5780 : gotos. This particular embedded goto should not be marked
5781 : with the location of the original COND_EXPR, as it would
5782 : correspond to the COND_EXPR's condition, not the ELSE or the
5783 : THEN arms. To avoid marking it with the wrong location, flag
5784 : it as "no location". */
5785 1919461 : gimple_set_do_not_emit_location (g);
5786 :
5787 1919461 : gimplify_seq_add_stmt (&seq, g);
5788 : }
5789 : }
5790 : }
5791 5449995 : if (!have_else_clause_p)
5792 : {
5793 : /* For if (1) { code } or if (1) { code } else { non-side-effect-code }
5794 : tell -Wimplicit-fallthrough handling that label_false can be only
5795 : reached through fallthrough from { code }. */
5796 4736337 : if (integer_nonzerop (COND_EXPR_COND (expr))
5797 4736337 : && (TREE_OPERAND (expr, 2) == NULL_TREE
5798 56769 : || !TREE_SIDE_EFFECTS (TREE_OPERAND (expr, 2))))
5799 7013 : UNUSED_LABEL_P (label_false) = 1;
5800 4736337 : gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
5801 4736337 : have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
5802 : }
5803 5449995 : if (label_cont)
5804 1920228 : gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
5805 :
5806 5449995 : gimple_pop_condition (pre_p);
5807 5449995 : gimple_seq_add_seq (pre_p, seq);
5808 :
5809 5449995 : if (ret == GS_ERROR)
5810 : ; /* Do nothing. */
5811 5449995 : else if (have_then_clause_p || have_else_clause_p)
5812 : ret = GS_ALL_DONE;
5813 : else
5814 : {
5815 : /* Both arms are empty; replace the COND_EXPR with its predicate. */
5816 2650 : expr = TREE_OPERAND (expr, 0);
5817 2650 : gimplify_stmt (&expr, pre_p);
5818 : }
5819 :
5820 5449995 : *expr_p = NULL;
5821 5449995 : return ret;
5822 : }
5823 :
5824 : /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
5825 : to be marked addressable.
5826 :
5827 : We cannot rely on such an expression being directly markable if a temporary
5828 : has been created by the gimplification. In this case, we create another
5829 : temporary and initialize it with a copy, which will become a store after we
5830 : mark it addressable. This can happen if the front-end passed us something
5831 : that it could not mark addressable yet, like a Fortran pass-by-reference
5832 : parameter (int) floatvar. */
5833 :
5834 : static void
5835 54847635 : prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
5836 : {
5837 60156793 : while (handled_component_p (*expr_p))
5838 5309158 : expr_p = &TREE_OPERAND (*expr_p, 0);
5839 :
5840 : /* Do not allow an SSA name as the temporary. */
5841 54847635 : if (is_gimple_reg (*expr_p))
5842 11979 : *expr_p = internal_get_tmp_var (*expr_p, seq_p, NULL, false, false, true);
5843 54847635 : }
5844 :
5845 : /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
5846 : a call to __builtin_memcpy. */
5847 :
5848 : static enum gimplify_status
5849 1089 : gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
5850 : gimple_seq *seq_p)
5851 : {
5852 1089 : tree t, to, to_ptr, from, from_ptr;
5853 1089 : gcall *gs;
5854 1089 : location_t loc = EXPR_LOCATION (*expr_p);
5855 :
5856 1089 : to = TREE_OPERAND (*expr_p, 0);
5857 1089 : from = TREE_OPERAND (*expr_p, 1);
5858 1089 : gcc_assert (ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (to)))
5859 : && ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (from))));
5860 :
5861 : /* Mark the RHS addressable. Beware that it may not be possible to do so
5862 : directly if a temporary has been created by the gimplification. */
5863 1089 : prepare_gimple_addressable (&from, seq_p);
5864 :
5865 1089 : mark_addressable (from);
5866 1089 : from_ptr = build_fold_addr_expr_loc (loc, from);
5867 1089 : gimplify_arg (&from_ptr, seq_p, loc);
5868 :
5869 1089 : mark_addressable (to);
5870 1089 : to_ptr = build_fold_addr_expr_loc (loc, to);
5871 1089 : gimplify_arg (&to_ptr, seq_p, loc);
5872 :
5873 1089 : t = builtin_decl_implicit (BUILT_IN_MEMCPY);
5874 :
5875 1089 : gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
5876 1089 : gimple_call_set_alloca_for_var (gs, true);
5877 :
5878 1089 : if (want_value)
5879 : {
5880 : /* tmp = memcpy() */
5881 8 : t = create_tmp_var (TREE_TYPE (to_ptr));
5882 8 : gimple_call_set_lhs (gs, t);
5883 8 : gimplify_seq_add_stmt (seq_p, gs);
5884 :
5885 8 : *expr_p = build_simple_mem_ref (t);
5886 8 : return GS_ALL_DONE;
5887 : }
5888 :
5889 1081 : gimplify_seq_add_stmt (seq_p, gs);
5890 1081 : *expr_p = NULL;
5891 1081 : return GS_ALL_DONE;
5892 : }
5893 :
5894 : /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
5895 : a call to __builtin_memset. In this case we know that the RHS is
5896 : a CONSTRUCTOR with an empty element list. */
5897 :
5898 : static enum gimplify_status
5899 79 : gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
5900 : gimple_seq *seq_p)
5901 : {
5902 79 : tree t, from, to, to_ptr;
5903 79 : gcall *gs;
5904 79 : location_t loc = EXPR_LOCATION (*expr_p);
5905 :
5906 : /* Assert our assumptions, to abort instead of producing wrong code
5907 : silently if they are not met. Beware that the RHS CONSTRUCTOR might
5908 : not be immediately exposed. */
5909 79 : from = TREE_OPERAND (*expr_p, 1);
5910 79 : if (TREE_CODE (from) == WITH_SIZE_EXPR)
5911 79 : from = TREE_OPERAND (from, 0);
5912 :
5913 79 : gcc_assert (TREE_CODE (from) == CONSTRUCTOR
5914 : && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
5915 :
5916 : /* Now proceed. */
5917 79 : to = TREE_OPERAND (*expr_p, 0);
5918 79 : gcc_assert (ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (to))));
5919 :
5920 79 : to_ptr = build_fold_addr_expr_loc (loc, to);
5921 79 : gimplify_arg (&to_ptr, seq_p, loc);
5922 79 : t = builtin_decl_implicit (BUILT_IN_MEMSET);
5923 :
5924 79 : gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
5925 :
5926 79 : if (want_value)
5927 : {
5928 : /* tmp = memset() */
5929 0 : t = create_tmp_var (TREE_TYPE (to_ptr));
5930 0 : gimple_call_set_lhs (gs, t);
5931 0 : gimplify_seq_add_stmt (seq_p, gs);
5932 :
5933 0 : *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
5934 0 : return GS_ALL_DONE;
5935 : }
5936 :
5937 79 : gimplify_seq_add_stmt (seq_p, gs);
5938 79 : *expr_p = NULL;
5939 79 : return GS_ALL_DONE;
5940 : }
5941 :
5942 : /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
5943 : determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
5944 : assignment. Return non-null if we detect a potential overlap. */
5945 :
       /* Context passed (as XDATA) through walk_tree to
          gimplify_init_ctor_preeval_1 below.  */
5946 : struct gimplify_init_ctor_preeval_data
5947 : {
5948 : /* The base decl of the lhs object. May be NULL, in which case we
5949 : have to assume the lhs is indirect. */
5950 : tree lhs_base_decl;
5951 :
5952 : /* The alias set of the lhs object. */
5953 : alias_set_type lhs_alias_set;
5954 : };
5955 :
5956 : static tree
5957 115377 : gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
5958 : {
5959 115377 : struct gimplify_init_ctor_preeval_data *data
5960 : = (struct gimplify_init_ctor_preeval_data *) xdata;
5961 115377 : tree t = *tp;
5962 :
5963 : /* If we find the base object, obviously we have overlap. */
5964 115377 : if (data->lhs_base_decl == t)
5965 : return t;
5966 :
5967 : /* If the constructor component is indirect, determine if we have a
5968 : potential overlap with the lhs. The only bits of information we
5969 : have to go on at this point are addressability and alias sets. */
5970 115376 : if ((INDIRECT_REF_P (t)
5971 115376 : || TREE_CODE (t) == MEM_REF)
5972 651 : && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
5973 115892 : && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
5974 : return t;
5975 :
5976 : /* If the constructor component is a call, determine if it can hide a
5977 : potential overlap with the lhs through an INDIRECT_REF like above.
5978 : ??? Ugh - this is completely broken. In fact this whole analysis
5979 : doesn't look conservative. */
5980 114860 : if (TREE_CODE (t) == CALL_EXPR)
5981 : {
5982 0 : tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
5983 :
              /* Check every pointer parameter of the callee for a possible
                 alias with the LHS.  */
5984 0 : for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
5985 0 : if (POINTER_TYPE_P (TREE_VALUE (type))
5986 0 : && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
5987 0 : && alias_sets_conflict_p (data->lhs_alias_set,
5988 : get_alias_set
5989 0 : (TREE_TYPE (TREE_VALUE (type)))))
5990 : return t;
5991 : }
5992 :
       /* Prune the walk at types and declarations.  */
5993 114860 : if (IS_TYPE_OR_DECL_P (t))
5994 10968 : *walk_subtrees = 0;
5995 : return NULL;
5996 : }
5997 :
5998 : /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
5999 : force values that overlap with the lhs (as described by *DATA)
6000 : into temporaries. */
6001 :
static void
gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			    struct gimplify_init_ctor_preeval_data *data)
{
  enum gimplify_status one;

  /* If the value is constant, then there's nothing to pre-evaluate.  */
  if (TREE_CONSTANT (*expr_p))
    {
      /* Ensure it does not have side effects, it might contain a reference to
	 the object we're initializing.  */
      gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
      return;
    }

  /* If the type has non-trivial constructors, we can't pre-evaluate.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
    return;

  /* Recurse for nested constructors.  */
  if (TREE_CODE (*expr_p) == CONSTRUCTOR)
    {
      unsigned HOST_WIDE_INT ix;
      constructor_elt *ce;
      vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);

      /* Pre-evaluate each element value in place.  */
      FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
	gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);

      return;
    }

  /* If this is a variable sized type, we must remember the size.  */
  maybe_with_size_expr (expr_p);

  /* Gimplify the constructor element to something appropriate for the rhs
     of a MODIFY_EXPR.  Given that we know the LHS is an aggregate, we know
     the gimplifier will consider this a store to memory.  Doing this
     gimplification now means that we won't have to deal with complicated
     language-specific trees, nor trees like SAVE_EXPR that can induce
     exponential search behavior.  */
  one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
  if (one == GS_ERROR)
    {
      /* A NULL element value signals the error to gimplify_init_ctor_eval,
	 which skips such elements.  */
      *expr_p = NULL;
      return;
    }

  /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
     with the lhs, since "a = { .x=a }" doesn't make sense.  This will
     always be true for all scalars, since is_gimple_mem_rhs insists on a
     temporary variable for them.  */
  if (DECL_P (*expr_p))
    return;

  /* If this is of variable size, we have no choice but to assume it doesn't
     overlap since we can't make a temporary for it.  */
  if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
    return;

  /* Otherwise, we must search for overlap ...  */
  if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
    return;

  /* ... and if found, force the value into a temporary.  */
  *expr_p = get_formal_tmp_var (*expr_p, pre_p);
}
6069 :
6070 : /* A subroutine of gimplify_init_ctor_eval. Create a loop for
6071 : a RANGE_EXPR in a CONSTRUCTOR for an array.
6072 :
6073 : var = lower;
6074 : loop_entry:
6075 : object[var] = value;
6076 : if (var == upper)
6077 : goto loop_exit;
6078 : var = var + 1;
6079 : goto loop_entry;
6080 : loop_exit:
6081 :
6082 : We increment var _after_ the loop exit check because we might otherwise
6083 : fail if upper == TYPE_MAX_VALUE (type for upper).
6084 :
6085 : Note that we never have to deal with SAVE_EXPRs here, because this has
6086 : already been taken care of for us, in gimplify_init_ctor_preeval(). */
6087 :
6088 : static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
6089 : gimple_seq *, bool);
6090 :
6091 : static void
6092 412 : gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
6093 : tree value, tree array_elt_type,
6094 : gimple_seq *pre_p, bool cleared)
6095 : {
6096 412 : tree loop_entry_label, loop_exit_label, fall_thru_label;
6097 412 : tree var, var_type, cref, tmp;
6098 :
6099 412 : loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
6100 412 : loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
6101 412 : fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
6102 :
6103 : /* Create and initialize the index variable. */
6104 412 : var_type = TREE_TYPE (upper);
6105 412 : var = create_tmp_var (var_type);
6106 412 : gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
6107 :
6108 : /* Add the loop entry label. */
6109 412 : gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
6110 :
6111 : /* Build the reference. */
6112 412 : cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
6113 : var, NULL_TREE, NULL_TREE);
6114 :
6115 : /* If we are a constructor, just call gimplify_init_ctor_eval to do
6116 : the store. Otherwise just assign value to the reference. */
6117 :
6118 412 : if (TREE_CODE (value) == CONSTRUCTOR)
6119 : /* NB we might have to call ourself recursively through
6120 : gimplify_init_ctor_eval if the value is a constructor. */
6121 99 : gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
6122 : pre_p, cleared);
6123 : else
6124 : {
6125 313 : if (gimplify_expr (&value, pre_p, NULL, is_gimple_val, fb_rvalue)
6126 : != GS_ERROR)
6127 313 : gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
6128 : }
6129 :
6130 : /* We exit the loop when the index var is equal to the upper bound. */
6131 824 : gimplify_seq_add_stmt (pre_p,
6132 412 : gimple_build_cond (EQ_EXPR, var, upper,
6133 : loop_exit_label, fall_thru_label));
6134 :
6135 412 : gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
6136 :
6137 : /* Otherwise, increment the index var... */
6138 412 : tmp = build2 (PLUS_EXPR, var_type, var,
6139 : fold_convert (var_type, integer_one_node));
6140 412 : gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
6141 :
6142 : /* ...and jump back to the loop entry. */
6143 412 : gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
6144 :
6145 : /* Add the loop exit label. */
6146 412 : gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
6147 412 : }
6148 :
6149 : /* A subroutine of gimplify_init_constructor. Generate individual
6150 : MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
6151 : assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
6152 : CONSTRUCTOR. CLEARED is true if the entire LHS object has been
6153 : zeroed first. */
6154 :
static void
gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
			 gimple_seq *pre_p, bool cleared)
{
  tree array_elt_type = NULL;
  unsigned HOST_WIDE_INT ix;
  tree purpose, value;

  /* For arrays, every element store uses the main variant of the
     element type.  */
  if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
    array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));

  FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
    {
      tree cref;

      /* NULL values are created above for gimplification errors.  */
      if (value == NULL)
	continue;

      /* Stores of zero are redundant when the whole object was already
	 block-cleared.  */
      if (cleared && initializer_zerop (value))
	continue;

      /* ??? Here's to hoping the front end fills in all of the indices,
	 so we don't have to figure out what's missing ourselves.  */
      gcc_assert (purpose);

      /* Skip zero-sized fields, unless value has side-effects.  This can
	 happen with calls to functions returning an empty type, which
	 we shouldn't discard.  As a number of downstream passes don't
	 expect sets of empty type fields, we rely on the gimplification of
	 the MODIFY_EXPR we make below to drop the assignment statement.  */
      if (!TREE_SIDE_EFFECTS (value)
	  && TREE_CODE (purpose) == FIELD_DECL
	  && is_empty_type (TREE_TYPE (purpose)))
	continue;

      /* If we have a RANGE_EXPR, we have to build a loop to assign the
	 whole range.  */
      if (TREE_CODE (purpose) == RANGE_EXPR)
	{
	  tree lower = TREE_OPERAND (purpose, 0);
	  tree upper = TREE_OPERAND (purpose, 1);

	  /* If the lower bound is equal to upper, just treat it as if
	     upper was the index.  */
	  if (simple_cst_equal (lower, upper))
	    purpose = upper;
	  else
	    {
	      gimplify_init_ctor_eval_range (object, lower, upper, value,
					     array_elt_type, pre_p, cleared);
	      continue;
	    }
	}

      /* Build the reference to store into: OBJECT[PURPOSE] for arrays,
	 OBJECT.PURPOSE for records/unions.  */
      if (array_elt_type)
	{
	  /* Do not use bitsizetype for ARRAY_REF indices.  */
	  if (TYPE_DOMAIN (TREE_TYPE (object)))
	    purpose
	      = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
			      purpose);
	  cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
			 purpose, NULL_TREE, NULL_TREE);
	}
      else
	{
	  gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
	  cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
			 unshare_expr (object), purpose, NULL_TREE);
	}

      /* Aggregate CONSTRUCTOR values are lowered recursively; vector
	 CONSTRUCTORs fall through to the plain-assignment case.  */
      if (TREE_CODE (value) == CONSTRUCTOR
	  && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
	gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
				 pre_p, cleared);
      else if (TREE_CODE (value) == RAW_DATA_CST)
	{
	  if (RAW_DATA_LENGTH (value) <= 32)
	    {
	      /* For small raw data blobs, emit individual byte stores;
		 when the object was cleared, zero bytes are skipped.  */
	      for (unsigned int i = 0; i < (unsigned) RAW_DATA_LENGTH (value);
		   ++i)
		if (!cleared || RAW_DATA_POINTER (value)[i])
		  {
		    if (i)
		      {
			/* Rebuild CREF with PURPOSE + i as the index for
			   the i-th byte of the blob.  */
			tree p
			  = fold_build2 (PLUS_EXPR, TREE_TYPE (purpose),
					 purpose,
					 build_int_cst (TREE_TYPE (purpose),
							i));
			cref = build4 (ARRAY_REF, array_elt_type,
				       unshare_expr (object), p, NULL_TREE,
				       NULL_TREE);
		      }
		    tree init
		      = build2 (INIT_EXPR, TREE_TYPE (cref), cref,
				build_int_cst (TREE_TYPE (value),
					       RAW_DATA_UCHAR_ELT (value, i)));
		    gimplify_and_add (init, pre_p);
		    ggc_free (init);
		  }
	    }
	  else
	    {
	      /* For larger blobs, emit the data into the constant pool
		 and block-copy it into place through a MEM_REF of an
		 array type covering the whole blob.  */
	      tree rtype = build_array_type_nelts (TREE_TYPE (value),
						   RAW_DATA_LENGTH (value));
	      tree rctor = build_constructor_single (rtype, bitsize_zero_node,
						     value);
	      tree addr = build_fold_addr_expr (cref);
	      cref = build2 (MEM_REF, rtype, addr,
			     build_int_cst (ptr_type_node, 0));
	      rctor = tree_output_constant_def (rctor);
	      if (!useless_type_conversion_p (rtype, TREE_TYPE (rctor)))
		rctor = build1 (VIEW_CONVERT_EXPR, rtype, rctor);
	      if (gimplify_expr (&cref, pre_p, NULL, is_gimple_lvalue,
				 fb_lvalue) != GS_ERROR)
		gimplify_seq_add_stmt (pre_p,
				       gimple_build_assign (cref, rctor));
	    }
	}
      else
	{
	  /* Plain scalar or vector value: emit an INIT_EXPR and let its
	     gimplification drop empty-type assignments if needed.  */
	  tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
	  gimplify_and_add (init, pre_p);
	  ggc_free (init);
	}
    }
}
6284 :
6285 : /* Return the appropriate RHS predicate for this LHS. */
6286 :
6287 : gimple_predicate
6288 48430298 : rhs_predicate_for (tree lhs)
6289 : {
6290 48430298 : if (is_gimple_reg (lhs))
6291 : return is_gimple_reg_rhs_or_call;
6292 : else
6293 11336326 : return is_gimple_mem_rhs_or_call;
6294 : }
6295 :
6296 : /* Return the initial guess for an appropriate RHS predicate for this LHS,
6297 : before the LHS has been gimplified. */
6298 :
6299 : static gimple_predicate
6300 47277143 : initial_rhs_predicate_for (tree lhs)
6301 : {
6302 47277143 : if (is_gimple_reg_type (TREE_TYPE (lhs)))
6303 : return is_gimple_reg_rhs_or_call;
6304 : else
6305 2316643 : return is_gimple_mem_rhs_or_call;
6306 : }
6307 :
/* Gimplify a C99 compound literal expression.  This just means adding
   the DECL_EXPR before the current statement and using its anonymous
   decl instead.  GIMPLE_TEST_F and FALLBACK describe what the caller
   needs *EXPR_P to gimplify to; when no lvalue is required and the
   literal's initializer already satisfies the predicate, the
   initializer is substituted directly to avoid a temporary.  Returns
   GS_OK.  */

static enum gimplify_status
gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
				bool (*gimple_test_f) (tree),
				fallback_t fallback)
{
  tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
  tree decl = DECL_EXPR_DECL (decl_s);
  tree init = DECL_INITIAL (decl);
  /* Mark the decl as addressable if the compound literal
     expression is addressable now, otherwise it is marked too late
     after we gimplify the initialization expression.  */
  if (TREE_ADDRESSABLE (*expr_p))
    TREE_ADDRESSABLE (decl) = 1;
  /* Otherwise, if we don't need an lvalue and have a literal directly
     substitute it.  Check if it matches the gimple predicate, as
     otherwise we'd generate a new temporary, and we can as well just
     use the decl we already have.  */
  else if (!TREE_ADDRESSABLE (decl)
	   && !TREE_THIS_VOLATILE (decl)
	   && init
	   && (fallback & fb_lvalue) == 0
	   && gimple_test_f (init))
    {
      *expr_p = init;
      return GS_OK;
    }

  /* If the decl is not addressable, then it is being used in some
     expression or on the right hand side of a statement, and it can
     be put into a readonly data section.  */
  if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
    TREE_READONLY (decl) = 1;

  /* This decl isn't mentioned in the enclosing block, so add it to the
     list of temps.  FIXME it seems a bit of a kludge to say that
     anonymous artificial vars aren't pushed, but everything else is.  */
  if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
    gimple_add_tmp_var (decl);

  /* Emit the initialization before the current statement and rewrite
     the compound literal to its decl.  */
  gimplify_and_add (decl_s, pre_p);
  *expr_p = decl;
  return GS_OK;
}
6355 :
/* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
   return a new CONSTRUCTOR if something changed.  ORIG_CTOR itself is
   never modified; it is copied lazily the first time an element is
   replaced (copy-on-write), and the original is returned unchanged if
   nothing was optimized.  */

static tree
optimize_compound_literals_in_ctor (tree orig_ctor)
{
  tree ctor = orig_ctor;
  vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
  unsigned int idx, num = vec_safe_length (elts);

  for (idx = 0; idx < num; idx++)
    {
      tree value = (*elts)[idx].value;
      tree newval = value;
      /* Recurse into nested constructors.  */
      if (TREE_CODE (value) == CONSTRUCTOR)
	newval = optimize_compound_literals_in_ctor (value);
      else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
	{
	  tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
	  tree decl = DECL_EXPR_DECL (decl_s);
	  tree init = DECL_INITIAL (decl);

	  /* Replace the literal with its (optimized) initializer only
	     when neither the literal nor its decl has its address taken
	     and the initializer is itself a CONSTRUCTOR, so the decl is
	     not needed.  */
	  if (!TREE_ADDRESSABLE (value)
	      && !TREE_ADDRESSABLE (decl)
	      && init
	      && TREE_CODE (init) == CONSTRUCTOR)
	    newval = optimize_compound_literals_in_ctor (init);
	}
      if (newval == value)
	continue;

      /* First replacement: copy the constructor node and its element
	 vector, and repoint ELTS at the copy so this and any later
	 updates modify the copy, not ORIG_CTOR.  */
      if (ctor == orig_ctor)
	{
	  ctor = copy_node (orig_ctor);
	  CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
	  elts = CONSTRUCTOR_ELTS (ctor);
	}
      (*elts)[idx].value = newval;
    }
  return ctor;
}
6397 :
6398 : /* A subroutine of gimplify_modify_expr. Break out elements of a
6399 : CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
6400 :
6401 : Note that we still need to clear any elements that don't have explicit
6402 : initializers, so if not all elements are initialized we keep the
6403 : original MODIFY_EXPR, we just remove all of the constructor elements.
6404 :
6405 : If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
6406 : GS_ERROR if we would have to create a temporary when gimplifying
6407 : this constructor. Otherwise, return GS_OK.
6408 :
6409 : If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
6410 :
6411 : static enum gimplify_status
6412 1295857 : gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
6413 : bool want_value, bool notify_temp_creation)
6414 : {
6415 1295857 : tree object, ctor, type;
6416 1295857 : enum gimplify_status ret;
6417 1295857 : vec<constructor_elt, va_gc> *elts;
6418 1295857 : bool cleared = false;
6419 1295857 : bool is_empty_ctor = false;
6420 1295857 : bool is_init_expr = (TREE_CODE (*expr_p) == INIT_EXPR);
6421 :
6422 1295857 : gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
6423 :
6424 1295857 : if (!notify_temp_creation)
6425 : {
6426 1287570 : ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6427 : is_gimple_lvalue, fb_lvalue);
6428 1287570 : if (ret == GS_ERROR)
6429 : return ret;
6430 : }
6431 :
6432 1295857 : object = TREE_OPERAND (*expr_p, 0);
6433 1295857 : ctor = TREE_OPERAND (*expr_p, 1)
6434 1295857 : = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
6435 1295857 : type = TREE_TYPE (ctor);
6436 1295857 : elts = CONSTRUCTOR_ELTS (ctor);
6437 1295857 : ret = GS_ALL_DONE;
6438 :
6439 1295857 : switch (TREE_CODE (type))
6440 : {
6441 1289143 : case RECORD_TYPE:
6442 1289143 : case UNION_TYPE:
6443 1289143 : case QUAL_UNION_TYPE:
6444 1289143 : case ARRAY_TYPE:
6445 1289143 : {
6446 : /* Use readonly data for initializers of this or smaller size
6447 : regardless of the num_nonzero_elements / num_unique_nonzero_elements
6448 : ratio. */
6449 1289143 : const HOST_WIDE_INT min_unique_size = 64;
6450 : /* If num_nonzero_elements / num_unique_nonzero_elements ratio
6451 : is smaller than this, use readonly data. */
6452 1289143 : const int unique_nonzero_ratio = 8;
6453 : /* True if a single access of the object must be ensured. This is the
6454 : case if the target is volatile, the type is non-addressable and more
6455 : than one field need to be assigned. */
6456 1289143 : const bool ensure_single_access
6457 1289143 : = TREE_THIS_VOLATILE (object)
6458 254 : && !TREE_ADDRESSABLE (type)
6459 1289346 : && vec_safe_length (elts) > 1;
6460 1289143 : struct gimplify_init_ctor_preeval_data preeval_data;
6461 1289143 : HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
6462 1289143 : HOST_WIDE_INT num_unique_nonzero_elements;
6463 1289143 : int complete_p;
6464 1289143 : bool valid_const_initializer;
6465 :
6466 : /* Aggregate types must lower constructors to initialization of
6467 : individual elements. The exception is that a CONSTRUCTOR node
6468 : with no elements indicates zero-initialization of the whole. */
6469 1289143 : if (vec_safe_is_empty (elts))
6470 : {
6471 326389 : if (notify_temp_creation)
6472 9870 : return GS_OK;
6473 :
6474 : /* The var will be initialized and so appear on lhs of
6475 : assignment, it can't be TREE_READONLY anymore. */
6476 326341 : if (VAR_P (object))
6477 183939 : TREE_READONLY (object) = 0;
6478 :
6479 : is_empty_ctor = true;
6480 328096 : break;
6481 : }
6482 :
6483 : /* Fetch information about the constructor to direct later processing.
6484 : We might want to make static versions of it in various cases, and
6485 : can only do so if it known to be a valid constant initializer. */
6486 962754 : valid_const_initializer
6487 962754 : = categorize_ctor_elements (ctor, &num_nonzero_elements,
6488 : &num_unique_nonzero_elements,
6489 : &num_ctor_elements, &complete_p);
6490 :
6491 : /* If a const aggregate variable is being initialized, then it
6492 : should never be a lose to promote the variable to be static. */
6493 962754 : if (valid_const_initializer
6494 588507 : && num_nonzero_elements > 1
6495 438714 : && TREE_READONLY (object)
6496 4006 : && VAR_P (object)
6497 3629 : && !DECL_REGISTER (object)
6498 3622 : && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)
6499 3378 : || DECL_MERGEABLE (object))
6500 : /* For ctors that have many repeated nonzero elements
6501 : represented through RANGE_EXPRs, prefer initializing
6502 : those through runtime loops over copies of large amounts
6503 : of data from readonly data section. */
6504 962754 : && (num_unique_nonzero_elements
6505 1755 : > num_nonzero_elements / unique_nonzero_ratio
6506 0 : || ((unsigned HOST_WIDE_INT) int_size_in_bytes (type)
6507 : <= (unsigned HOST_WIDE_INT) min_unique_size)))
6508 : {
6509 1755 : if (notify_temp_creation)
6510 : return GS_ERROR;
6511 :
6512 1755 : DECL_INITIAL (object) = ctor;
6513 1755 : TREE_STATIC (object) = 1;
6514 1755 : if (!DECL_NAME (object) || DECL_NAMELESS (object))
6515 1538 : DECL_NAME (object) = create_tmp_var_name ("C");
6516 1755 : walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
6517 :
6518 : /* ??? C++ doesn't automatically append a .<number> to the
6519 : assembler name, and even when it does, it looks at FE private
6520 : data structures to figure out what that number should be,
6521 : which are not set for this variable. I suppose this is
6522 : important for local statics for inline functions, which aren't
6523 : "local" in the object file sense. So in order to get a unique
6524 : TU-local symbol, we must invoke the lhd version now. */
6525 1755 : lhd_set_decl_assembler_name (object);
6526 :
6527 1755 : *expr_p = NULL_TREE;
6528 1755 : break;
6529 : }
6530 :
6531 : /* The var will be initialized and so appear on lhs of
6532 : assignment, it can't be TREE_READONLY anymore. */
6533 960999 : if (VAR_P (object) && !notify_temp_creation)
6534 759068 : TREE_READONLY (object) = 0;
6535 :
6536 : /* If there are "lots" of initialized elements, even discounting
6537 : those that are not address constants (and thus *must* be
6538 : computed at runtime), then partition the constructor into
6539 : constant and non-constant parts. Block copy the constant
6540 : parts in, then generate code for the non-constant parts. */
6541 : /* TODO. There's code in cp/typeck.cc to do this. */
6542 :
6543 960999 : if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
6544 : /* store_constructor will ignore the clearing of variable-sized
6545 : objects. Initializers for such objects must explicitly set
6546 : every field that needs to be set. */
6547 : cleared = false;
6548 960979 : else if (!complete_p)
6549 : /* If the constructor isn't complete, clear the whole object
6550 : beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
6551 :
6552 : ??? This ought not to be needed. For any element not present
6553 : in the initializer, we should simply set them to zero. Except
6554 : we'd need to *find* the elements that are not present, and that
6555 : requires trickery to avoid quadratic compile-time behavior in
6556 : large cases or excessive memory use in small cases. */
6557 180529 : cleared = !CONSTRUCTOR_NO_CLEARING (ctor);
6558 780450 : else if (num_ctor_elements - num_nonzero_elements
6559 780450 : > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
6560 780450 : && num_nonzero_elements < num_ctor_elements / 4)
6561 : /* If there are "lots" of zeros, it's more efficient to clear
6562 : the memory and then set the nonzero elements. */
6563 : cleared = true;
6564 779589 : else if (ensure_single_access && num_nonzero_elements == 0)
6565 : /* If a single access to the target must be ensured and all elements
6566 : are zero, then it's optimal to clear whatever their number. */
6567 : cleared = true;
6568 : /* If the object is small enough to go in registers, and it's
6569 : not required to be constructed in memory, clear it first.
6570 : That will avoid wasting cycles preserving any padding bits
6571 : that might be there, and if there aren't any, the compiler
6572 : is smart enough to optimize the clearing out. */
6573 779580 : else if (complete_p <= 0
6574 13687 : && !TREE_ADDRESSABLE (ctor)
6575 13673 : && !TREE_THIS_VOLATILE (object)
6576 13670 : && (TYPE_MODE (type) != BLKmode || TYPE_NO_FORCE_BLK (type))
6577 786654 : && optimize)
6578 : cleared = true;
6579 : else
6580 : cleared = false;
6581 :
6582 : /* If there are "lots" of initialized elements, and all of them
6583 : are valid address constants, then the entire initializer can
6584 : be dropped to memory, and then memcpy'd out. Don't do this
6585 : for sparse arrays, though, as it's more efficient to follow
6586 : the standard CONSTRUCTOR behavior of memset followed by
6587 : individual element initialization. Also don't do this for small
6588 : all-zero initializers (which aren't big enough to merit
6589 : clearing), and don't try to make bitwise copies of
6590 : TREE_ADDRESSABLE types. */
6591 960999 : if (valid_const_initializer
6592 586752 : && complete_p
6593 420912 : && !(cleared || num_nonzero_elements == 0)
6594 327997 : && !TREE_ADDRESSABLE (type))
6595 : {
6596 326530 : HOST_WIDE_INT size = int_size_in_bytes (type);
6597 326530 : unsigned int align;
6598 :
6599 : /* ??? We can still get unbounded array types, at least
6600 : from the C++ front end. This seems wrong, but attempt
6601 : to work around it for now. */
6602 326530 : if (size < 0)
6603 : {
6604 9 : size = int_size_in_bytes (TREE_TYPE (object));
6605 9 : if (size >= 0)
6606 0 : TREE_TYPE (ctor) = type = TREE_TYPE (object);
6607 : }
6608 :
6609 : /* Find the maximum alignment we can assume for the object. */
6610 : /* ??? Make use of DECL_OFFSET_ALIGN. */
6611 326530 : if (DECL_P (object))
6612 312929 : align = DECL_ALIGN (object);
6613 : else
6614 13601 : align = TYPE_ALIGN (type);
6615 :
6616 : /* Do a block move either if the size is so small as to make
6617 : each individual move a sub-unit move on average, or if it
6618 : is so large as to make individual moves inefficient. */
6619 326530 : if (size > 0
6620 326521 : && num_nonzero_elements > 1
6621 : /* For ctors that have many repeated nonzero elements
6622 : represented through RANGE_EXPRs, prefer initializing
6623 : those through runtime loops over copies of large amounts
6624 : of data from readonly data section. */
6625 297027 : && (num_unique_nonzero_elements
6626 297027 : > num_nonzero_elements / unique_nonzero_ratio
6627 56 : || size <= min_unique_size)
6628 623501 : && (size < num_nonzero_elements
6629 296865 : || !can_move_by_pieces (size, align)))
6630 : {
6631 2840 : if (notify_temp_creation)
6632 : return GS_ERROR;
6633 :
6634 1422 : walk_tree (&ctor, force_labels_r, NULL, NULL);
6635 1422 : ctor = tree_output_constant_def (ctor);
6636 1422 : if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
6637 0 : ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
6638 1422 : TREE_OPERAND (*expr_p, 1) = ctor;
6639 :
6640 : /* This is no longer an assignment of a CONSTRUCTOR, but
6641 : we still may have processing to do on the LHS. So
6642 : pretend we didn't do anything here to let that happen. */
6643 1422 : return GS_UNHANDLED;
6644 : }
6645 : }
6646 :
6647 : /* If a single access to the target must be ensured and there are
6648 : nonzero elements or the zero elements are not assigned en masse,
6649 : initialize the target from a temporary. */
6650 958159 : if (ensure_single_access && (num_nonzero_elements > 0 || !cleared))
6651 : {
6652 167 : if (notify_temp_creation)
6653 : return GS_ERROR;
6654 :
6655 161 : tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
6656 161 : TREE_OPERAND (*expr_p, 0) = temp;
6657 161 : *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
6658 : *expr_p,
6659 : build2 (MODIFY_EXPR, void_type_node,
6660 : object, temp));
6661 161 : return GS_OK;
6662 : }
6663 :
6664 957992 : if (notify_temp_creation)
6665 : return GS_OK;
6666 :
6667 : /* If there are nonzero elements and if needed, pre-evaluate to capture
6668 : elements overlapping with the lhs into temporaries. We must do this
6669 : before clearing to fetch the values before they are zeroed-out. */
6670 951177 : if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
6671 : {
6672 214445 : preeval_data.lhs_base_decl = get_base_address (object);
6673 214445 : if (!DECL_P (preeval_data.lhs_base_decl))
6674 41202 : preeval_data.lhs_base_decl = NULL;
6675 214445 : preeval_data.lhs_alias_set = get_alias_set (object);
6676 :
6677 214445 : gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
6678 : pre_p, post_p, &preeval_data);
6679 : }
6680 :
6681 951177 : bool ctor_has_side_effects_p
6682 951177 : = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1));
6683 :
6684 951177 : if (cleared)
6685 : {
6686 : /* Zap the CONSTRUCTOR element list, which simplifies this case.
6687 : Note that we still have to gimplify, in order to handle the
6688 : case of variable sized types. Avoid shared tree structures. */
6689 187667 : CONSTRUCTOR_ELTS (ctor) = NULL;
6690 187667 : TREE_SIDE_EFFECTS (ctor) = 0;
6691 187667 : object = unshare_expr (object);
6692 187667 : gimplify_stmt (expr_p, pre_p);
6693 : }
6694 :
6695 : /* If we have not block cleared the object, or if there are nonzero
6696 : elements in the constructor, or if the constructor has side effects,
6697 : add assignments to the individual scalar fields of the object. */
6698 187667 : if (!cleared
6699 187667 : || num_nonzero_elements > 0
6700 17034 : || ctor_has_side_effects_p)
6701 934488 : gimplify_init_ctor_eval (object, elts, pre_p, cleared);
6702 :
6703 951177 : *expr_p = NULL_TREE;
6704 : }
6705 951177 : break;
6706 :
6707 0 : case COMPLEX_TYPE:
6708 0 : {
6709 0 : tree r, i;
6710 :
6711 0 : if (notify_temp_creation)
6712 : return GS_OK;
6713 :
6714 : /* Extract the real and imaginary parts out of the ctor. */
6715 0 : gcc_assert (elts->length () == 2);
6716 0 : r = (*elts)[0].value;
6717 0 : i = (*elts)[1].value;
6718 0 : if (r == NULL || i == NULL)
6719 : {
6720 0 : tree zero = build_zero_cst (TREE_TYPE (type));
6721 0 : if (r == NULL)
6722 0 : r = zero;
6723 0 : if (i == NULL)
6724 0 : i = zero;
6725 : }
6726 :
6727 : /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
6728 : represent creation of a complex value. */
6729 0 : if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
6730 : {
6731 0 : ctor = build_complex (type, r, i);
6732 0 : TREE_OPERAND (*expr_p, 1) = ctor;
6733 : }
6734 : else
6735 : {
6736 0 : ctor = build2 (COMPLEX_EXPR, type, r, i);
6737 0 : TREE_OPERAND (*expr_p, 1) = ctor;
6738 0 : ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
6739 : pre_p,
6740 : post_p,
6741 0 : rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
6742 : fb_rvalue);
6743 : }
6744 : }
6745 : break;
6746 :
6747 6714 : case VECTOR_TYPE:
6748 6714 : {
6749 6714 : unsigned HOST_WIDE_INT ix;
6750 6714 : constructor_elt *ce;
6751 :
6752 6714 : if (notify_temp_creation)
6753 1295857 : return GS_OK;
6754 :
6755 : /* Vector types use CONSTRUCTOR all the way through gimple
6756 : compilation as a general initializer. */
6757 64071 : FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
6758 : {
6759 57357 : enum gimplify_status tret;
6760 57357 : tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
6761 : fb_rvalue);
6762 57357 : if (tret == GS_ERROR)
6763 : ret = GS_ERROR;
6764 57356 : else if (TREE_STATIC (ctor)
6765 57388 : && !initializer_constant_valid_p (ce->value,
6766 32 : TREE_TYPE (ce->value)))
6767 32 : TREE_STATIC (ctor) = 0;
6768 : }
6769 6714 : recompute_constructor_flags (ctor);
6770 :
6771 : /* Go ahead and simplify constant constructors to VECTOR_CST. */
6772 6714 : if (TREE_CONSTANT (ctor))
6773 : {
6774 2505 : bool constant_p = true;
6775 : tree value;
6776 :
6777 : /* Even when ctor is constant, it might contain non-*_CST
6778 : elements, such as addresses or trapping values like
6779 : 1.0/0.0 - 1.0/0.0. Such expressions don't belong
6780 : in VECTOR_CST nodes. */
6781 2505 : FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
6782 2323 : if (!CONSTANT_CLASS_P (value))
6783 : {
6784 : constant_p = false;
6785 : break;
6786 : }
6787 :
6788 182 : if (constant_p)
6789 : {
6790 182 : TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
6791 182 : break;
6792 : }
6793 : }
6794 :
6795 6532 : if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
6796 915 : TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
6797 : }
6798 : break;
6799 :
6800 0 : default:
6801 : /* So how did we get a CONSTRUCTOR for a scalar type? */
6802 0 : gcc_unreachable ();
6803 : }
6804 :
6805 1285987 : if (ret == GS_ERROR)
6806 : return GS_ERROR;
6807 : /* If we have gimplified both sides of the initializer but have
6808 : not emitted an assignment, do so now. */
6809 1285986 : if (*expr_p
6810 : /* If the type is an empty type, we don't need to emit the
6811 : assignment. */
6812 1285986 : && !is_empty_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
6813 : {
6814 329236 : tree lhs = TREE_OPERAND (*expr_p, 0);
6815 329236 : tree rhs = TREE_OPERAND (*expr_p, 1);
6816 329236 : if (want_value && object == lhs)
6817 12 : lhs = unshare_expr (lhs);
6818 329236 : gassign *init = gimple_build_assign (lhs, rhs);
6819 329236 : gimplify_seq_add_stmt (pre_p, init);
6820 : }
6821 1285986 : if (want_value)
6822 : {
6823 19 : *expr_p = object;
6824 19 : ret = GS_OK;
6825 : }
6826 : else
6827 : {
6828 1285967 : *expr_p = NULL;
6829 1285967 : ret = GS_ALL_DONE;
6830 : }
6831 :
6832 : /* If the user requests to initialize automatic variables, we
6833 : should initialize the padding inside the variable. Add a call to
6834 : __builtin_clear_padding (&object, 0, for_auto_init = true) to
6835 : initialize the padding of object always to zero regardless of
6836 : INIT_TYPE. Note, we will not insert this call if the aggregate
6837 : variable has been completely cleared already or it's initialized
6838 : with an empty constructor. We cannot insert this call if the
6839 : variable is a gimple register since __builtin_clear_padding will take
6840 : the address of the variable. As a result, if a long double/_Complex long
6841 : double variable will be spilled into stack later, its padding cannot
6842 : be cleared with __builtin_clear_padding. We should clear its padding
6843 : when it is spilled into memory. */
6844 1285986 : if (is_init_expr
6845 910983 : && !is_gimple_reg (object)
6846 906977 : && clear_padding_type_may_have_padding_p (type)
6847 867481 : && ((AGGREGATE_TYPE_P (type) && !cleared && !is_empty_ctor)
6848 218600 : || !AGGREGATE_TYPE_P (type))
6849 648881 : && var_needs_auto_init_p (object)
6850 1299069 : && flag_auto_var_init != AUTO_INIT_CXX26)
6851 40 : gimple_add_padding_init_for_auto_var (object, false, pre_p);
6852 :
6853 : return ret;
6854 : }
6855 :
6856 : /* Given a pointer value OP0, return a simplified version of an
6857 : indirection through OP0, or NULL_TREE if no simplification is
6858 : possible. This may only be applied to a rhs of an expression.
6859 : Note that the resulting type may be different from the type pointed
6860 : to in the sense that it is still compatible from the langhooks
6861 : point of view. */
6862 :
6863 : static tree
6864 512856 : gimple_fold_indirect_ref_rhs (tree t)
6865 : {
6866 0 : return gimple_fold_indirect_ref (t);
6867 : }
6868 :
6869 : /* Subroutine of gimplify_modify_expr to do simplifications of
6870 : MODIFY_EXPRs based on the code of the RHS. We loop for as long as
6871 : something changes. */
6872 :
static enum gimplify_status
gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
			  gimple_seq *pre_p, gimple_seq *post_p,
			  bool want_value)
{
  /* GS_UNHANDLED means "no simplification applied"; callers use that to
     decide whether to fall through to generic gimplification.  */
  enum gimplify_status ret = GS_UNHANDLED;
  bool changed;

  /* One simplification can expose another (e.g. stripping a
     COMPOUND_EXPR may reveal a CONSTRUCTOR or TARGET_EXPR), so loop
     until a pass over the RHS changes nothing.  */
  do
    {
      changed = false;
      switch (TREE_CODE (*from_p))
	{
	case VAR_DECL:
	  /* If we're assigning from a read-only variable initialized with
	     a constructor and not volatile, do the direct assignment from
	     the constructor, but only if the target is not volatile either
	     since this latter assignment might end up being done on a per
	     field basis.  However, if the target is volatile and the type
	     is aggregate and non-addressable, gimplify_init_constructor
	     knows that it needs to ensure a single access to the target
	     and it will return GS_OK only in this case.  */
	  if (TREE_READONLY (*from_p)
	      && DECL_INITIAL (*from_p)
	      && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR
	      && !TREE_THIS_VOLATILE (*from_p)
	      && (!TREE_THIS_VOLATILE (*to_p)
		  || (AGGREGATE_TYPE_P (TREE_TYPE (*to_p))
		      && !TREE_ADDRESSABLE (TREE_TYPE (*to_p)))))
	    {
	      tree old_from = *from_p;
	      enum gimplify_status subret;

	      /* Move the constructor into the RHS.  */
	      *from_p = unshare_expr (DECL_INITIAL (*from_p));

	      /* Let's see if gimplify_init_constructor will need to put
		 it in memory.  */
	      subret = gimplify_init_constructor (expr_p, NULL, NULL,
						  false, true);
	      if (subret == GS_ERROR)
		{
		  /* If so, revert the change.  */
		  *from_p = old_from;
		}
	      else
		{
		  ret = GS_OK;
		  changed = true;
		}
	    }
	  break;
	case INDIRECT_REF:
	  if (!TREE_ADDRESSABLE (TREE_TYPE (*from_p)))
	    /* If we have code like

	       *(const A*)(A*)&x

	       where the type of "x" is a (possibly cv-qualified variant
	       of "A"), treat the entire expression as identical to "x".
	       This kind of code arises in C++ when an object is bound
	       to a const reference, and if "x" is a TARGET_EXPR we want
	       to take advantage of the optimization below.  But not if
	       the type is TREE_ADDRESSABLE; then C++17 says that the
	       TARGET_EXPR needs to be a temporary.  */
	    if (tree t
		= gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0)))
	      {
		/* Preserve volatility of the original reference on the
		   folded replacement.  */
		bool volatile_p = TREE_THIS_VOLATILE (*from_p);
		if (TREE_THIS_VOLATILE (t) != volatile_p)
		  {
		    if (DECL_P (t))
		      t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
						    build_fold_addr_expr (t));
		    if (REFERENCE_CLASS_P (t))
		      TREE_THIS_VOLATILE (t) = volatile_p;
		  }
		*from_p = t;
		ret = GS_OK;
		changed = true;
	      }
	  break;

	case TARGET_EXPR:
	  {
	    /* If we are initializing something from a TARGET_EXPR, strip the
	       TARGET_EXPR and initialize it directly, if possible.  This can't
	       be done if the initializer is void, since that implies that the
	       temporary is set in some non-trivial way.

	       ??? What about code that pulls out the temp and uses it
	       elsewhere?  I think that such code never uses the TARGET_EXPR as
	       an initializer.  If I'm wrong, we'll die because the temp won't
	       have any RTL.  In that case, I guess we'll need to replace
	       references somehow.  */
	    tree init = TARGET_EXPR_INITIAL (*from_p);

	    if (init
		&& (TREE_CODE (*expr_p) != MODIFY_EXPR
		    || !TARGET_EXPR_NO_ELIDE (*from_p))
		&& !VOID_TYPE_P (TREE_TYPE (init)))
	      {
		*from_p = init;
		ret = GS_OK;
		changed = true;
	      }
	  }
	  break;

	case COMPOUND_EXPR:
	  /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
	     caught.  */
	  gimplify_compound_expr (from_p, pre_p, true);
	  ret = GS_OK;
	  changed = true;
	  break;

	case CONSTRUCTOR:
	  /* If we already made some changes, let the front end have a
	     crack at this before we break it down.  */
	  if (ret != GS_UNHANDLED)
	    break;

	  /* If we're initializing from a CONSTRUCTOR, break this into
	     individual MODIFY_EXPRs.  */
	  ret = gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
					   false);
	  return ret;

	case COND_EXPR:
	  /* If we're assigning to a non-register type, push the assignment
	     down into the branches.  This is mandatory for ADDRESSABLE types,
	     since we cannot generate temporaries for such, but it saves a
	     copy in other cases as well.
	     Also avoid an extra temporary and copy when assigning to
	     a register.  */
	  if (!is_gimple_reg_type (TREE_TYPE (*from_p))
	      || (is_gimple_reg (*to_p) && !gimplify_ctxp->allow_rhs_cond_expr))
	    {
	      /* This code should mirror the code in gimplify_cond_expr. */
	      enum tree_code code = TREE_CODE (*expr_p);
	      tree cond = *from_p;
	      tree result = *to_p;

	      ret = gimplify_expr (&result, pre_p, post_p,
				   is_gimple_lvalue, fb_lvalue);
	      if (ret != GS_ERROR)
		ret = GS_OK;

	      /* If we are going to write RESULT more than once, clear
		 TREE_READONLY flag, otherwise we might incorrectly promote
		 the variable to static const and initialize it at compile
		 time in one of the branches.  */
	      if (VAR_P (result)
		  && TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node
		  && TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
		TREE_READONLY (result) = 0;
	      if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
		TREE_OPERAND (cond, 1)
		  = build2 (code, void_type_node, result,
			    TREE_OPERAND (cond, 1));
	      if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
		TREE_OPERAND (cond, 2)
		  = build2 (code, void_type_node, unshare_expr (result),
			    TREE_OPERAND (cond, 2));

	      TREE_TYPE (cond) = void_type_node;
	      recalculate_side_effects (cond);

	      if (want_value)
		{
		  gimplify_and_add (cond, pre_p);
		  *expr_p = unshare_expr (result);
		}
	      else
		*expr_p = cond;
	      return ret;
	    }
	  break;

	case CALL_EXPR:
	  /* For calls that return in memory, give *to_p as the CALL_EXPR's
	     return slot so that we don't generate a temporary.  */
	  if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
	      && aggregate_value_p (*from_p, *from_p))
	    {
	      bool use_target;

	      if (!(rhs_predicate_for (*to_p))(*from_p))
		/* If we need a temporary, *to_p isn't accurate.  */
		use_target = false;
	      /* It's OK to use the return slot directly unless it's an NRV. */
	      else if (TREE_CODE (*to_p) == RESULT_DECL
		       && DECL_NAME (*to_p) == NULL_TREE
		       && needs_to_live_in_memory (*to_p))
		use_target = true;
	      else if (is_gimple_reg_type (TREE_TYPE (*to_p))
		       || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
		/* Don't force regs into memory.  */
		use_target = false;
	      else if (TREE_CODE (*expr_p) == INIT_EXPR)
		/* It's OK to use the target directly if it's being
		   initialized.  */
		use_target = true;
	      else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
		       != INTEGER_CST)
		/* Always use the target and thus RSO for variable-sized types.
		   GIMPLE cannot deal with a variable-sized assignment
		   embedded in a call statement.  */
		use_target = true;
	      else if (TREE_CODE (*to_p) != SSA_NAME
		      && (!is_gimple_variable (*to_p)
			  || needs_to_live_in_memory (*to_p)))
		/* Don't use the original target if it's already addressable;
		   if its address escapes, and the called function uses the
		   NRV optimization, a conforming program could see *to_p
		   change before the called function returns; see c++/19317.
		   When optimizing, the return_slot pass marks more functions
		   as safe after we have escape info.  */
		use_target = false;
	      else
		use_target = true;

	      if (use_target)
		{
		  CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
		  mark_addressable (*to_p);
		}
	    }
	  break;

	case WITH_SIZE_EXPR:
	  /* Likewise for calls that return an aggregate of non-constant size,
	     since we would not be able to generate a temporary at all.  */
	  if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
	    {
	      *from_p = TREE_OPERAND (*from_p, 0);
	      /* We don't change ret in this case because the
		 WITH_SIZE_EXPR might have been added in
		 gimplify_modify_expr, so returning GS_OK would lead to an
		 infinite loop.  */
	      changed = true;
	    }
	  break;

	  /* If we're initializing from a container, push the initialization
	     inside it.  */
	case CLEANUP_POINT_EXPR:
	case BIND_EXPR:
	case STATEMENT_LIST:
	  {
	    tree wrap = *from_p;
	    tree t;

	    ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
				 fb_lvalue);
	    if (ret != GS_ERROR)
	      ret = GS_OK;

	    t = voidify_wrapper_expr (wrap, *expr_p);
	    gcc_assert (t == *expr_p);

	    if (want_value)
	      {
		gimplify_and_add (wrap, pre_p);
		*expr_p = unshare_expr (*to_p);
	      }
	    else
	      *expr_p = wrap;
	    return GS_OK;
	  }

	case NOP_EXPR:
	  /* Pull out compound literal expressions from a NOP_EXPR.
	     Those are created in the C FE to drop qualifiers during
	     lvalue conversion.  */
	  if ((TREE_CODE (TREE_OPERAND (*from_p, 0)) == COMPOUND_LITERAL_EXPR)
	      && tree_ssa_useless_type_conversion (*from_p))
	    {
	      *from_p = TREE_OPERAND (*from_p, 0);
	      ret = GS_OK;
	      changed = true;
	    }
	  break;

	case COMPOUND_LITERAL_EXPR:
	  {
	    tree complit = TREE_OPERAND (*expr_p, 1);
	    tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
	    tree decl = DECL_EXPR_DECL (decl_s);
	    tree init = DECL_INITIAL (decl);

	    /* struct T x = (struct T) { 0, 1, 2 } can be optimized
	       into struct T x = { 0, 1, 2 } if the address of the
	       compound literal has never been taken.  */
	    if (!TREE_ADDRESSABLE (complit)
		&& !TREE_ADDRESSABLE (decl)
		&& init)
	      {
		*expr_p = copy_node (*expr_p);
		TREE_OPERAND (*expr_p, 1) = init;
		return GS_OK;
	      }
	  }
	  /* FALLTHRU: when the compound literal is addressable (or has no
	     initializer) there is nothing to simplify here.  */

	default:
	  break;
	}
    }
  while (changed);

  return ret;
}
7186 :
7187 :
7188 : /* Return true if T looks like a valid GIMPLE statement. */
7189 :
static bool
is_gimple_stmt (tree t)
{
  /* The decision is made purely on the tree code of T.  */
  const enum tree_code code = TREE_CODE (t);

  switch (code)
    {
    case NOP_EXPR:
      /* The only valid NOP_EXPR is the empty statement.  */
      return IS_EMPTY_STMT (t);

    case BIND_EXPR:
    case COND_EXPR:
      /* These are only valid if they're void.  */
      return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));

    /* Control-flow constructs, EH constructs, and the OpenACC/OpenMP
       directive codes below are statements by definition.  */
    case SWITCH_EXPR:
    case GOTO_EXPR:
    case RETURN_EXPR:
    case LABEL_EXPR:
    case CASE_LABEL_EXPR:
    case TRY_CATCH_EXPR:
    case TRY_FINALLY_EXPR:
    case EH_FILTER_EXPR:
    case CATCH_EXPR:
    case ASM_EXPR:
    case STATEMENT_LIST:
    case OACC_PARALLEL:
    case OACC_KERNELS:
    case OACC_SERIAL:
    case OACC_DATA:
    case OACC_HOST_DATA:
    case OACC_DECLARE:
    case OACC_UPDATE:
    case OACC_ENTER_DATA:
    case OACC_EXIT_DATA:
    case OACC_CACHE:
    case OMP_PARALLEL:
    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_LOOP:
    case OMP_TILE:
    case OMP_UNROLL:
    case OACC_LOOP:
    case OMP_SCAN:
    case OMP_SCOPE:
    case OMP_DISPATCH:
    case OMP_SECTIONS:
    case OMP_SECTION:
    case OMP_STRUCTURED_BLOCK:
    case OMP_SINGLE:
    case OMP_MASTER:
    case OMP_MASKED:
    case OMP_TASKGROUP:
    case OMP_ORDERED:
    case OMP_CRITICAL:
    case OMP_METADIRECTIVE:
    case OMP_TASK:
    case OMP_TARGET:
    case OMP_TARGET_DATA:
    case OMP_TARGET_UPDATE:
    case OMP_TARGET_ENTER_DATA:
    case OMP_TARGET_EXIT_DATA:
    case OMP_TASKLOOP:
    case OMP_TEAMS:
      /* These are always void.  */
      return true;

    case CALL_EXPR:
    case MODIFY_EXPR:
    case PREDICT_EXPR:
      /* These are valid regardless of their type.  */
      return true;

    default:
      /* Anything else is an expression, not a statement.  */
      return false;
    }
}
7269 :
7270 :
7271 : /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
7272 : a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a gimple register.
7273 :
7274 : IMPORTANT NOTE: This promotion is performed by introducing a load of the
7275 : other, unmodified part of the complex object just before the total store.
7276 : As a consequence, if the object is still uninitialized, an undefined value
7277 : will be loaded into a register, which may result in a spurious exception
7278 : if the register is floating-point and the value happens to be a signaling
7279 : NaN for example. Then the fully-fledged complex operations lowering pass
7280 : followed by a DCE pass are necessary in order to fix things up. */
7281 :
7282 : static enum gimplify_status
7283 1960 : gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
7284 : bool want_value)
7285 : {
7286 1960 : enum tree_code code, ocode;
7287 1960 : tree lhs, rhs, new_rhs, other, realpart, imagpart;
7288 :
7289 1960 : lhs = TREE_OPERAND (*expr_p, 0);
7290 1960 : rhs = TREE_OPERAND (*expr_p, 1);
7291 1960 : code = TREE_CODE (lhs);
7292 1960 : lhs = TREE_OPERAND (lhs, 0);
7293 :
7294 1960 : ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
7295 1960 : other = build1 (ocode, TREE_TYPE (rhs), lhs);
7296 1960 : suppress_warning (other);
7297 1960 : other = get_formal_tmp_var (other, pre_p);
7298 :
7299 1960 : realpart = code == REALPART_EXPR ? rhs : other;
7300 966 : imagpart = code == REALPART_EXPR ? other : rhs;
7301 :
7302 1960 : if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
7303 0 : new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
7304 : else
7305 1960 : new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
7306 :
7307 1960 : gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
7308 1960 : *expr_p = (want_value) ? rhs : NULL_TREE;
7309 :
7310 1960 : return GS_ALL_DONE;
7311 : }
7312 :
7313 : /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
7314 :
7315 : modify_expr
7316 : : varname '=' rhs
7317 : | '*' ID '=' rhs
7318 :
7319 : PRE_P points to the list where side effects that must happen before
7320 : *EXPR_P should be stored.
7321 :
7322 : POST_P points to the list where side effects that must happen after
7323 : *EXPR_P should be stored.
7324 :
7325 : WANT_VALUE is nonzero iff we want to use the value of this expression
7326 : in another expression. */
7327 :
static enum gimplify_status
gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
		      bool want_value)
{
  tree *from_p = &TREE_OPERAND (*expr_p, 1);	/* RHS slot.  */
  tree *to_p = &TREE_OPERAND (*expr_p, 0);	/* LHS slot.  */
  enum gimplify_status ret = GS_UNHANDLED;
  gimple *assign;
  location_t loc = EXPR_LOCATION (*expr_p);
  gimple_stmt_iterator gsi;

  if (error_operand_p (*from_p) || error_operand_p (*to_p))
    return GS_ERROR;

  gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
	      || TREE_CODE (*expr_p) == INIT_EXPR);

  /* Trying to simplify a clobber using normal logic doesn't work,
     so handle it here.  */
  if (TREE_CLOBBER_P (*from_p))
    {
      ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
      if (ret == GS_ERROR)
	return ret;
      gcc_assert (!want_value);
      if (!VAR_P (*to_p) && TREE_CODE (*to_p) != MEM_REF)
	{
	  /* Force the clobbered lvalue into a MEM_REF through a
	     temporary holding its address.  */
	  tree addr = get_initialized_tmp_var (build_fold_addr_expr (*to_p),
					       pre_p, post_p);
	  *to_p = build_simple_mem_ref_loc (EXPR_LOCATION (*to_p), addr);
	}
      gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
      *expr_p = NULL;
      return GS_ALL_DONE;
    }

  /* Convert initialization from an empty variable-size CONSTRUCTOR to
     memset.  */
  if (TREE_TYPE (*from_p) != error_mark_node
      && TYPE_SIZE_UNIT (TREE_TYPE (*from_p))
      && !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (*from_p)))
      && TREE_CODE (*from_p) == CONSTRUCTOR
      && CONSTRUCTOR_NELTS (*from_p) == 0)
    {
      maybe_with_size_expr (from_p);
      gcc_assert (TREE_CODE (*from_p) == WITH_SIZE_EXPR);
      return gimplify_modify_expr_to_memset (expr_p,
					     TREE_OPERAND (*from_p, 1),
					     want_value, pre_p);
    }

  /* Insert pointer conversions required by the middle-end that are not
     required by the frontend.  This fixes middle-end type checking,
     for example gcc.dg/redecl-6.c.  */
  if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
    {
      STRIP_USELESS_TYPE_CONVERSION (*from_p);
      if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
	*from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
    }

  /* See if any simplifications can be done based on what the RHS is.  */
  ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
				  want_value);
  if (ret != GS_UNHANDLED)
    return ret;

  /* For empty types only gimplify the left hand side and right hand
     side as statements and throw away the assignment.  Do this after
     gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
     types properly.  */
  if (is_empty_type (TREE_TYPE (*from_p))
      && !want_value
      /* Don't do this for calls that return addressable types, expand_call
	 relies on those having a lhs.  */
      && !(TREE_ADDRESSABLE (TREE_TYPE (*from_p))
	   && TREE_CODE (*from_p) == CALL_EXPR))
    {
      suppress_warning (*from_p, OPT_Wunused_result);
      gimplify_stmt (from_p, pre_p);
      gimplify_stmt (to_p, pre_p);
      *expr_p = NULL_TREE;
      return GS_ALL_DONE;
    }

  /* If the value being copied is of variable width, compute the length
     of the copy into a WITH_SIZE_EXPR.  Note that we need to do this
     before gimplifying any of the operands so that we can resolve any
     PLACEHOLDER_EXPRs in the size.  Also note that the RTL expander uses
     the size of the expression to be copied, not of the destination, so
     that is what we must do here.  */
  maybe_with_size_expr (from_p);

  /* As a special case, we have to temporarily allow for assignments
     with a CALL_EXPR on the RHS.  Since in GIMPLE a function call is
     a toplevel statement, when gimplifying the GENERIC expression
     MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
     GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.

     Instead, we need to create the tuple GIMPLE_CALL <a, foo>.  To
     prevent gimplify_expr from trying to create a new temporary for
     foo's LHS, we tell it that it should only gimplify until it
     reaches the CALL_EXPR.  On return from gimplify_expr, the newly
     created GIMPLE_CALL <foo> will be the last statement in *PRE_P
     and all we need to do here is set 'a' to be its LHS.  */

  /* Gimplify the RHS first for C++17 and bug 71104.  */
  gimple_predicate initial_pred = initial_rhs_predicate_for (*to_p);
  ret = gimplify_expr (from_p, pre_p, post_p, initial_pred, fb_rvalue);
  if (ret == GS_ERROR)
    return ret;

  /* Then gimplify the LHS.  */
  /* If we gimplified the RHS to a CALL_EXPR and that call may return
     twice we have to make sure to gimplify into non-SSA as otherwise
     the abnormal edge added later will make those defs not dominate
     their uses.
     ??? Technically this applies only to the registers used in the
     resulting non-register *TO_P.  */
  bool saved_into_ssa = gimplify_ctxp->into_ssa;
  if (saved_into_ssa
      && TREE_CODE (*from_p) == CALL_EXPR
      && call_expr_flags (*from_p) & ECF_RETURNS_TWICE)
    gimplify_ctxp->into_ssa = false;
  ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
  gimplify_ctxp->into_ssa = saved_into_ssa;
  if (ret == GS_ERROR)
    return ret;

  /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
     guess for the predicate was wrong.  */
  gimple_predicate final_pred = rhs_predicate_for (*to_p);
  if (final_pred != initial_pred)
    {
      ret = gimplify_expr (from_p, pre_p, post_p, final_pred, fb_rvalue);
      if (ret == GS_ERROR)
	return ret;
    }

  /* In case of va_arg internal fn wrapped in a WITH_SIZE_EXPR, add the type
     size as argument to the call.  */
  if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
    {
      tree call = TREE_OPERAND (*from_p, 0);
      tree vlasize = TREE_OPERAND (*from_p, 1);

      if (TREE_CODE (call) == CALL_EXPR
	  && CALL_EXPR_IFN (call) == IFN_VA_ARG)
	{
	  /* Rebuild the IFN_VA_ARG call with the VLA size appended as an
	     extra trailing argument.  */
	  int nargs = call_expr_nargs (call);
	  tree type = TREE_TYPE (call);
	  tree ap = CALL_EXPR_ARG (call, 0);
	  tree tag = CALL_EXPR_ARG (call, 1);
	  tree aptag = CALL_EXPR_ARG (call, 2);
	  tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
						       IFN_VA_ARG, type,
						       nargs + 1, ap, tag,
						       aptag, vlasize);
	  TREE_OPERAND (*from_p, 0) = newcall;
	}
    }

  /* Now see if the above changed *from_p to something we handle specially.  */
  ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
				  want_value);
  if (ret != GS_UNHANDLED)
    return ret;

  /* If we've got a variable sized assignment between two lvalues (i.e. does
     not involve a call), then we can make things a bit more straightforward
     by converting the assignment to memcpy or memset.  */
  if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
    {
      tree from = TREE_OPERAND (*from_p, 0);
      tree size = TREE_OPERAND (*from_p, 1);

      if (TREE_CODE (from) == CONSTRUCTOR)
	return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
      else if (is_gimple_addressable (from)
	       && ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (*to_p)))
	       && ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (from))))
	{
	  *from_p = from;
	  return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
						 pre_p);
	}
    }

  /* Transform partial stores to non-addressable complex variables into
     total stores.  This allows us to use real instead of virtual operands
     for these variables, which improves optimization.  */
  if ((TREE_CODE (*to_p) == REALPART_EXPR
       || TREE_CODE (*to_p) == IMAGPART_EXPR)
      && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
    return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);

  /* Try to alleviate the effects of the gimplification creating artificial
     temporaries (see for example is_gimple_reg_rhs) on the debug info, but
     make sure not to create DECL_DEBUG_EXPR links across functions.  */
  if (!gimplify_ctxp->into_ssa
      && VAR_P (*from_p)
      && DECL_IGNORED_P (*from_p)
      && DECL_P (*to_p)
      && !DECL_IGNORED_P (*to_p)
      && decl_function_context (*to_p) == current_function_decl
      && decl_function_context (*from_p) == current_function_decl)
    {
      if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
	DECL_NAME (*from_p)
	  = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
      DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
      SET_DECL_DEBUG_EXPR (*from_p, *to_p);
    }

  /* A volatile LHS must not be re-read to produce the value of the
     expression; capture the RHS in a temporary instead.  */
  if (want_value && TREE_THIS_VOLATILE (*to_p))
    *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);

  if (TREE_CODE (*from_p) == CALL_EXPR)
    {
      /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
	 instead of a GIMPLE_ASSIGN.  */
      gcall *call_stmt;
      if (CALL_EXPR_FN (*from_p) == NULL_TREE)
	{
	  /* Gimplify internal functions created in the FEs.  */
	  int nargs = call_expr_nargs (*from_p), i;
	  enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
	  auto_vec<tree> vargs (nargs);

	  for (i = 0; i < nargs; i++)
	    {
	      gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
			    EXPR_LOCATION (*from_p));
	      vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
	    }
	  call_stmt = gimple_build_call_internal_vec (ifn, vargs);
	  gimple_call_set_nothrow (call_stmt, TREE_NOTHROW (*from_p));
	  gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
	}
      else
	{
	  tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
	  CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
	  STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
	  tree fndecl = get_callee_fndecl (*from_p);
	  /* __builtin_expect with three arguments is lowered to the
	     internal function so later passes recognize it uniformly.  */
	  if (fndecl
	      && fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
	      && call_expr_nargs (*from_p) == 3)
	    call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
						    CALL_EXPR_ARG (*from_p, 0),
						    CALL_EXPR_ARG (*from_p, 1),
						    CALL_EXPR_ARG (*from_p, 2));
	  else
	    {
	      call_stmt = gimple_build_call_from_tree (*from_p, fnptrtype);
	    }
	}
      notice_special_calls (call_stmt);
      if (!gimple_call_noreturn_p (call_stmt) || !should_remove_lhs_p (*to_p))
	gimple_call_set_lhs (call_stmt, *to_p);
      else if (TREE_CODE (*to_p) == SSA_NAME)
	/* The above is somewhat premature, avoid ICEing later for a
	   SSA name w/o a definition.  We may have uses in the GIMPLE IL.
	   ??? This doesn't make it a default-def.  */
	SSA_NAME_DEF_STMT (*to_p) = gimple_build_nop ();

      assign = call_stmt;
    }
  else
    {
      assign = gimple_build_assign (*to_p, *from_p);
      gimple_set_location (assign, EXPR_LOCATION (*expr_p));
      if (COMPARISON_CLASS_P (*from_p))
	copy_warning (assign, *from_p);
    }

  if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
    {
      /* We should have got an SSA name from the start.  */
      gcc_assert (TREE_CODE (*to_p) == SSA_NAME
		  || ! gimple_in_ssa_p (cfun));
    }

  gimplify_seq_add_stmt (pre_p, assign);
  gsi = gsi_last (*pre_p);
  maybe_fold_stmt (&gsi);

  if (want_value)
    {
      *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
      return GS_OK;
    }
  else
    *expr_p = NULL;

  return GS_ALL_DONE;
}
7625 :
7626 : /* Gimplify a comparison between two variable-sized objects. Do this
7627 : with a call to BUILT_IN_MEMCMP. */
7628 :
7629 : static enum gimplify_status
7630 0 : gimplify_variable_sized_compare (tree *expr_p)
7631 : {
7632 0 : location_t loc = EXPR_LOCATION (*expr_p);
7633 0 : tree op0 = TREE_OPERAND (*expr_p, 0);
7634 0 : tree op1 = TREE_OPERAND (*expr_p, 1);
7635 0 : tree t, arg, dest, src, expr;
7636 :
7637 0 : arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
7638 0 : arg = unshare_expr (arg);
7639 0 : arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
7640 0 : src = build_fold_addr_expr_loc (loc, op1);
7641 0 : dest = build_fold_addr_expr_loc (loc, op0);
7642 0 : t = builtin_decl_implicit (BUILT_IN_MEMCMP);
7643 0 : t = build_call_expr_loc (loc, t, 3, dest, src, arg);
7644 :
7645 0 : expr
7646 0 : = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
7647 0 : SET_EXPR_LOCATION (expr, loc);
7648 0 : *expr_p = expr;
7649 :
7650 0 : return GS_OK;
7651 : }
7652 :
7653 : /* Gimplify a comparison between two aggregate objects of integral scalar
7654 : mode as a comparison between the bitwise equivalent scalar values. */
7655 :
static enum gimplify_status
gimplify_scalar_mode_aggregate_compare (tree *expr_p)
{
  const location_t loc = EXPR_LOCATION (*expr_p);
  const enum tree_code code = TREE_CODE (*expr_p);
  tree op0 = TREE_OPERAND (*expr_p, 0);
  tree op1 = TREE_OPERAND (*expr_p, 1);
  tree type = TREE_TYPE (op0);
  /* Unsigned scalar type with the same machine mode as the aggregates;
     the bitwise image of each operand is compared.  */
  tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);

  /* Reinterpret both aggregates as that scalar type without changing
     any bits.  */
  op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
  op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);

  /* We need to perform ordering comparisons in memory order like memcmp and,
     therefore, may need to byte-swap operands for little-endian targets.  */
  if (code != EQ_EXPR && code != NE_EXPR)
    {
      gcc_assert (BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN);
      gcc_assert (TREE_CODE (scalar_type) == INTEGER_TYPE);
      tree fndecl;

      /* Big-endian targets already read most-significant byte first,
	 so no swap is needed; single bytes never need swapping.  */
      if (BYTES_BIG_ENDIAN)
	fndecl = NULL_TREE;
      else
	switch (int_size_in_bytes (scalar_type))
	  {
	  case 1:
	    fndecl = NULL_TREE;
	    break;
	  case 2:
	    fndecl = builtin_decl_implicit (BUILT_IN_BSWAP16);
	    break;
	  case 4:
	    fndecl = builtin_decl_implicit (BUILT_IN_BSWAP32);
	    break;
	  case 8:
	    fndecl = builtin_decl_implicit (BUILT_IN_BSWAP64);
	    break;
	  case 16:
	    fndecl = builtin_decl_implicit (BUILT_IN_BSWAP128);
	    break;
	  default:
	    gcc_unreachable ();
	  }

      /* Byte-swap both operands so the scalar comparison matches the
	 memory-order result memcmp would produce.  */
      if (fndecl)
	{
	  op0 = build_call_expr_loc (loc, fndecl, 1, op0);
	  op1 = build_call_expr_loc (loc, fndecl, 1, op1);
	}
    }

  /* Emit the scalar comparison with the original code and result type.  */
  *expr_p = fold_build2_loc (loc, code, TREE_TYPE (*expr_p), op0, op1);

  return GS_OK;
}
7712 :
7713 : /* Gimplify an expression sequence. This function gimplifies each
7714 : expression and rewrites the original expression with the last
7715 : expression of the sequence in GIMPLE form.
7716 :
7717 : PRE_P points to the list where the side effects for all the
7718 : expressions in the sequence will be emitted.
7719 :
7720 : WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
7721 :
7722 : static enum gimplify_status
7723 1091390 : gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
7724 : {
7725 1091390 : tree t = *expr_p;
7726 :
7727 1177943 : do
7728 : {
7729 1177943 : tree *sub_p = &TREE_OPERAND (t, 0);
7730 :
7731 1177943 : if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
7732 203418 : gimplify_compound_expr (sub_p, pre_p, false);
7733 : else
7734 974525 : gimplify_stmt (sub_p, pre_p);
7735 :
7736 1177943 : t = TREE_OPERAND (t, 1);
7737 : }
7738 1177943 : while (TREE_CODE (t) == COMPOUND_EXPR);
7739 :
7740 1091390 : *expr_p = t;
7741 1091390 : if (want_value)
7742 : return GS_OK;
7743 : else
7744 : {
7745 595345 : gimplify_stmt (expr_p, pre_p);
7746 595345 : return GS_ALL_DONE;
7747 : }
7748 : }
7749 :
7750 : /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
7751 : gimplify. After gimplification, EXPR_P will point to a new temporary
7752 : that holds the original value of the SAVE_EXPR node.
7753 :
7754 : PRE_P points to the list where side effects that must happen before
7755 : *EXPR_P should be stored. */
7756 :
7757 : static enum gimplify_status
7758 438860 : gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
7759 : {
7760 438860 : enum gimplify_status ret = GS_ALL_DONE;
7761 438860 : tree val;
7762 :
7763 438860 : gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
7764 438860 : val = TREE_OPERAND (*expr_p, 0);
7765 :
7766 438860 : if (val && TREE_TYPE (val) == error_mark_node)
7767 : return GS_ERROR;
7768 :
7769 : /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
7770 438858 : if (!SAVE_EXPR_RESOLVED_P (*expr_p))
7771 : {
7772 : /* The operand may be a void-valued expression. It is
7773 : being executed only for its side-effects. */
7774 168266 : if (TREE_TYPE (val) == void_type_node)
7775 : {
7776 24 : ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7777 : is_gimple_stmt, fb_none);
7778 24 : val = NULL;
7779 : }
7780 : else
7781 : /* The temporary may not be an SSA name as later abnormal and EH
7782 : control flow may invalidate use/def domination. When in SSA
7783 : form then assume there are no such issues and SAVE_EXPRs only
7784 : appear via GENERIC foldings. */
7785 336484 : val = get_initialized_tmp_var (val, pre_p, post_p,
7786 336484 : gimple_in_ssa_p (cfun));
7787 :
7788 168266 : TREE_OPERAND (*expr_p, 0) = val;
7789 168266 : SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
7790 : }
7791 :
7792 438858 : *expr_p = val;
7793 :
7794 438858 : return ret;
7795 : }
7796 :
7797 : /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
7798 :
7799 : unary_expr
7800 : : ...
7801 : | '&' varname
7802 : ...
7803 :
7804 : PRE_P points to the list where side effects that must happen before
7805 : *EXPR_P should be stored.
7806 :
7807 : POST_P points to the list where side effects that must happen after
7808 : *EXPR_P should be stored. */
7809 :
static enum gimplify_status
gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree expr = *expr_p;
  tree op0 = TREE_OPERAND (expr, 0);
  enum gimplify_status ret;
  location_t loc = EXPR_LOCATION (*expr_p);

  switch (TREE_CODE (op0))
    {
    case INDIRECT_REF:
    do_indirect_ref:
      /* Check if we are dealing with an expression of the form '&*ptr'.
	 While the front end folds away '&*ptr' into 'ptr', these
	 expressions may be generated internally by the compiler (e.g.,
	 builtins like __builtin_va_end).  */
      /* Caution: the silent array decomposition semantics we allow for
	 ADDR_EXPR means we can't always discard the pair.  */
      /* Gimplification of the ADDR_EXPR operand may drop
	 cv-qualification conversions, so make sure we add them if
	 needed.  */
      {
	tree op00 = TREE_OPERAND (op0, 0);
	tree t_expr = TREE_TYPE (expr);
	tree t_op00 = TREE_TYPE (op00);

	/* '&*ptr' collapses to 'ptr', converted if the pointer types
	   differ in a meaningful way.  */
	if (!useless_type_conversion_p (t_expr, t_op00))
	  op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
	*expr_p = op00;
	ret = GS_OK;
      }
      break;

    case VIEW_CONVERT_EXPR:
      /* Take the address of our operand and then convert it to the type of
	 this ADDR_EXPR.

	 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
	 all clear.  The impact of this transformation is even less clear.  */

      /* If the operand is a useless conversion, look through it.  Doing so
	 guarantees that the ADDR_EXPR and its operand will remain of the
	 same type.  */
      if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
	op0 = TREE_OPERAND (op0, 0);

      *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
				  build_fold_addr_expr_loc (loc,
							TREE_OPERAND (op0, 0)));
      ret = GS_OK;
      break;

    case MEM_REF:
      /* '&MEM[p, 0]' is just 'p'; treat it like '&*p' above.  Nonzero
	 offsets fall through to the generic path.  */
      if (integer_zerop (TREE_OPERAND (op0, 1)))
	goto do_indirect_ref;

      /* fall through */

    default:
      /* If we see a call to a declared builtin or see its address
	 being taken (we can unify those cases here) then we can mark
	 the builtin for implicit generation by GCC.  */
      if (TREE_CODE (op0) == FUNCTION_DECL
	  && fndecl_built_in_p (op0, BUILT_IN_NORMAL)
	  && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
	set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);

      /* We use fb_either here because the C frontend sometimes takes
	 the address of a call that returns a struct; see
	 gcc.dg/c99-array-lval-1.c.  The gimplifier will correctly make
	 the implied temporary explicit.  */

      /* Make the operand addressable.  */
      ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
			   is_gimple_addressable, fb_either);
      if (ret == GS_ERROR)
	break;

      /* Then mark it.  Beware that it may not be possible to do so directly
	 if a temporary has been created by the gimplification.  */
      prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);

      op0 = TREE_OPERAND (expr, 0);

      /* For various reasons, the gimplification of the expression
	 may have made a new INDIRECT_REF.  */
      if (INDIRECT_REF_P (op0)
	  || (TREE_CODE (op0) == MEM_REF
	      && integer_zerop (TREE_OPERAND (op0, 1))))
	goto do_indirect_ref;

      mark_addressable (TREE_OPERAND (expr, 0));

      /* The FEs may end up building ADDR_EXPRs early on a decl with
	 an incomplete type.  Re-build ADDR_EXPRs in canonical form
	 here.  */
      if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
	*expr_p = build_fold_addr_expr (op0);

      /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly.  */
      if (TREE_CODE (*expr_p) == ADDR_EXPR)
	recompute_tree_invariant_for_addr_expr (*expr_p);

      /* If we re-built the ADDR_EXPR add a conversion to the original type
	 if required.  */
      if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
	*expr_p = fold_convert (TREE_TYPE (expr), *expr_p);

      break;
    }

  return ret;
}
7923 :
7924 : /* Return the number of times character C occurs in string S. */
7925 :
static int
num_occurrences (int c, const char *s)
{
  /* Count how many characters of S compare equal to C.  */
  int count = 0;
  for (const char *p = s; *p != '\0'; ++p)
    if (*p == c)
      ++count;
  return count;
}
7934 :
7935 : /* A subroutine of gimplify_asm_expr. Check that all operands have
7936 : the same number of alternatives. Return -1 if this is violated. Otherwise
7937 : return the number of alternatives. */
7938 :
7939 : static int
7940 194960 : num_alternatives (const_tree link)
7941 : {
7942 194960 : if (link == nullptr)
7943 : return 0;
7944 :
7945 47551 : const char *constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
7946 47551 : int num = num_occurrences (',', constraint);
7947 :
7948 47551 : if (num + 1 > MAX_RECOG_ALTERNATIVES)
7949 : return -1;
7950 :
7951 92060 : for (link = TREE_CHAIN (link); link; link = TREE_CHAIN (link))
7952 : {
7953 44509 : constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
7954 89018 : if (num_occurrences (',', constraint) != num)
7955 : return -1;
7956 : }
7957 47551 : return num + 1;
7958 : }
7959 :
7960 : /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
7961 : value; output operands should be a gimple lvalue. */
7962 :
static enum gimplify_status
gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree expr;
  int noutputs;
  const char **oconstraints;
  int i;
  tree link;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;
  enum gimplify_status ret, tret;
  gasm *stmt;
  vec<tree, va_gc> *inputs;
  vec<tree, va_gc> *outputs;
  vec<tree, va_gc> *clobbers;
  vec<tree, va_gc> *labels;
  tree link_next;

  expr = *expr_p;
  noutputs = list_length (ASM_OUTPUTS (expr));
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  inputs = NULL;
  outputs = NULL;
  clobbers = NULL;
  labels = NULL;

  /* All operands must agree on the number of constraint alternatives;
     num_alternatives returns -1 on disagreement or overflow.  */
  int num_alternatives_out = num_alternatives (ASM_OUTPUTS (expr));
  int num_alternatives_in = num_alternatives (ASM_INPUTS (expr));
  if (num_alternatives_out == -1 || num_alternatives_in == -1
      || (num_alternatives_out > 0 && num_alternatives_in > 0
	  && num_alternatives_out != num_alternatives_in))
    {
      error ("operand constraints for %<asm%> differ "
	     "in number of alternatives");
      return GS_ERROR;
    }
  int num_alternatives = MAX (num_alternatives_out, num_alternatives_in);

  /* Tracks hard-register uses/clobbers across operands so the constraint
     parsers can diagnose conflicts.  */
  gimplify_reg_info reg_info (num_alternatives, noutputs);

  /* Collect the clobber list, recording explicitly clobbered hard
     registers in REG_INFO.  */
  link_next = NULL_TREE;
  for (link = ASM_CLOBBERS (expr); link; link = link_next)
    {
      /* The clobber entry could also be an error marker.  */
      if (TREE_CODE (TREE_VALUE (link)) == STRING_CST)
	{
	  const char *regname= TREE_STRING_POINTER (TREE_VALUE (link));
	  int regno = decode_reg_name (regname);
	  if (regno >= 0)
	    reg_info.set_clobbered (regno);
	}
      link_next = TREE_CHAIN (link);
      TREE_CHAIN (link) = NULL_TREE;
      vec_safe_push (clobbers, link);
    }

  /* Gimplify each output operand to an lvalue and split "+" (in/out)
     constraints into separate output and input operands.  */
  ret = GS_ALL_DONE;
  link_next = NULL_TREE;
  for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
    {
      bool ok;
      size_t constraint_len;

      if (error_operand_p (TREE_VALUE (link)))
	return GS_ERROR;
      link_next = TREE_CHAIN (link);

      oconstraints[i]
	= constraint
	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      constraint_len = strlen (constraint);
      if (constraint_len == 0)
	continue;

      reg_info.operand = TREE_VALUE (link);
      ok = parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
				    &allows_reg, &is_inout, &reg_info);
      if (!ok)
	{
	  ret = GS_ERROR;
	  is_inout = false;
	}

      /* If we can't make copies, we can only accept memory.
	 Similarly for VLAs.  */
      tree outtype = TREE_TYPE (TREE_VALUE (link));
      if (TREE_ADDRESSABLE (outtype)
	  || !COMPLETE_TYPE_P (outtype)
	  || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (outtype)))
	{
	  if (allows_mem)
	    allows_reg = 0;
	  else
	    {
	      error ("impossible constraint in %<asm%>");
	      error ("non-memory output %d must stay in memory", i);
	      return GS_ERROR;
	    }
	}

      if (!allows_reg && allows_mem)
	mark_addressable (TREE_VALUE (link));

      /* In/out operands need a stable lvalue (min_lval); plain outputs
	 only need a general lvalue.  */
      tree orig = TREE_VALUE (link);
      tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
			    is_inout ? is_gimple_min_lval : is_gimple_lvalue,
			    fb_lvalue | fb_mayfail);
      if (tret == GS_ERROR)
	{
	  if (orig != error_mark_node)
	    error ("invalid lvalue in %<asm%> output %d", i);
	  ret = tret;
	}

      /* If the gimplified operand is a register we do not allow memory.  */
      if (allows_reg
	  && allows_mem
	  && (is_gimple_reg (TREE_VALUE (link))
	      || (handled_component_p (TREE_VALUE (link))
		  && is_gimple_reg (TREE_OPERAND (TREE_VALUE (link), 0)))))
	allows_mem = 0;

      /* If the constraint does not allow memory make sure we gimplify
	 it to a register if it is not already but its base is.  This
	 happens for complex and vector components.  */
      if (!allows_mem)
	{
	  tree op = TREE_VALUE (link);
	  if (! is_gimple_val (op)
	      && is_gimple_reg_type (TREE_TYPE (op))
	      && is_gimple_reg (get_base_address (op)))
	    {
	      /* Route the asm through a fresh temporary register: copy in
		 before the asm for in/out operands, copy out after it.  */
	      tree tem = create_tmp_reg (TREE_TYPE (op));
	      tree ass;
	      if (is_inout)
		{
		  ass = build2 (MODIFY_EXPR, TREE_TYPE (tem),
				tem, unshare_expr (op));
		  gimplify_and_add (ass, pre_p);
		}
	      ass = build2 (MODIFY_EXPR, TREE_TYPE (tem), op, tem);
	      gimplify_and_add (ass, post_p);

	      TREE_VALUE (link) = tem;
	      tret = GS_OK;
	    }
	}

      vec_safe_push (outputs, link);
      TREE_CHAIN (link) = NULL_TREE;

      if (is_inout)
	{
	  /* An input/output operand.  To give the optimizers more
	     flexibility, split it into separate input and output
	     operands.  */
	  tree input;
	  /* Buffer big enough to format a 32-bit UINT_MAX into.  */
	  char buf[11];

	  /* Turn the in/out constraint into an output constraint.  */
	  char *p = xstrdup (constraint);
	  p[0] = '=';
	  TREE_PURPOSE (link) = unshare_expr (TREE_PURPOSE (link));
	  TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);

	  /* And add a matching input constraint.  */
	  if (allows_reg)
	    {
	      sprintf (buf, "%u", i);

	      /* If there are multiple alternatives in the constraint,
		 handle each of them individually.  Those that allow register
		 will be replaced with operand number, the others will stay
		 unchanged.  */
	      if (strchr (p, ',') != NULL)
		{
		  size_t len = 0, buflen = strlen (buf);
		  char *beg, *end, *str, *dst;

		  /* First pass: compute the length of the rewritten
		     constraint string.  */
		  for (beg = p + 1;;)
		    {
		      end = strchr (beg, ',');
		      if (end == NULL)
			end = strchr (beg, '\0');
		      if ((size_t) (end - beg) < buflen)
			len += buflen + 1;
		      else
			len += end - beg + 1;
		      if (*end)
			beg = end + 1;
		      else
			break;
		    }

		  /* Second pass: emit the operand number for alternatives
		     that allow a register, keep the rest verbatim.  */
		  str = (char *) alloca (len);
		  for (beg = p + 1, dst = str;;)
		    {
		      const char *tem;
		      bool mem_p, reg_p, inout_p;

		      end = strchr (beg, ',');
		      if (end)
			*end = '\0';
		      beg[-1] = '=';
		      tem = beg - 1;
		      parse_output_constraint (&tem, i, 0, 0, &mem_p, &reg_p,
					       &inout_p, nullptr);
		      if (dst != str)
			*dst++ = ',';
		      if (reg_p)
			{
			  memcpy (dst, buf, buflen);
			  dst += buflen;
			}
		      else
			{
			  if (end)
			    len = end - beg;
			  else
			    len = strlen (beg);
			  memcpy (dst, beg, len);
			  dst += len;
			}
		      if (end)
			beg = end + 1;
		      else
			break;
		    }
		  *dst = '\0';
		  input = build_string (dst - str, str);
		}
	      else
		input = build_string (strlen (buf), buf);
	    }
	  else
	    input = build_string (constraint_len - 1, constraint + 1);

	  free (p);

	  input = build_tree_list (build_tree_list (NULL_TREE, input),
				   unshare_expr (TREE_VALUE (link)));
	  ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
	}
    }

  /* After all output operands have been gimplified, verify that each output
     operand is used at most once in case of hard register constraints.  Thus,
     error out in cases like
       asm ("" : "={0}" (x), "={1}" (x));
     or even for
       asm ("" : "=r" (x), "={1}" (x));

     FIXME: Ideally we would also error out for cases like
       int x;
       asm ("" : "=r" (x), "=r" (x));
     However, since code like that was previously accepted, erroring out now might
     break existing code.  On the other hand, we already error out for register
     asm like
       register int x asm ("0");
       asm ("" : "=r" (x), "=r" (x));
     Thus, maybe it wouldn't be too bad to also error out in the former
     non-register-asm case.
  */
  for (unsigned i = 0; i < vec_safe_length (outputs); ++i)
    {
      tree link = (*outputs)[i];
      tree op1 = TREE_VALUE (link);
      const char *constraint
	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      /* '{' introduces a hard-register constraint; only those are
	 checked for duplicate lvalues (see FIXME above).  */
      if (strchr (constraint, '{') != nullptr)
	for (unsigned j = 0; j < vec_safe_length (outputs); ++j)
	  {
	    if (i == j)
	      continue;
	    tree link2 = (*outputs)[j];
	    tree op2 = TREE_VALUE (link2);
	    if (op1 == op2)
	      {
		error ("multiple outputs to lvalue %qE", op2);
		return GS_ERROR;
	      }
	  }
    }

  /* Gimplify the input operands.  INPUT_NUM counts inputs from zero for
     constraint parsing; I keeps counting past the outputs and is used in
     diagnostics below.  */
  link_next = NULL_TREE;
  int input_num = 0;
  for (link = ASM_INPUTS (expr); link; ++input_num, ++i, link = link_next)
    {
      if (error_operand_p (TREE_VALUE (link)))
	return GS_ERROR;
      link_next = TREE_CHAIN (link);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      reg_info.operand = TREE_VALUE (link);
      bool ok = parse_input_constraint (&constraint, input_num, 0, noutputs, 0,
					oconstraints, &allows_mem, &allows_reg,
					&reg_info);
      if (!ok)
	{
	  ret = GS_ERROR;
	  /* NOTE(review): IS_INOUT is not read anywhere in this input loop;
	     this assignment appears to be dead, likely copied from the
	     output loop above — confirm before removing.  */
	  is_inout = false;
	}

      /* If we can't make copies, we can only accept memory.  */
      tree intype = TREE_TYPE (TREE_VALUE (link));
      if (TREE_ADDRESSABLE (intype)
	  || !COMPLETE_TYPE_P (intype)
	  || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (intype)))
	{
	  if (allows_mem)
	    allows_reg = 0;
	  else
	    {
	      error ("impossible constraint in %<asm%>");
	      error ("non-memory input %d must stay in memory", i);
	      return GS_ERROR;
	    }
	}

      /* If the operand is a memory input, it should be an lvalue.  */
      if (!allows_reg && allows_mem)
	{
	  tree inputv = TREE_VALUE (link);
	  STRIP_NOPS (inputv);
	  /* Side-effecting or void expressions can never be memory
	     lvalues; mark them as errors up front.  */
	  if (TREE_CODE (inputv) == PREDECREMENT_EXPR
	      || TREE_CODE (inputv) == PREINCREMENT_EXPR
	      || TREE_CODE (inputv) == POSTDECREMENT_EXPR
	      || TREE_CODE (inputv) == POSTINCREMENT_EXPR
	      || TREE_CODE (inputv) == MODIFY_EXPR
	      || VOID_TYPE_P (TREE_TYPE (inputv)))
	    TREE_VALUE (link) = error_mark_node;
	  tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
				is_gimple_lvalue, fb_lvalue | fb_mayfail);
	  if (tret != GS_ERROR)
	    {
	      /* Unlike output operands, memory inputs are not guaranteed
		 to be lvalues by the FE, and while the expressions are
		 marked addressable there, if it is e.g. a statement
		 expression, temporaries in it might not end up being
		 addressable.  They might be already used in the IL and thus
		 it is too late to make them addressable now though.  */
	      tree x = TREE_VALUE (link);
	      while (handled_component_p (x))
		x = TREE_OPERAND (x, 0);
	      if (TREE_CODE (x) == MEM_REF
		  && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
		x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
	      if ((VAR_P (x)
		   || TREE_CODE (x) == PARM_DECL
		   || TREE_CODE (x) == RESULT_DECL)
		  && !TREE_ADDRESSABLE (x)
		  && is_gimple_reg (x))
		{
		  warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link),
					       input_location), 0,
			      "memory input %d is not directly addressable",
			      i);
		  prepare_gimple_addressable (&TREE_VALUE (link), pre_p);
		}
	    }
	  mark_addressable (TREE_VALUE (link));
	  if (tret == GS_ERROR)
	    {
	      if (inputv != error_mark_node)
		error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), input_location),
			  "memory input %d is not directly addressable", i);
	      ret = tret;
	    }
	}
      else
	{
	  tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
				is_gimple_asm_val, fb_rvalue);
	  if (tret == GS_ERROR)
	    ret = tret;
	}

      TREE_CHAIN (link) = NULL_TREE;
      vec_safe_push (inputs, link);
    }

  /* Collect goto labels for asm goto.  */
  link_next = NULL_TREE;
  for (link = ASM_LABELS (expr); link; link = link_next)
    {
      link_next = TREE_CHAIN (link);
      TREE_CHAIN (link) = NULL_TREE;
      vec_safe_push (labels, link);
    }

  /* Do not add ASMs with errors to the gimple IL stream.  */
  if (ret != GS_ERROR)
    {
      stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
				   inputs, outputs, clobbers, labels);

      /* asm is volatile if it was marked by the user as volatile or
	 there are no outputs or this is an asm goto.  */
      gimple_asm_set_volatile (stmt,
			       ASM_VOLATILE_P (expr)
			       || noutputs == 0
			       || labels);
      gimple_asm_set_basic (stmt, ASM_BASIC_P (expr));
      gimple_asm_set_inline (stmt, ASM_INLINE_P (expr));

      gimplify_seq_add_stmt (pre_p, stmt);
    }

  return ret;
}
8373 :
8374 : /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
8375 : GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
8376 : gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
8377 : return to this function.
8378 :
8379 : FIXME should we complexify the prequeue handling instead? Or use flags
8380 : for all the cleanups and let the optimizer tighten them up? The current
8381 : code seems pretty fragile; it will break on a cleanup within any
8382 : non-conditional nesting. But any such nesting would be broken, anyway;
8383 : we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
8384 : and continues out of it. We can do that at the RTL level, though, so
8385 : having an optimizer to tighten up try/finally regions would be a Good
8386 : Thing. */
8387 :
static enum gimplify_status
gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
{
  gimple_stmt_iterator iter;
  gimple_seq body_sequence = NULL;

  tree temp = voidify_wrapper_expr (*expr_p, NULL);

  /* We only care about the number of conditions between the innermost
     CLEANUP_POINT_EXPR and the cleanup.  So save and reset the count and
     any cleanups collected outside the CLEANUP_POINT_EXPR.  */
  int old_conds = gimplify_ctxp->conditions;
  gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
  bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
  gimplify_ctxp->conditions = 0;
  gimplify_ctxp->conditional_cleanups = NULL;
  gimplify_ctxp->in_cleanup_point_expr = true;

  /* Gimplify the body; any cleanups inside it appear as
     GIMPLE_WITH_CLEANUP_EXPR markers in BODY_SEQUENCE.  */
  gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);

  /* Restore the saved gimplification context state.  */
  gimplify_ctxp->conditions = old_conds;
  gimplify_ctxp->conditional_cleanups = old_cleanups;
  gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;

  /* Convert each WCE marker into a GIMPLE_TRY wrapping everything that
     follows it in the sequence.  */
  for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
    {
      gimple *wce = gsi_stmt (iter);

      if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
	{
	  if (gsi_one_before_end_p (iter))
	    {
	      /* A cleanup at the very end protects nothing: splice the
		 cleanup statements inline (unless they are EH-only, in
		 which case they can never run) and drop the marker.  */
	      /* Note that gsi_insert_seq_before and gsi_remove do not
		 scan operands, unlike some other sequence mutators.  */
	      if (!gimple_wce_cleanup_eh_only (wce))
		gsi_insert_seq_before_without_update (&iter,
						      gimple_wce_cleanup (wce),
						      GSI_SAME_STMT);
	      gsi_remove (&iter, true);
	      break;
	    }
	  else
	    {
	      gtry *gtry;
	      gimple_seq seq;
	      enum gimple_try_flags kind;

	      /* EH-only cleanups become TRY_CATCH; normal cleanups run
		 on every exit and become TRY_FINALLY.  */
	      if (gimple_wce_cleanup_eh_only (wce))
		kind = GIMPLE_TRY_CATCH;
	      else
		kind = GIMPLE_TRY_FINALLY;
	      seq = gsi_split_seq_after (iter);

	      gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
	      /* Do not use gsi_replace here, as it may scan operands.
		 We want to do a simple structural modification only.  */
	      gsi_set_stmt (&iter, gtry);
	      /* Continue scanning inside the try body for further WCEs.  */
	      iter = gsi_start (gtry->eval);
	    }
	}
      else
	gsi_next (&iter);
    }

  gimplify_seq_add_seq (pre_p, body_sequence);
  if (temp)
    {
      /* voidify_wrapper_expr produced a temporary holding the value of
	 the wrapped expression; it becomes the result.  */
      *expr_p = temp;
      return GS_OK;
    }
  else
    {
      *expr_p = NULL;
      return GS_ALL_DONE;
    }
}
8464 :
8465 : /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
8466 : is the cleanup action required. EH_ONLY is true if the cleanup should
8467 : only be executed if an exception is thrown, not on normal exit.
8468 : If FORCE_UNCOND is true perform the cleanup unconditionally; this is
8469 : only valid for clobbers. */
8470 :
8471 : static void
8472 586617 : gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p,
8473 : bool force_uncond = false)
8474 : {
8475 586617 : gimple *wce;
8476 586617 : gimple_seq cleanup_stmts = NULL;
8477 :
8478 : /* Errors can result in improperly nested cleanups. Which results in
8479 : confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
8480 586617 : if (seen_error ())
8481 4770 : return;
8482 :
8483 581847 : if (gimple_conditional_context ())
8484 : {
8485 : /* If we're in a conditional context, this is more complex. We only
8486 : want to run the cleanup if we actually ran the initialization that
8487 : necessitates it, but we want to run it after the end of the
8488 : conditional context. So we wrap the try/finally around the
8489 : condition and use a flag to determine whether or not to actually
8490 : run the destructor. Thus
8491 :
8492 : test ? f(A()) : 0
8493 :
8494 : becomes (approximately)
8495 :
8496 : flag = 0;
8497 : try {
8498 : if (test) { A::A(temp); flag = 1; val = f(temp); }
8499 : else { val = 0; }
8500 : } finally {
8501 : if (flag) A::~A(temp);
8502 : }
8503 : val
8504 : */
8505 14415 : if (force_uncond)
8506 : {
 : /* FORCE_UNCOND (only valid for clobbers, per the header
 : comment) skips the runtime flag machinery entirely: the
 : cleanup is queued on the conditional-cleanups sequence
 : without any guard and without EH_ONLY marking. */
8507 13532 : gimplify_stmt (&cleanup, &cleanup_stmts);
8508 13532 : wce = gimple_build_wce (cleanup_stmts);
8509 13532 : gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
8510 : }
8511 : else
8512 : {
8513 883 : tree flag = create_tmp_var (boolean_type_node, "cleanup");
8514 883 : gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
8515 883 : gassign *ftrue = gimple_build_assign (flag, boolean_true_node);
8516 :
 : /* Guard the cleanup with the flag: only run it when the
 : initialization path that set FLAG to true was executed. */
8517 883 : cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
8518 883 : gimplify_stmt (&cleanup, &cleanup_stmts);
8519 883 : wce = gimple_build_wce (cleanup_stmts);
8520 883 : gimple_wce_set_cleanup_eh_only (wce, eh_only);
8521 :
8522 883 : gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
8523 883 : gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
8524 883 : gimplify_seq_add_stmt (pre_p, ftrue);
8525 :
8526 : /* Because of this manipulation, and the EH edges that jump
8527 : threading cannot redirect, the temporary (VAR) will appear
8528 : to be used uninitialized. Don't warn. */
8529 883 : suppress_warning (var, OPT_Wuninitialized);
8530 : }
8531 : }
8532 : else
8533 : {
 : /* Unconditional context: simply wrap CLEANUP in a
 : GIMPLE_WITH_CLEANUP_EXPR and append it to *PRE_P; it will be
 : resolved later by the cleanup-point lowering. */
8534 567432 : gimplify_stmt (&cleanup, &cleanup_stmts);
8535 567432 : wce = gimple_build_wce (cleanup_stmts);
8536 567432 : gimple_wce_set_cleanup_eh_only (wce, eh_only);
8537 567432 : gimplify_seq_add_stmt (pre_p, wce);
8538 : }
8539 : }
8540 :
8541 : /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
8542 :
8543 : static enum gimplify_status
8544 776969 : gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
8545 : {
 : /* TARG is the TARGET_EXPR, TEMP its slot variable, INIT the (possibly
 : already consumed) initializer. On success the whole expression is
 : replaced by TEMP. */
8546 776969 : tree targ = *expr_p;
8547 776969 : tree temp = TARGET_EXPR_SLOT (targ);
8548 776969 : tree init = TARGET_EXPR_INITIAL (targ);
8549 776969 : enum gimplify_status ret;
8550 :
8551 776969 : bool unpoison_empty_seq = false;
8552 776969 : gimple_stmt_iterator unpoison_it;
8553 :
8554 776969 : if (init)
8555 : {
8556 740312 : gimple_seq init_pre_p = NULL;
8557 740312 : bool is_vla = false;
8558 :
8559 : /* TARGET_EXPR temps aren't part of the enclosing block, so add it
8560 : to the temps list. Handle also variable length TARGET_EXPRs. */
8561 740312 : if (!poly_int_tree_p (DECL_SIZE (temp)))
8562 : {
8563 42 : if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
8564 1 : gimplify_type_sizes (TREE_TYPE (temp), &init_pre_p);
8565 : /* FIXME: this is correct only when the size of the type does
8566 : not depend on expressions evaluated in init. */
8567 42 : gimplify_vla_decl (temp, &init_pre_p);
8568 42 : is_vla = true;
8569 : }
8570 : else
8571 : {
8572 : /* Save location where we need to place unpoisoning. It's possible
8573 : that a variable will be converted to needs_to_live_in_memory. */
8574 740270 : unpoison_it = gsi_last (*pre_p);
8575 740270 : unpoison_empty_seq = gsi_end_p (unpoison_it);
8576 :
8577 740270 : gimple_add_tmp_var (temp);
8578 : }
8579 :
8580 740312 : if (var_needs_auto_init_p (temp) && VOID_TYPE_P (TREE_TYPE (init)))
8581 : {
8582 42532 : gimple_add_init_for_auto_var (temp, flag_auto_var_init, &init_pre_p);
8583 42532 : if (flag_auto_var_init == AUTO_INIT_PATTERN
8584 0 : && !is_gimple_reg (temp)
8585 42532 : && clear_padding_type_may_have_padding_p (TREE_TYPE (temp)))
8586 0 : gimple_add_padding_init_for_auto_var (temp, is_vla, &init_pre_p);
8587 : }
8588 :
8589 : /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
8590 : expression is supposed to initialize the slot. */
8591 740312 : if (VOID_TYPE_P (TREE_TYPE (init)))
8592 271406 : ret = gimplify_expr (&init, &init_pre_p, post_p, is_gimple_stmt,
8593 : fb_none);
8594 : else
8595 : {
 : /* Otherwise build "TEMP = INIT" and gimplify that; the
 : temporary INIT_EXPR node is freed again right away. */
8596 468906 : tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
8597 468906 : init = init_expr;
8598 468906 : ret = gimplify_expr (&init, &init_pre_p, post_p, is_gimple_stmt,
8599 : fb_none);
8600 468906 : init = NULL;
8601 468906 : ggc_free (init_expr);
8602 : }
8603 740312 : if (ret == GS_ERROR)
8604 : {
8605 : /* PR c++/28266 Make sure this is expanded only once. */
8606 10 : TARGET_EXPR_INITIAL (targ) = NULL_TREE;
8607 10 : return GS_ERROR;
8608 : }
8609 :
8610 740302 : if (init)
8611 0 : gimplify_and_add (init, &init_pre_p);
8612 :
8613 : /* Add a clobber for the temporary going out of scope, like
8614 : gimplify_bind_expr. But only if we did not promote the
8615 : temporary to static storage. */
8616 740302 : if (gimplify_ctxp->in_cleanup_point_expr
8617 595110 : && !TREE_STATIC (temp)
8618 1334076 : && needs_to_live_in_memory (temp))
8619 : {
8620 444299 : if (flag_stack_reuse == SR_ALL)
8621 : {
 : /* The clobber cleanup is pushed with force_uncond=true:
 : clobbers may run unconditionally (see
 : gimple_push_cleanup). */
8622 443854 : tree clobber = build_clobber (TREE_TYPE (temp),
8623 : CLOBBER_STORAGE_END);
8624 443854 : clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
8625 443854 : gimple_push_cleanup (temp, clobber, false, pre_p, true);
8626 : }
8627 444299 : if (asan_poisoned_variables
8628 445 : && DECL_ALIGN (temp) <= MAX_SUPPORTED_STACK_ALIGNMENT
8629 445 : && !TREE_STATIC (temp)
8630 445 : && dbg_cnt (asan_use_after_scope)
8631 444744 : && !gimplify_omp_ctxp)
8632 : {
8633 437 : tree asan_cleanup = build_asan_poison_call_expr (temp);
8634 437 : if (asan_cleanup)
8635 : {
8636 437 : if (unpoison_empty_seq)
8637 223 : unpoison_it = gsi_start (*pre_p);
8638 :
8639 437 : asan_poison_variable (temp, false, &unpoison_it,
8640 : unpoison_empty_seq);
8641 437 : gimple_push_cleanup (temp, asan_cleanup, false, pre_p);
8642 : }
8643 : }
8644 : }
8645 :
8646 740302 : gimple_seq_add_seq (pre_p, init_pre_p);
8647 :
8648 : /* If needed, push the cleanup for the temp. */
8649 740302 : if (TARGET_EXPR_CLEANUP (targ))
8650 142326 : gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
8651 142326 : CLEANUP_EH_ONLY (targ), pre_p);
8652 :
8653 : /* Only expand this once. */
8654 740302 : TREE_OPERAND (targ, 3) = init;
8655 740302 : TARGET_EXPR_INITIAL (targ) = NULL_TREE;
8656 : }
8657 : else
8658 : /* We should have expanded this before. */
8659 36657 : gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
8660 :
8661 776959 : *expr_p = temp;
8662 776959 : return GS_OK;
8663 : }
8664 :
8665 : /* Gimplification of expression trees. */
8666 :
8667 : /* Gimplify an expression which appears at statement context. The
8668 : corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
8669 : NULL, a new sequence is allocated.
8670 :
8671 : Return true if we actually added a statement to the queue. */
8672 :
8673 : bool
8674 101662103 : gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
8675 : {
8676 101662103 : gimple_seq_node last;
8677 :
 : /* Remember the tail of *SEQ_P; comparing it after gimplification
 : tells us whether gimplify_expr appended any statement. */
8678 101662103 : last = gimple_seq_last (*seq_p);
8679 101662103 : gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
8680 101662103 : return last != gimple_seq_last (*seq_p);
8681 : }
8682 :
8683 : /* Add FIRSTPRIVATE entries for DECL in the OpenMP the surrounding parallels
8684 : to CTX. If entries already exist, force them to be some flavor of private.
8685 : If there is no enclosing parallel, do nothing. */
8686 :
8687 : void
8688 123189 : omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
8689 : {
8690 123189 : splay_tree_node n;
8691 :
8692 123189 : if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE)
8693 : return;
8694 :
 : /* Walk outward through the enclosing OMP contexts. The walk stops
 : early only when DECL is found with a non-shared, non-map class
 : already recorded (the plain "else return" below). */
8695 36812 : do
8696 : {
8697 36812 : n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
8698 36812 : if (n != NULL)
8699 : {
8700 4289 : if (n->value & GOVD_SHARED)
8701 183 : n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
8702 4106 : else if (n->value & GOVD_MAP)
8703 55 : n->value |= GOVD_MAP_TO_ONLY;
8704 : else
8705 : return;
8706 : }
8707 32523 : else if ((ctx->region_type & ORT_TARGET) != 0)
8708 : {
8709 6944 : if (ctx->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
8710 4189 : omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
8711 : else
8712 2755 : omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
8713 : }
8714 25579 : else if (ctx->region_type != ORT_WORKSHARE
8715 : && ctx->region_type != ORT_TASKGROUP
8716 23750 : && ctx->region_type != ORT_SIMD
8717 23126 : && ctx->region_type != ORT_ACC
8718 23016 : && !(ctx->region_type & ORT_TARGET_DATA))
8719 14435 : omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
8720 :
8721 32761 : ctx = ctx->outer_context;
8722 : }
8723 32761 : while (ctx);
8724 : }
8725 :
8726 : /* Similarly for each of the type sizes of TYPE. */
8727 :
8728 : static void
8729 39862 : omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
8730 : {
8731 39862 : if (type == NULL || type == error_mark_node)
8732 : return;
8733 39858 : type = TYPE_MAIN_VARIANT (type);
8734 :
 : /* The privatized_types set both avoids repeated work and breaks
 : recursion cycles (e.g. a struct containing a pointer to itself). */
8735 39858 : if (ctx->privatized_types->add (type))
8736 : return;
8737 :
8738 30483 : switch (TREE_CODE (type))
8739 : {
8740 12975 : case INTEGER_TYPE:
8741 12975 : case ENUMERAL_TYPE:
8742 12975 : case BOOLEAN_TYPE:
8743 12975 : case REAL_TYPE:
8744 12975 : case FIXED_POINT_TYPE:
8745 12975 : omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
8746 12975 : omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
8747 12975 : break;
8748 :
8749 5363 : case ARRAY_TYPE:
 : /* Recurse into both the element type and the index domain. */
8750 5363 : omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
8751 5363 : omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
8752 5363 : break;
8753 :
8754 2502 : case RECORD_TYPE:
8755 2502 : case UNION_TYPE:
8756 2502 : case QUAL_UNION_TYPE:
8757 2502 : {
8758 2502 : tree field;
8759 26623 : for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
8760 24121 : if (TREE_CODE (field) == FIELD_DECL)
8761 : {
8762 8997 : omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
8763 8997 : omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
8764 : }
8765 : }
8766 : break;
8767 :
8768 8995 : case POINTER_TYPE:
8769 8995 : case REFERENCE_TYPE:
8770 8995 : omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
8771 8995 : break;
8772 :
8773 : default:
8774 : break;
8775 : }
8776 :
 : /* Regardless of the code above, the type's own size expressions and
 : any language-specific extras are firstprivatized. */
8777 30483 : omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
8778 30483 : omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
8779 30483 : lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
8780 : }
8781 :
8782 : /* Add an entry for DECL in the OMP context CTX with FLAGS. */
8783 :
8784 : static void
8785 708246 : omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
8786 : {
8787 708246 : splay_tree_node n;
8788 708246 : unsigned int nflags;
8789 708246 : tree t;
8790 :
8791 708246 : if (error_operand_p (decl) || ctx->region_type == ORT_NONE)
8792 : return;
8793 :
8794 : /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
8795 : there are constructors involved somewhere. Exception is a shared clause,
8796 : there is nothing privatized in that case. */
8797 708101 : if ((flags & GOVD_SHARED) == 0
8798 708101 : && (TREE_ADDRESSABLE (TREE_TYPE (decl))
8799 647877 : || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl))))
8800 7600 : flags |= GOVD_SEEN;
8801 :
8802 708101 : n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
8803 708101 : if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
8804 : {
8805 : /* We shouldn't be re-adding the decl with the same data
8806 : sharing class. */
8807 824 : gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
8808 824 : nflags = n->value | flags;
8809 : /* The only combination of data sharing classes we should see is
8810 : FIRSTPRIVATE and LASTPRIVATE. However, OpenACC permits
8811 : reduction variables to be used in data sharing clauses. */
8812 824 : gcc_assert ((ctx->region_type & ORT_ACC) != 0
8813 : || ((nflags & GOVD_DATA_SHARE_CLASS)
8814 : == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE))
8815 : || (flags & GOVD_DATA_SHARE_CLASS) == 0);
8816 824 : n->value = nflags;
8817 824 : return;
8818 : }
8819 :
8820 : /* When adding a variable-sized variable, we have to handle all sorts
8821 : of additional bits of data: the pointer replacement variable, and
8822 : the parameters of the type. */
8823 707277 : if (DECL_SIZE (decl) && !poly_int_tree_p (DECL_SIZE (decl)))
8824 : {
8825 : /* Add the pointer replacement variable as PRIVATE if the variable
8826 : replacement is private, else FIRSTPRIVATE since we'll need the
8827 : address of the original variable either for SHARED, or for the
8828 : copy into or out of the context. */
8829 1328 : if (!(flags & GOVD_LOCAL) && ctx->region_type != ORT_TASKGROUP)
8830 : {
8831 1238 : if (flags & GOVD_MAP)
8832 : nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
8833 531 : else if (flags & GOVD_PRIVATE)
8834 : nflags = GOVD_PRIVATE;
8835 441 : else if (((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
8836 50 : && (flags & GOVD_FIRSTPRIVATE))
8837 431 : || (ctx->region_type == ORT_TARGET_DATA
8838 6 : && (flags & GOVD_DATA_SHARE_CLASS) == 0))
8839 : nflags = GOVD_PRIVATE | GOVD_EXPLICIT;
8840 : else
8841 1238 : nflags = GOVD_FIRSTPRIVATE;
8842 1238 : nflags |= flags & GOVD_SEEN;
 : /* The VLA's DECL_VALUE_EXPR is *ptr; record the underlying
 : pointer variable PTR recursively with the class computed
 : above. */
8843 1238 : t = DECL_VALUE_EXPR (decl);
8844 1238 : gcc_assert (INDIRECT_REF_P (t));
8845 1238 : t = TREE_OPERAND (t, 0);
8846 1238 : gcc_assert (DECL_P (t));
8847 1238 : omp_add_variable (ctx, t, nflags);
8848 : }
8849 :
8850 : /* Add all of the variable and type parameters (which should have
8851 : been gimplified to a formal temporary) as FIRSTPRIVATE. */
8852 1328 : omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
8853 1328 : omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
8854 1328 : omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
8855 :
8856 : /* The variable-sized variable itself is never SHARED, only some form
8857 : of PRIVATE. The sharing would take place via the pointer variable
8858 : which we remapped above. */
8859 1328 : if (flags & GOVD_SHARED)
8860 284 : flags = GOVD_SHARED | GOVD_DEBUG_PRIVATE
8861 284 : | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
8862 :
8863 : /* We're going to make use of the TYPE_SIZE_UNIT at least in the
8864 : alloca statement we generate for the variable, so make sure it
8865 : is available. This isn't automatically needed for the SHARED
8866 : case, since we won't be allocating local storage then.
8867 : For local variables TYPE_SIZE_UNIT might not be gimplified yet,
8868 : in this case omp_notice_variable will be called later
8869 : on when it is gimplified. */
8870 1044 : else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
8871 1044 : && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
8872 275 : omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
8873 : }
8874 705949 : else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
8875 705949 : && omp_privatize_by_reference (decl))
8876 : {
8877 9816 : omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
8878 :
8879 : /* Similar to the direct variable sized case above, we'll need the
8880 : size of references being privatized. */
8881 9816 : if ((flags & GOVD_SHARED) == 0)
8882 : {
8883 6131 : t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
8884 6131 : if (t && DECL_P (t))
8885 1372 : omp_notice_variable (ctx, t, true);
8886 : }
8887 : }
8888 :
 : /* Finally record (or augment) DECL's flags in this context. */
8889 707277 : if (n != NULL)
8890 1812 : n->value |= flags;
8891 : else
8892 705465 : splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
8893 :
8894 : /* For reductions clauses in OpenACC loop directives, by default create a
8895 : copy clause on the enclosing parallel construct for carrying back the
8896 : results. */
8897 707277 : if (ctx->region_type == ORT_ACC && (flags & GOVD_REDUCTION))
8898 : {
8899 4716 : struct gimplify_omp_ctx *outer_ctx = ctx->outer_context;
8900 6454 : while (outer_ctx)
8901 : {
8902 5788 : n = splay_tree_lookup (outer_ctx->variables, (splay_tree_key)decl);
8903 5788 : if (n != NULL)
8904 : {
8905 : /* Ignore local variables and explicitly declared clauses. */
8906 3990 : if (n->value & (GOVD_LOCAL | GOVD_EXPLICIT))
8907 : break;
8908 443 : else if (outer_ctx->region_type == ORT_ACC_KERNELS)
8909 : {
8910 : /* According to the OpenACC spec, such a reduction variable
8911 : should already have a copy map on a kernels construct,
8912 : verify that here. */
8913 143 : gcc_assert (!(n->value & GOVD_FIRSTPRIVATE)
8914 : && (n->value & GOVD_MAP))
8915 : }
8916 300 : else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
8917 : {
8918 : /* Remove firstprivate and make it a copy map. */
8919 150 : n->value &= ~GOVD_FIRSTPRIVATE;
8920 150 : n->value |= GOVD_MAP;
8921 : }
8922 : }
8923 1798 : else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
8924 : {
8925 503 : splay_tree_insert (outer_ctx->variables, (splay_tree_key)decl,
8926 : GOVD_MAP | GOVD_SEEN);
8927 503 : break;
8928 : }
8929 1738 : outer_ctx = outer_ctx->outer_context;
8930 : }
8931 : }
8932 : }
8933 :
8934 : /* Notice a threadprivate variable DECL used in OMP context CTX.
8935 : This just prints out diagnostics about threadprivate variable uses
8936 : in untied tasks. If DECL2 is non-NULL, prevent this warning
8937 : on that variable. */
8938 :
8939 : static bool
8940 14183 : omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
8941 : tree decl2)
8942 : {
8943 14183 : splay_tree_node n;
8944 14183 : struct gimplify_omp_ctx *octx;
8945 :
 : /* Scan all enclosing contexts for target regions / order(concurrent)
 : regions, which may not reference threadprivate variables. The
 : splay-tree insertions record DECL so the diagnostic is emitted only
 : once per region. */
8946 28798 : for (octx = ctx; octx; octx = octx->outer_context)
8947 14615 : if ((octx->region_type & ORT_TARGET) != 0
8948 14615 : || octx->order_concurrent)
8949 : {
8950 112 : n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
8951 112 : if (n == NULL)
8952 : {
8953 20 : if (octx->order_concurrent)
8954 : {
8955 20 : error ("threadprivate variable %qE used in a region with"
8956 20 : " %<order(concurrent)%> clause", DECL_NAME (decl));
8957 20 : inform (octx->location, "enclosing region");
8958 : }
8959 : else
8960 : {
8961 0 : error ("threadprivate variable %qE used in target region",
8962 0 : DECL_NAME (decl));
8963 0 : inform (octx->location, "enclosing target region");
8964 : }
8965 20 : splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
8966 : }
8967 112 : if (decl2)
8968 0 : splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
8969 : }
8970 :
 : /* Both exits return false: a threadprivate variable is never
 : remapped, the function only emits diagnostics. */
8971 14183 : if (ctx->region_type != ORT_UNTIED_TASK)
8972 : return false;
8973 37 : n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
8974 37 : if (n == NULL)
8975 : {
8976 6 : error ("threadprivate variable %qE used in untied task",
8977 6 : DECL_NAME (decl));
8978 6 : inform (ctx->location, "enclosing task");
8979 6 : splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
8980 : }
8981 37 : if (decl2)
8982 4 : splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
8983 : return false;
8984 : }
8986 : /* Return true if global var DECL is device resident. */
8987 :
8988 : static bool
8989 641 : device_resident_p (tree decl)
8990 : {
8991 641 : tree attr = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl));
8992 :
8993 641 : if (!attr)
8994 : return false;
8995 :
 : /* Walk the attribute's clause chain (linked via TREE_PURPOSE) looking
 : for a GOMP_MAP_DEVICE_RESIDENT map kind. */
8996 0 : for (tree t = TREE_VALUE (attr); t; t = TREE_PURPOSE (t))
8997 : {
8998 0 : tree c = TREE_VALUE (t);
8999 0 : if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DEVICE_RESIDENT)
9000 : return true;
9001 : }
9002 :
9003 : return false;
9004 : }
9005 :
9006 : /* Return true if DECL has an ACC DECLARE attribute. */
9007 :
9008 : static bool
9009 9097 : is_oacc_declared (tree decl)
9010 : {
 : /* For a MEM_REF, test the attribute on the underlying base decl. */
9011 9097 : tree t = TREE_CODE (decl) == MEM_REF ? TREE_OPERAND (decl, 0) : decl;
9012 9097 : tree declared = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t));
9013 9097 : return declared != NULL_TREE;
9014 : }
9015 :
9016 : /* Determine outer default flags for DECL mentioned in an OMP region
9017 : but not declared in an enclosing clause.
9018 :
9019 : ??? Some compiler-generated variables (like SAVE_EXPRs) could be
9020 : remapped firstprivate instead of shared. To some extent this is
9021 : addressed in omp_firstprivatize_type_sizes, but not
9022 : effectively. */
9023 :
9024 : static unsigned
9025 47874 : omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl,
9026 : bool in_code, unsigned flags)
9027 : {
9028 47874 : enum omp_clause_default_kind default_kind = ctx->default_kind;
9029 47874 : enum omp_clause_default_kind kind;
9030 :
9031 47874 : kind = lang_hooks.decls.omp_predetermined_sharing (decl);
9032 47874 : if (ctx->region_type & ORT_TASK)
9033 : {
9034 3967 : tree detach_clause = omp_find_clause (ctx->clauses, OMP_CLAUSE_DETACH);
9035 :
9036 : /* The event-handle specified by a detach clause should always be firstprivate,
9037 : regardless of the current default. */
9038 4179 : if (detach_clause && OMP_CLAUSE_DECL (detach_clause) == decl)
9039 : kind = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
9040 : }
 : /* A predetermined sharing from the language hook overrides the
 : region's default clause. */
9041 47874 : if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
9042 : default_kind = kind;
9043 45060 : else if (VAR_P (decl) && TREE_STATIC (decl) && DECL_IN_CONSTANT_POOL (decl))
9044 : default_kind = OMP_CLAUSE_DEFAULT_SHARED;
9045 : /* For C/C++ default({,first}private), variables with static storage duration
9046 : declared in a namespace or global scope and referenced in construct
9047 : must be explicitly specified, i.e. acts as default(none). */
9048 45059 : else if ((default_kind == OMP_CLAUSE_DEFAULT_PRIVATE
9049 45059 : || default_kind == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
9050 368 : && VAR_P (decl)
9051 321 : && is_global_var (decl)
9052 184 : && (DECL_FILE_SCOPE_P (decl)
9053 104 : || (DECL_CONTEXT (decl)
9054 104 : && TREE_CODE (DECL_CONTEXT (decl)) == NAMESPACE_DECL))
9055 45187 : && !lang_GNU_Fortran ())
9056 : default_kind = OMP_CLAUSE_DEFAULT_NONE;
9057 :
9058 47745 : switch (default_kind)
9059 : {
9060 267 : case OMP_CLAUSE_DEFAULT_NONE:
9061 267 : {
9062 267 : const char *rtype;
9063 :
9064 267 : if (ctx->region_type & ORT_PARALLEL)
9065 : rtype = "parallel";
9066 108 : else if ((ctx->region_type & ORT_TASKLOOP) == ORT_TASKLOOP)
9067 : rtype = "taskloop";
9068 72 : else if (ctx->region_type & ORT_TASK)
9069 : rtype = "task";
9070 36 : else if (ctx->region_type & ORT_TEAMS)
9071 : rtype = "teams";
9072 : else
9073 0 : gcc_unreachable ();
9074 :
9075 267 : error ("%qE not specified in enclosing %qs",
9076 267 : DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype);
9077 267 : inform (ctx->location, "enclosing %qs", rtype);
9078 : }
 : /* Error recovery: after diagnosing default(none), fall through
 : and treat the variable as shared. */
9079 : /* FALLTHRU */
9080 44245 : case OMP_CLAUSE_DEFAULT_SHARED:
9081 44245 : flags |= GOVD_SHARED;
9082 44245 : break;
9083 155 : case OMP_CLAUSE_DEFAULT_PRIVATE:
9084 155 : flags |= GOVD_PRIVATE;
9085 155 : break;
9086 277 : case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
9087 277 : flags |= GOVD_FIRSTPRIVATE;
9088 277 : break;
9089 3197 : case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
9090 : /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
9091 3197 : gcc_assert ((ctx->region_type & ORT_TASK) != 0);
9092 3197 : if (struct gimplify_omp_ctx *octx = ctx->outer_context)
9093 : {
9094 2160 : omp_notice_variable (octx, decl, in_code);
9095 2990 : for (; octx; octx = octx->outer_context)
9096 : {
9097 2934 : splay_tree_node n2;
9098 :
9099 2934 : n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
9100 2934 : if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0
9101 5 : && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0))
9102 5 : continue;
9103 2929 : if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
9104 : {
9105 896 : flags |= GOVD_FIRSTPRIVATE;
9106 896 : goto found_outer;
9107 : }
9108 2033 : if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
9109 : {
9110 1208 : flags |= GOVD_SHARED;
9111 1208 : goto found_outer;
9112 : }
9113 : }
9114 : }
9115 :
 : /* No decisive outer context: parameters and function-local
 : variables become firstprivate, everything else shared. */
9116 1093 : if (TREE_CODE (decl) == PARM_DECL
9117 1093 : || (!is_global_var (decl)
9118 288 : && DECL_CONTEXT (decl) == current_function_decl))
9119 713 : flags |= GOVD_FIRSTPRIVATE;
9120 : else
9121 380 : flags |= GOVD_SHARED;
9122 47874 : found_outer:
9123 : break;
9124 :
9125 0 : default:
9126 0 : gcc_unreachable ();
9127 : }
9128 :
9129 47874 : return flags;
9130 : }
9131 :
9132 : /* Return string name for types of OpenACC constructs from ORT_* values. */
9133 :
9134 : static const char *
9135 570 : oacc_region_type_name (enum omp_region_type region_type)
9136 : {
 : /* Only the four OpenACC construct kinds are valid here; anything
 : else is a caller bug (gcc_unreachable). */
9137 570 : switch (region_type)
9138 : {
9139 : case ORT_ACC_DATA:
9140 : return "data";
9141 140 : case ORT_ACC_PARALLEL:
9142 140 : return "parallel";
9143 140 : case ORT_ACC_KERNELS:
9144 140 : return "kernels";
9145 140 : case ORT_ACC_SERIAL:
9146 140 : return "serial";
9147 0 : default:
9148 0 : gcc_unreachable ();
9149 : }
9150 : }
9151 :
9152 : /* Determine outer default flags for DECL mentioned in an OACC region
9153 : but not declared in an enclosing clause. */
9154 :
9155 : static unsigned
9156 8835 : oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags)
9157 : {
 : /* CTX_DEFAULT is the context whose 'default' clause governs DECL:
 : either CTX itself, or the nearest enclosing 'data' construct with
 : a non-shared default. */
9158 8835 : struct gimplify_omp_ctx *ctx_default = ctx;
9159 : /* If no 'default' clause appears on this compute construct... */
9160 8835 : if (ctx_default->default_kind == OMP_CLAUSE_DEFAULT_SHARED)
9161 : {
9162 : /* ..., see if one appears on a lexically containing 'data'
9163 : construct. */
9164 9614 : while ((ctx_default = ctx_default->outer_context))
9165 : {
9166 1534 : if (ctx_default->region_type == ORT_ACC_DATA
9167 1534 : && ctx_default->default_kind != OMP_CLAUSE_DEFAULT_SHARED)
9168 : break;
9169 : }
9170 : /* If not, reset. */
9171 8256 : if (!ctx_default)
9172 8659 : ctx_default = ctx;
9173 : }
9174 :
9175 8835 : bool on_device = false;
9176 8835 : bool is_private = false;
9177 8835 : bool declared = is_oacc_declared (decl);
9178 8835 : tree type = TREE_TYPE (decl);
9179 :
9180 8835 : if (omp_privatize_by_reference (decl))
9181 338 : type = TREE_TYPE (type);
9182 :
9183 : /* For Fortran COMMON blocks, only used variables in those blocks are
9184 : transferred and remapped. The block itself will have a private clause to
9185 : avoid transfering the data twice.
9186 : The hook evaluates to false by default. For a variable in Fortran's COMMON
9187 : or EQUIVALENCE block, returns 'true' (as we have shared=false) - as only
9188 : the variables in such a COMMON/EQUIVALENCE block shall be privatized not
9189 : the whole block. For C++ and Fortran, it can also be true under certain
9190 : other conditions, if DECL_HAS_VALUE_EXPR. */
9191 8835 : if (RECORD_OR_UNION_TYPE_P (type))
9192 982 : is_private = lang_hooks.decls.omp_disregard_value_expr (decl, false);
9193 :
9194 8835 : if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0
9195 8835 : && is_global_var (decl)
9196 641 : && device_resident_p (decl)
9197 8835 : && !is_private)
9198 : {
9199 0 : on_device = true;
9200 0 : flags |= GOVD_MAP_TO_ONLY;
9201 : }
9202 :
9203 8835 : switch (ctx->region_type)
9204 : {
9205 1456 : case ORT_ACC_KERNELS:
9206 1456 : if (is_private)
9207 0 : flags |= GOVD_FIRSTPRIVATE;
9208 1456 : else if (AGGREGATE_TYPE_P (type))
9209 : {
9210 : /* Aggregates default to 'present_or_copy', or 'present'. */
9211 414 : if (ctx_default->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
9212 396 : flags |= GOVD_MAP;
9213 : else
9214 18 : flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
9215 : }
9216 : else
9217 : /* Scalars default to 'copy'. */
9218 1042 : flags |= GOVD_MAP | GOVD_MAP_FORCE;
9219 :
9220 : break;
9221 :
9222 7379 : case ORT_ACC_PARALLEL:
9223 7379 : case ORT_ACC_SERIAL:
9224 7379 : if (is_private)
9225 0 : flags |= GOVD_FIRSTPRIVATE;
9226 7379 : else if (on_device || declared)
9227 17 : flags |= GOVD_MAP;
9228 7362 : else if (AGGREGATE_TYPE_P (type))
9229 : {
9230 : /* Aggregates default to 'present_or_copy', or 'present'. */
9231 3697 : if (ctx_default->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
9232 3431 : flags |= GOVD_MAP;
9233 : else
9234 266 : flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
9235 : }
9236 : else
9237 : /* Scalars default to 'firstprivate'. */
9238 3665 : flags |= GOVD_FIRSTPRIVATE;
9239 :
9240 : break;
9241 :
9242 0 : default:
9243 0 : gcc_unreachable ();
9244 : }
9245 :
9246 8835 : if (DECL_ARTIFICIAL (decl))
9247 : ; /* We can get compiler-generated decls, and should not complain
9248 : about them. */
9249 8527 : else if (ctx_default->default_kind == OMP_CLAUSE_DEFAULT_NONE)
9250 : {
 : /* default(none): diagnose, naming both the compute construct and
 : (if different) the data construct carrying the clause. */
9251 420 : error ("%qE not specified in enclosing OpenACC %qs construct",
9252 210 : DECL_NAME (lang_hooks.decls.omp_report_decl (decl)),
9253 : oacc_region_type_name (ctx->region_type));
9254 210 : if (ctx_default != ctx)
9255 150 : inform (ctx->location, "enclosing OpenACC %qs construct and",
9256 : oacc_region_type_name (ctx->region_type));
9257 210 : inform (ctx_default->location,
9258 : "enclosing OpenACC %qs construct with %qs clause",
9259 : oacc_region_type_name (ctx_default->region_type),
9260 : "default(none)");
9261 : }
9262 8317 : else if (ctx_default->default_kind == OMP_CLAUSE_DEFAULT_PRESENT)
9263 : ; /* Handled above. */
9264 : else
9265 7784 : gcc_checking_assert (ctx_default->default_kind == OMP_CLAUSE_DEFAULT_SHARED);
9266 :
9267 8835 : return flags;
9268 : }
9269 :
9270 : /* Record the fact that DECL was used within the OMP context CTX.
9271 : IN_CODE is true when real code uses DECL, and false when we should
9272 : merely emit default(none) errors. Return true if DECL is going to
9273 : be remapped and thus DECL shouldn't be gimplified into its
9274 : DECL_VALUE_EXPR (if any). */
9275 :
9276 : static bool
9277 3877015 : omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
9278 : {
9279 3877015 : splay_tree_node n;
9280 3877015 : unsigned flags = in_code ? GOVD_SEEN : 0;
9281 3877015 : bool ret = false, shared;
9282 :
9283 3877015 : if (error_operand_p (decl))
9284 : return false;
9285 :
9286 3877015 : if (DECL_ARTIFICIAL (decl))
9287 : {
9288 2211773 : tree attr = lookup_attribute ("omp allocate var", DECL_ATTRIBUTES (decl));
9289 2211773 : if (attr)
9290 543 : decl = TREE_VALUE (TREE_VALUE (attr));
9291 : }
9292 :
9293 3877015 : if (ctx->region_type == ORT_NONE)
9294 248 : return lang_hooks.decls.omp_disregard_value_expr (decl, false);
9295 :
9296 3876767 : if (is_global_var (decl))
9297 : {
9298 : /* Threadprivate variables are predetermined. */
9299 500908 : if (DECL_THREAD_LOCAL_P (decl))
9300 13565 : return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
9301 :
9302 487343 : if (DECL_HAS_VALUE_EXPR_P (decl))
9303 : {
9304 4380 : if (ctx->region_type & ORT_ACC)
9305 : /* For OpenACC, defer expansion of value to avoid transfering
9306 : privatized common block data instead of im-/explicitly
9307 : transferred variables which are in common blocks. */
9308 : ;
9309 : else
9310 : {
9311 2104 : tree value = get_base_address (DECL_VALUE_EXPR (decl));
9312 :
9313 2104 : if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
9314 618 : return omp_notice_threadprivate_variable (ctx, decl, value);
9315 : }
9316 : }
9317 :
9318 486725 : if (gimplify_omp_ctxp->outer_context == NULL
9319 113266 : && VAR_P (decl)
9320 599991 : && oacc_get_fn_attrib (current_function_decl))
9321 : {
9322 765 : location_t loc = DECL_SOURCE_LOCATION (decl);
9323 :
9324 765 : if (lookup_attribute ("omp declare target link",
9325 765 : DECL_ATTRIBUTES (decl)))
9326 : {
9327 48 : error_at (loc,
9328 : "%qE with %<link%> clause used in %<routine%> function",
9329 24 : DECL_NAME (decl));
9330 24 : return false;
9331 : }
9332 741 : else if (!lookup_attribute ("omp declare target",
9333 741 : DECL_ATTRIBUTES (decl)))
9334 : {
9335 132 : error_at (loc,
9336 : "%qE requires a %<declare%> directive for use "
9337 66 : "in a %<routine%> function", DECL_NAME (decl));
9338 66 : return false;
9339 : }
9340 : }
9341 : }
9342 :
9343 3862494 : n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
9344 3862494 : if ((ctx->region_type & ORT_TARGET) != 0)
9345 : {
9346 824993 : if (n == NULL)
9347 : {
9348 44469 : unsigned nflags = flags;
9349 44469 : if ((ctx->region_type & ORT_ACC) == 0)
9350 : {
9351 33519 : bool is_declare_target = false;
9352 33519 : if (is_global_var (decl)
9353 33519 : && varpool_node::get_create (decl)->offloadable)
9354 : {
9355 7002 : struct gimplify_omp_ctx *octx;
9356 7002 : for (octx = ctx->outer_context;
9357 7010 : octx; octx = octx->outer_context)
9358 : {
9359 9 : n = splay_tree_lookup (octx->variables,
9360 : (splay_tree_key)decl);
9361 9 : if (n
9362 9 : && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED
9363 9 : && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
9364 : break;
9365 : }
9366 7002 : is_declare_target = octx == NULL;
9367 : }
9368 7002 : if (!is_declare_target)
9369 : {
9370 26518 : int gdmk;
9371 26518 : enum omp_clause_defaultmap_kind kind;
9372 26518 : if (lang_hooks.decls.omp_allocatable_p (decl))
9373 : gdmk = GDMK_ALLOCATABLE;
9374 26174 : else if (lang_hooks.decls.omp_scalar_target_p (decl))
9375 : gdmk = GDMK_SCALAR_TARGET;
9376 26099 : else if (lang_hooks.decls.omp_scalar_p (decl, false))
9377 : gdmk = GDMK_SCALAR;
9378 10962 : else if (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
9379 10962 : || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
9380 2977 : && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
9381 : == POINTER_TYPE)))
9382 : gdmk = GDMK_POINTER;
9383 : else
9384 : gdmk = GDMK_AGGREGATE;
9385 26518 : kind = lang_hooks.decls.omp_predetermined_mapping (decl);
9386 26518 : if (kind != OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED)
9387 : {
9388 945 : if (kind == OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE)
9389 101 : nflags |= GOVD_FIRSTPRIVATE;
9390 844 : else if (kind == OMP_CLAUSE_DEFAULTMAP_TO)
9391 844 : nflags |= GOVD_MAP | GOVD_MAP_TO_ONLY;
9392 : else
9393 0 : gcc_unreachable ();
9394 : }
9395 25573 : else if (ctx->defaultmap[gdmk] == 0)
9396 : {
9397 77 : tree d = lang_hooks.decls.omp_report_decl (decl);
9398 77 : error ("%qE not specified in enclosing %<target%>",
9399 77 : DECL_NAME (d));
9400 77 : inform (ctx->location, "enclosing %<target%>");
9401 : }
9402 25496 : else if (ctx->defaultmap[gdmk]
9403 25496 : & (GOVD_MAP_0LEN_ARRAY | GOVD_FIRSTPRIVATE))
9404 8977 : nflags |= ctx->defaultmap[gdmk];
9405 16519 : else if (ctx->defaultmap[gdmk] & GOVD_MAP_FORCE_PRESENT)
9406 : {
9407 42 : gcc_assert (ctx->defaultmap[gdmk] & GOVD_MAP);
9408 42 : nflags |= ctx->defaultmap[gdmk] | GOVD_MAP_ALLOC_ONLY;
9409 : }
9410 : else
9411 : {
9412 16477 : gcc_assert (ctx->defaultmap[gdmk] & GOVD_MAP);
9413 16477 : nflags |= ctx->defaultmap[gdmk] & ~GOVD_MAP;
9414 : }
9415 : }
9416 : }
9417 :
9418 44469 : struct gimplify_omp_ctx *octx = ctx->outer_context;
9419 44469 : if ((ctx->region_type & ORT_ACC) && octx)
9420 : {
9421 : /* Look in outer OpenACC contexts, to see if there's a
9422 : data attribute for this variable. */
9423 3535 : omp_notice_variable (octx, decl, in_code);
9424 :
9425 5307 : for (; octx; octx = octx->outer_context)
9426 : {
9427 3883 : if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)))
9428 : break;
9429 3883 : splay_tree_node n2
9430 3883 : = splay_tree_lookup (octx->variables,
9431 : (splay_tree_key) decl);
9432 3883 : if (n2)
9433 : {
9434 2111 : if (octx->region_type == ORT_ACC_HOST_DATA)
9435 4 : error ("variable %qE declared in enclosing "
9436 4 : "%<host_data%> region", DECL_NAME (decl));
9437 2111 : nflags |= GOVD_MAP;
9438 2111 : if (octx->region_type == ORT_ACC_DATA
9439 2107 : && (n2->value & GOVD_MAP_0LEN_ARRAY))
9440 288 : nflags |= GOVD_MAP_0LEN_ARRAY;
9441 2111 : goto found_outer;
9442 : }
9443 : }
9444 : }
9445 :
9446 42358 : if ((nflags & ~(GOVD_MAP_TO_ONLY | GOVD_MAP_FROM_ONLY
9447 : | GOVD_MAP_ALLOC_ONLY)) == flags)
9448 : {
9449 32394 : tree type = TREE_TYPE (decl);
9450 32394 : location_t loc = DECL_SOURCE_LOCATION (decl);
9451 :
9452 32394 : if (gimplify_omp_ctxp->target_firstprivatize_array_bases
9453 32394 : && omp_privatize_by_reference (decl))
9454 28 : type = TREE_TYPE (type);
9455 :
9456 32394 : if (!verify_type_context (loc, TCTX_OMP_MAP_IMP_REF, type))
9457 : /* Check if TYPE can appear in a target region.
9458 : verify_type_context has already issued an error if it
9459 : can't. */
9460 0 : nflags |= GOVD_MAP | GOVD_EXPLICIT;
9461 32394 : else if (!omp_mappable_type (type))
9462 : {
9463 8 : error ("%qD referenced in target region does not have "
9464 : "a mappable type", decl);
9465 8 : nflags |= GOVD_MAP | GOVD_EXPLICIT;
9466 : }
9467 : else
9468 : {
9469 32386 : if ((ctx->region_type & ORT_ACC) != 0)
9470 8835 : nflags = oacc_default_clause (ctx, decl, flags);
9471 : else
9472 23551 : nflags |= GOVD_MAP;
9473 : }
9474 : }
9475 9964 : found_outer:
9476 44469 : omp_add_variable (ctx, decl, nflags);
9477 44469 : if (ctx->region_type & ORT_ACC)
9478 : /* For OpenACC, as remarked above, defer expansion. */
9479 : shared = false;
9480 : else
9481 33519 : shared = (nflags & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0;
9482 44469 : ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
9483 : }
9484 : else
9485 : {
9486 780524 : if (ctx->region_type & ORT_ACC)
9487 : /* For OpenACC, as remarked above, defer expansion. */
9488 : shared = false;
9489 : else
9490 484390 : shared = ((n->value | flags)
9491 484390 : & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0;
9492 780524 : ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
9493 : /* If nothing changed, there's nothing left to do. */
9494 780524 : if ((n->value & flags) == flags)
9495 : return ret;
9496 16034 : flags |= n->value;
9497 16034 : n->value = flags;
9498 : }
9499 60503 : goto do_outer;
9500 : }
9501 :
9502 3037501 : if (n == NULL)
9503 : {
9504 1180903 : if (ctx->region_type == ORT_WORKSHARE
9505 : || ctx->region_type == ORT_TASKGROUP
9506 560590 : || ctx->region_type == ORT_SIMD
9507 288547 : || ctx->region_type == ORT_ACC
9508 92322 : || (ctx->region_type & ORT_TARGET_DATA) != 0)
9509 1133029 : goto do_outer;
9510 :
9511 47874 : flags = omp_default_clause (ctx, decl, in_code, flags);
9512 :
9513 47874 : if ((flags & GOVD_PRIVATE)
9514 47874 : && lang_hooks.decls.omp_private_outer_ref (decl))
9515 6 : flags |= GOVD_PRIVATE_OUTER_REF;
9516 :
9517 47874 : omp_add_variable (ctx, decl, flags);
9518 :
9519 47874 : shared = (flags & GOVD_SHARED) != 0;
9520 47874 : ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
9521 47874 : goto do_outer;
9522 : }
9523 :
9524 : /* Don't mark as GOVD_SEEN addressable temporaries seen only in simd
9525 : lb, b or incr expressions, those shouldn't be turned into simd arrays. */
9526 1856598 : if (ctx->region_type == ORT_SIMD
9527 151155 : && ctx->in_for_exprs
9528 70 : && ((n->value & (GOVD_PRIVATE | GOVD_SEEN | GOVD_EXPLICIT))
9529 : == GOVD_PRIVATE))
9530 1856598 : flags &= ~GOVD_SEEN;
9531 :
9532 1856598 : if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
9533 31752 : && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
9534 1888311 : && DECL_SIZE (decl))
9535 : {
9536 31712 : tree size;
9537 31712 : if (!poly_int_tree_p (DECL_SIZE (decl)))
9538 : {
9539 148 : splay_tree_node n2;
9540 148 : tree t = DECL_VALUE_EXPR (decl);
9541 148 : gcc_assert (INDIRECT_REF_P (t));
9542 148 : t = TREE_OPERAND (t, 0);
9543 148 : gcc_assert (DECL_P (t));
9544 148 : n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
9545 148 : n2->value |= GOVD_SEEN;
9546 : }
9547 31564 : else if (omp_privatize_by_reference (decl)
9548 4180 : && (size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))))
9549 35707 : && !poly_int_tree_p (size))
9550 : {
9551 1324 : splay_tree_node n2;
9552 1324 : gcc_assert (DECL_P (size));
9553 1324 : n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) size);
9554 1324 : if (n2)
9555 617 : omp_notice_variable (ctx, size, true);
9556 : }
9557 : }
9558 :
9559 1856598 : if (ctx->region_type & ORT_ACC)
9560 : /* For OpenACC, as remarked above, defer expansion. */
9561 : shared = false;
9562 : else
9563 1674781 : shared = ((flags | n->value) & GOVD_SHARED) != 0;
9564 1856598 : ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
9565 :
9566 : /* If nothing changed, there's nothing left to do. */
9567 1856598 : if ((n->value & flags) == flags)
9568 : return ret;
9569 31713 : flags |= n->value;
9570 31713 : n->value = flags;
9571 :
9572 1273119 : do_outer:
9573 : /* If the variable is private in the current context, then we don't
9574 : need to propagate anything to an outer context. */
9575 1273119 : if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
9576 : return ret;
9577 1262748 : if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
9578 : == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
9579 : return ret;
9580 1262688 : if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
9581 : | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
9582 : == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
9583 : return ret;
9584 1262688 : if (ctx->outer_context
9585 1262688 : && omp_notice_variable (ctx->outer_context, decl, in_code))
9586 : return true;
9587 : return ret;
9588 : }
9589 :
9590 : /* Verify that DECL is private within CTX. If there's specific information
9591 : to the contrary in the innermost scope, generate an error. */
9592 :
9593 : static bool
9594 50125 : omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
9595 : {
9596 95812 : splay_tree_node n;
9597 :
           : /* Look up DECL's data-sharing flags recorded in this context. */
9598 95812 : n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
9599 95812 : if (n != NULL)
9600 : {
9601 17952 : if (n->value & GOVD_SHARED)
9602 : {
9603 407 : if (ctx == gimplify_omp_ctxp)
9604 : {
           : /* An iteration variable recorded as shared in the innermost
           : context is an error; diagnose it (with "linear" wording for
           : SIMD) and force it private so gimplification can proceed. */
9605 0 : if (simd)
9606 0 : error ("iteration variable %qE is predetermined linear",
9607 0 : DECL_NAME (decl));
9608 : else
9609 0 : error ("iteration variable %qE should be private",
9610 0 : DECL_NAME (decl));
9611 0 : n->value = GOVD_PRIVATE;
9612 0 : return true;
9613 : }
9614 : else
9615 : return false;
9616 : }
9617 17545 : else if ((n->value & GOVD_EXPLICIT) != 0
9618 9703 : && (ctx == gimplify_omp_ctxp
9619 334 : || (ctx->region_type == ORT_COMBINED_PARALLEL
9620 214 : && gimplify_omp_ctxp->outer_context == ctx)))
9621 : {
           : /* Explicit clauses on the iteration variable that conflict
           : with its predetermined privatization are diagnosed here. */
9622 9583 : if ((n->value & GOVD_FIRSTPRIVATE) != 0)
9623 4 : error ("iteration variable %qE should not be firstprivate",
9624 4 : DECL_NAME (decl));
9625 9579 : else if ((n->value & GOVD_REDUCTION) != 0)
9626 8 : error ("iteration variable %qE should not be reduction",
9627 8 : DECL_NAME (decl));
9628 9571 : else if (simd != 1 && (n->value & GOVD_LINEAR) != 0)
9629 58 : error ("iteration variable %qE should not be linear",
9630 58 : DECL_NAME (decl));
9631 : }
           : /* Private only if recorded in the innermost context, or in a
           : combined parallel immediately enclosing it. */
9632 17545 : return (ctx == gimplify_omp_ctxp
9633 17545 : || (ctx->region_type == ORT_COMBINED_PARALLEL
9634 19421 : && gimplify_omp_ctxp->outer_context == ctx));
9635 : }
9636 :
           : /* DECL is not recorded here; recurse outward only through region
           : kinds that bind to their enclosing context. */
9637 77860 : if (ctx->region_type != ORT_WORKSHARE
9638 : && ctx->region_type != ORT_TASKGROUP
9639 49418 : && ctx->region_type != ORT_SIMD
9640 36153 : && ctx->region_type != ORT_ACC)
9641 : return false;
9642 53880 : else if (ctx->outer_context)
9643 : return omp_is_private (ctx->outer_context, decl, simd);
9644 : return false;
9645 : }
9646 :
9647 : /* Return true if DECL is private within a parallel region
9648 : that binds to the current construct's context or in parallel
9649 : region's REDUCTION clause. */
9650 :
9651 : static bool
9652 11547 : omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
9653 : {
9654 11619 : splay_tree_node n;
9655 :
           : /* Walk outward through the enclosing gimplification contexts. */
9656 11619 : do
9657 : {
9658 11619 : ctx = ctx->outer_context;
9659 11619 : if (ctx == NULL)
9660 : {
           : /* Ran out of contexts: globals are shared by definition. */
9661 2764 : if (is_global_var (decl))
9662 : return false;
9663 :
9664 : /* References might be private, but might be shared too,
9665 : when checking for copyprivate, assume they might be
9666 : private, otherwise assume they might be shared. */
9667 1290 : if (copyprivate)
9668 : return true;
9669 :
9670 1244 : if (omp_privatize_by_reference (decl))
9671 : return false;
9672 :
9673 : /* Treat C++ privatized non-static data members outside
9674 : of the privatization the same. */
9675 1198 : if (omp_member_access_dummy_var (decl))
9676 : return false;
9677 :
9678 : return true;
9679 : }
9680 :
9681 8855 : n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9682 :
           : /* In a target/target-data region without a data-sharing class for
           : DECL, keep looking only for target data (or an unmapped decl);
           : a mapped variable in a target region is not private. */
9683 8855 : if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
9684 1291 : && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
9685 : {
9686 242 : if ((ctx->region_type & ORT_TARGET_DATA) != 0
9687 238 : || n == NULL
9688 230 : || (n->value & GOVD_MAP) == 0)
9689 12 : continue;
9690 : return false;
9691 : }
9692 :
9693 7564 : if (n != NULL)
9694 : {
           : /* GOVD_LOCAL member-access dummies don't count as private. */
9695 5360 : if ((n->value & GOVD_LOCAL) != 0
9696 5360 : && omp_member_access_dummy_var (decl))
9697 : return false;
9698 5321 : return (n->value & GOVD_SHARED) == 0;
9699 : }
9700 :
           : /* Keep searching past regions that merely bind to an enclosing
           : parallel; stop once a parallel-like region is reached. */
9701 3253 : if (ctx->region_type == ORT_WORKSHARE
9702 : || ctx->region_type == ORT_TASKGROUP
9703 3197 : || ctx->region_type == ORT_SIMD
9704 3193 : || ctx->region_type == ORT_ACC)
9705 60 : continue;
9706 :
9707 : break;
9708 : }
9709 : while (1);
9710 : return false;
9711 : }
9712 :
9713 : /* Callback for walk_tree to find a DECL_EXPR for the given DECL. */
9714 :
9715 : static tree
9716 3148 : find_decl_expr (tree *tp, int *walk_subtrees, void *data)
9717 : {
9718 3148 : tree t = *tp;
9719 :
9720 : /* Return T, terminating the walk, when it is the DECL_EXPR for DATA. */
9721 3148 : if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data)
9722 : return t;
9723 :
           : /* Do not descend into types or other declarations. */
9724 2724 : if (IS_TYPE_OR_DECL_P (t))
9725 452 : *walk_subtrees = 0;
9726 : return NULL_TREE;
9727 : }
9728 :
9729 :
9730 : /* Gimplify the affinity clause but effectively ignore it.
9731 : Generate:
9732 : var = begin;
9733 : if ((step > 0) ? var <= end : var > end)
9734 : locator_var_expr; */
9735 :
9736 : static void
9737 374 : gimplify_omp_affinity (tree *list_p, gimple_seq *pre_p)
9738 : {
           : /* LAST_ITER/LAST_BIND cache the iterator spec and loop BIND_EXPR
           : of the previous clause so consecutive clauses sharing one
           : iterator reuse a single loop body. */
9739 374 : tree last_iter = NULL_TREE;
9740 374 : tree last_bind = NULL_TREE;
9741 374 : tree label = NULL_TREE;
9742 374 : tree *last_body = NULL;
9743 1011 : for (tree c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
9744 637 : if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_AFFINITY)
9745 : {
9746 637 : tree t = OMP_CLAUSE_DECL (c);
9747 637 : if (OMP_ITERATOR_DECL_P (t))
9748 : {
           : /* Iterator form: TREE_PURPOSE holds the iterator spec,
           : TREE_VALUE the locator expression (null once handled). */
9749 389 : if (TREE_VALUE (t) == null_pointer_node)
9750 201 : continue;
9751 188 : if (TREE_PURPOSE (t) != last_iter)
9752 : {
           : /* A new iterator spec: flush the previous loop, then
           : gimplify this spec's begin/end/step/orig-step. */
9753 127 : if (last_bind)
9754 : {
9755 9 : append_to_statement_list (label, last_body);
9756 9 : gimplify_and_add (last_bind, pre_p);
9757 9 : last_bind = NULL_TREE;
9758 : }
9759 274 : for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
9760 : {
9761 147 : if (gimplify_expr (&TREE_VEC_ELT (it, 1), pre_p, NULL,
9762 : is_gimple_val, fb_rvalue) == GS_ERROR
9763 147 : || gimplify_expr (&TREE_VEC_ELT (it, 2), pre_p, NULL,
9764 : is_gimple_val, fb_rvalue) == GS_ERROR
9765 147 : || gimplify_expr (&TREE_VEC_ELT (it, 3), pre_p, NULL,
9766 : is_gimple_val, fb_rvalue) == GS_ERROR
9767 294 : || (gimplify_expr (&TREE_VEC_ELT (it, 4), pre_p, NULL,
9768 : is_gimple_val, fb_rvalue)
9769 : == GS_ERROR))
9770 0 : return;
9771 : }
9772 127 : last_iter = TREE_PURPOSE (t);
9773 127 : tree block = TREE_VEC_ELT (TREE_PURPOSE (t), 5);
9774 127 : last_bind = build3 (BIND_EXPR, void_type_node, BLOCK_VARS (block),
9775 : NULL, block);
9776 127 : last_body = &BIND_EXPR_BODY (last_bind);
9777 127 : tree cond = NULL_TREE;
9778 127 : location_t loc = OMP_CLAUSE_LOCATION (c);
9779 274 : for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
9780 : {
9781 147 : tree var = TREE_VEC_ELT (it, 0);
9782 147 : tree begin = TREE_VEC_ELT (it, 1);
9783 147 : tree end = TREE_VEC_ELT (it, 2);
9784 147 : tree step = TREE_VEC_ELT (it, 3);
9785 147 : loc = DECL_SOURCE_LOCATION (var);
9786 147 : tree tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
9787 : var, begin);
9788 147 : append_to_statement_list_force (tem, last_body);
9789 :
           : /* Direction-aware bound test:
           : step > 0 ? var <= end : var > end,
           : conjoined over all iterators in the spec. */
9790 147 : tree cond1 = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
9791 147 : step, build_zero_cst (TREE_TYPE (step)));
9792 147 : tree cond2 = fold_build2_loc (loc, LE_EXPR, boolean_type_node,
9793 : var, end);
9794 147 : tree cond3 = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
9795 : var, end);
9796 147 : cond1 = fold_build3_loc (loc, COND_EXPR, boolean_type_node,
9797 : cond1, cond2, cond3);
9798 147 : if (cond)
9799 20 : cond = fold_build2_loc (loc, TRUTH_AND_EXPR,
9800 : boolean_type_node, cond, cond1);
9801 : else
9802 : cond = cond1;
9803 : }
9804 127 : tree cont_label = create_artificial_label (loc);
9805 127 : label = build1 (LABEL_EXPR, void_type_node, cont_label);
9806 127 : tree tem = fold_build3_loc (loc, COND_EXPR, void_type_node, cond,
9807 : void_node,
9808 : build_and_jump (&cont_label));
9809 127 : append_to_statement_list_force (tem, last_body);
9810 : }
           : /* A COMPOUND_EXPR carries side effects to emit before the
           : locator expression itself. */
9811 188 : if (TREE_CODE (TREE_VALUE (t)) == COMPOUND_EXPR)
9812 : {
9813 0 : append_to_statement_list (TREE_OPERAND (TREE_VALUE (t), 0),
9814 : last_body);
9815 0 : TREE_VALUE (t) = TREE_OPERAND (TREE_VALUE (t), 1);
9816 : }
9817 188 : if (error_operand_p (TREE_VALUE (t)))
9818 : return;
9819 188 : append_to_statement_list_force (TREE_VALUE (t), last_body);
           : /* Mark this locator as consumed. */
9820 188 : TREE_VALUE (t) = null_pointer_node;
9821 : }
9822 : else
9823 : {
           : /* Plain (non-iterator) locator: flush any pending loop and
           : gimplify the locator expression directly into PRE_P. */
9824 248 : if (last_bind)
9825 : {
9826 9 : append_to_statement_list (label, last_body);
9827 9 : gimplify_and_add (last_bind, pre_p);
9828 9 : last_bind = NULL_TREE;
9829 : }
9830 248 : if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
9831 : {
9832 0 : gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
9833 : NULL, is_gimple_val, fb_rvalue);
9834 0 : OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
9835 : }
9836 248 : if (error_operand_p (OMP_CLAUSE_DECL (c)))
9837 : return;
9838 248 : if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
9839 : is_gimple_lvalue, fb_lvalue) == GS_ERROR)
9840 : return;
9841 248 : gimplify_and_add (OMP_CLAUSE_DECL (c), pre_p);
9842 : }
9843 : }
           : /* Flush the final pending loop, if any. */
9844 374 : if (last_bind)
9845 : {
9846 109 : append_to_statement_list (label, last_body);
9847 109 : gimplify_and_add (last_bind, pre_p);
9848 : }
9849 : return;
9850 : }
9851 :
9852 : /* Returns a tree expression containing the total iteration count of the
9853 : OpenMP iterator IT. */
9854 :
9855 : static tree
9856 320 : compute_omp_iterator_count (tree it, gimple_seq *pre_p)
9857 : {
9858 320 : tree tcnt = size_one_node;
9859 706 : for (; it; it = TREE_CHAIN (it))
9860 : {
           : /* Gimplify begin (1), end (2), step (3) and orig-step (4)
           : into gimple values; bail out on any error. */
9861 386 : if (gimplify_expr (&TREE_VEC_ELT (it, 1), pre_p, NULL,
9862 : is_gimple_val, fb_rvalue) == GS_ERROR
9863 386 : || gimplify_expr (&TREE_VEC_ELT (it, 2), pre_p, NULL,
9864 : is_gimple_val, fb_rvalue) == GS_ERROR
9865 386 : || gimplify_expr (&TREE_VEC_ELT (it, 3), pre_p, NULL,
9866 : is_gimple_val, fb_rvalue) == GS_ERROR
9867 772 : || (gimplify_expr (&TREE_VEC_ELT (it, 4), pre_p, NULL,
9868 : is_gimple_val, fb_rvalue) == GS_ERROR))
9869 0 : return NULL_TREE;
9870 386 : tree var = TREE_VEC_ELT (it, 0);
9871 386 : tree begin = TREE_VEC_ELT (it, 1);
9872 386 : tree end = TREE_VEC_ELT (it, 2);
9873 386 : tree step = TREE_VEC_ELT (it, 3);
9874 386 : tree orig_step = TREE_VEC_ELT (it, 4);
9875 386 : tree type = TREE_TYPE (var);
9876 386 : tree stype = TREE_TYPE (step);
9877 386 : location_t loc = DECL_SOURCE_LOCATION (var);
9878 386 : tree endmbegin;
9879 : /* Compute count for this iterator as
9880 : orig_step > 0
9881 : ? (begin < end ? (end - begin + (step - 1)) / step : 0)
9882 : : (begin > end ? (end - begin + (step + 1)) / step : 0)
9883 : and compute product of those for the entire clause. */
9884 386 : if (POINTER_TYPE_P (type))
9885 42 : endmbegin = fold_build2_loc (loc, POINTER_DIFF_EXPR, stype, end, begin);
9886 : else
9887 344 : endmbegin = fold_build2_loc (loc, MINUS_EXPR, type, end, begin);
9888 386 : tree stepm1 = fold_build2_loc (loc, MINUS_EXPR, stype, step,
9889 : build_int_cst (stype, 1));
9890 386 : tree stepp1 = fold_build2_loc (loc, PLUS_EXPR, stype, step,
9891 : build_int_cst (stype, 1));
9892 386 : tree pos = fold_build2_loc (loc, PLUS_EXPR, stype,
9893 : unshare_expr (endmbegin), stepm1);
9894 386 : pos = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype, pos, step);
9895 386 : tree neg = fold_build2_loc (loc, PLUS_EXPR, stype, endmbegin, stepp1);
9896 386 : if (TYPE_UNSIGNED (stype))
9897 : {
           : /* Unsigned step: negate both dividend and divisor so the
           : truncating division of the negative-direction count still
           : rounds toward zero correctly. */
9898 31 : neg = fold_build1_loc (loc, NEGATE_EXPR, stype, neg);
9899 31 : step = fold_build1_loc (loc, NEGATE_EXPR, stype, step);
9900 : }
9901 386 : neg = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype, neg, step);
9902 386 : step = NULL_TREE;
9903 386 : tree cond = fold_build2_loc (loc, LT_EXPR, boolean_type_node, begin, end);
9904 386 : pos = fold_build3_loc (loc, COND_EXPR, stype, cond, pos,
9905 : build_int_cst (stype, 0));
9906 386 : cond = fold_build2_loc (loc, LT_EXPR, boolean_type_node, end, begin);
9907 386 : neg = fold_build3_loc (loc, COND_EXPR, stype, cond, neg,
9908 : build_int_cst (stype, 0));
9909 386 : tree osteptype = TREE_TYPE (orig_step);
           : /* ORIG_STEP's sign selects between the two direction counts. */
9910 386 : cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node, orig_step,
9911 : build_int_cst (osteptype, 0));
9912 386 : tree cnt = fold_build3_loc (loc, COND_EXPR, stype, cond, pos, neg);
9913 386 : cnt = fold_convert_loc (loc, sizetype, cnt);
9914 386 : if (gimplify_expr (&cnt, pre_p, NULL, is_gimple_val,
9915 : fb_rvalue) == GS_ERROR)
9916 : return NULL_TREE;
           : /* Accumulate the product over all iterators in the list. */
9917 386 : tcnt = size_binop_loc (loc, MULT_EXPR, tcnt, cnt);
9918 : }
9919 320 : if (gimplify_expr (&tcnt, pre_p, NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
9920 : return NULL_TREE;
9921 :
9922 320 : return tcnt;
9923 : }
9924 :
9925 : /* Build loops iterating over the space defined by the OpenMP iterator IT.
9926 : Returns a pointer to the BIND_EXPR_BODY in the innermost loop body.
9927 : LAST_BIND is set to point to the BIND_EXPR containing the whole loop. */
9928 :
9929 : static tree *
9930 320 : build_omp_iterator_loop (tree it, gimple_seq *pre_p, tree *last_bind)
9931 : {
           : /* Flush the loop built for a previous iterator, if any. */
9932 320 : if (*last_bind)
9933 31 : gimplify_and_add (*last_bind, pre_p);
           : /* Element 5 of the iterator vector is the BLOCK holding the
           : iteration variables. */
9934 320 : tree block = TREE_VEC_ELT (it, 5);
9935 320 : *last_bind = build3 (BIND_EXPR, void_type_node,
9936 320 : BLOCK_VARS (block), NULL, block);
9937 320 : TREE_SIDE_EFFECTS (*last_bind) = 1;
9938 320 : tree *p = &BIND_EXPR_BODY (*last_bind);
9939 706 : for (; it; it = TREE_CHAIN (it))
9940 : {
9941 386 : tree var = TREE_VEC_ELT (it, 0);
9942 386 : tree begin = TREE_VEC_ELT (it, 1);
9943 386 : tree end = TREE_VEC_ELT (it, 2);
9944 386 : tree step = TREE_VEC_ELT (it, 3);
9945 386 : tree orig_step = TREE_VEC_ELT (it, 4);
9946 386 : tree type = TREE_TYPE (var);
9947 386 : location_t loc = DECL_SOURCE_LOCATION (var);
9948 : /* Emit:
9949 : var = begin;
9950 : goto cond_label;
9951 : beg_label:
9952 : ...
9953 : var = var + step;
9954 : cond_label:
9955 : if (orig_step > 0) {
9956 : if (var < end) goto beg_label;
9957 : } else {
9958 : if (var > end) goto beg_label;
9959 : }
9960 : for each iterator, with inner iterators added to
9961 : the ... above. */
9962 386 : tree beg_label = create_artificial_label (loc);
9963 386 : tree cond_label = NULL_TREE;
9964 386 : tree tem = build2_loc (loc, MODIFY_EXPR, void_type_node, var, begin);
9965 386 : append_to_statement_list_force (tem, p);
9966 386 : tem = build_and_jump (&cond_label);
9967 386 : append_to_statement_list_force (tem, p);
9968 386 : tem = build1 (LABEL_EXPR, void_type_node, beg_label);
9969 386 : append_to_statement_list (tem, p);
           : /* Empty placeholder BIND_EXPR; the next (inner) iterator's
           : statements — or the caller's body — go inside it. */
9970 386 : tree bind = build3 (BIND_EXPR, void_type_node, NULL_TREE,
9971 : NULL_TREE, NULL_TREE);
9972 386 : TREE_SIDE_EFFECTS (bind) = 1;
9973 386 : SET_EXPR_LOCATION (bind, loc);
9974 386 : append_to_statement_list_force (bind, p);
           : /* Pointer iteration variables advance via POINTER_PLUS_EXPR. */
9975 386 : if (POINTER_TYPE_P (type))
9976 42 : tem = build2_loc (loc, POINTER_PLUS_EXPR, type,
9977 : var, fold_convert_loc (loc, sizetype, step));
9978 : else
9979 344 : tem = build2_loc (loc, PLUS_EXPR, type, var, step);
9980 386 : tem = build2_loc (loc, MODIFY_EXPR, void_type_node, var, tem);
9981 386 : append_to_statement_list_force (tem, p);
9982 386 : tem = build1 (LABEL_EXPR, void_type_node, cond_label);
9983 386 : append_to_statement_list (tem, p);
9984 386 : tree cond = fold_build2_loc (loc, LT_EXPR, boolean_type_node, var, end);
9985 386 : tree pos = fold_build3_loc (loc, COND_EXPR, void_type_node, cond,
9986 : build_and_jump (&beg_label), void_node);
9987 386 : cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node, var, end);
9988 386 : tree neg = fold_build3_loc (loc, COND_EXPR, void_type_node, cond,
9989 : build_and_jump (&beg_label), void_node);
9990 386 : tree osteptype = TREE_TYPE (orig_step);
9991 386 : cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node, orig_step,
9992 : build_int_cst (osteptype, 0));
9993 386 : tem = fold_build3_loc (loc, COND_EXPR, void_type_node, cond, pos, neg);
9994 386 : append_to_statement_list_force (tem, p);
           : /* Descend: subsequent statements nest inside this iterator's
           : placeholder bind. */
9995 386 : p = &BIND_EXPR_BODY (bind);
9996 : }
9997 :
9998 320 : return p;
9999 : }
10000 :
10001 :
10002 : /* Callback for walk_tree to find a VAR_DECL (stored in DATA) in the
10003 : tree TP. */
10004 :
10005 : static tree
10006 2306 : find_var_decl (tree *tp, int *, void *data)
10007 : {
           : /* Returning the node terminates the walk with a non-null result. */
10008 2306 : if (*tp == (tree) data)
10009 206 : return *tp;
10010 :
10011 : return NULL_TREE;
10012 : }
10013 :
10014 : /* Returns an element-by-element copy of OMP iterator tree IT.
           : ELEM_COUNT, if non-negative, is the length of the new TREE_VEC;
           : by default the copy has the same length as IT. NOTE(review):
           : callers passing ELEM_COUNT presumably pass a value >=
           : TREE_VEC_LENGTH (IT) — the copy loop below assumes so. */
10015 :
10016 : static tree
10017 198 : copy_omp_iterator (tree it, int elem_count = -1)
10018 : {
10019 198 : if (elem_count < 0)
10020 60 : elem_count = TREE_VEC_LENGTH (it);
10021 198 : tree new_it = make_tree_vec (elem_count);
           : /* Copy the original elements; any extra slots stay null. */
10022 1386 : for (int i = 0; i < TREE_VEC_LENGTH (it); i++)
10023 1188 : TREE_VEC_ELT (new_it, i) = TREE_VEC_ELT (it, i);
10024 :
10025 198 : return new_it;
10026 : }
10027 :
10028 : /* Helper function for walk_tree in remap_omp_iterator_var. */
10029 :
10030 : static tree
10031 871 : remap_omp_iterator_var_1 (tree *tp, int *, void *data)
10032 : {
10033 871 : tree old_var = ((tree *) data)[0];
10034 871 : tree new_var = ((tree *) data)[1];
10035 :
10036 871 : if (*tp == old_var)
10037 92 : *tp = new_var;
10038 871 : return NULL_TREE;
10039 : }
10040 :
10041 : /* Replace instances of OLD_VAR in TP with NEW_VAR. */
10042 :
10043 : static void
10044 184 : remap_omp_iterator_var (tree *tp, tree old_var, tree new_var)
10045 : {
           : /* Pack the pair into an array for the walk_tree callback. */
10046 184 : tree vars[2] = { old_var, new_var };
10047 184 : walk_tree (tp, remap_omp_iterator_var_1, vars, NULL);
10048 : }
10049 :
10050 : /* Scan through all clauses using OpenMP iterators in LIST_P. If any
10051 : clauses have iterators with variables that are not used by the clause
10052 : decl or size, issue a warning and replace the iterator with a copy with
10053 : the unused variables removed. */
10054 :
10055 : static void
10056 25163 : remove_unused_omp_iterator_vars (tree *list_p)
10057 : {
           : /* iter_vars[i] records the variable set behind new_iterators[i],
           : so clauses with identical used-variable sets share one rebuilt
           : iterator. */
10058 25163 : auto_vec< vec<tree> > iter_vars;
10059 25163 : auto_vec<tree> new_iterators;
10060 :
10061 68665 : for (tree c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
10062 : {
10063 43502 : if (!OMP_CLAUSE_HAS_ITERATORS (c))
10064 43446 : continue;
10065 146 : auto_vec<tree> vars;
10066 146 : bool need_new_iterators = false;
           : /* Collect iterator variables actually referenced by the clause
           : decl or size expressions. */
10067 416 : for (tree it = OMP_CLAUSE_ITERATORS (c); it; it = TREE_CHAIN (it))
10068 : {
10069 270 : tree var = TREE_VEC_ELT (it, 0);
10070 270 : tree t = walk_tree (&OMP_CLAUSE_DECL (c), find_var_decl, var, NULL);
10071 270 : if (t == NULL_TREE)
10072 126 : t = walk_tree (&OMP_CLAUSE_SIZE (c), find_var_decl, var, NULL);
10073 126 : if (t == NULL_TREE)
10074 : {
           : /* Only warn for data-movement clauses (map to/from,
           : to, from) where an unused iterator is suspicious. */
10075 64 : need_new_iterators = true;
10076 64 : if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
10077 44 : && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_TO
10078 28 : || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FROM))
10079 44 : || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TO
10080 92 : || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FROM)
10081 40 : warning_at (OMP_CLAUSE_LOCATION (c), OPT_Wopenmp,
10082 : "iterator variable %qE not used in clause "
10083 40 : "expression", DECL_NAME (var));
10084 : }
10085 : else
10086 206 : vars.safe_push (var);
10087 : }
10088 146 : if (!need_new_iterators)
10089 82 : continue;
10090 64 : if (need_new_iterators && vars.is_empty ())
10091 : {
10092 : /* No iteration variables are used in the clause - remove the
10093 : iterator from the clause. */
10094 8 : OMP_CLAUSE_ITERATORS (c) = NULL_TREE;
10095 8 : continue;
10096 : }
10097 :
10098 : /* If a new iterator has been created for the current set of used
10099 : iterator variables, then use that as the iterator. Otherwise,
10100 : create a new iterator for the current iterator variable set. */
10101 : unsigned i;
10102 92 : for (i = 0; i < iter_vars.length (); i++)
10103 : {
10104 112 : if (vars.length () != iter_vars[i].length ())
10105 0 : continue;
10106 : bool identical_p = true;
10107 136 : for (unsigned j = 0; j < vars.length () && identical_p; j++)
10108 80 : identical_p = vars[j] == iter_vars[i][j];
10109 :
10110 56 : if (identical_p)
10111 : break;
10112 : }
10113 56 : if (i < iter_vars.length ())
10114 20 : OMP_CLAUSE_ITERATORS (c) = new_iterators[i];
10115 : else
10116 : {
10117 36 : tree new_iters = NULL_TREE;
10118 36 : tree *new_iters_p = &new_iters;
10119 36 : tree new_vars = NULL_TREE;
10120 36 : tree *new_vars_p = &new_vars;
10121 36 : i = 0;
           : /* Copy only the used iterators, giving each a fresh decl
           : chained into NEW_VARS for the replacement BLOCK. */
10122 112 : for (tree it = OMP_CLAUSE_ITERATORS (c); it && i < vars.length();
10123 76 : it = TREE_CHAIN (it))
10124 : {
10125 76 : tree var = TREE_VEC_ELT (it, 0);
10126 76 : if (var == vars[i])
10127 : {
10128 60 : *new_iters_p = copy_omp_iterator (it);
10129 60 : *new_vars_p = build_decl (OMP_CLAUSE_LOCATION (c), VAR_DECL,
10130 60 : DECL_NAME (var), TREE_TYPE (var));
10131 60 : DECL_ARTIFICIAL (*new_vars_p) = 1;
10132 60 : DECL_CONTEXT (*new_vars_p) = DECL_CONTEXT (var);
10133 60 : TREE_VEC_ELT (*new_iters_p, 0) = *new_vars_p;
10134 60 : new_iters_p = &TREE_CHAIN (*new_iters_p);
10135 60 : new_vars_p = &DECL_CHAIN (*new_vars_p);
10136 60 : i++;
10137 : }
10138 : }
           : /* Element 5 of the iterator vector holds the BLOCK for the
           : iteration variables. */
10139 36 : tree new_block = make_node (BLOCK);
10140 36 : BLOCK_VARS (new_block) = new_vars;
10141 36 : TREE_VEC_ELT (new_iters, 5) = new_block;
10142 36 : new_iterators.safe_push (new_iters);
10143 36 : iter_vars.safe_push (vars.copy ());
10144 36 : OMP_CLAUSE_ITERATORS (c) = new_iters;
10145 : }
10146 :
10147 : /* Remap clause to use the new variables. */
10148 56 : i = 0;
10149 148 : for (tree it = OMP_CLAUSE_ITERATORS (c); it; it = TREE_CHAIN (it))
10150 : {
10151 92 : tree old_var = vars[i++];
10152 92 : tree new_var = TREE_VEC_ELT (it, 0);
10153 92 : remap_omp_iterator_var (&OMP_CLAUSE_DECL (c), old_var, new_var);
10154 92 : remap_omp_iterator_var (&OMP_CLAUSE_SIZE (c), old_var, new_var);
10155 : }
10156 146 : }
10157 :
           : /* Release the per-entry vectors copied into ITER_VARS above. */
10158 25199 : for (unsigned i = 0; i < iter_vars.length (); i++)
10159 36 : iter_vars[i].release ();
10160 25163 : }
10161 :
           : /* Per-iterator-space bookkeeping used when expanding OpenMP
           : iterators into loops. */
10162 140 : struct iterator_loop_info_t
10163 : {
           : /* BIND_EXPR wrapping the generated expansion loop. */
10164 : tree bind;
           : /* Total iteration count of the iterator space. */
10165 : tree count;
           : /* Index temporary advanced inside the loop body. */
10166 : tree index;
           : /* Label marking the insertion point within the loop body. */
10167 : tree body_label;
           : /* NOTE(review): presumably the clauses sharing this loop;
           : populated outside the visible code — confirm at use sites. */
10168 : auto_vec<tree> clauses;
10169 : };
10170 :
           : /* Maps a clause's iterator tree to the loop built for it, so
           : clauses with identical iterators share one loop. */
10171 : typedef hash_map<tree, iterator_loop_info_t> iterator_loop_info_map_t;
10172 :
/* Builds a loop to expand any OpenMP iterators in the clauses in LIST_P,
   reusing any previously built loops if they use the same set of iterators.
   Generated Gimple statements are placed into LOOPS_SEQ_P.  The clause
   iterators are updated with information on how and where to insert code into
   the loop body.  */

static void
build_omp_iterators_loops (tree *list_p, gimple_seq *loops_seq_p)
{
  /* One loop per distinct OMP_CLAUSE_ITERATORS tree.  */
  iterator_loop_info_map_t loops;

  for (tree c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
    {
      if (!OMP_CLAUSE_HAS_ITERATORS (c))
	continue;

      bool built_p;
      iterator_loop_info_t &loop
	= loops.get_or_insert (OMP_CLAUSE_ITERATORS (c), &built_p);

      /* First time we see this iterator set: build the loop skeleton.  */
      if (!built_p)
	{
	  loop.count = compute_omp_iterator_count (OMP_CLAUSE_ITERATORS (c),
						   loops_seq_p);
	  if (!loop.count)
	    continue;
	  if (integer_zerop (loop.count))
	    warning_at (OMP_CLAUSE_LOCATION (c), OPT_Wopenmp,
			"iteration count is zero");

	  loop.bind = NULL_TREE;
	  tree *body = build_omp_iterator_loop (OMP_CLAUSE_ITERATORS (c),
						loops_seq_p, &loop.bind);

	  loop.index = create_tmp_var (sizetype);
	  SET_EXPR_LOCATION (loop.bind, OMP_CLAUSE_LOCATION (c));

	  /* BEFORE LOOP: */
	  /* idx = -1; */
	  /* This should be initialized to before the individual elements,
	     as idx is pre-incremented in the loop body.  */
	  gimple *assign = gimple_build_assign (loop.index, size_int (-1));
	  gimple_seq_add_stmt (loops_seq_p, assign);

	  /* IN LOOP BODY: */
	  /* Create a label so we can find this point later.  */
	  loop.body_label = create_artificial_label (OMP_CLAUSE_LOCATION (c));
	  tree tem = build1 (LABEL_EXPR, void_type_node, loop.body_label);
	  append_to_statement_list_force (tem, body);

	  /* idx += 2; */
	  tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
			    void_type_node, loop.index,
			    size_binop (PLUS_EXPR, loop.index, size_int (2)));
	  append_to_statement_list_force (tem, body);
	}

      /* Create array to hold expanded values.  Length is 2 * count + 1:
	 one header slot plus two slots per iteration.  */
      tree last_count_2 = size_binop (MULT_EXPR, loop.count, size_int (2));
      tree arr_length = size_binop (PLUS_EXPR, last_count_2, size_int (1));
      tree elems = NULL_TREE;
      if (TREE_CONSTANT (arr_length))
	{
	  tree type = build_array_type (ptr_type_node,
					build_index_type (arr_length));
	  elems = create_tmp_var_raw (type, "omp_iter_data");
	  TREE_ADDRESSABLE (elems) = 1;
	  gimple_add_tmp_var (elems);
	}
      else
	{
	  /* Handle dynamic sizes.  */
	  sorry ("dynamic iterator sizes not implemented yet");
	}

      /* BEFORE LOOP: */
      /* elems[0] = count; */
      tree lhs = build4 (ARRAY_REF, ptr_type_node, elems, size_int (0),
			 NULL_TREE, NULL_TREE);
      tree tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
			     void_type_node, lhs, loop.count);
      gimplify_and_add (tem, loops_seq_p);

      /* Make a copy of the iterator with extra info at the end.  The three
	 appended elements are consumed later when code is inserted at the
	 loop-body label.  */
      int elem_count = TREE_VEC_LENGTH (OMP_CLAUSE_ITERATORS (c));
      tree new_iterator = copy_omp_iterator (OMP_CLAUSE_ITERATORS (c),
					     elem_count + 3);
      TREE_VEC_ELT (new_iterator, elem_count) = loop.body_label;
      TREE_VEC_ELT (new_iterator, elem_count + 1) = elems;
      TREE_VEC_ELT (new_iterator, elem_count + 2) = loop.index;
      TREE_CHAIN (new_iterator) = TREE_CHAIN (OMP_CLAUSE_ITERATORS (c));
      OMP_CLAUSE_ITERATORS (c) = new_iterator;

      loop.clauses.safe_push (c);
    }

  /* Now gimplify and add all the loops that were built.  */
  for (hash_map<tree, iterator_loop_info_t>::iterator it = loops.begin ();
       it != loops.end (); ++it)
    gimplify_and_add ((*it).second.bind, loops_seq_p);
}
10274 :
/* Helper function for enter_omp_iterator_loop_context.  Recursively search
   *LOOPS_SEQ_P (descending into GIMPLE_BIND bodies and GIMPLE_TRY eval
   sequences) for the body label of the expanded ITERATOR.  Every bind
   entered on the successful path is pushed onto the gimplify context's
   bind-expression stack; binds entered on failed subtrees are popped again.
   Returns the sequence that directly contains the label, or NULL if it is
   not found in this subtree.  */

static gimple_seq *
enter_omp_iterator_loop_context_1 (tree iterator, gimple_seq *loops_seq_p)
{
  /* Drill into the nested bind expressions to get to the loop body.  */
  for (gimple_stmt_iterator gsi = gsi_start (*loops_seq_p);
       !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);

      switch (gimple_code (stmt))
	{
	case GIMPLE_BIND:
	  {
	    gbind *bind_stmt = as_a<gbind *> (stmt);
	    gimple_push_bind_expr (bind_stmt);
	    gimple_seq *bind_body_p = gimple_bind_body_ptr (bind_stmt);
	    gimple_seq *seq =
	      enter_omp_iterator_loop_context_1 (iterator, bind_body_p);
	    if (seq)
	      return seq;
	    /* Label was not inside this bind: undo the push.  */
	    gimple_pop_bind_expr ();
	  }
	  break;
	case GIMPLE_TRY:
	  {
	    gimple_seq *try_eval_p = gimple_try_eval_ptr (stmt);
	    gimple_seq *seq =
	      enter_omp_iterator_loop_context_1 (iterator, try_eval_p);
	    if (seq)
	      return seq;
	  }
	  break;
	case GIMPLE_LABEL:
	  {
	    glabel *label_stmt = as_a<glabel *> (stmt);
	    tree label = gimple_label_label (label_stmt);
	    /* NOTE(review): element 6 is presumably the body label appended
	       by build_omp_iterators_loops (elem_count + 0) -- confirm the
	       iterator layout matches.  */
	    if (label == TREE_VEC_ELT (iterator, 6))
	      return loops_seq_p;
	  }
	  break;
	default:
	  break;
	}
    }

  return NULL;
}
10324 :
10325 : /* Enter the Gimplification context in LOOPS_SEQ_P for the iterator loop
10326 : associated with OpenMP clause C. Returns the gimple_seq for the loop body
10327 : if C has OpenMP iterators, or ALT_SEQ_P if not. */
10328 :
10329 : static gimple_seq *
10330 63387 : enter_omp_iterator_loop_context (tree c, gimple_seq *loops_seq_p,
10331 : gimple_seq *alt_seq_p)
10332 : {
10333 63387 : if (!OMP_CLAUSE_HAS_ITERATORS (c))
10334 : return alt_seq_p;
10335 :
10336 330 : push_gimplify_context ();
10337 :
10338 330 : gimple_seq *seq = enter_omp_iterator_loop_context_1 (OMP_CLAUSE_ITERATORS (c),
10339 : loops_seq_p);
10340 330 : gcc_assert (seq);
10341 : return seq;
10342 : }
10343 :
10344 : /* Enter the Gimplification context in STMT for the iterator loop associated
10345 : with OpenMP clause C. Returns the gimple_seq for the loop body if C has
10346 : OpenMP iterators, or ALT_SEQ_P if not. */
10347 :
10348 : gimple_seq *
10349 192 : enter_omp_iterator_loop_context (tree c, gomp_target *stmt,
10350 : gimple_seq *alt_seq_p)
10351 : {
10352 192 : gimple_seq *loops_seq_p = gimple_omp_target_iterator_loops_ptr (stmt);
10353 192 : return enter_omp_iterator_loop_context (c, loops_seq_p, alt_seq_p);
10354 : }
10355 :
10356 : /* Exit the Gimplification context for the OpenMP clause C. */
10357 :
10358 : void
10359 63477 : exit_omp_iterator_loop_context (tree c)
10360 : {
10361 63477 : if (!OMP_CLAUSE_HAS_ITERATORS (c))
10362 : return;
10363 1170 : while (!gimplify_ctxp->bind_expr_stack.is_empty ())
10364 840 : gimple_pop_bind_expr ();
10365 330 : pop_gimplify_context (NULL);
10366 : }
10367 :
/* If *LIST_P contains any OpenMP depend clauses with iterators,
   lower all the depend clauses by populating corresponding depend
   array.  Returns 0 if there are no such depend clauses, or
   2 if all depend clauses should be removed, 1 otherwise.  */

static int
gimplify_omp_depend (tree *list_p, gimple_seq *pre_p)
{
  tree c;
  gimple *g;
  /* Per-kind bookkeeping, indexed 0=out/inout, 1=mutexinoutset, 2=in,
     3=depobj, 4=inoutset:
     n[] counts non-iterator clauses, counts[] accumulates iterator
     iteration counts.  */
  size_t n[5] = { 0, 0, 0, 0, 0 };
  bool unused[5];
  tree counts[5] = { NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE };
  tree last_iter = NULL_TREE, last_count = NULL_TREE;
  size_t i, j;
  location_t first_loc = UNKNOWN_LOCATION;

  /* First pass: count the dependences of each kind.  */
  for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
      {
	switch (OMP_CLAUSE_DEPEND_KIND (c))
	  {
	  case OMP_CLAUSE_DEPEND_IN:
	    i = 2;
	    break;
	  case OMP_CLAUSE_DEPEND_OUT:
	  case OMP_CLAUSE_DEPEND_INOUT:
	    i = 0;
	    break;
	  case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
	    i = 1;
	    break;
	  case OMP_CLAUSE_DEPEND_DEPOBJ:
	    i = 3;
	    break;
	  case OMP_CLAUSE_DEPEND_INOUTSET:
	    i = 4;
	    break;
	  default:
	    gcc_unreachable ();
	  }
	tree t = OMP_CLAUSE_DECL (c);
	if (first_loc == UNKNOWN_LOCATION)
	  first_loc = OMP_CLAUSE_LOCATION (c);
	if (OMP_ITERATOR_DECL_P (t))
	  {
	    /* Iterator counts are cached: consecutive clauses sharing the
	       same iterator reuse last_count.  */
	    if (TREE_PURPOSE (t) != last_iter)
	      {
		tree tcnt = compute_omp_iterator_count (TREE_PURPOSE (t),
							pre_p);
		if (!tcnt)
		  return 2;
		last_iter = TREE_PURPOSE (t);
		last_count = tcnt;
	      }
	    if (counts[i] == NULL_TREE)
	      counts[i] = last_count;
	    else
	      counts[i] = size_binop_loc (OMP_CLAUSE_LOCATION (c),
					  PLUS_EXPR, counts[i], last_count);
	  }
	else
	  n[i]++;
      }
  /* No iterator-bearing depend clause at all: nothing to lower.  */
  for (i = 0; i < 5; i++)
    if (counts[i])
      break;
  if (i == 5)
    return 0;

  tree total = size_zero_node;
  for (i = 0; i < 5; i++)
    {
      unused[i] = counts[i] == NULL_TREE && n[i] == 0;
      if (counts[i] == NULL_TREE)
	counts[i] = size_zero_node;
      if (n[i])
	counts[i] = size_binop (PLUS_EXPR, counts[i], size_int (n[i]));
      if (gimplify_expr (&counts[i], pre_p, NULL, is_gimple_val,
			 fb_rvalue) == GS_ERROR)
	return 2;
      total = size_binop (PLUS_EXPR, total, counts[i]);
    }

  if (gimplify_expr (&total, pre_p, NULL, is_gimple_val, fb_rvalue)
      == GS_ERROR)
    return 2;
  /* The "old" (compact) array layout is usable when only the in and
     out/inout buckets are populated.  */
  bool is_old = unused[1] && unused[3] && unused[4];
  tree totalpx = size_binop (PLUS_EXPR, unshare_expr (total),
			     size_int (is_old ? 1 : 4));
  if (!unused[4])
    totalpx = size_binop (PLUS_EXPR, totalpx,
			  size_binop (MULT_EXPR, counts[4], size_int (2)));
  tree type = build_array_type (ptr_type_node, build_index_type (totalpx));
  tree array = create_tmp_var_raw (type);
  TREE_ADDRESSABLE (array) = 1;
  if (!poly_int_tree_p (totalpx))
    {
      /* Variable-length array: gimplify its sizes and register it with the
	 innermost OpenMP context that can own it.  */
      if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (array)))
	gimplify_type_sizes (TREE_TYPE (array), pre_p);
      if (gimplify_omp_ctxp)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_TASKGROUP
		     || ctx->region_type == ORT_SIMD
		     || ctx->region_type == ORT_ACC))
	    ctx = ctx->outer_context;
	  if (ctx)
	    omp_add_variable (ctx, array, GOVD_LOCAL | GOVD_SEEN);
	}
      gimplify_vla_decl (array, pre_p);
    }
  else
    gimple_add_tmp_var (array);
  /* Emit the array header: total count and (new layout) per-kind counts.  */
  tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
		   NULL_TREE);
  tree tem;
  if (!is_old)
    {
      tem = build2 (MODIFY_EXPR, void_type_node, r,
		    build_int_cst (ptr_type_node, 0));
      gimplify_and_add (tem, pre_p);
      r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
		  NULL_TREE);
    }
  tem = build2 (MODIFY_EXPR, void_type_node, r,
		fold_convert (ptr_type_node, total));
  gimplify_and_add (tem, pre_p);
  for (i = 1; i < (is_old ? 2 : 4); i++)
    {
      r = build4 (ARRAY_REF, ptr_type_node, array, size_int (i + !is_old),
		  NULL_TREE, NULL_TREE);
      tem = build2 (MODIFY_EXPR, void_type_node, r, counts[i - 1]);
      gimplify_and_add (tem, pre_p);
    }

  /* cnts[i] is the running write index into the array for kind i;
     cnts[5] is the extra cursor for inoutset payloads.  Kinds after the
     last used one share the previous cursor.  */
  tree cnts[6];
  for (j = 5; j; j--)
    if (!unused[j - 1])
      break;
  for (i = 0; i < 5; i++)
    {
      if (i && (i >= j || unused[i - 1]))
	{
	  cnts[i] = cnts[i - 1];
	  continue;
	}
      cnts[i] = create_tmp_var (sizetype);
      if (i == 0)
	g = gimple_build_assign (cnts[i], size_int (is_old ? 2 : 5));
      else
	{
	  tree t;
	  if (is_old)
	    t = size_binop (PLUS_EXPR, counts[0], size_int (2));
	  else
	    t = size_binop (PLUS_EXPR, cnts[i - 1], counts[i - 1]);
	  if (gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue)
	      == GS_ERROR)
	    return 2;
	  g = gimple_build_assign (cnts[i], t);
	}
      gimple_seq_add_stmt (pre_p, g);
    }
  if (unused[4])
    cnts[5] = NULL_TREE;
  else
    {
      tree t = size_binop (PLUS_EXPR, total, size_int (5));
      cnts[5] = create_tmp_var (sizetype);
      /* NOTE: i == 5 here (loop above ran to completion), so cnts[i]
	 is cnts[5] -- intentional but easy to misread.  */
      g = gimple_build_assign (cnts[i], t);
      gimple_seq_add_stmt (pre_p, g);
    }

  /* Second pass: store each dependence address into its slot, building
     iterator loops (last_bind/last_body) for iterator clauses.  */
  last_iter = NULL_TREE;
  tree last_bind = NULL_TREE;
  tree *last_body = NULL;
  for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
      {
	switch (OMP_CLAUSE_DEPEND_KIND (c))
	  {
	  case OMP_CLAUSE_DEPEND_IN:
	    i = 2;
	    break;
	  case OMP_CLAUSE_DEPEND_OUT:
	  case OMP_CLAUSE_DEPEND_INOUT:
	    i = 0;
	    break;
	  case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
	    i = 1;
	    break;
	  case OMP_CLAUSE_DEPEND_DEPOBJ:
	    i = 3;
	    break;
	  case OMP_CLAUSE_DEPEND_INOUTSET:
	    i = 4;
	    break;
	  default:
	    gcc_unreachable ();
	  }
	tree t = OMP_CLAUSE_DECL (c);
	if (OMP_ITERATOR_DECL_P (t))
	  {
	    /* Iterator clause: the stores run inside the iterator loop, so
	       append them to the loop body rather than to pre_p.  */
	    if (TREE_PURPOSE (t) != last_iter)
	      {
		last_body = build_omp_iterator_loop (TREE_PURPOSE (t), pre_p,
						     &last_bind);
		SET_EXPR_LOCATION (last_bind, OMP_CLAUSE_LOCATION (c));
	      }
	    last_iter = TREE_PURPOSE (t);
	    if (TREE_CODE (TREE_VALUE (t)) == COMPOUND_EXPR)
	      {
		append_to_statement_list (TREE_OPERAND (TREE_VALUE (t),
							0), last_body);
		TREE_VALUE (t) = TREE_OPERAND (TREE_VALUE (t), 1);
	      }
	    if (error_operand_p (TREE_VALUE (t)))
	      return 2;
	    if (TREE_VALUE (t) != null_pointer_node)
	      TREE_VALUE (t) = build_fold_addr_expr (TREE_VALUE (t));
	    if (i == 4)
	      {
		/* inoutset: store an indirection slot first, then switch to
		   the payload cursor cnts[5].  */
		r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
			    NULL_TREE, NULL_TREE);
		tree r2 = build4 (ARRAY_REF, ptr_type_node, array, cnts[5],
				  NULL_TREE, NULL_TREE);
		r2 = build_fold_addr_expr_with_type (r2, ptr_type_node);
		tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
				  void_type_node, r, r2);
		append_to_statement_list_force (tem, last_body);
		tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
				  void_type_node, cnts[i],
				  size_binop (PLUS_EXPR, cnts[i],
					      size_int (1)));
		append_to_statement_list_force (tem, last_body);
		i = 5;
	      }
	    r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
			NULL_TREE, NULL_TREE);
	    tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
			      void_type_node, r, TREE_VALUE (t));
	    append_to_statement_list_force (tem, last_body);
	    if (i == 5)
	      {
		r = build4 (ARRAY_REF, ptr_type_node, array,
			    size_binop (PLUS_EXPR, cnts[i], size_int (1)),
			    NULL_TREE, NULL_TREE);
		tem = build_int_cst (ptr_type_node, GOMP_DEPEND_INOUTSET);
		tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
				  void_type_node, r, tem);
		append_to_statement_list_force (tem, last_body);
	      }
	    tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
			      void_type_node, cnts[i],
			      size_binop (PLUS_EXPR, cnts[i],
					  size_int (1 + (i == 5))));
	    append_to_statement_list_force (tem, last_body);
	    TREE_VALUE (t) = null_pointer_node;
	  }
	else
	  {
	    /* Non-iterator clause: flush any pending iterator loop, then
	       emit the store directly into pre_p.  */
	    if (last_bind)
	      {
		gimplify_and_add (last_bind, pre_p);
		last_bind = NULL_TREE;
	      }
	    if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
	      {
		gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
			       NULL, is_gimple_val, fb_rvalue);
		OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
	      }
	    if (error_operand_p (OMP_CLAUSE_DECL (c)))
	      return 2;
	    if (OMP_CLAUSE_DECL (c) != null_pointer_node)
	      OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
	    if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
			       is_gimple_val, fb_rvalue) == GS_ERROR)
	      return 2;
	    if (i == 4)
	      {
		r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
			    NULL_TREE, NULL_TREE);
		tree r2 = build4 (ARRAY_REF, ptr_type_node, array, cnts[5],
				  NULL_TREE, NULL_TREE);
		r2 = build_fold_addr_expr_with_type (r2, ptr_type_node);
		tem = build2 (MODIFY_EXPR, void_type_node, r, r2);
		gimplify_and_add (tem, pre_p);
		g = gimple_build_assign (cnts[i], size_binop (PLUS_EXPR,
							      cnts[i],
							      size_int (1)));
		gimple_seq_add_stmt (pre_p, g);
		i = 5;
	      }
	    r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
			NULL_TREE, NULL_TREE);
	    tem = build2 (MODIFY_EXPR, void_type_node, r, OMP_CLAUSE_DECL (c));
	    gimplify_and_add (tem, pre_p);
	    if (i == 5)
	      {
		r = build4 (ARRAY_REF, ptr_type_node, array,
			    size_binop (PLUS_EXPR, cnts[i], size_int (1)),
			    NULL_TREE, NULL_TREE);
		tem = build_int_cst (ptr_type_node, GOMP_DEPEND_INOUTSET);
		tem = build2 (MODIFY_EXPR, void_type_node, r, tem);
		/* NOTE(review): in this non-iterator branch TEM is both
		   appended to LAST_BODY and gimplified into PRE_P --
		   LAST_BODY may be stale here; the double add looks
		   suspicious and should be double-checked.  */
		append_to_statement_list_force (tem, last_body);
		gimplify_and_add (tem, pre_p);
	      }
	    g = gimple_build_assign (cnts[i],
				     size_binop (PLUS_EXPR, cnts[i],
						 size_int (1 + (i == 5))));
	    gimple_seq_add_stmt (pre_p, g);
	  }
      }
  if (last_bind)
    gimplify_and_add (last_bind, pre_p);
  /* Runtime sanity check: every cursor must have reached the end of its
     region; trap otherwise.  */
  tree cond = boolean_false_node;
  if (is_old)
    {
      if (!unused[0])
	cond = build2_loc (first_loc, NE_EXPR, boolean_type_node, cnts[0],
			   size_binop_loc (first_loc, PLUS_EXPR, counts[0],
					   size_int (2)));
      if (!unused[2])
	cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
			   build2_loc (first_loc, NE_EXPR, boolean_type_node,
				       cnts[2],
				       size_binop_loc (first_loc, PLUS_EXPR,
						       totalpx,
						       size_int (1))));
    }
  else
    {
      tree prev = size_int (5);
      for (i = 0; i < 5; i++)
	{
	  if (unused[i])
	    continue;
	  prev = size_binop_loc (first_loc, PLUS_EXPR, counts[i], prev);
	  cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
			     build2_loc (first_loc, NE_EXPR, boolean_type_node,
					 cnts[i], unshare_expr (prev)));
	}
    }
  tem = build3_loc (first_loc, COND_EXPR, void_type_node, cond,
		    build_call_expr_loc (first_loc,
					 builtin_decl_explicit (BUILT_IN_TRAP),
					 0), void_node);
  gimplify_and_add (tem, pre_p);
  /* Prepend a sentinel DEPEND clause pointing at the filled array.  */
  c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
  OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
  OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
  OMP_CLAUSE_CHAIN (c) = *list_p;
  *list_p = c;
  return 1;
}
10727 :
10728 : /* True if mapping node C maps, or unmaps, a (Fortran) array descriptor. */
10729 :
10730 : static bool
10731 123145 : omp_map_clause_descriptor_p (tree c)
10732 : {
10733 123145 : if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP)
10734 : return false;
10735 :
10736 123141 : if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_TO_PSET)
10737 : return true;
10738 :
10739 80625 : if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_RELEASE
10740 72890 : || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DELETE)
10741 81069 : && OMP_CLAUSE_RELEASE_DESCRIPTOR (c))
10742 6135 : return true;
10743 :
10744 : return false;
10745 : }
10746 :
/* For a set of mappings describing an array section pointed to by a struct
   (or derived type, etc.) component, create an "alloc" or "release" node to
   insert into a list following a GOMP_MAP_STRUCT node.  For some types of
   mapping (e.g. Fortran arrays with descriptors), an additional mapping may
   be created that is inserted into the list of mapping nodes attached to the
   directive being processed -- not part of the sorted list of nodes after
   GOMP_MAP_STRUCT.

   CODE is the code of the directive being processed.  GRP_START and GRP_END
   are the first and last of two or three nodes representing this array section
   mapping (e.g. a data movement node like GOMP_MAP_{TO,FROM}, optionally a
   GOMP_MAP_TO_PSET, and finally a GOMP_MAP_ALWAYS_POINTER).  EXTRA_NODE is
   filled with the additional node described above, if needed.

   This function does not add the new nodes to any lists itself.  It is the
   responsibility of the caller to do that.  */

static tree
build_omp_struct_comp_nodes (enum tree_code code, tree grp_start, tree grp_end,
			     tree *extra_node)
{
  /* Exit-data directives unmap, so use RELEASE; everything else ALLOC.  */
  enum gomp_map_kind mkind
    = (code == OMP_TARGET_EXIT_DATA || code == OACC_EXIT_DATA)
      ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;

  gcc_assert (grp_start != grp_end);

  tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end), OMP_CLAUSE_MAP);
  OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
  OMP_CLAUSE_DECL (c2) = unshare_expr (OMP_CLAUSE_DECL (grp_end));
  OMP_CLAUSE_CHAIN (c2) = NULL_TREE;
  /* The middle node exists only for three-node groups.  */
  tree grp_mid = NULL_TREE;
  if (OMP_CLAUSE_CHAIN (grp_start) != grp_end)
    grp_mid = OMP_CLAUSE_CHAIN (grp_start);

  /* Descriptor groups carry the descriptor's size; plain pointer groups
     alloc/release a pointer's worth.  */
  if (grp_mid && omp_map_clause_descriptor_p (grp_mid))
    OMP_CLAUSE_SIZE (c2) = OMP_CLAUSE_SIZE (grp_mid);
  else
    OMP_CLAUSE_SIZE (c2) = TYPE_SIZE_UNIT (ptr_type_node);

  if (grp_mid
      && OMP_CLAUSE_CODE (grp_mid) == OMP_CLAUSE_MAP
      && OMP_CLAUSE_MAP_KIND (grp_mid) == GOMP_MAP_ALWAYS_POINTER)
    {
      /* Also alloc/release the pointer itself via the extra node.  */
      tree c3
	= build_omp_clause (OMP_CLAUSE_LOCATION (grp_end), OMP_CLAUSE_MAP);
      OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
      OMP_CLAUSE_DECL (c3) = unshare_expr (OMP_CLAUSE_DECL (grp_mid));
      OMP_CLAUSE_SIZE (c3) = TYPE_SIZE_UNIT (ptr_type_node);
      OMP_CLAUSE_CHAIN (c3) = NULL_TREE;

      *extra_node = c3;
    }
  else
    *extra_node = NULL_TREE;

  return c2;
}
10805 :
10806 : /* Strip ARRAY_REFS or an indirect ref off BASE, find the containing object,
10807 : and set *BITPOSP and *POFFSETP to the bit offset of the access.
10808 : If BASE_REF is non-NULL and the containing object is a reference, set
10809 : *BASE_REF to that reference before dereferencing the object.
10810 : If BASE_REF is NULL, check that the containing object is a COMPONENT_REF or
10811 : has array type, else return NULL. */
10812 :
10813 : static tree
10814 7323 : extract_base_bit_offset (tree base, poly_int64 *bitposp,
10815 : poly_offset_int *poffsetp,
10816 : bool *variable_offset)
10817 : {
10818 7323 : tree offset;
10819 7323 : poly_int64 bitsize, bitpos;
10820 7323 : machine_mode mode;
10821 7323 : int unsignedp, reversep, volatilep = 0;
10822 7323 : poly_offset_int poffset;
10823 :
10824 7323 : STRIP_NOPS (base);
10825 :
10826 7323 : base = get_inner_reference (base, &bitsize, &bitpos, &offset, &mode,
10827 : &unsignedp, &reversep, &volatilep);
10828 :
10829 7323 : STRIP_NOPS (base);
10830 :
10831 7323 : if (offset && poly_int_tree_p (offset))
10832 : {
10833 0 : poffset = wi::to_poly_offset (offset);
10834 0 : *variable_offset = false;
10835 : }
10836 : else
10837 : {
10838 7323 : poffset = 0;
10839 7323 : *variable_offset = (offset != NULL_TREE);
10840 : }
10841 :
10842 7323 : if (maybe_ne (bitpos, 0))
10843 5291 : poffset += bits_to_bytes_round_down (bitpos);
10844 :
10845 7323 : *bitposp = bitpos;
10846 7323 : *poffsetp = poffset;
10847 :
10848 7323 : return base;
10849 : }
10850 :
/* Used for topological sorting of mapping groups.  UNVISITED means we haven't
   started processing the group yet.  The TEMPORARY mark is used when we first
   encounter a group on a depth-first traversal, and the PERMANENT mark is used
   when we have processed all the group's children (i.e. all the base pointers
   referred to by the group's mapping nodes, recursively).  */

enum omp_tsort_mark {
  UNVISITED,	/* Not yet reached by the DFS.  */
  TEMPORARY,	/* On the current DFS path (detects cycles).  */
  PERMANENT	/* Group and all its children fully processed.  */
};
10862 :
/* Hash for trees based on operand_equal_p.  Like tree_operand_hash
   but ignores side effects in the equality comparisons.  */

struct tree_operand_hash_no_se : tree_operand_hash
{
  static inline bool equal (const value_type &,
			    const compare_type &);
};

/* Structural equality for hash-map keys T1 and T2.  OEP_MATCH_SIDE_EFFECTS
   lets trees containing side effects compare equal when they are otherwise
   structurally identical, unlike the base tree_operand_hash.  */

inline bool
tree_operand_hash_no_se::equal (const value_type &t1,
				const compare_type &t2)
{
  return operand_equal_p (t1, t2, OEP_MATCH_SIDE_EFFECTS);
}
10878 :
/* A group of OMP_CLAUSE_MAP nodes that correspond to a single "map"
   clause.  */

struct omp_mapping_group {
  /* Pointer to the chain slot that holds the group's first clause, so the
     group can be relinked in place.  */
  tree *grp_start;
  /* The group's last clause node.  */
  tree grp_end;
  /* Depth-first-search state for topological sorting (see omp_tsort_mark).  */
  omp_tsort_mark mark;
  /* If we've removed the group but need to reindex, mark the group as
     deleted.  */
  bool deleted;
  /* The group points to an already-created "GOMP_MAP_STRUCT
     GOMP_MAP_ATTACH_DETACH" pair.  */
  bool reprocess_struct;
  /* The group should use "zero-length" allocations for pointers that are not
     mapped "to" on the same directive.  */
  bool fragile;
  /* NOTE(review): presumably links groups sharing the same base -- confirm
     against the code that populates it (outside this chunk).  */
  struct omp_mapping_group *sibling;
  /* NOTE(review): presumably a worklist/ordering link -- confirm against
     the code that populates it (outside this chunk).  */
  struct omp_mapping_group *next;
};
10898 :
10899 : DEBUG_FUNCTION void
10900 0 : debug_mapping_group (omp_mapping_group *grp)
10901 : {
10902 0 : tree tmp = OMP_CLAUSE_CHAIN (grp->grp_end);
10903 0 : OMP_CLAUSE_CHAIN (grp->grp_end) = NULL;
10904 0 : debug_generic_expr (*grp->grp_start);
10905 0 : OMP_CLAUSE_CHAIN (grp->grp_end) = tmp;
10906 0 : }
10907 :
10908 : /* Return the OpenMP "base pointer" of an expression EXPR, or NULL if there
10909 : isn't one. */
10910 :
10911 : static tree
10912 34600 : omp_get_base_pointer (tree expr)
10913 : {
10914 34600 : while (TREE_CODE (expr) == ARRAY_REF
10915 41431 : || TREE_CODE (expr) == COMPONENT_REF)
10916 6831 : expr = TREE_OPERAND (expr, 0);
10917 :
10918 34600 : if (INDIRECT_REF_P (expr)
10919 34600 : || (TREE_CODE (expr) == MEM_REF
10920 0 : && integer_zerop (TREE_OPERAND (expr, 1))))
10921 : {
10922 10393 : expr = TREE_OPERAND (expr, 0);
10923 10430 : while (TREE_CODE (expr) == COMPOUND_EXPR)
10924 37 : expr = TREE_OPERAND (expr, 1);
10925 10393 : if (TREE_CODE (expr) == POINTER_PLUS_EXPR)
10926 837 : expr = TREE_OPERAND (expr, 0);
10927 10393 : if (TREE_CODE (expr) == SAVE_EXPR)
10928 31 : expr = TREE_OPERAND (expr, 0);
10929 10393 : STRIP_NOPS (expr);
10930 10393 : return expr;
10931 : }
10932 :
10933 : return NULL_TREE;
10934 : }
10935 :
/* An attach or detach operation depends directly on the address being
   attached/detached.  Return that address, or none if there are no
   attachments/detachments.  */

static tree
omp_get_attachment (omp_mapping_group *grp)
{
  tree node = *grp->grp_start;

  switch (OMP_CLAUSE_MAP_KIND (node))
    {
    /* Data-movement / allocation leaders: the attachment, if any, is an
       ATTACH_DETACH-style node following the leader (and an optional
       descriptor node).  */
    case GOMP_MAP_TO:
    case GOMP_MAP_FROM:
    case GOMP_MAP_TOFROM:
    case GOMP_MAP_ALWAYS_FROM:
    case GOMP_MAP_ALWAYS_TO:
    case GOMP_MAP_ALWAYS_TOFROM:
    case GOMP_MAP_FORCE_FROM:
    case GOMP_MAP_FORCE_TO:
    case GOMP_MAP_FORCE_TOFROM:
    case GOMP_MAP_FORCE_PRESENT:
    case GOMP_MAP_PRESENT_ALLOC:
    case GOMP_MAP_PRESENT_FROM:
    case GOMP_MAP_PRESENT_TO:
    case GOMP_MAP_PRESENT_TOFROM:
    case GOMP_MAP_ALWAYS_PRESENT_FROM:
    case GOMP_MAP_ALWAYS_PRESENT_TO:
    case GOMP_MAP_ALWAYS_PRESENT_TOFROM:
    case GOMP_MAP_ALLOC:
    case GOMP_MAP_RELEASE:
    case GOMP_MAP_DELETE:
    case GOMP_MAP_FORCE_ALLOC:
      /* A single-node group has no attachment.  */
      if (node == grp->grp_end)
	return NULL_TREE;

      node = OMP_CLAUSE_CHAIN (node);
      /* Skip over a Fortran array-descriptor node, if present.  */
      if (node && omp_map_clause_descriptor_p (node))
	{
	  gcc_assert (node != grp->grp_end);
	  node = OMP_CLAUSE_CHAIN (node);
	}
      if (node)
	switch (OMP_CLAUSE_MAP_KIND (node))
	  {
	  /* Pointer bookkeeping nodes are not attachments.  */
	  case GOMP_MAP_POINTER:
	  case GOMP_MAP_ALWAYS_POINTER:
	  case GOMP_MAP_FIRSTPRIVATE_POINTER:
	  case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
	  case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION:
	    return NULL_TREE;

	  case GOMP_MAP_ATTACH_DETACH:
	  case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
	  case GOMP_MAP_DETACH:
	    return OMP_CLAUSE_DECL (node);

	  default:
	    internal_error ("unexpected mapping node");
	  }
      return error_mark_node;

    /* A pointer-set leader must be followed by an attach or detach.  */
    case GOMP_MAP_TO_PSET:
      gcc_assert (node != grp->grp_end);
      node = OMP_CLAUSE_CHAIN (node);
      if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_ATTACH
	  || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_DETACH)
	return OMP_CLAUSE_DECL (node);
      else
	internal_error ("unexpected mapping node");
      return error_mark_node;

    /* A bare attach/detach clause: the attachment is its own decl.  */
    case GOMP_MAP_ATTACH:
    case GOMP_MAP_DETACH:
      node = OMP_CLAUSE_CHAIN (node);
      if (!node || *grp->grp_start == grp->grp_end)
	return OMP_CLAUSE_DECL (*grp->grp_start);
      if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_POINTER
	  || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
	return OMP_CLAUSE_DECL (*grp->grp_start);
      else
	internal_error ("unexpected mapping node");
      return error_mark_node;

    /* Kinds that never carry an attachment.  */
    case GOMP_MAP_STRUCT:
    case GOMP_MAP_STRUCT_UNORD:
    case GOMP_MAP_FORCE_DEVICEPTR:
    case GOMP_MAP_DEVICE_RESIDENT:
    case GOMP_MAP_LINK:
    case GOMP_MAP_IF_PRESENT:
    case GOMP_MAP_FIRSTPRIVATE:
    case GOMP_MAP_FIRSTPRIVATE_INT:
    case GOMP_MAP_USE_DEVICE_PTR:
    case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
      return NULL_TREE;

    default:
      internal_error ("unexpected mapping node");
    }

  return error_mark_node;
}
11037 :
11038 : /* Given a pointer START_P to the start of a group of related (e.g. pointer)
11039 : mappings, return the chain pointer to the end of that group in the list. */
11040 :
static tree *
omp_group_last (tree *start_p)
{
  tree c = *start_p, nc, *grp_last_p = start_p;

  gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP);

  nc = OMP_CLAUSE_CHAIN (c);

  /* If the successor is absent or not a map clause at all, the group is
     just the single node C.  */
  if (!nc || OMP_CLAUSE_CODE (nc) != OMP_CLAUSE_MAP)
    return grp_last_p;

  switch (OMP_CLAUSE_MAP_KIND (c))
    {
    default:
      /* The common case: absorb trailing pointer/reference/attach-detach
	 bookkeeping nodes (and array descriptors) that the front ends emit
	 after the main data-movement node.  */
      while (nc
	     && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
	     && (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
		 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_POINTER
		 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ATTACH_DETACH
		 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
		 || (OMP_CLAUSE_MAP_KIND (nc)
		     == GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION)
		 || (OMP_CLAUSE_MAP_KIND (nc)
		     == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION)
		 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_DETACH
		 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ALWAYS_POINTER
		 || omp_map_clause_descriptor_p (nc)))
	{
	  tree nc2 = OMP_CLAUSE_CHAIN (nc);
	  if (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_DETACH)
	    {
	      /* In the specific case we're doing "exit data" on an array
		 slice of a reference-to-pointer struct component, we will see
		 DETACH followed by ATTACH_DETACH here.  We want to treat that
		 as a single group.  In other cases DETACH might represent a
		 stand-alone "detach" clause, so we don't want to consider
		 that part of the group.  */
	      if (nc2
		  && OMP_CLAUSE_CODE (nc2) == OMP_CLAUSE_MAP
		  && OMP_CLAUSE_MAP_KIND (nc2) == GOMP_MAP_ATTACH_DETACH)
		goto consume_two_nodes;
	      else
		break;
	    }
	  if (nc2
	      && OMP_CLAUSE_CODE (nc2) == OMP_CLAUSE_MAP
	      && (OMP_CLAUSE_MAP_KIND (nc)
		  == GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION)
	      && OMP_CLAUSE_MAP_KIND (nc2) == GOMP_MAP_ATTACH)
	    {
	    consume_two_nodes:
	      /* NC and NC2 both belong to this group: step over the pair at
		 once so the trailing ATTACH is included.  */
	      grp_last_p = &OMP_CLAUSE_CHAIN (nc);
	      c = nc2;
	      nc = OMP_CLAUSE_CHAIN (nc2);
	    }
	  else
	    {
	      /* Consume one node.  */
	      grp_last_p = &OMP_CLAUSE_CHAIN (c);
	      c = nc;
	      nc = nc2;
	    }
	}
      break;

    case GOMP_MAP_ATTACH:
    case GOMP_MAP_DETACH:
      /* This is a weird artifact of how directives are parsed: bare attach or
	 detach clauses get a subsequent (meaningless) FIRSTPRIVATE_POINTER or
	 FIRSTPRIVATE_REFERENCE node.  FIXME.  */
      if (nc
	  && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
	  && (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
	      || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_POINTER))
	grp_last_p = &OMP_CLAUSE_CHAIN (c);
      break;

    case GOMP_MAP_TO_PSET:
      /* An array-descriptor ("pointer set") node may be followed by a single
	 attach/detach node that operates on the described data.  */
      if (OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
	  && (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ATTACH
	      || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_DETACH))
	grp_last_p = &OMP_CLAUSE_CHAIN (c);
      break;

    case GOMP_MAP_STRUCT:
    case GOMP_MAP_STRUCT_UNORD:
      {
	/* The size operand of a struct mapping records the number of member
	   mappings that follow; all of those belong to this group, plus
	   possibly one preceding pointer/reference bookkeeping node.  */
	unsigned HOST_WIDE_INT num_mappings
	  = tree_to_uhwi (OMP_CLAUSE_SIZE (c));
	if (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_POINTER
	    || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
	    || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ATTACH_DETACH)
	  grp_last_p = &OMP_CLAUSE_CHAIN (*grp_last_p);
	for (unsigned i = 0; i < num_mappings; i++)
	  grp_last_p = &OMP_CLAUSE_CHAIN (*grp_last_p);
      }
      break;
    }

  return grp_last_p;
}
11142 :
11143 : /* Walk through LIST_P, and return a list of groups of mappings found (e.g.
11144 : OMP_CLAUSE_MAP with GOMP_MAP_{TO/FROM/TOFROM} followed by one or two
11145 : associated GOMP_MAP_POINTER mappings). Return a vector of omp_mapping_group
11146 : if we have more than one such group, else return NULL. */
11147 :
11148 : static void
11149 87877 : omp_gather_mapping_groups_1 (tree *list_p, vec<omp_mapping_group> *groups,
11150 : tree gather_sentinel)
11151 : {
11152 87877 : for (tree *cp = list_p;
11153 261726 : *cp && *cp != gather_sentinel;
11154 173849 : cp = &OMP_CLAUSE_CHAIN (*cp))
11155 : {
11156 173849 : if (OMP_CLAUSE_CODE (*cp) != OMP_CLAUSE_MAP)
11157 96958 : continue;
11158 :
11159 76891 : tree *grp_last_p = omp_group_last (cp);
11160 76891 : omp_mapping_group grp;
11161 :
11162 76891 : grp.grp_start = cp;
11163 76891 : grp.grp_end = *grp_last_p;
11164 76891 : grp.mark = UNVISITED;
11165 76891 : grp.sibling = NULL;
11166 76891 : grp.deleted = false;
11167 76891 : grp.reprocess_struct = false;
11168 76891 : grp.fragile = false;
11169 76891 : grp.next = NULL;
11170 76891 : groups->safe_push (grp);
11171 :
11172 76891 : cp = grp_last_p;
11173 : }
11174 87877 : }
11175 :
11176 : static vec<omp_mapping_group> *
11177 87559 : omp_gather_mapping_groups (tree *list_p)
11178 : {
11179 87559 : vec<omp_mapping_group> *groups = new vec<omp_mapping_group> ();
11180 :
11181 87559 : omp_gather_mapping_groups_1 (list_p, groups, NULL_TREE);
11182 :
11183 87559 : if (groups->length () > 0)
11184 : return groups;
11185 : else
11186 : {
11187 44580 : delete groups;
11188 44580 : return NULL;
11189 : }
11190 : }
11191 :
11192 : /* A pointer mapping group GRP may define a block of memory starting at some
11193 : base address, and maybe also define a firstprivate pointer or firstprivate
11194 : reference that points to that block. The return value is a node containing
11195 : the former, and the *FIRSTPRIVATE pointer is set if we have the latter.
11196 : If we define several base pointers, i.e. for a GOMP_MAP_STRUCT mapping,
11197 : return the number of consecutive chained nodes in CHAINED. */
11198 :
static tree
omp_group_base (omp_mapping_group *grp, unsigned int *chained,
		tree *firstprivate)
{
  tree node = *grp->grp_start;

  /* Default outputs: no firstprivate pointer, a single base node.  */
  *firstprivate = NULL_TREE;
  *chained = 1;

  switch (OMP_CLAUSE_MAP_KIND (node))
    {
    /* Ordinary data-movement/allocation nodes: the node itself describes
       the mapped block of memory.  */
    case GOMP_MAP_TO:
    case GOMP_MAP_FROM:
    case GOMP_MAP_TOFROM:
    case GOMP_MAP_ALWAYS_FROM:
    case GOMP_MAP_ALWAYS_TO:
    case GOMP_MAP_ALWAYS_TOFROM:
    case GOMP_MAP_FORCE_FROM:
    case GOMP_MAP_FORCE_TO:
    case GOMP_MAP_FORCE_TOFROM:
    case GOMP_MAP_FORCE_PRESENT:
    case GOMP_MAP_PRESENT_ALLOC:
    case GOMP_MAP_PRESENT_FROM:
    case GOMP_MAP_PRESENT_TO:
    case GOMP_MAP_PRESENT_TOFROM:
    case GOMP_MAP_ALWAYS_PRESENT_FROM:
    case GOMP_MAP_ALWAYS_PRESENT_TO:
    case GOMP_MAP_ALWAYS_PRESENT_TOFROM:
    case GOMP_MAP_ALLOC:
    case GOMP_MAP_RELEASE:
    case GOMP_MAP_DELETE:
    case GOMP_MAP_FORCE_ALLOC:
    case GOMP_MAP_IF_PRESENT:
      if (node == grp->grp_end)
	return node;

      node = OMP_CLAUSE_CHAIN (node);
      if (!node)
	internal_error ("unexpected mapping node");
      /* Skip over an array-descriptor node, if one follows.  */
      if (omp_map_clause_descriptor_p (node))
	{
	  if (node == grp->grp_end)
	    return *grp->grp_start;
	  node = OMP_CLAUSE_CHAIN (node);
	}
      switch (OMP_CLAUSE_MAP_KIND (node))
	{
	case GOMP_MAP_POINTER:
	case GOMP_MAP_FIRSTPRIVATE_POINTER:
	case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
	case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION:
	  /* Also report the pointer/reference decl that points at the
	     mapped block.  */
	  *firstprivate = OMP_CLAUSE_DECL (node);
	  return *grp->grp_start;

	case GOMP_MAP_ALWAYS_POINTER:
	case GOMP_MAP_ATTACH_DETACH:
	case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
	case GOMP_MAP_DETACH:
	  return *grp->grp_start;

	default:
	  internal_error ("unexpected mapping node");
	}
      return error_mark_node;

    case GOMP_MAP_TO_PSET:
      /* An array descriptor must be followed by an attach/detach node and
	 defines no base block itself.  */
      gcc_assert (node != grp->grp_end);
      node = OMP_CLAUSE_CHAIN (node);
      if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_ATTACH
	  || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_DETACH)
	return NULL_TREE;
      else
	internal_error ("unexpected mapping node");
      return error_mark_node;

    case GOMP_MAP_ATTACH:
    case GOMP_MAP_DETACH:
      node = OMP_CLAUSE_CHAIN (node);
      if (!node || *grp->grp_start == grp->grp_end)
	return NULL_TREE;
      if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_POINTER
	  || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
	{
	  /* We're mapping the base pointer itself in a bare attach or detach
	     node.  This is a side effect of how parsing works, and the mapping
	     will be removed anyway (at least for enter/exit data directives).
	     We should ignore the mapping here.  FIXME.  */
	  return NULL_TREE;
	}
      else
	internal_error ("unexpected mapping node");
      return error_mark_node;

    case GOMP_MAP_STRUCT:
    case GOMP_MAP_STRUCT_UNORD:
      {
	/* A struct mapping defines NUM_MAPPINGS consecutive member base
	   nodes, possibly preceded by a pointer/reference bookkeeping
	   node which we skip (recording a firstprivate decl if so).  */
	unsigned HOST_WIDE_INT num_mappings
	  = tree_to_uhwi (OMP_CLAUSE_SIZE (node));
	node = OMP_CLAUSE_CHAIN (node);
	if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_POINTER
	    || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
	  {
	    *firstprivate = OMP_CLAUSE_DECL (node);
	    node = OMP_CLAUSE_CHAIN (node);
	  }
	else if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_ATTACH_DETACH)
	  node = OMP_CLAUSE_CHAIN (node);
	*chained = num_mappings;
	return node;
      }

    /* These kinds map no base block of their own.  */
    case GOMP_MAP_FORCE_DEVICEPTR:
    case GOMP_MAP_DEVICE_RESIDENT:
    case GOMP_MAP_LINK:
    case GOMP_MAP_FIRSTPRIVATE:
    case GOMP_MAP_FIRSTPRIVATE_INT:
    case GOMP_MAP_USE_DEVICE_PTR:
    case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
      return NULL_TREE;

    case GOMP_MAP_FIRSTPRIVATE_POINTER:
    case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
    case GOMP_MAP_POINTER:
    case GOMP_MAP_ALWAYS_POINTER:
    case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION:
      /* These shouldn't appear by themselves.  */
      if (!seen_error ())
	internal_error ("unexpected pointer mapping node");
      return error_mark_node;

    default:
      gcc_unreachable ();
    }

  return error_mark_node;
}
11335 :
11336 : /* Given a vector of omp_mapping_groups, build a hash table so we can look up
11337 : nodes by tree_operand_hash_no_se. */
11338 :
static void
omp_index_mapping_groups_1 (hash_map<tree_operand_hash_no_se,
			      omp_mapping_group *> *grpmap,
			    vec<omp_mapping_group> *groups,
			    tree reindex_sentinel)
{
  omp_mapping_group *grp;
  unsigned int i;
  /* When REINDEX_SENTINEL is set we are re-indexing a partially processed
     list: only groups at or after the sentinel node are (re)indexed.  */
  bool reindexing = reindex_sentinel != NULL_TREE, above_hwm = false;

  FOR_EACH_VEC_ELT (*groups, i, grp)
    {
      if (reindexing && *grp->grp_start == reindex_sentinel)
	above_hwm = true;

      if (reindexing && !above_hwm)
	continue;

      /* Groups flagged for later struct reprocessing are skipped.  */
      if (grp->reprocess_struct)
	continue;

      tree fpp;
      unsigned int chained;
      tree node = omp_group_base (grp, &chained, &fpp);

      /* Nothing to index for this group.  */
      if (node == error_mark_node || (!node && !fpp))
	continue;

      /* Index each of the CHAINED base decls (more than one only for
	 GOMP_MAP_STRUCT groups).  */
      for (unsigned j = 0;
	   node && j < chained;
	   node = OMP_CLAUSE_CHAIN (node), j++)
	{
	  tree decl = OMP_CLAUSE_DECL (node);
	  /* Sometimes we see zero-offset MEM_REF instead of INDIRECT_REF,
	     meaning node-hash lookups don't work.  This is a workaround for
	     that, but ideally we should just create the INDIRECT_REF at
	     source instead.  FIXME.  */
	  if (TREE_CODE (decl) == MEM_REF
	      && integer_zerop (TREE_OPERAND (decl, 1)))
	    decl = build_fold_indirect_ref (TREE_OPERAND (decl, 0));

	  omp_mapping_group **prev = grpmap->get (decl);

	  if (prev && *prev == grp)
	    /* Empty.  */;
	  else if (prev)
	    {
	      /* Mapping the same thing twice is normally diagnosed as an error,
		 but can happen under some circumstances, e.g. in pr99928-16.c,
		 the directive:

		 #pragma omp target simd reduction(+:a[:3]) \
			 map(always, tofrom: a[:6])
		 ...

		 will result in two "a[0]" mappings (of different sizes).  */

	      /* Chain the duplicate onto the existing entry's sibling list.  */
	      grp->sibling = (*prev)->sibling;
	      (*prev)->sibling = grp;
	    }
	  else
	    grpmap->put (decl, grp);
	}

      if (!fpp)
	continue;

      /* Also index the firstprivate pointer/reference decl, chaining as a
	 sibling on collision, just as above.  */
      omp_mapping_group **prev = grpmap->get (fpp);
      if (prev && *prev != grp)
	{
	  grp->sibling = (*prev)->sibling;
	  (*prev)->sibling = grp;
	}
      else
	grpmap->put (fpp, grp);
    }
}
11416 :
11417 : static hash_map<tree_operand_hash_no_se, omp_mapping_group *> *
11418 42979 : omp_index_mapping_groups (vec<omp_mapping_group> *groups)
11419 : {
11420 42979 : hash_map<tree_operand_hash_no_se, omp_mapping_group *> *grpmap
11421 42979 : = new hash_map<tree_operand_hash_no_se, omp_mapping_group *>;
11422 :
11423 42979 : omp_index_mapping_groups_1 (grpmap, groups, NULL_TREE);
11424 :
11425 42979 : return grpmap;
11426 : }
11427 :
11428 : /* Rebuild group map from partially-processed clause list (during
11429 : omp_build_struct_sibling_lists). We have already processed nodes up until
11430 : a high-water mark (HWM). This is a bit tricky because the list is being
11431 : reordered as it is scanned, but we know:
11432 :
11433 : 1. The list after HWM has not been touched yet, so we can reindex it safely.
11434 :
11435 : 2. The list before and including HWM has been altered, but remains
11436 : well-formed throughout the sibling-list building operation.
11437 :
11438 : so, we can do the reindex operation in two parts, on the processed and
11439 : then the unprocessed halves of the list. */
11440 :
11441 : static hash_map<tree_operand_hash_no_se, omp_mapping_group *> *
11442 318 : omp_reindex_mapping_groups (tree *list_p,
11443 : vec<omp_mapping_group> *groups,
11444 : vec<omp_mapping_group> *processed_groups,
11445 : tree sentinel)
11446 : {
11447 318 : hash_map<tree_operand_hash_no_se, omp_mapping_group *> *grpmap
11448 318 : = new hash_map<tree_operand_hash_no_se, omp_mapping_group *>;
11449 :
11450 318 : processed_groups->truncate (0);
11451 :
11452 318 : omp_gather_mapping_groups_1 (list_p, processed_groups, sentinel);
11453 318 : omp_index_mapping_groups_1 (grpmap, processed_groups, NULL_TREE);
11454 318 : if (sentinel)
11455 166 : omp_index_mapping_groups_1 (grpmap, groups, sentinel);
11456 :
11457 318 : return grpmap;
11458 : }
11459 :
11460 : /* Find the immediately-containing struct for a component ref (etc.)
11461 : expression EXPR. */
11462 :
11463 : static tree
11464 45485 : omp_containing_struct (tree expr)
11465 : {
11466 45485 : tree expr0 = expr;
11467 :
11468 45485 : STRIP_NOPS (expr);
11469 :
11470 : /* Note: don't strip NOPs unless we're also stripping off array refs or a
11471 : component ref. */
11472 45485 : if (TREE_CODE (expr) != ARRAY_REF && TREE_CODE (expr) != COMPONENT_REF)
11473 : return expr0;
11474 :
11475 25986 : while (TREE_CODE (expr) == ARRAY_REF)
11476 3546 : expr = TREE_OPERAND (expr, 0);
11477 :
11478 22440 : if (TREE_CODE (expr) == COMPONENT_REF)
11479 20115 : expr = TREE_OPERAND (expr, 0);
11480 :
11481 : return expr;
11482 : }
11483 :
11484 : /* Return TRUE if DECL describes a component that is part of a whole structure
11485 : that is mapped elsewhere in GRPMAP. *MAPPED_BY_GROUP is set to the group
11486 : that maps that structure, if present. */
11487 :
11488 : static bool
11489 24284 : omp_mapped_by_containing_struct (hash_map<tree_operand_hash_no_se,
11490 : omp_mapping_group *> *grpmap,
11491 : tree decl,
11492 : omp_mapping_group **mapped_by_group)
11493 : {
11494 24284 : tree wsdecl = NULL_TREE;
11495 :
11496 24284 : *mapped_by_group = NULL;
11497 :
11498 45485 : while (true)
11499 : {
11500 45485 : wsdecl = omp_containing_struct (decl);
11501 45485 : if (wsdecl == decl)
11502 : break;
11503 22440 : omp_mapping_group **wholestruct = grpmap->get (wsdecl);
11504 22440 : if (!wholestruct
11505 19871 : && TREE_CODE (wsdecl) == MEM_REF
11506 22440 : && integer_zerop (TREE_OPERAND (wsdecl, 1)))
11507 : {
11508 0 : tree deref = TREE_OPERAND (wsdecl, 0);
11509 0 : deref = build_fold_indirect_ref (deref);
11510 0 : wholestruct = grpmap->get (deref);
11511 : }
11512 22440 : if (wholestruct)
11513 : {
11514 : /* An intermediate descriptor should not match here because the
11515 : pointee is actually not mapped by this group -- it is just a
11516 : zero-length alloc. */
11517 2569 : tree desc = OMP_CLAUSE_CHAIN (*(*wholestruct)->grp_start);
11518 2569 : if (desc != NULL_TREE && omp_map_clause_descriptor_p (desc))
11519 1330 : goto next;
11520 1239 : *mapped_by_group = *wholestruct;
11521 1239 : return true;
11522 : }
11523 19871 : next:
11524 : decl = wsdecl;
11525 : }
11526 :
11527 : return false;
11528 : }
11529 :
11530 : /* Helper function for omp_tsort_mapping_groups. Returns TRUE on success, or
11531 : FALSE on error. */
11532 :
11533 : static bool
11534 19362 : omp_tsort_mapping_groups_1 (omp_mapping_group ***outlist,
11535 : vec<omp_mapping_group> *groups,
11536 : hash_map<tree_operand_hash_no_se,
11537 : omp_mapping_group *> *grpmap,
11538 : omp_mapping_group *grp)
11539 : {
11540 19362 : if (grp->mark == PERMANENT)
11541 : return true;
11542 16044 : if (grp->mark == TEMPORARY)
11543 : {
11544 0 : fprintf (stderr, "when processing group:\n");
11545 0 : debug_mapping_group (grp);
11546 0 : internal_error ("base pointer cycle detected");
11547 : return false;
11548 : }
11549 16044 : grp->mark = TEMPORARY;
11550 :
11551 16044 : tree attaches_to = omp_get_attachment (grp);
11552 :
11553 16044 : if (attaches_to)
11554 : {
11555 3049 : omp_mapping_group **basep = grpmap->get (attaches_to);
11556 :
11557 3049 : if (basep && *basep != grp)
11558 : {
11559 2920 : for (omp_mapping_group *w = *basep; w; w = w->sibling)
11560 1460 : if (!omp_tsort_mapping_groups_1 (outlist, groups, grpmap, w))
11561 : return false;
11562 : }
11563 : }
11564 :
11565 16044 : tree decl = OMP_CLAUSE_DECL (*grp->grp_start);
11566 :
11567 22119 : while (decl)
11568 : {
11569 22119 : tree base = omp_get_base_pointer (decl);
11570 :
11571 22119 : if (!base)
11572 : break;
11573 :
11574 8809 : omp_mapping_group **innerp = grpmap->get (base);
11575 8809 : omp_mapping_group *wholestruct;
11576 :
11577 : /* We should treat whole-structure mappings as if all (pointer, in this
11578 : case) members are mapped as individual list items. Check if we have
11579 : such a whole-structure mapping, if we don't have an explicit reference
11580 : to the pointer member itself. */
11581 8809 : if (!innerp
11582 4068 : && TREE_CODE (base) == COMPONENT_REF
11583 10953 : && omp_mapped_by_containing_struct (grpmap, base, &wholestruct))
11584 : innerp = &wholestruct;
11585 :
11586 8809 : if (innerp && *innerp != grp)
11587 : {
11588 5468 : for (omp_mapping_group *w = *innerp; w; w = w->sibling)
11589 2734 : if (!omp_tsort_mapping_groups_1 (outlist, groups, grpmap, w))
11590 0 : return false;
11591 : break;
11592 : }
11593 :
11594 6075 : decl = base;
11595 : }
11596 :
11597 16044 : grp->mark = PERMANENT;
11598 :
11599 : /* Emit grp to output list. */
11600 :
11601 16044 : **outlist = grp;
11602 16044 : *outlist = &grp->next;
11603 :
11604 16044 : return true;
11605 : }
11606 :
11607 : /* Topologically sort GROUPS, so that OMP 5.0-defined base pointers come
11608 : before mappings that use those pointers. This is an implementation of the
11609 : depth-first search algorithm, described e.g. at:
11610 :
11611 : https://en.wikipedia.org/wiki/Topological_sorting
11612 : */
11613 :
static omp_mapping_group *
omp_tsort_mapping_groups (vec<omp_mapping_group> *groups,
			  hash_map<tree_operand_hash_no_se, omp_mapping_group *>
			    *grpmap,
			  bool enter_exit_data)
{
  omp_mapping_group *grp, *outlist = NULL, **cursor;
  unsigned int i;
  bool saw_runtime_implicit = false;

  cursor = &outlist;

  /* First pass: sort all explicitly-mapped groups, deferring any group
     flagged as runtime-implicit to the second pass so they end up after
     the explicit ones.  */
  FOR_EACH_VEC_ELT (*groups, i, grp)
    {
      if (grp->mark != PERMANENT)
	{
	  if (OMP_CLAUSE_MAP_RUNTIME_IMPLICIT_P (*grp->grp_start))
	    {
	      saw_runtime_implicit = true;
	      continue;
	    }
	  if (!omp_tsort_mapping_groups_1 (&cursor, groups, grpmap, grp))
	    return NULL;
	}
    }

  if (!saw_runtime_implicit)
    return outlist;

  /* Second pass: sort the deferred runtime-implicit groups.  */
  FOR_EACH_VEC_ELT (*groups, i, grp)
    {
      if (grp->mark != PERMANENT
	  && OMP_CLAUSE_MAP_RUNTIME_IMPLICIT_P (*grp->grp_start))
	{
	  /* Clear the flag for enter/exit data because it is currently
	     meaningless for those operations in libgomp.  */
	  if (enter_exit_data)
	    OMP_CLAUSE_MAP_RUNTIME_IMPLICIT_P (*grp->grp_start) = 0;

	  if (!omp_tsort_mapping_groups_1 (&cursor, groups, grpmap, grp))
	    return NULL;
	}
    }

  return outlist;
}
11660 :
11661 : /* Split INLIST into three parts:
11662 :
11663 : - "present" alloc/to/from groups
11664 : - other to/from groups
11665 : - other alloc/release/delete groups
11666 :
11667 : These sub-lists are then concatenated together to form the final list.
11668 : Each sub-list retains the order of the original list.
11669 : Note that ATTACH nodes are later moved to the end of the list in
11670 : gimplify_adjust_omp_clauses, for target regions. */
11671 :
11672 : static omp_mapping_group *
11673 7952 : omp_segregate_mapping_groups (omp_mapping_group *inlist)
11674 : {
11675 7952 : omp_mapping_group *ard_groups = NULL, *tf_groups = NULL;
11676 7952 : omp_mapping_group *p_groups = NULL;
11677 7952 : omp_mapping_group **ard_tail = &ard_groups, **tf_tail = &tf_groups;
11678 7952 : omp_mapping_group **p_tail = &p_groups;
11679 :
11680 23996 : for (omp_mapping_group *w = inlist; w;)
11681 : {
11682 16044 : tree c = *w->grp_start;
11683 16044 : omp_mapping_group *next = w->next;
11684 :
11685 16044 : gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP);
11686 :
11687 16044 : switch (OMP_CLAUSE_MAP_KIND (c))
11688 : {
11689 1815 : case GOMP_MAP_ALLOC:
11690 1815 : case GOMP_MAP_RELEASE:
11691 1815 : case GOMP_MAP_DELETE:
11692 1815 : *ard_tail = w;
11693 1815 : w->next = NULL;
11694 1815 : ard_tail = &w->next;
11695 1815 : break;
11696 :
11697 : /* These map types are all semantically identical, so are moved into a
11698 : single group. They will each be changed into GOMP_MAP_FORCE_PRESENT
11699 : in gimplify_adjust_omp_clauses. */
11700 125 : case GOMP_MAP_PRESENT_ALLOC:
11701 125 : case GOMP_MAP_PRESENT_FROM:
11702 125 : case GOMP_MAP_PRESENT_TO:
11703 125 : case GOMP_MAP_PRESENT_TOFROM:
11704 125 : *p_tail = w;
11705 125 : w->next = NULL;
11706 125 : p_tail = &w->next;
11707 125 : break;
11708 :
11709 14104 : default:
11710 14104 : *tf_tail = w;
11711 14104 : w->next = NULL;
11712 14104 : tf_tail = &w->next;
11713 : }
11714 :
11715 : w = next;
11716 : }
11717 :
11718 : /* Now splice the lists together... */
11719 7952 : *tf_tail = ard_groups;
11720 7952 : *p_tail = tf_groups;
11721 :
11722 7952 : return p_groups;
11723 : }
11724 :
11725 : /* Given a list LIST_P containing groups of mappings given by GROUPS, reorder
11726 : those groups based on the output list of omp_tsort_mapping_groups --
11727 : singly-linked, threaded through each element's NEXT pointer starting at
11728 : HEAD. Each list element appears exactly once in that linked list.
11729 :
11730 : Each element of GROUPS may correspond to one or several mapping nodes.
11731 : Node groups are kept together, and in the reordered list, the positions of
11732 : the original groups are reused for the positions of the reordered list.
11733 : Hence if we have e.g.
11734 :
11735 : {to ptr ptr} firstprivate {tofrom ptr} ...
11736 : ^ ^ ^
11737 : first group non-"map" second group
11738 :
11739 : and say the second group contains a base pointer for the first so must be
11740 : moved before it, the resulting list will contain:
11741 :
11742 : {tofrom ptr} firstprivate {to ptr ptr} ...
11743 : ^ prev. second group ^ prev. first group
11744 : */
11745 :
static tree *
omp_reorder_mapping_groups (vec<omp_mapping_group> *groups,
			    omp_mapping_group *head,
			    tree *list_p)
{
  omp_mapping_group *grp;
  unsigned int i;
  unsigned numgroups = groups->length ();
  auto_vec<tree> old_heads (numgroups);
  auto_vec<tree *> old_headps (numgroups);
  auto_vec<tree> new_heads (numgroups);
  auto_vec<tree> old_succs (numgroups);
  /* Remember whether LIST_P itself points at the first group, so we can
     return the (possibly moved) list start at the end.  */
  bool map_at_start = (list_p == (*groups)[0].grp_start);

  tree *new_grp_tail = NULL;

  /* Stash the start & end nodes of each mapping group before we start
     modifying the list.  */
  FOR_EACH_VEC_ELT (*groups, i, grp)
    {
      old_headps.quick_push (grp->grp_start);
      old_heads.quick_push (*grp->grp_start);
      old_succs.quick_push (OMP_CLAUSE_CHAIN (grp->grp_end));
    }

  /* And similarly, the heads of the groups in the order we want to rearrange
     the list to.  */
  for (omp_mapping_group *w = head; w; w = w->next)
    new_heads.quick_push (*w->grp_start);

  FOR_EACH_VEC_ELT (*groups, i, grp)
    {
      gcc_assert (head);

      if (new_grp_tail && old_succs[i - 1] == old_heads[i])
	{
	  /* a {b c d} {e f g} h i j   (original)
	     -->
	     a {k l m} {e f g} h i j   (inserted new group on last iter)
	     -->
	     a {k l m} {n o p} h i j   (this time, chain last group to new one)
		      ^new_grp_tail
	  */
	  *new_grp_tail = new_heads[i];
	}
      else if (new_grp_tail)
	{
	  /* a {b c d} e {f g h} i j k   (original)
	     -->
	     a {l m n} e {f g h} i j k   (gap after last iter's group)
	     -->
	     a {l m n} e {o p q} h i j   (chain last group to old successor)
		      ^new_grp_tail
	  */
	  *new_grp_tail = old_succs[i - 1];
	  *old_headps[i] = new_heads[i];
	}
      else
	{
	  /* The first inserted group -- point to new group, and leave end
	     open.
	     a {b c d} e f
	     -->
	     a {g h i...
	  */
	  *grp->grp_start = new_heads[i];
	}

      new_grp_tail = &OMP_CLAUSE_CHAIN (head->grp_end);

      head = head->next;
    }

  /* Close the last inserted group by chaining it to the successor of the
     last original group.  */
  if (new_grp_tail)
    *new_grp_tail = old_succs[numgroups - 1];

  gcc_assert (!head);

  return map_at_start ? (*groups)[0].grp_start : list_p;
}
11826 :
11827 : /* DECL is supposed to have lastprivate semantics in the outer contexts
11828 : of combined/composite constructs, starting with OCTX.
11829 : Add needed lastprivate, shared or map clause if no data sharing or
11830 : mapping clause are present. IMPLICIT_P is true if it is an implicit
11831 : clause (IV on simd), in which case the lastprivate will not be
11832 : copied to some constructs. */
11833 :
static void
omp_lastprivate_for_combined_outer_constructs (struct gimplify_omp_ctx *octx,
					       tree decl, bool implicit_p)
{
  struct gimplify_omp_ctx *orig_octx = octx;
  /* Walk outwards through the enclosing OMP contexts, adding a suitable
     data-sharing/mapping entry for DECL to each until one is found that
     already knows about it (or no rule applies).  */
  for (; octx; octx = octx->outer_context)
    {
      /* Combined parallel/teams: DECL becomes shared.  */
      if ((octx->region_type == ORT_COMBINED_PARALLEL
	   || (octx->region_type & ORT_COMBINED_TEAMS) == ORT_COMBINED_TEAMS)
	  && splay_tree_lookup (octx->variables,
				(splay_tree_key) decl) == NULL)
	{
	  omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN);
	  continue;
	}
      /* Taskloop-style combined loop: DECL becomes lastprivate.  */
      if ((octx->region_type & ORT_TASK) != 0
	  && octx->combined_loop
	  && splay_tree_lookup (octx->variables,
				(splay_tree_key) decl) == NULL)
	{
	  omp_add_variable (octx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
	  continue;
	}
      /* Implicit IV on a worksharing loop combined with parallel: record
	 the lastprivate on the parallel context instead.  */
      if (implicit_p
	  && octx->region_type == ORT_WORKSHARE
	  && octx->combined_loop
	  && splay_tree_lookup (octx->variables,
				(splay_tree_key) decl) == NULL
	  && octx->outer_context
	  && octx->outer_context->region_type == ORT_COMBINED_PARALLEL
	  && splay_tree_lookup (octx->outer_context->variables,
				(splay_tree_key) decl) == NULL)
	{
	  octx = octx->outer_context;
	  omp_add_variable (octx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
	  continue;
	}
      /* Worksharing or OpenACC combined loop where DECL is not already
	 private: DECL becomes lastprivate.  */
      if ((octx->region_type == ORT_WORKSHARE || octx->region_type == ORT_ACC)
	  && octx->combined_loop
	  && splay_tree_lookup (octx->variables,
				(splay_tree_key) decl) == NULL
	  && !omp_check_private (octx, decl, false))
	{
	  omp_add_variable (octx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
	  continue;
	}
      if (octx->region_type == ORT_COMBINED_TARGET)
	{
	  splay_tree_node n = splay_tree_lookup (octx->variables,
						 (splay_tree_key) decl);
	  if (n == NULL)
	    {
	      /* Not yet on the target: map it there.  */
	      omp_add_variable (octx, decl, GOVD_MAP | GOVD_SEEN);
	      octx = octx->outer_context;
	    }
	  else if (!implicit_p
		   && (n->value & GOVD_FIRSTPRIVATE_IMPLICIT))
	    {
	      /* Upgrade an implicit firstprivate on the target to a map,
		 since the explicit lastprivate needs the value back.  */
	      n->value &= ~(GOVD_FIRSTPRIVATE
			    | GOVD_FIRSTPRIVATE_IMPLICIT
			    | GOVD_EXPLICIT);
	      omp_add_variable (octx, decl, GOVD_MAP | GOVD_SEEN);
	      octx = octx->outer_context;
	    }
	}
      break;
    }
  /* Mark DECL as used in the outermost context we reached, unless the walk
     stopped immediately for an explicit clause.  */
  if (octx && (implicit_p || octx != orig_octx))
    omp_notice_variable (octx, decl, true);
}
11904 :
11905 : /* We might have indexed several groups for DECL, e.g. a "TO" mapping and also
11906 : a "FIRSTPRIVATE" mapping. Return the one that isn't firstprivate, etc. */
11907 :
11908 : static omp_mapping_group *
11909 5850 : omp_get_nonfirstprivate_group (hash_map<tree_operand_hash_no_se,
11910 : omp_mapping_group *> *grpmap,
11911 : tree decl, bool allow_deleted = false)
11912 : {
11913 5850 : omp_mapping_group **to_group_p = grpmap->get (decl);
11914 :
11915 5850 : if (!to_group_p)
11916 : return NULL;
11917 :
11918 2357 : omp_mapping_group *to_group = *to_group_p;
11919 :
11920 3618 : for (; to_group; to_group = to_group->sibling)
11921 : {
11922 2394 : tree grp_end = to_group->grp_end;
11923 2394 : switch (OMP_CLAUSE_MAP_KIND (grp_end))
11924 : {
11925 : case GOMP_MAP_FIRSTPRIVATE_POINTER:
11926 : case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
11927 : break;
11928 :
11929 1135 : default:
11930 1135 : if (allow_deleted || !to_group->deleted)
11931 : return to_group;
11932 : }
11933 : }
11934 :
11935 : return NULL;
11936 : }
11937 :
11938 : /* Return TRUE if the directive (whose clauses are described by the hash table
11939 : of mapping groups, GRPMAP) maps DECL explicitly. If TO_SPECIFICALLY is
11940 : true, only count TO mappings. If ALLOW_DELETED is true, ignore the
11941 : "deleted" flag for groups. If CONTAINED_IN_STRUCT is true, also return
11942 : TRUE if DECL is mapped as a member of a whole-struct mapping. */
11943 :
11944 : static bool
11945 4388 : omp_directive_maps_explicitly (hash_map<tree_operand_hash_no_se,
11946 : omp_mapping_group *> *grpmap,
11947 : tree decl, omp_mapping_group **base_group,
11948 : bool to_specifically, bool allow_deleted,
11949 : bool contained_in_struct)
11950 : {
11951 4388 : omp_mapping_group *decl_group
11952 4388 : = omp_get_nonfirstprivate_group (grpmap, decl, allow_deleted);
11953 :
11954 4388 : *base_group = NULL;
11955 :
11956 4388 : if (decl_group)
11957 : {
11958 1016 : tree grp_first = *decl_group->grp_start;
11959 : /* We might be called during omp_build_struct_sibling_lists, when
11960 : GOMP_MAP_STRUCT might have been inserted at the start of the group.
11961 : Skip over that, and also possibly the node after it. */
11962 1016 : if (OMP_CLAUSE_MAP_KIND (grp_first) == GOMP_MAP_STRUCT
11963 1016 : || OMP_CLAUSE_MAP_KIND (grp_first) == GOMP_MAP_STRUCT_UNORD)
11964 : {
11965 6 : grp_first = OMP_CLAUSE_CHAIN (grp_first);
11966 6 : if (OMP_CLAUSE_MAP_KIND (grp_first) == GOMP_MAP_FIRSTPRIVATE_POINTER
11967 6 : || (OMP_CLAUSE_MAP_KIND (grp_first)
11968 : == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
11969 12 : || OMP_CLAUSE_MAP_KIND (grp_first) == GOMP_MAP_ATTACH_DETACH)
11970 0 : grp_first = OMP_CLAUSE_CHAIN (grp_first);
11971 : }
11972 1016 : enum gomp_map_kind first_kind = OMP_CLAUSE_MAP_KIND (grp_first);
11973 1016 : if (!to_specifically
11974 566 : || GOMP_MAP_COPY_TO_P (first_kind)
11975 357 : || first_kind == GOMP_MAP_ALLOC)
11976 : {
11977 959 : *base_group = decl_group;
11978 959 : return true;
11979 : }
11980 : }
11981 :
11982 3429 : if (contained_in_struct
11983 3429 : && omp_mapped_by_containing_struct (grpmap, decl, base_group))
11984 : return true;
11985 :
11986 : return false;
11987 : }
11988 :
11989 : /* If we have mappings INNER and OUTER, where INNER is a component access and
11990 : OUTER is a mapping of the whole containing struct, check that the mappings
11991 : are compatible. We'll be deleting the inner mapping, so we need to make
11992 : sure the outer mapping does (at least) the same transfers to/from the device
11993 : as the inner mapping. */
11994 :
11995 : bool
11996 172 : omp_check_mapping_compatibility (location_t loc,
11997 : omp_mapping_group *outer,
11998 : omp_mapping_group *inner)
11999 : {
12000 172 : tree first_outer = *outer->grp_start, first_inner = *inner->grp_start;
12001 :
12002 172 : gcc_assert (OMP_CLAUSE_CODE (first_outer) == OMP_CLAUSE_MAP);
12003 172 : gcc_assert (OMP_CLAUSE_CODE (first_inner) == OMP_CLAUSE_MAP);
12004 :
12005 172 : enum gomp_map_kind outer_kind = OMP_CLAUSE_MAP_KIND (first_outer);
12006 172 : enum gomp_map_kind inner_kind = OMP_CLAUSE_MAP_KIND (first_inner);
12007 :
12008 172 : if (outer_kind == inner_kind)
12009 : return true;
12010 :
12011 70 : switch (outer_kind)
12012 : {
12013 0 : case GOMP_MAP_ALWAYS_TO:
12014 0 : if (inner_kind == GOMP_MAP_FORCE_PRESENT
12015 0 : || inner_kind == GOMP_MAP_ALLOC
12016 0 : || inner_kind == GOMP_MAP_TO)
12017 : return true;
12018 : break;
12019 :
12020 0 : case GOMP_MAP_ALWAYS_FROM:
12021 0 : if (inner_kind == GOMP_MAP_FORCE_PRESENT
12022 0 : || inner_kind == GOMP_MAP_RELEASE
12023 : || inner_kind == GOMP_MAP_FROM)
12024 : return true;
12025 : break;
12026 :
12027 10 : case GOMP_MAP_TO:
12028 10 : if (inner_kind == GOMP_MAP_FORCE_PRESENT
12029 10 : || inner_kind == GOMP_MAP_ALLOC)
12030 : return true;
12031 : break;
12032 :
12033 8 : case GOMP_MAP_FROM:
12034 8 : if (inner_kind == GOMP_MAP_RELEASE
12035 8 : || inner_kind == GOMP_MAP_FORCE_PRESENT)
12036 : return true;
12037 : break;
12038 :
12039 32 : case GOMP_MAP_ALWAYS_TOFROM:
12040 32 : case GOMP_MAP_TOFROM:
12041 32 : if (inner_kind == GOMP_MAP_FORCE_PRESENT
12042 32 : || inner_kind == GOMP_MAP_ALLOC
12043 : || inner_kind == GOMP_MAP_TO
12044 24 : || inner_kind == GOMP_MAP_FROM
12045 12 : || inner_kind == GOMP_MAP_TOFROM)
12046 : return true;
12047 : break;
12048 :
12049 28 : default:
12050 28 : ;
12051 : }
12052 :
12053 84 : error_at (loc, "data movement for component %qE is not compatible with "
12054 28 : "movement for struct %qE", OMP_CLAUSE_DECL (first_inner),
12055 28 : OMP_CLAUSE_DECL (first_outer));
12056 :
12057 28 : return false;
12058 : }
12059 :
12060 : /* This function handles several cases where clauses on a mapping directive
12061 : can interact with each other.
12062 :
12063 : If we have a FIRSTPRIVATE_POINTER node and we're also mapping the pointer
12064 : on the same directive, change the mapping of the first node to
12065 : ATTACH_DETACH. We should have detected that this will happen already in
12066 : c-omp.cc:c_omp_adjust_map_clauses and marked the appropriate decl
12067 : as addressable. (If we didn't, bail out.)
12068 :
12069 : If we have a FIRSTPRIVATE_REFERENCE (for a reference to pointer) and we're
12070 : mapping the base pointer also, we may need to change the mapping type to
12071 : ATTACH_DETACH and synthesize an alloc node for the reference itself.
12072 :
12073 : If we have an ATTACH_DETACH node, this is an array section with a pointer
12074 : base. If we're mapping the base on the same directive too, we can drop its
12075 : mapping. However, if we have a reference to pointer, make other appropriate
12076 : adjustments to the mapping nodes instead.
12077 :
12078 : If we have an ATTACH_DETACH node with a Fortran pointer-set (array
12079 : descriptor) mapping for a derived-type component, and we're also mapping the
12080 : whole of the derived-type variable on another clause, the pointer-set
12081 : mapping is removed.
12082 :
12083 : If we have a component access but we're also mapping the whole of the
12084 : containing struct, drop the former access.
12085 :
12086 : If the expression is a component access, and we're also mapping a base
12087 : pointer used in that component access in the same expression, change the
12088 : mapping type of the latter to ALLOC (ready for processing by
12089 : omp_build_struct_sibling_lists). */
12090 :
void
omp_resolve_clause_dependencies (enum tree_code code,
				 vec<omp_mapping_group> *groups,
				 hash_map<tree_operand_hash_no_se,
					  omp_mapping_group *> *grpmap)
{
  int i;
  omp_mapping_group *grp;
  /* Set when a group's end node is removed, leaving a later group's
     grp_start pointing into freed-from-the-chain territory; fixed up in the
     repair loop at the bottom.  */
  bool repair_chain = false;

  FOR_EACH_VEC_ELT (*groups, i, grp)
    {
      tree grp_end = grp->grp_end;
      tree decl = OMP_CLAUSE_DECL (grp_end);

      gcc_assert (OMP_CLAUSE_CODE (grp_end) == OMP_CLAUSE_MAP);

      switch (OMP_CLAUSE_MAP_KIND (grp_end))
	{
	case GOMP_MAP_FIRSTPRIVATE_POINTER:
	  {
	    /* If the pointer is also mapped (non-firstprivate) on this
	       directive, turn this node into an attach/detach.  */
	    omp_mapping_group *to_group
	      = omp_get_nonfirstprivate_group (grpmap, decl);

	    if (!to_group || to_group == grp)
	      continue;

	    tree grp_first = *to_group->grp_start;
	    enum gomp_map_kind first_kind = OMP_CLAUSE_MAP_KIND (grp_first);

	    if ((GOMP_MAP_COPY_TO_P (first_kind)
		 || first_kind == GOMP_MAP_ALLOC)
		&& (OMP_CLAUSE_MAP_KIND (to_group->grp_end)
		    != GOMP_MAP_FIRSTPRIVATE_POINTER))
	      {
		/* c_omp_adjust_map_clauses should already have marked the
		   decl addressable for this case.  */
		gcc_assert (TREE_ADDRESSABLE (OMP_CLAUSE_DECL (grp_end)));
		OMP_CLAUSE_SET_MAP_KIND (grp_end, GOMP_MAP_ATTACH_DETACH);
	      }
	  }
	  break;

	case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
	  {
	    /* For a reference to a pointer, look up the pointed-to pointer
	       in the group map.  */
	    tree ptr = build_fold_indirect_ref (decl);

	    omp_mapping_group *to_group
	      = omp_get_nonfirstprivate_group (grpmap, ptr);

	    if (!to_group || to_group == grp)
	      continue;

	    tree grp_first = *to_group->grp_start;
	    enum gomp_map_kind first_kind = OMP_CLAUSE_MAP_KIND (grp_first);

	    if (GOMP_MAP_COPY_TO_P (first_kind)
		|| first_kind == GOMP_MAP_ALLOC)
	      {
		OMP_CLAUSE_SET_MAP_KIND (grp_end, GOMP_MAP_ATTACH_DETACH);
		OMP_CLAUSE_DECL (grp_end) = ptr;
		if ((OMP_CLAUSE_CHAIN (*to_group->grp_start)
		     == to_group->grp_end)
		    && (OMP_CLAUSE_MAP_KIND (to_group->grp_end)
			== GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  {
		    gcc_assert (TREE_ADDRESSABLE
				  (OMP_CLAUSE_DECL (to_group->grp_end)));
		    OMP_CLAUSE_SET_MAP_KIND (to_group->grp_end,
					     GOMP_MAP_ATTACH_DETACH);

		    /* Synthesize an ALLOC node for the reference itself and
		       splice it in after the group's first node.  */
		    location_t loc = OMP_CLAUSE_LOCATION (to_group->grp_end);
		    tree alloc
		      = build_omp_clause (loc, OMP_CLAUSE_MAP);
		    OMP_CLAUSE_SET_MAP_KIND (alloc, GOMP_MAP_ALLOC);
		    tree tmp = build_fold_addr_expr (OMP_CLAUSE_DECL
						      (to_group->grp_end));
		    tree char_ptr_type = build_pointer_type (char_type_node);
		    OMP_CLAUSE_DECL (alloc)
		      = build2 (MEM_REF, char_type_node,
				tmp,
				build_int_cst (char_ptr_type, 0));
		    OMP_CLAUSE_SIZE (alloc) = TYPE_SIZE_UNIT (TREE_TYPE (tmp));

		    OMP_CLAUSE_CHAIN (alloc)
		      = OMP_CLAUSE_CHAIN (*to_group->grp_start);
		    OMP_CLAUSE_CHAIN (*to_group->grp_start) = alloc;
		  }
	      }
	  }
	  break;

	case GOMP_MAP_ATTACH_DETACH:
	case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
	  {
	    tree base_ptr, referenced_ptr_node = NULL_TREE;

	    /* Strip array indexing and one level of indirection to get at
	       the underlying base expression.  */
	    while (TREE_CODE (decl) == ARRAY_REF)
	      decl = TREE_OPERAND (decl, 0);

	    if (TREE_CODE (decl) == INDIRECT_REF)
	      decl = TREE_OPERAND (decl, 0);

	    /* Only component accesses.  */
	    if (DECL_P (decl))
	      continue;

	    /* We want the pointer itself when checking if the base pointer is
	       mapped elsewhere in the same directive -- if we have a
	       reference to the pointer, don't use that.  */

	    if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
		&& TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
	      {
		referenced_ptr_node = OMP_CLAUSE_CHAIN (*grp->grp_start);
		base_ptr = OMP_CLAUSE_DECL (referenced_ptr_node);
	      }
	    else
	      base_ptr = decl;

	    /* On "exit data" directives we detach rather than (zero-length)
	       attach.  */
	    gomp_map_kind zlas_kind
	      = (code == OACC_EXIT_DATA || code == OMP_TARGET_EXIT_DATA)
		? GOMP_MAP_DETACH : GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION;

	    if (TREE_CODE (TREE_TYPE (base_ptr)) == POINTER_TYPE)
	      {
		/* If we map the base TO, and we're doing an attachment, we can
		   skip the TO mapping altogether and create an ALLOC mapping
		   instead, since the attachment will overwrite the device
		   pointer in that location immediately anyway.  Otherwise,
		   change our mapping to
		   GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION in case the
		   attachment target has not been copied to the device already
		   by some earlier directive.  */

		bool base_mapped_to = false;

		omp_mapping_group *base_group;

		if (omp_directive_maps_explicitly (grpmap, base_ptr,
						   &base_group, false, true,
						   false))
		  {
		    if (referenced_ptr_node)
		      {
			base_mapped_to = true;
			if ((OMP_CLAUSE_MAP_KIND (base_group->grp_end)
			     == GOMP_MAP_ATTACH_DETACH)
			    && (OMP_CLAUSE_CHAIN (*base_group->grp_start)
				== base_group->grp_end))
			  {
			    /* Drop the trailing attach/detach node of the
			       base group; later grp_start pointers may now
			       be stale, so request chain repair.  */
			    OMP_CLAUSE_CHAIN (*base_group->grp_start)
			      = OMP_CLAUSE_CHAIN (base_group->grp_end);
			    base_group->grp_end = *base_group->grp_start;
			    repair_chain = true;
			  }
		      }
		    else
		      {
			base_group->deleted = true;
			OMP_CLAUSE_ATTACHMENT_MAPPING_ERASED (grp_end) = 1;
		      }
		  }

		/* We're dealing with a reference to a pointer, and we are
		   attaching both the reference and the pointer.  We know the
		   reference itself is on the target, because we are going to
		   create an ALLOC node for it in accumulate_sibling_list.  The
		   pointer might be on the target already or it might not, but
		   if it isn't then it's not an error, so use
		   GOMP_MAP_ATTACH_ZLAS for it.  */
		if (!base_mapped_to && referenced_ptr_node)
		  OMP_CLAUSE_SET_MAP_KIND (referenced_ptr_node, zlas_kind);

		omp_mapping_group *struct_group;
		tree desc;
		if ((desc = OMP_CLAUSE_CHAIN (*grp->grp_start))
		    && omp_map_clause_descriptor_p (desc)
		    && omp_mapped_by_containing_struct (grpmap, decl,
							&struct_group))
		  /* If we have a pointer set but we're mapping (or unmapping)
		     the whole of the containing struct, we can remove the
		     pointer set mapping.  */
		  OMP_CLAUSE_CHAIN (*grp->grp_start) = OMP_CLAUSE_CHAIN (desc);
	      }
	    else if (TREE_CODE (TREE_TYPE (base_ptr)) == REFERENCE_TYPE
		     && (TREE_CODE (TREE_TYPE (TREE_TYPE (base_ptr)))
			 == ARRAY_TYPE)
		     && OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION
			  (*grp->grp_start))
	      OMP_CLAUSE_SET_MAP_KIND (grp->grp_end, zlas_kind);
	  }
	  break;

	case GOMP_MAP_ATTACH:
	  /* Ignore standalone attach here.  */
	  break;

	default:
	  {
	    /* A regular mapping: drop it if the containing struct is mapped
	       as a whole, and turn any explicitly-mapped base pointers used
	       in this access into ALLOC mappings.  */
	    omp_mapping_group *struct_group;
	    if (omp_mapped_by_containing_struct (grpmap, decl, &struct_group)
		&& *grp->grp_start == grp_end)
	      {
		omp_check_mapping_compatibility (OMP_CLAUSE_LOCATION (grp_end),
						 struct_group, grp);
		/* Remove the whole of this mapping -- redundant.  */
		grp->deleted = true;
	      }

	    tree base = decl;
	    while ((base = omp_get_base_pointer (base)))
	      {
		omp_mapping_group *base_group;

		if (omp_directive_maps_explicitly (grpmap, base, &base_group,
						   true, true, false))
		  {
		    tree grp_first = *base_group->grp_start;
		    OMP_CLAUSE_SET_MAP_KIND (grp_first, GOMP_MAP_ALLOC);
		  }
	      }
	  }
	}
    }

  if (repair_chain)
    {
      /* Group start pointers may have become detached from the
	 OMP_CLAUSE_CHAIN of previous groups if elements were removed from the
	 end of those groups.  Fix that now.  */
      tree *new_next = NULL;
      FOR_EACH_VEC_ELT (*groups, i, grp)
	{
	  if (new_next)
	    grp->grp_start = new_next;

	  new_next = &OMP_CLAUSE_CHAIN (grp->grp_end);
	}
    }
}
12330 :
12331 : /* Similar to omp_resolve_clause_dependencies, but for OpenACC. The only
12332 : clause dependencies we handle for now are struct element mappings and
12333 : whole-struct mappings on the same directive, and duplicate clause
12334 : detection. */
12335 :
12336 : void
12337 9457 : oacc_resolve_clause_dependencies (vec<omp_mapping_group> *groups,
12338 : hash_map<tree_operand_hash_no_se,
12339 : omp_mapping_group *> *grpmap)
12340 : {
12341 9457 : int i;
12342 9457 : omp_mapping_group *grp;
12343 9457 : hash_set<tree_operand_hash> *seen_components = NULL;
12344 9457 : hash_set<tree_operand_hash> *shown_error = NULL;
12345 :
12346 24629 : FOR_EACH_VEC_ELT (*groups, i, grp)
12347 : {
12348 15172 : tree grp_end = grp->grp_end;
12349 15172 : tree decl = OMP_CLAUSE_DECL (grp_end);
12350 :
12351 15172 : gcc_assert (OMP_CLAUSE_CODE (grp_end) == OMP_CLAUSE_MAP);
12352 :
12353 15172 : if (DECL_P (grp_end))
12354 14655 : continue;
12355 :
12356 15172 : tree c = OMP_CLAUSE_DECL (*grp->grp_start);
12357 16797 : while (TREE_CODE (c) == ARRAY_REF)
12358 1625 : c = TREE_OPERAND (c, 0);
12359 15172 : if (TREE_CODE (c) != COMPONENT_REF)
12360 14655 : continue;
12361 517 : if (!seen_components)
12362 474 : seen_components = new hash_set<tree_operand_hash> ();
12363 517 : if (!shown_error)
12364 474 : shown_error = new hash_set<tree_operand_hash> ();
12365 517 : if (seen_components->contains (c)
12366 517 : && !shown_error->contains (c))
12367 : {
12368 10 : error_at (OMP_CLAUSE_LOCATION (grp_end),
12369 : "%qE appears more than once in map clauses",
12370 5 : OMP_CLAUSE_DECL (grp_end));
12371 5 : shown_error->add (c);
12372 : }
12373 : else
12374 512 : seen_components->add (c);
12375 :
12376 517 : omp_mapping_group *struct_group;
12377 517 : if (omp_mapped_by_containing_struct (grpmap, decl, &struct_group)
12378 517 : && *grp->grp_start == grp_end)
12379 : {
12380 78 : omp_check_mapping_compatibility (OMP_CLAUSE_LOCATION (grp_end),
12381 : struct_group, grp);
12382 : /* Remove the whole of this mapping -- redundant. */
12383 78 : grp->deleted = true;
12384 : }
12385 : }
12386 :
12387 9457 : if (seen_components)
12388 474 : delete seen_components;
12389 9457 : if (shown_error)
12390 474 : delete shown_error;
12391 9457 : }
12392 :
12393 : /* Link node NEWNODE so it is pointed to by chain INSERT_AT. NEWNODE's chain
12394 : is linked to the previous node pointed to by INSERT_AT. */
12395 :
12396 : static tree *
12397 1076 : omp_siblist_insert_node_after (tree newnode, tree *insert_at)
12398 : {
12399 1076 : OMP_CLAUSE_CHAIN (newnode) = *insert_at;
12400 1076 : *insert_at = newnode;
12401 1076 : return &OMP_CLAUSE_CHAIN (newnode);
12402 : }
12403 :
12404 : /* Move NODE (which is currently pointed to by the chain OLD_POS) so it is
12405 : pointed to by chain MOVE_AFTER instead. */
12406 :
12407 : static void
12408 1152 : omp_siblist_move_node_after (tree node, tree *old_pos, tree *move_after)
12409 : {
12410 1152 : gcc_assert (node == *old_pos);
12411 1152 : *old_pos = OMP_CLAUSE_CHAIN (node);
12412 1152 : OMP_CLAUSE_CHAIN (node) = *move_after;
12413 1152 : *move_after = node;
12414 1152 : }
12415 :
12416 : /* Move nodes from FIRST_PTR (pointed to by previous node's chain) to
12417 : LAST_NODE to after MOVE_AFTER chain. Similar to below function, but no
12418 : new nodes are prepended to the list before splicing into the new position.
12419 : Return the position we should continue scanning the list at, or NULL to
12420 : stay where we were. */
12421 :
12422 : static tree *
12423 254 : omp_siblist_move_nodes_after (tree *first_ptr, tree last_node,
12424 : tree *move_after)
12425 : {
12426 254 : if (first_ptr == move_after)
12427 : return NULL;
12428 :
12429 243 : tree tmp = *first_ptr;
12430 243 : *first_ptr = OMP_CLAUSE_CHAIN (last_node);
12431 243 : OMP_CLAUSE_CHAIN (last_node) = *move_after;
12432 243 : *move_after = tmp;
12433 :
12434 243 : return first_ptr;
12435 : }
12436 :
12437 : /* Concatenate two lists described by [FIRST_NEW, LAST_NEW_TAIL] and
12438 : [FIRST_PTR, LAST_NODE], and insert them in the OMP clause list after chain
12439 : pointer MOVE_AFTER.
12440 :
12441 : The latter list was previously part of the OMP clause list, and the former
12442 : (prepended) part is comprised of new nodes.
12443 :
12444 : We start with a list of nodes starting with a struct mapping node. We
12445 : rearrange the list so that new nodes starting from FIRST_NEW and whose last
12446 : node's chain is LAST_NEW_TAIL comes directly after MOVE_AFTER, followed by
12447 : the group of mapping nodes we are currently processing (from the chain
12448 : FIRST_PTR to LAST_NODE). The return value is the pointer to the next chain
12449 : we should continue processing from, or NULL to stay where we were.
12450 :
12451 : The transformation (in the case where MOVE_AFTER and FIRST_PTR are
12452 : different) is worked through below. Here we are processing LAST_NODE, and
12453 : FIRST_PTR points at the preceding mapping clause:
12454 :
12455 : #. mapping node chain
12456 : ---------------------------------------------------
12457 : A. struct_node [->B]
12458 : B. comp_1 [->C]
12459 : C. comp_2 [->D (move_after)]
12460 : D. map_to_3 [->E]
12461 : E. attach_3 [->F (first_ptr)]
12462 : F. map_to_4 [->G (continue_at)]
12463 : G. attach_4 (last_node) [->H]
12464 : H. ...
12465 :
12466 : *last_new_tail = *first_ptr;
12467 :
12468 : I. new_node (first_new) [->F (last_new_tail)]
12469 :
12470 : *first_ptr = OMP_CLAUSE_CHAIN (last_node)
12471 :
12472 : #. mapping node chain
12473 : ----------------------------------------------------
12474 : A. struct_node [->B]
12475 : B. comp_1 [->C]
12476 : C. comp_2 [->D (move_after)]
12477 : D. map_to_3 [->E]
12478 : E. attach_3 [->H (first_ptr)]
12479 : F. map_to_4 [->G (continue_at)]
12480 : G. attach_4 (last_node) [->H]
12481 : H. ...
12482 :
12483 : I. new_node (first_new) [->F (last_new_tail)]
12484 :
12485 : OMP_CLAUSE_CHAIN (last_node) = *move_after;
12486 :
12487 : #. mapping node chain
12488 : ---------------------------------------------------
12489 : A. struct_node [->B]
12490 : B. comp_1 [->C]
12491 : C. comp_2 [->D (move_after)]
12492 : D. map_to_3 [->E]
12493 : E. attach_3 [->H (continue_at)]
12494 : F. map_to_4 [->G]
12495 : G. attach_4 (last_node) [->D]
12496 : H. ...
12497 :
12498 : I. new_node (first_new) [->F (last_new_tail)]
12499 :
12500 : *move_after = first_new;
12501 :
12502 : #. mapping node chain
12503 : ---------------------------------------------------
12504 : A. struct_node [->B]
12505 : B. comp_1 [->C]
12506 : C. comp_2 [->I (move_after)]
12507 : D. map_to_3 [->E]
12508 : E. attach_3 [->H (continue_at)]
12509 : F. map_to_4 [->G]
12510 : G. attach_4 (last_node) [->D]
12511 : H. ...
12512 : I. new_node (first_new) [->F (last_new_tail)]
12513 :
12514 : or, in order:
12515 :
12516 : #. mapping node chain
12517 : ---------------------------------------------------
12518 : A. struct_node [->B]
12519 : B. comp_1 [->C]
12520 : C. comp_2 [->I (move_after)]
12521 : I. new_node (first_new) [->F (last_new_tail)]
12522 : F. map_to_4 [->G]
12523 : G. attach_4 (last_node) [->D]
12524 : D. map_to_3 [->E]
12525 : E. attach_3 [->H (continue_at)]
12526 : H. ...
12527 : */
12528 :
12529 : static tree *
12530 71 : omp_siblist_move_concat_nodes_after (tree first_new, tree *last_new_tail,
12531 : tree *first_ptr, tree last_node,
12532 : tree *move_after)
12533 : {
12534 71 : tree *continue_at = NULL;
12535 71 : *last_new_tail = *first_ptr;
12536 71 : if (first_ptr == move_after)
12537 12 : *move_after = first_new;
12538 : else
12539 : {
12540 59 : *first_ptr = OMP_CLAUSE_CHAIN (last_node);
12541 59 : continue_at = first_ptr;
12542 59 : OMP_CLAUSE_CHAIN (last_node) = *move_after;
12543 59 : *move_after = first_new;
12544 : }
12545 71 : return continue_at;
12546 : }
12547 :
12548 : static omp_addr_token *
12549 11007 : omp_first_chained_access_token (vec<omp_addr_token *> &addr_tokens)
12550 : {
12551 11007 : using namespace omp_addr_tokenizer;
12552 11007 : int idx = addr_tokens.length () - 1;
12553 11007 : gcc_assert (idx >= 0);
12554 11007 : if (addr_tokens[idx]->type != ACCESS_METHOD)
12555 : return addr_tokens[idx];
12556 11034 : while (idx > 0 && addr_tokens[idx - 1]->type == ACCESS_METHOD)
12557 : idx--;
12558 11007 : return addr_tokens[idx];
12559 : }
12560 :
12561 : /* Mapping struct members causes an additional set of nodes to be created,
12562 : starting with GOMP_MAP_STRUCT followed by a number of mappings equal to the
12563 : number of members being mapped, in order of ascending position (address or
12564 : bitwise).
12565 :
12566 : We scan through the list of mapping clauses, calling this function for each
12567 : struct member mapping we find, and build up the list of mappings after the
12568 : initial GOMP_MAP_STRUCT node. For pointer members, these will be
12569 : newly-created ALLOC nodes. For non-pointer members, the existing mapping is
12570 : moved into place in the sorted list.
12571 :
12572 : struct {
12573 : int *a;
12574 : int *b;
12575 : int c;
12576 : int *d;
12577 : };
12578 :
12579 : #pragma (acc|omp directive) copy(struct.a[0:n], struct.b[0:n], struct.c,
12580 : struct.d[0:n])
12581 :
12582 : GOMP_MAP_STRUCT (4)
12583 : [GOMP_MAP_FIRSTPRIVATE_REFERENCE -- for refs to structs]
12584 : GOMP_MAP_ALLOC (struct.a)
12585 : GOMP_MAP_ALLOC (struct.b)
12586 : GOMP_MAP_TO (struct.c)
12587 : GOMP_MAP_ALLOC (struct.d)
12588 : ...
12589 :
12590 : In the case where we are mapping references to pointers, or in Fortran if
12591 : we are mapping an array with a descriptor, additional nodes may be created
12592 : after the struct node list also.
12593 :
12594 : The return code is either a pointer to the next node to process (if the
12595 : list has been rearranged), else NULL to continue with the next node in the
12596 : original list. */
12597 :
12598 : static tree *
12599 4512 : omp_accumulate_sibling_list (enum omp_region_type region_type,
12600 : enum tree_code code,
12601 : hash_map<tree_operand_hash, tree>
12602 : *&struct_map_to_clause,
12603 : hash_map<tree_operand_hash_no_se,
12604 : omp_mapping_group *> *group_map,
12605 : tree *grp_start_p, tree grp_end,
12606 : vec<omp_addr_token *> &addr_tokens, tree **inner,
12607 : bool *fragile_p, bool reprocessing_struct,
12608 : tree **added_tail)
12609 : {
12610 4512 : using namespace omp_addr_tokenizer;
12611 4512 : poly_offset_int coffset;
12612 4512 : poly_int64 cbitpos;
12613 4512 : tree ocd = OMP_CLAUSE_DECL (grp_end);
12614 4512 : bool openmp = !(region_type & ORT_ACC);
12615 4512 : bool target = (region_type & ORT_TARGET) != 0;
12616 4512 : tree *continue_at = NULL;
12617 :
12618 4825 : while (TREE_CODE (ocd) == ARRAY_REF)
12619 313 : ocd = TREE_OPERAND (ocd, 0);
12620 :
12621 4512 : if (*fragile_p)
12622 : {
12623 156 : omp_mapping_group *to_group
12624 156 : = omp_get_nonfirstprivate_group (group_map, ocd, true);
12625 :
12626 156 : if (to_group)
12627 : return NULL;
12628 : }
12629 :
12630 4424 : omp_addr_token *last_token = omp_first_chained_access_token (addr_tokens);
12631 4424 : if (last_token->type == ACCESS_METHOD)
12632 : {
12633 4424 : switch (last_token->u.access_kind)
12634 : {
12635 735 : case ACCESS_REF:
12636 735 : case ACCESS_REF_TO_POINTER:
12637 735 : case ACCESS_REF_TO_POINTER_OFFSET:
12638 735 : case ACCESS_INDEXED_REF_TO_ARRAY:
12639 : /* We may see either a bare reference or a dereferenced
12640 : "convert_from_reference"-like one here. Handle either way. */
12641 735 : if (TREE_CODE (ocd) == INDIRECT_REF)
12642 64 : ocd = TREE_OPERAND (ocd, 0);
12643 735 : gcc_assert (TREE_CODE (TREE_TYPE (ocd)) == REFERENCE_TYPE);
12644 : break;
12645 :
12646 : default:
12647 : ;
12648 : }
12649 : }
12650 :
12651 4424 : bool variable_offset;
12652 4424 : tree base
12653 4424 : = extract_base_bit_offset (ocd, &cbitpos, &coffset, &variable_offset);
12654 :
12655 4424 : int base_token;
12656 22631 : for (base_token = addr_tokens.length () - 1; base_token >= 0; base_token--)
12657 : {
12658 18207 : if (addr_tokens[base_token]->type == ARRAY_BASE
12659 18207 : || addr_tokens[base_token]->type == STRUCTURE_BASE)
12660 : break;
12661 : }
12662 :
12663 : /* The two expressions in the assertion below aren't quite the same: if we
12664 : have 'struct_base_decl access_indexed_array' for something like
12665 : "myvar[2].x" then base will be "myvar" and addr_tokens[base_token]->expr
12666 : will be "myvar[2]" -- the actual base of the structure.
12667 : The former interpretation leads to a strange situation where we get
12668 : struct(myvar) alloc(myvar[2].ptr1)
12669 : That is, the array of structures is kind of treated as one big structure
12670 : for the purposes of gathering sibling lists, etc. */
12671 : /* gcc_assert (base == addr_tokens[base_token]->expr); */
12672 :
12673 4424 : bool attach_detach = ((OMP_CLAUSE_MAP_KIND (grp_end)
12674 : == GOMP_MAP_ATTACH_DETACH)
12675 4424 : || (OMP_CLAUSE_MAP_KIND (grp_end)
12676 4424 : == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION));
12677 4424 : bool has_descriptor = false;
12678 4424 : if (OMP_CLAUSE_CHAIN (*grp_start_p) != grp_end)
12679 : {
12680 2884 : tree grp_mid = OMP_CLAUSE_CHAIN (*grp_start_p);
12681 2884 : if (grp_mid && omp_map_clause_descriptor_p (grp_mid))
12682 : has_descriptor = true;
12683 : }
12684 :
12685 4424 : if (!struct_map_to_clause || struct_map_to_clause->get (base) == NULL)
12686 : {
12687 2968 : enum gomp_map_kind str_kind = GOMP_MAP_STRUCT;
12688 :
12689 2968 : if (struct_map_to_clause == NULL)
12690 2412 : struct_map_to_clause = new hash_map<tree_operand_hash, tree>;
12691 :
12692 2968 : if (variable_offset)
12693 274 : str_kind = GOMP_MAP_STRUCT_UNORD;
12694 :
12695 2968 : tree l = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end), OMP_CLAUSE_MAP);
12696 :
12697 2968 : OMP_CLAUSE_SET_MAP_KIND (l, str_kind);
12698 2968 : OMP_CLAUSE_DECL (l) = unshare_expr (base);
12699 2968 : OMP_CLAUSE_SIZE (l) = size_int (1);
12700 :
12701 2968 : struct_map_to_clause->put (base, l);
12702 :
12703 : /* On first iterating through the clause list, we insert the struct node
12704 : just before the component access node that triggers the initial
12705 : omp_accumulate_sibling_list call for a particular sibling list (and
12706 : it then forms the first entry in that list). When reprocessing
12707 : struct bases that are themselves component accesses, we insert the
12708 : struct node on an off-side list to avoid inserting the new
12709 : GOMP_MAP_STRUCT into the middle of the old one. */
12710 2968 : tree *insert_node_pos = reprocessing_struct ? *added_tail : grp_start_p;
12711 :
12712 2968 : if (has_descriptor)
12713 : {
12714 726 : tree desc = OMP_CLAUSE_CHAIN (*grp_start_p);
12715 726 : if (code == OMP_TARGET_EXIT_DATA || code == OACC_EXIT_DATA)
12716 187 : OMP_CLAUSE_SET_MAP_KIND (desc, GOMP_MAP_RELEASE);
12717 726 : tree sc = *insert_node_pos;
12718 726 : OMP_CLAUSE_CHAIN (l) = desc;
12719 726 : OMP_CLAUSE_CHAIN (*grp_start_p) = OMP_CLAUSE_CHAIN (desc);
12720 726 : OMP_CLAUSE_CHAIN (desc) = sc;
12721 726 : *insert_node_pos = l;
12722 : }
12723 2242 : else if (attach_detach)
12724 : {
12725 1396 : tree extra_node;
12726 1396 : tree alloc_node
12727 1396 : = build_omp_struct_comp_nodes (code, *grp_start_p, grp_end,
12728 : &extra_node);
12729 1396 : tree *tail;
12730 1396 : OMP_CLAUSE_CHAIN (l) = alloc_node;
12731 :
12732 1396 : if (extra_node)
12733 : {
12734 0 : OMP_CLAUSE_CHAIN (extra_node) = *insert_node_pos;
12735 0 : OMP_CLAUSE_CHAIN (alloc_node) = extra_node;
12736 0 : tail = &OMP_CLAUSE_CHAIN (extra_node);
12737 : }
12738 : else
12739 : {
12740 1396 : OMP_CLAUSE_CHAIN (alloc_node) = *insert_node_pos;
12741 1396 : tail = &OMP_CLAUSE_CHAIN (alloc_node);
12742 : }
12743 :
12744 : /* For OpenMP semantics, we don't want to implicitly allocate
12745 : space for the pointer here for non-compute regions (e.g. "enter
12746 : data"). A FRAGILE_P node is only being created so that
12747 : omp-low.cc is able to rewrite the struct properly.
12748 : For references (to pointers), we want to actually allocate the
12749 : space for the reference itself in the sorted list following the
12750 : struct node.
12751 : For pointers, we want to allocate space if we had an explicit
12752 : mapping of the attachment point, but not otherwise. */
12753 1396 : if (*fragile_p
12754 1396 : || (openmp
12755 : && !target
12756 : && attach_detach
12757 234 : && TREE_CODE (TREE_TYPE (ocd)) == POINTER_TYPE
12758 88 : && !OMP_CLAUSE_ATTACHMENT_MAPPING_ERASED (grp_end)))
12759 : {
12760 133 : if (!lang_GNU_Fortran ())
12761 : /* In Fortran, pointers are dereferenced automatically, but may
12762 : be unassociated. So we still want to allocate space for the
12763 : pointer (as the base for an attach operation that should be
12764 : present in the same directive's clause list also). */
12765 103 : OMP_CLAUSE_SIZE (alloc_node) = size_zero_node;
12766 133 : OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (alloc_node) = 1;
12767 : }
12768 :
12769 1396 : *insert_node_pos = l;
12770 :
12771 1396 : if (reprocessing_struct)
12772 : {
12773 : /* When reprocessing a struct node group used as the base of a
12774 : subcomponent access, if we have a reference-to-pointer base,
12775 : we will see:
12776 : struct(**ptr) attach(*ptr)
12777 : whereas for a non-reprocess-struct group, we see, e.g.:
12778 : tofrom(**ptr) attach(*ptr) attach(ptr)
12779 : and we create the "alloc" for the second "attach", i.e.
12780 : for the reference itself. When reprocessing a struct group we
12781 : thus change the pointer attachment into a reference attachment
12782 : by stripping the indirection. (The attachment of the
12783 : referenced pointer must happen elsewhere, either on the same
12784 : directive, or otherwise.) */
12785 180 : tree adecl = OMP_CLAUSE_DECL (alloc_node);
12786 :
12787 180 : if ((TREE_CODE (adecl) == INDIRECT_REF
12788 148 : || (TREE_CODE (adecl) == MEM_REF
12789 0 : && integer_zerop (TREE_OPERAND (adecl, 1))))
12790 32 : && (TREE_CODE (TREE_TYPE (TREE_OPERAND (adecl, 0)))
12791 : == REFERENCE_TYPE)
12792 212 : && (TREE_CODE (TREE_TYPE (TREE_TYPE
12793 : (TREE_OPERAND (adecl, 0)))) == POINTER_TYPE))
12794 32 : OMP_CLAUSE_DECL (alloc_node) = TREE_OPERAND (adecl, 0);
12795 :
12796 180 : *added_tail = tail;
12797 : }
12798 : }
12799 : else
12800 : {
12801 846 : gcc_assert (*grp_start_p == grp_end);
12802 846 : if (reprocessing_struct)
12803 : {
12804 : /* If we don't have an attach/detach node, this is a
12805 : "target data" directive or similar, not an offload region.
12806 : Synthesize an "alloc" node using just the initiating
12807 : GOMP_MAP_STRUCT decl. */
12808 16 : gomp_map_kind k = (code == OMP_TARGET_EXIT_DATA
12809 32 : || code == OACC_EXIT_DATA)
12810 32 : ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
12811 32 : tree alloc_node
12812 32 : = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end),
12813 : OMP_CLAUSE_MAP);
12814 32 : OMP_CLAUSE_SET_MAP_KIND (alloc_node, k);
12815 32 : OMP_CLAUSE_DECL (alloc_node) = unshare_expr (last_token->expr);
12816 64 : OMP_CLAUSE_SIZE (alloc_node)
12817 32 : = TYPE_SIZE_UNIT (TREE_TYPE (OMP_CLAUSE_DECL (alloc_node)));
12818 :
12819 32 : OMP_CLAUSE_CHAIN (alloc_node) = OMP_CLAUSE_CHAIN (l);
12820 32 : OMP_CLAUSE_CHAIN (l) = alloc_node;
12821 32 : *insert_node_pos = l;
12822 32 : *added_tail = &OMP_CLAUSE_CHAIN (alloc_node);
12823 : }
12824 : else
12825 814 : grp_start_p = omp_siblist_insert_node_after (l, insert_node_pos);
12826 : }
12827 :
12828 2968 : unsigned last_access = base_token + 1;
12829 :
12830 2968 : while (last_access + 1 < addr_tokens.length ()
12831 3302 : && addr_tokens[last_access + 1]->type == ACCESS_METHOD)
12832 : last_access++;
12833 :
12834 2968 : if ((region_type & ORT_TARGET)
12835 2968 : && addr_tokens[base_token + 1]->type == ACCESS_METHOD)
12836 : {
12837 1674 : bool base_ref = false;
12838 1674 : access_method_kinds access_kind
12839 1674 : = addr_tokens[last_access]->u.access_kind;
12840 :
12841 1674 : switch (access_kind)
12842 : {
12843 : case ACCESS_DIRECT:
12844 : case ACCESS_INDEXED_ARRAY:
12845 1026 : return NULL;
12846 :
12847 403 : case ACCESS_REF:
12848 403 : case ACCESS_REF_TO_POINTER:
12849 403 : case ACCESS_REF_TO_POINTER_OFFSET:
12850 403 : case ACCESS_INDEXED_REF_TO_ARRAY:
12851 403 : base_ref = true;
12852 403 : break;
12853 :
12854 866 : default:
12855 866 : ;
12856 : }
12857 866 : tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end),
12858 : OMP_CLAUSE_MAP);
12859 866 : enum gomp_map_kind mkind;
12860 866 : omp_mapping_group *decl_group;
12861 866 : tree use_base;
12862 866 : switch (access_kind)
12863 : {
12864 463 : case ACCESS_POINTER:
12865 463 : case ACCESS_POINTER_OFFSET:
12866 463 : use_base = addr_tokens[last_access]->expr;
12867 463 : break;
12868 198 : case ACCESS_REF_TO_POINTER:
12869 198 : case ACCESS_REF_TO_POINTER_OFFSET:
12870 198 : use_base
12871 198 : = build_fold_indirect_ref (addr_tokens[last_access]->expr);
12872 198 : break;
12873 205 : default:
12874 205 : use_base = addr_tokens[base_token]->expr;
12875 : }
12876 866 : bool mapped_to_p
12877 866 : = omp_directive_maps_explicitly (group_map, use_base, &decl_group,
12878 : true, false, true);
12879 866 : if (addr_tokens[base_token]->type == STRUCTURE_BASE
12880 866 : && DECL_P (addr_tokens[last_access]->expr)
12881 1372 : && !mapped_to_p)
12882 444 : mkind = base_ref ? GOMP_MAP_FIRSTPRIVATE_REFERENCE
12883 : : GOMP_MAP_FIRSTPRIVATE_POINTER;
12884 : else
12885 : mkind = GOMP_MAP_ATTACH_DETACH;
12886 :
12887 866 : OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
12888 : /* If we have a reference to pointer base, we want to attach the
12889 : pointer here, not the reference. The reference attachment happens
12890 : elsewhere. */
12891 866 : bool ref_to_ptr
12892 866 : = (access_kind == ACCESS_REF_TO_POINTER
12893 866 : || access_kind == ACCESS_REF_TO_POINTER_OFFSET);
12894 866 : tree sdecl = addr_tokens[last_access]->expr;
12895 866 : tree sdecl_ptr = ref_to_ptr ? build_fold_indirect_ref (sdecl)
12896 : : sdecl;
12897 : /* For the FIRSTPRIVATE_REFERENCE after the struct node, we
12898 : want to use the reference itself for the decl, but we
12899 : still want to use the pointer to calculate the bias. */
12900 866 : OMP_CLAUSE_DECL (c2) = (mkind == GOMP_MAP_ATTACH_DETACH)
12901 866 : ? sdecl_ptr : sdecl;
12902 866 : sdecl = sdecl_ptr;
12903 866 : tree baddr = build_fold_addr_expr (base);
12904 866 : baddr = fold_convert_loc (OMP_CLAUSE_LOCATION (grp_end),
12905 : ptrdiff_type_node, baddr);
12906 866 : tree decladdr = fold_convert_loc (OMP_CLAUSE_LOCATION (grp_end),
12907 : ptrdiff_type_node, sdecl);
12908 866 : OMP_CLAUSE_SIZE (c2)
12909 866 : = fold_build2_loc (OMP_CLAUSE_LOCATION (grp_end), MINUS_EXPR,
12910 : ptrdiff_type_node, baddr, decladdr);
12911 : /* Insert after struct node. */
12912 866 : OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (l);
12913 866 : OMP_CLAUSE_CHAIN (l) = c2;
12914 :
12915 866 : if (addr_tokens[base_token]->type == STRUCTURE_BASE
12916 866 : && (addr_tokens[base_token]->u.structure_base_kind
12917 : == BASE_COMPONENT_EXPR)
12918 286 : && mkind == GOMP_MAP_ATTACH_DETACH
12919 1152 : && addr_tokens[last_access]->u.access_kind != ACCESS_REF)
12920 : {
12921 218 : *inner = insert_node_pos;
12922 218 : if (openmp)
12923 166 : *fragile_p = true;
12924 218 : return NULL;
12925 : }
12926 : }
12927 :
12928 1942 : if (addr_tokens[base_token]->type == STRUCTURE_BASE
12929 1942 : && (addr_tokens[base_token]->u.structure_base_kind
12930 : == BASE_COMPONENT_EXPR)
12931 2409 : && addr_tokens[last_access]->u.access_kind == ACCESS_REF)
12932 100 : *inner = insert_node_pos;
12933 :
12934 1942 : return NULL;
12935 : }
12936 1456 : else if (struct_map_to_clause)
12937 : {
12938 1456 : tree *osc = struct_map_to_clause->get (base);
12939 1456 : tree *sc = NULL, *scp = NULL;
12940 :
12941 1456 : unsigned HOST_WIDE_INT i, elems = tree_to_uhwi (OMP_CLAUSE_SIZE (*osc));
12942 1456 : sc = &OMP_CLAUSE_CHAIN (*osc);
12943 : /* The struct mapping might be immediately followed by a
12944 : FIRSTPRIVATE_POINTER, FIRSTPRIVATE_REFERENCE or an ATTACH_DETACH --
12945 : if it's an indirect access or a reference, or if the structure base
12946 : is not a decl. The FIRSTPRIVATE_* nodes are removed in omp-low.cc
12947 : after they have been processed there, and ATTACH_DETACH nodes are
12948 : recomputed and moved out of the GOMP_MAP_STRUCT construct once
12949 : sibling list building is complete. */
12950 1456 : if (OMP_CLAUSE_MAP_KIND (*sc) == GOMP_MAP_FIRSTPRIVATE_POINTER
12951 1411 : || OMP_CLAUSE_MAP_KIND (*sc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
12952 2822 : || OMP_CLAUSE_MAP_KIND (*sc) == GOMP_MAP_ATTACH_DETACH)
12953 201 : sc = &OMP_CLAUSE_CHAIN (*sc);
12954 3945 : for (i = 0; i < elems; i++, sc = &OMP_CLAUSE_CHAIN (*sc))
12955 2899 : if (attach_detach && sc == grp_start_p)
12956 : break;
12957 2899 : else if (TREE_CODE (OMP_CLAUSE_DECL (*sc)) != COMPONENT_REF
12958 182 : && TREE_CODE (OMP_CLAUSE_DECL (*sc)) != INDIRECT_REF
12959 3081 : && TREE_CODE (OMP_CLAUSE_DECL (*sc)) != ARRAY_REF)
12960 : break;
12961 : else
12962 : {
12963 2899 : tree sc_decl = OMP_CLAUSE_DECL (*sc);
12964 2899 : poly_offset_int offset;
12965 2899 : poly_int64 bitpos;
12966 :
12967 2899 : if (TREE_CODE (sc_decl) == ARRAY_REF)
12968 : {
12969 366 : while (TREE_CODE (sc_decl) == ARRAY_REF)
12970 184 : sc_decl = TREE_OPERAND (sc_decl, 0);
12971 182 : if (TREE_CODE (sc_decl) != COMPONENT_REF
12972 182 : || TREE_CODE (TREE_TYPE (sc_decl)) != ARRAY_TYPE)
12973 : break;
12974 : }
12975 2717 : else if (INDIRECT_REF_P (sc_decl)
12976 0 : && TREE_CODE (TREE_OPERAND (sc_decl, 0)) == COMPONENT_REF
12977 2717 : && (TREE_CODE (TREE_TYPE (TREE_OPERAND (sc_decl, 0)))
12978 : == REFERENCE_TYPE))
12979 0 : sc_decl = TREE_OPERAND (sc_decl, 0);
12980 :
12981 2899 : bool variable_offset2;
12982 2899 : tree base2 = extract_base_bit_offset (sc_decl, &bitpos, &offset,
12983 : &variable_offset2);
12984 2899 : if (!base2 || !operand_equal_p (base2, base, 0))
12985 : break;
12986 2899 : if (scp)
12987 469 : continue;
12988 2679 : if (variable_offset2)
12989 : {
12990 341 : OMP_CLAUSE_SET_MAP_KIND (*osc, GOMP_MAP_STRUCT_UNORD);
12991 :
12992 341 : if (has_descriptor)
12993 : {
12994 : /* Sort mapped components by offset. This is needed for
12995 : libgomp to handle Fortran derived-type allocatable
12996 : components transparently. */
12997 :
12998 323 : poly_int64 bitsize;
12999 323 : tree offset, coffset;
13000 323 : machine_mode mode;
13001 323 : int unsignedp, reversep, volatilep;
13002 323 : tree inner_ref1
13003 323 : = get_inner_reference (sc_decl, &bitsize, &bitpos,
13004 : &offset, &mode, &unsignedp,
13005 : &reversep, &volatilep);
13006 323 : tree osc_decl = ocd;
13007 323 : STRIP_NOPS (osc_decl);
13008 323 : tree inner_ref2
13009 323 : = get_inner_reference (osc_decl, &bitsize, &bitpos,
13010 : &coffset, &mode, &unsignedp,
13011 : &reversep, &volatilep);
13012 323 : gcc_assert (operand_equal_p (inner_ref1, inner_ref2, 0));
13013 323 : tree offset_diff
13014 323 : = fold_binary_to_constant (MINUS_EXPR, size_type_node,
13015 : coffset, offset);
13016 572 : if (offset_diff == NULL_TREE
13017 323 : || TREE_INT_CST_ELT (offset_diff, 0) > 0)
13018 249 : continue;
13019 : else
13020 : break;
13021 : }
13022 : }
13023 2338 : else if ((region_type & ORT_ACC) != 0)
13024 : {
13025 : /* For OpenACC, allow (ignore) duplicate struct accesses in
13026 : the middle of a mapping clause, e.g. "mystruct->foo" in:
13027 : copy(mystruct->foo->bar) copy(mystruct->foo->qux). */
13028 223 : if (reprocessing_struct
13029 8 : && known_eq (coffset, offset)
13030 223 : && known_eq (cbitpos, bitpos))
13031 21 : return NULL;
13032 : }
13033 2115 : else if (known_eq (coffset, offset)
13034 2115 : && known_eq (cbitpos, bitpos))
13035 : {
13036 : /* Having two struct members at the same offset doesn't work,
13037 : so make sure we don't. (We're allowed to ignore this.
13038 : Should we report the error?) */
13039 : /*error_at (OMP_CLAUSE_LOCATION (grp_end),
13040 : "duplicate struct member %qE in map clauses",
13041 : OMP_CLAUSE_DECL (grp_end));*/
13042 : return NULL;
13043 : }
13044 2335 : if (maybe_lt (coffset, offset)
13045 4063 : || (known_eq (coffset, offset)
13046 20 : && maybe_lt (cbitpos, bitpos)))
13047 : {
13048 607 : if (attach_detach)
13049 : scp = sc;
13050 : else
13051 : break;
13052 : }
13053 : }
13054 :
13055 1435 : OMP_CLAUSE_SIZE (*osc)
13056 1435 : = size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc), size_one_node);
13057 :
13058 1435 : if (reprocessing_struct)
13059 : {
13060 : /* If we're reprocessing a struct node, we don't want to do most of
13061 : the list manipulation below. We only need to handle the (pointer
13062 : or reference) attach/detach case. */
13063 8 : tree extra_node, alloc_node;
13064 8 : if (has_descriptor)
13065 0 : gcc_unreachable ();
13066 8 : else if (attach_detach)
13067 8 : alloc_node = build_omp_struct_comp_nodes (code, *grp_start_p,
13068 : grp_end, &extra_node);
13069 : else
13070 : {
13071 : /* If we don't have an attach/detach node, this is a
13072 : "target data" directive or similar, not an offload region.
13073 : Synthesize an "alloc" node using just the initiating
13074 : GOMP_MAP_STRUCT decl. */
13075 0 : gomp_map_kind k = (code == OMP_TARGET_EXIT_DATA
13076 0 : || code == OACC_EXIT_DATA)
13077 0 : ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
13078 0 : alloc_node
13079 0 : = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end),
13080 : OMP_CLAUSE_MAP);
13081 0 : OMP_CLAUSE_SET_MAP_KIND (alloc_node, k);
13082 0 : OMP_CLAUSE_DECL (alloc_node) = unshare_expr (last_token->expr);
13083 0 : OMP_CLAUSE_SIZE (alloc_node)
13084 0 : = TYPE_SIZE_UNIT (TREE_TYPE (OMP_CLAUSE_DECL (alloc_node)));
13085 : }
13086 :
13087 8 : if (scp)
13088 0 : omp_siblist_insert_node_after (alloc_node, scp);
13089 : else
13090 : {
13091 8 : tree *new_end = omp_siblist_insert_node_after (alloc_node, sc);
13092 8 : if (sc == *added_tail)
13093 8 : *added_tail = new_end;
13094 : }
13095 :
13096 8 : return NULL;
13097 : }
13098 :
13099 1427 : if (has_descriptor)
13100 : {
13101 538 : tree desc = OMP_CLAUSE_CHAIN (*grp_start_p);
13102 538 : if (code == OMP_TARGET_EXIT_DATA
13103 538 : || code == OACC_EXIT_DATA)
13104 105 : OMP_CLAUSE_SET_MAP_KIND (desc, GOMP_MAP_RELEASE);
13105 1076 : omp_siblist_move_node_after (desc,
13106 538 : &OMP_CLAUSE_CHAIN (*grp_start_p),
13107 : scp ? scp : sc);
13108 : }
13109 889 : else if (attach_detach)
13110 : {
13111 325 : tree cl = NULL_TREE, extra_node;
13112 325 : tree alloc_node = build_omp_struct_comp_nodes (code, *grp_start_p,
13113 : grp_end, &extra_node);
13114 325 : tree *tail_chain = NULL;
13115 :
13116 325 : if (*fragile_p
13117 325 : || (openmp
13118 : && !target
13119 : && attach_detach
13120 62 : && TREE_CODE (TREE_TYPE (ocd)) == POINTER_TYPE
13121 23 : && !OMP_CLAUSE_ATTACHMENT_MAPPING_ERASED (grp_end)))
13122 : {
13123 6 : if (!lang_GNU_Fortran ())
13124 6 : OMP_CLAUSE_SIZE (alloc_node) = size_zero_node;
13125 6 : OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (alloc_node) = 1;
13126 : }
13127 :
13128 : /* Here, we have:
13129 :
13130 : grp_end : the last (or only) node in this group.
13131 : grp_start_p : pointer to the first node in a pointer mapping group
13132 : up to and including GRP_END.
13133 : sc : pointer to the chain for the end of the struct component
13134 : list.
13135 : scp : pointer to the chain for the sorted position at which we
13136 : should insert in the middle of the struct component list
13137 : (else NULL to insert at end).
13138 : alloc_node : the "alloc" node for the structure (pointer-type)
13139 : component. We insert at SCP (if present), else SC
13140 : (the end of the struct component list).
13141 : extra_node : a newly-synthesized node for an additional indirect
13142 : pointer mapping or a Fortran pointer set, if needed.
13143 : cl : first node to prepend before grp_start_p.
13144 : tail_chain : pointer to chain of last prepended node.
13145 :
13146 : The general idea is we move the nodes for this struct mapping
13147 : together: the alloc node goes into the sorted list directly after
13148 : the struct mapping, and any extra nodes (together with the nodes
13149 : mapping arrays pointed to by struct components) get moved after
13150 : that list. When SCP is NULL, we insert the nodes at SC, i.e. at
13151 : the end of the struct component mapping list. It's important that
13152 : the alloc_node comes first in that case because it's part of the
13153 : sorted component mapping list (but subsequent nodes are not!). */
13154 :
13155 325 : if (scp)
13156 254 : omp_siblist_insert_node_after (alloc_node, scp);
13157 :
13158 : /* Make [cl,tail_chain] a list of the alloc node (if we haven't
13159 : already inserted it) and the extra_node (if it is present). The
13160 : list can be empty if we added alloc_node above and there is no
13161 : extra node. */
13162 254 : if (scp && extra_node)
13163 : {
13164 0 : cl = extra_node;
13165 0 : tail_chain = &OMP_CLAUSE_CHAIN (extra_node);
13166 : }
13167 325 : else if (extra_node)
13168 : {
13169 0 : OMP_CLAUSE_CHAIN (alloc_node) = extra_node;
13170 0 : cl = alloc_node;
13171 0 : tail_chain = &OMP_CLAUSE_CHAIN (extra_node);
13172 : }
13173 325 : else if (!scp)
13174 : {
13175 71 : cl = alloc_node;
13176 71 : tail_chain = &OMP_CLAUSE_CHAIN (alloc_node);
13177 : }
13178 :
13179 325 : continue_at
13180 71 : = cl ? omp_siblist_move_concat_nodes_after (cl, tail_chain,
13181 : grp_start_p, grp_end,
13182 : sc)
13183 254 : : omp_siblist_move_nodes_after (grp_start_p, grp_end, sc);
13184 : }
13185 564 : else if (*sc != grp_end)
13186 : {
13187 524 : gcc_assert (*grp_start_p == grp_end);
13188 :
13189 : /* We are moving the current node back to a previous struct node:
13190 : the node that used to point to the current node will now point to
13191 : the next node. */
13192 524 : continue_at = grp_start_p;
13193 : /* In the non-pointer case, the mapping clause itself is moved into
13194 : the correct position in the struct component list, which in this
13195 : case is just SC. */
13196 524 : omp_siblist_move_node_after (*grp_start_p, grp_start_p, sc);
13197 : }
13198 : }
13199 : return continue_at;
13200 : }
13201 :
13202 : /* Scan through GROUPS, and create sorted structure sibling lists without
13203 : gimplifying.  Returns true on success; returns false (after emitting a
      : diagnostic via error_at, or silently for an erroneous operand) when a
      : mapped structure field has variable length or the base decl is an
      : error operand.  Side effects: may push reprocess-struct entries onto
      : GROUPS and rebuild *GRPMAP (growing the vector can invalidate the
      : group pointers held by the map), and rewrites the clause list at
      : LIST_P in place.  */
13204 :
13205 : static bool
13206 17409 : omp_build_struct_sibling_lists (enum tree_code code,
13207 : enum omp_region_type region_type,
13208 : vec<omp_mapping_group> *groups,
13209 : hash_map<tree_operand_hash_no_se,
13210 : omp_mapping_group *> **grpmap,
13211 : tree *list_p)
13212 : {
13213 17409 : using namespace omp_addr_tokenizer;
13214 17409 : unsigned i;
13215 17409 : omp_mapping_group *grp;
13216 17409 : hash_map<tree_operand_hash, tree> *struct_map_to_clause = NULL;
13217 17409 : bool success = true;
13218 17409 : tree *new_next = NULL;
13219 34818 : tree *tail = &OMP_CLAUSE_CHAIN ((*groups)[groups->length () - 1].grp_end);
13220 17409 : tree added_nodes = NULL_TREE;
13221 17409 : tree *added_tail = &added_nodes;
13222 17409 : auto_vec<omp_mapping_group> pre_hwm_groups;
13223 :
      : /* Main pass: walk each mapping group; only groups whose mapped decl
      : reduces to a COMPONENT_REF (i.e. not a plain DECL) participate in
      : sibling-list accumulation below.  */
13224 65153 : FOR_EACH_VEC_ELT (*groups, i, grp)
13225 : {
13226 30335 : tree c = grp->grp_end;
13227 30335 : tree decl = OMP_CLAUSE_DECL (c);
13228 30335 : tree grp_end = grp->grp_end;
13229 30335 : auto_vec<omp_addr_token *> addr_tokens;
13230 30335 : tree sentinel = OMP_CLAUSE_CHAIN (grp_end);
13231 :
13232 30335 : if (new_next && !grp->reprocess_struct)
13233 510 : grp->grp_start = new_next;
13234 :
13235 30335 : new_next = NULL;
13236 :
13237 30335 : tree *grp_start_p = grp->grp_start;
13238 :
13239 30335 : if (DECL_P (decl))
13240 20513 : continue;
13241 :
13242 : /* Skip groups we marked for deletion in
13243 : {omp,oacc}_resolve_clause_dependencies. */
13244 9822 : if (grp->deleted)
13245 462 : continue;
13246 :
13247 9360 : if (OMP_CLAUSE_CHAIN (*grp_start_p)
13248 9360 : && OMP_CLAUSE_CHAIN (*grp_start_p) != grp_end)
13249 : {
13250 : /* Don't process an array descriptor that isn't inside a derived type
13251 : as a struct (the GOMP_MAP_POINTER following will have the form
13252 : "var.data", but such mappings are handled specially). */
13253 5862 : tree grpmid = OMP_CLAUSE_CHAIN (*grp_start_p);
13254 5862 : if (omp_map_clause_descriptor_p (grpmid)
13255 9950 : && DECL_P (OMP_CLAUSE_DECL (grpmid)))
13256 2777 : continue;
13257 : }
13258 :
13259 : tree expr = decl;
13260 :
13261 7598 : while (TREE_CODE (expr) == ARRAY_REF)
13262 1015 : expr = TREE_OPERAND (expr, 0);
13263 :
13264 6583 : if (!omp_parse_expr (addr_tokens, expr))
13265 0 : continue;
13266 :
13267 6583 : omp_addr_token *last_token
13268 6583 : = omp_first_chained_access_token (addr_tokens);
13269 :
13270 : /* A mapping of a reference to a pointer member that doesn't specify an
13271 : array section, etc., like this:
13272 : *mystruct.ref_to_ptr
13273 : should not be processed by the struct sibling-list handling code --
13274 : it just transfers the referenced pointer.
13275 :
13276 : In contrast, the quite similar-looking construct:
13277 : *mystruct.ptr
13278 : which is equivalent to e.g.
13279 : mystruct.ptr[0]
13280 : *does* trigger sibling-list processing.
13281 :
13282 : An exception for the former case is for "fragile" groups where the
13283 : reference itself is not handled otherwise; this is subject to special
13284 : handling in omp_accumulate_sibling_list also. */
13285 :
13286 6583 : if (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
13287 3362 : && last_token->type == ACCESS_METHOD
13288 3362 : && last_token->u.access_kind == ACCESS_REF
13289 6867 : && !grp->fragile)
13290 219 : continue;
13291 :
13292 6364 : tree d = decl;
13293 6364 : if (TREE_CODE (d) == ARRAY_REF)
13294 : {
13295 1974 : while (TREE_CODE (d) == ARRAY_REF)
13296 998 : d = TREE_OPERAND (d, 0);
13297 976 : if (TREE_CODE (d) == COMPONENT_REF
13298 976 : && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
13299 : decl = d;
13300 : }
13301 6364 : if (d == decl
13302 5699 : && INDIRECT_REF_P (decl)
13303 708 : && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
13304 105 : && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
13305 : == REFERENCE_TYPE)
13306 6461 : && (OMP_CLAUSE_MAP_KIND (c)
13307 : != GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION))
13308 97 : decl = TREE_OPERAND (decl, 0);
13309 :
13310 6364 : STRIP_NOPS (decl);
13311 :
13312 6364 : if (TREE_CODE (decl) != COMPONENT_REF)
13313 1318 : continue;
13314 :
13315 : /* If we're mapping the whole struct in another node, skip adding this
13316 : node to a sibling list. */
13317 5046 : omp_mapping_group *wholestruct;
13318 5046 : if (omp_mapped_by_containing_struct (*grpmap, OMP_CLAUSE_DECL (c),
13319 : &wholestruct))
13320 : {
13321 177 : if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
13322 160 : OMP_CLAUSE_MAP_SIZE_NEEDS_ADJUSTMENT (c) = 0;
13323 177 : continue;
13324 : }
13325 :
13326 4869 : if (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
13327 4869 : && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH
13328 4768 : && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH
13329 : && code != OACC_UPDATE
13330 9572 : && code != OMP_TARGET_UPDATE)
13331 : {
13332 4512 : if (error_operand_p (decl))
13333 : {
13334 0 : success = false;
13335 0 : goto error_out;
13336 : }
13337 :
      : /* A structure whose size is not a compile-time constant cannot be
      : mapped field-by-field; diagnose and bail out.  */
13338 4512 : tree stype = TREE_TYPE (decl);
13339 4512 : if (TREE_CODE (stype) == REFERENCE_TYPE)
13340 768 : stype = TREE_TYPE (stype);
13341 4512 : if (TYPE_SIZE_UNIT (stype) == NULL
13342 4512 : || TREE_CODE (TYPE_SIZE_UNIT (stype)) != INTEGER_CST)
13343 : {
13344 0 : error_at (OMP_CLAUSE_LOCATION (c),
13345 : "mapping field %qE of variable length "
13346 0 : "structure", OMP_CLAUSE_DECL (c));
13347 0 : success = false;
13348 0 : goto error_out;
13349 : }
13350 :
13351 4512 : tree *inner = NULL;
13352 4512 : bool fragile_p = grp->fragile;
13353 :
13354 4512 : new_next
13355 9024 : = omp_accumulate_sibling_list (region_type, code,
13356 : struct_map_to_clause, *grpmap,
13357 : grp_start_p, grp_end, addr_tokens,
13358 : &inner, &fragile_p,
13359 4512 : grp->reprocess_struct, &added_tail);
13360 :
      : /* INNER non-null means the accumulation exposed a nested struct
      : access that must itself be reprocessed: queue a new group.  */
13361 4512 : if (inner)
13362 : {
13363 318 : omp_mapping_group newgrp;
13364 318 : newgrp.grp_start = inner;
13365 318 : if (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (*inner))
13366 : == GOMP_MAP_ATTACH_DETACH)
13367 286 : newgrp.grp_end = OMP_CLAUSE_CHAIN (*inner);
13368 : else
13369 32 : newgrp.grp_end = *inner;
13370 318 : newgrp.mark = UNVISITED;
13371 318 : newgrp.sibling = NULL;
13372 318 : newgrp.deleted = false;
13373 318 : newgrp.reprocess_struct = true;
13374 318 : newgrp.fragile = fragile_p;
13375 318 : newgrp.next = NULL;
13376 318 : groups->safe_push (newgrp);
13377 :
13378 : /* !!! Growing GROUPS might invalidate the pointers in the group
13379 : map. Rebuild it here. This is a bit inefficient, but
13380 : shouldn't happen very often. */
13381 636 : delete (*grpmap);
13382 318 : *grpmap
13383 318 : = omp_reindex_mapping_groups (list_p, groups, &pre_hwm_groups,
13384 : sentinel);
13385 : }
13386 : }
13387 30335 : }
13388 :
13389 : /* Delete groups marked for deletion above. At this point the order of the
13390 : groups may no longer correspond to the order of the underlying list,
13391 : which complicates this a little. First clear out OMP_CLAUSE_DECL for
13392 : deleted nodes... */
13393 :
13394 47744 : FOR_EACH_VEC_ELT (*groups, i, grp)
13395 30335 : if (grp->deleted)
13396 462 : for (tree d = *grp->grp_start;
13397 924 : d != OMP_CLAUSE_CHAIN (grp->grp_end);
13398 462 : d = OMP_CLAUSE_CHAIN (d))
13399 462 : OMP_CLAUSE_DECL (d) = NULL_TREE;
13400 :
13401 : /* ...then sweep through the list removing the now-empty nodes. */
13402 :
13403 : tail = list_p;
13404 93337 : while (*tail)
13405 : {
13406 75928 : if (OMP_CLAUSE_CODE (*tail) == OMP_CLAUSE_MAP
13407 75928 : && OMP_CLAUSE_DECL (*tail) == NULL_TREE)
13408 462 : *tail = OMP_CLAUSE_CHAIN (*tail);
13409 : else
13410 75466 : tail = &OMP_CLAUSE_CHAIN (*tail);
13411 : }
13412 :
13413 : /* Tack on the struct nodes added during nested struct reprocessing. */
13414 17409 : if (added_nodes)
13415 : {
13416 192 : *tail = added_nodes;
13417 192 : tail = added_tail;
13418 : }
13419 :
13420 : /* Find each attach node whose bias needs to be adjusted and move it to the
13421 : group containing its pointee, right after the struct node, so that it can
13422 : be picked up by the adjustment code further down in this function. */
13423 17409 : bool attach_bias_needs_adjustment;
13424 17409 : attach_bias_needs_adjustment = false;
13425 65153 : FOR_EACH_VEC_ELT_REVERSE (*groups, i, grp)
13426 : {
13427 30335 : tree c = *grp->grp_start;
13428 29564 : if (c != NULL && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13429 29549 : && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
13430 26568 : || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT_UNORD)
13431 3261 : && OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c)) == GOMP_MAP_TO_PSET
13432 527 : && OMP_CLAUSE_MAP_KIND (grp->grp_end) == GOMP_MAP_ATTACH_DETACH
13433 30862 : && OMP_CLAUSE_MAP_SIZE_NEEDS_ADJUSTMENT (grp->grp_end))
13434 : {
13435 90 : OMP_CLAUSE_MAP_SIZE_NEEDS_ADJUSTMENT (grp->grp_end) = 0;
13436 90 : attach_bias_needs_adjustment = true;
13437 90 : tree *cp;
13438 90 : for (cp = &OMP_CLAUSE_CHAIN (c); cp != NULL;
13439 206 : cp = &OMP_CLAUSE_CHAIN (*cp))
13440 296 : if (*cp == grp->grp_end)
13441 : {
13442 : c = *cp;
13443 : break;
13444 : }
13445 :
13446 90 : tree base = OMP_CLAUSE_DECL (c);
13447 90 : gcc_assert (TREE_CODE (base) == NOP_EXPR);
13448 90 : base = build_fold_indirect_ref (base);
13449 90 : tree *struct_node = struct_map_to_clause->get (base);
13450 90 : omp_siblist_move_node_after (c, cp, &OMP_CLAUSE_CHAIN (*struct_node));
13451 : }
13452 : }
13453 :
13454 : /* Now we have finished building the struct sibling lists, reprocess
13455 : newly-added "attach" nodes: we need the address of the first
13456 : mapped element of each struct sibling list for the bias of the attach
13457 : operation -- not necessarily the base address of the whole struct. */
13458 17409 : if (struct_map_to_clause)
13459 5380 : for (hash_map<tree_operand_hash, tree>::iterator iter
13460 2412 : = struct_map_to_clause->begin ();
13461 7792 : iter != struct_map_to_clause->end ();
13462 2968 : ++iter)
13463 : {
13464 2968 : tree struct_node = (*iter).second;
13465 2968 : gcc_assert (OMP_CLAUSE_CODE (struct_node) == OMP_CLAUSE_MAP);
13466 2968 : tree attach = OMP_CLAUSE_CHAIN (struct_node);
13467 :
13468 2968 : if (OMP_CLAUSE_CODE (attach) != OMP_CLAUSE_MAP
13469 2968 : || OMP_CLAUSE_MAP_KIND (attach) != GOMP_MAP_ATTACH_DETACH)
13470 2456 : continue;
13471 :
13472 512 : OMP_CLAUSE_SET_MAP_KIND (attach, GOMP_MAP_ATTACH);
13473 :
13474 : /* Sanity check: the standalone attach node will not work if we have
13475 : an "enter data" operation (because for those, variables need to be
13476 : mapped separately and attach nodes must be grouped together with the
13477 : base they attach to). We should only have created the
13478 : ATTACH_DETACH node either after GOMP_MAP_STRUCT for a target region
13479 : or for an intermediate descriptor that needs adjustment -- so this
13480 : should never be true. */
13481 512 : gcc_assert ((region_type & ORT_TARGET) != 0
13482 : || attach_bias_needs_adjustment);
13483 :
13484 : /* This is the first sorted node in the struct sibling list. Use it
13485 : to recalculate the correct bias to use.
13486 : (&first_node - attach_decl).
13487 : For GOMP_MAP_STRUCT_UNORD, we need e.g. the
13488 : min(min(min(first,second),third),fourth) element, because the
13489 : elements aren't in any particular order. */
13490 512 : tree lowest_addr;
13491 512 : if (OMP_CLAUSE_MAP_KIND (struct_node) == GOMP_MAP_STRUCT_UNORD)
13492 : {
13493 94 : tree first_node = OMP_CLAUSE_CHAIN (attach);
13494 94 : unsigned HOST_WIDE_INT num_mappings
13495 94 : = tree_to_uhwi (OMP_CLAUSE_SIZE (struct_node));
13496 94 : lowest_addr = OMP_CLAUSE_DECL (first_node);
13497 94 : lowest_addr = build_fold_addr_expr (lowest_addr);
13498 94 : lowest_addr = fold_convert (pointer_sized_int_node, lowest_addr);
13499 94 : tree next_node = OMP_CLAUSE_CHAIN (first_node);
13500 185 : while (num_mappings > 1)
13501 : {
13502 91 : tree tmp = OMP_CLAUSE_DECL (next_node);
13503 91 : tmp = build_fold_addr_expr (tmp);
13504 91 : tmp = fold_convert (pointer_sized_int_node, tmp);
13505 91 : lowest_addr = fold_build2 (MIN_EXPR, pointer_sized_int_node,
13506 : lowest_addr, tmp);
13507 91 : next_node = OMP_CLAUSE_CHAIN (next_node);
13508 91 : num_mappings--;
13509 : }
13510 94 : lowest_addr = fold_convert (ptrdiff_type_node, lowest_addr);
13511 : }
13512 : else
13513 : {
13514 418 : tree first_node = OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (attach));
13515 418 : first_node = build_fold_addr_expr (first_node);
13516 418 : lowest_addr = fold_convert (ptrdiff_type_node, first_node);
13517 : }
13518 512 : tree attach_decl = OMP_CLAUSE_DECL (attach);
13519 512 : attach_decl = fold_convert (ptrdiff_type_node, attach_decl);
13520 512 : OMP_CLAUSE_SIZE (attach)
13521 512 : = fold_build2 (MINUS_EXPR, ptrdiff_type_node, lowest_addr,
13522 : attach_decl);
13523 :
13524 : /* Remove GOMP_MAP_ATTACH node from after struct node. */
13525 512 : OMP_CLAUSE_CHAIN (struct_node) = OMP_CLAUSE_CHAIN (attach);
13526 : /* ...and re-insert it at the end of our clause list. */
13527 512 : *tail = attach;
13528 512 : OMP_CLAUSE_CHAIN (attach) = NULL_TREE;
13529 512 : tail = &OMP_CLAUSE_CHAIN (attach);
13530 : }
13531 :
13532 14997 : error_out:
13533 17409 : if (struct_map_to_clause)
13534 2412 : delete struct_map_to_clause;
13535 :
13536 17409 : return success;
13537 17409 : }
13538 :
13539 : /* Bundle of state threaded through the mapper-instantiation machinery
      : (see omp_instantiate_mapper below).  NOTE(review): the consumers of
      : this struct are outside this chunk; the field notes below are
      : inferred from names/types -- confirm against the walk callbacks
      : that receive it.  */
13540 : struct instantiate_mapper_info
13541 : {
      : /* Presumably the accumulated clause list produced by expanding
      : "declare mapper" directives -- TODO confirm.  */
13542 : tree *mapper_clauses_p;
      : /* Gimplification context for the construct being processed.  */
13543 : struct gimplify_omp_ctx *omp_ctx;
      : /* Statement sequence into which prerequisite statements are emitted.  */
13544 : gimple_seq *pre_p;
13545 : };
13545 :
13546 : /* Helper function for omp_instantiate_mapper: a walk_tree callback that
      : rewrites each decl found at *TP to its counterpart recorded in the
      : decl map of the copy_body_data passed as DATA.  A replaced decl is
      : unshared, and WALK_SUBTREES is cleared so the walk does not descend
      : into the freshly substituted node.  Always returns NULL_TREE, so the
      : surrounding walk runs to completion.  */
13547 :
13548 : static tree
13549 1272 : remap_mapper_decl_1 (tree *tp, int *walk_subtrees, void *data)
13550 : {
13551 1272 : copy_body_data *id = (copy_body_data *) data;
13552 :
13553 1272 : if (DECL_P (*tp))
13554 : {
13555 564 : tree replacement = remap_decl (*tp, id);
13556 564 : if (*tp != replacement)
13557 : {
      : /* Unshare the replacement so later edits to this expansion do
      : not modify tree structure shared with the original mapper.  */
13558 289 : *tp = unshare_expr (replacement);
13559 289 : *walk_subtrees = 0;
13560 : }
13561 : }
13562 :
13563 1272 : return NULL_TREE;
13564 : }
13565 :
13566 : /* A copy_decl implementation (for use with tree-inline.cc functions) that
13567 : only transforms decls or SSA names that are part of a map we already
13568 : prepared.  VAR is returned unchanged (an identity "copy") when it has
      : no entry in CB's decl map, so unrelated decls are never duplicated.  */
13569 :
13570 : static tree
13571 94 : omp_mapper_copy_decl (tree var, copy_body_data *cb)
13572 : {
13573 94 : tree *repl = cb->decl_map->get (var);
13574 :
13575 94 : if (repl)
13576 0 : return *repl;
13577 :
      : /* Not in the map: keep the original decl rather than making a copy.  */
13578 94 : return var;
13579 : }
13580 :
/* Instantiate the user-defined mapper reached through MAPPERFN for the
   mapped object EXPR: expand each clause of the "declare mapper"
   directive with the mapper's dummy variable replaced by EXPR, and
   append the resulting map clauses through *MAPPER_CLAUSES_P.
   OUTER_KIND is substituted for clauses whose map kind was left as
   GOMP_MAP_UNSET.  Nested mappers looked up in IMPLICIT_MAPPERS are
   expanded recursively.  Returns the updated tail pointer of the clause
   chain.  PRE_P receives any statements emitted by recursive
   instantiation.  */

static tree *
omp_instantiate_mapper (gimple_seq *pre_p,
			hash_map<omp_name_type<tree>, tree> *implicit_mappers,
			tree mapperfn, tree expr, enum gomp_map_kind outer_kind,
			tree *mapper_clauses_p)
{
  tree mapper_name = NULL_TREE;
  tree mapper = lang_hooks.decls.omp_extract_mapper_directive (mapperfn);
  gcc_assert (TREE_CODE (mapper) == OMP_DECLARE_MAPPER);

  tree clause = OMP_DECLARE_MAPPER_CLAUSES (mapper);
  tree dummy_var = OMP_DECLARE_MAPPER_DECL (mapper);

  /* The "extraction map" is used to map the mapper variable in the "declare
     mapper" directive, and also any temporary variables that have been created
     as part of expanding the mapper function's body (which are expanded as a
     "bind" expression in the pre_p sequence).  */
  hash_map<tree, tree> extraction_map;

  /* Map the directive's placeholder decl to the actual mapped object,
     and map EXPR to itself so it is left alone by remapping.  */
  extraction_map.put (dummy_var, expr);
  extraction_map.put (expr, expr);

  /* This copy_body_data is only used to remap the decls in the
     OMP_DECLARE_MAPPER tree node expansion itself.  All relevant decls should
     already be in the current function.  */
  copy_body_data id;
  memset (&id, 0, sizeof (id));
  id.src_fn = current_function_decl;
  id.dst_fn = current_function_decl;
  id.src_cfun = cfun;
  id.decl_map = &extraction_map;
  id.copy_decl = omp_mapper_copy_decl;
  id.transform_call_graph_edges = CB_CGE_DUPLICATE; // ???
  id.transform_new_cfg = true; // ???

  for (; clause; clause = OMP_CLAUSE_CHAIN (clause))
    {
      enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (clause);
      tree *nested_mapper_p = NULL;

      /* PUSH/POP pseudo-clauses bracket the region where a named mapper
	 is in effect; they set MAPPER_NAME for the nested-mapper lookup
	 below and produce no output clause themselves.  */
      if (map_kind == GOMP_MAP_PUSH_MAPPER_NAME)
	{
	  mapper_name = OMP_CLAUSE_DECL (clause);
	  continue;
	}
      else if (map_kind == GOMP_MAP_POP_MAPPER_NAME)
	{
	  mapper_name = NULL_TREE;
	  continue;
	}

      tree decl = OMP_CLAUSE_DECL (clause);
      tree unshared, type;
      bool nonunit_array_with_mapper = false;

      if (TREE_CODE (decl) == OMP_ARRAY_SECTION)
	{
	  location_t loc = OMP_CLAUSE_LOCATION (clause);
	  tree tmp = lang_hooks.decls.omp_map_array_section (loc, decl);
	  if (tmp == decl)
	    {
	      /* The language hook could not simplify the section (a
		 non-unit-length section); keep the clause as-is and
		 remember that, in case a nested mapper applies.  */
	      unshared = unshare_expr (clause);
	      nonunit_array_with_mapper = true;
	      type = TREE_TYPE (TREE_TYPE (decl));
	    }
	  else
	    {
	      /* The section collapsed to a simpler expression TMP; build
		 a fresh clause for it with an appropriate size.  */
	      unshared = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
					   OMP_CLAUSE_CODE (clause));
	      OMP_CLAUSE_DECL (unshared) = tmp;
	      OMP_CLAUSE_SIZE (unshared)
		= DECL_P (tmp) ? DECL_SIZE_UNIT (tmp)
			       : TYPE_SIZE_UNIT (TREE_TYPE (tmp));
	      type = TREE_TYPE (tmp);
	    }
	}
      else
	{
	  unshared = unshare_expr (clause);
	  type = TREE_TYPE (decl);
	}

      /* Substitute the mapper's dummy variable (and anything else in the
	 extraction map) with the real mapped object.  */
      walk_tree (&unshared, remap_mapper_decl_1, &id, NULL);

      /* Clauses written without an explicit map kind inherit the kind of
	 the invoking construct.  */
      if (OMP_CLAUSE_MAP_KIND (unshared) == GOMP_MAP_UNSET)
	OMP_CLAUSE_SET_MAP_KIND (unshared, outer_kind);

      decl = OMP_CLAUSE_DECL (unshared);
      type = TYPE_MAIN_VARIANT (type);

      /* See if a (possibly named) mapper applies to this clause's type;
	 guard against self-recursion via the *nested_mapper_p != mapperfn
	 check below.  */
      nested_mapper_p = implicit_mappers->get ({ mapper_name, type });

      if (nested_mapper_p && *nested_mapper_p != mapperfn)
	{
	  if (nonunit_array_with_mapper)
	    {
	      sorry ("user-defined mapper with non-unit length array section");
	      continue;
	    }

	  if (map_kind == GOMP_MAP_UNSET)
	    map_kind = outer_kind;

	  /* Recursively expand the nested mapper; it appends its own
	     clauses, so this clause itself is not emitted.  */
	  mapper_clauses_p
	    = omp_instantiate_mapper (pre_p, implicit_mappers,
				      *nested_mapper_p, decl, map_kind,
				      mapper_clauses_p);
	  continue;
	}

      /* Thread the expanded clause onto the output chain.  */
      *mapper_clauses_p = unshared;
      mapper_clauses_p = &OMP_CLAUSE_CHAIN (unshared);
    }

  return mapper_clauses_p;
}
13697 :
/* Splay-tree iteration callback: N is a (decl, data-sharing flags) entry
   from a gimplify_omp_ctx's variable table and DATA points to an
   instantiate_mapper_info.  If the decl is a seen, non-explicit,
   non-local aggregate for which an unnamed user-defined mapper is
   registered, instantiate that mapper's clauses for it and mark the
   entry GOVD_EXPLICIT so gimplify_adjust_omp_clauses_1 does not also map
   it implicitly.  Always returns 0 to continue the walk.  */

static int
omp_instantiate_implicit_mappers (splay_tree_node n, void *data)
{
  tree decl = (tree) n->key;
  instantiate_mapper_info *im_info = (instantiate_mapper_info *) data;
  gimplify_omp_ctx *ctx = im_info->omp_ctx;
  tree *mapper_p = NULL;
  tree type = TREE_TYPE (decl);
  bool ref_p = false;
  unsigned flags = n->value;

  /* Explicitly-mapped or context-local variables never get an implicit
     mapper invocation.  */
  if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
    return 0;
  /* Neither do variables not actually referenced in the region.  */
  if ((flags & GOVD_SEEN) == 0)
    return 0;
  /* If we already have clauses pertaining to a struct variable, then we don't
     want to implicitly invoke a user-defined mapper.  */
  /* NOTE(review): this GOVD_EXPLICIT test looks unreachable -- the
     (GOVD_EXPLICIT | GOVD_LOCAL) check above has already returned when
     GOVD_EXPLICIT is set.  Confirm whether a different flag was
     intended here.  */
  if ((flags & GOVD_EXPLICIT) != 0 && AGGREGATE_TYPE_P (TREE_TYPE (decl)))
    return 0;

  /* Look through references so the mapper matches the referenced type.  */
  if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      ref_p = true;
      type = TREE_TYPE (type);
    }

  type = TYPE_MAIN_VARIANT (type);

  /* Only aggregates can have user-defined mappers; look up the unnamed
     (default) mapper for this type.  */
  if (DECL_P (decl) && type && AGGREGATE_TYPE_P (type))
    {
      gcc_assert (ctx);
      mapper_p = ctx->implicit_mappers->get ({ NULL_TREE, type });
    }

  if (mapper_p)
    {
      /* If we have a reference, map the pointed-to object rather than the
	 reference itself.  */
      if (ref_p)
	decl = build_fold_indirect_ref (decl);

      im_info->mapper_clauses_p
	= omp_instantiate_mapper (im_info->pre_p, ctx->implicit_mappers,
				  *mapper_p, decl, GOMP_MAP_TOFROM,
				  im_info->mapper_clauses_p);
      /* Make sure we don't map the same variable implicitly in
	 gimplify_adjust_omp_clauses_1 also.  */
      n->value |= GOVD_EXPLICIT;
    }

  return 0;
}
13750 :
13751 : /* Scan the OMP clauses in *LIST_P, installing mappings into a new
13752 : and previous omp contexts. */
13753 :
13754 : static void
13755 129870 : gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
13756 : enum omp_region_type region_type,
13757 : enum tree_code code,
13758 : gimple_seq *loops_seq_p = NULL)
13759 : {
13760 129870 : using namespace omp_addr_tokenizer;
13761 129870 : struct gimplify_omp_ctx *ctx, *outer_ctx;
13762 129870 : tree c;
13763 129870 : tree *orig_list_p = list_p;
13764 129870 : int handled_depend_iterators = -1;
13765 129870 : int nowait = -1;
13766 :
13767 129870 : ctx = new_omp_context (region_type);
13768 129870 : ctx->code = code;
13769 129870 : outer_ctx = ctx->outer_context;
13770 129870 : if (code == OMP_TARGET)
13771 : {
13772 13126 : if (!lang_GNU_Fortran ())
13773 11027 : ctx->defaultmap[GDMK_POINTER] = GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
13774 13126 : ctx->defaultmap[GDMK_SCALAR] = GOVD_FIRSTPRIVATE;
13775 26252 : ctx->defaultmap[GDMK_SCALAR_TARGET] = (lang_GNU_Fortran ()
13776 13126 : ? GOVD_MAP : GOVD_FIRSTPRIVATE);
13777 : }
13778 129870 : if (!lang_GNU_Fortran ())
13779 99059 : switch (code)
13780 : {
13781 18346 : case OMP_TARGET:
13782 18346 : case OMP_TARGET_DATA:
13783 18346 : case OMP_TARGET_ENTER_DATA:
13784 18346 : case OMP_TARGET_EXIT_DATA:
13785 18346 : case OACC_DECLARE:
13786 18346 : case OACC_HOST_DATA:
13787 18346 : case OACC_PARALLEL:
13788 18346 : case OACC_KERNELS:
13789 18346 : ctx->target_firstprivatize_array_bases = true;
13790 : default:
13791 : break;
13792 : }
13793 :
13794 129870 : vec<omp_mapping_group> *groups = NULL;
13795 129870 : hash_map<tree_operand_hash_no_se, omp_mapping_group *> *grpmap = NULL;
13796 129870 : unsigned grpnum = 0;
13797 129870 : tree *grp_start_p = NULL, grp_end = NULL_TREE;
13798 :
13799 129870 : if (code == OMP_TARGET
13800 129870 : || code == OMP_TARGET_DATA
13801 129870 : || code == OMP_TARGET_ENTER_DATA
13802 : || code == OMP_TARGET_EXIT_DATA
13803 : || code == OACC_DATA
13804 : || code == OACC_KERNELS
13805 : || code == OACC_PARALLEL
13806 : || code == OACC_SERIAL
13807 : || code == OACC_ENTER_DATA
13808 : || code == OACC_EXIT_DATA
13809 : || code == OACC_UPDATE
13810 : || code == OACC_DECLARE)
13811 : {
13812 33264 : groups = omp_gather_mapping_groups (list_p);
13813 :
13814 33264 : if (groups)
13815 17618 : grpmap = omp_index_mapping_groups (groups);
13816 : }
13817 :
13818 333811 : while ((c = *list_p) != NULL)
13819 : {
13820 203941 : bool remove = false;
13821 203941 : bool notice_outer = true;
13822 203941 : bool map_descriptor;
13823 203941 : const char *check_non_private = NULL;
13824 203941 : unsigned int flags;
13825 203941 : tree decl;
13826 203941 : auto_vec<omp_addr_token *, 10> addr_tokens;
13827 203941 : tree op = NULL_TREE;
13828 203941 : location_t loc = OMP_CLAUSE_LOCATION (c);
13829 :
13830 240862 : if (grp_end && c == OMP_CLAUSE_CHAIN (grp_end))
13831 : {
13832 : grp_start_p = NULL;
13833 : grp_end = NULL_TREE;
13834 : }
13835 :
13836 203941 : if (code == OMP_TARGET
13837 : || code == OMP_TARGET_DATA
13838 : || code == OMP_TARGET_ENTER_DATA
13839 172358 : || code == OMP_TARGET_EXIT_DATA)
13840 : /* Do some target-specific type checks for map operands. */
13841 33803 : switch (OMP_CLAUSE_CODE (c))
13842 : {
13843 24498 : case OMP_CLAUSE_MAP:
13844 24498 : op = OMP_CLAUSE_OPERAND (c, 0);
13845 24498 : verify_type_context (loc, TCTX_OMP_MAP, TREE_TYPE (op));
13846 24498 : break;
13847 180 : case OMP_CLAUSE_PRIVATE:
13848 180 : op = OMP_CLAUSE_OPERAND (c, 0);
13849 180 : verify_type_context (loc, TCTX_OMP_PRIVATE, TREE_TYPE (op));
13850 180 : break;
13851 1296 : case OMP_CLAUSE_FIRSTPRIVATE:
13852 1296 : op = OMP_CLAUSE_OPERAND (c, 0);
13853 1296 : verify_type_context (loc, TCTX_OMP_FIRSTPRIVATE, TREE_TYPE (op));
13854 1296 : break;
13855 2922 : case OMP_CLAUSE_IS_DEVICE_PTR:
13856 2922 : case OMP_CLAUSE_USE_DEVICE_ADDR:
13857 2922 : case OMP_CLAUSE_USE_DEVICE_PTR:
13858 2922 : case OMP_CLAUSE_HAS_DEVICE_ADDR:
13859 2922 : op = OMP_CLAUSE_OPERAND (c, 0);
13860 2922 : verify_type_context (loc, TCTX_OMP_DEVICE_ADDR, TREE_TYPE (op));
13861 2922 : break;
13862 : default:
13863 : break;
13864 : }
13865 :
13866 203941 : switch (OMP_CLAUSE_CODE (c))
13867 : {
13868 12215 : case OMP_CLAUSE_PRIVATE:
13869 12215 : flags = GOVD_PRIVATE | GOVD_EXPLICIT;
13870 12215 : if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
13871 : {
13872 166 : flags |= GOVD_PRIVATE_OUTER_REF;
13873 166 : OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
13874 : }
13875 : else
13876 : notice_outer = false;
13877 12215 : goto do_add;
13878 5518 : case OMP_CLAUSE_SHARED:
13879 5518 : flags = GOVD_SHARED | GOVD_EXPLICIT;
13880 5518 : goto do_add;
13881 7921 : case OMP_CLAUSE_FIRSTPRIVATE:
13882 7921 : flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
13883 7921 : check_non_private = "firstprivate";
13884 7921 : if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
13885 : {
13886 380 : gcc_assert (code == OMP_TARGET);
13887 : flags |= GOVD_FIRSTPRIVATE_IMPLICIT;
13888 : }
13889 7921 : goto do_add;
13890 7332 : case OMP_CLAUSE_LASTPRIVATE:
13891 7332 : if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
13892 496 : switch (code)
13893 : {
13894 25 : case OMP_DISTRIBUTE:
13895 25 : error_at (OMP_CLAUSE_LOCATION (c),
13896 : "conditional %<lastprivate%> clause on "
13897 : "%qs construct", "distribute");
13898 25 : OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
13899 25 : break;
13900 13 : case OMP_TASKLOOP:
13901 13 : error_at (OMP_CLAUSE_LOCATION (c),
13902 : "conditional %<lastprivate%> clause on "
13903 : "%qs construct", "taskloop");
13904 13 : OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
13905 13 : break;
13906 : default:
13907 : break;
13908 : }
13909 7332 : flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
13910 7332 : if (code != OMP_LOOP)
13911 6981 : check_non_private = "lastprivate";
13912 7332 : decl = OMP_CLAUSE_DECL (c);
13913 7332 : if (error_operand_p (decl))
13914 0 : goto do_add;
13915 7332 : if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
13916 7332 : && !lang_hooks.decls.omp_scalar_p (decl, true))
13917 : {
13918 5 : error_at (OMP_CLAUSE_LOCATION (c),
13919 : "non-scalar variable %qD in conditional "
13920 : "%<lastprivate%> clause", decl);
13921 5 : OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
13922 : }
13923 7332 : if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
13924 453 : flags |= GOVD_LASTPRIVATE_CONDITIONAL;
13925 7332 : omp_lastprivate_for_combined_outer_constructs (outer_ctx, decl,
13926 : false);
13927 7332 : goto do_add;
13928 15217 : case OMP_CLAUSE_REDUCTION:
13929 15217 : if (OMP_CLAUSE_REDUCTION_TASK (c))
13930 : {
13931 594 : if (region_type == ORT_WORKSHARE || code == OMP_SCOPE)
13932 : {
13933 408 : if (nowait == -1)
13934 293 : nowait = omp_find_clause (*list_p,
13935 293 : OMP_CLAUSE_NOWAIT) != NULL_TREE;
13936 408 : if (nowait
13937 15 : && (outer_ctx == NULL
13938 0 : || outer_ctx->region_type != ORT_COMBINED_PARALLEL))
13939 : {
13940 15 : error_at (OMP_CLAUSE_LOCATION (c),
13941 : "%<task%> reduction modifier on a construct "
13942 : "with a %<nowait%> clause");
13943 15 : OMP_CLAUSE_REDUCTION_TASK (c) = 0;
13944 : }
13945 : }
13946 186 : else if ((region_type & ORT_PARALLEL) != ORT_PARALLEL)
13947 : {
13948 40 : error_at (OMP_CLAUSE_LOCATION (c),
13949 : "invalid %<task%> reduction modifier on construct "
13950 : "other than %<parallel%>, %qs, %<sections%> or "
13951 20 : "%<scope%>", lang_GNU_Fortran () ? "do" : "for");
13952 20 : OMP_CLAUSE_REDUCTION_TASK (c) = 0;
13953 : }
13954 : }
13955 15217 : if (OMP_CLAUSE_REDUCTION_INSCAN (c))
13956 831 : switch (code)
13957 : {
13958 4 : case OMP_SECTIONS:
13959 4 : error_at (OMP_CLAUSE_LOCATION (c),
13960 : "%<inscan%> %<reduction%> clause on "
13961 : "%qs construct", "sections");
13962 4 : OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
13963 4 : break;
13964 4 : case OMP_PARALLEL:
13965 4 : error_at (OMP_CLAUSE_LOCATION (c),
13966 : "%<inscan%> %<reduction%> clause on "
13967 : "%qs construct", "parallel");
13968 4 : OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
13969 4 : break;
13970 4 : case OMP_TEAMS:
13971 4 : error_at (OMP_CLAUSE_LOCATION (c),
13972 : "%<inscan%> %<reduction%> clause on "
13973 : "%qs construct", "teams");
13974 4 : OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
13975 4 : break;
13976 4 : case OMP_TASKLOOP:
13977 4 : error_at (OMP_CLAUSE_LOCATION (c),
13978 : "%<inscan%> %<reduction%> clause on "
13979 : "%qs construct", "taskloop");
13980 4 : OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
13981 4 : break;
13982 4 : case OMP_SCOPE:
13983 4 : error_at (OMP_CLAUSE_LOCATION (c),
13984 : "%<inscan%> %<reduction%> clause on "
13985 : "%qs construct", "scope");
13986 4 : OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
13987 4 : break;
13988 : default:
13989 : break;
13990 : }
13991 : /* FALLTHRU */
13992 17821 : case OMP_CLAUSE_IN_REDUCTION:
13993 17821 : case OMP_CLAUSE_TASK_REDUCTION:
13994 17821 : flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
13995 : /* OpenACC permits reductions on private variables. */
13996 17821 : if (!(region_type & ORT_ACC)
13997 : /* taskgroup is actually not a worksharing region. */
13998 12302 : && code != OMP_TASKGROUP)
13999 11772 : check_non_private = omp_clause_code_name[OMP_CLAUSE_CODE (c)];
14000 17821 : decl = OMP_CLAUSE_DECL (c);
14001 17821 : if (TREE_CODE (decl) == MEM_REF)
14002 : {
14003 2539 : tree type = TREE_TYPE (decl);
14004 2539 : bool saved_into_ssa = gimplify_ctxp->into_ssa;
14005 2539 : gimplify_ctxp->into_ssa = false;
14006 2539 : if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type)), pre_p,
14007 : NULL, is_gimple_val, fb_rvalue, false)
14008 : == GS_ERROR)
14009 : {
14010 0 : gimplify_ctxp->into_ssa = saved_into_ssa;
14011 0 : remove = true;
14012 0 : break;
14013 : }
14014 2539 : gimplify_ctxp->into_ssa = saved_into_ssa;
14015 2539 : tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
14016 2539 : if (DECL_P (v))
14017 : {
14018 571 : omp_firstprivatize_variable (ctx, v);
14019 571 : omp_notice_variable (ctx, v, true);
14020 : }
14021 2539 : decl = TREE_OPERAND (decl, 0);
14022 2539 : if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
14023 : {
14024 477 : gimplify_ctxp->into_ssa = false;
14025 477 : if (gimplify_expr (&TREE_OPERAND (decl, 1), pre_p,
14026 : NULL, is_gimple_val, fb_rvalue, false)
14027 : == GS_ERROR)
14028 : {
14029 0 : gimplify_ctxp->into_ssa = saved_into_ssa;
14030 0 : remove = true;
14031 0 : break;
14032 : }
14033 477 : gimplify_ctxp->into_ssa = saved_into_ssa;
14034 477 : v = TREE_OPERAND (decl, 1);
14035 477 : if (DECL_P (v))
14036 : {
14037 477 : omp_firstprivatize_variable (ctx, v);
14038 477 : omp_notice_variable (ctx, v, true);
14039 : }
14040 477 : decl = TREE_OPERAND (decl, 0);
14041 : }
14042 2539 : if (TREE_CODE (decl) == ADDR_EXPR
14043 1163 : || TREE_CODE (decl) == INDIRECT_REF)
14044 1474 : decl = TREE_OPERAND (decl, 0);
14045 : }
14046 17821 : goto do_add_decl;
14047 2668 : case OMP_CLAUSE_LINEAR:
14048 2668 : if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
14049 : is_gimple_val, fb_rvalue) == GS_ERROR)
14050 : {
14051 : remove = true;
14052 : break;
14053 : }
14054 : else
14055 : {
14056 2668 : if (code == OMP_SIMD
14057 2668 : && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
14058 : {
14059 1154 : struct gimplify_omp_ctx *octx = outer_ctx;
14060 1154 : if (octx
14061 741 : && octx->region_type == ORT_WORKSHARE
14062 500 : && octx->combined_loop
14063 500 : && !octx->distribute)
14064 : {
14065 492 : if (octx->outer_context
14066 430 : && (octx->outer_context->region_type
14067 : == ORT_COMBINED_PARALLEL))
14068 393 : octx = octx->outer_context->outer_context;
14069 : else
14070 : octx = octx->outer_context;
14071 : }
14072 905 : if (octx
14073 316 : && octx->region_type == ORT_WORKSHARE
14074 16 : && octx->combined_loop
14075 16 : && octx->distribute)
14076 : {
14077 16 : error_at (OMP_CLAUSE_LOCATION (c),
14078 : "%<linear%> clause for variable other than "
14079 : "loop iterator specified on construct "
14080 : "combined with %<distribute%>");
14081 16 : remove = true;
14082 16 : break;
14083 : }
14084 : }
14085 : /* For combined #pragma omp parallel for simd, need to put
14086 : lastprivate and perhaps firstprivate too on the
14087 : parallel. Similarly for #pragma omp for simd. */
14088 : struct gimplify_omp_ctx *octx = outer_ctx;
14089 : bool taskloop_seen = false;
14090 : decl = NULL_TREE;
14091 3443 : do
14092 : {
14093 3443 : if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
14094 3443 : && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
14095 : break;
14096 3383 : decl = OMP_CLAUSE_DECL (c);
14097 3383 : if (error_operand_p (decl))
14098 : {
14099 : decl = NULL_TREE;
14100 : break;
14101 : }
14102 3383 : flags = GOVD_SEEN;
14103 3383 : if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
14104 1672 : flags |= GOVD_FIRSTPRIVATE;
14105 3383 : if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
14106 3383 : flags |= GOVD_LASTPRIVATE;
14107 3383 : if (octx
14108 2506 : && octx->region_type == ORT_WORKSHARE
14109 1213 : && octx->combined_loop)
14110 : {
14111 1207 : if (octx->outer_context
14112 1037 : && (octx->outer_context->region_type
14113 : == ORT_COMBINED_PARALLEL))
14114 : octx = octx->outer_context;
14115 476 : else if (omp_check_private (octx, decl, false))
14116 : break;
14117 : }
14118 : else if (octx
14119 1299 : && (octx->region_type & ORT_TASK) != 0
14120 307 : && octx->combined_loop)
14121 : taskloop_seen = true;
14122 : else if (octx
14123 996 : && octx->region_type == ORT_COMBINED_PARALLEL
14124 301 : && ((ctx->region_type == ORT_WORKSHARE
14125 201 : && octx == outer_ctx)
14126 100 : || taskloop_seen))
14127 : flags = GOVD_SEEN | GOVD_SHARED;
14128 : else if (octx
14129 695 : && ((octx->region_type & ORT_COMBINED_TEAMS)
14130 : == ORT_COMBINED_TEAMS))
14131 : flags = GOVD_SEEN | GOVD_SHARED;
14132 540 : else if (octx
14133 540 : && octx->region_type == ORT_COMBINED_TARGET)
14134 : {
14135 195 : if (flags & GOVD_LASTPRIVATE)
14136 195 : flags = GOVD_SEEN | GOVD_MAP;
14137 : }
14138 : else
14139 : break;
14140 2071 : splay_tree_node on
14141 2071 : = splay_tree_lookup (octx->variables,
14142 : (splay_tree_key) decl);
14143 2071 : if (on && (on->value & GOVD_DATA_SHARE_CLASS) != 0)
14144 : {
14145 : octx = NULL;
14146 : break;
14147 : }
14148 2067 : omp_add_variable (octx, decl, flags);
14149 2067 : if (octx->outer_context == NULL)
14150 : break;
14151 : octx = octx->outer_context;
14152 : }
14153 : while (1);
14154 2652 : if (octx
14155 2652 : && decl
14156 2652 : && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
14157 728 : || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
14158 1711 : omp_notice_variable (octx, decl, true);
14159 : }
14160 2652 : flags = GOVD_LINEAR | GOVD_EXPLICIT;
14161 2652 : if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
14162 2652 : && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
14163 : {
14164 : notice_outer = false;
14165 : flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
14166 : }
14167 2652 : goto do_add;
14168 :
14169 50069 : case OMP_CLAUSE_MAP:
14170 50069 : if (!grp_start_p)
14171 : {
14172 30144 : grp_start_p = list_p;
14173 30144 : grp_end = (*groups)[grpnum].grp_end;
14174 30144 : grpnum++;
14175 : }
14176 50069 : decl = OMP_CLAUSE_DECL (c);
14177 :
14178 50069 : if (error_operand_p (decl))
14179 : {
14180 : remove = true;
14181 : break;
14182 : }
14183 :
14184 50069 : if (!omp_parse_expr (addr_tokens, decl))
14185 : {
14186 : remove = true;
14187 : break;
14188 : }
14189 :
14190 50069 : if (remove)
14191 : break;
14192 50069 : if (DECL_P (decl) && outer_ctx && (region_type & ORT_ACC))
14193 : {
14194 : struct gimplify_omp_ctx *octx;
14195 1167 : for (octx = outer_ctx; octx; octx = octx->outer_context)
14196 : {
14197 1167 : if (octx->region_type != ORT_ACC_HOST_DATA)
14198 : break;
14199 12 : splay_tree_node n2
14200 12 : = splay_tree_lookup (octx->variables,
14201 : (splay_tree_key) decl);
14202 12 : if (n2)
14203 4 : error_at (OMP_CLAUSE_LOCATION (c), "variable %qE "
14204 : "declared in enclosing %<host_data%> region",
14205 4 : DECL_NAME (decl));
14206 : }
14207 : }
14208 :
14209 50069 : map_descriptor = false;
14210 :
14211 : /* This condition checks if we're mapping an array descriptor that
14212 : isn't inside a derived type -- these have special handling, and
14213 : are not handled as structs in omp_build_struct_sibling_lists.
14214 : See that function for further details. */
14215 50069 : if (*grp_start_p != grp_end
14216 33737 : && OMP_CLAUSE_CHAIN (*grp_start_p)
14217 83806 : && OMP_CLAUSE_CHAIN (*grp_start_p) != grp_end)
14218 : {
14219 16903 : tree grp_mid = OMP_CLAUSE_CHAIN (*grp_start_p);
14220 16903 : if (omp_map_clause_descriptor_p (grp_mid)
14221 31979 : && DECL_P (OMP_CLAUSE_DECL (grp_mid)))
14222 : map_descriptor = true;
14223 : }
14224 33166 : else if (OMP_CLAUSE_CODE (grp_end) == OMP_CLAUSE_MAP
14225 33166 : && (OMP_CLAUSE_MAP_KIND (grp_end) == GOMP_MAP_RELEASE
14226 32530 : || OMP_CLAUSE_MAP_KIND (grp_end) == GOMP_MAP_DELETE)
14227 33944 : && OMP_CLAUSE_RELEASE_DESCRIPTOR (grp_end))
14228 : map_descriptor = true;
14229 :
14230 : /* Adding the decl for a struct access: we haven't created
14231 : GOMP_MAP_STRUCT nodes yet, so this statement needs to predict
14232 : whether they will be created in gimplify_adjust_omp_clauses.
14233 : NOTE: Technically we should probably look through DECL_VALUE_EXPR
14234 : here because something that looks like a DECL_P may actually be a
14235 : struct access, e.g. variables in a lambda closure
14236 : (__closure->__foo) or class members (this->foo). Currently in both
14237 : those cases we map the whole of the containing object (directly in
14238 : the C++ FE) though, so struct nodes are not created. */
14239 50069 : if (c == grp_end
14240 30144 : && addr_tokens[0]->type == STRUCTURE_BASE
14241 8141 : && addr_tokens[0]->u.structure_base_kind == BASE_DECL
14242 58210 : && !map_descriptor)
14243 : {
14244 5367 : gcc_assert (addr_tokens[1]->type == ACCESS_METHOD);
14245 : /* If we got to this struct via a chain of pointers, maybe we
14246 : want to map it implicitly instead. */
14247 5367 : if (omp_access_chain_p (addr_tokens, 1))
14248 : break;
14249 5207 : omp_mapping_group *wholestruct;
14250 5207 : if (!(region_type & ORT_ACC)
14251 9143 : && omp_mapped_by_containing_struct (grpmap,
14252 3936 : OMP_CLAUSE_DECL (c),
14253 : &wholestruct))
14254 : break;
14255 4971 : decl = addr_tokens[1]->expr;
14256 4971 : if (splay_tree_lookup (ctx->variables, (splay_tree_key) decl))
14257 : break;
14258 : /* Standalone attach or detach clauses for a struct element
14259 : should not inhibit implicit mapping of the whole struct. */
14260 2902 : if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
14261 2902 : || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
14262 : break;
14263 2752 : flags = GOVD_MAP | GOVD_EXPLICIT;
14264 :
14265 2752 : gcc_assert (addr_tokens[1]->u.access_kind != ACCESS_DIRECT
14266 : || TREE_ADDRESSABLE (decl));
14267 2752 : goto do_add_decl;
14268 : }
14269 :
14270 44702 : if (!DECL_P (decl))
14271 : {
14272 20812 : tree d = decl, *pd;
14273 20812 : if (TREE_CODE (d) == ARRAY_REF)
14274 : {
14275 4938 : while (TREE_CODE (d) == ARRAY_REF)
14276 2526 : d = TREE_OPERAND (d, 0);
14277 2412 : if (TREE_CODE (d) == COMPONENT_REF
14278 2412 : && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
14279 : decl = d;
14280 : }
14281 20812 : pd = &OMP_CLAUSE_DECL (c);
14282 20812 : if (d == decl
14283 18446 : && TREE_CODE (decl) == INDIRECT_REF
14284 13773 : && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
14285 1069 : && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
14286 : == REFERENCE_TYPE)
14287 21463 : && (OMP_CLAUSE_MAP_KIND (c)
14288 : != GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION))
14289 : {
14290 647 : pd = &TREE_OPERAND (decl, 0);
14291 647 : decl = TREE_OPERAND (decl, 0);
14292 : }
14293 :
14294 20812 : if (addr_tokens[0]->type == STRUCTURE_BASE
14295 11287 : && addr_tokens[0]->u.structure_base_kind == BASE_DECL
14296 11287 : && addr_tokens[1]->type == ACCESS_METHOD
14297 11287 : && (addr_tokens[1]->u.access_kind == ACCESS_POINTER
14298 10790 : || (addr_tokens[1]->u.access_kind
14299 : == ACCESS_POINTER_OFFSET))
14300 21391 : && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)))
14301 : {
14302 0 : tree base = addr_tokens[1]->expr;
14303 0 : splay_tree_node n
14304 0 : = splay_tree_lookup (ctx->variables,
14305 : (splay_tree_key) base);
14306 0 : n->value |= GOVD_SEEN;
14307 : }
14308 :
14309 20812 : if (code == OMP_TARGET && OMP_CLAUSE_MAP_IN_REDUCTION (c))
14310 : {
14311 : /* Don't gimplify *pd fully at this point, as the base
14312 : will need to be adjusted during omp lowering. */
14313 88 : auto_vec<tree, 10> expr_stack;
14314 88 : tree *p = pd;
14315 88 : while (handled_component_p (*p)
14316 : || TREE_CODE (*p) == INDIRECT_REF
14317 : || TREE_CODE (*p) == ADDR_EXPR
14318 : || TREE_CODE (*p) == MEM_REF
14319 224 : || TREE_CODE (*p) == NON_LVALUE_EXPR)
14320 : {
14321 136 : expr_stack.safe_push (*p);
14322 136 : p = &TREE_OPERAND (*p, 0);
14323 : }
14324 312 : for (int i = expr_stack.length () - 1; i >= 0; i--)
14325 : {
14326 136 : tree t = expr_stack[i];
14327 136 : if (TREE_CODE (t) == ARRAY_REF
14328 136 : || TREE_CODE (t) == ARRAY_RANGE_REF)
14329 : {
14330 56 : if (TREE_OPERAND (t, 2) == NULL_TREE)
14331 : {
14332 56 : tree low = unshare_expr (array_ref_low_bound (t));
14333 56 : if (!is_gimple_min_invariant (low))
14334 : {
14335 0 : TREE_OPERAND (t, 2) = low;
14336 0 : if (gimplify_expr (&TREE_OPERAND (t, 2),
14337 : pre_p, NULL,
14338 : is_gimple_reg,
14339 : fb_rvalue) == GS_ERROR)
14340 0 : remove = true;
14341 : }
14342 : }
14343 0 : else if (gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
14344 : NULL, is_gimple_reg,
14345 : fb_rvalue) == GS_ERROR)
14346 0 : remove = true;
14347 56 : if (TREE_OPERAND (t, 3) == NULL_TREE)
14348 : {
14349 56 : tree elmt_size = array_ref_element_size (t);
14350 56 : if (!is_gimple_min_invariant (elmt_size))
14351 : {
14352 0 : elmt_size = unshare_expr (elmt_size);
14353 0 : tree elmt_type
14354 0 : = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t,
14355 : 0)));
14356 0 : tree factor
14357 0 : = size_int (TYPE_ALIGN_UNIT (elmt_type));
14358 0 : elmt_size
14359 0 : = size_binop (EXACT_DIV_EXPR, elmt_size,
14360 : factor);
14361 0 : TREE_OPERAND (t, 3) = elmt_size;
14362 0 : if (gimplify_expr (&TREE_OPERAND (t, 3),
14363 : pre_p, NULL,
14364 : is_gimple_reg,
14365 : fb_rvalue) == GS_ERROR)
14366 0 : remove = true;
14367 : }
14368 : }
14369 0 : else if (gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
14370 : NULL, is_gimple_reg,
14371 : fb_rvalue) == GS_ERROR)
14372 0 : remove = true;
14373 : }
14374 80 : else if (TREE_CODE (t) == COMPONENT_REF)
14375 : {
14376 0 : if (TREE_OPERAND (t, 2) == NULL_TREE)
14377 : {
14378 0 : tree offset = component_ref_field_offset (t);
14379 0 : if (!is_gimple_min_invariant (offset))
14380 : {
14381 0 : offset = unshare_expr (offset);
14382 0 : tree field = TREE_OPERAND (t, 1);
14383 0 : tree factor
14384 0 : = size_int (DECL_OFFSET_ALIGN (field)
14385 : / BITS_PER_UNIT);
14386 0 : offset = size_binop (EXACT_DIV_EXPR, offset,
14387 : factor);
14388 0 : TREE_OPERAND (t, 2) = offset;
14389 0 : if (gimplify_expr (&TREE_OPERAND (t, 2),
14390 : pre_p, NULL,
14391 : is_gimple_reg,
14392 : fb_rvalue) == GS_ERROR)
14393 0 : remove = true;
14394 : }
14395 : }
14396 0 : else if (gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
14397 : NULL, is_gimple_reg,
14398 : fb_rvalue) == GS_ERROR)
14399 0 : remove = true;
14400 : }
14401 : }
14402 224 : for (; expr_stack.length () > 0; )
14403 : {
14404 136 : tree t = expr_stack.pop ();
14405 :
14406 136 : if (TREE_CODE (t) == ARRAY_REF
14407 136 : || TREE_CODE (t) == ARRAY_RANGE_REF)
14408 : {
14409 56 : if (!is_gimple_min_invariant (TREE_OPERAND (t, 1))
14410 56 : && gimplify_expr (&TREE_OPERAND (t, 1), pre_p,
14411 : NULL, is_gimple_val,
14412 : fb_rvalue) == GS_ERROR)
14413 144 : remove = true;
14414 : }
14415 : }
14416 88 : }
14417 : break;
14418 : }
14419 :
14420 23890 : if ((code == OMP_TARGET
14421 : || code == OMP_TARGET_DATA
14422 : || code == OMP_TARGET_ENTER_DATA
14423 14125 : || code == OMP_TARGET_EXIT_DATA)
14424 24437 : && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
14425 : {
14426 : /* If we have attach/detach but the decl we have is a pointer to
14427 : pointer, we're probably mapping the "base level" array
14428 : implicitly. Make sure we don't add the decl as if we mapped
14429 : it explicitly. That is,
14430 :
14431 : int **arr;
14432 : [...]
14433 : #pragma omp target map(arr[a][b:c])
14434 :
14435 : should *not* map "arr" explicitly. That way we get a
14436 : zero-length "alloc" mapping for it, and assuming it's been
14437 : mapped by some previous directive, etc., things work as they
14438 : should. */
14439 :
14440 204 : tree basetype = TREE_TYPE (addr_tokens[0]->expr);
14441 :
14442 204 : if (TREE_CODE (basetype) == REFERENCE_TYPE)
14443 34 : basetype = TREE_TYPE (basetype);
14444 :
14445 204 : if (code == OMP_TARGET
14446 54 : && addr_tokens[0]->type == ARRAY_BASE
14447 54 : && addr_tokens[0]->u.structure_base_kind == BASE_DECL
14448 54 : && TREE_CODE (basetype) == POINTER_TYPE
14449 258 : && TREE_CODE (TREE_TYPE (basetype)) == POINTER_TYPE)
14450 : break;
14451 : }
14452 :
14453 23868 : flags = GOVD_MAP | GOVD_EXPLICIT;
14454 23868 : if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
14455 23757 : || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM
14456 23253 : || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_PRESENT_TO
14457 47095 : || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_PRESENT_TOFROM)
14458 : flags |= GOVD_MAP_ALWAYS_TO;
14459 :
14460 23868 : goto do_add;
14461 :
14462 374 : case OMP_CLAUSE_AFFINITY:
14463 374 : gimplify_omp_affinity (list_p, pre_p);
14464 374 : remove = true;
14465 374 : break;
14466 8 : case OMP_CLAUSE_DOACROSS:
14467 8 : if (OMP_CLAUSE_DOACROSS_KIND (c) == OMP_CLAUSE_DOACROSS_SINK)
14468 : {
14469 4 : tree deps = OMP_CLAUSE_DECL (c);
14470 8 : while (deps && TREE_CODE (deps) == TREE_LIST)
14471 : {
14472 4 : if (TREE_CODE (TREE_PURPOSE (deps)) == TRUNC_DIV_EXPR
14473 4 : && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps), 1)))
14474 0 : gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps), 1),
14475 : pre_p, NULL, is_gimple_val, fb_rvalue);
14476 4 : deps = TREE_CHAIN (deps);
14477 : }
14478 : }
14479 : else
14480 4 : gcc_assert (OMP_CLAUSE_DOACROSS_KIND (c)
14481 : == OMP_CLAUSE_DOACROSS_SOURCE);
14482 : break;
14483 2200 : case OMP_CLAUSE_DEPEND:
14484 2200 : if (handled_depend_iterators == -1)
14485 1898 : handled_depend_iterators = gimplify_omp_depend (list_p, pre_p);
14486 2200 : if (handled_depend_iterators)
14487 : {
14488 343 : if (handled_depend_iterators == 2)
14489 0 : remove = true;
14490 : break;
14491 : }
14492 1857 : if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
14493 : {
14494 0 : gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
14495 : NULL, is_gimple_val, fb_rvalue);
14496 0 : OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
14497 : }
14498 1857 : if (error_operand_p (OMP_CLAUSE_DECL (c)))
14499 : {
14500 : remove = true;
14501 : break;
14502 : }
14503 1857 : if (OMP_CLAUSE_DECL (c) != null_pointer_node)
14504 : {
14505 1826 : OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
14506 1826 : if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
14507 : is_gimple_val, fb_rvalue) == GS_ERROR)
14508 : {
14509 : remove = true;
14510 : break;
14511 : }
14512 : }
14513 1857 : if (code == OMP_TASK)
14514 1399 : ctx->has_depend = true;
14515 : break;
14516 :
14517 8207 : case OMP_CLAUSE_TO:
14518 8207 : case OMP_CLAUSE_FROM:
14519 8207 : case OMP_CLAUSE__CACHE_:
14520 8207 : decl = OMP_CLAUSE_DECL (c);
14521 8207 : if (error_operand_p (decl))
14522 : {
14523 : remove = true;
14524 : break;
14525 : }
14526 8207 : if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
14527 7435 : OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
14528 684 : : TYPE_SIZE_UNIT (TREE_TYPE (decl));
14529 8207 : gimple_seq *seq_p;
14530 8207 : seq_p = enter_omp_iterator_loop_context (c, loops_seq_p, pre_p);
14531 8207 : if (gimplify_expr (&OMP_CLAUSE_SIZE (c), seq_p, NULL,
14532 : is_gimple_val, fb_rvalue) == GS_ERROR)
14533 : {
14534 0 : remove = true;
14535 0 : exit_omp_iterator_loop_context (c);
14536 0 : break;
14537 : }
14538 8207 : if (!DECL_P (decl))
14539 : {
14540 2019 : if (gimplify_expr (&OMP_CLAUSE_DECL (c), seq_p, NULL,
14541 : is_gimple_lvalue, fb_lvalue) == GS_ERROR)
14542 0 : remove = true;
14543 2019 : exit_omp_iterator_loop_context (c);
14544 2019 : break;
14545 : }
14546 6188 : exit_omp_iterator_loop_context (c);
14547 6188 : goto do_notice;
14548 :
14549 125 : case OMP_CLAUSE__MAPPER_BINDING_:
14550 125 : {
14551 125 : tree name = OMP_CLAUSE__MAPPER_BINDING__ID (c);
14552 125 : tree var = OMP_CLAUSE__MAPPER_BINDING__DECL (c);
14553 125 : tree type = TYPE_MAIN_VARIANT (TREE_TYPE (var));
14554 125 : tree fndecl = OMP_CLAUSE__MAPPER_BINDING__MAPPER (c);
14555 125 : ctx->implicit_mappers->put ({ name, type }, fndecl);
14556 125 : remove = true;
14557 125 : break;
14558 : }
14559 :
14560 2118 : case OMP_CLAUSE_USE_DEVICE_PTR:
14561 2118 : case OMP_CLAUSE_USE_DEVICE_ADDR:
14562 2118 : flags = GOVD_EXPLICIT;
14563 2118 : goto do_add;
14564 :
14565 557 : case OMP_CLAUSE_HAS_DEVICE_ADDR:
14566 557 : decl = OMP_CLAUSE_DECL (c);
14567 557 : while (TREE_CODE (decl) == INDIRECT_REF
14568 606 : || TREE_CODE (decl) == ARRAY_REF)
14569 49 : decl = TREE_OPERAND (decl, 0);
14570 557 : flags = GOVD_EXPLICIT;
14571 557 : goto do_add_decl;
14572 :
14573 500 : case OMP_CLAUSE_IS_DEVICE_PTR:
14574 500 : flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
14575 500 : goto do_add;
14576 :
14577 62314 : do_add:
14578 62314 : decl = OMP_CLAUSE_DECL (c);
14579 83444 : do_add_decl:
14580 83444 : if (error_operand_p (decl))
14581 : {
14582 : remove = true;
14583 : break;
14584 : }
14585 83436 : if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0)
14586 : {
14587 1755 : tree t = omp_member_access_dummy_var (decl);
14588 1755 : if (t)
14589 : {
14590 668 : tree v = DECL_VALUE_EXPR (decl);
14591 668 : DECL_NAME (decl) = DECL_NAME (TREE_OPERAND (v, 1));
14592 668 : if (outer_ctx)
14593 140 : omp_notice_variable (outer_ctx, t, true);
14594 : }
14595 : }
14596 83436 : if (code == OACC_DATA
14597 2402 : && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
14598 85838 : && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
14599 307 : flags |= GOVD_MAP_0LEN_ARRAY;
14600 83436 : omp_add_variable (ctx, decl, flags);
14601 83436 : if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
14602 68219 : || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
14603 66145 : || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
14604 86040 : && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
14605 : {
14606 2090 : struct gimplify_omp_ctx *pctx
14607 2150 : = code == OMP_TARGET ? outer_ctx : ctx;
14608 2150 : if (pctx)
14609 2120 : omp_add_variable (pctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
14610 : GOVD_LOCAL | GOVD_SEEN);
14611 2120 : if (pctx
14612 2120 : && OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
14613 632 : && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
14614 : find_decl_expr,
14615 : OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
14616 : NULL) == NULL_TREE)
14617 208 : omp_add_variable (pctx,
14618 208 : OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
14619 : GOVD_LOCAL | GOVD_SEEN);
14620 2150 : gimplify_omp_ctxp = pctx;
14621 2150 : push_gimplify_context ();
14622 :
14623 2150 : OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
14624 2150 : OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
14625 :
14626 2150 : gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
14627 2150 : &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
14628 2150 : pop_gimplify_context
14629 2150 : (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
14630 2150 : push_gimplify_context ();
14631 4300 : gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
14632 2150 : &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
14633 2150 : pop_gimplify_context
14634 2150 : (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
14635 2150 : OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
14636 2150 : OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
14637 :
14638 2150 : gimplify_omp_ctxp = outer_ctx;
14639 : }
14640 81286 : else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
14641 81286 : && OMP_CLAUSE_LASTPRIVATE_STMT (c))
14642 : {
14643 303 : gimplify_omp_ctxp = ctx;
14644 303 : push_gimplify_context ();
14645 303 : if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
14646 : {
14647 303 : tree bind = build3 (BIND_EXPR, void_type_node, NULL,
14648 : NULL, NULL);
14649 303 : TREE_SIDE_EFFECTS (bind) = 1;
14650 303 : BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
14651 303 : OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
14652 : }
14653 606 : gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
14654 303 : &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
14655 303 : pop_gimplify_context
14656 303 : (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
14657 303 : OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
14658 :
14659 303 : gimplify_omp_ctxp = outer_ctx;
14660 : }
14661 80983 : else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
14662 80983 : && OMP_CLAUSE_LINEAR_STMT (c))
14663 : {
14664 60 : gimplify_omp_ctxp = ctx;
14665 60 : push_gimplify_context ();
14666 60 : if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
14667 : {
14668 60 : tree bind = build3 (BIND_EXPR, void_type_node, NULL,
14669 : NULL, NULL);
14670 60 : TREE_SIDE_EFFECTS (bind) = 1;
14671 60 : BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
14672 60 : OMP_CLAUSE_LINEAR_STMT (c) = bind;
14673 : }
14674 120 : gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
14675 60 : &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
14676 60 : pop_gimplify_context
14677 60 : (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
14678 60 : OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;
14679 :
14680 60 : gimplify_omp_ctxp = outer_ctx;
14681 : }
14682 83436 : if (notice_outer)
14683 71327 : goto do_notice;
14684 : break;
14685 :
14686 906 : case OMP_CLAUSE_COPYIN:
14687 906 : case OMP_CLAUSE_COPYPRIVATE:
14688 906 : decl = OMP_CLAUSE_DECL (c);
14689 906 : if (error_operand_p (decl))
14690 : {
14691 : remove = true;
14692 : break;
14693 : }
14694 906 : if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
14695 : && !remove
14696 906 : && !omp_check_private (ctx, decl, true))
14697 : {
14698 35 : remove = true;
14699 35 : if (is_global_var (decl))
14700 : {
14701 30 : if (DECL_THREAD_LOCAL_P (decl))
14702 : remove = false;
14703 10 : else if (DECL_HAS_VALUE_EXPR_P (decl))
14704 : {
14705 2 : tree value = get_base_address (DECL_VALUE_EXPR (decl));
14706 :
14707 2 : if (value
14708 2 : && DECL_P (value)
14709 4 : && DECL_THREAD_LOCAL_P (value))
14710 : remove = false;
14711 : }
14712 : }
14713 : if (remove)
14714 13 : error_at (OMP_CLAUSE_LOCATION (c),
14715 : "copyprivate variable %qE is not threadprivate"
14716 13 : " or private in outer context", DECL_NAME (decl));
14717 : }
14718 78421 : do_notice:
14719 78421 : if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
14720 63204 : || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
14721 55283 : || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
14722 30470 : && outer_ctx
14723 17546 : && ((region_type & ORT_TASKLOOP) == ORT_TASKLOOP
14724 16207 : || (region_type == ORT_WORKSHARE
14725 3884 : && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
14726 1087 : && (OMP_CLAUSE_REDUCTION_INSCAN (c)
14727 912 : || code == OMP_LOOP)))
14728 80366 : && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
14729 857 : || (code == OMP_LOOP
14730 138 : && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
14731 138 : && ((outer_ctx->region_type & ORT_COMBINED_TEAMS)
14732 : == ORT_COMBINED_TEAMS))))
14733 : {
14734 1209 : splay_tree_node on
14735 1209 : = splay_tree_lookup (outer_ctx->variables,
14736 : (splay_tree_key)decl);
14737 1209 : if (on == NULL || (on->value & GOVD_DATA_SHARE_CLASS) == 0)
14738 : {
14739 891 : if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
14740 705 : && TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
14741 1003 : && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
14742 56 : || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
14743 0 : && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
14744 : == POINTER_TYPE))))
14745 56 : omp_firstprivatize_variable (outer_ctx, decl);
14746 : else
14747 : {
14748 835 : omp_add_variable (outer_ctx, decl,
14749 : GOVD_SEEN | GOVD_SHARED);
14750 835 : if (outer_ctx->outer_context)
14751 235 : omp_notice_variable (outer_ctx->outer_context, decl,
14752 : true);
14753 : }
14754 : }
14755 : }
14756 77821 : if (outer_ctx)
14757 29583 : omp_notice_variable (outer_ctx, decl, true);
14758 78421 : if (check_non_private
14759 26674 : && (region_type == ORT_WORKSHARE || code == OMP_SCOPE)
14760 4747 : && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
14761 1641 : || decl == OMP_CLAUSE_DECL (c)
14762 224 : || (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
14763 224 : && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
14764 : == ADDR_EXPR
14765 103 : || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
14766 : == POINTER_PLUS_EXPR
14767 22 : && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
14768 : (OMP_CLAUSE_DECL (c), 0), 0))
14769 : == ADDR_EXPR)))))
14770 83080 : && omp_check_private (ctx, decl, false))
14771 : {
14772 58 : error ("%s variable %qE is private in outer context",
14773 29 : check_non_private, DECL_NAME (decl));
14774 29 : remove = true;
14775 : }
14776 : break;
14777 :
14778 190 : case OMP_CLAUSE_DETACH:
14779 190 : flags = GOVD_FIRSTPRIVATE | GOVD_SEEN;
14780 190 : goto do_add;
14781 :
14782 4102 : case OMP_CLAUSE_IF:
14783 4102 : if (OMP_CLAUSE_IF_MODIFIER (c) != ERROR_MARK
14784 4102 : && OMP_CLAUSE_IF_MODIFIER (c) != code)
14785 : {
14786 : const char *p[2];
14787 168 : for (int i = 0; i < 2; i++)
14788 112 : switch (i ? OMP_CLAUSE_IF_MODIFIER (c) : code)
14789 : {
14790 8 : case VOID_CST: p[i] = "cancel"; break;
14791 24 : case OMP_PARALLEL: p[i] = "parallel"; break;
14792 4 : case OMP_SIMD: p[i] = "simd"; break;
14793 12 : case OMP_TASK: p[i] = "task"; break;
14794 12 : case OMP_TASKLOOP: p[i] = "taskloop"; break;
14795 8 : case OMP_TARGET_DATA: p[i] = "target data"; break;
14796 12 : case OMP_TARGET: p[i] = "target"; break;
14797 12 : case OMP_TARGET_UPDATE: p[i] = "target update"; break;
14798 8 : case OMP_TARGET_ENTER_DATA:
14799 8 : p[i] = "target enter data"; break;
14800 12 : case OMP_TARGET_EXIT_DATA: p[i] = "target exit data"; break;
14801 0 : default: gcc_unreachable ();
14802 : }
14803 56 : error_at (OMP_CLAUSE_LOCATION (c),
14804 : "expected %qs %<if%> clause modifier rather than %qs",
14805 : p[0], p[1]);
14806 56 : remove = true;
14807 : }
14808 : /* Fall through. */
14809 :
14810 4812 : case OMP_CLAUSE_SELF:
14811 4812 : case OMP_CLAUSE_FINAL:
14812 4812 : OMP_CLAUSE_OPERAND (c, 0)
14813 9624 : = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
14814 : /* Fall through. */
14815 :
14816 5715 : case OMP_CLAUSE_NUM_TEAMS:
14817 5715 : if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS
14818 903 : && OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)
14819 5960 : && !is_gimple_min_invariant (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)))
14820 : {
14821 208 : if (error_operand_p (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)))
14822 : {
14823 : remove = true;
14824 : break;
14825 : }
14826 208 : OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)
14827 416 : = get_initialized_tmp_var (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c),
14828 : pre_p, NULL, true);
14829 : }
14830 : /* Fall through. */
14831 :
14832 21766 : case OMP_CLAUSE_SCHEDULE:
14833 21766 : case OMP_CLAUSE_NUM_THREADS:
14834 21766 : case OMP_CLAUSE_THREAD_LIMIT:
14835 21766 : case OMP_CLAUSE_DIST_SCHEDULE:
14836 21766 : case OMP_CLAUSE_DEVICE:
14837 21766 : if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEVICE
14838 21766 : && OMP_CLAUSE_DEVICE_ANCESTOR (c))
14839 : {
14840 125 : if (code != OMP_TARGET)
14841 : {
14842 20 : error_at (OMP_CLAUSE_LOCATION (c),
14843 : "%<device%> clause with %<ancestor%> is only "
14844 : "allowed on %<target%> construct");
14845 20 : remove = true;
14846 20 : break;
14847 : }
14848 :
14849 105 : tree clauses = *orig_list_p;
14850 330 : for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
14851 236 : if (OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_DEVICE
14852 : && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_FIRSTPRIVATE
14853 : && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_PRIVATE
14854 : && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_DEFAULTMAP
14855 : && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_MAP
14856 : )
14857 : {
14858 11 : error_at (OMP_CLAUSE_LOCATION (c),
14859 : "with %<ancestor%>, only the %<device%>, "
14860 : "%<firstprivate%>, %<private%>, %<defaultmap%>, "
14861 : "and %<map%> clauses may appear on the "
14862 : "construct");
14863 11 : remove = true;
14864 11 : break;
14865 : }
14866 : }
14867 21641 : else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEVICE
14868 21641 : && code == OMP_DISPATCH)
14869 : {
14870 272 : bool saved_into_ssa = gimplify_ctxp->into_ssa;
14871 272 : gimplify_ctxp->into_ssa = false;
14872 272 : if (gimplify_expr (&OMP_CLAUSE_DEVICE_ID (c), pre_p, NULL,
14873 : is_gimple_val, fb_rvalue)
14874 : == GS_ERROR)
14875 : remove = true;
14876 272 : else if (DECL_P (OMP_CLAUSE_DEVICE_ID (c)))
14877 55 : omp_add_variable (ctx, OMP_CLAUSE_DEVICE_ID (c),
14878 : GOVD_SHARED | GOVD_SEEN);
14879 272 : gimplify_ctxp->into_ssa = saved_into_ssa;
14880 272 : break;
14881 : }
14882 : /* Fall through. */
14883 :
14884 31215 : case OMP_CLAUSE_PRIORITY:
14885 31215 : case OMP_CLAUSE_GRAINSIZE:
14886 31215 : case OMP_CLAUSE_NUM_TASKS:
14887 31215 : case OMP_CLAUSE_FILTER:
14888 31215 : case OMP_CLAUSE_HINT:
14889 31215 : case OMP_CLAUSE_ASYNC:
14890 31215 : case OMP_CLAUSE_WAIT:
14891 31215 : case OMP_CLAUSE_NUM_GANGS:
14892 31215 : case OMP_CLAUSE_NUM_WORKERS:
14893 31215 : case OMP_CLAUSE_VECTOR_LENGTH:
14894 31215 : case OMP_CLAUSE_WORKER:
14895 31215 : case OMP_CLAUSE_VECTOR:
14896 31215 : if (OMP_CLAUSE_OPERAND (c, 0)
14897 31215 : && !is_gimple_min_invariant (OMP_CLAUSE_OPERAND (c, 0)))
14898 : {
14899 7756 : if (error_operand_p (OMP_CLAUSE_OPERAND (c, 0)))
14900 : {
14901 : remove = true;
14902 : break;
14903 : }
14904 : /* All these clauses care about value, not a particular decl,
14905 : so try to force it into a SSA_NAME or fresh temporary. */
14906 7749 : OMP_CLAUSE_OPERAND (c, 0)
14907 15498 : = get_initialized_tmp_var (OMP_CLAUSE_OPERAND (c, 0),
14908 : pre_p, NULL, true);
14909 : }
14910 : break;
14911 :
14912 2331 : case OMP_CLAUSE_GANG:
14913 2331 : if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
14914 : is_gimple_val, fb_rvalue) == GS_ERROR)
14915 0 : remove = true;
14916 2331 : if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
14917 : is_gimple_val, fb_rvalue) == GS_ERROR)
14918 0 : remove = true;
14919 : break;
14920 :
14921 13086 : case OMP_CLAUSE_NOWAIT:
14922 13086 : nowait = 1;
14923 13086 : break;
14924 :
14925 72 : case OMP_CLAUSE_USES_ALLOCATORS:
14926 72 : sorry_at (OMP_CLAUSE_LOCATION (c), "%<uses_allocators%> clause");
14927 72 : remove = 1;
14928 72 : break;
14929 :
14930 : case OMP_CLAUSE_ORDERED:
14931 : case OMP_CLAUSE_UNTIED:
14932 : case OMP_CLAUSE_COLLAPSE:
14933 : case OMP_CLAUSE_TILE:
14934 : case OMP_CLAUSE_AUTO:
14935 : case OMP_CLAUSE_SEQ:
14936 : case OMP_CLAUSE_INDEPENDENT:
14937 : case OMP_CLAUSE_MERGEABLE:
14938 : case OMP_CLAUSE_PROC_BIND:
14939 : case OMP_CLAUSE_SAFELEN:
14940 : case OMP_CLAUSE_SIMDLEN:
14941 : case OMP_CLAUSE_NOGROUP:
14942 : case OMP_CLAUSE_THREADS:
14943 : case OMP_CLAUSE_SIMD:
14944 : case OMP_CLAUSE_BIND:
14945 : case OMP_CLAUSE_IF_PRESENT:
14946 : case OMP_CLAUSE_FINALIZE:
14947 : case OMP_CLAUSE_INTEROP:
14948 : case OMP_CLAUSE_INIT:
14949 : case OMP_CLAUSE_USE:
14950 : case OMP_CLAUSE_DESTROY:
14951 : case OMP_CLAUSE_DEVICE_TYPE:
14952 : break;
14953 :
14954 52 : case OMP_CLAUSE_DYN_GROUPPRIVATE:
14955 52 : remove = true;
14956 52 : sorry_at (OMP_CLAUSE_LOCATION (c),"%<dyn_groupprivate%> clause");
14957 52 : break;
14958 :
14959 3976 : case OMP_CLAUSE_ORDER:
14960 3976 : ctx->order_concurrent = true;
14961 3976 : break;
14962 :
14963 1010 : case OMP_CLAUSE_DEFAULTMAP:
14964 1010 : enum gimplify_defaultmap_kind gdmkmin, gdmkmax;
14965 1010 : switch (OMP_CLAUSE_DEFAULTMAP_CATEGORY (c))
14966 : {
14967 : case OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED:
14968 : case OMP_CLAUSE_DEFAULTMAP_CATEGORY_ALL:
14969 : gdmkmin = GDMK_SCALAR;
14970 : gdmkmax = GDMK_POINTER;
14971 : break;
14972 : case OMP_CLAUSE_DEFAULTMAP_CATEGORY_SCALAR:
14973 : gdmkmin = GDMK_SCALAR;
14974 : gdmkmax = GDMK_SCALAR_TARGET;
14975 : break;
14976 : case OMP_CLAUSE_DEFAULTMAP_CATEGORY_AGGREGATE:
14977 : gdmkmin = gdmkmax = GDMK_AGGREGATE;
14978 : break;
14979 : case OMP_CLAUSE_DEFAULTMAP_CATEGORY_ALLOCATABLE:
14980 : gdmkmin = gdmkmax = GDMK_ALLOCATABLE;
14981 : break;
14982 : case OMP_CLAUSE_DEFAULTMAP_CATEGORY_POINTER:
14983 : gdmkmin = gdmkmax = GDMK_POINTER;
14984 : break;
14985 0 : default:
14986 0 : gcc_unreachable ();
14987 : }
14988 4471 : for (int gdmk = gdmkmin; gdmk <= gdmkmax; gdmk++)
14989 3461 : switch (OMP_CLAUSE_DEFAULTMAP_BEHAVIOR (c))
14990 : {
14991 91 : case OMP_CLAUSE_DEFAULTMAP_ALLOC:
14992 91 : ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_ALLOC_ONLY;
14993 91 : break;
14994 96 : case OMP_CLAUSE_DEFAULTMAP_TO:
14995 96 : ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_TO_ONLY;
14996 96 : break;
14997 28 : case OMP_CLAUSE_DEFAULTMAP_FROM:
14998 28 : ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_FROM_ONLY;
14999 28 : break;
15000 671 : case OMP_CLAUSE_DEFAULTMAP_TOFROM:
15001 671 : ctx->defaultmap[gdmk] = GOVD_MAP;
15002 671 : break;
15003 380 : case OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE:
15004 380 : ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
15005 380 : break;
15006 2094 : case OMP_CLAUSE_DEFAULTMAP_NONE:
15007 2094 : ctx->defaultmap[gdmk] = 0;
15008 2094 : break;
15009 45 : case OMP_CLAUSE_DEFAULTMAP_PRESENT:
15010 45 : ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
15011 45 : break;
15012 56 : case OMP_CLAUSE_DEFAULTMAP_DEFAULT:
15013 56 : switch (gdmk)
15014 : {
15015 11 : case GDMK_SCALAR:
15016 11 : ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
15017 11 : break;
15018 11 : case GDMK_SCALAR_TARGET:
15019 11 : ctx->defaultmap[gdmk] = (lang_GNU_Fortran ()
15020 11 : ? GOVD_MAP : GOVD_FIRSTPRIVATE);
15021 11 : break;
15022 14 : case GDMK_AGGREGATE:
15023 14 : case GDMK_ALLOCATABLE:
15024 14 : ctx->defaultmap[gdmk] = GOVD_MAP;
15025 14 : break;
15026 20 : case GDMK_POINTER:
15027 20 : ctx->defaultmap[gdmk] = GOVD_MAP;
15028 20 : if (!lang_GNU_Fortran ())
15029 12 : ctx->defaultmap[gdmk] |= GOVD_MAP_0LEN_ARRAY;
15030 : break;
15031 : default:
15032 : gcc_unreachable ();
15033 : }
15034 : break;
15035 0 : default:
15036 0 : gcc_unreachable ();
15037 : }
15038 : break;
15039 :
15040 824 : case OMP_CLAUSE_ALIGNED:
15041 824 : decl = OMP_CLAUSE_DECL (c);
15042 824 : if (error_operand_p (decl))
15043 : {
15044 : remove = true;
15045 : break;
15046 : }
15047 824 : if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
15048 : is_gimple_val, fb_rvalue) == GS_ERROR)
15049 : {
15050 : remove = true;
15051 : break;
15052 : }
15053 824 : if (!is_global_var (decl)
15054 824 : && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
15055 592 : omp_add_variable (ctx, decl, GOVD_ALIGNED);
15056 : break;
15057 :
15058 489 : case OMP_CLAUSE_NONTEMPORAL:
15059 489 : decl = OMP_CLAUSE_DECL (c);
15060 489 : if (error_operand_p (decl))
15061 : {
15062 : remove = true;
15063 : break;
15064 : }
15065 489 : omp_add_variable (ctx, decl, GOVD_NONTEMPORAL);
15066 489 : break;
15067 :
15068 3541 : case OMP_CLAUSE_ALLOCATE:
15069 3541 : decl = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
15070 3541 : if (decl
15071 1700 : && TREE_CODE (decl) == INTEGER_CST
15072 3570 : && wi::eq_p (wi::to_widest (decl), GOMP_OMP_PREDEF_ALLOC_THREADS)
15073 3570 : && (code == OMP_TARGET || code == OMP_TASK || code == OMP_TASKLOOP))
15074 35 : warning_at (OMP_CLAUSE_LOCATION (c), OPT_Wopenmp,
15075 : "allocator with access trait set to %<thread%> "
15076 : "results in undefined behavior for %qs directive",
15077 : code == OMP_TARGET ? "target"
15078 : : (code == OMP_TASK
15079 15 : ? "task" : "taskloop"));
15080 3541 : decl = OMP_CLAUSE_DECL (c);
15081 3541 : if (error_operand_p (decl))
15082 : {
15083 : remove = true;
15084 : break;
15085 : }
15086 3541 : if (gimplify_expr (&OMP_CLAUSE_ALLOCATE_ALLOCATOR (c), pre_p, NULL,
15087 : is_gimple_val, fb_rvalue) == GS_ERROR)
15088 : {
15089 : remove = true;
15090 : break;
15091 : }
15092 3541 : else if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c) == NULL_TREE
15093 3541 : || (TREE_CODE (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c))
15094 : == INTEGER_CST))
15095 : ;
15096 503 : else if (code == OMP_TASKLOOP
15097 503 : || !DECL_P (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)))
15098 66 : OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
15099 132 : = get_initialized_tmp_var (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
15100 : pre_p, NULL, false);
15101 : break;
15102 :
15103 4353 : case OMP_CLAUSE_DEFAULT:
15104 4353 : ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
15105 4353 : break;
15106 :
15107 640 : case OMP_CLAUSE_INCLUSIVE:
15108 640 : case OMP_CLAUSE_EXCLUSIVE:
15109 640 : decl = OMP_CLAUSE_DECL (c);
15110 640 : {
15111 640 : splay_tree_node n = splay_tree_lookup (outer_ctx->variables,
15112 : (splay_tree_key) decl);
15113 640 : if (n == NULL || (n->value & GOVD_REDUCTION) == 0)
15114 : {
15115 5 : error_at (OMP_CLAUSE_LOCATION (c),
15116 : "%qD specified in %qs clause but not in %<inscan%> "
15117 : "%<reduction%> clause on the containing construct",
15118 5 : decl, omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
15119 5 : remove = true;
15120 : }
15121 : else
15122 : {
15123 635 : n->value |= GOVD_REDUCTION_INSCAN;
15124 635 : if (outer_ctx->region_type == ORT_SIMD
15125 520 : && outer_ctx->outer_context
15126 107 : && outer_ctx->outer_context->region_type == ORT_WORKSHARE)
15127 : {
15128 107 : n = splay_tree_lookup (outer_ctx->outer_context->variables,
15129 : (splay_tree_key) decl);
15130 107 : if (n && (n->value & GOVD_REDUCTION) != 0)
15131 107 : n->value |= GOVD_REDUCTION_INSCAN;
15132 : }
15133 : }
15134 : }
15135 : break;
15136 :
15137 103 : case OMP_CLAUSE_NOVARIANTS:
15138 103 : OMP_CLAUSE_NOVARIANTS_EXPR (c)
15139 103 : = gimple_boolify (OMP_CLAUSE_NOVARIANTS_EXPR (c));
15140 103 : break;
15141 115 : case OMP_CLAUSE_NOCONTEXT:
15142 115 : OMP_CLAUSE_NOCONTEXT_EXPR (c)
15143 115 : = gimple_boolify (OMP_CLAUSE_NOCONTEXT_EXPR (c));
15144 115 : break;
15145 0 : case OMP_CLAUSE_NOHOST:
15146 0 : default:
15147 0 : gcc_unreachable ();
15148 : }
15149 :
15150 1399 : if (code == OACC_DATA
15151 5123 : && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
15152 207353 : && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
15153 4504 : || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
15154 : remove = true;
15155 203634 : if (remove)
15156 1095 : *list_p = OMP_CLAUSE_CHAIN (c);
15157 : else
15158 202846 : list_p = &OMP_CLAUSE_CHAIN (c);
15159 203941 : }
15160 :
15161 129870 : if (groups)
15162 : {
15163 35236 : delete grpmap;
15164 17618 : delete groups;
15165 : }
15166 :
15167 129870 : ctx->clauses = *orig_list_p;
15168 129870 : gimplify_omp_ctxp = ctx;
15169 129870 : }
15170 :
15171 : /* Return true if DECL is a candidate for shared to firstprivate
15172 : optimization. We only consider non-addressable scalars, not
15173 : too big, and not references. */
15174 :
15175 : static bool
15176 403827 : omp_shared_to_firstprivate_optimizable_decl_p (tree decl)
15177 : {
15178 403827 : if (TREE_ADDRESSABLE (decl))
15179 : return false;
15180 354726 : tree type = TREE_TYPE (decl);
15181 354726 : if (!is_gimple_reg_type (type)
15182 329990 : || TREE_CODE (type) == REFERENCE_TYPE
15183 680451 : || TREE_ADDRESSABLE (type))
15184 : return false;
15185 : /* Don't optimize too large decls, as each thread/task will have
15186 : its own. */
15187 325725 : HOST_WIDE_INT len = int_size_in_bytes (type);
15188 325725 : if (len == -1 || len > 4 * POINTER_SIZE / BITS_PER_UNIT)
15189 : return false;
15190 325708 : if (omp_privatize_by_reference (decl))
15191 : return false;
15192 : return true;
15193 : }
15194 :
15195 : /* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
15196 : For omp_shared_to_firstprivate_optimizable_decl_p decl mark it as
15197 : GOVD_WRITTEN in outer contexts. */
15198 :
15199 : static void
15200 302816 : omp_mark_stores (struct gimplify_omp_ctx *ctx, tree decl)
15201 : {
15202 464525 : for (; ctx; ctx = ctx->outer_context)
15203 : {
15204 447525 : splay_tree_node n = splay_tree_lookup (ctx->variables,
15205 : (splay_tree_key) decl);
15206 447525 : if (n == NULL)
15207 158130 : continue;
15208 289395 : else if (n->value & GOVD_SHARED)
15209 : {
15210 9115 : n->value |= GOVD_WRITTEN;
15211 9115 : return;
15212 : }
15213 280280 : else if (n->value & GOVD_DATA_SHARE_CLASS)
15214 : return;
15215 : }
15216 : }
15217 :
15218 : /* Helper callback for walk_gimple_seq to discover possible stores
15219 : to omp_shared_to_firstprivate_optimizable_decl_p decls and set
15220 : GOVD_WRITTEN if they are GOVD_SHARED in some outer context
15221 : for those. */
15222 :
15223 : static tree
15224 1123132 : omp_find_stores_op (tree *tp, int *walk_subtrees, void *data)
15225 : {
15226 1123132 : struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
15227 :
15228 1123132 : *walk_subtrees = 0;
15229 1123132 : if (!wi->is_lhs)
15230 : return NULL_TREE;
15231 :
15232 320560 : tree op = *tp;
15233 399298 : do
15234 : {
15235 399298 : if (handled_component_p (op))
15236 78738 : op = TREE_OPERAND (op, 0);
15237 320560 : else if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
15238 320560 : && TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
15239 0 : op = TREE_OPERAND (TREE_OPERAND (op, 0), 0);
15240 : else
15241 : break;
15242 : }
15243 : while (1);
15244 320560 : if (!DECL_P (op) || !omp_shared_to_firstprivate_optimizable_decl_p (op))
15245 58047 : return NULL_TREE;
15246 :
15247 262513 : omp_mark_stores (gimplify_omp_ctxp, op);
15248 262513 : return NULL_TREE;
15249 : }
15250 :
15251 : /* Helper callback for walk_gimple_seq to discover possible stores
15252 : to omp_shared_to_firstprivate_optimizable_decl_p decls and set
15253 : GOVD_WRITTEN if they are GOVD_SHARED in some outer context
15254 : for those. */
15255 :
15256 : static tree
15257 624554 : omp_find_stores_stmt (gimple_stmt_iterator *gsi_p,
15258 : bool *handled_ops_p,
15259 : struct walk_stmt_info *wi)
15260 : {
15261 624554 : gimple *stmt = gsi_stmt (*gsi_p);
15262 624554 : switch (gimple_code (stmt))
15263 : {
15264 : /* Don't recurse on OpenMP constructs for which
15265 : gimplify_adjust_omp_clauses already handled the bodies,
15266 : except handle gimple_omp_for_pre_body. */
15267 26682 : case GIMPLE_OMP_FOR:
15268 26682 : *handled_ops_p = true;
15269 26682 : if (gimple_omp_for_pre_body (stmt))
15270 1780 : walk_gimple_seq (gimple_omp_for_pre_body (stmt),
15271 : omp_find_stores_stmt, omp_find_stores_op, wi);
15272 : break;
15273 8560 : case GIMPLE_OMP_PARALLEL:
15274 8560 : case GIMPLE_OMP_TASK:
15275 8560 : case GIMPLE_OMP_SECTIONS:
15276 8560 : case GIMPLE_OMP_SINGLE:
15277 8560 : case GIMPLE_OMP_SCOPE:
15278 8560 : case GIMPLE_OMP_TARGET:
15279 8560 : case GIMPLE_OMP_TEAMS:
15280 8560 : case GIMPLE_OMP_CRITICAL:
15281 8560 : *handled_ops_p = true;
15282 8560 : break;
15283 : default:
15284 : break;
15285 : }
15286 624554 : return NULL_TREE;
15287 : }
15288 :
/* State threaded through gimplify_adjust_omp_clauses_1 via the single
   void * argument of the splay-tree foreach callback.  */
struct gimplify_adjust_omp_clauses_data
{
  /* Head of the OMP clause chain being adjusted; built-up clauses are
     chained in front of *list_p.  */
  tree *list_p;
  /* Statement sequence preceding the construct, for any statements the
     adjustment needs to emit — TODO(review): confirm against the
     callers past this chunk.  */
  gimple_seq *pre_p;
};
15294 :
15295 : /* For all variables that were not actually used within the context,
15296 : remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
15297 :
15298 : static int
15299 669765 : gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
15300 : {
15301 669765 : tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
15302 669765 : gimple_seq *pre_p
15303 : = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
15304 669765 : tree decl = (tree) n->key;
15305 669765 : unsigned flags = n->value;
15306 669765 : enum omp_clause_code code;
15307 669765 : tree clause;
15308 669765 : bool private_debug;
15309 :
15310 669765 : if (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
15311 132692 : && (flags & GOVD_LASTPRIVATE_CONDITIONAL) != 0)
15312 : flags = GOVD_SHARED | GOVD_SEEN | GOVD_WRITTEN;
15313 669640 : if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
15314 : return 0;
15315 178133 : if ((flags & GOVD_SEEN) == 0)
15316 : return 0;
15317 157117 : if (flags & GOVD_DEBUG_PRIVATE)
15318 : {
15319 260 : gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_SHARED);
15320 : private_debug = true;
15321 : }
15322 156857 : else if (flags & GOVD_MAP)
15323 : private_debug = false;
15324 : else
15325 138859 : private_debug
15326 138859 : = lang_hooks.decls.omp_private_debug_clause (decl,
15327 138859 : !!(flags & GOVD_SHARED));
15328 138859 : if (private_debug)
15329 : code = OMP_CLAUSE_PRIVATE;
15330 156735 : else if (flags & GOVD_MAP)
15331 : {
15332 17998 : code = OMP_CLAUSE_MAP;
15333 17998 : if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0
15334 17998 : && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
15335 : {
15336 2 : error ("%<_Atomic%> %qD in implicit %<map%> clause", decl);
15337 2 : return 0;
15338 : }
15339 17996 : if (VAR_P (decl)
15340 16058 : && DECL_IN_CONSTANT_POOL (decl)
15341 17997 : && !lookup_attribute ("omp declare target",
15342 1 : DECL_ATTRIBUTES (decl)))
15343 : {
15344 1 : tree id = get_identifier ("omp declare target");
15345 1 : DECL_ATTRIBUTES (decl)
15346 1 : = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (decl));
15347 1 : varpool_node *node = varpool_node::get (decl);
15348 1 : if (node)
15349 : {
15350 1 : node->offloadable = 1;
15351 1 : if (ENABLE_OFFLOADING)
15352 : g->have_offload = true;
15353 : }
15354 : }
15355 : }
15356 138737 : else if (flags & GOVD_SHARED)
15357 : {
15358 49053 : if (is_global_var (decl))
15359 : {
15360 16138 : struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
15361 25325 : while (ctx != NULL)
15362 : {
15363 17604 : splay_tree_node on
15364 17604 : = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
15365 17604 : if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
15366 : | GOVD_PRIVATE | GOVD_REDUCTION
15367 : | GOVD_LINEAR | GOVD_MAP)) != 0)
15368 : break;
15369 9187 : ctx = ctx->outer_context;
15370 : }
15371 16138 : if (ctx == NULL)
15372 : return 0;
15373 : }
15374 41332 : code = OMP_CLAUSE_SHARED;
15375 : /* Don't optimize shared into firstprivate for read-only vars
15376 : on tasks with depend clause, we shouldn't try to copy them
15377 : until the dependencies are satisfied. */
15378 41332 : if (gimplify_omp_ctxp->has_depend)
15379 350 : flags |= GOVD_WRITTEN;
15380 : }
15381 89684 : else if (flags & GOVD_PRIVATE)
15382 : code = OMP_CLAUSE_PRIVATE;
15383 31217 : else if (flags & GOVD_FIRSTPRIVATE)
15384 : {
15385 21816 : code = OMP_CLAUSE_FIRSTPRIVATE;
15386 21816 : if ((gimplify_omp_ctxp->region_type & ORT_TARGET)
15387 13783 : && (gimplify_omp_ctxp->region_type & ORT_ACC) == 0
15388 31918 : && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
15389 : {
15390 1 : error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on "
15391 : "%<target%> construct", decl);
15392 1 : return 0;
15393 : }
15394 : }
15395 9401 : else if (flags & GOVD_LASTPRIVATE)
15396 : code = OMP_CLAUSE_LASTPRIVATE;
15397 241 : else if (flags & (GOVD_ALIGNED | GOVD_NONTEMPORAL))
15398 : return 0;
15399 118 : else if (flags & GOVD_CONDTEMP)
15400 : {
15401 118 : code = OMP_CLAUSE__CONDTEMP_;
15402 118 : gimple_add_tmp_var (decl);
15403 : }
15404 : else
15405 0 : gcc_unreachable ();
15406 :
15407 140110 : if (((flags & GOVD_LASTPRIVATE)
15408 139420 : || (code == OMP_CLAUSE_SHARED && (flags & GOVD_WRITTEN)))
15409 155329 : && omp_shared_to_firstprivate_optimizable_decl_p (decl))
15410 15205 : omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
15411 :
15412 149270 : tree chain = *list_p;
15413 149270 : clause = build_omp_clause (input_location, code);
15414 149270 : OMP_CLAUSE_DECL (clause) = decl;
15415 149270 : OMP_CLAUSE_CHAIN (clause) = chain;
15416 149270 : if (private_debug)
15417 382 : OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
15418 148888 : else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
15419 6 : OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
15420 148882 : else if (code == OMP_CLAUSE_SHARED
15421 41332 : && (flags & GOVD_WRITTEN) == 0
15422 184155 : && omp_shared_to_firstprivate_optimizable_decl_p (decl))
15423 21042 : OMP_CLAUSE_SHARED_READONLY (clause) = 1;
15424 127840 : else if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_EXPLICIT) == 0)
15425 21815 : OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause) = 1;
15426 106025 : else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0)
15427 : {
15428 626 : tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP);
15429 626 : OMP_CLAUSE_DECL (nc) = decl;
15430 626 : if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
15431 626 : && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
15432 15 : OMP_CLAUSE_DECL (clause)
15433 30 : = build_fold_indirect_ref_loc (input_location, decl);
15434 626 : OMP_CLAUSE_DECL (clause)
15435 626 : = build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause),
15436 : build_int_cst (build_pointer_type (char_type_node), 0));
15437 626 : OMP_CLAUSE_SIZE (clause) = size_zero_node;
15438 626 : OMP_CLAUSE_SIZE (nc) = size_zero_node;
15439 626 : OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC);
15440 626 : OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1;
15441 626 : tree dtype = TREE_TYPE (decl);
15442 626 : if (TREE_CODE (dtype) == REFERENCE_TYPE)
15443 15 : dtype = TREE_TYPE (dtype);
15444 : /* FIRSTPRIVATE_POINTER doesn't work well if we have a
15445 : multiply-indirected pointer. If we have a reference to a pointer to
15446 : a pointer, it's possible that this should really be
15447 : GOMP_MAP_FIRSTPRIVATE_REFERENCE -- but that also doesn't work at the
15448 : moment, so stick with this. (See PR113279 and testcases
15449 : baseptrs-{4,6}.C:ref2ptrptr_offset_decl_member_slice). */
15450 626 : if (TREE_CODE (dtype) == POINTER_TYPE
15451 626 : && TREE_CODE (TREE_TYPE (dtype)) == POINTER_TYPE)
15452 19 : OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
15453 : else
15454 607 : OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
15455 626 : OMP_CLAUSE_CHAIN (nc) = chain;
15456 626 : OMP_CLAUSE_CHAIN (clause) = nc;
15457 626 : struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
15458 626 : gimplify_omp_ctxp = ctx->outer_context;
15459 626 : gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0),
15460 : pre_p, NULL, is_gimple_val, fb_rvalue);
15461 626 : gimplify_omp_ctxp = ctx;
15462 626 : }
15463 17370 : else if (code == OMP_CLAUSE_MAP)
15464 : {
15465 17370 : int kind;
15466 : /* Not all combinations of these GOVD_MAP flags are actually valid. */
15467 17370 : switch (flags & (GOVD_MAP_TO_ONLY
15468 : | GOVD_MAP_FORCE
15469 : | GOVD_MAP_FORCE_PRESENT
15470 : | GOVD_MAP_ALLOC_ONLY
15471 : | GOVD_MAP_FROM_ONLY))
15472 : {
15473 : case 0:
15474 : kind = GOMP_MAP_TOFROM;
15475 : break;
15476 1042 : case GOVD_MAP_FORCE:
15477 1042 : kind = GOMP_MAP_TOFROM | GOMP_MAP_FLAG_FORCE;
15478 1042 : break;
15479 863 : case GOVD_MAP_TO_ONLY:
15480 863 : kind = GOMP_MAP_TO;
15481 863 : break;
15482 16 : case GOVD_MAP_FROM_ONLY:
15483 16 : kind = GOMP_MAP_FROM;
15484 16 : break;
15485 37 : case GOVD_MAP_ALLOC_ONLY:
15486 37 : kind = GOMP_MAP_ALLOC;
15487 37 : break;
15488 0 : case GOVD_MAP_TO_ONLY | GOVD_MAP_FORCE:
15489 0 : kind = GOMP_MAP_TO | GOMP_MAP_FLAG_FORCE;
15490 0 : break;
15491 : case GOVD_MAP_FORCE_PRESENT:
15492 326 : kind = GOMP_MAP_FORCE_PRESENT;
15493 : break;
15494 : case GOVD_MAP_FORCE_PRESENT | GOVD_MAP_ALLOC_ONLY:
15495 326 : kind = GOMP_MAP_FORCE_PRESENT;
15496 : break;
15497 0 : default:
15498 0 : gcc_unreachable ();
15499 : }
15500 17370 : OMP_CLAUSE_SET_MAP_KIND (clause, kind);
15501 : /* Setting of the implicit flag for the runtime is currently disabled for
15502 : OpenACC. */
15503 17370 : if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0)
15504 9841 : OMP_CLAUSE_MAP_RUNTIME_IMPLICIT_P (clause) = 1;
15505 17370 : if (DECL_SIZE (decl)
15506 17370 : && !poly_int_tree_p (DECL_SIZE (decl)))
15507 : {
15508 535 : tree decl2 = DECL_VALUE_EXPR (decl);
15509 535 : gcc_assert (INDIRECT_REF_P (decl2));
15510 535 : decl2 = TREE_OPERAND (decl2, 0);
15511 535 : gcc_assert (DECL_P (decl2));
15512 535 : tree mem = build_simple_mem_ref (decl2);
15513 535 : OMP_CLAUSE_DECL (clause) = mem;
15514 535 : OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
15515 535 : if (gimplify_omp_ctxp->outer_context)
15516 : {
15517 446 : struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
15518 446 : omp_notice_variable (ctx, decl2, true);
15519 446 : omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
15520 : }
15521 535 : tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
15522 : OMP_CLAUSE_MAP);
15523 535 : OMP_CLAUSE_DECL (nc) = decl;
15524 535 : OMP_CLAUSE_SIZE (nc) = size_zero_node;
15525 535 : if (gimplify_omp_ctxp->target_firstprivatize_array_bases)
15526 535 : OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
15527 : else
15528 0 : OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
15529 535 : OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
15530 535 : OMP_CLAUSE_CHAIN (clause) = nc;
15531 : }
15532 16835 : else if (gimplify_omp_ctxp->target_firstprivatize_array_bases
15533 16835 : && omp_privatize_by_reference (decl))
15534 : {
15535 28 : OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl);
15536 28 : OMP_CLAUSE_SIZE (clause)
15537 28 : = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))));
15538 28 : struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
15539 28 : gimplify_omp_ctxp = ctx->outer_context;
15540 28 : gimplify_expr (&OMP_CLAUSE_SIZE (clause),
15541 : pre_p, NULL, is_gimple_val, fb_rvalue);
15542 28 : gimplify_omp_ctxp = ctx;
15543 28 : tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
15544 : OMP_CLAUSE_MAP);
15545 28 : OMP_CLAUSE_DECL (nc) = decl;
15546 28 : OMP_CLAUSE_SIZE (nc) = size_zero_node;
15547 28 : OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
15548 28 : OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
15549 28 : OMP_CLAUSE_CHAIN (clause) = nc;
15550 : }
15551 : else
15552 16807 : OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
15553 : }
15554 149270 : if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
15555 : {
15556 690 : tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
15557 690 : OMP_CLAUSE_DECL (nc) = decl;
15558 690 : OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
15559 690 : OMP_CLAUSE_CHAIN (nc) = chain;
15560 690 : OMP_CLAUSE_CHAIN (clause) = nc;
15561 690 : struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
15562 690 : gimplify_omp_ctxp = ctx->outer_context;
15563 690 : lang_hooks.decls.omp_finish_clause (nc, pre_p,
15564 690 : (ctx->region_type & ORT_ACC) != 0);
15565 690 : gimplify_omp_ctxp = ctx;
15566 : }
15567 149270 : *list_p = clause;
15568 149270 : struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
15569 149270 : gimplify_omp_ctxp = ctx->outer_context;
15570 : /* Don't call omp_finish_clause on implicitly added OMP_CLAUSE_PRIVATE
15571 : in simd. Those are only added for the local vars inside of simd body
15572 : and they don't need to be e.g. default constructible. */
15573 149270 : if (code != OMP_CLAUSE_PRIVATE || ctx->region_type != ORT_SIMD)
15574 141220 : lang_hooks.decls.omp_finish_clause (clause, pre_p,
15575 141220 : (ctx->region_type & ORT_ACC) != 0);
15576 149270 : if (gimplify_omp_ctxp)
15577 184129 : for (; clause != chain; clause = OMP_CLAUSE_CHAIN (clause))
15578 93477 : if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP
15579 93477 : && DECL_P (OMP_CLAUSE_SIZE (clause)))
15580 1241 : omp_notice_variable (gimplify_omp_ctxp, OMP_CLAUSE_SIZE (clause),
15581 : true);
15582 149270 : gimplify_omp_ctxp = ctx;
15583 149270 : return 0;
15584 : }
15585 :
15586 : static void
15587 128406 : gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
15588 : enum tree_code code,
15589 : gimple_seq *loops_seq_p = NULL)
15590 : {
15591 128406 : struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
15592 128406 : tree *orig_list_p = list_p;
15593 128406 : tree c, decl;
15594 128406 : bool has_inscan_reductions = false;
15595 :
15596 128406 : if (body)
15597 : {
15598 : struct gimplify_omp_ctx *octx;
15599 224118 : for (octx = ctx; octx; octx = octx->outer_context)
15600 173188 : if ((octx->region_type & (ORT_PARALLEL | ORT_TASK | ORT_TEAMS)) != 0)
15601 : break;
15602 111091 : if (octx)
15603 : {
15604 60161 : struct walk_stmt_info wi;
15605 60161 : memset (&wi, 0, sizeof (wi));
15606 60161 : walk_gimple_seq (body, omp_find_stores_stmt,
15607 : omp_find_stores_op, &wi);
15608 : }
15609 : }
15610 :
15611 128406 : if (ctx->add_safelen1)
15612 : {
15613 : /* If there are VLAs in the body of simd loop, prevent
15614 : vectorization. */
15615 2 : gcc_assert (ctx->region_type == ORT_SIMD);
15616 2 : c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
15617 2 : OMP_CLAUSE_SAFELEN_EXPR (c) = integer_one_node;
15618 2 : OMP_CLAUSE_CHAIN (c) = *list_p;
15619 2 : *list_p = c;
15620 2 : list_p = &OMP_CLAUSE_CHAIN (c);
15621 : }
15622 :
15623 128406 : if (ctx->region_type == ORT_WORKSHARE
15624 39414 : && ctx->outer_context
15625 27876 : && ctx->outer_context->region_type == ORT_COMBINED_PARALLEL)
15626 : {
15627 24880 : for (c = ctx->outer_context->clauses; c; c = OMP_CLAUSE_CHAIN (c))
15628 12538 : if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
15629 12538 : && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
15630 : {
15631 125 : decl = OMP_CLAUSE_DECL (c);
15632 125 : splay_tree_node n
15633 125 : = splay_tree_lookup (ctx->outer_context->variables,
15634 : (splay_tree_key) decl);
15635 125 : gcc_checking_assert (!splay_tree_lookup (ctx->variables,
15636 : (splay_tree_key) decl));
15637 125 : omp_add_variable (ctx, decl, n->value);
15638 125 : tree c2 = copy_node (c);
15639 125 : OMP_CLAUSE_CHAIN (c2) = *list_p;
15640 125 : *list_p = c2;
15641 125 : if ((n->value & GOVD_FIRSTPRIVATE) == 0)
15642 103 : continue;
15643 22 : c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
15644 : OMP_CLAUSE_FIRSTPRIVATE);
15645 22 : OMP_CLAUSE_DECL (c2) = decl;
15646 22 : OMP_CLAUSE_CHAIN (c2) = *list_p;
15647 22 : *list_p = c2;
15648 : }
15649 : }
15650 :
15651 128406 : if (code == OMP_TARGET
15652 128406 : || code == OMP_TARGET_DATA
15653 128406 : || code == OMP_TARGET_ENTER_DATA
15654 112601 : || code == OMP_TARGET_EXIT_DATA)
15655 : {
15656 16504 : tree mapper_clauses = NULL_TREE;
15657 16504 : instantiate_mapper_info im_info;
15658 :
15659 16504 : im_info.mapper_clauses_p = &mapper_clauses;
15660 16504 : im_info.omp_ctx = ctx;
15661 16504 : im_info.pre_p = pre_p;
15662 :
15663 16504 : splay_tree_foreach (ctx->variables,
15664 : omp_instantiate_implicit_mappers,
15665 : (void *) &im_info);
15666 :
15667 16504 : if (mapper_clauses)
15668 : {
15669 45 : mapper_clauses
15670 45 : = lang_hooks.decls.omp_finish_mapper_clauses (mapper_clauses);
15671 :
15672 : /* Stick the implicitly-expanded mapper clauses at the end of the
15673 : clause list. */
15674 45 : tree *tail = list_p;
15675 139 : while (*tail)
15676 94 : tail = &OMP_CLAUSE_CHAIN (*tail);
15677 45 : *tail = mapper_clauses;
15678 : }
15679 :
15680 16504 : vec<omp_mapping_group> *groups;
15681 16504 : groups = omp_gather_mapping_groups (list_p);
15682 16504 : hash_map<tree_operand_hash_no_se, omp_mapping_group *> *grpmap = NULL;
15683 :
15684 16504 : if (groups)
15685 : {
15686 7952 : grpmap = omp_index_mapping_groups (groups);
15687 :
15688 7952 : omp_resolve_clause_dependencies (code, groups, grpmap);
15689 7952 : omp_build_struct_sibling_lists (code, ctx->region_type, groups,
15690 : &grpmap, list_p);
15691 :
15692 7952 : omp_mapping_group *outlist = NULL;
15693 :
15694 15904 : delete grpmap;
15695 7952 : delete groups;
15696 :
15697 : /* Rebuild now we have struct sibling lists. */
15698 7952 : groups = omp_gather_mapping_groups (list_p);
15699 7952 : grpmap = omp_index_mapping_groups (groups);
15700 :
15701 7952 : bool enter_exit = (code == OMP_TARGET_ENTER_DATA
15702 7952 : || code == OMP_TARGET_EXIT_DATA);
15703 :
15704 7952 : outlist = omp_tsort_mapping_groups (groups, grpmap, enter_exit);
15705 7952 : outlist = omp_segregate_mapping_groups (outlist);
15706 7952 : list_p = omp_reorder_mapping_groups (groups, outlist, list_p);
15707 :
15708 7952 : delete grpmap;
15709 7952 : delete groups;
15710 : }
15711 16504 : }
15712 111902 : else if (ctx->region_type & ORT_ACC)
15713 : {
15714 29839 : vec<omp_mapping_group> *groups;
15715 29839 : groups = omp_gather_mapping_groups (list_p);
15716 29839 : if (groups)
15717 : {
15718 9457 : hash_map<tree_operand_hash_no_se, omp_mapping_group *> *grpmap;
15719 9457 : grpmap = omp_index_mapping_groups (groups);
15720 :
15721 9457 : oacc_resolve_clause_dependencies (groups, grpmap);
15722 9457 : omp_build_struct_sibling_lists (code, ctx->region_type, groups,
15723 : &grpmap, list_p);
15724 :
15725 9457 : delete groups;
15726 18914 : delete grpmap;
15727 : }
15728 : }
15729 :
15730 128406 : tree attach_list = NULL_TREE;
15731 128406 : tree *attach_tail = &attach_list;
15732 :
15733 128406 : tree *grp_start_p = NULL, grp_end = NULL_TREE;
15734 :
15735 374930 : while ((c = *list_p) != NULL)
15736 : {
15737 246524 : splay_tree_node n;
15738 246524 : bool remove = false;
15739 246524 : bool move_attach = false;
15740 :
15741 288429 : if (grp_end && c == OMP_CLAUSE_CHAIN (grp_end))
15742 : grp_end = NULL_TREE;
15743 :
15744 246524 : switch (OMP_CLAUSE_CODE (c))
15745 : {
15746 7937 : case OMP_CLAUSE_FIRSTPRIVATE:
15747 7937 : if ((ctx->region_type & ORT_TARGET)
15748 1902 : && (ctx->region_type & ORT_ACC) == 0
15749 9233 : && TYPE_ATOMIC (strip_array_types
15750 : (TREE_TYPE (OMP_CLAUSE_DECL (c)))))
15751 : {
15752 4 : error_at (OMP_CLAUSE_LOCATION (c),
15753 : "%<_Atomic%> %qD in %<firstprivate%> clause on "
15754 2 : "%<target%> construct", OMP_CLAUSE_DECL (c));
15755 2 : remove = true;
15756 2 : break;
15757 : }
15758 7935 : if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
15759 : {
15760 380 : decl = OMP_CLAUSE_DECL (c);
15761 380 : n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
15762 380 : if ((n->value & GOVD_MAP) != 0)
15763 : {
15764 : remove = true;
15765 : break;
15766 : }
15767 368 : OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT_TARGET (c) = 0;
15768 368 : OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c) = 0;
15769 : }
15770 : /* FALLTHRU */
15771 36703 : case OMP_CLAUSE_PRIVATE:
15772 36703 : case OMP_CLAUSE_SHARED:
15773 36703 : case OMP_CLAUSE_LINEAR:
15774 36703 : decl = OMP_CLAUSE_DECL (c);
15775 36703 : n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
15776 36703 : remove = !(n->value & GOVD_SEEN);
15777 36703 : if ((n->value & GOVD_LASTPRIVATE_CONDITIONAL) != 0
15778 48 : && code == OMP_PARALLEL
15779 36725 : && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
15780 : remove = true;
15781 36681 : if (! remove)
15782 : {
15783 32079 : bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
15784 32079 : if ((n->value & GOVD_DEBUG_PRIVATE)
15785 32079 : || lang_hooks.decls.omp_private_debug_clause (decl, shared))
15786 : {
15787 87 : gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
15788 : || ((n->value & GOVD_DATA_SHARE_CLASS)
15789 : == GOVD_SHARED));
15790 87 : OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
15791 87 : OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
15792 : }
15793 32079 : if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
15794 4141 : && ctx->has_depend
15795 32637 : && DECL_P (decl))
15796 558 : n->value |= GOVD_WRITTEN;
15797 32079 : if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
15798 4141 : && (n->value & GOVD_WRITTEN) == 0
15799 3030 : && DECL_P (decl)
15800 35109 : && omp_shared_to_firstprivate_optimizable_decl_p (decl))
15801 526 : OMP_CLAUSE_SHARED_READONLY (c) = 1;
15802 31553 : else if (DECL_P (decl)
15803 31553 : && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
15804 3615 : && (n->value & GOVD_WRITTEN) != 0)
15805 30442 : || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
15806 9032 : && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
15807 38585 : && omp_shared_to_firstprivate_optimizable_decl_p (decl))
15808 5728 : omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
15809 : }
15810 : else
15811 4624 : n->value &= ~GOVD_EXPLICIT;
15812 : break;
15813 :
15814 13004 : case OMP_CLAUSE_LASTPRIVATE:
15815 : /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
15816 : accurately reflect the presence of a FIRSTPRIVATE clause. */
15817 13004 : decl = OMP_CLAUSE_DECL (c);
15818 13004 : n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
15819 13004 : OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
15820 13004 : = (n->value & GOVD_FIRSTPRIVATE) != 0;
15821 13004 : if (code == OMP_DISTRIBUTE
15822 13004 : && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
15823 : {
15824 4 : remove = true;
15825 4 : error_at (OMP_CLAUSE_LOCATION (c),
15826 : "same variable used in %<firstprivate%> and "
15827 : "%<lastprivate%> clauses on %<distribute%> "
15828 : "construct");
15829 : }
15830 13004 : if (!remove
15831 13000 : && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
15832 13000 : && DECL_P (decl)
15833 13000 : && omp_shared_to_firstprivate_optimizable_decl_p (decl))
15834 11407 : omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
15835 13004 : if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) && code == OMP_PARALLEL)
15836 : remove = true;
15837 : break;
15838 :
15839 824 : case OMP_CLAUSE_ALIGNED:
15840 824 : decl = OMP_CLAUSE_DECL (c);
15841 824 : if (!is_global_var (decl))
15842 : {
15843 738 : n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
15844 738 : remove = n == NULL || !(n->value & GOVD_SEEN);
15845 88 : if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
15846 : {
15847 88 : struct gimplify_omp_ctx *octx;
15848 88 : if (n != NULL
15849 88 : && (n->value & (GOVD_DATA_SHARE_CLASS
15850 : & ~GOVD_FIRSTPRIVATE)))
15851 : remove = true;
15852 : else
15853 100 : for (octx = ctx->outer_context; octx;
15854 12 : octx = octx->outer_context)
15855 : {
15856 24 : n = splay_tree_lookup (octx->variables,
15857 : (splay_tree_key) decl);
15858 24 : if (n == NULL)
15859 12 : continue;
15860 12 : if (n->value & GOVD_LOCAL)
15861 : break;
15862 : /* We have to avoid assigning a shared variable
15863 : to itself when trying to add
15864 : __builtin_assume_aligned. */
15865 12 : if (n->value & GOVD_SHARED)
15866 : {
15867 : remove = true;
15868 : break;
15869 : }
15870 : }
15871 : }
15872 : }
15873 86 : else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
15874 : {
15875 86 : n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
15876 86 : if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
15877 : remove = true;
15878 : }
15879 : break;
15880 :
15881 552 : case OMP_CLAUSE_HAS_DEVICE_ADDR:
15882 552 : decl = OMP_CLAUSE_DECL (c);
15883 552 : while (INDIRECT_REF_P (decl)
15884 601 : || TREE_CODE (decl) == ARRAY_REF)
15885 49 : decl = TREE_OPERAND (decl, 0);
15886 552 : n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
15887 552 : remove = n == NULL || !(n->value & GOVD_SEEN);
15888 : break;
15889 :
15890 900 : case OMP_CLAUSE_IS_DEVICE_PTR:
15891 900 : case OMP_CLAUSE_NONTEMPORAL:
15892 900 : decl = OMP_CLAUSE_DECL (c);
15893 900 : n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
15894 900 : remove = n == NULL || !(n->value & GOVD_SEEN);
15895 : break;
15896 :
15897 55413 : case OMP_CLAUSE_MAP:
15898 55413 : if (OMP_CLAUSE_MAP_GIMPLE_ONLY (c))
15899 : {
15900 90 : remove = true;
15901 90 : goto end_adjust_omp_map_clause;
15902 : }
15903 55323 : decl = OMP_CLAUSE_DECL (c);
15904 55323 : if (!grp_end)
15905 : {
15906 30941 : grp_start_p = list_p;
15907 30941 : grp_end = *omp_group_last (grp_start_p);
15908 : }
15909 55323 : switch (OMP_CLAUSE_MAP_KIND (c))
15910 : {
15911 125 : case GOMP_MAP_PRESENT_ALLOC:
15912 125 : case GOMP_MAP_PRESENT_TO:
15913 125 : case GOMP_MAP_PRESENT_FROM:
15914 125 : case GOMP_MAP_PRESENT_TOFROM:
15915 125 : OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_PRESENT);
15916 125 : break;
15917 : default:
15918 : break;
15919 : }
15920 55323 : switch (code)
15921 : {
15922 4563 : case OACC_DATA:
15923 4563 : if (TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE)
15924 : break;
15925 : /* Fallthrough. */
15926 18695 : case OACC_HOST_DATA:
15927 18695 : case OACC_ENTER_DATA:
15928 18695 : case OACC_EXIT_DATA:
15929 18695 : case OMP_TARGET_DATA:
15930 18695 : case OMP_TARGET_ENTER_DATA:
15931 18695 : case OMP_TARGET_EXIT_DATA:
15932 18695 : if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
15933 18695 : || (OMP_CLAUSE_MAP_KIND (c)
15934 : == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
15935 : /* For target {,enter ,exit }data only the array slice is
15936 : mapped, but not the pointer to it. */
15937 : remove = true;
15938 18695 : if (code == OMP_TARGET_EXIT_DATA
15939 18695 : && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER
15940 2578 : || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER))
15941 : remove = true;
15942 : break;
15943 : case OMP_TARGET:
15944 : break;
15945 : default:
15946 : break;
15947 : }
15948 18693 : if (remove)
15949 : break;
15950 54988 : if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
15951 : {
15952 : /* Sanity check: attach/detach map kinds use the size as a bias,
15953 : and it's never right to use the decl size for such
15954 : mappings. */
15955 16962 : gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH
15956 : && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH
15957 : && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DETACH
15958 : && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH_DETACH
15959 : && (OMP_CLAUSE_MAP_KIND (c)
15960 : != GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION));
15961 20736 : OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
15962 3774 : : TYPE_SIZE_UNIT (TREE_TYPE (decl));
15963 : }
15964 54988 : gimplify_omp_ctxp = ctx->outer_context;
15965 54988 : gimple_seq *seq_p;
15966 54988 : seq_p = enter_omp_iterator_loop_context (c, loops_seq_p, pre_p);
15967 54988 : if (gimplify_expr (&OMP_CLAUSE_SIZE (c), seq_p, NULL,
15968 : is_gimple_val, fb_rvalue) == GS_ERROR)
15969 : {
15970 0 : gimplify_omp_ctxp = ctx;
15971 0 : remove = true;
15972 0 : goto end_adjust_omp_map_clause;
15973 : }
15974 54988 : else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
15975 52159 : || (OMP_CLAUSE_MAP_KIND (c)
15976 : == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
15977 51750 : || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
15978 61488 : && TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST)
15979 : {
15980 848 : OMP_CLAUSE_SIZE (c)
15981 848 : = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), seq_p, NULL,
15982 : false);
15983 848 : if ((ctx->region_type & ORT_TARGET) != 0)
15984 643 : omp_add_variable (ctx, OMP_CLAUSE_SIZE (c),
15985 : GOVD_FIRSTPRIVATE | GOVD_SEEN);
15986 : }
15987 54988 : gimplify_omp_ctxp = ctx;
15988 : /* Data clauses associated with reductions must be
15989 : compatible with present_or_copy. Warn and adjust the clause
15990 : if that is not the case. */
15991 54988 : if (ctx->region_type == ORT_ACC_PARALLEL
15992 46009 : || ctx->region_type == ORT_ACC_SERIAL)
15993 : {
15994 9535 : tree t = DECL_P (decl) ? decl : TREE_OPERAND (decl, 0);
15995 9535 : n = NULL;
15996 :
15997 9535 : if (DECL_P (t))
15998 7770 : n = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
15999 :
16000 7770 : if (n && (n->value & GOVD_REDUCTION))
16001 : {
16002 809 : enum gomp_map_kind kind = OMP_CLAUSE_MAP_KIND (c);
16003 :
16004 809 : OMP_CLAUSE_MAP_IN_REDUCTION (c) = 1;
16005 809 : if ((kind & GOMP_MAP_TOFROM) != GOMP_MAP_TOFROM
16006 159 : && kind != GOMP_MAP_FORCE_PRESENT
16007 159 : && kind != GOMP_MAP_POINTER)
16008 : {
16009 120 : warning_at (OMP_CLAUSE_LOCATION (c), 0,
16010 : "incompatible data clause with reduction "
16011 : "on %qE; promoting to %<present_or_copy%>",
16012 120 : DECL_NAME (t));
16013 120 : OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
16014 : }
16015 : }
16016 : }
16017 54988 : if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
16018 52294 : || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT_UNORD)
16019 55262 : && (code == OMP_TARGET_EXIT_DATA || code == OACC_EXIT_DATA))
16020 : {
16021 514 : remove = true;
16022 514 : goto end_adjust_omp_map_clause;
16023 : }
16024 : /* If we have a DECL_VALUE_EXPR (e.g. this is a class member and/or
16025 : a variable captured in a lambda closure), look through that now
16026 : before the DECL_P check below. (A code other than COMPONENT_REF,
16027 : i.e. INDIRECT_REF, will be a VLA/variable-length array
16028 : section. A global var may be a variable in a common block. We
16029 : don't want to do this here for either of those.) */
16030 54474 : if ((ctx->region_type & ORT_ACC) == 0
16031 28281 : && DECL_P (decl)
16032 11913 : && !is_global_var (decl)
16033 10546 : && DECL_HAS_VALUE_EXPR_P (decl)
16034 54664 : && TREE_CODE (DECL_VALUE_EXPR (decl)) == COMPONENT_REF)
16035 0 : decl = OMP_CLAUSE_DECL (c) = DECL_VALUE_EXPR (decl);
16036 54474 : if (TREE_CODE (decl) == TARGET_EXPR)
16037 : {
16038 0 : if (gimplify_expr (&OMP_CLAUSE_DECL (c), seq_p, NULL,
16039 : is_gimple_lvalue, fb_lvalue) == GS_ERROR)
16040 25463 : remove = true;
16041 : }
16042 54474 : else if (!DECL_P (decl))
16043 : {
16044 29011 : if ((ctx->region_type & ORT_TARGET) != 0
16045 29011 : && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
16046 : {
16047 24 : if (INDIRECT_REF_P (decl)
16048 0 : && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
16049 24 : && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
16050 : == REFERENCE_TYPE))
16051 0 : decl = TREE_OPERAND (decl, 0);
16052 24 : if (TREE_CODE (decl) == COMPONENT_REF)
16053 : {
16054 0 : while (TREE_CODE (decl) == COMPONENT_REF)
16055 0 : decl = TREE_OPERAND (decl, 0);
16056 0 : if (DECL_P (decl))
16057 : {
16058 0 : n = splay_tree_lookup (ctx->variables,
16059 : (splay_tree_key) decl);
16060 0 : if (!(n->value & GOVD_SEEN))
16061 29011 : remove = true;
16062 : }
16063 : }
16064 : }
16065 :
16066 29011 : tree d = decl, *pd;
16067 29011 : if (TREE_CODE (d) == ARRAY_REF)
16068 : {
16069 5642 : while (TREE_CODE (d) == ARRAY_REF)
16070 2881 : d = TREE_OPERAND (d, 0);
16071 2761 : if (TREE_CODE (d) == COMPONENT_REF
16072 2761 : && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
16073 : decl = d;
16074 : }
16075 29011 : pd = &OMP_CLAUSE_DECL (c);
16076 29011 : if (d == decl
16077 26607 : && TREE_CODE (decl) == INDIRECT_REF
16078 15286 : && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
16079 1470 : && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
16080 : == REFERENCE_TYPE)
16081 29811 : && (OMP_CLAUSE_MAP_KIND (c)
16082 : != GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION))
16083 : {
16084 796 : pd = &TREE_OPERAND (decl, 0);
16085 796 : decl = TREE_OPERAND (decl, 0);
16086 : }
16087 :
16088 29011 : if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
16089 3033 : switch (code)
16090 : {
16091 427 : case OACC_ENTER_DATA:
16092 427 : case OACC_EXIT_DATA:
16093 427 : if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
16094 : == ARRAY_TYPE)
16095 : remove = true;
16096 417 : else if (code == OACC_ENTER_DATA)
16097 263 : goto change_to_attach;
16098 : /* Fallthrough. */
16099 605 : case OMP_TARGET_EXIT_DATA:
16100 605 : OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_DETACH);
16101 605 : OMP_CLAUSE_MAP_SIZE_NEEDS_ADJUSTMENT (c) = 0;
16102 605 : break;
16103 71 : case OACC_UPDATE:
16104 : /* An "attach/detach" operation on an update directive
16105 : should behave as a GOMP_MAP_ALWAYS_POINTER. Note that
16106 : both GOMP_MAP_ATTACH_DETACH and GOMP_MAP_ALWAYS_POINTER
16107 : kinds depend on the previous mapping (for non-TARGET
16108 : regions). */
16109 71 : OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_ALWAYS_POINTER);
16110 71 : break;
16111 2357 : default:
16112 2357 : change_to_attach:
16113 2357 : gcc_assert (!OMP_CLAUSE_MAP_SIZE_NEEDS_ADJUSTMENT (c));
16114 2357 : OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_ATTACH);
16115 2357 : if ((ctx->region_type & ORT_TARGET) != 0)
16116 1777 : move_attach = true;
16117 : }
16118 25978 : else if ((ctx->region_type & ORT_TARGET) != 0
16119 25978 : && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
16120 11561 : || (OMP_CLAUSE_MAP_KIND (c)
16121 : == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION)))
16122 : move_attach = true;
16123 :
16124 : /* If we have e.g. map(struct: *var), don't gimplify the
16125 : argument since omp-low.cc wants to see the decl itself. */
16126 29011 : if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
16127 1135 : goto end_adjust_omp_map_clause;
16128 :
16129 : /* We've already partly gimplified this in
16130 : gimplify_scan_omp_clauses. Don't do any more. */
16131 27876 : if (code == OMP_TARGET && OMP_CLAUSE_MAP_IN_REDUCTION (c))
16132 88 : goto end_adjust_omp_map_clause;
16133 :
16134 27788 : gimplify_omp_ctxp = ctx->outer_context;
16135 27788 : if (gimplify_expr (pd, seq_p, NULL, is_gimple_lvalue,
16136 : fb_lvalue) == GS_ERROR)
16137 0 : remove = true;
16138 27788 : gimplify_omp_ctxp = ctx;
16139 27788 : goto end_adjust_omp_map_clause;
16140 : }
16141 :
16142 25463 : if ((code == OMP_TARGET
16143 : || code == OMP_TARGET_DATA
16144 : || code == OMP_TARGET_ENTER_DATA
16145 14349 : || code == OMP_TARGET_EXIT_DATA)
16146 26000 : && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
16147 : {
16148 229 : bool firstprivatize = false;
16149 :
16150 234 : for (struct gimplify_omp_ctx *octx = ctx->outer_context; octx;
16151 5 : octx = octx->outer_context)
16152 : {
16153 6 : splay_tree_node n
16154 12 : = splay_tree_lookup (octx->variables,
16155 6 : (splay_tree_key) OMP_CLAUSE_DECL (c));
16156 : /* If this is contained in an outer OpenMP region as a
16157 : firstprivate value, remove the attach/detach. */
16158 6 : if (n && (n->value & GOVD_FIRSTPRIVATE))
16159 : {
16160 : firstprivatize = true;
16161 : break;
16162 : }
16163 : }
16164 :
16165 229 : enum gomp_map_kind map_kind;
16166 229 : if (firstprivatize)
16167 : map_kind = GOMP_MAP_FIRSTPRIVATE_POINTER;
16168 228 : else if (code == OMP_TARGET_EXIT_DATA)
16169 : map_kind = GOMP_MAP_DETACH;
16170 : else
16171 180 : map_kind = GOMP_MAP_ATTACH;
16172 229 : OMP_CLAUSE_SET_MAP_KIND (c, map_kind);
16173 : }
16174 25234 : else if ((ctx->region_type & ORT_ACC) != 0
16175 25234 : && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
16176 : {
16177 0 : enum gomp_map_kind map_kind = (code == OACC_EXIT_DATA
16178 0 : ? GOMP_MAP_DETACH
16179 : : GOMP_MAP_ATTACH);
16180 0 : OMP_CLAUSE_SET_MAP_KIND (c, map_kind);
16181 : }
16182 :
16183 25463 : n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
16184 25463 : if ((ctx->region_type & ORT_TARGET) != 0
16185 15793 : && !(n->value & GOVD_SEEN)
16186 1479 : && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0
16187 26695 : && (!is_global_var (decl)
16188 75 : || !lookup_attribute ("omp declare target link",
16189 75 : DECL_ATTRIBUTES (decl))))
16190 : {
16191 1220 : remove = true;
16192 : /* For struct element mapping, if struct is never referenced
16193 : in target block and none of the mapping has always modifier,
16194 : remove all the struct element mappings, which immediately
16195 : follow the GOMP_MAP_STRUCT map clause. */
16196 1220 : if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
16197 1220 : || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT_UNORD)
16198 : {
16199 28 : HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
16200 76 : while (cnt--)
16201 48 : OMP_CLAUSE_CHAIN (c)
16202 48 : = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c));
16203 : }
16204 : }
16205 24243 : else if (DECL_SIZE (decl)
16206 24219 : && !poly_int_tree_p (DECL_SIZE (decl))
16207 199 : && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER
16208 199 : && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
16209 24294 : && (OMP_CLAUSE_MAP_KIND (c)
16210 : != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
16211 : {
16212 : /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
16213 : for these, TREE_CODE (DECL_SIZE (decl)) will always be
16214 : INTEGER_CST. */
16215 51 : gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);
16216 :
16217 51 : tree decl2 = DECL_VALUE_EXPR (decl);
16218 51 : gcc_assert (INDIRECT_REF_P (decl2));
16219 51 : decl2 = TREE_OPERAND (decl2, 0);
16220 51 : gcc_assert (DECL_P (decl2));
16221 51 : tree mem = build_simple_mem_ref (decl2);
16222 51 : OMP_CLAUSE_DECL (c) = mem;
16223 51 : OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
16224 51 : if (ctx->outer_context)
16225 : {
16226 15 : omp_notice_variable (ctx->outer_context, decl2, true);
16227 30 : omp_notice_variable (ctx->outer_context,
16228 15 : OMP_CLAUSE_SIZE (c), true);
16229 : }
16230 51 : if (((ctx->region_type & ORT_TARGET) != 0
16231 24 : || !ctx->target_firstprivatize_array_bases)
16232 34 : && ((n->value & GOVD_SEEN) == 0
16233 30 : || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0))
16234 : {
16235 34 : tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
16236 : OMP_CLAUSE_MAP);
16237 34 : OMP_CLAUSE_DECL (nc) = decl;
16238 34 : OMP_CLAUSE_SIZE (nc) = size_zero_node;
16239 34 : if (ctx->target_firstprivatize_array_bases)
16240 27 : OMP_CLAUSE_SET_MAP_KIND (nc,
16241 : GOMP_MAP_FIRSTPRIVATE_POINTER);
16242 : else
16243 7 : OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
16244 34 : OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
16245 34 : OMP_CLAUSE_CHAIN (c) = nc;
16246 34 : c = nc;
16247 : }
16248 : }
16249 : else
16250 : {
16251 24192 : if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
16252 0 : OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
16253 24192 : gcc_assert ((n->value & GOVD_SEEN) == 0
16254 : || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
16255 : == 0));
16256 : }
16257 :
16258 : /* If we have a target region, we can push all the attaches to the
16259 : end of the list (we may have standalone "attach" operations
16260 : synthesized for GOMP_MAP_STRUCT nodes that must be processed after
16261 : the attachment point AND the pointed-to block have been mapped).
16262 : If we have something else, e.g. "enter data", we need to keep
16263 : "attach" nodes together with the previous node they attach to so
16264 : that separate "exit data" operations work properly (see
16265 : libgomp/target.c). */
16266 25463 : if ((ctx->region_type & ORT_TARGET) != 0
16267 25463 : && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
16268 15677 : || (OMP_CLAUSE_MAP_KIND (c)
16269 : == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION)))
16270 : move_attach = true;
16271 :
16272 55078 : end_adjust_omp_map_clause:
16273 55078 : exit_omp_iterator_loop_context (c);
16274 55078 : break;
16275 :
16276 8207 : case OMP_CLAUSE_TO:
16277 8207 : case OMP_CLAUSE_FROM:
16278 8207 : case OMP_CLAUSE__CACHE_:
16279 8207 : decl = OMP_CLAUSE_DECL (c);
16280 8207 : if (!DECL_P (decl))
16281 : break;
16282 6188 : if (DECL_SIZE (decl)
16283 6188 : && !poly_int_tree_p (DECL_SIZE (decl)))
16284 : {
16285 4 : tree decl2 = DECL_VALUE_EXPR (decl);
16286 4 : gcc_assert (INDIRECT_REF_P (decl2));
16287 4 : decl2 = TREE_OPERAND (decl2, 0);
16288 4 : gcc_assert (DECL_P (decl2));
16289 4 : tree mem = build_simple_mem_ref (decl2);
16290 4 : OMP_CLAUSE_DECL (c) = mem;
16291 4 : OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
16292 4 : if (ctx->outer_context)
16293 : {
16294 4 : omp_notice_variable (ctx->outer_context, decl2, true);
16295 4 : omp_notice_variable (ctx->outer_context,
16296 4 : OMP_CLAUSE_SIZE (c), true);
16297 : }
16298 : }
16299 6184 : else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
16300 0 : OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
16301 : break;
16302 :
16303 15212 : case OMP_CLAUSE_REDUCTION:
16304 15212 : if (OMP_CLAUSE_REDUCTION_INSCAN (c))
16305 : {
16306 811 : decl = OMP_CLAUSE_DECL (c);
16307 811 : n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
16308 811 : if ((n->value & GOVD_REDUCTION_INSCAN) == 0)
16309 : {
16310 69 : remove = true;
16311 69 : error_at (OMP_CLAUSE_LOCATION (c),
16312 : "%qD specified in %<inscan%> %<reduction%> clause "
16313 : "but not in %<scan%> directive clause", decl);
16314 69 : break;
16315 : }
16316 : has_inscan_reductions = true;
16317 : }
16318 : /* FALLTHRU */
16319 17747 : case OMP_CLAUSE_IN_REDUCTION:
16320 17747 : case OMP_CLAUSE_TASK_REDUCTION:
16321 17747 : decl = OMP_CLAUSE_DECL (c);
16322 : /* OpenACC reductions need a present_or_copy data clause.
16323 : Add one if necessary. Emit error when the reduction is private. */
16324 17747 : if (ctx->region_type == ORT_ACC_PARALLEL
16325 17215 : || ctx->region_type == ORT_ACC_SERIAL)
16326 : {
16327 803 : n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
16328 803 : if (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
16329 : {
16330 13 : remove = true;
16331 13 : error_at (OMP_CLAUSE_LOCATION (c), "invalid private "
16332 13 : "reduction on %qE", DECL_NAME (decl));
16333 : }
16334 790 : else if ((n->value & GOVD_MAP) == 0)
16335 : {
16336 524 : tree next = OMP_CLAUSE_CHAIN (c);
16337 524 : tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_MAP);
16338 524 : OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_TOFROM);
16339 524 : OMP_CLAUSE_DECL (nc) = decl;
16340 524 : OMP_CLAUSE_CHAIN (c) = nc;
16341 524 : lang_hooks.decls.omp_finish_clause (nc, pre_p,
16342 524 : (ctx->region_type
16343 : & ORT_ACC) != 0);
16344 550 : while (1)
16345 : {
16346 537 : OMP_CLAUSE_MAP_IN_REDUCTION (nc) = 1;
16347 537 : if (OMP_CLAUSE_CHAIN (nc) == NULL)
16348 : break;
16349 13 : nc = OMP_CLAUSE_CHAIN (nc);
16350 : }
16351 524 : OMP_CLAUSE_CHAIN (nc) = next;
16352 524 : n->value |= GOVD_MAP;
16353 : }
16354 : }
16355 17747 : if (DECL_P (decl)
16356 17747 : && omp_shared_to_firstprivate_optimizable_decl_p (decl))
16357 7963 : omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
16358 : break;
16359 :
16360 3541 : case OMP_CLAUSE_ALLOCATE:
16361 3541 : decl = OMP_CLAUSE_DECL (c);
16362 3541 : n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
16363 3541 : if (n != NULL && !(n->value & GOVD_SEEN))
16364 : {
16365 1176 : if ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE | GOVD_LINEAR))
16366 : != 0
16367 1176 : && (n->value & (GOVD_REDUCTION | GOVD_LASTPRIVATE)) == 0)
16368 : remove = true;
16369 : }
16370 : if (!remove
16371 2365 : && OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
16372 1273 : && TREE_CODE (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)) != INTEGER_CST
16373 483 : && ((ctx->region_type & (ORT_PARALLEL | ORT_TARGET)) != 0
16374 347 : || (ctx->region_type & ORT_TASKLOOP) == ORT_TASK
16375 287 : || (ctx->region_type & ORT_HOST_TEAMS) == ORT_HOST_TEAMS))
16376 : {
16377 214 : tree allocator = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
16378 214 : n = splay_tree_lookup (ctx->variables, (splay_tree_key) allocator);
16379 214 : if (n == NULL)
16380 : {
16381 92 : enum omp_clause_default_kind default_kind
16382 : = ctx->default_kind;
16383 92 : ctx->default_kind = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
16384 92 : omp_notice_variable (ctx, OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
16385 : true);
16386 92 : ctx->default_kind = default_kind;
16387 : }
16388 : else
16389 122 : omp_notice_variable (ctx, OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
16390 : true);
16391 : }
16392 : break;
16393 :
16394 : case OMP_CLAUSE_COPYIN:
16395 : case OMP_CLAUSE_COPYPRIVATE:
16396 : case OMP_CLAUSE_IF:
16397 : case OMP_CLAUSE_SELF:
16398 : case OMP_CLAUSE_NUM_THREADS:
16399 : case OMP_CLAUSE_NUM_TEAMS:
16400 : case OMP_CLAUSE_THREAD_LIMIT:
16401 : case OMP_CLAUSE_DIST_SCHEDULE:
16402 : case OMP_CLAUSE_DEVICE:
16403 : case OMP_CLAUSE_SCHEDULE:
16404 : case OMP_CLAUSE_NOWAIT:
16405 : case OMP_CLAUSE_ORDERED:
16406 : case OMP_CLAUSE_DEFAULT:
16407 : case OMP_CLAUSE_UNTIED:
16408 : case OMP_CLAUSE_COLLAPSE:
16409 : case OMP_CLAUSE_FINAL:
16410 : case OMP_CLAUSE_MERGEABLE:
16411 : case OMP_CLAUSE_PROC_BIND:
16412 : case OMP_CLAUSE_SAFELEN:
16413 : case OMP_CLAUSE_SIMDLEN:
16414 : case OMP_CLAUSE_DEPEND:
16415 : case OMP_CLAUSE_DOACROSS:
16416 : case OMP_CLAUSE_PRIORITY:
16417 : case OMP_CLAUSE_GRAINSIZE:
16418 : case OMP_CLAUSE_NUM_TASKS:
16419 : case OMP_CLAUSE_NOGROUP:
16420 : case OMP_CLAUSE_THREADS:
16421 : case OMP_CLAUSE_SIMD:
16422 : case OMP_CLAUSE_FILTER:
16423 : case OMP_CLAUSE_HINT:
16424 : case OMP_CLAUSE_DEFAULTMAP:
16425 : case OMP_CLAUSE_ORDER:
16426 : case OMP_CLAUSE_BIND:
16427 : case OMP_CLAUSE_DETACH:
16428 : case OMP_CLAUSE_USE_DEVICE_PTR:
16429 : case OMP_CLAUSE_USE_DEVICE_ADDR:
16430 : case OMP_CLAUSE_ASYNC:
16431 : case OMP_CLAUSE_WAIT:
16432 : case OMP_CLAUSE_INDEPENDENT:
16433 : case OMP_CLAUSE_NUM_GANGS:
16434 : case OMP_CLAUSE_NUM_WORKERS:
16435 : case OMP_CLAUSE_VECTOR_LENGTH:
16436 : case OMP_CLAUSE_GANG:
16437 : case OMP_CLAUSE_WORKER:
16438 : case OMP_CLAUSE_VECTOR:
16439 : case OMP_CLAUSE_AUTO:
16440 : case OMP_CLAUSE_SEQ:
16441 : case OMP_CLAUSE_TILE:
16442 : case OMP_CLAUSE_IF_PRESENT:
16443 : case OMP_CLAUSE_FINALIZE:
16444 : case OMP_CLAUSE_INCLUSIVE:
16445 : case OMP_CLAUSE_EXCLUSIVE:
16446 : case OMP_CLAUSE_USES_ALLOCATORS:
16447 : case OMP_CLAUSE_DEVICE_TYPE:
16448 : break;
16449 :
16450 0 : case OMP_CLAUSE_NOHOST:
16451 0 : default:
16452 0 : gcc_unreachable ();
16453 : }
16454 :
16455 128213 : if (remove)
16456 9883 : *list_p = OMP_CLAUSE_CHAIN (c);
16457 236641 : else if (move_attach)
16458 : {
16459 : /* Remove attach node from here, separate out into its own list. */
16460 2334 : *attach_tail = c;
16461 2334 : *list_p = OMP_CLAUSE_CHAIN (c);
16462 2334 : OMP_CLAUSE_CHAIN (c) = NULL_TREE;
16463 2334 : attach_tail = &OMP_CLAUSE_CHAIN (c);
16464 : }
16465 : else
16466 234307 : list_p = &OMP_CLAUSE_CHAIN (c);
16467 : }
16468 :
16469 : /* Splice attach nodes at the end of the list. */
16470 128406 : if (attach_list)
16471 : {
16472 1108 : *list_p = attach_list;
16473 1108 : list_p = attach_tail;
16474 : }
16475 :
16476 : /* Add in any implicit data sharing. */
16477 128406 : struct gimplify_adjust_omp_clauses_data data;
16478 128406 : if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0)
16479 : {
16480 : /* OpenMP. Implicit clauses are added at the start of the clause list,
16481 : but after any non-map clauses. */
16482 : tree *implicit_add_list_p = orig_list_p;
16483 250738 : while (*implicit_add_list_p
16484 250738 : && OMP_CLAUSE_CODE (*implicit_add_list_p) != OMP_CLAUSE_MAP)
16485 152171 : implicit_add_list_p = &OMP_CLAUSE_CHAIN (*implicit_add_list_p);
16486 98567 : data.list_p = implicit_add_list_p;
16487 : }
16488 : else
16489 : /* OpenACC. */
16490 29839 : data.list_p = list_p;
16491 128406 : data.pre_p = pre_p;
16492 128406 : splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);
16493 :
16494 128406 : if (has_inscan_reductions)
16495 2431 : for (c = *orig_list_p; c; c = OMP_CLAUSE_CHAIN (c))
16496 1822 : if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
16497 1822 : && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
16498 : {
16499 5 : error_at (OMP_CLAUSE_LOCATION (c),
16500 : "%<inscan%> %<reduction%> clause used together with "
16501 : "%<linear%> clause for a variable other than loop "
16502 : "iterator");
16503 5 : break;
16504 : }
16505 :
16506 128406 : gimplify_omp_ctxp = ctx->outer_context;
16507 128406 : delete_omp_context (ctx);
16508 128406 : }
16509 :
/* Try to evaluate a novariants clause.  Return 1 if it evaluates to true,
   0 if it is false or absent, and -1 if run-time evaluation is needed.  */
16512 :
16513 : int
16514 248357 : omp_has_novariants (void)
16515 : {
16516 248357 : struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
16517 248357 : if (ctx != NULL && ctx->code == OMP_DISPATCH && !ctx->in_call_args)
16518 : {
16519 3290 : tree c = omp_find_clause (ctx->clauses, OMP_CLAUSE_NOVARIANTS);
16520 3290 : if (c != NULL_TREE)
16521 : {
16522 299 : if (integer_nonzerop (OMP_CLAUSE_NOVARIANTS_EXPR (c)))
16523 : return 1;
16524 171 : else if (integer_zerop (OMP_CLAUSE_NOVARIANTS_EXPR (c)))
16525 : return 0;
16526 : else
16527 : return -1;
16528 : }
16529 : return 0;
16530 : }
16531 : return 0;
16532 : }
16533 :
/* Try to evaluate a nocontext clause.  Return 1 if it evaluates to true,
   0 if it is false or absent, and -1 if run-time evaluation is needed.  */
16536 :
16537 : static int
16538 1903 : omp_has_nocontext (void)
16539 : {
16540 1903 : struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
16541 1903 : if (ctx != NULL && ctx->code == OMP_DISPATCH)
16542 : {
16543 1903 : tree c = omp_find_clause (ctx->clauses, OMP_CLAUSE_NOCONTEXT);
16544 1903 : if (c != NULL_TREE)
16545 : {
16546 256 : if (integer_nonzerop (OMP_CLAUSE_NOCONTEXT_EXPR (c)))
16547 : return 1;
16548 94 : else if (integer_zerop (OMP_CLAUSE_NOCONTEXT_EXPR (c)))
16549 : return 0;
16550 : else
16551 : return -1;
16552 : }
16553 : return 0;
16554 : }
16555 : return 0;
16556 : }
16557 :
16558 : /* Collect a list of traits for enclosing constructs in the current
16559 : OpenMP context. The list is in the same format as the trait selector
16560 : list of construct trait sets built by the front ends.
16561 :
16562 : Per the OpenMP specification, the construct trait set includes constructs
16563 : up to an enclosing "target" construct. If there is no "target" construct,
16564 : then additional things may be added to the construct trait set (simd for
16565 : simd clones, additional constructs associated with "declare variant",
16566 : the target trait for "declare target"); those are not handled here.
16567 : In particular simd clones are not known during gimplification so
16568 : matching/scoring of context selectors that might involve them needs
16569 : to be deferred to the omp_device_lower pass. */
16570 :
16571 : tree
16572 2209 : omp_get_construct_context (void)
16573 : {
16574 2209 : tree result = NULL_TREE;
16575 4180 : for (struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp; ctx;)
16576 : {
16577 2092 : if (((ctx->region_type & (ORT_TARGET | ORT_IMPLICIT_TARGET | ORT_ACC))
16578 : == ORT_TARGET)
16579 121 : && ctx->code == OMP_TARGET)
16580 : {
16581 121 : result = make_trait_selector (OMP_TRAIT_CONSTRUCT_TARGET,
16582 : NULL_TREE, NULL_TREE, result);
16583 : /* We're not interested in any outer constructs. */
16584 121 : break;
16585 : }
16586 1971 : else if ((ctx->region_type & ORT_PARALLEL) && ctx->code == OMP_PARALLEL)
16587 214 : result = make_trait_selector (OMP_TRAIT_CONSTRUCT_PARALLEL,
16588 : NULL_TREE, NULL_TREE, result);
16589 1757 : else if ((ctx->region_type & ORT_TEAMS) && ctx->code == OMP_TEAMS)
16590 75 : result = make_trait_selector (OMP_TRAIT_CONSTRUCT_TEAMS,
16591 : NULL_TREE, NULL_TREE, result);
16592 1682 : else if (ctx->region_type == ORT_WORKSHARE && ctx->code == OMP_FOR)
16593 164 : result = make_trait_selector (OMP_TRAIT_CONSTRUCT_FOR,
16594 : NULL_TREE, NULL_TREE, result);
16595 1518 : else if (ctx->code == OMP_DISPATCH && omp_has_nocontext () != 1)
16596 1279 : result = make_trait_selector (OMP_TRAIT_CONSTRUCT_DISPATCH,
16597 : NULL_TREE, NULL_TREE, result);
16598 239 : else if (ctx->region_type == ORT_SIMD
16599 32 : && ctx->code == OMP_SIMD
16600 271 : && !omp_find_clause (ctx->clauses, OMP_CLAUSE_BIND))
16601 : {
16602 32 : tree props = NULL_TREE;
16603 32 : tree *last = &props;
16604 72 : for (tree c = ctx->clauses; c; c = OMP_CLAUSE_CHAIN (c))
16605 40 : if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMDLEN
16606 32 : || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_INBRANCH
16607 72 : || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NOTINBRANCH)
16608 : {
16609 8 : *last = unshare_expr (c);
16610 8 : last = &(OMP_CLAUSE_CHAIN (c));
16611 : }
16612 32 : result = make_trait_selector (OMP_TRAIT_CONSTRUCT_SIMD,
16613 : NULL_TREE, props, result);
16614 : }
16615 207 : else if (ctx->region_type == ORT_WORKSHARE
16616 5 : && ctx->code == OMP_LOOP
16617 0 : && ctx->outer_context
16618 0 : && ctx->outer_context->region_type == ORT_COMBINED_PARALLEL
16619 0 : && ctx->outer_context->outer_context
16620 0 : && ctx->outer_context->outer_context->code == OMP_LOOP
16621 0 : && ctx->outer_context->outer_context->distribute)
16622 1971 : ctx = ctx->outer_context->outer_context;
16623 1971 : ctx = ctx->outer_context;
16624 : }
16625 :
16626 2209 : return result;
16627 : }
16628 :
16629 : /* Gimplify OACC_CACHE. */
16630 :
16631 : static void
16632 665 : gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
16633 : {
16634 665 : tree expr = *expr_p;
16635 :
16636 665 : gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC,
16637 : OACC_CACHE);
16638 665 : gimplify_adjust_omp_clauses (pre_p, NULL, &OACC_CACHE_CLAUSES (expr),
16639 : OACC_CACHE);
16640 :
16641 : /* TODO: Do something sensible with this information. */
16642 :
16643 665 : *expr_p = NULL_TREE;
16644 665 : }
16645 :
16646 : /* Helper function of gimplify_oacc_declare. The helper's purpose is to,
16647 : if required, translate 'kind' in CLAUSE into an 'entry' kind and 'exit'
16648 : kind. The entry kind will replace the one in CLAUSE, while the exit
16649 : kind will be used in a new omp_clause and returned to the caller. */
16650 :
16651 : static tree
16652 190 : gimplify_oacc_declare_1 (tree clause)
16653 : {
16654 190 : HOST_WIDE_INT kind, new_op;
16655 190 : bool ret = false;
16656 190 : tree c = NULL;
16657 :
16658 190 : kind = OMP_CLAUSE_MAP_KIND (clause);
16659 :
16660 190 : switch (kind)
16661 : {
16662 : case GOMP_MAP_ALLOC:
16663 : new_op = GOMP_MAP_RELEASE;
16664 : ret = true;
16665 : break;
16666 :
16667 29 : case GOMP_MAP_FROM:
16668 29 : OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
16669 29 : new_op = GOMP_MAP_FROM;
16670 29 : ret = true;
16671 29 : break;
16672 :
16673 40 : case GOMP_MAP_TOFROM:
16674 40 : OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO);
16675 40 : new_op = GOMP_MAP_FROM;
16676 40 : ret = true;
16677 40 : break;
16678 :
16679 : case GOMP_MAP_DEVICE_RESIDENT:
16680 : case GOMP_MAP_FORCE_DEVICEPTR:
16681 : case GOMP_MAP_FORCE_PRESENT:
16682 : case GOMP_MAP_LINK:
16683 : case GOMP_MAP_POINTER:
16684 : case GOMP_MAP_TO:
16685 : break;
16686 :
16687 0 : default:
16688 0 : gcc_unreachable ();
16689 69 : break;
16690 : }
16691 :
16692 69 : if (ret)
16693 : {
16694 116 : c = build_omp_clause (OMP_CLAUSE_LOCATION (clause), OMP_CLAUSE_MAP);
16695 116 : OMP_CLAUSE_SET_MAP_KIND (c, new_op);
16696 116 : OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clause);
16697 : }
16698 :
16699 190 : return c;
16700 : }
16701 :
16702 : /* Gimplify OACC_DECLARE. */
16703 :
16704 : static void
16705 254 : gimplify_oacc_declare (tree *expr_p, gimple_seq *pre_p)
16706 : {
16707 254 : tree expr = *expr_p;
16708 254 : gomp_target *stmt;
16709 254 : tree clauses, t, decl;
16710 :
16711 254 : clauses = OACC_DECLARE_CLAUSES (expr);
16712 :
16713 254 : gimplify_scan_omp_clauses (&clauses, pre_p, ORT_TARGET_DATA, OACC_DECLARE);
16714 254 : gimplify_adjust_omp_clauses (pre_p, NULL, &clauses, OACC_DECLARE);
16715 :
16716 516 : for (t = clauses; t; t = OMP_CLAUSE_CHAIN (t))
16717 : {
16718 262 : decl = OMP_CLAUSE_DECL (t);
16719 :
16720 262 : if (TREE_CODE (decl) == MEM_REF)
16721 8 : decl = TREE_OPERAND (decl, 0);
16722 :
16723 262 : if (VAR_P (decl) && !is_oacc_declared (decl))
16724 : {
16725 262 : tree attr = get_identifier ("oacc declare target");
16726 262 : DECL_ATTRIBUTES (decl) = tree_cons (attr, NULL_TREE,
16727 262 : DECL_ATTRIBUTES (decl));
16728 : }
16729 :
16730 262 : if (VAR_P (decl)
16731 262 : && !is_global_var (decl)
16732 452 : && DECL_CONTEXT (decl) == current_function_decl)
16733 : {
16734 190 : tree c = gimplify_oacc_declare_1 (t);
16735 190 : if (c)
16736 : {
16737 116 : if (oacc_declare_returns == NULL)
16738 40 : oacc_declare_returns = new hash_map<tree, tree>;
16739 :
16740 116 : oacc_declare_returns->put (decl, c);
16741 : }
16742 : }
16743 :
16744 262 : if (gimplify_omp_ctxp)
16745 72 : omp_add_variable (gimplify_omp_ctxp, decl, GOVD_SEEN);
16746 : }
16747 :
16748 254 : stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
16749 : clauses);
16750 :
16751 254 : gimplify_seq_add_stmt (pre_p, stmt);
16752 :
16753 254 : *expr_p = NULL_TREE;
16754 254 : }
16755 :
16756 : /* Gimplify the contents of an OMP_PARALLEL statement. This involves
16757 : gimplification of the body, as well as scanning the body for used
16758 : variables. We need to do this scan now, because variable-sized
16759 : decls will be decomposed during gimplification. */
16760 :
16761 : static void
16762 18214 : gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
16763 : {
16764 18214 : tree expr = *expr_p;
16765 18214 : gimple *g;
16766 18214 : gimple_seq body = NULL;
16767 :
16768 36428 : gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
16769 18214 : OMP_PARALLEL_COMBINED (expr)
16770 : ? ORT_COMBINED_PARALLEL
16771 : : ORT_PARALLEL, OMP_PARALLEL);
16772 :
16773 18214 : push_gimplify_context ();
16774 :
16775 18214 : g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
16776 18214 : if (gimple_code (g) == GIMPLE_BIND)
16777 18214 : pop_gimplify_context (g);
16778 : else
16779 0 : pop_gimplify_context (NULL);
16780 :
16781 18214 : gimplify_adjust_omp_clauses (pre_p, body, &OMP_PARALLEL_CLAUSES (expr),
16782 : OMP_PARALLEL);
16783 :
16784 36428 : g = gimple_build_omp_parallel (body,
16785 18214 : OMP_PARALLEL_CLAUSES (expr),
16786 : NULL_TREE, NULL_TREE);
16787 18214 : if (OMP_PARALLEL_COMBINED (expr))
16788 12529 : gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
16789 18214 : gimplify_seq_add_stmt (pre_p, g);
16790 18214 : *expr_p = NULL_TREE;
16791 18214 : }
16792 :
16793 : /* Gimplify the contents of an OMP_TASK statement. This involves
16794 : gimplification of the body, as well as scanning the body for used
16795 : variables. We need to do this scan now, because variable-sized
16796 : decls will be decomposed during gimplification. */
16797 :
16798 : static void
16799 3848 : gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
16800 : {
16801 3848 : tree expr = *expr_p;
16802 3848 : gimple *g;
16803 3848 : gimple_seq body = NULL;
16804 3848 : bool nowait = false;
16805 3848 : bool has_depend = false;
16806 :
16807 3848 : if (OMP_TASK_BODY (expr) == NULL_TREE)
16808 : {
16809 236 : for (tree c = OMP_TASK_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
16810 150 : if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
16811 : {
16812 108 : has_depend = true;
16813 108 : if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_MUTEXINOUTSET)
16814 : {
16815 9 : error_at (OMP_CLAUSE_LOCATION (c),
16816 : "%<mutexinoutset%> kind in %<depend%> clause on a "
16817 : "%<taskwait%> construct");
16818 9 : break;
16819 : }
16820 : }
16821 42 : else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NOWAIT)
16822 42 : nowait = true;
16823 95 : if (nowait && !has_depend)
16824 : {
16825 5 : error_at (EXPR_LOCATION (expr),
16826 : "%<taskwait%> construct with %<nowait%> clause but no "
16827 : "%<depend%> clauses");
16828 5 : *expr_p = NULL_TREE;
16829 5 : return;
16830 : }
16831 : }
16832 :
16833 11529 : gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
16834 3843 : omp_find_clause (OMP_TASK_CLAUSES (expr),
16835 : OMP_CLAUSE_UNTIED)
16836 : ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK);
16837 :
16838 3843 : if (OMP_TASK_BODY (expr))
16839 : {
16840 3753 : push_gimplify_context ();
16841 :
16842 3753 : g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
16843 3753 : if (gimple_code (g) == GIMPLE_BIND)
16844 3753 : pop_gimplify_context (g);
16845 : else
16846 0 : pop_gimplify_context (NULL);
16847 : }
16848 :
16849 3843 : gimplify_adjust_omp_clauses (pre_p, body, &OMP_TASK_CLAUSES (expr),
16850 : OMP_TASK);
16851 :
16852 7686 : g = gimple_build_omp_task (body,
16853 3843 : OMP_TASK_CLAUSES (expr),
16854 : NULL_TREE, NULL_TREE,
16855 : NULL_TREE, NULL_TREE, NULL_TREE);
16856 3843 : if (OMP_TASK_BODY (expr) == NULL_TREE)
16857 90 : gimple_omp_task_set_taskwait_p (g, true);
16858 3843 : gimplify_seq_add_stmt (pre_p, g);
16859 3843 : *expr_p = NULL_TREE;
16860 : }
16861 :
16862 : /* Helper function for gimplify_omp_for. If *TP is not a gimple constant,
16863 : force it into a temporary initialized in PRE_P and add firstprivate clause
16864 : to ORIG_FOR_STMT. */
16865 :
16866 : static void
16867 4399 : gimplify_omp_taskloop_expr (tree type, tree *tp, gimple_seq *pre_p,
16868 : tree orig_for_stmt)
16869 : {
16870 4399 : if (*tp == NULL || is_gimple_constant (*tp))
16871 : return;
16872 :
16873 779 : if (TREE_CODE (*tp) == SAVE_EXPR)
16874 135 : gimplify_save_expr (tp, pre_p, NULL);
16875 : else
16876 644 : *tp = get_initialized_tmp_var (*tp, pre_p, NULL, false);
16877 : /* Reference to pointer conversion is considered useless,
16878 : but is significant for firstprivate clause. Force it
16879 : here. */
16880 779 : if (type
16881 687 : && TREE_CODE (type) == POINTER_TYPE
16882 869 : && TREE_CODE (TREE_TYPE (*tp)) == REFERENCE_TYPE)
16883 : {
16884 2 : tree v = create_tmp_var (TYPE_MAIN_VARIANT (type));
16885 2 : tree m = build2 (INIT_EXPR, TREE_TYPE (v), v, *tp);
16886 2 : gimplify_and_add (m, pre_p);
16887 2 : *tp = v;
16888 : }
16889 :
16890 779 : tree c = build_omp_clause (input_location, OMP_CLAUSE_FIRSTPRIVATE);
16891 779 : OMP_CLAUSE_DECL (c) = *tp;
16892 779 : OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
16893 779 : OMP_FOR_CLAUSES (orig_for_stmt) = c;
16894 : }
16895 :
16896 : /* Helper function of gimplify_omp_for, find OMP_ORDERED with
16897 : null OMP_ORDERED_BODY inside of OMP_FOR's body. */
16898 :
16899 : static tree
16900 16496 : find_standalone_omp_ordered (tree *tp, int *walk_subtrees, void *)
16901 : {
16902 16496 : switch (TREE_CODE (*tp))
16903 : {
16904 901 : case OMP_ORDERED:
16905 901 : if (OMP_ORDERED_BODY (*tp) == NULL_TREE)
16906 : return *tp;
16907 : break;
16908 23 : case OMP_SIMD:
16909 23 : case OMP_PARALLEL:
16910 23 : case OMP_TARGET:
16911 23 : *walk_subtrees = 0;
16912 23 : break;
16913 : default:
16914 : break;
16915 : }
16916 : return NULL_TREE;
16917 : }
16918 :
16919 : /* Gimplify standalone loop transforming directive which has the
16920 : transformations applied already. So, all that is needed is gimplify
16921 : the remaining loops as normal loops. */
16922 :
static enum gimplify_status
gimplify_omp_loop_xform (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt = *expr_p;

  if (OMP_FOR_PRE_BODY (for_stmt))
    gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), pre_p);

  /* Each loop level contributes its initialization to PRE_BODY and its
     increment/test to a POST_BODY that is nested inside out (innermost
     level's post body runs first).  */
  gimple_seq pre_body = NULL, post_body = NULL;
  for (int i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
    {
      /* A NULL init slot means this level was consumed by the transform.  */
      if (TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i) == NULL_TREE)
	continue;
      /* For a non-partial "unroll" on the outermost level, compute the
	 iteration count used to annotate the loop for the unroller; a full
	 unroll uses the exact count, otherwise a default factor of 8.  */
      tree iters = NULL_TREE;
      if (i == 0
	  && TREE_CODE (for_stmt) == OMP_UNROLL
	  && !omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_PARTIAL))
	{
	  if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_FULL))
	    iters = omp_loop_number_of_iterations (for_stmt, 0, NULL);
	  else
	    iters = build_int_cst (integer_type_node, 8);
	}
      tree t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
      gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
      tree decl = TREE_OPERAND (t, 0);
      gcc_assert (DECL_P (decl));
      gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
		  || POINTER_TYPE_P (TREE_TYPE (decl)));
      /* An artificial, private iterator must be registered as GOVD_PRIVATE
	 in the nearest enclosing context that can own it, unless some
	 context already knows about it.  */
      if (DECL_ARTIFICIAL (decl)
	  && TREE_PRIVATE (t)
	  && gimplify_omp_ctxp
	  && gimplify_omp_ctxp->region_type != ORT_NONE)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  do
	    {
	      splay_tree_node n
		= splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      if (n != NULL)
		break;
	      else if (ctx->region_type != ORT_WORKSHARE
		       && ctx->region_type != ORT_TASKGROUP
		       && ctx->region_type != ORT_SIMD
		       && ctx->region_type != ORT_ACC
		       && !(ctx->region_type & ORT_TARGET_DATA))
		{
		  omp_add_variable (ctx, decl, GOVD_PRIVATE);
		  break;
		}
	      ctx = ctx->outer_context;
	    }
	  while (ctx);
	}
      /* A TREE_VEC operand here only occurs after a reported error.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
	{
	  gcc_assert (seen_error ());
	  continue;
	}
      gimplify_expr (&TREE_OPERAND (t, 1), pre_p, NULL, is_gimple_val,
		     fb_rvalue);
      gimplify_and_add (t, &pre_body);
      t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
      gcc_assert (TREE_OPERAND (t, 0) == decl);
      if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
	{
	  gcc_assert (seen_error ());
	  continue;
	}
      gimplify_expr (&TREE_OPERAND (t, 1), pre_p, NULL, is_gimple_val,
		     fb_rvalue);
      /* Lower the level as an explicit goto loop:
	   init; goto l2; l1: body...; incr; l2: if (cond) goto l1; l3:  */
      tree l1 = create_artificial_label (UNKNOWN_LOCATION);
      tree l2 = create_artificial_label (UNKNOWN_LOCATION);
      tree l3 = create_artificial_label (UNKNOWN_LOCATION);
      gimplify_seq_add_stmt (&pre_body, gimple_build_goto (l2));
      gimplify_seq_add_stmt (&pre_body, gimple_build_label (l1));
      gimple_seq this_post_body = NULL;
      t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
      if (TREE_CODE (t) == MODIFY_EXPR)
	{
	  t = TREE_OPERAND (t, 1);
	  /* Canonicalize "x + decl" to "decl + x" before gimplifying the
	     step operand.  */
	  if (TREE_CODE (t) == PLUS_EXPR
	      && TREE_OPERAND (t, 1) == decl)
	    {
	      TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
	      TREE_OPERAND (t, 0) = decl;
	    }
	  gimplify_expr (&TREE_OPERAND (t, 1), pre_p, NULL, is_gimple_val,
			 fb_rvalue);
	}
      gimplify_and_add (TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i),
			&this_post_body);
      gimplify_seq_add_stmt (&this_post_body, gimple_build_label (l2));
      t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
      gcond *cond = NULL;
      tree d = decl;
      gimplify_expr (&d, &this_post_body, NULL, is_gimple_val, fb_rvalue);
      /* When the iteration count fits a positive int, wrap the exit test in
	 an ANNOTATE_EXPR so later passes unroll by that factor.  */
      if (iters && tree_fits_uhwi_p (iters))
	{
	  unsigned HOST_WIDE_INT niters = tree_to_uhwi (iters);
	  if ((unsigned HOST_WIDE_INT) (int) niters == niters
	      && (int) niters > 0)
	    {
	      t = build2 (TREE_CODE (t), boolean_type_node, d,
			  TREE_OPERAND (t, 1));
	      t = build3 (ANNOTATE_EXPR, TREE_TYPE (t), t,
			  build_int_cst (integer_type_node,
					 annot_expr_unroll_kind),
			  build_int_cst (integer_type_node, niters));
	      gimplify_expr (&t, &this_post_body, NULL, is_gimple_val,
			     fb_rvalue);
	      cond = gimple_build_cond (NE_EXPR, t, boolean_false_node,
					l1, l3);
	    }
	}
      if (cond == NULL)
	cond = gimple_build_cond (TREE_CODE (t), d, TREE_OPERAND (t, 1),
				  l1, l3);
      gimplify_seq_add_stmt (&this_post_body, cond);
      gimplify_seq_add_stmt (&this_post_body, gimple_build_label (l3));
      /* Nest the previously built (inner) post body inside this level's.  */
      gimplify_seq_add_seq (&this_post_body, post_body);
      post_body = this_post_body;
    }
  gimplify_seq_add_seq (pre_p, pre_body);
  gimplify_and_add (OMP_FOR_BODY (for_stmt), pre_p);
  gimplify_seq_add_seq (pre_p, post_body);

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}
17054 : /* Gimplify the gross structure of an OMP_FOR statement. */
17055 :
17056 : static enum gimplify_status
17057 58075 : gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
17058 : {
17059 58075 : tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t;
17060 58075 : enum gimplify_status ret = GS_ALL_DONE;
17061 58075 : enum gimplify_status tret;
17062 58075 : gomp_for *gfor;
17063 58075 : gimple_seq for_body, for_pre_body;
17064 58075 : int i;
17065 58075 : bitmap has_decl_expr = NULL;
17066 58075 : enum omp_region_type ort = ORT_WORKSHARE;
17067 58075 : bool openacc = TREE_CODE (*expr_p) == OACC_LOOP;
17068 :
17069 58075 : orig_for_stmt = for_stmt = *expr_p;
17070 :
17071 58075 : bool loop_p = (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_BIND)
17072 58075 : != NULL_TREE);
17073 58146 : while (OMP_FOR_INIT (for_stmt) == NULL_TREE)
17074 : {
17075 17738 : tree *data[4] = { NULL, NULL, NULL, NULL };
17076 17738 : gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP);
17077 17738 : inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt),
17078 : find_combined_omp_for, data, NULL);
17079 17738 : if (inner_for_stmt == NULL_TREE)
17080 : {
17081 36 : gcc_assert (seen_error ());
17082 36 : *expr_p = NULL_TREE;
17083 3741 : return GS_ERROR;
17084 : }
17085 17702 : gcc_assert (inner_for_stmt == *data[3]);
17086 17702 : omp_maybe_apply_loop_xforms (data[3],
17087 17702 : data[2]
17088 4279 : ? OMP_FOR_CLAUSES (*data[2])
17089 13423 : : TREE_CODE (for_stmt) == OMP_FOR
17090 13423 : ? OMP_FOR_CLAUSES (for_stmt)
17091 : : NULL_TREE);
17092 17702 : if (inner_for_stmt != *data[3])
17093 71 : continue;
17094 17631 : if (data[2] && OMP_FOR_PRE_BODY (*data[2]))
17095 : {
17096 2 : append_to_statement_list_force (OMP_FOR_PRE_BODY (*data[2]),
17097 : &OMP_FOR_PRE_BODY (for_stmt));
17098 2 : OMP_FOR_PRE_BODY (*data[2]) = NULL_TREE;
17099 : }
17100 17631 : if (OMP_FOR_PRE_BODY (inner_for_stmt))
17101 : {
17102 7193 : append_to_statement_list_force (OMP_FOR_PRE_BODY (inner_for_stmt),
17103 : &OMP_FOR_PRE_BODY (for_stmt));
17104 7193 : OMP_FOR_PRE_BODY (inner_for_stmt) = NULL_TREE;
17105 : }
17106 :
17107 17631 : if (data[0])
17108 : {
17109 : /* We have some statements or variable declarations in between
17110 : the composite construct directives. Move them around the
17111 : inner_for_stmt. */
17112 3705 : data[0] = expr_p;
17113 14820 : for (i = 0; i < 3; i++)
17114 11115 : if (data[i])
17115 : {
17116 7028 : tree t = *data[i];
17117 7028 : if (i < 2 && data[i + 1] == &OMP_BODY (t))
17118 1005 : data[i + 1] = data[i];
17119 7028 : *data[i] = OMP_BODY (t);
17120 7028 : tree body = build3 (BIND_EXPR, void_type_node, NULL_TREE,
17121 : NULL_TREE, make_node (BLOCK));
17122 7028 : OMP_BODY (t) = body;
17123 7028 : append_to_statement_list_force (inner_for_stmt,
17124 : &BIND_EXPR_BODY (body));
17125 7028 : *data[3] = t;
17126 7028 : data[3] = tsi_stmt_ptr (tsi_start (BIND_EXPR_BODY (body)));
17127 7028 : gcc_assert (*data[3] == inner_for_stmt);
17128 : }
17129 : return GS_OK;
17130 : }
17131 :
17132 36734 : for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
17133 22808 : if (!loop_p
17134 21927 : && OMP_FOR_ORIG_DECLS (inner_for_stmt)
17135 11098 : && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
17136 : i)) == TREE_LIST
17137 22876 : && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
17138 : i)))
17139 : {
17140 40 : tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
17141 : /* Class iterators aren't allowed on OMP_SIMD, so the only
17142 : case we need to solve is distribute parallel for. They are
17143 : allowed on the loop construct, but that is already handled
17144 : in gimplify_omp_loop. */
17145 40 : gcc_assert (TREE_CODE (inner_for_stmt) == OMP_FOR
17146 : && TREE_CODE (for_stmt) == OMP_DISTRIBUTE
17147 : && data[1]);
17148 40 : tree orig_decl = TREE_PURPOSE (orig);
17149 40 : tree last = TREE_VALUE (orig);
17150 40 : tree *pc;
17151 40 : for (pc = &OMP_FOR_CLAUSES (inner_for_stmt);
17152 74 : *pc; pc = &OMP_CLAUSE_CHAIN (*pc))
17153 42 : if ((OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE
17154 35 : || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_LASTPRIVATE)
17155 43 : && OMP_CLAUSE_DECL (*pc) == orig_decl)
17156 : break;
17157 40 : if (*pc == NULL_TREE)
17158 : {
17159 32 : tree *spc;
17160 32 : for (spc = &OMP_PARALLEL_CLAUSES (*data[1]);
17161 98 : *spc; spc = &OMP_CLAUSE_CHAIN (*spc))
17162 67 : if (OMP_CLAUSE_CODE (*spc) == OMP_CLAUSE_PRIVATE
17163 67 : && OMP_CLAUSE_DECL (*spc) == orig_decl)
17164 : break;
17165 32 : if (*spc)
17166 : {
17167 1 : tree c = *spc;
17168 1 : *spc = OMP_CLAUSE_CHAIN (c);
17169 1 : OMP_CLAUSE_CHAIN (c) = NULL_TREE;
17170 1 : *pc = c;
17171 : }
17172 : }
17173 40 : if (*pc == NULL_TREE)
17174 : ;
17175 9 : else if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE)
17176 : {
17177 : /* private clause will appear only on inner_for_stmt.
17178 : Change it into firstprivate, and add private clause
17179 : on for_stmt. */
17180 8 : tree c = copy_node (*pc);
17181 8 : OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
17182 8 : OMP_FOR_CLAUSES (for_stmt) = c;
17183 8 : OMP_CLAUSE_CODE (*pc) = OMP_CLAUSE_FIRSTPRIVATE;
17184 8 : lang_hooks.decls.omp_finish_clause (*pc, pre_p, openacc);
17185 : }
17186 : else
17187 : {
17188 : /* lastprivate clause will appear on both inner_for_stmt
17189 : and for_stmt. Add firstprivate clause to
17190 : inner_for_stmt. */
17191 1 : tree c = build_omp_clause (OMP_CLAUSE_LOCATION (*pc),
17192 : OMP_CLAUSE_FIRSTPRIVATE);
17193 1 : OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (*pc);
17194 1 : OMP_CLAUSE_CHAIN (c) = *pc;
17195 1 : *pc = c;
17196 1 : lang_hooks.decls.omp_finish_clause (*pc, pre_p, openacc);
17197 : }
17198 40 : tree c = build_omp_clause (UNKNOWN_LOCATION,
17199 : OMP_CLAUSE_FIRSTPRIVATE);
17200 40 : OMP_CLAUSE_DECL (c) = last;
17201 40 : OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
17202 40 : OMP_PARALLEL_CLAUSES (*data[1]) = c;
17203 40 : c = build_omp_clause (UNKNOWN_LOCATION,
17204 40 : *pc ? OMP_CLAUSE_SHARED
17205 : : OMP_CLAUSE_FIRSTPRIVATE);
17206 40 : OMP_CLAUSE_DECL (c) = orig_decl;
17207 40 : OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
17208 40 : OMP_PARALLEL_CLAUSES (*data[1]) = c;
17209 : }
17210 : /* Similarly, take care of C++ range for temporaries, those should
17211 : be firstprivate on OMP_PARALLEL if any. */
17212 13926 : if (data[1])
17213 17503 : for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
17214 11108 : if (OMP_FOR_ORIG_DECLS (inner_for_stmt)
17215 5733 : && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
17216 : i)) == TREE_LIST
17217 11184 : && TREE_CHAIN (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
17218 : i)))
17219 : {
17220 49 : tree orig
17221 49 : = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
17222 49 : tree v = TREE_CHAIN (orig);
17223 49 : tree c = build_omp_clause (UNKNOWN_LOCATION,
17224 : OMP_CLAUSE_FIRSTPRIVATE);
17225 : /* First add firstprivate clause for the __for_end artificial
17226 : decl. */
17227 49 : OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 1);
17228 49 : if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
17229 : == REFERENCE_TYPE)
17230 0 : OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
17231 49 : OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
17232 49 : OMP_PARALLEL_CLAUSES (*data[1]) = c;
17233 49 : if (TREE_VEC_ELT (v, 0))
17234 : {
17235 : /* And now the same for __for_range artificial decl if it
17236 : exists. */
17237 49 : c = build_omp_clause (UNKNOWN_LOCATION,
17238 : OMP_CLAUSE_FIRSTPRIVATE);
17239 49 : OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 0);
17240 49 : if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
17241 : == REFERENCE_TYPE)
17242 49 : OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
17243 49 : OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
17244 49 : OMP_PARALLEL_CLAUSES (*data[1]) = c;
17245 : }
17246 : }
17247 13926 : break;
17248 : }
17249 54334 : if (OMP_FOR_INIT (for_stmt) != NULL_TREE)
17250 : {
17251 40408 : omp_maybe_apply_loop_xforms (expr_p, NULL_TREE);
17252 40408 : if (*expr_p != for_stmt)
17253 : return GS_OK;
17254 : }
17255 :
17256 53452 : switch (TREE_CODE (for_stmt))
17257 : {
17258 18180 : case OMP_FOR:
17259 30391 : if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt))
17260 : {
17261 458 : if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
17262 : OMP_CLAUSE_SCHEDULE))
17263 47 : error_at (EXPR_LOCATION (for_stmt),
17264 : "%qs clause may not appear on non-rectangular %qs",
17265 27 : "schedule", lang_GNU_Fortran () ? "do" : "for");
17266 458 : if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED))
17267 34 : error_at (EXPR_LOCATION (for_stmt),
17268 : "%qs clause may not appear on non-rectangular %qs",
17269 18 : "ordered", lang_GNU_Fortran () ? "do" : "for");
17270 : }
17271 : break;
17272 8219 : case OMP_DISTRIBUTE:
17273 9130 : if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt)
17274 8219 : && omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
17275 : OMP_CLAUSE_DIST_SCHEDULE))
17276 29 : error_at (EXPR_LOCATION (for_stmt),
17277 : "%qs clause may not appear on non-rectangular %qs",
17278 : "dist_schedule", "distribute");
17279 : break;
17280 : case OACC_LOOP:
17281 : ort = ORT_ACC;
17282 : break;
17283 1586 : case OMP_TASKLOOP:
17284 2523 : if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt))
17285 : {
17286 36 : if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
17287 : OMP_CLAUSE_GRAINSIZE))
17288 11 : error_at (EXPR_LOCATION (for_stmt),
17289 : "%qs clause may not appear on non-rectangular %qs",
17290 : "grainsize", "taskloop");
17291 36 : if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
17292 : OMP_CLAUSE_NUM_TASKS))
17293 6 : error_at (EXPR_LOCATION (for_stmt),
17294 : "%qs clause may not appear on non-rectangular %qs",
17295 : "num_tasks", "taskloop");
17296 : }
17297 1586 : if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED))
17298 : ort = ORT_UNTIED_TASKLOOP;
17299 : else
17300 : ort = ORT_TASKLOOP;
17301 : break;
17302 10653 : case OMP_SIMD:
17303 10653 : ort = ORT_SIMD;
17304 10653 : break;
17305 2264 : case OMP_TILE:
17306 2264 : case OMP_UNROLL:
17307 2264 : gcc_assert (inner_for_stmt == NULL_TREE);
17308 2264 : return gimplify_omp_loop_xform (expr_p, pre_p);
17309 0 : default:
17310 0 : gcc_unreachable ();
17311 : }
17312 :
17313 : /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
17314 : clause for the IV. */
17315 10700 : if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
17316 : {
17317 7825 : t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
17318 7825 : gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
17319 7825 : decl = TREE_OPERAND (t, 0);
17320 18635 : for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
17321 11924 : if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
17322 11924 : && OMP_CLAUSE_DECL (c) == decl)
17323 : {
17324 1114 : OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
17325 1114 : break;
17326 : }
17327 : }
17328 :
17329 51188 : if (TREE_CODE (for_stmt) != OMP_TASKLOOP)
17330 51512 : gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort,
17331 1910 : loop_p && TREE_CODE (for_stmt) != OMP_SIMD
17332 : ? OMP_LOOP : TREE_CODE (for_stmt));
17333 :
17334 51188 : if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
17335 8219 : gimplify_omp_ctxp->distribute = true;
17336 :
17337 : /* Handle OMP_FOR_INIT. */
17338 51188 : for_pre_body = NULL;
17339 51188 : if ((ort == ORT_SIMD
17340 40535 : || (inner_for_stmt && TREE_CODE (inner_for_stmt) == OMP_SIMD))
17341 61928 : && OMP_FOR_PRE_BODY (for_stmt))
17342 : {
17343 6784 : has_decl_expr = BITMAP_ALLOC (NULL);
17344 6784 : if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
17345 6784 : && VAR_P (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt))))
17346 : {
17347 18 : t = OMP_FOR_PRE_BODY (for_stmt);
17348 18 : bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
17349 : }
17350 6766 : else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
17351 : {
17352 6764 : tree_stmt_iterator si;
17353 11234 : for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
17354 4470 : tsi_next (&si))
17355 : {
17356 4470 : t = tsi_stmt (si);
17357 4470 : if (TREE_CODE (t) == DECL_EXPR
17358 4470 : && VAR_P (DECL_EXPR_DECL (t)))
17359 4397 : bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
17360 : }
17361 : }
17362 : }
17363 51188 : if (OMP_FOR_PRE_BODY (for_stmt))
17364 : {
17365 20979 : if (TREE_CODE (for_stmt) != OMP_TASKLOOP || gimplify_omp_ctxp)
17366 20254 : gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
17367 : else
17368 : {
17369 725 : struct gimplify_omp_ctx ctx;
17370 725 : memset (&ctx, 0, sizeof (ctx));
17371 725 : ctx.region_type = ORT_NONE;
17372 725 : gimplify_omp_ctxp = &ctx;
17373 725 : gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
17374 725 : gimplify_omp_ctxp = NULL;
17375 : }
17376 : }
17377 51188 : OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
17378 :
17379 51188 : if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
17380 13926 : for_stmt = inner_for_stmt;
17381 :
17382 : /* For taskloop, need to gimplify the start, end and step before the
17383 : taskloop, outside of the taskloop omp context. */
17384 51188 : if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
17385 : {
17386 3458 : for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
17387 : {
17388 1872 : t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
17389 1872 : gimple_seq *for_pre_p = (gimple_seq_empty_p (for_pre_body)
17390 1872 : ? pre_p : &for_pre_body);
17391 1872 : tree type = TREE_TYPE (TREE_OPERAND (t, 0));
17392 1872 : if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
17393 : {
17394 34 : tree v = TREE_OPERAND (t, 1);
17395 34 : gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 1),
17396 : for_pre_p, orig_for_stmt);
17397 34 : gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 2),
17398 : for_pre_p, orig_for_stmt);
17399 : }
17400 : else
17401 1838 : gimplify_omp_taskloop_expr (type, &TREE_OPERAND (t, 1), for_pre_p,
17402 : orig_for_stmt);
17403 :
17404 : /* Handle OMP_FOR_COND. */
17405 1872 : t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
17406 1872 : if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
17407 : {
17408 31 : tree v = TREE_OPERAND (t, 1);
17409 31 : gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 1),
17410 : for_pre_p, orig_for_stmt);
17411 31 : gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 2),
17412 : for_pre_p, orig_for_stmt);
17413 : }
17414 : else
17415 1841 : gimplify_omp_taskloop_expr (type, &TREE_OPERAND (t, 1), for_pre_p,
17416 : orig_for_stmt);
17417 :
17418 : /* Handle OMP_FOR_INCR. */
17419 1872 : t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
17420 1872 : if (TREE_CODE (t) == MODIFY_EXPR)
17421 : {
17422 590 : decl = TREE_OPERAND (t, 0);
17423 590 : t = TREE_OPERAND (t, 1);
17424 590 : tree *tp = &TREE_OPERAND (t, 1);
17425 590 : if (TREE_CODE (t) == PLUS_EXPR && *tp == decl)
17426 22 : tp = &TREE_OPERAND (t, 0);
17427 :
17428 590 : gimplify_omp_taskloop_expr (NULL_TREE, tp, for_pre_p,
17429 : orig_for_stmt);
17430 : }
17431 : }
17432 :
17433 1586 : gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt), pre_p, ort,
17434 : OMP_TASKLOOP);
17435 : }
17436 :
17437 51188 : if (orig_for_stmt != for_stmt)
17438 13926 : gimplify_omp_ctxp->combined_loop = true;
17439 :
17440 51188 : for_body = NULL;
17441 51188 : gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
17442 : == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
17443 51188 : gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
17444 : == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
17445 :
17446 51188 : tree c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED);
17447 51188 : bool is_doacross = false;
17448 51188 : if (c && walk_tree_without_duplicates (&OMP_FOR_BODY (for_stmt),
17449 : find_standalone_omp_ordered, NULL))
17450 : {
17451 531 : OMP_CLAUSE_ORDERED_DOACROSS (c) = 1;
17452 531 : is_doacross = true;
17453 531 : int len = TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt));
17454 531 : gimplify_omp_ctxp->loop_iter_var.create (len * 2);
17455 2200 : for (tree *pc = &OMP_FOR_CLAUSES (for_stmt); *pc; )
17456 1669 : if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_LINEAR)
17457 : {
17458 10 : error_at (OMP_CLAUSE_LOCATION (*pc),
17459 : "%<linear%> clause may not be specified together "
17460 : "with %<ordered%> clause if stand-alone %<ordered%> "
17461 : "construct is nested in it");
17462 10 : *pc = OMP_CLAUSE_CHAIN (*pc);
17463 : }
17464 : else
17465 1659 : pc = &OMP_CLAUSE_CHAIN (*pc);
17466 : }
17467 51188 : int collapse = 1, tile = 0;
17468 51188 : c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_COLLAPSE);
17469 51188 : if (c)
17470 13726 : collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c));
17471 51188 : c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_TILE);
17472 51188 : if (c)
17473 384 : tile = list_length (OMP_CLAUSE_TILE_LIST (c));
17474 51188 : c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ALLOCATE);
17475 51188 : hash_set<tree> *allocate_uids = NULL;
17476 51188 : if (c)
17477 : {
17478 275 : allocate_uids = new hash_set<tree>;
17479 1682 : for (; c; c = OMP_CLAUSE_CHAIN (c))
17480 1132 : if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ALLOCATE)
17481 478 : allocate_uids->add (OMP_CLAUSE_DECL (c));
17482 : }
17483 124121 : for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
17484 : {
17485 72933 : t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
17486 72933 : gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
17487 72933 : decl = TREE_OPERAND (t, 0);
17488 72933 : gcc_assert (DECL_P (decl));
17489 72933 : gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
17490 : || POINTER_TYPE_P (TREE_TYPE (decl)));
17491 72933 : if (is_doacross)
17492 : {
17493 1654 : if (TREE_CODE (for_stmt) == OMP_FOR && OMP_FOR_ORIG_DECLS (for_stmt))
17494 : {
17495 960 : tree orig_decl = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
17496 960 : if (TREE_CODE (orig_decl) == TREE_LIST)
17497 : {
17498 15 : orig_decl = TREE_PURPOSE (orig_decl);
17499 15 : if (!orig_decl)
17500 0 : orig_decl = decl;
17501 : }
17502 960 : gimplify_omp_ctxp->loop_iter_var.quick_push (orig_decl);
17503 : }
17504 : else
17505 694 : gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
17506 1654 : gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
17507 : }
17508 :
17509 72933 : if (for_stmt == orig_for_stmt)
17510 : {
17511 50125 : tree orig_decl = decl;
17512 50125 : if (OMP_FOR_ORIG_DECLS (for_stmt))
17513 : {
17514 23916 : tree orig_decl = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
17515 23916 : if (TREE_CODE (orig_decl) == TREE_LIST)
17516 : {
17517 : orig_decl = TREE_PURPOSE (orig_decl);
17518 : if (!orig_decl)
17519 : orig_decl = decl;
17520 : }
17521 : }
17522 50125 : if (is_global_var (orig_decl) && DECL_THREAD_LOCAL_P (orig_decl))
17523 36 : error_at (EXPR_LOCATION (for_stmt),
17524 : "threadprivate iteration variable %qD", orig_decl);
17525 : }
17526 :
17527 : /* Make sure the iteration variable is private. */
17528 72933 : tree c = NULL_TREE;
17529 72933 : tree c2 = NULL_TREE;
17530 72933 : if (orig_for_stmt != for_stmt)
17531 : {
17532 : /* Preserve this information until we gimplify the inner simd. */
17533 22808 : if (has_decl_expr
17534 22808 : && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
17535 3413 : TREE_PRIVATE (t) = 1;
17536 : }
17537 50125 : else if (ort == ORT_SIMD)
17538 : {
17539 15726 : splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
17540 : (splay_tree_key) decl);
17541 15726 : omp_is_private (gimplify_omp_ctxp, decl,
17542 15726 : 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
17543 : != 1));
17544 15726 : if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
17545 : {
17546 2518 : omp_notice_variable (gimplify_omp_ctxp, decl, true);
17547 2518 : if (n->value & GOVD_LASTPRIVATE_CONDITIONAL)
17548 0 : for (tree c3 = omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
17549 : OMP_CLAUSE_LASTPRIVATE);
17550 0 : c3; c3 = omp_find_clause (OMP_CLAUSE_CHAIN (c3),
17551 : OMP_CLAUSE_LASTPRIVATE))
17552 0 : if (OMP_CLAUSE_DECL (c3) == decl)
17553 : {
17554 0 : warning_at (OMP_CLAUSE_LOCATION (c3), OPT_Wopenmp,
17555 : "conditional %<lastprivate%> on loop "
17556 : "iterator %qD ignored", decl);
17557 0 : OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3) = 0;
17558 0 : n->value &= ~GOVD_LASTPRIVATE_CONDITIONAL;
17559 : }
17560 : }
17561 13208 : else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1 && !loop_p)
17562 : {
17563 5637 : c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
17564 5637 : OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
17565 5637 : unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
17566 5637 : if ((has_decl_expr
17567 1309 : && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
17568 6321 : || TREE_PRIVATE (t))
17569 : {
17570 2281 : OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
17571 2281 : flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
17572 : }
17573 5637 : struct gimplify_omp_ctx *outer
17574 5637 : = gimplify_omp_ctxp->outer_context;
17575 5637 : if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
17576 : {
17577 2433 : if (outer->region_type == ORT_WORKSHARE
17578 2193 : && outer->combined_loop)
17579 : {
17580 2142 : n = splay_tree_lookup (outer->variables,
17581 : (splay_tree_key)decl);
17582 2142 : if (n != NULL && (n->value & GOVD_LOCAL) != 0)
17583 : {
17584 0 : OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
17585 0 : flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
17586 : }
17587 : else
17588 : {
17589 2142 : struct gimplify_omp_ctx *octx = outer->outer_context;
17590 2142 : if (octx
17591 1714 : && octx->region_type == ORT_COMBINED_PARALLEL
17592 1476 : && octx->outer_context
17593 1209 : && (octx->outer_context->region_type
17594 : == ORT_WORKSHARE)
17595 1079 : && octx->outer_context->combined_loop)
17596 : {
17597 1079 : octx = octx->outer_context;
17598 1079 : n = splay_tree_lookup (octx->variables,
17599 : (splay_tree_key)decl);
17600 1079 : if (n != NULL && (n->value & GOVD_LOCAL) != 0)
17601 : {
17602 0 : OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
17603 0 : flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
17604 : }
17605 : }
17606 : }
17607 : }
17608 : }
17609 :
17610 5637 : OMP_CLAUSE_DECL (c) = decl;
17611 5637 : OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
17612 5637 : OMP_FOR_CLAUSES (for_stmt) = c;
17613 5637 : omp_add_variable (gimplify_omp_ctxp, decl, flags);
17614 5637 : if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
17615 2433 : omp_lastprivate_for_combined_outer_constructs (outer, decl,
17616 : true);
17617 : }
17618 : else
17619 : {
17620 7571 : bool lastprivate
17621 : = (!has_decl_expr
17622 7571 : || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)));
17623 7571 : if (TREE_PRIVATE (t))
17624 1787 : lastprivate = false;
17625 7571 : if (loop_p && OMP_FOR_ORIG_DECLS (for_stmt))
17626 : {
17627 509 : tree elt = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
17628 509 : if (TREE_CODE (elt) == TREE_LIST && TREE_PURPOSE (elt))
17629 : lastprivate = false;
17630 : }
17631 :
17632 7571 : struct gimplify_omp_ctx *outer
17633 7571 : = gimplify_omp_ctxp->outer_context;
17634 7571 : if (outer && lastprivate)
17635 4887 : omp_lastprivate_for_combined_outer_constructs (outer, decl,
17636 : true);
17637 :
17638 9577 : c = build_omp_clause (input_location,
17639 : lastprivate ? OMP_CLAUSE_LASTPRIVATE
17640 : : OMP_CLAUSE_PRIVATE);
17641 7571 : OMP_CLAUSE_DECL (c) = decl;
17642 7571 : OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
17643 7571 : OMP_FOR_CLAUSES (for_stmt) = c;
17644 7571 : omp_add_variable (gimplify_omp_ctxp, decl,
17645 : (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
17646 : | GOVD_EXPLICIT | GOVD_SEEN);
17647 7571 : c = NULL_TREE;
17648 : }
17649 : }
17650 34399 : else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
17651 : {
17652 8727 : omp_notice_variable (gimplify_omp_ctxp, decl, true);
17653 8727 : splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
17654 : (splay_tree_key) decl);
17655 8727 : if (n && (n->value & GOVD_LASTPRIVATE_CONDITIONAL))
17656 80 : for (tree c3 = omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
17657 : OMP_CLAUSE_LASTPRIVATE);
17658 80 : c3; c3 = omp_find_clause (OMP_CLAUSE_CHAIN (c3),
17659 : OMP_CLAUSE_LASTPRIVATE))
17660 40 : if (OMP_CLAUSE_DECL (c3) == decl)
17661 : {
17662 40 : warning_at (OMP_CLAUSE_LOCATION (c3), OPT_Wopenmp,
17663 : "conditional %<lastprivate%> on loop "
17664 : "iterator %qD ignored", decl);
17665 40 : OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3) = 0;
17666 40 : n->value &= ~GOVD_LASTPRIVATE_CONDITIONAL;
17667 : }
17668 : }
17669 : else
17670 25672 : omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
17671 :
17672 : /* If DECL is not a gimple register, create a temporary variable to act
17673 : as an iteration counter. This is valid, since DECL cannot be
17674 : modified in the body of the loop. Similarly for any iteration vars
17675 : in simd with collapse > 1 where the iterator vars must be
17676 : lastprivate. And similarly for vars mentioned in allocate clauses. */
17677 72933 : if (orig_for_stmt != for_stmt)
17678 22808 : var = decl;
17679 50125 : else if (!is_gimple_reg (decl)
17680 46858 : || (ort == ORT_SIMD
17681 14125 : && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)
17682 89913 : || (allocate_uids && allocate_uids->contains (decl)))
17683 : {
17684 10369 : struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
17685 : /* Make sure omp_add_variable is not called on it prematurely.
17686 : We call it ourselves a few lines later. */
17687 10369 : gimplify_omp_ctxp = NULL;
17688 10369 : var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
17689 10369 : gimplify_omp_ctxp = ctx;
17690 10369 : TREE_OPERAND (t, 0) = var;
17691 :
17692 10369 : gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
17693 :
17694 10369 : if (ort == ORT_SIMD
17695 10369 : && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
17696 : {
17697 770 : c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
17698 770 : OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
17699 770 : OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
17700 770 : OMP_CLAUSE_DECL (c2) = var;
17701 770 : OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
17702 770 : OMP_FOR_CLAUSES (for_stmt) = c2;
17703 770 : omp_add_variable (gimplify_omp_ctxp, var,
17704 : GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
17705 770 : if (c == NULL_TREE)
17706 : {
17707 518 : c = c2;
17708 518 : c2 = NULL_TREE;
17709 : }
17710 : }
17711 : else
17712 9599 : omp_add_variable (gimplify_omp_ctxp, var,
17713 : GOVD_PRIVATE | GOVD_SEEN);
17714 : }
17715 : else
17716 : var = decl;
17717 :
17718 72933 : gimplify_omp_ctxp->in_for_exprs = true;
17719 72933 : if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
17720 : {
17721 719 : tree lb = TREE_OPERAND (t, 1);
17722 719 : tret = gimplify_expr (&TREE_VEC_ELT (lb, 1), &for_pre_body, NULL,
17723 : is_gimple_val, fb_rvalue, false);
17724 719 : ret = MIN (ret, tret);
17725 719 : tret = gimplify_expr (&TREE_VEC_ELT (lb, 2), &for_pre_body, NULL,
17726 : is_gimple_val, fb_rvalue, false);
17727 : }
17728 : else
17729 72214 : tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
17730 : is_gimple_val, fb_rvalue, false);
17731 72933 : gimplify_omp_ctxp->in_for_exprs = false;
17732 72933 : ret = MIN (ret, tret);
17733 72933 : if (ret == GS_ERROR)
17734 : return ret;
17735 :
17736 : /* Handle OMP_FOR_COND. */
17737 72933 : t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
17738 72933 : gcc_assert (COMPARISON_CLASS_P (t));
17739 72933 : gcc_assert (TREE_OPERAND (t, 0) == decl);
17740 :
17741 72933 : gimplify_omp_ctxp->in_for_exprs = true;
17742 72933 : if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
17743 : {
17744 599 : tree ub = TREE_OPERAND (t, 1);
17745 599 : tret = gimplify_expr (&TREE_VEC_ELT (ub, 1), &for_pre_body, NULL,
17746 : is_gimple_val, fb_rvalue, false);
17747 599 : ret = MIN (ret, tret);
17748 599 : tret = gimplify_expr (&TREE_VEC_ELT (ub, 2), &for_pre_body, NULL,
17749 : is_gimple_val, fb_rvalue, false);
17750 : }
17751 : else
17752 72334 : tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
17753 : is_gimple_val, fb_rvalue, false);
17754 72933 : gimplify_omp_ctxp->in_for_exprs = false;
17755 72933 : ret = MIN (ret, tret);
17756 :
17757 : /* Handle OMP_FOR_INCR. */
17758 72933 : t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
17759 72933 : switch (TREE_CODE (t))
17760 : {
17761 35676 : case PREINCREMENT_EXPR:
17762 35676 : case POSTINCREMENT_EXPR:
17763 35676 : {
17764 35676 : tree decl = TREE_OPERAND (t, 0);
17765 : /* c_omp_for_incr_canonicalize_ptr() should have been
17766 : called to massage things appropriately. */
17767 35676 : gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
17768 :
17769 35676 : if (orig_for_stmt != for_stmt)
17770 : break;
17771 25291 : t = build_int_cst (TREE_TYPE (decl), 1);
17772 25291 : if (c)
17773 4470 : OMP_CLAUSE_LINEAR_STEP (c) = t;
17774 25291 : t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
17775 25291 : t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
17776 25291 : TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
17777 25291 : break;
17778 : }
17779 :
17780 3830 : case PREDECREMENT_EXPR:
17781 3830 : case POSTDECREMENT_EXPR:
17782 : /* c_omp_for_incr_canonicalize_ptr() should have been
17783 : called to massage things appropriately. */
17784 3830 : gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
17785 3830 : if (orig_for_stmt != for_stmt)
17786 : break;
17787 1992 : t = build_int_cst (TREE_TYPE (decl), -1);
17788 1992 : if (c)
17789 112 : OMP_CLAUSE_LINEAR_STEP (c) = t;
17790 1992 : t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
17791 1992 : t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
17792 1992 : TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
17793 1992 : break;
17794 :
17795 33427 : case MODIFY_EXPR:
17796 33427 : gcc_assert (TREE_OPERAND (t, 0) == decl);
17797 33427 : TREE_OPERAND (t, 0) = var;
17798 :
17799 33427 : t = TREE_OPERAND (t, 1);
17800 33427 : switch (TREE_CODE (t))
17801 : {
17802 26495 : case PLUS_EXPR:
17803 26495 : if (TREE_OPERAND (t, 1) == decl)
17804 : {
17805 215 : TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
17806 215 : TREE_OPERAND (t, 0) = var;
17807 215 : break;
17808 : }
17809 :
17810 : /* Fallthru. */
17811 33212 : case MINUS_EXPR:
17812 33212 : case POINTER_PLUS_EXPR:
17813 33212 : gcc_assert (TREE_OPERAND (t, 0) == decl);
17814 33212 : TREE_OPERAND (t, 0) = var;
17815 33212 : break;
17816 0 : default:
17817 0 : gcc_unreachable ();
17818 : }
17819 :
17820 33427 : gimplify_omp_ctxp->in_for_exprs = true;
17821 33427 : tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
17822 : is_gimple_val, fb_rvalue, false);
17823 33427 : ret = MIN (ret, tret);
17824 33427 : if (c)
17825 : {
17826 1573 : tree step = TREE_OPERAND (t, 1);
17827 1573 : tree stept = TREE_TYPE (decl);
17828 1573 : if (POINTER_TYPE_P (stept))
17829 280 : stept = sizetype;
17830 1573 : step = fold_convert (stept, step);
17831 1573 : if (TREE_CODE (t) == MINUS_EXPR)
17832 295 : step = fold_build1 (NEGATE_EXPR, stept, step);
17833 1573 : OMP_CLAUSE_LINEAR_STEP (c) = step;
17834 1573 : if (step != TREE_OPERAND (t, 1))
17835 : {
17836 295 : tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
17837 : &for_pre_body, NULL,
17838 : is_gimple_val, fb_rvalue, false);
17839 295 : ret = MIN (ret, tret);
17840 : }
17841 : }
17842 33427 : gimplify_omp_ctxp->in_for_exprs = false;
17843 33427 : break;
17844 :
17845 0 : default:
17846 0 : gcc_unreachable ();
17847 : }
17848 :
17849 72933 : if (c2)
17850 : {
17851 252 : gcc_assert (c);
17852 252 : OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
17853 : }
17854 :
17855 72933 : if ((var != decl || collapse > 1 || tile) && orig_for_stmt == for_stmt)
17856 : {
17857 86692 : for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
17858 65132 : if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
17859 16218 : && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
17860 56123 : || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
17861 2159 : && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
17862 1373 : && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
17863 75492 : && OMP_CLAUSE_DECL (c) == decl)
17864 : {
17865 7535 : if (is_doacross && (collapse == 1 || i >= collapse))
17866 : t = var;
17867 : else
17868 : {
17869 7462 : t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
17870 7462 : gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
17871 7462 : gcc_assert (TREE_OPERAND (t, 0) == var);
17872 7462 : t = TREE_OPERAND (t, 1);
17873 7462 : gcc_assert (TREE_CODE (t) == PLUS_EXPR
17874 : || TREE_CODE (t) == MINUS_EXPR
17875 : || TREE_CODE (t) == POINTER_PLUS_EXPR);
17876 7462 : gcc_assert (TREE_OPERAND (t, 0) == var);
17877 14822 : t = build2 (TREE_CODE (t), TREE_TYPE (decl),
17878 : is_doacross ? var : decl,
17879 7462 : TREE_OPERAND (t, 1));
17880 : }
17881 7535 : gimple_seq *seq;
17882 7535 : if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
17883 6939 : seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
17884 : else
17885 596 : seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
17886 7535 : push_gimplify_context ();
17887 7535 : gimplify_assign (decl, t, seq);
17888 7535 : gimple *bind = NULL;
17889 7535 : if (gimplify_ctxp->temps)
17890 : {
17891 2072 : bind = gimple_build_bind (NULL_TREE, *seq, NULL_TREE);
17892 2072 : *seq = NULL;
17893 2072 : gimplify_seq_add_stmt (seq, bind);
17894 : }
17895 7535 : pop_gimplify_context (bind);
17896 : }
17897 : }
17898 72933 : if (OMP_FOR_NON_RECTANGULAR (for_stmt) && var != decl)
17899 2609 : for (int j = i + 1; j < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); j++)
17900 : {
17901 1197 : t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), j);
17902 1197 : gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
17903 1197 : if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
17904 1197 : && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
17905 336 : TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
17906 1197 : t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), j);
17907 1197 : gcc_assert (COMPARISON_CLASS_P (t));
17908 1197 : if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
17909 1197 : && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
17910 343 : TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
17911 : }
17912 : }
17913 :
17914 51188 : BITMAP_FREE (has_decl_expr);
17915 51463 : delete allocate_uids;
17916 :
17917 51188 : if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP
17918 49602 : || (loop_p && orig_for_stmt == for_stmt))
17919 : {
17920 2642 : push_gimplify_context ();
17921 2642 : if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR)
17922 : {
17923 1891 : OMP_FOR_BODY (orig_for_stmt)
17924 1891 : = build3 (BIND_EXPR, void_type_node, NULL,
17925 1891 : OMP_FOR_BODY (orig_for_stmt), NULL);
17926 1891 : TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1;
17927 : }
17928 : }
17929 :
17930 51188 : gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt),
17931 : &for_body);
17932 :
17933 51188 : if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP
17934 49602 : || (loop_p && orig_for_stmt == for_stmt))
17935 : {
17936 2642 : if (gimple_code (g) == GIMPLE_BIND)
17937 2642 : pop_gimplify_context (g);
17938 : else
17939 0 : pop_gimplify_context (NULL);
17940 : }
17941 :
17942 51188 : if (orig_for_stmt != for_stmt)
17943 36734 : for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
17944 : {
17945 22808 : t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
17946 22808 : decl = TREE_OPERAND (t, 0);
17947 22808 : struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
17948 22808 : if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
17949 772 : gimplify_omp_ctxp = ctx->outer_context;
17950 22808 : var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
17951 22808 : gimplify_omp_ctxp = ctx;
17952 22808 : omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
17953 22808 : TREE_OPERAND (t, 0) = var;
17954 22808 : t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
17955 22808 : TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
17956 22808 : TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
17957 22808 : if (OMP_FOR_NON_RECTANGULAR (for_stmt))
17958 791 : for (int j = i + 1;
17959 791 : j < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); j++)
17960 : {
17961 377 : t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), j);
17962 377 : gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
17963 377 : if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
17964 377 : && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
17965 : {
17966 133 : TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
17967 133 : TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
17968 : }
17969 377 : t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), j);
17970 377 : gcc_assert (COMPARISON_CLASS_P (t));
17971 377 : if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
17972 377 : && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
17973 : {
17974 67 : TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
17975 67 : TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
17976 : }
17977 : }
17978 : }
17979 :
17980 51188 : gimplify_adjust_omp_clauses (pre_p, for_body,
17981 : &OMP_FOR_CLAUSES (orig_for_stmt),
17982 51188 : TREE_CODE (orig_for_stmt));
17983 :
17984 51188 : int kind;
17985 51188 : switch (TREE_CODE (orig_for_stmt))
17986 : {
17987 : case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
17988 10653 : case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
17989 8219 : case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
17990 1586 : case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break;
17991 12550 : case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
17992 0 : default:
17993 0 : gcc_unreachable ();
17994 : }
17995 51188 : if (loop_p && kind == GF_OMP_FOR_KIND_SIMD)
17996 : {
17997 1056 : gimplify_seq_add_seq (pre_p, for_pre_body);
17998 1056 : for_pre_body = NULL;
17999 : }
18000 51188 : gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
18001 51188 : TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
18002 : for_pre_body);
18003 51188 : if (orig_for_stmt != for_stmt)
18004 13926 : gimple_omp_for_set_combined_p (gfor, true);
18005 51188 : if (gimplify_omp_ctxp
18006 42560 : && (gimplify_omp_ctxp->combined_loop
18007 35029 : || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
18008 12292 : && gimplify_omp_ctxp->outer_context
18009 7858 : && gimplify_omp_ctxp->outer_context->combined_loop)))
18010 : {
18011 13926 : gimple_omp_for_set_combined_into_p (gfor, true);
18012 13926 : if (gimplify_omp_ctxp->combined_loop)
18013 7531 : gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
18014 : else
18015 6395 : gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
18016 : }
18017 :
18018 124121 : for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
18019 : {
18020 72933 : t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
18021 72933 : gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
18022 72933 : gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
18023 72933 : t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
18024 72933 : gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
18025 72933 : gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
18026 72933 : t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
18027 72933 : gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
18028 : }
18029 :
18030 : /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
18031 : constructs with GIMPLE_OMP_TASK sandwiched in between them.
18032 : The outer taskloop stands for computing the number of iterations,
18033 : counts for collapsed loops and holding taskloop specific clauses.
18034 : The task construct stands for the effect of data sharing on the
18035 : explicit task it creates and the inner taskloop stands for expansion
18036 : of the static loop inside of the explicit task construct. */
18037 51188 : if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
18038 : {
18039 1586 : tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor);
18040 1586 : tree task_clauses = NULL_TREE;
18041 1586 : tree c = *gfor_clauses_ptr;
18042 1586 : tree *gtask_clauses_ptr = &task_clauses;
18043 1586 : tree outer_for_clauses = NULL_TREE;
18044 1586 : tree *gforo_clauses_ptr = &outer_for_clauses;
18045 1586 : bitmap lastprivate_uids = NULL;
18046 1586 : if (omp_find_clause (c, OMP_CLAUSE_ALLOCATE))
18047 : {
18048 36 : c = omp_find_clause (c, OMP_CLAUSE_LASTPRIVATE);
18049 36 : if (c)
18050 : {
18051 18 : lastprivate_uids = BITMAP_ALLOC (NULL);
18052 54 : for (; c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
18053 : OMP_CLAUSE_LASTPRIVATE))
18054 18 : bitmap_set_bit (lastprivate_uids,
18055 18 : DECL_UID (OMP_CLAUSE_DECL (c)));
18056 : }
18057 36 : c = *gfor_clauses_ptr;
18058 : }
18059 12252 : for (; c; c = OMP_CLAUSE_CHAIN (c))
18060 10666 : switch (OMP_CLAUSE_CODE (c))
18061 : {
18062 : /* These clauses are allowed on task, move them there. */
18063 6228 : case OMP_CLAUSE_SHARED:
18064 6228 : case OMP_CLAUSE_FIRSTPRIVATE:
18065 6228 : case OMP_CLAUSE_DEFAULT:
18066 6228 : case OMP_CLAUSE_IF:
18067 6228 : case OMP_CLAUSE_UNTIED:
18068 6228 : case OMP_CLAUSE_FINAL:
18069 6228 : case OMP_CLAUSE_MERGEABLE:
18070 6228 : case OMP_CLAUSE_PRIORITY:
18071 6228 : case OMP_CLAUSE_REDUCTION:
18072 6228 : case OMP_CLAUSE_IN_REDUCTION:
18073 6228 : *gtask_clauses_ptr = c;
18074 6228 : gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
18075 6228 : break;
18076 2068 : case OMP_CLAUSE_PRIVATE:
18077 2068 : if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c))
18078 : {
18079 : /* We want private on outer for and firstprivate
18080 : on task. */
18081 26 : *gtask_clauses_ptr
18082 26 : = build_omp_clause (OMP_CLAUSE_LOCATION (c),
18083 : OMP_CLAUSE_FIRSTPRIVATE);
18084 26 : OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
18085 26 : lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL,
18086 : openacc);
18087 26 : gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
18088 26 : *gforo_clauses_ptr = c;
18089 26 : gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
18090 : }
18091 : else
18092 : {
18093 2042 : *gtask_clauses_ptr = c;
18094 2042 : gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
18095 : }
18096 : break;
18097 : /* These clauses go into outer taskloop clauses. */
18098 533 : case OMP_CLAUSE_GRAINSIZE:
18099 533 : case OMP_CLAUSE_NUM_TASKS:
18100 533 : case OMP_CLAUSE_NOGROUP:
18101 533 : *gforo_clauses_ptr = c;
18102 533 : gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
18103 533 : break;
18104 : /* Collapse clause we duplicate on both taskloops. */
18105 584 : case OMP_CLAUSE_COLLAPSE:
18106 584 : *gfor_clauses_ptr = c;
18107 584 : gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
18108 584 : *gforo_clauses_ptr = copy_node (c);
18109 584 : gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
18110 584 : break;
18111 : /* For lastprivate, keep the clause on inner taskloop, and add
18112 : a shared clause on task. If the same decl is also firstprivate,
18113 : add also firstprivate clause on the inner taskloop. */
18114 1191 : case OMP_CLAUSE_LASTPRIVATE:
18115 1191 : if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
18116 : {
18117 : /* For taskloop C++ lastprivate IVs, we want:
18118 : 1) private on outer taskloop
18119 : 2) firstprivate and shared on task
18120 : 3) lastprivate on inner taskloop */
18121 38 : *gtask_clauses_ptr
18122 38 : = build_omp_clause (OMP_CLAUSE_LOCATION (c),
18123 : OMP_CLAUSE_FIRSTPRIVATE);
18124 38 : OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
18125 38 : lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL,
18126 : openacc);
18127 38 : gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
18128 38 : OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1;
18129 38 : *gforo_clauses_ptr = build_omp_clause (OMP_CLAUSE_LOCATION (c),
18130 : OMP_CLAUSE_PRIVATE);
18131 38 : OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c);
18132 38 : OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1;
18133 38 : TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c);
18134 38 : gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
18135 : }
18136 1191 : *gfor_clauses_ptr = c;
18137 1191 : gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
18138 1191 : *gtask_clauses_ptr
18139 1191 : = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_SHARED);
18140 1191 : OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
18141 1191 : if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
18142 312 : OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1;
18143 1191 : gtask_clauses_ptr
18144 1191 : = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
18145 1191 : break;
18146 : /* Allocate clause we duplicate on task and inner taskloop
18147 : if the decl is lastprivate, otherwise just put on task. */
18148 62 : case OMP_CLAUSE_ALLOCATE:
18149 62 : if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
18150 62 : && DECL_P (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)))
18151 : {
18152 : /* Additionally, put firstprivate clause on task
18153 : for the allocator if it is not constant. */
18154 34 : *gtask_clauses_ptr
18155 34 : = build_omp_clause (OMP_CLAUSE_LOCATION (c),
18156 : OMP_CLAUSE_FIRSTPRIVATE);
18157 34 : OMP_CLAUSE_DECL (*gtask_clauses_ptr)
18158 34 : = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
18159 34 : gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
18160 : }
18161 62 : if (lastprivate_uids
18162 97 : && bitmap_bit_p (lastprivate_uids,
18163 35 : DECL_UID (OMP_CLAUSE_DECL (c))))
18164 : {
18165 17 : *gfor_clauses_ptr = c;
18166 17 : gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
18167 17 : *gtask_clauses_ptr = copy_node (c);
18168 17 : gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
18169 : }
18170 : else
18171 : {
18172 45 : *gtask_clauses_ptr = c;
18173 45 : gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
18174 : }
18175 : break;
18176 0 : default:
18177 0 : gcc_unreachable ();
18178 : }
18179 1586 : *gfor_clauses_ptr = NULL_TREE;
18180 1586 : *gtask_clauses_ptr = NULL_TREE;
18181 1586 : *gforo_clauses_ptr = NULL_TREE;
18182 1586 : BITMAP_FREE (lastprivate_uids);
18183 1586 : gimple_set_location (gfor, input_location);
18184 1586 : g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE);
18185 1586 : g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE,
18186 : NULL_TREE, NULL_TREE, NULL_TREE);
18187 1586 : gimple_set_location (g, input_location);
18188 1586 : gimple_omp_task_set_taskloop_p (g, true);
18189 1586 : g = gimple_build_bind (NULL_TREE, g, NULL_TREE);
18190 1586 : gomp_for *gforo
18191 1586 : = gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP, outer_for_clauses,
18192 : gimple_omp_for_collapse (gfor),
18193 : gimple_omp_for_pre_body (gfor));
18194 1586 : gimple_omp_for_set_pre_body (gfor, NULL);
18195 1586 : gimple_omp_for_set_combined_p (gforo, true);
18196 1586 : gimple_omp_for_set_combined_into_p (gfor, true);
18197 3458 : for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++)
18198 : {
18199 1872 : tree type = TREE_TYPE (gimple_omp_for_index (gfor, i));
18200 1872 : tree v = create_tmp_var (type);
18201 1872 : gimple_omp_for_set_index (gforo, i, v);
18202 1872 : t = unshare_expr (gimple_omp_for_initial (gfor, i));
18203 1872 : gimple_omp_for_set_initial (gforo, i, t);
18204 1872 : gimple_omp_for_set_cond (gforo, i,
18205 : gimple_omp_for_cond (gfor, i));
18206 1872 : t = unshare_expr (gimple_omp_for_final (gfor, i));
18207 1872 : gimple_omp_for_set_final (gforo, i, t);
18208 1872 : t = unshare_expr (gimple_omp_for_incr (gfor, i));
18209 1872 : gcc_assert (TREE_OPERAND (t, 0) == gimple_omp_for_index (gfor, i));
18210 1872 : TREE_OPERAND (t, 0) = v;
18211 1872 : gimple_omp_for_set_incr (gforo, i, t);
18212 1872 : t = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
18213 1872 : OMP_CLAUSE_DECL (t) = v;
18214 1872 : OMP_CLAUSE_CHAIN (t) = gimple_omp_for_clauses (gforo);
18215 1872 : gimple_omp_for_set_clauses (gforo, t);
18216 1872 : if (OMP_FOR_NON_RECTANGULAR (for_stmt))
18217 : {
18218 90 : tree *p1 = NULL, *p2 = NULL;
18219 90 : t = gimple_omp_for_initial (gforo, i);
18220 90 : if (TREE_CODE (t) == TREE_VEC)
18221 34 : p1 = &TREE_VEC_ELT (t, 0);
18222 90 : t = gimple_omp_for_final (gforo, i);
18223 90 : if (TREE_CODE (t) == TREE_VEC)
18224 : {
18225 31 : if (p1)
18226 23 : p2 = &TREE_VEC_ELT (t, 0);
18227 : else
18228 8 : p1 = &TREE_VEC_ELT (t, 0);
18229 : }
18230 90 : if (p1)
18231 : {
18232 : int j;
18233 58 : for (j = 0; j < i; j++)
18234 58 : if (*p1 == gimple_omp_for_index (gfor, j))
18235 : {
18236 42 : *p1 = gimple_omp_for_index (gforo, j);
18237 42 : if (p2)
18238 23 : *p2 = *p1;
18239 : break;
18240 : }
18241 42 : gcc_assert (j < i);
18242 : }
18243 : }
18244 : }
18245 1586 : gimplify_seq_add_stmt (pre_p, gforo);
18246 : }
18247 : else
18248 49602 : gimplify_seq_add_stmt (pre_p, gfor);
18249 :
18250 51188 : if (TREE_CODE (orig_for_stmt) == OMP_FOR)
18251 : {
18252 18180 : struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
18253 18180 : unsigned lastprivate_conditional = 0;
18254 18180 : while (ctx
18255 18216 : && (ctx->region_type == ORT_TARGET_DATA
18256 14178 : || ctx->region_type == ORT_TASKGROUP))
18257 36 : ctx = ctx->outer_context;
18258 18180 : if (ctx && (ctx->region_type & ORT_PARALLEL) != 0)
18259 13918 : for (tree c = gimple_omp_for_clauses (gfor);
18260 63160 : c; c = OMP_CLAUSE_CHAIN (c))
18261 49242 : if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
18262 49242 : && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
18263 158 : ++lastprivate_conditional;
18264 13918 : if (lastprivate_conditional)
18265 : {
18266 118 : struct omp_for_data fd;
18267 118 : omp_extract_for_data (gfor, &fd, NULL);
18268 118 : tree type = build_array_type_nelts (unsigned_type_for (fd.iter_type),
18269 118 : lastprivate_conditional);
18270 118 : tree var = create_tmp_var_raw (type);
18271 118 : tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
18272 118 : OMP_CLAUSE_DECL (c) = var;
18273 118 : OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gfor);
18274 118 : gimple_omp_for_set_clauses (gfor, c);
18275 118 : omp_add_variable (ctx, var, GOVD_CONDTEMP | GOVD_SEEN);
18276 : }
18277 : }
18278 33008 : else if (TREE_CODE (orig_for_stmt) == OMP_SIMD)
18279 : {
18280 10653 : unsigned lastprivate_conditional = 0;
18281 49159 : for (tree c = gimple_omp_for_clauses (gfor); c; c = OMP_CLAUSE_CHAIN (c))
18282 38506 : if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
18283 38506 : && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
18284 119 : ++lastprivate_conditional;
18285 10653 : if (lastprivate_conditional)
18286 : {
18287 91 : struct omp_for_data fd;
18288 91 : omp_extract_for_data (gfor, &fd, NULL);
18289 91 : tree type = unsigned_type_for (fd.iter_type);
18290 301 : while (lastprivate_conditional--)
18291 : {
18292 119 : tree c = build_omp_clause (UNKNOWN_LOCATION,
18293 : OMP_CLAUSE__CONDTEMP_);
18294 119 : OMP_CLAUSE_DECL (c) = create_tmp_var (type);
18295 119 : OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gfor);
18296 119 : gimple_omp_for_set_clauses (gfor, c);
18297 : }
18298 : }
18299 : }
18300 :
18301 51188 : if (ret != GS_ALL_DONE)
18302 : return GS_ERROR;
18303 51188 : *expr_p = NULL_TREE;
18304 51188 : return GS_ALL_DONE;
18305 : }
18306 :
18307 : /* Helper for gimplify_omp_loop, called through walk_tree. */
18308 :
18309 : static tree
18310 294 : note_no_context_vars (tree *tp, int *, void *data)
18311 : {
18312 294 : if (VAR_P (*tp)
18313 56 : && DECL_CONTEXT (*tp) == NULL_TREE
18314 309 : && !is_global_var (*tp))
18315 : {
18316 15 : vec<tree> *d = (vec<tree> *) data;
18317 15 : d->safe_push (*tp);
18318 15 : DECL_CONTEXT (*tp) = current_function_decl;
18319 : }
18320 294 : return NULL_TREE;
18321 : }
18322 :
18323 : /* Gimplify the gross structure of an OMP_LOOP statement. */
18324 :
static enum gimplify_status
gimplify_omp_loop (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt = *expr_p;
  tree clauses = OMP_FOR_CLAUSES (for_stmt);
  struct gimplify_omp_ctx *octx = gimplify_omp_ctxp;
  /* Binding region the loop construct is lowered into; defaults to
     bind(thread) and is refined from the clauses/context below.  */
  enum omp_clause_bind_kind kind = OMP_CLAUSE_BIND_THREAD;
  int i;

  /* Loop transformations (e.g. tile/unroll) may replace the statement;
     if so, let the caller gimplify the replacement instead.  */
  omp_maybe_apply_loop_xforms (expr_p, NULL_TREE);
  if (*expr_p != for_stmt)
    return GS_OK;

  /* If order is not present, the behavior is as if order(concurrent)
     appeared.  */
  tree order = omp_find_clause (clauses, OMP_CLAUSE_ORDER);
  if (order == NULL_TREE)
    {
      order = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_ORDER);
      OMP_CLAUSE_CHAIN (order) = clauses;
      OMP_FOR_CLAUSES (for_stmt) = clauses = order;
    }

  /* Determine the binding: either taken from an explicit bind clause
     (after diagnosing invalid nestings), or inferred from the enclosing
     OpenMP context when the clause is absent.  */
  tree bind = omp_find_clause (clauses, OMP_CLAUSE_BIND);
  if (bind == NULL_TREE)
    {
      if (!flag_openmp) /* flag_openmp_simd */
	;
      else if (octx && (octx->region_type & ORT_TEAMS) != 0)
	kind = OMP_CLAUSE_BIND_TEAMS;
      else if (octx && (octx->region_type & ORT_PARALLEL) != 0)
	kind = OMP_CLAUSE_BIND_PARALLEL;
      else
	{
	  /* Skip OpenACC and no-op contexts when looking for an
	     enclosing OpenMP construct.  */
	  for (; octx; octx = octx->outer_context)
	    {
	      if ((octx->region_type & ORT_ACC) != 0
		  || octx->region_type == ORT_NONE
		  || octx->region_type == ORT_IMPLICIT_TARGET)
		continue;
	      break;
	    }
	  if (octx == NULL && !in_omp_construct)
	    error_at (EXPR_LOCATION (for_stmt),
		      "%<bind%> clause not specified on a %<loop%> "
		      "construct not nested inside another OpenMP construct");
	}
      /* Materialize the inferred binding as an explicit clause.  */
      bind = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_BIND);
      OMP_CLAUSE_CHAIN (bind) = clauses;
      OMP_CLAUSE_BIND_KIND (bind) = kind;
      OMP_FOR_CLAUSES (for_stmt) = bind;
    }
  else
    switch (OMP_CLAUSE_BIND_KIND (bind))
      {
      case OMP_CLAUSE_BIND_THREAD:
	break;
      case OMP_CLAUSE_BIND_PARALLEL:
	if (!flag_openmp) /* flag_openmp_simd */
	  {
	    /* Without full OpenMP, degrade to the thread binding.  */
	    OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
	    break;
	  }
	/* bind(parallel) is invalid when nested in a simd region that
	   itself has no bind clause; diagnose and degrade.  */
	for (; octx; octx = octx->outer_context)
	  if (octx->region_type == ORT_SIMD
	      && omp_find_clause (octx->clauses, OMP_CLAUSE_BIND) == NULL_TREE)
	    {
	      error_at (EXPR_LOCATION (for_stmt),
			"%<bind(parallel)%> on a %<loop%> construct nested "
			"inside %<simd%> construct");
	      OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
	      break;
	    }
	kind = OMP_CLAUSE_BIND_PARALLEL;
	break;
      case OMP_CLAUSE_BIND_TEAMS:
	if (!flag_openmp) /* flag_openmp_simd */
	  {
	    OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
	    break;
	  }
	/* bind(teams) requires the loop to be strictly nested inside a
	   teams region; otherwise diagnose and degrade to thread.  */
	if ((octx
	     && octx->region_type != ORT_IMPLICIT_TARGET
	     && octx->region_type != ORT_NONE
	     && (octx->region_type & ORT_TEAMS) == 0)
	    || in_omp_construct)
	  {
	    error_at (EXPR_LOCATION (for_stmt),
		      "%<bind(teams)%> on a %<loop%> region not strictly "
		      "nested inside of a %<teams%> region");
	    OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
	    break;
	  }
	kind = OMP_CLAUSE_BIND_TEAMS;
	break;
      default:
	gcc_unreachable ();
      }

  /* Walk the clause chain, diagnosing clauses that are not valid on a
     loop construct.  Note: pc is only advanced when the clause is kept;
     a removed lastprivate is spliced out by overwriting *pc.  */
  for (tree *pc = &OMP_FOR_CLAUSES (for_stmt); *pc; )
    switch (OMP_CLAUSE_CODE (*pc))
      {
      case OMP_CLAUSE_REDUCTION:
	/* inscan and task reduction modifiers are invalid here; clear
	   the flags after diagnosing so later passes see clean clauses.  */
	if (OMP_CLAUSE_REDUCTION_INSCAN (*pc))
	  {
	    error_at (OMP_CLAUSE_LOCATION (*pc),
		      "%<inscan%> %<reduction%> clause on "
		      "%qs construct", "loop");
	    OMP_CLAUSE_REDUCTION_INSCAN (*pc) = 0;
	  }
	if (OMP_CLAUSE_REDUCTION_TASK (*pc))
	  {
	    error_at (OMP_CLAUSE_LOCATION (*pc),
		      "invalid %<task%> reduction modifier on construct "
		      "other than %<parallel%>, %qs or %<sections%>",
		      lang_GNU_Fortran () ? "do" : "for");
	    OMP_CLAUSE_REDUCTION_TASK (*pc) = 0;
	  }
	pc = &OMP_CLAUSE_CHAIN (*pc);
	break;
      case OMP_CLAUSE_LASTPRIVATE:
	/* lastprivate on a loop construct may only name a loop iterator;
	   search the iteration variables (and their original decls) for
	   a match.  */
	for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
	  {
	    tree t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
	    gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
	    if (OMP_CLAUSE_DECL (*pc) == TREE_OPERAND (t, 0))
	      break;
	    if (OMP_FOR_ORIG_DECLS (for_stmt)
		&& TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt),
					    i)) == TREE_LIST
		&& TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt),
					       i)))
	      {
		tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
		if (OMP_CLAUSE_DECL (*pc) == TREE_PURPOSE (orig))
		  break;
	      }
	  }
	if (i == TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)))
	  {
	    /* No iterator matched: report and drop the clause.  */
	    error_at (OMP_CLAUSE_LOCATION (*pc),
		      "%<lastprivate%> clause on a %<loop%> construct refers "
		      "to a variable %qD which is not the loop iterator",
		      OMP_CLAUSE_DECL (*pc));
	    *pc = OMP_CLAUSE_CHAIN (*pc);
	    break;
	  }
	pc = &OMP_CLAUSE_CHAIN (*pc);
	break;
      default:
	pc = &OMP_CLAUSE_CHAIN (*pc);
	break;
      }

  /* The loop construct is lowered with simd as the innermost construct;
     rewrite the statement code in place.  */
  TREE_SET_CODE (for_stmt, OMP_SIMD);

  /* How many extra constructs wrap the simd loop depends on the
     binding: thread -> none, parallel -> for, teams -> distribute
     inside parallel.  */
  int last;
  switch (kind)
    {
    case OMP_CLAUSE_BIND_THREAD: last = 0; break;
    case OMP_CLAUSE_BIND_PARALLEL: last = 1; break;
    case OMP_CLAUSE_BIND_TEAMS: last = 2; break;
    }
  for (int pass = 1; pass <= last; pass++)
    {
      if (pass == 2)
	{
	  /* bind(teams): wrap what has been built so far in a combined
	     OMP_PARALLEL before adding the OMP_DISTRIBUTE below.  */
	  tree bind = build3 (BIND_EXPR, void_type_node, NULL, NULL,
			      make_node (BLOCK));
	  append_to_statement_list (*expr_p, &BIND_EXPR_BODY (bind));
	  *expr_p = make_node (OMP_PARALLEL);
	  TREE_TYPE (*expr_p) = void_type_node;
	  OMP_PARALLEL_BODY (*expr_p) = bind;
	  OMP_PARALLEL_COMBINED (*expr_p) = 1;
	  SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (for_stmt));
	  tree *pc = &OMP_PARALLEL_CLAUSES (*expr_p);
	  /* Iterator decls recorded in ORIG_DECLS TREE_LISTs are made
	     firstprivate on the parallel.  */
	  for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
	    if (OMP_FOR_ORIG_DECLS (for_stmt)
		&& (TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i))
		    == TREE_LIST))
	      {
		tree elt = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
		if (TREE_PURPOSE (elt) && TREE_VALUE (elt))
		  {
		    *pc = build_omp_clause (UNKNOWN_LOCATION,
					    OMP_CLAUSE_FIRSTPRIVATE);
		    OMP_CLAUSE_DECL (*pc) = TREE_VALUE (elt);
		    pc = &OMP_CLAUSE_CHAIN (*pc);
		  }
	      }
	}
      /* Build the wrapping worksharing construct for this pass:
	 OMP_FOR on pass 1, OMP_DISTRIBUTE on pass 2.  */
      tree t = make_node (pass == 2 ? OMP_DISTRIBUTE : OMP_FOR);
      tree *pc = &OMP_FOR_CLAUSES (t);
      TREE_TYPE (t) = void_type_node;
      OMP_FOR_BODY (t) = *expr_p;
      SET_EXPR_LOCATION (t, EXPR_LOCATION (for_stmt));
      /* Duplicate the relevant clauses from the innermost (simd) loop
	 onto the new wrapper.  */
      for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
	switch (OMP_CLAUSE_CODE (c))
	  {
	  case OMP_CLAUSE_BIND:
	  case OMP_CLAUSE_ORDER:
	  case OMP_CLAUSE_COLLAPSE:
	    *pc = copy_node (c);
	    pc = &OMP_CLAUSE_CHAIN (*pc);
	    break;
	  case OMP_CLAUSE_PRIVATE:
	  case OMP_CLAUSE_FIRSTPRIVATE:
	    /* Only needed on innermost.  */
	    break;
	  case OMP_CLAUSE_LASTPRIVATE:
	    /* Loop-IV lastprivates on non-outermost wrappers also get a
	       firstprivate copy so the IV's initial value is visible.  */
	    if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c) && pass != last)
	      {
		*pc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
					OMP_CLAUSE_FIRSTPRIVATE);
		OMP_CLAUSE_DECL (*pc) = OMP_CLAUSE_DECL (c);
		lang_hooks.decls.omp_finish_clause (*pc, NULL, false);
		pc = &OMP_CLAUSE_CHAIN (*pc);
	      }
	    *pc = copy_node (c);
	    OMP_CLAUSE_LASTPRIVATE_STMT (*pc) = NULL_TREE;
	    TREE_TYPE (*pc) = unshare_expr (TREE_TYPE (c));
	    if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
	      {
		if (pass != last)
		  OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (*pc) = 1;
		else
		  lang_hooks.decls.omp_finish_clause (*pc, NULL, false);
		OMP_CLAUSE_LASTPRIVATE_LOOP_IV (*pc) = 0;
	      }
	    pc = &OMP_CLAUSE_CHAIN (*pc);
	    break;
	  case OMP_CLAUSE_REDUCTION:
	    *pc = copy_node (c);
	    OMP_CLAUSE_DECL (*pc) = unshare_expr (OMP_CLAUSE_DECL (c));
	    TREE_TYPE (*pc) = unshare_expr (TREE_TYPE (c));
	    if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc))
	      {
		/* User-defined reduction: the placeholder decls in the
		   init/merge sequences must be copied, not shared, so
		   each wrapper gets its own.  Locals without a
		   DECL_CONTEXT are temporarily parented to the current
		   function so copy_tree_body_r will remap them; the
		   contexts are cleared again afterwards.  */
		auto_vec<tree> no_context_vars;
		int walk_subtrees = 0;
		note_no_context_vars (&OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
				      &walk_subtrees, &no_context_vars);
		if (tree p = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c))
		  note_no_context_vars (&p, &walk_subtrees, &no_context_vars);
		walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_INIT (c),
					      note_no_context_vars,
					      &no_context_vars);
		walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_MERGE (c),
					      note_no_context_vars,
					      &no_context_vars);

		OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc)
		  = copy_node (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c));
		if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc))
		  OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc)
		    = copy_node (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c));

		/* Map old placeholders to the new copies (and the decl
		   to itself) for the tree-body copy below.  */
		hash_map<tree, tree> decl_map;
		decl_map.put (OMP_CLAUSE_DECL (c), OMP_CLAUSE_DECL (c));
		decl_map.put (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
			      OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc));
		if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc))
		  decl_map.put (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
				OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc));

		copy_body_data id;
		memset (&id, 0, sizeof (id));
		id.src_fn = current_function_decl;
		id.dst_fn = current_function_decl;
		id.src_cfun = cfun;
		id.decl_map = &decl_map;
		id.copy_decl = copy_decl_no_change;
		id.transform_call_graph_edges = CB_CGE_DUPLICATE;
		id.transform_new_cfg = true;
		id.transform_return_to_modify = false;
		id.eh_lp_nr = 0;
		walk_tree (&OMP_CLAUSE_REDUCTION_INIT (*pc), copy_tree_body_r,
			   &id, NULL);
		walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (*pc), copy_tree_body_r,
			   &id, NULL);

		/* Undo the temporary DECL_CONTEXT assignment on both the
		   originals and their copies.  */
		for (tree d : no_context_vars)
		  {
		    DECL_CONTEXT (d) = NULL_TREE;
		    DECL_CONTEXT (*decl_map.get (d)) = NULL_TREE;
		  }
	      }
	    else
	      {
		OMP_CLAUSE_REDUCTION_INIT (*pc)
		  = unshare_expr (OMP_CLAUSE_REDUCTION_INIT (c));
		OMP_CLAUSE_REDUCTION_MERGE (*pc)
		  = unshare_expr (OMP_CLAUSE_REDUCTION_MERGE (c));
	      }
	    pc = &OMP_CLAUSE_CHAIN (*pc);
	    break;
	  default:
	    gcc_unreachable ();
	  }
      *pc = NULL_TREE;
      *expr_p = t;
    }
  /* Gimplify the fully rewritten construct.  */
  return gimplify_expr (expr_p, pre_p, NULL, is_gimple_stmt, fb_none);
}
18628 :
18629 :
18630 : /* Helper function of optimize_target_teams, find OMP_TEAMS inside
18631 : of OMP_TARGET's body. */
18632 :
18633 : static tree
18634 92619 : find_omp_teams (tree *tp, int *walk_subtrees, void *)
18635 : {
18636 92619 : *walk_subtrees = 0;
18637 92619 : switch (TREE_CODE (*tp))
18638 : {
18639 : case OMP_TEAMS:
18640 : return *tp;
18641 25845 : case BIND_EXPR:
18642 25845 : case STATEMENT_LIST:
18643 25845 : *walk_subtrees = 1;
18644 25845 : break;
18645 : default:
18646 : break;
18647 : }
18648 : return NULL_TREE;
18649 : }
18650 :
/* Helper function of optimize_target_teams, determine if the expression
   can be computed safely before the target construct on the host.
   walk_tree callback: returns the offending subtree (non-NULL) when *TP
   makes the expression unsafe to precompute, NULL_TREE when *TP is fine.  */

static tree
computable_teams_clause (tree *tp, int *walk_subtrees, void *)
{
  splay_tree_node n;

  if (TYPE_P (*tp))
    {
      /* Types never disqualify the expression.  */
      *walk_subtrees = 0;
      return NULL_TREE;
    }
  switch (TREE_CODE (*tp))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      *walk_subtrees = 0;
      /* Reject erroneous, non-integral, value-expr, thread-local,
	 side-effecting or volatile decls outright.  */
      if (error_operand_p (*tp)
	  || !INTEGRAL_TYPE_P (TREE_TYPE (*tp))
	  || DECL_HAS_VALUE_EXPR_P (*tp)
	  || DECL_THREAD_LOCAL_P (*tp)
	  || TREE_SIDE_EFFECTS (*tp)
	  || TREE_THIS_VOLATILE (*tp))
	return *tp;
      /* Declare-target globals live on the device; the host copy may be
	 stale, so they cannot be evaluated on the host.  */
      if (is_global_var (*tp)
	  && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp))
	      || lookup_attribute ("omp declare target link",
				   DECL_ATTRIBUTES (*tp))))
	return *tp;
      /* Locals of the current function not yet seen in a BIND_EXPR are
	 temporaries created during gimplification of the target body;
	 they have no host value before the construct.  */
      if (VAR_P (*tp)
	  && !DECL_SEEN_IN_BIND_EXPR_P (*tp)
	  && !is_global_var (*tp)
	  && decl_function_context (*tp) == current_function_decl)
	return *tp;
      /* Otherwise the decl is safe only if it is firstprivate on the
	 target (explicitly, implicitly via defaultmap, or effectively
	 through map(always,to:)/map(always,tofrom:)).  */
      n = splay_tree_lookup (gimplify_omp_ctxp->variables,
			     (splay_tree_key) *tp);
      if (n == NULL)
	{
	  if (gimplify_omp_ctxp->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
	    return NULL_TREE;
	  return *tp;
	}
      else if (n->value & GOVD_LOCAL)
	return *tp;
      else if (n->value & GOVD_FIRSTPRIVATE)
	return NULL_TREE;
      else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
	       == (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
	return NULL_TREE;
      return *tp;
    case INTEGER_CST:
      /* Integral constants are trivially computable.  */
      if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
	return *tp;
      return NULL_TREE;
    case TARGET_EXPR:
      /* A TARGET_EXPR is acceptable only as a plain VAR_DECL slot with
	 no initializer left; recurse on the slot itself.  */
      if (TARGET_EXPR_INITIAL (*tp)
	  || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL)
	return *tp;
      return computable_teams_clause (&TARGET_EXPR_SLOT (*tp),
				      walk_subtrees, NULL);
    /* Allow some reasonable subset of integral arithmetics.  */
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
    case NON_LVALUE_EXPR:
    CASE_CONVERT:
      if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
	return *tp;
      return NULL_TREE;
    /* And disallow anything else, except for comparisons.  */
    default:
      if (COMPARISON_CLASS_P (*tp))
	return NULL_TREE;
      return *tp;
    }
}
18749 :
/* Try to determine if the num_teams and/or thread_limit expressions
   can have their values determined already before entering the
   target construct.
   INTEGER_CSTs trivially are,
   integral decls that are firstprivate (explicitly or implicitly)
   or explicitly map(always, to:) or map(always, tofrom:) on the target
   region too, and expressions involving simple arithmetics on those
   too, function calls are not ok, dereferencing something neither etc.
   Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
   EXPR based on what we find:
   0 stands for clause not specified at all, use implementation default
   -1 stands for value that can't be determined easily before entering
   the target construct.
   -2 means that no explicit teams construct was specified
   If teams construct is not present at all, use 1 for num_teams
   and 0 for thread_limit (only one team is involved, and the thread
   limit is implementation defined.)  */

static void
optimize_target_teams (tree target, gimple_seq *pre_p)
{
  tree body = OMP_BODY (target);
  tree teams = walk_tree (&body, find_omp_teams, NULL, NULL);
  tree num_teams_lower = NULL_TREE;
  tree num_teams_upper = integer_zero_node;
  tree thread_limit = integer_zero_node;
  location_t num_teams_loc = EXPR_LOCATION (target);
  location_t thread_limit_loc = EXPR_LOCATION (target);
  tree c, *p, expr;
  /* Save the target's context: expressions evaluated on the host must be
     gimplified in the outer context, then the target context restored.  */
  struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp;

  if (teams == NULL_TREE)
    /* -2: no explicit teams construct inside the target.  */
    num_teams_upper = build_int_cst (integer_type_node, -2);
  else
    for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c))
      {
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS)
	  {
	    p = &num_teams_upper;
	    num_teams_loc = OMP_CLAUSE_LOCATION (c);
	    /* num_teams may carry a lower bound too (OpenMP 5.1
	       num_teams(lower:upper)); handle it the same way as the
	       main operand below.  */
	    if (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c))
	      {
		expr = OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c);
		if (TREE_CODE (expr) == INTEGER_CST)
		  num_teams_lower = expr;
		else if (walk_tree (&expr, computable_teams_clause,
				    NULL, NULL))
		  /* Not host-computable: -1.  */
		  num_teams_lower = integer_minus_one_node;
		else
		  {
		    num_teams_lower = expr;
		    /* Gimplify in the outer (host) context.  */
		    gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
		    if (gimplify_expr (&num_teams_lower, pre_p, NULL,
				       is_gimple_val, fb_rvalue, false)
			== GS_ERROR)
		      {
			gimplify_omp_ctxp = target_ctx;
			num_teams_lower = integer_minus_one_node;
		      }
		    else
		      {
			gimplify_omp_ctxp = target_ctx;
			if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
			  OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)
			    = num_teams_lower;
		      }
		  }
	      }
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT)
	  {
	    p = &thread_limit;
	    thread_limit_loc = OMP_CLAUSE_LOCATION (c);
	  }
	else
	  continue;
	/* Common handling for the num_teams upper bound and
	   thread_limit operand selected into *P above.  */
	expr = OMP_CLAUSE_OPERAND (c, 0);
	if (TREE_CODE (expr) == INTEGER_CST)
	  {
	    *p = expr;
	    continue;
	  }
	if (walk_tree (&expr, computable_teams_clause, NULL, NULL))
	  {
	    /* Not computable before the target construct: -1.  */
	    *p = integer_minus_one_node;
	    continue;
	  }
	*p = expr;
	gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
	if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue, false)
	    == GS_ERROR)
	  {
	    gimplify_omp_ctxp = target_ctx;
	    *p = integer_minus_one_node;
	    continue;
	  }
	gimplify_omp_ctxp = target_ctx;
	if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
	  OMP_CLAUSE_OPERAND (c, 0) = *p;
      }
  /* Attach the computed values as clauses on the target itself so the
     runtime launch can use them; keep a user-written thread_limit.  */
  if (!omp_find_clause (OMP_TARGET_CLAUSES (target), OMP_CLAUSE_THREAD_LIMIT))
    {
      c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT);
      OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
      OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
      OMP_TARGET_CLAUSES (target) = c;
    }
  c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS);
  OMP_CLAUSE_NUM_TEAMS_UPPER_EXPR (c) = num_teams_upper;
  OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c) = num_teams_lower;
  OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
  OMP_TARGET_CLAUSES (target) = c;
}
18863 :
/* Gimplify the gross structure of several OMP constructs: the region-type
   OpenMP/OpenACC directives handled here are OMP_SECTIONS, OMP_SINGLE,
   OMP_SCOPE, OMP_TARGET, OMP_TARGET_DATA, OMP_TEAMS and the OACC_*
   counterparts.  *EXPR_P is the construct; the resulting GIMPLE statement
   is appended to PRE_P and *EXPR_P is cleared.  */

static void
gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p;
  gimple *stmt;
  gimple_seq body = NULL;
  enum omp_region_type ort;

  /* Map the tree code to the OMP region type used while scanning
     clauses and gimplifying the body.  */
  switch (TREE_CODE (expr))
    {
    case OMP_SECTIONS:
    case OMP_SINGLE:
      ort = ORT_WORKSHARE;
      break;
    case OMP_SCOPE:
      ort = ORT_TASKGROUP;
      break;
    case OMP_TARGET:
      ort = OMP_TARGET_COMBINED (expr) ? ORT_COMBINED_TARGET : ORT_TARGET;
      break;
    case OACC_KERNELS:
      ort = ORT_ACC_KERNELS;
      break;
    case OACC_PARALLEL:
      ort = ORT_ACC_PARALLEL;
      break;
    case OACC_SERIAL:
      ort = ORT_ACC_SERIAL;
      break;
    case OACC_DATA:
      ort = ORT_ACC_DATA;
      break;
    case OMP_TARGET_DATA:
      ort = ORT_TARGET_DATA;
      break;
    case OMP_TEAMS:
      ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
      /* teams not nested in (or only implicitly in) a target runs on
	 the host.  */
      if (gimplify_omp_ctxp == NULL
	  || gimplify_omp_ctxp->region_type == ORT_IMPLICIT_TARGET)
	ort = (enum omp_region_type) (ort | ORT_HOST_TEAMS);
      break;
    case OACC_HOST_DATA:
      ort = ORT_ACC_HOST_DATA;
      break;
    default:
      gcc_unreachable ();
    }

  /* For target, expand clause iterators into loops first.  */
  gimple_seq iterator_loops_seq = NULL;
  if (TREE_CODE (expr) == OMP_TARGET)
    {
      remove_unused_omp_iterator_vars (&OMP_CLAUSES (expr));
      build_omp_iterators_loops (&OMP_CLAUSES (expr), &iterator_loops_seq);
    }

  bool save_in_omp_construct = in_omp_construct;
  if ((ort & ORT_ACC) == 0)
    in_omp_construct = false;
  gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort,
			     TREE_CODE (expr), &iterator_loops_seq);
  if (TREE_CODE (expr) == OMP_TARGET)
    optimize_target_teams (expr, pre_p);
  if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0
      || (ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
    {
      /* Target-like regions and host teams gimplify the body in a
	 fresh gimplification context.  */
      push_gimplify_context ();
      gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body);
      if (gimple_code (g) == GIMPLE_BIND)
	pop_gimplify_context (g);
      else
	pop_gimplify_context (NULL);
      if ((ort & ORT_TARGET_DATA) != 0)
	{
	  /* Data regions need an explicit end-of-region runtime call,
	     run even on abnormal exits: wrap the body in a
	     GIMPLE_TRY_FINALLY around it.  */
	  enum built_in_function end_ix;
	  switch (TREE_CODE (expr))
	    {
	    case OACC_DATA:
	    case OACC_HOST_DATA:
	      end_ix = BUILT_IN_GOACC_DATA_END;
	      break;
	    case OMP_TARGET_DATA:
	      end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  tree fn = builtin_decl_explicit (end_ix);
	  g = gimple_build_call (fn, 0);
	  gimple_seq cleanup = NULL;
	  gimple_seq_add_stmt (&cleanup, g);
	  g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
	  body = NULL;
	  gimple_seq_add_stmt (&body, g);
	}
    }
  else
    gimplify_and_add (OMP_BODY (expr), &body);
  gimplify_adjust_omp_clauses (pre_p, body, &OMP_CLAUSES (expr),
			       TREE_CODE (expr), &iterator_loops_seq);
  in_omp_construct = save_in_omp_construct;

  /* Build the GIMPLE statement corresponding to the construct.  */
  switch (TREE_CODE (expr))
    {
    case OACC_DATA:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
				      OMP_CLAUSES (expr));
      break;
    case OACC_HOST_DATA:
      if (omp_find_clause (OMP_CLAUSES (expr), OMP_CLAUSE_IF_PRESENT))
	{
	  /* if_present applies to each use_device_ptr clause.  */
	  for (tree c = OMP_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
	      OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c) = 1;
	}

      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_HOST_DATA,
				      OMP_CLAUSES (expr));
      break;
    case OACC_KERNELS:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
				      OMP_CLAUSES (expr));
      break;
    case OACC_PARALLEL:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
				      OMP_CLAUSES (expr));
      break;
    case OACC_SERIAL:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_SERIAL,
				      OMP_CLAUSES (expr));
      break;
    case OMP_SECTIONS:
      stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
      break;
    case OMP_SINGLE:
      stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
      break;
    case OMP_SCOPE:
      stmt = gimple_build_omp_scope (body, OMP_CLAUSES (expr));
      break;
    case OMP_TARGET:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
				      OMP_CLAUSES (expr), iterator_loops_seq);
      break;
    case OMP_TARGET_DATA:
      /* Put use_device_{ptr,addr} clauses last, as map clauses are supposed
	 to be evaluated before the use_device_{ptr,addr} clauses if they
	 refer to the same variables.  */
      {
	tree use_device_clauses;
	tree *pc, *uc = &use_device_clauses;
	/* Unlink use_device_* clauses into a separate list...  */
	for (pc = &OMP_CLAUSES (expr); *pc; )
	  if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_USE_DEVICE_PTR
	      || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_USE_DEVICE_ADDR)
	    {
	      *uc = *pc;
	      *pc = OMP_CLAUSE_CHAIN (*pc);
	      uc = &OMP_CLAUSE_CHAIN (*uc);
	    }
	  else
	    pc = &OMP_CLAUSE_CHAIN (*pc);
	*uc = NULL_TREE;
	/* ...and append them at the end of the remaining clauses.  */
	*pc = use_device_clauses;
	stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
					OMP_CLAUSES (expr));
      }
      break;
    case OMP_TEAMS:
      stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
      if ((ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
	gimple_omp_teams_set_host (as_a <gomp_teams *> (stmt), true);
      break;
    default:
      gcc_unreachable ();
    }

  gimplify_seq_add_stmt (pre_p, stmt);
  *expr_p = NULL_TREE;
}
19044 :
/* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
   target update constructs.  These are standalone (bodyless) directives;
   the clauses are gimplified and a GIMPLE_OMP_TARGET with the matching
   kind is appended to PRE_P, then *EXPR_P is cleared.  */

static void
gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p;
  int kind;
  gomp_target *stmt;
  enum omp_region_type ort = ORT_WORKSHARE;

  /* Select the GIMPLE target kind; OpenACC variants additionally scan
     their clauses with the ORT_ACC region type.  */
  switch (TREE_CODE (expr))
    {
    case OACC_ENTER_DATA:
      kind = GF_OMP_TARGET_KIND_OACC_ENTER_DATA;
      ort = ORT_ACC;
      break;
    case OACC_EXIT_DATA:
      kind = GF_OMP_TARGET_KIND_OACC_EXIT_DATA;
      ort = ORT_ACC;
      break;
    case OACC_UPDATE:
      kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
      ort = ORT_ACC;
      break;
    case OMP_TARGET_UPDATE:
      kind = GF_OMP_TARGET_KIND_UPDATE;
      break;
    case OMP_TARGET_ENTER_DATA:
      kind = GF_OMP_TARGET_KIND_ENTER_DATA;
      break;
    case OMP_TARGET_EXIT_DATA:
      kind = GF_OMP_TARGET_KIND_EXIT_DATA;
      break;
    default:
      gcc_unreachable ();
    }

  /* Expand any clause iterators into loops.  */
  gimple_seq iterator_loops_seq = NULL;
  remove_unused_omp_iterator_vars (&OMP_STANDALONE_CLAUSES (expr));
  build_omp_iterators_loops (&OMP_STANDALONE_CLAUSES (expr),
			     &iterator_loops_seq);

  gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p,
			     ort, TREE_CODE (expr), &iterator_loops_seq);
  gimplify_adjust_omp_clauses (pre_p, NULL, &OMP_STANDALONE_CLAUSES (expr),
			       TREE_CODE (expr), &iterator_loops_seq);
  if (TREE_CODE (expr) == OACC_UPDATE
      && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
			  OMP_CLAUSE_IF_PRESENT))
    {
      /* The runtime uses GOMP_MAP_{TO,FROM} to denote the if_present
	 clause.  */
      for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
	  switch (OMP_CLAUSE_MAP_KIND (c))
	    {
	    case GOMP_MAP_FORCE_TO:
	      OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
	      break;
	    case GOMP_MAP_FORCE_FROM:
	      OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FROM);
	      break;
	    default:
	      break;
	    }
    }
  else if (TREE_CODE (expr) == OACC_EXIT_DATA
	   && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
			       OMP_CLAUSE_FINALIZE))
    {
      /* Use GOMP_MAP_DELETE/GOMP_MAP_FORCE_FROM to denote "finalize"
	 semantics.  */
      bool have_clause = false;
      for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
	  switch (OMP_CLAUSE_MAP_KIND (c))
	    {
	    case GOMP_MAP_FROM:
	      OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_FROM);
	      have_clause = true;
	      break;
	    case GOMP_MAP_RELEASE:
	      OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_DELETE);
	      have_clause = true;
	      break;
	    case GOMP_MAP_TO_PSET:
	      /* Fortran arrays with descriptors must map that descriptor when
		 doing standalone "attach" operations (in OpenACC).  In that
		 case GOMP_MAP_TO_PSET appears by itself with no preceding
		 clause (see trans-openmp.cc:gfc_trans_omp_clauses).  */
	      break;
	    case GOMP_MAP_POINTER:
	      /* TODO PR92929: we may see these here, but they'll always follow
		 one of the clauses above, and will be handled by libgomp as
		 one group, so no handling required here.  */
	      gcc_assert (have_clause);
	      break;
	    case GOMP_MAP_DETACH:
	      OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_DETACH);
	      have_clause = false;
	      break;
	    case GOMP_MAP_STRUCT:
	    case GOMP_MAP_STRUCT_UNORD:
	      have_clause = false;
	      break;
	    default:
	      gcc_unreachable ();
	    }
    }
  stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr),
				  iterator_loops_seq);

  gimplify_seq_add_stmt (pre_p, stmt);
  *expr_p = NULL_TREE;
}
19161 :
19162 : /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
19163 : stabilized the lhs of the atomic operation as *ADDR. Return true if
19164 : EXPR is this stabilized form. */
19165 :
19166 : static bool
19167 36791 : goa_lhs_expr_p (tree expr, tree addr)
19168 : {
19169 : /* Also include casts to other type variants. The C front end is fond
19170 : of adding these for e.g. volatile variables. This is like
19171 : STRIP_TYPE_NOPS but includes the main variant lookup. */
19172 36791 : STRIP_USELESS_TYPE_CONVERSION (expr);
19173 :
19174 36791 : if (INDIRECT_REF_P (expr))
19175 : {
19176 5011 : expr = TREE_OPERAND (expr, 0);
19177 5011 : while (expr != addr
19178 92 : && (CONVERT_EXPR_P (expr)
19179 92 : || TREE_CODE (expr) == NON_LVALUE_EXPR)
19180 0 : && TREE_CODE (expr) == TREE_CODE (addr)
19181 5011 : && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
19182 : {
19183 0 : expr = TREE_OPERAND (expr, 0);
19184 0 : addr = TREE_OPERAND (addr, 0);
19185 : }
19186 5011 : if (expr == addr)
19187 : return true;
19188 92 : return (TREE_CODE (addr) == ADDR_EXPR
19189 62 : && TREE_CODE (expr) == ADDR_EXPR
19190 92 : && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
19191 : }
19192 31780 : if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
19193 : return true;
19194 : return false;
19195 : }
19196 :
/* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR.  If an
   expression does not involve the lhs, evaluate it into a temporary.
   Return 1 if the lhs appeared as a subexpression, 0 if it did not,
   or -1 if an error was encountered.

   When PRE_P is NULL this is a dry run: nothing is modified or emitted,
   only the return value matters.  Several cases below first dry-run a
   subtree before committing to rewriting it.  RHS says whether the
   subexpression is used as an rvalue (gimplify to is_gimple_val) or an
   lvalue.  TARGET_EXPR tracks the one TARGET_EXPR that contains the lhs,
   so nested walks can recognize it.  DEPTH bounds the recursion.  */

static int
goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
		    tree lhs_var, tree &target_expr, bool rhs, int depth)
{
  tree expr = *expr_p;
  int saw_lhs = 0;

  if (goa_lhs_expr_p (expr, lhs_addr))
    {
      /* The lhs itself: substitute the preloaded temporary.  */
      if (pre_p)
	*expr_p = lhs_var;
      return 1;
    }
  if (is_gimple_val (expr))
    return 0;

  /* Maximum depth of lhs in expression is for the
     __builtin_clear_padding (...), __builtin_clear_padding (...),
     __builtin_memcmp (&TARGET_EXPR <lhs, >, ...) == 0 ? ... : lhs;  */
  if (++depth > 7)
    goto finish;

  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_binary:
    case tcc_comparison:
      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
				     lhs_var, target_expr, true, depth);
      /* FALLTHRU */
    case tcc_unary:
      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
				     lhs_var, target_expr, true, depth);
      break;
    case tcc_expression:
      switch (TREE_CODE (expr))
	{
	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	case TRUTH_AND_EXPR:
	case TRUTH_OR_EXPR:
	case TRUTH_XOR_EXPR:
	case BIT_INSERT_EXPR:
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
					 lhs_addr, lhs_var, target_expr, true,
					 depth);
	  /* FALLTHRU */
	case TRUTH_NOT_EXPR:
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
					 lhs_addr, lhs_var, target_expr, true,
					 depth);
	  break;
	case MODIFY_EXPR:
	  /* Dry-run first; if the lhs is not inside, fall back to the
	     generic handling after the switch.  */
	  if (pre_p && !goa_stabilize_expr (expr_p, NULL, lhs_addr, lhs_var,
					    target_expr, true, depth))
	    break;
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
					 lhs_addr, lhs_var, target_expr, true,
					 depth);
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
					 lhs_addr, lhs_var, target_expr, false,
					 depth);
	  break;
	  /* FALLTHRU */
	  /* NOTE(review): the FALLTHRU marker above is unreachable — the
	     preceding break always executes.  Looks like a leftover from
	     an earlier structure; confirm against upstream history.  */
	case ADDR_EXPR:
	  if (pre_p && !goa_stabilize_expr (expr_p, NULL, lhs_addr, lhs_var,
					    target_expr, true, depth))
	    break;
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
					 lhs_addr, lhs_var, target_expr, false,
					 depth);
	  break;
	case COMPOUND_EXPR:
	  /* Break out any preevaluations from cp_build_modify_expr.  */
	  for (; TREE_CODE (expr) == COMPOUND_EXPR;
	       expr = TREE_OPERAND (expr, 1))
	    {
	      /* Special-case __builtin_clear_padding call before
		 __builtin_memcmp.  */
	      if (TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR)
		{
		  tree fndecl = get_callee_fndecl (TREE_OPERAND (expr, 0));
		  if (fndecl
		      && fndecl_built_in_p (fndecl, BUILT_IN_CLEAR_PADDING)
		      && VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
		      && (!pre_p
			  || goa_stabilize_expr (&TREE_OPERAND (expr, 0), NULL,
						 lhs_addr, lhs_var,
						 target_expr, true, depth)))
		    {
		      if (pre_p)
			*expr_p = expr;
		      saw_lhs = goa_stabilize_expr (&TREE_OPERAND (expr, 0),
						    pre_p, lhs_addr, lhs_var,
						    target_expr, true, depth);
		      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1),
						     pre_p, lhs_addr, lhs_var,
						     target_expr, rhs, depth);
		      return saw_lhs;
		    }
		}

	      /* Emit each preevaluation statement as-is.  */
	      if (pre_p)
		gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
	    }
	  /* Recurse on the final operand of the COMPOUND_EXPR chain.  */
	  if (!pre_p)
	    return goa_stabilize_expr (&expr, pre_p, lhs_addr, lhs_var,
				       target_expr, rhs, depth);
	  *expr_p = expr;
	  return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var,
				     target_expr, rhs, depth);
	case COND_EXPR:
	  /* Dry-run the condition; only handle the COND_EXPR specially
	     when the lhs appears in the condition itself.  */
	  if (!goa_stabilize_expr (&TREE_OPERAND (expr, 0), NULL, lhs_addr,
				   lhs_var, target_expr, true, depth))
	    break;
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
					 lhs_addr, lhs_var, target_expr, true,
					 depth);
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
					 lhs_addr, lhs_var, target_expr, true,
					 depth);
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 2), pre_p,
					 lhs_addr, lhs_var, target_expr, true,
					 depth);
	  break;
	case TARGET_EXPR:
	  if (TARGET_EXPR_INITIAL (expr))
	    {
	      if (pre_p && !goa_stabilize_expr (expr_p, NULL, lhs_addr,
						lhs_var, target_expr, true,
						depth))
		break;
	      if (expr == target_expr)
		saw_lhs = 1;
	      else
		{
		  saw_lhs = goa_stabilize_expr (&TARGET_EXPR_INITIAL (expr),
						pre_p, lhs_addr, lhs_var,
						target_expr, true, depth);
		  /* Remember the first TARGET_EXPR containing the lhs.  */
		  if (saw_lhs && target_expr == NULL_TREE && pre_p)
		    target_expr = expr;
		}
	    }
	  break;
	default:
	  break;
	}
      break;
    case tcc_reference:
      if (TREE_CODE (expr) == BIT_FIELD_REF
	  || TREE_CODE (expr) == VIEW_CONVERT_EXPR)
	saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
				       lhs_addr, lhs_var, target_expr, true,
				       depth);
      break;
    case tcc_vl_exp:
      /* Only calls to __builtin_clear_padding / __builtin_memcmp may
	 contain the lhs; walk their arguments.  */
      if (TREE_CODE (expr) == CALL_EXPR)
	{
	  if (tree fndecl = get_callee_fndecl (expr))
	    if (fndecl_built_in_p (fndecl, BUILT_IN_CLEAR_PADDING,
				   BUILT_IN_MEMCMP))
	      {
		int nargs = call_expr_nargs (expr);
		for (int i = 0; i < nargs; i++)
		  saw_lhs |= goa_stabilize_expr (&CALL_EXPR_ARG (expr, i),
						 pre_p, lhs_addr, lhs_var,
						 target_expr, true, depth);
	      }
	}
      break;
    default:
      break;
    }

 finish:
  /* No lhs inside: evaluate the whole expression into a temporary now,
     so the atomic region only contains the lhs-dependent computation.  */
  if (saw_lhs == 0 && pre_p)
    {
      enum gimplify_status gs;
      if (TREE_CODE (expr) == CALL_EXPR && VOID_TYPE_P (TREE_TYPE (expr)))
	{
	  gimplify_stmt (&expr, pre_p);
	  return saw_lhs;
	}
      else if (rhs)
	gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
      else
	gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_lvalue, fb_lvalue);
      if (gs != GS_ALL_DONE)
	saw_lhs = -1;
    }

  return saw_lhs;
}
19394 :
/* Gimplify an OMP_ATOMIC statement (also OMP_ATOMIC_READ and the two
   OMP_ATOMIC_CAPTURE_* variants).  Emits a GIMPLE_OMP_ATOMIC_LOAD of the
   lhs into a temporary, gimplifies the rhs with occurrences of the lhs
   replaced by that temporary (via goa_stabilize_expr), then emits the
   matching GIMPLE_OMP_ATOMIC_STORE.  *EXPR_P becomes the captured value
   for the capture forms, otherwise NULL.  */

static enum gimplify_status
gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
{
  tree addr = TREE_OPERAND (*expr_p, 0);
  /* OMP_ATOMIC_READ has no rhs operand.  */
  tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
	     ? NULL : TREE_OPERAND (*expr_p, 1);
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
  tree tmp_load;
  gomp_atomic_load *loadstmt;
  gomp_atomic_store *storestmt;
  tree target_expr = NULL_TREE;

  /* Temporary holding the atomically loaded old value of the lhs.  */
  tmp_load = create_tmp_reg (type);
  if (rhs
      && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load, target_expr,
			     true, 0) < 0)
    return GS_ERROR;

  if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
      != GS_ALL_DONE)
    return GS_ERROR;

  loadstmt = gimple_build_omp_atomic_load (tmp_load, addr,
					   OMP_ATOMIC_MEMORY_ORDER (*expr_p));
  gimplify_seq_add_stmt (pre_p, loadstmt);
  if (rhs)
    {
      /* BIT_INSERT_EXPR is not valid for non-integral bitfield
	 representatives.  Use BIT_FIELD_REF on the lhs instead.  */
      tree rhsarg = rhs;
      if (TREE_CODE (rhs) == COND_EXPR)
	rhsarg = TREE_OPERAND (rhs, 1);
      if (TREE_CODE (rhsarg) == BIT_INSERT_EXPR
	  && !INTEGRAL_TYPE_P (TREE_TYPE (tmp_load)))
	{
	  tree bitpos = TREE_OPERAND (rhsarg, 2);
	  tree op1 = TREE_OPERAND (rhsarg, 1);
	  tree bitsize;
	  tree tmp_store = tmp_load;
	  /* For capture-old, keep the pristine old value in tmp_load and
	     modify a copy for the store.  */
	  if (TREE_CODE (*expr_p) == OMP_ATOMIC_CAPTURE_OLD)
	    tmp_store = get_initialized_tmp_var (tmp_load, pre_p);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (op1)))
	    bitsize = bitsize_int (TYPE_PRECISION (TREE_TYPE (op1)));
	  else
	    bitsize = TYPE_SIZE (TREE_TYPE (op1));
	  gcc_assert (TREE_OPERAND (rhsarg, 0) == tmp_load);
	  /* Store the new bits through a BIT_FIELD_REF of the temp.  */
	  tree t = build2_loc (EXPR_LOCATION (rhsarg),
			       MODIFY_EXPR, void_type_node,
			       build3_loc (EXPR_LOCATION (rhsarg),
					   BIT_FIELD_REF, TREE_TYPE (op1),
					   tmp_store, bitsize, bitpos), op1);
	  if (TREE_CODE (rhs) == COND_EXPR)
	    t = build3_loc (EXPR_LOCATION (rhs), COND_EXPR, void_type_node,
			    TREE_OPERAND (rhs, 0), t, void_node);
	  gimplify_and_add (t, pre_p);
	  rhs = tmp_store;
	}
      /* Compare-and-swap style atomics may keep a COND_EXPR on the rhs.  */
      bool save_allow_rhs_cond_expr = gimplify_ctxp->allow_rhs_cond_expr;
      if (TREE_CODE (rhs) == COND_EXPR)
	gimplify_ctxp->allow_rhs_cond_expr = true;
      enum gimplify_status gs = gimplify_expr (&rhs, pre_p, NULL,
					       is_gimple_val, fb_rvalue);
      gimplify_ctxp->allow_rhs_cond_expr = save_allow_rhs_cond_expr;
      if (gs != GS_ALL_DONE)
	return GS_ERROR;
    }

  if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
    rhs = tmp_load;
  storestmt
    = gimple_build_omp_atomic_store (rhs, OMP_ATOMIC_MEMORY_ORDER (*expr_p));
  if (TREE_CODE (*expr_p) != OMP_ATOMIC_READ && OMP_ATOMIC_WEAK (*expr_p))
    {
      gimple_omp_atomic_set_weak (loadstmt);
      gimple_omp_atomic_set_weak (storestmt);
    }
  gimplify_seq_add_stmt (pre_p, storestmt);
  /* Select what value, if any, the whole construct yields.  */
  switch (TREE_CODE (*expr_p))
    {
    case OMP_ATOMIC_READ:
    case OMP_ATOMIC_CAPTURE_OLD:
      *expr_p = tmp_load;
      gimple_omp_atomic_set_need_value (loadstmt);
      break;
    case OMP_ATOMIC_CAPTURE_NEW:
      *expr_p = rhs;
      gimple_omp_atomic_set_need_value (storestmt);
      break;
    default:
      *expr_p = NULL;
      break;
    }

  return GS_ALL_DONE;
}
19492 :
19493 : /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
19494 : body, and adding some EH bits. */
19495 :
 19496 : static enum gimplify_status
 19497 478 : gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
 19498 : {
 19499 478 : tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
 19500 478 : gimple *body_stmt;
 19501 478 : gtransaction *trans_stmt;
 19502 478 : gimple_seq body = NULL;
 19503 478 : int subcode = 0;
 19504 :
 19505 : /* Wrap the transaction body in a BIND_EXPR so we have a context
 19506 : where to put decls for OMP. */
 19507 478 : if (TREE_CODE (tbody) != BIND_EXPR)
 19508 : {
 19509 439 : tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
 19510 439 : TREE_SIDE_EFFECTS (bind) = 1;
 19511 439 : SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
 19512 439 : TRANSACTION_EXPR_BODY (expr) = bind;
 19513 : }
 19514 :
 19515 478 : push_gimplify_context ();
 : /* If the transaction is used in value context, voidify_wrapper_expr
 : yields a temporary; it is handed back through *EXPR_P below with
 : GS_OK so gimplification continues on that value. */
 19516 478 : temp = voidify_wrapper_expr (*expr_p, NULL);
 19517 :
 19518 478 : body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
 19519 478 : pop_gimplify_context (body_stmt);
 19520 :
 19521 478 : trans_stmt = gimple_build_transaction (body);
 : /* OUTER takes precedence over RELAXED if both flags are set. */
 19522 478 : if (TRANSACTION_EXPR_OUTER (expr))
 19523 : subcode = GTMA_IS_OUTER;
 19524 447 : else if (TRANSACTION_EXPR_RELAXED (expr))
 19525 81 : subcode = GTMA_IS_RELAXED;
 19526 478 : gimple_transaction_set_subcode (trans_stmt, subcode);
 19527 :
 19528 478 : gimplify_seq_add_stmt (pre_p, trans_stmt);
 19529 :
 19530 478 : if (temp)
 19531 : {
 19532 76 : *expr_p = temp;
 19533 76 : return GS_OK;
 19534 : }
 19535 :
 19536 402 : *expr_p = NULL_TREE;
 19537 402 : return GS_ALL_DONE;
 19538 : }
19539 :
19540 : /* Gimplify an OMP_ORDERED construct. EXPR is the tree version. BODY
19541 : is the OMP_BODY of the original EXPR (which has already been
19542 : gimplified so it's not present in the EXPR).
19543 :
19544 : Return the gimplified GIMPLE_OMP_ORDERED tuple. */
19545 :
 19546 : static gimple *
 19547 1950 : gimplify_omp_ordered (tree expr, gimple_seq body)
 19548 : {
 19549 1950 : tree c, decls;
 19550 1950 : int failures = 0;
 19551 1950 : unsigned int i;
 19552 1950 : tree source_c = NULL_TREE;
 19553 1950 : tree sink_c = NULL_TREE;
 19554 :
 : /* Validate doacross (depend(sink:)/depend(source)) clauses against
 : the enclosing ordered loop recorded in gimplify_omp_ctxp. */
 19555 1950 : if (gimplify_omp_ctxp)
 19556 : {
 19557 3291 : for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
 19558 1515 : if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS
 19559 1515 : && gimplify_omp_ctxp->loop_iter_var.is_empty ())
 19560 : {
 19561 74 : error_at (OMP_CLAUSE_LOCATION (c),
 19562 : "%<ordered%> construct with %qs clause must be "
 19563 : "closely nested inside a loop with %<ordered%> clause",
 19564 74 : OMP_CLAUSE_DOACROSS_DEPEND (c) ? "depend" : "doacross");
 19565 74 : failures++;
 19566 : }
 19567 1441 : else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS
 19568 1441 : && OMP_CLAUSE_DOACROSS_KIND (c) == OMP_CLAUSE_DOACROSS_SINK)
 19569 : {
 19570 642 : bool fail = false;
 19571 642 : sink_c = c;
 19572 642 : if (OMP_CLAUSE_DECL (c) == NULL_TREE)
 19573 72 : continue; /* omp_cur_iteration - 1 */
 : /* loop_iter_var is laid out in pairs: slot [2*i] holds the
 : user's iteration variable for loop level I, slot [2*i+1]
 : the variable to substitute for it. */
 19574 570 : for (decls = OMP_CLAUSE_DECL (c), i = 0;
 19575 3201 : decls && TREE_CODE (decls) == TREE_LIST;
 19576 2631 : decls = TREE_CHAIN (decls), ++i)
 19577 2631 : if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2)
 19578 4 : continue;
 19579 5254 : else if (TREE_VALUE (decls)
 19580 2627 : != gimplify_omp_ctxp->loop_iter_var[2 * i])
 19581 : {
 19582 8 : error_at (OMP_CLAUSE_LOCATION (c),
 19583 : "variable %qE is not an iteration "
 19584 : "of outermost loop %d, expected %qE",
 19585 8 : TREE_VALUE (decls), i + 1,
 19586 8 : gimplify_omp_ctxp->loop_iter_var[2 * i]);
 19587 8 : fail = true;
 19588 8 : failures++;
 19589 : }
 19590 : else
 19591 5238 : TREE_VALUE (decls)
 19592 2619 : = gimplify_omp_ctxp->loop_iter_var[2 * i + 1];
 : /* After the walk, I must equal the loop depth; otherwise the
 : sink vector was too short (or too long, caught above). */
 19593 1136 : if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2)
 19594 : {
 19595 16 : error_at (OMP_CLAUSE_LOCATION (c),
 19596 : "number of variables in %qs clause with "
 19597 : "%<sink%> modifier does not match number of "
 19598 : "iteration variables",
 19599 16 : OMP_CLAUSE_DOACROSS_DEPEND (c)
 19600 : ? "depend" : "doacross");
 19601 16 : failures++;
 19602 : }
 19603 : }
 19604 799 : else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS
 19605 799 : && OMP_CLAUSE_DOACROSS_KIND (c) == OMP_CLAUSE_DOACROSS_SOURCE)
 19606 : {
 19607 475 : if (source_c)
 19608 : {
 19609 4 : error_at (OMP_CLAUSE_LOCATION (c),
 19610 : "more than one %qs clause with %<source%> "
 19611 : "modifier on an %<ordered%> construct",
 19612 4 : OMP_CLAUSE_DOACROSS_DEPEND (source_c)
 19613 : ? "depend" : "doacross");
 19614 4 : failures++;
 19615 : }
 19616 : else
 19617 : source_c = c;
 19618 : }
 19619 : }
 : /* source and sink modifiers are mutually exclusive on one construct. */
 19620 1950 : if (source_c && sink_c)
 19621 : {
 19622 4 : error_at (OMP_CLAUSE_LOCATION (source_c),
 19623 : "%qs clause with %<source%> modifier specified "
 19624 : "together with %qs clauses with %<sink%> modifier "
 19625 : "on the same construct",
 19626 4 : OMP_CLAUSE_DOACROSS_DEPEND (source_c) ? "depend" : "doacross",
 19627 4 : OMP_CLAUSE_DOACROSS_DEPEND (sink_c) ? "depend" : "doacross");
 19628 4 : failures++;
 19629 : }
 19630 :
 : /* On any diagnostic, drop the construct entirely and emit a nop. */
 19631 1950 : if (failures)
 19632 102 : return gimple_build_nop ();
 19633 1848 : return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr));
 19634 : }
19635 :
19636 : /* Gimplify an OMP_INTEROP statement. */
19637 :
 19638 : static enum gimplify_status
 19639 612 : gimplify_omp_interop (tree *expr_p, gimple_seq *pre_p)
 19640 : {
 19641 612 : tree expr = *expr_p;
 19642 :
 : /* The construct is bodyless; only its clauses need gimplifying
 : before building the GIMPLE_OMP_INTEROP statement. */
 19643 612 : gimplify_scan_omp_clauses (&OMP_INTEROP_CLAUSES (expr), pre_p, ORT_TASK,
 19644 : OMP_INTEROP);
 19645 612 : gimple *stmt = gimple_build_omp_interop (OMP_INTEROP_CLAUSES (expr));
 19646 612 : gimplify_seq_add_stmt (pre_p, stmt);
 19647 612 : *expr_p = NULL_TREE;
 19648 612 : return GS_ALL_DONE;
 19649 : }
19651 : /* Callback for walk_tree to find an IFN_GOMP_DISPATCH. */
19652 :
 19653 : static tree
 19654 3100 : find_ifn_gomp_dispatch (tree *tp, int *, void *modify)
 19655 : {
 19656 3100 : tree t = *tp;
 19657 :
 : /* MODIFY is really a tree *: it records the MODIFY_EXPR most
 : recently seen on the way down, so that a dispatch call whose
 : result is assigned is returned together with its assignment
 : rather than as the bare CALL_EXPR. */
 19658 3100 : if (TREE_CODE (t) == CALL_EXPR && CALL_EXPR_IFN (t) == IFN_GOMP_DISPATCH)
 19659 1414 : return *(tree *) modify ? *(tree *) modify : *tp;
 19660 :
 : /* Remember an enclosing assignment for a later CALL_EXPR visit. */
 19661 2248 : if (TREE_CODE (t) == MODIFY_EXPR)
 19662 513 : *(tree *) modify = *tp;
 19663 :
 19664 : return NULL_TREE;
 19665 : }
19667 : /* Gimplify an OMP_DISPATCH construct. */
19668 :
 19669 : static enum gimplify_status
 19670 852 : gimplify_omp_dispatch (tree *expr_p, gimple_seq *pre_p)
 19671 : {
 19672 852 : tree expr = *expr_p;
 19673 852 : gimple_seq body = NULL;
 19674 :
 19675 852 : gimplify_scan_omp_clauses (&OMP_DISPATCH_CLAUSES (expr), pre_p, ORT_DISPATCH,
 19676 : OMP_DISPATCH);
 19677 852 : push_gimplify_context ();
 19678 :
 19679 : // If device clause, adjust ICV
 19680 852 : tree device
 19681 852 : = omp_find_clause (OMP_DISPATCH_CLAUSES (expr), OMP_CLAUSE_DEVICE);
 19682 : // If no device clause exists but an interop clause with a single list
 19683 : // item, use it to obtain the device number.
 19684 852 : if (device)
 19685 272 : device = OMP_CLAUSE_DEVICE_ID (device);
 19686 : else
 19687 : {
 19688 580 : tree first_interop_obj
 19689 580 : = omp_find_clause (OMP_DISPATCH_CLAUSES (expr), OMP_CLAUSE_INTEROP);
 : // Clear first_interop_obj again if there is more than one
 : // interop list item; a unique item is required to infer the
 : // device number.
 19690 580 : if (first_interop_obj)
 19691 96 : for (tree c = TREE_CHAIN (first_interop_obj); c; c = TREE_CHAIN (c))
 19692 8 : if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_INTEROP)
 19693 : {
 19694 : first_interop_obj = NULL_TREE;
 19695 : break;
 19696 : }
 19697 95 : if (first_interop_obj)
 19698 : {
 : // Synthesize a device clause holding a temporary and splice it
 : // into the clause chain right after the interop clause.
 19699 88 : device = create_tmp_var (integer_type_node);
 19700 88 : tree c = build_omp_clause (OMP_CLAUSE_LOCATION (first_interop_obj),
 19701 : OMP_CLAUSE_DEVICE);
 19702 88 : OMP_CLAUSE_DEVICE_ID (c) = device;
 19703 88 : TREE_CHAIN (c) = TREE_CHAIN (first_interop_obj);
 19704 88 : TREE_CHAIN (first_interop_obj) = c;
 19705 88 : first_interop_obj = OMP_CLAUSE_DECL (first_interop_obj);
 19706 : /* device = omp_get_interop_int (obj, omp_ipr_device_num, NULL); */
 : // NOTE(review): the -5 below is assumed to be omp_ipr_device_num
 : // per the comment above -- keep in sync with omp.h.
 19707 88 : tree fn = builtin_decl_explicit (BUILT_IN_OMP_GET_INTEROP_INT);
 19708 88 : fn = build_call_expr (fn, 3, first_interop_obj,
 19709 : build_int_cst (integer_type_node, -5),
 19710 : null_pointer_node);
 19711 88 : gimplify_assign (device, fold_convert (integer_type_node, fn), &body);
 19712 : }
 19713 : }
 : // Unless the device id is the compile-time constant -1
 : // (omp_initial_device), save the current default-device-var ICV
 : // and set it to DEVICE for the duration of the dispatch region.
 19714 852 : tree saved_device_icv = NULL_TREE;
 19715 852 : if (device
 19716 852 : && (TREE_CODE (device) != INTEGER_CST
 19717 709 : || !wi::eq_p (wi::to_wide (device), -1 /* omp_initial_device */)))
 19718 : {
 19719 : // Save current default-device-var ICV
 19720 360 : saved_device_icv = create_tmp_var (integer_type_node);
 19721 360 : tree fn = builtin_decl_explicit (BUILT_IN_OMP_GET_DEFAULT_DEVICE);
 19722 360 : gcall *call = gimple_build_call (fn, 0);
 19723 360 : gimple_call_set_lhs (call, saved_device_icv);
 19724 360 : gimplify_seq_add_stmt (&body, call);
 19725 :
 19726 : // Set default device
 19727 360 : fn = builtin_decl_explicit (BUILT_IN_OMP_SET_DEFAULT_DEVICE);
 19728 360 : call = gimple_build_call (fn, 1, device);
 19729 360 : gimplify_seq_add_stmt (&body, call);
 19730 : }
 19731 :
 19732 : // If the novariants and nocontext clauses are not compile-time constants,
 19733 : // we need to generate code for all possible cases:
 19734 : // if (novariants) // implies nocontext
 19735 : // base()
 19736 : // else if (nocontext)
 19737 : // variant1()
 19738 : // else
 19739 : // variant2()
 19740 852 : tree *dispatch_body_p = &OMP_DISPATCH_BODY (expr);
 19741 852 : if (TREE_CODE (*dispatch_body_p) == BIND_EXPR)
 19742 28 : dispatch_body_p = &BIND_EXPR_BODY (*dispatch_body_p);
 19743 852 : tree dispatch_body = *dispatch_body_p;
 19744 :
 19745 : // Look for IFN_GOMP_DISPATCH and extract the base function call
 19746 852 : tree base_call_expr = NULL_TREE;
 19747 852 : if (TREE_CODE (dispatch_body) == STATEMENT_LIST)
 19748 243 : for (tree_stmt_iterator tsi = tsi_start (dispatch_body); !tsi_end_p (tsi);
 19749 187 : tsi_next (&tsi))
 19750 : {
 19751 243 : tree modify = NULL_TREE;
 19752 243 : tree stmt = tsi_stmt (tsi);
 19753 243 : base_call_expr
 19754 243 : = walk_tree (&stmt, find_ifn_gomp_dispatch, &modify, NULL);
 19755 243 : if (base_call_expr != NULL_TREE)
 19756 : break;
 19757 : }
 19758 : else
 19759 : {
 19760 796 : tree modify = NULL_TREE;
 19761 796 : base_call_expr
 19762 796 : = walk_tree (dispatch_body_p, find_ifn_gomp_dispatch, &modify, NULL);
 19763 : }
 19764 852 : gcc_assert (base_call_expr != NULL_TREE);
 19765 :
 : // If the dispatch call's result is assigned, remember the
 : // destination and peel off the assignment.
 19766 852 : tree dst = NULL_TREE;
 19767 852 : if (TREE_CODE (base_call_expr) == MODIFY_EXPR)
 19768 : {
 19769 290 : dst = TREE_OPERAND (base_call_expr, 0);
 19770 290 : base_call_expr = TREE_OPERAND (base_call_expr, 1);
 19771 : }
 19772 :
 : // Strip conversions wrapped around the IFN_GOMP_DISPATCH call.
 19773 880 : while (TREE_CODE (base_call_expr) == FLOAT_EXPR
 19774 : || TREE_CODE (base_call_expr) == CONVERT_EXPR
 19775 : || TREE_CODE (base_call_expr) == COMPLEX_EXPR
 19776 : || TREE_CODE (base_call_expr) == INDIRECT_REF
 19777 880 : || TREE_CODE (base_call_expr) == NOP_EXPR
 19778 28 : base_call_expr = TREE_OPERAND (base_call_expr, 0);
 19779 :
 19780 852 : gcc_assert (CALL_EXPR_IFN (base_call_expr) == IFN_GOMP_DISPATCH);
 19781 852 : base_call_expr = CALL_EXPR_ARG (base_call_expr, 0);
 19782 :
 19783 852 : tree base_fndecl = get_callee_fndecl (base_call_expr);
 19784 852 : if (base_fndecl != NULL_TREE)
 19785 : {
 19786 844 : if (DECL_VIRTUAL_P (base_fndecl))
 19787 : {
 19788 6 : error_at (
 19789 3 : EXPR_LOCATION (base_call_expr),
 19790 : "%qD is a virtual function but only a direct call is allowed "
 19791 : "in a dispatch construct",
 19792 3 : DECL_NAME (base_fndecl));
 19793 : }
 19794 :
 19795 : /* We are not actually going to expand the variant call or use
 19796 : the result of omp_get_dynamic candidates here; only check that
 19797 : it does not trivially resolve to a call to the base function
 19798 : so that we can avoid some extra work in building code that's
 19799 : not needed in that case. */
 19800 844 : tree construct_context = omp_get_construct_context ();
 19801 844 : vec<struct omp_variant> all_candidates
 19802 844 : = omp_declare_variant_candidates (base_fndecl, construct_context);
 19803 844 : gcc_assert (!all_candidates.is_empty ());
 19804 844 : vec<struct omp_variant> candidates
 19805 844 : = omp_get_dynamic_candidates (all_candidates, construct_context);
 19806 844 : tree variant_fndecl
 19807 1676 : = (candidates.length () == 1 ? candidates[0].alternative : NULL_TREE);
 19808 :
 19809 844 : if (base_fndecl != variant_fndecl
 19810 844 : && (omp_has_novariants () == -1 || omp_has_nocontext () == -1))
 19811 : {
 : // Collect the (at most one each) non-constant novariants and
 : // nocontext conditions from the clause list.
 19812 22 : tree novariants_clause = NULL_TREE, nocontext_clause = NULL_TREE,
 19813 22 : novariants_cond = NULL_TREE, nocontext_cond = NULL_TREE;
 19814 59 : for (tree c = OMP_DISPATCH_CLAUSES (expr); c; c = TREE_CHAIN (c))
 19815 : {
 19816 37 : if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NOVARIANTS
 19817 37 : && !integer_zerop (OMP_CLAUSE_NOVARIANTS_EXPR (c)))
 19818 : {
 19819 16 : gcc_assert (novariants_cond == NULL_TREE);
 19820 16 : novariants_clause = c;
 19821 16 : novariants_cond = OMP_CLAUSE_NOVARIANTS_EXPR (c);
 19822 : }
 19823 21 : else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NOCONTEXT
 19824 21 : && !integer_zerop (OMP_CLAUSE_NOCONTEXT_EXPR (c)))
 19825 : {
 19826 16 : gcc_assert (nocontext_cond == NULL_TREE);
 19827 16 : nocontext_clause = c;
 19828 16 : nocontext_cond = OMP_CLAUSE_NOCONTEXT_EXPR (c);
 19829 : }
 19830 : }
 19831 22 : gcc_assert (novariants_cond != NULL_TREE
 19832 : || nocontext_cond != NULL_TREE);
 19833 :
 19834 22 : enum gimplify_status ret
 19835 22 : = gimplify_expr (&novariants_cond, &body, NULL, is_gimple_val,
 19836 : fb_rvalue);
 19837 22 : if (ret == GS_ERROR || ret == GS_UNHANDLED)
 19838 0 : return ret;
 19839 22 : ret = gimplify_expr (&nocontext_cond, &body, NULL, is_gimple_val,
 19840 : fb_rvalue);
 19841 22 : if (ret == GS_ERROR || ret == GS_UNHANDLED)
 19842 : return ret;
 19843 :
 19844 22 : tree end_label = create_artificial_label (UNKNOWN_LOCATION);
 19845 :
 : // if (novariants) base(); -- rebuilt with the clause forced to
 : // true so the recursive gimplification resolves to the base.
 19846 22 : if (novariants_cond != NULL_TREE)
 19847 : {
 19848 16 : tree base_label = create_artificial_label (UNKNOWN_LOCATION);
 19849 16 : tree cond_label = create_artificial_label (UNKNOWN_LOCATION);
 19850 16 : gcond *novariants_cond_stmt
 19851 16 : = gimple_build_cond_from_tree (novariants_cond, base_label,
 19852 : cond_label);
 19853 16 : gimplify_seq_add_stmt (&body, novariants_cond_stmt);
 19854 :
 19855 16 : gimplify_seq_add_stmt (&body, gimple_build_label (base_label));
 19856 16 : tree base_call_expr2 = copy_node (base_call_expr);
 19857 16 : base_call_expr2
 19858 16 : = build_call_expr_internal_loc (EXPR_LOCATION (base_call_expr2),
 19859 : IFN_GOMP_DISPATCH,
 19860 16 : TREE_TYPE (base_call_expr2), 1,
 19861 : base_call_expr2);
 : // NOTE(review): DISPATCH_BODY is tested here but DST was set
 : // from BASE_CALL_EXPR being a MODIFY_EXPR; when the body is a
 : // STATEMENT_LIST wrapping the assignment the two conditions
 : // can differ -- confirm they always agree (likewise twice
 : // below).
 19862 16 : if (TREE_CODE (dispatch_body) == MODIFY_EXPR)
 19863 : {
 19864 16 : base_call_expr2 = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst,
 19865 : base_call_expr2);
 19866 : }
 19867 16 : OMP_CLAUSE_NOVARIANTS_EXPR (novariants_clause)
 19868 16 : = boolean_true_node;
 19869 16 : gimplify_and_add (base_call_expr2, &body);
 19870 16 : gimplify_seq_add_stmt (&body, gimple_build_goto (end_label));
 19871 :
 19872 16 : OMP_CLAUSE_NOVARIANTS_EXPR (novariants_clause)
 19873 16 : = boolean_false_node;
 19874 16 : gimplify_seq_add_stmt (&body, gimple_build_label (cond_label));
 19875 : }
 19876 :
 : // else if (nocontext) variant1(); -- gimplified with the
 : // nocontext clause forced to true.
 19877 22 : if (nocontext_cond != NULL_TREE)
 19878 : {
 19879 16 : tree variant1_label = create_artificial_label (UNKNOWN_LOCATION);
 19880 16 : tree variant2_label = create_artificial_label (UNKNOWN_LOCATION);
 19881 16 : gcond *nocontext_cond_stmt
 19882 16 : = gimple_build_cond_from_tree (nocontext_cond, variant1_label,
 19883 : variant2_label);
 19884 16 : gimplify_seq_add_stmt (&body, nocontext_cond_stmt);
 19885 :
 19886 32 : gimplify_seq_add_stmt (&body,
 19887 16 : gimple_build_label (variant1_label));
 19888 16 : tree variant_call_expr = copy_node (base_call_expr);
 19889 32 : variant_call_expr = build_call_expr_internal_loc (
 19890 16 : EXPR_LOCATION (variant_call_expr), IFN_GOMP_DISPATCH,
 19891 16 : TREE_TYPE (variant_call_expr), 1, variant_call_expr);
 19892 16 : if (TREE_CODE (dispatch_body) == MODIFY_EXPR)
 19893 : {
 19894 16 : variant_call_expr = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst,
 19895 : variant_call_expr);
 19896 : }
 19897 16 : OMP_CLAUSE_NOCONTEXT_EXPR (nocontext_clause) = boolean_true_node;
 19898 16 : gimplify_and_add (variant_call_expr, &body);
 19899 16 : gimplify_seq_add_stmt (&body, gimple_build_goto (end_label));
 19900 16 : OMP_CLAUSE_NOCONTEXT_EXPR (nocontext_clause) = boolean_false_node;
 19901 32 : gimplify_seq_add_stmt (&body,
 19902 16 : gimple_build_label (variant2_label));
 19903 : }
 19904 :
 : // Final else: variant2(), with both clauses back to false.
 19905 22 : tree variant_call_expr = base_call_expr;
 19906 22 : variant_call_expr
 19907 22 : = build_call_expr_internal_loc (EXPR_LOCATION (variant_call_expr),
 19908 : IFN_GOMP_DISPATCH,
 19909 22 : TREE_TYPE (variant_call_expr), 1,
 19910 : variant_call_expr);
 19911 22 : if (TREE_CODE (dispatch_body) == MODIFY_EXPR)
 19912 : {
 19913 22 : variant_call_expr
 19914 22 : = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, variant_call_expr);
 19915 : }
 19916 22 : gimplify_and_add (variant_call_expr, &body);
 19917 22 : gimplify_seq_add_stmt (&body, gimple_build_label (end_label));
 19918 : }
 19919 : else
 19920 822 : gimplify_and_add (OMP_DISPATCH_BODY (expr), &body);
 19921 : }
 19922 : else
 19923 8 : gimplify_and_add (OMP_DISPATCH_BODY (expr), &body);
 19924 :
 19925 : // Restore default-device-var ICV
 19926 852 : if (saved_device_icv != NULL_TREE)
 19927 : {
 19928 360 : tree fn = builtin_decl_explicit (BUILT_IN_OMP_SET_DEFAULT_DEVICE);
 19929 360 : gcall *call = gimple_build_call (fn, 1, saved_device_icv);
 19930 360 : gimplify_seq_add_stmt (&body, call);
 19931 : }
 19932 :
 19933 : // Wrap dispatch body into a bind
 19934 852 : gimple *bind = gimple_build_bind (NULL_TREE, body, NULL_TREE);
 19935 852 : pop_gimplify_context (bind);
 19936 :
 19937 : // Manually tear down context created by gimplify_scan_omp_clauses to avoid a
 19938 : // call to gimplify_adjust_omp_clauses
 19939 852 : gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
 19940 852 : if (ctx != NULL)
 19941 : {
 19942 852 : gcc_assert (ctx->code == OMP_DISPATCH);
 19943 852 : gimplify_omp_ctxp = ctx->outer_context;
 19944 852 : delete_omp_context (ctx);
 19945 : }
 19946 :
 19947 : // Remove nowait as it has no effect on dispatch (OpenMP 5.2), device as it
 19948 : // has been handled above, and depend as the front end handled it by inserting
 19949 : // taskwait.
 : // NOTE(review): the break below stops after the first matching
 : // clause, so at most one of nowait/depend/device is removed per
 : // invocation -- confirm that more than one cannot occur here.
 19950 852 : tree *dispatch_clauses_ptr = &OMP_DISPATCH_CLAUSES (expr);
 19951 1445 : for (tree c = *dispatch_clauses_ptr; c; c = *dispatch_clauses_ptr)
 19952 : {
 19953 991 : if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NOWAIT
 19954 : || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
 19955 : || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEVICE)
 19956 : {
 19957 398 : *dispatch_clauses_ptr = OMP_CLAUSE_CHAIN (c);
 19958 398 : break;
 19959 : }
 19960 : else
 19961 593 : dispatch_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
 19962 : }
 19963 :
 19964 852 : gimple *stmt = gimple_build_omp_dispatch (bind, OMP_DISPATCH_CLAUSES (expr));
 19965 852 : gimplify_seq_add_stmt (pre_p, stmt);
 19966 852 : *expr_p = NULL_TREE;
 19967 852 : return GS_ALL_DONE;
 19968 : }
19969 :
19970 : /* Expand a metadirective that has been resolved at gimplification time
19971 : into the candidate directive variants in CANDIDATES. */
19972 :
 19973 : static enum gimplify_status
 19974 170 : expand_omp_metadirective (vec<struct omp_variant> &candidates,
 19975 : gimple_seq *pre_p)
 19976 : {
 19977 170 : auto_vec<tree> selectors;
 19978 170 : auto_vec<tree> directive_labels;
 19979 170 : auto_vec<gimple_seq> directive_bodies;
 19980 170 : tree body_label = NULL_TREE;
 19981 170 : tree end_label = create_artificial_label (UNKNOWN_LOCATION);
 19982 :
 19983 : /* Construct bodies for each candidate. */
 19984 415 : for (unsigned i = 0; i < candidates.length(); i++)
 19985 : {
 19986 245 : struct omp_variant &candidate = candidates[i];
 19987 245 : gimple_seq body = NULL;
 19988 :
 19989 245 : selectors.safe_push (omp_dynamic_cond (candidate.selector,
 19990 : find_supercontext ()));
 19991 245 : directive_labels.safe_push (create_artificial_label (UNKNOWN_LOCATION));
 19992 :
 19993 245 : gimplify_seq_add_stmt (&body,
 19994 245 : gimple_build_label (directive_labels.last ()));
 19995 245 : if (candidate.alternative != NULL_TREE)
 19996 245 : gimplify_stmt (&candidate.alternative, &body);
 : /* A candidate body is shared across variants: the first candidate
 : that has one emits it under BODY_LABEL, later ones just jump
 : there. */
 19997 245 : if (candidate.body != NULL_TREE)
 19998 : {
 19999 35 : if (body_label != NULL_TREE)
 20000 0 : gimplify_seq_add_stmt (&body, gimple_build_goto (body_label));
 20001 : else
 20002 : {
 20003 35 : body_label = create_artificial_label (UNKNOWN_LOCATION);
 20004 35 : gimplify_seq_add_stmt (&body, gimple_build_label (body_label));
 20005 35 : gimplify_stmt (&candidate.body, &body);
 20006 : }
 20007 : }
 20008 :
 20009 245 : directive_bodies.safe_push (body);
 20010 : }
 20011 :
 : /* cond_labels[i] is the point where selector I is tested; slot 0 is
 : unused since the first test falls straight through, and the last
 : slot aliases the final (unconditional) candidate's label. */
 20012 170 : auto_vec<tree> cond_labels;
 20013 :
 20014 170 : cond_labels.safe_push (NULL_TREE);
 20015 376 : for (unsigned i = 1; i < candidates.length () - 1; i++)
 20016 18 : cond_labels.safe_push (create_artificial_label (UNKNOWN_LOCATION));
 20017 170 : if (candidates.length () > 1)
 20018 57 : cond_labels.safe_push (directive_labels.last ());
 20019 :
 20020 : /* Generate conditionals to test each dynamic selector in turn, executing
 20021 : the directive candidate if successful. */
 20022 490 : for (unsigned i = 0; i < candidates.length () - 1; i++)
 20023 : {
 20024 75 : if (i != 0)
 20025 18 : gimplify_seq_add_stmt (pre_p, gimple_build_label (cond_labels [i]));
 20026 :
 20027 75 : enum gimplify_status ret = gimplify_expr (&selectors[i], pre_p, NULL,
 20028 : is_gimple_val, fb_rvalue);
 20029 75 : if (ret == GS_ERROR || ret == GS_UNHANDLED)
 20030 : return ret;
 20031 :
 20032 75 : gcond *cond_stmt
 20033 75 : = gimple_build_cond_from_tree (selectors[i], directive_labels[i],
 20034 75 : cond_labels[i + 1]);
 20035 :
 20036 75 : gimplify_seq_add_stmt (pre_p, cond_stmt);
 20037 75 : gimplify_seq_add_seq (pre_p, directive_bodies[i]);
 20038 75 : gimplify_seq_add_stmt (pre_p, gimple_build_goto (end_label));
 20039 : }
 20040 :
 : /* The last candidate is unconditional: emit its body directly. */
 20041 170 : gimplify_seq_add_seq (pre_p, directive_bodies.last ());
 20042 170 : gimplify_seq_add_stmt (pre_p, gimple_build_label (end_label));
 20043 :
 20044 170 : return GS_ALL_DONE;
 20045 170 : }
20046 :
20047 : /* Expand a variant construct that requires late resolution in the ompdevlow
20048 : pass. It's a bit easier to do this in tree form and then gimplify that,
20049 : than to emit gimple. The output is going to look something like:
20050 :
20051 : switch_var = OMP_NEXT_VARIANT (0, state);
20052 : loop_label:
20053 : switch (switch_var)
20054 : {
20055 : case 1:
20056 : if (dynamic_selector_predicate_1)
20057 : {
20058 : alternative_1;
20059 : goto end_label;
20060 : }
20061 : else
20062 : {
20063 : switch_var = OMP_NEXT_VARIANT (1, state);
20064 : goto loop_label;
20065 : }
20066 : case 2:
20067 : ...
20068 : }
20069 : end_label:
20070 :
20071 : OMP_NEXT_VARIANT is a magic cookie that is replaced with the switch variable
20072 : index of the next variant to try, after late resolution. */
20073 :
 20074 : static tree
 20075 16 : expand_late_variant_directive (vec<struct omp_variant> all_candidates,
 20076 : tree construct_context)
 20077 : {
 20078 16 : tree body_label = NULL_TREE;
 20079 16 : tree standalone_body = NULL_TREE;
 20080 16 : tree loop_label = create_artificial_label (UNKNOWN_LOCATION);
 20081 16 : tree end_label = create_artificial_label (UNKNOWN_LOCATION);
 20082 32 : tree selectors = make_tree_vec (all_candidates.length ());
 20083 16 : tree switch_body = NULL_TREE;
 20084 16 : tree switch_var = create_tmp_var (integer_type_node, "variant");
 : /* STATE packages the construct context together with the selector
 : vector; the OMP_NEXT_VARIANT cookies built below carry it to the
 : pass that performs late resolution. */
 20085 16 : tree state = tree_cons (NULL_TREE, construct_context, selectors);
 20086 :
 20087 92 : for (unsigned int i = 0; i < all_candidates.length (); i++)
 20088 : {
 20089 76 : tree selector = all_candidates[i].selector;
 20090 76 : tree alternative = all_candidates[i].alternative;
 20091 76 : tree body = all_candidates[i].body;
 20092 76 : TREE_VEC_ELT (selectors, i) = selector;
 20093 :
 20094 : /* Case label. Numbering is 1-based. */
 20095 76 : tree case_val = build_int_cst (integer_type_node, i + 1);
 20096 76 : tree case_label
 20097 76 : = build_case_label (case_val, NULL_TREE,
 20098 : create_artificial_label (UNKNOWN_LOCATION));
 20099 76 : append_to_statement_list (case_label, &switch_body);
 20100 :
 20101 : /* The actual body of the variant. */
 20102 76 : tree variant_body = NULL_TREE;
 20103 76 : append_to_statement_list (alternative, &variant_body);
 20104 :
 : /* A shared candidate body is emitted once (under BODY_LABEL,
 : after the switch); each variant that has one jumps there,
 : all others jump straight to END_LABEL. */
 20105 76 : if (body != NULL_TREE)
 20106 : {
 20107 0 : if (standalone_body == NULL)
 20108 : {
 20109 0 : standalone_body = body;
 20110 0 : body_label = create_artificial_label (UNKNOWN_LOCATION);
 20111 : }
 20112 0 : append_to_statement_list (build1 (GOTO_EXPR, void_type_node,
 20113 : body_label),
 20114 : &variant_body);
 20115 : }
 20116 : else
 20117 76 : append_to_statement_list (build1 (GOTO_EXPR, void_type_node,
 20118 : end_label),
 20119 : &variant_body);
 20120 :
 20121 : /* If this is a dynamic selector, wrap variant_body with a conditional.
 20122 : If the predicate doesn't match, the else clause sets switch_var and
 20123 : jumps to loop_var to try again. */
 20124 76 : tree dynamic_selector = omp_dynamic_cond (selector, find_supercontext ());
 20125 76 : if (dynamic_selector)
 20126 : {
 20127 20 : tree else_stmt = NULL_TREE;
 20128 20 : tree next = build2 (OMP_NEXT_VARIANT, integer_type_node,
 20129 : case_val, state);
 20130 20 : append_to_statement_list (build2 (MODIFY_EXPR, integer_type_node,
 20131 : switch_var, next),
 20132 : &else_stmt);
 20133 20 : append_to_statement_list (build1 (GOTO_EXPR, void_type_node,
 20134 : loop_label),
 20135 : &else_stmt);
 20136 20 : variant_body = build3 (COND_EXPR, void_type_node, dynamic_selector,
 20137 : variant_body, else_stmt);
 20138 : }
 20139 76 : append_to_statement_list (variant_body, &switch_body);
 20140 : }
 20141 :
 20142 : /* Put it all together. */
 20143 16 : tree result = NULL_TREE;
 20144 16 : tree first = build2 (OMP_NEXT_VARIANT, integer_type_node, integer_zero_node,
 20145 : state);
 20146 16 : append_to_statement_list (build2 (MODIFY_EXPR, integer_type_node,
 20147 : switch_var, first),
 20148 : &result);
 20149 16 : append_to_statement_list (build1 (LABEL_EXPR, void_type_node, loop_label),
 20150 : &result);
 20151 16 : append_to_statement_list (build2 (SWITCH_EXPR, integer_type_node,
 20152 : switch_var, switch_body),
 20153 : &result);
 20154 16 : if (standalone_body)
 20155 : {
 20156 0 : append_to_statement_list (build1 (LABEL_EXPR, void_type_node,
 20157 : body_label),
 20158 : &result);
 20159 0 : append_to_statement_list (standalone_body, &result);
 20160 : }
 20161 16 : append_to_statement_list (build1 (LABEL_EXPR, void_type_node, end_label),
 20162 : &result);
 : /* Flag the function so the late-resolution pass knows it contains
 : OMP_NEXT_VARIANT cookies to replace. */
 20163 16 : cgraph_node::get (cfun->decl)->has_omp_variant_constructs = 1;
 20164 16 : return result;
 20165 : }
20166 :
20167 :
20168 : /* Gimplify an OMP_METADIRECTIVE construct. EXPR is the tree version.
20169 : The metadirective will be resolved at this point if possible, otherwise
20170 : a GIMPLE_OMP_VARIANT_CONSTRUCT is created. */
20171 :
 20172 : static enum gimplify_status
 : /* The trailing unnamed parameters are unused here; the signature
 : matches the form gimplify_expr expects of its helpers. */
 20173 174 : gimplify_omp_metadirective (tree *expr_p, gimple_seq *pre_p, gimple_seq *,
 20174 : bool (*) (tree), fallback_t)
 20175 : {
 20176 : /* Try to resolve the metadirective. */
 20177 174 : tree construct_context = omp_get_construct_context ();
 20178 174 : vec<struct omp_variant> all_candidates
 20179 174 : = omp_metadirective_candidates (*expr_p, construct_context);
 20180 174 : vec<struct omp_variant> candidates
 20181 174 : = omp_get_dynamic_candidates (all_candidates, construct_context);
 20182 174 : if (!candidates.is_empty ())
 20183 170 : return expand_omp_metadirective (candidates, pre_p);
 20184 :
 20185 : /* The metadirective cannot be resolved yet. Turn it into a loop with
 20186 : a nested switch statement, using OMP_NEXT_VARIANT to set the control
 20187 : variable for the switch. */
 20188 4 : *expr_p = expand_late_variant_directive (all_candidates, construct_context);
 : /* GS_OK: the caller re-gimplifies the freshly built GENERIC. */
 20189 4 : return GS_OK;
 20190 : }
20191 :
20192 : /* Gimplify an OMP_DECLARE_MAPPER node (by just removing it). */
20193 :
 20194 : static enum gimplify_status
 20195 0 : gimplify_omp_declare_mapper (tree *expr_p)
 20196 : {
 : /* No GIMPLE is emitted for a declare-mapper node; simply drop it. */
 20197 0 : *expr_p = NULL_TREE;
 20198 0 : return GS_ALL_DONE;
 20199 : }
20200 :
20201 : /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
20202 : expression produces a value to be used as an operand inside a GIMPLE
20203 : statement, the value will be stored back in *EXPR_P. This value will
20204 : be a tree of class tcc_declaration, tcc_constant, tcc_reference or
20205 : an SSA_NAME. The corresponding sequence of GIMPLE statements is
20206 : emitted in PRE_P and POST_P.
20207 :
20208 : Additionally, this process may overwrite parts of the input
20209 : expression during gimplification. Ideally, it should be
20210 : possible to do non-destructive gimplification.
20211 :
20212 : EXPR_P points to the GENERIC expression to convert to GIMPLE. If
20213 : the expression needs to evaluate to a value to be used as
20214 : an operand in a GIMPLE statement, this value will be stored in
20215 : *EXPR_P on exit. This happens when the caller specifies one
20216 : of fb_lvalue or fb_rvalue fallback flags.
20217 :
20218 : PRE_P will contain the sequence of GIMPLE statements corresponding
20219 : to the evaluation of EXPR and all the side-effects that must
20220 : be executed before the main expression. On exit, the last
20221 : statement of PRE_P is the core statement being gimplified. For
20222 : instance, when gimplifying 'if (++a)' the last statement in
20223 : PRE_P will be 'if (t.1)' where t.1 is the result of
20224 : pre-incrementing 'a'.
20225 :
20226 : POST_P will contain the sequence of GIMPLE statements corresponding
20227 : to the evaluation of all the side-effects that must be executed
20228 : after the main expression. If this is NULL, the post
20229 : side-effects are stored at the end of PRE_P.
20230 :
20231 : The reason why the output is split in two is to handle post
20232 : side-effects explicitly. In some cases, an expression may have
20233 : inner and outer post side-effects which need to be emitted in
20234 : an order different from the one given by the recursive
20235 : traversal. For instance, for the expression (*p--)++ the post
20236 : side-effects of '--' must actually occur *after* the post
20237 : side-effects of '++'. However, gimplification will first visit
20238 : the inner expression, so if a separate POST sequence was not
20239 : used, the resulting sequence would be:
20240 :
20241 : 1 t.1 = *p
20242 : 2 p = p - 1
20243 : 3 t.2 = t.1 + 1
20244 : 4 *p = t.2
20245 :
20246 : However, the post-decrement operation in line #2 must not be
20247 : evaluated until after the store to *p at line #4, so the
20248 : correct sequence should be:
20249 :
20250 : 1 t.1 = *p
20251 : 2 t.2 = t.1 + 1
20252 : 3 *p = t.2
20253 : 4 p = p - 1
20254 :
20255 : So, by specifying a separate post queue, it is possible
20256 : to emit the post side-effects in the correct order.
20257 : If POST_P is NULL, an internal queue will be used. Before
20258 : returning to the caller, the sequence POST_P is appended to
20259 : the main output sequence PRE_P.
20260 :
20261 : GIMPLE_TEST_F points to a function that takes a tree T and
20262 : returns nonzero if T is in the GIMPLE form requested by the
20263 : caller. The GIMPLE predicates are in gimple.cc.
20264 :
20265 : FALLBACK tells the function what sort of a temporary we want if
20266 : gimplification cannot produce an expression that complies with
20267 : GIMPLE_TEST_F.
20268 :
20269 : fb_none means that no temporary should be generated
20270 : fb_rvalue means that an rvalue is OK to generate
20271 : fb_lvalue means that an lvalue is OK to generate
20272 : fb_either means that either is OK, but an lvalue is preferable.
20273 : fb_mayfail means that gimplification may fail (in which case
20274 : GS_ERROR will be returned)
20275 :
20276 : The return value is either GS_ERROR or GS_ALL_DONE, since this
20277 : function iterates until EXPR is completely gimplified or an error
20278 : occurs. */
20279 :
20280 : enum gimplify_status
20281 514757292 : gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
20282 : bool (*gimple_test_f) (tree), fallback_t fallback)
20283 : {
20284 514757292 : tree tmp;
20285 514757292 : gimple_seq internal_pre = NULL;
20286 514757292 : gimple_seq internal_post = NULL;
20287 514757292 : tree save_expr;
20288 514757292 : bool is_statement;
20289 514757292 : location_t saved_location;
20290 514757292 : enum gimplify_status ret;
20291 514757292 : gimple_stmt_iterator pre_last_gsi, post_last_gsi;
20292 514757292 : tree label;
20293 :
20294 514757292 : save_expr = *expr_p;
20295 514757292 : if (save_expr == NULL_TREE)
20296 : return GS_ALL_DONE;
20297 :
20298 : /* If we are gimplifying a top-level statement, PRE_P must be valid. */
20299 463200008 : is_statement = gimple_test_f == is_gimple_stmt;
20300 463200008 : if (is_statement)
20301 100530511 : gcc_assert (pre_p);
20302 :
20303 : /* Consistency checks. */
20304 463200008 : if (gimple_test_f == is_gimple_reg)
20305 6292172 : gcc_assert (fallback & (fb_rvalue | fb_lvalue));
20306 456907836 : else if (gimple_test_f == is_gimple_val
20307 336821243 : || gimple_test_f == is_gimple_call_addr
20308 320563820 : || gimple_test_f == is_gimple_condexpr_for_cond
20309 314968007 : || gimple_test_f == is_gimple_mem_rhs
20310 314824890 : || gimple_test_f == is_gimple_mem_rhs_or_call
20311 303719570 : || gimple_test_f == is_gimple_reg_rhs
20312 303106219 : || gimple_test_f == is_gimple_reg_rhs_or_call
20313 231528943 : || gimple_test_f == is_gimple_asm_val
20314 231489845 : || gimple_test_f == is_gimple_mem_ref_addr)
20315 240802141 : gcc_assert (fallback & fb_rvalue);
20316 216105695 : else if (gimple_test_f == is_gimple_min_lval
20317 186321254 : || gimple_test_f == is_gimple_lvalue)
20318 82950803 : gcc_assert (fallback & fb_lvalue);
20319 133154892 : else if (gimple_test_f == is_gimple_addressable)
20320 32624381 : gcc_assert (fallback & fb_either);
20321 100530511 : else if (gimple_test_f == is_gimple_stmt)
20322 100530511 : gcc_assert (fallback == fb_none);
20323 : else
20324 : {
20325 : /* We should have recognized the GIMPLE_TEST_F predicate to
20326 : know what kind of fallback to use in case a temporary is
20327 : needed to hold the value or address of *EXPR_P. */
20328 0 : gcc_unreachable ();
20329 : }
20330 :
20331 : /* We used to check the predicate here and return immediately if it
20332 : succeeds. This is wrong; the design is for gimplification to be
20333 : idempotent, and for the predicates to only test for valid forms, not
20334 : whether they are fully simplified. */
20335 463200008 : if (pre_p == NULL)
20336 0 : pre_p = &internal_pre;
20337 :
20338 463200008 : if (post_p == NULL)
20339 186382416 : post_p = &internal_post;
20340 :
20341 : /* Remember the last statements added to PRE_P and POST_P. Every
20342 : new statement added by the gimplification helpers needs to be
20343 : annotated with location information. To centralize the
20344 : responsibility, we remember the last statement that had been
20345 : added to both queues before gimplifying *EXPR_P. If
20346 : gimplification produces new statements in PRE_P and POST_P, those
20347 : statements will be annotated with the same location information
20348 : as *EXPR_P. */
20349 463200008 : pre_last_gsi = gsi_last (*pre_p);
20350 463200008 : post_last_gsi = gsi_last (*post_p);
20351 :
20352 463200008 : saved_location = input_location;
20353 463200008 : if (save_expr != error_mark_node
20354 463200008 : && EXPR_HAS_LOCATION (*expr_p))
20355 173383005 : input_location = EXPR_LOCATION (*expr_p);
20356 :
20357 : /* Loop over the specific gimplifiers until the toplevel node
20358 : remains the same. */
20359 481269088 : do
20360 : {
20361 : /* Strip away as many useless type conversions as possible
20362 : at the toplevel. */
20363 481269088 : STRIP_USELESS_TYPE_CONVERSION (*expr_p);
20364 :
20365 : /* Remember the expr. */
20366 481269088 : save_expr = *expr_p;
20367 :
20368 : /* Die, die, die, my darling. */
20369 481269088 : if (error_operand_p (save_expr))
20370 : {
20371 : ret = GS_ERROR;
20372 : break;
20373 : }
20374 :
20375 : /* Do any language-specific gimplification. */
20376 481265509 : ret = ((enum gimplify_status)
20377 481265509 : lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
20378 481265509 : if (ret == GS_OK)
20379 : {
20380 28223572 : if (*expr_p == NULL_TREE)
20381 : break;
20382 28223572 : if (*expr_p != save_expr)
20383 4691925 : continue;
20384 : }
20385 453041937 : else if (ret != GS_UNHANDLED)
20386 : break;
20387 :
20388 : /* Make sure that all the cases set 'ret' appropriately. */
20389 475997290 : ret = GS_UNHANDLED;
20390 475997290 : switch (TREE_CODE (*expr_p))
20391 : {
20392 : /* First deal with the special cases. */
20393 :
20394 1136420 : case POSTINCREMENT_EXPR:
20395 1136420 : case POSTDECREMENT_EXPR:
20396 1136420 : case PREINCREMENT_EXPR:
20397 1136420 : case PREDECREMENT_EXPR:
20398 2272840 : ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
20399 : fallback != fb_none,
20400 1136420 : TREE_TYPE (*expr_p));
20401 1136420 : break;
20402 :
20403 590316 : case VIEW_CONVERT_EXPR:
20404 590316 : if ((fallback & fb_rvalue)
20405 590190 : && is_gimple_reg_type (TREE_TYPE (*expr_p))
20406 1003126 : && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
20407 : {
20408 401042 : ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
20409 : post_p, is_gimple_val, fb_rvalue);
20410 401042 : recalculate_side_effects (*expr_p);
20411 401042 : break;
20412 : }
20413 : /* Fallthru. */
20414 :
20415 29721822 : case ARRAY_REF:
20416 29721822 : case ARRAY_RANGE_REF:
20417 29721822 : case REALPART_EXPR:
20418 29721822 : case IMAGPART_EXPR:
20419 29721822 : case COMPONENT_REF:
20420 29721822 : ret = gimplify_compound_lval (expr_p, pre_p, post_p,
20421 : fallback ? fallback : fb_rvalue);
20422 29721822 : break;
20423 :
20424 6217496 : case COND_EXPR:
20425 6217496 : ret = gimplify_cond_expr (expr_p, pre_p, fallback);
20426 :
20427 : /* C99 code may assign to an array in a structure value of a
20428 : conditional expression, and this has undefined behavior
20429 : only on execution, so create a temporary if an lvalue is
20430 : required. */
20431 6217496 : if (fallback == fb_lvalue)
20432 : {
20433 7 : *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
20434 7 : mark_addressable (*expr_p);
20435 7 : ret = GS_OK;
20436 : }
20437 : break;
20438 :
20439 16769422 : case CALL_EXPR:
20440 16769422 : ret = gimplify_call_expr (expr_p, pre_p, fallback);
20441 :
20442 : /* C99 code may assign to an array in a structure returned
20443 : from a function, and this has undefined behavior only on
20444 : execution, so create a temporary if an lvalue is
20445 : required. */
20446 16769422 : if (fallback == fb_lvalue)
20447 : {
20448 17889 : *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
20449 17889 : mark_addressable (*expr_p);
20450 17889 : ret = GS_OK;
20451 : }
20452 : break;
20453 :
20454 0 : case TREE_LIST:
20455 0 : gcc_unreachable ();
20456 :
20457 0 : case OMP_ARRAY_SECTION:
20458 0 : gcc_unreachable ();
20459 :
20460 476774 : case COMPOUND_EXPR:
20461 476774 : ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
20462 476774 : break;
20463 :
20464 36955 : case COMPOUND_LITERAL_EXPR:
20465 36955 : ret = gimplify_compound_literal_expr (expr_p, pre_p,
20466 : gimple_test_f, fallback);
20467 36955 : break;
20468 :
20469 49826853 : case MODIFY_EXPR:
20470 49826853 : case INIT_EXPR:
20471 49826853 : ret = gimplify_modify_expr (expr_p, pre_p, post_p,
20472 : fallback != fb_none);
20473 49826853 : break;
20474 :
20475 113413 : case TRUTH_ANDIF_EXPR:
20476 113413 : case TRUTH_ORIF_EXPR:
20477 113413 : {
20478 : /* Preserve the original type of the expression and the
20479 : source location of the outer expression. */
20480 113413 : tree org_type = TREE_TYPE (*expr_p);
20481 113413 : *expr_p = gimple_boolify (*expr_p);
20482 113413 : *expr_p = build3_loc (input_location, COND_EXPR,
20483 : org_type, *expr_p,
20484 : fold_convert_loc
20485 : (input_location,
20486 : org_type, boolean_true_node),
20487 : fold_convert_loc
20488 : (input_location,
20489 : org_type, boolean_false_node));
20490 113413 : ret = GS_OK;
20491 113413 : break;
20492 : }
20493 :
20494 230287 : case TRUTH_NOT_EXPR:
20495 230287 : {
20496 230287 : tree type = TREE_TYPE (*expr_p);
20497 : /* The parsers are careful to generate TRUTH_NOT_EXPR
20498 : only with operands that are always zero or one.
20499 : We do not fold here but handle the only interesting case
20500 : manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
20501 230287 : *expr_p = gimple_boolify (*expr_p);
20502 230287 : if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
20503 230287 : *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
20504 230287 : TREE_TYPE (*expr_p),
20505 230287 : TREE_OPERAND (*expr_p, 0));
20506 : else
20507 0 : *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
20508 0 : TREE_TYPE (*expr_p),
20509 0 : TREE_OPERAND (*expr_p, 0),
20510 0 : build_int_cst (TREE_TYPE (*expr_p), 1));
20511 230287 : if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
20512 5674 : *expr_p = fold_convert_loc (input_location, type, *expr_p);
20513 : ret = GS_OK;
20514 : break;
20515 : }
20516 :
20517 32704326 : case ADDR_EXPR:
20518 32704326 : ret = gimplify_addr_expr (expr_p, pre_p, post_p);
20519 32704326 : break;
20520 :
20521 5535 : case ANNOTATE_EXPR:
20522 5535 : {
20523 5535 : tree cond = TREE_OPERAND (*expr_p, 0);
20524 5535 : tree kind = TREE_OPERAND (*expr_p, 1);
20525 5535 : tree data = TREE_OPERAND (*expr_p, 2);
20526 5535 : tree type = TREE_TYPE (cond);
20527 5535 : if (!INTEGRAL_TYPE_P (type))
20528 : {
20529 0 : *expr_p = cond;
20530 0 : ret = GS_OK;
20531 0 : break;
20532 : }
20533 5535 : tree tmp = create_tmp_var (type);
20534 5535 : gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
20535 5535 : gcall *call
20536 5535 : = gimple_build_call_internal (IFN_ANNOTATE, 3, cond, kind, data);
20537 5535 : gimple_call_set_lhs (call, tmp);
20538 5535 : gimplify_seq_add_stmt (pre_p, call);
20539 5535 : *expr_p = tmp;
20540 5535 : ret = GS_ALL_DONE;
20541 5535 : break;
20542 : }
20543 :
20544 50937 : case VA_ARG_EXPR:
20545 50937 : ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
20546 50937 : break;
20547 :
20548 17180694 : CASE_CONVERT:
20549 17180694 : if (IS_EMPTY_STMT (*expr_p))
20550 : {
20551 : ret = GS_ALL_DONE;
20552 : break;
20553 : }
20554 :
20555 15441092 : if (VOID_TYPE_P (TREE_TYPE (*expr_p))
20556 15441092 : || fallback == fb_none)
20557 : {
20558 : /* Just strip a conversion to void (or in void context) and
20559 : try again. */
20560 2615736 : *expr_p = TREE_OPERAND (*expr_p, 0);
20561 2615736 : ret = GS_OK;
20562 2615736 : break;
20563 : }
20564 :
20565 12825356 : ret = gimplify_conversion (expr_p);
20566 12825356 : if (ret == GS_ERROR)
20567 : break;
20568 12825356 : if (*expr_p != save_expr)
20569 : break;
20570 : /* FALLTHRU */
20571 :
20572 12951124 : case FIX_TRUNC_EXPR:
20573 : /* unary_expr: ... | '(' cast ')' val | ... */
20574 12951124 : ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
20575 : is_gimple_val, fb_rvalue);
20576 12951124 : recalculate_side_effects (*expr_p);
20577 12951124 : break;
20578 :
20579 6224008 : case INDIRECT_REF:
20580 6224008 : {
20581 6224008 : bool volatilep = TREE_THIS_VOLATILE (*expr_p);
20582 6224008 : bool notrap = TREE_THIS_NOTRAP (*expr_p);
20583 6224008 : tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
20584 :
20585 6224008 : *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
20586 6224008 : if (*expr_p != save_expr)
20587 : {
20588 : ret = GS_OK;
20589 : break;
20590 : }
20591 :
20592 6205766 : ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
20593 : is_gimple_reg, fb_rvalue);
20594 6205766 : if (ret == GS_ERROR)
20595 : break;
20596 :
20597 6205764 : recalculate_side_effects (*expr_p);
20598 12411528 : *expr_p = fold_build2_loc (input_location, MEM_REF,
20599 6205764 : TREE_TYPE (*expr_p),
20600 6205764 : TREE_OPERAND (*expr_p, 0),
20601 : build_int_cst (saved_ptr_type, 0));
20602 6205764 : TREE_THIS_VOLATILE (*expr_p) = volatilep;
20603 6205764 : TREE_THIS_NOTRAP (*expr_p) = notrap;
20604 6205764 : ret = GS_OK;
20605 6205764 : break;
20606 : }
20607 :
20608 :       /* We arrive here through the various re-gimplification paths.  */
20609 15837827 : case MEM_REF:
20610 : /* First try re-folding the whole thing. */
20611 15837827 : tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
20612 : TREE_OPERAND (*expr_p, 0),
20613 : TREE_OPERAND (*expr_p, 1));
20614 15837827 : if (tmp)
20615 : {
20616 6568 : REF_REVERSE_STORAGE_ORDER (tmp)
20617 3284 : = REF_REVERSE_STORAGE_ORDER (*expr_p);
20618 3284 : *expr_p = tmp;
20619 3284 : recalculate_side_effects (*expr_p);
20620 3284 : ret = GS_OK;
20621 3284 : break;
20622 : }
20623 : /* Avoid re-gimplifying the address operand if it is already
20624 : in suitable form. Re-gimplifying would mark the address
20625 : operand addressable. Always gimplify when not in SSA form
20626 : as we still may have to gimplify decls with value-exprs. */
20627 15834543 : if (!gimplify_ctxp || !gimple_in_ssa_p (cfun)
20628 16837305 : || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
20629 : {
20630 14886718 : ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
20631 : is_gimple_mem_ref_addr, fb_rvalue);
20632 14886718 : if (ret == GS_ERROR)
20633 : break;
20634 : }
20635 15834543 : recalculate_side_effects (*expr_p);
20636 15834543 : ret = GS_ALL_DONE;
20637 15834543 : break;
20638 :
20639 : /* Constants need not be gimplified. */
20640 43787825 : case INTEGER_CST:
20641 43787825 : case REAL_CST:
20642 43787825 : case FIXED_CST:
20643 43787825 : case STRING_CST:
20644 43787825 : case COMPLEX_CST:
20645 43787825 : case VECTOR_CST:
20646 : /* Drop the overflow flag on constants, we do not want
20647 : that in the GIMPLE IL. */
20648 43787825 : if (TREE_OVERFLOW_P (*expr_p))
20649 1165 : *expr_p = drop_tree_overflow (*expr_p);
20650 : ret = GS_ALL_DONE;
20651 : break;
20652 :
20653 117050 : case CONST_DECL:
20654 : /* If we require an lvalue, such as for ADDR_EXPR, retain the
20655 : CONST_DECL node. Otherwise the decl is replaceable by its
20656 : value. */
20657 : /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
20658 117050 : if (fallback & fb_lvalue)
20659 : ret = GS_ALL_DONE;
20660 : else
20661 : {
20662 3475 : *expr_p = DECL_INITIAL (*expr_p);
20663 3475 : ret = GS_OK;
20664 : }
20665 : break;
20666 :
20667 6498798 : case DECL_EXPR:
20668 6498798 : ret = gimplify_decl_expr (expr_p, pre_p);
20669 6498798 : break;
20670 :
20671 5934815 : case BIND_EXPR:
20672 5934815 : ret = gimplify_bind_expr (expr_p, pre_p);
20673 5934815 : break;
20674 :
20675 200047 : case LOOP_EXPR:
20676 200047 : ret = gimplify_loop_expr (expr_p, pre_p);
20677 200047 : break;
20678 :
20679 50589 : case SWITCH_EXPR:
20680 50589 : ret = gimplify_switch_expr (expr_p, pre_p);
20681 50589 : break;
20682 :
20683 3174 : case EXIT_EXPR:
20684 3174 : ret = gimplify_exit_expr (expr_p);
20685 3174 : break;
20686 :
20687 1100869 : case GOTO_EXPR:
20688 : /* If the target is not LABEL, then it is a computed jump
20689 : and the target needs to be gimplified. */
20690 1100869 : if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
20691 : {
20692 1109 : ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
20693 : NULL, is_gimple_val, fb_rvalue);
20694 1109 : if (ret == GS_ERROR)
20695 : break;
20696 : }
20697 2201736 : gimplify_seq_add_stmt (pre_p,
20698 1100868 : gimple_build_goto (GOTO_DESTINATION (*expr_p)));
20699 1100868 : ret = GS_ALL_DONE;
20700 1100868 : break;
20701 :
20702 102264 : case PREDICT_EXPR:
20703 409056 : gimplify_seq_add_stmt (pre_p,
20704 102264 : gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
20705 102264 : PREDICT_EXPR_OUTCOME (*expr_p)));
20706 102264 : ret = GS_ALL_DONE;
20707 102264 : break;
20708 :
20709 2540992 : case LABEL_EXPR:
20710 2540992 : ret = gimplify_label_expr (expr_p, pre_p);
20711 2540992 : label = LABEL_EXPR_LABEL (*expr_p);
20712 2540992 : gcc_assert (decl_function_context (label) == current_function_decl);
20713 :
20714 :	  /* If the label is used in a goto statement, or address of the label
20715 :	     is taken, we need to unpoison all variables that were seen so far.
20716 :	     Doing so would prevent us from reporting false positives.  */
20717 2540992 : if (asan_poisoned_variables
20718 3906 : && asan_used_labels != NULL
20719 1213 : && asan_used_labels->contains (label)
20720 2541806 : && !gimplify_omp_ctxp)
20721 806 : asan_poison_variables (asan_poisoned_variables, false, pre_p);
20722 : break;
20723 :
20724 1044052 : case CASE_LABEL_EXPR:
20725 1044052 : ret = gimplify_case_label_expr (expr_p, pre_p);
20726 :
20727 1044052 : if (gimplify_ctxp->live_switch_vars)
20728 1043865 : asan_poison_variables (gimplify_ctxp->live_switch_vars, false,
20729 : pre_p);
20730 : break;
20731 :
20732 2233834 : case RETURN_EXPR:
20733 2233834 : ret = gimplify_return_expr (*expr_p, pre_p);
20734 2233834 : break;
20735 :
20736 937282 : case CONSTRUCTOR:
20737 :	/* Don't reduce this in place; let gimplify_init_constructor work its
20738 :	   magic.  But if we're just elaborating this for side effects, just
20739 : gimplify any element that has side-effects. */
20740 937282 : if (fallback == fb_none)
20741 : {
20742 357 : unsigned HOST_WIDE_INT ix;
20743 357 : tree val;
20744 357 : tree temp = NULL_TREE;
20745 374 : FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
20746 17 : if (TREE_SIDE_EFFECTS (val))
20747 4 : append_to_statement_list (val, &temp);
20748 :
20749 357 : *expr_p = temp;
20750 357 : ret = temp ? GS_OK : GS_ALL_DONE;
20751 : }
20752 : /* C99 code may assign to an array in a constructed
20753 : structure or union, and this has undefined behavior only
20754 : on execution, so create a temporary if an lvalue is
20755 : required. */
20756 936925 : else if (fallback == fb_lvalue)
20757 : {
20758 12 : *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
20759 12 : mark_addressable (*expr_p);
20760 12 : ret = GS_OK;
20761 : }
20762 : else
20763 : ret = GS_ALL_DONE;
20764 : break;
20765 :
20766 : /* The following are special cases that are not handled by the
20767 : original GIMPLE grammar. */
20768 :
20769 : /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
20770 : eliminated. */
20771 438725 : case SAVE_EXPR:
20772 438725 : ret = gimplify_save_expr (expr_p, pre_p, post_p);
20773 438725 : break;
20774 :
20775 400045 : case BIT_FIELD_REF:
20776 400045 : ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
20777 : post_p, is_gimple_lvalue, fb_either);
20778 400045 : recalculate_side_effects (*expr_p);
20779 400045 : break;
20780 :
20781 2445 : case TARGET_MEM_REF:
20782 2445 : {
20783 2445 : enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
20784 :
20785 2445 : if (TMR_BASE (*expr_p))
20786 2445 : r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
20787 : post_p, is_gimple_mem_ref_addr, fb_either);
20788 2445 : if (TMR_INDEX (*expr_p))
20789 1182 : r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
20790 : post_p, is_gimple_val, fb_rvalue);
20791 2445 : if (TMR_INDEX2 (*expr_p))
20792 60 : r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
20793 : post_p, is_gimple_val, fb_rvalue);
20794 : /* TMR_STEP and TMR_OFFSET are always integer constants. */
20795 2445 : ret = MIN (r0, r1);
20796 : }
20797 : break;
20798 :
20799 0 : case NON_LVALUE_EXPR:
20800 : /* This should have been stripped above. */
20801 0 : gcc_unreachable ();
20802 :
20803 97480 : case ASM_EXPR:
20804 97480 : ret = gimplify_asm_expr (expr_p, pre_p, post_p);
20805 97480 : break;
20806 :
20807 490866 : case TRY_FINALLY_EXPR:
20808 490866 : case TRY_CATCH_EXPR:
20809 490866 : {
20810 490866 : gimple_seq eval, cleanup;
20811 490866 : gtry *try_;
20812 :
20813 : /* Calls to destructors are generated automatically in FINALLY/CATCH
20814 : block. They should have location as UNKNOWN_LOCATION. However,
20815 : gimplify_call_expr will reset these call stmts to input_location
20816 : if it finds stmt's location is unknown. To prevent resetting for
20817 : destructors, we set the input_location to unknown.
20818 : Note that this only affects the destructor calls in FINALLY/CATCH
20819 : block, and will automatically reset to its original value by the
20820 : end of gimplify_expr. */
20821 490866 : input_location = UNKNOWN_LOCATION;
20822 490866 : eval = cleanup = NULL;
20823 490866 : gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
20824 490866 : bool save_in_handler_expr = gimplify_ctxp->in_handler_expr;
20825 490866 : if (TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
20826 490866 : && TREE_CODE (TREE_OPERAND (*expr_p, 1)) == EH_ELSE_EXPR)
20827 : {
20828 155 : gimple_seq n = NULL, e = NULL;
20829 155 : gimplify_ctxp->in_handler_expr = true;
20830 155 : gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p, 1),
20831 : 0), &n);
20832 155 : gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p, 1),
20833 : 1), &e);
20834 155 : if (!gimple_seq_empty_p (n) || !gimple_seq_empty_p (e))
20835 : {
20836 151 : geh_else *stmt = gimple_build_eh_else (n, e);
20837 151 : gimple_seq_add_stmt (&cleanup, stmt);
20838 : }
20839 : }
20840 : else
20841 : {
20842 490711 : gimplify_ctxp->in_handler_expr = true;
20843 490711 : gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
20844 : }
20845 490866 : gimplify_ctxp->in_handler_expr = save_in_handler_expr;
20846 : /* Don't create bogus GIMPLE_TRY with empty cleanup. */
20847 490866 : if (gimple_seq_empty_p (cleanup))
20848 : {
20849 27183 : gimple_seq_add_seq (pre_p, eval);
20850 27183 : ret = GS_ALL_DONE;
20851 27183 : break;
20852 : }
20853 463683 : try_ = gimple_build_try (eval, cleanup,
20854 463683 : TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
20855 : ? GIMPLE_TRY_FINALLY
20856 : : GIMPLE_TRY_CATCH);
20857 463683 : if (EXPR_HAS_LOCATION (save_expr))
20858 848812 : gimple_set_location (try_, EXPR_LOCATION (save_expr));
20859 39277 : else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
20860 28902 : gimple_set_location (try_, saved_location);
20861 463683 : if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
20862 165516 : gimple_try_set_catch_is_cleanup (try_,
20863 165516 : TRY_CATCH_IS_CLEANUP (*expr_p));
20864 463683 : gimplify_seq_add_stmt (pre_p, try_);
20865 463683 : ret = GS_ALL_DONE;
20866 463683 : break;
20867 : }
20868 :
20869 5172692 : case CLEANUP_POINT_EXPR:
20870 5172692 : ret = gimplify_cleanup_point_expr (expr_p, pre_p);
20871 5172692 : break;
20872 :
20873 776969 : case TARGET_EXPR:
20874 776969 : ret = gimplify_target_expr (expr_p, pre_p, post_p);
20875 776969 : break;
20876 :
20877 39583 : case CATCH_EXPR:
20878 39583 : {
20879 39583 : gimple *c;
20880 39583 : gimple_seq handler = NULL;
20881 39583 : gimplify_and_add (CATCH_BODY (*expr_p), &handler);
20882 39583 : c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
20883 39583 : gimplify_seq_add_stmt (pre_p, c);
20884 39583 : ret = GS_ALL_DONE;
20885 39583 : break;
20886 : }
20887 :
20888 5332 : case EH_FILTER_EXPR:
20889 5332 : {
20890 5332 : gimple *ehf;
20891 5332 : gimple_seq failure = NULL;
20892 :
20893 5332 : gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
20894 5332 : ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
20895 5332 : copy_warning (ehf, *expr_p);
20896 5332 : gimplify_seq_add_stmt (pre_p, ehf);
20897 5332 : ret = GS_ALL_DONE;
20898 5332 : break;
20899 : }
20900 :
20901 50804 : case OBJ_TYPE_REF:
20902 50804 : {
20903 50804 : enum gimplify_status r0, r1;
20904 50804 : r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
20905 : post_p, is_gimple_val, fb_rvalue);
20906 50804 : r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
20907 : post_p, is_gimple_val, fb_rvalue);
20908 50804 : TREE_SIDE_EFFECTS (*expr_p) = 0;
20909 50804 : ret = MIN (r0, r1);
20910 : }
20911 : break;
20912 :
20913 40548 : case LABEL_DECL:
20914 : /* We get here when taking the address of a label. We mark
20915 : the label as "forced"; meaning it can never be removed and
20916 : it is a potential target for any computed goto. */
20917 40548 : FORCED_LABEL (*expr_p) = 1;
20918 40548 : ret = GS_ALL_DONE;
20919 40548 : break;
20920 :
20921 8474342 : case STATEMENT_LIST:
20922 8474342 : ret = gimplify_statement_list (expr_p, pre_p);
20923 8474342 : break;
20924 :
20925 1721 : case WITH_SIZE_EXPR:
20926 1721 : {
20927 2003 : gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
20928 : post_p == &internal_post ? NULL : post_p,
20929 : gimple_test_f, fallback);
20930 1721 : gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
20931 : is_gimple_val, fb_rvalue);
20932 1721 : ret = GS_ALL_DONE;
20933 : }
20934 1721 : break;
20935 :
20936 108136217 : case VAR_DECL:
20937 108136217 : case PARM_DECL:
20938 108136217 : ret = gimplify_var_or_parm_decl (expr_p);
20939 108136217 : break;
20940 :
20941 334112 : case RESULT_DECL:
20942 : /* When within an OMP context, notice uses of variables. */
20943 334112 : if (gimplify_omp_ctxp)
20944 1351 : omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
20945 : /* Handlers can refer to the function result; if that has been
20946 : moved, we need to track it. */
20947 334112 : if (gimplify_ctxp->in_handler_expr && gimplify_ctxp->return_temp)
20948 50 : *expr_p = gimplify_ctxp->return_temp;
20949 : ret = GS_ALL_DONE;
20950 : break;
20951 :
20952 0 : case DEBUG_EXPR_DECL:
20953 0 : gcc_unreachable ();
20954 :
20955 2450805 : case DEBUG_BEGIN_STMT:
20956 4901610 : gimplify_seq_add_stmt (pre_p,
20957 : gimple_build_debug_begin_stmt
20958 2450805 : (TREE_BLOCK (*expr_p),
20959 2450805 : EXPR_LOCATION (*expr_p)));
20960 2450805 : ret = GS_ALL_DONE;
20961 2450805 : *expr_p = NULL;
20962 2450805 : break;
20963 :
20964 : case SSA_NAME:
20965 : /* Allow callbacks into the gimplifier during optimization. */
20966 : ret = GS_ALL_DONE;
20967 : break;
20968 :
20969 18214 : case OMP_PARALLEL:
20970 18214 : gimplify_omp_parallel (expr_p, pre_p);
20971 18214 : ret = GS_ALL_DONE;
20972 18214 : break;
20973 :
20974 3848 : case OMP_TASK:
20975 3848 : gimplify_omp_task (expr_p, pre_p);
20976 3848 : ret = GS_ALL_DONE;
20977 3848 : break;
20978 :
20979 10683 : case OMP_SIMD:
20980 10683 : {
20981 : /* Temporarily disable into_ssa, as scan_omp_simd
20982 : which calls copy_gimple_seq_and_replace_locals can't deal
20983 : with SSA_NAMEs defined outside of the body properly. */
20984 10683 : bool saved_into_ssa = gimplify_ctxp->into_ssa;
20985 10683 : gimplify_ctxp->into_ssa = false;
20986 10683 : ret = gimplify_omp_for (expr_p, pre_p);
20987 10683 : gimplify_ctxp->into_ssa = saved_into_ssa;
20988 10683 : break;
20989 : }
20990 :
20991 47392 : case OMP_FOR:
20992 47392 : case OMP_DISTRIBUTE:
20993 47392 : case OMP_TASKLOOP:
20994 47392 : case OMP_TILE:
20995 47392 : case OMP_UNROLL:
20996 47392 : case OACC_LOOP:
20997 47392 : ret = gimplify_omp_for (expr_p, pre_p);
20998 47392 : break;
20999 :
21000 1056 : case OMP_LOOP:
21001 1056 : ret = gimplify_omp_loop (expr_p, pre_p);
21002 1056 : break;
21003 :
21004 665 : case OACC_CACHE:
21005 665 : gimplify_oacc_cache (expr_p, pre_p);
21006 665 : ret = GS_ALL_DONE;
21007 665 : break;
21008 :
21009 254 : case OACC_DECLARE:
21010 254 : gimplify_oacc_declare (expr_p, pre_p);
21011 254 : ret = GS_ALL_DONE;
21012 254 : break;
21013 :
21014 39443 : case OACC_HOST_DATA:
21015 39443 : case OACC_DATA:
21016 39443 : case OACC_KERNELS:
21017 39443 : case OACC_PARALLEL:
21018 39443 : case OACC_SERIAL:
21019 39443 : case OMP_SCOPE:
21020 39443 : case OMP_SECTIONS:
21021 39443 : case OMP_SINGLE:
21022 39443 : case OMP_TARGET:
21023 39443 : case OMP_TARGET_DATA:
21024 39443 : case OMP_TEAMS:
21025 39443 : gimplify_omp_workshare (expr_p, pre_p);
21026 39443 : ret = GS_ALL_DONE;
21027 39443 : break;
21028 :
21029 12037 : case OACC_ENTER_DATA:
21030 12037 : case OACC_EXIT_DATA:
21031 12037 : case OACC_UPDATE:
21032 12037 : case OMP_TARGET_UPDATE:
21033 12037 : case OMP_TARGET_ENTER_DATA:
21034 12037 : case OMP_TARGET_EXIT_DATA:
21035 12037 : gimplify_omp_target_update (expr_p, pre_p);
21036 12037 : ret = GS_ALL_DONE;
21037 12037 : break;
21038 :
21039 7037 : case OMP_SECTION:
21040 7037 : case OMP_STRUCTURED_BLOCK:
21041 7037 : case OMP_MASTER:
21042 7037 : case OMP_MASKED:
21043 7037 : case OMP_ORDERED:
21044 7037 : case OMP_CRITICAL:
21045 7037 : case OMP_SCAN:
21046 7037 : {
21047 7037 : gimple_seq body = NULL;
21048 7037 : gimple *g;
21049 7037 : bool saved_in_omp_construct = in_omp_construct;
21050 :
21051 7037 : in_omp_construct = true;
21052 7037 : gimplify_and_add (OMP_BODY (*expr_p), &body);
21053 7037 : in_omp_construct = saved_in_omp_construct;
21054 7037 : switch (TREE_CODE (*expr_p))
21055 : {
21056 1271 : case OMP_SECTION:
21057 1271 : g = gimple_build_omp_section (body);
21058 1271 : break;
21059 791 : case OMP_STRUCTURED_BLOCK:
21060 791 : g = gimple_build_omp_structured_block (body);
21061 791 : break;
21062 874 : case OMP_MASTER:
21063 874 : g = gimple_build_omp_master (body);
21064 874 : break;
21065 1950 : case OMP_ORDERED:
21066 1950 : g = gimplify_omp_ordered (*expr_p, body);
21067 1950 : if (OMP_BODY (*expr_p) == NULL_TREE
21068 1950 : && gimple_code (g) == GIMPLE_OMP_ORDERED)
21069 1025 : gimple_omp_ordered_standalone (g);
21070 : break;
21071 487 : case OMP_MASKED:
21072 487 : gimplify_scan_omp_clauses (&OMP_MASKED_CLAUSES (*expr_p),
21073 : pre_p, ORT_WORKSHARE, OMP_MASKED);
21074 487 : gimplify_adjust_omp_clauses (pre_p, body,
21075 : &OMP_MASKED_CLAUSES (*expr_p),
21076 : OMP_MASKED);
21077 974 : g = gimple_build_omp_masked (body,
21078 487 : OMP_MASKED_CLAUSES (*expr_p));
21079 487 : break;
21080 546 : case OMP_CRITICAL:
21081 546 : gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p),
21082 : pre_p, ORT_WORKSHARE, OMP_CRITICAL);
21083 546 : gimplify_adjust_omp_clauses (pre_p, body,
21084 : &OMP_CRITICAL_CLAUSES (*expr_p),
21085 : OMP_CRITICAL);
21086 1638 : g = gimple_build_omp_critical (body,
21087 546 : OMP_CRITICAL_NAME (*expr_p),
21088 546 : OMP_CRITICAL_CLAUSES (*expr_p));
21089 546 : break;
21090 1118 : case OMP_SCAN:
21091 1118 : gimplify_scan_omp_clauses (&OMP_SCAN_CLAUSES (*expr_p),
21092 : pre_p, ORT_WORKSHARE, OMP_SCAN);
21093 1118 : gimplify_adjust_omp_clauses (pre_p, body,
21094 : &OMP_SCAN_CLAUSES (*expr_p),
21095 : OMP_SCAN);
21096 1118 : g = gimple_build_omp_scan (body, OMP_SCAN_CLAUSES (*expr_p));
21097 1118 : break;
21098 0 : default:
21099 0 : gcc_unreachable ();
21100 : }
21101 7037 : gimplify_seq_add_stmt (pre_p, g);
21102 7037 : ret = GS_ALL_DONE;
21103 7037 : break;
21104 : }
21105 :
21106 611 : case OMP_TASKGROUP:
21107 611 : {
21108 611 : gimple_seq body = NULL;
21109 :
21110 611 : tree *pclauses = &OMP_TASKGROUP_CLAUSES (*expr_p);
21111 611 : bool saved_in_omp_construct = in_omp_construct;
21112 611 : gimplify_scan_omp_clauses (pclauses, pre_p, ORT_TASKGROUP,
21113 : OMP_TASKGROUP);
21114 611 : gimplify_adjust_omp_clauses (pre_p, NULL, pclauses, OMP_TASKGROUP);
21115 :
21116 611 : in_omp_construct = true;
21117 611 : gimplify_and_add (OMP_BODY (*expr_p), &body);
21118 611 : in_omp_construct = saved_in_omp_construct;
21119 611 : gimple_seq cleanup = NULL;
21120 611 : tree fn = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
21121 611 : gimple *g = gimple_build_call (fn, 0);
21122 611 : gimple_seq_add_stmt (&cleanup, g);
21123 611 : g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
21124 611 : body = NULL;
21125 611 : gimple_seq_add_stmt (&body, g);
21126 611 : g = gimple_build_omp_taskgroup (body, *pclauses);
21127 611 : gimplify_seq_add_stmt (pre_p, g);
21128 611 : ret = GS_ALL_DONE;
21129 611 : break;
21130 : }
21131 :
21132 612 : case OMP_INTEROP:
21133 612 : ret = gimplify_omp_interop (expr_p, pre_p);
21134 612 : break;
21135 10236 : case OMP_ATOMIC:
21136 10236 : case OMP_ATOMIC_READ:
21137 10236 : case OMP_ATOMIC_CAPTURE_OLD:
21138 10236 : case OMP_ATOMIC_CAPTURE_NEW:
21139 10236 : ret = gimplify_omp_atomic (expr_p, pre_p);
21140 10236 : break;
21141 :
21142 852 : case OMP_DISPATCH:
21143 852 : ret = gimplify_omp_dispatch (expr_p, pre_p);
21144 852 : break;
21145 :
21146 174 : case OMP_METADIRECTIVE:
21147 174 : ret = gimplify_omp_metadirective (expr_p, pre_p, post_p,
21148 : gimple_test_f, fallback);
21149 174 : break;
21150 :
21151 36 : case OMP_NEXT_VARIANT:
21152 36 : case OMP_TARGET_DEVICE_MATCHES:
21153 : /* These are placeholders for constants. There's nothing to do with
21154 : them here but we must mark the containing function as needing
21155 : to run the ompdevlow pass to resolve them. Note that
21156 : OMP_TARGET_DEVICE_MATCHES, in particular, may be inserted by
21157 : the front ends. */
21158 36 : cgraph_node::get (cfun->decl)->has_omp_variant_constructs = 1;
21159 36 : ret = GS_ALL_DONE;
21160 36 : break;
21161 :
21162 0 : case OMP_DECLARE_MAPPER:
21163 0 : ret = gimplify_omp_declare_mapper (expr_p);
21164 0 : break;
21165 :
21166 478 : case TRANSACTION_EXPR:
21167 478 : ret = gimplify_transaction (expr_p, pre_p);
21168 478 : break;
21169 :
21170 287958 : case TRUTH_AND_EXPR:
21171 287958 : case TRUTH_OR_EXPR:
21172 287958 : case TRUTH_XOR_EXPR:
21173 287958 : {
21174 287958 : tree orig_type = TREE_TYPE (*expr_p);
21175 287958 : tree new_type, xop0, xop1;
21176 287958 : *expr_p = gimple_boolify (*expr_p);
21177 287958 : new_type = TREE_TYPE (*expr_p);
21178 287958 : if (!useless_type_conversion_p (orig_type, new_type))
21179 : {
21180 1140 : *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
21181 1140 : ret = GS_OK;
21182 1140 : break;
21183 : }
21184 :
21185 : /* Boolified binary truth expressions are semantically equivalent
21186 : to bitwise binary expressions. Canonicalize them to the
21187 : bitwise variant. */
21188 286818 : switch (TREE_CODE (*expr_p))
21189 : {
21190 170487 : case TRUTH_AND_EXPR:
21191 170487 : TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
21192 170487 : break;
21193 115742 : case TRUTH_OR_EXPR:
21194 115742 : TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
21195 115742 : break;
21196 589 : case TRUTH_XOR_EXPR:
21197 589 : TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
21198 589 : break;
21199 : default:
21200 : break;
21201 : }
21202 : /* Now make sure that operands have compatible type to
21203 : expression's new_type. */
21204 286818 : xop0 = TREE_OPERAND (*expr_p, 0);
21205 286818 : xop1 = TREE_OPERAND (*expr_p, 1);
21206 286818 : if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
21207 326 : TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
21208 : new_type,
21209 : xop0);
21210 286818 : if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
21211 366 : TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
21212 : new_type,
21213 : xop1);
21214 : /* Continue classified as tcc_binary. */
21215 286818 : goto expr_2;
21216 : }
21217 :
21218 9561 : case VEC_COND_EXPR:
21219 9561 : goto expr_3;
21220 :
21221 132768 : case VEC_PERM_EXPR:
21222 : /* Classified as tcc_expression. */
21223 132768 : goto expr_3;
21224 :
21225 48 : case BIT_INSERT_EXPR:
21226 : /* Argument 3 is a constant. */
21227 48 : goto expr_2;
21228 :
21229 3861731 : case POINTER_PLUS_EXPR:
21230 3861731 : {
21231 3861731 : enum gimplify_status r0, r1;
21232 3861731 : r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
21233 : post_p, is_gimple_val, fb_rvalue);
21234 3861731 : r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
21235 : post_p, is_gimple_val, fb_rvalue);
21236 3861731 : recalculate_side_effects (*expr_p);
21237 3861731 : ret = MIN (r0, r1);
21238 : break;
21239 : }
21240 :
21241 44179767 : default:
21242 44179767 : switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
21243 : {
21244 6947088 : case tcc_comparison:
21245 : /* Handle comparison of objects of non scalar mode aggregates
21246 : with a call to memcmp. It would be nice to only have to do
21247 : this for variable-sized objects, but then we'd have to allow
21248 : the same nest of reference nodes we allow for MODIFY_EXPR and
21249 : that's too complex.
21250 :
21251 : Compare scalar mode aggregates as scalar mode values. Using
21252 : memcmp for them would be very inefficient at best, and is
21253 : plain wrong if bitfields are involved. */
21254 6947088 : if (error_operand_p (TREE_OPERAND (*expr_p, 1)))
21255 : ret = GS_ERROR;
21256 : else
21257 : {
21258 6947088 : tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
21259 :
21260 : /* Vector comparisons need no boolification. */
21261 6947088 : if (TREE_CODE (type) == VECTOR_TYPE)
21262 11640 : goto expr_2;
21263 6935448 : else if (!AGGREGATE_TYPE_P (type))
21264 : {
21265 6935432 : tree org_type = TREE_TYPE (*expr_p);
21266 6935432 : *expr_p = gimple_boolify (*expr_p);
21267 6935432 : if (!useless_type_conversion_p (org_type,
21268 6935432 : TREE_TYPE (*expr_p)))
21269 : {
21270 55684 : *expr_p = fold_convert_loc (input_location,
21271 : org_type, *expr_p);
21272 55684 : ret = GS_OK;
21273 : }
21274 : else
21275 6879748 : goto expr_2;
21276 : }
21277 16 : else if (SCALAR_INT_MODE_P (TYPE_MODE (type)))
21278 16 : ret = gimplify_scalar_mode_aggregate_compare (expr_p);
21279 : else
21280 0 : ret = gimplify_variable_sized_compare (expr_p);
21281 : }
21282 : break;
21283 :
21284 : /* If *EXPR_P does not need to be special-cased, handle it
21285 : according to its class. */
21286 1599914 : case tcc_unary:
21287 1599914 : ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
21288 : post_p, is_gimple_val, fb_rvalue);
21289 1599914 : break;
21290 :
21291 26304009 : case tcc_binary:
21292 26304009 : expr_2:
21293 26304009 : {
21294 26304009 : enum gimplify_status r0, r1;
21295 :
21296 26304009 : r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
21297 : post_p, is_gimple_val, fb_rvalue);
21298 26304009 : r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
21299 : post_p, is_gimple_val, fb_rvalue);
21300 :
21301 26304009 : ret = MIN (r0, r1);
21302 : break;
21303 : }
21304 :
21305 142329 : expr_3:
21306 142329 : {
21307 142329 : enum gimplify_status r0, r1, r2;
21308 :
21309 142329 : r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
21310 : post_p, is_gimple_val, fb_rvalue);
21311 142329 : r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
21312 : post_p, is_gimple_val, fb_rvalue);
21313 142329 : r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
21314 : post_p, is_gimple_val, fb_rvalue);
21315 :
21316 142329 : ret = MIN (MIN (r0, r1), r2);
21317 : break;
21318 : }
21319 :
21320 16507010 : case tcc_declaration:
21321 16507010 : case tcc_constant:
21322 16507010 : ret = GS_ALL_DONE;
21323 16507010 : goto dont_recalculate;
21324 :
21325 0 : default:
21326 0 : gcc_unreachable ();
21327 : }
21328 :
21329 28101952 : recalculate_side_effects (*expr_p);
21330 :
21331 475997290 : dont_recalculate:
21332 : break;
21333 : }
21334 :
21335 475997290 : gcc_assert (*expr_p || ret != GS_OK);
21336 : }
21337 480689215 : while (ret == GS_OK);
21338 :
21339 : /* If we encountered an error_mark somewhere nested inside, either
21340 : stub out the statement or propagate the error back out. */
21341 463200008 : if (ret == GS_ERROR)
21342 : {
21343 5194 : if (is_statement)
21344 3979 : *expr_p = NULL;
21345 5194 : goto out;
21346 : }
21347 :
21348 : /* This was only valid as a return value from the langhook, which
21349 : we handled. Make sure it doesn't escape from any other context. */
21350 463194814 : gcc_assert (ret != GS_UNHANDLED);
21351 :
21352 463194814 : if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
21353 : {
21354 : /* We aren't looking for a value, and we don't have a valid
21355 : statement. If it doesn't have side-effects, throw it away.
21356 : We can also get here with code such as "*&&L;", where L is
21357 : a LABEL_DECL that is marked as FORCED_LABEL. */
21358 1166195 : if (TREE_CODE (*expr_p) == LABEL_DECL
21359 1166195 : || !TREE_SIDE_EFFECTS (*expr_p))
21360 1165179 : *expr_p = NULL;
21361 1016 : else if (!TREE_THIS_VOLATILE (*expr_p))
21362 : {
21363 : /* This is probably a _REF that contains something nested that
21364 : has side effects. Recurse through the operands to find it. */
21365 0 : enum tree_code code = TREE_CODE (*expr_p);
21366 :
21367 0 : switch (code)
21368 : {
21369 0 : case COMPONENT_REF:
21370 0 : case REALPART_EXPR:
21371 0 : case IMAGPART_EXPR:
21372 0 : case VIEW_CONVERT_EXPR:
21373 0 : gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
21374 : gimple_test_f, fallback);
21375 0 : break;
21376 :
21377 0 : case ARRAY_REF:
21378 0 : case ARRAY_RANGE_REF:
21379 0 : gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
21380 : gimple_test_f, fallback);
21381 0 : gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
21382 : gimple_test_f, fallback);
21383 0 : break;
21384 :
21385 0 : default:
21386 : /* Anything else with side-effects must be converted to
21387 : a valid statement before we get here. */
21388 0 : gcc_unreachable ();
21389 : }
21390 :
21391 0 : *expr_p = NULL;
21392 : }
21393 1016 : else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
21394 966 : && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode
21395 1969 : && !is_empty_type (TREE_TYPE (*expr_p)))
21396 : {
21397 : /* Historically, the compiler has treated a bare reference
21398 : to a non-BLKmode volatile lvalue as forcing a load. */
21399 893 : tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
21400 :
21401 : /* Normally, we do not want to create a temporary for a
21402 : TREE_ADDRESSABLE type because such a type should not be
21403 : copied by bitwise-assignment. However, we make an
21404 : exception here, as all we are doing here is ensuring that
21405 : we read the bytes that make up the type. We use
21406 : create_tmp_var_raw because create_tmp_var will abort when
21407 : given a TREE_ADDRESSABLE type. */
21408 893 : tree tmp = create_tmp_var_raw (type, "vol");
21409 893 : gimple_add_tmp_var (tmp);
21410 893 : gimplify_assign (tmp, *expr_p, pre_p);
21411 893 : *expr_p = NULL;
21412 : }
21413 : else
21414 : /* We can't do anything useful with a volatile reference to
21415 : an incomplete type, so just throw it away. Likewise for
21416 : a BLKmode type, since any implicit inner load should
21417 : already have been turned into an explicit one by the
21418 : gimplification process. */
21419 123 : *expr_p = NULL;
21420 : }
21421 :
21422 : /* If we are gimplifying at the statement level, we're done. Tack
21423 : everything together and return. */
21424 463194814 : if (fallback == fb_none || is_statement)
21425 : {
21426 : /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
21427 : it out for GC to reclaim it. */
21428 100526532 : *expr_p = NULL_TREE;
21429 :
21430 100526532 : if (!gimple_seq_empty_p (internal_pre)
21431 100526532 : || !gimple_seq_empty_p (internal_post))
21432 : {
21433 19 : gimplify_seq_add_seq (&internal_pre, internal_post);
21434 19 : gimplify_seq_add_seq (pre_p, internal_pre);
21435 : }
21436 :
21437 : /* The result of gimplifying *EXPR_P is going to be the last few
21438 : statements in *PRE_P and *POST_P. Add location information
21439 : to all the statements that were added by the gimplification
21440 : helpers. */
21441 100526532 : if (!gimple_seq_empty_p (*pre_p))
21442 98083246 : annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
21443 :
21444 100526532 : if (!gimple_seq_empty_p (*post_p))
21445 19 : annotate_all_with_location_after (*post_p, post_last_gsi,
21446 : input_location);
21447 :
21448 100526532 : goto out;
21449 : }
21450 :
21451 : #ifdef ENABLE_GIMPLE_CHECKING
21452 362668282 : if (*expr_p)
21453 : {
21454 362668282 : enum tree_code code = TREE_CODE (*expr_p);
21455 : /* These expressions should already be in gimple IR form. */
21456 362668282 : gcc_assert (code != MODIFY_EXPR
21457 : && code != ASM_EXPR
21458 : && code != BIND_EXPR
21459 : && code != CATCH_EXPR
21460 : && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
21461 : && code != EH_FILTER_EXPR
21462 : && code != GOTO_EXPR
21463 : && code != LABEL_EXPR
21464 : && code != LOOP_EXPR
21465 : && code != SWITCH_EXPR
21466 : && code != TRY_FINALLY_EXPR
21467 : && code != EH_ELSE_EXPR
21468 : && code != OACC_PARALLEL
21469 : && code != OACC_KERNELS
21470 : && code != OACC_SERIAL
21471 : && code != OACC_DATA
21472 : && code != OACC_HOST_DATA
21473 : && code != OACC_DECLARE
21474 : && code != OACC_UPDATE
21475 : && code != OACC_ENTER_DATA
21476 : && code != OACC_EXIT_DATA
21477 : && code != OACC_CACHE
21478 : && code != OMP_CRITICAL
21479 : && code != OMP_FOR
21480 : && code != OACC_LOOP
21481 : && code != OMP_MASTER
21482 : && code != OMP_MASKED
21483 : && code != OMP_TASKGROUP
21484 : && code != OMP_ORDERED
21485 : && code != OMP_PARALLEL
21486 : && code != OMP_SCAN
21487 : && code != OMP_SECTIONS
21488 : && code != OMP_SECTION
21489 : && code != OMP_STRUCTURED_BLOCK
21490 : && code != OMP_SINGLE
21491 : && code != OMP_SCOPE
21492 : && code != OMP_DISPATCH);
21493 : }
21494 : #endif
21495 :
21496 : /* Otherwise we're gimplifying a subexpression, so the resulting
21497 : value is interesting. If it's a valid operand that matches
21498 : GIMPLE_TEST_F, we're done. Unless we are handling some
21499 : post-effects internally; if that's the case, we need to copy into
21500 : a temporary before adding the post-effects to POST_P. */
21501 362668282 : if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
21502 336752522 : goto out;
21503 :
21504 : /* Otherwise, we need to create a new temporary for the gimplified
21505 : expression. */
21506 :
21507 : /* We can't return an lvalue if we have an internal postqueue. The
21508 : object the lvalue refers to would (probably) be modified by the
21509 : postqueue; we need to copy the value out first, which means an
21510 : rvalue. */
21511 25915760 : if ((fallback & fb_lvalue)
21512 571961 : && gimple_seq_empty_p (internal_post)
21513 26487721 : && is_gimple_addressable (*expr_p))
21514 : {
21515 : /* An lvalue will do. Take the address of the expression, store it
21516 : in a temporary, and replace the expression with an INDIRECT_REF of
21517 : that temporary. */
21518 15 : tree ref_alias_type = reference_alias_ptr_type (*expr_p);
21519 15 : unsigned int ref_align = get_object_alignment (*expr_p);
21520 15 : tree ref_type = TREE_TYPE (*expr_p);
21521 15 : tmp = build_fold_addr_expr_loc (input_location, *expr_p);
21522 15 : gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
21523 15 : if (TYPE_ALIGN (ref_type) != ref_align)
21524 1 : ref_type = build_aligned_type (ref_type, ref_align);
21525 15 : *expr_p = build2 (MEM_REF, ref_type,
21526 : tmp, build_zero_cst (ref_alias_type));
21527 : }
21528 25915745 : else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
21529 : {
21530 : /* An rvalue will do. Assign the gimplified expression into a
21531 : new temporary TMP and replace the original expression with
21532 : TMP. First, make sure that the expression has a type so that
21533 : it can be assigned into a temporary. */
21534 25915737 : gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
21535 25915737 : *expr_p = get_formal_tmp_var (*expr_p, pre_p);
21536 : }
21537 : else
21538 : {
21539 : #ifdef ENABLE_GIMPLE_CHECKING
21540 8 : if (!(fallback & fb_mayfail))
21541 : {
21542 0 : fprintf (stderr, "gimplification failed:\n");
21543 0 : print_generic_expr (stderr, *expr_p);
21544 0 : debug_tree (*expr_p);
21545 0 : internal_error ("gimplification failed");
21546 : }
21547 : #endif
21548 8 : gcc_assert (fallback & fb_mayfail);
21549 :
21550 : /* If this is an asm statement, and the user asked for the
21551 : impossible, don't die. Fail and let gimplify_asm_expr
21552 : issue an error. */
21553 8 : ret = GS_ERROR;
21554 8 : goto out;
21555 : }
21556 :
21557 : /* Make sure the temporary matches our predicate. */
21558 25915752 : gcc_assert ((*gimple_test_f) (*expr_p));
21559 :
21560 25915752 : if (!gimple_seq_empty_p (internal_post))
21561 : {
21562 0 : annotate_all_with_location (internal_post, input_location);
21563 0 : gimplify_seq_add_seq (pre_p, internal_post);
21564 : }
21565 :
21566 25915752 : out:
21567 463200008 : input_location = saved_location;
21568 463200008 : return ret;
21569 : }
21570 :
21571 : /* Like gimplify_expr but make sure the gimplified result is not itself
21572 : a SSA name (but a decl if it were). Temporaries required by
21573 : evaluating *EXPR_P may be still SSA names. */
21574 :
21575 : static enum gimplify_status
21576 33522190 : gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
21577 : bool (*gimple_test_f) (tree), fallback_t fallback,
21578 : bool allow_ssa)
21579 : {
21580 33522190 : enum gimplify_status ret = gimplify_expr (expr_p, pre_p, post_p,
21581 : gimple_test_f, fallback);
21582 33522190 : if (! allow_ssa
21583 265888 : && TREE_CODE (*expr_p) == SSA_NAME)
 : /* The caller cannot accept an SSA name as the result (e.g. it is
 : going to be stored into a decl or type field, where the name could
 : be reclaimed if its definition vanishes); copy it into a fresh
 : temporary whose initialization is appended to *PRE_P. */
21584 68406 : *expr_p = get_initialized_tmp_var (*expr_p, pre_p, NULL, false);
21585 33522190 : return ret;
21586 : }
21587 :
21588 : /* Look through TYPE for variable-sized objects and gimplify each such
21589 : size that we find. Add to LIST_P any statements generated. */
21590 :
21591 : void
21592 8069997 : gimplify_type_sizes (tree type, gimple_seq *list_p)
21593 : {
21594 8069997 : if (type == NULL || type == error_mark_node)
21595 : return;
21596 :
 : /* Whether TYPE is named by a compiler-internal (DECL_IGNORED_P)
 : TYPE_DECL; for such types the artificial VLA-bound and field-offset
 : variables need not be kept visible for debugging (see the
 : DECL_IGNORED_P resets below). */
21597 8069751 : const bool ignored_p
21598 8069751 : = TYPE_NAME (type)
21599 4021345 : && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
21600 11872479 : && DECL_IGNORED_P (TYPE_NAME (type));
21601 8069751 : tree t;
21602 :
21603 : /* We first do the main variant, then copy into any other variants. */
21604 8069751 : type = TYPE_MAIN_VARIANT (type);
21605 :
21606 : /* Avoid infinite recursion. */
21607 8069751 : if (TYPE_SIZES_GIMPLIFIED (type))
21608 : return;
21609 :
21610 2414065 : TYPE_SIZES_GIMPLIFIED (type) = 1;
21611 :
21612 2414065 : switch (TREE_CODE (type))
21613 : {
21614 486208 : case INTEGER_TYPE:
21615 486208 : case ENUMERAL_TYPE:
21616 486208 : case BOOLEAN_TYPE:
21617 486208 : case REAL_TYPE:
21618 486208 : case FIXED_POINT_TYPE:
21619 486208 : gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
21620 486208 : gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
21621 :
 : /* Propagate the gimplified bounds from the main variant to every
 : other variant of the type. */
21622 10173195 : for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
21623 : {
21624 9686987 : TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
21625 9686987 : TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
21626 : }
21627 : break;
21628 :
21629 178299 : case ARRAY_TYPE:
21630 : /* These types may not have declarations, so handle them here. */
21631 178299 : gimplify_type_sizes (TREE_TYPE (type), list_p);
21632 178299 : gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
21633 : /* Ensure VLA bounds aren't removed, for -O0 they should be variables
21634 : with assigned stack slots, for -O1+ -g they should be tracked
21635 : by VTA. */
21636 178299 : if (!ignored_p
21637 178299 : && TYPE_DOMAIN (type)
21638 356373 : && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
21639 : {
21640 178074 : t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
21641 178074 : if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
21642 0 : DECL_IGNORED_P (t) = 0;
21643 178074 : t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
21644 178074 : if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
21645 18941 : DECL_IGNORED_P (t) = 0;
21646 : }
21647 : break;
21648 :
21649 478265 : case RECORD_TYPE:
21650 478265 : case UNION_TYPE:
21651 478265 : case QUAL_UNION_TYPE:
21652 10212226 : for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
21653 9733961 : if (TREE_CODE (field) == FIELD_DECL)
21654 : {
21655 1056797 : gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
21656 : /* Likewise, ensure variable offsets aren't removed. */
21657 1056797 : if (!ignored_p
21658 1056797 : && (t = DECL_FIELD_OFFSET (field))
21659 1056785 : && VAR_P (t)
21660 1056959 : && DECL_ARTIFICIAL (t))
21661 162 : DECL_IGNORED_P (t) = 0;
21662 1056797 : gimplify_one_sizepos (&DECL_SIZE (field), list_p);
21663 1056797 : gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
21664 1056797 : gimplify_type_sizes (TREE_TYPE (field), list_p);
21665 : }
21666 : break;
21667 :
21668 : case POINTER_TYPE:
21669 : case REFERENCE_TYPE:
21670 : /* We used to recurse on the pointed-to type here, which turned out to
21671 : be incorrect because its definition might refer to variables not
21672 : yet initialized at this point if a forward declaration is involved.
21673 :
21674 : It was actually useful for anonymous pointed-to types to ensure
21675 : that the sizes evaluation dominates every possible later use of the
21676 : values. Restricting to such types here would be safe since there
21677 : is no possible forward declaration around, but would introduce an
21678 : undesirable middle-end semantic to anonymity. We then defer to
21679 : front-ends the responsibility of ensuring that the sizes are
21680 : evaluated both early and late enough, e.g. by attaching artificial
21681 : type declarations to the tree. */
21682 : break;
21683 :
21684 : default:
21685 : break;
21686 : }
21687 :
21688 2414065 : gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
21689 2414065 : gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
21690 :
 : /* Copy the gimplified sizes into all other variants and mark them as
 : done, so the recursion guard above skips them. */
21691 16538803 : for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
21692 : {
21693 14124738 : TYPE_SIZE (t) = TYPE_SIZE (type);
21694 14124738 : TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
21695 14124738 : TYPE_SIZES_GIMPLIFIED (t) = 1;
21696 : }
21697 : }
21698 :
21699 : /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
21700 : a size or position, has had all of its SAVE_EXPRs evaluated.
21701 : We add any required statements to *STMT_P. */
21702 :
21703 : void
21704 8989003 : gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
21705 : {
21706 8989003 : tree expr = *expr_p;
21707 :
21708 : /* We don't do anything if the value isn't there, is constant, or contains
21709 : A PLACEHOLDER_EXPR. We also don't want to do anything if it's already
21710 : a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
21711 : will want to replace it with a new variable, but that will cause problems
21712 : if this type is from outside the function. It's OK to have that here. */
21713 8989003 : if (expr == NULL_TREE
21714 8992871 : || is_gimple_constant (expr)
21715 80592 : || VAR_P (expr)
21716 9065727 : || CONTAINS_PLACEHOLDER_P (expr))
21717 8912279 : return;
21718 :
 : /* Unshare before gimplifying: the size/position expression may be
 : shared between several types or decls, and gimplification rewrites
 : it in place. */
21719 76724 : *expr_p = unshare_expr (expr);
21720 :
21721 : /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
21722 : if the def vanishes. */
21723 76724 : gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue, false);
21724 :
21725 : /* If expr wasn't already is_gimple_sizepos or is_gimple_constant from the
21726 : FE, ensure that it is a VAR_DECL, otherwise we might handle some decls
21727 : as gimplify_vla_decl even when they would have all sizes INTEGER_CSTs. */
21728 76724 : if (is_gimple_constant (*expr_p))
21729 548 : *expr_p = get_initialized_tmp_var (*expr_p, stmt_p, NULL, false);
21730 : }
21731 :
21732 : /* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
21733 : containing the sequence of corresponding GIMPLE statements. If DO_PARMS
21734 : is true, also gimplify the parameters. */
21735 :
21736 : gbind *
21737 2870254 : gimplify_body (tree fndecl, bool do_parms)
21738 : {
21739 2870254 : location_t saved_location = input_location;
21740 2870254 : gimple_seq parm_stmts, parm_cleanup = NULL, seq;
21741 2870254 : gimple *outer_stmt;
21742 2870254 : gbind *outer_bind;
21743 :
21744 2870254 : timevar_push (TV_TREE_GIMPLIFY);
21745 :
21746 2870254 : init_tree_ssa (cfun);
21747 :
21748 : /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
21749 : gimplification. */
21750 2870254 : default_rtl_profile ();
21751 :
21752 2870254 : gcc_assert (gimplify_ctxp == NULL);
21753 2870254 : push_gimplify_context (true);
21754 :
 : /* For "omp declare target" functions, set up an implicit-target OMP
 : context so the body is gimplified as if inside a target region. */
21755 2870254 : if (flag_openacc || flag_openmp)
21756 : {
21757 54317 : gcc_assert (gimplify_omp_ctxp == NULL);
21758 54317 : if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
21759 9651 : gimplify_omp_ctxp = new_omp_context (ORT_IMPLICIT_TARGET);
21760 : }
21761 :
21762 : /* Unshare most shared trees in the body and in that of any nested functions.
21763 : It would seem we don't have to do this for nested functions because
21764 : they are supposed to be output and then the outer function gimplified
21765 : first, but the g++ front end doesn't always do it that way. */
21766 2870254 : unshare_body (fndecl);
21767 2870254 : unvisit_body (fndecl);
21768 :
21769 : /* Make sure input_location isn't set to something weird. */
21770 2870254 : input_location = DECL_SOURCE_LOCATION (fndecl);
21771 :
21772 : /* Resolve callee-copies. This has to be done before processing
21773 : the body so that DECL_VALUE_EXPR gets processed correctly. */
21774 2870254 : parm_stmts = do_parms ? gimplify_parameters (&parm_cleanup) : NULL;
21775 :
21776 : /* Gimplify the function's body. */
21777 2870254 : seq = NULL;
21778 2870254 : gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
21779 2870254 : outer_stmt = gimple_seq_first_nondebug_stmt (seq);
21780 2870254 : if (!outer_stmt)
21781 : {
 : /* An entirely empty body still needs one statement to hang the
 : outer GIMPLE_BIND on. */
21782 25910 : outer_stmt = gimple_build_nop ();
21783 25910 : gimplify_seq_add_stmt (&seq, outer_stmt);
21784 : }
21785 :
21786 : /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
21787 : not the case, wrap everything in a GIMPLE_BIND to make it so. */
21788 2870254 : if (gimple_code (outer_stmt) == GIMPLE_BIND
21789 2870254 : && (gimple_seq_first_nondebug_stmt (seq)
21790 1670244 : == gimple_seq_last_nondebug_stmt (seq)))
21791 : {
21792 1606947 : outer_bind = as_a <gbind *> (outer_stmt);
21793 1606947 : if (gimple_seq_first_stmt (seq) != outer_stmt
21794 1606947 : || gimple_seq_last_stmt (seq) != outer_stmt)
21795 : {
21796 : /* If there are debug stmts before or after outer_stmt, move them
21797 : inside of outer_bind body. */
21798 1 : gimple_stmt_iterator gsi = gsi_for_stmt (outer_stmt, &seq);
21799 1 : gimple_seq second_seq = NULL;
21800 1 : if (gimple_seq_first_stmt (seq) != outer_stmt
21801 2 : && gimple_seq_last_stmt (seq) != outer_stmt)
21802 : {
 : /* Debug stmts both before and after: split around the bind. */
21803 0 : second_seq = gsi_split_seq_after (gsi);
21804 0 : gsi_remove (&gsi, false);
21805 : }
21806 1 : else if (gimple_seq_first_stmt (seq) != outer_stmt)
21807 1 : gsi_remove (&gsi, false);
21808 : else
21809 : {
21810 0 : gsi_remove (&gsi, false);
21811 0 : second_seq = seq;
21812 0 : seq = NULL;
21813 : }
21814 1 : gimple_seq_add_seq_without_update (&seq,
21815 : gimple_bind_body (outer_bind));
21816 1 : gimple_seq_add_seq_without_update (&seq, second_seq);
21817 1 : gimple_bind_set_body (outer_bind, seq);
21818 : }
21819 : }
21820 : else
21821 1263307 : outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
21822 :
 : /* The GENERIC body has been consumed; drop it so it can be GC'ed. */
21823 2870254 : DECL_SAVED_TREE (fndecl) = NULL_TREE;
21824 :
21825 : /* If we had callee-copies statements, insert them at the beginning
21826 : of the function and clear DECL_HAS_VALUE_EXPR_P on the parameters. */
21827 2870254 : if (!gimple_seq_empty_p (parm_stmts))
21828 : {
21829 41 : tree parm;
21830 :
21831 41 : gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
21832 41 : if (parm_cleanup)
21833 : {
21834 0 : gtry *g = gimple_build_try (parm_stmts, parm_cleanup,
21835 : GIMPLE_TRY_FINALLY);
21836 0 : parm_stmts = NULL;
21837 0 : gimple_seq_add_stmt (&parm_stmts, g);
21838 : }
21839 41 : gimple_bind_set_body (outer_bind, parm_stmts);
21840 :
21841 41 : for (parm = DECL_ARGUMENTS (current_function_decl);
21842 101 : parm; parm = DECL_CHAIN (parm))
21843 60 : if (DECL_HAS_VALUE_EXPR_P (parm))
21844 : {
21845 0 : DECL_HAS_VALUE_EXPR_P (parm) = 0;
21846 0 : DECL_IGNORED_P (parm) = 0;
21847 : }
21848 : }
21849 :
 : /* Tear down any OMP context created above (or during gimplification). */
21850 2870254 : if ((flag_openacc || flag_openmp || flag_openmp_simd)
21851 56573 : && gimplify_omp_ctxp)
21852 : {
21853 9704 : delete_omp_context (gimplify_omp_ctxp);
21854 9704 : gimplify_omp_ctxp = NULL;
21855 : }
21856 :
21857 2870254 : pop_gimplify_context (outer_bind);
21858 2870254 : gcc_assert (gimplify_ctxp == NULL);
21859 :
 : /* With checking enabled, verify the produced GIMPLE is well formed,
 : unless errors have already been reported. */
21860 2870254 : if (flag_checking && !seen_error ())
21861 2821770 : verify_gimple_in_seq (gimple_bind_body (outer_bind));
21862 :
21863 2870254 : timevar_pop (TV_TREE_GIMPLIFY);
21864 2870254 : input_location = saved_location;
21865 :
21866 2870254 : return outer_bind;
21867 : }
21868 :
21869 : typedef char *char_p; /* For DEF_VEC_P. */
21870 :
21871 : /* Return whether we should exclude FNDECL from instrumentation. */
21872 :
21873 : static bool
21874 42 : flag_instrument_functions_exclude_p (tree fndecl)
21875 : {
21876 42 : vec<char_p> *v;
21877 :
 : /* Exclusion by function name: exclude when any entry of the
 : flag_instrument_functions_exclude_functions list is a substring
 : (strstr) of FNDECL's printable name. */
21878 42 : v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
21879 43 : if (v && v->length () > 0)
21880 : {
21881 1 : const char *name;
21882 1 : int i;
21883 1 : char *s;
21884 :
21885 1 : name = lang_hooks.decl_printable_name (fndecl, 1);
21886 2 : FOR_EACH_VEC_ELT (*v, i, s)
21887 1 : if (strstr (name, s) != NULL)
21888 2 : return true;
21889 : }
21890 :
 : /* Exclusion by source file: likewise, matched as a substring of the
 : declaration's source file name. */
21891 41 : v = (vec<char_p> *) flag_instrument_functions_exclude_files;
21892 42 : if (v && v->length () > 0)
21893 : {
21894 1 : const char *name;
21895 1 : int i;
21896 1 : char *s;
21897 :
21898 1 : name = DECL_SOURCE_FILE (fndecl);
21899 1 : FOR_EACH_VEC_ELT (*v, i, s)
21900 1 : if (strstr (name, s) != NULL)
21901 2 : return true;
21902 : }
21903 :
21904 : return false;
21905 : }
21906 :
21907 : /* Build a call to the instrumentation function FNCODE and add it to SEQ.
21908 : If COND_VAR is not NULL, it is a boolean variable guarding the call to
21909 : the instrumentation function. IF STMT is not NULL, it is a statement
21910 : to be executed just before the call to the instrumentation function. */
21911 :
21912 : static void
21913 80 : build_instrumentation_call (gimple_seq *seq, enum built_in_function fncode,
21914 : tree cond_var, gimple *stmt)
21915 : {
21916 : /* The instrumentation hooks aren't going to call the instrumented
21917 : function and the address they receive is expected to be matchable
21918 : against symbol addresses. Make sure we don't create a trampoline,
21919 : in case the current function is nested. */
21920 80 : tree this_fn_addr = build_fold_addr_expr (current_function_decl);
21921 80 : TREE_NO_TRAMPOLINE (this_fn_addr) = 1;
21922 :
21923 80 : tree label_true, label_false;
21924 80 : if (cond_var)
21925 : {
 : /* Guard the hook call on COND_VAR == false, and predict the
 : guarded (instrumenting) branch as cold. */
21926 20 : label_true = create_artificial_label (UNKNOWN_LOCATION);
21927 20 : label_false = create_artificial_label (UNKNOWN_LOCATION);
21928 20 : gcond *cond = gimple_build_cond (EQ_EXPR, cond_var, boolean_false_node,
21929 : label_true, label_false);
21930 20 : gimplify_seq_add_stmt (seq, cond);
21931 20 : gimplify_seq_add_stmt (seq, gimple_build_label (label_true));
21932 20 : gimplify_seq_add_stmt (seq, gimple_build_predict (PRED_COLD_LABEL,
21933 : NOT_TAKEN));
21934 : }
21935 :
21936 80 : if (stmt)
21937 10 : gimplify_seq_add_stmt (seq, stmt);
21938 :
 : /* Emit: tmp = __builtin_return_address (0); FNCODE (this_fn, tmp);
 : i.e. the hook receives the function's address and the call site. */
21939 80 : tree x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
21940 80 : gcall *call = gimple_build_call (x, 1, integer_zero_node);
21941 80 : tree tmp_var = create_tmp_var (ptr_type_node, "return_addr");
21942 80 : gimple_call_set_lhs (call, tmp_var);
21943 80 : gimplify_seq_add_stmt (seq, call);
21944 80 : x = builtin_decl_implicit (fncode);
21945 80 : call = gimple_build_call (x, 2, this_fn_addr, tmp_var);
21946 80 : gimplify_seq_add_stmt (seq, call);
21947 :
21948 80 : if (cond_var)
21949 20 : gimplify_seq_add_stmt (seq, gimple_build_label (label_false));
21950 80 : }
21951 :
/* Entry point to the gimplification pass.  FNDECL is the FUNCTION_DECL
   node for the function we want to gimplify.

   The resulting sequence of GIMPLE statements is not returned; it is
   installed as the GIMPLE body of FNDECL via gimple_set_body, and the
   GENERIC body (DECL_SAVED_TREE) is cleared.  */

void
gimplify_function_tree (tree fndecl)
{
  gimple_seq seq;
  gbind *bind;

  /* FNDECL must not have been gimplified already.  */
  gcc_assert (!gimple_body (fndecl));

  /* Make FNDECL's struct function the current one; allocate it first
     if the front end has not done so.  */
  if (DECL_STRUCT_FUNCTION (fndecl))
    push_cfun (DECL_STRUCT_FUNCTION (fndecl));
  else
    push_struct_function (fndecl);

  /* Restart condition-UID numbering for this function and drop any
     condition->UID map left over from a previously gimplified one.  */
  reset_cond_uid ();
  if (cond_uids)
    {
      delete cond_uids;
      cond_uids = NULL;
    }

  /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
     if necessary.  */
  cfun->curr_properties |= PROP_gimple_lva;

  /* Per-function scratch sets used during gimplification; torn down
     again right after gimplify_body below.  */
  if (asan_sanitize_use_after_scope ())
    asan_poisoned_variables = new hash_set<tree> ();
  if (flag_openmp)
    omp_resolved_variant_calls = new hash_set<tree> ();

  /* Gimplify the whole body into a single GIMPLE_BIND.  */
  bind = gimplify_body (fndecl, true);

  if (omp_resolved_variant_calls)
    {
      delete omp_resolved_variant_calls;
      omp_resolved_variant_calls = NULL;
    }
  if (asan_poisoned_variables)
    {
      delete asan_poisoned_variables;
      asan_poisoned_variables = NULL;
    }

  /* The tree body of the function is no longer needed, replace it
     with the new GIMPLE body.  */
  seq = NULL;
  gimple_seq_add_stmt (&seq, bind);
  gimple_set_body (fndecl, seq);

  /* If we're instrumenting function entry/exit, then prepend the call to
     the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
     catch the exit hook.  */
  /* ??? Add some way to ignore exceptions for this TFE.  */
  if (flag_instrument_function_entry_exit
      && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
      /* Do not instrument extern inline functions.  */
      && !(DECL_DECLARED_INLINE_P (fndecl)
	   && DECL_EXTERNAL (fndecl)
	   && DECL_DISREGARD_INLINE_LIMITS (fndecl))
      && !flag_instrument_functions_exclude_p (fndecl))
    {
      gimple_seq body = NULL, cleanup = NULL;
      gassign *assign;
      tree cond_var;

      /* If -finstrument-functions-once is specified, generate:

	   static volatile bool C.0 = false;
	   bool tmp_called;

	   tmp_called = C.0;
	   if (!tmp_called)
	     {
	       C.0 = true;
	       [call profiling enter function]
	     }

	 without specific protection for data races.  */
      if (flag_instrument_function_entry_exit > 1)
	{
	  /* C.0 above: a function-local static flag recording whether
	     the entry hook already ran.  */
	  tree first_var
	    = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
			  VAR_DECL,
			  create_tmp_var_name ("C"),
			  boolean_type_node);
	  DECL_ARTIFICIAL (first_var) = 1;
	  DECL_IGNORED_P (first_var) = 1;
	  TREE_STATIC (first_var) = 1;
	  TREE_THIS_VOLATILE (first_var) = 1;
	  TREE_USED (first_var) = 1;
	  DECL_INITIAL (first_var) = boolean_false_node;
	  varpool_node::add (first_var);

	  /* tmp_called = C.0;  */
	  cond_var = create_tmp_var (boolean_type_node, "tmp_called");
	  assign = gimple_build_assign (cond_var, first_var);
	  gimplify_seq_add_stmt (&body, assign);

	  /* C.0 = true; emitted inside the guarded region by
	     build_instrumentation_call below.  */
	  assign = gimple_build_assign (first_var, boolean_true_node);
	}

      else
	{
	  cond_var = NULL_TREE;
	  assign = NULL;
	}

      build_instrumentation_call (&body, BUILT_IN_PROFILE_FUNC_ENTER,
				  cond_var, assign);

      /* If -finstrument-functions-once is specified, generate:

	   if (!tmp_called)
	     [call profiling exit function]

	 without specific protection for data races.  */
      build_instrumentation_call (&cleanup, BUILT_IN_PROFILE_FUNC_EXIT,
				  cond_var, NULL);

      gimple *tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
      gimplify_seq_add_stmt (&body, tf);
      gbind *new_bind = gimple_build_bind (NULL, body, NULL);

      /* Replace the current function body with the body
	 wrapped in the try/finally TF.  */
      seq = NULL;
      gimple_seq_add_stmt (&seq, new_bind);
      gimple_set_body (fndecl, seq);
      bind = new_bind;
    }

  /* For -fsanitize=thread, similarly wrap the body so IFN_TSAN_FUNC_EXIT
     runs on every exit path (including exceptional ones).  */
  if (sanitize_flags_p (SANITIZE_THREAD)
      && param_tsan_instrument_func_entry_exit)
    {
      gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
      gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
      gbind *new_bind = gimple_build_bind (NULL, tf, NULL);
      /* Replace the current function body with the body
	 wrapped in the try/finally TF.  */
      seq = NULL;
      gimple_seq_add_stmt (&seq, new_bind);
      gimple_set_body (fndecl, seq);
    }

  /* The GENERIC body has been fully converted; drop it and record that
     the function is now in (some form of) GIMPLE.  */
  DECL_SAVED_TREE (fndecl) = NULL_TREE;
  cfun->curr_properties |= PROP_gimple_any;

  pop_cfun ();

  dump_function (TDI_gimple, fndecl);
}
22107 :
22108 : /* Return a dummy expression of type TYPE in order to keep going after an
22109 : error. */
22110 :
22111 : static tree
22112 30 : dummy_object (tree type)
22113 : {
22114 30 : tree t = build_int_cst (build_pointer_type (type), 0);
22115 30 : return build2 (MEM_REF, type, t, t);
22116 : }
22117 :
/* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
   builtin function, but a very special sort of operator.

   *EXPR_P is the VA_ARG_EXPR; side-effect statements are appended to
   *PRE_P (POST_P is unused).  On success *EXPR_P is replaced by an
   IFN_VA_ARG internal call that a later pass expands.  Returns the
   usual gimplify_status: GS_ERROR on a bad va_list, GS_ALL_DONE for
   the diagnosed promoted-type case, GS_OK otherwise.  */

enum gimplify_status
gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
		      gimple_seq *post_p ATTRIBUTE_UNUSED)
{
  tree promoted_type, have_va_type;
  tree valist = TREE_OPERAND (*expr_p, 0);
  tree type = TREE_TYPE (*expr_p);
  tree t, tag, aptag;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Verify that valist is of the proper type.  */
  have_va_type = TREE_TYPE (valist);
  if (have_va_type == error_mark_node)
    return GS_ERROR;
  have_va_type = targetm.canonical_va_list_type (have_va_type);
  if (have_va_type == NULL_TREE
      && POINTER_TYPE_P (TREE_TYPE (valist)))
    /* Handle 'Case 1: Not an array type' from c-common.cc/build_va_arg.  */
    have_va_type
      = targetm.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist)));
  gcc_assert (have_va_type != NULL_TREE);

  /* Generate a diagnostic for requesting data of a type that cannot
     be passed through `...' due to type promotion at the call site.  */
  if ((promoted_type = lang_hooks.types.type_promotes_to (type))
      != type)
    {
      /* Emit the "pass TYPE not PROMOTED_TYPE" help note only once per
	 translation unit, hence the static flag.  */
      static bool gave_help;
      bool warned;
      /* Use the expansion point to handle cases such as passing bool (defined
	 in a system header) through `...'.  */
      location_t xloc
	= expansion_point_location_if_in_system_header (loc);

      /* Unfortunately, this is merely undefined, rather than a constraint
	 violation, so we cannot make this an error.  If this call is never
	 executed, the program is still strictly conforming.  */
      auto_diagnostic_group d;
      warned = warning_at (xloc, 0,
			   "%qT is promoted to %qT when passed through %<...%>",
			   type, promoted_type);
      if (!gave_help && warned)
	{
	  gave_help = true;
	  inform (xloc, "(so you should pass %qT not %qT to %<va_arg%>)",
		  promoted_type, type);
	}

      /* We can, however, treat "undefined" any way we please.
	 Call abort to encourage the user to fix the program.  */
      if (warned)
	inform (xloc, "if this code is reached, the program will abort");
      /* Before the abort, allow the evaluation of the va_list
	 expression to exit or longjmp.  */
      gimplify_and_add (valist, pre_p);
      t = build_call_expr_loc (loc,
			       builtin_decl_implicit (BUILT_IN_TRAP), 0);
      gimplify_and_add (t, pre_p);

      /* This is dead code, but go ahead and finish so that the
	 mode of the result comes out right.  */
      *expr_p = dummy_object (type);
      return GS_ALL_DONE;
    }

  /* TAG carries the requested element type and APTAG the va_list type;
     both are dummy null constants whose types inform IFN_VA_ARG
     expansion later.  */
  tag = build_int_cst (build_pointer_type (type), 0);
  aptag = build_int_cst (TREE_TYPE (valist), 0);

  *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
					  valist, tag, aptag);

  /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
     needs to be expanded.  */
  cfun->curr_properties &= ~PROP_gimple_lva;

  return GS_OK;
}
22198 :
22199 : /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
22200 :
22201 : DST/SRC are the destination and source respectively. You can pass
22202 : ungimplified trees in DST or SRC, in which case they will be
22203 : converted to a gimple operand if necessary.
22204 :
22205 : This function returns the newly created GIMPLE_ASSIGN tuple. */
22206 :
22207 : gimple *
22208 860305 : gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
22209 : {
22210 860305 : tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
22211 860305 : gimplify_and_add (t, seq_p);
22212 860305 : ggc_free (t);
22213 860305 : return gimple_seq_last_stmt (*seq_p);
22214 : }
22215 :
22216 : inline hashval_t
22217 1552575 : gimplify_hasher::hash (const elt_t *p)
22218 : {
22219 1552575 : tree t = p->val;
22220 1552575 : return iterative_hash_expr (t, 0);
22221 : }
22222 :
22223 : inline bool
22224 571021 : gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
22225 : {
22226 571021 : tree t1 = p1->val;
22227 571021 : tree t2 = p2->val;
22228 571021 : enum tree_code code = TREE_CODE (t1);
22229 :
22230 571021 : if (TREE_CODE (t2) != code
22231 571021 : || TREE_TYPE (t1) != TREE_TYPE (t2))
22232 : return false;
22233 :
22234 360836 : if (!operand_equal_p (t1, t2, 0))
22235 : return false;
22236 :
22237 : return true;
22238 : }
|