Line data Source code
1 : /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 : tree representation into the GIMPLE form.
3 : Copyright (C) 2002-2026 Free Software Foundation, Inc.
4 : Major work done by Sebastian Pop <s.pop@laposte.net>,
5 : Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
6 :
7 : This file is part of GCC.
8 :
9 : GCC is free software; you can redistribute it and/or modify it under
10 : the terms of the GNU General Public License as published by the Free
11 : Software Foundation; either version 3, or (at your option) any later
12 : version.
13 :
14 : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 : WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 : for more details.
18 :
19 : You should have received a copy of the GNU General Public License
20 : along with GCC; see the file COPYING3. If not see
21 : <http://www.gnu.org/licenses/>. */
22 :
23 : #include "config.h"
24 : #include "system.h"
25 : #include "coretypes.h"
26 : #include "backend.h"
27 : #include "target.h"
28 : #include "rtl.h"
29 : #include "tree.h"
30 : #include "memmodel.h"
31 : #include "tm_p.h"
32 : #include "gimple.h"
33 : #include "gimple-predict.h"
34 : #include "tree-pass.h" /* FIXME: only for PROP_gimple_any */
35 : #include "ssa.h"
36 : #include "cgraph.h"
37 : #include "tree-pretty-print.h"
38 : #include "diagnostic-core.h"
39 : #include "diagnostic.h" /* For errorcount. */
40 : #include "alias.h"
41 : #include "fold-const.h"
42 : #include "calls.h"
43 : #include "varasm.h"
44 : #include "stmt.h"
45 : #include "expr.h"
46 : #include "gimple-iterator.h"
47 : #include "gimple-fold.h"
48 : #include "tree-eh.h"
49 : #include "gimplify.h"
50 : #include "stor-layout.h"
51 : #include "print-tree.h"
52 : #include "tree-iterator.h"
53 : #include "tree-inline.h"
54 : #include "langhooks.h"
55 : #include "tree-cfg.h"
56 : #include "tree-ssa.h"
57 : #include "tree-hash-traits.h"
58 : #include "omp-general.h"
59 : #include "omp-low.h"
60 : #include "gimple-low.h"
61 : #include "gomp-constants.h"
62 : #include "splay-tree.h"
63 : #include "gimple-walk.h"
64 : #include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name */
65 : #include "builtins.h"
66 : #include "stringpool.h"
67 : #include "attribs.h"
68 : #include "asan.h"
69 : #include "dbgcnt.h"
70 : #include "omp-offload.h"
71 : #include "context.h"
72 : #include "tree-nested.h"
73 : #include "gcc-urlifier.h"
74 : #include "insn-config.h"
75 : #include "recog.h"
76 : #include "output.h"
77 : #include "gimplify_reg_info.h"
78 :
79 : /* Identifier for a basic condition, mapping it to other basic conditions of
80 : its Boolean expression. Basic conditions given the same uid (in the same
81 : function) are parts of the same ANDIF/ORIF expression. Used for condition
82 : coverage. */
83 : static unsigned nextconduid = 1;
84 :
85 : /* Annotated gconds so that basic conditions in the same expression map to
86 : the same uid. This is used for condition coverage. */
87 : static hash_map <tree, unsigned> *cond_uids;
88 :
89 : /* Get a fresh identifier for a new condition expression. This is used for
90 : condition coverage. */
91 : static unsigned
92 5786448 : next_cond_uid ()
93 : {
94 5786448 : return nextconduid++;
95 : }
96 :
97 : /* Reset the condition uid to the value it should have when compiling a new
98 : function. 0 is already the default/untouched value, so start at non-zero.
99 : A valid and set id should always be > 0. This is used for condition
100 : coverage. */
101 : static void
102 2874351 : reset_cond_uid ()
103 : {
104 2874351 : nextconduid = 1;
105 0 : }
106 :
107 : /* Associate the condition STMT with the discriminator UID. STMTs that are
108 : broken down with ANDIF/ORIF from the same Boolean expression should be given
109 : the same UID; 'if (a && b && c) { if (d || e) ... } ...' should yield the
110 : { a: 1, b: 1, c: 1, d: 2, e: 2 } when gimplification is done. This is used
111 : for condition coverage. */
112 : static void
113 1215963 : tree_associate_condition_with_expr (tree stmt, unsigned uid)
114 : {
115 1215963 : if (!condition_coverage_flag)
116 : return;
117 :
118 527 : if (!cond_uids)
119 65 : cond_uids = new hash_map <tree, unsigned> ();
120 :
121 527 : cond_uids->put (stmt, uid);
122 : }
123 :
124 : /* Hash set of poisoned variables in a bind expr. */
125 : static hash_set<tree> *asan_poisoned_variables = NULL;
126 :
127 : /* Hash set of already-resolved calls to OpenMP "declare variant"
128 : functions. A call can resolve to the original function and
129 : we don't want to repeat the resolution multiple times. */
130 : static hash_set<tree> *omp_resolved_variant_calls = NULL;
131 :
/* Flag bits recorded per variable while determining OpenMP/OpenACC
   data-sharing and mapping behavior (see omp_add_variable).
   Note: GOVD_LOCAL was previously written as 0x00080 (five hex digits);
   normalized to six digits for consistency with its siblings — the
   value is unchanged.  */
enum gimplify_omp_var_data
{
  GOVD_SEEN = 0x000001,
  GOVD_EXPLICIT = 0x000002,
  GOVD_SHARED = 0x000004,
  GOVD_PRIVATE = 0x000008,
  GOVD_FIRSTPRIVATE = 0x000010,
  GOVD_LASTPRIVATE = 0x000020,
  GOVD_REDUCTION = 0x000040,
  GOVD_LOCAL = 0x000080,
  GOVD_MAP = 0x000100,
  GOVD_DEBUG_PRIVATE = 0x000200,
  GOVD_PRIVATE_OUTER_REF = 0x000400,
  GOVD_LINEAR = 0x000800,
  GOVD_ALIGNED = 0x001000,

  /* Flag for GOVD_MAP: don't copy back.  */
  GOVD_MAP_TO_ONLY = 0x002000,

  /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference.  */
  GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 0x004000,

  GOVD_MAP_0LEN_ARRAY = 0x008000,

  /* Flag for GOVD_MAP, if it is always, to or always, tofrom mapping.  */
  GOVD_MAP_ALWAYS_TO = 0x010000,

  /* Flag for shared vars that are or might be stored to in the region.  */
  GOVD_WRITTEN = 0x020000,

  /* Flag for GOVD_MAP, if it is a forced mapping.  */
  GOVD_MAP_FORCE = 0x040000,

  /* Flag for GOVD_MAP: must be present already.  */
  GOVD_MAP_FORCE_PRESENT = 0x080000,

  /* Flag for GOVD_MAP: only allocate.  */
  GOVD_MAP_ALLOC_ONLY = 0x100000,

  /* Flag for GOVD_MAP: only copy back.  */
  GOVD_MAP_FROM_ONLY = 0x200000,

  GOVD_NONTEMPORAL = 0x400000,

  /* Flag for GOVD_LASTPRIVATE: conditional modifier.  */
  GOVD_LASTPRIVATE_CONDITIONAL = 0x800000,

  GOVD_CONDTEMP = 0x1000000,

  /* Flag for GOVD_REDUCTION: inscan seen in {in,ex}clusive clause.  */
  GOVD_REDUCTION_INSCAN = 0x2000000,

  /* Flag for GOVD_FIRSTPRIVATE: OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT.  */
  GOVD_FIRSTPRIVATE_IMPLICIT = 0x4000000,

  /* Mask covering the data-sharing classification bits.  */
  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
			   | GOVD_LOCAL)
};
191 :
192 :
/* Kind of OpenMP/OpenACC region being gimplified.  Several variants of a
   base kind are expressed by ORing extra low bits into that base kind
   (e.g. ORT_COMBINED_PARALLEL = ORT_PARALLEL | 1).  */
enum omp_region_type
{
  ORT_WORKSHARE = 0x00,
  ORT_TASKGROUP = 0x01,
  ORT_DISPATCH = 0x02,
  ORT_SIMD = 0x04,

  ORT_PARALLEL = 0x08,
  ORT_COMBINED_PARALLEL = ORT_PARALLEL | 1,

  ORT_TASK = 0x10,
  ORT_UNTIED_TASK = ORT_TASK | 1,
  ORT_TASKLOOP = ORT_TASK | 2,
  ORT_UNTIED_TASKLOOP = ORT_UNTIED_TASK | 2,

  ORT_TEAMS = 0x20,
  ORT_COMBINED_TEAMS = ORT_TEAMS | 1,
  ORT_HOST_TEAMS = ORT_TEAMS | 2,
  ORT_COMBINED_HOST_TEAMS = ORT_COMBINED_TEAMS | 2,

  /* Data region.  */
  ORT_TARGET_DATA = 0x40,

  /* Data region with offloading.  */
  ORT_TARGET = 0x80,
  ORT_COMBINED_TARGET = ORT_TARGET | 1,
  ORT_IMPLICIT_TARGET = ORT_TARGET | 2,

  /* OpenACC variants.  */
  ORT_ACC = 0x100,  /* A generic OpenACC region.  */
  ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA,  /* Data construct.  */
  ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET,  /* Parallel construct */
  ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 2,  /* Kernels construct.  */
  ORT_ACC_SERIAL = ORT_ACC | ORT_TARGET | 4,  /* Serial construct.  */
  ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 2,  /* Host data.  */

  /* Dummy OpenMP region, used to disable expansion of
     DECL_VALUE_EXPRs in taskloop pre body.  */
  ORT_NONE = 0x200
};
233 :
234 : /* Gimplify hashtable helper. */
235 :
/* Hasher for the formal-temporary table (gimplify_ctx::temp_htab);
   entries are elt_t pairs of a value expression and its temporary.  */
struct gimplify_hasher : free_ptr_hash <elt_t>
{
  /* Compute a hash value for *P.  */
  static inline hashval_t hash (const elt_t *);
  /* Return true if *P1 and *P2 compare equal.  */
  static inline bool equal (const elt_t *, const elt_t *);
};
241 :
/* State for one gimplification context; contexts nest via PREV_CONTEXT
   (see push_gimplify_context / pop_gimplify_context).  */
struct gimplify_ctx
{
  /* Enclosing context; also chains free contexts in ctx_pool.  */
  struct gimplify_ctx *prev_context;

  /* Currently open GIMPLE_BINDs, innermost last.  */
  vec<gbind *> bind_expr_stack;
  /* Chain (via DECL_CHAIN) of temporaries created in this context.  */
  tree temps;
  /* Cleanups seen while inside a condition; flushed to the prequeue when
     the outermost condition is popped (see gimple_pop_condition).  */
  gimple_seq conditional_cleanups;
  tree exit_label;
  tree return_temp;

  vec<tree> case_labels;
  hash_set<tree> *live_switch_vars;
  /* The formal temporary table.  Should this be persistent?  */
  hash_table<gimplify_hasher> *temp_htab;

  /* Nesting depth of COND_EXPRs, see gimple_push_condition.  */
  int conditions;
  /* Whether temporaries may be created as SSA names.  */
  unsigned into_ssa : 1;
  unsigned allow_rhs_cond_expr : 1;
  unsigned in_cleanup_point_expr : 1;
  unsigned keep_stack : 1;
  unsigned save_stack : 1;
  unsigned in_switch_expr : 1;
  unsigned in_handler_expr : 1;
};
266 :
/* Variable categories distinguished for implicit OpenMP mapping; used to
   index gimplify_omp_ctx::defaultmap.  */
enum gimplify_defaultmap_kind
{
  GDMK_SCALAR,
  GDMK_SCALAR_TARGET, /* w/ Fortran's target attr, implicit mapping, only.  */
  GDMK_AGGREGATE,
  GDMK_ALLOCATABLE,
  GDMK_POINTER
};
275 :
/* State for one OpenMP/OpenACC region being gimplified; regions nest via
   OUTER_CONTEXT (see new_omp_context / delete_omp_context).  */
struct gimplify_omp_ctx
{
  /* Enclosing region, if any.  */
  struct gimplify_omp_ctx *outer_context;
  /* Per-variable gimplify_omp_var_data flags, ordered by DECL_UID.  */
  splay_tree variables;
  hash_map<omp_name_type<tree>, tree> *implicit_mappers;
  hash_set<tree> *privatized_types;
  tree clauses;
  /* Iteration variables in an OMP_FOR.  */
  vec<tree> loop_iter_var;
  /* Taken from input_location when the context is created.  */
  location_t location;
  enum omp_clause_default_kind default_kind;
  enum omp_region_type region_type;
  enum tree_code code;
  bool combined_loop;
  bool distribute;
  bool target_firstprivatize_array_bases;
  bool add_safelen1;
  bool order_concurrent;
  bool has_depend;
  bool in_for_exprs;
  bool in_call_args;
  /* Implicit mapping behavior, indexed by gimplify_defaultmap_kind.  */
  int defaultmap[5];
};
299 :
/* The current (innermost) gimplification context.  */
static struct gimplify_ctx *gimplify_ctxp;
/* The current (innermost) OpenMP/OpenACC region context, if any.  */
static struct gimplify_omp_ctx *gimplify_omp_ctxp;
static bool in_omp_construct;

/* Forward declarations.  */
static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
static hash_map<tree, tree> *oacc_declare_returns;
static enum gimplify_status gimplify_expr (tree *, gimple_seq *, gimple_seq *,
					   bool (*) (tree), fallback_t, bool);
static void prepare_gimple_addressable (tree *, gimple_seq *);
310 :
311 : /* Shorter alias name for the above function for use in gimplify.cc
312 : only. */
313 :
static inline void
gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
{
  /* Append GS without scanning operands: during gimplification the
     def/use vectors have not been constructed yet.  */
  gimple_seq_add_stmt_without_update (seq_p, gs);
}
319 :
320 : /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
321 : NULL, a new sequence is allocated. This function is
322 : similar to gimple_seq_add_seq, but does not scan the operands.
323 : During gimplification, we need to manipulate statement sequences
324 : before the def/use vectors have been constructed. */
325 :
326 : static void
327 9180642 : gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
328 : {
329 9180642 : gimple_stmt_iterator si;
330 :
331 9180642 : if (src == NULL)
332 4027419 : return;
333 :
334 5153223 : si = gsi_last (*dst_p);
335 5153223 : gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
336 : }
337 :
338 :
339 : /* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
340 : and popping gimplify contexts. */
341 :
342 : static struct gimplify_ctx *ctx_pool = NULL;
343 :
344 : /* Return a gimplify context struct from the pool. */
345 :
346 : static inline struct gimplify_ctx *
347 8867192 : ctx_alloc (void)
348 : {
349 8867192 : struct gimplify_ctx * c = ctx_pool;
350 :
351 8867192 : if (c)
352 8602371 : ctx_pool = c->prev_context;
353 : else
354 264821 : c = XNEW (struct gimplify_ctx);
355 :
356 8867192 : memset (c, '\0', sizeof (*c));
357 8867192 : return c;
358 : }
359 :
360 : /* Put gimplify context C back into the pool. */
361 :
362 : static inline void
363 8867188 : ctx_free (struct gimplify_ctx *c)
364 : {
365 8867188 : c->prev_context = ctx_pool;
366 8867188 : ctx_pool = c;
367 : }
368 :
369 : /* Free allocated ctx stack memory. */
370 :
371 : void
372 232638 : free_gimplify_stack (void)
373 : {
374 232638 : struct gimplify_ctx *c;
375 :
376 467770 : while ((c = ctx_pool))
377 : {
378 235132 : ctx_pool = c->prev_context;
379 235132 : free (c);
380 : }
381 232638 : }
382 :
383 :
384 : /* Set up a context for the gimplifier. */
385 :
386 : void
387 8867192 : push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
388 : {
389 8867192 : struct gimplify_ctx *c = ctx_alloc ();
390 :
391 8867192 : c->prev_context = gimplify_ctxp;
392 8867192 : gimplify_ctxp = c;
393 8867192 : gimplify_ctxp->into_ssa = in_ssa;
394 8867192 : gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
395 8867192 : }
396 :
397 : /* Tear down a context for the gimplifier. If BODY is non-null, then
398 : put the temporaries into the outer BIND_EXPR. Otherwise, put them
399 : in the local_decls.
400 :
401 : BODY is not a sequence, but the first tuple in a sequence. */
402 :
403 : void
404 8867188 : pop_gimplify_context (gimple *body)
405 : {
406 8867188 : struct gimplify_ctx *c = gimplify_ctxp;
407 :
408 8867188 : gcc_assert (c
409 : && (!c->bind_expr_stack.exists ()
410 : || c->bind_expr_stack.is_empty ()));
411 8867188 : c->bind_expr_stack.release ();
412 8867188 : gimplify_ctxp = c->prev_context;
413 :
414 8867188 : if (body)
415 2998444 : declare_vars (c->temps, body, false);
416 : else
417 5868744 : record_vars (c->temps);
418 :
419 8867188 : delete c->temp_htab;
420 8867188 : c->temp_htab = NULL;
421 8867188 : ctx_free (c);
422 8867188 : }
423 :
424 : /* Push a GIMPLE_BIND tuple onto the stack of bindings. */
425 :
426 : static void
427 5922252 : gimple_push_bind_expr (gbind *bind_stmt)
428 : {
429 5922252 : gimplify_ctxp->bind_expr_stack.reserve (8);
430 5922252 : gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
431 5922252 : }
432 :
433 : /* Pop the first element off the stack of bindings. */
434 :
static void
gimple_pop_bind_expr (void)
{
  /* Discard the innermost GIMPLE_BIND; its returned value is unused.  */
  gimplify_ctxp->bind_expr_stack.pop ();
}
440 :
441 : /* Return the first element of the stack of bindings. */
442 :
gbind *
gimple_current_bind_expr (void)
{
  /* The innermost binding sits at the top (end) of the stack.  */
  return gimplify_ctxp->bind_expr_stack.last ();
}
448 :
449 : /* Return the stack of bindings created during gimplification. */
450 :
vec<gbind *>
gimple_bind_expr_stack (void)
{
  /* Innermost binding last; returned by value.  */
  return gimplify_ctxp->bind_expr_stack;
}
456 :
457 : /* Return true iff there is a COND_EXPR between us and the innermost
458 : CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
459 :
static bool
gimple_conditional_context (void)
{
  /* "conditions" counts COND_EXPR nesting, see gimple_push_condition.  */
  return gimplify_ctxp->conditions > 0;
}
465 :
466 : /* Note that we've entered a COND_EXPR. */
467 :
468 : static void
469 5786883 : gimple_push_condition (void)
470 : {
471 : #ifdef ENABLE_GIMPLE_CHECKING
472 5786883 : if (gimplify_ctxp->conditions == 0)
473 3804762 : gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
474 : #endif
475 5786883 : ++(gimplify_ctxp->conditions);
476 5786883 : }
477 :
478 : /* Note that we've left a COND_EXPR. If we're back at unconditional scope
479 : now, add any conditional cleanups we've seen to the prequeue. */
480 :
481 : static void
482 5786883 : gimple_pop_condition (gimple_seq *pre_p)
483 : {
484 5786883 : int conds = --(gimplify_ctxp->conditions);
485 :
486 5786883 : gcc_assert (conds >= 0);
487 5786883 : if (conds == 0)
488 : {
489 3804762 : gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
490 3804762 : gimplify_ctxp->conditional_cleanups = NULL;
491 : }
492 5786883 : }
493 :
494 : /* A stable comparison routine for use with splay trees and DECLs. */
495 :
496 : static int
497 18154047 : splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
498 : {
499 18154047 : tree a = (tree) xa;
500 18154047 : tree b = (tree) xb;
501 :
502 18154047 : return DECL_UID (a) - DECL_UID (b);
503 : }
504 :
505 : /* Create a new omp construct that deals with variable remapping. */
506 :
507 : static struct gimplify_omp_ctx *
508 139569 : new_omp_context (enum omp_region_type region_type)
509 : {
510 139569 : struct gimplify_omp_ctx *c;
511 :
512 139569 : c = XCNEW (struct gimplify_omp_ctx);
513 139569 : c->outer_context = gimplify_omp_ctxp;
514 139569 : c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
515 139569 : c->implicit_mappers = new hash_map<omp_name_type<tree>, tree>;
516 139569 : c->privatized_types = new hash_set<tree>;
517 139569 : c->location = input_location;
518 139569 : c->region_type = region_type;
519 139569 : if ((region_type & ORT_TASK) == 0)
520 133524 : c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
521 : else
522 6045 : c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
523 139569 : c->defaultmap[GDMK_SCALAR] = GOVD_MAP;
524 139569 : c->defaultmap[GDMK_SCALAR_TARGET] = GOVD_MAP;
525 139569 : c->defaultmap[GDMK_AGGREGATE] = GOVD_MAP;
526 139569 : c->defaultmap[GDMK_ALLOCATABLE] = GOVD_MAP;
527 139569 : c->defaultmap[GDMK_POINTER] = GOVD_MAP;
528 :
529 139569 : return c;
530 : }
531 :
532 : /* Destroy an omp construct that deals with variable remapping. */
533 :
534 : static void
535 139010 : delete_omp_context (struct gimplify_omp_ctx *c)
536 : {
537 139010 : splay_tree_delete (c->variables);
538 278020 : delete c->privatized_types;
539 278020 : delete c->implicit_mappers;
540 139010 : c->loop_iter_var.release ();
541 139010 : XDELETE (c);
542 139010 : }
543 :
544 : static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
545 : static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
546 :
547 : /* Both gimplify the statement T and append it to *SEQ_P. This function
548 : behaves exactly as gimplify_stmt, but you don't have to pass T as a
549 : reference. */
550 :
void
gimplify_and_add (tree t, gimple_seq *seq_p)
{
  /* T is passed by value, so gimplify_stmt's rewrite of it is only
     visible locally.  */
  gimplify_stmt (&t, seq_p);
}
556 :
557 : /* Gimplify statement T into sequence *SEQ_P, and return the first
558 : tuple in the sequence of generated tuples for this statement.
559 : Return NULL if gimplifying T produced no tuples. */
560 :
561 : static gimple *
562 104962 : gimplify_and_return_first (tree t, gimple_seq *seq_p)
563 : {
564 104962 : gimple_stmt_iterator last = gsi_last (*seq_p);
565 :
566 104962 : gimplify_and_add (t, seq_p);
567 :
568 104962 : if (!gsi_end_p (last))
569 : {
570 4881 : gsi_next (&last);
571 4881 : return gsi_stmt (last);
572 : }
573 : else
574 100081 : return gimple_seq_first_stmt (*seq_p);
575 : }
576 :
577 : /* Returns true iff T is a valid RHS for an assignment to an un-renamed
578 : LHS, or for a call argument. */
579 :
580 : static bool
581 236023 : is_gimple_mem_rhs (tree t)
582 : {
583 : /* If we're dealing with a renamable type, either source or dest must be
584 : a renamed variable. */
585 236023 : if (is_gimple_reg_type (TREE_TYPE (t)))
586 232658 : return is_gimple_val (t);
587 : else
588 3365 : return is_gimple_val (t) || is_gimple_lvalue (t);
589 : }
590 :
591 : /* Return true if T is a CALL_EXPR or an expression that can be
592 : assigned to a temporary. Note that this predicate should only be
593 : used during gimplification. See the rationale for this in
594 : gimplify_modify_expr. */
595 :
596 : static bool
597 97611797 : is_gimple_reg_rhs_or_call (tree t)
598 : {
599 71682870 : return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
600 97611797 : || TREE_CODE (t) == CALL_EXPR);
601 : }
602 :
603 : /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
604 : this predicate should only be used during gimplification. See the
605 : rationale for this in gimplify_modify_expr. */
606 :
607 : static bool
608 14233049 : is_gimple_mem_rhs_or_call (tree t)
609 : {
610 : /* If we're dealing with a renamable type, either source or dest must be
611 : a renamed variable. */
612 14233049 : if (is_gimple_reg_type (TREE_TYPE (t)))
613 10918189 : return is_gimple_val (t);
614 : else
615 3314860 : return (is_gimple_val (t)
616 1650758 : || is_gimple_lvalue (t)
617 1135786 : || (TREE_CODE (t) == CONSTRUCTOR && CONSTRUCTOR_NELTS (t) == 0)
618 4450256 : || TREE_CODE (t) == CALL_EXPR);
619 : }
620 :
621 : /* Create a temporary with a name derived from VAL. Subroutine of
622 : lookup_tmp_var; nobody else should call this function. */
623 :
624 : static inline tree
625 2137373 : create_tmp_from_val (tree val)
626 : {
627 : /* Drop all qualifiers and address-space information from the value type. */
628 2137373 : tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
629 2137373 : tree var = create_tmp_var (type, get_name (val));
630 2137373 : return var;
631 : }
632 :
633 : /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
634 : an existing expression temporary. If NOT_GIMPLE_REG, mark it as such. */
635 :
static tree
lookup_tmp_var (tree val, bool is_formal, bool not_gimple_reg)
{
  tree ret;

  /* We cannot mark a formal temporary with DECL_NOT_GIMPLE_REG_P.  */
  gcc_assert (!is_formal || !not_gimple_reg);

  /* If not optimizing, never really reuse a temporary.  local-alloc
     won't allocate any variable that is used in more than one basic
     block, which means it will go into memory, causing much extra
     work in reload and final and poorer code generation, outweighing
     the extra memory allocation here.  */
  if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
    {
      /* Unconditionally fresh temporary.  */
      ret = create_tmp_from_val (val);
      DECL_NOT_GIMPLE_REG_P (ret) = not_gimple_reg;
    }
  else
    {
      /* Formal, side-effect-free value: reuse an existing temporary for
	 the same VAL via the per-context hash table.  */
      elt_t elt, *elt_p;
      elt_t **slot;

      elt.val = val;
      /* The table is created lazily on first use.  */
      if (!gimplify_ctxp->temp_htab)
	gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
      slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
      if (*slot == NULL)
	{
	  /* First time we see VAL: record a new temporary for it.  */
	  elt_p = XNEW (elt_t);
	  elt_p->val = val;
	  elt_p->temp = ret = create_tmp_from_val (val);
	  *slot = elt_p;
	}
      else
	{
	  /* Reuse the temporary made for an earlier identical VAL.  */
	  elt_p = *slot;
          ret = elt_p->temp;
	}
    }

  return ret;
}
679 :
680 : /* Helper for get_formal_tmp_var and get_initialized_tmp_var. */
681 :
static tree
internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
		      bool is_formal, bool allow_ssa, bool not_gimple_reg)
{
  tree t, mod;

  /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
     can create an INIT_EXPR and convert it into a GIMPLE_CALL below.  */
  gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
		 fb_rvalue);

  /* When SSA form is allowed and VAL has register type, make the
     temporary a fresh SSA name; otherwise fall back to a (possibly
     reused) VAR_DECL via lookup_tmp_var.  */
  if (allow_ssa
      && gimplify_ctxp->into_ssa
      && is_gimple_reg_type (TREE_TYPE (val)))
    {
      t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
      if (! gimple_in_ssa_p (cfun))
	{
	  /* Carry over a debug-friendly name derived from VAL.  */
	  const char *name = get_name (val);
	  if (name)
	    SET_SSA_NAME_VAR_OR_IDENTIFIER (t, create_tmp_var_name (name));
	}
    }
  else
    t = lookup_tmp_var (val, is_formal, not_gimple_reg);

  /* Emit "t = val" in front of the current position.  */
  mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));

  SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));

  /* gimplify_modify_expr might want to reduce this further.  */
  gimplify_and_add (mod, pre_p);
  ggc_free (mod);

  /* If we failed to gimplify VAL then we can end up with the temporary
     SSA name not having a definition.  In this case return a decl.  */
  if (TREE_CODE (t) == SSA_NAME && ! SSA_NAME_DEF_STMT (t))
    return lookup_tmp_var (val, is_formal, not_gimple_reg);

  return t;
}
723 :
724 : /* Return a formal temporary variable initialized with VAL. PRE_P is as
725 : in gimplify_expr. Only use this function if:
726 :
727 : 1) The value of the unfactored expression represented by VAL will not
728 : change between the initialization and use of the temporary, and
729 : 2) The temporary will not be otherwise modified.
730 :
731 : For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
732 : and #2 means it is inappropriate for && temps.
733 :
734 : For other cases, use get_initialized_tmp_var instead. */
735 :
tree
get_formal_tmp_var (tree val, gimple_seq *pre_p)
{
  /* is_formal = true, allow_ssa = true, not_gimple_reg = false.  */
  return internal_get_tmp_var (val, pre_p, NULL, true, true, false);
}
741 :
742 : /* Return a temporary variable initialized with VAL. PRE_P and POST_P
743 : are as in gimplify_expr. */
744 :
tree
get_initialized_tmp_var (tree val, gimple_seq *pre_p,
			 gimple_seq *post_p /* = NULL */,
			 bool allow_ssa /* = true */)
{
  /* is_formal = false: per the comment on get_formal_tmp_var, this
     temporary may be modified later, so it must not be unified with
     other temporaries.  */
  return internal_get_tmp_var (val, pre_p, post_p, false, allow_ssa, false);
}
752 :
753 : /* Declare all the variables in VARS in SCOPE. If DEBUG_INFO is true,
754 : generate debug info for them; otherwise don't. */
755 :
void
declare_vars (tree vars, gimple *gs, bool debug_info)
{
  tree last = vars;
  if (last)
    {
      tree temps, block;

      /* GS must be the GIMPLE_BIND that receives the variables.  */
      gbind *scope = as_a <gbind *> (gs);

      /* After nreverse, TEMPS is the head of the reversed chain and LAST
	 is its tail.  */
      temps = nreverse (last);

      block = gimple_bind_block (scope);
      gcc_assert (!block || TREE_CODE (block) == BLOCK);
      if (!block || !debug_info)
	{
	  /* Prepend TEMPS to the bind's existing variable chain.  */
	  DECL_CHAIN (last) = gimple_bind_vars (scope);
	  gimple_bind_set_vars (scope, temps);
	}
      else
	{
	  /* We need to attach the nodes both to the BIND_EXPR and to its
	     associated BLOCK for debugging purposes.  The key point here
	     is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
	     is a subchain of the BIND_EXPR_VARS of the BIND_EXPR.  */
	  if (BLOCK_VARS (block))
	    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
	  else
	    {
	      gimple_bind_set_vars (scope,
				    chainon (gimple_bind_vars (scope), temps));
	      BLOCK_VARS (block) = temps;
	    }
	}
    }
}
792 :
793 : /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
794 : for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
795 : no such upper bound can be obtained. */
796 :
797 : static void
798 0 : force_constant_size (tree var)
799 : {
800 : /* The only attempt we make is by querying the maximum size of objects
801 : of the variable's type. */
802 :
803 0 : HOST_WIDE_INT max_size;
804 :
805 0 : gcc_assert (VAR_P (var));
806 :
807 0 : max_size = max_int_size_in_bytes (TREE_TYPE (var));
808 :
809 0 : gcc_assert (max_size >= 0);
810 :
811 0 : DECL_SIZE_UNIT (var)
812 0 : = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
813 0 : DECL_SIZE (var)
814 0 : = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
815 0 : }
816 :
817 : /* Push the temporary variable TMP into the current binding. */
818 :
819 : void
820 36101 : gimple_add_tmp_var_fn (struct function *fn, tree tmp)
821 : {
822 36101 : gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
823 :
824 : /* Later processing assumes that the object size is constant, which might
825 : not be true at this point. Force the use of a constant upper bound in
826 : this case. */
827 36101 : if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
828 0 : force_constant_size (tmp);
829 :
830 36101 : DECL_CONTEXT (tmp) = fn->decl;
831 36101 : DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
832 :
833 36101 : record_vars_into (tmp, fn->decl);
834 36101 : }
835 :
836 : /* Push the temporary variable TMP into the current binding. */
837 :
void
gimple_add_tmp_var (tree tmp)
{
  /* TMP must not already be chained or registered anywhere.  */
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  if (gimplify_ctxp)
    {
      /* Chain TMP into the current context's temporaries.  */
      DECL_CHAIN (tmp) = gimplify_ctxp->temps;
      gimplify_ctxp->temps = tmp;

      /* Mark temporaries local within the nearest enclosing parallel.  */
      if (gimplify_omp_ctxp)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  int flag = GOVD_LOCAL | GOVD_SEEN;
	  /* Walk outward past workshare/taskgroup/simd/acc regions.  */
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_TASKGROUP
		     || ctx->region_type == ORT_SIMD
		     || ctx->region_type == ORT_ACC))
	    {
	      /* An addressable, non-static temporary inside a simd region
		 is privatized instead (or, for variable-size temporaries,
		 forces safelen(1)).  */
	      if (ctx->region_type == ORT_SIMD
		  && TREE_ADDRESSABLE (tmp)
		  && !TREE_STATIC (tmp))
		{
		  if (TREE_CODE (DECL_SIZE_UNIT (tmp)) != INTEGER_CST)
		    ctx->add_safelen1 = true;
		  else if (ctx->in_for_exprs)
		    flag = GOVD_PRIVATE;
		  else
		    flag = GOVD_PRIVATE | GOVD_SEEN;
		  break;
		}
	      ctx = ctx->outer_context;
	    }
	  if (ctx)
	    omp_add_variable (ctx, tmp, flag);
	}
    }
  else if (cfun)
    record_vars (tmp);
  else
    {
      gimple_seq body_seq;

      /* This case is for nested functions.  We need to expose the locals
	 they create.  */
      body_seq = gimple_body (current_function_decl);
      declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
    }
}
898 :
899 :
900 :
901 : /* This page contains routines to unshare tree nodes, i.e. to duplicate tree
902 : nodes that are referenced more than once in GENERIC functions. This is
903 : necessary because gimplification (translation into GIMPLE) is performed
904 : by modifying tree nodes in-place, so gimplification of a shared node in a
905 : first context could generate an invalid GIMPLE form in a second context.
906 :
907 : This is achieved with a simple mark/copy/unmark algorithm that walks the
908 : GENERIC representation top-down, marks nodes with TREE_VISITED the first
909 : time it encounters them, duplicates them if they already have TREE_VISITED
910 : set, and finally removes the TREE_VISITED marks it has set.
911 :
912 : The algorithm works only at the function level, i.e. it generates a GENERIC
913 : representation of a function with no nodes shared within the function when
914 : passed a GENERIC function (except for nodes that are allowed to be shared).
915 :
916 : At the global level, it is also necessary to unshare tree nodes that are
917 : referenced in more than one function, for the same aforementioned reason.
918 : This requires some cooperation from the front-end. There are 2 strategies:
919 :
920 : 1. Manual unsharing. The front-end needs to call unshare_expr on every
921 : expression that might end up being shared across functions.
922 :
923 : 2. Deep unsharing. This is an extension of regular unsharing. Instead
924 : of calling unshare_expr on expressions that might be shared across
925 : functions, the front-end pre-marks them with TREE_VISITED. This will
926 : ensure that they are unshared on the first reference within functions
927 : when the regular unsharing algorithm runs. The counterpart is that
928 : this algorithm must look deeper than for manual unsharing, which is
929 : specified by LANG_HOOKS_DEEP_UNSHARING.
930 :
   If there are only a few specific cases of node sharing across functions, it is
932 : probably easier for a front-end to unshare the expressions manually. On the
933 : contrary, if the expressions generated at the global level are as widespread
934 : as expressions generated within functions, deep unsharing is very likely the
935 : way to go. */
936 :
937 : /* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
938 : These nodes model computations that must be done once. If we were to
939 : unshare something like SAVE_EXPR(i++), the gimplification process would
940 : create wrong code. However, if DATA is non-null, it must hold a pointer
941 : set that is used to unshare the subtrees of these nodes. */
942 :
943 : static tree
944 3095286253 : mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
945 : {
946 3095286253 : tree t = *tp;
947 3095286253 : enum tree_code code = TREE_CODE (t);
948 :
949 : /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
950 : copy their subtrees if we can make sure to do it only once. */
951 3095286253 : if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
952 : {
953 13849571 : if (data && !((hash_set<tree> *)data)->add (t))
954 : ;
955 : else
956 13849571 : *walk_subtrees = 0;
957 : }
958 :
959 : /* Stop at types, decls, constants like copy_tree_r. */
960 3081436682 : else if (TREE_CODE_CLASS (code) == tcc_type
961 : || TREE_CODE_CLASS (code) == tcc_declaration
962 3081436682 : || TREE_CODE_CLASS (code) == tcc_constant)
963 1898883613 : *walk_subtrees = 0;
964 :
965 : /* Cope with the statement expression extension. */
966 1182553069 : else if (code == STATEMENT_LIST)
967 : ;
968 :
969 : /* Leave the bulk of the work to copy_tree_r itself. */
970 : else
971 1182503434 : copy_tree_r (tp, walk_subtrees, NULL);
972 :
973 3095286253 : return NULL_TREE;
974 : }
975 :
976 : /* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
977 : If *TP has been visited already, then *TP is deeply copied by calling
978 : mostly_copy_tree_r. DATA is passed to mostly_copy_tree_r unmodified. */
979 :
980 : static tree
981 278415245 : copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
982 : {
983 278415245 : tree t = *tp;
984 278415245 : enum tree_code code = TREE_CODE (t);
985 :
986 : /* Skip types, decls, and constants. But we do want to look at their
987 : types and the bounds of types. Mark them as visited so we properly
988 : unmark their subtrees on the unmark pass. If we've already seen them,
989 : don't look down further. */
990 278415245 : if (TREE_CODE_CLASS (code) == tcc_type
991 : || TREE_CODE_CLASS (code) == tcc_declaration
992 278415245 : || TREE_CODE_CLASS (code) == tcc_constant)
993 : {
994 132603590 : if (TREE_VISITED (t))
995 81602148 : *walk_subtrees = 0;
996 : else
997 51001442 : TREE_VISITED (t) = 1;
998 : }
999 :
1000 : /* If this node has been visited already, unshare it and don't look
1001 : any deeper. */
1002 145811655 : else if (TREE_VISITED (t))
1003 : {
1004 1712691 : walk_tree (tp, mostly_copy_tree_r, data, NULL);
1005 1712691 : *walk_subtrees = 0;
1006 : }
1007 :
1008 : /* Otherwise, mark the node as visited and keep looking. */
1009 : else
1010 144098964 : TREE_VISITED (t) = 1;
1011 :
1012 278415245 : return NULL_TREE;
1013 : }
1014 :
1015 : /* Unshare most of the shared trees rooted at *TP. DATA is passed to the
1016 : copy_if_shared_r callback unmodified. */
1017 :
1018 : void
1019 8701968 : copy_if_shared (tree *tp, void *data)
1020 : {
1021 8701968 : walk_tree (tp, copy_if_shared_r, data, NULL);
1022 8701968 : }
1023 :
1024 : /* Unshare all the trees in the body of FNDECL, as well as in the bodies of
1025 : any nested functions. */
1026 :
1027 : static void
1028 2900656 : unshare_body (tree fndecl)
1029 : {
1030 2900656 : struct cgraph_node *cgn = cgraph_node::get (fndecl);
1031 : /* If the language requires deep unsharing, we need a pointer set to make
1032 : sure we don't repeatedly unshare subtrees of unshareable nodes. */
1033 2900656 : hash_set<tree> *visited
1034 2900656 : = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;
1035 :
1036 2900656 : copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
1037 2900656 : copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
1038 2900656 : copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);
1039 :
1040 2903987 : delete visited;
1041 :
1042 2900656 : if (cgn)
1043 5846210 : for (cgn = first_nested_function (cgn); cgn;
1044 25801 : cgn = next_nested_function (cgn))
1045 25801 : unshare_body (cgn->decl);
1046 2900656 : }
1047 :
1048 : /* Callback for walk_tree to unmark the visited trees rooted at *TP.
1049 : Subtrees are walked until the first unvisited node is encountered. */
1050 :
1051 : static tree
1052 278414819 : unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1053 : {
1054 278414819 : tree t = *tp;
1055 :
1056 : /* If this node has been visited, unmark it and keep looking. */
1057 278414819 : if (TREE_VISITED (t))
1058 195104875 : TREE_VISITED (t) = 0;
1059 :
1060 : /* Otherwise, don't look any deeper. */
1061 : else
1062 83309944 : *walk_subtrees = 0;
1063 :
1064 278414819 : return NULL_TREE;
1065 : }
1066 :
1067 : /* Unmark the visited trees rooted at *TP. */
1068 :
1069 : static inline void
1070 8701968 : unmark_visited (tree *tp)
1071 : {
1072 8701968 : walk_tree (tp, unmark_visited_r, NULL, NULL);
1073 8701968 : }
1074 :
1075 : /* Likewise, but mark all trees as not visited. */
1076 :
1077 : static void
1078 2900656 : unvisit_body (tree fndecl)
1079 : {
1080 2900656 : struct cgraph_node *cgn = cgraph_node::get (fndecl);
1081 :
1082 2900656 : unmark_visited (&DECL_SAVED_TREE (fndecl));
1083 2900656 : unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
1084 2900656 : unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));
1085 :
1086 2900656 : if (cgn)
1087 2923105 : for (cgn = first_nested_function (cgn);
1088 2923105 : cgn; cgn = next_nested_function (cgn))
1089 25801 : unvisit_body (cgn->decl);
1090 2900656 : }
1091 :
1092 : /* Unconditionally make an unshared copy of EXPR. This is used when using
1093 : stored expressions which span multiple functions, such as BINFO_VTABLE,
1094 : as the normal unsharing process can't tell that they're shared. */
1095 :
1096 : tree
1097 1680572331 : unshare_expr (tree expr)
1098 : {
1099 1680572331 : walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
1100 1680572331 : return expr;
1101 : }
1102 :
1103 : /* Worker for unshare_expr_without_location. */
1104 :
1105 : static tree
1106 11761201 : prune_expr_location (tree *tp, int *walk_subtrees, void *)
1107 : {
1108 11761201 : if (EXPR_P (*tp))
1109 6154160 : SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
1110 : else
1111 5607041 : *walk_subtrees = 0;
1112 11761201 : return NULL_TREE;
1113 : }
1114 :
1115 : /* Similar to unshare_expr but also prune all expression locations
1116 : from EXPR. */
1117 :
1118 : tree
1119 22522583 : unshare_expr_without_location (tree expr)
1120 : {
1121 22522583 : walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
1122 22522583 : if (EXPR_P (expr))
1123 3728395 : walk_tree (&expr, prune_expr_location, NULL, NULL);
1124 22522583 : return expr;
1125 : }
1126 :
1127 : /* Return the EXPR_LOCATION of EXPR, if it (maybe recursively) has
1128 : one, OR_ELSE otherwise. The location of a STATEMENT_LISTs
1129 : comprising at least one DEBUG_BEGIN_STMT followed by exactly one
1130 : EXPR is the location of the EXPR. */
1131 :
1132 : static location_t
1133 1183598 : rexpr_location (tree expr, location_t or_else = UNKNOWN_LOCATION)
1134 : {
1135 1183598 : if (!expr)
1136 : return or_else;
1137 :
1138 1183598 : if (EXPR_HAS_LOCATION (expr))
1139 830448 : return EXPR_LOCATION (expr);
1140 :
1141 353150 : if (TREE_CODE (expr) != STATEMENT_LIST)
1142 : return or_else;
1143 :
1144 0 : tree_stmt_iterator i = tsi_start (expr);
1145 :
1146 0 : bool found = false;
1147 0 : while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
1148 : {
1149 0 : found = true;
1150 0 : tsi_next (&i);
1151 : }
1152 :
1153 353150 : if (!found || !tsi_one_before_end_p (i))
1154 : return or_else;
1155 :
1156 0 : return rexpr_location (tsi_stmt (i), or_else);
1157 : }
1158 :
1159 : /* Return TRUE iff EXPR (maybe recursively) has a location; see
1160 : rexpr_location for the potential recursion. */
1161 :
1162 : static inline bool
1163 514329 : rexpr_has_location (tree expr)
1164 : {
1165 307530 : return rexpr_location (expr) != UNKNOWN_LOCATION;
1166 : }
1167 :
1168 :
/* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
   contain statements and have a value.  Assign its value to a temporary
   and give it void_type_node.  Return the temporary, or NULL_TREE if
   WRAPPER was already void.

   If TEMP is non-null, it must be an INIT_EXPR or MODIFY_EXPR whose RHS
   is WRAPPER; the assignment is then pushed down onto the innermost value
   and TEMP itself is returned.  Otherwise a fresh "retval" temporary is
   created to capture the value.  All wrapper nodes traversed on the way
   down are given void type and TREE_SIDE_EFFECTS.  */

tree
voidify_wrapper_expr (tree wrapper, tree temp)
{
  tree type = TREE_TYPE (wrapper);
  if (type && !VOID_TYPE_P (type))
    {
      tree *p;

      /* Set p to point to the body of the wrapper.  Loop until we find
	 something that isn't a wrapper.  */
      for (p = &wrapper; p && *p; )
	{
	  switch (TREE_CODE (*p))
	    {
	    case BIND_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      /* For a BIND_EXPR, the body is operand 1.  */
	      p = &BIND_EXPR_BODY (*p);
	      break;

	    case CLEANUP_POINT_EXPR:
	    case TRY_FINALLY_EXPR:
	    case TRY_CATCH_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TREE_OPERAND (*p, 0);
	      break;

	    case STATEMENT_LIST:
	      {
		/* The value of a statement list is its last statement;
		   an empty list terminates the walk with p == NULL.  */
		tree_stmt_iterator i = tsi_last (*p);
		TREE_SIDE_EFFECTS (*p) = 1;
		TREE_TYPE (*p) = void_type_node;
		p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
	      }
	      break;

	    case COMPOUND_EXPR:
	      /* Advance to the last statement.  Set all container types to
		 void.  */
	      for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		}
	      break;

	    case TRANSACTION_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TRANSACTION_EXPR_BODY (*p);
	      break;

	    default:
	      /* Assume that any tree upon which voidify_wrapper_expr is
		 directly called is a wrapper, and that its body is op0.  */
	      if (p == &wrapper)
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		  p = &TREE_OPERAND (*p, 0);
		  break;
		}
	      /* Reached the innermost value expression; stop walking.  */
	      goto out;
	    }
	}

    out:
      /* If the walk bottomed out on nothing (or an empty statement),
	 there is no value to capture.  */
      if (p == NULL || IS_EMPTY_STMT (*p))
	temp = NULL_TREE;
      else if (temp)
	{
	  /* The wrapper is on the RHS of an assignment that we're pushing
	     down.  */
	  gcc_assert (TREE_CODE (temp) == INIT_EXPR
		      || TREE_CODE (temp) == MODIFY_EXPR);
	  TREE_OPERAND (temp, 1) = *p;
	  *p = temp;
	}
      else
	{
	  /* No assignment supplied: materialize the value into a fresh
	     temporary at the innermost position.  */
	  temp = create_tmp_var (type, "retval");
	  *p = build2 (INIT_EXPR, type, temp, *p);
	}

      return temp;
    }

  return NULL_TREE;
}
1265 :
1266 : /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1267 : a temporary through which they communicate. */
1268 :
1269 : static void
1270 7864 : build_stack_save_restore (gcall **save, gcall **restore)
1271 : {
1272 7864 : tree tmp_var;
1273 :
1274 15728 : *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
1275 7864 : tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
1276 7864 : gimple_call_set_lhs (*save, tmp_var);
1277 :
1278 7864 : *restore
1279 7864 : = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
1280 : 1, tmp_var);
1281 7864 : }
1282 :
1283 : /* Generate IFN_ASAN_MARK call that poisons shadow memory of the DECL
1284 : variable. */
1285 :
1286 : static tree
1287 437 : build_asan_poison_call_expr (tree decl)
1288 : {
1289 : /* Do not poison variables that have size equal to zero. */
1290 437 : tree unit_size = DECL_SIZE_UNIT (decl);
1291 437 : if (zerop (unit_size))
1292 : return NULL_TREE;
1293 :
1294 437 : tree base = build_fold_addr_expr (decl);
1295 :
1296 437 : return build_call_expr_internal_loc (UNKNOWN_LOCATION, IFN_ASAN_MARK,
1297 : void_type_node, 3,
1298 : build_int_cst (integer_type_node,
1299 : ASAN_MARK_POISON),
1300 : base, unit_size);
1301 : }
1302 :
/* Generate IFN_ASAN_MARK call that would poison or unpoison, depending
   on POISON flag, shadow memory of a DECL variable.  The call will be
   put on location identified by IT iterator, where BEFORE flag drives
   position where the stmt will be put.  Zero-sized variables are
   skipped entirely.  May raise DECL's alignment to the sanitizer's
   shadow granularity as a side effect.  */

static void
asan_poison_variable (tree decl, bool poison, gimple_stmt_iterator *it,
		      bool before)
{
  tree unit_size = DECL_SIZE_UNIT (decl);
  tree base = build_fold_addr_expr (decl);

  /* Do not poison variables that have size equal to zero.  */
  if (zerop (unit_size))
    return;

  /* It's necessary to have all stack variables aligned to ASAN granularity
     bytes.  */
  gcc_assert (!hwassist_sanitize_p () || hwassist_sanitize_stack_p ());
  /* Granularity differs between hwasan-style and classic ASan.  */
  unsigned shadow_granularity
    = (hwassist_sanitize_p ()
       ? HWASAN_TAG_GRANULE_SIZE : ASAN_SHADOW_GRANULARITY);
  if (DECL_ALIGN_UNIT (decl) <= shadow_granularity)
    SET_DECL_ALIGN (decl, BITS_PER_UNIT * shadow_granularity);

  HOST_WIDE_INT flags = poison ? ASAN_MARK_POISON : ASAN_MARK_UNPOISON;

  /* IFN_ASAN_MARK (flags, &decl, sizeof (decl)).  */
  gimple *g
    = gimple_build_call_internal (IFN_ASAN_MARK, 3,
				  build_int_cst (integer_type_node, flags),
				  base, unit_size);

  if (before)
    gsi_insert_before (it, g, GSI_NEW_STMT);
  else
    gsi_insert_after (it, g, GSI_NEW_STMT);
}
1340 :
1341 : /* Generate IFN_ASAN_MARK internal call that depending on POISON flag
1342 : either poisons or unpoisons a DECL. Created statement is appended
1343 : to SEQ_P gimple sequence. */
1344 :
1345 : static void
1346 4379 : asan_poison_variable (tree decl, bool poison, gimple_seq *seq_p)
1347 : {
1348 4379 : gimple_stmt_iterator it = gsi_last (*seq_p);
1349 4379 : bool before = false;
1350 :
1351 4379 : if (gsi_end_p (it))
1352 2329 : before = true;
1353 :
1354 4379 : asan_poison_variable (decl, poison, &it, before);
1355 4379 : }
1356 :
1357 : /* Sort pair of VAR_DECLs A and B by DECL_UID. */
1358 :
1359 : static int
1360 135 : sort_by_decl_uid (const void *a, const void *b)
1361 : {
1362 135 : const tree *t1 = (const tree *)a;
1363 135 : const tree *t2 = (const tree *)b;
1364 :
1365 135 : int uid1 = DECL_UID (*t1);
1366 135 : int uid2 = DECL_UID (*t2);
1367 :
1368 135 : if (uid1 < uid2)
1369 : return -1;
1370 50 : else if (uid1 > uid2)
1371 : return 1;
1372 : else
1373 0 : return 0;
1374 : }
1375 :
1376 : /* Generate IFN_ASAN_MARK internal call for all VARIABLES
1377 : depending on POISON flag. Created statement is appended
1378 : to SEQ_P gimple sequence. */
1379 :
1380 : static void
1381 1045435 : asan_poison_variables (hash_set<tree> *variables, bool poison, gimple_seq *seq_p)
1382 : {
1383 1045435 : unsigned c = variables->elements ();
1384 1045435 : if (c == 0)
1385 1045259 : return;
1386 :
1387 176 : auto_vec<tree> sorted_variables (c);
1388 :
1389 176 : for (hash_set<tree>::iterator it = variables->begin ();
1390 570 : it != variables->end (); ++it)
1391 197 : sorted_variables.safe_push (*it);
1392 :
1393 176 : sorted_variables.qsort (sort_by_decl_uid);
1394 :
1395 : unsigned i;
1396 : tree var;
1397 549 : FOR_EACH_VEC_ELT (sorted_variables, i, var)
1398 : {
1399 197 : asan_poison_variable (var, poison, seq_p);
1400 :
1401 : /* Add use_after_scope_memory attribute for the variable in order
1402 : to prevent re-written into SSA. */
1403 197 : if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
1404 197 : DECL_ATTRIBUTES (var)))
1405 90 : DECL_ATTRIBUTES (var)
1406 180 : = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE),
1407 : integer_one_node,
1408 90 : DECL_ATTRIBUTES (var));
1409 : }
1410 176 : }
1411 :
/* Gimplify a BIND_EXPR.  Just voidify and recurse.

   *EXPR_P is the BIND_EXPR; the resulting GIMPLE_BIND is appended to
   PRE_P.  Handles, in order: "omp allocate" locals, marking variables
   in the active OpenMP context, gimplifying the body, and building a
   cleanup sequence (GOMP_free calls, end-of-scope clobbers, ASan
   poisoning, OpenACC declare returns, stack restore) wrapped in a
   GIMPLE_TRY_FINALLY.  Returns GS_OK with *EXPR_P set to the value
   temporary when the BIND_EXPR had a value, GS_ALL_DONE otherwise.  */

static enum gimplify_status
gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree bind_expr = *expr_p;
  /* Save the context flags; they are restored near the end so nested
     binds don't leak their save/keep-stack state into ours.  */
  bool old_keep_stack = gimplify_ctxp->keep_stack;
  bool old_save_stack = gimplify_ctxp->save_stack;
  tree t;
  gbind *bind_stmt;
  gimple_seq body, cleanup;
  gcall *stack_save;
  location_t start_locus = 0, end_locus = 0;
  tree ret_clauses = NULL;

  /* If the BIND_EXPR computes a value, TEMP receives the temporary that
     will hold it (see voidify_wrapper_expr).  */
  tree temp = voidify_wrapper_expr (bind_expr, NULL);

  /* Mark variables seen in this bind expr.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (VAR_P (t))
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  tree attr;

	  if (flag_openmp
	      && !is_global_var (t)
	      && !TREE_STATIC (t)
	      && DECL_CONTEXT (t) == current_function_decl
	      && TREE_USED (t)
	      && (attr = lookup_attribute ("omp allocate", DECL_ATTRIBUTES (t)))
		 != NULL_TREE)
	    {
	      gcc_assert (!DECL_HAS_VALUE_EXPR_P (t));
	      tree alloc = TREE_PURPOSE (TREE_VALUE (attr));
	      tree align = TREE_VALUE (TREE_VALUE (attr));
	      /* Allocate directives that appear in a target region must specify
		 an allocator clause unless a requires directive with the
		 dynamic_allocators clause is present in the same compilation
		 unit.  */
	      bool missing_dyn_alloc = false;
	      if (alloc == NULL_TREE
		  && ((omp_requires_mask & OMP_REQUIRES_DYNAMIC_ALLOCATORS)
		      == 0))
		{
		  /* This comes too early for omp_discover_declare_target...,
		     but should at least catch the most common cases.  */
		  missing_dyn_alloc
		    = cgraph_node::get (current_function_decl)->offloadable;
		  for (struct gimplify_omp_ctx *ctx2 = ctx;
		       ctx2 && !missing_dyn_alloc; ctx2 = ctx2->outer_context)
		    if (ctx2->code == OMP_TARGET)
		      missing_dyn_alloc = true;
		}
	      if (missing_dyn_alloc)
		error_at (DECL_SOURCE_LOCATION (t),
			  "%<allocate%> directive for %qD inside a target "
			  "region must specify an %<allocator%> clause", t);
	      /* Skip for omp_default_mem_alloc (= 1),
		 unless align is present.  For C/C++, there should be always a
		 statement list following if TREE_USED, except for, e.g., using
		 this decl in a static_assert; in that case, only a single
		 DECL_EXPR remains, which can be skipped here.  */
	      else if (!errorcount
		       && (align != NULL_TREE
			   || alloc == NULL_TREE
			   || !integer_onep (alloc))
		       && (lang_GNU_Fortran ()
			   || (TREE_CODE (BIND_EXPR_BODY (bind_expr))
			       != DECL_EXPR)))
		{
		  /* Fortran might already use a pointer type internally;
		     use that pointer except for type(C_ptr) and type(C_funptr);
		     note that normal proc pointers are rejected.  */
		  tree type = TREE_TYPE (t);
		  tree tmp, v;
		  if (lang_GNU_Fortran ()
		      && POINTER_TYPE_P (type)
		      && TREE_TYPE (type) != void_type_node
		      && TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE)
		    {
		      type = TREE_TYPE (type);
		      v = t;
		    }
		  else
		    {
		      /* Create pointer V and rewrite accesses to T as *V
			 via DECL_VALUE_EXPR.  */
		      tmp = build_pointer_type (type);
		      v = create_tmp_var (tmp, get_name (t));
		      DECL_IGNORED_P (v) = 0;
		      DECL_ATTRIBUTES (v)
			= tree_cons (get_identifier ("omp allocate var"),
				     build_tree_list (NULL_TREE, t),
				     remove_attribute ("omp allocate",
						       DECL_ATTRIBUTES (t)));
		      tmp = build_fold_indirect_ref (v);
		      TREE_THIS_NOTRAP (tmp) = 1;
		      SET_DECL_VALUE_EXPR (t, tmp);
		      DECL_HAS_VALUE_EXPR_P (t) = 1;
		    }
		  tree sz = TYPE_SIZE_UNIT (type);
		  /* The size to use in Fortran might not match TYPE_SIZE_UNIT;
		     hence, for some decls, a size variable is saved in the
		     attributes; use it, if available.  */
		  if (TREE_CHAIN (TREE_VALUE (attr))
		      && TREE_CHAIN (TREE_CHAIN (TREE_VALUE (attr)))
		      && TREE_PURPOSE (
			   TREE_CHAIN (TREE_CHAIN (TREE_VALUE (attr)))))
		    {
		      sz = TREE_CHAIN (TREE_CHAIN (TREE_VALUE (attr)));
		      sz = TREE_PURPOSE (sz);
		    }
		  if (alloc == NULL_TREE)
		    alloc = build_zero_cst (ptr_type_node);
		  if (align == NULL_TREE)
		    align = build_int_cst (size_type_node, DECL_ALIGN_UNIT (t));
		  else
		    align = build_int_cst (size_type_node,
					   MAX (tree_to_uhwi (align),
						DECL_ALIGN_UNIT (t)));
		  /* Build v = (type *) __builtin_GOMP_alloc (align, sz,
		     alloc).  */
		  location_t loc = DECL_SOURCE_LOCATION (t);
		  tmp = builtin_decl_explicit (BUILT_IN_GOMP_ALLOC);
		  tmp = build_call_expr_loc (loc, tmp, 3, align, sz, alloc);
		  tmp = fold_build2_loc (loc, MODIFY_EXPR, TREE_TYPE (v), v,
					 fold_convert (TREE_TYPE (v), tmp));
		  gcc_assert (BIND_EXPR_BODY (bind_expr) != NULL_TREE);
		  /* Ensure that either TREE_CHAIN (TREE_VALUE (attr) is set
		     and GOMP_FREE added here or that DECL_HAS_VALUE_EXPR_P (t)
		     is set, using in a condition much further below.  */
		  gcc_assert (DECL_HAS_VALUE_EXPR_P (t)
			      || TREE_CHAIN (TREE_VALUE (attr)));
		  if (TREE_CHAIN (TREE_VALUE (attr)))
		    {
		      /* Fortran is special as it does not have properly nest
			 declarations in blocks.  And as there is no
			 initializer, there is also no expression to look for.
			 Hence, the FE makes the statement list of the
			 try-finally block available.  We can put the GOMP_alloc
			 at the top, unless an allocator or size expression
			 requires to put it afterward; note that the size is
			 always later in generated code; for strings, no
			 size expr but still an expr might be available.
			 As LTO does not handle a statement list, 'sl' has
			 to be removed; done so by removing the attribute.  */
		      DECL_ATTRIBUTES (t)
			= remove_attribute ("omp allocate",
					    DECL_ATTRIBUTES (t));
		      tree sl = TREE_PURPOSE (TREE_CHAIN (TREE_VALUE (attr)));
		      tree_stmt_iterator e = tsi_start (sl);
		      tree needle = NULL_TREE;
		      if (TREE_CHAIN (TREE_CHAIN (TREE_VALUE (attr))))
			{
			  needle = TREE_CHAIN (TREE_CHAIN (TREE_VALUE (attr)));
			  needle = (TREE_VALUE (needle) ? TREE_VALUE (needle)
							: sz);
			}
		      else if (TREE_CHAIN (TREE_CHAIN (TREE_VALUE (attr))))
			needle = sz;
		      else if (DECL_P (alloc) && DECL_ARTIFICIAL (alloc))
			needle = alloc;

		      /* Place the GOMP_alloc right after the statement that
			 computes NEEDLE (allocator/size), if any.  */
		      if (needle != NULL_TREE)
			{
			  while (!tsi_end_p (e))
			    {
			      if (*e == needle
				  || (TREE_CODE (*e) == MODIFY_EXPR
				      && TREE_OPERAND (*e, 0) == needle))
				break;
			      ++e;
			    }
			  gcc_assert (!tsi_end_p (e));
			}
		      tsi_link_after (&e, tmp, TSI_SAME_STMT);

		      /* As the cleanup is in BIND_EXPR_BODY, GOMP_free is added
			 here; for C/C++ it will be added in the 'cleanup'
			 section after gimplification.  But Fortran already has
			 a try-finally block.  */
		      sl = TREE_VALUE (TREE_CHAIN (TREE_VALUE (attr)));
		      e = tsi_last (sl);
		      tmp = builtin_decl_explicit (BUILT_IN_GOMP_FREE);
		      tmp = build_call_expr_loc (EXPR_LOCATION (*e), tmp, 2, v,
						 build_zero_cst (ptr_type_node));
		      tsi_link_after (&e, tmp, TSI_SAME_STMT);
		      /* Clobber V after the free so later passes know the
			 storage is dead.  */
		      tmp = build_clobber (TREE_TYPE (v), CLOBBER_STORAGE_END);
		      tmp = fold_build2_loc (loc, MODIFY_EXPR, TREE_TYPE (v), v,
					     fold_convert (TREE_TYPE (v), tmp));
		      ++e;
		      tsi_link_after (&e, tmp, TSI_SAME_STMT);
		    }
		  else
		    {
		      /* C/C++: insert the GOMP_alloc just before T's
			 DECL_EXPR (possibly wrapped in a
			 CLEANUP_POINT_EXPR).  */
		      gcc_assert (TREE_CODE (BIND_EXPR_BODY (bind_expr))
				  == STATEMENT_LIST);
		      tree_stmt_iterator e;
		      e = tsi_start (BIND_EXPR_BODY (bind_expr));
		      while (!tsi_end_p (e))
			{
			  if ((TREE_CODE (*e) == DECL_EXPR
			       && TREE_OPERAND (*e, 0) == t)
			      || (TREE_CODE (*e) == CLEANUP_POINT_EXPR
				  && (TREE_CODE (TREE_OPERAND (*e, 0))
				      == DECL_EXPR)
				  && (TREE_OPERAND (TREE_OPERAND (*e, 0), 0)
				      == t)))
			    break;
			  ++e;
			}
		      gcc_assert (!tsi_end_p (e));
		      tsi_link_before (&e, tmp, TSI_SAME_STMT);
		    }
		}
	    }

	  /* Mark variable as local.  */
	  if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t))
	    {
	      if (! DECL_SEEN_IN_BIND_EXPR_P (t)
		  || splay_tree_lookup (ctx->variables,
					(splay_tree_key) t) == NULL)
		{
		  int flag = GOVD_LOCAL;
		  if (ctx->region_type == ORT_SIMD
		      && TREE_ADDRESSABLE (t)
		      && !TREE_STATIC (t))
		    {
		      if (TREE_CODE (DECL_SIZE_UNIT (t)) != INTEGER_CST)
			ctx->add_safelen1 = true;
		      else
			flag = GOVD_PRIVATE;
		    }
		  omp_add_variable (ctx, t, flag | GOVD_SEEN);
		}
	      /* Static locals inside of target construct or offloaded
		 routines need to be "omp declare target".  */
	      if (TREE_STATIC (t))
		for (; ctx; ctx = ctx->outer_context)
		  if ((ctx->region_type & ORT_TARGET) != 0)
		    {
		      if (!lookup_attribute ("omp declare target",
					     DECL_ATTRIBUTES (t)))
			{
			  tree id = get_identifier ("omp declare target");
			  DECL_ATTRIBUTES (t)
			    = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (t));
			  varpool_node *node = varpool_node::get (t);
			  if (node)
			    {
			      node->offloadable = 1;
			      if (ENABLE_OFFLOADING && !DECL_EXTERNAL (t))
				{
				  g->have_offload = true;
				  if (!in_lto_p)
				    vec_safe_push (offload_vars, t);
				}
			    }
			}
		      break;
		    }
	    }

	  DECL_SEEN_IN_BIND_EXPR_P (t) = 1;

	  if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
	    cfun->has_local_explicit_reg_vars = true;
	}
    }

  /* Build the GIMPLE_BIND counterpart and make it the active bind while
     the body is gimplified.  */
  bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
				 BIND_EXPR_BLOCK (bind_expr));
  gimple_push_bind_expr (bind_stmt);

  /* Reset so we can detect save/keep-stack requests coming from this
     bind's own body; restored from old_* below.  */
  gimplify_ctxp->keep_stack = false;
  gimplify_ctxp->save_stack = false;

  /* Gimplify the body into the GIMPLE_BIND tuple's body.  */
  body = NULL;
  gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
  gimple_bind_set_body (bind_stmt, body);

  /* Source location wise, the cleanup code (stack_restore and clobbers)
     belongs to the end of the block, so propagate what we have.  The
     stack_save operation belongs to the beginning of block, which we can
     infer from the bind_expr directly if the block has no explicit
     assignment.  */
  if (BIND_EXPR_BLOCK (bind_expr))
    {
      end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
      start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
    }
  if (start_locus == 0)
    start_locus = EXPR_LOCATION (bind_expr);

  cleanup = NULL;
  stack_save = NULL;

  /* Add clobbers for all variables that go out of scope.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (VAR_P (t)
	  && !is_global_var (t)
	  && DECL_CONTEXT (t) == current_function_decl)
	{
	  if (flag_openmp
	      && DECL_HAS_VALUE_EXPR_P (t)
	      && TREE_USED (t)
	      && lookup_attribute ("omp allocate", DECL_ATTRIBUTES (t)))
	    {
	      /* For Fortran, TREE_CHAIN (TREE_VALUE (attr)) is set, which
		 causes that the GOMP_free call is already added above;
		 and "omp allocate" is removed from DECL_ATTRIBUTES.  */
	      tree v = TREE_OPERAND (DECL_VALUE_EXPR (t), 0);
	      tree tmp = builtin_decl_explicit (BUILT_IN_GOMP_FREE);
	      tmp = build_call_expr_loc (end_locus, tmp, 2, v,
					 build_zero_cst (ptr_type_node));
	      gimplify_and_add (tmp, &cleanup);
	      gimple *clobber_stmt;
	      tmp = build_clobber (TREE_TYPE (v), CLOBBER_STORAGE_END);
	      clobber_stmt = gimple_build_assign (v, tmp);
	      gimple_set_location (clobber_stmt, end_locus);
	      gimplify_seq_add_stmt (&cleanup, clobber_stmt);
	    }
	  if (!DECL_HARD_REGISTER (t)
	      && !TREE_THIS_VOLATILE (t)
	      && !DECL_HAS_VALUE_EXPR_P (t)
	      /* Only care for variables that have to be in memory.  Others
		 will be rewritten into SSA names, hence moved to the
		 top-level.  */
	      && !is_gimple_reg (t)
	      && flag_stack_reuse != SR_NONE)
	    {
	      tree clobber = build_clobber (TREE_TYPE (t), CLOBBER_STORAGE_END);
	      gimple *clobber_stmt;
	      clobber_stmt = gimple_build_assign (t, clobber);
	      gimple_set_location (clobber_stmt, end_locus);
	      gimplify_seq_add_stmt (&cleanup, clobber_stmt);
	    }

	  if (flag_openacc && oacc_declare_returns != NULL)
	    {
	      /* Collect OpenACC declare-return clauses for this variable;
		 DECL_VALUE_EXPR is followed so wrapped variables match.  */
	      tree key = t;
	      if (DECL_HAS_VALUE_EXPR_P (key))
		{
		  key = DECL_VALUE_EXPR (key);
		  if (INDIRECT_REF_P (key))
		    key = TREE_OPERAND (key, 0);
		}
	      tree *c = oacc_declare_returns->get (key);
	      if (c != NULL)
		{
		  if (ret_clauses)
		    OMP_CLAUSE_CHAIN (*c) = ret_clauses;

		  ret_clauses = unshare_expr (*c);

		  oacc_declare_returns->remove (key);

		  if (oacc_declare_returns->is_empty ())
		    {
		      delete oacc_declare_returns;
		      oacc_declare_returns = NULL;
		    }
		}
	    }
	}

      if (asan_poisoned_variables != NULL
	  && asan_poisoned_variables->contains (t))
	{
	  asan_poisoned_variables->remove (t);
	  asan_poison_variable (t, true, &cleanup);
	}

      if (gimplify_ctxp->live_switch_vars != NULL
	  && gimplify_ctxp->live_switch_vars->contains (t))
	gimplify_ctxp->live_switch_vars->remove (t);
    }

  /* If the code both contains VLAs and calls alloca, then we cannot reclaim
     the stack space allocated to the VLAs.  */
  if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack)
    {
      gcall *stack_restore;

      /* Save stack on entry and restore it on exit.  Add a try_finally
	 block to achieve this.  */
      build_stack_save_restore (&stack_save, &stack_restore);

      gimple_set_location (stack_save, start_locus);
      gimple_set_location (stack_restore, end_locus);

      gimplify_seq_add_stmt (&cleanup, stack_restore);
    }

  if (ret_clauses)
    {
      /* Emit the accumulated OpenACC declare-return clauses at the head
	 of the cleanup sequence.  */
      gomp_target *stmt;
      gimple_stmt_iterator si = gsi_start (cleanup);

      stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
				      ret_clauses);
      gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
    }

  if (cleanup)
    {
      /* Wrap body and cleanup in a try/finally, with the stack save (if
	 any) preceding the try.  */
      gtry *gs;
      gimple_seq new_body;

      new_body = NULL;
      gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
			     GIMPLE_TRY_FINALLY);

      if (stack_save)
	gimplify_seq_add_stmt (&new_body, stack_save);
      gimplify_seq_add_stmt (&new_body, gs);
      gimple_bind_set_body (bind_stmt, new_body);
    }

  /* keep_stack propagates all the way up to the outermost BIND_EXPR.  */
  if (!gimplify_ctxp->keep_stack)
    gimplify_ctxp->keep_stack = old_keep_stack;
  gimplify_ctxp->save_stack = old_save_stack;

  gimple_pop_bind_expr ();

  gimplify_seq_add_stmt (pre_p, bind_stmt);

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}
1849 :
1850 : /* Maybe add early return predict statement to PRE_P sequence. */
1851 :
1852 : static void
1853 2210884 : maybe_add_early_return_predict_stmt (gimple_seq *pre_p)
1854 : {
1855 : /* If we are not in a conditional context, add PREDICT statement. */
1856 2210884 : if (gimple_conditional_context ())
1857 : {
1858 442998 : gimple *predict = gimple_build_predict (PRED_TREE_EARLY_RETURN,
1859 : NOT_TAKEN);
1860 442998 : gimplify_seq_add_stmt (pre_p, predict);
1861 : }
1862 2210884 : }
1863 :
1864 : /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1865 : GIMPLE value, it is assigned to a new temporary and the statement is
1866 : re-written to return the temporary.
1867 :
1868 : PRE_P points to the sequence where side effects that must happen before
1869 : STMT should be stored. */
1870 :
static enum gimplify_status
gimplify_return_expr (tree stmt, gimple_seq *pre_p)
{
  greturn *ret;
  tree ret_expr = TREE_OPERAND (stmt, 0);
  tree result_decl, result;

  if (ret_expr == error_mark_node)
    return GS_ERROR;

  /* A bare "return;" or a return of the RESULT_DECL itself needs no
     rewriting; emit the GIMPLE_RETURN directly.  */
  if (!ret_expr
      || TREE_CODE (ret_expr) == RESULT_DECL)
    {
      maybe_add_early_return_predict_stmt (pre_p);
      greturn *ret = gimple_build_return (ret_expr);
      copy_warning (ret, stmt);
      gimplify_seq_add_stmt (pre_p, ret);
      return GS_ALL_DONE;
    }

  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
    result_decl = NULL_TREE;
  else if (TREE_CODE (ret_expr) == COMPOUND_EXPR)
    {
      /* Used in C++ for handling EH cleanup of the return value if a local
	 cleanup throws.  Assume the front-end knows what it's doing.  */
      result_decl = DECL_RESULT (current_function_decl);
      /* But crash if we end up trying to modify ret_expr below.  */
      ret_expr = NULL_TREE;
    }
  else
    {
      /* RET_EXPR is a MODIFY_EXPR or INIT_EXPR whose LHS names the
	 RESULT_DECL (asserted below).  */
      result_decl = TREE_OPERAND (ret_expr, 0);

      /* See through a return by reference.  */
      if (INDIRECT_REF_P (result_decl))
	result_decl = TREE_OPERAND (result_decl, 0);

      gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
		   || TREE_CODE (ret_expr) == INIT_EXPR)
		  && TREE_CODE (result_decl) == RESULT_DECL);
    }

  /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
     Recall that aggregate_value_p is FALSE for any aggregate type that is
     returned in registers.  If we're returning values in registers, then
     we don't want to extend the lifetime of the RESULT_DECL, particularly
     across another call.  In addition, for those aggregates for which
     hard_function_value generates a PARALLEL, we'll die during normal
     expansion of structure assignments; there's special code in expand_return
     to handle this case that does not exist in expand_expr.  */
  if (!result_decl)
    result = NULL_TREE;
  else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
    {
      /* A variable-sized RESULT_DECL still needs gimplified size
	 expressions before it can be used.  */
      if (!poly_int_tree_p (DECL_SIZE (result_decl)))
	{
	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
	    gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
	  /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
	     should be effectively allocated by the caller, i.e. all calls to
	     this function must be subject to the Return Slot Optimization.  */
	  gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
	  gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
	}
      result = result_decl;
    }
  else if (gimplify_ctxp->return_temp)
    /* Reuse the single per-function return temporary.  */
    result = gimplify_ctxp->return_temp;
  else
    {
      result = create_tmp_reg (TREE_TYPE (result_decl));

      /* ??? With complex control flow (usually involving abnormal edges),
	 we can wind up warning about an uninitialized value for this.  Due
	 to how this variable is constructed and initialized, this is never
	 true.  Give up and never warn.  */
      suppress_warning (result, OPT_Wuninitialized);

      gimplify_ctxp->return_temp = result;
    }

  /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
     Then gimplify the whole thing.  */
  if (result != result_decl)
    TREE_OPERAND (ret_expr, 0) = result;

  gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);

  maybe_add_early_return_predict_stmt (pre_p);
  ret = gimple_build_return (result);
  copy_warning (ret, stmt);
  gimplify_seq_add_stmt (pre_p, ret);

  return GS_ALL_DONE;
}
1967 :
1968 : /* Gimplify a variable-length array DECL. */
1969 :
static void
gimplify_vla_decl (tree decl, gimple_seq *seq_p)
{
  /* This is a variable-sized decl.  Simplify its size and mark it
     for deferred expansion.  */
  tree t, addr, ptr_type;

  gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
  gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);

  /* Don't mess with a DECL_VALUE_EXPR set by the front-end.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    return;

  /* All occurrences of this decl in final gimplified code will be
     replaced by indirection.  Setting DECL_VALUE_EXPR does two
     things: First, it lets the rest of the gimplifier know what
     replacement to use.  Second, it lets the debug info know
     where to find the value.  */
  ptr_type = build_pointer_type (TREE_TYPE (decl));
  addr = create_tmp_var (ptr_type, get_name (decl));
  /* Keep the pointer visible in debug info so DECL remains
     inspectable.  */
  DECL_IGNORED_P (addr) = 0;
  t = build_fold_indirect_ref (addr);
  /* The dereference cannot trap: ADDR always points at a live
     alloca'd block.  */
  TREE_THIS_NOTRAP (t) = 1;
  SET_DECL_VALUE_EXPR (decl, t);
  DECL_HAS_VALUE_EXPR_P (decl) = 1;

  /* Allocate the storage dynamically: ADDR = (ptr_type) alloca (size).  */
  t = build_alloca_call_expr (DECL_SIZE_UNIT (decl), DECL_ALIGN (decl),
			      max_int_size_in_bytes (TREE_TYPE (decl)));
  /* The call has been built for a variable-sized object.  */
  CALL_ALLOCA_FOR_VAR_P (t) = 1;
  t = fold_convert (ptr_type, t);
  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);

  gimplify_and_add (t, seq_p);

  /* Record the dynamic allocation associated with DECL if requested.  */
  if (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC)
    record_dynamic_alloc (decl);
}
2010 :
2011 : /* A helper function to be called via walk_tree. Mark all labels under *TP
2012 : as being forced. To be called for DECL_INITIAL of static variables. */
2013 :
2014 : static tree
2015 857861 : force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2016 : {
2017 857861 : if (TYPE_P (*tp))
2018 0 : *walk_subtrees = 0;
2019 857861 : if (TREE_CODE (*tp) == LABEL_DECL)
2020 : {
2021 926 : FORCED_LABEL (*tp) = 1;
2022 926 : cfun->has_forced_label_in_static = 1;
2023 : }
2024 :
2025 857861 : return NULL_TREE;
2026 : }
2027 :
2028 : /* Generate an initialization to automatic variable DECL based on INIT_TYPE.
2029 : Build a call to internal const function DEFERRED_INIT:
2030 : 1st argument: SIZE of the DECL;
2031 : 2nd argument: INIT_TYPE;
2032 : 3rd argument: NAME of the DECL;
2033 :
2034 : as LHS = DEFERRED_INIT (SIZE of the DECL, INIT_TYPE, NAME of the DECL). */
2035 :
static void
gimple_add_init_for_auto_var (tree decl,
			      enum auto_init_type init_type,
			      gimple_seq *seq_p)
{
  gcc_assert (auto_var_p (decl));
  gcc_assert (init_type > AUTO_INIT_UNINITIALIZED);

  const location_t loc = DECL_SOURCE_LOCATION (decl);
  tree decl_size = TYPE_SIZE_UNIT (TREE_TYPE (decl));
  tree init_type_node = build_int_cst (integer_type_node, (int) init_type);
  tree decl_name;

  /* Third argument: the variable's name, or a synthesized "D.<uid>"
     for anonymous temporaries, so diagnostics can refer to it.  */
  if (DECL_NAME (decl))
    decl_name = build_string_literal (DECL_NAME (decl));
  else
    {
      /* Buffer sized for "D." plus the decimal digits of an int UID.  */
      char decl_name_anonymous[3 + (HOST_BITS_PER_INT + 2) / 3];
      sprintf (decl_name_anonymous, "D.%u", DECL_UID (decl));
      decl_name = build_string_literal (decl_name_anonymous);
    }

  tree call = build_call_expr_internal_loc (loc, IFN_DEFERRED_INIT,
					    TREE_TYPE (decl), 3,
					    decl_size, init_type_node,
					    decl_name);

  /* Emit DECL = .DEFERRED_INIT (SIZE, INIT_TYPE, NAME) into SEQ_P.  */
  gimplify_assign (decl, call, seq_p);
}
2065 :
2066 : /* Generate padding initialization for automatic variable DECL.
2067 : C guarantees that brace-init with fewer initializers than members
2068 : aggregate will initialize the rest of the aggregate as-if it were
2069 : static initialization. In turn static initialization guarantees
2070 : that padding is initialized to zero. So, we always initialize paddings
2071 : to zeroes regardless INIT_TYPE.
2072 : To do the padding initialization, we insert a call to
2073 : __builtin_clear_padding (&decl, 0, for_auto_init = true).
2074 : Note, we add an additional dummy argument for __builtin_clear_padding,
2075 : 'for_auto_init' to distinguish whether this call is for automatic
2076 : variable initialization or not.
2077 : */
static void
gimple_add_padding_init_for_auto_var (tree decl, bool is_vla,
				      gimple_seq *seq_p)
{
  tree addr_of_decl = NULL_TREE;
  tree fn = builtin_decl_explicit (BUILT_IN_CLEAR_PADDING);

  if (is_vla)
    {
      /* The temporary address variable for this vla should be
	 created in gimplify_vla_decl.  Reuse it rather than taking
	 the address of the VLA decl itself.  */
      gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
      gcc_assert (INDIRECT_REF_P (DECL_VALUE_EXPR (decl)));
      addr_of_decl = TREE_OPERAND (DECL_VALUE_EXPR (decl), 0);
    }
  else
    {
      /* Taking the decl's address forces it into memory.  */
      mark_addressable (decl);
      addr_of_decl = build_fold_addr_expr (decl);
    }

  /* Second argument is the 'for_auto_init' flag (nonzero here), which
     marks this as a compiler-generated padding-clearing call.  */
  gimple *call = gimple_build_call (fn, 2, addr_of_decl,
				    build_one_cst (TREE_TYPE (addr_of_decl)));
  gimplify_seq_add_stmt (seq_p, call);
}
2103 :
2104 : /* Return true if the DECL need to be automatically initialized by the
2105 : compiler. */
2106 : static bool
2107 4219208 : var_needs_auto_init_p (tree decl)
2108 : {
2109 4219208 : if (auto_var_p (decl)
2110 4124620 : && (TREE_CODE (decl) != VAR_DECL || !DECL_HARD_REGISTER (decl))
2111 4123915 : && flag_auto_var_init > AUTO_INIT_UNINITIALIZED
2112 233997 : && !lookup_attribute ("uninitialized", DECL_ATTRIBUTES (decl))
2113 233989 : && !lookup_attribute ("indeterminate", DECL_ATTRIBUTES (decl))
2114 233980 : && !OPAQUE_TYPE_P (TREE_TYPE (decl))
2115 4453188 : && !is_empty_type (TREE_TYPE (decl)))
2116 : return true;
2117 : return false;
2118 : }
2119 :
2120 : /* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
2121 : and initialization explicit. */
2122 :
static enum gimplify_status
gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
{
  tree stmt = *stmt_p;
  tree decl = DECL_EXPR_DECL (stmt);

  /* The DECL_EXPR itself is consumed here; nothing replaces it.  */
  *stmt_p = NULL_TREE;

  if (TREE_TYPE (decl) == error_mark_node)
    return GS_ERROR;

  /* Gimplify any size expressions embedded in the decl's type (and,
     for references, in the referenced type) exactly once.  */
  if ((TREE_CODE (decl) == TYPE_DECL
       || VAR_P (decl))
      && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
    {
      gimplify_type_sizes (TREE_TYPE (decl), seq_p);
      if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
	gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl)), seq_p);
    }

  /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
     in case its size expressions contain problematic nodes like CALL_EXPR.  */
  if (TREE_CODE (decl) == TYPE_DECL
      && DECL_ORIGINAL_TYPE (decl)
      && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
    {
      gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
      if (TREE_CODE (DECL_ORIGINAL_TYPE (decl)) == REFERENCE_TYPE)
	gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl)), seq_p);
    }

  if (VAR_P (decl) && !DECL_EXTERNAL (decl))
    {
      tree init = DECL_INITIAL (decl);
      bool is_vla = false;
      /* Check whether a decl has FE created VALUE_EXPR here BEFORE
	 gimplify_vla_decl creates VALUE_EXPR for a vla decl.
	 If the decl has VALUE_EXPR that was created by FE (usually
	 C++FE), it's a proxy variable, and FE already initialized
	 the VALUE_EXPR of it, we should not initialize it anymore.  */
      bool decl_had_value_expr_p = DECL_HAS_VALUE_EXPR_P (decl);

      /* A decl gets VLA treatment if its size is not a compile-time
	 (poly-)constant, or if generic stack checking forces large
	 locals to be allocated dynamically.  */
      poly_uint64 size;
      if (!poly_int_tree_p (DECL_SIZE_UNIT (decl), &size)
	  || (!TREE_STATIC (decl)
	      && flag_stack_check == GENERIC_STACK_CHECK
	      && maybe_gt (size,
			   (unsigned HOST_WIDE_INT) STACK_CHECK_MAX_VAR_SIZE)))
	{
	  gimplify_vla_decl (decl, seq_p);
	  is_vla = true;
	}

      /* Emit ASan use-after-scope poisoning for eligible addressable
	 stack variables; the matching unpoison happens at scope exit.  */
      if (asan_poisoned_variables
	  && !is_vla
	  && TREE_ADDRESSABLE (decl)
	  && !TREE_STATIC (decl)
	  && !DECL_HAS_VALUE_EXPR_P (decl)
	  && DECL_ALIGN (decl) <= MAX_SUPPORTED_STACK_ALIGNMENT
	  && dbg_cnt (asan_use_after_scope)
	  && !gimplify_omp_ctxp
	  /* GNAT introduces temporaries to hold return values of calls in
	     initializers of variables defined in other units, so the
	     declaration of the variable is discarded completely.  We do not
	     want to issue poison calls for such dropped variables.  */
	  && (DECL_SEEN_IN_BIND_EXPR_P (decl)
	      || (DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)))
	{
	  asan_poisoned_variables->add (decl);
	  asan_poison_variable (decl, false, seq_p);
	  if (!DECL_ARTIFICIAL (decl) && gimplify_ctxp->live_switch_vars)
	    gimplify_ctxp->live_switch_vars->add (decl);
	}

      /* Some front ends do not explicitly declare all anonymous
	 artificial variables.  We compensate here by declaring the
	 variables, though it would be better if the front ends would
	 explicitly declare them.  */
      if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
	  && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
	gimple_add_tmp_var (decl);

      if (init && init != error_mark_node)
	{
	  if (!TREE_STATIC (decl))
	    {
	      /* Turn the initializer into an INIT_EXPR statement and
		 gimplify it; the tree node itself can be freed after.  */
	      DECL_INITIAL (decl) = NULL_TREE;
	      init = build2 (INIT_EXPR, void_type_node, decl, init);
	      gimplify_and_add (init, seq_p);
	      ggc_free (init);
	      /* Clear TREE_READONLY if we really have an initialization.  */
	      if (!DECL_INITIAL (decl)
		  && !omp_privatize_by_reference (decl))
		TREE_READONLY (decl) = 0;
	    }
	  else
	    /* We must still examine initializers for static variables
	       as they may contain a label address.  */
	    walk_tree (&init, force_labels_r, NULL, NULL);
	}
      /* When there is no explicit initializer, if the user requested,
	 we should insert an artificial initializer for this automatic
	 variable.  */
      else if (var_needs_auto_init_p (decl)
	       && !decl_had_value_expr_p)
	{
	  gimple_add_init_for_auto_var (decl,
					flag_auto_var_init,
					seq_p);
	  /* The expanding of a call to the above .DEFERRED_INIT will apply
	     block initialization to the whole space covered by this variable.
	     As a result, all the paddings will be initialized to zeroes
	     for zero initialization and 0xFE byte-repeatable patterns for
	     pattern initialization.
	     In order to make the paddings as zeroes for pattern init, We
	     should add a call to __builtin_clear_padding to clear the
	     paddings to zero in compatible with CLANG.
	     We cannot insert this call if the variable is a gimple register
	     since __builtin_clear_padding will take the address of the
	     variable.  As a result, if a long double/_Complex long double
	     variable will spilled into stack later, its padding is 0XFE.  */
	  if (flag_auto_var_init == AUTO_INIT_PATTERN
	      && !is_gimple_reg (decl)
	      && clear_padding_type_may_have_padding_p (TREE_TYPE (decl)))
	    gimple_add_padding_init_for_auto_var (decl, is_vla, seq_p);
	}
    }

  return GS_ALL_DONE;
}
2253 :
2254 : /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
2255 : and replacing the LOOP_EXPR with goto, but if the loop contains an
2256 : EXIT_EXPR, we need to append a label for it to jump to. */
2257 :
2258 : static enum gimplify_status
2259 200453 : gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
2260 : {
2261 200453 : tree saved_label = gimplify_ctxp->exit_label;
2262 200453 : tree start_label = create_artificial_label (UNKNOWN_LOCATION);
2263 :
2264 200453 : gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
2265 :
2266 200453 : gimplify_ctxp->exit_label = NULL_TREE;
2267 :
2268 200453 : gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
2269 :
2270 200453 : gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
2271 :
2272 200453 : if (gimplify_ctxp->exit_label)
2273 6434 : gimplify_seq_add_stmt (pre_p,
2274 3217 : gimple_build_label (gimplify_ctxp->exit_label));
2275 :
2276 200453 : gimplify_ctxp->exit_label = saved_label;
2277 :
2278 200453 : *expr_p = NULL;
2279 200453 : return GS_ALL_DONE;
2280 : }
2281 :
2282 : /* Gimplify a statement list onto a sequence. These may be created either
2283 : by an enlightened front-end, or by shortcut_cond_expr. */
2284 :
2285 : static enum gimplify_status
2286 8463779 : gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
2287 : {
2288 8463779 : tree temp = voidify_wrapper_expr (*expr_p, NULL);
2289 :
2290 8463779 : tree_stmt_iterator i = tsi_start (*expr_p);
2291 :
2292 54693695 : while (!tsi_end_p (i))
2293 : {
2294 37766137 : gimplify_stmt (tsi_stmt_ptr (i), pre_p);
2295 37766137 : tsi_delink (&i);
2296 : }
2297 :
2298 8463779 : if (temp)
2299 : {
2300 15227 : *expr_p = temp;
2301 15227 : return GS_OK;
2302 : }
2303 :
2304 : return GS_ALL_DONE;
2305 : }
2306 :
2307 :
2308 : /* Emit warning for the unreachable statement STMT if needed.
2309 : Return the gimple itself when the warning is emitted, otherwise
2310 : return NULL. */
2311 : static gimple *
2312 181 : emit_warn_switch_unreachable (gimple *stmt)
2313 : {
2314 181 : if (gimple_code (stmt) == GIMPLE_GOTO
2315 50 : && TREE_CODE (gimple_goto_dest (stmt)) == LABEL_DECL
2316 231 : && DECL_ARTIFICIAL (gimple_goto_dest (stmt)))
2317 : /* Don't warn for compiler-generated gotos. These occur
2318 : in Duff's devices, for example. */
2319 : return NULL;
2320 : else
2321 131 : warning_at (gimple_location (stmt), OPT_Wswitch_unreachable,
2322 : "statement will never be executed");
2323 131 : return stmt;
2324 : }
2325 :
2326 : /* Callback for walk_gimple_seq. */
2327 :
static tree
warn_switch_unreachable_and_auto_init_r (gimple_stmt_iterator *gsi_p,
					 bool *handled_ops_p,
					 struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  /* WI->info records the statement already warned about (if any), so
     at most one -Wswitch-unreachable diagnostic is issued per walk.  */
  bool unreachable_issued = wi->info != NULL;

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    case GIMPLE_TRY:
      /* A compiler-generated cleanup or a user-written try block.
	 If it's empty, don't dive into it--that would result in
	 worse location info.  */
      if (gimple_try_eval (stmt) == NULL)
	{
	  if (warn_switch_unreachable && !unreachable_issued)
	    wi->info = emit_warn_switch_unreachable (stmt);

	  /* Stop when auto var init warning is not on.  */
	  if (!warn_trivial_auto_var_init)
	    return integer_zero_node;
	}
      /* Fall through.  */
    case GIMPLE_BIND:
    case GIMPLE_CATCH:
    case GIMPLE_EH_FILTER:
    case GIMPLE_TRANSACTION:
      /* Walk the sub-statements.  */
      *handled_ops_p = false;
      break;

    case GIMPLE_DEBUG:
      /* Ignore these.  We may generate them before declarations that
	 are never executed.  If there's something to warn about,
	 there will be non-debug stmts too, and we'll catch those.  */
      break;

    case GIMPLE_ASSIGN:
      /* See comment below in the GIMPLE_CALL case.  */
      if (flag_auto_var_init > AUTO_INIT_UNINITIALIZED
	  && gimple_assign_single_p (stmt)
	  && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
	{
	  gimple *g = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));
	  if (gimple_call_internal_p (g, IFN_DEFERRED_INIT))
	    break;
	}
      goto do_default;

    case GIMPLE_LABEL:
      /* Stop till the first Label.  */
      return integer_zero_node;
    case GIMPLE_CALL:
      if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
	{
	  /* ASan instrumentation is transparent; keep walking.  */
	  *handled_ops_p = false;
	  break;
	}
      /* Don't warn for compiler-generated initializations for
	 -ftrivial-auto-var-init for -Wswitch-unreachable.  Though
	 do warn for -Wtrivial-auto-var-init.
	 There are 3 cases:
	 case 1: a call to .DEFERRED_INIT;
	 case 2: a call to __builtin_clear_padding with the 2nd argument is
	 present and non-zero;
	 case 3: a gimple assign store right after the call to .DEFERRED_INIT
	 that has the LHS of .DEFERRED_INIT as the RHS as following:
	  _1 = .DEFERRED_INIT (4, 2, &"i1"[0]);
	  i1 = _1.
	 case 3 is handled above in the GIMPLE_ASSIGN case.  */
      if (flag_auto_var_init > AUTO_INIT_UNINITIALIZED
	  && gimple_call_internal_p (stmt, IFN_DEFERRED_INIT))
	{
	  if (warn_trivial_auto_var_init)
	    {
	      /* Get the variable name from the 3rd argument of call.  */
	      tree var_name = gimple_call_arg (stmt, 2);
	      var_name = TREE_OPERAND (TREE_OPERAND (var_name, 0), 0);
	      const char *var_name_str = TREE_STRING_POINTER (var_name);

	      warning_at (gimple_location (stmt), OPT_Wtrivial_auto_var_init,
			  "%qs cannot be initialized with "
			  "%<-ftrivial-auto-var_init%>", var_name_str);
	    }
	  break;
	}
      if (flag_auto_var_init > AUTO_INIT_UNINITIALIZED
	  && gimple_call_builtin_p (stmt, BUILT_IN_CLEAR_PADDING)
	  && (bool) TREE_INT_CST_LOW (gimple_call_arg (stmt, 1)))
	break;
      /* Fall through.  */
    default:
    do_default:
      /* Check the first "real" statement (not a decl/lexical scope/...),
	 issue warning if needed.  */
      if (warn_switch_unreachable && !unreachable_issued)
	wi->info = emit_warn_switch_unreachable (stmt);
      /* Stop when auto var init warning is not on.  */
      if (!warn_trivial_auto_var_init)
	return integer_zero_node;
      break;
    }
  return NULL_TREE;
}
2434 :
2435 :
2436 : /* Possibly warn about unreachable statements between switch's controlling
2437 : expression and the first case. Also warn about -ftrivial-auto-var-init
2438 : cannot initialize the auto variable under such situation.
2439 : SEQ is the body of a switch expression. */
2440 :
2441 : static void
2442 50660 : maybe_warn_switch_unreachable_and_auto_init (gimple_seq seq)
2443 : {
2444 60 : if ((!warn_switch_unreachable && !warn_trivial_auto_var_init)
2445 : /* This warning doesn't play well with Fortran when optimizations
2446 : are on. */
2447 50600 : || lang_GNU_Fortran ()
2448 97649 : || seq == NULL)
2449 3841 : return;
2450 :
2451 46819 : struct walk_stmt_info wi;
2452 :
2453 46819 : memset (&wi, 0, sizeof (wi));
2454 46819 : walk_gimple_seq (seq, warn_switch_unreachable_and_auto_init_r, NULL, &wi);
2455 : }
2456 :
2457 :
2458 : /* A label entry that pairs label and a location. */
struct label_entry
{
  /* The label itself.  */
  tree label;
  /* A source location useful for diagnostics about this label.  */
  location_t loc;
};
2464 :
2465 : /* Find LABEL in vector of label entries VEC. */
2466 :
2467 : static struct label_entry *
2468 11794 : find_label_entry (const auto_vec<struct label_entry> *vec, tree label)
2469 : {
2470 11794 : unsigned int i;
2471 11794 : struct label_entry *l;
2472 :
2473 22683 : FOR_EACH_VEC_ELT (*vec, i, l)
2474 19391 : if (l->label == label)
2475 : return l;
2476 : return NULL;
2477 : }
2478 :
2479 : /* Return true if LABEL, a LABEL_DECL, represents a case label
2480 : in a vector of labels CASES. */
2481 :
2482 : static bool
2483 15266 : case_label_p (const vec<tree> *cases, tree label)
2484 : {
2485 15266 : unsigned int i;
2486 15266 : tree l;
2487 :
2488 156568 : FOR_EACH_VEC_ELT (*cases, i, l)
2489 156242 : if (CASE_LABEL (l) == label)
2490 : return true;
2491 : return false;
2492 : }
2493 :
2494 : /* Find the last nondebug statement in a scope STMT. */
2495 :
static gimple *
last_stmt_in_scope (gimple *stmt)
{
  if (!stmt)
    return NULL;

  /* Return the last statement of sequence S that is neither a debug
     stmt nor a compiler-generated .DEFERRED_INIT call, or NULL if the
     scan walks off the front of the sequence (N == S checks the head
     sentinel before stepping to N->prev).  */
  auto last_stmt_in_seq = [] (gimple_seq s)
    {
      gimple_seq_node n;
      for (n = gimple_seq_last (s);
	   n && (is_gimple_debug (n)
		 || (flag_auto_var_init > AUTO_INIT_UNINITIALIZED
		     && gimple_call_internal_p (n, IFN_DEFERRED_INIT)));
	   n = n->prev)
	if (n == s)
	  return (gimple *) NULL;
      return (gimple *) n;
    };

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      {
	/* Recurse into the bind body's last statement.  */
	gbind *bind = as_a <gbind *> (stmt);
	stmt = last_stmt_in_seq (gimple_bind_body (bind));
	return last_stmt_in_scope (stmt);
      }

    case GIMPLE_TRY:
      {
	gtry *try_stmt = as_a <gtry *> (stmt);
	stmt = last_stmt_in_seq (gimple_try_eval (try_stmt));
	gimple *last_eval = last_stmt_in_scope (stmt);
	/* For a try/finally whose body can fall through (and is not a
	   fallthrough marker), the last executed statement is in the
	   cleanup sequence.  */
	if (gimple_stmt_may_fallthru (last_eval)
	    && (last_eval == NULL
		|| !gimple_call_internal_p (last_eval, IFN_FALLTHROUGH))
	    && gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
	  {
	    stmt = last_stmt_in_seq (gimple_try_cleanup (try_stmt));
	    return last_stmt_in_scope (stmt);
	  }
	else
	  return last_eval;
      }

    case GIMPLE_DEBUG:
      /* Debug statements are filtered out by last_stmt_in_seq above.  */
      gcc_unreachable ();

    default:
      return stmt;
    }
}
2548 :
2549 : /* Collect labels that may fall through into LABELS and return the statement
2550 : preceding another case label, or a user-defined label. Store a location
2551 : useful to give warnings at *PREVLOC (usually the location of the returned
2552 : statement or of its surrounding scope). */
2553 :
2554 : static gimple *
2555 19670 : collect_fallthrough_labels (gimple_stmt_iterator *gsi_p,
2556 : auto_vec <struct label_entry> *labels,
2557 : location_t *prevloc)
2558 : {
2559 19670 : gimple *prev = NULL;
2560 :
2561 19670 : *prevloc = UNKNOWN_LOCATION;
2562 96476 : do
2563 : {
2564 96476 : if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND)
2565 : {
2566 : /* Recognize the special GIMPLE_BIND added by gimplify_switch_expr,
2567 : which starts on a GIMPLE_SWITCH and ends with a break label.
2568 : Handle that as a single statement that can fall through. */
2569 1393 : gbind *bind = as_a <gbind *> (gsi_stmt (*gsi_p));
2570 1393 : gimple *first = gimple_seq_first_stmt (gimple_bind_body (bind));
2571 1393 : gimple *last = gimple_seq_last_stmt (gimple_bind_body (bind));
2572 1387 : if (last
2573 1387 : && gimple_code (first) == GIMPLE_SWITCH
2574 1489 : && gimple_code (last) == GIMPLE_LABEL)
2575 : {
2576 102 : tree label = gimple_label_label (as_a <glabel *> (last));
2577 102 : if (SWITCH_BREAK_LABEL_P (label))
2578 : {
2579 102 : prev = bind;
2580 102 : gsi_next (gsi_p);
2581 102 : continue;
2582 : }
2583 : }
2584 : }
2585 96374 : if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND
2586 96374 : || gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_TRY)
2587 : {
2588 : /* Nested scope. Only look at the last statement of
2589 : the innermost scope. */
2590 1415 : location_t bind_loc = gimple_location (gsi_stmt (*gsi_p));
2591 1415 : gimple *last = last_stmt_in_scope (gsi_stmt (*gsi_p));
2592 1415 : if (last)
2593 : {
2594 1407 : prev = last;
2595 : /* It might be a label without a location. Use the
2596 : location of the scope then. */
2597 1407 : if (!gimple_has_location (prev))
2598 616 : *prevloc = bind_loc;
2599 : }
2600 1415 : gsi_next (gsi_p);
2601 1415 : continue;
2602 1415 : }
2603 :
2604 : /* Ifs are tricky. */
2605 94959 : if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_COND)
2606 : {
2607 6969 : gcond *cond_stmt = as_a <gcond *> (gsi_stmt (*gsi_p));
2608 6969 : tree false_lab = gimple_cond_false_label (cond_stmt);
2609 6969 : location_t if_loc = gimple_location (cond_stmt);
2610 :
2611 : /* If we have e.g.
2612 : if (i > 1) goto <D.2259>; else goto D;
2613 : we can't do much with the else-branch. */
2614 6969 : if (!DECL_ARTIFICIAL (false_lab))
2615 : break;
2616 :
2617 : /* Go on until the false label, then one step back. */
2618 53216 : for (; !gsi_end_p (*gsi_p); gsi_next (gsi_p))
2619 : {
2620 53216 : gimple *stmt = gsi_stmt (*gsi_p);
2621 53216 : if (gimple_code (stmt) == GIMPLE_LABEL
2622 53216 : && gimple_label_label (as_a <glabel *> (stmt)) == false_lab)
2623 : break;
2624 : }
2625 :
2626 : /* Not found? Oops. */
2627 6969 : if (gsi_end_p (*gsi_p))
2628 : break;
2629 :
2630 : /* A dead label can't fall through. */
2631 6969 : if (!UNUSED_LABEL_P (false_lab))
2632 : {
2633 6922 : struct label_entry l = { false_lab, if_loc };
2634 6922 : labels->safe_push (l);
2635 : }
2636 :
2637 : /* Go to the last statement of the then branch. */
2638 6969 : gsi_prev (gsi_p);
2639 :
2640 : /* if (i != 0) goto <D.1759>; else goto <D.1760>;
2641 : <D.1759>:
2642 : <stmt>;
2643 : goto <D.1761>;
2644 : <D.1760>:
2645 : */
2646 6969 : if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_GOTO
2647 6969 : && !gimple_has_location (gsi_stmt (*gsi_p)))
2648 : {
2649 : /* Look at the statement before, it might be
2650 : attribute fallthrough, in which case don't warn. */
2651 1439 : gsi_prev (gsi_p);
2652 1439 : bool fallthru_before_dest
2653 1439 : = gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_FALLTHROUGH);
2654 1439 : gsi_next (gsi_p);
2655 1439 : tree goto_dest = gimple_goto_dest (gsi_stmt (*gsi_p));
2656 1439 : if (!fallthru_before_dest)
2657 : {
2658 1382 : struct label_entry l = { goto_dest, if_loc };
2659 1382 : labels->safe_push (l);
2660 : }
2661 : }
2662 : /* This case is about
2663 : if (1 != 0) goto <D.2022>; else goto <D.2023>;
2664 : <D.2022>:
2665 : n = n + 1; // #1
2666 : <D.2023>: // #2
2667 : <D.1988>: // #3
2668 : where #2 is UNUSED_LABEL_P and we want to warn about #1 falling
2669 : through to #3. So set PREV to #1. */
2670 5530 : else if (UNUSED_LABEL_P (false_lab))
2671 47 : prev = gsi_stmt (*gsi_p);
2672 :
2673 : /* And move back. */
2674 6969 : gsi_next (gsi_p);
2675 : }
2676 :
2677 94959 : tree lab;
2678 : /* Remember the last statement. Skip labels that are of no interest
2679 : to us. */
2680 94959 : if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2681 : {
2682 11576 : tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (*gsi_p)));
2683 11576 : if (find_label_entry (labels, label))
2684 70541 : prev = gsi_stmt (*gsi_p);
2685 : }
2686 83383 : else if (gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_ASAN_MARK))
2687 : ;
2688 83383 : else if (flag_auto_var_init > AUTO_INIT_UNINITIALIZED
2689 83383 : && gimple_call_internal_p (gsi_stmt (*gsi_p),
2690 : IFN_DEFERRED_INIT))
2691 : ;
2692 83383 : else if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_PREDICT)
2693 : ;
2694 82215 : else if (flag_auto_var_init > AUTO_INIT_UNINITIALIZED
2695 963 : && gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_GOTO
2696 190 : && (lab = gimple_goto_dest (gsi_stmt (*gsi_p)))
2697 190 : && TREE_CODE (lab) == LABEL_DECL
2698 82404 : && VACUOUS_INIT_LABEL_P (lab))
2699 : ;
2700 82215 : else if (!is_gimple_debug (gsi_stmt (*gsi_p)))
2701 70541 : prev = gsi_stmt (*gsi_p);
2702 94959 : gsi_next (gsi_p);
2703 : }
2704 96476 : while (!gsi_end_p (*gsi_p)
2705 : /* Stop if we find a case or a user-defined label. */
2706 192952 : && (gimple_code (gsi_stmt (*gsi_p)) != GIMPLE_LABEL
2707 20389 : || !gimple_has_location (gsi_stmt (*gsi_p))));
2708 :
2709 19670 : if (prev && gimple_has_location (prev))
2710 19217 : *prevloc = gimple_location (prev);
2711 19670 : return prev;
2712 : }
2713 :
2714 : /* Return true if the switch fallthrough warning should occur. LABEL is
2715 : the label statement that we're falling through to; GSI_P is positioned
 : at it. Returns false when LABEL is explicitly marked as fallthrough,
 : when a plain (non-case) label is followed by real statements (likely
 : intentional), or when the fallen-into branch terminates immediately
 : (empty default, goto/break, or return). */
2716 :
2717 : static bool
2718 15775 : should_warn_for_implicit_fallthrough (gimple_stmt_iterator *gsi_p, tree label)
2719 : {
2720 15775 : gimple_stmt_iterator gsi = *gsi_p;
2721 :
2722 : /* Don't warn if the label is marked with a "falls through" comment. */
2723 15775 : if (FALLTHROUGH_LABEL_P (label))
2724 : return false;
2725 :
2726 : /* Don't warn for non-case labels followed by a statement:
2727 : case 0:
2728 : foo ();
2729 : label:
2730 : bar ();
2731 : as these are likely intentional. */
2732 15011 : if (!case_label_p (&gimplify_ctxp->case_labels, label))
2733 : {
2734 : tree l;
 : /* Skip over further non-case labels and (with
 : -ftrivial-auto-var-init) artificial .DEFERRED_INIT calls. */
2735 326 : while (!gsi_end_p (gsi)
2736 326 : && ((gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2737 255 : && (l
2738 255 : = gimple_label_label (as_a <glabel *> (gsi_stmt (gsi))))
2739 255 : && !case_label_p (&gimplify_ctxp->case_labels, l))
2740 149 : || (flag_auto_var_init > AUTO_INIT_UNINITIALIZED
2741 31 : && gimple_call_internal_p (gsi_stmt (gsi),
2742 : IFN_DEFERRED_INIT))))
2743 169 : gsi_next_nondebug (&gsi);
2744 157 : if (gsi_end_p (gsi) || gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
2745 : return false;
2746 : }
2747 :
2748 : /* Don't warn for terminated branches, i.e. when the subsequent case labels
2749 : immediately breaks. */
2750 14940 : gsi = *gsi_p;
2751 :
2752 : /* Skip all immediately following labels. */
2753 33341 : while (!gsi_end_p (gsi)
2754 33341 : && (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2755 15075 : || gimple_code (gsi_stmt (gsi)) == GIMPLE_PREDICT
2756 14874 : || (flag_auto_var_init > AUTO_INIT_UNINITIALIZED
2757 357 : && gimple_call_internal_p (gsi_stmt (gsi),
2758 : IFN_DEFERRED_INIT))))
2759 18401 : gsi_next_nondebug (&gsi);
2760 :
2761 : /* { ... something; default:; } */
2762 14940 : if (gsi_end_p (gsi)
2763 : /* { ... something; default: break; } or
2764 : { ... something; default: goto L; } */
2765 14874 : || gimple_code (gsi_stmt (gsi)) == GIMPLE_GOTO
2766 : /* { ... something; default: return; } */
2767 28727 : || gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
2768 : return false;
2769 :
2770 : return true;
2771 : }
2772 :
2773 : /* Callback for walk_gimple_seq. Looks for statement sequences of the
 : form LABEL ... <may-fallthru stmt> LABEL and issues
 : -Wimplicit-fallthrough diagnostics for them. Returns
 : integer_zero_node to terminate the walk when no further statements
 : follow, NULL_TREE to continue walking. */
2774 :
2775 : static tree
2776 20548 : warn_implicit_fallthrough_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2777 : struct walk_stmt_info *)
2778 : {
2779 20548 : gimple *stmt = gsi_stmt (*gsi_p);
2780 :
2781 20548 : *handled_ops_p = true;
2782 20548 : switch (gimple_code (stmt))
2783 : {
2784 228 : case GIMPLE_TRY:
2785 228 : case GIMPLE_BIND:
2786 228 : case GIMPLE_CATCH:
2787 228 : case GIMPLE_EH_FILTER:
2788 228 : case GIMPLE_TRANSACTION:
2789 : /* Walk the sub-statements. */
2790 228 : *handled_ops_p = false;
2791 228 : break;
2792 :
2793 : /* Find a sequence of form:
2794 :
2795 : GIMPLE_LABEL
2796 : [...]
2797 : <may fallthru stmt>
2798 : GIMPLE_LABEL
2799 :
2800 : and possibly warn. */
2801 : case GIMPLE_LABEL:
2802 : {
2803 : /* Found a label. Skip all immediately following labels. */
2804 46297 : while (!gsi_end_p (*gsi_p)
2805 46297 : && gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2806 26432 : gsi_next_nondebug (gsi_p);
2807 :
2808 : /* There might be no more statements. */
2809 19865 : if (gsi_end_p (*gsi_p))
2810 4083 : return integer_zero_node;
2811 :
2812 : /* Vector of labels that fall through. */
2813 19670 : auto_vec <struct label_entry> labels;
2814 19670 : location_t prevloc;
2815 19670 : gimple *prev = collect_fallthrough_labels (gsi_p, &labels, &prevloc);
2816 :
2817 : /* There might be no more statements. */
2818 19670 : if (gsi_end_p (*gsi_p))
2819 3888 : return integer_zero_node;
2820 :
2821 15782 : gimple *next = gsi_stmt (*gsi_p);
2822 15782 : tree label;
2823 : /* If what follows is a label, then we may have a fallthrough. */
2824 15782 : if (gimple_code (next) == GIMPLE_LABEL
2825 15782 : && gimple_has_location (next)
2826 15782 : && (label = gimple_label_label (as_a <glabel *> (next)))
2827 31564 : && prev != NULL)
2828 : {
2829 15775 : struct label_entry *l;
2830 15775 : bool warned_p = false;
2831 15775 : auto_diagnostic_group d;
2832 15775 : if (!should_warn_for_implicit_fallthrough (gsi_p, label))
2833 : /* Quiet. */;
2834 13760 : else if (gimple_code (prev) == GIMPLE_LABEL
2835 218 : && (label = gimple_label_label (as_a <glabel *> (prev)))
2836 13978 : && (l = find_label_entry (&labels, label)))
2837 198 : warned_p = warning_at (l->loc, OPT_Wimplicit_fallthrough_,
2838 : "this statement may fall through");
2839 13562 : else if (!gimple_call_internal_p (prev, IFN_FALLTHROUGH)
2840 : /* Try to be clever and don't warn when the statement
2841 : can't actually fall through. */
2842 13161 : && gimple_stmt_may_fallthru (prev)
2843 13863 : && prevloc != UNKNOWN_LOCATION)
2844 301 : warned_p = warning_at (prevloc,
2845 301 : OPT_Wimplicit_fallthrough_,
2846 : "this statement may fall through");
2847 499 : if (warned_p)
2848 499 : inform (gimple_location (next), "here");
2849 :
2850 : /* Mark this label as processed so as to prevent multiple
2851 : warnings in nested switches. */
2852 15775 : FALLTHROUGH_LABEL_P (label) = true;
2853 :
2854 : /* So that next warn_implicit_fallthrough_r will start looking for
2855 : a new sequence starting with this label. */
2856 15775 : gsi_prev (gsi_p);
2857 : }
2858 3888 : }
2859 15782 : break;
2860 : default:
2861 : break;
2862 : }
2863 : return NULL_TREE;
2864 : }
2865 :
2866 : /* Warn when a switch case falls through. SEQ is the already-gimplified
 : body of the switch. No-op unless -Wimplicit-fallthrough is active and
 : the language is C/C++/ObjC/ObjC++. */
2867 :
2868 : static void
2869 50660 : maybe_warn_implicit_fallthrough (gimple_seq seq)
2870 : {
2871 50660 : if (!warn_implicit_fallthrough)
2872 46679 : return;
2873 :
2874 : /* This warning is meant for C/C++/ObjC/ObjC++ only. */
2875 3981 : if (!(lang_GNU_C ()
2876 1073 : || lang_GNU_CXX ()
2877 0 : || lang_GNU_OBJC ()))
2878 : return;
2879 :
2880 3981 : struct walk_stmt_info wi;
2881 3981 : memset (&wi, 0, sizeof (wi));
2882 3981 : walk_gimple_seq (seq, warn_implicit_fallthrough_r, NULL, &wi);
2883 : }
2884 :
2885 : /* Callback for walk_gimple_seq. Removes IFN_FALLTHROUGH internal calls
 : and diagnoses [[fallthrough]] uses that do not precede a case or
 : default label. WI->info points to a location_t[2] owned by
 : expand_FALLTHROUGH: [0] is set to BUILTINS_LOCATION (with the
 : offending location in [1]) when a fallthrough call ends the walked
 : sequence, and reset to UNKNOWN_LOCATION by any later statement. */
2886 :
2887 : static tree
2888 3275370 : expand_FALLTHROUGH_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2889 : struct walk_stmt_info *wi)
2890 : {
2891 3275370 : gimple *stmt = gsi_stmt (*gsi_p);
2892 :
2893 3275370 : *handled_ops_p = true;
2894 3275370 : switch (gimple_code (stmt))
2895 : {
2896 149652 : case GIMPLE_TRY:
2897 149652 : case GIMPLE_BIND:
2898 149652 : case GIMPLE_CATCH:
2899 149652 : case GIMPLE_EH_FILTER:
2900 149652 : case GIMPLE_TRANSACTION:
2901 : /* Walk the sub-statements. */
2902 149652 : *handled_ops_p = false;
2903 149652 : break;
2904 254647 : case GIMPLE_CALL:
2905 254647 : static_cast<location_t *>(wi->info)[0] = UNKNOWN_LOCATION;
2906 254647 : if (gimple_call_internal_p (stmt, IFN_FALLTHROUGH))
2907 : {
2908 4107 : location_t loc = gimple_location (stmt);
2909 4107 : gsi_remove (gsi_p, true);
2910 4107 : wi->removed_stmt = true;
2911 :
2912 : /* nothrow flag is added by genericize_c_loop to mark fallthrough
2913 : statement at the end of some loop's body. Those should be
2914 : always diagnosed, either because they indeed don't precede
2915 : a case label or default label, or because the next statement
2916 : is not within the same iteration statement. */
2917 4107 : if ((stmt->subcode & GF_CALL_NOTHROW) != 0)
2918 : {
2919 12 : pedwarn (loc, 0, "attribute %<fallthrough%> not preceding "
2920 : "a case label or default label");
2921 12 : break;
2922 : }
2923 :
2924 4095 : if (gsi_end_p (*gsi_p))
2925 : {
2926 32 : static_cast<location_t *>(wi->info)[0] = BUILTINS_LOCATION;
2927 32 : static_cast<location_t *>(wi->info)[1] = loc;
2928 32 : break;
2929 : }
2930 :
2931 4063 : bool found = false;
2932 :
2933 4063 : gimple_stmt_iterator gsi2 = *gsi_p;
2934 4063 : stmt = gsi_stmt (gsi2);
2935 4063 : tree lab;
2936 4063 : if (flag_auto_var_init > AUTO_INIT_UNINITIALIZED
2937 865 : && gimple_code (stmt) == GIMPLE_GOTO
2938 20 : && (lab = gimple_goto_dest (stmt))
2939 20 : && TREE_CODE (lab) == LABEL_DECL
2940 4083 : && VACUOUS_INIT_LABEL_P (lab))
2941 : {
2942 : /* Handle for C++ artificial -ftrivial-auto-var-init=
2943 : sequences. Those look like:
2944 : goto lab1;
2945 : lab2:;
2946 : v1 = .DEFERRED_INIT (...);
2947 : v2 = .DEFERRED_INIT (...);
2948 : lab3:;
2949 : v3 = .DEFERRED_INIT (...);
2950 : lab1:;
2951 : In this case, a case/default label can be either in between
2952 : the GIMPLE_GOTO and the corresponding GIMPLE_LABEL, if jumps
2953 : from the switch condition to the case/default label cross
2954 : vacuous initialization of some variables, or after the
2955 : corresponding GIMPLE_LABEL, if those jumps don't cross
2956 : any such initialization but there is an adjacent named label
2957 : which crosses such initialization. So, for the purpose of
2958 : this function, just ignore the goto but until reaching the
2959 : corresponding GIMPLE_LABEL allow also .DEFERRED_INIT
2960 : calls. */
2961 5 : gsi_next (&gsi2);
2962 : }
2963 4058 : else if (gimple_code (stmt) == GIMPLE_GOTO
2964 4058 : && !gimple_has_location (stmt))
2965 : {
2966 : /* Go on until the artificial label. */
2967 57 : tree goto_dest = gimple_goto_dest (stmt);
2968 414 : for (; !gsi_end_p (gsi2); gsi_next (&gsi2))
2969 : {
2970 357 : if (gimple_code (gsi_stmt (gsi2)) == GIMPLE_LABEL
2971 357 : && gimple_label_label (as_a <glabel *> (gsi_stmt (gsi2)))
2972 : == goto_dest)
2973 : break;
2974 : }
2975 :
2976 : /* Not found? Stop. */
2977 57 : if (gsi_end_p (gsi2))
2978 : break;
2979 :
2980 : /* Look one past it. */
2981 57 : gsi_next (&gsi2);
2982 : }
2983 :
2984 : /* We're looking for a case label or default label here. */
2985 4161 : while (!gsi_end_p (gsi2))
2986 : {
2987 4161 : stmt = gsi_stmt (gsi2);
2988 4161 : if (gimple_code (stmt) == GIMPLE_LABEL)
2989 : {
2990 4118 : tree label = gimple_label_label (as_a <glabel *> (stmt));
2991 4118 : if (gimple_has_location (stmt) && DECL_ARTIFICIAL (label))
2992 : {
2993 : found = true;
2994 : break;
2995 : }
2996 : }
2997 43 : else if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
2998 : ;
2999 43 : else if (flag_auto_var_init > AUTO_INIT_UNINITIALIZED
3000 43 : && gimple_call_internal_p (stmt, IFN_DEFERRED_INIT))
3001 : ;
3002 43 : else if (!is_gimple_debug (stmt))
3003 : /* Anything else is not expected. */
3004 : break;
3005 98 : gsi_next (&gsi2);
3006 : }
3007 4063 : if (!found)
3008 23 : pedwarn (loc, 0, "attribute %<fallthrough%> not preceding "
3009 : "a case label or default label");
3010 : }
3011 : break;
3012 2871071 : default:
3013 2871071 : static_cast<location_t *>(wi->info)[0] = UNKNOWN_LOCATION;
3014 2871071 : break;
3015 : }
3016 3275370 : return NULL_TREE;
3017 : }
3018 :
3019 : /* Expand all FALLTHROUGH () calls in SEQ, removing them and diagnosing
 : misplaced ones. LOC[0]/LOC[1] communicate with expand_FALLTHROUGH_r:
 : a non-UNKNOWN LOC[0] after the walk means [[fallthrough]]; ended the
 : switch body, which is diagnosed here at LOC[1]. */
3020 :
3021 : static void
3022 48972 : expand_FALLTHROUGH (gimple_seq *seq_p)
3023 : {
3024 48972 : auto_urlify_attributes sentinel;
3025 :
3026 48972 : struct walk_stmt_info wi;
3027 48972 : location_t loc[2];
3028 48972 : memset (&wi, 0, sizeof (wi));
3029 48972 : loc[0] = UNKNOWN_LOCATION;
3030 48972 : loc[1] = UNKNOWN_LOCATION;
3031 48972 : wi.info = (void *) &loc[0];
3032 48972 : walk_gimple_seq_mod (seq_p, expand_FALLTHROUGH_r, NULL, &wi);
3033 48972 : if (loc[0] != UNKNOWN_LOCATION)
3034 : /* We've found [[fallthrough]]; at the end of a switch, which the C++
3035 : standard says is ill-formed; see [dcl.attr.fallthrough]. */
3036 8 : pedwarn (loc[1], 0, "attribute %<fallthrough%> not preceding "
3037 : "a case label or default label");
3038 : }
3039 :
3040 :
3041 : /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
3042 : branch to. *EXPR_P is the SWITCH_EXPR; PRE_P receives the resulting
 : GIMPLE_SWITCH and gimplified body. Returns GS_ALL_DONE on success, or
 : propagates GS_ERROR/GS_UNHANDLED from gimplifying the condition. */
3043 :
3044 : static enum gimplify_status
3045 50660 : gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
3046 : {
3047 50660 : tree switch_expr = *expr_p;
3048 50660 : gimple_seq switch_body_seq = NULL;
3049 50660 : enum gimplify_status ret;
3050 50660 : tree index_type = TREE_TYPE (switch_expr);
3051 50660 : if (index_type == NULL_TREE)
3052 11357 : index_type = TREE_TYPE (SWITCH_COND (switch_expr));
3053 :
3054 50660 : ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
3055 : fb_rvalue);
3056 50660 : if (ret == GS_ERROR || ret == GS_UNHANDLED)
3057 : return ret;
3058 :
3059 50660 : if (SWITCH_BODY (switch_expr))
3060 : {
3061 50660 : vec<tree> labels;
3062 50660 : vec<tree> saved_labels;
3063 50660 : hash_set<tree> *saved_live_switch_vars = NULL;
3064 50660 : tree default_case = NULL_TREE;
3065 50660 : gswitch *switch_stmt;
3066 :
3067 : /* Save old labels, get new ones from body, then restore the old
3068 : labels. Save all the things from the switch body to append after. */
3069 50660 : saved_labels = gimplify_ctxp->case_labels;
3070 50660 : gimplify_ctxp->case_labels.create (8);
3071 :
3072 : /* Do not create live_switch_vars if SWITCH_BODY is not a BIND_EXPR. */
3073 50660 : saved_live_switch_vars = gimplify_ctxp->live_switch_vars;
3074 50660 : tree_code body_type = TREE_CODE (SWITCH_BODY (switch_expr));
3075 50660 : if (body_type == BIND_EXPR || body_type == STATEMENT_LIST)
3076 50378 : gimplify_ctxp->live_switch_vars = new hash_set<tree> (4);
3077 : else
3078 282 : gimplify_ctxp->live_switch_vars = NULL;
3079 :
3080 50660 : bool old_in_switch_expr = gimplify_ctxp->in_switch_expr;
3081 50660 : gimplify_ctxp->in_switch_expr = true;
3082 :
3083 50660 : gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
3084 :
3085 50660 : gimplify_ctxp->in_switch_expr = old_in_switch_expr;
3086 50660 : maybe_warn_switch_unreachable_and_auto_init (switch_body_seq);
3087 50660 : maybe_warn_implicit_fallthrough (switch_body_seq);
3088 : /* Only do this for the outermost GIMPLE_SWITCH. */
3089 50660 : if (!gimplify_ctxp->in_switch_expr)
3090 48972 : expand_FALLTHROUGH (&switch_body_seq);
3091 :
3092 50660 : labels = gimplify_ctxp->case_labels;
3093 50660 : gimplify_ctxp->case_labels = saved_labels;
3094 :
3095 50660 : if (gimplify_ctxp->live_switch_vars)
3096 : {
3097 50378 : gcc_assert (gimplify_ctxp->live_switch_vars->is_empty ());
3098 50378 : delete gimplify_ctxp->live_switch_vars;
3099 : }
3100 50660 : gimplify_ctxp->live_switch_vars = saved_live_switch_vars;
3101 :
3102 50660 : preprocess_case_label_vec_for_gimple (labels, index_type,
3103 : &default_case);
3104 :
 : /* A switch without a default gets an artificial one appended at
 : the end of the body. */
3105 50660 : bool add_bind = false;
3106 50660 : if (!default_case)
3107 : {
3108 14927 : glabel *new_default;
3109 :
3110 14927 : default_case
3111 14927 : = build_case_label (NULL_TREE, NULL_TREE,
3112 : create_artificial_label (UNKNOWN_LOCATION));
3113 14927 : if (old_in_switch_expr)
3114 : {
3115 1085 : SWITCH_BREAK_LABEL_P (CASE_LABEL (default_case)) = 1;
3116 1085 : add_bind = true;
3117 : }
3118 14927 : new_default = gimple_build_label (CASE_LABEL (default_case));
3119 14927 : gimplify_seq_add_stmt (&switch_body_seq, new_default);
3120 : }
3121 35733 : else if (old_in_switch_expr)
3122 : {
3123 603 : gimple *last = gimple_seq_last_stmt (switch_body_seq);
3124 603 : if (last && gimple_code (last) == GIMPLE_LABEL)
3125 : {
3126 338 : tree label = gimple_label_label (as_a <glabel *> (last));
3127 338 : if (SWITCH_BREAK_LABEL_P (label))
3128 50660 : add_bind = true;
3129 : }
3130 : }
3131 :
3132 50660 : switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
3133 : default_case, labels);
3134 50660 : gimple_set_location (switch_stmt, EXPR_LOCATION (switch_expr));
3135 : /* For the benefit of -Wimplicit-fallthrough, if switch_body_seq
3136 : ends with a GIMPLE_LABEL holding SWITCH_BREAK_LABEL_P LABEL_DECL,
3137 : wrap the GIMPLE_SWITCH up to that GIMPLE_LABEL into a GIMPLE_BIND,
3138 : so that we can easily find the start and end of the switch
3139 : statement. */
3140 50660 : if (add_bind)
3141 : {
3142 1422 : gimple_seq bind_body = NULL;
3143 1422 : gimplify_seq_add_stmt (&bind_body, switch_stmt);
3144 1422 : gimple_seq_add_seq (&bind_body, switch_body_seq);
3145 1422 : gbind *bind = gimple_build_bind (NULL_TREE, bind_body, NULL_TREE);
3146 1422 : gimple_set_location (bind, EXPR_LOCATION (switch_expr));
3147 1422 : gimplify_seq_add_stmt (pre_p, bind);
3148 : }
3149 : else
3150 : {
3151 49238 : gimplify_seq_add_stmt (pre_p, switch_stmt);
3152 49238 : gimplify_seq_add_seq (pre_p, switch_body_seq);
3153 : }
3154 50660 : labels.release ();
3155 : }
3156 : else
3157 0 : gcc_unreachable ();
3158 :
3159 50660 : return GS_ALL_DONE;
3160 : }
3161 :
3162 : /* Gimplify the LABEL_EXPR pointed to by EXPR_P. Emits a GIMPLE_LABEL
 : into PRE_P, plus a branch-prediction hint for labels carrying the
 : "cold" or "hot" attribute. Always returns GS_ALL_DONE. */
3163 :
3164 : static enum gimplify_status
3165 2551053 : gimplify_label_expr (tree *expr_p, gimple_seq *pre_p)
3166 : {
3167 2551053 : gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
3168 : == current_function_decl);
3169 :
3170 2551053 : tree label = LABEL_EXPR_LABEL (*expr_p);
3171 2551053 : glabel *label_stmt = gimple_build_label (label);
3172 2551053 : gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
3173 2551053 : gimplify_seq_add_stmt (pre_p, label_stmt);
3174 :
3175 2551053 : if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
3176 21 : gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
3177 : NOT_TAKEN));
3178 2551032 : else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
3179 11 : gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
3180 : TAKEN));
3181 :
3182 2551053 : return GS_ALL_DONE;
3183 : }
3184 :
3185 : /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. Emits the label
 : into PRE_P, records the case in the innermost context that collects
 : case labels, and adds hot/cold prediction hints where attributed.
 : Always returns GS_ALL_DONE. */
3186 :
3187 : static enum gimplify_status
3188 1044816 : gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
3189 : {
3190 1044816 : struct gimplify_ctx *ctxp;
3191 1044816 : glabel *label_stmt;
3192 :
3193 : /* Invalid programs can play Duff's Device type games with, for example,
3194 : #pragma omp parallel. At least in the C front end, we don't
3195 : detect such invalid branches until after gimplification, in the
3196 : diagnose_omp_blocks pass. */
3197 1044826 : for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
3198 1044826 : if (ctxp->case_labels.exists ())
3199 : break;
3200 :
3201 1044816 : tree label = CASE_LABEL (*expr_p);
3202 1044816 : label_stmt = gimple_build_label (label);
3203 1044816 : gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
3204 1044816 : ctxp->case_labels.safe_push (*expr_p);
3205 1044816 : gimplify_seq_add_stmt (pre_p, label_stmt);
3206 :
3207 1044816 : if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
3208 16 : gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
3209 : NOT_TAKEN));
3210 1044800 : else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
3211 20 : gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
3212 : TAKEN));
3213 :
3214 1044816 : return GS_ALL_DONE;
3215 : }
3216 :
3217 : /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
3218 : if necessary. Returns NULL_TREE when LABEL_P is NULL (nowhere to
 : jump), otherwise a GOTO_EXPR; a freshly created artificial label is
 : stored back through *LABEL_P for the caller to emit. */
3219 :
3220 : tree
3221 1785318 : build_and_jump (tree *label_p)
3222 : {
3223 1785318 : if (label_p == NULL)
3224 : /* If there's nowhere to jump, just fall through. */
3225 : return NULL_TREE;
3226 :
3227 1211524 : if (*label_p == NULL_TREE)
3228 : {
3229 694689 : tree label = create_artificial_label (UNKNOWN_LOCATION);
3230 694689 : *label_p = label;
3231 : }
3232 :
3233 1211524 : return build1 (GOTO_EXPR, void_type_node, *label_p);
3234 : }
3235 :
3236 : /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
3237 : This also involves building a label to jump to and communicating it to
3238 : gimplify_loop_expr through gimplify_ctxp->exit_label. Returns GS_OK so
 : the rewritten COND_EXPR is gimplified again. */
3239 :
3240 : static enum gimplify_status
3241 3220 : gimplify_exit_expr (tree *expr_p)
3242 : {
3243 3220 : tree cond = TREE_OPERAND (*expr_p, 0);
3244 3220 : tree expr;
3245 :
3246 3220 : expr = build_and_jump (&gimplify_ctxp->exit_label);
3247 3220 : expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
3248 3220 : *expr_p = expr;
3249 :
3250 3220 : return GS_OK;
3251 : }
3252 :
3253 : /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
3254 : different from its canonical type, wrap the whole thing inside a
3255 : NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
3256 : type.
3257 :
3258 : The canonical type of a COMPONENT_REF is the type of the field being
3259 : referenced--unless the field is a bit-field which can be read directly
3260 : in a smaller mode, in which case the canonical type is the
3261 : sign-appropriate type corresponding to that mode.
 :
 : Mutates *EXPR_P in place; no value is returned. */
3262 :
3263 : static void
3264 18406218 : canonicalize_component_ref (tree *expr_p)
3265 : {
3266 18406218 : tree expr = *expr_p;
3267 18406218 : tree type;
3268 :
3269 18406218 : gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
3270 :
3271 18406218 : if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
3272 7464718 : type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
3273 : else
3274 10941500 : type = TREE_TYPE (TREE_OPERAND (expr, 1));
3275 :
3276 : /* One could argue that all the stuff below is not necessary for
3277 : the non-bitfield case and declare it a FE error if type
3278 : adjustment would be needed. */
3279 18406218 : if (TREE_TYPE (expr) != type)
3280 : {
3281 : #ifdef ENABLE_TYPES_CHECKING
3282 772737 : tree old_type = TREE_TYPE (expr);
3283 : #endif
3284 772737 : int type_quals;
3285 :
3286 : /* We need to preserve qualifiers and propagate them from
3287 : operand 0. */
3288 772737 : type_quals = TYPE_QUALS (type)
3289 772737 : | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
3290 772737 : if (TYPE_QUALS (type) != type_quals)
3291 772481 : type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
3292 :
3293 : /* Set the type of the COMPONENT_REF to the underlying type. */
3294 772737 : TREE_TYPE (expr) = type;
3295 :
3296 : #ifdef ENABLE_TYPES_CHECKING
3297 : /* It is now a FE error, if the conversion from the canonical
3298 : type to the original expression type is not useless. */
3299 772737 : gcc_assert (useless_type_conversion_p (old_type, type));
3300 : #endif
3301 : }
3302 : }
3303 :
3304 :
3305 : /* If a NOP conversion is changing a pointer to array of foo to a pointer
3306 : to foo, embed that change in the ADDR_EXPR by converting
3307 : T array[U];
3308 : (T *)&array
3309 : ==>
3310 : &array[L]
3311 : where L is the lower bound. For simplicity, only do this for constant
3312 : lower bound.
3313 : The constraint is that the type of &array[L] is trivially convertible
3314 : to T *.
 :
 : Leaves *EXPR_P untouched when any precondition fails; every early
 : return below is a deliberate "do nothing" case. */
3315 :
3316 : static void
3317 526966 : canonicalize_addr_expr (tree *expr_p)
3318 : {
3319 526966 : tree expr = *expr_p;
3320 526966 : tree addr_expr = TREE_OPERAND (expr, 0);
3321 526966 : tree datype, ddatype, pddatype;
3322 :
3323 : /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
3324 1053586 : if (!POINTER_TYPE_P (TREE_TYPE (expr))
3325 526966 : || TREE_CODE (addr_expr) != ADDR_EXPR)
3326 : return;
3327 :
3328 : /* The addr_expr type should be a pointer to an array. */
3329 346 : datype = TREE_TYPE (TREE_TYPE (addr_expr));
3330 346 : if (TREE_CODE (datype) != ARRAY_TYPE)
3331 : return;
3332 :
3333 : /* The pointer to element type shall be trivially convertible to
3334 : the expression pointer type. */
3335 28 : ddatype = TREE_TYPE (datype);
3336 28 : pddatype = build_pointer_type (ddatype);
3337 28 : if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
3338 : pddatype))
3339 : return;
3340 :
3341 : /* The lower bound and element sizes must be constant. */
3342 0 : if (!TYPE_SIZE_UNIT (ddatype)
3343 0 : || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
3344 0 : || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
3345 0 : || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
3346 : return;
3347 :
3348 : /* All checks succeeded. Build a new node to merge the cast. */
3349 0 : *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
3350 0 : TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
3351 : NULL_TREE, NULL_TREE);
3352 0 : *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
3353 :
3354 : /* We can have stripped a required restrict qualifier above. */
3355 0 : if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
3356 0 : *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
3357 : }
3357 :
3358 : /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
3359 : underneath as appropriate. Always returns GS_OK so the (possibly
 : rewritten) expression is processed again by the gimplifier. */
3360 :
3361 : static enum gimplify_status
3362 12859595 : gimplify_conversion (tree *expr_p)
3363 : {
3364 12859595 : location_t loc = EXPR_LOCATION (*expr_p);
3365 12859595 : gcc_assert (CONVERT_EXPR_P (*expr_p));
3366 :
3367 : /* Then strip away all but the outermost conversion. */
3368 12859595 : STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
3369 :
3370 : /* And remove the outermost conversion if it's useless. */
3371 12859595 : if (tree_ssa_useless_type_conversion (*expr_p))
3372 0 : *expr_p = TREE_OPERAND (*expr_p, 0);
3373 :
3374 : /* If we still have a conversion at the toplevel,
3375 : then canonicalize some constructs. */
3376 12859595 : if (CONVERT_EXPR_P (*expr_p))
3377 : {
3378 12859595 : tree sub = TREE_OPERAND (*expr_p, 0);
3379 :
3380 : /* If a NOP conversion is changing the type of a COMPONENT_REF
3381 : expression, then canonicalize its type now in order to expose more
3382 : redundant conversions. */
3383 12859595 : if (TREE_CODE (sub) == COMPONENT_REF)
3384 272035 : canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
3385 :
3386 : /* If a NOP conversion is changing a pointer to array of foo
3387 : to a pointer to foo, embed that change in the ADDR_EXPR. */
3388 12587560 : else if (TREE_CODE (sub) == ADDR_EXPR)
3389 526966 : canonicalize_addr_expr (expr_p);
3390 : }
3391 :
3392 : /* If we have a conversion to a non-register type force the
3393 : use of a VIEW_CONVERT_EXPR instead. */
3394 12859595 : if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
3395 90 : *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
3396 90 : TREE_OPERAND (*expr_p, 0));
3397 :
3398 : /* Canonicalize CONVERT_EXPR to NOP_EXPR. */
3399 12859595 : if (TREE_CODE (*expr_p) == CONVERT_EXPR)
3400 229778 : TREE_SET_CODE (*expr_p, NOP_EXPR);
3401 :
3402 12859595 : return GS_OK;
3403 : }
3404 :
3405 : /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
3406 : DECL_VALUE_EXPR, and it's worth re-examining things. Returns GS_ERROR
 : only for leaked never-bound locals (already-diagnosed erroneous
 : input), otherwise GS_ALL_DONE. */
3407 :
3408 : static enum gimplify_status
3409 126379715 : gimplify_var_or_parm_decl (tree *expr_p)
3410 : {
3411 126379715 : tree decl = *expr_p;
3412 :
3413 : /* ??? If this is a local variable, and it has not been seen in any
3414 : outer BIND_EXPR, then it's probably the result of a duplicate
3415 : declaration, for which we've already issued an error. It would
3416 : be really nice if the front end wouldn't leak these at all.
3417 : Currently the only known culprit is C++ destructors, as seen
3418 : in g++.old-deja/g++.jason/binding.C.
3419 : Another possible culpit are size expressions for variably modified
3420 : types which are lost in the FE or not gimplified correctly. */
3421 126379715 : if (VAR_P (decl)
3422 98561039 : && !DECL_SEEN_IN_BIND_EXPR_P (decl)
3423 17118125 : && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
3424 126439657 : && decl_function_context (decl) == current_function_decl)
3425 : {
3426 75 : gcc_assert (seen_error ());
3427 : return GS_ERROR;
3428 : }
3429 :
3430 : /* When within an OMP context, notice uses of variables. */
3431 126379640 : if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
3432 : return GS_ALL_DONE;
3433 :
3434 : /* If the decl is an alias for another expression, substitute it now. */
3435 126373820 : if (DECL_HAS_VALUE_EXPR_P (decl))
3436 : {
3437 434647 : *expr_p = unshare_expr (DECL_VALUE_EXPR (decl));
3438 434647 : return GS_OK;
3439 : }
3440 :
3441 : return GS_ALL_DONE;
3442 : }
3443 :
3444 : /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. Only
 : looks at T's immediate operands (one level deep, not recursive);
 : assignment and inc/dec expressions are left marked unconditionally. */
3445 :
3446 : static void
3447 110326627 : recalculate_side_effects (tree t)
3448 : {
3449 110326627 : enum tree_code code = TREE_CODE (t);
3450 110326627 : int len = TREE_OPERAND_LENGTH (t);
3451 110326627 : int i;
3452 :
3453 110326627 : switch (TREE_CODE_CLASS (code))
3454 : {
3455 666735 : case tcc_expression:
3456 666735 : switch (code)
3457 : {
3458 : case INIT_EXPR:
3459 : case MODIFY_EXPR:
3460 : case VA_ARG_EXPR:
3461 : case PREDECREMENT_EXPR:
3462 : case PREINCREMENT_EXPR:
3463 : case POSTDECREMENT_EXPR:
3464 : case POSTINCREMENT_EXPR:
3465 : /* All of these have side-effects, no matter what their
3466 : operands are. */
3467 : return;
3468 :
3469 : default:
3470 : break;
3471 : }
3472 : /* Fall through. */
3473 :
3474 110326625 : case tcc_comparison: /* a comparison expression */
3475 110326625 : case tcc_unary: /* a unary arithmetic expression */
3476 110326625 : case tcc_binary: /* a binary arithmetic expression */
3477 110326625 : case tcc_reference: /* a reference */
3478 110326625 : case tcc_vl_exp: /* a function call */
3479 110326625 : TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
3480 359471879 : for (i = 0; i < len; ++i)
3481 : {
3482 249145254 : tree op = TREE_OPERAND (t, i);
3483 249145254 : if (op && TREE_SIDE_EFFECTS (op))
3484 1290582 : TREE_SIDE_EFFECTS (t) = 1;
3485 : }
3486 : break;
3487 :
3488 : case tcc_constant:
3489 : /* No side-effects. */
3490 : return;
3491 :
3492 2 : default:
3493 2 : if (code == SSA_NAME)
3494 : /* No side-effects. */
3495 : return;
3496 0 : gcc_unreachable ();
3497 : }
3498 : }
3499 :
3500 : /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
3501 : node *EXPR_P.
3502 :
3503 : compound_lval
3504 : : min_lval '[' val ']'
3505 : | min_lval '.' ID
3506 : | compound_lval '[' val ']'
3507 : | compound_lval '.' ID
3508 :
3509 : This is not part of the original SIMPLE definition, which separates
3510 : array and member references, but it seems reasonable to handle them
3511 : together. Also, this way we don't run into problems with union
3512 : aliasing; gcc requires that for accesses through a union to alias, the
3513 : union reference must be explicit, which was not always the case when we
3514 : were splitting up array and member refs.
3515 :
3516 : PRE_P points to the sequence where side effects that must happen before
3517 : *EXPR_P should be stored.
3518 :
3519 : POST_P points to the sequence where side effects that must happen after
3520 : *EXPR_P should be stored. */
3521 :
static enum gimplify_status
gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			fallback_t fallback)
{
  tree *p;
  enum gimplify_status ret = GS_ALL_DONE, tret;
  int i;
  location_t loc = EXPR_LOCATION (*expr_p);
  tree expr = *expr_p;

  /* Create a stack of the subexpressions so later we can walk them in
     order from inner to outer.  */
  auto_vec<tree, 10> expr_stack;

  /* We can handle anything that get_inner_reference can deal with.  */
  for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
    {
    restart:
      /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs.  */
      if (TREE_CODE (*p) == INDIRECT_REF)
	*p = fold_indirect_ref_loc (loc, *p);

      if (handled_component_p (*p))
	;
      /* Expand DECL_VALUE_EXPR now.  In some cases that may expose
	 additional COMPONENT_REFs.  The replacement may itself start
	 with a handled component, so re-examine *P from the top.  */
      else if ((VAR_P (*p) || TREE_CODE (*p) == PARM_DECL)
	       && gimplify_var_or_parm_decl (p) == GS_OK)
	goto restart;
      else
	break;

      expr_stack.safe_push (*p);
    }

  gcc_assert (expr_stack.length ());

  /* Now EXPR_STACK is a stack of pointers to all the refs we've
     walked through and P points to the innermost expression.

     Java requires that we elaborated nodes in source order.  That
     means we must gimplify the inner expression followed by each of
     the indices, in order.  But we can't gimplify the inner
     expression until we deal with any variable bounds, sizes, or
     positions in order to deal with PLACEHOLDER_EXPRs.

     The base expression may contain a statement expression that
     has declarations used in size expressions, so has to be
     gimplified before gimplifying the size expressions.

     So we do this in three steps.  First we deal with variable
     bounds, sizes, and positions, then we gimplify the base and
     ensure it is memory if needed, then we deal with the annotations
     for any variables in the components and any indices, from left
     to right.  */

  /* Step 1: record variable low bounds, element sizes and field
     offsets into the unused operand slots of the refs (walking from
     outermost to innermost), and note whether any component forces
     the base object into memory.  */
  bool need_non_reg = false;
  for (i = expr_stack.length () - 1; i >= 0; i--)
    {
      tree t = expr_stack[i];

      if (error_operand_p (TREE_OPERAND (t, 0)))
	return GS_ERROR;

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  /* Deal with the low bound and element type size and put them into
	     the ARRAY_REF.  If these values are set, they have already been
	     gimplified.  */
	  if (TREE_OPERAND (t, 2) == NULL_TREE)
	    {
	      tree low = unshare_expr (array_ref_low_bound (t));
	      if (!is_gimple_min_invariant (low))
		{
		  TREE_OPERAND (t, 2) = low;
		}
	    }

	  if (TREE_OPERAND (t, 3) == NULL_TREE)
	    {
	      tree elmt_size = array_ref_element_size (t);
	      if (!is_gimple_min_invariant (elmt_size))
		{
		  elmt_size = unshare_expr (elmt_size);
		  tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
		  tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));

		  /* Divide the element size by the alignment of the element
		     type (above).  Operand 3 stores the size in units of
		     that alignment, per the ARRAY_REF representation.  */
		  elmt_size = size_binop_loc (loc, EXACT_DIV_EXPR,
					      elmt_size, factor);

		  TREE_OPERAND (t, 3) = elmt_size;
		}
	    }
	  need_non_reg = true;
	}
      else if (TREE_CODE (t) == COMPONENT_REF)
	{
	  /* Set the field offset into T and gimplify it.  */
	  if (TREE_OPERAND (t, 2) == NULL_TREE)
	    {
	      tree offset = component_ref_field_offset (t);
	      if (!is_gimple_min_invariant (offset))
		{
		  offset = unshare_expr (offset);
		  tree field = TREE_OPERAND (t, 1);
		  tree factor
		    = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);

		  /* Divide the offset by its alignment, matching the
		     COMPONENT_REF operand-2 representation.  */
		  offset = size_binop_loc (loc, EXACT_DIV_EXPR,
					   offset, factor);

		  TREE_OPERAND (t, 2) = offset;
		}
	    }
	  need_non_reg = true;
	}
      else if (!is_gimple_reg_type (TREE_TYPE (t)))
	/* When the result of an operation, in particular a VIEW_CONVERT_EXPR
	   is a non-register type then require the base object to be a
	   non-register as well.  */
	need_non_reg = true;
    }

  /* Step 2 is to gimplify the base expression.  Make sure lvalue is set
     so as to match the min_lval predicate.  Failure to do so may result
     in the creation of large aggregate temporaries.  */
  tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
			fallback | fb_lvalue);
  ret = MIN (ret, tret);
  if (ret == GS_ERROR)
    return GS_ERROR;

  /* Step 2a: if we have component references we do not support on
     registers then make sure the base isn't a register.  Of course
     we can only do so if an rvalue is OK.  */
  if (need_non_reg && (fallback & fb_rvalue))
    prepare_gimple_addressable (p, pre_p);


  /* Step 3: gimplify size expressions and the indices and operands of
     ARRAY_REF.  During this loop we also remove any useless conversions.
     If we operate on a register also make sure to properly gimplify
     to individual operations.  */

  bool reg_operations = is_gimple_reg (*p);
  for (; expr_stack.length () > 0; )
    {
      tree t = expr_stack.pop ();

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  /* An ARRAY_REF must address memory, never an SSA register.  */
	  gcc_assert (!reg_operations);

	  /* Gimplify the low bound and element type size.  */
	  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
				is_gimple_reg, fb_rvalue);
	  ret = MIN (ret, tret);

	  tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
				is_gimple_reg, fb_rvalue);
	  ret = MIN (ret, tret);

	  /* Gimplify the dimension.  */
	  tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
				is_gimple_val, fb_rvalue);
	  ret = MIN (ret, tret);
	}
      else if (TREE_CODE (t) == COMPONENT_REF)
	{
	  /* Likewise, a COMPONENT_REF base must not be a register.  */
	  gcc_assert (!reg_operations);

	  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
				is_gimple_reg, fb_rvalue);
	  ret = MIN (ret, tret);
	}
      else if (reg_operations)
	{
	  /* Operating on a register (e.g. REALPART_EXPR of an SSA name):
	     force the operand to a gimple value.  */
	  tret = gimplify_expr (&TREE_OPERAND (t, 0), pre_p, post_p,
				is_gimple_val, fb_rvalue);
	  ret = MIN (ret, tret);
	}

      STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));

      /* The innermost expression P may have originally had
	 TREE_SIDE_EFFECTS set which would have caused all the outer
	 expressions in *EXPR_P leading to P to also have had
	 TREE_SIDE_EFFECTS set.  */
      recalculate_side_effects (t);
    }

  /* If the outermost expression is a COMPONENT_REF, canonicalize its type.  */
  if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
    {
      canonicalize_component_ref (expr_p);
    }

  expr_stack.release ();

  /* If nothing changed, GS_ALL_DONE must not be claimed falsely.  */
  gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);

  return ret;
}
3728 :
3729 : /* Gimplify the self modifying expression pointed to by EXPR_P
3730 : (++, --, +=, -=).
3731 :
3732 : PRE_P points to the list where side effects that must happen before
3733 : *EXPR_P should be stored.
3734 :
3735 : POST_P points to the list where side effects that must happen after
3736 : *EXPR_P should be stored.
3737 :
3738 : WANT_VALUE is nonzero iff we want to use the value of this expression
3739 : in another expression.
3740 :
3741 : ARITH_TYPE is the type the computation should be performed in. */
3742 :
enum gimplify_status
gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			bool want_value, tree arith_type)
{
  enum tree_code code;
  tree lhs, lvalue, rhs, t1;
  gimple_seq post = NULL, *orig_post_p = post_p;
  bool postfix;
  enum tree_code arith_code;
  enum gimplify_status ret;
  location_t loc = EXPR_LOCATION (*expr_p);

  code = TREE_CODE (*expr_p);

  gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
	      || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);

  /* Prefix or postfix?  */
  if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
    /* Faster to treat as prefix if result is not used.  */
    postfix = want_value;
  else
    postfix = false;

  /* For postfix, make sure the inner expression's post side effects
     are executed after side effects from this expression.  Use a
     private queue POST and splice it back into ORIG_POST_P below.  */
  if (postfix)
    post_p = &post;

  /* Add or subtract?  */
  if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
    arith_code = PLUS_EXPR;
  else
    arith_code = MINUS_EXPR;

  /* Gimplify the LHS into a GIMPLE lvalue.  */
  lvalue = TREE_OPERAND (*expr_p, 0);
  ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
  if (ret == GS_ERROR)
    return ret;

  /* Extract the operands to the arithmetic operation.  */
  lhs = lvalue;
  rhs = TREE_OPERAND (*expr_p, 1);

  /* For postfix operator, we evaluate the LHS to an rvalue and then use
     that as the result value and in the postqueue operation.  */
  if (postfix)
    {
      ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
      if (ret == GS_ERROR)
	return ret;

      /* Snapshot the old value in a temporary; that snapshot is the
	 value of the whole postfix expression.  */
      lhs = get_initialized_tmp_var (lhs, pre_p);
    }

  /* For POINTERs increment, use POINTER_PLUS_EXPR.  A pointer decrement
     becomes POINTER_PLUS with a negated offset, since there is no
     POINTER_MINUS in GIMPLE.  */
  if (POINTER_TYPE_P (TREE_TYPE (lhs)))
    {
      rhs = convert_to_ptrofftype_loc (loc, rhs);
      if (arith_code == MINUS_EXPR)
	rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
      t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
    }
  else
    /* Perform the arithmetic in ARITH_TYPE (the caller's requested
       computation type), then convert back to the expression type.  */
    t1 = fold_convert (TREE_TYPE (*expr_p),
		       fold_build2 (arith_code, arith_type,
				    fold_convert (arith_type, lhs),
				    fold_convert (arith_type, rhs)));

  if (postfix)
    {
      /* Emit the store now; the saved snapshot LHS is the result.  */
      gimplify_assign (lvalue, t1, pre_p);
      gimplify_seq_add_seq (orig_post_p, post);
      *expr_p = lhs;
      return GS_ALL_DONE;
    }
  else
    {
      /* Prefix: rewrite as a plain assignment and let the caller
	 gimplify it further.  */
      *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
      return GS_OK;
    }
}
3826 :
3827 : /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
3828 :
3829 : static void
3830 80760668 : maybe_with_size_expr (tree *expr_p)
3831 : {
3832 80760668 : tree expr = *expr_p;
3833 80760668 : tree type = TREE_TYPE (expr);
3834 80760668 : tree size;
3835 :
3836 : /* If we've already wrapped this or the type is error_mark_node, we can't do
3837 : anything. */
3838 80760668 : if (TREE_CODE (expr) == WITH_SIZE_EXPR
3839 80760543 : || type == error_mark_node)
3840 : return;
3841 :
3842 : /* If the size isn't known or is a constant, we have nothing to do. */
3843 80760395 : size = TYPE_SIZE_UNIT (type);
3844 80760395 : if (!size || poly_int_tree_p (size))
3845 : return;
3846 :
3847 : /* Otherwise, make a WITH_SIZE_EXPR. */
3848 1680 : size = unshare_expr (size);
3849 1680 : size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
3850 1680 : *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
3851 : }
3852 :
3853 : /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P
3854 : Store any side-effects in PRE_P. CALL_LOCATION is the location of
3855 : the CALL_EXPR. If ALLOW_SSA is set the actual parameter may be
3856 : gimplified to an SSA name. */
3857 :
3858 : enum gimplify_status
3859 33250089 : gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
3860 : bool allow_ssa)
3861 : {
3862 33250089 : bool (*test) (tree);
3863 33250089 : fallback_t fb;
3864 :
3865 : /* In general, we allow lvalues for function arguments to avoid
3866 : extra overhead of copying large aggregates out of even larger
3867 : aggregates into temporaries only to copy the temporaries to
3868 : the argument list. Make optimizers happy by pulling out to
3869 : temporaries those types that fit in registers. */
3870 33250089 : if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
3871 : test = is_gimple_val, fb = fb_rvalue;
3872 : else
3873 : {
3874 1556468 : test = is_gimple_lvalue, fb = fb_either;
3875 : /* Also strip a TARGET_EXPR that would force an extra copy. */
3876 1556468 : if (TREE_CODE (*arg_p) == TARGET_EXPR)
3877 : {
3878 307893 : tree init = TARGET_EXPR_INITIAL (*arg_p);
3879 307893 : if (init
3880 307890 : && !VOID_TYPE_P (TREE_TYPE (init))
3881 : /* Currently, due to c++/116015, it is not desirable to
3882 : strip a TARGET_EXPR whose initializer is a {}. The
3883 : problem is that if we do elide it, we also have to
3884 : replace all the occurrences of the slot temporary in the
3885 : initializer with the temporary created for the argument.
3886 : But we do not have that temporary yet so the replacement
3887 : would be quite awkward and it might be needed to resort
3888 : back to a PLACEHOLDER_EXPR. Note that stripping the
3889 : TARGET_EXPR wouldn't help anyway, as gimplify_expr would
3890 : just allocate a temporary to store the CONSTRUCTOR into.
3891 : (FIXME PR116375.)
3892 :
3893 : See convert_for_arg_passing for the C++ code that marks
3894 : the TARGET_EXPR as eliding or not. */
3895 577316 : && TREE_CODE (init) != CONSTRUCTOR)
3896 238624 : *arg_p = init;
3897 : }
3898 : }
3899 :
3900 : /* If this is a variable sized type, we must remember the size. */
3901 33250089 : maybe_with_size_expr (arg_p);
3902 :
3903 : /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
3904 : /* Make sure arguments have the same location as the function call
3905 : itself. */
3906 33250089 : protected_set_expr_location (*arg_p, call_location);
3907 :
3908 : /* There is a sequence point before a function call. Side effects in
3909 : the argument list must occur before the actual call. So, when
3910 : gimplifying arguments, force gimplify_expr to use an internal
3911 : post queue which is then appended to the end of PRE_P. */
3912 33250089 : return gimplify_expr (arg_p, pre_p, NULL, test, fb, allow_ssa);
3913 : }
3914 :
3915 : /* Don't fold inside offloading or taskreg regions: it can break code by
3916 : adding decl references that weren't in the source. We'll do it during
3917 : omplower pass instead. */
3918 :
3919 : static bool
3920 58951579 : maybe_fold_stmt (gimple_stmt_iterator *gsi)
3921 : {
3922 58951579 : struct gimplify_omp_ctx *ctx;
3923 59479094 : for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
3924 1189101 : if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0)
3925 : return false;
3926 532464 : else if ((ctx->region_type & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
3927 : return false;
3928 : /* Delay folding of builtins until the IL is in consistent state
3929 : so the diagnostic machinery can do a better job. */
3930 58289993 : if (gimple_call_builtin_p (gsi_stmt (*gsi)))
3931 : return false;
3932 55935408 : return fold_stmt (gsi);
3933 : }
3934 :
3935 : static tree
3936 : expand_late_variant_directive (vec<struct omp_variant> all_candidates,
3937 : tree construct_context);
3938 :
3939 :
3940 : /* Helper function for calls to omp_dynamic_cond: find the current
3941 : enclosing block in the gimplification context. */
3942 : static tree
3943 359 : find_supercontext (void)
3944 : {
3945 359 : vec<gbind *>stack = gimple_bind_expr_stack ();
3946 686 : for (int i = stack.length () - 1; i >= 0; i++)
3947 : {
3948 327 : gbind *b = stack[i];
3949 327 : if (b->block)
3950 : return b->block;
3951 : }
3952 : return NULL_TREE;
3953 : }
3954 :
3955 : /* OpenMP: Handle the append_args and adjust_args clauses of
3956 : declare_variant for EXPR, which is a CALL_EXPR whose CALL_EXPR_FN
3957 : is the variant, within a dispatch construct with clauses DISPATCH_CLAUSES.
3958 : WANT_VALUE and POINTERIZE are as for expand_variant_call_expr.
3959 :
3960 : 'append_args' causes interop objects are added after the last regular
3961 : (nonhidden, nonvariadic) arguments of the variant function.
3962 : 'adjust_args' with need_device_{addr,ptr} converts the pointer target of
3963 : a pointer from a host to a device address. This uses either the default
3964 : device or the passed device number, which then sets the default device
3965 : address. */
3966 : static tree
3967 471 : modify_call_for_omp_dispatch (tree expr, tree dispatch_clauses,
3968 : bool want_value, bool pointerize)
3969 : {
3970 471 : location_t loc = EXPR_LOCATION (expr);
3971 471 : tree fndecl = get_callee_fndecl (expr);
3972 :
3973 : /* Skip processing if we don't get the expected call form. */
3974 471 : if (!fndecl)
3975 : return expr;
3976 :
3977 471 : tree init_code = NULL_TREE;
3978 471 : tree cleanup = NULL_TREE;
3979 471 : tree clobbers = NULL_TREE;
3980 471 : int nargs = call_expr_nargs (expr);
3981 471 : tree dispatch_device_num = NULL_TREE;
3982 471 : tree dispatch_interop = NULL_TREE;
3983 471 : tree dispatch_append_args = NULL_TREE;
3984 471 : int nfirst_args = 0;
3985 471 : tree dispatch_adjust_args_list
3986 471 : = lookup_attribute ("omp declare variant variant args",
3987 471 : DECL_ATTRIBUTES (fndecl));
3988 :
3989 471 : if (dispatch_adjust_args_list)
3990 : {
3991 389 : dispatch_adjust_args_list = TREE_VALUE (dispatch_adjust_args_list);
3992 389 : dispatch_append_args = TREE_CHAIN (dispatch_adjust_args_list);
3993 389 : if (TREE_PURPOSE (dispatch_adjust_args_list) == NULL_TREE
3994 389 : && TREE_VALUE (dispatch_adjust_args_list) == NULL_TREE)
3995 : dispatch_adjust_args_list = NULL_TREE;
3996 : }
3997 389 : if (dispatch_append_args)
3998 : {
3999 245 : nfirst_args = tree_to_shwi (TREE_PURPOSE (dispatch_append_args));
4000 245 : dispatch_append_args = TREE_VALUE (dispatch_append_args);
4001 : }
4002 471 : dispatch_device_num = omp_find_clause (dispatch_clauses, OMP_CLAUSE_DEVICE);
4003 471 : if (dispatch_device_num)
4004 287 : dispatch_device_num = OMP_CLAUSE_DEVICE_ID (dispatch_device_num);
4005 471 : dispatch_interop = omp_find_clause (dispatch_clauses, OMP_CLAUSE_INTEROP);
4006 471 : int nappend = 0, ninterop = 0;
4007 880 : for (tree t = dispatch_append_args; t; t = TREE_CHAIN (t))
4008 409 : nappend++;
4009 :
4010 : /* FIXME: error checking should be taken out of this function and
4011 : handled before any attempt at filtering or resolution happens.
4012 : Otherwise whether or not diagnostics appear is determined by
4013 : GCC internals, how good the front ends are at constant-folding,
4014 : the split between early/late resolution, etc instead of the code
4015 : as written by the user. */
4016 471 : if (dispatch_interop)
4017 861 : for (tree t = dispatch_interop; t; t = TREE_CHAIN (t))
4018 605 : if (OMP_CLAUSE_CODE (t) == OMP_CLAUSE_INTEROP)
4019 409 : ninterop++;
4020 471 : if (dispatch_interop && !dispatch_device_num)
4021 : {
4022 7 : gcc_checking_assert (ninterop > 1);
4023 7 : error_at (OMP_CLAUSE_LOCATION (dispatch_interop),
4024 : "the %<device%> clause must be present if the %<interop%> "
4025 : "clause has more than one list item");
4026 : }
4027 471 : if (nappend < ninterop)
4028 : {
4029 48 : error_at (OMP_CLAUSE_LOCATION (dispatch_interop),
4030 : "number of list items in %<interop%> clause (%d) "
4031 : "exceeds the number of %<append_args%> items (%d) for "
4032 : "%<declare variant%> candidate %qD", ninterop, nappend, fndecl);
4033 88 : inform (dispatch_append_args
4034 8 : ? EXPR_LOCATION (TREE_PURPOSE (dispatch_append_args))
4035 40 : : DECL_SOURCE_LOCATION (fndecl),
4036 : "%<declare variant%> candidate %qD declared here", fndecl);
4037 48 : ninterop = nappend;
4038 : }
4039 471 : if (dispatch_append_args)
4040 : {
4041 245 : tree *buffer = XALLOCAVEC (tree, nargs + nappend);
4042 245 : tree arg = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
4043 : /* Copy the first arguments; insert then the interop objects,
4044 : and then copy the rest (nargs - nfirst_args) args. */
4045 245 : int i;
4046 679 : for (i = 0; i < nfirst_args; i++)
4047 : {
4048 434 : arg = TREE_CHAIN (arg);
4049 434 : buffer[i] = CALL_EXPR_ARG (expr, i);
4050 : }
4051 : int j = ninterop;
4052 583 : for (tree t = dispatch_interop; t && j > 0; t = TREE_CHAIN (t))
4053 338 : if (OMP_CLAUSE_CODE (t) == OMP_CLAUSE_INTEROP)
4054 338 : buffer[i + --j] = OMP_CLAUSE_DECL (t);
4055 245 : gcc_checking_assert (j == 0);
4056 :
4057 : /* Do we need to create additional interop objects? */
4058 245 : if (ninterop < nappend)
4059 : {
4060 56 : if (dispatch_device_num == NULL_TREE)
4061 : /* Not remapping device number. */
4062 22 : dispatch_device_num = build_int_cst (integer_type_node,
4063 : GOMP_DEVICE_DEFAULT_OMP_61);
4064 56 : int nnew = nappend - ninterop;
4065 56 : tree nobjs = build_int_cst (integer_type_node, nnew);
4066 56 : tree a, t;
4067 :
4068 : /* Skip to the append_args clause for the first constructed
4069 : interop argument. */
4070 56 : tree apparg = dispatch_append_args;
4071 83 : for (j = 0; j < ninterop; j++)
4072 27 : apparg = TREE_CHAIN (apparg);
4073 :
4074 : /* omp_interop_t *objs[n]; */
4075 56 : tree objtype = build_pointer_type (pointer_sized_int_node);
4076 56 : t = build_array_type_nelts (objtype, nnew);
4077 56 : tree objs = create_tmp_var (t, "interopobjs");
4078 :
4079 : /* int target_tgtsync[n]; */
4080 56 : t = build_array_type_nelts (integer_type_node, nnew);
4081 56 : tree target_tgtsync = create_tmp_var (t, "tgt_tgtsync");
4082 :
4083 : /* Scan first to determine if we need a prefer_type array. */
4084 56 : tree prefer_type = NULL_TREE;
4085 56 : tree prefer_type_type = NULL_TREE;
4086 141 : for (j = ninterop, a = apparg; j < nappend; j++, a = TREE_CHAIN (a))
4087 64 : if (TREE_VALUE (a) != NULL_TREE)
4088 : {
4089 : /* const char *prefer_type[n]; */
4090 35 : t = build_qualified_type (char_type_node, TYPE_QUAL_CONST);
4091 35 : prefer_type_type = build_pointer_type (t);
4092 35 : t = build_array_type_nelts (prefer_type_type, nnew);
4093 35 : prefer_type = create_tmp_var (t, "pref_type");
4094 35 : break;
4095 : }
4096 :
4097 : /* Initialize the arrays, generating temp vars and clobbers for
4098 : the interop objects. (The constructed array holding the
4099 : pointers to these objects shouldn't need clobbering as there's
4100 : no reason for GOMP_interop to modify its contents.) */
4101 127 : for (j = ninterop, a = apparg; j < nappend; j++, a = TREE_CHAIN (a))
4102 : {
4103 : /* The allocated temporaries for the interop objects
4104 : have type omp_interop_t, which is an integer type that
4105 : can encode a pointer. */
4106 71 : tree objvar = create_tmp_var (pointer_sized_int_node, "interop");
4107 71 : buffer[i + j] = objvar;
4108 71 : TREE_ADDRESSABLE (objvar) = 1;
4109 : /* Generate a clobber for the temporary for when we're done
4110 : with it. */
4111 71 : tree c = build_clobber (pointer_sized_int_node,
4112 : CLOBBER_OBJECT_END);
4113 71 : c = build2 (MODIFY_EXPR, pointer_sized_int_node, objvar, c);
4114 71 : if (clobbers)
4115 15 : clobbers = build2 (COMPOUND_EXPR, TREE_TYPE (clobbers),
4116 : c, clobbers);
4117 : else
4118 : clobbers = c;
4119 :
4120 : /* objs[offset] = &objvar; */
4121 71 : tree offset = build_int_cst (integer_type_node, j - ninterop);
4122 71 : tree init = build4 (ARRAY_REF, objtype, objs, offset,
4123 : NULL_TREE, NULL_TREE);
4124 71 : init = build2 (MODIFY_EXPR, objtype, init,
4125 : build_fold_addr_expr (objvar));
4126 71 : if (init_code)
4127 15 : init_code = build2 (COMPOUND_EXPR, TREE_TYPE (init),
4128 : init_code, init);
4129 : else
4130 : init_code = init;
4131 :
4132 : /* target_tgtsync[offset] = tgt;
4133 : (Don't blame me, I didn't design the encoding of this
4134 : info into the dispatch interop clause data structure,
4135 : but the runtime wants a bit mask.) */
4136 71 : tree tree_tgt = TREE_OPERAND (TREE_PURPOSE (a), 0);
4137 71 : int tgt = 0;
4138 71 : if (TREE_PURPOSE (tree_tgt) == boolean_true_node)
4139 42 : tgt |= GOMP_INTEROP_TARGET;
4140 71 : if (TREE_VALUE (tree_tgt) == boolean_true_node)
4141 36 : tgt |= GOMP_INTEROP_TARGETSYNC;
4142 71 : init = build4 (ARRAY_REF, integer_type_node,
4143 : target_tgtsync, offset, NULL_TREE, NULL_TREE);
4144 71 : init = build2 (MODIFY_EXPR, integer_type_node, init,
4145 71 : build_int_cst (integer_type_node, tgt));
4146 71 : init_code = build2 (COMPOUND_EXPR, TREE_TYPE (init),
4147 : init_code, init);
4148 :
4149 71 : if (prefer_type)
4150 : {
4151 45 : tree pref = TREE_VALUE (a);
4152 45 : if (pref == NULL_TREE)
4153 6 : pref = null_pointer_node;
4154 : else
4155 39 : pref = build_fold_addr_expr (pref);
4156 45 : init = build4 (ARRAY_REF, prefer_type_type, prefer_type,
4157 : offset, NULL_TREE, NULL_TREE);
4158 45 : init = build2 (MODIFY_EXPR, prefer_type_type, init,
4159 : pref);
4160 45 : init_code = build2 (COMPOUND_EXPR, TREE_TYPE (init),
4161 : init_code, init);
4162 : }
4163 : }
4164 :
4165 56 : objs = build_fold_addr_expr (objs);
4166 56 : target_tgtsync = build_fold_addr_expr (target_tgtsync);
4167 56 : prefer_type = prefer_type ? build_fold_addr_expr (prefer_type)
4168 : : null_pointer_node;
4169 56 : tree fn = builtin_decl_explicit (BUILT_IN_GOMP_INTEROP);
4170 56 : tree create
4171 56 : = build_call_expr_loc (loc, fn, 11, dispatch_device_num,
4172 : nobjs, objs, target_tgtsync, prefer_type,
4173 : integer_zero_node, null_pointer_node,
4174 : integer_zero_node, null_pointer_node,
4175 : integer_zero_node, null_pointer_node);
4176 56 : if (init_code)
4177 56 : init_code = build2 (COMPOUND_EXPR, TREE_TYPE (create),
4178 : init_code, create);
4179 : else
4180 : init_code = create;
4181 :
4182 56 : cleanup
4183 56 : = build_call_expr_loc (loc, fn, 11, dispatch_device_num,
4184 : integer_zero_node, null_pointer_node,
4185 : null_pointer_node, null_pointer_node,
4186 : integer_zero_node, null_pointer_node,
4187 : nobjs, objs,
4188 : integer_zero_node, null_pointer_node);
4189 56 : if (clobbers)
4190 56 : cleanup = build2 (COMPOUND_EXPR, TREE_TYPE (clobbers),
4191 : cleanup, clobbers);
4192 : }
4193 :
4194 654 : for (j = 0; j < nappend; j++)
4195 : {
4196 : /* Fortran permits by-reference or by-value for the dummy arg
4197 : and by-value, by-reference, ptr by-reference as actual
4198 : argument. Handle this. */
4199 409 : tree obj = buffer[i + j]; // interop object
4200 409 : tree a2 = TREE_VALUE (arg); // parameter type
4201 766 : if (POINTER_TYPE_P (TREE_TYPE (obj))
4202 436 : && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (obj))))
4203 : {
4204 32 : tree t = TREE_TYPE (TREE_TYPE (obj));
4205 32 : gcc_checking_assert (INTEGRAL_TYPE_P (TREE_TYPE (t)));
4206 32 : obj = fold_build1 (INDIRECT_REF, t, obj);
4207 : }
4208 750 : if (POINTER_TYPE_P (TREE_TYPE (obj))
4209 420 : && INTEGRAL_TYPE_P (a2))
4210 : {
4211 43 : tree t = TREE_TYPE (TREE_TYPE (obj));
4212 43 : gcc_checking_assert (INTEGRAL_TYPE_P (t));
4213 43 : obj = fold_build1 (INDIRECT_REF, t, obj);
4214 : }
4215 497 : else if (INTEGRAL_TYPE_P (TREE_TYPE (obj))
4216 461 : && POINTER_TYPE_P (a2))
4217 : {
4218 18 : gcc_checking_assert (INTEGRAL_TYPE_P (TREE_TYPE (a2)));
4219 18 : obj = build_fold_addr_expr (obj);
4220 : }
4221 348 : else if (!INTEGRAL_TYPE_P (a2)
4222 348 : || !INTEGRAL_TYPE_P (TREE_TYPE (obj)))
4223 : {
4224 36 : tree t = TREE_TYPE (obj);
4225 36 : gcc_checking_assert (POINTER_TYPE_P (t)
4226 : && POINTER_TYPE_P (a2)
4227 : && INTEGRAL_TYPE_P (TREE_TYPE (t))
4228 : && INTEGRAL_TYPE_P (TREE_TYPE (a2)));
4229 : }
4230 409 : buffer[i + j] = obj;
4231 409 : arg = TREE_CHAIN (arg);
4232 : }
4233 245 : i += nappend;
4234 534 : for (j = nfirst_args; j < nargs; j++)
4235 289 : buffer[i++] = CALL_EXPR_ARG (expr, j);
4236 245 : nargs += nappend;
4237 245 : tree call = expr;
4238 490 : expr = build_call_array_loc (EXPR_LOCATION (expr), TREE_TYPE (call),
4239 245 : CALL_EXPR_FN (call), nargs, buffer);
4240 :
4241 : /* Copy all CALL_EXPR flags. */
4242 245 : CALL_EXPR_STATIC_CHAIN (expr) = CALL_EXPR_STATIC_CHAIN (call);
4243 245 : CALL_EXPR_TAILCALL (expr) = CALL_EXPR_TAILCALL (call);
4244 245 : CALL_EXPR_RETURN_SLOT_OPT (expr)
4245 245 : = CALL_EXPR_RETURN_SLOT_OPT (call);
4246 245 : CALL_FROM_THUNK_P (expr) = CALL_FROM_THUNK_P (call);
4247 245 : SET_EXPR_LOCATION (expr, EXPR_LOCATION (call));
4248 245 : CALL_EXPR_VA_ARG_PACK (expr) = CALL_EXPR_VA_ARG_PACK (call);
4249 : }
4250 :
4251 : /* Nothing to do for adjust_args? */
4252 471 : if (!dispatch_adjust_args_list || !TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
4253 260 : goto add_cleanup;
4254 :
4255 : /* Handle adjust_args. */
4256 966 : for (int i = 0; i < nargs; i++)
4257 : {
4258 755 : tree *arg_p = &CALL_EXPR_ARG (expr, i);
4259 :
4260 : /* Nothing to do if arg is constant null pointer. */
4261 755 : if (integer_zerop (*arg_p))
4262 6 : continue;
4263 :
4264 : bool need_device_ptr = false;
4265 : bool need_device_addr = false;
4266 2247 : for (int need_addr = 0; need_addr <= 1; need_addr++)
4267 2301 : for (tree arg = (need_addr
4268 1498 : ? TREE_VALUE (dispatch_adjust_args_list)
4269 1498 : : TREE_PURPOSE (dispatch_adjust_args_list));
4270 2301 : arg != NULL; arg = TREE_CHAIN (arg))
4271 : {
4272 1152 : if (TREE_VALUE (arg)
4273 1152 : && TREE_CODE (TREE_VALUE (arg)) == INTEGER_CST
4274 2304 : && wi::eq_p (i, wi::to_wide (TREE_VALUE (arg))))
4275 : {
4276 349 : if (need_addr)
4277 : need_device_addr = true;
4278 : else
4279 349 : need_device_ptr = true;
4280 : break;
4281 : }
4282 : }
4283 :
4284 749 : if (need_device_ptr || need_device_addr)
4285 : {
4286 740 : bool is_device_ptr = false;
4287 740 : bool has_device_addr = false;
4288 :
4289 740 : for (tree c = dispatch_clauses; c; c = TREE_CHAIN (c))
4290 : {
4291 454 : if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR
4292 454 : || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
4293 : {
4294 164 : tree decl1 = DECL_NAME (OMP_CLAUSE_DECL (c));
4295 164 : tree decl2 = tree_strip_nop_conversions (*arg_p);
4296 164 : if (TREE_CODE (decl2) == ADDR_EXPR)
4297 19 : decl2 = TREE_OPERAND (decl2, 0);
4298 164 : if (VAR_P (decl2) || TREE_CODE (decl2) == PARM_DECL)
4299 : {
4300 140 : decl2 = DECL_NAME (decl2);
4301 140 : if (decl1 == decl2
4302 203 : && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
4303 : {
4304 58 : if (need_device_addr)
4305 0 : warning_at (OMP_CLAUSE_LOCATION (c),
4306 0 : OPT_Wopenmp,
4307 : "%<is_device_ptr%> for %qD does"
4308 : " not imply %<has_device_addr%> "
4309 : "required for %<need_device_addr%>",
4310 0 : OMP_CLAUSE_DECL (c));
4311 : is_device_ptr = true;
4312 : break;
4313 : }
4314 82 : else if (decl1 == decl2)
4315 : {
4316 5 : if (need_device_ptr)
4317 10 : warning_at (OMP_CLAUSE_LOCATION (c),
4318 5 : OPT_Wopenmp,
4319 : "%<has_device_addr%> for %qD does"
4320 : " not imply %<is_device_ptr%> "
4321 : "required for %<need_device_ptr%>",
4322 5 : OMP_CLAUSE_DECL (c));
4323 : has_device_addr = true;
4324 : break;
4325 : }
4326 : }
4327 : }
4328 : }
4329 :
4330 349 : if ((need_device_ptr && !is_device_ptr)
4331 58 : || (need_device_addr && !has_device_addr))
4332 : {
4333 291 : if (dispatch_device_num == NULL_TREE)
4334 : {
4335 : // device_num = omp_get_default_device ()
4336 105 : tree fn
4337 105 : = builtin_decl_explicit (BUILT_IN_OMP_GET_DEFAULT_DEVICE);
4338 105 : tree call = build_call_expr (fn, 0);
4339 105 : dispatch_device_num = create_tmp_var_raw (TREE_TYPE (call));
4340 105 : tree init
4341 105 : = build4 (TARGET_EXPR, TREE_TYPE (call),
4342 : dispatch_device_num, call, NULL_TREE, NULL_TREE);
4343 105 : if (init_code)
4344 0 : init_code = build2 (COMPOUND_EXPR, TREE_TYPE (init),
4345 : init_code, init);
4346 : else
4347 : init_code = init;
4348 : }
4349 :
4350 : // We want to emit the following statement:
4351 : // mapped_arg = omp_get_mapped_ptr (arg,
4352 : // device_num)
4353 : // but arg has to be the actual pointer, not a
4354 : // reference or a conversion expression.
4355 291 : tree actual_ptr
4356 291 : = ((TREE_CODE (*arg_p) == ADDR_EXPR)
4357 291 : ? TREE_OPERAND (*arg_p, 0)
4358 38 : : *arg_p);
4359 291 : if (TREE_CODE (actual_ptr) == NOP_EXPR
4360 291 : && (TREE_CODE (TREE_TYPE (TREE_OPERAND (actual_ptr, 0)))
4361 : == REFERENCE_TYPE))
4362 : {
4363 6 : actual_ptr = TREE_OPERAND (actual_ptr, 0);
4364 6 : actual_ptr = build1 (INDIRECT_REF,
4365 6 : TREE_TYPE (actual_ptr),
4366 : actual_ptr);
4367 : }
4368 291 : tree fn = builtin_decl_explicit (BUILT_IN_OMP_GET_MAPPED_PTR);
4369 291 : tree mapped_arg = build_call_expr_loc (loc, fn, 2, actual_ptr,
4370 : dispatch_device_num);
4371 :
4372 291 : if (TREE_CODE (*arg_p) == ADDR_EXPR
4373 291 : || (TREE_CODE (TREE_TYPE (actual_ptr)) == REFERENCE_TYPE))
4374 47 : mapped_arg = build_fold_addr_expr (mapped_arg);
4375 244 : else if (TREE_CODE (*arg_p) == NOP_EXPR)
4376 35 : mapped_arg = build1 (NOP_EXPR, TREE_TYPE (*arg_p),
4377 : mapped_arg);
4378 291 : *arg_p = mapped_arg;
4379 : }
4380 : }
4381 : }
4382 :
4383 471 : add_cleanup:
4384 471 : if (cleanup)
4385 : {
4386 56 : tree result = NULL_TREE;
4387 56 : if (want_value && pointerize)
4388 : {
4389 0 : tree tmp = create_tmp_var (build_pointer_type (TREE_TYPE (expr)),
4390 : "cleanuptmp");
4391 0 : result = build_simple_mem_ref (tmp);
4392 0 : expr = build2 (INIT_EXPR, TREE_TYPE (tmp), tmp,
4393 : build_fold_addr_expr (expr));
4394 : }
4395 56 : else if (want_value)
4396 : {
4397 18 : tree tmp = create_tmp_var (TREE_TYPE (expr), "cleanuptmp");
4398 18 : result = tmp;
4399 18 : expr = build2 (INIT_EXPR, TREE_TYPE (tmp), tmp, expr);
4400 : }
4401 56 : if (init_code)
4402 56 : expr = build2 (COMPOUND_EXPR, TREE_TYPE (expr), init_code, expr);
4403 56 : expr = build2 (TRY_FINALLY_EXPR, void_type_node, expr, cleanup);
4404 :
4405 56 : if (result)
4406 18 : expr = build2 (COMPOUND_EXPR, TREE_TYPE (result), expr, result);
4407 : }
4408 415 : else if (init_code)
4409 105 : expr = build2 (COMPOUND_EXPR, TREE_TYPE (expr), init_code, expr);
4410 :
4411 : return expr;
4412 : }
4413 :
4414 : /* Helper function for gimplify_call_expr: handle "declare variant"
4415 : resolution and expansion of the CALL_EXPR EXPR. WANT_VALUE is true
4416 : if the result value of the call is needed; POINTERIZE is true if it
4417 : also needs to be pointerized. If OMP_DISPATCH_P is true, apply
4418 : associated transformations using DISPATCH_CLAUSES.
4419 : This function may return either the original call or some other
4420 : expression such as a conditional to select one of multiple calls.
4421 :
4422 : FIXME: this function is written to be independent of gimplifier internals
4423 : so that it could be moved to omp-general.cc and invoked from the
4424 : front ends instead, per PR115076. */
4425 :
static tree
expand_variant_call_expr (tree expr, bool want_value, bool pointerize,
			  bool omp_dispatch_p, tree dispatch_clauses)
{
  /* If we've already processed this call, stop now.  This can happen
     if the variant call resolves to the original function, or to
     a dynamic conditional that includes the default call to the original
     function.  */
  gcc_assert (omp_resolved_variant_calls != NULL);
  if (omp_resolved_variant_calls->contains (expr))
    return expr;

  tree fndecl = get_callee_fndecl (expr);
  tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (expr));
  location_t loc = EXPR_LOCATION (expr);
  tree construct_context = omp_get_construct_context ();
  /* All variant candidates for FNDECL in this construct context; a
     subset of these may be selectable only dynamically.  */
  vec<struct omp_variant> all_candidates
    = omp_declare_variant_candidates (fndecl, construct_context);
  gcc_assert (!all_candidates.is_empty ());
  vec<struct omp_variant> candidates
    = omp_get_dynamic_candidates (all_candidates, construct_context);

  /* If the variant call could be resolved now, build a nest of COND_EXPRs
     if there are dynamic candidates, and/or a new CALL_EXPR for each
     candidate call.  */
  if (!candidates.is_empty ())
    {
      int n = candidates.length ();
      tree tail = NULL_TREE;

      /* Walk the candidates from last to first: the last one must be the
	 static default (asserted below), and each earlier, dynamic
	 candidate wraps the accumulated TAIL in a COND_EXPR guarded by
	 its dynamic selector.  */
      for (int i = n - 1; i >= 0; i--)
	{
	  if (tail)
	    gcc_assert (candidates[i].dynamic_selector);
	  else
	    gcc_assert (!candidates[i].dynamic_selector);
	  if (candidates[i].alternative == fndecl)
	    {
	      /* We should only get the original function back as the
		 default.  */
	      gcc_assert (!tail);
	      omp_resolved_variant_calls->add (expr);
	      tail = expr;
	    }
	  else
	    {
	      /* For the final static selector, we can re-use the old
		 CALL_EXPR and just replace the function, unless it may
		 need dispatch argument modification.  Otherwise,
		 make a copy of it.  */
	      tree thiscall = (tail || omp_dispatch_p
			       ? unshare_expr (expr) : expr);
	      CALL_EXPR_FN (thiscall) = build1 (ADDR_EXPR, fnptrtype,
						candidates[i].alternative);
	      if (omp_dispatch_p)
		thiscall = modify_call_for_omp_dispatch (thiscall,
							 dispatch_clauses,
							 want_value,
							 pointerize);
	      if (!tail)
		tail = thiscall;
	      else
		tail = build3 (COND_EXPR, TREE_TYPE (expr),
			       omp_dynamic_cond (candidates[i].selector,
						 find_supercontext ()),
			       thiscall, tail);
	    }
	}
      return tail;
    }

  /* If we couldn't resolve the variant call now, expand it into a loop using
     a switch and OMP_NEXT_VARIANT for dispatch.  The ompdevlow pass will
     handle OMP_NEXT_VARIANT expansion.  */
  else
    {
      /* If we need a usable return value, we need a temporary
	 and an assignment in each alternative.  This logic was borrowed
	 from gimplify_cond_expr.  */
      tree type = TREE_TYPE (expr);
      tree tmp = NULL_TREE, result = NULL_TREE;

      if (want_value)
	{
	  if (pointerize)
	    {
	      /* Addressable result: hold it via a pointer temporary and
		 dereference that for the value.  */
	      type = build_pointer_type (type);
	      tmp = create_tmp_var (type, "iftmp");
	      result = build_simple_mem_ref_loc (loc, tmp);
	    }
	  else
	    {
	      tmp = create_tmp_var (type, "iftmp");
	      result = tmp;
	    }
	}

      /* Preprocess the all_candidates array so that the alternative field of
	 each element holds the actual function call expression and possible
	 assignment, instead of just the decl for the variant function.  */
      for (unsigned int i = 0; i < all_candidates.length (); i++)
	{
	  tree decl = all_candidates[i].alternative;
	  tree thiscall;

	  /* We need to turn the decl from the candidate into a function
	     call and possible assignment, and stuff that in
	     the directive seq of the gomp_variant.  */
	  if (decl == fndecl)
	    {
	      thiscall = expr;
	      omp_resolved_variant_calls->add (expr);
	    }
	  else
	    {
	      thiscall = unshare_expr (expr);
	      CALL_EXPR_FN (thiscall) = build1 (ADDR_EXPR, fnptrtype, decl);
	      if (omp_dispatch_p)
		thiscall = modify_call_for_omp_dispatch (thiscall,
							 dispatch_clauses,
							 want_value,
							 pointerize);
	    }
	  if (pointerize)
	    thiscall = build_fold_addr_expr_loc (loc, thiscall);
	  if (want_value)
	    thiscall = build2 (INIT_EXPR, type, tmp, thiscall);
	  all_candidates[i].alternative = thiscall;
	}

      /* Record that this function contains OMP variant constructs so
	 that the ompdevlow pass will process it.  */
      cgraph_node::get (cfun->decl)->has_omp_variant_constructs = 1;
      tree expansion = expand_late_variant_directive (all_candidates,
						      construct_context);
      /* If a value is wanted, sequence the temporary's read after the
	 dispatch expansion.  */
      if (result)
	expansion = build2 (COMPOUND_EXPR, TREE_TYPE (result),
			    expansion, result);
      return expansion;
    }
}
4565 :
4566 : /* Wrapper around expand_variant_call_expr to interface with gimplifier
4567 : state. EXPR and OMP_DISPATCH_P are as for expand_variant_call_expr,
4568 : FALLBACK is used to compute the WANT_VALUE and POINTERIZE arguments. */
4569 : static tree
4570 1322 : gimplify_variant_call_expr (tree expr, fallback_t fallback,
4571 : bool omp_dispatch_p)
4572 : {
4573 1322 : tree type = TREE_TYPE (expr);
4574 1322 : bool want_value = (fallback != fb_none && !VOID_TYPE_P (type));
4575 669 : bool pointerize = false;
4576 : /* If the result value must be an lvalue or the result type must
4577 : live in memory, then we have to pointerize it if we need a temporary. */
4578 669 : if (want_value
4579 669 : && ((!(fallback & fb_rvalue) && (fallback & fb_lvalue))
4580 669 : || TREE_ADDRESSABLE (type)))
4581 0 : pointerize = true;
4582 :
4583 1834 : return expand_variant_call_expr (expr, want_value, pointerize,
4584 : omp_dispatch_p,
4585 : (omp_dispatch_p
4586 512 : ? gimplify_omp_ctxp->clauses
4587 1322 : : NULL_TREE));
4588 : }
4589 :
4590 :
4591 : /* Helper function for gimplify_call_expr, called via walk_tree.
4592 : Find used user labels. */
4593 :
4594 : static tree
4595 766 : find_used_user_labels (tree *tp, int *, void *)
4596 : {
4597 766 : if (TREE_CODE (*tp) == LABEL_EXPR
4598 15 : && !DECL_ARTIFICIAL (LABEL_EXPR_LABEL (*tp))
4599 15 : && DECL_NAME (LABEL_EXPR_LABEL (*tp))
4600 781 : && TREE_USED (LABEL_EXPR_LABEL (*tp)))
4601 15 : return *tp;
4602 : return NULL_TREE;
4603 : }
4604 :
4605 :
4606 : /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
4607 : WANT_VALUE is true if the result of the call is desired. */
4608 :
static enum gimplify_status
gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
{
  bool want_value = (fallback != fb_none);
  tree fndecl, parms, p, fnptrtype;
  enum gimplify_status ret;
  int i, nargs;
  gcall *call;
  bool builtin_va_start_p = false, omp_dispatch_p = false;
  location_t loc = EXPR_LOCATION (*expr_p);

  gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);

  /* For reliable diagnostics during inlining, it is necessary that
     every call_expr be annotated with file and line.  */
  if (! EXPR_HAS_LOCATION (*expr_p))
    SET_EXPR_LOCATION (*expr_p, input_location);

  /* Gimplify internal functions created in the FEs.  */
  if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
    {
      enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
      if (ifn == IFN_GOMP_DISPATCH)
	{
	  /* Unwrap the marker: the real call is the single argument of
	     the IFN_GOMP_DISPATCH wrapper.  */
	  gcc_assert (flag_openmp
		      && gimplify_omp_ctxp
		      && gimplify_omp_ctxp->code == OMP_DISPATCH);
	  *expr_p = CALL_EXPR_ARG (*expr_p, 0);
	  gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
	  if (! EXPR_HAS_LOCATION (*expr_p))
	    SET_EXPR_LOCATION (*expr_p, input_location);
	  omp_dispatch_p = true;
	}
      else
	{
	  if (want_value)
	    return GS_ALL_DONE;

	  nargs = call_expr_nargs (*expr_p);
	  auto_vec<tree> vargs (nargs);

	  if (ifn == IFN_ASSUME)
	    {
	      if (simple_condition_p (CALL_EXPR_ARG (*expr_p, 0)))
		{
		  /* If the [[assume (cond)]]; condition is simple
		     enough and can be evaluated unconditionally
		     without side-effects, expand it as
		     if (!cond) __builtin_unreachable ();  */
		  tree fndecl = builtin_decl_explicit (BUILT_IN_UNREACHABLE);
		  *expr_p
		    = build3 (COND_EXPR, void_type_node,
			      CALL_EXPR_ARG (*expr_p, 0), void_node,
			      build_call_expr_loc (EXPR_LOCATION (*expr_p),
						   fndecl, 0));
		  return GS_OK;
		}
	      /* If not optimizing, ignore the assumptions unless there
		 are used user labels in it.  */
	      if ((!optimize
		   && !walk_tree_without_duplicates (&CALL_EXPR_ARG (*expr_p,
								     0),
						     find_used_user_labels,
						     NULL))
		  || seen_error ())
		{
		  *expr_p = NULL_TREE;
		  return GS_ALL_DONE;
		}
	      /* Temporarily, until gimple lowering, transform
		 .ASSUME (cond);
		 into:
		 [[assume (guard)]]
		 {
		   guard = cond;
		 }
		 such that gimple lowering can outline the condition into
		 a separate function easily.  */
	      tree guard = create_tmp_var (boolean_type_node);
	      *expr_p = build2 (MODIFY_EXPR, void_type_node, guard,
				gimple_boolify (CALL_EXPR_ARG (*expr_p, 0)));
	      *expr_p = build3 (BIND_EXPR, void_type_node, NULL, *expr_p, NULL);
	      push_gimplify_context ();
	      gimple_seq body = NULL;
	      gimple *g = gimplify_and_return_first (*expr_p, &body);
	      pop_gimplify_context (g);
	      g = gimple_build_assume (guard, body);
	      gimple_set_location (g, loc);
	      gimplify_seq_add_stmt (pre_p, g);
	      *expr_p = NULL_TREE;
	      return GS_ALL_DONE;
	    }
	  else if (ifn == IFN_UBSAN_BOUNDS
		   && nargs == 3
		   && integer_onep (CALL_EXPR_ARG (*expr_p, 0)))
	    {
	      /* If first argument is one, add TYPE_MAX_VALUE (TYPE_DOMAIN (t))
		 to 3rd argument and change first argument to 0.  This is
		 done by ubsan_instrument_bounds so that we can use the
		 max value from gimplify_type_sizes here instead of original
		 expression for VLAs.  */
	      tree type = TREE_TYPE (CALL_EXPR_ARG (*expr_p, 0));
	      CALL_EXPR_ARG (*expr_p, 0) = build_int_cst (type, 0);
	      gcc_assert (TREE_CODE (type) == POINTER_TYPE);
	      type = TREE_TYPE (type);
	      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
	      tree maxv = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	      gcc_assert (maxv);
	      tree arg3 = CALL_EXPR_ARG (*expr_p, 2);
	      CALL_EXPR_ARG (*expr_p, 2)
		= fold_build2 (PLUS_EXPR, TREE_TYPE (arg3), maxv, arg3);
	    }

	  /* Gimplify each argument and build the internal-call tuple.  */
	  for (i = 0; i < nargs; i++)
	    {
	      gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
			    EXPR_LOCATION (*expr_p));
	      vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
	    }

	  gcall *call = gimple_build_call_internal_vec (ifn, vargs);
	  gimple_call_set_nothrow (call, TREE_NOTHROW (*expr_p));
	  gimplify_seq_add_stmt (pre_p, call);
	  return GS_ALL_DONE;
	}
    }

  /* This may be a call to a builtin function.

     Builtin function calls may be transformed into different
     (and more efficient) builtin function calls under certain
     circumstances.  Unfortunately, gimplification can muck things
     up enough that the builtin expanders are not aware that certain
     transformations are still valid.

     So we attempt transformation/gimplification of the call before
     we gimplify the CALL_EXPR.  At this time we do not manage to
     transform all calls in the same manner as the expanders do, but
     we do transform most of them.  */
  fndecl = get_callee_fndecl (*expr_p);
  if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_BUILT_IN_ALLOCA:
	/* If the call has been built for a variable-sized object, then we
	   want to restore the stack level when the enclosing BIND_EXPR is
	   exited to reclaim the allocated space; otherwise, we precisely
	   need to do the opposite and preserve the latest stack level.  */
	if (CALL_ALLOCA_FOR_VAR_P (*expr_p))
	  gimplify_ctxp->save_stack = true;
	else
	  gimplify_ctxp->keep_stack = true;
	break;

      case BUILT_IN_VA_START:
	{
	  builtin_va_start_p = true;
	  if (call_expr_nargs (*expr_p) < 2)
	    {
	      error ("too few arguments to function %<va_start%>");
	      *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
	      return GS_OK;
	    }

	  if (fold_builtin_next_arg (*expr_p, true))
	    {
	      *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
	      return GS_OK;
	    }
	  break;
	}

      case BUILT_IN_EH_RETURN:
	cfun->calls_eh_return = true;
	break;

      case BUILT_IN_CLEAR_PADDING:
	if (call_expr_nargs (*expr_p) == 1)
	  {
	    /* Remember the original type of the argument in an internal
	       dummy second argument, as in GIMPLE pointer conversions are
	       useless.  Also mark this call as not for automatic
	       initialization in the internal dummy third argument.  */
	    p = CALL_EXPR_ARG (*expr_p, 0);
	    *expr_p
	      = build_call_expr_loc (EXPR_LOCATION (*expr_p), fndecl, 2, p,
				     build_zero_cst (TREE_TYPE (p)));
	    return GS_OK;
	  }
	break;

      default:
	;
      }
  if (fndecl && fndecl_built_in_p (fndecl))
    {
      tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
      if (new_tree && new_tree != *expr_p)
	{
	  /* There was a transformation of this call which computes the
	     same value, but in a more efficient way.  Return and try
	     again.  */
	  *expr_p = new_tree;
	  return GS_OK;
	}
    }

  /* Remember the original function pointer type.  */
  fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));

  /* Handle "declare variant" resolution and arglist processing.  */
  if (flag_openmp
      && fndecl
      && cfun
      && (cfun->curr_properties & PROP_gimple_any) == 0
      && !omp_has_novariants ()
      && lookup_attribute ("omp declare variant base",
			   DECL_ATTRIBUTES (fndecl)))
    {
      tree orig = *expr_p;
      *expr_p = gimplify_variant_call_expr (*expr_p, fallback,
					    omp_dispatch_p);

      /* This may resolve to the same call, or the call expr with just
	 the function replaced, in which case we should just continue to
	 gimplify it normally.  Otherwise, if we get something else back,
	 stop here and re-gimplify the whole replacement expr.  */
      if (*expr_p != orig)
	return GS_OK;
    }

  /* There is a sequence point before the call, so any side effects in
     the calling expression must occur before the actual call.  Force
     gimplify_expr to use an internal post queue.  */
  ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
		       is_gimple_call_addr, fb_rvalue);

  if (ret == GS_ERROR)
    return GS_ERROR;

  nargs = call_expr_nargs (*expr_p);

  /* Get argument types for verification.  */
  fndecl = get_callee_fndecl (*expr_p);
  parms = NULL_TREE;
  if (fndecl)
    parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  else
    parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));

  if (fndecl && DECL_ARGUMENTS (fndecl))
    p = DECL_ARGUMENTS (fndecl);
  else if (parms)
    p = parms;
  else
    p = NULL_TREE;
  /* Advance P past one declared parameter per argument; P becomes null
     when the arguments exhaust the declared parameters.  */
  for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
    ;

  /* If the last argument is __builtin_va_arg_pack () and it is not
     passed as a named argument, decrease the number of CALL_EXPR
     arguments and set instead the CALL_EXPR_VA_ARG_PACK flag.  */
  if (!p
      && i < nargs
      && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
    {
      tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
      tree last_arg_fndecl = get_callee_fndecl (last_arg);

      if (last_arg_fndecl
	  && fndecl_built_in_p (last_arg_fndecl, BUILT_IN_VA_ARG_PACK))
	{
	  tree call = *expr_p;

	  --nargs;
	  *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
					  CALL_EXPR_FN (call),
					  nargs, CALL_EXPR_ARGP (call));

	  /* Copy all CALL_EXPR flags, location and block, except
	     CALL_EXPR_VA_ARG_PACK flag.  */
	  CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
	  CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
	  CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
	    = CALL_EXPR_RETURN_SLOT_OPT (call);
	  CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
	  SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));

	  /* Set CALL_EXPR_VA_ARG_PACK.  */
	  CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
	}
    }

  /* If the call returns twice then after building the CFG the call
     argument computations will no longer dominate the call because
     we add an abnormal incoming edge to the call.  So do not use SSA
     vars there.  */
  bool returns_twice = call_expr_flags (*expr_p) & ECF_RETURNS_TWICE;


  /* Gimplify the function arguments.  */
  if (nargs > 0)
    {
      for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
	   PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
	   PUSH_ARGS_REVERSED ? i-- : i++)
	{
	  enum gimplify_status t;

	  /* Avoid gimplifying the second argument to va_start, which needs to
	     be the plain PARM_DECL.  */
	  if ((i != 1) || !builtin_va_start_p)
	    {
	      tree *arg_p = &CALL_EXPR_ARG (*expr_p, i);

	      /* Flag argument gimplification inside an OpenMP dispatch
		 region for the benefit of OMP-aware sub-gimplification.  */
	      if (gimplify_omp_ctxp && gimplify_omp_ctxp->code == OMP_DISPATCH)
		gimplify_omp_ctxp->in_call_args = true;
	      t = gimplify_arg (arg_p, pre_p, EXPR_LOCATION (*expr_p),
				!returns_twice);
	      if (gimplify_omp_ctxp && gimplify_omp_ctxp->code == OMP_DISPATCH)
		gimplify_omp_ctxp->in_call_args = false;

	      if (t == GS_ERROR)
		ret = GS_ERROR;
	    }
	}
    }

  /* Gimplify the static chain.  */
  if (CALL_EXPR_STATIC_CHAIN (*expr_p))
    {
      if (fndecl && !DECL_STATIC_CHAIN (fndecl))
	CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
      else
	{
	  enum gimplify_status t;
	  t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
			    EXPR_LOCATION (*expr_p), ! returns_twice);
	  if (t == GS_ERROR)
	    ret = GS_ERROR;
	}
    }

  /* Verify the function result.  */
  if (want_value && fndecl
      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
    {
      error_at (loc, "using result of function returning %<void%>");
      ret = GS_ERROR;
    }

  /* Try this again in case gimplification exposed something.  */
  if (ret != GS_ERROR)
    {
      tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);

      if (new_tree && new_tree != *expr_p)
	{
	  /* There was a transformation of this call which computes the
	     same value, but in a more efficient way.  Return and try
	     again.  */
	  *expr_p = new_tree;
	  return GS_OK;
	}
    }
  else
    {
      *expr_p = error_mark_node;
      return GS_ERROR;
    }

  /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
     decl.  This allows us to eliminate redundant or useless
     calls to "const" functions.  */
  if (TREE_CODE (*expr_p) == CALL_EXPR)
    {
      int flags = call_expr_flags (*expr_p);
      if (flags & (ECF_CONST | ECF_PURE)
	  /* An infinite loop is considered a side effect.  */
	  && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
	TREE_SIDE_EFFECTS (*expr_p) = 0;
    }

  /* If the value is not needed by the caller, emit a new GIMPLE_CALL
     and clear *EXPR_P.  Otherwise, leave *EXPR_P in its gimplified
     form and delegate the creation of a GIMPLE_CALL to
     gimplify_modify_expr.  This is always possible because when
     WANT_VALUE is true, the caller wants the result of this call into
     a temporary, which means that we will emit an INIT_EXPR in
     internal_get_tmp_var which will then be handled by
     gimplify_modify_expr.  */
  if (!want_value)
    {
      /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
	 have to do is replicate it as a GIMPLE_CALL tuple.  */
      gimple_stmt_iterator gsi;
      call = gimple_build_call_from_tree (*expr_p, fnptrtype);
      notice_special_calls (call);
      gimplify_seq_add_stmt (pre_p, call);
      gsi = gsi_last (*pre_p);
      maybe_fold_stmt (&gsi);
      *expr_p = NULL_TREE;
    }
  else
    /* Remember the original function type.  */
    CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
				     CALL_EXPR_FN (*expr_p));

  return ret;
}
5019 :
5020 : /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
5021 : rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
5022 :
5023 : TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
5024 : condition is true or false, respectively. If null, we should generate
5025 : our own to skip over the evaluation of this specific expression.
5026 :
5027 : LOCUS is the source location of the COND_EXPR.
5028 :
5029 : The condition_uid is a discriminator tag for condition coverage used to map
5030 : conditions to its corresponding full Boolean function.
5031 :
5032 : This function is the tree equivalent of do_jump.
5033 :
5034 : shortcut_cond_r should only be called by shortcut_cond_expr. */
5035 :
static tree
shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
		 location_t locus, unsigned condition_uid)
{
  tree local_label = NULL_TREE;
  tree t, expr = NULL;

  /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
     retain the shortcut semantics.  Just insert the gotos here;
     shortcut_cond_expr will append the real blocks later.  */
  if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
    {
      location_t new_locus;

      /* Turn if (a && b) into

	 if (a); else goto no;
	 if (b) goto yes; else goto no;
	 (no:) */

      if (false_label_p == NULL)
	false_label_p = &local_label;

      /* Keep the original source location on the first 'if'.  */
      t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus,
			   condition_uid);
      append_to_statement_list (t, &expr);

      /* Set the source location of the && on the second 'if'.  */
      new_locus = rexpr_location (pred, locus);
      t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
			   new_locus, condition_uid);
      append_to_statement_list (t, &expr);
    }
  else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
    {
      location_t new_locus;

      /* Turn if (a || b) into

	 if (a) goto yes;
	 if (b) goto yes; else goto no;
	 (yes:) */

      if (true_label_p == NULL)
	true_label_p = &local_label;

      /* Keep the original source location on the first 'if'.  */
      t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus,
			   condition_uid);
      append_to_statement_list (t, &expr);

      /* Set the source location of the || on the second 'if'.  */
      new_locus = rexpr_location (pred, locus);
      t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
			   new_locus, condition_uid);
      append_to_statement_list (t, &expr);
    }
  else if (TREE_CODE (pred) == COND_EXPR
	   && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
	   && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
    {
      location_t new_locus;

      /* As long as we're messing with gotos, turn if (a ? b : c) into
	 if (a)
	   if (b) goto yes; else goto no;
	 else
	   if (c) goto yes; else goto no;

	 Don't do this if one of the arms has void type, which can happen
	 in C++ when the arm is throw.  */

      /* Keep the original source location on the first 'if'.  Set the source
	 location of the ? on the second 'if'.  */
      new_locus = rexpr_location (pred, locus);
      expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
		     shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
				      false_label_p, locus, condition_uid),
		     shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
				      false_label_p, new_locus,
				      condition_uid));
      tree_associate_condition_with_expr (expr, condition_uid);
    }
  else
    {
      /* Base case: PRED is a simple condition; emit a two-armed
	 COND_EXPR that jumps to the appropriate label on each arm.  */
      expr = build3 (COND_EXPR, void_type_node, pred,
		     build_and_jump (true_label_p),
		     build_and_jump (false_label_p));
      SET_EXPR_LOCATION (expr, locus);
      tree_associate_condition_with_expr (expr, condition_uid);
    }

  /* If a label was created above for this expression's own use, emit it
     here so the short-circuit jumps have somewhere to land.  */
  if (local_label)
    {
      t = build1 (LABEL_EXPR, void_type_node, local_label);
      append_to_statement_list (t, &expr);
    }

  return expr;
}
5137 :
 5138 : /* If EXPR is a GOTO_EXPR, return it.  If it is a STATEMENT_LIST, skip
 5139 : any of its leading DEBUG_BEGIN_STMTS and recurse on the subsequent
 5140 : statement, if it is the last one.  Otherwise, return NULL.  */
 5141 :
 5142 : static tree
 5143 11484712 : find_goto (tree expr)
 5144 : {
 5145 11817517 : if (!expr)
 5146 : return NULL_TREE;
 5147 :
 5148 9877591 : if (TREE_CODE (expr) == GOTO_EXPR)
 5149 : return expr;
 5150 :
 5151 7882023 : if (TREE_CODE (expr) != STATEMENT_LIST)
 5152 : return NULL_TREE;
 5153 :
 5154 1037907 : tree_stmt_iterator i = tsi_start (expr);
 5155 :
      : /* Debug begin markers carry no semantics for control flow, so step
      : past them before looking at the real statement.  */
 5156 1391299 : while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
 5157 353392 : tsi_next (&i);
 5158 :
      : /* Only a trailing goto qualifies; if more statements follow, the list
      : does not reduce to a single jump.  */
 5159 10419321 : if (!tsi_one_before_end_p (i))
 5160 : return NULL_TREE;
 5161 :
 5162 332805 : return find_goto (tsi_stmt (i));
 5163 : }
5164 :
 5165 : /* Same as find_goto, except that it returns NULL if the destination
 5166 : is not a LABEL_DECL.  */
 5167 :
 5168 : static inline tree
 5169 11484712 : find_goto_label (tree expr)
 5170 : {
 5171 11484712 : tree dest = find_goto (expr);
      : /* A GOTO_DESTINATION that is not a LABEL_DECL is presumably a computed
      : goto target; those cannot be hijacked, so report no label.  */
 5172 13480280 : if (dest && TREE_CODE (GOTO_DESTINATION (dest)) == LABEL_DECL)
 5173 1995457 : return dest;
 5174 : return NULL_TREE;
 5175 : }
5176 :
 5177 :
 5178 : /* Given a multi-term condition (ANDIF, ORIF), walk the predicate PRED and tag
 5179 : every basic condition with CONDITION_UID.  Two basic conditions share the
 5180 : CONDITION_UID discriminator when they belong to the same predicate, which is
 5181 : used by the condition coverage.  Doing this as an explicit step makes for a
 5182 : simpler implementation than weaving it into the splitting code as the
 5183 : splitting code eventually calls the entry point gimplfiy_expr which makes
 5184 : bookkeeping complicated.  */
 5185 : static void
 5186 569509 : tag_shortcut_cond (tree pred, unsigned condition_uid)
 5187 : {
      : /* Only short-circuit operators are walked; their operands are either
      : further ANDIF/ORIF terms (recurse) or leaf COND_EXPRs (tag directly).
      : Any other leaf is handled later by the splitting code itself.  */
 5188 608325 : if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR
 5189 608325 : || TREE_CODE (pred) == TRUTH_ORIF_EXPR)
 5190 : {
 5191 558520 : tree fst = TREE_OPERAND (pred, 0);
 5192 558520 : tree lst = TREE_OPERAND (pred, 1);
 5193 :
 5194 558520 : if (TREE_CODE (fst) == TRUTH_ANDIF_EXPR
 5195 558520 : || TREE_CODE (fst) == TRUTH_ORIF_EXPR)
 5196 167067 : tag_shortcut_cond (fst, condition_uid);
 5197 391453 : else if (TREE_CODE (fst) == COND_EXPR)
 5198 244 : tree_associate_condition_with_expr (fst, condition_uid);
 5199 :
 5200 558520 : if (TREE_CODE (lst) == TRUTH_ANDIF_EXPR
 5201 558520 : || TREE_CODE (lst) == TRUTH_ORIF_EXPR)
 5202 : tag_shortcut_cond (lst, condition_uid);
 5203 519704 : else if (TREE_CODE (lst) == COND_EXPR)
 5204 444 : tree_associate_condition_with_expr (lst, condition_uid);
 5205 : }
 5206 569509 : }
5207 :
 5208 : /* Given a conditional expression EXPR with short-circuit boolean
 5209 : predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
 5210 : predicate apart into the equivalent sequence of conditionals.  CONDITION_UID
 5211 : is a the tag/discriminator for this EXPR - all basic conditions in the
 5212 : expression will be given the same CONDITION_UID.  */
 5213 : static tree
 5214 402442 : shortcut_cond_expr (tree expr, unsigned condition_uid)
 5215 : {
 5216 402442 : tree pred = TREE_OPERAND (expr, 0);
 5217 402442 : tree then_ = TREE_OPERAND (expr, 1);
 5218 402442 : tree else_ = TREE_OPERAND (expr, 2);
 5219 402442 : tree true_label, false_label, end_label, t;
 5220 402442 : tree *true_label_p;
 5221 402442 : tree *false_label_p;
 5222 402442 : bool emit_end, emit_false, jump_over_else;
 5223 402442 : bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
 5224 402442 : bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
 5225 :
      : /* Tag every basic condition in PRED up front; see tag_shortcut_cond.  */
 5226 402442 : tag_shortcut_cond (pred, condition_uid);
 5227 :
 5228 : /* First do simple transformations.  */
 5229 402442 : if (!else_se)
 5230 : {
 5231 : /* If there is no 'else', turn
 5232 : if (a && b) then c
 5233 : into
 5234 : if (a) if (b) then c.  */
 5235 290246 : while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
 5236 : {
 5237 : /* Keep the original source location on the first 'if'.  */
 5238 50662 : location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
 5239 50662 : TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
 5240 : /* Set the source location of the && on the second 'if'.  */
 5241 50662 : if (rexpr_has_location (pred))
 5242 49642 : SET_EXPR_LOCATION (expr, rexpr_location (pred));
 5243 50662 : then_ = shortcut_cond_expr (expr, condition_uid);
 5244 50662 : then_se = then_ && TREE_SIDE_EFFECTS (then_);
 5245 50662 : pred = TREE_OPERAND (pred, 0);
 5246 50662 : expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
 5247 50662 : SET_EXPR_LOCATION (expr, locus);
 5248 : }
 5249 : }
 5250 :
 5251 402442 : if (!then_se)
 5252 : {
 5253 : /* If there is no 'then', turn
 5254 : if (a || b); else d
 5255 : into
 5256 : if (a); else if (b); else d.  */
 5257 25223 : while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
 5258 : {
 5259 : /* Keep the original source location on the first 'if'.  */
 5260 795 : location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
 5261 795 : TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
 5262 : /* Set the source location of the || on the second 'if'.  */
 5263 795 : if (rexpr_has_location (pred))
 5264 794 : SET_EXPR_LOCATION (expr, rexpr_location (pred));
 5265 795 : else_ = shortcut_cond_expr (expr, condition_uid);
 5266 795 : else_se = else_ && TREE_SIDE_EFFECTS (else_);
 5267 795 : pred = TREE_OPERAND (pred, 0);
 5268 795 : expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
 5269 795 : SET_EXPR_LOCATION (expr, locus);
 5270 : }
 5271 : }
 5272 :
 5273 : /* The expr tree should also have the expression id set.  */
 5274 402442 : tree_associate_condition_with_expr (expr, condition_uid);
 5275 :
 5276 : /* If we're done, great.  */
 5277 402442 : if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
 5278 402442 : && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
 5279 95984 : return expr;
 5280 :
 5281 : /* Otherwise we need to mess with gotos.  Change
 5282 : if (a) c; else d;
 5283 : to
 5284 : if (a); else goto no;
 5285 : c; goto end;
 5286 : no: d; end:
 5287 : and recursively gimplify the condition.  */
 5288 :
 5289 306458 : true_label = false_label = end_label = NULL_TREE;
 5290 :
 5291 : /* If our arms just jump somewhere, hijack those labels so we don't
 5292 : generate jumps to jumps.  */
 5293 :
 5294 306458 : if (tree then_goto = find_goto_label (then_))
 5295 : {
 5296 4547 : true_label = GOTO_DESTINATION (then_goto);
 5297 4547 : then_ = NULL;
 5298 4547 : then_se = false;
 5299 : }
 5300 :
 5301 306458 : if (tree else_goto = find_goto_label (else_))
 5302 : {
 5303 4140 : false_label = GOTO_DESTINATION (else_goto);
 5304 4140 : else_ = NULL;
 5305 4140 : else_se = false;
 5306 : }
 5307 :
 5308 : /* If we aren't hijacking a label for the 'then' branch, it falls through.  */
 5309 306458 : if (true_label)
 5310 : true_label_p = &true_label;
 5311 : else
 5312 301911 : true_label_p = NULL;
 5313 :
 5314 : /* The 'else' branch also needs a label if it contains interesting code.  */
 5315 306458 : if (false_label || else_se)
 5316 : false_label_p = &false_label;
 5317 : else
 5318 : false_label_p = NULL;
 5319 :
 5320 : /* If there was nothing else in our arms, just forward the label(s).  */
 5321 148707 : if (!then_se && !else_se)
 5322 8424 : return shortcut_cond_r (pred, true_label_p, false_label_p,
 5323 12636 : EXPR_LOC_OR_LOC (expr, input_location), condition_uid);
 5324 :
 5325 : /* If our last subexpression already has a terminal label, reuse it.  */
 5326 302246 : if (else_se)
 5327 157751 : t = expr_last (else_);
 5328 144495 : else if (then_se)
 5329 144495 : t = expr_last (then_);
 5330 : else
 5331 : t = NULL;
 5332 302246 : if (t && TREE_CODE (t) == LABEL_EXPR)
 5333 948 : end_label = LABEL_EXPR_LABEL (t);
 5334 :
 5335 : /* If we don't care about jumping to the 'else' branch, jump to the end
 5336 : if the condition is false.  */
 5337 302246 : if (!false_label_p)
 5338 144253 : false_label_p = &end_label;
 5339 :
 5340 : /* We only want to emit these labels if we aren't hijacking them.  */
 5341 302246 : emit_end = (end_label == NULL_TREE);
 5342 302246 : emit_false = (false_label == NULL_TREE);
 5343 :
 5344 : /* We only emit the jump over the else clause if we have to--if the
 5345 : then clause may fall through.  Otherwise we can wind up with a
 5346 : useless jump and a useless label at the end of gimplified code,
 5347 : which will cause us to think that this conditional as a whole
 5348 : falls through even if it doesn't.  If we then inline a function
 5349 : which ends with such a condition, that can cause us to issue an
 5350 : inappropriate warning about control reaching the end of a
 5351 : non-void function.  */
 5352 302246 : jump_over_else = block_may_fallthru (then_);
 5353 :
 5354 603869 : pred = shortcut_cond_r (pred, true_label_p, false_label_p,
 5355 603869 : EXPR_LOC_OR_LOC (expr, input_location),
 5356 : condition_uid);
 5357 :
      : /* Assemble the rewritten form: predicate, then-arm, optional jump over
      : the else, optional false label, else-arm, optional end label.  */
 5358 302246 : expr = NULL;
 5359 302246 : append_to_statement_list (pred, &expr);
 5360 :
 5361 302246 : append_to_statement_list (then_, &expr);
 5362 302246 : if (else_se)
 5363 : {
 5364 157751 : if (jump_over_else)
 5365 : {
 5366 155342 : tree last = expr_last (expr);
 5367 155342 : t = build_and_jump (&end_label);
 5368 155342 : if (rexpr_has_location (last))
 5369 37333 : SET_EXPR_LOCATION (t, rexpr_location (last));
 5370 155342 : append_to_statement_list (t, &expr);
 5371 : }
 5372 157751 : if (emit_false)
 5373 : {
 5374 157751 : t = build1 (LABEL_EXPR, void_type_node, false_label);
 5375 157751 : append_to_statement_list (t, &expr);
 5376 : }
 5377 157751 : append_to_statement_list (else_, &expr);
 5378 : }
 5379 302246 : if (emit_end && end_label)
 5380 : {
 5381 298652 : t = build1 (LABEL_EXPR, void_type_node, end_label);
 5382 298652 : append_to_statement_list (t, &expr);
 5383 : }
 5384 :
 5385 302246 : return expr;
 5386 : }
5387 :
 5388 : /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE.  */
 5389 :
 5390 : tree
 5391 16390965 : gimple_boolify (tree expr)
 5392 : {
 5393 16390965 : tree type = TREE_TYPE (expr);
 5394 16390965 : location_t loc = EXPR_LOCATION (expr);
 5395 :
      : /* Special-case 'call != 0' so __builtin_expect's first argument can be
      : boolified through the implicit widening cast.  */
 5396 16390965 : if (TREE_CODE (expr) == NE_EXPR
 5397 5661601 : && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
 5398 17321774 : && integer_zerop (TREE_OPERAND (expr, 1)))
 5399 : {
 5400 480073 : tree call = TREE_OPERAND (expr, 0);
 5401 480073 : tree fn = get_callee_fndecl (call);
 5402 :
 5403 : /* For __builtin_expect ((long) (x), y) recurse into x as well
 5404 : if x is truth_value_p.  */
 5405 480073 : if (fn
 5406 479102 : && fndecl_built_in_p (fn, BUILT_IN_EXPECT)
 5407 609139 : && call_expr_nargs (call) == 2)
 5408 : {
 5409 129066 : tree arg = CALL_EXPR_ARG (call, 0);
 5410 129066 : if (arg)
 5411 : {
 5412 129066 : if (TREE_CODE (arg) == NOP_EXPR
 5413 129066 : && TREE_TYPE (arg) == TREE_TYPE (call))
 5414 60662 : arg = TREE_OPERAND (arg, 0);
 5415 129066 : if (truth_value_p (TREE_CODE (arg)))
 5416 : {
 5417 106047 : arg = gimple_boolify (arg);
 5418 106047 : CALL_EXPR_ARG (call, 0)
 5419 212094 : = fold_convert_loc (loc, TREE_TYPE (call), arg);
 5420 : }
 5421 : }
 5422 : }
 5423 : }
 5424 :
 5425 16390965 : switch (TREE_CODE (expr))
 5426 : {
 5427 1220740 : case TRUTH_AND_EXPR:
 5428 1220740 : case TRUTH_OR_EXPR:
 5429 1220740 : case TRUTH_XOR_EXPR:
 5430 1220740 : case TRUTH_ANDIF_EXPR:
 5431 1220740 : case TRUTH_ORIF_EXPR:
 5432 : /* Also boolify the arguments of truth exprs.  */
 5433 1220740 : TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
 5434 : /* FALLTHRU */
 5435 :
 5436 1642832 : case TRUTH_NOT_EXPR:
 5437 1642832 : TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
 5438 :
 5439 : /* These expressions always produce boolean results.  */
 5440 1642832 : if (TREE_CODE (type) != BOOLEAN_TYPE)
 5441 306410 : TREE_TYPE (expr) = boolean_type_node;
 5442 : return expr;
 5443 :
 5444 5210 : case ANNOTATE_EXPR:
 5445 5210 : switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
 5446 : {
 5447 5210 : case annot_expr_ivdep_kind:
 5448 5210 : case annot_expr_unroll_kind:
 5449 5210 : case annot_expr_no_vector_kind:
 5450 5210 : case annot_expr_vector_kind:
 5451 5210 : case annot_expr_parallel_kind:
 5452 5210 : case annot_expr_maybe_infinite_kind:
      : /* Annotations are transparent: boolify the annotated value and
      : propagate the boolean type to the wrapper.  */
 5453 5210 : TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
 5454 5210 : if (TREE_CODE (type) != BOOLEAN_TYPE)
 5455 3319 : TREE_TYPE (expr) = boolean_type_node;
 5456 : return expr;
 5457 0 : default:
 5458 0 : gcc_unreachable ();
 5459 : }
 5460 :
 5461 14742923 : default:
 5462 14742923 : if (COMPARISON_CLASS_P (expr))
 5463 : {
 5464 : /* These expressions always produce boolean results.  */
 5465 12898151 : if (TREE_CODE (type) != BOOLEAN_TYPE)
 5466 1574835 : TREE_TYPE (expr) = boolean_type_node;
 5467 12898151 : return expr;
 5468 : }
 5469 : /* Other expressions that get here must have boolean values, but
 5470 : might need to be converted to the appropriate mode.  */
 5471 1844772 : if (TREE_CODE (type) == BOOLEAN_TYPE)
 5472 : return expr;
 5473 201533 : return fold_convert_loc (loc, boolean_type_node, expr);
 5474 : }
 5475 : }
5476 :
 5477 : /* Given a conditional expression *EXPR_P without side effects, gimplify
 5478 : its operands.  New statements are inserted to PRE_P.  */
 5479 :
 5480 : static enum gimplify_status
 5481 46017 : gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
 5482 : {
 5483 46017 : tree expr = *expr_p, cond;
 5484 46017 : enum gimplify_status ret, tret;
 5485 46017 : enum tree_code code;
 5486 :
 5487 46017 : cond = gimple_boolify (COND_EXPR_COND (expr));
 5488 :
 5489 : /* We need to handle && and || specially, as their gimplification
 5490 : creates pure cond_expr, thus leading to an infinite cycle otherwise.  */
 5491 46017 : code = TREE_CODE (cond);
      : /* Demote short-circuit ops to their non-short-circuit forms; safe here
      : because the operands were already checked for side effects.  */
 5492 46017 : if (code == TRUTH_ANDIF_EXPR)
 5493 31 : TREE_SET_CODE (cond, TRUTH_AND_EXPR);
 5494 45986 : else if (code == TRUTH_ORIF_EXPR)
 5495 4637 : TREE_SET_CODE (cond, TRUTH_OR_EXPR);
 5496 46017 : ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_val, fb_rvalue);
 5497 46017 : COND_EXPR_COND (*expr_p) = cond;
 5498 :
      : /* Gimplify both arms; the overall status is the weakest of the three.  */
 5499 46017 : tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
 5500 : is_gimple_val, fb_rvalue);
 5501 46017 : ret = MIN (ret, tret);
 5502 46017 : tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
 5503 : is_gimple_val, fb_rvalue);
 5504 :
 5505 46017 : return MIN (ret, tret);
 5506 : }
5507 :
 5508 : /* Return true if evaluating EXPR could trap.
 5509 : EXPR is GENERIC, while tree_could_trap_p can be called
 5510 : only on GIMPLE.  */
 5511 :
 5512 : bool
 5513 19240909 : generic_expr_could_trap_p (tree expr)
 5514 : {
 5515 19240909 : unsigned i, n;
 5516 :
      : /* Gimple values (constants, decls, ...) never trap.  */
 5517 19240909 : if (!expr || is_gimple_val (expr))
 5518 7687333 : return false;
 5519 :
      : /* Conservatively treat non-expression nodes as trapping.  */
 5520 11553576 : if (!EXPR_P (expr) || tree_could_trap_p (expr))
 5521 3235735 : return true;
 5522 :
      : /* Recurse into all operands; any trapping subexpression taints EXPR.  */
 5523 8317841 : n = TREE_OPERAND_LENGTH (expr);
 5524 18135766 : for (i = 0; i < n; i++)
 5525 13525589 : if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
 5526 : return true;
 5527 :
 5528 : return false;
 5529 : }
5530 :
 5531 : /* Associate the condition STMT with the discriminator UID.  STMTs that are
 5532 : broken down with ANDIF/ORIF from the same Boolean expression should be given
 5533 : the same UID; 'if (a && b && c) { if (d || e) ... } ...' should yield the
 5534 : { a: 1, b: 1, c: 1, d: 2, e: 2 } when gimplification is done.  This is used
 5535 : for condition coverage.  */
 5536 : static void
 5537 5435898 : gimple_associate_condition_with_expr (struct function *fn, gcond *stmt,
 5538 : unsigned uid)
 5539 : {
      : /* The mapping is only needed for -fcondition-coverage; skip the
      : bookkeeping entirely otherwise.  */
 5540 5435898 : if (!condition_coverage_flag)
 5541 : return;
 5542 :
      : /* Lazily allocate the per-function map on first use.  */
 5543 639 : if (!fn->cond_uids)
 5544 138 : fn->cond_uids = new hash_map <gcond*, unsigned> ();
 5545 :
 5546 639 : fn->cond_uids->put (stmt, uid);
 5547 : }
5548 :
 5549 : /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
 5550 : into
 5551 :
 5552 : if (p) if (p)
 5553 : t1 = a; a;
 5554 : else or else
 5555 : t1 = b; b;
 5556 : t1;
 5557 :
 5558 : The second form is used when *EXPR_P is of type void.
 5559 :
 5560 : PRE_P points to the list where side effects that must happen before
 5561 : *EXPR_P should be stored.  */
 5562 :
 5563 : static enum gimplify_status
 5564 6203415 : gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
 5565 : {
 5566 6203415 : tree expr = *expr_p;
 5567 6203415 : tree type = TREE_TYPE (expr);
 5568 6203415 : location_t loc = EXPR_LOCATION (expr);
 5569 6203415 : tree tmp, arm1, arm2;
 5570 6203415 : enum gimplify_status ret;
 5571 6203415 : tree label_true, label_false, label_cont;
 5572 6203415 : bool have_then_clause_p, have_else_clause_p;
 5573 6203415 : gcond *cond_stmt;
 5574 6203415 : enum tree_code pred_code;
 5575 6203415 : gimple_seq seq = NULL;
 5576 :
 5577 : /* If this COND_EXPR has a value, copy the values into a temporary within
 5578 : the arms.  */
 5579 6203415 : if (!VOID_TYPE_P (type))
 5580 : {
 5581 415762 : tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
 5582 415762 : tree result;
 5583 :
 5584 : /* If either an rvalue is ok or we do not require an lvalue, create the
 5585 : temporary.  But we cannot do that if the type is addressable.  */
 5586 415762 : if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
 5587 415755 : && !TREE_ADDRESSABLE (type))
 5588 : {
 5589 415755 : if (gimplify_ctxp->allow_rhs_cond_expr
 5590 : /* If either branch has side effects or could trap, it can't be
 5591 : evaluated unconditionally.  */
 5592 46017 : && !TREE_SIDE_EFFECTS (then_)
 5593 46017 : && !generic_expr_could_trap_p (then_)
 5594 46017 : && !TREE_SIDE_EFFECTS (else_)
 5595 461772 : && !generic_expr_could_trap_p (else_))
 5596 46017 : return gimplify_pure_cond_expr (expr_p, pre_p);
 5597 :
 5598 369738 : tmp = create_tmp_var (type, "iftmp");
 5599 369738 : result = tmp;
 5600 : }
 5601 :
 5602 : /* Otherwise, only create and copy references to the values.  */
 5603 : else
 5604 : {
      : /* Addressable-type case: work through pointers so no copy of the
      : value itself is made.  */
 5605 7 : type = build_pointer_type (type);
 5606 :
 5607 7 : if (!VOID_TYPE_P (TREE_TYPE (then_)))
 5608 7 : then_ = build_fold_addr_expr_loc (loc, then_);
 5609 :
 5610 7 : if (!VOID_TYPE_P (TREE_TYPE (else_)))
 5611 7 : else_ = build_fold_addr_expr_loc (loc, else_);
 5612 :
 5613 7 : expr
 5614 7 : = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
 5615 :
 5616 7 : tmp = create_tmp_var (type, "iftmp");
 5617 7 : result = build_simple_mem_ref_loc (loc, tmp);
 5618 : }
 5619 :
 5620 : /* Build the new then clause, `tmp = then_;'.  But don't build the
 5621 : assignment if the value is void; in C++ it can be if it's a throw.  */
 5622 369745 : if (!VOID_TYPE_P (TREE_TYPE (then_)))
 5623 288185 : TREE_OPERAND (expr, 1) = build2 (INIT_EXPR, type, tmp, then_);
 5624 :
 5625 : /* Similarly, build the new else clause, `tmp = else_;'.  */
 5626 369745 : if (!VOID_TYPE_P (TREE_TYPE (else_)))
 5627 359118 : TREE_OPERAND (expr, 2) = build2 (INIT_EXPR, type, tmp, else_);
 5628 :
 5629 369745 : TREE_TYPE (expr) = void_type_node;
 5630 369745 : recalculate_side_effects (expr);
 5631 :
 5632 : /* Move the COND_EXPR to the prequeue.  */
 5633 369745 : gimplify_stmt (&expr, pre_p);
 5634 :
 5635 369745 : *expr_p = result;
 5636 369745 : return GS_ALL_DONE;
 5637 : }
 5638 :
 5639 : /* Remove any COMPOUND_EXPR so the following cases will be caught.  */
 5640 5803220 : STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
 5641 5787653 : if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
 5642 80957 : gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);
 5643 :
 5644 : /* Make sure the condition has BOOLEAN_TYPE.  */
 5645 5787653 : TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
 5646 :
 5647 : /* Break apart && and || conditions.  */
 5648 5787653 : if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
 5649 5787653 : || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
 5650 : {
 5651 350985 : expr = shortcut_cond_expr (expr, next_cond_uid ());
 5652 :
 5653 350985 : if (expr != *expr_p)
 5654 : {
 5655 350985 : *expr_p = expr;
 5656 :
 5657 : /* We can't rely on gimplify_expr to re-gimplify the expanded
 5658 : form properly, as cleanups might cause the target labels to be
 5659 : wrapped in a TRY_FINALLY_EXPR.  To prevent that, we need to
 5660 : set up a conditional context.  */
 5661 350985 : gimple_push_condition ();
 5662 350985 : gimplify_stmt (expr_p, &seq);
 5663 350985 : gimple_pop_condition (pre_p);
 5664 350985 : gimple_seq_add_seq (pre_p, seq);
 5665 :
 5666 350985 : return GS_ALL_DONE;
 5667 : }
 5668 : }
 5669 :
 5670 : /* Now do the normal gimplification.  */
 5671 :
 5672 : /* Gimplify condition.  */
 5673 5436668 : ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL,
 5674 : is_gimple_condexpr_for_cond, fb_rvalue);
 5675 5436668 : if (ret == GS_ERROR)
 5676 : return GS_ERROR;
 5677 5435898 : gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
 5678 :
 5679 5435898 : gimple_push_condition ();
 5680 :
      : /* Hijack the arms' labels when they are plain gotos, so the GIMPLE_COND
      : can branch to them directly instead of through artificial labels.  */
 5681 5435898 : have_then_clause_p = have_else_clause_p = false;
 5682 5435898 : label_true = find_goto_label (TREE_OPERAND (expr, 1));
 5683 5435898 : if (label_true
 5684 1273904 : && DECL_CONTEXT (GOTO_DESTINATION (label_true)) == current_function_decl
 5685 : /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
 5686 : have different locations, otherwise we end up with incorrect
 5687 : location information on the branches.  */
 5688 6709802 : && (optimize
 5689 234516 : || !EXPR_HAS_LOCATION (expr)
 5690 207693 : || !rexpr_has_location (label_true)
 5691 75850 : || EXPR_LOCATION (expr) == rexpr_location (label_true)))
 5692 : {
 5693 1272752 : have_then_clause_p = true;
 5694 1272752 : label_true = GOTO_DESTINATION (label_true);
 5695 : }
 5696 : else
 5697 4163146 : label_true = create_artificial_label (UNKNOWN_LOCATION);
 5698 5435898 : label_false = find_goto_label (TREE_OPERAND (expr, 2));
 5699 5435898 : if (label_false
 5700 712866 : && DECL_CONTEXT (GOTO_DESTINATION (label_false)) == current_function_decl
 5701 : /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
 5702 : have different locations, otherwise we end up with incorrect
 5703 : location information on the branches.  */
 5704 6148764 : && (optimize
 5705 100714 : || !EXPR_HAS_LOCATION (expr)
 5706 99837 : || !rexpr_has_location (label_false)
 5707 123 : || EXPR_LOCATION (expr) == rexpr_location (label_false)))
 5708 : {
 5709 712755 : have_else_clause_p = true;
 5710 712755 : label_false = GOTO_DESTINATION (label_false);
 5711 : }
 5712 : else
 5713 4723143 : label_false = create_artificial_label (UNKNOWN_LOCATION);
 5714 :
      : /* Reuse the UID assigned by shortcut splitting (if any), otherwise this
      : condition is a fresh predicate and gets a new one.  */
 5715 5435898 : unsigned cond_uid = 0;
 5716 5435898 : if (cond_uids)
 5717 455 : if (unsigned *v = cond_uids->get (expr))
 5718 435 : cond_uid = *v;
 5719 435 : if (cond_uid == 0)
 5720 5435463 : cond_uid = next_cond_uid ();
 5721 :
 5722 5435898 : gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
 5723 : &arm2);
 5724 5435898 : cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
 5725 : label_false);
 5726 5435898 : gimple_set_location (cond_stmt, EXPR_LOCATION (expr));
 5727 5435898 : gimple_associate_condition_with_expr (cfun, cond_stmt, cond_uid);
 5728 5435898 : copy_warning (cond_stmt, COND_EXPR_COND (expr));
 5729 5435898 : gimplify_seq_add_stmt (&seq, cond_stmt);
 5730 5435898 : gimple_stmt_iterator gsi = gsi_last (seq);
 5731 5435898 : maybe_fold_stmt (&gsi);
 5732 :
 5733 5435898 : label_cont = NULL_TREE;
 5734 5435898 : if (!have_then_clause_p)
 5735 : {
 5736 : /* For if (...) {} else { code; } put label_true after
 5737 : the else block.  */
 5738 4163146 : if (TREE_OPERAND (expr, 1) == NULL_TREE
 5739 196592 : && !have_else_clause_p
 5740 4163914 : && TREE_OPERAND (expr, 2) != NULL_TREE)
 5741 : {
 5742 : /* For if (0) {} else { code; } tell -Wimplicit-fallthrough
 5743 : handling that label_cont == label_true can be only reached
 5744 : through fallthrough from { code; }.  */
 5745 768 : if (integer_zerop (COND_EXPR_COND (expr)))
 5746 0 : UNUSED_LABEL_P (label_true) = 1;
 5747 : label_cont = label_true;
 5748 : }
 5749 : else
 5750 : {
 5751 4162378 : bool then_side_effects
 5752 4162378 : = (TREE_OPERAND (expr, 1)
 5753 4162378 : && TREE_SIDE_EFFECTS (TREE_OPERAND (expr, 1)));
 5754 4162378 : gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
 5755 4162378 : have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
 5756 : /* For if (...) { code; } else {} or
 5757 : if (...) { code; } else goto label; or
 5758 : if (...) { code; return; } else { ... }
 5759 : label_cont isn't needed.  */
 5760 4162378 : if (!have_else_clause_p
 5761 3965414 : && TREE_OPERAND (expr, 2) != NULL_TREE
 5762 6915374 : && gimple_seq_may_fallthru (seq))
 5763 : {
 5764 1917630 : gimple *g;
 5765 1917630 : label_cont = create_artificial_label (UNKNOWN_LOCATION);
 5766 :
 5767 : /* For if (0) { non-side-effect-code } else { code }
 5768 : tell -Wimplicit-fallthrough handling that label_cont can
 5769 : be only reached through fallthrough from { code }.  */
 5770 1917630 : if (integer_zerop (COND_EXPR_COND (expr)))
 5771 : {
 5772 73172 : UNUSED_LABEL_P (label_true) = 1;
 5773 73172 : if (!then_side_effects)
 5774 11510 : UNUSED_LABEL_P (label_cont) = 1;
 5775 : }
 5776 :
 5777 1917630 : g = gimple_build_goto (label_cont);
 5778 :
 5779 : /* GIMPLE_COND's are very low level; they have embedded
 5780 : gotos.  This particular embedded goto should not be marked
 5781 : with the location of the original COND_EXPR, as it would
 5782 : correspond to the COND_EXPR's condition, not the ELSE or the
 5783 : THEN arms.  To avoid marking it with the wrong location, flag
 5784 : it as "no location".  */
 5785 1917630 : gimple_set_do_not_emit_location (g);
 5786 :
 5787 1917630 : gimplify_seq_add_stmt (&seq, g);
 5788 : }
 5789 : }
 5790 : }
 5791 5435898 : if (!have_else_clause_p)
 5792 : {
 5793 : /* For if (1) { code } or if (1) { code } else { non-side-effect-code }
 5794 : tell -Wimplicit-fallthrough handling that label_false can be only
 5795 : reached through fallthrough from { code }.  */
 5796 4723143 : if (integer_nonzerop (COND_EXPR_COND (expr))
 5797 4723143 : && (TREE_OPERAND (expr, 2) == NULL_TREE
 5798 56733 : || !TREE_SIDE_EFFECTS (TREE_OPERAND (expr, 2))))
 5799 7013 : UNUSED_LABEL_P (label_false) = 1;
 5800 4723143 : gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
 5801 4723143 : have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
 5802 : }
 5803 5435898 : if (label_cont)
 5804 1918398 : gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
 5805 :
 5806 5435898 : gimple_pop_condition (pre_p);
 5807 5435898 : gimple_seq_add_seq (pre_p, seq);
 5808 :
 5809 5435898 : if (ret == GS_ERROR)
 5810 : ; /* Do nothing.  */
 5811 5435898 : else if (have_then_clause_p || have_else_clause_p)
 5812 : ret = GS_ALL_DONE;
 5813 : else
 5814 : {
 5815 : /* Both arms are empty; replace the COND_EXPR with its predicate.  */
 5816 2694 : expr = TREE_OPERAND (expr, 0);
 5817 2694 : gimplify_stmt (&expr, pre_p);
 5818 : }
 5819 :
 5820 5435898 : *expr_p = NULL;
 5821 5435898 : return ret;
 5822 : }
5823 :
 5824 : /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
 5825 : to be marked addressable.
 5826 :
 5827 : We cannot rely on such an expression being directly markable if a temporary
 5828 : has been created by the gimplification.  In this case, we create another
 5829 : temporary and initialize it with a copy, which will become a store after we
 5830 : mark it addressable.  This can happen if the front-end passed us something
 5831 : that it could not mark addressable yet, like a Fortran pass-by-reference
 5832 : parameter (int) floatvar.  */
 5833 :
 5834 : static void
 5835 54916202 : prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
 5836 : {
      : /* Strip component references down to the base object.  */
 5837 60224940 : while (handled_component_p (*expr_p))
 5838 5308738 : expr_p = &TREE_OPERAND (*expr_p, 0);
 5839 :
 5840 : /* Do not allow an SSA name as the temporary.  */
 5841 54916202 : if (is_gimple_reg (*expr_p))
 5842 12012 : *expr_p = internal_get_tmp_var (*expr_p, seq_p, NULL, false, false, true);
 5843 54916202 : }
5844 :
 5845 : /* A subroutine of gimplify_modify_expr.  Replace a MODIFY_EXPR with
 5846 : a call to __builtin_memcpy.  */
 5847 :
 5848 : static enum gimplify_status
 5849 1089 : gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
 5850 : gimple_seq *seq_p)
 5851 : {
 5852 1089 : tree t, to, to_ptr, from, from_ptr;
 5853 1089 : gcall *gs;
 5854 1089 : location_t loc = EXPR_LOCATION (*expr_p);
 5855 :
 5856 1089 : to = TREE_OPERAND (*expr_p, 0);
 5857 1089 : from = TREE_OPERAND (*expr_p, 1);
      : /* memcpy only works in the generic address space.  */
 5858 1089 : gcc_assert (ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (to)))
 5859 : && ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (from))));
 5860 :
 5861 : /* Mark the RHS addressable.  Beware that it may not be possible to do so
 5862 : directly if a temporary has been created by the gimplification.  */
 5863 1089 : prepare_gimple_addressable (&from, seq_p);
 5864 :
 5865 1089 : mark_addressable (from);
 5866 1089 : from_ptr = build_fold_addr_expr_loc (loc, from);
 5867 1089 : gimplify_arg (&from_ptr, seq_p, loc);
 5868 :
 5869 1089 : mark_addressable (to);
 5870 1089 : to_ptr = build_fold_addr_expr_loc (loc, to);
 5871 1089 : gimplify_arg (&to_ptr, seq_p, loc);
 5872 :
 5873 1089 : t = builtin_decl_implicit (BUILT_IN_MEMCPY);
 5874 :
 5875 1089 : gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
 5876 1089 : gimple_call_set_alloca_for_var (gs, true);
 5877 :
 5878 1089 : if (want_value)
 5879 : {
 5880 : /* tmp = memcpy() */
 5881 8 : t = create_tmp_var (TREE_TYPE (to_ptr));
 5882 8 : gimple_call_set_lhs (gs, t);
 5883 8 : gimplify_seq_add_stmt (seq_p, gs);
 5884 :
      : /* The value of the assignment is *dest, i.e. memcpy's return
      : value dereferenced.  */
 5885 8 : *expr_p = build_simple_mem_ref (t);
 5886 8 : return GS_ALL_DONE;
 5887 : }
 5888 :
 5889 1081 : gimplify_seq_add_stmt (seq_p, gs);
 5890 1081 : *expr_p = NULL;
 5891 1081 : return GS_ALL_DONE;
 5892 : }
5893 :
 5894 : /* A subroutine of gimplify_modify_expr.  Replace a MODIFY_EXPR with
 5895 : a call to __builtin_memset.  In this case we know that the RHS is
 5896 : a CONSTRUCTOR with an empty element list.  */
 5897 :
 5898 : static enum gimplify_status
 5899 84 : gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
 5900 : gimple_seq *seq_p)
 5901 : {
 5902 84 : tree t, from, to, to_ptr;
 5903 84 : gcall *gs;
 5904 84 : location_t loc = EXPR_LOCATION (*expr_p);
 5905 :
 5906 : /* Assert our assumptions, to abort instead of producing wrong code
 5907 : silently if they are not met.  Beware that the RHS CONSTRUCTOR might
 5908 : not be immediately exposed.  */
 5909 84 : from = TREE_OPERAND (*expr_p, 1);
 5910 84 : if (TREE_CODE (from) == WITH_SIZE_EXPR)
 5911 84 : from = TREE_OPERAND (from, 0);
 5912 :
 5913 84 : gcc_assert (TREE_CODE (from) == CONSTRUCTOR
 5914 : && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
 5915 :
 5916 : /* Now proceed.  */
 5917 84 : to = TREE_OPERAND (*expr_p, 0);
      : /* memset only works in the generic address space.  */
 5918 84 : gcc_assert (ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (to))));
 5919 :
 5920 84 : to_ptr = build_fold_addr_expr_loc (loc, to);
 5921 84 : gimplify_arg (&to_ptr, seq_p, loc);
 5922 84 : t = builtin_decl_implicit (BUILT_IN_MEMSET);
 5923 :
 5924 84 : gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
 5925 :
 5926 84 : if (want_value)
 5927 : {
 5928 : /* tmp = memset() */
 5929 0 : t = create_tmp_var (TREE_TYPE (to_ptr));
 5930 0 : gimple_call_set_lhs (gs, t);
 5931 0 : gimplify_seq_add_stmt (seq_p, gs);
 5932 :
 5933 0 : *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
 5934 0 : return GS_ALL_DONE;
 5935 : }
 5936 :
 5937 84 : gimplify_seq_add_stmt (seq_p, gs);
 5938 84 : *expr_p = NULL;
 5939 84 : return GS_ALL_DONE;
 5940 : }
5941 :
 5942 : /* A subroutine of gimplify_init_ctor_preeval.  Called via walk_tree,
 5943 : determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
 5944 : assignment.  Return non-null if we detect a potential overlap.  */
 5945 :
      : /* Context passed through walk_tree to gimplify_init_ctor_preeval_1.  */
 5946 : struct gimplify_init_ctor_preeval_data
 5947 : {
 5948 : /* The base decl of the lhs object.  May be NULL, in which case we
 5949 : have to assume the lhs is indirect.  */
 5950 : tree lhs_base_decl;
 5951 :
 5952 : /* The alias set of the lhs object.  */
 5953 : alias_set_type lhs_alias_set;
 5954 : };
5955 :
 5956 : static tree
      : /* walk_tree callback: returns the offending subtree on potential overlap
      : with the lhs described by XDATA, NULL otherwise.  */
 5957 115383 : gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
 5958 : {
 5959 115383 : struct gimplify_init_ctor_preeval_data *data
 5960 : = (struct gimplify_init_ctor_preeval_data *) xdata;
 5961 115383 : tree t = *tp;
 5962 :
 5963 : /* If we find the base object, obviously we have overlap.  */
 5964 115383 : if (data->lhs_base_decl == t)
 5965 : return t;
 5966 :
 5967 : /* If the constructor component is indirect, determine if we have a
 5968 : potential overlap with the lhs.  The only bits of information we
 5969 : have to go on at this point are addressability and alias sets.  */
 5970 115382 : if ((INDIRECT_REF_P (t)
 5971 115382 : || TREE_CODE (t) == MEM_REF)
 5972 651 : && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
 5973 115898 : && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
 5974 : return t;
 5975 :
 5976 : /* If the constructor component is a call, determine if it can hide a
 5977 : potential overlap with the lhs through an INDIRECT_REF like above.
 5978 : ??? Ugh - this is completely broken.  In fact this whole analysis
 5979 : doesn't look conservative.  */
 5980 114866 : if (TREE_CODE (t) == CALL_EXPR)
 5981 : {
 5982 0 : tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
 5983 :
      : /* Any pointer parameter could alias the lhs if it is addressable.  */
 5984 0 : for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
 5985 0 : if (POINTER_TYPE_P (TREE_VALUE (type))
 5986 0 : && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
 5987 0 : && alias_sets_conflict_p (data->lhs_alias_set,
 5988 : get_alias_set
 5989 0 : (TREE_TYPE (TREE_VALUE (type)))))
 5990 : return t;
 5991 : }
 5992 :
      : /* Types and decls have no interesting subtrees to walk.  */
 5993 114866 : if (IS_TYPE_OR_DECL_P (t))
 5994 10968 : *walk_subtrees = 0;
 5995 : return NULL;
 5996 : }
5997 :
/* A subroutine of gimplify_init_constructor.  Pre-evaluate EXPR,
   force values that overlap with the lhs (as described by *DATA)
   into temporaries.

   EXPR_P points at one constructor element value; nested CONSTRUCTORs
   are handled recursively.  Statements generated while gimplifying the
   value are appended to PRE_P/POST_P.  */

static void
gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			    struct gimplify_init_ctor_preeval_data *data)
{
  enum gimplify_status one;

  /* If the value is constant, then there's nothing to pre-evaluate.  */
  if (TREE_CONSTANT (*expr_p))
    {
      /* Ensure it does not have side effects, it might contain a reference to
	 the object we're initializing.  */
      gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
      return;
    }

  /* If the type has non-trivial constructors, we can't pre-evaluate.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
    return;

  /* Recurse for nested constructors.  */
  if (TREE_CODE (*expr_p) == CONSTRUCTOR)
    {
      unsigned HOST_WIDE_INT ix;
      constructor_elt *ce;
      vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);

      FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
	gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);

      return;
    }

  /* If this is a variable sized type, we must remember the size.  */
  maybe_with_size_expr (expr_p);

  /* Gimplify the constructor element to something appropriate for the rhs
     of a MODIFY_EXPR.  Given that we know the LHS is an aggregate, we know
     the gimplifier will consider this a store to memory.  Doing this
     gimplification now means that we won't have to deal with complicated
     language-specific trees, nor trees like SAVE_EXPR that can induce
     exponential search behavior.  */
  one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
  if (one == GS_ERROR)
    {
      /* Drop the element entirely; the caller skips NULL values.  */
      *expr_p = NULL;
      return;
    }

  /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
     with the lhs, since "a = { .x=a }" doesn't make sense.  This will
     always be true for all scalars, since is_gimple_mem_rhs insists on a
     temporary variable for them.  */
  if (DECL_P (*expr_p))
    return;

  /* If this is of variable size, we have no choice but to assume it doesn't
     overlap since we can't make a temporary for it.  */
  if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
    return;

  /* Otherwise, we must search for overlap ...  */
  if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
    return;

  /* ... and if found, force the value into a temporary.  */
  *expr_p = get_formal_tmp_var (*expr_p, pre_p);
}
6069 :
6070 : /* A subroutine of gimplify_init_ctor_eval. Create a loop for
6071 : a RANGE_EXPR in a CONSTRUCTOR for an array.
6072 :
6073 : var = lower;
6074 : loop_entry:
6075 : object[var] = value;
6076 : if (var == upper)
6077 : goto loop_exit;
6078 : var = var + 1;
6079 : goto loop_entry;
6080 : loop_exit:
6081 :
6082 : We increment var _after_ the loop exit check because we might otherwise
6083 : fail if upper == TYPE_MAX_VALUE (type for upper).
6084 :
6085 : Note that we never have to deal with SAVE_EXPRs here, because this has
6086 : already been taken care of for us, in gimplify_init_ctor_preeval(). */
6087 :
6088 : static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
6089 : gimple_seq *, bool);
6090 :
6091 : static void
6092 427 : gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
6093 : tree value, tree array_elt_type,
6094 : gimple_seq *pre_p, bool cleared)
6095 : {
6096 427 : tree loop_entry_label, loop_exit_label, fall_thru_label;
6097 427 : tree var, var_type, cref, tmp;
6098 :
6099 427 : loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
6100 427 : loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
6101 427 : fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
6102 :
6103 : /* Create and initialize the index variable. */
6104 427 : var_type = TREE_TYPE (upper);
6105 427 : var = create_tmp_var (var_type);
6106 427 : gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
6107 :
6108 : /* Add the loop entry label. */
6109 427 : gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
6110 :
6111 : /* Build the reference. */
6112 427 : cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
6113 : var, NULL_TREE, NULL_TREE);
6114 :
6115 : /* If we are a constructor, just call gimplify_init_ctor_eval to do
6116 : the store. Otherwise just assign value to the reference. */
6117 :
6118 427 : if (TREE_CODE (value) == CONSTRUCTOR)
6119 : /* NB we might have to call ourself recursively through
6120 : gimplify_init_ctor_eval if the value is a constructor. */
6121 114 : gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
6122 : pre_p, cleared);
6123 : else
6124 : {
6125 313 : if (gimplify_expr (&value, pre_p, NULL, is_gimple_val, fb_rvalue)
6126 : != GS_ERROR)
6127 313 : gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
6128 : }
6129 :
6130 : /* We exit the loop when the index var is equal to the upper bound. */
6131 854 : gimplify_seq_add_stmt (pre_p,
6132 427 : gimple_build_cond (EQ_EXPR, var, upper,
6133 : loop_exit_label, fall_thru_label));
6134 :
6135 427 : gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
6136 :
6137 : /* Otherwise, increment the index var... */
6138 427 : tmp = build2 (PLUS_EXPR, var_type, var,
6139 : fold_convert (var_type, integer_one_node));
6140 427 : gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
6141 :
6142 : /* ...and jump back to the loop entry. */
6143 427 : gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
6144 :
6145 : /* Add the loop exit label. */
6146 427 : gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
6147 427 : }
6148 :
6149 : /* A subroutine of gimplify_init_constructor. Generate individual
6150 : MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
6151 : assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
6152 : CONSTRUCTOR. CLEARED is true if the entire LHS object has been
6153 : zeroed first. */
6154 :
static void
gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
			 gimple_seq *pre_p, bool cleared)
{
  tree array_elt_type = NULL;
  unsigned HOST_WIDE_INT ix;
  tree purpose, value;

  /* For arrays, remember the element type so we can build ARRAY_REFs.  */
  if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
    array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));

  FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
    {
      tree cref;

      /* NULL values are created above for gimplification errors.  */
      if (value == NULL)
	continue;

      /* Zero values need no store when the object was block-cleared.  */
      if (cleared && initializer_zerop (value))
	continue;

      /* ??? Here's to hoping the front end fills in all of the indices,
	 so we don't have to figure out what's missing ourselves.  */
      gcc_assert (purpose);

      /* Skip zero-sized fields, unless value has side-effects.  This can
	 happen with calls to functions returning a empty type, which
	 we shouldn't discard.  As a number of downstream passes don't
	 expect sets of empty type fields, we rely on the gimplification of
	 the MODIFY_EXPR we make below to drop the assignment statement.  */
      if (!TREE_SIDE_EFFECTS (value)
	  && TREE_CODE (purpose) == FIELD_DECL
	  && is_empty_type (TREE_TYPE (purpose)))
	continue;

      /* If we have a RANGE_EXPR, we have to build a loop to assign the
	 whole range.  */
      if (TREE_CODE (purpose) == RANGE_EXPR)
	{
	  tree lower = TREE_OPERAND (purpose, 0);
	  tree upper = TREE_OPERAND (purpose, 1);

	  /* If the lower bound is equal to upper, just treat it as if
	     upper was the index.  */
	  if (simple_cst_equal (lower, upper))
	    purpose = upper;
	  else
	    {
	      gimplify_init_ctor_eval_range (object, lower, upper, value,
					     array_elt_type, pre_p, cleared);
	      continue;
	    }
	}

      /* Build the reference to the element being initialized:
	 an ARRAY_REF for arrays, a COMPONENT_REF otherwise.  */
      if (array_elt_type)
	{
	  /* Do not use bitsizetype for ARRAY_REF indices.  */
	  if (TYPE_DOMAIN (TREE_TYPE (object)))
	    purpose
	      = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
			      purpose);
	  cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
			 purpose, NULL_TREE, NULL_TREE);
	}
      else
	{
	  gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
	  cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
			 unshare_expr (object), purpose, NULL_TREE);
	}

      if (TREE_CODE (value) == CONSTRUCTOR
	  && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
	/* Recurse for aggregate sub-constructors; vector CONSTRUCTORs
	   stay as-is all the way through gimple.  */
	gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
				 pre_p, cleared);
      else if (TREE_CODE (value) == RAW_DATA_CST)
	{
	  if (RAW_DATA_LENGTH (value) <= 32)
	    {
	      /* Short raw data: emit one byte store per (non-zero, when
		 CLEARED) element.  */
	      for (unsigned int i = 0; i < (unsigned) RAW_DATA_LENGTH (value);
		   ++i)
		if (!cleared || RAW_DATA_POINTER (value)[i])
		  {
		    if (i)
		      {
			/* Rebuild the ARRAY_REF at index PURPOSE + I.  */
			tree p
			  = fold_build2 (PLUS_EXPR, TREE_TYPE (purpose),
					 purpose,
					 build_int_cst (TREE_TYPE (purpose),
							i));
			cref = build4 (ARRAY_REF, array_elt_type,
				       unshare_expr (object), p, NULL_TREE,
				       NULL_TREE);
		      }
		    tree init
		      = build2 (INIT_EXPR, TREE_TYPE (cref), cref,
				build_int_cst (TREE_TYPE (value),
					       RAW_DATA_UCHAR_ELT (value, i)));
		    gimplify_and_add (init, pre_p);
		    ggc_free (init);
		  }
	    }
	  else
	    {
	      /* Long raw data: materialize it as a static array constant
		 and block-copy it through a MEM_REF over the element's
		 address.  */
	      tree rtype = build_array_type_nelts (TREE_TYPE (value),
						   RAW_DATA_LENGTH (value));
	      tree rctor = build_constructor_single (rtype, bitsize_zero_node,
						     value);
	      tree addr = build_fold_addr_expr (cref);
	      cref = build2 (MEM_REF, rtype, addr,
			     build_int_cst (ptr_type_node, 0));
	      rctor = tree_output_constant_def (rctor);
	      if (!useless_type_conversion_p (rtype, TREE_TYPE (rctor)))
		rctor = build1 (VIEW_CONVERT_EXPR, rtype, rctor);
	      if (gimplify_expr (&cref, pre_p, NULL, is_gimple_lvalue,
				 fb_lvalue) != GS_ERROR)
		gimplify_seq_add_stmt (pre_p,
				       gimple_build_assign (cref, rctor));
	    }
	}
      else
	{
	  /* Plain scalar or vector element: emit the initialization.  */
	  tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
	  gimplify_and_add (init, pre_p);
	  ggc_free (init);
	}
    }
}
6284 :
6285 : /* Return the appropriate RHS predicate for this LHS. */
6286 :
6287 : gimple_predicate
6288 48518691 : rhs_predicate_for (tree lhs)
6289 : {
6290 48518691 : if (is_gimple_reg (lhs))
6291 : return is_gimple_reg_rhs_or_call;
6292 : else
6293 11372110 : return is_gimple_mem_rhs_or_call;
6294 : }
6295 :
6296 : /* Return the initial guess for an appropriate RHS predicate for this LHS,
6297 : before the LHS has been gimplified. */
6298 :
6299 : static gimple_predicate
6300 47367370 : initial_rhs_predicate_for (tree lhs)
6301 : {
6302 47367370 : if (is_gimple_reg_type (TREE_TYPE (lhs)))
6303 : return is_gimple_reg_rhs_or_call;
6304 : else
6305 2329839 : return is_gimple_mem_rhs_or_call;
6306 : }
6307 :
6308 : /* Gimplify a C99 compound literal expression. This just means adding
6309 : the DECL_EXPR before the current statement and using its anonymous
6310 : decl instead. */
6311 :
static enum gimplify_status
gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
				bool (*gimple_test_f) (tree),
				fallback_t fallback)
{
  tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
  tree decl = DECL_EXPR_DECL (decl_s);
  tree init = DECL_INITIAL (decl);
  /* Mark the decl as addressable if the compound literal
     expression is addressable now, otherwise it is marked too late
     after we gimplify the initialization expression.  */
  if (TREE_ADDRESSABLE (*expr_p))
    TREE_ADDRESSABLE (decl) = 1;
  /* Otherwise, if we don't need an lvalue and have a literal directly
     substitute it.  Check if it matches the gimple predicate, as
     otherwise we'd generate a new temporary, and we can as well just
     use the decl we already have.  */
  else if (!TREE_ADDRESSABLE (decl)
	   && !TREE_THIS_VOLATILE (decl)
	   && init
	   && (fallback & fb_lvalue) == 0
	   && gimple_test_f (init))
    {
      *expr_p = init;
      return GS_OK;
    }

  /* If the decl is not addressable, then it is being used in some
     expression or on the right hand side of a statement, and it can
     be put into a readonly data section.  */
  if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
    TREE_READONLY (decl) = 1;

  /* This decl isn't mentioned in the enclosing block, so add it to the
     list of temps.  FIXME it seems a bit of a kludge to say that
     anonymous artificial vars aren't pushed, but everything else is.  */
  if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
    gimple_add_tmp_var (decl);

  /* Emit the DECL_EXPR (and thus the initialization) ahead of the
     current statement, then use the anonymous decl in its place.  */
  gimplify_and_add (decl_s, pre_p);
  *expr_p = decl;
  return GS_OK;
}
6356 : /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
6357 : return a new CONSTRUCTOR if something changed. */
6358 :
6359 : static tree
6360 1837203 : optimize_compound_literals_in_ctor (tree orig_ctor)
6361 : {
6362 1837203 : tree ctor = orig_ctor;
6363 1837203 : vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
6364 1837203 : unsigned int idx, num = vec_safe_length (elts);
6365 :
6366 6156767 : for (idx = 0; idx < num; idx++)
6367 : {
6368 4319564 : tree value = (*elts)[idx].value;
6369 4319564 : tree newval = value;
6370 4319564 : if (TREE_CODE (value) == CONSTRUCTOR)
6371 530041 : newval = optimize_compound_literals_in_ctor (value);
6372 3789523 : else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
6373 : {
6374 103 : tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
6375 103 : tree decl = DECL_EXPR_DECL (decl_s);
6376 103 : tree init = DECL_INITIAL (decl);
6377 :
6378 103 : if (!TREE_ADDRESSABLE (value)
6379 103 : && !TREE_ADDRESSABLE (decl)
6380 103 : && init
6381 103 : && TREE_CODE (init) == CONSTRUCTOR)
6382 52 : newval = optimize_compound_literals_in_ctor (init);
6383 : }
6384 4319564 : if (newval == value)
6385 4319504 : continue;
6386 :
6387 60 : if (ctor == orig_ctor)
6388 : {
6389 49 : ctor = copy_node (orig_ctor);
6390 98 : CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
6391 49 : elts = CONSTRUCTOR_ELTS (ctor);
6392 : }
6393 60 : (*elts)[idx].value = newval;
6394 : }
6395 1837203 : return ctor;
6396 : }
6397 :
6398 : /* A subroutine of gimplify_modify_expr. Break out elements of a
6399 : CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
6400 :
6401 : Note that we still need to clear any elements that don't have explicit
6402 : initializers, so if not all elements are initialized we keep the
6403 : original MODIFY_EXPR, we just remove all of the constructor elements.
6404 :
6405 : If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
6406 : GS_ERROR if we would have to create a temporary when gimplifying
6407 : this constructor. Otherwise, return GS_OK.
6408 :
6409 : If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
6410 :
6411 : static enum gimplify_status
6412 1307110 : gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
6413 : bool want_value, bool notify_temp_creation)
6414 : {
6415 1307110 : tree object, ctor, type;
6416 1307110 : enum gimplify_status ret;
6417 1307110 : vec<constructor_elt, va_gc> *elts;
6418 1307110 : bool cleared = false;
6419 1307110 : bool is_empty_ctor = false;
6420 1307110 : bool is_init_expr = (TREE_CODE (*expr_p) == INIT_EXPR);
6421 :
6422 1307110 : gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
6423 :
6424 1307110 : if (!notify_temp_creation)
6425 : {
6426 1298783 : ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6427 : is_gimple_lvalue, fb_lvalue);
6428 1298783 : if (ret == GS_ERROR)
6429 : return ret;
6430 : }
6431 :
6432 1307110 : object = TREE_OPERAND (*expr_p, 0);
6433 1307110 : ctor = TREE_OPERAND (*expr_p, 1)
6434 1307110 : = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
6435 1307110 : type = TREE_TYPE (ctor);
6436 1307110 : elts = CONSTRUCTOR_ELTS (ctor);
6437 1307110 : ret = GS_ALL_DONE;
6438 :
6439 1307110 : switch (TREE_CODE (type))
6440 : {
6441 1299857 : case RECORD_TYPE:
6442 1299857 : case UNION_TYPE:
6443 1299857 : case QUAL_UNION_TYPE:
6444 1299857 : case ARRAY_TYPE:
6445 1299857 : {
6446 : /* Use readonly data for initializers of this or smaller size
6447 : regardless of the num_nonzero_elements / num_unique_nonzero_elements
6448 : ratio. */
6449 1299857 : const HOST_WIDE_INT min_unique_size = 64;
6450 : /* If num_nonzero_elements / num_unique_nonzero_elements ratio
6451 : is smaller than this, use readonly data. */
6452 1299857 : const int unique_nonzero_ratio = 8;
6453 : /* True if a single access of the object must be ensured. This is the
6454 : case if the target is volatile, the type is non-addressable and more
6455 : than one field need to be assigned. */
6456 1299857 : const bool ensure_single_access
6457 1299857 : = TREE_THIS_VOLATILE (object)
6458 255 : && !TREE_ADDRESSABLE (type)
6459 1300060 : && vec_safe_length (elts) > 1;
6460 1299857 : struct gimplify_init_ctor_preeval_data preeval_data;
6461 1299857 : HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
6462 1299857 : HOST_WIDE_INT num_unique_nonzero_elements;
6463 1299857 : int complete_p;
6464 1299857 : bool valid_const_initializer;
6465 :
6466 : /* Aggregate types must lower constructors to initialization of
6467 : individual elements. The exception is that a CONSTRUCTOR node
6468 : with no elements indicates zero-initialization of the whole. */
6469 1299857 : if (vec_safe_is_empty (elts))
6470 : {
6471 328406 : if (notify_temp_creation)
6472 9910 : return GS_OK;
6473 :
6474 : /* The var will be initialized and so appear on lhs of
6475 : assignment, it can't be TREE_READONLY anymore. */
6476 328358 : if (VAR_P (object))
6477 185513 : TREE_READONLY (object) = 0;
6478 :
6479 : is_empty_ctor = true;
6480 330852 : break;
6481 : }
6482 :
6483 : /* Fetch information about the constructor to direct later processing.
6484 : We might want to make static versions of it in various cases, and
6485 : can only do so if it known to be a valid constant initializer. */
6486 971451 : valid_const_initializer
6487 971451 : = categorize_ctor_elements (ctor, &num_nonzero_elements,
6488 : &num_unique_nonzero_elements,
6489 : &num_ctor_elements, &complete_p);
6490 :
6491 : /* If a const aggregate variable is being initialized, then it
6492 : should never be a lose to promote the variable to be static. */
6493 971451 : if (valid_const_initializer
6494 596220 : && num_nonzero_elements > 1
6495 442735 : && TREE_READONLY (object)
6496 5088 : && VAR_P (object)
6497 4711 : && !DECL_REGISTER (object)
6498 4704 : && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)
6499 3733 : || DECL_MERGEABLE (object))
6500 : /* For ctors that have many repeated nonzero elements
6501 : represented through RANGE_EXPRs, prefer initializing
6502 : those through runtime loops over copies of large amounts
6503 : of data from readonly data section. */
6504 971451 : && (num_unique_nonzero_elements
6505 2494 : > num_nonzero_elements / unique_nonzero_ratio
6506 0 : || ((unsigned HOST_WIDE_INT) int_size_in_bytes (type)
6507 : <= (unsigned HOST_WIDE_INT) min_unique_size)))
6508 : {
6509 2494 : if (notify_temp_creation)
6510 : return GS_ERROR;
6511 :
6512 2494 : DECL_INITIAL (object) = ctor;
6513 2494 : TREE_STATIC (object) = 1;
6514 2494 : if (!DECL_NAME (object) || DECL_NAMELESS (object))
6515 2040 : DECL_NAME (object) = create_tmp_var_name ("C");
6516 2494 : walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
6517 :
6518 : /* ??? C++ doesn't automatically append a .<number> to the
6519 : assembler name, and even when it does, it looks at FE private
6520 : data structures to figure out what that number should be,
6521 : which are not set for this variable. I suppose this is
6522 : important for local statics for inline functions, which aren't
6523 : "local" in the object file sense. So in order to get a unique
6524 : TU-local symbol, we must invoke the lhd version now. */
6525 2494 : lhd_set_decl_assembler_name (object);
6526 :
6527 2494 : *expr_p = NULL_TREE;
6528 2494 : break;
6529 : }
6530 :
6531 : /* The var will be initialized and so appear on lhs of
6532 : assignment, it can't be TREE_READONLY anymore. */
6533 968957 : if (VAR_P (object) && !notify_temp_creation)
6534 766347 : TREE_READONLY (object) = 0;
6535 :
6536 : /* If there are "lots" of initialized elements, even discounting
6537 : those that are not address constants (and thus *must* be
6538 : computed at runtime), then partition the constructor into
6539 : constant and non-constant parts. Block copy the constant
6540 : parts in, then generate code for the non-constant parts. */
6541 : /* TODO. There's code in cp/typeck.cc to do this. */
6542 :
6543 968957 : if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
6544 : /* store_constructor will ignore the clearing of variable-sized
6545 : objects. Initializers for such objects must explicitly set
6546 : every field that needs to be set. */
6547 : cleared = false;
6548 968937 : else if (!complete_p)
6549 : /* If the constructor isn't complete, clear the whole object
6550 : beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
6551 :
6552 : ??? This ought not to be needed. For any element not present
6553 : in the initializer, we should simply set them to zero. Except
6554 : we'd need to *find* the elements that are not present, and that
6555 : requires trickery to avoid quadratic compile-time behavior in
6556 : large cases or excessive memory use in small cases. */
6557 182383 : cleared = !CONSTRUCTOR_NO_CLEARING (ctor);
6558 786554 : else if (num_ctor_elements - num_nonzero_elements
6559 786554 : > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
6560 786554 : && num_nonzero_elements < num_ctor_elements / 4)
6561 : /* If there are "lots" of zeros, it's more efficient to clear
6562 : the memory and then set the nonzero elements. */
6563 : cleared = true;
6564 785672 : else if (ensure_single_access && num_nonzero_elements == 0)
6565 : /* If a single access to the target must be ensured and all elements
6566 : are zero, then it's optimal to clear whatever their number. */
6567 : cleared = true;
6568 : /* If the object is small enough to go in registers, and it's
6569 : not required to be constructed in memory, clear it first.
6570 : That will avoid wasting cycles preserving any padding bits
6571 : that might be there, and if there aren't any, the compiler
6572 : is smart enough to optimize the clearing out. */
6573 785663 : else if (complete_p <= 0
6574 14029 : && !TREE_ADDRESSABLE (ctor)
6575 14015 : && !TREE_THIS_VOLATILE (object)
6576 14012 : && (TYPE_MODE (type) != BLKmode || TYPE_NO_FORCE_BLK (type))
6577 792739 : && optimize)
6578 : cleared = true;
6579 : else
6580 : cleared = false;
6581 :
6582 : /* If there are "lots" of initialized elements, and all of them
6583 : are valid address constants, then the entire initializer can
6584 : be dropped to memory, and then memcpy'd out. Don't do this
6585 : for sparse arrays, though, as it's more efficient to follow
6586 : the standard CONSTRUCTOR behavior of memset followed by
6587 : individual element initialization. Also don't do this for small
6588 : all-zero initializers (which aren't big enough to merit
6589 : clearing), and don't try to make bitwise copies of
6590 : TREE_ADDRESSABLE types. */
6591 968957 : if (valid_const_initializer
6592 593726 : && complete_p
6593 426038 : && !(cleared || num_nonzero_elements == 0)
6594 333018 : && !TREE_ADDRESSABLE (type))
6595 : {
6596 331501 : HOST_WIDE_INT size = int_size_in_bytes (type);
6597 331501 : unsigned int align;
6598 :
6599 : /* ??? We can still get unbounded array types, at least
6600 : from the C++ front end. This seems wrong, but attempt
6601 : to work around it for now. */
6602 331501 : if (size < 0)
6603 : {
6604 9 : size = int_size_in_bytes (TREE_TYPE (object));
6605 9 : if (size >= 0)
6606 0 : TREE_TYPE (ctor) = type = TREE_TYPE (object);
6607 : }
6608 :
6609 : /* Find the maximum alignment we can assume for the object. */
6610 : /* ??? Make use of DECL_OFFSET_ALIGN. */
6611 331501 : if (DECL_P (object))
6612 317569 : align = DECL_ALIGN (object);
6613 : else
6614 13932 : align = TYPE_ALIGN (type);
6615 :
6616 : /* Do a block move either if the size is so small as to make
6617 : each individual move a sub-unit move on average, or if it
6618 : is so large as to make individual moves inefficient. */
6619 331501 : if (size > 0
6620 331492 : && num_nonzero_elements > 1
6621 : /* For ctors that have many repeated nonzero elements
6622 : represented through RANGE_EXPRs, prefer initializing
6623 : those through runtime loops over copies of large amounts
6624 : of data from readonly data section. */
6625 299149 : && (num_unique_nonzero_elements
6626 299149 : > num_nonzero_elements / unique_nonzero_ratio
6627 56 : || size <= min_unique_size)
6628 630594 : && (size < num_nonzero_elements
6629 298987 : || !can_move_by_pieces (size, align)))
6630 : {
6631 2840 : if (notify_temp_creation)
6632 : return GS_ERROR;
6633 :
6634 1422 : walk_tree (&ctor, force_labels_r, NULL, NULL);
6635 1422 : ctor = tree_output_constant_def (ctor);
6636 1422 : if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
6637 0 : ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
6638 1422 : TREE_OPERAND (*expr_p, 1) = ctor;
6639 :
6640 : /* This is no longer an assignment of a CONSTRUCTOR, but
6641 : we still may have processing to do on the LHS. So
6642 : pretend we didn't do anything here to let that happen. */
6643 1422 : return GS_UNHANDLED;
6644 : }
6645 : }
6646 :
6647 : /* If a single access to the target must be ensured and there are
6648 : nonzero elements or the zero elements are not assigned en masse,
6649 : initialize the target from a temporary. */
6650 966117 : if (ensure_single_access && (num_nonzero_elements > 0 || !cleared))
6651 : {
6652 167 : if (notify_temp_creation)
6653 : return GS_ERROR;
6654 :
6655 161 : tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
6656 161 : TREE_OPERAND (*expr_p, 0) = temp;
6657 161 : *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
6658 : *expr_p,
6659 : build2 (MODIFY_EXPR, void_type_node,
6660 : object, temp));
6661 161 : return GS_OK;
6662 : }
6663 :
6664 965950 : if (notify_temp_creation)
6665 : return GS_OK;
6666 :
6667 : /* If there are nonzero elements and if needed, pre-evaluate to capture
6668 : elements overlapping with the lhs into temporaries. We must do this
6669 : before clearing to fetch the values before they are zeroed-out. */
6670 959095 : if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
6671 : {
6672 214924 : preeval_data.lhs_base_decl = get_base_address (object);
6673 214924 : if (!DECL_P (preeval_data.lhs_base_decl))
6674 41209 : preeval_data.lhs_base_decl = NULL;
6675 214924 : preeval_data.lhs_alias_set = get_alias_set (object);
6676 :
6677 214924 : gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
6678 : pre_p, post_p, &preeval_data);
6679 : }
6680 :
6681 959095 : bool ctor_has_side_effects_p
6682 959095 : = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1));
6683 :
6684 959095 : if (cleared)
6685 : {
6686 : /* Zap the CONSTRUCTOR element list, which simplifies this case.
6687 : Note that we still have to gimplify, in order to handle the
6688 : case of variable sized types. Avoid shared tree structures. */
6689 189516 : CONSTRUCTOR_ELTS (ctor) = NULL;
6690 189516 : TREE_SIDE_EFFECTS (ctor) = 0;
6691 189516 : object = unshare_expr (object);
6692 189516 : gimplify_stmt (expr_p, pre_p);
6693 : }
6694 :
6695 : /* If we have not block cleared the object, or if there are nonzero
6696 : elements in the constructor, or if the constructor has side effects,
6697 : add assignments to the individual scalar fields of the object. */
6698 189516 : if (!cleared
6699 189516 : || num_nonzero_elements > 0
6700 17722 : || ctor_has_side_effects_p)
6701 941718 : gimplify_init_ctor_eval (object, elts, pre_p, cleared);
6702 :
6703 959095 : *expr_p = NULL_TREE;
6704 : }
6705 959095 : break;
6706 :
6707 0 : case COMPLEX_TYPE:
6708 0 : {
6709 0 : tree r, i;
6710 :
6711 0 : if (notify_temp_creation)
6712 : return GS_OK;
6713 :
6714 : /* Extract the real and imaginary parts out of the ctor. */
6715 0 : gcc_assert (elts->length () == 2);
6716 0 : r = (*elts)[0].value;
6717 0 : i = (*elts)[1].value;
6718 0 : if (r == NULL || i == NULL)
6719 : {
6720 0 : tree zero = build_zero_cst (TREE_TYPE (type));
6721 0 : if (r == NULL)
6722 0 : r = zero;
6723 0 : if (i == NULL)
6724 0 : i = zero;
6725 : }
6726 :
6727 : /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
6728 : represent creation of a complex value. */
6729 0 : if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
6730 : {
6731 0 : ctor = build_complex (type, r, i);
6732 0 : TREE_OPERAND (*expr_p, 1) = ctor;
6733 : }
6734 : else
6735 : {
6736 0 : ctor = build2 (COMPLEX_EXPR, type, r, i);
6737 0 : TREE_OPERAND (*expr_p, 1) = ctor;
6738 0 : ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
6739 : pre_p,
6740 : post_p,
6741 0 : rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
6742 : fb_rvalue);
6743 : }
6744 : }
6745 : break;
6746 :
6747 7253 : case VECTOR_TYPE:
6748 7253 : {
6749 7253 : unsigned HOST_WIDE_INT ix;
6750 7253 : constructor_elt *ce;
6751 :
6752 7253 : if (notify_temp_creation)
6753 1307110 : return GS_OK;
6754 :
6755 : /* Vector types use CONSTRUCTOR all the way through gimple
6756 : compilation as a general initializer. */
6757 67043 : FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
6758 : {
6759 59790 : enum gimplify_status tret;
6760 59790 : tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
6761 : fb_rvalue);
6762 59790 : if (tret == GS_ERROR)
6763 : ret = GS_ERROR;
6764 59789 : else if (TREE_STATIC (ctor)
6765 59821 : && !initializer_constant_valid_p (ce->value,
6766 32 : TREE_TYPE (ce->value)))
6767 32 : TREE_STATIC (ctor) = 0;
6768 : }
6769 7253 : recompute_constructor_flags (ctor);
6770 :
6771 : /* Go ahead and simplify constant constructors to VECTOR_CST. */
6772 7253 : if (TREE_CONSTANT (ctor))
6773 : {
6774 2505 : bool constant_p = true;
6775 : tree value;
6776 :
6777 : /* Even when ctor is constant, it might contain non-*_CST
6778 : elements, such as addresses or trapping values like
6779 : 1.0/0.0 - 1.0/0.0. Such expressions don't belong
6780 : in VECTOR_CST nodes. */
6781 2505 : FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
6782 2323 : if (!CONSTANT_CLASS_P (value))
6783 : {
6784 : constant_p = false;
6785 : break;
6786 : }
6787 :
6788 182 : if (constant_p)
6789 : {
6790 182 : TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
6791 182 : break;
6792 : }
6793 : }
6794 :
6795 7071 : if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
6796 918 : TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
6797 : }
6798 : break;
6799 :
6800 0 : default:
6801 : /* So how did we get a CONSTRUCTOR for a scalar type? */
6802 0 : gcc_unreachable ();
6803 : }
6804 :
6805 1297200 : if (ret == GS_ERROR)
6806 : return GS_ERROR;
6807 : /* If we have gimplified both sides of the initializer but have
6808 : not emitted an assignment, do so now. */
6809 1297199 : if (*expr_p
6810 : /* If the type is an empty type, we don't need to emit the
6811 : assignment. */
6812 1297199 : && !is_empty_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
6813 : {
6814 331786 : tree lhs = TREE_OPERAND (*expr_p, 0);
6815 331786 : tree rhs = TREE_OPERAND (*expr_p, 1);
6816 331786 : if (want_value && object == lhs)
6817 12 : lhs = unshare_expr (lhs);
6818 331786 : gassign *init = gimple_build_assign (lhs, rhs);
6819 331786 : gimplify_seq_add_stmt (pre_p, init);
6820 : }
6821 1297199 : if (want_value)
6822 : {
6823 19 : *expr_p = object;
6824 19 : ret = GS_OK;
6825 : }
6826 : else
6827 : {
6828 1297180 : *expr_p = NULL;
6829 1297180 : ret = GS_ALL_DONE;
6830 : }
6831 :
6832 : /* If the user requests to initialize automatic variables, we
6833 : should initialize paddings inside the variable. Add a call to
6834 : __builtin_clear_pading (&object, 0, for_auto_init = true) to
6835 : initialize paddings of object always to zero regardless of
6836 : INIT_TYPE. Note, we will not insert this call if the aggregate
6837 : variable has be completely cleared already or it's initialized
6838 : with an empty constructor. We cannot insert this call if the
6839 : variable is a gimple register since __builtin_clear_padding will take
6840 : the address of the variable. As a result, if a long double/_Complex long
6841 : double variable will be spilled into stack later, its padding cannot
6842 : be cleared with __builtin_clear_padding. We should clear its padding
6843 : when it is spilled into memory. */
6844 1297199 : if (is_init_expr
6845 921367 : && !is_gimple_reg (object)
6846 916834 : && clear_padding_type_may_have_padding_p (type)
6847 876202 : && ((AGGREGATE_TYPE_P (type) && !cleared && !is_empty_ctor)
6848 221677 : || !AGGREGATE_TYPE_P (type))
6849 654525 : && var_needs_auto_init_p (object)
6850 1315884 : && flag_auto_var_init != AUTO_INIT_CXX26)
6851 40 : gimple_add_padding_init_for_auto_var (object, false, pre_p);
6852 :
6853 : return ret;
6854 : }
6855 :
6856 : /* Given a pointer value OP0, return a simplified version of an
6857 : indirection through OP0, or NULL_TREE if no simplification is
6858 : possible. This may only be applied to a rhs of an expression.
6859 : Note that the resulting type may be different from the type pointed
6860 : to in the sense that it is still compatible from the langhooks
6861 : point of view. */
6862 :
6863 : static tree
6864 516074 : gimple_fold_indirect_ref_rhs (tree t)
6865 : {
6866 0 : return gimple_fold_indirect_ref (t);
6867 : }
6868 :
/* Subroutine of gimplify_modify_expr to do simplifications of
   MODIFY_EXPRs based on the code of the RHS.  We loop for as long as
   something changes.

   EXPR_P points to the whole MODIFY_EXPR/INIT_EXPR; FROM_P and TO_P
   point into it at the RHS and LHS operands respectively.  Side
   effects are emitted to PRE_P and POST_P.  WANT_VALUE is true if the
   value of the assignment is itself used in an enclosing expression.

   Returns GS_UNHANDLED when no simplification applied; otherwise the
   status of the simplification that was performed (GS_ERROR on
   failure).  Some cases return directly after fully rewriting
   *EXPR_P, in which case the caller must not process it further.  */

static enum gimplify_status
gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
			  gimple_seq *pre_p, gimple_seq *post_p,
			  bool want_value)
{
  enum gimplify_status ret = GS_UNHANDLED;
  bool changed;

  /* Iterate: one simplification may expose another (e.g. stripping a
     TARGET_EXPR can expose a CONSTRUCTOR).  CHANGED is set by any case
     that rewrote *FROM_P without returning.  */
  do
    {
      changed = false;
      switch (TREE_CODE (*from_p))
	{
	case VAR_DECL:
	  /* If we're assigning from a read-only variable initialized with
	     a constructor and not volatile, do the direct assignment from
	     the constructor, but only if the target is not volatile either
	     since this latter assignment might end up being done on a per
	     field basis.  However, if the target is volatile and the type
	     is aggregate and non-addressable, gimplify_init_constructor
	     knows that it needs to ensure a single access to the target
	     and it will return GS_OK only in this case.  */
	  if (TREE_READONLY (*from_p)
	      && DECL_INITIAL (*from_p)
	      && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR
	      && !TREE_THIS_VOLATILE (*from_p)
	      && (!TREE_THIS_VOLATILE (*to_p)
		  || (AGGREGATE_TYPE_P (TREE_TYPE (*to_p))
		      && !TREE_ADDRESSABLE (TREE_TYPE (*to_p)))))
	    {
	      tree old_from = *from_p;
	      enum gimplify_status subret;

	      /* Move the constructor into the RHS.  */
	      *from_p = unshare_expr (DECL_INITIAL (*from_p));

	      /* Let's see if gimplify_init_constructor will need to put
		 it in memory.  Note the dry-run arguments (NULL seqs,
		 notify_temp_creation == true): nothing is emitted yet.  */
	      subret = gimplify_init_constructor (expr_p, NULL, NULL,
						  false, true);
	      if (subret == GS_ERROR)
		{
		  /* If so, revert the change.  */
		  *from_p = old_from;
		}
	      else
		{
		  ret = GS_OK;
		  changed = true;
		}
	    }
	  break;
	case INDIRECT_REF:
	  if (!TREE_ADDRESSABLE (TREE_TYPE (*from_p)))
	    /* If we have code like

	     *(const A*)(A*)&x

	     where the type of "x" is a (possibly cv-qualified variant
	     of "A"), treat the entire expression as identical to "x".
	     This kind of code arises in C++ when an object is bound
	     to a const reference, and if "x" is a TARGET_EXPR we want
	     to take advantage of the optimization below.  But not if
	     the type is TREE_ADDRESSABLE; then C++17 says that the
	     TARGET_EXPR needs to be a temporary.  */
	    if (tree t
		= gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0)))
	      {
		bool volatile_p = TREE_THIS_VOLATILE (*from_p);
		/* Preserve volatility of the original indirection on the
		   folded replacement.  */
		if (TREE_THIS_VOLATILE (t) != volatile_p)
		  {
		    if (DECL_P (t))
		      t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
						    build_fold_addr_expr (t));
		    if (REFERENCE_CLASS_P (t))
		      TREE_THIS_VOLATILE (t) = volatile_p;
		  }
		*from_p = t;
		ret = GS_OK;
		changed = true;
	      }
	  break;

	case TARGET_EXPR:
	  {
	    /* If we are initializing something from a TARGET_EXPR, strip the
	       TARGET_EXPR and initialize it directly, if possible.  This can't
	       be done if the initializer is void, since that implies that the
	       temporary is set in some non-trivial way.

	       ??? What about code that pulls out the temp and uses it
	       elsewhere?  I think that such code never uses the TARGET_EXPR as
	       an initializer.  If I'm wrong, we'll die because the temp won't
	       have any RTL.  In that case, I guess we'll need to replace
	       references somehow.  */
	    tree init = TARGET_EXPR_INITIAL (*from_p);

	    if (init
		&& (TREE_CODE (*expr_p) != MODIFY_EXPR
		    || !TARGET_EXPR_NO_ELIDE (*from_p))
		&& !VOID_TYPE_P (TREE_TYPE (init)))
	      {
		*from_p = init;
		ret = GS_OK;
		changed = true;
	      }
	  }
	  break;

	case COMPOUND_EXPR:
	  /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
	     caught.  */
	  gimplify_compound_expr (from_p, pre_p, true);
	  ret = GS_OK;
	  changed = true;
	  break;

	case CONSTRUCTOR:
	  /* If we already made some changes, let the front end have a
	     crack at this before we break it down.  */
	  if (ret != GS_UNHANDLED)
	    break;

	  /* If we're initializing from a CONSTRUCTOR, break this into
	     individual MODIFY_EXPRs.  */
	  ret = gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
					   false);
	  return ret;

	case COND_EXPR:
	  /* If we're assigning to a non-register type, push the assignment
	     down into the branches.  This is mandatory for ADDRESSABLE types,
	     since we cannot generate temporaries for such, but it saves a
	     copy in other cases as well.
	     Also avoid an extra temporary and copy when assigning to
	     a register.  */
	  if (!is_gimple_reg_type (TREE_TYPE (*from_p))
	      || (is_gimple_reg (*to_p) && !gimplify_ctxp->allow_rhs_cond_expr))
	    {
	      /* This code should mirror the code in gimplify_cond_expr.  */
	      enum tree_code code = TREE_CODE (*expr_p);
	      tree cond = *from_p;
	      tree result = *to_p;

	      ret = gimplify_expr (&result, pre_p, post_p,
				   is_gimple_lvalue, fb_lvalue);
	      if (ret != GS_ERROR)
		ret = GS_OK;

	      /* If we are going to write RESULT more than once, clear
		 TREE_READONLY flag, otherwise we might incorrectly promote
		 the variable to static const and initialize it at compile
		 time in one of the branches.  */
	      if (VAR_P (result)
		  && TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node
		  && TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
		TREE_READONLY (result) = 0;
	      if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
		TREE_OPERAND (cond, 1)
		  = build2 (code, void_type_node, result,
			    TREE_OPERAND (cond, 1));
	      if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
		TREE_OPERAND (cond, 2)
		  = build2 (code, void_type_node, unshare_expr (result),
			    TREE_OPERAND (cond, 2));

	      /* The COND_EXPR now carries the assignments; it yields no
		 value itself any more.  */
	      TREE_TYPE (cond) = void_type_node;
	      recalculate_side_effects (cond);

	      if (want_value)
		{
		  gimplify_and_add (cond, pre_p);
		  *expr_p = unshare_expr (result);
		}
	      else
		*expr_p = cond;

	      return ret;
	    }
	  break;

	case CALL_EXPR:
	  /* For calls that return in memory, give *to_p as the CALL_EXPR's
	     return slot so that we don't generate a temporary.  */
	  if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
	      && aggregate_value_p (*from_p, *from_p))
	    {
	      bool use_target;

	      if (!(rhs_predicate_for (*to_p))(*from_p))
		/* If we need a temporary, *to_p isn't accurate.  */
		use_target = false;
	      /* It's OK to use the return slot directly unless it's an NRV. */
	      else if (TREE_CODE (*to_p) == RESULT_DECL
		       && DECL_NAME (*to_p) == NULL_TREE
		       && needs_to_live_in_memory (*to_p))
		use_target = true;
	      else if (is_gimple_reg_type (TREE_TYPE (*to_p))
		       || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
		/* Don't force regs into memory.  */
		use_target = false;
	      else if (TREE_CODE (*expr_p) == INIT_EXPR)
		/* It's OK to use the target directly if it's being
		   initialized.  */
		use_target = true;
	      else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
		       != INTEGER_CST)
		/* Always use the target and thus RSO for variable-sized types.
		   GIMPLE cannot deal with a variable-sized assignment
		   embedded in a call statement.  */
		use_target = true;
	      else if (TREE_CODE (*to_p) != SSA_NAME
		      && (!is_gimple_variable (*to_p)
			  || needs_to_live_in_memory (*to_p)))
		/* Don't use the original target if it's already addressable;
		   if its address escapes, and the called function uses the
		   NRV optimization, a conforming program could see *to_p
		   change before the called function returns; see c++/19317.
		   When optimizing, the return_slot pass marks more functions
		   as safe after we have escape info.  */
		use_target = false;
	      else
		use_target = true;

	      if (use_target)
		{
		  CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
		  mark_addressable (*to_p);
		}
	    }
	  break;

	case WITH_SIZE_EXPR:
	  /* Likewise for calls that return an aggregate of non-constant size,
	     since we would not be able to generate a temporary at all.  */
	  if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
	    {
	      *from_p = TREE_OPERAND (*from_p, 0);
	      /* We don't change ret in this case because the
		 WITH_SIZE_EXPR might have been added in
		 gimplify_modify_expr, so returning GS_OK would lead to an
		 infinite loop.  */
	      changed = true;
	    }
	  break;

	  /* If we're initializing from a container, push the initialization
	     inside it.  */
	case CLEANUP_POINT_EXPR:
	case BIND_EXPR:
	case STATEMENT_LIST:
	  {
	    tree wrap = *from_p;
	    tree t;

	    ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
				 fb_lvalue);
	    if (ret != GS_ERROR)
	      ret = GS_OK;

	    t = voidify_wrapper_expr (wrap, *expr_p);
	    gcc_assert (t == *expr_p);

	    if (want_value)
	      {
		gimplify_and_add (wrap, pre_p);
		*expr_p = unshare_expr (*to_p);
	      }
	    else
	      *expr_p = wrap;
	    return GS_OK;
	  }

	case NOP_EXPR:
	  /* Pull out compound literal expressions from a NOP_EXPR.
	     Those are created in the C FE to drop qualifiers during
	     lvalue conversion.  */
	  if ((TREE_CODE (TREE_OPERAND (*from_p, 0)) == COMPOUND_LITERAL_EXPR)
	      && tree_ssa_useless_type_conversion (*from_p))
	    {
	      *from_p = TREE_OPERAND (*from_p, 0);
	      ret = GS_OK;
	      changed = true;
	    }
	  break;

	case COMPOUND_LITERAL_EXPR:
	  {
	    tree complit = TREE_OPERAND (*expr_p, 1);
	    tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
	    tree decl = DECL_EXPR_DECL (decl_s);
	    tree init = DECL_INITIAL (decl);

	    /* struct T x = (struct T) { 0, 1, 2 } can be optimized
	       into struct T x = { 0, 1, 2 } if the address of the
	       compound literal has never been taken.  */
	    if (!TREE_ADDRESSABLE (complit)
		&& !TREE_ADDRESSABLE (decl)
		&& init)
	      {
		*expr_p = copy_node (*expr_p);
		TREE_OPERAND (*expr_p, 1) = init;
		return GS_OK;
	      }
	  }
	  /* FALLTHRU -- when the literal cannot be flattened we fall
	     through to the default case, which only breaks.  */

	default:
	  break;
	}
    }
  while (changed);

  return ret;
}
7186 :
7187 :
7188 : /* Return true if T looks like a valid GIMPLE statement. */
7189 :
7190 : static bool
7191 19076150 : is_gimple_stmt (tree t)
7192 : {
7193 19076150 : const enum tree_code code = TREE_CODE (t);
7194 :
7195 19076150 : switch (code)
7196 : {
7197 1715648 : case NOP_EXPR:
7198 : /* The only valid NOP_EXPR is the empty statement. */
7199 1715648 : return IS_EMPTY_STMT (t);
7200 :
7201 0 : case BIND_EXPR:
7202 0 : case COND_EXPR:
7203 : /* These are only valid if they're void. */
7204 0 : return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
7205 :
7206 : case SWITCH_EXPR:
7207 : case GOTO_EXPR:
7208 : case RETURN_EXPR:
7209 : case LABEL_EXPR:
7210 : case CASE_LABEL_EXPR:
7211 : case TRY_CATCH_EXPR:
7212 : case TRY_FINALLY_EXPR:
7213 : case EH_FILTER_EXPR:
7214 : case CATCH_EXPR:
7215 : case ASM_EXPR:
7216 : case STATEMENT_LIST:
7217 : case OACC_PARALLEL:
7218 : case OACC_KERNELS:
7219 : case OACC_SERIAL:
7220 : case OACC_DATA:
7221 : case OACC_HOST_DATA:
7222 : case OACC_DECLARE:
7223 : case OACC_UPDATE:
7224 : case OACC_ENTER_DATA:
7225 : case OACC_EXIT_DATA:
7226 : case OACC_CACHE:
7227 : case OMP_PARALLEL:
7228 : case OMP_FOR:
7229 : case OMP_SIMD:
7230 : case OMP_DISTRIBUTE:
7231 : case OMP_LOOP:
7232 : case OMP_TILE:
7233 : case OMP_UNROLL:
7234 : case OACC_LOOP:
7235 : case OMP_SCAN:
7236 : case OMP_SCOPE:
7237 : case OMP_DISPATCH:
7238 : case OMP_SECTIONS:
7239 : case OMP_SECTION:
7240 : case OMP_STRUCTURED_BLOCK:
7241 : case OMP_SINGLE:
7242 : case OMP_MASTER:
7243 : case OMP_MASKED:
7244 : case OMP_TASKGROUP:
7245 : case OMP_ORDERED:
7246 : case OMP_CRITICAL:
7247 : case OMP_METADIRECTIVE:
7248 : case OMP_TASK:
7249 : case OMP_TARGET:
7250 : case OMP_TARGET_DATA:
7251 : case OMP_TARGET_UPDATE:
7252 : case OMP_TARGET_ENTER_DATA:
7253 : case OMP_TARGET_EXIT_DATA:
7254 : case OMP_TASKLOOP:
7255 : case OMP_TEAMS:
7256 : /* These are always void. */
7257 : return true;
7258 :
7259 : case CALL_EXPR:
7260 : case MODIFY_EXPR:
7261 : case PREDICT_EXPR:
7262 : /* These are valid regardless of their type. */
7263 : return true;
7264 :
7265 : default:
7266 : return false;
7267 : }
7268 : }
7269 :
7270 :
7271 : /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
7272 : a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a gimple register.
7273 :
7274 : IMPORTANT NOTE: This promotion is performed by introducing a load of the
7275 : other, unmodified part of the complex object just before the total store.
7276 : As a consequence, if the object is still uninitialized, an undefined value
7277 : will be loaded into a register, which may result in a spurious exception
7278 : if the register is floating-point and the value happens to be a signaling
7279 : NaN for example. Then the fully-fledged complex operations lowering pass
7280 : followed by a DCE pass are necessary in order to fix things up. */
7281 :
7282 : static enum gimplify_status
7283 1960 : gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
7284 : bool want_value)
7285 : {
7286 1960 : enum tree_code code, ocode;
7287 1960 : tree lhs, rhs, new_rhs, other, realpart, imagpart;
7288 :
7289 1960 : lhs = TREE_OPERAND (*expr_p, 0);
7290 1960 : rhs = TREE_OPERAND (*expr_p, 1);
7291 1960 : code = TREE_CODE (lhs);
7292 1960 : lhs = TREE_OPERAND (lhs, 0);
7293 :
7294 1960 : ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
7295 1960 : other = build1 (ocode, TREE_TYPE (rhs), lhs);
7296 1960 : suppress_warning (other);
7297 1960 : other = get_formal_tmp_var (other, pre_p);
7298 :
7299 1960 : realpart = code == REALPART_EXPR ? rhs : other;
7300 966 : imagpart = code == REALPART_EXPR ? other : rhs;
7301 :
7302 1960 : if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
7303 0 : new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
7304 : else
7305 1960 : new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
7306 :
7307 1960 : gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
7308 1960 : *expr_p = (want_value) ? rhs : NULL_TREE;
7309 :
7310 1960 : return GS_ALL_DONE;
7311 : }
7312 :
/* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.

      modify_expr
	      : varname '=' rhs
	      | '*' ID '=' rhs

    PRE_P points to the list where side effects that must happen before
	*EXPR_P should be stored.

    POST_P points to the list where side effects that must happen after
	*EXPR_P should be stored.

    WANT_VALUE is nonzero iff we want to use the value of this expression
	in another expression.

    Returns GS_ALL_DONE (or GS_OK when a value is wanted) on success,
    GS_ERROR if either operand is erroneous or a sub-gimplification
    fails.  Also handles INIT_EXPR.  */

static enum gimplify_status
gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
		      bool want_value)
{
  tree *from_p = &TREE_OPERAND (*expr_p, 1);
  tree *to_p = &TREE_OPERAND (*expr_p, 0);
  enum gimplify_status ret = GS_UNHANDLED;
  gimple *assign;
  location_t loc = EXPR_LOCATION (*expr_p);
  gimple_stmt_iterator gsi;

  if (error_operand_p (*from_p) || error_operand_p (*to_p))
    return GS_ERROR;

  gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
	      || TREE_CODE (*expr_p) == INIT_EXPR);

  /* Trying to simplify a clobber using normal logic doesn't work,
     so handle it here.  */
  if (TREE_CLOBBER_P (*from_p))
    {
      ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
      if (ret == GS_ERROR)
	return ret;
      gcc_assert (!want_value);
      if (!VAR_P (*to_p) && TREE_CODE (*to_p) != MEM_REF)
	{
	  /* Force the clobbered lvalue into a MEM_REF through a
	     temporary holding its address.  */
	  tree addr = get_initialized_tmp_var (build_fold_addr_expr (*to_p),
					       pre_p, post_p);
	  *to_p = build_simple_mem_ref_loc (EXPR_LOCATION (*to_p), addr);
	}
      gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
      *expr_p = NULL;
      return GS_ALL_DONE;
    }

  /* Convert initialization from an empty variable-size CONSTRUCTOR to
     memset.  */
  if (TREE_TYPE (*from_p) != error_mark_node
      && TYPE_SIZE_UNIT (TREE_TYPE (*from_p))
      && !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (*from_p)))
      && TREE_CODE (*from_p) == CONSTRUCTOR
      && CONSTRUCTOR_NELTS (*from_p) == 0)
    {
      maybe_with_size_expr (from_p);
      gcc_assert (TREE_CODE (*from_p) == WITH_SIZE_EXPR);
      return gimplify_modify_expr_to_memset (expr_p,
					     TREE_OPERAND (*from_p, 1),
					     want_value, pre_p);
    }

  /* Insert pointer conversions required by the middle-end that are not
     required by the frontend.  This fixes middle-end type checking for
     for example gcc.dg/redecl-6.c.  */
  if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
    {
      STRIP_USELESS_TYPE_CONVERSION (*from_p);
      if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
	*from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
    }

  /* See if any simplifications can be done based on what the RHS is.  */
  ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
				  want_value);
  if (ret != GS_UNHANDLED)
    return ret;

  /* For empty types only gimplify the left hand side and right hand
     side as statements and throw away the assignment.  Do this after
     gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
     types properly.  */
  if (is_empty_type (TREE_TYPE (*from_p))
      && !want_value
      /* Don't do this for calls that return addressable types, expand_call
	 relies on those having a lhs.  */
      && !(TREE_ADDRESSABLE (TREE_TYPE (*from_p))
	   && TREE_CODE (*from_p) == CALL_EXPR))
    {
      suppress_warning (*from_p, OPT_Wunused_result);
      gimplify_stmt (from_p, pre_p);
      gimplify_stmt (to_p, pre_p);
      *expr_p = NULL_TREE;
      return GS_ALL_DONE;
    }

  /* If the value being copied is of variable width, compute the length
     of the copy into a WITH_SIZE_EXPR.  Note that we need to do this
     before gimplifying any of the operands so that we can resolve any
     PLACEHOLDER_EXPRs in the size.  Also note that the RTL expander uses
     the size of the expression to be copied, not of the destination, so
     that is what we must do here.  */
  maybe_with_size_expr (from_p);

  /* As a special case, we have to temporarily allow for assignments
     with a CALL_EXPR on the RHS.  Since in GIMPLE a function call is
     a toplevel statement, when gimplifying the GENERIC expression
     MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
     GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.

     Instead, we need to create the tuple GIMPLE_CALL <a, foo>.  To
     prevent gimplify_expr from trying to create a new temporary for
     foo's LHS, we tell it that it should only gimplify until it
     reaches the CALL_EXPR.  On return from gimplify_expr, the newly
     created GIMPLE_CALL <foo> will be the last statement in *PRE_P
     and all we need to do here is set 'a' to be its LHS.  */

  /* Gimplify the RHS first for C++17 and bug 71104.  */
  gimple_predicate initial_pred = initial_rhs_predicate_for (*to_p);
  ret = gimplify_expr (from_p, pre_p, post_p, initial_pred, fb_rvalue);
  if (ret == GS_ERROR)
    return ret;

  /* Then gimplify the LHS.  */
  /* If we gimplified the RHS to a CALL_EXPR and that call may return
     twice we have to make sure to gimplify into non-SSA as otherwise
     the abnormal edge added later will make those defs not dominate
     their uses.
     ??? Technically this applies only to the registers used in the
     resulting non-register *TO_P.  */
  bool saved_into_ssa = gimplify_ctxp->into_ssa;
  if (saved_into_ssa
      && TREE_CODE (*from_p) == CALL_EXPR
      && call_expr_flags (*from_p) & ECF_RETURNS_TWICE)
    gimplify_ctxp->into_ssa = false;
  ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
  gimplify_ctxp->into_ssa = saved_into_ssa;
  if (ret == GS_ERROR)
    return ret;

  /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
     guess for the predicate was wrong.  */
  gimple_predicate final_pred = rhs_predicate_for (*to_p);
  if (final_pred != initial_pred)
    {
      ret = gimplify_expr (from_p, pre_p, post_p, final_pred, fb_rvalue);
      if (ret == GS_ERROR)
	return ret;
    }

  /* In case of va_arg internal fn wrappped in a WITH_SIZE_EXPR, add the type
     size as argument to the call.  */
  if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
    {
      tree call = TREE_OPERAND (*from_p, 0);
      tree vlasize = TREE_OPERAND (*from_p, 1);

      if (TREE_CODE (call) == CALL_EXPR
	  && CALL_EXPR_IFN (call) == IFN_VA_ARG)
	{
	  /* Rebuild the IFN_VA_ARG call with the VLA size appended.  */
	  int nargs = call_expr_nargs (call);
	  tree type = TREE_TYPE (call);
	  tree ap = CALL_EXPR_ARG (call, 0);
	  tree tag = CALL_EXPR_ARG (call, 1);
	  tree aptag = CALL_EXPR_ARG (call, 2);
	  tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
						       IFN_VA_ARG, type,
						       nargs + 1, ap, tag,
						       aptag, vlasize);
	  TREE_OPERAND (*from_p, 0) = newcall;
	}
    }

  /* Now see if the above changed *from_p to something we handle specially.  */
  ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
				  want_value);
  if (ret != GS_UNHANDLED)
    return ret;

  /* If we've got a variable sized assignment between two lvalues (i.e. does
     not involve a call), then we can make things a bit more straightforward
     by converting the assignment to memcpy or memset.  */
  if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
    {
      tree from = TREE_OPERAND (*from_p, 0);
      tree size = TREE_OPERAND (*from_p, 1);

      if (TREE_CODE (from) == CONSTRUCTOR)
	return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
      else if (is_gimple_addressable (from)
	       && ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (*to_p)))
	       && ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (from))))
	{
	  *from_p = from;
	  return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
						 pre_p);
	}
    }

  /* Transform partial stores to non-addressable complex variables into
     total stores.  This allows us to use real instead of virtual operands
     for these variables, which improves optimization.  */
  if ((TREE_CODE (*to_p) == REALPART_EXPR
       || TREE_CODE (*to_p) == IMAGPART_EXPR)
      && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
    return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);

  /* Try to alleviate the effects of the gimplification creating artificial
     temporaries (see for example is_gimple_reg_rhs) on the debug info, but
     make sure not to create DECL_DEBUG_EXPR links across functions.  */
  if (!gimplify_ctxp->into_ssa
      && VAR_P (*from_p)
      && DECL_IGNORED_P (*from_p)
      && DECL_P (*to_p)
      && !DECL_IGNORED_P (*to_p)
      && decl_function_context (*to_p) == current_function_decl
      && decl_function_context (*from_p) == current_function_decl)
    {
      if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
	DECL_NAME (*from_p)
	  = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
      DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
      SET_DECL_DEBUG_EXPR (*from_p, *to_p);
    }

  /* When a value is wanted from a volatile LHS, read the RHS through a
     temporary so we do not re-read the volatile location below.  */
  if (want_value && TREE_THIS_VOLATILE (*to_p))
    *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);

  if (TREE_CODE (*from_p) == CALL_EXPR)
    {
      /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
	 instead of a GIMPLE_ASSIGN.  */
      gcall *call_stmt;
      if (CALL_EXPR_FN (*from_p) == NULL_TREE)
	{
	  /* Gimplify internal functions created in the FEs.  */
	  int nargs = call_expr_nargs (*from_p), i;
	  enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
	  auto_vec<tree> vargs (nargs);

	  for (i = 0; i < nargs; i++)
	    {
	      gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
			    EXPR_LOCATION (*from_p));
	      vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
	    }
	  call_stmt = gimple_build_call_internal_vec (ifn, vargs);
	  gimple_call_set_nothrow (call_stmt, TREE_NOTHROW (*from_p));
	  gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
	}
      else
	{
	  tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
	  CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
	  STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
	  tree fndecl = get_callee_fndecl (*from_p);
	  /* __builtin_expect with three arguments is lowered to the
	     internal function form here.  */
	  if (fndecl
	      && fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
	      && call_expr_nargs (*from_p) == 3)
	    call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
						    CALL_EXPR_ARG (*from_p, 0),
						    CALL_EXPR_ARG (*from_p, 1),
						    CALL_EXPR_ARG (*from_p, 2));
	  else
	    {
	      call_stmt = gimple_build_call_from_tree (*from_p, fnptrtype);
	    }
	}
      notice_special_calls (call_stmt);
      if (!gimple_call_noreturn_p (call_stmt) || !should_remove_lhs_p (*to_p))
	gimple_call_set_lhs (call_stmt, *to_p);
      else if (TREE_CODE (*to_p) == SSA_NAME)
	/* The above is somewhat premature, avoid ICEing later for a
	   SSA name w/o a definition.  We may have uses in the GIMPLE IL.
	   ??? This doesn't make it a default-def.  */
	SSA_NAME_DEF_STMT (*to_p) = gimple_build_nop ();

      assign = call_stmt;
    }
  else
    {
      assign = gimple_build_assign (*to_p, *from_p);
      gimple_set_location (assign, EXPR_LOCATION (*expr_p));
      if (COMPARISON_CLASS_P (*from_p))
	copy_warning (assign, *from_p);
    }

  if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
    {
      /* We should have got an SSA name from the start.  */
      gcc_assert (TREE_CODE (*to_p) == SSA_NAME
		  || ! gimple_in_ssa_p (cfun));
    }

  gimplify_seq_add_stmt (pre_p, assign);
  gsi = gsi_last (*pre_p);
  maybe_fold_stmt (&gsi);

  if (want_value)
    {
      /* Re-reading a volatile LHS is wrong; yield the temporary made
	 above instead.  */
      *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
      return GS_OK;
    }
  else
    *expr_p = NULL;

  return GS_ALL_DONE;
}
7625 :
7626 : /* Gimplify a comparison between two variable-sized objects. Do this
7627 : with a call to BUILT_IN_MEMCMP. */
7628 :
7629 : static enum gimplify_status
7630 0 : gimplify_variable_sized_compare (tree *expr_p)
7631 : {
7632 0 : location_t loc = EXPR_LOCATION (*expr_p);
7633 0 : tree op0 = TREE_OPERAND (*expr_p, 0);
7634 0 : tree op1 = TREE_OPERAND (*expr_p, 1);
7635 0 : tree t, arg, dest, src, expr;
7636 :
7637 0 : arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
7638 0 : arg = unshare_expr (arg);
7639 0 : arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
7640 0 : src = build_fold_addr_expr_loc (loc, op1);
7641 0 : dest = build_fold_addr_expr_loc (loc, op0);
7642 0 : t = builtin_decl_implicit (BUILT_IN_MEMCMP);
7643 0 : t = build_call_expr_loc (loc, t, 3, dest, src, arg);
7644 :
7645 0 : expr
7646 0 : = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
7647 0 : SET_EXPR_LOCATION (expr, loc);
7648 0 : *expr_p = expr;
7649 :
7650 0 : return GS_OK;
7651 : }
7652 :
7653 : /* Gimplify a comparison between two aggregate objects of integral scalar
7654 : mode as a comparison between the bitwise equivalent scalar values. */
7655 :
static enum gimplify_status
gimplify_scalar_mode_aggregate_compare (tree *expr_p)
{
  const location_t loc = EXPR_LOCATION (*expr_p);
  const enum tree_code code = TREE_CODE (*expr_p);
  tree op0 = TREE_OPERAND (*expr_p, 0);
  tree op1 = TREE_OPERAND (*expr_p, 1);
  tree type = TREE_TYPE (op0);
  /* Integral scalar type with the same mode as the aggregate, so both
     operands can be reinterpreted bitwise via VIEW_CONVERT_EXPR.  */
  tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);

  op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
  op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);

  /* We need to perform ordering comparisons in memory order like memcmp and,
     therefore, may need to byte-swap operands for little-endian targets.  */
  if (code != EQ_EXPR && code != NE_EXPR)
    {
      gcc_assert (BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN);
      gcc_assert (TREE_CODE (scalar_type) == INTEGER_TYPE);
      tree fndecl;

      /* On big-endian targets the scalar value already compares in
	 memory order, so no swap is required.  */
      if (BYTES_BIG_ENDIAN)
	fndecl = NULL_TREE;
      else
	switch (int_size_in_bytes (scalar_type))
	  {
	  case 1:
	    /* A single byte needs no swapping either.  */
	    fndecl = NULL_TREE;
	    break;
	  case 2:
	    fndecl = builtin_decl_implicit (BUILT_IN_BSWAP16);
	    break;
	  case 4:
	    fndecl = builtin_decl_implicit (BUILT_IN_BSWAP32);
	    break;
	  case 8:
	    fndecl = builtin_decl_implicit (BUILT_IN_BSWAP64);
	    break;
	  case 16:
	    fndecl = builtin_decl_implicit (BUILT_IN_BSWAP128);
	    break;
	  default:
	    gcc_unreachable ();
	  }

      /* Swap both operands identically so the ordering relation between
	 the swapped values matches memcmp semantics.  */
      if (fndecl)
	{
	  op0 = build_call_expr_loc (loc, fndecl, 1, op0);
	  op1 = build_call_expr_loc (loc, fndecl, 1, op1);
	}
    }

  *expr_p = fold_build2_loc (loc, code, TREE_TYPE (*expr_p), op0, op1);

  return GS_OK;
}
7712 :
7713 : /* Gimplify an expression sequence. This function gimplifies each
7714 : expression and rewrites the original expression with the last
7715 : expression of the sequence in GIMPLE form.
7716 :
7717 : PRE_P points to the list where the side effects for all the
7718 : expressions in the sequence will be emitted.
7719 :
7720 : WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
7721 :
7722 : static enum gimplify_status
7723 1093855 : gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
7724 : {
7725 1093855 : tree t = *expr_p;
7726 :
7727 1179936 : do
7728 : {
7729 1179936 : tree *sub_p = &TREE_OPERAND (t, 0);
7730 :
7731 1179936 : if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
7732 203898 : gimplify_compound_expr (sub_p, pre_p, false);
7733 : else
7734 976038 : gimplify_stmt (sub_p, pre_p);
7735 :
7736 1179936 : t = TREE_OPERAND (t, 1);
7737 : }
7738 1179936 : while (TREE_CODE (t) == COMPOUND_EXPR);
7739 :
7740 1093855 : *expr_p = t;
7741 1093855 : if (want_value)
7742 : return GS_OK;
7743 : else
7744 : {
7745 597201 : gimplify_stmt (expr_p, pre_p);
7746 597201 : return GS_ALL_DONE;
7747 : }
7748 : }
7749 :
7750 : /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
7751 : gimplify. After gimplification, EXPR_P will point to a new temporary
7752 : that holds the original value of the SAVE_EXPR node.
7753 :
7754 : PRE_P points to the list where side effects that must happen before
7755 : *EXPR_P should be stored. */
7756 :
7757 : static enum gimplify_status
7758 438909 : gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
7759 : {
7760 438909 : enum gimplify_status ret = GS_ALL_DONE;
7761 438909 : tree val;
7762 :
7763 438909 : gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
7764 438909 : val = TREE_OPERAND (*expr_p, 0);
7765 :
7766 438909 : if (val && TREE_TYPE (val) == error_mark_node)
7767 : return GS_ERROR;
7768 :
7769 : /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
7770 438907 : if (!SAVE_EXPR_RESOLVED_P (*expr_p))
7771 : {
7772 : /* The operand may be a void-valued expression. It is
7773 : being executed only for its side-effects. */
7774 168145 : if (TREE_TYPE (val) == void_type_node)
7775 : {
7776 24 : ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7777 : is_gimple_stmt, fb_none);
7778 24 : val = NULL;
7779 : }
7780 : else
7781 : /* The temporary may not be an SSA name as later abnormal and EH
7782 : control flow may invalidate use/def domination. When in SSA
7783 : form then assume there are no such issues and SAVE_EXPRs only
7784 : appear via GENERIC foldings. */
7785 336242 : val = get_initialized_tmp_var (val, pre_p, post_p,
7786 336242 : gimple_in_ssa_p (cfun));
7787 :
7788 168145 : TREE_OPERAND (*expr_p, 0) = val;
7789 168145 : SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
7790 : }
7791 :
7792 438907 : *expr_p = val;
7793 :
7794 438907 : return ret;
7795 : }
7796 :
7797 : /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
7798 :
7799 : unary_expr
7800 : : ...
7801 : | '&' varname
7802 : ...
7803 :
7804 : PRE_P points to the list where side effects that must happen before
7805 : *EXPR_P should be stored.
7806 :
7807 : POST_P points to the list where side effects that must happen after
7808 : *EXPR_P should be stored. */
7809 :
static enum gimplify_status
gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree expr = *expr_p;
  tree op0 = TREE_OPERAND (expr, 0);
  enum gimplify_status ret;
  location_t loc = EXPR_LOCATION (*expr_p);

  switch (TREE_CODE (op0))
    {
    case INDIRECT_REF:
    do_indirect_ref:
      /* Check if we are dealing with an expression of the form '&*ptr'.
	 While the front end folds away '&*ptr' into 'ptr', these
	 expressions may be generated internally by the compiler (e.g.,
	 builtins like __builtin_va_end).  */
      /* Caution: the silent array decomposition semantics we allow for
	 ADDR_EXPR means we can't always discard the pair.  */
      /* Gimplification of the ADDR_EXPR operand may drop
	 cv-qualification conversions, so make sure we add them if
	 needed.  */
      {
	tree op00 = TREE_OPERAND (op0, 0);
	tree t_expr = TREE_TYPE (expr);
	tree t_op00 = TREE_TYPE (op00);

	/* '&*ptr' collapses to 'ptr'; re-add any type conversion the
	   collapse would otherwise lose.  */
	if (!useless_type_conversion_p (t_expr, t_op00))
	  op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
	*expr_p = op00;
	ret = GS_OK;
      }
      break;

    case VIEW_CONVERT_EXPR:
      /* Take the address of our operand and then convert it to the type of
	 this ADDR_EXPR.

	 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
	 all clear.  The impact of this transformation is even less clear.  */

      /* If the operand is a useless conversion, look through it.  Doing so
	 guarantees that the ADDR_EXPR and its operand will remain of the
	 same type.  */
      if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
	op0 = TREE_OPERAND (op0, 0);

      *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
				  build_fold_addr_expr_loc (loc,
							TREE_OPERAND (op0, 0)));
      ret = GS_OK;
      break;

    case MEM_REF:
      /* '&MEM[ptr, 0]' is equivalent to '&*ptr'; handle it the same
	 way.  A nonzero offset falls through to the generic case.  */
      if (integer_zerop (TREE_OPERAND (op0, 1)))
	goto do_indirect_ref;

      /* fall through */

    default:
      /* If we see a call to a declared builtin or see its address
	 being taken (we can unify those cases here) then we can mark
	 the builtin for implicit generation by GCC.  */
      if (TREE_CODE (op0) == FUNCTION_DECL
	  && fndecl_built_in_p (op0, BUILT_IN_NORMAL)
	  && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
	set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);

      /* We use fb_either here because the C frontend sometimes takes
	 the address of a call that returns a struct; see
	 gcc.dg/c99-array-lval-1.c.  The gimplifier will correctly make
	 the implied temporary explicit.  */

      /* Make the operand addressable.  */
      ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
			   is_gimple_addressable, fb_either);
      if (ret == GS_ERROR)
	break;

      /* Then mark it.  Beware that it may not be possible to do so directly
	 if a temporary has been created by the gimplification.  */
      prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);

      op0 = TREE_OPERAND (expr, 0);

      /* For various reasons, the gimplification of the expression
	 may have made a new INDIRECT_REF.  */
      if (INDIRECT_REF_P (op0)
	  || (TREE_CODE (op0) == MEM_REF
	      && integer_zerop (TREE_OPERAND (op0, 1))))
	goto do_indirect_ref;

      mark_addressable (TREE_OPERAND (expr, 0));

      /* The FEs may end up building ADDR_EXPRs early on a decl with
	 an incomplete type.  Re-build ADDR_EXPRs in canonical form
	 here.  */
      if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
	*expr_p = build_fold_addr_expr (op0);

      /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly.  */
      if (TREE_CODE (*expr_p) == ADDR_EXPR)
	recompute_tree_invariant_for_addr_expr (*expr_p);

      /* If we re-built the ADDR_EXPR add a conversion to the original type
	 if required.  */
      if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
	*expr_p = fold_convert (TREE_TYPE (expr), *expr_p);

      break;
    }

  return ret;
}
7923 :
7924 : /* Return the number of times character C occurs in string S. */
7925 :
static int
num_occurrences (int c, const char *s)
{
  int count = 0;
  for (; *s != '\0'; ++s)
    if (*s == c)
      ++count;
  return count;
}
7934 :
7935 : /* A subroutine of gimplify_asm_expr. Check that all operands have
7936 : the same number of alternatives. Return -1 if this is violated. Otherwise
7937 : return the number of alternatives. */
7938 :
7939 : static int
7940 194966 : num_alternatives (const_tree link)
7941 : {
7942 194966 : if (link == nullptr)
7943 : return 0;
7944 :
7945 47649 : const char *constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
7946 47649 : int num = num_occurrences (',', constraint);
7947 :
7948 47649 : if (num + 1 > MAX_RECOG_ALTERNATIVES)
7949 : return -1;
7950 :
7951 92212 : for (link = TREE_CHAIN (link); link; link = TREE_CHAIN (link))
7952 : {
7953 44563 : constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
7954 89126 : if (num_occurrences (',', constraint) != num)
7955 : return -1;
7956 : }
7957 47649 : return num + 1;
7958 : }
7959 :
7960 : /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
7961 : value; output operands should be a gimple lvalue. */
7962 :
7963 : static enum gimplify_status
7964 97483 : gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
7965 : {
7966 97483 : tree expr;
7967 97483 : int noutputs;
7968 97483 : const char **oconstraints;
7969 97483 : int i;
7970 97483 : tree link;
7971 97483 : const char *constraint;
7972 97483 : bool allows_mem, allows_reg, is_inout;
7973 97483 : enum gimplify_status ret, tret;
7974 97483 : gasm *stmt;
7975 97483 : vec<tree, va_gc> *inputs;
7976 97483 : vec<tree, va_gc> *outputs;
7977 97483 : vec<tree, va_gc> *clobbers;
7978 97483 : vec<tree, va_gc> *labels;
7979 97483 : tree link_next;
7980 :
7981 97483 : expr = *expr_p;
7982 97483 : noutputs = list_length (ASM_OUTPUTS (expr));
7983 97483 : oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
7984 :
7985 97483 : inputs = NULL;
7986 97483 : outputs = NULL;
7987 97483 : clobbers = NULL;
7988 97483 : labels = NULL;
7989 :
7990 97483 : int num_alternatives_out = num_alternatives (ASM_OUTPUTS (expr));
7991 97483 : int num_alternatives_in = num_alternatives (ASM_INPUTS (expr));
7992 97483 : if (num_alternatives_out == -1 || num_alternatives_in == -1
7993 97483 : || (num_alternatives_out > 0 && num_alternatives_in > 0
7994 16758 : && num_alternatives_out != num_alternatives_in))
7995 : {
7996 0 : error ("operand constraints for %<asm%> differ "
7997 : "in number of alternatives");
7998 0 : return GS_ERROR;
7999 : }
8000 97483 : int num_alternatives = MAX (num_alternatives_out, num_alternatives_in);
8001 :
8002 97483 : gimplify_reg_info reg_info (num_alternatives, noutputs);
8003 :
8004 97483 : link_next = NULL_TREE;
8005 192931 : for (link = ASM_CLOBBERS (expr); link; link = link_next)
8006 : {
8007 : /* The clobber entry could also be an error marker. */
8008 95448 : if (TREE_CODE (TREE_VALUE (link)) == STRING_CST)
8009 : {
8010 95425 : const char *regname= TREE_STRING_POINTER (TREE_VALUE (link));
8011 95425 : int regno = decode_reg_name (regname);
8012 95425 : if (regno >= 0)
8013 41262 : reg_info.set_clobbered (regno);
8014 : }
8015 95448 : link_next = TREE_CHAIN (link);
8016 95448 : TREE_CHAIN (link) = NULL_TREE;
8017 95448 : vec_safe_push (clobbers, link);
8018 : }
8019 :
8020 97483 : ret = GS_ALL_DONE;
8021 97483 : link_next = NULL_TREE;
8022 156996 : for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
8023 : {
8024 59546 : bool ok;
8025 59546 : size_t constraint_len;
8026 :
8027 59546 : if (error_operand_p (TREE_VALUE (link)))
8028 : return GS_ERROR;
8029 59525 : link_next = TREE_CHAIN (link);
8030 :
8031 119050 : oconstraints[i]
8032 59525 : = constraint
8033 59525 : = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
8034 59525 : constraint_len = strlen (constraint);
8035 59525 : if (constraint_len == 0)
8036 0 : continue;
8037 :
8038 59525 : reg_info.operand = TREE_VALUE (link);
8039 59525 : ok = parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
8040 : &allows_reg, &is_inout, ®_info);
8041 59525 : if (!ok)
8042 : {
8043 11 : ret = GS_ERROR;
8044 11 : is_inout = false;
8045 : }
8046 :
8047 : /* If we can't make copies, we can only accept memory.
8048 : Similarly for VLAs. */
8049 59525 : tree outtype = TREE_TYPE (TREE_VALUE (link));
8050 59525 : if (TREE_ADDRESSABLE (outtype)
8051 59510 : || !COMPLETE_TYPE_P (outtype)
8052 119010 : || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (outtype)))
8053 : {
8054 295 : if (allows_mem)
8055 283 : allows_reg = 0;
8056 : else
8057 : {
8058 12 : error ("impossible constraint in %<asm%>");
8059 12 : error ("non-memory output %d must stay in memory", i);
8060 12 : return GS_ERROR;
8061 : }
8062 : }
8063 :
8064 59513 : if (!allows_reg && allows_mem)
8065 1796 : mark_addressable (TREE_VALUE (link));
8066 :
8067 59513 : tree orig = TREE_VALUE (link);
8068 110361 : tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
8069 : is_inout ? is_gimple_min_lval : is_gimple_lvalue,
8070 : fb_lvalue | fb_mayfail);
8071 59513 : if (tret == GS_ERROR)
8072 : {
8073 0 : if (orig != error_mark_node)
8074 0 : error ("invalid lvalue in %<asm%> output %d", i);
8075 : ret = tret;
8076 : }
8077 :
8078 : /* If the gimplified operand is a register we do not allow memory. */
8079 59513 : if (allows_reg
8080 57714 : && allows_mem
8081 65431 : && (is_gimple_reg (TREE_VALUE (link))
8082 267 : || (handled_component_p (TREE_VALUE (link))
8083 36 : && is_gimple_reg (TREE_OPERAND (TREE_VALUE (link), 0)))))
8084 5652 : allows_mem = 0;
8085 :
8086 : /* If the constraint does not allow memory make sure we gimplify
8087 : it to a register if it is not already but its base is. This
8088 : happens for complex and vector components. */
8089 59513 : if (!allows_mem)
8090 : {
8091 57451 : tree op = TREE_VALUE (link);
8092 57451 : if (! is_gimple_val (op)
8093 20982 : && is_gimple_reg_type (TREE_TYPE (op))
8094 78432 : && is_gimple_reg (get_base_address (op)))
8095 : {
8096 19 : tree tem = create_tmp_reg (TREE_TYPE (op));
8097 19 : tree ass;
8098 19 : if (is_inout)
8099 : {
8100 7 : ass = build2 (MODIFY_EXPR, TREE_TYPE (tem),
8101 : tem, unshare_expr (op));
8102 7 : gimplify_and_add (ass, pre_p);
8103 : }
8104 19 : ass = build2 (MODIFY_EXPR, TREE_TYPE (tem), op, tem);
8105 19 : gimplify_and_add (ass, post_p);
8106 :
8107 19 : TREE_VALUE (link) = tem;
8108 19 : tret = GS_OK;
8109 : }
8110 : }
8111 :
8112 59513 : vec_safe_push (outputs, link);
8113 59513 : TREE_CHAIN (link) = NULL_TREE;
8114 :
8115 59513 : if (is_inout)
8116 : {
8117 : /* An input/output operand. To give the optimizers more
8118 : flexibility, split it into separate input and output
8119 : operands. */
8120 8665 : tree input;
8121 : /* Buffer big enough to format a 32-bit UINT_MAX into. */
8122 8665 : char buf[11];
8123 :
8124 : /* Turn the in/out constraint into an output constraint. */
8125 8665 : char *p = xstrdup (constraint);
8126 8665 : p[0] = '=';
8127 8665 : TREE_PURPOSE (link) = unshare_expr (TREE_PURPOSE (link));
8128 8665 : TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
8129 :
8130 : /* And add a matching input constraint. */
8131 8665 : if (allows_reg)
8132 : {
8133 8167 : sprintf (buf, "%u", i);
8134 :
8135 : /* If there are multiple alternatives in the constraint,
8136 : handle each of them individually. Those that allow register
8137 : will be replaced with operand number, the others will stay
8138 : unchanged. */
8139 8167 : if (strchr (p, ',') != NULL)
8140 : {
8141 100 : size_t len = 0, buflen = strlen (buf);
8142 100 : char *beg, *end, *str, *dst;
8143 :
8144 100 : for (beg = p + 1;;)
8145 : {
8146 200 : end = strchr (beg, ',');
8147 200 : if (end == NULL)
8148 100 : end = strchr (beg, '\0');
8149 200 : if ((size_t) (end - beg) < buflen)
8150 28 : len += buflen + 1;
8151 : else
8152 172 : len += end - beg + 1;
8153 200 : if (*end)
8154 100 : beg = end + 1;
8155 : else
8156 : break;
8157 : }
8158 :
8159 100 : str = (char *) alloca (len);
8160 100 : for (beg = p + 1, dst = str;;)
8161 : {
8162 200 : const char *tem;
8163 200 : bool mem_p, reg_p, inout_p;
8164 :
8165 200 : end = strchr (beg, ',');
8166 200 : if (end)
8167 100 : *end = '\0';
8168 200 : beg[-1] = '=';
8169 200 : tem = beg - 1;
8170 200 : parse_output_constraint (&tem, i, 0, 0, &mem_p, ®_p,
8171 : &inout_p, nullptr);
8172 200 : if (dst != str)
8173 100 : *dst++ = ',';
8174 200 : if (reg_p)
8175 : {
8176 115 : memcpy (dst, buf, buflen);
8177 115 : dst += buflen;
8178 : }
8179 : else
8180 : {
8181 85 : if (end)
8182 0 : len = end - beg;
8183 : else
8184 85 : len = strlen (beg);
8185 85 : memcpy (dst, beg, len);
8186 85 : dst += len;
8187 : }
8188 200 : if (end)
8189 100 : beg = end + 1;
8190 : else
8191 : break;
8192 100 : }
8193 100 : *dst = '\0';
8194 100 : input = build_string (dst - str, str);
8195 : }
8196 : else
8197 8067 : input = build_string (strlen (buf), buf);
8198 : }
8199 : else
8200 498 : input = build_string (constraint_len - 1, constraint + 1);
8201 :
8202 8665 : free (p);
8203 :
8204 8665 : input = build_tree_list (build_tree_list (NULL_TREE, input),
8205 8665 : unshare_expr (TREE_VALUE (link)));
8206 8665 : ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
8207 : }
8208 : }
8209 :
8210 : /* After all output operands have been gimplified, verify that each output
8211 : operand is used at most once in case of hard register constraints. Thus,
8212 : error out in cases like
8213 : asm ("" : "={0}" (x), "={1}" (x));
8214 : or even for
8215 : asm ("" : "=r" (x), "={1}" (x));
8216 :
8217 : FIXME: Ideally we would also error out for cases like
8218 : int x;
8219 : asm ("" : "=r" (x), "=r" (x));
8220 : However, since code like that was previously accepted, erroring out now might
8221 : break existing code. On the other hand, we already error out for register
8222 : asm like
8223 : register int x asm ("0");
8224 : asm ("" : "=r" (x), "=r" (x));
8225 : Thus, maybe it wouldn't be too bad to also error out in the former
8226 : non-register-asm case.
8227 : */
8228 156959 : for (unsigned i = 0; i < vec_safe_length (outputs); ++i)
8229 : {
8230 59511 : tree link = (*outputs)[i];
8231 59511 : tree op1 = TREE_VALUE (link);
8232 59511 : const char *constraint
8233 59511 : = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
8234 59511 : if (strchr (constraint, '{') != nullptr)
8235 236 : for (unsigned j = 0; j < vec_safe_length (outputs); ++j)
8236 : {
8237 142 : if (i == j)
8238 95 : continue;
8239 47 : tree link2 = (*outputs)[j];
8240 47 : tree op2 = TREE_VALUE (link2);
8241 47 : if (op1 == op2)
8242 : {
8243 2 : error ("multiple outputs to lvalue %qE", op2);
8244 2 : return GS_ERROR;
8245 : }
8246 : }
8247 : }
8248 :
8249 97448 : link_next = NULL_TREE;
8250 97448 : int input_num = 0;
8251 138717 : for (link = ASM_INPUTS (expr); link; ++input_num, ++i, link = link_next)
8252 : {
8253 41327 : if (error_operand_p (TREE_VALUE (link)))
8254 : return GS_ERROR;
8255 41278 : link_next = TREE_CHAIN (link);
8256 41278 : constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
8257 41278 : reg_info.operand = TREE_VALUE (link);
8258 41278 : bool ok = parse_input_constraint (&constraint, input_num, 0, noutputs, 0,
8259 : oconstraints, &allows_mem, &allows_reg,
8260 : ®_info);
8261 41278 : if (!ok)
8262 : {
8263 30 : ret = GS_ERROR;
8264 30 : is_inout = false;
8265 : }
8266 :
8267 : /* If we can't make copies, we can only accept memory. */
8268 41278 : tree intype = TREE_TYPE (TREE_VALUE (link));
8269 41278 : if (TREE_ADDRESSABLE (intype)
8270 41260 : || !COMPLETE_TYPE_P (intype)
8271 82499 : || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (intype)))
8272 : {
8273 304 : if (allows_mem)
8274 295 : allows_reg = 0;
8275 : else
8276 : {
8277 9 : error ("impossible constraint in %<asm%>");
8278 9 : error ("non-memory input %d must stay in memory", i);
8279 9 : return GS_ERROR;
8280 : }
8281 : }
8282 :
8283 : /* If the operand is a memory input, it should be an lvalue. */
8284 41269 : if (!allows_reg && allows_mem)
8285 : {
8286 2120 : tree inputv = TREE_VALUE (link);
8287 2120 : STRIP_NOPS (inputv);
8288 2120 : if (TREE_CODE (inputv) == PREDECREMENT_EXPR
8289 : || TREE_CODE (inputv) == PREINCREMENT_EXPR
8290 : || TREE_CODE (inputv) == POSTDECREMENT_EXPR
8291 2120 : || TREE_CODE (inputv) == POSTINCREMENT_EXPR
8292 2108 : || TREE_CODE (inputv) == MODIFY_EXPR
8293 4226 : || VOID_TYPE_P (TREE_TYPE (inputv)))
8294 38 : TREE_VALUE (link) = error_mark_node;
8295 2120 : tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
8296 : is_gimple_lvalue, fb_lvalue | fb_mayfail);
8297 2120 : if (tret != GS_ERROR)
8298 : {
8299 : /* Unlike output operands, memory inputs are not guaranteed
8300 : to be lvalues by the FE, and while the expressions are
8301 : marked addressable there, if it is e.g. a statement
8302 : expression, temporaries in it might not end up being
8303 : addressable. They might be already used in the IL and thus
8304 : it is too late to make them addressable now though. */
8305 2074 : tree x = TREE_VALUE (link);
8306 2215 : while (handled_component_p (x))
8307 141 : x = TREE_OPERAND (x, 0);
8308 2074 : if (TREE_CODE (x) == MEM_REF
8309 2074 : && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
8310 0 : x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
8311 2074 : if ((VAR_P (x)
8312 : || TREE_CODE (x) == PARM_DECL
8313 : || TREE_CODE (x) == RESULT_DECL)
8314 1493 : && !TREE_ADDRESSABLE (x)
8315 23 : && is_gimple_reg (x))
8316 : {
8317 17 : warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link),
8318 17 : input_location), 0,
8319 : "memory input %d is not directly addressable",
8320 : i);
8321 17 : prepare_gimple_addressable (&TREE_VALUE (link), pre_p);
8322 : }
8323 : }
8324 2120 : mark_addressable (TREE_VALUE (link));
8325 2120 : if (tret == GS_ERROR)
8326 : {
8327 46 : if (inputv != error_mark_node)
8328 46 : error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), input_location),
8329 : "memory input %d is not directly addressable", i);
8330 : ret = tret;
8331 : }
8332 : }
8333 : else
8334 : {
8335 39149 : tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
8336 : is_gimple_asm_val, fb_rvalue);
8337 39149 : if (tret == GS_ERROR)
8338 46 : ret = tret;
8339 : }
8340 :
8341 41269 : TREE_CHAIN (link) = NULL_TREE;
8342 41269 : vec_safe_push (inputs, link);
8343 : }
8344 :
8345 97390 : link_next = NULL_TREE;
8346 98272 : for (link = ASM_LABELS (expr); link; link = link_next)
8347 : {
8348 882 : link_next = TREE_CHAIN (link);
8349 882 : TREE_CHAIN (link) = NULL_TREE;
8350 882 : vec_safe_push (labels, link);
8351 : }
8352 :
8353 : /* Do not add ASMs with errors to the gimple IL stream. */
8354 97390 : if (ret != GS_ERROR)
8355 : {
8356 97307 : stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
8357 : inputs, outputs, clobbers, labels);
8358 :
8359 : /* asm is volatile if it was marked by the user as volatile or
8360 : there are no outputs or this is an asm goto. */
8361 194614 : gimple_asm_set_volatile (stmt,
8362 97307 : ASM_VOLATILE_P (expr)
8363 7509 : || noutputs == 0
8364 104802 : || labels);
8365 97307 : gimple_asm_set_basic (stmt, ASM_BASIC_P (expr));
8366 97307 : gimple_asm_set_inline (stmt, ASM_INLINE_P (expr));
8367 :
8368 97307 : gimplify_seq_add_stmt (pre_p, stmt);
8369 : }
8370 :
8371 : return ret;
8372 97483 : }
8373 :
8374 : /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
8375 : GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
8376 : gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
8377 : return to this function.
8378 :
8379 : FIXME should we complexify the prequeue handling instead? Or use flags
8380 : for all the cleanups and let the optimizer tighten them up? The current
8381 : code seems pretty fragile; it will break on a cleanup within any
8382 : non-conditional nesting. But any such nesting would be broken, anyway;
8383 : we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
8384 : and continues out of it. We can do that at the RTL level, though, so
8385 : having an optimizer to tighten up try/finally regions would be a Good
8386 : Thing. */
8387 :
8388 : static enum gimplify_status
8389 5130300 : gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
8390 : {
8391 5130300 : gimple_stmt_iterator iter;
8392 5130300 : gimple_seq body_sequence = NULL;
8393 :
8394 5130300 : tree temp = voidify_wrapper_expr (*expr_p, NULL);
8395 :
8396 : /* We only care about the number of conditions between the innermost
8397 : CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
8398 : any cleanups collected outside the CLEANUP_POINT_EXPR. */
8399 5130300 : int old_conds = gimplify_ctxp->conditions;
8400 5130300 : gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
8401 5130300 : bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
8402 5130300 : gimplify_ctxp->conditions = 0;
8403 5130300 : gimplify_ctxp->conditional_cleanups = NULL;
8404 5130300 : gimplify_ctxp->in_cleanup_point_expr = true;
8405 :
8406 5130300 : gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
8407 :
8408 5130300 : gimplify_ctxp->conditions = old_conds;
8409 5130300 : gimplify_ctxp->conditional_cleanups = old_cleanups;
8410 5130300 : gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
8411 :
8412 26778017 : for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
8413 : {
8414 16557297 : gimple *wce = gsi_stmt (iter);
8415 :
8416 16557297 : if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
8417 : {
8418 582636 : if (gsi_one_before_end_p (iter))
8419 : {
8420 : /* Note that gsi_insert_seq_before and gsi_remove do not
8421 : scan operands, unlike some other sequence mutators. */
8422 2999 : if (!gimple_wce_cleanup_eh_only (wce))
8423 2999 : gsi_insert_seq_before_without_update (&iter,
8424 : gimple_wce_cleanup (wce),
8425 : GSI_SAME_STMT);
8426 2999 : gsi_remove (&iter, true);
8427 2999 : break;
8428 : }
8429 : else
8430 : {
8431 579637 : gtry *gtry;
8432 579637 : gimple_seq seq;
8433 579637 : enum gimple_try_flags kind;
8434 :
8435 579637 : if (gimple_wce_cleanup_eh_only (wce))
8436 : kind = GIMPLE_TRY_CATCH;
8437 : else
8438 565872 : kind = GIMPLE_TRY_FINALLY;
8439 579637 : seq = gsi_split_seq_after (iter);
8440 :
8441 579637 : gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
8442 : /* Do not use gsi_replace here, as it may scan operands.
8443 : We want to do a simple structural modification only. */
8444 579637 : gsi_set_stmt (&iter, gtry);
8445 1159274 : iter = gsi_start (gtry->eval);
8446 : }
8447 : }
8448 : else
8449 15974661 : gsi_next (&iter);
8450 : }
8451 :
8452 5130300 : gimplify_seq_add_seq (pre_p, body_sequence);
8453 5130300 : if (temp)
8454 : {
8455 345602 : *expr_p = temp;
8456 345602 : return GS_OK;
8457 : }
8458 : else
8459 : {
8460 4784698 : *expr_p = NULL;
8461 4784698 : return GS_ALL_DONE;
8462 : }
8463 : }
8464 :
8465 : /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
8466 : is the cleanup action required. EH_ONLY is true if the cleanup should
8467 : only be executed if an exception is thrown, not on normal exit.
8468 : If FORCE_UNCOND is true perform the cleanup unconditionally; this is
8469 : only valid for clobbers. */
8470 :
static void
gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p,
		     bool force_uncond = false)
{
  gimple *wce;
  gimple_seq cleanup_stmts = NULL;

  /* Errors can result in improperly nested cleanups.  Which results in
     confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR.  */
  if (seen_error ())
    return;

  if (gimple_conditional_context ())
    {
      /* If we're in a conditional context, this is more complex.  We only
	 want to run the cleanup if we actually ran the initialization that
	 necessitates it, but we want to run it after the end of the
	 conditional context.  So we wrap the try/finally around the
	 condition and use a flag to determine whether or not to actually
	 run the destructor.  Thus

	   test ? f(A()) : 0

	 becomes (approximately)

	   flag = 0;
	   try {
	     if (test) { A::A(temp); flag = 1; val = f(temp); }
	     else { val = 0; }
	   } finally {
	     if (flag) A::~A(temp);
	   }
	   val
      */
      if (force_uncond)
	{
	  /* An unconditional cleanup (per the header comment, only valid
	     for clobbers) needs no guard flag: queue the WCE directly on
	     the enclosing conditional context.  */
	  gimplify_stmt (&cleanup, &cleanup_stmts);
	  wce = gimple_build_wce (cleanup_stmts);
	  gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
	}
      else
	{
	  /* Guard the cleanup with a fresh boolean FLAG: FFALSE clears it
	     before the conditional context runs, and FTRUE (appended to
	     *PRE_P) sets it only on the path where the initialization
	     actually executed.  */
	  tree flag = create_tmp_var (boolean_type_node, "cleanup");
	  gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
	  gassign *ftrue = gimple_build_assign (flag, boolean_true_node);

	  /* Wrap CLEANUP as "if (flag) cleanup;" before gimplifying it.  */
	  cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
	  gimplify_stmt (&cleanup, &cleanup_stmts);
	  wce = gimple_build_wce (cleanup_stmts);
	  gimple_wce_set_cleanup_eh_only (wce, eh_only);

	  gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
	  gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
	  gimplify_seq_add_stmt (pre_p, ftrue);

	  /* Because of this manipulation, and the EH edges that jump
	     threading cannot redirect, the temporary (VAR) will appear
	     to be used uninitialized.  Don't warn.  */
	  suppress_warning (var, OPT_Wuninitialized);
	}
    }
  else
    {
      /* Unconditional context: gimplify the cleanup, wrap it in a
	 GIMPLE_WITH_CLEANUP_EXPR, and append it to *PRE_P.  */
      gimplify_stmt (&cleanup, &cleanup_stmts);
      wce = gimple_build_wce (cleanup_stmts);
      gimple_wce_set_cleanup_eh_only (wce, eh_only);
      gimplify_seq_add_stmt (pre_p, wce);
    }
}
8540 :
8541 : /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
8542 :
static enum gimplify_status
gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree targ = *expr_p;
  tree temp = TARGET_EXPR_SLOT (targ);
  tree init = TARGET_EXPR_INITIAL (targ);
  enum gimplify_status ret;

  /* Insertion point for the ASan unpoison call, and whether *PRE_P was
     still empty when that point was recorded.  */
  bool unpoison_empty_seq = false;
  gimple_stmt_iterator unpoison_it;

  if (init)
    {
      gimple_seq init_pre_p = NULL;
      bool is_vla = false;

      /* TARGET_EXPR temps aren't part of the enclosing block, so add it
	 to the temps list.  Handle also variable length TARGET_EXPRs.  */
      if (!poly_int_tree_p (DECL_SIZE (temp)))
	{
	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
	    gimplify_type_sizes (TREE_TYPE (temp), &init_pre_p);
	  /* FIXME: this is correct only when the size of the type does
	     not depend on expressions evaluated in init.  */
	  gimplify_vla_decl (temp, &init_pre_p);
	  is_vla = true;
	}
      else
	{
	  /* Save location where we need to place unpoisoning.  It's possible
	     that a variable will be converted to needs_to_live_in_memory.  */
	  unpoison_it = gsi_last (*pre_p);
	  unpoison_empty_seq = gsi_end_p (unpoison_it);

	  gimple_add_tmp_var (temp);
	}

      /* Automatic-variable initialization (flag_auto_var_init): only when
	 the initializer is a bare statement (void type), i.e. it does not
	 itself store a value into the slot.  */
      if (var_needs_auto_init_p (temp) && VOID_TYPE_P (TREE_TYPE (init)))
	{
	  gimple_add_init_for_auto_var (temp, flag_auto_var_init, &init_pre_p);
	  if (flag_auto_var_init == AUTO_INIT_PATTERN
	      && !is_gimple_reg (temp)
	      && clear_padding_type_may_have_padding_p (TREE_TYPE (temp)))
	    gimple_add_padding_init_for_auto_var (temp, is_vla, &init_pre_p);
	}

      /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
	 expression is supposed to initialize the slot.  */
      if (VOID_TYPE_P (TREE_TYPE (init)))
	ret = gimplify_expr (&init, &init_pre_p, post_p, is_gimple_stmt,
			     fb_none);
      else
	{
	  /* Otherwise gimplify TEMP = INIT; the INIT_EXPR node itself is
	     scratch and is released back to the GC immediately.  */
	  tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
	  init = init_expr;
	  ret = gimplify_expr (&init, &init_pre_p, post_p, is_gimple_stmt,
			       fb_none);
	  init = NULL;
	  ggc_free (init_expr);
	}
      if (ret == GS_ERROR)
	{
	  /* PR c++/28266 Make sure this is expanded only once. */
	  TARGET_EXPR_INITIAL (targ) = NULL_TREE;
	  return GS_ERROR;
	}

      /* Anything left over after gimplification needs to be emitted.  */
      if (init)
	gimplify_and_add (init, &init_pre_p);

      /* Add a clobber for the temporary going out of scope, like
	 gimplify_bind_expr.  But only if we did not promote the
	 temporary to static storage.  */
      if (gimplify_ctxp->in_cleanup_point_expr
	  && !TREE_STATIC (temp)
	  && needs_to_live_in_memory (temp))
	{
	  if (flag_stack_reuse == SR_ALL)
	    {
	      /* Unconditional (force_uncond) cleanup: the storage dies at
		 the cleanup point regardless of how we leave it.  */
	      tree clobber = build_clobber (TREE_TYPE (temp),
					    CLOBBER_STORAGE_END);
	      clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
	      gimple_push_cleanup (temp, clobber, false, pre_p, true);
	    }
	  if (asan_poisoned_variables
	      && DECL_ALIGN (temp) <= MAX_SUPPORTED_STACK_ALIGNMENT
	      && !TREE_STATIC (temp)
	      && dbg_cnt (asan_use_after_scope)
	      && !gimplify_omp_ctxp)
	    {
	      /* Unpoison at the saved insertion point and re-poison as a
		 cleanup, for use-after-scope detection.  */
	      tree asan_cleanup = build_asan_poison_call_expr (temp);
	      if (asan_cleanup)
		{
		  if (unpoison_empty_seq)
		    unpoison_it = gsi_start (*pre_p);

		  asan_poison_variable (temp, false, &unpoison_it,
					unpoison_empty_seq);
		  gimple_push_cleanup (temp, asan_cleanup, false, pre_p);
		}
	    }
	}

      gimple_seq_add_seq (pre_p, init_pre_p);

      /* If needed, push the cleanup for the temp.  */
      if (TARGET_EXPR_CLEANUP (targ))
	gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
			     CLEANUP_EH_ONLY (targ), pre_p);

      /* Only expand this once.  */
      TREE_OPERAND (targ, 3) = init;
      TARGET_EXPR_INITIAL (targ) = NULL_TREE;
    }
  else
    /* We should have expanded this before.  */
    gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));

  *expr_p = temp;
  return GS_OK;
}
8664 :
8665 : /* Gimplification of expression trees. */
8666 :
8667 : /* Gimplify an expression which appears at statement context. The
8668 : corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
8669 : NULL, a new sequence is allocated.
8670 :
8671 : Return true if we actually added a statement to the queue. */
8672 :
8673 : bool
8674 101679223 : gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
8675 : {
8676 101679223 : gimple_seq_node last;
8677 :
8678 101679223 : last = gimple_seq_last (*seq_p);
8679 101679223 : gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
8680 101679223 : return last != gimple_seq_last (*seq_p);
8681 : }
8682 :
/* Add FIRSTPRIVATE entries for DECL to CTX and the surrounding OpenMP
   parallels.  If entries already exist, force them to be some flavor of
   private.  If there is no enclosing parallel, do nothing.  */
8686 :
void
omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
{
  splay_tree_node n;

  /* Nothing to do for non-decls or outside any OMP region.  */
  if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE)
    return;

  /* Walk outward from CTX through the enclosing contexts.  */
  do
    {
      n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
      if (n != NULL)
	{
	  /* DECL already has an entry here: demote SHARED to
	     FIRSTPRIVATE (preserving the SEEN bit), narrow a map entry
	     to to-only, and if it is already some flavor of private,
	     stop the outward walk entirely.  */
	  if (n->value & GOVD_SHARED)
	    n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
	  else if (n->value & GOVD_MAP)
	    n->value |= GOVD_MAP_TO_ONLY;
	  else
	    return;
	}
      else if ((ctx->region_type & ORT_TARGET) != 0)
	{
	  /* In a target region, follow the scalar defaultmap category:
	     firstprivate if it says so, otherwise map to-only.  */
	  if (ctx->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
	    omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
	  else
	    omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
	}
      else if (ctx->region_type != ORT_WORKSHARE
	       && ctx->region_type != ORT_TASKGROUP
	       && ctx->region_type != ORT_SIMD
	       && ctx->region_type != ORT_ACC
	       && !(ctx->region_type & ORT_TARGET_DATA))
	/* Only region kinds that actually privatize (parallel, task,
	   teams, ...) get a new FIRSTPRIVATE entry.  */
	omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);

      ctx = ctx->outer_context;
    }
  while (ctx);
}
8725 :
8726 : /* Similarly for each of the type sizes of TYPE. */
8727 :
8728 : static void
8729 39866 : omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
8730 : {
8731 39866 : if (type == NULL || type == error_mark_node)
8732 : return;
8733 39862 : type = TYPE_MAIN_VARIANT (type);
8734 :
8735 39862 : if (ctx->privatized_types->add (type))
8736 : return;
8737 :
8738 30487 : switch (TREE_CODE (type))
8739 : {
8740 12976 : case INTEGER_TYPE:
8741 12976 : case ENUMERAL_TYPE:
8742 12976 : case BOOLEAN_TYPE:
8743 12976 : case REAL_TYPE:
8744 12976 : case FIXED_POINT_TYPE:
8745 12976 : omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
8746 12976 : omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
8747 12976 : break;
8748 :
8749 5364 : case ARRAY_TYPE:
8750 5364 : omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
8751 5364 : omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
8752 5364 : break;
8753 :
8754 2502 : case RECORD_TYPE:
8755 2502 : case UNION_TYPE:
8756 2502 : case QUAL_UNION_TYPE:
8757 2502 : {
8758 2502 : tree field;
8759 26624 : for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
8760 24122 : if (TREE_CODE (field) == FIELD_DECL)
8761 : {
8762 8997 : omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
8763 8997 : omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
8764 : }
8765 : }
8766 : break;
8767 :
8768 8996 : case POINTER_TYPE:
8769 8996 : case REFERENCE_TYPE:
8770 8996 : omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
8771 8996 : break;
8772 :
8773 : default:
8774 : break;
8775 : }
8776 :
8777 30487 : omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
8778 30487 : omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
8779 30487 : lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
8780 : }
8781 :
8782 : /* Add an entry for DECL in the OMP context CTX with FLAGS. */
8783 :
static void
omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
{
  splay_tree_node n;
  unsigned int nflags;
  tree t;

  /* Nothing to do for erroneous operands or outside any OMP region.  */
  if (error_operand_p (decl) || ctx->region_type == ORT_NONE)
    return;

  /* Never elide decls whose type has TREE_ADDRESSABLE set.  This means
     there are constructors involved somewhere.  Exception is a shared clause,
     there is nothing privatized in that case.  */
  if ((flags & GOVD_SHARED) == 0
      && (TREE_ADDRESSABLE (TREE_TYPE (decl))
	  || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl))))
    flags |= GOVD_SEEN;

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
    {
      /* We shouldn't be re-adding the decl with the same data
	 sharing class.  */
      gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
      nflags = n->value | flags;
      /* The only combination of data sharing classes we should see is
	 FIRSTPRIVATE and LASTPRIVATE.  However, OpenACC permits
	 reduction variables to be used in data sharing clauses.  */
      gcc_assert ((ctx->region_type & ORT_ACC) != 0
		  || ((nflags & GOVD_DATA_SHARE_CLASS)
		      == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE))
		  || (flags & GOVD_DATA_SHARE_CLASS) == 0);
      n->value = nflags;
      return;
    }

  /* When adding a variable-sized variable, we have to handle all sorts
     of additional bits of data: the pointer replacement variable, and
     the parameters of the type.  */
  if (DECL_SIZE (decl) && !poly_int_tree_p (DECL_SIZE (decl)))
    {
      /* Add the pointer replacement variable as PRIVATE if the variable
	 replacement is private, else FIRSTPRIVATE since we'll need the
	 address of the original variable either for SHARED, or for the
	 copy into or out of the context.  */
      if (!(flags & GOVD_LOCAL) && ctx->region_type != ORT_TASKGROUP)
	{
	  if (flags & GOVD_MAP)
	    nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
	  else if (flags & GOVD_PRIVATE)
	    nflags = GOVD_PRIVATE;
	  else if (((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
		    && (flags & GOVD_FIRSTPRIVATE))
		   || (ctx->region_type == ORT_TARGET_DATA
		       && (flags & GOVD_DATA_SHARE_CLASS) == 0))
	    nflags = GOVD_PRIVATE | GOVD_EXPLICIT;
	  else
	    nflags = GOVD_FIRSTPRIVATE;
	  nflags |= flags & GOVD_SEEN;
	  /* The VLA's DECL_VALUE_EXPR is *ptr; recurse on PTR itself.  */
	  t = DECL_VALUE_EXPR (decl);
	  gcc_assert (INDIRECT_REF_P (t));
	  t = TREE_OPERAND (t, 0);
	  gcc_assert (DECL_P (t));
	  omp_add_variable (ctx, t, nflags);
	}

      /* Add all of the variable and type parameters (which should have
	 been gimplified to a formal temporary) as FIRSTPRIVATE.  */
      omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
      omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));

      /* The variable-sized variable itself is never SHARED, only some form
	 of PRIVATE.  The sharing would take place via the pointer variable
	 which we remapped above.  */
      if (flags & GOVD_SHARED)
	flags = GOVD_SHARED | GOVD_DEBUG_PRIVATE
		| (flags & (GOVD_SEEN | GOVD_EXPLICIT));

      /* We're going to make use of the TYPE_SIZE_UNIT at least in the
	 alloca statement we generate for the variable, so make sure it
	 is available.  This isn't automatically needed for the SHARED
	 case, since we won't be allocating local storage then.
	 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
	 in this case omp_notice_variable will be called later
	 on when it is gimplified.  */
      else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
	       && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
	omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
    }
  else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
	   && omp_privatize_by_reference (decl))
    {
      /* Privatization by reference: the referenced type's sizes need to
	 be available in the region as well.  */
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));

      /* Similar to the direct variable sized case above, we'll need the
	 size of references being privatized.  */
      if ((flags & GOVD_SHARED) == 0)
	{
	  t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
	  if (t && DECL_P (t))
	    omp_notice_variable (ctx, t, true);
	}
    }

  /* Record (or augment) the entry for DECL itself.  */
  if (n != NULL)
    n->value |= flags;
  else
    splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);

  /* For reductions clauses in OpenACC loop directives, by default create a
     copy clause on the enclosing parallel construct for carrying back the
     results.  */
  if (ctx->region_type == ORT_ACC && (flags & GOVD_REDUCTION))
    {
      struct gimplify_omp_ctx *outer_ctx = ctx->outer_context;
      while (outer_ctx)
	{
	  n = splay_tree_lookup (outer_ctx->variables, (splay_tree_key)decl);
	  if (n != NULL)
	    {
	      /* Ignore local variables and explicitly declared clauses.  */
	      if (n->value & (GOVD_LOCAL | GOVD_EXPLICIT))
		break;
	      else if (outer_ctx->region_type == ORT_ACC_KERNELS)
		{
		  /* According to the OpenACC spec, such a reduction variable
		     should already have a copy map on a kernels construct,
		     verify that here.  */
		  gcc_assert (!(n->value & GOVD_FIRSTPRIVATE)
			      && (n->value & GOVD_MAP));
		}
	      else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
		{
		  /* Remove firstprivate and make it a copy map.  */
		  n->value &= ~GOVD_FIRSTPRIVATE;
		  n->value |= GOVD_MAP;
		}
	    }
	  else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
	    {
	      /* No entry yet on the enclosing parallel: add the copy map
	         there and stop.  */
	      splay_tree_insert (outer_ctx->variables, (splay_tree_key)decl,
				 GOVD_MAP | GOVD_SEEN);
	      break;
	    }
	  outer_ctx = outer_ctx->outer_context;
	}
    }
}
8933 :
8934 : /* Notice a threadprivate variable DECL used in OMP context CTX.
8935 : This just prints out diagnostics about threadprivate variable uses
8936 : in untied tasks. If DECL2 is non-NULL, prevent this warning
8937 : on that variable. */
8938 :
static bool
omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
				   tree decl2)
{
  splay_tree_node n;
  struct gimplify_omp_ctx *octx;

  /* Diagnose uses inside target regions or order(concurrent) regions in
     any enclosing context.  */
  for (octx = ctx; octx; octx = octx->outer_context)
    if ((octx->region_type & ORT_TARGET) != 0
	|| octx->order_concurrent)
      {
	n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
	if (n == NULL)
	  {
	    if (octx->order_concurrent)
	      {
		error ("threadprivate variable %qE used in a region with"
		       " %<order(concurrent)%> clause", DECL_NAME (decl));
		inform (octx->location, "enclosing region");
	      }
	    else
	      {
		error ("threadprivate variable %qE used in target region",
		       DECL_NAME (decl));
		inform (octx->location, "enclosing target region");
	      }
	    /* Insert a dummy entry so the error is reported only once.  */
	    splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
	  }
	if (decl2)
	  splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
      }

  /* Only untied tasks additionally get the "used in untied task"
     diagnostic below.  */
  if (ctx->region_type != ORT_UNTIED_TASK)
    return false;
  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n == NULL)
    {
      error ("threadprivate variable %qE used in untied task",
	     DECL_NAME (decl));
      inform (ctx->location, "enclosing task");
      /* Again, remember DECL to avoid duplicate diagnostics.  */
      splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
    }
  if (decl2)
    splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
  return false;
}
8985 :
8986 : /* Return true if global var DECL is device resident. */
8987 :
8988 : static bool
8989 641 : device_resident_p (tree decl)
8990 : {
8991 641 : tree attr = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl));
8992 :
8993 641 : if (!attr)
8994 : return false;
8995 :
8996 0 : for (tree t = TREE_VALUE (attr); t; t = TREE_PURPOSE (t))
8997 : {
8998 0 : tree c = TREE_VALUE (t);
8999 0 : if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DEVICE_RESIDENT)
9000 : return true;
9001 : }
9002 :
9003 : return false;
9004 : }
9005 :
9006 : /* Return true if DECL has an ACC DECLARE attribute. */
9007 :
9008 : static bool
9009 9097 : is_oacc_declared (tree decl)
9010 : {
9011 9097 : tree t = TREE_CODE (decl) == MEM_REF ? TREE_OPERAND (decl, 0) : decl;
9012 9097 : tree declared = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t));
9013 9097 : return declared != NULL_TREE;
9014 : }
9015 :
9016 : /* Determine outer default flags for DECL mentioned in an OMP region
9017 : but not declared in an enclosing clause.
9018 :
9019 : ??? Some compiler-generated variables (like SAVE_EXPRs) could be
9020 : remapped firstprivate instead of shared. To some extent this is
9021 : addressed in omp_firstprivatize_type_sizes, but not
9022 : effectively. */
9023 :
static unsigned
omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl,
		    bool in_code, unsigned flags)
{
  enum omp_clause_default_kind default_kind = ctx->default_kind;
  enum omp_clause_default_kind kind;

  /* A frontend-predetermined sharing overrides the region's default.  */
  kind = lang_hooks.decls.omp_predetermined_sharing (decl);
  if (ctx->region_type & ORT_TASK)
    {
      tree detach_clause = omp_find_clause (ctx->clauses, OMP_CLAUSE_DETACH);

      /* The event-handle specified by a detach clause should always be firstprivate,
	 regardless of the current default.  */
      if (detach_clause && OMP_CLAUSE_DECL (detach_clause) == decl)
	kind = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
    }
  if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    default_kind = kind;
  else if (VAR_P (decl) && TREE_STATIC (decl) && DECL_IN_CONSTANT_POOL (decl))
    default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  /* For C/C++ default({,first}private), variables with static storage duration
     declared in a namespace or global scope and referenced in construct
     must be explicitly specified, i.e. acts as default(none).  */
  else if ((default_kind == OMP_CLAUSE_DEFAULT_PRIVATE
	    || default_kind == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
	   && VAR_P (decl)
	   && is_global_var (decl)
	   && (DECL_FILE_SCOPE_P (decl)
	       || (DECL_CONTEXT (decl)
		   && TREE_CODE (DECL_CONTEXT (decl)) == NAMESPACE_DECL))
	   && !lang_GNU_Fortran ())
    default_kind = OMP_CLAUSE_DEFAULT_NONE;

  switch (default_kind)
    {
    case OMP_CLAUSE_DEFAULT_NONE:
      {
	const char *rtype;

	if (ctx->region_type & ORT_PARALLEL)
	  rtype = "parallel";
	else if ((ctx->region_type & ORT_TASKLOOP) == ORT_TASKLOOP)
	  rtype = "taskloop";
	else if (ctx->region_type & ORT_TASK)
	  rtype = "task";
	else if (ctx->region_type & ORT_TEAMS)
	  rtype = "teams";
	else
	  gcc_unreachable ();

	error ("%qE not specified in enclosing %qs",
	       DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype);
	inform (ctx->location, "enclosing %qs", rtype);
      }
      /* After diagnosing, fall through and treat the variable as shared
	 so compilation can continue.  */
      /* FALLTHRU */
    case OMP_CLAUSE_DEFAULT_SHARED:
      flags |= GOVD_SHARED;
      break;
    case OMP_CLAUSE_DEFAULT_PRIVATE:
      flags |= GOVD_PRIVATE;
      break;
    case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
      flags |= GOVD_FIRSTPRIVATE;
      break;
    case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
      /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED.  */
      gcc_assert ((ctx->region_type & ORT_TASK) != 0);
      if (struct gimplify_omp_ctx *octx = ctx->outer_context)
	{
	  /* Walk outward: firstprivate if DECL is non-shared in some
	     enclosing context, shared if an enclosing parallel/teams is
	     reached first.  */
	  omp_notice_variable (octx, decl, in_code);
	  for (; octx; octx = octx->outer_context)
	    {
	      splay_tree_node n2;

	      n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
	      if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0
		  && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0))
		continue;
	      if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
		{
		  flags |= GOVD_FIRSTPRIVATE;
		  goto found_outer;
		}
	      if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
		{
		  flags |= GOVD_SHARED;
		  goto found_outer;
		}
	    }
	}

      /* No enclosing context decided: parameters and function-local
	 variables become firstprivate, everything else shared.  */
      if (TREE_CODE (decl) == PARM_DECL
	  || (!is_global_var (decl)
	      && DECL_CONTEXT (decl) == current_function_decl))
	flags |= GOVD_FIRSTPRIVATE;
      else
	flags |= GOVD_SHARED;
    found_outer:
      break;

    default:
      gcc_unreachable ();
    }

  return flags;
}
9131 :
9132 : /* Return string name for types of OpenACC constructs from ORT_* values. */
9133 :
9134 : static const char *
9135 570 : oacc_region_type_name (enum omp_region_type region_type)
9136 : {
9137 570 : switch (region_type)
9138 : {
9139 : case ORT_ACC_DATA:
9140 : return "data";
9141 140 : case ORT_ACC_PARALLEL:
9142 140 : return "parallel";
9143 140 : case ORT_ACC_KERNELS:
9144 140 : return "kernels";
9145 140 : case ORT_ACC_SERIAL:
9146 140 : return "serial";
9147 0 : default:
9148 0 : gcc_unreachable ();
9149 : }
9150 : }
9151 :
9152 : /* Determine outer default flags for DECL mentioned in an OACC region
9153 : but not declared in an enclosing clause. */
9154 :
static unsigned
oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags)
{
  struct gimplify_omp_ctx *ctx_default = ctx;
  /* If no 'default' clause appears on this compute construct... */
  if (ctx_default->default_kind == OMP_CLAUSE_DEFAULT_SHARED)
    {
      /* ..., see if one appears on a lexically containing 'data'
	 construct.  */
      while ((ctx_default = ctx_default->outer_context))
	{
	  if (ctx_default->region_type == ORT_ACC_DATA
	      && ctx_default->default_kind != OMP_CLAUSE_DEFAULT_SHARED)
	    break;
	}
      /* If not, reset.  */
      if (!ctx_default)
	ctx_default = ctx;
    }

  bool on_device = false;
  bool is_private = false;
  bool declared = is_oacc_declared (decl);
  tree type = TREE_TYPE (decl);

  /* Look through privatization-by-reference to the underlying type.  */
  if (omp_privatize_by_reference (decl))
    type = TREE_TYPE (type);

  /* For Fortran COMMON blocks, only used variables in those blocks are
     transferred and remapped.  The block itself will have a private clause to
     avoid transferring the data twice.
     The hook evaluates to false by default.  For a variable in Fortran's COMMON
     or EQUIVALENCE block, returns 'true' (as we have shared=false) - as only
     the variables in such a COMMON/EQUIVALENCE block shall be privatized not
     the whole block.  For C++ and Fortran, it can also be true under certain
     other conditions, if DECL_HAS_VALUE_EXPR.  */
  if (RECORD_OR_UNION_TYPE_P (type))
    is_private = lang_hooks.decls.omp_disregard_value_expr (decl, false);

  /* Globals that are device-resident need only a to-only mapping.  */
  if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0
      && is_global_var (decl)
      && device_resident_p (decl)
      && !is_private)
    {
      on_device = true;
      flags |= GOVD_MAP_TO_ONLY;
    }

  switch (ctx->region_type)
    {
    case ORT_ACC_KERNELS:
      if (is_private)
	flags |= GOVD_FIRSTPRIVATE;
      else if (AGGREGATE_TYPE_P (type))
	{
	  /* Aggregates default to 'present_or_copy', or 'present'.  */
	  if (ctx_default->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
	    flags |= GOVD_MAP;
	  else
	    flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
	}
      else
	/* Scalars default to 'copy'.  */
	flags |= GOVD_MAP | GOVD_MAP_FORCE;

      break;

    case ORT_ACC_PARALLEL:
    case ORT_ACC_SERIAL:
      if (is_private)
	flags |= GOVD_FIRSTPRIVATE;
      else if (on_device || declared)
	flags |= GOVD_MAP;
      else if (AGGREGATE_TYPE_P (type))
	{
	  /* Aggregates default to 'present_or_copy', or 'present'.  */
	  if (ctx_default->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
	    flags |= GOVD_MAP;
	  else
	    flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
	}
      else
	/* Scalars default to 'firstprivate'.  */
	flags |= GOVD_FIRSTPRIVATE;

      break;

    default:
      gcc_unreachable ();
    }

  /* Under default(none), complain about any non-artificial decl that was
     not listed explicitly; point at both the compute construct and the
     construct that carried the 'default' clause when they differ.  */
  if (DECL_ARTIFICIAL (decl))
    ; /* We can get compiler-generated decls, and should not complain
	 about them.  */
  else if (ctx_default->default_kind == OMP_CLAUSE_DEFAULT_NONE)
    {
      error ("%qE not specified in enclosing OpenACC %qs construct",
	     DECL_NAME (lang_hooks.decls.omp_report_decl (decl)),
	     oacc_region_type_name (ctx->region_type));
      if (ctx_default != ctx)
	inform (ctx->location, "enclosing OpenACC %qs construct and",
		oacc_region_type_name (ctx->region_type));
      inform (ctx_default->location,
	      "enclosing OpenACC %qs construct with %qs clause",
	      oacc_region_type_name (ctx_default->region_type),
	      "default(none)");
    }
  else if (ctx_default->default_kind == OMP_CLAUSE_DEFAULT_PRESENT)
    ; /* Handled above.  */
  else
    gcc_checking_assert (ctx_default->default_kind == OMP_CLAUSE_DEFAULT_SHARED);

  return flags;
}
9269 :
9270 : /* Record the fact that DECL was used within the OMP context CTX.
9271 : IN_CODE is true when real code uses DECL, and false when we should
9272 : merely emit default(none) errors. Return true if DECL is going to
9273 : be remapped and thus DECL shouldn't be gimplified into its
9274 : DECL_VALUE_EXPR (if any). */
9275 :
9276 : static bool
9277 3878670 : omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
9278 : {
9279 3878670 : splay_tree_node n;
9280 3878670 : unsigned flags = in_code ? GOVD_SEEN : 0;
9281 3878670 : bool ret = false, shared;
9282 :
9283 3878670 : if (error_operand_p (decl))
9284 : return false;
9285 :
9286 3878670 : if (DECL_ARTIFICIAL (decl))
9287 : {
9288 2212971 : tree attr = lookup_attribute ("omp allocate var", DECL_ATTRIBUTES (decl));
9289 2212971 : if (attr)
9290 543 : decl = TREE_VALUE (TREE_VALUE (attr));
9291 : }
9292 :
9293 3878670 : if (ctx->region_type == ORT_NONE)
9294 248 : return lang_hooks.decls.omp_disregard_value_expr (decl, false);
9295 :
9296 3878422 : if (is_global_var (decl))
9297 : {
9298 : /* Threadprivate variables are predetermined. */
9299 501450 : if (DECL_THREAD_LOCAL_P (decl))
9300 13565 : return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
9301 :
9302 487885 : if (DECL_HAS_VALUE_EXPR_P (decl))
9303 : {
9304 4380 : if (ctx->region_type & ORT_ACC)
9305 : /* For OpenACC, defer expansion of value to avoid transfering
9306 : privatized common block data instead of im-/explicitly
9307 : transferred variables which are in common blocks. */
9308 : ;
9309 : else
9310 : {
9311 2104 : tree value = get_base_address (DECL_VALUE_EXPR (decl));
9312 :
9313 2104 : if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
9314 618 : return omp_notice_threadprivate_variable (ctx, decl, value);
9315 : }
9316 : }
9317 :
9318 487267 : if (gimplify_omp_ctxp->outer_context == NULL
9319 113376 : && VAR_P (decl)
9320 600643 : && oacc_get_fn_attrib (current_function_decl))
9321 : {
9322 765 : location_t loc = DECL_SOURCE_LOCATION (decl);
9323 :
9324 765 : if (lookup_attribute ("omp declare target link",
9325 765 : DECL_ATTRIBUTES (decl)))
9326 : {
9327 48 : error_at (loc,
9328 : "%qE with %<link%> clause used in %<routine%> function",
9329 24 : DECL_NAME (decl));
9330 24 : return false;
9331 : }
9332 741 : else if (!lookup_attribute ("omp declare target",
9333 741 : DECL_ATTRIBUTES (decl)))
9334 : {
9335 132 : error_at (loc,
9336 : "%qE requires a %<declare%> directive for use "
9337 66 : "in a %<routine%> function", DECL_NAME (decl));
9338 66 : return false;
9339 : }
9340 : }
9341 : }
9342 :
9343 3864149 : n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
9344 3864149 : if ((ctx->region_type & ORT_TARGET) != 0)
9345 : {
9346 824411 : if (n == NULL)
9347 : {
9348 44446 : unsigned nflags = flags;
9349 44446 : if ((ctx->region_type & ORT_ACC) == 0)
9350 : {
9351 33496 : bool is_declare_target = false;
9352 33496 : if (is_global_var (decl)
9353 33496 : && varpool_node::get_create (decl)->offloadable)
9354 : {
9355 7002 : struct gimplify_omp_ctx *octx;
9356 7002 : for (octx = ctx->outer_context;
9357 7010 : octx; octx = octx->outer_context)
9358 : {
9359 9 : n = splay_tree_lookup (octx->variables,
9360 : (splay_tree_key)decl);
9361 9 : if (n
9362 9 : && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED
9363 9 : && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
9364 : break;
9365 : }
9366 7002 : is_declare_target = octx == NULL;
9367 : }
9368 7002 : if (!is_declare_target)
9369 : {
9370 26495 : int gdmk;
9371 26495 : enum omp_clause_defaultmap_kind kind;
9372 26495 : if (lang_hooks.decls.omp_allocatable_p (decl))
9373 : gdmk = GDMK_ALLOCATABLE;
9374 26151 : else if (lang_hooks.decls.omp_scalar_target_p (decl))
9375 : gdmk = GDMK_SCALAR_TARGET;
9376 26076 : else if (lang_hooks.decls.omp_scalar_p (decl, false))
9377 : gdmk = GDMK_SCALAR;
9378 10939 : else if (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
9379 10939 : || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
9380 2956 : && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
9381 : == POINTER_TYPE)))
9382 : gdmk = GDMK_POINTER;
9383 : else
9384 : gdmk = GDMK_AGGREGATE;
9385 26495 : kind = lang_hooks.decls.omp_predetermined_mapping (decl);
9386 26495 : if (kind != OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED)
9387 : {
9388 945 : if (kind == OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE)
9389 101 : nflags |= GOVD_FIRSTPRIVATE;
9390 844 : else if (kind == OMP_CLAUSE_DEFAULTMAP_TO)
9391 844 : nflags |= GOVD_MAP | GOVD_MAP_TO_ONLY;
9392 : else
9393 0 : gcc_unreachable ();
9394 : }
9395 25550 : else if (ctx->defaultmap[gdmk] == 0)
9396 : {
9397 77 : tree d = lang_hooks.decls.omp_report_decl (decl);
9398 77 : error ("%qE not specified in enclosing %<target%>",
9399 77 : DECL_NAME (d));
9400 77 : inform (ctx->location, "enclosing %<target%>");
9401 : }
9402 25473 : else if (ctx->defaultmap[gdmk]
9403 25473 : & (GOVD_MAP_0LEN_ARRAY | GOVD_FIRSTPRIVATE))
9404 8977 : nflags |= ctx->defaultmap[gdmk];
9405 16496 : else if (ctx->defaultmap[gdmk] & GOVD_MAP_FORCE_PRESENT)
9406 : {
9407 42 : gcc_assert (ctx->defaultmap[gdmk] & GOVD_MAP);
9408 42 : nflags |= ctx->defaultmap[gdmk] | GOVD_MAP_ALLOC_ONLY;
9409 : }
9410 : else
9411 : {
9412 16454 : gcc_assert (ctx->defaultmap[gdmk] & GOVD_MAP);
9413 16454 : nflags |= ctx->defaultmap[gdmk] & ~GOVD_MAP;
9414 : }
9415 : }
9416 : }
9417 :
9418 44446 : struct gimplify_omp_ctx *octx = ctx->outer_context;
9419 44446 : if ((ctx->region_type & ORT_ACC) && octx)
9420 : {
9421 : /* Look in outer OpenACC contexts, to see if there's a
9422 : data attribute for this variable. */
9423 3535 : omp_notice_variable (octx, decl, in_code);
9424 :
9425 5307 : for (; octx; octx = octx->outer_context)
9426 : {
9427 3883 : if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)))
9428 : break;
9429 3883 : splay_tree_node n2
9430 3883 : = splay_tree_lookup (octx->variables,
9431 : (splay_tree_key) decl);
9432 3883 : if (n2)
9433 : {
9434 2111 : if (octx->region_type == ORT_ACC_HOST_DATA)
9435 4 : error ("variable %qE declared in enclosing "
9436 4 : "%<host_data%> region", DECL_NAME (decl));
9437 2111 : nflags |= GOVD_MAP;
9438 2111 : if (octx->region_type == ORT_ACC_DATA
9439 2107 : && (n2->value & GOVD_MAP_0LEN_ARRAY))
9440 288 : nflags |= GOVD_MAP_0LEN_ARRAY;
9441 2111 : goto found_outer;
9442 : }
9443 : }
9444 : }
9445 :
9446 42335 : if ((nflags & ~(GOVD_MAP_TO_ONLY | GOVD_MAP_FROM_ONLY
9447 : | GOVD_MAP_ALLOC_ONLY)) == flags)
9448 : {
9449 32371 : tree type = TREE_TYPE (decl);
9450 32371 : location_t loc = DECL_SOURCE_LOCATION (decl);
9451 :
9452 32371 : if (gimplify_omp_ctxp->target_firstprivatize_array_bases
9453 32371 : && omp_privatize_by_reference (decl))
9454 28 : type = TREE_TYPE (type);
9455 :
9456 32371 : if (!verify_type_context (loc, TCTX_OMP_MAP_IMP_REF, type))
9457 : /* Check if TYPE can appear in a target region.
9458 : verify_type_context has already issued an error if it
9459 : can't. */
9460 0 : nflags |= GOVD_MAP | GOVD_EXPLICIT;
9461 32371 : else if (!omp_mappable_type (type))
9462 : {
9463 8 : error ("%qD referenced in target region does not have "
9464 : "a mappable type", decl);
9465 8 : nflags |= GOVD_MAP | GOVD_EXPLICIT;
9466 : }
9467 : else
9468 : {
9469 32363 : if ((ctx->region_type & ORT_ACC) != 0)
9470 8835 : nflags = oacc_default_clause (ctx, decl, flags);
9471 : else
9472 23528 : nflags |= GOVD_MAP;
9473 : }
9474 : }
9475 9964 : found_outer:
9476 44446 : omp_add_variable (ctx, decl, nflags);
9477 44446 : if (ctx->region_type & ORT_ACC)
9478 : /* For OpenACC, as remarked above, defer expansion. */
9479 : shared = false;
9480 : else
9481 33496 : shared = (nflags & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0;
9482 44446 : ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
9483 : }
9484 : else
9485 : {
9486 779965 : if (ctx->region_type & ORT_ACC)
9487 : /* For OpenACC, as remarked above, defer expansion. */
9488 : shared = false;
9489 : else
9490 483831 : shared = ((n->value | flags)
9491 483831 : & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0;
9492 779965 : ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
9493 : /* If nothing changed, there's nothing left to do. */
9494 779965 : if ((n->value & flags) == flags)
9495 : return ret;
9496 16041 : flags |= n->value;
9497 16041 : n->value = flags;
9498 : }
9499 60487 : goto do_outer;
9500 : }
9501 :
9502 3039738 : if (n == NULL)
9503 : {
9504 1181560 : if (ctx->region_type == ORT_WORKSHARE
9505 : || ctx->region_type == ORT_TASKGROUP
9506 560593 : || ctx->region_type == ORT_SIMD
9507 288550 : || ctx->region_type == ORT_ACC
9508 92325 : || (ctx->region_type & ORT_TARGET_DATA) != 0)
9509 1133683 : goto do_outer;
9510 :
9511 47877 : flags = omp_default_clause (ctx, decl, in_code, flags);
9512 :
9513 47877 : if ((flags & GOVD_PRIVATE)
9514 47877 : && lang_hooks.decls.omp_private_outer_ref (decl))
9515 6 : flags |= GOVD_PRIVATE_OUTER_REF;
9516 :
9517 47877 : omp_add_variable (ctx, decl, flags);
9518 :
9519 47877 : shared = (flags & GOVD_SHARED) != 0;
9520 47877 : ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
9521 47877 : goto do_outer;
9522 : }
9523 :
9524 : /* Don't mark as GOVD_SEEN addressable temporaries seen only in simd
9525 : lb, b or incr expressions, those shouldn't be turned into simd arrays. */
9526 1858178 : if (ctx->region_type == ORT_SIMD
9527 151155 : && ctx->in_for_exprs
9528 70 : && ((n->value & (GOVD_PRIVATE | GOVD_SEEN | GOVD_EXPLICIT))
9529 : == GOVD_PRIVATE))
9530 1858178 : flags &= ~GOVD_SEEN;
9531 :
9532 1858178 : if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
9533 31792 : && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
9534 1889931 : && DECL_SIZE (decl))
9535 : {
9536 31752 : tree size;
9537 31752 : if (!poly_int_tree_p (DECL_SIZE (decl)))
9538 : {
9539 148 : splay_tree_node n2;
9540 148 : tree t = DECL_VALUE_EXPR (decl);
9541 148 : gcc_assert (INDIRECT_REF_P (t));
9542 148 : t = TREE_OPERAND (t, 0);
9543 148 : gcc_assert (DECL_P (t));
9544 148 : n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
9545 148 : n2->value |= GOVD_SEEN;
9546 : }
9547 31604 : else if (omp_privatize_by_reference (decl)
9548 4180 : && (size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))))
9549 35747 : && !poly_int_tree_p (size))
9550 : {
9551 1324 : splay_tree_node n2;
9552 1324 : gcc_assert (DECL_P (size));
9553 1324 : n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) size);
9554 1324 : if (n2)
9555 617 : omp_notice_variable (ctx, size, true);
9556 : }
9557 : }
9558 :
9559 1858178 : if (ctx->region_type & ORT_ACC)
9560 : /* For OpenACC, as remarked above, defer expansion. */
9561 : shared = false;
9562 : else
9563 1676361 : shared = ((flags | n->value) & GOVD_SHARED) != 0;
9564 1858178 : ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
9565 :
9566 : /* If nothing changed, there's nothing left to do. */
9567 1858178 : if ((n->value & flags) == flags)
9568 : return ret;
9569 31753 : flags |= n->value;
9570 31753 : n->value = flags;
9571 :
9572 1273800 : do_outer:
9573 : /* If the variable is private in the current context, then we don't
9574 : need to propagate anything to an outer context. */
9575 1273800 : if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
9576 : return ret;
9577 1263417 : if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
9578 : == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
9579 : return ret;
9580 1263357 : if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
9581 : | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
9582 : == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
9583 : return ret;
9584 1263357 : if (ctx->outer_context
9585 1263357 : && omp_notice_variable (ctx->outer_context, decl, in_code))
9586 : return true;
9587 : return ret;
9588 : }
9589 :
9590 : /* Verify that DECL is private within CTX. If there's specific information
9591 : to the contrary in the innermost scope, generate an error. */
9592 :
static bool
omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
{
  splay_tree_node n;

  /* Look DECL up in this context only; outer contexts are handled by
     the tail recursion at the bottom.  */
  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n != NULL)
    {
      if (n->value & GOVD_SHARED)
	{
	  /* Only diagnose in the innermost context; an enclosing
	     context merely reports "not private" to the caller.  */
	  if (ctx == gimplify_omp_ctxp)
	    {
	      /* NOTE(review): SIMD distinguishes the wording of the
		 diagnostic; it is also compared against 1 below —
		 presumably it is a tri-state, verify at the callers.  */
	      if (simd)
		error ("iteration variable %qE is predetermined linear",
		       DECL_NAME (decl));
	      else
		error ("iteration variable %qE should be private",
		       DECL_NAME (decl));
	      /* Downgrade to private to avoid cascading diagnostics.  */
	      n->value = GOVD_PRIVATE;
	      return true;
	    }
	  else
	    return false;
	}
      /* Explicitly-listed variables: check the clause kind is compatible
	 with an iteration variable, but only when CTX is the innermost
	 context or the parallel a combined construct binds to.  */
      else if ((n->value & GOVD_EXPLICIT) != 0
	       && (ctx == gimplify_omp_ctxp
		   || (ctx->region_type == ORT_COMBINED_PARALLEL
		       && gimplify_omp_ctxp->outer_context == ctx)))
	{
	  if ((n->value & GOVD_FIRSTPRIVATE) != 0)
	    error ("iteration variable %qE should not be firstprivate",
		   DECL_NAME (decl));
	  else if ((n->value & GOVD_REDUCTION) != 0)
	    error ("iteration variable %qE should not be reduction",
		   DECL_NAME (decl));
	  else if (simd != 1 && (n->value & GOVD_LINEAR) != 0)
	    error ("iteration variable %qE should not be linear",
		   DECL_NAME (decl));
	}
      /* Treat DECL as private when found in the innermost context or in
	 the parallel part of a combined parallel construct.  */
      return (ctx == gimplify_omp_ctxp
	      || (ctx->region_type == ORT_COMBINED_PARALLEL
		  && gimplify_omp_ctxp->outer_context == ctx));
    }

  /* DECL is not mentioned here.  Only worksharing-ish regions are
     transparent enough to keep searching outwards.  */
  if (ctx->region_type != ORT_WORKSHARE
      && ctx->region_type != ORT_TASKGROUP
      && ctx->region_type != ORT_SIMD
      && ctx->region_type != ORT_ACC)
    return false;
  else if (ctx->outer_context)
    return omp_is_private (ctx->outer_context, decl, simd);
  return false;
}
9646 :
9647 : /* Return true if DECL is private within a parallel region
9648 : that binds to the current construct's context or in parallel
9649 : region's REDUCTION clause. */
9650 :
static bool
omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
{
  splay_tree_node n;

  /* Walk outwards through the enclosing contexts until DECL's data
     sharing can be decided.  */
  do
    {
      ctx = ctx->outer_context;
      if (ctx == NULL)
	{
	  /* Ran out of contexts: globals are shared ...  */
	  if (is_global_var (decl))
	    return false;

	  /* References might be private, but might be shared too,
	     when checking for copyprivate, assume they might be
	     private, otherwise assume they might be shared.  */
	  if (copyprivate)
	    return true;

	  if (omp_privatize_by_reference (decl))
	    return false;

	  /* Treat C++ privatized non-static data members outside
	     of the privatization the same.  */
	  if (omp_member_access_dummy_var (decl))
	    return false;

	  /* ... but an unreferenced local is private.  */
	  return true;
	}

      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);

      /* Target regions without an explicit data-sharing class for DECL:
	 keep looking through data regions; a mapped variable in an
	 actual target region is not private.  */
      if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
	  && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
	{
	  if ((ctx->region_type & ORT_TARGET_DATA) != 0
	      || n == NULL
	      || (n->value & GOVD_MAP) == 0)
	    continue;
	  return false;
	}

      if (n != NULL)
	{
	  /* A GOVD_LOCAL member-access dummy var does not count as a
	     privatization of the underlying member.  */
	  if ((n->value & GOVD_LOCAL) != 0
	      && omp_member_access_dummy_var (decl))
	    return false;
	  return (n->value & GOVD_SHARED) == 0;
	}

      /* Transparent regions: keep searching outwards.  */
      if (ctx->region_type == ORT_WORKSHARE
	  || ctx->region_type == ORT_TASKGROUP
	  || ctx->region_type == ORT_SIMD
	  || ctx->region_type == ORT_ACC)
	continue;

      /* Any other region kind (e.g. a parallel) stops the search.  */
      break;
    }
  while (1);
  return false;
}
9712 :
9713 : /* Callback for walk_tree to find a DECL_EXPR for the given DECL. */
9714 :
9715 : static tree
9716 3148 : find_decl_expr (tree *tp, int *walk_subtrees, void *data)
9717 : {
9718 3148 : tree t = *tp;
9719 :
9720 : /* If this node has been visited, unmark it and keep looking. */
9721 3148 : if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data)
9722 : return t;
9723 :
9724 2724 : if (IS_TYPE_OR_DECL_P (t))
9725 452 : *walk_subtrees = 0;
9726 : return NULL_TREE;
9727 : }
9728 :
9729 :
9730 : /* Gimplify the affinity clause but effectively ignore it.
9731 : Generate:
9732 : var = begin;
9733 : if ((step > 0) ? var <= end : var > end)
9734 : locator_var_expr; */
9735 :
static void
gimplify_omp_affinity (tree *list_p, gimple_seq *pre_p)
{
  /* Consecutive clauses that share one iterator definition reuse a
     single BIND_EXPR/loop skeleton; LAST_ITER caches the iterator of
     the most recently built one.  */
  tree last_iter = NULL_TREE;
  tree last_bind = NULL_TREE;
  tree label = NULL_TREE;
  tree *last_body = NULL;
  for (tree c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_AFFINITY)
      {
	tree t = OMP_CLAUSE_DECL (c);
	if (OMP_ITERATOR_DECL_P (t))
	  {
	    /* TREE_VALUE == null_pointer_node marks an iterator clause
	       that has already been consumed below.  */
	    if (TREE_VALUE (t) == null_pointer_node)
	      continue;
	    if (TREE_PURPOSE (t) != last_iter)
	      {
		/* Starting a new iterator: flush the previous bind.  */
		if (last_bind)
		  {
		    append_to_statement_list (label, last_body);
		    gimplify_and_add (last_bind, pre_p);
		    last_bind = NULL_TREE;
		  }
		/* Gimplify the begin/end/step/orig-step slots of each
		   iterator TREE_VEC up front.  */
		for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
		  {
		    if (gimplify_expr (&TREE_VEC_ELT (it, 1), pre_p, NULL,
				       is_gimple_val, fb_rvalue) == GS_ERROR
			|| gimplify_expr (&TREE_VEC_ELT (it, 2), pre_p, NULL,
					  is_gimple_val, fb_rvalue) == GS_ERROR
			|| gimplify_expr (&TREE_VEC_ELT (it, 3), pre_p, NULL,
					  is_gimple_val, fb_rvalue) == GS_ERROR
			|| (gimplify_expr (&TREE_VEC_ELT (it, 4), pre_p, NULL,
					   is_gimple_val, fb_rvalue)
			    == GS_ERROR))
		      return;
		  }
		last_iter = TREE_PURPOSE (t);
		/* Slot 5 of the iterator TREE_VEC holds the BLOCK with
		   the iteration variables.  */
		tree block = TREE_VEC_ELT (TREE_PURPOSE (t), 5);
		last_bind = build3 (BIND_EXPR, void_type_node, BLOCK_VARS (block),
				    NULL, block);
		last_body = &BIND_EXPR_BODY (last_bind);
		tree cond = NULL_TREE;
		location_t loc = OMP_CLAUSE_LOCATION (c);
		/* Emit "var = begin" for every iterator and build the
		   combined range-nonempty condition
		   step > 0 ? var <= end : var > end.  */
		for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
		  {
		    tree var = TREE_VEC_ELT (it, 0);
		    tree begin = TREE_VEC_ELT (it, 1);
		    tree end = TREE_VEC_ELT (it, 2);
		    tree step = TREE_VEC_ELT (it, 3);
		    loc = DECL_SOURCE_LOCATION (var);
		    tree tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
					   var, begin);
		    append_to_statement_list_force (tem, last_body);

		    tree cond1 = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
						  step, build_zero_cst (TREE_TYPE (step)));
		    tree cond2 = fold_build2_loc (loc, LE_EXPR, boolean_type_node,
						  var, end);
		    tree cond3 = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
						  var, end);
		    cond1 = fold_build3_loc (loc, COND_EXPR, boolean_type_node,
					     cond1, cond2, cond3);
		    if (cond)
		      cond = fold_build2_loc (loc, TRUTH_AND_EXPR,
					      boolean_type_node, cond, cond1);
		    else
		      cond = cond1;
		  }
		/* Skip past the locator expressions when the iteration
		   space is empty.  */
		tree cont_label = create_artificial_label (loc);
		label = build1 (LABEL_EXPR, void_type_node, cont_label);
		tree tem = fold_build3_loc (loc, COND_EXPR, void_type_node, cond,
					    void_node,
					    build_and_jump (&cont_label));
		append_to_statement_list_force (tem, last_body);
	      }
	    /* A COMPOUND_EXPR carries side effects to evaluate before
	       the locator itself.  */
	    if (TREE_CODE (TREE_VALUE (t)) == COMPOUND_EXPR)
	      {
		append_to_statement_list (TREE_OPERAND (TREE_VALUE (t), 0),
					  last_body);
		TREE_VALUE (t) = TREE_OPERAND (TREE_VALUE (t), 1);
	      }
	    if (error_operand_p (TREE_VALUE (t)))
	      return;
	    append_to_statement_list_force (TREE_VALUE (t), last_body);
	    /* Mark this clause as consumed (see the check above).  */
	    TREE_VALUE (t) = null_pointer_node;
	  }
	else
	  {
	    /* Non-iterator affinity clause: gimplify the locator for its
	       side effects only.  */
	    if (last_bind)
	      {
		append_to_statement_list (label, last_body);
		gimplify_and_add (last_bind, pre_p);
		last_bind = NULL_TREE;
	      }
	    if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
	      {
		gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
			       NULL, is_gimple_val, fb_rvalue);
		OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
	      }
	    if (error_operand_p (OMP_CLAUSE_DECL (c)))
	      return;
	    if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
			       is_gimple_lvalue, fb_lvalue) == GS_ERROR)
	      return;
	    gimplify_and_add (OMP_CLAUSE_DECL (c), pre_p);
	  }
      }
  /* Flush the final pending bind, if any.  */
  if (last_bind)
    {
      append_to_statement_list (label, last_body);
      gimplify_and_add (last_bind, pre_p);
    }
  return;
}
9851 :
9852 : /* Returns a tree expression containing the total iteration count of the
9853 : OpenMP iterator IT. */
9854 :
static tree
compute_omp_iterator_count (tree it, gimple_seq *pre_p)
{
  /* Product over all iterators; NULL_TREE on gimplification failure.  */
  tree tcnt = size_one_node;
  for (; it; it = TREE_CHAIN (it))
    {
      /* Each iterator TREE_VEC holds var/begin/end/step/orig-step in
	 slots 0-4; slots 1-4 must be gimple values before the count
	 expression below can use them.  */
      if (gimplify_expr (&TREE_VEC_ELT (it, 1), pre_p, NULL,
			 is_gimple_val, fb_rvalue) == GS_ERROR
	  || gimplify_expr (&TREE_VEC_ELT (it, 2), pre_p, NULL,
			    is_gimple_val, fb_rvalue) == GS_ERROR
	  || gimplify_expr (&TREE_VEC_ELT (it, 3), pre_p, NULL,
			    is_gimple_val, fb_rvalue) == GS_ERROR
	  || (gimplify_expr (&TREE_VEC_ELT (it, 4), pre_p, NULL,
			     is_gimple_val, fb_rvalue) == GS_ERROR))
	return NULL_TREE;
      tree var = TREE_VEC_ELT (it, 0);
      tree begin = TREE_VEC_ELT (it, 1);
      tree end = TREE_VEC_ELT (it, 2);
      tree step = TREE_VEC_ELT (it, 3);
      tree orig_step = TREE_VEC_ELT (it, 4);
      tree type = TREE_TYPE (var);
      tree stype = TREE_TYPE (step);
      location_t loc = DECL_SOURCE_LOCATION (var);
      tree endmbegin;
      /* Compute count for this iterator as
	 orig_step > 0
	 ? (begin < end ? (end - begin + (step - 1)) / step : 0)
	 : (begin > end ? (end - begin + (step + 1)) / step : 0)
	 and compute product of those for the entire clause.  */
      if (POINTER_TYPE_P (type))
	endmbegin = fold_build2_loc (loc, POINTER_DIFF_EXPR, stype, end, begin);
      else
	endmbegin = fold_build2_loc (loc, MINUS_EXPR, type, end, begin);
      tree stepm1 = fold_build2_loc (loc, MINUS_EXPR, stype, step,
				     build_int_cst (stype, 1));
      tree stepp1 = fold_build2_loc (loc, PLUS_EXPR, stype, step,
				     build_int_cst (stype, 1));
      tree pos = fold_build2_loc (loc, PLUS_EXPR, stype,
				  unshare_expr (endmbegin), stepm1);
      pos = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype, pos, step);
      tree neg = fold_build2_loc (loc, PLUS_EXPR, stype, endmbegin, stepp1);
      /* For unsigned step types negate both operands so the division
	 below works on the (conceptually negative) descending case.  */
      if (TYPE_UNSIGNED (stype))
	{
	  neg = fold_build1_loc (loc, NEGATE_EXPR, stype, neg);
	  step = fold_build1_loc (loc, NEGATE_EXPR, stype, step);
	}
      neg = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype, neg, step);
      step = NULL_TREE;
      /* Clamp each direction's count at zero for empty ranges.  */
      tree cond = fold_build2_loc (loc, LT_EXPR, boolean_type_node, begin, end);
      pos = fold_build3_loc (loc, COND_EXPR, stype, cond, pos,
			     build_int_cst (stype, 0));
      cond = fold_build2_loc (loc, LT_EXPR, boolean_type_node, end, begin);
      neg = fold_build3_loc (loc, COND_EXPR, stype, cond, neg,
			     build_int_cst (stype, 0));
      /* The sign of the original step selects which direction's count
	 applies.  */
      tree osteptype = TREE_TYPE (orig_step);
      cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node, orig_step,
			      build_int_cst (osteptype, 0));
      tree cnt = fold_build3_loc (loc, COND_EXPR, stype, cond, pos, neg);
      cnt = fold_convert_loc (loc, sizetype, cnt);
      if (gimplify_expr (&cnt, pre_p, NULL, is_gimple_val,
			 fb_rvalue) == GS_ERROR)
	return NULL_TREE;
      tcnt = size_binop_loc (loc, MULT_EXPR, tcnt, cnt);
    }
  if (gimplify_expr (&tcnt, pre_p, NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
    return NULL_TREE;

  return tcnt;
}
9924 :
9925 : /* Build loops iterating over the space defined by the OpenMP iterator IT.
9926 : Returns a pointer to the BIND_EXPR_BODY in the innermost loop body.
9927 : LAST_BIND is set to point to the BIND_EXPR containing the whole loop. */
9928 :
static tree *
build_omp_iterator_loop (tree it, gimple_seq *pre_p, tree *last_bind)
{
  /* If the caller still has a previous loop pending, emit it first.  */
  if (*last_bind)
    gimplify_and_add (*last_bind, pre_p);
  /* Slot 5 of the iterator TREE_VEC is the BLOCK holding the iteration
     variables; wrap the generated loop nest in a BIND_EXPR for it.  */
  tree block = TREE_VEC_ELT (it, 5);
  *last_bind = build3 (BIND_EXPR, void_type_node,
		       BLOCK_VARS (block), NULL, block);
  TREE_SIDE_EFFECTS (*last_bind) = 1;
  tree *p = &BIND_EXPR_BODY (*last_bind);
  for (; it; it = TREE_CHAIN (it))
    {
      /* Iterator TREE_VEC layout: var/begin/end/step/orig-step.  */
      tree var = TREE_VEC_ELT (it, 0);
      tree begin = TREE_VEC_ELT (it, 1);
      tree end = TREE_VEC_ELT (it, 2);
      tree step = TREE_VEC_ELT (it, 3);
      tree orig_step = TREE_VEC_ELT (it, 4);
      tree type = TREE_TYPE (var);
      location_t loc = DECL_SOURCE_LOCATION (var);
      /* Emit:
	 var = begin;
	 goto cond_label;
	 beg_label:
	 ...
	 var = var + step;
	 cond_label:
	 if (orig_step > 0) {
	   if (var < end) goto beg_label;
	 } else {
	   if (var > end) goto beg_label;
	 }
	 for each iterator, with inner iterators added to
	 the ... above.  */
      tree beg_label = create_artificial_label (loc);
      tree cond_label = NULL_TREE;
      tree tem = build2_loc (loc, MODIFY_EXPR, void_type_node, var, begin);
      append_to_statement_list_force (tem, p);
      tem = build_and_jump (&cond_label);
      append_to_statement_list_force (tem, p);
      tem = build1 (LABEL_EXPR, void_type_node, beg_label);
      append_to_statement_list (tem, p);
      /* Placeholder BIND_EXPR for the body; inner iterators (and the
	 caller's statements) are appended into it via P below.  */
      tree bind = build3 (BIND_EXPR, void_type_node, NULL_TREE,
			  NULL_TREE, NULL_TREE);
      TREE_SIDE_EFFECTS (bind) = 1;
      SET_EXPR_LOCATION (bind, loc);
      append_to_statement_list_force (bind, p);
      /* var = var + step (pointer arithmetic needs sizetype offset).  */
      if (POINTER_TYPE_P (type))
	tem = build2_loc (loc, POINTER_PLUS_EXPR, type,
			  var, fold_convert_loc (loc, sizetype, step));
      else
	tem = build2_loc (loc, PLUS_EXPR, type, var, step);
      tem = build2_loc (loc, MODIFY_EXPR, void_type_node, var, tem);
      append_to_statement_list_force (tem, p);
      tem = build1 (LABEL_EXPR, void_type_node, cond_label);
      append_to_statement_list (tem, p);
      /* Loop-back test, direction selected by the sign of orig_step.  */
      tree cond = fold_build2_loc (loc, LT_EXPR, boolean_type_node, var, end);
      tree pos = fold_build3_loc (loc, COND_EXPR, void_type_node, cond,
				  build_and_jump (&beg_label), void_node);
      cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node, var, end);
      tree neg = fold_build3_loc (loc, COND_EXPR, void_type_node, cond,
				  build_and_jump (&beg_label), void_node);
      tree osteptype = TREE_TYPE (orig_step);
      cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node, orig_step,
			      build_int_cst (osteptype, 0));
      tem = fold_build3_loc (loc, COND_EXPR, void_type_node, cond, pos, neg);
      append_to_statement_list_force (tem, p);
      /* Descend: the next iterator's statements go into this body.  */
      p = &BIND_EXPR_BODY (bind);
    }

  return p;
}
10000 :
10001 :
10002 : /* Callback for walk_tree to find a VAR_DECL (stored in DATA) in the
10003 : tree TP. */
10004 :
10005 : static tree
10006 2354 : find_var_decl (tree *tp, int *, void *data)
10007 : {
10008 2354 : if (*tp == (tree) data)
10009 214 : return *tp;
10010 :
10011 : return NULL_TREE;
10012 : }
10013 :
10014 : /* Returns an element-by-element copy of OMP iterator tree IT. */
10015 :
10016 : static tree
10017 206 : copy_omp_iterator (tree it, int elem_count = -1)
10018 : {
10019 206 : if (elem_count < 0)
10020 60 : elem_count = TREE_VEC_LENGTH (it);
10021 206 : tree new_it = make_tree_vec (elem_count);
10022 1442 : for (int i = 0; i < TREE_VEC_LENGTH (it); i++)
10023 1236 : TREE_VEC_ELT (new_it, i) = TREE_VEC_ELT (it, i);
10024 :
10025 206 : return new_it;
10026 : }
10027 :
10028 : /* Helper function for walk_tree in remap_omp_iterator_var. */
10029 :
10030 : static tree
10031 871 : remap_omp_iterator_var_1 (tree *tp, int *, void *data)
10032 : {
10033 871 : tree old_var = ((tree *) data)[0];
10034 871 : tree new_var = ((tree *) data)[1];
10035 :
10036 871 : if (*tp == old_var)
10037 92 : *tp = new_var;
10038 871 : return NULL_TREE;
10039 : }
10040 :
10041 : /* Replace instances of OLD_VAR in TP with NEW_VAR. */
10042 :
10043 : static void
10044 184 : remap_omp_iterator_var (tree *tp, tree old_var, tree new_var)
10045 : {
10046 184 : tree vars[2] = { old_var, new_var };
10047 184 : walk_tree (tp, remap_omp_iterator_var_1, vars, NULL);
10048 184 : }
10049 :
10050 : /* Scan through all clauses using OpenMP iterators in LIST_P. If any
10051 : clauses have iterators with variables that are not used by the clause
10052 : decl or size, issue a warning and replace the iterator with a copy with
10053 : the unused variables removed. */
10054 :
static void
remove_unused_omp_iterator_vars (tree *list_p)
{
  /* Parallel vectors: iter_vars[i] is the set of used variables for
     which new_iterators[i] was built, so identical sets share one
     rebuilt iterator.  */
  auto_vec< vec<tree> > iter_vars;
  auto_vec<tree> new_iterators;

  for (tree c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
    {
      if (!OMP_CLAUSE_HAS_ITERATORS (c))
	continue;
      auto_vec<tree> vars;
      bool need_new_iterators = false;
      /* Collect the iterator variables actually referenced by the
	 clause decl or size; warn about the unused ones for map/to/from
	 style clauses.  */
      for (tree it = OMP_CLAUSE_ITERATORS (c); it; it = TREE_CHAIN (it))
	{
	  tree var = TREE_VEC_ELT (it, 0);
	  tree t = walk_tree (&OMP_CLAUSE_DECL (c), find_var_decl, var, NULL);
	  if (t == NULL_TREE)
	    t = walk_tree (&OMP_CLAUSE_SIZE (c), find_var_decl, var, NULL);
	  if (t == NULL_TREE)
	    {
	      need_new_iterators = true;
	      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		   && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_TO
		       || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FROM))
		  || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TO
		  || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FROM)
		warning_at (OMP_CLAUSE_LOCATION (c), OPT_Wopenmp,
			    "iterator variable %qE not used in clause "
			    "expression", DECL_NAME (var));
	    }
	  else
	    vars.safe_push (var);
	}
      if (!need_new_iterators)
	continue;
      if (need_new_iterators && vars.is_empty ())
	{
	  /* No iteration variables are used in the clause - remove the
	     iterator from the clause.  */
	  OMP_CLAUSE_ITERATORS (c) = NULL_TREE;
	  continue;
	}

      /* If a new iterator has been created for the current set of used
	 iterator variables, then use that as the iterator.  Otherwise,
	 create a new iterator for the current iterator variable set.  */
      unsigned i;
      for (i = 0; i < iter_vars.length (); i++)
	{
	  if (vars.length () != iter_vars[i].length ())
	    continue;
	  bool identical_p = true;
	  for (unsigned j = 0; j < vars.length () && identical_p; j++)
	    identical_p = vars[j] == iter_vars[i][j];

	  if (identical_p)
	    break;
	}
      if (i < iter_vars.length ())
	OMP_CLAUSE_ITERATORS (c) = new_iterators[i];
      else
	{
	  /* Rebuild the iterator chain keeping only the used variables,
	     giving each a fresh artificial VAR_DECL (slot 0 of the
	     copied TREE_VEC).  */
	  tree new_iters = NULL_TREE;
	  tree *new_iters_p = &new_iters;
	  tree new_vars = NULL_TREE;
	  tree *new_vars_p = &new_vars;
	  i = 0;
	  for (tree it = OMP_CLAUSE_ITERATORS (c); it && i < vars.length();
	       it = TREE_CHAIN (it))
	    {
	      tree var = TREE_VEC_ELT (it, 0);
	      if (var == vars[i])
		{
		  *new_iters_p = copy_omp_iterator (it);
		  *new_vars_p = build_decl (OMP_CLAUSE_LOCATION (c), VAR_DECL,
					    DECL_NAME (var), TREE_TYPE (var));
		  DECL_ARTIFICIAL (*new_vars_p) = 1;
		  DECL_CONTEXT (*new_vars_p) = DECL_CONTEXT (var);
		  TREE_VEC_ELT (*new_iters_p, 0) = *new_vars_p;
		  new_iters_p = &TREE_CHAIN (*new_iters_p);
		  new_vars_p = &DECL_CHAIN (*new_vars_p);
		  i++;
		}
	    }
	  /* Slot 5 holds the BLOCK owning the iteration variables.  */
	  tree new_block = make_node (BLOCK);
	  BLOCK_VARS (new_block) = new_vars;
	  TREE_VEC_ELT (new_iters, 5) = new_block;
	  new_iterators.safe_push (new_iters);
	  iter_vars.safe_push (vars.copy ());
	  OMP_CLAUSE_ITERATORS (c) = new_iters;
	}

      /* Remap clause to use the new variables.  */
      i = 0;
      for (tree it = OMP_CLAUSE_ITERATORS (c); it; it = TREE_CHAIN (it))
	{
	  tree old_var = vars[i++];
	  tree new_var = TREE_VEC_ELT (it, 0);
	  remap_omp_iterator_var (&OMP_CLAUSE_DECL (c), old_var, new_var);
	  remap_omp_iterator_var (&OMP_CLAUSE_SIZE (c), old_var, new_var);
	}
    }

  /* The vecs copied into iter_vars are heap vectors; release them.  */
  for (unsigned i = 0; i < iter_vars.length (); i++)
    iter_vars[i].release ();
}
10161 :
/* Bookkeeping for the expansion loop built for one distinct set of
   OpenMP iterators (see build_omp_iterators_loops).  */

struct iterator_loop_info_t
{
  tree bind;	    /* BIND_EXPR wrapping the generated loop nest.  */
  tree count;	    /* Total iteration count of the iterator space.  */
  tree index;	    /* Index temporary, advanced by 2 per iteration.  */
  tree body_label;  /* LABEL marking the insertion point in the body.  */
  auto_vec<tree> clauses;  /* Presumably the clauses sharing this loop —
			      populated outside this chunk, verify.  */
};

/* Map from an OMP_CLAUSE_ITERATORS tree to the loop built for it.  */
typedef hash_map<tree, iterator_loop_info_t> iterator_loop_info_map_t;
10172 :
10173 : /* Builds a loop to expand any OpenMP iterators in the clauses in LIST_P,
10174 : reusing any previously built loops if they use the same set of iterators.
10175 : Generated Gimple statements are placed into LOOPS_SEQ_P. The clause
10176 : iterators are updated with information on how and where to insert code into
10177 : the loop body. */
10178 :
static void
build_omp_iterators_loops (tree *list_p, gimple_seq *loops_seq_p)
{
  /* Keyed on the iterator TREE_VEC, so clauses sharing the same iterators
     share one generated loop.  */
  iterator_loop_info_map_t loops;

  for (tree c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
    {
      if (!OMP_CLAUSE_HAS_ITERATORS (c))
	continue;

      bool built_p;
      iterator_loop_info_t &loop
	= loops.get_or_insert (OMP_CLAUSE_ITERATORS (c), &built_p);

      /* Build the loop only the first time this iterator set is seen.  */
      if (!built_p)
	{
	  loop.count = compute_omp_iterator_count (OMP_CLAUSE_ITERATORS (c),
						   loops_seq_p);
	  /* NOTE(review): on failure the half-initialized map entry is left
	     behind; later clauses with the same iterators will see
	     built_p == true — confirm this cannot happen after errors.  */
	  if (!loop.count)
	    continue;
	  if (integer_zerop (loop.count))
	    warning_at (OMP_CLAUSE_LOCATION (c), OPT_Wopenmp,
			"iteration count is zero");

	  loop.bind = NULL_TREE;
	  tree *body = build_omp_iterator_loop (OMP_CLAUSE_ITERATORS (c),
						loops_seq_p, &loop.bind);

	  loop.index = create_tmp_var (sizetype);
	  SET_EXPR_LOCATION (loop.bind, OMP_CLAUSE_LOCATION (c));

	  /* BEFORE LOOP: */
	  /* idx = -1; */
	  /* This should be initialized to before the individual elements,
	     as idx is pre-incremented in the loop body.  */
	  gimple *assign = gimple_build_assign (loop.index, size_int (-1));
	  gimple_seq_add_stmt (loops_seq_p, assign);

	  /* IN LOOP BODY: */
	  /* Create a label so we can find this point later.  */
	  loop.body_label = create_artificial_label (OMP_CLAUSE_LOCATION (c));
	  tree tem = build1 (LABEL_EXPR, void_type_node, loop.body_label);
	  append_to_statement_list_force (tem, body);

	  /* idx += 2; */
	  tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
			    void_type_node, loop.index,
			    size_binop (PLUS_EXPR, loop.index, size_int (2)));
	  append_to_statement_list_force (tem, body);
	}

      /* Create array to hold expanded values.  Layout: element 0 holds the
	 iteration count (stored below); the remaining 2 * count slots are
	 presumably filled per-iteration in the loop body by the callers that
	 use body_label/index — confirm against users of the extra
	 TREE_VEC slots.  */
      tree last_count_2 = size_binop (MULT_EXPR, loop.count, size_int (2));
      tree arr_length = size_binop (PLUS_EXPR, last_count_2, size_int (1));
      tree elems = NULL_TREE;
      if (TREE_CONSTANT (arr_length))
	{
	  tree type = build_array_type (ptr_type_node,
					build_index_type (arr_length));
	  elems = create_tmp_var_raw (type, "omp_iter_data");
	  TREE_ADDRESSABLE (elems) = 1;
	  gimple_add_tmp_var (elems);
	}
      else
	{
	  /* Handle dynamic sizes. */
	  sorry ("dynamic iterator sizes not implemented yet");
	}

      /* BEFORE LOOP: */
      /* elems[0] = count; */
      tree lhs = build4 (ARRAY_REF, ptr_type_node, elems, size_int (0),
			 NULL_TREE, NULL_TREE);
      tree tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
			     void_type_node, lhs, loop.count);
      gimplify_and_add (tem, loops_seq_p);

      /* Make a copy of the iterator with extra info at the end:
	 [elem_count] = body label, [elem_count+1] = expansion array,
	 [elem_count+2] = index variable.  */
      int elem_count = TREE_VEC_LENGTH (OMP_CLAUSE_ITERATORS (c));
      tree new_iterator = copy_omp_iterator (OMP_CLAUSE_ITERATORS (c),
					     elem_count + 3);
      TREE_VEC_ELT (new_iterator, elem_count) = loop.body_label;
      TREE_VEC_ELT (new_iterator, elem_count + 1) = elems;
      TREE_VEC_ELT (new_iterator, elem_count + 2) = loop.index;
      TREE_CHAIN (new_iterator) = TREE_CHAIN (OMP_CLAUSE_ITERATORS (c));
      OMP_CLAUSE_ITERATORS (c) = new_iterator;

      loop.clauses.safe_push (c);
    }

  /* Now gimplify and add all the loops that were built.  */
  for (hash_map<tree, iterator_loop_info_t>::iterator it = loops.begin ();
       it != loops.end (); ++it)
    gimplify_and_add ((*it).second.bind, loops_seq_p);
}
10274 :
10275 : /* Helper function for enter_omp_iterator_loop_context. */
10276 :
static gimple_seq *
enter_omp_iterator_loop_context_1 (tree iterator, gimple_seq *loops_seq_p)
{
  /* Drill into the nested bind expressions to get to the loop body.
     Searches LOOPS_SEQ_P recursively for the label stored at
     TREE_VEC_ELT (iterator, 6) (the body label recorded by
     build_omp_iterators_loops — index 6 presumably being elem_count for a
     standard iterator vec; confirm against copy_omp_iterator).  On success
     every GIMPLE_BIND on the path stays pushed so the caller gimplifies in
     the right scope; binds are only popped on failed subtrees.  The matching
     unwind happens in exit_omp_iterator_loop_context.  */
  for (gimple_stmt_iterator gsi = gsi_start (*loops_seq_p);
       !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);

      switch (gimple_code (stmt))
	{
	case GIMPLE_BIND:
	  {
	    gbind *bind_stmt = as_a<gbind *> (stmt);
	    gimple_push_bind_expr (bind_stmt);
	    gimple_seq *bind_body_p = gimple_bind_body_ptr (bind_stmt);
	    gimple_seq *seq =
	      enter_omp_iterator_loop_context_1 (iterator, bind_body_p);
	    if (seq)
	      return seq;
	    /* Label not in this bind; undo the push before moving on.  */
	    gimple_pop_bind_expr ();
	  }
	  break;
	case GIMPLE_TRY:
	  {
	    /* Only the eval part can contain the loop body.  */
	    gimple_seq *try_eval_p = gimple_try_eval_ptr (stmt);
	    gimple_seq *seq =
	      enter_omp_iterator_loop_context_1 (iterator, try_eval_p);
	    if (seq)
	      return seq;
	  }
	  break;
	case GIMPLE_LABEL:
	  {
	    glabel *label_stmt = as_a<glabel *> (stmt);
	    tree label = gimple_label_label (label_stmt);
	    /* Found the marker label: this sequence is the loop body.  */
	    if (label == TREE_VEC_ELT (iterator, 6))
	      return loops_seq_p;
	  }
	  break;
	default:
	  break;
	}
    }

  return NULL;
}
10324 :
10325 : /* Enter the Gimplification context in LOOPS_SEQ_P for the iterator loop
10326 : associated with OpenMP clause C. Returns the gimple_seq for the loop body
10327 : if C has OpenMP iterators, or ALT_SEQ_P if not. */
10328 :
10329 : static gimple_seq *
10330 63443 : enter_omp_iterator_loop_context (tree c, gimple_seq *loops_seq_p,
10331 : gimple_seq *alt_seq_p)
10332 : {
10333 63443 : if (!OMP_CLAUSE_HAS_ITERATORS (c))
10334 : return alt_seq_p;
10335 :
10336 350 : push_gimplify_context ();
10337 :
10338 350 : gimple_seq *seq = enter_omp_iterator_loop_context_1 (OMP_CLAUSE_ITERATORS (c),
10339 : loops_seq_p);
10340 350 : gcc_assert (seq);
10341 : return seq;
10342 : }
10343 :
10344 : /* Enter the Gimplification context in STMT for the iterator loop associated
10345 : with OpenMP clause C. Returns the gimple_seq for the loop body if C has
10346 : OpenMP iterators, or ALT_SEQ_P if not. */
10347 :
10348 : gimple_seq *
10349 204 : enter_omp_iterator_loop_context (tree c, gomp_target *stmt,
10350 : gimple_seq *alt_seq_p)
10351 : {
10352 204 : gimple_seq *loops_seq_p = gimple_omp_target_iterator_loops_ptr (stmt);
10353 204 : return enter_omp_iterator_loop_context (c, loops_seq_p, alt_seq_p);
10354 : }
10355 :
10356 : /* Exit the Gimplification context for the OpenMP clause C. */
10357 :
10358 : void
10359 63533 : exit_omp_iterator_loop_context (tree c)
10360 : {
10361 63533 : if (!OMP_CLAUSE_HAS_ITERATORS (c))
10362 : return;
10363 1230 : while (!gimplify_ctxp->bind_expr_stack.is_empty ())
10364 880 : gimple_pop_bind_expr ();
10365 350 : pop_gimplify_context (NULL);
10366 : }
10367 :
10368 : /* If *LIST_P contains any OpenMP depend clauses with iterators,
10369 : lower all the depend clauses by populating corresponding depend
10370 : array. Returns 0 if there are no such depend clauses, or
10371 : 2 if all depend clauses should be removed, 1 otherwise. */
10372 :
static int
gimplify_omp_depend (tree *list_p, gimple_seq *pre_p)
{
  tree c;
  gimple *g;
  /* Per-kind count of non-iterator depend clauses.  Kind buckets:
     0 = out/inout, 1 = mutexinoutset, 2 = in, 3 = depobj, 4 = inoutset.  */
  size_t n[5] = { 0, 0, 0, 0, 0 };
  bool unused[5];
  /* Per-kind total address counts (iterator expansions + plain clauses).  */
  tree counts[5] = { NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE };
  tree last_iter = NULL_TREE, last_count = NULL_TREE;
  size_t i, j;
  location_t first_loc = UNKNOWN_LOCATION;

  /* Pass 1: classify clauses into kind buckets and compute counts.
     Consecutive clauses sharing an iterator reuse its computed count.  */
  for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
      {
	switch (OMP_CLAUSE_DEPEND_KIND (c))
	  {
	  case OMP_CLAUSE_DEPEND_IN:
	    i = 2;
	    break;
	  case OMP_CLAUSE_DEPEND_OUT:
	  case OMP_CLAUSE_DEPEND_INOUT:
	    i = 0;
	    break;
	  case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
	    i = 1;
	    break;
	  case OMP_CLAUSE_DEPEND_DEPOBJ:
	    i = 3;
	    break;
	  case OMP_CLAUSE_DEPEND_INOUTSET:
	    i = 4;
	    break;
	  default:
	    gcc_unreachable ();
	  }
	tree t = OMP_CLAUSE_DECL (c);
	if (first_loc == UNKNOWN_LOCATION)
	  first_loc = OMP_CLAUSE_LOCATION (c);
	if (OMP_ITERATOR_DECL_P (t))
	  {
	    if (TREE_PURPOSE (t) != last_iter)
	      {
		tree tcnt = compute_omp_iterator_count (TREE_PURPOSE (t),
							pre_p);
		if (!tcnt)
		  return 2;
		last_iter = TREE_PURPOSE (t);
		last_count = tcnt;
	      }
	    if (counts[i] == NULL_TREE)
	      counts[i] = last_count;
	    else
	      counts[i] = size_binop_loc (OMP_CLAUSE_LOCATION (c),
					  PLUS_EXPR, counts[i], last_count);
	  }
	else
	  n[i]++;
      }
  /* No iterator clauses at all -> nothing to lower here.  */
  for (i = 0; i < 5; i++)
    if (counts[i])
      break;
  if (i == 5)
    return 0;

  /* Fold the plain-clause counts into COUNTS[] and compute the total
     number of depend addresses.  */
  tree total = size_zero_node;
  for (i = 0; i < 5; i++)
    {
      unused[i] = counts[i] == NULL_TREE && n[i] == 0;
      if (counts[i] == NULL_TREE)
	counts[i] = size_zero_node;
      if (n[i])
	counts[i] = size_binop (PLUS_EXPR, counts[i], size_int (n[i]));
      if (gimplify_expr (&counts[i], pre_p, NULL, is_gimple_val,
			 fb_rvalue) == GS_ERROR)
	return 2;
      total = size_binop (PLUS_EXPR, total, counts[i]);
    }

  if (gimplify_expr (&total, pre_p, NULL, is_gimple_val, fb_rvalue)
      == GS_ERROR)
    return 2;
  /* "Old" format (only out/inout and in kinds used): header is
     { total, counts[0] }, addresses start at index 2.  New format: header is
     { 0, total, counts[0..2] }, addresses start at index 5; inoutset entries
     additionally need two extra slots each (pointer pair).  */
  bool is_old = unused[1] && unused[3] && unused[4];
  tree totalpx = size_binop (PLUS_EXPR, unshare_expr (total),
			     size_int (is_old ? 1 : 4));
  if (!unused[4])
    totalpx = size_binop (PLUS_EXPR, totalpx,
			  size_binop (MULT_EXPR, counts[4], size_int (2)));
  tree type = build_array_type (ptr_type_node, build_index_type (totalpx));
  tree array = create_tmp_var_raw (type);
  TREE_ADDRESSABLE (array) = 1;
  if (!poly_int_tree_p (totalpx))
    {
      /* Variable-length array: gimplify its size and register it with the
	 nearest enclosing OMP context that privatizes variables.  */
      if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (array)))
	gimplify_type_sizes (TREE_TYPE (array), pre_p);
      if (gimplify_omp_ctxp)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_TASKGROUP
		     || ctx->region_type == ORT_SIMD
		     || ctx->region_type == ORT_ACC))
	    ctx = ctx->outer_context;
	  if (ctx)
	    omp_add_variable (ctx, array, GOVD_LOCAL | GOVD_SEEN);
	}
      gimplify_vla_decl (array, pre_p);
    }
  else
    gimple_add_tmp_var (array);
  /* Emit the header stores described above.  */
  tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
		   NULL_TREE);
  tree tem;
  if (!is_old)
    {
      tem = build2 (MODIFY_EXPR, void_type_node, r,
		    build_int_cst (ptr_type_node, 0));
      gimplify_and_add (tem, pre_p);
      r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
		  NULL_TREE);
    }
  tem = build2 (MODIFY_EXPR, void_type_node, r,
		fold_convert (ptr_type_node, total));
  gimplify_and_add (tem, pre_p);
  for (i = 1; i < (is_old ? 2 : 4); i++)
    {
      r = build4 (ARRAY_REF, ptr_type_node, array, size_int (i + !is_old),
		  NULL_TREE, NULL_TREE);
      tem = build2 (MODIFY_EXPR, void_type_node, r, counts[i - 1]);
      gimplify_and_add (tem, pre_p);
    }

  /* CNTS[k] tracks the next free array index for kind K; unused kinds share
     the previous kind's counter.  CNTS[5] is the cursor for the extra
     inoutset pointer-pair area at the end of the array.  */
  tree cnts[6];
  for (j = 5; j; j--)
    if (!unused[j - 1])
      break;
  for (i = 0; i < 5; i++)
    {
      if (i && (i >= j || unused[i - 1]))
	{
	  cnts[i] = cnts[i - 1];
	  continue;
	}
      cnts[i] = create_tmp_var (sizetype);
      if (i == 0)
	g = gimple_build_assign (cnts[i], size_int (is_old ? 2 : 5));
      else
	{
	  tree t;
	  if (is_old)
	    t = size_binop (PLUS_EXPR, counts[0], size_int (2));
	  else
	    t = size_binop (PLUS_EXPR, cnts[i - 1], counts[i - 1]);
	  if (gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue)
	      == GS_ERROR)
	    return 2;
	  g = gimple_build_assign (cnts[i], t);
	}
      gimple_seq_add_stmt (pre_p, g);
    }
  if (unused[4])
    cnts[5] = NULL_TREE;
  else
    {
      tree t = size_binop (PLUS_EXPR, total, size_int (5));
      cnts[5] = create_tmp_var (sizetype);
      /* NOTE(review): I is 5 here (fallout of the loop above), so this
	 initializes cnts[5]; spelling it cnts[5] would be clearer and less
	 fragile — confirm and consider changing.  */
      g = gimple_build_assign (cnts[i], t);
      gimple_seq_add_stmt (pre_p, g);
    }

  /* Pass 2: emit the stores of the depend addresses themselves.  Iterator
     clauses store inside the rebuilt iterator loop body (LAST_BODY); plain
     clauses store directly into PRE_P.  */
  last_iter = NULL_TREE;
  tree last_bind = NULL_TREE;
  tree *last_body = NULL;
  for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
      {
	switch (OMP_CLAUSE_DEPEND_KIND (c))
	  {
	  case OMP_CLAUSE_DEPEND_IN:
	    i = 2;
	    break;
	  case OMP_CLAUSE_DEPEND_OUT:
	  case OMP_CLAUSE_DEPEND_INOUT:
	    i = 0;
	    break;
	  case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
	    i = 1;
	    break;
	  case OMP_CLAUSE_DEPEND_DEPOBJ:
	    i = 3;
	    break;
	  case OMP_CLAUSE_DEPEND_INOUTSET:
	    i = 4;
	    break;
	  default:
	    gcc_unreachable ();
	  }
	tree t = OMP_CLAUSE_DECL (c);
	if (OMP_ITERATOR_DECL_P (t))
	  {
	    /* Rebuild the iterator loop once per distinct iterator.  */
	    if (TREE_PURPOSE (t) != last_iter)
	      {
		last_body = build_omp_iterator_loop (TREE_PURPOSE (t), pre_p,
						     &last_bind);
		SET_EXPR_LOCATION (last_bind, OMP_CLAUSE_LOCATION (c));
	      }
	    last_iter = TREE_PURPOSE (t);
	    if (TREE_CODE (TREE_VALUE (t)) == COMPOUND_EXPR)
	      {
		append_to_statement_list (TREE_OPERAND (TREE_VALUE (t),
							0), last_body);
		TREE_VALUE (t) = TREE_OPERAND (TREE_VALUE (t), 1);
	      }
	    if (error_operand_p (TREE_VALUE (t)))
	      return 2;
	    if (TREE_VALUE (t) != null_pointer_node)
	      TREE_VALUE (t) = build_fold_addr_expr (TREE_VALUE (t));
	    /* Inoutset: first store a pointer into the extra pair area,
	       then fall through to store the address and tag.  */
	    if (i == 4)
	      {
		r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
			    NULL_TREE, NULL_TREE);
		tree r2 = build4 (ARRAY_REF, ptr_type_node, array, cnts[5],
				  NULL_TREE, NULL_TREE);
		r2 = build_fold_addr_expr_with_type (r2, ptr_type_node);
		tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
				  void_type_node, r, r2);
		append_to_statement_list_force (tem, last_body);
		tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
				  void_type_node, cnts[i],
				  size_binop (PLUS_EXPR, cnts[i],
					      size_int (1)));
		append_to_statement_list_force (tem, last_body);
		i = 5;
	      }
	    r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
			NULL_TREE, NULL_TREE);
	    tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
			      void_type_node, r, TREE_VALUE (t));
	    append_to_statement_list_force (tem, last_body);
	    if (i == 5)
	      {
		r = build4 (ARRAY_REF, ptr_type_node, array,
			    size_binop (PLUS_EXPR, cnts[i], size_int (1)),
			    NULL_TREE, NULL_TREE);
		tem = build_int_cst (ptr_type_node, GOMP_DEPEND_INOUTSET);
		tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
				  void_type_node, r, tem);
		append_to_statement_list_force (tem, last_body);
	      }
	    tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
			      void_type_node, cnts[i],
			      size_binop (PLUS_EXPR, cnts[i],
					  size_int (1 + (i == 5))));
	    append_to_statement_list_force (tem, last_body);
	    TREE_VALUE (t) = null_pointer_node;
	  }
	else
	  {
	    /* Plain clause: flush any pending iterator loop first so array
	       stores happen in source order.  */
	    if (last_bind)
	      {
		gimplify_and_add (last_bind, pre_p);
		last_bind = NULL_TREE;
	      }
	    if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
	      {
		gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
			       NULL, is_gimple_val, fb_rvalue);
		OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
	      }
	    if (error_operand_p (OMP_CLAUSE_DECL (c)))
	      return 2;
	    if (OMP_CLAUSE_DECL (c) != null_pointer_node)
	      OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
	    if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
			       is_gimple_val, fb_rvalue) == GS_ERROR)
	      return 2;
	    if (i == 4)
	      {
		r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
			    NULL_TREE, NULL_TREE);
		tree r2 = build4 (ARRAY_REF, ptr_type_node, array, cnts[5],
				  NULL_TREE, NULL_TREE);
		r2 = build_fold_addr_expr_with_type (r2, ptr_type_node);
		tem = build2 (MODIFY_EXPR, void_type_node, r, r2);
		gimplify_and_add (tem, pre_p);
		g = gimple_build_assign (cnts[i], size_binop (PLUS_EXPR,
							      cnts[i],
							      size_int (1)));
		gimple_seq_add_stmt (pre_p, g);
		i = 5;
	      }
	    r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
			NULL_TREE, NULL_TREE);
	    tem = build2 (MODIFY_EXPR, void_type_node, r, OMP_CLAUSE_DECL (c));
	    gimplify_and_add (tem, pre_p);
	    if (i == 5)
	      {
		r = build4 (ARRAY_REF, ptr_type_node, array,
			    size_binop (PLUS_EXPR, cnts[i], size_int (1)),
			    NULL_TREE, NULL_TREE);
		tem = build_int_cst (ptr_type_node, GOMP_DEPEND_INOUTSET);
		tem = build2 (MODIFY_EXPR, void_type_node, r, tem);
		/* NOTE(review): appending TEM to LAST_BODY here, in the
		   non-iterator path, looks suspicious — LAST_BODY belongs to
		   a previous iterator clause (or is NULL) and TEM is also
		   gimplified into PRE_P just below.  This path has zero
		   coverage; confirm intent.  */
		append_to_statement_list_force (tem, last_body);
		gimplify_and_add (tem, pre_p);
	      }
	    g = gimple_build_assign (cnts[i],
				     size_binop (PLUS_EXPR, cnts[i],
						 size_int (1 + (i == 5))));
	    gimple_seq_add_stmt (pre_p, g);
	  }
      }
  if (last_bind)
    gimplify_and_add (last_bind, pre_p);
  /* Build a runtime consistency check: each kind cursor must have advanced
     exactly to the start of the following area, else trap.  */
  tree cond = boolean_false_node;
  if (is_old)
    {
      if (!unused[0])
	cond = build2_loc (first_loc, NE_EXPR, boolean_type_node, cnts[0],
			   size_binop_loc (first_loc, PLUS_EXPR, counts[0],
					   size_int (2)));
      if (!unused[2])
	cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
			   build2_loc (first_loc, NE_EXPR, boolean_type_node,
				       cnts[2],
				       size_binop_loc (first_loc, PLUS_EXPR,
						       totalpx,
						       size_int (1))));
    }
  else
    {
      tree prev = size_int (5);
      for (i = 0; i < 5; i++)
	{
	  if (unused[i])
	    continue;
	  prev = size_binop_loc (first_loc, PLUS_EXPR, counts[i], prev);
	  cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
			     build2_loc (first_loc, NE_EXPR, boolean_type_node,
					 cnts[i], unshare_expr (prev)));
	}
    }
  tem = build3_loc (first_loc, COND_EXPR, void_type_node, cond,
		    build_call_expr_loc (first_loc,
					 builtin_decl_explicit (BUILT_IN_TRAP),
					 0), void_node);
  gimplify_and_add (tem, pre_p);
  /* Prepend a DEPEND_LAST clause carrying the address of the filled array;
     omp-low consumes this in place of the individual clauses.  */
  c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
  OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
  OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
  OMP_CLAUSE_CHAIN (c) = *list_p;
  *list_p = c;
  return 1;
}
10727 :
10728 : /* True if mapping node C maps, or unmaps, a (Fortran) array descriptor. */
10729 :
10730 : static bool
10731 123165 : omp_map_clause_descriptor_p (tree c)
10732 : {
10733 123165 : if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP)
10734 : return false;
10735 :
10736 123161 : if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_TO_PSET)
10737 : return true;
10738 :
10739 80645 : if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_RELEASE
10740 72910 : || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DELETE)
10741 81089 : && OMP_CLAUSE_RELEASE_DESCRIPTOR (c))
10742 6135 : return true;
10743 :
10744 : return false;
10745 : }
10746 :
10747 : /* For a set of mappings describing an array section pointed to by a struct
10748 : (or derived type, etc.) component, create an "alloc" or "release" node to
10749 : insert into a list following a GOMP_MAP_STRUCT node. For some types of
10750 : mapping (e.g. Fortran arrays with descriptors), an additional mapping may
10751 : be created that is inserted into the list of mapping nodes attached to the
10752 : directive being processed -- not part of the sorted list of nodes after
10753 : GOMP_MAP_STRUCT.
10754 :
10755 : CODE is the code of the directive being processed. GRP_START and GRP_END
10756 : are the first and last of two or three nodes representing this array section
10757 : mapping (e.g. a data movement node like GOMP_MAP_{TO,FROM}, optionally a
10758 : GOMP_MAP_TO_PSET, and finally a GOMP_MAP_ALWAYS_POINTER). EXTRA_NODE is
10759 : filled with the additional node described above, if needed.
10760 :
10761 : This function does not add the new nodes to any lists itself. It is the
10762 : responsibility of the caller to do that. */
10763 :
10764 : static tree
10765 1729 : build_omp_struct_comp_nodes (enum tree_code code, tree grp_start, tree grp_end,
10766 : tree *extra_node)
10767 : {
10768 1524 : enum gomp_map_kind mkind
10769 1729 : = (code == OMP_TARGET_EXIT_DATA || code == OACC_EXIT_DATA)
10770 1729 : ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
10771 :
10772 1729 : gcc_assert (grp_start != grp_end);
10773 :
10774 1729 : tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end), OMP_CLAUSE_MAP);
10775 1729 : OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
10776 1729 : OMP_CLAUSE_DECL (c2) = unshare_expr (OMP_CLAUSE_DECL (grp_end));
10777 1729 : OMP_CLAUSE_CHAIN (c2) = NULL_TREE;
10778 1729 : tree grp_mid = NULL_TREE;
10779 1729 : if (OMP_CLAUSE_CHAIN (grp_start) != grp_end)
10780 197 : grp_mid = OMP_CLAUSE_CHAIN (grp_start);
10781 :
10782 197 : if (grp_mid && omp_map_clause_descriptor_p (grp_mid))
10783 0 : OMP_CLAUSE_SIZE (c2) = OMP_CLAUSE_SIZE (grp_mid);
10784 : else
10785 1729 : OMP_CLAUSE_SIZE (c2) = TYPE_SIZE_UNIT (ptr_type_node);
10786 :
10787 1729 : if (grp_mid
10788 197 : && OMP_CLAUSE_CODE (grp_mid) == OMP_CLAUSE_MAP
10789 1926 : && OMP_CLAUSE_MAP_KIND (grp_mid) == GOMP_MAP_ALWAYS_POINTER)
10790 : {
10791 0 : tree c3
10792 0 : = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end), OMP_CLAUSE_MAP);
10793 0 : OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
10794 0 : OMP_CLAUSE_DECL (c3) = unshare_expr (OMP_CLAUSE_DECL (grp_mid));
10795 0 : OMP_CLAUSE_SIZE (c3) = TYPE_SIZE_UNIT (ptr_type_node);
10796 0 : OMP_CLAUSE_CHAIN (c3) = NULL_TREE;
10797 :
10798 0 : *extra_node = c3;
10799 : }
10800 : else
10801 1729 : *extra_node = NULL_TREE;
10802 :
10803 1729 : return c2;
10804 : }
10805 :
/* Strip ARRAY_REFs or an indirect ref off BASE, find the containing object,
   and set *BITPOSP and *POFFSETP to the bit offset of the access.
   *VARIABLE_OFFSET is set to true if the access has an offset that is not
   a compile-time constant (in which case *POFFSETP covers only the constant
   part).  */
10812 :
10813 : static tree
10814 7323 : extract_base_bit_offset (tree base, poly_int64 *bitposp,
10815 : poly_offset_int *poffsetp,
10816 : bool *variable_offset)
10817 : {
10818 7323 : tree offset;
10819 7323 : poly_int64 bitsize, bitpos;
10820 7323 : machine_mode mode;
10821 7323 : int unsignedp, reversep, volatilep = 0;
10822 7323 : poly_offset_int poffset;
10823 :
10824 7323 : STRIP_NOPS (base);
10825 :
10826 7323 : base = get_inner_reference (base, &bitsize, &bitpos, &offset, &mode,
10827 : &unsignedp, &reversep, &volatilep);
10828 :
10829 7323 : STRIP_NOPS (base);
10830 :
10831 7323 : if (offset && poly_int_tree_p (offset))
10832 : {
10833 0 : poffset = wi::to_poly_offset (offset);
10834 0 : *variable_offset = false;
10835 : }
10836 : else
10837 : {
10838 7323 : poffset = 0;
10839 7323 : *variable_offset = (offset != NULL_TREE);
10840 : }
10841 :
10842 7323 : if (maybe_ne (bitpos, 0))
10843 5291 : poffset += bits_to_bytes_round_down (bitpos);
10844 :
10845 7323 : *bitposp = bitpos;
10846 7323 : *poffsetp = poffset;
10847 :
10848 7323 : return base;
10849 : }
10850 :
10851 : /* Used for topological sorting of mapping groups. UNVISITED means we haven't
10852 : started processing the group yet. The TEMPORARY mark is used when we first
10853 : encounter a group on a depth-first traversal, and the PERMANENT mark is used
10854 : when we have processed all the group's children (i.e. all the base pointers
10855 : referred to by the group's mapping nodes, recursively). */
10856 :
enum omp_tsort_mark {
  /* Group not yet reached by the traversal.  */
  UNVISITED,
  /* Group is on the current depth-first path (cycle detection).  */
  TEMPORARY,
  /* Group and all of its children fully processed.  */
  PERMANENT
};
10862 :
10863 : /* Hash for trees based on operand_equal_p. Like tree_operand_hash
10864 : but ignores side effects in the equality comparisons. */
10865 :
struct tree_operand_hash_no_se : tree_operand_hash
{
  /* Override only equality; hashing is inherited from tree_operand_hash.  */
  static inline bool equal (const value_type &,
			    const compare_type &);
};

/* Compare T1 and T2 structurally, treating trees as equal even when they
   differ only in side effects (OEP_MATCH_SIDE_EFFECTS).  */

inline bool
tree_operand_hash_no_se::equal (const value_type &t1,
				const compare_type &t2)
{
  return operand_equal_p (t1, t2, OEP_MATCH_SIDE_EFFECTS);
}
10878 :
10879 : /* A group of OMP_CLAUSE_MAP nodes that correspond to a single "map"
10880 : clause. */
10881 :
struct omp_mapping_group {
  /* Pointer to the chain slot holding the group's first clause; allows
     splicing the group in or out of the clause list.  */
  tree *grp_start;
  /* Last clause belonging to the group (chain continues past it).  */
  tree grp_end;
  /* Topological-sort state (see omp_tsort_mark).  */
  omp_tsort_mark mark;
  /* If we've removed the group but need to reindex, mark the group as
     deleted. */
  bool deleted;
  /* The group points to an already-created "GOMP_MAP_STRUCT
     GOMP_MAP_ATTACH_DETACH" pair. */
  bool reprocess_struct;
  /* The group should use "zero-length" allocations for pointers that are not
     mapped "to" on the same directive. */
  bool fragile;
  /* Next group mapping the same underlying object.  */
  struct omp_mapping_group *sibling;
  /* Next group in overall iteration order.  */
  struct omp_mapping_group *next;
};
10898 :
10899 : DEBUG_FUNCTION void
10900 0 : debug_mapping_group (omp_mapping_group *grp)
10901 : {
10902 0 : tree tmp = OMP_CLAUSE_CHAIN (grp->grp_end);
10903 0 : OMP_CLAUSE_CHAIN (grp->grp_end) = NULL;
10904 0 : debug_generic_expr (*grp->grp_start);
10905 0 : OMP_CLAUSE_CHAIN (grp->grp_end) = tmp;
10906 0 : }
10907 :
10908 : /* Return the OpenMP "base pointer" of an expression EXPR, or NULL if there
10909 : isn't one. */
10910 :
10911 : static tree
10912 34655 : omp_get_base_pointer (tree expr)
10913 : {
10914 34655 : while (TREE_CODE (expr) == ARRAY_REF
10915 41486 : || TREE_CODE (expr) == COMPONENT_REF)
10916 6831 : expr = TREE_OPERAND (expr, 0);
10917 :
10918 34655 : if (INDIRECT_REF_P (expr)
10919 34655 : || (TREE_CODE (expr) == MEM_REF
10920 0 : && integer_zerop (TREE_OPERAND (expr, 1))))
10921 : {
10922 10398 : expr = TREE_OPERAND (expr, 0);
10923 10435 : while (TREE_CODE (expr) == COMPOUND_EXPR)
10924 37 : expr = TREE_OPERAND (expr, 1);
10925 10398 : if (TREE_CODE (expr) == POINTER_PLUS_EXPR)
10926 841 : expr = TREE_OPERAND (expr, 0);
10927 10398 : if (TREE_CODE (expr) == SAVE_EXPR)
10928 31 : expr = TREE_OPERAND (expr, 0);
10929 10398 : STRIP_NOPS (expr);
10930 10398 : return expr;
10931 : }
10932 :
10933 : return NULL_TREE;
10934 : }
10935 :
10936 : /* An attach or detach operation depends directly on the address being
10937 : attached/detached. Return that address, or none if there are no
10938 : attachments/detachments. */
10939 :
static tree
omp_get_attachment (omp_mapping_group *grp)
{
  tree node = *grp->grp_start;

  switch (OMP_CLAUSE_MAP_KIND (node))
    {
    /* Data-movement/allocation leaders: an attachment exists only if a
       trailing ATTACH_DETACH-style node follows (possibly after a
       descriptor node).  */
    case GOMP_MAP_TO:
    case GOMP_MAP_FROM:
    case GOMP_MAP_TOFROM:
    case GOMP_MAP_ALWAYS_FROM:
    case GOMP_MAP_ALWAYS_TO:
    case GOMP_MAP_ALWAYS_TOFROM:
    case GOMP_MAP_FORCE_FROM:
    case GOMP_MAP_FORCE_TO:
    case GOMP_MAP_FORCE_TOFROM:
    case GOMP_MAP_FORCE_PRESENT:
    case GOMP_MAP_PRESENT_ALLOC:
    case GOMP_MAP_PRESENT_FROM:
    case GOMP_MAP_PRESENT_TO:
    case GOMP_MAP_PRESENT_TOFROM:
    case GOMP_MAP_ALWAYS_PRESENT_FROM:
    case GOMP_MAP_ALWAYS_PRESENT_TO:
    case GOMP_MAP_ALWAYS_PRESENT_TOFROM:
    case GOMP_MAP_ALLOC:
    case GOMP_MAP_RELEASE:
    case GOMP_MAP_DELETE:
    case GOMP_MAP_FORCE_ALLOC:
      /* Single-node group: nothing attached.  */
      if (node == grp->grp_end)
	return NULL_TREE;

      node = OMP_CLAUSE_CHAIN (node);
      /* Skip over an (optional) array-descriptor node.  */
      if (node && omp_map_clause_descriptor_p (node))
	{
	  gcc_assert (node != grp->grp_end);
	  node = OMP_CLAUSE_CHAIN (node);
	}
      if (node)
	switch (OMP_CLAUSE_MAP_KIND (node))
	  {
	  /* Pointer-bookkeeping trailers that do not constitute an
	     attachment.  */
	  case GOMP_MAP_POINTER:
	  case GOMP_MAP_ALWAYS_POINTER:
	  case GOMP_MAP_FIRSTPRIVATE_POINTER:
	  case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
	  case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION:
	    return NULL_TREE;

	  /* The attached/detached address is the trailer's decl.  */
	  case GOMP_MAP_ATTACH_DETACH:
	  case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
	  case GOMP_MAP_DETACH:
	    return OMP_CLAUSE_DECL (node);

	  default:
	    internal_error ("unexpected mapping node");
	  }
      return error_mark_node;

    /* A descriptor leader must be followed by an attach or detach node.  */
    case GOMP_MAP_TO_PSET:
      gcc_assert (node != grp->grp_end);
      node = OMP_CLAUSE_CHAIN (node);
      if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_ATTACH
	  || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_DETACH)
	return OMP_CLAUSE_DECL (node);
      else
	internal_error ("unexpected mapping node");
      return error_mark_node;

    /* Stand-alone attach/detach: the leader's own decl is the address.  */
    case GOMP_MAP_ATTACH:
    case GOMP_MAP_DETACH:
      node = OMP_CLAUSE_CHAIN (node);
      if (!node || *grp->grp_start == grp->grp_end)
	return OMP_CLAUSE_DECL (*grp->grp_start);
      if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_POINTER
	  || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
	return OMP_CLAUSE_DECL (*grp->grp_start);
      else
	internal_error ("unexpected mapping node");
      return error_mark_node;

    /* Leaders that never carry an attachment.  */
    case GOMP_MAP_STRUCT:
    case GOMP_MAP_STRUCT_UNORD:
    case GOMP_MAP_FORCE_DEVICEPTR:
    case GOMP_MAP_DEVICE_RESIDENT:
    case GOMP_MAP_LINK:
    case GOMP_MAP_IF_PRESENT:
    case GOMP_MAP_FIRSTPRIVATE:
    case GOMP_MAP_FIRSTPRIVATE_INT:
    case GOMP_MAP_USE_DEVICE_PTR:
    case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
      return NULL_TREE;

    default:
      internal_error ("unexpected mapping node");
    }

  return error_mark_node;
}
11037 :
11038 : /* Given a pointer START_P to the start of a group of related (e.g. pointer)
11039 : mappings, return the chain pointer to the end of that group in the list. */
11040 :
static tree *
omp_group_last (tree *start_p)
{
  tree c = *start_p, nc, *grp_last_p = start_p;

  gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP);

  nc = OMP_CLAUSE_CHAIN (c);

  /* A group always contains at least C itself; if the next clause is not a
     map node, the group is a singleton.  */
  if (!nc || OMP_CLAUSE_CODE (nc) != OMP_CLAUSE_MAP)
    return grp_last_p;

  switch (OMP_CLAUSE_MAP_KIND (c))
    {
    default:
      /* For an ordinary data mapping, absorb any trailing auxiliary map
	 nodes (pointer/firstprivate-pointer nodes, attach/detach operations,
	 array descriptors) into the same group.  */
      while (nc
	     && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
	     && (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
		 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_POINTER
		 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ATTACH_DETACH
		 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
		 || (OMP_CLAUSE_MAP_KIND (nc)
		     == GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION)
		 || (OMP_CLAUSE_MAP_KIND (nc)
		     == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION)
		 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_DETACH
		 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ALWAYS_POINTER
		 || omp_map_clause_descriptor_p (nc)))
	{
	  tree nc2 = OMP_CLAUSE_CHAIN (nc);
	  if (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_DETACH)
	    {
	      /* In the specific case we're doing "exit data" on an array
		 slice of a reference-to-pointer struct component, we will see
		 DETACH followed by ATTACH_DETACH here.  We want to treat that
		 as a single group.  In other cases DETACH might represent a
		 stand-alone "detach" clause, so we don't want to consider
		 that part of the group.  */
	      if (nc2
		  && OMP_CLAUSE_CODE (nc2) == OMP_CLAUSE_MAP
		  && OMP_CLAUSE_MAP_KIND (nc2) == GOMP_MAP_ATTACH_DETACH)
		goto consume_two_nodes;
	      else
		break;
	    }
	  if (nc2
	      && OMP_CLAUSE_CODE (nc2) == OMP_CLAUSE_MAP
	      && (OMP_CLAUSE_MAP_KIND (nc)
		  == GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION)
	      && OMP_CLAUSE_MAP_KIND (nc2) == GOMP_MAP_ATTACH)
	    {
	    consume_two_nodes:
	      /* Pull both NC and NC2 into the group in one step.  */
	      grp_last_p = &OMP_CLAUSE_CHAIN (nc);
	      c = nc2;
	      nc = OMP_CLAUSE_CHAIN (nc2);
	    }
	  else
	    {
	      /* Pull just NC into the group and advance.  */
	      grp_last_p = &OMP_CLAUSE_CHAIN (c);
	      c = nc;
	      nc = nc2;
	    }
	}
      break;

    case GOMP_MAP_ATTACH:
    case GOMP_MAP_DETACH:
      /* This is a weird artifact of how directives are parsed: bare attach or
	 detach clauses get a subsequent (meaningless) FIRSTPRIVATE_POINTER or
	 FIRSTPRIVATE_REFERENCE node.  FIXME.  */
      if (nc
	  && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
	  && (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
	      || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_POINTER))
	grp_last_p = &OMP_CLAUSE_CHAIN (c);
      break;

    case GOMP_MAP_TO_PSET:
      /* A pointer-set mapping plus its following attach/detach operation
	 forms a single group.  */
      if (OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
	  && (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ATTACH
	      || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_DETACH))
	grp_last_p = &OMP_CLAUSE_CHAIN (c);
      break;

    case GOMP_MAP_STRUCT:
    case GOMP_MAP_STRUCT_UNORD:
      {
	/* For a struct mapping, OMP_CLAUSE_SIZE holds the number of member
	   mappings that follow; all of them belong to this group, plus an
	   optional leading firstprivate-pointer/attach-detach node.  */
	unsigned HOST_WIDE_INT num_mappings
	  = tree_to_uhwi (OMP_CLAUSE_SIZE (c));
	if (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_POINTER
	    || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
	    || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ATTACH_DETACH)
	  grp_last_p = &OMP_CLAUSE_CHAIN (*grp_last_p);
	for (unsigned i = 0; i < num_mappings; i++)
	  grp_last_p = &OMP_CLAUSE_CHAIN (*grp_last_p);
      }
      break;
    }

  return grp_last_p;
}
11142 :
11143 : /* Walk through LIST_P, and return a list of groups of mappings found (e.g.
11144 : OMP_CLAUSE_MAP with GOMP_MAP_{TO/FROM/TOFROM} followed by one or two
11145 : associated GOMP_MAP_POINTER mappings). Return a vector of omp_mapping_group
11146 : if we have more than one such group, else return NULL. */
11147 :
11148 : static void
11149 87974 : omp_gather_mapping_groups_1 (tree *list_p, vec<omp_mapping_group> *groups,
11150 : tree gather_sentinel)
11151 : {
11152 87974 : for (tree *cp = list_p;
11153 262014 : *cp && *cp != gather_sentinel;
11154 174040 : cp = &OMP_CLAUSE_CHAIN (*cp))
11155 : {
11156 174040 : if (OMP_CLAUSE_CODE (*cp) != OMP_CLAUSE_MAP)
11157 97052 : continue;
11158 :
11159 76988 : tree *grp_last_p = omp_group_last (cp);
11160 76988 : omp_mapping_group grp;
11161 :
11162 76988 : grp.grp_start = cp;
11163 76988 : grp.grp_end = *grp_last_p;
11164 76988 : grp.mark = UNVISITED;
11165 76988 : grp.sibling = NULL;
11166 76988 : grp.deleted = false;
11167 76988 : grp.reprocess_struct = false;
11168 76988 : grp.fragile = false;
11169 76988 : grp.next = NULL;
11170 76988 : groups->safe_push (grp);
11171 :
11172 76988 : cp = grp_last_p;
11173 : }
11174 87974 : }
11175 :
11176 : static vec<omp_mapping_group> *
11177 87656 : omp_gather_mapping_groups (tree *list_p)
11178 : {
11179 87656 : vec<omp_mapping_group> *groups = new vec<omp_mapping_group> ();
11180 :
11181 87656 : omp_gather_mapping_groups_1 (list_p, groups, NULL_TREE);
11182 :
11183 87656 : if (groups->length () > 0)
11184 : return groups;
11185 : else
11186 : {
11187 44588 : delete groups;
11188 44588 : return NULL;
11189 : }
11190 : }
11191 :
11192 : /* A pointer mapping group GRP may define a block of memory starting at some
11193 : base address, and maybe also define a firstprivate pointer or firstprivate
11194 : reference that points to that block. The return value is a node containing
11195 : the former, and the *FIRSTPRIVATE pointer is set if we have the latter.
11196 : If we define several base pointers, i.e. for a GOMP_MAP_STRUCT mapping,
11197 : return the number of consecutive chained nodes in CHAINED. */
11198 :
11199 : static tree
11200 77382 : omp_group_base (omp_mapping_group *grp, unsigned int *chained,
11201 : tree *firstprivate)
11202 : {
11203 77382 : tree node = *grp->grp_start;
11204 :
11205 77382 : *firstprivate = NULL_TREE;
11206 77382 : *chained = 1;
11207 :
11208 77382 : switch (OMP_CLAUSE_MAP_KIND (node))
11209 : {
11210 73288 : case GOMP_MAP_TO:
11211 73288 : case GOMP_MAP_FROM:
11212 73288 : case GOMP_MAP_TOFROM:
11213 73288 : case GOMP_MAP_ALWAYS_FROM:
11214 73288 : case GOMP_MAP_ALWAYS_TO:
11215 73288 : case GOMP_MAP_ALWAYS_TOFROM:
11216 73288 : case GOMP_MAP_FORCE_FROM:
11217 73288 : case GOMP_MAP_FORCE_TO:
11218 73288 : case GOMP_MAP_FORCE_TOFROM:
11219 73288 : case GOMP_MAP_FORCE_PRESENT:
11220 73288 : case GOMP_MAP_PRESENT_ALLOC:
11221 73288 : case GOMP_MAP_PRESENT_FROM:
11222 73288 : case GOMP_MAP_PRESENT_TO:
11223 73288 : case GOMP_MAP_PRESENT_TOFROM:
11224 73288 : case GOMP_MAP_ALWAYS_PRESENT_FROM:
11225 73288 : case GOMP_MAP_ALWAYS_PRESENT_TO:
11226 73288 : case GOMP_MAP_ALWAYS_PRESENT_TOFROM:
11227 73288 : case GOMP_MAP_ALLOC:
11228 73288 : case GOMP_MAP_RELEASE:
11229 73288 : case GOMP_MAP_DELETE:
11230 73288 : case GOMP_MAP_FORCE_ALLOC:
11231 73288 : case GOMP_MAP_IF_PRESENT:
11232 73288 : if (node == grp->grp_end)
11233 : return node;
11234 :
11235 34232 : node = OMP_CLAUSE_CHAIN (node);
11236 34232 : if (!node)
11237 0 : internal_error ("unexpected mapping node");
11238 34232 : if (omp_map_clause_descriptor_p (node))
11239 : {
11240 10595 : if (node == grp->grp_end)
11241 0 : return *grp->grp_start;
11242 10595 : node = OMP_CLAUSE_CHAIN (node);
11243 : }
11244 34232 : switch (OMP_CLAUSE_MAP_KIND (node))
11245 : {
11246 23048 : case GOMP_MAP_POINTER:
11247 23048 : case GOMP_MAP_FIRSTPRIVATE_POINTER:
11248 23048 : case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
11249 23048 : case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION:
11250 23048 : *firstprivate = OMP_CLAUSE_DECL (node);
11251 23048 : return *grp->grp_start;
11252 :
11253 11184 : case GOMP_MAP_ALWAYS_POINTER:
11254 11184 : case GOMP_MAP_ATTACH_DETACH:
11255 11184 : case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
11256 11184 : case GOMP_MAP_DETACH:
11257 11184 : return *grp->grp_start;
11258 :
11259 0 : default:
11260 0 : internal_error ("unexpected mapping node");
11261 : }
11262 : return error_mark_node;
11263 :
11264 26 : case GOMP_MAP_TO_PSET:
11265 26 : gcc_assert (node != grp->grp_end);
11266 26 : node = OMP_CLAUSE_CHAIN (node);
11267 26 : if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_ATTACH
11268 26 : || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_DETACH)
11269 : return NULL_TREE;
11270 : else
11271 0 : internal_error ("unexpected mapping node");
11272 : return error_mark_node;
11273 :
11274 1097 : case GOMP_MAP_ATTACH:
11275 1097 : case GOMP_MAP_DETACH:
11276 1097 : node = OMP_CLAUSE_CHAIN (node);
11277 1097 : if (!node || *grp->grp_start == grp->grp_end)
11278 : return NULL_TREE;
11279 0 : if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_POINTER
11280 0 : || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
11281 : {
11282 : /* We're mapping the base pointer itself in a bare attach or detach
11283 : node. This is a side effect of how parsing works, and the mapping
11284 : will be removed anyway (at least for enter/exit data directives).
11285 : We should ignore the mapping here. FIXME. */
11286 : return NULL_TREE;
11287 : }
11288 : else
11289 0 : internal_error ("unexpected mapping node");
11290 : return error_mark_node;
11291 :
11292 2687 : case GOMP_MAP_STRUCT:
11293 2687 : case GOMP_MAP_STRUCT_UNORD:
11294 2687 : {
11295 2687 : unsigned HOST_WIDE_INT num_mappings
11296 2687 : = tree_to_uhwi (OMP_CLAUSE_SIZE (node));
11297 2687 : node = OMP_CLAUSE_CHAIN (node);
11298 2687 : if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_POINTER
11299 2687 : || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
11300 : {
11301 393 : *firstprivate = OMP_CLAUSE_DECL (node);
11302 393 : node = OMP_CLAUSE_CHAIN (node);
11303 : }
11304 2294 : else if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_ATTACH_DETACH)
11305 314 : node = OMP_CLAUSE_CHAIN (node);
11306 2687 : *chained = num_mappings;
11307 2687 : return node;
11308 : }
11309 :
11310 : case GOMP_MAP_FORCE_DEVICEPTR:
11311 : case GOMP_MAP_DEVICE_RESIDENT:
11312 : case GOMP_MAP_LINK:
11313 : case GOMP_MAP_FIRSTPRIVATE:
11314 : case GOMP_MAP_FIRSTPRIVATE_INT:
11315 : case GOMP_MAP_USE_DEVICE_PTR:
11316 : case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
11317 : return NULL_TREE;
11318 :
11319 0 : case GOMP_MAP_FIRSTPRIVATE_POINTER:
11320 0 : case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
11321 0 : case GOMP_MAP_POINTER:
11322 0 : case GOMP_MAP_ALWAYS_POINTER:
11323 0 : case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION:
11324 : /* These shouldn't appear by themselves. */
11325 0 : if (!seen_error ())
11326 0 : internal_error ("unexpected pointer mapping node");
11327 0 : return error_mark_node;
11328 :
11329 0 : default:
11330 0 : gcc_unreachable ();
11331 : }
11332 :
11333 : return error_mark_node;
11334 : }
11335 :
11336 : /* Given a vector of omp_mapping_groups, build a hash table so we can look up
11337 : nodes by tree_operand_hash_no_se. */
11338 :
11339 : static void
11340 43552 : omp_index_mapping_groups_1 (hash_map<tree_operand_hash_no_se,
11341 : omp_mapping_group *> *grpmap,
11342 : vec<omp_mapping_group> *groups,
11343 : tree reindex_sentinel)
11344 : {
11345 43552 : omp_mapping_group *grp;
11346 43552 : unsigned int i;
11347 43552 : bool reindexing = reindex_sentinel != NULL_TREE, above_hwm = false;
11348 :
11349 121388 : FOR_EACH_VEC_ELT (*groups, i, grp)
11350 : {
11351 77836 : if (reindexing && *grp->grp_start == reindex_sentinel)
11352 77836 : above_hwm = true;
11353 :
11354 77836 : if (reindexing && !above_hwm)
11355 54395 : continue;
11356 :
11357 77536 : if (grp->reprocess_struct)
11358 154 : continue;
11359 :
11360 77382 : tree fpp;
11361 77382 : unsigned int chained;
11362 77382 : tree node = omp_group_base (grp, &chained, &fpp);
11363 :
11364 77382 : if (node == error_mark_node || (!node && !fpp))
11365 1407 : continue;
11366 :
11367 : for (unsigned j = 0;
11368 153226 : node && j < chained;
11369 77251 : node = OMP_CLAUSE_CHAIN (node), j++)
11370 : {
11371 77251 : tree decl = OMP_CLAUSE_DECL (node);
11372 : /* Sometimes we see zero-offset MEM_REF instead of INDIRECT_REF,
11373 : meaning node-hash lookups don't work. This is a workaround for
11374 : that, but ideally we should just create the INDIRECT_REF at
11375 : source instead. FIXME. */
11376 77251 : if (TREE_CODE (decl) == MEM_REF
11377 77251 : && integer_zerop (TREE_OPERAND (decl, 1)))
11378 0 : decl = build_fold_indirect_ref (TREE_OPERAND (decl, 0));
11379 :
11380 77251 : omp_mapping_group **prev = grpmap->get (decl);
11381 :
11382 77251 : if (prev && *prev == grp)
11383 : /* Empty. */;
11384 77251 : else if (prev)
11385 : {
11386 : /* Mapping the same thing twice is normally diagnosed as an error,
11387 : but can happen under some circumstances, e.g. in pr99928-16.c,
11388 : the directive:
11389 :
11390 : #pragma omp target simd reduction(+:a[:3]) \
11391 : map(always, tofrom: a[:6])
11392 : ...
11393 :
11394 : will result in two "a[0]" mappings (of different sizes). */
11395 :
11396 140 : grp->sibling = (*prev)->sibling;
11397 140 : (*prev)->sibling = grp;
11398 : }
11399 : else
11400 77111 : grpmap->put (decl, grp);
11401 : }
11402 :
11403 75975 : if (!fpp)
11404 52534 : continue;
11405 :
11406 23441 : omp_mapping_group **prev = grpmap->get (fpp);
11407 23441 : if (prev && *prev != grp)
11408 : {
11409 12 : grp->sibling = (*prev)->sibling;
11410 12 : (*prev)->sibling = grp;
11411 : }
11412 : else
11413 23429 : grpmap->put (fpp, grp);
11414 : }
11415 43552 : }
11416 :
11417 : static hash_map<tree_operand_hash_no_se, omp_mapping_group *> *
11418 43068 : omp_index_mapping_groups (vec<omp_mapping_group> *groups)
11419 : {
11420 43068 : hash_map<tree_operand_hash_no_se, omp_mapping_group *> *grpmap
11421 43068 : = new hash_map<tree_operand_hash_no_se, omp_mapping_group *>;
11422 :
11423 43068 : omp_index_mapping_groups_1 (grpmap, groups, NULL_TREE);
11424 :
11425 43068 : return grpmap;
11426 : }
11427 :
11428 : /* Rebuild group map from partially-processed clause list (during
11429 : omp_build_struct_sibling_lists). We have already processed nodes up until
11430 : a high-water mark (HWM). This is a bit tricky because the list is being
11431 : reordered as it is scanned, but we know:
11432 :
11433 : 1. The list after HWM has not been touched yet, so we can reindex it safely.
11434 :
11435 : 2. The list before and including HWM has been altered, but remains
11436 : well-formed throughout the sibling-list building operation.
11437 :
11438 : so, we can do the reindex operation in two parts, on the processed and
11439 : then the unprocessed halves of the list. */
11440 :
11441 : static hash_map<tree_operand_hash_no_se, omp_mapping_group *> *
11442 318 : omp_reindex_mapping_groups (tree *list_p,
11443 : vec<omp_mapping_group> *groups,
11444 : vec<omp_mapping_group> *processed_groups,
11445 : tree sentinel)
11446 : {
11447 318 : hash_map<tree_operand_hash_no_se, omp_mapping_group *> *grpmap
11448 318 : = new hash_map<tree_operand_hash_no_se, omp_mapping_group *>;
11449 :
11450 318 : processed_groups->truncate (0);
11451 :
11452 318 : omp_gather_mapping_groups_1 (list_p, processed_groups, sentinel);
11453 318 : omp_index_mapping_groups_1 (grpmap, processed_groups, NULL_TREE);
11454 318 : if (sentinel)
11455 166 : omp_index_mapping_groups_1 (grpmap, groups, sentinel);
11456 :
11457 318 : return grpmap;
11458 : }
11459 :
11460 : /* Find the immediately-containing struct for a component ref (etc.)
11461 : expression EXPR. */
11462 :
11463 : static tree
11464 45516 : omp_containing_struct (tree expr)
11465 : {
11466 45516 : tree expr0 = expr;
11467 :
11468 45516 : STRIP_NOPS (expr);
11469 :
11470 : /* Note: don't strip NOPs unless we're also stripping off array refs or a
11471 : component ref. */
11472 45516 : if (TREE_CODE (expr) != ARRAY_REF && TREE_CODE (expr) != COMPONENT_REF)
11473 : return expr0;
11474 :
11475 25990 : while (TREE_CODE (expr) == ARRAY_REF)
11476 3546 : expr = TREE_OPERAND (expr, 0);
11477 :
11478 22444 : if (TREE_CODE (expr) == COMPONENT_REF)
11479 20119 : expr = TREE_OPERAND (expr, 0);
11480 :
11481 : return expr;
11482 : }
11483 :
11484 : /* Return TRUE if DECL describes a component that is part of a whole structure
11485 : that is mapped elsewhere in GRPMAP. *MAPPED_BY_GROUP is set to the group
11486 : that maps that structure, if present. */
11487 :
11488 : static bool
11489 24311 : omp_mapped_by_containing_struct (hash_map<tree_operand_hash_no_se,
11490 : omp_mapping_group *> *grpmap,
11491 : tree decl,
11492 : omp_mapping_group **mapped_by_group)
11493 : {
11494 24311 : tree wsdecl = NULL_TREE;
11495 :
11496 24311 : *mapped_by_group = NULL;
11497 :
11498 45516 : while (true)
11499 : {
11500 45516 : wsdecl = omp_containing_struct (decl);
11501 45516 : if (wsdecl == decl)
11502 : break;
11503 22444 : omp_mapping_group **wholestruct = grpmap->get (wsdecl);
11504 22444 : if (!wholestruct
11505 19875 : && TREE_CODE (wsdecl) == MEM_REF
11506 22444 : && integer_zerop (TREE_OPERAND (wsdecl, 1)))
11507 : {
11508 0 : tree deref = TREE_OPERAND (wsdecl, 0);
11509 0 : deref = build_fold_indirect_ref (deref);
11510 0 : wholestruct = grpmap->get (deref);
11511 : }
11512 22444 : if (wholestruct)
11513 : {
11514 : /* An intermediate descriptor should not match here because the
11515 : pointee is actually not mapped by this group -- it is just a
11516 : zero-length alloc. */
11517 2569 : tree desc = OMP_CLAUSE_CHAIN (*(*wholestruct)->grp_start);
11518 2569 : if (desc != NULL_TREE && omp_map_clause_descriptor_p (desc))
11519 1330 : goto next;
11520 1239 : *mapped_by_group = *wholestruct;
11521 1239 : return true;
11522 : }
11523 19875 : next:
11524 : decl = wsdecl;
11525 : }
11526 :
11527 : return false;
11528 : }
11529 :
11530 : /* Helper function for omp_tsort_mapping_groups. Returns TRUE on success, or
11531 : FALSE on error. */
11532 :
static bool
omp_tsort_mapping_groups_1 (omp_mapping_group ***outlist,
			    vec<omp_mapping_group> *groups,
			    hash_map<tree_operand_hash_no_se,
				     omp_mapping_group *> *grpmap,
			    omp_mapping_group *grp)
{
  /* PERMANENT: already emitted to the output list.  */
  if (grp->mark == PERMANENT)
    return true;
  /* TEMPORARY: GRP is on the current DFS path, so we found a cycle.  */
  if (grp->mark == TEMPORARY)
    {
      fprintf (stderr, "when processing group:\n");
      debug_mapping_group (grp);
      internal_error ("base pointer cycle detected");
      return false;
    }
  grp->mark = TEMPORARY;

  /* First visit the group(s) mapping whatever GRP attaches to, so they are
     emitted before GRP.  */
  tree attaches_to = omp_get_attachment (grp);

  if (attaches_to)
    {
      omp_mapping_group **basep = grpmap->get (attaches_to);

      if (basep && *basep != grp)
	{
	  for (omp_mapping_group *w = *basep; w; w = w->sibling)
	    if (!omp_tsort_mapping_groups_1 (outlist, groups, grpmap, w))
	      return false;
	}
    }

  tree decl = OMP_CLAUSE_DECL (*grp->grp_start);

  /* Then walk up the chain of base pointers of GRP's decl, visiting the
     nearest group that maps one of them (if any) before GRP.  */
  while (decl)
    {
      tree base = omp_get_base_pointer (decl);

      if (!base)
	break;

      omp_mapping_group **innerp = grpmap->get (base);
      omp_mapping_group *wholestruct;

      /* We should treat whole-structure mappings as if all (pointer, in this
	 case) members are mapped as individual list items.  Check if we have
	 such a whole-structure mapping, if we don't have an explicit reference
	 to the pointer member itself.  */
      if (!innerp
	  && TREE_CODE (base) == COMPONENT_REF
	  && omp_mapped_by_containing_struct (grpmap, base, &wholestruct))
	innerp = &wholestruct;

      if (innerp && *innerp != grp)
	{
	  for (omp_mapping_group *w = *innerp; w; w = w->sibling)
	    if (!omp_tsort_mapping_groups_1 (outlist, groups, grpmap, w))
	      return false;
	  break;
	}

      decl = base;
    }

  grp->mark = PERMANENT;

  /* Emit grp to output list.  */

  **outlist = grp;
  *outlist = &grp->next;

  return true;
}
11606 :
11607 : /* Topologically sort GROUPS, so that OMP 5.0-defined base pointers come
11608 : before mappings that use those pointers. This is an implementation of the
11609 : depth-first search algorithm, described e.g. at:
11610 :
11611 : https://en.wikipedia.org/wiki/Topological_sorting
11612 : */
11613 :
static omp_mapping_group *
omp_tsort_mapping_groups (vec<omp_mapping_group> *groups,
			  hash_map<tree_operand_hash_no_se, omp_mapping_group *>
			    *grpmap,
			  bool enter_exit_data)
{
  omp_mapping_group *grp, *outlist = NULL, **cursor;
  unsigned int i;
  bool saw_runtime_implicit = false;

  cursor = &outlist;

  /* First pass: sort all explicitly-specified groups, deferring
     runtime-implicit ones so explicit mappings take effect first.  */
  FOR_EACH_VEC_ELT (*groups, i, grp)
    {
      if (grp->mark != PERMANENT)
	{
	  if (OMP_CLAUSE_MAP_RUNTIME_IMPLICIT_P (*grp->grp_start))
	    {
	      saw_runtime_implicit = true;
	      continue;
	    }
	  if (!omp_tsort_mapping_groups_1 (&cursor, groups, grpmap, grp))
	    return NULL;
	}
    }

  if (!saw_runtime_implicit)
    return outlist;

  /* Second pass: sort the deferred runtime-implicit groups (any already
     emitted as dependencies of explicit groups are now PERMANENT).  */
  FOR_EACH_VEC_ELT (*groups, i, grp)
    {
      if (grp->mark != PERMANENT
	  && OMP_CLAUSE_MAP_RUNTIME_IMPLICIT_P (*grp->grp_start))
	{
	  /* Clear the flag for enter/exit data because it is currently
	     meaningless for those operations in libgomp.  */
	  if (enter_exit_data)
	    OMP_CLAUSE_MAP_RUNTIME_IMPLICIT_P (*grp->grp_start) = 0;

	  if (!omp_tsort_mapping_groups_1 (&cursor, groups, grpmap, grp))
	    return NULL;
	}
    }

  return outlist;
}
11660 :
11661 : /* Split INLIST into three parts:
11662 :
11663 : - "present" alloc/to/from groups
11664 : - other to/from groups
11665 : - other alloc/release/delete groups
11666 :
11667 : These sub-lists are then concatenated together to form the final list.
11668 : Each sub-list retains the order of the original list.
11669 : Note that ATTACH nodes are later moved to the end of the list in
11670 : gimplify_adjust_omp_clauses, for target regions. */
11671 :
11672 : static omp_mapping_group *
11673 7979 : omp_segregate_mapping_groups (omp_mapping_group *inlist)
11674 : {
11675 7979 : omp_mapping_group *ard_groups = NULL, *tf_groups = NULL;
11676 7979 : omp_mapping_group *p_groups = NULL;
11677 7979 : omp_mapping_group **ard_tail = &ard_groups, **tf_tail = &tf_groups;
11678 7979 : omp_mapping_group **p_tail = &p_groups;
11679 :
11680 24050 : for (omp_mapping_group *w = inlist; w;)
11681 : {
11682 16071 : tree c = *w->grp_start;
11683 16071 : omp_mapping_group *next = w->next;
11684 :
11685 16071 : gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP);
11686 :
11687 16071 : switch (OMP_CLAUSE_MAP_KIND (c))
11688 : {
11689 1823 : case GOMP_MAP_ALLOC:
11690 1823 : case GOMP_MAP_RELEASE:
11691 1823 : case GOMP_MAP_DELETE:
11692 1823 : *ard_tail = w;
11693 1823 : w->next = NULL;
11694 1823 : ard_tail = &w->next;
11695 1823 : break;
11696 :
11697 : /* These map types are all semantically identical, so are moved into a
11698 : single group. They will each be changed into GOMP_MAP_FORCE_PRESENT
11699 : in gimplify_adjust_omp_clauses. */
11700 125 : case GOMP_MAP_PRESENT_ALLOC:
11701 125 : case GOMP_MAP_PRESENT_FROM:
11702 125 : case GOMP_MAP_PRESENT_TO:
11703 125 : case GOMP_MAP_PRESENT_TOFROM:
11704 125 : *p_tail = w;
11705 125 : w->next = NULL;
11706 125 : p_tail = &w->next;
11707 125 : break;
11708 :
11709 14123 : default:
11710 14123 : *tf_tail = w;
11711 14123 : w->next = NULL;
11712 14123 : tf_tail = &w->next;
11713 : }
11714 :
11715 : w = next;
11716 : }
11717 :
11718 : /* Now splice the lists together... */
11719 7979 : *tf_tail = ard_groups;
11720 7979 : *p_tail = tf_groups;
11721 :
11722 7979 : return p_groups;
11723 : }
11724 :
11725 : /* Given a list LIST_P containing groups of mappings given by GROUPS, reorder
11726 : those groups based on the output list of omp_tsort_mapping_groups --
11727 : singly-linked, threaded through each element's NEXT pointer starting at
11728 : HEAD. Each list element appears exactly once in that linked list.
11729 :
11730 : Each element of GROUPS may correspond to one or several mapping nodes.
11731 : Node groups are kept together, and in the reordered list, the positions of
11732 : the original groups are reused for the positions of the reordered list.
11733 : Hence if we have e.g.
11734 :
11735 : {to ptr ptr} firstprivate {tofrom ptr} ...
11736 : ^ ^ ^
11737 : first group non-"map" second group
11738 :
11739 : and say the second group contains a base pointer for the first so must be
11740 : moved before it, the resulting list will contain:
11741 :
11742 : {tofrom ptr} firstprivate {to ptr ptr} ...
11743 : ^ prev. second group ^ prev. first group
11744 : */
11745 :
11746 : static tree *
static tree *
omp_reorder_mapping_groups (vec<omp_mapping_group> *groups,
			    omp_mapping_group *head,
			    tree *list_p)
{
  omp_mapping_group *grp;
  unsigned int i;
  unsigned numgroups = groups->length ();
  auto_vec<tree> old_heads (numgroups);
  auto_vec<tree *> old_headps (numgroups);
  auto_vec<tree> new_heads (numgroups);
  auto_vec<tree> old_succs (numgroups);
  /* Whether LIST_P points directly at the first group's start; determines
     which pointer to return below.  */
  bool map_at_start = (list_p == (*groups)[0].grp_start);

  tree *new_grp_tail = NULL;

  /* Stash the start & end nodes of each mapping group before we start
     modifying the list.  */
  FOR_EACH_VEC_ELT (*groups, i, grp)
    {
      old_headps.quick_push (grp->grp_start);
      old_heads.quick_push (*grp->grp_start);
      old_succs.quick_push (OMP_CLAUSE_CHAIN (grp->grp_end));
    }

  /* And similarly, the heads of the groups in the order we want to rearrange
     the list to.  */
  for (omp_mapping_group *w = head; w; w = w->next)
    new_heads.quick_push (*w->grp_start);

  /* Walk the original group positions in order, installing the I'th group of
     the sorted list (tracked via HEAD) into the I'th original slot.  */
  FOR_EACH_VEC_ELT (*groups, i, grp)
    {
      gcc_assert (head);

      if (new_grp_tail && old_succs[i - 1] == old_heads[i])
	{
	  /* a {b c d} {e f g} h i j   (original)
	     -->
	     a {k l m} {e f g} h i j   (inserted new group on last iter)
	     -->
	     a {k l m} {n o p} h i j   (this time, chain last group to new one)
		      ^new_grp_tail
	  */
	  *new_grp_tail = new_heads[i];
	}
      else if (new_grp_tail)
	{
	  /* a {b c d} e {f g h} i j k (original)
	     -->
	     a {l m n} e {f g h} i j k (gap after last iter's group)
	     -->
	     a {l m n} e {o p q} h i j (chain last group to old successor)
		      ^new_grp_tail
	   */
	  *new_grp_tail = old_succs[i - 1];
	  *old_headps[i] = new_heads[i];
	}
      else
	{
	  /* The first inserted group -- point to new group, and leave end
	     open.
	     a {b c d} e f
	     -->
	     a {g h i...
	  */
	  *grp->grp_start = new_heads[i];
	}

      new_grp_tail = &OMP_CLAUSE_CHAIN (head->grp_end);

      head = head->next;
    }

  /* Terminate the final inserted group with the last original successor.  */
  if (new_grp_tail)
    *new_grp_tail = old_succs[numgroups - 1];

  /* Every group in the sorted list must have been consumed.  */
  gcc_assert (!head);

  return map_at_start ? (*groups)[0].grp_start : list_p;
}
11826 :
11827 : /* DECL is supposed to have lastprivate semantics in the outer contexts
11828 : of combined/composite constructs, starting with OCTX.
11829 : Add needed lastprivate, shared or map clause if no data sharing or
11830 : mapping clause are present. IMPLICIT_P is true if it is an implicit
11831 : clause (IV on simd), in which case the lastprivate will not be
11832 : copied to some constructs. */
11833 :
11834 : static void
static void
omp_lastprivate_for_combined_outer_constructs (struct gimplify_omp_ctx *octx,
					       tree decl, bool implicit_p)
{
  struct gimplify_omp_ctx *orig_octx = octx;
  /* Walk outwards through the enclosing contexts, adding the data-sharing
     clause appropriate for each region type until we hit a context that
     stops the propagation.  */
  for (; octx; octx = octx->outer_context)
    {
      /* Combined parallel/teams: DECL becomes shared there.  */
      if ((octx->region_type == ORT_COMBINED_PARALLEL
	   || (octx->region_type & ORT_COMBINED_TEAMS) == ORT_COMBINED_TEAMS)
	  && splay_tree_lookup (octx->variables,
				(splay_tree_key) decl) == NULL)
	{
	  omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN);
	  continue;
	}
      /* Combined taskloop: DECL becomes lastprivate on the task context.  */
      if ((octx->region_type & ORT_TASK) != 0
	  && octx->combined_loop
	  && splay_tree_lookup (octx->variables,
				(splay_tree_key) decl) == NULL)
	{
	  omp_add_variable (octx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
	  continue;
	}
      /* Implicit clause (e.g. simd IV) on a combined worksharing loop inside
	 a combined parallel: add the lastprivate one level up, on the
	 parallel context, and continue from there.  */
      if (implicit_p
	  && octx->region_type == ORT_WORKSHARE
	  && octx->combined_loop
	  && splay_tree_lookup (octx->variables,
				(splay_tree_key) decl) == NULL
	  && octx->outer_context
	  && octx->outer_context->region_type == ORT_COMBINED_PARALLEL
	  && splay_tree_lookup (octx->outer_context->variables,
				(splay_tree_key) decl) == NULL)
	{
	  octx = octx->outer_context;
	  omp_add_variable (octx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
	  continue;
	}
      /* Combined worksharing or OpenACC loop: lastprivate unless DECL is
	 already private there.  */
      if ((octx->region_type == ORT_WORKSHARE || octx->region_type == ORT_ACC)
	  && octx->combined_loop
	  && splay_tree_lookup (octx->variables,
				(splay_tree_key) decl) == NULL
	  && !omp_check_private (octx, decl, false))
	{
	  omp_add_variable (octx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
	  continue;
	}
      if (octx->region_type == ORT_COMBINED_TARGET)
	{
	  splay_tree_node n = splay_tree_lookup (octx->variables,
						 (splay_tree_key) decl);
	  if (n == NULL)
	    {
	      /* Not yet on the target: map it and keep going outwards.  */
	      omp_add_variable (octx, decl, GOVD_MAP | GOVD_SEEN);
	      octx = octx->outer_context;
	    }
	  else if (!implicit_p
		   && (n->value & GOVD_FIRSTPRIVATE_IMPLICIT))
	    {
	      /* Upgrade an implicit firstprivate on the target to a map,
		 since DECL must be copied back out.  */
	      n->value &= ~(GOVD_FIRSTPRIVATE
			    | GOVD_FIRSTPRIVATE_IMPLICIT
			    | GOVD_EXPLICIT);
	      omp_add_variable (octx, decl, GOVD_MAP | GOVD_SEEN);
	      octx = octx->outer_context;
	    }
	}
      break;
    }
  /* Mark DECL as seen in the outermost context we reached, if we added
     anything (or the clause was implicit).  */
  if (octx && (implicit_p || octx != orig_octx))
    omp_notice_variable (octx, decl, true);
}
11904 :
11905 : /* We might have indexed several groups for DECL, e.g. a "TO" mapping and also
11906 : a "FIRSTPRIVATE" mapping. Return the one that isn't firstprivate, etc. */
11907 :
11908 : static omp_mapping_group *
11909 5850 : omp_get_nonfirstprivate_group (hash_map<tree_operand_hash_no_se,
11910 : omp_mapping_group *> *grpmap,
11911 : tree decl, bool allow_deleted = false)
11912 : {
11913 5850 : omp_mapping_group **to_group_p = grpmap->get (decl);
11914 :
11915 5850 : if (!to_group_p)
11916 : return NULL;
11917 :
11918 2357 : omp_mapping_group *to_group = *to_group_p;
11919 :
11920 3618 : for (; to_group; to_group = to_group->sibling)
11921 : {
11922 2394 : tree grp_end = to_group->grp_end;
11923 2394 : switch (OMP_CLAUSE_MAP_KIND (grp_end))
11924 : {
11925 : case GOMP_MAP_FIRSTPRIVATE_POINTER:
11926 : case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
11927 : break;
11928 :
11929 1135 : default:
11930 1135 : if (allow_deleted || !to_group->deleted)
11931 : return to_group;
11932 : }
11933 : }
11934 :
11935 : return NULL;
11936 : }
11937 :
11938 : /* Return TRUE if the directive (whose clauses are described by the hash table
11939 : of mapping groups, GRPMAP) maps DECL explicitly. If TO_SPECIFICALLY is
11940 : true, only count TO mappings. If ALLOW_DELETED is true, ignore the
11941 : "deleted" flag for groups. If CONTAINED_IN_STRUCT is true, also return
11942 : TRUE if DECL is mapped as a member of a whole-struct mapping. */
11943 :
11944 : static bool
11945 4388 : omp_directive_maps_explicitly (hash_map<tree_operand_hash_no_se,
11946 : omp_mapping_group *> *grpmap,
11947 : tree decl, omp_mapping_group **base_group,
11948 : bool to_specifically, bool allow_deleted,
11949 : bool contained_in_struct)
11950 : {
11951 4388 : omp_mapping_group *decl_group
11952 4388 : = omp_get_nonfirstprivate_group (grpmap, decl, allow_deleted);
11953 :
11954 4388 : *base_group = NULL;
11955 :
11956 4388 : if (decl_group)
11957 : {
11958 1016 : tree grp_first = *decl_group->grp_start;
11959 : /* We might be called during omp_build_struct_sibling_lists, when
11960 : GOMP_MAP_STRUCT might have been inserted at the start of the group.
11961 : Skip over that, and also possibly the node after it. */
11962 1016 : if (OMP_CLAUSE_MAP_KIND (grp_first) == GOMP_MAP_STRUCT
11963 1016 : || OMP_CLAUSE_MAP_KIND (grp_first) == GOMP_MAP_STRUCT_UNORD)
11964 : {
11965 6 : grp_first = OMP_CLAUSE_CHAIN (grp_first);
11966 6 : if (OMP_CLAUSE_MAP_KIND (grp_first) == GOMP_MAP_FIRSTPRIVATE_POINTER
11967 6 : || (OMP_CLAUSE_MAP_KIND (grp_first)
11968 : == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
11969 12 : || OMP_CLAUSE_MAP_KIND (grp_first) == GOMP_MAP_ATTACH_DETACH)
11970 0 : grp_first = OMP_CLAUSE_CHAIN (grp_first);
11971 : }
11972 1016 : enum gomp_map_kind first_kind = OMP_CLAUSE_MAP_KIND (grp_first);
11973 1016 : if (!to_specifically
11974 566 : || GOMP_MAP_COPY_TO_P (first_kind)
11975 357 : || first_kind == GOMP_MAP_ALLOC)
11976 : {
11977 959 : *base_group = decl_group;
11978 959 : return true;
11979 : }
11980 : }
11981 :
11982 3429 : if (contained_in_struct
11983 3429 : && omp_mapped_by_containing_struct (grpmap, decl, base_group))
11984 : return true;
11985 :
11986 : return false;
11987 : }
11988 :
11989 : /* If we have mappings INNER and OUTER, where INNER is a component access and
11990 : OUTER is a mapping of the whole containing struct, check that the mappings
11991 : are compatible. We'll be deleting the inner mapping, so we need to make
11992 : sure the outer mapping does (at least) the same transfers to/from the device
11993 : as the inner mapping. */
11994 :
11995 : bool
11996 172 : omp_check_mapping_compatibility (location_t loc,
11997 : omp_mapping_group *outer,
11998 : omp_mapping_group *inner)
11999 : {
12000 172 : tree first_outer = *outer->grp_start, first_inner = *inner->grp_start;
12001 :
12002 172 : gcc_assert (OMP_CLAUSE_CODE (first_outer) == OMP_CLAUSE_MAP);
12003 172 : gcc_assert (OMP_CLAUSE_CODE (first_inner) == OMP_CLAUSE_MAP);
12004 :
12005 172 : enum gomp_map_kind outer_kind = OMP_CLAUSE_MAP_KIND (first_outer);
12006 172 : enum gomp_map_kind inner_kind = OMP_CLAUSE_MAP_KIND (first_inner);
12007 :
 : /* Identical kinds are trivially compatible. */
12008 172 : if (outer_kind == inner_kind)
12009 : return true;
12010 :
 : /* Otherwise the outer (whole-struct) mapping must subsume the inner
 : (component) one: each case lists the inner kinds whose transfers are
 : covered by the outer kind. */
12011 70 : switch (outer_kind)
12012 : {
12013 0 : case GOMP_MAP_ALWAYS_TO:
12014 0 : if (inner_kind == GOMP_MAP_FORCE_PRESENT
12015 0 : || inner_kind == GOMP_MAP_ALLOC
12016 0 : || inner_kind == GOMP_MAP_TO)
12017 : return true;
12018 : break;
12019 :
12020 0 : case GOMP_MAP_ALWAYS_FROM:
12021 0 : if (inner_kind == GOMP_MAP_FORCE_PRESENT
12022 0 : || inner_kind == GOMP_MAP_RELEASE
12023 : || inner_kind == GOMP_MAP_FROM)
12024 : return true;
12025 : break;
12026 :
12027 10 : case GOMP_MAP_TO:
12028 10 : if (inner_kind == GOMP_MAP_FORCE_PRESENT
12029 10 : || inner_kind == GOMP_MAP_ALLOC)
12030 : return true;
12031 : break;
12032 :
12033 8 : case GOMP_MAP_FROM:
12034 8 : if (inner_kind == GOMP_MAP_RELEASE
12035 8 : || inner_kind == GOMP_MAP_FORCE_PRESENT)
12036 : return true;
12037 : break;
12038 :
12039 32 : case GOMP_MAP_ALWAYS_TOFROM:
12040 32 : case GOMP_MAP_TOFROM:
12041 32 : if (inner_kind == GOMP_MAP_FORCE_PRESENT
12042 32 : || inner_kind == GOMP_MAP_ALLOC
12043 : || inner_kind == GOMP_MAP_TO
12044 24 : || inner_kind == GOMP_MAP_FROM
12045 12 : || inner_kind == GOMP_MAP_TOFROM)
12046 : return true;
12047 : break;
12048 :
12049 28 : default:
12050 28 : ;
12051 : }
12052 :
 : /* Incompatible: diagnose and report failure to the caller. */
12053 84 : error_at (loc, "data movement for component %qE is not compatible with "
12054 28 : "movement for struct %qE", OMP_CLAUSE_DECL (first_inner),
12055 28 : OMP_CLAUSE_DECL (first_outer));
12056 :
12057 28 : return false;
12058 : }
12059 :
12060 : /* This function handles several cases where clauses on a mapping directive
12061 : can interact with each other.
12062 :
12063 : If we have a FIRSTPRIVATE_POINTER node and we're also mapping the pointer
12064 : on the same directive, change the mapping of the first node to
12065 : ATTACH_DETACH. We should have detected that this will happen already in
12066 : c-omp.cc:c_omp_adjust_map_clauses and marked the appropriate decl
12067 : as addressable. (If we didn't, bail out.)
12068 :
12069 : If we have a FIRSTPRIVATE_REFERENCE (for a reference to pointer) and we're
12070 : mapping the base pointer also, we may need to change the mapping type to
12071 : ATTACH_DETACH and synthesize an alloc node for the reference itself.
12072 :
12073 : If we have an ATTACH_DETACH node, this is an array section with a pointer
12074 : base. If we're mapping the base on the same directive too, we can drop its
12075 : mapping. However, if we have a reference to pointer, make other appropriate
12076 : adjustments to the mapping nodes instead.
12077 :
12078 : If we have an ATTACH_DETACH node with a Fortran pointer-set (array
12079 : descriptor) mapping for a derived-type component, and we're also mapping the
12080 : whole of the derived-type variable on another clause, the pointer-set
12081 : mapping is removed.
12082 :
12083 : If we have a component access but we're also mapping the whole of the
12084 : containing struct, drop the former access.
12085 :
12086 : If the expression is a component access, and we're also mapping a base
12087 : pointer used in that component access in the same expression, change the
12088 : mapping type of the latter to ALLOC (ready for processing by
12089 : omp_build_struct_sibling_lists). */
12090 :
12091 : void
12092 7979 : omp_resolve_clause_dependencies (enum tree_code code,
12093 : vec<omp_mapping_group> *groups,
12094 : hash_map<tree_operand_hash_no_se,
12095 : omp_mapping_group *> *grpmap)
12096 : {
12097 7979 : int i;
12098 7979 : omp_mapping_group *grp;
 : /* Set when a group's last node is removed, which can leave the next
 : group's GRP_START pointer dangling; repaired after the main loop. */
12099 7979 : bool repair_chain = false;
12100 :
12101 22851 : FOR_EACH_VEC_ELT (*groups, i, grp)
12102 : {
12103 14872 : tree grp_end = grp->grp_end;
12104 14872 : tree decl = OMP_CLAUSE_DECL (grp_end);
12105 :
12106 14872 : gcc_assert (OMP_CLAUSE_CODE (grp_end) == OMP_CLAUSE_MAP);
12107 :
12108 14872 : switch (OMP_CLAUSE_MAP_KIND (grp_end))
12109 : {
 : /* Pointer also mapped on this directive: convert the firstprivate
 : mapping into an attach/detach (see function comment). */
12110 1150 : case GOMP_MAP_FIRSTPRIVATE_POINTER:
12111 1150 : {
12112 1150 : omp_mapping_group *to_group
12113 1150 : = omp_get_nonfirstprivate_group (grpmap, decl);
12114 :
12115 1150 : if (!to_group || to_group == grp)
12116 1125 : continue;
12117 :
12118 25 : tree grp_first = *to_group->grp_start;
12119 25 : enum gomp_map_kind first_kind = OMP_CLAUSE_MAP_KIND (grp_first);
12120 :
12121 25 : if ((GOMP_MAP_COPY_TO_P (first_kind)
12122 7 : || first_kind == GOMP_MAP_ALLOC)
12123 50 : && (OMP_CLAUSE_MAP_KIND (to_group->grp_end)
12124 : != GOMP_MAP_FIRSTPRIVATE_POINTER))
12125 : {
 : /* c_omp_adjust_map_clauses should have marked the decl
 : addressable already (see function comment). */
12126 25 : gcc_assert (TREE_ADDRESSABLE (OMP_CLAUSE_DECL (grp_end)));
12127 25 : OMP_CLAUSE_SET_MAP_KIND (grp_end, GOMP_MAP_ATTACH_DETACH);
12128 : }
12129 : }
12130 : break;
12131 :
 : /* Reference to pointer whose pointee is also mapped: switch to
 : attach/detach and, if needed, synthesize an ALLOC node for the
 : reference itself. */
12132 156 : case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
12133 156 : {
12134 156 : tree ptr = build_fold_indirect_ref (decl);
12135 :
12136 156 : omp_mapping_group *to_group
12137 156 : = omp_get_nonfirstprivate_group (grpmap, ptr);
12138 :
12139 156 : if (!to_group || to_group == grp)
12140 152 : continue;
12141 :
12142 4 : tree grp_first = *to_group->grp_start;
12143 4 : enum gomp_map_kind first_kind = OMP_CLAUSE_MAP_KIND (grp_first);
12144 :
12145 4 : if (GOMP_MAP_COPY_TO_P (first_kind)
12146 4 : || first_kind == GOMP_MAP_ALLOC)
12147 : {
12148 4 : OMP_CLAUSE_SET_MAP_KIND (grp_end, GOMP_MAP_ATTACH_DETACH);
12149 4 : OMP_CLAUSE_DECL (grp_end) = ptr;
12150 4 : if ((OMP_CLAUSE_CHAIN (*to_group->grp_start)
12151 4 : == to_group->grp_end)
12152 4 : && (OMP_CLAUSE_MAP_KIND (to_group->grp_end)
12153 : == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
12154 : {
12155 0 : gcc_assert (TREE_ADDRESSABLE
12156 : (OMP_CLAUSE_DECL (to_group->grp_end)));
12157 0 : OMP_CLAUSE_SET_MAP_KIND (to_group->grp_end,
12158 : GOMP_MAP_ATTACH_DETACH);
12159 :
 : /* Allocate space for the reference itself: an ALLOC node
 : over a zero-offset MEM_REF of its address, sized by the
 : reference's type, spliced after the group's first node. */
12160 0 : location_t loc = OMP_CLAUSE_LOCATION (to_group->grp_end);
12161 0 : tree alloc
12162 0 : = build_omp_clause (loc, OMP_CLAUSE_MAP);
12163 0 : OMP_CLAUSE_SET_MAP_KIND (alloc, GOMP_MAP_ALLOC);
12164 0 : tree tmp = build_fold_addr_expr (OMP_CLAUSE_DECL
12165 : (to_group->grp_end));
12166 0 : tree char_ptr_type = build_pointer_type (char_type_node);
12167 0 : OMP_CLAUSE_DECL (alloc)
12168 0 : = build2 (MEM_REF, char_type_node,
12169 : tmp,
12170 : build_int_cst (char_ptr_type, 0));
12171 0 : OMP_CLAUSE_SIZE (alloc) = TYPE_SIZE_UNIT (TREE_TYPE (tmp));
12172 :
12173 0 : OMP_CLAUSE_CHAIN (alloc)
12174 0 : = OMP_CLAUSE_CHAIN (*to_group->grp_start);
12175 0 : OMP_CLAUSE_CHAIN (*to_group->grp_start) = alloc;
12176 : }
12177 : }
12178 : }
12179 : break;
12180 :
 : /* Array section with a pointer base: if the base is mapped on the
 : same directive, drop or adjust the base's mapping. */
12181 : case GOMP_MAP_ATTACH_DETACH:
12182 : case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
12183 : {
12184 2583 : tree base_ptr, referenced_ptr_node = NULL_TREE;
12185 :
12186 2583 : while (TREE_CODE (decl) == ARRAY_REF)
12187 0 : decl = TREE_OPERAND (decl, 0);
12188 :
12189 2583 : if (TREE_CODE (decl) == INDIRECT_REF)
12190 13 : decl = TREE_OPERAND (decl, 0);
12191 :
12192 : /* Only component accesses. */
12193 2583 : if (DECL_P (decl))
12194 221 : continue;
12195 :
12196 : /* We want the pointer itself when checking if the base pointer is
12197 : mapped elsewhere in the same directive -- if we have a
12198 : reference to the pointer, don't use that. */
12199 :
12200 2362 : if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
12201 2362 : && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
12202 : {
12203 394 : referenced_ptr_node = OMP_CLAUSE_CHAIN (*grp->grp_start);
12204 394 : base_ptr = OMP_CLAUSE_DECL (referenced_ptr_node);
12205 : }
12206 : else
12207 : base_ptr = decl;
12208 :
 : /* On exit-data constructs we detach rather than attach. */
12209 1944 : gomp_map_kind zlas_kind
12210 2362 : = (code == OACC_EXIT_DATA || code == OMP_TARGET_EXIT_DATA)
12211 2362 : ? GOMP_MAP_DETACH : GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION;
12212 :
12213 2362 : if (TREE_CODE (TREE_TYPE (base_ptr)) == POINTER_TYPE)
12214 : {
12215 : /* If we map the base TO, and we're doing an attachment, we can
12216 : skip the TO mapping altogether and create an ALLOC mapping
12217 : instead, since the attachment will overwrite the device
12218 : pointer in that location immediately anyway. Otherwise,
12219 : change our mapping to
12220 : GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION in case the
12221 : attachment target has not been copied to the device already
12222 : by some earlier directive. */
12223 :
12224 1938 : bool base_mapped_to = false;
12225 :
12226 1938 : omp_mapping_group *base_group;
12227 :
12228 1938 : if (omp_directive_maps_explicitly (grpmap, base_ptr,
12229 : &base_group, false, true,
12230 : false))
12231 : {
12232 450 : if (referenced_ptr_node)
12233 : {
12234 129 : base_mapped_to = true;
 : /* Drop the trailing ATTACH_DETACH node of the base
 : group; GRP_START pointers of following groups may
 : now dangle, so request chain repair below. */
12235 129 : if ((OMP_CLAUSE_MAP_KIND (base_group->grp_end)
12236 : == GOMP_MAP_ATTACH_DETACH)
12237 129 : && (OMP_CLAUSE_CHAIN (*base_group->grp_start)
12238 : == base_group->grp_end))
12239 : {
12240 258 : OMP_CLAUSE_CHAIN (*base_group->grp_start)
12241 129 : = OMP_CLAUSE_CHAIN (base_group->grp_end);
12242 129 : base_group->grp_end = *base_group->grp_start;
12243 129 : repair_chain = true;
12244 : }
12245 : }
12246 : else
12247 : {
12248 321 : base_group->deleted = true;
12249 321 : OMP_CLAUSE_ATTACHMENT_MAPPING_ERASED (grp_end) = 1;
12250 : }
12251 : }
12252 :
12253 : /* We're dealing with a reference to a pointer, and we are
12254 : attaching both the reference and the pointer. We know the
12255 : reference itself is on the target, because we are going to
12256 : create an ALLOC node for it in accumulate_sibling_list. The
12257 : pointer might be on the target already or it might not, but
12258 : if it isn't then it's not an error, so use
12259 : GOMP_MAP_ATTACH_ZLAS for it. */
12260 1938 : if (!base_mapped_to && referenced_ptr_node)
12261 56 : OMP_CLAUSE_SET_MAP_KIND (referenced_ptr_node, zlas_kind);
12262 :
12263 1938 : omp_mapping_group *struct_group;
12264 1938 : tree desc;
12265 1938 : if ((desc = OMP_CLAUSE_CHAIN (*grp->grp_start))
12266 1938 : && omp_map_clause_descriptor_p (desc)
12267 2983 : && omp_mapped_by_containing_struct (grpmap, decl,
12268 : &struct_group))
12269 : /* If we have a pointer set but we're mapping (or unmapping)
12270 : the whole of the containing struct, we can remove the
12271 : pointer set mapping. */
12272 15 : OMP_CLAUSE_CHAIN (*grp->grp_start) = OMP_CLAUSE_CHAIN (desc);
12273 : }
12274 424 : else if (TREE_CODE (TREE_TYPE (base_ptr)) == REFERENCE_TYPE
12275 424 : && (TREE_CODE (TREE_TYPE (TREE_TYPE (base_ptr)))
12276 : == ARRAY_TYPE)
12277 562 : && OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION
12278 : (*grp->grp_start))
12279 92 : OMP_CLAUSE_SET_MAP_KIND (grp->grp_end, zlas_kind);
12280 : }
12281 : break;
12282 :
12283 : case GOMP_MAP_ATTACH:
12284 : /* Ignore standalone attach here. */
12285 : break;
12286 :
12287 10920 : default:
12288 10920 : {
 : /* A single-node component mapping whose containing struct is
 : also mapped here is redundant: check compatibility, then
 : drop it. */
12289 10920 : omp_mapping_group *struct_group;
12290 10920 : if (omp_mapped_by_containing_struct (grpmap, decl, &struct_group)
12291 10920 : && *grp->grp_start == grp_end)
12292 : {
12293 94 : omp_check_mapping_compatibility (OMP_CLAUSE_LOCATION (grp_end),
12294 : struct_group, grp);
12295 : /* Remove the whole of this mapping -- redundant. */
12296 94 : grp->deleted = true;
12297 : }
12298 :
 : /* If any base pointer used by this access is mapped "to" on the
 : same directive, downgrade that mapping to ALLOC ready for
 : omp_build_struct_sibling_lists (see function comment). */
12299 : tree base = decl;
12300 12504 : while ((base = omp_get_base_pointer (base)))
12301 : {
12302 1584 : omp_mapping_group *base_group;
12303 :
12304 1584 : if (omp_directive_maps_explicitly (grpmap, base, &base_group,
12305 : true, true, false))
12306 : {
12307 342 : tree grp_first = *base_group->grp_start;
12308 342 : OMP_CLAUSE_SET_MAP_KIND (grp_first, GOMP_MAP_ALLOC);
12309 : }
12310 : }
12311 : }
12312 : }
12313 : }
12314 :
12315 7979 : if (repair_chain)
12316 : {
12317 : /* Group start pointers may have become detached from the
12318 : OMP_CLAUSE_CHAIN of previous groups if elements were removed from the
12319 : end of those groups. Fix that now. */
12320 : tree *new_next = NULL;
12321 716 : FOR_EACH_VEC_ELT (*groups, i, grp)
12322 : {
12323 587 : if (new_next)
12324 458 : grp->grp_start = new_next;
12325 :
12326 587 : new_next = &OMP_CLAUSE_CHAIN (grp->grp_end);
12327 : }
12328 : }
12329 7979 : }
12330 :
12331 : /* Similar to omp_resolve_clause_dependencies, but for OpenACC. The only
12332 : clause dependencies we handle for now are struct element mappings and
12333 : whole-struct mappings on the same directive, and duplicate clause
12334 : detection. */
12335 :
12336 : void
12337 9461 : oacc_resolve_clause_dependencies (vec<omp_mapping_group> *groups,
12338 : hash_map<tree_operand_hash_no_se,
12339 : omp_mapping_group *> *grpmap)
12340 : {
12341 9461 : int i;
12342 9461 : omp_mapping_group *grp;
 : /* Lazily-allocated sets: component refs seen so far, and those already
 : diagnosed (to avoid repeating the duplicate-clause error). */
12343 9461 : hash_set<tree_operand_hash> *seen_components = NULL;
12344 9461 : hash_set<tree_operand_hash> *shown_error = NULL;
12345 :
12346 24641 : FOR_EACH_VEC_ELT (*groups, i, grp)
12347 : {
12348 15180 : tree grp_end = grp->grp_end;
12349 15180 : tree decl = OMP_CLAUSE_DECL (grp_end);
12350 :
12351 15180 : gcc_assert (OMP_CLAUSE_CODE (grp_end) == OMP_CLAUSE_MAP);
12352 :
 : /* NOTE(review): GRP_END is an OMP_CLAUSE node, so DECL_P (grp_end)
 : looks always-false -- was DECL_P (decl) intended? Confirm before
 : relying on this early-out. */
12353 15180 : if (DECL_P (grp_end))
12354 14663 : continue;
12355 :
 : /* Strip array indexing; only component accesses are checked for
 : duplicates below. */
12356 15180 : tree c = OMP_CLAUSE_DECL (*grp->grp_start);
12357 16805 : while (TREE_CODE (c) == ARRAY_REF)
12358 1625 : c = TREE_OPERAND (c, 0);
12359 15180 : if (TREE_CODE (c) != COMPONENT_REF)
12360 14663 : continue;
12361 517 : if (!seen_components)
12362 474 : seen_components = new hash_set<tree_operand_hash> ();
12363 517 : if (!shown_error)
12364 474 : shown_error = new hash_set<tree_operand_hash> ();
12365 517 : if (seen_components->contains (c)
12366 517 : && !shown_error->contains (c))
12367 : {
12368 10 : error_at (OMP_CLAUSE_LOCATION (grp_end),
12369 : "%qE appears more than once in map clauses",
12370 5 : OMP_CLAUSE_DECL (grp_end));
12371 5 : shown_error->add (c);
12372 : }
12373 : else
12374 512 : seen_components->add (c);
12375 :
 : /* A single-node component mapping whose containing struct is also
 : mapped on this directive is redundant. */
12376 517 : omp_mapping_group *struct_group;
12377 517 : if (omp_mapped_by_containing_struct (grpmap, decl, &struct_group)
12378 517 : && *grp->grp_start == grp_end)
12379 : {
12380 78 : omp_check_mapping_compatibility (OMP_CLAUSE_LOCATION (grp_end),
12381 : struct_group, grp);
12382 : /* Remove the whole of this mapping -- redundant. */
12383 78 : grp->deleted = true;
12384 : }
12385 : }
12386 :
12387 9461 : if (seen_components)
12388 474 : delete seen_components;
12389 9461 : if (shown_error)
12390 474 : delete shown_error;
12391 9461 : }
12392 :
12393 : /* Link node NEWNODE so it is pointed to by chain INSERT_AT. NEWNODE's chain
12394 : is linked to the previous node pointed to by INSERT_AT. */
12395 :
12396 : static tree *
12397 1076 : omp_siblist_insert_node_after (tree newnode, tree *insert_at)
12398 : {
 : /* Splice NEWNODE in front of the node *INSERT_AT currently points to,
 : and return the address of NEWNODE's chain as the new insertion/scan
 : point. */
12399 1076 : OMP_CLAUSE_CHAIN (newnode) = *insert_at;
12400 1076 : *insert_at = newnode;
12401 1076 : return &OMP_CLAUSE_CHAIN (newnode);
12402 : }
12403 :
12404 : /* Move NODE (which is currently pointed to by the chain OLD_POS) so it is
12405 : pointed to by chain MOVE_AFTER instead. */
12406 :
12407 : static void
12408 1152 : omp_siblist_move_node_after (tree node, tree *old_pos, tree *move_after)
12409 : {
 : /* Unlink NODE from OLD_POS, then relink it at MOVE_AFTER. */
12410 1152 : gcc_assert (node == *old_pos);
12411 1152 : *old_pos = OMP_CLAUSE_CHAIN (node);
12412 1152 : OMP_CLAUSE_CHAIN (node) = *move_after;
12413 1152 : *move_after = node;
12414 1152 : }
12415 :
12416 : /* Move nodes from FIRST_PTR (pointed to by previous node's chain) to
12417 : LAST_NODE to after MOVE_AFTER chain. Similar to below function, but no
12418 : new nodes are prepended to the list before splicing into the new position.
12419 : Return the position we should continue scanning the list at, or NULL to
12420 : stay where we were. */
12421 :
12422 : static tree *
12423 254 : omp_siblist_move_nodes_after (tree *first_ptr, tree last_node,
12424 : tree *move_after)
12425 : {
 : /* Nothing to do when the sublist already sits at the target position;
 : NULL tells the caller to keep scanning from where it was. */
12426 254 : if (first_ptr == move_after)
12427 : return NULL;
12428 :
 : /* Unlink [*FIRST_PTR .. LAST_NODE] and relink it at MOVE_AFTER. */
12429 243 : tree tmp = *first_ptr;
12430 243 : *first_ptr = OMP_CLAUSE_CHAIN (last_node);
12431 243 : OMP_CLAUSE_CHAIN (last_node) = *move_after;
12432 243 : *move_after = tmp;
12433 :
12434 243 : return first_ptr;
12435 : }
12436 :
12437 : /* Concatenate two lists described by [FIRST_NEW, LAST_NEW_TAIL] and
12438 : [FIRST_PTR, LAST_NODE], and insert them in the OMP clause list after chain
12439 : pointer MOVE_AFTER.
12440 :
12441 : The latter list was previously part of the OMP clause list, and the former
12442 : (prepended) part is comprised of new nodes.
12443 :
12444 : We start with a list of nodes starting with a struct mapping node. We
12445 : rearrange the list so that new nodes starting from FIRST_NEW and whose last
12446 : node's chain is LAST_NEW_TAIL comes directly after MOVE_AFTER, followed by
12447 : the group of mapping nodes we are currently processing (from the chain
12448 : FIRST_PTR to LAST_NODE). The return value is the pointer to the next chain
12449 : we should continue processing from, or NULL to stay where we were.
12450 :
12451 : The transformation (in the case where MOVE_AFTER and FIRST_PTR are
12452 : different) is worked through below. Here we are processing LAST_NODE, and
12453 : FIRST_PTR points at the preceding mapping clause:
12454 :
12455 : #. mapping node chain
12456 : ---------------------------------------------------
12457 : A. struct_node [->B]
12458 : B. comp_1 [->C]
12459 : C. comp_2 [->D (move_after)]
12460 : D. map_to_3 [->E]
12461 : E. attach_3 [->F (first_ptr)]
12462 : F. map_to_4 [->G (continue_at)]
12463 : G. attach_4 (last_node) [->H]
12464 : H. ...
12465 :
12466 : *last_new_tail = *first_ptr;
12467 :
12468 : I. new_node (first_new) [->F (last_new_tail)]
12469 :
12470 : *first_ptr = OMP_CLAUSE_CHAIN (last_node)
12471 :
12472 : #. mapping node chain
12473 : ----------------------------------------------------
12474 : A. struct_node [->B]
12475 : B. comp_1 [->C]
12476 : C. comp_2 [->D (move_after)]
12477 : D. map_to_3 [->E]
12478 : E. attach_3 [->H (first_ptr)]
12479 : F. map_to_4 [->G (continue_at)]
12480 : G. attach_4 (last_node) [->H]
12481 : H. ...
12482 :
12483 : I. new_node (first_new) [->F (last_new_tail)]
12484 :
12485 : OMP_CLAUSE_CHAIN (last_node) = *move_after;
12486 :
12487 : #. mapping node chain
12488 : ---------------------------------------------------
12489 : A. struct_node [->B]
12490 : B. comp_1 [->C]
12491 : C. comp_2 [->D (move_after)]
12492 : D. map_to_3 [->E]
12493 : E. attach_3 [->H (continue_at)]
12494 : F. map_to_4 [->G]
12495 : G. attach_4 (last_node) [->D]
12496 : H. ...
12497 :
12498 : I. new_node (first_new) [->F (last_new_tail)]
12499 :
12500 : *move_after = first_new;
12501 :
12502 : #. mapping node chain
12503 : ---------------------------------------------------
12504 : A. struct_node [->B]
12505 : B. comp_1 [->C]
12506 : C. comp_2 [->I (move_after)]
12507 : D. map_to_3 [->E]
12508 : E. attach_3 [->H (continue_at)]
12509 : F. map_to_4 [->G]
12510 : G. attach_4 (last_node) [->D]
12511 : H. ...
12512 : I. new_node (first_new) [->F (last_new_tail)]
12513 :
12514 : or, in order:
12515 :
12516 : #. mapping node chain
12517 : ---------------------------------------------------
12518 : A. struct_node [->B]
12519 : B. comp_1 [->C]
12520 : C. comp_2 [->I (move_after)]
12521 : I. new_node (first_new) [->F (last_new_tail)]
12522 : F. map_to_4 [->G]
12523 : G. attach_4 (last_node) [->D]
12524 : D. map_to_3 [->E]
12525 : E. attach_3 [->H (continue_at)]
12526 : H. ...
12527 : */
12528 :
12529 : static tree *
12530 71 : omp_siblist_move_concat_nodes_after (tree first_new, tree *last_new_tail,
12531 : tree *first_ptr, tree last_node,
12532 : tree *move_after)
12533 : {
 : /* See the worked example in the comment above. Returns the next chain
 : position to continue scanning from, or NULL to stay put. */
12534 71 : tree *continue_at = NULL;
 : /* Prepend the new nodes to the existing sublist. */
12535 71 : *last_new_tail = *first_ptr;
12536 71 : if (first_ptr == move_after)
12537 12 : *move_after = first_new;
12538 : else
12539 : {
 : /* Unlink the combined list and relink it at MOVE_AFTER. */
12540 59 : *first_ptr = OMP_CLAUSE_CHAIN (last_node);
12541 59 : continue_at = first_ptr;
12542 59 : OMP_CLAUSE_CHAIN (last_node) = *move_after;
12543 59 : *move_after = first_new;
12544 : }
12545 71 : return continue_at;
12546 : }
12547 :
 : /* Return the first token of the trailing run of ACCESS_METHOD tokens in
 : ADDR_TOKENS, or the final token if it is not an ACCESS_METHOD. */
12548 : static omp_addr_token *
12549 11011 : omp_first_chained_access_token (vec<omp_addr_token *> &addr_tokens)
12550 : {
12551 11011 : using namespace omp_addr_tokenizer;
12552 11011 : int idx = addr_tokens.length () - 1;
12553 11011 : gcc_assert (idx >= 0);
12554 11011 : if (addr_tokens[idx]->type != ACCESS_METHOD)
12555 : return addr_tokens[idx];
 : /* Walk backwards over consecutive ACCESS_METHOD tokens. */
12556 11038 : while (idx > 0 && addr_tokens[idx - 1]->type == ACCESS_METHOD)
12557 : idx--;
12558 11011 : return addr_tokens[idx];
12559 : }
12560 :
12561 : /* Mapping struct members causes an additional set of nodes to be created,
12562 : starting with GOMP_MAP_STRUCT followed by a number of mappings equal to the
12563 : number of members being mapped, in order of ascending position (address or
12564 : bitwise).
12565 :
12566 : We scan through the list of mapping clauses, calling this function for each
12567 : struct member mapping we find, and build up the list of mappings after the
12568 : initial GOMP_MAP_STRUCT node. For pointer members, these will be
12569 : newly-created ALLOC nodes. For non-pointer members, the existing mapping is
12570 : moved into place in the sorted list.
12571 :
12572 : struct {
12573 : int *a;
12574 : int *b;
12575 : int c;
12576 : int *d;
12577 : };
12578 :
12579 : #pragma (acc|omp directive) copy(struct.a[0:n], struct.b[0:n], struct.c,
12580 : struct.d[0:n])
12581 :
12582 : GOMP_MAP_STRUCT (4)
12583 : [GOMP_MAP_FIRSTPRIVATE_REFERENCE -- for refs to structs]
12584 : GOMP_MAP_ALLOC (struct.a)
12585 : GOMP_MAP_ALLOC (struct.b)
12586 : GOMP_MAP_TO (struct.c)
12587 : GOMP_MAP_ALLOC (struct.d)
12588 : ...
12589 :
12590 : In the case where we are mapping references to pointers, or in Fortran if
12591 : we are mapping an array with a descriptor, additional nodes may be created
12592 : after the struct node list also.
12593 :
12594 : The return code is either a pointer to the next node to process (if the
12595 : list has been rearranged), else NULL to continue with the next node in the
12596 : original list. */
12597 :
12598 : static tree *
12599 4512 : omp_accumulate_sibling_list (enum omp_region_type region_type,
12600 : enum tree_code code,
12601 : hash_map<tree_operand_hash, tree>
12602 : *&struct_map_to_clause,
12603 : hash_map<tree_operand_hash_no_se,
12604 : omp_mapping_group *> *group_map,
12605 : tree *grp_start_p, tree grp_end,
12606 : vec<omp_addr_token *> &addr_tokens, tree **inner,
12607 : bool *fragile_p, bool reprocessing_struct,
12608 : tree **added_tail)
12609 : {
12610 4512 : using namespace omp_addr_tokenizer;
12611 4512 : poly_offset_int coffset;
12612 4512 : poly_int64 cbitpos;
12613 4512 : tree ocd = OMP_CLAUSE_DECL (grp_end);
12614 4512 : bool openmp = !(region_type & ORT_ACC);
12615 4512 : bool target = (region_type & ORT_TARGET) != 0;
12616 4512 : tree *continue_at = NULL;
12617 :
12618 4825 : while (TREE_CODE (ocd) == ARRAY_REF)
12619 313 : ocd = TREE_OPERAND (ocd, 0);
12620 :
12621 4512 : if (*fragile_p)
12622 : {
12623 156 : omp_mapping_group *to_group
12624 156 : = omp_get_nonfirstprivate_group (group_map, ocd, true);
12625 :
12626 156 : if (to_group)
12627 : return NULL;
12628 : }
12629 :
12630 4424 : omp_addr_token *last_token = omp_first_chained_access_token (addr_tokens);
12631 4424 : if (last_token->type == ACCESS_METHOD)
12632 : {
12633 4424 : switch (last_token->u.access_kind)
12634 : {
12635 735 : case ACCESS_REF:
12636 735 : case ACCESS_REF_TO_POINTER:
12637 735 : case ACCESS_REF_TO_POINTER_OFFSET:
12638 735 : case ACCESS_INDEXED_REF_TO_ARRAY:
12639 : /* We may see either a bare reference or a dereferenced
12640 : "convert_from_reference"-like one here. Handle either way. */
12641 735 : if (TREE_CODE (ocd) == INDIRECT_REF)
12642 64 : ocd = TREE_OPERAND (ocd, 0);
12643 735 : gcc_assert (TREE_CODE (TREE_TYPE (ocd)) == REFERENCE_TYPE);
12644 : break;
12645 :
12646 : default:
12647 : ;
12648 : }
12649 : }
12650 :
12651 4424 : bool variable_offset;
12652 4424 : tree base
12653 4424 : = extract_base_bit_offset (ocd, &cbitpos, &coffset, &variable_offset);
12654 :
12655 4424 : int base_token;
12656 22631 : for (base_token = addr_tokens.length () - 1; base_token >= 0; base_token--)
12657 : {
12658 18207 : if (addr_tokens[base_token]->type == ARRAY_BASE
12659 18207 : || addr_tokens[base_token]->type == STRUCTURE_BASE)
12660 : break;
12661 : }
12662 :
12663 : /* The two expressions in the assertion below aren't quite the same: if we
12664 : have 'struct_base_decl access_indexed_array' for something like
12665 : "myvar[2].x" then base will be "myvar" and addr_tokens[base_token]->expr
12666 : will be "myvar[2]" -- the actual base of the structure.
12667 : The former interpretation leads to a strange situation where we get
12668 : struct(myvar) alloc(myvar[2].ptr1)
12669 : That is, the array of structures is kind of treated as one big structure
12670 : for the purposes of gathering sibling lists, etc. */
12671 : /* gcc_assert (base == addr_tokens[base_token]->expr); */
12672 :
12673 4424 : bool attach_detach = ((OMP_CLAUSE_MAP_KIND (grp_end)
12674 : == GOMP_MAP_ATTACH_DETACH)
12675 4424 : || (OMP_CLAUSE_MAP_KIND (grp_end)
12676 4424 : == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION));
12677 4424 : bool has_descriptor = false;
12678 4424 : if (OMP_CLAUSE_CHAIN (*grp_start_p) != grp_end)
12679 : {
12680 2884 : tree grp_mid = OMP_CLAUSE_CHAIN (*grp_start_p);
12681 2884 : if (grp_mid && omp_map_clause_descriptor_p (grp_mid))
12682 : has_descriptor = true;
12683 : }
12684 :
12685 4424 : if (!struct_map_to_clause || struct_map_to_clause->get (base) == NULL)
12686 : {
12687 2968 : enum gomp_map_kind str_kind = GOMP_MAP_STRUCT;
12688 :
12689 2968 : if (struct_map_to_clause == NULL)
12690 2412 : struct_map_to_clause = new hash_map<tree_operand_hash, tree>;
12691 :
12692 2968 : if (variable_offset)
12693 274 : str_kind = GOMP_MAP_STRUCT_UNORD;
12694 :
12695 2968 : tree l = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end), OMP_CLAUSE_MAP);
12696 :
12697 2968 : OMP_CLAUSE_SET_MAP_KIND (l, str_kind);
12698 2968 : OMP_CLAUSE_DECL (l) = unshare_expr (base);
12699 2968 : OMP_CLAUSE_SIZE (l) = size_int (1);
12700 :
12701 2968 : struct_map_to_clause->put (base, l);
12702 :
12703 : /* On first iterating through the clause list, we insert the struct node
12704 : just before the component access node that triggers the initial
12705 : omp_accumulate_sibling_list call for a particular sibling list (and
12706 : it then forms the first entry in that list). When reprocessing
12707 : struct bases that are themselves component accesses, we insert the
12708 : struct node on an off-side list to avoid inserting the new
12709 : GOMP_MAP_STRUCT into the middle of the old one. */
12710 2968 : tree *insert_node_pos = reprocessing_struct ? *added_tail : grp_start_p;
12711 :
12712 2968 : if (has_descriptor)
12713 : {
12714 726 : tree desc = OMP_CLAUSE_CHAIN (*grp_start_p);
12715 726 : if (code == OMP_TARGET_EXIT_DATA || code == OACC_EXIT_DATA)
12716 187 : OMP_CLAUSE_SET_MAP_KIND (desc, GOMP_MAP_RELEASE);
12717 726 : tree sc = *insert_node_pos;
12718 726 : OMP_CLAUSE_CHAIN (l) = desc;
12719 726 : OMP_CLAUSE_CHAIN (*grp_start_p) = OMP_CLAUSE_CHAIN (desc);
12720 726 : OMP_CLAUSE_CHAIN (desc) = sc;
12721 726 : *insert_node_pos = l;
12722 : }
12723 2242 : else if (attach_detach)
12724 : {
12725 1396 : tree extra_node;
12726 1396 : tree alloc_node
12727 1396 : = build_omp_struct_comp_nodes (code, *grp_start_p, grp_end,
12728 : &extra_node);
12729 1396 : tree *tail;
12730 1396 : OMP_CLAUSE_CHAIN (l) = alloc_node;
12731 :
12732 1396 : if (extra_node)
12733 : {
12734 0 : OMP_CLAUSE_CHAIN (extra_node) = *insert_node_pos;
12735 0 : OMP_CLAUSE_CHAIN (alloc_node) = extra_node;
12736 0 : tail = &OMP_CLAUSE_CHAIN (extra_node);
12737 : }
12738 : else
12739 : {
12740 1396 : OMP_CLAUSE_CHAIN (alloc_node) = *insert_node_pos;
12741 1396 : tail = &OMP_CLAUSE_CHAIN (alloc_node);
12742 : }
12743 :
12744 : /* For OpenMP semantics, we don't want to implicitly allocate
12745 : space for the pointer here for non-compute regions (e.g. "enter
12746 : data"). A FRAGILE_P node is only being created so that
12747 : omp-low.cc is able to rewrite the struct properly.
12748 : For references (to pointers), we want to actually allocate the
12749 : space for the reference itself in the sorted list following the
12750 : struct node.
12751 : For pointers, we want to allocate space if we had an explicit
12752 : mapping of the attachment point, but not otherwise. */
12753 1396 : if (*fragile_p
12754 1396 : || (openmp
12755 : && !target
12756 : && attach_detach
12757 234 : && TREE_CODE (TREE_TYPE (ocd)) == POINTER_TYPE
12758 88 : && !OMP_CLAUSE_ATTACHMENT_MAPPING_ERASED (grp_end)))
12759 : {
12760 133 : if (!lang_GNU_Fortran ())
12761 : /* In Fortran, pointers are dereferenced automatically, but may
12762 : be unassociated. So we still want to allocate space for the
12763 : pointer (as the base for an attach operation that should be
12764 : present in the same directive's clause list also). */
12765 103 : OMP_CLAUSE_SIZE (alloc_node) = size_zero_node;
12766 133 : OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (alloc_node) = 1;
12767 : }
12768 :
12769 1396 : *insert_node_pos = l;
12770 :
12771 1396 : if (reprocessing_struct)
12772 : {
12773 : /* When reprocessing a struct node group used as the base of a
12774 : subcomponent access, if we have a reference-to-pointer base,
12775 : we will see:
12776 : struct(**ptr) attach(*ptr)
12777 : whereas for a non-reprocess-struct group, we see, e.g.:
12778 : tofrom(**ptr) attach(*ptr) attach(ptr)
12779 : and we create the "alloc" for the second "attach", i.e.
12780 : for the reference itself. When reprocessing a struct group we
12781 : thus change the pointer attachment into a reference attachment
12782 : by stripping the indirection. (The attachment of the
12783 : referenced pointer must happen elsewhere, either on the same
12784 : directive, or otherwise.) */
12785 180 : tree adecl = OMP_CLAUSE_DECL (alloc_node);
12786 :
12787 180 : if ((TREE_CODE (adecl) == INDIRECT_REF
12788 148 : || (TREE_CODE (adecl) == MEM_REF
12789 0 : && integer_zerop (TREE_OPERAND (adecl, 1))))
12790 32 : && (TREE_CODE (TREE_TYPE (TREE_OPERAND (adecl, 0)))
12791 : == REFERENCE_TYPE)
12792 212 : && (TREE_CODE (TREE_TYPE (TREE_TYPE
12793 : (TREE_OPERAND (adecl, 0)))) == POINTER_TYPE))
12794 32 : OMP_CLAUSE_DECL (alloc_node) = TREE_OPERAND (adecl, 0);
12795 :
12796 180 : *added_tail = tail;
12797 : }
12798 : }
12799 : else
12800 : {
12801 846 : gcc_assert (*grp_start_p == grp_end);
12802 846 : if (reprocessing_struct)
12803 : {
12804 : /* If we don't have an attach/detach node, this is a
12805 : "target data" directive or similar, not an offload region.
12806 : Synthesize an "alloc" node using just the initiating
12807 : GOMP_MAP_STRUCT decl. */
12808 16 : gomp_map_kind k = (code == OMP_TARGET_EXIT_DATA
12809 32 : || code == OACC_EXIT_DATA)
12810 32 : ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
12811 32 : tree alloc_node
12812 32 : = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end),
12813 : OMP_CLAUSE_MAP);
12814 32 : OMP_CLAUSE_SET_MAP_KIND (alloc_node, k);
12815 32 : OMP_CLAUSE_DECL (alloc_node) = unshare_expr (last_token->expr);
12816 64 : OMP_CLAUSE_SIZE (alloc_node)
12817 32 : = TYPE_SIZE_UNIT (TREE_TYPE (OMP_CLAUSE_DECL (alloc_node)));
12818 :
12819 32 : OMP_CLAUSE_CHAIN (alloc_node) = OMP_CLAUSE_CHAIN (l);
12820 32 : OMP_CLAUSE_CHAIN (l) = alloc_node;
12821 32 : *insert_node_pos = l;
12822 32 : *added_tail = &OMP_CLAUSE_CHAIN (alloc_node);
12823 : }
12824 : else
12825 814 : grp_start_p = omp_siblist_insert_node_after (l, insert_node_pos);
12826 : }
12827 :
12828 2968 : unsigned last_access = base_token + 1;
12829 :
12830 2968 : while (last_access + 1 < addr_tokens.length ()
12831 3302 : && addr_tokens[last_access + 1]->type == ACCESS_METHOD)
12832 : last_access++;
12833 :
12834 2968 : if ((region_type & ORT_TARGET)
12835 2968 : && addr_tokens[base_token + 1]->type == ACCESS_METHOD)
12836 : {
12837 1674 : bool base_ref = false;
12838 1674 : access_method_kinds access_kind
12839 1674 : = addr_tokens[last_access]->u.access_kind;
12840 :
12841 1674 : switch (access_kind)
12842 : {
12843 : case ACCESS_DIRECT:
12844 : case ACCESS_INDEXED_ARRAY:
12845 1026 : return NULL;
12846 :
12847 403 : case ACCESS_REF:
12848 403 : case ACCESS_REF_TO_POINTER:
12849 403 : case ACCESS_REF_TO_POINTER_OFFSET:
12850 403 : case ACCESS_INDEXED_REF_TO_ARRAY:
12851 403 : base_ref = true;
12852 403 : break;
12853 :
12854 866 : default:
12855 866 : ;
12856 : }
12857 866 : tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end),
12858 : OMP_CLAUSE_MAP);
12859 866 : enum gomp_map_kind mkind;
12860 866 : omp_mapping_group *decl_group;
12861 866 : tree use_base;
12862 866 : switch (access_kind)
12863 : {
12864 463 : case ACCESS_POINTER:
12865 463 : case ACCESS_POINTER_OFFSET:
12866 463 : use_base = addr_tokens[last_access]->expr;
12867 463 : break;
12868 198 : case ACCESS_REF_TO_POINTER:
12869 198 : case ACCESS_REF_TO_POINTER_OFFSET:
12870 198 : use_base
12871 198 : = build_fold_indirect_ref (addr_tokens[last_access]->expr);
12872 198 : break;
12873 205 : default:
12874 205 : use_base = addr_tokens[base_token]->expr;
12875 : }
12876 866 : bool mapped_to_p
12877 866 : = omp_directive_maps_explicitly (group_map, use_base, &decl_group,
12878 : true, false, true);
12879 866 : if (addr_tokens[base_token]->type == STRUCTURE_BASE
12880 866 : && DECL_P (addr_tokens[last_access]->expr)
12881 1372 : && !mapped_to_p)
12882 444 : mkind = base_ref ? GOMP_MAP_FIRSTPRIVATE_REFERENCE
12883 : : GOMP_MAP_FIRSTPRIVATE_POINTER;
12884 : else
12885 : mkind = GOMP_MAP_ATTACH_DETACH;
12886 :
12887 866 : OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
12888 : /* If we have a reference to pointer base, we want to attach the
12889 : pointer here, not the reference. The reference attachment happens
12890 : elsewhere. */
12891 866 : bool ref_to_ptr
12892 866 : = (access_kind == ACCESS_REF_TO_POINTER
12893 866 : || access_kind == ACCESS_REF_TO_POINTER_OFFSET);
12894 866 : tree sdecl = addr_tokens[last_access]->expr;
12895 866 : tree sdecl_ptr = ref_to_ptr ? build_fold_indirect_ref (sdecl)
12896 : : sdecl;
12897 : /* For the FIRSTPRIVATE_REFERENCE after the struct node, we
12898 : want to use the reference itself for the decl, but we
12899 : still want to use the pointer to calculate the bias. */
12900 866 : OMP_CLAUSE_DECL (c2) = (mkind == GOMP_MAP_ATTACH_DETACH)
12901 866 : ? sdecl_ptr : sdecl;
12902 866 : sdecl = sdecl_ptr;
12903 866 : tree baddr = build_fold_addr_expr (base);
12904 866 : baddr = fold_convert_loc (OMP_CLAUSE_LOCATION (grp_end),
12905 : ptrdiff_type_node, baddr);
12906 866 : tree decladdr = fold_convert_loc (OMP_CLAUSE_LOCATION (grp_end),
12907 : ptrdiff_type_node, sdecl);
12908 866 : OMP_CLAUSE_SIZE (c2)
12909 866 : = fold_build2_loc (OMP_CLAUSE_LOCATION (grp_end), MINUS_EXPR,
12910 : ptrdiff_type_node, baddr, decladdr);
12911 : /* Insert after struct node. */
12912 866 : OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (l);
12913 866 : OMP_CLAUSE_CHAIN (l) = c2;
12914 :
12915 866 : if (addr_tokens[base_token]->type == STRUCTURE_BASE
12916 866 : && (addr_tokens[base_token]->u.structure_base_kind
12917 : == BASE_COMPONENT_EXPR)
12918 286 : && mkind == GOMP_MAP_ATTACH_DETACH
12919 1152 : && addr_tokens[last_access]->u.access_kind != ACCESS_REF)
12920 : {
12921 218 : *inner = insert_node_pos;
12922 218 : if (openmp)
12923 166 : *fragile_p = true;
12924 218 : return NULL;
12925 : }
12926 : }
12927 :
12928 1942 : if (addr_tokens[base_token]->type == STRUCTURE_BASE
12929 1942 : && (addr_tokens[base_token]->u.structure_base_kind
12930 : == BASE_COMPONENT_EXPR)
12931 2409 : && addr_tokens[last_access]->u.access_kind == ACCESS_REF)
12932 100 : *inner = insert_node_pos;
12933 :
12934 1942 : return NULL;
12935 : }
12936 1456 : else if (struct_map_to_clause)
12937 : {
12938 1456 : tree *osc = struct_map_to_clause->get (base);
12939 1456 : tree *sc = NULL, *scp = NULL;
12940 :
12941 1456 : unsigned HOST_WIDE_INT i, elems = tree_to_uhwi (OMP_CLAUSE_SIZE (*osc));
12942 1456 : sc = &OMP_CLAUSE_CHAIN (*osc);
12943 : /* The struct mapping might be immediately followed by a
12944 : FIRSTPRIVATE_POINTER, FIRSTPRIVATE_REFERENCE or an ATTACH_DETACH --
12945 : if it's an indirect access or a reference, or if the structure base
12946 : is not a decl. The FIRSTPRIVATE_* nodes are removed in omp-low.cc
12947 : after they have been processed there, and ATTACH_DETACH nodes are
12948 : recomputed and moved out of the GOMP_MAP_STRUCT construct once
12949 : sibling list building is complete. */
12950 1456 : if (OMP_CLAUSE_MAP_KIND (*sc) == GOMP_MAP_FIRSTPRIVATE_POINTER
12951 1411 : || OMP_CLAUSE_MAP_KIND (*sc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
12952 2822 : || OMP_CLAUSE_MAP_KIND (*sc) == GOMP_MAP_ATTACH_DETACH)
12953 201 : sc = &OMP_CLAUSE_CHAIN (*sc);
12954 3945 : for (i = 0; i < elems; i++, sc = &OMP_CLAUSE_CHAIN (*sc))
12955 2899 : if (attach_detach && sc == grp_start_p)
12956 : break;
12957 2899 : else if (TREE_CODE (OMP_CLAUSE_DECL (*sc)) != COMPONENT_REF
12958 182 : && TREE_CODE (OMP_CLAUSE_DECL (*sc)) != INDIRECT_REF
12959 3081 : && TREE_CODE (OMP_CLAUSE_DECL (*sc)) != ARRAY_REF)
12960 : break;
12961 : else
12962 : {
12963 2899 : tree sc_decl = OMP_CLAUSE_DECL (*sc);
12964 2899 : poly_offset_int offset;
12965 2899 : poly_int64 bitpos;
12966 :
12967 2899 : if (TREE_CODE (sc_decl) == ARRAY_REF)
12968 : {
12969 366 : while (TREE_CODE (sc_decl) == ARRAY_REF)
12970 184 : sc_decl = TREE_OPERAND (sc_decl, 0);
12971 182 : if (TREE_CODE (sc_decl) != COMPONENT_REF
12972 182 : || TREE_CODE (TREE_TYPE (sc_decl)) != ARRAY_TYPE)
12973 : break;
12974 : }
12975 2717 : else if (INDIRECT_REF_P (sc_decl)
12976 0 : && TREE_CODE (TREE_OPERAND (sc_decl, 0)) == COMPONENT_REF
12977 2717 : && (TREE_CODE (TREE_TYPE (TREE_OPERAND (sc_decl, 0)))
12978 : == REFERENCE_TYPE))
12979 0 : sc_decl = TREE_OPERAND (sc_decl, 0);
12980 :
12981 2899 : bool variable_offset2;
12982 2899 : tree base2 = extract_base_bit_offset (sc_decl, &bitpos, &offset,
12983 : &variable_offset2);
12984 2899 : if (!base2 || !operand_equal_p (base2, base, 0))
12985 : break;
12986 2899 : if (scp)
12987 469 : continue;
12988 2679 : if (variable_offset2)
12989 : {
12990 341 : OMP_CLAUSE_SET_MAP_KIND (*osc, GOMP_MAP_STRUCT_UNORD);
12991 :
12992 341 : if (has_descriptor)
12993 : {
12994 : /* Sort mapped components by offset. This is needed for
12995 : libgomp to handle Fortran derived-type allocatable
12996 : components transparently. */
12997 :
12998 323 : poly_int64 bitsize;
12999 323 : tree offset, coffset;
13000 323 : machine_mode mode;
13001 323 : int unsignedp, reversep, volatilep;
13002 323 : tree inner_ref1
13003 323 : = get_inner_reference (sc_decl, &bitsize, &bitpos,
13004 : &offset, &mode, &unsignedp,
13005 : &reversep, &volatilep);
13006 323 : tree osc_decl = ocd;
13007 323 : STRIP_NOPS (osc_decl);
13008 323 : tree inner_ref2
13009 323 : = get_inner_reference (osc_decl, &bitsize, &bitpos,
13010 : &coffset, &mode, &unsignedp,
13011 : &reversep, &volatilep);
13012 323 : gcc_assert (operand_equal_p (inner_ref1, inner_ref2, 0));
13013 323 : tree offset_diff
13014 323 : = fold_binary_to_constant (MINUS_EXPR, size_type_node,
13015 : coffset, offset);
13016 572 : if (offset_diff == NULL_TREE
13017 323 : || TREE_INT_CST_ELT (offset_diff, 0) > 0)
13018 249 : continue;
13019 : else
13020 : break;
13021 : }
13022 : }
13023 2338 : else if ((region_type & ORT_ACC) != 0)
13024 : {
13025 : /* For OpenACC, allow (ignore) duplicate struct accesses in
13026 : the middle of a mapping clause, e.g. "mystruct->foo" in:
13027 : copy(mystruct->foo->bar) copy(mystruct->foo->qux). */
13028 223 : if (reprocessing_struct
13029 8 : && known_eq (coffset, offset)
13030 223 : && known_eq (cbitpos, bitpos))
13031 21 : return NULL;
13032 : }
13033 2115 : else if (known_eq (coffset, offset)
13034 2115 : && known_eq (cbitpos, bitpos))
13035 : {
13036 : /* Having two struct members at the same offset doesn't work,
13037 : so make sure we don't. (We're allowed to ignore this.
13038 : Should we report the error?) */
13039 : /*error_at (OMP_CLAUSE_LOCATION (grp_end),
13040 : "duplicate struct member %qE in map clauses",
13041 : OMP_CLAUSE_DECL (grp_end));*/
13042 : return NULL;
13043 : }
13044 2335 : if (maybe_lt (coffset, offset)
13045 4063 : || (known_eq (coffset, offset)
13046 20 : && maybe_lt (cbitpos, bitpos)))
13047 : {
13048 607 : if (attach_detach)
13049 : scp = sc;
13050 : else
13051 : break;
13052 : }
13053 : }
13054 :
13055 1435 : OMP_CLAUSE_SIZE (*osc)
13056 1435 : = size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc), size_one_node);
13057 :
13058 1435 : if (reprocessing_struct)
13059 : {
13060 : /* If we're reprocessing a struct node, we don't want to do most of
13061 : the list manipulation below. We only need to handle the (pointer
13062 : or reference) attach/detach case. */
13063 8 : tree extra_node, alloc_node;
13064 8 : if (has_descriptor)
13065 0 : gcc_unreachable ();
13066 8 : else if (attach_detach)
13067 8 : alloc_node = build_omp_struct_comp_nodes (code, *grp_start_p,
13068 : grp_end, &extra_node);
13069 : else
13070 : {
13071 : /* If we don't have an attach/detach node, this is a
13072 : "target data" directive or similar, not an offload region.
13073 : Synthesize an "alloc" node using just the initiating
13074 : GOMP_MAP_STRUCT decl. */
13075 0 : gomp_map_kind k = (code == OMP_TARGET_EXIT_DATA
13076 0 : || code == OACC_EXIT_DATA)
13077 0 : ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
13078 0 : alloc_node
13079 0 : = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end),
13080 : OMP_CLAUSE_MAP);
13081 0 : OMP_CLAUSE_SET_MAP_KIND (alloc_node, k);
13082 0 : OMP_CLAUSE_DECL (alloc_node) = unshare_expr (last_token->expr);
13083 0 : OMP_CLAUSE_SIZE (alloc_node)
13084 0 : = TYPE_SIZE_UNIT (TREE_TYPE (OMP_CLAUSE_DECL (alloc_node)));
13085 : }
13086 :
13087 8 : if (scp)
13088 0 : omp_siblist_insert_node_after (alloc_node, scp);
13089 : else
13090 : {
13091 8 : tree *new_end = omp_siblist_insert_node_after (alloc_node, sc);
13092 8 : if (sc == *added_tail)
13093 8 : *added_tail = new_end;
13094 : }
13095 :
13096 8 : return NULL;
13097 : }
13098 :
13099 1427 : if (has_descriptor)
13100 : {
13101 538 : tree desc = OMP_CLAUSE_CHAIN (*grp_start_p);
13102 538 : if (code == OMP_TARGET_EXIT_DATA
13103 538 : || code == OACC_EXIT_DATA)
13104 105 : OMP_CLAUSE_SET_MAP_KIND (desc, GOMP_MAP_RELEASE);
13105 1076 : omp_siblist_move_node_after (desc,
13106 538 : &OMP_CLAUSE_CHAIN (*grp_start_p),
13107 : scp ? scp : sc);
13108 : }
13109 889 : else if (attach_detach)
13110 : {
13111 325 : tree cl = NULL_TREE, extra_node;
13112 325 : tree alloc_node = build_omp_struct_comp_nodes (code, *grp_start_p,
13113 : grp_end, &extra_node);
13114 325 : tree *tail_chain = NULL;
13115 :
13116 325 : if (*fragile_p
13117 325 : || (openmp
13118 : && !target
13119 : && attach_detach
13120 62 : && TREE_CODE (TREE_TYPE (ocd)) == POINTER_TYPE
13121 23 : && !OMP_CLAUSE_ATTACHMENT_MAPPING_ERASED (grp_end)))
13122 : {
13123 6 : if (!lang_GNU_Fortran ())
13124 6 : OMP_CLAUSE_SIZE (alloc_node) = size_zero_node;
13125 6 : OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (alloc_node) = 1;
13126 : }
13127 :
13128 : /* Here, we have:
13129 :
13130 : grp_end : the last (or only) node in this group.
13131 : grp_start_p : pointer to the first node in a pointer mapping group
13132 : up to and including GRP_END.
13133 : sc : pointer to the chain for the end of the struct component
13134 : list.
13135 : scp : pointer to the chain for the sorted position at which we
13136 : should insert in the middle of the struct component list
13137 : (else NULL to insert at end).
13138 : alloc_node : the "alloc" node for the structure (pointer-type)
13139 : component. We insert at SCP (if present), else SC
13140 : (the end of the struct component list).
13141 : extra_node : a newly-synthesized node for an additional indirect
13142 : pointer mapping or a Fortran pointer set, if needed.
13143 : cl : first node to prepend before grp_start_p.
13144 : tail_chain : pointer to chain of last prepended node.
13145 :
13146 : The general idea is we move the nodes for this struct mapping
13147 : together: the alloc node goes into the sorted list directly after
13148 : the struct mapping, and any extra nodes (together with the nodes
13149 : mapping arrays pointed to by struct components) get moved after
13150 : that list. When SCP is NULL, we insert the nodes at SC, i.e. at
13151 : the end of the struct component mapping list. It's important that
13152 : the alloc_node comes first in that case because it's part of the
13153 : sorted component mapping list (but subsequent nodes are not!). */
13154 :
13155 325 : if (scp)
13156 254 : omp_siblist_insert_node_after (alloc_node, scp);
13157 :
13158 : /* Make [cl,tail_chain] a list of the alloc node (if we haven't
13159 : already inserted it) and the extra_node (if it is present). The
13160 : list can be empty if we added alloc_node above and there is no
13161 : extra node. */
13162 254 : if (scp && extra_node)
13163 : {
13164 0 : cl = extra_node;
13165 0 : tail_chain = &OMP_CLAUSE_CHAIN (extra_node);
13166 : }
13167 325 : else if (extra_node)
13168 : {
13169 0 : OMP_CLAUSE_CHAIN (alloc_node) = extra_node;
13170 0 : cl = alloc_node;
13171 0 : tail_chain = &OMP_CLAUSE_CHAIN (extra_node);
13172 : }
13173 325 : else if (!scp)
13174 : {
13175 71 : cl = alloc_node;
13176 71 : tail_chain = &OMP_CLAUSE_CHAIN (alloc_node);
13177 : }
13178 :
13179 325 : continue_at
13180 71 : = cl ? omp_siblist_move_concat_nodes_after (cl, tail_chain,
13181 : grp_start_p, grp_end,
13182 : sc)
13183 254 : : omp_siblist_move_nodes_after (grp_start_p, grp_end, sc);
13184 : }
13185 564 : else if (*sc != grp_end)
13186 : {
13187 524 : gcc_assert (*grp_start_p == grp_end);
13188 :
13189 : /* We are moving the current node back to a previous struct node:
13190 : the node that used to point to the current node will now point to
13191 : the next node. */
13192 524 : continue_at = grp_start_p;
13193 : /* In the non-pointer case, the mapping clause itself is moved into
13194 : the correct position in the struct component list, which in this
13195 : case is just SC. */
13196 524 : omp_siblist_move_node_after (*grp_start_p, grp_start_p, sc);
13197 : }
13198 : }
13199 : return continue_at;
13200 : }
13201 :
13202 : /* Scan through GROUPS, and create sorted structure sibling lists without
13203 : gimplifying. */
13204 :
: /* CODE is the directive being processed and REGION_TYPE its region kind.
: GROUPS holds the mapping groups for the directive's clause list; *GRPMAP
: indexes those groups by operand and is rebuilt here whenever GROUPS is
: grown (growing the vector can invalidate pointers held in the map).
: LIST_P points to the head of the clause list, which is reordered in
: place. Returns true on success, false if an error was diagnosed. */
13205 : static bool
13206 17440 : omp_build_struct_sibling_lists (enum tree_code code,
13207 : enum omp_region_type region_type,
13208 : vec<omp_mapping_group> *groups,
13209 : hash_map<tree_operand_hash_no_se,
13210 : omp_mapping_group *> **grpmap,
13211 : tree *list_p)
13212 : {
13213 17440 : using namespace omp_addr_tokenizer;
13214 17440 : unsigned i;
13215 17440 : omp_mapping_group *grp;
13216 17440 : hash_map<tree_operand_hash, tree> *struct_map_to_clause = NULL;
13217 17440 : bool success = true;
13218 17440 : tree *new_next = NULL;
13219 34880 : tree *tail = &OMP_CLAUSE_CHAIN ((*groups)[groups->length () - 1].grp_end);
13220 17440 : tree added_nodes = NULL_TREE;
13221 17440 : tree *added_tail = &added_nodes;
13222 17440 : auto_vec<omp_mapping_group> pre_hwm_groups;
13223 :
13224 65250 : FOR_EACH_VEC_ELT (*groups, i, grp)
13225 : {
13226 30370 : tree c = grp->grp_end;
13227 30370 : tree decl = OMP_CLAUSE_DECL (c);
13228 30370 : tree grp_end = grp->grp_end;
13229 30370 : auto_vec<omp_addr_token *> addr_tokens;
: /* Clause following this group; passed to omp_reindex_mapping_groups
: below as the end marker if the group map must be rebuilt. */
13230 30370 : tree sentinel = OMP_CLAUSE_CHAIN (grp_end);
13231 :
: /* A previous iteration may have moved this group's start; pick up the
: updated position unless this is a reprocessed struct group. */
13232 30370 : if (new_next && !grp->reprocess_struct)
13233 510 : grp->grp_start = new_next;
13234 :
13235 30370 : new_next = NULL;
13236 :
13237 30370 : tree *grp_start_p = grp->grp_start;
13238 :
13239 30370 : if (DECL_P (decl))
13240 20544 : continue;
13241 :
13242 : /* Skip groups we marked for deletion in
13243 : {omp,oacc}_resolve_clause_dependencies. */
13244 9826 : if (grp->deleted)
13245 462 : continue;
13246 :
13247 9364 : if (OMP_CLAUSE_CHAIN (*grp_start_p)
13248 9364 : && OMP_CLAUSE_CHAIN (*grp_start_p) != grp_end)
13249 : {
13250 : /* Don't process an array descriptor that isn't inside a derived type
13251 : as a struct (the GOMP_MAP_POINTER following will have the form
13252 : "var.data", but such mappings are handled specially). */
13253 5862 : tree grpmid = OMP_CLAUSE_CHAIN (*grp_start_p);
13254 5862 : if (omp_map_clause_descriptor_p (grpmid)
13255 9950 : && DECL_P (OMP_CLAUSE_DECL (grpmid)))
13256 2777 : continue;
13257 : }
13258 :
13259 : tree expr = decl;
13260 :
: /* Tokenize the base expression, with any trailing array indexing
: stripped off first. */
13261 7602 : while (TREE_CODE (expr) == ARRAY_REF)
13262 1015 : expr = TREE_OPERAND (expr, 0);
13263 :
13264 6587 : if (!omp_parse_expr (addr_tokens, expr))
13265 0 : continue;
13266 :
13267 6587 : omp_addr_token *last_token
13268 6587 : = omp_first_chained_access_token (addr_tokens);
13269 :
13270 : /* A mapping of a reference to a pointer member that doesn't specify an
13271 : array section, etc., like this:
13272 : *mystruct.ref_to_ptr
13273 : should not be processed by the struct sibling-list handling code --
13274 : it just transfers the referenced pointer.
13275 :
13276 : In contrast, the quite similar-looking construct:
13277 : *mystruct.ptr
13278 : which is equivalent to e.g.
13279 : mystruct.ptr[0]
13280 : *does* trigger sibling-list processing.
13281 :
13282 : An exception for the former case is for "fragile" groups where the
13283 : reference itself is not handled otherwise; this is subject to special
13284 : handling in omp_accumulate_sibling_list also. */
13285 :
13286 6587 : if (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
13287 3366 : && last_token->type == ACCESS_METHOD
13288 3366 : && last_token->u.access_kind == ACCESS_REF
13289 6871 : && !grp->fragile)
13290 219 : continue;
13291 :
: /* Strip array indexing to find the underlying COMPONENT_REF, and peel
: a reference-to-pointer indirection where appropriate. */
13292 6368 : tree d = decl;
13293 6368 : if (TREE_CODE (d) == ARRAY_REF)
13294 : {
13295 1974 : while (TREE_CODE (d) == ARRAY_REF)
13296 998 : d = TREE_OPERAND (d, 0);
13297 976 : if (TREE_CODE (d) == COMPONENT_REF
13298 976 : && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
13299 : decl = d;
13300 : }
13301 6368 : if (d == decl
13302 5703 : && INDIRECT_REF_P (decl)
13303 708 : && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
13304 105 : && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
13305 : == REFERENCE_TYPE)
13306 6465 : && (OMP_CLAUSE_MAP_KIND (c)
13307 : != GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION))
13308 97 : decl = TREE_OPERAND (decl, 0);
13309 :
13310 6368 : STRIP_NOPS (decl);
13311 :
13312 6368 : if (TREE_CODE (decl) != COMPONENT_REF)
13313 1318 : continue;
13314 :
13315 : /* If we're mapping the whole struct in another node, skip adding this
13316 : node to a sibling list. */
13317 5050 : omp_mapping_group *wholestruct;
13318 5050 : if (omp_mapped_by_containing_struct (*grpmap, OMP_CLAUSE_DECL (c),
13319 : &wholestruct))
13320 : {
13321 177 : if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
13322 160 : OMP_CLAUSE_MAP_SIZE_NEEDS_ADJUSTMENT (c) = 0;
13323 177 : continue;
13324 : }
13325 :
13326 4873 : if (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
13327 4873 : && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH
13328 4770 : && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH
13329 : && code != OACC_UPDATE
13330 9576 : && code != OMP_TARGET_UPDATE)
13331 : {
13332 4512 : if (error_operand_p (decl))
13333 : {
13334 0 : success = false;
13335 0 : goto error_out;
13336 : }
13337 :
13338 4512 : tree stype = TREE_TYPE (decl);
13339 4512 : if (TREE_CODE (stype) == REFERENCE_TYPE)
13340 768 : stype = TREE_TYPE (stype);
13341 4512 : if (TYPE_SIZE_UNIT (stype) == NULL
13342 4512 : || TREE_CODE (TYPE_SIZE_UNIT (stype)) != INTEGER_CST)
13343 : {
13344 0 : error_at (OMP_CLAUSE_LOCATION (c),
13345 : "mapping field %qE of variable length "
13346 0 : "structure", OMP_CLAUSE_DECL (c));
13347 0 : success = false;
13348 0 : goto error_out;
13349 : }
13350 :
13351 4512 : tree *inner = NULL;
13352 4512 : bool fragile_p = grp->fragile;
13353 :
13354 4512 : new_next
13355 9024 : = omp_accumulate_sibling_list (region_type, code,
13356 : struct_map_to_clause, *grpmap,
13357 : grp_start_p, grp_end, addr_tokens,
13358 : &inner, &fragile_p,
13359 4512 : grp->reprocess_struct, &added_tail);
13360 :
: /* INNER is set when a nested struct access created new nodes that
: need a further reprocessing pass: queue a synthetic group. */
13361 4512 : if (inner)
13362 : {
13363 318 : omp_mapping_group newgrp;
13364 318 : newgrp.grp_start = inner;
13365 318 : if (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (*inner))
13366 : == GOMP_MAP_ATTACH_DETACH)
13367 286 : newgrp.grp_end = OMP_CLAUSE_CHAIN (*inner);
13368 : else
13369 32 : newgrp.grp_end = *inner;
13370 318 : newgrp.mark = UNVISITED;
13371 318 : newgrp.sibling = NULL;
13372 318 : newgrp.deleted = false;
13373 318 : newgrp.reprocess_struct = true;
13374 318 : newgrp.fragile = fragile_p;
13375 318 : newgrp.next = NULL;
13376 318 : groups->safe_push (newgrp);
13377 :
13378 : /* !!! Growing GROUPS might invalidate the pointers in the group
13379 : map. Rebuild it here. This is a bit inefficient, but
13380 : shouldn't happen very often. */
13381 636 : delete (*grpmap);
13382 318 : *grpmap
13383 318 : = omp_reindex_mapping_groups (list_p, groups, &pre_hwm_groups,
13384 : sentinel);
13385 : }
13386 : }
13387 30370 : }
13388 :
13389 : /* Delete groups marked for deletion above. At this point the order of the
13390 : groups may no longer correspond to the order of the underlying list,
13391 : which complicates this a little. First clear out OMP_CLAUSE_DECL for
13392 : deleted nodes... */
13393 :
13394 47810 : FOR_EACH_VEC_ELT (*groups, i, grp)
13395 30370 : if (grp->deleted)
13396 462 : for (tree d = *grp->grp_start;
13397 924 : d != OMP_CLAUSE_CHAIN (grp->grp_end);
13398 462 : d = OMP_CLAUSE_CHAIN (d))
13399 462 : OMP_CLAUSE_DECL (d) = NULL_TREE;
13400 :
13401 : /* ...then sweep through the list removing the now-empty nodes. */
13402 :
13403 : tail = list_p;
13404 93444 : while (*tail)
13405 : {
13406 76004 : if (OMP_CLAUSE_CODE (*tail) == OMP_CLAUSE_MAP
13407 76004 : && OMP_CLAUSE_DECL (*tail) == NULL_TREE)
13408 462 : *tail = OMP_CLAUSE_CHAIN (*tail);
13409 : else
13410 75542 : tail = &OMP_CLAUSE_CHAIN (*tail);
13411 : }
13412 :
13413 : /* Tack on the struct nodes added during nested struct reprocessing. */
13414 17440 : if (added_nodes)
13415 : {
13416 192 : *tail = added_nodes;
13417 192 : tail = added_tail;
13418 : }
13419 :
13420 : /* Find each attach node whose bias needs to be adjusted and move it to the
13421 : group containing its pointee, right after the struct node, so that it can
13422 : be picked up by the adjustment code further down in this function. */
13423 17440 : bool attach_bias_needs_adjustment;
13424 17440 : attach_bias_needs_adjustment = false;
13425 65250 : FOR_EACH_VEC_ELT_REVERSE (*groups, i, grp)
13426 : {
13427 30370 : tree c = *grp->grp_start;
13428 29599 : if (c != NULL && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13429 29584 : && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
13430 26603 : || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT_UNORD)
13431 3261 : && OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c)) == GOMP_MAP_TO_PSET
13432 527 : && OMP_CLAUSE_MAP_KIND (grp->grp_end) == GOMP_MAP_ATTACH_DETACH
13433 30897 : && OMP_CLAUSE_MAP_SIZE_NEEDS_ADJUSTMENT (grp->grp_end))
13434 : {
13435 90 : OMP_CLAUSE_MAP_SIZE_NEEDS_ADJUSTMENT (grp->grp_end) = 0;
13436 90 : attach_bias_needs_adjustment = true;
: /* Locate the chain pointer that addresses the group's end node so we
: can unlink it and move it after its struct node. */
13437 90 : tree *cp;
13438 90 : for (cp = &OMP_CLAUSE_CHAIN (c); cp != NULL;
13439 206 : cp = &OMP_CLAUSE_CHAIN (*cp))
13440 296 : if (*cp == grp->grp_end)
13441 : {
13442 : c = *cp;
13443 : break;
13444 : }
13445 :
13446 90 : tree base = OMP_CLAUSE_DECL (c);
13447 90 : gcc_assert (TREE_CODE (base) == NOP_EXPR);
13448 90 : base = build_fold_indirect_ref (base);
13449 90 : tree *struct_node = struct_map_to_clause->get (base);
13450 90 : omp_siblist_move_node_after (c, cp, &OMP_CLAUSE_CHAIN (*struct_node));
13451 : }
13452 : }
13453 :
13454 : /* Now we have finished building the struct sibling lists, reprocess
13455 : newly-added "attach" nodes: we need the address of the first
13456 : mapped element of each struct sibling list for the bias of the attach
13457 : operation -- not necessarily the base address of the whole struct. */
13458 17440 : if (struct_map_to_clause)
13459 5380 : for (hash_map<tree_operand_hash, tree>::iterator iter
13460 2412 : = struct_map_to_clause->begin ();
13461 7792 : iter != struct_map_to_clause->end ();
13462 2968 : ++iter)
13463 : {
13464 2968 : tree struct_node = (*iter).second;
13465 2968 : gcc_assert (OMP_CLAUSE_CODE (struct_node) == OMP_CLAUSE_MAP);
13466 2968 : tree attach = OMP_CLAUSE_CHAIN (struct_node);
13467 :
13468 2968 : if (OMP_CLAUSE_CODE (attach) != OMP_CLAUSE_MAP
13469 2968 : || OMP_CLAUSE_MAP_KIND (attach) != GOMP_MAP_ATTACH_DETACH)
13470 2456 : continue;
13471 :
13472 512 : OMP_CLAUSE_SET_MAP_KIND (attach, GOMP_MAP_ATTACH);
13473 :
13474 : /* Sanity check: the standalone attach node will not work if we have
13475 : an "enter data" operation (because for those, variables need to be
13476 : mapped separately and attach nodes must be grouped together with the
13477 : base they attach to). We should only have created the
13478 : ATTACH_DETACH node either after GOMP_MAP_STRUCT for a target region
13479 : or for an intermediate descriptor that needs adjustment -- so this
13480 : should never be true. */
13481 512 : gcc_assert ((region_type & ORT_TARGET) != 0
13482 : || attach_bias_needs_adjustment);
13483 :
13484 : /* This is the first sorted node in the struct sibling list. Use it
13485 : to recalculate the correct bias to use.
13486 : (&first_node - attach_decl).
13487 : For GOMP_MAP_STRUCT_UNORD, we need e.g. the
13488 : min(min(min(first,second),third),fourth) element, because the
13489 : elements aren't in any particular order. */
13490 512 : tree lowest_addr;
13491 512 : if (OMP_CLAUSE_MAP_KIND (struct_node) == GOMP_MAP_STRUCT_UNORD)
13492 : {
13493 94 : tree first_node = OMP_CLAUSE_CHAIN (attach);
13494 94 : unsigned HOST_WIDE_INT num_mappings
13495 94 : = tree_to_uhwi (OMP_CLAUSE_SIZE (struct_node));
13496 94 : lowest_addr = OMP_CLAUSE_DECL (first_node);
13497 94 : lowest_addr = build_fold_addr_expr (lowest_addr);
13498 94 : lowest_addr = fold_convert (pointer_sized_int_node, lowest_addr);
13499 94 : tree next_node = OMP_CLAUSE_CHAIN (first_node);
: /* Fold MIN_EXPR over the addresses of all mapped components. */
13500 185 : while (num_mappings > 1)
13501 : {
13502 91 : tree tmp = OMP_CLAUSE_DECL (next_node);
13503 91 : tmp = build_fold_addr_expr (tmp);
13504 91 : tmp = fold_convert (pointer_sized_int_node, tmp);
13505 91 : lowest_addr = fold_build2 (MIN_EXPR, pointer_sized_int_node,
13506 : lowest_addr, tmp);
13507 91 : next_node = OMP_CLAUSE_CHAIN (next_node);
13508 91 : num_mappings--;
13509 : }
13510 94 : lowest_addr = fold_convert (ptrdiff_type_node, lowest_addr);
13511 : }
13512 : else
13513 : {
13514 418 : tree first_node = OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (attach));
13515 418 : first_node = build_fold_addr_expr (first_node);
13516 418 : lowest_addr = fold_convert (ptrdiff_type_node, first_node);
13517 : }
13518 512 : tree attach_decl = OMP_CLAUSE_DECL (attach);
13519 512 : attach_decl = fold_convert (ptrdiff_type_node, attach_decl);
13520 512 : OMP_CLAUSE_SIZE (attach)
13521 512 : = fold_build2 (MINUS_EXPR, ptrdiff_type_node, lowest_addr,
13522 : attach_decl);
13523 :
13524 : /* Remove GOMP_MAP_ATTACH node from after struct node. */
13525 512 : OMP_CLAUSE_CHAIN (struct_node) = OMP_CLAUSE_CHAIN (attach);
13526 : /* ...and re-insert it at the end of our clause list. */
13527 512 : *tail = attach;
13528 512 : OMP_CLAUSE_CHAIN (attach) = NULL_TREE;
13529 512 : tail = &OMP_CLAUSE_CHAIN (attach);
13530 : }
13531 :
: /* Shared exit path: free the struct-to-clause map on both the success
: and the error paths. */
13532 15028 : error_out:
13533 17440 : if (struct_map_to_clause)
13534 2412 : delete struct_map_to_clause;
13535 :
13536 17440 : return success;
13537 17440 : }
13538 :
: /* Bundle of state threaded through mapper-instantiation callbacks.
: NOTE(review): field semantics inferred from names/types; the consuming
: code is outside this chunk -- confirm against its users. */
13539 : struct instantiate_mapper_info
13540 : {
13541 : tree *mapper_clauses_p; /* Presumably tail pointer for clauses synthesized from mappers.  */
13542 : struct gimplify_omp_ctx *omp_ctx; /* Gimplification OMP context in effect.  */
13543 : gimple_seq *pre_p; /* Statement sequence to emit setup code into.  */
13544 : };
13545 :
13546 : /* Helper function for omp_instantiate_mapper. */
13547 :
13548 : static tree
13549 1272 : remap_mapper_decl_1 (tree *tp, int *walk_subtrees, void *data)
13550 : {
13551 1272 : copy_body_data *id = (copy_body_data *) data;
13552 :
13553 1272 : if (DECL_P (*tp))
13554 : {
13555 564 : tree replacement = remap_decl (*tp, id);
13556 564 : if (*tp != replacement)
13557 : {
13558 289 : *tp = unshare_expr (replacement);
13559 289 : *walk_subtrees = 0;
13560 : }
13561 : }
13562 :
13563 1272 : return NULL_TREE;
13564 : }
13565 :
13566 : /* A copy_decl implementation (for use with tree-inline.cc functions) that
13567 : only transform decls or SSA names that are part of a map we already
13568 : prepared. */
13569 :
13570 : static tree
13571 94 : omp_mapper_copy_decl (tree var, copy_body_data *cb)
13572 : {
13573 94 : tree *repl = cb->decl_map->get (var);
13574 :
13575 94 : if (repl)
13576 0 : return *repl;
13577 :
13578 94 : return var;
13579 : }
13580 :
/* Instantiate the "declare mapper" referenced by MAPPERFN for the object
   EXPR.  OUTER_KIND supplies the map kind for mapper clauses that left
   theirs unset.  The mapper's clause list is copied with EXPR substituted
   for the mapper's dummy variable, and the resulting clauses are appended
   at *MAPPER_CLAUSES_P.  Mappers for nested aggregate types found in
   IMPLICIT_MAPPERS are expanded recursively; PRE_P receives any statements
   created along the way.  Returns the new tail pointer of the generated
   clause list.  */

static tree *
omp_instantiate_mapper (gimple_seq *pre_p,
			hash_map<omp_name_type<tree>, tree> *implicit_mappers,
			tree mapperfn, tree expr, enum gomp_map_kind outer_kind,
			tree *mapper_clauses_p)
{
  tree mapper_name = NULL_TREE;
  tree mapper = lang_hooks.decls.omp_extract_mapper_directive (mapperfn);
  gcc_assert (TREE_CODE (mapper) == OMP_DECLARE_MAPPER);

  tree clause = OMP_DECLARE_MAPPER_CLAUSES (mapper);
  tree dummy_var = OMP_DECLARE_MAPPER_DECL (mapper);

  /* The "extraction map" is used to map the mapper variable in the "declare
     mapper" directive, and also any temporary variables that have been created
     as part of expanding the mapper function's body (which are expanded as a
     "bind" expression in the pre_p sequence).  */
  hash_map<tree, tree> extraction_map;

  extraction_map.put (dummy_var, expr);
  extraction_map.put (expr, expr);

  /* This copy_body_data is only used to remap the decls in the
     OMP_DECLARE_MAPPER tree node expansion itself.  All relevant decls should
     already be in the current function.  */
  copy_body_data id;
  memset (&id, 0, sizeof (id));
  id.src_fn = current_function_decl;
  id.dst_fn = current_function_decl;
  id.src_cfun = cfun;
  id.decl_map = &extraction_map;
  id.copy_decl = omp_mapper_copy_decl;
  id.transform_call_graph_edges = CB_CGE_DUPLICATE; // ???
  id.transform_new_cfg = true; // ???

  for (; clause; clause = OMP_CLAUSE_CHAIN (clause))
    {
      enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (clause);
      tree *nested_mapper_p = NULL;

      /* Track the mapper-name push/pop markers so the nested-mapper lookup
	 below uses the name that is in scope for this clause.  */
      if (map_kind == GOMP_MAP_PUSH_MAPPER_NAME)
	{
	  mapper_name = OMP_CLAUSE_DECL (clause);
	  continue;
	}
      else if (map_kind == GOMP_MAP_POP_MAPPER_NAME)
	{
	  mapper_name = NULL_TREE;
	  continue;
	}

      /* Iterators on a mapper's map clause are not implemented yet.  */
      if (OMP_CLAUSE_HAS_ITERATORS (clause))
	{
	  sorry_at (OMP_CLAUSE_LOCATION (clause),
		    "user-defined mapper that uses a %<map%> clause "
		    "with %<iterator%>");
	  continue;
	}

      tree decl = OMP_CLAUSE_DECL (clause);
      tree unshared, type;
      bool nonunit_array_with_mapper = false;

      if (TREE_CODE (decl) == OMP_ARRAY_SECTION)
	{
	  location_t loc = OMP_CLAUSE_LOCATION (clause);
	  tree tmp = lang_hooks.decls.omp_map_array_section (loc, decl);
	  if (tmp == decl)
	    {
	      /* The langhook could not simplify the array section; keep the
		 clause as-is and remember that fact for the nested-mapper
		 check below (which cannot handle this case).  */
	      unshared = unshare_expr (clause);
	      nonunit_array_with_mapper = true;
	      type = TREE_TYPE (TREE_TYPE (decl));
	    }
	  else
	    {
	      /* Rebuild the clause around the simplified expression, with a
		 size derived from the new decl or its type.  */
	      unshared = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
					   OMP_CLAUSE_CODE (clause));
	      OMP_CLAUSE_DECL (unshared) = tmp;
	      OMP_CLAUSE_SIZE (unshared)
		= DECL_P (tmp) ? DECL_SIZE_UNIT (tmp)
			       : TYPE_SIZE_UNIT (TREE_TYPE (tmp));
	      type = TREE_TYPE (tmp);
	    }
	}
      else
	{
	  unshared = unshare_expr (clause);
	  type = TREE_TYPE (decl);
	}

      /* Substitute EXPR for the mapper's dummy variable throughout the
	 copied clause.  */
      walk_tree (&unshared, remap_mapper_decl_1, &id, NULL);

      if (OMP_CLAUSE_MAP_KIND (unshared) == GOMP_MAP_UNSET)
	OMP_CLAUSE_SET_MAP_KIND (unshared, outer_kind);

      decl = OMP_CLAUSE_DECL (unshared);
      type = TYPE_MAIN_VARIANT (type);

      /* Look for a mapper (under the currently-active name) for the mapped
	 type; the comparison against MAPPERFN stops us recursing into the
	 mapper we are already expanding.  */
      nested_mapper_p = implicit_mappers->get ({ mapper_name, type });

      if (nested_mapper_p && *nested_mapper_p != mapperfn)
	{
	  if (nonunit_array_with_mapper)
	    {
	      sorry ("user-defined mapper with non-unit length array section");
	      continue;
	    }

	  if (map_kind == GOMP_MAP_UNSET)
	    map_kind = outer_kind;

	  mapper_clauses_p
	    = omp_instantiate_mapper (pre_p, implicit_mappers,
				      *nested_mapper_p, decl, map_kind,
				      mapper_clauses_p);
	  continue;
	}

      /* Append the instantiated clause to the output list and advance the
	 tail pointer.  */
      *mapper_clauses_p = unshared;
      mapper_clauses_p = &OMP_CLAUSE_CHAIN (unshared);
    }

  return mapper_clauses_p;
}
13705 :
/* Splay-tree traversal callback: N's key is a variable decl and its value
   the variable's GOVD_* flags; DATA is an instantiate_mapper_info.  If the
   variable was implicitly referenced (seen, but neither explicitly listed
   in a clause nor construct-local) and its type has a default user-defined
   mapper registered, instantiate that mapper for it and mark the variable
   GOVD_EXPLICIT so it is not mapped implicitly a second time.  Always
   returns 0 so the traversal continues.  */

static int
omp_instantiate_implicit_mappers (splay_tree_node n, void *data)
{
  tree decl = (tree) n->key;
  instantiate_mapper_info *im_info = (instantiate_mapper_info *) data;
  gimplify_omp_ctx *ctx = im_info->omp_ctx;
  tree *mapper_p = NULL;
  tree type = TREE_TYPE (decl);
  bool ref_p = false;
  unsigned flags = n->value;

  /* Variables explicitly listed on a clause, or local to the construct,
     never get an implicit mapper invocation.  */
  if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
    return 0;
  /* Neither do variables that were never actually referenced.  */
  if ((flags & GOVD_SEEN) == 0)
    return 0;
  /* If we already have clauses pertaining to a struct variable, then we don't
     want to implicitly invoke a user-defined mapper.
     NOTE(review): this condition is unreachable as written -- any entry with
     GOVD_EXPLICIT set already returned at the first check above.  Confirm
     whether a different flag was intended here.  */
  if ((flags & GOVD_EXPLICIT) != 0 && AGGREGATE_TYPE_P (TREE_TYPE (decl)))
    return 0;

  /* For references, the mapper lookup is keyed on the referenced type.  */
  if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      ref_p = true;
      type = TREE_TYPE (type);
    }

  type = TYPE_MAIN_VARIANT (type);

  /* Only aggregate-typed decls can have a default (unnamed) mapper.  */
  if (DECL_P (decl) && type && AGGREGATE_TYPE_P (type))
    {
      gcc_assert (ctx);
      mapper_p = ctx->implicit_mappers->get ({ NULL_TREE, type });
    }

  if (mapper_p)
    {
      /* If we have a reference, map the pointed-to object rather than the
	 reference itself.  */
      if (ref_p)
	decl = build_fold_indirect_ref (decl);

      im_info->mapper_clauses_p
	= omp_instantiate_mapper (im_info->pre_p, ctx->implicit_mappers,
				  *mapper_p, decl, GOMP_MAP_TOFROM,
				  im_info->mapper_clauses_p);
      /* Make sure we don't map the same variable implicitly in
	 gimplify_adjust_omp_clauses_1 also.  */
      n->value |= GOVD_EXPLICIT;
    }

  return 0;
}
13758 :
13759 : /* Scan the OMP clauses in *LIST_P, installing mappings into a new
13760 : and previous omp contexts. */
13761 :
13762 : static void
13763 129929 : gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
13764 : enum omp_region_type region_type,
13765 : enum tree_code code,
13766 : gimple_seq *loops_seq_p = NULL)
13767 : {
13768 129929 : using namespace omp_addr_tokenizer;
13769 129929 : struct gimplify_omp_ctx *ctx, *outer_ctx;
13770 129929 : tree c;
13771 129929 : tree *orig_list_p = list_p;
13772 129929 : int handled_depend_iterators = -1;
13773 129929 : int nowait = -1;
13774 :
13775 129929 : ctx = new_omp_context (region_type);
13776 129929 : ctx->code = code;
13777 129929 : outer_ctx = ctx->outer_context;
13778 129929 : if (code == OMP_TARGET)
13779 : {
13780 13137 : if (!lang_GNU_Fortran ())
13781 11037 : ctx->defaultmap[GDMK_POINTER] = GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
13782 13137 : ctx->defaultmap[GDMK_SCALAR] = GOVD_FIRSTPRIVATE;
13783 26274 : ctx->defaultmap[GDMK_SCALAR_TARGET] = (lang_GNU_Fortran ()
13784 13137 : ? GOVD_MAP : GOVD_FIRSTPRIVATE);
13785 : }
13786 129929 : if (!lang_GNU_Fortran ())
13787 99092 : switch (code)
13788 : {
13789 18376 : case OMP_TARGET:
13790 18376 : case OMP_TARGET_DATA:
13791 18376 : case OMP_TARGET_ENTER_DATA:
13792 18376 : case OMP_TARGET_EXIT_DATA:
13793 18376 : case OACC_DECLARE:
13794 18376 : case OACC_HOST_DATA:
13795 18376 : case OACC_PARALLEL:
13796 18376 : case OACC_KERNELS:
13797 18376 : ctx->target_firstprivatize_array_bases = true;
13798 : default:
13799 : break;
13800 : }
13801 :
13802 129929 : vec<omp_mapping_group> *groups = NULL;
13803 129929 : hash_map<tree_operand_hash_no_se, omp_mapping_group *> *grpmap = NULL;
13804 129929 : unsigned grpnum = 0;
13805 129929 : tree *grp_start_p = NULL, grp_end = NULL_TREE;
13806 :
13807 129929 : if (code == OMP_TARGET
13808 129929 : || code == OMP_TARGET_DATA
13809 129929 : || code == OMP_TARGET_ENTER_DATA
13810 : || code == OMP_TARGET_EXIT_DATA
13811 : || code == OACC_DATA
13812 : || code == OACC_KERNELS
13813 : || code == OACC_PARALLEL
13814 : || code == OACC_SERIAL
13815 : || code == OACC_ENTER_DATA
13816 : || code == OACC_EXIT_DATA
13817 : || code == OACC_UPDATE
13818 : || code == OACC_DECLARE)
13819 : {
13820 33299 : groups = omp_gather_mapping_groups (list_p);
13821 :
13822 33299 : if (groups)
13823 17649 : grpmap = omp_index_mapping_groups (groups);
13824 : }
13825 :
13826 334007 : while ((c = *list_p) != NULL)
13827 : {
13828 204078 : bool remove = false;
13829 204078 : bool notice_outer = true;
13830 204078 : bool map_descriptor;
13831 204078 : const char *check_non_private = NULL;
13832 204078 : unsigned int flags;
13833 204078 : tree decl;
13834 204078 : auto_vec<omp_addr_token *, 10> addr_tokens;
13835 204078 : tree op = NULL_TREE;
13836 204078 : location_t loc = OMP_CLAUSE_LOCATION (c);
13837 :
13838 241012 : if (grp_end && c == OMP_CLAUSE_CHAIN (grp_end))
13839 : {
13840 : grp_start_p = NULL;
13841 : grp_end = NULL_TREE;
13842 : }
13843 :
13844 204078 : if (code == OMP_TARGET
13845 : || code == OMP_TARGET_DATA
13846 : || code == OMP_TARGET_ENTER_DATA
13847 172457 : || code == OMP_TARGET_EXIT_DATA)
13848 : /* Do some target-specific type checks for map operands. */
13849 33857 : switch (OMP_CLAUSE_CODE (c))
13850 : {
13851 24530 : case OMP_CLAUSE_MAP:
13852 24530 : op = OMP_CLAUSE_OPERAND (c, 0);
13853 24530 : verify_type_context (loc, TCTX_OMP_MAP, TREE_TYPE (op));
13854 24530 : break;
13855 180 : case OMP_CLAUSE_PRIVATE:
13856 180 : op = OMP_CLAUSE_OPERAND (c, 0);
13857 180 : verify_type_context (loc, TCTX_OMP_PRIVATE, TREE_TYPE (op));
13858 180 : break;
13859 1296 : case OMP_CLAUSE_FIRSTPRIVATE:
13860 1296 : op = OMP_CLAUSE_OPERAND (c, 0);
13861 1296 : verify_type_context (loc, TCTX_OMP_FIRSTPRIVATE, TREE_TYPE (op));
13862 1296 : break;
13863 2922 : case OMP_CLAUSE_IS_DEVICE_PTR:
13864 2922 : case OMP_CLAUSE_USE_DEVICE_ADDR:
13865 2922 : case OMP_CLAUSE_USE_DEVICE_PTR:
13866 2922 : case OMP_CLAUSE_HAS_DEVICE_ADDR:
13867 2922 : op = OMP_CLAUSE_OPERAND (c, 0);
13868 2922 : verify_type_context (loc, TCTX_OMP_DEVICE_ADDR, TREE_TYPE (op));
13869 2922 : break;
13870 : default:
13871 : break;
13872 : }
13873 :
13874 204078 : switch (OMP_CLAUSE_CODE (c))
13875 : {
13876 12227 : case OMP_CLAUSE_PRIVATE:
13877 12227 : flags = GOVD_PRIVATE | GOVD_EXPLICIT;
13878 12227 : if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
13879 : {
13880 166 : flags |= GOVD_PRIVATE_OUTER_REF;
13881 166 : OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
13882 : }
13883 : else
13884 : notice_outer = false;
13885 12227 : goto do_add;
13886 5536 : case OMP_CLAUSE_SHARED:
13887 5536 : flags = GOVD_SHARED | GOVD_EXPLICIT;
13888 5536 : goto do_add;
13889 7927 : case OMP_CLAUSE_FIRSTPRIVATE:
13890 7927 : flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
13891 7927 : check_non_private = "firstprivate";
13892 7927 : if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
13893 : {
13894 380 : gcc_assert (code == OMP_TARGET);
13895 : flags |= GOVD_FIRSTPRIVATE_IMPLICIT;
13896 : }
13897 7927 : goto do_add;
13898 7332 : case OMP_CLAUSE_LASTPRIVATE:
13899 7332 : if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
13900 496 : switch (code)
13901 : {
13902 25 : case OMP_DISTRIBUTE:
13903 25 : error_at (OMP_CLAUSE_LOCATION (c),
13904 : "conditional %<lastprivate%> clause on "
13905 : "%qs construct", "distribute");
13906 25 : OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
13907 25 : break;
13908 13 : case OMP_TASKLOOP:
13909 13 : error_at (OMP_CLAUSE_LOCATION (c),
13910 : "conditional %<lastprivate%> clause on "
13911 : "%qs construct", "taskloop");
13912 13 : OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
13913 13 : break;
13914 : default:
13915 : break;
13916 : }
13917 7332 : flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
13918 7332 : if (code != OMP_LOOP)
13919 6981 : check_non_private = "lastprivate";
13920 7332 : decl = OMP_CLAUSE_DECL (c);
13921 7332 : if (error_operand_p (decl))
13922 0 : goto do_add;
13923 7332 : if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
13924 7332 : && !lang_hooks.decls.omp_scalar_p (decl, true))
13925 : {
13926 5 : error_at (OMP_CLAUSE_LOCATION (c),
13927 : "non-scalar variable %qD in conditional "
13928 : "%<lastprivate%> clause", decl);
13929 5 : OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
13930 : }
13931 7332 : if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
13932 453 : flags |= GOVD_LASTPRIVATE_CONDITIONAL;
13933 7332 : omp_lastprivate_for_combined_outer_constructs (outer_ctx, decl,
13934 : false);
13935 7332 : goto do_add;
13936 15218 : case OMP_CLAUSE_REDUCTION:
13937 15218 : if (OMP_CLAUSE_REDUCTION_TASK (c))
13938 : {
13939 595 : if (region_type == ORT_WORKSHARE || code == OMP_SCOPE)
13940 : {
13941 408 : if (nowait == -1)
13942 293 : nowait = omp_find_clause (*list_p,
13943 293 : OMP_CLAUSE_NOWAIT) != NULL_TREE;
13944 408 : if (nowait
13945 15 : && (outer_ctx == NULL
13946 0 : || outer_ctx->region_type != ORT_COMBINED_PARALLEL))
13947 : {
13948 15 : error_at (OMP_CLAUSE_LOCATION (c),
13949 : "%<task%> reduction modifier on a construct "
13950 : "with a %<nowait%> clause");
13951 15 : OMP_CLAUSE_REDUCTION_TASK (c) = 0;
13952 : }
13953 : }
13954 187 : else if ((region_type & ORT_PARALLEL) != ORT_PARALLEL)
13955 : {
13956 40 : error_at (OMP_CLAUSE_LOCATION (c),
13957 : "invalid %<task%> reduction modifier on construct "
13958 : "other than %<parallel%>, %qs, %<sections%> or "
13959 20 : "%<scope%>", lang_GNU_Fortran () ? "do" : "for");
13960 20 : OMP_CLAUSE_REDUCTION_TASK (c) = 0;
13961 : }
13962 : }
13963 15218 : if (OMP_CLAUSE_REDUCTION_INSCAN (c))
13964 831 : switch (code)
13965 : {
13966 4 : case OMP_SECTIONS:
13967 4 : error_at (OMP_CLAUSE_LOCATION (c),
13968 : "%<inscan%> %<reduction%> clause on "
13969 : "%qs construct", "sections");
13970 4 : OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
13971 4 : break;
13972 4 : case OMP_PARALLEL:
13973 4 : error_at (OMP_CLAUSE_LOCATION (c),
13974 : "%<inscan%> %<reduction%> clause on "
13975 : "%qs construct", "parallel");
13976 4 : OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
13977 4 : break;
13978 4 : case OMP_TEAMS:
13979 4 : error_at (OMP_CLAUSE_LOCATION (c),
13980 : "%<inscan%> %<reduction%> clause on "
13981 : "%qs construct", "teams");
13982 4 : OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
13983 4 : break;
13984 4 : case OMP_TASKLOOP:
13985 4 : error_at (OMP_CLAUSE_LOCATION (c),
13986 : "%<inscan%> %<reduction%> clause on "
13987 : "%qs construct", "taskloop");
13988 4 : OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
13989 4 : break;
13990 4 : case OMP_SCOPE:
13991 4 : error_at (OMP_CLAUSE_LOCATION (c),
13992 : "%<inscan%> %<reduction%> clause on "
13993 : "%qs construct", "scope");
13994 4 : OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
13995 4 : break;
13996 : default:
13997 : break;
13998 : }
13999 : /* FALLTHRU */
14000 17822 : case OMP_CLAUSE_IN_REDUCTION:
14001 17822 : case OMP_CLAUSE_TASK_REDUCTION:
14002 17822 : flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
14003 : /* OpenACC permits reductions on private variables. */
14004 17822 : if (!(region_type & ORT_ACC)
14005 : /* taskgroup is actually not a worksharing region. */
14006 12303 : && code != OMP_TASKGROUP)
14007 11773 : check_non_private = omp_clause_code_name[OMP_CLAUSE_CODE (c)];
14008 17822 : decl = OMP_CLAUSE_DECL (c);
14009 17822 : if (TREE_CODE (decl) == MEM_REF)
14010 : {
14011 2539 : tree type = TREE_TYPE (decl);
14012 2539 : bool saved_into_ssa = gimplify_ctxp->into_ssa;
14013 2539 : gimplify_ctxp->into_ssa = false;
14014 2539 : if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type)), pre_p,
14015 : NULL, is_gimple_val, fb_rvalue, false)
14016 : == GS_ERROR)
14017 : {
14018 0 : gimplify_ctxp->into_ssa = saved_into_ssa;
14019 0 : remove = true;
14020 0 : break;
14021 : }
14022 2539 : gimplify_ctxp->into_ssa = saved_into_ssa;
14023 2539 : tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
14024 2539 : if (DECL_P (v))
14025 : {
14026 571 : omp_firstprivatize_variable (ctx, v);
14027 571 : omp_notice_variable (ctx, v, true);
14028 : }
14029 2539 : decl = TREE_OPERAND (decl, 0);
14030 2539 : if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
14031 : {
14032 477 : gimplify_ctxp->into_ssa = false;
14033 477 : if (gimplify_expr (&TREE_OPERAND (decl, 1), pre_p,
14034 : NULL, is_gimple_val, fb_rvalue, false)
14035 : == GS_ERROR)
14036 : {
14037 0 : gimplify_ctxp->into_ssa = saved_into_ssa;
14038 0 : remove = true;
14039 0 : break;
14040 : }
14041 477 : gimplify_ctxp->into_ssa = saved_into_ssa;
14042 477 : v = TREE_OPERAND (decl, 1);
14043 477 : if (DECL_P (v))
14044 : {
14045 477 : omp_firstprivatize_variable (ctx, v);
14046 477 : omp_notice_variable (ctx, v, true);
14047 : }
14048 477 : decl = TREE_OPERAND (decl, 0);
14049 : }
14050 2539 : if (TREE_CODE (decl) == ADDR_EXPR
14051 1163 : || TREE_CODE (decl) == INDIRECT_REF)
14052 1474 : decl = TREE_OPERAND (decl, 0);
14053 : }
14054 17822 : goto do_add_decl;
14055 2668 : case OMP_CLAUSE_LINEAR:
14056 2668 : if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
14057 : is_gimple_val, fb_rvalue) == GS_ERROR)
14058 : {
14059 : remove = true;
14060 : break;
14061 : }
14062 : else
14063 : {
14064 2668 : if (code == OMP_SIMD
14065 2668 : && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
14066 : {
14067 1154 : struct gimplify_omp_ctx *octx = outer_ctx;
14068 1154 : if (octx
14069 741 : && octx->region_type == ORT_WORKSHARE
14070 500 : && octx->combined_loop
14071 500 : && !octx->distribute)
14072 : {
14073 492 : if (octx->outer_context
14074 430 : && (octx->outer_context->region_type
14075 : == ORT_COMBINED_PARALLEL))
14076 393 : octx = octx->outer_context->outer_context;
14077 : else
14078 : octx = octx->outer_context;
14079 : }
14080 905 : if (octx
14081 316 : && octx->region_type == ORT_WORKSHARE
14082 16 : && octx->combined_loop
14083 16 : && octx->distribute)
14084 : {
14085 16 : error_at (OMP_CLAUSE_LOCATION (c),
14086 : "%<linear%> clause for variable other than "
14087 : "loop iterator specified on construct "
14088 : "combined with %<distribute%>");
14089 16 : remove = true;
14090 16 : break;
14091 : }
14092 : }
14093 : /* For combined #pragma omp parallel for simd, need to put
14094 : lastprivate and perhaps firstprivate too on the
14095 : parallel. Similarly for #pragma omp for simd. */
14096 : struct gimplify_omp_ctx *octx = outer_ctx;
14097 : bool taskloop_seen = false;
14098 : decl = NULL_TREE;
14099 3443 : do
14100 : {
14101 3443 : if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
14102 3443 : && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
14103 : break;
14104 3383 : decl = OMP_CLAUSE_DECL (c);
14105 3383 : if (error_operand_p (decl))
14106 : {
14107 : decl = NULL_TREE;
14108 : break;
14109 : }
14110 3383 : flags = GOVD_SEEN;
14111 3383 : if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
14112 1672 : flags |= GOVD_FIRSTPRIVATE;
14113 3383 : if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
14114 3383 : flags |= GOVD_LASTPRIVATE;
14115 3383 : if (octx
14116 2506 : && octx->region_type == ORT_WORKSHARE
14117 1213 : && octx->combined_loop)
14118 : {
14119 1207 : if (octx->outer_context
14120 1037 : && (octx->outer_context->region_type
14121 : == ORT_COMBINED_PARALLEL))
14122 : octx = octx->outer_context;
14123 476 : else if (omp_check_private (octx, decl, false))
14124 : break;
14125 : }
14126 : else if (octx
14127 1299 : && (octx->region_type & ORT_TASK) != 0
14128 307 : && octx->combined_loop)
14129 : taskloop_seen = true;
14130 : else if (octx
14131 996 : && octx->region_type == ORT_COMBINED_PARALLEL
14132 301 : && ((ctx->region_type == ORT_WORKSHARE
14133 201 : && octx == outer_ctx)
14134 100 : || taskloop_seen))
14135 : flags = GOVD_SEEN | GOVD_SHARED;
14136 : else if (octx
14137 695 : && ((octx->region_type & ORT_COMBINED_TEAMS)
14138 : == ORT_COMBINED_TEAMS))
14139 : flags = GOVD_SEEN | GOVD_SHARED;
14140 540 : else if (octx
14141 540 : && octx->region_type == ORT_COMBINED_TARGET)
14142 : {
14143 195 : if (flags & GOVD_LASTPRIVATE)
14144 195 : flags = GOVD_SEEN | GOVD_MAP;
14145 : }
14146 : else
14147 : break;
14148 2071 : splay_tree_node on
14149 2071 : = splay_tree_lookup (octx->variables,
14150 : (splay_tree_key) decl);
14151 2071 : if (on && (on->value & GOVD_DATA_SHARE_CLASS) != 0)
14152 : {
14153 : octx = NULL;
14154 : break;
14155 : }
14156 2067 : omp_add_variable (octx, decl, flags);
14157 2067 : if (octx->outer_context == NULL)
14158 : break;
14159 : octx = octx->outer_context;
14160 : }
14161 : while (1);
14162 2652 : if (octx
14163 2652 : && decl
14164 2652 : && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
14165 728 : || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
14166 1711 : omp_notice_variable (octx, decl, true);
14167 : }
14168 2652 : flags = GOVD_LINEAR | GOVD_EXPLICIT;
14169 2652 : if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
14170 2652 : && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
14171 : {
14172 : notice_outer = false;
14173 : flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
14174 : }
14175 2652 : goto do_add;
14176 :
14177 50113 : case OMP_CLAUSE_MAP:
14178 50113 : if (!grp_start_p)
14179 : {
14180 30179 : grp_start_p = list_p;
14181 30179 : grp_end = (*groups)[grpnum].grp_end;
14182 30179 : grpnum++;
14183 : }
14184 50113 : decl = OMP_CLAUSE_DECL (c);
14185 :
14186 50113 : if (error_operand_p (decl))
14187 : {
14188 : remove = true;
14189 : break;
14190 : }
14191 :
14192 50113 : if (!omp_parse_expr (addr_tokens, decl))
14193 : {
14194 : remove = true;
14195 : break;
14196 : }
14197 :
14198 50113 : if (remove)
14199 : break;
14200 50113 : if (DECL_P (decl) && outer_ctx && (region_type & ORT_ACC))
14201 : {
14202 : struct gimplify_omp_ctx *octx;
14203 1167 : for (octx = outer_ctx; octx; octx = octx->outer_context)
14204 : {
14205 1167 : if (octx->region_type != ORT_ACC_HOST_DATA)
14206 : break;
14207 12 : splay_tree_node n2
14208 12 : = splay_tree_lookup (octx->variables,
14209 : (splay_tree_key) decl);
14210 12 : if (n2)
14211 4 : error_at (OMP_CLAUSE_LOCATION (c), "variable %qE "
14212 : "declared in enclosing %<host_data%> region",
14213 4 : DECL_NAME (decl));
14214 : }
14215 : }
14216 :
14217 50113 : map_descriptor = false;
14218 :
14219 : /* This condition checks if we're mapping an array descriptor that
14220 : isn't inside a derived type -- these have special handling, and
14221 : are not handled as structs in omp_build_struct_sibling_lists.
14222 : See that function for further details. */
14223 50113 : if (*grp_start_p != grp_end
14224 33755 : && OMP_CLAUSE_CHAIN (*grp_start_p)
14225 83868 : && OMP_CLAUSE_CHAIN (*grp_start_p) != grp_end)
14226 : {
14227 16903 : tree grp_mid = OMP_CLAUSE_CHAIN (*grp_start_p);
14228 16903 : if (omp_map_clause_descriptor_p (grp_mid)
14229 31979 : && DECL_P (OMP_CLAUSE_DECL (grp_mid)))
14230 : map_descriptor = true;
14231 : }
14232 33210 : else if (OMP_CLAUSE_CODE (grp_end) == OMP_CLAUSE_MAP
14233 33210 : && (OMP_CLAUSE_MAP_KIND (grp_end) == GOMP_MAP_RELEASE
14234 32566 : || OMP_CLAUSE_MAP_KIND (grp_end) == GOMP_MAP_DELETE)
14235 33996 : && OMP_CLAUSE_RELEASE_DESCRIPTOR (grp_end))
14236 : map_descriptor = true;
14237 :
14238 : /* Adding the decl for a struct access: we haven't created
14239 : GOMP_MAP_STRUCT nodes yet, so this statement needs to predict
14240 : whether they will be created in gimplify_adjust_omp_clauses.
14241 : NOTE: Technically we should probably look through DECL_VALUE_EXPR
14242 : here because something that looks like a DECL_P may actually be a
14243 : struct access, e.g. variables in a lambda closure
14244 : (__closure->__foo) or class members (this->foo). Currently in both
14245 : those cases we map the whole of the containing object (directly in
14246 : the C++ FE) though, so struct nodes are not created. */
14247 50113 : if (c == grp_end
14248 30179 : && addr_tokens[0]->type == STRUCTURE_BASE
14249 8145 : && addr_tokens[0]->u.structure_base_kind == BASE_DECL
14250 58258 : && !map_descriptor)
14251 : {
14252 5371 : gcc_assert (addr_tokens[1]->type == ACCESS_METHOD);
14253 : /* If we got to this struct via a chain of pointers, maybe we
14254 : want to map it implicitly instead. */
14255 5371 : if (omp_access_chain_p (addr_tokens, 1))
14256 : break;
14257 5211 : omp_mapping_group *wholestruct;
14258 5211 : if (!(region_type & ORT_ACC)
14259 9147 : && omp_mapped_by_containing_struct (grpmap,
14260 3936 : OMP_CLAUSE_DECL (c),
14261 : &wholestruct))
14262 : break;
14263 4975 : decl = addr_tokens[1]->expr;
14264 4975 : if (splay_tree_lookup (ctx->variables, (splay_tree_key) decl))
14265 : break;
14266 : /* Standalone attach or detach clauses for a struct element
14267 : should not inhibit implicit mapping of the whole struct. */
14268 2902 : if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
14269 2902 : || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
14270 : break;
14271 2752 : flags = GOVD_MAP | GOVD_EXPLICIT;
14272 :
14273 2752 : gcc_assert (addr_tokens[1]->u.access_kind != ACCESS_DIRECT
14274 : || TREE_ADDRESSABLE (decl));
14275 2752 : goto do_add_decl;
14276 : }
14277 :
14278 44742 : if (!DECL_P (decl))
14279 : {
14280 20817 : tree d = decl, *pd;
14281 20817 : if (TREE_CODE (d) == ARRAY_REF)
14282 : {
14283 4938 : while (TREE_CODE (d) == ARRAY_REF)
14284 2526 : d = TREE_OPERAND (d, 0);
14285 2412 : if (TREE_CODE (d) == COMPONENT_REF
14286 2412 : && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
14287 : decl = d;
14288 : }
14289 20817 : pd = &OMP_CLAUSE_DECL (c);
14290 20817 : if (d == decl
14291 18451 : && TREE_CODE (decl) == INDIRECT_REF
14292 13778 : && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
14293 1069 : && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
14294 : == REFERENCE_TYPE)
14295 21468 : && (OMP_CLAUSE_MAP_KIND (c)
14296 : != GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION))
14297 : {
14298 647 : pd = &TREE_OPERAND (decl, 0);
14299 647 : decl = TREE_OPERAND (decl, 0);
14300 : }
14301 :
14302 20817 : if (addr_tokens[0]->type == STRUCTURE_BASE
14303 11287 : && addr_tokens[0]->u.structure_base_kind == BASE_DECL
14304 11287 : && addr_tokens[1]->type == ACCESS_METHOD
14305 11287 : && (addr_tokens[1]->u.access_kind == ACCESS_POINTER
14306 10790 : || (addr_tokens[1]->u.access_kind
14307 : == ACCESS_POINTER_OFFSET))
14308 21396 : && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)))
14309 : {
14310 0 : tree base = addr_tokens[1]->expr;
14311 0 : splay_tree_node n
14312 0 : = splay_tree_lookup (ctx->variables,
14313 : (splay_tree_key) base);
14314 0 : n->value |= GOVD_SEEN;
14315 : }
14316 :
14317 20817 : if (code == OMP_TARGET && OMP_CLAUSE_MAP_IN_REDUCTION (c))
14318 : {
14319 : /* Don't gimplify *pd fully at this point, as the base
14320 : will need to be adjusted during omp lowering. */
14321 88 : auto_vec<tree, 10> expr_stack;
14322 88 : tree *p = pd;
14323 88 : while (handled_component_p (*p)
14324 : || TREE_CODE (*p) == INDIRECT_REF
14325 : || TREE_CODE (*p) == ADDR_EXPR
14326 : || TREE_CODE (*p) == MEM_REF
14327 224 : || TREE_CODE (*p) == NON_LVALUE_EXPR)
14328 : {
14329 136 : expr_stack.safe_push (*p);
14330 136 : p = &TREE_OPERAND (*p, 0);
14331 : }
14332 312 : for (int i = expr_stack.length () - 1; i >= 0; i--)
14333 : {
14334 136 : tree t = expr_stack[i];
14335 136 : if (TREE_CODE (t) == ARRAY_REF
14336 136 : || TREE_CODE (t) == ARRAY_RANGE_REF)
14337 : {
14338 56 : if (TREE_OPERAND (t, 2) == NULL_TREE)
14339 : {
14340 56 : tree low = unshare_expr (array_ref_low_bound (t));
14341 56 : if (!is_gimple_min_invariant (low))
14342 : {
14343 0 : TREE_OPERAND (t, 2) = low;
14344 0 : if (gimplify_expr (&TREE_OPERAND (t, 2),
14345 : pre_p, NULL,
14346 : is_gimple_reg,
14347 : fb_rvalue) == GS_ERROR)
14348 0 : remove = true;
14349 : }
14350 : }
14351 0 : else if (gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
14352 : NULL, is_gimple_reg,
14353 : fb_rvalue) == GS_ERROR)
14354 0 : remove = true;
14355 56 : if (TREE_OPERAND (t, 3) == NULL_TREE)
14356 : {
14357 56 : tree elmt_size = array_ref_element_size (t);
14358 56 : if (!is_gimple_min_invariant (elmt_size))
14359 : {
14360 0 : elmt_size = unshare_expr (elmt_size);
14361 0 : tree elmt_type
14362 0 : = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t,
14363 : 0)));
14364 0 : tree factor
14365 0 : = size_int (TYPE_ALIGN_UNIT (elmt_type));
14366 0 : elmt_size
14367 0 : = size_binop (EXACT_DIV_EXPR, elmt_size,
14368 : factor);
14369 0 : TREE_OPERAND (t, 3) = elmt_size;
14370 0 : if (gimplify_expr (&TREE_OPERAND (t, 3),
14371 : pre_p, NULL,
14372 : is_gimple_reg,
14373 : fb_rvalue) == GS_ERROR)
14374 0 : remove = true;
14375 : }
14376 : }
14377 0 : else if (gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
14378 : NULL, is_gimple_reg,
14379 : fb_rvalue) == GS_ERROR)
14380 0 : remove = true;
14381 : }
14382 80 : else if (TREE_CODE (t) == COMPONENT_REF)
14383 : {
14384 0 : if (TREE_OPERAND (t, 2) == NULL_TREE)
14385 : {
14386 0 : tree offset = component_ref_field_offset (t);
14387 0 : if (!is_gimple_min_invariant (offset))
14388 : {
14389 0 : offset = unshare_expr (offset);
14390 0 : tree field = TREE_OPERAND (t, 1);
14391 0 : tree factor
14392 0 : = size_int (DECL_OFFSET_ALIGN (field)
14393 : / BITS_PER_UNIT);
14394 0 : offset = size_binop (EXACT_DIV_EXPR, offset,
14395 : factor);
14396 0 : TREE_OPERAND (t, 2) = offset;
14397 0 : if (gimplify_expr (&TREE_OPERAND (t, 2),
14398 : pre_p, NULL,
14399 : is_gimple_reg,
14400 : fb_rvalue) == GS_ERROR)
14401 0 : remove = true;
14402 : }
14403 : }
14404 0 : else if (gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
14405 : NULL, is_gimple_reg,
14406 : fb_rvalue) == GS_ERROR)
14407 0 : remove = true;
14408 : }
14409 : }
14410 224 : for (; expr_stack.length () > 0; )
14411 : {
14412 136 : tree t = expr_stack.pop ();
14413 :
14414 136 : if (TREE_CODE (t) == ARRAY_REF
14415 136 : || TREE_CODE (t) == ARRAY_RANGE_REF)
14416 : {
14417 56 : if (!is_gimple_min_invariant (TREE_OPERAND (t, 1))
14418 56 : && gimplify_expr (&TREE_OPERAND (t, 1), pre_p,
14419 : NULL, is_gimple_val,
14420 : fb_rvalue) == GS_ERROR)
14421 144 : remove = true;
14422 : }
14423 : }
14424 88 : }
14425 : break;
14426 : }
14427 :
14428 23925 : if ((code == OMP_TARGET
14429 : || code == OMP_TARGET_DATA
14430 : || code == OMP_TARGET_ENTER_DATA
14431 14141 : || code == OMP_TARGET_EXIT_DATA)
14432 24480 : && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
14433 : {
14434 : /* If we have attach/detach but the decl we have is a pointer to
14435 : pointer, we're probably mapping the "base level" array
14436 : implicitly. Make sure we don't add the decl as if we mapped
14437 : it explicitly. That is,
14438 :
14439 : int **arr;
14440 : [...]
14441 : #pragma omp target map(arr[a][b:c])
14442 :
14443 : should *not* map "arr" explicitly. That way we get a
14444 : zero-length "alloc" mapping for it, and assuming it's been
14445 : mapped by some previous directive, etc., things work as they
14446 : should. */
14447 :
14448 208 : tree basetype = TREE_TYPE (addr_tokens[0]->expr);
14449 :
14450 208 : if (TREE_CODE (basetype) == REFERENCE_TYPE)
14451 34 : basetype = TREE_TYPE (basetype);
14452 :
14453 208 : if (code == OMP_TARGET
14454 54 : && addr_tokens[0]->type == ARRAY_BASE
14455 54 : && addr_tokens[0]->u.structure_base_kind == BASE_DECL
14456 54 : && TREE_CODE (basetype) == POINTER_TYPE
14457 262 : && TREE_CODE (TREE_TYPE (basetype)) == POINTER_TYPE)
14458 : break;
14459 : }
14460 :
14461 23903 : flags = GOVD_MAP | GOVD_EXPLICIT;
14462 23903 : if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
14463 23792 : || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM
14464 23288 : || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_PRESENT_TO
14465 47165 : || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_PRESENT_TOFROM)
14466 : flags |= GOVD_MAP_ALWAYS_TO;
14467 :
14468 23903 : goto do_add;
14469 :
14470 374 : case OMP_CLAUSE_AFFINITY:
14471 374 : gimplify_omp_affinity (list_p, pre_p);
14472 374 : remove = true;
14473 374 : break;
14474 8 : case OMP_CLAUSE_DOACROSS:
14475 8 : if (OMP_CLAUSE_DOACROSS_KIND (c) == OMP_CLAUSE_DOACROSS_SINK)
14476 : {
14477 4 : tree deps = OMP_CLAUSE_DECL (c);
14478 8 : while (deps && TREE_CODE (deps) == TREE_LIST)
14479 : {
14480 4 : if (TREE_CODE (TREE_PURPOSE (deps)) == TRUNC_DIV_EXPR
14481 4 : && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps), 1)))
14482 0 : gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps), 1),
14483 : pre_p, NULL, is_gimple_val, fb_rvalue);
14484 4 : deps = TREE_CHAIN (deps);
14485 : }
14486 : }
14487 : else
14488 4 : gcc_assert (OMP_CLAUSE_DOACROSS_KIND (c)
14489 : == OMP_CLAUSE_DOACROSS_SOURCE);
14490 : break;
14491 2204 : case OMP_CLAUSE_DEPEND:
14492 2204 : if (handled_depend_iterators == -1)
14493 1902 : handled_depend_iterators = gimplify_omp_depend (list_p, pre_p);
14494 2204 : if (handled_depend_iterators)
14495 : {
14496 346 : if (handled_depend_iterators == 2)
14497 0 : remove = true;
14498 : break;
14499 : }
14500 1858 : if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
14501 : {
14502 0 : gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
14503 : NULL, is_gimple_val, fb_rvalue);
14504 0 : OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
14505 : }
14506 1858 : if (error_operand_p (OMP_CLAUSE_DECL (c)))
14507 : {
14508 : remove = true;
14509 : break;
14510 : }
14511 1858 : if (OMP_CLAUSE_DECL (c) != null_pointer_node)
14512 : {
14513 1827 : OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
14514 1827 : if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
14515 : is_gimple_val, fb_rvalue) == GS_ERROR)
14516 : {
14517 : remove = true;
14518 : break;
14519 : }
14520 : }
14521 1858 : if (code == OMP_TASK)
14522 1400 : ctx->has_depend = true;
14523 : break;
14524 :
14525 8207 : case OMP_CLAUSE_TO:
14526 8207 : case OMP_CLAUSE_FROM:
14527 8207 : case OMP_CLAUSE__CACHE_:
14528 8207 : decl = OMP_CLAUSE_DECL (c);
14529 8207 : if (error_operand_p (decl))
14530 : {
14531 : remove = true;
14532 : break;
14533 : }
14534 8207 : if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
14535 7435 : OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
14536 684 : : TYPE_SIZE_UNIT (TREE_TYPE (decl));
14537 8207 : gimple_seq *seq_p;
14538 8207 : seq_p = enter_omp_iterator_loop_context (c, loops_seq_p, pre_p);
14539 8207 : if (gimplify_expr (&OMP_CLAUSE_SIZE (c), seq_p, NULL,
14540 : is_gimple_val, fb_rvalue) == GS_ERROR)
14541 : {
14542 0 : remove = true;
14543 0 : exit_omp_iterator_loop_context (c);
14544 0 : break;
14545 : }
14546 8207 : if (!DECL_P (decl))
14547 : {
14548 2019 : if (gimplify_expr (&OMP_CLAUSE_DECL (c), seq_p, NULL,
14549 : is_gimple_lvalue, fb_lvalue) == GS_ERROR)
14550 0 : remove = true;
14551 2019 : exit_omp_iterator_loop_context (c);
14552 2019 : break;
14553 : }
14554 6188 : exit_omp_iterator_loop_context (c);
14555 6188 : goto do_notice;
14556 :
14557 129 : case OMP_CLAUSE__MAPPER_BINDING_:
14558 129 : {
14559 129 : tree name = OMP_CLAUSE__MAPPER_BINDING__ID (c);
14560 129 : tree var = OMP_CLAUSE__MAPPER_BINDING__DECL (c);
14561 129 : tree type = TYPE_MAIN_VARIANT (TREE_TYPE (var));
14562 129 : tree fndecl = OMP_CLAUSE__MAPPER_BINDING__MAPPER (c);
14563 129 : ctx->implicit_mappers->put ({ name, type }, fndecl);
14564 129 : remove = true;
14565 129 : break;
14566 : }
14567 :
14568 2118 : case OMP_CLAUSE_USE_DEVICE_PTR:
14569 2118 : case OMP_CLAUSE_USE_DEVICE_ADDR:
14570 2118 : flags = GOVD_EXPLICIT;
14571 2118 : goto do_add;
14572 :
14573 557 : case OMP_CLAUSE_HAS_DEVICE_ADDR:
14574 557 : decl = OMP_CLAUSE_DECL (c);
14575 557 : while (TREE_CODE (decl) == INDIRECT_REF
14576 606 : || TREE_CODE (decl) == ARRAY_REF)
14577 49 : decl = TREE_OPERAND (decl, 0);
14578 557 : flags = GOVD_EXPLICIT;
14579 557 : goto do_add_decl;
14580 :
14581 500 : case OMP_CLAUSE_IS_DEVICE_PTR:
14582 500 : flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
14583 500 : goto do_add;
14584 :
14585 62385 : do_add:
14586 62385 : decl = OMP_CLAUSE_DECL (c);
14587 83516 : do_add_decl:
14588 83516 : if (error_operand_p (decl))
14589 : {
14590 : remove = true;
14591 : break;
14592 : }
14593 83508 : if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0)
14594 : {
14595 1755 : tree t = omp_member_access_dummy_var (decl);
14596 1755 : if (t)
14597 : {
14598 668 : tree v = DECL_VALUE_EXPR (decl);
14599 668 : DECL_NAME (decl) = DECL_NAME (TREE_OPERAND (v, 1));
14600 668 : if (outer_ctx)
14601 140 : omp_notice_variable (outer_ctx, t, true);
14602 : }
14603 : }
14604 83508 : if (code == OACC_DATA
14605 2402 : && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
14606 85910 : && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
14607 307 : flags |= GOVD_MAP_0LEN_ARRAY;
14608 83508 : omp_add_variable (ctx, decl, flags);
14609 83508 : if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
14610 68290 : || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
14611 66216 : || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
14612 86112 : && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
14613 : {
14614 2091 : struct gimplify_omp_ctx *pctx
14615 2151 : = code == OMP_TARGET ? outer_ctx : ctx;
14616 2151 : if (pctx)
14617 2121 : omp_add_variable (pctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
14618 : GOVD_LOCAL | GOVD_SEEN);
14619 2121 : if (pctx
14620 2121 : && OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
14621 632 : && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
14622 : find_decl_expr,
14623 : OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
14624 : NULL) == NULL_TREE)
14625 208 : omp_add_variable (pctx,
14626 208 : OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
14627 : GOVD_LOCAL | GOVD_SEEN);
14628 2151 : gimplify_omp_ctxp = pctx;
14629 2151 : push_gimplify_context ();
14630 :
14631 2151 : OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
14632 2151 : OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
14633 :
14634 2151 : gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
14635 2151 : &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
14636 2151 : pop_gimplify_context
14637 2151 : (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
14638 2151 : push_gimplify_context ();
14639 4302 : gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
14640 2151 : &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
14641 2151 : pop_gimplify_context
14642 2151 : (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
14643 2151 : OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
14644 2151 : OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
14645 :
14646 2151 : gimplify_omp_ctxp = outer_ctx;
14647 : }
14648 81357 : else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
14649 81357 : && OMP_CLAUSE_LASTPRIVATE_STMT (c))
14650 : {
14651 303 : gimplify_omp_ctxp = ctx;
14652 303 : push_gimplify_context ();
14653 303 : if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
14654 : {
14655 303 : tree bind = build3 (BIND_EXPR, void_type_node, NULL,
14656 : NULL, NULL);
14657 303 : TREE_SIDE_EFFECTS (bind) = 1;
14658 303 : BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
14659 303 : OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
14660 : }
14661 606 : gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
14662 303 : &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
14663 303 : pop_gimplify_context
14664 303 : (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
14665 303 : OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
14666 :
14667 303 : gimplify_omp_ctxp = outer_ctx;
14668 : }
14669 81054 : else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
14670 81054 : && OMP_CLAUSE_LINEAR_STMT (c))
14671 : {
14672 60 : gimplify_omp_ctxp = ctx;
14673 60 : push_gimplify_context ();
14674 60 : if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
14675 : {
14676 60 : tree bind = build3 (BIND_EXPR, void_type_node, NULL,
14677 : NULL, NULL);
14678 60 : TREE_SIDE_EFFECTS (bind) = 1;
14679 60 : BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
14680 60 : OMP_CLAUSE_LINEAR_STMT (c) = bind;
14681 : }
14682 120 : gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
14683 60 : &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
14684 60 : pop_gimplify_context
14685 60 : (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
14686 60 : OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;
14687 :
14688 60 : gimplify_omp_ctxp = outer_ctx;
14689 : }
14690 83508 : if (notice_outer)
14691 71387 : goto do_notice;
14692 : break;
14693 :
14694 906 : case OMP_CLAUSE_COPYIN:
14695 906 : case OMP_CLAUSE_COPYPRIVATE:
14696 906 : decl = OMP_CLAUSE_DECL (c);
14697 906 : if (error_operand_p (decl))
14698 : {
14699 : remove = true;
14700 : break;
14701 : }
14702 906 : if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
14703 : && !remove
14704 906 : && !omp_check_private (ctx, decl, true))
14705 : {
14706 35 : remove = true;
14707 35 : if (is_global_var (decl))
14708 : {
14709 30 : if (DECL_THREAD_LOCAL_P (decl))
14710 : remove = false;
14711 10 : else if (DECL_HAS_VALUE_EXPR_P (decl))
14712 : {
14713 2 : tree value = get_base_address (DECL_VALUE_EXPR (decl));
14714 :
14715 2 : if (value
14716 2 : && DECL_P (value)
14717 4 : && DECL_THREAD_LOCAL_P (value))
14718 : remove = false;
14719 : }
14720 : }
14721 : if (remove)
14722 13 : error_at (OMP_CLAUSE_LOCATION (c),
14723 : "copyprivate variable %qE is not threadprivate"
14724 13 : " or private in outer context", DECL_NAME (decl));
14725 : }
14726 78481 : do_notice:
14727 78481 : if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
14728 63263 : || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
14729 55336 : || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
14730 30477 : && outer_ctx
14731 17547 : && ((region_type & ORT_TASKLOOP) == ORT_TASKLOOP
14732 16208 : || (region_type == ORT_WORKSHARE
14733 3884 : && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
14734 1087 : && (OMP_CLAUSE_REDUCTION_INSCAN (c)
14735 912 : || code == OMP_LOOP)))
14736 80426 : && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
14737 857 : || (code == OMP_LOOP
14738 138 : && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
14739 138 : && ((outer_ctx->region_type & ORT_COMBINED_TEAMS)
14740 : == ORT_COMBINED_TEAMS))))
14741 : {
14742 1209 : splay_tree_node on
14743 1209 : = splay_tree_lookup (outer_ctx->variables,
14744 : (splay_tree_key)decl);
14745 1209 : if (on == NULL || (on->value & GOVD_DATA_SHARE_CLASS) == 0)
14746 : {
14747 891 : if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
14748 705 : && TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
14749 1003 : && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
14750 56 : || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
14751 0 : && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
14752 : == POINTER_TYPE))))
14753 56 : omp_firstprivatize_variable (outer_ctx, decl);
14754 : else
14755 : {
14756 835 : omp_add_variable (outer_ctx, decl,
14757 : GOVD_SEEN | GOVD_SHARED);
14758 835 : if (outer_ctx->outer_context)
14759 235 : omp_notice_variable (outer_ctx->outer_context, decl,
14760 : true);
14761 : }
14762 : }
14763 : }
14764 77881 : if (outer_ctx)
14765 29584 : omp_notice_variable (outer_ctx, decl, true);
14766 78481 : if (check_non_private
14767 26681 : && (region_type == ORT_WORKSHARE || code == OMP_SCOPE)
14768 4747 : && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
14769 1641 : || decl == OMP_CLAUSE_DECL (c)
14770 224 : || (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
14771 224 : && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
14772 : == ADDR_EXPR
14773 103 : || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
14774 : == POINTER_PLUS_EXPR
14775 22 : && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
14776 : (OMP_CLAUSE_DECL (c), 0), 0))
14777 : == ADDR_EXPR)))))
14778 83140 : && omp_check_private (ctx, decl, false))
14779 : {
14780 58 : error ("%s variable %qE is private in outer context",
14781 29 : check_non_private, DECL_NAME (decl));
14782 29 : remove = true;
14783 : }
14784 : break;
14785 :
14786 190 : case OMP_CLAUSE_DETACH:
14787 190 : flags = GOVD_FIRSTPRIVATE | GOVD_SEEN;
14788 190 : goto do_add;
14789 :
14790 4102 : case OMP_CLAUSE_IF:
14791 4102 : if (OMP_CLAUSE_IF_MODIFIER (c) != ERROR_MARK
14792 4102 : && OMP_CLAUSE_IF_MODIFIER (c) != code)
14793 : {
14794 : const char *p[2];
14795 168 : for (int i = 0; i < 2; i++)
14796 112 : switch (i ? OMP_CLAUSE_IF_MODIFIER (c) : code)
14797 : {
14798 8 : case VOID_CST: p[i] = "cancel"; break;
14799 24 : case OMP_PARALLEL: p[i] = "parallel"; break;
14800 4 : case OMP_SIMD: p[i] = "simd"; break;
14801 12 : case OMP_TASK: p[i] = "task"; break;
14802 12 : case OMP_TASKLOOP: p[i] = "taskloop"; break;
14803 8 : case OMP_TARGET_DATA: p[i] = "target data"; break;
14804 12 : case OMP_TARGET: p[i] = "target"; break;
14805 12 : case OMP_TARGET_UPDATE: p[i] = "target update"; break;
14806 8 : case OMP_TARGET_ENTER_DATA:
14807 8 : p[i] = "target enter data"; break;
14808 12 : case OMP_TARGET_EXIT_DATA: p[i] = "target exit data"; break;
14809 0 : default: gcc_unreachable ();
14810 : }
14811 56 : error_at (OMP_CLAUSE_LOCATION (c),
14812 : "expected %qs %<if%> clause modifier rather than %qs",
14813 : p[0], p[1]);
14814 56 : remove = true;
14815 : }
14816 : /* Fall through. */
14817 :
14818 4812 : case OMP_CLAUSE_SELF:
14819 4812 : case OMP_CLAUSE_FINAL:
14820 4812 : OMP_CLAUSE_OPERAND (c, 0)
14821 9624 : = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
14822 : /* Fall through. */
14823 :
14824 5715 : case OMP_CLAUSE_NUM_TEAMS:
14825 5715 : if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS
14826 903 : && OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)
14827 5960 : && !is_gimple_min_invariant (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)))
14828 : {
14829 208 : if (error_operand_p (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)))
14830 : {
14831 : remove = true;
14832 : break;
14833 : }
14834 208 : OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)
14835 416 : = get_initialized_tmp_var (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c),
14836 : pre_p, NULL, true);
14837 : }
14838 : /* Fall through. */
14839 :
14840 21796 : case OMP_CLAUSE_SCHEDULE:
14841 21796 : case OMP_CLAUSE_NUM_THREADS:
14842 21796 : case OMP_CLAUSE_THREAD_LIMIT:
14843 21796 : case OMP_CLAUSE_DIST_SCHEDULE:
14844 21796 : case OMP_CLAUSE_DEVICE:
14845 21796 : if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEVICE
14846 21796 : && OMP_CLAUSE_DEVICE_ANCESTOR (c))
14847 : {
14848 125 : if (code != OMP_TARGET)
14849 : {
14850 20 : error_at (OMP_CLAUSE_LOCATION (c),
14851 : "%<device%> clause with %<ancestor%> is only "
14852 : "allowed on %<target%> construct");
14853 20 : remove = true;
14854 20 : break;
14855 : }
14856 :
14857 105 : tree clauses = *orig_list_p;
14858 330 : for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
14859 236 : if (OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_DEVICE
14860 : && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_FIRSTPRIVATE
14861 : && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_PRIVATE
14862 : && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_DEFAULTMAP
14863 : && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_MAP
14864 : )
14865 : {
14866 11 : error_at (OMP_CLAUSE_LOCATION (c),
14867 : "with %<ancestor%>, only the %<device%>, "
14868 : "%<firstprivate%>, %<private%>, %<defaultmap%>, "
14869 : "and %<map%> clauses may appear on the "
14870 : "construct");
14871 11 : remove = true;
14872 11 : break;
14873 : }
14874 : }
14875 21671 : else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEVICE
14876 21671 : && code == OMP_DISPATCH)
14877 : {
14878 272 : bool saved_into_ssa = gimplify_ctxp->into_ssa;
14879 272 : gimplify_ctxp->into_ssa = false;
14880 272 : if (gimplify_expr (&OMP_CLAUSE_DEVICE_ID (c), pre_p, NULL,
14881 : is_gimple_val, fb_rvalue)
14882 : == GS_ERROR)
14883 : remove = true;
14884 272 : else if (DECL_P (OMP_CLAUSE_DEVICE_ID (c)))
14885 55 : omp_add_variable (ctx, OMP_CLAUSE_DEVICE_ID (c),
14886 : GOVD_SHARED | GOVD_SEEN);
14887 272 : gimplify_ctxp->into_ssa = saved_into_ssa;
14888 272 : break;
14889 : }
14890 : /* Fall through. */
14891 :
14892 31245 : case OMP_CLAUSE_PRIORITY:
14893 31245 : case OMP_CLAUSE_GRAINSIZE:
14894 31245 : case OMP_CLAUSE_NUM_TASKS:
14895 31245 : case OMP_CLAUSE_FILTER:
14896 31245 : case OMP_CLAUSE_HINT:
14897 31245 : case OMP_CLAUSE_ASYNC:
14898 31245 : case OMP_CLAUSE_WAIT:
14899 31245 : case OMP_CLAUSE_NUM_GANGS:
14900 31245 : case OMP_CLAUSE_NUM_WORKERS:
14901 31245 : case OMP_CLAUSE_VECTOR_LENGTH:
14902 31245 : case OMP_CLAUSE_WORKER:
14903 31245 : case OMP_CLAUSE_VECTOR:
14904 31245 : if (OMP_CLAUSE_OPERAND (c, 0)
14905 31245 : && !is_gimple_min_invariant (OMP_CLAUSE_OPERAND (c, 0)))
14906 : {
14907 7774 : if (error_operand_p (OMP_CLAUSE_OPERAND (c, 0)))
14908 : {
14909 : remove = true;
14910 : break;
14911 : }
14912 : /* All these clauses care about value, not a particular decl,
14913 : so try to force it into a SSA_NAME or fresh temporary. */
14914 7767 : OMP_CLAUSE_OPERAND (c, 0)
14915 15534 : = get_initialized_tmp_var (OMP_CLAUSE_OPERAND (c, 0),
14916 : pre_p, NULL, true);
14917 : }
14918 : break;
14919 :
14920 2331 : case OMP_CLAUSE_GANG:
14921 2331 : if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
14922 : is_gimple_val, fb_rvalue) == GS_ERROR)
14923 0 : remove = true;
14924 2331 : if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
14925 : is_gimple_val, fb_rvalue) == GS_ERROR)
14926 0 : remove = true;
14927 : break;
14928 :
14929 13092 : case OMP_CLAUSE_NOWAIT:
14930 13092 : nowait = 1;
14931 13092 : break;
14932 :
14933 72 : case OMP_CLAUSE_USES_ALLOCATORS:
14934 72 : sorry_at (OMP_CLAUSE_LOCATION (c), "%<uses_allocators%> clause");
14935 72 : remove = 1;
14936 72 : break;
14937 :
14938 : case OMP_CLAUSE_ORDERED:
14939 : case OMP_CLAUSE_UNTIED:
14940 : case OMP_CLAUSE_COLLAPSE:
14941 : case OMP_CLAUSE_TILE:
14942 : case OMP_CLAUSE_AUTO:
14943 : case OMP_CLAUSE_SEQ:
14944 : case OMP_CLAUSE_INDEPENDENT:
14945 : case OMP_CLAUSE_MERGEABLE:
14946 : case OMP_CLAUSE_PROC_BIND:
14947 : case OMP_CLAUSE_SAFELEN:
14948 : case OMP_CLAUSE_SIMDLEN:
14949 : case OMP_CLAUSE_NOGROUP:
14950 : case OMP_CLAUSE_THREADS:
14951 : case OMP_CLAUSE_SIMD:
14952 : case OMP_CLAUSE_BIND:
14953 : case OMP_CLAUSE_IF_PRESENT:
14954 : case OMP_CLAUSE_FINALIZE:
14955 : case OMP_CLAUSE_INTEROP:
14956 : case OMP_CLAUSE_INIT:
14957 : case OMP_CLAUSE_USE:
14958 : case OMP_CLAUSE_DESTROY:
14959 : case OMP_CLAUSE_DEVICE_TYPE:
14960 : break;
14961 :
14962 52 : case OMP_CLAUSE_DYN_GROUPPRIVATE:
14963 52 : remove = true;
14964 52 : sorry_at (OMP_CLAUSE_LOCATION (c),"%<dyn_groupprivate%> clause");
14965 52 : break;
14966 :
14967 3976 : case OMP_CLAUSE_ORDER:
14968 3976 : ctx->order_concurrent = true;
14969 3976 : break;
14970 :
14971 1010 : case OMP_CLAUSE_DEFAULTMAP:
14972 1010 : enum gimplify_defaultmap_kind gdmkmin, gdmkmax;
14973 1010 : switch (OMP_CLAUSE_DEFAULTMAP_CATEGORY (c))
14974 : {
14975 : case OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED:
14976 : case OMP_CLAUSE_DEFAULTMAP_CATEGORY_ALL:
14977 : gdmkmin = GDMK_SCALAR;
14978 : gdmkmax = GDMK_POINTER;
14979 : break;
14980 : case OMP_CLAUSE_DEFAULTMAP_CATEGORY_SCALAR:
14981 : gdmkmin = GDMK_SCALAR;
14982 : gdmkmax = GDMK_SCALAR_TARGET;
14983 : break;
14984 : case OMP_CLAUSE_DEFAULTMAP_CATEGORY_AGGREGATE:
14985 : gdmkmin = gdmkmax = GDMK_AGGREGATE;
14986 : break;
14987 : case OMP_CLAUSE_DEFAULTMAP_CATEGORY_ALLOCATABLE:
14988 : gdmkmin = gdmkmax = GDMK_ALLOCATABLE;
14989 : break;
14990 : case OMP_CLAUSE_DEFAULTMAP_CATEGORY_POINTER:
14991 : gdmkmin = gdmkmax = GDMK_POINTER;
14992 : break;
14993 0 : default:
14994 0 : gcc_unreachable ();
14995 : }
14996 4471 : for (int gdmk = gdmkmin; gdmk <= gdmkmax; gdmk++)
14997 3461 : switch (OMP_CLAUSE_DEFAULTMAP_BEHAVIOR (c))
14998 : {
14999 91 : case OMP_CLAUSE_DEFAULTMAP_ALLOC:
15000 91 : ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_ALLOC_ONLY;
15001 91 : break;
15002 96 : case OMP_CLAUSE_DEFAULTMAP_TO:
15003 96 : ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_TO_ONLY;
15004 96 : break;
15005 28 : case OMP_CLAUSE_DEFAULTMAP_FROM:
15006 28 : ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_FROM_ONLY;
15007 28 : break;
15008 671 : case OMP_CLAUSE_DEFAULTMAP_TOFROM:
15009 671 : ctx->defaultmap[gdmk] = GOVD_MAP;
15010 671 : break;
15011 380 : case OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE:
15012 380 : ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
15013 380 : break;
15014 2094 : case OMP_CLAUSE_DEFAULTMAP_NONE:
15015 2094 : ctx->defaultmap[gdmk] = 0;
15016 2094 : break;
15017 45 : case OMP_CLAUSE_DEFAULTMAP_PRESENT:
15018 45 : ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
15019 45 : break;
15020 56 : case OMP_CLAUSE_DEFAULTMAP_DEFAULT:
15021 56 : switch (gdmk)
15022 : {
15023 11 : case GDMK_SCALAR:
15024 11 : ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
15025 11 : break;
15026 11 : case GDMK_SCALAR_TARGET:
15027 11 : ctx->defaultmap[gdmk] = (lang_GNU_Fortran ()
15028 11 : ? GOVD_MAP : GOVD_FIRSTPRIVATE);
15029 11 : break;
15030 14 : case GDMK_AGGREGATE:
15031 14 : case GDMK_ALLOCATABLE:
15032 14 : ctx->defaultmap[gdmk] = GOVD_MAP;
15033 14 : break;
15034 20 : case GDMK_POINTER:
15035 20 : ctx->defaultmap[gdmk] = GOVD_MAP;
15036 20 : if (!lang_GNU_Fortran ())
15037 12 : ctx->defaultmap[gdmk] |= GOVD_MAP_0LEN_ARRAY;
15038 : break;
15039 : default:
15040 : gcc_unreachable ();
15041 : }
15042 : break;
15043 0 : default:
15044 0 : gcc_unreachable ();
15045 : }
15046 : break;
15047 :
15048 824 : case OMP_CLAUSE_ALIGNED:
15049 824 : decl = OMP_CLAUSE_DECL (c);
15050 824 : if (error_operand_p (decl))
15051 : {
15052 : remove = true;
15053 : break;
15054 : }
15055 824 : if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
15056 : is_gimple_val, fb_rvalue) == GS_ERROR)
15057 : {
15058 : remove = true;
15059 : break;
15060 : }
15061 824 : if (!is_global_var (decl)
15062 824 : && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
15063 592 : omp_add_variable (ctx, decl, GOVD_ALIGNED);
15064 : break;
15065 :
15066 489 : case OMP_CLAUSE_NONTEMPORAL:
15067 489 : decl = OMP_CLAUSE_DECL (c);
15068 489 : if (error_operand_p (decl))
15069 : {
15070 : remove = true;
15071 : break;
15072 : }
15073 489 : omp_add_variable (ctx, decl, GOVD_NONTEMPORAL);
15074 489 : break;
15075 :
15076 3541 : case OMP_CLAUSE_ALLOCATE:
15077 3541 : decl = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
15078 3541 : if (decl
15079 1700 : && TREE_CODE (decl) == INTEGER_CST
15080 3570 : && wi::eq_p (wi::to_widest (decl), GOMP_OMP_PREDEF_ALLOC_THREADS)
15081 3570 : && (code == OMP_TARGET || code == OMP_TASK || code == OMP_TASKLOOP))
15082 35 : warning_at (OMP_CLAUSE_LOCATION (c), OPT_Wopenmp,
15083 : "allocator with access trait set to %<thread%> "
15084 : "results in undefined behavior for %qs directive",
15085 : code == OMP_TARGET ? "target"
15086 : : (code == OMP_TASK
15087 15 : ? "task" : "taskloop"));
15088 3541 : decl = OMP_CLAUSE_DECL (c);
15089 3541 : if (error_operand_p (decl))
15090 : {
15091 : remove = true;
15092 : break;
15093 : }
15094 3541 : if (gimplify_expr (&OMP_CLAUSE_ALLOCATE_ALLOCATOR (c), pre_p, NULL,
15095 : is_gimple_val, fb_rvalue) == GS_ERROR)
15096 : {
15097 : remove = true;
15098 : break;
15099 : }
15100 3541 : else if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c) == NULL_TREE
15101 3541 : || (TREE_CODE (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c))
15102 : == INTEGER_CST))
15103 : ;
15104 503 : else if (code == OMP_TASKLOOP
15105 503 : || !DECL_P (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)))
15106 66 : OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
15107 132 : = get_initialized_tmp_var (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
15108 : pre_p, NULL, false);
15109 : break;
15110 :
15111 4365 : case OMP_CLAUSE_DEFAULT:
15112 4365 : ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
15113 4365 : break;
15114 :
15115 640 : case OMP_CLAUSE_INCLUSIVE:
15116 640 : case OMP_CLAUSE_EXCLUSIVE:
15117 640 : decl = OMP_CLAUSE_DECL (c);
15118 640 : {
15119 640 : splay_tree_node n = splay_tree_lookup (outer_ctx->variables,
15120 : (splay_tree_key) decl);
15121 640 : if (n == NULL || (n->value & GOVD_REDUCTION) == 0)
15122 : {
15123 5 : error_at (OMP_CLAUSE_LOCATION (c),
15124 : "%qD specified in %qs clause but not in %<inscan%> "
15125 : "%<reduction%> clause on the containing construct",
15126 5 : decl, omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
15127 5 : remove = true;
15128 : }
15129 : else
15130 : {
15131 635 : n->value |= GOVD_REDUCTION_INSCAN;
15132 635 : if (outer_ctx->region_type == ORT_SIMD
15133 520 : && outer_ctx->outer_context
15134 107 : && outer_ctx->outer_context->region_type == ORT_WORKSHARE)
15135 : {
15136 107 : n = splay_tree_lookup (outer_ctx->outer_context->variables,
15137 : (splay_tree_key) decl);
15138 107 : if (n && (n->value & GOVD_REDUCTION) != 0)
15139 107 : n->value |= GOVD_REDUCTION_INSCAN;
15140 : }
15141 : }
15142 : }
15143 : break;
15144 :
15145 103 : case OMP_CLAUSE_NOVARIANTS:
15146 103 : OMP_CLAUSE_NOVARIANTS_EXPR (c)
15147 103 : = gimple_boolify (OMP_CLAUSE_NOVARIANTS_EXPR (c));
15148 103 : break;
15149 115 : case OMP_CLAUSE_NOCONTEXT:
15150 115 : OMP_CLAUSE_NOCONTEXT_EXPR (c)
15151 115 : = gimple_boolify (OMP_CLAUSE_NOCONTEXT_EXPR (c));
15152 115 : break;
15153 0 : case OMP_CLAUSE_NOHOST:
15154 0 : default:
15155 0 : gcc_unreachable ();
15156 : }
15157 :
15158 1400 : if (code == OACC_DATA
15159 5123 : && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
15160 207489 : && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
15161 4504 : || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
15162 : remove = true;
15163 203771 : if (remove)
15164 1099 : *list_p = OMP_CLAUSE_CHAIN (c);
15165 : else
15166 202979 : list_p = &OMP_CLAUSE_CHAIN (c);
15167 204078 : }
15168 :
15169 129929 : if (groups)
15170 : {
15171 35298 : delete grpmap;
15172 17649 : delete groups;
15173 : }
15174 :
15175 129929 : ctx->clauses = *orig_list_p;
15176 129929 : gimplify_omp_ctxp = ctx;
15177 129929 : }
15178 :
15179 : /* Return true if DECL is a candidate for shared to firstprivate
15180 : optimization. We only consider non-addressable scalars, not
15181 : too big, and not references. */
15182 :
15183 : static bool
15184 404130 : omp_shared_to_firstprivate_optimizable_decl_p (tree decl)
15185 : {
15186 404130 : if (TREE_ADDRESSABLE (decl))
15187 : return false;
15188 355015 : tree type = TREE_TYPE (decl);
15189 355015 : if (!is_gimple_reg_type (type)
15190 330267 : || TREE_CODE (type) == REFERENCE_TYPE
15191 681017 : || TREE_ADDRESSABLE (type))
15192 : return false;
15193 : /* Don't optimize too large decls, as each thread/task will have
15194 : its own. */
15195 326002 : HOST_WIDE_INT len = int_size_in_bytes (type);
15196 326002 : if (len == -1 || len > 4 * POINTER_SIZE / BITS_PER_UNIT)
15197 : return false;
15198 325985 : if (omp_privatize_by_reference (decl))
15199 : return false;
15200 : return true;
15201 : }
15202 :
15203 : /* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
15204 : For omp_shared_to_firstprivate_optimizable_decl_p decl mark it as
15205 : GOVD_WRITTEN in outer contexts. */
15206 :
15207 : static void
15208 303091 : omp_mark_stores (struct gimplify_omp_ctx *ctx, tree decl)
15209 : {
15210 464920 : for (; ctx; ctx = ctx->outer_context)
15211 : {
15212 447914 : splay_tree_node n = splay_tree_lookup (ctx->variables,
15213 : (splay_tree_key) decl);
15214 447914 : if (n == NULL)
15215 158250 : continue;
15216 289664 : else if (n->value & GOVD_SHARED)
15217 : {
15218 9121 : n->value |= GOVD_WRITTEN;
15219 9121 : return;
15220 : }
15221 280543 : else if (n->value & GOVD_DATA_SHARE_CLASS)
15222 : return;
15223 : }
15224 : }
15225 :
15226 : /* Helper callback for walk_gimple_seq to discover possible stores
15227 : to omp_shared_to_firstprivate_optimizable_decl_p decls and set
15228 : GOVD_WRITTEN if they are GOVD_SHARED in some outer context
15229 : for those. */
15230 :
15231 : static tree
15232 1123913 : omp_find_stores_op (tree *tp, int *walk_subtrees, void *data)
15233 : {
15234 1123913 : struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
15235 :
15236 1123913 : *walk_subtrees = 0;
15237 1123913 : if (!wi->is_lhs)
15238 : return NULL_TREE;
15239 :
15240 320847 : tree op = *tp;
15241 399600 : do
15242 : {
15243 399600 : if (handled_component_p (op))
15244 78753 : op = TREE_OPERAND (op, 0);
15245 320847 : else if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
15246 320847 : && TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
15247 0 : op = TREE_OPERAND (TREE_OPERAND (op, 0), 0);
15248 : else
15249 : break;
15250 : }
15251 : while (1);
15252 320847 : if (!DECL_P (op) || !omp_shared_to_firstprivate_optimizable_decl_p (op))
15253 58065 : return NULL_TREE;
15254 :
15255 262782 : omp_mark_stores (gimplify_omp_ctxp, op);
15256 262782 : return NULL_TREE;
15257 : }
15258 :
15259 : /* Helper callback for walk_gimple_seq to discover possible stores
15260 : to omp_shared_to_firstprivate_optimizable_decl_p decls and set
15261 : GOVD_WRITTEN if they are GOVD_SHARED in some outer context
15262 : for those. */
15263 :
15264 : static tree
15265 624948 : omp_find_stores_stmt (gimple_stmt_iterator *gsi_p,
15266 : bool *handled_ops_p,
15267 : struct walk_stmt_info *wi)
15268 : {
15269 624948 : gimple *stmt = gsi_stmt (*gsi_p);
15270 624948 : switch (gimple_code (stmt))
15271 : {
15272 : /* Don't recurse on OpenMP constructs for which
15273 : gimplify_adjust_omp_clauses already handled the bodies,
15274 : except handle gimple_omp_for_pre_body. */
15275 26688 : case GIMPLE_OMP_FOR:
15276 26688 : *handled_ops_p = true;
15277 26688 : if (gimple_omp_for_pre_body (stmt))
15278 1780 : walk_gimple_seq (gimple_omp_for_pre_body (stmt),
15279 : omp_find_stores_stmt, omp_find_stores_op, wi);
15280 : break;
15281 8561 : case GIMPLE_OMP_PARALLEL:
15282 8561 : case GIMPLE_OMP_TASK:
15283 8561 : case GIMPLE_OMP_SECTIONS:
15284 8561 : case GIMPLE_OMP_SINGLE:
15285 8561 : case GIMPLE_OMP_SCOPE:
15286 8561 : case GIMPLE_OMP_TARGET:
15287 8561 : case GIMPLE_OMP_TEAMS:
15288 8561 : case GIMPLE_OMP_CRITICAL:
15289 8561 : *handled_ops_p = true;
15290 8561 : break;
15291 : default:
15292 : break;
15293 : }
15294 624948 : return NULL_TREE;
15295 : }
15296 :
15297 : struct gimplify_adjust_omp_clauses_data
15298 : {
15299 : tree *list_p;
15300 : gimple_seq *pre_p;
15301 : };
15302 :
15303 : /* For all variables that were not actually used within the context,
15304 : remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
15305 :
15306 : static int
15307 670109 : gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
15308 : {
15309 670109 : tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
15310 670109 : gimple_seq *pre_p
15311 : = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
15312 670109 : tree decl = (tree) n->key;
15313 670109 : unsigned flags = n->value;
15314 670109 : enum omp_clause_code code;
15315 670109 : tree clause;
15316 670109 : bool private_debug;
15317 :
15318 670109 : if (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
15319 132785 : && (flags & GOVD_LASTPRIVATE_CONDITIONAL) != 0)
15320 : flags = GOVD_SHARED | GOVD_SEEN | GOVD_WRITTEN;
15321 669984 : if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
15322 : return 0;
15323 178136 : if ((flags & GOVD_SEEN) == 0)
15324 : return 0;
15325 157120 : if (flags & GOVD_DEBUG_PRIVATE)
15326 : {
15327 260 : gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_SHARED);
15328 : private_debug = true;
15329 : }
15330 156860 : else if (flags & GOVD_MAP)
15331 : private_debug = false;
15332 : else
15333 138862 : private_debug
15334 138862 : = lang_hooks.decls.omp_private_debug_clause (decl,
15335 138862 : !!(flags & GOVD_SHARED));
15336 138862 : if (private_debug)
15337 : code = OMP_CLAUSE_PRIVATE;
15338 156738 : else if (flags & GOVD_MAP)
15339 : {
15340 17998 : code = OMP_CLAUSE_MAP;
15341 17998 : if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0
15342 17998 : && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
15343 : {
15344 2 : error ("%<_Atomic%> %qD in implicit %<map%> clause", decl);
15345 2 : return 0;
15346 : }
15347 17996 : if (VAR_P (decl)
15348 16058 : && DECL_IN_CONSTANT_POOL (decl)
15349 17997 : && !lookup_attribute ("omp declare target",
15350 1 : DECL_ATTRIBUTES (decl)))
15351 : {
15352 1 : tree id = get_identifier ("omp declare target");
15353 1 : DECL_ATTRIBUTES (decl)
15354 1 : = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (decl));
15355 1 : varpool_node *node = varpool_node::get (decl);
15356 1 : if (node)
15357 : {
15358 1 : node->offloadable = 1;
15359 1 : if (ENABLE_OFFLOADING)
15360 : g->have_offload = true;
15361 : }
15362 : }
15363 : }
15364 138740 : else if (flags & GOVD_SHARED)
15365 : {
15366 49055 : if (is_global_var (decl))
15367 : {
15368 16138 : struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
15369 25325 : while (ctx != NULL)
15370 : {
15371 17604 : splay_tree_node on
15372 17604 : = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
15373 17604 : if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
15374 : | GOVD_PRIVATE | GOVD_REDUCTION
15375 : | GOVD_LINEAR | GOVD_MAP)) != 0)
15376 : break;
15377 9187 : ctx = ctx->outer_context;
15378 : }
15379 16138 : if (ctx == NULL)
15380 : return 0;
15381 : }
15382 41334 : code = OMP_CLAUSE_SHARED;
15383 : /* Don't optimize shared into firstprivate for read-only vars
15384 : on tasks with depend clause, we shouldn't try to copy them
15385 : until the dependencies are satisfied. */
15386 41334 : if (gimplify_omp_ctxp->has_depend)
15387 350 : flags |= GOVD_WRITTEN;
15388 : }
15389 89685 : else if (flags & GOVD_PRIVATE)
15390 : code = OMP_CLAUSE_PRIVATE;
15391 31218 : else if (flags & GOVD_FIRSTPRIVATE)
15392 : {
15393 21817 : code = OMP_CLAUSE_FIRSTPRIVATE;
15394 21817 : if ((gimplify_omp_ctxp->region_type & ORT_TARGET)
15395 13783 : && (gimplify_omp_ctxp->region_type & ORT_ACC) == 0
15396 31919 : && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
15397 : {
15398 1 : error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on "
15399 : "%<target%> construct", decl);
15400 1 : return 0;
15401 : }
15402 : }
15403 9401 : else if (flags & GOVD_LASTPRIVATE)
15404 : code = OMP_CLAUSE_LASTPRIVATE;
15405 241 : else if (flags & (GOVD_ALIGNED | GOVD_NONTEMPORAL))
15406 : return 0;
15407 118 : else if (flags & GOVD_CONDTEMP)
15408 : {
15409 118 : code = OMP_CLAUSE__CONDTEMP_;
15410 118 : gimple_add_tmp_var (decl);
15411 : }
15412 : else
15413 0 : gcc_unreachable ();
15414 :
15415 140113 : if (((flags & GOVD_LASTPRIVATE)
15416 139423 : || (code == OMP_CLAUSE_SHARED && (flags & GOVD_WRITTEN)))
15417 155332 : && omp_shared_to_firstprivate_optimizable_decl_p (decl))
15418 15205 : omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
15419 :
15420 149273 : tree chain = *list_p;
15421 149273 : clause = build_omp_clause (input_location, code);
15422 149273 : OMP_CLAUSE_DECL (clause) = decl;
15423 149273 : OMP_CLAUSE_CHAIN (clause) = chain;
15424 149273 : if (private_debug)
15425 382 : OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
15426 148891 : else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
15427 6 : OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
15428 148885 : else if (code == OMP_CLAUSE_SHARED
15429 41334 : && (flags & GOVD_WRITTEN) == 0
15430 184160 : && omp_shared_to_firstprivate_optimizable_decl_p (decl))
15431 21044 : OMP_CLAUSE_SHARED_READONLY (clause) = 1;
15432 127841 : else if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_EXPLICIT) == 0)
15433 21816 : OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause) = 1;
15434 106025 : else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0)
15435 : {
15436 626 : tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP);
15437 626 : OMP_CLAUSE_DECL (nc) = decl;
15438 626 : if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
15439 626 : && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
15440 15 : OMP_CLAUSE_DECL (clause)
15441 30 : = build_fold_indirect_ref_loc (input_location, decl);
15442 626 : OMP_CLAUSE_DECL (clause)
15443 626 : = build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause),
15444 : build_int_cst (build_pointer_type (char_type_node), 0));
15445 626 : OMP_CLAUSE_SIZE (clause) = size_zero_node;
15446 626 : OMP_CLAUSE_SIZE (nc) = size_zero_node;
15447 626 : OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC);
15448 626 : OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1;
15449 626 : tree dtype = TREE_TYPE (decl);
15450 626 : if (TREE_CODE (dtype) == REFERENCE_TYPE)
15451 15 : dtype = TREE_TYPE (dtype);
15452 : /* FIRSTPRIVATE_POINTER doesn't work well if we have a
15453 : multiply-indirected pointer. If we have a reference to a pointer to
15454 : a pointer, it's possible that this should really be
15455 : GOMP_MAP_FIRSTPRIVATE_REFERENCE -- but that also doesn't work at the
15456 : moment, so stick with this. (See PR113279 and testcases
15457 : baseptrs-{4,6}.C:ref2ptrptr_offset_decl_member_slice). */
15458 626 : if (TREE_CODE (dtype) == POINTER_TYPE
15459 626 : && TREE_CODE (TREE_TYPE (dtype)) == POINTER_TYPE)
15460 19 : OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
15461 : else
15462 607 : OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
15463 626 : OMP_CLAUSE_CHAIN (nc) = chain;
15464 626 : OMP_CLAUSE_CHAIN (clause) = nc;
15465 626 : struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
15466 626 : gimplify_omp_ctxp = ctx->outer_context;
15467 626 : gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0),
15468 : pre_p, NULL, is_gimple_val, fb_rvalue);
15469 626 : gimplify_omp_ctxp = ctx;
15470 626 : }
15471 17370 : else if (code == OMP_CLAUSE_MAP)
15472 : {
15473 17370 : int kind;
15474 : /* Not all combinations of these GOVD_MAP flags are actually valid. */
15475 17370 : switch (flags & (GOVD_MAP_TO_ONLY
15476 : | GOVD_MAP_FORCE
15477 : | GOVD_MAP_FORCE_PRESENT
15478 : | GOVD_MAP_ALLOC_ONLY
15479 : | GOVD_MAP_FROM_ONLY))
15480 : {
15481 : case 0:
15482 : kind = GOMP_MAP_TOFROM;
15483 : break;
15484 1042 : case GOVD_MAP_FORCE:
15485 1042 : kind = GOMP_MAP_TOFROM | GOMP_MAP_FLAG_FORCE;
15486 1042 : break;
15487 863 : case GOVD_MAP_TO_ONLY:
15488 863 : kind = GOMP_MAP_TO;
15489 863 : break;
15490 16 : case GOVD_MAP_FROM_ONLY:
15491 16 : kind = GOMP_MAP_FROM;
15492 16 : break;
15493 37 : case GOVD_MAP_ALLOC_ONLY:
15494 37 : kind = GOMP_MAP_ALLOC;
15495 37 : break;
15496 0 : case GOVD_MAP_TO_ONLY | GOVD_MAP_FORCE:
15497 0 : kind = GOMP_MAP_TO | GOMP_MAP_FLAG_FORCE;
15498 0 : break;
15499 : case GOVD_MAP_FORCE_PRESENT:
15500 326 : kind = GOMP_MAP_FORCE_PRESENT;
15501 : break;
15502 : case GOVD_MAP_FORCE_PRESENT | GOVD_MAP_ALLOC_ONLY:
15503 326 : kind = GOMP_MAP_FORCE_PRESENT;
15504 : break;
15505 0 : default:
15506 0 : gcc_unreachable ();
15507 : }
15508 17370 : OMP_CLAUSE_SET_MAP_KIND (clause, kind);
15509 : /* Setting of the implicit flag for the runtime is currently disabled for
15510 : OpenACC. */
15511 17370 : if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0)
15512 9841 : OMP_CLAUSE_MAP_RUNTIME_IMPLICIT_P (clause) = 1;
15513 17370 : if (DECL_SIZE (decl)
15514 17370 : && !poly_int_tree_p (DECL_SIZE (decl)))
15515 : {
15516 535 : tree decl2 = DECL_VALUE_EXPR (decl);
15517 535 : gcc_assert (INDIRECT_REF_P (decl2));
15518 535 : decl2 = TREE_OPERAND (decl2, 0);
15519 535 : gcc_assert (DECL_P (decl2));
15520 535 : tree mem = build_simple_mem_ref (decl2);
15521 535 : OMP_CLAUSE_DECL (clause) = mem;
15522 535 : OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
15523 535 : if (gimplify_omp_ctxp->outer_context)
15524 : {
15525 446 : struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
15526 446 : omp_notice_variable (ctx, decl2, true);
15527 446 : omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
15528 : }
15529 535 : tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
15530 : OMP_CLAUSE_MAP);
15531 535 : OMP_CLAUSE_DECL (nc) = decl;
15532 535 : OMP_CLAUSE_SIZE (nc) = size_zero_node;
15533 535 : if (gimplify_omp_ctxp->target_firstprivatize_array_bases)
15534 535 : OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
15535 : else
15536 0 : OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
15537 535 : OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
15538 535 : OMP_CLAUSE_CHAIN (clause) = nc;
15539 : }
15540 16835 : else if (gimplify_omp_ctxp->target_firstprivatize_array_bases
15541 16835 : && omp_privatize_by_reference (decl))
15542 : {
15543 28 : OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl);
15544 28 : OMP_CLAUSE_SIZE (clause)
15545 28 : = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))));
15546 28 : struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
15547 28 : gimplify_omp_ctxp = ctx->outer_context;
15548 28 : gimplify_expr (&OMP_CLAUSE_SIZE (clause),
15549 : pre_p, NULL, is_gimple_val, fb_rvalue);
15550 28 : gimplify_omp_ctxp = ctx;
15551 28 : tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
15552 : OMP_CLAUSE_MAP);
15553 28 : OMP_CLAUSE_DECL (nc) = decl;
15554 28 : OMP_CLAUSE_SIZE (nc) = size_zero_node;
15555 28 : OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
15556 28 : OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
15557 28 : OMP_CLAUSE_CHAIN (clause) = nc;
15558 : }
15559 : else
15560 16807 : OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
15561 : }
15562 149273 : if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
15563 : {
15564 690 : tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
15565 690 : OMP_CLAUSE_DECL (nc) = decl;
15566 690 : OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
15567 690 : OMP_CLAUSE_CHAIN (nc) = chain;
15568 690 : OMP_CLAUSE_CHAIN (clause) = nc;
15569 690 : struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
15570 690 : gimplify_omp_ctxp = ctx->outer_context;
15571 690 : lang_hooks.decls.omp_finish_clause (nc, pre_p,
15572 690 : (ctx->region_type & ORT_ACC) != 0);
15573 690 : gimplify_omp_ctxp = ctx;
15574 : }
15575 149273 : *list_p = clause;
15576 149273 : struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
15577 149273 : gimplify_omp_ctxp = ctx->outer_context;
15578 : /* Don't call omp_finish_clause on implicitly added OMP_CLAUSE_PRIVATE
15579 : in simd. Those are only added for the local vars inside of simd body
15580 : and they don't need to be e.g. default constructible. */
15581 149273 : if (code != OMP_CLAUSE_PRIVATE || ctx->region_type != ORT_SIMD)
15582 141223 : lang_hooks.decls.omp_finish_clause (clause, pre_p,
15583 141223 : (ctx->region_type & ORT_ACC) != 0);
15584 149273 : if (gimplify_omp_ctxp)
15585 184133 : for (; clause != chain; clause = OMP_CLAUSE_CHAIN (clause))
15586 93479 : if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP
15587 93479 : && DECL_P (OMP_CLAUSE_SIZE (clause)))
15588 1241 : omp_notice_variable (gimplify_omp_ctxp, OMP_CLAUSE_SIZE (clause),
15589 : true);
15590 149273 : gimplify_omp_ctxp = ctx;
15591 149273 : return 0;
15592 : }
15593 :
15594 : static void
15595 128465 : gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
15596 : enum tree_code code,
15597 : gimple_seq *loops_seq_p = NULL)
15598 : {
15599 128465 : struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
15600 128465 : tree *orig_list_p = list_p;
15601 128465 : tree c, decl;
15602 128465 : bool has_inscan_reductions = false;
15603 :
15604 128465 : if (body)
15605 : {
15606 : struct gimplify_omp_ctx *octx;
15607 224177 : for (octx = ctx; octx; octx = octx->outer_context)
15608 173232 : if ((octx->region_type & (ORT_PARALLEL | ORT_TASK | ORT_TEAMS)) != 0)
15609 : break;
15610 111129 : if (octx)
15611 : {
15612 60184 : struct walk_stmt_info wi;
15613 60184 : memset (&wi, 0, sizeof (wi));
15614 60184 : walk_gimple_seq (body, omp_find_stores_stmt,
15615 : omp_find_stores_op, &wi);
15616 : }
15617 : }
15618 :
15619 128465 : if (ctx->add_safelen1)
15620 : {
15621 : /* If there are VLAs in the body of simd loop, prevent
15622 : vectorization. */
15623 2 : gcc_assert (ctx->region_type == ORT_SIMD);
15624 2 : c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
15625 2 : OMP_CLAUSE_SAFELEN_EXPR (c) = integer_one_node;
15626 2 : OMP_CLAUSE_CHAIN (c) = *list_p;
15627 2 : *list_p = c;
15628 2 : list_p = &OMP_CLAUSE_CHAIN (c);
15629 : }
15630 :
15631 128465 : if (ctx->region_type == ORT_WORKSHARE
15632 39436 : && ctx->outer_context
15633 27882 : && ctx->outer_context->region_type == ORT_COMBINED_PARALLEL)
15634 : {
15635 24916 : for (c = ctx->outer_context->clauses; c; c = OMP_CLAUSE_CHAIN (c))
15636 12568 : if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
15637 12568 : && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
15638 : {
15639 125 : decl = OMP_CLAUSE_DECL (c);
15640 125 : splay_tree_node n
15641 125 : = splay_tree_lookup (ctx->outer_context->variables,
15642 : (splay_tree_key) decl);
15643 125 : gcc_checking_assert (!splay_tree_lookup (ctx->variables,
15644 : (splay_tree_key) decl));
15645 125 : omp_add_variable (ctx, decl, n->value);
15646 125 : tree c2 = copy_node (c);
15647 125 : OMP_CLAUSE_CHAIN (c2) = *list_p;
15648 125 : *list_p = c2;
15649 125 : if ((n->value & GOVD_FIRSTPRIVATE) == 0)
15650 103 : continue;
15651 22 : c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
15652 : OMP_CLAUSE_FIRSTPRIVATE);
15653 22 : OMP_CLAUSE_DECL (c2) = decl;
15654 22 : OMP_CLAUSE_CHAIN (c2) = *list_p;
15655 22 : *list_p = c2;
15656 : }
15657 : }
15658 :
15659 128465 : if (code == OMP_TARGET
15660 128465 : || code == OMP_TARGET_DATA
15661 128465 : || code == OMP_TARGET_ENTER_DATA
15662 112637 : || code == OMP_TARGET_EXIT_DATA)
15663 : {
15664 16535 : tree mapper_clauses = NULL_TREE;
15665 16535 : instantiate_mapper_info im_info;
15666 :
15667 16535 : im_info.mapper_clauses_p = &mapper_clauses;
15668 16535 : im_info.omp_ctx = ctx;
15669 16535 : im_info.pre_p = pre_p;
15670 :
15671 16535 : splay_tree_foreach (ctx->variables,
15672 : omp_instantiate_implicit_mappers,
15673 : (void *) &im_info);
15674 :
15675 16535 : if (mapper_clauses)
15676 : {
15677 45 : mapper_clauses
15678 45 : = lang_hooks.decls.omp_finish_mapper_clauses (mapper_clauses);
15679 :
15680 : /* Stick the implicitly-expanded mapper clauses at the end of the
15681 : clause list. */
15682 45 : tree *tail = list_p;
15683 139 : while (*tail)
15684 94 : tail = &OMP_CLAUSE_CHAIN (*tail);
15685 45 : *tail = mapper_clauses;
15686 : }
15687 :
15688 16535 : vec<omp_mapping_group> *groups;
15689 16535 : groups = omp_gather_mapping_groups (list_p);
15690 16535 : hash_map<tree_operand_hash_no_se, omp_mapping_group *> *grpmap = NULL;
15691 :
15692 16535 : if (groups)
15693 : {
15694 7979 : grpmap = omp_index_mapping_groups (groups);
15695 :
15696 7979 : omp_resolve_clause_dependencies (code, groups, grpmap);
15697 7979 : omp_build_struct_sibling_lists (code, ctx->region_type, groups,
15698 : &grpmap, list_p);
15699 :
15700 7979 : omp_mapping_group *outlist = NULL;
15701 :
15702 15958 : delete grpmap;
15703 7979 : delete groups;
15704 :
15705 : /* Rebuild now we have struct sibling lists. */
15706 7979 : groups = omp_gather_mapping_groups (list_p);
15707 7979 : grpmap = omp_index_mapping_groups (groups);
15708 :
15709 7979 : bool enter_exit = (code == OMP_TARGET_ENTER_DATA
15710 7979 : || code == OMP_TARGET_EXIT_DATA);
15711 :
15712 7979 : outlist = omp_tsort_mapping_groups (groups, grpmap, enter_exit);
15713 7979 : outlist = omp_segregate_mapping_groups (outlist);
15714 7979 : list_p = omp_reorder_mapping_groups (groups, outlist, list_p);
15715 :
15716 7979 : delete grpmap;
15717 7979 : delete groups;
15718 : }
15719 16535 : }
15720 111930 : else if (ctx->region_type & ORT_ACC)
15721 : {
15722 29843 : vec<omp_mapping_group> *groups;
15723 29843 : groups = omp_gather_mapping_groups (list_p);
15724 29843 : if (groups)
15725 : {
15726 9461 : hash_map<tree_operand_hash_no_se, omp_mapping_group *> *grpmap;
15727 9461 : grpmap = omp_index_mapping_groups (groups);
15728 :
15729 9461 : oacc_resolve_clause_dependencies (groups, grpmap);
15730 9461 : omp_build_struct_sibling_lists (code, ctx->region_type, groups,
15731 : &grpmap, list_p);
15732 :
15733 9461 : delete groups;
15734 18922 : delete grpmap;
15735 : }
15736 : }
15737 :
15738 128465 : tree attach_list = NULL_TREE;
15739 128465 : tree *attach_tail = &attach_list;
15740 :
15741 128465 : tree *grp_start_p = NULL, grp_end = NULL_TREE;
15742 :
15743 375147 : while ((c = *list_p) != NULL)
15744 : {
15745 246682 : splay_tree_node n;
15746 246682 : bool remove = false;
15747 246682 : bool move_attach = false;
15748 :
15749 288600 : if (grp_end && c == OMP_CLAUSE_CHAIN (grp_end))
15750 : grp_end = NULL_TREE;
15751 :
15752 246682 : switch (OMP_CLAUSE_CODE (c))
15753 : {
15754 7943 : case OMP_CLAUSE_FIRSTPRIVATE:
15755 7943 : if ((ctx->region_type & ORT_TARGET)
15756 1902 : && (ctx->region_type & ORT_ACC) == 0
15757 9239 : && TYPE_ATOMIC (strip_array_types
15758 : (TREE_TYPE (OMP_CLAUSE_DECL (c)))))
15759 : {
15760 4 : error_at (OMP_CLAUSE_LOCATION (c),
15761 : "%<_Atomic%> %qD in %<firstprivate%> clause on "
15762 2 : "%<target%> construct", OMP_CLAUSE_DECL (c));
15763 2 : remove = true;
15764 2 : break;
15765 : }
15766 7941 : if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
15767 : {
15768 380 : decl = OMP_CLAUSE_DECL (c);
15769 380 : n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
15770 380 : if ((n->value & GOVD_MAP) != 0)
15771 : {
15772 : remove = true;
15773 : break;
15774 : }
15775 368 : OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT_TARGET (c) = 0;
15776 368 : OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c) = 0;
15777 : }
15778 : /* FALLTHRU */
15779 36739 : case OMP_CLAUSE_PRIVATE:
15780 36739 : case OMP_CLAUSE_SHARED:
15781 36739 : case OMP_CLAUSE_LINEAR:
15782 36739 : decl = OMP_CLAUSE_DECL (c);
15783 36739 : n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
15784 36739 : remove = !(n->value & GOVD_SEEN);
15785 36739 : if ((n->value & GOVD_LASTPRIVATE_CONDITIONAL) != 0
15786 48 : && code == OMP_PARALLEL
15787 36761 : && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
15788 : remove = true;
15789 36717 : if (! remove)
15790 : {
15791 32115 : bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
15792 32115 : if ((n->value & GOVD_DEBUG_PRIVATE)
15793 32115 : || lang_hooks.decls.omp_private_debug_clause (decl, shared))
15794 : {
15795 87 : gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
15796 : || ((n->value & GOVD_DATA_SHARE_CLASS)
15797 : == GOVD_SHARED));
15798 87 : OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
15799 87 : OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
15800 : }
15801 32115 : if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
15802 4159 : && ctx->has_depend
15803 32673 : && DECL_P (decl))
15804 558 : n->value |= GOVD_WRITTEN;
15805 32115 : if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
15806 4159 : && (n->value & GOVD_WRITTEN) == 0
15807 3042 : && DECL_P (decl)
15808 35157 : && omp_shared_to_firstprivate_optimizable_decl_p (decl))
15809 526 : OMP_CLAUSE_SHARED_READONLY (c) = 1;
15810 31589 : else if (DECL_P (decl)
15811 31589 : && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
15812 3633 : && (n->value & GOVD_WRITTEN) != 0)
15813 30472 : || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
15814 9032 : && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
15815 38627 : && omp_shared_to_firstprivate_optimizable_decl_p (decl))
15816 5734 : omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
15817 : }
15818 : else
15819 4624 : n->value &= ~GOVD_EXPLICIT;
15820 : break;
15821 :
15822 13004 : case OMP_CLAUSE_LASTPRIVATE:
15823 : /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
15824 : accurately reflect the presence of a FIRSTPRIVATE clause. */
15825 13004 : decl = OMP_CLAUSE_DECL (c);
15826 13004 : n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
15827 13004 : OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
15828 13004 : = (n->value & GOVD_FIRSTPRIVATE) != 0;
15829 13004 : if (code == OMP_DISTRIBUTE
15830 13004 : && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
15831 : {
15832 4 : remove = true;
15833 4 : error_at (OMP_CLAUSE_LOCATION (c),
15834 : "same variable used in %<firstprivate%> and "
15835 : "%<lastprivate%> clauses on %<distribute%> "
15836 : "construct");
15837 : }
15838 13004 : if (!remove
15839 13000 : && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
15840 13000 : && DECL_P (decl)
15841 13000 : && omp_shared_to_firstprivate_optimizable_decl_p (decl))
15842 11407 : omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
15843 13004 : if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) && code == OMP_PARALLEL)
15844 : remove = true;
15845 : break;
15846 :
15847 824 : case OMP_CLAUSE_ALIGNED:
15848 824 : decl = OMP_CLAUSE_DECL (c);
15849 824 : if (!is_global_var (decl))
15850 : {
15851 738 : n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
15852 738 : remove = n == NULL || !(n->value & GOVD_SEEN);
15853 88 : if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
15854 : {
15855 88 : struct gimplify_omp_ctx *octx;
15856 88 : if (n != NULL
15857 88 : && (n->value & (GOVD_DATA_SHARE_CLASS
15858 : & ~GOVD_FIRSTPRIVATE)))
15859 : remove = true;
15860 : else
15861 100 : for (octx = ctx->outer_context; octx;
15862 12 : octx = octx->outer_context)
15863 : {
15864 24 : n = splay_tree_lookup (octx->variables,
15865 : (splay_tree_key) decl);
15866 24 : if (n == NULL)
15867 12 : continue;
15868 12 : if (n->value & GOVD_LOCAL)
15869 : break;
15870 : /* We have to avoid assigning a shared variable
15871 : to itself when trying to add
15872 : __builtin_assume_aligned. */
15873 12 : if (n->value & GOVD_SHARED)
15874 : {
15875 : remove = true;
15876 : break;
15877 : }
15878 : }
15879 : }
15880 : }
15881 86 : else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
15882 : {
15883 86 : n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
15884 86 : if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
15885 : remove = true;
15886 : }
15887 : break;
15888 :
15889 552 : case OMP_CLAUSE_HAS_DEVICE_ADDR:
15890 552 : decl = OMP_CLAUSE_DECL (c);
15891 552 : while (INDIRECT_REF_P (decl)
15892 601 : || TREE_CODE (decl) == ARRAY_REF)
15893 49 : decl = TREE_OPERAND (decl, 0);
15894 552 : n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
15895 552 : remove = n == NULL || !(n->value & GOVD_SEEN);
15896 : break;
15897 :
15898 900 : case OMP_CLAUSE_IS_DEVICE_PTR:
15899 900 : case OMP_CLAUSE_NONTEMPORAL:
15900 900 : decl = OMP_CLAUSE_DECL (c);
15901 900 : n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
15902 900 : remove = n == NULL || !(n->value & GOVD_SEEN);
15903 : break;
15904 :
15905 55457 : case OMP_CLAUSE_MAP:
15906 55457 : if (OMP_CLAUSE_MAP_GIMPLE_ONLY (c))
15907 : {
15908 90 : remove = true;
15909 90 : goto end_adjust_omp_map_clause;
15910 : }
15911 55367 : decl = OMP_CLAUSE_DECL (c);
15912 55367 : if (!grp_end)
15913 : {
15914 30976 : grp_start_p = list_p;
15915 30976 : grp_end = *omp_group_last (grp_start_p);
15916 : }
15917 55367 : switch (OMP_CLAUSE_MAP_KIND (c))
15918 : {
15919 125 : case GOMP_MAP_PRESENT_ALLOC:
15920 125 : case GOMP_MAP_PRESENT_TO:
15921 125 : case GOMP_MAP_PRESENT_FROM:
15922 125 : case GOMP_MAP_PRESENT_TOFROM:
15923 125 : OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_PRESENT);
15924 125 : break;
15925 : default:
15926 : break;
15927 : }
15928 55367 : switch (code)
15929 : {
15930 4563 : case OACC_DATA:
15931 4563 : if (TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE)
15932 : break;
15933 : /* Fallthrough. */
15934 18731 : case OACC_HOST_DATA:
15935 18731 : case OACC_ENTER_DATA:
15936 18731 : case OACC_EXIT_DATA:
15937 18731 : case OMP_TARGET_DATA:
15938 18731 : case OMP_TARGET_ENTER_DATA:
15939 18731 : case OMP_TARGET_EXIT_DATA:
15940 18731 : if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
15941 18731 : || (OMP_CLAUSE_MAP_KIND (c)
15942 : == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
15943 : /* For target {,enter ,exit }data only the array slice is
15944 : mapped, but not the pointer to it. */
15945 : remove = true;
15946 18731 : if (code == OMP_TARGET_EXIT_DATA
15947 18731 : && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER
15948 2586 : || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER))
15949 : remove = true;
15950 : break;
15951 : case OMP_TARGET:
15952 : break;
15953 : default:
15954 : break;
15955 : }
15956 18729 : if (remove)
15957 : break;
15958 55032 : if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
15959 : {
15960 : /* Sanity check: attach/detach map kinds use the size as a bias,
15961 : and it's never right to use the decl size for such
15962 : mappings. */
15963 16985 : gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH
15964 : && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH
15965 : && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DETACH
15966 : && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH_DETACH
15967 : && (OMP_CLAUSE_MAP_KIND (c)
15968 : != GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION));
15969 20760 : OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
15970 3775 : : TYPE_SIZE_UNIT (TREE_TYPE (decl));
15971 : }
15972 55032 : gimplify_omp_ctxp = ctx->outer_context;
15973 55032 : gimple_seq *seq_p;
15974 55032 : seq_p = enter_omp_iterator_loop_context (c, loops_seq_p, pre_p);
15975 55032 : if (gimplify_expr (&OMP_CLAUSE_SIZE (c), seq_p, NULL,
15976 : is_gimple_val, fb_rvalue) == GS_ERROR)
15977 : {
15978 0 : gimplify_omp_ctxp = ctx;
15979 0 : remove = true;
15980 0 : goto end_adjust_omp_map_clause;
15981 : }
15982 55032 : else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
15983 52203 : || (OMP_CLAUSE_MAP_KIND (c)
15984 : == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
15985 51794 : || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
15986 61536 : && TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST)
15987 : {
15988 852 : OMP_CLAUSE_SIZE (c)
15989 852 : = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), seq_p, NULL,
15990 : false);
15991 852 : if ((ctx->region_type & ORT_TARGET) != 0)
15992 643 : omp_add_variable (ctx, OMP_CLAUSE_SIZE (c),
15993 : GOVD_FIRSTPRIVATE | GOVD_SEEN);
15994 : }
15995 55032 : gimplify_omp_ctxp = ctx;
15996 : /* Data clauses associated with reductions must be
15997 : compatible with present_or_copy. Warn and adjust the clause
15998 : if that is not the case. */
15999 55032 : if (ctx->region_type == ORT_ACC_PARALLEL
16000 46053 : || ctx->region_type == ORT_ACC_SERIAL)
16001 : {
16002 9535 : tree t = DECL_P (decl) ? decl : TREE_OPERAND (decl, 0);
16003 9535 : n = NULL;
16004 :
16005 9535 : if (DECL_P (t))
16006 7770 : n = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
16007 :
16008 7770 : if (n && (n->value & GOVD_REDUCTION))
16009 : {
16010 809 : enum gomp_map_kind kind = OMP_CLAUSE_MAP_KIND (c);
16011 :
16012 809 : OMP_CLAUSE_MAP_IN_REDUCTION (c) = 1;
16013 809 : if ((kind & GOMP_MAP_TOFROM) != GOMP_MAP_TOFROM
16014 159 : && kind != GOMP_MAP_FORCE_PRESENT
16015 159 : && kind != GOMP_MAP_POINTER)
16016 : {
16017 120 : warning_at (OMP_CLAUSE_LOCATION (c), 0,
16018 : "incompatible data clause with reduction "
16019 : "on %qE; promoting to %<present_or_copy%>",
16020 120 : DECL_NAME (t));
16021 120 : OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
16022 : }
16023 : }
16024 : }
16025 55032 : if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
16026 52338 : || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT_UNORD)
16027 55306 : && (code == OMP_TARGET_EXIT_DATA || code == OACC_EXIT_DATA))
16028 : {
16029 514 : remove = true;
16030 514 : goto end_adjust_omp_map_clause;
16031 : }
16032 : /* If we have a DECL_VALUE_EXPR (e.g. this is a class member and/or
16033 : a variable captured in a lambda closure), look through that now
16034 : before the DECL_P check below. (A code other than COMPONENT_REF,
16035 : i.e. INDIRECT_REF, will be a VLA/variable-length array
16036 : section. A global var may be a variable in a common block. We
16037 : don't want to do this here for either of those.) */
16038 54518 : if ((ctx->region_type & ORT_ACC) == 0
16039 28313 : && DECL_P (decl)
16040 11940 : && !is_global_var (decl)
16041 10561 : && DECL_HAS_VALUE_EXPR_P (decl)
16042 54708 : && TREE_CODE (DECL_VALUE_EXPR (decl)) == COMPONENT_REF)
16043 0 : decl = OMP_CLAUSE_DECL (c) = DECL_VALUE_EXPR (decl);
16044 54518 : if (TREE_CODE (decl) == TARGET_EXPR)
16045 : {
16046 0 : if (gimplify_expr (&OMP_CLAUSE_DECL (c), seq_p, NULL,
16047 : is_gimple_lvalue, fb_lvalue) == GS_ERROR)
16048 25498 : remove = true;
16049 : }
16050 54518 : else if (!DECL_P (decl))
16051 : {
16052 29020 : if ((ctx->region_type & ORT_TARGET) != 0
16053 29020 : && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
16054 : {
16055 24 : if (INDIRECT_REF_P (decl)
16056 0 : && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
16057 24 : && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
16058 : == REFERENCE_TYPE))
16059 0 : decl = TREE_OPERAND (decl, 0);
16060 24 : if (TREE_CODE (decl) == COMPONENT_REF)
16061 : {
16062 0 : while (TREE_CODE (decl) == COMPONENT_REF)
16063 0 : decl = TREE_OPERAND (decl, 0);
16064 0 : if (DECL_P (decl))
16065 : {
16066 0 : n = splay_tree_lookup (ctx->variables,
16067 : (splay_tree_key) decl);
16068 0 : if (!(n->value & GOVD_SEEN))
16069 29020 : remove = true;
16070 : }
16071 : }
16072 : }
16073 :
16074 29020 : tree d = decl, *pd;
16075 29020 : if (TREE_CODE (d) == ARRAY_REF)
16076 : {
16077 5642 : while (TREE_CODE (d) == ARRAY_REF)
16078 2881 : d = TREE_OPERAND (d, 0);
16079 2761 : if (TREE_CODE (d) == COMPONENT_REF
16080 2761 : && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
16081 : decl = d;
16082 : }
16083 29020 : pd = &OMP_CLAUSE_DECL (c);
16084 29020 : if (d == decl
16085 26616 : && TREE_CODE (decl) == INDIRECT_REF
16086 15291 : && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
16087 1470 : && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
16088 : == REFERENCE_TYPE)
16089 29820 : && (OMP_CLAUSE_MAP_KIND (c)
16090 : != GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION))
16091 : {
16092 796 : pd = &TREE_OPERAND (decl, 0);
16093 796 : decl = TREE_OPERAND (decl, 0);
16094 : }
16095 :
16096 29020 : if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
16097 3033 : switch (code)
16098 : {
16099 427 : case OACC_ENTER_DATA:
16100 427 : case OACC_EXIT_DATA:
16101 427 : if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
16102 : == ARRAY_TYPE)
16103 : remove = true;
16104 417 : else if (code == OACC_ENTER_DATA)
16105 263 : goto change_to_attach;
16106 : /* Fallthrough. */
16107 605 : case OMP_TARGET_EXIT_DATA:
16108 605 : OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_DETACH);
16109 605 : OMP_CLAUSE_MAP_SIZE_NEEDS_ADJUSTMENT (c) = 0;
16110 605 : break;
16111 71 : case OACC_UPDATE:
16112 : /* An "attach/detach" operation on an update directive
16113 : should behave as a GOMP_MAP_ALWAYS_POINTER. Note that
16114 : both GOMP_MAP_ATTACH_DETACH and GOMP_MAP_ALWAYS_POINTER
16115 : kinds depend on the previous mapping (for non-TARGET
16116 : regions). */
16117 71 : OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_ALWAYS_POINTER);
16118 71 : break;
16119 2357 : default:
16120 2357 : change_to_attach:
16121 2357 : gcc_assert (!OMP_CLAUSE_MAP_SIZE_NEEDS_ADJUSTMENT (c));
16122 2357 : OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_ATTACH);
16123 2357 : if ((ctx->region_type & ORT_TARGET) != 0)
16124 1777 : move_attach = true;
16125 : }
16126 25987 : else if ((ctx->region_type & ORT_TARGET) != 0
16127 25987 : && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
16128 11562 : || (OMP_CLAUSE_MAP_KIND (c)
16129 : == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION)))
16130 : move_attach = true;
16131 :
16132 : /* If we have e.g. map(struct: *var), don't gimplify the
16133 : argument since omp-low.cc wants to see the decl itself. */
16134 29020 : if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
16135 1135 : goto end_adjust_omp_map_clause;
16136 :
16137 : /* We've already partly gimplified this in
16138 : gimplify_scan_omp_clauses. Don't do any more. */
16139 27885 : if (code == OMP_TARGET && OMP_CLAUSE_MAP_IN_REDUCTION (c))
16140 88 : goto end_adjust_omp_map_clause;
16141 :
16142 27797 : gimplify_omp_ctxp = ctx->outer_context;
16143 27797 : if (gimplify_expr (pd, seq_p, NULL, is_gimple_lvalue,
16144 : fb_lvalue) == GS_ERROR)
16145 0 : remove = true;
16146 27797 : gimplify_omp_ctxp = ctx;
16147 27797 : goto end_adjust_omp_map_clause;
16148 : }
16149 :
16150 25498 : if ((code == OMP_TARGET
16151 : || code == OMP_TARGET_DATA
16152 : || code == OMP_TARGET_ENTER_DATA
16153 14365 : || code == OMP_TARGET_EXIT_DATA)
16154 26043 : && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
16155 : {
16156 233 : bool firstprivatize = false;
16157 :
16158 238 : for (struct gimplify_omp_ctx *octx = ctx->outer_context; octx;
16159 5 : octx = octx->outer_context)
16160 : {
16161 6 : splay_tree_node n
16162 12 : = splay_tree_lookup (octx->variables,
16163 6 : (splay_tree_key) OMP_CLAUSE_DECL (c));
16164 : /* If this is contained in an outer OpenMP region as a
16165 : firstprivate value, remove the attach/detach. */
16166 6 : if (n && (n->value & GOVD_FIRSTPRIVATE))
16167 : {
16168 : firstprivatize = true;
16169 : break;
16170 : }
16171 : }
16172 :
16173 233 : enum gomp_map_kind map_kind;
16174 233 : if (firstprivatize)
16175 : map_kind = GOMP_MAP_FIRSTPRIVATE_POINTER;
16176 232 : else if (code == OMP_TARGET_EXIT_DATA)
16177 : map_kind = GOMP_MAP_DETACH;
16178 : else
16179 184 : map_kind = GOMP_MAP_ATTACH;
16180 233 : OMP_CLAUSE_SET_MAP_KIND (c, map_kind);
16181 : }
16182 25265 : else if ((ctx->region_type & ORT_ACC) != 0
16183 25265 : && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
16184 : {
16185 0 : enum gomp_map_kind map_kind = (code == OACC_EXIT_DATA
16186 0 : ? GOMP_MAP_DETACH
16187 : : GOMP_MAP_ATTACH);
16188 0 : OMP_CLAUSE_SET_MAP_KIND (c, map_kind);
16189 : }
16190 :
16191 25498 : n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
16192 25498 : if ((ctx->region_type & ORT_TARGET) != 0
16193 15800 : && !(n->value & GOVD_SEEN)
16194 1479 : && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0
16195 26730 : && (!is_global_var (decl)
16196 75 : || !lookup_attribute ("omp declare target link",
16197 75 : DECL_ATTRIBUTES (decl))))
16198 : {
16199 1220 : remove = true;
16200 : /* For struct element mapping, if struct is never referenced
16201 : in target block and none of the mapping has always modifier,
16202 : remove all the struct element mappings, which immediately
16203 : follow the GOMP_MAP_STRUCT map clause. */
16204 1220 : if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
16205 1220 : || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT_UNORD)
16206 : {
16207 28 : HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
16208 76 : while (cnt--)
16209 48 : OMP_CLAUSE_CHAIN (c)
16210 48 : = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c));
16211 : }
16212 : }
16213 24278 : else if (DECL_SIZE (decl)
16214 24254 : && !poly_int_tree_p (DECL_SIZE (decl))
16215 199 : && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER
16216 199 : && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
16217 24329 : && (OMP_CLAUSE_MAP_KIND (c)
16218 : != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
16219 : {
16220 : /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
16221 : for these, TREE_CODE (DECL_SIZE (decl)) will always be
16222 : INTEGER_CST. */
16223 51 : gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);
16224 :
16225 51 : tree decl2 = DECL_VALUE_EXPR (decl);
16226 51 : gcc_assert (INDIRECT_REF_P (decl2));
16227 51 : decl2 = TREE_OPERAND (decl2, 0);
16228 51 : gcc_assert (DECL_P (decl2));
16229 51 : tree mem = build_simple_mem_ref (decl2);
16230 51 : OMP_CLAUSE_DECL (c) = mem;
16231 51 : OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
16232 51 : if (ctx->outer_context)
16233 : {
16234 15 : omp_notice_variable (ctx->outer_context, decl2, true);
16235 30 : omp_notice_variable (ctx->outer_context,
16236 15 : OMP_CLAUSE_SIZE (c), true);
16237 : }
16238 51 : if (((ctx->region_type & ORT_TARGET) != 0
16239 24 : || !ctx->target_firstprivatize_array_bases)
16240 34 : && ((n->value & GOVD_SEEN) == 0
16241 30 : || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0))
16242 : {
16243 34 : tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
16244 : OMP_CLAUSE_MAP);
16245 34 : OMP_CLAUSE_DECL (nc) = decl;
16246 34 : OMP_CLAUSE_SIZE (nc) = size_zero_node;
16247 34 : if (ctx->target_firstprivatize_array_bases)
16248 27 : OMP_CLAUSE_SET_MAP_KIND (nc,
16249 : GOMP_MAP_FIRSTPRIVATE_POINTER);
16250 : else
16251 7 : OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
16252 34 : OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
16253 34 : OMP_CLAUSE_CHAIN (c) = nc;
16254 34 : c = nc;
16255 : }
16256 : }
16257 : else
16258 : {
16259 24227 : if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
16260 0 : OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
16261 24227 : gcc_assert ((n->value & GOVD_SEEN) == 0
16262 : || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
16263 : == 0));
16264 : }
16265 :
16266 : /* If we have a target region, we can push all the attaches to the
16267 : end of the list (we may have standalone "attach" operations
16268 : synthesized for GOMP_MAP_STRUCT nodes that must be processed after
16269 : the attachment point AND the pointed-to block have been mapped).
16270 : If we have something else, e.g. "enter data", we need to keep
16271 : "attach" nodes together with the previous node they attach to so
16272 : that separate "exit data" operations work properly (see
16273 : libgomp/target.c). */
16274 25498 : if ((ctx->region_type & ORT_TARGET) != 0
16275 25498 : && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
16276 15684 : || (OMP_CLAUSE_MAP_KIND (c)
16277 : == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION)))
16278 : move_attach = true;
16279 :
16280 55122 : end_adjust_omp_map_clause:
16281 55122 : exit_omp_iterator_loop_context (c);
16282 55122 : break;
16283 :
16284 8207 : case OMP_CLAUSE_TO:
16285 8207 : case OMP_CLAUSE_FROM:
16286 8207 : case OMP_CLAUSE__CACHE_:
16287 8207 : decl = OMP_CLAUSE_DECL (c);
16288 8207 : if (!DECL_P (decl))
16289 : break;
16290 6188 : if (DECL_SIZE (decl)
16291 6188 : && !poly_int_tree_p (DECL_SIZE (decl)))
16292 : {
16293 4 : tree decl2 = DECL_VALUE_EXPR (decl);
16294 4 : gcc_assert (INDIRECT_REF_P (decl2));
16295 4 : decl2 = TREE_OPERAND (decl2, 0);
16296 4 : gcc_assert (DECL_P (decl2));
16297 4 : tree mem = build_simple_mem_ref (decl2);
16298 4 : OMP_CLAUSE_DECL (c) = mem;
16299 4 : OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
16300 4 : if (ctx->outer_context)
16301 : {
16302 4 : omp_notice_variable (ctx->outer_context, decl2, true);
16303 4 : omp_notice_variable (ctx->outer_context,
16304 4 : OMP_CLAUSE_SIZE (c), true);
16305 : }
16306 : }
16307 6184 : else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
16308 0 : OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
16309 : break;
16310 :
16311 15213 : case OMP_CLAUSE_REDUCTION:
16312 15213 : if (OMP_CLAUSE_REDUCTION_INSCAN (c))
16313 : {
16314 811 : decl = OMP_CLAUSE_DECL (c);
16315 811 : n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
16316 811 : if ((n->value & GOVD_REDUCTION_INSCAN) == 0)
16317 : {
16318 69 : remove = true;
16319 69 : error_at (OMP_CLAUSE_LOCATION (c),
16320 : "%qD specified in %<inscan%> %<reduction%> clause "
16321 : "but not in %<scan%> directive clause", decl);
16322 69 : break;
16323 : }
16324 : has_inscan_reductions = true;
16325 : }
16326 : /* FALLTHRU */
16327 17748 : case OMP_CLAUSE_IN_REDUCTION:
16328 17748 : case OMP_CLAUSE_TASK_REDUCTION:
16329 17748 : decl = OMP_CLAUSE_DECL (c);
16330 : /* OpenACC reductions need a present_or_copy data clause.
16331 : Add one if necessary. Emit error when the reduction is private. */
16332 17748 : if (ctx->region_type == ORT_ACC_PARALLEL
16333 17216 : || ctx->region_type == ORT_ACC_SERIAL)
16334 : {
16335 803 : n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
16336 803 : if (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
16337 : {
16338 13 : remove = true;
16339 13 : error_at (OMP_CLAUSE_LOCATION (c), "invalid private "
16340 13 : "reduction on %qE", DECL_NAME (decl));
16341 : }
16342 790 : else if ((n->value & GOVD_MAP) == 0)
16343 : {
16344 524 : tree next = OMP_CLAUSE_CHAIN (c);
16345 524 : tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_MAP);
16346 524 : OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_TOFROM);
16347 524 : OMP_CLAUSE_DECL (nc) = decl;
16348 524 : OMP_CLAUSE_CHAIN (c) = nc;
16349 524 : lang_hooks.decls.omp_finish_clause (nc, pre_p,
16350 524 : (ctx->region_type
16351 : & ORT_ACC) != 0);
16352 550 : while (1)
16353 : {
16354 537 : OMP_CLAUSE_MAP_IN_REDUCTION (nc) = 1;
16355 537 : if (OMP_CLAUSE_CHAIN (nc) == NULL)
16356 : break;
16357 13 : nc = OMP_CLAUSE_CHAIN (nc);
16358 : }
16359 524 : OMP_CLAUSE_CHAIN (nc) = next;
16360 524 : n->value |= GOVD_MAP;
16361 : }
16362 : }
16363 17748 : if (DECL_P (decl)
16364 17748 : && omp_shared_to_firstprivate_optimizable_decl_p (decl))
16365 7963 : omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
16366 : break;
16367 :
16368 3541 : case OMP_CLAUSE_ALLOCATE:
16369 3541 : decl = OMP_CLAUSE_DECL (c);
16370 3541 : n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
16371 3541 : if (n != NULL && !(n->value & GOVD_SEEN))
16372 : {
16373 1176 : if ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE | GOVD_LINEAR))
16374 : != 0
16375 1176 : && (n->value & (GOVD_REDUCTION | GOVD_LASTPRIVATE)) == 0)
16376 : remove = true;
16377 : }
16378 : if (!remove
16379 2365 : && OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
16380 1273 : && TREE_CODE (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)) != INTEGER_CST
16381 483 : && ((ctx->region_type & (ORT_PARALLEL | ORT_TARGET)) != 0
16382 347 : || (ctx->region_type & ORT_TASKLOOP) == ORT_TASK
16383 287 : || (ctx->region_type & ORT_HOST_TEAMS) == ORT_HOST_TEAMS))
16384 : {
16385 214 : tree allocator = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
16386 214 : n = splay_tree_lookup (ctx->variables, (splay_tree_key) allocator);
16387 214 : if (n == NULL)
16388 : {
16389 92 : enum omp_clause_default_kind default_kind
16390 : = ctx->default_kind;
16391 92 : ctx->default_kind = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
16392 92 : omp_notice_variable (ctx, OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
16393 : true);
16394 92 : ctx->default_kind = default_kind;
16395 : }
16396 : else
16397 122 : omp_notice_variable (ctx, OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
16398 : true);
16399 : }
16400 : break;
16401 :
16402 : case OMP_CLAUSE_COPYIN:
16403 : case OMP_CLAUSE_COPYPRIVATE:
16404 : case OMP_CLAUSE_IF:
16405 : case OMP_CLAUSE_SELF:
16406 : case OMP_CLAUSE_NUM_THREADS:
16407 : case OMP_CLAUSE_NUM_TEAMS:
16408 : case OMP_CLAUSE_THREAD_LIMIT:
16409 : case OMP_CLAUSE_DIST_SCHEDULE:
16410 : case OMP_CLAUSE_DEVICE:
16411 : case OMP_CLAUSE_SCHEDULE:
16412 : case OMP_CLAUSE_NOWAIT:
16413 : case OMP_CLAUSE_ORDERED:
16414 : case OMP_CLAUSE_DEFAULT:
16415 : case OMP_CLAUSE_UNTIED:
16416 : case OMP_CLAUSE_COLLAPSE:
16417 : case OMP_CLAUSE_FINAL:
16418 : case OMP_CLAUSE_MERGEABLE:
16419 : case OMP_CLAUSE_PROC_BIND:
16420 : case OMP_CLAUSE_SAFELEN:
16421 : case OMP_CLAUSE_SIMDLEN:
16422 : case OMP_CLAUSE_DEPEND:
16423 : case OMP_CLAUSE_DOACROSS:
16424 : case OMP_CLAUSE_PRIORITY:
16425 : case OMP_CLAUSE_GRAINSIZE:
16426 : case OMP_CLAUSE_NUM_TASKS:
16427 : case OMP_CLAUSE_NOGROUP:
16428 : case OMP_CLAUSE_THREADS:
16429 : case OMP_CLAUSE_SIMD:
16430 : case OMP_CLAUSE_FILTER:
16431 : case OMP_CLAUSE_HINT:
16432 : case OMP_CLAUSE_DEFAULTMAP:
16433 : case OMP_CLAUSE_ORDER:
16434 : case OMP_CLAUSE_BIND:
16435 : case OMP_CLAUSE_DETACH:
16436 : case OMP_CLAUSE_USE_DEVICE_PTR:
16437 : case OMP_CLAUSE_USE_DEVICE_ADDR:
16438 : case OMP_CLAUSE_ASYNC:
16439 : case OMP_CLAUSE_WAIT:
16440 : case OMP_CLAUSE_INDEPENDENT:
16441 : case OMP_CLAUSE_NUM_GANGS:
16442 : case OMP_CLAUSE_NUM_WORKERS:
16443 : case OMP_CLAUSE_VECTOR_LENGTH:
16444 : case OMP_CLAUSE_GANG:
16445 : case OMP_CLAUSE_WORKER:
16446 : case OMP_CLAUSE_VECTOR:
16447 : case OMP_CLAUSE_AUTO:
16448 : case OMP_CLAUSE_SEQ:
16449 : case OMP_CLAUSE_TILE:
16450 : case OMP_CLAUSE_IF_PRESENT:
16451 : case OMP_CLAUSE_FINALIZE:
16452 : case OMP_CLAUSE_INCLUSIVE:
16453 : case OMP_CLAUSE_EXCLUSIVE:
16454 : case OMP_CLAUSE_USES_ALLOCATORS:
16455 : case OMP_CLAUSE_DEVICE_TYPE:
16456 : break;
16457 :
16458 0 : case OMP_CLAUSE_NOHOST:
16459 0 : default:
16460 0 : gcc_unreachable ();
16461 : }
16462 :
16463 128294 : if (remove)
16464 9883 : *list_p = OMP_CLAUSE_CHAIN (c);
16465 236799 : else if (move_attach)
16466 : {
16467 : /* Remove attach node from here, separate out into its own list. */
16468 2334 : *attach_tail = c;
16469 2334 : *list_p = OMP_CLAUSE_CHAIN (c);
16470 2334 : OMP_CLAUSE_CHAIN (c) = NULL_TREE;
16471 2334 : attach_tail = &OMP_CLAUSE_CHAIN (c);
16472 : }
16473 : else
16474 234465 : list_p = &OMP_CLAUSE_CHAIN (c);
16475 : }
16476 :
16477 : /* Splice attach nodes at the end of the list. */
16478 128465 : if (attach_list)
16479 : {
16480 1108 : *list_p = attach_list;
16481 1108 : list_p = attach_tail;
16482 : }
16483 :
16484 : /* Add in any implicit data sharing. */
16485 128465 : struct gimplify_adjust_omp_clauses_data data;
16486 128465 : if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0)
16487 : {
16488 : /* OpenMP. Implicit clauses are added at the start of the clause list,
16489 : but after any non-map clauses. */
16490 : tree *implicit_add_list_p = orig_list_p;
16491 250907 : while (*implicit_add_list_p
16492 250907 : && OMP_CLAUSE_CODE (*implicit_add_list_p) != OMP_CLAUSE_MAP)
16493 152285 : implicit_add_list_p = &OMP_CLAUSE_CHAIN (*implicit_add_list_p);
16494 98622 : data.list_p = implicit_add_list_p;
16495 : }
16496 : else
16497 : /* OpenACC. */
16498 29843 : data.list_p = list_p;
16499 128465 : data.pre_p = pre_p;
16500 128465 : splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);
16501 :
16502 128465 : if (has_inscan_reductions)
16503 2431 : for (c = *orig_list_p; c; c = OMP_CLAUSE_CHAIN (c))
16504 1822 : if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
16505 1822 : && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
16506 : {
16507 5 : error_at (OMP_CLAUSE_LOCATION (c),
16508 : "%<inscan%> %<reduction%> clause used together with "
16509 : "%<linear%> clause for a variable other than loop "
16510 : "iterator");
16511 5 : break;
16512 : }
16513 :
16514 128465 : gimplify_omp_ctxp = ctx->outer_context;
16515 128465 : delete_omp_context (ctx);
16516 128465 : }
16517 :
16518 : /* Try to evaluate a novariants clause. Return 1 if true, 0 if false or absent,
16519 : * -1 if run-time evaluation is needed. */
16520 :
16521 : int
16522 248697 : omp_has_novariants (void)
16523 : {
16524 248697 : struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
16525 248697 : if (ctx != NULL && ctx->code == OMP_DISPATCH && !ctx->in_call_args)
16526 : {
16527 3290 : tree c = omp_find_clause (ctx->clauses, OMP_CLAUSE_NOVARIANTS);
16528 3290 : if (c != NULL_TREE)
16529 : {
16530 299 : if (integer_nonzerop (OMP_CLAUSE_NOVARIANTS_EXPR (c)))
16531 : return 1;
16532 171 : else if (integer_zerop (OMP_CLAUSE_NOVARIANTS_EXPR (c)))
16533 : return 0;
16534 : else
16535 : return -1;
16536 : }
16537 : return 0;
16538 : }
16539 : return 0;
16540 : }
16541 :
16542 : /* Try to evaluate a nocontext clause. Return 1 if true, 0 if false or absent,
16543 : * -1 if run-time evaluation is needed. */
16544 :
16545 : static int
16546 1903 : omp_has_nocontext (void)
16547 : {
16548 1903 : struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
16549 1903 : if (ctx != NULL && ctx->code == OMP_DISPATCH)
16550 : {
16551 1903 : tree c = omp_find_clause (ctx->clauses, OMP_CLAUSE_NOCONTEXT);
16552 1903 : if (c != NULL_TREE)
16553 : {
16554 256 : if (integer_nonzerop (OMP_CLAUSE_NOCONTEXT_EXPR (c)))
16555 : return 1;
16556 94 : else if (integer_zerop (OMP_CLAUSE_NOCONTEXT_EXPR (c)))
16557 : return 0;
16558 : else
16559 : return -1;
16560 : }
16561 : return 0;
16562 : }
16563 : return 0;
16564 : }
16565 :
16566 : /* Collect a list of traits for enclosing constructs in the current
16567 : OpenMP context. The list is in the same format as the trait selector
16568 : list of construct trait sets built by the front ends.
16569 :
16570 : Per the OpenMP specification, the construct trait set includes constructs
16571 : up to an enclosing "target" construct. If there is no "target" construct,
16572 : then additional things may be added to the construct trait set (simd for
16573 : simd clones, additional constructs associated with "declare variant",
16574 : the target trait for "declare target"); those are not handled here.
16575 : In particular simd clones are not known during gimplification so
16576 : matching/scoring of context selectors that might involve them needs
16577 : to be deferred to the omp_device_lower pass. */
16578 :
16579 : tree
16580 2209 : omp_get_construct_context (void)
16581 : {
16582 2209 : tree result = NULL_TREE;
16583 4180 : for (struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp; ctx;)
16584 : {
16585 2092 : if (((ctx->region_type & (ORT_TARGET | ORT_IMPLICIT_TARGET | ORT_ACC))
16586 : == ORT_TARGET)
16587 121 : && ctx->code == OMP_TARGET)
16588 : {
16589 121 : result = make_trait_selector (OMP_TRAIT_CONSTRUCT_TARGET,
16590 : NULL_TREE, NULL_TREE, result);
16591 : /* We're not interested in any outer constructs. */
16592 121 : break;
16593 : }
16594 1971 : else if ((ctx->region_type & ORT_PARALLEL) && ctx->code == OMP_PARALLEL)
16595 214 : result = make_trait_selector (OMP_TRAIT_CONSTRUCT_PARALLEL,
16596 : NULL_TREE, NULL_TREE, result);
16597 1757 : else if ((ctx->region_type & ORT_TEAMS) && ctx->code == OMP_TEAMS)
16598 75 : result = make_trait_selector (OMP_TRAIT_CONSTRUCT_TEAMS,
16599 : NULL_TREE, NULL_TREE, result);
16600 1682 : else if (ctx->region_type == ORT_WORKSHARE && ctx->code == OMP_FOR)
16601 164 : result = make_trait_selector (OMP_TRAIT_CONSTRUCT_FOR,
16602 : NULL_TREE, NULL_TREE, result);
16603 1518 : else if (ctx->code == OMP_DISPATCH && omp_has_nocontext () != 1)
16604 1279 : result = make_trait_selector (OMP_TRAIT_CONSTRUCT_DISPATCH,
16605 : NULL_TREE, NULL_TREE, result);
16606 239 : else if (ctx->region_type == ORT_SIMD
16607 32 : && ctx->code == OMP_SIMD
16608 271 : && !omp_find_clause (ctx->clauses, OMP_CLAUSE_BIND))
16609 : {
16610 32 : tree props = NULL_TREE;
16611 32 : tree *last = &props;
16612 72 : for (tree c = ctx->clauses; c; c = OMP_CLAUSE_CHAIN (c))
16613 40 : if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMDLEN
16614 32 : || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_INBRANCH
16615 72 : || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NOTINBRANCH)
16616 : {
16617 8 : *last = unshare_expr (c);
16618 8 : last = &(OMP_CLAUSE_CHAIN (c));
16619 : }
16620 32 : result = make_trait_selector (OMP_TRAIT_CONSTRUCT_SIMD,
16621 : NULL_TREE, props, result);
16622 : }
16623 207 : else if (ctx->region_type == ORT_WORKSHARE
16624 5 : && ctx->code == OMP_LOOP
16625 0 : && ctx->outer_context
16626 0 : && ctx->outer_context->region_type == ORT_COMBINED_PARALLEL
16627 0 : && ctx->outer_context->outer_context
16628 0 : && ctx->outer_context->outer_context->code == OMP_LOOP
16629 0 : && ctx->outer_context->outer_context->distribute)
16630 1971 : ctx = ctx->outer_context->outer_context;
16631 1971 : ctx = ctx->outer_context;
16632 : }
16633 :
16634 2209 : return result;
16635 : }
16636 :
16637 : /* Gimplify OACC_CACHE. */
16638 :
16639 : static void
16640 665 : gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
16641 : {
16642 665 : tree expr = *expr_p;
16643 :
16644 665 : gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC,
16645 : OACC_CACHE);
16646 665 : gimplify_adjust_omp_clauses (pre_p, NULL, &OACC_CACHE_CLAUSES (expr),
16647 : OACC_CACHE);
16648 :
16649 : /* TODO: Do something sensible with this information. */
16650 :
16651 665 : *expr_p = NULL_TREE;
16652 665 : }
16653 :
16654 : /* Helper function of gimplify_oacc_declare. The helper's purpose is to,
16655 : if required, translate 'kind' in CLAUSE into an 'entry' kind and 'exit'
16656 : kind. The entry kind will replace the one in CLAUSE, while the exit
16657 : kind will be used in a new omp_clause and returned to the caller. */
16658 :
16659 : static tree
16660 190 : gimplify_oacc_declare_1 (tree clause)
16661 : {
16662 190 : HOST_WIDE_INT kind, new_op;
16663 190 : bool ret = false;
16664 190 : tree c = NULL;
16665 :
16666 190 : kind = OMP_CLAUSE_MAP_KIND (clause);
16667 :
16668 190 : switch (kind)
16669 : {
16670 : case GOMP_MAP_ALLOC:
16671 : new_op = GOMP_MAP_RELEASE;
16672 : ret = true;
16673 : break;
16674 :
16675 29 : case GOMP_MAP_FROM:
16676 29 : OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
16677 29 : new_op = GOMP_MAP_FROM;
16678 29 : ret = true;
16679 29 : break;
16680 :
16681 40 : case GOMP_MAP_TOFROM:
16682 40 : OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO);
16683 40 : new_op = GOMP_MAP_FROM;
16684 40 : ret = true;
16685 40 : break;
16686 :
16687 : case GOMP_MAP_DEVICE_RESIDENT:
16688 : case GOMP_MAP_FORCE_DEVICEPTR:
16689 : case GOMP_MAP_FORCE_PRESENT:
16690 : case GOMP_MAP_LINK:
16691 : case GOMP_MAP_POINTER:
16692 : case GOMP_MAP_TO:
16693 : break;
16694 :
16695 0 : default:
16696 0 : gcc_unreachable ();
16697 69 : break;
16698 : }
16699 :
16700 69 : if (ret)
16701 : {
16702 116 : c = build_omp_clause (OMP_CLAUSE_LOCATION (clause), OMP_CLAUSE_MAP);
16703 116 : OMP_CLAUSE_SET_MAP_KIND (c, new_op);
16704 116 : OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clause);
16705 : }
16706 :
16707 190 : return c;
16708 : }
16709 :
16710 : /* Gimplify OACC_DECLARE. */
16711 :
16712 : static void
16713 254 : gimplify_oacc_declare (tree *expr_p, gimple_seq *pre_p)
16714 : {
16715 254 : tree expr = *expr_p;
16716 254 : gomp_target *stmt;
16717 254 : tree clauses, t, decl;
16718 :
16719 254 : clauses = OACC_DECLARE_CLAUSES (expr);
16720 :
16721 254 : gimplify_scan_omp_clauses (&clauses, pre_p, ORT_TARGET_DATA, OACC_DECLARE);
16722 254 : gimplify_adjust_omp_clauses (pre_p, NULL, &clauses, OACC_DECLARE);
16723 :
16724 516 : for (t = clauses; t; t = OMP_CLAUSE_CHAIN (t))
16725 : {
16726 262 : decl = OMP_CLAUSE_DECL (t);
16727 :
16728 262 : if (TREE_CODE (decl) == MEM_REF)
16729 8 : decl = TREE_OPERAND (decl, 0);
16730 :
16731 262 : if (VAR_P (decl) && !is_oacc_declared (decl))
16732 : {
16733 262 : tree attr = get_identifier ("oacc declare target");
16734 262 : DECL_ATTRIBUTES (decl) = tree_cons (attr, NULL_TREE,
16735 262 : DECL_ATTRIBUTES (decl));
16736 : }
16737 :
16738 262 : if (VAR_P (decl)
16739 262 : && !is_global_var (decl)
16740 452 : && DECL_CONTEXT (decl) == current_function_decl)
16741 : {
16742 190 : tree c = gimplify_oacc_declare_1 (t);
16743 190 : if (c)
16744 : {
16745 116 : if (oacc_declare_returns == NULL)
16746 40 : oacc_declare_returns = new hash_map<tree, tree>;
16747 :
16748 116 : oacc_declare_returns->put (decl, c);
16749 : }
16750 : }
16751 :
16752 262 : if (gimplify_omp_ctxp)
16753 72 : omp_add_variable (gimplify_omp_ctxp, decl, GOVD_SEEN);
16754 : }
16755 :
16756 254 : stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
16757 : clauses);
16758 :
16759 254 : gimplify_seq_add_stmt (pre_p, stmt);
16760 :
16761 254 : *expr_p = NULL_TREE;
16762 254 : }
16763 :
16764 : /* Gimplify the contents of an OMP_PARALLEL statement. This involves
16765 : gimplification of the body, as well as scanning the body for used
16766 : variables. We need to do this scan now, because variable-sized
16767 : decls will be decomposed during gimplification. */
16768 :
16769 : static void
16770 18228 : gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
16771 : {
16772 18228 : tree expr = *expr_p;
16773 18228 : gimple *g;
16774 18228 : gimple_seq body = NULL;
16775 :
16776 36456 : gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
16777 18228 : OMP_PARALLEL_COMBINED (expr)
16778 : ? ORT_COMBINED_PARALLEL
16779 : : ORT_PARALLEL, OMP_PARALLEL);
16780 :
16781 18228 : push_gimplify_context ();
16782 :
16783 18228 : g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
16784 18228 : if (gimple_code (g) == GIMPLE_BIND)
16785 18228 : pop_gimplify_context (g);
16786 : else
16787 0 : pop_gimplify_context (NULL);
16788 :
16789 18228 : gimplify_adjust_omp_clauses (pre_p, body, &OMP_PARALLEL_CLAUSES (expr),
16790 : OMP_PARALLEL);
16791 :
16792 36456 : g = gimple_build_omp_parallel (body,
16793 18228 : OMP_PARALLEL_CLAUSES (expr),
16794 : NULL_TREE, NULL_TREE);
16795 18228 : if (OMP_PARALLEL_COMBINED (expr))
16796 12535 : gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
16797 18228 : gimplify_seq_add_stmt (pre_p, g);
16798 18228 : *expr_p = NULL_TREE;
16799 18228 : }
16800 :
16801 : /* Gimplify the contents of an OMP_TASK statement. This involves
16802 : gimplification of the body, as well as scanning the body for used
16803 : variables. We need to do this scan now, because variable-sized
16804 : decls will be decomposed during gimplification. */
16805 :
16806 : static void
16807 3852 : gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
16808 : {
16809 3852 : tree expr = *expr_p;
16810 3852 : gimple *g;
16811 3852 : gimple_seq body = NULL;
16812 3852 : bool nowait = false;
16813 3852 : bool has_depend = false;
16814 :
16815 3852 : if (OMP_TASK_BODY (expr) == NULL_TREE)
16816 : {
16817 238 : for (tree c = OMP_TASK_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
16818 151 : if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
16819 : {
16820 109 : has_depend = true;
16821 109 : if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_MUTEXINOUTSET)
16822 : {
16823 9 : error_at (OMP_CLAUSE_LOCATION (c),
16824 : "%<mutexinoutset%> kind in %<depend%> clause on a "
16825 : "%<taskwait%> construct");
16826 9 : break;
16827 : }
16828 : }
16829 42 : else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NOWAIT)
16830 42 : nowait = true;
16831 96 : if (nowait && !has_depend)
16832 : {
16833 5 : error_at (EXPR_LOCATION (expr),
16834 : "%<taskwait%> construct with %<nowait%> clause but no "
16835 : "%<depend%> clauses");
16836 5 : *expr_p = NULL_TREE;
16837 5 : return;
16838 : }
16839 : }
16840 :
16841 11541 : gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
16842 3847 : omp_find_clause (OMP_TASK_CLAUSES (expr),
16843 : OMP_CLAUSE_UNTIED)
16844 : ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK);
16845 :
16846 3847 : if (OMP_TASK_BODY (expr))
16847 : {
16848 3756 : push_gimplify_context ();
16849 :
16850 3756 : g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
16851 3756 : if (gimple_code (g) == GIMPLE_BIND)
16852 3756 : pop_gimplify_context (g);
16853 : else
16854 0 : pop_gimplify_context (NULL);
16855 : }
16856 :
16857 3847 : gimplify_adjust_omp_clauses (pre_p, body, &OMP_TASK_CLAUSES (expr),
16858 : OMP_TASK);
16859 :
16860 7694 : g = gimple_build_omp_task (body,
16861 3847 : OMP_TASK_CLAUSES (expr),
16862 : NULL_TREE, NULL_TREE,
16863 : NULL_TREE, NULL_TREE, NULL_TREE);
16864 3847 : if (OMP_TASK_BODY (expr) == NULL_TREE)
16865 91 : gimple_omp_task_set_taskwait_p (g, true);
16866 3847 : gimplify_seq_add_stmt (pre_p, g);
16867 3847 : *expr_p = NULL_TREE;
16868 : }
16869 :
16870 : /* Helper function for gimplify_omp_for. If *TP is not a gimple constant,
16871 : force it into a temporary initialized in PRE_P and add firstprivate clause
16872 : to ORIG_FOR_STMT. */
16873 :
16874 : static void
16875 4399 : gimplify_omp_taskloop_expr (tree type, tree *tp, gimple_seq *pre_p,
16876 : tree orig_for_stmt)
16877 : {
16878 4399 : if (*tp == NULL || is_gimple_constant (*tp))
16879 : return;
16880 :
16881 779 : if (TREE_CODE (*tp) == SAVE_EXPR)
16882 135 : gimplify_save_expr (tp, pre_p, NULL);
16883 : else
16884 644 : *tp = get_initialized_tmp_var (*tp, pre_p, NULL, false);
16885 : /* Reference to pointer conversion is considered useless,
16886 : but is significant for firstprivate clause. Force it
16887 : here. */
16888 779 : if (type
16889 687 : && TREE_CODE (type) == POINTER_TYPE
16890 869 : && TREE_CODE (TREE_TYPE (*tp)) == REFERENCE_TYPE)
16891 : {
16892 2 : tree v = create_tmp_var (TYPE_MAIN_VARIANT (type));
16893 2 : tree m = build2 (INIT_EXPR, TREE_TYPE (v), v, *tp);
16894 2 : gimplify_and_add (m, pre_p);
16895 2 : *tp = v;
16896 : }
16897 :
16898 779 : tree c = build_omp_clause (input_location, OMP_CLAUSE_FIRSTPRIVATE);
16899 779 : OMP_CLAUSE_DECL (c) = *tp;
16900 779 : OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
16901 779 : OMP_FOR_CLAUSES (orig_for_stmt) = c;
16902 : }
16903 :
16904 : /* Helper function of gimplify_omp_for, find OMP_ORDERED with
16905 : null OMP_ORDERED_BODY inside of OMP_FOR's body. */
16906 :
16907 : static tree
16908 16496 : find_standalone_omp_ordered (tree *tp, int *walk_subtrees, void *)
16909 : {
16910 16496 : switch (TREE_CODE (*tp))
16911 : {
16912 901 : case OMP_ORDERED:
16913 901 : if (OMP_ORDERED_BODY (*tp) == NULL_TREE)
16914 : return *tp;
16915 : break;
16916 23 : case OMP_SIMD:
16917 23 : case OMP_PARALLEL:
16918 23 : case OMP_TARGET:
16919 23 : *walk_subtrees = 0;
16920 23 : break;
16921 : default:
16922 : break;
16923 : }
16924 : return NULL_TREE;
16925 : }
16926 :
/* Gimplify standalone loop transforming directive which has the
   transformations applied already.  So, all that is needed is gimplify
   the remaining loops as normal loops.

   Each non-NULL level of the collapsed loop nest is lowered to the
   classic gimple shape

       <init>; goto l2; l1: <body>; <incr>; l2: if (<cond>) goto l1; l3:

   with init statements accumulated front-to-back in PRE_BODY and the
   incr/cond/label statements accumulated back-to-front in POST_BODY so
   inner loops nest correctly around the user body.  */

static enum gimplify_status
gimplify_omp_loop_xform (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt = *expr_p;

  if (OMP_FOR_PRE_BODY (for_stmt))
    gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), pre_p);

  gimple_seq pre_body = NULL, post_body = NULL;
  for (int i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
    {
      /* NULL init slots correspond to loop levels consumed by the
	 transformation; nothing to emit for them.  */
      if (TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i) == NULL_TREE)
	continue;
      tree iters = NULL_TREE;
      /* For a non-partial "omp unroll" of the outermost level, compute an
	 iteration count to attach as an unroll annotation: the exact trip
	 count for "full", otherwise a default factor of 8.  */
      if (i == 0
	  && TREE_CODE (for_stmt) == OMP_UNROLL
	  && !omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_PARTIAL))
	{
	  if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_FULL))
	    iters = omp_loop_number_of_iterations (for_stmt, 0, NULL);
	  else
	    iters = build_int_cst (integer_type_node, 8);
	}
      tree t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
      gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
      tree decl = TREE_OPERAND (t, 0);
      gcc_assert (DECL_P (decl));
      gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
		  || POINTER_TYPE_P (TREE_TYPE (decl)));
      /* Artificial private iterators need to be registered in some
	 enclosing OpenMP context if no context knows about them yet.  */
      if (DECL_ARTIFICIAL (decl)
	  && TREE_PRIVATE (t)
	  && gimplify_omp_ctxp
	  && gimplify_omp_ctxp->region_type != ORT_NONE)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  do
	    {
	      splay_tree_node n
		= splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      if (n != NULL)
		break;
	      /* Add the decl as private in the innermost context that can
		 hold data-sharing clauses.  */
	      else if (ctx->region_type != ORT_WORKSHARE
		       && ctx->region_type != ORT_TASKGROUP
		       && ctx->region_type != ORT_SIMD
		       && ctx->region_type != ORT_ACC
		       && !(ctx->region_type & ORT_TARGET_DATA))
		{
		  omp_add_variable (ctx, decl, GOVD_PRIVATE);
		  break;
		}
	      ctx = ctx->outer_context;
	    }
	  while (ctx);
	}
      /* A TREE_VEC operand here only appears after earlier errors;
	 skip the level.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
	{
	  gcc_assert (seen_error ());
	  continue;
	}
      gimplify_expr (&TREE_OPERAND (t, 1), pre_p, NULL, is_gimple_val,
		     fb_rvalue);
      gimplify_and_add (t, &pre_body);
      t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
      gcc_assert (TREE_OPERAND (t, 0) == decl);
      if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
	{
	  gcc_assert (seen_error ());
	  continue;
	}
      gimplify_expr (&TREE_OPERAND (t, 1), pre_p, NULL, is_gimple_val,
		     fb_rvalue);
      /* l1 = loop body, l2 = condition test, l3 = loop exit.  */
      tree l1 = create_artificial_label (UNKNOWN_LOCATION);
      tree l2 = create_artificial_label (UNKNOWN_LOCATION);
      tree l3 = create_artificial_label (UNKNOWN_LOCATION);
      gimplify_seq_add_stmt (&pre_body, gimple_build_goto (l2));
      gimplify_seq_add_stmt (&pre_body, gimple_build_label (l1));
      gimple_seq this_post_body = NULL;
      t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
      if (TREE_CODE (t) == MODIFY_EXPR)
	{
	  t = TREE_OPERAND (t, 1);
	  /* Canonicalize "step + decl" to "decl + step".  */
	  if (TREE_CODE (t) == PLUS_EXPR
	      && TREE_OPERAND (t, 1) == decl)
	    {
	      TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
	      TREE_OPERAND (t, 0) = decl;
	    }
	  gimplify_expr (&TREE_OPERAND (t, 1), pre_p, NULL, is_gimple_val,
			 fb_rvalue);
	}
      gimplify_and_add (TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i),
			&this_post_body);
      gimplify_seq_add_stmt (&this_post_body, gimple_build_label (l2));
      t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
      gcond *cond = NULL;
      tree d = decl;
      gimplify_expr (&d, &this_post_body, NULL, is_gimple_val, fb_rvalue);
      /* If an unroll factor was computed above and fits in a positive
	 int, wrap the exit test in an ANNOTATE_EXPR so later passes can
	 unroll by that factor.  */
      if (iters && tree_fits_uhwi_p (iters))
	{
	  unsigned HOST_WIDE_INT niters = tree_to_uhwi (iters);
	  if ((unsigned HOST_WIDE_INT) (int) niters == niters
	      && (int) niters > 0)
	    {
	      t = build2 (TREE_CODE (t), boolean_type_node, d,
			  TREE_OPERAND (t, 1));
	      t = build3 (ANNOTATE_EXPR, TREE_TYPE (t), t,
			  build_int_cst (integer_type_node,
					 annot_expr_unroll_kind),
			  build_int_cst (integer_type_node, niters));
	      gimplify_expr (&t, &this_post_body, NULL, is_gimple_val,
			     fb_rvalue);
	      cond = gimple_build_cond (NE_EXPR, t, boolean_false_node,
					l1, l3);
	    }
	}
      if (cond == NULL)
	cond = gimple_build_cond (TREE_CODE (t), d, TREE_OPERAND (t, 1),
				  l1, l3);
      gimplify_seq_add_stmt (&this_post_body, cond);
      gimplify_seq_add_stmt (&this_post_body, gimple_build_label (l3));
      /* Chain this level's tail in front of the inner levels' tails so
	 the loops nest properly.  */
      gimplify_seq_add_seq (&this_post_body, post_body);
      post_body = this_post_body;
    }
  gimplify_seq_add_seq (pre_p, pre_body);
  gimplify_and_add (OMP_FOR_BODY (for_stmt), pre_p);
  gimplify_seq_add_seq (pre_p, post_body);

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}
17061 :
17062 : /* Gimplify the gross structure of an OMP_FOR statement. */
17063 :
17064 : static enum gimplify_status
17065 58081 : gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
17066 : {
17067 58081 : tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t;
17068 58081 : enum gimplify_status ret = GS_ALL_DONE;
17069 58081 : enum gimplify_status tret;
17070 58081 : gomp_for *gfor;
17071 58081 : gimple_seq for_body, for_pre_body;
17072 58081 : int i;
17073 58081 : bitmap has_decl_expr = NULL;
17074 58081 : enum omp_region_type ort = ORT_WORKSHARE;
17075 58081 : bool openacc = TREE_CODE (*expr_p) == OACC_LOOP;
17076 :
17077 58081 : orig_for_stmt = for_stmt = *expr_p;
17078 :
17079 58081 : bool loop_p = (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_BIND)
17080 58081 : != NULL_TREE);
17081 58152 : while (OMP_FOR_INIT (for_stmt) == NULL_TREE)
17082 : {
17083 17738 : tree *data[4] = { NULL, NULL, NULL, NULL };
17084 17738 : gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP);
17085 17738 : inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt),
17086 : find_combined_omp_for, data, NULL);
17087 17738 : if (inner_for_stmt == NULL_TREE)
17088 : {
17089 36 : gcc_assert (seen_error ());
17090 36 : *expr_p = NULL_TREE;
17091 3741 : return GS_ERROR;
17092 : }
17093 17702 : gcc_assert (inner_for_stmt == *data[3]);
17094 17702 : omp_maybe_apply_loop_xforms (data[3],
17095 17702 : data[2]
17096 4279 : ? OMP_FOR_CLAUSES (*data[2])
17097 13423 : : TREE_CODE (for_stmt) == OMP_FOR
17098 13423 : ? OMP_FOR_CLAUSES (for_stmt)
17099 : : NULL_TREE);
17100 17702 : if (inner_for_stmt != *data[3])
17101 71 : continue;
17102 17631 : if (data[2] && OMP_FOR_PRE_BODY (*data[2]))
17103 : {
17104 2 : append_to_statement_list_force (OMP_FOR_PRE_BODY (*data[2]),
17105 : &OMP_FOR_PRE_BODY (for_stmt));
17106 2 : OMP_FOR_PRE_BODY (*data[2]) = NULL_TREE;
17107 : }
17108 17631 : if (OMP_FOR_PRE_BODY (inner_for_stmt))
17109 : {
17110 7193 : append_to_statement_list_force (OMP_FOR_PRE_BODY (inner_for_stmt),
17111 : &OMP_FOR_PRE_BODY (for_stmt));
17112 7193 : OMP_FOR_PRE_BODY (inner_for_stmt) = NULL_TREE;
17113 : }
17114 :
17115 17631 : if (data[0])
17116 : {
17117 : /* We have some statements or variable declarations in between
17118 : the composite construct directives. Move them around the
17119 : inner_for_stmt. */
17120 3705 : data[0] = expr_p;
17121 14820 : for (i = 0; i < 3; i++)
17122 11115 : if (data[i])
17123 : {
17124 7028 : tree t = *data[i];
17125 7028 : if (i < 2 && data[i + 1] == &OMP_BODY (t))
17126 1005 : data[i + 1] = data[i];
17127 7028 : *data[i] = OMP_BODY (t);
17128 7028 : tree body = build3 (BIND_EXPR, void_type_node, NULL_TREE,
17129 : NULL_TREE, make_node (BLOCK));
17130 7028 : OMP_BODY (t) = body;
17131 7028 : append_to_statement_list_force (inner_for_stmt,
17132 : &BIND_EXPR_BODY (body));
17133 7028 : *data[3] = t;
17134 7028 : data[3] = tsi_stmt_ptr (tsi_start (BIND_EXPR_BODY (body)));
17135 7028 : gcc_assert (*data[3] == inner_for_stmt);
17136 : }
17137 : return GS_OK;
17138 : }
17139 :
17140 36734 : for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
17141 22808 : if (!loop_p
17142 21927 : && OMP_FOR_ORIG_DECLS (inner_for_stmt)
17143 11098 : && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
17144 : i)) == TREE_LIST
17145 22876 : && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
17146 : i)))
17147 : {
17148 40 : tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
17149 : /* Class iterators aren't allowed on OMP_SIMD, so the only
17150 : case we need to solve is distribute parallel for. They are
17151 : allowed on the loop construct, but that is already handled
17152 : in gimplify_omp_loop. */
17153 40 : gcc_assert (TREE_CODE (inner_for_stmt) == OMP_FOR
17154 : && TREE_CODE (for_stmt) == OMP_DISTRIBUTE
17155 : && data[1]);
17156 40 : tree orig_decl = TREE_PURPOSE (orig);
17157 40 : tree last = TREE_VALUE (orig);
17158 40 : tree *pc;
17159 40 : for (pc = &OMP_FOR_CLAUSES (inner_for_stmt);
17160 74 : *pc; pc = &OMP_CLAUSE_CHAIN (*pc))
17161 42 : if ((OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE
17162 35 : || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_LASTPRIVATE)
17163 43 : && OMP_CLAUSE_DECL (*pc) == orig_decl)
17164 : break;
17165 40 : if (*pc == NULL_TREE)
17166 : {
17167 32 : tree *spc;
17168 32 : for (spc = &OMP_PARALLEL_CLAUSES (*data[1]);
17169 98 : *spc; spc = &OMP_CLAUSE_CHAIN (*spc))
17170 67 : if (OMP_CLAUSE_CODE (*spc) == OMP_CLAUSE_PRIVATE
17171 67 : && OMP_CLAUSE_DECL (*spc) == orig_decl)
17172 : break;
17173 32 : if (*spc)
17174 : {
17175 1 : tree c = *spc;
17176 1 : *spc = OMP_CLAUSE_CHAIN (c);
17177 1 : OMP_CLAUSE_CHAIN (c) = NULL_TREE;
17178 1 : *pc = c;
17179 : }
17180 : }
17181 40 : if (*pc == NULL_TREE)
17182 : ;
17183 9 : else if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE)
17184 : {
17185 : /* private clause will appear only on inner_for_stmt.
17186 : Change it into firstprivate, and add private clause
17187 : on for_stmt. */
17188 8 : tree c = copy_node (*pc);
17189 8 : OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
17190 8 : OMP_FOR_CLAUSES (for_stmt) = c;
17191 8 : OMP_CLAUSE_CODE (*pc) = OMP_CLAUSE_FIRSTPRIVATE;
17192 8 : lang_hooks.decls.omp_finish_clause (*pc, pre_p, openacc);
17193 : }
17194 : else
17195 : {
17196 : /* lastprivate clause will appear on both inner_for_stmt
17197 : and for_stmt. Add firstprivate clause to
17198 : inner_for_stmt. */
17199 1 : tree c = build_omp_clause (OMP_CLAUSE_LOCATION (*pc),
17200 : OMP_CLAUSE_FIRSTPRIVATE);
17201 1 : OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (*pc);
17202 1 : OMP_CLAUSE_CHAIN (c) = *pc;
17203 1 : *pc = c;
17204 1 : lang_hooks.decls.omp_finish_clause (*pc, pre_p, openacc);
17205 : }
17206 40 : tree c = build_omp_clause (UNKNOWN_LOCATION,
17207 : OMP_CLAUSE_FIRSTPRIVATE);
17208 40 : OMP_CLAUSE_DECL (c) = last;
17209 40 : OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
17210 40 : OMP_PARALLEL_CLAUSES (*data[1]) = c;
17211 40 : c = build_omp_clause (UNKNOWN_LOCATION,
17212 40 : *pc ? OMP_CLAUSE_SHARED
17213 : : OMP_CLAUSE_FIRSTPRIVATE);
17214 40 : OMP_CLAUSE_DECL (c) = orig_decl;
17215 40 : OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
17216 40 : OMP_PARALLEL_CLAUSES (*data[1]) = c;
17217 : }
17218 : /* Similarly, take care of C++ range for temporaries, those should
17219 : be firstprivate on OMP_PARALLEL if any. */
17220 13926 : if (data[1])
17221 17503 : for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
17222 11108 : if (OMP_FOR_ORIG_DECLS (inner_for_stmt)
17223 5733 : && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
17224 : i)) == TREE_LIST
17225 11184 : && TREE_CHAIN (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
17226 : i)))
17227 : {
17228 49 : tree orig
17229 49 : = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
17230 49 : tree v = TREE_CHAIN (orig);
17231 49 : tree c = build_omp_clause (UNKNOWN_LOCATION,
17232 : OMP_CLAUSE_FIRSTPRIVATE);
17233 : /* First add firstprivate clause for the __for_end artificial
17234 : decl. */
17235 49 : OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 1);
17236 49 : if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
17237 : == REFERENCE_TYPE)
17238 0 : OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
17239 49 : OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
17240 49 : OMP_PARALLEL_CLAUSES (*data[1]) = c;
17241 49 : if (TREE_VEC_ELT (v, 0))
17242 : {
17243 : /* And now the same for __for_range artificial decl if it
17244 : exists. */
17245 49 : c = build_omp_clause (UNKNOWN_LOCATION,
17246 : OMP_CLAUSE_FIRSTPRIVATE);
17247 49 : OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 0);
17248 49 : if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
17249 : == REFERENCE_TYPE)
17250 49 : OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
17251 49 : OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
17252 49 : OMP_PARALLEL_CLAUSES (*data[1]) = c;
17253 : }
17254 : }
17255 13926 : break;
17256 : }
17257 54340 : if (OMP_FOR_INIT (for_stmt) != NULL_TREE)
17258 : {
17259 40414 : omp_maybe_apply_loop_xforms (expr_p, NULL_TREE);
17260 40414 : if (*expr_p != for_stmt)
17261 : return GS_OK;
17262 : }
17263 :
17264 53458 : switch (TREE_CODE (for_stmt))
17265 : {
17266 18186 : case OMP_FOR:
17267 30403 : if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt))
17268 : {
17269 458 : if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
17270 : OMP_CLAUSE_SCHEDULE))
17271 47 : error_at (EXPR_LOCATION (for_stmt),
17272 : "%qs clause may not appear on non-rectangular %qs",
17273 27 : "schedule", lang_GNU_Fortran () ? "do" : "for");
17274 458 : if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED))
17275 34 : error_at (EXPR_LOCATION (for_stmt),
17276 : "%qs clause may not appear on non-rectangular %qs",
17277 18 : "ordered", lang_GNU_Fortran () ? "do" : "for");
17278 : }
17279 : break;
17280 8219 : case OMP_DISTRIBUTE:
17281 9130 : if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt)
17282 8219 : && omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
17283 : OMP_CLAUSE_DIST_SCHEDULE))
17284 29 : error_at (EXPR_LOCATION (for_stmt),
17285 : "%qs clause may not appear on non-rectangular %qs",
17286 : "dist_schedule", "distribute");
17287 : break;
17288 : case OACC_LOOP:
17289 : ort = ORT_ACC;
17290 : break;
17291 1586 : case OMP_TASKLOOP:
17292 2523 : if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt))
17293 : {
17294 36 : if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
17295 : OMP_CLAUSE_GRAINSIZE))
17296 11 : error_at (EXPR_LOCATION (for_stmt),
17297 : "%qs clause may not appear on non-rectangular %qs",
17298 : "grainsize", "taskloop");
17299 36 : if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
17300 : OMP_CLAUSE_NUM_TASKS))
17301 6 : error_at (EXPR_LOCATION (for_stmt),
17302 : "%qs clause may not appear on non-rectangular %qs",
17303 : "num_tasks", "taskloop");
17304 : }
17305 1586 : if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED))
17306 : ort = ORT_UNTIED_TASKLOOP;
17307 : else
17308 : ort = ORT_TASKLOOP;
17309 : break;
17310 10653 : case OMP_SIMD:
17311 10653 : ort = ORT_SIMD;
17312 10653 : break;
17313 2264 : case OMP_TILE:
17314 2264 : case OMP_UNROLL:
17315 2264 : gcc_assert (inner_for_stmt == NULL_TREE);
17316 2264 : return gimplify_omp_loop_xform (expr_p, pre_p);
17317 0 : default:
17318 0 : gcc_unreachable ();
17319 : }
17320 :
17321 : /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
17322 : clause for the IV. */
17323 10700 : if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
17324 : {
17325 7825 : t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
17326 7825 : gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
17327 7825 : decl = TREE_OPERAND (t, 0);
17328 18635 : for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
17329 11924 : if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
17330 11924 : && OMP_CLAUSE_DECL (c) == decl)
17331 : {
17332 1114 : OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
17333 1114 : break;
17334 : }
17335 : }
17336 :
17337 51194 : if (TREE_CODE (for_stmt) != OMP_TASKLOOP)
17338 51518 : gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort,
17339 1910 : loop_p && TREE_CODE (for_stmt) != OMP_SIMD
17340 : ? OMP_LOOP : TREE_CODE (for_stmt));
17341 :
17342 51194 : if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
17343 8219 : gimplify_omp_ctxp->distribute = true;
17344 :
17345 : /* Handle OMP_FOR_INIT. */
17346 51194 : for_pre_body = NULL;
17347 51194 : if ((ort == ORT_SIMD
17348 40541 : || (inner_for_stmt && TREE_CODE (inner_for_stmt) == OMP_SIMD))
17349 61934 : && OMP_FOR_PRE_BODY (for_stmt))
17350 : {
17351 6784 : has_decl_expr = BITMAP_ALLOC (NULL);
17352 6784 : if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
17353 6784 : && VAR_P (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt))))
17354 : {
17355 18 : t = OMP_FOR_PRE_BODY (for_stmt);
17356 18 : bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
17357 : }
17358 6766 : else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
17359 : {
17360 6764 : tree_stmt_iterator si;
17361 11234 : for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
17362 4470 : tsi_next (&si))
17363 : {
17364 4470 : t = tsi_stmt (si);
17365 4470 : if (TREE_CODE (t) == DECL_EXPR
17366 4470 : && VAR_P (DECL_EXPR_DECL (t)))
17367 4397 : bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
17368 : }
17369 : }
17370 : }
17371 51194 : if (OMP_FOR_PRE_BODY (for_stmt))
17372 : {
17373 20979 : if (TREE_CODE (for_stmt) != OMP_TASKLOOP || gimplify_omp_ctxp)
17374 20254 : gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
17375 : else
17376 : {
17377 725 : struct gimplify_omp_ctx ctx;
17378 725 : memset (&ctx, 0, sizeof (ctx));
17379 725 : ctx.region_type = ORT_NONE;
17380 725 : gimplify_omp_ctxp = &ctx;
17381 725 : gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
17382 725 : gimplify_omp_ctxp = NULL;
17383 : }
17384 : }
17385 51194 : OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
17386 :
17387 51194 : if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
17388 13926 : for_stmt = inner_for_stmt;
17389 :
17390 : /* For taskloop, need to gimplify the start, end and step before the
17391 : taskloop, outside of the taskloop omp context. */
17392 51194 : if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
17393 : {
17394 3458 : for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
17395 : {
17396 1872 : t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
17397 1872 : gimple_seq *for_pre_p = (gimple_seq_empty_p (for_pre_body)
17398 1872 : ? pre_p : &for_pre_body);
17399 1872 : tree type = TREE_TYPE (TREE_OPERAND (t, 0));
17400 1872 : if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
17401 : {
17402 34 : tree v = TREE_OPERAND (t, 1);
17403 34 : gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 1),
17404 : for_pre_p, orig_for_stmt);
17405 34 : gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 2),
17406 : for_pre_p, orig_for_stmt);
17407 : }
17408 : else
17409 1838 : gimplify_omp_taskloop_expr (type, &TREE_OPERAND (t, 1), for_pre_p,
17410 : orig_for_stmt);
17411 :
17412 : /* Handle OMP_FOR_COND. */
17413 1872 : t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
17414 1872 : if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
17415 : {
17416 31 : tree v = TREE_OPERAND (t, 1);
17417 31 : gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 1),
17418 : for_pre_p, orig_for_stmt);
17419 31 : gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 2),
17420 : for_pre_p, orig_for_stmt);
17421 : }
17422 : else
17423 1841 : gimplify_omp_taskloop_expr (type, &TREE_OPERAND (t, 1), for_pre_p,
17424 : orig_for_stmt);
17425 :
17426 : /* Handle OMP_FOR_INCR. */
17427 1872 : t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
17428 1872 : if (TREE_CODE (t) == MODIFY_EXPR)
17429 : {
17430 590 : decl = TREE_OPERAND (t, 0);
17431 590 : t = TREE_OPERAND (t, 1);
17432 590 : tree *tp = &TREE_OPERAND (t, 1);
17433 590 : if (TREE_CODE (t) == PLUS_EXPR && *tp == decl)
17434 22 : tp = &TREE_OPERAND (t, 0);
17435 :
17436 590 : gimplify_omp_taskloop_expr (NULL_TREE, tp, for_pre_p,
17437 : orig_for_stmt);
17438 : }
17439 : }
17440 :
17441 1586 : gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt), pre_p, ort,
17442 : OMP_TASKLOOP);
17443 : }
17444 :
17445 51194 : if (orig_for_stmt != for_stmt)
17446 13926 : gimplify_omp_ctxp->combined_loop = true;
17447 :
17448 51194 : for_body = NULL;
17449 51194 : gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
17450 : == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
17451 51194 : gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
17452 : == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
17453 :
17454 51194 : tree c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED);
17455 51194 : bool is_doacross = false;
17456 51194 : if (c && walk_tree_without_duplicates (&OMP_FOR_BODY (for_stmt),
17457 : find_standalone_omp_ordered, NULL))
17458 : {
17459 531 : OMP_CLAUSE_ORDERED_DOACROSS (c) = 1;
17460 531 : is_doacross = true;
17461 531 : int len = TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt));
17462 531 : gimplify_omp_ctxp->loop_iter_var.create (len * 2);
17463 2200 : for (tree *pc = &OMP_FOR_CLAUSES (for_stmt); *pc; )
17464 1669 : if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_LINEAR)
17465 : {
17466 10 : error_at (OMP_CLAUSE_LOCATION (*pc),
17467 : "%<linear%> clause may not be specified together "
17468 : "with %<ordered%> clause if stand-alone %<ordered%> "
17469 : "construct is nested in it");
17470 10 : *pc = OMP_CLAUSE_CHAIN (*pc);
17471 : }
17472 : else
17473 1659 : pc = &OMP_CLAUSE_CHAIN (*pc);
17474 : }
17475 51194 : int collapse = 1, tile = 0;
17476 51194 : c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_COLLAPSE);
17477 51194 : if (c)
17478 13726 : collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c));
17479 51194 : c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_TILE);
17480 51194 : if (c)
17481 384 : tile = list_length (OMP_CLAUSE_TILE_LIST (c));
17482 51194 : c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ALLOCATE);
17483 51194 : hash_set<tree> *allocate_uids = NULL;
17484 51194 : if (c)
17485 : {
17486 275 : allocate_uids = new hash_set<tree>;
17487 1682 : for (; c; c = OMP_CLAUSE_CHAIN (c))
17488 1132 : if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ALLOCATE)
17489 478 : allocate_uids->add (OMP_CLAUSE_DECL (c));
17490 : }
17491 124133 : for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
17492 : {
17493 72939 : t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
17494 72939 : gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
17495 72939 : decl = TREE_OPERAND (t, 0);
17496 72939 : gcc_assert (DECL_P (decl));
17497 72939 : gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
17498 : || POINTER_TYPE_P (TREE_TYPE (decl)));
17499 72939 : if (is_doacross)
17500 : {
17501 1654 : if (TREE_CODE (for_stmt) == OMP_FOR && OMP_FOR_ORIG_DECLS (for_stmt))
17502 : {
17503 960 : tree orig_decl = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
17504 960 : if (TREE_CODE (orig_decl) == TREE_LIST)
17505 : {
17506 15 : orig_decl = TREE_PURPOSE (orig_decl);
17507 15 : if (!orig_decl)
17508 0 : orig_decl = decl;
17509 : }
17510 960 : gimplify_omp_ctxp->loop_iter_var.quick_push (orig_decl);
17511 : }
17512 : else
17513 694 : gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
17514 1654 : gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
17515 : }
17516 :
17517 72939 : if (for_stmt == orig_for_stmt)
17518 : {
17519 50131 : tree orig_decl = decl;
17520 50131 : if (OMP_FOR_ORIG_DECLS (for_stmt))
17521 : {
17522 23916 : tree orig_decl = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
17523 23916 : if (TREE_CODE (orig_decl) == TREE_LIST)
17524 : {
17525 : orig_decl = TREE_PURPOSE (orig_decl);
17526 : if (!orig_decl)
17527 : orig_decl = decl;
17528 : }
17529 : }
17530 50131 : if (is_global_var (orig_decl) && DECL_THREAD_LOCAL_P (orig_decl))
17531 36 : error_at (EXPR_LOCATION (for_stmt),
17532 : "threadprivate iteration variable %qD", orig_decl);
17533 : }
17534 :
17535 : /* Make sure the iteration variable is private. */
17536 72939 : tree c = NULL_TREE;
17537 72939 : tree c2 = NULL_TREE;
17538 72939 : if (orig_for_stmt != for_stmt)
17539 : {
17540 : /* Preserve this information until we gimplify the inner simd. */
17541 22808 : if (has_decl_expr
17542 22808 : && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
17543 3413 : TREE_PRIVATE (t) = 1;
17544 : }
17545 50131 : else if (ort == ORT_SIMD)
17546 : {
17547 15726 : splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
17548 : (splay_tree_key) decl);
17549 15726 : omp_is_private (gimplify_omp_ctxp, decl,
17550 15726 : 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
17551 : != 1));
17552 15726 : if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
17553 : {
17554 2518 : omp_notice_variable (gimplify_omp_ctxp, decl, true);
17555 2518 : if (n->value & GOVD_LASTPRIVATE_CONDITIONAL)
17556 0 : for (tree c3 = omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
17557 : OMP_CLAUSE_LASTPRIVATE);
17558 0 : c3; c3 = omp_find_clause (OMP_CLAUSE_CHAIN (c3),
17559 : OMP_CLAUSE_LASTPRIVATE))
17560 0 : if (OMP_CLAUSE_DECL (c3) == decl)
17561 : {
17562 0 : warning_at (OMP_CLAUSE_LOCATION (c3), OPT_Wopenmp,
17563 : "conditional %<lastprivate%> on loop "
17564 : "iterator %qD ignored", decl);
17565 0 : OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3) = 0;
17566 0 : n->value &= ~GOVD_LASTPRIVATE_CONDITIONAL;
17567 : }
17568 : }
17569 13208 : else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1 && !loop_p)
17570 : {
17571 5637 : c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
17572 5637 : OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
17573 5637 : unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
17574 5637 : if ((has_decl_expr
17575 1309 : && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
17576 6321 : || TREE_PRIVATE (t))
17577 : {
17578 2281 : OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
17579 2281 : flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
17580 : }
17581 5637 : struct gimplify_omp_ctx *outer
17582 5637 : = gimplify_omp_ctxp->outer_context;
17583 5637 : if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
17584 : {
17585 2433 : if (outer->region_type == ORT_WORKSHARE
17586 2193 : && outer->combined_loop)
17587 : {
17588 2142 : n = splay_tree_lookup (outer->variables,
17589 : (splay_tree_key)decl);
17590 2142 : if (n != NULL && (n->value & GOVD_LOCAL) != 0)
17591 : {
17592 0 : OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
17593 0 : flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
17594 : }
17595 : else
17596 : {
17597 2142 : struct gimplify_omp_ctx *octx = outer->outer_context;
17598 2142 : if (octx
17599 1714 : && octx->region_type == ORT_COMBINED_PARALLEL
17600 1476 : && octx->outer_context
17601 1209 : && (octx->outer_context->region_type
17602 : == ORT_WORKSHARE)
17603 1079 : && octx->outer_context->combined_loop)
17604 : {
17605 1079 : octx = octx->outer_context;
17606 1079 : n = splay_tree_lookup (octx->variables,
17607 : (splay_tree_key)decl);
17608 1079 : if (n != NULL && (n->value & GOVD_LOCAL) != 0)
17609 : {
17610 0 : OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
17611 0 : flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
17612 : }
17613 : }
17614 : }
17615 : }
17616 : }
17617 :
17618 5637 : OMP_CLAUSE_DECL (c) = decl;
17619 5637 : OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
17620 5637 : OMP_FOR_CLAUSES (for_stmt) = c;
17621 5637 : omp_add_variable (gimplify_omp_ctxp, decl, flags);
17622 5637 : if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
17623 2433 : omp_lastprivate_for_combined_outer_constructs (outer, decl,
17624 : true);
17625 : }
17626 : else
17627 : {
17628 7571 : bool lastprivate
17629 : = (!has_decl_expr
17630 7571 : || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)));
17631 7571 : if (TREE_PRIVATE (t))
17632 1787 : lastprivate = false;
17633 7571 : if (loop_p && OMP_FOR_ORIG_DECLS (for_stmt))
17634 : {
17635 509 : tree elt = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
17636 509 : if (TREE_CODE (elt) == TREE_LIST && TREE_PURPOSE (elt))
17637 : lastprivate = false;
17638 : }
17639 :
17640 7571 : struct gimplify_omp_ctx *outer
17641 7571 : = gimplify_omp_ctxp->outer_context;
17642 7571 : if (outer && lastprivate)
17643 4887 : omp_lastprivate_for_combined_outer_constructs (outer, decl,
17644 : true);
17645 :
17646 9577 : c = build_omp_clause (input_location,
17647 : lastprivate ? OMP_CLAUSE_LASTPRIVATE
17648 : : OMP_CLAUSE_PRIVATE);
17649 7571 : OMP_CLAUSE_DECL (c) = decl;
17650 7571 : OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
17651 7571 : OMP_FOR_CLAUSES (for_stmt) = c;
17652 7571 : omp_add_variable (gimplify_omp_ctxp, decl,
17653 : (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
17654 : | GOVD_EXPLICIT | GOVD_SEEN);
17655 7571 : c = NULL_TREE;
17656 : }
17657 : }
17658 34405 : else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
17659 : {
17660 8733 : omp_notice_variable (gimplify_omp_ctxp, decl, true);
17661 8733 : splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
17662 : (splay_tree_key) decl);
17663 8733 : if (n && (n->value & GOVD_LASTPRIVATE_CONDITIONAL))
17664 80 : for (tree c3 = omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
17665 : OMP_CLAUSE_LASTPRIVATE);
17666 80 : c3; c3 = omp_find_clause (OMP_CLAUSE_CHAIN (c3),
17667 : OMP_CLAUSE_LASTPRIVATE))
17668 40 : if (OMP_CLAUSE_DECL (c3) == decl)
17669 : {
17670 40 : warning_at (OMP_CLAUSE_LOCATION (c3), OPT_Wopenmp,
17671 : "conditional %<lastprivate%> on loop "
17672 : "iterator %qD ignored", decl);
17673 40 : OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3) = 0;
17674 40 : n->value &= ~GOVD_LASTPRIVATE_CONDITIONAL;
17675 : }
17676 : }
17677 : else
17678 25672 : omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
17679 :
17680 : /* If DECL is not a gimple register, create a temporary variable to act
17681 : as an iteration counter. This is valid, since DECL cannot be
17682 : modified in the body of the loop. Similarly for any iteration vars
17683 : in simd with collapse > 1 where the iterator vars must be
17684 : lastprivate. And similarly for vars mentioned in allocate clauses. */
17685 72939 : if (orig_for_stmt != for_stmt)
17686 22808 : var = decl;
17687 50131 : else if (!is_gimple_reg (decl)
17688 46864 : || (ort == ORT_SIMD
17689 14125 : && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)
17690 89925 : || (allocate_uids && allocate_uids->contains (decl)))
17691 : {
17692 10369 : struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
17693 : /* Make sure omp_add_variable is not called on it prematurely.
17694 : We call it ourselves a few lines later. */
17695 10369 : gimplify_omp_ctxp = NULL;
17696 10369 : var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
17697 10369 : gimplify_omp_ctxp = ctx;
17698 10369 : TREE_OPERAND (t, 0) = var;
17699 :
17700 10369 : gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
17701 :
17702 10369 : if (ort == ORT_SIMD
17703 10369 : && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
17704 : {
17705 770 : c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
17706 770 : OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
17707 770 : OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
17708 770 : OMP_CLAUSE_DECL (c2) = var;
17709 770 : OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
17710 770 : OMP_FOR_CLAUSES (for_stmt) = c2;
17711 770 : omp_add_variable (gimplify_omp_ctxp, var,
17712 : GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
17713 770 : if (c == NULL_TREE)
17714 : {
17715 518 : c = c2;
17716 518 : c2 = NULL_TREE;
17717 : }
17718 : }
17719 : else
17720 9599 : omp_add_variable (gimplify_omp_ctxp, var,
17721 : GOVD_PRIVATE | GOVD_SEEN);
17722 : }
17723 : else
17724 : var = decl;
17725 :
17726 72939 : gimplify_omp_ctxp->in_for_exprs = true;
17727 72939 : if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
17728 : {
17729 719 : tree lb = TREE_OPERAND (t, 1);
17730 719 : tret = gimplify_expr (&TREE_VEC_ELT (lb, 1), &for_pre_body, NULL,
17731 : is_gimple_val, fb_rvalue, false);
17732 719 : ret = MIN (ret, tret);
17733 719 : tret = gimplify_expr (&TREE_VEC_ELT (lb, 2), &for_pre_body, NULL,
17734 : is_gimple_val, fb_rvalue, false);
17735 : }
17736 : else
17737 72220 : tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
17738 : is_gimple_val, fb_rvalue, false);
17739 72939 : gimplify_omp_ctxp->in_for_exprs = false;
17740 72939 : ret = MIN (ret, tret);
17741 72939 : if (ret == GS_ERROR)
17742 : return ret;
17743 :
17744 : /* Handle OMP_FOR_COND. */
17745 72939 : t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
17746 72939 : gcc_assert (COMPARISON_CLASS_P (t));
17747 72939 : gcc_assert (TREE_OPERAND (t, 0) == decl);
17748 :
17749 72939 : gimplify_omp_ctxp->in_for_exprs = true;
17750 72939 : if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
17751 : {
17752 599 : tree ub = TREE_OPERAND (t, 1);
17753 599 : tret = gimplify_expr (&TREE_VEC_ELT (ub, 1), &for_pre_body, NULL,
17754 : is_gimple_val, fb_rvalue, false);
17755 599 : ret = MIN (ret, tret);
17756 599 : tret = gimplify_expr (&TREE_VEC_ELT (ub, 2), &for_pre_body, NULL,
17757 : is_gimple_val, fb_rvalue, false);
17758 : }
17759 : else
17760 72340 : tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
17761 : is_gimple_val, fb_rvalue, false);
17762 72939 : gimplify_omp_ctxp->in_for_exprs = false;
17763 72939 : ret = MIN (ret, tret);
17764 :
17765 : /* Handle OMP_FOR_INCR. */
17766 72939 : t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
17767 72939 : switch (TREE_CODE (t))
17768 : {
17769 35676 : case PREINCREMENT_EXPR:
17770 35676 : case POSTINCREMENT_EXPR:
17771 35676 : {
17772 35676 : tree decl = TREE_OPERAND (t, 0);
17773 : /* c_omp_for_incr_canonicalize_ptr() should have been
17774 : called to massage things appropriately. */
17775 35676 : gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
17776 :
17777 35676 : if (orig_for_stmt != for_stmt)
17778 : break;
17779 25291 : t = build_int_cst (TREE_TYPE (decl), 1);
17780 25291 : if (c)
17781 4470 : OMP_CLAUSE_LINEAR_STEP (c) = t;
17782 25291 : t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
17783 25291 : t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
17784 25291 : TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
17785 25291 : break;
17786 : }
17787 :
17788 3830 : case PREDECREMENT_EXPR:
17789 3830 : case POSTDECREMENT_EXPR:
17790 : /* c_omp_for_incr_canonicalize_ptr() should have been
17791 : called to massage things appropriately. */
17792 3830 : gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
17793 3830 : if (orig_for_stmt != for_stmt)
17794 : break;
17795 1992 : t = build_int_cst (TREE_TYPE (decl), -1);
17796 1992 : if (c)
17797 112 : OMP_CLAUSE_LINEAR_STEP (c) = t;
17798 1992 : t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
17799 1992 : t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
17800 1992 : TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
17801 1992 : break;
17802 :
17803 33433 : case MODIFY_EXPR:
17804 33433 : gcc_assert (TREE_OPERAND (t, 0) == decl);
17805 33433 : TREE_OPERAND (t, 0) = var;
17806 :
17807 33433 : t = TREE_OPERAND (t, 1);
17808 33433 : switch (TREE_CODE (t))
17809 : {
17810 26501 : case PLUS_EXPR:
17811 26501 : if (TREE_OPERAND (t, 1) == decl)
17812 : {
17813 215 : TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
17814 215 : TREE_OPERAND (t, 0) = var;
17815 215 : break;
17816 : }
17817 :
17818 : /* Fallthru. */
17819 33218 : case MINUS_EXPR:
17820 33218 : case POINTER_PLUS_EXPR:
17821 33218 : gcc_assert (TREE_OPERAND (t, 0) == decl);
17822 33218 : TREE_OPERAND (t, 0) = var;
17823 33218 : break;
17824 0 : default:
17825 0 : gcc_unreachable ();
17826 : }
17827 :
17828 33433 : gimplify_omp_ctxp->in_for_exprs = true;
17829 33433 : tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
17830 : is_gimple_val, fb_rvalue, false);
17831 33433 : ret = MIN (ret, tret);
17832 33433 : if (c)
17833 : {
17834 1573 : tree step = TREE_OPERAND (t, 1);
17835 1573 : tree stept = TREE_TYPE (decl);
17836 1573 : if (POINTER_TYPE_P (stept))
17837 280 : stept = sizetype;
17838 1573 : step = fold_convert (stept, step);
17839 1573 : if (TREE_CODE (t) == MINUS_EXPR)
17840 295 : step = fold_build1 (NEGATE_EXPR, stept, step);
17841 1573 : OMP_CLAUSE_LINEAR_STEP (c) = step;
17842 1573 : if (step != TREE_OPERAND (t, 1))
17843 : {
17844 295 : tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
17845 : &for_pre_body, NULL,
17846 : is_gimple_val, fb_rvalue, false);
17847 295 : ret = MIN (ret, tret);
17848 : }
17849 : }
17850 33433 : gimplify_omp_ctxp->in_for_exprs = false;
17851 33433 : break;
17852 :
17853 0 : default:
17854 0 : gcc_unreachable ();
17855 : }
17856 :
17857 72939 : if (c2)
17858 : {
17859 252 : gcc_assert (c);
17860 252 : OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
17861 : }
17862 :
17863 72939 : if ((var != decl || collapse > 1 || tile) && orig_for_stmt == for_stmt)
17864 : {
17865 86692 : for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
17866 65132 : if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
17867 16218 : && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
17868 56123 : || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
17869 2159 : && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
17870 1373 : && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
17871 75492 : && OMP_CLAUSE_DECL (c) == decl)
17872 : {
17873 7535 : if (is_doacross && (collapse == 1 || i >= collapse))
17874 : t = var;
17875 : else
17876 : {
17877 7462 : t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
17878 7462 : gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
17879 7462 : gcc_assert (TREE_OPERAND (t, 0) == var);
17880 7462 : t = TREE_OPERAND (t, 1);
17881 7462 : gcc_assert (TREE_CODE (t) == PLUS_EXPR
17882 : || TREE_CODE (t) == MINUS_EXPR
17883 : || TREE_CODE (t) == POINTER_PLUS_EXPR);
17884 7462 : gcc_assert (TREE_OPERAND (t, 0) == var);
17885 14822 : t = build2 (TREE_CODE (t), TREE_TYPE (decl),
17886 : is_doacross ? var : decl,
17887 7462 : TREE_OPERAND (t, 1));
17888 : }
17889 7535 : gimple_seq *seq;
17890 7535 : if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
17891 6939 : seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
17892 : else
17893 596 : seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
17894 7535 : push_gimplify_context ();
17895 7535 : gimplify_assign (decl, t, seq);
17896 7535 : gimple *bind = NULL;
17897 7535 : if (gimplify_ctxp->temps)
17898 : {
17899 2072 : bind = gimple_build_bind (NULL_TREE, *seq, NULL_TREE);
17900 2072 : *seq = NULL;
17901 2072 : gimplify_seq_add_stmt (seq, bind);
17902 : }
17903 7535 : pop_gimplify_context (bind);
17904 : }
17905 : }
17906 72939 : if (OMP_FOR_NON_RECTANGULAR (for_stmt) && var != decl)
17907 2609 : for (int j = i + 1; j < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); j++)
17908 : {
17909 1197 : t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), j);
17910 1197 : gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
17911 1197 : if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
17912 1197 : && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
17913 336 : TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
17914 1197 : t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), j);
17915 1197 : gcc_assert (COMPARISON_CLASS_P (t));
17916 1197 : if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
17917 1197 : && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
17918 343 : TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
17919 : }
17920 : }
17921 :
17922 51194 : BITMAP_FREE (has_decl_expr);
17923 51469 : delete allocate_uids;
17924 :
17925 51194 : if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP
17926 49608 : || (loop_p && orig_for_stmt == for_stmt))
17927 : {
17928 2642 : push_gimplify_context ();
17929 2642 : if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR)
17930 : {
17931 1891 : OMP_FOR_BODY (orig_for_stmt)
17932 1891 : = build3 (BIND_EXPR, void_type_node, NULL,
17933 1891 : OMP_FOR_BODY (orig_for_stmt), NULL);
17934 1891 : TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1;
17935 : }
17936 : }
17937 :
17938 51194 : gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt),
17939 : &for_body);
17940 :
17941 51194 : if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP
17942 49608 : || (loop_p && orig_for_stmt == for_stmt))
17943 : {
17944 2642 : if (gimple_code (g) == GIMPLE_BIND)
17945 2642 : pop_gimplify_context (g);
17946 : else
17947 0 : pop_gimplify_context (NULL);
17948 : }
17949 :
17950 51194 : if (orig_for_stmt != for_stmt)
17951 36734 : for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
17952 : {
17953 22808 : t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
17954 22808 : decl = TREE_OPERAND (t, 0);
17955 22808 : struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
17956 22808 : if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
17957 772 : gimplify_omp_ctxp = ctx->outer_context;
17958 22808 : var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
17959 22808 : gimplify_omp_ctxp = ctx;
17960 22808 : omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
17961 22808 : TREE_OPERAND (t, 0) = var;
17962 22808 : t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
17963 22808 : TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
17964 22808 : TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
17965 22808 : if (OMP_FOR_NON_RECTANGULAR (for_stmt))
17966 791 : for (int j = i + 1;
17967 791 : j < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); j++)
17968 : {
17969 377 : t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), j);
17970 377 : gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
17971 377 : if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
17972 377 : && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
17973 : {
17974 133 : TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
17975 133 : TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
17976 : }
17977 377 : t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), j);
17978 377 : gcc_assert (COMPARISON_CLASS_P (t));
17979 377 : if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
17980 377 : && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
17981 : {
17982 67 : TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
17983 67 : TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
17984 : }
17985 : }
17986 : }
17987 :
17988 51194 : gimplify_adjust_omp_clauses (pre_p, for_body,
17989 : &OMP_FOR_CLAUSES (orig_for_stmt),
17990 51194 : TREE_CODE (orig_for_stmt));
17991 :
17992 51194 : int kind;
17993 51194 : switch (TREE_CODE (orig_for_stmt))
17994 : {
17995 : case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
17996 10653 : case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
17997 8219 : case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
17998 1586 : case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break;
17999 12550 : case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
18000 0 : default:
18001 0 : gcc_unreachable ();
18002 : }
18003 51194 : if (loop_p && kind == GF_OMP_FOR_KIND_SIMD)
18004 : {
18005 1056 : gimplify_seq_add_seq (pre_p, for_pre_body);
18006 1056 : for_pre_body = NULL;
18007 : }
18008 51194 : gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
18009 51194 : TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
18010 : for_pre_body);
18011 51194 : if (orig_for_stmt != for_stmt)
18012 13926 : gimple_omp_for_set_combined_p (gfor, true);
18013 51194 : if (gimplify_omp_ctxp
18014 42566 : && (gimplify_omp_ctxp->combined_loop
18015 35035 : || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
18016 12298 : && gimplify_omp_ctxp->outer_context
18017 7858 : && gimplify_omp_ctxp->outer_context->combined_loop)))
18018 : {
18019 13926 : gimple_omp_for_set_combined_into_p (gfor, true);
18020 13926 : if (gimplify_omp_ctxp->combined_loop)
18021 7531 : gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
18022 : else
18023 6395 : gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
18024 : }
18025 :
18026 124133 : for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
18027 : {
18028 72939 : t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
18029 72939 : gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
18030 72939 : gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
18031 72939 : t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
18032 72939 : gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
18033 72939 : gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
18034 72939 : t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
18035 72939 : gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
18036 : }
18037 :
18038 : /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
18039 : constructs with GIMPLE_OMP_TASK sandwiched in between them.
18040 : The outer taskloop stands for computing the number of iterations,
18041 : counts for collapsed loops and holding taskloop specific clauses.
18042 : The task construct stands for the effect of data sharing on the
18043 : explicit task it creates and the inner taskloop stands for expansion
18044 : of the static loop inside of the explicit task construct. */
18045 51194 : if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
18046 : {
18047 1586 : tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor);
18048 1586 : tree task_clauses = NULL_TREE;
18049 1586 : tree c = *gfor_clauses_ptr;
18050 1586 : tree *gtask_clauses_ptr = &task_clauses;
18051 1586 : tree outer_for_clauses = NULL_TREE;
18052 1586 : tree *gforo_clauses_ptr = &outer_for_clauses;
18053 1586 : bitmap lastprivate_uids = NULL;
18054 1586 : if (omp_find_clause (c, OMP_CLAUSE_ALLOCATE))
18055 : {
18056 36 : c = omp_find_clause (c, OMP_CLAUSE_LASTPRIVATE);
18057 36 : if (c)
18058 : {
18059 18 : lastprivate_uids = BITMAP_ALLOC (NULL);
18060 54 : for (; c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
18061 : OMP_CLAUSE_LASTPRIVATE))
18062 18 : bitmap_set_bit (lastprivate_uids,
18063 18 : DECL_UID (OMP_CLAUSE_DECL (c)));
18064 : }
18065 36 : c = *gfor_clauses_ptr;
18066 : }
18067 12252 : for (; c; c = OMP_CLAUSE_CHAIN (c))
18068 10666 : switch (OMP_CLAUSE_CODE (c))
18069 : {
18070 : /* These clauses are allowed on task, move them there. */
18071 6228 : case OMP_CLAUSE_SHARED:
18072 6228 : case OMP_CLAUSE_FIRSTPRIVATE:
18073 6228 : case OMP_CLAUSE_DEFAULT:
18074 6228 : case OMP_CLAUSE_IF:
18075 6228 : case OMP_CLAUSE_UNTIED:
18076 6228 : case OMP_CLAUSE_FINAL:
18077 6228 : case OMP_CLAUSE_MERGEABLE:
18078 6228 : case OMP_CLAUSE_PRIORITY:
18079 6228 : case OMP_CLAUSE_REDUCTION:
18080 6228 : case OMP_CLAUSE_IN_REDUCTION:
18081 6228 : *gtask_clauses_ptr = c;
18082 6228 : gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
18083 6228 : break;
18084 2068 : case OMP_CLAUSE_PRIVATE:
18085 2068 : if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c))
18086 : {
18087 : /* We want private on outer for and firstprivate
18088 : on task. */
18089 26 : *gtask_clauses_ptr
18090 26 : = build_omp_clause (OMP_CLAUSE_LOCATION (c),
18091 : OMP_CLAUSE_FIRSTPRIVATE);
18092 26 : OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
18093 26 : lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL,
18094 : openacc);
18095 26 : gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
18096 26 : *gforo_clauses_ptr = c;
18097 26 : gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
18098 : }
18099 : else
18100 : {
18101 2042 : *gtask_clauses_ptr = c;
18102 2042 : gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
18103 : }
18104 : break;
18105 : /* These clauses go into outer taskloop clauses. */
18106 533 : case OMP_CLAUSE_GRAINSIZE:
18107 533 : case OMP_CLAUSE_NUM_TASKS:
18108 533 : case OMP_CLAUSE_NOGROUP:
18109 533 : *gforo_clauses_ptr = c;
18110 533 : gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
18111 533 : break;
18112 : /* Collapse clause we duplicate on both taskloops. */
18113 584 : case OMP_CLAUSE_COLLAPSE:
18114 584 : *gfor_clauses_ptr = c;
18115 584 : gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
18116 584 : *gforo_clauses_ptr = copy_node (c);
18117 584 : gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
18118 584 : break;
18119 : /* For lastprivate, keep the clause on inner taskloop, and add
18120 : a shared clause on task. If the same decl is also firstprivate,
18121 : add also firstprivate clause on the inner taskloop. */
18122 1191 : case OMP_CLAUSE_LASTPRIVATE:
18123 1191 : if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
18124 : {
18125 : /* For taskloop C++ lastprivate IVs, we want:
18126 : 1) private on outer taskloop
18127 : 2) firstprivate and shared on task
18128 : 3) lastprivate on inner taskloop */
18129 38 : *gtask_clauses_ptr
18130 38 : = build_omp_clause (OMP_CLAUSE_LOCATION (c),
18131 : OMP_CLAUSE_FIRSTPRIVATE);
18132 38 : OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
18133 38 : lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL,
18134 : openacc);
18135 38 : gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
18136 38 : OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1;
18137 38 : *gforo_clauses_ptr = build_omp_clause (OMP_CLAUSE_LOCATION (c),
18138 : OMP_CLAUSE_PRIVATE);
18139 38 : OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c);
18140 38 : OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1;
18141 38 : TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c);
18142 38 : gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
18143 : }
18144 1191 : *gfor_clauses_ptr = c;
18145 1191 : gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
18146 1191 : *gtask_clauses_ptr
18147 1191 : = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_SHARED);
18148 1191 : OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
18149 1191 : if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
18150 312 : OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1;
18151 1191 : gtask_clauses_ptr
18152 1191 : = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
18153 1191 : break;
18154 : /* Allocate clause we duplicate on task and inner taskloop
18155 : if the decl is lastprivate, otherwise just put on task. */
18156 62 : case OMP_CLAUSE_ALLOCATE:
18157 62 : if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
18158 62 : && DECL_P (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)))
18159 : {
18160 : /* Additionally, put firstprivate clause on task
18161 : for the allocator if it is not constant. */
18162 34 : *gtask_clauses_ptr
18163 34 : = build_omp_clause (OMP_CLAUSE_LOCATION (c),
18164 : OMP_CLAUSE_FIRSTPRIVATE);
18165 34 : OMP_CLAUSE_DECL (*gtask_clauses_ptr)
18166 34 : = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
18167 34 : gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
18168 : }
18169 62 : if (lastprivate_uids
18170 97 : && bitmap_bit_p (lastprivate_uids,
18171 35 : DECL_UID (OMP_CLAUSE_DECL (c))))
18172 : {
18173 17 : *gfor_clauses_ptr = c;
18174 17 : gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
18175 17 : *gtask_clauses_ptr = copy_node (c);
18176 17 : gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
18177 : }
18178 : else
18179 : {
18180 45 : *gtask_clauses_ptr = c;
18181 45 : gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
18182 : }
18183 : break;
18184 0 : default:
18185 0 : gcc_unreachable ();
18186 : }
18187 1586 : *gfor_clauses_ptr = NULL_TREE;
18188 1586 : *gtask_clauses_ptr = NULL_TREE;
18189 1586 : *gforo_clauses_ptr = NULL_TREE;
18190 1586 : BITMAP_FREE (lastprivate_uids);
18191 1586 : gimple_set_location (gfor, input_location);
18192 1586 : g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE);
18193 1586 : g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE,
18194 : NULL_TREE, NULL_TREE, NULL_TREE);
18195 1586 : gimple_set_location (g, input_location);
18196 1586 : gimple_omp_task_set_taskloop_p (g, true);
18197 1586 : g = gimple_build_bind (NULL_TREE, g, NULL_TREE);
18198 1586 : gomp_for *gforo
18199 1586 : = gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP, outer_for_clauses,
18200 : gimple_omp_for_collapse (gfor),
18201 : gimple_omp_for_pre_body (gfor));
18202 1586 : gimple_omp_for_set_pre_body (gfor, NULL);
18203 1586 : gimple_omp_for_set_combined_p (gforo, true);
18204 1586 : gimple_omp_for_set_combined_into_p (gfor, true);
18205 3458 : for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++)
18206 : {
18207 1872 : tree type = TREE_TYPE (gimple_omp_for_index (gfor, i));
18208 1872 : tree v = create_tmp_var (type);
18209 1872 : gimple_omp_for_set_index (gforo, i, v);
18210 1872 : t = unshare_expr (gimple_omp_for_initial (gfor, i));
18211 1872 : gimple_omp_for_set_initial (gforo, i, t);
18212 1872 : gimple_omp_for_set_cond (gforo, i,
18213 : gimple_omp_for_cond (gfor, i));
18214 1872 : t = unshare_expr (gimple_omp_for_final (gfor, i));
18215 1872 : gimple_omp_for_set_final (gforo, i, t);
18216 1872 : t = unshare_expr (gimple_omp_for_incr (gfor, i));
18217 1872 : gcc_assert (TREE_OPERAND (t, 0) == gimple_omp_for_index (gfor, i));
18218 1872 : TREE_OPERAND (t, 0) = v;
18219 1872 : gimple_omp_for_set_incr (gforo, i, t);
18220 1872 : t = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
18221 1872 : OMP_CLAUSE_DECL (t) = v;
18222 1872 : OMP_CLAUSE_CHAIN (t) = gimple_omp_for_clauses (gforo);
18223 1872 : gimple_omp_for_set_clauses (gforo, t);
18224 1872 : if (OMP_FOR_NON_RECTANGULAR (for_stmt))
18225 : {
18226 90 : tree *p1 = NULL, *p2 = NULL;
18227 90 : t = gimple_omp_for_initial (gforo, i);
18228 90 : if (TREE_CODE (t) == TREE_VEC)
18229 34 : p1 = &TREE_VEC_ELT (t, 0);
18230 90 : t = gimple_omp_for_final (gforo, i);
18231 90 : if (TREE_CODE (t) == TREE_VEC)
18232 : {
18233 31 : if (p1)
18234 23 : p2 = &TREE_VEC_ELT (t, 0);
18235 : else
18236 8 : p1 = &TREE_VEC_ELT (t, 0);
18237 : }
18238 90 : if (p1)
18239 : {
18240 : int j;
18241 58 : for (j = 0; j < i; j++)
18242 58 : if (*p1 == gimple_omp_for_index (gfor, j))
18243 : {
18244 42 : *p1 = gimple_omp_for_index (gforo, j);
18245 42 : if (p2)
18246 23 : *p2 = *p1;
18247 : break;
18248 : }
18249 42 : gcc_assert (j < i);
18250 : }
18251 : }
18252 : }
18253 1586 : gimplify_seq_add_stmt (pre_p, gforo);
18254 : }
18255 : else
18256 49608 : gimplify_seq_add_stmt (pre_p, gfor);
18257 :
18258 51194 : if (TREE_CODE (orig_for_stmt) == OMP_FOR)
18259 : {
18260 18186 : struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
18261 18186 : unsigned lastprivate_conditional = 0;
18262 18186 : while (ctx
18263 18222 : && (ctx->region_type == ORT_TARGET_DATA
18264 14184 : || ctx->region_type == ORT_TASKGROUP))
18265 36 : ctx = ctx->outer_context;
18266 18186 : if (ctx && (ctx->region_type & ORT_PARALLEL) != 0)
18267 13924 : for (tree c = gimple_omp_for_clauses (gfor);
18268 63172 : c; c = OMP_CLAUSE_CHAIN (c))
18269 49248 : if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
18270 49248 : && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
18271 158 : ++lastprivate_conditional;
18272 13924 : if (lastprivate_conditional)
18273 : {
18274 118 : struct omp_for_data fd;
18275 118 : omp_extract_for_data (gfor, &fd, NULL);
18276 118 : tree type = build_array_type_nelts (unsigned_type_for (fd.iter_type),
18277 118 : lastprivate_conditional);
18278 118 : tree var = create_tmp_var_raw (type);
18279 118 : tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
18280 118 : OMP_CLAUSE_DECL (c) = var;
18281 118 : OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gfor);
18282 118 : gimple_omp_for_set_clauses (gfor, c);
18283 118 : omp_add_variable (ctx, var, GOVD_CONDTEMP | GOVD_SEEN);
18284 : }
18285 : }
18286 33008 : else if (TREE_CODE (orig_for_stmt) == OMP_SIMD)
18287 : {
18288 10653 : unsigned lastprivate_conditional = 0;
18289 49159 : for (tree c = gimple_omp_for_clauses (gfor); c; c = OMP_CLAUSE_CHAIN (c))
18290 38506 : if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
18291 38506 : && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
18292 119 : ++lastprivate_conditional;
18293 10653 : if (lastprivate_conditional)
18294 : {
18295 91 : struct omp_for_data fd;
18296 91 : omp_extract_for_data (gfor, &fd, NULL);
18297 91 : tree type = unsigned_type_for (fd.iter_type);
18298 301 : while (lastprivate_conditional--)
18299 : {
18300 119 : tree c = build_omp_clause (UNKNOWN_LOCATION,
18301 : OMP_CLAUSE__CONDTEMP_);
18302 119 : OMP_CLAUSE_DECL (c) = create_tmp_var (type);
18303 119 : OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gfor);
18304 119 : gimple_omp_for_set_clauses (gfor, c);
18305 : }
18306 : }
18307 : }
18308 :
18309 51194 : if (ret != GS_ALL_DONE)
18310 : return GS_ERROR;
18311 51194 : *expr_p = NULL_TREE;
18312 51194 : return GS_ALL_DONE;
18313 : }
18314 :
18315 : /* Helper for gimplify_omp_loop, called through walk_tree. */
18316 :
18317 : static tree
18318 294 : note_no_context_vars (tree *tp, int *, void *data)
18319 : {
18320 294 : if (VAR_P (*tp)
18321 56 : && DECL_CONTEXT (*tp) == NULL_TREE
18322 309 : && !is_global_var (*tp))
18323 : {
18324 15 : vec<tree> *d = (vec<tree> *) data;
18325 15 : d->safe_push (*tp);
18326 15 : DECL_CONTEXT (*tp) = current_function_decl;
18327 : }
18328 294 : return NULL_TREE;
18329 : }
18330 :
/* Gimplify the gross structure of an OMP_LOOP statement.

   The OpenMP "loop" construct has no lowering of its own; this routine
   rewrites it into an equivalent nest of other OpenMP constructs based
   on its (explicit or computed) bind clause:
     - bind(thread):   the construct simply becomes OMP_SIMD;
     - bind(parallel): OMP_FOR wrapping the OMP_SIMD;
     - bind(teams):    OMP_PARALLEL + OMP_DISTRIBUTE wrapping the above.
   The rewritten tree is then handed back to gimplify_expr.

   *EXPR_P is the OMP_LOOP statement; PRE_P receives any statements to
   be emitted before it.  */

static enum gimplify_status
gimplify_omp_loop (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt = *expr_p;
  tree clauses = OMP_FOR_CLAUSES (for_stmt);
  struct gimplify_omp_ctx *octx = gimplify_omp_ctxp;
  enum omp_clause_bind_kind kind = OMP_CLAUSE_BIND_THREAD;
  int i;

  /* Loop transformations may replace the statement; if they did, let
     the caller gimplify the replacement instead.  */
  omp_maybe_apply_loop_xforms (expr_p, NULL_TREE);
  if (*expr_p != for_stmt)
    return GS_OK;

  /* If order is not present, the behavior is as if order(concurrent)
     appeared.  */
  tree order = omp_find_clause (clauses, OMP_CLAUSE_ORDER);
  if (order == NULL_TREE)
    {
      order = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_ORDER);
      OMP_CLAUSE_CHAIN (order) = clauses;
      OMP_FOR_CLAUSES (for_stmt) = clauses = order;
    }

  /* Determine the effective binding.  Without an explicit bind clause
     it is derived from the innermost enclosing OpenMP region; with one,
     its kind is validated against the enclosing context and may be
     demoted to bind(thread) on error.  */
  tree bind = omp_find_clause (clauses, OMP_CLAUSE_BIND);
  if (bind == NULL_TREE)
    {
      if (!flag_openmp) /* flag_openmp_simd */
	;
      else if (octx && (octx->region_type & ORT_TEAMS) != 0)
	kind = OMP_CLAUSE_BIND_TEAMS;
      else if (octx && (octx->region_type & ORT_PARALLEL) != 0)
	kind = OMP_CLAUSE_BIND_PARALLEL;
      else
	{
	  /* Look outward past OpenACC and no-op contexts; if nothing
	     OpenMP encloses the loop, a bind clause was required.  */
	  for (; octx; octx = octx->outer_context)
	    {
	      if ((octx->region_type & ORT_ACC) != 0
		  || octx->region_type == ORT_NONE
		  || octx->region_type == ORT_IMPLICIT_TARGET)
		continue;
	      break;
	    }
	  if (octx == NULL && !in_omp_construct)
	    error_at (EXPR_LOCATION (for_stmt),
		      "%<bind%> clause not specified on a %<loop%> "
		      "construct not nested inside another OpenMP construct");
	}
      /* Record the computed binding as an explicit clause.  */
      bind = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_BIND);
      OMP_CLAUSE_CHAIN (bind) = clauses;
      OMP_CLAUSE_BIND_KIND (bind) = kind;
      OMP_FOR_CLAUSES (for_stmt) = bind;
    }
  else
    switch (OMP_CLAUSE_BIND_KIND (bind))
      {
      case OMP_CLAUSE_BIND_THREAD:
	break;
      case OMP_CLAUSE_BIND_PARALLEL:
	if (!flag_openmp) /* flag_openmp_simd */
	  {
	    OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
	    break;
	  }
	/* bind(parallel) is invalid when nested in a simd region that
	   is not itself a lowered loop construct (i.e. has no bind).  */
	for (; octx; octx = octx->outer_context)
	  if (octx->region_type == ORT_SIMD
	      && omp_find_clause (octx->clauses, OMP_CLAUSE_BIND) == NULL_TREE)
	    {
	      error_at (EXPR_LOCATION (for_stmt),
			"%<bind(parallel)%> on a %<loop%> construct nested "
			"inside %<simd%> construct");
	      OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
	      break;
	    }
	kind = OMP_CLAUSE_BIND_PARALLEL;
	break;
      case OMP_CLAUSE_BIND_TEAMS:
	if (!flag_openmp) /* flag_openmp_simd */
	  {
	    OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
	    break;
	  }
	/* bind(teams) requires strict nesting directly inside a teams
	   region (or at least not inside some other construct).  */
	if ((octx
	     && octx->region_type != ORT_IMPLICIT_TARGET
	     && octx->region_type != ORT_NONE
	     && (octx->region_type & ORT_TEAMS) == 0)
	    || in_omp_construct)
	  {
	    error_at (EXPR_LOCATION (for_stmt),
		      "%<bind(teams)%> on a %<loop%> region not strictly "
		      "nested inside of a %<teams%> region");
	    OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
	    break;
	  }
	kind = OMP_CLAUSE_BIND_TEAMS;
	break;
      default:
	gcc_unreachable ();
      }

  /* Screen the clauses: diagnose reduction modifiers that are invalid
     on a loop construct, and verify every lastprivate names one of the
     loop iterators (dropping the clause otherwise).  */
  for (tree *pc = &OMP_FOR_CLAUSES (for_stmt); *pc; )
    switch (OMP_CLAUSE_CODE (*pc))
      {
      case OMP_CLAUSE_REDUCTION:
	if (OMP_CLAUSE_REDUCTION_INSCAN (*pc))
	  {
	    error_at (OMP_CLAUSE_LOCATION (*pc),
		      "%<inscan%> %<reduction%> clause on "
		      "%qs construct", "loop");
	    OMP_CLAUSE_REDUCTION_INSCAN (*pc) = 0;
	  }
	if (OMP_CLAUSE_REDUCTION_TASK (*pc))
	  {
	    error_at (OMP_CLAUSE_LOCATION (*pc),
		      "invalid %<task%> reduction modifier on construct "
		      "other than %<parallel%>, %qs or %<sections%>",
		      lang_GNU_Fortran () ? "do" : "for");
	    OMP_CLAUSE_REDUCTION_TASK (*pc) = 0;
	  }
	pc = &OMP_CLAUSE_CHAIN (*pc);
	break;
      case OMP_CLAUSE_LASTPRIVATE:
	/* The decl must match one of the iteration variables, either
	   directly or through the original pre-gimplification decl
	   recorded in OMP_FOR_ORIG_DECLS.  */
	for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
	  {
	    tree t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
	    gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
	    if (OMP_CLAUSE_DECL (*pc) == TREE_OPERAND (t, 0))
	      break;
	    if (OMP_FOR_ORIG_DECLS (for_stmt)
		&& TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt),
					    i)) == TREE_LIST
		&& TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt),
					       i)))
	      {
		tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
		if (OMP_CLAUSE_DECL (*pc) == TREE_PURPOSE (orig))
		  break;
	      }
	  }
	if (i == TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)))
	  {
	    error_at (OMP_CLAUSE_LOCATION (*pc),
		      "%<lastprivate%> clause on a %<loop%> construct refers "
		      "to a variable %qD which is not the loop iterator",
		      OMP_CLAUSE_DECL (*pc));
	    /* Unlink the invalid clause from the chain.  */
	    *pc = OMP_CLAUSE_CHAIN (*pc);
	    break;
	  }
	pc = &OMP_CLAUSE_CHAIN (*pc);
	break;
      default:
	pc = &OMP_CLAUSE_CHAIN (*pc);
	break;
      }

  /* The innermost level of the lowered nest is always a simd.  */
  TREE_SET_CODE (for_stmt, OMP_SIMD);

  /* Number of wrapping passes below: bind(thread) needs none (the
     OMP_SIMD alone suffices), bind(parallel) adds an OMP_FOR, and
     bind(teams) additionally adds OMP_PARALLEL + OMP_DISTRIBUTE.  */
  int last;
  switch (kind)
    {
    case OMP_CLAUSE_BIND_THREAD: last = 0; break;
    case OMP_CLAUSE_BIND_PARALLEL: last = 1; break;
    case OMP_CLAUSE_BIND_TEAMS: last = 2; break;
    }
  for (int pass = 1; pass <= last; pass++)
    {
      if (pass == 2)
	{
	  /* For bind(teams), wrap what we have so far in a combined
	     OMP_PARALLEL whose body is a BIND_EXPR; the second pass
	     then adds OMP_DISTRIBUTE around that.  */
	  tree bind = build3 (BIND_EXPR, void_type_node, NULL, NULL,
			      make_node (BLOCK));
	  append_to_statement_list (*expr_p, &BIND_EXPR_BODY (bind));
	  *expr_p = make_node (OMP_PARALLEL);
	  TREE_TYPE (*expr_p) = void_type_node;
	  OMP_PARALLEL_BODY (*expr_p) = bind;
	  OMP_PARALLEL_COMBINED (*expr_p) = 1;
	  SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (for_stmt));
	  /* Iterator replacements recorded in OMP_FOR_ORIG_DECLS (as
	     TREE_LISTs with both PURPOSE and VALUE set) become
	     firstprivate on the parallel.  */
	  tree *pc = &OMP_PARALLEL_CLAUSES (*expr_p);
	  for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
	    if (OMP_FOR_ORIG_DECLS (for_stmt)
		&& (TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i))
		    == TREE_LIST))
	      {
		tree elt = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
		if (TREE_PURPOSE (elt) && TREE_VALUE (elt))
		  {
		    *pc = build_omp_clause (UNKNOWN_LOCATION,
					    OMP_CLAUSE_FIRSTPRIVATE);
		    OMP_CLAUSE_DECL (*pc) = TREE_VALUE (elt);
		    pc = &OMP_CLAUSE_CHAIN (*pc);
		  }
	      }
	}
      /* Build the wrapping looping construct for this pass: OMP_FOR on
	 pass 1, OMP_DISTRIBUTE on pass 2, with the previous nest as
	 its body.  */
      tree t = make_node (pass == 2 ? OMP_DISTRIBUTE : OMP_FOR);
      tree *pc = &OMP_FOR_CLAUSES (t);
      TREE_TYPE (t) = void_type_node;
      OMP_FOR_BODY (t) = *expr_p;
      SET_EXPR_LOCATION (t, EXPR_LOCATION (for_stmt));
      /* Replicate the relevant clauses from the original loop construct
	 onto the new wrapper.  */
      for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
	switch (OMP_CLAUSE_CODE (c))
	  {
	  case OMP_CLAUSE_BIND:
	  case OMP_CLAUSE_ORDER:
	  case OMP_CLAUSE_COLLAPSE:
	    *pc = copy_node (c);
	    pc = &OMP_CLAUSE_CHAIN (*pc);
	    break;
	  case OMP_CLAUSE_PRIVATE:
	  case OMP_CLAUSE_FIRSTPRIVATE:
	    /* Only needed on innermost.  */
	    break;
	  case OMP_CLAUSE_LASTPRIVATE:
	    /* Lastprivate loop IVs on non-innermost wrappers also get a
	       firstprivate copy so the initial value reaches the inner
	       construct.  */
	    if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c) && pass != last)
	      {
		*pc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
					OMP_CLAUSE_FIRSTPRIVATE);
		OMP_CLAUSE_DECL (*pc) = OMP_CLAUSE_DECL (c);
		lang_hooks.decls.omp_finish_clause (*pc, NULL, false);
		pc = &OMP_CLAUSE_CHAIN (*pc);
	      }
	    *pc = copy_node (c);
	    OMP_CLAUSE_LASTPRIVATE_STMT (*pc) = NULL_TREE;
	    TREE_TYPE (*pc) = unshare_expr (TREE_TYPE (c));
	    if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
	      {
		if (pass != last)
		  OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (*pc) = 1;
		else
		  lang_hooks.decls.omp_finish_clause (*pc, NULL, false);
		OMP_CLAUSE_LASTPRIVATE_LOOP_IV (*pc) = 0;
	      }
	    pc = &OMP_CLAUSE_CHAIN (*pc);
	    break;
	  case OMP_CLAUSE_REDUCTION:
	    *pc = copy_node (c);
	    OMP_CLAUSE_DECL (*pc) = unshare_expr (OMP_CLAUSE_DECL (c));
	    TREE_TYPE (*pc) = unshare_expr (TREE_TYPE (c));
	    if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc))
	      {
		/* User-defined reductions carry placeholder decls and
		   init/merge statement trees that must be deep-copied
		   with decls remapped, not merely unshared.  First
		   collect context-less locals (note_no_context_vars
		   temporarily gives them a context so the body copier
		   can remap them).  */
		auto_vec<tree> no_context_vars;
		int walk_subtrees = 0;
		note_no_context_vars (&OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
				      &walk_subtrees, &no_context_vars);
		if (tree p = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c))
		  note_no_context_vars (&p, &walk_subtrees, &no_context_vars);
		walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_INIT (c),
					      note_no_context_vars,
					      &no_context_vars);
		walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_MERGE (c),
					      note_no_context_vars,
					      &no_context_vars);

		OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc)
		  = copy_node (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c));
		if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc))
		  OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc)
		    = copy_node (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c));

		/* Map old placeholders to the fresh copies (the clause
		   decl maps to itself) for the body copier below.  */
		hash_map<tree, tree> decl_map;
		decl_map.put (OMP_CLAUSE_DECL (c), OMP_CLAUSE_DECL (c));
		decl_map.put (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
			      OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc));
		if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc))
		  decl_map.put (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
				OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc));

		/* Deep-copy the init and merge sequences within the
		   current function using the tree-inline machinery.  */
		copy_body_data id;
		memset (&id, 0, sizeof (id));
		id.src_fn = current_function_decl;
		id.dst_fn = current_function_decl;
		id.src_cfun = cfun;
		id.decl_map = &decl_map;
		id.copy_decl = copy_decl_no_change;
		id.transform_call_graph_edges = CB_CGE_DUPLICATE;
		id.transform_new_cfg = true;
		id.transform_return_to_modify = false;
		id.eh_lp_nr = 0;
		walk_tree (&OMP_CLAUSE_REDUCTION_INIT (*pc), copy_tree_body_r,
			   &id, NULL);
		walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (*pc), copy_tree_body_r,
			   &id, NULL);

		/* Undo the temporary DECL_CONTEXT set above, on both
		   the originals and their copies.  */
		for (tree d : no_context_vars)
		  {
		    DECL_CONTEXT (d) = NULL_TREE;
		    DECL_CONTEXT (*decl_map.get (d)) = NULL_TREE;
		  }
	      }
	    else
	      {
		OMP_CLAUSE_REDUCTION_INIT (*pc)
		  = unshare_expr (OMP_CLAUSE_REDUCTION_INIT (c));
		OMP_CLAUSE_REDUCTION_MERGE (*pc)
		  = unshare_expr (OMP_CLAUSE_REDUCTION_MERGE (c));
	      }
	    pc = &OMP_CLAUSE_CHAIN (*pc);
	    break;
	  default:
	    gcc_unreachable ();
	  }
      *pc = NULL_TREE;
      *expr_p = t;
    }
  /* Gimplify the fully rewritten nest.  */
  return gimplify_expr (expr_p, pre_p, NULL, is_gimple_stmt, fb_none);
}
18636 :
18637 :
18638 : /* Helper function of optimize_target_teams, find OMP_TEAMS inside
18639 : of OMP_TARGET's body. */
18640 :
18641 : static tree
18642 92641 : find_omp_teams (tree *tp, int *walk_subtrees, void *)
18643 : {
18644 92641 : *walk_subtrees = 0; /* By default do not recurse further.  */
18645 92641 : switch (TREE_CODE (*tp))
18646 : {
18647 : case OMP_TEAMS:
18648 : return *tp; /* Found the teams construct; stop the walk.  */
18649 25856 : case BIND_EXPR:
18650 25856 : case STATEMENT_LIST:
18651 25856 : *walk_subtrees = 1; /* Only look through these wrappers.  */
18652 25856 : break;
18653 : default:
18654 : break;
18655 : }
18656 : return NULL_TREE;
18657 : }
18658 :
18659 : /* Helper function of optimize_target_teams, determine if the expression
18660 : can be computed safely before the target construct on the host. */
18661 :
18662 : static tree
18663 1311 : computable_teams_clause (tree *tp, int *walk_subtrees, void *)
18664 : {
18665 1831 : splay_tree_node n;
18666 :
18667 1831 : if (TYPE_P (*tp))
18668 : {
18669 0 : *walk_subtrees = 0; /* Types contain nothing to evaluate.  */
18670 0 : return NULL_TREE;
18671 : }
18672 1831 : switch (TREE_CODE (*tp))
18673 : {
18674 990 : case VAR_DECL:
18675 990 : case PARM_DECL:
18676 990 : case RESULT_DECL:
18677 990 : *walk_subtrees = 0;
18678 990 : if (error_operand_p (*tp)
18679 990 : || !INTEGRAL_TYPE_P (TREE_TYPE (*tp))
18680 990 : || DECL_HAS_VALUE_EXPR_P (*tp)
18681 990 : || DECL_THREAD_LOCAL_P (*tp)
18682 990 : || TREE_SIDE_EFFECTS (*tp)
18683 1980 : || TREE_THIS_VOLATILE (*tp))
18684 0 : return *tp; /* Not safely computable before the target region.  */
18685 990 : if (is_global_var (*tp)
18686 990 : && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp))
18687 0 : || lookup_attribute ("omp declare target link",
18688 0 : DECL_ATTRIBUTES (*tp))))
18689 16 : return *tp; /* Declare-target variables live on the device.  */
18690 974 : if (VAR_P (*tp)
18691 668 : && !DECL_SEEN_IN_BIND_EXPR_P (*tp)
18692 50 : && !is_global_var (*tp)
18693 1024 : && decl_function_context (*tp) == current_function_decl)
18694 50 : return *tp; /* Local not yet seen in a BIND_EXPR; scoping unclear.  */
18695 1848 : n = splay_tree_lookup (gimplify_omp_ctxp->variables,
18696 924 : (splay_tree_key) *tp);
18697 924 : if (n == NULL)
18698 : {
18699 292 : if (gimplify_omp_ctxp->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
18700 : return NULL_TREE; /* Implicitly firstprivate; host copy usable.  */
18701 24 : return *tp;
18702 : }
18703 632 : else if (n->value & GOVD_LOCAL)
18704 0 : return *tp;
18705 632 : else if (n->value & GOVD_FIRSTPRIVATE)
18706 : return NULL_TREE; /* Explicitly firstprivate; host copy usable.  */
18707 112 : else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
18708 : == (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
18709 : return NULL_TREE; /* map(always,to:)/map(always,tofrom:) is ok too.  */
18710 96 : return *tp;
18711 76 : case INTEGER_CST:
18712 76 : if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
18713 : return *tp;
18714 : return NULL_TREE;
18715 520 : case TARGET_EXPR:
18716 520 : if (TARGET_EXPR_INITIAL (*tp)
18717 520 : || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL)
18718 : return *tp;
18719 520 : return computable_teams_clause (&TARGET_EXPR_SLOT (*tp),
18720 520 : walk_subtrees, NULL);
18721 : /* Allow some reasonable subset of integral arithmetics.  */
18722 193 : case PLUS_EXPR:
18723 193 : case MINUS_EXPR:
18724 193 : case MULT_EXPR:
18725 193 : case TRUNC_DIV_EXPR:
18726 193 : case CEIL_DIV_EXPR:
18727 193 : case FLOOR_DIV_EXPR:
18728 193 : case ROUND_DIV_EXPR:
18729 193 : case TRUNC_MOD_EXPR:
18730 193 : case CEIL_MOD_EXPR:
18731 193 : case FLOOR_MOD_EXPR:
18732 193 : case ROUND_MOD_EXPR:
18733 193 : case RDIV_EXPR:
18734 193 : case EXACT_DIV_EXPR:
18735 193 : case MIN_EXPR:
18736 193 : case MAX_EXPR:
18737 193 : case LSHIFT_EXPR:
18738 193 : case RSHIFT_EXPR:
18739 193 : case BIT_IOR_EXPR:
18740 193 : case BIT_XOR_EXPR:
18741 193 : case BIT_AND_EXPR:
18742 193 : case NEGATE_EXPR:
18743 193 : case ABS_EXPR:
18744 193 : case BIT_NOT_EXPR:
18745 193 : case NON_LVALUE_EXPR:
18746 193 : CASE_CONVERT:
18747 193 : if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
18748 : return *tp;
18749 : return NULL_TREE;
18750 : /* And disallow anything else, except for comparisons.  */
18751 52 : default:
18752 52 : if (COMPARISON_CLASS_P (*tp))
18753 : return NULL_TREE;
18754 : return *tp;
18755 : }
18756 : }
18757 :
18758 : /* Try to determine if the num_teams and/or thread_limit expressions
18759 : can have their values determined already before entering the
18760 : target construct.
18761 : INTEGER_CSTs trivially are,
18762 : integral decls that are firstprivate (explicitly or implicitly)
18763 : or explicitly map(always, to:) or map(always, tofrom:) on the target
18764 : region too, and expressions involving simple arithmetics on those
18765 : too, function calls are not ok, dereferencing something neither etc.
18766 : Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
18767 : EXPR based on what we find:
18768 : 0 stands for clause not specified at all, use implementation default
18769 : -1 stands for value that can't be determined easily before entering
18770 : the target construct.
18771 : -2 means that no explicit teams construct was specified
18772 : If teams construct is not present at all, use 1 for num_teams
18773 : and 0 for thread_limit (only one team is involved, and the thread
18774 : limit is implementation defined. */
18775 :
18776 : static void
18777 13137 : optimize_target_teams (tree target, gimple_seq *pre_p)
18778 : {
18779 13137 : tree body = OMP_BODY (target);
18780 13137 : tree teams = walk_tree (&body, find_omp_teams, NULL, NULL);
18781 13137 : tree num_teams_lower = NULL_TREE;
18782 13137 : tree num_teams_upper = integer_zero_node;
18783 13137 : tree thread_limit = integer_zero_node;
18784 13137 : location_t num_teams_loc = EXPR_LOCATION (target);
18785 13137 : location_t thread_limit_loc = EXPR_LOCATION (target);
18786 13137 : tree c, *p, expr;
18787 13137 : struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp;
18788 :
18789 13137 : if (teams == NULL_TREE)
18790 7016 : num_teams_upper = build_int_cst (integer_type_node, -2); /* No teams construct.  */
18791 : else
18792 10587 : for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c))
18793 : {
18794 4466 : if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS)
18795 : {
18796 617 : p = &num_teams_upper;
18797 617 : num_teams_loc = OMP_CLAUSE_LOCATION (c);
18798 617 : if (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c))
18799 : {
18800 148 : expr = OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c);
18801 148 : if (TREE_CODE (expr) == INTEGER_CST)
18802 21 : num_teams_lower = expr;
18803 127 : else if (walk_tree (&expr, computable_teams_clause,
18804 : NULL, NULL))
18805 19 : num_teams_lower = integer_minus_one_node; /* Not computable before target.  */
18806 : else
18807 : {
18808 108 : num_teams_lower = expr;
18809 108 : gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
18810 108 : if (gimplify_expr (&num_teams_lower, pre_p, NULL,
18811 : is_gimple_val, fb_rvalue, false)
18812 : == GS_ERROR)
18813 : {
18814 0 : gimplify_omp_ctxp = target_ctx;
18815 0 : num_teams_lower = integer_minus_one_node;
18816 : }
18817 : else
18818 : {
18819 108 : gimplify_omp_ctxp = target_ctx;
18820 108 : if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
18821 28 : OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)
18822 56 : = num_teams_lower;
18823 : }
18824 : }
18825 : }
18826 : }
18827 3849 : else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT)
18828 : {
18829 471 : p = &thread_limit;
18830 471 : thread_limit_loc = OMP_CLAUSE_LOCATION (c);
18831 : }
18832 : else
18833 3378 : continue;
18834 1088 : expr = OMP_CLAUSE_OPERAND (c, 0);
18835 1088 : if (TREE_CODE (expr) == INTEGER_CST)
18836 : {
18837 173 : *p = expr;
18838 173 : continue;
18839 : }
18840 915 : if (walk_tree (&expr, computable_teams_clause, NULL, NULL))
18841 : {
18842 219 : *p = integer_minus_one_node; /* Value not determinable on the host.  */
18843 219 : continue;
18844 : }
18845 696 : *p = expr;
18846 696 : gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
18847 696 : if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue, false)
18848 : == GS_ERROR)
18849 : {
18850 0 : gimplify_omp_ctxp = target_ctx;
18851 0 : *p = integer_minus_one_node;
18852 0 : continue;
18853 : }
18854 696 : gimplify_omp_ctxp = target_ctx;
18855 696 : if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
18856 48 : OMP_CLAUSE_OPERAND (c, 0) = *p;
18857 : }
18858 13137 : if (!omp_find_clause (OMP_TARGET_CLAUSES (target), OMP_CLAUSE_THREAD_LIMIT))
18859 : {
18860 12869 : c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT);
18861 12869 : OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
18862 12869 : OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
18863 12869 : OMP_TARGET_CLAUSES (target) = c;
18864 : }
18865 13137 : c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS);
18866 13137 : OMP_CLAUSE_NUM_TEAMS_UPPER_EXPR (c) = num_teams_upper;
18867 13137 : OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c) = num_teams_lower;
18868 13137 : OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
18869 13137 : OMP_TARGET_CLAUSES (target) = c;
18870 13137 : }
18871 :
18872 : /* Gimplify the gross structure of several OMP constructs. */
18873 :
18874 : static void
18875 39458 : gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
18876 : {
18877 39458 : tree expr = *expr_p;
18878 39458 : gimple *stmt;
18879 39458 : gimple_seq body = NULL;
18880 39458 : enum omp_region_type ort;
18881 :
18882 : /* Map the construct's tree code to the OMP region type used by the
18883 : clause-scanning machinery.  */
18884 39458 : switch (TREE_CODE (expr))
18885 : {
18886 : case OMP_SECTIONS:
18887 : case OMP_SINGLE:
18888 : ort = ORT_WORKSHARE;
18889 : break;
18890 214 : case OMP_SCOPE:
18891 214 : ort = ORT_TASKGROUP;
18892 214 : break;
18893 13137 : case OMP_TARGET:
18894 13137 : ort = OMP_TARGET_COMBINED (expr) ? ORT_COMBINED_TARGET : ORT_TARGET;
18895 : break;
18896 2541 : case OACC_KERNELS:
18897 2541 : ort = ORT_ACC_KERNELS;
18898 2541 : break;
18899 7901 : case OACC_PARALLEL:
18900 7901 : ort = ORT_ACC_PARALLEL;
18901 7901 : break;
18902 1040 : case OACC_SERIAL:
18903 1040 : ort = ORT_ACC_SERIAL;
18904 1040 : break;
18905 1950 : case OACC_DATA:
18906 1950 : ort = ORT_ACC_DATA;
18907 1950 : break;
18908 1863 : case OMP_TARGET_DATA:
18909 1863 : ort = ORT_TARGET_DATA;
18910 1863 : break;
18911 8793 : case OMP_TEAMS:
18912 8793 : ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
18913 8793 : if (gimplify_omp_ctxp == NULL
18914 6161 : || gimplify_omp_ctxp->region_type == ORT_IMPLICIT_TARGET)
18915 2632 : ort = (enum omp_region_type) (ort | ORT_HOST_TEAMS); /* Teams executed on the host.  */
18916 : break;
18917 118 : case OACC_HOST_DATA:
18918 118 : ort = ORT_ACC_HOST_DATA;
18919 118 : break;
18920 0 : default:
18921 0 : gcc_unreachable ();
18922 : }
18923 :
18924 39458 : gimple_seq iterator_loops_seq = NULL;
18925 39458 : if (TREE_CODE (expr) == OMP_TARGET || TREE_CODE (expr) == OMP_TARGET_DATA)
18926 : {
18927 15000 : remove_unused_omp_iterator_vars (&OMP_CLAUSES (expr));
18928 15000 : build_omp_iterators_loops (&OMP_CLAUSES (expr), &iterator_loops_seq);
18929 : }
18930 :
18931 39458 : bool save_in_omp_construct = in_omp_construct;
18932 39458 : if ((ort & ORT_ACC) == 0)
18933 25908 : in_omp_construct = false; /* Reset for the body of non-OpenACC regions.  */
18934 39458 : gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort,
18935 39458 : TREE_CODE (expr), &iterator_loops_seq);
18936 39458 : if (TREE_CODE (expr) == OMP_TARGET)
18937 13137 : optimize_target_teams (expr, pre_p);
18938 39458 : if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0
18939 10908 : || (ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
18940 : {
18941 : /* These regions get their own gimplify context for the body.  */
18942 31182 : push_gimplify_context ();
18943 31182 : gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body);
18944 31182 : if (gimple_code (g) == GIMPLE_BIND)
18945 31182 : pop_gimplify_context (g);
18946 : else
18947 0 : pop_gimplify_context (NULL);
18948 31182 : if ((ort & ORT_TARGET_DATA) != 0)
18949 : {
18950 : /* Wrap the body in a try/finally that ends the data region even
18951 : on abnormal exit.  */
18952 3931 : enum built_in_function end_ix;
18953 3931 : switch (TREE_CODE (expr))
18954 : {
18955 : case OACC_DATA:
18956 : case OACC_HOST_DATA:
18957 : end_ix = BUILT_IN_GOACC_DATA_END;
18958 : break;
18959 1863 : case OMP_TARGET_DATA:
18960 1863 : end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
18961 1863 : break;
18962 0 : default:
18963 0 : gcc_unreachable ();
18964 : }
18965 3931 : tree fn = builtin_decl_explicit (end_ix);
18966 3931 : g = gimple_build_call (fn, 0);
18967 3931 : gimple_seq cleanup = NULL;
18968 3931 : gimple_seq_add_stmt (&cleanup, g);
18969 3931 : g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
18970 3931 : body = NULL;
18971 3931 : gimple_seq_add_stmt (&body, g);
18972 : }
18973 : }
18974 : else
18975 8276 : gimplify_and_add (OMP_BODY (expr), &body);
18976 39458 : gimplify_adjust_omp_clauses (pre_p, body, &OMP_CLAUSES (expr),
18977 39458 : TREE_CODE (expr), &iterator_loops_seq);
18978 39458 : in_omp_construct = save_in_omp_construct;
18979 :
18980 : /* Build the corresponding GIMPLE statement for the construct.  */
18981 39458 : switch (TREE_CODE (expr))
18982 : {
18983 1950 : case OACC_DATA:
18984 3900 : stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
18985 1950 : OMP_CLAUSES (expr));
18986 1950 : break;
18987 118 : case OACC_HOST_DATA:
18988 118 : if (omp_find_clause (OMP_CLAUSES (expr), OMP_CLAUSE_IF_PRESENT))
18989 : {
18990 128 : for (tree c = OMP_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
18991 95 : if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
18992 57 : OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c) = 1;
18993 : }
18994 :
18995 236 : stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_HOST_DATA,
18996 118 : OMP_CLAUSES (expr));
18997 118 : break;
18998 2541 : case OACC_KERNELS:
18999 5082 : stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
19000 2541 : OMP_CLAUSES (expr));
19001 2541 : break;
19002 7901 : case OACC_PARALLEL:
19003 15802 : stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
19004 7901 : OMP_CLAUSES (expr));
19005 7901 : break;
19006 1040 : case OACC_SERIAL:
19007 2080 : stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_SERIAL,
19008 1040 : OMP_CLAUSES (expr));
19009 1040 : break;
19010 626 : case OMP_SECTIONS:
19011 626 : stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
19012 626 : break;
19013 1275 : case OMP_SINGLE:
19014 1275 : stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
19015 1275 : break;
19016 214 : case OMP_SCOPE:
19017 214 : stmt = gimple_build_omp_scope (body, OMP_CLAUSES (expr));
19018 214 : break;
19019 13137 : case OMP_TARGET:
19020 26274 : stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
19021 13137 : OMP_CLAUSES (expr), iterator_loops_seq);
19022 13137 : break;
19023 1863 : case OMP_TARGET_DATA:
19024 : /* Put use_device_{ptr,addr} clauses last, as map clauses are supposed
19025 : to be evaluated before the use_device_{ptr,addr} clauses if they
19026 : refer to the same variables.  */
19027 1863 : {
19028 1863 : tree use_device_clauses;
19029 1863 : tree *pc, *uc = &use_device_clauses;
19030 9916 : for (pc = &OMP_CLAUSES (expr); *pc; )
19031 8053 : if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_USE_DEVICE_PTR
19032 8053 : || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_USE_DEVICE_ADDR)
19033 : {
19034 : /* Unlink from the main chain and append to the
19035 : use_device_clauses list.  */
19036 1959 : *uc = *pc;
19037 1959 : *pc = OMP_CLAUSE_CHAIN (*pc);
19038 1959 : uc = &OMP_CLAUSE_CHAIN (*uc);
19039 : }
19040 : else
19041 6094 : pc = &OMP_CLAUSE_CHAIN (*pc);
19042 1863 : *uc = NULL_TREE;
19043 1863 : *pc = use_device_clauses;
19044 1863 : stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
19045 1863 : OMP_CLAUSES (expr), iterator_loops_seq);
19046 : }
19047 1863 : break;
19048 8793 : case OMP_TEAMS:
19049 8793 : stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
19050 8793 : if ((ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
19051 2632 : gimple_omp_teams_set_host (as_a <gomp_teams *> (stmt), true);
19052 : break;
19053 0 : default:
19054 0 : gcc_unreachable ();
19055 : }
19056 :
19057 39458 : gimplify_seq_add_stmt (pre_p, stmt);
19058 39458 : *expr_p = NULL_TREE;
19059 39458 : }
19052 :
19053 : /* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
19054 : target update constructs. */
19055 :
static void
19057 12057 : gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
19058 : {
19059 12057 : tree expr = *expr_p;
19060 12057 : int kind;
19061 12057 : gomp_target *stmt;
19062 12057 : enum omp_region_type ort = ORT_WORKSHARE;
19063 :
19064 : /* Select the GIMPLE_OMP_TARGET kind and region type for each
19065 : standalone construct.  */
19066 12057 : switch (TREE_CODE (expr))
19067 : {
19068 : case OACC_ENTER_DATA:
19069 : kind = GF_OMP_TARGET_KIND_OACC_ENTER_DATA;
19070 : ort = ORT_ACC;
19071 : break;
19072 : case OACC_EXIT_DATA:
19073 : kind = GF_OMP_TARGET_KIND_OACC_EXIT_DATA;
19074 : ort = ORT_ACC;
19075 : break;
19076 : case OACC_UPDATE:
19077 : kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
19078 : ort = ORT_ACC;
19079 : break;
19080 : case OMP_TARGET_UPDATE:
19081 : kind = GF_OMP_TARGET_KIND_UPDATE;
19082 : break;
19083 : case OMP_TARGET_ENTER_DATA:
19084 : kind = GF_OMP_TARGET_KIND_ENTER_DATA;
19085 : break;
19086 : case OMP_TARGET_EXIT_DATA:
19087 : kind = GF_OMP_TARGET_KIND_EXIT_DATA;
19088 : break;
19089 0 : default:
19090 0 : gcc_unreachable ();
19091 : }
19092 :
19093 12057 : gimple_seq iterator_loops_seq = NULL;
19094 12057 : remove_unused_omp_iterator_vars (&OMP_STANDALONE_CLAUSES (expr));
19095 12057 : build_omp_iterators_loops (&OMP_STANDALONE_CLAUSES (expr),
19096 : &iterator_loops_seq);
19097 :
19098 12057 : gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p,
19099 12057 : ort, TREE_CODE (expr), &iterator_loops_seq);
19100 12057 : gimplify_adjust_omp_clauses (pre_p, NULL, &OMP_STANDALONE_CLAUSES (expr),
19101 12057 : TREE_CODE (expr), &iterator_loops_seq);
19102 12057 : if (TREE_CODE (expr) == OACC_UPDATE
19103 12057 : && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
19104 : OMP_CLAUSE_IF_PRESENT))
19105 : {
19106 : /* The runtime uses GOMP_MAP_{TO,FROM} to denote the if_present
19107 : clause.  */
19108 123 : for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
19109 97 : if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
19110 47 : switch (OMP_CLAUSE_MAP_KIND (c))
19111 : {
19112 14 : case GOMP_MAP_FORCE_TO:
19113 14 : OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
19114 14 : break;
19115 24 : case GOMP_MAP_FORCE_FROM:
19116 24 : OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FROM);
19117 24 : break;
19118 : default:
19119 : break;
19120 : }
19121 : }
19122 12031 : else if (TREE_CODE (expr) == OACC_EXIT_DATA
19123 12031 : && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
19124 : OMP_CLAUSE_FINALIZE))
19125 : {
19126 : /* Use GOMP_MAP_DELETE/GOMP_MAP_FORCE_FROM to denote "finalize"
19127 : semantics.  */
19128 75 : bool have_clause = false;
19129 275 : for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
19130 200 : if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
19131 117 : switch (OMP_CLAUSE_MAP_KIND (c))
19132 : {
19133 45 : case GOMP_MAP_FROM:
19134 45 : OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_FROM);
19135 45 : have_clause = true;
19136 45 : break;
19137 47 : case GOMP_MAP_RELEASE:
19138 47 : OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_DELETE);
19139 47 : have_clause = true;
19140 47 : break;
19141 : case GOMP_MAP_TO_PSET:
19142 : /* Fortran arrays with descriptors must map that descriptor when
19143 : doing standalone "attach" operations (in OpenACC).  In that
19144 : case GOMP_MAP_TO_PSET appears by itself with no preceding
19145 : clause (see trans-openmp.cc:gfc_trans_omp_clauses).  */
19146 : break;
19147 2 : case GOMP_MAP_POINTER:
19148 : /* TODO PR92929: we may see these here, but they'll always follow
19149 : one of the clauses above, and will be handled by libgomp as
19150 : one group, so no handling required here.  */
19151 2 : gcc_assert (have_clause);
19152 : break;
19153 22 : case GOMP_MAP_DETACH:
19154 22 : OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_DETACH);
19155 22 : have_clause = false;
19156 22 : break;
19157 : case GOMP_MAP_STRUCT:
19158 : case GOMP_MAP_STRUCT_UNORD:
19159 22 : have_clause = false;
19160 : break;
19161 0 : default:
19162 0 : gcc_unreachable ();
19163 : }
19164 : }
19165 12057 : stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr),
19166 : iterator_loops_seq);
19167 :
19168 12057 : gimplify_seq_add_stmt (pre_p, stmt);
19169 12057 : *expr_p = NULL_TREE;
19170 12057 : }
19169 :
19170 : /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
19171 : stabilized the lhs of the atomic operation as *ADDR. Return true if
19172 : EXPR is this stabilized form. */
19173 :
static bool
19176 36791 : goa_lhs_expr_p (tree expr, tree addr)
19177 : {
19178 : /* Also include casts to other type variants.  The C front end is fond
19179 : of adding these for e.g. volatile variables.  This is like
19180 : STRIP_TYPE_NOPS but includes the main variant lookup.  */
19181 36791 : STRIP_USELESS_TYPE_CONVERSION (expr);
19182 :
19183 36791 : if (INDIRECT_REF_P (expr))
19184 : {
19185 : /* Peel matching conversions off both EXPR's operand and ADDR in
19186 : lock-step, then compare the stripped pointers.  */
19187 5011 : expr = TREE_OPERAND (expr, 0);
19188 5011 : while (expr != addr
19189 92 : && (CONVERT_EXPR_P (expr)
19190 92 : || TREE_CODE (expr) == NON_LVALUE_EXPR)
19191 0 : && TREE_CODE (expr) == TREE_CODE (addr)
19192 5011 : && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
19193 : {
19194 0 : expr = TREE_OPERAND (expr, 0);
19195 0 : addr = TREE_OPERAND (addr, 0);
19196 : }
19197 5011 : if (expr == addr)
19198 : return true;
19199 : /* Also accept &x vs. &x spelled as two distinct ADDR_EXPRs.  */
19200 92 : return (TREE_CODE (addr) == ADDR_EXPR
19201 62 : && TREE_CODE (expr) == ADDR_EXPR
19202 92 : && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
19203 : }
19204 31780 : if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
19205 : return true;
19206 : return false;
19207 : }
19204 :
19205 : /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
19206 : expression does not involve the lhs, evaluate it into a temporary.
19207 : Return 1 if the lhs appeared as a subexpression, 0 if it did not,
19208 : or -1 if an error was encountered. */
19209 :
static int
19212 36791 : goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
19213 : tree lhs_var, tree &target_expr, bool rhs, int depth)
19214 : {
19215 36791 : tree expr = *expr_p;
19216 36791 : int saw_lhs = 0;
19217 :
19218 : /* NOTE: a NULL PRE_P means "probe only": check whether the lhs occurs
19219 : without rewriting or emitting anything.  Several cases below probe
19220 : first and only then redo the walk for real.  */
19221 36791 : if (goa_lhs_expr_p (expr, lhs_addr))
19222 : {
19223 9183 : if (pre_p)
19224 8525 : *expr_p = lhs_var; /* Replace lhs occurrence with the temporary.  */
19225 9183 : return 1;
19226 : }
19227 27608 : if (is_gimple_val (expr))
19228 : return 0;
19229 :
19230 : /* Maximum depth of lhs in expression is for the
19231 : __builtin_clear_padding (...), __builtin_clear_padding (...),
19232 : __builtin_memcmp (&TARGET_EXPR <lhs, >, ...) == 0 ? ... : lhs; */
19233 17276 : if (++depth > 7)
19234 16 : goto finish;
19235 :
19236 17260 : switch (TREE_CODE_CLASS (TREE_CODE (expr)))
19237 : {
19238 9652 : case tcc_binary:
19239 9652 : case tcc_comparison:
19240 9652 : saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
19241 : lhs_var, target_expr, true, depth);
19242 : /* FALLTHRU */
19243 11699 : case tcc_unary:
19244 11699 : saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
19245 : lhs_var, target_expr, true, depth);
19246 11699 : break;
19247 2982 : case tcc_expression:
19248 2982 : switch (TREE_CODE (expr))
19249 : {
19250 836 : case TRUTH_ANDIF_EXPR:
19251 836 : case TRUTH_ORIF_EXPR:
19252 836 : case TRUTH_AND_EXPR:
19253 836 : case TRUTH_OR_EXPR:
19254 836 : case TRUTH_XOR_EXPR:
19255 836 : case BIT_INSERT_EXPR:
19256 836 : saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
19257 : lhs_addr, lhs_var, target_expr, true,
19258 : depth);
19259 : /* FALLTHRU */
19260 888 : case TRUTH_NOT_EXPR:
19261 888 : saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
19262 : lhs_addr, lhs_var, target_expr, true,
19263 : depth);
19264 888 : break;
19265 550 : case MODIFY_EXPR:
19266 : /* Probe first; if the lhs doesn't occur, leave the whole
19267 : MODIFY_EXPR to be evaluated into a temporary below.  */
19268 550 : if (pre_p && !goa_stabilize_expr (expr_p, NULL, lhs_addr, lhs_var,
19269 : target_expr, true, depth))
19270 : break;
19271 544 : saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
19272 : lhs_addr, lhs_var, target_expr, true,
19273 : depth);
19274 544 : saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
19275 : lhs_addr, lhs_var, target_expr, false,
19276 : depth);
19277 544 : break;
19278 : /* FALLTHRU */
19279 164 : case ADDR_EXPR:
19280 164 : if (pre_p && !goa_stabilize_expr (expr_p, NULL, lhs_addr, lhs_var,
19281 : target_expr, true, depth))
19282 : break;
19283 138 : saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
19284 : lhs_addr, lhs_var, target_expr, false,
19285 : depth);
19286 138 : break;
19287 : case COMPOUND_EXPR:
19288 : /* Break out any preevaluations from cp_build_modify_expr.  */
19289 76 : for (; TREE_CODE (expr) == COMPOUND_EXPR;
19290 38 : expr = TREE_OPERAND (expr, 1))
19291 : {
19292 : /* Special-case __builtin_clear_padding call before
19293 : __builtin_memcmp.  */
19294 38 : if (TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR)
19295 : {
19296 2 : tree fndecl = get_callee_fndecl (TREE_OPERAND (expr, 0));
19297 2 : if (fndecl
19298 2 : && fndecl_built_in_p (fndecl, BUILT_IN_CLEAR_PADDING)
19299 0 : && VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
19300 2 : && (!pre_p
19301 0 : || goa_stabilize_expr (&TREE_OPERAND (expr, 0), NULL,
19302 : lhs_addr, lhs_var,
19303 : target_expr, true, depth)))
19304 : {
19305 0 : if (pre_p)
19306 0 : *expr_p = expr;
19307 0 : saw_lhs = goa_stabilize_expr (&TREE_OPERAND (expr, 0),
19308 : pre_p, lhs_addr, lhs_var,
19309 : target_expr, true, depth);
19310 0 : saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1),
19311 : pre_p, lhs_addr, lhs_var,
19312 : target_expr, rhs, depth);
19313 0 : return saw_lhs;
19314 : }
19315 : }
19316 :
19317 38 : if (pre_p)
19318 35 : gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
19319 : }
19320 38 : if (!pre_p)
19321 3 : return goa_stabilize_expr (&expr, pre_p, lhs_addr, lhs_var,
19322 3 : target_expr, rhs, depth);
19323 35 : *expr_p = expr;
19324 35 : return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var,
19325 35 : target_expr, rhs, depth);
19326 441 : case COND_EXPR:
19327 : /* Probe the condition; only dig into the COND_EXPR when the lhs
19328 : actually appears in it.  */
19329 441 : if (!goa_stabilize_expr (&TREE_OPERAND (expr, 0), NULL, lhs_addr,
19330 : lhs_var, target_expr, true, depth))
19331 : break;
19332 428 : saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
19333 : lhs_addr, lhs_var, target_expr, true,
19334 : depth);
19335 428 : saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
19336 : lhs_addr, lhs_var, target_expr, true,
19337 : depth);
19338 428 : saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 2), pre_p,
19339 : lhs_addr, lhs_var, target_expr, true,
19340 : depth);
19341 428 : break;
19342 880 : case TARGET_EXPR:
19343 880 : if (TARGET_EXPR_INITIAL (expr))
19344 : {
19345 880 : if (pre_p && !goa_stabilize_expr (expr_p, NULL, lhs_addr,
19346 : lhs_var, target_expr, true,
19347 : depth))
19348 : break;
19349 702 : if (expr == target_expr)
19350 : saw_lhs = 1;
19351 : else
19352 : {
19353 702 : saw_lhs = goa_stabilize_expr (&TARGET_EXPR_INITIAL (expr),
19354 : pre_p, lhs_addr, lhs_var,
19355 : target_expr, true, depth);
19356 702 : if (saw_lhs && target_expr == NULL_TREE && pre_p)
19357 26 : target_expr = expr; /* Remember the first lhs-bearing TARGET_EXPR.  */
19358 : }
19359 : }
19360 : break;
19361 : default:
19362 : break;
19363 : }
19364 : break;
19365 556 : case tcc_reference:
19366 556 : if (TREE_CODE (expr) == BIT_FIELD_REF
19367 490 : || TREE_CODE (expr) == VIEW_CONVERT_EXPR)
19368 362 : saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
19369 : lhs_addr, lhs_var, target_expr, true,
19370 : depth);
19371 : break;
19372 58 : case tcc_vl_exp:
19373 58 : if (TREE_CODE (expr) == CALL_EXPR)
19374 : {
19375 58 : if (tree fndecl = get_callee_fndecl (expr))
19376 58 : if (fndecl_built_in_p (fndecl, BUILT_IN_CLEAR_PADDING,
19377 : BUILT_IN_MEMCMP))
19378 : {
19379 56 : int nargs = call_expr_nargs (expr);
19380 224 : for (int i = 0; i < nargs; i++)
19381 168 : saw_lhs |= goa_stabilize_expr (&CALL_EXPR_ARG (expr, i),
19382 : pre_p, lhs_addr, lhs_var,
19383 : target_expr, true, depth);
19384 : }
19385 : }
19386 : break;
19387 : default:
19388 : break;
19389 : }
19390 :
19391 17238 : finish:
19392 : /* Subexpressions not involving the lhs are evaluated into
19393 : temporaries ahead of the atomic operation.  */
19394 17238 : if (saw_lhs == 0 && pre_p)
19395 : {
19396 3485 : enum gimplify_status gs;
19397 3485 : if (TREE_CODE (expr) == CALL_EXPR && VOID_TYPE_P (TREE_TYPE (expr)))
19398 : {
19399 0 : gimplify_stmt (&expr, pre_p);
19400 0 : return saw_lhs;
19401 : }
19402 3485 : else if (rhs)
19403 3307 : gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
19404 : else
19405 178 : gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_lvalue, fb_lvalue);
19406 3485 : if (gs != GS_ALL_DONE)
19407 36791 : saw_lhs = -1; /* Signal gimplification failure to the caller.  */
19408 : }
19409 :
19410 : return saw_lhs;
19411 : }
19402 :
19403 : /* Gimplify an OMP_ATOMIC statement. */
19404 :
static enum gimplify_status
19407 10236 : gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
19408 : {
19409 10236 : tree addr = TREE_OPERAND (*expr_p, 0);
19410 10236 : tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
19411 10236 : ? NULL : TREE_OPERAND (*expr_p, 1);
19412 10236 : tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
19413 10236 : tree tmp_load;
19414 10236 : gomp_atomic_load *loadstmt;
19415 10236 : gomp_atomic_store *storestmt;
19416 10236 : tree target_expr = NULL_TREE;
19417 :
19418 : /* TMP_LOAD holds the atomically loaded old value; replace lhs
19419 : occurrences in RHS with it.  */
19420 10236 : tmp_load = create_tmp_reg (type);
19421 10236 : if (rhs
19422 10236 : && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load, target_expr,
19423 : true, 0) < 0)
19424 : return GS_ERROR;
19425 :
19426 10236 : if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
19427 : != GS_ALL_DONE)
19428 : return GS_ERROR;
19429 :
19430 10236 : loadstmt = gimple_build_omp_atomic_load (tmp_load, addr,
19431 10236 : OMP_ATOMIC_MEMORY_ORDER (*expr_p));
19432 10236 : gimplify_seq_add_stmt (pre_p, loadstmt);
19433 10236 : if (rhs)
19434 : {
19435 : /* BIT_INSERT_EXPR is not valid for non-integral bitfield
19436 : representatives.  Use BIT_FIELD_REF on the lhs instead.  */
19437 9055 : tree rhsarg = rhs;
19438 9055 : if (TREE_CODE (rhs) == COND_EXPR)
19439 428 : rhsarg = TREE_OPERAND (rhs, 1);
19440 9055 : if (TREE_CODE (rhsarg) == BIT_INSERT_EXPR
19441 9055 : && !INTEGRAL_TYPE_P (TREE_TYPE (tmp_load)))
19442 : {
19443 32 : tree bitpos = TREE_OPERAND (rhsarg, 2);
19444 32 : tree op1 = TREE_OPERAND (rhsarg, 1);
19445 32 : tree bitsize;
19446 32 : tree tmp_store = tmp_load;
19447 32 : if (TREE_CODE (*expr_p) == OMP_ATOMIC_CAPTURE_OLD)
19448 12 : tmp_store = get_initialized_tmp_var (tmp_load, pre_p); /* Keep old value.  */
19449 32 : if (INTEGRAL_TYPE_P (TREE_TYPE (op1)))
19450 32 : bitsize = bitsize_int (TYPE_PRECISION (TREE_TYPE (op1)));
19451 : else
19452 0 : bitsize = TYPE_SIZE (TREE_TYPE (op1));
19453 32 : gcc_assert (TREE_OPERAND (rhsarg, 0) == tmp_load);
19454 32 : tree t = build2_loc (EXPR_LOCATION (rhsarg),
19455 : MODIFY_EXPR, void_type_node,
19456 32 : build3_loc (EXPR_LOCATION (rhsarg),
19457 32 : BIT_FIELD_REF, TREE_TYPE (op1),
19458 : tmp_store, bitsize, bitpos), op1);
19459 32 : if (TREE_CODE (rhs) == COND_EXPR)
19460 16 : t = build3_loc (EXPR_LOCATION (rhs), COND_EXPR, void_type_node,
19461 16 : TREE_OPERAND (rhs, 0), t, void_node);
19462 32 : gimplify_and_add (t, pre_p);
19463 32 : rhs = tmp_store;
19464 : }
19465 9055 : bool save_allow_rhs_cond_expr = gimplify_ctxp->allow_rhs_cond_expr;
19466 9055 : if (TREE_CODE (rhs) == COND_EXPR)
19467 412 : gimplify_ctxp->allow_rhs_cond_expr = true; /* For atomic compare forms.  */
19468 9055 : enum gimplify_status gs = gimplify_expr (&rhs, pre_p, NULL,
19469 : is_gimple_val, fb_rvalue);
19470 9055 : gimplify_ctxp->allow_rhs_cond_expr = save_allow_rhs_cond_expr;
19471 9055 : if (gs != GS_ALL_DONE)
19472 : return GS_ERROR;
19473 : }
19474 :
19475 10236 : if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
19476 1181 : rhs = tmp_load;
19477 10236 : storestmt
19478 10236 : = gimple_build_omp_atomic_store (rhs, OMP_ATOMIC_MEMORY_ORDER (*expr_p));
19479 10236 : if (TREE_CODE (*expr_p) != OMP_ATOMIC_READ && OMP_ATOMIC_WEAK (*expr_p))
19480 : {
19481 37 : gimple_omp_atomic_set_weak (loadstmt);
19482 37 : gimple_omp_atomic_set_weak (storestmt);
19483 : }
19484 10236 : gimplify_seq_add_stmt (pre_p, storestmt);
19485 10236 : switch (TREE_CODE (*expr_p))
19486 : {
19487 2065 : case OMP_ATOMIC_READ:
19488 2065 : case OMP_ATOMIC_CAPTURE_OLD:
19489 2065 : *expr_p = tmp_load; /* Result is the value before the store.  */
19490 2065 : gimple_omp_atomic_set_need_value (loadstmt);
19491 2065 : break;
19492 868 : case OMP_ATOMIC_CAPTURE_NEW:
19493 868 : *expr_p = rhs; /* Result is the value being stored.  */
19494 868 : gimple_omp_atomic_set_need_value (storestmt);
19495 868 : break;
19496 7303 : default:
19497 7303 : *expr_p = NULL;
19498 7303 : break;
19499 : }
19500 :
19501 : return GS_ALL_DONE;
19502 : }
19500 :
19501 : /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
19502 : body, and adding some EH bits. */
19503 :
19504 : static enum gimplify_status
19505 478 : gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
19506 : {
19507 478 : tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
19508 478 : gimple *body_stmt;
19509 478 : gtransaction *trans_stmt;
19510 478 : gimple_seq body = NULL;
19511 478 : int subcode = 0;
19512 :
19513 : /* Wrap the transaction body in a BIND_EXPR so we have a context
19514 : where to put decls for OMP. */
19515 478 : if (TREE_CODE (tbody) != BIND_EXPR)
19516 : {
19517 439 : tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
19518 439 : TREE_SIDE_EFFECTS (bind) = 1;
19519 439 : SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
19520 439 : TRANSACTION_EXPR_BODY (expr) = bind;
19521 : }
19522 :
19523 478 : push_gimplify_context ();
19524 478 : temp = voidify_wrapper_expr (*expr_p, NULL);
19525 :
19526 478 : body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
19527 478 : pop_gimplify_context (body_stmt);
19528 :
19529 478 : trans_stmt = gimple_build_transaction (body);
19530 478 : if (TRANSACTION_EXPR_OUTER (expr))
19531 : subcode = GTMA_IS_OUTER;
19532 447 : else if (TRANSACTION_EXPR_RELAXED (expr))
19533 81 : subcode = GTMA_IS_RELAXED;
19534 478 : gimple_transaction_set_subcode (trans_stmt, subcode);
19535 :
19536 478 : gimplify_seq_add_stmt (pre_p, trans_stmt);
19537 :
19538 478 : if (temp)
19539 : {
19540 76 : *expr_p = temp;
19541 76 : return GS_OK;
19542 : }
19543 :
19544 402 : *expr_p = NULL_TREE;
19545 402 : return GS_ALL_DONE;
19546 : }
19547 :
/* Gimplify an OMP_ORDERED construct.  EXPR is the tree version.  BODY
   is the OMP_BODY of the original EXPR (which has already been
   gimplified so it's not present in the EXPR).

   Return the gimplified GIMPLE_OMP_ORDERED tuple.  */

static gimple *
gimplify_omp_ordered (tree expr, gimple_seq body)
{
  tree c, decls;
  int failures = 0;
  unsigned int i;
  tree source_c = NULL_TREE;   /* First doacross(source)/depend(source) seen.  */
  tree sink_c = NULL_TREE;     /* Last doacross(sink)/depend(sink) seen.  */

  if (gimplify_omp_ctxp)
    {
      /* Validate doacross clauses against the enclosing ordered loop.
	 loop_iter_var stores pairs: [2*i] is the original iteration
	 variable of loop level i, [2*i+1] its gimplified replacement;
	 it is empty when not nested in a loop with an ordered clause.  */
      for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS
	    && gimplify_omp_ctxp->loop_iter_var.is_empty ())
	  {
	    error_at (OMP_CLAUSE_LOCATION (c),
		      "%<ordered%> construct with %qs clause must be "
		      "closely nested inside a loop with %<ordered%> clause",
		      OMP_CLAUSE_DOACROSS_DEPEND (c) ? "depend" : "doacross");
	    failures++;
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS
		 && OMP_CLAUSE_DOACROSS_KIND (c) == OMP_CLAUSE_DOACROSS_SINK)
	  {
	    bool fail = false;
	    sink_c = c;
	    if (OMP_CLAUSE_DECL (c) == NULL_TREE)
	      continue; /* omp_cur_iteration - 1 */
	    /* Each TREE_LIST node names one iteration variable; check it
	       matches the corresponding loop level and substitute the
	       remapped variable in place on success.  */
	    for (decls = OMP_CLAUSE_DECL (c), i = 0;
		 decls && TREE_CODE (decls) == TREE_LIST;
		 decls = TREE_CHAIN (decls), ++i)
	      if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2)
		continue;  /* Too many entries; diagnosed after the loop.  */
	      else if (TREE_VALUE (decls)
		       != gimplify_omp_ctxp->loop_iter_var[2 * i])
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "variable %qE is not an iteration "
			    "of outermost loop %d, expected %qE",
			    TREE_VALUE (decls), i + 1,
			    gimplify_omp_ctxp->loop_iter_var[2 * i]);
		  fail = true;
		  failures++;
		}
	      else
		TREE_VALUE (decls)
		  = gimplify_omp_ctxp->loop_iter_var[2 * i + 1];
	    /* I now holds the number of entries processed; it must equal
	       the collapse depth of the enclosing ordered loop nest.  */
	    if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2)
	      {
		error_at (OMP_CLAUSE_LOCATION (c),
			  "number of variables in %qs clause with "
			  "%<sink%> modifier does not match number of "
			  "iteration variables",
			  OMP_CLAUSE_DOACROSS_DEPEND (c)
			  ? "depend" : "doacross");
		failures++;
	      }
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS
		 && OMP_CLAUSE_DOACROSS_KIND (c) == OMP_CLAUSE_DOACROSS_SOURCE)
	  {
	    /* At most one source clause is permitted per construct.  */
	    if (source_c)
	      {
		error_at (OMP_CLAUSE_LOCATION (c),
			  "more than one %qs clause with %<source%> "
			  "modifier on an %<ordered%> construct",
			  OMP_CLAUSE_DOACROSS_DEPEND (source_c)
			  ? "depend" : "doacross");
		failures++;
	      }
	    else
	      source_c = c;
	  }
    }
  /* Source and sink modifiers are mutually exclusive on one construct.  */
  if (source_c && sink_c)
    {
      error_at (OMP_CLAUSE_LOCATION (source_c),
		"%qs clause with %<source%> modifier specified "
		"together with %qs clauses with %<sink%> modifier "
		"on the same construct",
		OMP_CLAUSE_DOACROSS_DEPEND (source_c) ? "depend" : "doacross",
		OMP_CLAUSE_DOACROSS_DEPEND (sink_c) ? "depend" : "doacross");
      failures++;
    }

  /* On any diagnostic, drop the construct entirely (errors were emitted).  */
  if (failures)
    return gimple_build_nop ();
  return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr));
}
19643 :
19644 : /* Gimplify an OMP_INTEROP statement. */
19645 :
19646 : static enum gimplify_status
19647 612 : gimplify_omp_interop (tree *expr_p, gimple_seq *pre_p)
19648 : {
19649 612 : tree expr = *expr_p;
19650 :
19651 612 : gimplify_scan_omp_clauses (&OMP_INTEROP_CLAUSES (expr), pre_p, ORT_TASK,
19652 : OMP_INTEROP);
19653 612 : gimple *stmt = gimple_build_omp_interop (OMP_INTEROP_CLAUSES (expr));
19654 612 : gimplify_seq_add_stmt (pre_p, stmt);
19655 612 : *expr_p = NULL_TREE;
19656 612 : return GS_ALL_DONE;
19657 : }
19658 :
19659 : /* Callback for walk_tree to find an IFN_GOMP_DISPATCH. */
19660 :
19661 : static tree
19662 3100 : find_ifn_gomp_dispatch (tree *tp, int *, void *modify)
19663 : {
19664 3100 : tree t = *tp;
19665 :
19666 3100 : if (TREE_CODE (t) == CALL_EXPR && CALL_EXPR_IFN (t) == IFN_GOMP_DISPATCH)
19667 1414 : return *(tree *) modify ? *(tree *) modify : *tp;
19668 :
19669 2248 : if (TREE_CODE (t) == MODIFY_EXPR)
19670 513 : *(tree *) modify = *tp;
19671 :
19672 : return NULL_TREE;
19673 : }
19674 :
/* Gimplify an OMP_DISPATCH construct.  */

static enum gimplify_status
gimplify_omp_dispatch (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p;
  gimple_seq body = NULL;

  gimplify_scan_omp_clauses (&OMP_DISPATCH_CLAUSES (expr), pre_p, ORT_DISPATCH,
			     OMP_DISPATCH);
  push_gimplify_context ();

  // If device clause, adjust ICV
  tree device
    = omp_find_clause (OMP_DISPATCH_CLAUSES (expr), OMP_CLAUSE_DEVICE);
  // If no device clause exists but an interop clause with a single list
  // item, use it to obtain the device number.
  if (device)
    device = OMP_CLAUSE_DEVICE_ID (device);
  else
    {
      tree first_interop_obj
	= omp_find_clause (OMP_DISPATCH_CLAUSES (expr), OMP_CLAUSE_INTEROP);
      // A second OMP_CLAUSE_INTEROP further down the chain makes the
      // device number ambiguous, so give up in that case.
      if (first_interop_obj)
	for (tree c = TREE_CHAIN (first_interop_obj); c; c = TREE_CHAIN (c))
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_INTEROP)
	    {
	      first_interop_obj = NULL_TREE;
	      break;
	    }
      if (first_interop_obj)
	{
	  // Synthesize a device clause right after the interop clause and
	  // initialize its ID from the interop object at runtime.
	  device = create_tmp_var (integer_type_node);
	  tree c = build_omp_clause (OMP_CLAUSE_LOCATION (first_interop_obj),
				     OMP_CLAUSE_DEVICE);
	  OMP_CLAUSE_DEVICE_ID (c) = device;
	  TREE_CHAIN (c) = TREE_CHAIN (first_interop_obj);
	  TREE_CHAIN (first_interop_obj) = c;
	  first_interop_obj = OMP_CLAUSE_DECL (first_interop_obj);
	  /* device = omp_get_interop_int (obj, omp_ipr_device_num, NULL);
	     The -5 constant is omp_ipr_device_num.  */
	  tree fn = builtin_decl_explicit (BUILT_IN_OMP_GET_INTEROP_INT);
	  fn = build_call_expr (fn, 3, first_interop_obj,
				build_int_cst (integer_type_node, -5),
				null_pointer_node);
	  gimplify_assign (device, fold_convert (integer_type_node, fn), &body);
	}
    }
  tree saved_device_icv = NULL_TREE;
  // Skip the save/set dance when the device is the compile-time constant
  // -1 (omp_initial_device), which leaves the ICV unchanged anyway.
  if (device
      && (TREE_CODE (device) != INTEGER_CST
	  || !wi::eq_p (wi::to_wide (device), -1 /* omp_initial_device */)))
    {
      // Save current default-device-var ICV
      saved_device_icv = create_tmp_var (integer_type_node);
      tree fn = builtin_decl_explicit (BUILT_IN_OMP_GET_DEFAULT_DEVICE);
      gcall *call = gimple_build_call (fn, 0);
      gimple_call_set_lhs (call, saved_device_icv);
      gimplify_seq_add_stmt (&body, call);

      // Set default device
      fn = builtin_decl_explicit (BUILT_IN_OMP_SET_DEFAULT_DEVICE);
      call = gimple_build_call (fn, 1, device);
      gimplify_seq_add_stmt (&body, call);
    }

  // If the novariants and nocontext clauses are not compile-time constants,
  // we need to generate code for all possible cases:
  // if (novariants) // implies nocontext
  //   base()
  // else if (nocontext)
  //   variant1()
  // else
  //   variant2()
  tree *dispatch_body_p = &OMP_DISPATCH_BODY (expr);
  if (TREE_CODE (*dispatch_body_p) == BIND_EXPR)
    dispatch_body_p = &BIND_EXPR_BODY (*dispatch_body_p);
  tree dispatch_body = *dispatch_body_p;

  // Look for IFN_GOMP_DISPATCH and extract the base function call
  tree base_call_expr = NULL_TREE;
  if (TREE_CODE (dispatch_body) == STATEMENT_LIST)
    for (tree_stmt_iterator tsi = tsi_start (dispatch_body); !tsi_end_p (tsi);
	 tsi_next (&tsi))
      {
	tree modify = NULL_TREE;
	tree stmt = tsi_stmt (tsi);
	base_call_expr
	  = walk_tree (&stmt, find_ifn_gomp_dispatch, &modify, NULL);
	if (base_call_expr != NULL_TREE)
	  break;
      }
  else
    {
      tree modify = NULL_TREE;
      base_call_expr
	= walk_tree (dispatch_body_p, find_ifn_gomp_dispatch, &modify, NULL);
    }
  gcc_assert (base_call_expr != NULL_TREE);

  // If the dispatch call's result is assigned somewhere, remember the
  // destination so rebuilt calls below can reuse it.
  tree dst = NULL_TREE;
  if (TREE_CODE (base_call_expr) == MODIFY_EXPR)
    {
      dst = TREE_OPERAND (base_call_expr, 0);
      base_call_expr = TREE_OPERAND (base_call_expr, 1);
    }

  // Strip conversions wrapped around the dispatch call itself.
  while (TREE_CODE (base_call_expr) == FLOAT_EXPR
	 || TREE_CODE (base_call_expr) == CONVERT_EXPR
	 || TREE_CODE (base_call_expr) == COMPLEX_EXPR
	 || TREE_CODE (base_call_expr) == INDIRECT_REF
	 || TREE_CODE (base_call_expr) == NOP_EXPR)
    base_call_expr = TREE_OPERAND (base_call_expr, 0);

  // The IFN_GOMP_DISPATCH marker wraps the real base call as its sole arg.
  gcc_assert (CALL_EXPR_IFN (base_call_expr) == IFN_GOMP_DISPATCH);
  base_call_expr = CALL_EXPR_ARG (base_call_expr, 0);

  tree base_fndecl = get_callee_fndecl (base_call_expr);
  if (base_fndecl != NULL_TREE)
    {
      if (DECL_VIRTUAL_P (base_fndecl))
	{
	  error_at (
	    EXPR_LOCATION (base_call_expr),
	    "%qD is a virtual function but only a direct call is allowed "
	    "in a dispatch construct",
	    DECL_NAME (base_fndecl));
	}

      /* We are not actually going to expand the variant call or use
	 the result of omp_get_dynamic candidates here; only check that
	 it does not trivially resolve to a call to the base function
	 so that we can avoid some extra work in building code that's
	 not needed in that case.  */
      tree construct_context = omp_get_construct_context ();
      vec<struct omp_variant> all_candidates
	= omp_declare_variant_candidates (base_fndecl, construct_context);
      gcc_assert (!all_candidates.is_empty ());
      vec<struct omp_variant> candidates
	= omp_get_dynamic_candidates (all_candidates, construct_context);
      tree variant_fndecl
	= (candidates.length () == 1 ? candidates[0].alternative : NULL_TREE);

      if (base_fndecl != variant_fndecl
	  && (omp_has_novariants () == -1 || omp_has_nocontext () == -1))
	{
	  // Runtime-valued novariants/nocontext: collect the clauses and
	  // their conditions (compile-time-false ones are ignored).
	  tree novariants_clause = NULL_TREE, nocontext_clause = NULL_TREE,
	       novariants_cond = NULL_TREE, nocontext_cond = NULL_TREE;
	  for (tree c = OMP_DISPATCH_CLAUSES (expr); c; c = TREE_CHAIN (c))
	    {
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NOVARIANTS
		  && !integer_zerop (OMP_CLAUSE_NOVARIANTS_EXPR (c)))
		{
		  gcc_assert (novariants_cond == NULL_TREE);
		  novariants_clause = c;
		  novariants_cond = OMP_CLAUSE_NOVARIANTS_EXPR (c);
		}
	      else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NOCONTEXT
		       && !integer_zerop (OMP_CLAUSE_NOCONTEXT_EXPR (c)))
		{
		  gcc_assert (nocontext_cond == NULL_TREE);
		  nocontext_clause = c;
		  nocontext_cond = OMP_CLAUSE_NOCONTEXT_EXPR (c);
		}
	    }
	  gcc_assert (novariants_cond != NULL_TREE
		      || nocontext_cond != NULL_TREE);

	  enum gimplify_status ret
	    = gimplify_expr (&novariants_cond, &body, NULL, is_gimple_val,
			     fb_rvalue);
	  if (ret == GS_ERROR || ret == GS_UNHANDLED)
	    return ret;
	  ret = gimplify_expr (&nocontext_cond, &body, NULL, is_gimple_val,
			       fb_rvalue);
	  if (ret == GS_ERROR || ret == GS_UNHANDLED)
	    return ret;

	  tree end_label = create_artificial_label (UNKNOWN_LOCATION);

	  // Branch 1: novariants true -> call the base function.  The
	  // clause expr is temporarily forced to true/false around
	  // gimplify_and_add so nested resolution sees the right value.
	  if (novariants_cond != NULL_TREE)
	    {
	      tree base_label = create_artificial_label (UNKNOWN_LOCATION);
	      tree cond_label = create_artificial_label (UNKNOWN_LOCATION);
	      gcond *novariants_cond_stmt
		= gimple_build_cond_from_tree (novariants_cond, base_label,
					       cond_label);
	      gimplify_seq_add_stmt (&body, novariants_cond_stmt);

	      gimplify_seq_add_stmt (&body, gimple_build_label (base_label));
	      tree base_call_expr2 = copy_node (base_call_expr);
	      base_call_expr2
		= build_call_expr_internal_loc (EXPR_LOCATION (base_call_expr2),
						IFN_GOMP_DISPATCH,
						TREE_TYPE (base_call_expr2), 1,
						base_call_expr2);
	      // NOTE(review): this tests DISPATCH_BODY, not the node DST was
	      // extracted from -- presumably dst is non-NULL exactly when
	      // dispatch_body is a MODIFY_EXPR; confirm for the
	      // STATEMENT_LIST path.
	      if (TREE_CODE (dispatch_body) == MODIFY_EXPR)
		{
		  base_call_expr2 = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst,
					    base_call_expr2);
		}
	      OMP_CLAUSE_NOVARIANTS_EXPR (novariants_clause)
		= boolean_true_node;
	      gimplify_and_add (base_call_expr2, &body);
	      gimplify_seq_add_stmt (&body, gimple_build_goto (end_label));

	      OMP_CLAUSE_NOVARIANTS_EXPR (novariants_clause)
		= boolean_false_node;
	      gimplify_seq_add_stmt (&body, gimple_build_label (cond_label));
	    }

	  // Branch 2: nocontext true -> variant selected without the
	  // dispatch construct context.
	  if (nocontext_cond != NULL_TREE)
	    {
	      tree variant1_label = create_artificial_label (UNKNOWN_LOCATION);
	      tree variant2_label = create_artificial_label (UNKNOWN_LOCATION);
	      gcond *nocontext_cond_stmt
		= gimple_build_cond_from_tree (nocontext_cond, variant1_label,
					       variant2_label);
	      gimplify_seq_add_stmt (&body, nocontext_cond_stmt);

	      gimplify_seq_add_stmt (&body,
				     gimple_build_label (variant1_label));
	      tree variant_call_expr = copy_node (base_call_expr);
	      variant_call_expr = build_call_expr_internal_loc (
		EXPR_LOCATION (variant_call_expr), IFN_GOMP_DISPATCH,
		TREE_TYPE (variant_call_expr), 1, variant_call_expr);
	      if (TREE_CODE (dispatch_body) == MODIFY_EXPR)
		{
		  variant_call_expr = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst,
					      variant_call_expr);
		}
	      OMP_CLAUSE_NOCONTEXT_EXPR (nocontext_clause) = boolean_true_node;
	      gimplify_and_add (variant_call_expr, &body);
	      gimplify_seq_add_stmt (&body, gimple_build_goto (end_label));
	      OMP_CLAUSE_NOCONTEXT_EXPR (nocontext_clause) = boolean_false_node;
	      gimplify_seq_add_stmt (&body,
				     gimple_build_label (variant2_label));
	    }

	  // Fallthrough branch: both conditions false -> variant selected
	  // with full context.
	  tree variant_call_expr = base_call_expr;
	  variant_call_expr
	    = build_call_expr_internal_loc (EXPR_LOCATION (variant_call_expr),
					    IFN_GOMP_DISPATCH,
					    TREE_TYPE (variant_call_expr), 1,
					    variant_call_expr);
	  if (TREE_CODE (dispatch_body) == MODIFY_EXPR)
	    {
	      variant_call_expr
		= build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, variant_call_expr);
	    }
	  gimplify_and_add (variant_call_expr, &body);
	  gimplify_seq_add_stmt (&body, gimple_build_label (end_label));
	}
      else
	gimplify_and_add (OMP_DISPATCH_BODY (expr), &body);
    }
  else
    gimplify_and_add (OMP_DISPATCH_BODY (expr), &body);

  // Restore default-device-var ICV
  if (saved_device_icv != NULL_TREE)
    {
      tree fn = builtin_decl_explicit (BUILT_IN_OMP_SET_DEFAULT_DEVICE);
      gcall *call = gimple_build_call (fn, 1, saved_device_icv);
      gimplify_seq_add_stmt (&body, call);
    }

  // Wrap dispatch body into a bind
  gimple *bind = gimple_build_bind (NULL_TREE, body, NULL_TREE);
  pop_gimplify_context (bind);

  // Manually tear down context created by gimplify_scan_omp_clauses to avoid a
  // call to gimplify_adjust_omp_clauses
  gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
  if (ctx != NULL)
    {
      gcc_assert (ctx->code == OMP_DISPATCH);
      gimplify_omp_ctxp = ctx->outer_context;
      delete_omp_context (ctx);
    }

  // Remove nowait as it has no effect on dispatch (OpenMP 5.2), device as it
  // has been handled above, and depend as the front end handled it by inserting
  // taskwait.
  // NOTE(review): the break below removes only the FIRST matching clause --
  // presumably at most one of nowait/depend/device can remain here; confirm
  // against the front ends' clause handling.
  tree *dispatch_clauses_ptr = &OMP_DISPATCH_CLAUSES (expr);
  for (tree c = *dispatch_clauses_ptr; c; c = *dispatch_clauses_ptr)
    {
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NOWAIT
	  || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
	  || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEVICE)
	{
	  *dispatch_clauses_ptr = OMP_CLAUSE_CHAIN (c);
	  break;
	}
      else
	dispatch_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
    }

  gimple *stmt = gimple_build_omp_dispatch (bind, OMP_DISPATCH_CLAUSES (expr));
  gimplify_seq_add_stmt (pre_p, stmt);
  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}
19977 :
/* Expand a metadirective that has been resolved at gimplification time
   into the candidate directive variants in CANDIDATES.  */

static enum gimplify_status
expand_omp_metadirective (vec<struct omp_variant> &candidates,
			  gimple_seq *pre_p)
{
  auto_vec<tree> selectors;         /* Dynamic condition per candidate.  */
  auto_vec<tree> directive_labels;  /* Entry label per candidate body.  */
  auto_vec<gimple_seq> directive_bodies;
  tree body_label = NULL_TREE;      /* Shared label for a common trailing body.  */
  tree end_label = create_artificial_label (UNKNOWN_LOCATION);

  /* Construct bodies for each candidate.  */
  for (unsigned i = 0; i < candidates.length(); i++)
    {
      struct omp_variant &candidate = candidates[i];
      gimple_seq body = NULL;

      selectors.safe_push (omp_dynamic_cond (candidate.selector,
					     find_supercontext ()));
      directive_labels.safe_push (create_artificial_label (UNKNOWN_LOCATION));

      gimplify_seq_add_stmt (&body,
			     gimple_build_label (directive_labels.last ()));
      if (candidate.alternative != NULL_TREE)
	gimplify_stmt (&candidate.alternative, &body);
      /* Candidates may share one trailing body: the first one to carry it
	 gimplifies it under BODY_LABEL; later ones just jump there.  */
      if (candidate.body != NULL_TREE)
	{
	  if (body_label != NULL_TREE)
	    gimplify_seq_add_stmt (&body, gimple_build_goto (body_label));
	  else
	    {
	      body_label = create_artificial_label (UNKNOWN_LOCATION);
	      gimplify_seq_add_stmt (&body, gimple_build_label (body_label));
	      gimplify_stmt (&candidate.body, &body);
	    }
	}

      directive_bodies.safe_push (body);
    }

  /* Labels for the selector tests.  Entry 0 is unused (the first test
     falls through from the surrounding code); the last entry aliases the
     final candidate's directive label, since that candidate is
     unconditional.  */
  auto_vec<tree> cond_labels;

  cond_labels.safe_push (NULL_TREE);
  for (unsigned i = 1; i < candidates.length () - 1; i++)
    cond_labels.safe_push (create_artificial_label (UNKNOWN_LOCATION));
  if (candidates.length () > 1)
    cond_labels.safe_push (directive_labels.last ());

  /* Generate conditionals to test each dynamic selector in turn, executing
     the directive candidate if successful.  */
  for (unsigned i = 0; i < candidates.length () - 1; i++)
    {
      if (i != 0)
	gimplify_seq_add_stmt (pre_p, gimple_build_label (cond_labels [i]));

      enum gimplify_status ret = gimplify_expr (&selectors[i], pre_p, NULL,
						is_gimple_val, fb_rvalue);
      if (ret == GS_ERROR || ret == GS_UNHANDLED)
	return ret;

      gcond *cond_stmt
	= gimple_build_cond_from_tree (selectors[i], directive_labels[i],
				       cond_labels[i + 1]);

      gimplify_seq_add_stmt (pre_p, cond_stmt);
      gimplify_seq_add_seq (pre_p, directive_bodies[i]);
      gimplify_seq_add_stmt (pre_p, gimple_build_goto (end_label));
    }

  /* The last candidate needs no test; emit its body and the join label.  */
  gimplify_seq_add_seq (pre_p, directive_bodies.last ());
  gimplify_seq_add_stmt (pre_p, gimple_build_label (end_label));

  return GS_ALL_DONE;
}
20054 :
/* Expand a variant construct that requires late resolution in the ompdevlow
   pass.  It's a bit easier to do this in tree form and then gimplify that,
   than to emit gimple.  The output is going to look something like:

	switch_var = OMP_NEXT_VARIANT (0, state);
	loop_label:
	switch (switch_var)
	  {
	  case 1:
	    if (dynamic_selector_predicate_1)
	      {
		alternative_1;
		goto end_label;
	      }
	    else
	      {
		switch_var = OMP_NEXT_VARIANT (1, state);
		goto loop_label;
	      }
	  case 2:
	    ...
	  }
	end_label:

   OMP_NEXT_VARIANT is a magic cookie that is replaced with the switch variable
   index of the next variant to try, after late resolution.  */

static tree
expand_late_variant_directive (vec<struct omp_variant> all_candidates,
			       tree construct_context)
{
  tree body_label = NULL_TREE;        /* Label for a shared standalone body.  */
  tree standalone_body = NULL_TREE;   /* First candidate body seen, if any.  */
  tree loop_label = create_artificial_label (UNKNOWN_LOCATION);
  tree end_label = create_artificial_label (UNKNOWN_LOCATION);
  tree selectors = make_tree_vec (all_candidates.length ());
  tree switch_body = NULL_TREE;
  tree switch_var = create_tmp_var (integer_type_node, "variant");
  /* STATE packages the construct context and selector vector for the
     ompdevlow pass to resolve OMP_NEXT_VARIANT later.  */
  tree state = tree_cons (NULL_TREE, construct_context, selectors);

  for (unsigned int i = 0; i < all_candidates.length (); i++)
    {
      tree selector = all_candidates[i].selector;
      tree alternative = all_candidates[i].alternative;
      tree body = all_candidates[i].body;
      TREE_VEC_ELT (selectors, i) = selector;

      /* Case label.  Numbering is 1-based.  */
      tree case_val = build_int_cst (integer_type_node, i + 1);
      tree case_label
	= build_case_label (case_val, NULL_TREE,
			    create_artificial_label (UNKNOWN_LOCATION));
      append_to_statement_list (case_label, &switch_body);

      /* The actual body of the variant.  */
      tree variant_body = NULL_TREE;
      append_to_statement_list (alternative, &variant_body);

      /* Candidates with a standalone body share one copy of it, placed
	 after the switch; each case jumps to it.  Candidates without one
	 jump straight to the end.  */
      if (body != NULL_TREE)
	{
	  if (standalone_body == NULL)
	    {
	      standalone_body = body;
	      body_label = create_artificial_label (UNKNOWN_LOCATION);
	    }
	  append_to_statement_list (build1 (GOTO_EXPR, void_type_node,
					    body_label),
				    &variant_body);
	}
      else
	append_to_statement_list (build1 (GOTO_EXPR, void_type_node,
					  end_label),
				  &variant_body);

      /* If this is a dynamic selector, wrap variant_body with a conditional.
	 If the predicate doesn't match, the else clause sets switch_var and
	 jumps to loop_var to try again.  */
      tree dynamic_selector = omp_dynamic_cond (selector, find_supercontext ());
      if (dynamic_selector)
	{
	  tree else_stmt = NULL_TREE;
	  tree next = build2 (OMP_NEXT_VARIANT, integer_type_node,
			      case_val, state);
	  append_to_statement_list (build2 (MODIFY_EXPR, integer_type_node,
					    switch_var, next),
				    &else_stmt);
	  append_to_statement_list (build1 (GOTO_EXPR, void_type_node,
					    loop_label),
				    &else_stmt);
	  variant_body = build3 (COND_EXPR, void_type_node, dynamic_selector,
				 variant_body, else_stmt);
	}
      append_to_statement_list (variant_body, &switch_body);
    }

  /* Put it all together.  */
  tree result = NULL_TREE;
  /* Seed switch_var with the first variant index (argument 0 means
     "start from the beginning").  */
  tree first = build2 (OMP_NEXT_VARIANT, integer_type_node, integer_zero_node,
		       state);
  append_to_statement_list (build2 (MODIFY_EXPR, integer_type_node,
				    switch_var, first),
			    &result);
  append_to_statement_list (build1 (LABEL_EXPR, void_type_node, loop_label),
			    &result);
  append_to_statement_list (build2 (SWITCH_EXPR, integer_type_node,
				    switch_var, switch_body),
			    &result);
  if (standalone_body)
    {
      append_to_statement_list (build1 (LABEL_EXPR, void_type_node,
					body_label),
				&result);
      append_to_statement_list (standalone_body, &result);
    }
  append_to_statement_list (build1 (LABEL_EXPR, void_type_node, end_label),
			    &result);
  /* Tell the ompdevlow pass this function has cookies to resolve.  */
  cgraph_node::get (cfun->decl)->has_omp_variant_constructs = 1;
  return result;
}
20174 :
20175 :
20176 : /* Gimplify an OMP_METADIRECTIVE construct. EXPR is the tree version.
20177 : The metadirective will be resolved at this point if possible, otherwise
20178 : a GIMPLE_OMP_VARIANT_CONSTRUCT is created. */
20179 :
20180 : static enum gimplify_status
20181 174 : gimplify_omp_metadirective (tree *expr_p, gimple_seq *pre_p, gimple_seq *,
20182 : bool (*) (tree), fallback_t)
20183 : {
20184 : /* Try to resolve the metadirective. */
20185 174 : tree construct_context = omp_get_construct_context ();
20186 174 : vec<struct omp_variant> all_candidates
20187 174 : = omp_metadirective_candidates (*expr_p, construct_context);
20188 174 : vec<struct omp_variant> candidates
20189 174 : = omp_get_dynamic_candidates (all_candidates, construct_context);
20190 174 : if (!candidates.is_empty ())
20191 170 : return expand_omp_metadirective (candidates, pre_p);
20192 :
20193 : /* The metadirective cannot be resolved yet. Turn it into a loop with
20194 : a nested switch statement, using OMP_NEXT_VARIANT to set the control
20195 : variable for the switch. */
20196 4 : *expr_p = expand_late_variant_directive (all_candidates, construct_context);
20197 4 : return GS_OK;
20198 : }
20199 :
20200 : /* Gimplify an OMP_DECLARE_MAPPER node (by just removing it). */
20201 :
20202 : static enum gimplify_status
20203 0 : gimplify_omp_declare_mapper (tree *expr_p)
20204 : {
20205 0 : *expr_p = NULL_TREE;
20206 0 : return GS_ALL_DONE;
20207 : }
20208 :
20209 : /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
20210 : expression produces a value to be used as an operand inside a GIMPLE
20211 : statement, the value will be stored back in *EXPR_P. This value will
20212 : be a tree of class tcc_declaration, tcc_constant, tcc_reference or
20213 : an SSA_NAME. The corresponding sequence of GIMPLE statements is
20214 : emitted in PRE_P and POST_P.
20215 :
20216 : Additionally, this process may overwrite parts of the input
20217 : expression during gimplification. Ideally, it should be
20218 : possible to do non-destructive gimplification.
20219 :
20220 : EXPR_P points to the GENERIC expression to convert to GIMPLE. If
20221 : the expression needs to evaluate to a value to be used as
20222 : an operand in a GIMPLE statement, this value will be stored in
20223 : *EXPR_P on exit. This happens when the caller specifies one
20224 : of fb_lvalue or fb_rvalue fallback flags.
20225 :
20226 : PRE_P will contain the sequence of GIMPLE statements corresponding
20227 : to the evaluation of EXPR and all the side-effects that must
20228 : be executed before the main expression. On exit, the last
20229 : statement of PRE_P is the core statement being gimplified. For
20230 : instance, when gimplifying 'if (++a)' the last statement in
20231 : PRE_P will be 'if (t.1)' where t.1 is the result of
20232 : pre-incrementing 'a'.
20233 :
20234 : POST_P will contain the sequence of GIMPLE statements corresponding
20235 : to the evaluation of all the side-effects that must be executed
20236 : after the main expression. If this is NULL, the post
20237 : side-effects are stored at the end of PRE_P.
20238 :
20239 : The reason why the output is split in two is to handle post
20240 : side-effects explicitly. In some cases, an expression may have
20241 : inner and outer post side-effects which need to be emitted in
20242 : an order different from the one given by the recursive
20243 : traversal. For instance, for the expression (*p--)++ the post
20244 : side-effects of '--' must actually occur *after* the post
20245 : side-effects of '++'. However, gimplification will first visit
20246 : the inner expression, so if a separate POST sequence was not
20247 : used, the resulting sequence would be:
20248 :
20249 : 1 t.1 = *p
20250 : 2 p = p - 1
20251 : 3 t.2 = t.1 + 1
20252 : 4 *p = t.2
20253 :
20254 : However, the post-decrement operation in line #2 must not be
20255 : evaluated until after the store to *p at line #4, so the
20256 : correct sequence should be:
20257 :
20258 : 1 t.1 = *p
20259 : 2 t.2 = t.1 + 1
20260 : 3 *p = t.2
20261 : 4 p = p - 1
20262 :
20263 : So, by specifying a separate post queue, it is possible
20264 : to emit the post side-effects in the correct order.
20265 : If POST_P is NULL, an internal queue will be used. Before
20266 : returning to the caller, the sequence POST_P is appended to
20267 : the main output sequence PRE_P.
20268 :
20269 : GIMPLE_TEST_F points to a function that takes a tree T and
20270 : returns nonzero if T is in the GIMPLE form requested by the
20271 : caller. The GIMPLE predicates are in gimple.cc.
20272 :
20273 : FALLBACK tells the function what sort of a temporary we want if
20274 : gimplification cannot produce an expression that complies with
20275 : GIMPLE_TEST_F.
20276 :
20277 : fb_none means that no temporary should be generated
20278 : fb_rvalue means that an rvalue is OK to generate
20279 : fb_lvalue means that an lvalue is OK to generate
20280 : fb_either means that either is OK, but an lvalue is preferable.
20281 : fb_mayfail means that gimplification may fail (in which case
20282 : GS_ERROR will be returned)
20283 :
20284 : The return value is either GS_ERROR or GS_ALL_DONE, since this
20285 : function iterates until EXPR is completely gimplified or an error
20286 : occurs. */
20287 :
20288 : enum gimplify_status
20289 515052848 : gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
20290 : bool (*gimple_test_f) (tree), fallback_t fallback)
20291 : {
20292 515052848 : tree tmp;
20293 515052848 : gimple_seq internal_pre = NULL;
20294 515052848 : gimple_seq internal_post = NULL;
20295 515052848 : tree save_expr;
20296 515052848 : bool is_statement;
20297 515052848 : location_t saved_location;
20298 515052848 : enum gimplify_status ret;
20299 515052848 : gimple_stmt_iterator pre_last_gsi, post_last_gsi;
20300 515052848 : tree label;
20301 :
20302 515052848 : save_expr = *expr_p;
20303 515052848 : if (save_expr == NULL_TREE)
20304 : return GS_ALL_DONE;
20305 :
20306 : /* If we are gimplifying a top-level statement, PRE_P must be valid. */
20307 463435496 : is_statement = gimple_test_f == is_gimple_stmt;
20308 463435496 : if (is_statement)
20309 100552480 : gcc_assert (pre_p);
20310 :
20311 : /* Consistency checks. */
20312 463435496 : if (gimple_test_f == is_gimple_reg)
20313 6276440 : gcc_assert (fallback & (fb_rvalue | fb_lvalue));
20314 457159056 : else if (gimple_test_f == is_gimple_val
20315 337106349 : || gimple_test_f == is_gimple_call_addr
20316 320854525 : || gimple_test_f == is_gimple_condexpr_for_cond
20317 315271425 : || gimple_test_f == is_gimple_mem_rhs
20318 315128300 : || gimple_test_f == is_gimple_mem_rhs_or_call
20319 303987964 : || gimple_test_f == is_gimple_reg_rhs
20320 303374320 : || gimple_test_f == is_gimple_reg_rhs_or_call
20321 231707837 : || gimple_test_f == is_gimple_asm_val
20322 231668666 : || gimple_test_f == is_gimple_mem_ref_addr)
20323 240819934 : gcc_assert (fallback & fb_rvalue);
20324 216339122 : else if (gimple_test_f == is_gimple_min_lval
20325 186509532 : || gimple_test_f == is_gimple_lvalue)
20326 83120320 : gcc_assert (fallback & fb_lvalue);
20327 133218802 : else if (gimple_test_f == is_gimple_addressable)
20328 32666322 : gcc_assert (fallback & fb_either);
20329 100552480 : else if (gimple_test_f == is_gimple_stmt)
20330 100552480 : gcc_assert (fallback == fb_none);
20331 : else
20332 : {
20333 : /* We should have recognized the GIMPLE_TEST_F predicate to
20334 : know what kind of fallback to use in case a temporary is
20335 : needed to hold the value or address of *EXPR_P. */
20336 0 : gcc_unreachable ();
20337 : }
20338 :
20339 : /* We used to check the predicate here and return immediately if it
20340 : succeeds. This is wrong; the design is for gimplification to be
20341 : idempotent, and for the predicates to only test for valid forms, not
20342 : whether they are fully simplified. */
20343 463435496 : if (pre_p == NULL)
20344 0 : pre_p = &internal_pre;
20345 :
20346 463435496 : if (post_p == NULL)
20347 186383154 : post_p = &internal_post;
20348 :
20349 : /* Remember the last statements added to PRE_P and POST_P. Every
20350 : new statement added by the gimplification helpers needs to be
20351 : annotated with location information. To centralize the
20352 : responsibility, we remember the last statement that had been
20353 : added to both queues before gimplifying *EXPR_P. If
20354 : gimplification produces new statements in PRE_P and POST_P, those
20355 : statements will be annotated with the same location information
20356 : as *EXPR_P. */
20357 463435496 : pre_last_gsi = gsi_last (*pre_p);
20358 463435496 : post_last_gsi = gsi_last (*post_p);
20359 :
20360 463435496 : saved_location = input_location;
20361 463435496 : if (save_expr != error_mark_node
20362 463435496 : && EXPR_HAS_LOCATION (*expr_p))
20363 173296488 : input_location = EXPR_LOCATION (*expr_p);
20364 :
20365 : /* Loop over the specific gimplifiers until the toplevel node
20366 : remains the same. */
20367 481475606 : do
20368 : {
20369 : /* Strip away as many useless type conversions as possible
20370 : at the toplevel. */
20371 481475606 : STRIP_USELESS_TYPE_CONVERSION (*expr_p);
20372 :
20373 : /* Remember the expr. */
20374 481475606 : save_expr = *expr_p;
20375 :
20376 : /* Die, die, die, my darling. */
20377 481475606 : if (error_operand_p (save_expr))
20378 : {
20379 : ret = GS_ERROR;
20380 : break;
20381 : }
20382 :
20383 : /* Do any language-specific gimplification. */
20384 481472021 : ret = ((enum gimplify_status)
20385 481472021 : lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
20386 481472021 : if (ret == GS_OK)
20387 : {
20388 28134581 : if (*expr_p == NULL_TREE)
20389 : break;
20390 28134581 : if (*expr_p != save_expr)
20391 4667457 : continue;
20392 : }
20393 453337440 : else if (ret != GS_UNHANDLED)
20394 : break;
20395 :
20396 : /* Make sure that all the cases set 'ret' appropriately. */
20397 476228919 : ret = GS_UNHANDLED;
20398 476228919 : switch (TREE_CODE (*expr_p))
20399 : {
20400 : /* First deal with the special cases. */
20401 :
20402 1136127 : case POSTINCREMENT_EXPR:
20403 1136127 : case POSTDECREMENT_EXPR:
20404 1136127 : case PREINCREMENT_EXPR:
20405 1136127 : case PREDECREMENT_EXPR:
20406 2272254 : ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
20407 : fallback != fb_none,
20408 1136127 : TREE_TYPE (*expr_p));
20409 1136127 : break;
20410 :
20411 591080 : case VIEW_CONVERT_EXPR:
20412 591080 : if ((fallback & fb_rvalue)
20413 590954 : && is_gimple_reg_type (TREE_TYPE (*expr_p))
20414 1004387 : && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
20415 : {
20416 401554 : ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
20417 : post_p, is_gimple_val, fb_rvalue);
20418 401554 : recalculate_side_effects (*expr_p);
20419 401554 : break;
20420 : }
20421 : /* Fallthru. */
20422 :
20423 29766943 : case ARRAY_REF:
20424 29766943 : case ARRAY_RANGE_REF:
20425 29766943 : case REALPART_EXPR:
20426 29766943 : case IMAGPART_EXPR:
20427 29766943 : case COMPONENT_REF:
20428 29766943 : ret = gimplify_compound_lval (expr_p, pre_p, post_p,
20429 : fallback ? fallback : fb_rvalue);
20430 29766943 : break;
20431 :
20432 6203415 : case COND_EXPR:
20433 6203415 : ret = gimplify_cond_expr (expr_p, pre_p, fallback);
20434 :
20435 : /* C99 code may assign to an array in a structure value of a
20436 : conditional expression, and this has undefined behavior
20437 : only on execution, so create a temporary if an lvalue is
20438 : required. */
20439 6203415 : if (fallback == fb_lvalue)
20440 : {
20441 7 : *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
20442 7 : mark_addressable (*expr_p);
20443 7 : ret = GS_OK;
20444 : }
20445 : break;
20446 :
20447 16770417 : case CALL_EXPR:
20448 16770417 : ret = gimplify_call_expr (expr_p, pre_p, fallback);
20449 :
20450 : /* C99 code may assign to an array in a structure returned
20451 : from a function, and this has undefined behavior only on
20452 : execution, so create a temporary if an lvalue is
20453 : required. */
20454 16770417 : if (fallback == fb_lvalue)
20455 : {
20456 18225 : *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
20457 18225 : mark_addressable (*expr_p);
20458 18225 : ret = GS_OK;
20459 : }
20460 : break;
20461 :
20462 0 : case TREE_LIST:
20463 0 : gcc_unreachable ();
20464 :
20465 0 : case OMP_ARRAY_SECTION:
20466 0 : gcc_unreachable ();
20467 :
20468 478246 : case COMPOUND_EXPR:
20469 478246 : ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
20470 478246 : break;
20471 :
20472 37182 : case COMPOUND_LITERAL_EXPR:
20473 37182 : ret = gimplify_compound_literal_expr (expr_p, pre_p,
20474 : gimple_test_f, fallback);
20475 37182 : break;
20476 :
20477 49932655 : case MODIFY_EXPR:
20478 49932655 : case INIT_EXPR:
20479 49932655 : ret = gimplify_modify_expr (expr_p, pre_p, post_p,
20480 : fallback != fb_none);
20481 49932655 : break;
20482 :
20483 113161 : case TRUTH_ANDIF_EXPR:
20484 113161 : case TRUTH_ORIF_EXPR:
20485 113161 : {
20486 : /* Preserve the original type of the expression and the
20487 : source location of the outer expression. */
20488 113161 : tree org_type = TREE_TYPE (*expr_p);
20489 113161 : *expr_p = gimple_boolify (*expr_p);
20490 113161 : *expr_p = build3_loc (input_location, COND_EXPR,
20491 : org_type, *expr_p,
20492 : fold_convert_loc
20493 : (input_location,
20494 : org_type, boolean_true_node),
20495 : fold_convert_loc
20496 : (input_location,
20497 : org_type, boolean_false_node));
20498 113161 : ret = GS_OK;
20499 113161 : break;
20500 : }
20501 :
20502 231892 : case TRUTH_NOT_EXPR:
20503 231892 : {
20504 231892 : tree type = TREE_TYPE (*expr_p);
20505 : /* The parsers are careful to generate TRUTH_NOT_EXPR
20506 : only with operands that are always zero or one.
20507 : We do not fold here but handle the only interesting case
20508 : manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
20509 231892 : *expr_p = gimple_boolify (*expr_p);
20510 231892 : if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
20511 231892 : *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
20512 231892 : TREE_TYPE (*expr_p),
20513 231892 : TREE_OPERAND (*expr_p, 0));
20514 : else
20515 0 : *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
20516 0 : TREE_TYPE (*expr_p),
20517 0 : TREE_OPERAND (*expr_p, 0),
20518 0 : build_int_cst (TREE_TYPE (*expr_p), 1));
20519 231892 : if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
20520 5675 : *expr_p = fold_convert_loc (input_location, type, *expr_p);
20521 : ret = GS_OK;
20522 : break;
20523 : }
20524 :
20525 32746839 : case ADDR_EXPR:
20526 32746839 : ret = gimplify_addr_expr (expr_p, pre_p, post_p);
20527 32746839 : break;
20528 :
20529 5509 : case ANNOTATE_EXPR:
20530 5509 : {
20531 5509 : tree cond = TREE_OPERAND (*expr_p, 0);
20532 5509 : tree kind = TREE_OPERAND (*expr_p, 1);
20533 5509 : tree data = TREE_OPERAND (*expr_p, 2);
20534 5509 : tree type = TREE_TYPE (cond);
20535 5509 : if (!INTEGRAL_TYPE_P (type))
20536 : {
20537 0 : *expr_p = cond;
20538 0 : ret = GS_OK;
20539 0 : break;
20540 : }
20541 5509 : tree tmp = create_tmp_var (type);
20542 5509 : gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
20543 5509 : gcall *call
20544 5509 : = gimple_build_call_internal (IFN_ANNOTATE, 3, cond, kind, data);
20545 5509 : gimple_call_set_lhs (call, tmp);
20546 5509 : gimplify_seq_add_stmt (pre_p, call);
20547 5509 : *expr_p = tmp;
20548 5509 : ret = GS_ALL_DONE;
20549 5509 : break;
20550 : }
20551 :
20552 50965 : case VA_ARG_EXPR:
20553 50965 : ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
20554 50965 : break;
20555 :
20556 17185323 : CASE_CONVERT:
20557 17185323 : if (IS_EMPTY_STMT (*expr_p))
20558 : {
20559 : ret = GS_ALL_DONE;
20560 : break;
20561 : }
20562 :
20563 15469685 : if (VOID_TYPE_P (TREE_TYPE (*expr_p))
20564 15469685 : || fallback == fb_none)
20565 : {
20566 : /* Just strip a conversion to void (or in void context) and
20567 : try again. */
20568 2610090 : *expr_p = TREE_OPERAND (*expr_p, 0);
20569 2610090 : ret = GS_OK;
20570 2610090 : break;
20571 : }
20572 :
20573 12859595 : ret = gimplify_conversion (expr_p);
20574 12859595 : if (ret == GS_ERROR)
20575 : break;
20576 12859595 : if (*expr_p != save_expr)
20577 : break;
20578 : /* FALLTHRU */
20579 :
20580 12985389 : case FIX_TRUNC_EXPR:
20581 : /* unary_expr: ... | '(' cast ')' val | ... */
20582 12985389 : ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
20583 : is_gimple_val, fb_rvalue);
20584 12985389 : recalculate_side_effects (*expr_p);
20585 12985389 : break;
20586 :
20587 6204367 : case INDIRECT_REF:
20588 6204367 : {
20589 6204367 : bool volatilep = TREE_THIS_VOLATILE (*expr_p);
20590 6204367 : bool notrap = TREE_THIS_NOTRAP (*expr_p);
20591 6204367 : tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
20592 :
20593 6204367 : *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
20594 6204367 : if (*expr_p != save_expr)
20595 : {
20596 : ret = GS_OK;
20597 : break;
20598 : }
20599 :
20600 6186359 : ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
20601 : is_gimple_reg, fb_rvalue);
20602 6186359 : if (ret == GS_ERROR)
20603 : break;
20604 :
20605 6186357 : recalculate_side_effects (*expr_p);
20606 12372714 : *expr_p = fold_build2_loc (input_location, MEM_REF,
20607 6186357 : TREE_TYPE (*expr_p),
20608 6186357 : TREE_OPERAND (*expr_p, 0),
20609 : build_int_cst (saved_ptr_type, 0));
20610 6186357 : TREE_THIS_VOLATILE (*expr_p) = volatilep;
20611 6186357 : TREE_THIS_NOTRAP (*expr_p) = notrap;
20612 6186357 : ret = GS_OK;
20613 6186357 : break;
20614 : }
20615 :
20616 :       /* We arrive here through the various re-gimplification paths.  */
20617 15785694 : case MEM_REF:
20618 : /* First try re-folding the whole thing. */
20619 15785694 : tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
20620 : TREE_OPERAND (*expr_p, 0),
20621 : TREE_OPERAND (*expr_p, 1));
20622 15785694 : if (tmp)
20623 : {
20624 6580 : REF_REVERSE_STORAGE_ORDER (tmp)
20625 3290 : = REF_REVERSE_STORAGE_ORDER (*expr_p);
20626 3290 : *expr_p = tmp;
20627 3290 : recalculate_side_effects (*expr_p);
20628 3290 : ret = GS_OK;
20629 3290 : break;
20630 : }
20631 : /* Avoid re-gimplifying the address operand if it is already
20632 : in suitable form. Re-gimplifying would mark the address
20633 : operand addressable. Always gimplify when not in SSA form
20634 : as we still may have to gimplify decls with value-exprs. */
20635 15782404 : if (!gimplify_ctxp || !gimple_in_ssa_p (cfun)
20636 16788196 : || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
20637 : {
20638 14831577 : ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
20639 : is_gimple_mem_ref_addr, fb_rvalue);
20640 14831577 : if (ret == GS_ERROR)
20641 : break;
20642 : }
20643 15782404 : recalculate_side_effects (*expr_p);
20644 15782404 : ret = GS_ALL_DONE;
20645 15782404 : break;
20646 :
20647 : /* Constants need not be gimplified. */
20648 43824717 : case INTEGER_CST:
20649 43824717 : case REAL_CST:
20650 43824717 : case FIXED_CST:
20651 43824717 : case STRING_CST:
20652 43824717 : case COMPLEX_CST:
20653 43824717 : case VECTOR_CST:
20654 : /* Drop the overflow flag on constants, we do not want
20655 : that in the GIMPLE IL. */
20656 43824717 : if (TREE_OVERFLOW_P (*expr_p))
20657 1165 : *expr_p = drop_tree_overflow (*expr_p);
20658 : ret = GS_ALL_DONE;
20659 : break;
20660 :
20661 116995 : case CONST_DECL:
20662 : /* If we require an lvalue, such as for ADDR_EXPR, retain the
20663 : CONST_DECL node. Otherwise the decl is replaceable by its
20664 : value. */
20665 : /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
20666 116995 : if (fallback & fb_lvalue)
20667 : ret = GS_ALL_DONE;
20668 : else
20669 : {
20670 3489 : *expr_p = DECL_INITIAL (*expr_p);
20671 3489 : ret = GS_OK;
20672 : }
20673 : break;
20674 :
20675 6549103 : case DECL_EXPR:
20676 6549103 : ret = gimplify_decl_expr (expr_p, pre_p);
20677 6549103 : break;
20678 :
20679 5920956 : case BIND_EXPR:
20680 5920956 : ret = gimplify_bind_expr (expr_p, pre_p);
20681 5920956 : break;
20682 :
20683 200453 : case LOOP_EXPR:
20684 200453 : ret = gimplify_loop_expr (expr_p, pre_p);
20685 200453 : break;
20686 :
20687 50660 : case SWITCH_EXPR:
20688 50660 : ret = gimplify_switch_expr (expr_p, pre_p);
20689 50660 : break;
20690 :
20691 3220 : case EXIT_EXPR:
20692 3220 : ret = gimplify_exit_expr (expr_p);
20693 3220 : break;
20694 :
20695 1105093 : case GOTO_EXPR:
20696 : /* If the target is not LABEL, then it is a computed jump
20697 :       /* If the target is not a LABEL_DECL, then it is a computed jump
20698 :          and the target needs to be gimplified.  */
20699 : {
20700 1111 : ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
20701 : NULL, is_gimple_val, fb_rvalue);
20702 1111 : if (ret == GS_ERROR)
20703 : break;
20704 : }
20705 2210184 : gimplify_seq_add_stmt (pre_p,
20706 1105092 : gimple_build_goto (GOTO_DESTINATION (*expr_p)));
20707 1105092 : ret = GS_ALL_DONE;
20708 1105092 : break;
20709 :
20710 102422 : case PREDICT_EXPR:
20711 409688 : gimplify_seq_add_stmt (pre_p,
20712 102422 : gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
20713 102422 : PREDICT_EXPR_OUTCOME (*expr_p)));
20714 102422 : ret = GS_ALL_DONE;
20715 102422 : break;
20716 :
20717 2551053 : case LABEL_EXPR:
20718 2551053 : ret = gimplify_label_expr (expr_p, pre_p);
20719 2551053 : label = LABEL_EXPR_LABEL (*expr_p);
20720 2551053 : gcc_assert (decl_function_context (label) == current_function_decl);
20721 :
20722 : /* If the label is used in a goto statement, or address of the label
20723 :       /* If the label is used in a goto statement, or address of the label
20724 :          is taken, we need to unpoison all variables that were seen so far.
20725 :          Doing so would prevent us from reporting false positives.  */
20726 3906 : && asan_used_labels != NULL
20727 1213 : && asan_used_labels->contains (label)
20728 2551867 : && !gimplify_omp_ctxp)
20729 806 : asan_poison_variables (asan_poisoned_variables, false, pre_p);
20730 : break;
20731 :
20732 1044816 : case CASE_LABEL_EXPR:
20733 1044816 : ret = gimplify_case_label_expr (expr_p, pre_p);
20734 :
20735 1044816 : if (gimplify_ctxp->live_switch_vars)
20736 1044629 : asan_poison_variables (gimplify_ctxp->live_switch_vars, false,
20737 : pre_p);
20738 : break;
20739 :
20740 2210890 : case RETURN_EXPR:
20741 2210890 : ret = gimplify_return_expr (*expr_p, pre_p);
20742 2210890 : break;
20743 :
20744 942616 : case CONSTRUCTOR:
20745 : /* Don't reduce this in place; let gimplify_init_constructor work its
20746 : magic. Buf if we're just elaborating this for side effects, just
20747 : gimplify any element that has side-effects. */
20748 942616 : if (fallback == fb_none)
20749 : {
20750 357 : unsigned HOST_WIDE_INT ix;
20751 357 : tree val;
20752 357 : tree temp = NULL_TREE;
20753 374 : FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
20754 17 : if (TREE_SIDE_EFFECTS (val))
20755 4 : append_to_statement_list (val, &temp);
20756 :
20757 357 : *expr_p = temp;
20758 357 : ret = temp ? GS_OK : GS_ALL_DONE;
20759 : }
20760 : /* C99 code may assign to an array in a constructed
20761 : structure or union, and this has undefined behavior only
20762 : on execution, so create a temporary if an lvalue is
20763 : required. */
20764 942259 : else if (fallback == fb_lvalue)
20765 : {
20766 12 : *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
20767 12 : mark_addressable (*expr_p);
20768 12 : ret = GS_OK;
20769 : }
20770 : else
20771 : ret = GS_ALL_DONE;
20772 : break;
20773 :
20774 : /* The following are special cases that are not handled by the
20775 : original GIMPLE grammar. */
20776 :
20777 : /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
20778 : eliminated. */
20779 438774 : case SAVE_EXPR:
20780 438774 : ret = gimplify_save_expr (expr_p, pre_p, post_p);
20781 438774 : break;
20782 :
20783 410625 : case BIT_FIELD_REF:
20784 410625 : ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
20785 : post_p, is_gimple_lvalue, fb_either);
20786 410625 : recalculate_side_effects (*expr_p);
20787 410625 : break;
20788 :
20789 2445 : case TARGET_MEM_REF:
20790 2445 : {
20791 2445 : enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
20792 :
20793 2445 : if (TMR_BASE (*expr_p))
20794 2445 : r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
20795 : post_p, is_gimple_mem_ref_addr, fb_either);
20796 2445 : if (TMR_INDEX (*expr_p))
20797 1182 : r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
20798 : post_p, is_gimple_val, fb_rvalue);
20799 2445 : if (TMR_INDEX2 (*expr_p))
20800 60 : r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
20801 : post_p, is_gimple_val, fb_rvalue);
20802 : /* TMR_STEP and TMR_OFFSET are always integer constants. */
20803 2445 : ret = MIN (r0, r1);
20804 : }
20805 : break;
20806 :
20807 0 : case NON_LVALUE_EXPR:
20808 : /* This should have been stripped above. */
20809 0 : gcc_unreachable ();
20810 :
20811 97483 : case ASM_EXPR:
20812 97483 : ret = gimplify_asm_expr (expr_p, pre_p, post_p);
20813 97483 : break;
20814 :
20815 485021 : case TRY_FINALLY_EXPR:
20816 485021 : case TRY_CATCH_EXPR:
20817 485021 : {
20818 485021 : gimple_seq eval, cleanup;
20819 485021 : gtry *try_;
20820 :
20821 : /* Calls to destructors are generated automatically in FINALLY/CATCH
20822 : block. They should have location as UNKNOWN_LOCATION. However,
20823 : gimplify_call_expr will reset these call stmts to input_location
20824 : if it finds stmt's location is unknown. To prevent resetting for
20825 : destructors, we set the input_location to unknown.
20826 : Note that this only affects the destructor calls in FINALLY/CATCH
20827 : block, and will automatically reset to its original value by the
20828 : end of gimplify_expr. */
20829 485021 : input_location = UNKNOWN_LOCATION;
20830 485021 : eval = cleanup = NULL;
20831 485021 : gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
20832 485021 : bool save_in_handler_expr = gimplify_ctxp->in_handler_expr;
20833 485021 : if (TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
20834 485021 : && TREE_CODE (TREE_OPERAND (*expr_p, 1)) == EH_ELSE_EXPR)
20835 : {
20836 155 : gimple_seq n = NULL, e = NULL;
20837 155 : gimplify_ctxp->in_handler_expr = true;
20838 155 : gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p, 1),
20839 : 0), &n);
20840 155 : gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p, 1),
20841 : 1), &e);
20842 155 : if (!gimple_seq_empty_p (n) || !gimple_seq_empty_p (e))
20843 : {
20844 151 : geh_else *stmt = gimple_build_eh_else (n, e);
20845 151 : gimple_seq_add_stmt (&cleanup, stmt);
20846 : }
20847 : }
20848 : else
20849 : {
20850 484866 : gimplify_ctxp->in_handler_expr = true;
20851 484866 : gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
20852 : }
20853 485021 : gimplify_ctxp->in_handler_expr = save_in_handler_expr;
20854 : /* Don't create bogus GIMPLE_TRY with empty cleanup. */
20855 485021 : if (gimple_seq_empty_p (cleanup))
20856 : {
20857 26964 : gimple_seq_add_seq (pre_p, eval);
20858 26964 : ret = GS_ALL_DONE;
20859 26964 : break;
20860 : }
20861 458057 : try_ = gimple_build_try (eval, cleanup,
20862 458057 : TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
20863 : ? GIMPLE_TRY_FINALLY
20864 : : GIMPLE_TRY_CATCH);
20865 458057 : if (EXPR_HAS_LOCATION (save_expr))
20866 838814 : gimple_set_location (try_, EXPR_LOCATION (save_expr));
20867 38650 : else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
20868 28287 : gimple_set_location (try_, saved_location);
20869 458057 : if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
20870 163467 : gimple_try_set_catch_is_cleanup (try_,
20871 163467 : TRY_CATCH_IS_CLEANUP (*expr_p));
20872 458057 : gimplify_seq_add_stmt (pre_p, try_);
20873 458057 : ret = GS_ALL_DONE;
20874 458057 : break;
20875 : }
20876 :
20877 5130300 : case CLEANUP_POINT_EXPR:
20878 5130300 : ret = gimplify_cleanup_point_expr (expr_p, pre_p);
20879 5130300 : break;
20880 :
20881 786300 : case TARGET_EXPR:
20882 786300 : ret = gimplify_target_expr (expr_p, pre_p, post_p);
20883 786300 : break;
20884 :
20885 39611 : case CATCH_EXPR:
20886 39611 : {
20887 39611 : gimple *c;
20888 39611 : gimple_seq handler = NULL;
20889 39611 : gimplify_and_add (CATCH_BODY (*expr_p), &handler);
20890 39611 : c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
20891 39611 : gimplify_seq_add_stmt (pre_p, c);
20892 39611 : ret = GS_ALL_DONE;
20893 39611 : break;
20894 : }
20895 :
20896 4682 : case EH_FILTER_EXPR:
20897 4682 : {
20898 4682 : gimple *ehf;
20899 4682 : gimple_seq failure = NULL;
20900 :
20901 4682 : gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
20902 4682 : ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
20903 4682 : copy_warning (ehf, *expr_p);
20904 4682 : gimplify_seq_add_stmt (pre_p, ehf);
20905 4682 : ret = GS_ALL_DONE;
20906 4682 : break;
20907 : }
20908 :
20909 50404 : case OBJ_TYPE_REF:
20910 50404 : {
20911 50404 : enum gimplify_status r0, r1;
20912 50404 : r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
20913 : post_p, is_gimple_val, fb_rvalue);
20914 50404 : r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
20915 : post_p, is_gimple_val, fb_rvalue);
20916 50404 : TREE_SIDE_EFFECTS (*expr_p) = 0;
20917 50404 : ret = MIN (r0, r1);
20918 : }
20919 : break;
20920 :
20921 40563 : case LABEL_DECL:
20922 : /* We get here when taking the address of a label. We mark
20923 : the label as "forced"; meaning it can never be removed and
20924 : it is a potential target for any computed goto. */
20925 40563 : FORCED_LABEL (*expr_p) = 1;
20926 40563 : ret = GS_ALL_DONE;
20927 40563 : break;
20928 :
20929 8463779 : case STATEMENT_LIST:
20930 8463779 : ret = gimplify_statement_list (expr_p, pre_p);
20931 8463779 : break;
20932 :
20933 1721 : case WITH_SIZE_EXPR:
20934 1721 : {
20935 2003 : gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
20936 : post_p == &internal_post ? NULL : post_p,
20937 : gimple_test_f, fallback);
20938 1721 : gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
20939 : is_gimple_val, fb_rvalue);
20940 1721 : ret = GS_ALL_DONE;
20941 : }
20942 1721 : break;
20943 :
20944 108215563 : case VAR_DECL:
20945 108215563 : case PARM_DECL:
20946 108215563 : ret = gimplify_var_or_parm_decl (expr_p);
20947 108215563 : break;
20948 :
20949 331805 : case RESULT_DECL:
20950 : /* When within an OMP context, notice uses of variables. */
20951 331805 : if (gimplify_omp_ctxp)
20952 1341 : omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
20953 : /* Handlers can refer to the function result; if that has been
20954 : moved, we need to track it. */
20955 331805 : if (gimplify_ctxp->in_handler_expr && gimplify_ctxp->return_temp)
20956 50 : *expr_p = gimplify_ctxp->return_temp;
20957 : ret = GS_ALL_DONE;
20958 : break;
20959 :
20960 0 : case DEBUG_EXPR_DECL:
20961 0 : gcc_unreachable ();
20962 :
20963 2433836 : case DEBUG_BEGIN_STMT:
20964 4867672 : gimplify_seq_add_stmt (pre_p,
20965 : gimple_build_debug_begin_stmt
20966 2433836 : (TREE_BLOCK (*expr_p),
20967 2433836 : EXPR_LOCATION (*expr_p)));
20968 2433836 : ret = GS_ALL_DONE;
20969 2433836 : *expr_p = NULL;
20970 2433836 : break;
20971 :
20972 : case SSA_NAME:
20973 : /* Allow callbacks into the gimplifier during optimization. */
20974 : ret = GS_ALL_DONE;
20975 : break;
20976 :
20977 18228 : case OMP_PARALLEL:
20978 18228 : gimplify_omp_parallel (expr_p, pre_p);
20979 18228 : ret = GS_ALL_DONE;
20980 18228 : break;
20981 :
20982 3852 : case OMP_TASK:
20983 3852 : gimplify_omp_task (expr_p, pre_p);
20984 3852 : ret = GS_ALL_DONE;
20985 3852 : break;
20986 :
20987 10683 : case OMP_SIMD:
20988 10683 : {
20989 : /* Temporarily disable into_ssa, as scan_omp_simd
20990 : which calls copy_gimple_seq_and_replace_locals can't deal
20991 : with SSA_NAMEs defined outside of the body properly. */
20992 10683 : bool saved_into_ssa = gimplify_ctxp->into_ssa;
20993 10683 : gimplify_ctxp->into_ssa = false;
20994 10683 : ret = gimplify_omp_for (expr_p, pre_p);
20995 10683 : gimplify_ctxp->into_ssa = saved_into_ssa;
20996 10683 : break;
20997 : }
20998 :
20999 47398 : case OMP_FOR:
21000 47398 : case OMP_DISTRIBUTE:
21001 47398 : case OMP_TASKLOOP:
21002 47398 : case OMP_TILE:
21003 47398 : case OMP_UNROLL:
21004 47398 : case OACC_LOOP:
21005 47398 : ret = gimplify_omp_for (expr_p, pre_p);
21006 47398 : break;
21007 :
21008 1056 : case OMP_LOOP:
21009 1056 : ret = gimplify_omp_loop (expr_p, pre_p);
21010 1056 : break;
21011 :
21012 665 : case OACC_CACHE:
21013 665 : gimplify_oacc_cache (expr_p, pre_p);
21014 665 : ret = GS_ALL_DONE;
21015 665 : break;
21016 :
21017 254 : case OACC_DECLARE:
21018 254 : gimplify_oacc_declare (expr_p, pre_p);
21019 254 : ret = GS_ALL_DONE;
21020 254 : break;
21021 :
21022 39458 : case OACC_HOST_DATA:
21023 39458 : case OACC_DATA:
21024 39458 : case OACC_KERNELS:
21025 39458 : case OACC_PARALLEL:
21026 39458 : case OACC_SERIAL:
21027 39458 : case OMP_SCOPE:
21028 39458 : case OMP_SECTIONS:
21029 39458 : case OMP_SINGLE:
21030 39458 : case OMP_TARGET:
21031 39458 : case OMP_TARGET_DATA:
21032 39458 : case OMP_TEAMS:
21033 39458 : gimplify_omp_workshare (expr_p, pre_p);
21034 39458 : ret = GS_ALL_DONE;
21035 39458 : break;
21036 :
21037 12057 : case OACC_ENTER_DATA:
21038 12057 : case OACC_EXIT_DATA:
21039 12057 : case OACC_UPDATE:
21040 12057 : case OMP_TARGET_UPDATE:
21041 12057 : case OMP_TARGET_ENTER_DATA:
21042 12057 : case OMP_TARGET_EXIT_DATA:
21043 12057 : gimplify_omp_target_update (expr_p, pre_p);
21044 12057 : ret = GS_ALL_DONE;
21045 12057 : break;
21046 :
21047 7037 : case OMP_SECTION:
21048 7037 : case OMP_STRUCTURED_BLOCK:
21049 7037 : case OMP_MASTER:
21050 7037 : case OMP_MASKED:
21051 7037 : case OMP_ORDERED:
21052 7037 : case OMP_CRITICAL:
21053 7037 : case OMP_SCAN:
21054 7037 : {
21055 7037 : gimple_seq body = NULL;
21056 7037 : gimple *g;
21057 7037 : bool saved_in_omp_construct = in_omp_construct;
21058 :
21059 7037 : in_omp_construct = true;
21060 7037 : gimplify_and_add (OMP_BODY (*expr_p), &body);
21061 7037 : in_omp_construct = saved_in_omp_construct;
21062 7037 : switch (TREE_CODE (*expr_p))
21063 : {
21064 1271 : case OMP_SECTION:
21065 1271 : g = gimple_build_omp_section (body);
21066 1271 : break;
21067 791 : case OMP_STRUCTURED_BLOCK:
21068 791 : g = gimple_build_omp_structured_block (body);
21069 791 : break;
21070 874 : case OMP_MASTER:
21071 874 : g = gimple_build_omp_master (body);
21072 874 : break;
21073 1950 : case OMP_ORDERED:
21074 1950 : g = gimplify_omp_ordered (*expr_p, body);
21075 1950 : if (OMP_BODY (*expr_p) == NULL_TREE
21076 1950 : && gimple_code (g) == GIMPLE_OMP_ORDERED)
21077 1025 : gimple_omp_ordered_standalone (g);
21078 : break;
21079 487 : case OMP_MASKED:
21080 487 : gimplify_scan_omp_clauses (&OMP_MASKED_CLAUSES (*expr_p),
21081 : pre_p, ORT_WORKSHARE, OMP_MASKED);
21082 487 : gimplify_adjust_omp_clauses (pre_p, body,
21083 : &OMP_MASKED_CLAUSES (*expr_p),
21084 : OMP_MASKED);
21085 974 : g = gimple_build_omp_masked (body,
21086 487 : OMP_MASKED_CLAUSES (*expr_p));
21087 487 : break;
21088 546 : case OMP_CRITICAL:
21089 546 : gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p),
21090 : pre_p, ORT_WORKSHARE, OMP_CRITICAL);
21091 546 : gimplify_adjust_omp_clauses (pre_p, body,
21092 : &OMP_CRITICAL_CLAUSES (*expr_p),
21093 : OMP_CRITICAL);
21094 1638 : g = gimple_build_omp_critical (body,
21095 546 : OMP_CRITICAL_NAME (*expr_p),
21096 546 : OMP_CRITICAL_CLAUSES (*expr_p));
21097 546 : break;
21098 1118 : case OMP_SCAN:
21099 1118 : gimplify_scan_omp_clauses (&OMP_SCAN_CLAUSES (*expr_p),
21100 : pre_p, ORT_WORKSHARE, OMP_SCAN);
21101 1118 : gimplify_adjust_omp_clauses (pre_p, body,
21102 : &OMP_SCAN_CLAUSES (*expr_p),
21103 : OMP_SCAN);
21104 1118 : g = gimple_build_omp_scan (body, OMP_SCAN_CLAUSES (*expr_p));
21105 1118 : break;
21106 0 : default:
21107 0 : gcc_unreachable ();
21108 : }
21109 7037 : gimplify_seq_add_stmt (pre_p, g);
21110 7037 : ret = GS_ALL_DONE;
21111 7037 : break;
21112 : }
21113 :
21114 611 : case OMP_TASKGROUP:
21115 611 : {
21116 611 : gimple_seq body = NULL;
21117 :
21118 611 : tree *pclauses = &OMP_TASKGROUP_CLAUSES (*expr_p);
21119 611 : bool saved_in_omp_construct = in_omp_construct;
21120 611 : gimplify_scan_omp_clauses (pclauses, pre_p, ORT_TASKGROUP,
21121 : OMP_TASKGROUP);
21122 611 : gimplify_adjust_omp_clauses (pre_p, NULL, pclauses, OMP_TASKGROUP);
21123 :
21124 611 : in_omp_construct = true;
21125 611 : gimplify_and_add (OMP_BODY (*expr_p), &body);
21126 611 : in_omp_construct = saved_in_omp_construct;
21127 611 : gimple_seq cleanup = NULL;
21128 611 : tree fn = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
21129 611 : gimple *g = gimple_build_call (fn, 0);
21130 611 : gimple_seq_add_stmt (&cleanup, g);
21131 611 : g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
21132 611 : body = NULL;
21133 611 : gimple_seq_add_stmt (&body, g);
21134 611 : g = gimple_build_omp_taskgroup (body, *pclauses);
21135 611 : gimplify_seq_add_stmt (pre_p, g);
21136 611 : ret = GS_ALL_DONE;
21137 611 : break;
21138 : }
21139 :
21140 612 : case OMP_INTEROP:
21141 612 : ret = gimplify_omp_interop (expr_p, pre_p);
21142 612 : break;
21143 10236 : case OMP_ATOMIC:
21144 10236 : case OMP_ATOMIC_READ:
21145 10236 : case OMP_ATOMIC_CAPTURE_OLD:
21146 10236 : case OMP_ATOMIC_CAPTURE_NEW:
21147 10236 : ret = gimplify_omp_atomic (expr_p, pre_p);
21148 10236 : break;
21149 :
21150 852 : case OMP_DISPATCH:
21151 852 : ret = gimplify_omp_dispatch (expr_p, pre_p);
21152 852 : break;
21153 :
21154 174 : case OMP_METADIRECTIVE:
21155 174 : ret = gimplify_omp_metadirective (expr_p, pre_p, post_p,
21156 : gimple_test_f, fallback);
21157 174 : break;
21158 :
21159 36 : case OMP_NEXT_VARIANT:
21160 36 : case OMP_TARGET_DEVICE_MATCHES:
21161 : /* These are placeholders for constants. There's nothing to do with
21162 : them here but we must mark the containing function as needing
21163 : to run the ompdevlow pass to resolve them. Note that
21164 : OMP_TARGET_DEVICE_MATCHES, in particular, may be inserted by
21165 : the front ends. */
21166 36 : cgraph_node::get (cfun->decl)->has_omp_variant_constructs = 1;
21167 36 : ret = GS_ALL_DONE;
21168 36 : break;
21169 :
21170 0 : case OMP_DECLARE_MAPPER:
21171 0 : ret = gimplify_omp_declare_mapper (expr_p);
21172 0 : break;
21173 :
21174 478 : case TRANSACTION_EXPR:
21175 478 : ret = gimplify_transaction (expr_p, pre_p);
21176 478 : break;
21177 :
21178 288089 : case TRUTH_AND_EXPR:
21179 288089 : case TRUTH_OR_EXPR:
21180 288089 : case TRUTH_XOR_EXPR:
21181 288089 : {
21182 288089 : tree orig_type = TREE_TYPE (*expr_p);
21183 288089 : tree new_type, xop0, xop1;
21184 288089 : *expr_p = gimple_boolify (*expr_p);
21185 288089 : new_type = TREE_TYPE (*expr_p);
21186 288089 : if (!useless_type_conversion_p (orig_type, new_type))
21187 : {
21188 1143 : *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
21189 1143 : ret = GS_OK;
21190 1143 : break;
21191 : }
21192 :
21193 : /* Boolified binary truth expressions are semantically equivalent
21194 : to bitwise binary expressions. Canonicalize them to the
21195 : bitwise variant. */
21196 286946 : switch (TREE_CODE (*expr_p))
21197 : {
21198 170429 : case TRUTH_AND_EXPR:
21199 170429 : TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
21200 170429 : break;
21201 115916 : case TRUTH_OR_EXPR:
21202 115916 : TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
21203 115916 : break;
21204 601 : case TRUTH_XOR_EXPR:
21205 601 : TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
21206 601 : break;
21207 : default:
21208 : break;
21209 : }
21210 : /* Now make sure that operands have compatible type to
21211 : expression's new_type. */
21212 286946 : xop0 = TREE_OPERAND (*expr_p, 0);
21213 286946 : xop1 = TREE_OPERAND (*expr_p, 1);
21214 286946 : if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
21215 326 : TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
21216 : new_type,
21217 : xop0);
21218 286946 : if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
21219 366 : TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
21220 : new_type,
21221 : xop1);
21222 : /* Continue classified as tcc_binary. */
21223 286946 : goto expr_2;
21224 : }
21225 :
21226 10267 : case VEC_COND_EXPR:
21227 10267 : goto expr_3;
21228 :
21229 133408 : case VEC_PERM_EXPR:
21230 : /* Classified as tcc_expression. */
21231 133408 : goto expr_3;
21232 :
21233 48 : case BIT_INSERT_EXPR:
21234 : /* Argument 3 is a constant. */
21235 48 : goto expr_2;
21236 :
21237 3848331 : case POINTER_PLUS_EXPR:
21238 3848331 : {
21239 3848331 : enum gimplify_status r0, r1;
21240 3848331 : r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
21241 : post_p, is_gimple_val, fb_rvalue);
21242 3848331 : r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
21243 : post_p, is_gimple_val, fb_rvalue);
21244 3848331 : recalculate_side_effects (*expr_p);
21245 3848331 : ret = MIN (r0, r1);
21246 : break;
21247 : }
21248 :
21249 44163191 : default:
21250 44163191 : switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
21251 : {
21252 6952579 : case tcc_comparison:
21253 : /* Handle comparison of objects of non scalar mode aggregates
21254 : with a call to memcmp. It would be nice to only have to do
21255 : this for variable-sized objects, but then we'd have to allow
21256 : the same nest of reference nodes we allow for MODIFY_EXPR and
21257 : that's too complex.
21258 :
21259 : Compare scalar mode aggregates as scalar mode values. Using
21260 : memcmp for them would be very inefficient at best, and is
21261 : plain wrong if bitfields are involved. */
21262 6952579 : if (error_operand_p (TREE_OPERAND (*expr_p, 1)))
21263 : ret = GS_ERROR;
21264 : else
21265 : {
21266 6952579 : tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
21267 :
21268 : /* Vector comparisons need no boolification. */
21269 6952579 : if (TREE_CODE (type) == VECTOR_TYPE)
21270 12300 : goto expr_2;
21271 6940279 : else if (!AGGREGATE_TYPE_P (type))
21272 : {
21273 6940263 : tree org_type = TREE_TYPE (*expr_p);
21274 6940263 : *expr_p = gimple_boolify (*expr_p);
21275 6940263 : if (!useless_type_conversion_p (org_type,
21276 6940263 : TREE_TYPE (*expr_p)))
21277 : {
21278 55769 : *expr_p = fold_convert_loc (input_location,
21279 : org_type, *expr_p);
21280 55769 : ret = GS_OK;
21281 : }
21282 : else
21283 6884494 : goto expr_2;
21284 : }
21285 16 : else if (SCALAR_INT_MODE_P (TYPE_MODE (type)))
21286 16 : ret = gimplify_scalar_mode_aggregate_compare (expr_p);
21287 : else
21288 0 : ret = gimplify_variable_sized_compare (expr_p);
21289 : }
21290 : break;
21291 :
21292 : /* If *EXPR_P does not need to be special-cased, handle it
21293 : according to its class. */
21294 1606487 : case tcc_unary:
21295 1606487 : ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
21296 : post_p, is_gimple_val, fb_rvalue);
21297 1606487 : break;
21298 :
21299 26278051 : case tcc_binary:
21300 26278051 : expr_2:
21301 26278051 : {
21302 26278051 : enum gimplify_status r0, r1;
21303 :
21304 26278051 : r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
21305 : post_p, is_gimple_val, fb_rvalue);
21306 26278051 : r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
21307 : post_p, is_gimple_val, fb_rvalue);
21308 :
21309 26278051 : ret = MIN (r0, r1);
21310 : break;
21311 : }
21312 :
21313 143675 : expr_3:
21314 143675 : {
21315 143675 : enum gimplify_status r0, r1, r2;
21316 :
21317 143675 : r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
21318 : post_p, is_gimple_val, fb_rvalue);
21319 143675 : r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
21320 : post_p, is_gimple_val, fb_rvalue);
21321 143675 : r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
21322 : post_p, is_gimple_val, fb_rvalue);
21323 :
21324 143675 : ret = MIN (MIN (r0, r1), r2);
21325 : break;
21326 : }
21327 :
21328 16509862 : case tcc_declaration:
21329 16509862 : case tcc_constant:
21330 16509862 : ret = GS_ALL_DONE;
21331 16509862 : goto dont_recalculate;
21332 :
21333 0 : default:
21334 0 : gcc_unreachable ();
21335 : }
21336 :
21337 28083998 : recalculate_side_effects (*expr_p);
21338 :
21339 476228919 : dont_recalculate:
21340 : break;
21341 : }
21342 :
21343 476228919 : gcc_assert (*expr_p || ret != GS_OK);
21344 : }
21345 480896376 : while (ret == GS_OK);
21346 :
21347 : /* If we encountered an error_mark somewhere nested inside, either
21348 : stub out the statement or propagate the error back out. */
21349 463435496 : if (ret == GS_ERROR)
21350 : {
21351 5206 : if (is_statement)
21352 3985 : *expr_p = NULL;
21353 5206 : goto out;
21354 : }
21355 :
21356 : /* This was only valid as a return value from the langhook, which
21357 : we handled. Make sure it doesn't escape from any other context. */
21358 463430290 : gcc_assert (ret != GS_UNHANDLED);
21359 :
21360 463430290 : if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
21361 : {
21362 : /* We aren't looking for a value, and we don't have a valid
21363 : statement. If it doesn't have side-effects, throw it away.
21364 : We can also get here with code such as "*&&L;", where L is
21365 : a LABEL_DECL that is marked as FORCED_LABEL. */
21366 1171159 : if (TREE_CODE (*expr_p) == LABEL_DECL
21367 1171159 : || !TREE_SIDE_EFFECTS (*expr_p))
21368 1170143 : *expr_p = NULL;
21369 1016 : else if (!TREE_THIS_VOLATILE (*expr_p))
21370 : {
21371 : /* This is probably a _REF that contains something nested that
21372 : has side effects. Recurse through the operands to find it. */
21373 0 : enum tree_code code = TREE_CODE (*expr_p);
21374 :
21375 0 : switch (code)
21376 : {
21377 0 : case COMPONENT_REF:
21378 0 : case REALPART_EXPR:
21379 0 : case IMAGPART_EXPR:
21380 0 : case VIEW_CONVERT_EXPR:
21381 0 : gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
21382 : gimple_test_f, fallback);
21383 0 : break;
21384 :
21385 0 : case ARRAY_REF:
21386 0 : case ARRAY_RANGE_REF:
21387 0 : gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
21388 : gimple_test_f, fallback);
21389 0 : gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
21390 : gimple_test_f, fallback);
21391 0 : break;
21392 :
21393 0 : default:
21394 : /* Anything else with side-effects must be converted to
21395 : a valid statement before we get here. */
21396 0 : gcc_unreachable ();
21397 : }
21398 :
21399 0 : *expr_p = NULL;
21400 : }
21401 1016 : else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
21402 966 : && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode
21403 1969 : && !is_empty_type (TREE_TYPE (*expr_p)))
21404 : {
21405 : /* Historically, the compiler has treated a bare reference
21406 : to a non-BLKmode volatile lvalue as forcing a load. */
21407 893 : tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
21408 :
21409 : /* Normally, we do not want to create a temporary for a
21410 : TREE_ADDRESSABLE type because such a type should not be
21411 : copied by bitwise-assignment. However, we make an
21412 : exception here, as all we are doing here is ensuring that
21413 : we read the bytes that make up the type. We use
21414 : create_tmp_var_raw because create_tmp_var will abort when
21415 : given a TREE_ADDRESSABLE type. */
21416 893 : tree tmp = create_tmp_var_raw (type, "vol");
21417 893 : gimple_add_tmp_var (tmp);
21418 893 : gimplify_assign (tmp, *expr_p, pre_p);
21419 893 : *expr_p = NULL;
21420 : }
21421 : else
21422 : /* We can't do anything useful with a volatile reference to
21423 : an incomplete type, so just throw it away. Likewise for
21424 : a BLKmode type, since any implicit inner load should
21425 : already have been turned into an explicit one by the
21426 : gimplification process. */
21427 123 : *expr_p = NULL;
21428 : }
21429 :
21430 : /* If we are gimplifying at the statement level, we're done. Tack
21431 : everything together and return. */
21432 463430290 : if (fallback == fb_none || is_statement)
21433 : {
21434 : /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
21435 : it out for GC to reclaim it. */
21436 100548495 : *expr_p = NULL_TREE;
21437 :
21438 100548495 : if (!gimple_seq_empty_p (internal_pre)
21439 100548495 : || !gimple_seq_empty_p (internal_post))
21440 : {
21441 19 : gimplify_seq_add_seq (&internal_pre, internal_post);
21442 19 : gimplify_seq_add_seq (pre_p, internal_pre);
21443 : }
21444 :
21445 : /* The result of gimplifying *EXPR_P is going to be the last few
21446 : statements in *PRE_P and *POST_P. Add location information
21447 : to all the statements that were added by the gimplification
21448 : helpers. */
21449 100548495 : if (!gimple_seq_empty_p (*pre_p))
21450 98039833 : annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
21451 :
21452 100548495 : if (!gimple_seq_empty_p (*post_p))
21453 19 : annotate_all_with_location_after (*post_p, post_last_gsi,
21454 : input_location);
21455 :
21456 100548495 : goto out;
21457 : }
21458 :
21459 : #ifdef ENABLE_GIMPLE_CHECKING
21460 362881795 : if (*expr_p)
21461 : {
21462 362881795 : enum tree_code code = TREE_CODE (*expr_p);
21463 : /* These expressions should already be in gimple IR form. */
21464 362881795 : gcc_assert (code != MODIFY_EXPR
21465 : && code != ASM_EXPR
21466 : && code != BIND_EXPR
21467 : && code != CATCH_EXPR
21468 : && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
21469 : && code != EH_FILTER_EXPR
21470 : && code != GOTO_EXPR
21471 : && code != LABEL_EXPR
21472 : && code != LOOP_EXPR
21473 : && code != SWITCH_EXPR
21474 : && code != TRY_FINALLY_EXPR
21475 : && code != EH_ELSE_EXPR
21476 : && code != OACC_PARALLEL
21477 : && code != OACC_KERNELS
21478 : && code != OACC_SERIAL
21479 : && code != OACC_DATA
21480 : && code != OACC_HOST_DATA
21481 : && code != OACC_DECLARE
21482 : && code != OACC_UPDATE
21483 : && code != OACC_ENTER_DATA
21484 : && code != OACC_EXIT_DATA
21485 : && code != OACC_CACHE
21486 : && code != OMP_CRITICAL
21487 : && code != OMP_FOR
21488 : && code != OACC_LOOP
21489 : && code != OMP_MASTER
21490 : && code != OMP_MASKED
21491 : && code != OMP_TASKGROUP
21492 : && code != OMP_ORDERED
21493 : && code != OMP_PARALLEL
21494 : && code != OMP_SCAN
21495 : && code != OMP_SECTIONS
21496 : && code != OMP_SECTION
21497 : && code != OMP_STRUCTURED_BLOCK
21498 : && code != OMP_SINGLE
21499 : && code != OMP_SCOPE
21500 : && code != OMP_DISPATCH);
21501 : }
21502 : #endif
21503 :
21504 : /* Otherwise we're gimplifying a subexpression, so the resulting
21505 : value is interesting. If it's a valid operand that matches
21506 : GIMPLE_TEST_F, we're done. Unless we are handling some
21507 : post-effects internally; if that's the case, we need to copy into
21508 : a temporary before adding the post-effects to POST_P. */
21509 362881795 : if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
21510 336952845 : goto out;
21511 :
21512 : /* Otherwise, we need to create a new temporary for the gimplified
21513 : expression. */
21514 :
21515 : /* We can't return an lvalue if we have an internal postqueue. The
21516 : object the lvalue refers to would (probably) be modified by the
21517 : postqueue; we need to copy the value out first, which means an
21518 : rvalue. */
21519 25928950 : if ((fallback & fb_lvalue)
21520 574162 : && gimple_seq_empty_p (internal_post)
21521 26503112 : && is_gimple_addressable (*expr_p))
21522 : {
21523 : /* An lvalue will do. Take the address of the expression, store it
21524 : in a temporary, and replace the expression with an INDIRECT_REF of
21525 : that temporary. */
21526 15 : tree ref_alias_type = reference_alias_ptr_type (*expr_p);
21527 15 : unsigned int ref_align = get_object_alignment (*expr_p);
21528 15 : tree ref_type = TREE_TYPE (*expr_p);
21529 15 : tmp = build_fold_addr_expr_loc (input_location, *expr_p);
21530 15 : gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
21531 15 : if (TYPE_ALIGN (ref_type) != ref_align)
21532 1 : ref_type = build_aligned_type (ref_type, ref_align);
21533 15 : *expr_p = build2 (MEM_REF, ref_type,
21534 : tmp, build_zero_cst (ref_alias_type));
21535 : }
21536 25928935 : else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
21537 : {
21538 : /* An rvalue will do. Assign the gimplified expression into a
21539 : new temporary TMP and replace the original expression with
21540 : TMP. First, make sure that the expression has a type so that
21541 : it can be assigned into a temporary. */
21542 25928927 : gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
21543 25928927 : *expr_p = get_formal_tmp_var (*expr_p, pre_p);
21544 : }
21545 : else
21546 : {
21547 : #ifdef ENABLE_GIMPLE_CHECKING
21548 8 : if (!(fallback & fb_mayfail))
21549 : {
21550 0 : fprintf (stderr, "gimplification failed:\n");
21551 0 : print_generic_expr (stderr, *expr_p);
21552 0 : debug_tree (*expr_p);
21553 0 : internal_error ("gimplification failed");
21554 : }
21555 : #endif
21556 8 : gcc_assert (fallback & fb_mayfail);
21557 :
21558 : /* If this is an asm statement, and the user asked for the
21559 : impossible, don't die. Fail and let gimplify_asm_expr
21560 : issue an error. */
21561 8 : ret = GS_ERROR;
21562 8 : goto out;
21563 : }
21564 :
21565 : /* Make sure the temporary matches our predicate. */
21566 25928942 : gcc_assert ((*gimple_test_f) (*expr_p));
21567 :
21568 25928942 : if (!gimple_seq_empty_p (internal_post))
21569 : {
21570 0 : annotate_all_with_location (internal_post, input_location);
21571 0 : gimplify_seq_add_seq (pre_p, internal_post);
21572 : }
21573 :
21574 25928942 : out:
21575 463435496 : input_location = saved_location;
21576 463435496 : return ret;
21577 : }
21578 :
21579 : /* Like gimplify_expr but make sure the gimplified result is not itself
21580 : a SSA name (but a decl if it were). Temporaries required by
21581 : evaluating *EXPR_P may be still SSA names. */
21582 :
21583 : static enum gimplify_status
21584 33511729 : gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
21585 : bool (*gimple_test_f) (tree), fallback_t fallback,
21586 : bool allow_ssa)
21587 : {
21588 33511729 : enum gimplify_status ret = gimplify_expr (expr_p, pre_p, post_p,
21589 : gimple_test_f, fallback);
21590 33511729 : if (! allow_ssa
21591 266081 : && TREE_CODE (*expr_p) == SSA_NAME)
21592 68561 : *expr_p = get_initialized_tmp_var (*expr_p, pre_p, NULL, false);
21593 33511729 : return ret;
21594 : }
21595 :
/* Look through TYPE for variable-sized objects and gimplify each such
   size that we find.  Add to LIST_P any statements generated.  */

void
gimplify_type_sizes (tree type, gimple_seq *list_p)
{
  if (type == NULL || type == error_mark_node)
    return;

  /* Whether the type's name is an artificial TYPE_DECL; computed before
     switching to the main variant below, since the variant we were handed
     carries the relevant TYPE_NAME.  */
  const bool ignored_p
    = TYPE_NAME (type)
      && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
      && DECL_IGNORED_P (TYPE_NAME (type));
  tree t;

  /* We first do the main variant, then copy into any other variants.  */
  type = TYPE_MAIN_VARIANT (type);

  /* Avoid infinite recursion.  */
  if (TYPE_SIZES_GIMPLIFIED (type))
    return;

  /* Mark before recursing so self-referential types terminate.  */
  TYPE_SIZES_GIMPLIFIED (type) = 1;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
      /* Scalar types: gimplify the (possibly variable) bounds, then
	 propagate the shared bound trees to every variant.  */
      gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
      gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);

      for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
	{
	  TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
	  TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
	}
      break;

    case ARRAY_TYPE:
      /* These types may not have declarations, so handle them here.  */
      gimplify_type_sizes (TREE_TYPE (type), list_p);
      gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
      /* Ensure VLA bounds aren't removed, for -O0 they should be variables
	 with assigned stack slots, for -O1+ -g they should be tracked
	 by VTA.  */
      if (!ignored_p
	  && TYPE_DOMAIN (type)
	  && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
	{
	  t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
	  if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
	    DECL_IGNORED_P (t) = 0;
	  t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	  if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
	    DECL_IGNORED_P (t) = 0;
	}
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      /* Walk the member list; only FIELD_DECLs carry sizes/offsets.  */
      for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL)
	  {
	    gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
	    /* Likewise, ensure variable offsets aren't removed.  */
	    if (!ignored_p
		&& (t = DECL_FIELD_OFFSET (field))
		&& VAR_P (t)
		&& DECL_ARTIFICIAL (t))
	      DECL_IGNORED_P (t) = 0;
	    gimplify_one_sizepos (&DECL_SIZE (field), list_p);
	    gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
	    gimplify_type_sizes (TREE_TYPE (field), list_p);
	  }
      break;

    case POINTER_TYPE:
    case REFERENCE_TYPE:
	/* We used to recurse on the pointed-to type here, which turned out to
	   be incorrect because its definition might refer to variables not
	   yet initialized at this point if a forward declaration is involved.

	   It was actually useful for anonymous pointed-to types to ensure
	   that the sizes evaluation dominates every possible later use of the
	   values.  Restricting to such types here would be safe since there
	   is no possible forward declaration around, but would introduce an
	   undesirable middle-end semantic to anonymity.  We then defer to
	   front-ends the responsibility of ensuring that the sizes are
	   evaluated both early and late enough, e.g. by attaching artificial
	   type declarations to the tree.  */
      break;

    default:
      break;
    }

  /* Gimplify the overall size/alignment expressions for the main variant
     and share the results with every other variant, marking them done.  */
  gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
  gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);

  for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
    {
      TYPE_SIZE (t) = TYPE_SIZE (type);
      TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
      TYPE_SIZES_GIMPLIFIED (t) = 1;
    }
}
21706 :
21707 : /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
21708 : a size or position, has had all of its SAVE_EXPRs evaluated.
21709 : We add any required statements to *STMT_P. */
21710 :
21711 : void
21712 8973773 : gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
21713 : {
21714 8973773 : tree expr = *expr_p;
21715 :
21716 : /* We don't do anything if the value isn't there, is constant, or contains
21717 : A PLACEHOLDER_EXPR. We also don't want to do anything if it's already
21718 : a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
21719 : will want to replace it with a new variable, but that will cause problems
21720 : if this type is from outside the function. It's OK to have that here. */
21721 8973773 : if (expr == NULL_TREE
21722 8977647 : || is_gimple_constant (expr)
21723 80770 : || VAR_P (expr)
21724 9050669 : || CONTAINS_PLACEHOLDER_P (expr))
21725 8896877 : return;
21726 :
21727 76896 : *expr_p = unshare_expr (expr);
21728 :
21729 : /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
21730 : if the def vanishes. */
21731 76896 : gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue, false);
21732 :
21733 : /* If expr wasn't already is_gimple_sizepos or is_gimple_constant from the
21734 : FE, ensure that it is a VAR_DECL, otherwise we might handle some decls
21735 : as gimplify_vla_decl even when they would have all sizes INTEGER_CSTs. */
21736 76896 : if (is_gimple_constant (*expr_p))
21737 548 : *expr_p = get_initialized_tmp_var (*expr_p, stmt_p, NULL, false);
21738 : }
21739 :
/* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
   containing the sequence of corresponding GIMPLE statements.  If DO_PARMS
   is true, also gimplify the parameters.  */

gbind *
gimplify_body (tree fndecl, bool do_parms)
{
  location_t saved_location = input_location;
  gimple_seq parm_stmts, parm_cleanup = NULL, seq;
  gimple *outer_stmt;
  gbind *outer_bind;

  timevar_push (TV_TREE_GIMPLIFY);

  init_tree_ssa (cfun);

  /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
     gimplification.  */
  default_rtl_profile ();

  /* Gimplification contexts must properly nest; none may be live here.  */
  gcc_assert (gimplify_ctxp == NULL);
  push_gimplify_context (true);

  if (flag_openacc || flag_openmp)
    {
      gcc_assert (gimplify_omp_ctxp == NULL);
      /* "omp declare target" functions get an implicit target context.  */
      if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
	gimplify_omp_ctxp = new_omp_context (ORT_IMPLICIT_TARGET);
    }

  /* Unshare most shared trees in the body and in that of any nested functions.
     It would seem we don't have to do this for nested functions because
     they are supposed to be output and then the outer function gimplified
     first, but the g++ front end doesn't always do it that way.  */
  unshare_body (fndecl);
  unvisit_body (fndecl);

  /* Make sure input_location isn't set to something weird.  */
  input_location = DECL_SOURCE_LOCATION (fndecl);

  /* Resolve callee-copies.  This has to be done before processing
     the body so that DECL_VALUE_EXPR gets processed correctly.  */
  parm_stmts = do_parms ? gimplify_parameters (&parm_cleanup) : NULL;

  /* Gimplify the function's body.  */
  seq = NULL;
  gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
  outer_stmt = gimple_seq_first_nondebug_stmt (seq);
  if (!outer_stmt)
    {
      /* An empty body still needs one statement to anchor the bind.  */
      outer_stmt = gimple_build_nop ();
      gimplify_seq_add_stmt (&seq, outer_stmt);
    }

  /* The body must contain exactly one statement, a GIMPLE_BIND.  If this is
     not the case, wrap everything in a GIMPLE_BIND to make it so.  */
  if (gimple_code (outer_stmt) == GIMPLE_BIND
      && (gimple_seq_first_nondebug_stmt (seq)
	  == gimple_seq_last_nondebug_stmt (seq)))
    {
      outer_bind = as_a <gbind *> (outer_stmt);
      if (gimple_seq_first_stmt (seq) != outer_stmt
	  || gimple_seq_last_stmt (seq) != outer_stmt)
	{
	  /* If there are debug stmts before or after outer_stmt, move them
	     inside of outer_bind body.  */
	  gimple_stmt_iterator gsi = gsi_for_stmt (outer_stmt, &seq);
	  gimple_seq second_seq = NULL;
	  if (gimple_seq_first_stmt (seq) != outer_stmt
	      && gimple_seq_last_stmt (seq) != outer_stmt)
	    {
	      /* Debug stmts on both sides: split after the bind.  */
	      second_seq = gsi_split_seq_after (gsi);
	      gsi_remove (&gsi, false);
	    }
	  else if (gimple_seq_first_stmt (seq) != outer_stmt)
	    /* Debug stmts only before the bind.  */
	    gsi_remove (&gsi, false);
	  else
	    {
	      /* Debug stmts only after the bind.  */
	      gsi_remove (&gsi, false);
	      second_seq = seq;
	      seq = NULL;
	    }
	  gimple_seq_add_seq_without_update (&seq,
					     gimple_bind_body (outer_bind));
	  gimple_seq_add_seq_without_update (&seq, second_seq);
	  gimple_bind_set_body (outer_bind, seq);
	}
    }
  else
    outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);

  /* The GENERIC body has been consumed; drop it so GC can reclaim it.  */
  DECL_SAVED_TREE (fndecl) = NULL_TREE;

  /* If we had callee-copies statements, insert them at the beginning
     of the function and clear DECL_HAS_VALUE_EXPR_P on the parameters.  */
  if (!gimple_seq_empty_p (parm_stmts))
    {
      tree parm;

      gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
      if (parm_cleanup)
	{
	  /* Run the parameter cleanups even if the body throws.  */
	  gtry *g = gimple_build_try (parm_stmts, parm_cleanup,
				      GIMPLE_TRY_FINALLY);
	  parm_stmts = NULL;
	  gimple_seq_add_stmt (&parm_stmts, g);
	}
      gimple_bind_set_body (outer_bind, parm_stmts);

      for (parm = DECL_ARGUMENTS (current_function_decl);
	   parm; parm = DECL_CHAIN (parm))
	if (DECL_HAS_VALUE_EXPR_P (parm))
	  {
	    DECL_HAS_VALUE_EXPR_P (parm) = 0;
	    DECL_IGNORED_P (parm) = 0;
	  }
    }

  /* Tear down the implicit-target OMP context opened above, if any.  */
  if ((flag_openacc || flag_openmp || flag_openmp_simd)
      && gimplify_omp_ctxp)
    {
      delete_omp_context (gimplify_omp_ctxp);
      gimplify_omp_ctxp = NULL;
    }

  pop_gimplify_context (outer_bind);
  gcc_assert (gimplify_ctxp == NULL);

  if (flag_checking && !seen_error ())
    verify_gimple_in_seq (gimple_bind_body (outer_bind));

  timevar_pop (TV_TREE_GIMPLIFY);
  input_location = saved_location;

  return outer_bind;
}
21876 :
21877 : typedef char *char_p; /* For DEF_VEC_P. */
21878 :
21879 : /* Return whether we should exclude FNDECL from instrumentation. */
21880 :
21881 : static bool
21882 43 : flag_instrument_functions_exclude_p (tree fndecl)
21883 : {
21884 43 : vec<char_p> *v;
21885 :
21886 43 : v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
21887 44 : if (v && v->length () > 0)
21888 : {
21889 1 : const char *name;
21890 1 : int i;
21891 1 : char *s;
21892 :
21893 1 : name = lang_hooks.decl_printable_name (fndecl, 1);
21894 2 : FOR_EACH_VEC_ELT (*v, i, s)
21895 1 : if (strstr (name, s) != NULL)
21896 2 : return true;
21897 : }
21898 :
21899 42 : v = (vec<char_p> *) flag_instrument_functions_exclude_files;
21900 43 : if (v && v->length () > 0)
21901 : {
21902 1 : const char *name;
21903 1 : int i;
21904 1 : char *s;
21905 :
21906 1 : name = DECL_SOURCE_FILE (fndecl);
21907 1 : FOR_EACH_VEC_ELT (*v, i, s)
21908 1 : if (strstr (name, s) != NULL)
21909 2 : return true;
21910 : }
21911 :
21912 : return false;
21913 : }
21914 :
21915 : /* Build a call to the instrumentation function FNCODE and add it to SEQ.
21916 : If COND_VAR is not NULL, it is a boolean variable guarding the call to
21917 : the instrumentation function. IF STMT is not NULL, it is a statement
21918 : to be executed just before the call to the instrumentation function. */
21919 :
21920 : static void
21921 82 : build_instrumentation_call (gimple_seq *seq, enum built_in_function fncode,
21922 : tree cond_var, gimple *stmt)
21923 : {
21924 : /* The instrumentation hooks aren't going to call the instrumented
21925 : function and the address they receive is expected to be matchable
21926 : against symbol addresses. Make sure we don't create a trampoline,
21927 : in case the current function is nested. */
21928 82 : tree this_fn_addr = build_fold_addr_expr (current_function_decl);
21929 82 : TREE_NO_TRAMPOLINE (this_fn_addr) = 1;
21930 :
21931 82 : tree label_true, label_false;
21932 82 : if (cond_var)
21933 : {
21934 20 : label_true = create_artificial_label (UNKNOWN_LOCATION);
21935 20 : label_false = create_artificial_label (UNKNOWN_LOCATION);
21936 20 : gcond *cond = gimple_build_cond (EQ_EXPR, cond_var, boolean_false_node,
21937 : label_true, label_false);
21938 20 : gimplify_seq_add_stmt (seq, cond);
21939 20 : gimplify_seq_add_stmt (seq, gimple_build_label (label_true));
21940 20 : gimplify_seq_add_stmt (seq, gimple_build_predict (PRED_COLD_LABEL,
21941 : NOT_TAKEN));
21942 : }
21943 :
21944 82 : if (stmt)
21945 10 : gimplify_seq_add_stmt (seq, stmt);
21946 :
21947 82 : tree x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
21948 82 : gcall *call = gimple_build_call (x, 1, integer_zero_node);
21949 82 : tree tmp_var = create_tmp_var (ptr_type_node, "return_addr");
21950 82 : gimple_call_set_lhs (call, tmp_var);
21951 82 : gimplify_seq_add_stmt (seq, call);
21952 82 : x = builtin_decl_implicit (fncode);
21953 82 : call = gimple_build_call (x, 2, this_fn_addr, tmp_var);
21954 82 : gimplify_seq_add_stmt (seq, call);
21955 :
21956 82 : if (cond_var)
21957 20 : gimplify_seq_add_stmt (seq, gimple_build_label (label_false));
21958 82 : }
21959 :
21960 : /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
21961 : node for the function we want to gimplify.
21962 :
21963 : Return the sequence of GIMPLE statements corresponding to the body
21964 : of FNDECL. */
21965 :
void
gimplify_function_tree (tree fndecl)
{
  gimple_seq seq;
  gbind *bind;

  /* FNDECL must not have been gimplified already.  */
  gcc_assert (!gimple_body (fndecl));

  /* Make FNDECL's struct function the current one (cfun), creating it
     first if the front end has not already done so.  */
  if (DECL_STRUCT_FUNCTION (fndecl))
    push_cfun (DECL_STRUCT_FUNCTION (fndecl));
  else
    push_struct_function (fndecl);

  /* Restart condition UIDs for this function and discard any mapping
     left over from a previously gimplified function.  */
  reset_cond_uid ();
  if (cond_uids)
    {
      delete cond_uids;
      cond_uids = NULL;
    }

  /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
     if necessary.  */
  cfun->curr_properties |= PROP_gimple_lva;

  /* Per-function scratch sets used during gimplification of the body:
     variables poisoned for -fsanitize-address-use-after-scope, and
     OpenMP variant calls already resolved.  Both are torn down again
     right after gimplify_body.  */
  if (asan_sanitize_use_after_scope ())
    asan_poisoned_variables = new hash_set<tree> ();
  if (flag_openmp)
    omp_resolved_variant_calls = new hash_set<tree> ();

  /* Lower the GENERIC body into a GIMPLE_BIND.  */
  bind = gimplify_body (fndecl, true);

  if (omp_resolved_variant_calls)
    {
      delete omp_resolved_variant_calls;
      omp_resolved_variant_calls = NULL;
    }
  if (asan_poisoned_variables)
    {
      delete asan_poisoned_variables;
      asan_poisoned_variables = NULL;
    }

  /* The tree body of the function is no longer needed, replace it
     with the new GIMPLE body.  */
  seq = NULL;
  gimple_seq_add_stmt (&seq, bind);
  gimple_set_body (fndecl, seq);

  /* If we're instrumenting function entry/exit, then prepend the call to
     the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
     catch the exit hook.  */
  /* ??? Add some way to ignore exceptions for this TFE.  */
  if (flag_instrument_function_entry_exit
      && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
      /* Do not instrument extern inline functions.  */
      && !(DECL_DECLARED_INLINE_P (fndecl)
	   && DECL_EXTERNAL (fndecl)
	   && DECL_DISREGARD_INLINE_LIMITS (fndecl))
      && !flag_instrument_functions_exclude_p (fndecl))
    {
      gimple_seq body = NULL, cleanup = NULL;
      gassign *assign;
      tree cond_var;

      /* If -finstrument-functions-once is specified, generate:

	   static volatile bool C.0 = false;
	   bool tmp_called;

	   tmp_called = C.0;
	   if (!tmp_called)
	     {
	       C.0 = true;
	       [call profiling enter function]
	     }

	 without specific protection for data races.  */
      if (flag_instrument_function_entry_exit > 1)
	{
	  /* C.0: a function-local static flag recording whether the
	     hooks have already run once.  Deliberately volatile but
	     not atomic (see comment above re data races).  */
	  tree first_var
	    = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
			  VAR_DECL,
			  create_tmp_var_name ("C"),
			  boolean_type_node);
	  DECL_ARTIFICIAL (first_var) = 1;
	  DECL_IGNORED_P (first_var) = 1;
	  TREE_STATIC (first_var) = 1;
	  TREE_THIS_VOLATILE (first_var) = 1;
	  TREE_USED (first_var) = 1;
	  DECL_INITIAL (first_var) = boolean_false_node;
	  varpool_node::add (first_var);

	  /* tmp_called = C.0;  guards both the enter and exit hooks.  */
	  cond_var = create_tmp_var (boolean_type_node, "tmp_called");
	  assign = gimple_build_assign (cond_var, first_var);
	  gimplify_seq_add_stmt (&body, assign);

	  /* C.0 = true; emitted inside the guarded region by
	     build_instrumentation_call (passed as STMT below).  */
	  assign = gimple_build_assign (first_var, boolean_true_node);
	}

      else
	{
	  cond_var = NULL_TREE;
	  assign = NULL;
	}

      build_instrumentation_call (&body, BUILT_IN_PROFILE_FUNC_ENTER,
				  cond_var, assign);

      /* If -finstrument-functions-once is specified, generate:

	   if (!tmp_called)
	     [call profiling exit function]

	 without specific protection for data races.  */
      build_instrumentation_call (&cleanup, BUILT_IN_PROFILE_FUNC_EXIT,
				  cond_var, NULL);

      /* Wrap the existing body so the exit hook runs on every path out,
	 including exceptional ones.  */
      gimple *tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
      gimplify_seq_add_stmt (&body, tf);
      gbind *new_bind = gimple_build_bind (NULL, body, NULL);

      /* Replace the current function body with the body
	 wrapped in the try/finally TF.  */
      seq = NULL;
      gimple_seq_add_stmt (&seq, new_bind);
      gimple_set_body (fndecl, seq);
      bind = new_bind;
    }

  /* Similarly, for -fsanitize=thread, wrap the (possibly already
     wrapped) body so IFN_TSAN_FUNC_EXIT runs on every exit path.  */
  if (sanitize_flags_p (SANITIZE_THREAD)
      && param_tsan_instrument_func_entry_exit)
    {
      gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
      gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
      gbind *new_bind = gimple_build_bind (NULL, tf, NULL);
      /* Replace the current function body with the body
	 wrapped in the try/finally TF.  */
      seq = NULL;
      gimple_seq_add_stmt (&seq, new_bind);
      gimple_set_body (fndecl, seq);
    }

  /* The GENERIC body has been consumed; record that this function is
     now in (some form of) GIMPLE.  */
  DECL_SAVED_TREE (fndecl) = NULL_TREE;
  cfun->curr_properties |= PROP_gimple_any;

  pop_cfun ();

  dump_function (TDI_gimple, fndecl);
}
22115 :
22116 : /* Return a dummy expression of type TYPE in order to keep going after an
22117 : error. */
22118 :
22119 : static tree
22120 30 : dummy_object (tree type)
22121 : {
22122 30 : tree t = build_int_cst (build_pointer_type (type), 0);
22123 30 : return build2 (MEM_REF, type, t, t);
22124 : }
22125 :
22126 : /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
22127 : builtin function, but a very special sort of operator. */
22128 :
enum gimplify_status
gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
		      gimple_seq *post_p ATTRIBUTE_UNUSED)
{
  tree promoted_type, have_va_type;
  tree valist = TREE_OPERAND (*expr_p, 0);
  tree type = TREE_TYPE (*expr_p);
  tree t, tag, aptag;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Verify that valist is of the proper type.  */
  have_va_type = TREE_TYPE (valist);
  if (have_va_type == error_mark_node)
    return GS_ERROR;
  have_va_type = targetm.canonical_va_list_type (have_va_type);
  if (have_va_type == NULL_TREE
      && POINTER_TYPE_P (TREE_TYPE (valist)))
    /* Handle 'Case 1: Not an array type' from c-common.cc/build_va_arg.  */
    have_va_type
      = targetm.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist)));
  gcc_assert (have_va_type != NULL_TREE);

  /* Generate a diagnostic for requesting data of a type that cannot
     be passed through `...' due to type promotion at the call site.  */
  if ((promoted_type = lang_hooks.types.type_promotes_to (type))
      != type)
    {
      /* Emit the "should pass promoted type" help at most once per
	 translation, hence the function-local static.  */
      static bool gave_help;
      bool warned;
      /* Use the expansion point to handle cases such as passing bool (defined
	 in a system header) through `...'.  */
      location_t xloc
	= expansion_point_location_if_in_system_header (loc);

      /* Unfortunately, this is merely undefined, rather than a constraint
	 violation, so we cannot make this an error.  If this call is never
	 executed, the program is still strictly conforming.  */
      auto_diagnostic_group d;
      warned = warning_at (xloc, 0,
			   "%qT is promoted to %qT when passed through %<...%>",
			   type, promoted_type);
      if (!gave_help && warned)
	{
	  gave_help = true;
	  inform (xloc, "(so you should pass %qT not %qT to %<va_arg%>)",
		  promoted_type, type);
	}

      /* We can, however, treat "undefined" any way we please.
	 Call abort to encourage the user to fix the program.  */
      if (warned)
	inform (xloc, "if this code is reached, the program will abort");
      /* Before the abort, allow the evaluation of the va_list
	 expression to exit or longjmp.  */
      gimplify_and_add (valist, pre_p);
      t = build_call_expr_loc (loc,
			       builtin_decl_implicit (BUILT_IN_TRAP), 0);
      gimplify_and_add (t, pre_p);

      /* This is dead code, but go ahead and finish so that the
	 mode of the result comes out right.  */
      *expr_p = dummy_object (type);
      return GS_ALL_DONE;
    }

  /* Replace the VA_ARG_EXPR with a call to the internal function
     IFN_VA_ARG; TAG carries the requested type and APTAG the va_list
     type, both as null pointer constants.  The internal function is
     expanded to real code by a later pass.  */
  tag = build_int_cst (build_pointer_type (type), 0);
  aptag = build_int_cst (TREE_TYPE (valist), 0);

  *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
					  valist, tag, aptag);

  /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
     needs to be expanded.  */
  cfun->curr_properties &= ~PROP_gimple_lva;

  return GS_OK;
}
22206 :
22207 : /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
22208 :
22209 : DST/SRC are the destination and source respectively. You can pass
22210 : ungimplified trees in DST or SRC, in which case they will be
22211 : converted to a gimple operand if necessary.
22212 :
22213 : This function returns the newly created GIMPLE_ASSIGN tuple. */
22214 :
22215 : gimple *
22216 866036 : gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
22217 : {
22218 866036 : tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
22219 866036 : gimplify_and_add (t, seq_p);
22220 866036 : ggc_free (t);
22221 866036 : return gimple_seq_last_stmt (*seq_p);
22222 : }
22223 :
22224 : inline hashval_t
22225 1564391 : gimplify_hasher::hash (const elt_t *p)
22226 : {
22227 1564391 : tree t = p->val;
22228 1564391 : return iterative_hash_expr (t, 0);
22229 : }
22230 :
22231 : inline bool
22232 582308 : gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
22233 : {
22234 582308 : tree t1 = p1->val;
22235 582308 : tree t2 = p2->val;
22236 582308 : enum tree_code code = TREE_CODE (t1);
22237 :
22238 582308 : if (TREE_CODE (t2) != code
22239 582308 : || TREE_TYPE (t1) != TREE_TYPE (t2))
22240 : return false;
22241 :
22242 363673 : if (!operand_equal_p (t1, t2, 0))
22243 : return false;
22244 :
22245 : return true;
22246 : }
|