Line data Source code
1 : /* Tree inlining.
2 : Copyright (C) 2001-2026 Free Software Foundation, Inc.
3 : Contributed by Alexandre Oliva <aoliva@redhat.com>
4 :
5 : This file is part of GCC.
6 :
7 : GCC is free software; you can redistribute it and/or modify
8 : it under the terms of the GNU General Public License as published by
9 : the Free Software Foundation; either version 3, or (at your option)
10 : any later version.
11 :
12 : GCC is distributed in the hope that it will be useful,
13 : but WITHOUT ANY WARRANTY; without even the implied warranty of
14 : MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 : GNU General Public License for more details.
16 :
17 : You should have received a copy of the GNU General Public License
18 : along with GCC; see the file COPYING3. If not see
19 : <http://www.gnu.org/licenses/>. */
20 :
21 : #include "config.h"
22 : #include "system.h"
23 : #include "coretypes.h"
24 : #include "backend.h"
25 : #include "target.h"
26 : #include "rtl.h"
27 : #include "tree.h"
28 : #include "gimple.h"
29 : #include "cfghooks.h"
30 : #include "tree-pass.h"
31 : #include "ssa.h"
32 : #include "cgraph.h"
33 : #include "tree-pretty-print.h"
34 : #include "diagnostic-core.h"
35 : #include "gimple-predict.h"
36 : #include "fold-const.h"
37 : #include "stor-layout.h"
38 : #include "calls.h"
39 : #include "tree-inline.h"
40 : #include "langhooks.h"
41 : #include "cfganal.h"
42 : #include "tree-iterator.h"
43 : #include "intl.h"
44 : #include "gimple-iterator.h"
45 : #include "gimple-fold.h"
46 : #include "tree-eh.h"
47 : #include "gimplify.h"
48 : #include "gimplify-me.h"
49 : #include "gimple-walk.h"
50 : #include "tree-cfg.h"
51 : #include "tree-into-ssa.h"
52 : #include "tree-dfa.h"
53 : #include "tree-ssa.h"
54 : #include "except.h"
55 : #include "debug.h"
56 : #include "value-prof.h"
57 : #include "cfgloop.h"
58 : #include "builtins.h"
59 : #include "stringpool.h"
60 : #include "attribs.h"
61 : #include "sreal.h"
62 : #include "tree-cfgcleanup.h"
63 : #include "tree-ssa-live.h"
64 : #include "alloc-pool.h"
65 : #include "symbol-summary.h"
66 : #include "symtab-thunks.h"
67 : #include "symtab-clones.h"
68 : #include "asan.h"
69 :
70 : /* I'm not real happy about this, but we need to handle gimple and
71 : non-gimple trees. */
72 :
73 : /* Inlining, Cloning, Versioning, Parallelization
74 :
75 : Inlining: a function body is duplicated, but the PARM_DECLs are
76 : remapped into VAR_DECLs, and non-void RETURN_EXPRs become
77 : MODIFY_EXPRs that store to a dedicated returned-value variable.
78 : The duplicated eh_region info of the copy will later be appended
79 : to the info for the caller; the eh_region info in copied throwing
80 : statements and RESX statements are adjusted accordingly.
81 :
82 : Cloning: (only in C++) We have one body for a con/de/structor, and
83 : multiple function decls, each with a unique parameter list.
84 : Duplicate the body, using the given splay tree; some parameters
85 : will become constants (like 0 or 1).
86 :
87 : Versioning: a function body is duplicated and the result is a new
88 : function rather than into blocks of an existing function as with
89 : inlining. Some parameters will become constants.
90 :
91 : Parallelization: a region of a function is duplicated resulting in
92 : a new function. Variables may be replaced with complex expressions
93 : to enable shared variable semantics.
94 :
95 : All of these will simultaneously lookup any callgraph edges. If
96 : we're going to inline the duplicated function body, and the given
97 : function has some cloned callgraph nodes (one for each place this
98 : function will be inlined) those callgraph edges will be duplicated.
99 : If we're cloning the body, those callgraph edges will be
100 : updated to point into the new body. (Note that the original
101 : callgraph node and edge list will not be altered.)
102 :
103 : See the CALL_EXPR handling case in copy_tree_body_r (). */
104 :
105 : /* To Do:
106 :
107 : o In order to make inlining-on-trees work, we pessimized
108 : function-local static constants. In particular, they are now
109 : always output, even when not addressed. Fix this by treating
110 : function-local static constants just like global static
111 : constants; the back-end already knows not to output them if they
112 : are not needed.
113 :
114 : o Provide heuristics to clamp inlining of recursive template
115 : calls? */
116 :
117 :
118 : /* Weights that estimate_num_insns uses to estimate the size of the
119 : produced code. */
120 :
121 : eni_weights eni_size_weights;
122 :
123 : /* Weights that estimate_num_insns uses to estimate the time necessary
124 : to execute the produced code. */
125 :
126 : eni_weights eni_time_weights;
127 :
128 : /* Prototypes. */
129 :
130 : static tree declare_return_variable (copy_body_data *, tree, tree,
131 : basic_block);
132 : static void remap_block (tree *, copy_body_data *);
133 : static void copy_bind_expr (tree *, int *, copy_body_data *);
134 : static void declare_inline_vars (tree, tree);
135 : static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
136 : static void prepend_lexical_block (tree current_block, tree new_block);
137 : static tree copy_result_decl_to_var (tree, copy_body_data *);
138 : static tree copy_decl_maybe_to_var (tree, copy_body_data *);
139 : static gimple_seq remap_gimple_stmt (gimple *, copy_body_data *);
140 : static void insert_init_stmt (copy_body_data *, basic_block, gimple *);
141 :
142 : /* Insert a tree->tree mapping for ID. Despite the name suggests
143 : that the trees should be variables, it is used for more than that. */
144 :
145 : void
146 718805367 : insert_decl_map (copy_body_data *id, tree key, tree value)
147 : {
148 718805367 : id->decl_map->put (key, value);
149 :
150 : /* Always insert an identity map as well. If we see this same new
151 : node again, we won't want to duplicate it a second time. */
152 718805367 : if (key != value && value)
153 210500477 : id->decl_map->put (value, value);
154 718805367 : }
155 :
156 : /* If nonzero, we're remapping the contents of inlined debug
157 : statements. If negative, an error has occurred, such as a
158 : reference to a variable that isn't available in the inlined
159 : context. */
160 : static int processing_debug_stmt = 0;
161 :
162 : /* Construct new SSA name for old NAME. ID is the inline context. */
163 :
164 : static tree
165 66886418 : remap_ssa_name (tree name, copy_body_data *id)
166 : {
167 66886418 : tree new_tree, var;
168 66886418 : tree *n;
169 :
170 66886418 : gcc_assert (TREE_CODE (name) == SSA_NAME);
171 :
172 66886418 : n = id->decl_map->get (name);
173 66886418 : if (n)
174 : {
175 : /* When we perform edge redirection as part of CFG copy, IPA-SRA can
176 : remove an unused LHS from a call statement. Such LHS can however
177 : still appear in debug statements, but their value is lost in this
178 : function and we do not want to map them. */
179 49035700 : if (id->killed_new_ssa_names
180 49035700 : && id->killed_new_ssa_names->contains (*n))
181 : {
182 939 : gcc_assert (processing_debug_stmt);
183 939 : processing_debug_stmt = -1;
184 939 : return name;
185 : }
186 :
187 49034761 : return unshare_expr (*n);
188 : }
189 :
190 17850718 : if (processing_debug_stmt)
191 : {
192 92824 : if (SSA_NAME_IS_DEFAULT_DEF (name)
193 92781 : && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
194 92734 : && id->entry_bb == NULL
195 185557 : && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
196 : {
197 92733 : gimple *def_temp;
198 92733 : gimple_stmt_iterator gsi;
199 92733 : tree val = SSA_NAME_VAR (name);
200 :
201 92733 : n = id->decl_map->get (val);
202 92733 : if (n != NULL)
203 92733 : val = *n;
204 92733 : if (TREE_CODE (val) != PARM_DECL
205 92733 : && !(VAR_P (val) && DECL_ABSTRACT_ORIGIN (val)))
206 : {
207 0 : processing_debug_stmt = -1;
208 0 : return name;
209 : }
210 92733 : n = id->decl_map->get (val);
211 92733 : if (n && TREE_CODE (*n) == DEBUG_EXPR_DECL)
212 : return *n;
213 31644 : tree vexpr = build_debug_expr_decl (TREE_TYPE (name));
214 : /* FIXME: Is setting the mode really necessary? */
215 31644 : SET_DECL_MODE (vexpr, DECL_MODE (SSA_NAME_VAR (name)));
216 31644 : def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
217 31644 : gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
218 31644 : gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
219 31644 : insert_decl_map (id, val, vexpr);
220 31644 : return vexpr;
221 : }
222 :
223 91 : processing_debug_stmt = -1;
224 91 : return name;
225 : }
226 :
227 : /* Remap anonymous SSA names or SSA names of anonymous decls. */
228 17757894 : var = SSA_NAME_VAR (name);
229 3527026 : if (!var
230 3527026 : || (!SSA_NAME_IS_DEFAULT_DEF (name)
231 3151162 : && VAR_P (var)
232 2964344 : && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
233 2964344 : && DECL_ARTIFICIAL (var)
234 477894 : && DECL_IGNORED_P (var)
235 147184 : && !DECL_NAME (var)))
236 : {
237 14236188 : struct ptr_info_def *pi;
238 14236188 : new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id));
239 14236188 : if (!var && SSA_NAME_IDENTIFIER (name))
240 1945884 : SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
241 14236188 : insert_decl_map (id, name, new_tree);
242 28472376 : SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
243 14236188 : = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
244 : /* At least IPA points-to info can be directly transferred. */
245 14236188 : if (id->src_cfun->gimple_df
246 14236188 : && id->src_cfun->gimple_df->ipa_pta
247 14739 : && POINTER_TYPE_P (TREE_TYPE (name))
248 2595 : && (pi = SSA_NAME_PTR_INFO (name))
249 14238778 : && !pi->pt.anything)
250 : {
251 2569 : struct ptr_info_def *new_pi = get_ptr_info (new_tree);
252 2569 : new_pi->pt = pi->pt;
253 : }
254 : /* So can range-info. */
255 23606425 : if (!POINTER_TYPE_P (TREE_TYPE (name))
256 23077007 : && SSA_NAME_RANGE_INFO (name))
257 2995563 : duplicate_ssa_name_range_info (new_tree, name);
258 14236188 : return new_tree;
259 : }
260 :
261 : /* Do not set DEF_STMT yet as statement is not copied yet. We do that
262 : in copy_bb. */
263 3521706 : new_tree = remap_decl (var, id);
264 :
265 : /* We might've substituted constant or another SSA_NAME for
266 : the variable.
267 :
268 : Replace the SSA name representing RESULT_DECL by variable during
269 : inlining: this saves us from need to introduce PHI node in a case
270 : return value is just partly initialized. */
271 357764 : if ((VAR_P (new_tree) || TREE_CODE (new_tree) == PARM_DECL)
272 3879470 : && (!SSA_NAME_VAR (name)
273 3521706 : || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
274 0 : || !id->transform_return_to_modify))
275 : {
276 3521706 : struct ptr_info_def *pi;
277 3521706 : new_tree = make_ssa_name (new_tree);
278 3521706 : insert_decl_map (id, name, new_tree);
279 7043412 : SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
280 3521706 : = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
281 : /* At least IPA points-to info can be directly transferred. */
282 3521706 : if (id->src_cfun->gimple_df
283 3521706 : && id->src_cfun->gimple_df->ipa_pta
284 2523 : && POINTER_TYPE_P (TREE_TYPE (name))
285 288 : && (pi = SSA_NAME_PTR_INFO (name))
286 3521988 : && !pi->pt.anything)
287 : {
288 282 : struct ptr_info_def *new_pi = get_ptr_info (new_tree);
289 282 : new_pi->pt = pi->pt;
290 : }
291 : /* So can range-info. */
292 6218798 : if (!POINTER_TYPE_P (TREE_TYPE (name))
293 6133153 : && SSA_NAME_RANGE_INFO (name))
294 934968 : duplicate_ssa_name_range_info (new_tree, name);
295 3521706 : if (SSA_NAME_IS_DEFAULT_DEF (name))
296 : {
297 : /* By inlining function having uninitialized variable, we might
298 : extend the lifetime (variable might get reused). This cause
299 : ICE in the case we end up extending lifetime of SSA name across
300 : abnormal edge, but also increase register pressure.
301 :
302 : We simply initialize all uninitialized vars by 0 except
303 : for case we are inlining to very first BB. We can avoid
304 : this for all BBs that are not inside strongly connected
305 : regions of the CFG, but this is expensive to test. */
306 375864 : if (id->entry_bb
307 41905 : && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
308 0 : && (!SSA_NAME_VAR (name)
309 0 : || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
310 375864 : && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
311 0 : 0)->dest
312 0 : || EDGE_COUNT (id->entry_bb->preds) != 1))
313 : {
314 0 : gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
315 0 : gimple *init_stmt;
316 0 : tree zero = build_zero_cst (TREE_TYPE (new_tree));
317 :
318 0 : init_stmt = gimple_build_assign (new_tree, zero);
319 0 : gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
320 0 : SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
321 : }
322 : else
323 : {
324 375864 : SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
325 375864 : set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
326 : }
327 : }
328 : }
329 : else
330 0 : insert_decl_map (id, name, new_tree);
331 : return new_tree;
332 : }
333 :
334 : /* Remap DECL during the copying of the BLOCK tree for the function. */
335 :
336 : tree
337 366621639 : remap_decl (tree decl, copy_body_data *id)
338 : {
339 366621639 : tree *n;
340 :
341 : /* We only remap local variables in the current function. */
342 :
343 : /* See if we have remapped this declaration. */
344 :
345 366621639 : n = id->decl_map->get (decl);
346 :
347 366621639 : if (!n && processing_debug_stmt)
348 : {
349 586359 : processing_debug_stmt = -1;
350 586359 : return decl;
351 : }
352 :
353 : /* When remapping a type within copy_gimple_seq_and_replace_locals, all
354 : necessary DECLs have already been remapped and we do not want to duplicate
355 : a decl coming from outside of the sequence we are copying. */
356 132876349 : if (!n
357 132876349 : && id->prevent_decl_creation_for_types
358 0 : && id->remapping_type_depth > 0
359 0 : && (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL))
360 : return decl;
361 :
362 : /* If we didn't already have an equivalent for this declaration, create one
363 : now. */
364 366035280 : if (!n)
365 : {
366 : /* Make a copy of the variable or label. */
367 132876349 : tree t = id->copy_decl (decl, id);
368 :
369 : /* Remember it, so that if we encounter this local entity again
370 : we can reuse this copy. Do this early because remap_type may
371 : need this decl for TYPE_STUB_DECL. */
372 132876349 : insert_decl_map (id, decl, t);
373 :
374 132876349 : if (!DECL_P (t) || t == decl)
375 : return t;
376 :
377 : /* Remap types, if necessary. */
378 130286301 : TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
379 130286301 : if (TREE_CODE (t) == TYPE_DECL)
380 : {
381 1248060 : DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
382 :
383 : /* Preserve the invariant that DECL_ORIGINAL_TYPE != TREE_TYPE,
384 : which is enforced in gen_typedef_die when DECL_ABSTRACT_ORIGIN
385 : is not set on the TYPE_DECL, for example in LTO mode. */
386 1248060 : if (DECL_ORIGINAL_TYPE (t) == TREE_TYPE (t))
387 : {
388 10 : tree x = build_variant_type_copy (TREE_TYPE (t));
389 10 : TYPE_STUB_DECL (x) = TYPE_STUB_DECL (TREE_TYPE (t));
390 10 : TYPE_NAME (x) = TYPE_NAME (TREE_TYPE (t));
391 10 : DECL_ORIGINAL_TYPE (t) = x;
392 : }
393 : }
394 :
395 : /* Remap sizes as necessary. */
396 130286301 : walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
397 130286301 : walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
398 :
399 : /* If fields, do likewise for offset and qualifier. */
400 130286301 : if (TREE_CODE (t) == FIELD_DECL)
401 : {
402 781 : walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
403 781 : if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
404 0 : walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
405 : }
406 :
407 130286301 : return t;
408 : }
409 :
410 233158931 : if (id->do_not_unshare)
411 156808241 : return *n;
412 : else
413 76350690 : return unshare_expr (*n);
414 : }
415 :
416 : static tree
417 101581 : remap_type_1 (tree type, copy_body_data *id)
418 : {
419 101581 : tree new_tree, t;
420 :
421 : /* We do need a copy. build and register it now. If this is a pointer or
422 : reference type, remap the designated type and make a new pointer or
423 : reference type. */
424 101581 : if (TREE_CODE (type) == POINTER_TYPE)
425 : {
426 27976 : new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
427 27976 : TYPE_MODE (type),
428 27976 : TYPE_REF_CAN_ALIAS_ALL (type));
429 27976 : if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
430 14858 : new_tree = build_type_attribute_qual_variant (new_tree,
431 14858 : TYPE_ATTRIBUTES (type),
432 14858 : TYPE_QUALS (type));
433 27976 : insert_decl_map (id, type, new_tree);
434 27976 : return new_tree;
435 : }
436 73605 : else if (TREE_CODE (type) == REFERENCE_TYPE)
437 : {
438 7838 : new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
439 7838 : TYPE_MODE (type),
440 7838 : TYPE_REF_CAN_ALIAS_ALL (type));
441 7838 : if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
442 5438 : new_tree = build_type_attribute_qual_variant (new_tree,
443 5438 : TYPE_ATTRIBUTES (type),
444 5438 : TYPE_QUALS (type));
445 7838 : insert_decl_map (id, type, new_tree);
446 7838 : return new_tree;
447 : }
448 : else
449 65767 : new_tree = copy_node (type);
450 :
451 65767 : insert_decl_map (id, type, new_tree);
452 :
453 : /* This is a new type, not a copy of an old type. Need to reassociate
454 : variants. We can handle everything except the main variant lazily. */
455 65767 : t = TYPE_MAIN_VARIANT (type);
456 65767 : if (type != t)
457 : {
458 87 : t = remap_type (t, id);
459 87 : TYPE_MAIN_VARIANT (new_tree) = t;
460 87 : TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
461 87 : TYPE_NEXT_VARIANT (t) = new_tree;
462 : }
463 : else
464 : {
465 65680 : TYPE_MAIN_VARIANT (new_tree) = new_tree;
466 65680 : TYPE_NEXT_VARIANT (new_tree) = NULL;
467 : }
468 :
469 65767 : if (TYPE_STUB_DECL (type))
470 186 : TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
471 :
472 : /* Lazily create pointer and reference types. */
473 65767 : TYPE_POINTER_TO (new_tree) = NULL;
474 65767 : TYPE_REFERENCE_TO (new_tree) = NULL;
475 :
476 : /* Copy all types that may contain references to local variables; be sure to
477 : preserve sharing in between type and its main variant when possible. */
478 65767 : switch (TREE_CODE (new_tree))
479 : {
480 31504 : case INTEGER_TYPE:
481 31504 : case REAL_TYPE:
482 31504 : case FIXED_POINT_TYPE:
483 31504 : case ENUMERAL_TYPE:
484 31504 : case BOOLEAN_TYPE:
485 31504 : if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
486 : {
487 0 : gcc_checking_assert (TYPE_MIN_VALUE (type) == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type)));
488 0 : gcc_checking_assert (TYPE_MAX_VALUE (type) == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type)));
489 :
490 0 : TYPE_MIN_VALUE (new_tree) = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree));
491 0 : TYPE_MAX_VALUE (new_tree) = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree));
492 : }
493 : else
494 : {
495 31504 : t = TYPE_MIN_VALUE (new_tree);
496 31504 : if (t && TREE_CODE (t) != INTEGER_CST)
497 0 : walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
498 :
499 31504 : t = TYPE_MAX_VALUE (new_tree);
500 31504 : if (t && TREE_CODE (t) != INTEGER_CST)
501 31504 : walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
502 : }
503 : return new_tree;
504 :
505 16 : case FUNCTION_TYPE:
506 16 : if (TYPE_MAIN_VARIANT (new_tree) != new_tree
507 16 : && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
508 0 : TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
509 : else
510 16 : TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
511 16 : if (TYPE_MAIN_VARIANT (new_tree) != new_tree
512 16 : && TYPE_ARG_TYPES (type) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type)))
513 0 : TYPE_ARG_TYPES (new_tree) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree));
514 : else
515 16 : walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
516 : return new_tree;
517 :
518 34012 : case ARRAY_TYPE:
519 34012 : if (TYPE_MAIN_VARIANT (new_tree) != new_tree
520 34012 : && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
521 29 : TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
522 : else
523 33983 : TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
524 :
525 34012 : if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
526 : {
527 70 : gcc_checking_assert (TYPE_DOMAIN (type)
528 : == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
529 70 : TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
530 : }
531 : else
532 : {
533 33942 : TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
534 : /* For array bounds where we have decided not to copy over the bounds
535 : variable which isn't used in OpenMP/OpenACC region, change them to
536 : an uninitialized VAR_DECL temporary. */
537 33942 : if (id->adjust_array_error_bounds
538 3712 : && TYPE_DOMAIN (new_tree)
539 3712 : && TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) == error_mark_node
540 36826 : && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
541 : {
542 2884 : tree v = create_tmp_var (TREE_TYPE (TYPE_DOMAIN (new_tree)));
543 2884 : DECL_ATTRIBUTES (v)
544 2884 : = tree_cons (get_identifier ("omp dummy var"), NULL_TREE,
545 2884 : DECL_ATTRIBUTES (v));
546 2884 : TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) = v;
547 : }
548 : }
549 : break;
550 :
551 235 : case RECORD_TYPE:
552 235 : case UNION_TYPE:
553 235 : case QUAL_UNION_TYPE:
554 235 : if (TYPE_MAIN_VARIANT (type) != type
555 235 : && TYPE_FIELDS (type) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type)))
556 17 : TYPE_FIELDS (new_tree) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree));
557 : else
558 : {
559 218 : tree f, nf = NULL;
560 :
561 999 : for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
562 : {
563 781 : t = remap_decl (f, id);
564 781 : DECL_CONTEXT (t) = new_tree;
565 781 : DECL_CHAIN (t) = nf;
566 781 : nf = t;
567 : }
568 218 : TYPE_FIELDS (new_tree) = nreverse (nf);
569 : }
570 : break;
571 :
572 0 : case OFFSET_TYPE:
573 0 : default:
574 : /* Shouldn't have been thought variable sized. */
575 0 : gcc_unreachable ();
576 : }
577 :
578 : /* All variants of type share the same size, so use the already remaped data. */
579 34247 : if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
580 : {
581 87 : tree s = TYPE_SIZE (type);
582 87 : tree mvs = TYPE_SIZE (TYPE_MAIN_VARIANT (type));
583 87 : tree su = TYPE_SIZE_UNIT (type);
584 87 : tree mvsu = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));
585 87 : gcc_checking_assert ((TREE_CODE (s) == PLACEHOLDER_EXPR
586 : && (TREE_CODE (mvs) == PLACEHOLDER_EXPR))
587 : || s == mvs);
588 87 : gcc_checking_assert ((TREE_CODE (su) == PLACEHOLDER_EXPR
589 : && (TREE_CODE (mvsu) == PLACEHOLDER_EXPR))
590 : || su == mvsu);
591 87 : TYPE_SIZE (new_tree) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree));
592 87 : TYPE_SIZE_UNIT (new_tree) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree));
593 : }
594 : else
595 : {
596 34160 : walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
597 34160 : walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
598 : }
599 :
600 : return new_tree;
601 : }
602 :
603 : /* Helper function for remap_type_2, called through walk_tree. */
604 :
605 : static tree
606 30409 : remap_type_3 (tree *tp, int *walk_subtrees, void *data)
607 : {
608 30409 : copy_body_data *id = (copy_body_data *) data;
609 :
610 30409 : if (TYPE_P (*tp))
611 0 : *walk_subtrees = 0;
612 :
613 30409 : else if (DECL_P (*tp) && remap_decl (*tp, id) != *tp)
614 : return *tp;
615 :
616 : return NULL_TREE;
617 : }
618 :
619 : /* Return true if TYPE needs to be remapped because remap_decl on any
620 : needed embedded decl returns something other than that decl. */
621 :
622 : static bool
623 51321 : remap_type_2 (tree type, copy_body_data *id)
624 : {
625 61850 : tree t;
626 :
627 : #define RETURN_TRUE_IF_VAR(T) \
628 : do \
629 : { \
630 : tree _t = (T); \
631 : if (_t) \
632 : { \
633 : if (DECL_P (_t) && remap_decl (_t, id) != _t) \
634 : return true; \
635 : if (!TYPE_SIZES_GIMPLIFIED (type) \
636 : && walk_tree (&_t, remap_type_3, id, NULL)) \
637 : return true; \
638 : } \
639 : } \
640 : while (0)
641 :
642 61850 : switch (TREE_CODE (type))
643 : {
644 10529 : case POINTER_TYPE:
645 10529 : case REFERENCE_TYPE:
646 10529 : case FUNCTION_TYPE:
647 10529 : case METHOD_TYPE:
648 10529 : return remap_type_2 (TREE_TYPE (type), id);
649 :
650 34492 : case INTEGER_TYPE:
651 34492 : case REAL_TYPE:
652 34492 : case FIXED_POINT_TYPE:
653 34492 : case ENUMERAL_TYPE:
654 34492 : case BOOLEAN_TYPE:
655 34492 : RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
656 34492 : RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
657 22662 : return false;
658 :
659 16356 : case ARRAY_TYPE:
660 16356 : if (remap_type_2 (TREE_TYPE (type), id)
661 32058 : || (TYPE_DOMAIN (type) && remap_type_2 (TYPE_DOMAIN (type), id)))
662 9065 : return true;
663 : break;
664 :
665 357 : case RECORD_TYPE:
666 357 : case UNION_TYPE:
667 357 : case QUAL_UNION_TYPE:
668 3561 : for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
669 3204 : if (TREE_CODE (t) == FIELD_DECL)
670 : {
671 1242 : RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
672 1242 : RETURN_TRUE_IF_VAR (DECL_SIZE (t));
673 1242 : RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
674 1242 : if (TREE_CODE (type) == QUAL_UNION_TYPE)
675 0 : RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
676 : }
677 : break;
678 :
679 : default:
680 : return false;
681 : }
682 :
683 7648 : RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
684 7639 : RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
685 6964 : return false;
686 : #undef RETURN_TRUE_IF_VAR
687 : }
688 :
689 : tree
690 1491276522 : remap_type (tree type, copy_body_data *id)
691 : {
692 1491276522 : tree *node;
693 1491276522 : tree tmp;
694 :
695 1491276522 : if (type == NULL)
696 : return type;
697 :
698 : /* See if we have remapped this type. */
699 1491068140 : node = id->decl_map->get (type);
700 1491068140 : if (node)
701 985770151 : return *node;
702 :
703 : /* The type only needs remapping if it's variably modified. */
704 505297989 : if (! variably_modified_type_p (type, id->src_fn)
705 : /* Don't remap if copy_decl method doesn't always return a new
706 : decl and for all embedded decls returns the passed in decl. */
707 505297989 : || (id->dont_remap_vla_if_no_change && !remap_type_2 (type, id)))
708 : {
709 505196471 : insert_decl_map (id, type, type);
710 505196471 : return type;
711 : }
712 :
713 101518 : id->remapping_type_depth++;
714 101518 : tmp = remap_type_1 (type, id);
715 101518 : id->remapping_type_depth--;
716 :
717 101518 : return tmp;
718 : }
719 :
720 : /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs. */
721 :
722 : static bool
723 48621065 : can_be_nonlocal (tree decl, copy_body_data *id)
724 : {
725 : /* We cannot duplicate function decls. */
726 48621065 : if (TREE_CODE (decl) == FUNCTION_DECL)
727 : return true;
728 :
729 : /* Local static vars must be non-local or we get multiple declaration
730 : problems. */
731 48603011 : if (VAR_P (decl) && !auto_var_in_fn_p (decl, id->src_fn))
732 : return true;
733 :
734 : return false;
735 : }
736 :
737 : static tree
738 51308484 : remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
739 : copy_body_data *id)
740 : {
741 51308484 : tree old_var;
742 51308484 : tree new_decls = NULL_TREE;
743 :
744 : /* Remap its variables. */
745 95984470 : for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
746 : {
747 44675986 : tree new_var;
748 :
749 44675986 : if (can_be_nonlocal (old_var, id))
750 : {
751 : /* We need to add this variable to the local decls as otherwise
752 : nothing else will do so. */
753 127980 : if (VAR_P (old_var) && ! DECL_EXTERNAL (old_var) && cfun)
754 109750 : add_local_decl (cfun, old_var);
755 89393 : if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
756 101632 : && !DECL_IGNORED_P (old_var)
757 229511 : && nonlocalized_list)
758 68346 : vec_safe_push (*nonlocalized_list, old_var);
759 127980 : continue;
760 : }
761 :
762 : /* Remap the variable. */
763 44548006 : new_var = remap_decl (old_var, id);
764 :
765 : /* If we didn't remap this variable, we can't mess with its
766 : TREE_CHAIN. If we remapped this variable to the return slot, it's
767 : already declared somewhere else, so don't declare it here. */
768 :
769 44548006 : if (new_var == old_var || new_var == id->retvar)
770 : ;
771 39804800 : else if (!new_var)
772 : {
773 0 : if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
774 0 : && !DECL_IGNORED_P (old_var)
775 0 : && nonlocalized_list)
776 0 : vec_safe_push (*nonlocalized_list, old_var);
777 : }
778 : else
779 : {
780 39804800 : gcc_assert (DECL_P (new_var));
781 39804800 : DECL_CHAIN (new_var) = new_decls;
782 39804800 : new_decls = new_var;
783 :
784 : /* Also copy value-expressions. */
785 39804800 : if (VAR_P (new_var) && DECL_HAS_VALUE_EXPR_P (new_var))
786 : {
787 2384279 : tree tem = DECL_VALUE_EXPR (new_var);
788 2384279 : bool old_regimplify = id->regimplify;
789 2384279 : id->remapping_type_depth++;
790 2384279 : walk_tree (&tem, copy_tree_body_r, id, NULL);
791 2384279 : id->remapping_type_depth--;
792 2384279 : id->regimplify = old_regimplify;
793 2384279 : SET_DECL_VALUE_EXPR (new_var, tem);
794 : }
795 : }
796 : }
797 :
798 51308484 : return nreverse (new_decls);
799 : }
800 :
801 : /* Copy the BLOCK to contain remapped versions of the variables
802 : therein. And hook the new block into the block-tree. */
803 :
804 : static void
805 45333428 : remap_block (tree *block, copy_body_data *id)
806 : {
807 45333428 : tree old_block;
808 45333428 : tree new_block;
809 :
810 : /* Make the new block. */
811 45333428 : old_block = *block;
812 45333428 : new_block = make_node (BLOCK);
813 45333428 : TREE_USED (new_block) = TREE_USED (old_block);
814 45333428 : BLOCK_ABSTRACT_ORIGIN (new_block) = BLOCK_ORIGIN (old_block);
815 45333428 : BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
816 45333428 : BLOCK_NONLOCALIZED_VARS (new_block)
817 45400476 : = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
818 45333428 : *block = new_block;
819 :
820 : /* Remap its variables. */
821 90666856 : BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
822 45333428 : &BLOCK_NONLOCALIZED_VARS (new_block),
823 : id);
824 :
825 : /* Remember the remapped block. */
826 45333428 : insert_decl_map (id, old_block, new_block);
827 45333428 : }
828 :
829 : /* Copy the whole block tree and root it in id->block. */
830 :
831 : static tree
832 22977991 : remap_blocks (tree block, copy_body_data *id)
833 : {
834 22977991 : tree t;
835 22977991 : tree new_tree = block;
836 :
837 22977991 : if (!block)
838 : return NULL;
839 :
840 22977991 : remap_block (&new_tree, id);
841 22977991 : gcc_assert (new_tree != block);
842 41452172 : for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
843 18474181 : prepend_lexical_block (new_tree, remap_blocks (t, id));
844 : /* Blocks are in arbitrary order, but make things slightly prettier and do
845 : not swap order when producing a copy. */
846 22977991 : BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
847 22977991 : return new_tree;
848 : }
849 :
850 : /* Remap the block tree rooted at BLOCK to nothing. */
851 :
852 : static void
853 69005 : remap_blocks_to_null (tree block, copy_body_data *id)
854 : {
855 69005 : tree t;
856 69005 : insert_decl_map (id, block, NULL_TREE);
857 111290 : for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
858 42285 : remap_blocks_to_null (t, id);
859 69005 : }
860 :
861 : /* Remap the location info pointed to by LOCUS. */
862 :
863 : static location_t
864 24988960 : remap_location (location_t locus, copy_body_data *id)
865 : {
866 24988960 : if (LOCATION_BLOCK (locus))
867 : {
868 10134426 : tree *n = id->decl_map->get (LOCATION_BLOCK (locus));
869 10134426 : gcc_assert (n);
870 10134426 : if (*n)
871 10100902 : return set_block (locus, *n);
872 : }
873 :
874 14888058 : locus = LOCATION_LOCUS (locus);
875 :
876 14888058 : if (locus != UNKNOWN_LOCATION && id->block)
877 15 : return set_block (locus, id->block);
878 :
879 : return locus;
880 : }
881 :
882 : static void
883 39490730 : copy_statement_list (tree *tp)
884 : {
885 39490730 : tree_stmt_iterator oi, ni;
886 39490730 : tree new_tree;
887 :
888 39490730 : new_tree = alloc_stmt_list ();
889 39490730 : ni = tsi_start (new_tree);
890 39490730 : oi = tsi_start (*tp);
891 39490730 : TREE_TYPE (new_tree) = TREE_TYPE (*tp);
892 39490730 : *tp = new_tree;
893 :
894 131891749 : for (; !tsi_end_p (oi); tsi_next (&oi))
895 : {
896 92401019 : tree stmt = tsi_stmt (oi);
897 92401019 : if (TREE_CODE (stmt) == STATEMENT_LIST)
898 : /* This copy is not redundant; tsi_link_after will smash this
899 : STATEMENT_LIST into the end of the one we're building, and we
900 : don't want to do that with the original. */
901 72653 : copy_statement_list (&stmt);
902 92401019 : tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
903 : }
904 39490730 : }
905 :
906 : static void
907 22355685 : copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
908 : {
909 22355685 : tree block = BIND_EXPR_BLOCK (*tp);
910 : /* Copy (and replace) the statement. */
911 22355685 : copy_tree_r (tp, walk_subtrees, NULL);
912 22355685 : if (block)
913 : {
914 22355150 : remap_block (&block, id);
915 22355150 : BIND_EXPR_BLOCK (*tp) = block;
916 : }
917 :
918 22355685 : if (BIND_EXPR_VARS (*tp))
919 : /* This will remap a lot of the same decls again, but this should be
920 : harmless. */
921 5974888 : BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
922 22355685 : }
923 :
924 :
925 : /* Create a new gimple_seq by remapping all the statements in BODY
926 : using the inlining information in ID. */
927 :
928 : static gimple_seq
929 67 : remap_gimple_seq (gimple_seq body, copy_body_data *id)
930 : {
931 67 : gimple_stmt_iterator si;
932 67 : gimple_seq new_body = NULL;
933 :
934 67 : for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
935 : {
936 0 : gimple_seq new_stmts = remap_gimple_stmt (gsi_stmt (si), id);
937 0 : gimple_seq_add_seq (&new_body, new_stmts);
938 : }
939 :
940 67 : return new_body;
941 : }
942 :
943 :
944 : /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
945 : block using the mapping information in ID. */
946 :
947 : static gimple *
948 0 : copy_gimple_bind (gbind *stmt, copy_body_data *id)
949 : {
950 0 : gimple *new_bind;
951 0 : tree new_block, new_vars;
952 0 : gimple_seq body, new_body;
953 :
954 : /* Copy the statement. Note that we purposely don't use copy_stmt
955 : here because we need to remap statements as we copy. */
956 0 : body = gimple_bind_body (stmt);
957 0 : new_body = remap_gimple_seq (body, id);
958 :
959 0 : new_block = gimple_bind_block (stmt);
960 0 : if (new_block)
961 0 : remap_block (&new_block, id);
962 :
963 : /* This will remap a lot of the same decls again, but this should be
964 : harmless. */
965 0 : new_vars = gimple_bind_vars (stmt);
966 0 : if (new_vars)
967 0 : new_vars = remap_decls (new_vars, NULL, id);
968 :
969 0 : new_bind = gimple_build_bind (new_vars, new_body, new_block);
970 :
971 0 : return new_bind;
972 : }
973 :
974 : /* Return true if DECL is a parameter or a SSA_NAME for a parameter. */
975 :
976 : static bool
977 31536 : is_parm (tree decl)
978 : {
979 31536 : if (TREE_CODE (decl) == SSA_NAME)
980 : {
981 29645 : decl = SSA_NAME_VAR (decl);
982 : if (!decl)
983 : return false;
984 : }
985 :
986 18997 : return (TREE_CODE (decl) == PARM_DECL);
987 : }
988 :
989 : /* Copy the TREE_THIS_NOTRAP flag from OLD to T if it is appropriate to do so.
990 : T and OLD must be both either INDIRECT_REF or MEM_REF. */
991 :
992 : static void
993 22152761 : maybe_copy_this_notrap (copy_body_data *id, tree t, tree old)
994 : {
995 22152761 : gcc_assert (TREE_CODE (t) == TREE_CODE (old));
996 :
997 : /* We cannot blindly propagate the TREE_THIS_NOTRAP flag if we have remapped
998 : a parameter as the property might be valid only for the parameter itself,
999 : typically when it is passed by reference. But we propagate the flag when
1000 : this is the dereference of an entire object done in a type that has self-
1001 : referential size, to avoid the static size check in tree_could_trap_p. */
1002 22152761 : if (TREE_THIS_NOTRAP (old)
1003 22152761 : && (!is_parm (TREE_OPERAND (old, 0))
1004 13197 : || (!id->transform_parameter && is_parm (TREE_OPERAND (t, 0)))
1005 9542 : || ((TREE_CODE (t) == INDIRECT_REF
1006 9542 : || integer_zerop (TREE_OPERAND (t, 1)))
1007 9542 : && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
1008 8872 : && DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
1009 8872 : && type_contains_placeholder_p (TREE_TYPE (t)))))
1010 18336 : TREE_THIS_NOTRAP (t) = 1;
1011 22152761 : }
1012 :
1013 : /* Remap the dependence CLIQUE from the source to the destination function
1014 : as specified in ID. */
1015 :
1016 : static unsigned short
1017 2607373 : remap_dependence_clique (copy_body_data *id, unsigned short clique)
1018 : {
1019 2607373 : if (clique == 0 || processing_debug_stmt)
1020 : return 0;
1021 2574168 : if (!id->dependence_map)
1022 675756 : id->dependence_map = new hash_map<dependence_hash, unsigned short>;
1023 2574168 : bool existed;
1024 2574168 : unsigned short &newc = id->dependence_map->get_or_insert (clique, &existed);
1025 2574168 : if (!existed)
1026 : {
1027 : /* Clique 1 is reserved for local ones set by PTA. */
1028 1002557 : if (cfun->last_clique == 0)
1029 314057 : cfun->last_clique = 1;
1030 2005114 : newc = get_new_clique (cfun);
1031 : }
1032 2574168 : return newc;
1033 : }
1034 :
1035 : /* Remap the GIMPLE operand pointed to by *TP. DATA is really a
1036 : 'struct walk_stmt_info *'. DATA->INFO is a 'copy_body_data *'.
1037 : WALK_SUBTREES is used to indicate walk_gimple_op whether to keep
1038 : recursing into the children nodes of *TP. */
1039 :
1040 : static tree
1041 189975421 : remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
1042 : {
1043 189975421 : struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
1044 189975421 : copy_body_data *id = (copy_body_data *) wi_p->info;
1045 189975421 : tree fn = id->src_fn;
1046 :
1047 : /* For recursive invocations this is no longer the LHS itself. */
1048 189975421 : bool is_lhs = wi_p->is_lhs;
1049 189975421 : wi_p->is_lhs = false;
1050 :
1051 189975421 : if (TREE_CODE (*tp) == SSA_NAME)
1052 : {
1053 63019427 : *tp = remap_ssa_name (*tp, id);
1054 63019427 : *walk_subtrees = 0;
1055 63019427 : if (is_lhs)
1056 15744767 : SSA_NAME_DEF_STMT (*tp) = wi_p->stmt;
1057 63019427 : return NULL;
1058 : }
1059 126955994 : else if (auto_var_in_fn_p (*tp, fn))
1060 : {
1061 : /* Local variables and labels need to be replaced by equivalent
1062 : variables. We don't want to copy static variables; there's
1063 : only one of those, no matter how many times we inline the
1064 : containing function. Similarly for globals from an outer
1065 : function. */
1066 40941712 : tree new_decl;
1067 :
1068 : /* Remap the declaration. */
1069 40941712 : new_decl = remap_decl (*tp, id);
1070 40941712 : gcc_assert (new_decl);
1071 : /* Replace this variable with the copy. */
1072 40941712 : STRIP_TYPE_NOPS (new_decl);
1073 : /* ??? The C++ frontend uses void * pointer zero to initialize
1074 : any other type. This confuses the middle-end type verification.
1075 : As cloned bodies do not go through gimplification again the fixup
1076 : there doesn't trigger. */
1077 40941712 : if (TREE_CODE (new_decl) == INTEGER_CST
1078 40941712 : && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
1079 0 : new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
1080 40941712 : *tp = new_decl;
1081 40941712 : *walk_subtrees = 0;
1082 : }
1083 86014282 : else if (TREE_CODE (*tp) == STATEMENT_LIST)
1084 0 : gcc_unreachable ();
1085 86014282 : else if (TREE_CODE (*tp) == SAVE_EXPR)
1086 0 : gcc_unreachable ();
1087 86014282 : else if (TREE_CODE (*tp) == LABEL_DECL
1088 86014282 : && (!DECL_CONTEXT (*tp)
1089 441 : || decl_function_context (*tp) == id->src_fn))
1090 : /* These may need to be remapped for EH handling. */
1091 0 : *tp = remap_decl (*tp, id);
1092 86014282 : else if (TREE_CODE (*tp) == FIELD_DECL)
1093 : {
1094 : /* If the enclosing record type is variably_modified_type_p, the field
1095 : has already been remapped. Otherwise, it need not be. */
1096 15149012 : tree *n = id->decl_map->get (*tp);
1097 15149012 : if (n)
1098 95 : *tp = *n;
1099 15149012 : *walk_subtrees = 0;
1100 : }
1101 70865270 : else if (TYPE_P (*tp))
1102 : /* Types may need remapping as well. */
1103 0 : *tp = remap_type (*tp, id);
1104 70865270 : else if (CONSTANT_CLASS_P (*tp))
1105 : {
1106 : /* If this is a constant, we have to copy the node iff the type
1107 : will be remapped. copy_tree_r will not copy a constant. */
1108 11976532 : tree new_type = remap_type (TREE_TYPE (*tp), id);
1109 :
1110 11976532 : if (new_type == TREE_TYPE (*tp))
1111 11972595 : *walk_subtrees = 0;
1112 :
1113 3937 : else if (TREE_CODE (*tp) == INTEGER_CST)
1114 3937 : *tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
1115 : else
1116 : {
1117 0 : *tp = copy_node (*tp);
1118 0 : TREE_TYPE (*tp) = new_type;
1119 : }
1120 : }
1121 : else
1122 : {
1123 : /* Otherwise, just copy the node. Note that copy_tree_r already
1124 : knows not to copy VAR_DECLs, etc., so this is safe. */
1125 :
1126 58888738 : if (TREE_CODE (*tp) == MEM_REF && !id->do_not_fold)
1127 : {
1128 : /* We need to re-canonicalize MEM_REFs from inline substitutions
1129 : that can happen when a pointer argument is an ADDR_EXPR.
1130 : Recurse here manually to allow that. */
1131 13227579 : tree ptr = TREE_OPERAND (*tp, 0);
1132 13227579 : tree type = remap_type (TREE_TYPE (*tp), id);
1133 13227579 : tree old = *tp;
1134 13227579 : walk_tree (&ptr, remap_gimple_op_r, data, NULL);
1135 13227579 : *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1136 13227579 : TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1137 13227579 : TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1138 13227579 : copy_warning (*tp, old);
1139 13227579 : if (MR_DEPENDENCE_CLIQUE (old) != 0)
1140 : {
1141 2528959 : MR_DEPENDENCE_CLIQUE (*tp)
1142 2528959 : = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1143 2528959 : MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1144 : }
1145 13227579 : maybe_copy_this_notrap (id, *tp, old);
1146 13227579 : REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1147 13227579 : *walk_subtrees = 0;
1148 13227579 : return NULL;
1149 : }
1150 :
1151 : /* Here is the "usual case". Copy this tree node, and then
1152 : tweak some special cases. */
1153 45661159 : copy_tree_r (tp, walk_subtrees, NULL);
1154 :
1155 45661159 : if (TREE_CODE (*tp) != OMP_CLAUSE)
1156 45661159 : TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1157 :
1158 45661159 : if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1159 : {
1160 : /* The copied TARGET_EXPR has never been expanded, even if the
1161 : original node was expanded already. */
1162 0 : TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1163 0 : TREE_OPERAND (*tp, 3) = NULL_TREE;
1164 : }
1165 45661159 : else if (TREE_CODE (*tp) == ADDR_EXPR)
1166 : {
1167 : /* Variable substitution need not be simple. In particular,
1168 : the MEM_REF substitution above. Make sure that
1169 : TREE_CONSTANT and friends are up-to-date. */
1170 14201239 : int invariant = is_gimple_min_invariant (*tp);
1171 14201239 : walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
1172 14201239 : recompute_tree_invariant_for_addr_expr (*tp);
1173 :
1174 : /* If this used to be invariant, but is not any longer,
1175 : then regimplification is probably needed. */
1176 14201239 : if (invariant && !is_gimple_min_invariant (*tp))
1177 3424 : id->regimplify = true;
1178 :
1179 14201239 : *walk_subtrees = 0;
1180 : }
1181 31459920 : else if (TREE_CODE (*tp) == OMP_NEXT_VARIANT)
1182 : {
1183 : /* Neither operand is interesting, and walking the selector
1184 : causes problems because it's not an expression. */
1185 288 : gcc_assert (TREE_CODE (TREE_OPERAND (*tp, 0)) == INTEGER_CST);
1186 288 : *walk_subtrees = 0;
1187 : }
1188 : }
1189 :
1190 : /* Update the TREE_BLOCK for the cloned expr. */
1191 113728415 : if (EXPR_P (*tp))
1192 : {
1193 30552936 : tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1194 30552936 : tree old_block = TREE_BLOCK (*tp);
1195 30552936 : if (old_block)
1196 : {
1197 17035777 : tree *n;
1198 17035777 : n = id->decl_map->get (TREE_BLOCK (*tp));
1199 17035777 : if (n)
1200 17035293 : new_block = *n;
1201 : }
1202 30552936 : TREE_SET_BLOCK (*tp, new_block);
1203 : }
1204 :
1205 : /* Keep iterating. */
1206 : return NULL_TREE;
1207 : }
1208 :
1209 :
1210 : /* Called from copy_body_id via walk_tree. DATA is really a
1211 : `copy_body_data *'. */
1212 :
1213 : tree
1214 1538444385 : copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
1215 : {
1216 1538444385 : copy_body_data *id = (copy_body_data *) data;
1217 1538444385 : tree fn = id->src_fn;
1218 1538444385 : tree new_block;
1219 :
1220 : /* Begin by recognizing trees that we'll completely rewrite for the
1221 : inlining context. Our output for these trees is completely
1222 : different from out input (e.g. RETURN_EXPR is deleted, and morphs
1223 : into an edge). Further down, we'll handle trees that get
1224 : duplicated and/or tweaked. */
1225 :
1226 : /* When requested, RETURN_EXPRs should be transformed to just the
1227 : contained MODIFY_EXPR. The branch semantics of the return will
1228 : be handled elsewhere by manipulating the CFG rather than a statement. */
1229 1538444385 : if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
1230 : {
1231 0 : tree assignment = TREE_OPERAND (*tp, 0);
1232 :
1233 : /* If we're returning something, just turn that into an
1234 : assignment into the equivalent of the original RESULT_DECL.
1235 : If the "assignment" is just the result decl, the result
1236 : decl has already been set (e.g. a recent "foo (&result_decl,
1237 : ...)"); just toss the entire RETURN_EXPR. */
1238 0 : if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
1239 : {
1240 : /* Replace the RETURN_EXPR with (a copy of) the
1241 : MODIFY_EXPR hanging underneath. */
1242 0 : *tp = copy_node (assignment);
1243 : }
1244 : else /* Else the RETURN_EXPR returns no value. */
1245 : {
1246 0 : *tp = NULL;
1247 0 : return (tree) (void *)1;
1248 : }
1249 0 : }
1250 1538444385 : else if (TREE_CODE (*tp) == SSA_NAME)
1251 : {
1252 3822496 : *tp = remap_ssa_name (*tp, id);
1253 3822496 : *walk_subtrees = 0;
1254 3822496 : return NULL;
1255 : }
1256 :
1257 : /* Local variables and labels need to be replaced by equivalent
1258 : variables. We don't want to copy static variables; there's only
1259 : one of those, no matter how many times we inline the containing
1260 : function. Similarly for globals from an outer function. */
1261 1534621889 : else if (auto_var_in_fn_p (*tp, fn))
1262 : {
1263 191166998 : tree new_decl;
1264 :
1265 : /* Remap the declaration. */
1266 191166998 : new_decl = remap_decl (*tp, id);
1267 191166998 : gcc_assert (new_decl);
1268 : /* Replace this variable with the copy. */
1269 191166998 : STRIP_TYPE_NOPS (new_decl);
1270 191166998 : *tp = new_decl;
1271 191166998 : *walk_subtrees = 0;
1272 : }
1273 1343454891 : else if (TREE_CODE (*tp) == STATEMENT_LIST)
1274 39417879 : copy_statement_list (tp);
1275 1304037012 : else if (TREE_CODE (*tp) == SAVE_EXPR
1276 1303346528 : || TREE_CODE (*tp) == TARGET_EXPR)
1277 16004501 : remap_save_expr (tp, id->decl_map, walk_subtrees);
1278 1288032511 : else if (TREE_CODE (*tp) == LABEL_DECL
1279 1288032511 : && (! DECL_CONTEXT (*tp)
1280 14 : || decl_function_context (*tp) == id->src_fn))
1281 : /* These may need to be remapped for EH handling. */
1282 0 : *tp = remap_decl (*tp, id);
1283 1288032511 : else if (TREE_CODE (*tp) == BIND_EXPR)
1284 22355685 : copy_bind_expr (tp, walk_subtrees, id);
1285 : /* Types may need remapping as well. */
1286 1265676826 : else if (TYPE_P (*tp))
1287 277833 : *tp = remap_type (*tp, id);
1288 :
1289 : /* If this is a constant, we have to copy the node iff the type will be
1290 : remapped. copy_tree_r will not copy a constant. */
1291 1265398993 : else if (CONSTANT_CLASS_P (*tp))
1292 : {
1293 373421230 : tree new_type = remap_type (TREE_TYPE (*tp), id);
1294 :
1295 373421230 : if (new_type == TREE_TYPE (*tp))
1296 373420033 : *walk_subtrees = 0;
1297 :
1298 1197 : else if (TREE_CODE (*tp) == INTEGER_CST)
1299 1197 : *tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
1300 : else
1301 : {
1302 0 : *tp = copy_node (*tp);
1303 0 : TREE_TYPE (*tp) = new_type;
1304 : }
1305 : }
1306 :
1307 : /* Otherwise, just copy the node. Note that copy_tree_r already
1308 : knows not to copy VAR_DECLs, etc., so this is safe. */
1309 : else
1310 : {
1311 : /* Here we handle trees that are not completely rewritten.
1312 : First we detect some inlining-induced bogosities for
1313 : discarding. */
1314 891977763 : if (TREE_CODE (*tp) == MODIFY_EXPR
1315 12406229 : && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1316 891977782 : && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1317 : {
1318 : /* Some assignments VAR = VAR; don't generate any rtl code
1319 : and thus don't count as variable modification. Avoid
1320 : keeping bogosities like 0 = 0. */
1321 0 : tree decl = TREE_OPERAND (*tp, 0), value;
1322 0 : tree *n;
1323 :
1324 0 : n = id->decl_map->get (decl);
1325 0 : if (n)
1326 : {
1327 0 : value = *n;
1328 0 : STRIP_TYPE_NOPS (value);
1329 0 : if (TREE_CONSTANT (value) || TREE_READONLY (value))
1330 : {
1331 0 : *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1332 0 : return copy_tree_body_r (tp, walk_subtrees, data);
1333 : }
1334 : }
1335 : }
1336 891977763 : else if (INDIRECT_REF_P (*tp))
1337 : {
1338 : /* Get rid of *& from inline substitutions that can happen when a
1339 : pointer argument is an ADDR_EXPR. */
1340 51322464 : tree decl = TREE_OPERAND (*tp, 0);
1341 51322464 : tree *n = id->decl_map->get (decl);
1342 51322464 : if (n)
1343 : {
1344 : /* If we happen to get an ADDR_EXPR in n->value, strip
1345 : it manually here as we'll eventually get ADDR_EXPRs
1346 : which lie about their types pointed to. In this case
1347 : build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1348 : but we absolutely rely on that. As fold_indirect_ref
1349 : does other useful transformations, try that first, though. */
1350 8860093 : tree type = TREE_TYPE (*tp);
1351 8860093 : tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
1352 8860093 : tree old = *tp;
1353 8860093 : *tp = id->do_not_fold ? NULL : gimple_fold_indirect_ref (ptr);
1354 8860093 : if (! *tp)
1355 : {
1356 8859969 : type = remap_type (type, id);
1357 8859969 : if (TREE_CODE (ptr) == ADDR_EXPR && !id->do_not_fold)
1358 : {
1359 0 : *tp
1360 0 : = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
1361 : /* ??? We should either assert here or build
1362 : a VIEW_CONVERT_EXPR instead of blindly leaking
1363 : incompatible types to our IL. */
1364 0 : if (! *tp)
1365 0 : *tp = TREE_OPERAND (ptr, 0);
1366 : }
1367 : else
1368 : {
1369 8859969 : *tp = build1 (INDIRECT_REF, type, ptr);
1370 8859969 : TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1371 8859969 : TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1372 8859969 : TREE_READONLY (*tp) = TREE_READONLY (old);
1373 8859969 : maybe_copy_this_notrap (id, *tp, old);
1374 : }
1375 : }
1376 8860093 : *walk_subtrees = 0;
1377 8860093 : return NULL;
1378 : }
1379 : }
1380 840655299 : else if (TREE_CODE (*tp) == MEM_REF && !id->do_not_fold)
1381 : {
1382 : /* We need to re-canonicalize MEM_REFs from inline substitutions
1383 : that can happen when a pointer argument is an ADDR_EXPR.
1384 : Recurse here manually to allow that. */
1385 65213 : tree ptr = TREE_OPERAND (*tp, 0);
1386 65213 : tree type = remap_type (TREE_TYPE (*tp), id);
1387 65213 : tree old = *tp;
1388 65213 : walk_tree (&ptr, copy_tree_body_r, data, NULL);
1389 65213 : *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1390 65213 : TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1391 65213 : TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1392 65213 : copy_warning (*tp, old);
1393 65213 : if (MR_DEPENDENCE_CLIQUE (old) != 0)
1394 : {
1395 4791 : MR_DEPENDENCE_CLIQUE (*tp)
1396 4791 : = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1397 4791 : MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1398 : }
1399 65213 : maybe_copy_this_notrap (id, *tp, old);
1400 65213 : REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1401 65213 : *walk_subtrees = 0;
1402 65213 : return NULL;
1403 : }
1404 :
1405 : /* Here is the "usual case". Copy this tree node, and then
1406 : tweak some special cases. */
1407 883052457 : copy_tree_r (tp, walk_subtrees, NULL);
1408 :
1409 : /* If EXPR has block defined, map it to newly constructed block.
1410 : When inlining we want EXPRs without block appear in the block
1411 : of function call if we are not remapping a type. */
1412 883052457 : if (EXPR_P (*tp))
1413 : {
1414 770223815 : new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1415 770223815 : if (TREE_BLOCK (*tp))
1416 : {
1417 12636 : tree *n;
1418 12636 : n = id->decl_map->get (TREE_BLOCK (*tp));
1419 12636 : if (n)
1420 12636 : new_block = *n;
1421 : }
1422 770223815 : TREE_SET_BLOCK (*tp, new_block);
1423 : }
1424 :
1425 883052457 : if (TREE_CODE (*tp) != OMP_CLAUSE)
1426 883052366 : TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1427 :
1428 : /* The copied TARGET_EXPR has never been expanded, even if the
1429 : original node was expanded already. */
1430 883052457 : if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1431 : {
1432 0 : TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1433 0 : TREE_OPERAND (*tp, 3) = NULL_TREE;
1434 : }
1435 :
1436 : /* Variable substitution need not be simple. In particular, the
1437 : INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
1438 : and friends are up-to-date. */
1439 883052457 : else if (TREE_CODE (*tp) == ADDR_EXPR)
1440 : {
1441 85128906 : int invariant = is_gimple_min_invariant (*tp);
1442 85128906 : walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1443 :
1444 : /* Handle the case where we substituted an INDIRECT_REF
1445 : into the operand of the ADDR_EXPR. */
1446 85128906 : if (INDIRECT_REF_P (TREE_OPERAND (*tp, 0))
1447 85128906 : && !id->do_not_fold)
1448 : {
1449 147 : tree t = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1450 147 : if (TREE_TYPE (t) != TREE_TYPE (*tp))
1451 147 : t = fold_convert (remap_type (TREE_TYPE (*tp), id), t);
1452 147 : *tp = t;
1453 : }
1454 : else
1455 85128759 : recompute_tree_invariant_for_addr_expr (*tp);
1456 :
1457 : /* If this used to be invariant, but is not any longer,
1458 : then regimplification is probably needed. */
1459 85128906 : if (invariant && !is_gimple_min_invariant (*tp))
1460 19 : id->regimplify = true;
1461 :
1462 85128906 : *walk_subtrees = 0;
1463 : }
1464 797923551 : else if (TREE_CODE (*tp) == OMP_CLAUSE
1465 797923551 : && (OMP_CLAUSE_CODE (*tp) == OMP_CLAUSE_AFFINITY
1466 76 : || OMP_CLAUSE_CODE (*tp) == OMP_CLAUSE_DEPEND))
1467 : {
1468 30 : tree t = OMP_CLAUSE_DECL (*tp);
1469 30 : if (t && OMP_ITERATOR_DECL_P (t))
1470 : {
1471 18 : *walk_subtrees = 0;
1472 18 : OMP_CLAUSE_DECL (*tp) = copy_node (t);
1473 18 : t = OMP_CLAUSE_DECL (*tp);
1474 18 : TREE_PURPOSE (t) = copy_node (TREE_PURPOSE (t));
1475 108 : for (int i = 0; i <= 4; i++)
1476 90 : walk_tree (&TREE_VEC_ELT (TREE_PURPOSE (t), i),
1477 : copy_tree_body_r, id, NULL);
1478 18 : if (TREE_VEC_ELT (TREE_PURPOSE (t), 5))
1479 18 : remap_block (&TREE_VEC_ELT (TREE_PURPOSE (t), 5), id);
1480 18 : walk_tree (&TREE_VALUE (t), copy_tree_body_r, id, NULL);
1481 : }
1482 : }
1483 : }
1484 :
1485 : /* Keep iterating. */
1486 : return NULL_TREE;
1487 : }
1488 :
1489 : /* Helper for remap_gimple_stmt. Given an EH region number for the
1490 : source function, map that to the duplicate EH region number in
1491 : the destination function. */
1492 :
1493 : static int
1494 84328 : remap_eh_region_nr (int old_nr, copy_body_data *id)
1495 : {
1496 84328 : eh_region old_r, new_r;
1497 :
1498 84328 : old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1499 84328 : new_r = static_cast<eh_region> (*id->eh_map->get (old_r));
1500 :
1501 84328 : return new_r->index;
1502 : }
1503 :
1504 : /* Similar, but operate on INTEGER_CSTs. */
1505 :
1506 : static tree
1507 7861 : remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1508 : {
1509 7861 : int old_nr, new_nr;
1510 :
1511 7861 : old_nr = tree_to_shwi (old_t_nr);
1512 7861 : new_nr = remap_eh_region_nr (old_nr, id);
1513 :
1514 7861 : return build_int_cst (integer_type_node, new_nr);
1515 : }
1516 :
1517 : /* Helper for copy_bb. Remap statement STMT using the inlining
1518 : information in ID. Return the new statement copy. */
1519 :
1520 : static gimple_seq
1521 87063776 : remap_gimple_stmt (gimple *stmt, copy_body_data *id)
1522 : {
1523 87063776 : gimple *copy = NULL;
1524 87063776 : struct walk_stmt_info wi;
1525 87063776 : bool skip_first = false;
1526 87063776 : gimple_seq stmts = NULL;
1527 :
1528 87063776 : if (is_gimple_debug (stmt)
1529 87063776 : && (gimple_debug_nonbind_marker_p (stmt)
1530 12286929 : ? !DECL_STRUCT_FUNCTION (id->dst_fn)->debug_nonbind_markers
1531 40889429 : : !opt_for_fn (id->dst_fn, flag_var_tracking_assignments)))
1532 : return NULL;
1533 :
1534 87037557 : if (!is_gimple_debug (stmt)
1535 33887418 : && id->param_body_adjs
1536 90315878 : && id->param_body_adjs->m_dead_stmts.contains (stmt))
1537 : {
1538 2377 : tree *dval = id->param_body_adjs->m_dead_stmt_debug_equiv.get (stmt);
1539 2377 : if (!dval)
1540 : return NULL;
1541 :
1542 1280 : gcc_assert (is_gimple_assign (stmt));
1543 1280 : tree lhs = gimple_assign_lhs (stmt);
1544 1280 : tree *dvar = id->param_body_adjs->m_dead_ssa_debug_equiv.get (lhs);
1545 1280 : gdebug *bind = gimple_build_debug_bind (*dvar, *dval, stmt);
1546 1280 : if (id->reset_location)
1547 0 : gimple_set_location (bind, input_location);
1548 1280 : id->debug_stmts.safe_push (bind);
1549 1280 : gimple_seq_add_stmt_without_update (&stmts, bind);
1550 1280 : return stmts;
1551 : }
1552 :
1553 : /* Begin by recognizing trees that we'll completely rewrite for the
1554 : inlining context. Our output for these trees is completely
1555 : different from our input (e.g. RETURN_EXPR is deleted and morphs
1556 : into an edge). Further down, we'll handle trees that get
1557 : duplicated and/or tweaked. */
1558 :
1559 : /* When requested, GIMPLE_RETURN should be transformed to just the
1560 : contained GIMPLE_ASSIGN. The branch semantics of the return will
1561 : be handled elsewhere by manipulating the CFG rather than the
1562 : statement. */
1563 87035180 : if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1564 : {
1565 4266537 : tree retval = gimple_return_retval (as_a <greturn *> (stmt));
1566 :
1567 : /* If we're returning something, just turn that into an
1568 : assignment to the equivalent of the original RESULT_DECL.
1569 : If RETVAL is just the result decl, the result decl has
1570 : already been set (e.g. a recent "foo (&result_decl, ...)");
1571 : just toss the entire GIMPLE_RETURN. Likewise for when the
1572 : call doesn't want the return value. */
1573 4266537 : if (retval
1574 4266537 : && (TREE_CODE (retval) != RESULT_DECL
1575 2241821 : && (!id->call_stmt
1576 2241821 : || gimple_call_lhs (id->call_stmt) != NULL_TREE)
1577 2130235 : && (TREE_CODE (retval) != SSA_NAME
1578 1730687 : || ! SSA_NAME_VAR (retval)
1579 398402 : || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
1580 : {
1581 4114428 : copy = gimple_build_assign (id->do_not_unshare
1582 2057214 : ? id->retvar : unshare_expr (id->retvar),
1583 : retval);
1584 : /* id->retvar is already substituted. Skip it on later remapping. */
1585 2057214 : skip_first = true;
1586 : }
1587 : else
1588 : return NULL;
1589 : }
1590 82768643 : else if (gimple_has_substatements (stmt))
1591 : {
1592 67 : gimple_seq s1, s2;
1593 :
1594 : /* When cloning bodies from the C++ front end, we will be handed bodies
1595 : in High GIMPLE form. Handle here all the High GIMPLE statements that
1596 : have embedded statements. */
1597 67 : switch (gimple_code (stmt))
1598 : {
1599 0 : case GIMPLE_BIND:
1600 0 : copy = copy_gimple_bind (as_a <gbind *> (stmt), id);
1601 0 : break;
1602 :
1603 0 : case GIMPLE_CATCH:
1604 0 : {
1605 0 : gcatch *catch_stmt = as_a <gcatch *> (stmt);
1606 0 : s1 = remap_gimple_seq (gimple_catch_handler (catch_stmt), id);
1607 0 : copy = gimple_build_catch (gimple_catch_types (catch_stmt), s1);
1608 : }
1609 0 : break;
1610 :
1611 0 : case GIMPLE_EH_FILTER:
1612 0 : s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1613 0 : copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1614 0 : break;
1615 :
1616 0 : case GIMPLE_TRY:
1617 0 : s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1618 0 : s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1619 0 : copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1620 0 : break;
1621 :
1622 0 : case GIMPLE_WITH_CLEANUP_EXPR:
1623 0 : s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1624 0 : copy = gimple_build_wce (s1);
1625 0 : break;
1626 :
1627 0 : case GIMPLE_OMP_PARALLEL:
1628 0 : {
1629 0 : gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
1630 0 : s1 = remap_gimple_seq (gimple_omp_body (omp_par_stmt), id);
1631 0 : copy = gimple_build_omp_parallel
1632 0 : (s1,
1633 : gimple_omp_parallel_clauses (omp_par_stmt),
1634 : gimple_omp_parallel_child_fn (omp_par_stmt),
1635 : gimple_omp_parallel_data_arg (omp_par_stmt));
1636 : }
1637 0 : break;
1638 :
1639 0 : case GIMPLE_OMP_TASK:
1640 0 : s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1641 0 : copy = gimple_build_omp_task
1642 0 : (s1,
1643 : gimple_omp_task_clauses (stmt),
1644 : gimple_omp_task_child_fn (stmt),
1645 : gimple_omp_task_data_arg (stmt),
1646 : gimple_omp_task_copy_fn (stmt),
1647 : gimple_omp_task_arg_size (stmt),
1648 : gimple_omp_task_arg_align (stmt));
1649 0 : break;
1650 :
1651 0 : case GIMPLE_OMP_FOR:
1652 0 : s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1653 0 : s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1654 0 : copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
1655 : gimple_omp_for_clauses (stmt),
1656 : gimple_omp_for_collapse (stmt), s2);
1657 0 : {
1658 0 : size_t i;
1659 0 : for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1660 : {
1661 0 : gimple_omp_for_set_index (copy, i,
1662 : gimple_omp_for_index (stmt, i));
1663 0 : gimple_omp_for_set_initial (copy, i,
1664 : gimple_omp_for_initial (stmt, i));
1665 0 : gimple_omp_for_set_final (copy, i,
1666 : gimple_omp_for_final (stmt, i));
1667 0 : gimple_omp_for_set_incr (copy, i,
1668 : gimple_omp_for_incr (stmt, i));
1669 0 : gimple_omp_for_set_cond (copy, i,
1670 : gimple_omp_for_cond (stmt, i));
1671 : }
1672 : }
1673 : break;
1674 :
1675 0 : case GIMPLE_OMP_MASTER:
1676 0 : s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1677 0 : copy = gimple_build_omp_master (s1);
1678 0 : break;
1679 :
1680 0 : case GIMPLE_OMP_MASKED:
1681 0 : s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1682 0 : copy = gimple_build_omp_masked
1683 0 : (s1, gimple_omp_masked_clauses (stmt));
1684 0 : break;
1685 :
1686 0 : case GIMPLE_OMP_SCOPE:
1687 0 : s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1688 0 : copy = gimple_build_omp_scope
1689 0 : (s1, gimple_omp_scope_clauses (stmt));
1690 0 : break;
1691 :
1692 0 : case GIMPLE_OMP_DISPATCH:
1693 0 : s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1694 0 : copy = gimple_build_omp_dispatch (s1,
1695 : gimple_omp_dispatch_clauses (stmt));
1696 0 : break;
1697 :
1698 0 : case GIMPLE_OMP_TASKGROUP:
1699 0 : s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1700 0 : copy = gimple_build_omp_taskgroup
1701 0 : (s1, gimple_omp_taskgroup_clauses (stmt));
1702 0 : break;
1703 :
1704 0 : case GIMPLE_OMP_ORDERED:
1705 0 : s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1706 0 : copy = gimple_build_omp_ordered
1707 0 : (s1,
1708 0 : gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt)));
1709 0 : break;
1710 :
1711 0 : case GIMPLE_OMP_SCAN:
1712 0 : s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1713 0 : copy = gimple_build_omp_scan
1714 0 : (s1, gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)));
1715 0 : break;
1716 :
1717 0 : case GIMPLE_OMP_SECTION:
1718 0 : s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1719 0 : copy = gimple_build_omp_section (s1);
1720 0 : break;
1721 :
1722 0 : case GIMPLE_OMP_SECTIONS:
1723 0 : s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1724 0 : copy = gimple_build_omp_sections
1725 0 : (s1, gimple_omp_sections_clauses (stmt));
1726 0 : break;
1727 :
1728 0 : case GIMPLE_OMP_STRUCTURED_BLOCK:
1729 0 : s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1730 0 : copy = gimple_build_omp_structured_block (s1);
1731 0 : break;
1732 :
1733 0 : case GIMPLE_OMP_SINGLE:
1734 0 : s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1735 0 : copy = gimple_build_omp_single
1736 0 : (s1, gimple_omp_single_clauses (stmt));
1737 0 : break;
1738 :
1739 0 : case GIMPLE_OMP_TARGET:
1740 0 : s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1741 0 : copy = gimple_build_omp_target
1742 0 : (s1, gimple_omp_target_kind (stmt),
1743 : gimple_omp_target_clauses (stmt));
1744 0 : break;
1745 :
1746 0 : case GIMPLE_OMP_TEAMS:
1747 0 : s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1748 0 : copy = gimple_build_omp_teams
1749 0 : (s1, gimple_omp_teams_clauses (stmt));
1750 0 : break;
1751 :
1752 0 : case GIMPLE_OMP_CRITICAL:
1753 0 : s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1754 0 : copy = gimple_build_omp_critical (s1,
1755 : gimple_omp_critical_name
1756 0 : (as_a <gomp_critical *> (stmt)),
1757 : gimple_omp_critical_clauses
1758 0 : (as_a <gomp_critical *> (stmt)));
1759 0 : break;
1760 :
1761 0 : case GIMPLE_ASSUME:
1762 0 : s1 = remap_gimple_seq (gimple_assume_body (stmt), id);
1763 0 : copy = gimple_build_assume (gimple_assume_guard (stmt), s1);
1764 0 : break;
1765 :
1766 67 : case GIMPLE_TRANSACTION:
1767 67 : {
1768 67 : gtransaction *old_trans_stmt = as_a <gtransaction *> (stmt);
1769 67 : gtransaction *new_trans_stmt;
1770 67 : s1 = remap_gimple_seq (gimple_transaction_body (old_trans_stmt),
1771 : id);
1772 67 : copy = new_trans_stmt = gimple_build_transaction (s1);
1773 67 : gimple_transaction_set_subcode (new_trans_stmt,
1774 : gimple_transaction_subcode (old_trans_stmt));
1775 67 : gimple_transaction_set_label_norm (new_trans_stmt,
1776 : gimple_transaction_label_norm (old_trans_stmt));
1777 67 : gimple_transaction_set_label_uninst (new_trans_stmt,
1778 : gimple_transaction_label_uninst (old_trans_stmt));
1779 67 : gimple_transaction_set_label_over (new_trans_stmt,
1780 : gimple_transaction_label_over (old_trans_stmt));
1781 : }
1782 67 : break;
1783 :
1784 0 : default:
1785 0 : gcc_unreachable ();
1786 : }
1787 : }
1788 : else
1789 : {
1790 82768576 : if (gimple_assign_single_p (stmt)
1791 12875404 : && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1792 82768576 : && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1793 : {
1794 : /* Here we handle statements that are not completely rewritten.
1795 : First we detect some inlining-induced bogosities for
1796 : discarding. */
1797 :
1798 : /* Some assignments VAR = VAR; don't generate any rtl code
1799 : and thus don't count as variable modification. Avoid
1800 : keeping bogosities like 0 = 0. */
1801 0 : tree decl = gimple_assign_lhs (stmt), value;
1802 0 : tree *n;
1803 :
1804 0 : n = id->decl_map->get (decl);
1805 0 : if (n)
1806 : {
1807 0 : value = *n;
1808 0 : STRIP_TYPE_NOPS (value);
1809 0 : if (TREE_CONSTANT (value) || TREE_READONLY (value))
1810 0 : return NULL;
1811 : }
1812 : }
1813 :
1814 : /* For *ptr_N ={v} {CLOBBER}, if ptr_N is SSA_NAME defined
1815 : in a block that we aren't copying during tree_function_versioning,
1816 : just drop the clobber stmt. */
1817 82768576 : if (id->blocks_to_copy && gimple_clobber_p (stmt))
1818 : {
1819 17168 : tree lhs = gimple_assign_lhs (stmt);
1820 17168 : if (TREE_CODE (lhs) == MEM_REF
1821 17168 : && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
1822 : {
1823 866 : gimple *def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
1824 866 : if (gimple_bb (def_stmt)
1825 1167 : && !bitmap_bit_p (id->blocks_to_copy,
1826 301 : gimple_bb (def_stmt)->index))
1827 : return NULL;
1828 : }
1829 : }
1830 :
1831 : /* We do not allow CLOBBERs of handled components. In case
1832 : returned value is stored via such handled component, remove
1833 : the clobber so stmt verifier is happy. */
1834 82768573 : if (gimple_clobber_p (stmt)
1835 82768573 : && TREE_CODE (gimple_assign_lhs (stmt)) == RESULT_DECL)
1836 : {
1837 0 : tree remapped = remap_decl (gimple_assign_lhs (stmt), id);
1838 0 : if (!DECL_P (remapped)
1839 0 : && TREE_CODE (remapped) != MEM_REF)
1840 : return NULL;
1841 : }
1842 :
1843 82768573 : if (gimple_debug_bind_p (stmt))
1844 : {
1845 40429374 : tree var = gimple_debug_bind_get_var (stmt);
1846 40429374 : tree value = gimple_debug_bind_get_value (stmt);
1847 40429374 : if (id->param_body_adjs
1848 40429374 : && id->param_body_adjs->m_dead_stmts.contains (stmt))
1849 : {
1850 9286 : value = unshare_expr_without_location (value);
1851 9286 : id->param_body_adjs->remap_with_debug_expressions (&value);
1852 : }
1853 :
1854 40429374 : gdebug *copy = gimple_build_debug_bind (var, value, stmt);
1855 40429374 : if (id->reset_location)
1856 4 : gimple_set_location (copy, input_location);
1857 40429374 : id->debug_stmts.safe_push (copy);
1858 40429374 : gimple_seq_add_stmt_without_update (&stmts, copy);
1859 40429374 : return stmts;
1860 : }
1861 42339199 : if (gimple_debug_source_bind_p (stmt))
1862 : {
1863 460055 : gdebug *copy = gimple_build_debug_source_bind
1864 460055 : (gimple_debug_source_bind_get_var (stmt),
1865 : gimple_debug_source_bind_get_value (stmt),
1866 460055 : stmt);
1867 460055 : if (id->reset_location)
1868 0 : gimple_set_location (copy, input_location);
1869 460055 : id->debug_stmts.safe_push (copy);
1870 460055 : gimple_seq_add_stmt_without_update (&stmts, copy);
1871 460055 : return stmts;
1872 : }
1873 41879144 : if (gimple_debug_nonbind_marker_p (stmt))
1874 : {
1875 : /* If the inlined function has too many debug markers,
1876 : don't copy them. */
1877 12260710 : if (id->src_cfun->debug_marker_count
1878 12260710 : > param_max_debug_marker_count
1879 12260710 : || id->reset_location)
1880 0 : return stmts;
1881 :
1882 12260710 : gdebug *copy = as_a <gdebug *> (gimple_copy (stmt));
1883 12260710 : id->debug_stmts.safe_push (copy);
1884 12260710 : gimple_seq_add_stmt_without_update (&stmts, copy);
1885 12260710 : return stmts;
1886 : }
1887 :
1888 : /* Create a new deep copy of the statement. */
1889 29618434 : copy = gimple_copy (stmt);
1890 :
1891 : /* Clear flags that need revisiting. */
1892 29618434 : if (gcall *call_stmt = dyn_cast <gcall *> (copy))
1893 : {
1894 4719356 : if (gimple_call_tail_p (call_stmt))
1895 127 : gimple_call_set_tail (call_stmt, false);
1896 4719356 : if (gimple_call_from_thunk_p (call_stmt))
1897 166 : gimple_call_set_from_thunk (call_stmt, false);
1898 : /* Silently clear musttail flag when inlining a function
1899 : with must tail call from a non-musttail call. The inlining
1900 : removes one frame so acts like musttail's intent, and we
1901 : can be inlining a function with musttail calls in the middle
1902 : of caller where musttail will always error. */
1903 4719356 : if (gimple_call_must_tail_p (call_stmt)
1904 49 : && id->call_stmt
1905 4719397 : && !gimple_call_must_tail_p (id->call_stmt))
1906 14 : gimple_call_set_must_tail (call_stmt, false);
1907 4719356 : if (gimple_call_internal_p (call_stmt))
1908 51064 : switch (gimple_call_internal_fn (call_stmt))
1909 : {
1910 163 : case IFN_GOMP_SIMD_LANE:
1911 163 : case IFN_GOMP_SIMD_VF:
1912 163 : case IFN_GOMP_SIMD_LAST_LANE:
1913 163 : case IFN_GOMP_SIMD_ORDERED_START:
1914 163 : case IFN_GOMP_SIMD_ORDERED_END:
1915 163 : DECL_STRUCT_FUNCTION (id->dst_fn)->has_simduid_loops = true;
1916 163 : break;
1917 : default:
1918 : break;
1919 : }
1920 : }
1921 :
1922 : /* Remap the region numbers for __builtin_eh_{pointer,filter},
1923 : RESX and EH_DISPATCH. */
1924 29618434 : if (id->eh_map)
1925 29618434 : switch (gimple_code (copy))
1926 : {
1927 4719356 : case GIMPLE_CALL:
1928 4719356 : {
1929 4719356 : tree r, fndecl = gimple_call_fndecl (copy);
1930 4719356 : if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
1931 1326904 : switch (DECL_FUNCTION_CODE (fndecl))
1932 : {
1933 0 : case BUILT_IN_EH_COPY_VALUES:
1934 0 : r = gimple_call_arg (copy, 1);
1935 0 : r = remap_eh_region_tree_nr (r, id);
1936 0 : gimple_call_set_arg (copy, 1, r);
1937 : /* FALLTHRU */
1938 :
1939 7861 : case BUILT_IN_EH_POINTER:
1940 7861 : case BUILT_IN_EH_FILTER:
1941 7861 : r = gimple_call_arg (copy, 0);
1942 7861 : r = remap_eh_region_tree_nr (r, id);
1943 7861 : gimple_call_set_arg (copy, 0, r);
1944 7861 : break;
1945 :
1946 : default:
1947 : break;
1948 : }
1949 :
1950 : /* Reset alias info if we didn't apply measures to
1951 : keep it valid over inlining by setting DECL_PT_UID. */
1952 4719356 : if (!id->src_cfun->gimple_df
1953 4719356 : || !id->src_cfun->gimple_df->ipa_pta)
1954 4712834 : gimple_call_reset_alias_info (as_a <gcall *> (copy));
1955 : }
1956 : break;
1957 :
1958 67357 : case GIMPLE_RESX:
1959 67357 : {
1960 67357 : gresx *resx_stmt = as_a <gresx *> (copy);
1961 67357 : int r = gimple_resx_region (resx_stmt);
1962 67357 : r = remap_eh_region_nr (r, id);
1963 67357 : gimple_resx_set_region (resx_stmt, r);
1964 : }
1965 67357 : break;
1966 :
1967 9110 : case GIMPLE_EH_DISPATCH:
1968 9110 : {
1969 9110 : geh_dispatch *eh_dispatch = as_a <geh_dispatch *> (copy);
1970 9110 : int r = gimple_eh_dispatch_region (eh_dispatch);
1971 9110 : r = remap_eh_region_nr (r, id);
1972 9110 : gimple_eh_dispatch_set_region (eh_dispatch, r);
1973 : }
1974 9110 : break;
1975 :
1976 : default:
1977 : break;
1978 : }
1979 : }
1980 :
1981 : /* If STMT has a block defined, map it to the newly constructed block. */
1982 31675715 : if (tree block = gimple_block (copy))
1983 : {
1984 28587934 : tree *n;
1985 28587934 : n = id->decl_map->get (block);
1986 28587934 : gcc_assert (n);
1987 28587934 : gimple_set_block (copy, *n);
1988 : }
1989 31675715 : if (id->param_body_adjs)
1990 : {
1991 3275944 : gimple_seq extra_stmts = NULL;
1992 3275944 : id->param_body_adjs->modify_gimple_stmt (©, &extra_stmts, stmt);
1993 3275944 : if (!gimple_seq_empty_p (extra_stmts))
1994 : {
1995 30 : memset (&wi, 0, sizeof (wi));
1996 30 : wi.info = id;
1997 30 : for (gimple_stmt_iterator egsi = gsi_start (extra_stmts);
1998 88 : !gsi_end_p (egsi);
1999 58 : gsi_next (&egsi))
2000 58 : walk_gimple_op (gsi_stmt (egsi), remap_gimple_op_r, &wi);
2001 30 : gimple_seq_add_seq_without_update (&stmts, extra_stmts);
2002 : }
2003 : }
2004 :
2005 31675715 : if (id->reset_location)
2006 756 : gimple_set_location (copy, input_location);
2007 :
2008 : /* Debug statements ought to be rebuilt and not copied. */
2009 31675715 : gcc_checking_assert (!is_gimple_debug (copy));
2010 :
2011 : /* Remap all the operands in COPY. */
2012 31675715 : memset (&wi, 0, sizeof (wi));
2013 31675715 : wi.info = id;
2014 31675715 : if (skip_first)
2015 2057214 : walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
2016 : else
2017 29618501 : walk_gimple_op (copy, remap_gimple_op_r, &wi);
2018 :
2019 : /* Clear the copied virtual operands. We are not remapping them here
2020 : but are going to recreate them from scratch. */
2021 31675715 : if (gimple_has_mem_ops (copy))
2022 : {
2023 27078923 : gimple_set_vdef (copy, NULL_TREE);
2024 27078923 : gimple_set_vuse (copy, NULL_TREE);
2025 : }
2026 :
2027 31675715 : if (cfun->can_throw_non_call_exceptions)
2028 : {
2029 : /* When inlining a function which does not have non-call exceptions
2030 : enabled into a function that has (which only happens with
2031 : always-inline) we have to fixup stmts that cannot throw. */
2032 1707670 : if (gcond *cond = dyn_cast <gcond *> (copy))
2033 199356 : if (gimple_could_trap_p (cond))
2034 : {
2035 1 : gassign *cmp
2036 1 : = gimple_build_assign (make_ssa_name (boolean_type_node),
2037 : gimple_cond_code (cond),
2038 : gimple_cond_lhs (cond),
2039 : gimple_cond_rhs (cond));
2040 1 : gimple_seq_add_stmt_without_update (&stmts, cmp);
2041 1 : gimple_cond_set_code (cond, NE_EXPR);
2042 1 : gimple_cond_set_lhs (cond, gimple_assign_lhs (cmp));
2043 1 : gimple_cond_set_rhs (cond, boolean_false_node);
2044 : }
2045 : }
2046 :
2047 31675715 : gimple_seq_add_stmt_without_update (&stmts, copy);
2048 31675715 : return stmts;
2049 : }
2050 :
2051 :
2052 : /* Copy basic block, scale profile accordingly. Edges will be taken care of
2053 : later */
2054 :
2055 : static basic_block
2056 13856044 : copy_bb (copy_body_data *id, basic_block bb,
2057 : profile_count num, profile_count den)
2058 : {
2059 13856044 : gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
2060 13856044 : basic_block copy_basic_block;
2061 13856044 : tree decl;
2062 13856044 : basic_block prev;
2063 :
2064 13856044 : profile_count::adjust_for_ipa_scaling (&num, &den);
2065 :
2066 : /* Search for previous copied basic block. */
2067 13856044 : prev = bb->prev_bb;
2068 14005405 : while (!prev->aux)
2069 149361 : prev = prev->prev_bb;
2070 :
2071 : /* create_basic_block() will append every new block to
2072 : basic_block_info automatically. */
2073 13856044 : copy_basic_block = create_basic_block (NULL, (basic_block) prev->aux);
2074 13856044 : copy_basic_block->count = bb->count.apply_scale (num, den);
2075 :
2076 13856044 : copy_gsi = gsi_start_bb (copy_basic_block);
2077 :
2078 13856044 : unsigned min_cond_uid = 0;
2079 13856044 : if (id->src_cfun->cond_uids)
2080 : {
2081 59 : if (!cfun->cond_uids)
2082 6 : cfun->cond_uids = new hash_map <gcond*, unsigned> ();
2083 :
2084 236 : for (auto itr : *id->src_cfun->cond_uids)
2085 59 : if (itr.second >= min_cond_uid)
2086 59 : min_cond_uid = itr.second + 1;
2087 : }
2088 :
2089 114775864 : for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2090 : {
2091 87063776 : gimple_seq stmts;
2092 87063776 : gimple *stmt = gsi_stmt (gsi);
2093 87063776 : gimple *orig_stmt = stmt;
2094 87063776 : gimple_stmt_iterator stmts_gsi;
2095 87063776 : bool stmt_added = false;
2096 :
2097 87063776 : id->regimplify = false;
2098 87063776 : stmts = remap_gimple_stmt (stmt, id);
2099 :
2100 87063776 : if (gimple_seq_empty_p (stmts))
2101 2236824 : continue;
2102 :
2103 84827134 : seq_gsi = copy_gsi;
2104 :
2105 84827134 : for (stmts_gsi = gsi_start (stmts);
2106 169654327 : !gsi_end_p (stmts_gsi); )
2107 : {
2108 84827193 : stmt = gsi_stmt (stmts_gsi);
2109 :
2110 : /* Advance iterator now before stmt is moved to seq_gsi. */
2111 84827193 : gsi_next (&stmts_gsi);
2112 :
2113 84827193 : if (gimple_nop_p (stmt))
2114 182 : continue;
2115 :
2116 : /* If -fcondition-coverage is used, register the inlined conditions
2117 : in the cond->expression mapping of the caller. The expression tag
2118 : is shifted conditions from the two bodies are not mixed. */
2119 84827011 : if (id->src_cfun->cond_uids && is_a <gcond*> (stmt))
2120 : {
2121 17 : gcond *orig_cond = as_a <gcond*> (orig_stmt);
2122 17 : gcond *cond = as_a <gcond*> (stmt);
2123 17 : unsigned *v = id->src_cfun->cond_uids->get (orig_cond);
2124 17 : if (v)
2125 17 : cfun->cond_uids->put (cond, *v + min_cond_uid);
2126 : }
2127 :
2128 84827011 : gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun,
2129 : orig_stmt);
2130 :
2131 84827011 : gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
2132 :
2133 84827011 : if (id->regimplify)
2134 3383 : gimple_regimplify_operands (stmt, &seq_gsi);
2135 :
2136 : stmt_added = true;
2137 : }
2138 :
2139 84827134 : if (!stmt_added)
2140 182 : continue;
2141 :
2142 : /* If copy_basic_block has been empty at the start of this iteration,
2143 : call gsi_start_bb again to get at the newly added statements. */
2144 84826952 : if (gsi_end_p (copy_gsi))
2145 24424274 : copy_gsi = gsi_start_bb (copy_basic_block);
2146 : else
2147 72614815 : gsi_next (©_gsi);
2148 :
2149 : /* Process the new statement. The call to gimple_regimplify_operands
2150 : possibly turned the statement into multiple statements, we
2151 : need to process all of them. */
2152 84827015 : do
2153 : {
2154 84827015 : tree fn;
2155 84827015 : gcall *call_stmt;
2156 :
2157 84827015 : stmt = gsi_stmt (copy_gsi);
2158 84827015 : call_stmt = dyn_cast <gcall *> (stmt);
2159 4719356 : if (call_stmt
2160 4719356 : && gimple_call_va_arg_pack_p (call_stmt)
2161 323 : && id->call_stmt
2162 321 : && ! gimple_call_va_arg_pack_p (id->call_stmt))
2163 : {
2164 : /* __builtin_va_arg_pack () should be replaced by
2165 : all arguments corresponding to ... in the caller. */
2166 307 : tree p;
2167 307 : gcall *new_call;
2168 307 : vec<tree> argarray;
2169 307 : size_t nargs_caller = gimple_call_num_args (id->call_stmt);
2170 307 : size_t nargs = nargs_caller;
2171 :
2172 789 : for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
2173 : {
2174 : /* Avoid crashing on invalid IL that doesn't have a
2175 : varargs function or that passes not enough arguments. */
2176 498 : if (nargs == 0)
2177 : break;
2178 482 : nargs--;
2179 : }
2180 :
2181 : /* Create the new array of arguments. */
2182 307 : size_t nargs_callee = gimple_call_num_args (call_stmt);
2183 307 : size_t n = nargs + nargs_callee;
2184 307 : argarray.create (n);
2185 307 : argarray.safe_grow_cleared (n, true);
2186 :
2187 : /* Copy all the arguments before '...' */
2188 307 : if (nargs_callee)
2189 610 : memcpy (argarray.address (),
2190 305 : gimple_call_arg_ptr (call_stmt, 0),
2191 : nargs_callee * sizeof (tree));
2192 :
2193 : /* Append the arguments passed in '...' */
2194 307 : if (nargs)
2195 173 : memcpy (argarray.address () + nargs_callee,
2196 173 : gimple_call_arg_ptr (id->call_stmt, 0)
2197 173 : + (nargs_caller - nargs), nargs * sizeof (tree));
2198 :
2199 307 : new_call = gimple_build_call_vec (gimple_call_fn (call_stmt),
2200 : argarray);
2201 :
2202 307 : argarray.release ();
2203 :
2204 : /* Copy all GIMPLE_CALL flags, location and block, except
2205 : GF_CALL_VA_ARG_PACK. */
2206 307 : gimple_call_copy_flags (new_call, call_stmt);
2207 307 : gimple_call_set_va_arg_pack (new_call, false);
2208 614 : gimple_call_set_fntype (new_call, gimple_call_fntype (call_stmt));
2209 : /* location includes block. */
2210 307 : gimple_set_location (new_call, gimple_location (stmt));
2211 307 : gimple_call_set_lhs (new_call, gimple_call_lhs (call_stmt));
2212 :
2213 307 : gsi_replace (©_gsi, new_call, false);
2214 307 : stmt = new_call;
2215 : }
2216 84826708 : else if (call_stmt
2217 4719049 : && id->call_stmt
2218 3839018 : && (decl = gimple_call_fndecl (stmt))
2219 88537226 : && fndecl_built_in_p (decl, BUILT_IN_VA_ARG_PACK_LEN))
2220 : {
2221 : /* __builtin_va_arg_pack_len () should be replaced by
2222 : the number of anonymous arguments. */
2223 147 : size_t nargs = gimple_call_num_args (id->call_stmt);
2224 147 : tree count, p;
2225 147 : gimple *new_stmt;
2226 :
2227 423 : for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
2228 276 : nargs--;
2229 :
2230 147 : if (!gimple_call_lhs (stmt))
2231 : {
2232 : /* Drop unused calls. */
2233 1 : gsi_remove (©_gsi, false);
2234 1 : continue;
2235 : }
2236 146 : else if (!gimple_call_va_arg_pack_p (id->call_stmt))
2237 : {
2238 124 : count = build_int_cst (integer_type_node, nargs);
2239 124 : new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
2240 124 : gsi_replace (©_gsi, new_stmt, false);
2241 124 : stmt = new_stmt;
2242 : }
2243 22 : else if (nargs != 0)
2244 : {
2245 7 : tree newlhs = make_ssa_name (integer_type_node);
2246 7 : count = build_int_cst (integer_type_node, nargs);
2247 7 : new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
2248 : PLUS_EXPR, newlhs, count);
2249 7 : gimple_call_set_lhs (stmt, newlhs);
2250 7 : gsi_insert_after (©_gsi, new_stmt, GSI_NEW_STMT);
2251 : }
2252 : }
2253 84826561 : else if (call_stmt
2254 4718902 : && id->call_stmt
2255 88665432 : && gimple_call_internal_p (stmt))
2256 39450 : switch (gimple_call_internal_fn (stmt))
2257 : {
2258 153 : case IFN_TSAN_FUNC_EXIT:
2259 : /* Drop .TSAN_FUNC_EXIT () internal calls during inlining. */
2260 153 : gsi_remove (©_gsi, false);
2261 153 : continue;
2262 1392 : case IFN_ASAN_MARK:
2263 : /* Drop .ASAN_MARK internal calls during inlining into
2264 : no_sanitize functions. */
2265 1392 : if (!sanitize_flags_p (SANITIZE_ADDRESS, id->dst_fn)
2266 1392 : && !sanitize_flags_p (SANITIZE_HWADDRESS, id->dst_fn))
2267 : {
2268 14 : gsi_remove (©_gsi, false);
2269 14 : continue;
2270 : }
2271 : break;
2272 : default:
2273 : break;
2274 : }
2275 :
2276 : /* Statements produced by inlining can be unfolded, especially
2277 : when we constant propagated some operands. We can't fold
2278 : them right now for two reasons:
2279 : 1) folding require SSA_NAME_DEF_STMTs to be correct
2280 : 2) we can't change function calls to builtins.
2281 : So we just mark statement for later folding. We mark
2282 : all new statements, instead just statements that has changed
2283 : by some nontrivial substitution so even statements made
2284 : foldable indirectly are updated. If this turns out to be
2285 : expensive, copy_body can be told to watch for nontrivial
2286 : changes. */
2287 84826847 : if (id->statements_to_fold)
2288 84826847 : id->statements_to_fold->add (stmt);
2289 :
2290 : /* We're duplicating a CALL_EXPR. Find any corresponding
2291 : callgraph edges and update or duplicate them. */
2292 84826847 : if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
2293 : {
2294 4719064 : struct cgraph_edge *edge;
2295 :
2296 4719064 : switch (id->transform_call_graph_edges)
2297 : {
2298 3839033 : case CB_CGE_DUPLICATE:
2299 3839033 : edge = id->src_node->get_edge (orig_stmt);
2300 3839033 : if (edge)
2301 : {
2302 3799750 : struct cgraph_edge *old_edge = edge;
2303 :
2304 : /* A speculative call is consist of multiple
2305 : edges - indirect edge and one or more direct edges
2306 : Duplicate the whole thing and distribute frequencies
2307 : accordingly. */
2308 3799750 : if (edge->speculative)
2309 : {
2310 20175 : int n = 0;
2311 20175 : profile_count direct_cnt
2312 20175 : = profile_count::zero ();
2313 :
2314 : /* First figure out the distribution of counts
2315 : so we can re-scale BB profile accordingly. */
2316 47239 : for (cgraph_edge *e = old_edge; e;
2317 27064 : e = e->next_speculative_call_target ())
2318 27064 : direct_cnt = direct_cnt + e->count;
2319 :
2320 20175 : cgraph_edge *indirect
2321 20175 : = old_edge->speculative_call_indirect_edge ();
2322 20175 : profile_count indir_cnt = indirect->count;
2323 :
2324 : /* Next iterate all direct edges, clone it and its
2325 : corresponding reference and update profile. */
2326 20175 : for (cgraph_edge *e = old_edge;
2327 47239 : e;
2328 27064 : e = e->next_speculative_call_target ())
2329 : {
2330 27064 : profile_count cnt = e->count;
2331 :
2332 27064 : id->dst_node->clone_reference
2333 27064 : (e->speculative_call_target_ref (), stmt);
2334 27064 : edge = e->clone (id->dst_node, call_stmt,
2335 : gimple_uid (stmt), num, den,
2336 : true);
2337 27064 : profile_probability prob
2338 27064 : = cnt.probability_in (direct_cnt
2339 : + indir_cnt);
2340 27064 : edge->count
2341 : = copy_basic_block->count.apply_probability
2342 27064 : (prob);
2343 27064 : n++;
2344 : }
2345 20175 : gcc_checking_assert
2346 : (indirect->num_speculative_call_targets_p ()
2347 : == n);
2348 :
2349 : /* Duplicate the indirect edge after all direct edges
2350 : cloned. */
2351 20175 : indirect = indirect->clone (id->dst_node, call_stmt,
2352 : gimple_uid (stmt),
2353 : num, den,
2354 : true);
2355 :
2356 20175 : profile_probability prob
2357 20175 : = indir_cnt.probability_in (direct_cnt
2358 : + indir_cnt);
2359 20175 : indirect->count
2360 20175 : = copy_basic_block->count.apply_probability (prob);
2361 : }
2362 : /* If edge is a callback-carrying edge, copy all its
2363 : attached edges as well. */
2364 3779575 : else if (edge->has_callback)
2365 : {
2366 2362 : edge
2367 2362 : = edge->clone (id->dst_node, call_stmt,
2368 : gimple_uid (stmt), num, den, true);
2369 2362 : cgraph_edge *e;
2370 2873 : for (e = old_edge->first_callback_edge (); e;
2371 511 : e = e->next_callback_edge ())
2372 511 : edge = e->clone (id->dst_node, call_stmt,
2373 : gimple_uid (stmt), num, den, true);
2374 : }
2375 : else
2376 : {
2377 3777213 : edge = edge->clone (id->dst_node, call_stmt,
2378 : gimple_uid (stmt),
2379 : num, den,
2380 : true);
2381 3777213 : edge->count = copy_basic_block->count;
2382 : }
2383 : }
2384 : break;
2385 :
2386 692223 : case CB_CGE_MOVE_CLONES:
2387 692223 : id->dst_node->set_call_stmt_including_clones (orig_stmt,
2388 : call_stmt);
2389 692223 : edge = id->dst_node->get_edge (stmt);
2390 692223 : break;
2391 :
2392 187808 : case CB_CGE_MOVE:
2393 187808 : edge = id->dst_node->get_edge (orig_stmt);
2394 187808 : if (edge)
2395 187450 : edge = cgraph_edge::set_call_stmt (edge, call_stmt);
2396 : break;
2397 :
2398 0 : default:
2399 0 : gcc_unreachable ();
2400 : }
2401 :
2402 : /* Constant propagation on argument done during inlining
2403 : may create new direct call. Produce an edge for it. */
2404 3777213 : if ((!edge
2405 4569045 : || (edge->indirect_inlining_edge
2406 4109 : && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
2407 150487 : && id->dst_node->definition
2408 952538 : && (fn = gimple_call_fndecl (stmt)) != NULL)
2409 : {
2410 0 : struct cgraph_node *dest = cgraph_node::get_create (fn);
2411 :
2412 : /* We have missing edge in the callgraph. This can happen
2413 : when previous inlining turned an indirect call into a
2414 : direct call by constant propagating arguments or we are
2415 : producing dead clone (for further cloning). In all
2416 : other cases we hit a bug (incorrect node sharing is the
2417 : most common reason for missing edges). */
2418 0 : gcc_assert (!dest->definition
2419 : || dest->address_taken
2420 : || !id->src_node->definition
2421 : || !id->dst_node->definition);
2422 0 : if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
2423 0 : id->dst_node->create_edge_including_clones
2424 0 : (dest, orig_stmt, call_stmt, bb->count,
2425 : CIF_ORIGINALLY_INDIRECT_CALL);
2426 : else
2427 0 : id->dst_node->create_edge (dest, call_stmt,
2428 : bb->count)->inline_failed
2429 0 : = CIF_ORIGINALLY_INDIRECT_CALL;
2430 0 : if (dump_file)
2431 : {
2432 0 : fprintf (dump_file, "Created new direct edge to %s\n",
2433 : dest->dump_name ());
2434 : }
2435 : }
2436 :
2437 4719064 : notice_special_calls (as_a <gcall *> (stmt));
2438 : }
2439 :
2440 84826847 : maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
2441 : id->eh_map, id->eh_lp_nr);
2442 :
2443 84826847 : gsi_next (©_gsi);
2444 : }
2445 84827015 : while (!gsi_end_p (copy_gsi));
2446 :
2447 169653904 : copy_gsi = gsi_last_bb (copy_basic_block);
2448 : }
2449 :
2450 13856044 : return copy_basic_block;
2451 : }
2452 :
2453 : /* Inserting Single Entry Multiple Exit region in SSA form into code in SSA
2454 : form is quite easy, since dominator relationship for old basic blocks does
2455 : not change.
2456 :
2457 : There is however exception where inlining might change dominator relation
2458 : across EH edges from basic block within inlined functions destinating
2459 : to landing pads in function we inline into.
2460 :
2461 : The function fills in PHI_RESULTs of such PHI nodes if they refer
2462 : to gimple regs. Otherwise, the function mark PHI_RESULT of such
2463 : PHI nodes for renaming. For non-gimple regs, renaming is safe: the
2464 : EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
2465 : set, and this means that there will be no overlapping live ranges
2466 : for the underlying symbol.
2467 :
2468 : This might change in future if we allow redirecting of EH edges and
2469 : we might want to change way build CFG pre-inlining to include
2470 : all the possible edges then. */
2471 : static void
2472 674967 : update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
2473 : bool can_throw, bool nonlocal_goto)
2474 : {
2475 674967 : edge e;
2476 674967 : edge_iterator ei;
2477 :
2478 1854204 : FOR_EACH_EDGE (e, ei, bb->succs)
2479 1179237 : if (!e->dest->aux
2480 624378 : || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
2481 : {
2482 554859 : gphi *phi;
2483 554859 : gphi_iterator si;
2484 :
2485 554859 : if (!nonlocal_goto)
2486 554426 : gcc_assert (e->flags & EDGE_EH);
2487 :
2488 554859 : if (!can_throw)
2489 224 : gcc_assert (!(e->flags & EDGE_EH));
2490 :
2491 940145 : for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
2492 : {
2493 385286 : edge re;
2494 :
2495 385286 : phi = si.phi ();
2496 :
2497 : /* For abnormal goto/call edges the receiver can be the
2498 : ENTRY_BLOCK. Do not assert this cannot happen. */
2499 :
2500 385286 : gcc_assert ((e->flags & EDGE_EH)
2501 : || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
2502 :
2503 385286 : re = find_edge (ret_bb, e->dest);
2504 385286 : gcc_checking_assert (re);
2505 385286 : gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
2506 : == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
2507 :
2508 385286 : SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
2509 : USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
2510 : }
2511 : }
2512 674967 : }
2513 :
2514 : /* Insert clobbers for automatic variables of inlined ID->src_fn
2515 : function at the start of basic block ID->eh_landing_pad_dest. */
2516 :
2517 : static void
2518 389457 : add_clobbers_to_eh_landing_pad (copy_body_data *id)
2519 : {
2520 389457 : tree var;
2521 389457 : basic_block bb = id->eh_landing_pad_dest;
2522 389457 : live_vars_map *vars = NULL;
2523 389457 : unsigned int cnt = 0;
2524 389457 : unsigned int i;
2525 907700 : FOR_EACH_VEC_SAFE_ELT (id->src_cfun->local_decls, i, var)
2526 518243 : if (VAR_P (var)
2527 518243 : && !DECL_HARD_REGISTER (var)
2528 518243 : && !TREE_THIS_VOLATILE (var)
2529 518223 : && !DECL_HAS_VALUE_EXPR_P (var)
2530 509765 : && !is_gimple_reg (var)
2531 209722 : && auto_var_in_fn_p (var, id->src_fn)
2532 725950 : && !lookup_attribute ("omp simd array", DECL_ATTRIBUTES (var)))
2533 : {
2534 207707 : tree *t = id->decl_map->get (var);
2535 207707 : if (!t)
2536 0 : continue;
2537 207707 : tree new_var = *t;
2538 207707 : if (VAR_P (new_var)
2539 207707 : && !DECL_HARD_REGISTER (new_var)
2540 207707 : && !TREE_THIS_VOLATILE (new_var)
2541 207707 : && !DECL_HAS_VALUE_EXPR_P (new_var)
2542 207707 : && !is_gimple_reg (new_var)
2543 415414 : && auto_var_in_fn_p (new_var, id->dst_fn))
2544 : {
2545 207707 : if (vars == NULL)
2546 123861 : vars = new live_vars_map;
2547 207707 : vars->put (DECL_UID (var), cnt++);
2548 : }
2549 : }
2550 389457 : if (vars == NULL)
2551 265596 : return;
2552 :
2553 123861 : vec<bitmap_head> live = compute_live_vars (id->src_cfun, vars);
2554 435836 : FOR_EACH_VEC_SAFE_ELT (id->src_cfun->local_decls, i, var)
2555 311975 : if (VAR_P (var))
2556 : {
2557 311975 : edge e;
2558 311975 : edge_iterator ei;
2559 311975 : bool needed = false;
2560 311975 : unsigned int *v = vars->get (DECL_UID (var));
2561 311975 : if (v == NULL)
2562 104268 : continue;
2563 4543923 : FOR_EACH_EDGE (e, ei, bb->preds)
2564 4452821 : if ((e->flags & EDGE_EH) != 0
2565 4452797 : && e->src->index >= id->add_clobbers_to_eh_landing_pads)
2566 : {
2567 335032 : basic_block src_bb = (basic_block) e->src->aux;
2568 :
2569 335032 : if (bitmap_bit_p (&live[src_bb->index], *v))
2570 : {
2571 : needed = true;
2572 : break;
2573 : }
2574 : }
2575 207707 : if (needed)
2576 : {
2577 116605 : tree new_var = *id->decl_map->get (var);
2578 116605 : gimple_stmt_iterator gsi = gsi_after_labels (bb);
2579 116605 : tree clobber = build_clobber (TREE_TYPE (new_var));
2580 116605 : gimple *clobber_stmt = gimple_build_assign (new_var, clobber);
2581 116605 : gsi_insert_before (&gsi, clobber_stmt, GSI_NEW_STMT);
2582 : }
2583 : }
2584 123861 : destroy_live_vars (live);
2585 123861 : delete vars;
2586 : }
2587 :
2588 : /* Copy edges from BB into its copy constructed earlier, scale profile
2589 : accordingly. Edges will be taken care of later. Assume aux
2590 : pointers to point to the copies of each BB. Return true if any
2591 : debug stmts are left after a statement that must end the basic block. */
2592 :
2593 : static bool
2594 22823574 : copy_edges_for_bb (basic_block bb, profile_count num, profile_count den,
2595 : basic_block ret_bb, basic_block abnormal_goto_dest,
2596 : copy_body_data *id)
2597 : {
2598 22823574 : basic_block new_bb = (basic_block) bb->aux;
2599 22823574 : edge_iterator ei;
2600 22823574 : edge old_edge;
2601 22823574 : gimple_stmt_iterator si;
2602 22823574 : bool need_debug_cleanup = false;
2603 :
2604 : /* Use the indices from the original blocks to create edges for the
2605 : new ones. */
2606 44262197 : FOR_EACH_EDGE (old_edge, ei, bb->succs)
2607 21438623 : if (!(old_edge->flags & EDGE_EH))
2608 : {
2609 21318251 : edge new_edge;
2610 21318251 : int flags = old_edge->flags;
2611 21318251 : location_t locus = old_edge->goto_locus;
2612 :
2613 : /* Return edges do get a FALLTHRU flag when they get inlined. */
2614 21318251 : if (old_edge->dest->index == EXIT_BLOCK
2615 4483997 : && !(flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
2616 4483997 : && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
2617 4266537 : flags |= EDGE_FALLTHRU;
2618 :
2619 21318251 : new_edge
2620 21318251 : = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
2621 21318251 : new_edge->probability = old_edge->probability;
2622 21318251 : if (!id->reset_location)
2623 21317661 : new_edge->goto_locus = remap_location (locus, id);
2624 : }
2625 :
2626 22823574 : if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
2627 : return false;
2628 :
2629 : /* When doing function splitting, we must decrease count of the return block
2630 : which was previously reachable by block we did not copy. */
2631 13856044 : if (single_succ_p (bb) && single_succ_edge (bb)->dest->index == EXIT_BLOCK)
2632 10688914 : FOR_EACH_EDGE (old_edge, ei, bb->preds)
2633 6204917 : if (old_edge->src->index != ENTRY_BLOCK
2634 3559092 : && !old_edge->src->aux)
2635 44103 : new_bb->count -= old_edge->count ().apply_scale (num, den);
2636 :
2637 : /* Walk stmts from end to start so that splitting will adjust the BB
2638 : pointer for each stmt at most once, even when we split the block
2639 : multiple times. */
2640 13856044 : bool seen_nondebug = false;
2641 13856044 : for (si = gsi_last_bb (new_bb); !gsi_end_p (si);)
2642 : {
2643 84826854 : bool can_throw, nonlocal_goto;
2644 84826854 : gimple *copy_stmt = gsi_stmt (si);
2645 :
2646 : /* Do this before the possible split_block. */
2647 84826854 : gsi_prev (&si);
2648 :
2649 : /* If this tree could throw an exception, there are two
2650 : cases where we need to add abnormal edge(s): the
2651 : tree wasn't in a region and there is a "current
2652 : region" in the caller; or the original tree had
2653 : EH edges. In both cases split the block after the tree,
2654 : and add abnormal edge(s) as needed; we need both
2655 : those from the callee and the caller.
2656 : We check whether the copy can throw, because the const
2657 : propagation can change an INDIRECT_REF which throws
2658 : into a COMPONENT_REF which doesn't. If the copy
2659 : can throw, the original could also throw. */
2660 84826854 : can_throw = stmt_can_throw_internal (cfun, copy_stmt);
2661 84826854 : nonlocal_goto
2662 84826854 : = (stmt_can_make_abnormal_goto (copy_stmt)
2663 84826854 : && !computed_goto_p (copy_stmt));
2664 :
2665 84826363 : if (can_throw || nonlocal_goto)
2666 : {
2667 : /* If there's only debug insns after copy_stmt don't split
2668 : the block but instead mark the block for cleanup. */
2669 675105 : if (!seen_nondebug)
2670 : need_debug_cleanup = true;
2671 : else
2672 : {
2673 : /* Note that bb's predecessor edges aren't necessarily
2674 : right at this point; split_block doesn't care. */
2675 246176 : edge e = split_block (new_bb, copy_stmt);
2676 246176 : e->dest->aux = new_bb->aux;
2677 246176 : seen_nondebug = false;
2678 : }
2679 : }
2680 :
2681 84826854 : if (!is_gimple_debug (copy_stmt))
2682 31675435 : seen_nondebug = true;
2683 :
2684 84826854 : bool update_probs = false;
2685 :
2686 84826854 : if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
2687 : {
2688 9110 : make_eh_dispatch_edges (as_a <geh_dispatch *> (copy_stmt));
2689 9110 : update_probs = true;
2690 : }
2691 84817744 : else if (can_throw)
2692 : {
2693 674743 : make_eh_edge (copy_stmt);
2694 674743 : update_probs = true;
2695 : }
2696 :
2697 : /* EH edges may not match old edges. Copy as much as possible. */
2698 683853 : if (update_probs)
2699 : {
2700 683853 : edge e;
2701 683853 : edge_iterator ei;
2702 683853 : basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2703 :
2704 1349684 : FOR_EACH_EDGE (old_edge, ei, bb->succs)
2705 665831 : if ((old_edge->flags & EDGE_EH)
2706 128691 : && (e = find_edge (copy_stmt_bb,
2707 128691 : (basic_block) old_edge->dest->aux))
2708 786048 : && (e->flags & EDGE_EH))
2709 120217 : e->probability = old_edge->probability;
2710 :
2711 1872738 : FOR_EACH_EDGE (e, ei, copy_stmt_bb->succs)
2712 1188885 : if (e->flags & EDGE_EH)
2713 : {
2714 674743 : if (!e->probability.initialized_p ())
2715 554714 : e->probability = profile_probability::never ();
2716 674743 : if (e->dest->index < id->add_clobbers_to_eh_landing_pads)
2717 : {
2718 553602 : if (id->eh_landing_pad_dest == NULL)
2719 389457 : id->eh_landing_pad_dest = e->dest;
2720 : else
2721 164145 : gcc_assert (id->eh_landing_pad_dest == e->dest);
2722 : }
2723 : }
2724 : }
2725 :
2726 :
2727 : /* If the call we inline cannot make abnormal goto do not add
2728 : additional abnormal edges but only retain those already present
2729 : in the original function body. */
2730 84826854 : if (abnormal_goto_dest == NULL)
2731 : nonlocal_goto = false;
2732 1092 : if (nonlocal_goto)
2733 : {
2734 333 : basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2735 :
2736 333 : if (get_abnormal_succ_dispatcher (copy_stmt_bb))
2737 : nonlocal_goto = false;
2738 : /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2739 : in OpenMP regions which aren't allowed to be left abnormally.
2740 : So, no need to add abnormal edge in that case. */
2741 333 : else if (is_gimple_call (copy_stmt)
2742 333 : && gimple_call_internal_p (copy_stmt)
2743 0 : && (gimple_call_internal_fn (copy_stmt)
2744 : == IFN_ABNORMAL_DISPATCHER)
2745 333 : && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
2746 : nonlocal_goto = false;
2747 : else
2748 : {
2749 333 : make_single_succ_edge (copy_stmt_bb, abnormal_goto_dest,
2750 : EDGE_ABNORMAL);
2751 333 : gimple_call_set_ctrl_altering (copy_stmt, true);
2752 333 : if (is_a <gcall *> (copy_stmt)
2753 333 : && (gimple_call_flags (copy_stmt) & ECF_NORETURN))
2754 75 : fixup_noreturn_call (copy_stmt);
2755 : }
2756 : }
2757 :
2758 84826854 : if ((can_throw || nonlocal_goto)
2759 183509752 : && gimple_in_ssa_p (cfun))
2760 674967 : update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
2761 : can_throw, nonlocal_goto);
2762 : }
2763 : return need_debug_cleanup;
2764 : }
2765 :
2766 : /* Copy the PHIs. All blocks and edges are copied, some blocks
2767 : was possibly split and new outgoing EH edges inserted.
2768 : BB points to the block of original function and AUX pointers links
2769 : the original and newly copied blocks. */
2770 :
2771 : static void
2772 22823574 : copy_phis_for_bb (basic_block bb, copy_body_data *id)
2773 : {
2774 22823574 : basic_block const new_bb = (basic_block) bb->aux;
2775 22823574 : edge_iterator ei;
2776 22823574 : gphi *phi;
2777 22823574 : gphi_iterator si;
2778 22823574 : edge new_edge;
2779 22823574 : bool inserted = false;
2780 :
2781 25781835 : for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2782 : {
2783 2958261 : tree res, new_res;
2784 2958261 : gphi *new_phi;
2785 :
2786 2958261 : phi = si.phi ();
2787 2958261 : res = PHI_RESULT (phi);
2788 2958261 : new_res = res;
2789 2958261 : if (!virtual_operand_p (res)
2790 2958261 : && (!id->param_body_adjs
2791 1519661 : || !id->param_body_adjs->m_dead_stmts.contains (phi)))
2792 : {
2793 1625222 : walk_tree (&new_res, copy_tree_body_r, id, NULL);
2794 1625222 : if (EDGE_COUNT (new_bb->preds) == 0)
2795 : {
2796 : /* Technically we'd want a SSA_DEFAULT_DEF here... */
2797 0 : SSA_NAME_DEF_STMT (new_res) = gimple_build_nop ();
2798 : }
2799 : else
2800 : {
2801 1625222 : new_phi = create_phi_node (new_res, new_bb);
2802 5296521 : FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2803 : {
2804 3671299 : edge old_edge = find_edge ((basic_block) new_edge->src->aux,
2805 3671299 : bb);
2806 3671299 : tree arg;
2807 3671299 : tree new_arg;
2808 3671299 : edge_iterator ei2;
2809 3671299 : location_t locus;
2810 :
2811 : /* When doing partial cloning, we allow PHIs on the entry
2812 : block as long as all the arguments are the same.
2813 : Find any input edge to see argument to copy. */
2814 3671299 : if (!old_edge)
2815 2039 : FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2816 2039 : if (!old_edge->src->aux)
2817 : break;
2818 :
2819 3671299 : arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2820 3671299 : new_arg = arg;
2821 3671299 : walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2822 3671299 : gcc_assert (new_arg);
2823 : /* With return slot optimization we can end up with
2824 : non-gimple (foo *)&this->m, fix that here. */
2825 3671299 : if (TREE_CODE (new_arg) != SSA_NAME
2826 1527903 : && TREE_CODE (new_arg) != FUNCTION_DECL
2827 5199202 : && !is_gimple_val (new_arg))
2828 : {
2829 12 : gimple_seq stmts = NULL;
2830 12 : new_arg = force_gimple_operand (new_arg, &stmts, true,
2831 : NULL);
2832 12 : gsi_insert_seq_on_edge (new_edge, stmts);
2833 12 : inserted = true;
2834 : }
2835 3671299 : locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2836 3671299 : if (id->reset_location)
2837 0 : locus = input_location;
2838 : else
2839 3671299 : locus = remap_location (locus, id);
2840 3671299 : add_phi_arg (new_phi, new_arg, new_edge, locus);
2841 : }
2842 : }
2843 : }
2844 : }
2845 :
2846 : /* Commit the delayed edge insertions. */
2847 22823574 : if (inserted)
2848 36 : FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2849 24 : gsi_commit_one_edge_insert (new_edge, NULL);
2850 22823574 : }
2851 :
2852 :
2853 : /* Wrapper for remap_decl so it can be used as a callback. */
2854 :
2855 : static tree
2856 85901 : remap_decl_1 (tree decl, void *data)
2857 : {
2858 85901 : return remap_decl (decl, (copy_body_data *) data);
2859 : }
2860 :
2861 : /* Build struct function and associated datastructures for the new clone
2862 : NEW_FNDECL to be build. CALLEE_FNDECL is the original. Function changes
2863 : the cfun to the function of new_fndecl (and current_function_decl too). */
2864 :
2865 : static void
2866 230763 : initialize_cfun (tree new_fndecl, tree callee_fndecl, profile_count count)
2867 : {
2868 230763 : struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2869 :
2870 : /* Register specific tree functions. */
2871 230763 : gimple_register_cfg_hooks ();
2872 :
2873 : /* Get clean struct function. */
2874 230763 : push_struct_function (new_fndecl, true);
2875 230763 : targetm.target_option.relayout_function (new_fndecl);
2876 :
2877 : /* We will rebuild these, so just sanity check that they are empty. */
2878 230763 : gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2879 230763 : gcc_assert (cfun->local_decls == NULL);
2880 230763 : gcc_assert (cfun->cfg == NULL);
2881 230763 : gcc_assert (cfun->decl == new_fndecl);
2882 :
2883 : /* Copy items we preserve during cloning. */
2884 230763 : cfun->static_chain_decl = src_cfun->static_chain_decl;
2885 230763 : cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2886 230763 : cfun->function_end_locus = src_cfun->function_end_locus;
2887 230763 : cfun->curr_properties = src_cfun->curr_properties;
2888 230763 : cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2889 230763 : cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2890 230763 : cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2891 230763 : cfun->calls_eh_return = src_cfun->calls_eh_return;
2892 230763 : cfun->stdarg = src_cfun->stdarg;
2893 230763 : cfun->after_inlining = src_cfun->after_inlining;
2894 230763 : cfun->can_throw_non_call_exceptions
2895 230763 : = src_cfun->can_throw_non_call_exceptions;
2896 230763 : cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2897 230763 : cfun->returns_struct = src_cfun->returns_struct;
2898 230763 : cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2899 :
2900 230763 : init_empty_tree_cfg ();
2901 :
2902 230763 : profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
2903 230763 : cfun->cfg->full_profile = src_cfun->cfg->full_profile;
2904 :
2905 230763 : profile_count num = count;
2906 230763 : profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
2907 230763 : profile_count::adjust_for_ipa_scaling (&num, &den);
2908 :
2909 230763 : ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
2910 230763 : ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (num, den);
2911 230763 : EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
2912 230763 : EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (num, den);
2913 230763 : if (src_cfun->eh)
2914 230763 : init_eh_for_function ();
2915 :
2916 230763 : if (src_cfun->gimple_df)
2917 : {
2918 230763 : init_tree_ssa (cfun);
2919 230763 : cfun->gimple_df->in_ssa_p = src_cfun->gimple_df->in_ssa_p;
2920 230763 : if (cfun->gimple_df->in_ssa_p)
2921 230763 : init_ssa_operands (cfun);
2922 : }
2923 230763 : }
2924 :
2925 : /* Helper function for copy_cfg_body. Move debug stmts from the end
2926 : of NEW_BB to the beginning of successor basic blocks when needed. If the
2927 : successor has multiple predecessors, reset them, otherwise keep
2928 : their value. */
2929 :
2930 : static void
2931 1466279 : maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2932 : {
2933 1466279 : edge e;
2934 1466279 : edge_iterator ei;
2935 1466279 : gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2936 :
2937 1466279 : if (gsi_end_p (si)
2938 1527699 : || gsi_one_before_end_p (si)
2939 1618911 : || !(stmt_can_throw_internal (cfun, gsi_stmt (si))
2940 61421 : || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2941 1436488 : return;
2942 :
2943 89375 : FOR_EACH_EDGE (e, ei, new_bb->succs)
2944 : {
2945 59584 : gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2946 59584 : gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2947 209379 : while (is_gimple_debug (gsi_stmt (ssi)))
2948 : {
2949 149795 : gimple *stmt = gsi_stmt (ssi);
2950 149795 : gdebug *new_stmt;
2951 149795 : tree var;
2952 149795 : tree value;
2953 :
2954 : /* For the last edge move the debug stmts instead of copying
2955 : them. */
2956 149795 : if (ei_one_before_end_p (ei))
2957 : {
2958 74896 : si = ssi;
2959 74896 : gsi_prev (&ssi);
2960 74896 : if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2961 : {
2962 73491 : gimple_debug_bind_reset_value (stmt);
2963 146400 : gimple_set_location (stmt, UNKNOWN_LOCATION);
2964 : }
2965 74896 : gsi_remove (&si, false);
2966 74896 : gsi_insert_before (&dsi, stmt, GSI_NEW_STMT);
2967 74896 : continue;
2968 : }
2969 :
2970 74899 : if (gimple_debug_bind_p (stmt))
2971 : {
2972 73494 : var = gimple_debug_bind_get_var (stmt);
2973 73494 : if (single_pred_p (e->dest))
2974 : {
2975 46510 : value = gimple_debug_bind_get_value (stmt);
2976 46510 : value = unshare_expr (value);
2977 46510 : new_stmt = gimple_build_debug_bind (var, value, stmt);
2978 : }
2979 : else
2980 26984 : new_stmt = gimple_build_debug_bind (var, NULL_TREE, NULL);
2981 : }
2982 1405 : else if (gimple_debug_source_bind_p (stmt))
2983 : {
2984 0 : var = gimple_debug_source_bind_get_var (stmt);
2985 0 : value = gimple_debug_source_bind_get_value (stmt);
2986 0 : new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2987 : }
2988 1405 : else if (gimple_debug_nonbind_marker_p (stmt))
2989 1405 : new_stmt = as_a <gdebug *> (gimple_copy (stmt));
2990 : else
2991 0 : gcc_unreachable ();
2992 74899 : gsi_insert_before (&dsi, new_stmt, GSI_NEW_STMT);
2993 74899 : id->debug_stmts.safe_push (new_stmt);
2994 74899 : gsi_prev (&ssi);
2995 : }
2996 : }
2997 : }
2998 :
2999 : /* Make a copy of the sub-loops of SRC_PARENT and place them
3000 : as siblings of DEST_PARENT. */
3001 :
3002 : static void
3003 5138308 : copy_loops (copy_body_data *id,
3004 : class loop *dest_parent, class loop *src_parent)
3005 : {
3006 5138308 : class loop *src_loop = src_parent->inner;
3007 5749978 : while (src_loop)
3008 : {
3009 611670 : if (!id->blocks_to_copy
3010 611670 : || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
3011 : {
3012 607778 : class loop *dest_loop = alloc_loop ();
3013 :
3014 : /* Assign the new loop its header and latch and associate
3015 : those with the new loop. */
3016 607778 : dest_loop->header = (basic_block)src_loop->header->aux;
3017 607778 : dest_loop->header->loop_father = dest_loop;
3018 607778 : if (src_loop->latch != NULL)
3019 : {
3020 607762 : dest_loop->latch = (basic_block)src_loop->latch->aux;
3021 607762 : dest_loop->latch->loop_father = dest_loop;
3022 : }
3023 :
3024 : /* Copy loop meta-data. */
3025 607778 : copy_loop_info (src_loop, dest_loop);
3026 607778 : if (dest_loop->unroll)
3027 2148 : cfun->has_unroll = true;
3028 607778 : if (dest_loop->force_vectorize)
3029 117 : cfun->has_force_vectorize_loops = true;
3030 607778 : if (id->src_cfun->last_clique != 0)
3031 73623 : dest_loop->owned_clique
3032 73623 : = remap_dependence_clique (id,
3033 73623 : src_loop->owned_clique
3034 : ? src_loop->owned_clique : 1);
3035 :
3036 : /* Finally place it into the loop array and the loop tree. */
3037 607778 : place_new_loop (cfun, dest_loop);
3038 607778 : flow_loop_tree_node_add (dest_parent, dest_loop);
3039 :
3040 607778 : if (src_loop->simduid)
3041 : {
3042 67 : dest_loop->simduid = remap_decl (src_loop->simduid, id);
3043 67 : cfun->has_simduid_loops = true;
3044 : }
3045 :
3046 : /* Recurse. */
3047 607778 : copy_loops (id, dest_loop, src_loop);
3048 : }
3049 611670 : src_loop = src_loop->next;
3050 : }
3051 5138308 : }
3052 :
3053 : /* Call redirect_call_stmt_to_callee on all calls in BB. */
3054 :
3055 : void
3056 12373057 : redirect_all_calls (copy_body_data * id, basic_block bb)
3057 : {
3058 12373057 : gimple_stmt_iterator si;
3059 12373057 : gimple *last = last_nondebug_stmt (bb);
3060 98142088 : for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
3061 : {
3062 73395974 : gimple *stmt = gsi_stmt (si);
3063 73395974 : if (is_gimple_call (stmt))
3064 : {
3065 3893161 : struct cgraph_edge *edge = id->dst_node->get_edge (stmt);
3066 3893161 : if (edge)
3067 : {
3068 3853878 : if (!id->killed_new_ssa_names)
3069 2043298 : id->killed_new_ssa_names = new hash_set<tree> (16);
3070 3853878 : cgraph_edge::redirect_call_stmt_to_callee (
3071 : edge, id->killed_new_ssa_names);
3072 3853878 : if (edge->has_callback)
3073 : {
3074 : /* When redirecting a carrying edge, we need to redirect its
3075 : attached edges as well. */
3076 2362 : cgraph_edge *cbe;
3077 2873 : for (cbe = edge->first_callback_edge (); cbe;
3078 511 : cbe = cbe->next_callback_edge ())
3079 511 : cgraph_edge::redirect_call_stmt_to_callee (
3080 : cbe, id->killed_new_ssa_names);
3081 : }
3082 :
3083 3853878 : if (stmt == last && id->call_stmt && maybe_clean_eh_stmt (stmt))
3084 37094 : gimple_purge_dead_eh_edges (bb);
3085 : }
3086 : }
3087 : }
3088 12373057 : }
3089 :
3090 : /* Make a copy of the body of FN so that it can be inserted inline in
3091 : another function. Walks FN via CFG, returns new fndecl. */
3092 :
3093 : static tree
3094 4530530 : copy_cfg_body (copy_body_data * id,
3095 : basic_block entry_block_map, basic_block exit_block_map,
3096 : basic_block new_entry)
3097 : {
3098 4530530 : tree callee_fndecl = id->src_fn;
3099 : /* Original cfun for the callee, doesn't change. */
3100 4530530 : struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
3101 4530530 : struct function *cfun_to_copy;
3102 4530530 : basic_block bb;
3103 4530530 : tree new_fndecl = NULL;
3104 4530530 : bool need_debug_cleanup = false;
3105 4530530 : int last;
3106 4530530 : profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
3107 4530530 : profile_count num = entry_block_map->count;
3108 :
3109 4530530 : cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
3110 :
3111 : /* Register specific tree functions. */
3112 4530530 : gimple_register_cfg_hooks ();
3113 :
3114 : /* If we are offlining region of the function, make sure to connect
3115 : new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun). Since new entry can be
3116 : part of loop, we must compute frequency and probability of
3117 : ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
3118 : probabilities of edges incoming from nonduplicated region. */
3119 4530530 : if (new_entry)
3120 : {
3121 46765 : edge e;
3122 46765 : edge_iterator ei;
3123 46765 : ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = profile_count::zero ();
3124 :
3125 101488 : FOR_EACH_EDGE (e, ei, new_entry->preds)
3126 54723 : if (!e->src->aux)
3127 54723 : ENTRY_BLOCK_PTR_FOR_FN (cfun)->count += e->count ();
3128 : /* Do not scale - the profile of offlined region should
3129 : remain unchanged. */
3130 46765 : num = den = profile_count::one ();
3131 : }
3132 :
3133 4530530 : profile_count::adjust_for_ipa_scaling (&num, &den);
3134 :
3135 : /* Must have a CFG here at this point. */
3136 4530530 : gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
3137 : (DECL_STRUCT_FUNCTION (callee_fndecl)));
3138 :
3139 :
3140 4530530 : ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
3141 4530530 : EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
3142 4530530 : entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
3143 4530530 : exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
3144 :
3145 : /* Duplicate any exception-handling regions. */
3146 4530530 : if (cfun->eh)
3147 4530530 : id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
3148 : remap_decl_1, id);
3149 :
3150 : /* Use aux pointers to map the original blocks to copy. */
3151 18553760 : FOR_EACH_BB_FN (bb, cfun_to_copy)
3152 14023230 : if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
3153 : {
3154 13856044 : basic_block new_bb = copy_bb (id, bb, num, den);
3155 13856044 : bb->aux = new_bb;
3156 13856044 : new_bb->aux = bb;
3157 13856044 : new_bb->loop_father = entry_block_map->loop_father;
3158 : }
3159 :
3160 4530530 : last = last_basic_block_for_fn (cfun);
3161 :
3162 : /* Now that we've duplicated the blocks, duplicate their edges. */
3163 4530530 : basic_block abnormal_goto_dest = NULL;
3164 4530530 : if (id->call_stmt
3165 4530530 : && stmt_can_make_abnormal_goto (id->call_stmt))
3166 : {
3167 324 : gimple_stmt_iterator gsi = gsi_for_stmt (id->call_stmt);
3168 :
3169 324 : bb = gimple_bb (id->call_stmt);
3170 324 : gsi_next (&gsi);
3171 324 : if (gsi_end_p (gsi))
3172 324 : abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
3173 : }
3174 27614820 : FOR_ALL_BB_FN (bb, cfun_to_copy)
3175 23084290 : if (!id->blocks_to_copy
3176 23084290 : || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
3177 22823574 : need_debug_cleanup |= copy_edges_for_bb (bb, num, den, exit_block_map,
3178 : abnormal_goto_dest, id);
3179 :
3180 4530530 : if (id->eh_landing_pad_dest)
3181 : {
3182 389457 : add_clobbers_to_eh_landing_pad (id);
3183 389457 : id->eh_landing_pad_dest = NULL;
3184 : }
3185 :
3186 4530530 : if (new_entry)
3187 : {
3188 46765 : edge e = make_edge (entry_block_map, (basic_block)new_entry->aux,
3189 : EDGE_FALLTHRU);
3190 46765 : e->probability = profile_probability::always ();
3191 : }
3192 :
3193 : /* Duplicate the loop tree, if available and wanted. */
3194 4530530 : if (loops_for_fn (src_cfun) != NULL
3195 4530530 : && current_loops != NULL)
3196 : {
3197 4530530 : copy_loops (id, entry_block_map->loop_father,
3198 : get_loop (src_cfun, 0));
3199 : /* Defer to cfgcleanup to update loop-father fields of basic-blocks. */
3200 4530530 : loops_state_set (LOOPS_NEED_FIXUP);
3201 : }
3202 :
3203 : /* If the loop tree in the source function needed fixup, mark the
3204 : destination loop tree for fixup, too. */
3205 4530530 : if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
3206 0 : loops_state_set (LOOPS_NEED_FIXUP);
3207 :
3208 4530530 : if (gimple_in_ssa_p (cfun))
3209 27614820 : FOR_ALL_BB_FN (bb, cfun_to_copy)
3210 23084290 : if (!id->blocks_to_copy
3211 23084290 : || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
3212 22823574 : copy_phis_for_bb (bb, id);
3213 :
3214 27614820 : FOR_ALL_BB_FN (bb, cfun_to_copy)
3215 23084290 : if (bb->aux)
3216 : {
3217 22917104 : if (need_debug_cleanup
3218 1913787 : && bb->index != ENTRY_BLOCK
3219 1639546 : && bb->index != EXIT_BLOCK)
3220 1365305 : maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
3221 : /* Update call edge destinations. This cannot be done before loop
3222 : info is updated, because we may split basic blocks. */
3223 22917104 : if (id->transform_call_graph_edges == CB_CGE_DUPLICATE
3224 20645223 : && bb->index != ENTRY_BLOCK
3225 16345456 : && bb->index != EXIT_BLOCK)
3226 12045689 : redirect_all_calls (id, (basic_block)bb->aux);
3227 22917104 : ((basic_block)bb->aux)->aux = NULL;
3228 22917104 : bb->aux = NULL;
3229 : }
3230 :
3231 : /* Zero out AUX fields of newly created block during EH edge
3232 : insertion. */
3233 4857898 : for (; last < last_basic_block_for_fn (cfun); last++)
3234 : {
3235 327368 : if (need_debug_cleanup)
3236 100974 : maybe_move_debug_stmts_to_successors (id,
3237 100974 : BASIC_BLOCK_FOR_FN (cfun, last));
3238 327368 : BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
3239 : /* Update call edge destinations. This cannot be done before loop
3240 : info is updated, because we may split basic blocks. */
3241 327368 : if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
3242 327368 : redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
3243 : }
3244 4530530 : entry_block_map->aux = NULL;
3245 4530530 : exit_block_map->aux = NULL;
3246 :
3247 4530530 : if (id->eh_map)
3248 : {
3249 4530530 : delete id->eh_map;
3250 4530530 : id->eh_map = NULL;
3251 : }
3252 4530530 : if (id->dependence_map)
3253 : {
3254 675756 : delete id->dependence_map;
3255 675756 : id->dependence_map = NULL;
3256 : }
3257 :
3258 4530530 : return new_fndecl;
3259 : }
3260 :
3261 : /* Copy the debug STMT using ID. We deal with these statements in a
3262 : special way: if any variable in their VALUE expression wasn't
3263 : remapped yet, we won't remap it, because that would get decl uids
3264 : out of sync, causing codegen differences between -g and -g0. If
3265 : this arises, we drop the VALUE expression altogether. */
3266 :
3267 : static void
3268 53226318 : copy_debug_stmt (gdebug *stmt, copy_body_data *id)
3269 : {
3270 53226318 : tree t, *n;
3271 53226318 : struct walk_stmt_info wi;
3272 :
3273 53226318 : if (tree block = gimple_block (stmt))
3274 : {
3275 35376087 : n = id->decl_map->get (block);
3276 35376087 : gimple_set_block (stmt, n ? *n : id->block);
3277 : }
3278 :
3279 53226318 : if (gimple_debug_nonbind_marker_p (stmt))
3280 : {
3281 12262115 : if (id->call_stmt && !gimple_block (stmt))
3282 : {
3283 35013 : gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
3284 35013 : gsi_remove (&gsi, true);
3285 : }
3286 12262115 : return;
3287 : }
3288 :
3289 : /* Remap all the operands in COPY. */
3290 40964203 : memset (&wi, 0, sizeof (wi));
3291 40964203 : wi.info = id;
3292 :
3293 40964203 : processing_debug_stmt = 1;
3294 :
3295 40964203 : if (gimple_debug_source_bind_p (stmt))
3296 460055 : t = gimple_debug_source_bind_get_var (stmt);
3297 40504148 : else if (gimple_debug_bind_p (stmt))
3298 40504148 : t = gimple_debug_bind_get_var (stmt);
3299 : else
3300 0 : gcc_unreachable ();
3301 :
3302 40964203 : if (TREE_CODE (t) == PARM_DECL
3303 197872 : && id->debug_map
3304 40965148 : && (n = id->debug_map->get (t)))
3305 : {
3306 0 : gcc_assert (VAR_P (*n));
3307 0 : t = *n;
3308 : }
3309 40964203 : else if (VAR_P (t) && !is_global_var (t) && !id->decl_map->get (t))
3310 : /* T is a non-localized variable. */;
3311 : else
3312 35947962 : walk_tree (&t, remap_gimple_op_r, &wi, NULL);
3313 :
3314 40964203 : if (gimple_debug_bind_p (stmt))
3315 : {
3316 40504148 : gimple_debug_bind_set_var (stmt, t);
3317 :
3318 40504148 : if (gimple_debug_bind_has_value_p (stmt))
3319 22792827 : walk_tree (gimple_debug_bind_get_value_ptr (stmt),
3320 : remap_gimple_op_r, &wi, NULL);
3321 :
3322 : /* Punt if any decl couldn't be remapped. */
3323 40504148 : if (processing_debug_stmt < 0)
3324 587371 : gimple_debug_bind_reset_value (stmt);
3325 : }
3326 460055 : else if (gimple_debug_source_bind_p (stmt))
3327 : {
3328 460055 : gimple_debug_source_bind_set_var (stmt, t);
3329 : /* When inlining and source bind refers to one of the optimized
3330 : away parameters, change the source bind into normal debug bind
3331 : referring to the corresponding DEBUG_EXPR_DECL that should have
3332 : been bound before the call stmt. */
3333 460055 : t = gimple_debug_source_bind_get_value (stmt);
3334 460055 : if (t != NULL_TREE
3335 460055 : && TREE_CODE (t) == PARM_DECL
3336 328888 : && id->call_stmt)
3337 : {
3338 324583 : vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
3339 324583 : unsigned int i;
3340 324583 : if (debug_args != NULL)
3341 : {
3342 433583 : for (i = 0; i < vec_safe_length (*debug_args); i += 2)
3343 433582 : if ((**debug_args)[i] == DECL_ORIGIN (t)
3344 433582 : && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
3345 : {
3346 323455 : t = (**debug_args)[i + 1];
3347 323455 : stmt->subcode = GIMPLE_DEBUG_BIND;
3348 323455 : gimple_debug_bind_set_value (stmt, t);
3349 323455 : break;
3350 : }
3351 : }
3352 : }
3353 460055 : if (gimple_debug_source_bind_p (stmt))
3354 136600 : walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
3355 : remap_gimple_op_r, &wi, NULL);
3356 : }
3357 :
3358 40964203 : processing_debug_stmt = 0;
3359 :
3360 40964203 : update_stmt (stmt);
3361 : }
3362 :
3363 : /* Process deferred debug stmts. In order to give values better odds
3364 : of being successfully remapped, we delay the processing of debug
3365 : stmts until all other stmts that might require remapping are
3366 : processed. */
3367 :
3368 : static void
3369 4530530 : copy_debug_stmts (copy_body_data *id)
3370 : {
3371 4530530 : if (!id->debug_stmts.exists ())
3372 : return;
3373 :
3374 56033846 : for (gdebug *stmt : id->debug_stmts)
3375 : /* But avoid re-processing debug stmts that have been elided. */
3376 53226318 : if (gimple_bb (stmt))
3377 53226318 : copy_debug_stmt (stmt, id);
3378 :
3379 2807528 : id->debug_stmts.release ();
3380 : }
3381 :
3382 : /* Make a copy of the body of SRC_FN so that it can be inserted inline in
3383 : another function. */
3384 :
3385 : static tree
3386 32838385 : copy_tree_body (copy_body_data *id)
3387 : {
3388 32838385 : tree fndecl = id->src_fn;
3389 32838385 : tree body = DECL_SAVED_TREE (fndecl);
3390 :
3391 32838385 : walk_tree (&body, copy_tree_body_r, id, NULL);
3392 :
3393 32838385 : return body;
3394 : }
3395 :
3396 : /* Make a copy of the body of FN so that it can be inserted inline in
3397 : another function. */
3398 :
3399 : static tree
3400 4530530 : copy_body (copy_body_data *id,
3401 : basic_block entry_block_map, basic_block exit_block_map,
3402 : basic_block new_entry)
3403 : {
3404 4530530 : tree fndecl = id->src_fn;
3405 4530530 : tree body;
3406 :
3407 : /* If this body has a CFG, walk CFG and copy. */
3408 4530530 : gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
3409 4530530 : body = copy_cfg_body (id, entry_block_map, exit_block_map,
3410 : new_entry);
3411 4530530 : copy_debug_stmts (id);
3412 4530530 : if (id->killed_new_ssa_names)
3413 : {
3414 2043298 : ipa_release_ssas_in_hash (id->killed_new_ssa_names);
3415 4086596 : delete id->killed_new_ssa_names;
3416 2043298 : id->killed_new_ssa_names = NULL;
3417 : }
3418 :
3419 4530530 : return body;
3420 : }
3421 :
3422 : /* Return true if VALUE is an ADDR_EXPR of an automatic variable
3423 : defined in function FN, or of a data member thereof. */
3424 :
3425 : static bool
3426 147618 : self_inlining_addr_expr (tree value, tree fn)
3427 : {
3428 147618 : tree var;
3429 :
3430 147618 : if (TREE_CODE (value) != ADDR_EXPR)
3431 : return false;
3432 :
3433 141343 : var = get_base_address (TREE_OPERAND (value, 0));
3434 :
3435 141343 : return var && auto_var_in_fn_p (var, fn);
3436 : }
3437 :
3438 : /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
3439 : lexical block and line number information from base_stmt, if given,
3440 : or from the last stmt of the block otherwise. */
3441 :
3442 : static gimple *
3443 7410490 : insert_init_debug_bind (copy_body_data *id,
3444 : basic_block bb, tree var, tree value,
3445 : gimple *base_stmt)
3446 : {
3447 7410490 : gimple *note;
3448 7410490 : gimple_stmt_iterator gsi;
3449 7410490 : tree tracked_var;
3450 :
3451 8856879 : if (!gimple_in_ssa_p (id->src_cfun))
3452 : return NULL;
3453 :
3454 7410490 : if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
3455 : return NULL;
3456 :
3457 6527182 : tracked_var = target_for_debug_bind (var);
3458 6527182 : if (!tracked_var)
3459 : return NULL;
3460 :
3461 5964101 : if (bb)
3462 : {
3463 5956741 : gsi = gsi_last_bb (bb);
3464 5956741 : if (!base_stmt && !gsi_end_p (gsi))
3465 5964101 : base_stmt = gsi_stmt (gsi);
3466 : }
3467 :
3468 5964101 : note = gimple_build_debug_bind (tracked_var,
3469 5964101 : value == error_mark_node
3470 5964101 : ? NULL_TREE : unshare_expr (value),
3471 : base_stmt);
3472 :
3473 5964101 : if (bb)
3474 : {
3475 5956741 : if (!gsi_end_p (gsi))
3476 5162065 : gsi_insert_after (&gsi, note, GSI_SAME_STMT);
3477 : else
3478 794676 : gsi_insert_before (&gsi, note, GSI_SAME_STMT);
3479 : }
3480 :
3481 : return note;
3482 : }
3483 :
3484 : static void
3485 448528 : insert_init_stmt (copy_body_data *id, basic_block bb, gimple *init_stmt)
3486 : {
3487 : /* If VAR represents a zero-sized variable, it's possible that the
3488 : assignment statement may result in no gimple statements. */
3489 448528 : if (init_stmt)
3490 : {
3491 448528 : gimple_stmt_iterator si = gsi_last_bb (bb);
3492 :
3493 : /* We can end up with init statements that store to a non-register
3494 : from a rhs with a conversion. Handle that here by forcing the
3495 : rhs into a temporary. gimple_regimplify_operands is not
3496 : prepared to do this for us. */
3497 448528 : if (!is_gimple_debug (init_stmt)
3498 441168 : && !is_gimple_reg (gimple_assign_lhs (init_stmt))
3499 344822 : && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
3500 483470 : && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
3501 : {
3502 2 : tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
3503 1 : TREE_TYPE (gimple_assign_lhs (init_stmt)),
3504 : gimple_assign_rhs1 (init_stmt));
3505 1 : rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
3506 : GSI_NEW_STMT);
3507 1 : gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
3508 1 : gimple_assign_set_rhs1 (init_stmt, rhs);
3509 : }
3510 448528 : gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
3511 448528 : if (!is_gimple_debug (init_stmt))
3512 : {
3513 441168 : gimple_regimplify_operands (init_stmt, &si);
3514 :
3515 441168 : tree def = gimple_assign_lhs (init_stmt);
3516 441168 : insert_init_debug_bind (id, bb, def, def, init_stmt);
3517 : }
3518 : }
3519 448528 : }
3520 :
3521 : /* Deal with mismatched formal/actual parameters, in a rather brute-force way
3522 : if need be (which should only be necessary for invalid programs). Attempt
3523 : to convert VAL to TYPE and return the result if it is possible, just return
3524 : a zero constant of the given type if it fails. */
3525 :
3526 : tree
3527 2066 : force_value_to_type (tree type, tree value)
3528 : {
3529 : /* If we can match up types by promotion/demotion do so. */
3530 2066 : if (fold_convertible_p (type, value))
3531 536 : return fold_convert (type, value);
3532 :
3533 : /* ??? For valid programs we should not end up here.
3534 : Still if we end up with truly mismatched types here, fall back
3535 : to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
3536 : GIMPLE to the following passes. */
3537 1530 : if (TREE_CODE (value) == WITH_SIZE_EXPR)
3538 0 : return error_mark_node;
3539 1530 : else if (!is_gimple_reg_type (TREE_TYPE (value))
3540 1530 : || TYPE_SIZE (type) == TYPE_SIZE (TREE_TYPE (value)))
3541 1239 : return fold_build1 (VIEW_CONVERT_EXPR, type, value);
3542 : else
3543 291 : return build_zero_cst (type);
3544 : }
3545 :
3546 : /* Initialize parameter P with VALUE. If needed, produce init statement
3547 : at the end of BB. When BB is NULL, we return init statement to be
3548 : output later. */
3549 : static gimple *
3550 7396821 : setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
3551 : basic_block bb, tree *vars)
3552 : {
3553 7396821 : gimple *init_stmt = NULL;
3554 7396821 : tree var;
3555 7396821 : tree def = (gimple_in_ssa_p (cfun)
3556 7396821 : ? ssa_default_def (id->src_cfun, p) : NULL);
3557 :
3558 : /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
3559 : here since the type of this decl must be visible to the calling
3560 : function. */
3561 7396821 : var = copy_decl_to_var (p, id);
3562 :
3563 : /* Declare this new variable. */
3564 7396821 : DECL_CHAIN (var) = *vars;
3565 7396821 : *vars = var;
3566 :
3567 : /* Make gimplifier happy about this variable. */
3568 7396821 : DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3569 :
3570 : /* If the parameter is never assigned to, has no SSA_NAMEs created,
3571 : we would not need to create a new variable here at all, if it
3572 : weren't for debug info. Still, we can just use the argument
3573 : value. */
3574 7396821 : if (TREE_READONLY (p)
3575 3933206 : && !TREE_ADDRESSABLE (p)
3576 3931846 : && value
3577 3931839 : && !TREE_SIDE_EFFECTS (value)
3578 11328654 : && !def)
3579 : {
3580 : /* We may produce non-gimple trees by adding NOPs or introduce invalid
3581 : sharing when the value is not constant or DECL. And we need to make
3582 : sure that it cannot be modified from another path in the callee. */
3583 352122 : if (((is_gimple_min_invariant (value)
3584 : /* When the parameter is used in a context that forces it to
3585 : not be a GIMPLE register avoid substituting something that
3586 : is not a decl there. */
3587 147287 : && ! DECL_NOT_GIMPLE_REG_P (p))
3588 204846 : || (DECL_P (value) && TREE_READONLY (value))
3589 204762 : || (auto_var_in_fn_p (value, id->dst_fn)
3590 264 : && !TREE_ADDRESSABLE (value)))
3591 147621 : && useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value))
3592 : /* We have to be very careful about ADDR_EXPR. Make sure
3593 : the base variable isn't a local variable of the inlined
3594 : function, e.g., when doing recursive inlining, direct or
3595 : mutually-recursive or whatever, which is why we don't
3596 : just test whether fn == current_function_decl. */
3597 499740 : && ! self_inlining_addr_expr (value, fn))
3598 : {
3599 147618 : insert_decl_map (id, p, value);
3600 147618 : if (!id->debug_map)
3601 142759 : id->debug_map = new hash_map<tree, tree>;
3602 147618 : id->debug_map->put (p, var);
3603 147618 : return insert_init_debug_bind (id, bb, var, value, NULL);
3604 : }
3605 : }
3606 :
3607 : /* Register the VAR_DECL as the equivalent for the PARM_DECL;
3608 : that way, when the PARM_DECL is encountered, it will be
3609 : automatically replaced by the VAR_DECL. */
3610 7249203 : insert_decl_map (id, p, var);
3611 :
3612 : /* Even if P was TREE_READONLY, the new VAR should not be. In the original
3613 : code, we would have constructed a temporary, and then the function body
3614 : would have never changed the value of P. However, now, we will be
3615 : constructing VAR directly. Therefore, it must not be TREE_READONLY. */
3616 7249203 : TREE_READONLY (var) = 0;
3617 :
3618 7249203 : tree rhs = value;
3619 7249203 : if (value
3620 7248291 : && value != error_mark_node
3621 14497494 : && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
3622 2055 : rhs = force_value_to_type (TREE_TYPE (p), value);
3623 :
3624 : /* If there is no setup required and we are in SSA, take the easy route
3625 : replacing all SSA names representing the function parameter by the
3626 : SSA name passed to function.
3627 :
3628 : We need to construct map for the variable anyway as it might be used
3629 : in different SSA names when parameter is set in function.
3630 :
3631 : Do replacement at -O0 for const arguments replaced by constant.
3632 : This is important for builtin_constant_p and other construct requiring
3633 : constant argument to be visible in inlined function body. */
3634 14498406 : if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
3635 6514078 : && (optimize
3636 23175 : || (TREE_READONLY (p)
3637 10752 : && is_gimple_min_invariant (rhs)))
3638 6493971 : && (TREE_CODE (rhs) == SSA_NAME
3639 2481131 : || is_gimple_min_invariant (rhs))
3640 6493665 : && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
3641 : {
3642 6493665 : insert_decl_map (id, def, rhs);
3643 6493665 : return insert_init_debug_bind (id, bb, var, rhs, NULL);
3644 : }
3645 :
3646 : /* If the value of argument is never used, don't care about initializing
3647 : it. */
3648 1487288 : if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
3649 : {
3650 : /* When there's a gross type mismatch between the passed value
3651 : and the declared argument type drop it on the floor and do
3652 : not bother to insert a debug bind. */
3653 328053 : if (value && !is_gimple_reg_type (TREE_TYPE (value)))
3654 : return NULL;
3655 328039 : return insert_init_debug_bind (id, bb, var, rhs, NULL);
3656 : }
3657 :
3658 : /* Initialize this VAR_DECL from the equivalent argument. Convert
3659 : the argument to the proper type in case it was promoted. */
3660 427485 : if (value)
3661 : {
3662 427397 : if (rhs == error_mark_node)
3663 : {
3664 0 : insert_decl_map (id, p, var);
3665 0 : return insert_init_debug_bind (id, bb, var, rhs, NULL);
3666 : }
3667 :
3668 427397 : STRIP_USELESS_TYPE_CONVERSION (rhs);
3669 :
3670 : /* If we are in SSA form properly remap the default definition. */
3671 854794 : if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
3672 : {
3673 23269 : if (def)
3674 : {
3675 20413 : def = remap_ssa_name (def, id);
3676 20413 : init_stmt = gimple_build_assign (def, rhs);
3677 20413 : SSA_NAME_IS_DEFAULT_DEF (def) = 0;
3678 20413 : set_ssa_default_def (cfun, var, NULL);
3679 : }
3680 : }
3681 404128 : else if (!is_empty_type (TREE_TYPE (var)))
3682 344822 : init_stmt = gimple_build_assign (var, rhs);
3683 :
3684 427397 : if (bb && init_stmt)
3685 364794 : insert_init_stmt (id, bb, init_stmt);
3686 : }
3687 : return init_stmt;
3688 : }
3689 :
3690 : /* Generate code to initialize the parameters of the function at the
3691 : top of the stack in ID from the GIMPLE_CALL STMT. */
3692 :
3693 : static void
3694 4299767 : initialize_inlined_parameters (copy_body_data *id, gimple *stmt,
3695 : tree fn, basic_block bb)
3696 : {
3697 4299767 : tree parms;
3698 4299767 : size_t i;
3699 4299767 : tree p;
3700 4299767 : tree vars = NULL_TREE;
3701 4299767 : tree static_chain = gimple_call_chain (stmt);
3702 :
3703 : /* Figure out what the parameters are. */
3704 4299767 : parms = DECL_ARGUMENTS (fn);
3705 :
3706 : /* Loop through the parameter declarations, replacing each with an
3707 : equivalent VAR_DECL, appropriately initialized. */
3708 11668700 : for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3709 : {
3710 7368933 : tree val;
3711 7368933 : val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
3712 7368933 : setup_one_parameter (id, p, val, fn, bb, &vars);
3713 : }
3714 : /* After remapping parameters remap their types. This has to be done
3715 : in a second loop over all parameters to appropriately remap
3716 : variable sized arrays when the size is specified in a
3717 : parameter following the array. */
3718 11668700 : for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3719 : {
3720 7368933 : tree *varp = id->decl_map->get (p);
3721 7368933 : if (varp && VAR_P (*varp))
3722 : {
3723 14446402 : tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
3724 6819213 : ? ssa_default_def (id->src_cfun, p) : NULL);
3725 7223201 : tree var = *varp;
3726 7223201 : TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
3727 : /* Also remap the default definition if it was remapped
3728 : to the default definition of the parameter replacement
3729 : by the parameter setup. */
3730 7223201 : if (def)
3731 : {
3732 6490166 : tree *defp = id->decl_map->get (def);
3733 6490166 : if (defp
3734 6490079 : && TREE_CODE (*defp) == SSA_NAME
3735 12647219 : && SSA_NAME_VAR (*defp) == var)
3736 20413 : TREE_TYPE (*defp) = TREE_TYPE (var);
3737 : }
3738 : /* When not optimizing and the parameter is unused, assign to
3739 : a dummy SSA name. Do this after remapping the type above. */
3740 733035 : else if (!optimize
3741 3681 : && is_gimple_reg (p)
3742 735891 : && i < gimple_call_num_args (stmt))
3743 : {
3744 2856 : tree val = gimple_call_arg (stmt, i);
3745 2856 : if (val != error_mark_node)
3746 : {
3747 2856 : if (!useless_type_conversion_p (TREE_TYPE (p),
3748 2856 : TREE_TYPE (val)))
3749 0 : val = force_value_to_type (TREE_TYPE (p), val);
3750 2856 : def = make_ssa_name (var);
3751 2856 : gimple *init_stmt = gimple_build_assign (def, val);
3752 2856 : insert_init_stmt (id, bb, init_stmt);
3753 : }
3754 : }
3755 : }
3756 : }
3757 :
3758 : /* Initialize the static chain. */
3759 4299767 : p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
3760 4299767 : gcc_assert (fn != current_function_decl);
3761 4299767 : if (p)
3762 : {
3763 : /* No static chain? Seems like a bug in tree-nested.cc. */
3764 2279 : gcc_assert (static_chain);
3765 :
3766 2279 : setup_one_parameter (id, p, static_chain, fn, bb, &vars);
3767 : }
3768 :
3769 : /* Reverse so the variables appear in the correct order in DWARF
3770 : debug info. */
3771 4299767 : vars = nreverse (vars);
3772 :
3773 4299767 : declare_inline_vars (id->block, vars);
3774 4299767 : }
3775 :
3776 :
3777 : /* Declare a return variable to replace the RESULT_DECL for the
3778 : function we are calling. An appropriate DECL_STMT is returned.
3779 : The USE_STMT is filled to contain a use of the declaration to
3780 : indicate the return value of the function.
3781 :
3782 : RETURN_SLOT, if non-null is place where to store the result. It
3783 : is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
3784 : was the LHS of the MODIFY_EXPR to which this call is the RHS.
3785 :
3786 : The return value is a (possibly null) value that holds the result
3787 : as seen by the caller. */
3788 :
3789 : static tree
3790 4299767 : declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
3791 : basic_block entry_bb)
3792 : {
3793 4299767 : tree callee = id->src_fn;
3794 4299767 : tree result = DECL_RESULT (callee);
3795 4299767 : tree callee_type = TREE_TYPE (result);
3796 4299767 : tree caller_type;
3797 4299767 : tree var, use;
3798 :
3799 : /* Handle type-mismatches in the function declaration return type
3800 : vs. the call expression. */
3801 4299767 : if (modify_dest)
3802 2041025 : caller_type = TREE_TYPE (modify_dest);
3803 2258742 : else if (return_slot)
3804 127944 : caller_type = TREE_TYPE (return_slot);
3805 : else /* No LHS on the call. */
3806 2130798 : caller_type = TREE_TYPE (TREE_TYPE (callee));
3807 :
3808 : /* We don't need to do anything for functions that don't return anything. */
3809 4299767 : if (VOID_TYPE_P (callee_type))
3810 : return NULL_TREE;
3811 :
3812 : /* If there was a return slot, then the return value is the
3813 : dereferenced address of that object. */
3814 2312332 : if (return_slot)
3815 : {
3816 : /* The front end shouldn't have used both return_slot and
3817 : a modify expression. */
3818 127944 : gcc_assert (!modify_dest);
3819 127944 : if (DECL_BY_REFERENCE (result))
3820 : {
3821 73077 : tree return_slot_addr = build_fold_addr_expr (return_slot);
3822 73077 : STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
3823 :
3824 : /* We are going to construct *&return_slot and we can't do that
3825 : for variables believed to be not addressable.
3826 :
3827 : FIXME: This check possibly can match, because values returned
3828 : via return slot optimization are not believed to have address
3829 : taken by alias analysis. */
3830 73077 : gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
3831 73077 : var = return_slot_addr;
3832 73077 : mark_addressable (return_slot);
3833 : }
3834 : else
3835 : {
3836 54867 : var = return_slot;
3837 54867 : gcc_assert (TREE_CODE (var) != SSA_NAME);
3838 54867 : if (TREE_ADDRESSABLE (result))
3839 19623 : mark_addressable (var);
3840 : }
3841 127944 : if (DECL_NOT_GIMPLE_REG_P (result)
3842 127944 : && DECL_P (var))
3843 0 : DECL_NOT_GIMPLE_REG_P (var) = 1;
3844 :
3845 127944 : if (!useless_type_conversion_p (callee_type, caller_type))
3846 73078 : var = build1 (VIEW_CONVERT_EXPR, callee_type, var);
3847 :
3848 127944 : use = NULL;
3849 127944 : goto done;
3850 : }
3851 :
3852 : /* All types requiring non-trivial constructors should have been handled. */
3853 2184388 : gcc_assert (!TREE_ADDRESSABLE (callee_type));
3854 :
3855 : /* Attempt to avoid creating a new temporary variable. */
3856 2184388 : if (modify_dest
3857 2041004 : && TREE_CODE (modify_dest) != SSA_NAME)
3858 : {
3859 290351 : bool use_it = false;
3860 :
3861 : /* We can't use MODIFY_DEST if there's type promotion involved. */
3862 290351 : if (!useless_type_conversion_p (callee_type, caller_type))
3863 : use_it = false;
3864 :
3865 : /* ??? If we're assigning to a variable sized type, then we must
3866 : reuse the destination variable, because we've no good way to
3867 : create variable sized temporaries at this point. */
3868 290349 : else if (!poly_int_tree_p (TYPE_SIZE_UNIT (caller_type)))
3869 : use_it = true;
3870 :
3871 : /* If the callee cannot possibly modify MODIFY_DEST, then we can
3872 : reuse it as the result of the call directly. Don't do this if
3873 : it would promote MODIFY_DEST to addressable. */
3874 290349 : else if (TREE_ADDRESSABLE (result))
3875 : use_it = false;
3876 : else
3877 : {
3878 289609 : tree base_m = get_base_address (modify_dest);
3879 :
3880 : /* If the base isn't a decl, then it's a pointer, and we don't
3881 : know where that's going to go. */
3882 289609 : if (!DECL_P (base_m))
3883 : use_it = false;
3884 285616 : else if (is_global_var (base_m))
3885 : use_it = false;
3886 285229 : else if (DECL_NOT_GIMPLE_REG_P (result)
3887 285229 : && !DECL_NOT_GIMPLE_REG_P (base_m))
3888 : use_it = false;
3889 285229 : else if (!TREE_ADDRESSABLE (base_m))
3890 : use_it = true;
3891 : }
3892 :
3893 : if (use_it)
3894 : {
3895 197863 : var = modify_dest;
3896 197863 : use = NULL;
3897 197863 : goto done;
3898 : }
3899 : }
3900 :
3901 1986525 : gcc_assert (poly_int_tree_p (TYPE_SIZE_UNIT (callee_type)));
3902 :
3903 1986525 : var = copy_result_decl_to_var (result, id);
3904 1986525 : DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3905 :
3906 : /* Do not have the rest of GCC warn about this variable as it should
3907 : not be visible to the user. */
3908 1986525 : suppress_warning (var /* OPT_Wuninitialized? */);
3909 :
3910 1986525 : declare_inline_vars (id->block, var);
3911 :
3912 : /* Build the use expr. If the return type of the function was
3913 : promoted, convert it back to the expected type. */
3914 1986525 : use = var;
3915 1986525 : if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
3916 : {
3917 : /* If we can match up types by promotion/demotion do so. */
3918 7 : if (fold_convertible_p (caller_type, var))
3919 0 : use = fold_convert (caller_type, var);
3920 : else
3921 : {
3922 : /* ??? For valid programs we should not end up here.
3923 : Still if we end up with truly mismatched types here, fall back
3924 : to using a MEM_REF to not leak invalid GIMPLE to the following
3925 : passes. */
3926 : /* Prevent var from being written into SSA form. */
3927 7 : if (is_gimple_reg_type (TREE_TYPE (var)))
3928 7 : DECL_NOT_GIMPLE_REG_P (var) = true;
3929 7 : use = fold_build2 (MEM_REF, caller_type,
3930 : build_fold_addr_expr (var),
3931 : build_int_cst (ptr_type_node, 0));
3932 : }
3933 : }
3934 :
3935 1986525 : STRIP_USELESS_TYPE_CONVERSION (use);
3936 :
3937 1986525 : if (DECL_BY_REFERENCE (result))
3938 : {
3939 0 : TREE_ADDRESSABLE (var) = 1;
3940 0 : var = build_fold_addr_expr (var);
3941 : }
3942 :
3943 1986525 : done:
3944 : /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3945 : way, when the RESULT_DECL is encountered, it will be
3946 : automatically replaced by the VAR_DECL.
3947 :
3948 : When returning by reference, ensure that RESULT_DECL remaps to
3949 : gimple_val. */
3950 2312332 : if (DECL_BY_REFERENCE (result)
3951 2312332 : && !is_gimple_val (var))
3952 : {
3953 73077 : tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3954 73077 : insert_decl_map (id, result, temp);
3955 : /* When RESULT_DECL is in SSA form, we need to remap and initialize
3956 : it's default_def SSA_NAME. */
3957 73077 : if (gimple_in_ssa_p (id->src_cfun)
3958 73077 : && is_gimple_reg (result))
3959 73077 : if (tree default_def = ssa_default_def (id->src_cfun, result))
3960 : {
3961 73040 : temp = make_ssa_name (temp);
3962 73040 : insert_decl_map (id, default_def, temp);
3963 : }
3964 73077 : insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3965 : }
3966 : else
3967 2239255 : insert_decl_map (id, result, var);
3968 :
3969 : /* Remember this so we can ignore it in remap_decls. */
3970 2312332 : id->retvar = var;
3971 2312332 : return use;
3972 : }
3973 :
3974 : /* Determine if the function can be copied. If so return NULL. If
3975 : not return a string describng the reason for failure. */
3976 :
3977 : const char *
3978 18562081 : copy_forbidden (struct function *fun)
3979 : {
3980 18562081 : const char *reason = fun->cannot_be_copied_reason;
3981 :
3982 : /* Only examine the function once. */
3983 18562081 : if (fun->cannot_be_copied_set)
3984 : return reason;
3985 :
3986 : /* We cannot copy a function that receives a non-local goto
3987 : because we cannot remap the destination label used in the
3988 : function that is performing the non-local goto. */
3989 : /* ??? Actually, this should be possible, if we work at it.
3990 : No doubt there's just a handful of places that simply
3991 : assume it doesn't happen and don't substitute properly. */
3992 11417621 : if (fun->has_nonlocal_label)
3993 : {
3994 779 : reason = G_("function %q+F can never be copied "
3995 : "because it receives a non-local goto");
3996 779 : goto fail;
3997 : }
3998 :
3999 11416842 : if (fun->has_forced_label_in_static)
4000 : {
4001 225 : reason = G_("function %q+F can never be copied because it saves "
4002 : "address of local label in a static variable");
4003 225 : goto fail;
4004 : }
4005 :
4006 11416617 : fail:
4007 11417621 : fun->cannot_be_copied_reason = reason;
4008 11417621 : fun->cannot_be_copied_set = true;
4009 11417621 : return reason;
4010 : }
4011 :
4012 :
4013 : static const char *inline_forbidden_reason;
4014 :
4015 : /* A callback for walk_gimple_seq to handle statements. Returns non-null
4016 : iff a function cannot be inlined. Also sets the reason why. */
4017 :
4018 : static tree
4019 166477786 : inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
4020 : struct walk_stmt_info *wip)
4021 : {
4022 166477786 : tree fn = (tree) wip->info;
4023 166477786 : tree t;
4024 166477786 : gimple *stmt = gsi_stmt (*gsi);
4025 :
4026 166477786 : switch (gimple_code (stmt))
4027 : {
4028 18505869 : case GIMPLE_CALL:
4029 : /* Refuse to inline alloca call unless user explicitly forced so as
4030 : this may change program's memory overhead drastically when the
4031 : function using alloca is called in loop. In GCC present in
4032 : SPEC2000 inlining into schedule_block cause it to require 2GB of
4033 : RAM instead of 256MB. Don't do so for alloca calls emitted for
4034 : VLA objects as those can't cause unbounded growth (they're always
4035 : wrapped inside stack_save/stack_restore regions. */
4036 18505869 : if (gimple_maybe_alloca_call_p (stmt)
4037 9521 : && !gimple_call_alloca_for_var_p (as_a <gcall *> (stmt))
4038 18510695 : && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
4039 : {
4040 4698 : inline_forbidden_reason
4041 4698 : = G_("function %q+F can never be inlined because it uses "
4042 : "alloca (override using the always_inline attribute)");
4043 4698 : *handled_ops_p = true;
4044 4698 : return fn;
4045 : }
4046 :
4047 18501171 : t = gimple_call_fndecl (stmt);
4048 18501171 : if (t == NULL_TREE)
4049 : break;
4050 :
4051 : /* We cannot inline functions that call setjmp. */
4052 17843598 : if (setjmp_call_p (t))
4053 : {
4054 965 : inline_forbidden_reason
4055 965 : = G_("function %q+F can never be inlined because it uses setjmp");
4056 965 : *handled_ops_p = true;
4057 965 : return t;
4058 : }
4059 :
4060 17842633 : if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
4061 3669622 : switch (DECL_FUNCTION_CODE (t))
4062 : {
4063 : /* We cannot inline functions that take a variable number of
4064 : arguments. */
4065 2758 : case BUILT_IN_VA_START:
4066 2758 : case BUILT_IN_NEXT_ARG:
4067 2758 : case BUILT_IN_VA_END:
4068 2758 : inline_forbidden_reason
4069 2758 : = G_("function %q+F can never be inlined because it "
4070 : "uses variable argument lists");
4071 2758 : *handled_ops_p = true;
4072 2758 : return t;
4073 :
4074 226 : case BUILT_IN_LONGJMP:
4075 : /* We can't inline functions that call __builtin_longjmp at
4076 : all. The non-local goto machinery really requires the
4077 : destination be in a different function. If we allow the
4078 : function calling __builtin_longjmp to be inlined into the
4079 : function calling __builtin_setjmp, Things will Go Awry. */
4080 226 : inline_forbidden_reason
4081 226 : = G_("function %q+F can never be inlined because "
4082 : "it uses setjmp-longjmp exception handling");
4083 226 : *handled_ops_p = true;
4084 226 : return t;
4085 :
4086 269 : case BUILT_IN_NONLOCAL_GOTO:
4087 : /* Similarly. */
4088 269 : inline_forbidden_reason
4089 269 : = G_("function %q+F can never be inlined because "
4090 : "it uses non-local goto");
4091 269 : *handled_ops_p = true;
4092 269 : return t;
4093 :
4094 384 : case BUILT_IN_RETURN:
4095 384 : case BUILT_IN_APPLY_ARGS:
4096 : /* If a __builtin_apply_args caller would be inlined,
4097 : it would be saving arguments of the function it has
4098 : been inlined into. Similarly __builtin_return would
4099 : return from the function the inline has been inlined into. */
4100 384 : inline_forbidden_reason
4101 384 : = G_("function %q+F can never be inlined because "
4102 : "it uses %<__builtin_return%> or %<__builtin_apply_args%>");
4103 384 : *handled_ops_p = true;
4104 384 : return t;
4105 :
4106 : default:
4107 : break;
4108 : }
4109 : break;
4110 :
4111 292 : case GIMPLE_GOTO:
4112 292 : t = gimple_goto_dest (stmt);
4113 :
4114 : /* We will not inline a function which uses computed goto. The
4115 : addresses of its local labels, which may be tucked into
4116 : global storage, are of course not constant across
4117 : instantiations, which causes unexpected behavior. */
4118 292 : if (TREE_CODE (t) != LABEL_DECL)
4119 : {
4120 292 : inline_forbidden_reason
4121 292 : = G_("function %q+F can never be inlined "
4122 : "because it contains a computed goto");
4123 292 : *handled_ops_p = true;
4124 292 : return t;
4125 : }
4126 : break;
4127 :
4128 : default:
4129 : break;
4130 : }
4131 :
4132 166468194 : *handled_ops_p = false;
4133 166468194 : return NULL_TREE;
4134 : }
4135 :
4136 : /* Return true if FNDECL is a function that cannot be inlined into
4137 : another one. */
4138 :
4139 : static bool
4140 5670352 : inline_forbidden_p (tree fndecl)
4141 : {
4142 5670352 : struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
4143 5670352 : struct walk_stmt_info wi;
4144 5670352 : basic_block bb;
4145 5670352 : bool forbidden_p = false;
4146 :
4147 : /* First check for shared reasons not to copy the code. */
4148 5670352 : inline_forbidden_reason = copy_forbidden (fun);
4149 5670352 : if (inline_forbidden_reason != NULL)
4150 : return true;
4151 :
4152 : /* Next, walk the statements of the function looking for
4153 : constraucts we can't handle, or are non-optimal for inlining. */
4154 5669654 : hash_set<tree> visited_nodes;
4155 5669654 : memset (&wi, 0, sizeof (wi));
4156 5669654 : wi.info = (void *) fndecl;
4157 5669654 : wi.pset = &visited_nodes;
4158 :
4159 : /* We cannot inline a function with a variable-sized parameter because we
4160 : cannot materialize a temporary of such a type in the caller if need be.
4161 : Note that the return case is not symmetrical because we can guarantee
4162 : that a temporary is not needed by means of CALL_EXPR_RETURN_SLOT_OPT. */
4163 18094650 : for (tree parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
4164 12424998 : if (!poly_int_tree_p (DECL_SIZE (parm)))
4165 : {
4166 2 : inline_forbidden_reason
4167 2 : = G_("function %q+F can never be inlined because "
4168 : "it has a VLA argument");
4169 2 : return true;
4170 : }
4171 :
4172 37167776 : FOR_EACH_BB_FN (bb, fun)
4173 : {
4174 31507716 : gimple *ret;
4175 31507716 : gimple_seq seq = bb_seq (bb);
4176 31507716 : ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
4177 31507716 : forbidden_p = (ret != NULL);
4178 31507716 : if (forbidden_p)
4179 : break;
4180 : }
4181 :
4182 : return forbidden_p;
4183 5669654 : }
4184 :
4185 : /* Return false if the function FNDECL cannot be inlined on account of its
4186 : attributes, true otherwise. */
4187 : static bool
4188 5670352 : function_attribute_inlinable_p (const_tree fndecl)
4189 : {
4190 11318482 : for (auto scoped_attributes : targetm.attribute_table)
4191 : {
4192 5670352 : const_tree a;
4193 :
4194 7466339 : for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
4195 : {
4196 1818209 : const_tree name = get_attribute_name (a);
4197 :
4198 52174204 : for (const attribute_spec &attribute : scoped_attributes->attributes)
4199 50378217 : if (is_attribute_p (attribute.name, name))
4200 22222 : return targetm.function_attribute_inlinable_p (fndecl);
4201 : }
4202 : }
4203 :
4204 : return true;
4205 : }
4206 :
4207 : /* Returns nonzero if FN is a function that does not have any
4208 : fundamental inline blocking properties. */
4209 :
4210 : bool
4211 6288726 : tree_inlinable_function_p (tree fn)
4212 : {
4213 6288726 : bool inlinable = true;
4214 6288726 : bool do_warning;
4215 6288726 : tree always_inline;
4216 :
4217 : /* If we've already decided this function shouldn't be inlined,
4218 : there's no need to check again. */
4219 6288726 : if (DECL_UNINLINABLE (fn))
4220 : return false;
4221 :
4222 : /* We only warn for functions declared `inline' by the user. */
4223 5692439 : do_warning = (opt_for_fn (fn, warn_inline)
4224 631 : && DECL_DECLARED_INLINE_P (fn)
4225 502 : && !DECL_NO_INLINE_WARNING_P (fn)
4226 5692820 : && !DECL_IN_SYSTEM_HEADER (fn));
4227 :
4228 5692439 : always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
4229 :
4230 5692439 : if (flag_no_inline
4231 95599 : && always_inline == NULL)
4232 : {
4233 22087 : if (do_warning)
4234 0 : warning (OPT_Winline, "function %q+F can never be inlined because it "
4235 : "is suppressed using %<-fno-inline%>", fn);
4236 : inlinable = false;
4237 : }
4238 :
4239 5670352 : else if (!function_attribute_inlinable_p (fn))
4240 : {
4241 0 : if (do_warning)
4242 0 : warning (OPT_Winline, "function %q+F can never be inlined because it "
4243 : "uses attributes conflicting with inlining", fn);
4244 : inlinable = false;
4245 : }
4246 :
4247 5670352 : else if (inline_forbidden_p (fn))
4248 : {
4249 : /* See if we should warn about uninlinable functions. Previously,
4250 : some of these warnings would be issued while trying to expand
4251 : the function inline, but that would cause multiple warnings
4252 : about functions that would for example call alloca. But since
4253 : this a property of the function, just one warning is enough.
4254 : As a bonus we can now give more details about the reason why a
4255 : function is not inlinable. */
4256 10292 : if (always_inline)
4257 2 : error (inline_forbidden_reason, fn);
4258 10290 : else if (do_warning)
4259 2 : warning (OPT_Winline, inline_forbidden_reason, fn);
4260 :
4261 : inlinable = false;
4262 : }
4263 :
4264 : /* Squirrel away the result so that we don't have to check again. */
4265 5692439 : DECL_UNINLINABLE (fn) = !inlinable;
4266 :
4267 5692439 : return inlinable;
4268 : }
4269 :
4270 : /* Estimate the cost of a memory move of type TYPE. Use machine dependent
4271 : word size and take possible memcpy call into account and return
4272 : cost based on whether optimizing for size or speed according to SPEED_P. */
4273 :
4274 : int
4275 337932515 : estimate_move_cost (tree type, bool ARG_UNUSED (speed_p))
4276 : {
4277 337932515 : HOST_WIDE_INT size;
4278 :
4279 337932515 : gcc_assert (!VOID_TYPE_P (type));
4280 :
4281 337932515 : if (VECTOR_TYPE_P (type))
4282 : {
4283 4103273 : scalar_mode inner = SCALAR_TYPE_MODE (TREE_TYPE (type));
4284 4103273 : machine_mode simd = targetm.vectorize.preferred_simd_mode (inner);
4285 4103273 : int orig_mode_size
4286 8206546 : = estimated_poly_value (GET_MODE_SIZE (TYPE_MODE (type)));
4287 8206546 : int simd_mode_size = estimated_poly_value (GET_MODE_SIZE (simd));
4288 4103273 : return ((orig_mode_size + simd_mode_size - 1)
4289 4103273 : / simd_mode_size);
4290 : }
4291 :
4292 333829242 : size = int_size_in_bytes (type);
4293 :
4294 333831710 : if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (speed_p))
4295 : /* Cost of a memcpy call, 3 arguments and the call. */
4296 : return 4;
4297 : else
4298 332904495 : return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
4299 : }
4300 :
4301 : /* Returns cost of operation CODE, according to WEIGHTS */
4302 :
4303 : static int
4304 466328022 : estimate_operator_cost (enum tree_code code, eni_weights *weights,
4305 : tree op1 ATTRIBUTE_UNUSED, tree op2)
4306 : {
4307 466328022 : switch (code)
4308 : {
4309 : /* These are "free" conversions, or their presumed cost
4310 : is folded into other operations. */
4311 : case RANGE_EXPR:
4312 : CASE_CONVERT:
4313 : case COMPLEX_EXPR:
4314 : case PAREN_EXPR:
4315 : case VIEW_CONVERT_EXPR:
4316 : return 0;
4317 :
4318 : /* Assign cost of 1 to usual operations.
4319 : ??? We may consider mapping RTL costs to this. */
4320 : case COND_EXPR:
4321 : case VEC_COND_EXPR:
4322 : case VEC_PERM_EXPR:
4323 :
4324 : case PLUS_EXPR:
4325 : case POINTER_PLUS_EXPR:
4326 : case POINTER_DIFF_EXPR:
4327 : case MINUS_EXPR:
4328 : case MULT_EXPR:
4329 : case MULT_HIGHPART_EXPR:
4330 :
4331 : case ADDR_SPACE_CONVERT_EXPR:
4332 : case FIXED_CONVERT_EXPR:
4333 : case FIX_TRUNC_EXPR:
4334 :
4335 : case NEGATE_EXPR:
4336 : case FLOAT_EXPR:
4337 : case MIN_EXPR:
4338 : case MAX_EXPR:
4339 : case ABS_EXPR:
4340 : case ABSU_EXPR:
4341 :
4342 : case LSHIFT_EXPR:
4343 : case RSHIFT_EXPR:
4344 : case LROTATE_EXPR:
4345 : case RROTATE_EXPR:
4346 :
4347 : case BIT_IOR_EXPR:
4348 : case BIT_XOR_EXPR:
4349 : case BIT_AND_EXPR:
4350 : case BIT_NOT_EXPR:
4351 :
4352 : case TRUTH_ANDIF_EXPR:
4353 : case TRUTH_ORIF_EXPR:
4354 : case TRUTH_AND_EXPR:
4355 : case TRUTH_OR_EXPR:
4356 : case TRUTH_XOR_EXPR:
4357 : case TRUTH_NOT_EXPR:
4358 :
4359 : case LT_EXPR:
4360 : case LE_EXPR:
4361 : case GT_EXPR:
4362 : case GE_EXPR:
4363 : case EQ_EXPR:
4364 : case NE_EXPR:
4365 : case ORDERED_EXPR:
4366 : case UNORDERED_EXPR:
4367 :
4368 : case UNLT_EXPR:
4369 : case UNLE_EXPR:
4370 : case UNGT_EXPR:
4371 : case UNGE_EXPR:
4372 : case UNEQ_EXPR:
4373 : case LTGT_EXPR:
4374 :
4375 : case CONJ_EXPR:
4376 :
4377 : case PREDECREMENT_EXPR:
4378 : case PREINCREMENT_EXPR:
4379 : case POSTDECREMENT_EXPR:
4380 : case POSTINCREMENT_EXPR:
4381 :
4382 : case REALIGN_LOAD_EXPR:
4383 :
4384 : case WIDEN_SUM_EXPR:
4385 : case WIDEN_MULT_EXPR:
4386 : case DOT_PROD_EXPR:
4387 : case SAD_EXPR:
4388 : case WIDEN_MULT_PLUS_EXPR:
4389 : case WIDEN_MULT_MINUS_EXPR:
4390 : case WIDEN_LSHIFT_EXPR:
4391 :
4392 : case VEC_WIDEN_MULT_HI_EXPR:
4393 : case VEC_WIDEN_MULT_LO_EXPR:
4394 : case VEC_WIDEN_MULT_EVEN_EXPR:
4395 : case VEC_WIDEN_MULT_ODD_EXPR:
4396 : case VEC_UNPACK_HI_EXPR:
4397 : case VEC_UNPACK_LO_EXPR:
4398 : case VEC_UNPACK_FLOAT_HI_EXPR:
4399 : case VEC_UNPACK_FLOAT_LO_EXPR:
4400 : case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
4401 : case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
4402 : case VEC_PACK_TRUNC_EXPR:
4403 : case VEC_PACK_SAT_EXPR:
4404 : case VEC_PACK_FIX_TRUNC_EXPR:
4405 : case VEC_PACK_FLOAT_EXPR:
4406 : case VEC_WIDEN_LSHIFT_HI_EXPR:
4407 : case VEC_WIDEN_LSHIFT_LO_EXPR:
4408 : case VEC_DUPLICATE_EXPR:
4409 : case VEC_SERIES_EXPR:
4410 :
4411 : return 1;
4412 :
4413 : /* Few special cases of expensive operations. This is useful
4414 : to avoid inlining on functions having too many of these. */
4415 2931199 : case TRUNC_DIV_EXPR:
4416 2931199 : case CEIL_DIV_EXPR:
4417 2931199 : case FLOOR_DIV_EXPR:
4418 2931199 : case ROUND_DIV_EXPR:
4419 2931199 : case EXACT_DIV_EXPR:
4420 2931199 : case TRUNC_MOD_EXPR:
4421 2931199 : case CEIL_MOD_EXPR:
4422 2931199 : case FLOOR_MOD_EXPR:
4423 2931199 : case ROUND_MOD_EXPR:
4424 2931199 : case RDIV_EXPR:
4425 2931199 : if (TREE_CODE (op2) != INTEGER_CST)
4426 1323159 : return weights->div_mod_cost;
4427 : return 1;
4428 :
4429 : /* Bit-field insertion needs several shift and mask operations. */
4430 : case BIT_INSERT_EXPR:
4431 : return 3;
4432 :
4433 190216810 : default:
4434 : /* We expect a copy assignment with no operator. */
4435 190216810 : gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
4436 : return 0;
4437 : }
4438 : }
4439 :
4440 :
4441 : /* Estimate number of instructions that will be created by expanding
4442 : the statements in the statement sequence STMTS.
4443 : WEIGHTS contains weights attributed to various constructs. */
4444 :
4445 : int
4446 232343 : estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
4447 : {
4448 232343 : int cost;
4449 232343 : gimple_stmt_iterator gsi;
4450 :
4451 232343 : cost = 0;
4452 615360 : for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
4453 383017 : cost += estimate_num_insns (gsi_stmt (gsi), weights);
4454 :
4455 232343 : return cost;
4456 : }
4457 :
4458 :
4459 : /* Estimate number of instructions that will be created by expanding STMT.
4460 : WEIGHTS contains weights attributed to various constructs. */
4461 :
4462 : int
4463 628923845 : estimate_num_insns (gimple *stmt, eni_weights *weights)
4464 : {
4465 628923845 : unsigned cost, i;
4466 628923845 : enum gimple_code code = gimple_code (stmt);
4467 628923845 : tree lhs;
4468 628923845 : tree rhs;
4469 :
4470 628923845 : switch (code)
4471 : {
4472 357093721 : case GIMPLE_ASSIGN:
4473 : /* Try to estimate the cost of assignments. We have three cases to
4474 : deal with:
4475 : 1) Simple assignments to registers;
4476 : 2) Stores to things that must live in memory. This includes
4477 : "normal" stores to scalars, but also assignments of large
4478 : structures, or constructors of big arrays;
4479 :
4480 : Let us look at the first two cases, assuming we have "a = b + C":
4481 : <GIMPLE_ASSIGN <var_decl "a">
4482 : <plus_expr <var_decl "b"> <constant C>>
4483 : If "a" is a GIMPLE register, the assignment to it is free on almost
4484 : any target, because "a" usually ends up in a real register. Hence
4485 : the only cost of this expression comes from the PLUS_EXPR, and we
4486 : can ignore the GIMPLE_ASSIGN.
4487 : If "a" is not a GIMPLE register, the assignment to "a" will most
4488 : likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
4489 : of moving something into "a", which we compute using the function
4490 : estimate_move_cost. */
4491 357093721 : if (gimple_clobber_p (stmt))
4492 : return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
4493 :
4494 339233208 : lhs = gimple_assign_lhs (stmt);
4495 339233208 : rhs = gimple_assign_rhs1 (stmt);
4496 :
4497 339233208 : cost = 0;
4498 :
4499 : /* Account for the cost of moving to / from memory. */
4500 339233208 : if (gimple_store_p (stmt))
4501 77425993 : cost += estimate_move_cost (TREE_TYPE (lhs), weights->time_based);
4502 339233208 : if (gimple_assign_load_p (stmt))
4503 90579596 : cost += estimate_move_cost (TREE_TYPE (rhs), weights->time_based);
4504 :
4505 446878706 : cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
4506 : gimple_assign_rhs1 (stmt),
4507 339233208 : get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
4508 : == GIMPLE_BINARY_RHS
4509 107645498 : ? gimple_assign_rhs2 (stmt) : NULL);
4510 339233208 : break;
4511 :
4512 127094814 : case GIMPLE_COND:
4513 127094814 : cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
4514 : gimple_op (stmt, 0),
4515 : gimple_op (stmt, 1));
4516 127094814 : break;
4517 :
4518 846488 : case GIMPLE_SWITCH:
4519 846488 : {
4520 846488 : gswitch *switch_stmt = as_a <gswitch *> (stmt);
4521 : /* Take into account cost of the switch + guess 2 conditional jumps for
4522 : each case label.
4523 :
4524 : TODO: once the switch expansion logic is sufficiently separated, we can
4525 : do better job on estimating cost of the switch. */
4526 846488 : if (weights->time_based)
4527 125781 : cost = floor_log2 (gimple_switch_num_labels (switch_stmt)) * 2;
4528 : else
4529 720707 : cost = gimple_switch_num_labels (switch_stmt) * 2;
4530 : }
4531 : break;
4532 :
4533 72096339 : case GIMPLE_CALL:
4534 72096339 : {
4535 72096339 : tree decl;
4536 :
4537 72096339 : if (gimple_call_internal_p (stmt))
4538 : return 0;
4539 69290113 : else if ((decl = gimple_call_fndecl (stmt))
4540 69290113 : && fndecl_built_in_p (decl))
4541 : {
4542 : /* Do not special case builtins where we see the body.
4543 : This just confuse inliner. */
4544 17932695 : struct cgraph_node *node;
4545 17932695 : if ((node = cgraph_node::get (decl))
4546 17932695 : && node->definition)
4547 : ;
4548 : /* For buitins that are likely expanded to nothing or
4549 : inlined do not account operand costs. */
4550 17902744 : else if (is_simple_builtin (decl))
4551 : return 0;
4552 15267111 : else if (is_inexpensive_builtin (decl))
4553 1984562 : return weights->target_builtin_call_cost;
4554 13282549 : else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4555 : {
4556 : /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
4557 : specialize the cheap expansion we do here.
4558 : ??? This asks for a more general solution. */
4559 13045833 : switch (DECL_FUNCTION_CODE (decl))
4560 : {
4561 9154 : case BUILT_IN_POW:
4562 9154 : case BUILT_IN_POWF:
4563 9154 : case BUILT_IN_POWL:
4564 9154 : if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
4565 13023 : && (real_equal
4566 3869 : (&TREE_REAL_CST (gimple_call_arg (stmt, 1)),
4567 : &dconst2)))
4568 565 : return estimate_operator_cost
4569 565 : (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
4570 565 : gimple_call_arg (stmt, 0));
4571 : break;
4572 :
4573 : default:
4574 : break;
4575 : }
4576 : }
4577 : }
4578 :
4579 64669353 : cost = decl ? weights->call_cost : weights->indirect_call_cost;
4580 64669353 : if (gimple_call_lhs (stmt))
4581 25471670 : cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)),
4582 25471670 : weights->time_based);
4583 195004343 : for (i = 0; i < gimple_call_num_args (stmt); i++)
4584 : {
4585 130334990 : tree arg = gimple_call_arg (stmt, i);
4586 130334990 : cost += estimate_move_cost (TREE_TYPE (arg),
4587 130334990 : weights->time_based);
4588 : }
4589 : break;
4590 : }
4591 :
4592 17526280 : case GIMPLE_RETURN:
4593 17526280 : return weights->return_cost;
4594 :
4595 : case GIMPLE_GOTO:
4596 : case GIMPLE_LABEL:
4597 : case GIMPLE_NOP:
4598 : case GIMPLE_PHI:
4599 : case GIMPLE_PREDICT:
4600 : case GIMPLE_DEBUG:
4601 : return 0;
4602 :
4603 761746 : case GIMPLE_ASM:
4604 761746 : {
4605 761746 : int count = asm_str_count (gimple_asm_string (as_a <gasm *> (stmt)));
4606 : /* 1000 means infinity. This avoids overflows later
4607 : with very long asm statements. */
4608 761746 : if (count > 1000)
4609 : count = 1000;
4610 : /* If this asm is asm inline, count anything as minimum size. */
4611 761746 : if (gimple_asm_inline_p (as_a <gasm *> (stmt)))
4612 662 : count = MIN (1, count);
4613 761746 : return MAX (1, count);
4614 : }
4615 :
4616 : case GIMPLE_RESX:
4617 : /* This is either going to be an external function call with one
4618 : argument, or two register copy statements plus a goto. */
4619 : return 2;
4620 :
4621 14088 : case GIMPLE_EH_DISPATCH:
4622 : /* ??? This is going to turn into a switch statement. Ideally
4623 : we'd have a look at the eh region and estimate the number of
4624 : edges involved. */
4625 14088 : return 10;
4626 :
4627 0 : case GIMPLE_BIND:
4628 0 : return estimate_num_insns_seq (
4629 0 : gimple_bind_body (as_a <gbind *> (stmt)),
4630 0 : weights);
4631 :
4632 0 : case GIMPLE_EH_FILTER:
4633 0 : return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
4634 :
4635 8292 : case GIMPLE_CATCH:
4636 8292 : return estimate_num_insns_seq (gimple_catch_handler (
4637 8292 : as_a <gcatch *> (stmt)),
4638 8292 : weights);
4639 :
4640 8297 : case GIMPLE_TRY:
4641 8297 : return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
4642 8297 : + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
4643 :
4644 : /* OMP directives are generally very expensive. */
4645 :
4646 : case GIMPLE_OMP_RETURN:
4647 : case GIMPLE_OMP_SECTIONS_SWITCH:
4648 : case GIMPLE_OMP_ATOMIC_STORE:
4649 : case GIMPLE_OMP_CONTINUE:
4650 : /* ...except these, which are cheap. */
4651 : return 0;
4652 :
4653 0 : case GIMPLE_OMP_ATOMIC_LOAD:
4654 0 : return weights->omp_cost;
4655 :
4656 0 : case GIMPLE_OMP_FOR:
4657 0 : return (weights->omp_cost
4658 0 : + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
4659 0 : + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
4660 :
4661 0 : case GIMPLE_OMP_PARALLEL:
4662 0 : case GIMPLE_OMP_TASK:
4663 0 : case GIMPLE_OMP_CRITICAL:
4664 0 : case GIMPLE_OMP_MASTER:
4665 0 : case GIMPLE_OMP_MASKED:
4666 0 : case GIMPLE_OMP_SCOPE:
4667 0 : case GIMPLE_OMP_DISPATCH:
4668 0 : case GIMPLE_OMP_TASKGROUP:
4669 0 : case GIMPLE_OMP_ORDERED:
4670 0 : case GIMPLE_OMP_SCAN:
4671 0 : case GIMPLE_OMP_SECTION:
4672 0 : case GIMPLE_OMP_SECTIONS:
4673 0 : case GIMPLE_OMP_STRUCTURED_BLOCK:
4674 0 : case GIMPLE_OMP_SINGLE:
4675 0 : case GIMPLE_OMP_TARGET:
4676 0 : case GIMPLE_OMP_TEAMS:
4677 0 : return (weights->omp_cost
4678 0 : + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
4679 :
4680 82 : case GIMPLE_TRANSACTION:
4681 82 : return (weights->tm_cost
4682 82 : + estimate_num_insns_seq (gimple_transaction_body (
4683 82 : as_a <gtransaction *> (stmt)),
4684 82 : weights));
4685 :
4686 0 : default:
4687 0 : gcc_unreachable ();
4688 : }
4689 :
4690 531843863 : return cost;
4691 : }
4692 :
4693 : /* Estimate number of instructions that will be created by expanding
4694 : function FNDECL. WEIGHTS contains weights attributed to various
4695 : constructs. */
4696 :
4697 : int
4698 0 : estimate_num_insns_fn (tree fndecl, eni_weights *weights)
4699 : {
4700 0 : struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
4701 0 : gimple_stmt_iterator bsi;
4702 0 : basic_block bb;
4703 0 : int n = 0;
4704 :
4705 0 : gcc_assert (my_function && my_function->cfg);
4706 0 : FOR_EACH_BB_FN (bb, my_function)
4707 : {
4708 0 : for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
4709 0 : n += estimate_num_insns (gsi_stmt (bsi), weights);
4710 : }
4711 :
4712 0 : return n;
4713 : }
4714 :
4715 :
4716 : /* Initializes weights used by estimate_num_insns. */
4717 :
4718 : void
4719 280826 : init_inline_once (void)
4720 : {
4721 280826 : eni_size_weights.call_cost = 1;
4722 280826 : eni_size_weights.indirect_call_cost = 3;
4723 280826 : eni_size_weights.target_builtin_call_cost = 1;
4724 280826 : eni_size_weights.div_mod_cost = 1;
4725 280826 : eni_size_weights.omp_cost = 40;
4726 280826 : eni_size_weights.tm_cost = 10;
4727 280826 : eni_size_weights.time_based = false;
4728 280826 : eni_size_weights.return_cost = 1;
4729 :
4730 : /* Estimating time for call is difficult, since we have no idea what the
4731 : called function does. In the current uses of eni_time_weights,
4732 : underestimating the cost does less harm than overestimating it, so
4733 : we choose a rather small value here. */
4734 280826 : eni_time_weights.call_cost = 10;
4735 280826 : eni_time_weights.indirect_call_cost = 15;
4736 280826 : eni_time_weights.target_builtin_call_cost = 1;
4737 280826 : eni_time_weights.div_mod_cost = 10;
4738 280826 : eni_time_weights.omp_cost = 40;
4739 280826 : eni_time_weights.tm_cost = 40;
4740 280826 : eni_time_weights.time_based = true;
4741 280826 : eni_time_weights.return_cost = 2;
4742 280826 : }
4743 :
4744 :
4745 : /* Install new lexical TREE_BLOCK underneath 'current_block'. */
4746 :
4747 : static void
4748 27020275 : prepend_lexical_block (tree current_block, tree new_block)
4749 : {
4750 27020275 : BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
4751 27020275 : BLOCK_SUBBLOCKS (current_block) = new_block;
4752 27020275 : BLOCK_SUPERCONTEXT (new_block) = current_block;
4753 27020275 : }
4754 :
4755 : /* Add local variables from CALLEE to CALLER. */
4756 :
4757 : static inline void
4758 4433622 : add_local_variables (struct function *callee, struct function *caller,
4759 : copy_body_data *id)
4760 : {
4761 4433622 : tree var;
4762 4433622 : unsigned ix;
4763 :
4764 12030668 : FOR_EACH_LOCAL_DECL (callee, ix, var)
4765 3944719 : if (!can_be_nonlocal (var, id))
4766 : {
4767 3860829 : tree new_var = remap_decl (var, id);
4768 :
4769 : /* Remap debug-expressions. */
4770 3860829 : if (VAR_P (new_var)
4771 3860829 : && DECL_HAS_DEBUG_EXPR_P (var)
4772 4108695 : && new_var != var)
4773 : {
4774 247866 : tree tem = DECL_DEBUG_EXPR (var);
4775 247866 : bool old_regimplify = id->regimplify;
4776 247866 : id->remapping_type_depth++;
4777 247866 : walk_tree (&tem, copy_tree_body_r, id, NULL);
4778 247866 : id->remapping_type_depth--;
4779 247866 : id->regimplify = old_regimplify;
4780 247866 : SET_DECL_DEBUG_EXPR (new_var, tem);
4781 247866 : DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
4782 : }
4783 3860829 : add_local_decl (caller, new_var);
4784 : }
4785 4433622 : }
4786 :
4787 : /* Add to BINDINGS a debug stmt resetting SRCVAR if inlining might
4788 : have brought in or introduced any debug stmts for SRCVAR. */
4789 :
4790 : static inline void
4791 8997586 : reset_debug_binding (copy_body_data *id, tree srcvar, gimple_seq *bindings)
4792 : {
4793 8997586 : tree *remappedvarp = id->decl_map->get (srcvar);
4794 :
4795 8997586 : if (!remappedvarp)
4796 : return;
4797 :
4798 8944926 : if (!VAR_P (*remappedvarp))
4799 : return;
4800 :
4801 8813638 : if (*remappedvarp == id->retvar)
4802 : return;
4803 :
4804 8813638 : tree tvar = target_for_debug_bind (*remappedvarp);
4805 8813638 : if (!tvar)
4806 : return;
4807 :
4808 14486748 : gdebug *stmt = gimple_build_debug_bind (tvar, NULL_TREE,
4809 7243374 : id->call_stmt);
4810 7243374 : gimple_seq_add_stmt (bindings, stmt);
4811 : }
4812 :
4813 : /* For each inlined variable for which we may have debug bind stmts,
4814 : add before GSI a final debug stmt resetting it, marking the end of
4815 : its life, so that var-tracking knows it doesn't have to compute
4816 : further locations for it. */
4817 :
4818 : static inline void
4819 4299767 : reset_debug_bindings (copy_body_data *id, gimple_stmt_iterator gsi)
4820 : {
4821 4299767 : tree var;
4822 4299767 : unsigned ix;
4823 4299767 : gimple_seq bindings = NULL;
4824 :
4825 4299767 : if (!gimple_in_ssa_p (id->src_cfun))
4826 539138 : return;
4827 :
4828 4299767 : if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
4829 : return;
4830 :
4831 3760629 : for (var = DECL_ARGUMENTS (id->src_fn);
4832 10267059 : var; var = DECL_CHAIN (var))
4833 6506430 : reset_debug_binding (id, var, &bindings);
4834 :
4835 9336596 : FOR_EACH_LOCAL_DECL (id->src_cfun, ix, var)
4836 2491156 : reset_debug_binding (id, var, &bindings);
4837 :
4838 3760629 : gsi_insert_seq_before_without_update (&gsi, bindings, GSI_SAME_STMT);
4839 : }
4840 :
4841 : /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
4842 :
4843 : static bool
4844 14962258 : expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id,
4845 : bitmap to_purge)
4846 : {
4847 14962258 : tree use_retvar;
4848 14962258 : tree fn;
4849 14962258 : hash_map<tree, tree> *dst;
4850 14962258 : hash_map<tree, tree> *st = NULL;
4851 14962258 : tree return_slot;
4852 14962258 : tree modify_dest;
4853 14962258 : struct cgraph_edge *cg_edge;
4854 14962258 : cgraph_inline_failed_t reason;
4855 14962258 : basic_block return_block;
4856 14962258 : edge e;
4857 14962258 : gimple_stmt_iterator gsi, stmt_gsi;
4858 14962258 : bool successfully_inlined = false;
4859 14962258 : bool purge_dead_abnormal_edges;
4860 14962258 : gcall *call_stmt;
4861 14962258 : unsigned int prop_mask, src_properties;
4862 14962258 : struct function *dst_cfun;
4863 14962258 : tree simduid;
4864 14962258 : use_operand_p use;
4865 14962258 : gimple *simtenter_stmt = NULL;
4866 14962258 : vec<tree> *simtvars_save;
4867 14962258 : tree save_stack = NULL_TREE;
4868 :
4869 : /* The gimplifier uses input_location in too many places, such as
4870 : internal_get_tmp_var (). */
4871 14962258 : location_t saved_location = input_location;
4872 14962258 : input_location = gimple_location (stmt);
4873 :
4874 : /* From here on, we're only interested in CALL_EXPRs. */
4875 14962258 : call_stmt = dyn_cast <gcall *> (stmt);
4876 14962258 : if (!call_stmt)
4877 0 : goto egress;
4878 :
4879 14962258 : cg_edge = id->dst_node->get_edge (stmt);
4880 : /* Edge should exist and speculations should be resolved at this
4881 : stage. */
4882 14962258 : gcc_checking_assert (cg_edge && !cg_edge->speculative);
4883 : /* First, see if we can figure out what function is being called.
4884 : If we cannot, then there is no hope of inlining the function. */
4885 14962258 : if (cg_edge->indirect_unknown_callee)
4886 239191 : goto egress;
4887 14723067 : fn = cg_edge->callee->decl;
4888 14723067 : gcc_checking_assert (fn);
4889 :
4890 : /* If FN is a declaration of a function in a nested scope that was
4891 : globally declared inline, we don't set its DECL_INITIAL.
4892 : However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4893 : C++ front-end uses it for cdtors to refer to their internal
4894 : declarations, that are not real functions. Fortunately those
4895 : don't have trees to be saved, so we can tell by checking their
4896 : gimple_body. */
4897 14723067 : if (!DECL_INITIAL (fn)
4898 6641770 : && DECL_ABSTRACT_ORIGIN (fn)
4899 14848582 : && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
4900 291 : fn = DECL_ABSTRACT_ORIGIN (fn);
4901 :
4902 : /* Don't try to inline functions that are not well-suited to inlining. */
4903 14723067 : if (cg_edge->inline_failed)
4904 : {
4905 10423087 : reason = cg_edge->inline_failed;
4906 : /* If this call was originally indirect, we do not want to emit any
4907 : inlining related warnings or sorry messages because there are no
4908 : guarantees regarding those. */
4909 10423087 : if (cg_edge->indirect_inlining_edge)
4910 1926 : goto egress;
4911 :
4912 10421161 : if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
4913 : /* For extern inline functions that get redefined we always
4914 : silently ignored always_inline flag. Better behavior would
4915 : be to be able to keep both bodies and use extern inline body
4916 : for inlining, but we can't do that because frontends overwrite
4917 : the body. */
4918 53 : && !cg_edge->callee->redefined_extern_inline
4919 : /* During early inline pass, report only when optimization is
4920 : not turned on. */
4921 53 : && (symtab->global_info_ready
4922 51 : || !optimize
4923 41 : || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
4924 : /* PR 20090218-1_0.c. Body can be provided by another module. */
4925 10421195 : && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
4926 : {
4927 27 : error ("inlining failed in call to %<always_inline%> %q+F: %s", fn,
4928 : cgraph_inline_failed_string (reason));
4929 27 : if (gimple_location (stmt) != UNKNOWN_LOCATION)
4930 27 : inform (gimple_location (stmt), "called from here");
4931 0 : else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4932 0 : inform (DECL_SOURCE_LOCATION (cfun->decl),
4933 : "called from this function");
4934 : }
4935 10421134 : else if (opt_for_fn (fn, warn_inline)
4936 325 : && DECL_DECLARED_INLINE_P (fn)
4937 100 : && !DECL_NO_INLINE_WARNING_P (fn)
4938 71 : && !DECL_IN_SYSTEM_HEADER (fn)
4939 71 : && reason != CIF_UNSPECIFIED
4940 71 : && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
4941 : /* Do not warn about not inlined recursive calls. */
4942 71 : && !cg_edge->recursive_p ()
4943 : /* Avoid warnings during early inline pass. */
4944 10421205 : && symtab->global_info_ready)
4945 : {
4946 8 : auto_diagnostic_group d;
4947 8 : if (warning (OPT_Winline, "inlining failed in call to %q+F: %s",
4948 : fn, _(cgraph_inline_failed_string (reason))))
4949 : {
4950 8 : if (gimple_location (stmt) != UNKNOWN_LOCATION)
4951 8 : inform (gimple_location (stmt), "called from here");
4952 0 : else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4953 0 : inform (DECL_SOURCE_LOCATION (cfun->decl),
4954 : "called from this function");
4955 : }
4956 8 : }
4957 10421161 : goto egress;
4958 : }
4959 4299980 : id->src_node = cg_edge->callee;
4960 :
4961 : /* If callee is thunk, all we need is to adjust the THIS pointer
4962 : and redirect to function being thunked. */
4963 4299980 : if (id->src_node->thunk)
4964 : {
4965 213 : cgraph_edge *edge;
4966 213 : tree virtual_offset = NULL;
4967 213 : profile_count count = cg_edge->count;
4968 213 : tree op;
4969 213 : gimple_stmt_iterator iter = gsi_for_stmt (stmt);
4970 213 : thunk_info *info = thunk_info::get (id->src_node);
4971 :
4972 213 : cgraph_edge::remove (cg_edge);
4973 426 : edge = id->src_node->callees->clone (id->dst_node, call_stmt,
4974 : gimple_uid (stmt),
4975 : profile_count::one (),
4976 : profile_count::one (),
4977 : true);
4978 213 : edge->count = count;
4979 213 : if (info->virtual_offset_p)
4980 7 : virtual_offset = size_int (info->virtual_value);
4981 213 : op = create_tmp_reg_fn (cfun, TREE_TYPE (gimple_call_arg (stmt, 0)),
4982 : NULL);
4983 213 : gsi_insert_before (&iter, gimple_build_assign (op,
4984 : gimple_call_arg (stmt, 0)),
4985 : GSI_NEW_STMT);
4986 213 : gcc_assert (info->this_adjusting);
4987 213 : op = thunk_adjust (&iter, op, 1, info->fixed_offset,
4988 : virtual_offset, info->indirect_offset);
4989 :
4990 213 : gimple_call_set_arg (stmt, 0, op);
4991 213 : gimple_call_set_fndecl (stmt, edge->callee->decl);
4992 213 : update_stmt (stmt);
4993 213 : id->src_node->remove ();
4994 213 : successfully_inlined = expand_call_inline (bb, stmt, id, to_purge);
4995 213 : maybe_remove_unused_call_args (cfun, stmt);
4996 : /* This used to return true even though we do fail to inline in
4997 : some cases. See PR98525. */
4998 213 : goto egress;
4999 : }
5000 4299767 : fn = cg_edge->callee->decl;
5001 4299767 : cg_edge->callee->get_untransformed_body ();
5002 :
5003 4299767 : if (flag_checking && cg_edge->callee->decl != id->dst_node->decl)
5004 4299758 : cg_edge->callee->verify ();
5005 :
5006 : /* We will be inlining this callee. */
5007 4299767 : id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
5008 :
5009 : /* Update the callers EH personality. */
5010 4299767 : if (DECL_FUNCTION_PERSONALITY (fn))
5011 271826 : DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
5012 135913 : = DECL_FUNCTION_PERSONALITY (fn);
5013 :
5014 : /* Split the block before the GIMPLE_CALL. */
5015 4299767 : stmt_gsi = gsi_for_stmt (stmt);
5016 4299767 : gsi_prev (&stmt_gsi);
5017 4299767 : e = split_block (bb, gsi_end_p (stmt_gsi) ? NULL : gsi_stmt (stmt_gsi));
5018 4299767 : bb = e->src;
5019 4299767 : return_block = e->dest;
5020 4299767 : remove_edge (e);
5021 :
5022 : /* If the GIMPLE_CALL was in the last statement of BB, it may have
5023 : been the source of abnormal edges. In this case, schedule
5024 : the removal of dead abnormal edges. */
5025 4299767 : gsi = gsi_start_bb (return_block);
5026 4299767 : gsi_next (&gsi);
5027 4299767 : purge_dead_abnormal_edges = gsi_end_p (gsi);
5028 :
5029 4299767 : stmt_gsi = gsi_start_bb (return_block);
5030 :
5031 : /* Build a block containing code to initialize the arguments, the
5032 : actual inline expansion of the body, and a label for the return
5033 : statements within the function to jump to. The type of the
5034 : statement expression is the return type of the function call.
5035 : ??? If the call does not have an associated block then we will
5036 : remap all callee blocks to NULL, effectively dropping most of
5037 : its debug information. This should only happen for calls to
5038 : artificial decls inserted by the compiler itself. We need to
5039 : either link the inlined blocks into the caller block tree or
5040 : not refer to them in any way to not break GC for locations. */
5041 4299767 : if (tree block = gimple_block (stmt))
5042 : {
5043 : /* We do want to assign a not UNKNOWN_LOCATION BLOCK_SOURCE_LOCATION
5044 : to make inlined_function_outer_scope_p return true on this BLOCK. */
5045 4273047 : location_t loc = LOCATION_LOCUS (gimple_location (stmt));
5046 4273047 : if (loc == UNKNOWN_LOCATION)
5047 205543 : loc = LOCATION_LOCUS (DECL_SOURCE_LOCATION (fn));
5048 205543 : if (loc == UNKNOWN_LOCATION)
5049 : loc = BUILTINS_LOCATION;
5050 4273047 : if (has_discriminator (gimple_location (stmt)))
5051 1432092 : loc = location_with_discriminator
5052 1432092 : (loc, get_discriminator_from_loc (gimple_location (stmt)));
5053 4273047 : id->block = make_node (BLOCK);
5054 7332613 : BLOCK_ABSTRACT_ORIGIN (id->block) = DECL_ORIGIN (fn);
5055 4273047 : BLOCK_SOURCE_LOCATION (id->block) = loc;
5056 4273047 : prepend_lexical_block (block, id->block);
5057 : }
5058 :
5059 : /* Local declarations will be replaced by their equivalents in this map. */
5060 4299767 : st = id->decl_map;
5061 4299767 : id->decl_map = new hash_map<tree, tree>;
5062 4299767 : dst = id->debug_map;
5063 4299767 : id->debug_map = NULL;
5064 4299767 : if (flag_stack_reuse != SR_NONE)
5065 4293479 : id->add_clobbers_to_eh_landing_pads = last_basic_block_for_fn (cfun);
5066 :
5067 : /* Record the function we are about to inline. */
5068 4299767 : id->src_fn = fn;
5069 4299767 : id->src_cfun = DECL_STRUCT_FUNCTION (fn);
5070 4299767 : id->reset_location = DECL_IGNORED_P (fn);
5071 4299767 : id->call_stmt = call_stmt;
5072 4299767 : cfun->cfg->full_profile &= id->src_cfun->cfg->full_profile;
5073 :
5074 : /* When inlining into an OpenMP SIMD-on-SIMT loop, arrange for new automatic
5075 : variables to be added to IFN_GOMP_SIMT_ENTER argument list. */
5076 4299767 : dst_cfun = DECL_STRUCT_FUNCTION (id->dst_fn);
5077 4299767 : simtvars_save = id->dst_simt_vars;
5078 4299767 : if (!(dst_cfun->curr_properties & PROP_gimple_lomp_dev)
5079 20464 : && (simduid = bb->loop_father->simduid) != NULL_TREE
5080 0 : && (simduid = ssa_default_def (dst_cfun, simduid)) != NULL_TREE
5081 0 : && single_imm_use (simduid, &use, &simtenter_stmt)
5082 0 : && is_gimple_call (simtenter_stmt)
5083 4299767 : && gimple_call_internal_p (simtenter_stmt, IFN_GOMP_SIMT_ENTER))
5084 0 : vec_alloc (id->dst_simt_vars, 0);
5085 : else
5086 4299767 : id->dst_simt_vars = NULL;
5087 :
5088 4299767 : if (profile_status_for_fn (id->src_cfun) == PROFILE_ABSENT)
5089 28823 : profile_status_for_fn (dst_cfun) = PROFILE_ABSENT;
5090 :
5091 : /* If the src function contains an IFN_VA_ARG, then so will the dst
5092 : function after inlining. Likewise for IFN_GOMP_USE_SIMT. */
5093 4299767 : prop_mask = PROP_gimple_lva | PROP_gimple_lomp_dev;
5094 4299767 : src_properties = id->src_cfun->curr_properties & prop_mask;
5095 4299767 : if (src_properties != prop_mask)
5096 1047 : dst_cfun->curr_properties &= src_properties | ~prop_mask;
5097 4299767 : dst_cfun->calls_eh_return |= id->src_cfun->calls_eh_return;
5098 4299767 : id->dst_node->has_omp_variant_constructs
5099 4299767 : |= id->src_node->has_omp_variant_constructs;
5100 :
5101 4299767 : gcc_assert (!id->src_cfun->after_inlining);
5102 :
5103 4299767 : id->entry_bb = bb;
5104 4299767 : if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
5105 : {
5106 1102 : gimple_stmt_iterator si = gsi_last_bb (bb);
5107 1102 : gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
5108 : NOT_TAKEN),
5109 : GSI_NEW_STMT);
5110 : }
5111 4299767 : initialize_inlined_parameters (id, stmt, fn, bb);
5112 3760647 : if (debug_nonbind_markers_p && debug_inline_points && id->block
5113 8047795 : && inlined_function_outer_scope_p (id->block))
5114 : {
5115 3748028 : gimple_stmt_iterator si = gsi_last_bb (bb);
5116 3748028 : gsi_insert_after (&si, gimple_build_debug_inline_entry
5117 3748028 : (id->block, DECL_SOURCE_LOCATION (id->src_fn)),
5118 : GSI_NEW_STMT);
5119 : }
5120 :
5121 : /* If function to be inlined calls alloca, wrap the inlined function
5122 : in between save_stack = __builtin_stack_save (); and
5123 : __builtin_stack_restore (save_stack); calls. */
5124 4299767 : if (id->src_cfun->calls_alloca && !gimple_call_noreturn_p (stmt))
5125 : /* Don't do this for VLA allocations though, just for user alloca
5126 : calls. */
5127 5255 : for (struct cgraph_edge *e = id->src_node->callees; e; e = e->next_callee)
5128 4851 : if (gimple_maybe_alloca_call_p (e->call_stmt)
5129 4851 : && !gimple_call_alloca_for_var_p (e->call_stmt))
5130 : {
5131 88 : tree fn = builtin_decl_implicit (BUILT_IN_STACK_SAVE);
5132 88 : gcall *call = gimple_build_call (fn, 0);
5133 88 : save_stack = make_ssa_name (ptr_type_node);
5134 88 : gimple_call_set_lhs (call, save_stack);
5135 88 : gimple_stmt_iterator si = gsi_last_bb (bb);
5136 88 : gsi_insert_after (&si, call, GSI_NEW_STMT);
5137 88 : struct cgraph_node *dest = cgraph_node::get_create (fn);
5138 88 : id->dst_node->create_edge (dest, call, bb->count)->inline_failed
5139 88 : = CIF_BODY_NOT_AVAILABLE;
5140 88 : break;
5141 : }
5142 :
5143 4299767 : if (DECL_INITIAL (fn))
5144 : {
5145 4299767 : if (gimple_block (stmt))
5146 : {
5147 4273047 : tree *var;
5148 :
5149 4273047 : prepend_lexical_block (id->block,
5150 4273047 : remap_blocks (DECL_INITIAL (fn), id));
5151 4273047 : gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
5152 : && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
5153 : == NULL_TREE));
5154 : /* Move vars for PARM_DECLs from DECL_INITIAL block to id->block,
5155 : otherwise for DWARF DW_TAG_formal_parameter will not be children of
5156 : DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
5157 : under it. The parameters can be then evaluated in the debugger,
5158 : but don't show in backtraces. */
5159 6216146 : for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
5160 1943099 : if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
5161 : {
5162 486926 : tree v = *var;
5163 486926 : *var = TREE_CHAIN (v);
5164 486926 : TREE_CHAIN (v) = BLOCK_VARS (id->block);
5165 486926 : BLOCK_VARS (id->block) = v;
5166 : }
5167 : else
5168 1456173 : var = &TREE_CHAIN (*var);
5169 : }
5170 : else
5171 26720 : remap_blocks_to_null (DECL_INITIAL (fn), id);
5172 : }
5173 :
5174 : /* Return statements in the function body will be replaced by jumps
5175 : to the RET_LABEL. */
5176 4299767 : gcc_assert (DECL_INITIAL (fn));
5177 4299767 : gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
5178 :
5179 : /* Find the LHS to which the result of this call is assigned. */
5180 4299767 : return_slot = NULL;
5181 4299767 : if (gimple_call_lhs (stmt))
5182 : {
5183 2188030 : modify_dest = gimple_call_lhs (stmt);
5184 :
5185 : /* The function which we are inlining might not return a value,
5186 : in which case we should issue a warning that the function
5187 : does not return a value. In that case the optimizers will
5188 : see that the variable to which the value is assigned was not
5189 : initialized. We do not want to issue a warning about that
5190 : uninitialized variable. */
5191 2188030 : if (DECL_P (modify_dest))
5192 393206 : suppress_warning (modify_dest, OPT_Wuninitialized);
5193 :
5194 : /* If we have a return slot, we can assign it the result directly,
5195 : except in the case where it is a global variable that is only
5196 : written to because, the callee being permitted to read or take
5197 : the address of its DECL_RESULT, this could invalidate the flag
5198 : on the global variable; instead we preventively remove the store,
5199 : which would have happened later if the call was not inlined. */
5200 2188030 : if (gimple_call_return_slot_opt_p (call_stmt))
5201 : {
5202 127944 : tree base = get_base_address (modify_dest);
5203 :
5204 127944 : if (VAR_P (base)
5205 105707 : && (TREE_STATIC (base) || DECL_EXTERNAL (base))
5206 128004 : && varpool_node::get (base)->writeonly)
5207 : return_slot = NULL;
5208 : else
5209 : return_slot = modify_dest;
5210 :
5211 : modify_dest = NULL;
5212 : }
5213 : }
5214 : else
5215 : modify_dest = NULL;
5216 :
5217 : /* If we are inlining a call to the C++ operator new, we don't want
5218 : to use type based alias analysis on the return value. Otherwise
5219 : we may get confused if the compiler sees that the inlined new
5220 : function returns a pointer which was just deleted. See bug
5221 : 33407. */
5222 4299767 : if (DECL_IS_OPERATOR_NEW_P (fn))
5223 : {
5224 19070 : return_slot = NULL;
5225 19070 : modify_dest = NULL;
5226 : }
5227 :
5228 : /* Declare the return variable for the function. */
5229 4299767 : use_retvar = declare_return_variable (id, return_slot, modify_dest, bb);
5230 :
5231 : /* Add local vars in this inlined callee to caller. */
5232 4299767 : add_local_variables (id->src_cfun, cfun, id);
5233 :
5234 4299767 : if (dump_enabled_p ())
5235 : {
5236 678 : char buf[128];
5237 678 : snprintf (buf, sizeof(buf), "%4.2f",
5238 678 : cg_edge->sreal_frequency ().to_double ());
5239 678 : dump_printf_loc (MSG_NOTE | MSG_PRIORITY_INTERNALS,
5240 678 : call_stmt,
5241 : "Inlining %C to %C with frequency %s\n",
5242 : id->src_node, id->dst_node, buf);
5243 678 : if (dump_file && (dump_flags & TDF_DETAILS))
5244 : {
5245 199 : id->src_node->dump (dump_file);
5246 199 : id->dst_node->dump (dump_file);
5247 : }
5248 : }
5249 :
5250 : /* This is it. Duplicate the callee body. Assume callee is
5251 : pre-gimplified. Note that we must not alter the caller
5252 : function in any way before this point, as this CALL_EXPR may be
5253 : a self-referential call; if we're calling ourselves, we need to
5254 : duplicate our body before altering anything. */
5255 4299767 : copy_body (id, bb, return_block, NULL);
5256 :
5257 4299767 : reset_debug_bindings (id, stmt_gsi);
5258 :
5259 4299767 : if (flag_stack_reuse != SR_NONE)
5260 11653092 : for (tree p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
5261 7359613 : if (!TREE_THIS_VOLATILE (p))
5262 : {
5263 : /* The value associated with P is a local temporary only if
5264 : there is no value associated with P in the debug map. */
5265 7359350 : tree *varp = id->decl_map->get (p);
5266 7359350 : if (varp
5267 7359350 : && VAR_P (*varp)
5268 7213756 : && !is_gimple_reg (*varp)
5269 7762851 : && !(id->debug_map && id->debug_map->get (p)))
5270 : {
5271 403201 : tree clobber = build_clobber (TREE_TYPE (*varp),
5272 : CLOBBER_STORAGE_END);
5273 403201 : gimple *clobber_stmt;
5274 403201 : clobber_stmt = gimple_build_assign (*varp, clobber);
5275 403201 : gimple_set_location (clobber_stmt, gimple_location (stmt));
5276 403201 : gsi_insert_before (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
5277 : }
5278 : }
5279 :
5280 4299767 : if (save_stack)
5281 : {
5282 88 : tree fn = builtin_decl_implicit (BUILT_IN_STACK_RESTORE);
5283 88 : gcall *call = gimple_build_call (fn, 1, save_stack);
5284 88 : gsi_insert_before (&stmt_gsi, call, GSI_SAME_STMT);
5285 88 : struct cgraph_node *dest = cgraph_node::get_create (fn);
5286 88 : id->dst_node->create_edge (dest, call,
5287 : return_block->count)->inline_failed
5288 88 : = CIF_BODY_NOT_AVAILABLE;
5289 : }
5290 :
5291 : /* Reset the escaped solution. */
5292 4299767 : if (cfun->gimple_df)
5293 : {
5294 4299767 : pt_solution_reset (&cfun->gimple_df->escaped);
5295 4299767 : pt_solution_reset (&cfun->gimple_df->escaped_return);
5296 : }
5297 :
5298 : /* Add new automatic variables to IFN_GOMP_SIMT_ENTER arguments. */
5299 4299767 : if (id->dst_simt_vars && id->dst_simt_vars->length () > 0)
5300 : {
5301 0 : size_t nargs = gimple_call_num_args (simtenter_stmt);
5302 0 : vec<tree> *vars = id->dst_simt_vars;
5303 0 : auto_vec<tree> newargs (nargs + vars->length ());
5304 0 : for (size_t i = 0; i < nargs; i++)
5305 0 : newargs.quick_push (gimple_call_arg (simtenter_stmt, i));
5306 0 : for (tree *pvar = vars->begin (); pvar != vars->end (); pvar++)
5307 : {
5308 0 : tree ptrtype = build_pointer_type (TREE_TYPE (*pvar));
5309 0 : newargs.quick_push (build1 (ADDR_EXPR, ptrtype, *pvar));
5310 : }
5311 0 : gcall *g = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, newargs);
5312 0 : gimple_call_set_lhs (g, gimple_call_lhs (simtenter_stmt));
5313 0 : gimple_stmt_iterator gsi = gsi_for_stmt (simtenter_stmt);
5314 0 : gsi_replace (&gsi, g, false);
5315 0 : }
5316 4299767 : vec_free (id->dst_simt_vars);
5317 4299767 : id->dst_simt_vars = simtvars_save;
5318 :
5319 : /* Clean up. */
5320 4299767 : if (id->debug_map)
5321 : {
5322 141490 : delete id->debug_map;
5323 141490 : id->debug_map = dst;
5324 : }
5325 8599534 : delete id->decl_map;
5326 4299767 : id->decl_map = st;
5327 :
5328 : /* Unlink the calls virtual operands before replacing it. */
5329 4299767 : unlink_stmt_vdef (stmt);
5330 4299767 : if (gimple_vdef (stmt)
5331 4299767 : && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
5332 1997310 : release_ssa_name (gimple_vdef (stmt));
5333 :
5334 : /* If the inlined function returns a result that we care about,
5335 : substitute the GIMPLE_CALL with an assignment of the return
5336 : variable to the LHS of the call. That is, if STMT was
5337 : 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
5338 4299767 : if (use_retvar && gimple_call_lhs (stmt))
5339 : {
5340 1862202 : gimple *old_stmt = stmt;
5341 1862202 : tree lhs = gimple_call_lhs (stmt);
5342 1862202 : if (!is_gimple_reg (lhs)
5343 92488 : && !is_gimple_reg (use_retvar)
5344 1954648 : && is_gimple_reg_type (TREE_TYPE (lhs)))
5345 : {
5346 : /* If both lhs and use_retvar aren't gimple regs, yet have
5347 : gimple reg type, copy through a temporary SSA_NAME. */
5348 0 : gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (lhs)),
5349 : use_retvar);
5350 0 : gimple_set_location (g, gimple_location (old_stmt));
5351 0 : gsi_insert_before (&stmt_gsi, g, GSI_SAME_STMT);
5352 0 : use_retvar = gimple_assign_lhs (g);
5353 : }
5354 1862202 : stmt = gimple_build_assign (lhs, use_retvar);
5355 1862202 : gimple_set_location (stmt, gimple_location (old_stmt));
5356 1862202 : gsi_replace (&stmt_gsi, stmt, false);
5357 1862202 : maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
5358 : /* Append a clobber for id->retvar if easily possible. */
5359 1862202 : if (flag_stack_reuse != SR_NONE
5360 1860069 : && id->retvar
5361 1860069 : && VAR_P (id->retvar)
5362 1860069 : && id->retvar != return_slot
5363 1860069 : && id->retvar != modify_dest
5364 1860069 : && !TREE_THIS_VOLATILE (id->retvar)
5365 1860043 : && !is_gimple_reg (id->retvar)
5366 1955676 : && !stmt_ends_bb_p (stmt))
5367 : {
5368 93474 : tree clobber = build_clobber (TREE_TYPE (id->retvar),
5369 : CLOBBER_STORAGE_END);
5370 93474 : gimple *clobber_stmt;
5371 93474 : clobber_stmt = gimple_build_assign (id->retvar, clobber);
5372 93474 : gimple_set_location (clobber_stmt, gimple_location (old_stmt));
5373 93474 : gsi_insert_after (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
5374 : }
5375 : }
5376 : else
5377 : {
5378 : /* Handle the case of inlining a function with no return
5379 : statement, which causes the return value to become undefined. */
5380 2437565 : if (gimple_call_lhs (stmt)
5381 2437565 : && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
5382 : {
5383 21 : tree name = gimple_call_lhs (stmt);
5384 21 : tree var = SSA_NAME_VAR (name);
5385 6 : tree def = var ? ssa_default_def (cfun, var) : NULL;
5386 :
5387 6 : if (def)
5388 : {
5389 : /* If the variable is used undefined, make this name
5390 : undefined via a move. */
5391 0 : stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
5392 0 : gsi_replace (&stmt_gsi, stmt, true);
5393 : }
5394 : else
5395 : {
5396 21 : if (!var)
5397 : {
5398 15 : var = create_tmp_reg_fn (cfun, TREE_TYPE (name), NULL);
5399 30 : SET_SSA_NAME_VAR_OR_IDENTIFIER (name, var);
5400 : }
5401 : /* Otherwise make this variable undefined. */
5402 21 : gsi_remove (&stmt_gsi, true);
5403 21 : set_ssa_default_def (cfun, var, name);
5404 21 : SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
5405 : }
5406 : }
5407 : /* Replace with a clobber for id->retvar. */
5408 2437544 : else if (flag_stack_reuse != SR_NONE
5409 2433389 : && id->retvar
5410 449645 : && VAR_P (id->retvar)
5411 369127 : && id->retvar != return_slot
5412 320787 : && id->retvar != modify_dest
5413 124122 : && !TREE_THIS_VOLATILE (id->retvar)
5414 2561666 : && !is_gimple_reg (id->retvar))
5415 : {
5416 12064 : tree clobber = build_clobber (TREE_TYPE (id->retvar));
5417 12064 : gimple *clobber_stmt;
5418 12064 : clobber_stmt = gimple_build_assign (id->retvar, clobber);
5419 12064 : gimple_set_location (clobber_stmt, gimple_location (stmt));
5420 12064 : gsi_replace (&stmt_gsi, clobber_stmt, false);
5421 12064 : maybe_clean_or_replace_eh_stmt (stmt, clobber_stmt);
5422 : }
5423 : else
5424 2425480 : gsi_remove (&stmt_gsi, true);
5425 : }
5426 :
5427 4299767 : if (purge_dead_abnormal_edges)
5428 1006023 : bitmap_set_bit (to_purge, return_block->index);
5429 :
5430 : /* If the value of the new expression is ignored, that's OK. We
5431 : don't warn about this for CALL_EXPRs, so we shouldn't warn about
5432 : the equivalent inlined version either. */
5433 4299767 : if (is_gimple_assign (stmt))
5434 : {
5435 1862202 : gcc_assert (gimple_assign_single_p (stmt)
5436 : || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
5437 1862202 : TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
5438 : }
5439 :
5440 4299767 : id->add_clobbers_to_eh_landing_pads = 0;
5441 :
5442 : /* Output the inlining info for this abstract function, since it has been
5443 : inlined. If we don't do this now, we can lose the information about the
5444 : variables in the function when the blocks get blown away as soon as we
5445 : remove the cgraph node. */
5446 4299767 : if (gimple_block (stmt))
5447 4273047 : (*debug_hooks->outlining_inline_function) (fn);
5448 :
5449 : /* Update callgraph if needed. */
5450 4299767 : cg_edge->callee->remove ();
5451 :
5452 4299767 : id->block = NULL_TREE;
5453 4299767 : id->retvar = NULL_TREE;
5454 4299767 : successfully_inlined = true;
5455 :
5456 14962258 : egress:
5457 14962258 : input_location = saved_location;
5458 14962258 : return successfully_inlined;
5459 : }
5460 :
5461 : /* Expand call statements reachable from STMT_P.
5462 : We can only have CALL_EXPRs as the "toplevel" tree code or nested
5463 : in a MODIFY_EXPR. */
5464 :
5465 : static bool
5466 30895501 : gimple_expand_calls_inline (basic_block bb, copy_body_data *id,
5467 : bitmap to_purge)
5468 : {
5469 30895501 : gimple_stmt_iterator gsi;
5470 30895501 : bool inlined = false;
5471 :
5472 225256815 : for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
5473 : {
5474 163465813 : gimple *stmt = gsi_stmt (gsi);
5475 163465813 : gsi_prev (&gsi);
5476 :
5477 163465813 : if (is_gimple_call (stmt)
5478 163465813 : && !gimple_call_internal_p (stmt))
5479 14962045 : inlined |= expand_call_inline (bb, stmt, id, to_purge);
5480 : }
5481 :
5482 30895501 : return inlined;
5483 : }
5484 :
5485 :
5486 : /* Walk all basic blocks created after FIRST and try to fold every statement
5487 : in the STATEMENTS pointer set. */
5488 :
5489 : static void
5490 1557826 : fold_marked_statements (int first, hash_set<gimple *> *statements)
5491 : {
5492 1557826 : auto_bitmap to_purge;
5493 1557826 : auto_bitmap to_purge_abnormal;
5494 :
5495 1557826 : auto_vec<edge, 20> stack (n_basic_blocks_for_fn (cfun) + 2);
5496 1557826 : auto_sbitmap visited (last_basic_block_for_fn (cfun));
5497 1557826 : bitmap_clear (visited);
5498 :
5499 1557826 : stack.quick_push (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
5500 37794524 : while (!stack.is_empty ())
5501 : {
5502 : /* Look at the edge on the top of the stack. */
5503 36236698 : edge e = stack.pop ();
5504 36236698 : basic_block dest = e->dest;
5505 :
5506 44642883 : if (dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
5507 34701999 : || bitmap_bit_p (visited, dest->index))
5508 8406185 : continue;
5509 :
5510 27830513 : bitmap_set_bit (visited, dest->index);
5511 :
5512 27830513 : if (dest->index >= first)
5513 36015584 : for (gimple_stmt_iterator gsi = gsi_start_bb (dest);
5514 122241411 : !gsi_end_p (gsi); gsi_next (&gsi))
5515 : {
5516 104233619 : if (!statements->contains (gsi_stmt (gsi)))
5517 22375074 : continue;
5518 :
5519 81858545 : gimple *old_stmt = gsi_stmt (gsi);
5520 81858545 : bool can_make_abnormal_goto = false;
5521 81858545 : tree old_decl = NULL_TREE;
5522 :
5523 81858545 : if (is_gimple_call (old_stmt))
5524 : {
5525 3779715 : old_decl = gimple_call_fndecl (old_stmt);
5526 3779715 : if (stmt_can_make_abnormal_goto (old_stmt))
5527 : can_make_abnormal_goto = true;
5528 : }
5529 :
5530 3779715 : if (old_decl && fndecl_built_in_p (old_decl))
5531 : {
5532 : /* Folding builtins can create multiple instructions,
5533 : we need to look at all of them. */
5534 1405129 : gimple_stmt_iterator i2 = gsi;
5535 1405129 : gsi_prev (&i2);
5536 1405129 : if (fold_stmt (&gsi))
5537 : {
5538 92132 : gimple *new_stmt;
5539 : /* If a builtin at the end of a bb folded into nothing,
5540 : the following loop won't work. */
5541 92132 : if (gsi_end_p (gsi))
5542 : {
5543 0 : cgraph_update_edges_for_call_stmt (old_stmt,
5544 : old_decl, NULL);
5545 0 : if (can_make_abnormal_goto)
5546 0 : bitmap_set_bit (to_purge_abnormal, dest->index);
5547 18007792 : break;
5548 : }
5549 92132 : if (gsi_end_p (i2))
5550 124762 : i2 = gsi_start_bb (dest);
5551 : else
5552 29751 : gsi_next (&i2);
5553 427 : while (1)
5554 : {
5555 92559 : new_stmt = gsi_stmt (i2);
5556 92559 : update_stmt (new_stmt);
5557 92559 : cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5558 : new_stmt);
5559 :
5560 92559 : if (new_stmt == gsi_stmt (gsi))
5561 : {
5562 : /* It is okay to check only for the very last
5563 : of these statements. If it is a throwing
5564 : statement nothing will change. If it isn't
5565 : this can remove EH edges. If that weren't
5566 : correct then because some intermediate stmts
5567 : throw, but not the last one. That would mean
5568 : we'd have to split the block, which we can't
5569 : here and we'd loose anyway. And as builtins
5570 : probably never throw, this all
5571 : is mood anyway. */
5572 92132 : if (maybe_clean_or_replace_eh_stmt (old_stmt,
5573 : new_stmt))
5574 7 : bitmap_set_bit (to_purge, dest->index);
5575 92132 : if (can_make_abnormal_goto
5576 92132 : && !stmt_can_make_abnormal_goto (new_stmt))
5577 0 : bitmap_set_bit (to_purge_abnormal, dest->index);
5578 : break;
5579 : }
5580 427 : gsi_next (&i2);
5581 : }
5582 : }
5583 : }
5584 80453416 : else if (fold_stmt (&gsi))
5585 : {
5586 : /* Re-read the statement from GSI as fold_stmt() may
5587 : have changed it. */
5588 2981158 : gimple *new_stmt = gsi_stmt (gsi);
5589 2981158 : update_stmt (new_stmt);
5590 :
5591 2981158 : if (is_gimple_call (old_stmt)
5592 2981158 : || is_gimple_call (new_stmt))
5593 4485 : cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5594 : new_stmt);
5595 :
5596 2981158 : if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
5597 164 : bitmap_set_bit (to_purge, dest->index);
5598 2981158 : if (can_make_abnormal_goto
5599 2981158 : && !stmt_can_make_abnormal_goto (new_stmt))
5600 7 : bitmap_set_bit (to_purge_abnormal, dest->index);
5601 : }
5602 : }
5603 :
5604 54124521 : if (EDGE_COUNT (dest->succs) > 0)
5605 : {
5606 : /* Avoid warnings emitted from folding statements that
5607 : became unreachable because of inlined function parameter
5608 : propagation. */
5609 26294008 : e = find_taken_edge (dest, NULL_TREE);
5610 26294008 : if (e)
5611 18066831 : stack.quick_push (e);
5612 : else
5613 : {
5614 8227177 : edge_iterator ei;
5615 24839218 : FOR_EACH_EDGE (e, ei, dest->succs)
5616 16612041 : stack.safe_push (e);
5617 : }
5618 : }
5619 : }
5620 :
5621 1557826 : gimple_purge_all_dead_eh_edges (to_purge);
5622 1557826 : gimple_purge_all_dead_abnormal_call_edges (to_purge_abnormal);
5623 1557826 : }
5624 :
5625 : /* Expand calls to inline functions in the body of FN. */
5626 :
5627 : unsigned int
5628 1944415 : optimize_inline_calls (tree fn)
5629 : {
5630 1944415 : copy_body_data id;
5631 1944415 : basic_block bb;
5632 1944415 : int last = n_basic_blocks_for_fn (cfun);
5633 1944415 : bool inlined_p = false;
5634 :
5635 : /* Clear out ID. */
5636 1944415 : memset (&id, 0, sizeof (id));
5637 :
5638 1944415 : id.src_node = id.dst_node = cgraph_node::get (fn);
5639 1944415 : gcc_assert (id.dst_node->definition);
5640 1944415 : id.dst_fn = fn;
5641 : /* Or any functions that aren't finished yet. */
5642 1944415 : if (current_function_decl)
5643 1944415 : id.dst_fn = current_function_decl;
5644 :
5645 1944415 : id.copy_decl = copy_decl_maybe_to_var;
5646 1944415 : id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5647 1944415 : id.transform_new_cfg = false;
5648 1944415 : id.transform_return_to_modify = true;
5649 1944415 : id.transform_parameter = true;
5650 1944415 : id.statements_to_fold = new hash_set<gimple *>;
5651 :
5652 1944415 : push_gimplify_context ();
5653 :
5654 : /* We make no attempts to keep dominance info up-to-date. */
5655 1944415 : free_dominance_info (CDI_DOMINATORS);
5656 1944415 : free_dominance_info (CDI_POST_DOMINATORS);
5657 :
5658 : /* Register specific gimple functions. */
5659 1944415 : gimple_register_cfg_hooks ();
5660 :
5661 : /* Reach the trees by walking over the CFG, and note the
5662 : enclosing basic-blocks in the call edges. */
5663 : /* We walk the blocks going forward, because inlined function bodies
5664 : will split id->current_basic_block, and the new blocks will
5665 : follow it; we'll trudge through them, processing their CALL_EXPRs
5666 : along the way. */
5667 1944415 : auto_bitmap to_purge;
5668 32839916 : FOR_EACH_BB_FN (bb, cfun)
5669 30895501 : inlined_p |= gimple_expand_calls_inline (bb, &id, to_purge);
5670 :
5671 1944415 : pop_gimplify_context (NULL);
5672 :
5673 1944415 : if (flag_checking)
5674 : {
5675 1944397 : struct cgraph_edge *e;
5676 :
5677 1944397 : id.dst_node->verify ();
5678 :
5679 : /* Double check that we inlined everything we are supposed to inline. */
5680 11703012 : for (e = id.dst_node->callees; e; e = e->next_callee)
5681 9758615 : gcc_assert (e->inline_failed);
5682 : }
5683 :
5684 : /* If we didn't inline into the function there is nothing to do. */
5685 1944415 : if (!inlined_p)
5686 : {
5687 1234704 : delete id.statements_to_fold;
5688 617352 : return 0;
5689 : }
5690 :
5691 : /* Fold queued statements. */
5692 1327063 : update_max_bb_count ();
5693 1327063 : fold_marked_statements (last, id.statements_to_fold);
5694 2654126 : delete id.statements_to_fold;
5695 :
5696 : /* Finally purge EH and abnormal edges from the call stmts we inlined.
5697 : We need to do this after fold_marked_statements since that may walk
5698 : the SSA use-def chain. */
5699 1327063 : unsigned i;
5700 1327063 : bitmap_iterator bi;
5701 2333086 : EXECUTE_IF_SET_IN_BITMAP (to_purge, 0, i, bi)
5702 : {
5703 1006023 : basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
5704 1006023 : if (bb)
5705 : {
5706 1006023 : gimple_purge_dead_eh_edges (bb);
5707 1006023 : gimple_purge_dead_abnormal_call_edges (bb);
5708 : }
5709 : }
5710 :
5711 1327063 : gcc_assert (!id.debug_stmts.exists ());
5712 :
5713 : /* Renumber the lexical scoping (non-code) blocks consecutively. */
5714 1327063 : number_blocks (fn);
5715 :
5716 1327063 : delete_unreachable_blocks_update_callgraph (id.dst_node, false);
5717 1327063 : id.dst_node->calls_comdat_local = id.dst_node->check_calls_comdat_local_p ();
5718 :
5719 1327063 : if (flag_checking)
5720 1327054 : id.dst_node->verify ();
5721 :
5722 : /* It would be nice to check SSA/CFG/statement consistency here, but it is
5723 : not possible yet - the IPA passes might make various functions to not
5724 : throw and they don't care to proactively update local EH info. This is
5725 : done later in fixup_cfg pass that also execute the verification. */
5726 1327063 : return (TODO_update_ssa
5727 : | TODO_cleanup_cfg
5728 1327063 : | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
5729 2654126 : | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0));
5730 1944415 : }
5731 :
5732 : /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
5733 :
5734 : tree
5735 2164275263 : copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
5736 : {
5737 2164275263 : enum tree_code code = TREE_CODE (*tp);
5738 2164275263 : enum tree_code_class cl = TREE_CODE_CLASS (code);
5739 :
5740 : /* We make copies of most nodes. */
5741 2164275263 : if (IS_EXPR_CODE_CLASS (cl)
5742 : || code == TREE_LIST
5743 302616299 : || code == TREE_VEC
5744 298107660 : || code == TYPE_DECL
5745 298107660 : || code == OMP_CLAUSE)
5746 : {
5747 : /* Because the chain gets clobbered when we make a copy, we save it
5748 : here. */
5749 1866190660 : tree chain = NULL_TREE, new_tree;
5750 :
5751 1866190660 : if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
5752 4531696 : chain = TREE_CHAIN (*tp);
5753 :
5754 : /* Copy the node. */
5755 1866190660 : new_tree = copy_node (*tp);
5756 :
5757 1866190660 : *tp = new_tree;
5758 :
5759 : /* Now, restore the chain, if appropriate. That will cause
5760 : walk_tree to walk into the chain as well. */
5761 1866190660 : if (code == PARM_DECL
5762 1866190660 : || code == TREE_LIST
5763 1864170999 : || code == OMP_CLAUSE)
5764 2042692 : TREE_CHAIN (*tp) = chain;
5765 :
5766 : /* For now, we don't update BLOCKs when we make copies. So, we
5767 : have to nullify all BIND_EXPRs. */
5768 1866190660 : if (TREE_CODE (*tp) == BIND_EXPR)
5769 22355686 : BIND_EXPR_BLOCK (*tp) = NULL_TREE;
5770 : }
5771 298084603 : else if (code == CONSTRUCTOR)
5772 : {
5773 : /* CONSTRUCTOR nodes need special handling because
5774 : we need to duplicate the vector of elements. */
5775 21185790 : tree new_tree;
5776 :
5777 21185790 : new_tree = copy_node (*tp);
5778 32300192 : CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
5779 21185790 : *tp = new_tree;
5780 : }
5781 276898813 : else if (code == STATEMENT_LIST)
5782 : /* We used to just abort on STATEMENT_LIST, but we can run into them
5783 : with statement-expressions (c++/40975). */
5784 198 : copy_statement_list (tp);
5785 276898615 : else if (TREE_CODE_CLASS (code) == tcc_type)
5786 278 : *walk_subtrees = 0;
5787 276898337 : else if (TREE_CODE_CLASS (code) == tcc_declaration)
5788 127609497 : *walk_subtrees = 0;
5789 149288840 : else if (TREE_CODE_CLASS (code) == tcc_constant)
5790 0 : *walk_subtrees = 0;
5791 2164275263 : return NULL_TREE;
5792 : }
5793 :
5794 : /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
5795 : information indicating to what new SAVE_EXPR this one should be mapped,
5796 : use that one. Otherwise, create a new node and enter it in ST. FN is
5797 : the function into which the copy will be placed. */
5798 :
5799 : static void
5800 16004501 : remap_save_expr (tree *tp, hash_map<tree, tree> *st, int *walk_subtrees)
5801 : {
5802 16004501 : tree *n;
5803 16004501 : tree t;
5804 :
5805 : /* See if we already encountered this SAVE_EXPR. */
5806 16004501 : n = st->get (*tp);
5807 :
5808 : /* If we didn't already remap this SAVE_EXPR, do so now. */
5809 16004501 : if (!n)
5810 : {
5811 15551289 : t = copy_node (*tp);
5812 :
5813 : /* Remember this SAVE_EXPR. */
5814 15551289 : st->put (*tp, t);
5815 : /* Make sure we don't remap an already-remapped SAVE_EXPR. */
5816 15551289 : st->put (t, t);
5817 : }
5818 : else
5819 : {
5820 : /* We've already walked into this SAVE_EXPR; don't do it again. */
5821 453212 : *walk_subtrees = 0;
5822 453212 : t = *n;
5823 : }
5824 :
5825 : /* Replace this SAVE_EXPR with the copy. */
5826 16004501 : *tp = t;
5827 16004501 : }
5828 :
5829 : /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
5830 : label, copies the declaration and enters it in the splay_tree in DATA (which
5831 : is really a 'copy_body_data *'. */
5832 :
5833 : static tree
5834 1537259 : mark_local_labels_stmt (gimple_stmt_iterator *gsip,
5835 : bool *handled_ops_p ATTRIBUTE_UNUSED,
5836 : struct walk_stmt_info *wi)
5837 : {
5838 1537259 : copy_body_data *id = (copy_body_data *) wi->info;
5839 1537259 : glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsip));
5840 :
5841 51345 : if (stmt)
5842 : {
5843 51345 : tree decl = gimple_label_label (stmt);
5844 :
5845 : /* Copy the decl and remember the copy. */
5846 51345 : insert_decl_map (id, decl, id->copy_decl (decl, id));
5847 : }
5848 :
5849 1537259 : return NULL_TREE;
5850 : }
5851 :
5852 : static gimple_seq duplicate_remap_omp_clause_seq (gimple_seq seq,
5853 : struct walk_stmt_info *wi);
5854 :
5855 : /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_local.
5856 : Using the splay_tree pointed to by ST (which is really a `splay_tree'),
5857 : remaps all local declarations to appropriate replacements in gimple
5858 : operands. */
5859 :
5860 : static tree
5861 3351916 : replace_locals_op (tree *tp, int *walk_subtrees, void *data)
5862 : {
5863 3351916 : struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
5864 3351916 : copy_body_data *id = (copy_body_data *) wi->info;
5865 3351916 : hash_map<tree, tree> *st = id->decl_map;
5866 3351916 : tree *n;
5867 3351916 : tree expr = *tp;
5868 :
5869 : /* For recursive invocations this is no longer the LHS itself. */
5870 3351916 : bool is_lhs = wi->is_lhs;
5871 3351916 : wi->is_lhs = false;
5872 :
5873 3351916 : if (TREE_CODE (expr) == SSA_NAME)
5874 : {
5875 24082 : *tp = remap_ssa_name (*tp, id);
5876 24082 : *walk_subtrees = 0;
5877 24082 : if (is_lhs)
5878 12041 : SSA_NAME_DEF_STMT (*tp) = gsi_stmt (wi->gsi);
5879 : }
5880 : /* Only a local declaration (variable or label). */
5881 3327834 : else if ((VAR_P (expr) && !TREE_STATIC (expr))
5882 1941025 : || TREE_CODE (expr) == LABEL_DECL)
5883 : {
5884 : /* Lookup the declaration. */
5885 1491555 : n = st->get (expr);
5886 :
5887 : /* If it's there, remap it. */
5888 1491555 : if (n)
5889 103404 : *tp = *n;
5890 1491555 : *walk_subtrees = 0;
5891 : }
5892 1836279 : else if (TREE_CODE (expr) == STATEMENT_LIST
5893 1836279 : || TREE_CODE (expr) == BIND_EXPR
5894 1836279 : || TREE_CODE (expr) == SAVE_EXPR)
5895 0 : gcc_unreachable ();
5896 1836279 : else if (TREE_CODE (expr) == TARGET_EXPR)
5897 : {
5898 : /* Don't mess with a TARGET_EXPR that hasn't been expanded.
5899 : It's OK for this to happen if it was part of a subtree that
5900 : isn't immediately expanded, such as operand 2 of another
5901 : TARGET_EXPR. */
5902 0 : if (!TREE_OPERAND (expr, 1))
5903 : {
5904 0 : TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
5905 0 : TREE_OPERAND (expr, 3) = NULL_TREE;
5906 : }
5907 : }
5908 1836279 : else if (TREE_CODE (expr) == OMP_CLAUSE)
5909 : {
5910 : /* Before the omplower pass completes, some OMP clauses can contain
5911 : sequences that are neither copied by gimple_seq_copy nor walked by
5912 : walk_gimple_seq. To make copy_gimple_seq_and_replace_locals work even
5913 : in those situations, we have to copy and process them explicitly. */
5914 :
5915 552 : if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LASTPRIVATE)
5916 : {
5917 14 : gimple_seq seq = OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr);
5918 14 : seq = duplicate_remap_omp_clause_seq (seq, wi);
5919 14 : OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr) = seq;
5920 : }
5921 538 : else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LINEAR)
5922 : {
5923 77 : gimple_seq seq = OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr);
5924 77 : seq = duplicate_remap_omp_clause_seq (seq, wi);
5925 77 : OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr) = seq;
5926 : }
5927 461 : else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_REDUCTION)
5928 : {
5929 99 : gimple_seq seq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr);
5930 99 : seq = duplicate_remap_omp_clause_seq (seq, wi);
5931 99 : OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr) = seq;
5932 99 : seq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr);
5933 99 : seq = duplicate_remap_omp_clause_seq (seq, wi);
5934 99 : OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr) = seq;
5935 : }
5936 : }
5937 :
5938 : /* Keep iterating. */
5939 3351916 : return NULL_TREE;
5940 : }
5941 :
5942 :
5943 : /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_local.
5944 : Using the splay_tree pointed to by ST (which is really a `splay_tree'),
5945 : remaps all local declarations to appropriate replacements in gimple
5946 : statements. */
5947 :
5948 : static tree
5949 1537259 : replace_locals_stmt (gimple_stmt_iterator *gsip,
5950 : bool *handled_ops_p ATTRIBUTE_UNUSED,
5951 : struct walk_stmt_info *wi)
5952 : {
5953 1537259 : copy_body_data *id = (copy_body_data *) wi->info;
5954 1537259 : gimple *gs = gsi_stmt (*gsip);
5955 :
5956 1537259 : if (gbind *stmt = dyn_cast <gbind *> (gs))
5957 : {
5958 349 : tree block = gimple_bind_block (stmt);
5959 :
5960 349 : if (block)
5961 : {
5962 269 : remap_block (&block, id);
5963 269 : gimple_bind_set_block (stmt, block);
5964 : }
5965 :
5966 : /* This will remap a lot of the same decls again, but this should be
5967 : harmless. */
5968 349 : if (gimple_bind_vars (stmt))
5969 : {
5970 : tree old_var, decls = gimple_bind_vars (stmt);
5971 :
5972 528 : for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
5973 360 : if (!can_be_nonlocal (old_var, id)
5974 360 : && ! variably_modified_type_p (TREE_TYPE (old_var), id->src_fn))
5975 360 : remap_decl (old_var, id);
5976 :
5977 168 : gcc_checking_assert (!id->prevent_decl_creation_for_types);
5978 168 : id->prevent_decl_creation_for_types = true;
5979 168 : gimple_bind_set_vars (stmt, remap_decls (decls, NULL, id));
5980 168 : id->prevent_decl_creation_for_types = false;
5981 : }
5982 : }
5983 :
5984 : /* Keep iterating. */
5985 1537259 : return NULL_TREE;
5986 : }
5987 :
5988 : /* Create a copy of SEQ and remap all decls in it. */
5989 :
5990 : static gimple_seq
5991 289 : duplicate_remap_omp_clause_seq (gimple_seq seq, struct walk_stmt_info *wi)
5992 : {
5993 289 : if (!seq)
5994 : return NULL;
5995 :
5996 : /* If there are any labels in OMP sequences, they can be only referred to in
5997 : the sequence itself and therefore we can do both here. */
5998 60 : walk_gimple_seq (seq, mark_local_labels_stmt, NULL, wi);
5999 60 : gimple_seq copy = gimple_seq_copy (seq);
6000 60 : walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, wi);
6001 60 : return copy;
6002 : }
6003 :
6004 : /* Copies everything in SEQ and replaces variables and labels local to
6005 : current_function_decl. */
6006 :
6007 : gimple_seq
6008 1018064 : copy_gimple_seq_and_replace_locals (gimple_seq seq)
6009 : {
6010 1018064 : copy_body_data id;
6011 1018064 : struct walk_stmt_info wi;
6012 1018064 : gimple_seq copy;
6013 :
6014 : /* There's nothing to do for NULL_TREE. */
6015 1018064 : if (seq == NULL)
6016 : return seq;
6017 :
6018 : /* Set up ID. */
6019 1018048 : memset (&id, 0, sizeof (id));
6020 1018048 : id.src_fn = current_function_decl;
6021 1018048 : id.dst_fn = current_function_decl;
6022 1018048 : id.src_cfun = cfun;
6023 1018048 : id.decl_map = new hash_map<tree, tree>;
6024 1018048 : id.debug_map = NULL;
6025 :
6026 1018048 : id.copy_decl = copy_decl_no_change;
6027 1018048 : id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6028 1018048 : id.transform_new_cfg = false;
6029 1018048 : id.transform_return_to_modify = false;
6030 1018048 : id.transform_parameter = false;
6031 :
6032 : /* Walk the tree once to find local labels. */
6033 1018048 : memset (&wi, 0, sizeof (wi));
6034 1018048 : hash_set<tree> visited;
6035 1018048 : wi.info = &id;
6036 1018048 : wi.pset = &visited;
6037 1018048 : walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
6038 :
6039 1018048 : copy = gimple_seq_copy (seq);
6040 :
6041 : /* Walk the copy, remapping decls. */
6042 1018048 : memset (&wi, 0, sizeof (wi));
6043 1018048 : wi.info = &id;
6044 1018048 : walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
6045 :
6046 : /* Clean up. */
6047 2036096 : delete id.decl_map;
6048 1018048 : if (id.debug_map)
6049 0 : delete id.debug_map;
6050 1018048 : if (id.dependence_map)
6051 : {
6052 0 : delete id.dependence_map;
6053 0 : id.dependence_map = NULL;
6054 : }
6055 :
6056 1018048 : return copy;
6057 1018048 : }
6058 :
6059 :
6060 : /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
6061 :
6062 : static tree
6063 0 : debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
6064 : {
6065 0 : if (*tp == data)
6066 : return (tree) data;
6067 : else
6068 0 : return NULL;
6069 : }
6070 :
6071 : DEBUG_FUNCTION bool
6072 0 : debug_find_tree (tree top, tree search)
6073 : {
6074 0 : return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
6075 : }
6076 :
6077 :
6078 : /* Declare the variables created by the inliner. Add all the variables in
6079 : VARS to BIND_EXPR. */
6080 :
6081 : static void
6082 6557400 : declare_inline_vars (tree block, tree vars)
6083 : {
6084 6557400 : tree t;
6085 16132060 : for (t = vars; t; t = DECL_CHAIN (t))
6086 : {
6087 9574660 : DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
6088 9574660 : gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
6089 9574660 : add_local_decl (cfun, t);
6090 : }
6091 :
6092 6557400 : if (block)
6093 6478840 : BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
6094 6557400 : }
6095 :
6096 : /* Copy NODE (which must be a DECL). The DECL originally was in the FROM_FN,
6097 : but now it will be in the TO_FN. PARM_TO_VAR means enable PARM_DECL to
6098 : VAR_DECL translation. */
6099 :
6100 : tree
6101 139672684 : copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
6102 : {
6103 : /* Don't generate debug information for the copy if we wouldn't have
6104 : generated it for the copy either. */
6105 139672684 : DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
6106 139672684 : DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
6107 :
6108 : /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
6109 : declaration inspired this copy. */
6110 167811864 : DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
6111 :
6112 : /* The new variable/label has no RTL, yet. */
6113 139672684 : if (HAS_RTL_P (copy)
6114 139672684 : && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
6115 139667766 : SET_DECL_RTL (copy, 0);
6116 : /* For vector typed decls make sure to update DECL_MODE according
6117 : to the new function context. */
6118 139672684 : if (VECTOR_TYPE_P (TREE_TYPE (copy)))
6119 242530 : SET_DECL_MODE (copy, TYPE_MODE (TREE_TYPE (copy)));
6120 :
6121 : /* These args would always appear unused, if not for this. */
6122 139672684 : TREE_USED (copy) = 1;
6123 :
6124 : /* Set the context for the new declaration. */
6125 139672684 : if (!DECL_CONTEXT (decl))
6126 : /* Globals stay global. */
6127 : ;
6128 139672533 : else if (DECL_CONTEXT (decl) != id->src_fn)
6129 : /* Things that weren't in the scope of the function we're inlining
6130 : from aren't in the scope we're inlining to, either. */
6131 : ;
6132 139669337 : else if (TREE_STATIC (decl))
6133 : /* Function-scoped static variables should stay in the original
6134 : function. */
6135 : ;
6136 : else
6137 : {
6138 : /* Ordinary automatic local variables are now in the scope of the
6139 : new function. */
6140 139665943 : DECL_CONTEXT (copy) = id->dst_fn;
6141 139665943 : if (VAR_P (copy) && id->dst_simt_vars && !is_gimple_reg (copy))
6142 : {
6143 0 : if (!lookup_attribute ("omp simt private", DECL_ATTRIBUTES (copy)))
6144 0 : DECL_ATTRIBUTES (copy)
6145 0 : = tree_cons (get_identifier ("omp simt private"), NULL,
6146 0 : DECL_ATTRIBUTES (copy));
6147 0 : id->dst_simt_vars->safe_push (copy);
6148 : }
6149 : }
6150 :
6151 139672684 : return copy;
6152 : }
6153 :
6154 : /* Create a new VAR_DECL that is indentical in all respect to DECL except that
6155 : DECL can be either a VAR_DECL, a PARM_DECL or RESULT_DECL. The original
6156 : DECL must come from ID->src_fn and the copy will be part of ID->dst_fn. */
6157 :
6158 : tree
6159 7560172 : copy_decl_to_var (tree decl, copy_body_data *id)
6160 : {
6161 7560172 : tree copy, type;
6162 :
6163 7560172 : gcc_assert (TREE_CODE (decl) == PARM_DECL
6164 : || TREE_CODE (decl) == RESULT_DECL);
6165 :
6166 7560172 : type = TREE_TYPE (decl);
6167 :
6168 7560172 : copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
6169 7560172 : VAR_DECL, DECL_NAME (decl), type);
6170 7560172 : if (DECL_PT_UID_SET_P (decl))
6171 289 : SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
6172 7560172 : TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
6173 7560172 : TREE_READONLY (copy) = TREE_READONLY (decl);
6174 7560172 : TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
6175 7560172 : DECL_NOT_GIMPLE_REG_P (copy) = DECL_NOT_GIMPLE_REG_P (decl);
6176 7560172 : DECL_BY_REFERENCE (copy) = DECL_BY_REFERENCE (decl);
6177 :
6178 7560172 : return copy_decl_for_dup_finish (id, decl, copy);
6179 : }
6180 :
6181 : /* Like copy_decl_to_var, but create a return slot object instead of a
6182 : pointer variable for return by invisible reference. */
6183 :
6184 : static tree
6185 2026870 : copy_result_decl_to_var (tree decl, copy_body_data *id)
6186 : {
6187 2026870 : tree copy, type;
6188 :
6189 2026870 : gcc_assert (TREE_CODE (decl) == PARM_DECL
6190 : || TREE_CODE (decl) == RESULT_DECL);
6191 :
6192 2026870 : type = TREE_TYPE (decl);
6193 2026870 : if (DECL_BY_REFERENCE (decl))
6194 111 : type = TREE_TYPE (type);
6195 :
6196 2026870 : copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
6197 2026870 : VAR_DECL, DECL_NAME (decl), type);
6198 2026870 : if (DECL_PT_UID_SET_P (decl))
6199 0 : SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
6200 2026870 : TREE_READONLY (copy) = TREE_READONLY (decl);
6201 2026870 : TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
6202 2026870 : if (!DECL_BY_REFERENCE (decl))
6203 : {
6204 2026759 : TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
6205 2026759 : DECL_NOT_GIMPLE_REG_P (copy)
6206 4053518 : = (DECL_NOT_GIMPLE_REG_P (decl)
6207 : /* RESULT_DECLs are treated special by needs_to_live_in_memory,
6208 : mirror that to the created VAR_DECL. */
6209 2026759 : || (TREE_CODE (decl) == RESULT_DECL
6210 2026599 : && aggregate_value_p (decl, id->src_fn)));
6211 : }
6212 :
6213 2026870 : return copy_decl_for_dup_finish (id, decl, copy);
6214 : }
6215 :
6216 : tree
6217 130085472 : copy_decl_no_change (tree decl, copy_body_data *id)
6218 : {
6219 130085472 : tree copy;
6220 :
6221 130085472 : copy = copy_node (decl);
6222 :
6223 : /* The COPY is not abstract; it will be generated in DST_FN. */
6224 130085472 : DECL_ABSTRACT_P (copy) = false;
6225 130085472 : lang_hooks.dup_lang_specific_decl (copy);
6226 :
6227 : /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
6228 : been taken; it's for internal bookkeeping in expand_goto_internal. */
6229 130085472 : if (TREE_CODE (copy) == LABEL_DECL)
6230 : {
6231 1799079 : TREE_ADDRESSABLE (copy) = 0;
6232 1799079 : LABEL_DECL_UID (copy) = -1;
6233 : }
6234 :
6235 130085472 : return copy_decl_for_dup_finish (id, decl, copy);
6236 : }
6237 :
6238 : static tree
6239 20574249 : copy_decl_maybe_to_var (tree decl, copy_body_data *id)
6240 : {
6241 20574249 : if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
6242 12382 : return copy_decl_to_var (decl, id);
6243 : else
6244 20561867 : return copy_decl_no_change (decl, id);
6245 : }
6246 :
6247 : /* Return a copy of the function's argument tree without any modifications. */
6248 :
6249 : static tree
6250 74796 : copy_arguments_nochange (tree orig_parm, copy_body_data * id)
6251 : {
6252 74796 : tree arg, *parg;
6253 74796 : tree new_parm = NULL;
6254 :
6255 74796 : parg = &new_parm;
6256 226154 : for (arg = orig_parm; arg; arg = DECL_CHAIN (arg))
6257 : {
6258 151358 : tree new_tree = remap_decl (arg, id);
6259 151358 : if (TREE_CODE (new_tree) != PARM_DECL)
6260 4317 : new_tree = id->copy_decl (arg, id);
6261 151358 : lang_hooks.dup_lang_specific_decl (new_tree);
6262 151358 : *parg = new_tree;
6263 151358 : parg = &DECL_CHAIN (new_tree);
6264 : }
6265 74796 : return new_parm;
6266 : }
6267 :
6268 : /* Return a copy of the function's static chain. */
6269 : static tree
6270 975 : copy_static_chain (tree static_chain, copy_body_data * id)
6271 : {
6272 975 : tree *chain_copy, *pvar;
6273 :
6274 975 : chain_copy = &static_chain;
6275 1950 : for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
6276 : {
6277 975 : tree new_tree = remap_decl (*pvar, id);
6278 975 : lang_hooks.dup_lang_specific_decl (new_tree);
6279 975 : DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
6280 975 : *pvar = new_tree;
6281 : }
6282 975 : return static_chain;
6283 : }
6284 :
6285 : /* Return true if the function is allowed to be versioned.
6286 : This is a guard for the versioning functionality. */
6287 :
6288 : bool
6289 13162318 : tree_versionable_function_p (tree fndecl)
6290 : {
6291 13162318 : return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
6292 26054045 : && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl)) == NULL);
6293 : }
6294 :
6295 : /* Update clone info after duplication. */
6296 :
6297 : static void
6298 230763 : update_clone_info (copy_body_data * id)
6299 : {
6300 230763 : struct cgraph_node *this_node = id->dst_node;
6301 230763 : if (!this_node->clones)
6302 : return;
6303 588744 : for (cgraph_node *node = this_node->clones; node != this_node;)
6304 : {
6305 : /* First update replace maps to match the new body. */
6306 512401 : clone_info *info = clone_info::get (node);
6307 512401 : if (info && info->tree_map)
6308 : {
6309 : unsigned int i;
6310 0 : for (i = 0; i < vec_safe_length (info->tree_map); i++)
6311 : {
6312 0 : struct ipa_replace_map *replace_info;
6313 0 : replace_info = (*info->tree_map)[i];
6314 0 : walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
6315 : }
6316 : }
6317 :
6318 512401 : if (node->clones)
6319 : node = node->clones;
6320 489751 : else if (node->next_sibling_clone)
6321 : node = node->next_sibling_clone;
6322 : else
6323 : {
6324 187548 : while (node != id->dst_node && !node->next_sibling_clone)
6325 98993 : node = node->clone_of;
6326 88555 : if (node != id->dst_node)
6327 12212 : node = node->next_sibling_clone;
6328 : }
6329 : }
6330 : }
6331 :
6332 : /* Create a copy of a function's tree.
6333 : OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
6334 : of the original function and the new copied function
6335 : respectively. In case we want to replace a DECL
6336 : tree with another tree while duplicating the function's
6337 : body, TREE_MAP represents the mapping between these
6338 : trees. If UPDATE_CLONES is set, the call_stmt fields
6339 : of edges of clones of the function will be updated.
6340 :
6341 : If non-NULL PARAM_ADJUSTMENTS determines how function prototype (i.e. the
6342 : function parameters and return value) should be modified).
6343 : If non-NULL BLOCKS_TO_COPY determine what basic blocks to copy.
6344 : If non_NULL NEW_ENTRY determine new entry BB of the clone.
6345 : */
6346 : void
6347 230763 : tree_function_versioning (tree old_decl, tree new_decl,
6348 : vec<ipa_replace_map *, va_gc> *tree_map,
6349 : ipa_param_adjustments *param_adjustments,
6350 : bool update_clones, bitmap blocks_to_copy,
6351 : basic_block new_entry)
6352 : {
6353 230763 : struct cgraph_node *old_version_node;
6354 230763 : struct cgraph_node *new_version_node;
6355 230763 : copy_body_data id;
6356 230763 : tree p;
6357 230763 : unsigned i;
6358 230763 : struct ipa_replace_map *replace_info;
6359 230763 : basic_block old_entry_block, bb;
6360 230763 : auto_vec<gimple *, 10> init_stmts;
6361 230763 : tree vars = NULL_TREE;
6362 :
6363 : /* We can get called recursively from expand_call_inline via clone
6364 : materialization. While expand_call_inline maintains input_location
6365 : we cannot tolerate it to leak into the materialized clone. */
6366 230763 : location_t saved_location = input_location;
6367 230763 : input_location = UNKNOWN_LOCATION;
6368 :
6369 230763 : gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
6370 : && TREE_CODE (new_decl) == FUNCTION_DECL);
6371 230763 : DECL_POSSIBLY_INLINED (old_decl) = 1;
6372 :
6373 230763 : old_version_node = cgraph_node::get (old_decl);
6374 230763 : gcc_checking_assert (old_version_node);
6375 230763 : new_version_node = cgraph_node::get (new_decl);
6376 230763 : gcc_checking_assert (new_version_node);
6377 :
6378 : /* Copy over debug args. */
6379 230763 : if (DECL_HAS_DEBUG_ARGS_P (old_decl))
6380 : {
6381 3289 : vec<tree, va_gc> **new_debug_args, **old_debug_args;
6382 3289 : gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
6383 3289 : DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
6384 3289 : old_debug_args = decl_debug_args_lookup (old_decl);
6385 3289 : if (old_debug_args)
6386 : {
6387 3289 : new_debug_args = decl_debug_args_insert (new_decl);
6388 6578 : *new_debug_args = vec_safe_copy (*old_debug_args);
6389 : }
6390 : }
6391 :
6392 : /* Output the inlining info for this abstract function, since it has been
6393 : inlined. If we don't do this now, we can lose the information about the
6394 : variables in the function when the blocks get blown away as soon as we
6395 : remove the cgraph node. */
6396 230763 : (*debug_hooks->outlining_inline_function) (old_decl);
6397 :
6398 230763 : DECL_ARTIFICIAL (new_decl) = 1;
6399 414707 : DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
6400 414707 : if (DECL_ORIGIN (old_decl) == old_decl)
6401 207015 : old_version_node->used_as_abstract_origin = true;
6402 230763 : DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
6403 :
6404 : /* Prepare the data structures for the tree copy. */
6405 230763 : memset (&id, 0, sizeof (id));
6406 :
6407 : /* Generate a new name for the new version. */
6408 230763 : id.statements_to_fold = new hash_set<gimple *>;
6409 :
6410 230763 : id.decl_map = new hash_map<tree, tree>;
6411 230763 : id.debug_map = NULL;
6412 230763 : id.src_fn = old_decl;
6413 230763 : id.dst_fn = new_decl;
6414 230763 : id.src_node = old_version_node;
6415 230763 : id.dst_node = new_version_node;
6416 230763 : id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
6417 230763 : id.blocks_to_copy = blocks_to_copy;
6418 :
6419 230763 : id.copy_decl = copy_decl_no_change;
6420 230763 : id.transform_call_graph_edges
6421 230763 : = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
6422 230763 : id.transform_new_cfg = true;
6423 230763 : id.transform_return_to_modify = false;
6424 230763 : id.transform_parameter = false;
6425 :
6426 230763 : old_entry_block = ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (old_decl));
6427 230763 : DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
6428 230763 : DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
6429 230763 : initialize_cfun (new_decl, old_decl,
6430 230763 : new_entry ? new_entry->count : old_entry_block->count);
6431 230763 : new_version_node->has_omp_variant_constructs
6432 230763 : = old_version_node->has_omp_variant_constructs;
6433 230763 : if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
6434 230763 : DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
6435 230763 : = id.src_cfun->gimple_df->ipa_pta;
6436 :
6437 : /* Copy the function's static chain. */
6438 230763 : p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
6439 230763 : if (p)
6440 1950 : DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl
6441 975 : = copy_static_chain (p, &id);
6442 :
6443 230763 : auto_vec<int, 16> new_param_indices;
6444 230763 : clone_info *info = clone_info::get (old_version_node);
6445 237169 : ipa_param_adjustments *old_param_adjustments
6446 230763 : = info ? info->param_adjustments : NULL;
6447 6406 : if (old_param_adjustments)
6448 6323 : old_param_adjustments->get_updated_indices (&new_param_indices);
6449 :
6450 : /* If there's a tree_map, prepare for substitution. */
6451 230763 : if (tree_map)
6452 40280 : for (i = 0; i < tree_map->length (); i++)
6453 : {
6454 25609 : gimple *init;
6455 25609 : replace_info = (*tree_map)[i];
6456 :
6457 25609 : int p = replace_info->parm_num;
6458 25609 : if (old_param_adjustments)
6459 0 : p = new_param_indices[p];
6460 :
6461 25609 : tree parm;
6462 75549 : for (parm = DECL_ARGUMENTS (old_decl); p;
6463 49940 : parm = DECL_CHAIN (parm))
6464 49940 : p--;
6465 25609 : gcc_assert (parm);
6466 25609 : init = setup_one_parameter (&id, parm, replace_info->new_tree,
6467 : id.src_fn, NULL, &vars);
6468 25609 : if (init)
6469 7801 : init_stmts.safe_push (init);
6470 : }
6471 :
6472 230763 : ipa_param_body_adjustments *param_body_adjs = NULL;
6473 230763 : if (param_adjustments)
6474 : {
6475 300120 : param_body_adjs = new ipa_param_body_adjustments (param_adjustments,
6476 : new_decl, old_decl,
6477 150060 : &id, &vars, tree_map);
6478 150060 : id.param_body_adjs = param_body_adjs;
6479 150060 : DECL_ARGUMENTS (new_decl) = param_body_adjs->get_new_param_chain ();
6480 : }
6481 80703 : else if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
6482 149592 : DECL_ARGUMENTS (new_decl)
6483 74796 : = copy_arguments_nochange (DECL_ARGUMENTS (old_decl), &id);
6484 :
6485 230763 : DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
6486 230763 : BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
6487 :
6488 230763 : declare_inline_vars (DECL_INITIAL (new_decl), vars);
6489 :
6490 230763 : if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
6491 : /* Add local vars. */
6492 133855 : add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
6493 :
6494 230763 : if (DECL_RESULT (old_decl) == NULL_TREE)
6495 : ;
6496 150060 : else if (param_adjustments && param_adjustments->m_skip_return
6497 277697 : && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
6498 : {
6499 40345 : tree resdecl_repl = copy_result_decl_to_var (DECL_RESULT (old_decl),
6500 : &id);
6501 40345 : declare_inline_vars (NULL, resdecl_repl);
6502 40345 : if (DECL_BY_REFERENCE (DECL_RESULT (old_decl)))
6503 111 : resdecl_repl = build_fold_addr_expr (resdecl_repl);
6504 40345 : insert_decl_map (&id, DECL_RESULT (old_decl), resdecl_repl);
6505 :
6506 80690 : DECL_RESULT (new_decl)
6507 40345 : = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
6508 : RESULT_DECL, NULL_TREE, void_type_node);
6509 40345 : DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
6510 40345 : DECL_IS_MALLOC (new_decl) = false;
6511 40345 : cfun->returns_struct = 0;
6512 40345 : cfun->returns_pcc_struct = 0;
6513 : }
6514 : else
6515 : {
6516 190418 : tree old_name;
6517 190418 : DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
6518 190418 : lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
6519 380836 : if (gimple_in_ssa_p (id.src_cfun)
6520 190418 : && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
6521 4386 : && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
6522 : {
6523 4367 : tree new_name = make_ssa_name (DECL_RESULT (new_decl));
6524 4367 : insert_decl_map (&id, old_name, new_name);
6525 4367 : SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
6526 4367 : set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
6527 : }
6528 : }
6529 :
6530 : /* Set up the destination functions loop tree. */
6531 230763 : if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
6532 : {
6533 230763 : cfun->curr_properties &= ~PROP_loops;
6534 230763 : loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
6535 230763 : cfun->curr_properties |= PROP_loops;
6536 : }
6537 :
6538 : /* Copy the Function's body. */
6539 230763 : copy_body (&id, ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
6540 : new_entry);
6541 :
6542 : /* Renumber the lexical scoping (non-code) blocks consecutively. */
6543 230763 : number_blocks (new_decl);
6544 :
6545 : /* We want to create the BB unconditionally, so that the addition of
6546 : debug stmts doesn't affect BB count, which may in the end cause
6547 : codegen differences. */
6548 230763 : bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6549 469327 : while (init_stmts.length ())
6550 7801 : insert_init_stmt (&id, bb, init_stmts.pop ());
6551 230763 : if (param_body_adjs)
6552 150060 : param_body_adjs->append_init_stmts (bb);
6553 230763 : update_clone_info (&id);
6554 :
6555 : /* Remap the nonlocal_goto_save_area, if any. */
6556 230763 : if (cfun->nonlocal_goto_save_area)
6557 : {
6558 0 : struct walk_stmt_info wi;
6559 :
6560 0 : memset (&wi, 0, sizeof (wi));
6561 0 : wi.info = &id;
6562 0 : walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
6563 : }
6564 :
6565 : /* Clean up. */
6566 461526 : delete id.decl_map;
6567 230763 : if (id.debug_map)
6568 1269 : delete id.debug_map;
6569 230763 : free_dominance_info (CDI_DOMINATORS);
6570 230763 : free_dominance_info (CDI_POST_DOMINATORS);
6571 :
6572 230763 : update_max_bb_count ();
6573 230763 : fold_marked_statements (0, id.statements_to_fold);
6574 461526 : delete id.statements_to_fold;
6575 230763 : delete_unreachable_blocks_update_callgraph (id.dst_node, update_clones);
6576 230763 : if (id.dst_node->definition)
6577 226174 : cgraph_edge::rebuild_references ();
6578 230763 : if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
6579 : {
6580 230763 : calculate_dominance_info (CDI_DOMINATORS);
6581 230763 : fix_loop_structure (NULL);
6582 : }
6583 230763 : update_ssa (TODO_update_ssa);
6584 :
6585 : /* After partial cloning we need to rescale frequencies, so they are
6586 : within proper range in the cloned function. */
6587 230763 : if (new_entry)
6588 : {
6589 46765 : struct cgraph_edge *e;
6590 46765 : rebuild_frequencies ();
6591 :
6592 46765 : new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
6593 222178 : for (e = new_version_node->callees; e; e = e->next_callee)
6594 : {
6595 175413 : basic_block bb = gimple_bb (e->call_stmt);
6596 175413 : e->count = bb->count;
6597 : }
6598 51704 : for (e = new_version_node->indirect_calls; e; e = e->next_callee)
6599 : {
6600 4939 : basic_block bb = gimple_bb (e->call_stmt);
6601 4939 : e->count = bb->count;
6602 : }
6603 : }
6604 :
6605 230763 : if (param_body_adjs && MAY_HAVE_DEBUG_BIND_STMTS)
6606 : {
6607 125357 : vec<tree, va_gc> **debug_args = NULL;
6608 125357 : unsigned int len = 0;
6609 125357 : unsigned reset_len = param_body_adjs->m_reset_debug_decls.length ();
6610 :
6611 223396 : for (i = 0; i < reset_len; i++)
6612 : {
6613 98039 : tree parm = param_body_adjs->m_reset_debug_decls[i];
6614 98039 : gcc_assert (is_gimple_reg (parm));
6615 98039 : tree ddecl;
6616 :
6617 98039 : if (debug_args == NULL)
6618 : {
6619 73131 : debug_args = decl_debug_args_insert (new_decl);
6620 73131 : len = vec_safe_length (*debug_args);
6621 : }
6622 98039 : ddecl = build_debug_expr_decl (TREE_TYPE (parm));
6623 : /* FIXME: Is setting the mode really necessary? */
6624 98039 : SET_DECL_MODE (ddecl, DECL_MODE (parm));
6625 98039 : vec_safe_push (*debug_args, DECL_ORIGIN (parm));
6626 98039 : vec_safe_push (*debug_args, ddecl);
6627 : }
6628 125357 : if (debug_args != NULL)
6629 : {
6630 : /* On the callee side, add
6631 : DEBUG D#Y s=> parm
6632 : DEBUG var => D#Y
6633 : stmts to the first bb where var is a VAR_DECL created for the
6634 : optimized away parameter in DECL_INITIAL block. This hints
6635 : in the debug info that var (whole DECL_ORIGIN is the parm
6636 : PARM_DECL) is optimized away, but could be looked up at the
6637 : call site as value of D#X there. */
6638 73131 : gimple_stmt_iterator cgsi
6639 73131 : = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6640 73131 : gimple *def_temp;
6641 73131 : tree var = vars;
6642 73131 : i = vec_safe_length (*debug_args);
6643 98039 : do
6644 : {
6645 98039 : tree vexpr = NULL_TREE;
6646 98039 : i -= 2;
6647 98039 : while (var != NULL_TREE
6648 130843 : && DECL_ABSTRACT_ORIGIN (var) != (**debug_args)[i])
6649 32804 : var = TREE_CHAIN (var);
6650 98039 : if (var == NULL_TREE)
6651 : break;
6652 98039 : tree parm = (**debug_args)[i];
6653 98039 : if (tree parm_ddef = ssa_default_def (id.src_cfun, parm))
6654 139938 : if (tree *d
6655 69969 : = param_body_adjs->m_dead_ssa_debug_equiv.get (parm_ddef))
6656 24420 : vexpr = *d;
6657 98039 : if (!vexpr)
6658 : {
6659 73619 : vexpr = build_debug_expr_decl (TREE_TYPE (parm));
6660 : /* FIXME: Is setting the mode really necessary? */
6661 73619 : SET_DECL_MODE (vexpr, DECL_MODE (parm));
6662 : }
6663 98039 : def_temp = gimple_build_debug_bind (var, vexpr, NULL);
6664 98039 : gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6665 98039 : def_temp = gimple_build_debug_source_bind (vexpr, parm, NULL);
6666 98039 : gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6667 : }
6668 98039 : while (i > len);
6669 : }
6670 : }
6671 150060 : delete param_body_adjs;
6672 230763 : free_dominance_info (CDI_DOMINATORS);
6673 230763 : free_dominance_info (CDI_POST_DOMINATORS);
6674 :
6675 230763 : gcc_assert (!id.debug_stmts.exists ());
6676 230763 : pop_cfun ();
6677 230763 : input_location = saved_location;
6678 230763 : return;
6679 230763 : }
6680 :
6681 : /* EXP is CALL_EXPR present in a GENERIC expression tree. Try to integrate
6682 : the callee and return the inlined body on success. */
6683 :
6684 : tree
6685 0 : maybe_inline_call_in_expr (tree exp)
6686 : {
6687 0 : tree fn = get_callee_fndecl (exp);
6688 :
6689 : /* We can only try to inline "const" functions. */
6690 0 : if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
6691 : {
6692 0 : call_expr_arg_iterator iter;
6693 0 : copy_body_data id;
6694 0 : tree param, arg, t;
6695 0 : hash_map<tree, tree> decl_map;
6696 :
6697 : /* Remap the parameters. */
6698 0 : for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
6699 0 : param;
6700 0 : param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
6701 0 : decl_map.put (param, arg);
6702 :
6703 0 : memset (&id, 0, sizeof (id));
6704 0 : id.src_fn = fn;
6705 0 : id.dst_fn = current_function_decl;
6706 0 : id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6707 0 : id.decl_map = &decl_map;
6708 :
6709 0 : id.copy_decl = copy_decl_no_change;
6710 0 : id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6711 0 : id.transform_new_cfg = false;
6712 0 : id.transform_return_to_modify = true;
6713 0 : id.transform_parameter = true;
6714 :
6715 : /* Make sure not to unshare trees behind the front-end's back
6716 : since front-end specific mechanisms may rely on sharing. */
6717 0 : id.regimplify = false;
6718 0 : id.do_not_unshare = true;
6719 :
6720 : /* We're not inside any EH region. */
6721 0 : id.eh_lp_nr = 0;
6722 :
6723 0 : t = copy_tree_body (&id);
6724 :
6725 : /* We can only return something suitable for use in a GENERIC
6726 : expression tree. */
6727 0 : if (TREE_CODE (t) == MODIFY_EXPR)
6728 0 : return TREE_OPERAND (t, 1);
6729 0 : }
6730 :
6731 : return NULL_TREE;
6732 : }
6733 :
6734 : /* Duplicate a type, fields and all. */
6735 :
6736 : tree
6737 63 : build_duplicate_type (tree type)
6738 : {
6739 63 : struct copy_body_data id;
6740 :
6741 63 : memset (&id, 0, sizeof (id));
6742 63 : id.src_fn = current_function_decl;
6743 63 : id.dst_fn = current_function_decl;
6744 63 : id.src_cfun = cfun;
6745 63 : id.decl_map = new hash_map<tree, tree>;
6746 63 : id.debug_map = NULL;
6747 63 : id.copy_decl = copy_decl_no_change;
6748 :
6749 63 : type = remap_type_1 (type, &id);
6750 :
6751 126 : delete id.decl_map;
6752 63 : if (id.debug_map)
6753 0 : delete id.debug_map;
6754 :
6755 63 : TYPE_CANONICAL (type) = type;
6756 :
6757 63 : return type;
6758 : }
6759 :
6760 : /* Unshare the entire DECL_SAVED_TREE of FN and return the remapped
6761 : parameters and RESULT_DECL in PARMS and RESULT. Used by C++ constexpr
6762 : evaluation. */
6763 :
6764 : tree
6765 32838385 : copy_fn (tree fn, tree& parms, tree& result)
6766 : {
6767 32838385 : copy_body_data id;
6768 32838385 : tree param;
6769 32838385 : hash_map<tree, tree> decl_map;
6770 :
6771 32838385 : tree *p = &parms;
6772 32838385 : *p = NULL_TREE;
6773 :
6774 32838385 : memset (&id, 0, sizeof (id));
6775 32838385 : id.src_fn = fn;
6776 32838385 : id.dst_fn = current_function_decl;
6777 32838385 : id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6778 32838385 : id.decl_map = &decl_map;
6779 :
6780 139019492 : id.copy_decl = [] (tree decl, copy_body_data *id)
6781 : {
6782 106181107 : if (TREE_CODE (decl) == TYPE_DECL || TREE_CODE (decl) == CONST_DECL)
6783 : /* Don't make copies of local types or injected enumerators,
6784 : the C++ constexpr evaluator doesn't need them and they
6785 : confuse modules streaming. */
6786 : return decl;
6787 103809504 : return copy_decl_no_change (decl, id);
6788 : };
6789 32838385 : id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6790 32838385 : id.transform_new_cfg = false;
6791 32838385 : id.transform_return_to_modify = false;
6792 32838385 : id.transform_parameter = true;
6793 :
6794 : /* Make sure not to unshare trees behind the front-end's back
6795 : since front-end specific mechanisms may rely on sharing. */
6796 32838385 : id.regimplify = false;
6797 32838385 : id.do_not_unshare = true;
6798 32838385 : id.do_not_fold = true;
6799 :
6800 : /* We're not inside any EH region. */
6801 32838385 : id.eh_lp_nr = 0;
6802 :
6803 : /* Remap the parameters and result and return them to the caller. */
6804 32838385 : for (param = DECL_ARGUMENTS (fn);
6805 79759355 : param;
6806 46920970 : param = DECL_CHAIN (param))
6807 : {
6808 46920970 : *p = remap_decl (param, &id);
6809 46920970 : p = &DECL_CHAIN (*p);
6810 : }
6811 :
6812 32838385 : if (DECL_RESULT (fn))
6813 32838385 : result = remap_decl (DECL_RESULT (fn), &id);
6814 : else
6815 0 : result = NULL_TREE;
6816 :
6817 32838385 : return copy_tree_body (&id);
6818 32838385 : }
|