Branch data Line data Source code
1 : : /* Tree inlining.
2 : : Copyright (C) 2001-2025 Free Software Foundation, Inc.
3 : : Contributed by Alexandre Oliva <aoliva@redhat.com>
4 : :
5 : : This file is part of GCC.
6 : :
7 : : GCC is free software; you can redistribute it and/or modify
8 : : it under the terms of the GNU General Public License as published by
9 : : the Free Software Foundation; either version 3, or (at your option)
10 : : any later version.
11 : :
12 : : GCC is distributed in the hope that it will be useful,
13 : : but WITHOUT ANY WARRANTY; without even the implied warranty of
14 : : MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 : : GNU General Public License for more details.
16 : :
17 : : You should have received a copy of the GNU General Public License
18 : : along with GCC; see the file COPYING3. If not see
19 : : <http://www.gnu.org/licenses/>. */
20 : :
21 : : #include "config.h"
22 : : #include "system.h"
23 : : #include "coretypes.h"
24 : : #include "backend.h"
25 : : #include "target.h"
26 : : #include "rtl.h"
27 : : #include "tree.h"
28 : : #include "gimple.h"
29 : : #include "cfghooks.h"
30 : : #include "tree-pass.h"
31 : : #include "ssa.h"
32 : : #include "cgraph.h"
33 : : #include "tree-pretty-print.h"
34 : : #include "diagnostic-core.h"
35 : : #include "gimple-predict.h"
36 : : #include "fold-const.h"
37 : : #include "stor-layout.h"
38 : : #include "calls.h"
39 : : #include "tree-inline.h"
40 : : #include "langhooks.h"
41 : : #include "cfganal.h"
42 : : #include "tree-iterator.h"
43 : : #include "intl.h"
44 : : #include "gimple-iterator.h"
45 : : #include "gimple-fold.h"
46 : : #include "tree-eh.h"
47 : : #include "gimplify.h"
48 : : #include "gimplify-me.h"
49 : : #include "gimple-walk.h"
50 : : #include "tree-cfg.h"
51 : : #include "tree-into-ssa.h"
52 : : #include "tree-dfa.h"
53 : : #include "tree-ssa.h"
54 : : #include "except.h"
55 : : #include "debug.h"
56 : : #include "value-prof.h"
57 : : #include "cfgloop.h"
58 : : #include "builtins.h"
59 : : #include "stringpool.h"
60 : : #include "attribs.h"
61 : : #include "sreal.h"
62 : : #include "tree-cfgcleanup.h"
63 : : #include "tree-ssa-live.h"
64 : : #include "alloc-pool.h"
65 : : #include "symbol-summary.h"
66 : : #include "symtab-thunks.h"
67 : : #include "symtab-clones.h"
68 : : #include "asan.h"
69 : :
70 : : /* I'm not real happy about this, but we need to handle gimple and
71 : : non-gimple trees. */
72 : :
73 : : /* Inlining, Cloning, Versioning, Parallelization
74 : :
75 : : Inlining: a function body is duplicated, but the PARM_DECLs are
76 : : remapped into VAR_DECLs, and non-void RETURN_EXPRs become
77 : : MODIFY_EXPRs that store to a dedicated returned-value variable.
78 : : The duplicated eh_region info of the copy will later be appended
79 : : to the info for the caller; the eh_region info in copied throwing
80 : : statements and RESX statements are adjusted accordingly.
81 : :
82 : : Cloning: (only in C++) We have one body for a con/de/structor, and
83 : : multiple function decls, each with a unique parameter list.
84 : : Duplicate the body, using the given splay tree; some parameters
85 : : will become constants (like 0 or 1).
86 : :
87 : : Versioning: a function body is duplicated and the result is a new
88 : : function rather than into blocks of an existing function as with
89 : : inlining. Some parameters will become constants.
90 : :
91 : : Parallelization: a region of a function is duplicated resulting in
92 : : a new function. Variables may be replaced with complex expressions
93 : : to enable shared variable semantics.
94 : :
95 : : All of these will simultaneously lookup any callgraph edges. If
96 : : we're going to inline the duplicated function body, and the given
97 : : function has some cloned callgraph nodes (one for each place this
98 : : function will be inlined) those callgraph edges will be duplicated.
99 : : If we're cloning the body, those callgraph edges will be
100 : : updated to point into the new body. (Note that the original
101 : : callgraph node and edge list will not be altered.)
102 : :
103 : : See the CALL_EXPR handling case in copy_tree_body_r (). */
104 : :
105 : : /* To Do:
106 : :
107 : : o In order to make inlining-on-trees work, we pessimized
108 : : function-local static constants. In particular, they are now
109 : : always output, even when not addressed. Fix this by treating
110 : : function-local static constants just like global static
111 : : constants; the back-end already knows not to output them if they
112 : : are not needed.
113 : :
114 : : o Provide heuristics to clamp inlining of recursive template
115 : : calls? */
116 : :
117 : :
118 : : /* Weights that estimate_num_insns uses to estimate the size of the
119 : : produced code. */
120 : :
121 : : eni_weights eni_size_weights;
122 : :
123 : : /* Weights that estimate_num_insns uses to estimate the time necessary
124 : : to execute the produced code. */
125 : :
126 : : eni_weights eni_time_weights;
127 : :
128 : : /* Prototypes. */
129 : :
130 : : static tree declare_return_variable (copy_body_data *, tree, tree,
131 : : basic_block);
132 : : static void remap_block (tree *, copy_body_data *);
133 : : static void copy_bind_expr (tree *, int *, copy_body_data *);
134 : : static void declare_inline_vars (tree, tree);
135 : : static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
136 : : static void prepend_lexical_block (tree current_block, tree new_block);
137 : : static tree copy_result_decl_to_var (tree, copy_body_data *);
138 : : static tree copy_decl_maybe_to_var (tree, copy_body_data *);
139 : : static gimple_seq remap_gimple_stmt (gimple *, copy_body_data *);
140 : : static void insert_init_stmt (copy_body_data *, basic_block, gimple *);
141 : :
142 : : /* Insert a tree->tree mapping for ID. Despite the name suggests
143 : : that the trees should be variables, it is used for more than that. */
144 : :
145 : : void
146 : 453699261 : insert_decl_map (copy_body_data *id, tree key, tree value)
147 : : {
148 : 453699261 : id->decl_map->put (key, value);
149 : :
150 : : /* Always insert an identity map as well. If we see this same new
151 : : node again, we won't want to duplicate it a second time. */
152 : 453699261 : if (key != value && value)
153 : 149839009 : id->decl_map->put (value, value);
154 : 453699261 : }
155 : :
156 : : /* If nonzero, we're remapping the contents of inlined debug
157 : : statements. If negative, an error has occurred, such as a
158 : : reference to a variable that isn't available in the inlined
159 : : context. */
160 : : static int processing_debug_stmt = 0;
161 : :
162 : : /* Construct new SSA name for old NAME. ID is the inline context. */
163 : :
164 : : static tree
165 : 65199304 : remap_ssa_name (tree name, copy_body_data *id)
166 : : {
167 : 65199304 : tree new_tree, var;
168 : 65199304 : tree *n;
169 : :
170 : 65199304 : gcc_assert (TREE_CODE (name) == SSA_NAME);
171 : :
172 : 65199304 : n = id->decl_map->get (name);
173 : 65199304 : if (n)
174 : : {
175 : : /* When we perform edge redirection as part of CFG copy, IPA-SRA can
176 : : remove an unused LHS from a call statement. Such LHS can however
177 : : still appear in debug statements, but their value is lost in this
178 : : function and we do not want to map them. */
179 : 47493672 : if (id->killed_new_ssa_names
180 : 47493672 : && id->killed_new_ssa_names->contains (*n))
181 : : {
182 : 967 : gcc_assert (processing_debug_stmt);
183 : 967 : processing_debug_stmt = -1;
184 : 967 : return name;
185 : : }
186 : :
187 : 47492705 : return unshare_expr (*n);
188 : : }
189 : :
190 : 17705632 : if (processing_debug_stmt)
191 : : {
192 : 100036 : if (SSA_NAME_IS_DEFAULT_DEF (name)
193 : 99981 : && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
194 : 99934 : && id->entry_bb == NULL
195 : 199969 : && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
196 : : {
197 : 99933 : gimple *def_temp;
198 : 99933 : gimple_stmt_iterator gsi;
199 : 99933 : tree val = SSA_NAME_VAR (name);
200 : :
201 : 99933 : n = id->decl_map->get (val);
202 : 99933 : if (n != NULL)
203 : 99933 : val = *n;
204 : 99933 : if (TREE_CODE (val) != PARM_DECL
205 : 99933 : && !(VAR_P (val) && DECL_ABSTRACT_ORIGIN (val)))
206 : : {
207 : 0 : processing_debug_stmt = -1;
208 : 0 : return name;
209 : : }
210 : 99933 : n = id->decl_map->get (val);
211 : 99933 : if (n && TREE_CODE (*n) == DEBUG_EXPR_DECL)
212 : : return *n;
213 : 32880 : tree vexpr = build_debug_expr_decl (TREE_TYPE (name));
214 : : /* FIXME: Is setting the mode really necessary? */
215 : 32880 : SET_DECL_MODE (vexpr, DECL_MODE (SSA_NAME_VAR (name)));
216 : 32880 : def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
217 : 32880 : gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
218 : 32880 : gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
219 : 32880 : insert_decl_map (id, val, vexpr);
220 : 32880 : return vexpr;
221 : : }
222 : :
223 : 103 : processing_debug_stmt = -1;
224 : 103 : return name;
225 : : }
226 : :
227 : : /* Remap anonymous SSA names or SSA names of anonymous decls. */
228 : 17605596 : var = SSA_NAME_VAR (name);
229 : 3533088 : if (!var
230 : 3533088 : || (!SSA_NAME_IS_DEFAULT_DEF (name)
231 : 3173584 : && VAR_P (var)
232 : 2964821 : && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
233 : 2964821 : && DECL_ARTIFICIAL (var)
234 : 501378 : && DECL_IGNORED_P (var)
235 : 164920 : && !DECL_NAME (var)))
236 : : {
237 : 14077964 : struct ptr_info_def *pi;
238 : 14077964 : new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id));
239 : 14077964 : if (!var && SSA_NAME_IDENTIFIER (name))
240 : 1914820 : SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
241 : 14077964 : insert_decl_map (id, name, new_tree);
242 : 28155928 : SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
243 : 14077964 : = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
244 : : /* At least IPA points-to info can be directly transferred. */
245 : 14077964 : if (id->src_cfun->gimple_df
246 : 14077964 : && id->src_cfun->gimple_df->ipa_pta
247 : 14729 : && POINTER_TYPE_P (TREE_TYPE (name))
248 : 2595 : && (pi = SSA_NAME_PTR_INFO (name))
249 : 14080554 : && !pi->pt.anything)
250 : : {
251 : 2569 : struct ptr_info_def *new_pi = get_ptr_info (new_tree);
252 : 2569 : new_pi->pt = pi->pt;
253 : : }
254 : : /* So can range-info. */
255 : 23214188 : if (!POINTER_TYPE_P (TREE_TYPE (name))
256 : 22673915 : && SSA_NAME_RANGE_INFO (name))
257 : 2954300 : duplicate_ssa_name_range_info (new_tree, name);
258 : 14077964 : return new_tree;
259 : : }
260 : :
261 : : /* Do not set DEF_STMT yet as statement is not copied yet. We do that
262 : : in copy_bb. */
263 : 3527632 : new_tree = remap_decl (var, id);
264 : :
265 : : /* We might've substituted constant or another SSA_NAME for
266 : : the variable.
267 : :
268 : : Replace the SSA name representing RESULT_DECL by variable during
269 : : inlining: this saves us from need to introduce PHI node in a case
270 : : return value is just partly initialized. */
271 : 343493 : if ((VAR_P (new_tree) || TREE_CODE (new_tree) == PARM_DECL)
272 : 3871125 : && (!SSA_NAME_VAR (name)
273 : 3527632 : || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
274 : 0 : || !id->transform_return_to_modify))
275 : : {
276 : 3527632 : struct ptr_info_def *pi;
277 : 3527632 : new_tree = make_ssa_name (new_tree);
278 : 3527632 : insert_decl_map (id, name, new_tree);
279 : 7055264 : SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
280 : 3527632 : = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
281 : : /* At least IPA points-to info can be directly transferred. */
282 : 3527632 : if (id->src_cfun->gimple_df
283 : 3527632 : && id->src_cfun->gimple_df->ipa_pta
284 : 2515 : && POINTER_TYPE_P (TREE_TYPE (name))
285 : 284 : && (pi = SSA_NAME_PTR_INFO (name))
286 : 3527910 : && !pi->pt.anything)
287 : : {
288 : 278 : struct ptr_info_def *new_pi = get_ptr_info (new_tree);
289 : 278 : new_pi->pt = pi->pt;
290 : : }
291 : : /* So can range-info. */
292 : 6239824 : if (!POINTER_TYPE_P (TREE_TYPE (name))
293 : 6155909 : && SSA_NAME_RANGE_INFO (name))
294 : 952675 : duplicate_ssa_name_range_info (new_tree, name);
295 : 3527632 : if (SSA_NAME_IS_DEFAULT_DEF (name))
296 : : {
297 : : /* By inlining function having uninitialized variable, we might
298 : : extend the lifetime (variable might get reused). This cause
299 : : ICE in the case we end up extending lifetime of SSA name across
300 : : abnormal edge, but also increase register pressure.
301 : :
302 : : We simply initialize all uninitialized vars by 0 except
303 : : for case we are inlining to very first BB. We can avoid
304 : : this for all BBs that are not inside strongly connected
305 : : regions of the CFG, but this is expensive to test. */
306 : 359504 : if (id->entry_bb
307 : 40289 : && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
308 : 0 : && (!SSA_NAME_VAR (name)
309 : 0 : || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
310 : 359504 : && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
311 : 0 : 0)->dest
312 : 0 : || EDGE_COUNT (id->entry_bb->preds) != 1))
313 : : {
314 : 0 : gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
315 : 0 : gimple *init_stmt;
316 : 0 : tree zero = build_zero_cst (TREE_TYPE (new_tree));
317 : :
318 : 0 : init_stmt = gimple_build_assign (new_tree, zero);
319 : 0 : gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
320 : 0 : SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
321 : : }
322 : : else
323 : : {
324 : 359504 : SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
325 : 359504 : set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
326 : : }
327 : : }
328 : : }
329 : : else
330 : 0 : insert_decl_map (id, name, new_tree);
331 : : return new_tree;
332 : : }
333 : :
/* Remap DECL during the copying of the BLOCK tree for the function.
   Returns the replacement declaration, creating one via ID->copy_decl
   on first sight and recording it in ID->decl_map.  */

tree
remap_decl (tree decl, copy_body_data *id)
{
  tree *n;

  /* We only remap local variables in the current function.  */

  /* See if we have remapped this declaration.  */

  n = id->decl_map->get (decl);

  if (!n && processing_debug_stmt)
    {
      /* Never create new decls on behalf of a debug statement; signal
	 failure to the caller instead.  */
      processing_debug_stmt = -1;
      return decl;
    }

  /* When remapping a type within copy_gimple_seq_and_replace_locals, all
     necessary DECLs have already been remapped and we do not want to duplicate
     a decl coming from outside of the sequence we are copying.  */
  if (!n
      && id->prevent_decl_creation_for_types
      && id->remapping_type_depth > 0
      && (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL))
    return decl;

  /* If we didn't already have an equivalent for this declaration, create one
     now.  */
  if (!n)
    {
      /* Make a copy of the variable or label.  */
      tree t = id->copy_decl (decl, id);

      /* Remember it, so that if we encounter this local entity again
	 we can reuse this copy.  Do this early because remap_type may
	 need this decl for TYPE_STUB_DECL.  */
      insert_decl_map (id, decl, t);

      /* copy_decl may return a non-decl (e.g. a constant) or the original
	 decl itself; in either case there is nothing further to remap.  */
      if (!DECL_P (t) || t == decl)
	return t;

      /* Remap types, if necessary.  */
      TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
      if (TREE_CODE (t) == TYPE_DECL)
	{
	  DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);

	  /* Preserve the invariant that DECL_ORIGINAL_TYPE != TREE_TYPE,
	     which is enforced in gen_typedef_die when DECL_ABSTRACT_ORIGIN
	     is not set on the TYPE_DECL, for example in LTO mode.  */
	  if (DECL_ORIGINAL_TYPE (t) == TREE_TYPE (t))
	    {
	      tree x = build_variant_type_copy (TREE_TYPE (t));
	      TYPE_STUB_DECL (x) = TYPE_STUB_DECL (TREE_TYPE (t));
	      TYPE_NAME (x) = TYPE_NAME (TREE_TYPE (t));
	      DECL_ORIGINAL_TYPE (t) = x;
	    }
	}

      /* Remap sizes as necessary.  */
      walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
      walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);

      /* If fields, do likewise for offset and qualifier.  */
      if (TREE_CODE (t) == FIELD_DECL)
	{
	  walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
	  if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
	    walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
	}

      return t;
    }

  /* An existing mapping was found; hand it back, unsharing unless the
     caller asked for shared trees.  */
  if (id->do_not_unshare)
    return *n;
  else
    return unshare_expr (*n);
}
415 : :
/* Worker for remap_type: build and register a remapped copy of
   variably modified TYPE in the context ID.  Returns the new type,
   which is always recorded in ID->decl_map before recursing so that
   self-referential types terminate.  */

static tree
remap_type_1 (tree type, copy_body_data *id)
{
  tree new_tree, t;

  /* We do need a copy.  build and register it now.  If this is a pointer or
     reference type, remap the designated type and make a new pointer or
     reference type.  */
  if (TREE_CODE (type) == POINTER_TYPE)
    {
      new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
					      TYPE_MODE (type),
					      TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
	new_tree = build_type_attribute_qual_variant (new_tree,
						      TYPE_ATTRIBUTES (type),
						      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
						TYPE_MODE (type),
						TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
	new_tree = build_type_attribute_qual_variant (new_tree,
						      TYPE_ATTRIBUTES (type),
						      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else
    new_tree = copy_node (type);

  /* Register the copy before remapping its innards, so recursive
     references to TYPE resolve to NEW_TREE.  */
  insert_decl_map (id, type, new_tree);

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (type);
  if (type != t)
    {
      t = remap_type (t, id);
      TYPE_MAIN_VARIANT (new_tree) = t;
      TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new_tree;
    }
  else
    {
      TYPE_MAIN_VARIANT (new_tree) = new_tree;
      TYPE_NEXT_VARIANT (new_tree) = NULL;
    }

  if (TYPE_STUB_DECL (type))
    TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);

  /* Lazily create pointer and reference types.  */
  TYPE_POINTER_TO (new_tree) = NULL;
  TYPE_REFERENCE_TO (new_tree) = NULL;

  /* Copy all types that may contain references to local variables; be sure to
     preserve sharing in between type and its main variant when possible.  */
  switch (TREE_CODE (new_tree))
    {
    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
	{
	  gcc_checking_assert (TYPE_MIN_VALUE (type) == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type)));
	  gcc_checking_assert (TYPE_MAX_VALUE (type) == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type)));

	  TYPE_MIN_VALUE (new_tree) = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree));
	  TYPE_MAX_VALUE (new_tree) = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree));
	}
      else
	{
	  /* Only non-constant bounds can reference local variables and
	     thus need remapping.  */
	  t = TYPE_MIN_VALUE (new_tree);
	  if (t && TREE_CODE (t) != INTEGER_CST)
	    walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);

	  t = TYPE_MAX_VALUE (new_tree);
	  if (t && TREE_CODE (t) != INTEGER_CST)
	    walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
	}
      return new_tree;

    case FUNCTION_TYPE:
      if (TYPE_MAIN_VARIANT (new_tree) != new_tree
	  && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
	TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
      else
	TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      if (TYPE_MAIN_VARIANT (new_tree) != new_tree
	  && TYPE_ARG_TYPES (type) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type)))
	TYPE_ARG_TYPES (new_tree) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree));
      else
	walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case ARRAY_TYPE:
      if (TYPE_MAIN_VARIANT (new_tree) != new_tree
	  && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
	TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
      else
	TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);

      if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
	{
	  gcc_checking_assert (TYPE_DOMAIN (type)
			       == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
	  TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
	}
      else
	{
	  TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
	  /* For array bounds where we have decided not to copy over the bounds
	     variable which isn't used in OpenMP/OpenACC region, change them to
	     an uninitialized VAR_DECL temporary.  */
	  if (id->adjust_array_error_bounds
	      && TYPE_DOMAIN (new_tree)
	      && TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) == error_mark_node
	      && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
	    {
	      tree v = create_tmp_var (TREE_TYPE (TYPE_DOMAIN (new_tree)));
	      DECL_ATTRIBUTES (v)
		= tree_cons (get_identifier ("omp dummy var"), NULL_TREE,
			     DECL_ATTRIBUTES (v));
	      TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) = v;
	    }
	}
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      if (TYPE_MAIN_VARIANT (type) != type
	  && TYPE_FIELDS (type) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type)))
	TYPE_FIELDS (new_tree) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree));
      else
	{
	  tree f, nf = NULL;

	  /* Remap each field, rebuilding the chain (reversed, then
	     restored by nreverse below).  */
	  for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
	    {
	      t = remap_decl (f, id);
	      DECL_CONTEXT (t) = new_tree;
	      DECL_CHAIN (t) = nf;
	      nf = t;
	    }
	  TYPE_FIELDS (new_tree) = nreverse (nf);
	}
      break;

    case OFFSET_TYPE:
    default:
      /* Shouldn't have been thought variable sized.  */
      gcc_unreachable ();
    }

  /* All variants of type share the same size, so use the already remaped data.  */
  if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
    {
      tree s = TYPE_SIZE (type);
      tree mvs = TYPE_SIZE (TYPE_MAIN_VARIANT (type));
      tree su = TYPE_SIZE_UNIT (type);
      tree mvsu = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));
      gcc_checking_assert ((TREE_CODE (s) == PLACEHOLDER_EXPR
			    && (TREE_CODE (mvs) == PLACEHOLDER_EXPR))
			   || s == mvs);
      gcc_checking_assert ((TREE_CODE (su) == PLACEHOLDER_EXPR
			    && (TREE_CODE (mvsu) == PLACEHOLDER_EXPR))
			   || su == mvsu);
      TYPE_SIZE (new_tree) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree));
      TYPE_SIZE_UNIT (new_tree) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree));
    }
  else
    {
      walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
      walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
    }

  return new_tree;
}
602 : :
603 : : /* Helper function for remap_type_2, called through walk_tree. */
604 : :
605 : : static tree
606 : 30303 : remap_type_3 (tree *tp, int *walk_subtrees, void *data)
607 : : {
608 : 30303 : copy_body_data *id = (copy_body_data *) data;
609 : :
610 : 30303 : if (TYPE_P (*tp))
611 : 0 : *walk_subtrees = 0;
612 : :
613 : 30303 : else if (DECL_P (*tp) && remap_decl (*tp, id) != *tp)
614 : : return *tp;
615 : :
616 : : return NULL_TREE;
617 : : }
618 : :
/* Return true if TYPE needs to be remapped because remap_decl on any
   needed embedded decl returns something other than that decl.  */

static bool
remap_type_2 (tree type, copy_body_data *id)
{
  tree t;

/* Check one embedded expression T: report "needs remapping" if T is a
   decl that remap_decl changes, or if an ungimplified size expression
   walks onto such a decl (via remap_type_3).  */
#define RETURN_TRUE_IF_VAR(T) \
  do								\
    {								\
      tree _t = (T);						\
      if (_t)							\
	{							\
	  if (DECL_P (_t) && remap_decl (_t, id) != _t)		\
	    return true;					\
	  if (!TYPE_SIZES_GIMPLIFIED (type)			\
	      && walk_tree (&_t, remap_type_3, id, NULL))	\
	    return true;					\
	}							\
    }								\
  while (0)

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case FUNCTION_TYPE:
    case METHOD_TYPE:
      return remap_type_2 (TREE_TYPE (type), id);

    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
      RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
      return false;

    case ARRAY_TYPE:
      if (remap_type_2 (TREE_TYPE (type), id)
	  || (TYPE_DOMAIN (type) && remap_type_2 (TYPE_DOMAIN (type), id)))
	return true;
      /* Fall through to the TYPE_SIZE checks below.  */
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
	if (TREE_CODE (t) == FIELD_DECL)
	  {
	    RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
	    RETURN_TRUE_IF_VAR (DECL_SIZE (t));
	    RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
	    if (TREE_CODE (type) == QUAL_UNION_TYPE)
	      RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
	  }
      /* Fall through to the TYPE_SIZE checks below.  */
      break;

    default:
      return false;
    }

  /* Aggregate/array cases additionally depend on the type's own size
     expressions.  */
  RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
  RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
  return false;
#undef RETURN_TRUE_IF_VAR
}
688 : :
689 : : tree
690 : 859152905 : remap_type (tree type, copy_body_data *id)
691 : : {
692 : 859152905 : tree *node;
693 : 859152905 : tree tmp;
694 : :
695 : 859152905 : if (type == NULL)
696 : : return type;
697 : :
698 : : /* See if we have remapped this type. */
699 : 859025688 : node = id->decl_map->get (type);
700 : 859025688 : if (node)
701 : 557124732 : return *node;
702 : :
703 : : /* The type only needs remapping if it's variably modified. */
704 : 301900956 : if (! variably_modified_type_p (type, id->src_fn)
705 : : /* Don't remap if copy_decl method doesn't always return a new
706 : : decl and for all embedded decls returns the passed in decl. */
707 : 301900956 : || (id->dont_remap_vla_if_no_change && !remap_type_2 (type, id)))
708 : : {
709 : 301802608 : insert_decl_map (id, type, type);
710 : 301802608 : return type;
711 : : }
712 : :
713 : 98348 : id->remapping_type_depth++;
714 : 98348 : tmp = remap_type_1 (type, id);
715 : 98348 : id->remapping_type_depth--;
716 : :
717 : 98348 : return tmp;
718 : : }
719 : :
720 : : /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs. */
721 : :
722 : : static bool
723 : 35860270 : can_be_nonlocal (tree decl, copy_body_data *id)
724 : : {
725 : : /* We cannot duplicate function decls. */
726 : 35860270 : if (TREE_CODE (decl) == FUNCTION_DECL)
727 : : return true;
728 : :
729 : : /* Local static vars must be non-local or we get multiple declaration
730 : : problems. */
731 : 35842550 : if (VAR_P (decl) && !auto_var_in_fn_p (decl, id->src_fn))
732 : : return true;
733 : :
734 : : return false;
735 : : }
736 : :
/* Remap the chain of declarations DECLS for the inline context ID,
   returning a new chain of the remapped copies (in the original
   order).  Decls that can stay non-local are instead pushed onto
   *NONLOCALIZED_LIST (when debug info warrants it) and, for local
   statics, added to CFUN's local decls.  */

static tree
remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
	     copy_body_data *id)
{
  tree old_var;
  tree new_decls = NULL_TREE;

  /* Remap its variables.  */
  for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
    {
      tree new_var;

      if (can_be_nonlocal (old_var, id))
	{
	  /* We need to add this variable to the local decls as otherwise
	     nothing else will do so.  */
	  if (VAR_P (old_var) && ! DECL_EXTERNAL (old_var) && cfun)
	    add_local_decl (cfun, old_var);
	  /* Record it as nonlocalized only when debug info keeps it.  */
	  if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
	      && !DECL_IGNORED_P (old_var)
	      && nonlocalized_list)
	    vec_safe_push (*nonlocalized_list, old_var);
	  continue;
	}

      /* Remap the variable.  */
      new_var = remap_decl (old_var, id);

      /* If we didn't remap this variable, we can't mess with its
	 TREE_CHAIN.  If we remapped this variable to the return slot, it's
	 already declared somewhere else, so don't declare it here.  */

      if (new_var == old_var || new_var == id->retvar)
	;
      else if (!new_var)
	{
	  if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
	      && !DECL_IGNORED_P (old_var)
	      && nonlocalized_list)
	    vec_safe_push (*nonlocalized_list, old_var);
	}
      else
	{
	  gcc_assert (DECL_P (new_var));
	  /* Chain in reverse; the final nreverse restores order.  */
	  DECL_CHAIN (new_var) = new_decls;
	  new_decls = new_var;

	  /* Also copy value-expressions.  */
	  if (VAR_P (new_var) && DECL_HAS_VALUE_EXPR_P (new_var))
	    {
	      tree tem = DECL_VALUE_EXPR (new_var);
	      bool old_regimplify = id->regimplify;
	      id->remapping_type_depth++;
	      walk_tree (&tem, copy_tree_body_r, id, NULL);
	      id->remapping_type_depth--;
	      id->regimplify = old_regimplify;
	      SET_DECL_VALUE_EXPR (new_var, tem);
	    }
	}
    }

  return nreverse (new_decls);
}
800 : :
801 : : /* Copy the BLOCK to contain remapped versions of the variables
802 : : therein. And hook the new block into the block-tree. */
803 : :
804 : : static void
805 : 36869186 : remap_block (tree *block, copy_body_data *id)
806 : : {
807 : 36869186 : tree old_block;
808 : 36869186 : tree new_block;
809 : :
810 : : /* Make the new block. */
811 : 36869186 : old_block = *block;
812 : 36869186 : new_block = make_node (BLOCK);
813 : 36869186 : TREE_USED (new_block) = TREE_USED (old_block);
814 : 36869186 : BLOCK_ABSTRACT_ORIGIN (new_block) = BLOCK_ORIGIN (old_block);
815 : 36869186 : BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
816 : 36869186 : BLOCK_NONLOCALIZED_VARS (new_block)
817 : 36936611 : = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
818 : 36869186 : *block = new_block;
819 : :
820 : : /* Remap its variables. */
821 : 73738372 : BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
822 : 36869186 : &BLOCK_NONLOCALIZED_VARS (new_block),
823 : : id);
824 : :
825 : : /* Remember the remapped block. */
826 : 36869186 : insert_decl_map (id, old_block, new_block);
827 : 36869186 : }
828 : :
829 : : /* Copy the whole block tree and root it in id->block. */
830 : :
831 : : static tree
832 : 23657516 : remap_blocks (tree block, copy_body_data *id)
833 : : {
834 : 23657516 : tree t;
835 : 23657516 : tree new_tree = block;
836 : :
837 : 23657516 : if (!block)
838 : : return NULL;
839 : :
840 : 23657516 : remap_block (&new_tree, id);
841 : 23657516 : gcc_assert (new_tree != block);
842 : 42660973 : for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
843 : 19003457 : prepend_lexical_block (new_tree, remap_blocks (t, id));
844 : : /* Blocks are in arbitrary order, but make things slightly prettier and do
845 : : not swap order when producing a copy. */
846 : 23657516 : BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
847 : 23657516 : return new_tree;
848 : : }
849 : :
850 : : /* Remap the block tree rooted at BLOCK to nothing. */
851 : :
852 : : static void
853 : 77787 : remap_blocks_to_null (tree block, copy_body_data *id)
854 : : {
855 : 77787 : tree t;
856 : 77787 : insert_decl_map (id, block, NULL_TREE);
857 : 126889 : for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
858 : 49102 : remap_blocks_to_null (t, id);
859 : 77787 : }
860 : :
861 : : /* Remap the location info pointed to by LOCUS. */
862 : :
863 : : static location_t
864 : 24741942 : remap_location (location_t locus, copy_body_data *id)
865 : : {
866 : 24741942 : if (LOCATION_BLOCK (locus))
867 : : {
868 : 10358408 : tree *n = id->decl_map->get (LOCATION_BLOCK (locus));
869 : 10358408 : gcc_assert (n);
870 : 10358408 : if (*n)
871 : 10320976 : return set_block (locus, *n);
872 : : }
873 : :
874 : 14420966 : locus = LOCATION_LOCUS (locus);
875 : :
876 : 14420966 : if (locus != UNKNOWN_LOCATION && id->block)
877 : 24 : return set_block (locus, id->block);
878 : :
879 : : return locus;
880 : : }
881 : :
882 : : static void
883 : 24003783 : copy_statement_list (tree *tp)
884 : : {
885 : 24003783 : tree_stmt_iterator oi, ni;
886 : 24003783 : tree new_tree;
887 : :
888 : 24003783 : new_tree = alloc_stmt_list ();
889 : 24003783 : ni = tsi_start (new_tree);
890 : 24003783 : oi = tsi_start (*tp);
891 : 24003783 : TREE_TYPE (new_tree) = TREE_TYPE (*tp);
892 : 24003783 : *tp = new_tree;
893 : :
894 : 74512406 : for (; !tsi_end_p (oi); tsi_next (&oi))
895 : : {
896 : 50508623 : tree stmt = tsi_stmt (oi);
897 : 50508623 : if (TREE_CODE (stmt) == STATEMENT_LIST)
898 : : /* This copy is not redundant; tsi_link_after will smash this
899 : : STATEMENT_LIST into the end of the one we're building, and we
900 : : don't want to do that with the original. */
901 : 50451 : copy_statement_list (&stmt);
902 : 50508623 : tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
903 : : }
904 : 24003783 : }
905 : :
906 : : static void
907 : 13211854 : copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
908 : : {
909 : 13211854 : tree block = BIND_EXPR_BLOCK (*tp);
910 : : /* Copy (and replace) the statement. */
911 : 13211854 : copy_tree_r (tp, walk_subtrees, NULL);
912 : 13211854 : if (block)
913 : : {
914 : 13211383 : remap_block (&block, id);
915 : 13211383 : BIND_EXPR_BLOCK (*tp) = block;
916 : : }
917 : :
918 : 13211854 : if (BIND_EXPR_VARS (*tp))
919 : : /* This will remap a lot of the same decls again, but this should be
920 : : harmless. */
921 : 2677252 : BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
922 : 13211854 : }
923 : :
924 : :
925 : : /* Create a new gimple_seq by remapping all the statements in BODY
926 : : using the inlining information in ID. */
927 : :
928 : : static gimple_seq
929 : 67 : remap_gimple_seq (gimple_seq body, copy_body_data *id)
930 : : {
931 : 67 : gimple_stmt_iterator si;
932 : 67 : gimple_seq new_body = NULL;
933 : :
934 : 67 : for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
935 : : {
936 : 0 : gimple_seq new_stmts = remap_gimple_stmt (gsi_stmt (si), id);
937 : 0 : gimple_seq_add_seq (&new_body, new_stmts);
938 : : }
939 : :
940 : 67 : return new_body;
941 : : }
942 : :
943 : :
944 : : /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
945 : : block using the mapping information in ID. */
946 : :
947 : : static gimple *
948 : 0 : copy_gimple_bind (gbind *stmt, copy_body_data *id)
949 : : {
950 : 0 : gimple *new_bind;
951 : 0 : tree new_block, new_vars;
952 : 0 : gimple_seq body, new_body;
953 : :
954 : : /* Copy the statement. Note that we purposely don't use copy_stmt
955 : : here because we need to remap statements as we copy. */
956 : 0 : body = gimple_bind_body (stmt);
957 : 0 : new_body = remap_gimple_seq (body, id);
958 : :
959 : 0 : new_block = gimple_bind_block (stmt);
960 : 0 : if (new_block)
961 : 0 : remap_block (&new_block, id);
962 : :
963 : : /* This will remap a lot of the same decls again, but this should be
964 : : harmless. */
965 : 0 : new_vars = gimple_bind_vars (stmt);
966 : 0 : if (new_vars)
967 : 0 : new_vars = remap_decls (new_vars, NULL, id);
968 : :
969 : 0 : new_bind = gimple_build_bind (new_vars, new_body, new_block);
970 : :
971 : 0 : return new_bind;
972 : : }
973 : :
974 : : /* Return true if DECL is a parameter or a SSA_NAME for a parameter. */
975 : :
976 : : static bool
977 : 30119 : is_parm (tree decl)
978 : : {
979 : 30119 : if (TREE_CODE (decl) == SSA_NAME)
980 : : {
981 : 28416 : decl = SSA_NAME_VAR (decl);
982 : : if (!decl)
983 : : return false;
984 : : }
985 : :
986 : 17602 : return (TREE_CODE (decl) == PARM_DECL);
987 : : }
988 : :
989 : : /* Copy the TREE_THIS_NOTRAP flag from OLD to T if it is appropriate to do so.
990 : : T and OLD must be both either INDIRECT_REF or MEM_REF. */
991 : :
992 : : static void
993 : 18631863 : maybe_copy_this_notrap (copy_body_data *id, tree t, tree old)
994 : : {
995 : 18631863 : gcc_assert (TREE_CODE (t) == TREE_CODE (old));
996 : :
997 : : /* We cannot blindly propagate the TREE_THIS_NOTRAP flag if we have remapped
998 : : a parameter as the property might be valid only for the parameter itself,
999 : : typically when it is passed by reference. But we propagate the flag when
1000 : : this is the dereference of an entire object done in a type that has self-
1001 : : referential size, to avoid the static size check in tree_could_trap_p. */
1002 : 18631863 : if (TREE_THIS_NOTRAP (old)
1003 : 18631863 : && (!is_parm (TREE_OPERAND (old, 0))
1004 : 12494 : || (!id->transform_parameter && is_parm (TREE_OPERAND (t, 0)))
1005 : 9487 : || ((TREE_CODE (t) == INDIRECT_REF
1006 : 9487 : || integer_zerop (TREE_OPERAND (t, 1)))
1007 : 9487 : && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
1008 : 8822 : && DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
1009 : 8822 : && type_contains_placeholder_p (TREE_TYPE (t)))))
1010 : 17625 : TREE_THIS_NOTRAP (t) = 1;
1011 : 18631863 : }
1012 : :
1013 : : /* Remap the dependence CLIQUE from the source to the destination function
1014 : : as specified in ID. */
1015 : :
1016 : : static unsigned short
1017 : 2793376 : remap_dependence_clique (copy_body_data *id, unsigned short clique)
1018 : : {
1019 : 2793376 : if (clique == 0 || processing_debug_stmt)
1020 : : return 0;
1021 : 2757543 : if (!id->dependence_map)
1022 : 726600 : id->dependence_map = new hash_map<dependence_hash, unsigned short>;
1023 : 2757543 : bool existed;
1024 : 2757543 : unsigned short &newc = id->dependence_map->get_or_insert (clique, &existed);
1025 : 2757543 : if (!existed)
1026 : : {
1027 : : /* Clique 1 is reserved for local ones set by PTA. */
1028 : 1086721 : if (cfun->last_clique == 0)
1029 : 337087 : cfun->last_clique = 1;
1030 : 2173442 : newc = get_new_clique (cfun);
1031 : : }
1032 : 2757543 : return newc;
1033 : : }
1034 : :
/* Remap the GIMPLE operand pointed to by *TP.  DATA is really a
   'struct walk_stmt_info *'.  DATA->INFO is a 'copy_body_data *'.
   WALK_SUBTREES is used to indicate walk_gimple_op whether to keep
   recursing into the children nodes of *TP.  */

static tree
remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
  copy_body_data *id = (copy_body_data *) wi_p->info;
  tree fn = id->src_fn;

  /* For recursive invocations this is no longer the LHS itself.  Clear
     the flag so only the outermost operand is treated as an LHS.  */
  bool is_lhs = wi_p->is_lhs;
  wi_p->is_lhs = false;

  if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      /* The copied statement becomes the defining statement of the
	 remapped LHS name.  */
      if (is_lhs)
	SSA_NAME_DEF_STMT (*tp) = wi_p->stmt;
      return NULL;
    }
  else if (auto_var_in_fn_p (*tp, fn))
    {
      /* Local variables and labels need to be replaced by equivalent
	 variables.  We don't want to copy static variables; there's
	 only one of those, no matter how many times we inline the
	 containing function.  Similarly for globals from an outer
	 function.  */
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      /* ??? The C++ frontend uses void * pointer zero to initialize
         any other type.  This confuses the middle-end type verification.
         As cloned bodies do not go through gimplification again the fixup
	 there doesn't trigger.  */
      if (TREE_CODE (new_decl) == INTEGER_CST
	  && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
	new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    /* High-GIMPLE constructs must not appear in the low-GIMPLE operands
       this callback walks.  */
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == LABEL_DECL
	   && (!DECL_CONTEXT (*tp)
	       || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == FIELD_DECL)
    {
      /* If the enclosing record type is variably_modified_type_p, the field
	 has already been remapped.  Otherwise, it need not be.  */
      tree *n = id->decl_map->get (*tp);
      if (n)
	*tp = *n;
      *walk_subtrees = 0;
    }
  else if (TYPE_P (*tp))
    /* Types may need remapping as well.  */
    *tp = remap_type (*tp, id);
  else if (CONSTANT_CLASS_P (*tp))
    {
      /* If this is a constant, we have to copy the node iff the type
	 will be remapped.  copy_tree_r will not copy a constant.  */
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
	*walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
	*tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
      else
	{
	  *tp = copy_node (*tp);
	  TREE_TYPE (*tp) = new_type;
	}
    }
  else
    {
      /* Otherwise, just copy the node.  Note that copy_tree_r already
	 knows not to copy VAR_DECLs, etc., so this is safe.  */

      if (TREE_CODE (*tp) == MEM_REF && !id->do_not_fold)
	{
	  /* We need to re-canonicalize MEM_REFs from inline substitutions
	     that can happen when a pointer argument is an ADDR_EXPR.
	     Recurse here manually to allow that.  */
	  tree ptr = TREE_OPERAND (*tp, 0);
	  tree type = remap_type (TREE_TYPE (*tp), id);
	  tree old = *tp;
	  walk_tree (&ptr, remap_gimple_op_r, data, NULL);
	  *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
	  /* Carry the flags of the original reference over to the
	     rebuilt one.  */
	  TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
	  TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
	  copy_warning (*tp, old);
	  if (MR_DEPENDENCE_CLIQUE (old) != 0)
	    {
	      MR_DEPENDENCE_CLIQUE (*tp)
		= remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
	      MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
	    }
	  maybe_copy_this_notrap (id, *tp, old);
	  REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
	  *walk_subtrees = 0;
	  return NULL;
	}

      /* Here is the "usual case".  Copy this tree node, and then
	 tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      if (TREE_CODE (*tp) != OMP_CLAUSE)
	TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
	{
	  /* The copied TARGET_EXPR has never been expanded, even if the
	     original node was expanded already.  */
	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
	  TREE_OPERAND (*tp, 3) = NULL_TREE;
	}
      else if (TREE_CODE (*tp) == ADDR_EXPR)
	{
	  /* Variable substitution need not be simple.  In particular,
	     the MEM_REF substitution above.  Make sure that
	     TREE_CONSTANT and friends are up-to-date.  */
	  int invariant = is_gimple_min_invariant (*tp);
	  walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
	  recompute_tree_invariant_for_addr_expr (*tp);

	  /* If this used to be invariant, but is not any longer,
	     then regimplification is probably needed.  */
	  if (invariant && !is_gimple_min_invariant (*tp))
	    id->regimplify = true;

	  *walk_subtrees = 0;
	}
      else if (TREE_CODE (*tp) == OMP_NEXT_VARIANT)
	{
	  /* Neither operand is interesting, and walking the selector
	     causes problems because it's not an expression.  */
	  gcc_assert (TREE_CODE (TREE_OPERAND (*tp, 0)) == INTEGER_CST);
	  *walk_subtrees = 0;
	}
    }

  /* Update the TREE_BLOCK for the cloned expr.  Unmapped blocks fall
     back to id->block unless we are inside a type remapping.  */
  if (EXPR_P (*tp))
    {
      tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
      tree old_block = TREE_BLOCK (*tp);
      if (old_block)
	{
	  tree *n;
	  n = id->decl_map->get (TREE_BLOCK (*tp));
	  if (n)
	    new_block = *n;
	}
      TREE_SET_BLOCK (*tp, new_block);
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
1208 : :
1209 : :
/* Called from copy_body_id via walk_tree.  DATA is really a
   `copy_body_data *'.  Remaps the tree rooted at *TP in place; a
   non-NULL return value stops the walk of the enclosing expression.  */

tree
copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
{
  copy_body_data *id = (copy_body_data *) data;
  tree fn = id->src_fn;
  tree new_block;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from out input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, RETURN_EXPRs should be transformed to just the
     contained MODIFY_EXPR.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than a statement.  */
  if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
    {
      tree assignment = TREE_OPERAND (*tp, 0);

      /* If we're returning something, just turn that into an
	 assignment into the equivalent of the original RESULT_DECL.
	 If the "assignment" is just the result decl, the result
	 decl has already been set (e.g. a recent "foo (&result_decl,
	 ...)"); just toss the entire RETURN_EXPR.  */
      if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
	{
	  /* Replace the RETURN_EXPR with (a copy of) the
	     MODIFY_EXPR hanging underneath.  */
	  *tp = copy_node (assignment);
	}
      else /* Else the RETURN_EXPR returns no value.  */
	{
	  /* Delete the node and return a sentinel so the caller knows
	     the statement was dropped.  */
	  *tp = NULL;
	  return (tree) (void *)1;
	}
    }
  else if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Local variables and labels need to be replaced by equivalent
     variables.  We don't want to copy static variables; there's only
     one of those, no matter how many times we inline the containing
     function.  Similarly for globals from an outer function.  */
  else if (auto_var_in_fn_p (*tp, fn))
    {
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    copy_statement_list (tp);
  else if (TREE_CODE (*tp) == SAVE_EXPR
	   || TREE_CODE (*tp) == TARGET_EXPR)
    remap_save_expr (tp, id->decl_map, walk_subtrees);
  else if (TREE_CODE (*tp) == LABEL_DECL
	   && (! DECL_CONTEXT (*tp)
	       || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  /* Types may need remapping as well.  */
  else if (TYPE_P (*tp))
    *tp = remap_type (*tp, id);

  /* If this is a constant, we have to copy the node iff the type will be
     remapped.  copy_tree_r will not copy a constant.  */
  else if (CONSTANT_CLASS_P (*tp))
    {
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
	*walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
	*tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
      else
	{
	  *tp = copy_node (*tp);
	  TREE_TYPE (*tp) = new_type;
	}
    }

  /* Otherwise, just copy the node.  Note that copy_tree_r already
     knows not to copy VAR_DECLs, etc., so this is safe.  */
  else
    {
      /* Here we handle trees that are not completely rewritten.
	 First we detect some inlining-induced bogosities for
	 discarding.  */
      if (TREE_CODE (*tp) == MODIFY_EXPR
	  && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
	  && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
	{
	  /* Some assignments VAR = VAR; don't generate any rtl code
	     and thus don't count as variable modification.  Avoid
	     keeping bogosities like 0 = 0.  */
	  tree decl = TREE_OPERAND (*tp, 0), value;
	  tree *n;

	  n = id->decl_map->get (decl);
	  if (n)
	    {
	      value = *n;
	      STRIP_TYPE_NOPS (value);
	      if (TREE_CONSTANT (value) || TREE_READONLY (value))
		{
		  /* Replace the no-op assignment with an empty statement
		     and re-run this callback on it.  */
		  *tp = build_empty_stmt (EXPR_LOCATION (*tp));
		  return copy_tree_body_r (tp, walk_subtrees, data);
		}
	    }
	}
      else if (INDIRECT_REF_P (*tp))
	{
	  /* Get rid of *& from inline substitutions that can happen when a
	     pointer argument is an ADDR_EXPR.  */
	  tree decl = TREE_OPERAND (*tp, 0);
	  tree *n = id->decl_map->get (decl);
	  if (n)
	    {
	      /* If we happen to get an ADDR_EXPR in n->value, strip
	         it manually here as we'll eventually get ADDR_EXPRs
		 which lie about their types pointed to.  In this case
		 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
		 but we absolutely rely on that.  As fold_indirect_ref
	         does other useful transformations, try that first, though.  */
	      tree type = TREE_TYPE (*tp);
	      tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
	      tree old = *tp;
	      *tp = id->do_not_fold ? NULL : gimple_fold_indirect_ref (ptr);
	      if (! *tp)
	        {
		  type = remap_type (type, id);
		  if (TREE_CODE (ptr) == ADDR_EXPR && !id->do_not_fold)
		    {
		      *tp
		        = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
		      /* ???  We should either assert here or build
			 a VIEW_CONVERT_EXPR instead of blindly leaking
			 incompatible types to our IL.  */
		      if (! *tp)
			*tp = TREE_OPERAND (ptr, 0);
		    }
	          else
		    {
		      /* Folding failed; build a plain dereference and carry
			 the reference flags over from the original.  */
	              *tp = build1 (INDIRECT_REF, type, ptr);
		      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
		      TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
		      TREE_READONLY (*tp) = TREE_READONLY (old);
		      maybe_copy_this_notrap (id, *tp, old);
		    }
		}
	      *walk_subtrees = 0;
	      return NULL;
	    }
	}
      else if (TREE_CODE (*tp) == MEM_REF && !id->do_not_fold)
	{
	  /* We need to re-canonicalize MEM_REFs from inline substitutions
	     that can happen when a pointer argument is an ADDR_EXPR.
	     Recurse here manually to allow that.  */
	  tree ptr = TREE_OPERAND (*tp, 0);
	  tree type = remap_type (TREE_TYPE (*tp), id);
	  tree old = *tp;
	  walk_tree (&ptr, copy_tree_body_r, data, NULL);
	  *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
	  TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
	  TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
	  copy_warning (*tp, old);
	  if (MR_DEPENDENCE_CLIQUE (old) != 0)
	    {
	      MR_DEPENDENCE_CLIQUE (*tp)
		= remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
	      MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
	    }
	  maybe_copy_this_notrap (id, *tp, old);
	  REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
	  *walk_subtrees = 0;
	  return NULL;
	}

      /* Here is the "usual case".  Copy this tree node, and then
	 tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      /* If EXPR has block defined, map it to newly constructed block.
         When inlining we want EXPRs without block appear in the block
	 of function call if we are not remapping a type.  */
      if (EXPR_P (*tp))
	{
	  new_block = id->remapping_type_depth == 0 ? id->block : NULL;
	  if (TREE_BLOCK (*tp))
	    {
	      tree *n;
	      n = id->decl_map->get (TREE_BLOCK (*tp));
	      if (n)
		new_block = *n;
	    }
	  TREE_SET_BLOCK (*tp, new_block);
	}

      if (TREE_CODE (*tp) != OMP_CLAUSE)
	TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      /* The copied TARGET_EXPR has never been expanded, even if the
	 original node was expanded already.  */
      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
	{
	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
	  TREE_OPERAND (*tp, 3) = NULL_TREE;
	}

      /* Variable substitution need not be simple.  In particular, the
	 INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
	 and friends are up-to-date.  */
      else if (TREE_CODE (*tp) == ADDR_EXPR)
	{
	  int invariant = is_gimple_min_invariant (*tp);
	  walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);

	  /* Handle the case where we substituted an INDIRECT_REF
	     into the operand of the ADDR_EXPR: &*p collapses to p.  */
	  if (INDIRECT_REF_P (TREE_OPERAND (*tp, 0))
	      && !id->do_not_fold)
	    {
	      tree t = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
	      if (TREE_TYPE (t) != TREE_TYPE (*tp))
		t = fold_convert (remap_type (TREE_TYPE (*tp), id), t);
	      *tp = t;
	    }
	  else
	    recompute_tree_invariant_for_addr_expr (*tp);

	  /* If this used to be invariant, but is not any longer,
	     then regimplification is probably needed.  */
	  if (invariant && !is_gimple_min_invariant (*tp))
	    id->regimplify = true;

	  *walk_subtrees = 0;
	}
      else if (TREE_CODE (*tp) == OMP_CLAUSE
	       && (OMP_CLAUSE_CODE (*tp) == OMP_CLAUSE_AFFINITY
		   || OMP_CLAUSE_CODE (*tp) == OMP_CLAUSE_DEPEND))
	{
	  /* Clauses with iterator decls carry a TREE_VEC of iterator data
	     that must be copied and remapped element by element.  */
	  tree t = OMP_CLAUSE_DECL (*tp);
	  if (t && OMP_ITERATOR_DECL_P (t))
	    {
	      *walk_subtrees = 0;
	      OMP_CLAUSE_DECL (*tp) = copy_node (t);
	      t = OMP_CLAUSE_DECL (*tp);
	      TREE_PURPOSE (t) = copy_node (TREE_PURPOSE (t));
	      for (int i = 0; i <= 4; i++)
		walk_tree (&TREE_VEC_ELT (TREE_PURPOSE (t), i),
			   copy_tree_body_r, id, NULL);
	      if (TREE_VEC_ELT (TREE_PURPOSE (t), 5))
		remap_block (&TREE_VEC_ELT (TREE_PURPOSE (t), 5), id);
	      walk_tree (&TREE_VALUE (t), copy_tree_body_r, id, NULL);
	    }
	}
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
1488 : :
1489 : : /* Helper for remap_gimple_stmt. Given an EH region number for the
1490 : : source function, map that to the duplicate EH region number in
1491 : : the destination function. */
1492 : :
1493 : : static int
1494 : 96675 : remap_eh_region_nr (int old_nr, copy_body_data *id)
1495 : : {
1496 : 96675 : eh_region old_r, new_r;
1497 : :
1498 : 96675 : old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1499 : 96675 : new_r = static_cast<eh_region> (*id->eh_map->get (old_r));
1500 : :
1501 : 96675 : return new_r->index;
1502 : : }
1503 : :
1504 : : /* Similar, but operate on INTEGER_CSTs. */
1505 : :
1506 : : static tree
1507 : 7736 : remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1508 : : {
1509 : 7736 : int old_nr, new_nr;
1510 : :
1511 : 7736 : old_nr = tree_to_shwi (old_t_nr);
1512 : 7736 : new_nr = remap_eh_region_nr (old_nr, id);
1513 : :
1514 : 7736 : return build_int_cst (integer_type_node, new_nr);
1515 : : }
1516 : :
1517 : : /* Helper for copy_bb. Remap statement STMT using the inlining
1518 : : information in ID. Return the new statement copy. */
1519 : :
1520 : : static gimple_seq
1521 : 84969755 : remap_gimple_stmt (gimple *stmt, copy_body_data *id)
1522 : : {
1523 : 84969755 : gimple *copy = NULL;
1524 : 84969755 : struct walk_stmt_info wi;
1525 : 84969755 : bool skip_first = false;
1526 : 84969755 : gimple_seq stmts = NULL;
1527 : :
1528 : 84969755 : if (is_gimple_debug (stmt)
1529 : 84969755 : && (gimple_debug_nonbind_marker_p (stmt)
1530 : 12197703 : ? !DECL_STRUCT_FUNCTION (id->dst_fn)->debug_nonbind_markers
1531 : 39105054 : : !opt_for_fn (id->dst_fn, flag_var_tracking_assignments)))
1532 : : return NULL;
1533 : :
1534 : 84943706 : if (!is_gimple_debug (stmt)
1535 : 33666998 : && id->param_body_adjs
1536 : 88044652 : && id->param_body_adjs->m_dead_stmts.contains (stmt))
1537 : : {
1538 : 1858 : tree *dval = id->param_body_adjs->m_dead_stmt_debug_equiv.get (stmt);
1539 : 1858 : if (!dval)
1540 : : return NULL;
1541 : :
1542 : 730 : gcc_assert (is_gimple_assign (stmt));
1543 : 730 : tree lhs = gimple_assign_lhs (stmt);
1544 : 730 : tree *dvar = id->param_body_adjs->m_dead_ssa_debug_equiv.get (lhs);
1545 : 730 : gdebug *bind = gimple_build_debug_bind (*dvar, *dval, stmt);
1546 : 730 : if (id->reset_location)
1547 : 0 : gimple_set_location (bind, input_location);
1548 : 730 : id->debug_stmts.safe_push (bind);
1549 : 730 : gimple_seq_add_stmt_without_update (&stmts, bind);
1550 : 730 : return stmts;
1551 : : }
1552 : :
1553 : : /* Begin by recognizing trees that we'll completely rewrite for the
1554 : : inlining context. Our output for these trees is completely
1555 : : different from our input (e.g. RETURN_EXPR is deleted and morphs
1556 : : into an edge). Further down, we'll handle trees that get
1557 : : duplicated and/or tweaked. */
1558 : :
1559 : : /* When requested, GIMPLE_RETURN should be transformed to just the
1560 : : contained GIMPLE_ASSIGN. The branch semantics of the return will
1561 : : be handled elsewhere by manipulating the CFG rather than the
1562 : : statement. */
1563 : 84941848 : if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1564 : : {
1565 : 4418012 : tree retval = gimple_return_retval (as_a <greturn *> (stmt));
1566 : :
1567 : : /* If we're returning something, just turn that into an
1568 : : assignment to the equivalent of the original RESULT_DECL.
1569 : : If RETVAL is just the result decl, the result decl has
1570 : : already been set (e.g. a recent "foo (&result_decl, ...)");
1571 : : just toss the entire GIMPLE_RETURN. Likewise for when the
1572 : : call doesn't want the return value. */
1573 : 4418012 : if (retval
1574 : 4418012 : && (TREE_CODE (retval) != RESULT_DECL
1575 : 2295099 : && (!id->call_stmt
1576 : 2295099 : || gimple_call_lhs (id->call_stmt) != NULL_TREE)
1577 : 2182386 : && (TREE_CODE (retval) != SSA_NAME
1578 : 1739760 : || ! SSA_NAME_VAR (retval)
1579 : 404585 : || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
1580 : : {
1581 : 4204334 : copy = gimple_build_assign (id->do_not_unshare
1582 : 2102167 : ? id->retvar : unshare_expr (id->retvar),
1583 : : retval);
1584 : : /* id->retvar is already substituted. Skip it on later remapping. */
1585 : 2102167 : skip_first = true;
1586 : : }
1587 : : else
1588 : : return NULL;
1589 : : }
1590 : 80523836 : else if (gimple_has_substatements (stmt))
1591 : : {
1592 : 67 : gimple_seq s1, s2;
1593 : :
1594 : : /* When cloning bodies from the C++ front end, we will be handed bodies
1595 : : in High GIMPLE form. Handle here all the High GIMPLE statements that
1596 : : have embedded statements. */
1597 : 67 : switch (gimple_code (stmt))
1598 : : {
1599 : 0 : case GIMPLE_BIND:
1600 : 0 : copy = copy_gimple_bind (as_a <gbind *> (stmt), id);
1601 : 0 : break;
1602 : :
1603 : 0 : case GIMPLE_CATCH:
1604 : 0 : {
1605 : 0 : gcatch *catch_stmt = as_a <gcatch *> (stmt);
1606 : 0 : s1 = remap_gimple_seq (gimple_catch_handler (catch_stmt), id);
1607 : 0 : copy = gimple_build_catch (gimple_catch_types (catch_stmt), s1);
1608 : : }
1609 : 0 : break;
1610 : :
1611 : 0 : case GIMPLE_EH_FILTER:
1612 : 0 : s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1613 : 0 : copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1614 : 0 : break;
1615 : :
1616 : 0 : case GIMPLE_TRY:
1617 : 0 : s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1618 : 0 : s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1619 : 0 : copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1620 : 0 : break;
1621 : :
1622 : 0 : case GIMPLE_WITH_CLEANUP_EXPR:
1623 : 0 : s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1624 : 0 : copy = gimple_build_wce (s1);
1625 : 0 : break;
1626 : :
1627 : 0 : case GIMPLE_OMP_PARALLEL:
1628 : 0 : {
1629 : 0 : gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
1630 : 0 : s1 = remap_gimple_seq (gimple_omp_body (omp_par_stmt), id);
1631 : 0 : copy = gimple_build_omp_parallel
1632 : 0 : (s1,
1633 : : gimple_omp_parallel_clauses (omp_par_stmt),
1634 : : gimple_omp_parallel_child_fn (omp_par_stmt),
1635 : : gimple_omp_parallel_data_arg (omp_par_stmt));
1636 : : }
1637 : 0 : break;
1638 : :
1639 : 0 : case GIMPLE_OMP_TASK:
1640 : 0 : s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1641 : 0 : copy = gimple_build_omp_task
1642 : 0 : (s1,
1643 : : gimple_omp_task_clauses (stmt),
1644 : : gimple_omp_task_child_fn (stmt),
1645 : : gimple_omp_task_data_arg (stmt),
1646 : : gimple_omp_task_copy_fn (stmt),
1647 : : gimple_omp_task_arg_size (stmt),
1648 : : gimple_omp_task_arg_align (stmt));
1649 : 0 : break;
1650 : :
1651 : 0 : case GIMPLE_OMP_FOR:
1652 : 0 : s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1653 : 0 : s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1654 : 0 : copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
1655 : : gimple_omp_for_clauses (stmt),
1656 : : gimple_omp_for_collapse (stmt), s2);
1657 : 0 : {
1658 : 0 : size_t i;
1659 : 0 : for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1660 : : {
1661 : 0 : gimple_omp_for_set_index (copy, i,
1662 : : gimple_omp_for_index (stmt, i));
1663 : 0 : gimple_omp_for_set_initial (copy, i,
1664 : : gimple_omp_for_initial (stmt, i));
1665 : 0 : gimple_omp_for_set_final (copy, i,
1666 : : gimple_omp_for_final (stmt, i));
1667 : 0 : gimple_omp_for_set_incr (copy, i,
1668 : : gimple_omp_for_incr (stmt, i));
1669 : 0 : gimple_omp_for_set_cond (copy, i,
1670 : : gimple_omp_for_cond (stmt, i));
1671 : : }
1672 : : }
1673 : : break;
1674 : :
1675 : 0 : case GIMPLE_OMP_MASTER:
1676 : 0 : s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1677 : 0 : copy = gimple_build_omp_master (s1);
1678 : 0 : break;
1679 : :
1680 : 0 : case GIMPLE_OMP_MASKED:
1681 : 0 : s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1682 : 0 : copy = gimple_build_omp_masked
1683 : 0 : (s1, gimple_omp_masked_clauses (stmt));
1684 : 0 : break;
1685 : :
1686 : 0 : case GIMPLE_OMP_SCOPE:
1687 : 0 : s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1688 : 0 : copy = gimple_build_omp_scope
1689 : 0 : (s1, gimple_omp_scope_clauses (stmt));
1690 : 0 : break;
1691 : :
1692 : 0 : case GIMPLE_OMP_DISPATCH:
1693 : 0 : s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1694 : 0 : copy = gimple_build_omp_dispatch (s1,
1695 : : gimple_omp_dispatch_clauses (stmt));
1696 : 0 : break;
1697 : :
1698 : 0 : case GIMPLE_OMP_TASKGROUP:
1699 : 0 : s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1700 : 0 : copy = gimple_build_omp_taskgroup
1701 : 0 : (s1, gimple_omp_taskgroup_clauses (stmt));
1702 : 0 : break;
1703 : :
1704 : 0 : case GIMPLE_OMP_ORDERED:
1705 : 0 : s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1706 : 0 : copy = gimple_build_omp_ordered
1707 : 0 : (s1,
1708 : 0 : gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt)));
1709 : 0 : break;
1710 : :
1711 : 0 : case GIMPLE_OMP_SCAN:
1712 : 0 : s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1713 : 0 : copy = gimple_build_omp_scan
1714 : 0 : (s1, gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)));
1715 : 0 : break;
1716 : :
1717 : 0 : case GIMPLE_OMP_SECTION:
1718 : 0 : s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1719 : 0 : copy = gimple_build_omp_section (s1);
1720 : 0 : break;
1721 : :
1722 : 0 : case GIMPLE_OMP_SECTIONS:
1723 : 0 : s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1724 : 0 : copy = gimple_build_omp_sections
1725 : 0 : (s1, gimple_omp_sections_clauses (stmt));
1726 : 0 : break;
1727 : :
1728 : 0 : case GIMPLE_OMP_STRUCTURED_BLOCK:
1729 : 0 : s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1730 : 0 : copy = gimple_build_omp_structured_block (s1);
1731 : 0 : break;
1732 : :
1733 : 0 : case GIMPLE_OMP_SINGLE:
1734 : 0 : s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1735 : 0 : copy = gimple_build_omp_single
1736 : 0 : (s1, gimple_omp_single_clauses (stmt));
1737 : 0 : break;
1738 : :
1739 : 0 : case GIMPLE_OMP_TARGET:
1740 : 0 : s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1741 : 0 : copy = gimple_build_omp_target
1742 : 0 : (s1, gimple_omp_target_kind (stmt),
1743 : : gimple_omp_target_clauses (stmt));
1744 : 0 : break;
1745 : :
1746 : 0 : case GIMPLE_OMP_TEAMS:
1747 : 0 : s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1748 : 0 : copy = gimple_build_omp_teams
1749 : 0 : (s1, gimple_omp_teams_clauses (stmt));
1750 : 0 : break;
1751 : :
1752 : 0 : case GIMPLE_OMP_CRITICAL:
1753 : 0 : s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1754 : 0 : copy = gimple_build_omp_critical (s1,
1755 : : gimple_omp_critical_name
1756 : 0 : (as_a <gomp_critical *> (stmt)),
1757 : : gimple_omp_critical_clauses
1758 : 0 : (as_a <gomp_critical *> (stmt)));
1759 : 0 : break;
1760 : :
1761 : 0 : case GIMPLE_ASSUME:
1762 : 0 : s1 = remap_gimple_seq (gimple_assume_body (stmt), id);
1763 : 0 : copy = gimple_build_assume (gimple_assume_guard (stmt), s1);
1764 : 0 : break;
1765 : :
1766 : 67 : case GIMPLE_TRANSACTION:
1767 : 67 : {
1768 : 67 : gtransaction *old_trans_stmt = as_a <gtransaction *> (stmt);
1769 : 67 : gtransaction *new_trans_stmt;
1770 : 67 : s1 = remap_gimple_seq (gimple_transaction_body (old_trans_stmt),
1771 : : id);
1772 : 67 : copy = new_trans_stmt = gimple_build_transaction (s1);
1773 : 67 : gimple_transaction_set_subcode (new_trans_stmt,
1774 : : gimple_transaction_subcode (old_trans_stmt));
1775 : 67 : gimple_transaction_set_label_norm (new_trans_stmt,
1776 : : gimple_transaction_label_norm (old_trans_stmt));
1777 : 67 : gimple_transaction_set_label_uninst (new_trans_stmt,
1778 : : gimple_transaction_label_uninst (old_trans_stmt));
1779 : 67 : gimple_transaction_set_label_over (new_trans_stmt,
1780 : : gimple_transaction_label_over (old_trans_stmt));
1781 : : }
1782 : 67 : break;
1783 : :
1784 : 0 : default:
1785 : 0 : gcc_unreachable ();
1786 : : }
1787 : : }
1788 : : else
1789 : : {
1790 : 80523769 : if (gimple_assign_single_p (stmt)
1791 : 13025491 : && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1792 : 80523769 : && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1793 : : {
1794 : : /* Here we handle statements that are not completely rewritten.
1795 : : First we detect some inlining-induced bogosities for
1796 : : discarding. */
1797 : :
1798 : : /* Some assignments VAR = VAR; don't generate any rtl code
1799 : : and thus don't count as variable modification. Avoid
1800 : : keeping bogosities like 0 = 0. */
1801 : 0 : tree decl = gimple_assign_lhs (stmt), value;
1802 : 0 : tree *n;
1803 : :
1804 : 0 : n = id->decl_map->get (decl);
1805 : 0 : if (n)
1806 : : {
1807 : 0 : value = *n;
1808 : 0 : STRIP_TYPE_NOPS (value);
1809 : 0 : if (TREE_CONSTANT (value) || TREE_READONLY (value))
1810 : 0 : return NULL;
1811 : : }
1812 : : }
1813 : :
1814 : : /* For *ptr_N ={v} {CLOBBER}, if ptr_N is SSA_NAME defined
1815 : : in a block that we aren't copying during tree_function_versioning,
1816 : : just drop the clobber stmt. */
1817 : 80523769 : if (id->blocks_to_copy && gimple_clobber_p (stmt))
1818 : : {
1819 : 16995 : tree lhs = gimple_assign_lhs (stmt);
1820 : 16995 : if (TREE_CODE (lhs) == MEM_REF
1821 : 16995 : && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
1822 : : {
1823 : 914 : gimple *def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
1824 : 914 : if (gimple_bb (def_stmt)
1825 : 1231 : && !bitmap_bit_p (id->blocks_to_copy,
1826 : 317 : gimple_bb (def_stmt)->index))
1827 : : return NULL;
1828 : : }
1829 : : }
1830 : :
1831 : : /* We do not allow CLOBBERs of handled components. In case
1832 : : returned value is stored via such handled component, remove
1833 : : the clobber so stmt verifier is happy. */
1834 : 80523766 : if (gimple_clobber_p (stmt)
1835 : 80523766 : && TREE_CODE (gimple_assign_lhs (stmt)) == RESULT_DECL)
1836 : : {
1837 : 0 : tree remapped = remap_decl (gimple_assign_lhs (stmt), id);
1838 : 0 : if (!DECL_P (remapped)
1839 : 0 : && TREE_CODE (remapped) != MEM_REF)
1840 : : return NULL;
1841 : : }
1842 : :
1843 : 80523766 : if (gimple_debug_bind_p (stmt))
1844 : : {
1845 : 38644716 : tree var = gimple_debug_bind_get_var (stmt);
1846 : 38644716 : tree value = gimple_debug_bind_get_value (stmt);
1847 : 38644716 : if (id->param_body_adjs
1848 : 38644716 : && id->param_body_adjs->m_dead_stmts.contains (stmt))
1849 : : {
1850 : 7357 : value = unshare_expr_without_location (value);
1851 : 7357 : id->param_body_adjs->remap_with_debug_expressions (&value);
1852 : : }
1853 : :
1854 : 38644716 : gdebug *copy = gimple_build_debug_bind (var, value, stmt);
1855 : 38644716 : if (id->reset_location)
1856 : 4 : gimple_set_location (copy, input_location);
1857 : 38644716 : id->debug_stmts.safe_push (copy);
1858 : 38644716 : gimple_seq_add_stmt_without_update (&stmts, copy);
1859 : 38644716 : return stmts;
1860 : : }
1861 : 41879050 : if (gimple_debug_source_bind_p (stmt))
1862 : : {
1863 : 460338 : gdebug *copy = gimple_build_debug_source_bind
1864 : 460338 : (gimple_debug_source_bind_get_var (stmt),
1865 : : gimple_debug_source_bind_get_value (stmt),
1866 : 460338 : stmt);
1867 : 460338 : if (id->reset_location)
1868 : 0 : gimple_set_location (copy, input_location);
1869 : 460338 : id->debug_stmts.safe_push (copy);
1870 : 460338 : gimple_seq_add_stmt_without_update (&stmts, copy);
1871 : 460338 : return stmts;
1872 : : }
1873 : 41418712 : if (gimple_debug_nonbind_marker_p (stmt))
1874 : : {
1875 : : /* If the inlined function has too many debug markers,
1876 : : don't copy them. */
1877 : 12171654 : if (id->src_cfun->debug_marker_count
1878 : 12171654 : > param_max_debug_marker_count
1879 : 12171654 : || id->reset_location)
1880 : 0 : return stmts;
1881 : :
1882 : 12171654 : gdebug *copy = as_a <gdebug *> (gimple_copy (stmt));
1883 : 12171654 : id->debug_stmts.safe_push (copy);
1884 : 12171654 : gimple_seq_add_stmt_without_update (&stmts, copy);
1885 : 12171654 : return stmts;
1886 : : }
1887 : :
1888 : : /* Create a new deep copy of the statement. */
1889 : 29247058 : copy = gimple_copy (stmt);
1890 : :
1891 : : /* Clear flags that need revisiting. */
1892 : 29247058 : if (gcall *call_stmt = dyn_cast <gcall *> (copy))
1893 : : {
1894 : 4530916 : if (gimple_call_tail_p (call_stmt))
1895 : 92 : gimple_call_set_tail (call_stmt, false);
1896 : 4530916 : if (gimple_call_from_thunk_p (call_stmt))
1897 : 131 : gimple_call_set_from_thunk (call_stmt, false);
1898 : : /* Silently clear musttail flag when inlining a function
1899 : : with must tail call from a non-musttail call. The inlining
1900 : : removes one frame so acts like musttail's intent, and we
1901 : : can be inlining a function with musttail calls in the middle
1902 : : of caller where musttail will always error. */
1903 : 4530916 : if (gimple_call_must_tail_p (call_stmt)
1904 : 49 : && id->call_stmt
1905 : 4530957 : && !gimple_call_must_tail_p (id->call_stmt))
1906 : 14 : gimple_call_set_must_tail (call_stmt, false);
1907 : 4530916 : if (gimple_call_internal_p (call_stmt))
1908 : 63088 : switch (gimple_call_internal_fn (call_stmt))
1909 : : {
1910 : 135 : case IFN_GOMP_SIMD_LANE:
1911 : 135 : case IFN_GOMP_SIMD_VF:
1912 : 135 : case IFN_GOMP_SIMD_LAST_LANE:
1913 : 135 : case IFN_GOMP_SIMD_ORDERED_START:
1914 : 135 : case IFN_GOMP_SIMD_ORDERED_END:
1915 : 135 : DECL_STRUCT_FUNCTION (id->dst_fn)->has_simduid_loops = true;
1916 : 135 : break;
1917 : : default:
1918 : : break;
1919 : : }
1920 : : }
1921 : :
1922 : : /* Remap the region numbers for __builtin_eh_{pointer,filter},
1923 : : RESX and EH_DISPATCH. */
1924 : 29247058 : if (id->eh_map)
1925 : 29247058 : switch (gimple_code (copy))
1926 : : {
1927 : 4530916 : case GIMPLE_CALL:
1928 : 4530916 : {
1929 : 4530916 : tree r, fndecl = gimple_call_fndecl (copy);
1930 : 4530916 : if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
1931 : 1201324 : switch (DECL_FUNCTION_CODE (fndecl))
1932 : : {
1933 : 0 : case BUILT_IN_EH_COPY_VALUES:
1934 : 0 : r = gimple_call_arg (copy, 1);
1935 : 0 : r = remap_eh_region_tree_nr (r, id);
1936 : 0 : gimple_call_set_arg (copy, 1, r);
1937 : : /* FALLTHRU */
1938 : :
1939 : 7736 : case BUILT_IN_EH_POINTER:
1940 : 7736 : case BUILT_IN_EH_FILTER:
1941 : 7736 : r = gimple_call_arg (copy, 0);
1942 : 7736 : r = remap_eh_region_tree_nr (r, id);
1943 : 7736 : gimple_call_set_arg (copy, 0, r);
1944 : 7736 : break;
1945 : :
1946 : : default:
1947 : : break;
1948 : : }
1949 : :
1950 : : /* Reset alias info if we didn't apply measures to
1951 : : keep it valid over inlining by setting DECL_PT_UID. */
1952 : 4530916 : if (!id->src_cfun->gimple_df
1953 : 4530916 : || !id->src_cfun->gimple_df->ipa_pta)
1954 : 4524400 : gimple_call_reset_alias_info (as_a <gcall *> (copy));
1955 : : }
1956 : : break;
1957 : :
1958 : 79901 : case GIMPLE_RESX:
1959 : 79901 : {
1960 : 79901 : gresx *resx_stmt = as_a <gresx *> (copy);
1961 : 79901 : int r = gimple_resx_region (resx_stmt);
1962 : 79901 : r = remap_eh_region_nr (r, id);
1963 : 79901 : gimple_resx_set_region (resx_stmt, r);
1964 : : }
1965 : 79901 : break;
1966 : :
1967 : 9038 : case GIMPLE_EH_DISPATCH:
1968 : 9038 : {
1969 : 9038 : geh_dispatch *eh_dispatch = as_a <geh_dispatch *> (copy);
1970 : 9038 : int r = gimple_eh_dispatch_region (eh_dispatch);
1971 : 9038 : r = remap_eh_region_nr (r, id);
1972 : 9038 : gimple_eh_dispatch_set_region (eh_dispatch, r);
1973 : : }
1974 : 9038 : break;
1975 : :
1976 : : default:
1977 : : break;
1978 : : }
1979 : : }
1980 : :
1981 : : /* If STMT has a block defined, map it to the newly constructed block. */
1982 : 31349292 : if (tree block = gimple_block (copy))
1983 : : {
1984 : 28153977 : tree *n;
1985 : 28153977 : n = id->decl_map->get (block);
1986 : 28153977 : gcc_assert (n);
1987 : 28153977 : gimple_set_block (copy, *n);
1988 : : }
1989 : 31349292 : if (id->param_body_adjs)
1990 : : {
1991 : 3099088 : gimple_seq extra_stmts = NULL;
1992 : 3099088 : id->param_body_adjs->modify_gimple_stmt (&copy, &extra_stmts, stmt);
1993 : 3099088 : if (!gimple_seq_empty_p (extra_stmts))
1994 : : {
1995 : 3 : memset (&wi, 0, sizeof (wi));
1996 : 3 : wi.info = id;
1997 : 3 : for (gimple_stmt_iterator egsi = gsi_start (extra_stmts);
1998 : 6 : !gsi_end_p (egsi);
1999 : 3 : gsi_next (&egsi))
2000 : 3 : walk_gimple_op (gsi_stmt (egsi), remap_gimple_op_r, &wi);
2001 : 3 : gimple_seq_add_seq_without_update (&stmts, extra_stmts);
2002 : : }
2003 : : }
2004 : :
2005 : 31349292 : if (id->reset_location)
2006 : 662 : gimple_set_location (copy, input_location);
2007 : :
2008 : : /* Debug statements ought to be rebuilt and not copied. */
2009 : 31349292 : gcc_checking_assert (!is_gimple_debug (copy));
2010 : :
2011 : : /* Remap all the operands in COPY. */
2012 : 31349292 : memset (&wi, 0, sizeof (wi));
2013 : 31349292 : wi.info = id;
2014 : 31349292 : if (skip_first)
2015 : 2102167 : walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
2016 : : else
2017 : 29247125 : walk_gimple_op (copy, remap_gimple_op_r, &wi);
2018 : :
2019 : : /* Clear the copied virtual operands. We are not remapping them here
2020 : : but are going to recreate them from scratch. */
2021 : 31349292 : if (gimple_has_mem_ops (copy))
2022 : : {
2023 : 27015207 : gimple_set_vdef (copy, NULL_TREE);
2024 : 27015207 : gimple_set_vuse (copy, NULL_TREE);
2025 : : }
2026 : :
2027 : 31349292 : if (cfun->can_throw_non_call_exceptions)
2028 : : {
2029 : : /* When inlining a function which does not have non-call exceptions
2030 : : enabled into a function that has (which only happens with
2031 : : always-inline) we have to fixup stmts that cannot throw. */
2032 : 1709346 : if (gcond *cond = dyn_cast <gcond *> (copy))
2033 : 199371 : if (gimple_could_trap_p (cond))
2034 : : {
2035 : 1 : gassign *cmp
2036 : 1 : = gimple_build_assign (make_ssa_name (boolean_type_node),
2037 : : gimple_cond_code (cond),
2038 : : gimple_cond_lhs (cond),
2039 : : gimple_cond_rhs (cond));
2040 : 1 : gimple_seq_add_stmt_without_update (&stmts, cmp);
2041 : 1 : gimple_cond_set_code (cond, NE_EXPR);
2042 : 1 : gimple_cond_set_lhs (cond, gimple_assign_lhs (cmp));
2043 : 1 : gimple_cond_set_rhs (cond, boolean_false_node);
2044 : : }
2045 : : }
2046 : :
2047 : 31349292 : gimple_seq_add_stmt_without_update (&stmts, copy);
2048 : 31349292 : return stmts;
2049 : : }
2050 : :
2051 : :
2052 : : /* Copy basic block, scale profile accordingly. Edges will be taken care of
2053 : : later */
2054 : :
2055 : : static basic_block
2056 : 13681332 : copy_bb (copy_body_data *id, basic_block bb,
2057 : : profile_count num, profile_count den)
2058 : : {
2059 : 13681332 : gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
2060 : 13681332 : basic_block copy_basic_block;
2061 : 13681332 : tree decl;
2062 : 13681332 : basic_block prev;
2063 : :
2064 : 13681332 : profile_count::adjust_for_ipa_scaling (&num, &den);
2065 : :
2066 : : /* Search for previous copied basic block. */
2067 : 13681332 : prev = bb->prev_bb;
2068 : 13835627 : while (!prev->aux)
2069 : 154295 : prev = prev->prev_bb;
2070 : :
2071 : : /* create_basic_block() will append every new block to
2072 : : basic_block_info automatically. */
2073 : 13681332 : copy_basic_block = create_basic_block (NULL, (basic_block) prev->aux);
2074 : 13681332 : copy_basic_block->count = bb->count.apply_scale (num, den);
2075 : :
2076 : 13681332 : copy_gsi = gsi_start_bb (copy_basic_block);
2077 : :
2078 : 13681332 : unsigned min_cond_uid = 0;
2079 : 13681332 : if (id->src_cfun->cond_uids)
2080 : : {
2081 : 23 : if (!cfun->cond_uids)
2082 : 3 : cfun->cond_uids = new hash_map <gcond*, unsigned> ();
2083 : :
2084 : 92 : for (auto itr : *id->src_cfun->cond_uids)
2085 : 23 : if (itr.second >= min_cond_uid)
2086 : 23 : min_cond_uid = itr.second + 1;
2087 : : }
2088 : :
2089 : 112332419 : for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2090 : : {
2091 : 84969755 : gimple_seq stmts;
2092 : 84969755 : gimple *stmt = gsi_stmt (gsi);
2093 : 84969755 : gimple *orig_stmt = stmt;
2094 : 84969755 : gimple_stmt_iterator stmts_gsi;
2095 : 84969755 : bool stmt_added = false;
2096 : :
2097 : 84969755 : id->regimplify = false;
2098 : 84969755 : stmts = remap_gimple_stmt (stmt, id);
2099 : :
2100 : 84969755 : if (gimple_seq_empty_p (stmts))
2101 : 2343207 : continue;
2102 : :
2103 : 82626730 : seq_gsi = copy_gsi;
2104 : :
2105 : 82626730 : for (stmts_gsi = gsi_start (stmts);
2106 : 165253464 : !gsi_end_p (stmts_gsi); )
2107 : : {
2108 : 82626734 : stmt = gsi_stmt (stmts_gsi);
2109 : :
2110 : : /* Advance iterator now before stmt is moved to seq_gsi. */
2111 : 82626734 : gsi_next (&stmts_gsi);
2112 : :
2113 : 82626734 : if (gimple_nop_p (stmt))
2114 : 182 : continue;
2115 : :
2116 : : /* If -fcondition-coverage is used, register the inlined conditions
2117 : : in the cond->expression mapping of the caller. The expression tag
2118 : : is shifted conditions from the two bodies are not mixed. */
2119 : 82626552 : if (id->src_cfun->cond_uids && is_a <gcond*> (stmt))
2120 : : {
2121 : 5 : gcond *orig_cond = as_a <gcond*> (orig_stmt);
2122 : 5 : gcond *cond = as_a <gcond*> (stmt);
2123 : 5 : unsigned *v = id->src_cfun->cond_uids->get (orig_cond);
2124 : 5 : if (v)
2125 : 5 : cfun->cond_uids->put (cond, *v + min_cond_uid);
2126 : : }
2127 : :
2128 : 82626552 : gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun,
2129 : : orig_stmt);
2130 : :
2131 : 82626552 : gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
2132 : :
2133 : 82626552 : if (id->regimplify)
2134 : 5279 : gimple_regimplify_operands (stmt, &seq_gsi);
2135 : :
2136 : : stmt_added = true;
2137 : : }
2138 : :
2139 : 82626730 : if (!stmt_added)
2140 : 182 : continue;
2141 : :
2142 : : /* If copy_basic_block has been empty at the start of this iteration,
2143 : : call gsi_start_bb again to get at the newly added statements. */
2144 : 82626548 : if (gsi_end_p (copy_gsi))
2145 : 24028132 : copy_gsi = gsi_start_bb (copy_basic_block);
2146 : : else
2147 : 70612482 : gsi_next (&copy_gsi);
2148 : :
2149 : : /* Process the new statement. The call to gimple_regimplify_operands
2150 : : possibly turned the statement into multiple statements, we
2151 : : need to process all of them. */
2152 : 82626556 : do
2153 : : {
2154 : 82626556 : tree fn;
2155 : 82626556 : gcall *call_stmt;
2156 : :
2157 : 82626556 : stmt = gsi_stmt (copy_gsi);
2158 : 82626556 : call_stmt = dyn_cast <gcall *> (stmt);
2159 : 4530916 : if (call_stmt
2160 : 4530916 : && gimple_call_va_arg_pack_p (call_stmt)
2161 : 321 : && id->call_stmt
2162 : 319 : && ! gimple_call_va_arg_pack_p (id->call_stmt))
2163 : : {
2164 : : /* __builtin_va_arg_pack () should be replaced by
2165 : : all arguments corresponding to ... in the caller. */
2166 : 305 : tree p;
2167 : 305 : gcall *new_call;
2168 : 305 : vec<tree> argarray;
2169 : 305 : size_t nargs_caller = gimple_call_num_args (id->call_stmt);
2170 : 305 : size_t nargs = nargs_caller;
2171 : :
2172 : 785 : for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
2173 : : {
2174 : : /* Avoid crashing on invalid IL that doesn't have a
2175 : : varargs function or that passes not enough arguments. */
2176 : 496 : if (nargs == 0)
2177 : : break;
2178 : 480 : nargs--;
2179 : : }
2180 : :
2181 : : /* Create the new array of arguments. */
2182 : 305 : size_t nargs_callee = gimple_call_num_args (call_stmt);
2183 : 305 : size_t n = nargs + nargs_callee;
2184 : 305 : argarray.create (n);
2185 : 305 : argarray.safe_grow_cleared (n, true);
2186 : :
2187 : : /* Copy all the arguments before '...' */
2188 : 305 : if (nargs_callee)
2189 : 610 : memcpy (argarray.address (),
2190 : 305 : gimple_call_arg_ptr (call_stmt, 0),
2191 : : nargs_callee * sizeof (tree));
2192 : :
2193 : : /* Append the arguments passed in '...' */
2194 : 305 : if (nargs)
2195 : 171 : memcpy (argarray.address () + nargs_callee,
2196 : 171 : gimple_call_arg_ptr (id->call_stmt, 0)
2197 : 171 : + (nargs_caller - nargs), nargs * sizeof (tree));
2198 : :
2199 : 305 : new_call = gimple_build_call_vec (gimple_call_fn (call_stmt),
2200 : : argarray);
2201 : :
2202 : 305 : argarray.release ();
2203 : :
2204 : : /* Copy all GIMPLE_CALL flags, location and block, except
2205 : : GF_CALL_VA_ARG_PACK. */
2206 : 305 : gimple_call_copy_flags (new_call, call_stmt);
2207 : 305 : gimple_call_set_va_arg_pack (new_call, false);
2208 : 610 : gimple_call_set_fntype (new_call, gimple_call_fntype (call_stmt));
2209 : : /* location includes block. */
2210 : 305 : gimple_set_location (new_call, gimple_location (stmt));
2211 : 305 : gimple_call_set_lhs (new_call, gimple_call_lhs (call_stmt));
2212 : :
2213 : 305 : gsi_replace (&copy_gsi, new_call, false);
2214 : 305 : stmt = new_call;
2215 : : }
2216 : 82626251 : else if (call_stmt
2217 : 4530611 : && id->call_stmt
2218 : 3685088 : && (decl = gimple_call_fndecl (stmt))
2219 : 86163191 : && fndecl_built_in_p (decl, BUILT_IN_VA_ARG_PACK_LEN))
2220 : : {
2221 : : /* __builtin_va_arg_pack_len () should be replaced by
2222 : : the number of anonymous arguments. */
2223 : 147 : size_t nargs = gimple_call_num_args (id->call_stmt);
2224 : 147 : tree count, p;
2225 : 147 : gimple *new_stmt;
2226 : :
2227 : 423 : for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
2228 : 276 : nargs--;
2229 : :
2230 : 147 : if (!gimple_call_lhs (stmt))
2231 : : {
2232 : : /* Drop unused calls. */
2233 : 1 : gsi_remove (&copy_gsi, false);
2234 : 1 : continue;
2235 : : }
2236 : 146 : else if (!gimple_call_va_arg_pack_p (id->call_stmt))
2237 : : {
2238 : 124 : count = build_int_cst (integer_type_node, nargs);
2239 : 124 : new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
2240 : 124 : gsi_replace (&copy_gsi, new_stmt, false);
2241 : 124 : stmt = new_stmt;
2242 : : }
2243 : 22 : else if (nargs != 0)
2244 : : {
2245 : 7 : tree newlhs = make_ssa_name (integer_type_node);
2246 : 7 : count = build_int_cst (integer_type_node, nargs);
2247 : 7 : new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
2248 : : PLUS_EXPR, newlhs, count);
2249 : 7 : gimple_call_set_lhs (stmt, newlhs);
2250 : 7 : gsi_insert_after (&copy_gsi, new_stmt, GSI_NEW_STMT);
2251 : : }
2252 : : }
2253 : 82626104 : else if (call_stmt
2254 : 4530464 : && id->call_stmt
2255 : 86311045 : && gimple_call_internal_p (stmt))
2256 : 50469 : switch (gimple_call_internal_fn (stmt))
2257 : : {
2258 : 145 : case IFN_TSAN_FUNC_EXIT:
2259 : : /* Drop .TSAN_FUNC_EXIT () internal calls during inlining. */
2260 : 145 : gsi_remove (&copy_gsi, false);
2261 : 145 : continue;
2262 : 1034 : case IFN_ASAN_MARK:
2263 : : /* Drop .ASAN_MARK internal calls during inlining into
2264 : : no_sanitize functions. */
2265 : 1034 : if (!sanitize_flags_p (SANITIZE_ADDRESS, id->dst_fn)
2266 : 1034 : && !sanitize_flags_p (SANITIZE_HWADDRESS, id->dst_fn))
2267 : : {
2268 : 14 : gsi_remove (&copy_gsi, false);
2269 : 14 : continue;
2270 : : }
2271 : : break;
2272 : : default:
2273 : : break;
2274 : : }
2275 : :
2276 : : /* Statements produced by inlining can be unfolded, especially
2277 : : when we constant propagated some operands. We can't fold
2278 : : them right now for two reasons:
2279 : : 1) folding require SSA_NAME_DEF_STMTs to be correct
2280 : : 2) we can't change function calls to builtins.
2281 : : So we just mark statement for later folding. We mark
2282 : : all new statements, instead just statements that has changed
2283 : : by some nontrivial substitution so even statements made
2284 : : foldable indirectly are updated. If this turns out to be
2285 : : expensive, copy_body can be told to watch for nontrivial
2286 : : changes. */
2287 : 82626396 : if (id->statements_to_fold)
2288 : 82626396 : id->statements_to_fold->add (stmt);
2289 : :
2290 : : /* We're duplicating a CALL_EXPR. Find any corresponding
2291 : : callgraph edges and update or duplicate them. */
2292 : 82626396 : if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
2293 : : {
2294 : 4530632 : struct cgraph_edge *edge;
2295 : :
2296 : 4530632 : switch (id->transform_call_graph_edges)
2297 : : {
2298 : 3685109 : case CB_CGE_DUPLICATE:
2299 : 3685109 : edge = id->src_node->get_edge (orig_stmt);
2300 : 3685109 : if (edge)
2301 : : {
2302 : 3634799 : struct cgraph_edge *old_edge = edge;
2303 : :
2304 : : /* A speculative call is consist of multiple
2305 : : edges - indirect edge and one or more direct edges
2306 : : Duplicate the whole thing and distribute frequencies
2307 : : accordingly. */
2308 : 3634799 : if (edge->speculative)
2309 : : {
2310 : 18987 : int n = 0;
2311 : 18987 : profile_count direct_cnt
2312 : 18987 : = profile_count::zero ();
2313 : :
2314 : : /* First figure out the distribution of counts
2315 : : so we can re-scale BB profile accordingly. */
2316 : 44894 : for (cgraph_edge *e = old_edge; e;
2317 : 25907 : e = e->next_speculative_call_target ())
2318 : 25907 : direct_cnt = direct_cnt + e->count;
2319 : :
2320 : 18987 : cgraph_edge *indirect
2321 : 18987 : = old_edge->speculative_call_indirect_edge ();
2322 : 18987 : profile_count indir_cnt = indirect->count;
2323 : :
2324 : : /* Next iterate all direct edges, clone it and its
2325 : : corresponding reference and update profile. */
2326 : 18987 : for (cgraph_edge *e = old_edge;
2327 : 44894 : e;
2328 : 25907 : e = e->next_speculative_call_target ())
2329 : : {
2330 : 25907 : profile_count cnt = e->count;
2331 : :
2332 : 25907 : id->dst_node->clone_reference
2333 : 25907 : (e->speculative_call_target_ref (), stmt);
2334 : 25907 : edge = e->clone (id->dst_node, call_stmt,
2335 : : gimple_uid (stmt), num, den,
2336 : : true);
2337 : 25907 : profile_probability prob
2338 : 25907 : = cnt.probability_in (direct_cnt
2339 : : + indir_cnt);
2340 : 25907 : edge->count
2341 : : = copy_basic_block->count.apply_probability
2342 : 25907 : (prob);
2343 : 25907 : n++;
2344 : : }
2345 : 18987 : gcc_checking_assert
2346 : : (indirect->num_speculative_call_targets_p ()
2347 : : == n);
2348 : :
2349 : : /* Duplicate the indirect edge after all direct edges
2350 : : cloned. */
2351 : 18987 : indirect = indirect->clone (id->dst_node, call_stmt,
2352 : : gimple_uid (stmt),
2353 : : num, den,
2354 : : true);
2355 : :
2356 : 18987 : profile_probability prob
2357 : 18987 : = indir_cnt.probability_in (direct_cnt
2358 : : + indir_cnt);
2359 : 18987 : indirect->count
2360 : 18987 : = copy_basic_block->count.apply_probability (prob);
2361 : : }
2362 : : /* If edge is a callback-carrying edge, copy all its
2363 : : attached edges as well. */
2364 : 3615812 : else if (edge->has_callback)
2365 : : {
2366 : 1 : edge
2367 : 1 : = edge->clone (id->dst_node, call_stmt,
2368 : : gimple_uid (stmt), num, den, true);
2369 : 1 : cgraph_edge *e;
2370 : 1 : for (e = old_edge->first_callback_edge (); e;
2371 : 0 : e = e->next_callback_edge ())
2372 : 0 : edge = e->clone (id->dst_node, call_stmt,
2373 : : gimple_uid (stmt), num, den, true);
2374 : : }
2375 : : else
2376 : : {
2377 : 3615811 : edge = edge->clone (id->dst_node, call_stmt,
2378 : : gimple_uid (stmt),
2379 : : num, den,
2380 : : true);
2381 : 3615811 : edge->count = copy_basic_block->count;
2382 : : }
2383 : : }
2384 : : break;
2385 : :
2386 : 655207 : case CB_CGE_MOVE_CLONES:
2387 : 655207 : id->dst_node->set_call_stmt_including_clones (orig_stmt,
2388 : : call_stmt);
2389 : 655207 : edge = id->dst_node->get_edge (stmt);
2390 : 655207 : break;
2391 : :
2392 : 190316 : case CB_CGE_MOVE:
2393 : 190316 : edge = id->dst_node->get_edge (orig_stmt);
2394 : 190316 : if (edge)
2395 : 189875 : edge = cgraph_edge::set_call_stmt (edge, call_stmt);
2396 : : break;
2397 : :
2398 : 0 : default:
2399 : 0 : gcc_unreachable ();
2400 : : }
2401 : :
2402 : : /* Constant propagation on argument done during inlining
2403 : : may create new direct call. Produce an edge for it. */
2404 : 3615811 : if ((!edge
2405 : 4369208 : || (edge->indirect_inlining_edge
2406 : 4061 : && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
2407 : 161901 : && id->dst_node->definition
2408 : 926491 : && (fn = gimple_call_fndecl (stmt)) != NULL)
2409 : : {
2410 : 0 : struct cgraph_node *dest = cgraph_node::get_create (fn);
2411 : :
2412 : : /* We have missing edge in the callgraph. This can happen
2413 : : when previous inlining turned an indirect call into a
2414 : : direct call by constant propagating arguments or we are
2415 : : producing dead clone (for further cloning). In all
2416 : : other cases we hit a bug (incorrect node sharing is the
2417 : : most common reason for missing edges). */
2418 : 0 : gcc_assert (!dest->definition
2419 : : || dest->address_taken
2420 : : || !id->src_node->definition
2421 : : || !id->dst_node->definition);
2422 : 0 : if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
2423 : 0 : id->dst_node->create_edge_including_clones
2424 : 0 : (dest, orig_stmt, call_stmt, bb->count,
2425 : : CIF_ORIGINALLY_INDIRECT_CALL);
2426 : : else
2427 : 0 : id->dst_node->create_edge (dest, call_stmt,
2428 : : bb->count)->inline_failed
2429 : 0 : = CIF_ORIGINALLY_INDIRECT_CALL;
2430 : 0 : if (dump_file)
2431 : : {
2432 : 0 : fprintf (dump_file, "Created new direct edge to %s\n",
2433 : : dest->dump_name ());
2434 : : }
2435 : : }
2436 : :
2437 : 4530632 : notice_special_calls (as_a <gcall *> (stmt));
2438 : : }
2439 : :
2440 : 82626396 : maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
2441 : : id->eh_map, id->eh_lp_nr);
2442 : :
2443 : 82626396 : gsi_next (©_gsi);
2444 : : }
2445 : 82626556 : while (!gsi_end_p (copy_gsi));
2446 : :
2447 : 165253096 : copy_gsi = gsi_last_bb (copy_basic_block);
2448 : : }
2449 : :
2450 : 13681332 : return copy_basic_block;
2451 : : }
2452 : :
2453 : : /* Inserting Single Entry Multiple Exit region in SSA form into code in SSA
2454 : : form is quite easy, since dominator relationship for old basic blocks does
2455 : : not change.
2456 : :
2457 : : There is however exception where inlining might change dominator relation
2458 : : across EH edges from basic block within inlined functions destinating
2459 : : to landing pads in function we inline into.
2460 : :
2461 : : The function fills in PHI_RESULTs of such PHI nodes if they refer
2462 : : to gimple regs. Otherwise, the function mark PHI_RESULT of such
2463 : : PHI nodes for renaming. For non-gimple regs, renaming is safe: the
2464 : : EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
2465 : : set, and this means that there will be no overlapping live ranges
2466 : : for the underlying symbol.
2467 : :
2468 : : This might change in future if we allow redirecting of EH edges and
2469 : : we might want to change way build CFG pre-inlining to include
2470 : : all the possible edges then. */
2471 : : static void
2472 : 699959 : update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
2473 : : bool can_throw, bool nonlocal_goto)
2474 : : {
2475 : 699959 : edge e;
2476 : 699959 : edge_iterator ei;
2477 : :
2478 : 1943173 : FOR_EACH_EDGE (e, ei, bb->succs)
2479 : 1243214 : if (!e->dest->aux
2480 : 688099 : || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
2481 : : {
2482 : 555115 : gphi *phi;
2483 : 555115 : gphi_iterator si;
2484 : :
2485 : 555115 : if (!nonlocal_goto)
2486 : 554873 : gcc_assert (e->flags & EDGE_EH);
2487 : :
2488 : 555115 : if (!can_throw)
2489 : 187 : gcc_assert (!(e->flags & EDGE_EH));
2490 : :
2491 : 933282 : for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
2492 : : {
2493 : 378167 : edge re;
2494 : :
2495 : 378167 : phi = si.phi ();
2496 : :
2497 : : /* For abnormal goto/call edges the receiver can be the
2498 : : ENTRY_BLOCK. Do not assert this cannot happen. */
2499 : :
2500 : 378167 : gcc_assert ((e->flags & EDGE_EH)
2501 : : || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
2502 : :
2503 : 378167 : re = find_edge (ret_bb, e->dest);
2504 : 378167 : gcc_checking_assert (re);
2505 : 378167 : gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
2506 : : == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
2507 : :
2508 : 378167 : SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
2509 : : USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
2510 : : }
2511 : : }
2512 : 699959 : }
2513 : :
2514 : : /* Insert clobbers for automatic variables of inlined ID->src_fn
2515 : : function at the start of basic block ID->eh_landing_pad_dest. */
2516 : :
2517 : : static void
2518 : 405912 : add_clobbers_to_eh_landing_pad (copy_body_data *id)
2519 : : {
2520 : 405912 : tree var;
2521 : 405912 : basic_block bb = id->eh_landing_pad_dest;
2522 : 405912 : live_vars_map *vars = NULL;
2523 : 405912 : unsigned int cnt = 0;
2524 : 405912 : unsigned int i;
2525 : 957022 : FOR_EACH_VEC_SAFE_ELT (id->src_cfun->local_decls, i, var)
2526 : 551110 : if (VAR_P (var)
2527 : 551110 : && !DECL_HARD_REGISTER (var)
2528 : 551110 : && !TREE_THIS_VOLATILE (var)
2529 : 551090 : && !DECL_HAS_VALUE_EXPR_P (var)
2530 : 542323 : && !is_gimple_reg (var)
2531 : 232430 : && auto_var_in_fn_p (var, id->src_fn)
2532 : 781530 : && !lookup_attribute ("omp simd array", DECL_ATTRIBUTES (var)))
2533 : : {
2534 : 230420 : tree *t = id->decl_map->get (var);
2535 : 230420 : if (!t)
2536 : 0 : continue;
2537 : 230420 : tree new_var = *t;
2538 : 230420 : if (VAR_P (new_var)
2539 : 230420 : && !DECL_HARD_REGISTER (new_var)
2540 : 230420 : && !TREE_THIS_VOLATILE (new_var)
2541 : 230420 : && !DECL_HAS_VALUE_EXPR_P (new_var)
2542 : 230420 : && !is_gimple_reg (new_var)
2543 : 460840 : && auto_var_in_fn_p (new_var, id->dst_fn))
2544 : : {
2545 : 230420 : if (vars == NULL)
2546 : 137872 : vars = new live_vars_map;
2547 : 230420 : vars->put (DECL_UID (var), cnt++);
2548 : : }
2549 : : }
2550 : 405912 : if (vars == NULL)
2551 : 268040 : return;
2552 : :
2553 : 137872 : vec<bitmap_head> live = compute_live_vars (id->src_cfun, vars);
2554 : 484222 : FOR_EACH_VEC_SAFE_ELT (id->src_cfun->local_decls, i, var)
2555 : 346350 : if (VAR_P (var))
2556 : : {
2557 : 346350 : edge e;
2558 : 346350 : edge_iterator ei;
2559 : 346350 : bool needed = false;
2560 : 346350 : unsigned int *v = vars->get (DECL_UID (var));
2561 : 346350 : if (v == NULL)
2562 : 115930 : continue;
2563 : 4793608 : FOR_EACH_EDGE (e, ei, bb->preds)
2564 : 4696096 : if ((e->flags & EDGE_EH) != 0
2565 : 4696072 : && e->src->index >= id->add_clobbers_to_eh_landing_pads)
2566 : : {
2567 : 432181 : basic_block src_bb = (basic_block) e->src->aux;
2568 : :
2569 : 432181 : if (bitmap_bit_p (&live[src_bb->index], *v))
2570 : : {
2571 : : needed = true;
2572 : : break;
2573 : : }
2574 : : }
2575 : 230420 : if (needed)
2576 : : {
2577 : 132908 : tree new_var = *id->decl_map->get (var);
2578 : 132908 : gimple_stmt_iterator gsi = gsi_after_labels (bb);
2579 : 132908 : tree clobber = build_clobber (TREE_TYPE (new_var));
2580 : 132908 : gimple *clobber_stmt = gimple_build_assign (new_var, clobber);
2581 : 132908 : gsi_insert_before (&gsi, clobber_stmt, GSI_NEW_STMT);
2582 : : }
2583 : : }
2584 : 137872 : destroy_live_vars (live);
2585 : 137872 : delete vars;
2586 : : }
2587 : :
2588 : : /* Copy edges from BB into its copy constructed earlier, scale profile
2589 : : accordingly. Edges will be taken care of later. Assume aux
2590 : : pointers to point to the copies of each BB. Return true if any
2591 : : debug stmts are left after a statement that must end the basic block. */
2592 : :
2593 : : static bool
2594 : 22948696 : copy_edges_for_bb (basic_block bb, profile_count num, profile_count den,
2595 : : basic_block ret_bb, basic_block abnormal_goto_dest,
2596 : : copy_body_data *id)
2597 : : {
2598 : 22948696 : basic_block new_bb = (basic_block) bb->aux;
2599 : 22948696 : edge_iterator ei;
2600 : 22948696 : edge old_edge;
2601 : 22948696 : gimple_stmt_iterator si;
2602 : 22948696 : bool need_debug_cleanup = false;
2603 : :
2604 : : /* Use the indices from the original blocks to create edges for the
2605 : : new ones. */
2606 : 44193432 : FOR_EACH_EDGE (old_edge, ei, bb->succs)
2607 : 21244736 : if (!(old_edge->flags & EDGE_EH))
2608 : : {
2609 : 21099723 : edge new_edge;
2610 : 21099723 : int flags = old_edge->flags;
2611 : 21099723 : location_t locus = old_edge->goto_locus;
2612 : :
2613 : : /* Return edges do get a FALLTHRU flag when they get inlined. */
2614 : 21099723 : if (old_edge->dest->index == EXIT_BLOCK
2615 : 4633643 : && !(flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
2616 : 4633643 : && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
2617 : 4418012 : flags |= EDGE_FALLTHRU;
2618 : :
2619 : 21099723 : new_edge
2620 : 21099723 : = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
2621 : 21099723 : new_edge->probability = old_edge->probability;
2622 : 21099723 : if (!id->reset_location)
2623 : 21099297 : new_edge->goto_locus = remap_location (locus, id);
2624 : : }
2625 : :
2626 : 22948696 : if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
2627 : : return false;
2628 : :
2629 : : /* When doing function splitting, we must decrease count of the return block
2630 : : which was previously reachable by block we did not copy. */
2631 : 13681332 : if (single_succ_p (bb) && single_succ_edge (bb)->dest->index == EXIT_BLOCK)
2632 : 10918708 : FOR_EACH_EDGE (old_edge, ei, bb->preds)
2633 : 6285065 : if (old_edge->src->index != ENTRY_BLOCK
2634 : 3494794 : && !old_edge->src->aux)
2635 : 45799 : new_bb->count -= old_edge->count ().apply_scale (num, den);
2636 : :
2637 : : /* Walk stmts from end to start so that splitting will adjust the BB
2638 : : pointer for each stmt at most once, even when we split the block
2639 : : multiple times. */
2640 : 13681332 : bool seen_nondebug = false;
2641 : 13681332 : for (si = gsi_last_bb (new_bb); !gsi_end_p (si);)
2642 : : {
2643 : 82626403 : bool can_throw, nonlocal_goto;
2644 : 82626403 : gimple *copy_stmt = gsi_stmt (si);
2645 : :
2646 : : /* Do this before the possible split_block. */
2647 : 82626403 : gsi_prev (&si);
2648 : :
2649 : : /* If this tree could throw an exception, there are two
2650 : : cases where we need to add abnormal edge(s): the
2651 : : tree wasn't in a region and there is a "current
2652 : : region" in the caller; or the original tree had
2653 : : EH edges. In both cases split the block after the tree,
2654 : : and add abnormal edge(s) as needed; we need both
2655 : : those from the callee and the caller.
2656 : : We check whether the copy can throw, because the const
2657 : : propagation can change an INDIRECT_REF which throws
2658 : : into a COMPONENT_REF which doesn't. If the copy
2659 : : can throw, the original could also throw. */
2660 : 82626403 : can_throw = stmt_can_throw_internal (cfun, copy_stmt);
2661 : 82626403 : nonlocal_goto
2662 : 82626403 : = (stmt_can_make_abnormal_goto (copy_stmt)
2663 : 82626403 : && !computed_goto_p (copy_stmt));
2664 : :
2665 : 82626116 : if (can_throw || nonlocal_goto)
2666 : : {
2667 : : /* If there's only debug insns after copy_stmt don't split
2668 : : the block but instead mark the block for cleanup. */
2669 : 700027 : if (!seen_nondebug)
2670 : : need_debug_cleanup = true;
2671 : : else
2672 : : {
2673 : : /* Note that bb's predecessor edges aren't necessarily
2674 : : right at this point; split_block doesn't care. */
2675 : 246067 : edge e = split_block (new_bb, copy_stmt);
2676 : 246067 : e->dest->aux = new_bb->aux;
2677 : 246067 : seen_nondebug = false;
2678 : : }
2679 : : }
2680 : :
2681 : 82626403 : if (!is_gimple_debug (copy_stmt))
2682 : 31348965 : seen_nondebug = true;
2683 : :
2684 : 82626403 : bool update_probs = false;
2685 : :
2686 : 82626403 : if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
2687 : : {
2688 : 9038 : make_eh_dispatch_edges (as_a <geh_dispatch *> (copy_stmt));
2689 : 9038 : update_probs = true;
2690 : : }
2691 : 82617365 : else if (can_throw)
2692 : : {
2693 : 699772 : make_eh_edge (copy_stmt);
2694 : 699772 : update_probs = true;
2695 : : }
2696 : :
2697 : : /* EH edges may not match old edges. Copy as much as possible. */
2698 : 708810 : if (update_probs)
2699 : : {
2700 : 708810 : edge e;
2701 : 708810 : edge_iterator ei;
2702 : 708810 : basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2703 : :
2704 : 1444948 : FOR_EACH_EDGE (old_edge, ei, bb->succs)
2705 : 736138 : if ((old_edge->flags & EDGE_EH)
2706 : 154258 : && (e = find_edge (copy_stmt_bb,
2707 : 154258 : (basic_block) old_edge->dest->aux))
2708 : 881014 : && (e->flags & EDGE_EH))
2709 : 144876 : e->probability = old_edge->probability;
2710 : :
2711 : 1961672 : FOR_EACH_EDGE (e, ei, copy_stmt_bb->succs)
2712 : 1252862 : if (e->flags & EDGE_EH)
2713 : : {
2714 : 699772 : if (!e->probability.initialized_p ())
2715 : 555103 : e->probability = profile_probability::never ();
2716 : 699772 : if (e->dest->index < id->add_clobbers_to_eh_landing_pads)
2717 : : {
2718 : 554176 : if (id->eh_landing_pad_dest == NULL)
2719 : 405912 : id->eh_landing_pad_dest = e->dest;
2720 : : else
2721 : 148264 : gcc_assert (id->eh_landing_pad_dest == e->dest);
2722 : : }
2723 : : }
2724 : : }
2725 : :
2726 : :
2727 : : /* If the call we inline cannot make abnormal goto do not add
2728 : : additional abnormal edges but only retain those already present
2729 : : in the original function body. */
2730 : 82626403 : if (abnormal_goto_dest == NULL)
2731 : : nonlocal_goto = false;
2732 : 738 : if (nonlocal_goto)
2733 : : {
2734 : 219 : basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2735 : :
2736 : 219 : if (get_abnormal_succ_dispatcher (copy_stmt_bb))
2737 : : nonlocal_goto = false;
2738 : : /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2739 : : in OpenMP regions which aren't allowed to be left abnormally.
2740 : : So, no need to add abnormal edge in that case. */
2741 : 219 : else if (is_gimple_call (copy_stmt)
2742 : 219 : && gimple_call_internal_p (copy_stmt)
2743 : 0 : && (gimple_call_internal_fn (copy_stmt)
2744 : : == IFN_ABNORMAL_DISPATCHER)
2745 : 219 : && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
2746 : : nonlocal_goto = false;
2747 : : else
2748 : : {
2749 : 219 : make_single_succ_edge (copy_stmt_bb, abnormal_goto_dest,
2750 : : EDGE_ABNORMAL);
2751 : 219 : gimple_call_set_ctrl_altering (copy_stmt, true);
2752 : : }
2753 : : }
2754 : :
2755 : 82626403 : if ((can_throw || nonlocal_goto)
2756 : 178934138 : && gimple_in_ssa_p (cfun))
2757 : 699959 : update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
2758 : : can_throw, nonlocal_goto);
2759 : : }
2760 : : return need_debug_cleanup;
2761 : : }
2762 : :
2763 : : /* Copy the PHIs. All blocks and edges are copied, some blocks
2764 : : was possibly split and new outgoing EH edges inserted.
2765 : : BB points to the block of original function and AUX pointers links
2766 : : the original and newly copied blocks. */
2767 : :
2768 : : static void
2769 : 22948696 : copy_phis_for_bb (basic_block bb, copy_body_data *id)
2770 : : {
2771 : 22948696 : basic_block const new_bb = (basic_block) bb->aux;
2772 : 22948696 : edge_iterator ei;
2773 : 22948696 : gphi *phi;
2774 : 22948696 : gphi_iterator si;
2775 : 22948696 : edge new_edge;
2776 : 22948696 : bool inserted = false;
2777 : :
2778 : 25911895 : for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2779 : : {
2780 : 2963199 : tree res, new_res;
2781 : 2963199 : gphi *new_phi;
2782 : :
2783 : 2963199 : phi = si.phi ();
2784 : 2963199 : res = PHI_RESULT (phi);
2785 : 2963199 : new_res = res;
2786 : 2963199 : if (!virtual_operand_p (res)
2787 : 2963199 : && (!id->param_body_adjs
2788 : 1504405 : || !id->param_body_adjs->m_dead_stmts.contains (phi)))
2789 : : {
2790 : 1641330 : walk_tree (&new_res, copy_tree_body_r, id, NULL);
2791 : 1641330 : if (EDGE_COUNT (new_bb->preds) == 0)
2792 : : {
2793 : : /* Technically we'd want a SSA_DEFAULT_DEF here... */
2794 : 0 : SSA_NAME_DEF_STMT (new_res) = gimple_build_nop ();
2795 : : }
2796 : : else
2797 : : {
2798 : 1641330 : new_phi = create_phi_node (new_res, new_bb);
2799 : 5283975 : FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2800 : : {
2801 : 3642645 : edge old_edge = find_edge ((basic_block) new_edge->src->aux,
2802 : 3642645 : bb);
2803 : 3642645 : tree arg;
2804 : 3642645 : tree new_arg;
2805 : 3642645 : edge_iterator ei2;
2806 : 3642645 : location_t locus;
2807 : :
2808 : : /* When doing partial cloning, we allow PHIs on the entry
2809 : : block as long as all the arguments are the same.
2810 : : Find any input edge to see argument to copy. */
2811 : 3642645 : if (!old_edge)
2812 : 1788 : FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2813 : 1788 : if (!old_edge->src->aux)
2814 : : break;
2815 : :
2816 : 3642645 : arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2817 : 3642645 : new_arg = arg;
2818 : 3642645 : walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2819 : 3642645 : gcc_assert (new_arg);
2820 : : /* With return slot optimization we can end up with
2821 : : non-gimple (foo *)&this->m, fix that here. */
2822 : 3642645 : if (TREE_CODE (new_arg) != SSA_NAME
2823 : 1486533 : && TREE_CODE (new_arg) != FUNCTION_DECL
2824 : 5129178 : && !is_gimple_val (new_arg))
2825 : : {
2826 : 12 : gimple_seq stmts = NULL;
2827 : 12 : new_arg = force_gimple_operand (new_arg, &stmts, true,
2828 : : NULL);
2829 : 12 : gsi_insert_seq_on_edge (new_edge, stmts);
2830 : 12 : inserted = true;
2831 : : }
2832 : 3642645 : locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2833 : 3642645 : if (id->reset_location)
2834 : 0 : locus = input_location;
2835 : : else
2836 : 3642645 : locus = remap_location (locus, id);
2837 : 3642645 : add_phi_arg (new_phi, new_arg, new_edge, locus);
2838 : : }
2839 : : }
2840 : : }
2841 : : }
2842 : :
2843 : : /* Commit the delayed edge insertions. */
2844 : 22948696 : if (inserted)
2845 : 36 : FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2846 : 24 : gsi_commit_one_edge_insert (new_edge, NULL);
2847 : 22948696 : }
2848 : :
2849 : :
2850 : : /* Wrapper for remap_decl so it can be used as a callback. */
2851 : :
2852 : : static tree
2853 : 98857 : remap_decl_1 (tree decl, void *data)
2854 : : {
2855 : 98857 : return remap_decl (decl, (copy_body_data *) data);
2856 : : }
2857 : :
2858 : : /* Build struct function and associated datastructures for the new clone
2859 : : NEW_FNDECL to be build. CALLEE_FNDECL is the original. Function changes
2860 : : the cfun to the function of new_fndecl (and current_function_decl too). */
2861 : :
2862 : : static void
2863 : 229860 : initialize_cfun (tree new_fndecl, tree callee_fndecl, profile_count count)
2864 : : {
2865 : 229860 : struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2866 : :
2867 : : /* Register specific tree functions. */
2868 : 229860 : gimple_register_cfg_hooks ();
2869 : :
2870 : : /* Get clean struct function. */
2871 : 229860 : push_struct_function (new_fndecl, true);
2872 : 229860 : targetm.target_option.relayout_function (new_fndecl);
2873 : :
2874 : : /* We will rebuild these, so just sanity check that they are empty. */
2875 : 229860 : gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2876 : 229860 : gcc_assert (cfun->local_decls == NULL);
2877 : 229860 : gcc_assert (cfun->cfg == NULL);
2878 : 229860 : gcc_assert (cfun->decl == new_fndecl);
2879 : :
2880 : : /* Copy items we preserve during cloning. */
2881 : 229860 : cfun->static_chain_decl = src_cfun->static_chain_decl;
2882 : 229860 : cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2883 : 229860 : cfun->function_end_locus = src_cfun->function_end_locus;
2884 : 229860 : cfun->curr_properties = src_cfun->curr_properties;
2885 : 229860 : cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2886 : 229860 : cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2887 : 229860 : cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2888 : 229860 : cfun->calls_eh_return = src_cfun->calls_eh_return;
2889 : 229860 : cfun->stdarg = src_cfun->stdarg;
2890 : 229860 : cfun->after_inlining = src_cfun->after_inlining;
2891 : 229860 : cfun->can_throw_non_call_exceptions
2892 : 229860 : = src_cfun->can_throw_non_call_exceptions;
2893 : 229860 : cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2894 : 229860 : cfun->returns_struct = src_cfun->returns_struct;
2895 : 229860 : cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2896 : :
2897 : 229860 : init_empty_tree_cfg ();
2898 : :
2899 : 229860 : profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
2900 : 229860 : cfun->cfg->full_profile = src_cfun->cfg->full_profile;
2901 : :
2902 : 229860 : profile_count num = count;
2903 : 229860 : profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
2904 : 229860 : profile_count::adjust_for_ipa_scaling (&num, &den);
2905 : :
2906 : 229860 : ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
2907 : 229860 : ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (num, den);
2908 : 229860 : EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
2909 : 229860 : EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (num, den);
2910 : 229860 : if (src_cfun->eh)
2911 : 229860 : init_eh_for_function ();
2912 : :
2913 : 229860 : if (src_cfun->gimple_df)
2914 : : {
2915 : 229860 : init_tree_ssa (cfun);
2916 : 229860 : cfun->gimple_df->in_ssa_p = src_cfun->gimple_df->in_ssa_p;
2917 : 229860 : if (cfun->gimple_df->in_ssa_p)
2918 : 229860 : init_ssa_operands (cfun);
2919 : : }
2920 : 229860 : }
2921 : :
2922 : : /* Helper function for copy_cfg_body. Move debug stmts from the end
2923 : : of NEW_BB to the beginning of successor basic blocks when needed. If the
2924 : : successor has multiple predecessors, reset them, otherwise keep
2925 : : their value. */
2926 : :
2927 : : static void
2928 : 1458327 : maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2929 : : {
2930 : 1458327 : edge e;
2931 : 1458327 : edge_iterator ei;
2932 : 1458327 : gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2933 : :
2934 : 1458327 : if (gsi_end_p (si)
2935 : 1526257 : || gsi_one_before_end_p (si)
2936 : 1628268 : || !(stmt_can_throw_internal (cfun, gsi_stmt (si))
2937 : 67930 : || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2938 : 1424246 : return;
2939 : :
2940 : 102244 : FOR_EACH_EDGE (e, ei, new_bb->succs)
2941 : : {
2942 : 68163 : gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2943 : 68163 : gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2944 : 220655 : while (is_gimple_debug (gsi_stmt (ssi)))
2945 : : {
2946 : 152492 : gimple *stmt = gsi_stmt (ssi);
2947 : 152492 : gdebug *new_stmt;
2948 : 152492 : tree var;
2949 : 152492 : tree value;
2950 : :
2951 : : /* For the last edge move the debug stmts instead of copying
2952 : : them. */
2953 : 152492 : if (ei_one_before_end_p (ei))
2954 : : {
2955 : 76245 : si = ssi;
2956 : 76245 : gsi_prev (&ssi);
2957 : 76245 : if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2958 : : {
2959 : 74634 : gimple_debug_bind_reset_value (stmt);
2960 : 148513 : gimple_set_location (stmt, UNKNOWN_LOCATION);
2961 : : }
2962 : 76245 : gsi_remove (&si, false);
2963 : 76245 : gsi_insert_before (&dsi, stmt, GSI_NEW_STMT);
2964 : 76245 : continue;
2965 : : }
2966 : :
2967 : 76247 : if (gimple_debug_bind_p (stmt))
2968 : : {
2969 : 74636 : var = gimple_debug_bind_get_var (stmt);
2970 : 74636 : if (single_pred_p (e->dest))
2971 : : {
2972 : 49847 : value = gimple_debug_bind_get_value (stmt);
2973 : 49847 : value = unshare_expr (value);
2974 : 49847 : new_stmt = gimple_build_debug_bind (var, value, stmt);
2975 : : }
2976 : : else
2977 : 24789 : new_stmt = gimple_build_debug_bind (var, NULL_TREE, NULL);
2978 : : }
2979 : 1611 : else if (gimple_debug_source_bind_p (stmt))
2980 : : {
2981 : 0 : var = gimple_debug_source_bind_get_var (stmt);
2982 : 0 : value = gimple_debug_source_bind_get_value (stmt);
2983 : 0 : new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2984 : : }
2985 : 1611 : else if (gimple_debug_nonbind_marker_p (stmt))
2986 : 1611 : new_stmt = as_a <gdebug *> (gimple_copy (stmt));
2987 : : else
2988 : 0 : gcc_unreachable ();
2989 : 76247 : gsi_insert_before (&dsi, new_stmt, GSI_NEW_STMT);
2990 : 76247 : id->debug_stmts.safe_push (new_stmt);
2991 : 76247 : gsi_prev (&ssi);
2992 : : }
2993 : : }
2994 : : }
2995 : :
2996 : : /* Make a copy of the sub-loops of SRC_PARENT and place them
2997 : : as siblings of DEST_PARENT. */
2998 : :
2999 : : static void
3000 : 5297284 : copy_loops (copy_body_data *id,
3001 : : class loop *dest_parent, class loop *src_parent)
3002 : : {
3003 : 5297284 : class loop *src_loop = src_parent->inner;
3004 : 5915061 : while (src_loop)
3005 : : {
3006 : 617777 : if (!id->blocks_to_copy
3007 : 617777 : || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
3008 : : {
3009 : 614540 : class loop *dest_loop = alloc_loop ();
3010 : :
3011 : : /* Assign the new loop its header and latch and associate
3012 : : those with the new loop. */
3013 : 614540 : dest_loop->header = (basic_block)src_loop->header->aux;
3014 : 614540 : dest_loop->header->loop_father = dest_loop;
3015 : 614540 : if (src_loop->latch != NULL)
3016 : : {
3017 : 614524 : dest_loop->latch = (basic_block)src_loop->latch->aux;
3018 : 614524 : dest_loop->latch->loop_father = dest_loop;
3019 : : }
3020 : :
3021 : : /* Copy loop meta-data. */
3022 : 614540 : copy_loop_info (src_loop, dest_loop);
3023 : 614540 : if (dest_loop->unroll)
3024 : 2252 : cfun->has_unroll = true;
3025 : 614540 : if (dest_loop->force_vectorize)
3026 : 101 : cfun->has_force_vectorize_loops = true;
3027 : 614540 : if (id->src_cfun->last_clique != 0)
3028 : 70364 : dest_loop->owned_clique
3029 : 70364 : = remap_dependence_clique (id,
3030 : 70364 : src_loop->owned_clique
3031 : : ? src_loop->owned_clique : 1);
3032 : :
3033 : : /* Finally place it into the loop array and the loop tree. */
3034 : 614540 : place_new_loop (cfun, dest_loop);
3035 : 614540 : flow_loop_tree_node_add (dest_parent, dest_loop);
3036 : :
3037 : 614540 : if (src_loop->simduid)
3038 : : {
3039 : 53 : dest_loop->simduid = remap_decl (src_loop->simduid, id);
3040 : 53 : cfun->has_simduid_loops = true;
3041 : : }
3042 : :
3043 : : /* Recurse. */
3044 : 614540 : copy_loops (id, dest_loop, src_loop);
3045 : : }
3046 : 617777 : src_loop = src_loop->next;
3047 : : }
3048 : 5297284 : }
3049 : :
3050 : : /* Call redirect_call_stmt_to_callee on all calls in BB. */
3051 : :
3052 : : void
3053 : 12263289 : redirect_all_calls (copy_body_data * id, basic_block bb)
3054 : : {
3055 : 12263289 : gimple_stmt_iterator si;
3056 : 12263289 : gimple *last = last_nondebug_stmt (bb);
3057 : 96331787 : for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
3058 : : {
3059 : 71805209 : gimple *stmt = gsi_stmt (si);
3060 : 71805209 : if (is_gimple_call (stmt))
3061 : : {
3062 : 3736923 : struct cgraph_edge *edge = id->dst_node->get_edge (stmt);
3063 : 3736923 : if (edge)
3064 : : {
3065 : 3686613 : if (!id->killed_new_ssa_names)
3066 : 2043340 : id->killed_new_ssa_names = new hash_set<tree> (16);
3067 : 3686613 : cgraph_edge::redirect_call_stmt_to_callee (
3068 : : edge, id->killed_new_ssa_names);
3069 : 3686613 : if (edge->has_callback)
3070 : : {
3071 : : /* When redirecting a carrying edge, we need to redirect its
3072 : : attached edges as well. */
3073 : 1 : cgraph_edge *cbe;
3074 : 1 : for (cbe = edge->first_callback_edge (); cbe;
3075 : 0 : cbe = cbe->next_callback_edge ())
3076 : 0 : cgraph_edge::redirect_call_stmt_to_callee (
3077 : : cbe, id->killed_new_ssa_names);
3078 : : }
3079 : :
3080 : 3686613 : if (stmt == last && id->call_stmt && maybe_clean_eh_stmt (stmt))
3081 : 37095 : gimple_purge_dead_eh_edges (bb);
3082 : : }
3083 : : }
3084 : : }
3085 : 12263289 : }
3086 : :
3087 : : /* Make a copy of the body of FN so that it can be inserted inline in
3088 : : another function. Walks FN via CFG, returns new fndecl. */
3089 : :
3090 : : static tree
3091 : 4682744 : copy_cfg_body (copy_body_data * id,
3092 : : basic_block entry_block_map, basic_block exit_block_map,
3093 : : basic_block new_entry)
3094 : : {
3095 : 4682744 : tree callee_fndecl = id->src_fn;
3096 : : /* Original cfun for the callee, doesn't change. */
3097 : 4682744 : struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
3098 : 4682744 : struct function *cfun_to_copy;
3099 : 4682744 : basic_block bb;
3100 : 4682744 : tree new_fndecl = NULL;
3101 : 4682744 : bool need_debug_cleanup = false;
3102 : 4682744 : int last;
3103 : 4682744 : profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
3104 : 4682744 : profile_count num = entry_block_map->count;
3105 : :
3106 : 4682744 : cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
3107 : :
3108 : : /* Register specific tree functions. */
3109 : 4682744 : gimple_register_cfg_hooks ();
3110 : :
3111 : : /* If we are offlining region of the function, make sure to connect
3112 : : new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun). Since new entry can be
3113 : : part of loop, we must compute frequency and probability of
3114 : : ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
3115 : : probabilities of edges incoming from nonduplicated region. */
3116 : 4682744 : if (new_entry)
3117 : : {
3118 : 49062 : edge e;
3119 : 49062 : edge_iterator ei;
3120 : 49062 : ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = profile_count::zero ();
3121 : :
3122 : 106574 : FOR_EACH_EDGE (e, ei, new_entry->preds)
3123 : 57512 : if (!e->src->aux)
3124 : 57512 : ENTRY_BLOCK_PTR_FOR_FN (cfun)->count += e->count ();
3125 : : /* Do not scale - the profile of offlined region should
3126 : : remain unchanged. */
3127 : 49062 : num = den = profile_count::one ();
3128 : : }
3129 : :
3130 : 4682744 : profile_count::adjust_for_ipa_scaling (&num, &den);
3131 : :
3132 : : /* Must have a CFG here at this point. */
3133 : 4682744 : gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
3134 : : (DECL_STRUCT_FUNCTION (callee_fndecl)));
3135 : :
3136 : :
3137 : 4682744 : ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
3138 : 4682744 : EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
3139 : 4682744 : entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
3140 : 4682744 : exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
3141 : :
3142 : : /* Duplicate any exception-handling regions. */
3143 : 4682744 : if (cfun->eh)
3144 : 4682744 : id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
3145 : : remap_decl_1, id);
3146 : :
3147 : : /* Use aux pointers to map the original blocks to copy. */
3148 : 18536383 : FOR_EACH_BB_FN (bb, cfun_to_copy)
3149 : 13853639 : if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
3150 : : {
3151 : 13681332 : basic_block new_bb = copy_bb (id, bb, num, den);
3152 : 13681332 : bb->aux = new_bb;
3153 : 13681332 : new_bb->aux = bb;
3154 : 13681332 : new_bb->loop_father = entry_block_map->loop_father;
3155 : : }
3156 : :
3157 : 4682744 : last = last_basic_block_for_fn (cfun);
3158 : :
3159 : : /* Now that we've duplicated the blocks, duplicate their edges. */
3160 : 4682744 : basic_block abnormal_goto_dest = NULL;
3161 : 4682744 : if (id->call_stmt
3162 : 4682744 : && stmt_can_make_abnormal_goto (id->call_stmt))
3163 : : {
3164 : 248 : gimple_stmt_iterator gsi = gsi_for_stmt (id->call_stmt);
3165 : :
3166 : 248 : bb = gimple_bb (id->call_stmt);
3167 : 248 : gsi_next (&gsi);
3168 : 248 : if (gsi_end_p (gsi))
3169 : 248 : abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
3170 : : }
3171 : 27901871 : FOR_ALL_BB_FN (bb, cfun_to_copy)
3172 : 23219127 : if (!id->blocks_to_copy
3173 : 23219127 : || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
3174 : 22948696 : need_debug_cleanup |= copy_edges_for_bb (bb, num, den, exit_block_map,
3175 : : abnormal_goto_dest, id);
3176 : :
3177 : 4682744 : if (id->eh_landing_pad_dest)
3178 : : {
3179 : 405912 : add_clobbers_to_eh_landing_pad (id);
3180 : 405912 : id->eh_landing_pad_dest = NULL;
3181 : : }
3182 : :
3183 : 4682744 : if (new_entry)
3184 : : {
3185 : 49062 : edge e = make_edge (entry_block_map, (basic_block)new_entry->aux,
3186 : : EDGE_FALLTHRU);
3187 : 49062 : e->probability = profile_probability::always ();
3188 : : }
3189 : :
3190 : : /* Duplicate the loop tree, if available and wanted. */
3191 : 4682744 : if (loops_for_fn (src_cfun) != NULL
3192 : 4682744 : && current_loops != NULL)
3193 : : {
3194 : 4682744 : copy_loops (id, entry_block_map->loop_father,
3195 : : get_loop (src_cfun, 0));
3196 : : /* Defer to cfgcleanup to update loop-father fields of basic-blocks. */
3197 : 4682744 : loops_state_set (LOOPS_NEED_FIXUP);
3198 : : }
3199 : :
3200 : : /* If the loop tree in the source function needed fixup, mark the
3201 : : destination loop tree for fixup, too. */
3202 : 4682744 : if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
3203 : 3 : loops_state_set (LOOPS_NEED_FIXUP);
3204 : :
3205 : 4682744 : if (gimple_in_ssa_p (cfun))
3206 : 27901871 : FOR_ALL_BB_FN (bb, cfun_to_copy)
3207 : 23219127 : if (!id->blocks_to_copy
3208 : 23219127 : || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
3209 : 22948696 : copy_phis_for_bb (bb, id);
3210 : :
3211 : 27901871 : FOR_ALL_BB_FN (bb, cfun_to_copy)
3212 : 23219127 : if (bb->aux)
3213 : : {
3214 : 23046820 : if (need_debug_cleanup
3215 : 1940566 : && bb->index != ENTRY_BLOCK
3216 : 1652495 : && bb->index != EXIT_BLOCK)
3217 : 1364424 : maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
3218 : : /* Update call edge destinations. This cannot be done before loop
3219 : : info is updated, because we may split basic blocks. */
3220 : 23046820 : if (id->transform_call_graph_edges == CB_CGE_DUPLICATE
3221 : 20845269 : && bb->index != ENTRY_BLOCK
3222 : 16392385 : && bb->index != EXIT_BLOCK)
3223 : 11939501 : redirect_all_calls (id, (basic_block)bb->aux);
3224 : 23046820 : ((basic_block)bb->aux)->aux = NULL;
3225 : 23046820 : bb->aux = NULL;
3226 : : }
3227 : :
3228 : : /* Zero out AUX fields of newly created block during EH edge
3229 : : insertion. */
3230 : 5006532 : for (; last < last_basic_block_for_fn (cfun); last++)
3231 : : {
3232 : 323788 : if (need_debug_cleanup)
3233 : 93903 : maybe_move_debug_stmts_to_successors (id,
3234 : 93903 : BASIC_BLOCK_FOR_FN (cfun, last));
3235 : 323788 : BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
3236 : : /* Update call edge destinations. This cannot be done before loop
3237 : : info is updated, because we may split basic blocks. */
3238 : 323788 : if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
3239 : 323788 : redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
3240 : : }
3241 : 4682744 : entry_block_map->aux = NULL;
3242 : 4682744 : exit_block_map->aux = NULL;
3243 : :
3244 : 4682744 : if (id->eh_map)
3245 : : {
3246 : 4682744 : delete id->eh_map;
3247 : 4682744 : id->eh_map = NULL;
3248 : : }
3249 : 4682744 : if (id->dependence_map)
3250 : : {
3251 : 726600 : delete id->dependence_map;
3252 : 726600 : id->dependence_map = NULL;
3253 : : }
3254 : :
3255 : 4682744 : return new_fndecl;
3256 : : }
3257 : :
3258 : : /* Copy the debug STMT using ID. We deal with these statements in a
3259 : : special way: if any variable in their VALUE expression wasn't
3260 : : remapped yet, we won't remap it, because that would get decl uids
3261 : : out of sync, causing codegen differences between -g and -g0. If
3262 : : this arises, we drop the VALUE expression altogether. */
3263 : :
static void
copy_debug_stmt (gdebug *stmt, copy_body_data *id)
{
  tree t, *n;
  struct walk_stmt_info wi;

  /* Remap the lexical block first; fall back to ID's replacement block
     when the original block was not remapped.  */
  if (tree block = gimple_block (stmt))
    {
      n = id->decl_map->get (block);
      gimple_set_block (stmt, n ? *n : id->block);
    }

  /* Nonbind markers carry no operands to remap.  When inlining
     (id->call_stmt set) a marker without a block would not be
     associated with any scope in the destination, so remove it.  */
  if (gimple_debug_nonbind_marker_p (stmt))
    {
      if (id->call_stmt && !gimple_block (stmt))
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
	  gsi_remove (&gsi, true);
	}
      return;
    }

  /* Remap all the operands in COPY.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = id;

  /* Tell remap_gimple_op_r we are inside a debug stmt; it flips this
     to a negative value when a decl could not be remapped.  */
  processing_debug_stmt = 1;

  if (gimple_debug_source_bind_p (stmt))
    t = gimple_debug_source_bind_get_var (stmt);
  else if (gimple_debug_bind_p (stmt))
    t = gimple_debug_bind_get_var (stmt);
  else
    gcc_unreachable ();

  if (TREE_CODE (t) == PARM_DECL
      && id->debug_map
      && (n = id->debug_map->get (t)))
    {
      /* Parameter replaced by its value in setup_one_parameter; bind
	 to the VAR_DECL recorded in the debug map instead.  */
      gcc_assert (VAR_P (*n));
      t = *n;
    }
  else if (VAR_P (t) && !is_global_var (t) && !id->decl_map->get (t))
    /* T is a non-localized variable.  */;
  else
    walk_tree (&t, remap_gimple_op_r, &wi, NULL);

  if (gimple_debug_bind_p (stmt))
    {
      gimple_debug_bind_set_var (stmt, t);

      if (gimple_debug_bind_has_value_p (stmt))
	walk_tree (gimple_debug_bind_get_value_ptr (stmt),
		   remap_gimple_op_r, &wi, NULL);

      /* Punt if any decl couldn't be remapped.  */
      if (processing_debug_stmt < 0)
	gimple_debug_bind_reset_value (stmt);
    }
  else if (gimple_debug_source_bind_p (stmt))
    {
      gimple_debug_source_bind_set_var (stmt, t);
      /* When inlining and source bind refers to one of the optimized
	 away parameters, change the source bind into normal debug bind
	 referring to the corresponding DEBUG_EXPR_DECL that should have
	 been bound before the call stmt.  */
      t = gimple_debug_source_bind_get_value (stmt);
      if (t != NULL_TREE
	  && TREE_CODE (t) == PARM_DECL
	  && id->call_stmt)
	{
	  vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
	  unsigned int i;
	  if (debug_args != NULL)
	    {
	      /* debug_args holds (PARM_DECL origin, DEBUG_EXPR_DECL)
		 pairs, hence the stride of 2.  */
	      for (i = 0; i < vec_safe_length (*debug_args); i += 2)
		if ((**debug_args)[i] == DECL_ORIGIN (t)
		    && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
		  {
		    t = (**debug_args)[i + 1];
		    stmt->subcode = GIMPLE_DEBUG_BIND;
		    gimple_debug_bind_set_value (stmt, t);
		    break;
		  }
	    }
	}
      /* Still a source bind (no DEBUG_EXPR_DECL found above): remap
	 the bound value in place.  */
      if (gimple_debug_source_bind_p (stmt))
	walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
		   remap_gimple_op_r, &wi, NULL);
    }

  processing_debug_stmt = 0;

  update_stmt (stmt);
}
3359 : :
3360 : : /* Process deferred debug stmts. In order to give values better odds
3361 : : of being successfully remapped, we delay the processing of debug
3362 : : stmts until all other stmts that might require remapping are
3363 : : processed. */
3364 : :
3365 : : static void
3366 : 4682744 : copy_debug_stmts (copy_body_data *id)
3367 : : {
3368 : 4682744 : if (!id->debug_stmts.exists ())
3369 : : return;
3370 : :
3371 : 54287594 : for (gdebug *stmt : id->debug_stmts)
3372 : 51353685 : copy_debug_stmt (stmt, id);
3373 : :
3374 : 2933909 : id->debug_stmts.release ();
3375 : : }
3376 : :
3377 : : /* Make a copy of the body of SRC_FN so that it can be inserted inline in
3378 : : another function. */
3379 : :
3380 : : static tree
3381 : 18371740 : copy_tree_body (copy_body_data *id)
3382 : : {
3383 : 18371740 : tree fndecl = id->src_fn;
3384 : 18371740 : tree body = DECL_SAVED_TREE (fndecl);
3385 : :
3386 : 18371740 : walk_tree (&body, copy_tree_body_r, id, NULL);
3387 : :
3388 : 18371740 : return body;
3389 : : }
3390 : :
3391 : : /* Make a copy of the body of FN so that it can be inserted inline in
3392 : : another function. */
3393 : :
3394 : : static tree
3395 : 4682744 : copy_body (copy_body_data *id,
3396 : : basic_block entry_block_map, basic_block exit_block_map,
3397 : : basic_block new_entry)
3398 : : {
3399 : 4682744 : tree fndecl = id->src_fn;
3400 : 4682744 : tree body;
3401 : :
3402 : : /* If this body has a CFG, walk CFG and copy. */
3403 : 4682744 : gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
3404 : 4682744 : body = copy_cfg_body (id, entry_block_map, exit_block_map,
3405 : : new_entry);
3406 : 4682744 : copy_debug_stmts (id);
3407 : 4682744 : if (id->killed_new_ssa_names)
3408 : : {
3409 : 2043340 : ipa_release_ssas_in_hash (id->killed_new_ssa_names);
3410 : 4086680 : delete id->killed_new_ssa_names;
3411 : 2043340 : id->killed_new_ssa_names = NULL;
3412 : : }
3413 : :
3414 : 4682744 : return body;
3415 : : }
3416 : :
3417 : : /* Return true if VALUE is an ADDR_EXPR of an automatic variable
3418 : : defined in function FN, or of a data member thereof. */
3419 : :
3420 : : static bool
3421 : 102369 : self_inlining_addr_expr (tree value, tree fn)
3422 : : {
3423 : 102369 : tree var;
3424 : :
3425 : 102369 : if (TREE_CODE (value) != ADDR_EXPR)
3426 : : return false;
3427 : :
3428 : 96435 : var = get_base_address (TREE_OPERAND (value, 0));
3429 : :
3430 : 96435 : return var && auto_var_in_fn_p (var, fn);
3431 : : }
3432 : :
3433 : : /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
3434 : : lexical block and line number information from base_stmt, if given,
3435 : : or from the last stmt of the block otherwise. */
3436 : :
static gimple *
insert_init_debug_bind (copy_body_data *id,
			basic_block bb, tree var, tree value,
			gimple *base_stmt)
{
  gimple *note;
  gimple_stmt_iterator gsi;
  tree tracked_var;

  /* Debug binds are only emitted for SSA bodies.  */
  if (!gimple_in_ssa_p (id->src_cfun))
    return NULL;

  /* Honor -fno-var-tracking-assignments in the destination function.  */
  if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
    return NULL;

  /* NULL means VAR is not a candidate for debug binding.  */
  tracked_var = target_for_debug_bind (var);
  if (!tracked_var)
    return NULL;

  /* Inherit location/block from BASE_STMT, or from BB's last stmt
     when no BASE_STMT was given.  */
  if (bb)
    {
      gsi = gsi_last_bb (bb);
      if (!base_stmt && !gsi_end_p (gsi))
	base_stmt = gsi_stmt (gsi);
    }

  /* error_mark_node means "value unknown": bind to nothing.  */
  note = gimple_build_debug_bind (tracked_var,
				  value == error_mark_node
				  ? NULL_TREE : unshare_expr (value),
				  base_stmt);

  /* With no BB the caller inserts the note itself; otherwise append it
     after the last stmt (or as the sole stmt of an empty block).  */
  if (bb)
    {
      if (!gsi_end_p (gsi))
	gsi_insert_after (&gsi, note, GSI_SAME_STMT);
      else
	gsi_insert_before (&gsi, note, GSI_SAME_STMT);
    }

  return note;
}
3478 : :
/* Insert INIT_STMT at the end of BB, fixing up non-GIMPLE right-hand
   sides and emitting a matching debug bind for the assigned decl.  */

static void
insert_init_stmt (copy_body_data *id, basic_block bb, gimple *init_stmt)
{
  /* If VAR represents a zero-sized variable, it's possible that the
     assignment statement may result in no gimple statements.  */
  if (init_stmt)
    {
      gimple_stmt_iterator si = gsi_last_bb (bb);

      /* We can end up with init statements that store to a non-register
         from a rhs with a conversion.  Handle that here by forcing the
         rhs into a temporary.  gimple_regimplify_operands is not
         prepared to do this for us.  */
      if (!is_gimple_debug (init_stmt)
	  && !is_gimple_reg (gimple_assign_lhs (init_stmt))
	  && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
	  && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
	{
	  tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
			     TREE_TYPE (gimple_assign_lhs (init_stmt)),
			     gimple_assign_rhs1 (init_stmt));
	  /* Force the conversion result into a new temporary and make
	     INIT_STMT copy from that temporary instead.  */
	  rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
					  GSI_NEW_STMT);
	  gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
	  gimple_assign_set_rhs1 (init_stmt, rhs);
	}
      gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
      if (!is_gimple_debug (init_stmt))
	{
	  gimple_regimplify_operands (init_stmt, &si);

	  /* Record the initialization for the debugger as well.  */
	  tree def = gimple_assign_lhs (init_stmt);
	  insert_init_debug_bind (id, bb, def, def, init_stmt);
	}
    }
}
3515 : :
3516 : : /* Deal with mismatched formal/actual parameters, in a rather brute-force way
3517 : : if need be (which should only be necessary for invalid programs). Attempt
3518 : : to convert VAL to TYPE and return the result if it is possible, just return
3519 : : a zero constant of the given type if it fails. */
3520 : :
3521 : : tree
3522 : 2071 : force_value_to_type (tree type, tree value)
3523 : : {
3524 : : /* If we can match up types by promotion/demotion do so. */
3525 : 2071 : if (fold_convertible_p (type, value))
3526 : 541 : return fold_convert (type, value);
3527 : :
3528 : : /* ??? For valid programs we should not end up here.
3529 : : Still if we end up with truly mismatched types here, fall back
3530 : : to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
3531 : : GIMPLE to the following passes. */
3532 : 1530 : if (TREE_CODE (value) == WITH_SIZE_EXPR)
3533 : 0 : return error_mark_node;
3534 : 1530 : else if (!is_gimple_reg_type (TREE_TYPE (value))
3535 : 1530 : || TYPE_SIZE (type) == TYPE_SIZE (TREE_TYPE (value)))
3536 : 1239 : return fold_build1 (VIEW_CONVERT_EXPR, type, value);
3537 : : else
3538 : 291 : return build_zero_cst (type);
3539 : : }
3540 : :
3541 : : /* Initialize parameter P with VALUE. If needed, produce init statement
3542 : : at the end of BB. When BB is NULL, we return init statement to be
3543 : : output later. */
static gimple *
setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
		     basic_block bb, tree *vars)
{
  gimple *init_stmt = NULL;
  tree var;
  /* DEF is P's default definition SSA name in the source body, if any.  */
  tree def = (gimple_in_ssa_p (cfun)
	      ? ssa_default_def (id->src_cfun, p) : NULL);

  /* Make an equivalent VAR_DECL.  Note that we must NOT remap the type
     here since the type of this decl must be visible to the calling
     function.  */
  var = copy_decl_to_var (p, id);

  /* Declare this new variable.  */
  DECL_CHAIN (var) = *vars;
  *vars = var;

  /* Make gimplifier happy about this variable.  */
  DECL_SEEN_IN_BIND_EXPR_P (var) = 1;

  /* If the parameter is never assigned to, has no SSA_NAMEs created,
     we would not need to create a new variable here at all, if it
     weren't for debug info.  Still, we can just use the argument
     value.  */
  if (TREE_READONLY (p)
      && !TREE_ADDRESSABLE (p)
      && value
      && !TREE_SIDE_EFFECTS (value)
      && !def)
    {
      /* We may produce non-gimple trees by adding NOPs or introduce invalid
	 sharing when the value is not constant or DECL.  And we need to make
	 sure that it cannot be modified from another path in the callee.  */
      if (((is_gimple_min_invariant (value)
	    /* When the parameter is used in a context that forces it to
	       not be a GIMPLE register avoid substituting something that
	       is not a decl there.  */
	    && ! DECL_NOT_GIMPLE_REG_P (p))
	   || (DECL_P (value) && TREE_READONLY (value))
	   || (auto_var_in_fn_p (value, id->dst_fn)
	       && !TREE_ADDRESSABLE (value)))
	  && useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value))
	  /* We have to be very careful about ADDR_EXPR.  Make sure
	     the base variable isn't a local variable of the inlined
	     function, e.g., when doing recursive inlining, direct or
	     mutually-recursive or whatever, which is why we don't
	     just test whether fn == current_function_decl.  */
	  && ! self_inlining_addr_expr (value, fn))
	{
	  /* Substitute the value directly; remember VAR in the debug
	     map so debug binds can still refer to the parameter.  */
	  insert_decl_map (id, p, value);
	  if (!id->debug_map)
	    id->debug_map = new hash_map<tree, tree>;
	  id->debug_map->put (p, var);
	  return insert_init_debug_bind (id, bb, var, value, NULL);
	}
    }

  /* Register the VAR_DECL as the equivalent for the PARM_DECL;
     that way, when the PARM_DECL is encountered, it will be
     automatically replaced by the VAR_DECL.  */
  insert_decl_map (id, p, var);

  /* Even if P was TREE_READONLY, the new VAR should not be.  In the original
     code, we would have constructed a temporary, and then the function body
     would have never changed the value of P.  However, now, we will be
     constructing VAR directly.  Therefore, it must not be TREE_READONLY.  */
  TREE_READONLY (var) = 0;

  /* Coerce the actual argument to the parameter type if needed.  */
  tree rhs = value;
  if (value
      && value != error_mark_node
      && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
    rhs = force_value_to_type (TREE_TYPE (p), value);

  /* If there is no setup required and we are in SSA, take the easy route
     replacing all SSA names representing the function parameter by the
     SSA name passed to function.

     We need to construct map for the variable anyway as it might be used
     in different SSA names when parameter is set in function.

     Do replacement at -O0 for const arguments replaced by constant.
     This is important for builtin_constant_p and other construct requiring
     constant argument to be visible in inlined function body.  */
  if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
      && (optimize
	  || (TREE_READONLY (p)
	      && is_gimple_min_invariant (rhs)))
      && (TREE_CODE (rhs) == SSA_NAME
	  || is_gimple_min_invariant (rhs))
      && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
    {
      insert_decl_map (id, def, rhs);
      return insert_init_debug_bind (id, bb, var, rhs, NULL);
    }

  /* If the value of argument is never used, don't care about initializing
     it.  */
  if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
    {
      /* When there's a gross type mismatch between the passed value
	 and the declared argument type drop it on the floor and do
	 not bother to insert a debug bind.  */
      if (value && !is_gimple_reg_type (TREE_TYPE (value)))
	return NULL;
      return insert_init_debug_bind (id, bb, var, rhs, NULL);
    }

  /* Initialize this VAR_DECL from the equivalent argument.  Convert
     the argument to the proper type in case it was promoted.  */
  if (value)
    {
      if (rhs == error_mark_node)
	{
	  insert_decl_map (id, p, var);
	  return insert_init_debug_bind (id, bb, var, rhs, NULL);
	}

      STRIP_USELESS_TYPE_CONVERSION (rhs);

      /* If we are in SSA form properly remap the default definition.  */
      if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
	{
	  if (def)
	    {
	      def = remap_ssa_name (def, id);
	      init_stmt = gimple_build_assign (def, rhs);
	      /* The remapped name is now an ordinary definition, not a
		 default def of VAR.  */
	      SSA_NAME_IS_DEFAULT_DEF (def) = 0;
	      set_ssa_default_def (cfun, var, NULL);
	    }
	}
      else if (!is_empty_type (TREE_TYPE (var)))
	init_stmt = gimple_build_assign (var, rhs);

      if (bb && init_stmt)
	insert_init_stmt (id, bb, init_stmt);
    }
  return init_stmt;
}
3684 : :
3685 : : /* Generate code to initialize the parameters of the function at the
3686 : : top of the stack in ID from the GIMPLE_CALL STMT. */
3687 : :
static void
initialize_inlined_parameters (copy_body_data *id, gimple *stmt,
			       tree fn, basic_block bb)
{
  tree parms;
  size_t i;
  tree p;
  tree vars = NULL_TREE;
  tree static_chain = gimple_call_chain (stmt);

  /* Figure out what the parameters are.  */
  parms = DECL_ARGUMENTS (fn);

  /* Loop through the parameter declarations, replacing each with an
     equivalent VAR_DECL, appropriately initialized.  */
  for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
    {
      tree val;
      /* A mismatched call may pass fewer arguments than FN declares.  */
      val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
      setup_one_parameter (id, p, val, fn, bb, &vars);
    }
  /* After remapping parameters remap their types.  This has to be done
     in a second loop over all parameters to appropriately remap
     variable sized arrays when the size is specified in a
     parameter following the array.  */
  for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
    {
      tree *varp = id->decl_map->get (p);
      if (varp && VAR_P (*varp))
	{
	  tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
		      ? ssa_default_def (id->src_cfun, p) : NULL);
	  tree var = *varp;
	  TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
	  /* Also remap the default definition if it was remapped
	     to the default definition of the parameter replacement
	     by the parameter setup.  */
	  if (def)
	    {
	      tree *defp = id->decl_map->get (def);
	      if (defp
		  && TREE_CODE (*defp) == SSA_NAME
		  && SSA_NAME_VAR (*defp) == var)
		TREE_TYPE (*defp) = TREE_TYPE (var);
	    }
	  /* When not optimizing and the parameter is unused, assign to
	     a dummy SSA name.  Do this after remapping the type above.  */
	  else if (!optimize
		   && is_gimple_reg (p)
		   && i < gimple_call_num_args (stmt))
	    {
	      tree val = gimple_call_arg (stmt, i);
	      if (val != error_mark_node)
		{
		  if (!useless_type_conversion_p (TREE_TYPE (p),
						  TREE_TYPE (val)))
		    val = force_value_to_type (TREE_TYPE (p), val);
		  def = make_ssa_name (var);
		  gimple *init_stmt = gimple_build_assign (def, val);
		  insert_init_stmt (id, bb, init_stmt);
		}
	    }
	}
    }

  /* Initialize the static chain.  */
  p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
  gcc_assert (fn != current_function_decl);
  if (p)
    {
      /* No static chain?  Seems like a bug in tree-nested.cc.  */
      gcc_assert (static_chain);

      /* The static chain decl is initialized exactly like a parameter.  */
      setup_one_parameter (id, p, static_chain, fn, bb, &vars);
    }

  /* Reverse so the variables appear in the correct order in DWARF
     debug info.  */
  vars = nreverse (vars);

  declare_inline_vars (id->block, vars);
}
3770 : :
3771 : :
3772 : : /* Declare a return variable to replace the RESULT_DECL for the
3773 : : function we are calling. An appropriate DECL_STMT is returned.
3774 : : The USE_STMT is filled to contain a use of the declaration to
3775 : : indicate the return value of the function.
3776 : :
3777 : : RETURN_SLOT, if non-null is place where to store the result. It
3778 : : is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
3779 : : was the LHS of the MODIFY_EXPR to which this call is the RHS.
3780 : :
3781 : : The return value is a (possibly null) value that holds the result
3782 : : as seen by the caller. */
3783 : :
static tree
declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
			 basic_block entry_bb)
{
  tree callee = id->src_fn;
  tree result = DECL_RESULT (callee);
  tree callee_type = TREE_TYPE (result);
  tree caller_type;
  tree var, use;

  /* Handle type-mismatches in the function declaration return type
     vs. the call expression.  */
  if (modify_dest)
    caller_type = TREE_TYPE (modify_dest);
  else if (return_slot)
    caller_type = TREE_TYPE (return_slot);
  else /* No LHS on the call.  */
    caller_type = TREE_TYPE (TREE_TYPE (callee));

  /* We don't need to do anything for functions that don't return anything.  */
  if (VOID_TYPE_P (callee_type))
    return NULL_TREE;

  /* If there was a return slot, then the return value is the
     dereferenced address of that object.  */
  if (return_slot)
    {
      /* The front end shouldn't have used both return_slot and
	 a modify expression.  */
      gcc_assert (!modify_dest);
      if (DECL_BY_REFERENCE (result))
	{
	  tree return_slot_addr = build_fold_addr_expr (return_slot);
	  STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);

	  /* We are going to construct *&return_slot and we can't do that
	     for variables believed to be not addressable.

	     FIXME: This check possibly can match, because values returned
	     via return slot optimization are not believed to have address
	     taken by alias analysis.  */
	  gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
	  var = return_slot_addr;
	  mark_addressable (return_slot);
	}
      else
	{
	  var = return_slot;
	  gcc_assert (TREE_CODE (var) != SSA_NAME);
	  if (TREE_ADDRESSABLE (result))
	    mark_addressable (var);
	}
      /* Propagate the not-a-register property onto the slot decl.  */
      if (DECL_NOT_GIMPLE_REG_P (result)
	  && DECL_P (var))
	DECL_NOT_GIMPLE_REG_P (var) = 1;

      if (!useless_type_conversion_p (callee_type, caller_type))
	var = build1 (VIEW_CONVERT_EXPR, callee_type, var);

      /* No value is exposed to the caller; the slot itself is written.  */
      use = NULL;
      goto done;
    }

  /* All types requiring non-trivial constructors should have been handled.  */
  gcc_assert (!TREE_ADDRESSABLE (callee_type));

  /* Attempt to avoid creating a new temporary variable.  */
  if (modify_dest
      && TREE_CODE (modify_dest) != SSA_NAME)
    {
      bool use_it = false;

      /* We can't use MODIFY_DEST if there's type promotion involved.  */
      if (!useless_type_conversion_p (callee_type, caller_type))
	use_it = false;

      /* ??? If we're assigning to a variable sized type, then we must
	 reuse the destination variable, because we've no good way to
	 create variable sized temporaries at this point.  */
      else if (!poly_int_tree_p (TYPE_SIZE_UNIT (caller_type)))
	use_it = true;

      /* If the callee cannot possibly modify MODIFY_DEST, then we can
	 reuse it as the result of the call directly.  Don't do this if
	 it would promote MODIFY_DEST to addressable.  */
      else if (TREE_ADDRESSABLE (result))
	use_it = false;
      else
	{
	  tree base_m = get_base_address (modify_dest);

	  /* If the base isn't a decl, then it's a pointer, and we don't
	     know where that's going to go.  */
	  if (!DECL_P (base_m))
	    use_it = false;
	  else if (is_global_var (base_m))
	    use_it = false;
	  else if (DECL_NOT_GIMPLE_REG_P (result)
		   && !DECL_NOT_GIMPLE_REG_P (base_m))
	    use_it = false;
	  else if (!TREE_ADDRESSABLE (base_m))
	    use_it = true;
	}

      if (use_it)
	{
	  var = modify_dest;
	  use = NULL;
	  goto done;
	}
    }

  gcc_assert (poly_int_tree_p (TYPE_SIZE_UNIT (callee_type)));

  /* Fall back to a fresh temporary standing in for RESULT.  */
  var = copy_result_decl_to_var (result, id);
  DECL_SEEN_IN_BIND_EXPR_P (var) = 1;

  /* Do not have the rest of GCC warn about this variable as it should
     not be visible to the user.  */
  suppress_warning (var /* OPT_Wuninitialized? */);

  declare_inline_vars (id->block, var);

  /* Build the use expr.  If the return type of the function was
     promoted, convert it back to the expected type.  */
  use = var;
  if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
    {
      /* If we can match up types by promotion/demotion do so.  */
      if (fold_convertible_p (caller_type, var))
	use = fold_convert (caller_type, var);
      else
	{
	  /* ??? For valid programs we should not end up here.
	     Still if we end up with truly mismatched types here, fall back
	     to using a MEM_REF to not leak invalid GIMPLE to the following
	     passes.  */
	  /* Prevent var from being written into SSA form.  */
	  if (is_gimple_reg_type (TREE_TYPE (var)))
	    DECL_NOT_GIMPLE_REG_P (var) = true;
	  use = fold_build2 (MEM_REF, caller_type,
			     build_fold_addr_expr (var),
			     build_int_cst (ptr_type_node, 0));
	}
    }

  STRIP_USELESS_TYPE_CONVERSION (use);

  /* A by-reference RESULT is represented by the address of VAR.  */
  if (DECL_BY_REFERENCE (result))
    {
      TREE_ADDRESSABLE (var) = 1;
      var = build_fold_addr_expr (var);
    }

 done:
  /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
     way, when the RESULT_DECL is encountered, it will be
     automatically replaced by the VAR_DECL.

     When returning by reference, ensure that RESULT_DECL remaps to
     gimple_val.  */
  if (DECL_BY_REFERENCE (result)
      && !is_gimple_val (var))
    {
      tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
      insert_decl_map (id, result, temp);
      /* When RESULT_DECL is in SSA form, we need to remap and initialize
	 it's default_def SSA_NAME.  */
      if (gimple_in_ssa_p (id->src_cfun)
	  && is_gimple_reg (result))
	if (tree default_def = ssa_default_def (id->src_cfun, result))
	  {
	    temp = make_ssa_name (temp);
	    insert_decl_map (id, default_def, temp);
	  }
      insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
    }
  else
    insert_decl_map (id, result, var);

  /* Remember this so we can ignore it in remap_decls.  */
  id->retvar = var;
  return use;
}
3968 : :
3969 : : /* Determine if the function can be copied. If so return NULL. If
3970 : : not return a string describng the reason for failure. */
3971 : :
3972 : : const char *
3973 : 17027769 : copy_forbidden (struct function *fun)
3974 : : {
3975 : 17027769 : const char *reason = fun->cannot_be_copied_reason;
3976 : :
3977 : : /* Only examine the function once. */
3978 : 17027769 : if (fun->cannot_be_copied_set)
3979 : : return reason;
3980 : :
3981 : : /* We cannot copy a function that receives a non-local goto
3982 : : because we cannot remap the destination label used in the
3983 : : function that is performing the non-local goto. */
3984 : : /* ??? Actually, this should be possible, if we work at it.
3985 : : No doubt there's just a handful of places that simply
3986 : : assume it doesn't happen and don't substitute properly. */
3987 : 9704872 : if (fun->has_nonlocal_label)
3988 : : {
3989 : 778 : reason = G_("function %q+F can never be copied "
3990 : : "because it receives a non-local goto");
3991 : 778 : goto fail;
3992 : : }
3993 : :
3994 : 9704094 : if (fun->has_forced_label_in_static)
3995 : : {
3996 : 224 : reason = G_("function %q+F can never be copied because it saves "
3997 : : "address of local label in a static variable");
3998 : 224 : goto fail;
3999 : : }
4000 : :
4001 : 9703870 : fail:
4002 : 9704872 : fun->cannot_be_copied_reason = reason;
4003 : 9704872 : fun->cannot_be_copied_set = true;
4004 : 9704872 : return reason;
4005 : : }
4006 : :
4007 : :
4008 : : static const char *inline_forbidden_reason;
4009 : :
/* A callback for walk_gimple_seq to handle statements.  Returns non-null
   iff a function cannot be inlined.  Also sets the reason why by storing
   a translatable message in inline_forbidden_reason.  WIP->info carries
   the FUNCTION_DECL being examined.  */

static tree
inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			 struct walk_stmt_info *wip)
{
  tree fn = (tree) wip->info;
  tree t;
  gimple *stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      /* Refuse to inline alloca call unless user explicitly forced so as
	 this may change program's memory overhead drastically when the
	 function using alloca is called in loop.  In GCC present in
	 SPEC2000 inlining into schedule_block cause it to require 2GB of
	 RAM instead of 256MB.  Don't do so for alloca calls emitted for
	 VLA objects as those can't cause unbounded growth (they're always
	 wrapped inside stack_save/stack_restore regions.  */
      if (gimple_maybe_alloca_call_p (stmt)
	  && !gimple_call_alloca_for_var_p (as_a <gcall *> (stmt))
	  && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
	{
	  inline_forbidden_reason
	    = G_("function %q+F can never be inlined because it uses "
		 "alloca (override using the always_inline attribute)");
	  *handled_ops_p = true;
	  return fn;
	}

      t = gimple_call_fndecl (stmt);
      if (t == NULL_TREE)
	break;

      /* We cannot inline functions that call setjmp.  */
      if (setjmp_call_p (t))
	{
	  inline_forbidden_reason
	    = G_("function %q+F can never be inlined because it uses setjmp");
	  *handled_ops_p = true;
	  return t;
	}

      if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
	switch (DECL_FUNCTION_CODE (t))
	  {
	  /* We cannot inline functions that take a variable number of
	     arguments.  */
	  case BUILT_IN_VA_START:
	  case BUILT_IN_NEXT_ARG:
	  case BUILT_IN_VA_END:
	    inline_forbidden_reason
	      = G_("function %q+F can never be inlined because it "
		   "uses variable argument lists");
	    *handled_ops_p = true;
	    return t;

	  case BUILT_IN_LONGJMP:
	    /* We can't inline functions that call __builtin_longjmp at
	       all.  The non-local goto machinery really requires the
	       destination be in a different function.  If we allow the
	       function calling __builtin_longjmp to be inlined into the
	       function calling __builtin_setjmp, Things will Go Awry.  */
	    inline_forbidden_reason
	      = G_("function %q+F can never be inlined because "
		   "it uses setjmp-longjmp exception handling");
	    *handled_ops_p = true;
	    return t;

	  case BUILT_IN_NONLOCAL_GOTO:
	    /* Similarly.  */
	    inline_forbidden_reason
	      = G_("function %q+F can never be inlined because "
		   "it uses non-local goto");
	    *handled_ops_p = true;
	    return t;

	  case BUILT_IN_RETURN:
	  case BUILT_IN_APPLY_ARGS:
	    /* If a __builtin_apply_args caller would be inlined,
	       it would be saving arguments of the function it has
	       been inlined into.  Similarly __builtin_return would
	       return from the function the inline has been inlined into.  */
	    inline_forbidden_reason
	      = G_("function %q+F can never be inlined because "
		   "it uses %<__builtin_return%> or %<__builtin_apply_args%>");
	    *handled_ops_p = true;
	    return t;

	  default:
	    break;
	  }
      break;

    case GIMPLE_GOTO:
      t = gimple_goto_dest (stmt);

      /* We will not inline a function which uses computed goto.  The
	 addresses of its local labels, which may be tucked into
	 global storage, are of course not constant across
	 instantiations, which causes unexpected behavior.  */
      if (TREE_CODE (t) != LABEL_DECL)
	{
	  inline_forbidden_reason
	    = G_("function %q+F can never be inlined "
		 "because it contains a computed goto");
	  *handled_ops_p = true;
	  return t;
	}
      break;

    default:
      break;
    }

  /* Nothing forbidding found in this statement; let the walker descend
     into operands as usual.  */
  *handled_ops_p = false;
  return NULL_TREE;
}
4130 : :
/* Return true if FNDECL is a function that cannot be inlined into
   another one.  On a positive answer, inline_forbidden_reason holds
   the translatable message explaining why.  */

static bool
inline_forbidden_p (tree fndecl)
{
  struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
  struct walk_stmt_info wi;
  basic_block bb;
  bool forbidden_p = false;

  /* First check for shared reasons not to copy the code.  */
  inline_forbidden_reason = copy_forbidden (fun);
  if (inline_forbidden_reason != NULL)
    return true;

  /* Next, walk the statements of the function looking for
     constructs we can't handle, or are non-optimal for inlining.  */
  hash_set<tree> visited_nodes;
  memset (&wi, 0, sizeof (wi));
  wi.info = (void *) fndecl;
  wi.pset = &visited_nodes;

  /* We cannot inline a function with a variable-sized parameter because we
     cannot materialize a temporary of such a type in the caller if need be.
     Note that the return case is not symmetrical because we can guarantee
     that a temporary is not needed by means of CALL_EXPR_RETURN_SLOT_OPT.  */
  for (tree parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    if (!poly_int_tree_p (DECL_SIZE (parm)))
      {
	inline_forbidden_reason
	  = G_("function %q+F can never be inlined because "
	       "it has a VLA argument");
	return true;
      }

  /* Scan every statement of every basic block; stop at the first
     forbidding construct found by inline_forbidden_p_stmt.  */
  FOR_EACH_BB_FN (bb, fun)
    {
      gimple *ret;
      gimple_seq seq = bb_seq (bb);
      ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
      forbidden_p = (ret != NULL);
      if (forbidden_p)
	break;
    }

  return forbidden_p;
}
4179 : :
/* Return false if the function FNDECL cannot be inlined on account of its
   attributes, true otherwise.  */
static bool
function_attribute_inlinable_p (const_tree fndecl)
{
  /* As soon as any attribute on FNDECL matches one the target defines,
     defer the decision to the target hook; otherwise attributes do not
     block inlining.  */
  for (auto scoped_attributes : targetm.attribute_table)
    {
      const_tree a;

      for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
	{
	  const_tree name = get_attribute_name (a);

	  for (const attribute_spec &attribute : scoped_attributes->attributes)
	    if (is_attribute_p (attribute.name, name))
	      return targetm.function_attribute_inlinable_p (fndecl);
	}
    }

  return true;
}
4201 : :
/* Returns nonzero if FN is a function that does not have any
   fundamental inline blocking properties.  The verdict is cached in
   DECL_UNINLINABLE so the checks run at most once per function.  */

bool
tree_inlinable_function_p (tree fn)
{
  bool inlinable = true;
  bool do_warning;
  tree always_inline;

  /* If we've already decided this function shouldn't be inlined,
     there's no need to check again.  */
  if (DECL_UNINLINABLE (fn))
    return false;

  /* We only warn for functions declared `inline' by the user.  */
  do_warning = (opt_for_fn (fn, warn_inline)
		&& DECL_DECLARED_INLINE_P (fn)
		&& !DECL_NO_INLINE_WARNING_P (fn)
		&& !DECL_IN_SYSTEM_HEADER (fn));

  always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));

  if (flag_no_inline
      && always_inline == NULL)
    {
      if (do_warning)
	warning (OPT_Winline, "function %q+F can never be inlined because it "
		 "is suppressed using %<-fno-inline%>", fn);
      inlinable = false;
    }

  else if (!function_attribute_inlinable_p (fn))
    {
      if (do_warning)
	warning (OPT_Winline, "function %q+F can never be inlined because it "
		 "uses attributes conflicting with inlining", fn);
      inlinable = false;
    }

  else if (inline_forbidden_p (fn))
    {
      /* See if we should warn about uninlinable functions.  Previously,
	 some of these warnings would be issued while trying to expand
	 the function inline, but that would cause multiple warnings
	 about functions that would for example call alloca.  But since
	 this a property of the function, just one warning is enough.
	 As a bonus we can now give more details about the reason why a
	 function is not inlinable.  */
      if (always_inline)
	error (inline_forbidden_reason, fn);
      else if (do_warning)
	warning (OPT_Winline, inline_forbidden_reason, fn);

      inlinable = false;
    }

  /* Squirrel away the result so that we don't have to check again.  */
  DECL_UNINLINABLE (fn) = !inlinable;

  return inlinable;
}
4264 : :
4265 : : /* Estimate the cost of a memory move of type TYPE. Use machine dependent
4266 : : word size and take possible memcpy call into account and return
4267 : : cost based on whether optimizing for size or speed according to SPEED_P. */
4268 : :
4269 : : int
4270 : 337875220 : estimate_move_cost (tree type, bool ARG_UNUSED (speed_p))
4271 : : {
4272 : 337875220 : HOST_WIDE_INT size;
4273 : :
4274 : 337875220 : gcc_assert (!VOID_TYPE_P (type));
4275 : :
4276 : 337875220 : if (VECTOR_TYPE_P (type))
4277 : : {
4278 : 3932257 : scalar_mode inner = SCALAR_TYPE_MODE (TREE_TYPE (type));
4279 : 3932257 : machine_mode simd = targetm.vectorize.preferred_simd_mode (inner);
4280 : 3932257 : int orig_mode_size
4281 : 7864514 : = estimated_poly_value (GET_MODE_SIZE (TYPE_MODE (type)));
4282 : 7864514 : int simd_mode_size = estimated_poly_value (GET_MODE_SIZE (simd));
4283 : 3932257 : return ((orig_mode_size + simd_mode_size - 1)
4284 : 3932257 : / simd_mode_size);
4285 : : }
4286 : :
4287 : 333942963 : size = int_size_in_bytes (type);
4288 : :
4289 : 333945431 : if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (speed_p))
4290 : : /* Cost of a memcpy call, 3 arguments and the call. */
4291 : : return 4;
4292 : : else
4293 : 333035541 : return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
4294 : : }
4295 : :
/* Returns cost of operation CODE, according to WEIGHTS.  OP1 and OP2 are
   the (possibly NULL) operands; only OP2 is inspected, to give constant
   divisors a cheaper cost than general division.  */

static int
estimate_operator_cost (enum tree_code code, eni_weights *weights,
			tree op1 ATTRIBUTE_UNUSED, tree op2)
{
  switch (code)
    {
    /* These are "free" conversions, or their presumed cost
       is folded into other operations.  */
    case RANGE_EXPR:
    CASE_CONVERT:
    case COMPLEX_EXPR:
    case PAREN_EXPR:
    case VIEW_CONVERT_EXPR:
      return 0;

    /* Assign cost of 1 to usual operations.
       ??? We may consider mapping RTL costs to this.  */
    case COND_EXPR:
    case VEC_COND_EXPR:
    case VEC_PERM_EXPR:

    case PLUS_EXPR:
    case POINTER_PLUS_EXPR:
    case POINTER_DIFF_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case MULT_HIGHPART_EXPR:

    case ADDR_SPACE_CONVERT_EXPR:
    case FIXED_CONVERT_EXPR:
    case FIX_TRUNC_EXPR:

    case NEGATE_EXPR:
    case FLOAT_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case ABS_EXPR:
    case ABSU_EXPR:

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case BIT_NOT_EXPR:

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case TRUTH_NOT_EXPR:

    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:

    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:

    case CONJ_EXPR:

    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:

    case REALIGN_LOAD_EXPR:

    case WIDEN_SUM_EXPR:
    case WIDEN_MULT_EXPR:
    case DOT_PROD_EXPR:
    case SAD_EXPR:
    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:
    case WIDEN_LSHIFT_EXPR:

    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
    case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_PACK_FLOAT_EXPR:
    case VEC_WIDEN_LSHIFT_HI_EXPR:
    case VEC_WIDEN_LSHIFT_LO_EXPR:
    case VEC_DUPLICATE_EXPR:
    case VEC_SERIES_EXPR:

      return 1;

    /* Few special cases of expensive operations.  This is useful
       to avoid inlining on functions having too many of these.  */
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
      /* Division by a constant can often be strength-reduced, so only
	 charge the expensive div/mod weight for non-constant divisors.  */
      if (TREE_CODE (op2) != INTEGER_CST)
	return weights->div_mod_cost;
      return 1;

    /* Bit-field insertion needs several shift and mask operations.  */
    case BIT_INSERT_EXPR:
      return 3;

    default:
      /* We expect a copy assignment with no operator.  */
      gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
      return 0;
    }
}
4434 : :
4435 : :
4436 : : /* Estimate number of instructions that will be created by expanding
4437 : : the statements in the statement sequence STMTS.
4438 : : WEIGHTS contains weights attributed to various constructs. */
4439 : :
4440 : : int
4441 : 234274 : estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
4442 : : {
4443 : 234274 : int cost;
4444 : 234274 : gimple_stmt_iterator gsi;
4445 : :
4446 : 234274 : cost = 0;
4447 : 622222 : for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
4448 : 387948 : cost += estimate_num_insns (gsi_stmt (gsi), weights);
4449 : :
4450 : 234274 : return cost;
4451 : : }
4452 : :
4453 : :
/* Estimate number of instructions that will be created by expanding STMT.
   WEIGHTS contains weights attributed to various constructs (and selects
   size- vs. time-based estimation via WEIGHTS->time_based).  */

int
estimate_num_insns (gimple *stmt, eni_weights *weights)
{
  unsigned cost, i;
  enum gimple_code code = gimple_code (stmt);
  tree lhs;
  tree rhs;

  switch (code)
    {
    case GIMPLE_ASSIGN:
      /* Try to estimate the cost of assignments.  We have three cases to
	 deal with:
	 1) Simple assignments to registers;
	 2) Stores to things that must live in memory.  This includes
	    "normal" stores to scalars, but also assignments of large
	    structures, or constructors of big arrays;

	 Let us look at the first two cases, assuming we have "a = b + C":
	 <GIMPLE_ASSIGN <var_decl "a">
			<plus_expr <var_decl "b"> <constant C>>
	 If "a" is a GIMPLE register, the assignment to it is free on almost
	 any target, because "a" usually ends up in a real register.  Hence
	 the only cost of this expression comes from the PLUS_EXPR, and we
	 can ignore the GIMPLE_ASSIGN.
	 If "a" is not a GIMPLE register, the assignment to "a" will most
	 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
	 of moving something into "a", which we compute using the function
	 estimate_move_cost.  */
      if (gimple_clobber_p (stmt))
	return 0;	/* ={v} {CLOBBER} stmt expands to nothing.  */

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);

      cost = 0;

      /* Account for the cost of moving to / from memory.  */
      if (gimple_store_p (stmt))
	cost += estimate_move_cost (TREE_TYPE (lhs), weights->time_based);
      if (gimple_assign_load_p (stmt))
	cost += estimate_move_cost (TREE_TYPE (rhs), weights->time_based);

      /* Pass the second operand only for binary RHS codes so that
	 estimate_operator_cost can look at constant divisors.  */
      cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
				      gimple_assign_rhs1 (stmt),
				      get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
				      == GIMPLE_BINARY_RHS
				      ? gimple_assign_rhs2 (stmt) : NULL);
      break;

    case GIMPLE_COND:
      /* One jump plus the cost of evaluating the comparison.  */
      cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
					 gimple_op (stmt, 0),
					 gimple_op (stmt, 1));
      break;

    case GIMPLE_SWITCH:
      {
	gswitch *switch_stmt = as_a <gswitch *> (stmt);
	/* Take into account cost of the switch + guess 2 conditional jumps for
	   each case label.

	   TODO: once the switch expansion logic is sufficiently separated, we can
	   do better job on estimating cost of the switch.  */
	if (weights->time_based)
	  cost = floor_log2 (gimple_switch_num_labels (switch_stmt)) * 2;
	else
	  cost = gimple_switch_num_labels (switch_stmt) * 2;
      }
      break;

    case GIMPLE_CALL:
      {
	tree decl;

	if (gimple_call_internal_p (stmt))
	  return 0;
	else if ((decl = gimple_call_fndecl (stmt))
		 && fndecl_built_in_p (decl))
	  {
	    /* Do not special case builtins where we see the body.
	       This just confuse inliner.  */
	    struct cgraph_node *node;
	    if ((node = cgraph_node::get (decl))
		&& node->definition)
	      ;
	    /* For buitins that are likely expanded to nothing or
	       inlined do not account operand costs.  */
	    else if (is_simple_builtin (decl))
	      return 0;
	    else if (is_inexpensive_builtin (decl))
	      return weights->target_builtin_call_cost;
	    else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
	      {
		/* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
		   specialize the cheap expansion we do here.
		   ??? This asks for a more general solution.  */
		switch (DECL_FUNCTION_CODE (decl))
		  {
		  case BUILT_IN_POW:
		  case BUILT_IN_POWF:
		  case BUILT_IN_POWL:
		    if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
			&& (real_equal
			    (&TREE_REAL_CST (gimple_call_arg (stmt, 1)),
			     &dconst2)))
		      return estimate_operator_cost
			  (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
			   gimple_call_arg (stmt, 0));
		    break;

		  default:
		    break;
		  }
	      }
	  }

	cost = decl ? weights->call_cost : weights->indirect_call_cost;
	if (gimple_call_lhs (stmt))
	  cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)),
				      weights->time_based);
	for (i = 0; i < gimple_call_num_args (stmt); i++)
	  {
	    tree arg = gimple_call_arg (stmt, i);
	    cost += estimate_move_cost (TREE_TYPE (arg),
					weights->time_based);
	  }
	break;
      }

    case GIMPLE_RETURN:
      return weights->return_cost;

    case GIMPLE_GOTO:
    case GIMPLE_LABEL:
    case GIMPLE_NOP:
    case GIMPLE_PHI:
    case GIMPLE_PREDICT:
    case GIMPLE_DEBUG:
      return 0;

    case GIMPLE_ASM:
      {
	int count = asm_str_count (gimple_asm_string (as_a <gasm *> (stmt)));
	/* 1000 means infinity. This avoids overflows later
	   with very long asm statements.  */
	if (count > 1000)
	  count = 1000;
	/* If this asm is asm inline, count anything as minimum size.  */
	if (gimple_asm_inline_p (as_a <gasm *> (stmt)))
	  count = MIN (1, count);
	return MAX (1, count);
      }

    case GIMPLE_RESX:
      /* This is either going to be an external function call with one
	 argument, or two register copy statements plus a goto.  */
      return 2;

    case GIMPLE_EH_DISPATCH:
      /* ??? This is going to turn into a switch statement.  Ideally
	 we'd have a look at the eh region and estimate the number of
	 edges involved.  */
      return 10;

    case GIMPLE_BIND:
      return estimate_num_insns_seq (
	       gimple_bind_body (as_a <gbind *> (stmt)),
	       weights);

    case GIMPLE_EH_FILTER:
      return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);

    case GIMPLE_CATCH:
      return estimate_num_insns_seq (gimple_catch_handler (
				       as_a <gcatch *> (stmt)),
				     weights);

    case GIMPLE_TRY:
      return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
	      + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));

    /* OMP directives are generally very expensive.  */

    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_ATOMIC_STORE:
    case GIMPLE_OMP_CONTINUE:
      /* ...except these, which are cheap.  */
      return 0;

    case GIMPLE_OMP_ATOMIC_LOAD:
      return weights->omp_cost;

    case GIMPLE_OMP_FOR:
      return (weights->omp_cost
	      + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
	      + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_MASKED:
    case GIMPLE_OMP_SCOPE:
    case GIMPLE_OMP_DISPATCH:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_STRUCTURED_BLOCK:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
      return (weights->omp_cost
	      + estimate_num_insns_seq (gimple_omp_body (stmt), weights));

    case GIMPLE_TRANSACTION:
      return (weights->tm_cost
	      + estimate_num_insns_seq (gimple_transaction_body (
					  as_a <gtransaction *> (stmt)),
					weights));

    default:
      gcc_unreachable ();
    }

  return cost;
}
4687 : :
4688 : : /* Estimate number of instructions that will be created by expanding
4689 : : function FNDECL. WEIGHTS contains weights attributed to various
4690 : : constructs. */
4691 : :
4692 : : int
4693 : 0 : estimate_num_insns_fn (tree fndecl, eni_weights *weights)
4694 : : {
4695 : 0 : struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
4696 : 0 : gimple_stmt_iterator bsi;
4697 : 0 : basic_block bb;
4698 : 0 : int n = 0;
4699 : :
4700 : 0 : gcc_assert (my_function && my_function->cfg);
4701 : 0 : FOR_EACH_BB_FN (bb, my_function)
4702 : : {
4703 : 0 : for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
4704 : 0 : n += estimate_num_insns (gsi_stmt (bsi), weights);
4705 : : }
4706 : :
4707 : 0 : return n;
4708 : : }
4709 : :
4710 : :
4711 : : /* Initializes weights used by estimate_num_insns. */
4712 : :
4713 : : void
4714 : 281966 : init_inline_once (void)
4715 : : {
4716 : 281966 : eni_size_weights.call_cost = 1;
4717 : 281966 : eni_size_weights.indirect_call_cost = 3;
4718 : 281966 : eni_size_weights.target_builtin_call_cost = 1;
4719 : 281966 : eni_size_weights.div_mod_cost = 1;
4720 : 281966 : eni_size_weights.omp_cost = 40;
4721 : 281966 : eni_size_weights.tm_cost = 10;
4722 : 281966 : eni_size_weights.time_based = false;
4723 : 281966 : eni_size_weights.return_cost = 1;
4724 : :
4725 : : /* Estimating time for call is difficult, since we have no idea what the
4726 : : called function does. In the current uses of eni_time_weights,
4727 : : underestimating the cost does less harm than overestimating it, so
4728 : : we choose a rather small value here. */
4729 : 281966 : eni_time_weights.call_cost = 10;
4730 : 281966 : eni_time_weights.indirect_call_cost = 15;
4731 : 281966 : eni_time_weights.target_builtin_call_cost = 1;
4732 : 281966 : eni_time_weights.div_mod_cost = 10;
4733 : 281966 : eni_time_weights.omp_cost = 40;
4734 : 281966 : eni_time_weights.tm_cost = 40;
4735 : 281966 : eni_time_weights.time_based = true;
4736 : 281966 : eni_time_weights.return_cost = 2;
4737 : 281966 : }
4738 : :
4739 : :
4740 : : /* Install new lexical TREE_BLOCK underneath 'current_block'. */
4741 : :
4742 : : static void
4743 : 27851855 : prepend_lexical_block (tree current_block, tree new_block)
4744 : : {
4745 : 27851855 : BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
4746 : 27851855 : BLOCK_SUBBLOCKS (current_block) = new_block;
4747 : 27851855 : BLOCK_SUPERCONTEXT (new_block) = current_block;
4748 : 27851855 : }
4749 : :
/* Add local variables from CALLEE to CALLER.  Each variable is remapped
   through ID's decl map; DECL_DEBUG_EXPRs are remapped as well so debug
   info in the caller refers to the copied decls.  */

static inline void
add_local_variables (struct function *callee, struct function *caller,
		     copy_body_data *id)
{
  tree var;
  unsigned ix;

  FOR_EACH_LOCAL_DECL (callee, ix, var)
    if (!can_be_nonlocal (var, id))
      {
	tree new_var = remap_decl (var, id);

	/* Remap debug-expressions.  */
	if (VAR_P (new_var)
	    && DECL_HAS_DEBUG_EXPR_P (var)
	    && new_var != var)
	  {
	    tree tem = DECL_DEBUG_EXPR (var);
	    bool old_regimplify = id->regimplify;
	    /* Bump remapping_type_depth so the tree walk treats this as
	       type/debug context; restore regimplify afterwards since the
	       walk may set it.  */
	    id->remapping_type_depth++;
	    walk_tree (&tem, copy_tree_body_r, id, NULL);
	    id->remapping_type_depth--;
	    id->regimplify = old_regimplify;
	    SET_DECL_DEBUG_EXPR (new_var, tem);
	    DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
	  }
	add_local_decl (caller, new_var);
      }
}
4781 : :
4782 : : /* Add to BINDINGS a debug stmt resetting SRCVAR if inlining might
4783 : : have brought in or introduced any debug stmts for SRCVAR. */
4784 : :
4785 : : static inline void
4786 : 9261382 : reset_debug_binding (copy_body_data *id, tree srcvar, gimple_seq *bindings)
4787 : : {
4788 : 9261382 : tree *remappedvarp = id->decl_map->get (srcvar);
4789 : :
4790 : 9261382 : if (!remappedvarp)
4791 : : return;
4792 : :
4793 : 9205237 : if (!VAR_P (*remappedvarp))
4794 : : return;
4795 : :
4796 : 9117368 : if (*remappedvarp == id->retvar)
4797 : : return;
4798 : :
4799 : 9117368 : tree tvar = target_for_debug_bind (*remappedvarp);
4800 : 9117368 : if (!tvar)
4801 : : return;
4802 : :
4803 : 14775240 : gdebug *stmt = gimple_build_debug_bind (tvar, NULL_TREE,
4804 : 7387620 : id->call_stmt);
4805 : 7387620 : gimple_seq_add_stmt (bindings, stmt);
4806 : : }
4807 : :
/* For each inlined variable for which we may have debug bind stmts,
   add before GSI a final debug stmt resetting it, marking the end of
   its life, so that var-tracking knows it doesn't have to compute
   further locations for it.  Covers both the source function's
   parameters and its local decls.  */

static inline void
reset_debug_bindings (copy_body_data *id, gimple_stmt_iterator gsi)
{
  tree var;
  unsigned ix;
  gimple_seq bindings = NULL;

  /* Debug binds only exist for functions in SSA form.  */
  if (!gimple_in_ssa_p (id->src_cfun))
    return;

  /* Nothing to do unless the destination tracks variable assignments.  */
  if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
    return;

  for (var = DECL_ARGUMENTS (id->src_fn);
       var; var = DECL_CHAIN (var))
    reset_debug_binding (id, var, &bindings);

  FOR_EACH_LOCAL_DECL (id->src_cfun, ix, var)
    reset_debug_binding (id, var, &bindings);

  gsi_insert_seq_before_without_update (&gsi, bindings, GSI_SAME_STMT);
}
4835 : :
/* If STMT is a GIMPLE_CALL, replace it with its inline expansion.

   BB is the basic block currently holding STMT; ID carries the shared
   inlining state (source/destination cgraph nodes, decl and debug
   remapping maps, the lexical BLOCK being built, the return variable).
   Blocks whose trailing abnormal edges may have become dead are recorded
   in TO_PURGE; the caller purges them afterwards.

   Returns true iff the call was actually inlined.  Note that on failure
   diagnostics may still be emitted (always_inline errors, -Winline
   warnings) before bailing out through EGRESS.  */

static bool
expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id,
		    bitmap to_purge)
{
  tree use_retvar;
  tree fn;
  hash_map<tree, tree> *dst;
  hash_map<tree, tree> *st = NULL;
  tree return_slot;
  tree modify_dest;
  struct cgraph_edge *cg_edge;
  cgraph_inline_failed_t reason;
  basic_block return_block;
  edge e;
  gimple_stmt_iterator gsi, stmt_gsi;
  bool successfully_inlined = false;
  bool purge_dead_abnormal_edges;
  gcall *call_stmt;
  unsigned int prop_mask, src_properties;
  struct function *dst_cfun;
  tree simduid;
  use_operand_p use;
  gimple *simtenter_stmt = NULL;
  vec<tree> *simtvars_save;
  tree save_stack = NULL_TREE;

  /* The gimplifier uses input_location in too many places, such as
     internal_get_tmp_var ().  */
  location_t saved_location = input_location;
  input_location = gimple_location (stmt);

  /* From here on, we're only interested in CALL_EXPRs.  */
  call_stmt = dyn_cast <gcall *> (stmt);
  if (!call_stmt)
    goto egress;

  cg_edge = id->dst_node->get_edge (stmt);
  /* Edge should exist and speculations should be resolved at this
     stage.  */
  gcc_checking_assert (cg_edge && !cg_edge->speculative);
  /* First, see if we can figure out what function is being called.
     If we cannot, then there is no hope of inlining the function.  */
  if (cg_edge->indirect_unknown_callee)
    goto egress;
  fn = cg_edge->callee->decl;
  gcc_checking_assert (fn);

  /* If FN is a declaration of a function in a nested scope that was
     globally declared inline, we don't set its DECL_INITIAL.
     However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
     C++ front-end uses it for cdtors to refer to their internal
     declarations, that are not real functions.  Fortunately those
     don't have trees to be saved, so we can tell by checking their
     gimple_body.  */
  if (!DECL_INITIAL (fn)
      && DECL_ABSTRACT_ORIGIN (fn)
      && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
    fn = DECL_ABSTRACT_ORIGIN (fn);

  /* Don't try to inline functions that are not well-suited to inlining.
     The inliner recorded its verdict in CG_EDGE->inline_failed; here we
     only decide whether that verdict deserves a diagnostic.  */
  if (cg_edge->inline_failed)
    {
      reason = cg_edge->inline_failed;
      /* If this call was originally indirect, we do not want to emit any
	 inlining related warnings or sorry messages because there are no
	 guarantees regarding those.  */
      if (cg_edge->indirect_inlining_edge)
	goto egress;

      if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
	  /* For extern inline functions that get redefined we always
	     silently ignored always_inline flag.  Better behavior would
	     be to be able to keep both bodies and use extern inline body
	     for inlining, but we can't do that because frontends overwrite
	     the body.  */
	  && !cg_edge->callee->redefined_extern_inline
	  /* During early inline pass, report only when optimization is
	     not turned on.  */
	  && (symtab->global_info_ready
	      || !optimize
	      || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
	  /* PR 20090218-1_0.c. Body can be provided by another module. */
	  && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
	{
	  /* Failing to honor always_inline is a hard error, not a
	     warning.  */
	  error ("inlining failed in call to %<always_inline%> %q+F: %s", fn,
		 cgraph_inline_failed_string (reason));
	  if (gimple_location (stmt) != UNKNOWN_LOCATION)
	    inform (gimple_location (stmt), "called from here");
	  else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
	    inform (DECL_SOURCE_LOCATION (cfun->decl),
		    "called from this function");
	}
      else if (opt_for_fn (fn, warn_inline)
	       && DECL_DECLARED_INLINE_P (fn)
	       && !DECL_NO_INLINE_WARNING_P (fn)
	       && !DECL_IN_SYSTEM_HEADER (fn)
	       && reason != CIF_UNSPECIFIED
	       && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
	       /* Do not warn about not inlined recursive calls.  */
	       && !cg_edge->recursive_p ()
	       /* Avoid warnings during early inline pass. */
	       && symtab->global_info_ready)
	{
	  auto_diagnostic_group d;
	  if (warning (OPT_Winline, "inlining failed in call to %q+F: %s",
		       fn, _(cgraph_inline_failed_string (reason))))
	    {
	      if (gimple_location (stmt) != UNKNOWN_LOCATION)
		inform (gimple_location (stmt), "called from here");
	      else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
		inform (DECL_SOURCE_LOCATION (cfun->decl),
			"called from this function");
	    }
	}
      goto egress;
    }
  id->src_node = cg_edge->callee;

  /* If callee is thunk, all we need is to adjust the THIS pointer
     and redirect to function being thunked.  The thunk itself is
     then removed and expand_call_inline is re-entered recursively on
     the redirected call.  */
  if (id->src_node->thunk)
    {
      cgraph_edge *edge;
      tree virtual_offset = NULL;
      profile_count count = cg_edge->count;
      tree op;
      gimple_stmt_iterator iter = gsi_for_stmt (stmt);
      thunk_info *info = thunk_info::get (id->src_node);

      cgraph_edge::remove (cg_edge);
      edge = id->src_node->callees->clone (id->dst_node, call_stmt,
					   gimple_uid (stmt),
					   profile_count::one (),
					   profile_count::one (),
					   true);
      edge->count = count;
      if (info->virtual_offset_p)
	virtual_offset = size_int (info->virtual_value);
      op = create_tmp_reg_fn (cfun, TREE_TYPE (gimple_call_arg (stmt, 0)),
			      NULL);
      gsi_insert_before (&iter, gimple_build_assign (op,
						     gimple_call_arg (stmt, 0)),
			 GSI_NEW_STMT);
      gcc_assert (info->this_adjusting);
      op = thunk_adjust (&iter, op, 1, info->fixed_offset,
			 virtual_offset, info->indirect_offset);

      gimple_call_set_arg (stmt, 0, op);
      gimple_call_set_fndecl (stmt, edge->callee->decl);
      update_stmt (stmt);
      id->src_node->remove ();
      successfully_inlined = expand_call_inline (bb, stmt, id, to_purge);
      maybe_remove_unused_call_args (cfun, stmt);
      /* This used to return true even though we do fail to inline in
	 some cases.  See PR98525.  */
      goto egress;
    }
  fn = cg_edge->callee->decl;
  cg_edge->callee->get_untransformed_body ();

  if (flag_checking && cg_edge->callee->decl != id->dst_node->decl)
    cg_edge->callee->verify ();

  /* We will be inlining this callee.  */
  id->eh_lp_nr = lookup_stmt_eh_lp (stmt);

  /* Update the callers EH personality.  */
  if (DECL_FUNCTION_PERSONALITY (fn))
    DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
      = DECL_FUNCTION_PERSONALITY (fn);

  /* Split the block before the GIMPLE_CALL.  After this BB ends just
     before the call and RETURN_BLOCK starts with it; the connecting
     edge is removed so the copied body can be wired in between.  */
  stmt_gsi = gsi_for_stmt (stmt);
  gsi_prev (&stmt_gsi);
  e = split_block (bb, gsi_end_p (stmt_gsi) ? NULL : gsi_stmt (stmt_gsi));
  bb = e->src;
  return_block = e->dest;
  remove_edge (e);

  /* If the GIMPLE_CALL was in the last statement of BB, it may have
     been the source of abnormal edges.  In this case, schedule
     the removal of dead abnormal edges.  */
  gsi = gsi_start_bb (return_block);
  gsi_next (&gsi);
  purge_dead_abnormal_edges = gsi_end_p (gsi);

  stmt_gsi = gsi_start_bb (return_block);

  /* Build a block containing code to initialize the arguments, the
     actual inline expansion of the body, and a label for the return
     statements within the function to jump to.  The type of the
     statement expression is the return type of the function call.
     ???  If the call does not have an associated block then we will
     remap all callee blocks to NULL, effectively dropping most of
     its debug information.  This should only happen for calls to
     artificial decls inserted by the compiler itself.  We need to
     either link the inlined blocks into the caller block tree or
     not refer to them in any way to not break GC for locations.  */
  if (tree block = gimple_block (stmt))
    {
      /* We do want to assign a not UNKNOWN_LOCATION BLOCK_SOURCE_LOCATION
	 to make inlined_function_outer_scope_p return true on this BLOCK.  */
      location_t loc = LOCATION_LOCUS (gimple_location (stmt));
      if (loc == UNKNOWN_LOCATION)
	loc = LOCATION_LOCUS (DECL_SOURCE_LOCATION (fn));
      if (loc == UNKNOWN_LOCATION)
	loc = BUILTINS_LOCATION;
      if (has_discriminator (gimple_location (stmt)))
	loc = location_with_discriminator
		(loc, get_discriminator_from_loc (gimple_location (stmt)));
      id->block = make_node (BLOCK);
      BLOCK_ABSTRACT_ORIGIN (id->block) = DECL_ORIGIN (fn);
      BLOCK_SOURCE_LOCATION (id->block) = loc;
      prepend_lexical_block (block, id->block);
    }

  /* Local declarations will be replaced by their equivalents in this map.
     Save the caller's maps and install fresh ones for this callee.  */
  st = id->decl_map;
  id->decl_map = new hash_map<tree, tree>;
  dst = id->debug_map;
  id->debug_map = NULL;
  if (flag_stack_reuse != SR_NONE)
    id->add_clobbers_to_eh_landing_pads = last_basic_block_for_fn (cfun);

  /* Record the function we are about to inline.  */
  id->src_fn = fn;
  id->src_cfun = DECL_STRUCT_FUNCTION (fn);
  id->reset_location = DECL_IGNORED_P (fn);
  id->call_stmt = call_stmt;
  cfun->cfg->full_profile &= id->src_cfun->cfg->full_profile;

  /* When inlining into an OpenMP SIMD-on-SIMT loop, arrange for new automatic
     variables to be added to IFN_GOMP_SIMT_ENTER argument list.  */
  dst_cfun = DECL_STRUCT_FUNCTION (id->dst_fn);
  simtvars_save = id->dst_simt_vars;
  if (!(dst_cfun->curr_properties & PROP_gimple_lomp_dev)
      && (simduid = bb->loop_father->simduid) != NULL_TREE
      && (simduid = ssa_default_def (dst_cfun, simduid)) != NULL_TREE
      && single_imm_use (simduid, &use, &simtenter_stmt)
      && is_gimple_call (simtenter_stmt)
      && gimple_call_internal_p (simtenter_stmt, IFN_GOMP_SIMT_ENTER))
    vec_alloc (id->dst_simt_vars, 0);
  else
    id->dst_simt_vars = NULL;

  if (profile_status_for_fn (id->src_cfun) == PROFILE_ABSENT)
    profile_status_for_fn (dst_cfun) = PROFILE_ABSENT;

  /* If the src function contains an IFN_VA_ARG, then so will the dst
     function after inlining.  Likewise for IFN_GOMP_USE_SIMT.  */
  prop_mask = PROP_gimple_lva | PROP_gimple_lomp_dev;
  src_properties = id->src_cfun->curr_properties & prop_mask;
  if (src_properties != prop_mask)
    dst_cfun->curr_properties &= src_properties | ~prop_mask;
  dst_cfun->calls_eh_return |= id->src_cfun->calls_eh_return;
  id->dst_node->has_omp_variant_constructs
    |= id->src_node->has_omp_variant_constructs;

  gcc_assert (!id->src_cfun->after_inlining);

  id->entry_bb = bb;
  /* A "cold" callee makes this path unlikely; record that as a branch
     prediction hint at the call site.  */
  if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
    {
      gimple_stmt_iterator si = gsi_last_bb (bb);
      gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
						   NOT_TAKEN),
			GSI_NEW_STMT);
    }
  initialize_inlined_parameters (id, stmt, fn, bb);
  if (debug_nonbind_markers_p && debug_inline_points && id->block
      && inlined_function_outer_scope_p (id->block))
    {
      gimple_stmt_iterator si = gsi_last_bb (bb);
      gsi_insert_after (&si, gimple_build_debug_inline_entry
			(id->block, DECL_SOURCE_LOCATION (id->src_fn)),
			GSI_NEW_STMT);
    }

  /* If function to be inlined calls alloca, wrap the inlined function
     in between save_stack = __builtin_stack_save (); and
     __builtin_stack_restore (save_stack); calls.  */
  if (id->src_cfun->calls_alloca && !gimple_call_noreturn_p (stmt))
    /* Don't do this for VLA allocations though, just for user alloca
       calls.  */
    for (struct cgraph_edge *e = id->src_node->callees; e; e = e->next_callee)
      if (gimple_maybe_alloca_call_p (e->call_stmt)
	  && !gimple_call_alloca_for_var_p (e->call_stmt))
	{
	  tree fn = builtin_decl_implicit (BUILT_IN_STACK_SAVE);
	  gcall *call = gimple_build_call (fn, 0);
	  save_stack = make_ssa_name (ptr_type_node);
	  gimple_call_set_lhs (call, save_stack);
	  gimple_stmt_iterator si = gsi_last_bb (bb);
	  gsi_insert_after (&si, call, GSI_NEW_STMT);
	  struct cgraph_node *dest = cgraph_node::get_create (fn);
	  id->dst_node->create_edge (dest, call, bb->count)->inline_failed
	    = CIF_BODY_NOT_AVAILABLE;
	  /* One save/restore pair suffices for any number of alloca
	     calls in the callee.  */
	  break;
	}

  if (DECL_INITIAL (fn))
    {
      if (gimple_block (stmt))
	{
	  tree *var;

	  prepend_lexical_block (id->block,
				 remap_blocks (DECL_INITIAL (fn), id));
	  gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
			       && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
				   == NULL_TREE));
	  /* Move vars for PARM_DECLs from DECL_INITIAL block to id->block,
	     otherwise for DWARF DW_TAG_formal_parameter will not be children of
	     DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
	     under it.  The parameters can be then evaluated in the debugger,
	     but don't show in backtraces.  */
	  for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
	    if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
	      {
		/* Unlink *VAR from the subblock chain and push it onto
		   id->block's variable list.  */
		tree v = *var;
		*var = TREE_CHAIN (v);
		TREE_CHAIN (v) = BLOCK_VARS (id->block);
		BLOCK_VARS (id->block) = v;
	      }
	    else
	      var = &TREE_CHAIN (*var);
	}
      else
	remap_blocks_to_null (DECL_INITIAL (fn), id);
    }

  /* Return statements in the function body will be replaced by jumps
     to the RET_LABEL.  */
  gcc_assert (DECL_INITIAL (fn));
  gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);

  /* Find the LHS to which the result of this call is assigned.  */
  return_slot = NULL;
  if (gimple_call_lhs (stmt))
    {
      modify_dest = gimple_call_lhs (stmt);

      /* The function which we are inlining might not return a value,
	 in which case we should issue a warning that the function
	 does not return a value.  In that case the optimizers will
	 see that the variable to which the value is assigned was not
	 initialized.  We do not want to issue a warning about that
	 uninitialized variable.  */
      if (DECL_P (modify_dest))
	suppress_warning (modify_dest, OPT_Wuninitialized);

      /* If we have a return slot, we can assign it the result directly,
	 except in the case where it is a global variable that is only
	 written to because, the callee being permitted to read or take
	 the address of its DECL_RESULT, this could invalidate the flag
	 on the global variable; instead we preventively remove the store,
	 which would have happened later if the call was not inlined.  */
      if (gimple_call_return_slot_opt_p (call_stmt))
	{
	  tree base = get_base_address (modify_dest);

	  if (VAR_P (base)
	      && (TREE_STATIC (base) || DECL_EXTERNAL (base))
	      && varpool_node::get (base)->writeonly)
	    return_slot = NULL;
	  else
	    return_slot = modify_dest;

	  modify_dest = NULL;
	}
    }
  else
    modify_dest = NULL;

  /* If we are inlining a call to the C++ operator new, we don't want
     to use type based alias analysis on the return value.  Otherwise
     we may get confused if the compiler sees that the inlined new
     function returns a pointer which was just deleted.  See bug
     33407.  */
  if (DECL_IS_OPERATOR_NEW_P (fn))
    {
      return_slot = NULL;
      modify_dest = NULL;
    }

  /* Declare the return variable for the function.  */
  use_retvar = declare_return_variable (id, return_slot, modify_dest, bb);

  /* Add local vars in this inlined callee to caller.  */
  add_local_variables (id->src_cfun, cfun, id);

  if (dump_enabled_p ())
    {
      char buf[128];
      snprintf (buf, sizeof(buf), "%4.2f",
		cg_edge->sreal_frequency ().to_double ());
      dump_printf_loc (MSG_NOTE | MSG_PRIORITY_INTERNALS,
		       call_stmt,
		       "Inlining %C to %C with frequency %s\n",
		       id->src_node, id->dst_node, buf);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  id->src_node->dump (dump_file);
	  id->dst_node->dump (dump_file);
	}
    }

  /* This is it.  Duplicate the callee body.  Assume callee is
     pre-gimplified.  Note that we must not alter the caller
     function in any way before this point, as this CALL_EXPR may be
     a self-referential call; if we're calling ourselves, we need to
     duplicate our body before altering anything.  */
  copy_body (id, bb, return_block, NULL);

  reset_debug_bindings (id, stmt_gsi);

  /* Clobber the storage of by-reference parameter copies once the
     inlined body is done with them, so stack slots can be reused.  */
  if (flag_stack_reuse != SR_NONE)
    for (tree p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
      if (!TREE_THIS_VOLATILE (p))
	{
	  /* The value associated with P is a local temporary only if
	     there is no value associated with P in the debug map.  */
	  tree *varp = id->decl_map->get (p);
	  if (varp
	      && VAR_P (*varp)
	      && !is_gimple_reg (*varp)
	      && !(id->debug_map && id->debug_map->get (p)))
	    {
	      tree clobber = build_clobber (TREE_TYPE (*varp),
					    CLOBBER_STORAGE_END);
	      gimple *clobber_stmt;
	      clobber_stmt = gimple_build_assign (*varp, clobber);
	      gimple_set_location (clobber_stmt, gimple_location (stmt));
	      gsi_insert_before (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
	    }
	}

  /* Emit the matching __builtin_stack_restore if we emitted a
     __builtin_stack_save above.  */
  if (save_stack)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STACK_RESTORE);
      gcall *call = gimple_build_call (fn, 1, save_stack);
      gsi_insert_before (&stmt_gsi, call, GSI_SAME_STMT);
      struct cgraph_node *dest = cgraph_node::get_create (fn);
      id->dst_node->create_edge (dest, call,
				 return_block->count)->inline_failed
	= CIF_BODY_NOT_AVAILABLE;
    }

  /* Reset the escaped solution.  */
  if (cfun->gimple_df)
    {
      pt_solution_reset (&cfun->gimple_df->escaped);
      pt_solution_reset (&cfun->gimple_df->escaped_return);
    }

  /* Add new automatic variables to IFN_GOMP_SIMT_ENTER arguments.  */
  if (id->dst_simt_vars && id->dst_simt_vars->length () > 0)
    {
      size_t nargs = gimple_call_num_args (simtenter_stmt);
      vec<tree> *vars = id->dst_simt_vars;
      auto_vec<tree> newargs (nargs + vars->length ());
      for (size_t i = 0; i < nargs; i++)
	newargs.quick_push (gimple_call_arg (simtenter_stmt, i));
      for (tree *pvar = vars->begin (); pvar != vars->end (); pvar++)
	{
	  tree ptrtype = build_pointer_type (TREE_TYPE (*pvar));
	  newargs.quick_push (build1 (ADDR_EXPR, ptrtype, *pvar));
	}
      gcall *g = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, newargs);
      gimple_call_set_lhs (g, gimple_call_lhs (simtenter_stmt));
      gimple_stmt_iterator gsi = gsi_for_stmt (simtenter_stmt);
      gsi_replace (&gsi, g, false);
    }
  vec_free (id->dst_simt_vars);
  id->dst_simt_vars = simtvars_save;

  /* Clean up: restore the caller's decl/debug maps saved above.  */
  if (id->debug_map)
    {
      delete id->debug_map;
      id->debug_map = dst;
    }
  delete id->decl_map;
  id->decl_map = st;

  /* Unlink the calls virtual operands before replacing it.  */
  unlink_stmt_vdef (stmt);
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
    release_ssa_name (gimple_vdef (stmt));

  /* If the inlined function returns a result that we care about,
     substitute the GIMPLE_CALL with an assignment of the return
     variable to the LHS of the call.  That is, if STMT was
     'a = foo (...)', substitute the call with 'a = USE_RETVAR'.  */
  if (use_retvar && gimple_call_lhs (stmt))
    {
      gimple *old_stmt = stmt;
      stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
      gimple_set_location (stmt, gimple_location (old_stmt));
      gsi_replace (&stmt_gsi, stmt, false);
      maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
      /* Append a clobber for id->retvar if easily possible.  */
      if (flag_stack_reuse != SR_NONE
	  && id->retvar
	  && VAR_P (id->retvar)
	  && id->retvar != return_slot
	  && id->retvar != modify_dest
	  && !TREE_THIS_VOLATILE (id->retvar)
	  && !is_gimple_reg (id->retvar)
	  && !stmt_ends_bb_p (stmt))
	{
	  tree clobber = build_clobber (TREE_TYPE (id->retvar),
					CLOBBER_STORAGE_END);
	  gimple *clobber_stmt;
	  clobber_stmt = gimple_build_assign (id->retvar, clobber);
	  gimple_set_location (clobber_stmt, gimple_location (old_stmt));
	  gsi_insert_after (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
	}
    }
  else
    {
      /* Handle the case of inlining a function with no return
	 statement, which causes the return value to become undefined.  */
      if (gimple_call_lhs (stmt)
	  && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
	{
	  tree name = gimple_call_lhs (stmt);
	  tree var = SSA_NAME_VAR (name);
	  tree def = var ? ssa_default_def (cfun, var) : NULL;

	  if (def)
	    {
	      /* If the variable is used undefined, make this name
		 undefined via a move.  */
	      stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
	      gsi_replace (&stmt_gsi, stmt, true);
	    }
	  else
	    {
	      if (!var)
		{
		  var = create_tmp_reg_fn (cfun, TREE_TYPE (name), NULL);
		  SET_SSA_NAME_VAR_OR_IDENTIFIER (name, var);
		}
	      /* Otherwise make this variable undefined.  */
	      gsi_remove (&stmt_gsi, true);
	      set_ssa_default_def (cfun, var, name);
	      SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
	    }
	}
      /* Replace with a clobber for id->retvar.  */
      else if (flag_stack_reuse != SR_NONE
	       && id->retvar
	       && VAR_P (id->retvar)
	       && id->retvar != return_slot
	       && id->retvar != modify_dest
	       && !TREE_THIS_VOLATILE (id->retvar)
	       && !is_gimple_reg (id->retvar))
	{
	  tree clobber = build_clobber (TREE_TYPE (id->retvar));
	  gimple *clobber_stmt;
	  clobber_stmt = gimple_build_assign (id->retvar, clobber);
	  gimple_set_location (clobber_stmt, gimple_location (stmt));
	  gsi_replace (&stmt_gsi, clobber_stmt, false);
	  maybe_clean_or_replace_eh_stmt (stmt, clobber_stmt);
	}
      else
	gsi_remove (&stmt_gsi, true);
    }

  if (purge_dead_abnormal_edges)
    bitmap_set_bit (to_purge, return_block->index);

  /* If the value of the new expression is ignored, that's OK.  We
     don't warn about this for CALL_EXPRs, so we shouldn't warn about
     the equivalent inlined version either.  */
  if (is_gimple_assign (stmt))
    {
      gcc_assert (gimple_assign_single_p (stmt)
		  || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
      TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
    }

  id->add_clobbers_to_eh_landing_pads = 0;

  /* Output the inlining info for this abstract function, since it has been
     inlined.  If we don't do this now, we can lose the information about the
     variables in the function when the blocks get blown away as soon as we
     remove the cgraph node.  */
  if (gimple_block (stmt))
    (*debug_hooks->outlining_inline_function) (fn);

  /* Update callgraph if needed.  */
  cg_edge->callee->remove ();

  id->block = NULL_TREE;
  id->retvar = NULL_TREE;
  successfully_inlined = true;

 egress:
  input_location = saved_location;
  return successfully_inlined;
}
5442 : :
5443 : : /* Expand call statements reachable from STMT_P.
5444 : : We can only have CALL_EXPRs as the "toplevel" tree code or nested
5445 : : in a MODIFY_EXPR. */
5446 : :
5447 : : static bool
5448 : 31416624 : gimple_expand_calls_inline (basic_block bb, copy_body_data *id,
5449 : : bitmap to_purge)
5450 : : {
5451 : 31416624 : gimple_stmt_iterator gsi;
5452 : 31416624 : bool inlined = false;
5453 : :
5454 : 227836922 : for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
5455 : : {
5456 : 165003674 : gimple *stmt = gsi_stmt (gsi);
5457 : 165003674 : gsi_prev (&gsi);
5458 : :
5459 : 165003674 : if (is_gimple_call (stmt)
5460 : 165003674 : && !gimple_call_internal_p (stmt))
5461 : 15050996 : inlined |= expand_call_inline (bb, stmt, id, to_purge);
5462 : : }
5463 : :
5464 : 31416624 : return inlined;
5465 : : }
5466 : :
5467 : :
/* Walk all basic blocks created after FIRST and try to fold every statement
   in the STATEMENTS pointer set.

   The walk is a depth-first traversal from the entry block along a
   worklist of edges, restricted to reachable blocks; when a block's
   condition folds to a constant (find_taken_edge), only the taken
   successor is followed, so statements made unreachable by inlining
   are never folded (and never warned about).  Blocks that lose EH or
   abnormal edges through folding are collected and purged at the
   end.  */

static void
fold_marked_statements (int first, hash_set<gimple *> *statements)
{
  auto_bitmap to_purge;
  auto_bitmap to_purge_abnormal;

  auto_vec<edge, 20> stack (n_basic_blocks_for_fn (cfun) + 2);
  auto_sbitmap visited (last_basic_block_for_fn (cfun));
  bitmap_clear (visited);

  stack.quick_push (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
  while (!stack.is_empty ())
    {
      /* Look at the edge on the top of the stack.  */
      edge e = stack.pop ();
      basic_block dest = e->dest;

      if (dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
	  || bitmap_bit_p (visited, dest->index))
	continue;

      bitmap_set_bit (visited, dest->index);

      /* Only blocks with index >= FIRST were created by inlining; older
	 blocks carry no marked statements worth re-folding.  */
      if (dest->index >= first)
	for (gimple_stmt_iterator gsi = gsi_start_bb (dest);
	     !gsi_end_p (gsi); gsi_next (&gsi))
	  {
	    if (!statements->contains (gsi_stmt (gsi)))
	      continue;

	    gimple *old_stmt = gsi_stmt (gsi);
	    bool can_make_abnormal_goto = false;
	    tree old_decl = NULL_TREE;

	    if (is_gimple_call (old_stmt))
	      {
		old_decl = gimple_call_fndecl (old_stmt);
		if (stmt_can_make_abnormal_goto (old_stmt))
		  can_make_abnormal_goto = true;
	      }

	    if (old_decl && fndecl_built_in_p (old_decl))
	      {
		/* Folding builtins can create multiple instructions,
		   we need to look at all of them.  I2 is anchored just
		   before GSI so the newly emitted statements can be
		   found afterwards.  */
		gimple_stmt_iterator i2 = gsi;
		gsi_prev (&i2);
		if (fold_stmt (&gsi))
		  {
		    gimple *new_stmt;
		    /* If a builtin at the end of a bb folded into nothing,
		       the following loop won't work.  */
		    if (gsi_end_p (gsi))
		      {
			cgraph_update_edges_for_call_stmt (old_stmt,
							   old_decl, NULL);
			if (can_make_abnormal_goto)
			  bitmap_set_bit (to_purge_abnormal, dest->index);
			break;
		      }
		    if (gsi_end_p (i2))
		      i2 = gsi_start_bb (dest);
		    else
		      gsi_next (&i2);
		    /* Update every statement the fold produced, stopping
		       at the one GSI now points to.  */
		    while (1)
		      {
			new_stmt = gsi_stmt (i2);
			update_stmt (new_stmt);
			cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
							   new_stmt);

			if (new_stmt == gsi_stmt (gsi))
			  {
			    /* It is okay to check only for the very last
			       of these statements.  If it is a throwing
			       statement nothing will change.  If it isn't
			       this can remove EH edges.  If that weren't
			       correct then because some intermediate stmts
			       throw, but not the last one.  That would mean
			       we'd have to split the block, which we can't
			       here and we'd lose anyway.  And as builtins
			       probably never throw, this all
			       is moot anyway.  */
			    if (maybe_clean_or_replace_eh_stmt (old_stmt,
								new_stmt))
			      bitmap_set_bit (to_purge, dest->index);
			    if (can_make_abnormal_goto
				&& !stmt_can_make_abnormal_goto (new_stmt))
			      bitmap_set_bit (to_purge_abnormal, dest->index);
			    break;
			  }
			gsi_next (&i2);
		      }
		  }
	      }
	    else if (fold_stmt (&gsi))
	      {
		/* Re-read the statement from GSI as fold_stmt() may
		   have changed it.  */
		gimple *new_stmt = gsi_stmt (gsi);
		update_stmt (new_stmt);

		if (is_gimple_call (old_stmt)
		    || is_gimple_call (new_stmt))
		  cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
						     new_stmt);

		if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
		  bitmap_set_bit (to_purge, dest->index);
		if (can_make_abnormal_goto
		    && !stmt_can_make_abnormal_goto (new_stmt))
		  bitmap_set_bit (to_purge_abnormal, dest->index);
	      }
	  }

      if (EDGE_COUNT (dest->succs) > 0)
	{
	  /* Avoid warnings emitted from folding statements that
	     became unreachable because of inlined function parameter
	     propagation.  */
	  e = find_taken_edge (dest, NULL_TREE);
	  if (e)
	    stack.quick_push (e);
	  else
	    {
	      edge_iterator ei;
	      FOR_EACH_EDGE (e, ei, dest->succs)
		stack.safe_push (e);
	    }
	}
    }

  gimple_purge_all_dead_eh_edges (to_purge);
  gimple_purge_all_dead_abnormal_call_edges (to_purge_abnormal);
}
5606 : :
/* Expand calls to inline functions in the body of FN.  Returns a set of
   TODO_* flags for the caller pass to execute (empty if nothing was
   inlined), after purging dead EH/abnormal edges and folding the
   statements queued during inlining.  */

unsigned int
optimize_inline_calls (tree fn)
{
  copy_body_data id;
  basic_block bb;
  /* Remember the pre-inlining block count; only statements in blocks
     that existed before inlining are re-folded below.  */
  int last = n_basic_blocks_for_fn (cfun);
  bool inlined_p = false;

  /* Clear out ID.  */
  memset (&id, 0, sizeof (id));

  id.src_node = id.dst_node = cgraph_node::get (fn);
  gcc_assert (id.dst_node->definition);
  id.dst_fn = fn;
  /* Or any functions that aren't finished yet.  */
  if (current_function_decl)
    id.dst_fn = current_function_decl;

  id.copy_decl = copy_decl_maybe_to_var;
  id.transform_call_graph_edges = CB_CGE_DUPLICATE;
  id.transform_new_cfg = false;
  /* Inlined bodies must have their returns rewritten into assignments
     to the call's LHS.  */
  id.transform_return_to_modify = true;
  id.transform_parameter = true;
  id.statements_to_fold = new hash_set<gimple *>;

  push_gimplify_context ();

  /* We make no attempts to keep dominance info up-to-date.  */
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  /* Reach the trees by walking over the CFG, and note the
     enclosing basic-blocks in the call edges.  */
  /* We walk the blocks going forward, because inlined function bodies
     will split id->current_basic_block, and the new blocks will
     follow it; we'll trudge through them, processing their CALL_EXPRs
     along the way.  */
  auto_bitmap to_purge;
  FOR_EACH_BB_FN (bb, cfun)
    inlined_p |= gimple_expand_calls_inline (bb, &id, to_purge);

  pop_gimplify_context (NULL);

  if (flag_checking)
    {
      struct cgraph_edge *e;

      id.dst_node->verify ();

      /* Double check that we inlined everything we are supposed to inline.  */
      for (e = id.dst_node->callees; e; e = e->next_callee)
	gcc_assert (e->inline_failed);
    }

  /* If we didn't inline into the function there is nothing to do.  */
  if (!inlined_p)
    {
      delete id.statements_to_fold;
      return 0;
    }

  /* Fold queued statements.  */
  update_max_bb_count ();
  fold_marked_statements (last, id.statements_to_fold);
  delete id.statements_to_fold;

  /* Finally purge EH and abnormal edges from the call stmts we inlined.
     We need to do this after fold_marked_statements since that may walk
     the SSA use-def chain.  */
  unsigned i;
  bitmap_iterator bi;
  EXECUTE_IF_SET_IN_BITMAP (to_purge, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      if (bb)
	{
	  gimple_purge_dead_eh_edges (bb);
	  gimple_purge_dead_abnormal_call_edges (bb);
	}
    }

  /* All queued debug stmts must have been consumed by now.  */
  gcc_assert (!id.debug_stmts.exists ());

  /* Renumber the lexical scoping (non-code) blocks consecutively.  */
  number_blocks (fn);

  delete_unreachable_blocks_update_callgraph (id.dst_node, false);
  id.dst_node->calls_comdat_local = id.dst_node->check_calls_comdat_local_p ();

  if (flag_checking)
    id.dst_node->verify ();

  /* It would be nice to check SSA/CFG/statement consistency here, but it is
     not possible yet - the IPA passes might make various functions to not
     throw and they don't care to proactively update local EH info.  This is
     done later in fixup_cfg pass that also execute the verification.  */
  return (TODO_update_ssa
	  | TODO_cleanup_cfg
	  | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
	  | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0));
}
5713 : :
5714 : : /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
5715 : :
5716 : : tree
5717 : 1416979131 : copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
5718 : : {
5719 : 1416979131 : enum tree_code code = TREE_CODE (*tp);
5720 : 1416979131 : enum tree_code_class cl = TREE_CODE_CLASS (code);
5721 : :
5722 : : /* We make copies of most nodes. */
5723 : 1416979131 : if (IS_EXPR_CODE_CLASS (cl)
5724 : : || code == TREE_LIST
5725 : 240218021 : || code == TREE_VEC
5726 : 236148787 : || code == TYPE_DECL
5727 : 236148787 : || code == OMP_CLAUSE)
5728 : : {
5729 : : /* Because the chain gets clobbered when we make a copy, we save it
5730 : : here. */
5731 : 1180853400 : tree chain = NULL_TREE, new_tree;
5732 : :
5733 : 1180853400 : if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
5734 : 4092290 : chain = TREE_CHAIN (*tp);
5735 : :
5736 : : /* Copy the node. */
5737 : 1180853400 : new_tree = copy_node (*tp);
5738 : :
5739 : 1180853400 : *tp = new_tree;
5740 : :
5741 : : /* Now, restore the chain, if appropriate. That will cause
5742 : : walk_tree to walk into the chain as well. */
5743 : 1180853400 : if (code == PARM_DECL
5744 : 1180853400 : || code == TREE_LIST
5745 : 1179059039 : || code == OMP_CLAUSE)
5746 : 1817393 : TREE_CHAIN (*tp) = chain;
5747 : :
5748 : : /* For now, we don't update BLOCKs when we make copies. So, we
5749 : : have to nullify all BIND_EXPRs. */
5750 : 1180853400 : if (TREE_CODE (*tp) == BIND_EXPR)
5751 : 13211855 : BIND_EXPR_BLOCK (*tp) = NULL_TREE;
5752 : : }
5753 : 236125731 : else if (code == CONSTRUCTOR)
5754 : : {
5755 : : /* CONSTRUCTOR nodes need special handling because
5756 : : we need to duplicate the vector of elements. */
5757 : 14405096 : tree new_tree;
5758 : :
5759 : 14405096 : new_tree = copy_node (*tp);
5760 : 21040169 : CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
5761 : 14405096 : *tp = new_tree;
5762 : : }
5763 : 221720635 : else if (code == STATEMENT_LIST)
5764 : : /* We used to just abort on STATEMENT_LIST, but we can run into them
5765 : : with statement-expressions (c++/40975). */
5766 : 240 : copy_statement_list (tp);
5767 : 221720395 : else if (TREE_CODE_CLASS (code) == tcc_type)
5768 : 320 : *walk_subtrees = 0;
5769 : 221720075 : else if (TREE_CODE_CLASS (code) == tcc_declaration)
5770 : 77006214 : *walk_subtrees = 0;
5771 : 144713861 : else if (TREE_CODE_CLASS (code) == tcc_constant)
5772 : 0 : *walk_subtrees = 0;
5773 : 1416979131 : return NULL_TREE;
5774 : : }
5775 : :
5776 : : /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
5777 : : information indicating to what new SAVE_EXPR this one should be mapped,
5778 : : use that one. Otherwise, create a new node and enter it in ST. FN is
5779 : : the function into which the copy will be placed. */
5780 : :
5781 : : static void
5782 : 7120207 : remap_save_expr (tree *tp, hash_map<tree, tree> *st, int *walk_subtrees)
5783 : : {
5784 : 7120207 : tree *n;
5785 : 7120207 : tree t;
5786 : :
5787 : : /* See if we already encountered this SAVE_EXPR. */
5788 : 7120207 : n = st->get (*tp);
5789 : :
5790 : : /* If we didn't already remap this SAVE_EXPR, do so now. */
5791 : 7120207 : if (!n)
5792 : : {
5793 : 6833268 : t = copy_node (*tp);
5794 : :
5795 : : /* Remember this SAVE_EXPR. */
5796 : 6833268 : st->put (*tp, t);
5797 : : /* Make sure we don't remap an already-remapped SAVE_EXPR. */
5798 : 6833268 : st->put (t, t);
5799 : : }
5800 : : else
5801 : : {
5802 : : /* We've already walked into this SAVE_EXPR; don't do it again. */
5803 : 286939 : *walk_subtrees = 0;
5804 : 286939 : t = *n;
5805 : : }
5806 : :
5807 : : /* Replace this SAVE_EXPR with the copy. */
5808 : 7120207 : *tp = t;
5809 : 7120207 : }
5810 : :
5811 : : /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
5812 : : label, copies the declaration and enters it in the splay_tree in DATA (which
5813 : : is really a 'copy_body_data *'. */
5814 : :
5815 : : static tree
5816 : 1564525 : mark_local_labels_stmt (gimple_stmt_iterator *gsip,
5817 : : bool *handled_ops_p ATTRIBUTE_UNUSED,
5818 : : struct walk_stmt_info *wi)
5819 : : {
5820 : 1564525 : copy_body_data *id = (copy_body_data *) wi->info;
5821 : 1564525 : glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsip));
5822 : :
5823 : 51227 : if (stmt)
5824 : : {
5825 : 51227 : tree decl = gimple_label_label (stmt);
5826 : :
5827 : : /* Copy the decl and remember the copy. */
5828 : 51227 : insert_decl_map (id, decl, id->copy_decl (decl, id));
5829 : : }
5830 : :
5831 : 1564525 : return NULL_TREE;
5832 : : }
5833 : :
5834 : : static gimple_seq duplicate_remap_omp_clause_seq (gimple_seq seq,
5835 : : struct walk_stmt_info *wi);
5836 : :
/* Called via walk_gimple_seq by copy_gimple_seq_and_replace_local.
   Using the decl map pointed to by ST (a hash_map from originals to
   replacements), remaps all local declarations to appropriate replacements
   in gimple operands.  */

static tree
replace_locals_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
  copy_body_data *id = (copy_body_data *) wi->info;
  hash_map<tree, tree> *st = id->decl_map;
  tree *n;
  tree expr = *tp;

  /* For recursive invocations this is no longer the LHS itself.  */
  bool is_lhs = wi->is_lhs;
  wi->is_lhs = false;

  if (TREE_CODE (expr) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      /* A remapped SSA name on the LHS needs its def statement updated
	 to the statement currently being walked.  */
      if (is_lhs)
	SSA_NAME_DEF_STMT (*tp) = gsi_stmt (wi->gsi);
    }
  /* Only a local declaration (variable or label).  */
  else if ((VAR_P (expr) && !TREE_STATIC (expr))
	   || TREE_CODE (expr) == LABEL_DECL)
    {
      /* Lookup the declaration.  */
      n = st->get (expr);

      /* If it's there, remap it; otherwise leave the decl untouched.  */
      if (n)
	*tp = *n;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (expr) == STATEMENT_LIST
	   || TREE_CODE (expr) == BIND_EXPR
	   || TREE_CODE (expr) == SAVE_EXPR)
    /* These should have been lowered before this walk runs.  */
    gcc_unreachable ();
  else if (TREE_CODE (expr) == TARGET_EXPR)
    {
      /* Don't mess with a TARGET_EXPR that hasn't been expanded.
         It's OK for this to happen if it was part of a subtree that
         isn't immediately expanded, such as operand 2 of another
         TARGET_EXPR.  */
      if (!TREE_OPERAND (expr, 1))
	{
	  TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
	  TREE_OPERAND (expr, 3) = NULL_TREE;
	}
    }
  else if (TREE_CODE (expr) == OMP_CLAUSE)
    {
      /* Before the omplower pass completes, some OMP clauses can contain
	 sequences that are neither copied by gimple_seq_copy nor walked by
	 walk_gimple_seq.  To make copy_gimple_seq_and_replace_locals work even
	 in those situations, we have to copy and process them explicitly.  */

      if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LASTPRIVATE)
	{
	  gimple_seq seq = OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr);
	  seq = duplicate_remap_omp_clause_seq (seq, wi);
	  OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr) = seq;
	}
      else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LINEAR)
	{
	  gimple_seq seq = OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr);
	  seq = duplicate_remap_omp_clause_seq (seq, wi);
	  OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr) = seq;
	}
      else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_REDUCTION)
	{
	  /* Reductions carry two embedded sequences (init and merge);
	     both must be duplicated and remapped.  */
	  gimple_seq seq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr);
	  seq = duplicate_remap_omp_clause_seq (seq, wi);
	  OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr) = seq;
	  seq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr);
	  seq = duplicate_remap_omp_clause_seq (seq, wi);
	  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr) = seq;
	}
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
5923 : :
5924 : :
/* Called via walk_gimple_seq by copy_gimple_seq_and_replace_local.
   Using the decl map held by WI->info (a 'copy_body_data *'), remaps all
   local declarations attached to GIMPLE_BIND statements, including the
   bind's lexical BLOCK and its chain of bound variables.  */

static tree
replace_locals_stmt (gimple_stmt_iterator *gsip,
		     bool *handled_ops_p ATTRIBUTE_UNUSED,
		     struct walk_stmt_info *wi)
{
  copy_body_data *id = (copy_body_data *) wi->info;
  gimple *gs = gsi_stmt (*gsip);

  if (gbind *stmt = dyn_cast <gbind *> (gs))
    {
      tree block = gimple_bind_block (stmt);

      if (block)
	{
	  remap_block (&block, id);
	  gimple_bind_set_block (stmt, block);
	}

      /* This will remap a lot of the same decls again, but this should be
	 harmless.  */
      if (gimple_bind_vars (stmt))
	{
	  tree old_var, decls = gimple_bind_vars (stmt);

	  /* Remap each bound variable unless it may stay non-local or has
	     a variably modified type.  */
	  for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
	    if (!can_be_nonlocal (old_var, id)
		&& ! variably_modified_type_p (TREE_TYPE (old_var), id->src_fn))
	      remap_decl (old_var, id);

	  /* Remap the vars chain itself, without letting remap_decls
	     create fresh decls beyond those remapped above.  */
	  gcc_checking_assert (!id->prevent_decl_creation_for_types);
	  id->prevent_decl_creation_for_types = true;
	  gimple_bind_set_vars (stmt, remap_decls (decls, NULL, id));
	  id->prevent_decl_creation_for_types = false;
	}
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
5969 : :
5970 : : /* Create a copy of SEQ and remap all decls in it. */
5971 : :
5972 : : static gimple_seq
5973 : 289 : duplicate_remap_omp_clause_seq (gimple_seq seq, struct walk_stmt_info *wi)
5974 : : {
5975 : 289 : if (!seq)
5976 : : return NULL;
5977 : :
5978 : : /* If there are any labels in OMP sequences, they can be only referred to in
5979 : : the sequence itself and therefore we can do both here. */
5980 : 60 : walk_gimple_seq (seq, mark_local_labels_stmt, NULL, wi);
5981 : 60 : gimple_seq copy = gimple_seq_copy (seq);
5982 : 60 : walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, wi);
5983 : 60 : return copy;
5984 : : }
5985 : :
5986 : : /* Copies everything in SEQ and replaces variables and labels local to
5987 : : current_function_decl. */
5988 : :
5989 : : gimple_seq
5990 : 1042093 : copy_gimple_seq_and_replace_locals (gimple_seq seq)
5991 : : {
5992 : 1042093 : copy_body_data id;
5993 : 1042093 : struct walk_stmt_info wi;
5994 : 1042093 : gimple_seq copy;
5995 : :
5996 : : /* There's nothing to do for NULL_TREE. */
5997 : 1042093 : if (seq == NULL)
5998 : : return seq;
5999 : :
6000 : : /* Set up ID. */
6001 : 1042077 : memset (&id, 0, sizeof (id));
6002 : 1042077 : id.src_fn = current_function_decl;
6003 : 1042077 : id.dst_fn = current_function_decl;
6004 : 1042077 : id.src_cfun = cfun;
6005 : 1042077 : id.decl_map = new hash_map<tree, tree>;
6006 : 1042077 : id.debug_map = NULL;
6007 : :
6008 : 1042077 : id.copy_decl = copy_decl_no_change;
6009 : 1042077 : id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6010 : 1042077 : id.transform_new_cfg = false;
6011 : 1042077 : id.transform_return_to_modify = false;
6012 : 1042077 : id.transform_parameter = false;
6013 : :
6014 : : /* Walk the tree once to find local labels. */
6015 : 1042077 : memset (&wi, 0, sizeof (wi));
6016 : 1042077 : hash_set<tree> visited;
6017 : 1042077 : wi.info = &id;
6018 : 1042077 : wi.pset = &visited;
6019 : 1042077 : walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
6020 : :
6021 : 1042077 : copy = gimple_seq_copy (seq);
6022 : :
6023 : : /* Walk the copy, remapping decls. */
6024 : 1042077 : memset (&wi, 0, sizeof (wi));
6025 : 1042077 : wi.info = &id;
6026 : 1042077 : walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
6027 : :
6028 : : /* Clean up. */
6029 : 2084154 : delete id.decl_map;
6030 : 1042077 : if (id.debug_map)
6031 : 0 : delete id.debug_map;
6032 : 1042077 : if (id.dependence_map)
6033 : : {
6034 : 0 : delete id.dependence_map;
6035 : 0 : id.dependence_map = NULL;
6036 : : }
6037 : :
6038 : 1042077 : return copy;
6039 : 1042077 : }
6040 : :
6041 : :
6042 : : /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
6043 : :
6044 : : static tree
6045 : 0 : debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
6046 : : {
6047 : 0 : if (*tp == data)
6048 : : return (tree) data;
6049 : : else
6050 : 0 : return NULL;
6051 : : }
6052 : :
6053 : : DEBUG_FUNCTION bool
6054 : 0 : debug_find_tree (tree top, tree search)
6055 : : {
6056 : 0 : return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
6057 : : }
6058 : :
6059 : :
6060 : : /* Declare the variables created by the inliner. Add all the variables in
6061 : : VARS to BIND_EXPR. */
6062 : :
6063 : : static void
6064 : 6730490 : declare_inline_vars (tree block, tree vars)
6065 : : {
6066 : 6730490 : tree t;
6067 : 16509718 : for (t = vars; t; t = DECL_CHAIN (t))
6068 : : {
6069 : 9779228 : DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
6070 : 9779228 : gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
6071 : 9779228 : add_local_decl (cfun, t);
6072 : : }
6073 : :
6074 : 6730490 : if (block)
6075 : 6649416 : BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
6076 : 6730490 : }
6077 : :
/* Finish up the copy of a DECL.  COPY is a fresh copy of DECL, which
   originally belonged to ID->src_fn; the copy will become part of
   ID->dst_fn.  Transfers the debug-related flags, clears stale RTL and
   sets the proper DECL_CONTEXT on the copy.  Returns COPY.  */

tree
copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
{
  /* Don't generate debug information for the copy if we wouldn't have
     generated it for the original.  */
  DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
  DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);

  /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
     declaration inspired this copy.  */
  DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);

  /* The new variable/label has no RTL, yet.  */
  if (HAS_RTL_P (copy)
      && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
    SET_DECL_RTL (copy, 0);
  /* For vector typed decls make sure to update DECL_MODE according
     to the new function context.  */
  if (VECTOR_TYPE_P (TREE_TYPE (copy)))
    SET_DECL_MODE (copy, TYPE_MODE (TREE_TYPE (copy)));

  /* These args would always appear unused, if not for this.  */
  TREE_USED (copy) = 1;

  /* Set the context for the new declaration.  */
  if (!DECL_CONTEXT (decl))
    /* Globals stay global.  */
    ;
  else if (DECL_CONTEXT (decl) != id->src_fn)
    /* Things that weren't in the scope of the function we're inlining
       from aren't in the scope we're inlining to, either.  */
    ;
  else if (TREE_STATIC (decl))
    /* Function-scoped static variables should stay in the original
       function.  */
    ;
  else
    {
      /* Ordinary automatic local variables are now in the scope of the
	 new function.  */
      DECL_CONTEXT (copy) = id->dst_fn;
      /* Variables copied into an OMP SIMT region get the "omp simt
	 private" attribute and are recorded in dst_simt_vars.  */
      if (VAR_P (copy) && id->dst_simt_vars && !is_gimple_reg (copy))
	{
	  if (!lookup_attribute ("omp simt private", DECL_ATTRIBUTES (copy)))
	    DECL_ATTRIBUTES (copy)
	      = tree_cons (get_identifier ("omp simt private"), NULL,
			   DECL_ATTRIBUTES (copy));
	  id->dst_simt_vars->safe_push (copy);
	}
    }

  return copy;
}
6135 : :
6136 : : /* Create a new VAR_DECL that is indentical in all respect to DECL except that
6137 : : DECL can be either a VAR_DECL, a PARM_DECL or RESULT_DECL. The original
6138 : : DECL must come from ID->src_fn and the copy will be part of ID->dst_fn. */
6139 : :
6140 : : tree
6141 : 7743546 : copy_decl_to_var (tree decl, copy_body_data *id)
6142 : : {
6143 : 7743546 : tree copy, type;
6144 : :
6145 : 7743546 : gcc_assert (TREE_CODE (decl) == PARM_DECL
6146 : : || TREE_CODE (decl) == RESULT_DECL);
6147 : :
6148 : 7743546 : type = TREE_TYPE (decl);
6149 : :
6150 : 7743546 : copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
6151 : 7743546 : VAR_DECL, DECL_NAME (decl), type);
6152 : 7743546 : if (DECL_PT_UID_SET_P (decl))
6153 : 289 : SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
6154 : 7743546 : TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
6155 : 7743546 : TREE_READONLY (copy) = TREE_READONLY (decl);
6156 : 7743546 : TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
6157 : 7743546 : DECL_NOT_GIMPLE_REG_P (copy) = DECL_NOT_GIMPLE_REG_P (decl);
6158 : 7743546 : DECL_BY_REFERENCE (copy) = DECL_BY_REFERENCE (decl);
6159 : :
6160 : 7743546 : return copy_decl_for_dup_finish (id, decl, copy);
6161 : : }
6162 : :
/* Like copy_decl_to_var, but create a return slot object instead of a
   pointer variable for return by invisible reference.  */

static tree
copy_result_decl_to_var (tree decl, copy_body_data *id)
{
  tree copy, type;

  gcc_assert (TREE_CODE (decl) == PARM_DECL
	      || TREE_CODE (decl) == RESULT_DECL);

  type = TREE_TYPE (decl);
  /* For return by invisible reference the decl's type is a pointer;
     the return slot object gets the pointed-to type instead.  */
  if (DECL_BY_REFERENCE (decl))
    type = TREE_TYPE (type);

  copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
		     VAR_DECL, DECL_NAME (decl), type);
  if (DECL_PT_UID_SET_P (decl))
    SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
  TREE_READONLY (copy) = TREE_READONLY (decl);
  TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
  if (!DECL_BY_REFERENCE (decl))
    {
      TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
      DECL_NOT_GIMPLE_REG_P (copy)
	= (DECL_NOT_GIMPLE_REG_P (decl)
	   /* RESULT_DECLs are treated special by needs_to_live_in_memory,
	      mirror that to the created VAR_DECL.  */
	   || (TREE_CODE (decl) == RESULT_DECL
	       && aggregate_value_p (decl, id->src_fn)));
    }

  return copy_decl_for_dup_finish (id, decl, copy);
}
6197 : :
6198 : : tree
6199 : 77584293 : copy_decl_no_change (tree decl, copy_body_data *id)
6200 : : {
6201 : 77584293 : tree copy;
6202 : :
6203 : 77584293 : copy = copy_node (decl);
6204 : :
6205 : : /* The COPY is not abstract; it will be generated in DST_FN. */
6206 : 77584293 : DECL_ABSTRACT_P (copy) = false;
6207 : 77584293 : lang_hooks.dup_lang_specific_decl (copy);
6208 : :
6209 : : /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
6210 : : been taken; it's for internal bookkeeping in expand_goto_internal. */
6211 : 77584293 : if (TREE_CODE (copy) == LABEL_DECL)
6212 : : {
6213 : 1288593 : TREE_ADDRESSABLE (copy) = 0;
6214 : 1288593 : LABEL_DECL_UID (copy) = -1;
6215 : : }
6216 : :
6217 : 77584293 : return copy_decl_for_dup_finish (id, decl, copy);
6218 : : }
6219 : :
6220 : : static tree
6221 : 20851150 : copy_decl_maybe_to_var (tree decl, copy_body_data *id)
6222 : : {
6223 : 20851150 : if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
6224 : 12064 : return copy_decl_to_var (decl, id);
6225 : : else
6226 : 20839086 : return copy_decl_no_change (decl, id);
6227 : : }
6228 : :
6229 : : /* Return a copy of the function's argument tree without any modifications. */
6230 : :
6231 : : static tree
6232 : 74569 : copy_arguments_nochange (tree orig_parm, copy_body_data * id)
6233 : : {
6234 : 74569 : tree arg, *parg;
6235 : 74569 : tree new_parm = NULL;
6236 : :
6237 : 74569 : parg = &new_parm;
6238 : 221942 : for (arg = orig_parm; arg; arg = DECL_CHAIN (arg))
6239 : : {
6240 : 147373 : tree new_tree = remap_decl (arg, id);
6241 : 147373 : if (TREE_CODE (new_tree) != PARM_DECL)
6242 : 4050 : new_tree = id->copy_decl (arg, id);
6243 : 147373 : lang_hooks.dup_lang_specific_decl (new_tree);
6244 : 147373 : *parg = new_tree;
6245 : 147373 : parg = &DECL_CHAIN (new_tree);
6246 : : }
6247 : 74569 : return new_parm;
6248 : : }
6249 : :
6250 : : /* Return a copy of the function's static chain. */
6251 : : static tree
6252 : 969 : copy_static_chain (tree static_chain, copy_body_data * id)
6253 : : {
6254 : 969 : tree *chain_copy, *pvar;
6255 : :
6256 : 969 : chain_copy = &static_chain;
6257 : 1938 : for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
6258 : : {
6259 : 969 : tree new_tree = remap_decl (*pvar, id);
6260 : 969 : lang_hooks.dup_lang_specific_decl (new_tree);
6261 : 969 : DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
6262 : 969 : *pvar = new_tree;
6263 : : }
6264 : 969 : return static_chain;
6265 : : }
6266 : :
6267 : : /* Return true if the function is allowed to be versioned.
6268 : : This is a guard for the versioning functionality. */
6269 : :
6270 : : bool
6271 : 11471970 : tree_versionable_function_p (tree fndecl)
6272 : : {
6273 : 11471970 : return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
6274 : 22676510 : && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl)) == NULL);
6275 : : }
6276 : :
/* Update clone info after duplication.  Walks every transitive clone of
   ID->dst_node and rewrites the replacement trees recorded in each
   clone's tree_map so they refer to the freshly copied body.  */

static void
update_clone_info (copy_body_data * id)
{
  struct cgraph_node *this_node = id->dst_node;
  if (!this_node->clones)
    return;
  /* Depth-first traversal of the clone tree rooted at THIS_NODE; the
     loop terminates when the walk climbs back to the root.  */
  for (cgraph_node *node = this_node->clones; node != this_node;)
    {
      /* First update replace maps to match the new body.  */
      clone_info *info = clone_info::get (node);
      if (info && info->tree_map)
	{
	  unsigned int i;
	  for (i = 0; i < vec_safe_length (info->tree_map); i++)
	    {
	      struct ipa_replace_map *replace_info;
	      replace_info = (*info->tree_map)[i];
	      walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
	    }
	}

      /* Advance: descend into NODE's own clones first, then move to the
	 next sibling; otherwise climb up until a sibling exists or the
	 root is reached.  */
      if (node->clones)
	node = node->clones;
      else if (node->next_sibling_clone)
	node = node->next_sibling_clone;
      else
	{
	  while (node != id->dst_node && !node->next_sibling_clone)
	    node = node->clone_of;
	  if (node != id->dst_node)
	    node = node->next_sibling_clone;
	}
    }
}
6313 : :
6314 : : /* Create a copy of a function's tree.
6315 : : OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
6316 : : of the original function and the new copied function
6317 : : respectively. In case we want to replace a DECL
6318 : : tree with another tree while duplicating the function's
6319 : : body, TREE_MAP represents the mapping between these
6320 : : trees. If UPDATE_CLONES is set, the call_stmt fields
6321 : : of edges of clones of the function will be updated.
6322 : :
6323 : : If non-NULL PARAM_ADJUSTMENTS determines how function prototype (i.e. the
6324 : : function parameters and return value) should be modified).
6325 : : If non-NULL BLOCKS_TO_COPY determine what basic blocks to copy.
6326 : : If non_NULL NEW_ENTRY determine new entry BB of the clone.
6327 : : */
6328 : : void
6329 : 229860 : tree_function_versioning (tree old_decl, tree new_decl,
6330 : : vec<ipa_replace_map *, va_gc> *tree_map,
6331 : : ipa_param_adjustments *param_adjustments,
6332 : : bool update_clones, bitmap blocks_to_copy,
6333 : : basic_block new_entry)
6334 : : {
6335 : 229860 : struct cgraph_node *old_version_node;
6336 : 229860 : struct cgraph_node *new_version_node;
6337 : 229860 : copy_body_data id;
6338 : 229860 : tree p;
6339 : 229860 : unsigned i;
6340 : 229860 : struct ipa_replace_map *replace_info;
6341 : 229860 : basic_block old_entry_block, bb;
6342 : 229860 : auto_vec<gimple *, 10> init_stmts;
6343 : 229860 : tree vars = NULL_TREE;
6344 : :
6345 : : /* We can get called recursively from expand_call_inline via clone
6346 : : materialization. While expand_call_inline maintains input_location
6347 : : we cannot tolerate it to leak into the materialized clone. */
6348 : 229860 : location_t saved_location = input_location;
6349 : 229860 : input_location = UNKNOWN_LOCATION;
6350 : :
6351 : 229860 : gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
6352 : : && TREE_CODE (new_decl) == FUNCTION_DECL);
6353 : 229860 : DECL_POSSIBLY_INLINED (old_decl) = 1;
6354 : :
6355 : 229860 : old_version_node = cgraph_node::get (old_decl);
6356 : 229860 : gcc_checking_assert (old_version_node);
6357 : 229860 : new_version_node = cgraph_node::get (new_decl);
6358 : 229860 : gcc_checking_assert (new_version_node);
6359 : :
6360 : : /* Copy over debug args. */
6361 : 229860 : if (DECL_HAS_DEBUG_ARGS_P (old_decl))
6362 : : {
6363 : 3175 : vec<tree, va_gc> **new_debug_args, **old_debug_args;
6364 : 3175 : gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
6365 : 3175 : DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
6366 : 3175 : old_debug_args = decl_debug_args_lookup (old_decl);
6367 : 3175 : if (old_debug_args)
6368 : : {
6369 : 3175 : new_debug_args = decl_debug_args_insert (new_decl);
6370 : 6350 : *new_debug_args = vec_safe_copy (*old_debug_args);
6371 : : }
6372 : : }
6373 : :
6374 : : /* Output the inlining info for this abstract function, since it has been
6375 : : inlined. If we don't do this now, we can lose the information about the
6376 : : variables in the function when the blocks get blown away as soon as we
6377 : : remove the cgraph node. */
6378 : 229860 : (*debug_hooks->outlining_inline_function) (old_decl);
6379 : :
6380 : 229860 : DECL_ARTIFICIAL (new_decl) = 1;
6381 : 415236 : DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
6382 : 415236 : if (DECL_ORIGIN (old_decl) == old_decl)
6383 : 203028 : old_version_node->used_as_abstract_origin = true;
6384 : 229860 : DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
6385 : :
6386 : : /* Prepare the data structures for the tree copy. */
6387 : 229860 : memset (&id, 0, sizeof (id));
6388 : :
6389 : : /* Generate a new name for the new version. */
6390 : 229860 : id.statements_to_fold = new hash_set<gimple *>;
6391 : :
6392 : 229860 : id.decl_map = new hash_map<tree, tree>;
6393 : 229860 : id.debug_map = NULL;
6394 : 229860 : id.src_fn = old_decl;
6395 : 229860 : id.dst_fn = new_decl;
6396 : 229860 : id.src_node = old_version_node;
6397 : 229860 : id.dst_node = new_version_node;
6398 : 229860 : id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
6399 : 229860 : id.blocks_to_copy = blocks_to_copy;
6400 : :
6401 : 229860 : id.copy_decl = copy_decl_no_change;
6402 : 229860 : id.transform_call_graph_edges
6403 : 229860 : = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
6404 : 229860 : id.transform_new_cfg = true;
6405 : 229860 : id.transform_return_to_modify = false;
6406 : 229860 : id.transform_parameter = false;
6407 : :
6408 : 229860 : old_entry_block = ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (old_decl));
6409 : 229860 : DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
6410 : 229860 : DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
6411 : 229860 : initialize_cfun (new_decl, old_decl,
6412 : 229860 : new_entry ? new_entry->count : old_entry_block->count);
6413 : 229860 : new_version_node->has_omp_variant_constructs
6414 : 229860 : = old_version_node->has_omp_variant_constructs;
6415 : 229860 : if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
6416 : 229860 : DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
6417 : 229860 : = id.src_cfun->gimple_df->ipa_pta;
6418 : :
6419 : : /* Copy the function's static chain. */
6420 : 229860 : p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
6421 : 229860 : if (p)
6422 : 1938 : DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl
6423 : 969 : = copy_static_chain (p, &id);
6424 : :
6425 : 229860 : auto_vec<int, 16> new_param_indices;
6426 : 229860 : clone_info *info = clone_info::get (old_version_node);
6427 : 235334 : ipa_param_adjustments *old_param_adjustments
6428 : 229860 : = info ? info->param_adjustments : NULL;
6429 : 5474 : if (old_param_adjustments)
6430 : 5392 : old_param_adjustments->get_updated_indices (&new_param_indices);
6431 : :
6432 : : /* If there's a tree_map, prepare for substitution. */
6433 : 229860 : if (tree_map)
6434 : 34080 : for (i = 0; i < tree_map->length (); i++)
6435 : : {
6436 : 21239 : gimple *init;
6437 : 21239 : replace_info = (*tree_map)[i];
6438 : :
6439 : 21239 : int p = replace_info->parm_num;
6440 : 21239 : if (old_param_adjustments)
6441 : 0 : p = new_param_indices[p];
6442 : :
6443 : 21239 : tree parm;
6444 : 63689 : for (parm = DECL_ARGUMENTS (old_decl); p;
6445 : 42450 : parm = DECL_CHAIN (parm))
6446 : 42450 : p--;
6447 : 21239 : gcc_assert (parm);
6448 : 21239 : init = setup_one_parameter (&id, parm, replace_info->new_tree,
6449 : : id.src_fn, NULL, &vars);
6450 : 21239 : if (init)
6451 : 6498 : init_stmts.safe_push (init);
6452 : : }
6453 : :
6454 : 229860 : ipa_param_body_adjustments *param_body_adjs = NULL;
6455 : 229860 : if (param_adjustments)
6456 : : {
6457 : 299382 : param_body_adjs = new ipa_param_body_adjustments (param_adjustments,
6458 : : new_decl, old_decl,
6459 : 149691 : &id, &vars, tree_map);
6460 : 149691 : id.param_body_adjs = param_body_adjs;
6461 : 149691 : DECL_ARGUMENTS (new_decl) = param_body_adjs->get_new_param_chain ();
6462 : : }
6463 : 80169 : else if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
6464 : 149138 : DECL_ARGUMENTS (new_decl)
6465 : 74569 : = copy_arguments_nochange (DECL_ARGUMENTS (old_decl), &id);
6466 : :
6467 : 229860 : DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
6468 : 229860 : BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
6469 : :
6470 : 229860 : declare_inline_vars (DECL_INITIAL (new_decl), vars);
6471 : :
6472 : 229860 : if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
6473 : : /* Add local vars. */
6474 : 133900 : add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
6475 : :
6476 : 229860 : if (DECL_RESULT (old_decl) == NULL_TREE)
6477 : : ;
6478 : 149691 : else if (param_adjustments && param_adjustments->m_skip_return
6479 : 276529 : && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
6480 : : {
6481 : 39456 : tree resdecl_repl = copy_result_decl_to_var (DECL_RESULT (old_decl),
6482 : : &id);
6483 : 39456 : declare_inline_vars (NULL, resdecl_repl);
6484 : 39456 : if (DECL_BY_REFERENCE (DECL_RESULT (old_decl)))
6485 : 73 : resdecl_repl = build_fold_addr_expr (resdecl_repl);
6486 : 39456 : insert_decl_map (&id, DECL_RESULT (old_decl), resdecl_repl);
6487 : :
6488 : 78912 : DECL_RESULT (new_decl)
6489 : 39456 : = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
6490 : : RESULT_DECL, NULL_TREE, void_type_node);
6491 : 39456 : DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
6492 : 39456 : DECL_IS_MALLOC (new_decl) = false;
6493 : 39456 : cfun->returns_struct = 0;
6494 : 39456 : cfun->returns_pcc_struct = 0;
6495 : : }
6496 : : else
6497 : : {
6498 : 190404 : tree old_name;
6499 : 190404 : DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
6500 : 190404 : lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
6501 : 380808 : if (gimple_in_ssa_p (id.src_cfun)
6502 : 190404 : && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
6503 : 4945 : && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
6504 : : {
6505 : 4926 : tree new_name = make_ssa_name (DECL_RESULT (new_decl));
6506 : 4926 : insert_decl_map (&id, old_name, new_name);
6507 : 4926 : SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
6508 : 4926 : set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
6509 : : }
6510 : : }
6511 : :
6512 : : /* Set up the destination functions loop tree. */
6513 : 229860 : if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
6514 : : {
6515 : 229860 : cfun->curr_properties &= ~PROP_loops;
6516 : 229860 : loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
6517 : 229860 : cfun->curr_properties |= PROP_loops;
6518 : : }
6519 : :
6520 : : /* Copy the Function's body. */
6521 : 229860 : copy_body (&id, ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
6522 : : new_entry);
6523 : :
6524 : : /* Renumber the lexical scoping (non-code) blocks consecutively. */
6525 : 229860 : number_blocks (new_decl);
6526 : :
6527 : : /* We want to create the BB unconditionally, so that the addition of
6528 : : debug stmts doesn't affect BB count, which may in the end cause
6529 : : codegen differences. */
6530 : 229860 : bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6531 : 466218 : while (init_stmts.length ())
6532 : 6498 : insert_init_stmt (&id, bb, init_stmts.pop ());
6533 : 229860 : if (param_body_adjs)
6534 : 149691 : param_body_adjs->append_init_stmts (bb);
6535 : 229860 : update_clone_info (&id);
6536 : :
6537 : : /* Remap the nonlocal_goto_save_area, if any. */
6538 : 229860 : if (cfun->nonlocal_goto_save_area)
6539 : : {
6540 : 0 : struct walk_stmt_info wi;
6541 : :
6542 : 0 : memset (&wi, 0, sizeof (wi));
6543 : 0 : wi.info = &id;
6544 : 0 : walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
6545 : : }
6546 : :
6547 : : /* Clean up. */
6548 : 459720 : delete id.decl_map;
6549 : 229860 : if (id.debug_map)
6550 : 1430 : delete id.debug_map;
6551 : 229860 : free_dominance_info (CDI_DOMINATORS);
6552 : 229860 : free_dominance_info (CDI_POST_DOMINATORS);
6553 : :
6554 : 229860 : update_max_bb_count ();
6555 : 229860 : fold_marked_statements (0, id.statements_to_fold);
6556 : 459720 : delete id.statements_to_fold;
6557 : 229860 : delete_unreachable_blocks_update_callgraph (id.dst_node, update_clones);
6558 : 229860 : if (id.dst_node->definition)
6559 : 225450 : cgraph_edge::rebuild_references ();
6560 : 229860 : if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
6561 : : {
6562 : 229860 : calculate_dominance_info (CDI_DOMINATORS);
6563 : 229860 : fix_loop_structure (NULL);
6564 : : }
6565 : 229860 : update_ssa (TODO_update_ssa);
6566 : :
6567 : : /* After partial cloning we need to rescale frequencies, so they are
6568 : : within proper range in the cloned function. */
6569 : 229860 : if (new_entry)
6570 : : {
6571 : 49062 : struct cgraph_edge *e;
6572 : 49062 : rebuild_frequencies ();
6573 : :
6574 : 49062 : new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
6575 : 226790 : for (e = new_version_node->callees; e; e = e->next_callee)
6576 : : {
6577 : 177728 : basic_block bb = gimple_bb (e->call_stmt);
6578 : 177728 : e->count = bb->count;
6579 : : }
6580 : 54120 : for (e = new_version_node->indirect_calls; e; e = e->next_callee)
6581 : : {
6582 : 5058 : basic_block bb = gimple_bb (e->call_stmt);
6583 : 5058 : e->count = bb->count;
6584 : : }
6585 : : }
6586 : :
6587 : 229860 : if (param_body_adjs && MAY_HAVE_DEBUG_BIND_STMTS)
6588 : : {
6589 : 127054 : vec<tree, va_gc> **debug_args = NULL;
6590 : 127054 : unsigned int len = 0;
6591 : 127054 : unsigned reset_len = param_body_adjs->m_reset_debug_decls.length ();
6592 : :
6593 : 230514 : for (i = 0; i < reset_len; i++)
6594 : : {
6595 : 103460 : tree parm = param_body_adjs->m_reset_debug_decls[i];
6596 : 103460 : gcc_assert (is_gimple_reg (parm));
6597 : 103460 : tree ddecl;
6598 : :
6599 : 103460 : if (debug_args == NULL)
6600 : : {
6601 : 76019 : debug_args = decl_debug_args_insert (new_decl);
6602 : 76019 : len = vec_safe_length (*debug_args);
6603 : : }
6604 : 103460 : ddecl = build_debug_expr_decl (TREE_TYPE (parm));
6605 : : /* FIXME: Is setting the mode really necessary? */
6606 : 103460 : SET_DECL_MODE (ddecl, DECL_MODE (parm));
6607 : 103460 : vec_safe_push (*debug_args, DECL_ORIGIN (parm));
6608 : 103460 : vec_safe_push (*debug_args, ddecl);
6609 : : }
6610 : 127054 : if (debug_args != NULL)
6611 : : {
6612 : : /* On the callee side, add
6613 : : DEBUG D#Y s=> parm
6614 : : DEBUG var => D#Y
6615 : : stmts to the first bb where var is a VAR_DECL created for the
6616 : : optimized away parameter in DECL_INITIAL block. This hints
6617 : : in the debug info that var (whole DECL_ORIGIN is the parm
6618 : : PARM_DECL) is optimized away, but could be looked up at the
6619 : : call site as value of D#X there. */
6620 : 76019 : gimple_stmt_iterator cgsi
6621 : 76019 : = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6622 : 76019 : gimple *def_temp;
6623 : 76019 : tree var = vars;
6624 : 76019 : i = vec_safe_length (*debug_args);
6625 : 103460 : do
6626 : : {
6627 : 103460 : tree vexpr = NULL_TREE;
6628 : 103460 : i -= 2;
6629 : 103460 : while (var != NULL_TREE
6630 : 139274 : && DECL_ABSTRACT_ORIGIN (var) != (**debug_args)[i])
6631 : 35814 : var = TREE_CHAIN (var);
6632 : 103460 : if (var == NULL_TREE)
6633 : : break;
6634 : 103460 : tree parm = (**debug_args)[i];
6635 : 103460 : if (tree parm_ddef = ssa_default_def (id.src_cfun, parm))
6636 : 139438 : if (tree *d
6637 : 69719 : = param_body_adjs->m_dead_ssa_debug_equiv.get (parm_ddef))
6638 : 24962 : vexpr = *d;
6639 : 103460 : if (!vexpr)
6640 : : {
6641 : 78498 : vexpr = build_debug_expr_decl (TREE_TYPE (parm));
6642 : : /* FIXME: Is setting the mode really necessary? */
6643 : 78498 : SET_DECL_MODE (vexpr, DECL_MODE (parm));
6644 : : }
6645 : 103460 : def_temp = gimple_build_debug_bind (var, vexpr, NULL);
6646 : 103460 : gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6647 : 103460 : def_temp = gimple_build_debug_source_bind (vexpr, parm, NULL);
6648 : 103460 : gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6649 : : }
6650 : 103460 : while (i > len);
6651 : : }
6652 : : }
6653 : 149691 : delete param_body_adjs;
6654 : 229860 : free_dominance_info (CDI_DOMINATORS);
6655 : 229860 : free_dominance_info (CDI_POST_DOMINATORS);
6656 : :
6657 : 229860 : gcc_assert (!id.debug_stmts.exists ());
6658 : 229860 : pop_cfun ();
6659 : 229860 : input_location = saved_location;
6660 : 229860 : return;
6661 : 229860 : }
6662 : :
6663 : : /* EXP is CALL_EXPR present in a GENERIC expression tree. Try to integrate
6664 : : the callee and return the inlined body on success. */
6665 : :
6666 : : tree
6667 : 0 : maybe_inline_call_in_expr (tree exp)
6668 : : {
6669 : 0 : tree fn = get_callee_fndecl (exp);
6670 : :
6671 : : /* We can only try to inline "const" functions. */
6672 : 0 : if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
6673 : : {
6674 : 0 : call_expr_arg_iterator iter;
6675 : 0 : copy_body_data id;
6676 : 0 : tree param, arg, t;
6677 : 0 : hash_map<tree, tree> decl_map;
6678 : :
6679 : : /* Remap the parameters. */
6680 : 0 : for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
6681 : 0 : param;
6682 : 0 : param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
6683 : 0 : decl_map.put (param, arg);
6684 : :
6685 : 0 : memset (&id, 0, sizeof (id));
6686 : 0 : id.src_fn = fn;
6687 : 0 : id.dst_fn = current_function_decl;
6688 : 0 : id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6689 : 0 : id.decl_map = &decl_map;
6690 : :
6691 : 0 : id.copy_decl = copy_decl_no_change;
6692 : 0 : id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6693 : 0 : id.transform_new_cfg = false;
6694 : 0 : id.transform_return_to_modify = true;
6695 : 0 : id.transform_parameter = true;
6696 : :
6697 : : /* Make sure not to unshare trees behind the front-end's back
6698 : : since front-end specific mechanisms may rely on sharing. */
6699 : 0 : id.regimplify = false;
6700 : 0 : id.do_not_unshare = true;
6701 : :
6702 : : /* We're not inside any EH region. */
6703 : 0 : id.eh_lp_nr = 0;
6704 : :
6705 : 0 : t = copy_tree_body (&id);
6706 : :
6707 : : /* We can only return something suitable for use in a GENERIC
6708 : : expression tree. */
6709 : 0 : if (TREE_CODE (t) == MODIFY_EXPR)
6710 : 0 : return TREE_OPERAND (t, 1);
6711 : 0 : }
6712 : :
6713 : : return NULL_TREE;
6714 : : }
6715 : :
6716 : : /* Duplicate a type, fields and all. */
6717 : :
6718 : : tree
6719 : 62 : build_duplicate_type (tree type)
6720 : : {
6721 : 62 : struct copy_body_data id;
6722 : :
6723 : 62 : memset (&id, 0, sizeof (id));
6724 : 62 : id.src_fn = current_function_decl;
6725 : 62 : id.dst_fn = current_function_decl;
6726 : 62 : id.src_cfun = cfun;
6727 : 62 : id.decl_map = new hash_map<tree, tree>;
6728 : 62 : id.debug_map = NULL;
6729 : 62 : id.copy_decl = copy_decl_no_change;
6730 : :
6731 : 62 : type = remap_type_1 (type, &id);
6732 : :
6733 : 124 : delete id.decl_map;
6734 : 62 : if (id.debug_map)
6735 : 0 : delete id.debug_map;
6736 : :
6737 : 62 : TYPE_CANONICAL (type) = type;
6738 : :
6739 : 62 : return type;
6740 : : }
6741 : :
6742 : : /* Unshare the entire DECL_SAVED_TREE of FN and return the remapped
6743 : : parameters and RESULT_DECL in PARMS and RESULT. Used by C++ constexpr
6744 : : evaluation. */
6745 : :
6746 : : tree
6747 : 18371740 : copy_fn (tree fn, tree& parms, tree& result)
6748 : : {
6749 : 18371740 : copy_body_data id;
6750 : 18371740 : tree param;
6751 : 18371740 : hash_map<tree, tree> decl_map;
6752 : :
6753 : 18371740 : tree *p = &parms;
6754 : 18371740 : *p = NULL_TREE;
6755 : :
6756 : 18371740 : memset (&id, 0, sizeof (id));
6757 : 18371740 : id.src_fn = fn;
6758 : 18371740 : id.dst_fn = current_function_decl;
6759 : 18371740 : id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6760 : 18371740 : id.decl_map = &decl_map;
6761 : :
6762 : 71303718 : id.copy_decl = [] (tree decl, copy_body_data *id)
6763 : : {
6764 : 52931978 : if (TREE_CODE (decl) == TYPE_DECL || TREE_CODE (decl) == CONST_DECL)
6765 : : /* Don't make copies of local types or injected enumerators,
6766 : : the C++ constexpr evaluator doesn't need them and they
6767 : : confuse modules streaming. */
6768 : : return decl;
6769 : 51612828 : return copy_decl_no_change (decl, id);
6770 : : };
6771 : 18371740 : id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6772 : 18371740 : id.transform_new_cfg = false;
6773 : 18371740 : id.transform_return_to_modify = false;
6774 : 18371740 : id.transform_parameter = true;
6775 : :
6776 : : /* Make sure not to unshare trees behind the front-end's back
6777 : : since front-end specific mechanisms may rely on sharing. */
6778 : 18371740 : id.regimplify = false;
6779 : 18371740 : id.do_not_unshare = true;
6780 : 18371740 : id.do_not_fold = true;
6781 : :
6782 : : /* We're not inside any EH region. */
6783 : 18371740 : id.eh_lp_nr = 0;
6784 : :
6785 : : /* Remap the parameters and result and return them to the caller. */
6786 : 18371740 : for (param = DECL_ARGUMENTS (fn);
6787 : 41674272 : param;
6788 : 23302532 : param = DECL_CHAIN (param))
6789 : : {
6790 : 23302532 : *p = remap_decl (param, &id);
6791 : 23302532 : p = &DECL_CHAIN (*p);
6792 : : }
6793 : :
6794 : 18371740 : if (DECL_RESULT (fn))
6795 : 18371740 : result = remap_decl (DECL_RESULT (fn), &id);
6796 : : else
6797 : 0 : result = NULL_TREE;
6798 : :
6799 : 18371740 : return copy_tree_body (&id);
6800 : 18371740 : }
|