Line data Source code
1 : /* Callgraph transformations to handle inlining
2 : Copyright (C) 2003-2026 Free Software Foundation, Inc.
3 : Contributed by Jan Hubicka
4 :
5 : This file is part of GCC.
6 :
7 : GCC is free software; you can redistribute it and/or modify it under
8 : the terms of the GNU General Public License as published by the Free
9 : Software Foundation; either version 3, or (at your option) any later
10 : version.
11 :
12 : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 : WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 : for more details.
16 :
17 : You should have received a copy of the GNU General Public License
18 : along with GCC; see the file COPYING3. If not see
19 : <http://www.gnu.org/licenses/>. */
20 :
21 : /* The inline decisions are stored in callgraph in "inline plan" and
22 : applied later.
23 :
24 : To mark given call inline, use inline_call function.
25 : The function marks the edge inlinable and, if necessary, produces
26 : virtual clone in the callgraph representing the new copy of callee's
27 : function body.
28 :
29 : The inline plan is applied on given function body by inline_transform. */
30 :
31 : #define INCLUDE_ALGORITHM
32 : #include "config.h"
33 : #include "system.h"
34 : #include "coretypes.h"
35 : #include "tm.h"
36 : #include "function.h"
37 : #include "tree.h"
38 : #include "alloc-pool.h"
39 : #include "tree-pass.h"
40 : #include "cgraph.h"
41 : #include "tree-cfg.h"
42 : #include "symbol-summary.h"
43 : #include "tree-vrp.h"
44 : #include "sreal.h"
45 : #include "ipa-cp.h"
46 : #include "ipa-prop.h"
47 : #include "ipa-fnsummary.h"
48 : #include "ipa-inline.h"
49 : #include "tree-inline.h"
50 : #include "function.h"
51 : #include "cfg.h"
52 : #include "basic-block.h"
53 : #include "ipa-utils.h"
54 : #include "ipa-modref-tree.h"
55 : #include "ipa-modref.h"
56 : #include "symtab-thunks.h"
57 : #include "symtab-clones.h"
58 :
59 : int ncalls_inlined;
60 : int nfunctions_inlined;
61 :
62 : /* We removed or are going to remove the last call to NODE.
63 : Return true if we can and want proactively remove the NODE now.
64 : This is important to do, since we want inliner to know when offline
65 : copy of function was removed. */
66 :
67 : static bool
68 2687501 : can_remove_node_now_p_1 (struct cgraph_node *node, struct cgraph_edge *e)
69 : {
70 2687501 : ipa_ref *ref;
71 :
72 2950293 : FOR_EACH_ALIAS (node, ref)
73 : {
74 765531 : cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
75 489717 : if ((alias->callers && alias->callers != e)
76 765531 : || !can_remove_node_now_p_1 (alias, e))
77 502739 : return false;
78 : }
79 : /* FIXME: When address is taken of DECL_EXTERNAL function we still
80 : can remove its offline copy, but we would need to keep unanalyzed node in
81 : the callgraph so references can point to it.
82 :
83 : Also for comdat group we can ignore references inside a group as we
84 : want to prove the group as a whole to be dead. */
85 2184762 : return (!node->address_taken
86 2150093 : && node->can_remove_if_no_direct_calls_and_refs_p ()
87 : /* Inlining might enable more devirtualizing, so we want to remove
88 : those only after all devirtualizable virtual calls are processed.
89 : Lacking may edges in callgraph we just preserve them post
90 : inlining. */
91 2082979 : && (!DECL_VIRTUAL_P (node->decl)
92 6168 : || !opt_for_fn (node->decl, flag_devirtualize))
93 : /* During early inlining some unanalyzed cgraph nodes might be in the
94 : callgraph and they might refer the function in question. */
95 4261657 : && !cgraph_new_nodes.exists ());
96 : }
97 :
98 : /* We are going to eliminate last direct call to NODE (or alias of it) via edge E.
99 : Verify that the NODE can be removed from unit and if it is contained in comdat
100 : group that the whole comdat group is removable. */
101 :
102 : static bool
103 2217486 : can_remove_node_now_p (struct cgraph_node *node, struct cgraph_edge *e)
104 : {
105 2217486 : struct cgraph_node *next;
106 2217486 : if (!can_remove_node_now_p_1 (node, e))
107 : return false;
108 :
109 : /* When we see same comdat group, we need to be sure that all
110 : items can be removed. */
111 1619928 : if (!node->same_comdat_group || !node->externally_visible)
112 : return true;
113 265348 : for (next = dyn_cast<cgraph_node *> (node->same_comdat_group);
114 791512 : next != node; next = dyn_cast<cgraph_node *> (next->same_comdat_group))
115 : {
116 265862 : if (next->alias)
117 68907 : continue;
118 196595 : if ((next->callers && next->callers != e)
119 390796 : || !can_remove_node_now_p_1 (next, e))
120 : return false;
121 : }
122 : return true;
123 : }
124 :
125 : /* Return true if NODE is a master clone with non-inline clones. */
126 :
127 : static bool
128 1409070 : master_clone_with_noninline_clones_p (struct cgraph_node *node)
129 : {
130 1409070 : if (node->clone_of)
131 : return false;
132 :
133 1794120 : for (struct cgraph_node *n = node->clones; n; n = n->next_sibling_clone)
134 485123 : if (n->decl != node->decl)
135 : return true;
136 :
137 : return false;
138 : }
139 :
140 : /* E is expected to be an edge being inlined. Clone destination node of
141 : the edge and redirect it to the new clone.
142 : DUPLICATE is used for bookkeeping on whether we are actually creating new
143 : clones or re-using node originally representing out-of-line function call.
144 : By default the offline copy is removed, when it appears dead after inlining.
145 : KEEP_OFFLINE_COPY prevents this transformation.
146 : If UPDATE_ORIGINAL is set, clones profile is subtracted from the offline version.
147 : If OVERALL_SIZE is non-NULL, the size is updated to reflect the
148 : transformation. */
149 :
150 : void
151 4571293 : clone_inlined_nodes (struct cgraph_edge *e, bool duplicate,
152 : bool keep_offline_copy,
153 : bool update_original, int *overall_size)
154 : {
155 4571293 : struct cgraph_node *inlining_into;
156 4571293 : struct cgraph_edge *next;
157 :
158 4571293 : if (e->caller->inlined_to)
159 : inlining_into = e->caller->inlined_to;
160 : else
161 3664144 : inlining_into = e->caller;
162 :
163 4571293 : if (duplicate)
164 : {
165 : /* We may eliminate the need for out-of-line copy to be output.
166 : In that case just go ahead and re-use it. This is not just an
167 : memory optimization. Making offline copy of function disappear
168 : from the program will improve future decisions on inlining. */
169 4334445 : if (!e->callee->callers->next_caller
170 : /* Recursive inlining never wants the master clone to
171 : be overwritten. */
172 2009278 : && !keep_offline_copy
173 1997528 : && can_remove_node_now_p (e->callee, e)
174 : /* We cannot overwrite a master clone with non-inline clones
175 : until after these clones are materialized. */
176 5743515 : && !master_clone_with_noninline_clones_p (e->callee))
177 : {
178 : /* TODO: When callee is in a comdat group, we could remove all of it,
179 : including all inline clones inlined into it. That would however
180 : need small function inlining to register edge removal hook to
181 : maintain the priority queue.
182 :
183 : For now we keep the other functions in the group in program until
184 : cgraph_remove_unreachable_functions gets rid of them. */
185 1408806 : gcc_assert (!e->callee->inlined_to);
186 1408806 : e->callee->remove_from_same_comdat_group ();
187 1408806 : if (e->callee->definition
188 1408806 : && inline_account_function_p (e->callee))
189 : {
190 1305282 : gcc_assert (!e->callee->alias);
191 1305282 : if (overall_size)
192 256042 : *overall_size -= ipa_size_summaries->get (e->callee)->size;
193 1305282 : nfunctions_inlined++;
194 : }
195 1408806 : duplicate = false;
196 1408806 : e->callee->externally_visible = false;
197 1408806 : profile_count num = e->count;
198 1408806 : profile_count den = e->callee->count;
199 1408806 : profile_count::adjust_for_ipa_scaling (&num, &den);
200 1408806 : e->callee->apply_scale (num, den);
201 :
202 1408806 : dump_callgraph_transformation (e->callee, inlining_into,
203 : "inlining to");
204 : }
205 : else
206 : {
207 2925639 : struct cgraph_node *n;
208 :
209 2925639 : n = e->callee->create_clone (e->callee->decl,
210 : e->count,
211 2925639 : update_original, vNULL, true,
212 : inlining_into,
213 : NULL, NULL);
214 2925639 : n->used_as_abstract_origin = e->callee->used_as_abstract_origin;
215 2925639 : e->redirect_callee (n);
216 : }
217 : }
218 : else
219 236848 : e->callee->remove_from_same_comdat_group ();
220 :
221 4571293 : e->callee->inlined_to = inlining_into;
222 4571293 : if (e->callee->ipa_transforms_to_apply.length ())
223 : {
224 294229 : e->callee->ipa_transforms_to_apply.release ();
225 294229 : e->callee->ipa_transforms_to_apply = vNULL;
226 : }
227 :
228 : /* Recursively clone all bodies. */
229 8759349 : for (e = e->callee->callees; e; e = next)
230 : {
231 4188056 : next = e->next_callee;
232 4188056 : if (!e->inline_failed)
233 670711 : clone_inlined_nodes (e, duplicate, keep_offline_copy,
234 : update_original, overall_size);
235 : }
236 4571293 : }
237 :
238 : /* Check all speculations in N and if any seem useless, resolve them. When a
239 : first edge is resolved, pop all edges from NEW_EDGES and insert them to
240 : EDGE_SET. Then remove each resolved edge from EDGE_SET, if it is there. */
241 :
242 : static bool
243 4569734 : check_speculations_1 (cgraph_node *n, vec<cgraph_edge *> *new_edges,
244 : hash_set <cgraph_edge *> *edge_set)
245 : {
246 4569734 : bool speculation_removed = false;
247 4569734 : cgraph_edge *next;
248 :
249 8758400 : for (cgraph_edge *e = n->callees; e; e = next)
250 : {
251 4188666 : next = e->next_callee;
252 4188666 : if (e->speculative && !speculation_useful_p (e, true))
253 : {
254 51 : while (new_edges && !new_edges->is_empty ())
255 6 : edge_set->add (new_edges->pop ());
256 45 : edge_set->remove (e);
257 :
258 45 : cgraph_edge::resolve_speculation (e, NULL);
259 45 : speculation_removed = true;
260 : }
261 4188621 : else if (!e->inline_failed)
262 669640 : speculation_removed |= check_speculations_1 (e->callee, new_edges,
263 : edge_set);
264 : }
265 4569734 : return speculation_removed;
266 : }
267 :
268 : /* Push E to NEW_EDGES. Called from hash_set traverse method, which
269 : unfortunately means this function has to have external linkage, otherwise
270 : the code will not compile with gcc 4.8. */
271 :
272 : bool
273 0 : push_all_edges_in_set_to_vec (cgraph_edge * const &e,
274 : vec<cgraph_edge *> *new_edges)
275 : {
276 0 : new_edges->safe_push (e);
277 0 : return true;
278 : }
279 :
280 : /* Check all speculations in N and if any seem useless, resolve them and remove
281 : them from NEW_EDGES. */
282 :
283 : static bool
284 3900094 : check_speculations (cgraph_node *n, vec<cgraph_edge *> *new_edges)
285 : {
286 3900094 : hash_set <cgraph_edge *> edge_set;
287 3900094 : bool res = check_speculations_1 (n, new_edges, &edge_set);
288 3900094 : if (!edge_set.is_empty ())
289 0 : edge_set.traverse <vec<cgraph_edge *> *,
290 0 : push_all_edges_in_set_to_vec> (new_edges);
291 3900094 : return res;
292 3900094 : }
293 :
294 : /* Mark all call graph edges coming out of NODE and all nodes that have been
295 : inlined to it as in_polymorphic_cdtor. */
296 :
297 : static void
298 116652 : mark_all_inlined_calls_cdtor (cgraph_node *node)
299 : {
300 244974 : for (cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
301 : {
302 128322 : cs->in_polymorphic_cdtor = true;
303 128322 : if (!cs->inline_failed)
304 20177 : mark_all_inlined_calls_cdtor (cs->callee);
305 : }
306 124128 : for (cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
307 7476 : cs->in_polymorphic_cdtor = true;
308 116652 : }
309 :
310 :
311 : /* Mark edge E as inlined and update callgraph accordingly. UPDATE_ORIGINAL
312 : specify whether profile of original function should be updated and whether
313 : offline copy should be removed if unnecesary. If any new
314 : indirect edges are discovered in the process, add them to NEW_EDGES, unless
315 : it is NULL. If UPDATE_OVERALL_SUMMARY is false, do not bother to recompute overall
316 : size of caller after inlining. Caller is required to eventually do it via
317 : ipa_update_overall_fn_summary.
318 : If callee_removed is non-NULL, set it to true if we removed callee node.
319 :
320 : Return true iff any new callgraph edges were discovered as a
321 : result of inlining. */
322 :
323 : bool
324 3900094 : inline_call (struct cgraph_edge *e, bool update_original,
325 : vec<cgraph_edge *> *new_edges,
326 : int *overall_size, bool update_overall_summary,
327 : bool *callee_removed)
328 : {
329 3900094 : int old_size = 0, new_size = 0;
330 3900094 : struct cgraph_node *to = NULL;
331 3900094 : struct cgraph_edge *curr = e;
332 3900094 : bool comdat_local = e->callee->comdat_local_p ();
333 3900094 : struct cgraph_node *callee = e->callee->ultimate_alias_target ();
334 3900094 : bool new_edges_found = false;
335 3900094 : bool keep_offline_copy = !update_original;
336 :
337 3900094 : int estimated_growth = 0;
338 3900094 : if (! update_overall_summary)
339 3026723 : estimated_growth = estimate_edge_growth (e);
340 : /* This is used only for assert below. */
341 : #if 0
342 : bool predicated = inline_edge_summary (e)->predicate != NULL;
343 : #endif
344 :
345 : /* Don't inline inlined edges. */
346 3900094 : gcc_assert (e->inline_failed);
347 : /* Don't even think of inlining inline clone. */
348 3900094 : gcc_assert (!callee->inlined_to);
349 :
350 3900094 : to = e->caller;
351 3900094 : if (to->inlined_to)
352 236438 : to = to->inlined_to;
353 :
354 : /* In case callee has AFDO profile but caller has GLOBAL0 we need
355 : to re-scale it so it can have non-zero AFDO profile. */
356 3900094 : if (callee->count.quality () == AFDO
357 0 : && e->count.nonzero_p ()
358 3900094 : && (to->count.quality () == GUESSED_GLOBAL0_AFDO
359 0 : || to->count.quality () == GUESSED_GLOBAL0_ADJUSTED))
360 : {
361 0 : profile_count num = callee->count;
362 0 : profile_count den = e->count;
363 0 : profile_count::adjust_for_ipa_scaling (&num, &den);
364 0 : if (dump_file)
365 : {
366 0 : fprintf (dump_file, "Rescalling profile of caller %s "
367 : "to allow non-zero AFDO counts:",
368 : to->dump_name ());
369 0 : den.dump (dump_file);
370 0 : fprintf (dump_file, " -> ");
371 0 : num.dump (dump_file);
372 0 : fprintf (dump_file, "\n");
373 : }
374 0 : to->apply_scale (num, den);
375 0 : to->frequency = std::max (to->frequency, callee->frequency);
376 : /* Do not update original, so possible additional calls of callee
377 : are handled reasonably well. */
378 0 : update_original = false;
379 0 : gcc_checking_assert (to->count.quality () == AFDO);
380 0 : if (dump_file)
381 : {
382 0 : fprintf (dump_file, "Scaled profile of %s: ", to->dump_name ());
383 0 : to->count.dump (dump_file);
384 0 : fprintf (dump_file, "\n");
385 : }
386 : }
387 : /* Do sanity checking of the profile and in case of inconsistencies do not
388 : update profile of original. This reduces the chances that inlining
389 : turns callee cold while in reality it is still hot. */
390 3900094 : if (!(callee->count.ipa ().force_nonzero () == callee->count.ipa ()))
391 : {
392 7366 : if (dump_file)
393 0 : fprintf (dump_file, "Callee count is 0; not updating callee profile\n");
394 : update_original = false;
395 : }
396 3892728 : else if (e->count.ipa ().quality () == AFDO
397 3892728 : && !(e->count.ipa ().force_nonzero () == e->count.ipa ()))
398 : {
399 0 : if (dump_file)
400 0 : fprintf (dump_file, "Edge count is AFDO 0; not updating callee profile\n");
401 : update_original = false;
402 : }
403 3900094 : if (e->count.ipa () > callee->count.ipa ().apply_scale (9, 8))
404 : {
405 0 : if (dump_file)
406 0 : fprintf (dump_file, "Calee count is too small (profile is inconsistent);"
407 : " not updating callee profile\n");
408 : update_original = false;
409 : }
410 3900094 : if (to->thunk)
411 : {
412 1126 : struct cgraph_node *target = to->callees->callee;
413 1126 : thunk_expansion = true;
414 :
415 : /* Remove all annotations, but keep thunk info. */
416 1126 : thunk_info info = *thunk_info::get (to);
417 1126 : symtab->call_cgraph_removal_hooks (to);
418 1126 : *thunk_info::get_create (to) = info;
419 1126 : if (in_lto_p)
420 51 : to->get_untransformed_body ();
421 1126 : expand_thunk (to, false, true);
422 : /* When thunk is instrumented we may have multiple callees. */
423 1126 : for (e = to->callees; e && e->callee != target; e = e->next_callee)
424 : ;
425 1126 : symtab->call_cgraph_insertion_hooks (to);
426 1126 : thunk_expansion = false;
427 1126 : gcc_assert (e);
428 : }
429 :
430 :
431 3900094 : e->inline_failed = CIF_OK;
432 3900094 : DECL_POSSIBLY_INLINED (callee->decl) = true;
433 :
434 3900094 : if (DECL_FUNCTION_PERSONALITY (callee->decl))
435 269142 : DECL_FUNCTION_PERSONALITY (to->decl)
436 134571 : = DECL_FUNCTION_PERSONALITY (callee->decl);
437 :
438 3900094 : bool reload_optimization_node = false;
439 3900094 : bool remove_strict_aliasing
440 3900094 : = (!opt_for_fn (callee->decl, flag_strict_aliasing)
441 3900094 : && opt_for_fn (to->decl, flag_strict_aliasing));
442 3900094 : bool remove_assume_sane_operators_new_delete
443 3900094 : = (!opt_for_fn (callee->decl, flag_assume_sane_operators_new_delete)
444 3900094 : && opt_for_fn (to->decl, flag_assume_sane_operators_new_delete));
445 3900076 : if (remove_strict_aliasing || remove_assume_sane_operators_new_delete)
446 : {
447 31 : struct gcc_options opts = global_options;
448 31 : struct gcc_options opts_set = global_options_set;
449 :
450 31 : cl_optimization_restore (&opts, &opts_set, opts_for_fn (to->decl));
451 31 : if (remove_strict_aliasing)
452 : {
453 13 : opts.x_flag_strict_aliasing = false;
454 13 : if (dump_file)
455 0 : fprintf (dump_file, "Dropping flag_strict_aliasing on %s\n",
456 : to->dump_name ());
457 : }
458 31 : if (remove_assume_sane_operators_new_delete)
459 : {
460 18 : opts.x_flag_assume_sane_operators_new_delete = false;
461 18 : if (dump_file)
462 0 : fprintf (dump_file,
463 : "Dropping flag_assume_sane_operators_new_delete on %s\n",
464 : to->dump_name ());
465 : }
466 31 : DECL_FUNCTION_SPECIFIC_OPTIMIZATION (to->decl)
467 31 : = build_optimization_node (&opts, &opts_set);
468 31 : reload_optimization_node = true;
469 : }
470 :
471 3900094 : ipa_fn_summary *caller_info = ipa_fn_summaries->get (to);
472 3900094 : ipa_fn_summary *callee_info = ipa_fn_summaries->get (callee);
473 3900094 : if (!caller_info->fp_expressions && callee_info->fp_expressions)
474 : {
475 15142 : caller_info->fp_expressions = true;
476 15142 : if (opt_for_fn (callee->decl, flag_rounding_math)
477 15142 : != opt_for_fn (to->decl, flag_rounding_math)
478 15142 : || opt_for_fn (callee->decl, flag_trapping_math)
479 15142 : != opt_for_fn (to->decl, flag_trapping_math)
480 15135 : || opt_for_fn (callee->decl, flag_unsafe_math_optimizations)
481 15135 : != opt_for_fn (to->decl, flag_unsafe_math_optimizations)
482 15135 : || opt_for_fn (callee->decl, flag_finite_math_only)
483 15135 : != opt_for_fn (to->decl, flag_finite_math_only)
484 15135 : || opt_for_fn (callee->decl, flag_signaling_nans)
485 15135 : != opt_for_fn (to->decl, flag_signaling_nans)
486 15135 : || opt_for_fn (callee->decl, flag_complex_method)
487 15135 : != opt_for_fn (to->decl, flag_complex_method)
488 15135 : || opt_for_fn (callee->decl, flag_signed_zeros)
489 15135 : != opt_for_fn (to->decl, flag_signed_zeros)
490 15135 : || opt_for_fn (callee->decl, flag_associative_math)
491 15135 : != opt_for_fn (to->decl, flag_associative_math)
492 15134 : || opt_for_fn (callee->decl, flag_reciprocal_math)
493 15134 : != opt_for_fn (to->decl, flag_reciprocal_math)
494 15134 : || opt_for_fn (callee->decl, flag_fp_int_builtin_inexact)
495 15134 : != opt_for_fn (to->decl, flag_fp_int_builtin_inexact)
496 15142 : || opt_for_fn (callee->decl, flag_errno_math)
497 15134 : != opt_for_fn (to->decl, flag_errno_math))
498 : {
499 8 : struct gcc_options opts = global_options;
500 8 : struct gcc_options opts_set = global_options_set;
501 :
502 8 : cl_optimization_restore (&opts, &opts_set, opts_for_fn (to->decl));
503 8 : opts.x_flag_rounding_math
504 8 : = opt_for_fn (callee->decl, flag_rounding_math);
505 8 : opts.x_flag_trapping_math
506 8 : = opt_for_fn (callee->decl, flag_trapping_math);
507 8 : opts.x_flag_unsafe_math_optimizations
508 8 : = opt_for_fn (callee->decl, flag_unsafe_math_optimizations);
509 8 : opts.x_flag_finite_math_only
510 8 : = opt_for_fn (callee->decl, flag_finite_math_only);
511 8 : opts.x_flag_signaling_nans
512 8 : = opt_for_fn (callee->decl, flag_signaling_nans);
513 8 : opts.x_flag_complex_method
514 8 : = opt_for_fn (callee->decl, flag_complex_method);
515 8 : opts.x_flag_signed_zeros
516 8 : = opt_for_fn (callee->decl, flag_signed_zeros);
517 8 : opts.x_flag_associative_math
518 8 : = opt_for_fn (callee->decl, flag_associative_math);
519 8 : opts.x_flag_reciprocal_math
520 8 : = opt_for_fn (callee->decl, flag_reciprocal_math);
521 8 : opts.x_flag_fp_int_builtin_inexact
522 8 : = opt_for_fn (callee->decl, flag_fp_int_builtin_inexact);
523 8 : opts.x_flag_errno_math
524 8 : = opt_for_fn (callee->decl, flag_errno_math);
525 8 : if (dump_file)
526 0 : fprintf (dump_file, "Copying FP flags from %s to %s\n",
527 : callee->dump_name (), to->dump_name ());
528 8 : DECL_FUNCTION_SPECIFIC_OPTIMIZATION (to->decl)
529 8 : = build_optimization_node (&opts, &opts_set);
530 8 : reload_optimization_node = true;
531 : }
532 : }
533 :
534 : /* Reload global optimization flags. */
535 3900094 : if (reload_optimization_node && DECL_STRUCT_FUNCTION (to->decl) == cfun)
536 34 : set_cfun (cfun, true);
537 :
538 : /* If aliases are involved, redirect edge to the actual destination and
539 : possibly remove the aliases. */
540 3900094 : if (e->callee != callee)
541 : {
542 701726 : struct cgraph_node *alias = e->callee, *next_alias;
543 701726 : e->redirect_callee (callee);
544 1611530 : while (alias && alias != callee)
545 : {
546 701745 : if (!alias->callers
547 921703 : && can_remove_node_now_p (alias,
548 219958 : !e->next_caller && !e->prev_caller ? e : NULL))
549 : {
550 208078 : next_alias = alias->get_alias_target ();
551 208078 : alias->remove ();
552 208078 : if (callee_removed)
553 1163 : *callee_removed = true;
554 : alias = next_alias;
555 : }
556 : else
557 : break;
558 : }
559 : }
560 :
561 3900094 : if (callee->must_remain_in_tu_body)
562 : {
563 0 : gcc_assert (callee->lto_file_data == to->lto_file_data);
564 0 : to->must_remain_in_tu_body = true;
565 : }
566 :
567 3900094 : clone_inlined_nodes (e, true, keep_offline_copy,
568 : update_original, overall_size);
569 :
570 3900094 : gcc_assert (curr->callee->inlined_to == to);
571 :
572 3900094 : old_size = ipa_size_summaries->get (to)->size;
573 3900094 : ipa_merge_modref_summary_after_inlining (e);
574 3900094 : ipa_merge_fn_summary_after_inlining (e);
575 3900094 : if (e->in_polymorphic_cdtor)
576 96475 : mark_all_inlined_calls_cdtor (e->callee);
577 3900094 : if (opt_for_fn (e->caller->decl, optimize))
578 3873018 : new_edges_found = ipa_propagate_indirect_call_infos (curr, new_edges);
579 3900094 : bool removed_p = check_speculations (e->callee, new_edges);
580 3900094 : if (update_overall_summary)
581 873371 : ipa_update_overall_fn_summary (to, new_edges_found || removed_p);
582 : else
583 : /* Update self size by the estimate so overall function growth limits
584 : work for further inlining into this function. Before inlining
585 : the function we inlined to again we expect the caller to update
586 : the overall summary. */
587 3026723 : ipa_size_summaries->get (to)->size += estimated_growth;
588 3900094 : new_size = ipa_size_summaries->get (to)->size;
589 :
590 3900094 : if (callee->calls_comdat_local)
591 0 : to->calls_comdat_local = true;
592 3900094 : else if (to->calls_comdat_local && comdat_local)
593 3342 : to->calls_comdat_local = to->check_calls_comdat_local_p ();
594 :
595 : /* FIXME: This assert suffers from roundoff errors, disable it for GCC 5
596 : and revisit it after conversion to sreals in GCC 6.
597 : See PR 65654. */
598 : #if 0
599 : /* Verify that estimated growth match real growth. Allow off-by-one
600 : error due to ipa_fn_summary::size_scale roudoff errors. */
601 : gcc_assert (!update_overall_summary || !overall_size || new_edges_found
602 : || abs (estimated_growth - (new_size - old_size)) <= 1
603 : || speculation_removed
604 : /* FIXME: a hack. Edges with false predicate are accounted
605 : wrong, we should remove them from callgraph. */
606 : || predicated);
607 : #endif
608 :
609 : /* Account the change of overall unit size; external functions will be
610 : removed and are thus not accounted. */
611 3900094 : if (overall_size && inline_account_function_p (to))
612 860212 : *overall_size += new_size - old_size;
613 3900094 : ncalls_inlined++;
614 :
615 : /* This must happen after ipa_merge_fn_summary_after_inlining that rely on jump
616 : functions of callee to not be updated. */
617 3900094 : return new_edges_found;
618 : }
619 :
620 : /* For each node that was made the holder of function body by
621 : save_inline_function_body, this summary contains pointer to the previous
622 : holder of the body. */
623 :
624 : function_summary <tree *> *ipa_saved_clone_sources;
625 :
626 : /* Copy function body of NODE and redirect all inline clones to it.
627 : This is done before inline plan is applied to NODE when there are
628 : still some inline clones if it.
629 :
630 : This is necessary because inline decisions are not really transitive
631 : and the other inline clones may have different bodies. */
632 :
633 : static struct cgraph_node *
634 49446 : save_inline_function_body (struct cgraph_node *node)
635 : {
636 49446 : struct cgraph_node *first_clone, *n;
637 :
638 49446 : if (dump_file)
639 144 : fprintf (dump_file, "\nSaving body of %s for later reuse\n",
640 : node->dump_name ());
641 :
642 49446 : gcc_assert (node == cgraph_node::get (node->decl));
643 :
644 : /* first_clone will be turned into real function. */
645 49446 : first_clone = node->clones;
646 :
647 : /* Arrange first clone to not be thunk as those do not have bodies. */
648 49446 : if (first_clone->thunk)
649 : {
650 0 : while (first_clone->thunk)
651 0 : first_clone = first_clone->next_sibling_clone;
652 0 : first_clone->prev_sibling_clone->next_sibling_clone
653 0 : = first_clone->next_sibling_clone;
654 0 : if (first_clone->next_sibling_clone)
655 0 : first_clone->next_sibling_clone->prev_sibling_clone
656 0 : = first_clone->prev_sibling_clone;
657 0 : first_clone->next_sibling_clone = node->clones;
658 0 : first_clone->prev_sibling_clone = NULL;
659 0 : node->clones->prev_sibling_clone = first_clone;
660 0 : node->clones = first_clone;
661 : }
662 49446 : first_clone->decl = copy_node (node->decl);
663 49446 : first_clone->decl->decl_with_vis.symtab_node = first_clone;
664 49446 : gcc_assert (first_clone == cgraph_node::get (first_clone->decl));
665 :
666 : /* Now reshape the clone tree, so all other clones descends from
667 : first_clone. */
668 49446 : if (first_clone->next_sibling_clone)
669 : {
670 210063 : for (n = first_clone->next_sibling_clone; n->next_sibling_clone;
671 184534 : n = n->next_sibling_clone)
672 184534 : n->clone_of = first_clone;
673 25529 : n->clone_of = first_clone;
674 25529 : n->next_sibling_clone = first_clone->clones;
675 25529 : if (first_clone->clones)
676 490 : first_clone->clones->prev_sibling_clone = n;
677 25529 : first_clone->clones = first_clone->next_sibling_clone;
678 25529 : first_clone->next_sibling_clone->prev_sibling_clone = NULL;
679 25529 : first_clone->next_sibling_clone = NULL;
680 25529 : gcc_assert (!first_clone->prev_sibling_clone);
681 : }
682 :
683 49446 : tree prev_body_holder = node->decl;
684 49446 : if (!ipa_saved_clone_sources)
685 : {
686 14709 : ipa_saved_clone_sources = new function_summary <tree *> (symtab);
687 14709 : ipa_saved_clone_sources->disable_insertion_hook ();
688 : }
689 : else
690 : {
691 34737 : tree *p = ipa_saved_clone_sources->get (node);
692 34737 : if (p)
693 : {
694 0 : prev_body_holder = *p;
695 0 : gcc_assert (prev_body_holder);
696 : }
697 : }
698 49446 : *ipa_saved_clone_sources->get_create (first_clone) = prev_body_holder;
699 49446 : first_clone->former_clone_of
700 49446 : = node->former_clone_of ? node->former_clone_of : node->decl;
701 49446 : first_clone->clone_of = NULL;
702 :
703 : /* Now node in question has no clones. */
704 49446 : node->clones = NULL;
705 :
706 : /* Inline clones share decl with the function they are cloned
707 : from. Walk the whole clone tree and redirect them all to the
708 : new decl. */
709 49446 : if (first_clone->clones)
710 294318 : for (n = first_clone->clones; n != first_clone;)
711 : {
712 266606 : gcc_assert (n->decl == node->decl);
713 266606 : n->decl = first_clone->decl;
714 266606 : if (n->clones)
715 : n = n->clones;
716 259309 : else if (n->next_sibling_clone)
717 : n = n->next_sibling_clone;
718 : else
719 : {
720 66930 : while (n != first_clone && !n->next_sibling_clone)
721 35009 : n = n->clone_of;
722 31921 : if (n != first_clone)
723 4209 : n = n->next_sibling_clone;
724 : }
725 : }
726 :
727 : /* Copy the OLD_VERSION_NODE function tree to the new version. */
728 49446 : tree_function_versioning (node->decl, first_clone->decl,
729 : NULL, NULL, true, NULL, NULL);
730 :
731 : /* The function will be short lived and removed after we inline all the
732 : clones, but make it internal so we won't confuse ourself. */
733 49446 : DECL_EXTERNAL (first_clone->decl) = 0;
734 49446 : TREE_PUBLIC (first_clone->decl) = 0;
735 49446 : DECL_COMDAT (first_clone->decl) = 0;
736 49446 : first_clone->ipa_transforms_to_apply.release ();
737 :
738 : /* When doing recursive inlining, the clone may become unnecessary.
739 : This is possible i.e. in the case when the recursive function is proved to
740 : be non-throwing and the recursion happens only in the EH landing pad.
741 : We cannot remove the clone until we are done with saving the body.
742 : Remove it now. */
743 49446 : if (!first_clone->callers)
744 : {
745 0 : first_clone->remove_symbol_and_inline_clones ();
746 0 : first_clone = NULL;
747 : }
748 49446 : else if (flag_checking)
749 49446 : first_clone->verify ();
750 :
751 49446 : return first_clone;
752 : }
753 :
754 : /* Return true when function body of DECL still needs to be kept around
755 : for later re-use. */
756 : static bool
757 1463827 : preserve_function_body_p (struct cgraph_node *node)
758 : {
759 1463827 : gcc_assert (symtab->global_info_ready);
760 1463827 : gcc_assert (!node->alias && !node->thunk);
761 :
762 : /* Look if there is any non-thunk clone around. */
763 1463861 : for (node = node->clones; node; node = node->next_sibling_clone)
764 49480 : if (!node->thunk)
765 : return true;
766 : return false;
767 : }
768 :
769 : /* tree-inline can not recurse; materialize all function bodie we will need
770 : during inlining. This includes inlined functions, but also called functions
771 : with param manipulation because IPA param manipulation attaches debug
772 : statements to PARM_DECLs of called clone. Materialize them if needed.
773 :
774 : FIXME: This is somehwat broken by design because it does not play well
775 : with partitioning. */
776 :
777 : static void
778 2788685 : maybe_materialize_called_clones (cgraph_node *node)
779 : {
780 11475765 : for (cgraph_edge *e = node->callees; e; e = e->next_callee)
781 : {
782 8687080 : clone_info *info;
783 :
784 8687080 : if (!e->inline_failed)
785 1324858 : maybe_materialize_called_clones (e->callee);
786 :
787 8687080 : cgraph_node *callee = cgraph_node::get (e->callee->decl);
788 8687080 : if (callee->clone_of
789 8687080 : && (info = clone_info::get (callee)) && info->param_adjustments)
790 94453 : callee->get_untransformed_body ();
791 : }
792 2788685 : }
793 :
794 : /* Apply inline plan to function. */
795 :
796 : unsigned int
797 1463827 : inline_transform (struct cgraph_node *node)
798 : {
799 1463827 : unsigned int todo = 0;
800 1463827 : struct cgraph_edge *e, *next;
801 1463827 : bool has_inline = false;
802 :
803 : /* FIXME: Currently the pass manager is adding inline transform more than
804 : once to some clones. This needs revisiting after WPA cleanups. */
805 1463827 : if (cfun->after_inlining)
806 : return 0;
807 :
808 1463827 : cgraph_node *next_clone;
809 1723370 : for (cgraph_node *n = node->clones; n; n = next_clone)
810 : {
811 259543 : next_clone = n->next_sibling_clone;
812 259543 : if (n->decl != node->decl)
813 0 : n->materialize_clone ();
814 : }
815 1463827 : node->clear_stmts_in_references ();
816 :
817 : /* We might need the body of this function so that we can expand
818 : it inline somewhere else. */
819 1463827 : if (preserve_function_body_p (node))
820 49446 : save_inline_function_body (node);
821 :
822 1463827 : profile_count num = node->count;
823 1463827 : profile_count den = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
824 1463827 : bool scale = num.initialized_p () && !(num == den);
825 46 : if (scale)
826 : {
827 46 : profile_count::adjust_for_ipa_scaling (&num, &den);
828 46 : if (dump_file)
829 : {
830 0 : fprintf (dump_file, "Applying count scale ");
831 0 : num.dump (dump_file);
832 0 : fprintf (dump_file, "/");
833 0 : den.dump (dump_file);
834 0 : fprintf (dump_file, "\n");
835 : }
836 :
837 46 : basic_block bb;
838 46 : cfun->cfg->count_max = profile_count::uninitialized ();
839 358 : FOR_ALL_BB_FN (bb, cfun)
840 : {
841 312 : bb->count = bb->count.apply_scale (num, den);
842 312 : cfun->cfg->count_max = profile_count::max_prefer_initialized
843 312 : (cfun->cfg->count_max, bb->count);
844 : }
845 46 : ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = node->count;
846 : }
847 :
848 1463827 : maybe_materialize_called_clones (node);
849 7372428 : for (e = node->callees; e; e = next)
850 : {
851 5908601 : if (!e->inline_failed)
852 532120 : has_inline = true;
853 5908601 : next = e->next_callee;
854 5908601 : if (e->has_callback)
855 : {
856 : /* Redirect callback edges when redirecting their carrying edge. */
857 13298 : cgraph_edge *cbe;
858 13298 : cgraph_edge::redirect_call_stmt_to_callee (e);
859 14757 : for (cbe = e->first_callback_edge (); cbe;
860 1459 : cbe = cbe->next_callback_edge ())
861 1459 : cgraph_edge::redirect_call_stmt_to_callee (cbe);
862 : }
863 : else
864 5895303 : cgraph_edge::redirect_call_stmt_to_callee (e);
865 : }
866 1463827 : node->remove_all_references ();
867 :
868 1463827 : timevar_push (TV_INTEGRATION);
869 1463827 : if (node->callees && (opt_for_fn (node->decl, optimize) || has_inline))
870 : {
871 809317 : todo = optimize_inline_calls (current_function_decl);
872 : }
873 1463827 : timevar_pop (TV_INTEGRATION);
874 :
875 1463827 : cfun->always_inline_functions_inlined = true;
876 1463827 : cfun->after_inlining = true;
877 1463827 : todo |= execute_fixup_cfg ();
878 :
879 1463827 : if (!(todo & TODO_update_ssa_any))
880 : /* Redirecting edges might lead to a need for vops to be recomputed. */
881 1092825 : todo |= TODO_update_ssa_only_virtuals;
882 :
883 : return todo;
884 : }
|