Branch data Line data Source code
1 : : /* Callgraph transformations to handle inlining
2 : : Copyright (C) 2003-2025 Free Software Foundation, Inc.
3 : : Contributed by Jan Hubicka
4 : :
5 : : This file is part of GCC.
6 : :
7 : : GCC is free software; you can redistribute it and/or modify it under
8 : : the terms of the GNU General Public License as published by the Free
9 : : Software Foundation; either version 3, or (at your option) any later
10 : : version.
11 : :
12 : : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 : : WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 : : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 : : for more details.
16 : :
17 : : You should have received a copy of the GNU General Public License
18 : : along with GCC; see the file COPYING3. If not see
19 : : <http://www.gnu.org/licenses/>. */
20 : :
21 : : /* The inline decisions are stored in callgraph in "inline plan" and
22 : : applied later.
23 : :
24 : : To mark given call inline, use inline_call function.
25 : : The function marks the edge inlinable and, if necessary, produces
26 : : virtual clone in the callgraph representing the new copy of callee's
27 : : function body.
28 : :
29 : : The inline plan is applied on given function body by inline_transform. */
30 : :
31 : : #include "config.h"
32 : : #include "system.h"
33 : : #include "coretypes.h"
34 : : #include "tm.h"
35 : : #include "function.h"
36 : : #include "tree.h"
37 : : #include "alloc-pool.h"
38 : : #include "tree-pass.h"
39 : : #include "cgraph.h"
40 : : #include "tree-cfg.h"
41 : : #include "symbol-summary.h"
42 : : #include "tree-vrp.h"
43 : : #include "sreal.h"
44 : : #include "ipa-cp.h"
45 : : #include "ipa-prop.h"
46 : : #include "ipa-fnsummary.h"
47 : : #include "ipa-inline.h"
48 : : #include "tree-inline.h"
49 : : #include "function.h"
50 : : #include "cfg.h"
51 : : #include "basic-block.h"
52 : : #include "ipa-utils.h"
53 : : #include "ipa-modref-tree.h"
54 : : #include "ipa-modref.h"
55 : : #include "symtab-thunks.h"
56 : : #include "symtab-clones.h"
57 : :
58 : : int ncalls_inlined;
59 : : int nfunctions_inlined;
60 : :
61 : : /* We removed or are going to remove the last call to NODE.
62 : : Return true if we can and want proactively remove the NODE now.
63 : : This is important to do, since we want inliner to know when offline
64 : : copy of function was removed. */
65 : :
66 : : static bool
67 : 2772709 : can_remove_node_now_p_1 (struct cgraph_node *node, struct cgraph_edge *e)
68 : : {
69 : 2772709 : ipa_ref *ref;
70 : :
71 : 3048172 : FOR_EACH_ALIAS (node, ref)
72 : : {
73 : 799967 : cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
74 : 511536 : if ((alias->callers && alias->callers != e)
75 : 799967 : || !can_remove_node_now_p_1 (alias, e))
76 : 524504 : return false;
77 : : }
78 : : /* FIXME: When address is taken of DECL_EXTERNAL function we still
79 : : can remove its offline copy, but we would need to keep unanalyzed node in
80 : : the callgraph so references can point to it.
81 : :
82 : : Also for comdat group we can ignore references inside a group as we
83 : : want to prove the group as a whole to be dead. */
84 : 2248205 : return (!node->address_taken
85 : 2217292 : && node->can_remove_if_no_direct_calls_and_refs_p ()
86 : : /* Inlining might enable more devirtualizing, so we want to remove
87 : : those only after all devirtualizable virtual calls are processed.
88 : : Lacking may edges in callgraph we just preserve them post
89 : : inlining. */
90 : 2151697 : && (!DECL_VIRTUAL_P (node->decl)
91 : 6492 : || !opt_for_fn (node->decl, flag_devirtualize))
92 : : /* During early inlining some unanalyzed cgraph nodes might be in the
93 : : callgraph and they might refer the function in question. */
94 : 4393504 : && !cgraph_new_nodes.exists ());
95 : : }
96 : :
97 : : /* We are going to eliminate last direct call to NODE (or alias of it) via edge E.
98 : : Verify that the NODE can be removed from unit and if it is contained in comdat
99 : : group that the whole comdat group is removable. */
100 : :
101 : : static bool
102 : 2290210 : can_remove_node_now_p (struct cgraph_node *node, struct cgraph_edge *e)
103 : : {
104 : 2290210 : struct cgraph_node *next;
105 : 2290210 : if (!can_remove_node_now_p_1 (node, e))
106 : : return false;
107 : :
108 : : /* When we see same comdat group, we need to be sure that all
109 : : items can be removed. */
110 : 1675794 : if (!node->same_comdat_group || !node->externally_visible)
111 : : return true;
112 : 278451 : for (next = dyn_cast<cgraph_node *> (node->same_comdat_group);
113 : 830029 : next != node; next = dyn_cast<cgraph_node *> (next->same_comdat_group))
114 : : {
115 : 278774 : if (next->alias)
116 : 81747 : continue;
117 : 196654 : if ((next->callers && next->callers != e)
118 : 390722 : || !can_remove_node_now_p_1 (next, e))
119 : : return false;
120 : : }
121 : : return true;
122 : : }
123 : :
124 : : /* Return true if NODE is a master clone with non-inline clones. */
125 : :
126 : : static bool
127 : 1452249 : master_clone_with_noninline_clones_p (struct cgraph_node *node)
128 : : {
129 : 1452249 : if (node->clone_of)
130 : : return false;
131 : :
132 : 1838557 : for (struct cgraph_node *n = node->clones; n; n = n->next_sibling_clone)
133 : 485773 : if (n->decl != node->decl)
134 : : return true;
135 : :
136 : : return false;
137 : : }
138 : :
139 : : /* E is expected to be an edge being inlined. Clone destination node of
140 : : the edge and redirect it to the new clone.
141 : : DUPLICATE is used for bookkeeping on whether we are actually creating new
142 : : clones or re-using node originally representing out-of-line function call.
143 : : By default the offline copy is removed, when it appears dead after inlining.
144 : : UPDATE_ORIGINAL prevents this transformation.
145 : : If OVERALL_SIZE is non-NULL, the size is updated to reflect the
146 : : transformation. */
147 : :
148 : : void
149 : 4607555 : clone_inlined_nodes (struct cgraph_edge *e, bool duplicate,
150 : : bool update_original, int *overall_size)
151 : : {
152 : 4607555 : struct cgraph_node *inlining_into;
153 : 4607555 : struct cgraph_edge *next;
154 : :
155 : 4607555 : if (e->caller->inlined_to)
156 : : inlining_into = e->caller->inlined_to;
157 : : else
158 : 3757489 : inlining_into = e->caller;
159 : :
160 : 4607555 : if (duplicate)
161 : : {
162 : : /* We may eliminate the need for out-of-line copy to be output.
163 : : In that case just go ahead and re-use it. This is not just an
164 : : memory optimization. Making offline copy of function disappear
165 : : from the program will improve future decisions on inlining. */
166 : 4409469 : if (!e->callee->callers->next_caller
167 : : /* Recursive inlining never wants the master clone to
168 : : be overwritten. */
169 : 2069311 : && update_original
170 : 2057480 : && can_remove_node_now_p (e->callee, e)
171 : : /* We cannot overwrite a master clone with non-inline clones
172 : : until after these clones are materialized. */
173 : 5861718 : && !master_clone_with_noninline_clones_p (e->callee))
174 : : {
175 : : /* TODO: When callee is in a comdat group, we could remove all of it,
176 : : including all inline clones inlined into it. That would however
177 : : need small function inlining to register edge removal hook to
178 : : maintain the priority queue.
179 : :
180 : : For now we keep the other functions in the group in program until
181 : : cgraph_remove_unreachable_functions gets rid of them. */
182 : 1452149 : gcc_assert (!e->callee->inlined_to);
183 : 1452149 : e->callee->remove_from_same_comdat_group ();
184 : 1452149 : if (e->callee->definition
185 : 1452149 : && inline_account_function_p (e->callee))
186 : : {
187 : 1286381 : gcc_assert (!e->callee->alias);
188 : 1286381 : if (overall_size)
189 : 256351 : *overall_size -= ipa_size_summaries->get (e->callee)->size;
190 : 1286381 : nfunctions_inlined++;
191 : : }
192 : 1452149 : duplicate = false;
193 : 1452149 : e->callee->externally_visible = false;
194 : 1452149 : profile_count num = e->count;
195 : 1452149 : profile_count den = e->callee->count;
196 : 1452149 : profile_count::adjust_for_ipa_scaling (&num, &den);
197 : 1452149 : e->callee->apply_scale (num, den);
198 : :
199 : 1452149 : dump_callgraph_transformation (e->callee, inlining_into,
200 : : "inlining to");
201 : : }
202 : : else
203 : : {
204 : 2957320 : struct cgraph_node *n;
205 : :
206 : 2957320 : n = e->callee->create_clone (e->callee->decl,
207 : : e->count,
208 : 2957320 : update_original, vNULL, true,
209 : : inlining_into,
210 : : NULL, NULL);
211 : 2957320 : n->used_as_abstract_origin = e->callee->used_as_abstract_origin;
212 : 2957320 : e->redirect_callee (n);
213 : : }
214 : : }
215 : : else
216 : 198086 : e->callee->remove_from_same_comdat_group ();
217 : :
218 : 4607555 : e->callee->inlined_to = inlining_into;
219 : 4607555 : if (e->callee->ipa_transforms_to_apply.length ())
220 : : {
221 : 298608 : e->callee->ipa_transforms_to_apply.release ();
222 : 298608 : e->callee->ipa_transforms_to_apply = vNULL;
223 : : }
224 : :
225 : : /* Recursively clone all bodies. */
226 : 8509866 : for (e = e->callee->callees; e; e = next)
227 : : {
228 : 3902311 : next = e->next_callee;
229 : 3902311 : if (!e->inline_failed)
230 : 625040 : clone_inlined_nodes (e, duplicate, update_original, overall_size);
231 : : }
232 : 4607555 : }
233 : :
234 : : /* Check all speculations in N and if any seem useless, resolve them. When a
235 : : first edge is resolved, pop all edges from NEW_EDGES and insert them to
236 : : EDGE_SET. Then remove each resolved edge from EDGE_SET, if it is there. */
237 : :
238 : : static bool
239 : 4606575 : check_speculations_1 (cgraph_node *n, vec<cgraph_edge *> *new_edges,
240 : : hash_set <cgraph_edge *> *edge_set)
241 : : {
242 : 4606575 : bool speculation_removed = false;
243 : 4606575 : cgraph_edge *next;
244 : :
245 : 8510432 : for (cgraph_edge *e = n->callees; e; e = next)
246 : : {
247 : 3903857 : next = e->next_callee;
248 : 3903857 : if (e->speculative && !speculation_useful_p (e, true))
249 : : {
250 : 48 : while (new_edges && !new_edges->is_empty ())
251 : 6 : edge_set->add (new_edges->pop ());
252 : 42 : edge_set->remove (e);
253 : :
254 : 42 : cgraph_edge::resolve_speculation (e, NULL);
255 : 42 : speculation_removed = true;
256 : : }
257 : 3903815 : else if (!e->inline_failed)
258 : 624536 : speculation_removed |= check_speculations_1 (e->callee, new_edges,
259 : : edge_set);
260 : : }
261 : 4606575 : return speculation_removed;
262 : : }
263 : :
264 : : /* Push E to NEW_EDGES. Called from hash_set traverse method, which
265 : : unfortunately means this function has to have external linkage, otherwise
266 : : the code will not compile with gcc 4.8. */
267 : :
268 : : bool
269 : 0 : push_all_edges_in_set_to_vec (cgraph_edge * const &e,
270 : : vec<cgraph_edge *> *new_edges)
271 : : {
272 : 0 : new_edges->safe_push (e);
273 : 0 : return true;
274 : : }
275 : :
276 : : /* Check all speculations in N and if any seem useless, resolve them and remove
277 : : them from NEW_EDGES. */
278 : :
279 : : static bool
280 : 3982039 : check_speculations (cgraph_node *n, vec<cgraph_edge *> *new_edges)
281 : : {
282 : 3982039 : hash_set <cgraph_edge *> edge_set;
283 : 3982039 : bool res = check_speculations_1 (n, new_edges, &edge_set);
284 : 3982039 : if (!edge_set.is_empty ())
285 : 0 : edge_set.traverse <vec<cgraph_edge *> *,
286 : 0 : push_all_edges_in_set_to_vec> (new_edges);
287 : 3982039 : return res;
288 : 3982039 : }
289 : :
290 : : /* Mark all call graph edges coming out of NODE and all nodes that have been
291 : : inlined to it as in_polymorphic_cdtor. */
292 : :
293 : : static void
294 : 124163 : mark_all_inlined_calls_cdtor (cgraph_node *node)
295 : : {
296 : 253442 : for (cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
297 : : {
298 : 129279 : cs->in_polymorphic_cdtor = true;
299 : 129279 : if (!cs->inline_failed)
300 : 21397 : mark_all_inlined_calls_cdtor (cs->callee);
301 : : }
302 : 130066 : for (cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
303 : 5903 : cs->in_polymorphic_cdtor = true;
304 : 124163 : }
305 : :
306 : :
307 : : /* Mark edge E as inlined and update callgraph accordingly. UPDATE_ORIGINAL
308 : : specify whether profile of original function should be updated. If any new
309 : : indirect edges are discovered in the process, add them to NEW_EDGES, unless
310 : : it is NULL. If UPDATE_OVERALL_SUMMARY is false, do not bother to recompute overall
311 : : size of caller after inlining. Caller is required to eventually do it via
312 : : ipa_update_overall_fn_summary.
313 : : If callee_removed is non-NULL, set it to true if we removed callee node.
314 : :
315 : : Return true iff any new callgraph edges were discovered as a
316 : : result of inlining. */
317 : :
318 : : bool
319 : 3982039 : inline_call (struct cgraph_edge *e, bool update_original,
320 : : vec<cgraph_edge *> *new_edges,
321 : : int *overall_size, bool update_overall_summary,
322 : : bool *callee_removed)
323 : : {
324 : 3982039 : int old_size = 0, new_size = 0;
325 : 3982039 : struct cgraph_node *to = NULL;
326 : 3982039 : struct cgraph_edge *curr = e;
327 : 3982039 : bool comdat_local = e->callee->comdat_local_p ();
328 : 3982039 : struct cgraph_node *callee = e->callee->ultimate_alias_target ();
329 : 3982039 : bool new_edges_found = false;
330 : :
331 : 3982039 : int estimated_growth = 0;
332 : 3982039 : if (! update_overall_summary)
333 : 3105280 : estimated_growth = estimate_edge_growth (e);
334 : : /* This is used only for assert bellow. */
335 : : #if 0
336 : : bool predicated = inline_edge_summary (e)->predicate != NULL;
337 : : #endif
338 : :
339 : : /* Don't inline inlined edges. */
340 : 3982039 : gcc_assert (e->inline_failed);
341 : : /* Don't even think of inlining inline clone. */
342 : 3982039 : gcc_assert (!callee->inlined_to);
343 : :
344 : 3982039 : to = e->caller;
345 : 3982039 : if (to->inlined_to)
346 : 225026 : to = to->inlined_to;
347 : 3982039 : if (to->thunk)
348 : : {
349 : 1142 : struct cgraph_node *target = to->callees->callee;
350 : 1142 : thunk_expansion = true;
351 : :
352 : : /* Remove all annotations, but keep thunk info. */
353 : 1142 : thunk_info info = *thunk_info::get (to);
354 : 1142 : symtab->call_cgraph_removal_hooks (to);
355 : 1142 : *thunk_info::get_create (to) = info;
356 : 1142 : if (in_lto_p)
357 : 49 : to->get_untransformed_body ();
358 : 1142 : expand_thunk (to, false, true);
359 : : /* When thunk is instrumented we may have multiple callees. */
360 : 1142 : for (e = to->callees; e && e->callee != target; e = e->next_callee)
361 : : ;
362 : 1142 : symtab->call_cgraph_insertion_hooks (to);
363 : 1142 : thunk_expansion = false;
364 : 1142 : gcc_assert (e);
365 : : }
366 : :
367 : :
368 : 3982039 : e->inline_failed = CIF_OK;
369 : 3982039 : DECL_POSSIBLY_INLINED (callee->decl) = true;
370 : :
371 : 3982039 : if (DECL_FUNCTION_PERSONALITY (callee->decl))
372 : 258478 : DECL_FUNCTION_PERSONALITY (to->decl)
373 : 129239 : = DECL_FUNCTION_PERSONALITY (callee->decl);
374 : :
375 : 3982039 : bool reload_optimization_node = false;
376 : 3982039 : bool remove_strict_aliasing
377 : 3982039 : = (!opt_for_fn (callee->decl, flag_strict_aliasing)
378 : 3982039 : && opt_for_fn (to->decl, flag_strict_aliasing));
379 : 3982039 : bool remove_assume_sane_operators_new_delete
380 : 3982039 : = (!opt_for_fn (callee->decl, flag_assume_sane_operators_new_delete)
381 : 3982039 : && opt_for_fn (to->decl, flag_assume_sane_operators_new_delete));
382 : 3982021 : if (remove_strict_aliasing || remove_assume_sane_operators_new_delete)
383 : : {
384 : 31 : struct gcc_options opts = global_options;
385 : 31 : struct gcc_options opts_set = global_options_set;
386 : :
387 : 31 : cl_optimization_restore (&opts, &opts_set, opts_for_fn (to->decl));
388 : 31 : if (remove_strict_aliasing)
389 : : {
390 : 13 : opts.x_flag_strict_aliasing = false;
391 : 13 : if (dump_file)
392 : 0 : fprintf (dump_file, "Dropping flag_strict_aliasing on %s\n",
393 : : to->dump_name ());
394 : : }
395 : 31 : if (remove_assume_sane_operators_new_delete)
396 : : {
397 : 18 : opts.x_flag_assume_sane_operators_new_delete = false;
398 : 18 : if (dump_file)
399 : 0 : fprintf (dump_file,
400 : : "Dropping flag_assume_sane_operators_new_delete on %s\n",
401 : : to->dump_name ());
402 : : }
403 : 31 : DECL_FUNCTION_SPECIFIC_OPTIMIZATION (to->decl)
404 : 31 : = build_optimization_node (&opts, &opts_set);
405 : 31 : reload_optimization_node = true;
406 : : }
407 : :
408 : 3982039 : ipa_fn_summary *caller_info = ipa_fn_summaries->get (to);
409 : 3982039 : ipa_fn_summary *callee_info = ipa_fn_summaries->get (callee);
410 : 3982039 : if (!caller_info->fp_expressions && callee_info->fp_expressions)
411 : : {
412 : 15722 : caller_info->fp_expressions = true;
413 : 15722 : if (opt_for_fn (callee->decl, flag_rounding_math)
414 : 15722 : != opt_for_fn (to->decl, flag_rounding_math)
415 : 15722 : || opt_for_fn (callee->decl, flag_trapping_math)
416 : 15722 : != opt_for_fn (to->decl, flag_trapping_math)
417 : 15715 : || opt_for_fn (callee->decl, flag_unsafe_math_optimizations)
418 : 15715 : != opt_for_fn (to->decl, flag_unsafe_math_optimizations)
419 : 15715 : || opt_for_fn (callee->decl, flag_finite_math_only)
420 : 15715 : != opt_for_fn (to->decl, flag_finite_math_only)
421 : 15715 : || opt_for_fn (callee->decl, flag_signaling_nans)
422 : 15715 : != opt_for_fn (to->decl, flag_signaling_nans)
423 : 15715 : || opt_for_fn (callee->decl, flag_complex_method)
424 : 15715 : != opt_for_fn (to->decl, flag_complex_method)
425 : 15715 : || opt_for_fn (callee->decl, flag_signed_zeros)
426 : 15715 : != opt_for_fn (to->decl, flag_signed_zeros)
427 : 15715 : || opt_for_fn (callee->decl, flag_associative_math)
428 : 15715 : != opt_for_fn (to->decl, flag_associative_math)
429 : 15714 : || opt_for_fn (callee->decl, flag_reciprocal_math)
430 : 15714 : != opt_for_fn (to->decl, flag_reciprocal_math)
431 : 15714 : || opt_for_fn (callee->decl, flag_fp_int_builtin_inexact)
432 : 15714 : != opt_for_fn (to->decl, flag_fp_int_builtin_inexact)
433 : 15722 : || opt_for_fn (callee->decl, flag_errno_math)
434 : 15714 : != opt_for_fn (to->decl, flag_errno_math))
435 : : {
436 : 8 : struct gcc_options opts = global_options;
437 : 8 : struct gcc_options opts_set = global_options_set;
438 : :
439 : 8 : cl_optimization_restore (&opts, &opts_set, opts_for_fn (to->decl));
440 : 8 : opts.x_flag_rounding_math
441 : 8 : = opt_for_fn (callee->decl, flag_rounding_math);
442 : 8 : opts.x_flag_trapping_math
443 : 8 : = opt_for_fn (callee->decl, flag_trapping_math);
444 : 8 : opts.x_flag_unsafe_math_optimizations
445 : 8 : = opt_for_fn (callee->decl, flag_unsafe_math_optimizations);
446 : 8 : opts.x_flag_finite_math_only
447 : 8 : = opt_for_fn (callee->decl, flag_finite_math_only);
448 : 8 : opts.x_flag_signaling_nans
449 : 8 : = opt_for_fn (callee->decl, flag_signaling_nans);
450 : 8 : opts.x_flag_complex_method
451 : 8 : = opt_for_fn (callee->decl, flag_complex_method);
452 : 8 : opts.x_flag_signed_zeros
453 : 8 : = opt_for_fn (callee->decl, flag_signed_zeros);
454 : 8 : opts.x_flag_associative_math
455 : 8 : = opt_for_fn (callee->decl, flag_associative_math);
456 : 8 : opts.x_flag_reciprocal_math
457 : 8 : = opt_for_fn (callee->decl, flag_reciprocal_math);
458 : 8 : opts.x_flag_fp_int_builtin_inexact
459 : 8 : = opt_for_fn (callee->decl, flag_fp_int_builtin_inexact);
460 : 8 : opts.x_flag_errno_math
461 : 8 : = opt_for_fn (callee->decl, flag_errno_math);
462 : 8 : if (dump_file)
463 : 0 : fprintf (dump_file, "Copying FP flags from %s to %s\n",
464 : : callee->dump_name (), to->dump_name ());
465 : 8 : DECL_FUNCTION_SPECIFIC_OPTIMIZATION (to->decl)
466 : 8 : = build_optimization_node (&opts, &opts_set);
467 : 8 : reload_optimization_node = true;
468 : : }
469 : : }
470 : :
471 : : /* Reload global optimization flags. */
472 : 3982039 : if (reload_optimization_node && DECL_STRUCT_FUNCTION (to->decl) == cfun)
473 : 34 : set_cfun (cfun, true);
474 : :
475 : : /* If aliases are involved, redirect edge to the actual destination and
476 : : possibly remove the aliases. */
477 : 3982039 : if (e->callee != callee)
478 : : {
479 : 736759 : struct cgraph_node *alias = e->callee, *next_alias;
480 : 736759 : e->redirect_callee (callee);
481 : 1694078 : while (alias && alias != callee)
482 : : {
483 : 736778 : if (!alias->callers
484 : 969508 : && can_remove_node_now_p (alias,
485 : 232730 : !e->next_caller && !e->prev_caller ? e : NULL))
486 : : {
487 : 220560 : next_alias = alias->get_alias_target ();
488 : 220560 : alias->remove ();
489 : 220560 : if (callee_removed)
490 : 1137 : *callee_removed = true;
491 : : alias = next_alias;
492 : : }
493 : : else
494 : : break;
495 : : }
496 : : }
497 : :
498 : 3982039 : clone_inlined_nodes (e, true, update_original, overall_size);
499 : :
500 : 3982039 : gcc_assert (curr->callee->inlined_to == to);
501 : :
502 : 3982039 : old_size = ipa_size_summaries->get (to)->size;
503 : 3982039 : ipa_merge_modref_summary_after_inlining (e);
504 : 3982039 : ipa_merge_fn_summary_after_inlining (e);
505 : 3982039 : if (e->in_polymorphic_cdtor)
506 : 102766 : mark_all_inlined_calls_cdtor (e->callee);
507 : 3982039 : if (opt_for_fn (e->caller->decl, optimize))
508 : 3955543 : new_edges_found = ipa_propagate_indirect_call_infos (curr, new_edges);
509 : 3982039 : bool removed_p = check_speculations (e->callee, new_edges);
510 : 3982039 : if (update_overall_summary)
511 : 876759 : ipa_update_overall_fn_summary (to, new_edges_found || removed_p);
512 : : else
513 : : /* Update self size by the estimate so overall function growth limits
514 : : work for further inlining into this function. Before inlining
515 : : the function we inlined to again we expect the caller to update
516 : : the overall summary. */
517 : 3105280 : ipa_size_summaries->get (to)->size += estimated_growth;
518 : 3982039 : new_size = ipa_size_summaries->get (to)->size;
519 : :
520 : 3982039 : if (callee->calls_comdat_local)
521 : 0 : to->calls_comdat_local = true;
522 : 3982039 : else if (to->calls_comdat_local && comdat_local)
523 : 3415 : to->calls_comdat_local = to->check_calls_comdat_local_p ();
524 : :
525 : : /* FIXME: This assert suffers from roundoff errors, disable it for GCC 5
526 : : and revisit it after conversion to sreals in GCC 6.
527 : : See PR 65654. */
528 : : #if 0
529 : : /* Verify that estimated growth match real growth. Allow off-by-one
530 : : error due to ipa_fn_summary::size_scale roudoff errors. */
531 : : gcc_assert (!update_overall_summary || !overall_size || new_edges_found
532 : : || abs (estimated_growth - (new_size - old_size)) <= 1
533 : : || speculation_removed
534 : : /* FIXME: a hack. Edges with false predicate are accounted
535 : : wrong, we should remove them from callgraph. */
536 : : || predicated);
537 : : #endif
538 : :
539 : : /* Account the change of overall unit size; external functions will be
540 : : removed and are thus not accounted. */
541 : 3982039 : if (overall_size && inline_account_function_p (to))
542 : 862294 : *overall_size += new_size - old_size;
543 : 3982039 : ncalls_inlined++;
544 : :
545 : : /* This must happen after ipa_merge_fn_summary_after_inlining that rely on jump
546 : : functions of callee to not be updated. */
547 : 3982039 : return new_edges_found;
548 : : }
549 : :
550 : : /* For each node that was made the holder of function body by
551 : : save_inline_function_body, this summary contains pointer to the previous
552 : : holder of the body. */
553 : :
554 : : function_summary <tree *> *ipa_saved_clone_sources;
555 : :
556 : : /* Copy function body of NODE and redirect all inline clones to it.
557 : : This is done before inline plan is applied to NODE when there are
558 : : still some inline clones if it.
559 : :
560 : : This is necessary because inline decisions are not really transitive
561 : : and the other inline clones may have different bodies. */
562 : :
563 : : static struct cgraph_node *
564 : 44867 : save_inline_function_body (struct cgraph_node *node)
565 : : {
566 : 44867 : struct cgraph_node *first_clone, *n;
567 : :
568 : 44867 : if (dump_file)
569 : 126 : fprintf (dump_file, "\nSaving body of %s for later reuse\n",
570 : : node->dump_name ());
571 : :
572 : 44867 : gcc_assert (node == cgraph_node::get (node->decl));
573 : :
574 : : /* first_clone will be turned into real function. */
575 : 44867 : first_clone = node->clones;
576 : :
577 : : /* Arrange first clone to not be thunk as those do not have bodies. */
578 : 44867 : if (first_clone->thunk)
579 : : {
580 : 0 : while (first_clone->thunk)
581 : 0 : first_clone = first_clone->next_sibling_clone;
582 : 0 : first_clone->prev_sibling_clone->next_sibling_clone
583 : 0 : = first_clone->next_sibling_clone;
584 : 0 : if (first_clone->next_sibling_clone)
585 : 0 : first_clone->next_sibling_clone->prev_sibling_clone
586 : 0 : = first_clone->prev_sibling_clone;
587 : 0 : first_clone->next_sibling_clone = node->clones;
588 : 0 : first_clone->prev_sibling_clone = NULL;
589 : 0 : node->clones->prev_sibling_clone = first_clone;
590 : 0 : node->clones = first_clone;
591 : : }
592 : 44867 : first_clone->decl = copy_node (node->decl);
593 : 44867 : first_clone->decl->decl_with_vis.symtab_node = first_clone;
594 : 44867 : gcc_assert (first_clone == cgraph_node::get (first_clone->decl));
595 : :
596 : : /* Now reshape the clone tree, so all other clones descends from
597 : : first_clone. */
598 : 44867 : if (first_clone->next_sibling_clone)
599 : : {
600 : 181588 : for (n = first_clone->next_sibling_clone; n->next_sibling_clone;
601 : 159013 : n = n->next_sibling_clone)
602 : 159013 : n->clone_of = first_clone;
603 : 22575 : n->clone_of = first_clone;
604 : 22575 : n->next_sibling_clone = first_clone->clones;
605 : 22575 : if (first_clone->clones)
606 : 571 : first_clone->clones->prev_sibling_clone = n;
607 : 22575 : first_clone->clones = first_clone->next_sibling_clone;
608 : 22575 : first_clone->next_sibling_clone->prev_sibling_clone = NULL;
609 : 22575 : first_clone->next_sibling_clone = NULL;
610 : 22575 : gcc_assert (!first_clone->prev_sibling_clone);
611 : : }
612 : :
613 : 44867 : tree prev_body_holder = node->decl;
614 : 44867 : if (!ipa_saved_clone_sources)
615 : : {
616 : 13734 : ipa_saved_clone_sources = new function_summary <tree *> (symtab);
617 : 13734 : ipa_saved_clone_sources->disable_insertion_hook ();
618 : : }
619 : : else
620 : : {
621 : 31133 : tree *p = ipa_saved_clone_sources->get (node);
622 : 31133 : if (p)
623 : : {
624 : 0 : prev_body_holder = *p;
625 : 0 : gcc_assert (prev_body_holder);
626 : : }
627 : : }
628 : 44867 : *ipa_saved_clone_sources->get_create (first_clone) = prev_body_holder;
629 : 44867 : first_clone->former_clone_of
630 : 44867 : = node->former_clone_of ? node->former_clone_of : node->decl;
631 : 44867 : first_clone->clone_of = NULL;
632 : :
633 : : /* Now node in question has no clones. */
634 : 44867 : node->clones = NULL;
635 : :
636 : : /* Inline clones share decl with the function they are cloned
637 : : from. Walk the whole clone tree and redirect them all to the
638 : : new decl. */
639 : 44867 : if (first_clone->clones)
640 : 258657 : for (n = first_clone->clones; n != first_clone;)
641 : : {
642 : 234602 : gcc_assert (n->decl == node->decl);
643 : 234602 : n->decl = first_clone->decl;
644 : 234602 : if (n->clones)
645 : : n = n->clones;
646 : 228826 : else if (n->next_sibling_clone)
647 : : n = n->next_sibling_clone;
648 : : else
649 : : {
650 : 57490 : while (n != first_clone && !n->next_sibling_clone)
651 : 29831 : n = n->clone_of;
652 : 27659 : if (n != first_clone)
653 : 3604 : n = n->next_sibling_clone;
654 : : }
655 : : }
656 : :
657 : : /* Copy the OLD_VERSION_NODE function tree to the new version. */
658 : 44867 : tree_function_versioning (node->decl, first_clone->decl,
659 : : NULL, NULL, true, NULL, NULL);
660 : :
661 : : /* The function will be short lived and removed after we inline all the
662 : : clones, but make it internal so we won't confuse ourself. */
663 : 44867 : DECL_EXTERNAL (first_clone->decl) = 0;
664 : 44867 : TREE_PUBLIC (first_clone->decl) = 0;
665 : 44867 : DECL_COMDAT (first_clone->decl) = 0;
666 : 44867 : first_clone->ipa_transforms_to_apply.release ();
667 : :
668 : : /* When doing recursive inlining, the clone may become unnecessary.
669 : : This is possible i.e. in the case when the recursive function is proved to
670 : : be non-throwing and the recursion happens only in the EH landing pad.
671 : : We cannot remove the clone until we are done with saving the body.
672 : : Remove it now. */
673 : 44867 : if (!first_clone->callers)
674 : : {
675 : 0 : first_clone->remove_symbol_and_inline_clones ();
676 : 0 : first_clone = NULL;
677 : : }
678 : 44867 : else if (flag_checking)
679 : 44867 : first_clone->verify ();
680 : :
681 : 44867 : return first_clone;
682 : : }
683 : :
684 : : /* Return true when function body of DECL still needs to be kept around
685 : : for later re-use. */
686 : : static bool
687 : 1442297 : preserve_function_body_p (struct cgraph_node *node)
688 : : {
689 : 1442297 : gcc_assert (symtab->global_info_ready);
690 : 1442297 : gcc_assert (!node->alias && !node->thunk);
691 : :
692 : : /* Look if there is any non-thunk clone around. */
693 : 1442331 : for (node = node->clones; node; node = node->next_sibling_clone)
694 : 44901 : if (!node->thunk)
695 : : return true;
696 : : return false;
697 : : }
698 : :
699 : : /* tree-inline can not recurse; materialize all function bodie we will need
700 : : during inlining. This includes inlined functions, but also called functions
701 : : with param manipulation because IPA param manipulation attaches debug
702 : : statements to PARM_DECLs of called clone. Materialize them if needed.
703 : :
704 : : FIXME: This is somehwat broken by design because it does not play well
705 : : with partitioning. */
706 : :
707 : : static void
708 : 2756635 : maybe_materialize_called_clones (cgraph_node *node)
709 : : {
710 : 11279374 : for (cgraph_edge *e = node->callees; e; e = e->next_callee)
711 : : {
712 : 8522739 : clone_info *info;
713 : :
714 : 8522739 : if (!e->inline_failed)
715 : 1314338 : maybe_materialize_called_clones (e->callee);
716 : :
717 : 8522739 : cgraph_node *callee = cgraph_node::get (e->callee->decl);
718 : 8522739 : if (callee->clone_of
719 : 8522739 : && (info = clone_info::get (callee)) && info->param_adjustments)
720 : 94348 : callee->get_untransformed_body ();
721 : : }
722 : 2756635 : }
723 : :
724 : : /* Apply inline plan to function. */
725 : :
726 : : unsigned int
727 : 1442297 : inline_transform (struct cgraph_node *node)
728 : : {
729 : 1442297 : unsigned int todo = 0;
730 : 1442297 : struct cgraph_edge *e, *next;
731 : 1442297 : bool has_inline = false;
732 : :
733 : : /* FIXME: Currently the pass manager is adding inline transform more than
734 : : once to some clones. This needs revisiting after WPA cleanups. */
735 : 1442297 : if (cfun->after_inlining)
736 : : return 0;
737 : :
738 : 1442297 : cgraph_node *next_clone;
739 : 1668786 : for (cgraph_node *n = node->clones; n; n = next_clone)
740 : : {
741 : 226489 : next_clone = n->next_sibling_clone;
742 : 226489 : if (n->decl != node->decl)
743 : 0 : n->materialize_clone ();
744 : : }
745 : 1442297 : node->clear_stmts_in_references ();
746 : :
747 : : /* We might need the body of this function so that we can expand
748 : : it inline somewhere else. */
749 : 1442297 : if (preserve_function_body_p (node))
750 : 44867 : save_inline_function_body (node);
751 : :
752 : 1442297 : profile_count num = node->count;
753 : 1442297 : profile_count den = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
754 : 1442297 : bool scale = num.initialized_p () && !(num == den);
755 : 44 : if (scale)
756 : : {
757 : 44 : profile_count::adjust_for_ipa_scaling (&num, &den);
758 : 44 : if (dump_file)
759 : : {
760 : 0 : fprintf (dump_file, "Applying count scale ");
761 : 0 : num.dump (dump_file);
762 : 0 : fprintf (dump_file, "/");
763 : 0 : den.dump (dump_file);
764 : 0 : fprintf (dump_file, "\n");
765 : : }
766 : :
767 : 44 : basic_block bb;
768 : 44 : cfun->cfg->count_max = profile_count::uninitialized ();
769 : 345 : FOR_ALL_BB_FN (bb, cfun)
770 : : {
771 : 301 : bb->count = bb->count.apply_scale (num, den);
772 : 301 : cfun->cfg->count_max = cfun->cfg->count_max.max (bb->count);
773 : : }
774 : 44 : ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = node->count;
775 : : }
776 : :
777 : 1442297 : maybe_materialize_called_clones (node);
778 : 7324006 : for (e = node->callees; e; e = next)
779 : : {
780 : 5881709 : if (!e->inline_failed)
781 : 555433 : has_inline = true;
782 : 5881709 : next = e->next_callee;
783 : 5881709 : cgraph_edge::redirect_call_stmt_to_callee (e);
784 : : }
785 : 1442297 : node->remove_all_references ();
786 : :
787 : 1442297 : timevar_push (TV_INTEGRATION);
788 : 1442297 : if (node->callees && (opt_for_fn (node->decl, optimize) || has_inline))
789 : : {
790 : 797778 : todo = optimize_inline_calls (current_function_decl);
791 : : }
792 : 1442297 : timevar_pop (TV_INTEGRATION);
793 : :
794 : 1442297 : cfun->always_inline_functions_inlined = true;
795 : 1442297 : cfun->after_inlining = true;
796 : 1442297 : todo |= execute_fixup_cfg ();
797 : :
798 : 1442297 : if (!(todo & TODO_update_ssa_any))
799 : : /* Redirecting edges might lead to a need for vops to be recomputed. */
800 : 1073523 : todo |= TODO_update_ssa_only_virtuals;
801 : :
802 : : return todo;
803 : : }
|