Branch data Line data Source code
1 : : /* Function splitting pass
2 : : Copyright (C) 2010-2024 Free Software Foundation, Inc.
3 : : Contributed by Jan Hubicka <jh@suse.cz>
4 : :
5 : : This file is part of GCC.
6 : :
7 : : GCC is free software; you can redistribute it and/or modify it under
8 : : the terms of the GNU General Public License as published by the Free
9 : : Software Foundation; either version 3, or (at your option) any later
10 : : version.
11 : :
12 : : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 : : WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 : : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 : : for more details.
16 : :
17 : : You should have received a copy of the GNU General Public License
18 : : along with GCC; see the file COPYING3. If not see
19 : : <http://www.gnu.org/licenses/>. */
20 : :
21 : : /* The purpose of this pass is to split function bodies to improve
22 : : inlining. I.e. for a function of the form:
23 : :
24 : : func (...)
25 : : {
26 : : if (cheap_test)
27 : : something_small
28 : : else
29 : : something_big
30 : : }
31 : :
32 : : Produce:
33 : :
34 : : func.part (...)
35 : : {
36 : : something_big
37 : : }
38 : :
39 : : func (...)
40 : : {
41 : : if (cheap_test)
42 : : something_small
43 : : else
44 : : func.part (...);
45 : : }
46 : :
47 : : When func becomes inlinable and cheap_test is often true, inlining func,
48 : : but not func.part, leads to a performance improvement similar to inlining
49 : : the original func, while the code size growth is smaller.
50 : :
51 : : The pass is organized in three stages:
52 : : 1) Collect local info about each basic block into a BB_INFO structure and
53 : : compute the estimated size and time of the function body.
54 : : 2) Via a DFS walk find all possible basic blocks where we can split
55 : : and choose the best one.
56 : : 3) If a split point is found, split at the specified BB by creating a clone
57 : : and updating the function to call it.
58 : :
59 : : The decisions about which functions to split are made in execute_split_functions
60 : : and consider_split.
61 : :
62 : : There are several possible future improvements for this pass including:
63 : :
64 : : 1) Splitting to break up large functions
65 : : 2) Splitting to reduce stack frame usage
66 : : 3) Allow the split part of the function to use values computed in the header
67 : : part. The values need to be passed to the split function, perhaps via the
68 : : same interface as for nested functions or as arguments.
69 : : 4) Support for simple rematerialization. I.e. when the split part uses a
70 : : value computed in the header from a function parameter in a very cheap way,
71 : : we can just recompute it.
72 : : 5) Support splitting of nested functions.
73 : : 6) Support non-SSA arguments.
74 : : 7) There is nothing preventing us from producing multiple parts of a single
75 : : function when needed, or from splitting the parts themselves. */
76 : :
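The transformation above can be made concrete with a small, invented illustration (the function and helper names are hypothetical, and the exact clone name depends on the GCC version): with -fpartial-inlining, which drives this pass, the rarely executed heavy branch can be outlined into a clone carrying the "part" suffix so that the remaining header becomes cheap enough to inline.

  /* Invented example of the shape this pass targets.  */
  extern void log_failure (const char *msg, int code);   /* assumed helper */

  int
  check_status (int code)
  {
    if (code == 0)                        /* cheap_test */
      return 0;                           /* something_small */
    /* something_big: cold path, big enough to block inlining as a whole.  */
    log_failure ("operation failed", code);
    log_failure ("rolling back transaction", code);
    return -1;
  }

After splitting, check_status would keep only the comparison plus a call to the outlined clone (named along the lines of check_status.part.0), mirroring the func/func.part sketch above.
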
77 : : #include "config.h"
78 : : #include "system.h"
79 : : #include "coretypes.h"
80 : : #include "backend.h"
81 : : #include "rtl.h"
82 : : #include "tree.h"
83 : : #include "gimple.h"
84 : : #include "cfghooks.h"
85 : : #include "alloc-pool.h"
86 : : #include "tree-pass.h"
87 : : #include "ssa.h"
88 : : #include "cgraph.h"
89 : : #include "diagnostic.h"
90 : : #include "fold-const.h"
91 : : #include "cfganal.h"
92 : : #include "calls.h"
93 : : #include "gimplify.h"
94 : : #include "gimple-iterator.h"
95 : : #include "gimplify-me.h"
96 : : #include "gimple-walk.h"
97 : : #include "symbol-summary.h"
98 : : #include "sreal.h"
99 : : #include "ipa-cp.h"
100 : : #include "ipa-prop.h"
101 : : #include "tree-cfg.h"
102 : : #include "tree-into-ssa.h"
103 : : #include "tree-dfa.h"
104 : : #include "tree-inline.h"
105 : : #include "gimple-pretty-print.h"
106 : : #include "ipa-fnsummary.h"
107 : : #include "cfgloop.h"
108 : : #include "attribs.h"
109 : : #include "ipa-strub.h"
110 : :
111 : : /* Per basic block info. */
112 : :
113 : 7559171 : class split_bb_info
114 : : {
115 : : public:
116 : : unsigned int size;
117 : : sreal time;
118 : : };
119 : :
120 : : static vec<split_bb_info> bb_info_vec;
121 : :
122 : : /* Description of split point. */
123 : :
124 : 1179748 : class split_point
125 : : {
126 : : public:
127 : : /* Estimated time and size of the two partitions. */
128 : : sreal header_time, split_time;
129 : : unsigned int header_size, split_size;
130 : :
131 : : /* SSA names that need to be passed into the split function. */
132 : : bitmap ssa_names_to_pass;
133 : :
134 : : /* Basic block where we split (that will become the entry point of the new function). */
135 : : basic_block entry_bb;
136 : :
137 : : /* Count for entering the split part.
138 : : This is not the count of entry_bb because it may be in a loop. */
139 : : profile_count count;
140 : :
141 : : /* Basic blocks we are splitting away. */
142 : : bitmap split_bbs;
143 : :
144 : : /* True when the return value is computed in the split part and thus
145 : : needs to be returned. */
146 : : bool split_part_set_retval;
147 : : };
148 : :
149 : : /* Best split point found. */
150 : :
151 : : class split_point best_split_point;
152 : :
153 : : /* Set of basic blocks that are not allowed to dominate a split point. */
154 : :
155 : : static bitmap forbidden_dominators;
156 : :
157 : : static tree find_retval (basic_block return_bb);
158 : :
159 : : /* Callback for walk_stmt_load_store_addr_ops. If T is a non-SSA automatic
160 : : variable, check whether it is present in the bitmap passed via DATA. */
161 : :
162 : : static bool
163 : 21605 : test_nonssa_use (gimple *, tree t, tree, void *data)
164 : : {
165 : 21605 : t = get_base_address (t);
166 : :
167 : 21605 : if (!t || is_gimple_reg (t))
168 : 0 : return false;
169 : :
170 : 21605 : if (TREE_CODE (t) == PARM_DECL
171 : 21526 : || (VAR_P (t)
172 : 3070 : && auto_var_in_fn_p (t, current_function_decl))
173 : 19266 : || TREE_CODE (t) == RESULT_DECL
174 : : /* Normal labels are part of CFG and will be handled gratefully.
175 : : Forced labels however can be used directly by statements and
176 : : need to stay in one partition along with their uses. */
177 : 40327 : || (TREE_CODE (t) == LABEL_DECL
178 : 6076 : && FORCED_LABEL (t)))
179 : 2886 : return bitmap_bit_p ((bitmap)data, DECL_UID (t));
180 : :
181 : : /* For DECL_BY_REFERENCE, the return value is actually a pointer. We want
182 : : to pretend that the value pointed to is actual result decl. */
183 : 8033 : if ((TREE_CODE (t) == MEM_REF || INDIRECT_REF_P (t))
184 : 10686 : && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
185 : 10682 : && SSA_NAME_VAR (TREE_OPERAND (t, 0))
186 : 8001 : && TREE_CODE (SSA_NAME_VAR (TREE_OPERAND (t, 0))) == RESULT_DECL
187 : 18719 : && DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
188 : 0 : return
189 : 0 : bitmap_bit_p ((bitmap)data,
190 : 0 : DECL_UID (DECL_RESULT (current_function_decl)));
191 : :
192 : : return false;
193 : : }
194 : :
195 : : /* Dump split point CURRENT. */
196 : :
197 : : static void
198 : 13 : dump_split_point (FILE * file, class split_point *current)
199 : : {
200 : 13 : fprintf (file,
201 : : "Split point at BB %i\n"
202 : : " header time: %f header size: %i\n"
203 : : " split time: %f split size: %i\n bbs: ",
204 : 13 : current->entry_bb->index, current->header_time.to_double (),
205 : : current->header_size, current->split_time.to_double (),
206 : : current->split_size);
207 : 13 : dump_bitmap (file, current->split_bbs);
208 : 13 : fprintf (file, " SSA names to pass: ");
209 : 13 : dump_bitmap (file, current->ssa_names_to_pass);
210 : 13 : }
211 : :
212 : : /* Look for all BBs in header that might lead to the split part and verify
213 : : that they are not defining any non-SSA var used by the split part.
214 : : Parameters are the same as for consider_split. */
215 : :
216 : : static bool
217 : 11267 : verify_non_ssa_vars (class split_point *current, bitmap non_ssa_vars,
218 : : basic_block return_bb)
219 : : {
220 : 11267 : bitmap seen = BITMAP_ALLOC (NULL);
221 : 11267 : vec<basic_block> worklist = vNULL;
222 : 11267 : edge e;
223 : 11267 : edge_iterator ei;
224 : 11267 : bool ok = true;
225 : 11267 : basic_block bb;
226 : :
227 : 23922 : FOR_EACH_EDGE (e, ei, current->entry_bb->preds)
228 : 12655 : if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
229 : 12655 : && !bitmap_bit_p (current->split_bbs, e->src->index))
230 : : {
231 : 12060 : worklist.safe_push (e->src);
232 : 12060 : bitmap_set_bit (seen, e->src->index);
233 : : }
234 : :
235 : 24919 : while (!worklist.is_empty ())
236 : : {
237 : 15752 : bb = worklist.pop ();
238 : 31892 : FOR_EACH_EDGE (e, ei, bb->preds)
239 : 16140 : if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
240 : 16140 : && bitmap_set_bit (seen, e->src->index))
241 : : {
242 : 4950 : gcc_checking_assert (!bitmap_bit_p (current->split_bbs,
243 : : e->src->index));
244 : 4950 : worklist.safe_push (e->src);
245 : : }
246 : 121388 : for (gimple_stmt_iterator bsi = gsi_start_bb (bb); !gsi_end_p (bsi);
247 : 89884 : gsi_next (&bsi))
248 : : {
249 : 91984 : gimple *stmt = gsi_stmt (bsi);
250 : 91984 : if (is_gimple_debug (stmt))
251 : 56808 : continue;
252 : 35176 : if (walk_stmt_load_store_addr_ops
253 : 35176 : (stmt, non_ssa_vars, test_nonssa_use, test_nonssa_use,
254 : : test_nonssa_use))
255 : : {
256 : 2100 : ok = false;
257 : 2100 : goto done;
258 : : }
259 : 89988 : if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
260 : 104 : if (test_nonssa_use (stmt, gimple_label_label (label_stmt),
261 : : NULL_TREE, non_ssa_vars))
262 : : {
263 : 0 : ok = false;
264 : 0 : goto done;
265 : : }
266 : : }
267 : 14076 : for (gphi_iterator bsi = gsi_start_phis (bb); !gsi_end_p (bsi);
268 : 424 : gsi_next (&bsi))
269 : : {
270 : 424 : if (walk_stmt_load_store_addr_ops
271 : 424 : (gsi_stmt (bsi), non_ssa_vars, test_nonssa_use, test_nonssa_use,
272 : : test_nonssa_use))
273 : : {
274 : 0 : ok = false;
275 : 0 : goto done;
276 : : }
277 : : }
278 : 42161 : FOR_EACH_EDGE (e, ei, bb->succs)
279 : : {
280 : 28509 : if (e->dest != return_bb)
281 : 26397 : continue;
282 : 2112 : for (gphi_iterator bsi = gsi_start_phis (return_bb);
283 : 4618 : !gsi_end_p (bsi);
284 : 2506 : gsi_next (&bsi))
285 : : {
286 : 2506 : gphi *stmt = bsi.phi ();
287 : 2506 : tree op = gimple_phi_arg_def (stmt, e->dest_idx);
288 : :
289 : 5012 : if (virtual_operand_p (gimple_phi_result (stmt)))
290 : 1991 : continue;
291 : 515 : if (TREE_CODE (op) != SSA_NAME
292 : 515 : && test_nonssa_use (stmt, op, op, non_ssa_vars))
293 : : {
294 : 0 : ok = false;
295 : 0 : goto done;
296 : : }
297 : : }
298 : : }
299 : : }
300 : :
301 : : /* Verify that the rest of function does not define any label
302 : : used by the split part. */
303 : 109102 : FOR_EACH_BB_FN (bb, cfun)
304 : 99938 : if (!bitmap_bit_p (current->split_bbs, bb->index)
305 : 99938 : && !bitmap_bit_p (seen, bb->index))
306 : : {
307 : 32736 : gimple_stmt_iterator bsi;
308 : 71441 : for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
309 : 105907 : if (glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (bsi)))
310 : : {
311 : 5972 : if (test_nonssa_use (label_stmt,
312 : : gimple_label_label (label_stmt),
313 : : NULL_TREE, non_ssa_vars))
314 : : {
315 : 3 : ok = false;
316 : 3 : goto done;
317 : : }
318 : : }
319 : : else
320 : : break;
321 : : }
322 : :
323 : 9164 : done:
324 : 11267 : BITMAP_FREE (seen);
325 : 11267 : worklist.release ();
326 : 11267 : return ok;
327 : : }
328 : :
329 : : /* If STMT is a call, check the callee against a list of forbidden
330 : : predicate functions. If a match is found, look for uses of the
331 : : call result in condition statements that compare against zero.
332 : : For each such use, find the block targeted by the condition
333 : : statement for the nonzero result, and set the bit for this block
334 : : in the forbidden dominators bitmap. The purpose of this is to avoid
335 : : selecting a split point where we are likely to lose the chance
336 : : to optimize away an unused function call. */
337 : :
338 : : static void
339 : 32516034 : check_forbidden_calls (gimple *stmt)
340 : : {
341 : 32516034 : imm_use_iterator use_iter;
342 : 32516034 : use_operand_p use_p;
343 : 32516034 : tree lhs;
344 : :
345 : : /* At the moment, __builtin_constant_p is the only forbidden
346 : : predicate function call (see PR49642). */
347 : 32516034 : if (!gimple_call_builtin_p (stmt, BUILT_IN_CONSTANT_P))
348 : 32514924 : return;
349 : :
350 : 1110 : lhs = gimple_call_lhs (stmt);
351 : :
352 : 1110 : if (!lhs || TREE_CODE (lhs) != SSA_NAME)
353 : : return;
354 : :
355 : 3129 : FOR_EACH_IMM_USE_FAST (use_p, use_iter, lhs)
356 : : {
357 : 2019 : tree op1;
358 : 2019 : basic_block use_bb, forbidden_bb;
359 : 2019 : enum tree_code code;
360 : 2019 : edge true_edge, false_edge;
361 : 2019 : gcond *use_stmt;
362 : :
363 : 2019 : use_stmt = dyn_cast <gcond *> (USE_STMT (use_p));
364 : 2019 : if (!use_stmt)
365 : 29 : continue;
366 : :
367 : : /* Assuming canonical form for GIMPLE_COND here, with constant
368 : : in second position. */
369 : 1990 : op1 = gimple_cond_rhs (use_stmt);
370 : 1990 : code = gimple_cond_code (use_stmt);
371 : 1990 : use_bb = gimple_bb (use_stmt);
372 : :
373 : 1990 : extract_true_false_edges_from_block (use_bb, &true_edge, &false_edge);
374 : :
375 : : /* We're only interested in comparisons that distinguish
376 : : unambiguously from zero. */
377 : 1990 : if (!integer_zerop (op1) || code == LE_EXPR || code == GE_EXPR)
378 : 0 : continue;
379 : :
380 : 1990 : if (code == EQ_EXPR)
381 : 4 : forbidden_bb = false_edge->dest;
382 : : else
383 : 1986 : forbidden_bb = true_edge->dest;
384 : :
385 : 1990 : bitmap_set_bit (forbidden_dominators, forbidden_bb->index);
386 : : }
387 : : }
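
As a hedged, invented illustration of the situation this guards against (the helper name below is made up and this is not the PR49642 reproducer): the block reached on a nonzero __builtin_constant_p result is recorded in forbidden_dominators, and consider_split later rejects any split point dominated by it (via dominated_by_forbidden), so the constant-foldable branch is not hidden behind an out-of-line call.

  extern int slow_classify (int);   /* assumed out-of-line fallback */

  static inline int
  classify (int x)
  {
    if (__builtin_constant_p (x) && x >= 0 && x < 32)
      /* Blocks dominated by the nonzero-result edge of this test are
         rejected as split points; after inlining with a constant X this
         branch folds away to a plain constant.  */
      return 1 << x;
    return slow_classify (x);
  }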
388 : :
389 : : /* If BB is dominated by any block in the forbidden dominators set,
390 : : return TRUE; else FALSE. */
391 : :
392 : : static bool
393 : 551 : dominated_by_forbidden (basic_block bb)
394 : : {
395 : 551 : unsigned dom_bb;
396 : 551 : bitmap_iterator bi;
397 : :
398 : 1626 : EXECUTE_IF_SET_IN_BITMAP (forbidden_dominators, 1, dom_bb, bi)
399 : : {
400 : 1089 : if (dominated_by_p (CDI_DOMINATORS, bb,
401 : 1089 : BASIC_BLOCK_FOR_FN (cfun, dom_bb)))
402 : : return true;
403 : : }
404 : :
405 : : return false;
406 : : }
407 : :
408 : : /* For a given split point CURRENT and return block RETURN_BB return true
409 : : if ssa name VAL is set by the split part and false otherwise. */
410 : : static bool
411 : 17392 : split_part_set_ssa_name_p (tree val, class split_point *current,
412 : : basic_block return_bb)
413 : : {
414 : 17392 : if (TREE_CODE (val) != SSA_NAME)
415 : : return false;
416 : :
417 : 17392 : return (!SSA_NAME_IS_DEFAULT_DEF (val)
418 : 17392 : && (bitmap_bit_p (current->split_bbs,
419 : 14057 : gimple_bb (SSA_NAME_DEF_STMT (val))->index)
420 : 14055 : || gimple_bb (SSA_NAME_DEF_STMT (val)) == return_bb));
421 : : }
422 : :
423 : : /* We found a split_point CURRENT. NON_SSA_VARS is a bitmap of all non-SSA
424 : : variables used and RETURN_BB is the return basic block.
425 : : See if we can split the function here. */
426 : :
427 : : static void
428 : 2181887 : consider_split (class split_point *current, bitmap non_ssa_vars,
429 : : basic_block return_bb)
430 : : {
431 : 2181887 : tree parm;
432 : 2181887 : unsigned int num_args = 0;
433 : 2181887 : unsigned int call_overhead;
434 : 2181887 : edge e;
435 : 2181887 : edge_iterator ei;
436 : 2181887 : gphi_iterator bsi;
437 : 2181887 : unsigned int i;
438 : 2181887 : tree retval;
439 : 2181887 : bool back_edge = false;
440 : :
441 : 2181887 : if (dump_file && (dump_flags & TDF_DETAILS))
442 : 6 : dump_split_point (dump_file, current);
443 : :
444 : 2181887 : current->count = profile_count::zero ();
445 : 4743686 : FOR_EACH_EDGE (e, ei, current->entry_bb->preds)
446 : : {
447 : 2561799 : if (e->flags & EDGE_DFS_BACK)
448 : 132013 : back_edge = true;
449 : 2561799 : if (!bitmap_bit_p (current->split_bbs, e->src->index))
450 : 2429785 : current->count += e->count ();
451 : : }
452 : :
453 : : /* Do not split when we would end up calling function anyway.
454 : : Compares are three state, use !(...<...) to also give up when outcome
455 : : is unknown. */
456 : 4363774 : if (!(current->count
457 : 2181887 : < (ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.apply_scale
458 : 4363774 : (param_partial_inlining_entry_probability, 100))))
459 : : {
460 : : /* When profile is guessed, we cannot expect it to give us
461 : : realistic estimate on likeliness of function taking the
462 : : complex path. As a special case, when tail of the function is
463 : : a loop, enable splitting since inlining code skipping the loop
464 : : is likely noticeable win. */
465 : 1272877 : if (back_edge
466 : 75576 : && profile_status_for_fn (cfun) != PROFILE_READ
467 : 1348416 : && current->count
468 : 75539 : < ENTRY_BLOCK_PTR_FOR_FN (cfun)->count)
469 : : {
470 : 11955 : if (dump_file && (dump_flags & TDF_DETAILS))
471 : : {
472 : 0 : fprintf (dump_file,
473 : : " Split before loop, accepting despite low counts");
474 : 0 : current->count.dump (dump_file);
475 : 0 : fprintf (dump_file, " ");
476 : 0 : ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.dump (dump_file);
477 : : }
478 : : }
479 : : else
480 : : {
481 : 1260922 : if (dump_file && (dump_flags & TDF_DETAILS))
482 : 3 : fprintf (dump_file,
483 : : " Refused: incoming frequency is too large.\n");
484 : 2122946 : return;
485 : : }
486 : : }
487 : :
488 : 920965 : if (!current->header_size)
489 : : {
490 : 0 : if (dump_file && (dump_flags & TDF_DETAILS))
491 : 0 : fprintf (dump_file, " Refused: header empty\n");
492 : 0 : return;
493 : : }
494 : :
495 : : /* Verify that PHI args on entry are either virtual or all their operands
496 : : incoming from header are the same. */
497 : 1085015 : for (bsi = gsi_start_phis (current->entry_bb); !gsi_end_p (bsi); gsi_next (&bsi))
498 : : {
499 : 177002 : gphi *stmt = bsi.phi ();
500 : 177002 : tree val = NULL;
501 : :
502 : 354004 : if (virtual_operand_p (gimple_phi_result (stmt)))
503 : 43385 : continue;
504 : 387958 : for (i = 0; i < gimple_phi_num_args (stmt); i++)
505 : : {
506 : 267293 : edge e = gimple_phi_arg_edge (stmt, i);
507 : 267293 : if (!bitmap_bit_p (current->split_bbs, e->src->index))
508 : : {
509 : 146763 : tree edge_val = gimple_phi_arg_def (stmt, i);
510 : 146763 : if (val && edge_val != val)
511 : : {
512 : 12952 : if (dump_file && (dump_flags & TDF_DETAILS))
513 : 0 : fprintf (dump_file,
514 : : " Refused: entry BB has PHI with multiple variants\n");
515 : 12952 : return;
516 : : }
517 : : val = edge_val;
518 : : }
519 : : }
520 : : }
521 : :
522 : :
523 : : /* See what arguments we will pass to the split function and compute the
524 : : call overhead. */
525 : 908013 : call_overhead = eni_size_weights.call_cost;
526 : 2574388 : for (parm = DECL_ARGUMENTS (current_function_decl); parm;
527 : 1666375 : parm = DECL_CHAIN (parm))
528 : : {
529 : 1666375 : if (!is_gimple_reg (parm))
530 : : {
531 : 85011 : if (bitmap_bit_p (non_ssa_vars, DECL_UID (parm)))
532 : : {
533 : 0 : if (dump_file && (dump_flags & TDF_DETAILS))
534 : 0 : fprintf (dump_file,
535 : : " Refused: need to pass non-ssa param values\n");
536 : 0 : return;
537 : : }
538 : : }
539 : : else
540 : : {
541 : 1581364 : tree ddef = ssa_default_def (cfun, parm);
542 : 1581364 : if (ddef
543 : 3123353 : && bitmap_bit_p (current->ssa_names_to_pass,
544 : 1541989 : SSA_NAME_VERSION (ddef)))
545 : : {
546 : 576459 : if (!VOID_TYPE_P (TREE_TYPE (parm)))
547 : 576459 : call_overhead += estimate_move_cost (TREE_TYPE (parm), false);
548 : 576459 : num_args++;
549 : : }
550 : : }
551 : : }
552 : 908013 : if (!VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
553 : 623878 : call_overhead += estimate_move_cost (TREE_TYPE (TREE_TYPE
554 : : (current_function_decl)),
555 : : false);
556 : :
557 : 908013 : if (current->split_size <= call_overhead)
558 : : {
559 : 390608 : if (dump_file && (dump_flags & TDF_DETAILS))
560 : 0 : fprintf (dump_file,
561 : : " Refused: split size is smaller than call overhead\n");
562 : 390608 : return;
563 : : }
564 : : /* FIXME: The logic here is not very precise, because inliner does use
565 : : inline predicates to reduce function body size. We add 10 to anticipate
566 : : that. Next stage1 we should try to be more meaningful here. */
567 : 517405 : if (current->header_size + call_overhead
568 : 517405 : >= (unsigned int)(DECL_DECLARED_INLINE_P (current_function_decl)
569 : 109838 : ? param_max_inline_insns_single
570 : 517405 : : param_max_inline_insns_auto) + 10)
571 : : {
572 : 377054 : if (dump_file && (dump_flags & TDF_DETAILS))
573 : 0 : fprintf (dump_file,
574 : : " Refused: header size is too large for inline candidate\n");
575 : 377054 : return;
576 : : }
577 : :
578 : : /* Splitting functions brings the target out of comdat group; this will
579 : : lead to code duplication if the function is reused by other unit.
580 : : Limit this duplication. This is consistent with limit in tree-sra.cc
581 : : FIXME: with LTO we ought to be able to do better! */
582 : 241741 : if (DECL_ONE_ONLY (current_function_decl)
583 : 241734 : && current->split_size >= (unsigned int) param_max_inline_insns_auto + 10)
584 : : {
585 : 8633 : if (dump_file && (dump_flags & TDF_DETAILS))
586 : 0 : fprintf (dump_file,
587 : : " Refused: function is COMDAT and tail is too large\n");
588 : 8633 : return;
589 : : }
590 : : /* For comdat functions also reject very small tails; those will likely get
591 : : inlined back and we do not want to risk the duplication overhead.
592 : : FIXME: with LTO we ought to be able to do better! */
593 : 224475 : if (DECL_ONE_ONLY (current_function_decl)
594 : 131718 : && current->split_size
595 : 92750 : <= (unsigned int) param_early_inlining_insns / 2)
596 : : {
597 : 25664 : if (dump_file && (dump_flags & TDF_DETAILS))
598 : 0 : fprintf (dump_file,
599 : : " Refused: function is COMDAT and tail is too small\n");
600 : 25664 : return;
601 : : }
602 : :
603 : : /* FIXME: we currently can pass only SSA function parameters to the split
604 : : arguments. Once parm_adjustment infrastructure is supported by cloning,
605 : : we can pass more than that. */
606 : 106054 : if (num_args != bitmap_count_bits (current->ssa_names_to_pass))
607 : : {
608 : :
609 : 44996 : if (dump_file && (dump_flags & TDF_DETAILS))
610 : 0 : fprintf (dump_file,
611 : : " Refused: need to pass non-param values\n");
612 : 44996 : return;
613 : : }
614 : :
615 : : /* When there are non-ssa vars used in the split region, see if they
616 : : are used in the header region. If so, reject the split.
617 : : FIXME: we can use nested function support to access both. */
618 : 61058 : if (!bitmap_empty_p (non_ssa_vars)
619 : 61058 : && !verify_non_ssa_vars (current, non_ssa_vars, return_bb))
620 : : {
621 : 2103 : if (dump_file && (dump_flags & TDF_DETAILS))
622 : 0 : fprintf (dump_file,
623 : : " Refused: split part has non-ssa uses\n");
624 : 2103 : return;
625 : : }
626 : :
627 : : /* If the split point is dominated by a forbidden block, reject
628 : : the split. */
629 : 58955 : if (!bitmap_empty_p (forbidden_dominators)
630 : 58955 : && dominated_by_forbidden (current->entry_bb))
631 : : {
632 : 14 : if (dump_file && (dump_flags & TDF_DETAILS))
633 : 0 : fprintf (dump_file,
634 : : " Refused: split point dominated by forbidden block\n");
635 : 14 : return;
636 : : }
637 : :
638 : : /* See if retval used by return bb is computed by header or split part.
639 : : When it is computed by split part, we need to produce return statement
640 : : in the split part and add code to header to pass it around.
641 : :
642 : : This is a bit tricky to test:
643 : : 1) When there is no return_bb or no return value, we always pass
644 : : value around.
645 : : 2) Invariants are always computed by caller.
646 : : 3) For SSA we need to look if defining statement is in header or split part
647 : : 4) For non-SSA we need to look where the var is computed. */
648 : 58941 : retval = find_retval (return_bb);
649 : 58941 : if (!retval)
650 : : {
651 : : /* If there is a return_bb with no return value in a function returning
652 : : a value by reference, also make the split part return void, otherwise
653 : : the expansion would try to create a non-POD temporary, which is
654 : : invalid. */
655 : 35389 : if (return_bb != EXIT_BLOCK_PTR_FOR_FN (cfun)
656 : 27500 : && DECL_RESULT (current_function_decl)
657 : 62889 : && DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
658 : 0 : current->split_part_set_retval = false;
659 : : else
660 : 35389 : current->split_part_set_retval = true;
661 : : }
662 : 23552 : else if (is_gimple_min_invariant (retval))
663 : 2185 : current->split_part_set_retval = false;
664 : : /* A special case is a value returned by reference, which we record as if it
665 : : were a non-SSA value set to result_decl. */
666 : 21367 : else if (TREE_CODE (retval) == SSA_NAME
667 : 17392 : && SSA_NAME_VAR (retval)
668 : 5778 : && TREE_CODE (SSA_NAME_VAR (retval)) == RESULT_DECL
669 : 21367 : && DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
670 : 0 : current->split_part_set_retval
671 : 0 : = bitmap_bit_p (non_ssa_vars, DECL_UID (SSA_NAME_VAR (retval)));
672 : 21367 : else if (TREE_CODE (retval) == SSA_NAME)
673 : 17392 : current->split_part_set_retval
674 : 17392 : = split_part_set_ssa_name_p (retval, current, return_bb);
675 : 3975 : else if (TREE_CODE (retval) == PARM_DECL)
676 : 0 : current->split_part_set_retval = false;
677 : 3975 : else if (VAR_P (retval)
678 : 437 : || TREE_CODE (retval) == RESULT_DECL)
679 : 3975 : current->split_part_set_retval
680 : 3975 : = bitmap_bit_p (non_ssa_vars, DECL_UID (retval));
681 : : else
682 : 0 : current->split_part_set_retval = true;
683 : :
684 : : /* split_function fixes up at most one non-virtual PHI node in return_bb,
685 : : for the return value. If there are other PHIs, give up. */
686 : 58941 : if (return_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
687 : : {
688 : 51052 : gphi_iterator psi;
689 : :
690 : 105731 : for (psi = gsi_start_phis (return_bb); !gsi_end_p (psi); gsi_next (&psi))
691 : 54679 : if (!virtual_operand_p (gimple_phi_result (psi.phi ()))
692 : 54679 : && !(retval
693 : 12634 : && current->split_part_set_retval
694 : 12634 : && TREE_CODE (retval) == SSA_NAME
695 : 12634 : && !DECL_BY_REFERENCE (DECL_RESULT (current_function_decl))
696 : 12634 : && SSA_NAME_DEF_STMT (retval) == psi.phi ()))
697 : : {
698 : 0 : if (dump_file && (dump_flags & TDF_DETAILS))
699 : 0 : fprintf (dump_file,
700 : : " Refused: return bb has extra PHIs\n");
701 : 0 : return;
702 : : }
703 : : }
704 : :
705 : 58941 : if (dump_file && (dump_flags & TDF_DETAILS))
706 : 3 : fprintf (dump_file, " Accepted!\n");
707 : :
708 : : /* At the moment choose the split point with the lowest count and that
709 : : leaves out the smallest size of header.
710 : : In the future we might reconsider this heuristic. */
711 : 58941 : if (!best_split_point.split_bbs
712 : 15074 : || best_split_point.count
713 : 15074 : > current->count
714 : 122562 : || (best_split_point.count == current->count
715 : 4680 : && best_split_point.split_size < current->split_size))
716 : :
717 : : {
718 : 49192 : if (dump_file && (dump_flags & TDF_DETAILS))
719 : 2 : fprintf (dump_file, " New best split point!\n");
720 : 49192 : if (best_split_point.ssa_names_to_pass)
721 : : {
722 : 5325 : BITMAP_FREE (best_split_point.ssa_names_to_pass);
723 : 5325 : BITMAP_FREE (best_split_point.split_bbs);
724 : : }
725 : 49192 : best_split_point = *current;
726 : 49192 : best_split_point.ssa_names_to_pass = BITMAP_ALLOC (NULL);
727 : 49192 : bitmap_copy (best_split_point.ssa_names_to_pass,
728 : 49192 : current->ssa_names_to_pass);
729 : 49192 : best_split_point.split_bbs = BITMAP_ALLOC (NULL);
730 : 49192 : bitmap_copy (best_split_point.split_bbs, current->split_bbs);
731 : : }
732 : : }
733 : :
734 : : /* Return basic block containing RETURN statement. We allow basic blocks
735 : : of the form:
736 : : <retval> = tmp_var;
737 : : return <retval>
738 : : but return_bb cannot be more complex than this (except that for
739 : : -fsanitize=thread we allow a TSAN_FUNC_EXIT () internal call in there).
740 : : If nothing is found, return the exit block.
741 : :
742 : : When there are multiple RETURN statements, choose the one with a return
743 : : value, since that one is more likely shared by multiple code paths.
744 : :
745 : : Return BB is special, because for function splitting it is the only
746 : : basic block that is duplicated in between header and split part of the
747 : : function.
748 : :
749 : : TODO: We might support multiple return blocks. */
750 : :
751 : : static basic_block
752 : 1179764 : find_return_bb (void)
753 : : {
754 : 1179764 : edge e;
755 : 1179764 : basic_block return_bb = EXIT_BLOCK_PTR_FOR_FN (cfun);
756 : 1179764 : gimple_stmt_iterator bsi;
757 : 1179764 : bool found_return = false;
758 : 1179764 : tree retval = NULL_TREE;
759 : :
760 : 2359528 : if (!single_pred_p (EXIT_BLOCK_PTR_FOR_FN (cfun)))
761 : : return return_bb;
762 : :
763 : 1179558 : e = single_pred_edge (EXIT_BLOCK_PTR_FOR_FN (cfun));
764 : 8961178 : for (bsi = gsi_last_bb (e->src); !gsi_end_p (bsi); gsi_prev (&bsi))
765 : : {
766 : 4108213 : gimple *stmt = gsi_stmt (bsi);
767 : 4108213 : if (gimple_code (stmt) == GIMPLE_LABEL
768 : 4098678 : || is_gimple_debug (stmt)
769 : 6374344 : || gimple_clobber_p (stmt))
770 : : ;
771 : 1997342 : else if (gimple_code (stmt) == GIMPLE_ASSIGN
772 : 562277 : && found_return
773 : 562277 : && gimple_assign_single_p (stmt)
774 : 461492 : && (auto_var_in_fn_p (gimple_assign_rhs1 (stmt),
775 : : current_function_decl)
776 : 448174 : || is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
777 : 2104070 : && retval == gimple_assign_lhs (stmt))
778 : : ;
779 : 1986841 : else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
780 : : {
781 : 1179558 : found_return = true;
782 : 1179558 : retval = gimple_return_retval (return_stmt);
783 : : }
784 : : /* For -fsanitize=thread, allow also TSAN_FUNC_EXIT () in the return
785 : : bb. */
786 : 807283 : else if ((flag_sanitize & SANITIZE_THREAD)
787 : 807283 : && gimple_call_internal_p (stmt, IFN_TSAN_FUNC_EXIT))
788 : : ;
789 : : else
790 : : break;
791 : : }
792 : 1179558 : if (gsi_end_p (bsi) && found_return)
793 : 372376 : return_bb = e->src;
794 : :
795 : : return return_bb;
796 : : }
797 : :
798 : : /* Given return basic block RETURN_BB, see where return value is really
799 : : stored. */
800 : : static tree
801 : 91701 : find_retval (basic_block return_bb)
802 : : {
803 : 91701 : gimple_stmt_iterator bsi;
804 : 273214 : for (bsi = gsi_start_bb (return_bb); !gsi_end_p (bsi); gsi_next (&bsi))
805 : 173624 : if (greturn *return_stmt = dyn_cast <greturn *> (gsi_stmt (bsi)))
806 : 83768 : return gimple_return_retval (return_stmt);
807 : 89856 : else if (gimple_code (gsi_stmt (bsi)) == GIMPLE_ASSIGN
808 : 89856 : && !gimple_clobber_p (gsi_stmt (bsi)))
809 : 44 : return gimple_assign_rhs1 (gsi_stmt (bsi));
810 : : return NULL;
811 : : }
812 : :
813 : : /* Callback for walk_stmt_load_store_addr_ops. If T is a non-SSA automatic
814 : : variable, mark it as used in the bitmap passed via DATA.
815 : : Return true when access to T prevents splitting the function. */
816 : :
817 : : static bool
818 : 6371362 : mark_nonssa_use (gimple *, tree t, tree, void *data)
819 : : {
820 : 6371362 : t = get_base_address (t);
821 : :
822 : 6371362 : if (!t || is_gimple_reg (t))
823 : 0 : return false;
824 : :
825 : : /* At present we can't pass non-SSA arguments to split function.
826 : : FIXME: this can be relaxed by passing references to arguments. */
827 : 6371362 : if (TREE_CODE (t) == PARM_DECL)
828 : : {
829 : 193789 : if (dump_file && (dump_flags & TDF_DETAILS))
830 : 0 : fprintf (dump_file,
831 : : "Cannot split: use of non-ssa function parameter.\n");
832 : 193789 : return true;
833 : : }
834 : :
835 : 2507707 : if ((VAR_P (t) && auto_var_in_fn_p (t, current_function_decl))
836 : 4284529 : || TREE_CODE (t) == RESULT_DECL
837 : 10436014 : || (TREE_CODE (t) == LABEL_DECL && FORCED_LABEL (t)))
838 : 1919168 : bitmap_set_bit ((bitmap)data, DECL_UID (t));
839 : :
840 : : /* For DECL_BY_REFERENCE, the return value is actually a pointer. We want
841 : : to pretend that the value pointed to is actual result decl. */
842 : 3236711 : if ((TREE_CODE (t) == MEM_REF || INDIRECT_REF_P (t))
843 : 2940862 : && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
844 : 2940293 : && SSA_NAME_VAR (TREE_OPERAND (t, 0))
845 : 2382731 : && TREE_CODE (SSA_NAME_VAR (TREE_OPERAND (t, 0))) == RESULT_DECL
846 : 6227443 : && DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
847 : 49870 : return
848 : 49870 : bitmap_bit_p ((bitmap)data,
849 : 49870 : DECL_UID (DECL_RESULT (current_function_decl)));
850 : :
851 : : return false;
852 : : }
853 : :
854 : : /* Compute local properties of basic block BB that we collect when looking for
855 : : split points. We look for SSA defs and store them in SET_SSA_NAMES, for
856 : : SSA uses and store them in USED_SSA_NAMES, and for any non-SSA automatic
857 : : vars, which are stored in NON_SSA_VARS.
858 : :
859 : : When BB has an edge to RETURN_BB, collect uses in RETURN_BB too.
860 : :
861 : : Return false when BB contains something that prevents it from being put
862 : : into the split function. */
863 : :
864 : : static bool
865 : 3647439 : visit_bb (basic_block bb, basic_block return_bb,
866 : : bitmap set_ssa_names, bitmap used_ssa_names,
867 : : bitmap non_ssa_vars)
868 : : {
869 : 3647439 : edge e;
870 : 3647439 : edge_iterator ei;
871 : 3647439 : bool can_split = true;
872 : :
873 : 38594695 : for (gimple_stmt_iterator bsi = gsi_start_bb (bb); !gsi_end_p (bsi);
874 : 31299817 : gsi_next (&bsi))
875 : : {
876 : 31299817 : gimple *stmt = gsi_stmt (bsi);
877 : 31299817 : tree op;
878 : 31299817 : ssa_op_iter iter;
879 : :
880 : 31299817 : if (is_gimple_debug (stmt))
881 : 20150386 : continue;
882 : :
883 : 12085593 : if (gimple_clobber_p (stmt))
884 : 936162 : continue;
885 : :
886 : : /* FIXME: We can split regions containing EH. We cannot however
887 : : split RESX, EH_DISPATCH and EH_POINTER referring to same region
888 : : into different partitions. This would require tracking of
889 : : EH regions and checking in consider_split_point if they
890 : : are not used elsewhere. */
891 : 11149431 : if (gimple_code (stmt) == GIMPLE_RESX)
892 : : {
893 : 95334 : if (dump_file && (dump_flags & TDF_DETAILS))
894 : 0 : fprintf (dump_file, "Cannot split: resx.\n");
895 : : can_split = false;
896 : : }
897 : 11149431 : if (gimple_code (stmt) == GIMPLE_EH_DISPATCH)
898 : : {
899 : 5530 : if (dump_file && (dump_flags & TDF_DETAILS))
900 : 0 : fprintf (dump_file, "Cannot split: eh dispatch.\n");
901 : : can_split = false;
902 : : }
903 : :
904 : : /* Check calls that would prevent splitting. */
905 : 11149431 : if (gimple_code (stmt) == GIMPLE_CALL)
906 : : {
907 : 1926870 : if (tree decl = gimple_call_fndecl (stmt))
908 : : {
909 : : /* Check builtins that would prevent splitting. */
910 : 1868632 : if (fndecl_built_in_p (decl, BUILT_IN_NORMAL))
911 : 491664 : switch (DECL_FUNCTION_CODE (decl))
912 : : {
913 : : /* FIXME: once we allow passing non-parm values to the
914 : : split part, we need to be sure to handle
915 : : builtin_stack_save and builtin_stack_restore correctly. At the
916 : : moment we are safe; there is no way to store the
917 : : builtin_stack_save result in a non-SSA variable since all
918 : : calls to those are compiler generated. */
919 : 10 : case BUILT_IN_APPLY:
920 : 10 : case BUILT_IN_APPLY_ARGS:
921 : 10 : case BUILT_IN_VA_START:
922 : 10 : if (dump_file && (dump_flags & TDF_DETAILS))
923 : 0 : fprintf (dump_file,
924 : : "Cannot split: builtin_apply and va_start.\n");
925 : : can_split = false;
926 : : break;
927 : 6397 : case BUILT_IN_EH_POINTER:
928 : 6397 : if (dump_file && (dump_flags & TDF_DETAILS))
929 : 0 : fprintf (dump_file,
930 : : "Cannot split: builtin_eh_pointer.\n");
931 : : can_split = false;
932 : : break;
933 : : default:
934 : : break;
935 : : }
936 : :
937 : : /* Calls to functions (which have the warning or error
938 : : attribute on them) should not be split off into another
939 : : function. */
940 : 1868632 : if (lookup_attribute ("warning", DECL_ATTRIBUTES (decl))
941 : 1868632 : || lookup_attribute ("error", DECL_ATTRIBUTES (decl)))
942 : : {
943 : 20 : if (dump_file && (dump_flags & TDF_DETAILS))
944 : 0 : fprintf (dump_file,
945 : : "Cannot split: warning or error attribute.\n");
946 : : can_split = false;
947 : : }
948 : : }
949 : : }
950 : :
951 : 16617591 : FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
952 : 5468160 : bitmap_set_bit (set_ssa_names, SSA_NAME_VERSION (op));
953 : 22152512 : FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
954 : 11003081 : bitmap_set_bit (used_ssa_names, SSA_NAME_VERSION (op));
955 : 11149431 : can_split &= !walk_stmt_load_store_addr_ops (stmt, non_ssa_vars,
956 : : mark_nonssa_use,
957 : : mark_nonssa_use,
958 : : mark_nonssa_use);
959 : : }
960 : 4469990 : for (gphi_iterator bsi = gsi_start_phis (bb); !gsi_end_p (bsi);
961 : 822551 : gsi_next (&bsi))
962 : : {
963 : 822551 : gphi *stmt = bsi.phi ();
964 : 822551 : unsigned int i;
965 : :
966 : 1645102 : if (virtual_operand_p (gimple_phi_result (stmt)))
967 : 348336 : continue;
968 : 948430 : bitmap_set_bit (set_ssa_names,
969 : 474215 : SSA_NAME_VERSION (gimple_phi_result (stmt)));
970 : 1559945 : for (i = 0; i < gimple_phi_num_args (stmt); i++)
971 : : {
972 : 1085730 : tree op = gimple_phi_arg_def (stmt, i);
973 : 1085730 : if (TREE_CODE (op) == SSA_NAME)
974 : 753086 : bitmap_set_bit (used_ssa_names, SSA_NAME_VERSION (op));
975 : : }
976 : 474215 : can_split &= !walk_stmt_load_store_addr_ops (stmt, non_ssa_vars,
977 : : mark_nonssa_use,
978 : : mark_nonssa_use,
979 : : mark_nonssa_use);
980 : : }
981 : : /* Record also uses coming from PHI operand in return BB. */
982 : 8514172 : FOR_EACH_EDGE (e, ei, bb->succs)
983 : 4866733 : if (e->dest == return_bb)
984 : : {
985 : 1438652 : for (gphi_iterator bsi = gsi_start_phis (return_bb);
986 : 2086268 : !gsi_end_p (bsi);
987 : 647616 : gsi_next (&bsi))
988 : : {
989 : 647616 : gphi *stmt = bsi.phi ();
990 : 647616 : tree op = gimple_phi_arg_def (stmt, e->dest_idx);
991 : :
992 : 1295232 : if (virtual_operand_p (gimple_phi_result (stmt)))
993 : 386381 : continue;
994 : 261235 : if (TREE_CODE (op) == SSA_NAME)
995 : 128366 : bitmap_set_bit (used_ssa_names, SSA_NAME_VERSION (op));
996 : : else
997 : 132869 : can_split &= !mark_nonssa_use (stmt, op, op, non_ssa_vars);
998 : : }
999 : : }
1000 : 3647439 : return can_split;
1001 : : }
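
A hedged, invented example of one of the refusals handled in visit_bb above (the helper is hypothetical): visit_bb clears can_split for any block containing a va_start or __builtin_apply call, so no split part may contain such a block.

  #include <stdarg.h>

  extern int report (const char *fmt, va_list ap);   /* assumed helper */

  /* Splitting right after the severity test would otherwise be attractive,
     but the va_start call keeps its block out of any split part.  */
  int
  log_event (int severity, const char *fmt, ...)
  {
    if (severity <= 0)
      return 0;
    va_list ap;
    va_start (ap, fmt);
    int n = report (fmt, ap);
    va_end (ap);
    return n;
  }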
1002 : :
1003 : : /* Stack entry for the recursive DFS walk in find_split_points. */
1004 : :
1005 : 4827187 : class stack_entry
1006 : : {
1007 : : public:
1008 : : /* Basic block we are examining. */
1009 : : basic_block bb;
1010 : :
1011 : : /* SSA names set and used by the BB and all BBs reachable
1012 : : from it via DFS walk. */
1013 : : bitmap set_ssa_names, used_ssa_names;
1014 : : bitmap non_ssa_vars;
1015 : :
1016 : : /* All BBS visited from this BB via DFS walk. */
1017 : : bitmap bbs_visited;
1018 : :
1019 : : /* Last examined edge in DFS walk. Since we walk unoriented graph,
1020 : : the value is up to sum of incoming and outgoing edges of BB. */
1021 : : unsigned int edge_num;
1022 : :
1023 : : /* Stack entry index of earliest BB reachable from current BB
1024 : : or any BB visited later in DFS walk. */
1025 : : int earliest;
1026 : :
1027 : : /* Overall time and size of all BBs reached from this BB in DFS walk. */
1028 : : sreal overall_time;
1029 : : int overall_size;
1030 : :
1031 : : /* When false we cannot split on this BB. */
1032 : : bool can_split;
1033 : : };
1034 : :
1035 : :
1036 : : /* Find all articulations and call consider_split on them.
1037 : : OVERALL_TIME and OVERALL_SIZE are the time and size of the function.
1038 : :
1039 : : We perform the basic algorithm for finding an articulation in a graph
1040 : : created from the CFG by considering it to be an unoriented graph.
1041 : :
1042 : : The articulation is discovered via a DFS walk. We collect the earliest
1043 : : basic block on the stack that is reachable via a backward edge. An
1044 : : articulation is any basic block such that no backward edge bypasses it.
1045 : : To reduce stack usage we maintain a heap-allocated stack in the STACK
1046 : : vector. The AUX pointer of a BB is set to the index at which it appears
1047 : : in the stack, or to -1 once it is visited and popped off the stack.
1048 : :
1049 : : The algorithm finds an articulation after visiting the whole component
1050 : : reachable from it. This makes it convenient to collect information
1051 : : about the component, used by consider_split. */
1052 : :
1053 : : static void
1054 : 1179748 : find_split_points (basic_block return_bb, sreal overall_time, int overall_size)
1055 : : {
1056 : 1179748 : stack_entry first;
1057 : 1179748 : vec<stack_entry> stack = vNULL;
1058 : 1179748 : basic_block bb;
1059 : 1179748 : class split_point current;
1060 : :
1061 : 1179748 : current.header_time = overall_time;
1062 : 1179748 : current.header_size = overall_size;
1063 : 1179748 : current.split_time = 0;
1064 : 1179748 : current.split_size = 0;
1065 : 1179748 : current.ssa_names_to_pass = BITMAP_ALLOC (NULL);
1066 : :
1067 : 1179748 : first.bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
1068 : 1179748 : first.edge_num = 0;
1069 : 1179748 : first.overall_time = 0;
1070 : 1179748 : first.overall_size = 0;
1071 : 1179748 : first.earliest = INT_MAX;
1072 : 1179748 : first.set_ssa_names = 0;
1073 : 1179748 : first.used_ssa_names = 0;
1074 : 1179748 : first.non_ssa_vars = 0;
1075 : 1179748 : first.bbs_visited = 0;
1076 : 1179748 : first.can_split = false;
1077 : 1179748 : stack.safe_push (first);
1078 : 1179748 : ENTRY_BLOCK_PTR_FOR_FN (cfun)->aux = (void *)(intptr_t)-1;
1079 : :
1080 : 16523048 : while (!stack.is_empty ())
1081 : : {
1082 : 15343300 : stack_entry *entry = &stack.last ();
1083 : :
1084 : : /* We are walking an acyclic graph, so edge_num counts
1085 : : succ and pred edges together. However when considering
1086 : : articulation, we want to have processed everything reachable
1087 : : from articulation but nothing that reaches into it. */
1088 : 15343300 : if (entry->edge_num == EDGE_COUNT (entry->bb->succs)
1089 : 15343300 : && entry->bb != ENTRY_BLOCK_PTR_FOR_FN (cfun))
1090 : : {
1091 : 3647439 : int pos = stack.length ();
1092 : 3647439 : entry->can_split &= visit_bb (entry->bb, return_bb,
1093 : : entry->set_ssa_names,
1094 : : entry->used_ssa_names,
1095 : : entry->non_ssa_vars);
1096 : 2409401 : if (pos <= entry->earliest && !entry->can_split
1097 : 3874953 : && dump_file && (dump_flags & TDF_DETAILS))
1098 : 0 : fprintf (dump_file,
1099 : : "found articulation at bb %i but cannot split\n",
1100 : 0 : entry->bb->index);
1101 : 3647439 : if (pos <= entry->earliest && entry->can_split)
1102 : : {
1103 : 2181887 : if (dump_file && (dump_flags & TDF_DETAILS))
1104 : 6 : fprintf (dump_file, "found articulation at bb %i\n",
1105 : 6 : entry->bb->index);
1106 : 2181887 : current.entry_bb = entry->bb;
1107 : 2181887 : current.ssa_names_to_pass = BITMAP_ALLOC (NULL);
1108 : 2181887 : bitmap_and_compl (current.ssa_names_to_pass,
1109 : 2181887 : entry->used_ssa_names, entry->set_ssa_names);
1110 : 2181887 : current.header_time = overall_time - entry->overall_time;
1111 : 2181887 : current.header_size = overall_size - entry->overall_size;
1112 : 2181887 : current.split_time = entry->overall_time;
1113 : 2181887 : current.split_size = entry->overall_size;
1114 : 2181887 : current.split_bbs = entry->bbs_visited;
1115 : 2181887 : consider_split (¤t, entry->non_ssa_vars, return_bb);
1116 : 2181887 : BITMAP_FREE (current.ssa_names_to_pass);
1117 : : }
1118 : : }
1119 : : /* Do actual DFS walk. */
1120 : 30686600 : if (entry->edge_num
1121 : 15343300 : < (EDGE_COUNT (entry->bb->succs)
1122 : 28327104 : + EDGE_COUNT (entry->bb->preds)))
1123 : : {
1124 : 10516113 : edge e;
1125 : 10516113 : basic_block dest;
1126 : 10516113 : if (entry->edge_num < EDGE_COUNT (entry->bb->succs))
1127 : : {
1128 : 6046481 : e = EDGE_SUCC (entry->bb, entry->edge_num);
1129 : 6046481 : dest = e->dest;
1130 : : }
1131 : : else
1132 : : {
1133 : 4469632 : e = EDGE_PRED (entry->bb, entry->edge_num
1134 : : - EDGE_COUNT (entry->bb->succs));
1135 : 4469632 : dest = e->src;
1136 : : }
1137 : :
1138 : 10516113 : entry->edge_num++;
1139 : :
1140 : : /* New BB to visit, push it to the stack. */
1141 : 10516113 : if (dest != return_bb && dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
1142 : 8939264 : && !dest->aux)
1143 : : {
1144 : 3647439 : stack_entry new_entry;
1145 : :
1146 : 3647439 : new_entry.bb = dest;
1147 : 3647439 : new_entry.edge_num = 0;
1148 : 3647439 : new_entry.overall_time
1149 : 3647439 : = bb_info_vec[dest->index].time;
1150 : 3647439 : new_entry.overall_size
1151 : 3647439 : = bb_info_vec[dest->index].size;
1152 : 3647439 : new_entry.earliest = INT_MAX;
1153 : 3647439 : new_entry.set_ssa_names = BITMAP_ALLOC (NULL);
1154 : 3647439 : new_entry.used_ssa_names = BITMAP_ALLOC (NULL);
1155 : 3647439 : new_entry.bbs_visited = BITMAP_ALLOC (NULL);
1156 : 3647439 : new_entry.non_ssa_vars = BITMAP_ALLOC (NULL);
1157 : 3647439 : new_entry.can_split = true;
1158 : 3647439 : bitmap_set_bit (new_entry.bbs_visited, dest->index);
1159 : 3647439 : stack.safe_push (new_entry);
1160 : 3647439 : dest->aux = (void *)(intptr_t)stack.length ();
1161 : 3647439 : }
1162 : : /* Back edge found, record the earliest point. */
1163 : 6868674 : else if ((intptr_t)dest->aux > 0
1164 : 3428109 : && (intptr_t)dest->aux < entry->earliest)
1165 : 2254434 : entry->earliest = (intptr_t)dest->aux;
1166 : : }
1167 : : /* We are done with examining the edges. Pop off the value from stack
1168 : : and merge stuff we accumulate during the walk. */
1169 : 4827187 : else if (entry->bb != ENTRY_BLOCK_PTR_FOR_FN (cfun))
1170 : : {
1171 : 3647439 : stack_entry *prev = &stack[stack.length () - 2];
1172 : :
1173 : 3647439 : entry->bb->aux = (void *)(intptr_t)-1;
1174 : 3647439 : prev->can_split &= entry->can_split;
1175 : 3647439 : if (prev->set_ssa_names)
1176 : : {
1177 : 2605888 : bitmap_ior_into (prev->set_ssa_names, entry->set_ssa_names);
1178 : 2605888 : bitmap_ior_into (prev->used_ssa_names, entry->used_ssa_names);
1179 : 2605888 : bitmap_ior_into (prev->bbs_visited, entry->bbs_visited);
1180 : 2605888 : bitmap_ior_into (prev->non_ssa_vars, entry->non_ssa_vars);
1181 : : }
1182 : 3647439 : if (prev->earliest > entry->earliest)
1183 : 2204696 : prev->earliest = entry->earliest;
1184 : 3647439 : prev->overall_time += entry->overall_time;
1185 : 3647439 : prev->overall_size += entry->overall_size;
1186 : 3647439 : BITMAP_FREE (entry->set_ssa_names);
1187 : 3647439 : BITMAP_FREE (entry->used_ssa_names);
1188 : 3647439 : BITMAP_FREE (entry->bbs_visited);
1189 : 3647439 : BITMAP_FREE (entry->non_ssa_vars);
1190 : 3647439 : stack.pop ();
1191 : : }
1192 : : else
1193 : 1179748 : stack.pop ();
1194 : : }
1195 : 1179748 : ENTRY_BLOCK_PTR_FOR_FN (cfun)->aux = NULL;
1196 : 5199552 : FOR_EACH_BB_FN (bb, cfun)
1197 : 4019804 : bb->aux = NULL;
1198 : 1179748 : stack.release ();
1199 : 1179748 : BITMAP_FREE (current.ssa_names_to_pass);
1200 : 1179748 : }
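
For reference, here is a minimal standalone sketch of the textbook recursive low-link formulation of the articulation-point search that find_split_points implements iteratively over the CFG; the graph representation and names are illustrative only, not GCC's.

  #include <vector>
  #include <algorithm>

  /* Mark in IS_CUT every vertex whose removal disconnects its component.
     ADJ is an adjacency list, TIN records DFS discovery times (-1 when not
     yet visited), LOW the earliest discovery time reachable from the
     subtree using at most one back edge -- the analogue of the "earliest
     BB reachable via backward edge" tracked above.  */
  static void
  dfs_articulation (int u, int parent, int &timer,
                    const std::vector<std::vector<int> > &adj,
                    std::vector<int> &tin, std::vector<int> &low,
                    std::vector<bool> &is_cut)
  {
    tin[u] = low[u] = timer++;
    int children = 0;
    for (int v : adj[u])
      {
        if (v == parent)
          continue;
        if (tin[v] != -1)
          /* Back edge: U reaches an already-visited ancestor directly.  */
          low[u] = std::min (low[u], tin[v]);
        else
          {
            dfs_articulation (v, u, timer, adj, tin, low, is_cut);
            low[u] = std::min (low[u], low[v]);
            /* No back edge from V's subtree climbs above U, so removing U
               disconnects that subtree (unless U is the DFS root).  */
            if (low[v] >= tin[u] && parent != -1)
              is_cut[u] = true;
            children++;
          }
      }
    /* The root is an articulation only if it has two or more DFS subtrees.  */
    if (parent == -1 && children > 1)
      is_cut[u] = true;
  }

  /* Driver: return the articulation flags for every vertex of ADJ.  */
  static std::vector<bool>
  articulation_points (const std::vector<std::vector<int> > &adj)
  {
    int n = (int) adj.size ();
    std::vector<int> tin (n, -1), low (n, -1);
    std::vector<bool> is_cut (n, false);
    int timer = 0;
    for (int u = 0; u < n; u++)
      if (tin[u] == -1)
        dfs_articulation (u, -1, timer, adj, tin, low, is_cut);
    return is_cut;
  }

find_split_points performs essentially this walk on the CFG viewed as an unoriented graph, except with an explicit heap-allocated stack, and at each articulation it additionally has accumulated the size, time, SSA and non-SSA usage of the component that consider_split needs.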
1201 : :
1202 : : /* Split function at SPLIT_POINT. */
1203 : :
1204 : : static void
1205 : 43867 : split_function (basic_block return_bb, class split_point *split_point,
1206 : : bool add_tsan_func_exit)
1207 : : {
1208 : 43867 : vec<tree> args_to_pass = vNULL;
1209 : 43867 : bitmap args_to_skip;
1210 : 43867 : tree parm;
1211 : 43867 : int num = 0;
1212 : 43867 : cgraph_node *node, *cur_node = cgraph_node::get (current_function_decl);
1213 : 43867 : basic_block call_bb;
1214 : 43867 : gcall *call, *tsan_func_exit_call = NULL;
1215 : 43867 : edge e;
1216 : 43867 : edge_iterator ei;
1217 : 43867 : tree retval = NULL, real_retval = NULL;
1218 : 43867 : gimple *last_stmt = NULL;
1219 : 43867 : unsigned int i;
1220 : 43867 : tree arg, ddef;
1221 : :
1222 : 43867 : if (dump_file)
1223 : : {
1224 : 7 : fprintf (dump_file, "\n\nSplitting function at:\n");
1225 : 7 : dump_split_point (dump_file, split_point);
1226 : : }
1227 : :
1228 : 43867 : if (cur_node->can_change_signature)
1229 : 43145 : args_to_skip = BITMAP_ALLOC (NULL);
1230 : : else
1231 : : args_to_skip = NULL;
1232 : :
1233 : : /* Collect the parameters of new function and args_to_skip bitmap. */
1234 : 43867 : for (parm = DECL_ARGUMENTS (current_function_decl);
1235 : 138867 : parm; parm = DECL_CHAIN (parm), num++)
1236 : 95000 : if (args_to_skip
1237 : 95000 : && (!is_gimple_reg (parm)
1238 : 90864 : || (ddef = ssa_default_def (cfun, parm)) == NULL_TREE
1239 : 88764 : || !bitmap_bit_p (split_point->ssa_names_to_pass,
1240 : 88764 : SSA_NAME_VERSION (ddef))))
1241 : 33046 : bitmap_set_bit (args_to_skip, num);
1242 : : else
1243 : : {
1244 : : /* This parm might not have been used up to now, but is going to be
1245 : : used, hence register it. */
1246 : 61954 : if (is_gimple_reg (parm))
1247 : 61939 : arg = get_or_create_ssa_default_def (cfun, parm);
1248 : : else
1249 : 15 : arg = parm;
1250 : :
1251 : 61954 : if (!useless_type_conversion_p (DECL_ARG_TYPE (parm), TREE_TYPE (arg)))
1252 : 1660 : arg = fold_convert (DECL_ARG_TYPE (parm), arg);
1253 : 61954 : args_to_pass.safe_push (arg);
1254 : : }
1255 : :
1256 : : /* See if the split function will return. */
1257 : 43867 : bool split_part_return_p = false;
1258 : 228385 : FOR_EACH_EDGE (e, ei, return_bb->preds)
1259 : : {
1260 : 184518 : if (bitmap_bit_p (split_point->split_bbs, e->src->index))
1261 : 124635 : split_part_return_p = true;
1262 : : }
1263 : :
1264 : : /* Add return block to what will become the split function.
1265 : : We do not return; no return block is needed. */
1266 : 43867 : if (!split_part_return_p)
1267 : : ;
1268 : : /* We have no return block, so nothing is needed. */
1269 : 32864 : else if (return_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
1270 : : ;
1271 : : /* When we do not want to return a value, we need to construct
1272 : : a new return block with an empty return statement.
1273 : : FIXME: Once we are able to change the return type, we should change the
1274 : : function to return void instead of just outputting a function with an
1275 : : undefined return value. For structures this affects quality of codegen. */
1276 : 32760 : else if ((retval = find_retval (return_bb))
1277 : 32760 : && !split_point->split_part_set_retval)
1278 : : {
1279 : 3563 : bool redirected = true;
1280 : 3563 : basic_block new_return_bb = create_basic_block (NULL, 0, return_bb);
1281 : 3563 : gimple_stmt_iterator gsi = gsi_start_bb (new_return_bb);
1282 : 3563 : gsi_insert_after (&gsi, gimple_build_return (NULL), GSI_NEW_STMT);
1283 : 3563 : new_return_bb->count = profile_count::zero ();
1284 : 14592 : while (redirected)
1285 : : {
1286 : 7466 : redirected = false;
1287 : 19470 : FOR_EACH_EDGE (e, ei, return_bb->preds)
1288 : 15907 : if (bitmap_bit_p (split_point->split_bbs, e->src->index))
1289 : : {
1290 : 3903 : new_return_bb->count += e->count ();
1291 : 3903 : redirect_edge_and_branch (e, new_return_bb);
1292 : 3903 : redirected = true;
1293 : 3903 : break;
1294 : : }
1295 : : }
1296 : 3563 : e = make_single_succ_edge (new_return_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
1297 : 3563 : add_bb_to_loop (new_return_bb, current_loops->tree_root);
1298 : 3563 : bitmap_set_bit (split_point->split_bbs, new_return_bb->index);
1299 : : }
1300 : : /* When we pass around the value, use existing return block. */
1301 : : else
1302 : 29197 : bitmap_set_bit (split_point->split_bbs, return_bb->index);
1303 : :
1304 : : /* If RETURN_BB has virtual operand PHIs, they must be removed and the
1305 : : virtual operand marked for renaming as we change the CFG in a way that
1306 : : tree-inline is not able to compensate for.
1307 : :
1308 : : Note this can happen whether or not we have a return value. If we have
1309 : : a return value, then RETURN_BB may have PHIs for real operands too. */
1310 : 43867 : if (return_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
1311 : : {
1312 : 38152 : bool phi_p = false;
1313 : 38152 : for (gphi_iterator gsi = gsi_start_phis (return_bb);
1314 : 79282 : !gsi_end_p (gsi);)
1315 : : {
1316 : 41130 : gphi *stmt = gsi.phi ();
1317 : 82260 : if (!virtual_operand_p (gimple_phi_result (stmt)))
1318 : : {
1319 : 9842 : gsi_next (&gsi);
1320 : 9842 : continue;
1321 : : }
1322 : 31288 : mark_virtual_phi_result_for_renaming (stmt);
1323 : 31288 : remove_phi_node (&gsi, true);
1324 : 31288 : phi_p = true;
1325 : : }
1326 : : /* In reality we have to rename the reaching definition of the
1327 : : virtual operand at return_bb as we will eventually release it
1328 : : when we remove the code region we outlined.
1329 : : So we have to rename all immediate virtual uses of that region
1330 : : if we didn't see a PHI definition yet. */
1331 : : /* ??? In real reality we want to set the reaching vdef of the
1332 : : entry of the SESE region as the vuse of the call and the reaching
1333 : : vdef of the exit of the SESE region as the vdef of the call. */
1334 : 38152 : if (!phi_p)
1335 : 13728 : for (gimple_stmt_iterator gsi = gsi_start_bb (return_bb);
1336 : 25756 : !gsi_end_p (gsi);
1337 : 18892 : gsi_next (&gsi))
1338 : : {
1339 : 19790 : gimple *stmt = gsi_stmt (gsi);
1340 : 26654 : if (gimple_vuse (stmt))
1341 : : {
1342 : 6864 : gimple_set_vuse (stmt, NULL_TREE);
1343 : 6864 : update_stmt (stmt);
1344 : : }
1345 : 26654 : if (gimple_vdef (stmt))
1346 : : break;
1347 : : }
1348 : : }
1349 : :
1350 : 43867 : ipa_param_adjustments *adjustments;
1351 : 87734 : bool skip_return = (!split_part_return_p
1352 : 43867 : || !split_point->split_part_set_retval);
1353 : : /* TODO: Perhaps get rid of args_to_skip entirely, after we make sure the
1354 : : debug info generation and discrepancy avoiding works well too. */
1355 : 43145 : if ((args_to_skip && !bitmap_empty_p (args_to_skip))
1356 : 67349 : || skip_return)
1357 : : {
1358 : 23208 : vec<ipa_adjusted_param, va_gc> *new_params = NULL;
1359 : 23208 : unsigned j;
1360 : 23208 : for (parm = DECL_ARGUMENTS (current_function_decl), j = 0;
1361 : 78672 : parm; parm = DECL_CHAIN (parm), j++)
1362 : 55464 : if (!args_to_skip || !bitmap_bit_p (args_to_skip, j))
1363 : : {
1364 : 22418 : ipa_adjusted_param adj;
1365 : 22418 : memset (&adj, 0, sizeof (adj));
1366 : 22418 : adj.op = IPA_PARAM_OP_COPY;
1367 : 22418 : adj.base_index = j;
1368 : 22418 : adj.prev_clone_index = j;
1369 : 22418 : vec_safe_push (new_params, adj);
1370 : : }
1371 : 23208 : adjustments = new ipa_param_adjustments (new_params, j, skip_return);
1372 : : }
1373 : : else
1374 : : adjustments = NULL;
1375 : :
1376 : : /* Now create the actual clone. */
1377 : 43867 : cgraph_edge::rebuild_edges ();
1378 : 43867 : node = cur_node->create_version_clone_with_body
1379 : 43867 : (vNULL, NULL, adjustments,
1380 : : split_point->split_bbs, split_point->entry_bb, "part");
1381 : 43867 : delete adjustments;
1382 : 43867 : node->split_part = true;
1383 : :
1384 : 43867 : if (cur_node->same_comdat_group)
1385 : : {
1386 : : /* TODO: call is versionable if we make sure that all
1387 : : callers are inside of a comdat group. */
1388 : 1747 : cur_node->calls_comdat_local = true;
1389 : 1747 : node->add_to_same_comdat_group (cur_node);
1390 : : }
1391 : :
1392 : :
1393 : : /* Let's take a time profile for the split function. */
1394 : 43867 : if (cur_node->tp_first_run)
1395 : 2 : node->tp_first_run = cur_node->tp_first_run + 1;
1396 : :
1397 : : /* For usual cloning it is enough to clear the builtin only when the signature
1398 : : changes. For partial inlining we however cannot expect the part
1399 : : of the builtin implementation to have the same semantics as the whole. */
1400 : 43867 : if (fndecl_built_in_p (node->decl))
1401 : 32 : set_decl_built_in_function (node->decl, NOT_BUILT_IN, 0);
1402 : :
1403 : : /* If return_bb contains any clobbers that refer to SSA_NAMEs
1404 : : set in the split part, remove them. Also reset debug stmts that
1405 : : refer to SSA_NAMEs set in the split part. */
1406 : 43867 : if (return_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
1407 : : {
1408 : 38152 : gimple_stmt_iterator gsi = gsi_start_bb (return_bb);
1409 : 122287 : while (!gsi_end_p (gsi))
1410 : : {
1411 : 84135 : tree op;
1412 : 84135 : ssa_op_iter iter;
1413 : 84135 : gimple *stmt = gsi_stmt (gsi);
1414 : 84135 : bool remove = false;
1415 : 84135 : if (gimple_clobber_p (stmt) || is_gimple_debug (stmt))
1416 : 46397 : FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
1417 : : {
1418 : 2395 : basic_block bb = gimple_bb (SSA_NAME_DEF_STMT (op));
1419 : 2395 : if (op != retval
1420 : 2395 : && bb
1421 : 128 : && bb != return_bb
1422 : 2514 : && bitmap_bit_p (split_point->split_bbs, bb->index))
1423 : : {
1424 : 0 : if (is_gimple_debug (stmt))
1425 : : {
1426 : 0 : gimple_debug_bind_reset_value (stmt);
1427 : 0 : update_stmt (stmt);
1428 : : }
1429 : : else
1430 : : remove = true;
1431 : : break;
1432 : : }
1433 : : }
1434 : 0 : if (remove)
1435 : 0 : gsi_remove (&gsi, true);
1436 : : else
1437 : 84135 : gsi_next (&gsi);
1438 : : }
1439 : : }
1440 : :
1441 : : /* If the original function is declared inline, there is no point in issuing
1442 : : a warning for the non-inlinable part. */
1443 : 43867 : DECL_NO_INLINE_WARNING_P (node->decl) = 1;
1444 : 43867 : cur_node->remove_callees ();
1445 : 43867 : cur_node->remove_all_references ();
1446 : 43867 : if (!split_part_return_p)
1447 : 11003 : TREE_THIS_VOLATILE (node->decl) = 1;
1448 : 43867 : if (dump_file)
1449 : 7 : dump_function_to_file (node->decl, dump_file, dump_flags);
1450 : :
1451 : : /* Create the basic block we place the call into. It is the entry basic block
1452 : : split after the last label. */
1453 : 43867 : call_bb = split_point->entry_bb;
1454 : 89318 : for (gimple_stmt_iterator gsi = gsi_start_bb (call_bb); !gsi_end_p (gsi);)
1455 : 45427 : if (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL)
1456 : : {
1457 : 1584 : last_stmt = gsi_stmt (gsi);
1458 : 1584 : gsi_next (&gsi);
1459 : : }
1460 : : else
1461 : : break;
1462 : 43867 : call_bb->count = split_point->count;
1463 : 43867 : e = split_block (split_point->entry_bb, last_stmt);
1464 : 43867 : remove_edge (e);
1465 : :
1466 : : /* Produce the call statement. */
1467 : 43867 : gimple_stmt_iterator gsi = gsi_last_bb (call_bb);
1468 : 105821 : FOR_EACH_VEC_ELT (args_to_pass, i, arg)
1469 : 61954 : if (!is_gimple_val (arg))
1470 : : {
1471 : 1667 : arg = force_gimple_operand_gsi (&gsi, arg, true, NULL_TREE,
1472 : : false, GSI_CONTINUE_LINKING);
1473 : 1667 : args_to_pass[i] = arg;
1474 : : }
1475 : 43867 : call = gimple_build_call_vec (node->decl, args_to_pass);
1476 : 43867 : gimple_set_block (call, DECL_INITIAL (current_function_decl));
1477 : 43867 : args_to_pass.release ();
1478 : :
1479 : : /* For optimized-away parameters, add on the caller side, before the call,
1480 : : statements of the form
1481 : : DEBUG D#X => parm_Y(D)
1482 : : and associate D#X with parm in the decl_debug_args_lookup
1483 : : vector, so that debug info can say that if parameter parm had been passed,
1484 : : it would have had the value parm_Y(D). */
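      /* The debug-args vector used below stores its entries pairwise: the
         parameter's DECL_ORIGIN at an even index and the matching debug decl
         right after it, which is why the search advances by two. A hypothetical,
         standalone C++ model of that layout (not the GCC API; ids are made up):

             #include <cstdio>
             #include <vector>

             // Even slots hold a parameter id, odd slots the matching debug-decl id.
             static int
             find_debug_decl (const std::vector<int> &debug_args, int parm)
             {
               for (std::size_t i = 0; i + 1 < debug_args.size (); i += 2)
                 if (debug_args[i] == parm)
                   return debug_args[i + 1];
               return -1;  // no entry recorded for this parameter
             }

             int main ()
             {
               std::vector<int> debug_args = {10, 100, 12, 102};
               std::printf ("%d\n", find_debug_decl (debug_args, 12));  // prints 102
             }
      */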
1485 : 43867 : if (args_to_skip)
1486 : : {
1487 : 43145 : vec<tree, va_gc> **debug_args = NULL;
1488 : 43145 : unsigned i = 0, len = 0;
1489 : 43145 : if (MAY_HAVE_DEBUG_BIND_STMTS)
1490 : : {
1491 : 37609 : debug_args = decl_debug_args_lookup (node->decl);
1492 : 37609 : if (debug_args)
1493 : 15527 : len = vec_safe_length (*debug_args);
1494 : : }
1495 : 43145 : for (parm = DECL_ARGUMENTS (current_function_decl), num = 0;
1496 : 135776 : parm; parm = DECL_CHAIN (parm), num++)
1497 : 92631 : if (bitmap_bit_p (args_to_skip, num) && is_gimple_reg (parm))
1498 : : {
1499 : 31279 : tree ddecl;
1500 : 31279 : gimple *def_temp;
1501 : :
1502 : : /* This needs to be done even without
1503 : : MAY_HAVE_DEBUG_BIND_STMTS, otherwise if it didn't exist
1504 : : before, we'd end up with different SSA_NAME_VERSIONs
1505 : : between -g and -g0. */
1506 : 31279 : arg = get_or_create_ssa_default_def (cfun, parm);
1507 : 31279 : if (!MAY_HAVE_DEBUG_BIND_STMTS || debug_args == NULL)
1508 : 5722 : continue;
1509 : :
1510 : 71174 : while (i < len && (**debug_args)[i] != DECL_ORIGIN (parm))
1511 : 10030 : i += 2;
1512 : 25557 : if (i >= len)
1513 : 0 : continue;
1514 : 25557 : ddecl = (**debug_args)[i + 1];
1515 : 25557 : def_temp
1516 : 25557 : = gimple_build_debug_bind (ddecl, unshare_expr (arg), call);
1517 : 25557 : gsi_insert_after (&gsi, def_temp, GSI_NEW_STMT);
1518 : : }
1519 : 43145 : BITMAP_FREE (args_to_skip);
1520 : : }
1521 : :
1522 : : /* We avoid taking the address of any variable used by the split part,
1523 : : so return slot optimization is always possible. Moreover, this is
1524 : : required to make DECL_BY_REFERENCE work. */
1525 : 43867 : if (aggregate_value_p (DECL_RESULT (current_function_decl),
1526 : 43867 : TREE_TYPE (current_function_decl))
1527 : 43867 : && (!is_gimple_reg_type (TREE_TYPE (DECL_RESULT (current_function_decl)))
1528 : 89 : || DECL_BY_REFERENCE (DECL_RESULT (current_function_decl))))
1529 : 343 : gimple_call_set_return_slot_opt (call, true);
1530 : :
1531 : 43867 : if (add_tsan_func_exit)
1532 : 2 : tsan_func_exit_call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
1533 : :
1534 : : /* Update the return value. This is a bit tricky. When the split part does
1535 : : not return, do nothing. When it returns, we might need to update return_bb
1536 : : or produce a new return statement. */
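   /* Roughly, for the common case below (return_bb exists and the split part
      computes the return value), the caller half ends up looking like this
      illustrative, simplified GIMPLE-like sketch (block and SSA names are
      made up, not actual dump output):

          <call_bb>:
            retval_7 = foo.part.0 (a_1, b_2);   // new SSA copy of the old retval
            goto <return_bb>;

          <return_bb>:
            # retval_9 = PHI <retval_7(call_bb), retval_3(other_pred)>
            return retval_9;

      i.e. either an existing PHI in return_bb gains an argument for the new
      edge, or the single return/assignment statement in return_bb is rewritten
      to use the new name. */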
1537 : 43867 : if (!split_part_return_p)
1538 : : {
1539 : 11003 : gsi_insert_after (&gsi, call, GSI_NEW_STMT);
1540 : 11003 : if (tsan_func_exit_call)
1541 : 0 : gsi_insert_after (&gsi, tsan_func_exit_call, GSI_NEW_STMT);
1542 : : }
1543 : : else
1544 : : {
1545 : 65728 : e = make_single_succ_edge (call_bb, return_bb,
1546 : 32864 : return_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
1547 : 32864 : ? 0 : EDGE_FALLTHRU);
1548 : :
1549 : : /* If there is return basic block, see what value we need to store
1550 : : /* If there is a return basic block, see where the return value needs to be
1551 : : stored and put the call just before it. */
1552 : : {
1553 : 32760 : real_retval = retval;
1554 : 32760 : if (real_retval && split_point->split_part_set_retval)
1555 : : {
1556 : 11762 : gphi_iterator psi;
1557 : :
1558 : : /* See if we need a new SSA_NAME for the result.
1559 : : When DECL_BY_REFERENCE is true, retval is actually a pointer to the
1560 : : return value and it is constant throughout the function. */
1561 : 11762 : if (TREE_CODE (retval) == SSA_NAME
1562 : 11762 : && !DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
1563 : : {
1564 : 9575 : retval = copy_ssa_name (retval, call);
1565 : :
1566 : : /* See if there is a PHI defining the return value. */
1567 : 9575 : for (psi = gsi_start_phis (return_bb);
1568 : 9575 : !gsi_end_p (psi); gsi_next (&psi))
1569 : 19150 : if (!virtual_operand_p (gimple_phi_result (psi.phi ())))
1570 : : break;
1571 : :
1572 : : /* When there is a PHI, just update its value. */
1573 : 9575 : if (TREE_CODE (retval) == SSA_NAME
1574 : 9575 : && !gsi_end_p (psi))
1575 : 9575 : add_phi_arg (psi.phi (), retval, e, UNKNOWN_LOCATION);
1576 : : /* Otherwise update the return BB itself.
1577 : : find_return_bb allows at most one assignment to the return value,
1578 : : so update the first such statement. */
1579 : : else
1580 : : {
1581 : 0 : gimple_stmt_iterator bsi;
1582 : 0 : for (bsi = gsi_start_bb (return_bb); !gsi_end_p (bsi);
1583 : 0 : gsi_next (&bsi))
1584 : 0 : if (greturn *return_stmt
1585 : 0 : = dyn_cast <greturn *> (gsi_stmt (bsi)))
1586 : : {
1587 : 0 : gimple_return_set_retval (return_stmt, retval);
1588 : 0 : break;
1589 : : }
1590 : 0 : else if (gimple_code (gsi_stmt (bsi)) == GIMPLE_ASSIGN
1591 : 0 : && !gimple_clobber_p (gsi_stmt (bsi)))
1592 : : {
1593 : 0 : gimple_assign_set_rhs1 (gsi_stmt (bsi), retval);
1594 : 0 : break;
1595 : : }
1596 : 0 : update_stmt (gsi_stmt (bsi));
1597 : : /* Also adjust clobbers and debug stmts in return_bb. */
1598 : 0 : for (bsi = gsi_start_bb (return_bb); !gsi_end_p (bsi);
1599 : 0 : gsi_next (&bsi))
1600 : : {
1601 : 0 : gimple *stmt = gsi_stmt (bsi);
1602 : 0 : if (gimple_clobber_p (stmt)
1603 : 0 : || is_gimple_debug (stmt))
1604 : : {
1605 : 0 : ssa_op_iter iter;
1606 : 0 : use_operand_p use_p;
1607 : 0 : bool update = false;
1608 : 0 : FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter,
1609 : : SSA_OP_USE)
1610 : 0 : if (USE_FROM_PTR (use_p) == real_retval)
1611 : : {
1612 : 0 : SET_USE (use_p, retval);
1613 : 0 : update = true;
1614 : : }
1615 : 0 : if (update)
1616 : 0 : update_stmt (stmt);
1617 : : }
1618 : : }
1619 : : }
1620 : : }
1621 : 11762 : if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
1622 : : {
1623 : 0 : gimple_call_set_lhs (call, build_simple_mem_ref (retval));
1624 : 0 : gsi_insert_after (&gsi, call, GSI_NEW_STMT);
1625 : : }
1626 : : else
1627 : : {
1628 : 11762 : tree restype;
1629 : 11762 : restype = TREE_TYPE (DECL_RESULT (current_function_decl));
1630 : 11762 : gsi_insert_after (&gsi, call, GSI_NEW_STMT);
1631 : 11762 : if (!useless_type_conversion_p (TREE_TYPE (retval), restype))
1632 : : {
1633 : 0 : gimple *cpy;
1634 : 0 : tree tem = create_tmp_reg (restype);
1635 : 0 : tem = make_ssa_name (tem, call);
1636 : 0 : cpy = gimple_build_assign (retval, NOP_EXPR, tem);
1637 : 0 : gsi_insert_after (&gsi, cpy, GSI_NEW_STMT);
1638 : 0 : retval = tem;
1639 : : }
1640 : 11762 : gimple_call_set_lhs (call, retval);
1641 : 11762 : update_stmt (call);
1642 : : }
1643 : 11762 : }
1644 : : else
1645 : 20998 : gsi_insert_after (&gsi, call, GSI_NEW_STMT);
1646 : 32760 : if (tsan_func_exit_call)
1647 : 2 : gsi_insert_after (&gsi, tsan_func_exit_call, GSI_NEW_STMT);
1648 : : }
1649 : : /* We don't use the return block (either there is no return in the function
1650 : : or there are multiple of them), so create a new basic block with a
1651 : : return statement. */
1652 : : else
1653 : : {
1654 : 104 : greturn *ret;
1655 : 104 : if (split_point->split_part_set_retval
1656 : 104 : && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1657 : : {
1658 : 71 : retval = DECL_RESULT (current_function_decl);
1659 : :
1660 : : /* We use a temporary register to hold the value when aggregate_value_p
1661 : : is false. Similarly, for DECL_BY_REFERENCE we must avoid an extra
1662 : : copy. */
1663 : 71 : if (!aggregate_value_p (retval, TREE_TYPE (current_function_decl))
1664 : 71 : && !DECL_BY_REFERENCE (retval))
1665 : 71 : retval = create_tmp_reg (TREE_TYPE (retval));
1666 : 71 : if (is_gimple_reg (retval))
1667 : : {
1668 : : /* When returning by reference, there is only one SSA name
1669 : : assigned to the RESULT_DECL (that is, the pointer to the return value).
1670 : : Look it up or create a new one if it is missing. */
1671 : 64 : if (DECL_BY_REFERENCE (retval))
1672 : 0 : retval = get_or_create_ssa_default_def (cfun, retval);
1673 : : /* Otherwise produce a new SSA name for the return value. */
1674 : : else
1675 : 64 : retval = make_ssa_name (retval, call);
1676 : : }
1677 : 71 : if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
1678 : 0 : gimple_call_set_lhs (call, build_simple_mem_ref (retval));
1679 : : else
1680 : 71 : gimple_call_set_lhs (call, retval);
1681 : 71 : gsi_insert_after (&gsi, call, GSI_NEW_STMT);
1682 : : }
1683 : : else
1684 : : {
1685 : 33 : gsi_insert_after (&gsi, call, GSI_NEW_STMT);
1686 : 33 : if (retval
1687 : 0 : && is_gimple_reg_type (TREE_TYPE (retval))
1688 : 33 : && !is_gimple_val (retval))
1689 : : {
1690 : 0 : gassign *g
1691 : 0 : = gimple_build_assign (make_ssa_name (TREE_TYPE (retval)),
1692 : : retval);
1693 : 0 : retval = gimple_assign_lhs (g);
1694 : 0 : gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1695 : : }
1696 : : }
1697 : 104 : if (tsan_func_exit_call)
1698 : 0 : gsi_insert_after (&gsi, tsan_func_exit_call, GSI_NEW_STMT);
1699 : 104 : ret = gimple_build_return (retval);
1700 : 104 : gsi_insert_after (&gsi, ret, GSI_NEW_STMT);
1701 : : }
1702 : : }
1703 : 43867 : free_dominance_info (CDI_DOMINATORS);
1704 : 43867 : free_dominance_info (CDI_POST_DOMINATORS);
1705 : 43867 : compute_fn_summary (node, true);
1706 : 43867 : }
1707 : :
1708 : : /* Execute function splitting pass. */
1709 : :
1710 : : static unsigned int
1711 : 1882629 : execute_split_functions (void)
1712 : : {
1713 : 1882629 : gimple_stmt_iterator bsi;
1714 : 1882629 : basic_block bb;
1715 : 1882629 : sreal overall_time = 0;
1716 : 1882629 : int overall_size = 0;
1717 : 1882629 : int todo = 0;
1718 : 1882629 : struct cgraph_node *node = cgraph_node::get (current_function_decl);
1719 : :
1720 : 1882629 : if (flags_from_decl_or_type (current_function_decl)
1721 : 1882629 : & (ECF_NORETURN|ECF_MALLOC|ECF_RETURNS_TWICE))
1722 : : {
1723 : 47133 : if (dump_file)
1724 : 0 : fprintf (dump_file, "Not splitting: noreturn/malloc/returns_twice "
1725 : : "function.\n");
1726 : 47133 : return 0;
1727 : : }
1728 : 1835496 : if (MAIN_NAME_P (DECL_NAME (current_function_decl)))
1729 : : {
1730 : 55161 : if (dump_file)
1731 : 3 : fprintf (dump_file, "Not splitting: main function.\n");
1732 : 55161 : return 0;
1733 : : }
1734 : 1780335 : if (node->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED)
1735 : : {
1736 : 637 : if (dump_file)
1737 : 0 : fprintf (dump_file, "Not splitting: function is unlikely executed.\n");
1738 : 637 : return 0;
1739 : : }
1740 : : /* This can be relaxed; the function might become inlinable after splitting
1741 : : away the uninlinable part. */
1742 : 1779698 : if (ipa_fn_summaries
1743 : 1779663 : && ipa_fn_summaries->get (node)
1744 : 1779660 : && !ipa_fn_summaries->get (node)->inlinable)
1745 : : {
1746 : 188858 : if (dump_file)
1747 : 0 : fprintf (dump_file, "Not splitting: not inlinable.\n");
1748 : 188858 : return 0;
1749 : : }
1750 : 1590840 : if (DECL_DISREGARD_INLINE_LIMITS (node->decl))
1751 : : {
1752 : 202994 : if (dump_file)
1753 : 0 : fprintf (dump_file, "Not splitting: disregarding inline limits.\n");
1754 : 202994 : return 0;
1755 : : }
1756 : : /* This can be relaxed; most of the versioning tests actually prevent
1757 : : duplication. */
1758 : 1387846 : if (!tree_versionable_function_p (current_function_decl))
1759 : : {
1760 : 33 : if (dump_file)
1761 : 0 : fprintf (dump_file, "Not splitting: not versionable.\n");
1762 : 33 : return 0;
1763 : : }
1764 : : /* FIXME: we could support this. */
1765 : 1387813 : if (DECL_STRUCT_FUNCTION (current_function_decl)->static_chain_decl)
1766 : : {
1767 : 1610 : if (dump_file)
1768 : 0 : fprintf (dump_file, "Not splitting: nested function.\n");
1769 : 1610 : return 0;
1770 : : }
1771 : :
1772 : : /* See if it makes sense to try to split.
1773 : : It makes sense to split if we are going to inline, that is if we have
1774 : : direct calls to handle, or if direct calls may appear as a result of
1775 : : indirect inlining or LTO. Also treat -fprofile-generate like LTO to
1776 : : allow non-LTO training runs for an LTO -fprofile-use build.
1777 : :
1778 : : Note that we are not completely conservative about disqualifying functions
1779 : : called once. It is possible that the caller is called more than once and
1780 : : then inlining would still be beneficial. */
1781 : 1386203 : if ((!node->callers
1782 : : /* Local functions called once will be completely inlined most of the time. */
1783 : 967147 : || (!node->callers->next_caller && node->local))
1784 : 485690 : && !node->address_taken
1785 : 400236 : && !node->has_aliases_p ()
1786 : 1603735 : && (!flag_lto || !node->externally_visible))
1787 : : {
1788 : 206267 : if (dump_file)
1789 : 21 : fprintf (dump_file, "Not splitting: not called directly "
1790 : : "or called once.\n");
1791 : 206267 : return 0;
1792 : : }
1793 : :
1794 : : /* FIXME: We can actually split if splitting reduces call overhead. */
1795 : 1179936 : if (!flag_inline_small_functions
1796 : 1179936 : && !DECL_DECLARED_INLINE_P (current_function_decl))
1797 : : {
1798 : 23 : if (dump_file)
1799 : 0 : fprintf (dump_file, "Not splitting: not autoinlining and function"
1800 : : " is not inline.\n");
1801 : 23 : return 0;
1802 : : }
1803 : :
1804 : 1179913 : if (lookup_attribute ("noinline", DECL_ATTRIBUTES (current_function_decl)))
1805 : : {
1806 : 0 : if (dump_file)
1807 : 0 : fprintf (dump_file, "Not splitting: function is noinline.\n");
1808 : 0 : return 0;
1809 : : }
1810 : 1179913 : if (lookup_attribute ("section", DECL_ATTRIBUTES (current_function_decl)))
1811 : : {
1812 : 15 : if (dump_file)
1813 : 0 : fprintf (dump_file, "Not splitting: function is in user defined "
1814 : : "section.\n");
1815 : 15 : return 0;
1816 : : }
1817 : 1179898 : if (!strub_splittable_p (node))
1818 : : {
1819 : 134 : if (dump_file)
1820 : 0 : fprintf (dump_file, "Not splitting: function is a strub context.\n");
1821 : 134 : return 0;
1822 : : }
1823 : :
1824 : : /* We enforce splitting after loop headers when profile info is not
1825 : : available. */
1826 : 1179764 : if (profile_status_for_fn (cfun) != PROFILE_READ)
1827 : 1179691 : mark_dfs_back_edges ();
1828 : :
1829 : : /* Initialize bitmap to track forbidden calls. */
1830 : 1179764 : forbidden_dominators = BITMAP_ALLOC (NULL);
1831 : 1179764 : calculate_dominance_info (CDI_DOMINATORS);
1832 : :
1833 : : /* Compute local info about basic blocks and determine function size/time. */
1834 : 1179764 : bb_info_vec.safe_grow_cleared (last_basic_block_for_fn (cfun) + 1, true);
1835 : 1179764 : best_split_point.split_bbs = NULL;
1836 : 1179764 : basic_block return_bb = find_return_bb ();
1837 : 1179764 : int tsan_exit_found = -1;
1838 : 5199627 : FOR_EACH_BB_FN (bb, cfun)
1839 : : {
1840 : 4019879 : sreal time = 0;
1841 : 4019879 : int size = 0;
1842 : 4019879 : sreal freq = bb->count.to_sreal_scale
1843 : 4019879 : (ENTRY_BLOCK_PTR_FOR_FN (cfun)->count);
1844 : :
1845 : 4019879 : if (dump_file && (dump_flags & TDF_DETAILS))
1846 : 9 : fprintf (dump_file, "Basic block %i\n", bb->index);
1847 : :
1848 : 40555776 : for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
1849 : : {
1850 : 32516034 : sreal this_time;
1851 : 32516034 : int this_size;
1852 : 32516034 : gimple *stmt = gsi_stmt (bsi);
1853 : :
1854 : 32516034 : this_size = estimate_num_insns (stmt, &eni_size_weights);
1855 : 32516034 : this_time = (sreal)estimate_num_insns (stmt, &eni_time_weights)
1856 : 32516034 : * freq;
1857 : 32516034 : size += this_size;
1858 : 32516034 : time += this_time;
1859 : 32516034 : check_forbidden_calls (stmt);
1860 : :
1861 : 32516034 : if (dump_file && (dump_flags & TDF_DETAILS))
1862 : : {
1863 : 17 : fprintf (dump_file, " freq:%4.2f size:%3i time:%4.2f ",
1864 : : freq.to_double (), this_size, this_time.to_double ());
1865 : 17 : print_gimple_stmt (dump_file, stmt, 0);
1866 : : }
1867 : :
1868 : 32516034 : if ((flag_sanitize & SANITIZE_THREAD)
1869 : 32516034 : && gimple_call_internal_p (stmt, IFN_TSAN_FUNC_EXIT))
1870 : : {
1871 : : /* We handle TSAN_FUNC_EXIT for splitting either in the
1872 : : return_bb, or in its immediate predecessors. */
1873 : 93 : if ((bb != return_bb && !find_edge (bb, return_bb))
1874 : 198 : || (tsan_exit_found != -1
1875 : 2 : && tsan_exit_found != (bb != return_bb)))
1876 : : {
1877 : 16 : if (dump_file)
1878 : 0 : fprintf (dump_file, "Not splitting: TSAN_FUNC_EXIT"
1879 : : " in unexpected basic block.\n");
1880 : 16 : BITMAP_FREE (forbidden_dominators);
1881 : 16 : bb_info_vec.release ();
1882 : 16 : return 0;
1883 : : }
1884 : 105 : tsan_exit_found = bb != return_bb;
1885 : : }
1886 : : }
1887 : 4019863 : overall_time += time;
1888 : 4019863 : overall_size += size;
1889 : 4019863 : bb_info_vec[bb->index].time = time;
1890 : 4019863 : bb_info_vec[bb->index].size = size;
1891 : : }
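   /* A hypothetical, standalone C++ sketch (not GCC code) of the accounting
      done in the loop above: size adds up per statement unscaled, while time
      is weighted by how often each block executes relative to the entry block.
      For brevity it treats a statement's size and time estimate as the same
      number, which the real code keeps separate.

          #include <cstdio>

          struct bb_sample { int insns; double count; };   // made-up inputs

          int main ()
          {
            const double entry_count = 100.0;
            const bb_sample blocks[] = { {4, 100.0}, {20, 5.0}, {3, 100.0} };

            int overall_size = 0;
            double overall_time = 0.0;
            for (const bb_sample &b : blocks)
              {
                double freq = b.count / entry_count;   // relative execution frequency
                overall_size += b.insns;               // static size estimate
                overall_time += b.insns * freq;        // frequency-weighted time estimate
              }
            std::printf ("size=%d time=%.2f\n", overall_size, overall_time);  // size=27 time=8.00
          }
   */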
1892 : 1179748 : find_split_points (return_bb, overall_time, overall_size);
1893 : 1179748 : if (best_split_point.split_bbs)
1894 : : {
1895 : 43867 : split_function (return_bb, &best_split_point, tsan_exit_found == 1);
1896 : 43867 : BITMAP_FREE (best_split_point.ssa_names_to_pass);
1897 : 43867 : BITMAP_FREE (best_split_point.split_bbs);
1898 : 43867 : todo = TODO_update_ssa | TODO_cleanup_cfg;
1899 : : }
1900 : 1179748 : BITMAP_FREE (forbidden_dominators);
1901 : 1179748 : bb_info_vec.release ();
1902 : 1179748 : return todo;
1903 : : }
1904 : :
1905 : : namespace {
1906 : :
1907 : : const pass_data pass_data_split_functions =
1908 : : {
1909 : : GIMPLE_PASS, /* type */
1910 : : "fnsplit", /* name */
1911 : : OPTGROUP_NONE, /* optinfo_flags */
1912 : : TV_IPA_FNSPLIT, /* tv_id */
1913 : : PROP_cfg, /* properties_required */
1914 : : 0, /* properties_provided */
1915 : : 0, /* properties_destroyed */
1916 : : 0, /* todo_flags_start */
1917 : : 0, /* todo_flags_finish */
1918 : : };
1919 : :
1920 : : class pass_split_functions : public gimple_opt_pass
1921 : : {
1922 : : public:
1923 : 273196 : pass_split_functions (gcc::context *ctxt)
1924 : 546392 : : gimple_opt_pass (pass_data_split_functions, ctxt)
1925 : : {}
1926 : :
1927 : : /* opt_pass methods: */
1928 : : bool gate (function *) final override;
1929 : 1882197 : unsigned int execute (function *) final override
1930 : : {
1931 : 1882197 : return execute_split_functions ();
1932 : : }
1933 : :
1934 : : }; // class pass_split_functions
1935 : :
1936 : : bool
1937 : 2240867 : pass_split_functions::gate (function *)
1938 : : {
1939 : : /* When doing profile feedback, we want to execute the pass after the profile
1940 : : has been read, so disable this instance when profiling is enabled. */
1941 : 2240867 : return (flag_partial_inlining
1942 : 2240867 : && !profile_arc_flag && !flag_branch_probabilities);
1943 : : }
1944 : :
1945 : : } // anon namespace
1946 : :
1947 : : gimple_opt_pass *
1948 : 273196 : make_pass_split_functions (gcc::context *ctxt)
1949 : : {
1950 : 273196 : return new pass_split_functions (ctxt);
1951 : : }
1952 : :
1953 : : /* Execute the feedback-driven function splitting pass. */
1954 : :
1955 : : static unsigned int
1956 : 432 : execute_feedback_split_functions (void)
1957 : : {
1958 : 0 : unsigned int retval = execute_split_functions ();
1959 : 432 : if (retval)
1960 : 2 : retval |= TODO_rebuild_cgraph_edges;
1961 : 432 : return retval;
1962 : : }
1963 : :
1964 : : namespace {
1965 : :
1966 : : const pass_data pass_data_feedback_split_functions =
1967 : : {
1968 : : GIMPLE_PASS, /* type */
1969 : : "feedback_fnsplit", /* name */
1970 : : OPTGROUP_NONE, /* optinfo_flags */
1971 : : TV_IPA_FNSPLIT, /* tv_id */
1972 : : PROP_cfg, /* properties_required */
1973 : : 0, /* properties_provided */
1974 : : 0, /* properties_destroyed */
1975 : : 0, /* todo_flags_start */
1976 : : 0, /* todo_flags_finish */
1977 : : };
1978 : :
1979 : : class pass_feedback_split_functions : public gimple_opt_pass
1980 : : {
1981 : : public:
1982 : 273196 : pass_feedback_split_functions (gcc::context *ctxt)
1983 : 546392 : : gimple_opt_pass (pass_data_feedback_split_functions, ctxt)
1984 : : {}
1985 : :
1986 : : /* opt_pass methods: */
1987 : : bool gate (function *) final override;
1988 : 432 : unsigned int execute (function *) final override
1989 : : {
1990 : 432 : return execute_feedback_split_functions ();
1991 : : }
1992 : :
1993 : : }; // class pass_feedback_split_functions
1994 : :
1995 : : bool
1996 : 2230 : pass_feedback_split_functions::gate (function *)
1997 : : {
1998 : : /* There is no need to split while profiling; we are producing
1999 : : lousy code anyway. */
2000 : 2230 : return (flag_partial_inlining
2001 : 2230 : && flag_branch_probabilities);
2002 : : }
2003 : :
2004 : : } // anon namespace
2005 : :
2006 : : gimple_opt_pass *
2007 : 273196 : make_pass_feedback_split_functions (gcc::context *ctxt)
2008 : : {
2009 : 273196 : return new pass_feedback_split_functions (ctxt);
2010 : : }
|