Line data Source code
1 : /* Pass computing data for optimizing stdarg functions.
2 : Copyright (C) 2004-2026 Free Software Foundation, Inc.
3 : Contributed by Jakub Jelinek <jakub@redhat.com>
4 :
5 : This file is part of GCC.
6 :
7 : GCC is free software; you can redistribute it and/or modify
8 : it under the terms of the GNU General Public License as published by
9 : the Free Software Foundation; either version 3, or (at your option)
10 : any later version.
11 :
12 : GCC is distributed in the hope that it will be useful,
13 : but WITHOUT ANY WARRANTY; without even the implied warranty of
14 : MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 : GNU General Public License for more details.
16 :
17 : You should have received a copy of the GNU General Public License
18 : along with GCC; see the file COPYING3. If not see
19 : <http://www.gnu.org/licenses/>. */
20 :
21 : #include "config.h"
22 : #include "system.h"
23 : #include "coretypes.h"
24 : #include "backend.h"
25 : #include "target.h"
26 : #include "tree.h"
27 : #include "gimple.h"
28 : #include "tree-pass.h"
29 : #include "ssa.h"
30 : #include "gimple-pretty-print.h"
31 : #include "fold-const.h"
32 : #include "langhooks.h"
33 : #include "gimple-iterator.h"
34 : #include "gimple-walk.h"
35 : #include "gimplify.h"
36 : #include "tree-into-ssa.h"
37 : #include "tree-cfg.h"
38 : #include "tree-stdarg.h"
39 :
40 : /* A simple pass that attempts to optimize stdarg functions on architectures
41 : that need to save register arguments to stack on entry to stdarg functions.
   If the function doesn't use any va_start macros, no registers need to
   be saved.  If va_start macros are used and the va_list variables don't
   escape the function, it is only necessary to save registers that will
   be used
45 : in va_arg macros. E.g. if va_arg is only used with integral types
46 : in the function, floating point registers don't need to be saved, etc. */
47 :
48 :
49 : /* Return true if basic block VA_ARG_BB is dominated by VA_START_BB and
50 : is executed at most as many times as VA_START_BB. */
51 :
52 : static bool
53 5498 : reachable_at_most_once (basic_block va_arg_bb, basic_block va_start_bb)
54 : {
55 5498 : auto_vec<edge, 10> stack;
56 5498 : edge e;
57 5498 : edge_iterator ei;
58 5498 : bool ret;
59 :
60 5498 : if (va_arg_bb == va_start_bb)
61 : return true;
62 :
63 5434 : if (! dominated_by_p (CDI_DOMINATORS, va_arg_bb, va_start_bb))
64 : return false;
65 :
66 5434 : auto_sbitmap visited (last_basic_block_for_fn (cfun));
67 5434 : bitmap_clear (visited);
68 5434 : ret = true;
69 :
70 10870 : FOR_EACH_EDGE (e, ei, va_arg_bb->preds)
71 5436 : stack.safe_push (e);
72 :
73 90544 : while (! stack.is_empty ())
74 : {
75 86696 : basic_block src;
76 :
77 86696 : e = stack.pop ();
78 86696 : src = e->src;
79 :
80 86696 : if (e->flags & EDGE_COMPLEX)
81 : {
82 : ret = false;
83 : break;
84 : }
85 :
86 86696 : if (src == va_start_bb)
87 4119 : continue;
88 :
89 : /* va_arg_bb can be executed more times than va_start_bb. */
90 82577 : if (src == va_arg_bb)
91 : {
92 : ret = false;
93 : break;
94 : }
95 :
96 80991 : gcc_assert (src != ENTRY_BLOCK_PTR_FOR_FN (cfun));
97 :
98 80991 : if (! bitmap_bit_p (visited, src->index))
99 : {
100 69160 : bitmap_set_bit (visited, src->index);
101 153245 : FOR_EACH_EDGE (e, ei, src->preds)
102 84085 : stack.safe_push (e);
103 : }
104 : }
105 :
106 5434 : return ret;
107 5434 : }
108 :
109 :
/* For statement COUNTER = RHS, if RHS is COUNTER + constant,
   return constant, otherwise return HOST_WIDE_INT_M1U.
   GPR_P is true if this is GPR counter.  As a side effect, memoize in
   si->offsets the running counter offset for every SSA name visited on
   the use-def chain, so later queries and check_all_va_list_escapes can
   reuse them.  */

static unsigned HOST_WIDE_INT
va_list_counter_bump (struct stdarg_info *si, tree counter, tree rhs,
		      bool gpr_p)
{
  tree lhs, orig_lhs;
  gimple *stmt;
  unsigned HOST_WIDE_INT ret = 0, val, counter_val;
  unsigned int max_size;

  /* Lazily allocate the per-SSA-name offset cache; -1 means "not yet
     computed".  */
  if (si->offsets == NULL)
    {
      unsigned int i;

      si->offsets = XNEWVEC (int, num_ssa_names);
      for (i = 0; i < num_ssa_names; ++i)
	si->offsets[i] = -1;
    }

  counter_val = gpr_p ? cfun->va_list_gpr_size : cfun->va_list_fpr_size;
  max_size = gpr_p ? VA_LIST_MAX_GPR_SIZE : VA_LIST_MAX_FPR_SIZE;
  orig_lhs = lhs = rhs;
  /* First pass: walk the use-def chain backwards from RHS, accumulating
     the constant bump in RET, until we either hit a load of COUNTER
     itself or an SSA name with a cached offset.  */
  while (lhs)
    {
      enum tree_code rhs_code;
      tree rhs1;

      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
	{
	  /* Cached offset available: adjust RET relative to the current
	     counter value instead of walking further.  */
	  if (counter_val >= max_size)
	    {
	      ret = max_size;
	      break;
	    }

	  ret -= counter_val - si->offsets[SSA_NAME_VERSION (lhs)];
	  break;
	}

      stmt = SSA_NAME_DEF_STMT (lhs);

      if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != lhs)
	return HOST_WIDE_INT_M1U;

      rhs_code = gimple_assign_rhs_code (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      /* Plain copies and casts: step through without changing RET.  */
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
	   || gimple_assign_cast_p (stmt))
	  && TREE_CODE (rhs1) == SSA_NAME)
	{
	  lhs = rhs1;
	  continue;
	}

      /* lhs = rhs1 + CST: accumulate the constant.  */
      if ((rhs_code == POINTER_PLUS_EXPR
	   || rhs_code == PLUS_EXPR)
	  && TREE_CODE (rhs1) == SSA_NAME
	  && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt)))
	{
	  ret += tree_to_uhwi (gimple_assign_rhs2 (stmt));
	  lhs = rhs1;
	  continue;
	}

      /* lhs = &MEM_REF[ssa_name + CST]: same as an addition.  */
      if (rhs_code == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
	  && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1)))
	{
	  ret += tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1));
	  lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
	  continue;
	}

      if (get_gimple_rhs_class (rhs_code) != GIMPLE_SINGLE_RHS)
	return HOST_WIDE_INT_M1U;

      /* The chain must bottom out at a read of COUNTER itself.  */
      rhs = gimple_assign_rhs1 (stmt);
      if (TREE_CODE (counter) != TREE_CODE (rhs))
	return HOST_WIDE_INT_M1U;

      if (TREE_CODE (counter) == COMPONENT_REF)
	{
	  /* For struct va_list counters compare base object and field.  */
	  if (get_base_address (counter) != get_base_address (rhs)
	      || TREE_CODE (TREE_OPERAND (rhs, 1)) != FIELD_DECL
	      || TREE_OPERAND (counter, 1) != TREE_OPERAND (rhs, 1))
	    return HOST_WIDE_INT_M1U;
	}
      else if (counter != rhs)
	return HOST_WIDE_INT_M1U;

      lhs = NULL;
    }

  /* Second pass: re-walk the same chain and record in si->offsets the
     counter offset each intermediate SSA name corresponds to, capped at
     MAX_SIZE.  VAL starts at the offset of ORIG_LHS and decreases as we
     step towards the original counter read.  */
  lhs = orig_lhs;
  val = ret + counter_val;
  while (lhs)
    {
      enum tree_code rhs_code;
      tree rhs1;

      /* Stop once we reach already-memoized names.  */
      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
	break;

      if (val >= max_size)
	si->offsets[SSA_NAME_VERSION (lhs)] = max_size;
      else
	si->offsets[SSA_NAME_VERSION (lhs)] = val;

      stmt = SSA_NAME_DEF_STMT (lhs);

      rhs_code = gimple_assign_rhs_code (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
	   || gimple_assign_cast_p (stmt))
	  && TREE_CODE (rhs1) == SSA_NAME)
	{
	  lhs = rhs1;
	  continue;
	}

      if ((rhs_code == POINTER_PLUS_EXPR
	   || rhs_code == PLUS_EXPR)
	  && TREE_CODE (rhs1) == SSA_NAME
	  && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt)))
	{
	  val -= tree_to_uhwi (gimple_assign_rhs2 (stmt));
	  lhs = rhs1;
	  continue;
	}

      if (rhs_code == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
	  && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1)))
	{
	  val -= tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1));
	  lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
	  continue;
	}

      lhs = NULL;
    }

  return ret;
}
259 :
260 :
261 : /* Called by walk_tree to look for references to va_list variables. */
262 :
263 : static tree
264 151112 : find_va_list_reference (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
265 : void *data)
266 : {
267 151112 : bitmap va_list_vars = (bitmap) ((struct walk_stmt_info *) data)->info;
268 151112 : tree var = *tp;
269 :
270 151112 : if (TREE_CODE (var) == SSA_NAME)
271 : {
272 77500 : if (bitmap_bit_p (va_list_vars, SSA_NAME_VERSION (var)))
273 : return var;
274 : }
275 73612 : else if (VAR_P (var))
276 : {
277 17774 : if (bitmap_bit_p (va_list_vars, DECL_UID (var) + num_ssa_names))
278 : return var;
279 : }
280 :
281 : return NULL_TREE;
282 : }
283 :
284 :
285 : /* Helper function of va_list_counter_struct_op. Compute
286 : cfun->va_list_{g,f}pr_size. AP is a va_list GPR/FPR counter,
287 : if WRITE_P is true, seen in AP = VAR, otherwise seen in VAR = AP
288 : statement. GPR_P is true if AP is a GPR counter, false if it is
289 : a FPR counter. */
290 :
291 : static void
292 10760 : va_list_counter_op (struct stdarg_info *si, tree ap, tree var, bool gpr_p,
293 : bool write_p)
294 : {
295 10760 : unsigned HOST_WIDE_INT increment;
296 :
297 10760 : if (si->compute_sizes < 0)
298 : {
299 5328 : si->compute_sizes = 0;
300 5328 : if (si->va_start_count == 1
301 5328 : && reachable_at_most_once (si->bb, si->va_start_bb))
302 3710 : si->compute_sizes = 1;
303 :
304 5328 : if (dump_file && (dump_flags & TDF_DETAILS))
305 0 : fprintf (dump_file,
306 : "bb%d will %sbe executed at most once for each va_start "
307 0 : "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
308 0 : si->va_start_bb->index);
309 : }
310 :
311 10760 : if (write_p
312 2690 : && si->compute_sizes
313 12637 : && (increment = va_list_counter_bump (si, ap, var, gpr_p)) + 1 > 1)
314 : {
315 1877 : if (gpr_p && cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
316 : {
317 1295 : cfun->va_list_gpr_size += increment;
318 1295 : return;
319 : }
320 :
321 445 : if (!gpr_p && cfun->va_list_fpr_size + increment < VA_LIST_MAX_FPR_SIZE)
322 : {
323 421 : cfun->va_list_fpr_size += increment;
324 421 : return;
325 : }
326 : }
327 :
328 9044 : if (write_p || !si->compute_sizes)
329 : {
330 3413 : if (gpr_p)
331 2613 : cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
332 : else
333 800 : cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
334 : }
335 : }
336 :
337 :
338 : /* If AP is a va_list GPR/FPR counter, compute cfun->va_list_{g,f}pr_size.
339 : If WRITE_P is true, AP has been seen in AP = VAR assignment, if WRITE_P
340 : is false, AP has been seen in VAR = AP assignment.
341 : Return true if the AP = VAR (resp. VAR = AP) statement is a recognized
342 : va_arg operation that doesn't cause the va_list variable to escape
343 : current function. */
344 :
345 : static bool
346 56920 : va_list_counter_struct_op (struct stdarg_info *si, tree ap, tree var,
347 : bool write_p)
348 : {
349 56920 : tree base;
350 :
351 56920 : if (TREE_CODE (ap) != COMPONENT_REF
352 56920 : || TREE_CODE (TREE_OPERAND (ap, 1)) != FIELD_DECL)
353 : return false;
354 :
355 20233 : if (TREE_CODE (var) != SSA_NAME
356 20233 : || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (var)))
357 257 : return false;
358 :
359 19976 : base = get_base_address (ap);
360 19976 : if (!VAR_P (base)
361 39572 : || !bitmap_bit_p (si->va_list_vars, DECL_UID (base) + num_ssa_names))
362 966 : return false;
363 :
364 19010 : if (TREE_OPERAND (ap, 1) == va_list_gpr_counter_field)
365 8204 : va_list_counter_op (si, ap, var, true, write_p);
366 10806 : else if (TREE_OPERAND (ap, 1) == va_list_fpr_counter_field)
367 2556 : va_list_counter_op (si, ap, var, false, write_p);
368 :
369 : return true;
370 : }
371 :
372 :
373 : /* Check for TEM = AP. Return true if found and the caller shouldn't
374 : search for va_list references in the statement. */
375 :
376 : static bool
377 1150 : va_list_ptr_read (struct stdarg_info *si, tree ap, tree tem)
378 : {
379 1150 : if (!VAR_P (ap)
380 1476 : || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names))
381 936 : return false;
382 :
383 214 : if (TREE_CODE (tem) != SSA_NAME
384 214 : || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem)))
385 0 : return false;
386 :
387 214 : if (si->compute_sizes < 0)
388 : {
389 214 : si->compute_sizes = 0;
390 214 : if (si->va_start_count == 1
391 214 : && reachable_at_most_once (si->bb, si->va_start_bb))
392 202 : si->compute_sizes = 1;
393 :
394 214 : if (dump_file && (dump_flags & TDF_DETAILS))
395 0 : fprintf (dump_file,
396 : "bb%d will %sbe executed at most once for each va_start "
397 0 : "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
398 0 : si->va_start_bb->index);
399 : }
400 :
401 : /* For void * or char * va_list types, there is just one counter.
402 : If va_arg is used in a loop, we don't know how many registers need
403 : saving. */
404 214 : if (! si->compute_sizes)
405 : return false;
406 :
407 202 : if (va_list_counter_bump (si, ap, tem, true) == HOST_WIDE_INT_M1U)
408 : return false;
409 :
410 : /* Note the temporary, as we need to track whether it doesn't escape
411 : the current function. */
412 202 : bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (tem));
413 :
414 202 : return true;
415 : }
416 :
417 :
418 : /* Check for:
419 : tem1 = AP;
420 : TEM2 = tem1 + CST;
421 : AP = TEM2;
422 : sequence and update cfun->va_list_gpr_size. Return true if found. */
423 :
424 : static bool
425 948 : va_list_ptr_write (struct stdarg_info *si, tree ap, tree tem2)
426 : {
427 948 : unsigned HOST_WIDE_INT increment;
428 :
429 948 : if (!VAR_P (ap)
430 996 : || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names))
431 913 : return false;
432 :
433 35 : if (TREE_CODE (tem2) != SSA_NAME
434 35 : || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem2)))
435 0 : return false;
436 :
437 35 : if (si->compute_sizes <= 0)
438 : return false;
439 :
440 35 : increment = va_list_counter_bump (si, ap, tem2, true);
441 35 : if (increment + 1 <= 1)
442 : return false;
443 :
444 33 : if (cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
445 33 : cfun->va_list_gpr_size += increment;
446 : else
447 0 : cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
448 :
449 : return true;
450 : }
451 :
452 :
453 : /* If RHS is X, (some type *) X or X + CST for X a temporary variable
454 : containing value of some va_list variable plus optionally some constant,
455 : either set si->va_list_escapes or add LHS to si->va_list_escape_vars,
456 : depending whether LHS is a function local temporary. */
457 :
458 : static void
459 1098 : check_va_list_escapes (struct stdarg_info *si, tree lhs, tree rhs)
460 : {
461 1098 : if (! POINTER_TYPE_P (TREE_TYPE (rhs)))
462 : return;
463 :
464 386 : if (TREE_CODE (rhs) == SSA_NAME)
465 : {
466 134 : if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (rhs)))
467 : return;
468 : }
469 252 : else if (TREE_CODE (rhs) == ADDR_EXPR
470 61 : && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
471 252 : && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0)) == SSA_NAME)
472 : {
473 0 : tree ptr = TREE_OPERAND (TREE_OPERAND (rhs, 0), 0);
474 0 : if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (ptr)))
475 : return;
476 : }
477 : else
478 : return;
479 :
480 37 : if (TREE_CODE (lhs) != SSA_NAME)
481 : {
482 2 : si->va_list_escapes = true;
483 2 : return;
484 : }
485 :
486 35 : if (si->compute_sizes < 0)
487 : {
488 0 : si->compute_sizes = 0;
489 0 : if (si->va_start_count == 1
490 0 : && reachable_at_most_once (si->bb, si->va_start_bb))
491 0 : si->compute_sizes = 1;
492 :
493 0 : if (dump_file && (dump_flags & TDF_DETAILS))
494 0 : fprintf (dump_file,
495 : "bb%d will %sbe executed at most once for each va_start "
496 0 : "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
497 0 : si->va_start_bb->index);
498 : }
499 :
500 : /* For void * or char * va_list types, there is just one counter.
501 : If va_arg is used in a loop, we don't know how many registers need
502 : saving. */
503 35 : if (! si->compute_sizes)
504 : {
505 0 : si->va_list_escapes = true;
506 0 : return;
507 : }
508 :
509 35 : if (va_list_counter_bump (si, si->va_start_ap, lhs, true)
510 : == HOST_WIDE_INT_M1U)
511 : {
512 0 : si->va_list_escapes = true;
513 0 : return;
514 : }
515 :
516 35 : bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (lhs));
517 : }
518 :
519 :
/* Check all uses of temporaries from si->va_list_escape_vars bitmap.
   Return true if va_list might be escaping.  Uses that stay within the
   recognized va_arg patterns (dereference, pointer arithmetic, copies
   between tracked names or back into the va_list variable) are OK; any
   other use is treated as an escape.  */

static bool
check_all_va_list_escapes (struct stdarg_info *si)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      /* PHI results that merge tracked values must themselves be tracked,
	 otherwise the derived pointer leaks out of our analysis.  */
      for (gphi_iterator i = gsi_start_phis (bb); !gsi_end_p (i);
	   gsi_next (&i))
	{
	  tree lhs;
	  use_operand_p uop;
	  ssa_op_iter soi;
	  gphi *phi = i.phi ();

	  lhs = PHI_RESULT (phi);
	  if (virtual_operand_p (lhs)
	      || bitmap_bit_p (si->va_list_escape_vars,
			       SSA_NAME_VERSION (lhs)))
	    continue;

	  FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
	    {
	      tree rhs = USE_FROM_PTR (uop);
	      if (TREE_CODE (rhs) == SSA_NAME
		  && bitmap_bit_p (si->va_list_escape_vars,
				   SSA_NAME_VERSION (rhs)))
		{
		  /* Tracked value flows into an untracked PHI: escape.  */
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fputs ("va_list escapes in ", dump_file);
		      print_gimple_stmt (dump_file, phi, 0, dump_flags);
		      fputc ('\n', dump_file);
		    }
		  return true;
		}
	    }
	}

      for (gimple_stmt_iterator i = gsi_start_bb (bb); !gsi_end_p (i);
	   gsi_next (&i))
	{
	  gimple *stmt = gsi_stmt (i);
	  tree use;
	  ssa_op_iter iter;

	  /* Debug stmts don't affect code generation.  */
	  if (is_gimple_debug (stmt))
	    continue;

	  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_ALL_USES)
	    {
	      if (! bitmap_bit_p (si->va_list_escape_vars,
				  SSA_NAME_VERSION (use)))
		continue;

	      if (is_gimple_assign (stmt))
		{
		  tree rhs = gimple_assign_rhs1 (stmt);
		  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);

		  /* x = *ap_temp;  A load through the tracked pointer:
		     widen va_list_gpr_size to cover the bytes read.  */
		  if (rhs_code == MEM_REF
		      && TREE_OPERAND (rhs, 0) == use
		      && TYPE_SIZE_UNIT (TREE_TYPE (rhs))
		      && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (rhs)))
		      && si->offsets[SSA_NAME_VERSION (use)] != -1)
		    {
		      unsigned HOST_WIDE_INT gpr_size;
		      tree access_size = TYPE_SIZE_UNIT (TREE_TYPE (rhs));

		      /* End of the access relative to the va_list start:
			 cached offset + MEM_REF offset + access size.  */
		      gpr_size = si->offsets[SSA_NAME_VERSION (use)]
				 + tree_to_shwi (TREE_OPERAND (rhs, 1))
				 + tree_to_uhwi (access_size);
		      if (gpr_size >= VA_LIST_MAX_GPR_SIZE)
			cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
		      else if (gpr_size > cfun->va_list_gpr_size)
			cfun->va_list_gpr_size = gpr_size;
		      continue;
		    }

		  /* va_arg sequences may contain
		     other_ap_temp = ap_temp;
		     other_ap_temp = ap_temp + constant;
		     other_ap_temp = (some_type *) ap_temp;
		     ap = ap_temp;
		     statements.  */
		  if (rhs == use
		      && ((rhs_code == POINTER_PLUS_EXPR
			   && (TREE_CODE (gimple_assign_rhs2 (stmt))
			       == INTEGER_CST))
			  || gimple_assign_cast_p (stmt)
			  || (get_gimple_rhs_class (rhs_code)
			      == GIMPLE_SINGLE_RHS)))
		    {
		      tree lhs = gimple_assign_lhs (stmt);

		      /* Copy into another tracked temporary is fine.  */
		      if (TREE_CODE (lhs) == SSA_NAME
			  && bitmap_bit_p (si->va_list_escape_vars,
					   SSA_NAME_VERSION (lhs)))
			continue;

		      /* Store back into the va_list variable is fine.  */
		      if (VAR_P (lhs)
			  && bitmap_bit_p (si->va_list_vars,
					   DECL_UID (lhs) + num_ssa_names))
			continue;
		    }
		  else if (rhs_code == ADDR_EXPR
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
			   && TREE_OPERAND (TREE_OPERAND (rhs, 0), 0) == use)
		    {
		      /* &MEM_REF[tracked + off] into a tracked name.  */
		      tree lhs = gimple_assign_lhs (stmt);

		      if (bitmap_bit_p (si->va_list_escape_vars,
					SSA_NAME_VERSION (lhs)))
			continue;
		    }
		}

	      /* Any use not matched above escapes the analysis.  */
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fputs ("va_list escapes in ", dump_file);
		  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
		  fputc ('\n', dump_file);
		}
	      return true;
	    }
	}
    }

  return false;
}
654 :
/* Optimize FUN->va_list_gpr_size and FUN->va_list_fpr_size.  Starting
   from zero, the sizes are grown for every recognized va_arg access; if
   the va_list can escape or an access cannot be analyzed, both sizes
   fall back to the conservative maxima.  */

static void
optimize_va_list_gpr_fpr_size (function *fun)
{
  basic_block bb;
  bool va_list_escapes = false;
  bool va_list_simple_ptr;
  struct stdarg_info si;
  struct walk_stmt_info wi;
  const char *funcname = NULL;
  tree cfun_va_list;

  /* Optimistically assume nothing needs saving; grow as accesses are
     discovered.  */
  fun->va_list_gpr_size = 0;
  fun->va_list_fpr_size = 0;
  memset (&si, 0, sizeof (si));
  si.va_list_vars = BITMAP_ALLOC (NULL);
  si.va_list_escape_vars = BITMAP_ALLOC (NULL);

  if (dump_file)
    funcname = lang_hooks.decl_printable_name (current_function_decl, 2);

  /* A "simple pointer" va_list (void * or char *) has a single counter,
     the pointer itself; struct va_lists have target-defined counter
     fields.  */
  cfun_va_list = targetm.fn_abi_va_list (fun->decl);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
		       && (TREE_TYPE (cfun_va_list) == void_type_node
			   || TREE_TYPE (cfun_va_list) == char_type_node);
  gcc_assert (is_gimple_reg_type (cfun_va_list) == va_list_simple_ptr);

  /* First walk: find all va_start calls and collect the va_list
     variables they initialize into si.va_list_vars.  */
  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple *stmt = gsi_stmt (i);
	  tree callee, ap;

	  if (!is_gimple_call (stmt))
	    continue;

	  callee = gimple_call_fndecl (stmt);
	  if (!callee
	      || !fndecl_built_in_p (callee, BUILT_IN_NORMAL))
	    continue;

	  switch (DECL_FUNCTION_CODE (callee))
	    {
	    case BUILT_IN_VA_START:
	      break;
	      /* If old style builtins are used, don't optimize anything.  */
	    case BUILT_IN_SAVEREGS:
	    case BUILT_IN_NEXT_ARG:
	      va_list_escapes = true;
	      continue;
	    default:
	      continue;
	    }

	  si.va_start_count++;
	  ap = gimple_call_arg (stmt, 0);

	  /* The va_start argument must be the address of a local va_list
	     variable of the ABI type; anything else is unanalyzable.  */
	  if (TREE_CODE (ap) != ADDR_EXPR)
	    {
	      va_list_escapes = true;
	      break;
	    }
	  ap = TREE_OPERAND (ap, 0);
	  if (TREE_CODE (ap) == ARRAY_REF)
	    {
	      /* Array-typed va_list decays to &ap[0]; other indices are
		 unexpected.  */
	      if (! integer_zerop (TREE_OPERAND (ap, 1)))
		{
		  va_list_escapes = true;
		  break;
		}
	      ap = TREE_OPERAND (ap, 0);
	    }
	  if (TYPE_MAIN_VARIANT (TREE_TYPE (ap))
	      != TYPE_MAIN_VARIANT (targetm.fn_abi_va_list (fun->decl))
	      || !VAR_P (ap))
	    {
	      va_list_escapes = true;
	      break;
	    }

	  /* Global va_lists can be accessed from anywhere.  */
	  if (is_global_var (ap))
	    {
	      va_list_escapes = true;
	      break;
	    }

	  bitmap_set_bit (si.va_list_vars, DECL_UID (ap) + num_ssa_names);

	  /* VA_START_BB and VA_START_AP will be only used if there is just
	     one va_start in the function.  */
	  si.va_start_bb = bb;
	  si.va_start_ap = ap;
	}

      if (va_list_escapes)
	break;
    }

  /* If there were no va_start uses in the function, there is no need to
     save anything.  */
  if (si.va_start_count == 0)
    goto finish;

  /* If some va_list arguments weren't local, we can't optimize.  */
  if (va_list_escapes)
    goto finish;

  /* For void * or char * va_list, something useful can be done only
     if there is just one va_start.  */
  if (va_list_simple_ptr && si.va_start_count > 1)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For struct * va_list, if the backend didn't tell us what the counter fields
     are, there is nothing more we can do.  */
  if (!va_list_simple_ptr
      && va_list_gpr_counter_field == NULL_TREE
      && va_list_fpr_counter_field == NULL_TREE)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For void * or char * va_list there is just one counter
     (va_list itself).  Use VA_LIST_GPR_SIZE for it.  */
  if (va_list_simple_ptr)
    fun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;

  /* Needed by reachable_at_most_once.  */
  calculate_dominance_info (CDI_DOMINATORS);
  memset (&wi, 0, sizeof (wi));
  wi.info = si.va_list_vars;

  /* Second walk: classify every use of the va_list variables.  */
  FOR_EACH_BB_FN (bb, fun)
    {
      /* -1 means "not yet decided for this block"; see
	 va_list_counter_op.  */
      si.compute_sizes = -1;
      si.bb = bb;

      /* For va_list_simple_ptr, we have to check PHI nodes too.  We treat
	 them as assignments for the purpose of escape analysis.  This is
	 not needed for non-simple va_list because virtual phis don't perform
	 any real data movement.  Also, check PHI nodes for taking address of
	 the va_list vars.  */
      tree lhs, rhs;
      use_operand_p uop;
      ssa_op_iter soi;

      for (gphi_iterator i = gsi_start_phis (bb); !gsi_end_p (i);
	   gsi_next (&i))
	{
	  gphi *phi = i.phi ();
	  lhs = PHI_RESULT (phi);

	  if (virtual_operand_p (lhs))
	    continue;

	  if (va_list_simple_ptr)
	    {
	      FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
		{
		  rhs = USE_FROM_PTR (uop);
		  if (va_list_ptr_read (&si, rhs, lhs))
		    continue;
		  else if (va_list_ptr_write (&si, lhs, rhs))
		    continue;
		  else
		    check_va_list_escapes (&si, lhs, rhs);

		  if (si.va_list_escapes)
		    {
		      if (dump_file && (dump_flags & TDF_DETAILS))
			{
			  fputs ("va_list escapes in ", dump_file);
			  print_gimple_stmt (dump_file, phi, 0, dump_flags);
			  fputc ('\n', dump_file);
			}
		      va_list_escapes = true;
		    }
		}
	    }

	  /* Any other reference to a va_list variable from a PHI argument
	     (e.g. taking its address) is an escape.  */
	  for (unsigned j = 0; !va_list_escapes
	       && j < gimple_phi_num_args (phi); ++j)
	    if ((!va_list_simple_ptr
		 || TREE_CODE (gimple_phi_arg_def (phi, j)) != SSA_NAME)
		&& walk_tree (gimple_phi_arg_def_ptr (phi, j),
			      find_va_list_reference, &wi, NULL))
	      {
		if (dump_file && (dump_flags & TDF_DETAILS))
		  {
		    fputs ("va_list escapes in ", dump_file);
		    print_gimple_stmt (dump_file, phi, 0, dump_flags);
		    fputc ('\n', dump_file);
		  }
		va_list_escapes = true;
	      }
	}

      for (gimple_stmt_iterator i = gsi_start_bb (bb);
	   !gsi_end_p (i) && !va_list_escapes;
	   gsi_next (&i))
	{
	  gimple *stmt = gsi_stmt (i);

	  /* Don't look at __builtin_va_{start,end}, they are ok.  */
	  if (is_gimple_call (stmt))
	    {
	      tree callee = gimple_call_fndecl (stmt);

	      if (callee
		  && fndecl_built_in_p (callee, BUILT_IN_VA_START,
					BUILT_IN_VA_END))
		continue;
	    }

	  if (is_gimple_assign (stmt))
	    {
	      lhs = gimple_assign_lhs (stmt);
	      rhs = gimple_assign_rhs1 (stmt);

	      if (va_list_simple_ptr)
		{
		  if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
		      == GIMPLE_SINGLE_RHS)
		    {
		      /* Check for ap ={v} {}.  */
		      if (TREE_CLOBBER_P (rhs))
			continue;

		      /* Check for tem = ap.  */
		      else if (va_list_ptr_read (&si, rhs, lhs))
			continue;

		      /* Check for the last insn in:
			 tem1 = ap;
			 tem2 = tem1 + CST;
			 ap = tem2;
			 sequence.  */
		      else if (va_list_ptr_write (&si, lhs, rhs))
			continue;
		    }

		  if ((gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
		       && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST)
		      || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
		      || (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
			  == GIMPLE_SINGLE_RHS))
		    check_va_list_escapes (&si, lhs, rhs);
		}
	      else
		{
		  if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
		      == GIMPLE_SINGLE_RHS)
		    {
		      /* Check for ap ={v} {}.  */
		      if (TREE_CLOBBER_P (rhs))
			continue;

		      /* Check for ap[0].field = temp.  */
		      else if (va_list_counter_struct_op (&si, lhs, rhs, true))
			continue;

		      /* Check for temp = ap[0].field.  */
		      else if (va_list_counter_struct_op (&si, rhs, lhs,
							  false))
			continue;
		    }

		  /* Do any architecture specific checking.  */
		  if (targetm.stdarg_optimize_hook
		      && targetm.stdarg_optimize_hook (&si, stmt))
		    continue;
		}
	    }
	  else if (is_gimple_debug (stmt))
	    continue;

	  /* All other uses of va_list are either va_copy (that is not handled
	     in this optimization), taking address of va_list variable or
	     passing va_list to other functions (in that case va_list might
	     escape the function and therefore va_start needs to set it up
	     fully), or some unexpected use of va_list.  None of these should
	     happen in a gimplified VA_ARG_EXPR.  */
	  if (si.va_list_escapes
	      || walk_gimple_op (stmt, find_va_list_reference, &wi))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fputs ("va_list escapes in ", dump_file);
		  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
		  fputc ('\n', dump_file);
		}
	      va_list_escapes = true;
	    }
	}

      if (va_list_escapes)
	break;
    }

  /* For simple-pointer va_lists, verify the tracked temporaries derived
     from the va_list never escape either.  */
  if (! va_list_escapes
      && va_list_simple_ptr
      && ! bitmap_empty_p (si.va_list_escape_vars)
      && check_all_va_list_escapes (&si))
    va_list_escapes = true;

finish:
  /* On any escape, fall back to saving everything.  */
  if (va_list_escapes)
    {
      fun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      fun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
  BITMAP_FREE (si.va_list_vars);
  BITMAP_FREE (si.va_list_escape_vars);
  free (si.offsets);
  if (dump_file)
    {
      fprintf (dump_file, "%s: va_list escapes %d, needs to save ",
	       funcname, (int) va_list_escapes);
      if (fun->va_list_gpr_size >= VA_LIST_MAX_GPR_SIZE)
	fputs ("all", dump_file);
      else
	fprintf (dump_file, "%d", cfun->va_list_gpr_size);
      fputs (" GPR units and ", dump_file);
      if (fun->va_list_fpr_size >= VA_LIST_MAX_FPR_SIZE)
	fputs ("all", dump_file);
      else
	fprintf (dump_file, "%d", cfun->va_list_fpr_size);
      fputs (" FPR units.\n", dump_file);
    }
}
991 :
/* Expand IFN_VA_ARGs in FUN.

   Replace each IFN_VA_ARG internal call by the target-specific GIMPLE
   sequence produced by targetm.gimplify_va_arg_expr, splitting the
   containing basic block as needed for the new sequence.  If anything
   was expanded, dominance info is freed and SSA form is updated.  */

static void
expand_ifn_va_arg_1 (function *fun)
{
  bool modified = false;
  basic_block bb;
  gimple_stmt_iterator i;
  location_t saved_location;

  FOR_EACH_BB_FN (bb, fun)
    for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
      {
	gimple *stmt = gsi_stmt (i);
	tree ap, aptype, expr, lhs, type;
	gimple_seq pre = NULL, post = NULL;

	if (!gimple_call_internal_p (stmt, IFN_VA_ARG))
	  continue;

	modified = true;

	/* Arg 0 is the ap operand; the requested va_arg type is the
	   pointee of arg 1's type; arg 2's (pointer) type is used below
	   to undo the address-taking done by build_va_arg.  */
	type = TREE_TYPE (TREE_TYPE (gimple_call_arg (stmt, 1)));
	ap = gimple_call_arg (stmt, 0);
	aptype = TREE_TYPE (gimple_call_arg (stmt, 2));
	gcc_assert (POINTER_TYPE_P (aptype));

	/* Balanced out the &ap, usually added by build_va_arg.  */
	ap = build2 (MEM_REF, TREE_TYPE (aptype), ap,
		     build_int_cst (aptype, 0));

	push_gimplify_context (false);
	/* Gimplify using the IFN_VA_ARG call's location, so new stmts
	   are attributed to the original va_arg.  Restored below.  */
	saved_location = input_location;
	input_location = gimple_location (stmt);

	/* Make it easier for the backends by protecting the valist argument
	   from multiple evaluations.  */
	gimplify_expr (&ap, &pre, &post, is_gimple_min_lval, fb_lvalue);

	expr = targetm.gimplify_va_arg_expr (ap, type, &pre, &post);

	lhs = gimple_call_lhs (stmt);
	if (lhs != NULL_TREE)
	  {
	    unsigned int nargs = gimple_call_num_args (stmt);
	    gcc_assert (useless_type_conversion_p (TREE_TYPE (lhs), type));

	    if (nargs == 4)
	      {
		/* We've transported the size of with WITH_SIZE_EXPR here as
		   the last argument of the internal fn call.  Now reinstate
		   it.  */
		tree size = gimple_call_arg (stmt, nargs - 1);
		expr = build2 (WITH_SIZE_EXPR, TREE_TYPE (expr), expr, size);
	      }

	    /* We use gimplify_assign here, rather than gimple_build_assign,
	       because gimple_assign knows how to deal with variable-sized
	       types.  */
	    gimplify_assign (lhs, expr, &pre);
	  }
	else
	  /* No lhs: the va_arg result is unused; just emit the expansion
	     sequence itself.  */
	  gimplify_and_add (expr, &pre);

	input_location = saved_location;
	pop_gimplify_context (NULL);

	gimple_seq_add_seq (&pre, post);
	update_modified_stmts (pre);

	/* Add the sequence after IFN_VA_ARG.  This splits the bb right
	   after IFN_VA_ARG, and adds the sequence in one or more new bbs
	   inbetween.  */
	gimple_find_sub_bbs (pre, &i);

	/* Remove the IFN_VA_ARG gimple_call.  It's the last stmt in the
	   bb.  Unlink its virtual def before releasing the SSA name.  */
	unlink_stmt_vdef (stmt);
	release_ssa_name_fn (fun, gimple_vdef (stmt));
	gsi_remove (&i, true);
	gcc_assert (gsi_end_p (i));

	/* We're walking here into the bbs which contain the expansion of
	   IFN_VA_ARG, and will not contain another IFN_VA_ARG that needs
	   expanding.  We could try to skip walking these bbs, perhaps by
	   walking backwards over gimples and bbs.  */
	break;
      }

  if (!modified)
    return;

  free_dominance_info (CDI_DOMINATORS);
  update_ssa (TODO_update_ssa);
}
1087 :
1088 : /* Expand IFN_VA_ARGs in FUN, if necessary. */
1089 :
1090 : static void
1091 1052211 : expand_ifn_va_arg (function *fun)
1092 : {
1093 1052211 : if ((fun->curr_properties & PROP_gimple_lva) == 0)
1094 32973 : expand_ifn_va_arg_1 (fun);
1095 :
1096 1052211 : if (flag_checking)
1097 : {
1098 1052193 : basic_block bb;
1099 1052193 : gimple_stmt_iterator i;
1100 11768283 : FOR_EACH_BB_FN (bb, fun)
1101 101216848 : for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
1102 79784668 : gcc_assert (!gimple_call_internal_p (gsi_stmt (i), IFN_VA_ARG));
1103 : }
1104 1052211 : }
1105 :
namespace {

/* Pass descriptor for the "stdarg" pass below.  Requires CFG and SSA
   form; provides PROP_gimple_lva, i.e. no IFN_VA_ARG calls remain.  */
const pass_data pass_data_stdarg =
{
  GIMPLE_PASS, /* type */
  "stdarg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  PROP_gimple_lva, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
1120 :
/* The stdarg pass: expands IFN_VA_ARG internal calls in every function
   and, for stdarg functions, runs optimize_va_list_gpr_fpr_size (see
   execute below).  */

class pass_stdarg : public gimple_opt_pass
{
public:
  pass_stdarg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_stdarg, ctxt)
  {}

  /* opt_pass methods: */
  bool gate (function *) final override
  {
    /* Always run this pass, in order to expand va_arg internal_fns.  We
       also need to do that if fun->stdarg == 0, because a va_arg may also
       occur in a function without varargs, f.i. when passing a va_list to
       another function.  */
    return true;
  }

  unsigned int execute (function *) final override;

}; // class pass_stdarg
1141 :
1142 : unsigned int
1143 1041479 : pass_stdarg::execute (function *fun)
1144 : {
1145 : /* TODO: Postpone expand_ifn_va_arg till after
1146 : optimize_va_list_gpr_fpr_size. */
1147 1041479 : expand_ifn_va_arg (fun);
1148 :
1149 1041479 : if (flag_stdarg_opt
1150 : /* This optimization is only for stdarg functions. */
1151 1041479 : && fun->stdarg != 0)
1152 7158 : optimize_va_list_gpr_fpr_size (fun);
1153 :
1154 1041479 : return 0;
1155 : }
1156 :
1157 : } // anon namespace
1158 :
1159 : gimple_opt_pass *
1160 285722 : make_pass_stdarg (gcc::context *ctxt)
1161 : {
1162 285722 : return new pass_stdarg (ctxt);
1163 : }
1164 :
namespace {

/* Pass descriptor for the "lower_vaarg" pass below, which runs
   expand_ifn_va_arg on functions not yet having PROP_gimple_lva.  */
const pass_data pass_data_lower_vaarg =
{
  GIMPLE_PASS, /* type */
  "lower_vaarg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  PROP_gimple_lva, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
1179 :
/* Pass that lowers IFN_VA_ARG calls in functions which do not yet have
   the PROP_gimple_lva property (see the gate below).  */

class pass_lower_vaarg : public gimple_opt_pass
{
public:
  pass_lower_vaarg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_vaarg, ctxt)
  {}

  /* opt_pass methods: */
  bool gate (function *) final override
  {
    /* Run only if IFN_VA_ARGs have not already been expanded.  */
    return (cfun->curr_properties & PROP_gimple_lva) == 0;
  }

  unsigned int execute (function *) final override;

}; // class pass_lower_vaarg
1196 :
1197 : unsigned int
1198 10732 : pass_lower_vaarg::execute (function *fun)
1199 : {
1200 10732 : expand_ifn_va_arg (fun);
1201 10732 : return 0;
1202 : }
1203 :
1204 : } // anon namespace
1205 :
1206 : gimple_opt_pass *
1207 285722 : make_pass_lower_vaarg (gcc::context *ctxt)
1208 : {
1209 285722 : return new pass_lower_vaarg (ctxt);
1210 : }
|