Line data Source code
1 : /* strub (stack scrubbing) support.
2 : Copyright (C) 2021-2026 Free Software Foundation, Inc.
3 : Contributed by Alexandre Oliva <oliva@adacore.com>.
4 :
5 : This file is part of GCC.
6 :
7 : GCC is free software; you can redistribute it and/or modify it under
8 : the terms of the GNU General Public License as published by the Free
9 : Software Foundation; either version 3, or (at your option) any later
10 : version.
11 :
12 : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 : WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 : for more details.
16 :
17 : You should have received a copy of the GNU General Public License
18 : along with GCC; see the file COPYING3. If not see
19 : <http://www.gnu.org/licenses/>. */
20 :
21 : #include "config.h"
22 : #include "system.h"
23 : #include "coretypes.h"
24 : #include "backend.h"
25 : #include "tree.h"
26 : #include "gimple.h"
27 : #include "gimplify.h"
28 : #include "tree-pass.h"
29 : #include "ssa.h"
30 : #include "gimple-iterator.h"
31 : #include "gimplify-me.h"
32 : #include "tree-into-ssa.h"
33 : #include "tree-ssa.h"
34 : #include "tree-cfg.h"
35 : #include "cfghooks.h"
36 : #include "cfgloop.h"
37 : #include "cfgcleanup.h"
38 : #include "tree-eh.h"
39 : #include "except.h"
40 : #include "builtins.h"
41 : #include "attribs.h"
42 : #include "tree-inline.h"
43 : #include "cgraph.h"
44 : #include "alloc-pool.h"
45 : #include "symbol-summary.h"
46 : #include "sreal.h"
47 : #include "ipa-cp.h"
48 : #include "ipa-prop.h"
49 : #include "ipa-fnsummary.h"
50 : #include "gimple-fold.h"
51 : #include "fold-const.h"
52 : #include "gimple-walk.h"
53 : #include "tree-dfa.h"
54 : #include "langhooks.h"
55 : #include "calls.h"
56 : #include "vec.h"
57 : #include "stor-layout.h"
58 : #include "varasm.h"
59 : #include "alias.h"
60 : #include "diagnostic.h"
61 : #include "intl.h"
62 : #include "ipa-strub.h"
63 : #include "symtab-thunks.h"
64 : #include "attr-fnspec.h"
65 : #include "target.h"
66 : #include "gcc-urlifier.h"
67 :
68 : /* This file introduces two passes that, together, implement
69 : machine-independent stack scrubbing, strub for short. It arranges
70 : for stack frames that have strub enabled to be zeroed-out after
71 : relinquishing control to a caller, whether by returning or by
72 : propagating an exception. This admittedly unusual design decision
73 : was driven by exception support (one needs a stack frame to be
74 : active to propagate exceptions out of it), and it enabled an
75 : implementation that is entirely machine-independent (no custom
76 : epilogue code is required).
77 :
78 : Strub modes can be selected for stack frames by attaching attribute
79 : strub to functions or to variables (to their types, actually).
80 : Different strub modes, with different implementation details, are
81 : available, and they can be selected by an argument to the strub
82 : attribute. When enabled by strub-enabled variables, whether by
83 : accessing (as in reading from) statically-allocated ones, or by
84 : introducing (as in declaring) automatically-allocated ones, a
85 : suitable mode is selected automatically.
86 :
87 : At-calls mode modifies the interface of a function, adding a stack
88 : watermark argument, that callers use to clean up the stack frame of
89 : the called function. Because of the interface change, it can only
90 : be used when explicitly selected, or when a function is internal to
91 : a translation unit. Strub-at-calls function types are distinct
92 : from their original types (they're not modified in-place), and they
93 : are not interchangeable with other function types.
94 :
95 : Internal mode, in turn, does not modify the type or the interface
96 : of a function. It is currently implemented by turning the function
97 : into a wrapper, moving the function body to a separate wrapped
98 : function, and scrubbing the wrapped body's stack in the wrapper.
99 : Internal-strub function types are mostly interface-compatible with
100 : other strub modes, namely callable (from strub functions, though
101 : not strub-enabled) and disabled (not callable from strub
102 : functions).
103 :
104 : Always_inline functions can be strub functions, but they can only
105 : be called from other strub functions, because strub functions must
106 : never be inlined into non-strub functions. Internal and at-calls
107 : modes are indistinguishable when it comes to always_inline
108 : functions: they will necessarily be inlined into another strub
109 : function, and will thus be integrated into the caller's stack
110 : frame, whatever the mode. (Contrast with non-always_inline strub
111 : functions: an at-calls function can be called from other strub
112 : functions, ensuring no discontinuity in stack erasing, whereas an
113 : internal-strub function can only be called from other strub
114 : functions if it happens to be inlined, or if -fstrub=relaxed mode
115 : is in effect (that's the default). In -fstrub=strict mode,
116 : internal-strub functions are not callable from strub functions,
117 : because the wrapper itself is not strubbed.
118 :
119 : The implementation involves two simple-IPA passes. The earliest
120 : one, strub-mode, assigns strub modes to functions. It needs to run
121 : before any inlining, so that we can prevent inlining of strub
122 : functions into non-strub functions. It notes explicit strub mode
123 : requests, enables strub in response to strub variables and testing
124 : options, and flags unsatisfiable requests.
125 :
126 : Three possibilities of unsatisfiable requests come to mind: (a)
127 : when a strub mode is explicitly selected, but the function uses
128 : features that make it ineligible for that mode (e.g. at-calls rules
129 : out calling __builtin_apply_args, because of the interface changes,
130 : and internal mode rules out noclone or otherwise non-versionable
131 : functions, non-default varargs, non-local or forced labels, and
132 : functions with far too many arguments); (b) when some strub mode
133 : must be enabled because of a strub variable, but the function is
134 : not eligible or not viable for any mode; and (c) when
135 : -fstrub=strict is enabled, and calls are found in strub functions
136 : to functions that are not callable from strub contexts.
137 : compute_strub_mode implements (a) and (b), and verify_strub
138 : implements (c).
139 :
140 : The second IPA pass modifies interfaces of at-calls-strub functions
141 : and types, introduces strub calls in and around them, and splits
142 : internal-strub functions. It is placed after early inlining, so
143 : that even internal-strub functions get a chance of being inlined
144 : into other strub functions, but before non-early inlining, so that
145 : internal-strub wrapper functions still get a chance of inlining
146 : after splitting.
147 :
148 : Wrappers avoid duplicating the copying of large arguments again by
149 : passing them by reference to the wrapped bodies. This involves
150 : occasional SSA rewriting of address computations, because of the
151 : additional indirection. Besides these changes, and the
152 : introduction of the stack watermark parameter, wrappers and wrapped
153 : functions cooperate to handle variable argument lists (performing
154 : va_start in the wrapper, passing the list as an argument, and
155 : replacing va_start calls in the wrapped body with va_copy), and
156 : __builtin_apply_args (also called in the wrapper and passed to the
157 : wrapped body as an argument).
158 :
159 : Strub bodies (both internal-mode wrapped bodies, and at-calls
160 : functions) always start by adjusting the watermark parameter, by
161 : calling __builtin___strub_update. The compiler inserts them in the
162 : main strub pass. Allocations of additional stack space for the
163 : frame (__builtin_alloca) are also followed by watermark updates.
164 : Stack space temporarily allocated to pass arguments to other
165 : functions, released right after the call, is not regarded as part
166 : of the frame. Around calls to them, i.e., in internal-mode
167 : wrappers and at-calls callers (even calls through pointers), calls
168 : to __builtin___strub_enter and __builtin___strub_leave are
169 : inserted, the latter as a __finally block, so that it runs at
170 : regular and exceptional exit paths. strub_enter only initializes
171 : the stack watermark, and strub_leave is where the scrubbing takes
172 : place, overwriting with zeros the stack space from the top of the
173 : stack to the watermark.
174 :
175 : These calls can be optimized in various cases. In
176 : pass_ipa_strub::adjust_at_calls_call, for example, we enable
177 : tail-calling and other optimized calls from one strub body to
178 : another by passing on the watermark parameter. The builtins
179 : themselves may undergo inline substitution during expansion,
180 : depending on optimization levels. This involves dealing with stack
181 : red zones (when the builtins are called out-of-line, the red zone
182 : cannot be used) and other ugly details related with inlining strub
183 : bodies into other strub bodies (see expand_builtin_strub_update).
184 : expand_builtin_strub_leave may even perform partial inline
185 : substitution. */
186 :
/* Const and pure functions that gain a watermark parameter for strub purposes
   are still regarded as such, which may cause the inline expansions of the
   __strub builtins to malfunction.  Ideally, attribute "fn spec" would enable
   us to inform the backend about requirements and side effects of the call,
   but call_fusage building in calls.c:expand_call does not even look at
   attr_fnspec, so we resort to asm loads and updates to attain an equivalent
   effect.  Once expand_call gains the ability to issue extra memory uses and
   clobbers based on pure/const function's fnspec, we can define this to 1.  */
#define ATTR_FNSPEC_DECONST_WATERMARK 0
196 :
enum strub_mode {
  /* This mode denotes a regular function, that does not require stack
     scrubbing (strubbing).  It may call any other functions, but if
     it calls AT_CALLS (or WRAPPED) ones, strubbing logic is
     automatically introduced around those calls (the latter, by
     inlining INTERNAL wrappers).  */
  STRUB_DISABLED = 0,

  /* This denotes a function whose signature is (to be) modified to
     take an extra parameter, for stack use annotation, and its
     callers must initialize and pass that argument, and perform the
     strubbing.  Functions that are explicitly marked with attribute
     strub must have the mark visible wherever the function is,
     including aliases, and overriders and overriding methods.
     Functions that are implicitly marked for strubbing, for accessing
     variables explicitly marked as such, will only select this
     strubbing method if they are internal to a translation unit.  It
     can only be inlined into other strubbing functions, i.e.,
     STRUB_AT_CALLS or STRUB_WRAPPED.  */
  STRUB_AT_CALLS = 1,

  /* This denotes a function that is to perform strubbing internally,
     without any changes to its interface (the function is turned into
     a strubbing wrapper, and its original body is moved to a separate
     STRUB_WRAPPED function, with a modified interface).  Functions
     may be explicitly marked with attribute strub(2), and the
     attribute must be visible at the point of definition.  Functions
     that are explicitly marked for strubbing, for accessing variables
     explicitly marked as such, may select this strubbing mode if
     their interface cannot change, e.g. because its interface is
     visible to other translation units, directly, by indirection
     (having its address taken), inheritance, etc.  Functions that use
     this method must not have the noclone attribute, nor the noipa
     one.  Functions marked as always_inline may select this mode, but
     they are NOT wrapped, they remain unchanged, and are only inlined
     into strubbed contexts.  Once non-always_inline functions are
     wrapped, the wrapper becomes STRUB_WRAPPER, and the wrapped becomes
     STRUB_WRAPPED.  */
  STRUB_INTERNAL = 2,

  /* This denotes a function whose stack is not strubbed, but that is
     nevertheless explicitly or implicitly marked as callable from strubbing
     functions.  Normally, only STRUB_AT_CALLS (and STRUB_INTERNAL ->
     STRUB_WRAPPED) functions can be called from strubbing contexts (bodies of
     STRUB_AT_CALLS, STRUB_INTERNAL and STRUB_WRAPPED functions), but attribute
     strub(3) enables other functions to be (indirectly) called from these
     contexts.  Some builtins and internal functions may be implicitly marked
     as STRUB_CALLABLE.  */
  STRUB_CALLABLE = 3,

  /* This denotes the function that took over the body of a
     STRUB_INTERNAL function.  At first, it's only called by its
     wrapper, but the wrapper may be inlined.  The wrapped function,
     in turn, can only be inlined into other functions whose stack
     frames are strubbed, i.e., that are STRUB_WRAPPED or
     STRUB_AT_CALLS.  */
  STRUB_WRAPPED = -1,

  /* This denotes the wrapper function that replaced the STRUB_INTERNAL
     function.  This mode overrides the STRUB_INTERNAL mode at the time the
     internal to-be-wrapped function becomes a wrapper, so that inlining logic
     can tell one from the other.  */
  STRUB_WRAPPER = -2,

  /* This denotes an always_inline function that requires strubbing.  It can
     only be called from, and inlined into, other strubbing contexts.  */
  STRUB_INLINABLE = -3,

  /* This denotes a function that accesses strub variables, so it would call
     for internal strubbing (whether or not it's eligible for that), but since
     at-calls strubbing is viable, that's selected as an optimization.  This
     mode addresses the inconvenience that such functions may have different
     modes selected depending on optimization flags, and get a different
     callable status depending on that choice: if we assigned them
     STRUB_AT_CALLS mode, they would be callable when optimizing, whereas
     STRUB_INTERNAL would not be callable.  */
  STRUB_AT_CALLS_OPT = -4,

};
276 :
277 : /* Look up a strub attribute in TYPE, and return it. */
278 :
279 : static tree
280 13488665 : get_strub_attr_from_type (tree type)
281 : {
282 13488665 : return lookup_attribute ("strub", TYPE_ATTRIBUTES (type));
283 : }
284 :
285 : /* Look up a strub attribute in DECL or in its type, and return it. */
286 :
287 : static tree
288 10505981 : get_strub_attr_from_decl (tree decl)
289 : {
290 10505981 : tree ret = lookup_attribute ("strub", DECL_ATTRIBUTES (decl));
291 10505981 : if (ret)
292 : return ret;
293 10490289 : return get_strub_attr_from_type (TREE_TYPE (decl));
294 : }
295 :
296 : #define STRUB_ID_COUNT 8
297 : #define STRUB_IDENT_COUNT 3
298 : #define STRUB_TYPE_COUNT 5
299 :
300 : #define STRUB_ID_BASE 0
301 : #define STRUB_IDENT_BASE (STRUB_ID_BASE + STRUB_ID_COUNT)
302 : #define STRUB_TYPE_BASE (STRUB_IDENT_BASE + STRUB_IDENT_COUNT)
303 : #define STRUB_CACHE_SIZE (STRUB_TYPE_BASE + STRUB_TYPE_COUNT)
304 :
305 : /* Keep the strub mode and temp identifiers and types from being GC'd. */
306 : static GTY((deletable)) tree strub_cache[STRUB_CACHE_SIZE];
307 :
308 : /* Define a function to cache identifier ID, to be used as a strub attribute
309 : parameter for a strub mode named after NAME. */
310 : #define DEF_STRUB_IDS(IDX, NAME, ID) \
311 : static inline tree get_strub_mode_id_ ## NAME () { \
312 : int idx = STRUB_ID_BASE + IDX; \
313 : tree identifier = strub_cache[idx]; \
314 : if (!identifier) \
315 : strub_cache[idx] = identifier = get_identifier (ID); \
316 : return identifier; \
317 : }
318 : /* Same as DEF_STRUB_IDS, but use the string expansion of NAME as ID. */
319 : #define DEF_STRUB_ID(IDX, NAME) \
320 : DEF_STRUB_IDS (IDX, NAME, #NAME)
321 :
322 : /* Define functions for each of the strub mode identifiers.
323 : Expose dashes rather than underscores. */
324 10183 : DEF_STRUB_ID (0, disabled)
325 18598 : DEF_STRUB_IDS (1, at_calls, "at-calls")
326 15467 : DEF_STRUB_ID (2, internal)
327 24100 : DEF_STRUB_ID (3, callable)
328 2209 : DEF_STRUB_ID (4, wrapped)
329 2363 : DEF_STRUB_ID (5, wrapper)
330 3488 : DEF_STRUB_ID (6, inlinable)
331 40 : DEF_STRUB_IDS (7, at_calls_opt, "at-calls-opt")
332 :
333 : /* Release the temporary macro names. */
334 : #undef DEF_STRUB_IDS
335 : #undef DEF_STRUB_ID
336 :
337 : /* Return the identifier corresponding to strub MODE. */
338 :
339 : static tree
340 76448 : get_strub_mode_attr_parm (enum strub_mode mode)
341 : {
342 76448 : switch (mode)
343 : {
344 10183 : case STRUB_DISABLED:
345 10183 : return get_strub_mode_id_disabled ();
346 :
347 18598 : case STRUB_AT_CALLS:
348 18598 : return get_strub_mode_id_at_calls ();
349 :
350 15467 : case STRUB_INTERNAL:
351 15467 : return get_strub_mode_id_internal ();
352 :
353 24100 : case STRUB_CALLABLE:
354 24100 : return get_strub_mode_id_callable ();
355 :
356 2209 : case STRUB_WRAPPED:
357 2209 : return get_strub_mode_id_wrapped ();
358 :
359 2363 : case STRUB_WRAPPER:
360 2363 : return get_strub_mode_id_wrapper ();
361 :
362 3488 : case STRUB_INLINABLE:
363 3488 : return get_strub_mode_id_inlinable ();
364 :
365 40 : case STRUB_AT_CALLS_OPT:
366 40 : return get_strub_mode_id_at_calls_opt ();
367 :
368 0 : default:
369 0 : gcc_unreachable ();
370 : }
371 : }
372 :
373 : /* Return the parmeters (TREE_VALUE) for a strub attribute of MODE.
374 : We know we use a single parameter, so we bypass the creation of a
375 : tree list. */
376 :
377 : static tree
378 2616 : get_strub_mode_attr_value (enum strub_mode mode)
379 : {
380 0 : return get_strub_mode_attr_parm (mode);
381 : }
382 :
383 : /* Determine whether ID is a well-formed strub mode-specifying attribute
384 : parameter for a function (type). Only user-visible modes are accepted, and
385 : ID must be non-NULL.
386 :
387 : For unacceptable parms, return 0, otherwise a nonzero value as below.
388 :
389 : If the parm enables strub, return positive, otherwise negative.
390 :
391 : If the affected type must be a distinct, incompatible type,return an integer
392 : of absolute value 2, otherwise 1. */
393 :
394 : int
395 2174 : strub_validate_fn_attr_parm (tree id)
396 : {
397 2174 : int ret;
398 2174 : const char *s = NULL;
399 2174 : size_t len = 0;
400 :
401 : /* do NOT test for NULL. This is only to be called with non-NULL arguments.
402 : We assume that the strub parameter applies to a function, because only
403 : functions accept an explicit argument. If we accepted NULL, and we
404 : happened to be called to verify the argument for a variable, our return
405 : values would be wrong. */
406 2174 : if (TREE_CODE (id) == STRING_CST)
407 : {
408 2174 : s = TREE_STRING_POINTER (id);
409 2174 : len = TREE_STRING_LENGTH (id) - 1;
410 : }
411 0 : else if (TREE_CODE (id) == IDENTIFIER_NODE)
412 : {
413 0 : s = IDENTIFIER_POINTER (id);
414 0 : len = IDENTIFIER_LENGTH (id);
415 : }
416 : else
417 : return 0;
418 :
419 2174 : enum strub_mode mode;
420 :
421 2174 : if (len != 8)
422 : return 0;
423 :
424 2174 : switch (s[0])
425 : {
426 : case 'd':
427 : mode = STRUB_DISABLED;
428 : ret = -1;
429 : break;
430 :
431 578 : case 'a':
432 578 : mode = STRUB_AT_CALLS;
433 578 : ret = 2;
434 578 : break;
435 :
436 562 : case 'i':
437 562 : mode = STRUB_INTERNAL;
438 562 : ret = 1;
439 562 : break;
440 :
441 580 : case 'c':
442 580 : mode = STRUB_CALLABLE;
443 580 : ret = -2;
444 580 : break;
445 :
446 : default:
447 : /* Other parms are for internal use only. */
448 : return 0;
449 : }
450 :
451 2174 : tree mode_id = get_strub_mode_attr_parm (mode);
452 :
453 2174 : if (TREE_CODE (id) == IDENTIFIER_NODE
454 2174 : ? id != mode_id
455 2174 : : strncmp (s, IDENTIFIER_POINTER (mode_id), len) != 0)
456 : return 0;
457 :
458 : return ret;
459 : }
460 :
461 : /* Return the strub mode from STRUB_ATTR. VAR_P should be TRUE if the attribute
462 : is taken from a variable, rather than from a function, or a type thereof. */
463 :
464 : static enum strub_mode
465 10537538 : get_strub_mode_from_attr (tree strub_attr, bool var_p = false)
466 : {
467 10537538 : enum strub_mode mode = STRUB_DISABLED;
468 :
469 10537538 : if (strub_attr)
470 : {
471 75121 : if (!TREE_VALUE (strub_attr))
472 3487 : mode = !var_p ? STRUB_AT_CALLS : STRUB_INTERNAL;
473 : else
474 : {
475 71634 : gcc_checking_assert (!var_p);
476 71634 : tree id = TREE_VALUE (strub_attr);
477 71634 : if (TREE_CODE (id) == TREE_LIST)
478 55942 : id = TREE_VALUE (id);
479 71634 : const char *s = (TREE_CODE (id) == STRING_CST
480 71634 : ? TREE_STRING_POINTER (id)
481 71634 : : IDENTIFIER_POINTER (id));
482 71634 : size_t len = (TREE_CODE (id) == STRING_CST
483 71634 : ? TREE_STRING_LENGTH (id) - 1
484 71634 : : IDENTIFIER_LENGTH (id));
485 :
486 71634 : switch (len)
487 : {
488 3632 : case 7:
489 3632 : switch (s[6])
490 : {
491 : case 'r':
492 : mode = STRUB_WRAPPER;
493 : break;
494 :
495 1739 : case 'd':
496 1739 : mode = STRUB_WRAPPED;
497 1739 : break;
498 :
499 0 : default:
500 0 : gcc_unreachable ();
501 : }
502 : break;
503 :
504 64860 : case 8:
505 64860 : switch (s[0])
506 : {
507 : case 'd':
508 : mode = STRUB_DISABLED;
509 : break;
510 :
511 17943 : case 'a':
512 17943 : mode = STRUB_AT_CALLS;
513 17943 : break;
514 :
515 14400 : case 'i':
516 14400 : mode = STRUB_INTERNAL;
517 14400 : break;
518 :
519 22812 : case 'c':
520 22812 : mode = STRUB_CALLABLE;
521 22812 : break;
522 :
523 0 : default:
524 0 : gcc_unreachable ();
525 : }
526 : break;
527 :
528 : case 9:
529 : mode = STRUB_INLINABLE;
530 : break;
531 :
532 32 : case 12:
533 32 : mode = STRUB_AT_CALLS_OPT;
534 32 : break;
535 :
536 0 : default:
537 0 : gcc_unreachable ();
538 : }
539 :
540 71634 : gcc_checking_assert (TREE_CODE (id) == IDENTIFIER_NODE
541 : ? id == get_strub_mode_attr_parm (mode)
542 : : strncmp (IDENTIFIER_POINTER
543 : (get_strub_mode_attr_parm (mode)),
544 : s, len) == 0);
545 : }
546 : }
547 :
548 10537538 : return mode;
549 : }
550 :
551 : /* Look up, decode and return the strub mode associated with FNDECL. */
552 :
553 : static enum strub_mode
554 10495721 : get_strub_mode_from_fndecl (tree fndecl)
555 : {
556 10495721 : return get_strub_mode_from_attr (get_strub_attr_from_decl (fndecl));
557 : }
558 :
559 : /* Look up, decode and return the strub mode associated with NODE. */
560 :
561 : static enum strub_mode
562 10475978 : get_strub_mode (cgraph_node *node)
563 : {
564 0 : return get_strub_mode_from_fndecl (node->decl);
565 : }
566 :
567 : /* Look up, decode and return the strub mode associated with TYPE. */
568 :
569 : static enum strub_mode
570 2998376 : get_strub_mode_from_type (tree type)
571 : {
572 2998376 : bool var_p = !FUNC_OR_METHOD_TYPE_P (type);
573 2998376 : tree attr = get_strub_attr_from_type (type);
574 :
575 2998376 : if (attr)
576 32240 : return get_strub_mode_from_attr (attr, var_p);
577 :
578 2966136 : if (flag_strub >= -1 && !var_p)
579 7968 : return STRUB_CALLABLE;
580 :
581 : return STRUB_DISABLED;
582 : }
583 :
584 :
585 : /* Return TRUE iff NODE calls builtin va_start. */
586 :
587 : static bool
588 470 : calls_builtin_va_start_p (cgraph_node *node)
589 : {
590 470 : bool result = false;
591 :
592 3072 : for (cgraph_edge *e = node->callees; e; e = e->next_callee)
593 : {
594 2610 : tree cdecl = e->callee->decl;
595 2610 : if (fndecl_built_in_p (cdecl, BUILT_IN_VA_START))
596 : return true;
597 : }
598 :
599 : return result;
600 : }
601 :
602 : /* Return TRUE iff NODE calls builtin apply_args, and optionally REPORT it. */
603 :
604 : static bool
605 2830 : calls_builtin_apply_args_p (cgraph_node *node, bool report = false)
606 : {
607 2830 : bool result = false;
608 :
609 10133 : for (cgraph_edge *e = node->callees; e; e = e->next_callee)
610 : {
611 7331 : tree cdecl = e->callee->decl;
612 7331 : if (!fndecl_built_in_p (cdecl, BUILT_IN_APPLY_ARGS))
613 7299 : continue;
614 :
615 32 : result = true;
616 :
617 32 : if (!report)
618 : break;
619 :
620 8 : sorry_at (e->call_stmt
621 4 : ? gimple_location (e->call_stmt)
622 0 : : DECL_SOURCE_LOCATION (node->decl),
623 : "at-calls %<strub%> does not support call to %qD",
624 : cdecl);
625 : }
626 :
627 2830 : return result;
628 : }
629 :
630 : /* Return TRUE iff NODE carries the always_inline attribute. */
631 :
632 : static inline bool
633 7668 : strub_always_inline_p (cgraph_node *node)
634 : {
635 7668 : return lookup_attribute ("always_inline", DECL_ATTRIBUTES (node->decl));
636 : }
637 :
638 : /* Return TRUE iff the target has strub support for T, a function
639 : decl, or a type used in an indirect call, and optionally REPORT the
640 : reasons for ineligibility. If T is a type and error REPORTing is
641 : enabled, the LOCation (of the indirect call) should be provided. */
642 : static inline bool
643 4229 : strub_target_support_p (tree t, bool report = false,
644 : location_t loc = UNKNOWN_LOCATION)
645 : {
646 4229 : bool result = true;
647 :
648 4229 : if (!targetm.have_strub_support_for (t))
649 : {
650 0 : result = false;
651 :
652 0 : if (!report)
653 : return result;
654 :
655 0 : if (DECL_P (t))
656 0 : sorry_at (DECL_SOURCE_LOCATION (t),
657 : "%qD is not eligible for %<strub%>"
658 : " on the target system", t);
659 : else
660 0 : sorry_at (loc,
661 : "unsupported %<strub%> call"
662 : " on the target system");
663 : }
664 :
665 : return result;
666 : }
667 :
668 : /* Return TRUE iff NODE is potentially eligible for any strub-enabled mode, and
669 : optionally REPORT the reasons for ineligibility. */
670 :
671 : static inline bool
672 2305 : can_strub_p (cgraph_node *node, bool report = false)
673 : {
674 2305 : bool result = strub_target_support_p (node->decl, report);
675 :
676 2305 : if (!report && (!result || strub_always_inline_p (node)))
677 0 : return result;
678 :
679 2305 : auto_urlify_attributes sentinel;
680 :
681 2305 : if (flag_split_stack)
682 : {
683 16 : result = false;
684 :
685 16 : if (!report)
686 : return result;
687 :
688 8 : sorry_at (DECL_SOURCE_LOCATION (node->decl),
689 : "%qD is not eligible for %<strub%>"
690 : " because %<-fsplit-stack%> is enabled",
691 : node->decl);
692 : }
693 :
694 2297 : if (lookup_attribute ("noipa", DECL_ATTRIBUTES (node->decl)))
695 : {
696 1 : result = false;
697 :
698 1 : if (!report)
699 : return result;
700 :
701 0 : sorry_at (DECL_SOURCE_LOCATION (node->decl),
702 : "%qD is not eligible for %<strub%>"
703 : " because of attribute %<noipa%>",
704 : node->decl);
705 : }
706 :
707 : /* We can't, and don't want to vectorize the watermark and other
708 : strub-introduced parms. */
709 2296 : if (lookup_attribute ("simd", DECL_ATTRIBUTES (node->decl)))
710 : {
711 0 : result = false;
712 :
713 0 : if (!report)
714 : return result;
715 :
716 0 : sorry_at (DECL_SOURCE_LOCATION (node->decl),
717 : "%qD is not eligible for %<strub%>"
718 : " because of attribute %<simd%>",
719 : node->decl);
720 : }
721 :
722 : return result;
723 2305 : }
724 :
725 : /* Return TRUE iff NODE is eligible for at-calls strub, and optionally REPORT
726 : the reasons for ineligibility. Besides general non-eligibility for
727 : strub-enabled modes, at-calls rules out calling builtin apply_args. */
728 :
729 : static bool
730 2360 : can_strub_at_calls_p (cgraph_node *node, bool report = false)
731 : {
732 2360 : bool result = !report || can_strub_p (node, report);
733 :
734 : if (!result && !report)
735 : return result;
736 :
737 2360 : return !calls_builtin_apply_args_p (node, report);
738 : }
739 :
740 : /* Return TRUE iff the called function (pointer or, if available,
741 : decl) undergoes a significant type conversion for the call. Strub
742 : mode changes between function types, and other non-useless type
743 : conversions, are regarded as significant. When the function type
744 : is overridden, the effective strub mode for the call is that of the
745 : call fntype, rather than that of the pointer or of the decl.
746 : Functions called with type overrides cannot undergo type changes;
747 : it's as if their address was taken, so they're considered
748 : non-viable for implicit at-calls strub mode. */
749 :
750 : static inline bool
751 19508 : strub_call_fntype_override_p (const gcall *gs)
752 : {
753 19508 : if (gimple_call_internal_p (gs))
754 : return false;
755 19508 : tree fn_type = TREE_TYPE (TREE_TYPE (gimple_call_fn (gs)));
756 19508 : if (tree decl = gimple_call_fndecl (gs))
757 19236 : fn_type = TREE_TYPE (decl);
758 :
759 : /* We do NOT want to take the mode from the decl here. This
760 : function is used to tell whether we can change the strub mode of
761 : a function, and whether the effective mode for the call is to be
762 : taken from the decl or from an overrider type. When the strub
763 : mode is explicitly declared, or overridden with a type cast, the
764 : difference will be noticed in function types. However, if the
765 : strub mode is implicit due to e.g. strub variables or -fstrub=*
766 : command-line flags, we will adjust call types along with function
767 : types. In either case, the presence of type or strub mode
768 : overriders in calls will prevent a function from having its strub
769 : modes changed in ways that would imply type changes, but taking
770 : strub modes from decls would defeat this, since we set strub
771 : modes and then call this function to tell whether the original
772 : type was overridden to decide whether to adjust the call. We
773 : need the answer to be about the type, not the decl. */
774 19508 : enum strub_mode mode = get_strub_mode_from_type (fn_type);
775 19508 : return (get_strub_mode_from_type (gs->u.fntype) != mode
776 19508 : || !useless_type_conversion_p (gs->u.fntype, fn_type));
777 : }
778 :
779 : /* Return TRUE iff NODE is called directly with a type override. */
780 :
781 : static bool
782 449 : called_directly_with_type_override_p (cgraph_node *node, void *)
783 : {
784 1274 : for (cgraph_edge *e = node->callers; e; e = e->next_caller)
785 825 : if (e->call_stmt && strub_call_fntype_override_p (e->call_stmt))
786 : return true;
787 :
788 : return false;
789 : }
790 :
791 : /* Return TRUE iff NODE or any other nodes aliased to it are called
792 : with type overrides. We can't safely change the type of such
793 : functions. */
794 :
795 : static bool
796 449 : called_with_type_override_p (cgraph_node *node)
797 : {
798 449 : return (node->call_for_symbol_thunks_and_aliases
799 449 : (called_directly_with_type_override_p, NULL, true, true));
800 : }
801 :
/* Symbolic macro for the max number of arguments that internal strub may add
   to a function.  */

#define STRUB_INTERNAL_MAX_EXTRA_ARGS 3
806 :
807 : /* We can't perform internal strubbing if the function body involves certain
808 : features:
809 :
810 : - a non-default __builtin_va_start (e.g. x86's __builtin_ms_va_start) is
811 : currently unsupported because we can't discover the corresponding va_copy and
812 : va_end decls in the wrapper, and we don't convey the alternate variable
813 : arguments ABI to the modified wrapped function. The default
814 : __builtin_va_start is supported by calling va_start/va_end at the wrapper,
815 : that takes variable arguments, passing a pointer to the va_list object to the
816 : wrapped function, that runs va_copy from it where the original function ran
817 : va_start.
818 :
819 : __builtin_next_arg is currently unsupported because the wrapped function
820 : won't be a variable argument function. We could process it in the wrapper,
821 : that remains a variable argument function, and replace calls in the wrapped
822 : body, but we currently don't.
823 :
824 : __builtin_return_address is rejected because it's generally used when the
825 : actual caller matters, and introducing a wrapper breaks such uses as those in
826 : the unwinder. */
827 :
/* Return TRUE iff NODE is eligible for internal strubbing, i.e., for the
   wrapper-plus-wrapped-clone transformation.  If REPORT, also re-run the
   general strub checks and issue a "sorry" diagnostic for every blocker
   found, instead of returning at the first one.  */

static bool
can_strub_internally_p (cgraph_node *node, bool report = false)
{
  /* When reporting, also run the general eligibility checks so that all
     problems get diagnosed; otherwise the caller is assumed to have
     checked them already.  */
  bool result = !report || can_strub_p (node, report);

  if (!result && !report)
    return result;

  /* Always-inline functions are left alone (no cloning), so the
     internal-strub-specific restrictions below don't apply to them.  */
  if (!report && strub_always_inline_p (node))
    return result;

  /* Since we're not changing the function identity proper, just
     moving its full implementation, we *could* disable
     fun->cannot_be_copied_reason and/or temporarily drop a noclone
     attribute, but we'd have to prevent remapping of the labels.  */
  if (lookup_attribute ("noclone", DECL_ATTRIBUTES (node->decl)))
    {
      result = false;

      if (!report)
	return result;

      sorry_at (DECL_SOURCE_LOCATION (node->decl),
		"%qD is not eligible for internal %<strub%>"
		" because of attribute %<noclone%>",
		node->decl);
    }

  if (node->has_gimple_body_p ())
    {
      /* Reject calls to builtins the wrapped clone couldn't support;
	 see the rationale in the comment preceding this function.  */
      for (cgraph_edge *e = node->callees; e; e = e->next_callee)
	{
	  tree cdecl = e->callee->decl;
	  if (!((fndecl_built_in_p (cdecl, BUILT_IN_VA_START)
		 && cdecl != builtin_decl_explicit (BUILT_IN_VA_START))
		|| fndecl_built_in_p (cdecl, BUILT_IN_NEXT_ARG)
		|| fndecl_built_in_p (cdecl, BUILT_IN_RETURN_ADDRESS)))
	    continue;

	  result = false;

	  if (!report)
	    return result;

	  /* Point at the offending call if we have a statement for it,
	     otherwise at the function itself.  */
	  sorry_at (e->call_stmt
		    ? gimple_location (e->call_stmt)
		    : DECL_SOURCE_LOCATION (node->decl),
		    "%qD is not eligible for internal %<strub%> "
		    "because it calls %qD",
		    node->decl, cdecl);
	}

      struct function *fun = DECL_STRUCT_FUNCTION (node->decl);
      if (fun->has_nonlocal_label)
	{
	  result = false;

	  if (!report)
	    return result;

	  sorry_at (DECL_SOURCE_LOCATION (node->decl),
		    "%qD is not eligible for internal %<strub%> "
		    "because it contains a non-local goto target",
		    node->decl);
	}

      if (fun->has_forced_label_in_static)
	{
	  result = false;

	  if (!report)
	    return result;

	  sorry_at (DECL_SOURCE_LOCATION (node->decl),
		    "%qD is not eligible for internal %<strub%> "
		    "because the address of a local label escapes",
		    node->decl);
	}

      /* Catch any other case that would prevent versioning/cloning
	 so as to also have it covered above.  */
      gcc_checking_assert (!result /* || !node->has_gimple_body_p () */
			   || tree_versionable_function_p (node->decl));


      /* Label values references are not preserved when copying.  If referenced
	 in nested functions, as in 920415-1.c and 920721-4.c their decls get
	 remapped independently.  The exclusion below might be too broad, in
	 that we might be able to support correctly cases in which the labels
	 are only used internally in a function, but disconnecting forced labels
	 from their original declarations is undesirable in general.  */
      basic_block bb;
      FOR_EACH_BB_FN (bb, DECL_STRUCT_FUNCTION (node->decl))
	for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
	     !gsi_end_p (gsi); gsi_next (&gsi))
	  {
	    glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
	    tree target;

	    /* Labels cluster at the start of a block; stop scanning this
	       block at the first non-label statement.  */
	    if (!label_stmt)
	      break;

	    target = gimple_label_label (label_stmt);

	    if (!FORCED_LABEL (target))
	      continue;

	    result = false;

	    if (!report)
	      return result;

	    sorry_at (gimple_location (label_stmt),
		      "internal %<strub%> does not support forced labels");
	  }
    }

  /* Internal strub adds up to STRUB_INTERNAL_MAX_EXTRA_ARGS parameters to
     the wrapped clone; refuse functions whose parameter count could then
     overflow the IPA parameter index encoding.  */
  if (list_length (TYPE_ARG_TYPES (TREE_TYPE (node->decl)))
      >= ((HOST_WIDE_INT_1 << IPA_PARAM_MAX_INDEX_BITS)
	  - STRUB_INTERNAL_MAX_EXTRA_ARGS))
    {
      result = false;

      if (!report)
	return result;

      sorry_at (DECL_SOURCE_LOCATION (node->decl),
		"%qD has too many arguments for internal %<strub%>",
		node->decl);
    }

  return result;
}
961 :
962 : /* Return TRUE iff NODE has any strub-requiring local variable, or accesses (as
963 : in reading) any variable through a strub-requiring type. */
964 :
/* Return TRUE iff NODE has any strub-requiring local variable, or loads (as
   in reading) any value through a strub-requiring type.  */

static bool
strub_from_body_p (cgraph_node *node)
{
  /* Without a GIMPLE body there is nothing to scan.  */
  if (!node->has_gimple_body_p ())
    return false;

  /* If any local variable is marked for strub...  */
  unsigned i;
  tree var;
  FOR_EACH_LOCAL_DECL (DECL_STRUCT_FUNCTION (node->decl),
		       i, var)
    if (get_strub_mode_from_type (TREE_TYPE (var))
	!= STRUB_DISABLED)
      return true;

  /* Now scan the body for loads with strub-requiring types.
     ??? Compound types don't propagate the strub requirement to
     component types.  */
  basic_block bb;
  FOR_EACH_BB_FN (bb, DECL_STRUCT_FUNCTION (node->decl))
    for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
	 !gsi_end_p (gsi); gsi_next (&gsi))
      {
	gimple *stmt = gsi_stmt (gsi);

	/* Only assignment loads are of interest here.  */
	if (!gimple_assign_load_p (stmt))
	  continue;

	tree rhs = gimple_assign_rhs1 (stmt);
	if (get_strub_mode_from_type (TREE_TYPE (rhs))
	    != STRUB_DISABLED)
	  return true;
      }

  return false;
}
1001 :
1002 : /* Return TRUE iff node is associated with a builtin that should be callable
1003 : from strub contexts. */
1004 :
1005 : static inline bool
1006 1175 : strub_callable_builtin_p (cgraph_node *node)
1007 : {
1008 1175 : if (DECL_BUILT_IN_CLASS (node->decl) != BUILT_IN_NORMAL)
1009 : return false;
1010 :
1011 489 : enum built_in_function fcode = DECL_FUNCTION_CODE (node->decl);
1012 :
1013 489 : switch (fcode)
1014 : {
1015 0 : case BUILT_IN_NONE:
1016 0 : gcc_unreachable ();
1017 :
1018 : /* This temporarily allocates stack for the call, and we can't reasonably
1019 : update the watermark for that. Besides, we don't check the actual call
1020 : target, nor its signature, and it seems to be overkill to as much as
1021 : try to do so. */
1022 : case BUILT_IN_APPLY:
1023 : return false;
1024 :
1025 : /* Conversely, this shouldn't be called from within strub contexts, since
1026 : the caller may have had its signature modified. STRUB_INTERNAL is ok,
1027 : the call will remain in the STRUB_WRAPPER, and removed from the
1028 : STRUB_WRAPPED clone. */
1029 : case BUILT_IN_APPLY_ARGS:
1030 : return false;
1031 :
1032 : /* ??? Make all other builtins callable. We wish to make any builtin call
1033 : the compiler might introduce on its own callable. Anything that is
1034 : predictable enough as to be known not to allow stack data that should
1035 : be strubbed to unintentionally escape to non-strub contexts can be
1036 : allowed, and pretty much every builtin appears to fit this description.
1037 : The exceptions to this rule seem to be rare, and only available as
1038 : explicit __builtin calls, so let's keep it simple and allow all of
1039 : them... */
1040 : default:
1041 : return true;
1042 : }
1043 : }
1044 :
/* Compute the strub mode to be used for NODE.  STRUB_ATTR should be the strub
   attribute found for NODE, if any.  */
1047 :
static enum strub_mode
compute_strub_mode (cgraph_node *node, tree strub_attr)
{
  /* The mode explicitly requested by the attribute, if any (STRUB_DISABLED
     when STRUB_ATTR is NULL).  */
  enum strub_mode req_mode = get_strub_mode_from_attr (strub_attr);

  gcc_checking_assert (flag_strub >= -2 && flag_strub <= 3);

  /* Symbolic encodings of the -fstrub-* flags.  */
  /* Enable strub when explicitly requested through attributes to functions or
     variables, reporting errors if the requests cannot be satisfied.  */
  const bool strub_flag_auto = flag_strub < 0;
  /* strub_flag_auto with strub call verification; without this, functions are
     implicitly callable.  */
  const bool strub_flag_strict = flag_strub < -1;
  /* Disable strub altogether, ignore attributes entirely.  */
  const bool strub_flag_disabled = flag_strub == 0;
  /* On top of _auto, also enable strub implicitly for functions that can
     safely undergo at-calls strubbing.  Internal mode will still be used in
     functions that request it explicitly with attribute strub(2), or when the
     function body requires strubbing and at-calls strubbing is not viable.  */
  const bool strub_flag_at_calls = flag_strub == 1;
  /* On top of default, also enable strub implicitly for functions that can
     safely undergo internal strubbing.  At-calls mode will still be used in
     functions that request it explicitly with attribute strub() or strub(1),
     or when the function body requires strubbing and internal strubbing is not
     viable.  */
  const bool strub_flag_internal = flag_strub == 2;
  /* On top of default, also enable strub implicitly for functions that can
     safely undergo strubbing in either mode.  When both modes are viable,
     at-calls is preferred.  */
  const bool strub_flag_either = flag_strub == 3;
  /* Besides the default behavior, enable strub implicitly for all viable
     functions.  */
  const bool strub_flag_viable = flag_strub > 0;

  /* The consider_* variables should be TRUE if selecting the corresponding
     strub modes would be consistent with requests from attributes and command
     line flags.  Attributes associated with functions pretty much mandate a
     selection, and should report an error if not satisfied; strub_flag_auto
     implicitly enables some viable strub mode if that's required by references
     to variables marked for strub; strub_flag_viable enables strub if viable
     (even when favoring one mode, body-requested strub can still be satisfied
     by either mode), and falls back to callable, silently unless variables
     require strubbing.  */

  const bool consider_at_calls
    = (!strub_flag_disabled
       && (strub_attr
	   ? req_mode == STRUB_AT_CALLS
	   : true));
  const bool consider_internal
    = (!strub_flag_disabled
       && (strub_attr
	   ? req_mode == STRUB_INTERNAL
	   : true));

  const bool consider_callable
    = (!strub_flag_disabled
       && (strub_attr
	   ? req_mode == STRUB_CALLABLE
	   : (!strub_flag_strict
	      || strub_callable_builtin_p (node))));

  /* This is a shorthand for either strub-enabled mode.  */
  const bool consider_strub
    = (consider_at_calls || consider_internal);

  /* We can cope with always_inline functions even with noipa and noclone,
     because we just leave them alone.  */
  const bool is_always_inline
    = strub_always_inline_p (node);

  /* Strubbing in general, and each specific strub mode, may have its own set of
     requirements.  We require noipa for strubbing, either because of cloning
     required for internal strub, or because of caller enumeration required for
     at-calls strub.  We don't consider the at-calls mode eligible if it's not
     even considered, it has no further requirements.  Internal mode requires
     cloning and the absence of certain features in the body and, like at-calls,
     it's not eligible if it's not even under consideration.

     ??? Do we need target hooks for further constraints?  E.g., x86's
     "interrupt" attribute breaks internal strubbing because the wrapped clone
     carries the attribute and thus isn't callable; in this case, we could use a
     target hook to adjust the clone instead.  */
  const bool strub_eligible
    = (consider_strub
       && (is_always_inline || can_strub_p (node)));
  const bool at_calls_eligible
    = (consider_at_calls && strub_eligible
       && can_strub_at_calls_p (node));
  const bool internal_eligible
    = (consider_internal && strub_eligible
       && (is_always_inline
	   || can_strub_internally_p (node)));

  /* In addition to the strict eligibility requirements, some additional
     constraints are placed on implicit selection of certain modes.  These do
     not prevent the selection of a mode if explicitly specified as part of a
     function interface (the strub attribute), but they may prevent modes from
     being selected by the command line or by function bodies.  The only actual
     constraint is on at-calls mode: since we change the function's exposed
     signature, we won't do it implicitly if the function can possibly be used
     in ways that do not expect the signature change, e.g., if the function is
     available to or interposable by other units, if its address is taken,
     etc.  */
  const bool at_calls_viable
    = (at_calls_eligible
       && (strub_attr
	   || (node->has_gimple_body_p ()
	       && (!node->externally_visible
		   || (node->binds_to_current_def_p ()
		       && node->can_be_local_p ()))
	       && node->only_called_directly_p ()
	       && !called_with_type_override_p (node))));
  const bool internal_viable
    = (internal_eligible);

  /* Shorthand.  */
  const bool strub_viable
    = (at_calls_viable || internal_viable);

  /* We wish to analyze the body, to look for implicit requests for strub, both
     to implicitly enable it when the body calls for it, and to report errors if
     the body calls for it but neither mode is viable (even if that follows from
     non-eligibility because of the explicit specification of some non-strubbing
     mode).  We can refrain from scanning the body only in rare circumstances:
     when strub is enabled by a function attribute (scanning might be redundant
     in telling us to also enable it), and when we are enabling strub implicitly
     but there are non-viable modes: we want to know whether strubbing is
     required, to fallback to another mode, even if we're only enabling a
     certain mode, or, when either mode would do, to report an error if neither
     happens to be viable.  */
  const bool analyze_body
    = (strub_attr
       ? !consider_strub
       : (strub_flag_auto
	  || (strub_flag_viable && (!at_calls_viable && !internal_viable))
	  || (strub_flag_either && !strub_viable)));

  /* Cases in which strubbing is enabled or disabled by strub_flag_auto.
     Unsatisfiable requests ought to be reported.  */
  const bool strub_required
    = ((strub_attr && consider_strub)
       || (analyze_body && strub_from_body_p (node)));

  /* Besides the required cases, we want to abide by the requests to enabling on
     an if-viable basis.  */
  const bool strub_enable
    = (strub_required
       || (strub_flag_at_calls && at_calls_viable)
       || (strub_flag_internal && internal_viable)
       || (strub_flag_either && strub_viable));

  /* And now we're finally ready to select a mode that abides by the viability
     and eligibility constraints, and that satisfies the strubbing requirements
     and requests, subject to the constraints.  If both modes are viable and
     strub is to be enabled, pick STRUB_AT_CALLS unless STRUB_INTERNAL was named
     as preferred.  */
  const enum strub_mode mode
    = ((strub_enable && is_always_inline)
       ? (strub_required ? STRUB_INLINABLE : STRUB_CALLABLE)
       : (strub_enable && internal_viable
	  && (strub_flag_internal || !at_calls_viable))
       ? STRUB_INTERNAL
       : (strub_enable && at_calls_viable)
       ? (strub_required && !strub_attr
	  ? STRUB_AT_CALLS_OPT
	  : STRUB_AT_CALLS)
       : consider_callable
       ? STRUB_CALLABLE
       : STRUB_DISABLED);

  /* Post-selection checks and diagnostics.  */
  switch (mode)
    {
    case STRUB_CALLABLE:
      if (is_always_inline)
	break;
      /* Fall through.  */

    case STRUB_DISABLED:
      if (strub_enable && !strub_attr)
	{
	  gcc_checking_assert (analyze_body);
	  error_at (DECL_SOURCE_LOCATION (node->decl),
		    "%qD requires %<strub%>,"
		    " but no viable %<strub%> mode was found",
		    node->decl);
	  break;
	}
      /* Fall through.  */

    case STRUB_AT_CALLS:
    case STRUB_INTERNAL:
    case STRUB_INLINABLE:
      /* Differences from a mode requested through a function attribute are
	 reported in set_strub_mode_to.  */
      break;

    case STRUB_AT_CALLS_OPT:
      /* Functions that select this mode do so because of references to strub
	 variables.  Even if we choose at-calls as an optimization, the
	 requirements for internal strub must still be satisfied.  Optimization
	 options may render implicit at-calls strub not viable (-O0 sets
	 force_output for static non-inline functions), and it would not be good
	 if changing optimization options turned a well-formed into an
	 ill-formed one.  */
      if (!internal_viable)
	can_strub_internally_p (node, true);
      break;

    case STRUB_WRAPPED:
    case STRUB_WRAPPER:
    default:
      gcc_unreachable ();
    }

  return mode;
}
1266 :
1267 : /* Set FNDT's strub mode to MODE; FNDT may be a function decl or
1268 : function type. If OVERRIDE, do not check whether a mode is already
1269 : set. */
1270 :
1271 : static void
1272 2616 : strub_set_fndt_mode_to (tree fndt, enum strub_mode mode, bool override)
1273 : {
1274 2616 : gcc_checking_assert (override
1275 : || !(DECL_P (fndt)
1276 : ? get_strub_attr_from_decl (fndt)
1277 : : get_strub_attr_from_type (fndt)));
1278 :
1279 2616 : tree attr = tree_cons (get_identifier ("strub"),
1280 : get_strub_mode_attr_value (mode),
1281 : NULL_TREE);
1282 2616 : tree *attrp = NULL;
1283 2616 : if (DECL_P (fndt))
1284 : {
1285 2616 : gcc_checking_assert (FUNC_OR_METHOD_TYPE_P (TREE_TYPE (fndt)));
1286 2616 : attrp = &DECL_ATTRIBUTES (fndt);
1287 : }
1288 0 : else if (FUNC_OR_METHOD_TYPE_P (fndt))
1289 0 : attrp = &TYPE_ATTRIBUTES (fndt);
1290 : else
1291 0 : gcc_unreachable ();
1292 :
1293 2616 : TREE_CHAIN (attr) = *attrp;
1294 2616 : *attrp = attr;
1295 2616 : }
1296 :
1297 : /* Set FNDT's strub mode to callable.
1298 : FNDT may be a function decl or a function type. */
1299 :
void
strub_make_callable (tree fndt)
{
  /* Non-overriding: in checking builds this asserts that FNDT doesn't
     already carry a strub attribute.  */
  strub_set_fndt_mode_to (fndt, STRUB_CALLABLE, false);
}
1305 :
1306 : /* Set NODE to strub MODE. Report incompatibilities between MODE and the mode
1307 : requested through explicit attributes, and cases of non-eligibility. */
1308 :
/* Set NODE to strub MODE.  Report incompatibilities between MODE and the mode
   requested through explicit attributes, and cases of non-eligibility.  */

static void
set_strub_mode_to (cgraph_node *node, enum strub_mode mode)
{
  tree attr = get_strub_attr_from_decl (node->decl);
  enum strub_mode req_mode = get_strub_mode_from_attr (attr);

  if (attr)
    {
      /* Check for and report incompatible mode changes.  The internal ->
	 wrapped/wrapper transitions and the demotion of explicitly-requested
	 modes to inlinable (for always_inline functions) are expected and
	 not diagnosed.  */
      if (mode != req_mode
	  && !(req_mode == STRUB_INTERNAL
	       && (mode == STRUB_WRAPPED
		   || mode == STRUB_WRAPPER))
	  && !((req_mode == STRUB_INTERNAL
		|| req_mode == STRUB_AT_CALLS
		|| req_mode == STRUB_CALLABLE)
	       && mode == STRUB_INLINABLE))
	{
	  error_at (DECL_SOURCE_LOCATION (node->decl),
		    "%<strub%> mode %qE selected for %qD, when %qE was requested",
		    get_strub_mode_attr_parm (mode),
		    node->decl,
		    get_strub_mode_attr_parm (req_mode));
	  if (node->alias)
	    {
	      /* An alias's mode is inherited from its ultimate target;
		 point at the target that determined the selection.  */
	      cgraph_node *target = node->ultimate_alias_target ();
	      if (target != node)
		error_at (DECL_SOURCE_LOCATION (target->decl),
			  "the incompatible selection was determined"
			  " by ultimate alias target %qD",
			  target->decl);
	    }

	  /* Report any incompatibilities with explicitly-requested strub.  */
	  switch (req_mode)
	    {
	    case STRUB_AT_CALLS:
	      can_strub_at_calls_p (node, true);
	      break;

	    case STRUB_INTERNAL:
	      can_strub_internally_p (node, true);
	      break;

	    default:
	      break;
	    }
	}

      /* Drop any incompatible strub attributes leading the decl attribute
	 chain.  Return if we find one with the mode we need.  */
      for (;;)
	{
	  if (mode == req_mode)
	    return;

	  /* Only drop strub attributes at the head of the chain; a strub
	     attribute that appears after unrelated ones is left alone.  */
	  if (DECL_ATTRIBUTES (node->decl) != attr)
	    break;

	  DECL_ATTRIBUTES (node->decl) = TREE_CHAIN (attr);
	  attr = get_strub_attr_from_decl (node->decl);
	  if (!attr)
	    break;

	  req_mode = get_strub_mode_from_attr (attr);
	}
    }
  else if (mode == req_mode)
    return;

  /* Record the selected mode as a fresh strub attribute on the decl.  */
  strub_set_fndt_mode_to (node->decl, mode, attr);
}
1381 :
1382 : /* Compute and set NODE's strub mode. */
1383 :
/* Compute and set NODE's strub mode.  */

static void
set_strub_mode (cgraph_node *node)
{
  tree attr = get_strub_attr_from_decl (node->decl);

  if (attr)
    switch (get_strub_mode_from_attr (attr))
      {
	/* These can't have been requested through user attributes, so we must
	   have already gone through them.  */
      case STRUB_WRAPPER:
      case STRUB_WRAPPED:
      case STRUB_INLINABLE:
      case STRUB_AT_CALLS_OPT:
	return;

      case STRUB_DISABLED:
      case STRUB_AT_CALLS:
      case STRUB_INTERNAL:
      case STRUB_CALLABLE:
	break;

      default:
	gcc_unreachable ();
      }

  /* Aliases take the strub mode of their ultimate target.  */
  cgraph_node *xnode = node;
  if (node->alias)
    xnode = node->ultimate_alias_target ();
  /* Weakrefs may remain unresolved (the above will return node) if
     their targets are not defined, so make sure we compute a strub
     mode for them, instead of defaulting to STRUB_DISABLED and
     rendering them uncallable.  */
  enum strub_mode mode = (xnode != node && !xnode->alias
			  ? get_strub_mode (xnode)
			  : compute_strub_mode (node, attr));

  set_strub_mode_to (node, mode);
}
1423 :
1424 :
1425 : /* Non-strub functions shouldn't be called from within strub contexts,
1426 : except through callable ones. Always inline strub functions can
1427 : only be called from strub functions. */
1428 :
1429 : static bool
1430 9148 : strub_callable_from_p (strub_mode caller_mode, strub_mode callee_mode)
1431 : {
1432 9148 : switch (caller_mode)
1433 : {
1434 4834 : case STRUB_WRAPPED:
1435 4834 : case STRUB_AT_CALLS_OPT:
1436 4834 : case STRUB_AT_CALLS:
1437 4834 : case STRUB_INTERNAL:
1438 4834 : case STRUB_INLINABLE:
1439 4834 : break;
1440 :
1441 4314 : case STRUB_WRAPPER:
1442 4314 : case STRUB_DISABLED:
1443 4314 : case STRUB_CALLABLE:
1444 4314 : return callee_mode != STRUB_INLINABLE;
1445 :
1446 0 : default:
1447 0 : gcc_unreachable ();
1448 : }
1449 :
1450 4834 : switch (callee_mode)
1451 : {
1452 : case STRUB_WRAPPED:
1453 : case STRUB_AT_CALLS:
1454 : case STRUB_INLINABLE:
1455 : break;
1456 :
1457 861 : case STRUB_AT_CALLS_OPT:
1458 861 : case STRUB_INTERNAL:
1459 861 : case STRUB_WRAPPER:
1460 861 : return (flag_strub >= -1);
1461 :
1462 : case STRUB_DISABLED:
1463 : return false;
1464 :
1465 : case STRUB_CALLABLE:
1466 : break;
1467 :
1468 0 : default:
1469 0 : gcc_unreachable ();
1470 : }
1471 :
1472 3637 : return true;
1473 : }
1474 :
1475 : /* Return TRUE iff CALLEE can be inlined into CALLER. We wish to avoid inlining
1476 : WRAPPED functions back into their WRAPPERs. More generally, we wish to avoid
1477 : inlining strubbed functions into non-strubbed ones. CALLER doesn't have to
1478 : be an immediate caller of CALLEE: the immediate caller may have already been
1479 : cloned for inlining, and then CALLER may be further up the original call
1480 : chain. ??? It would be nice if our own caller would retry inlining callee
1481 : if caller gets inlined. */
1482 :
1483 : bool
1484 9225378 : strub_inlinable_to_p (cgraph_node *callee, cgraph_node *caller)
1485 : {
1486 9225378 : strub_mode callee_mode = get_strub_mode (callee);
1487 :
1488 9225378 : switch (callee_mode)
1489 : {
1490 3862 : case STRUB_WRAPPED:
1491 3862 : case STRUB_AT_CALLS:
1492 3862 : case STRUB_INTERNAL:
1493 3862 : case STRUB_INLINABLE:
1494 3862 : case STRUB_AT_CALLS_OPT:
1495 3862 : break;
1496 :
1497 : case STRUB_WRAPPER:
1498 : case STRUB_DISABLED:
1499 : case STRUB_CALLABLE:
1500 : /* When we consider inlining, we've already verified callability, so we
1501 : can even inline callable and then disabled into a strub context. That
1502 : will get strubbed along with the context, so it's hopefully not a
1503 : problem. */
1504 : return true;
1505 :
1506 : default:
1507 : gcc_unreachable ();
1508 : }
1509 :
1510 3862 : strub_mode caller_mode = get_strub_mode (caller);
1511 :
1512 3862 : switch (caller_mode)
1513 : {
1514 : case STRUB_WRAPPED:
1515 : case STRUB_AT_CALLS:
1516 : case STRUB_INTERNAL:
1517 : case STRUB_INLINABLE:
1518 : case STRUB_AT_CALLS_OPT:
1519 : return true;
1520 :
1521 : case STRUB_WRAPPER:
1522 : case STRUB_DISABLED:
1523 : case STRUB_CALLABLE:
1524 : break;
1525 :
1526 : default:
1527 : gcc_unreachable ();
1528 : }
1529 :
1530 : return false;
1531 : }
1532 :
1533 : /* Check that types T1 and T2 are strub-compatible. Return 1 if the strub modes
1534 : are the same, 2 if they are interchangeable, and 0 otherwise. */
1535 :
1536 : int
1537 1473171 : strub_comptypes (tree t1, tree t2)
1538 : {
1539 1473171 : if (TREE_CODE (t1) != TREE_CODE (t2))
1540 : return 0;
1541 :
1542 1473171 : enum strub_mode m1 = get_strub_mode_from_type (t1);
1543 1473171 : enum strub_mode m2 = get_strub_mode_from_type (t2);
1544 :
1545 1473171 : if (m1 == m2)
1546 : return 1;
1547 :
1548 : /* We're dealing with types, so only strub modes that can be selected by
1549 : attributes in the front end matter. If either mode is at-calls (for
1550 : functions) or internal (for variables), the conversion is not
1551 : compatible. */
1552 4500 : bool var_p = !FUNC_OR_METHOD_TYPE_P (t1);
1553 4500 : enum strub_mode mr = var_p ? STRUB_INTERNAL : STRUB_AT_CALLS;
1554 4500 : if (m1 == mr || m2 == mr)
1555 : return 0;
1556 :
1557 : return 2;
1558 : }
1559 :
1560 : /* Return the effective strub mode used for CALL, and set *TYPEP to
1561 : the effective type used for the call. The effective type and mode
1562 : are those of the callee, unless the call involves a typecast. */
1563 :
1564 : static enum strub_mode
1565 18022 : effective_strub_mode_for_call (gcall *call, tree *typep)
1566 : {
1567 18022 : tree type;
1568 18022 : enum strub_mode mode;
1569 :
1570 18022 : if (strub_call_fntype_override_p (call))
1571 : {
1572 0 : type = gimple_call_fntype (call);
1573 0 : mode = get_strub_mode_from_type (type);
1574 : }
1575 : else
1576 : {
1577 18022 : type = TREE_TYPE (TREE_TYPE (gimple_call_fn (call)));
1578 18022 : tree decl = gimple_call_fndecl (call);
1579 18022 : if (decl)
1580 17750 : mode = get_strub_mode_from_fndecl (decl);
1581 : else
1582 272 : mode = get_strub_mode_from_type (type);
1583 : }
1584 :
1585 18022 : if (typep)
1586 17860 : *typep = type;
1587 :
1588 18022 : return mode;
1589 : }
1590 :
1591 : /* Create a distinct copy of the type of NODE's function, and change
1592 : the fntype of all calls to it with the same main type to the new
1593 : type. */
1594 :
1595 : static void
1596 73 : distinctify_node_type (cgraph_node *node)
1597 : {
1598 73 : tree old_type = TREE_TYPE (node->decl);
1599 73 : tree new_type = build_distinct_type_copy (old_type);
1600 73 : tree new_ptr_type = NULL_TREE;
1601 :
1602 : /* Remap any calls to node->decl that use old_type, or a variant
1603 : thereof, to new_type as well. We don't look for aliases, their
1604 : declarations will have their types changed independently, and
1605 : we'll adjust their fntypes then. */
1606 146 : for (cgraph_edge *e = node->callers; e; e = e->next_caller)
1607 : {
1608 73 : if (!e->call_stmt)
1609 0 : continue;
1610 73 : tree fnaddr = gimple_call_fn (e->call_stmt);
1611 73 : gcc_checking_assert (TREE_CODE (fnaddr) == ADDR_EXPR
1612 : && TREE_OPERAND (fnaddr, 0) == node->decl);
1613 73 : if (strub_call_fntype_override_p (e->call_stmt))
1614 0 : continue;
1615 73 : if (!new_ptr_type)
1616 73 : new_ptr_type = build_pointer_type (new_type);
1617 73 : TREE_TYPE (fnaddr) = new_ptr_type;
1618 73 : gimple_call_set_fntype (e->call_stmt, new_type);
1619 : }
1620 :
1621 73 : TREE_TYPE (node->decl) = new_type;
1622 73 : }
1623 :
1624 : /* Return TRUE iff TYPE and any variants have the same strub mode. */
1625 :
1626 : static bool
1627 2311 : same_strub_mode_in_variants_p (tree type)
1628 : {
1629 2311 : enum strub_mode mode = get_strub_mode_from_type (type);
1630 :
1631 2311 : for (tree other = TYPE_MAIN_VARIANT (type);
1632 4680 : other != NULL_TREE; other = TYPE_NEXT_VARIANT (other))
1633 2369 : if (type != other && mode != get_strub_mode_from_type (other))
1634 : return false;
1635 :
1636 : /* Check that the canonical type, if set, either is in the same
1637 : variant chain, or has the same strub mode as type. Also check
1638 : the variants of the canonical type. */
1639 2311 : if (TYPE_CANONICAL (type)
1640 2311 : && (TYPE_MAIN_VARIANT (TYPE_CANONICAL (type))
1641 146 : != TYPE_MAIN_VARIANT (type)))
1642 : {
1643 0 : if (mode != get_strub_mode_from_type (TYPE_CANONICAL (type)))
1644 : return false;
1645 : else
1646 0 : return same_strub_mode_in_variants_p (TYPE_CANONICAL (type));
1647 : }
1648 :
1649 : return true;
1650 : }
1651 :
1652 : /* Check that strub functions don't call non-strub functions, and that
1653 : always_inline strub functions are only called by strub
1654 : functions. */
1655 :
static void
verify_strub ()
{
  cgraph_node *node;

  /* It's expected that check strub-wise pointer type compatibility of variables
     and of functions is already taken care of by front-ends, on account of the
     attribute's being marked as affecting type identity and of the creation of
     distinct types.  */

  /* Check that call targets in strub contexts have strub-callable types.  */

  FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
  {
    enum strub_mode caller_mode = get_strub_mode (node);

    /* Indirect calls: only the call's effective type is available to
       judge callability.  */
    for (cgraph_edge *e = node->indirect_calls; e; e = e->next_callee)
      {
	gcc_checking_assert (e->indirect_unknown_callee);

	if (!e->call_stmt)
	  continue;

	enum strub_mode callee_mode
	  = effective_strub_mode_for_call (e->call_stmt, NULL);

	if (!strub_callable_from_p (caller_mode, callee_mode))
	  error_at (gimple_location (e->call_stmt),
		    "indirect non-%<strub%> call in %<strub%> context %qD",
		    node->decl);
      }

    /* Direct calls: the callee is known, so diagnostics can name it and
       distinguish the specific incompatibility.  */
    for (cgraph_edge *e = node->callees; e; e = e->next_callee)
      {
	gcc_checking_assert (!e->indirect_unknown_callee);

	if (!e->call_stmt)
	  continue;

	tree callee_fntype;
	enum strub_mode callee_mode
	  = effective_strub_mode_for_call (e->call_stmt, &callee_fntype);

	if (!strub_callable_from_p (caller_mode, callee_mode))
	  {
	    if (callee_mode == STRUB_INLINABLE)
	      error_at (gimple_location (e->call_stmt),
			"calling %<always_inline%> %<strub%> %qD"
			" in non-%<strub%> context %qD",
			e->callee->decl, node->decl);
	    else if (fndecl_built_in_p (e->callee->decl, BUILT_IN_APPLY_ARGS)
		     && caller_mode == STRUB_INTERNAL)
	      /* This is ok, it will be kept in the STRUB_WRAPPER, and removed
		 from the STRUB_WRAPPED's strub context.  */
	      continue;
	    else if (!strub_call_fntype_override_p (e->call_stmt))
	      error_at (gimple_location (e->call_stmt),
			"calling non-%<strub%> %qD in %<strub%> context %qD",
			e->callee->decl, node->decl);
	    else
	      error_at (gimple_location (e->call_stmt),
			"calling %qD using non-%<strub%> type %qT"
			" in %<strub%> context %qD",
			e->callee->decl, callee_fntype, node->decl);
	  }
      }
  }
}
1724 :
1725 : namespace {
1726 :
/* Define a pass to compute strub modes.  */
const pass_data pass_data_ipa_strub_mode = {
  SIMPLE_IPA_PASS, // type
  "strubm",	   // name
  OPTGROUP_NONE,   // optinfo_flags
  TV_NONE,	   // tv_id
  PROP_cfg, // properties_required
  0,	    // properties_provided
  0,	    // properties_destroyed
  0,	    // properties_start
  0,	    // properties_finish
};
1739 :
1740 : class pass_ipa_strub_mode : public simple_ipa_opt_pass
1741 : {
1742 : public:
1743 285722 : pass_ipa_strub_mode (gcc::context *ctxt)
1744 571444 : : simple_ipa_opt_pass (pass_data_ipa_strub_mode, ctxt)
1745 : {}
1746 0 : opt_pass *clone () { return new pass_ipa_strub_mode (m_ctxt); }
1747 229960 : virtual bool gate (function *) {
1748 : /* In relaxed (-3) and strict (-4) settings, that only enable strub at a
1749 : function or variable attribute's request, the attribute handler changes
1750 : flag_strub to -1 or -2, respectively, if any strub-enabling occurence of
1751 : the attribute is found. Therefore, if it remains at -3 or -4, nothing
1752 : that would enable strub was found, so we can disable it and avoid the
1753 : overhead. */
1754 229960 : if (flag_strub < -2)
1755 229341 : flag_strub = 0;
1756 229960 : return flag_strub;
1757 : }
1758 : virtual unsigned int execute (function *);
1759 : };
1760 :
/* Define a pass to introduce strub transformations.  */
const pass_data pass_data_ipa_strub = {
  SIMPLE_IPA_PASS, // type
  "strub",	   // name
  OPTGROUP_NONE,   // optinfo_flags
  TV_NONE,	   // tv_id
  PROP_cfg | PROP_ssa, // properties_required
  0,		       // properties_provided
  0,		       // properties_destroyed
  0,		       // properties_start
  TODO_update_ssa
  | TODO_cleanup_cfg
  | TODO_rebuild_cgraph_edges, // properties_finish
};
1775 :
/* The pass that performs the actual strub transformations: type and
   signature adjustment, wrapper splitting, and insertion of the
   enter/update/leave builtin calls.  Also hosts the lazily-created
   types, builtins and identifiers shared by the transformations.  */
class pass_ipa_strub : public simple_ipa_opt_pass
{
public:
  pass_ipa_strub (gcc::context *ctxt)
    : simple_ipa_opt_pass (pass_data_ipa_strub, ctxt)
  {}
  opt_pass *clone () { return new pass_ipa_strub (m_ctxt); }
  virtual bool gate (function *) { return flag_strub && !seen_error (); }
  virtual unsigned int execute (function *);

  /* Define on demand and cache some types we use often.  The cache lives
     in strub_cache, at STRUB_TYPE_BASE + IDX, so it survives across
     invocations (e.g. into the lto compiler).  */
#define DEF_TYPE(IDX, NAME, INIT)			\
  static inline tree get_ ## NAME () {			\
    int idx = STRUB_TYPE_BASE + IDX;			\
    static tree type = strub_cache[idx];		\
    if (!type)						\
      strub_cache[idx] = type = (INIT);			\
    return type;					\
  }

  /* Use a distinct ptr_type_node to denote the watermark, so that we can
     recognize it in arg lists and avoid modifying types twice.  */
  DEF_TYPE (0, wmt, build_variant_type_copy (ptr_type_node))

  /* Reference to the watermark type: the type of the watermark-pointer
     argument passed to at-calls functions and to the strub builtins.  */
  DEF_TYPE (1, pwmt, build_reference_type (get_wmt ()))

  /* Restrict-qualified variant of the above, used in signatures.  */
  DEF_TYPE (2, qpwmt,
	    build_qualified_type (get_pwmt (),
				  TYPE_QUAL_RESTRICT
				  /* | TYPE_QUAL_CONST */))

  /* Restrict+const-qualified generic pointer.  */
  DEF_TYPE (3, qptr,
	    build_qualified_type (ptr_type_node,
				  TYPE_QUAL_RESTRICT
				  | TYPE_QUAL_CONST))

  /* Restrict-qualified reference to va_list, for variadic wrappers.  */
  DEF_TYPE (4, qpvalst,
	    build_qualified_type (build_reference_type
				  (va_list_type_node),
				  TYPE_QUAL_RESTRICT
				  /* | TYPE_QUAL_CONST */))

#undef DEF_TYPE

  /* Define non-strub builtins on demand.  */
#define DEF_NM_BUILTIN(NAME, CODE, FNTYPELIST)			\
  static tree get_ ## NAME () {					\
    tree decl = builtin_decl_explicit (CODE);			\
    if (!decl)							\
      {								\
	tree type = build_function_type_list FNTYPELIST;	\
	decl = add_builtin_function				\
	  ("__builtin_" #NAME,					\
	   type, CODE, BUILT_IN_NORMAL,				\
	   NULL, NULL);						\
	TREE_NOTHROW (decl) = true;				\
	set_builtin_decl ((CODE), decl, true);			\
      }								\
    return decl;						\
  }

  /* __builtin_stack_address (): void * — current stack pointer.  */
  DEF_NM_BUILTIN (stack_address,
		  BUILT_IN_STACK_ADDRESS,
		  (ptr_type_node, NULL))

#undef DEF_NM_BUILTIN

  /* Define strub builtins on demand.  FNSPEC, when nonnull, becomes a
     "fn spec" attribute describing how the watermark argument is
     accessed, so the optimizers don't assume too much or too little.  */
#define DEF_SS_BUILTIN(NAME, FNSPEC, CODE, FNTYPELIST)		\
  static tree get_ ## NAME () {					\
    tree decl = builtin_decl_explicit (CODE);			\
    if (!decl)							\
      {								\
	tree type = build_function_type_list FNTYPELIST;	\
	tree attrs = NULL;					\
	if (FNSPEC)						\
	  attrs = tree_cons (get_identifier ("fn spec"),	\
			     build_tree_list			\
			     (NULL_TREE,			\
			      build_string (strlen (FNSPEC),	\
					    (FNSPEC))),		\
			     attrs);				\
	decl = add_builtin_function_ext_scope			\
	  ("__builtin___strub_" #NAME,				\
	   type, CODE, BUILT_IN_NORMAL,				\
	   "__strub_" #NAME, attrs);				\
	TREE_NOTHROW (decl) = true;				\
	set_builtin_decl ((CODE), decl, true);			\
      }								\
    return decl;						\
  }

  /* __strub_enter initializes the watermark ("Ot": only writes through
     the pointer, doesn't capture it).  */
  DEF_SS_BUILTIN (enter, ". Ot",
		  BUILT_IN___STRUB_ENTER,
		  (void_type_node, get_qpwmt (), NULL))
  /* __strub_update advances the watermark ("Wt").  */
  DEF_SS_BUILTIN (update, ". Wt",
		  BUILT_IN___STRUB_UPDATE,
		  (void_type_node, get_qpwmt (), NULL))
  /* __strub_leave scrubs up to the watermark ("w": reads it).  */
  DEF_SS_BUILTIN (leave, ". w ",
		  BUILT_IN___STRUB_LEAVE,
		  (void_type_node, get_qpwmt (), NULL))

#undef DEF_SS_BUILTIN

  /* Define strub identifiers on demand.  Cached at STRUB_IDENT_BASE +
     IDX in strub_cache; used to name the synthetic parameters added to
     wrapped functions.  */
#define DEF_IDENT(IDX, NAME)						\
  static inline tree get_ ## NAME () {					\
    int idx = STRUB_IDENT_BASE + IDX;					\
    tree identifier = strub_cache[idx];					\
    if (!identifier)							\
      strub_cache[idx] = identifier = get_identifier (".strub." #NAME);	\
    return identifier;							\
  }

  DEF_IDENT (0, watermark_ptr)
  DEF_IDENT (1, va_list_ptr)
  DEF_IDENT (2, apply_args)

#undef DEF_IDENT

  static inline int adjust_at_calls_type (tree);
  static inline void adjust_at_calls_call (cgraph_edge *, int, tree);
  static inline void adjust_at_calls_calls (cgraph_node *);

  /* Add to SEQ a call to the strub watermark update builtin, taking NODE's
     location if given.  Optionally add the corresponding edge from NODE, with
     execution frequency COUNT.  Return the modified SEQ.  */

  static inline gimple_seq
  call_update_watermark (tree wmptr, cgraph_node *node, profile_count count,
			 gimple_seq seq = NULL)
  {
    tree uwm = get_update ();
    gcall *update = gimple_build_call (uwm, 1, wmptr);
    if (node)
      gimple_set_location (update, DECL_SOURCE_LOCATION (node->decl));
    gimple_seq_add_stmt (&seq, update);
    if (node)
      node->create_edge (cgraph_node::get_create (uwm), update, count, false);
    return seq;
  }

};
1919 :
1920 : } // anon namespace
1921 :
/* Gather with this type a collection of parameters that we're turning into
   explicit references: the set is consulted by the gimple walkers below to
   decide which PARM_DECL uses need an added dereference.  */

typedef hash_set<tree> indirect_parms_t;
1926 :
/* Dereference OP's incoming turned-into-reference parm if it's an
   INDIRECT_PARMS or an ADDR_EXPR thereof.  Set *REC and return according to
   gimple-walking expectations: a nonnull return is the replacement tree,
   and *REC = 0 stops the walker from descending into OP's operands.  */

static tree
maybe_make_indirect (indirect_parms_t &indirect_parms, tree op, int *rec)
{
  if (DECL_P (op))
    {
      /* Decls have no subtrees worth walking into.  */
      *rec = 0;
      if (indirect_parms.contains (op))
	{
	  /* Try to fold *&op back to op; otherwise build an explicit
	     zero-offset MEM_REF dereference of the now-reference parm.  */
	  tree ret = gimple_fold_indirect_ref (op);
	  if (!ret)
	    ret = build2 (MEM_REF,
			  TREE_TYPE (TREE_TYPE (op)),
			  op,
			  build_int_cst (TREE_TYPE (op), 0));
	  /* Carry the pointed-to type's volatility over to the new
	     dereference, so accesses aren't wrongly optimized away.  */
	  if (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (op)))
	      && !TREE_THIS_VOLATILE (ret))
	    TREE_SIDE_EFFECTS (ret) = TREE_THIS_VOLATILE (ret) = 1;
	  return ret;
	}
    }
  else if (TREE_CODE (op) == ADDR_EXPR
	   && DECL_P (TREE_OPERAND (op, 0)))
    {
      *rec = 0;
      if (indirect_parms.contains (TREE_OPERAND (op, 0)))
	{
	  /* &parm becomes just the (reference-typed) parm itself.  */
	  op = TREE_OPERAND (op, 0);
	  return op;
	}
    }

  return NULL_TREE;
}
1964 :
1965 : /* A gimple-walking function that adds dereferencing to indirect parms. */
1966 :
1967 : static tree
1968 170 : walk_make_indirect (tree *op, int *rec, void *arg)
1969 : {
1970 170 : walk_stmt_info *wi = (walk_stmt_info *)arg;
1971 170 : indirect_parms_t &indirect_parms = *(indirect_parms_t *)wi->info;
1972 :
1973 170 : if (!*op || TYPE_P (*op))
1974 : {
1975 0 : *rec = 0;
1976 0 : return NULL_TREE;
1977 : }
1978 :
1979 170 : if (tree repl = maybe_make_indirect (indirect_parms, *op, rec))
1980 : {
1981 6 : *op = repl;
1982 6 : wi->changed = true;
1983 : }
1984 :
1985 : return NULL_TREE;
1986 : }
1987 :
1988 : /* A gimple-walking function that turns any non-gimple-val ADDR_EXPRs into a
1989 : separate SSA. Though addresses of e.g. parameters, and of members thereof,
1990 : are gimple vals, turning parameters into references, with an extra layer of
1991 : indirection and thus explicit dereferencing, need to be regimplified. */
1992 :
1993 : static tree
1994 12 : walk_regimplify_addr_expr (tree *op, int *rec, void *arg)
1995 : {
1996 12 : walk_stmt_info *wi = (walk_stmt_info *)arg;
1997 12 : gimple_stmt_iterator &gsi = *(gimple_stmt_iterator *)wi->info;
1998 :
1999 12 : *rec = 0;
2000 :
2001 12 : if (!*op || TREE_CODE (*op) != ADDR_EXPR)
2002 : return NULL_TREE;
2003 :
2004 0 : if (!is_gimple_val (*op))
2005 : {
2006 0 : tree ret = force_gimple_operand_gsi (&gsi, *op, true,
2007 : NULL_TREE, true, GSI_SAME_STMT);
2008 0 : gcc_assert (ret != *op);
2009 0 : *op = ret;
2010 0 : wi->changed = true;
2011 : }
2012 :
2013 : return NULL_TREE;
2014 : }
2015 :
/* Turn STMT's PHI arg defs into separate SSA defs if they've become
   non-gimple_val.  Return TRUE if any edge insertions need to be committed,
   i.e., if the caller must run gsi_commit_edge_inserts afterwards.  */

static bool
walk_regimplify_phi (gphi *stmt)
{
  bool needs_commit = false;

  for (unsigned i = 0, n = gimple_phi_num_args (stmt); i < n; i++)
    {
      tree op = gimple_phi_arg_def (stmt, i);
      if ((TREE_CODE (op) == ADDR_EXPR
	   && !is_gimple_val (op))
	  /* ??? A PARM_DECL that was addressable in the original function and
	     had its address in PHI nodes, but that became a reference in the
	     wrapped clone would NOT be updated by update_ssa in PHI nodes.
	     Alas, if we were to create a default def for it now, update_ssa
	     would complain that the symbol that needed rewriting already has
	     SSA names associated with it.  OTOH, leaving the PARM_DECL alone,
	     it eventually causes errors because it remains unchanged in PHI
	     nodes, but it gets rewritten as expected if it appears in other
	     stmts.  So we cheat a little here, and force the PARM_DECL out of
	     the PHI node and into an assignment.  It's a little expensive,
	     because we insert it at the edge, which introduces a basic block
	     that's entirely unnecessary, but it works, and the block will be
	     removed as the default def gets propagated back into the PHI node,
	     so the final optimized code looks just as expected.  */
	  || (TREE_CODE (op) == PARM_DECL
	      && !TREE_ADDRESSABLE (op)))
	{
	  /* Replace the offending arg with a fresh SSA name, and assign
	     the original value to it on the corresponding incoming edge.  */
	  tree temp = make_ssa_name (TREE_TYPE (op), stmt);
	  if (TREE_CODE (op) == PARM_DECL)
	    SET_SSA_NAME_VAR_OR_IDENTIFIER (temp, DECL_NAME (op));
	  SET_PHI_ARG_DEF (stmt, i, temp);

	  gimple *assign = gimple_build_assign (temp, op);
	  if (gimple_phi_arg_has_location (stmt, i))
	    gimple_set_location (assign, gimple_phi_arg_location (stmt, i));
	  gsi_insert_on_edge (gimple_phi_arg_edge (stmt, i), assign);
	  needs_commit = true;
	}
    }

  return needs_commit;
}
2061 :
2062 : /* Create a reference type to use for PARM when turning it into a
2063 : reference. */
2064 :
2065 : static tree
2066 14 : build_ref_type_for (tree parm)
2067 : {
2068 14 : gcc_checking_assert (TREE_CODE (parm) == PARM_DECL);
2069 :
2070 14 : tree ref_type = build_reference_type (TREE_TYPE (parm));
2071 :
2072 14 : return ref_type;
2073 : }
2074 :
2075 : /* Add cgraph edges from current_function_decl to callees in SEQ with frequency
2076 : COUNT, assuming all calls in SEQ are direct. */
2077 :
2078 : static void
2079 2858 : add_call_edges_for_seq (gimple_seq seq, profile_count count)
2080 : {
2081 2858 : cgraph_node *node = cgraph_node::get_create (current_function_decl);
2082 :
2083 2858 : for (gimple_stmt_iterator gsi = gsi_start (seq);
2084 9146 : !gsi_end_p (gsi); gsi_next (&gsi))
2085 : {
2086 6288 : gimple *stmt = gsi_stmt (gsi);
2087 :
2088 6288 : gcall *call = dyn_cast <gcall *> (stmt);
2089 6288 : if (!call)
2090 3422 : continue;
2091 :
2092 2866 : tree callee = gimple_call_fndecl (call);
2093 2866 : gcc_checking_assert (callee);
2094 2866 : node->create_edge (cgraph_node::get_create (callee), call, count, false);
2095 : }
2096 2858 : }
2097 :
/* Insert SEQ after the call at GSI, as if the call was in a try block with SEQ
   as finally, i.e., SEQ will run after the call whether it returns or
   propagates an exception.  This handles block splitting, EH edge and block
   creation, noreturn and nothrow optimizations, and even throwing calls without
   preexisting local handlers.  */

static void
gsi_insert_finally_seq_after_call (gimple_stmt_iterator gsi, gimple_seq seq)
{
  if (!seq)
    return;

  gimple *stmt = gsi_stmt (gsi);

  if (gimple_has_location (stmt))
    annotate_all_with_location (seq, gimple_location (stmt));

  /* Classify the stmt: can it return normally, and can it throw to a
     handler we'd have to cover?  */
  gcall *call = dyn_cast <gcall *> (stmt);
  bool noreturn_p = call && gimple_call_noreturn_p (call);
  int eh_lp = lookup_stmt_eh_lp (stmt);
  bool must_not_throw_p = eh_lp < 0;
  bool nothrow_p = (must_not_throw_p
		    || (call && gimple_call_nothrow_p (call))
		    || (eh_lp <= 0
			&& (TREE_NOTHROW (cfun->decl)
			    || !opt_for_fn (cfun->decl, flag_exceptions))));

  /* Neither fallthru nor EH continuation: SEQ would be unreachable.  */
  if (noreturn_p && nothrow_p)
    return;

  /* Don't expect an EH edge if we're not to throw, or if we're not in an EH
     region yet.  */
  bool no_eh_edge_p = (nothrow_p || !eh_lp);
  bool must_end_bb = stmt_ends_bb_p (stmt);

  /* If the call already ends its block, find the outgoing fallthru and/or
     EH edges on which SEQ (or a copy) must be inserted.  */
  edge eft = NULL, eeh = NULL;
  if (must_end_bb && !(noreturn_p && no_eh_edge_p))
    {
      gcc_checking_assert (gsi_one_before_end_p (gsi));

      edge e;
      edge_iterator ei;
      FOR_EACH_EDGE (e, ei, gsi_bb (gsi)->succs)
	{
	  if ((e->flags & EDGE_EH))
	    {
	      gcc_checking_assert (!eeh);
	      eeh = e;
#if !CHECKING_P
	      if (eft || noreturn_p)
		break;
#endif
	    }
	  if ((e->flags & EDGE_FALLTHRU))
	    {
	      gcc_checking_assert (!eft);
	      eft = e;
#if !CHECKING_P
	      if (eeh || no_eh_edge_p)
		break;
#endif
	    }
	}

      /* With checking, we walked all edges; make sure what we found
	 matches what the noreturn/nothrow classification predicted.  */
      gcc_checking_assert (!(eft && (eft->flags & EDGE_FALLTHRU))
			   == noreturn_p);
      gcc_checking_assert (!(eeh && (eeh->flags & EDGE_EH))
			   == no_eh_edge_p);
      gcc_checking_assert (eft != eeh);
    }

  /* Normal-return path: insert SEQ (or, if it's also needed on the EH
     path, a copy of it) right after the call or on the fallthru edge.  */
  if (!noreturn_p)
    {
      gimple_seq nseq = nothrow_p ? seq : gimple_seq_copy (seq);

      if (must_end_bb)
	{
	  gcc_checking_assert (gsi_one_before_end_p (gsi));
	  add_call_edges_for_seq (nseq, eft->count ());
	  gsi_insert_seq_on_edge_immediate (eft, nseq);
	}
      else
	{
	  add_call_edges_for_seq (nseq, gsi_bb (gsi)->count);
	  gsi_insert_seq_after (&gsi, nseq, GSI_SAME_STMT);
	}
    }

  if (nothrow_p)
    return;

  /* EH path with an existing landing pad: insert SEQ on the EH edge.  */
  if (eh_lp)
    {
      add_call_edges_for_seq (seq, eeh->count ());
      gsi_insert_seq_on_edge_immediate (eeh, seq);
      return;
    }

  /* A throwing call may appear within a basic block in a function that doesn't
     have any EH regions.  We're going to add a cleanup if so, therefore the
     block will have to be split.  */
  basic_block bb = gsi_bb (gsi);
  if (!gsi_one_before_end_p (gsi))
    split_block (bb, stmt);

  /* Create a new block for the EH cleanup.  */
  basic_block bb_eh_cleanup = create_empty_bb (bb);
  if (dom_info_available_p (CDI_DOMINATORS))
    set_immediate_dominator (CDI_DOMINATORS, bb_eh_cleanup, bb);
  if (current_loops)
    add_bb_to_loop (bb_eh_cleanup, current_loops->tree_root);

  /* Make the new block an EH cleanup for the call.  */
  eh_region new_r = gen_eh_region_cleanup (NULL);
  eh_landing_pad lp = gen_eh_landing_pad (new_r);
  tree label = gimple_block_label (bb_eh_cleanup);
  lp->post_landing_pad = label;
  EH_LANDING_PAD_NR (label) = lp->index;
  add_stmt_to_eh_lp (stmt, lp->index);

  /* Add the cleanup code to the EH cleanup block.  */
  gsi = gsi_after_labels (bb_eh_cleanup);
  gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);

  /* And then propagate the exception further.  */
  gresx *resx = gimple_build_resx (new_r->index);
  if (gimple_has_location (stmt))
    gimple_set_location (resx, gimple_location (stmt));
  gsi_insert_before (&gsi, resx, GSI_SAME_STMT);

  /* Finally, wire the EH cleanup block into the CFG.  */
  edge neeh = make_eh_edge (stmt);
  neeh->probability = profile_probability::never ();
  gcc_checking_assert (neeh->dest == bb_eh_cleanup);
  gcc_checking_assert (!neeh->dest->count.initialized_p ());
  neeh->dest->count = neeh->count ();
  add_call_edges_for_seq (seq, neeh->dest->count);
}
2236 :
/* Copy the attribute list at *ATTRS, minus any NAME attributes, leaving
   shareable trailing nodes alone.  Nodes up to the last NAME occurrence are
   duplicated (so that attribute lists shared with other decls/types aren't
   modified in place); the tail past it is reused as-is.  */

static inline void
remove_named_attribute_unsharing (const char *name, tree *attrs)
{
  while (tree found = lookup_attribute (name, *attrs))
    {
      /* Copy nodes up to the next NAME attribute.  */
      while (*attrs != found)
	{
	  *attrs = tree_cons (TREE_PURPOSE (*attrs),
			      TREE_VALUE (*attrs),
			      TREE_CHAIN (*attrs));
	  attrs = &TREE_CHAIN (*attrs);
	}
      /* Then drop it.  */
      gcc_checking_assert (*attrs == found);
      *attrs = TREE_CHAIN (*attrs);
    }
}
2258 :
/* Record the uid of the last cgraph entry whose mode we've already set, so
   that we can perform mode setting incrementally without duplication.  Reset
   to zero at the start of each strubm pass execution.  */
static int last_cgraph_uid;
2262 :
/* Set strub modes for functions introduced since the last call, i.e. those
   with uid >= last_cgraph_uid.  No-op if no cgraph node was created since
   then.  */

static void
ipa_strub_set_mode_for_new_functions ()
{
  if (symtab->cgraph_max_uid == last_cgraph_uid)
    return;

  cgraph_node *node;

  /* Go through the functions twice, once over non-aliases, and then over
     aliases, so that aliases can reuse the mode computation of their ultimate
     targets.  */
  for (int aliases = 0; aliases <= 1; aliases++)
    FOR_EACH_FUNCTION (node)
      {
	/* Iteration 0 visits only non-aliases, iteration 1 only aliases.  */
	if (!node->alias != !aliases)
	  continue;

	/* Already done.  */
	if (node->get_uid () < last_cgraph_uid)
	  continue;

	set_strub_mode (node);
      }

  last_cgraph_uid = symtab->cgraph_max_uid;
}
2291 :
2292 : /* Return FALSE if NODE is a strub context, and TRUE otherwise. */
2293 :
2294 : bool
2295 1240971 : strub_splittable_p (cgraph_node *node)
2296 : {
2297 1240971 : switch (get_strub_mode (node))
2298 : {
2299 : case STRUB_WRAPPED:
2300 : case STRUB_AT_CALLS:
2301 : case STRUB_AT_CALLS_OPT:
2302 : case STRUB_INLINABLE:
2303 : case STRUB_INTERNAL:
2304 : case STRUB_WRAPPER:
2305 : return false;
2306 :
2307 1240837 : case STRUB_CALLABLE:
2308 1240837 : case STRUB_DISABLED:
2309 1240837 : break;
2310 :
2311 : default:
2312 : gcc_unreachable ();
2313 : }
2314 :
2315 1240837 : return true;
2316 : }
2317 :
/* Return the PARM_DECL of the incoming watermark pointer, if there is one.
   Only functions in at-calls modes (or their wrapped clones) take one;
   for any other mode, return NULL_TREE.  */

tree
strub_watermark_parm (tree fndecl)
{
  switch (get_strub_mode_from_fndecl (fndecl))
    {
    case STRUB_WRAPPED:
    case STRUB_AT_CALLS:
    case STRUB_AT_CALLS_OPT:
      break;

    case STRUB_INTERNAL:
    case STRUB_WRAPPER:
    case STRUB_CALLABLE:
    case STRUB_DISABLED:
    case STRUB_INLINABLE:
      return NULL_TREE;

    default:
      gcc_unreachable ();
    }

  for (tree parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    /* The type (variant) compare finds the parameter even in a just-created
       clone, before we set its name, but the type-based compare doesn't work
       during builtin expansion within the lto compiler, because we'll have
       created a separate variant in that run.  */
    if (TREE_TYPE (parm) == pass_ipa_strub::get_qpwmt ()
	|| DECL_NAME (parm) == pass_ipa_strub::get_watermark_ptr ())
      return parm;

  /* Functions in the modes above always carry the parameter.  */
  gcc_unreachable ();
}
2352 :
/* Adjust a STRUB_AT_CALLS function TYPE, adding a watermark pointer if it
   hasn't been added yet.  Return the named argument count (the position at
   which the watermark argument is, or was previously, inserted).  */

int
pass_ipa_strub::adjust_at_calls_type (tree type)
{
  int named_args = 0;

  gcc_checking_assert (same_strub_mode_in_variants_p (type));

  /* Unprototyped function type: nothing to insert into.  */
  if (!TYPE_ARG_TYPES (type))
    return named_args;

  tree *tlist = &TYPE_ARG_TYPES (type);
  tree qpwmptrt = get_qpwmt ();
  while (*tlist && TREE_VALUE (*tlist) != void_type_node)
    {
      /* The type has already been adjusted.  */
      if (TREE_VALUE (*tlist) == qpwmptrt)
	return named_args;
      named_args++;
      /* Unshare each list node we traverse, so that the insertion below
	 doesn't modify arg lists shared with other (variant) types.  */
      *tlist = tree_cons (TREE_PURPOSE (*tlist),
			  TREE_VALUE (*tlist),
			  TREE_CHAIN (*tlist));
      tlist = &TREE_CHAIN (*tlist);
    }

  /* Add the new argument after all named arguments, so as to not mess with
     attributes that reference parameters.  */
  *tlist = tree_cons (NULL_TREE, get_qpwmt (), *tlist);

#if ATTR_FNSPEC_DECONST_WATERMARK
  if (!type_already_adjusted)
    {
      int flags = flags_from_decl_or_type (type);
      tree fnspec = lookup_attribute ("fn spec", type);

      if ((flags & (ECF_CONST | ECF_PURE | ECF_NOVOPS)) || fnspec)
	{
	  size_t xargs = 1;
	  size_t curlen = 0, tgtlen = 2 + 2 * (named_args + xargs);
	  auto_vec<char> nspecv (tgtlen);
	  char *nspec = &nspecv[0]; /* It will *not* be NUL-terminated!  */
	  if (fnspec)
	    {
	      tree fnspecstr = TREE_VALUE (TREE_VALUE (fnspec));
	      curlen = TREE_STRING_LENGTH (fnspecstr);
	      memcpy (nspec, TREE_STRING_POINTER (fnspecstr), curlen);
	    }
	  if (!curlen)
	    {
	      nspec[curlen++] = '.';
	      nspec[curlen++] = ((flags & ECF_CONST)
				 ? 'c'
				 : (flags & ECF_PURE)
				 ? 'p'
				 : ' ');
	    }
	  while (curlen < tgtlen - 2 * xargs)
	    {
	      nspec[curlen++] = '.';
	      nspec[curlen++] = ' ';
	    }
	  nspec[curlen++] = 'W';
	  nspec[curlen++] = 't';

	  /* The type has already been copied, if needed, before adding
	     parameters.  */
	  TYPE_ATTRIBUTES (type)
	    = tree_cons (get_identifier ("fn spec"),
			 build_tree_list (NULL_TREE,
					  build_string (tgtlen, nspec)),
			 TYPE_ATTRIBUTES (type));
	}
    }
#endif

  return named_args;
}
2432 :
/* Adjust a call to an at-calls call target.  Create a watermark local variable
   if needed, initialize it before, pass it to the callee according to the
   modified at-calls interface, and release the callee's stack space after the
   call, if not deferred.  If the call is const or pure, arrange for the
   watermark to not be assumed unused or unchanged.  NAMED_ARGS is the
   watermark argument's insertion position as returned by
   adjust_at_calls_type, and CALLEE_FNTYPE the already-adjusted type.  */

void
pass_ipa_strub::adjust_at_calls_call (cgraph_edge *e, int named_args,
				      tree callee_fntype)
{
  gcc_checking_assert (e->call_stmt);
  gcall *ocall = e->call_stmt;
  gimple_stmt_iterator gsi = gsi_for_stmt (ocall);

  /* Make sure we haven't modified this call yet.  */
  gcc_checking_assert (!(int (gimple_call_num_args (ocall)) > named_args
			 && (TREE_TYPE (gimple_call_arg (ocall, named_args))
			     == get_pwmt ())));

  /* Check target support using the callee decl if known, otherwise the
     called function type; a diagnostic is issued at the call's location
     if strub isn't supported there.  */
  tree tsup;
  if (!(tsup = gimple_call_fndecl (ocall)))
    tsup = TREE_TYPE (TREE_TYPE (gimple_call_fn (ocall)));
  if (!strub_target_support_p (tsup, true, gimple_location (ocall)))
    return;

  /* If we're already within a strub context, pass on the incoming watermark
     pointer, and omit the enter and leave calls around the modified call, as an
     optimization, or as a means to satisfy a tail-call requirement.  */
  tree swmp = ((opt_for_fn (e->caller->decl, optimize_size)
		|| opt_for_fn (e->caller->decl, optimize) > 2
		|| gimple_call_must_tail_p (ocall)
		|| (opt_for_fn (e->caller->decl, optimize) == 2
		    && gimple_call_tail_p (ocall)))
	       ? strub_watermark_parm (e->caller->decl)
	       : NULL_TREE);
  bool omit_own_watermark = swmp;
  tree swm = NULL_TREE;
  if (!omit_own_watermark)
    {
      /* Create a local watermark and take its address for the callee.  */
      swm = create_tmp_var (get_wmt (), ".strub.watermark");
      TREE_ADDRESSABLE (swm) = true;
      swmp = build1 (ADDR_EXPR, get_pwmt (), swm);

      /* Initialize the watermark before the call.  */
      tree enter = get_enter ();
      gcall *stptr = gimple_build_call (enter, 1,
					unshare_expr (swmp));
      if (gimple_has_location (ocall))
	gimple_set_location (stptr, gimple_location (ocall));
      gsi_insert_before (&gsi, stptr, GSI_SAME_STMT);
      e->caller->create_edge (cgraph_node::get_create (enter),
			      stptr, gsi_bb (gsi)->count, false);
    }


  /* Replace the call with one that passes the swmp argument first.  */
  gcall *wrcall;
  { gcall *stmt = ocall;
    // Mostly copied from gimple_call_copy_skip_args.
    int i = 0;
    int nargs = gimple_call_num_args (stmt);
    auto_vec<tree> vargs (MAX (nargs, named_args) + 1);
    gcall *new_stmt;

    /* pr71109.c calls a prototypeless function, then defines it with
       additional arguments.  It's ill-formed, but after it's inlined,
       it somehow works out.  */
    for (; i < named_args && i < nargs; i++)
      vargs.quick_push (gimple_call_arg (stmt, i));
    /* Pad missing named args with null pointers.  */
    for (; i < named_args; i++)
      vargs.quick_push (null_pointer_node);

    /* The watermark pointer goes right after the named args.  */
    vargs.quick_push (unshare_expr (swmp));

    /* Then any remaining (variadic or excess) args.  */
    for (; i < nargs; i++)
      vargs.quick_push (gimple_call_arg (stmt, i));

    if (gimple_call_internal_p (stmt))
      gcc_unreachable ();
    else
      new_stmt = gimple_build_call_vec (gimple_call_fn (stmt), vargs);
    gimple_call_set_fntype (new_stmt, callee_fntype);

    if (gimple_call_lhs (stmt))
      gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));

    gimple_move_vops (new_stmt, stmt);

    if (gimple_has_location (stmt))
      gimple_set_location (new_stmt, gimple_location (stmt));
    gimple_call_copy_flags (new_stmt, stmt);
    gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));

    gimple_set_modified (new_stmt, true);

    wrcall = new_stmt;
  }

  update_stmt (wrcall);
  gsi_replace (&gsi, wrcall, true);
  cgraph_edge::set_call_stmt (e, wrcall, false);

  /* Insert the strub code after the call.  */
  gimple_seq seq = NULL;

#if !ATTR_FNSPEC_DECONST_WATERMARK
  /* If the call will be assumed to not modify or even read the
     watermark, make it read and modified ourselves.  */
  if ((gimple_call_flags (wrcall)
       & (ECF_CONST | ECF_PURE | ECF_NOVOPS)))
    {
      /* When passing on the caller's watermark, build a dereference of
	 the incoming pointer for the asm operands below.  */
      if (!swm)
	swm = build2 (MEM_REF,
		      TREE_TYPE (TREE_TYPE (swmp)),
		      swmp,
		      build_int_cst (TREE_TYPE (swmp), 0));

      /* An empty asm with the watermark as both "m" input and "=m"
	 output makes the optimizers treat it as read and written.  */
      vec<tree, va_gc> *inputs = NULL;
      vec<tree, va_gc> *outputs = NULL;
      vec_safe_push (outputs,
		     build_tree_list
		     (build_tree_list
		      (NULL_TREE, build_string (2, "=m")),
		      unshare_expr (swm)));
      vec_safe_push (inputs,
		     build_tree_list
		     (build_tree_list
		      (NULL_TREE, build_string (1, "m")),
		      unshare_expr (swm)));
      gasm *forcemod = gimple_build_asm_vec ("", inputs, outputs,
					     NULL, NULL);
      gimple_seq_add_stmt (&seq, forcemod);

      /* If the call will be assumed to not even read the watermark,
	 make sure it is already in memory before the call.  */
      if ((gimple_call_flags (wrcall) & ECF_CONST))
	{
	  vec<tree, va_gc> *inputs = NULL;
	  vec_safe_push (inputs,
			 build_tree_list
			 (build_tree_list
			  (NULL_TREE, build_string (1, "m")),
			  unshare_expr (swm)));
	  gasm *force_store = gimple_build_asm_vec ("", inputs, NULL,
						    NULL, NULL);
	  if (gimple_has_location (wrcall))
	    gimple_set_location (force_store, gimple_location (wrcall));
	  gsi_insert_before (&gsi, force_store, GSI_SAME_STMT);
	}
    }
#endif

  /* With our own watermark, scrub up to it after the call, then clobber
     the variable so its stack slot can be reused.  */
  if (!omit_own_watermark)
    {
      gcall *sleave = gimple_build_call (get_leave (), 1,
					 unshare_expr (swmp));
      gimple_seq_add_stmt (&seq, sleave);

      gassign *clobber = gimple_build_assign (swm,
					      build_clobber
					      (TREE_TYPE (swm)));
      gimple_seq_add_stmt (&seq, clobber);
    }

  /* Run SEQ whether the call returns or throws.  */
  gsi_insert_finally_seq_after_call (gsi, seq);
}
2599 :
2600 : /* Adjust all at-calls calls in NODE. */
2601 :
2602 : void
2603 1227 : pass_ipa_strub::adjust_at_calls_calls (cgraph_node *node)
2604 : {
2605 : /* Adjust unknown-callee indirect calls with STRUB_AT_CALLS types within
2606 : onode. */
2607 1227 : if (node->indirect_calls)
2608 : {
2609 110 : push_cfun (DECL_STRUCT_FUNCTION (node->decl));
2610 220 : for (cgraph_edge *e = node->indirect_calls; e; e = e->next_callee)
2611 : {
2612 110 : gcc_checking_assert (e->indirect_unknown_callee);
2613 :
2614 110 : if (!e->call_stmt)
2615 64 : continue;
2616 :
2617 110 : tree callee_fntype;
2618 110 : enum strub_mode callee_mode
2619 110 : = effective_strub_mode_for_call (e->call_stmt, &callee_fntype);
2620 :
2621 174 : if (callee_mode != STRUB_AT_CALLS
2622 110 : && callee_mode != STRUB_AT_CALLS_OPT)
2623 64 : continue;
2624 :
2625 46 : int named_args = adjust_at_calls_type (callee_fntype);
2626 :
2627 46 : adjust_at_calls_call (e, named_args, callee_fntype);
2628 : }
2629 110 : pop_cfun ();
2630 : }
2631 :
2632 1227 : if (node->callees)
2633 : {
2634 1043 : push_cfun (DECL_STRUCT_FUNCTION (node->decl));
2635 9807 : for (cgraph_edge *e = node->callees; e; e = e->next_callee)
2636 : {
2637 8764 : gcc_checking_assert (!e->indirect_unknown_callee);
2638 :
2639 8764 : if (!e->call_stmt)
2640 6886 : continue;
2641 :
2642 8764 : tree callee_fntype;
2643 8764 : enum strub_mode callee_mode
2644 8764 : = effective_strub_mode_for_call (e->call_stmt, &callee_fntype);
2645 :
2646 15650 : if (callee_mode != STRUB_AT_CALLS
2647 8764 : && callee_mode != STRUB_AT_CALLS_OPT)
2648 6886 : continue;
2649 :
2650 1878 : int named_args = adjust_at_calls_type (callee_fntype);
2651 :
2652 1878 : adjust_at_calls_call (e, named_args, callee_fntype);
2653 : }
2654 1043 : pop_cfun ();
2655 : }
2656 1227 : }
2657 :
2658 : /* The strubm (strub mode) pass computes a strub mode for each function in the
2659 : call graph, and checks, before any inlining, that strub callability
2660 : requirements in effect are satisfied. */
2661 :
2662 : unsigned int
2663 614 : pass_ipa_strub_mode::execute (function *)
2664 : {
2665 614 : last_cgraph_uid = 0;
2666 614 : ipa_strub_set_mode_for_new_functions ();
2667 :
2668 : /* Verify before any inlining or other transformations. */
2669 614 : verify_strub ();
2670 :
2671 614 : return 0;
2672 : }
2673 :
2674 : /* Create a strub mode pass. */
2675 :
2676 : simple_ipa_opt_pass *
2677 285722 : make_pass_ipa_strub_mode (gcc::context *ctxt)
2678 : {
2679 285722 : return new pass_ipa_strub_mode (ctxt);
2680 : }
2681 :
2682 : /* The strub pass proper adjusts types, signatures, and at-calls calls, and
2683 : splits internal-strub functions. */
2684 :
2685 : unsigned int
2686 546 : pass_ipa_strub::execute (function *)
2687 : {
2688 546 : cgraph_node *onode;
2689 :
2690 546 : ipa_strub_set_mode_for_new_functions ();
2691 :
2692 : /* First, adjust the signature of at-calls functions. We adjust types of
2693 : at-calls functions first, so that we don't modify types in place unless
2694 : strub is explicitly requested. */
2695 2852 : FOR_EACH_FUNCTION (onode)
2696 : {
2697 2306 : enum strub_mode mode = get_strub_mode (onode);
2698 :
2699 2306 : if (mode == STRUB_AT_CALLS
2700 2306 : || mode == STRUB_AT_CALLS_OPT)
2701 : {
2702 : /* Create a type variant if strubbing was not explicitly requested in
2703 : the function type. */
2704 387 : if (get_strub_mode_from_type (TREE_TYPE (onode->decl)) != mode)
2705 73 : distinctify_node_type (onode);
2706 :
2707 387 : int named_args = adjust_at_calls_type (TREE_TYPE (onode->decl));
2708 :
2709 : /* An external function explicitly declared with strub won't have a
2710 : body. Even with implicit at-calls strub, a function may have had its
2711 : body removed after we selected the mode, and then we have nothing
2712 : further to do. */
2713 387 : if (!onode->has_gimple_body_p ())
2714 63 : continue;
2715 :
2716 324 : tree *pargs = &DECL_ARGUMENTS (onode->decl);
2717 :
2718 : /* A noninterposable_alias reuses the same parm decl chain, don't add
2719 : the parm twice. */
2720 0 : bool aliased_parms = (onode->alias && *pargs
2721 324 : && DECL_CONTEXT (*pargs) != onode->decl);
2722 :
2723 0 : if (aliased_parms)
2724 0 : continue;
2725 :
2726 372 : for (int i = 0; i < named_args; i++)
2727 48 : pargs = &DECL_CHAIN (*pargs);
2728 :
2729 324 : tree wmptr = build_decl (DECL_SOURCE_LOCATION (onode->decl),
2730 : PARM_DECL,
2731 : get_watermark_ptr (),
2732 : get_qpwmt ());
2733 324 : DECL_ARTIFICIAL (wmptr) = 1;
2734 324 : DECL_ARG_TYPE (wmptr) = get_qpwmt ();
2735 324 : DECL_CONTEXT (wmptr) = onode->decl;
2736 324 : TREE_USED (wmptr) = 1;
2737 324 : DECL_CHAIN (wmptr) = *pargs;
2738 324 : *pargs = wmptr;
2739 :
2740 324 : if (onode->alias)
2741 0 : continue;
2742 :
2743 324 : cgraph_node *nnode = onode;
2744 324 : push_cfun (DECL_STRUCT_FUNCTION (nnode->decl));
2745 :
2746 324 : {
2747 324 : edge e = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2748 324 : gimple_seq seq = call_update_watermark (wmptr, nnode, e->src->count);
2749 324 : gsi_insert_seq_on_edge_immediate (e, seq);
2750 : }
2751 :
2752 324 : if (DECL_STRUCT_FUNCTION (nnode->decl)->calls_alloca)
2753 : {
2754 120 : basic_block bb;
2755 657 : FOR_EACH_BB_FN (bb, cfun)
2756 1074 : for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
2757 4057 : !gsi_end_p (gsi); gsi_next (&gsi))
2758 : {
2759 3520 : gimple *stmt = gsi_stmt (gsi);
2760 :
2761 3520 : gcall *call = dyn_cast <gcall *> (stmt);
2762 :
2763 3520 : if (!call)
2764 2222 : continue;
2765 :
2766 1298 : if (gimple_alloca_call_p (call))
2767 : {
2768 : /* Capture stack growth. */
2769 328 : gimple_seq seq = call_update_watermark (wmptr, NULL,
2770 164 : gsi_bb (gsi)
2771 : ->count);
2772 164 : gsi_insert_finally_seq_after_call (gsi, seq);
2773 : }
2774 : }
2775 : }
2776 :
2777 324 : pop_cfun ();
2778 : }
2779 : }
2780 :
2781 3108 : FOR_EACH_FUNCTION (onode)
2782 : {
2783 2562 : if (!onode->has_gimple_body_p ())
2784 2092 : continue;
2785 :
2786 1227 : enum strub_mode mode = get_strub_mode (onode);
2787 :
2788 1227 : if (mode != STRUB_INTERNAL)
2789 : {
2790 757 : adjust_at_calls_calls (onode);
2791 757 : continue;
2792 : }
2793 :
2794 470 : bool is_stdarg = calls_builtin_va_start_p (onode);;
2795 470 : bool apply_args = calls_builtin_apply_args_p (onode);
2796 :
2797 470 : vec<ipa_adjusted_param, va_gc> *nparms = NULL;
2798 470 : unsigned j = 0;
2799 470 : {
2800 : // The following loop copied from ipa-split.c:split_function.
2801 470 : for (tree parm = DECL_ARGUMENTS (onode->decl);
2802 613 : parm; parm = DECL_CHAIN (parm), j++)
2803 : {
2804 143 : ipa_adjusted_param adj = {};
2805 143 : adj.op = IPA_PARAM_OP_COPY;
2806 143 : adj.base_index = j;
2807 143 : adj.prev_clone_index = j;
2808 143 : vec_safe_push (nparms, adj);
2809 : }
2810 :
2811 470 : if (apply_args)
2812 : {
2813 16 : ipa_adjusted_param aaadj = {};
2814 16 : aaadj.op = IPA_PARAM_OP_NEW;
2815 16 : aaadj.type = get_qptr ();
2816 16 : vec_safe_push (nparms, aaadj);
2817 : }
2818 :
2819 470 : if (is_stdarg)
2820 : {
2821 8 : ipa_adjusted_param vladj = {};
2822 8 : vladj.op = IPA_PARAM_OP_NEW;
2823 8 : vladj.type = get_qpvalst ();
2824 8 : vec_safe_push (nparms, vladj);
2825 : }
2826 :
2827 470 : ipa_adjusted_param wmadj = {};
2828 470 : wmadj.op = IPA_PARAM_OP_NEW;
2829 470 : wmadj.type = get_qpwmt ();
2830 470 : vec_safe_push (nparms, wmadj);
2831 : }
2832 470 : ipa_param_adjustments adj (nparms, -1, false);
2833 :
2834 470 : cgraph_node *nnode = onode->create_version_clone_with_body
2835 470 : (auto_vec<cgraph_edge *> (0),
2836 : NULL, &adj, NULL, NULL, "strub", NULL);
2837 :
2838 470 : if (!nnode)
2839 : {
2840 0 : error_at (DECL_SOURCE_LOCATION (onode->decl),
2841 : "failed to split %qD for %<strub%>",
2842 : onode->decl);
2843 0 : continue;
2844 : }
2845 :
2846 470 : onode->split_part = true;
2847 470 : if (onode->calls_comdat_local)
2848 0 : nnode->add_to_same_comdat_group (onode);
2849 :
2850 470 : set_strub_mode_to (onode, STRUB_WRAPPER);
2851 470 : set_strub_mode_to (nnode, STRUB_WRAPPED);
2852 :
2853 470 : adjust_at_calls_calls (nnode);
2854 :
2855 : /* Decide which of the wrapped function's parms we want to turn into
2856 : references to the argument passed to the wrapper. In general, we want to
2857 : copy small arguments, and avoid copying large ones. Variable-sized array
2858 : lengths given by other arguments, as in 20020210-1.c, would lead to
2859 : problems if passed by value, after resetting the original function and
2860 : dropping the length computation; passing them by reference works.
2861 : DECL_BY_REFERENCE is *not* a substitute for this: it involves copying
2862 : anyway, but performed at the caller. */
2863 470 : indirect_parms_t indirect_nparms (3, false);
2864 470 : unsigned adjust_ftype = 0;
2865 470 : unsigned named_args = 0;
2866 470 : for (tree parm = DECL_ARGUMENTS (onode->decl),
2867 470 : nparm = DECL_ARGUMENTS (nnode->decl),
2868 470 : nparmt = TYPE_ARG_TYPES (TREE_TYPE (nnode->decl));
2869 613 : parm;
2870 : named_args++,
2871 143 : parm = DECL_CHAIN (parm),
2872 429 : nparm = DECL_CHAIN (nparm),
2873 143 : nparmt = nparmt ? TREE_CHAIN (nparmt) : NULL_TREE)
2874 143 : if (TREE_THIS_VOLATILE (parm)
2875 281 : || !(0 /* DECL_BY_REFERENCE (narg) */
2876 138 : || is_gimple_reg_type (TREE_TYPE (nparm))
2877 9 : || VECTOR_TYPE_P (TREE_TYPE (nparm))
2878 9 : || TREE_CODE (TREE_TYPE (nparm)) == COMPLEX_TYPE
2879 9 : || (tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (nparm)))
2880 16 : && (tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (nparm)))
2881 8 : <= 4 * UNITS_PER_WORD))))
2882 : {
2883 : /* No point in indirecting pointer types, unless they're
2884 : volatile. Presumably they won't ever pass the size-based
2885 : test above, but check the assumption here, because
2886 : getting this wrong would mess with attribute access and
2887 : possibly others. We deal with fn spec below. */
2888 14 : gcc_checking_assert (!POINTER_TYPE_P (TREE_TYPE (nparm))
2889 : || TREE_THIS_VOLATILE (parm));
2890 :
2891 14 : indirect_nparms.add (nparm);
2892 :
2893 : /* ??? Is there any case in which it is not safe to suggest the parms
2894 : turned indirect don't alias anything else? They are distinct,
2895 : unaliased memory in the wrapper, and the wrapped can't possibly
2896 : take pointers into them because none of the pointers passed to the
2897 : wrapper can alias other incoming parameters passed by value, even
2898 : if with transparent reference, and the wrapper doesn't take any
2899 : extra parms that could point into wrapper's parms. So we can
2900 : probably drop the TREE_ADDRESSABLE and keep the TRUE. */
2901 14 : tree ref_type = build_ref_type_for (nparm);
2902 :
2903 14 : if (TREE_THIS_VOLATILE (nparm)
2904 5 : && TYPE_VOLATILE (TREE_TYPE (nparm))
2905 19 : && !TYPE_VOLATILE (ref_type))
2906 5 : TREE_SIDE_EFFECTS (nparm) = TREE_THIS_VOLATILE (nparm) = 0;
2907 14 : DECL_ARG_TYPE (nparm) = TREE_TYPE (nparm) = ref_type;
2908 14 : relayout_decl (nparm);
2909 14 : TREE_ADDRESSABLE (nparm) = 0;
2910 14 : DECL_BY_REFERENCE (nparm) = 0;
2911 14 : DECL_NOT_GIMPLE_REG_P (nparm) = 0;
2912 : /* ??? This avoids mismatches in debug info bind stmts in
2913 : e.g. a-chahan . */
2914 14 : DECL_ABSTRACT_ORIGIN (nparm) = NULL;
2915 :
2916 14 : if (nparmt)
2917 14 : adjust_ftype++;
2918 : }
2919 :
2920 : /* Also adjust the wrapped function type, if needed. */
2921 470 : if (adjust_ftype)
2922 : {
2923 14 : tree nftype = TREE_TYPE (nnode->decl);
2924 :
2925 : /* We always add at least one argument at the end of the signature, when
2926 : cloning the function, so we don't expect to need to duplicate the
2927 : type here. */
2928 14 : gcc_checking_assert (TYPE_ARG_TYPES (nftype)
2929 : != TYPE_ARG_TYPES (TREE_TYPE (onode->decl)));
2930 :
2931 : /* Check that fnspec still works for the modified function signature,
2932 : and drop it otherwise. */
2933 14 : bool drop_fnspec = false;
2934 14 : tree fnspec = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (nftype));
2935 14 : attr_fnspec spec = fnspec ? attr_fnspec (fnspec) : attr_fnspec ("");
2936 :
2937 14 : unsigned retcopy;
2938 14 : if (!(fnspec && spec.returns_arg (&retcopy)))
2939 : retcopy = (unsigned) -1;
2940 :
2941 14 : unsigned i = 0;
2942 14 : for (tree nparm = DECL_ARGUMENTS (nnode->decl),
2943 14 : nparmt = TYPE_ARG_TYPES (nftype);
2944 32 : adjust_ftype > 0;
2945 18 : i++, nparm = DECL_CHAIN (nparm), nparmt = TREE_CHAIN (nparmt))
2946 18 : if (indirect_nparms.contains (nparm))
2947 : {
2948 14 : TREE_VALUE (nparmt) = TREE_TYPE (nparm);
2949 14 : adjust_ftype--;
2950 :
2951 14 : if (fnspec && !drop_fnspec)
2952 : {
2953 0 : if (i == retcopy)
2954 : drop_fnspec = true;
2955 0 : else if (spec.arg_specified_p (i))
2956 : {
2957 : /* Properties that apply to pointers only must not be
2958 : present, because we don't make pointers further
2959 : indirect. */
2960 0 : gcc_checking_assert
2961 : (!spec.arg_max_access_size_given_by_arg_p (i, NULL));
2962 0 : gcc_checking_assert (!spec.arg_copied_to_arg_p (i, NULL));
2963 :
2964 : /* Any claim of direct access only is invalidated by
2965 : adding an indirection level. */
2966 0 : if (spec.arg_direct_p (i))
2967 : drop_fnspec = true;
2968 :
2969 : /* If there's a claim the argument is not read from, the
2970 : added indirection invalidates it: if the argument is
2971 : used at all, then the pointer will necessarily be
2972 : read. */
2973 0 : if (!spec.arg_maybe_read_p (i)
2974 0 : && spec.arg_used_p (i))
2975 : drop_fnspec = true;
2976 : }
2977 : }
2978 : }
2979 :
2980 : /* ??? Maybe we could adjust it instead. Note we don't need
2981 : to mess with attribute access: pointer-typed parameters are
2982 : not modified, so they can remain unchanged. */
2983 14 : if (drop_fnspec)
2984 0 : remove_named_attribute_unsharing ("fn spec",
2985 0 : &TYPE_ATTRIBUTES (nftype));
2986 :
2987 14 : TREE_TYPE (nnode->decl) = nftype;
2988 : }
2989 :
2990 : #if ATTR_FNSPEC_DECONST_WATERMARK
2991 : {
2992 : int flags = flags_from_decl_or_type (nnode->decl);
2993 : tree fnspec = lookup_attribute ("fn spec", TREE_TYPE (nnode->decl));
2994 :
2995 : if ((flags & (ECF_CONST | ECF_PURE | ECF_NOVOPS)) || fnspec)
2996 : {
2997 : size_t xargs = 1 + int (is_stdarg) + int (apply_args);
2998 : size_t curlen = 0, tgtlen = 2 + 2 * (named_args + xargs);
2999 : auto_vec<char> nspecv (tgtlen);
3000 : char *nspec = &nspecv[0]; /* It will *not* be NUL-terminated! */
3001 : bool no_writes_p = true;
3002 : if (fnspec)
3003 : {
3004 : tree fnspecstr = TREE_VALUE (TREE_VALUE (fnspec));
3005 : curlen = TREE_STRING_LENGTH (fnspecstr);
3006 : memcpy (nspec, TREE_STRING_POINTER (fnspecstr), curlen);
3007 : if (!(flags & (ECF_CONST | ECF_PURE | ECF_NOVOPS))
3008 : && curlen >= 2
3009 : && nspec[1] != 'c' && nspec[1] != 'C'
3010 : && nspec[1] != 'p' && nspec[1] != 'P')
3011 : no_writes_p = false;
3012 : }
3013 : if (!curlen)
3014 : {
3015 : nspec[curlen++] = '.';
3016 : nspec[curlen++] = ((flags & ECF_CONST)
3017 : ? 'c'
3018 : : (flags & ECF_PURE)
3019 : ? 'p'
3020 : : ' ');
3021 : }
3022 : while (curlen < tgtlen - 2 * xargs)
3023 : {
3024 : nspec[curlen++] = '.';
3025 : nspec[curlen++] = ' ';
3026 : }
3027 :
3028 : /* These extra args are unlikely to be present in const or pure
3029 : functions. It's conceivable that a function that takes variable
3030 : arguments, or that passes its arguments on to another function,
3031 : could be const or pure, but it would not modify the arguments, and,
3032 : being pure or const, it couldn't possibly modify or even access
3033 : memory referenced by them. But it can read from these internal
3034 : data structures created by the wrapper, and from any
3035 : argument-passing memory referenced by them, so we denote the
3036 : possibility of reading from multiple levels of indirection, but
3037 : only of reading because const/pure. */
3038 : if (apply_args)
3039 : {
3040 : nspec[curlen++] = 'r';
3041 : nspec[curlen++] = ' ';
3042 : }
3043 : if (is_stdarg)
3044 : {
3045 : nspec[curlen++] = (no_writes_p ? 'r' : '.');
3046 : nspec[curlen++] = (no_writes_p ? 't' : ' ');
3047 : }
3048 :
3049 : nspec[curlen++] = 'W';
3050 : nspec[curlen++] = 't';
3051 :
3052 : /* The type has already been copied before adding parameters. */
3053 : gcc_checking_assert (TYPE_ARG_TYPES (TREE_TYPE (nnode->decl))
3054 : != TYPE_ARG_TYPES (TREE_TYPE (onode->decl)));
3055 : TYPE_ATTRIBUTES (TREE_TYPE (nnode->decl))
3056 : = tree_cons (get_identifier ("fn spec"),
3057 : build_tree_list (NULL_TREE,
3058 : build_string (tgtlen, nspec)),
3059 : TYPE_ATTRIBUTES (TREE_TYPE (nnode->decl)));
3060 : }
3061 : }
3062 : #else
3063 470 : (void) named_args;
3064 : #endif
3065 :
3066 470 : {
3067 470 : tree decl = onode->decl;
3068 470 : cgraph_node *target = nnode;
3069 :
3070 470 : { // copied from create_wrapper
3071 :
3072 : /* Preserve DECL_RESULT so we get right by reference flag. */
3073 470 : tree decl_result = DECL_RESULT (decl);
3074 :
3075 : /* Remove the function's body but keep arguments to be reused
3076 : for thunk. */
3077 470 : onode->release_body (true);
3078 470 : onode->reset (/* unlike create_wrapper: preserve_comdat_group = */true);
3079 :
3080 470 : DECL_UNINLINABLE (decl) = false;
3081 470 : DECL_RESULT (decl) = decl_result;
3082 470 : DECL_INITIAL (decl) = NULL;
3083 470 : allocate_struct_function (decl, false);
3084 470 : set_cfun (NULL);
3085 :
3086 : /* Turn alias into thunk and expand it into GIMPLE representation. */
3087 470 : onode->definition = true;
3088 :
3089 470 : thunk_info::get_create (onode);
3090 470 : onode->thunk = true;
3091 470 : onode->create_edge (target, NULL, onode->count);
3092 470 : onode->callees->can_throw_external = !TREE_NOTHROW (target->decl);
3093 :
3094 470 : tree arguments = DECL_ARGUMENTS (decl);
3095 :
3096 613 : while (arguments)
3097 : {
3098 143 : TREE_ADDRESSABLE (arguments) = false;
3099 143 : arguments = TREE_CHAIN (arguments);
3100 : }
3101 :
3102 470 : {
3103 470 : tree alias = onode->callees->callee->decl;
3104 470 : tree thunk_fndecl = decl;
3105 470 : tree a;
3106 :
3107 470 : int nxargs = 1 + is_stdarg + apply_args;
3108 :
3109 470 : { // Simplified from expand_thunk.
3110 470 : tree restype;
3111 470 : basic_block bb, then_bb, else_bb, return_bb;
3112 470 : gimple_stmt_iterator bsi;
3113 470 : int nargs = 0;
3114 470 : tree arg;
3115 470 : int i;
3116 470 : tree resdecl;
3117 470 : tree restmp = NULL;
3118 :
3119 470 : gcall *call;
3120 470 : greturn *ret;
3121 470 : bool alias_is_noreturn = TREE_THIS_VOLATILE (alias);
3122 :
3123 470 : a = DECL_ARGUMENTS (thunk_fndecl);
3124 :
3125 470 : current_function_decl = thunk_fndecl;
3126 :
3127 : /* Ensure thunks are emitted in their correct sections. */
3128 470 : resolve_unique_section (thunk_fndecl, 0,
3129 : flag_function_sections);
3130 :
3131 470 : bitmap_obstack_initialize (NULL);
3132 :
3133 : /* Build the return declaration for the function. */
3134 470 : restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
3135 470 : if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
3136 : {
3137 0 : resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
3138 0 : DECL_ARTIFICIAL (resdecl) = 1;
3139 0 : DECL_IGNORED_P (resdecl) = 1;
3140 0 : DECL_CONTEXT (resdecl) = thunk_fndecl;
3141 0 : DECL_RESULT (thunk_fndecl) = resdecl;
3142 : }
3143 : else
3144 : resdecl = DECL_RESULT (thunk_fndecl);
3145 :
3146 470 : profile_count cfg_count = onode->count;
3147 470 : if (!cfg_count.initialized_p ())
3148 161 : cfg_count = profile_count::from_gcov_type (BB_FREQ_MAX).guessed_local ();
3149 :
3150 940 : bb = then_bb = else_bb = return_bb
3151 470 : = init_lowered_empty_function (thunk_fndecl, true, cfg_count);
3152 :
3153 470 : bsi = gsi_start_bb (bb);
3154 :
3155 : /* Build call to the function being thunked. */
3156 470 : if (!VOID_TYPE_P (restype)
3157 470 : && (!alias_is_noreturn
3158 0 : || TREE_ADDRESSABLE (restype)
3159 0 : || TREE_CODE (TYPE_SIZE_UNIT (restype)) != INTEGER_CST))
3160 : {
3161 356 : if (DECL_BY_REFERENCE (resdecl))
3162 : {
3163 6 : restmp = gimple_fold_indirect_ref (resdecl);
3164 6 : if (!restmp)
3165 12 : restmp = build2 (MEM_REF,
3166 6 : TREE_TYPE (TREE_TYPE (resdecl)),
3167 : resdecl,
3168 6 : build_int_cst (TREE_TYPE (resdecl), 0));
3169 : }
3170 350 : else if (aggregate_value_p (resdecl, TREE_TYPE (thunk_fndecl)))
3171 : {
3172 1 : restmp = resdecl;
3173 :
3174 1 : if (VAR_P (restmp))
3175 : {
3176 0 : add_local_decl (cfun, restmp);
3177 0 : BLOCK_VARS (DECL_INITIAL (current_function_decl))
3178 0 : = restmp;
3179 : }
3180 : }
3181 : else
3182 349 : restmp = create_tmp_reg (restype, "retval");
3183 : }
3184 :
3185 613 : for (arg = a; arg; arg = DECL_CHAIN (arg))
3186 143 : nargs++;
3187 470 : auto_vec<tree> vargs (nargs + nxargs);
3188 470 : i = 0;
3189 470 : arg = a;
3190 :
3191 470 : if (nargs)
3192 64 : for (tree nparm = DECL_ARGUMENTS (nnode->decl);
3193 207 : i < nargs;
3194 143 : i++, arg = DECL_CHAIN (arg), nparm = DECL_CHAIN (nparm))
3195 : {
3196 143 : tree save_arg = arg;
3197 :
3198 : /* Arrange to pass indirectly the parms, if we decided to do
3199 : so, and revert its type in the wrapper. */
3200 143 : if (indirect_nparms.contains (nparm))
3201 : {
3202 14 : tree ref_type = TREE_TYPE (nparm);
3203 14 : TREE_ADDRESSABLE (arg) = true;
3204 14 : arg = build1 (ADDR_EXPR, ref_type, arg);
3205 : }
3206 129 : else if (!TREE_THIS_VOLATILE (arg))
3207 129 : DECL_NOT_GIMPLE_REG_P (arg) = 0;
3208 :
3209 : /* Convert the argument back to the type used by the calling
3210 : conventions, e.g. a non-prototyped float type is passed as
3211 : double, as in 930603-1.c, and needs to be converted back to
3212 : double to be passed on unchanged to the wrapped
3213 : function. */
3214 143 : if (TREE_TYPE (nparm) != DECL_ARG_TYPE (nparm))
3215 : {
3216 0 : tree tmp = arg;
3217 : /* If ARG is e.g. volatile, we must copy and
3218 : convert in separate statements. */
3219 0 : if (!is_gimple_val (arg))
3220 : {
3221 0 : tmp = create_tmp_reg (TYPE_MAIN_VARIANT
3222 : (TREE_TYPE (arg)), "arg");
3223 0 : gimple *stmt = gimple_build_assign (tmp, arg);
3224 0 : gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
3225 : }
3226 0 : arg = fold_convert (DECL_ARG_TYPE (nparm), tmp);
3227 : }
3228 :
3229 143 : if (!is_gimple_val (arg))
3230 : {
3231 0 : tree tmp = create_tmp_reg (TYPE_MAIN_VARIANT
3232 : (TREE_TYPE (arg)), "arg");
3233 0 : gimple *stmt = gimple_build_assign (tmp, arg);
3234 0 : gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
3235 0 : arg = tmp;
3236 : }
3237 143 : vargs.quick_push (arg);
3238 143 : arg = save_arg;
3239 : }
3240 : /* These strub arguments are adjusted later. */
3241 470 : if (apply_args)
3242 16 : vargs.quick_push (null_pointer_node);
3243 470 : if (is_stdarg)
3244 8 : vargs.quick_push (null_pointer_node);
3245 470 : vargs.quick_push (null_pointer_node);
3246 470 : call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias),
3247 : vargs);
3248 470 : onode->callees->call_stmt = call;
3249 : // gimple_call_set_from_thunk (call, true);
3250 470 : if (DECL_STATIC_CHAIN (alias))
3251 : {
3252 0 : tree p = DECL_STRUCT_FUNCTION (alias)->static_chain_decl;
3253 0 : tree type = TREE_TYPE (p);
3254 0 : tree decl = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
3255 : PARM_DECL, create_tmp_var_name ("CHAIN"),
3256 : type);
3257 0 : DECL_ARTIFICIAL (decl) = 1;
3258 0 : DECL_IGNORED_P (decl) = 1;
3259 0 : TREE_USED (decl) = 1;
3260 0 : DECL_CONTEXT (decl) = thunk_fndecl;
3261 0 : DECL_ARG_TYPE (decl) = type;
3262 0 : TREE_READONLY (decl) = 1;
3263 :
3264 0 : struct function *sf = DECL_STRUCT_FUNCTION (thunk_fndecl);
3265 0 : sf->static_chain_decl = decl;
3266 :
3267 0 : gimple_call_set_chain (call, decl);
3268 : }
3269 :
3270 : /* Return slot optimization is always possible and in fact required to
3271 : return values with DECL_BY_REFERENCE. */
3272 470 : if (aggregate_value_p (resdecl, TREE_TYPE (thunk_fndecl))
3273 470 : && (!is_gimple_reg_type (TREE_TYPE (resdecl))
3274 7 : || DECL_BY_REFERENCE (resdecl)))
3275 6 : gimple_call_set_return_slot_opt (call, true);
3276 :
3277 470 : if (restmp)
3278 : {
3279 356 : gimple_call_set_lhs (call, restmp);
3280 356 : gcc_assert (useless_type_conversion_p (TREE_TYPE (restmp),
3281 : TREE_TYPE (TREE_TYPE (alias))));
3282 : }
3283 470 : gsi_insert_after (&bsi, call, GSI_NEW_STMT);
3284 470 : if (!alias_is_noreturn)
3285 : {
3286 : /* Build return value. */
3287 469 : if (!DECL_BY_REFERENCE (resdecl))
3288 463 : ret = gimple_build_return (restmp);
3289 : else
3290 6 : ret = gimple_build_return (resdecl);
3291 :
3292 469 : gsi_insert_after (&bsi, ret, GSI_NEW_STMT);
3293 : }
3294 : else
3295 : {
3296 1 : remove_edge (single_succ_edge (bb));
3297 : }
3298 :
3299 470 : cfun->gimple_df->in_ssa_p = true;
3300 470 : update_max_bb_count ();
3301 470 : profile_status_for_fn (cfun)
3302 940 : = cfg_count.initialized_p () && cfg_count.ipa_p ()
3303 470 : ? PROFILE_READ : PROFILE_GUESSED;
3304 : /* FIXME: C++ FE should stop setting TREE_ASM_WRITTEN on thunks. */
3305 : // TREE_ASM_WRITTEN (thunk_fndecl) = false;
3306 470 : delete_unreachable_blocks ();
3307 470 : update_ssa (TODO_update_ssa);
3308 470 : checking_verify_flow_info ();
3309 470 : free_dominance_info (CDI_DOMINATORS);
3310 :
3311 : /* Since we want to emit the thunk, we explicitly mark its name as
3312 : referenced. */
3313 470 : onode->thunk = false;
3314 470 : onode->lowered = true;
3315 470 : bitmap_obstack_release (NULL);
3316 470 : }
3317 470 : current_function_decl = NULL;
3318 470 : set_cfun (NULL);
3319 : }
3320 :
3321 470 : thunk_info::remove (onode);
3322 :
3323 : // some more of create_wrapper at the end of the next block.
3324 : }
3325 : }
3326 :
3327 470 : {
3328 470 : tree aaval = NULL_TREE;
3329 470 : tree vaptr = NULL_TREE;
3330 470 : tree wmptr = NULL_TREE;
3331 1107 : for (tree arg = DECL_ARGUMENTS (nnode->decl); arg; arg = DECL_CHAIN (arg))
3332 : {
3333 637 : aaval = vaptr;
3334 637 : vaptr = wmptr;
3335 637 : wmptr = arg;
3336 : }
3337 :
3338 470 : if (!apply_args)
3339 : aaval = NULL_TREE;
3340 : /* The trailing args are [apply_args], [va_list_ptr], and
3341 : watermark. If we don't have a va_list_ptr, the penultimate
3342 : argument is apply_args.
3343 : */
3344 16 : else if (!is_stdarg)
3345 : aaval = vaptr;
3346 :
3347 454 : if (!is_stdarg)
3348 : vaptr = NULL_TREE;
3349 :
3350 470 : DECL_NAME (wmptr) = get_watermark_ptr ();
3351 470 : DECL_ARTIFICIAL (wmptr) = 1;
3352 470 : DECL_IGNORED_P (wmptr) = 1;
3353 470 : TREE_USED (wmptr) = 1;
3354 :
3355 470 : if (is_stdarg)
3356 : {
3357 8 : DECL_NAME (vaptr) = get_va_list_ptr ();
3358 8 : DECL_ARTIFICIAL (vaptr) = 1;
3359 8 : DECL_IGNORED_P (vaptr) = 1;
3360 8 : TREE_USED (vaptr) = 1;
3361 : }
3362 :
3363 470 : if (apply_args)
3364 : {
3365 16 : DECL_NAME (aaval) = get_apply_args ();
3366 16 : DECL_ARTIFICIAL (aaval) = 1;
3367 16 : DECL_IGNORED_P (aaval) = 1;
3368 16 : TREE_USED (aaval) = 1;
3369 : }
3370 :
3371 470 : push_cfun (DECL_STRUCT_FUNCTION (nnode->decl));
3372 :
3373 470 : {
3374 470 : edge e = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
3375 470 : gimple_seq seq = call_update_watermark (wmptr, nnode, e->src->count);
3376 470 : gsi_insert_seq_on_edge_immediate (e, seq);
3377 : }
3378 :
3379 470 : bool any_indirect = !indirect_nparms.is_empty ();
3380 :
3381 470 : if (any_indirect)
3382 : {
3383 14 : basic_block bb;
3384 14 : bool needs_commit = false;
3385 42 : FOR_EACH_BB_FN (bb, cfun)
3386 : {
3387 28 : for (gphi_iterator gsi = gsi_start_nonvirtual_phis (bb);
3388 28 : !gsi_end_p (gsi);
3389 0 : gsi_next_nonvirtual_phi (&gsi))
3390 : {
3391 0 : gphi *stmt = gsi.phi ();
3392 :
3393 0 : walk_stmt_info wi = {};
3394 0 : wi.info = &indirect_nparms;
3395 0 : walk_gimple_op (stmt, walk_make_indirect, &wi);
3396 0 : if (wi.changed && !is_gimple_debug (gsi_stmt (gsi)))
3397 0 : if (walk_regimplify_phi (stmt))
3398 0 : needs_commit = true;
3399 : }
3400 :
3401 56 : for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
3402 115 : !gsi_end_p (gsi); gsi_next (&gsi))
3403 : {
3404 87 : gimple *stmt = gsi_stmt (gsi);
3405 :
3406 87 : walk_stmt_info wi = {};
3407 87 : wi.info = &indirect_nparms;
3408 87 : walk_gimple_op (stmt, walk_make_indirect, &wi);
3409 87 : if (wi.changed)
3410 : {
3411 6 : if (!is_gimple_debug (stmt))
3412 : {
3413 6 : wi.info = &gsi;
3414 6 : walk_gimple_op (stmt, walk_regimplify_addr_expr,
3415 : &wi);
3416 : }
3417 6 : update_stmt (stmt);
3418 : }
3419 : }
3420 : }
3421 14 : if (needs_commit)
3422 0 : gsi_commit_edge_inserts ();
3423 : }
3424 :
3425 470 : if (DECL_STRUCT_FUNCTION (nnode->decl)->calls_alloca
3426 470 : || is_stdarg || apply_args)
3427 618 : for (cgraph_edge *e = nnode->callees, *enext; e; e = enext)
3428 : {
3429 534 : if (!e->call_stmt)
3430 0 : continue;
3431 :
3432 534 : gcall *call = e->call_stmt;
3433 534 : gimple_stmt_iterator gsi = gsi_for_stmt (call);
3434 534 : tree fndecl = e->callee->decl;
3435 :
3436 534 : enext = e->next_callee;
3437 :
3438 534 : if (gimple_alloca_call_p (call))
3439 : {
3440 74 : gimple_seq seq = call_update_watermark (wmptr, NULL,
3441 74 : gsi_bb (gsi)->count);
3442 74 : gsi_insert_finally_seq_after_call (gsi, seq);
3443 : }
3444 460 : else if (fndecl && is_stdarg
3445 460 : && fndecl_built_in_p (fndecl, BUILT_IN_VA_START))
3446 : {
3447 : /* Using a non-default stdarg ABI makes the function ineligible
3448 : for internal strub. */
3449 8 : gcc_checking_assert (builtin_decl_explicit (BUILT_IN_VA_START)
3450 : == fndecl);
3451 8 : tree bvacopy = builtin_decl_explicit (BUILT_IN_VA_COPY);
3452 8 : gimple_call_set_fndecl (call, bvacopy);
3453 8 : tree arg = vaptr;
3454 : /* The va_copy source must be dereferenced, unless it's an array
3455 : type, that would have decayed to a pointer. */
3456 8 : if (TREE_CODE (TREE_TYPE (TREE_TYPE (vaptr))) != ARRAY_TYPE)
3457 : {
3458 0 : arg = gimple_fold_indirect_ref (vaptr);
3459 0 : if (!arg)
3460 0 : arg = build2 (MEM_REF,
3461 0 : TREE_TYPE (TREE_TYPE (vaptr)),
3462 : vaptr,
3463 0 : build_int_cst (TREE_TYPE (vaptr), 0));
3464 0 : if (!is_gimple_val (arg))
3465 0 : arg = force_gimple_operand_gsi (&gsi, arg, true,
3466 : NULL_TREE, true, GSI_SAME_STMT);
3467 : }
3468 8 : gimple_call_set_arg (call, 1, arg);
3469 8 : update_stmt (call);
3470 8 : e->redirect_callee (cgraph_node::get_create (bvacopy));
3471 : }
3472 452 : else if (fndecl && apply_args
3473 452 : && fndecl_built_in_p (fndecl, BUILT_IN_APPLY_ARGS))
3474 : {
3475 16 : tree lhs = gimple_call_lhs (call);
3476 16 : gimple *assign = (lhs
3477 16 : ? gimple_build_assign (lhs, aaval)
3478 12 : : gimple_build_nop ());
3479 16 : gsi_replace (&gsi, assign, true);
3480 16 : cgraph_edge::remove (e);
3481 : }
3482 : }
3483 :
3484 470 : { // a little more copied from create_wrapper
3485 :
3486 : /* Inline summary set-up. */
3487 470 : nnode->analyze ();
3488 : // inline_analyze_function (nnode);
3489 : }
3490 :
3491 470 : pop_cfun ();
3492 : }
3493 :
3494 470 : {
3495 470 : push_cfun (DECL_STRUCT_FUNCTION (onode->decl));
3496 470 : gimple_stmt_iterator gsi
3497 470 : = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
3498 :
3499 470 : gcall *wrcall;
3500 470 : while (!(wrcall = dyn_cast <gcall *> (gsi_stmt (gsi))))
3501 0 : gsi_next (&gsi);
3502 :
3503 470 : tree swm = create_tmp_var (get_wmt (), ".strub.watermark");
3504 470 : TREE_ADDRESSABLE (swm) = true;
3505 470 : tree swmp = build1 (ADDR_EXPR, get_pwmt (), swm);
3506 :
3507 470 : tree enter = get_enter ();
3508 470 : gcall *stptr = gimple_build_call (enter, 1, unshare_expr (swmp));
3509 470 : gimple_set_location (stptr, gimple_location (wrcall));
3510 470 : gsi_insert_before (&gsi, stptr, GSI_SAME_STMT);
3511 470 : onode->create_edge (cgraph_node::get_create (enter),
3512 470 : stptr, gsi_bb (gsi)->count, false);
3513 :
3514 470 : int nargs = gimple_call_num_args (wrcall);
3515 :
3516 470 : gimple_seq seq = NULL;
3517 :
3518 470 : if (apply_args)
3519 : {
3520 16 : tree aalst = create_tmp_var (ptr_type_node, ".strub.apply_args");
3521 16 : tree bappargs = builtin_decl_explicit (BUILT_IN_APPLY_ARGS);
3522 16 : gcall *appargs = gimple_build_call (bappargs, 0);
3523 16 : gimple_call_set_lhs (appargs, aalst);
3524 16 : gimple_set_location (appargs, gimple_location (wrcall));
3525 16 : gsi_insert_before (&gsi, appargs, GSI_SAME_STMT);
3526 16 : gimple_call_set_arg (wrcall, nargs - 2 - is_stdarg, aalst);
3527 16 : onode->create_edge (cgraph_node::get_create (bappargs),
3528 16 : appargs, gsi_bb (gsi)->count, false);
3529 : }
3530 :
3531 470 : if (is_stdarg)
3532 : {
3533 8 : tree valst = create_tmp_var (va_list_type_node, ".strub.va_list");
3534 8 : TREE_ADDRESSABLE (valst) = true;
3535 8 : tree vaptr = build1 (ADDR_EXPR,
3536 : build_pointer_type (va_list_type_node),
3537 : valst);
3538 8 : gimple_call_set_arg (wrcall, nargs - 2, unshare_expr (vaptr));
3539 :
3540 8 : tree bvastart = builtin_decl_explicit (BUILT_IN_VA_START);
3541 8 : gcall *vastart = gimple_build_call (bvastart, 2,
3542 : unshare_expr (vaptr),
3543 : integer_zero_node);
3544 8 : gimple_set_location (vastart, gimple_location (wrcall));
3545 8 : gsi_insert_before (&gsi, vastart, GSI_SAME_STMT);
3546 8 : onode->create_edge (cgraph_node::get_create (bvastart),
3547 8 : vastart, gsi_bb (gsi)->count, false);
3548 :
3549 8 : tree bvaend = builtin_decl_explicit (BUILT_IN_VA_END);
3550 8 : gcall *vaend = gimple_build_call (bvaend, 1, unshare_expr (vaptr));
3551 8 : gimple_set_location (vaend, gimple_location (wrcall));
3552 8 : gimple_seq_add_stmt (&seq, vaend);
3553 : }
3554 :
3555 470 : gimple_call_set_arg (wrcall, nargs - 1, unshare_expr (swmp));
3556 : // gimple_call_set_tail (wrcall, false);
3557 470 : update_stmt (wrcall);
3558 :
3559 470 : {
3560 : #if !ATTR_FNSPEC_DECONST_WATERMARK
3561 : /* If the call will be assumed to not modify or even read the
3562 : watermark, make it read and modified ourselves. */
3563 470 : if ((gimple_call_flags (wrcall)
3564 470 : & (ECF_CONST | ECF_PURE | ECF_NOVOPS)))
3565 : {
3566 121 : vec<tree, va_gc> *inputs = NULL;
3567 121 : vec<tree, va_gc> *outputs = NULL;
3568 242 : vec_safe_push (outputs,
3569 : build_tree_list
3570 121 : (build_tree_list
3571 : (NULL_TREE, build_string (2, "=m")),
3572 : swm));
3573 242 : vec_safe_push (inputs,
3574 : build_tree_list
3575 121 : (build_tree_list
3576 : (NULL_TREE, build_string (1, "m")),
3577 : swm));
3578 121 : gasm *forcemod = gimple_build_asm_vec ("", inputs, outputs,
3579 : NULL, NULL);
3580 121 : gimple_seq_add_stmt (&seq, forcemod);
3581 :
3582 : /* If the call will be assumed to not even read the watermark,
3583 : make sure it is already in memory before the call. */
3584 121 : if ((gimple_call_flags (wrcall) & ECF_CONST))
3585 : {
3586 41 : vec<tree, va_gc> *inputs = NULL;
3587 82 : vec_safe_push (inputs,
3588 : build_tree_list
3589 41 : (build_tree_list
3590 : (NULL_TREE, build_string (1, "m")),
3591 : swm));
3592 41 : gasm *force_store = gimple_build_asm_vec ("", inputs, NULL,
3593 : NULL, NULL);
3594 41 : gimple_set_location (force_store, gimple_location (wrcall));
3595 41 : gsi_insert_before (&gsi, force_store, GSI_SAME_STMT);
3596 : }
3597 : }
3598 : #endif
3599 :
3600 470 : gcall *sleave = gimple_build_call (get_leave (), 1,
3601 : unshare_expr (swmp));
3602 470 : gimple_seq_add_stmt (&seq, sleave);
3603 :
3604 470 : gassign *clobber = gimple_build_assign (swm,
3605 : build_clobber
3606 470 : (TREE_TYPE (swm)));
3607 470 : gimple_seq_add_stmt (&seq, clobber);
3608 : }
3609 :
3610 470 : gsi_insert_finally_seq_after_call (gsi, seq);
3611 :
3612 : /* For nnode, we don't rebuild edges because we wish to retain
3613 : any redirections copied to it from earlier passes, so we add
3614 : call graph edges explicitly there, but for onode, we create a
3615 : fresh function, so we may as well just issue the calls and
3616 : then rebuild all cgraph edges. */
3617 : // cgraph_edge::rebuild_edges ();
3618 470 : onode->analyze ();
3619 : // inline_analyze_function (onode);
3620 :
3621 470 : pop_cfun ();
3622 : }
3623 470 : }
3624 :
3625 546 : return 0;
3626 : }
3627 :
3628 : simple_ipa_opt_pass *
3629 285722 : make_pass_ipa_strub (gcc::context *ctxt)
3630 : {
3631 285722 : return new pass_ipa_strub (ctxt);
3632 : }
3633 :
3634 : #include "gt-ipa-strub.h"
|