Branch data Line data Source code
1 : : /* strub (stack scrubbing) support.
2 : : Copyright (C) 2021-2024 Free Software Foundation, Inc.
3 : : Contributed by Alexandre Oliva <oliva@adacore.com>.
4 : :
5 : : This file is part of GCC.
6 : :
7 : : GCC is free software; you can redistribute it and/or modify it under
8 : : the terms of the GNU General Public License as published by the Free
9 : : Software Foundation; either version 3, or (at your option) any later
10 : : version.
11 : :
12 : : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 : : WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 : : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 : : for more details.
16 : :
17 : : You should have received a copy of the GNU General Public License
18 : : along with GCC; see the file COPYING3. If not see
19 : : <http://www.gnu.org/licenses/>. */
20 : :
21 : : #include "config.h"
22 : : #include "system.h"
23 : : #include "coretypes.h"
24 : : #include "backend.h"
25 : : #include "tree.h"
26 : : #include "gimple.h"
27 : : #include "gimplify.h"
28 : : #include "tree-pass.h"
29 : : #include "ssa.h"
30 : : #include "gimple-iterator.h"
31 : : #include "gimplify-me.h"
32 : : #include "tree-into-ssa.h"
33 : : #include "tree-ssa.h"
34 : : #include "tree-cfg.h"
35 : : #include "cfghooks.h"
36 : : #include "cfgloop.h"
37 : : #include "cfgcleanup.h"
38 : : #include "tree-eh.h"
39 : : #include "except.h"
40 : : #include "builtins.h"
41 : : #include "attribs.h"
42 : : #include "tree-inline.h"
43 : : #include "cgraph.h"
44 : : #include "alloc-pool.h"
45 : : #include "symbol-summary.h"
46 : : #include "sreal.h"
47 : : #include "ipa-cp.h"
48 : : #include "ipa-prop.h"
49 : : #include "ipa-fnsummary.h"
50 : : #include "gimple-fold.h"
51 : : #include "fold-const.h"
52 : : #include "gimple-walk.h"
53 : : #include "tree-dfa.h"
54 : : #include "langhooks.h"
55 : : #include "calls.h"
56 : : #include "vec.h"
57 : : #include "stor-layout.h"
58 : : #include "varasm.h"
59 : : #include "alias.h"
60 : : #include "diagnostic.h"
61 : : #include "intl.h"
62 : : #include "ipa-strub.h"
63 : : #include "symtab-thunks.h"
64 : : #include "attr-fnspec.h"
65 : : #include "target.h"
66 : :
67 : : /* This file introduces two passes that, together, implement
68 : : machine-independent stack scrubbing, strub for short. It arranges
69 : : for stack frames that have strub enabled to be zeroed-out after
70 : : relinquishing control to a caller, whether by returning or by
71 : : propagating an exception. This admittedly unusual design decision
72 : : was driven by exception support (one needs a stack frame to be
73 : : active to propagate exceptions out of it), and it enabled an
74 : : implementation that is entirely machine-independent (no custom
75 : : epilogue code is required).
76 : :
77 : : Strub modes can be selected for stack frames by attaching attribute
78 : : strub to functions or to variables (to their types, actually).
79 : : Different strub modes, with different implementation details, are
80 : : available, and they can be selected by an argument to the strub
81 : : attribute. When enabled by strub-enabled variables, whether by
82 : : accessing (as in reading from) statically-allocated ones, or by
83 : : introducing (as in declaring) automatically-allocated ones, a
84 : : suitable mode is selected automatically.
85 : :
86 : : At-calls mode modifies the interface of a function, adding a stack
87 : : watermark argument, that callers use to clean up the stack frame of
88 : : the called function. Because of the interface change, it can only
89 : : be used when explicitly selected, or when a function is internal to
90 : : a translation unit. Strub-at-calls function types are distinct
91 : : from their original types (they're not modified in-place), and they
92 : : are not interchangeable with other function types.
93 : :
94 : : Internal mode, in turn, does not modify the type or the interface
95 : : of a function. It is currently implemented by turning the function
96 : : into a wrapper, moving the function body to a separate wrapped
97 : : function, and scrubbing the wrapped body's stack in the wrapper.
98 : : Internal-strub function types are mostly interface-compatible with
99 : : other strub modes, namely callable (from strub functions, though
100 : : not strub-enabled) and disabled (not callable from strub
101 : : functions).
102 : :
103 : : Always_inline functions can be strub functions, but they can only
104 : : be called from other strub functions, because strub functions must
105 : : never be inlined into non-strub functions. Internal and at-calls
106 : : modes are indistinguishable when it comes to always_inline
107 : : functions: they will necessarily be inlined into another strub
108 : : function, and will thus be integrated into the caller's stack
109 : : frame, whatever the mode. (Contrast with non-always_inline strub
110 : : functions: an at-calls function can be called from other strub
111 : : functions, ensuring no discontinuity in stack erasing, whereas an
112 : : internal-strub function can only be called from other strub
113 : : functions if it happens to be inlined, or if -fstrub=relaxed mode
114 : : is in effect (that's the default). In -fstrub=strict mode,
115 : : internal-strub functions are not callable from strub functions,
 116 : : because the wrapper itself is not strubbed.)
117 : :
118 : : The implementation involves two simple-IPA passes. The earliest
119 : : one, strub-mode, assigns strub modes to functions. It needs to run
120 : : before any inlining, so that we can prevent inlining of strub
121 : : functions into non-strub functions. It notes explicit strub mode
122 : : requests, enables strub in response to strub variables and testing
123 : : options, and flags unsatisfiable requests.
124 : :
125 : : Three possibilities of unsatisfiable requests come to mind: (a)
126 : : when a strub mode is explicitly selected, but the function uses
127 : : features that make it ineligible for that mode (e.g. at-calls rules
128 : : out calling __builtin_apply_args, because of the interface changes,
129 : : and internal mode rules out noclone or otherwise non-versionable
130 : : functions, non-default varargs, non-local or forced labels, and
131 : : functions with far too many arguments); (b) when some strub mode
132 : : must be enabled because of a strub variable, but the function is
133 : : not eligible or not viable for any mode; and (c) when
134 : : -fstrub=strict is enabled, and calls are found in strub functions
135 : : to functions that are not callable from strub contexts.
136 : : compute_strub_mode implements (a) and (b), and verify_strub
137 : : implements (c).
138 : :
139 : : The second IPA pass modifies interfaces of at-calls-strub functions
 140 : : and types, introduces strub calls in and around them, and splits
141 : : internal-strub functions. It is placed after early inlining, so
142 : : that even internal-strub functions get a chance of being inlined
143 : : into other strub functions, but before non-early inlining, so that
144 : : internal-strub wrapper functions still get a chance of inlining
145 : : after splitting.
146 : :
147 : : Wrappers avoid duplicating the copying of large arguments again by
148 : : passing them by reference to the wrapped bodies. This involves
149 : : occasional SSA rewriting of address computations, because of the
150 : : additional indirection. Besides these changes, and the
151 : : introduction of the stack watermark parameter, wrappers and wrapped
152 : : functions cooperate to handle variable argument lists (performing
153 : : va_start in the wrapper, passing the list as an argument, and
154 : : replacing va_start calls in the wrapped body with va_copy), and
155 : : __builtin_apply_args (also called in the wrapper and passed to the
156 : : wrapped body as an argument).
157 : :
158 : : Strub bodies (both internal-mode wrapped bodies, and at-calls
159 : : functions) always start by adjusting the watermark parameter, by
160 : : calling __builtin___strub_update. The compiler inserts them in the
161 : : main strub pass. Allocations of additional stack space for the
162 : : frame (__builtin_alloca) are also followed by watermark updates.
163 : : Stack space temporarily allocated to pass arguments to other
164 : : functions, released right after the call, is not regarded as part
165 : : of the frame. Around calls to them, i.e., in internal-mode
166 : : wrappers and at-calls callers (even calls through pointers), calls
167 : : to __builtin___strub_enter and __builtin___strub_leave are
168 : : inserted, the latter as a __finally block, so that it runs at
169 : : regular and exceptional exit paths. strub_enter only initializes
170 : : the stack watermark, and strub_leave is where the scrubbing takes
171 : : place, overwriting with zeros the stack space from the top of the
172 : : stack to the watermark.
173 : :
174 : : These calls can be optimized in various cases. In
175 : : pass_ipa_strub::adjust_at_calls_call, for example, we enable
176 : : tail-calling and other optimized calls from one strub body to
177 : : another by passing on the watermark parameter. The builtins
178 : : themselves may undergo inline substitution during expansion,
 179 : : depending on optimization levels. This involves dealing with stack
180 : : red zones (when the builtins are called out-of-line, the red zone
181 : : cannot be used) and other ugly details related with inlining strub
182 : : bodies into other strub bodies (see expand_builtin_strub_update).
183 : : expand_builtin_strub_leave may even perform partial inline
184 : : substitution. */
185 : :
186 : : /* Const and pure functions that gain a watermark parameter for strub purposes
187 : : are still regarded as such, which may cause the inline expansions of the
188 : : __strub builtins to malfunction. Ideally, attribute "fn spec" would enable
189 : : us to inform the backend about requirements and side effects of the call, but
190 : : call_fusage building in calls.c:expand_call does not even look at
191 : : attr_fnspec, so we resort to asm loads and updates to attain an equivalent
192 : : effect. Once expand_call gains the ability to issue extra memory uses and
193 : : clobbers based on pure/const function's fnspec, we can define this to 1. */
194 : : #define ATTR_FNSPEC_DECONST_WATERMARK 0
195 : :
196 : : enum strub_mode {
197 : : /* This mode denotes a regular function, that does not require stack
198 : : scrubbing (strubbing). It may call any other functions, but if
199 : : it calls AT_CALLS (or WRAPPED) ones, strubbing logic is
200 : : automatically introduced around those calls (the latter, by
201 : : inlining INTERNAL wrappers). */
202 : : STRUB_DISABLED = 0,
203 : :
204 : : /* This denotes a function whose signature is (to be) modified to
205 : : take an extra parameter, for stack use annotation, and its
206 : : callers must initialize and pass that argument, and perform the
207 : : strubbing. Functions that are explicitly marked with attribute
208 : : strub must have the mark visible wherever the function is,
209 : : including aliases, and overriders and overriding methods.
210 : : Functions that are implicitly marked for strubbing, for accessing
211 : : variables explicitly marked as such, will only select this
212 : : strubbing method if they are internal to a translation unit. It
213 : : can only be inlined into other strubbing functions, i.e.,
214 : : STRUB_AT_CALLS or STRUB_WRAPPED. */
215 : : STRUB_AT_CALLS = 1,
216 : :
217 : : /* This denotes a function that is to perform strubbing internally,
218 : : without any changes to its interface (the function is turned into
219 : : a strubbing wrapper, and its original body is moved to a separate
220 : : STRUB_WRAPPED function, with a modified interface). Functions
221 : : may be explicitly marked with attribute strub(2), and the
222 : : attribute must be visible at the point of definition. Functions
223 : : that are explicitly marked for strubbing, for accessing variables
224 : : explicitly marked as such, may select this strubbing mode if
225 : : their interface cannot change, e.g. because its interface is
226 : : visible to other translation units, directly, by indirection
227 : : (having its address taken), inheritance, etc. Functions that use
228 : : this method must not have the noclone attribute, nor the noipa
229 : : one. Functions marked as always_inline may select this mode, but
230 : : they are NOT wrapped, they remain unchanged, and are only inlined
231 : : into strubbed contexts. Once non-always_inline functions are
232 : : wrapped, the wrapper becomes STRUB_WRAPPER, and the wrapped becomes
233 : : STRUB_WRAPPED. */
234 : : STRUB_INTERNAL = 2,
235 : :
236 : : /* This denotes a function whose stack is not strubbed, but that is
237 : : nevertheless explicitly or implicitly marked as callable from strubbing
238 : : functions. Normally, only STRUB_AT_CALLS (and STRUB_INTERNAL ->
239 : : STRUB_WRAPPED) functions can be called from strubbing contexts (bodies of
240 : : STRUB_AT_CALLS, STRUB_INTERNAL and STRUB_WRAPPED functions), but attribute
241 : : strub(3) enables other functions to be (indirectly) called from these
242 : : contexts. Some builtins and internal functions may be implicitly marked as
243 : : STRUB_CALLABLE. */
244 : : STRUB_CALLABLE = 3,
245 : :
246 : : /* This denotes the function that took over the body of a
247 : : STRUB_INTERNAL function. At first, it's only called by its
248 : : wrapper, but the wrapper may be inlined. The wrapped function,
249 : : in turn, can only be inlined into other functions whose stack
250 : : frames are strubbed, i.e., that are STRUB_WRAPPED or
251 : : STRUB_AT_CALLS. */
252 : : STRUB_WRAPPED = -1,
253 : :
254 : : /* This denotes the wrapper function that replaced the STRUB_INTERNAL
255 : : function. This mode overrides the STRUB_INTERNAL mode at the time the
256 : : internal to-be-wrapped function becomes a wrapper, so that inlining logic
257 : : can tell one from the other. */
258 : : STRUB_WRAPPER = -2,
259 : :
260 : : /* This denotes an always_inline function that requires strubbing. It can
261 : : only be called from, and inlined into, other strubbing contexts. */
262 : : STRUB_INLINABLE = -3,
263 : :
264 : : /* This denotes a function that accesses strub variables, so it would call for
265 : : internal strubbing (whether or not it's eligible for that), but since
266 : : at-calls strubbing is viable, that's selected as an optimization. This
267 : : mode addresses the inconvenience that such functions may have different
268 : : modes selected depending on optimization flags, and get a different
269 : : callable status depending on that choice: if we assigned them
270 : : STRUB_AT_CALLS mode, they would be callable when optimizing, whereas
271 : : STRUB_INTERNAL would not be callable. */
272 : : STRUB_AT_CALLS_OPT = -4,
273 : :
274 : : };
275 : :
276 : : /* Look up a strub attribute in TYPE, and return it. */
277 : :
278 : : static tree
279 : 11870695 : get_strub_attr_from_type (tree type)
280 : : {
281 : 11870695 : return lookup_attribute ("strub", TYPE_ATTRIBUTES (type));
282 : : }
283 : :
284 : : /* Look up a strub attribute in DECL or in its type, and return it. */
285 : :
286 : : static tree
287 : 8971136 : get_strub_attr_from_decl (tree decl)
288 : : {
289 : 8971136 : tree ret = lookup_attribute ("strub", DECL_ATTRIBUTES (decl));
290 : 8971136 : if (ret)
291 : : return ret;
292 : 8954320 : return get_strub_attr_from_type (TREE_TYPE (decl));
293 : : }
294 : :
295 : : #define STRUB_ID_COUNT 8
296 : : #define STRUB_IDENT_COUNT 3
297 : : #define STRUB_TYPE_COUNT 5
298 : :
299 : : #define STRUB_ID_BASE 0
300 : : #define STRUB_IDENT_BASE (STRUB_ID_BASE + STRUB_ID_COUNT)
301 : : #define STRUB_TYPE_BASE (STRUB_IDENT_BASE + STRUB_IDENT_COUNT)
302 : : #define STRUB_CACHE_SIZE (STRUB_TYPE_BASE + STRUB_TYPE_COUNT)
303 : :
304 : : /* Keep the strub mode and temp identifiers and types from being GC'd. */
305 : : static GTY((deletable)) tree strub_cache[STRUB_CACHE_SIZE];
306 : :
307 : : /* Define a function to cache identifier ID, to be used as a strub attribute
308 : : parameter for a strub mode named after NAME. */
309 : : #define DEF_STRUB_IDS(IDX, NAME, ID) \
310 : : static inline tree get_strub_mode_id_ ## NAME () { \
311 : : int idx = STRUB_ID_BASE + IDX; \
312 : : tree identifier = strub_cache[idx]; \
313 : : if (!identifier) \
314 : : strub_cache[idx] = identifier = get_identifier (ID); \
315 : : return identifier; \
316 : : }
317 : : /* Same as DEF_STRUB_IDS, but use the string expansion of NAME as ID. */
318 : : #define DEF_STRUB_ID(IDX, NAME) \
319 : : DEF_STRUB_IDS (IDX, NAME, #NAME)
320 : :
321 : : /* Define functions for each of the strub mode identifiers.
322 : : Expose dashes rather than underscores. */
323 : 10759 : DEF_STRUB_ID (0, disabled)
324 : 20497 : DEF_STRUB_IDS (1, at_calls, "at-calls")
325 : 16382 : DEF_STRUB_ID (2, internal)
326 : 25812 : DEF_STRUB_ID (3, callable)
327 : 2419 : DEF_STRUB_ID (4, wrapped)
328 : 2618 : DEF_STRUB_ID (5, wrapper)
329 : 3897 : DEF_STRUB_ID (6, inlinable)
330 : 50 : DEF_STRUB_IDS (7, at_calls_opt, "at-calls-opt")
331 : :
332 : : /* Release the temporary macro names. */
333 : : #undef DEF_STRUB_IDS
334 : : #undef DEF_STRUB_ID
335 : :
336 : : /* Return the identifier corresponding to strub MODE. */
337 : :
338 : : static tree
339 : 82434 : get_strub_mode_attr_parm (enum strub_mode mode)
340 : : {
341 : 82434 : switch (mode)
342 : : {
343 : 10759 : case STRUB_DISABLED:
344 : 10759 : return get_strub_mode_id_disabled ();
345 : :
346 : 20497 : case STRUB_AT_CALLS:
347 : 20497 : return get_strub_mode_id_at_calls ();
348 : :
349 : 16382 : case STRUB_INTERNAL:
350 : 16382 : return get_strub_mode_id_internal ();
351 : :
352 : 25812 : case STRUB_CALLABLE:
353 : 25812 : return get_strub_mode_id_callable ();
354 : :
355 : 2419 : case STRUB_WRAPPED:
356 : 2419 : return get_strub_mode_id_wrapped ();
357 : :
358 : 2618 : case STRUB_WRAPPER:
359 : 2618 : return get_strub_mode_id_wrapper ();
360 : :
361 : 3897 : case STRUB_INLINABLE:
362 : 3897 : return get_strub_mode_id_inlinable ();
363 : :
364 : 50 : case STRUB_AT_CALLS_OPT:
365 : 50 : return get_strub_mode_id_at_calls_opt ();
366 : :
367 : 0 : default:
368 : 0 : gcc_unreachable ();
369 : : }
370 : : }
371 : :
 372 : : /* Return the parameters (TREE_VALUE) for a strub attribute of MODE.
373 : : We know we use a single parameter, so we bypass the creation of a
374 : : tree list. */
375 : :
376 : : static tree
377 : 2779 : get_strub_mode_attr_value (enum strub_mode mode)
378 : : {
379 : 0 : return get_strub_mode_attr_parm (mode);
380 : : }
381 : :
382 : : /* Determine whether ID is a well-formed strub mode-specifying attribute
383 : : parameter for a function (type). Only user-visible modes are accepted, and
384 : : ID must be non-NULL.
385 : :
386 : : For unacceptable parms, return 0, otherwise a nonzero value as below.
387 : :
388 : : If the parm enables strub, return positive, otherwise negative.
389 : :
 390 : : If the affected type must be a distinct, incompatible type, return an integer
391 : : of absolute value 2, otherwise 1. */
392 : :
393 : : int
394 : 2332 : strub_validate_fn_attr_parm (tree id)
395 : : {
396 : 2332 : int ret;
397 : 2332 : const char *s = NULL;
398 : 2332 : size_t len = 0;
399 : :
400 : : /* do NOT test for NULL. This is only to be called with non-NULL arguments.
401 : : We assume that the strub parameter applies to a function, because only
402 : : functions accept an explicit argument. If we accepted NULL, and we
403 : : happened to be called to verify the argument for a variable, our return
404 : : values would be wrong. */
405 : 2332 : if (TREE_CODE (id) == STRING_CST)
406 : : {
407 : 2332 : s = TREE_STRING_POINTER (id);
408 : 2332 : len = TREE_STRING_LENGTH (id) - 1;
409 : : }
410 : 0 : else if (TREE_CODE (id) == IDENTIFIER_NODE)
411 : : {
412 : 0 : s = IDENTIFIER_POINTER (id);
413 : 0 : len = IDENTIFIER_LENGTH (id);
414 : : }
415 : : else
416 : : return 0;
417 : :
418 : 2332 : enum strub_mode mode;
419 : :
420 : 2332 : if (len != 8)
421 : : return 0;
422 : :
423 : 2332 : switch (s[0])
424 : : {
425 : : case 'd':
426 : : mode = STRUB_DISABLED;
427 : : ret = -1;
428 : : break;
429 : :
430 : 627 : case 'a':
431 : 627 : mode = STRUB_AT_CALLS;
432 : 627 : ret = 2;
433 : 627 : break;
434 : :
435 : 598 : case 'i':
436 : 598 : mode = STRUB_INTERNAL;
437 : 598 : ret = 1;
438 : 598 : break;
439 : :
440 : 623 : case 'c':
441 : 623 : mode = STRUB_CALLABLE;
442 : 623 : ret = -2;
443 : 623 : break;
444 : :
445 : : default:
446 : : /* Other parms are for internal use only. */
447 : : return 0;
448 : : }
449 : :
450 : 2332 : tree mode_id = get_strub_mode_attr_parm (mode);
451 : :
452 : 2332 : if (TREE_CODE (id) == IDENTIFIER_NODE
453 : 2332 : ? id != mode_id
454 : 2332 : : strncmp (s, IDENTIFIER_POINTER (mode_id), len) != 0)
455 : : return 0;
456 : :
457 : : return ret;
458 : : }
459 : :
460 : : /* Return the strub mode from STRUB_ATTR. VAR_P should be TRUE if the attribute
461 : : is taken from a variable, rather than from a function, or a type thereof. */
462 : :
463 : : static enum strub_mode
464 : 9004104 : get_strub_mode_from_attr (tree strub_attr, bool var_p = false)
465 : : {
466 : 9004104 : enum strub_mode mode = STRUB_DISABLED;
467 : :
468 : 9004104 : if (strub_attr)
469 : : {
470 : 80791 : if (!TREE_VALUE (strub_attr))
471 : 3498 : mode = !var_p ? STRUB_AT_CALLS : STRUB_INTERNAL;
472 : : else
473 : : {
474 : 77293 : gcc_checking_assert (!var_p);
475 : 77293 : tree id = TREE_VALUE (strub_attr);
476 : 77293 : if (TREE_CODE (id) == TREE_LIST)
477 : 60477 : id = TREE_VALUE (id);
478 : 77293 : const char *s = (TREE_CODE (id) == STRING_CST
479 : 77293 : ? TREE_STRING_POINTER (id)
480 : 16816 : : IDENTIFIER_POINTER (id));
481 : 77293 : size_t len = (TREE_CODE (id) == STRING_CST
482 : 77293 : ? TREE_STRING_LENGTH (id) - 1
483 : 16816 : : IDENTIFIER_LENGTH (id));
484 : :
485 : 77293 : switch (len)
486 : : {
487 : 4005 : case 7:
488 : 4005 : switch (s[6])
489 : : {
490 : : case 'r':
491 : : mode = STRUB_WRAPPER;
492 : : break;
493 : :
494 : 1903 : case 'd':
495 : 1903 : mode = STRUB_WRAPPED;
496 : 1903 : break;
497 : :
498 : 0 : default:
499 : 0 : gcc_unreachable ();
500 : : }
501 : : break;
502 : :
503 : 69763 : case 8:
504 : 69763 : switch (s[0])
505 : : {
506 : : case 'd':
507 : : mode = STRUB_DISABLED;
508 : : break;
509 : :
510 : 19790 : case 'a':
511 : 19790 : mode = STRUB_AT_CALLS;
512 : 19790 : break;
513 : :
514 : 15241 : case 'i':
515 : 15241 : mode = STRUB_INTERNAL;
516 : 15241 : break;
517 : :
518 : 24487 : case 'c':
519 : 24487 : mode = STRUB_CALLABLE;
520 : 24487 : break;
521 : :
522 : 0 : default:
523 : 0 : gcc_unreachable ();
524 : : }
525 : : break;
526 : :
527 : : case 9:
528 : : mode = STRUB_INLINABLE;
529 : : break;
530 : :
531 : 40 : case 12:
532 : 40 : mode = STRUB_AT_CALLS_OPT;
533 : 40 : break;
534 : :
535 : 0 : default:
536 : 0 : gcc_unreachable ();
537 : : }
538 : :
539 : 77293 : gcc_checking_assert (TREE_CODE (id) == IDENTIFIER_NODE
540 : : ? id == get_strub_mode_attr_parm (mode)
541 : : : strncmp (IDENTIFIER_POINTER
542 : : (get_strub_mode_attr_parm (mode)),
543 : : s, len) == 0);
544 : : }
545 : : }
546 : :
547 : 9004104 : return mode;
548 : : }
549 : :
550 : : /* Look up, decode and return the strub mode associated with FNDECL. */
551 : :
552 : : static enum strub_mode
553 : 8960222 : get_strub_mode_from_fndecl (tree fndecl)
554 : : {
555 : 8960222 : return get_strub_mode_from_attr (get_strub_attr_from_decl (fndecl));
556 : : }
557 : :
558 : : /* Look up, decode and return the strub mode associated with NODE. */
559 : :
560 : : static enum strub_mode
561 : 8938800 : get_strub_mode (cgraph_node *node)
562 : : {
563 : 0 : return get_strub_mode_from_fndecl (node->decl);
564 : : }
565 : :
566 : : /* Look up, decode and return the strub mode associated with TYPE. */
567 : :
568 : : static enum strub_mode
569 : 2916375 : get_strub_mode_from_type (tree type)
570 : : {
571 : 2916375 : bool var_p = !FUNC_OR_METHOD_TYPE_P (type);
572 : 2916375 : tree attr = get_strub_attr_from_type (type);
573 : :
574 : 2916375 : if (attr)
575 : 33707 : return get_strub_mode_from_attr (attr, var_p);
576 : :
577 : 2882668 : if (flag_strub >= -1 && !var_p)
578 : 7825 : return STRUB_CALLABLE;
579 : :
580 : : return STRUB_DISABLED;
581 : : }
582 : :
583 : :
584 : : /* Return TRUE iff NODE calls builtin va_start. */
585 : :
586 : : static bool
587 : 516 : calls_builtin_va_start_p (cgraph_node *node)
588 : : {
589 : 516 : bool result = false;
590 : :
591 : 3427 : for (cgraph_edge *e = node->callees; e; e = e->next_callee)
592 : : {
593 : 2921 : tree cdecl = e->callee->decl;
594 : 2921 : if (fndecl_built_in_p (cdecl, BUILT_IN_VA_START))
595 : : return true;
596 : : }
597 : :
598 : : return result;
599 : : }
600 : :
601 : : /* Return TRUE iff NODE calls builtin apply_args, and optionally REPORT it. */
602 : :
603 : : static bool
604 : 2980 : calls_builtin_apply_args_p (cgraph_node *node, bool report = false)
605 : : {
606 : 2980 : bool result = false;
607 : :
608 : 10861 : for (cgraph_edge *e = node->callees; e; e = e->next_callee)
609 : : {
610 : 7916 : tree cdecl = e->callee->decl;
611 : 7916 : if (!fndecl_built_in_p (cdecl, BUILT_IN_APPLY_ARGS))
612 : 7876 : continue;
613 : :
614 : 40 : result = true;
615 : :
616 : 40 : if (!report)
617 : : break;
618 : :
619 : 10 : sorry_at (e->call_stmt
620 : 5 : ? gimple_location (e->call_stmt)
621 : 0 : : DECL_SOURCE_LOCATION (node->decl),
622 : : "at-calls %<strub%> does not support call to %qD",
623 : : cdecl);
624 : : }
625 : :
626 : 2980 : return result;
627 : : }
628 : :
629 : : /* Return TRUE iff NODE carries the always_inline attribute. */
630 : :
631 : : static inline bool
632 : 8015 : strub_always_inline_p (cgraph_node *node)
633 : : {
634 : 8015 : return lookup_attribute ("always_inline", DECL_ATTRIBUTES (node->decl));
635 : : }
636 : :
637 : : /* Return TRUE iff the target has strub support for T, a function
638 : : decl, or a type used in an indirect call, and optionally REPORT the
639 : : reasons for ineligibility. If T is a type and error REPORTing is
640 : : enabled, the LOCation (of the indirect call) should be provided. */
641 : : static inline bool
642 : 4595 : strub_target_support_p (tree t, bool report = false,
643 : : location_t loc = UNKNOWN_LOCATION)
644 : : {
645 : 4595 : bool result = true;
646 : :
647 : 4595 : if (!targetm.have_strub_support_for (t))
648 : : {
649 : 0 : result = false;
650 : :
651 : 0 : if (!report)
652 : : return result;
653 : :
654 : 0 : if (DECL_P (t))
655 : 0 : sorry_at (DECL_SOURCE_LOCATION (t),
656 : : "%qD is not eligible for %<strub%>"
657 : : " on the target system", t);
658 : : else
659 : 0 : sorry_at (loc,
660 : : "unsupported %<strub%> call"
661 : : " on the target system");
662 : : }
663 : :
664 : : return result;
665 : : }
666 : :
667 : : /* Return TRUE iff NODE is potentially eligible for any strub-enabled mode, and
668 : : optionally REPORT the reasons for ineligibility. */
669 : :
670 : : static inline bool
671 : 2402 : can_strub_p (cgraph_node *node, bool report = false)
672 : : {
673 : 2402 : bool result = strub_target_support_p (node->decl, report);
674 : :
675 : 2402 : if (!report && (!result || strub_always_inline_p (node)))
676 : 0 : return result;
677 : :
678 : 2402 : if (flag_split_stack)
679 : : {
680 : 20 : result = false;
681 : :
682 : 20 : if (!report)
683 : : return result;
684 : :
685 : 10 : sorry_at (DECL_SOURCE_LOCATION (node->decl),
686 : : "%qD is not eligible for %<strub%>"
687 : : " because %<-fsplit-stack%> is enabled",
688 : : node->decl);
689 : : }
690 : :
691 : 2392 : if (lookup_attribute ("noipa", DECL_ATTRIBUTES (node->decl)))
692 : : {
693 : 0 : result = false;
694 : :
695 : 0 : if (!report)
696 : : return result;
697 : :
698 : 0 : sorry_at (DECL_SOURCE_LOCATION (node->decl),
699 : : "%qD is not eligible for %<strub%>"
700 : : " because of attribute %<noipa%>",
701 : : node->decl);
702 : : }
703 : :
704 : : /* We can't, and don't want to vectorize the watermark and other
705 : : strub-introduced parms. */
706 : 2392 : if (lookup_attribute ("simd", DECL_ATTRIBUTES (node->decl)))
707 : : {
708 : 0 : result = false;
709 : :
710 : 0 : if (!report)
711 : : return result;
712 : :
713 : 0 : sorry_at (DECL_SOURCE_LOCATION (node->decl),
714 : : "%qD is not eligible for %<strub%>"
715 : : " because of attribute %<simd%>",
716 : : node->decl);
717 : : }
718 : :
719 : : return result;
720 : : }
721 : :
722 : : /* Return TRUE iff NODE is eligible for at-calls strub, and optionally REPORT
723 : : the reasons for ineligibility. Besides general non-eligibility for
724 : : strub-enabled modes, at-calls rules out calling builtin apply_args. */
725 : :
726 : : static bool
727 : 2464 : can_strub_at_calls_p (cgraph_node *node, bool report = false)
728 : : {
729 : 2464 : bool result = !report || can_strub_p (node, report);
730 : :
731 : 2464 : if (!result && !report)
732 : : return result;
733 : :
734 : 2464 : return !calls_builtin_apply_args_p (node, report);
735 : : }
736 : :
737 : : /* Return TRUE iff the called function (pointer or, if available,
738 : : decl) undergoes a significant type conversion for the call. Strub
739 : : mode changes between function types, and other non-useless type
740 : : conversions, are regarded as significant. When the function type
741 : : is overridden, the effective strub mode for the call is that of the
742 : : call fntype, rather than that of the pointer or of the decl.
743 : : Functions called with type overrides cannot undergo type changes;
744 : : it's as if their address was taken, so they're considered
745 : : non-viable for implicit at-calls strub mode. */
746 : :
747 : : static inline bool
748 : 21046 : strub_call_fntype_override_p (const gcall *gs)
749 : : {
750 : 21046 : if (gimple_call_internal_p (gs))
751 : : return false;
752 : 21046 : tree fn_type = TREE_TYPE (TREE_TYPE (gimple_call_fn (gs)));
753 : 21046 : if (tree decl = gimple_call_fndecl (gs))
754 : 20764 : fn_type = TREE_TYPE (decl);
755 : :
756 : : /* We do NOT want to take the mode from the decl here. This
757 : : function is used to tell whether we can change the strub mode of
758 : : a function, and whether the effective mode for the call is to be
759 : : taken from the decl or from an overrider type. When the strub
760 : : mode is explicitly declared, or overridden with a type cast, the
761 : : difference will be noticed in function types. However, if the
762 : : strub mode is implicit due to e.g. strub variables or -fstrub=*
763 : : command-line flags, we will adjust call types along with function
764 : : types. In either case, the presence of type or strub mode
765 : : overriders in calls will prevent a function from having its strub
766 : : modes changed in ways that would imply type changes, but taking
767 : : strub modes from decls would defeat this, since we set strub
768 : : modes and then call this function to tell whether the original
769 : : type was overridden to decide whether to adjust the call. We
770 : : need the answer to be about the type, not the decl. */
771 : 21046 : enum strub_mode mode = get_strub_mode_from_type (fn_type);
772 : 21046 : return (get_strub_mode_from_type (gs->u.fntype) != mode
773 : 21046 : || !useless_type_conversion_p (gs->u.fntype, fn_type));
774 : : }
775 : :
776 : : /* Return TRUE iff NODE is called directly with a type override. */
777 : :
778 : : static bool
779 : 470 : called_directly_with_type_override_p (cgraph_node *node, void *)
780 : : {
781 : 1340 : for (cgraph_edge *e = node->callers; e; e = e->next_caller)
782 : 870 : if (e->call_stmt && strub_call_fntype_override_p (e->call_stmt))
783 : : return true;
784 : :
785 : : return false;
786 : : }
787 : :
788 : : /* Return TRUE iff NODE or any other nodes aliased to it are called
789 : : with type overrides. We can't safely change the type of such
790 : : functions. */
791 : :
792 : : static bool
793 : 470 : called_with_type_override_p (cgraph_node *node)
794 : : {
795 : 470 : return (node->call_for_symbol_thunks_and_aliases
796 : 470 : (called_directly_with_type_override_p, NULL, true, true));
797 : : }
798 : :
799 : : /* Symbolic macro for the max number of arguments that internal strub may add to
800 : : a function. */
801 : :
802 : : #define STRUB_INTERNAL_MAX_EXTRA_ARGS 3
803 : :
804 : : /* We can't perform internal strubbing if the function body involves certain
805 : : features:
806 : :
807 : : - a non-default __builtin_va_start (e.g. x86's __builtin_ms_va_start) is
808 : : currently unsupported because we can't discover the corresponding va_copy and
809 : : va_end decls in the wrapper, and we don't convey the alternate variable
810 : : arguments ABI to the modified wrapped function. The default
811 : : __builtin_va_start is supported by calling va_start/va_end at the wrapper,
812 : : that takes variable arguments, passing a pointer to the va_list object to the
813 : : wrapped function, that runs va_copy from it where the original function ran
814 : : va_start.
815 : :
816 : : __builtin_next_arg is currently unsupported because the wrapped function
817 : : won't be a variable argument function. We could process it in the wrapper,
818 : : that remains a variable argument function, and replace calls in the wrapped
819 : : body, but we currently don't.
820 : :
821 : : __builtin_return_address is rejected because it's generally used when the
822 : : actual caller matters, and introducing a wrapper breaks such uses as those in
823 : : the unwinder. */
824 : :
static bool
can_strub_internally_p (cgraph_node *node, bool report = false)
{
  /* Protocol: when REPORT is false, return false at the first
     disqualifying condition found; when REPORT is true, keep going so
     that a sorry_at diagnostic is issued for every failure.  The
     general strubbability check (can_strub_p) is only re-run here in
     report mode, presumably because callers have already performed it
     otherwise — TODO confirm against callers.  */
  bool result = !report || can_strub_p (node, report);

  if (!result && !report)
    return result;

  /* Always-inline functions are left alone (see the strub mode
     selection logic), so skip the body checks unless reporting.  */
  if (!report && strub_always_inline_p (node))
    return result;

  /* Since we're not changing the function identity proper, just
     moving its full implementation, we *could* disable
     fun->cannot_be_copied_reason and/or temporarily drop a noclone
     attribute, but we'd have to prevent remapping of the labels.  */
  if (lookup_attribute ("noclone", DECL_ATTRIBUTES (node->decl)))
    {
      result = false;

      if (!report)
	return result;

      sorry_at (DECL_SOURCE_LOCATION (node->decl),
		"%qD is not eligible for internal %<strub%>"
		" because of attribute %<noclone%>",
		node->decl);
    }

  if (node->has_gimple_body_p ())
    {
      /* Reject calls to builtins that internal strub cannot support;
	 see the comment before this function for the rationale on
	 each of them.  */
      for (cgraph_edge *e = node->callees; e; e = e->next_callee)
	{
	  tree cdecl = e->callee->decl;
	  if (!((fndecl_built_in_p (cdecl, BUILT_IN_VA_START)
		 && cdecl != builtin_decl_explicit (BUILT_IN_VA_START))
		|| fndecl_built_in_p (cdecl, BUILT_IN_NEXT_ARG)
		|| fndecl_built_in_p (cdecl, BUILT_IN_RETURN_ADDRESS)))
	    continue;

	  result = false;

	  if (!report)
	    return result;

	  /* Point at the offending call when possible, at the
	     function otherwise.  */
	  sorry_at (e->call_stmt
		    ? gimple_location (e->call_stmt)
		    : DECL_SOURCE_LOCATION (node->decl),
		    "%qD is not eligible for internal %<strub%> "
		    "because it calls %qD",
		    node->decl, cdecl);
	}

      struct function *fun = DECL_STRUCT_FUNCTION (node->decl);
      if (fun->has_nonlocal_label)
	{
	  result = false;

	  if (!report)
	    return result;

	  sorry_at (DECL_SOURCE_LOCATION (node->decl),
		    "%qD is not eligible for internal %<strub%> "
		    "because it contains a non-local goto target",
		    node->decl);
	}

      if (fun->has_forced_label_in_static)
	{
	  result = false;

	  if (!report)
	    return result;

	  sorry_at (DECL_SOURCE_LOCATION (node->decl),
		    "%qD is not eligible for internal %<strub%> "
		    "because the address of a local label escapes",
		    node->decl);
	}

      /* Catch any other case that would prevent versioning/cloning
	 so as to also have it covered above.  */
      gcc_checking_assert (!result /* || !node->has_gimple_body_p () */
			   || tree_versionable_function_p (node->decl));


      /* Label values references are not preserved when copying.  If referenced
	 in nested functions, as in 920415-1.c and 920721-4.c their decls get
	 remapped independently.  The exclusion below might be too broad, in
	 that we might be able to support correctly cases in which the labels
	 are only used internally in a function, but disconnecting forced labels
	 from their original declarations is undesirable in general.  */
      basic_block bb;
      FOR_EACH_BB_FN (bb, DECL_STRUCT_FUNCTION (node->decl))
	for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
	     !gsi_end_p (gsi); gsi_next (&gsi))
	  {
	    glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
	    tree target;

	    /* Labels appear only at the start of a block, so stop
	       scanning this block at the first non-label statement.  */
	    if (!label_stmt)
	      break;

	    target = gimple_label_label (label_stmt);

	    if (!FORCED_LABEL (target))
	      continue;

	    result = false;

	    if (!report)
	      return result;

	    sorry_at (gimple_location (label_stmt),
		      "internal %<strub%> does not support forced labels");
	  }
    }

  /* The wrapper adds up to STRUB_INTERNAL_MAX_EXTRA_ARGS arguments, and
     IPA param manipulation can only number params up to
     IPA_PARAM_MAX_INDEX_BITS worth of indices.  */
  if (list_length (TYPE_ARG_TYPES (TREE_TYPE (node->decl)))
      >= ((HOST_WIDE_INT_1 << IPA_PARAM_MAX_INDEX_BITS)
	  - STRUB_INTERNAL_MAX_EXTRA_ARGS))
    {
      result = false;

      if (!report)
	return result;

      sorry_at (DECL_SOURCE_LOCATION (node->decl),
		"%qD has too many arguments for internal %<strub%>",
		node->decl);
    }

  return result;
}
958 : :
959 : : /* Return TRUE iff NODE has any strub-requiring local variable, or accesses (as
960 : : in reading) any variable through a strub-requiring type. */
961 : :
962 : : static bool
963 : 1983 : strub_from_body_p (cgraph_node *node)
964 : : {
965 : 1983 : if (!node->has_gimple_body_p ())
966 : : return false;
967 : :
968 : : /* If any local variable is marked for strub... */
969 : 1217 : unsigned i;
970 : 1217 : tree var;
971 : 10264 : FOR_EACH_LOCAL_DECL (DECL_STRUCT_FUNCTION (node->decl),
972 : : i, var)
973 : 8026 : if (get_strub_mode_from_type (TREE_TYPE (var))
974 : : != STRUB_DISABLED)
975 : : return true;
976 : :
977 : : /* Now scan the body for loads with strub-requiring types.
978 : : ??? Compound types don't propagate the strub requirement to
979 : : component types. */
980 : 1203 : basic_block bb;
981 : 4385 : FOR_EACH_BB_FN (bb, DECL_STRUCT_FUNCTION (node->decl))
982 : 7010 : for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
983 : 18121 : !gsi_end_p (gsi); gsi_next (&gsi))
984 : : {
985 : 14939 : gimple *stmt = gsi_stmt (gsi);
986 : :
987 : 14939 : if (!gimple_assign_load_p (stmt))
988 : 12219 : continue;
989 : :
990 : 2720 : tree rhs = gimple_assign_rhs1 (stmt);
991 : 2720 : if (get_strub_mode_from_type (TREE_TYPE (rhs))
992 : : != STRUB_DISABLED)
993 : 337 : return true;
994 : : }
995 : :
996 : : return false;
997 : : }
998 : :
999 : : /* Return TRUE iff node is associated with a builtin that should be callable
1000 : : from strub contexts. */
1001 : :
1002 : : static inline bool
1003 : 1218 : strub_callable_builtin_p (cgraph_node *node)
1004 : : {
1005 : 1218 : if (DECL_BUILT_IN_CLASS (node->decl) != BUILT_IN_NORMAL)
1006 : : return false;
1007 : :
1008 : 487 : enum built_in_function fcode = DECL_FUNCTION_CODE (node->decl);
1009 : :
1010 : 487 : switch (fcode)
1011 : : {
1012 : 0 : case BUILT_IN_NONE:
1013 : 0 : gcc_unreachable ();
1014 : :
1015 : : /* This temporarily allocates stack for the call, and we can't reasonably
1016 : : update the watermark for that. Besides, we don't check the actual call
1017 : : target, nor its signature, and it seems to be overkill to as much as
1018 : : try to do so. */
1019 : : case BUILT_IN_APPLY:
1020 : : return false;
1021 : :
1022 : : /* Conversely, this shouldn't be called from within strub contexts, since
1023 : : the caller may have had its signature modified. STRUB_INTERNAL is ok,
1024 : : the call will remain in the STRUB_WRAPPER, and removed from the
1025 : : STRUB_WRAPPED clone. */
1026 : : case BUILT_IN_APPLY_ARGS:
1027 : : return false;
1028 : :
1029 : : /* ??? Make all other builtins callable. We wish to make any builtin call
1030 : : the compiler might introduce on its own callable. Anything that is
1031 : : predictable enough as to be known not to allow stack data that should
1032 : : be strubbed to unintentionally escape to non-strub contexts can be
1033 : : allowed, and pretty much every builtin appears to fit this description.
1034 : : The exceptions to this rule seem to be rare, and only available as
1035 : : explicit __builtin calls, so let's keep it simple and allow all of
1036 : : them... */
1037 : : default:
1038 : : return true;
1039 : : }
1040 : : }
1041 : :
/* Compute the strub mode to be used for NODE.  STRUB_ATTR should be the strub
   attribute, found for NODE, if any.  */
1044 : :
static enum strub_mode
compute_strub_mode (cgraph_node *node, tree strub_attr)
{
  enum strub_mode req_mode = get_strub_mode_from_attr (strub_attr);

  gcc_checking_assert (flag_strub >= -2 && flag_strub <= 3);

  /* Symbolic encodings of the -fstrub-* flags.  */
  /* Enable strub when explicitly requested through attributes to functions or
     variables, reporting errors if the requests cannot be satisfied.  */
  const bool strub_flag_auto = flag_strub < 0;
  /* strub_flag_auto with strub call verification; without this, functions are
     implicitly callable.  */
  const bool strub_flag_strict = flag_strub < -1;
  /* Disable strub altogether, ignore attributes entirely.  */
  const bool strub_flag_disabled = flag_strub == 0;
  /* On top of _auto, also enable strub implicitly for functions that can
     safely undergo at-calls strubbing.  Internal mode will still be used in
     functions that request it explicitly with attribute strub(2), or when the
     function body requires strubbing and at-calls strubbing is not viable.  */
  const bool strub_flag_at_calls = flag_strub == 1;
  /* On top of default, also enable strub implicitly for functions that can
     safely undergo internal strubbing.  At-calls mode will still be used in
     functions that request it explicitly with attribute strub() or strub(1),
     or when the function body requires strubbing and internal strubbing is not
     viable.  */
  const bool strub_flag_internal = flag_strub == 2;
  /* On top of default, also enable strub implicitly for functions that can
     safely undergo strubbing in either mode.  When both modes are viable,
     at-calls is preferred.  */
  const bool strub_flag_either = flag_strub == 3;
  /* Besides the default behavior, enable strub implicitly for all viable
     functions.  */
  const bool strub_flag_viable = flag_strub > 0;

  /* The consider_* variables should be TRUE if selecting the corresponding
     strub modes would be consistent with requests from attributes and command
     line flags.  Attributes associated with functions pretty much mandate a
     selection, and should report an error if not satisfied; strub_flag_auto
     implicitly enables some viable strub mode if that's required by references
     to variables marked for strub; strub_flag_viable enables strub if viable
     (even when favoring one mode, body-requested strub can still be satisfied
     by either mode), and falls back to callable, silently unless variables
     require strubbing.  */

  const bool consider_at_calls
    = (!strub_flag_disabled
       && (strub_attr
	   ? req_mode == STRUB_AT_CALLS
	   : true));
  const bool consider_internal
    = (!strub_flag_disabled
       && (strub_attr
	   ? req_mode == STRUB_INTERNAL
	   : true));

  const bool consider_callable
    = (!strub_flag_disabled
       && (strub_attr
	   ? req_mode == STRUB_CALLABLE
	   : (!strub_flag_strict
	      || strub_callable_builtin_p (node))));

  /* This is a shorthand for either strub-enabled mode.  */
  const bool consider_strub
    = (consider_at_calls || consider_internal);

  /* We can cope with always_inline functions even with noipa and noclone,
     because we just leave them alone.  */
  const bool is_always_inline
    = strub_always_inline_p (node);

  /* Strubbing in general, and each specific strub mode, may have its own set of
     requirements.  We require noipa for strubbing, either because of cloning
     required for internal strub, or because of caller enumeration required for
     at-calls strub.  We don't consider the at-calls mode eligible if it's not
     even considered, it has no further requirements.  Internal mode requires
     cloning and the absence of certain features in the body and, like at-calls,
     it's not eligible if it's not even under consideration.

     ??? Do we need target hooks for further constraints?  E.g., x86's
     "interrupt" attribute breaks internal strubbing because the wrapped clone
     carries the attribute and thus isn't callable; in this case, we could use a
     target hook to adjust the clone instead.  */
  const bool strub_eligible
    = (consider_strub
       && (is_always_inline || can_strub_p (node)));
  const bool at_calls_eligible
    = (consider_at_calls && strub_eligible
       && can_strub_at_calls_p (node));
  const bool internal_eligible
    = (consider_internal && strub_eligible
       && (is_always_inline
	   || can_strub_internally_p (node)));

  /* In addition to the strict eligibility requirements, some additional
     constraints are placed on implicit selection of certain modes.  These do
     not prevent the selection of a mode if explicitly specified as part of a
     function interface (the strub attribute), but they may prevent modes from
     being selected by the command line or by function bodies.  The only actual
     constraint is on at-calls mode: since we change the function's exposed
     signature, we won't do it implicitly if the function can possibly be used
     in ways that do not expect the signature change, e.g., if the function is
     available to or interposable by other units, if its address is taken,
     etc.  */
  const bool at_calls_viable
    = (at_calls_eligible
       && (strub_attr
	   || (node->has_gimple_body_p ()
	       && (!node->externally_visible
		   || (node->binds_to_current_def_p ()
		       && node->can_be_local_p ()))
	       && node->only_called_directly_p ()
	       && !called_with_type_override_p (node))));
  const bool internal_viable
    = (internal_eligible);

  /* Shorthand.  */
  const bool strub_viable
    = (at_calls_viable || internal_viable);

  /* We wish to analyze the body, to look for implicit requests for strub, both
     to implicitly enable it when the body calls for it, and to report errors if
     the body calls for it but neither mode is viable (even if that follows from
     non-eligibility because of the explicit specification of some non-strubbing
     mode).  We can refrain from scanning the body only in rare circumstances:
     when strub is enabled by a function attribute (scanning might be redundant
     in telling us to also enable it), and when we are enabling strub implicitly
     but there are non-viable modes: we want to know whether strubbing is
     required, to fallback to another mode, even if we're only enabling a
     certain mode, or, when either mode would do, to report an error if neither
     happens to be viable.  */
  const bool analyze_body
    = (strub_attr
       ? !consider_strub
       : (strub_flag_auto
	  || (strub_flag_viable && (!at_calls_viable && !internal_viable))
	  || (strub_flag_either && !strub_viable)));

  /* Cases in which strubbing is enabled or disabled by strub_flag_auto.
     Unsatisfiable requests ought to be reported.  */
  const bool strub_required
    = ((strub_attr && consider_strub)
       || (analyze_body && strub_from_body_p (node)));

  /* Besides the required cases, we want to abide by the requests to enabling on
     an if-viable basis.  */
  const bool strub_enable
    = (strub_required
       || (strub_flag_at_calls && at_calls_viable)
       || (strub_flag_internal && internal_viable)
       || (strub_flag_either && strub_viable));

  /* And now we're finally ready to select a mode that abides by the viability
     and eligibility constraints, and that satisfies the strubbing requirements
     and requests, subject to the constraints.  If both modes are viable and
     strub is to be enabled, pick STRUB_AT_CALLS unless STRUB_INTERNAL was named
     as preferred.  */
  const enum strub_mode mode
    = ((strub_enable && is_always_inline)
       ? (strub_required ? STRUB_INLINABLE : STRUB_CALLABLE)
       : (strub_enable && internal_viable
	  && (strub_flag_internal || !at_calls_viable))
       ? STRUB_INTERNAL
       : (strub_enable && at_calls_viable)
       ? (strub_required && !strub_attr
	  ? STRUB_AT_CALLS_OPT
	  : STRUB_AT_CALLS)
       : consider_callable
       ? STRUB_CALLABLE
       : STRUB_DISABLED);

  /* Post-selection diagnostics: report requirements that could not be
     satisfied by the selected mode.  */
  switch (mode)
    {
    case STRUB_CALLABLE:
      if (is_always_inline)
	break;
      /* Fall through.  */

    case STRUB_DISABLED:
      if (strub_enable && !strub_attr)
	{
	  gcc_checking_assert (analyze_body);
	  error_at (DECL_SOURCE_LOCATION (node->decl),
		    "%qD requires %<strub%>,"
		    " but no viable %<strub%> mode was found",
		    node->decl);
	  break;
	}
      /* Fall through.  */

    case STRUB_AT_CALLS:
    case STRUB_INTERNAL:
    case STRUB_INLINABLE:
      /* Differences from a mode requested through a function attribute are
	 reported in set_strub_mode_to.  */
      break;

    case STRUB_AT_CALLS_OPT:
      /* Functions that select this mode do so because of references to strub
	 variables.  Even if we choose at-calls as an optimization, the
	 requirements for internal strub must still be satisfied.  Optimization
	 options may render implicit at-calls strub not viable (-O0 sets
	 force_output for static non-inline functions), and it would not be good
	 if changing optimization options turned a well-formed into an
	 ill-formed one.  */
      if (!internal_viable)
	can_strub_internally_p (node, true);
      break;

    case STRUB_WRAPPED:
    case STRUB_WRAPPER:
    default:
      gcc_unreachable ();
    }

  return mode;
}
1263 : :
1264 : : /* Set FNDT's strub mode to MODE; FNDT may be a function decl or
1265 : : function type. If OVERRIDE, do not check whether a mode is already
1266 : : set. */
1267 : :
1268 : : static void
1269 : 2779 : strub_set_fndt_mode_to (tree fndt, enum strub_mode mode, bool override)
1270 : : {
1271 : 2779 : gcc_checking_assert (override
1272 : : || !(DECL_P (fndt)
1273 : : ? get_strub_attr_from_decl (fndt)
1274 : : : get_strub_attr_from_type (fndt)));
1275 : :
1276 : 2779 : tree attr = tree_cons (get_identifier ("strub"),
1277 : : get_strub_mode_attr_value (mode),
1278 : : NULL_TREE);
1279 : 2779 : tree *attrp = NULL;
1280 : 2779 : if (DECL_P (fndt))
1281 : : {
1282 : 2779 : gcc_checking_assert (FUNC_OR_METHOD_TYPE_P (TREE_TYPE (fndt)));
1283 : 2779 : attrp = &DECL_ATTRIBUTES (fndt);
1284 : : }
1285 : 0 : else if (FUNC_OR_METHOD_TYPE_P (fndt))
1286 : 0 : attrp = &TYPE_ATTRIBUTES (fndt);
1287 : : else
1288 : 0 : gcc_unreachable ();
1289 : :
1290 : 2779 : TREE_CHAIN (attr) = *attrp;
1291 : 2779 : *attrp = attr;
1292 : 2779 : }
1293 : :
1294 : : /* Set FNDT's strub mode to callable.
1295 : : FNDT may be a function decl or a function type. */
1296 : :
void
strub_make_callable (tree fndt)
{
  /* Non-overriding: strub_set_fndt_mode_to asserts (when checking) that
     FNDT carries no strub attribute yet.  */
  strub_set_fndt_mode_to (fndt, STRUB_CALLABLE, false);
}
1302 : :
1303 : : /* Set NODE to strub MODE. Report incompatibilities between MODE and the mode
1304 : : requested through explicit attributes, and cases of non-eligibility. */
1305 : :
static void
set_strub_mode_to (cgraph_node *node, enum strub_mode mode)
{
  tree attr = get_strub_attr_from_decl (node->decl);
  enum strub_mode req_mode = get_strub_mode_from_attr (attr);

  if (attr)
    {
      /* Check for and report incompatible mode changes.  Tolerated
	 changes: INTERNAL may become WRAPPED or WRAPPER (the two halves
	 of internal strubbing), and INTERNAL, AT_CALLS or CALLABLE may
	 become INLINABLE (always-inline handling).  */
      if (mode != req_mode
	  && !(req_mode == STRUB_INTERNAL
	       && (mode == STRUB_WRAPPED
		   || mode == STRUB_WRAPPER))
	  && !((req_mode == STRUB_INTERNAL
		|| req_mode == STRUB_AT_CALLS
		|| req_mode == STRUB_CALLABLE)
	       && mode == STRUB_INLINABLE))
	{
	  error_at (DECL_SOURCE_LOCATION (node->decl),
		    "%<strub%> mode %qE selected for %qD, when %qE was requested",
		    get_strub_mode_attr_parm (mode),
		    node->decl,
		    get_strub_mode_attr_parm (req_mode));
	  if (node->alias)
	    {
	      /* The mode may have been forced by the alias target; point
		 there as well for context.  */
	      cgraph_node *target = node->ultimate_alias_target ();
	      if (target != node)
		error_at (DECL_SOURCE_LOCATION (target->decl),
			  "the incompatible selection was determined"
			  " by ultimate alias target %qD",
			  target->decl);
	    }

	  /* Report any incompatibilities with explicitly-requested strub.
	     The report=true calls emit the detailed reasons.  */
	  switch (req_mode)
	    {
	    case STRUB_AT_CALLS:
	      can_strub_at_calls_p (node, true);
	      break;

	    case STRUB_INTERNAL:
	      can_strub_internally_p (node, true);
	      break;

	    default:
	      break;
	    }
	}

      /* Drop any incompatible strub attributes leading the decl attribute
	 chain.  Return if we find one with the mode we need.  */
      for (;;)
	{
	  if (mode == req_mode)
	    return;

	  /* Only drop attributes at the head of the chain; a strub
	     attribute deeper in the chain is left in place.  */
	  if (DECL_ATTRIBUTES (node->decl) != attr)
	    break;

	  DECL_ATTRIBUTES (node->decl) = TREE_CHAIN (attr);
	  attr = get_strub_attr_from_decl (node->decl);
	  if (!attr)
	    break;

	  req_mode = get_strub_mode_from_attr (attr);
	}
    }
  else if (mode == req_mode)
    return;

  /* ATTR doubles as the override flag: nonzero iff an (incompatible)
     strub attribute remains on the decl.  */
  strub_set_fndt_mode_to (node->decl, mode, attr);
}
1378 : :
1379 : : /* Compute and set NODE's strub mode. */
1380 : :
static void
set_strub_mode (cgraph_node *node)
{
  tree attr = get_strub_attr_from_decl (node->decl);

  if (attr)
    switch (get_strub_mode_from_attr (attr))
      {
      /* These can't have been requested through user attributes, so we must
	 have already gone through them.  */
      case STRUB_WRAPPER:
      case STRUB_WRAPPED:
      case STRUB_INLINABLE:
      case STRUB_AT_CALLS_OPT:
	return;

      /* User-requestable modes: fall through to (re)computation.  */
      case STRUB_DISABLED:
      case STRUB_AT_CALLS:
      case STRUB_INTERNAL:
      case STRUB_CALLABLE:
	break;

      default:
	gcc_unreachable ();
      }

  /* Aliases take the strub mode of their ultimate target.  */
  cgraph_node *xnode = node;
  if (node->alias)
    xnode = node->ultimate_alias_target ();
  /* Weakrefs may remain unresolved (the above will return node) if
     their targets are not defined, so make sure we compute a strub
     mode for them, instead of defaulting to STRUB_DISABLED and
     rendering them uncallable.  */
  enum strub_mode mode = (xnode != node && !xnode->alias
			  ? get_strub_mode (xnode)
			  : compute_strub_mode (node, attr));

  set_strub_mode_to (node, mode);
}
1420 : :
1421 : :
1422 : : /* Non-strub functions shouldn't be called from within strub contexts,
1423 : : except through callable ones. Always inline strub functions can
1424 : : only be called from strub functions. */
1425 : :
1426 : : static bool
1427 : 9819 : strub_callable_from_p (strub_mode caller_mode, strub_mode callee_mode)
1428 : : {
1429 : 9819 : switch (caller_mode)
1430 : : {
1431 : 5188 : case STRUB_WRAPPED:
1432 : 5188 : case STRUB_AT_CALLS_OPT:
1433 : 5188 : case STRUB_AT_CALLS:
1434 : 5188 : case STRUB_INTERNAL:
1435 : 5188 : case STRUB_INLINABLE:
1436 : 5188 : break;
1437 : :
1438 : 4631 : case STRUB_WRAPPER:
1439 : 4631 : case STRUB_DISABLED:
1440 : 4631 : case STRUB_CALLABLE:
1441 : 4631 : return callee_mode != STRUB_INLINABLE;
1442 : :
1443 : 0 : default:
1444 : 0 : gcc_unreachable ();
1445 : : }
1446 : :
1447 : 5188 : switch (callee_mode)
1448 : : {
1449 : : case STRUB_WRAPPED:
1450 : : case STRUB_AT_CALLS:
1451 : : case STRUB_INLINABLE:
1452 : : break;
1453 : :
1454 : 881 : case STRUB_AT_CALLS_OPT:
1455 : 881 : case STRUB_INTERNAL:
1456 : 881 : case STRUB_WRAPPER:
1457 : 881 : return (flag_strub >= -1);
1458 : :
1459 : : case STRUB_DISABLED:
1460 : : return false;
1461 : :
1462 : : case STRUB_CALLABLE:
1463 : : break;
1464 : :
1465 : 0 : default:
1466 : 0 : gcc_unreachable ();
1467 : : }
1468 : :
1469 : 3966 : return true;
1470 : : }
1471 : :
1472 : : /* Return TRUE iff CALLEE can be inlined into CALLER. We wish to avoid inlining
1473 : : WRAPPED functions back into their WRAPPERs. More generally, we wish to avoid
1474 : : inlining strubbed functions into non-strubbed ones. CALLER doesn't have to
1475 : : be an immediate caller of CALLEE: the immediate caller may have already been
1476 : : cloned for inlining, and then CALLER may be further up the original call
1477 : : chain. ??? It would be nice if our own caller would retry inlining callee
1478 : : if caller gets inlined. */
1479 : :
1480 : : bool
1481 : 7782791 : strub_inlinable_to_p (cgraph_node *callee, cgraph_node *caller)
1482 : : {
1483 : 7782791 : strub_mode callee_mode = get_strub_mode (callee);
1484 : :
1485 : 7782791 : switch (callee_mode)
1486 : : {
1487 : 4330 : case STRUB_WRAPPED:
1488 : 4330 : case STRUB_AT_CALLS:
1489 : 4330 : case STRUB_INTERNAL:
1490 : 4330 : case STRUB_INLINABLE:
1491 : 4330 : case STRUB_AT_CALLS_OPT:
1492 : 4330 : break;
1493 : :
1494 : : case STRUB_WRAPPER:
1495 : : case STRUB_DISABLED:
1496 : : case STRUB_CALLABLE:
1497 : : /* When we consider inlining, we've already verified callability, so we
1498 : : can even inline callable and then disabled into a strub context. That
1499 : : will get strubbed along with the context, so it's hopefully not a
1500 : : problem. */
1501 : : return true;
1502 : :
1503 : 0 : default:
1504 : 0 : gcc_unreachable ();
1505 : : }
1506 : :
1507 : 4330 : strub_mode caller_mode = get_strub_mode (caller);
1508 : :
1509 : 4330 : switch (caller_mode)
1510 : : {
1511 : : case STRUB_WRAPPED:
1512 : : case STRUB_AT_CALLS:
1513 : : case STRUB_INTERNAL:
1514 : : case STRUB_INLINABLE:
1515 : : case STRUB_AT_CALLS_OPT:
1516 : : return true;
1517 : :
1518 : : case STRUB_WRAPPER:
1519 : : case STRUB_DISABLED:
1520 : : case STRUB_CALLABLE:
1521 : : break;
1522 : :
1523 : 0 : default:
1524 : 0 : gcc_unreachable ();
1525 : : }
1526 : :
1527 : : return false;
1528 : : }
1529 : :
1530 : : /* Check that types T1 and T2 are strub-compatible. Return 1 if the strub modes
1531 : : are the same, 2 if they are interchangeable, and 0 otherwise. */
1532 : :
1533 : : int
1534 : 1429988 : strub_comptypes (tree t1, tree t2)
1535 : : {
1536 : 1429988 : if (TREE_CODE (t1) != TREE_CODE (t2))
1537 : : return 0;
1538 : :
1539 : 1429988 : enum strub_mode m1 = get_strub_mode_from_type (t1);
1540 : 1429988 : enum strub_mode m2 = get_strub_mode_from_type (t2);
1541 : :
1542 : 1429988 : if (m1 == m2)
1543 : : return 1;
1544 : :
1545 : : /* We're dealing with types, so only strub modes that can be selected by
1546 : : attributes in the front end matter. If either mode is at-calls (for
1547 : : functions) or internal (for variables), the conversion is not
1548 : : compatible. */
1549 : 3235 : bool var_p = !FUNC_OR_METHOD_TYPE_P (t1);
1550 : 3235 : enum strub_mode mr = var_p ? STRUB_INTERNAL : STRUB_AT_CALLS;
1551 : 3235 : if (m1 == mr || m2 == mr)
1552 : : return 0;
1553 : :
1554 : : return 2;
1555 : : }
1556 : :
1557 : : /* Return the effective strub mode used for CALL, and set *TYPEP to
1558 : : the effective type used for the call. The effective type and mode
1559 : : are those of the callee, unless the call involves a typecast. */
1560 : :
1561 : : static enum strub_mode
1562 : 19510 : effective_strub_mode_for_call (gcall *call, tree *typep)
1563 : : {
1564 : 19510 : tree type;
1565 : 19510 : enum strub_mode mode;
1566 : :
1567 : 19510 : if (strub_call_fntype_override_p (call))
1568 : : {
1569 : 0 : type = gimple_call_fntype (call);
1570 : 0 : mode = get_strub_mode_from_type (type);
1571 : : }
1572 : : else
1573 : : {
1574 : 19510 : type = TREE_TYPE (TREE_TYPE (gimple_call_fn (call)));
1575 : 19510 : tree decl = gimple_call_fndecl (call);
1576 : 19510 : if (decl)
1577 : 19228 : mode = get_strub_mode_from_fndecl (decl);
1578 : : else
1579 : 282 : mode = get_strub_mode_from_type (type);
1580 : : }
1581 : :
1582 : 19510 : if (typep)
1583 : 19343 : *typep = type;
1584 : :
1585 : 19510 : return mode;
1586 : : }
1587 : :
1588 : : /* Create a distinct copy of the type of NODE's function, and change
1589 : : the fntype of all calls to it with the same main type to the new
1590 : : type. */
1591 : :
1592 : : static void
1593 : 75 : distinctify_node_type (cgraph_node *node)
1594 : : {
1595 : 75 : tree old_type = TREE_TYPE (node->decl);
1596 : 75 : tree new_type = build_distinct_type_copy (old_type);
1597 : 75 : tree new_ptr_type = NULL_TREE;
1598 : :
1599 : : /* Remap any calls to node->decl that use old_type, or a variant
1600 : : thereof, to new_type as well. We don't look for aliases, their
1601 : : declarations will have their types changed independently, and
1602 : : we'll adjust their fntypes then. */
1603 : 150 : for (cgraph_edge *e = node->callers; e; e = e->next_caller)
1604 : : {
1605 : 75 : if (!e->call_stmt)
1606 : 0 : continue;
1607 : 75 : tree fnaddr = gimple_call_fn (e->call_stmt);
1608 : 75 : gcc_checking_assert (TREE_CODE (fnaddr) == ADDR_EXPR
1609 : : && TREE_OPERAND (fnaddr, 0) == node->decl);
1610 : 75 : if (strub_call_fntype_override_p (e->call_stmt))
1611 : 0 : continue;
1612 : 75 : if (!new_ptr_type)
1613 : 75 : new_ptr_type = build_pointer_type (new_type);
1614 : 75 : TREE_TYPE (fnaddr) = new_ptr_type;
1615 : 75 : gimple_call_set_fntype (e->call_stmt, new_type);
1616 : : }
1617 : :
1618 : 75 : TREE_TYPE (node->decl) = new_type;
1619 : 75 : }
1620 : :
1621 : : /* Return TRUE iff TYPE and any variants have the same strub mode. */
1622 : :
1623 : : static bool
1624 : 2607 : same_strub_mode_in_variants_p (tree type)
1625 : : {
1626 : 2607 : enum strub_mode mode = get_strub_mode_from_type (type);
1627 : :
1628 : 2607 : for (tree other = TYPE_MAIN_VARIANT (type);
1629 : 5472 : other != NULL_TREE; other = TYPE_NEXT_VARIANT (other))
1630 : 2865 : if (type != other && mode != get_strub_mode_from_type (other))
1631 : : return false;
1632 : :
1633 : : /* Check that the canonical type, if set, either is in the same
1634 : : variant chain, or has the same strub mode as type. Also check
1635 : : the variants of the canonical type. */
1636 : 2607 : if (TYPE_CANONICAL (type)
1637 : 2607 : && (TYPE_MAIN_VARIANT (TYPE_CANONICAL (type))
1638 : 150 : != TYPE_MAIN_VARIANT (type)))
1639 : : {
1640 : 0 : if (mode != get_strub_mode_from_type (TYPE_CANONICAL (type)))
1641 : : return false;
1642 : : else
1643 : 0 : return same_strub_mode_in_variants_p (TYPE_CANONICAL (type));
1644 : : }
1645 : :
1646 : : return true;
1647 : : }
1648 : :
1649 : : /* Check that strub functions don't call non-strub functions, and that
1650 : : always_inline strub functions are only called by strub
1651 : : functions. */
1652 : :
1653 : : static void
1654 : 644 : verify_strub ()
1655 : : {
1656 : 644 : cgraph_node *node;
1657 : :
1658 : : /* It's expected that check strub-wise pointer type compatibility of variables
1659 : : and of functions is already taken care of by front-ends, on account of the
1660 : : attribute's being marked as affecting type identity and of the creation of
1661 : : distinct types. */
1662 : :
1663 : : /* Check that call targets in strub contexts have strub-callable types. */
1664 : :
1665 : 3030 : FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
1666 : : {
1667 : 2386 : enum strub_mode caller_mode = get_strub_mode (node);
1668 : :
1669 : 2553 : for (cgraph_edge *e = node->indirect_calls; e; e = e->next_callee)
1670 : : {
1671 : 167 : gcc_checking_assert (e->indirect_unknown_callee);
1672 : :
1673 : 167 : if (!e->call_stmt)
1674 : 0 : continue;
1675 : :
1676 : 167 : enum strub_mode callee_mode
1677 : 167 : = effective_strub_mode_for_call (e->call_stmt, NULL);
1678 : :
1679 : 167 : if (!strub_callable_from_p (caller_mode, callee_mode))
1680 : 28 : error_at (gimple_location (e->call_stmt),
1681 : : "indirect non-%<strub%> call in %<strub%> context %qD",
1682 : : node->decl);
1683 : : }
1684 : :
1685 : 12038 : for (cgraph_edge *e = node->callees; e; e = e->next_callee)
1686 : : {
1687 : 9652 : gcc_checking_assert (!e->indirect_unknown_callee);
1688 : :
1689 : 9652 : if (!e->call_stmt)
1690 : 20 : continue;
1691 : :
1692 : 9652 : tree callee_fntype;
1693 : 9652 : enum strub_mode callee_mode
1694 : 9652 : = effective_strub_mode_for_call (e->call_stmt, &callee_fntype);
1695 : :
1696 : 9652 : if (!strub_callable_from_p (caller_mode, callee_mode))
1697 : : {
1698 : 817 : if (callee_mode == STRUB_INLINABLE)
1699 : 206 : error_at (gimple_location (e->call_stmt),
1700 : : "calling %<always_inline%> %<strub%> %qD"
1701 : : " in non-%<strub%> context %qD",
1702 : 206 : e->callee->decl, node->decl);
1703 : 611 : else if (fndecl_built_in_p (e->callee->decl, BUILT_IN_APPLY_ARGS)
1704 : 611 : && caller_mode == STRUB_INTERNAL)
1705 : : /* This is ok, it will be kept in the STRUB_WRAPPER, and removed
1706 : : from the STRUB_WRAPPED's strub context. */
1707 : 20 : continue;
1708 : 591 : else if (!strub_call_fntype_override_p (e->call_stmt))
1709 : 591 : error_at (gimple_location (e->call_stmt),
1710 : : "calling non-%<strub%> %qD in %<strub%> context %qD",
1711 : 591 : e->callee->decl, node->decl);
1712 : : else
1713 : 0 : error_at (gimple_location (e->call_stmt),
1714 : : "calling %qD using non-%<strub%> type %qT"
1715 : : " in %<strub%> context %qD",
1716 : 0 : e->callee->decl, callee_fntype, node->decl);
1717 : : }
1718 : : }
1719 : : }
1720 : 644 : }
1721 : :
1722 : : namespace {
1723 : :
/* Define a pass to compute strub modes.  */
const pass_data pass_data_ipa_strub_mode = {
  SIMPLE_IPA_PASS, // type
  "strubm", // name
  OPTGROUP_NONE, // optinfo_flags
  TV_NONE, // tv_id
  PROP_cfg, // properties_required
  0,	    // properties_provided
  0,	    // properties_destroyed
  0,	    // properties_start
  0,	    // properties_finish
};
1736 : :
/* Simple IPA pass that computes and records each function's strub mode
   (see gate and execute), so that the subsequent transformation pass
   can rely on settled modes.  */
class pass_ipa_strub_mode : public simple_ipa_opt_pass
{
public:
  pass_ipa_strub_mode (gcc::context *ctxt)
    : simple_ipa_opt_pass (pass_data_ipa_strub_mode, ctxt)
  {}
  /* Allow multiple instances of the pass in the pipeline.  */
  opt_pass *clone () { return new pass_ipa_strub_mode (m_ctxt); }
  virtual bool gate (function *) {
    /* In relaxed (-3) and strict (-4) settings, that only enable strub at a
       function or variable attribute's request, the attribute handler changes
       flag_strub to -1 or -2, respectively, if any strub-enabling occurrence
       of the attribute is found.  Therefore, if it remains at -3 or -4,
       nothing that would enable strub was found, so we can disable it and
       avoid the overhead.  */
    if (flag_strub < -2)
      flag_strub = 0;
    return flag_strub;
  }
  virtual unsigned int execute (function *);
};
1757 : :
/* Define a pass to introduce strub transformations.  */
const pass_data pass_data_ipa_strub = {
  SIMPLE_IPA_PASS, // type
  "strub", // name
  OPTGROUP_NONE, // optinfo_flags
  TV_NONE, // tv_id
  PROP_cfg | PROP_ssa, // properties_required
  0,	    // properties_provided
  0,	    // properties_destroyed
  0,	    // properties_start
  TODO_update_ssa
  | TODO_cleanup_cfg
  | TODO_rebuild_cgraph_edges
  | TODO_verify_il, // properties_finish
};
1773 : :
/* The strub transformation pass proper.  Besides the pass entry points,
   this class defines, on demand, the cached types, builtins and
   identifiers used throughout the transformation.  The caches live in
   strub_cache, indexed from STRUB_TYPE_BASE/STRUB_IDENT_BASE --
   presumably declared earlier in this file; confirm there.  */
class pass_ipa_strub : public simple_ipa_opt_pass
{
public:
  pass_ipa_strub (gcc::context *ctxt)
    : simple_ipa_opt_pass (pass_data_ipa_strub, ctxt)
  {}
  /* Allow multiple instances of the pass in the pipeline.  */
  opt_pass *clone () { return new pass_ipa_strub (m_ctxt); }
  /* Run only if strub is enabled and no errors were reported.  */
  virtual bool gate (function *) { return flag_strub && !seen_error (); }
  virtual unsigned int execute (function *);

  /* Define on demand and cache some types we use often.  Each getter
     memoizes its result in strub_cache[STRUB_TYPE_BASE + IDX].  */
#define DEF_TYPE(IDX, NAME, INIT)		\
  static inline tree get_ ## NAME () {		\
    int idx = STRUB_TYPE_BASE + IDX;		\
    static tree type = strub_cache[idx];	\
    if (!type)					\
      strub_cache[idx] = type = (INIT);		\
    return type;				\
  }

  /* Use a distinct ptr_type_node to denote the watermark, so that we can
     recognize it in arg lists and avoid modifying types twice.  */
  DEF_TYPE (0, wmt, build_variant_type_copy (ptr_type_node))

  /* Reference to the watermark type above.  */
  DEF_TYPE (1, pwmt, build_reference_type (get_wmt ()))

  /* Restrict-qualified version of the watermark reference.  */
  DEF_TYPE (2, qpwmt,
	    build_qualified_type (get_pwmt (),
				  TYPE_QUAL_RESTRICT
				  /* | TYPE_QUAL_CONST */))

  DEF_TYPE (3, qptr,
	    build_qualified_type (ptr_type_node,
				  TYPE_QUAL_RESTRICT
				  | TYPE_QUAL_CONST))

  /* Restrict-qualified reference to va_list, for variadic wrappers.  */
  DEF_TYPE (4, qpvalst,
	    build_qualified_type (build_reference_type
				  (va_list_type_node),
				  TYPE_QUAL_RESTRICT
				  /* | TYPE_QUAL_CONST */))

#undef DEF_TYPE

  /* Define non-strub builtins on demand.  The decl is created only if
     not already registered, and is recorded with set_builtin_decl.  */
#define DEF_NM_BUILTIN(NAME, CODE, FNTYPELIST)			\
  static tree get_ ## NAME () {					\
    tree decl = builtin_decl_explicit (CODE);			\
    if (!decl)							\
      {								\
	tree type = build_function_type_list FNTYPELIST;	\
	decl = add_builtin_function				\
	  ("__builtin_" #NAME,					\
	   type, CODE, BUILT_IN_NORMAL,				\
	   NULL, NULL);						\
	TREE_NOTHROW (decl) = true;				\
	set_builtin_decl ((CODE), decl, true);			\
      }								\
    return decl;						\
  }

  DEF_NM_BUILTIN (stack_address,
		  BUILT_IN_STACK_ADDRESS,
		  (ptr_type_node, NULL))

#undef DEF_NM_BUILTIN

  /* Define strub builtins on demand.  Like DEF_NM_BUILTIN, but the
     builtin gets an extern-scope "__strub_" name and, if FNSPEC is
     nonnull, a "fn spec" attribute describing its argument effects.  */
#define DEF_SS_BUILTIN(NAME, FNSPEC, CODE, FNTYPELIST)		\
  static tree get_ ## NAME () {					\
    tree decl = builtin_decl_explicit (CODE);			\
    if (!decl)							\
      {								\
	tree type = build_function_type_list FNTYPELIST;	\
	tree attrs = NULL;					\
	if (FNSPEC)						\
	  attrs = tree_cons (get_identifier ("fn spec"),	\
			     build_tree_list			\
			     (NULL_TREE,			\
			      build_string (strlen (FNSPEC),	\
					    (FNSPEC))),		\
			     attrs);				\
	decl = add_builtin_function_ext_scope			\
	  ("__builtin___strub_" #NAME,				\
	   type, CODE, BUILT_IN_NORMAL,				\
	   "__strub_" #NAME, attrs);				\
	TREE_NOTHROW (decl) = true;				\
	set_builtin_decl ((CODE), decl, true);			\
      }								\
    return decl;						\
  }

  DEF_SS_BUILTIN (enter, ". Ot",
		  BUILT_IN___STRUB_ENTER,
		  (void_type_node, get_qpwmt (), NULL))
  DEF_SS_BUILTIN (update, ". Wt",
		  BUILT_IN___STRUB_UPDATE,
		  (void_type_node, get_qpwmt (), NULL))
  DEF_SS_BUILTIN (leave, ". w ",
		  BUILT_IN___STRUB_LEAVE,
		  (void_type_node, get_qpwmt (), NULL))

#undef DEF_SS_BUILTIN

  /* Define strub identifiers on demand, cached in
     strub_cache[STRUB_IDENT_BASE + IDX].  */
#define DEF_IDENT(IDX, NAME)						\
  static inline tree get_ ## NAME () {					\
    int idx = STRUB_IDENT_BASE + IDX;					\
    tree identifier = strub_cache[idx];					\
    if (!identifier)							\
      strub_cache[idx] = identifier = get_identifier (".strub." #NAME); \
    return identifier;							\
  }

  DEF_IDENT (0, watermark_ptr)
  DEF_IDENT (1, va_list_ptr)
  DEF_IDENT (2, apply_args)

#undef DEF_IDENT

  static inline int adjust_at_calls_type (tree);
  static inline void adjust_at_calls_call (cgraph_edge *, int, tree);
  static inline void adjust_at_calls_calls (cgraph_node *);

  /* Add to SEQ a call to the strub watermark update builtin, taking NODE's
     location if given.  Optionally add the corresponding edge from NODE, with
     execution frequency COUNT.  Return the modified SEQ.  */

  static inline gimple_seq
  call_update_watermark (tree wmptr, cgraph_node *node, profile_count count,
			 gimple_seq seq = NULL)
    {
      tree uwm = get_update ();
      gcall *update = gimple_build_call (uwm, 1, wmptr);
      if (node)
	gimple_set_location (update, DECL_SOURCE_LOCATION (node->decl));
      gimple_seq_add_stmt (&seq, update);
      if (node)
	node->create_edge (cgraph_node::get_create (uwm), update, count, false);
      return seq;
    }

};
1917 : :
1918 : : } // anon namespace
1919 : :
/* Gather with this type a collection of parameters that we're turning into
   explicit references.  Membership is tested via contains () by the
   gimple-walking helpers below.  */

typedef hash_set<tree> indirect_parms_t;
1924 : :
1925 : : /* Dereference OP's incoming turned-into-reference parm if it's an
1926 : : INDIRECT_PARMS or an ADDR_EXPR thereof. Set *REC and return according to
1927 : : gimple-walking expectations. */
1928 : :
1929 : : static tree
1930 : 140 : maybe_make_indirect (indirect_parms_t &indirect_parms, tree op, int *rec)
1931 : : {
1932 : 140 : if (DECL_P (op))
1933 : : {
1934 : 22 : *rec = 0;
1935 : 22 : if (indirect_parms.contains (op))
1936 : : {
1937 : 0 : tree ret = gimple_fold_indirect_ref (op);
1938 : 0 : if (!ret)
1939 : 0 : ret = build2 (MEM_REF,
1940 : 0 : TREE_TYPE (TREE_TYPE (op)),
1941 : : op,
1942 : 0 : build_int_cst (TREE_TYPE (op), 0));
1943 : 0 : if (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (op)))
1944 : 0 : && !TREE_THIS_VOLATILE (ret))
1945 : 0 : TREE_SIDE_EFFECTS (ret) = TREE_THIS_VOLATILE (ret) = 1;
1946 : 0 : return ret;
1947 : : }
1948 : : }
1949 : 118 : else if (TREE_CODE (op) == ADDR_EXPR
1950 : 118 : && DECL_P (TREE_OPERAND (op, 0)))
1951 : : {
1952 : 12 : *rec = 0;
1953 : 12 : if (indirect_parms.contains (TREE_OPERAND (op, 0)))
1954 : : {
1955 : 0 : op = TREE_OPERAND (op, 0);
1956 : 0 : return op;
1957 : : }
1958 : : }
1959 : :
1960 : : return NULL_TREE;
1961 : : }
1962 : :
1963 : : /* A gimple-walking function that adds dereferencing to indirect parms. */
1964 : :
1965 : : static tree
1966 : 140 : walk_make_indirect (tree *op, int *rec, void *arg)
1967 : : {
1968 : 140 : walk_stmt_info *wi = (walk_stmt_info *)arg;
1969 : 140 : indirect_parms_t &indirect_parms = *(indirect_parms_t *)wi->info;
1970 : :
1971 : 140 : if (!*op || TYPE_P (*op))
1972 : : {
1973 : 0 : *rec = 0;
1974 : 0 : return NULL_TREE;
1975 : : }
1976 : :
1977 : 140 : if (tree repl = maybe_make_indirect (indirect_parms, *op, rec))
1978 : : {
1979 : 0 : *op = repl;
1980 : 0 : wi->changed = true;
1981 : : }
1982 : :
1983 : : return NULL_TREE;
1984 : : }
1985 : :
1986 : : /* A gimple-walking function that turns any non-gimple-val ADDR_EXPRs into a
1987 : : separate SSA. Though addresses of e.g. parameters, and of members thereof,
1988 : : are gimple vals, turning parameters into references, with an extra layer of
1989 : : indirection and thus explicit dereferencing, need to be regimplified. */
1990 : :
1991 : : static tree
1992 : 0 : walk_regimplify_addr_expr (tree *op, int *rec, void *arg)
1993 : : {
1994 : 0 : walk_stmt_info *wi = (walk_stmt_info *)arg;
1995 : 0 : gimple_stmt_iterator &gsi = *(gimple_stmt_iterator *)wi->info;
1996 : :
1997 : 0 : *rec = 0;
1998 : :
1999 : 0 : if (!*op || TREE_CODE (*op) != ADDR_EXPR)
2000 : : return NULL_TREE;
2001 : :
2002 : 0 : if (!is_gimple_val (*op))
2003 : : {
2004 : 0 : tree ret = force_gimple_operand_gsi (&gsi, *op, true,
2005 : : NULL_TREE, true, GSI_SAME_STMT);
2006 : 0 : gcc_assert (ret != *op);
2007 : 0 : *op = ret;
2008 : 0 : wi->changed = true;
2009 : : }
2010 : :
2011 : : return NULL_TREE;
2012 : : }
2013 : :
2014 : : /* Turn STMT's PHI arg defs into separate SSA defs if they've become
2015 : : non-gimple_val. Return TRUE if any edge insertions need to be committed. */
2016 : :
2017 : : static bool
2018 : 0 : walk_regimplify_phi (gphi *stmt)
2019 : : {
2020 : 0 : bool needs_commit = false;
2021 : :
2022 : 0 : for (unsigned i = 0, n = gimple_phi_num_args (stmt); i < n; i++)
2023 : : {
2024 : 0 : tree op = gimple_phi_arg_def (stmt, i);
2025 : 0 : if ((TREE_CODE (op) == ADDR_EXPR
2026 : 0 : && !is_gimple_val (op))
2027 : : /* ??? A PARM_DECL that was addressable in the original function and
2028 : : had its address in PHI nodes, but that became a reference in the
2029 : : wrapped clone would NOT be updated by update_ssa in PHI nodes.
2030 : : Alas, if we were to create a default def for it now, update_ssa
2031 : : would complain that the symbol that needed rewriting already has
2032 : : SSA names associated with it. OTOH, leaving the PARM_DECL alone,
2033 : : it eventually causes errors because it remains unchanged in PHI
2034 : : nodes, but it gets rewritten as expected if it appears in other
2035 : : stmts. So we cheat a little here, and force the PARM_DECL out of
2036 : : the PHI node and into an assignment. It's a little expensive,
2037 : : because we insert it at the edge, which introduces a basic block
2038 : : that's entirely unnecessary, but it works, and the block will be
2039 : : removed as the default def gets propagated back into the PHI node,
2040 : : so the final optimized code looks just as expected. */
2041 : 0 : || (TREE_CODE (op) == PARM_DECL
2042 : 0 : && !TREE_ADDRESSABLE (op)))
2043 : : {
2044 : 0 : tree temp = make_ssa_name (TREE_TYPE (op), stmt);
2045 : 0 : if (TREE_CODE (op) == PARM_DECL)
2046 : 0 : SET_SSA_NAME_VAR_OR_IDENTIFIER (temp, DECL_NAME (op));
2047 : 0 : SET_PHI_ARG_DEF (stmt, i, temp);
2048 : :
2049 : 0 : gimple *assign = gimple_build_assign (temp, op);
2050 : 0 : if (gimple_phi_arg_has_location (stmt, i))
2051 : 0 : gimple_set_location (assign, gimple_phi_arg_location (stmt, i));
2052 : 0 : gsi_insert_on_edge (gimple_phi_arg_edge (stmt, i), assign);
2053 : 0 : needs_commit = true;
2054 : : }
2055 : : }
2056 : :
2057 : 0 : return needs_commit;
2058 : : }
2059 : :
2060 : : /* Create a reference type to use for PARM when turning it into a
2061 : : reference. */
2062 : :
2063 : : static tree
2064 : 12 : build_ref_type_for (tree parm)
2065 : : {
2066 : 12 : gcc_checking_assert (TREE_CODE (parm) == PARM_DECL);
2067 : :
2068 : 12 : tree ref_type = build_reference_type (TREE_TYPE (parm));
2069 : :
2070 : 12 : return ref_type;
2071 : : }
2072 : :
2073 : : /* Add cgraph edges from current_function_decl to callees in SEQ with frequency
2074 : : COUNT, assuming all calls in SEQ are direct. */
2075 : :
2076 : : static void
2077 : 3171 : add_call_edges_for_seq (gimple_seq seq, profile_count count)
2078 : : {
2079 : 3171 : cgraph_node *node = cgraph_node::get_create (current_function_decl);
2080 : :
2081 : 3171 : for (gimple_stmt_iterator gsi = gsi_start (seq);
2082 : 10155 : !gsi_end_p (gsi); gsi_next (&gsi))
2083 : : {
2084 : 6984 : gimple *stmt = gsi_stmt (gsi);
2085 : :
2086 : 6984 : gcall *call = dyn_cast <gcall *> (stmt);
2087 : 6984 : if (!call)
2088 : 3803 : continue;
2089 : :
2090 : 3181 : tree callee = gimple_call_fndecl (call);
2091 : 3181 : gcc_checking_assert (callee);
2092 : 3181 : node->create_edge (cgraph_node::get_create (callee), call, count, false);
2093 : : }
2094 : 3171 : }
2095 : :
/* Insert SEQ after the call at GSI, as if the call was in a try block with SEQ
   as finally, i.e., SEQ will run after the call whether it returns or
   propagates an exception.  This handles block splitting, EH edge and block
   creation, noreturn and nothrow optimizations, and even throwing calls without
   preexisting local handlers.  */

static void
gsi_insert_finally_seq_after_call (gimple_stmt_iterator gsi, gimple_seq seq)
{
  if (!seq)
    return;

  gimple *stmt = gsi_stmt (gsi);

  if (gimple_has_location (stmt))
    annotate_all_with_location (seq, gimple_location (stmt));

  /* Determine whether the call can return, and whether it can throw
     to a handler (negative lookup_stmt_eh_lp means a must-not-throw
     region).  */
  gcall *call = dyn_cast <gcall *> (stmt);
  bool noreturn_p = call && gimple_call_noreturn_p (call);
  int eh_lp = lookup_stmt_eh_lp (stmt);
  bool must_not_throw_p = eh_lp < 0;
  bool nothrow_p = (must_not_throw_p
		    || (call && gimple_call_nothrow_p (call))
		    || (eh_lp <= 0
			&& (TREE_NOTHROW (cfun->decl)
			    || !opt_for_fn (cfun->decl, flag_exceptions))));

  /* Neither returns nor throws: nothing can run after the call.  */
  if (noreturn_p && nothrow_p)
    return;

  /* Don't expect an EH edge if we're not to throw, or if we're not in an EH
     region yet.  */
  bool no_eh_edge_p = (nothrow_p || !eh_lp);
  bool must_end_bb = stmt_ends_bb_p (stmt);

  /* If the call ends its block, find the fallthru and/or EH successor
     edges SEQ must be inserted on.  Without checking, we stop scanning
     as soon as all the edges we'll need were found.  */
  edge eft = NULL, eeh = NULL;
  if (must_end_bb && !(noreturn_p && no_eh_edge_p))
    {
      gcc_checking_assert (gsi_one_before_end_p (gsi));

      edge e;
      edge_iterator ei;
      FOR_EACH_EDGE (e, ei, gsi_bb (gsi)->succs)
	{
	  if ((e->flags & EDGE_EH))
	    {
	      gcc_checking_assert (!eeh);
	      eeh = e;
#if !CHECKING_P
	      if (eft || noreturn_p)
		break;
#endif
	    }
	  if ((e->flags & EDGE_FALLTHRU))
	    {
	      gcc_checking_assert (!eft);
	      eft = e;
#if !CHECKING_P
	      if (eeh || no_eh_edge_p)
		break;
#endif
	    }
	}

      gcc_checking_assert (!(eft && (eft->flags & EDGE_FALLTHRU))
			   == noreturn_p);
      gcc_checking_assert (!(eeh && (eeh->flags & EDGE_EH))
			   == no_eh_edge_p);
      gcc_checking_assert (eft != eeh);
    }

  /* Insert SEQ on the normal (returning) path.  If the call may also
     throw, insert a copy, keeping the original SEQ for the EH path.  */
  if (!noreturn_p)
    {
      gimple_seq nseq = nothrow_p ? seq : gimple_seq_copy (seq);

      if (must_end_bb)
	{
	  gcc_checking_assert (gsi_one_before_end_p (gsi));
	  add_call_edges_for_seq (nseq, eft->count ());
	  gsi_insert_seq_on_edge_immediate (eft, nseq);
	}
      else
	{
	  add_call_edges_for_seq (nseq, gsi_bb (gsi)->count);
	  gsi_insert_seq_after (&gsi, nseq, GSI_SAME_STMT);
	}
    }

  if (nothrow_p)
    return;

  /* If there's already an EH landing pad for the call, insert SEQ on
     the existing EH edge.  */
  if (eh_lp)
    {
      add_call_edges_for_seq (seq, eeh->count ());
      gsi_insert_seq_on_edge_immediate (eeh, seq);
      return;
    }

  /* A throwing call may appear within a basic block in a function that doesn't
     have any EH regions.  We're going to add a cleanup if so, therefore the
     block will have to be split.  */
  basic_block bb = gsi_bb (gsi);
  if (!gsi_one_before_end_p (gsi))
    split_block (bb, stmt);

  /* Create a new block for the EH cleanup.  */
  basic_block bb_eh_cleanup = create_empty_bb (bb);
  if (dom_info_available_p (CDI_DOMINATORS))
    set_immediate_dominator (CDI_DOMINATORS, bb_eh_cleanup, bb);
  if (current_loops)
    add_bb_to_loop (bb_eh_cleanup, current_loops->tree_root);

  /* Make the new block an EH cleanup for the call.  */
  eh_region new_r = gen_eh_region_cleanup (NULL);
  eh_landing_pad lp = gen_eh_landing_pad (new_r);
  tree label = gimple_block_label (bb_eh_cleanup);
  lp->post_landing_pad = label;
  EH_LANDING_PAD_NR (label) = lp->index;
  add_stmt_to_eh_lp (stmt, lp->index);

  /* Add the cleanup code to the EH cleanup block.  */
  gsi = gsi_after_labels (bb_eh_cleanup);
  gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);

  /* And then propagate the exception further.  */
  gresx *resx = gimple_build_resx (new_r->index);
  if (gimple_has_location (stmt))
    gimple_set_location (resx, gimple_location (stmt));
  gsi_insert_before (&gsi, resx, GSI_SAME_STMT);

  /* Finally, wire the EH cleanup block into the CFG.  */
  edge neeh = make_eh_edge (stmt);
  neeh->probability = profile_probability::never ();
  gcc_checking_assert (neeh->dest == bb_eh_cleanup);
  gcc_checking_assert (!neeh->dest->count.initialized_p ());
  neeh->dest->count = neeh->count ();
  add_call_edges_for_seq (seq, neeh->dest->count);
}
2234 : :
2235 : : /* Copy the attribute list at *ATTRS, minus any NAME attributes, leaving
2236 : : shareable trailing nodes alone. */
2237 : :
2238 : : static inline void
2239 : 0 : remove_named_attribute_unsharing (const char *name, tree *attrs)
2240 : : {
2241 : 0 : while (tree found = lookup_attribute (name, *attrs))
2242 : : {
2243 : : /* Copy nodes up to the next NAME attribute. */
2244 : 0 : while (*attrs != found)
2245 : : {
2246 : 0 : *attrs = tree_cons (TREE_PURPOSE (*attrs),
2247 : 0 : TREE_VALUE (*attrs),
2248 : 0 : TREE_CHAIN (*attrs));
2249 : 0 : attrs = &TREE_CHAIN (*attrs);
2250 : : }
2251 : : /* Then drop it. */
2252 : 0 : gcc_checking_assert (*attrs == found);
2253 : 0 : *attrs = TREE_CHAIN (*attrs);
2254 : 0 : }
2255 : 0 : }
2256 : :
2257 : : /* Record the order of the last cgraph entry whose mode we've already set, so
2258 : : that we can perform mode setting incrementally without duplication. */
2259 : : static int last_cgraph_order;
2260 : :
2261 : : /* Set strub modes for functions introduced since the last call. */
2262 : :
2263 : : static void
2264 : 1219 : ipa_strub_set_mode_for_new_functions ()
2265 : : {
2266 : 1219 : if (symtab->order == last_cgraph_order)
2267 : : return;
2268 : :
2269 : : cgraph_node *node;
2270 : :
2271 : : /* Go through the functions twice, once over non-aliases, and then over
2272 : : aliases, so that aliases can reuse the mode computation of their ultimate
2273 : : targets. */
2274 : 2268 : for (int aliases = 0; aliases <= 1; aliases++)
2275 : 21732 : FOR_EACH_FUNCTION (node)
2276 : : {
2277 : 9354 : if (!node->alias != !aliases)
2278 : 4677 : continue;
2279 : :
2280 : : /* Already done. */
2281 : 4677 : if (node->order < last_cgraph_order)
2282 : 986 : continue;
2283 : :
2284 : 3691 : set_strub_mode (node);
2285 : : }
2286 : :
2287 : 756 : last_cgraph_order = symtab->order;
2288 : : }
2289 : :
2290 : : /* Return FALSE if NODE is a strub context, and TRUE otherwise. */
2291 : :
2292 : : bool
2293 : 1145549 : strub_splittable_p (cgraph_node *node)
2294 : : {
2295 : 1145549 : switch (get_strub_mode (node))
2296 : : {
2297 : : case STRUB_WRAPPED:
2298 : : case STRUB_AT_CALLS:
2299 : : case STRUB_AT_CALLS_OPT:
2300 : : case STRUB_INLINABLE:
2301 : : case STRUB_INTERNAL:
2302 : : case STRUB_WRAPPER:
2303 : : return false;
2304 : :
2305 : 1145406 : case STRUB_CALLABLE:
2306 : 1145406 : case STRUB_DISABLED:
2307 : 1145406 : break;
2308 : :
2309 : 0 : default:
2310 : 0 : gcc_unreachable ();
2311 : : }
2312 : :
2313 : 1145406 : return true;
2314 : : }
2315 : :
2316 : : /* Return the PARM_DECL of the incoming watermark pointer, if there is one. */
2317 : :
2318 : : tree
2319 : 2194 : strub_watermark_parm (tree fndecl)
2320 : : {
2321 : 2194 : switch (get_strub_mode_from_fndecl (fndecl))
2322 : : {
2323 : 1185 : case STRUB_WRAPPED:
2324 : 1185 : case STRUB_AT_CALLS:
2325 : 1185 : case STRUB_AT_CALLS_OPT:
2326 : 1185 : break;
2327 : :
2328 : : case STRUB_INTERNAL:
2329 : : case STRUB_WRAPPER:
2330 : : case STRUB_CALLABLE:
2331 : : case STRUB_DISABLED:
2332 : : case STRUB_INLINABLE:
2333 : : return NULL_TREE;
2334 : :
2335 : 0 : default:
2336 : 0 : gcc_unreachable ();
2337 : : }
2338 : :
2339 : 1185 : for (tree parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
2340 : : /* The type (variant) compare finds the parameter even in a just-created
2341 : : clone, before we set its name, but the type-based compare doesn't work
2342 : : during builtin expansion within the lto compiler, because we'll have
2343 : : created a separate variant in that run. */
2344 : 1185 : if (TREE_TYPE (parm) == pass_ipa_strub::get_qpwmt ()
2345 : 1185 : || DECL_NAME (parm) == pass_ipa_strub::get_watermark_ptr ())
2346 : 1185 : return parm;
2347 : :
2348 : 0 : gcc_unreachable ();
2349 : : }
2350 : :
2351 : : /* Adjust a STRUB_AT_CALLS function TYPE, adding a watermark pointer if it
2352 : : hasn't been added yet. Return the named argument count. */
2353 : :
2354 : : int
2355 : 2607 : pass_ipa_strub::adjust_at_calls_type (tree type)
2356 : : {
2357 : 2607 : int named_args = 0;
2358 : :
2359 : 2607 : gcc_checking_assert (same_strub_mode_in_variants_p (type));
2360 : :
2361 : 2607 : if (!TYPE_ARG_TYPES (type))
2362 : : return named_args;
2363 : :
2364 : 2382 : tree *tlist = &TYPE_ARG_TYPES (type);
2365 : 2382 : tree qpwmptrt = get_qpwmt ();
2366 : 4890 : while (*tlist && TREE_VALUE (*tlist) != void_type_node)
2367 : : {
2368 : : /* The type has already been adjusted. */
2369 : 2228 : if (TREE_VALUE (*tlist) == qpwmptrt)
2370 : 2102 : return named_args;
2371 : 126 : named_args++;
2372 : 378 : *tlist = tree_cons (TREE_PURPOSE (*tlist),
2373 : 126 : TREE_VALUE (*tlist),
2374 : 126 : TREE_CHAIN (*tlist));
2375 : 126 : tlist = &TREE_CHAIN (*tlist);
2376 : : }
2377 : :
2378 : : /* Add the new argument after all named arguments, so as to not mess with
2379 : : attributes that reference parameters. */
2380 : 280 : *tlist = tree_cons (NULL_TREE, get_qpwmt (), *tlist);
2381 : :
2382 : : #if ATTR_FNSPEC_DECONST_WATERMARK
2383 : : if (!type_already_adjusted)
2384 : : {
2385 : : int flags = flags_from_decl_or_type (type);
2386 : : tree fnspec = lookup_attribute ("fn spec", type);
2387 : :
2388 : : if ((flags & (ECF_CONST | ECF_PURE | ECF_NOVOPS)) || fnspec)
2389 : : {
2390 : : size_t xargs = 1;
2391 : : size_t curlen = 0, tgtlen = 2 + 2 * (named_args + xargs);
2392 : : auto_vec<char> nspecv (tgtlen);
2393 : : char *nspec = &nspecv[0]; /* It will *not* be NUL-terminated! */
2394 : : if (fnspec)
2395 : : {
2396 : : tree fnspecstr = TREE_VALUE (TREE_VALUE (fnspec));
2397 : : curlen = TREE_STRING_LENGTH (fnspecstr);
2398 : : memcpy (nspec, TREE_STRING_POINTER (fnspecstr), curlen);
2399 : : }
2400 : : if (!curlen)
2401 : : {
2402 : : nspec[curlen++] = '.';
2403 : : nspec[curlen++] = ((flags & ECF_CONST)
2404 : : ? 'c'
2405 : : : (flags & ECF_PURE)
2406 : : ? 'p'
2407 : : : ' ');
2408 : : }
2409 : : while (curlen < tgtlen - 2 * xargs)
2410 : : {
2411 : : nspec[curlen++] = '.';
2412 : : nspec[curlen++] = ' ';
2413 : : }
2414 : : nspec[curlen++] = 'W';
2415 : : nspec[curlen++] = 't';
2416 : :
2417 : : /* The type has already been copied, if needed, before adding
2418 : : parameters. */
2419 : : TYPE_ATTRIBUTES (type)
2420 : : = tree_cons (get_identifier ("fn spec"),
2421 : : build_tree_list (NULL_TREE,
2422 : : build_string (tgtlen, nspec)),
2423 : : TYPE_ATTRIBUTES (type));
2424 : : }
2425 : : }
2426 : : #endif
2427 : :
2428 : 280 : return named_args;
2429 : : }
2430 : :
2431 : : /* Adjust a call to an at-calls call target. Create a watermark local variable
2432 : : if needed, initialize it before, pass it to the callee according to the
2433 : : modified at-calls interface, and release the callee's stack space after the
2434 : : call, if not deferred. If the call is const or pure, arrange for the
2435 : : watermark to not be assumed unused or unchanged. */
2436 : :
2437 : : void
2438 : 2193 : pass_ipa_strub::adjust_at_calls_call (cgraph_edge *e, int named_args,
2439 : : tree callee_fntype)
2440 : : {
2441 : 2193 : gcc_checking_assert (e->call_stmt);
2442 : 2193 : gcall *ocall = e->call_stmt;
2443 : 2193 : gimple_stmt_iterator gsi = gsi_for_stmt (ocall);
2444 : :
2445 : : /* Make sure we haven't modified this call yet: a watermark pointer
     : : right after the named args would mean we already adjusted it. */
2446 : 2193 : gcc_checking_assert (!(int (gimple_call_num_args (ocall)) > named_args
2447 : : && (TREE_TYPE (gimple_call_arg (ocall, named_args))
2448 : : == get_pwmt ())));
2449 : :
     : : /* Check target support for strub at this call site (using the callee
     : : decl if known, otherwise the called function type); if unsupported,
     : : leave the call unmodified. */
2450 : 2193 : tree tsup;
2451 : 2193 : if (!(tsup = gimple_call_fndecl (ocall)))
2452 : 47 : tsup = TREE_TYPE (TREE_TYPE (gimple_call_fn (ocall)));
2453 : 2193 : if (!strub_target_support_p (tsup, true, gimple_location (ocall)))
2454 : 0 : return;
2455 : :
2456 : : /* If we're already within a strub context, pass on the incoming watermark
2457 : : pointer, and omit the enter and leave calls around the modified call, as an
2458 : : optimization, or as a means to satisfy a tail-call requirement. */
2459 : 2193 : tree swmp = ((opt_for_fn (e->caller->decl, optimize_size)
2460 : 1849 : || opt_for_fn (e->caller->decl, optimize) > 2
2461 : 1495 : || gimple_call_must_tail_p (ocall)
2462 : 1495 : || (opt_for_fn (e->caller->decl, optimize) == 2
2463 : 582 : && gimple_call_tail_p (ocall)))
2464 : 2547 : ? strub_watermark_parm (e->caller->decl)
2465 : 2193 : : NULL_TREE);
     : : /* Nonnull SWMP means we reuse the caller's own watermark and skip
     : : emitting an enter/leave pair around this call. */
2466 : 2193 : bool omit_own_watermark = swmp;
2467 : 2193 : tree swm = NULL_TREE;
2468 : 2193 : if (!omit_own_watermark)
2469 : : {
2470 : 1725 : swm = create_tmp_var (get_wmt (), ".strub.watermark");
2471 : 1725 : TREE_ADDRESSABLE (swm) = true;
2472 : 1725 : swmp = build1 (ADDR_EXPR, get_pwmt (), swm);
2473 : :
2474 : : /* Initialize the watermark before the call. */
2475 : 1725 : tree enter = get_enter ();
2476 : 1725 : gcall *stptr = gimple_build_call (enter, 1,
2477 : : unshare_expr (swmp));
2478 : 1725 : if (gimple_has_location (ocall))
2479 : 1725 : gimple_set_location (stptr, gimple_location (ocall));
2480 : 1725 : gsi_insert_before (&gsi, stptr, GSI_SAME_STMT);
     : : /* Keep the callgraph in sync with the newly-inserted call. */
2481 : 1725 : e->caller->create_edge (cgraph_node::get_create (enter),
2482 : 1725 : stptr, gsi_bb (gsi)->count, false);
2483 : : }
2484 : :
2485 : :
2486 : : /* Replace the call with one that passes the swmp argument right after
     : : the named arguments, before any remaining (variadic) ones. */
2487 : 2193 : gcall *wrcall;
2488 : 2193 : { gcall *stmt = ocall;
2489 : : // Mostly copied from gimple_call_copy_skip_args.
2490 : 2193 : int i = 0;
2491 : 2193 : int nargs = gimple_call_num_args (stmt);
2492 : 2193 : auto_vec<tree> vargs (MAX (nargs, named_args) + 1);
2493 : 2193 : gcall *new_stmt;
2494 : :
2495 : : /* pr71109.c calls a prototypeless function, then defines it with
2496 : : additional arguments. It's ill-formed, but after it's inlined,
2497 : : it somehow works out. Pad missing named args with null pointers. */
2498 : 2259 : for (; i < named_args && i < nargs; i++)
2499 : 66 : vargs.quick_push (gimple_call_arg (stmt, i));
2500 : 2193 : for (; i < named_args; i++)
2501 : 0 : vargs.quick_push (null_pointer_node);
2502 : :
2503 : 2193 : vargs.quick_push (unshare_expr (swmp));
2504 : :
     : : /* Any remaining arguments follow the watermark pointer. */
2505 : 2221 : for (; i < nargs; i++)
2506 : 28 : vargs.quick_push (gimple_call_arg (stmt, i));
2507 : :
2508 : 2193 : if (gimple_call_internal_p (stmt))
2509 : 0 : gcc_unreachable ();
2510 : : else
2511 : 2193 : new_stmt = gimple_build_call_vec (gimple_call_fn (stmt), vargs);
2512 : 2193 : gimple_call_set_fntype (new_stmt, callee_fntype);
2513 : :
2514 : 2193 : if (gimple_call_lhs (stmt))
2515 : 2126 : gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
2516 : :
     : : /* Carry over virtual operands, location, flags and static chain
     : : from the original call. */
2517 : 2193 : gimple_move_vops (new_stmt, stmt);
2518 : :
2519 : 2193 : if (gimple_has_location (stmt))
2520 : 2193 : gimple_set_location (new_stmt, gimple_location (stmt));
2521 : 2193 : gimple_call_copy_flags (new_stmt, stmt);
2522 : 2193 : gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
2523 : :
2524 : 2193 : gimple_set_modified (new_stmt, true);
2525 : :
2526 : 2193 : wrcall = new_stmt;
2527 : 2193 : }
2528 : :
2529 : 2193 : update_stmt (wrcall);
2530 : 2193 : gsi_replace (&gsi, wrcall, true);
2531 : 2193 : cgraph_edge::set_call_stmt (e, wrcall, false);
2532 : :
2533 : : /* Insert the strub code after the call. */
2534 : 2193 : gimple_seq seq = NULL;
2535 : :
2536 : : #if !ATTR_FNSPEC_DECONST_WATERMARK
2537 : : /* If the call will be assumed to not modify or even read the
2538 : : watermark, make it read and modified ourselves. */
2539 : 2193 : if ((gimple_call_flags (wrcall)
2540 : 2193 : & (ECF_CONST | ECF_PURE | ECF_NOVOPS)))
2541 : : {
     : : /* With an omitted own watermark, SWM is still null: build a
     : : dereference of the caller's watermark pointer instead. */
2542 : 61 : if (!swm)
2543 : 0 : swm = build2 (MEM_REF,
2544 : 0 : TREE_TYPE (TREE_TYPE (swmp)),
2545 : : swmp,
2546 : 0 : build_int_cst (TREE_TYPE (swmp), 0));
2547 : :
     : : /* Emit an empty asm taking the watermark as both "m" input and
     : : "=m" output, so optimizers can't assume the watermark memory
     : : is unread or unchanged across the call. */
2548 : 61 : vec<tree, va_gc> *inputs = NULL;
2549 : 61 : vec<tree, va_gc> *outputs = NULL;
2550 : 122 : vec_safe_push (outputs,
2551 : : build_tree_list
2552 : 61 : (build_tree_list
2553 : : (NULL_TREE, build_string (2, "=m")),
2554 : : unshare_expr (swm)));
2555 : 122 : vec_safe_push (inputs,
2556 : : build_tree_list
2557 : 61 : (build_tree_list
2558 : : (NULL_TREE, build_string (1, "m")),
2559 : : unshare_expr (swm)));
2560 : 61 : gasm *forcemod = gimple_build_asm_vec ("", inputs, outputs,
2561 : : NULL, NULL);
2562 : 61 : gimple_seq_add_stmt (&seq, forcemod);
2563 : :
2564 : : /* If the call will be assumed to not even read the watermark,
2565 : : make sure it is already in memory before the call. */
2566 : 61 : if ((gimple_call_flags (wrcall) & ECF_CONST))
2567 : : {
2568 : 33 : vec<tree, va_gc> *inputs = NULL;
2569 : 66 : vec_safe_push (inputs,
2570 : : build_tree_list
2571 : 33 : (build_tree_list
2572 : : (NULL_TREE, build_string (1, "m")),
2573 : : unshare_expr (swm)));
2574 : 33 : gasm *force_store = gimple_build_asm_vec ("", inputs, NULL,
2575 : : NULL, NULL);
2576 : 33 : if (gimple_has_location (wrcall))
2577 : 33 : gimple_set_location (force_store, gimple_location (wrcall));
2578 : 33 : gsi_insert_before (&gsi, force_store, GSI_SAME_STMT);
2579 : : }
2580 : : }
2581 : : #endif
2582 : :
2583 : 2193 : if (!omit_own_watermark)
2584 : : {
     : : /* Call the strub leave function with the watermark after the
     : : call... */
2585 : 1725 : gcall *sleave = gimple_build_call (get_leave (),
2586 : : unshare_expr (swmp));
2587 : 1725 : gimple_seq_add_stmt (&seq, sleave);
2588 : :
     : : /* ... and mark the watermark variable as dead afterwards. */
2589 : 1725 : gassign *clobber = gimple_build_assign (swm,
2590 : : build_clobber
2591 : 1725 : (TREE_TYPE (swm)));
2592 : 1725 : gimple_seq_add_stmt (&seq, clobber);
2593 : : }
2594 : :
     : : /* Insert SEQ as a finally sequence after the call, so it also runs
     : : on exceptional exits from the call. */
2595 : 2193 : gsi_insert_finally_seq_after_call (gsi, seq);
2596 : : }
2597 : :
2598 : : /* Adjust all at-calls calls in NODE: both indirect calls and direct
     : : calls to known callees whose effective strub mode is at-calls. */
2599 : :
2600 : : void
2601 : 1315 : pass_ipa_strub::adjust_at_calls_calls (cgraph_node *node)
2602 : : {
2603 : : /* Adjust unknown-callee indirect calls with STRUB_AT_CALLS types within
2604 : : onode. */
2605 : 1315 : if (node->indirect_calls)
2606 : : {
2607 : 115 : push_cfun (DECL_STRUCT_FUNCTION (node->decl));
2608 : 230 : for (cgraph_edge *e = node->indirect_calls; e; e = e->next_callee)
2609 : : {
2610 : 115 : gcc_checking_assert (e->indirect_unknown_callee);
2611 : :
2612 : 115 : if (!e->call_stmt)
2613 : 68 : continue;
2614 : :
2615 : 115 : tree callee_fntype;
2616 : 115 : enum strub_mode callee_mode
2617 : 115 : = effective_strub_mode_for_call (e->call_stmt, &callee_fntype);
2618 : :
     : : /* Only at-calls (explicit or opt) call sites need adjusting. */
2619 : 183 : if (callee_mode != STRUB_AT_CALLS
2620 : 115 : && callee_mode != STRUB_AT_CALLS_OPT)
2621 : 68 : continue;
2622 : :
2623 : 47 : int named_args = adjust_at_calls_type (callee_fntype);
2624 : :
2625 : 47 : adjust_at_calls_call (e, named_args, callee_fntype);
2626 : : }
2627 : 115 : pop_cfun ();
2628 : : }
2629 : :
     : : /* Now adjust direct calls to known callees, with the same filtering
     : : on the callee's effective strub mode. */
2630 : 1315 : if (node->callees)
2631 : : {
2632 : 1117 : push_cfun (DECL_STRUCT_FUNCTION (node->decl));
2633 : 10693 : for (cgraph_edge *e = node->callees; e; e = e->next_callee)
2634 : : {
2635 : 9576 : gcc_checking_assert (!e->indirect_unknown_callee);
2636 : :
2637 : 9576 : if (!e->call_stmt)
2638 : 7430 : continue;
2639 : :
2640 : 9576 : tree callee_fntype;
2641 : 9576 : enum strub_mode callee_mode
2642 : 9576 : = effective_strub_mode_for_call (e->call_stmt, &callee_fntype);
2643 : :
2644 : 17006 : if (callee_mode != STRUB_AT_CALLS
2645 : 9576 : && callee_mode != STRUB_AT_CALLS_OPT)
2646 : 7430 : continue;
2647 : :
2648 : 2146 : int named_args = adjust_at_calls_type (callee_fntype);
2649 : :
2650 : 2146 : adjust_at_calls_call (e, named_args, callee_fntype);
2651 : : }
2652 : 1117 : pop_cfun ();
2653 : : }
2654 : 1315 : }
2655 : :
2656 : : /* The strubm (strub mode) pass computes a strub mode for each function in the
2657 : : call graph, and checks, before any inlining, that strub callability
2658 : : requirements in effect are satisfied. */
2659 : :
2660 : : unsigned int
2661 : 644 : pass_ipa_strub_mode::execute (function *)
2662 : : {
     : : /* Reset the order watermark, then assign strub modes to all
     : : functions not yet seen. */
2663 : 644 : last_cgraph_order = 0;
2664 : 644 : ipa_strub_set_mode_for_new_functions ();
2665 : :
2666 : : /* Verify before any inlining or other transformations. */
2667 : 644 : verify_strub ();
2668 : :
     : : /* No TODO flags for the pass manager. */
2669 : 644 : return 0;
2670 : : }
2671 : :
2672 : : /* Create and return a new strub mode pass for context CTXT. */
2673 : :
2674 : : simple_ipa_opt_pass *
2675 : 285189 : make_pass_ipa_strub_mode (gcc::context *ctxt)
2676 : : {
2677 : 285189 : return new pass_ipa_strub_mode (ctxt);
2678 : : }
2679 : :
2680 : : /* The strub pass proper adjusts types, signatures, and at-calls calls, and
2681 : : splits internal-strub functions. */
2682 : :
2683 : : unsigned int
2684 : 575 : pass_ipa_strub::execute (function *)
2685 : : {
2686 : 575 : cgraph_node *onode;
2687 : :
2688 : 575 : ipa_strub_set_mode_for_new_functions ();
2689 : :
2690 : : /* First, adjust the signature of at-calls functions. We adjust types of
2691 : : at-calls functions first, so that we don't modify types in place unless
2692 : : strub is explicitly requested. */
2693 : 5984 : FOR_EACH_FUNCTION (onode)
2694 : : {
2695 : 2417 : enum strub_mode mode = get_strub_mode (onode);
2696 : :
2697 : 2417 : if (mode == STRUB_AT_CALLS
2698 : 2417 : || mode == STRUB_AT_CALLS_OPT)
2699 : : {
2700 : : /* Create a type variant if strubbing was not explicitly requested in
2701 : : the function type. */
2702 : 414 : if (get_strub_mode_from_type (TREE_TYPE (onode->decl)) != mode)
2703 : 75 : distinctify_node_type (onode);
2704 : :
2705 : 414 : int named_args = adjust_at_calls_type (TREE_TYPE (onode->decl));
2706 : :
2707 : : /* An external function explicitly declared with strub won't have a
2708 : : body. Even with implicit at-calls strub, a function may have had its
2709 : : body removed after we selected the mode, and then we have nothing
2710 : : further to do. */
2711 : 414 : if (!onode->has_gimple_body_p ())
2712 : 70 : continue;
2713 : :
2714 : 344 : tree *pargs = &DECL_ARGUMENTS (onode->decl);
2715 : :
2716 : : /* A noninterposable_alias reuses the same parm decl chain, don't add
2717 : : the parm twice. */
2718 : 0 : bool aliased_parms = (onode->alias && *pargs
2719 : 344 : && DECL_CONTEXT (*pargs) != onode->decl);
2720 : :
2721 : 0 : if (aliased_parms)
2722 : 0 : continue;
2723 : :
2724 : 404 : for (int i = 0; i < named_args; i++)
2725 : 60 : pargs = &DECL_CHAIN (*pargs);
2726 : :
2727 : 344 : tree wmptr = build_decl (DECL_SOURCE_LOCATION (onode->decl),
2728 : : PARM_DECL,
2729 : : get_watermark_ptr (),
2730 : : get_qpwmt ());
2731 : 344 : DECL_ARTIFICIAL (wmptr) = 1;
2732 : 344 : DECL_ARG_TYPE (wmptr) = get_qpwmt ();
2733 : 344 : DECL_CONTEXT (wmptr) = onode->decl;
2734 : 344 : TREE_USED (wmptr) = 1;
2735 : 344 : DECL_CHAIN (wmptr) = *pargs;
2736 : 344 : *pargs = wmptr;
2737 : :
2738 : 344 : if (onode->alias)
2739 : 0 : continue;
2740 : :
2741 : 344 : cgraph_node *nnode = onode;
2742 : 344 : push_cfun (DECL_STRUCT_FUNCTION (nnode->decl));
2743 : :
2744 : 344 : {
2745 : 344 : edge e = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2746 : 344 : gimple_seq seq = call_update_watermark (wmptr, nnode, e->src->count);
2747 : 344 : gsi_insert_seq_on_edge_immediate (e, seq);
2748 : : }
2749 : :
2750 : 344 : if (DECL_STRUCT_FUNCTION (nnode->decl)->calls_alloca)
2751 : : {
2752 : 120 : basic_block bb;
2753 : 657 : FOR_EACH_BB_FN (bb, cfun)
2754 : 1074 : for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
2755 : 4025 : !gsi_end_p (gsi); gsi_next (&gsi))
2756 : : {
2757 : 3488 : gimple *stmt = gsi_stmt (gsi);
2758 : :
2759 : 3488 : gcall *call = dyn_cast <gcall *> (stmt);
2760 : :
2761 : 3488 : if (!call)
2762 : 2222 : continue;
2763 : :
2764 : 1266 : if (gimple_alloca_call_p (call))
2765 : : {
2766 : : /* Capture stack growth. */
2767 : 328 : gimple_seq seq = call_update_watermark (wmptr, NULL,
2768 : 164 : gsi_bb (gsi)
2769 : : ->count);
2770 : 164 : gsi_insert_finally_seq_after_call (gsi, seq);
2771 : : }
2772 : : }
2773 : : }
2774 : :
2775 : 344 : pop_cfun ();
2776 : : }
2777 : : }
2778 : :
2779 : 6524 : FOR_EACH_FUNCTION (onode)
2780 : : {
2781 : 2687 : if (!onode->has_gimple_body_p ())
2782 : 2171 : continue;
2783 : :
2784 : 1315 : enum strub_mode mode = get_strub_mode (onode);
2785 : :
2786 : 1315 : if (mode != STRUB_INTERNAL)
2787 : : {
2788 : 799 : adjust_at_calls_calls (onode);
2789 : 799 : continue;
2790 : : }
2791 : :
2792 : 516 : bool is_stdarg = calls_builtin_va_start_p (onode);;
2793 : 516 : bool apply_args = calls_builtin_apply_args_p (onode);
2794 : :
2795 : 516 : vec<ipa_adjusted_param, va_gc> *nparms = NULL;
2796 : 516 : unsigned j = 0;
2797 : 516 : {
2798 : : // The following loop copied from ipa-split.c:split_function.
2799 : 516 : for (tree parm = DECL_ARGUMENTS (onode->decl);
2800 : 687 : parm; parm = DECL_CHAIN (parm), j++)
2801 : : {
2802 : 171 : ipa_adjusted_param adj = {};
2803 : 171 : adj.op = IPA_PARAM_OP_COPY;
2804 : 171 : adj.base_index = j;
2805 : 171 : adj.prev_clone_index = j;
2806 : 171 : vec_safe_push (nparms, adj);
2807 : : }
2808 : :
2809 : 516 : if (apply_args)
2810 : : {
2811 : 20 : ipa_adjusted_param aaadj = {};
2812 : 20 : aaadj.op = IPA_PARAM_OP_NEW;
2813 : 20 : aaadj.type = get_qptr ();
2814 : 20 : vec_safe_push (nparms, aaadj);
2815 : : }
2816 : :
2817 : 516 : if (is_stdarg)
2818 : : {
2819 : 10 : ipa_adjusted_param vladj = {};
2820 : 10 : vladj.op = IPA_PARAM_OP_NEW;
2821 : 10 : vladj.type = get_qpvalst ();
2822 : 10 : vec_safe_push (nparms, vladj);
2823 : : }
2824 : :
2825 : 516 : ipa_adjusted_param wmadj = {};
2826 : 516 : wmadj.op = IPA_PARAM_OP_NEW;
2827 : 516 : wmadj.type = get_qpwmt ();
2828 : 516 : vec_safe_push (nparms, wmadj);
2829 : : }
2830 : 516 : ipa_param_adjustments adj (nparms, -1, false);
2831 : :
2832 : 516 : cgraph_node *nnode = onode->create_version_clone_with_body
2833 : 516 : (auto_vec<cgraph_edge *> (0),
2834 : : NULL, &adj, NULL, NULL, "strub", NULL);
2835 : :
2836 : 516 : if (!nnode)
2837 : : {
2838 : 0 : error_at (DECL_SOURCE_LOCATION (onode->decl),
2839 : : "failed to split %qD for %<strub%>",
2840 : : onode->decl);
2841 : 0 : continue;
2842 : : }
2843 : :
2844 : 516 : onode->split_part = true;
2845 : 516 : if (onode->calls_comdat_local)
2846 : 0 : nnode->add_to_same_comdat_group (onode);
2847 : :
2848 : 516 : set_strub_mode_to (onode, STRUB_WRAPPER);
2849 : 516 : set_strub_mode_to (nnode, STRUB_WRAPPED);
2850 : :
2851 : 516 : adjust_at_calls_calls (nnode);
2852 : :
2853 : : /* Decide which of the wrapped function's parms we want to turn into
2854 : : references to the argument passed to the wrapper. In general, we want to
2855 : : copy small arguments, and avoid copying large ones. Variable-sized array
2856 : : lengths given by other arguments, as in 20020210-1.c, would lead to
2857 : : problems if passed by value, after resetting the original function and
2858 : : dropping the length computation; passing them by reference works.
2859 : : DECL_BY_REFERENCE is *not* a substitute for this: it involves copying
2860 : : anyway, but performed at the caller. */
2861 : 516 : indirect_parms_t indirect_nparms (3, false);
2862 : 516 : unsigned adjust_ftype = 0;
2863 : 516 : unsigned named_args = 0;
2864 : 516 : for (tree parm = DECL_ARGUMENTS (onode->decl),
2865 : 516 : nparm = DECL_ARGUMENTS (nnode->decl),
2866 : 516 : nparmt = TYPE_ARG_TYPES (TREE_TYPE (nnode->decl));
2867 : 687 : parm;
2868 : : named_args++,
2869 : 171 : parm = DECL_CHAIN (parm),
2870 : 342 : nparm = DECL_CHAIN (nparm),
2871 : 171 : nparmt = nparmt ? TREE_CHAIN (nparmt) : NULL_TREE)
2872 : 171 : if (TREE_THIS_VOLATILE (parm)
2873 : 341 : || !(0 /* DECL_BY_REFERENCE (narg) */
2874 : 170 : || is_gimple_reg_type (TREE_TYPE (nparm))
2875 : 11 : || VECTOR_TYPE_P (TREE_TYPE (nparm))
2876 : 11 : || TREE_CODE (TREE_TYPE (nparm)) == COMPLEX_TYPE
2877 : 11 : || (tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (nparm)))
2878 : 20 : && (tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (nparm)))
2879 : 10 : <= 4 * UNITS_PER_WORD))))
2880 : : {
2881 : : /* No point in indirecting pointer types. Presumably they
2882 : : won't ever pass the size-based test above, but check the
2883 : : assumption here, because getting this wrong would mess
2884 : : with attribute access and possibly others. We deal with
2885 : : fn spec below. */
2886 : 12 : gcc_checking_assert (!POINTER_TYPE_P (TREE_TYPE (nparm)));
2887 : :
2888 : 12 : indirect_nparms.add (nparm);
2889 : :
2890 : : /* ??? Is there any case in which it is not safe to suggest the parms
2891 : : turned indirect don't alias anything else? They are distinct,
2892 : : unaliased memory in the wrapper, and the wrapped can't possibly
2893 : : take pointers into them because none of the pointers passed to the
2894 : : wrapper can alias other incoming parameters passed by value, even
2895 : : if with transparent reference, and the wrapper doesn't take any
2896 : : extra parms that could point into wrapper's parms. So we can
2897 : : probably drop the TREE_ADDRESSABLE and keep the TRUE. */
2898 : 12 : tree ref_type = build_ref_type_for (nparm);
2899 : :
2900 : 12 : if (TREE_THIS_VOLATILE (nparm)
2901 : 1 : && TYPE_VOLATILE (TREE_TYPE (nparm))
2902 : 13 : && !TYPE_VOLATILE (ref_type))
2903 : 1 : TREE_SIDE_EFFECTS (nparm) = TREE_THIS_VOLATILE (nparm) = 0;
2904 : 12 : DECL_ARG_TYPE (nparm) = TREE_TYPE (nparm) = ref_type;
2905 : 12 : relayout_decl (nparm);
2906 : 12 : TREE_ADDRESSABLE (nparm) = 0;
2907 : 12 : DECL_BY_REFERENCE (nparm) = 0;
2908 : 12 : DECL_NOT_GIMPLE_REG_P (nparm) = 0;
2909 : : /* ??? This avoids mismatches in debug info bind stmts in
2910 : : e.g. a-chahan . */
2911 : 12 : DECL_ABSTRACT_ORIGIN (nparm) = NULL;
2912 : :
2913 : 12 : if (nparmt)
2914 : 12 : adjust_ftype++;
2915 : : }
2916 : :
2917 : : /* Also adjust the wrapped function type, if needed. */
2918 : 516 : if (adjust_ftype)
2919 : : {
2920 : 12 : tree nftype = TREE_TYPE (nnode->decl);
2921 : :
2922 : : /* We always add at least one argument at the end of the signature, when
2923 : : cloning the function, so we don't expect to need to duplicate the
2924 : : type here. */
2925 : 12 : gcc_checking_assert (TYPE_ARG_TYPES (nftype)
2926 : : != TYPE_ARG_TYPES (TREE_TYPE (onode->decl)));
2927 : :
2928 : : /* Check that fnspec still works for the modified function signature,
2929 : : and drop it otherwise. */
2930 : 12 : bool drop_fnspec = false;
2931 : 12 : tree fnspec = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (nftype));
2932 : 12 : attr_fnspec spec = fnspec ? attr_fnspec (fnspec) : attr_fnspec ("");
2933 : :
2934 : 12 : unsigned retcopy;
2935 : 12 : if (!(fnspec && spec.returns_arg (&retcopy)))
2936 : : retcopy = (unsigned) -1;
2937 : :
2938 : 12 : unsigned i = 0;
2939 : 12 : for (tree nparm = DECL_ARGUMENTS (nnode->decl),
2940 : 12 : nparmt = TYPE_ARG_TYPES (nftype);
2941 : 25 : adjust_ftype > 0;
2942 : 13 : i++, nparm = DECL_CHAIN (nparm), nparmt = TREE_CHAIN (nparmt))
2943 : 13 : if (indirect_nparms.contains (nparm))
2944 : : {
2945 : 12 : TREE_VALUE (nparmt) = TREE_TYPE (nparm);
2946 : 12 : adjust_ftype--;
2947 : :
2948 : 12 : if (fnspec && !drop_fnspec)
2949 : : {
2950 : 0 : if (i == retcopy)
2951 : : drop_fnspec = true;
2952 : 0 : else if (spec.arg_specified_p (i))
2953 : : {
2954 : : /* Properties that apply to pointers only must not be
2955 : : present, because we don't make pointers further
2956 : : indirect. */
2957 : 0 : gcc_checking_assert
2958 : : (!spec.arg_max_access_size_given_by_arg_p (i, NULL));
2959 : 0 : gcc_checking_assert (!spec.arg_copied_to_arg_p (i, NULL));
2960 : :
2961 : : /* Any claim of direct access only is invalidated by
2962 : : adding an indirection level. */
2963 : 0 : if (spec.arg_direct_p (i))
2964 : : drop_fnspec = true;
2965 : :
2966 : : /* If there's a claim the argument is not read from, the
2967 : : added indirection invalidates it: if the argument is
2968 : : used at all, then the pointer will necessarily be
2969 : : read. */
2970 : 0 : if (!spec.arg_maybe_read_p (i)
2971 : 0 : && spec.arg_used_p (i))
2972 : : drop_fnspec = true;
2973 : : }
2974 : : }
2975 : : }
2976 : :
2977 : : /* ??? Maybe we could adjust it instead. Note we don't need
2978 : : to mess with attribute access: pointer-typed parameters are
2979 : : not modified, so they can remain unchanged. */
2980 : 12 : if (drop_fnspec)
2981 : 0 : remove_named_attribute_unsharing ("fn spec",
2982 : 0 : &TYPE_ATTRIBUTES (nftype));
2983 : :
2984 : 12 : TREE_TYPE (nnode->decl) = nftype;
2985 : : }
2986 : :
2987 : : #if ATTR_FNSPEC_DECONST_WATERMARK
2988 : : {
2989 : : int flags = flags_from_decl_or_type (nnode->decl);
2990 : : tree fnspec = lookup_attribute ("fn spec", TREE_TYPE (nnode->decl));
2991 : :
2992 : : if ((flags & (ECF_CONST | ECF_PURE | ECF_NOVOPS)) || fnspec)
2993 : : {
2994 : : size_t xargs = 1 + int (is_stdarg) + int (apply_args);
2995 : : size_t curlen = 0, tgtlen = 2 + 2 * (named_args + xargs);
2996 : : auto_vec<char> nspecv (tgtlen);
2997 : : char *nspec = &nspecv[0]; /* It will *not* be NUL-terminated! */
2998 : : bool no_writes_p = true;
2999 : : if (fnspec)
3000 : : {
3001 : : tree fnspecstr = TREE_VALUE (TREE_VALUE (fnspec));
3002 : : curlen = TREE_STRING_LENGTH (fnspecstr);
3003 : : memcpy (nspec, TREE_STRING_POINTER (fnspecstr), curlen);
3004 : : if (!(flags & (ECF_CONST | ECF_PURE | ECF_NOVOPS))
3005 : : && curlen >= 2
3006 : : && nspec[1] != 'c' && nspec[1] != 'C'
3007 : : && nspec[1] != 'p' && nspec[1] != 'P')
3008 : : no_writes_p = false;
3009 : : }
3010 : : if (!curlen)
3011 : : {
3012 : : nspec[curlen++] = '.';
3013 : : nspec[curlen++] = ((flags & ECF_CONST)
3014 : : ? 'c'
3015 : : : (flags & ECF_PURE)
3016 : : ? 'p'
3017 : : : ' ');
3018 : : }
3019 : : while (curlen < tgtlen - 2 * xargs)
3020 : : {
3021 : : nspec[curlen++] = '.';
3022 : : nspec[curlen++] = ' ';
3023 : : }
3024 : :
3025 : : /* These extra args are unlikely to be present in const or pure
3026 : : functions. It's conceivable that a function that takes variable
3027 : : arguments, or that passes its arguments on to another function,
3028 : : could be const or pure, but it would not modify the arguments, and,
3029 : : being pure or const, it couldn't possibly modify or even access
3030 : : memory referenced by them. But it can read from these internal
3031 : : data structures created by the wrapper, and from any
3032 : : argument-passing memory referenced by them, so we denote the
3033 : : possibility of reading from multiple levels of indirection, but
3034 : : only of reading because const/pure. */
3035 : : if (apply_args)
3036 : : {
3037 : : nspec[curlen++] = 'r';
3038 : : nspec[curlen++] = ' ';
3039 : : }
3040 : : if (is_stdarg)
3041 : : {
3042 : : nspec[curlen++] = (no_writes_p ? 'r' : '.');
3043 : : nspec[curlen++] = (no_writes_p ? 't' : ' ');
3044 : : }
3045 : :
3046 : : nspec[curlen++] = 'W';
3047 : : nspec[curlen++] = 't';
3048 : :
3049 : : /* The type has already been copied before adding parameters. */
3050 : : gcc_checking_assert (TYPE_ARG_TYPES (TREE_TYPE (nnode->decl))
3051 : : != TYPE_ARG_TYPES (TREE_TYPE (onode->decl)));
3052 : : TYPE_ATTRIBUTES (TREE_TYPE (nnode->decl))
3053 : : = tree_cons (get_identifier ("fn spec"),
3054 : : build_tree_list (NULL_TREE,
3055 : : build_string (tgtlen, nspec)),
3056 : : TYPE_ATTRIBUTES (TREE_TYPE (nnode->decl)));
3057 : : }
3058 : : }
3059 : : #endif
3060 : :
3061 : 516 : {
3062 : 516 : tree decl = onode->decl;
3063 : 516 : cgraph_node *target = nnode;
3064 : :
3065 : 516 : { // copied from create_wrapper
3066 : :
3067 : : /* Preserve DECL_RESULT so we get right by reference flag. */
3068 : 516 : tree decl_result = DECL_RESULT (decl);
3069 : :
3070 : : /* Remove the function's body but keep arguments to be reused
3071 : : for thunk. */
3072 : 516 : onode->release_body (true);
3073 : 516 : onode->reset (/* unlike create_wrapper: preserve_comdat_group = */true);
3074 : :
3075 : 516 : DECL_UNINLINABLE (decl) = false;
3076 : 516 : DECL_RESULT (decl) = decl_result;
3077 : 516 : DECL_INITIAL (decl) = NULL;
3078 : 516 : allocate_struct_function (decl, false);
3079 : 516 : set_cfun (NULL);
3080 : :
3081 : : /* Turn alias into thunk and expand it into GIMPLE representation. */
3082 : 516 : onode->definition = true;
3083 : :
3084 : 516 : thunk_info::get_create (onode);
3085 : 516 : onode->thunk = true;
3086 : 516 : onode->create_edge (target, NULL, onode->count);
3087 : 516 : onode->callees->can_throw_external = !TREE_NOTHROW (target->decl);
3088 : :
3089 : 516 : tree arguments = DECL_ARGUMENTS (decl);
3090 : :
3091 : 687 : while (arguments)
3092 : : {
3093 : 171 : TREE_ADDRESSABLE (arguments) = false;
3094 : 171 : arguments = TREE_CHAIN (arguments);
3095 : : }
3096 : :
3097 : 516 : {
3098 : 516 : tree alias = onode->callees->callee->decl;
3099 : 516 : tree thunk_fndecl = decl;
3100 : 516 : tree a;
3101 : :
3102 : 516 : int nxargs = 1 + is_stdarg + apply_args;
3103 : :
3104 : 516 : { // Simplified from expand_thunk.
3105 : 516 : tree restype;
3106 : 516 : basic_block bb, then_bb, else_bb, return_bb;
3107 : 516 : gimple_stmt_iterator bsi;
3108 : 516 : int nargs = 0;
3109 : 516 : tree arg;
3110 : 516 : int i;
3111 : 516 : tree resdecl;
3112 : 516 : tree restmp = NULL;
3113 : :
3114 : 516 : gcall *call;
3115 : 516 : greturn *ret;
3116 : 516 : bool alias_is_noreturn = TREE_THIS_VOLATILE (alias);
3117 : :
3118 : 516 : a = DECL_ARGUMENTS (thunk_fndecl);
3119 : :
3120 : 516 : current_function_decl = thunk_fndecl;
3121 : :
3122 : : /* Ensure thunks are emitted in their correct sections. */
3123 : 516 : resolve_unique_section (thunk_fndecl, 0,
3124 : : flag_function_sections);
3125 : :
3126 : 516 : bitmap_obstack_initialize (NULL);
3127 : :
3128 : : /* Build the return declaration for the function. */
3129 : 516 : restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
3130 : 516 : if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
3131 : : {
3132 : 0 : resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
3133 : 0 : DECL_ARTIFICIAL (resdecl) = 1;
3134 : 0 : DECL_IGNORED_P (resdecl) = 1;
3135 : 0 : DECL_CONTEXT (resdecl) = thunk_fndecl;
3136 : 0 : DECL_RESULT (thunk_fndecl) = resdecl;
3137 : : }
3138 : : else
3139 : : resdecl = DECL_RESULT (thunk_fndecl);
3140 : :
3141 : 516 : profile_count cfg_count = onode->count;
3142 : 516 : if (!cfg_count.initialized_p ())
3143 : 192 : cfg_count = profile_count::from_gcov_type (BB_FREQ_MAX).guessed_local ();
3144 : :
3145 : 1032 : bb = then_bb = else_bb = return_bb
3146 : 516 : = init_lowered_empty_function (thunk_fndecl, true, cfg_count);
3147 : :
3148 : 516 : bsi = gsi_start_bb (bb);
3149 : :
3150 : : /* Build call to the function being thunked. */
3151 : 516 : if (!VOID_TYPE_P (restype)
3152 : 516 : && (!alias_is_noreturn
3153 : 0 : || TREE_ADDRESSABLE (restype)
3154 : 0 : || TREE_CODE (TYPE_SIZE_UNIT (restype)) != INTEGER_CST))
3155 : : {
3156 : 379 : if (DECL_BY_REFERENCE (resdecl))
3157 : : {
3158 : 8 : restmp = gimple_fold_indirect_ref (resdecl);
3159 : 8 : if (!restmp)
3160 : 16 : restmp = build2 (MEM_REF,
3161 : 8 : TREE_TYPE (TREE_TYPE (resdecl)),
3162 : : resdecl,
3163 : 8 : build_int_cst (TREE_TYPE (resdecl), 0));
3164 : : }
3165 : 371 : else if (aggregate_value_p (resdecl, TREE_TYPE (thunk_fndecl)))
3166 : : {
3167 : 1 : restmp = resdecl;
3168 : :
3169 : 1 : if (VAR_P (restmp))
3170 : : {
3171 : 0 : add_local_decl (cfun, restmp);
3172 : 0 : BLOCK_VARS (DECL_INITIAL (current_function_decl))
3173 : 0 : = restmp;
3174 : : }
3175 : : }
3176 : : else
3177 : 370 : restmp = create_tmp_reg (restype, "retval");
3178 : : }
3179 : :
3180 : 687 : for (arg = a; arg; arg = DECL_CHAIN (arg))
3181 : 171 : nargs++;
3182 : 516 : auto_vec<tree> vargs (nargs + nxargs);
3183 : 516 : i = 0;
3184 : 516 : arg = a;
3185 : :
3186 : 516 : if (nargs)
3187 : 76 : for (tree nparm = DECL_ARGUMENTS (nnode->decl);
3188 : 247 : i < nargs;
3189 : 171 : i++, arg = DECL_CHAIN (arg), nparm = DECL_CHAIN (nparm))
3190 : : {
3191 : 171 : tree save_arg = arg;
3192 : :
3193 : : /* Arrange to pass indirectly the parms, if we decided to do
3194 : : so, and revert its type in the wrapper. */
3195 : 171 : if (indirect_nparms.contains (nparm))
3196 : : {
3197 : 12 : tree ref_type = TREE_TYPE (nparm);
3198 : 12 : TREE_ADDRESSABLE (arg) = true;
3199 : 12 : arg = build1 (ADDR_EXPR, ref_type, arg);
3200 : : }
3201 : 159 : else if (!TREE_THIS_VOLATILE (arg))
3202 : 159 : DECL_NOT_GIMPLE_REG_P (arg) = 0;
3203 : :
3204 : : /* Convert the argument back to the type used by the calling
3205 : : conventions, e.g. a non-prototyped float type is passed as
3206 : : double, as in 930603-1.c, and needs to be converted back to
3207 : : double to be passed on unchanged to the wrapped
3208 : : function. */
3209 : 171 : if (TREE_TYPE (nparm) != DECL_ARG_TYPE (nparm))
3210 : : {
3211 : 10 : tree tmp = arg;
3212 : : /* If ARG is e.g. volatile, we must copy and
3213 : : convert in separate statements. */
3214 : 10 : if (!is_gimple_val (arg))
3215 : : {
3216 : 0 : tmp = create_tmp_reg (TYPE_MAIN_VARIANT
3217 : : (TREE_TYPE (arg)), "arg");
3218 : 0 : gimple *stmt = gimple_build_assign (tmp, arg);
3219 : 0 : gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
3220 : : }
3221 : 10 : arg = fold_convert (DECL_ARG_TYPE (nparm), tmp);
3222 : : }
3223 : :
3224 : 171 : if (!is_gimple_val (arg))
3225 : : {
3226 : 10 : tree tmp = create_tmp_reg (TYPE_MAIN_VARIANT
3227 : : (TREE_TYPE (arg)), "arg");
3228 : 10 : gimple *stmt = gimple_build_assign (tmp, arg);
3229 : 10 : gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
3230 : 10 : arg = tmp;
3231 : : }
3232 : 171 : vargs.quick_push (arg);
3233 : 171 : arg = save_arg;
3234 : : }
3235 : : /* These strub arguments are adjusted later. */
3236 : 516 : if (apply_args)
3237 : 20 : vargs.quick_push (null_pointer_node);
3238 : 516 : if (is_stdarg)
3239 : 10 : vargs.quick_push (null_pointer_node);
3240 : 516 : vargs.quick_push (null_pointer_node);
3241 : 516 : call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias),
3242 : : vargs);
3243 : 516 : onode->callees->call_stmt = call;
3244 : : // gimple_call_set_from_thunk (call, true);
3245 : 516 : if (DECL_STATIC_CHAIN (alias))
3246 : : {
3247 : 0 : tree p = DECL_STRUCT_FUNCTION (alias)->static_chain_decl;
3248 : 0 : tree type = TREE_TYPE (p);
3249 : 0 : tree decl = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
3250 : : PARM_DECL, create_tmp_var_name ("CHAIN"),
3251 : : type);
3252 : 0 : DECL_ARTIFICIAL (decl) = 1;
3253 : 0 : DECL_IGNORED_P (decl) = 1;
3254 : 0 : TREE_USED (decl) = 1;
3255 : 0 : DECL_CONTEXT (decl) = thunk_fndecl;
3256 : 0 : DECL_ARG_TYPE (decl) = type;
3257 : 0 : TREE_READONLY (decl) = 1;
3258 : :
3259 : 0 : struct function *sf = DECL_STRUCT_FUNCTION (thunk_fndecl);
3260 : 0 : sf->static_chain_decl = decl;
3261 : :
3262 : 0 : gimple_call_set_chain (call, decl);
3263 : : }
3264 : :
3265 : : /* Return slot optimization is always possible and in fact required to
3266 : : return values with DECL_BY_REFERENCE. */
3267 : 516 : if (aggregate_value_p (resdecl, TREE_TYPE (thunk_fndecl))
3268 : 516 : && (!is_gimple_reg_type (TREE_TYPE (resdecl))
3269 : 9 : || DECL_BY_REFERENCE (resdecl)))
3270 : 8 : gimple_call_set_return_slot_opt (call, true);
3271 : :
3272 : 516 : if (restmp)
3273 : : {
3274 : 379 : gimple_call_set_lhs (call, restmp);
3275 : 379 : gcc_assert (useless_type_conversion_p (TREE_TYPE (restmp),
3276 : : TREE_TYPE (TREE_TYPE (alias))));
3277 : : }
3278 : 516 : gsi_insert_after (&bsi, call, GSI_NEW_STMT);
3279 : 516 : if (!alias_is_noreturn)
3280 : : {
3281 : : /* Build return value. */
3282 : 516 : if (!DECL_BY_REFERENCE (resdecl))
3283 : 508 : ret = gimple_build_return (restmp);
3284 : : else
3285 : 8 : ret = gimple_build_return (resdecl);
3286 : :
3287 : 516 : gsi_insert_after (&bsi, ret, GSI_NEW_STMT);
3288 : : }
3289 : : else
3290 : : {
3291 : 0 : remove_edge (single_succ_edge (bb));
3292 : : }
3293 : :
3294 : 516 : cfun->gimple_df->in_ssa_p = true;
3295 : 516 : update_max_bb_count ();
3296 : 516 : profile_status_for_fn (cfun)
3297 : 1032 : = cfg_count.initialized_p () && cfg_count.ipa_p ()
3298 : 516 : ? PROFILE_READ : PROFILE_GUESSED;
3299 : : /* FIXME: C++ FE should stop setting TREE_ASM_WRITTEN on thunks. */
3300 : : // TREE_ASM_WRITTEN (thunk_fndecl) = false;
3301 : 516 : delete_unreachable_blocks ();
3302 : 516 : update_ssa (TODO_update_ssa);
3303 : 516 : checking_verify_flow_info ();
3304 : 516 : free_dominance_info (CDI_DOMINATORS);
3305 : :
3306 : : /* Since we want to emit the thunk, we explicitly mark its name as
3307 : : referenced. */
3308 : 516 : onode->thunk = false;
3309 : 516 : onode->lowered = true;
3310 : 516 : bitmap_obstack_release (NULL);
3311 : 516 : }
3312 : 516 : current_function_decl = NULL;
3313 : 516 : set_cfun (NULL);
3314 : : }
3315 : :
3316 : 516 : thunk_info::remove (onode);
3317 : :
3318 : : // some more of create_wrapper at the end of the next block.
3319 : : }
3320 : : }
3321 : :
3322 : 516 : {
3323 : 516 : tree aaval = NULL_TREE;
3324 : 516 : tree vaptr = NULL_TREE;
3325 : 516 : tree wmptr = NULL_TREE;
3326 : 1233 : for (tree arg = DECL_ARGUMENTS (nnode->decl); arg; arg = DECL_CHAIN (arg))
3327 : : {
3328 : 717 : aaval = vaptr;
3329 : 717 : vaptr = wmptr;
3330 : 717 : wmptr = arg;
3331 : : }
3332 : :
3333 : 516 : if (!apply_args)
3334 : : aaval = NULL_TREE;
3335 : : /* The trailing args are [apply_args], [va_list_ptr], and
3336 : : watermark. If we don't have a va_list_ptr, the penultimate
3337 : : argument is apply_args.
3338 : : */
3339 : 20 : else if (!is_stdarg)
3340 : 20 : aaval = vaptr;
3341 : :
3342 : 516 : if (!is_stdarg)
3343 : 506 : vaptr = NULL_TREE;
3344 : :
3345 : 516 : DECL_NAME (wmptr) = get_watermark_ptr ();
3346 : 516 : DECL_ARTIFICIAL (wmptr) = 1;
3347 : 516 : DECL_IGNORED_P (wmptr) = 1;
3348 : 516 : TREE_USED (wmptr) = 1;
3349 : :
3350 : 516 : if (is_stdarg)
3351 : : {
3352 : 10 : DECL_NAME (vaptr) = get_va_list_ptr ();
3353 : 10 : DECL_ARTIFICIAL (vaptr) = 1;
3354 : 10 : DECL_IGNORED_P (vaptr) = 1;
3355 : 10 : TREE_USED (vaptr) = 1;
3356 : : }
3357 : :
3358 : 516 : if (apply_args)
3359 : : {
3360 : 20 : DECL_NAME (aaval) = get_apply_args ();
3361 : 20 : DECL_ARTIFICIAL (aaval) = 1;
3362 : 20 : DECL_IGNORED_P (aaval) = 1;
3363 : 20 : TREE_USED (aaval) = 1;
3364 : : }
3365 : :
3366 : 516 : push_cfun (DECL_STRUCT_FUNCTION (nnode->decl));
3367 : :
3368 : 516 : {
3369 : 516 : edge e = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
3370 : 516 : gimple_seq seq = call_update_watermark (wmptr, nnode, e->src->count);
3371 : 516 : gsi_insert_seq_on_edge_immediate (e, seq);
3372 : : }
3373 : :
3374 : 516 : bool any_indirect = !indirect_nparms.is_empty ();
3375 : :
3376 : 516 : if (any_indirect)
3377 : : {
3378 : 12 : basic_block bb;
3379 : 12 : bool needs_commit = false;
3380 : 36 : FOR_EACH_BB_FN (bb, cfun)
3381 : : {
3382 : 24 : for (gphi_iterator gsi = gsi_start_nonvirtual_phis (bb);
3383 : 24 : !gsi_end_p (gsi);
3384 : 0 : gsi_next_nonvirtual_phi (&gsi))
3385 : : {
3386 : 0 : gphi *stmt = gsi.phi ();
3387 : :
3388 : 0 : walk_stmt_info wi = {};
3389 : 0 : wi.info = &indirect_nparms;
3390 : 0 : walk_gimple_op (stmt, walk_make_indirect, &wi);
3391 : 0 : if (wi.changed && !is_gimple_debug (gsi_stmt (gsi)))
3392 : 0 : if (walk_regimplify_phi (stmt))
3393 : 0 : needs_commit = true;
3394 : : }
3395 : :
3396 : 48 : for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
3397 : 98 : !gsi_end_p (gsi); gsi_next (&gsi))
3398 : : {
3399 : 74 : gimple *stmt = gsi_stmt (gsi);
3400 : :
3401 : 74 : walk_stmt_info wi = {};
3402 : 74 : wi.info = &indirect_nparms;
3403 : 74 : walk_gimple_op (stmt, walk_make_indirect, &wi);
3404 : 74 : if (wi.changed)
3405 : : {
3406 : 0 : if (!is_gimple_debug (stmt))
3407 : : {
3408 : 0 : wi.info = &gsi;
3409 : 0 : walk_gimple_op (stmt, walk_regimplify_addr_expr,
3410 : : &wi);
3411 : : }
3412 : 0 : update_stmt (stmt);
3413 : : }
3414 : : }
3415 : : }
3416 : 12 : if (needs_commit)
3417 : 0 : gsi_commit_edge_inserts ();
3418 : : }
3419 : :
3420 : 516 : if (DECL_STRUCT_FUNCTION (nnode->decl)->calls_alloca
3421 : 516 : || is_stdarg || apply_args)
3422 : 607 : for (cgraph_edge *e = nnode->callees, *enext; e; e = enext)
3423 : : {
3424 : 517 : if (!e->call_stmt)
3425 : 0 : continue;
3426 : :
3427 : 517 : gcall *call = e->call_stmt;
3428 : 517 : gimple_stmt_iterator gsi = gsi_for_stmt (call);
3429 : 517 : tree fndecl = e->callee->decl;
3430 : :
3431 : 517 : enext = e->next_callee;
3432 : :
3433 : 517 : if (gimple_alloca_call_p (call))
3434 : : {
3435 : 74 : gimple_seq seq = call_update_watermark (wmptr, NULL,
3436 : 74 : gsi_bb (gsi)->count);
3437 : 74 : gsi_insert_finally_seq_after_call (gsi, seq);
3438 : : }
3439 : 443 : else if (fndecl && is_stdarg
3440 : 443 : && fndecl_built_in_p (fndecl, BUILT_IN_VA_START))
3441 : : {
3442 : : /* Using a non-default stdarg ABI makes the function ineligible
3443 : : for internal strub. */
3444 : 10 : gcc_checking_assert (builtin_decl_explicit (BUILT_IN_VA_START)
3445 : : == fndecl);
3446 : 10 : tree bvacopy = builtin_decl_explicit (BUILT_IN_VA_COPY);
3447 : 10 : gimple_call_set_fndecl (call, bvacopy);
3448 : 10 : tree arg = vaptr;
3449 : : /* The va_copy source must be dereferenced, unless it's an array
3450 : : type, that would have decayed to a pointer. */
3451 : 10 : if (TREE_CODE (TREE_TYPE (TREE_TYPE (vaptr))) != ARRAY_TYPE)
3452 : : {
3453 : 0 : arg = gimple_fold_indirect_ref (vaptr);
3454 : 0 : if (!arg)
3455 : 0 : arg = build2 (MEM_REF,
3456 : 0 : TREE_TYPE (TREE_TYPE (vaptr)),
3457 : : vaptr,
3458 : 0 : build_int_cst (TREE_TYPE (vaptr), 0));
3459 : 0 : if (!is_gimple_val (arg))
3460 : 0 : arg = force_gimple_operand_gsi (&gsi, arg, true,
3461 : : NULL_TREE, true, GSI_SAME_STMT);
3462 : : }
3463 : 10 : gimple_call_set_arg (call, 1, arg);
3464 : 10 : update_stmt (call);
3465 : 10 : e->redirect_callee (cgraph_node::get_create (bvacopy));
3466 : : }
3467 : 433 : else if (fndecl && apply_args
3468 : 433 : && fndecl_built_in_p (fndecl, BUILT_IN_APPLY_ARGS))
3469 : : {
3470 : 20 : tree lhs = gimple_call_lhs (call);
3471 : 20 : gimple *assign = (lhs
3472 : 20 : ? gimple_build_assign (lhs, aaval)
3473 : 15 : : gimple_build_nop ());
3474 : 20 : gsi_replace (&gsi, assign, true);
3475 : 20 : cgraph_edge::remove (e);
3476 : : }
3477 : : }
3478 : :
3479 : 516 : { // a little more copied from create_wrapper
3480 : :
3481 : : /* Inline summary set-up. */
3482 : 516 : nnode->analyze ();
3483 : : // inline_analyze_function (nnode);
3484 : : }
3485 : :
3486 : 516 : pop_cfun ();
3487 : : }
3488 : :
3489 : 516 : {
3490 : 516 : push_cfun (DECL_STRUCT_FUNCTION (onode->decl));
3491 : 516 : gimple_stmt_iterator gsi
3492 : 516 : = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
3493 : :
3494 : 516 : gcall *wrcall;
3495 : 526 : while (!(wrcall = dyn_cast <gcall *> (gsi_stmt (gsi))))
3496 : 10 : gsi_next (&gsi);
3497 : :
3498 : 516 : tree swm = create_tmp_var (get_wmt (), ".strub.watermark");
3499 : 516 : TREE_ADDRESSABLE (swm) = true;
3500 : 516 : tree swmp = build1 (ADDR_EXPR, get_pwmt (), swm);
3501 : :
3502 : 516 : tree enter = get_enter ();
3503 : 516 : gcall *stptr = gimple_build_call (enter, 1, unshare_expr (swmp));
3504 : 516 : gimple_set_location (stptr, gimple_location (wrcall));
3505 : 516 : gsi_insert_before (&gsi, stptr, GSI_SAME_STMT);
3506 : 516 : onode->create_edge (cgraph_node::get_create (enter),
3507 : 516 : stptr, gsi_bb (gsi)->count, false);
3508 : :
3509 : 516 : int nargs = gimple_call_num_args (wrcall);
3510 : :
3511 : 516 : gimple_seq seq = NULL;
3512 : :
3513 : 516 : if (apply_args)
3514 : : {
3515 : 20 : tree aalst = create_tmp_var (ptr_type_node, ".strub.apply_args");
3516 : 20 : tree bappargs = builtin_decl_explicit (BUILT_IN_APPLY_ARGS);
3517 : 20 : gcall *appargs = gimple_build_call (bappargs, 0);
3518 : 20 : gimple_call_set_lhs (appargs, aalst);
3519 : 20 : gimple_set_location (appargs, gimple_location (wrcall));
3520 : 20 : gsi_insert_before (&gsi, appargs, GSI_SAME_STMT);
3521 : 20 : gimple_call_set_arg (wrcall, nargs - 2 - is_stdarg, aalst);
3522 : 20 : onode->create_edge (cgraph_node::get_create (bappargs),
3523 : 20 : appargs, gsi_bb (gsi)->count, false);
3524 : : }
3525 : :
3526 : 516 : if (is_stdarg)
3527 : : {
3528 : 10 : tree valst = create_tmp_var (va_list_type_node, ".strub.va_list");
3529 : 10 : TREE_ADDRESSABLE (valst) = true;
3530 : 10 : tree vaptr = build1 (ADDR_EXPR,
3531 : : build_pointer_type (va_list_type_node),
3532 : : valst);
3533 : 10 : gimple_call_set_arg (wrcall, nargs - 2, unshare_expr (vaptr));
3534 : :
3535 : 10 : tree bvastart = builtin_decl_explicit (BUILT_IN_VA_START);
3536 : 10 : gcall *vastart = gimple_build_call (bvastart, 2,
3537 : : unshare_expr (vaptr),
3538 : : integer_zero_node);
3539 : 10 : gimple_set_location (vastart, gimple_location (wrcall));
3540 : 10 : gsi_insert_before (&gsi, vastart, GSI_SAME_STMT);
3541 : 10 : onode->create_edge (cgraph_node::get_create (bvastart),
3542 : 10 : vastart, gsi_bb (gsi)->count, false);
3543 : :
3544 : 10 : tree bvaend = builtin_decl_explicit (BUILT_IN_VA_END);
3545 : 10 : gcall *vaend = gimple_build_call (bvaend, 1, unshare_expr (vaptr));
3546 : 10 : gimple_set_location (vaend, gimple_location (wrcall));
3547 : 10 : gimple_seq_add_stmt (&seq, vaend);
3548 : : }
3549 : :
3550 : 516 : gimple_call_set_arg (wrcall, nargs - 1, unshare_expr (swmp));
3551 : : // gimple_call_set_tail (wrcall, false);
3552 : 516 : update_stmt (wrcall);
3553 : :
3554 : 516 : {
3555 : : #if !ATTR_FNSPEC_DECONST_WATERMARK
3556 : : /* If the call will be assumed to not modify or even read the
3557 : : watermark, make it read and modified ourselves. */
3558 : 516 : if ((gimple_call_flags (wrcall)
3559 : 516 : & (ECF_CONST | ECF_PURE | ECF_NOVOPS)))
3560 : : {
3561 : 127 : vec<tree, va_gc> *inputs = NULL;
3562 : 127 : vec<tree, va_gc> *outputs = NULL;
3563 : 254 : vec_safe_push (outputs,
3564 : : build_tree_list
3565 : 127 : (build_tree_list
3566 : : (NULL_TREE, build_string (2, "=m")),
3567 : : swm));
3568 : 254 : vec_safe_push (inputs,
3569 : : build_tree_list
3570 : 127 : (build_tree_list
3571 : : (NULL_TREE, build_string (1, "m")),
3572 : : swm));
3573 : 127 : gasm *forcemod = gimple_build_asm_vec ("", inputs, outputs,
3574 : : NULL, NULL);
3575 : 127 : gimple_seq_add_stmt (&seq, forcemod);
3576 : :
3577 : : /* If the call will be assumed to not even read the watermark,
3578 : : make sure it is already in memory before the call. */
3579 : 127 : if ((gimple_call_flags (wrcall) & ECF_CONST))
3580 : : {
3581 : 40 : vec<tree, va_gc> *inputs = NULL;
3582 : 80 : vec_safe_push (inputs,
3583 : : build_tree_list
3584 : 40 : (build_tree_list
3585 : : (NULL_TREE, build_string (1, "m")),
3586 : : swm));
3587 : 40 : gasm *force_store = gimple_build_asm_vec ("", inputs, NULL,
3588 : : NULL, NULL);
3589 : 40 : gimple_set_location (force_store, gimple_location (wrcall));
3590 : 40 : gsi_insert_before (&gsi, force_store, GSI_SAME_STMT);
3591 : : }
3592 : : }
3593 : : #endif
3594 : :
3595 : 516 : gcall *sleave = gimple_build_call (get_leave (), 1,
3596 : : unshare_expr (swmp));
3597 : 516 : gimple_seq_add_stmt (&seq, sleave);
3598 : :
3599 : 516 : gassign *clobber = gimple_build_assign (swm,
3600 : : build_clobber
3601 : 516 : (TREE_TYPE (swm)));
3602 : 516 : gimple_seq_add_stmt (&seq, clobber);
3603 : : }
3604 : :
3605 : 516 : gsi_insert_finally_seq_after_call (gsi, seq);
3606 : :
3607 : : /* For nnode, we don't rebuild edges because we wish to retain
3608 : : any redirections copied to it from earlier passes, so we add
3609 : : call graph edges explicitly there, but for onode, we create a
3610 : : fresh function, so we may as well just issue the calls and
3611 : : then rebuild all cgraph edges. */
3612 : : // cgraph_edge::rebuild_edges ();
3613 : 516 : onode->analyze ();
3614 : : // inline_analyze_function (onode);
3615 : :
3616 : 516 : pop_cfun ();
3617 : : }
3618 : 516 : }
3619 : :
3620 : 575 : return 0;
3621 : : }
3622 : :
3623 : : simple_ipa_opt_pass *
3624 : 285189 : make_pass_ipa_strub (gcc::context *ctxt)
3625 : : {
3626 : 285189 : return new pass_ipa_strub (ctxt);
3627 : : }
3628 : :
3629 : : #include "gt-ipa-strub.h"
|