Line data Source code
1 : /* Default target hook functions.
2 : Copyright (C) 2003-2026 Free Software Foundation, Inc.
3 :
4 : This file is part of GCC.
5 :
6 : GCC is free software; you can redistribute it and/or modify it under
7 : the terms of the GNU General Public License as published by the Free
8 : Software Foundation; either version 3, or (at your option) any later
9 : version.
10 :
11 : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 : WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 : for more details.
15 :
16 : You should have received a copy of the GNU General Public License
17 : along with GCC; see the file COPYING3. If not see
18 : <http://www.gnu.org/licenses/>. */
19 :
20 : /* The migration of target macros to target hooks works as follows:
21 :
22 : 1. Create a target hook that uses the existing target macros to
23 : implement the same functionality.
24 :
25 : 2. Convert all the MI files to use the hook instead of the macro.
26 :
27 : 3. Repeat for a majority of the remaining target macros. This will
28 : take some time.
29 :
30 : 4. Tell target maintainers to start migrating.
31 :
32 : 5. Eventually convert the backends to override the hook instead of
33 : defining the macros. This will take some time too.
34 :
35 : 6. TBD when, poison the macros. Unmigrated targets will break at
36 : this point.
37 :
38 : Note that we expect steps 1-3 to be done by the people that
39 : understand what the MI does with each macro, and step 5 to be done
40 : by the target maintainers for their respective targets.
41 :
42 : Note that steps 1 and 2 don't have to be done together, but no
43 : target can override the new hook until step 2 is complete for it.
44 :
45 : Once the macros are poisoned, we will revert to the old migration
46 : rules - migrate the macro, callers, and targets all at once. This
47 : comment can thus be removed at that point. */
48 :
49 : #include "config.h"
50 : #include "system.h"
51 : #include "coretypes.h"
52 : #include "target.h"
53 : #include "function.h"
54 : #include "rtl.h"
55 : #include "tree.h"
56 : #include "tree-ssa-alias.h"
57 : #include "gimple-expr.h"
58 : #include "memmodel.h"
59 : #include "backend.h"
60 : #include "emit-rtl.h"
61 : #include "df.h"
62 : #include "tm_p.h"
63 : #include "stringpool.h"
64 : #include "tree-vrp.h"
65 : #include "tree-ssanames.h"
66 : #include "profile-count.h"
67 : #include "optabs.h"
68 : #include "regs.h"
69 : #include "recog.h"
70 : #include "diagnostic-core.h"
71 : #include "fold-const.h"
72 : #include "stor-layout.h"
73 : #include "varasm.h"
74 : #include "flags.h"
75 : #include "explow.h"
76 : #include "expmed.h"
77 : #include "calls.h"
78 : #include "expr.h"
79 : #include "output.h"
80 : #include "common/common-target.h"
81 : #include "reload.h"
82 : #include "intl.h"
83 : #include "opts.h"
84 : #include "gimplify.h"
85 : #include "predict.h"
86 : #include "real.h"
87 : #include "langhooks.h"
88 : #include "sbitmap.h"
89 : #include "function-abi.h"
90 : #include "attribs.h"
91 : #include "asan.h"
92 : #include "emit-rtl.h"
93 : #include "gimple.h"
94 : #include "cfgloop.h"
95 : #include "tree-vectorizer.h"
96 : #include "options.h"
97 : #include "case-cfn-macros.h"
98 : #include "avoid-store-forwarding.h"
99 :
/* The default implementation of TARGET_LEGITIMATE_ADDRESS_P.  Decide
   whether ADDR is a valid address for MODE, strictly (after reload) or
   non-strictly.  Targets that still define the legacy
   GO_IF_LEGITIMATE_ADDRESS macro get its behavior; all other targets
   must override the hook, so reaching the fallback is an ICE.  */
bool
default_legitimate_address_p (machine_mode mode ATTRIBUTE_UNUSED,
			      rtx addr ATTRIBUTE_UNUSED,
			      bool strict ATTRIBUTE_UNUSED,
			      code_helper ATTRIBUTE_UNUSED)
{
#ifdef GO_IF_LEGITIMATE_ADDRESS
  /* Defer to the old implementation using a goto.  */
  if (strict)
    return strict_memory_address_p (mode, addr);
  else
    return memory_address_p (mode, addr);
#else
  /* No legacy macro and no target override: must not be reached.  */
  gcc_unreachable ();
#endif
}
116 :
/* Default hook called when a libcall symbol FUN is referenced; emits the
   assembler directive for an external libcall reference when the target
   defines ASM_OUTPUT_EXTERNAL_LIBCALL, otherwise does nothing.  */
void
default_external_libcall (rtx fun ATTRIBUTE_UNUSED)
{
#ifdef ASM_OUTPUT_EXTERNAL_LIBCALL
  ASM_OUTPUT_EXTERNAL_LIBCALL (asm_out_file, fun);
#endif
}
124 :
125 : int
126 5814932 : default_unspec_may_trap_p (const_rtx x, unsigned flags)
127 : {
128 5814932 : int i;
129 :
130 : /* Any floating arithmetic may trap. */
131 5814932 : if ((SCALAR_FLOAT_MODE_P (GET_MODE (x)) && flag_trapping_math))
132 : return 1;
133 :
134 10054695 : for (i = 0; i < XVECLEN (x, 0); ++i)
135 : {
136 7333202 : if (may_trap_p_1 (XVECEXP (x, 0, i), flags))
137 : return 1;
138 : }
139 :
140 : return 0;
141 : }
142 :
143 : machine_mode
144 15952501 : default_promote_function_mode (const_tree type ATTRIBUTE_UNUSED,
145 : machine_mode mode,
146 : int *punsignedp ATTRIBUTE_UNUSED,
147 : const_tree funtype ATTRIBUTE_UNUSED,
148 : int for_return ATTRIBUTE_UNUSED)
149 : {
150 15952501 : if (type != NULL_TREE && for_return == 2)
151 3657521 : return promote_mode (type, mode, punsignedp);
152 : return mode;
153 : }
154 :
/* Variant of TARGET_PROMOTE_FUNCTION_MODE that unconditionally applies
   the target's ordinary promote_mode rules to TYPE/MODE, regardless of
   whether the value is an argument or a return value.  */
machine_mode
default_promote_function_mode_always_promote (const_tree type,
					      machine_mode mode,
					      int *punsignedp,
					      const_tree funtype ATTRIBUTE_UNUSED,
					      int for_return ATTRIBUTE_UNUSED)
{
  return promote_mode (type, mode, punsignedp);
}
164 :
165 : /* Sign-extend signed 8/16-bit integer arguments to 32 bits and
166 : zero-extend unsigned 8/16-bit integer arguments to 32 bits. */
167 :
168 : machine_mode
169 0 : default_promote_function_mode_sign_extend (const_tree type,
170 : machine_mode mode,
171 : int *punsignedp,
172 : const_tree, int)
173 : {
174 0 : if (GET_MODE_CLASS (mode) == MODE_INT
175 0 : && (GET_MODE_SIZE (as_a <scalar_int_mode> (mode))
176 0 : < GET_MODE_SIZE (SImode)))
177 : return SImode;
178 :
179 0 : return promote_mode (type, mode, punsignedp);
180 : }
181 :
182 : machine_mode
183 0 : default_cc_modes_compatible (machine_mode m1, machine_mode m2)
184 : {
185 0 : if (m1 == m2)
186 0 : return m1;
187 : return VOIDmode;
188 : }
189 :
190 : bool
191 0 : default_return_in_memory (const_tree type,
192 : const_tree fntype ATTRIBUTE_UNUSED)
193 : {
194 0 : return (TYPE_MODE (type) == BLKmode);
195 : }
196 :
/* The default implementation of TARGET_LEGITIMIZE_ADDRESS: perform no
   transformation and return the address X unchanged.  */
rtx
default_legitimize_address (rtx x, rtx orig_x ATTRIBUTE_UNUSED,
			    machine_mode mode ATTRIBUTE_UNUSED)
{
  return x;
}
203 :
/* The default implementation of TARGET_LEGITIMIZE_ADDRESS_DISPLACEMENT:
   report that no base/displacement split is available.  */
bool
default_legitimize_address_displacement (rtx *, rtx *, poly_int64,
					 machine_mode)
{
  return false;
}
210 :
211 : bool
212 0 : default_const_not_ok_for_debug_p (rtx x)
213 : {
214 0 : if (GET_CODE (x) == UNSPEC)
215 0 : return true;
216 : return false;
217 : }
218 :
/* The default implementation of TARGET_EXPAND_BUILTIN_SAVEREGS: the
   builtin is unsupported, so report an error and return a harmless
   constant so expansion can continue.  */
rtx
default_expand_builtin_saveregs (void)
{
  error ("%<__builtin_saveregs%> not supported by this target");
  return const0_rtx;
}
225 :
/* The default implementation of TARGET_SETUP_INCOMING_VARARGS: do
   nothing.  NOTE: this function's address is compared against the hook
   in default_pretend_outgoing_varargs_named, so it must remain the
   canonical "no setup" implementation.  */
void
default_setup_incoming_varargs (cumulative_args_t,
				const function_arg_info &, int *, int)
{
}
231 :
/* The default implementation of TARGET_BUILTIN_SETJMP_FRAME_VALUE:
   use the virtual stack-variables register as the frame value saved
   by __builtin_setjmp.  */

rtx
default_builtin_setjmp_frame_value (void)
{
  return virtual_stack_vars_rtx;
}
239 :
/* Generic hook that takes a CUMULATIVE_ARGS pointer and returns false.  */

bool
hook_bool_CUMULATIVE_ARGS_false (cumulative_args_t ca ATTRIBUTE_UNUSED)
{
  return false;
}
247 :
248 : bool
249 0 : default_pretend_outgoing_varargs_named (cumulative_args_t ca ATTRIBUTE_UNUSED)
250 : {
251 0 : return (targetm.calls.setup_incoming_varargs
252 0 : != default_setup_incoming_varargs);
253 : }
254 :
/* Default mode for the EH-return filter value: the same mode the
   unwinder uses for words.  */
scalar_int_mode
default_eh_return_filter_mode (void)
{
  return targetm.unwind_word_mode ();
}
260 :
/* Default return mode for libgcc comparison routines: word_mode.  */
scalar_int_mode
default_libgcc_cmp_return_mode (void)
{
  return word_mode;
}
266 :
/* Default mode of the shift-count operand of libgcc shift routines:
   word_mode.  */
scalar_int_mode
default_libgcc_shift_count_mode (void)
{
  return word_mode;
}
272 :
/* The default implementation of TARGET_UNWIND_WORD_MODE: the ordinary
   word mode.  */
scalar_int_mode
default_unwind_word_mode (void)
{
  return word_mode;
}
278 :
279 : /* The default implementation of TARGET_SHIFT_TRUNCATION_MASK. */
280 :
281 : unsigned HOST_WIDE_INT
282 1 : default_shift_truncation_mask (machine_mode mode)
283 : {
284 1 : return SHIFT_COUNT_TRUNCATED ? GET_MODE_UNIT_BITSIZE (mode) - 1 : 0;
285 : }
286 :
287 : /* The default implementation of TARGET_MIN_DIVISIONS_FOR_RECIP_MUL. */
288 :
289 : unsigned int
290 207230 : default_min_divisions_for_recip_mul (machine_mode mode ATTRIBUTE_UNUSED)
291 : {
292 207230 : return have_insn_for (DIV, mode) ? 3 : 2;
293 : }
294 :
/* The default implementation of TARGET_MODE_REP_EXTENDED: the target
   makes no guarantee about how narrow values are represented in wider
   registers.  */

int
default_mode_rep_extended (scalar_int_mode, scalar_int_mode)
{
  return UNKNOWN;
}
302 :
/* Generic hook that takes a CUMULATIVE_ARGS pointer and returns true.  */

bool
hook_bool_CUMULATIVE_ARGS_true (cumulative_args_t a ATTRIBUTE_UNUSED)
{
  return true;
}
310 :
/* Return machine mode for non-standard constant literal suffix
   (e.g. 'w' or 'q'), or VOIDmode if non-standard suffixes are
   unsupported — the default for all targets.  */
machine_mode
default_mode_for_suffix (char suffix ATTRIBUTE_UNUSED)
{
  return VOIDmode;
}
318 :
/* Return machine mode for a C floating type identified by the given
   enum tree_index: SFmode for float, DFmode for double and long
   double.  Any other index is an internal error.  */

machine_mode
default_mode_for_floating_type (enum tree_index ti)
{
  if (ti == TI_FLOAT_TYPE)
    return SFmode;
  /* Only the three standard floating types are expected here.  */
  gcc_assert (ti == TI_DOUBLE_TYPE || ti == TI_LONG_DOUBLE_TYPE);
  return DFmode;
}
330 :
/* The generic C++ ABI specifies the one-time-construction guard
   variable is a 64-bit value, hence long long.  */
tree
default_cxx_guard_type (void)
{
  return long_long_integer_type_node;
}
337 :
338 : /* Returns the size of the cookie to use when allocating an array
339 : whose elements have the indicated TYPE. Assumes that it is already
340 : known that a cookie is needed. */
341 :
342 : tree
343 84603 : default_cxx_get_cookie_size (tree type)
344 : {
345 84603 : tree cookie_size;
346 :
347 : /* We need to allocate an additional max (sizeof (size_t), alignof
348 : (true_type)) bytes. */
349 84603 : tree sizetype_size;
350 84603 : tree type_align;
351 :
352 84603 : sizetype_size = size_in_bytes (sizetype);
353 84603 : type_align = size_int (TYPE_ALIGN_UNIT (type));
354 84603 : if (tree_int_cst_lt (type_align, sizetype_size))
355 : cookie_size = sizetype_size;
356 : else
357 19788 : cookie_size = type_align;
358 :
359 84603 : return cookie_size;
360 : }
361 :
/* Returns modified FUNCTION_TYPE for cdtor callabi; by default no
   adjustment is needed, so FNTYPE is returned unchanged.  */

tree
default_cxx_adjust_cdtor_callabi_fntype (tree fntype)
{
  return fntype;
}
369 :
/* Return true if a parameter must be passed by reference.  This version
   of the TARGET_PASS_BY_REFERENCE hook uses just MUST_PASS_IN_STACK.  */

bool
hook_pass_by_reference_must_pass_in_stack (cumulative_args_t,
					   const function_arg_info &arg)
{
  return targetm.calls.must_pass_in_stack (arg);
}
379 :
/* Return true if a parameter follows callee copies conventions.  This
   version of the hook is true for all named arguments.  */

bool
hook_callee_copies_named (cumulative_args_t, const function_arg_info &arg)
{
  return arg.named;
}
388 :
/* Emit to STREAM the assembler syntax for insn operand X, modified by
   the optional operand-code CODE.  Targets still using the legacy
   PRINT_OPERAND macro defer to it; everyone else must override the
   hook, so the fallback is unreachable.  */

void
default_print_operand (FILE *stream ATTRIBUTE_UNUSED, rtx x ATTRIBUTE_UNUSED,
		       int code ATTRIBUTE_UNUSED)
{
#ifdef PRINT_OPERAND
  PRINT_OPERAND (stream, x, code);
#else
  gcc_unreachable ();
#endif
}
401 :
/* Emit to STREAM the assembler syntax for an insn operand whose memory
   address is X.  Targets still using the legacy PRINT_OPERAND_ADDRESS
   macro defer to it; everyone else must override the hook.  */

void
default_print_operand_address (FILE *stream ATTRIBUTE_UNUSED,
			       machine_mode /*mode*/,
			       rtx x ATTRIBUTE_UNUSED)
{
#ifdef PRINT_OPERAND_ADDRESS
  PRINT_OPERAND_ADDRESS (stream, x);
#else
  gcc_unreachable ();
#endif
}
416 :
/* Return true if CODE is a valid punctuation character for the
   `print_operand' hook.  Defers to the legacy
   PRINT_OPERAND_PUNCT_VALID_P macro when defined; otherwise no
   punctuation codes are valid.  */

bool
default_print_operand_punct_valid_p (unsigned char code ATTRIBUTE_UNUSED)
{
#ifdef PRINT_OPERAND_PUNCT_VALID_P
  return PRINT_OPERAND_PUNCT_VALID_P (code);
#else
  return false;
#endif
}
429 :
430 : /* The default implementation of TARGET_MANGLE_ASSEMBLER_NAME. */
431 : tree
432 543310 : default_mangle_assembler_name (const char *name ATTRIBUTE_UNUSED)
433 : {
434 543310 : const char *skipped = name + (*name == '*' ? 1 : 0);
435 543310 : const char *stripped = targetm.strip_name_encoding (skipped);
436 543310 : if (*name != '*' && user_label_prefix[0])
437 0 : stripped = ACONCAT ((user_label_prefix, stripped, NULL));
438 543310 : return get_identifier (stripped);
439 : }
440 :
/* The default implementation of TARGET_TRANSLATE_MODE_ATTRIBUTE:
   no translation, MODE is used as written.  */

machine_mode
default_translate_mode_attribute (machine_mode mode)
{
  return mode;
}
448 :
449 : /* True if MODE is valid for the target. By "valid", we mean able to
450 : be manipulated in non-trivial ways. In particular, this means all
451 : the arithmetic is supported.
452 :
453 : By default we guess this means that any C type is supported. If
454 : we can't map the mode back to a type that would be available in C,
455 : then reject it. Special case, here, is the double-word arithmetic
456 : supported by optabs.cc. */
457 :
458 : bool
459 2979859 : default_scalar_mode_supported_p (scalar_mode mode)
460 : {
461 2979859 : int precision = GET_MODE_PRECISION (mode);
462 :
463 2979859 : switch (GET_MODE_CLASS (mode))
464 : {
465 1727986 : case MODE_PARTIAL_INT:
466 1727986 : case MODE_INT:
467 1727986 : if (precision == CHAR_TYPE_SIZE)
468 : return true;
469 1644571 : if (precision == SHORT_TYPE_SIZE)
470 : return true;
471 1574451 : if (precision == INT_TYPE_SIZE)
472 : return true;
473 1527011 : if (precision == LONG_TYPE_SIZE)
474 : return true;
475 1402343 : if (precision == LONG_LONG_TYPE_SIZE)
476 : return true;
477 1432451 : if (precision == 2 * BITS_PER_WORD)
478 : return true;
479 : return false;
480 :
481 1251873 : case MODE_FLOAT:
482 1251873 : if (mode == targetm.c.mode_for_floating_type (TI_FLOAT_TYPE))
483 : return true;
484 930343 : if (mode == targetm.c.mode_for_floating_type (TI_DOUBLE_TYPE))
485 : return true;
486 321076 : if (mode == targetm.c.mode_for_floating_type (TI_LONG_DOUBLE_TYPE))
487 : return true;
488 : return false;
489 :
490 : case MODE_DECIMAL_FLOAT:
491 : case MODE_FRACT:
492 : case MODE_UFRACT:
493 : case MODE_ACCUM:
494 : case MODE_UACCUM:
495 : return false;
496 :
497 0 : default:
498 0 : gcc_unreachable ();
499 : }
500 : }
501 :
/* Return true if libgcc supports floating-point mode MODE (known to
   be supported as a scalar mode).  The set of case labels is chosen
   at build time: only the float modes the target actually has are
   compiled in.  */

bool
default_libgcc_floating_mode_supported_p (scalar_float_mode mode)
{
  switch (mode)
    {
#ifdef HAVE_SFmode
    case E_SFmode:
#endif
#ifdef HAVE_DFmode
    case E_DFmode:
#endif
#ifdef HAVE_XFmode
    case E_XFmode:
#endif
#ifdef HAVE_TFmode
    case E_TFmode:
#endif
      return true;

    default:
      return false;
    }
}
528 :
/* Return the machine mode to use for the type _FloatN, if EXTENDED is
   false, or _FloatNx, if EXTENDED is true, or VOIDmode if not
   supported.  A candidate mode is accepted only if its format has the
   right number of IEEE bits (strictly more than N for _FloatNx, exactly
   N for _FloatN) and both the compiler and libgcc support it.  */
opt_scalar_float_mode
default_floatn_mode (int n, bool extended)
{
  if (extended)
    {
      /* _FloatNx: try up to two target-dependent candidates in order.  */
      opt_scalar_float_mode cand1, cand2;
      scalar_float_mode mode;
      switch (n)
	{
	case 32:
#ifdef HAVE_DFmode
	  cand1 = DFmode;
#endif
	  break;

	case 64:
#ifdef HAVE_XFmode
	  cand1 = XFmode;
#endif
#ifdef HAVE_TFmode
	  cand2 = TFmode;
#endif
	  break;

	case 128:
	  /* No default candidate for _Float128x.  */
	  break;

	default:
	  /* Those are the only valid _FloatNx types.  */
	  gcc_unreachable ();
	}
      if (cand1.exists (&mode)
	  && REAL_MODE_FORMAT (mode)->ieee_bits > n
	  && targetm.scalar_mode_supported_p (mode)
	  && targetm.libgcc_floating_mode_supported_p (mode))
	return cand1;
      if (cand2.exists (&mode)
	  && REAL_MODE_FORMAT (mode)->ieee_bits > n
	  && targetm.scalar_mode_supported_p (mode)
	  && targetm.libgcc_floating_mode_supported_p (mode))
	return cand2;
    }
  else
    {
      /* _FloatN: one natural candidate per width.  */
      opt_scalar_float_mode cand;
      scalar_float_mode mode;
      switch (n)
	{
	case 16:
	  /* Always enable _Float16 if we have basic support for the mode.
	     Targets can control the range and precision of operations on
	     the _Float16 type using TARGET_C_EXCESS_PRECISION.  */
#ifdef HAVE_HFmode
	  cand = HFmode;
#endif
	  break;

	case 32:
#ifdef HAVE_SFmode
	  cand = SFmode;
#endif
	  break;

	case 64:
#ifdef HAVE_DFmode
	  cand = DFmode;
#endif
	  break;

	case 128:
#ifdef HAVE_TFmode
	  cand = TFmode;
#endif
	  break;

	default:
	  break;
	}
      if (cand.exists (&mode)
	  && REAL_MODE_FORMAT (mode)->ieee_bits == n
	  && targetm.scalar_mode_supported_p (mode)
	  && targetm.libgcc_floating_mode_supported_p (mode))
	return cand;
    }
  /* No usable candidate.  */
  return opt_scalar_float_mode ();
}
618 :
619 : /* Define this to return true if the _Floatn and _Floatnx built-in functions
620 : should implicitly enable the built-in function without the __builtin_ prefix
621 : in addition to the normal built-in function with the __builtin_ prefix. The
622 : default is to only enable built-in functions without the __builtin_ prefix
623 : for the GNU C langauge. The argument FUNC is the enum builtin_in_function
624 : id of the function to be enabled. */
625 :
626 : bool
627 134693811 : default_floatn_builtin_p (int func ATTRIBUTE_UNUSED)
628 : {
629 134693811 : static bool first_time_p = true;
630 134693811 : static bool c_or_objective_c;
631 :
632 134693811 : if (first_time_p)
633 : {
634 231831 : first_time_p = false;
635 343036 : c_or_objective_c = lang_GNU_C () || lang_GNU_OBJC ();
636 : }
637 :
638 134693811 : return c_or_objective_c;
639 : }
640 :
/* Make some target macros useable by target-independent code:
   expose WORDS_BIG_ENDIAN as a hook.  */
bool
targhook_words_big_endian (void)
{
  return !!WORDS_BIG_ENDIAN;
}
647 :
/* Expose the FLOAT_WORDS_BIG_ENDIAN target macro as a hook.  */
bool
targhook_float_words_big_endian (void)
{
  return !!FLOAT_WORDS_BIG_ENDIAN;
}
653 :
/* True if the target supports floating-point exceptions and rounding
   modes.  Hardware DFmode addition is used as the proxy: targets that
   do floating-point in software are assumed not to support them.  */

bool
default_float_exceptions_rounding_supported_p (void)
{
#ifdef HAVE_adddf3
  return HAVE_adddf3;
#else
  return false;
#endif
}
666 :
/* True if the target supports decimal floating point, as configured
   at build time.  */

bool
default_decimal_float_supported_p (void)
{
  return ENABLE_DECIMAL_FLOAT;
}
674 :
/* True if the target supports fixed-point arithmetic, as configured
   at build time.  */

bool
default_fixed_point_supported_p (void)
{
  return ENABLE_FIXED_POINT;
}
682 :
/* True if the target supports GNU indirect functions (ifunc
   resolvers), as detected at configure time.  */

bool
default_has_ifunc_p (void)
{
  return HAVE_GNU_INDIRECT_FUNCTION;
}
690 :
/* Return true if we predict the loop LOOP will be transformed to a
   low-overhead loop, otherwise return false.

   By default, false is returned, as this hook's applicability should be
   verified for each target.  Target maintainers should re-define the hook
   if the target can take advantage of it.  */

bool
default_predict_doloop_p (class loop *loop ATTRIBUTE_UNUSED)
{
  return false;
}
703 :
/* Default preferred doloop iteration-counter mode: just use the input
   MODE itself.  */

machine_mode
default_preferred_doloop_mode (machine_mode mode)
{
  return mode;
}
711 :
712 : /* NULL if INSN insn is valid within a low-overhead loop, otherwise returns
713 : an error message.
714 :
715 : This function checks whether a given INSN is valid within a low-overhead
716 : loop. If INSN is invalid it returns the reason for that, otherwise it
717 : returns NULL. A called function may clobber any special registers required
718 : for low-overhead looping. Additionally, some targets (eg, PPC) use the count
719 : register for branch on table instructions. We reject the doloop pattern in
720 : these cases. */
721 :
722 : const char *
723 0 : default_invalid_within_doloop (const rtx_insn *insn)
724 : {
725 0 : if (CALL_P (insn))
726 : return "Function call in loop.";
727 :
728 0 : if (tablejump_p (insn, NULL, NULL) || computed_jump_p (insn))
729 0 : return "Computed branch in the loop.";
730 :
731 : return NULL;
732 : }
733 :
/* Mapping of builtin functions to vectorized variants: by default no
   vectorized variant exists.  */

tree
default_builtin_vectorized_function (unsigned int, tree, tree)
{
  return NULL_TREE;
}
741 :
/* Mapping of target (machine-dependent) builtin functions to vectorized
   variants: by default no vectorized variant exists.  */

tree
default_builtin_md_vectorized_function (tree, tree, tree)
{
  return NULL_TREE;
}
749 :
/* Default vectorizer cost model values: most statements cost 1,
   unaligned accesses 2, taken branches 3, and vector construction
   scales with the number of elements.  */

int
default_builtin_vectorization_cost (enum vect_cost_for_stmt type_of_cost,
				    tree vectype,
				    int misalign ATTRIBUTE_UNUSED)
{
  switch (type_of_cost)
    {
    case scalar_stmt:
    case scalar_load:
    case scalar_store:
    case vector_stmt:
    case vector_load:
    case vector_store:
    case vec_to_scalar:
    case scalar_to_vec:
    case cond_branch_not_taken:
    case vec_perm:
    case vec_promote_demote:
      return 1;

    case unaligned_load:
    case unaligned_store:
      return 2;

    case cond_branch_taken:
      return 3;

    case vec_construct:
      /* Building a vector of N elements costs roughly N-1 inserts.  */
      return estimated_poly_value (TYPE_VECTOR_SUBPARTS (vectype)) - 1;

    default:
      gcc_unreachable ();
    }
}
786 :
/* Reciprocal: by default no builtin reciprocal variant exists.  */

tree
default_builtin_reciprocal (tree)
{
  return NULL_TREE;
}
794 :
/* Default hook for emitting extra C++ typeinfo support: none needed.  */
void
default_emit_support_tinfos (emit_support_tinfos_callback)
{
}
799 :
/* Generic hook taking CUMULATIVE_ARGS and a function_arg_info that
   always returns false.  */
bool
hook_bool_CUMULATIVE_ARGS_arg_info_false (cumulative_args_t,
					  const function_arg_info &)
{
  return false;
}
806 :
/* Generic hook taking CUMULATIVE_ARGS and a function_arg_info that
   always returns true.  */
bool
hook_bool_CUMULATIVE_ARGS_arg_info_true (cumulative_args_t,
					 const function_arg_info &)
{
  return true;
}
813 :
/* Generic hook taking CUMULATIVE_ARGS and a function_arg_info that
   always returns 0.  */
int
hook_int_CUMULATIVE_ARGS_arg_info_0 (cumulative_args_t,
				     const function_arg_info &)
{
  return 0;
}
820 :
/* Generic no-op hook taking only CUMULATIVE_ARGS.  */
void
hook_void_CUMULATIVE_ARGS (cumulative_args_t)
{
}
825 :
/* Generic no-op hook taking CUMULATIVE_ARGS and a tree.  */
void
hook_void_CUMULATIVE_ARGS_tree (cumulative_args_t ca ATTRIBUTE_UNUSED,
				tree ATTRIBUTE_UNUSED)
{
}
831 :
/* Generic no-op hook taking CUMULATIVE_ARGS, an rtx and a tree.  */
void
hook_void_CUMULATIVE_ARGS_rtx_tree (cumulative_args_t, rtx, tree)
{
}
836 :
/* Default implementation of TARGET_PUSH_ARGUMENT: push instructions are
   usable only when the target supports PUSH_ROUNDING and is not forced
   to accumulate outgoing arguments.  */

bool
default_push_argument (unsigned int)
{
#ifdef PUSH_ROUNDING
  return !ACCUMULATE_OUTGOING_ARGS;
#else
  return false;
#endif
}
848 :
/* Default for TARGET_FUNCTION_ARG_ADVANCE: every target must override
   this hook, so reaching it is an internal error.  */
void
default_function_arg_advance (cumulative_args_t, const function_arg_info &)
{
  gcc_unreachable ();
}
854 :
/* Default implementation of TARGET_FUNCTION_ARG_OFFSET: arguments get
   no extra stack offset.  */

HOST_WIDE_INT
default_function_arg_offset (machine_mode, const_tree)
{
  return 0;
}
862 :
863 : /* Default implementation of TARGET_FUNCTION_ARG_PADDING: usually pad
864 : upward, but pad short args downward on big-endian machines. */
865 :
866 : pad_direction
867 12324509 : default_function_arg_padding (machine_mode mode, const_tree type)
868 : {
869 12324509 : if (!BYTES_BIG_ENDIAN)
870 12324509 : return PAD_UPWARD;
871 :
872 : unsigned HOST_WIDE_INT size;
873 : if (mode == BLKmode)
874 : {
875 : if (!type || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
876 : return PAD_UPWARD;
877 : size = int_size_in_bytes (type);
878 : }
879 : else
880 : /* Targets with variable-sized modes must override this hook
881 : and handle variable-sized modes explicitly. */
882 : size = GET_MODE_SIZE (mode).to_constant ();
883 :
884 : if (size < (PARM_BOUNDARY / BITS_PER_UNIT))
885 : return PAD_DOWNWARD;
886 :
887 : return PAD_UPWARD;
888 : }
889 :
/* Default for TARGET_FUNCTION_ARG: every target must override this
   hook, so reaching it is an internal error.  */
rtx
default_function_arg (cumulative_args_t, const function_arg_info &)
{
  gcc_unreachable ();
}
895 :
/* Default for TARGET_FUNCTION_INCOMING_ARG: every target must override
   this hook, so reaching it is an internal error.  */
rtx
default_function_incoming_arg (cumulative_args_t, const function_arg_info &)
{
  gcc_unreachable ();
}
901 :
/* Default for TARGET_FUNCTION_ARG_BOUNDARY: arguments are aligned to
   the generic parameter boundary.  */
unsigned int
default_function_arg_boundary (machine_mode mode ATTRIBUTE_UNUSED,
			       const_tree type ATTRIBUTE_UNUSED)
{
  return PARM_BOUNDARY;
}
908 :
/* Default for TARGET_FUNCTION_ARG_ROUND_BOUNDARY: round argument sizes
   to the generic parameter boundary.  */
unsigned int
default_function_arg_round_boundary (machine_mode mode ATTRIBUTE_UNUSED,
				     const_tree type ATTRIBUTE_UNUSED)
{
  return PARM_BOUNDARY;
}
915 :
/* Generic no-op hook taking a bitmap of registers.  */
void
hook_void_bitmap (bitmap regs ATTRIBUTE_UNUSED)
{
}
920 :
/* Hook checking an argument VAL passed to an unprototyped function
   FUNCDECL with parameter list TYPELIST; NULL means "no complaint",
   which is the default for all targets.  */
const char *
hook_invalid_arg_for_unprototyped_fn (
	const_tree typelist ATTRIBUTE_UNUSED,
	const_tree funcdecl ATTRIBUTE_UNUSED,
	const_tree val ATTRIBUTE_UNUSED)
{
  return NULL;
}
929 :
930 : /* Initialize the stack protection decls. */
931 :
932 : /* Stack protection related decls living in libgcc. */
933 : static GTY(()) tree stack_chk_guard_decl;
934 :
/* Default hook returning the declaration of the stack-protector guard
   variable, creating the external __stack_chk_guard VAR_DECL from
   libgcc on first use and caching it in stack_chk_guard_decl.  */
tree
default_stack_protect_guard (void)
{
  tree t = stack_chk_guard_decl;

  if (t == NULL)
    {
      rtx x;

      if (targetm.stack_protect_guard_symbol_p ())
	t = lang_hooks.types.type_for_mode (ptr_mode, 1);
      else
	t = ptr_type_node;
      t = build_decl (UNKNOWN_LOCATION,
		      VAR_DECL, get_identifier ("__stack_chk_guard"), t);
      TREE_STATIC (t) = 1;
      TREE_PUBLIC (t) = 1;
      DECL_EXTERNAL (t) = 1;
      TREE_USED (t) = 1;
      /* Volatile so reads of the guard are never cached or elided.  */
      TREE_THIS_VOLATILE (t) = 1;
      DECL_ARTIFICIAL (t) = 1;
      DECL_IGNORED_P (t) = 1;

      /* Do not share RTL as the declaration is visible outside of
	 current function.  */
      if (mode_mem_attrs[(int) DECL_MODE (t)])
	{
	  /* NB: Don't call make_decl_rtl when mode_mem_attrs isn't
	     initialized.  -save-temps won't initialize mode_mem_attrs
	     and make_decl_rtl will fail.  */
	  x = DECL_RTL (t);
	  RTX_FLAG (x, used) = 1;
	}

      stack_chk_guard_decl = t;
    }

  return t;
}
974 :
975 : static GTY(()) tree stack_chk_fail_decl;
976 :
/* Default hook building a call to the external __stack_chk_fail
   routine in libgcc; the FUNCTION_DECL is created lazily and cached
   in stack_chk_fail_decl.  */
tree
default_external_stack_protect_fail (void)
{
  tree t = stack_chk_fail_decl;

  if (t == NULL_TREE)
    {
      /* void __stack_chk_fail (void), externally visible.  */
      t = build_function_type_list (void_type_node, NULL_TREE);
      t = build_decl (UNKNOWN_LOCATION,
		      FUNCTION_DECL, get_identifier ("__stack_chk_fail"), t);
      TREE_STATIC (t) = 1;
      TREE_PUBLIC (t) = 1;
      DECL_EXTERNAL (t) = 1;
      TREE_USED (t) = 1;
      TREE_THIS_VOLATILE (t) = 1;
      TREE_NOTHROW (t) = 1;
      DECL_ARTIFICIAL (t) = 1;
      DECL_IGNORED_P (t) = 1;
      DECL_VISIBILITY (t) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (t) = 1;

      stack_chk_fail_decl = t;
    }

  return build_call_expr (t, 0);
}
1003 :
/* Variant of the stack-protector failure hook that calls a hidden
   __stack_chk_fail_local when compiling PIC with an assembler that
   supports hidden visibility; otherwise falls back to the external
   __stack_chk_fail.  Reuses stack_chk_fail_decl as the cache.  */
tree
default_hidden_stack_protect_fail (void)
{
#ifndef HAVE_GAS_HIDDEN
  return default_external_stack_protect_fail ();
#else
  tree t = stack_chk_fail_decl;

  /* Non-PIC code can call the external routine directly.  */
  if (!flag_pic)
    return default_external_stack_protect_fail ();

  if (t == NULL_TREE)
    {
      /* void __stack_chk_fail_local (void), hidden visibility.  */
      t = build_function_type_list (void_type_node, NULL_TREE);
      t = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
		      get_identifier ("__stack_chk_fail_local"), t);
      TREE_STATIC (t) = 1;
      TREE_PUBLIC (t) = 1;
      DECL_EXTERNAL (t) = 1;
      TREE_USED (t) = 1;
      TREE_THIS_VOLATILE (t) = 1;
      TREE_NOTHROW (t) = 1;
      DECL_ARTIFICIAL (t) = 1;
      DECL_IGNORED_P (t) = 1;
      DECL_VISIBILITY_SPECIFIED (t) = 1;
      DECL_VISIBILITY (t) = VISIBILITY_HIDDEN;

      stack_chk_fail_decl = t;
    }

  return build_call_expr (t, 0);
#endif
}
1037 :
/* Hook answering whether rtx X is commutative, ignoring the outer
   code; defers to the generic COMMUTATIVE_P predicate.  */
bool
hook_bool_const_rtx_commutative_p (const_rtx x,
				   int outer_code ATTRIBUTE_UNUSED)
{
  return COMMUTATIVE_P (x);
}
1044 :
/* Default for TARGET_FUNCTION_VALUE: defer to the legacy FUNCTION_VALUE
   macro when defined; otherwise the target must override the hook.  */
rtx
default_function_value (const_tree ret_type ATTRIBUTE_UNUSED,
			const_tree fn_decl_or_type,
			bool outgoing ATTRIBUTE_UNUSED)
{
  /* The old interface doesn't handle receiving the function type.  */
  if (fn_decl_or_type
      && !DECL_P (fn_decl_or_type))
    fn_decl_or_type = NULL;

#ifdef FUNCTION_VALUE
  return FUNCTION_VALUE (ret_type, fn_decl_or_type);
#else
  gcc_unreachable ();
#endif
}
1061 :
/* Default for TARGET_LIBCALL_VALUE: defer to the legacy LIBCALL_VALUE
   macro when defined; otherwise the target must override the hook.  */
rtx
default_libcall_value (machine_mode mode ATTRIBUTE_UNUSED,
		       const_rtx fun ATTRIBUTE_UNUSED)
{
#ifdef LIBCALL_VALUE
  return LIBCALL_VALUE (MACRO_MODE (mode));
#else
  gcc_unreachable ();
#endif
}
1072 :
/* The default hook for TARGET_FUNCTION_VALUE_REGNO_P: defer to the
   legacy FUNCTION_VALUE_REGNO_P macro when defined; otherwise the
   target must override the hook.  */

bool
default_function_value_regno_p (const unsigned int regno ATTRIBUTE_UNUSED)
{
#ifdef FUNCTION_VALUE_REGNO_P
  return FUNCTION_VALUE_REGNO_P (regno);
#else
  gcc_unreachable ();
#endif
}
1084 :
/* Choose the mode and rtx to use to zero REGNO, storing them in PMODE and
   PREGNO_RTX and returning TRUE if successful, otherwise returning FALSE.
   If the natural mode for REGNO doesn't work, attempt to group it with
   subsequent adjacent registers set in TOZERO.  */

static inline bool
zcur_select_mode_rtx (unsigned int regno, machine_mode *pmode,
		      rtx *pregno_rtx, HARD_REG_SET tozero)
{
  rtx regno_rtx = regno_reg_rtx[regno];
  machine_mode mode = GET_MODE (regno_rtx);

  /* If the natural mode doesn't work, try some wider mode.  */
  if (!targetm.hard_regno_mode_ok (regno, mode))
    {
      bool found = false;
      /* Widen one register at a time, but only over registers that are
	 themselves slated to be zeroed, and never past the hard
	 registers.  */
      for (int nregs = 2;
	   !found && nregs <= hard_regno_max_nregs
	   && regno + nregs <= FIRST_PSEUDO_REGISTER
	   && TEST_HARD_REG_BIT (tozero,
				 regno + nregs - 1);
	   nregs++)
	{
	  mode = choose_hard_reg_mode (regno, nregs, 0);
	  if (mode == E_VOIDmode)
	    continue;
	  gcc_checking_assert (targetm.hard_regno_mode_ok (regno, mode));
	  regno_rtx = gen_rtx_REG (mode, regno);
	  found = true;
	}
      if (!found)
	return false;
    }

  *pmode = mode;
  *pregno_rtx = regno_rtx;
  return true;
}
1123 :
1124 : /* The default hook for TARGET_ZERO_CALL_USED_REGS. */
1125 :
1126 : HARD_REG_SET
1127 0 : default_zero_call_used_regs (HARD_REG_SET need_zeroed_hardregs)
1128 : {
1129 0 : gcc_assert (!hard_reg_set_empty_p (need_zeroed_hardregs));
1130 :
1131 : HARD_REG_SET failed;
1132 0 : CLEAR_HARD_REG_SET (failed);
1133 : bool progress = false;
1134 :
1135 : /* First, try to zero each register in need_zeroed_hardregs by
1136 : loading a zero into it, taking note of any failures in
1137 : FAILED. */
1138 0 : for (unsigned int regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1139 0 : if (TEST_HARD_REG_BIT (need_zeroed_hardregs, regno))
1140 : {
1141 0 : rtx_insn *last_insn = get_last_insn ();
1142 0 : rtx regno_rtx;
1143 0 : machine_mode mode;
1144 :
1145 0 : if (!zcur_select_mode_rtx (regno, &mode, ®no_rtx,
1146 : need_zeroed_hardregs))
1147 : {
1148 0 : SET_HARD_REG_BIT (failed, regno);
1149 0 : continue;
1150 : }
1151 :
1152 0 : rtx zero = CONST0_RTX (mode);
1153 0 : rtx_insn *insn = emit_move_insn (regno_rtx, zero);
1154 0 : if (!valid_insn_p (insn))
1155 : {
1156 0 : SET_HARD_REG_BIT (failed, regno);
1157 0 : delete_insns_since (last_insn);
1158 : }
1159 : else
1160 : {
1161 0 : progress = true;
1162 0 : regno += hard_regno_nregs (regno, mode) - 1;
1163 : }
1164 : }
1165 :
1166 : /* Now retry with copies from zeroed registers, as long as we've
1167 : made some PROGRESS, and registers remain to be zeroed in
1168 : FAILED. */
1169 0 : while (progress && !hard_reg_set_empty_p (failed))
1170 : {
1171 0 : HARD_REG_SET retrying = failed;
1172 :
1173 0 : CLEAR_HARD_REG_SET (failed);
1174 : progress = false;
1175 :
1176 0 : for (unsigned int regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1177 0 : if (TEST_HARD_REG_BIT (retrying, regno))
1178 : {
1179 0 : rtx regno_rtx;
1180 0 : machine_mode mode;
1181 :
1182 : /* This might select registers we've already zeroed. If grouping
1183 : with them is what it takes to get regno zeroed, so be it. */
1184 0 : if (!zcur_select_mode_rtx (regno, &mode, ®no_rtx,
1185 : need_zeroed_hardregs))
1186 : {
1187 0 : SET_HARD_REG_BIT (failed, regno);
1188 0 : continue;
1189 : }
1190 :
1191 0 : bool success = false;
1192 : /* Look for a source. */
1193 0 : for (unsigned int src = 0; src < FIRST_PSEUDO_REGISTER; src++)
1194 : {
1195 : /* If SRC hasn't been zeroed (yet?), skip it. */
1196 0 : if (! TEST_HARD_REG_BIT (need_zeroed_hardregs, src))
1197 0 : continue;
1198 0 : if (TEST_HARD_REG_BIT (retrying, src))
1199 0 : continue;
1200 :
1201 : /* Check that SRC can hold MODE, and that any other
1202 : registers needed to hold MODE in SRC have also been
1203 : zeroed. */
1204 0 : if (!targetm.hard_regno_mode_ok (src, mode))
1205 0 : continue;
1206 0 : unsigned n = targetm.hard_regno_nregs (src, mode);
1207 0 : bool ok = true;
1208 0 : for (unsigned i = 1; ok && i < n; i++)
1209 0 : ok = (TEST_HARD_REG_BIT (need_zeroed_hardregs, src + i)
1210 0 : && !TEST_HARD_REG_BIT (retrying, src + i));
1211 0 : if (!ok)
1212 0 : continue;
1213 :
1214 : /* SRC is usable, try to copy from it. */
1215 0 : rtx_insn *last_insn = get_last_insn ();
1216 0 : rtx src_rtx = gen_rtx_REG (mode, src);
1217 0 : rtx_insn *insn = emit_move_insn (regno_rtx, src_rtx);
1218 0 : if (!valid_insn_p (insn))
1219 : /* It didn't work, remove any inserts. We'll look
1220 : for another SRC. */
1221 0 : delete_insns_since (last_insn);
1222 : else
1223 : {
1224 : /* We're done for REGNO. */
1225 : success = true;
1226 : break;
1227 : }
1228 : }
1229 :
1230 : /* If nothing worked for REGNO this round, mark it to be
1231 : retried if we get another round. */
1232 0 : if (!success)
1233 0 : SET_HARD_REG_BIT (failed, regno);
1234 : else
1235 : {
1236 : /* Take note so as to enable another round if needed. */
1237 0 : progress = true;
1238 0 : regno += hard_regno_nregs (regno, mode) - 1;
1239 : }
1240 : }
1241 : }
1242 :
1243 : /* If any register remained, report it. */
1244 0 : if (!progress)
1245 : {
1246 0 : static bool issued_error;
1247 0 : if (!issued_error)
1248 : {
1249 0 : const char *name = NULL;
1250 0 : for (unsigned int i = 0; zero_call_used_regs_opts[i].name != NULL;
1251 : ++i)
1252 0 : if (flag_zero_call_used_regs == zero_call_used_regs_opts[i].flag)
1253 : {
1254 : name = zero_call_used_regs_opts[i].name;
1255 : break;
1256 : }
1257 :
1258 0 : if (!name)
1259 0 : name = "";
1260 :
1261 0 : issued_error = true;
1262 0 : sorry ("argument %qs is not supported for %qs on this target",
1263 : name, "-fzero-call-used-regs");
1264 : }
1265 : }
1266 :
1267 0 : return need_zeroed_hardregs;
1268 : }
1269 :
1270 : rtx
1271 0 : default_internal_arg_pointer (void)
1272 : {
1273 : /* If the reg that the virtual arg pointer will be translated into is
1274 : not a fixed reg or is the stack pointer, make a copy of the virtual
1275 : arg pointer, and address parms via the copy. The frame pointer is
1276 : considered fixed even though it is not marked as such. */
1277 0 : if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
1278 0 : || ! (fixed_regs[ARG_POINTER_REGNUM]
1279 : || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
1280 0 : return copy_to_reg (virtual_incoming_args_rtx);
1281 : else
1282 0 : return virtual_incoming_args_rtx;
1283 : }
1284 :
/* The default hook for TARGET_STATIC_CHAIN.  Return the register named
   by STATIC_CHAIN_INCOMING_REGNUM (when INCOMING_P and it is defined)
   or STATIC_CHAIN_REGNUM; with neither macro defined, nested functions
   are unsupported and a sorry diagnostic is issued once.  */

rtx
default_static_chain (const_tree ARG_UNUSED (fndecl_or_type), bool incoming_p)
{
  if (incoming_p)
    {
#ifdef STATIC_CHAIN_INCOMING_REGNUM
      return gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
#endif
    }

#ifdef STATIC_CHAIN_REGNUM
  return gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);
#endif

  {
    static bool issued_error;
    if (!issued_error)
      {
	issued_error = true;
	sorry ("nested functions not supported on this target");
      }

    /* It really doesn't matter what we return here, so long as it
       doesn't cause the rest of the compiler to crash.  */
    return gen_rtx_MEM (Pmode, stack_pointer_rtx);
  }
}
1312 :
/* The default hook for TARGET_TRAMPOLINE_INIT: report that
   nested-function trampolines are unsupported on this target.  */

void
default_trampoline_init (rtx ARG_UNUSED (m_tramp), tree ARG_UNUSED (t_func),
			 rtx ARG_UNUSED (r_chain))
{
  sorry ("nested function trampolines not supported on this target");
}

/* The default hook for TARGET_RETURN_POPS_ARGS: the returning function
   pops no bytes of its arguments.  */

poly_int64
default_return_pops_args (tree, tree, poly_int64)
{
  return 0;
}
1325 :
/* The default hook for TARGET_IRA_CHANGE_PSEUDO_ALLOCNO_CLASS: keep
   the allocno class CL that IRA already chose.  */

reg_class_t
default_ira_change_pseudo_allocno_class (int regno ATTRIBUTE_UNUSED,
					 reg_class_t cl,
					 reg_class_t best_cl ATTRIBUTE_UNUSED)
{
  return cl;
}

/* The default hook for TARGET_IRA_CALLEE_SAVED_REGISTER_COST_SCALE:
   scale save/restore cost by the entry block's execution frequency,
   except when optimizing for size.  */

int
default_ira_callee_saved_register_cost_scale (int)
{
  return (optimize_size
	  ? 1
	  : REG_FREQ_FROM_BB (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
}
1341 :
/* The default hook for TARGET_LRA_P: use LRA rather than reload.  */

extern bool
default_lra_p (void)
{
  return true;
}

/* The default hook for TARGET_REGISTER_PRIORITY: all hard registers
   have equal priority.  */

int
default_register_priority (int hard_regno ATTRIBUTE_UNUSED)
{
  return 0;
}

/* The default hook for TARGET_REGISTER_USAGE_LEVELING_P.  */

extern bool
default_register_usage_leveling_p (void)
{
  return false;
}

/* The default hook for TARGET_DIFFERENT_ADDR_DISPLACEMENT_P.  */

extern bool
default_different_addr_displacement_p (void)
{
  return false;
}
1365 :
1366 : reg_class_t
1367 0 : default_secondary_reload (bool in_p ATTRIBUTE_UNUSED, rtx x ATTRIBUTE_UNUSED,
1368 : reg_class_t reload_class_i ATTRIBUTE_UNUSED,
1369 : machine_mode reload_mode ATTRIBUTE_UNUSED,
1370 : secondary_reload_info *sri)
1371 : {
1372 0 : enum reg_class rclass = NO_REGS;
1373 0 : enum reg_class reload_class = (enum reg_class) reload_class_i;
1374 :
1375 0 : if (sri->prev_sri && sri->prev_sri->t_icode != CODE_FOR_nothing)
1376 : {
1377 0 : sri->icode = sri->prev_sri->t_icode;
1378 0 : return NO_REGS;
1379 : }
1380 : #ifdef SECONDARY_INPUT_RELOAD_CLASS
1381 : if (in_p)
1382 : rclass = SECONDARY_INPUT_RELOAD_CLASS (reload_class,
1383 : MACRO_MODE (reload_mode), x);
1384 : #endif
1385 : #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
1386 : if (! in_p)
1387 : rclass = SECONDARY_OUTPUT_RELOAD_CLASS (reload_class,
1388 : MACRO_MODE (reload_mode), x);
1389 : #endif
1390 : if (rclass != NO_REGS)
1391 : {
1392 : enum insn_code icode
1393 : = direct_optab_handler (in_p ? reload_in_optab : reload_out_optab,
1394 : reload_mode);
1395 :
1396 : if (icode != CODE_FOR_nothing
1397 : && !insn_operand_matches (icode, in_p, x))
1398 : icode = CODE_FOR_nothing;
1399 : else if (icode != CODE_FOR_nothing)
1400 : {
1401 : const char *insn_constraint, *scratch_constraint;
1402 : enum reg_class insn_class, scratch_class;
1403 :
1404 : gcc_assert (insn_data[(int) icode].n_operands == 3);
1405 : insn_constraint = insn_data[(int) icode].operand[!in_p].constraint;
1406 : if (!*insn_constraint)
1407 : insn_class = ALL_REGS;
1408 : else
1409 : {
1410 : if (in_p)
1411 : {
1412 : gcc_assert (*insn_constraint == '=');
1413 : insn_constraint++;
1414 : }
1415 : insn_class = (reg_class_for_constraint
1416 : (lookup_constraint (insn_constraint)));
1417 : gcc_assert (insn_class != NO_REGS);
1418 : }
1419 :
1420 : scratch_constraint = insn_data[(int) icode].operand[2].constraint;
1421 : /* The scratch register's constraint must start with "=&",
1422 : except for an input reload, where only "=" is necessary,
1423 : and where it might be beneficial to re-use registers from
1424 : the input. */
1425 : gcc_assert (scratch_constraint[0] == '='
1426 : && (in_p || scratch_constraint[1] == '&'));
1427 : scratch_constraint++;
1428 : if (*scratch_constraint == '&')
1429 : scratch_constraint++;
1430 : scratch_class = (reg_class_for_constraint
1431 : (lookup_constraint (scratch_constraint)));
1432 :
1433 : if (reg_class_subset_p (reload_class, insn_class))
1434 : {
1435 : gcc_assert (scratch_class == rclass);
1436 : rclass = NO_REGS;
1437 : }
1438 : else
1439 : rclass = insn_class;
1440 :
1441 : }
1442 : if (rclass == NO_REGS)
1443 : sri->icode = icode;
1444 : else
1445 : sri->t_icode = icode;
1446 : }
1447 : return rclass;
1448 : }
1449 :
/* The default implementation of TARGET_SECONDARY_MEMORY_NEEDED_MODE.  */

machine_mode
default_secondary_memory_needed_mode (machine_mode mode)
{
  /* Under old reload (not LRA), spill sub-word integral values through
     memory in a full word.  */
  if (!targetm.lra_p ()
      && known_lt (GET_MODE_BITSIZE (mode), BITS_PER_WORD)
      && INTEGRAL_MODE_P (mode))
    return mode_for_size (BITS_PER_WORD, GET_MODE_CLASS (mode), 0).require ();
  return mode;
}
1461 :
/* By default, if flag_pic is true, then neither local nor global relocs
   should be placed in readonly memory.  */

int
default_reloc_rw_mask (void)
{
  /* A mask of 3 keeps both local and global relocations out of
     readonly sections; 0 allows both.  */
  return flag_pic ? 3 : 0;
}

/* By default, address diff vectors are generated
   for jump tables when flag_pic is true.  */

bool
default_generate_pic_addr_diff_vec (void)
{
  return flag_pic;
}
1479 :
/* Record an element in the table of global constructors.  SYMBOL is
   a SYMBOL_REF of the function to be called; PRIORITY is a number
   between 0 and MAX_INIT_PRIORITY.  The default merely reports that
   the target cannot emit constructor tables.  */

void
default_asm_out_constructor (rtx symbol ATTRIBUTE_UNUSED,
			     int priority ATTRIBUTE_UNUSED)
{
  sorry ("global constructors not supported on this target");
}

/* Likewise for global destructors.  */

void
default_asm_out_destructor (rtx symbol ATTRIBUTE_UNUSED,
			    int priority ATTRIBUTE_UNUSED)
{
  sorry ("global destructors not supported on this target");
}

/* The default hook for TARGET_MANGLE_DECL_ASSEMBLER_NAME: by default,
   do no modification and return ID unchanged.  */
tree default_mangle_decl_assembler_name (tree decl ATTRIBUTE_UNUSED,
					 tree id)
{
  return id;
}
1506 :
/* The default implementation of TARGET_STATIC_RTX_ALIGNMENT: use the
   natural alignment of MODE.  */

HOST_WIDE_INT
default_static_rtx_alignment (machine_mode mode)
{
  return GET_MODE_ALIGNMENT (mode);
}

/* The default implementation of TARGET_CONSTANT_ALIGNMENT: keep the
   alignment the middle end already computed.  */

HOST_WIDE_INT
default_constant_alignment (const_tree, HOST_WIDE_INT align)
{
  return align;
}
1522 :
1523 : /* An implementation of TARGET_CONSTANT_ALIGNMENT that aligns strings
1524 : to at least BITS_PER_WORD but otherwise makes no changes. */
1525 :
1526 : HOST_WIDE_INT
1527 0 : constant_alignment_word_strings (const_tree exp, HOST_WIDE_INT align)
1528 : {
1529 0 : if (TREE_CODE (exp) == STRING_CST)
1530 0 : return MAX (align, BITS_PER_WORD);
1531 : return align;
1532 : }
1533 :
/* Default to natural alignment for vector types, bounded by
   MAX_OFILE_ALIGNMENT.  */

HOST_WIDE_INT
default_vector_alignment (const_tree type)
{
  unsigned HOST_WIDE_INT align = MAX_OFILE_ALIGNMENT;
  tree size = TYPE_SIZE (type);
  /* Natural alignment of a vector is its size in bits, when known.  */
  if (tree_fits_uhwi_p (size))
    align = tree_to_uhwi (size);
  /* Cap at what the object file format can express.  */
  if (align >= MAX_OFILE_ALIGNMENT)
    return MAX_OFILE_ALIGNMENT;
  /* But never drop below the mode's own alignment requirement.  */
  return MAX (align, GET_MODE_ALIGNMENT (TYPE_MODE (type)));
}
1548 :
/* The default implementation of
   TARGET_VECTORIZE_PREFERRED_VECTOR_ALIGNMENT: the type's own
   alignment.  */

poly_uint64
default_preferred_vector_alignment (const_tree type)
{
  return TYPE_ALIGN (type);
}

/* The default implementation of
   TARGET_VECTORIZE_PREFERRED_DIV_AS_SHIFTS_OVER_MULT: prefer shifts
   whenever the mode lacks highpart-multiply support.  */

bool
default_preferred_div_as_shifts_over_mult (const_tree type)
{
  return !can_mult_highpart_p (TYPE_MODE (type), TYPE_UNSIGNED (type));
}

/* By default assume vectors of element TYPE require a multiple of the natural
   alignment of TYPE.  TYPE is naturally aligned if IS_PACKED is false.  */
bool
default_builtin_vector_alignment_reachable (const_tree /*type*/, bool is_packed)
{
  return ! is_packed;
}
1574 :
/* By default, assume that a target supports any factor of misaligned
   memory access if it supports the movmisalign pattern.
   is_packed is true if the memory access is defined in a packed struct.  */
1578 : bool
1579 1353336 : default_builtin_support_vector_misalignment (machine_mode mode,
1580 : int misalignment
1581 : ATTRIBUTE_UNUSED,
1582 : bool is_packed
1583 : ATTRIBUTE_UNUSED,
1584 : bool is_gather_scatter
1585 : ATTRIBUTE_UNUSED)
1586 : {
1587 1353336 : if (optab_handler (movmisalign_optab, mode) != CODE_FOR_nothing)
1588 1353119 : return true;
1589 : return false;
1590 : }
1591 :
/* By default, only attempt to parallelize bitwise operations, and
   possibly adds/subtracts using bit-twiddling.  */

machine_mode
default_preferred_simd_mode (scalar_mode)
{
  return word_mode;
}

/* By default do not split reductions further.  */

machine_mode
default_split_reduction (machine_mode mode)
{
  return mode;
}

/* By default only the preferred vector mode is tried.  Returning 0
   means no autovectorize_vector_modes were added.  */

unsigned int
default_autovectorize_vector_modes (vector_modes *, bool)
{
  return 0;
}
1616 :
/* The default implementation of TARGET_VECTORIZE_RELATED_MODE.  Find a
   vector mode with element mode ELEMENT_MODE and NUNITS elements that
   the target supports, or no mode on failure.  */

opt_machine_mode
default_vectorize_related_mode (machine_mode vector_mode,
				scalar_mode element_mode,
				poly_uint64 nunits)
{
  machine_mode result_mode;
  /* When NUNITS is zero, derive it from VECTOR_MODE's byte size
     divided by the element size.  */
  if ((maybe_ne (nunits, 0U)
       || multiple_p (GET_MODE_SIZE (vector_mode),
		      GET_MODE_SIZE (element_mode), &nunits))
      && mode_for_vector (element_mode, nunits).exists (&result_mode)
      && VECTOR_MODE_P (result_mode)
      && targetm.vector_mode_supported_p (result_mode))
    return result_mode;

  return opt_machine_mode ();
}
1635 :
/* By default a vector of integers is used as a mask.  */

opt_machine_mode
default_get_mask_mode (machine_mode mode)
{
  return related_int_vector_mode (mode);
}

/* The default for TARGET_VECTORIZE_CONDITIONAL_OPERATION_IS_EXPENSIVE:
   consider masked stores (only) to be expensive.  */

bool
default_conditional_operation_is_expensive (unsigned ifn)
{
  return ifn == IFN_MASK_STORE;
}

/* The default for TARGET_VECTORIZE_EMPTY_MASK_IS_EXPENSIVE: likewise,
   consider masked stores to be expensive.  */

bool
default_empty_mask_is_expensive (unsigned ifn)
{
  return ifn == IFN_MASK_STORE;
}

/* By default, the cost model accumulates three separate costs (prologue,
   loop body, and epilogue) for a vectorized loop or block.  So allocate an
   array of three unsigned ints, set it to zero, and return its address.  */

vector_costs *
default_vectorize_create_costs (vec_info *vinfo, bool costing_for_scalar)
{
  return new vector_costs (vinfo, costing_for_scalar);
}
1669 :
/* Determine whether or not a pointer mode is valid.  Assume defaults
   of ptr_mode or Pmode - can be overridden.  */
bool
default_valid_pointer_mode (scalar_int_mode mode)
{
  return (mode == ptr_mode || mode == Pmode);
}

/* Determine whether the memory reference specified by REF may alias
   the C libraries errno location.  */
bool
default_ref_may_alias_errno (ao_ref *ref)
{
  tree base = ao_ref_base (ref);
  /* The default implementation assumes the errno location is
     a declaration of type int or is always accessed via a
     pointer to int.  We assume that accesses to errno are
     not deliberately obfuscated (even in conforming ways).  */
  if (TYPE_UNSIGNED (TREE_TYPE (base))
      || TYPE_MODE (TREE_TYPE (base)) != TYPE_MODE (integer_type_node))
    return false;
  /* The default implementation assumes an errno location declaration
     is never defined in the current compilation unit and may not be
     aliased by a local variable.  */
  if (DECL_P (base)
      && DECL_EXTERNAL (base)
      && !TREE_STATIC (base))
    return true;
  else if (TREE_CODE (base) == MEM_REF
	   && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
    {
      /* An int access through a pointer may alias errno unless
	 points-to analysis proves the pointer stays local.  */
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
      return !pi || pi->pt.anything || pi->pt.nonlocal;
    }
  return false;
}
1706 :
/* Return the mode for a pointer to a given ADDRSPACE,
   defaulting to ptr_mode for all address spaces.  */

scalar_int_mode
default_addr_space_pointer_mode (addr_space_t addrspace ATTRIBUTE_UNUSED)
{
  return ptr_mode;
}

/* Return the mode for an address in a given ADDRSPACE,
   defaulting to Pmode for all address spaces.  */

scalar_int_mode
default_addr_space_address_mode (addr_space_t addrspace ATTRIBUTE_UNUSED)
{
  return Pmode;
}

/* Named address space version of valid_pointer_mode.
   To match the above, the same modes apply to all address spaces.  */

bool
default_addr_space_valid_pointer_mode (scalar_int_mode mode,
				       addr_space_t as ATTRIBUTE_UNUSED)
{
  return targetm.valid_pointer_mode (mode);
}
1734 :
1735 : /* Some places still assume that all pointer or address modes are the
1736 : standard Pmode and ptr_mode. These optimizations become invalid if
1737 : the target actually supports multiple different modes. For now,
1738 : we disable such optimizations on such targets, using this function. */
1739 :
1740 : bool
1741 685276721 : target_default_pointer_address_modes_p (void)
1742 : {
1743 685276721 : if (targetm.addr_space.address_mode != default_addr_space_address_mode)
1744 : return false;
1745 685276721 : if (targetm.addr_space.pointer_mode != default_addr_space_pointer_mode)
1746 0 : return false;
1747 :
1748 : return true;
1749 : }
1750 :
/* Named address space version of legitimate_address_p.
   By default, all address spaces have the same form.  */

bool
default_addr_space_legitimate_address_p (machine_mode mode, rtx mem,
					 bool strict,
					 addr_space_t as ATTRIBUTE_UNUSED,
					 code_helper code)
{
  return targetm.legitimate_address_p (mode, mem, strict, code);
}

/* Named address space version of LEGITIMIZE_ADDRESS.
   By default, all address spaces have the same form.  */

rtx
default_addr_space_legitimize_address (rtx x, rtx oldx, machine_mode mode,
				       addr_space_t as ATTRIBUTE_UNUSED)
{
  return targetm.legitimize_address (x, oldx, mode);
}

/* The default hook for determining if one named address space is a subset of
   another and to return which address space to use as the common address
   space.  By default only identical spaces are subsets of each other.  */

bool
default_addr_space_subset_p (addr_space_t subset, addr_space_t superset)
{
  return (subset == superset);
}
1782 :
/* The default hook for determining if 0 within a named address
   space is a valid address.  By default it never is.  */

bool
default_addr_space_zero_address_valid (addr_space_t as ATTRIBUTE_UNUSED)
{
  return false;
}

/* The default hook for debugging the address space is to return the
   address space number to indicate DW_AT_address_class.  */
int
default_addr_space_debug (addr_space_t as)
{
  return as;
}

/* The default hook implementation for TARGET_ADDR_SPACE_DIAGNOSE_USAGE.
   Don't complain about any address space.  */

void
default_addr_space_diagnose_usage (addr_space_t, location_t)
{
}
1807 :
1808 :
/* The default hook for TARGET_ADDR_SPACE_CONVERT.  This hook should never be
   called for targets with only a generic address space.  */

rtx
default_addr_space_convert (rtx op ATTRIBUTE_UNUSED,
			    tree from_type ATTRIBUTE_UNUSED,
			    tree to_type ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}


/* The default hook for TARGET_ADDR_SPACE_FOR_ARTIFICIAL_RODATA:
   compiler-generated read-only data goes in the generic space.  */

addr_space_t
default_addr_space_for_artificial_rodata (tree, artificial_rodata)
{
  return ADDR_SPACE_GENERIC;
}
1828 :
1829 :
/* The default implementation of TARGET_HARD_REGNO_NREGS.  */
1831 :
unsigned int
default_hard_regno_nregs (unsigned int, machine_mode mode)
{
  /* Targets with variable-sized modes must provide their own definition
     of this hook.  */
  return CEIL (GET_MODE_SIZE (mode).to_constant (), UNITS_PER_WORD);
}

/* The default hook for TARGET_HARD_REGNO_SCRATCH_OK: any hard register
   may be used as a scratch.  */

bool
default_hard_regno_scratch_ok (unsigned int regno ATTRIBUTE_UNUSED)
{
  return true;
}

/* The default implementation of TARGET_MODE_DEPENDENT_ADDRESS_P:
   no address is mode-dependent.  */

bool
default_mode_dependent_address_p (const_rtx addr ATTRIBUTE_UNUSED,
				  addr_space_t addrspace ATTRIBUTE_UNUSED)
{
  return false;
}
1854 :
1855 : extern bool default_new_address_profitable_p (rtx, rtx);
1856 :
1857 :
1858 : /* The default implementation of TARGET_NEW_ADDRESS_PROFITABLE_P. */
1859 :
1860 : bool
1861 1131416 : default_new_address_profitable_p (rtx memref ATTRIBUTE_UNUSED,
1862 : rtx_insn *insn ATTRIBUTE_UNUSED,
1863 : rtx new_addr ATTRIBUTE_UNUSED)
1864 : {
1865 1131416 : return true;
1866 : }
1867 :
/* The default hook for TARGET_OPTION_VALID_ATTRIBUTE_P: warn that the
   "target" attribute is unsupported and reject it.  */

bool
default_target_option_valid_attribute_p (tree ARG_UNUSED (fndecl),
					 tree ARG_UNUSED (name),
					 tree ARG_UNUSED (args),
					 int ARG_UNUSED (flags))
{
  warning (OPT_Wattributes,
	   "%<target%> attribute is not supported on this machine");

  return false;
}

/* Likewise for the "target_version" attribute.  */

bool
default_target_option_valid_version_attribute_p (tree ARG_UNUSED (fndecl),
						 tree ARG_UNUSED (name),
						 tree ARG_UNUSED (args),
						 int ARG_UNUSED (flags))
{
  warning (OPT_Wattributes,
	   "%<target_version%> attribute is not supported on this machine");

  return false;
}

/* The default hook for TARGET_OPTION_PRAGMA_PARSE: warn (for push
   only) that the target pragma is unsupported.  */

bool
default_target_option_pragma_parse (tree ARG_UNUSED (args),
				    tree ARG_UNUSED (pop_target))
{
  /* If args is NULL the caller is handle_pragma_pop_options ().  In that case,
     emit no warning because "#pragma GCC pop_target" is valid on targets that
     do not have the "target" pragma.  */
  if (args)
    warning (OPT_Wpragmas,
	     "%<#pragma GCC target%> is not supported for this machine");

  return false;
}
1905 :
1906 : bool
1907 0 : default_target_can_inline_p (tree caller, tree callee)
1908 : {
1909 0 : tree callee_opts = DECL_FUNCTION_SPECIFIC_TARGET (callee);
1910 0 : tree caller_opts = DECL_FUNCTION_SPECIFIC_TARGET (caller);
1911 0 : if (! callee_opts)
1912 0 : callee_opts = target_option_default_node;
1913 0 : if (! caller_opts)
1914 0 : caller_opts = target_option_default_node;
1915 :
1916 : /* If both caller and callee have attributes, assume that if the
1917 : pointer is different, the two functions have different target
1918 : options since build_target_option_node uses a hash table for the
1919 : options. */
1920 0 : return callee_opts == caller_opts;
1921 : }
1922 :
/* By default, return false to not need to collect any target information
   for inlining.  Target maintainer should re-define the hook if the
   target want to take advantage of it.  */

bool
default_need_ipa_fn_target_info (const_tree, unsigned int &)
{
  return false;
}

/* The default hook for TARGET_UPDATE_IPA_FN_TARGET_INFO: nothing to
   record per statement.  */

bool
default_update_ipa_fn_target_info (unsigned int &, const gimple *)
{
  return false;
}
1938 :
/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */

unsigned int
default_case_values_threshold (void)
{
  return (targetm.have_casesi () ? 4 : 5);
}

/* The default hook for TARGET_HAVE_CONDITIONAL_EXECUTION: reflect the
   HAVE_conditional_execution macro.  */

bool
default_have_conditional_execution (void)
{
  return HAVE_conditional_execution;
}

/* The default hook for TARGET_HAVE_CCMP: conditional compares exist
   iff the target provides gen_ccmp_first.  */

bool
default_have_ccmp (void)
{
  return targetm.gen_ccmp_first != NULL;
}
1960 :
1961 : /* By default we assume that c99 functions are present at the runtime,
1962 : but sincos is not. */
1963 : bool
1964 0 : default_libc_has_function (enum function_class fn_class,
1965 : tree type ATTRIBUTE_UNUSED)
1966 : {
1967 0 : if (fn_class == function_c94
1968 : || fn_class == function_c99_misc
1969 : || fn_class == function_c99_math_complex)
1970 0 : return true;
1971 :
1972 : return false;
1973 : }
1974 :
/* By default assume that libc has not a fast implementation.  */

bool
default_libc_has_fast_function (int fcode ATTRIBUTE_UNUSED)
{
  return false;
}

/* TARGET_LIBC_HAS_FUNCTION variant for glibc: assume every queried
   function class is available.  */

bool
gnu_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED,
		       tree type ATTRIBUTE_UNUSED)
{
  return true;
}

/* TARGET_LIBC_HAS_FUNCTION variant for pre-C99 libcs: assume nothing
   queried is available.  */

bool
no_c99_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED,
			  tree type ATTRIBUTE_UNUSED)
{
  return false;
}
1996 :
1997 : /* Assume some c99 functions are present at the runtime including sincos. */
1998 : bool
1999 0 : bsd_libc_has_function (enum function_class fn_class,
2000 : tree type ATTRIBUTE_UNUSED)
2001 : {
2002 0 : if (fn_class == function_c94
2003 0 : || fn_class == function_c99_misc
2004 0 : || fn_class == function_sincos)
2005 : return true;
2006 :
2007 : return false;
2008 : }
2009 :
/* By default, -fhardened will add -D_FORTIFY_SOURCE=2.  */

unsigned
default_fortify_source_default_level ()
{
  return 2;
}

/* The default hook for TARGET_LIBM_FUNCTION_MAX_ERROR: no bound is
   known, so return the maximum (unknown) ulp error.  */

unsigned
default_libm_function_max_error (unsigned, machine_mode, bool)
{
  return ~0U;
}
2023 :
/* TARGET_LIBM_FUNCTION_MAX_ERROR implementation for glibc/Linux:
   worst-case ulp errors for builtin CFN in MODE, with BOUNDARY_P
   selecting the error bound on results near domain boundaries.  */

unsigned
glibc_linux_libm_function_max_error (unsigned cfn, machine_mode mode,
				     bool boundary_p)
{
  /* Let's use
     https://www.gnu.org/software/libc/manual/2.22/html_node/Errors-in-Math-Functions.html
     https://www.gnu.org/software/libc/manual/html_node/Errors-in-Math-Functions.html
     with usual values recorded here and significant outliers handled in
     target CPU specific overriders.  The tables only record default
     rounding to nearest, for -frounding-math let's add some extra ulps.
     For boundary_p values (say finite results outside of [-1.,1.] for
     sin/cos, or [-0.,+Inf] for sqrt etc. let's use custom random testers.  */
  int rnd = flag_rounding_math ? 4 : 0;
  /* Classify MODE by its floating-point format: single, double,
     extended, or quad precision.  */
  bool sf = (REAL_MODE_FORMAT (mode) == &ieee_single_format
	     || REAL_MODE_FORMAT (mode) == &mips_single_format
	     || REAL_MODE_FORMAT (mode) == &motorola_single_format);
  bool df = (REAL_MODE_FORMAT (mode) == &ieee_double_format
	     || REAL_MODE_FORMAT (mode) == &mips_double_format
	     || REAL_MODE_FORMAT (mode) == &motorola_double_format);
  bool xf = (REAL_MODE_FORMAT (mode) == &ieee_extended_intel_96_format
	     || REAL_MODE_FORMAT (mode) == &ieee_extended_intel_128_format
	     || REAL_MODE_FORMAT (mode) == &ieee_extended_motorola_format);
  bool tf = (REAL_MODE_FORMAT (mode) == &ieee_quad_format
	     || REAL_MODE_FORMAT (mode) == &mips_quad_format);

  switch (cfn)
    {
    CASE_CFN_SQRT:
    CASE_CFN_SQRT_FN:
      if (boundary_p)
	/* https://gcc.gnu.org/pipermail/gcc-patches/2023-April/616595.html */
	return 0;
      if (sf || df || xf || tf)
	return 0 + rnd;
      break;
    CASE_CFN_COS:
    CASE_CFN_COS_FN:
      /* cos is generally errors like sin, but far more arches have 2ulps
	 for double.  */
      if (!boundary_p && df)
	return 2 + rnd;
      gcc_fallthrough ();
    CASE_CFN_SIN:
    CASE_CFN_SIN_FN:
      if (boundary_p)
	/* According to
	   https://sourceware.org/pipermail/gcc-patches/2023-April/616315.html
	   seems default rounding sin/cos stay strictly in [-1.,1.] range,
	   with rounding to infinity it can be 1ulp larger/smaller.  */
	return flag_rounding_math ? 1 : 0;
      if (sf || df)
	return 1 + rnd;
      if (xf || tf)
	return 2 + rnd;
      break;
    default:
      break;
    }

  /* Unknown function: fall back to "no known bound".  */
  return default_libm_function_max_error (cfn, mode, boundary_p);
}
2085 :
/* The default hook for TARGET_VECTORIZE_BUILTIN_TM_LOAD/STORE: no
   transactional-memory load/store builtin is available for TYPE.  */
tree
default_builtin_tm_load_store (tree ARG_UNUSED (type))
{
  return NULL_TREE;
}
2091 :
/* Compute cost of moving registers to/from memory.  MODE is the mode of
   the value moved, RCLASS the register class involved, and IN selects the
   direction of the move (its meaning matches the MEMORY_MOVE_COST macro).  */

int
default_memory_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
			  reg_class_t rclass ATTRIBUTE_UNUSED,
			  bool in ATTRIBUTE_UNUSED)
{
#ifndef MEMORY_MOVE_COST
  /* No legacy macro: a fixed base cost of 4 plus any secondary-reload
     cost for this class/mode pair.  */
  return (4 + memory_move_secondary_cost (mode, (enum reg_class) rclass, in));
#else
  /* Defer to the target's legacy MEMORY_MOVE_COST macro.  */
  return MEMORY_MOVE_COST (MACRO_MODE (mode), (enum reg_class) rclass, in);
#endif
}
2105 :
/* Compute cost of moving data from a register of class FROM to one of
   TO, using MODE.  */

int
default_register_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
			    reg_class_t from ATTRIBUTE_UNUSED,
			    reg_class_t to ATTRIBUTE_UNUSED)
{
#ifndef REGISTER_MOVE_COST
  /* No legacy macro: every register-to-register move costs 2.  */
  return 2;
#else
  /* Defer to the target's legacy REGISTER_MOVE_COST macro.  */
  return REGISTER_MOVE_COST (MACRO_MODE (mode),
			     (enum reg_class) from, (enum reg_class) to);
#endif
}
2121 :
2122 : /* The default implementation of TARGET_CALLEE_SAVE_COST. */
2123 :
2124 : int
2125 0 : default_callee_save_cost (spill_cost_type spill_type, unsigned int,
2126 : machine_mode, unsigned int, int mem_cost,
2127 : const HARD_REG_SET &callee_saved_regs,
2128 : bool existing_spills_p)
2129 : {
2130 0 : if (!existing_spills_p)
2131 : {
2132 0 : auto frame_type = (spill_type == spill_cost_type::SAVE
2133 0 : ? frame_cost_type::ALLOCATION
2134 : : frame_cost_type::DEALLOCATION);
2135 0 : mem_cost += targetm.frame_allocation_cost (frame_type,
2136 : callee_saved_regs);
2137 : }
2138 0 : return mem_cost;
2139 : }
2140 :
/* The default implementation of TARGET_FRAME_ALLOCATION_COST:
   assume allocating or deallocating frame space is free.  */

int
default_frame_allocation_cost (frame_cost_type, const HARD_REG_SET &)
{
  return 0;
}
2148 :
/* The default implementation of TARGET_SLOW_UNALIGNED_ACCESS:
   unaligned accesses are slow exactly when the target requires
   strict alignment.  */

bool
default_slow_unaligned_access (machine_mode, unsigned int)
{
  return STRICT_ALIGNMENT;
}
2156 :
/* The default implementation of TARGET_ESTIMATED_POLY_VALUE:
   use the constant coefficient as the estimate, ignoring any
   runtime-variable component.  */

HOST_WIDE_INT
default_estimated_poly_value (poly_int64 x, poly_value_estimate_kind)
{
  return x.coeffs[0];
}
2164 :
/* For hooks which use the MOVE_RATIO macro, this gives the legacy default
   behavior.  SPEED_P is true if we are compiling for speed.  */

unsigned int
get_move_ratio (bool speed_p ATTRIBUTE_UNUSED)
{
  unsigned int move_ratio;
#ifdef MOVE_RATIO
  move_ratio = (unsigned int) MOVE_RATIO (speed_p);
#else
#if defined (HAVE_cpymemqi) || defined (HAVE_cpymemhi) || defined (HAVE_cpymemsi) || defined (HAVE_cpymemdi) || defined (HAVE_cpymemti)
  /* A cpymem pattern exists, so prefer expanding through it.  */
  move_ratio = 2;
#else /* No cpymem patterns, pick a default.  */
  move_ratio = ((speed_p) ? 15 : 3);
#endif
#endif
  return move_ratio;
}
2183 :
/* Return TRUE if the move_by_pieces/set_by_pieces infrastructure should be
   used; return FALSE if the cpymem/setmem optab should be expanded, or
   a call to memcpy emitted.  SIZE is the number of bytes, ALIGNMENT the
   known alignment, OP the kind of operation, and SPEED_P whether we are
   optimizing for speed.  */

bool
default_use_by_pieces_infrastructure_p (unsigned HOST_WIDE_INT size,
					unsigned int alignment,
					enum by_pieces_operation op,
					bool speed_p)
{
  unsigned int max_size = 0;
  unsigned int ratio = 0;

  /* Select the per-operation piece-size limit and the maximum number of
     by-pieces instructions considered profitable.  */
  switch (op)
    {
    case CLEAR_BY_PIECES:
      max_size = STORE_MAX_PIECES;
      ratio = CLEAR_RATIO (speed_p);
      break;
    case MOVE_BY_PIECES:
      max_size = MOVE_MAX_PIECES;
      ratio = get_move_ratio (speed_p);
      break;
    case SET_BY_PIECES:
      max_size = STORE_MAX_PIECES;
      ratio = SET_RATIO (speed_p);
      break;
    case STORE_BY_PIECES:
      max_size = STORE_MAX_PIECES;
      ratio = get_move_ratio (speed_p);
      break;
    case COMPARE_BY_PIECES:
      max_size = COMPARE_MAX_PIECES;
      /* Pick a likely default, just as in get_move_ratio.  */
      ratio = speed_p ? 15 : 3;
      break;
    }

  /* Use by-pieces expansion only when it needs fewer insns than RATIO.  */
  return by_pieces_ninsns (size, alignment, max_size + 1, op) < ratio;
}
2224 :
/* This hook controls code generation for expanding a memcmp operation by
   pieces.  Return 1 for the normal pattern of compare/jump after each pair
   of loads, or a higher number to reduce the number of branches.  */

int
default_compare_by_pieces_branch_ratio (machine_mode)
{
  /* Default: branch after every comparison.  */
  return 1;
}
2234 :
/* Write PATCH_AREA_SIZE NOPs into the asm outfile FILE around a function
   entry.  If RECORD_P is true and the target supports named sections,
   the location of the NOPs will be recorded in a special object section
   called "__patchable_function_entries".  This routine may be called
   twice per function to put NOPs before and after the function
   entry.  */

void
default_print_patchable_function_entry (FILE *file,
					unsigned HOST_WIDE_INT patch_area_size,
					bool record_p)
{
  const char *nop_templ = 0;
  int code_num;
  rtx_insn *my_nop = make_insn_raw (gen_nop ());

  /* We use the template alone, relying on the (currently sane) assumption
     that the NOP template does not have variable operands.  */
  code_num = recog_memoized (my_nop);
  nop_templ = get_insn_template (code_num, my_nop);

  if (record_p && targetm_common.have_named_sections)
    {
      char buf[256];
      section *previous_section = in_section;
      const char *asm_op = integer_asm_op (POINTER_SIZE_UNITS, false);

      gcc_assert (asm_op != NULL);
      /* If SECTION_LINK_ORDER is supported, this internal label will
	 be filled as the symbol for linked_to section.  */
      ASM_GENERATE_INTERNAL_LABEL (buf, "LPFE", current_function_funcdef_no);

      unsigned int flags = SECTION_WRITE | SECTION_RELRO;
      if (HAVE_GAS_SECTION_LINK_ORDER)
	flags |= SECTION_LINK_ORDER;

      section *sect = get_section ("__patchable_function_entries",
				  flags, current_function_decl);
      if (HAVE_COMDAT_GROUP && DECL_COMDAT_GROUP (current_function_decl))
	switch_to_comdat_section (sect, current_function_decl);
      else
	switch_to_section (sect);
      /* Emit a pointer-sized reference to the label into the recording
	 section, then return to the previous section and place the label
	 itself where the NOPs will follow.  */
      assemble_align (POINTER_SIZE);
      fputs (asm_op, file);
      assemble_name_raw (file, buf);
      fputc ('\n', file);

      switch_to_section (previous_section);
      ASM_OUTPUT_LABEL (file, buf);
    }

  unsigned i;
  for (i = 0; i < patch_area_size; ++i)
    output_asm_insn (nop_templ, NULL);
}
2290 :
/* Return true if the target emits the profiling call before the
   function prologue (controlled by PROFILE_BEFORE_PROLOGUE).  */

bool
default_profile_before_prologue (void)
{
#ifdef PROFILE_BEFORE_PROLOGUE
  return true;
#else
  return false;
#endif
}
2300 :
/* The default implementation of TARGET_PREFERRED_RELOAD_CLASS.
   Defers to the legacy PREFERRED_RELOAD_CLASS macro when the target
   defines it, otherwise keeps RCLASS unchanged.  */

reg_class_t
default_preferred_reload_class (rtx x ATTRIBUTE_UNUSED,
				reg_class_t rclass)
{
#ifdef PREFERRED_RELOAD_CLASS
  return (reg_class_t) PREFERRED_RELOAD_CLASS (x, (enum reg_class) rclass);
#else
  return rclass;
#endif
}
2313 :
/* The default implementation of TARGET_OUTPUT_PREFERRED_RELOAD_CLASS:
   keep the requested class unchanged.  */

reg_class_t
default_preferred_output_reload_class (rtx x ATTRIBUTE_UNUSED,
				       reg_class_t rclass)
{
  return rclass;
}
2322 :
/* The default implementation of TARGET_PREFERRED_RENAME_CLASS:
   NO_REGS means no preference is expressed.  */
reg_class_t
default_preferred_rename_class (reg_class_t rclass ATTRIBUTE_UNUSED)
{
  return NO_REGS;
}
2329 :
/* The default implementation of TARGET_CLASS_LIKELY_SPILLED_P:
   single-register classes are considered likely to be spilled.  */

bool
default_class_likely_spilled_p (reg_class_t rclass)
{
  return (reg_class_size[(int) rclass] == 1);
}
2337 :
/* The default implementation of TARGET_CLASS_MAX_NREGS: the number of
   hard registers of class RCLASS needed to hold a value of mode MODE.  */

unsigned char
default_class_max_nregs (reg_class_t rclass ATTRIBUTE_UNUSED,
			 machine_mode mode ATTRIBUTE_UNUSED)
{
#ifdef CLASS_MAX_NREGS
  return (unsigned char) CLASS_MAX_NREGS ((enum reg_class) rclass,
					  MACRO_MODE (mode));
#else
  /* Targets with variable-sized modes must provide their own definition
     of this hook.  */
  unsigned int size = GET_MODE_SIZE (mode).to_constant ();
  return (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
#endif
}
2354 :
2355 : /* The default implementation of TARGET_AVOID_STORE_FORWARDING_P. */
2356 :
2357 : bool
2358 7 : default_avoid_store_forwarding_p (vec<store_fwd_info>, rtx, int total_cost,
2359 : bool)
2360 : {
2361 : /* Use a simple cost heurstic base on param_store_forwarding_max_distance.
2362 : In general the distance should be somewhat correlated to the store
2363 : forwarding penalty; if the penalty is large then it is justified to
2364 : increase the window size. Use this to reject sequences that are clearly
2365 : unprofitable.
2366 : Skip the cost check if param_store_forwarding_max_distance is 0. */
2367 7 : int max_cost = COSTS_N_INSNS (param_store_forwarding_max_distance / 2);
2368 7 : const bool unlimited_cost = (param_store_forwarding_max_distance == 0);
2369 7 : if (!unlimited_cost && total_cost > max_cost && max_cost)
2370 : {
2371 2 : if (dump_file)
2372 0 : fprintf (dump_file, "Not transformed due to cost: %d > %d.\n",
2373 : total_cost, max_cost);
2374 :
2375 2 : return false;
2376 : }
2377 :
2378 : return true;
2379 : }
2380 :
/* Determine the debugging unwind mechanism for the target.  */

enum unwind_info_type
default_debug_unwind_info (void)
{
  /* If the target wants to force the use of dwarf2 unwind info, let it.  */
  /* ??? Change all users to the hook, then poison this.  */
#ifdef DWARF2_FRAME_INFO
  if (DWARF2_FRAME_INFO)
    return UI_DWARF2;
#endif

  /* Otherwise, only turn it on if dwarf2 debugging is enabled.  */
#ifdef DWARF2_DEBUGGING_INFO
  if (dwarf_debuginfo_p ())
    return UI_DWARF2;
#endif

  /* No DWARF2 support configured: emit no unwind info.  */
  return UI_NONE;
}
2401 :
/* Targets that set NUM_POLY_INT_COEFFS to something greater than 1
   must define this hook.  The default is only reachable by mistake.  */

unsigned int
default_dwarf_poly_indeterminate_value (unsigned int, unsigned int *, int *)
{
  gcc_unreachable ();
}
2410 :
/* Determine the correct mode for a Dwarf frame register that represents
   register REGNO.  */

machine_mode
default_dwarf_frame_reg_mode (int regno)
{
  machine_mode save_mode = reg_raw_mode[regno];

  /* If the raw mode is only partially preserved across EH-edge calls,
     fall back to a mode that is fully call-saved.  */
  if (targetm.hard_regno_call_part_clobbered (eh_edge_abi.id (),
					      regno, save_mode))
    save_mode = choose_hard_reg_mode (regno, 1, &eh_edge_abi);
  return save_mode;
}
2424 :
/* To be used by targets where reg_raw_mode doesn't return the right
   mode for registers used in apply_builtin_return and apply_builtin_arg.  */

fixed_size_mode
default_get_reg_raw_mode (int regno)
{
  /* Targets must override this hook if the underlying register is
     variable-sized.  */
  return as_a <fixed_size_mode> (reg_raw_mode[regno]);
}
2435 :
/* Return true if a leaf function should stay leaf even with profiling
   enabled.  The default assumes the profiling call breaks leafness.  */

bool
default_keep_leaf_when_profiled ()
{
  return false;
}
2444 :
2445 : /* Return true if the state of option OPTION should be stored in PCH files
2446 : and checked by default_pch_valid_p. Store the option's current state
2447 : in STATE if so. */
2448 :
2449 : static inline bool
2450 45191672 : option_affects_pch_p (int option, struct cl_option_state *state)
2451 : {
2452 45191672 : if ((cl_options[option].flags & CL_TARGET) == 0)
2453 : return false;
2454 3698194 : if ((cl_options[option].flags & CL_PCH_IGNORE) != 0)
2455 : return false;
2456 3698194 : if (option_flag_var (option, &global_options) == &target_flags)
2457 621508 : if (targetm.check_pch_target_flags)
2458 : return false;
2459 3698194 : return get_option_state (&global_options, option, state);
2460 : }
2461 :
/* Default version of get_pch_validity.
   By default, every flag difference is fatal; that will be mostly right for
   most targets, but completely right for very few.  */

void *
default_get_pch_validity (size_t *sz)
{
  struct cl_option_state state;
  size_t i;
  char *result, *r;

  /* Two bytes for flag_pic/flag_pie, plus target_flags if the target
     checks them, plus the state of every PCH-relevant option.  */
  *sz = 2;
  if (targetm.check_pch_target_flags)
    *sz += sizeof (target_flags);
  for (i = 0; i < cl_options_count; i++)
    if (option_affects_pch_p (i, &state))
      *sz += state.size;

  result = r = XNEWVEC (char, *sz);
  r[0] = flag_pic;
  r[1] = flag_pie;
  r += 2;
  if (targetm.check_pch_target_flags)
    {
      memcpy (r, &target_flags, sizeof (target_flags));
      r += sizeof (target_flags);
    }

  /* Serialize option states in the same order default_pch_valid_p
     reads them back.  */
  for (i = 0; i < cl_options_count; i++)
    if (option_affects_pch_p (i, &state))
      {
	memcpy (r, state.data, state.size);
	r += state.size;
      }

  return result;
}
2499 :
/* Return a message which says that a PCH file was created with a different
   setting of OPTION.  The returned string is heap-allocated by
   xasprintf.  */

static const char *
pch_option_mismatch (const char *option)
{
  return xasprintf (_("created and used with differing settings of '%s'"),
		    option);
}
2509 :
/* Default version of pch_valid_p.  DATA_P/LEN is the blob previously
   produced by default_get_pch_validity; return NULL if the PCH is valid,
   otherwise a message describing the mismatch.  */

const char *
default_pch_valid_p (const void *data_p, size_t len ATTRIBUTE_UNUSED)
{
  struct cl_option_state state;
  const char *data = (const char *)data_p;
  size_t i;

  /* -fpic and -fpie also usually make a PCH invalid.  */
  if (data[0] != flag_pic)
    return _("created and used with different settings of %<-fpic%>");
  if (data[1] != flag_pie)
    return _("created and used with different settings of %<-fpie%>");
  data += 2;

  /* Check target_flags.  */
  if (targetm.check_pch_target_flags)
    {
      int tf;
      const char *r;

      memcpy (&tf, data, sizeof (target_flags));
      data += sizeof (target_flags);
      r = targetm.check_pch_target_flags (tf);
      if (r != NULL)
	return r;
    }

  /* Compare each recorded option state against the current state, in
     the same order default_get_pch_validity wrote them.  */
  for (i = 0; i < cl_options_count; i++)
    if (option_affects_pch_p (i, &state))
      {
	if (memcmp (data, state.data, state.size) != 0)
	  return pch_option_mismatch (cl_options[i].opt_text);
	data += state.size;
      }

  return NULL;
}
2549 :
/* Default version of cstore_mode: use the mode of the cstore pattern's
   output operand.  */

scalar_int_mode
default_cstore_mode (enum insn_code icode)
{
  return as_a <scalar_int_mode> (insn_data[(int) icode].operand[0].mode);
}
2557 :
/* Default version of member_type_forces_blk: no member type forces
   BLKmode for the containing aggregate.  */

bool
default_member_type_forces_blk (const_tree, machine_mode)
{
  return false;
}
2565 :
/* Default version of canonicalize_comparison: leave the comparison
   untouched.  */

void
default_canonicalize_comparison (int *, rtx *, rtx *, bool)
{
}
2572 :
/* Default implementation of TARGET_ATOMIC_ASSIGN_EXPAND_FENV:
   no floating-point environment handling is generated.  */

void
default_atomic_assign_expand_fenv (tree *, tree *, tree *)
{
}
2579 :
/* Unless the target overrides it, pad varargs downward exactly on
   big-endian machines.  */
#ifndef PAD_VARARGS_DOWN
#define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
#endif
2583 :
2584 : /* Build an indirect-ref expression over the given TREE, which represents a
2585 : piece of a va_arg() expansion. */
2586 : tree
2587 54941 : build_va_arg_indirect_ref (tree addr)
2588 : {
2589 54941 : addr = build_simple_mem_ref_loc (EXPR_LOCATION (addr), addr);
2590 54941 : return addr;
2591 : }
2592 :
/* The "standard" implementation of va_arg: read the value from the
   current (padded) address and increment by the (padded) size.  */

tree
std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
			  gimple_seq *post_p)
{
  tree addr, t, type_size, rounded_size, valist_tmp;
  unsigned HOST_WIDE_INT align, boundary;
  bool indirect;

  /* All of the alignment and movement below is for args-grow-up machines.
     As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
     implement their own specialized gimplify_va_arg_expr routines.  */
  if (ARGS_GROW_DOWNWARD)
    gcc_unreachable ();

  /* Arguments passed by reference are fetched through a pointer, so
     rewrite TYPE to the pointer type and dereference at the end.  */
  indirect = pass_va_arg_by_reference (type);
  if (indirect)
    type = build_pointer_type (type);

  /* Complex arguments the target splits are fetched as two halves.  */
  if (targetm.calls.split_complex_arg
      && TREE_CODE (type) == COMPLEX_TYPE
      && targetm.calls.split_complex_arg (type))
    {
      tree real_part, imag_part;

      real_part = std_gimplify_va_arg_expr (valist,
					    TREE_TYPE (type), pre_p, NULL);
      real_part = get_initialized_tmp_var (real_part, pre_p);

      imag_part = std_gimplify_va_arg_expr (unshare_expr (valist),
					    TREE_TYPE (type), pre_p, NULL);
      imag_part = get_initialized_tmp_var (imag_part, pre_p);

      return build2 (COMPLEX_EXPR, type, real_part, imag_part);
    }

  align = PARM_BOUNDARY / BITS_PER_UNIT;
  boundary = targetm.calls.function_arg_boundary (TYPE_MODE (type), type);

  /* When we align parameter on stack for caller, if the parameter
     alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
     aligned at MAX_SUPPORTED_STACK_ALIGNMENT.  We will match callee
     here with caller.  */
  if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
    boundary = MAX_SUPPORTED_STACK_ALIGNMENT;

  boundary /= BITS_PER_UNIT;

  /* Hoist the valist value into a temporary for the moment.  */
  valist_tmp = get_initialized_tmp_var (valist, pre_p);

  /* va_list pointer is aligned to PARM_BOUNDARY.  If argument actually
     requires greater alignment, we must perform dynamic alignment.  */
  if (boundary > align
      && !TYPE_EMPTY_P (type)
      && !integer_zerop (TYPE_SIZE (type)))
    {
      /* valist_tmp = (valist_tmp + boundary - 1) & -boundary.  */
      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
		  fold_build_pointer_plus_hwi (valist_tmp, boundary - 1));
      gimplify_and_add (t, pre_p);

      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
		  fold_build2 (BIT_AND_EXPR, TREE_TYPE (valist),
			       valist_tmp,
			       build_int_cst (TREE_TYPE (valist), -boundary)));
      gimplify_and_add (t, pre_p);
    }
  else
    boundary = align;

  /* If the actual alignment is less than the alignment of the type,
     adjust the type accordingly so that we don't assume strict alignment
     when dereferencing the pointer.  */
  boundary *= BITS_PER_UNIT;
  if (boundary < TYPE_ALIGN (type))
    {
      type = build_variant_type_copy (type);
      SET_TYPE_ALIGN (type, boundary);
    }

  /* Compute the rounded size of the type.  */
  type_size = arg_size_in_bytes (type);
  rounded_size = round_up (type_size, align);

  /* Reduce rounded_size so it's sharable with the postqueue.  */
  gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);

  /* Get AP.  */
  addr = valist_tmp;
  if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
    {
      /* Small args are padded downward.  */
      t = fold_build2_loc (input_location, GT_EXPR, sizetype,
			   rounded_size, size_int (align));
      t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
		       size_binop (MINUS_EXPR, rounded_size, type_size));
      addr = fold_build_pointer_plus (addr, t);
    }

  /* Compute new value for AP.  */
  t = fold_build_pointer_plus (valist_tmp, rounded_size);
  t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
  gimplify_and_add (t, pre_p);

  addr = fold_convert (build_pointer_type (type), addr);

  if (indirect)
    addr = build_va_arg_indirect_ref (addr);

  return build_va_arg_indirect_ref (addr);
}
2706 :
/* An implementation of TARGET_CAN_USE_DOLOOP_P for targets that do
   not support nested low-overhead loops.  */

bool
can_use_doloop_if_innermost (const widest_int &, const widest_int &,
			     unsigned int loop_depth, bool)
{
  /* Only the innermost loop (depth 1) qualifies.  */
  return loop_depth == 1;
}
2716 :
/* Default implementation of TARGET_OPTAB_SUPPORTED_P:
   every optab the generic code asks about is supported.  */

bool
default_optab_supported_p (int, machine_mode, machine_mode, optimization_type)
{
  return true;
}
2724 :
/* Default implementation of TARGET_MAX_NOCE_IFCVT_SEQ_COST: the maximum
   cost allowed for an if-converted sequence replacing a branch over
   edge E.  */

unsigned int
default_max_noce_ifcvt_seq_cost (edge e)
{
  bool predictable_p = predictable_edge_p (e);

  /* Honor an explicitly set --param value for the matching case.  */
  if (predictable_p)
    {
      if (OPTION_SET_P (param_max_rtl_if_conversion_predictable_cost))
	return param_max_rtl_if_conversion_predictable_cost;
    }
  else
    {
      if (OPTION_SET_P (param_max_rtl_if_conversion_unpredictable_cost))
	return param_max_rtl_if_conversion_unpredictable_cost;
    }

  /* Otherwise derive a budget from the target's branch cost.  */
  return BRANCH_COST (true, predictable_p) * COSTS_N_INSNS (3);
}
2745 :
/* Default implementation of TARGET_MIN_ARITHMETIC_PRECISION: a full word
   on targets that operate on whole registers, otherwise a single byte.  */

unsigned int
default_min_arithmetic_precision (void)
{
  return WORD_REGISTER_OPERATIONS ? BITS_PER_WORD : BITS_PER_UNIT;
}
2753 :
/* Default implementation of TARGET_C_EXCESS_PRECISION:
   evaluate in at least float precision.  */

enum flt_eval_method
default_excess_precision (enum excess_precision_type ATTRIBUTE_UNUSED)
{
  return FLT_EVAL_METHOD_PROMOTE_TO_FLOAT;
}
2761 :
/* Return true if _BitInt(N) is supported and fill details about it into
   *INFO.  By default _BitInt is not supported at all.  */
bool
default_bitint_type_info (int, struct bitint_info *)
{
  return false;
}
2769 :
/* Default implementation for
   TARGET_STACK_CLASH_PROTECTION_ALLOCA_PROBE_RANGE.
   Zero means no target-specific probing interval.  */
HOST_WIDE_INT
default_stack_clash_protection_alloca_probe_range (void)
{
  return 0;
}
2777 :
/* The default implementation of TARGET_EARLY_REMAT_MODES:
   select no modes for early rematerialization.  */

void
default_select_early_remat_modes (sbitmap)
{
}
2784 :
/* The default implementation of TARGET_PREFERRED_ELSE_VALUE:
   prefer a zero of TYPE for the inactive lanes.  */

tree
default_preferred_else_value (unsigned, tree type, unsigned, tree *)
{
  return build_zero_cst (type);
}
2792 :
/* The default implementation of TARGET_INSTRUCTION_SELECTION:
   no target-specific statement selection is performed.  */

bool
default_instruction_selection (function *, gimple_stmt_iterator *)
{
  return false;
}
2800 :
/* Default implementation of TARGET_HAVE_SPECULATION_SAFE_VALUE.
   With ACTIVE mitigation requested, report whether a speculation
   barrier pattern exists; otherwise claim support unconditionally.  */
bool
default_have_speculation_safe_value (bool active ATTRIBUTE_UNUSED)
{
#ifdef HAVE_speculation_barrier
  return active ? HAVE_speculation_barrier : true;
#else
  return false;
#endif
}
/* Alternative implementation of TARGET_HAVE_SPECULATION_SAFE_VALUE
   that can be used on targets that never have speculative execution.  */
bool
speculation_safe_value_not_needed (bool active)
{
  /* "Supported" exactly when no active mitigation was requested.  */
  return !active;
}
2818 :
/* Default implementation of the speculation-safe-load builtin.  This
   implementation simply copies val to result and generates a
   speculation_barrier insn, if such a pattern is defined.  */
rtx
default_speculation_safe_value (machine_mode mode ATTRIBUTE_UNUSED,
				rtx result, rtx val,
				rtx failval ATTRIBUTE_UNUSED)
{
  emit_move_insn (result, val);

#ifdef HAVE_speculation_barrier
  /* Assume the target knows what it is doing: if it defines a
     speculation barrier, but it is not enabled, then assume that one
     isn't needed.  */
  if (HAVE_speculation_barrier)
    emit_insn (gen_speculation_barrier ());
#endif

  return result;
}
2839 :
/* How many bits to shift in order to access the tag bits.
   The default is to store the tag in the top 8 bits of a 64 bit pointer, hence
   shifting 56 bits will leave just the tag.  These constants are used by the
   default memtag hooks below.  */
#define HWASAN_SHIFT (GET_MODE_PRECISION (Pmode) - 8)
#define HWASAN_SHIFT_RTX GEN_INT (HWASAN_SHIFT)
2845 :
/* The default implementation of TARGET_MEMTAG_CAN_TAG_ADDRESSES:
   address tagging is not supported.  */
bool
default_memtag_can_tag_addresses ()
{
  return false;
}
2851 :
/* The default implementation of TARGET_MEMTAG_TAG_BITSIZE:
   tags occupy 8 bits.  */
uint8_t
default_memtag_tag_bitsize ()
{
  return 8;
}
2857 :
/* The default implementation of TARGET_MEMTAG_GRANULE_SIZE:
   one tag covers 16 bytes.  */
uint8_t
default_memtag_granule_size ()
{
  return 16;
}
2863 :
/* The default implementation of TARGET_MEMTAG_INSERT_RANDOM_TAG.
   With random frame tags enabled, obtain a tag from libhwasan and
   apply it to UNTAGGED; otherwise return UNTAGGED unchanged.  */
rtx
default_memtag_insert_random_tag (rtx untagged, rtx target)
{
  gcc_assert (param_hwasan_instrument_stack);
  if (param_hwasan_random_frame_tag)
    {
      rtx fn = init_one_libfunc ("__hwasan_generate_tag");
      rtx new_tag = emit_library_call_value (fn, NULL_RTX, LCT_NORMAL, QImode);
      return targetm.memtag.set_tag (untagged, new_tag, target);
    }
  else
    {
      /* NOTE: The kernel API does not have __hwasan_generate_tag exposed.
	 In the future we may add the option emit random tags with inline
	 instrumentation instead of function calls.  This would be the same
	 between the kernel and userland.  */
      return untagged;
    }
}
2884 :
/* The default implementation of TARGET_MEMTAG_ADD_TAG: add TAG_OFFSET to
   the tag bits and OFFSET to the address bits of BASE in one constant
   addition.  */
rtx
default_memtag_add_tag (rtx base, poly_int64 offset, uint8_t tag_offset)
{
  /* Need to look into what the most efficient code sequence is.
     This is a code sequence that would be emitted *many* times, so we
     want it as small as possible.

     There are two places where tag overflow is a question:
       - Tagging the shadow stack.
	  (both tagging and untagging).
       - Tagging addressable pointers.

     We need to ensure both behaviors are the same (i.e. that the tag that
     ends up in a pointer after "overflowing" the tag bits with a tag addition
     is the same that ends up in the shadow space).

     The aim is that the behavior of tag addition should follow modulo
     wrapping in both instances.

     The libhwasan code doesn't have any path that increments a pointer's tag,
     which means it has no opinion on what happens when a tag increment
     overflows (and hence we can choose our own behavior).  */

  offset += ((uint64_t)tag_offset << HWASAN_SHIFT);
  return plus_constant (Pmode, base, offset);
}
2912 :
/* The default implementation of TARGET_MEMTAG_SET_TAG: shift TAG into
   the tag position and OR it into UNTAGGED, placing the result in
   TARGET when possible.  */
rtx
default_memtag_set_tag (rtx untagged, rtx tag, rtx target)
{
  gcc_assert (GET_MODE (untagged) == Pmode && GET_MODE (tag) == QImode);
  tag = expand_simple_binop (Pmode, ASHIFT, tag, HWASAN_SHIFT_RTX, NULL_RTX,
			     /* unsignedp = */1, OPTAB_WIDEN);
  rtx ret = expand_simple_binop (Pmode, IOR, untagged, tag, target,
				 /* unsignedp = */1, OPTAB_DIRECT);
  gcc_assert (ret);
  return ret;
}
2925 :
/* The default implementation of TARGET_MEMTAG_EXTRACT_TAG: shift the
   tag bits of TAGGED_POINTER down and return them as a QImode value.  */
rtx
default_memtag_extract_tag (rtx tagged_pointer, rtx target)
{
  rtx tag = expand_simple_binop (Pmode, LSHIFTRT, tagged_pointer,
				 HWASAN_SHIFT_RTX, target,
				 /* unsignedp = */0,
				 OPTAB_DIRECT);
  rtx ret = gen_lowpart (QImode, tag);
  gcc_assert (ret);
  return ret;
}
2938 :
/* The default implementation of TARGET_MEMTAG_UNTAGGED_POINTER: mask off
   the tag bits of TAGGED_POINTER, leaving only the address bits.  */
rtx
default_memtag_untagged_pointer (rtx tagged_pointer, rtx target)
{
  rtx tag_mask = gen_int_mode ((HOST_WIDE_INT_1U << HWASAN_SHIFT) - 1, Pmode);
  rtx untagged_base = expand_simple_binop (Pmode, AND, tagged_pointer,
					   tag_mask, target, true,
					   OPTAB_DIRECT);
  gcc_assert (untagged_base);
  return untagged_base;
}
2950 :
2951 : #include "gt-targhooks.h"
|