Line data Source code
1 : /* Default target hook functions.
2 : Copyright (C) 2003-2026 Free Software Foundation, Inc.
3 :
4 : This file is part of GCC.
5 :
6 : GCC is free software; you can redistribute it and/or modify it under
7 : the terms of the GNU General Public License as published by the Free
8 : Software Foundation; either version 3, or (at your option) any later
9 : version.
10 :
11 : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 : WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 : for more details.
15 :
16 : You should have received a copy of the GNU General Public License
17 : along with GCC; see the file COPYING3. If not see
18 : <http://www.gnu.org/licenses/>. */
19 :
20 : /* The migration of target macros to target hooks works as follows:
21 :
22 : 1. Create a target hook that uses the existing target macros to
23 : implement the same functionality.
24 :
25 : 2. Convert all the MI files to use the hook instead of the macro.
26 :
27 : 3. Repeat for a majority of the remaining target macros. This will
28 : take some time.
29 :
30 : 4. Tell target maintainers to start migrating.
31 :
32 : 5. Eventually convert the backends to override the hook instead of
33 : defining the macros. This will take some time too.
34 :
35 : 6. TBD when, poison the macros. Unmigrated targets will break at
36 : this point.
37 :
38 : Note that we expect steps 1-3 to be done by the people that
39 : understand what the MI does with each macro, and step 5 to be done
40 : by the target maintainers for their respective targets.
41 :
42 : Note that steps 1 and 2 don't have to be done together, but no
43 : target can override the new hook until step 2 is complete for it.
44 :
45 : Once the macros are poisoned, we will revert to the old migration
46 : rules - migrate the macro, callers, and targets all at once. This
47 : comment can thus be removed at that point. */
48 :
49 : #include "config.h"
50 : #include "system.h"
51 : #include "coretypes.h"
52 : #include "target.h"
53 : #include "function.h"
54 : #include "rtl.h"
55 : #include "tree.h"
56 : #include "tree-ssa-alias.h"
57 : #include "gimple-expr.h"
58 : #include "memmodel.h"
59 : #include "backend.h"
60 : #include "emit-rtl.h"
61 : #include "df.h"
62 : #include "tm_p.h"
63 : #include "stringpool.h"
64 : #include "tree-vrp.h"
65 : #include "tree-ssanames.h"
66 : #include "profile-count.h"
67 : #include "optabs.h"
68 : #include "regs.h"
69 : #include "recog.h"
70 : #include "diagnostic-core.h"
71 : #include "fold-const.h"
72 : #include "stor-layout.h"
73 : #include "varasm.h"
74 : #include "flags.h"
75 : #include "explow.h"
76 : #include "expmed.h"
77 : #include "calls.h"
78 : #include "expr.h"
79 : #include "output.h"
80 : #include "common/common-target.h"
81 : #include "reload.h"
82 : #include "intl.h"
83 : #include "opts.h"
84 : #include "gimplify.h"
85 : #include "predict.h"
86 : #include "real.h"
87 : #include "langhooks.h"
88 : #include "sbitmap.h"
89 : #include "function-abi.h"
90 : #include "attribs.h"
91 : #include "asan.h"
92 : #include "emit-rtl.h"
93 : #include "gimple.h"
94 : #include "cfgloop.h"
95 : #include "tree-vectorizer.h"
96 : #include "options.h"
97 : #include "case-cfn-macros.h"
98 : #include "avoid-store-forwarding.h"
99 :
100 : bool
101 0 : default_legitimate_address_p (machine_mode mode ATTRIBUTE_UNUSED,
102 : rtx addr ATTRIBUTE_UNUSED,
103 : bool strict ATTRIBUTE_UNUSED,
104 : code_helper ATTRIBUTE_UNUSED)
105 : {
106 : #ifdef GO_IF_LEGITIMATE_ADDRESS
107 : /* Defer to the old implementation using a goto. */
108 : if (strict)
109 : return strict_memory_address_p (mode, addr);
110 : else
111 : return memory_address_p (mode, addr);
112 : #else
113 0 : gcc_unreachable ();
114 : #endif
115 : }
116 :
117 : void
118 26605 : default_external_libcall (rtx fun ATTRIBUTE_UNUSED)
119 : {
120 : #ifdef ASM_OUTPUT_EXTERNAL_LIBCALL
121 26605 : ASM_OUTPUT_EXTERNAL_LIBCALL (asm_out_file, fun);
122 : #endif
123 26605 : }
124 :
125 : int
126 5823472 : default_unspec_may_trap_p (const_rtx x, unsigned flags)
127 : {
128 5823472 : int i;
129 :
130 : /* Any floating arithmetic may trap. */
131 5823472 : if ((SCALAR_FLOAT_MODE_P (GET_MODE (x)) && flag_trapping_math))
132 : return 1;
133 :
134 10074190 : for (i = 0; i < XVECLEN (x, 0); ++i)
135 : {
136 7351140 : if (may_trap_p_1 (XVECEXP (x, 0, i), flags))
137 : return 1;
138 : }
139 :
140 : return 0;
141 : }
142 :
143 : machine_mode
144 15996889 : default_promote_function_mode (const_tree type ATTRIBUTE_UNUSED,
145 : machine_mode mode,
146 : int *punsignedp ATTRIBUTE_UNUSED,
147 : const_tree funtype ATTRIBUTE_UNUSED,
148 : int for_return ATTRIBUTE_UNUSED)
149 : {
150 15996889 : if (type != NULL_TREE && for_return == 2)
151 3671838 : return promote_mode (type, mode, punsignedp);
152 : return mode;
153 : }
154 :
155 : machine_mode
156 0 : default_promote_function_mode_always_promote (const_tree type,
157 : machine_mode mode,
158 : int *punsignedp,
159 : const_tree funtype ATTRIBUTE_UNUSED,
160 : int for_return ATTRIBUTE_UNUSED)
161 : {
162 0 : return promote_mode (type, mode, punsignedp);
163 : }
164 :
165 : /* Sign-extend signed 8/16-bit integer arguments to 32 bits and
166 : zero-extend unsigned 8/16-bit integer arguments to 32 bits. */
167 :
168 : machine_mode
169 0 : default_promote_function_mode_sign_extend (const_tree type,
170 : machine_mode mode,
171 : int *punsignedp,
172 : const_tree, int)
173 : {
174 0 : if (GET_MODE_CLASS (mode) == MODE_INT
175 0 : && (GET_MODE_SIZE (as_a <scalar_int_mode> (mode))
176 0 : < GET_MODE_SIZE (SImode)))
177 : return SImode;
178 :
179 0 : return promote_mode (type, mode, punsignedp);
180 : }
181 :
182 : machine_mode
183 0 : default_cc_modes_compatible (machine_mode m1, machine_mode m2)
184 : {
185 0 : if (m1 == m2)
186 0 : return m1;
187 : return VOIDmode;
188 : }
189 :
190 : bool
191 0 : default_return_in_memory (const_tree type,
192 : const_tree fntype ATTRIBUTE_UNUSED)
193 : {
194 0 : return (TYPE_MODE (type) == BLKmode);
195 : }
196 :
197 : rtx
198 0 : default_legitimize_address (rtx x, rtx orig_x ATTRIBUTE_UNUSED,
199 : machine_mode mode ATTRIBUTE_UNUSED)
200 : {
201 0 : return x;
202 : }
203 :
204 : bool
205 0 : default_legitimize_address_displacement (rtx *, rtx *, poly_int64,
206 : machine_mode)
207 : {
208 0 : return false;
209 : }
210 :
211 : bool
212 0 : default_const_not_ok_for_debug_p (rtx x)
213 : {
214 0 : if (GET_CODE (x) == UNSPEC)
215 0 : return true;
216 : return false;
217 : }
218 :
219 : rtx
220 0 : default_expand_builtin_saveregs (void)
221 : {
222 0 : error ("%<__builtin_saveregs%> not supported by this target");
223 0 : return const0_rtx;
224 : }
225 :
226 : void
227 0 : default_setup_incoming_varargs (cumulative_args_t,
228 : const function_arg_info &, int *, int)
229 : {
230 0 : }
231 :
232 : /* The default implementation of TARGET_BUILTIN_SETJMP_FRAME_VALUE. */
233 :
234 : rtx
235 0 : default_builtin_setjmp_frame_value (void)
236 : {
237 0 : return virtual_stack_vars_rtx;
238 : }
239 :
240 : /* Generic hook that takes a CUMULATIVE_ARGS pointer and returns false. */
241 :
242 : bool
243 0 : hook_bool_CUMULATIVE_ARGS_false (cumulative_args_t ca ATTRIBUTE_UNUSED)
244 : {
245 0 : return false;
246 : }
247 :
248 : bool
249 0 : default_pretend_outgoing_varargs_named (cumulative_args_t ca ATTRIBUTE_UNUSED)
250 : {
251 0 : return (targetm.calls.setup_incoming_varargs
252 0 : != default_setup_incoming_varargs);
253 : }
254 :
255 : scalar_int_mode
256 490928 : default_eh_return_filter_mode (void)
257 : {
258 490928 : return targetm.unwind_word_mode ();
259 : }
260 :
261 : scalar_int_mode
262 44996 : default_libgcc_cmp_return_mode (void)
263 : {
264 44996 : return word_mode;
265 : }
266 :
267 : scalar_int_mode
268 308 : default_libgcc_shift_count_mode (void)
269 : {
270 308 : return word_mode;
271 : }
272 :
273 : scalar_int_mode
274 721261 : default_unwind_word_mode (void)
275 : {
276 721261 : return word_mode;
277 : }
278 :
279 : /* The default implementation of TARGET_SHIFT_TRUNCATION_MASK. */
280 :
281 : unsigned HOST_WIDE_INT
282 1 : default_shift_truncation_mask (machine_mode mode)
283 : {
284 1 : return SHIFT_COUNT_TRUNCATED ? GET_MODE_UNIT_BITSIZE (mode) - 1 : 0;
285 : }
286 :
287 : /* The default implementation of TARGET_MIN_DIVISIONS_FOR_RECIP_MUL. */
288 :
289 : unsigned int
290 208853 : default_min_divisions_for_recip_mul (machine_mode mode ATTRIBUTE_UNUSED)
291 : {
292 208853 : return have_insn_for (DIV, mode) ? 3 : 2;
293 : }
294 :
295 : /* The default implementation of TARGET_MODE_REP_EXTENDED. */
296 :
297 : int
298 21508278 : default_mode_rep_extended (scalar_int_mode, scalar_int_mode)
299 : {
300 21508278 : return UNKNOWN;
301 : }
302 :
303 : /* Generic hook that takes a CUMULATIVE_ARGS pointer and returns true. */
304 :
305 : bool
306 5837403 : hook_bool_CUMULATIVE_ARGS_true (cumulative_args_t a ATTRIBUTE_UNUSED)
307 : {
308 5837403 : return true;
309 : }
310 :
311 : /* Return machine mode for non-standard suffix
312 : or VOIDmode if non-standard suffixes are unsupported. */
313 : machine_mode
314 0 : default_mode_for_suffix (char suffix ATTRIBUTE_UNUSED)
315 : {
316 0 : return VOIDmode;
317 : }
318 :
319 : /* Return machine mode for a floating type which is indicated
320 : by the given enum tree_index. */
321 :
322 : machine_mode
323 3014755 : default_mode_for_floating_type (enum tree_index ti)
324 : {
325 3014755 : if (ti == TI_FLOAT_TYPE)
326 : return SFmode;
327 1487148 : gcc_assert (ti == TI_DOUBLE_TYPE || ti == TI_LONG_DOUBLE_TYPE);
328 : return DFmode;
329 : }
330 :
331 : /* The generic C++ ABI specifies this is a 64-bit value. */
332 : tree
333 4735 : default_cxx_guard_type (void)
334 : {
335 4735 : return long_long_integer_type_node;
336 : }
337 :
338 : /* Returns the size of the cookie to use when allocating an array
339 : whose elements have the indicated TYPE. Assumes that it is already
340 : known that a cookie is needed. */
341 :
342 : tree
343 84308 : default_cxx_get_cookie_size (tree type)
344 : {
345 84308 : tree cookie_size;
346 :
347 : /* We need to allocate an additional max (sizeof (size_t), alignof
348 : (true_type)) bytes. */
349 84308 : tree sizetype_size;
350 84308 : tree type_align;
351 :
352 84308 : sizetype_size = size_in_bytes (sizetype);
353 84308 : type_align = size_int (TYPE_ALIGN_UNIT (type));
354 84308 : if (tree_int_cst_lt (type_align, sizetype_size))
355 : cookie_size = sizetype_size;
356 : else
357 19694 : cookie_size = type_align;
358 :
359 84308 : return cookie_size;
360 : }
361 :
362 : /* Returns modified FUNCTION_TYPE for cdtor callabi. */
363 :
364 : tree
365 0 : default_cxx_adjust_cdtor_callabi_fntype (tree fntype)
366 : {
367 0 : return fntype;
368 : }
369 :
370 : /* Return true if a parameter must be passed by reference. This version
371 : of the TARGET_PASS_BY_REFERENCE hook uses just MUST_PASS_IN_STACK. */
372 :
373 : bool
374 0 : hook_pass_by_reference_must_pass_in_stack (cumulative_args_t,
375 : const function_arg_info &arg)
376 : {
377 0 : return targetm.calls.must_pass_in_stack (arg);
378 : }
379 :
380 : /* Return true if a parameter follows callee copies conventions. This
381 : version of the hook is true for all named arguments. */
382 :
383 : bool
384 0 : hook_callee_copies_named (cumulative_args_t, const function_arg_info &arg)
385 : {
386 0 : return arg.named;
387 : }
388 :
389 : /* Emit to STREAM the assembler syntax for insn operand X. */
390 :
391 : void
392 0 : default_print_operand (FILE *stream ATTRIBUTE_UNUSED, rtx x ATTRIBUTE_UNUSED,
393 : int code ATTRIBUTE_UNUSED)
394 : {
395 : #ifdef PRINT_OPERAND
396 : PRINT_OPERAND (stream, x, code);
397 : #else
398 0 : gcc_unreachable ();
399 : #endif
400 : }
401 :
402 : /* Emit to STREAM the assembler syntax for an insn operand whose memory
403 : address is X. */
404 :
405 : void
406 0 : default_print_operand_address (FILE *stream ATTRIBUTE_UNUSED,
407 : machine_mode /*mode*/,
408 : rtx x ATTRIBUTE_UNUSED)
409 : {
410 : #ifdef PRINT_OPERAND_ADDRESS
411 : PRINT_OPERAND_ADDRESS (stream, x);
412 : #else
413 0 : gcc_unreachable ();
414 : #endif
415 : }
416 :
417 : /* Return true if CODE is a valid punctuation character for the
418 : `print_operand' hook. */
419 :
420 : bool
421 0 : default_print_operand_punct_valid_p (unsigned char code ATTRIBUTE_UNUSED)
422 : {
423 : #ifdef PRINT_OPERAND_PUNCT_VALID_P
424 : return PRINT_OPERAND_PUNCT_VALID_P (code);
425 : #else
426 0 : return false;
427 : #endif
428 : }
429 :
430 : /* The default implementation of TARGET_MANGLE_ASSEMBLER_NAME. */
431 : tree
432 542758 : default_mangle_assembler_name (const char *name ATTRIBUTE_UNUSED)
433 : {
434 542758 : const char *skipped = name + (*name == '*' ? 1 : 0);
435 542758 : const char *stripped = targetm.strip_name_encoding (skipped);
436 542758 : if (*name != '*' && user_label_prefix[0])
437 0 : stripped = ACONCAT ((user_label_prefix, stripped, NULL));
438 542758 : return get_identifier (stripped);
439 : }
440 :
441 : /* The default implementation of TARGET_TRANSLATE_MODE_ATTRIBUTE. */
442 :
443 : machine_mode
444 54079 : default_translate_mode_attribute (machine_mode mode)
445 : {
446 54079 : return mode;
447 : }
448 :
449 : /* True if MODE is valid for the target. By "valid", we mean able to
450 : be manipulated in non-trivial ways. In particular, this means all
451 : the arithmetic is supported.
452 :
453 : By default we guess this means that any C type is supported. If
454 : we can't map the mode back to a type that would be available in C,
455 : then reject it. Special case, here, is the double-word arithmetic
456 : supported by optabs.cc. */
457 :
458 : bool
459 2900466 : default_scalar_mode_supported_p (scalar_mode mode)
460 : {
461 2900466 : int precision = GET_MODE_PRECISION (mode);
462 :
463 2900466 : switch (GET_MODE_CLASS (mode))
464 : {
465 1658341 : case MODE_PARTIAL_INT:
466 1658341 : case MODE_INT:
467 1658341 : if (precision == CHAR_TYPE_SIZE)
468 : return true;
469 1587978 : if (precision == SHORT_TYPE_SIZE)
470 : return true;
471 1528144 : if (precision == INT_TYPE_SIZE)
472 : return true;
473 1489664 : if (precision == LONG_TYPE_SIZE)
474 : return true;
475 1372597 : if (precision == LONG_LONG_TYPE_SIZE)
476 : return true;
477 1402649 : if (precision == 2 * BITS_PER_WORD)
478 : return true;
479 : return false;
480 :
481 1242125 : case MODE_FLOAT:
482 1242125 : if (mode == targetm.c.mode_for_floating_type (TI_FLOAT_TYPE))
483 : return true;
484 923070 : if (mode == targetm.c.mode_for_floating_type (TI_DOUBLE_TYPE))
485 : return true;
486 318601 : if (mode == targetm.c.mode_for_floating_type (TI_LONG_DOUBLE_TYPE))
487 : return true;
488 : return false;
489 :
490 : case MODE_DECIMAL_FLOAT:
491 : case MODE_FRACT:
492 : case MODE_UFRACT:
493 : case MODE_ACCUM:
494 : case MODE_UACCUM:
495 : return false;
496 :
497 0 : default:
498 0 : gcc_unreachable ();
499 : }
500 : }
501 :
502 : /* Return true if libgcc supports floating-point mode MODE (known to
503 : be supported as a scalar mode). */
504 :
505 : bool
506 1556270 : default_libgcc_floating_mode_supported_p (scalar_float_mode mode)
507 : {
508 1556270 : switch (mode)
509 : {
510 : #ifdef HAVE_SFmode
511 : case E_SFmode:
512 : #endif
513 : #ifdef HAVE_DFmode
514 : case E_DFmode:
515 : #endif
516 : #ifdef HAVE_XFmode
517 : case E_XFmode:
518 : #endif
519 : #ifdef HAVE_TFmode
520 : case E_TFmode:
521 : #endif
522 : return true;
523 :
524 0 : default:
525 0 : return false;
526 : }
527 : }
528 :
529 : /* Return the machine mode to use for the type _FloatN, if EXTENDED is
530 : false, or _FloatNx, if EXTENDED is true, or VOIDmode if not
531 : supported. */
532 : opt_scalar_float_mode
533 1997954 : default_floatn_mode (int n, bool extended)
534 : {
535 1997954 : if (extended)
536 : {
537 856266 : opt_scalar_float_mode cand1, cand2;
538 856266 : scalar_float_mode mode;
539 856266 : switch (n)
540 : {
541 285422 : case 32:
542 : #ifdef HAVE_DFmode
543 285422 : cand1 = DFmode;
544 : #endif
545 285422 : break;
546 :
547 285422 : case 64:
548 : #ifdef HAVE_XFmode
549 285422 : cand1 = XFmode;
550 : #endif
551 : #ifdef HAVE_TFmode
552 285422 : cand2 = TFmode;
553 : #endif
554 285422 : break;
555 :
556 : case 128:
557 : break;
558 :
559 0 : default:
560 : /* Those are the only valid _FloatNx types. */
561 0 : gcc_unreachable ();
562 : }
563 856266 : if (cand1.exists (&mode)
564 570844 : && REAL_MODE_FORMAT (mode)->ieee_bits > n
565 570844 : && targetm.scalar_mode_supported_p (mode)
566 570822 : && targetm.libgcc_floating_mode_supported_p (mode))
567 570822 : return cand1;
568 285444 : if (cand2.exists (&mode)
569 22 : && REAL_MODE_FORMAT (mode)->ieee_bits > n
570 285444 : && targetm.scalar_mode_supported_p (mode)
571 22 : && targetm.libgcc_floating_mode_supported_p (mode))
572 22 : return cand2;
573 : }
574 : else
575 : {
576 1141688 : opt_scalar_float_mode cand;
577 1141688 : scalar_float_mode mode;
578 1141688 : switch (n)
579 : {
580 : case 16:
581 : /* Always enable _Float16 if we have basic support for the mode.
582 : Targets can control the range and precision of operations on
583 : the _Float16 type using TARGET_C_EXCESS_PRECISION. */
584 : #ifdef HAVE_HFmode
585 : cand = HFmode;
586 : #endif
587 : break;
588 :
589 : case 32:
590 : #ifdef HAVE_SFmode
591 : cand = SFmode;
592 : #endif
593 : break;
594 :
595 : case 64:
596 : #ifdef HAVE_DFmode
597 : cand = DFmode;
598 : #endif
599 : break;
600 :
601 : case 128:
602 : #ifdef HAVE_TFmode
603 : cand = TFmode;
604 : #endif
605 : break;
606 :
607 : default:
608 : break;
609 : }
610 1141688 : if (cand.exists (&mode)
611 1141688 : && REAL_MODE_FORMAT (mode)->ieee_bits == n
612 1141688 : && targetm.scalar_mode_supported_p (mode)
613 1141688 : && targetm.libgcc_floating_mode_supported_p (mode))
614 1141688 : return cand;
615 : }
616 285422 : return opt_scalar_float_mode ();
617 : }
618 :
619 : /* Define this to return true if the _Floatn and _Floatnx built-in functions
620 : should implicitly enable the built-in function without the __builtin_ prefix
621 : in addition to the normal built-in function with the __builtin_ prefix. The
622 : default is to only enable built-in functions without the __builtin_ prefix
623 : for the GNU C langauge. The argument FUNC is the enum builtin_in_function
624 : id of the function to be enabled. */
625 :
626 : bool
627 133700301 : default_floatn_builtin_p (int func ATTRIBUTE_UNUSED)
628 : {
629 133700301 : static bool first_time_p = true;
630 133700301 : static bool c_or_objective_c;
631 :
632 133700301 : if (first_time_p)
633 : {
634 230121 : first_time_p = false;
635 340884 : c_or_objective_c = lang_GNU_C () || lang_GNU_OBJC ();
636 : }
637 :
638 133700301 : return c_or_objective_c;
639 : }
640 :
641 : /* Make some target macros useable by target-independent code. */
642 : bool
643 0 : targhook_words_big_endian (void)
644 : {
645 0 : return !!WORDS_BIG_ENDIAN;
646 : }
647 :
648 : bool
649 208790 : targhook_float_words_big_endian (void)
650 : {
651 208790 : return !!FLOAT_WORDS_BIG_ENDIAN;
652 : }
653 :
654 : /* True if the target supports floating-point exceptions and rounding
655 : modes. */
656 :
657 : bool
658 0 : default_float_exceptions_rounding_supported_p (void)
659 : {
660 : #ifdef HAVE_adddf3
661 0 : return HAVE_adddf3;
662 : #else
663 : return false;
664 : #endif
665 : }
666 :
667 : /* True if the target supports decimal floating point. */
668 :
669 : bool
670 1229983 : default_decimal_float_supported_p (void)
671 : {
672 1229983 : return ENABLE_DECIMAL_FLOAT;
673 : }
674 :
675 : /* True if the target supports fixed-point arithmetic. */
676 :
677 : bool
678 415928 : default_fixed_point_supported_p (void)
679 : {
680 415928 : return ENABLE_FIXED_POINT;
681 : }
682 :
683 : /* True if the target supports GNU indirect functions. */
684 :
685 : bool
686 565 : default_has_ifunc_p (void)
687 : {
688 565 : return HAVE_GNU_INDIRECT_FUNCTION;
689 : }
690 :
691 : /* Return true if we predict the loop LOOP will be transformed to a
692 : low-overhead loop, otherwise return false.
693 :
694 : By default, false is returned, as this hook's applicability should be
695 : verified for each target. Target maintainers should re-define the hook
696 : if the target can take advantage of it. */
697 :
698 : bool
699 503212 : default_predict_doloop_p (class loop *loop ATTRIBUTE_UNUSED)
700 : {
701 503212 : return false;
702 : }
703 :
704 : /* By default, just use the input MODE itself. */
705 :
706 : machine_mode
707 0 : default_preferred_doloop_mode (machine_mode mode)
708 : {
709 0 : return mode;
710 : }
711 :
712 : /* NULL if INSN insn is valid within a low-overhead loop, otherwise returns
713 : an error message.
714 :
715 : This function checks whether a given INSN is valid within a low-overhead
716 : loop. If INSN is invalid it returns the reason for that, otherwise it
717 : returns NULL. A called function may clobber any special registers required
718 : for low-overhead looping. Additionally, some targets (eg, PPC) use the count
719 : register for branch on table instructions. We reject the doloop pattern in
720 : these cases. */
721 :
722 : const char *
723 0 : default_invalid_within_doloop (const rtx_insn *insn)
724 : {
725 0 : if (CALL_P (insn))
726 : return "Function call in loop.";
727 :
728 0 : if (tablejump_p (insn, NULL, NULL) || computed_jump_p (insn))
729 0 : return "Computed branch in the loop.";
730 :
731 : return NULL;
732 : }
733 :
734 : /* Mapping of builtin functions to vectorized variants. */
735 :
736 : tree
737 0 : default_builtin_vectorized_function (unsigned int, tree, tree)
738 : {
739 0 : return NULL_TREE;
740 : }
741 :
742 : /* Mapping of target builtin functions to vectorized variants. */
743 :
744 : tree
745 24 : default_builtin_md_vectorized_function (tree, tree, tree)
746 : {
747 24 : return NULL_TREE;
748 : }
749 :
750 : /* Default vectorizer cost model values. */
751 :
752 : int
753 0 : default_builtin_vectorization_cost (enum vect_cost_for_stmt type_of_cost,
754 : tree vectype,
755 : int misalign ATTRIBUTE_UNUSED)
756 : {
757 0 : switch (type_of_cost)
758 : {
759 : case scalar_stmt:
760 : case scalar_load:
761 : case scalar_store:
762 : case vector_stmt:
763 : case vector_load:
764 : case vector_store:
765 : case vec_to_scalar:
766 : case scalar_to_vec:
767 : case cond_branch_not_taken:
768 : case vec_perm:
769 : case vec_promote_demote:
770 : return 1;
771 :
772 0 : case unaligned_load:
773 0 : case unaligned_store:
774 0 : return 2;
775 :
776 0 : case cond_branch_taken:
777 0 : return 3;
778 :
779 0 : case vec_construct:
780 0 : return estimated_poly_value (TYPE_VECTOR_SUBPARTS (vectype)) - 1;
781 :
782 0 : default:
783 0 : gcc_unreachable ();
784 : }
785 : }
786 :
787 : /* Reciprocal. */
788 :
789 : tree
790 0 : default_builtin_reciprocal (tree)
791 : {
792 0 : return NULL_TREE;
793 : }
794 :
795 : void
796 0 : default_emit_support_tinfos (emit_support_tinfos_callback)
797 : {
798 0 : }
799 :
800 : bool
801 5300 : hook_bool_CUMULATIVE_ARGS_arg_info_false (cumulative_args_t,
802 : const function_arg_info &)
803 : {
804 5300 : return false;
805 : }
806 :
807 : bool
808 0 : hook_bool_CUMULATIVE_ARGS_arg_info_true (cumulative_args_t,
809 : const function_arg_info &)
810 : {
811 0 : return true;
812 : }
813 :
814 : int
815 12133984 : hook_int_CUMULATIVE_ARGS_arg_info_0 (cumulative_args_t,
816 : const function_arg_info &)
817 : {
818 12133984 : return 0;
819 : }
820 :
821 : void
822 11870042 : hook_void_CUMULATIVE_ARGS (cumulative_args_t)
823 : {
824 11870042 : }
825 :
826 : void
827 0 : hook_void_CUMULATIVE_ARGS_tree (cumulative_args_t ca ATTRIBUTE_UNUSED,
828 : tree ATTRIBUTE_UNUSED)
829 : {
830 0 : }
831 :
832 : void
833 11494102 : hook_void_CUMULATIVE_ARGS_rtx_tree (cumulative_args_t, rtx, tree)
834 : {
835 11494102 : }
836 :
837 : /* Default implementation of TARGET_PUSH_ARGUMENT. */
838 :
839 : bool
840 0 : default_push_argument (unsigned int)
841 : {
842 : #ifdef PUSH_ROUNDING
843 0 : return !ACCUMULATE_OUTGOING_ARGS;
844 : #else
845 : return false;
846 : #endif
847 : }
848 :
849 : void
850 0 : default_function_arg_advance (cumulative_args_t, const function_arg_info &)
851 : {
852 0 : gcc_unreachable ();
853 : }
854 :
855 : /* Default implementation of TARGET_FUNCTION_ARG_OFFSET. */
856 :
857 : HOST_WIDE_INT
858 5417683 : default_function_arg_offset (machine_mode, const_tree)
859 : {
860 5417683 : return 0;
861 : }
862 :
863 : /* Default implementation of TARGET_FUNCTION_ARG_PADDING: usually pad
864 : upward, but pad short args downward on big-endian machines. */
865 :
866 : pad_direction
867 12324275 : default_function_arg_padding (machine_mode mode, const_tree type)
868 : {
869 12324275 : if (!BYTES_BIG_ENDIAN)
870 12324275 : return PAD_UPWARD;
871 :
872 : unsigned HOST_WIDE_INT size;
873 : if (mode == BLKmode)
874 : {
875 : if (!type || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
876 : return PAD_UPWARD;
877 : size = int_size_in_bytes (type);
878 : }
879 : else
880 : /* Targets with variable-sized modes must override this hook
881 : and handle variable-sized modes explicitly. */
882 : size = GET_MODE_SIZE (mode).to_constant ();
883 :
884 : if (size < (PARM_BOUNDARY / BITS_PER_UNIT))
885 : return PAD_DOWNWARD;
886 :
887 : return PAD_UPWARD;
888 : }
889 :
890 : rtx
891 0 : default_function_arg (cumulative_args_t, const function_arg_info &)
892 : {
893 0 : gcc_unreachable ();
894 : }
895 :
896 : rtx
897 0 : default_function_incoming_arg (cumulative_args_t, const function_arg_info &)
898 : {
899 0 : gcc_unreachable ();
900 : }
901 :
902 : unsigned int
903 0 : default_function_arg_boundary (machine_mode mode ATTRIBUTE_UNUSED,
904 : const_tree type ATTRIBUTE_UNUSED)
905 : {
906 0 : return PARM_BOUNDARY;
907 : }
908 :
909 : unsigned int
910 5417683 : default_function_arg_round_boundary (machine_mode mode ATTRIBUTE_UNUSED,
911 : const_tree type ATTRIBUTE_UNUSED)
912 : {
913 5417683 : return PARM_BOUNDARY;
914 : }
915 :
916 : void
917 0 : hook_void_bitmap (bitmap regs ATTRIBUTE_UNUSED)
918 : {
919 0 : }
920 :
921 : const char *
922 786548 : hook_invalid_arg_for_unprototyped_fn (
923 : const_tree typelist ATTRIBUTE_UNUSED,
924 : const_tree funcdecl ATTRIBUTE_UNUSED,
925 : const_tree val ATTRIBUTE_UNUSED)
926 : {
927 786548 : return NULL;
928 : }
929 :
930 : /* Initialize the stack protection decls. */
931 :
932 : /* Stack protection related decls living in libgcc. */
933 : static GTY(()) tree stack_chk_guard_decl;
934 :
935 : tree
936 3 : default_stack_protect_guard (void)
937 : {
938 3 : tree t = stack_chk_guard_decl;
939 :
940 3 : if (t == NULL)
941 : {
942 1 : rtx x;
943 :
944 1 : t = build_decl (UNKNOWN_LOCATION,
945 : VAR_DECL, get_identifier ("__stack_chk_guard"),
946 : ptr_type_node);
947 1 : TREE_STATIC (t) = 1;
948 1 : TREE_PUBLIC (t) = 1;
949 1 : DECL_EXTERNAL (t) = 1;
950 1 : TREE_USED (t) = 1;
951 1 : TREE_THIS_VOLATILE (t) = 1;
952 1 : DECL_ARTIFICIAL (t) = 1;
953 1 : DECL_IGNORED_P (t) = 1;
954 :
955 : /* Do not share RTL as the declaration is visible outside of
956 : current function. */
957 1 : x = DECL_RTL (t);
958 1 : RTX_FLAG (x, used) = 1;
959 :
960 1 : stack_chk_guard_decl = t;
961 : }
962 :
963 3 : return t;
964 : }
965 :
966 : static GTY(()) tree stack_chk_fail_decl;
967 :
968 : tree
969 264 : default_external_stack_protect_fail (void)
970 : {
971 264 : tree t = stack_chk_fail_decl;
972 :
973 264 : if (t == NULL_TREE)
974 : {
975 154 : t = build_function_type_list (void_type_node, NULL_TREE);
976 154 : t = build_decl (UNKNOWN_LOCATION,
977 : FUNCTION_DECL, get_identifier ("__stack_chk_fail"), t);
978 154 : TREE_STATIC (t) = 1;
979 154 : TREE_PUBLIC (t) = 1;
980 154 : DECL_EXTERNAL (t) = 1;
981 154 : TREE_USED (t) = 1;
982 154 : TREE_THIS_VOLATILE (t) = 1;
983 154 : TREE_NOTHROW (t) = 1;
984 154 : DECL_ARTIFICIAL (t) = 1;
985 154 : DECL_IGNORED_P (t) = 1;
986 154 : DECL_VISIBILITY (t) = VISIBILITY_DEFAULT;
987 154 : DECL_VISIBILITY_SPECIFIED (t) = 1;
988 :
989 154 : stack_chk_fail_decl = t;
990 : }
991 :
992 264 : return build_call_expr (t, 0);
993 : }
994 :
995 : tree
996 1 : default_hidden_stack_protect_fail (void)
997 : {
998 : #ifndef HAVE_GAS_HIDDEN
999 : return default_external_stack_protect_fail ();
1000 : #else
1001 1 : tree t = stack_chk_fail_decl;
1002 :
1003 1 : if (!flag_pic)
1004 1 : return default_external_stack_protect_fail ();
1005 :
1006 0 : if (t == NULL_TREE)
1007 : {
1008 0 : t = build_function_type_list (void_type_node, NULL_TREE);
1009 0 : t = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
1010 : get_identifier ("__stack_chk_fail_local"), t);
1011 0 : TREE_STATIC (t) = 1;
1012 0 : TREE_PUBLIC (t) = 1;
1013 0 : DECL_EXTERNAL (t) = 1;
1014 0 : TREE_USED (t) = 1;
1015 0 : TREE_THIS_VOLATILE (t) = 1;
1016 0 : TREE_NOTHROW (t) = 1;
1017 0 : DECL_ARTIFICIAL (t) = 1;
1018 0 : DECL_IGNORED_P (t) = 1;
1019 0 : DECL_VISIBILITY_SPECIFIED (t) = 1;
1020 0 : DECL_VISIBILITY (t) = VISIBILITY_HIDDEN;
1021 :
1022 0 : stack_chk_fail_decl = t;
1023 : }
1024 :
1025 0 : return build_call_expr (t, 0);
1026 : #endif
1027 : }
1028 :
1029 : bool
1030 73774300 : hook_bool_const_rtx_commutative_p (const_rtx x,
1031 : int outer_code ATTRIBUTE_UNUSED)
1032 : {
1033 73774300 : return COMMUTATIVE_P (x);
1034 : }
1035 :
1036 : rtx
1037 0 : default_function_value (const_tree ret_type ATTRIBUTE_UNUSED,
1038 : const_tree fn_decl_or_type,
1039 : bool outgoing ATTRIBUTE_UNUSED)
1040 : {
1041 : /* The old interface doesn't handle receiving the function type. */
1042 0 : if (fn_decl_or_type
1043 : && !DECL_P (fn_decl_or_type))
1044 : fn_decl_or_type = NULL;
1045 :
1046 : #ifdef FUNCTION_VALUE
1047 : return FUNCTION_VALUE (ret_type, fn_decl_or_type);
1048 : #else
1049 0 : gcc_unreachable ();
1050 : #endif
1051 : }
1052 :
1053 : rtx
1054 105702 : default_libcall_value (machine_mode mode ATTRIBUTE_UNUSED,
1055 : const_rtx fun ATTRIBUTE_UNUSED)
1056 : {
1057 : #ifdef LIBCALL_VALUE
1058 105702 : return LIBCALL_VALUE (MACRO_MODE (mode));
1059 : #else
1060 : gcc_unreachable ();
1061 : #endif
1062 : }
1063 :
1064 : /* The default hook for TARGET_FUNCTION_VALUE_REGNO_P. */
1065 :
1066 : bool
1067 0 : default_function_value_regno_p (const unsigned int regno ATTRIBUTE_UNUSED)
1068 : {
1069 : #ifdef FUNCTION_VALUE_REGNO_P
1070 : return FUNCTION_VALUE_REGNO_P (regno);
1071 : #else
1072 0 : gcc_unreachable ();
1073 : #endif
1074 : }
1075 :
1076 : /* Choose the mode and rtx to use to zero REGNO, storing tem in PMODE and
1077 : PREGNO_RTX and returning TRUE if successful, otherwise returning FALSE. If
1078 : the natural mode for REGNO doesn't work, attempt to group it with subsequent
1079 : adjacent registers set in TOZERO. */
1080 :
1081 : static inline bool
1082 0 : zcur_select_mode_rtx (unsigned int regno, machine_mode *pmode,
1083 : rtx *pregno_rtx, HARD_REG_SET tozero)
1084 : {
1085 0 : rtx regno_rtx = regno_reg_rtx[regno];
1086 0 : machine_mode mode = GET_MODE (regno_rtx);
1087 :
1088 : /* If the natural mode doesn't work, try some wider mode. */
1089 0 : if (!targetm.hard_regno_mode_ok (regno, mode))
1090 : {
1091 : bool found = false;
1092 0 : for (int nregs = 2;
1093 0 : !found && nregs <= hard_regno_max_nregs
1094 0 : && regno + nregs <= FIRST_PSEUDO_REGISTER
1095 0 : && TEST_HARD_REG_BIT (tozero,
1096 : regno + nregs - 1);
1097 : nregs++)
1098 : {
1099 0 : mode = choose_hard_reg_mode (regno, nregs, 0);
1100 0 : if (mode == E_VOIDmode)
1101 0 : continue;
1102 0 : gcc_checking_assert (targetm.hard_regno_mode_ok (regno, mode));
1103 0 : regno_rtx = gen_rtx_REG (mode, regno);
1104 0 : found = true;
1105 : }
1106 0 : if (!found)
1107 : return false;
1108 : }
1109 :
1110 0 : *pmode = mode;
1111 0 : *pregno_rtx = regno_rtx;
1112 0 : return true;
1113 : }
1114 :
/* The default hook for TARGET_ZERO_CALL_USED_REGS.  Emit instructions that
   zero every hard register in NEED_ZEROED_HARDREGS, first with direct
   zero-moves and then, for registers that reject a constant zero, with
   copies from registers already known to be zero.  Returns the set of
   registers actually handled (here: the full input set, after issuing a
   sorry() diagnostic for any register that could not be zeroed).  */

HARD_REG_SET
default_zero_call_used_regs (HARD_REG_SET need_zeroed_hardregs)
{
  gcc_assert (!hard_reg_set_empty_p (need_zeroed_hardregs));

  HARD_REG_SET failed;
  CLEAR_HARD_REG_SET (failed);
  bool progress = false;

  /* First, try to zero each register in need_zeroed_hardregs by
     loading a zero into it, taking note of any failures in
     FAILED.  */
  for (unsigned int regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (TEST_HARD_REG_BIT (need_zeroed_hardregs, regno))
      {
	rtx_insn *last_insn = get_last_insn ();
	rtx regno_rtx;
	machine_mode mode;

	if (!zcur_select_mode_rtx (regno, &mode, &regno_rtx,
				   need_zeroed_hardregs))
	  {
	    SET_HARD_REG_BIT (failed, regno);
	    continue;
	  }

	rtx zero = CONST0_RTX (mode);
	rtx_insn *insn = emit_move_insn (regno_rtx, zero);
	if (!valid_insn_p (insn))
	  {
	    /* The zero-move didn't match an insn pattern; undo it and
	       remember the register for the copy-based retry below.  */
	    SET_HARD_REG_BIT (failed, regno);
	    delete_insns_since (last_insn);
	  }
	else
	  {
	    progress = true;
	    /* MODE may have covered several adjacent registers; skip them.  */
	    regno += hard_regno_nregs (regno, mode) - 1;
	  }
      }

  /* Now retry with copies from zeroed registers, as long as we've
     made some PROGRESS, and registers remain to be zeroed in
     FAILED.  */
  while (progress && !hard_reg_set_empty_p (failed))
    {
      HARD_REG_SET retrying = failed;

      CLEAR_HARD_REG_SET (failed);
      progress = false;

      for (unsigned int regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (TEST_HARD_REG_BIT (retrying, regno))
	  {
	    rtx regno_rtx;
	    machine_mode mode;

	    /* This might select registers we've already zeroed.  If grouping
	       with them is what it takes to get regno zeroed, so be it.  */
	    if (!zcur_select_mode_rtx (regno, &mode, &regno_rtx,
				       need_zeroed_hardregs))
	      {
		SET_HARD_REG_BIT (failed, regno);
		continue;
	      }

	    bool success = false;
	    /* Look for a source.  */
	    for (unsigned int src = 0; src < FIRST_PSEUDO_REGISTER; src++)
	      {
		/* If SRC hasn't been zeroed (yet?), skip it.  */
		if (! TEST_HARD_REG_BIT (need_zeroed_hardregs, src))
		  continue;
		if (TEST_HARD_REG_BIT (retrying, src))
		  continue;

		/* Check that SRC can hold MODE, and that any other
		   registers needed to hold MODE in SRC have also been
		   zeroed.  */
		if (!targetm.hard_regno_mode_ok (src, mode))
		  continue;
		unsigned n = targetm.hard_regno_nregs (src, mode);
		bool ok = true;
		for (unsigned i = 1; ok && i < n; i++)
		  ok = (TEST_HARD_REG_BIT (need_zeroed_hardregs, src + i)
			&& !TEST_HARD_REG_BIT (retrying, src + i));
		if (!ok)
		  continue;

		/* SRC is usable, try to copy from it.  */
		rtx_insn *last_insn = get_last_insn ();
		rtx src_rtx = gen_rtx_REG (mode, src);
		rtx_insn *insn = emit_move_insn (regno_rtx, src_rtx);
		if (!valid_insn_p (insn))
		  /* It didn't work, remove any inserts.  We'll look
		     for another SRC.  */
		  delete_insns_since (last_insn);
		else
		  {
		    /* We're done for REGNO.  */
		    success = true;
		    break;
		  }
	      }

	    /* If nothing worked for REGNO this round, mark it to be
	       retried if we get another round.  */
	    if (!success)
	      SET_HARD_REG_BIT (failed, regno);
	    else
	      {
		/* Take note so as to enable another round if needed.  */
		progress = true;
		regno += hard_regno_nregs (regno, mode) - 1;
	      }
	  }
    }

  /* If any register remained, report it.  */
  if (!progress)
    {
      /* Static flag so the sorry() is emitted at most once per run.  */
      static bool issued_error;
      if (!issued_error)
	{
	  /* Translate the -fzero-call-used-regs enum back to its option
	     string for the diagnostic.  */
	  const char *name = NULL;
	  for (unsigned int i = 0; zero_call_used_regs_opts[i].name != NULL;
	       ++i)
	    if (flag_zero_call_used_regs == zero_call_used_regs_opts[i].flag)
	      {
		name = zero_call_used_regs_opts[i].name;
		break;
	      }

	  if (!name)
	    name = "";

	  issued_error = true;
	  sorry ("argument %qs is not supported for %qs on this target",
		 name, "-fzero-call-used-regs");
	}
    }

  return need_zeroed_hardregs;
}
1260 :
1261 : rtx
1262 0 : default_internal_arg_pointer (void)
1263 : {
1264 : /* If the reg that the virtual arg pointer will be translated into is
1265 : not a fixed reg or is the stack pointer, make a copy of the virtual
1266 : arg pointer, and address parms via the copy. The frame pointer is
1267 : considered fixed even though it is not marked as such. */
1268 0 : if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
1269 0 : || ! (fixed_regs[ARG_POINTER_REGNUM]
1270 : || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
1271 0 : return copy_to_reg (virtual_incoming_args_rtx);
1272 : else
1273 0 : return virtual_incoming_args_rtx;
1274 : }
1275 :
/* The default hook for TARGET_STATIC_CHAIN.  Return the rtx holding the
   static chain for a nested function: the incoming register when
   INCOMING_P and the target defines one, otherwise the outgoing static
   chain register.  Targets defining neither get a one-time sorry().  */
rtx
default_static_chain (const_tree ARG_UNUSED (fndecl_or_type), bool incoming_p)
{
  if (incoming_p)
    {
#ifdef STATIC_CHAIN_INCOMING_REGNUM
      return gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
#endif
    }
  /* Fall through: with no separate incoming register, the outgoing one
     serves both roles.  */

#ifdef STATIC_CHAIN_REGNUM
  return gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);
#endif

  {
    /* Diagnose only once per compilation.  */
    static bool issued_error;
    if (!issued_error)
      {
	issued_error = true;
	sorry ("nested functions not supported on this target");
      }

    /* It really doesn't matter what we return here, so long at it
       doesn't cause the rest of the compiler to crash.  */
    return gen_rtx_MEM (Pmode, stack_pointer_rtx);
  }
}
1303 :
/* The default hook for TARGET_TRAMPOLINE_INIT.  Targets that support
   nested-function trampolines must override this; the default simply
   reports that trampolines are unavailable.  */
void
default_trampoline_init (rtx ARG_UNUSED (m_tramp), tree ARG_UNUSED (t_func),
			 rtx ARG_UNUSED (r_chain))
{
  sorry ("nested function trampolines not supported on this target");
}
1310 :
/* The default hook for TARGET_RETURN_POPS_ARGS: by default the caller,
   not the callee, pops its arguments, so the callee pops nothing.  */
poly_int64
default_return_pops_args (tree, tree, poly_int64)
{
  return 0;
}
1316 :
/* The default hook for TARGET_IRA_CHANGE_PSEUDO_ALLOCNO_CLASS: keep the
   allocno class IRA originally chose, unchanged.  */
reg_class_t
default_ira_change_pseudo_allocno_class (int regno ATTRIBUTE_UNUSED,
					 reg_class_t cl,
					 reg_class_t best_cl ATTRIBUTE_UNUSED)
{
  return cl;
}
1324 :
1325 : int
1326 0 : default_ira_callee_saved_register_cost_scale (int)
1327 : {
1328 0 : return (optimize_size
1329 0 : ? 1
1330 0 : : REG_FREQ_FROM_BB (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
1331 : }
1332 :
/* The default hook for TARGET_LRA_P: all targets use LRA (the local
   register allocator) rather than reload by default.  */
extern bool
default_lra_p (void)
{
  return true;
}
1338 :
/* The default hook for TARGET_REGISTER_PRIORITY: all hard registers have
   equal (zero) allocation priority.  */
int
default_register_priority (int hard_regno ATTRIBUTE_UNUSED)
{
  return 0;
}
1344 :
/* The default hook for TARGET_REGISTER_USAGE_LEVELING_P: do not try to
   level hard-register usage during allocation.  */
extern bool
default_register_usage_leveling_p (void)
{
  return false;
}
1350 :
/* The default hook for TARGET_DIFFERENT_ADDR_DISPLACEMENT_P: assume the
   target does not have address modes whose displacement ranges differ.  */
extern bool
default_different_addr_displacement_p (void)
{
  return false;
}
1356 :
/* The default hook for TARGET_SECONDARY_RELOAD.  Implements secondary
   reloads via the legacy SECONDARY_{INPUT,OUTPUT}_RELOAD_CLASS macros and
   the reload_in/reload_out optabs.  Returns the register class needed for
   the intermediate, or NO_REGS when the reload pattern itself (recorded
   in SRI->icode) can do the whole job.  */
reg_class_t
default_secondary_reload (bool in_p ATTRIBUTE_UNUSED, rtx x ATTRIBUTE_UNUSED,
			  reg_class_t reload_class_i ATTRIBUTE_UNUSED,
			  machine_mode reload_mode ATTRIBUTE_UNUSED,
			  secondary_reload_info *sri)
{
  enum reg_class rclass = NO_REGS;
  enum reg_class reload_class = (enum reg_class) reload_class_i;

  /* A previous round already picked a tertiary pattern; reuse it.  */
  if (sri->prev_sri && sri->prev_sri->t_icode != CODE_FOR_nothing)
    {
      sri->icode = sri->prev_sri->t_icode;
      return NO_REGS;
    }
#ifdef SECONDARY_INPUT_RELOAD_CLASS
  if (in_p)
    rclass = SECONDARY_INPUT_RELOAD_CLASS (reload_class,
					   MACRO_MODE (reload_mode), x);
#endif
#ifdef SECONDARY_OUTPUT_RELOAD_CLASS
  if (! in_p)
    rclass = SECONDARY_OUTPUT_RELOAD_CLASS (reload_class,
					    MACRO_MODE (reload_mode), x);
#endif
  if (rclass != NO_REGS)
    {
      /* See whether a dedicated reload pattern exists for this mode.  */
      enum insn_code icode
	= direct_optab_handler (in_p ? reload_in_optab : reload_out_optab,
				reload_mode);

      if (icode != CODE_FOR_nothing
	  && !insn_operand_matches (icode, in_p, x))
	icode = CODE_FOR_nothing;
      else if (icode != CODE_FOR_nothing)
	{
	  const char *insn_constraint, *scratch_constraint;
	  enum reg_class insn_class, scratch_class;

	  /* Reload patterns have exactly three operands: destination,
	     source, and a scratch register.  */
	  gcc_assert (insn_data[(int) icode].n_operands == 3);
	  insn_constraint = insn_data[(int) icode].operand[!in_p].constraint;
	  if (!*insn_constraint)
	    insn_class = ALL_REGS;
	  else
	    {
	      if (in_p)
		{
		  gcc_assert (*insn_constraint == '=');
		  insn_constraint++;
		}
	      insn_class = (reg_class_for_constraint
			    (lookup_constraint (insn_constraint)));
	      gcc_assert (insn_class != NO_REGS);
	    }

	  scratch_constraint = insn_data[(int) icode].operand[2].constraint;
	  /* The scratch register's constraint must start with "=&",
	     except for an input reload, where only "=" is necessary,
	     and where it might be beneficial to re-use registers from
	     the input.  */
	  gcc_assert (scratch_constraint[0] == '='
		      && (in_p || scratch_constraint[1] == '&'));
	  scratch_constraint++;
	  if (*scratch_constraint == '&')
	    scratch_constraint++;
	  scratch_class = (reg_class_for_constraint
			   (lookup_constraint (scratch_constraint)));

	  if (reg_class_subset_p (reload_class, insn_class))
	    {
	      gcc_assert (scratch_class == rclass);
	      rclass = NO_REGS;
	    }
	  else
	    rclass = insn_class;

	}
      /* NO_REGS means the pattern handles everything; otherwise a
	 tertiary reload through RCLASS is still required.  */
      if (rclass == NO_REGS)
	sri->icode = icode;
      else
	sri->t_icode = icode;
    }
  return rclass;
}
1440 :
1441 : /* The default implementation of TARGET_SECONDARY_MEMORY_NEEDED_MODE. */
1442 :
1443 : machine_mode
1444 0 : default_secondary_memory_needed_mode (machine_mode mode)
1445 : {
1446 0 : if (!targetm.lra_p ()
1447 0 : && known_lt (GET_MODE_BITSIZE (mode), BITS_PER_WORD)
1448 0 : && INTEGRAL_MODE_P (mode))
1449 0 : return mode_for_size (BITS_PER_WORD, GET_MODE_CLASS (mode), 0).require ();
1450 : return mode;
1451 : }
1452 :
1453 : /* By default, if flag_pic is true, then neither local nor global relocs
1454 : should be placed in readonly memory. */
1455 :
1456 : int
1457 0 : default_reloc_rw_mask (void)
1458 : {
1459 0 : return flag_pic ? 3 : 0;
1460 : }
1461 :
/* By default, address diff vectors are generated
   for jump tables when flag_pic is true.  */

bool
default_generate_pic_addr_diff_vec (void)
{
  return flag_pic;
}
1470 :
/* Record an element in the table of global constructors.  SYMBOL is
   a SYMBOL_REF of the function to be called; PRIORITY is a number
   between 0 and MAX_INIT_PRIORITY.  The default has no way to emit
   such a table and reports the limitation.  */

void
default_asm_out_constructor (rtx symbol ATTRIBUTE_UNUSED,
			     int priority ATTRIBUTE_UNUSED)
{
  sorry ("global constructors not supported on this target");
}
1481 :
/* Likewise for global destructors.  */

void
default_asm_out_destructor (rtx symbol ATTRIBUTE_UNUSED,
			    int priority ATTRIBUTE_UNUSED)
{
  sorry ("global destructors not supported on this target");
}
1490 :
/* The default hook for TARGET_MANGLE_DECL_ASSEMBLER_NAME: return the
   identifier ID unchanged (no target-specific mangling).  */
tree default_mangle_decl_assembler_name (tree decl ATTRIBUTE_UNUSED,
					 tree id)
{
  return id;
}
1497 :
/* The default implementation of TARGET_STATIC_RTX_ALIGNMENT: use the
   natural alignment of MODE, in bits.  */

HOST_WIDE_INT
default_static_rtx_alignment (machine_mode mode)
{
  return GET_MODE_ALIGNMENT (mode);
}
1505 :
/* The default implementation of TARGET_CONSTANT_ALIGNMENT: keep the
   alignment the middle end already computed.  */

HOST_WIDE_INT
default_constant_alignment (const_tree, HOST_WIDE_INT align)
{
  return align;
}
1513 :
1514 : /* An implementation of TARGET_CONSTANT_ALIGNMENT that aligns strings
1515 : to at least BITS_PER_WORD but otherwise makes no changes. */
1516 :
1517 : HOST_WIDE_INT
1518 0 : constant_alignment_word_strings (const_tree exp, HOST_WIDE_INT align)
1519 : {
1520 0 : if (TREE_CODE (exp) == STRING_CST)
1521 0 : return MAX (align, BITS_PER_WORD);
1522 : return align;
1523 : }
1524 :
1525 : /* Default to natural alignment for vector types, bounded by
1526 : MAX_OFILE_ALIGNMENT. */
1527 :
1528 : HOST_WIDE_INT
1529 70187479 : default_vector_alignment (const_tree type)
1530 : {
1531 70187479 : unsigned HOST_WIDE_INT align = MAX_OFILE_ALIGNMENT;
1532 70187479 : tree size = TYPE_SIZE (type);
1533 70187479 : if (tree_fits_uhwi_p (size))
1534 70187479 : align = tree_to_uhwi (size);
1535 70187479 : if (align >= MAX_OFILE_ALIGNMENT)
1536 : return MAX_OFILE_ALIGNMENT;
1537 70187472 : return MAX (align, GET_MODE_ALIGNMENT (TYPE_MODE (type)));
1538 : }
1539 :
/* The default implementation of
   TARGET_VECTORIZE_PREFERRED_VECTOR_ALIGNMENT: prefer the type's own
   alignment, in bits.  */

poly_uint64
default_preferred_vector_alignment (const_tree type)
{
  return TYPE_ALIGN (type);
}
1548 :
/* The default implementation of
   TARGET_VECTORIZE_PREFERRED_DIV_AS_SHIFTS_OVER_MULT: prefer shifts
   exactly when the target lacks a highpart-multiply for the mode.  */

bool
default_preferred_div_as_shifts_over_mult (const_tree type)
{
  return !can_mult_highpart_p (TYPE_MODE (type), TYPE_UNSIGNED (type));
}
1557 :
/* By default assume vectors of element TYPE require a multiple of the natural
   alignment of TYPE.  TYPE is naturally aligned if IS_PACKED is false.  */
bool
default_builtin_vector_alignment_reachable (const_tree /*type*/, bool is_packed)
{
  return ! is_packed;
}
1565 :
1566 : /* By default, assume that a target supports any factor of misalignment
1567 : memory access if it supports movmisalign patten.
1568 : is_packed is true if the memory access is defined in a packed struct. */
1569 : bool
1570 1091668 : default_builtin_support_vector_misalignment (machine_mode mode,
1571 : int misalignment
1572 : ATTRIBUTE_UNUSED,
1573 : bool is_packed
1574 : ATTRIBUTE_UNUSED,
1575 : bool is_gather_scatter
1576 : ATTRIBUTE_UNUSED)
1577 : {
1578 1091668 : if (optab_handler (movmisalign_optab, mode) != CODE_FOR_nothing)
1579 1091491 : return true;
1580 : return false;
1581 : }
1582 :
/* By default, only attempt to parallelize bitwise operations, and
   possibly adds/subtracts using bit-twiddling, by operating on a
   full machine word.  */

machine_mode
default_preferred_simd_mode (scalar_mode)
{
  return word_mode;
}
1591 :
/* By default do not split reductions further: keep the original mode.  */

machine_mode
default_split_reduction (machine_mode mode)
{
  return mode;
}
1599 :
/* By default only the preferred vector mode is tried; return 0 to add
   no extra candidate modes for autovectorization.  */

unsigned int
default_autovectorize_vector_modes (vector_modes *, bool)
{
  return 0;
}
1607 :
/* The default implementation of TARGET_VECTORIZE_RELATED_MODE.  Find a
   vector mode with element mode ELEMENT_MODE and NUNITS elements that the
   target supports; when NUNITS is zero, derive it from VECTOR_MODE's byte
   size.  Returns no mode if no supported match exists.  */

opt_machine_mode
default_vectorize_related_mode (machine_mode vector_mode,
				scalar_mode element_mode,
				poly_uint64 nunits)
{
  machine_mode result_mode;
  /* NUNITS == 0 means "same total size as VECTOR_MODE"; multiple_p then
     computes the element count into NUNITS as a side effect.  */
  if ((maybe_ne (nunits, 0U)
       || multiple_p (GET_MODE_SIZE (vector_mode),
		      GET_MODE_SIZE (element_mode), &nunits))
      && mode_for_vector (element_mode, nunits).exists (&result_mode)
      && VECTOR_MODE_P (result_mode)
      && targetm.vector_mode_supported_p (result_mode))
    return result_mode;

  return opt_machine_mode ();
}
1626 :
/* By default a vector of integers is used as a mask: the mask mode is the
   integer vector mode with the same geometry as MODE.  */

opt_machine_mode
default_get_mask_mode (machine_mode mode)
{
  return related_int_vector_mode (mode);
}
1634 :
/* The default for TARGET_VECTORIZE_CONDITIONAL_OPERATION_IS_EXPENSIVE:
   of the conditional internal functions, only masked stores are
   considered expensive by default.  */

bool
default_conditional_operation_is_expensive (unsigned ifn)
{
  return ifn == IFN_MASK_STORE;
}
1642 :
/* The default for TARGET_VECTORIZE_EMPTY_MASK_IS_EXPENSIVE: only a
   masked store with an all-false mask is considered worth avoiding.  */

bool
default_empty_mask_is_expensive (unsigned ifn)
{
  return ifn == IFN_MASK_STORE;
}
1650 :
/* By default, the cost model accumulates three separate costs (prologue,
   loop body, and epilogue) for a vectorized loop or block.  So allocate an
   array of three unsigned ints, set it to zero, and return its address.
   The caller owns and frees the returned object.  */

vector_costs *
default_vectorize_create_costs (vec_info *vinfo, bool costing_for_scalar)
{
  return new vector_costs (vinfo, costing_for_scalar);
}
1660 :
1661 : /* Determine whether or not a pointer mode is valid. Assume defaults
1662 : of ptr_mode or Pmode - can be overridden. */
1663 : bool
1664 6975899 : default_valid_pointer_mode (scalar_int_mode mode)
1665 : {
1666 6975899 : return (mode == ptr_mode || mode == Pmode);
1667 : }
1668 :
/* Determine whether the memory reference specified by REF may alias
   the C libraries errno location.  */
bool
default_ref_may_alias_errno (ao_ref *ref)
{
  tree base = ao_ref_base (ref);
  /* The default implementation assumes the errno location is
     a declaration of type int or is always accessed via a
     pointer to int.  We assume that accesses to errno are
     not deliberately obfuscated (even in conforming ways).  */
  if (TYPE_UNSIGNED (TREE_TYPE (base))
      || TYPE_MODE (TREE_TYPE (base)) != TYPE_MODE (integer_type_node))
    return false;
  /* The default implementation assumes an errno location declaration
     is never defined in the current compilation unit and may not be
     aliased by a local variable.  */
  if (DECL_P (base)
      && DECL_EXTERNAL (base)
      && !TREE_STATIC (base))
    return true;
  else if (TREE_CODE (base) == MEM_REF
	   && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
    {
      /* An indirect access may alias errno unless points-to analysis
	 proves the pointer cannot reach nonlocal storage.  */
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
      return !pi || pi->pt.anything || pi->pt.nonlocal;
    }
  return false;
}
1697 :
/* Return the mode for a pointer to a given ADDRSPACE,
   defaulting to ptr_mode for all address spaces.  */

scalar_int_mode
default_addr_space_pointer_mode (addr_space_t addrspace ATTRIBUTE_UNUSED)
{
  return ptr_mode;
}
1706 :
/* Return the mode for an address in a given ADDRSPACE,
   defaulting to Pmode for all address spaces.  */

scalar_int_mode
default_addr_space_address_mode (addr_space_t addrspace ATTRIBUTE_UNUSED)
{
  return Pmode;
}
1715 :
/* Named address space version of valid_pointer_mode.
   To match the above, the same modes apply to all address spaces.  */

bool
default_addr_space_valid_pointer_mode (scalar_int_mode mode,
				       addr_space_t as ATTRIBUTE_UNUSED)
{
  return targetm.valid_pointer_mode (mode);
}
1725 :
1726 : /* Some places still assume that all pointer or address modes are the
1727 : standard Pmode and ptr_mode. These optimizations become invalid if
1728 : the target actually supports multiple different modes. For now,
1729 : we disable such optimizations on such targets, using this function. */
1730 :
1731 : bool
1732 693875180 : target_default_pointer_address_modes_p (void)
1733 : {
1734 693875180 : if (targetm.addr_space.address_mode != default_addr_space_address_mode)
1735 : return false;
1736 693875180 : if (targetm.addr_space.pointer_mode != default_addr_space_pointer_mode)
1737 0 : return false;
1738 :
1739 : return true;
1740 : }
1741 :
/* Named address space version of legitimate_address_p.
   By default, all address spaces have the same form.  */

bool
default_addr_space_legitimate_address_p (machine_mode mode, rtx mem,
					 bool strict,
					 addr_space_t as ATTRIBUTE_UNUSED,
					 code_helper code)
{
  return targetm.legitimate_address_p (mode, mem, strict, code);
}
1753 :
/* Named address space version of LEGITIMIZE_ADDRESS.
   By default, all address spaces have the same form.  */

rtx
default_addr_space_legitimize_address (rtx x, rtx oldx, machine_mode mode,
				       addr_space_t as ATTRIBUTE_UNUSED)
{
  return targetm.legitimize_address (x, oldx, mode);
}
1763 :
/* The default hook for determining if one named address space is a subset of
   another and to return which address space to use as the common address
   space.  By default an address space is only a subset of itself.  */

bool
default_addr_space_subset_p (addr_space_t subset, addr_space_t superset)
{
  return (subset == superset);
}
1773 :
/* The default hook for determining if 0 within a named address
   space is a valid address: assume it is the null pointer and
   therefore invalid.  */

bool
default_addr_space_zero_address_valid (addr_space_t as ATTRIBUTE_UNUSED)
{
  return false;
}
1782 :
/* The default hook for debugging the address space is to return the
   address space number to indicate DW_AT_address_class.  */
int
default_addr_space_debug (addr_space_t as)
{
  return as;
}
1790 :
/* The default hook implementation for TARGET_ADDR_SPACE_DIAGNOSE_USAGE.
   Don't complain about any address space.  */

void
default_addr_space_diagnose_usage (addr_space_t, location_t)
{
}
1798 :

/* The default hook for TARGET_ADDR_SPACE_CONVERT.  This hook should never be
   called for targets with only a generic address space, so aborting here
   indicates a missing target override.  */

rtx
default_addr_space_convert (rtx op ATTRIBUTE_UNUSED,
			    tree from_type ATTRIBUTE_UNUSED,
			    tree to_type ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}
1810 :

/* The default hook for TARGET_ADDR_SPACE_FOR_ARTIFICIAL_RODATA: place
   compiler-generated read-only data in the generic address space.  */

addr_space_t
default_addr_space_for_artificial_rodata (tree, artificial_rodata)
{
  return ADDR_SPACE_GENERIC;
}
1819 :
1820 :
/* The default implementation of TARGET_HARD_REGNO_NREGS: the number of
   word-sized hard registers needed to hold a value of MODE.  */

unsigned int
default_hard_regno_nregs (unsigned int, machine_mode mode)
{
  /* Targets with variable-sized modes must provide their own definition
     of this hook.  */
  return CEIL (GET_MODE_SIZE (mode).to_constant (), UNITS_PER_WORD);
}
1830 :
/* The default for TARGET_HARD_REGNO_SCRATCH_OK: any hard register may be
   used as a scratch register.  */
bool
default_hard_regno_scratch_ok (unsigned int regno ATTRIBUTE_UNUSED)
{
  return true;
}
1836 :
/* The default implementation of TARGET_MODE_DEPENDENT_ADDRESS_P: no
   address is considered mode-dependent.  */

bool
default_mode_dependent_address_p (const_rtx addr ATTRIBUTE_UNUSED,
				  addr_space_t addrspace ATTRIBUTE_UNUSED)
{
  return false;
}
1845 :
/* NOTE(review): this two-argument declaration does not match the
   three-argument definition below; in C++ it declares a distinct,
   apparently unused overload.  Looks like a stale leftover -- confirm
   against callers before removing.  */
extern bool default_new_address_profitable_p (rtx, rtx);


/* The default implementation of TARGET_NEW_ADDRESS_PROFITABLE_P: assume
   any new address form is at least as profitable as the old one.  */

bool
default_new_address_profitable_p (rtx memref ATTRIBUTE_UNUSED,
				  rtx_insn *insn ATTRIBUTE_UNUSED,
				  rtx new_addr ATTRIBUTE_UNUSED)
{
  return true;
}
1858 :
/* The default for TARGET_OPTION_VALID_ATTRIBUTE_P: the "target" function
   attribute is unsupported; warn and reject it.  */
bool
default_target_option_valid_attribute_p (tree ARG_UNUSED (fndecl),
					 tree ARG_UNUSED (name),
					 tree ARG_UNUSED (args),
					 int ARG_UNUSED (flags))
{
  warning (OPT_Wattributes,
	   "%<target%> attribute is not supported on this machine");

  return false;
}
1870 :
/* The default for TARGET_OPTION_VALID_VERSION_ATTRIBUTE_P: the
   "target_version" attribute is unsupported; warn and reject it.  */
bool
default_target_option_valid_version_attribute_p (tree ARG_UNUSED (fndecl),
						 tree ARG_UNUSED (name),
						 tree ARG_UNUSED (args),
						 int ARG_UNUSED (flags))
{
  warning (OPT_Wattributes,
	   "%<target_version%> attribute is not supported on this machine");

  return false;
}
1882 :
/* The default for TARGET_OPTION_PRAGMA_PARSE: "#pragma GCC target" is
   unsupported; warn (unless this is a pop) and reject it.  */
bool
default_target_option_pragma_parse (tree ARG_UNUSED (args),
				    tree ARG_UNUSED (pop_target))
{
  /* If args is NULL the caller is handle_pragma_pop_options ().  In that case,
     emit no warning because "#pragma GCC pop_target" is valid on targets that
     do not have the "target" pragma.  */
  if (args)
    warning (OPT_Wpragmas,
	     "%<#pragma GCC target%> is not supported for this machine");

  return false;
}
1896 :
1897 : bool
1898 0 : default_target_can_inline_p (tree caller, tree callee)
1899 : {
1900 0 : tree callee_opts = DECL_FUNCTION_SPECIFIC_TARGET (callee);
1901 0 : tree caller_opts = DECL_FUNCTION_SPECIFIC_TARGET (caller);
1902 0 : if (! callee_opts)
1903 0 : callee_opts = target_option_default_node;
1904 0 : if (! caller_opts)
1905 0 : caller_opts = target_option_default_node;
1906 :
1907 : /* If both caller and callee have attributes, assume that if the
1908 : pointer is different, the two functions have different target
1909 : options since build_target_option_node uses a hash table for the
1910 : options. */
1911 0 : return callee_opts == caller_opts;
1912 : }
1913 :
/* By default, return false to not need to collect any target information
   for inlining.  Target maintainer should re-define the hook if the
   target want to take advantage of it.  */

bool
default_need_ipa_fn_target_info (const_tree, unsigned int &)
{
  return false;
}
1923 :
/* The default for TARGET_UPDATE_IPA_FN_TARGET_INFO: nothing to record
   per statement; tell the caller to stop scanning.  */
bool
default_update_ipa_fn_target_info (unsigned int &, const gimple *)
{
  return false;
}
1929 :
1930 : /* If the machine does not have a case insn that compares the bounds,
1931 : this means extra overhead for dispatch tables, which raises the
1932 : threshold for using them. */
1933 :
1934 : unsigned int
1935 6686581 : default_case_values_threshold (void)
1936 : {
1937 6686581 : return (targetm.have_casesi () ? 4 : 5);
1938 : }
1939 :
/* The default for TARGET_HAVE_CONDITIONAL_EXECUTION: defer to the
   HAVE_conditional_execution configuration macro.  */
bool
default_have_conditional_execution (void)
{
  return HAVE_conditional_execution;
}
1945 :
/* The default for TARGET_HAVE_CCMP: conditional-compare support is
   signalled by the target providing a gen_ccmp_first expander.  */
bool
default_have_ccmp (void)
{
  return targetm.gen_ccmp_first != NULL;
}
1951 :
1952 : /* By default we assume that c99 functions are present at the runtime,
1953 : but sincos is not. */
1954 : bool
1955 0 : default_libc_has_function (enum function_class fn_class,
1956 : tree type ATTRIBUTE_UNUSED)
1957 : {
1958 0 : if (fn_class == function_c94
1959 : || fn_class == function_c99_misc
1960 : || fn_class == function_c99_math_complex)
1961 0 : return true;
1962 :
1963 : return false;
1964 : }
1965 :
/* By default assume that libc has not a fast implementation.  */

bool
default_libc_has_fast_function (int fcode ATTRIBUTE_UNUSED)
{
  return false;
}
1973 :
/* Variant of the libc_has_function hook for glibc: assume every queried
   function class is available at runtime.  */
bool
gnu_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED,
		       tree type ATTRIBUTE_UNUSED)
{
  return true;
}
1980 :
/* Variant of the libc_has_function hook for libcs without C99 support:
   assume no queried function class is available.  */
bool
no_c99_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED,
			  tree type ATTRIBUTE_UNUSED)
{
  return false;
}
1987 :
1988 : /* Assume some c99 functions are present at the runtime including sincos. */
1989 : bool
1990 0 : bsd_libc_has_function (enum function_class fn_class,
1991 : tree type ATTRIBUTE_UNUSED)
1992 : {
1993 0 : if (fn_class == function_c94
1994 0 : || fn_class == function_c99_misc
1995 0 : || fn_class == function_sincos)
1996 : return true;
1997 :
1998 : return false;
1999 : }
2000 :
/* By default, -fhardened will add -D_FORTIFY_SOURCE=2.  */

unsigned
default_fortify_source_default_level ()
{
  return 2;
}
2008 :
/* The default for TARGET_LIBM_FUNCTION_MAX_ERROR: the math library's
   error is unknown, so report the maximum (unbounded) ulp count.  */
unsigned
default_libm_function_max_error (unsigned, machine_mode, bool)
{
  return ~0U;
}
2014 :
/* glibc/Linux implementation of TARGET_LIBM_FUNCTION_MAX_ERROR: return
   the maximum error, in ulps, of the libm function CFN for values of
   MODE, per the glibc accuracy tables.  BOUNDARY_P selects the error
   bound for results near the mathematical range boundaries instead.  */
unsigned
glibc_linux_libm_function_max_error (unsigned cfn, machine_mode mode,
				     bool boundary_p)
{
  /* Let's use
     https://www.gnu.org/software/libc/manual/2.22/html_node/Errors-in-Math-Functions.html
     https://www.gnu.org/software/libc/manual/html_node/Errors-in-Math-Functions.html
     with usual values recorded here and significant outliers handled in
     target CPU specific overriders.  The tables only record default
     rounding to nearest, for -frounding-math let's add some extra ulps.
     For boundary_p values (say finite results outside of [-1.,1.] for
     sin/cos, or [-0.,+Inf] for sqrt etc. let's use custom random testers.  */
  int rnd = flag_rounding_math ? 4 : 0;
  /* Classify MODE by floating-point format: single, double, extended,
     or quad precision.  */
  bool sf = (REAL_MODE_FORMAT (mode) == &ieee_single_format
	     || REAL_MODE_FORMAT (mode) == &mips_single_format
	     || REAL_MODE_FORMAT (mode) == &motorola_single_format);
  bool df = (REAL_MODE_FORMAT (mode) == &ieee_double_format
	     || REAL_MODE_FORMAT (mode) == &mips_double_format
	     || REAL_MODE_FORMAT (mode) == &motorola_double_format);
  bool xf = (REAL_MODE_FORMAT (mode) == &ieee_extended_intel_96_format
	     || REAL_MODE_FORMAT (mode) == &ieee_extended_intel_128_format
	     || REAL_MODE_FORMAT (mode) == &ieee_extended_motorola_format);
  bool tf = (REAL_MODE_FORMAT (mode) == &ieee_quad_format
	     || REAL_MODE_FORMAT (mode) == &mips_quad_format);

  switch (cfn)
    {
    CASE_CFN_SQRT:
    CASE_CFN_SQRT_FN:
      if (boundary_p)
	/* https://gcc.gnu.org/pipermail/gcc-patches/2023-April/616595.html */
	return 0;
      if (sf || df || xf || tf)
	return 0 + rnd;
      break;
    CASE_CFN_COS:
    CASE_CFN_COS_FN:
      /* cos is generally errors like sin, but far more arches have 2ulps
	 for double.  */
      if (!boundary_p && df)
	return 2 + rnd;
      gcc_fallthrough ();
    CASE_CFN_SIN:
    CASE_CFN_SIN_FN:
      if (boundary_p)
	/* According to
	   https://sourceware.org/pipermail/gcc-patches/2023-April/616315.html
	   seems default rounding sin/cos stay strictly in [-1.,1.] range,
	   with rounding to infinity it can be 1ulp larger/smaller.  */
	return flag_rounding_math ? 1 : 0;
      if (sf || df)
	return 1 + rnd;
      if (xf || tf)
	return 2 + rnd;
      break;
    default:
      break;
    }

  /* Unknown function or format: fall back to "unbounded".  */
  return default_libm_function_max_error (cfn, mode, boundary_p);
}
2076 :
/* The default for the transactional-memory load/store builtin hook:
   no TM-aware load/store builtin exists for TYPE.  */
tree
default_builtin_tm_load_store (tree ARG_UNUSED (type))
{
  return NULL_TREE;
}
2082 :
/* Compute cost of moving registers to/from memory.  Uses the legacy
   MEMORY_MOVE_COST macro when the target defines one; otherwise a base
   cost of 4 plus any secondary-reload memory cost.  */

int
default_memory_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
			  reg_class_t rclass ATTRIBUTE_UNUSED,
			  bool in ATTRIBUTE_UNUSED)
{
#ifndef MEMORY_MOVE_COST
  return (4 + memory_move_secondary_cost (mode, (enum reg_class) rclass, in));
#else
  return MEMORY_MOVE_COST (MACRO_MODE (mode), (enum reg_class) rclass, in);
#endif
}
2096 :
/* Compute cost of moving data from a register of class FROM to one of
   TO, using MODE.  Uses the legacy REGISTER_MOVE_COST macro when defined;
   otherwise a flat cost of 2.  */

int
default_register_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
			    reg_class_t from ATTRIBUTE_UNUSED,
			    reg_class_t to ATTRIBUTE_UNUSED)
{
#ifndef REGISTER_MOVE_COST
  return 2;
#else
  return REGISTER_MOVE_COST (MACRO_MODE (mode),
			     (enum reg_class) from, (enum reg_class) to);
#endif
}
2112 :
2113 : /* The default implementation of TARGET_CALLEE_SAVE_COST. */
2114 :
2115 : int
2116 0 : default_callee_save_cost (spill_cost_type spill_type, unsigned int,
2117 : machine_mode, unsigned int, int mem_cost,
2118 : const HARD_REG_SET &callee_saved_regs,
2119 : bool existing_spills_p)
2120 : {
2121 0 : if (!existing_spills_p)
2122 : {
2123 0 : auto frame_type = (spill_type == spill_cost_type::SAVE
2124 0 : ? frame_cost_type::ALLOCATION
2125 : : frame_cost_type::DEALLOCATION);
2126 0 : mem_cost += targetm.frame_allocation_cost (frame_type,
2127 : callee_saved_regs);
2128 : }
2129 0 : return mem_cost;
2130 : }
2131 :
/* The default implementation of TARGET_FRAME_ALLOCATION_COST.
   Assume allocating or deallocating the frame costs nothing.  */

int
default_frame_allocation_cost (frame_cost_type, const HARD_REG_SET &)
{
  return 0;
}
2139 :
/* The default implementation of TARGET_SLOW_UNALIGNED_ACCESS.
   Unaligned accesses are considered slow exactly when the target
   enforces strict alignment.  */

bool
default_slow_unaligned_access (machine_mode, unsigned int)
{
  return STRICT_ALIGNMENT;
}
2147 :
/* The default implementation of TARGET_ESTIMATED_POLY_VALUE.
   Estimate X by its constant coefficient, i.e. assume the runtime
   indeterminate part is zero.  */

HOST_WIDE_INT
default_estimated_poly_value (poly_int64 x, poly_value_estimate_kind)
{
  return x.coeffs[0];
}
2155 :
/* For hooks which use the MOVE_RATIO macro, this gives the legacy default
   behavior.  SPEED_P is true if we are compiling for speed.  */

unsigned int
get_move_ratio (bool speed_p ATTRIBUTE_UNUSED)
{
  unsigned int move_ratio;
#ifdef MOVE_RATIO
  /* The target supplies its own ratio.  */
  move_ratio = (unsigned int) MOVE_RATIO (speed_p);
#else
#if defined (HAVE_cpymemqi) || defined (HAVE_cpymemhi) || defined (HAVE_cpymemsi) || defined (HAVE_cpymemdi) || defined (HAVE_cpymemti)
  /* A cpymem pattern exists, so expanding moves by pieces is rarely
     worthwhile beyond a couple of instructions.  */
  move_ratio = 2;
#else /* No cpymem patterns, pick a default.  */
  move_ratio = ((speed_p) ? 15 : 3);
#endif
#endif
  return move_ratio;
}
2174 :
/* Return TRUE if the move_by_pieces/set_by_pieces infrastructure should be
   used; return FALSE if the cpymem/setmem optab should be expanded, or
   a call to memcpy emitted.  SIZE is the number of bytes, ALIGNMENT the
   known alignment, OP the kind of by-pieces operation, and SPEED_P is
   true when optimizing for speed.  */

bool
default_use_by_pieces_infrastructure_p (unsigned HOST_WIDE_INT size,
					unsigned int alignment,
					enum by_pieces_operation op,
					bool speed_p)
{
  unsigned int max_size = 0;
  unsigned int ratio = 0;

  /* Each operation kind has its own maximum piece size and its own
     instruction-count threshold.  */
  switch (op)
    {
    case CLEAR_BY_PIECES:
      max_size = STORE_MAX_PIECES;
      ratio = CLEAR_RATIO (speed_p);
      break;
    case MOVE_BY_PIECES:
      max_size = MOVE_MAX_PIECES;
      ratio = get_move_ratio (speed_p);
      break;
    case SET_BY_PIECES:
      max_size = STORE_MAX_PIECES;
      ratio = SET_RATIO (speed_p);
      break;
    case STORE_BY_PIECES:
      max_size = STORE_MAX_PIECES;
      ratio = get_move_ratio (speed_p);
      break;
    case COMPARE_BY_PIECES:
      max_size = COMPARE_MAX_PIECES;
      /* Pick a likely default, just as in get_move_ratio.  */
      ratio = speed_p ? 15 : 3;
      break;
    }

  /* Use by-pieces expansion only if it needs fewer instructions than
     the threshold allows.  */
  return by_pieces_ninsns (size, alignment, max_size + 1, op) < ratio;
}
2215 :
/* This hook controls code generation for expanding a memcmp operation by
   pieces.  Return 1 for the normal pattern of compare/jump after each pair
   of loads, or a higher number to reduce the number of branches.  */

int
default_compare_by_pieces_branch_ratio (machine_mode)
{
  /* Default: branch after every comparison.  */
  return 1;
}
2225 :
/* Write PATCH_AREA_SIZE NOPs into the asm outfile FILE around a function
   entry.  If RECORD_P is true and the target supports named sections,
   the location of the NOPs will be recorded in a special object section
   called "__patchable_function_entries".  This routine may be called
   twice per function to put NOPs before and after the function
   entry.  */

void
default_print_patchable_function_entry (FILE *file,
					unsigned HOST_WIDE_INT patch_area_size,
					bool record_p)
{
  const char *nop_templ = 0;
  int code_num;
  /* Build a raw nop insn so we can ask the target for its textual
     template.  */
  rtx_insn *my_nop = make_insn_raw (gen_nop ());

  /* We use the template alone, relying on the (currently sane) assumption
     that the NOP template does not have variable operands.  */
  code_num = recog_memoized (my_nop);
  nop_templ = get_insn_template (code_num, my_nop);

  if (record_p && targetm_common.have_named_sections)
    {
      char buf[256];
      section *previous_section = in_section;
      const char *asm_op = integer_asm_op (POINTER_SIZE_UNITS, false);

      gcc_assert (asm_op != NULL);
      /* If SECTION_LINK_ORDER is supported, this internal label will
	 be filled as the symbol for linked_to section.  */
      ASM_GENERATE_INTERNAL_LABEL (buf, "LPFE", current_function_funcdef_no);

      unsigned int flags = SECTION_WRITE | SECTION_RELRO;
      if (HAVE_GAS_SECTION_LINK_ORDER)
	flags |= SECTION_LINK_ORDER;

      section *sect = get_section ("__patchable_function_entries",
				  flags, current_function_decl);
      /* COMDAT functions get a matching COMDAT record section so the
	 linker can discard duplicates together with the function.  */
      if (HAVE_COMDAT_GROUP && DECL_COMDAT_GROUP (current_function_decl))
	switch_to_comdat_section (sect, current_function_decl);
      else
	switch_to_section (sect);
      assemble_align (POINTER_SIZE);
      /* Emit a pointer-sized reference to the label placed at the
	 patchable entry.  */
      fputs (asm_op, file);
      assemble_name_raw (file, buf);
      fputc ('\n', file);

      /* Return to the text section and drop the label itself there.  */
      switch_to_section (previous_section);
      ASM_OUTPUT_LABEL (file, buf);
    }

  /* Emit the requested number of NOPs.  */
  unsigned i;
  for (i = 0; i < patch_area_size; ++i)
    output_asm_insn (nop_templ, NULL);
}
2281 :
/* Return true if the profiling call should be emitted before the
   function prologue; controlled by the target's PROFILE_BEFORE_PROLOGUE
   macro.  */
bool
default_profile_before_prologue (void)
{
#ifdef PROFILE_BEFORE_PROLOGUE
  return true;
#else
  return false;
#endif
}
2291 :
/* The default implementation of TARGET_PREFERRED_RELOAD_CLASS.
   Defers to the legacy PREFERRED_RELOAD_CLASS macro when the target
   defines one, otherwise keeps RCLASS unchanged.  */

reg_class_t
default_preferred_reload_class (rtx x ATTRIBUTE_UNUSED,
				reg_class_t rclass)
{
#ifdef PREFERRED_RELOAD_CLASS
  return (reg_class_t) PREFERRED_RELOAD_CLASS (x, (enum reg_class) rclass);
#else
  return rclass;
#endif
}
2304 :
/* The default implementation of TARGET_OUTPUT_PREFERRED_RELOAD_CLASS.
   No preference: return RCLASS unchanged.  */

reg_class_t
default_preferred_output_reload_class (rtx x ATTRIBUTE_UNUSED,
				       reg_class_t rclass)
{
  return rclass;
}
2313 :
/* The default implementation of TARGET_PREFERRED_RENAME_CLASS.
   NO_REGS means no preference for register renaming.  */
reg_class_t
default_preferred_rename_class (reg_class_t rclass ATTRIBUTE_UNUSED)
{
  return NO_REGS;
}
2320 :
/* The default implementation of TARGET_CLASS_LIKELY_SPILLED_P.
   A class containing a single register is likely to be spilled.  */

bool
default_class_likely_spilled_p (reg_class_t rclass)
{
  return (reg_class_size[(int) rclass] == 1);
}
2328 :
/* The default implementation of TARGET_CLASS_MAX_NREGS: the number of
   hard registers needed to hold a value of MODE in a register of
   class RCLASS.  */

unsigned char
default_class_max_nregs (reg_class_t rclass ATTRIBUTE_UNUSED,
			 machine_mode mode ATTRIBUTE_UNUSED)
{
#ifdef CLASS_MAX_NREGS
  /* Legacy targets still define the macro; defer to it.  */
  return (unsigned char) CLASS_MAX_NREGS ((enum reg_class) rclass,
					  MACRO_MODE (mode));
#else
  /* Targets with variable-sized modes must provide their own definition
     of this hook.  */
  unsigned int size = GET_MODE_SIZE (mode).to_constant ();
  /* Round the byte size up to whole words.  */
  return (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
#endif
}
2345 :
2346 : /* The default implementation of TARGET_AVOID_STORE_FORWARDING_P. */
2347 :
2348 : bool
2349 8 : default_avoid_store_forwarding_p (vec<store_fwd_info>, rtx, int total_cost,
2350 : bool)
2351 : {
2352 : /* Use a simple cost heurstic base on param_store_forwarding_max_distance.
2353 : In general the distance should be somewhat correlated to the store
2354 : forwarding penalty; if the penalty is large then it is justified to
2355 : increase the window size. Use this to reject sequences that are clearly
2356 : unprofitable.
2357 : Skip the cost check if param_store_forwarding_max_distance is 0. */
2358 8 : int max_cost = COSTS_N_INSNS (param_store_forwarding_max_distance / 2);
2359 8 : const bool unlimited_cost = (param_store_forwarding_max_distance == 0);
2360 8 : if (!unlimited_cost && total_cost > max_cost && max_cost)
2361 : {
2362 1 : if (dump_file)
2363 0 : fprintf (dump_file, "Not transformed due to cost: %d > %d.\n",
2364 : total_cost, max_cost);
2365 :
2366 1 : return false;
2367 : }
2368 :
2369 : return true;
2370 : }
2371 :
/* Determine the debugging unwind mechanism for the target.  */

enum unwind_info_type
default_debug_unwind_info (void)
{
  /* If the target wants to force the use of dwarf2 unwind info, let it.  */
  /* ??? Change all users to the hook, then poison this.  */
#ifdef DWARF2_FRAME_INFO
  if (DWARF2_FRAME_INFO)
    return UI_DWARF2;
#endif

  /* Otherwise, only turn it on if dwarf2 debugging is enabled.  */
#ifdef DWARF2_DEBUGGING_INFO
  if (dwarf_debuginfo_p ())
    return UI_DWARF2;
#endif

  /* No unwind info by default.  */
  return UI_NONE;
}
2392 :
/* Targets that set NUM_POLY_INT_COEFFS to something greater than 1
   must define this hook; the default is only reachable by mistake.  */

unsigned int
default_dwarf_poly_indeterminate_value (unsigned int, unsigned int *, int *)
{
  gcc_unreachable ();
}
2401 :
/* Determine the correct mode for a Dwarf frame register that represents
   register REGNO.  */

machine_mode
default_dwarf_frame_reg_mode (int regno)
{
  machine_mode save_mode = reg_raw_mode[regno];

  /* If only part of the raw mode survives an EH-edge call, fall back to
     a mode that is fully preserved for this register.  */
  if (targetm.hard_regno_call_part_clobbered (eh_edge_abi.id (),
					      regno, save_mode))
    save_mode = choose_hard_reg_mode (regno, 1, &eh_edge_abi);
  return save_mode;
}
2415 :
/* To be used by targets where reg_raw_mode doesn't return the right
   mode for registers used in apply_builtin_return and apply_builtin_arg.  */

fixed_size_mode
default_get_reg_raw_mode (int regno)
{
  /* Targets must override this hook if the underlying register is
     variable-sized; the as_a cast asserts the mode is fixed-size.  */
  return as_a <fixed_size_mode> (reg_raw_mode[regno]);
}
2426 :
/* Return true if a leaf function should stay leaf even with profiling
   enabled.  By default, profiling disqualifies leaf status.  */

bool
default_keep_leaf_when_profiled ()
{
  return false;
}
2435 :
2436 : /* Return true if the state of option OPTION should be stored in PCH files
2437 : and checked by default_pch_valid_p. Store the option's current state
2438 : in STATE if so. */
2439 :
2440 : static inline bool
2441 44804568 : option_affects_pch_p (int option, struct cl_option_state *state)
2442 : {
2443 44804568 : if ((cl_options[option].flags & CL_TARGET) == 0)
2444 : return false;
2445 3676824 : if ((cl_options[option].flags & CL_PCH_IGNORE) != 0)
2446 : return false;
2447 3676824 : if (option_flag_var (option, &global_options) == &target_flags)
2448 617934 : if (targetm.check_pch_target_flags)
2449 : return false;
2450 3676824 : return get_option_state (&global_options, option, state);
2451 : }
2452 :
/* Default version of get_pch_validity.
   By default, every flag difference is fatal; that will be mostly right for
   most targets, but completely right for very few.  Returns a
   heap-allocated blob of *SZ bytes that default_pch_valid_p later
   compares against.  */

void *
default_get_pch_validity (size_t *sz)
{
  struct cl_option_state state;
  size_t i;
  char *result, *r;

  /* First pass: compute the blob size.  Two bytes for flag_pic and
     flag_pie, optionally the raw target_flags, then the saved state of
     every PCH-relevant option.  */
  *sz = 2;
  if (targetm.check_pch_target_flags)
    *sz += sizeof (target_flags);
  for (i = 0; i < cl_options_count; i++)
    if (option_affects_pch_p (i, &state))
      *sz += state.size;

  /* Second pass: fill the blob in exactly the order default_pch_valid_p
     reads it back.  */
  result = r = XNEWVEC (char, *sz);
  r[0] = flag_pic;
  r[1] = flag_pie;
  r += 2;
  if (targetm.check_pch_target_flags)
    {
      memcpy (r, &target_flags, sizeof (target_flags));
      r += sizeof (target_flags);
    }

  for (i = 0; i < cl_options_count; i++)
    if (option_affects_pch_p (i, &state))
      {
	memcpy (r, state.data, state.size);
	r += state.size;
      }

  return result;
}
2490 :
/* Return a message which says that a PCH file was created with a different
   setting of OPTION.  The returned string is allocated with xasprintf;
   callers treat it as the (non-freed) validity diagnostic.  */

static const char *
pch_option_mismatch (const char *option)
{
  return xasprintf (_("created and used with differing settings of '%s'"),
		    option);
}
2500 :
/* Default version of pch_valid_p.  DATA_P/LEN is the validity blob
   produced by default_get_pch_validity; return NULL if the PCH is
   usable, otherwise a message describing the mismatch.  The read order
   here must mirror the write order in default_get_pch_validity.  */

const char *
default_pch_valid_p (const void *data_p, size_t len ATTRIBUTE_UNUSED)
{
  struct cl_option_state state;
  const char *data = (const char *)data_p;
  size_t i;

  /* -fpic and -fpie also usually make a PCH invalid.  */
  if (data[0] != flag_pic)
    return _("created and used with different settings of %<-fpic%>");
  if (data[1] != flag_pie)
    return _("created and used with different settings of %<-fpie%>");
  data += 2;

  /* Check target_flags.  */
  if (targetm.check_pch_target_flags)
    {
      int tf;
      const char *r;

      memcpy (&tf, data, sizeof (target_flags));
      data += sizeof (target_flags);
      r = targetm.check_pch_target_flags (tf);
      if (r != NULL)
	return r;
    }

  /* Compare the saved state of every PCH-relevant option.  */
  for (i = 0; i < cl_options_count; i++)
    if (option_affects_pch_p (i, &state))
      {
	if (memcmp (data, state.data, state.size) != 0)
	  return pch_option_mismatch (cl_options[i].opt_text);
	data += state.size;
      }

  return NULL;
}
2540 :
/* Default version of cstore_mode: the mode of operand 0 of the
   conditional-store pattern ICODE.  */

scalar_int_mode
default_cstore_mode (enum insn_code icode)
{
  return as_a <scalar_int_mode> (insn_data[(int) icode].operand[0].mode);
}
2548 :
/* Default version of member_type_forces_blk: no member type forces the
   containing aggregate into BLKmode.  */

bool
default_member_type_forces_blk (const_tree, machine_mode)
{
  return false;
}
2556 :
/* Default version of canonicalize_comparison: leave the comparison
   code and operands untouched.  */

void
default_canonicalize_comparison (int *, rtx *, rtx *, bool)
{
}
2563 :
/* Default implementation of TARGET_ATOMIC_ASSIGN_EXPAND_FENV:
   no floating-point environment handling is emitted.  */

void
default_atomic_assign_expand_fenv (tree *, tree *, tree *)
{
}
2570 :
2571 : #ifndef PAD_VARARGS_DOWN
2572 : #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
2573 : #endif
2574 :
2575 : /* Build an indirect-ref expression over the given TREE, which represents a
2576 : piece of a va_arg() expansion. */
2577 : tree
2578 54937 : build_va_arg_indirect_ref (tree addr)
2579 : {
2580 54937 : addr = build_simple_mem_ref_loc (EXPR_LOCATION (addr), addr);
2581 54937 : return addr;
2582 : }
2583 :
/* The "standard" implementation of va_arg: read the value from the
   current (padded) address and increment by the (padded) size.
   VALIST is the va_list expression, TYPE the requested argument type;
   setup statements go to PRE_P and post-increment statements to POST_P.
   Returns a tree for the fetched value.  */

tree
std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
			  gimple_seq *post_p)
{
  tree addr, t, type_size, rounded_size, valist_tmp;
  unsigned HOST_WIDE_INT align, boundary;
  bool indirect;

  /* All of the alignment and movement below is for args-grow-up machines.
     As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
     implement their own specialized gimplify_va_arg_expr routines.  */
  if (ARGS_GROW_DOWNWARD)
    gcc_unreachable ();

  /* Arguments passed by reference are fetched as a pointer which is
     dereferenced at the end.  */
  indirect = pass_va_arg_by_reference (type);
  if (indirect)
    type = build_pointer_type (type);

  /* Complex arguments the target splits are fetched as two separate
     halves and recombined.  */
  if (targetm.calls.split_complex_arg
      && TREE_CODE (type) == COMPLEX_TYPE
      && targetm.calls.split_complex_arg (type))
    {
      tree real_part, imag_part;

      real_part = std_gimplify_va_arg_expr (valist,
					    TREE_TYPE (type), pre_p, NULL);
      real_part = get_initialized_tmp_var (real_part, pre_p);

      imag_part = std_gimplify_va_arg_expr (unshare_expr (valist),
					    TREE_TYPE (type), pre_p, NULL);
      imag_part = get_initialized_tmp_var (imag_part, pre_p);

      return build2 (COMPLEX_EXPR, type, real_part, imag_part);
    }

  align = PARM_BOUNDARY / BITS_PER_UNIT;
  boundary = targetm.calls.function_arg_boundary (TYPE_MODE (type), type);

  /* When we align parameter on stack for caller, if the parameter
     alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
     aligned at MAX_SUPPORTED_STACK_ALIGNMENT.  We will match callee
     here with caller.  */
  if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
    boundary = MAX_SUPPORTED_STACK_ALIGNMENT;

  boundary /= BITS_PER_UNIT;

  /* Hoist the valist value into a temporary for the moment.  */
  valist_tmp = get_initialized_tmp_var (valist, pre_p);

  /* va_list pointer is aligned to PARM_BOUNDARY.  If argument actually
     requires greater alignment, we must perform dynamic alignment.  */
  if (boundary > align
      && !TYPE_EMPTY_P (type)
      && !integer_zerop (TYPE_SIZE (type)))
    {
      /* valist_tmp = (valist_tmp + boundary - 1) & -boundary, done as
	 two separate statements: add, then mask.  */
      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
		  fold_build_pointer_plus_hwi (valist_tmp, boundary - 1));
      gimplify_and_add (t, pre_p);

      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
		  fold_build2 (BIT_AND_EXPR, TREE_TYPE (valist),
			       valist_tmp,
			       build_int_cst (TREE_TYPE (valist), -boundary)));
      gimplify_and_add (t, pre_p);
    }
  else
    boundary = align;

  /* If the actual alignment is less than the alignment of the type,
     adjust the type accordingly so that we don't assume strict alignment
     when dereferencing the pointer.  */
  boundary *= BITS_PER_UNIT;
  if (boundary < TYPE_ALIGN (type))
    {
      type = build_variant_type_copy (type);
      SET_TYPE_ALIGN (type, boundary);
    }

  /* Compute the rounded size of the type.  */
  type_size = arg_size_in_bytes (type);
  rounded_size = round_up (type_size, align);

  /* Reduce rounded_size so it's sharable with the postqueue.  */
  gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);

  /* Get AP.  */
  addr = valist_tmp;
  if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
    {
      /* Small args are padded downward.  */
      t = fold_build2_loc (input_location, GT_EXPR, sizetype,
			   rounded_size, size_int (align));
      t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
		       size_binop (MINUS_EXPR, rounded_size, type_size));
      addr = fold_build_pointer_plus (addr, t);
    }

  /* Compute new value for AP.  */
  t = fold_build_pointer_plus (valist_tmp, rounded_size);
  t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
  gimplify_and_add (t, pre_p);

  addr = fold_convert (build_pointer_type (type), addr);

  /* For by-reference arguments, one extra dereference fetches the real
     value through the stored pointer.  */
  if (indirect)
    addr = build_va_arg_indirect_ref (addr);

  return build_va_arg_indirect_ref (addr);
}
2697 :
/* An implementation of TARGET_CAN_USE_DOLOOP_P for targets that do
   not support nested low-overhead loops: only depth-1 loops qualify.  */

bool
can_use_doloop_if_innermost (const widest_int &, const widest_int &,
			     unsigned int loop_depth, bool)
{
  return loop_depth == 1;
}
2707 :
/* Default implementation of TARGET_OPTAB_SUPPORTED_P: all optabs the
   target provides are usable for every optimization type.  */

bool
default_optab_supported_p (int, machine_mode, machine_mode, optimization_type)
{
  return true;
}
2715 :
/* Default implementation of TARGET_MAX_NOCE_IFCVT_SEQ_COST: the maximum
   cost of a branchless sequence replacing a conditional branch on
   edge E.  */

unsigned int
default_max_noce_ifcvt_seq_cost (edge e)
{
  bool predictable_p = predictable_edge_p (e);

  /* Prefer the user-set parameter matching the branch's
     predictability, if one was given on the command line.  */
  if (predictable_p)
    {
      if (OPTION_SET_P (param_max_rtl_if_conversion_predictable_cost))
	return param_max_rtl_if_conversion_predictable_cost;
    }
  else
    {
      if (OPTION_SET_P (param_max_rtl_if_conversion_unpredictable_cost))
	return param_max_rtl_if_conversion_unpredictable_cost;
    }

  /* Otherwise derive the budget from the target's branch cost.  */
  return BRANCH_COST (true, predictable_p) * COSTS_N_INSNS (3);
}
2736 :
/* Default implementation of TARGET_MIN_ARITHMETIC_PRECISION: full words
   on word-register-operation targets, single bytes otherwise.  */

unsigned int
default_min_arithmetic_precision (void)
{
  return WORD_REGISTER_OPERATIONS ? BITS_PER_WORD : BITS_PER_UNIT;
}
2744 :
/* Default implementation of TARGET_C_EXCESS_PRECISION: evaluate in at
   least float precision regardless of the requested policy.  */

enum flt_eval_method
default_excess_precision (enum excess_precision_type ATTRIBUTE_UNUSED)
{
  return FLT_EVAL_METHOD_PROMOTE_TO_FLOAT;
}
2752 :
/* Return true if _BitInt(N) is supported and fill details about it into
   *INFO.  By default _BitInt is not supported.  */
bool
default_bitint_type_info (int, struct bitint_info *)
{
  return false;
}
2760 :
/* Default implementation for
   TARGET_STACK_CLASH_PROTECTION_ALLOCA_PROBE_RANGE; zero means no
   target-specific probing interval.  */
HOST_WIDE_INT
default_stack_clash_protection_alloca_probe_range (void)
{
  return 0;
}
2768 :
/* The default implementation of TARGET_EARLY_REMAT_MODES: no modes are
   selected for early rematerialization.  */

void
default_select_early_remat_modes (sbitmap)
{
}
2775 :
/* The default implementation of TARGET_PREFERRED_ELSE_VALUE: use a
   zero constant of TYPE.  */

tree
default_preferred_else_value (unsigned, tree type, unsigned, tree *)
{
  return build_zero_cst (type);
}
2783 :
/* The default implementation of TARGET_INSTRUCTION_SELECTION: no
   target-specific selection is performed; returning false leaves the
   statement untouched.  */

bool
default_instruction_selection (function *, gimple_stmt_iterator *)
{
  return false;
}
2791 :
2792 : /* Default implementation of TARGET_HAVE_SPECULATION_SAFE_VALUE. */
2793 : bool
2794 207708 : default_have_speculation_safe_value (bool active ATTRIBUTE_UNUSED)
2795 : {
2796 : #ifdef HAVE_speculation_barrier
2797 207708 : return active ? HAVE_speculation_barrier : true;
2798 : #else
2799 : return false;
2800 : #endif
2801 : }
/* Alternative implementation of TARGET_HAVE_SPECULATION_SAFE_VALUE
   that can be used on targets that never have speculative execution:
   report support only when no active mitigation is requested.  */
bool
speculation_safe_value_not_needed (bool active)
{
  if (active)
    return false;
  return true;
}
2809 :
/* Default implementation of the speculation-safe-load builtin.  This
   implementation simply copies val to result and generates a
   speculation_barrier insn, if such a pattern is defined.  */
rtx
default_speculation_safe_value (machine_mode mode ATTRIBUTE_UNUSED,
				rtx result, rtx val,
				rtx failval ATTRIBUTE_UNUSED)
{
  /* FAILVAL is ignored by this default: the copy-plus-barrier sequence
     does not need it.  */
  emit_move_insn (result, val);

#ifdef HAVE_speculation_barrier
  /* Assume the target knows what it is doing: if it defines a
     speculation barrier, but it is not enabled, then assume that one
     isn't needed.  */
  if (HAVE_speculation_barrier)
    emit_insn (gen_speculation_barrier ());
#endif

  return result;
}
2830 :
2831 : /* How many bits to shift in order to access the tag bits.
2832 : The default is to store the tag in the top 8 bits of a 64 bit pointer, hence
2833 : shifting 56 bits will leave just the tag. */
2834 : #define HWASAN_SHIFT (GET_MODE_PRECISION (Pmode) - 8)
2835 : #define HWASAN_SHIFT_RTX GEN_INT (HWASAN_SHIFT)
2836 :
bool
default_memtag_can_tag_addresses ()
{
  /* By default the target cannot tag pointer addresses (HWASAN
     support requires an override).  */
  return false;
}
2842 :
uint8_t
default_memtag_tag_bitsize ()
{
  /* Default tag width in bits, matching HWASAN_SHIFT's top-byte
     placement above.  */
  return 8;
}
2848 :
uint8_t
default_memtag_granule_size ()
{
  /* Default number of bytes covered by one memory tag.  */
  return 16;
}
2854 :
2855 : /* The default implementation of TARGET_MEMTAG_INSERT_RANDOM_TAG. */
2856 : rtx
2857 64 : default_memtag_insert_random_tag (rtx untagged, rtx target)
2858 : {
2859 64 : gcc_assert (param_hwasan_instrument_stack);
2860 64 : if (param_hwasan_random_frame_tag)
2861 : {
2862 16 : rtx fn = init_one_libfunc ("__hwasan_generate_tag");
2863 16 : rtx new_tag = emit_library_call_value (fn, NULL_RTX, LCT_NORMAL, QImode);
2864 16 : return targetm.memtag.set_tag (untagged, new_tag, target);
2865 : }
2866 : else
2867 : {
2868 : /* NOTE: The kernel API does not have __hwasan_generate_tag exposed.
2869 : In the future we may add the option emit random tags with inline
2870 : instrumentation instead of function calls. This would be the same
2871 : between the kernel and userland. */
2872 : return untagged;
2873 : }
2874 : }
2875 :
/* The default implementation of TARGET_MEMTAG_ADD_TAG.  Returns BASE
   plus OFFSET, with TAG_OFFSET added into the pointer's tag bits.  */
rtx
default_memtag_add_tag (rtx base, poly_int64 offset, uint8_t tag_offset)
{
  /* Need to look into what the most efficient code sequence is.
     This is a code sequence that would be emitted *many* times, so we
     want it as small as possible.

     There are two places where tag overflow is a question:
       - Tagging the shadow stack.
	  (both tagging and untagging).
       - Tagging addressable pointers.

     We need to ensure both behaviors are the same (i.e. that the tag that
     ends up in a pointer after "overflowing" the tag bits with a tag addition
     is the same that ends up in the shadow space).

     The aim is that the behavior of tag addition should follow modulo
     wrapping in both instances.

     The libhwasan code doesn't have any path that increments a pointer's tag,
     which means it has no opinion on what happens when a tag increment
     overflows (and hence we can choose our own behavior).  */

  /* Shift the tag delta into the tag bits so a single addition updates
     both the address and the tag.  */
  offset += ((uint64_t)tag_offset << HWASAN_SHIFT);
  return plus_constant (Pmode, base, offset);
}
2903 :
/* The default implementation of TARGET_MEMTAG_SET_TAG.  Returns
   UNTAGGED with the byte-sized TAG placed into its tag bits,
   optionally using TARGET as the destination register.  */
rtx
default_memtag_set_tag (rtx untagged, rtx tag, rtx target)
{
  gcc_assert (GET_MODE (untagged) == Pmode && GET_MODE (tag) == QImode);
  /* Move the tag byte up into the tag position, then OR it into the
     (assumed tag-free) pointer.  */
  tag = expand_simple_binop (Pmode, ASHIFT, tag, HWASAN_SHIFT_RTX, NULL_RTX,
			     /* unsignedp = */1, OPTAB_WIDEN);
  rtx ret = expand_simple_binop (Pmode, IOR, untagged, tag, target,
				 /* unsignedp = */1, OPTAB_DIRECT);
  gcc_assert (ret);
  return ret;
}
2916 :
/* The default implementation of TARGET_MEMTAG_EXTRACT_TAG.  Returns
   the tag of TAGGED_POINTER as a QImode value, optionally using TARGET
   for the intermediate result.  */
rtx
default_memtag_extract_tag (rtx tagged_pointer, rtx target)
{
  /* Shift the tag down to the low bits, then take the low byte.  */
  rtx tag = expand_simple_binop (Pmode, LSHIFTRT, tagged_pointer,
				 HWASAN_SHIFT_RTX, target,
				 /* unsignedp = */0,
				 OPTAB_DIRECT);
  rtx ret = gen_lowpart (QImode, tag);
  gcc_assert (ret);
  return ret;
}
2929 :
/* The default implementation of TARGET_MEMTAG_UNTAGGED_POINTER.
   Returns TAGGED_POINTER with its tag bits cleared, optionally using
   TARGET as the destination.  */
rtx
default_memtag_untagged_pointer (rtx tagged_pointer, rtx target)
{
  /* Mask keeps every bit below the tag field.  */
  rtx tag_mask = gen_int_mode ((HOST_WIDE_INT_1U << HWASAN_SHIFT) - 1, Pmode);
  rtx untagged_base = expand_simple_binop (Pmode, AND, tagged_pointer,
					   tag_mask, target, true,
					   OPTAB_DIRECT);
  gcc_assert (untagged_base);
  return untagged_base;
}
2941 :
2942 : #include "gt-targhooks.h"
|