Line data Source code
1 : /* Expand builtin functions.
2 : Copyright (C) 1988-2026 Free Software Foundation, Inc.
3 :
4 : This file is part of GCC.
5 :
6 : GCC is free software; you can redistribute it and/or modify it under
7 : the terms of the GNU General Public License as published by the Free
8 : Software Foundation; either version 3, or (at your option) any later
9 : version.
10 :
11 : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 : WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 : for more details.
15 :
16 : You should have received a copy of the GNU General Public License
17 : along with GCC; see the file COPYING3. If not see
18 : <http://www.gnu.org/licenses/>. */
19 :
20 : /* Legacy warning! Please add no further builtin simplifications here
21 : (apart from pure constant folding) - builtin simplifications should go
22 : to match.pd or gimple-fold.cc instead. */
23 :
24 : #include "config.h"
25 : #include "system.h"
26 : #include "coretypes.h"
27 : #include "backend.h"
28 : #include "target.h"
29 : #include "rtl.h"
30 : #include "tree.h"
31 : #include "memmodel.h"
32 : #include "gimple.h"
33 : #include "predict.h"
34 : #include "tm_p.h"
35 : #include "stringpool.h"
36 : #include "tree-vrp.h"
37 : #include "tree-ssanames.h"
38 : #include "expmed.h"
39 : #include "optabs.h"
40 : #include "emit-rtl.h"
41 : #include "recog.h"
42 : #include "diagnostic-core.h"
43 : #include "alias.h"
44 : #include "fold-const.h"
45 : #include "fold-const-call.h"
46 : #include "gimple-ssa-warn-access.h"
47 : #include "stor-layout.h"
48 : #include "calls.h"
49 : #include "varasm.h"
50 : #include "tree-object-size.h"
51 : #include "tree-ssa-strlen.h"
52 : #include "realmpfr.h"
53 : #include "cfgrtl.h"
54 : #include "except.h"
55 : #include "dojump.h"
56 : #include "explow.h"
57 : #include "stmt.h"
58 : #include "expr.h"
59 : #include "libfuncs.h"
60 : #include "output.h"
61 : #include "typeclass.h"
62 : #include "langhooks.h"
63 : #include "value-prof.h"
64 : #include "builtins.h"
65 : #include "stringpool.h"
66 : #include "attribs.h"
67 : #include "asan.h"
68 : #include "internal-fn.h"
69 : #include "case-cfn-macros.h"
70 : #include "gimple-iterator.h"
71 : #include "gimple-fold.h"
72 : #include "intl.h"
73 : #include "file-prefix-map.h" /* remap_macro_filename() */
74 : #include "ipa-strub.h" /* strub_watermark_parm() */
75 : #include "gomp-constants.h"
76 : #include "omp-general.h"
77 : #include "tree-dfa.h"
78 : #include "gimple-ssa.h"
79 : #include "tree-ssa-live.h"
80 : #include "tree-outof-ssa.h"
81 : #include "attr-fnspec.h"
82 : #include "demangle.h"
83 : #include "gimple-range.h"
84 : #include "pointer-query.h"
85 :
86 : struct target_builtins default_target_builtins;
87 : #if SWITCHABLE_TARGET
88 : struct target_builtins *this_target_builtins = &default_target_builtins;
89 : #endif
90 :
91 : /* Define the names of the builtin function types and codes. */
92 : const char *const built_in_class_names[BUILT_IN_LAST]
93 : = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
94 :
95 : #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
96 : const char * built_in_names[(int) END_BUILTINS] =
97 : {
98 : #include "builtins.def"
99 : };
100 :
101 : /* Set up an array of builtin_info_type; make sure each element's decl is
102 : initialized to NULL_TREE. */
103 : builtin_info_type builtin_info[(int)END_BUILTINS];
104 :
105 : /* Non-zero if __builtin_constant_p should be folded right away. */
106 : bool force_folding_builtin_constant_p;
107 :
108 : static int target_char_cast (tree, char *);
109 : static int apply_args_size (void);
110 : static int apply_result_size (void);
111 : static rtx result_vector (int, rtx);
112 : static void expand_builtin_prefetch (tree);
113 : static rtx expand_builtin_apply_args (void);
114 : static rtx expand_builtin_apply_args_1 (void);
115 : static rtx expand_builtin_apply (rtx, rtx, rtx);
116 : static void expand_builtin_return (rtx);
117 : static rtx expand_builtin_classify_type (tree);
118 : static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
119 : static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
120 : static rtx expand_builtin_interclass_mathfn (tree, rtx);
121 : static rtx expand_builtin_sincos (tree);
122 : static rtx expand_builtin_fegetround (tree, rtx, machine_mode);
123 : static rtx expand_builtin_feclear_feraise_except (tree, rtx, machine_mode,
124 : optab);
125 : static rtx expand_builtin_cexpi (tree, rtx);
126 : static rtx expand_builtin_issignaling (tree, rtx);
127 : static rtx expand_builtin_int_roundingfn (tree, rtx);
128 : static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
129 : static rtx expand_builtin_next_arg (void);
130 : static rtx expand_builtin_va_start (tree);
131 : static rtx expand_builtin_va_end (tree);
132 : static rtx expand_builtin_va_copy (tree);
133 : static rtx inline_expand_builtin_bytecmp (tree, rtx);
134 : static rtx expand_builtin_strcmp (tree, rtx);
135 : static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
136 : static rtx expand_builtin_memcpy (tree, rtx);
137 : static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
138 : rtx target, tree exp,
139 : memop_ret retmode,
140 : bool might_overlap);
141 : static rtx expand_builtin_memmove (tree, rtx);
142 : static rtx expand_builtin_mempcpy (tree, rtx);
143 : static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, memop_ret);
144 : static rtx expand_builtin_strcpy (tree, rtx);
145 : static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx);
146 : static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
147 : static rtx expand_builtin_strncpy (tree, rtx);
148 : static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
149 : static rtx expand_builtin_bzero (tree);
150 : static rtx expand_builtin_strlen (tree, rtx, machine_mode);
151 : static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
152 : static rtx expand_builtin_alloca (tree);
153 : static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
154 : static rtx expand_builtin_frame_address (tree, tree);
155 : static rtx expand_builtin_stack_address ();
156 : static tree stabilize_va_list_loc (location_t, tree, int);
157 : static rtx expand_builtin_expect (tree, rtx);
158 : static rtx expand_builtin_expect_with_probability (tree, rtx);
159 : static tree fold_builtin_classify_type (tree);
160 : static tree fold_builtin_strlen (location_t, tree, tree, tree);
161 : static tree fold_builtin_inf (location_t, tree, int);
162 : static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
163 : static bool validate_arg (const_tree, enum tree_code code);
164 : static rtx expand_builtin_fabs (tree, rtx, rtx);
165 : static rtx expand_builtin_signbit (tree, rtx);
166 : static tree fold_builtin_memcmp (location_t, tree, tree, tree);
167 : static tree fold_builtin_isascii (location_t, tree);
168 : static tree fold_builtin_toascii (location_t, tree);
169 : static tree fold_builtin_isdigit (location_t, tree);
170 : static tree fold_builtin_fabs (location_t, tree, tree);
171 : static tree fold_builtin_abs (location_t, tree, tree);
172 : static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
173 : enum tree_code);
174 : static tree fold_builtin_iseqsig (location_t, tree, tree);
175 : static tree fold_builtin_varargs (location_t, tree, tree*, int);
176 :
177 : static tree fold_builtin_strpbrk (location_t, tree, tree, tree, tree);
178 : static tree fold_builtin_strspn (location_t, tree, tree, tree, tree);
179 : static tree fold_builtin_strcspn (location_t, tree, tree, tree, tree);
180 :
181 : static rtx expand_builtin_object_size (tree);
182 : static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
183 : enum built_in_function);
184 : static void maybe_emit_chk_warning (tree, enum built_in_function);
185 : static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
186 : static tree fold_builtin_object_size (tree, tree, enum built_in_function);
187 :
188 : unsigned HOST_WIDE_INT target_newline;
189 : unsigned HOST_WIDE_INT target_percent;
190 : static unsigned HOST_WIDE_INT target_c;
191 : static unsigned HOST_WIDE_INT target_s;
192 : char target_percent_c[3];
193 : char target_percent_s[3];
194 : char target_percent_s_newline[4];
195 : static tree do_mpfr_remquo (tree, tree, tree);
196 : static tree do_mpfr_lgamma_r (tree, tree, tree);
197 : static void expand_builtin_sync_synchronize (void);
198 :
199 : /* Return true if NAME starts with __builtin_, __sync_ or __atomic_. */
200 :
201 : static bool
202 529176 : is_builtin_name (const char *name)
203 : {
204 529176 : return (startswith (name, "__builtin_")
205 189309 : || startswith (name, "__sync_")
206 717630 : || startswith (name, "__atomic_"));
207 : }
208 :
209 : /* Return true if NODE should be considered for inline expansion regardless
210 : of the optimization level. That is the case whenever a function is invoked
211 : with its "internal" name, which normally contains the prefix "__builtin". */
212 :
213 : bool
214 529176 : called_as_built_in (tree node)
215 : {
216 : /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
217 : we want the name used to call the function, not the name it
218 : will have. */
219 529176 : const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
220 529176 : return is_builtin_name (name);
221 : }
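/* Editorial example (not part of the original file): how the two helpers
   above classify names.  Assuming DECL_NAME yields the source-level
   spelling:

     is_builtin_name ("__builtin_memcpy")    -> true
     is_builtin_name ("__sync_lock_release") -> true
     is_builtin_name ("__atomic_load_n")     -> true
     is_builtin_name ("memcpy")              -> false

   so a call written as "__builtin_memcpy" is considered for inline
   expansion even when optimization is off, while a plain "memcpy" call
   is not forced to be.  */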
222 :
223 : /* Compute values M and N such that M divides (address of EXP - N) and such
224 : that N < M. If these numbers can be determined, store M in *ALIGNP and N in
225 : *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
226 : *ALIGNP and any bit-offset to *BITPOSP.
227 :
228 : Note that the address (and thus the alignment) computed here is based
229 : on the address to which a symbol resolves, whereas DECL_ALIGN is based
230 : on the address at which an object is actually located. These two
231 : addresses are not always the same. For example, on ARM targets,
232 : the address &foo of a Thumb function foo() has the lowest bit set,
233 : whereas foo() itself starts on an even address.
234 :
235 : If ADDR_P is true we are taking the address of the memory reference EXP
236 : and thus cannot rely on the access taking place. */
237 :
238 : bool
239 108673137 : get_object_alignment_2 (tree exp, unsigned int *alignp,
240 : unsigned HOST_WIDE_INT *bitposp, bool addr_p)
241 : {
242 108673137 : poly_int64 bitsize, bitpos;
243 108673137 : tree offset;
244 108673137 : machine_mode mode;
245 108673137 : int unsignedp, reversep, volatilep;
246 108673137 : unsigned int align = BITS_PER_UNIT;
247 108673137 : bool known_alignment = false;
248 :
249 : /* Get the innermost object and the constant (bitpos) and possibly
250 : variable (offset) offset of the access. */
251 108673137 : exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
252 : &unsignedp, &reversep, &volatilep);
253 :
254 : /* Extract alignment information from the innermost object and
255 : possibly adjust bitpos and offset. */
256 108673137 : if (TREE_CODE (exp) == FUNCTION_DECL)
257 : {
258 : /* Function addresses can encode extra information besides their
259 : alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
260 : allows the low bit to be used as a virtual bit, we know
261 : that the address itself must be at least 2-byte aligned. */
262 : if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
263 : align = 2 * BITS_PER_UNIT;
264 : }
265 108234559 : else if (TREE_CODE (exp) == LABEL_DECL)
266 : ;
267 108197601 : else if (TREE_CODE (exp) == CONST_DECL)
268 : {
269 : /* The alignment of a CONST_DECL is determined by its initializer. */
270 58808 : exp = DECL_INITIAL (exp);
271 58808 : align = TYPE_ALIGN (TREE_TYPE (exp));
272 58808 : if (CONSTANT_CLASS_P (exp))
273 58801 : align = targetm.constant_alignment (exp, align);
274 :
275 : known_alignment = true;
276 : }
277 108138793 : else if (DECL_P (exp))
278 : {
279 65268090 : align = DECL_ALIGN (exp);
280 : known_alignment = true;
281 : }
282 42870703 : else if (TREE_CODE (exp) == INDIRECT_REF
283 42858460 : || TREE_CODE (exp) == MEM_REF
284 5369120 : || TREE_CODE (exp) == TARGET_MEM_REF)
285 : {
286 41170913 : tree addr = TREE_OPERAND (exp, 0);
287 41170913 : unsigned ptr_align;
288 41170913 : unsigned HOST_WIDE_INT ptr_bitpos;
289 41170913 : unsigned HOST_WIDE_INT ptr_bitmask = ~0;
290 :
291 : /* If the address is explicitly aligned, handle that. */
292 41170913 : if (TREE_CODE (addr) == BIT_AND_EXPR
293 41170913 : && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
294 : {
295 76 : ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
296 76 : ptr_bitmask *= BITS_PER_UNIT;
297 76 : align = least_bit_hwi (ptr_bitmask);
298 76 : addr = TREE_OPERAND (addr, 0);
299 : }
300 :
301 41170913 : known_alignment
302 41170913 : = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
303 41170913 : align = MAX (ptr_align, align);
304 :
305 : /* Re-apply explicit alignment to the bitpos. */
306 41170913 : ptr_bitpos &= ptr_bitmask;
307 :
308 : /* The alignment of the pointer operand in a TARGET_MEM_REF
309 : has to take the variable offset parts into account. */
310 41170913 : if (TREE_CODE (exp) == TARGET_MEM_REF)
311 : {
312 3669330 : if (TMR_INDEX (exp))
313 : {
314 1670073 : unsigned HOST_WIDE_INT step = 1;
315 1670073 : if (TMR_STEP (exp))
316 1515296 : step = TREE_INT_CST_LOW (TMR_STEP (exp));
317 1670073 : align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
318 : }
319 3669330 : if (TMR_INDEX2 (exp))
320 43356 : align = BITS_PER_UNIT;
321 : known_alignment = false;
322 : }
323 :
324 : /* When EXP is an actual memory reference then we can use
325 : TYPE_ALIGN of a pointer indirection to derive alignment.
326 : Do so only if get_pointer_alignment_1 did not reveal absolute
327 : alignment knowledge and if using that alignment would
328 : improve the situation. */
329 41170913 : unsigned int talign;
330 41170913 : if (!addr_p && !known_alignment
331 38830258 : && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
332 80001165 : && talign > align)
333 : align = talign;
334 : else
335 : {
336 : /* Else adjust bitpos accordingly. */
337 11179280 : bitpos += ptr_bitpos;
338 11179280 : if (TREE_CODE (exp) == MEM_REF
339 11179280 : || TREE_CODE (exp) == TARGET_MEM_REF)
340 11170005 : bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
341 : }
342 41170913 : }
343 1699790 : else if (TREE_CODE (exp) == STRING_CST)
344 : {
345 : /* STRING_CST are the only constant objects we allow to be not
346 : wrapped inside a CONST_DECL. */
347 1699240 : align = TYPE_ALIGN (TREE_TYPE (exp));
348 1699240 : if (CONSTANT_CLASS_P (exp))
349 1699240 : align = targetm.constant_alignment (exp, align);
350 :
351 : known_alignment = true;
352 : }
353 :
354 : /* If there is a non-constant offset part extract the maximum
355 : alignment that can prevail. */
356 108673137 : if (offset)
357 : {
358 9921266 : unsigned int trailing_zeros = tree_ctz (offset);
359 9921266 : if (trailing_zeros < HOST_BITS_PER_INT)
360 : {
361 9921188 : unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
362 9921188 : if (inner)
363 9921188 : align = MIN (align, inner);
364 : }
365 : }
366 :
367 : /* Account for the alignment of runtime coefficients, so that the constant
368 : bitpos is guaranteed to be accurate. */
369 108673137 : unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
370 108673137 : if (alt_align != 0 && alt_align < align)
371 : {
372 : align = alt_align;
373 : known_alignment = false;
374 : }
375 :
376 108673137 : *alignp = align;
377 108673137 : *bitposp = bitpos.coeffs[0] & (align - 1);
378 108673137 : return known_alignment;
379 : }
380 :
381 : /* For a memory reference expression EXP compute values M and N such that M
382 : divides (&EXP - N) and such that N < M. If these numbers can be determined,
383 : store M in *ALIGNP and N in *BITPOSP and return true. Otherwise return false
384 : and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP. */
385 :
386 : bool
387 86171047 : get_object_alignment_1 (tree exp, unsigned int *alignp,
388 : unsigned HOST_WIDE_INT *bitposp)
389 : {
390 : /* Strip a WITH_SIZE_EXPR, get_inner_reference doesn't know how to deal
391 : with it. */
392 86171047 : if (TREE_CODE (exp) == WITH_SIZE_EXPR)
393 0 : exp = TREE_OPERAND (exp, 0);
394 86171047 : return get_object_alignment_2 (exp, alignp, bitposp, false);
395 : }
396 :
397 : /* Return the alignment in bits of EXP, an object. */
398 :
399 : unsigned int
400 35844164 : get_object_alignment (tree exp)
401 : {
402 35844164 : unsigned HOST_WIDE_INT bitpos = 0;
403 35844164 : unsigned int align;
404 :
405 35844164 : get_object_alignment_1 (exp, &align, &bitpos);
406 :
407 : /* align and bitpos now specify known low bits of the pointer.
408 : ptr & (align - 1) == bitpos. */
409 :
410 35844164 : if (bitpos != 0)
411 2710393 : align = least_bit_hwi (bitpos);
412 35844164 : return align;
413 : }
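/* Editorial worked example (not part of the original file): ALIGN and
   BITPOS are both in bits and satisfy ptr & (align - 1) == bitpos.  If
   the analysis yields align == 128 (16 bytes) and bitpos == 32 (a fixed
   4-byte offset from that boundary), the strongest power-of-two
   guarantee for the address itself is the lowest set bit of the
   misalignment: least_bit_hwi (32) == 32, i.e. 4-byte alignment, which
   is exactly what the bitpos != 0 branch above computes.  */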
414 :
415 : /* For a pointer-valued expression EXP compute values M and N such that M
416 : divides (EXP - N) and such that N < M. If these numbers can be determined,
417 : store M in *ALIGNP and N in *BITPOSP and return true. Return false if
418 : the results are just a conservative approximation.
419 :
420 : If EXP is not a pointer, false is returned too. */
421 :
422 : bool
423 81855177 : get_pointer_alignment_1 (tree exp, unsigned int *alignp,
424 : unsigned HOST_WIDE_INT *bitposp)
425 : {
426 81855177 : STRIP_NOPS (exp);
427 :
428 81855177 : if (TREE_CODE (exp) == ADDR_EXPR)
429 22500542 : return get_object_alignment_2 (TREE_OPERAND (exp, 0),
430 22500542 : alignp, bitposp, true);
431 59354635 : else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
432 : {
433 988628 : unsigned int align;
434 988628 : unsigned HOST_WIDE_INT bitpos;
435 988628 : bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
436 : &align, &bitpos);
437 988628 : if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
438 2761 : bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
439 : else
440 : {
441 985867 : unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
442 985867 : if (trailing_zeros < HOST_BITS_PER_INT)
443 : {
444 985859 : unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
445 985859 : if (inner)
446 985859 : align = MIN (align, inner);
447 : }
448 : }
449 988628 : *alignp = align;
450 988628 : *bitposp = bitpos & (align - 1);
451 988628 : return res;
452 : }
453 58366007 : else if (TREE_CODE (exp) == SSA_NAME
454 58366007 : && POINTER_TYPE_P (TREE_TYPE (exp)))
455 : {
456 56363337 : unsigned int ptr_align, ptr_misalign;
457 56363337 : struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
458 :
459 56363337 : if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
460 : {
461 6026771 : *bitposp = ptr_misalign * BITS_PER_UNIT;
462 6026771 : *alignp = ptr_align * BITS_PER_UNIT;
463 : /* Make sure to return a sensible alignment when the multiplication
464 : by BITS_PER_UNIT overflowed. */
465 6026771 : if (*alignp == 0)
466 30 : *alignp = 1u << (HOST_BITS_PER_INT - 1);
467 : /* We cannot really tell whether this result is an approximation. */
468 6026771 : return false;
469 : }
470 : else
471 : {
472 50336566 : *bitposp = 0;
473 50336566 : *alignp = BITS_PER_UNIT;
474 50336566 : return false;
475 : }
476 : }
477 2002670 : else if (TREE_CODE (exp) == INTEGER_CST)
478 : {
479 16962 : *alignp = BIGGEST_ALIGNMENT;
480 16962 : *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
481 16962 : & (BIGGEST_ALIGNMENT - 1));
482 16962 : return true;
483 : }
484 :
485 1985708 : *bitposp = 0;
486 1985708 : *alignp = BITS_PER_UNIT;
487 1985708 : return false;
488 : }
489 :
490 : /* Return the alignment in bits of EXP, a pointer-valued expression.
491 : The alignment returned is, by default, the alignment of the thing that
492 : EXP points to. If EXP is not a pointer, BITS_PER_UNIT is returned.
493 :
494 : Otherwise, look at the expression to see if we can do better, i.e., if the
495 : expression is actually pointing at an object whose alignment is tighter. */
496 :
497 : unsigned int
498 11266438 : get_pointer_alignment (tree exp)
499 : {
500 11266438 : unsigned HOST_WIDE_INT bitpos = 0;
501 11266438 : unsigned int align;
502 :
503 11266438 : get_pointer_alignment_1 (exp, &align, &bitpos);
504 :
505 : /* align and bitpos now specify known low bits of the pointer.
506 : ptr & (align - 1) == bitpos. */
507 :
508 11266438 : if (bitpos != 0)
509 96247 : align = least_bit_hwi (bitpos);
510 :
511 11266438 : return align;
512 : }
513 :
514 : /* Return the number of leading non-zero elements in the sequence
515 : [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
516 : ELTSIZE must be a power of 2 less than 8. Used by c_strlen. */
517 :
518 : unsigned
519 774466 : string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
520 : {
521 774466 : gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
522 :
523 774466 : unsigned n;
524 :
525 774466 : if (eltsize == 1)
526 : {
527 : /* Optimize the common case of plain char. */
528 244924810 : for (n = 0; n < maxelts; n++)
529 : {
530 244837134 : const char *elt = (const char*) ptr + n;
531 244837134 : if (!*elt)
532 : break;
533 : }
534 : }
535 : else
536 : {
537 590 : for (n = 0; n < maxelts; n++)
538 : {
539 566 : const char *elt = (const char*) ptr + n * eltsize;
540 566 : if (!memcmp (elt, "\0\0\0\0", eltsize))
541 : break;
542 : }
543 : }
544 774466 : return n;
545 : }
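/* Editorial example (not part of the original file): for the 7-byte
   buffer "foo\0bar" with ELTSIZE == 1,

     string_length ("foo\0bar", 1, 7) == 3

   and for a 2-byte wide string stored as the bytes
   { 'a', 0, 'b', 0, 0, 0 } (little-endian assumed),

     string_length (bytes, 2, 3) == 2.  */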
546 :
547 : /* Compute the length of a null-terminated character string or wide
548 : character string handling character sizes of 1, 2, and 4 bytes.
549 : TREE_STRING_LENGTH is not the right way because it evaluates to
550 : the size of the character array in bytes (as opposed to characters)
551 : and because it can contain a zero byte in the middle.
552 :
553 : ONLY_VALUE should be nonzero if the result is not going to be emitted
554 : into the instruction stream and zero if it is going to be expanded.
555 : E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
556 : is returned, otherwise NULL, since
557 : len = c_strlen (ARG, 1); if (len) expand_expr (len, ...); would not
558 : evaluate the side-effects.
559 :
560 : If ONLY_VALUE is two then we do not emit warnings about out-of-bound
561 : accesses. Note that this implies the result is not going to be emitted
562 : into the instruction stream.
563 :
564 : Additional information about the string accessed may be recorded
565 : in DATA. For example, if ARG references an unterminated string,
566 : then the declaration will be stored in the DECL field. If the
567 : length of the unterminated string can be determined, it'll be
568 : stored in the LEN field. Note this length could well be different
569 : than what a C strlen call would return.
570 :
571 : ELTSIZE is 1 for normal single byte character strings, and 2 or
572 : 4 for wide character strings. ELTSIZE is by default 1.
573 :
574 : The value returned is of type `ssizetype'. */
575 :
576 : tree
577 2509314 : c_strlen (tree arg, int only_value, c_strlen_data *data, unsigned eltsize)
578 : {
579 : /* If we were not passed a DATA pointer, then get one to a local
580 : structure. That avoids having to check DATA for NULL before
581 : each time we want to use it. */
582 2509314 : c_strlen_data local_strlen_data = { };
583 2509314 : if (!data)
584 597397 : data = &local_strlen_data;
585 :
586 2509314 : gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
587 :
588 2509314 : tree src = STRIP_NOPS (arg);
589 2509314 : if (TREE_CODE (src) == COND_EXPR
590 2509314 : && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
591 : {
592 687 : tree len1, len2;
593 :
594 687 : len1 = c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
595 687 : len2 = c_strlen (TREE_OPERAND (src, 2), only_value, data, eltsize);
596 687 : if (tree_int_cst_equal (len1, len2))
597 : return len1;
598 : }
599 :
600 2509148 : if (TREE_CODE (src) == COMPOUND_EXPR
601 2509148 : && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
602 45 : return c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
603 :
604 2509103 : location_t loc = EXPR_LOC_OR_LOC (src, input_location);
605 :
606 : /* Offset from the beginning of the string in bytes. */
607 2509103 : tree byteoff;
608 2509103 : tree memsize;
609 2509103 : tree decl;
610 2509103 : src = string_constant (src, &byteoff, &memsize, &decl);
611 2509103 : if (src == 0)
612 : return NULL_TREE;
613 :
614 : /* Determine the size of the string element. */
615 790373 : if (eltsize != tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src)))))
616 : return NULL_TREE;
617 :
618 : /* Set MAXELTS to ARRAY_SIZE (SRC) - 1, the maximum possible
619 : length of SRC. Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
620 : in case the latter is less than the size of the array, such as when
621 : SRC refers to a short string literal used to initialize a large array.
622 : In that case, the elements of the array after the terminating NUL are
623 : all NUL. */
624 778154 : HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src);
625 778154 : strelts = strelts / eltsize;
626 :
627 778154 : if (!tree_fits_uhwi_p (memsize))
628 : return NULL_TREE;
629 :
630 778154 : HOST_WIDE_INT maxelts = tree_to_uhwi (memsize) / eltsize;
631 :
632 : /* PTR can point to the byte representation of any string type, including
633 : char* and wchar_t*. */
634 778154 : const char *ptr = TREE_STRING_POINTER (src);
635 :
636 778154 : if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
637 : {
638 : /* The code below works only for single byte character types. */
639 4296 : if (eltsize != 1)
640 : return NULL_TREE;
641 :
642 : /* If the string has an internal NUL character followed by any
643 : non-NUL characters (e.g., "foo\0bar"), we can't compute
644 : the offset to the following NUL if we don't know where to
645 : start searching for it. */
646 4296 : unsigned len = string_length (ptr, eltsize, strelts);
647 :
648 : /* Return when an embedded null character is found or none at all.
649 : In the latter case, set the DECL/LEN field in the DATA structure
650 : so that callers may examine them. */
651 4296 : if (len + 1 < strelts)
652 : return NULL_TREE;
653 3726 : else if (len >= maxelts)
654 : {
655 1102 : data->decl = decl;
656 1102 : data->off = byteoff;
657 1102 : data->minlen = ssize_int (len);
658 1102 : return NULL_TREE;
659 : }
660 :
661 : /* For empty strings the result should be zero. */
662 2624 : if (len == 0)
663 39 : return ssize_int (0);
664 :
665 : /* We don't know the starting offset, but we do know that the string
666 : has no internal zero bytes. If the offset falls within the bounds
667 : of the string subtract the offset from the length of the string,
668 : and return that. Otherwise the length is zero. Take care to
669 : use SAVE_EXPR in case the OFFSET has side-effects. */
670 2585 : tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff)
671 : : byteoff;
672 2585 : offsave = fold_convert_loc (loc, sizetype, offsave);
673 2585 : tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave,
674 2585 : size_int (len));
675 2585 : tree lenexp = fold_build2_loc (loc, MINUS_EXPR, sizetype, size_int (len),
676 : offsave);
677 2585 : lenexp = fold_convert_loc (loc, ssizetype, lenexp);
678 2585 : return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp,
679 2585 : build_zero_cst (ssizetype));
680 : }
681 :
682 : /* Offset from the beginning of the string in elements. */
683 773858 : HOST_WIDE_INT eltoff;
684 :
685 : /* We have a known offset into the string. Start searching there for
686 : a null character if we can represent it as a single HOST_WIDE_INT. */
687 773858 : if (byteoff == 0)
688 : eltoff = 0;
689 773858 : else if (! tree_fits_uhwi_p (byteoff) || tree_to_uhwi (byteoff) % eltsize)
690 : eltoff = -1;
691 : else
692 772670 : eltoff = tree_to_uhwi (byteoff) / eltsize;
693 :
694 : /* If the offset is known to be out of bounds, warn, and call strlen at
695 : runtime. */
696 773858 : if (eltoff < 0 || eltoff >= maxelts)
697 : {
698 : /* Suppress multiple warnings for propagated constant strings. */
699 3688 : if (only_value != 2
700 3688 : && !warning_suppressed_p (arg, OPT_Warray_bounds_)
701 7376 : && warning_at (loc, OPT_Warray_bounds_,
702 : "offset %qwi outside bounds of constant string",
703 : eltoff))
704 : {
705 679 : if (decl)
706 678 : inform (DECL_SOURCE_LOCATION (decl), "%qE declared here", decl);
707 679 : suppress_warning (arg, OPT_Warray_bounds_);
708 : }
709 3688 : return NULL_TREE;
710 : }
711 :
712 : /* If eltoff is larger than strelts but less than maxelts the
713 : string length is zero, since the excess memory will be zero. */
714 770170 : if (eltoff > strelts)
715 0 : return ssize_int (0);
716 :
717 : /* Use strlen to search for the first zero byte. Since any strings
718 : constructed with build_string will have nulls appended, we win even
719 : if we get handed something like (char[4])"abcd".
720 :
721 : Since ELTOFF is our starting index into the string, no further
722 : calculation is needed. */
723 770170 : unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
724 770170 : strelts - eltoff);
725 :
726 : /* Don't know what to return if there was no zero termination.
727 : Ideally this would turn into a gcc_checking_assert over time.
728 : Set DECL/LEN so callers can examine them. */
729 770170 : if (len >= maxelts - eltoff)
730 : {
731 82228 : data->decl = decl;
732 82228 : data->off = byteoff;
733 82228 : data->minlen = ssize_int (len);
734 82228 : return NULL_TREE;
735 : }
736 :
737 687942 : return ssize_int (len);
738 : }
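/* Editorial worked example (not part of the original file), assuming a
   short literal initializing a larger array:

     const char a[8] = "foo";

   TREE_STRING_LENGTH (SRC) is 4, so STRELTS == 4, while MEMSIZE is 8 and
   MAXELTS == 8.  c_strlen on &a[1] gives ELTOFF == 1, scans "oo" and
   returns ssize_int (2); c_strlen on &a[5] has ELTOFF between STRELTS
   and MAXELTS, where the trailing memory is known to be zero, and so
   returns ssize_int (0).  */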
739 :
740 : /* Return a constant integer corresponding to target reading
741 : GET_MODE_BITSIZE (MODE) bits from string constant STR. If
742 : NULL_TERMINATED_P, reading stops after the first '\0' character and all
743 : further ones are assumed to be zero; otherwise it reads as many characters
744 : as needed. */
745 :
746 : rtx
747 291394 : c_readstr (const char *str, fixed_size_mode mode,
748 : bool null_terminated_p/*=true*/)
749 : {
750 291394 : auto_vec<target_unit, MAX_BITSIZE_MODE_ANY_INT / BITS_PER_UNIT> bytes;
751 :
752 582788 : bytes.reserve (GET_MODE_SIZE (mode));
753 :
754 291394 : target_unit ch = 1;
755 4803068 : for (unsigned int i = 0; i < GET_MODE_SIZE (mode); ++i)
756 : {
757 2110140 : if (ch || !null_terminated_p)
758 2021237 : ch = (unsigned char) str[i];
759 2110140 : bytes.quick_push (ch);
760 : }
761 :
762 291394 : return native_decode_rtx (mode, bytes, 0);
763 291394 : }
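/* Editorial example (not part of the original file): c_readstr on the
   literal "ab" with a 32-bit integer mode pushes the bytes
   { 0x61, 0x62, 0x00, 0x00 } -- once the NUL is seen (and
   NULL_TERMINATED_P holds) CH stays zero for the remaining bytes -- so
   on a little-endian target with 8-bit bytes the decoded constant
   is 0x6261.  */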
764 :
765 : /* Cast a target constant CST to target CHAR and, if that value fits into
766 : the host char type, return zero and put that value into the variable
767 : pointed to by P. */
768 :
769 : static int
770 50798 : target_char_cast (tree cst, char *p)
771 : {
772 50798 : unsigned HOST_WIDE_INT val, hostval;
773 :
774 50798 : if (TREE_CODE (cst) != INTEGER_CST
775 : || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
776 : return 1;
777 :
778 : /* Do not care if it fits or not right here. */
779 50798 : val = TREE_INT_CST_LOW (cst);
780 :
781 50798 : if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
782 50798 : val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;
783 :
784 50798 : hostval = val;
785 50798 : if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
786 50798 : hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;
787 :
788 50798 : if (val != hostval)
789 : return 1;
790 :
791 50798 : *p = hostval;
792 50798 : return 0;
793 : }
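/* Editorial example (not part of the original file): with an 8-bit
   target char, target_char_cast on the INTEGER_CST 65 stores 'A'
   through *P and returns zero.  The failure path matters on
   (hypothetical) targets whose char is wider than the host's: a value
   such as 300 then survives the CHAR_TYPE_SIZE mask but not the
   HOST_BITS_PER_CHAR mask, so 1 is returned.  */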
794 :
795 : /* Similar to save_expr, but assumes that arbitrary code is not executed
796 : in between the multiple evaluations. In particular, we assume that a
797 : non-addressable local variable will not be modified. */
798 :
799 : static tree
800 1046928 : builtin_save_expr (tree exp)
801 : {
802 1046928 : if (TREE_CODE (exp) == SSA_NAME
803 882924 : || (TREE_ADDRESSABLE (exp) == 0
804 882732 : && (TREE_CODE (exp) == PARM_DECL
805 389147 : || (VAR_P (exp) && !TREE_STATIC (exp)))))
806 : return exp;
807 :
808 387353 : return save_expr (exp);
809 : }
810 :
811 : /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
812 : times to get the address of either a higher stack frame, or a return
813 : address located within it (depending on FNDECL_CODE). */
814 :
815 : static rtx
816 15901 : expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
817 : {
818 15901 : int i;
819 15901 : rtx tem = INITIAL_FRAME_ADDRESS_RTX;
820 15901 : if (tem == NULL_RTX)
821 : {
822 : /* For a zero count with __builtin_return_address, we don't care what
823 : frame address we return, because target-specific definitions will
824 : override us. Therefore frame pointer elimination is OK, and using
825 : the soft frame pointer is OK.
826 :
827 : For a nonzero count, or a zero count with __builtin_frame_address,
828 : we require a stable offset from the current frame pointer to the
829 : previous one, so we must use the hard frame pointer, and
830 : we must disable frame pointer elimination. */
831 15901 : if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
832 4310 : tem = frame_pointer_rtx;
833 : else
834 : {
835 11591 : tem = hard_frame_pointer_rtx;
836 :
837 : /* Tell reload not to eliminate the frame pointer. */
838 11591 : crtl->accesses_prior_frames = 1;
839 : }
840 : }
841 :
842 15901 : if (count > 0)
843 932 : SETUP_FRAME_ADDRESSES ();
844 :
845 : /* On the SPARC, the return address is not in the frame, it is in a
846 : register. There is no way to access it off of the current frame
847 : pointer, but it can be accessed off the previous frame pointer by
848 : reading the value from the register window save area. */
849 : if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
850 : count--;
851 :
852 : /* Scan back COUNT frames to the specified frame. */
853 51777 : for (i = 0; i < count; i++)
854 : {
855 : /* Assume the dynamic chain pointer is in the word that the
856 : frame address points to, unless otherwise specified. */
857 35876 : tem = DYNAMIC_CHAIN_ADDRESS (tem);
858 35876 : tem = memory_address (Pmode, tem);
859 35876 : tem = gen_frame_mem (Pmode, tem);
860 35876 : tem = copy_to_reg (tem);
861 : }
862 :
863 : /* For __builtin_frame_address, return what we've got. But, on
864 : the SPARC for example, we may have to add a bias. */
865 15901 : if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
866 : return FRAME_ADDR_RTX (tem);
867 :
868 : /* For __builtin_return_address, get the return address from that frame. */
869 : #ifdef RETURN_ADDR_RTX
870 7379 : tem = RETURN_ADDR_RTX (count, tem);
871 : #else
872 : tem = memory_address (Pmode,
873 : plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
874 : tem = gen_frame_mem (Pmode, tem);
875 : #endif
876 5150 : return tem;
877 : }
878 :
879 : /* Alias set used for setjmp buffer. */
880 : static alias_set_type setjmp_alias_set = -1;
881 :
882 : /* Construct the leading half of a __builtin_setjmp call. Control will
883 : return to RECEIVER_LABEL. This is also called directly by the SJLJ
884 : exception handling code. */
885 :
886 : void
887 841 : expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
888 : {
889 841 : machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
890 841 : rtx stack_save;
891 841 : rtx mem;
892 :
893 841 : if (setjmp_alias_set == -1)
894 241 : setjmp_alias_set = new_alias_set ();
895 :
896 841 : buf_addr = convert_memory_address (Pmode, buf_addr);
897 :
898 842 : buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
899 :
900 : /* We store the frame pointer and the address of receiver_label in
901 : the buffer and use the rest of it for the stack save area, which
902 : is machine-dependent. */
903 :
904 842 : mem = gen_rtx_MEM (Pmode, buf_addr);
905 841 : set_mem_alias_set (mem, setjmp_alias_set);
906 841 : emit_move_insn (mem, hard_frame_pointer_rtx);
907 :
908 842 : mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
909 1682 : GET_MODE_SIZE (Pmode))),
910 841 : set_mem_alias_set (mem, setjmp_alias_set);
911 :
912 1682 : emit_move_insn (validize_mem (mem),
913 1682 : force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
914 :
915 841 : stack_save = gen_rtx_MEM (sa_mode,
916 : plus_constant (Pmode, buf_addr,
917 1682 : 2 * GET_MODE_SIZE (Pmode)));
918 841 : set_mem_alias_set (stack_save, setjmp_alias_set);
919 841 : emit_stack_save (SAVE_NONLOCAL, &stack_save);
920 :
921 : /* If there is further processing to do, do it. */
922 841 : if (targetm.have_builtin_setjmp_setup ())
923 0 : emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
924 :
925 : /* We have a nonlocal label. */
926 841 : cfun->has_nonlocal_label = 1;
927 841 : }
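/* Editorial summary (not part of the original file) of the buffer layout
   produced above, in GET_MODE_SIZE (Pmode) units from BUF_ADDR:

     word 0         hard frame pointer
     word 1         address of RECEIVER_LABEL
     word 2 onward  stack save area in STACK_SAVEAREA_MODE (SAVE_NONLOCAL)

   expand_builtin_longjmp below reads the three pieces back from the same
   offsets.  */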
928 :
929 : /* Construct the trailing part of a __builtin_setjmp call. This is
930 : also called directly by the SJLJ exception handling code.
931 : If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler. */
932 :
933 : void
934 1345 : expand_builtin_setjmp_receiver (rtx receiver_label)
935 : {
936 1345 : rtx chain;
937 :
938 : /* Mark the FP as used when we get here, so we have to make sure it's
939 : marked as used by this function. */
940 1345 : emit_use (hard_frame_pointer_rtx);
941 :
942 : /* Mark the static chain as clobbered here so life information
943 : doesn't get messed up for it. */
944 1345 : chain = rtx_for_static_chain (current_function_decl, true);
945 1345 : if (chain && REG_P (chain))
946 1 : emit_clobber (chain);
947 :
948 1345 : if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
949 : {
950 : /* If the argument pointer can be eliminated in favor of the
951 : frame pointer, we don't need to restore it. We assume here
952 : that if such an elimination is present, it can always be used.
953 : This is the case on all known machines; if we don't make this
954 : assumption, we do unnecessary saving on many machines. */
955 : size_t i;
956 : static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
957 :
958 2690 : for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
959 2690 : if (elim_regs[i].from == ARG_POINTER_REGNUM
960 2690 : && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
961 : break;
962 :
963 1345 : if (i == ARRAY_SIZE (elim_regs))
964 : {
965 : /* Now restore our arg pointer from the address at which it
966 : was saved in our stack frame. */
967 0 : emit_move_insn (crtl->args.internal_arg_pointer,
968 : copy_to_reg (get_arg_pointer_save_area ()));
969 : }
970 : }
971 :
972 1345 : if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
973 0 : emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
974 1345 : else if (targetm.have_nonlocal_goto_receiver ())
975 0 : emit_insn (targetm.gen_nonlocal_goto_receiver ());
976 : else
977 : { /* Nothing */ }
978 :
979 : /* We must not allow the code we just generated to be reordered by
980 : scheduling. Specifically, the update of the frame pointer must
981 : happen immediately, not later. */
982 1345 : emit_insn (gen_blockage ());
983 1345 : }
984 :
985 : /* __builtin_longjmp is passed a pointer to an array of five words (not
986 : all will be used on all machines). It operates similarly to the C
987 : library function of the same name, but is more efficient. Much of
988 : the code below is copied from the handling of non-local gotos. */
989 :
990 : static void
991 391 : expand_builtin_longjmp (rtx buf_addr, rtx value)
992 : {
993 391 : rtx fp, lab, stack;
994 391 : rtx_insn *insn, *last;
995 391 : machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
996 :
997 : /* DRAP is needed for stack realignment if longjmp is expanded in the
998 : current function. */
999 391 : if (SUPPORTS_STACK_ALIGNMENT)
1000 391 : crtl->need_drap = true;
1001 :
1002 391 : if (setjmp_alias_set == -1)
1003 330 : setjmp_alias_set = new_alias_set ();
1004 :
1005 391 : buf_addr = convert_memory_address (Pmode, buf_addr);
1006 :
1007 392 : buf_addr = force_reg (Pmode, buf_addr);
1008 :
1009 : /* We require that the user pass a second argument of 1, because
1010 : that is what builtin_setjmp will return. */
1011 391 : gcc_assert (value == const1_rtx);
1012 :
1013 391 : last = get_last_insn ();
1014 391 : if (targetm.have_builtin_longjmp ())
1015 0 : emit_insn (targetm.gen_builtin_longjmp (buf_addr));
1016 : else
1017 : {
1018 392 : fp = gen_rtx_MEM (Pmode, buf_addr);
1019 392 : lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
1020 782 : GET_MODE_SIZE (Pmode)));
1021 :
1022 391 : stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
1023 782 : 2 * GET_MODE_SIZE (Pmode)));
1024 391 : set_mem_alias_set (fp, setjmp_alias_set);
1025 391 : set_mem_alias_set (lab, setjmp_alias_set);
1026 391 : set_mem_alias_set (stack, setjmp_alias_set);
1027 :
1028 : /* Pick up FP, label, and SP from the block and jump. This code is
1029 : from expand_goto in stmt.cc; see there for detailed comments. */
1030 391 : if (targetm.have_nonlocal_goto ())
1031 : /* We have to pass a value to the nonlocal_goto pattern that will
1032 : get copied into the static_chain pointer, but it does not matter
1033 : what that value is, because builtin_setjmp does not use it. */
1034 0 : emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
1035 : else
1036 : {
1037 391 : emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1038 391 : emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1039 :
1040 391 : lab = copy_to_reg (lab);
1041 :
1042 : /* Restore the frame pointer and stack pointer. We must use a
1043 : temporary since the setjmp buffer may be a local. */
1044 391 : fp = copy_to_reg (fp);
1045 391 : emit_stack_restore (SAVE_NONLOCAL, stack);
1046 :
1047 : /* Ensure the frame pointer move is not optimized. */
1048 391 : emit_insn (gen_blockage ());
1049 391 : emit_clobber (hard_frame_pointer_rtx);
1050 391 : emit_clobber (frame_pointer_rtx);
1051 391 : emit_move_insn (hard_frame_pointer_rtx, fp);
1052 :
1053 391 : emit_use (hard_frame_pointer_rtx);
1054 391 : emit_use (stack_pointer_rtx);
1055 391 : emit_indirect_jump (lab);
1056 : }
1057 : }
1058 :
1059 : /* Search backwards and mark the jump insn as a non-local goto.
1060 : Note that this precludes the use of __builtin_longjmp to a
1061 : __builtin_setjmp target in the same function. However, we've
1062 : already cautioned the user that these functions are for
1063 : internal exception handling use only. */
1064 782 : for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1065 : {
1066 782 : gcc_assert (insn != last);
1067 :
1068 782 : if (JUMP_P (insn))
1069 : {
1070 391 : add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1071 391 : break;
1072 : }
1073 391 : else if (CALL_P (insn))
1074 : break;
1075 : }
1076 391 : }
1077 :
1078 : static inline bool
1079 1679278 : more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1080 : {
1081 1679278 : return (iter->i < iter->n);
1082 : }
1083 :
1084 : /* This function validates the types of a function call argument list
1085 : against a specified list of tree_codes. If the last specifier is a 0,
1086 : that represents an ellipsis, otherwise the last specifier must be a
1087 : VOID_TYPE. */
1088 :
1089 : static bool
1090 490033 : validate_arglist (const_tree callexpr, ...)
1091 : {
1092 490033 : enum tree_code code;
1093 490033 : bool res = 0;
1094 490033 : va_list ap;
1095 490033 : const_call_expr_arg_iterator iter;
1096 490033 : const_tree arg;
1097 :
1098 490033 : va_start (ap, callexpr);
1099 490033 : init_const_call_expr_arg_iterator (callexpr, &iter);
1100 :
1101 : /* Get a bitmap of pointer argument numbers declared attribute nonnull. */
1102 490033 : tree fn = CALL_EXPR_FN (callexpr);
1103 490033 : bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));
1104 :
1105 490033 : for (unsigned argno = 1; ; ++argno)
1106 : {
1107 1703521 : code = (enum tree_code) va_arg (ap, int);
1108 :
1109 1703521 : switch (code)
1110 : {
1111 2039 : case 0:
1112 : /* This signifies an ellipses, any further arguments are all ok. */
1113 2039 : res = true;
1114 2039 : goto end;
1115 487761 : case VOID_TYPE:
1116 : /* This signifies an endlink, if no arguments remain, return
1117 : true, otherwise return false. */
1118 487761 : res = !more_const_call_expr_args_p (&iter);
1119 487761 : goto end;
1120 808785 : case POINTER_TYPE:
1121 : /* The actual argument must be nonnull when either the whole
1122 : called function has been declared nonnull, or when the formal
1123 : argument corresponding to the actual argument has been. */
1124 808785 : if (argmap
1125 808785 : && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
1126 : {
1127 280847 : arg = next_const_call_expr_arg (&iter);
1128 280847 : if (!validate_arg (arg, code) || integer_zerop (arg))
1129 59 : goto end;
1130 : break;
1131 : }
1132 : /* FALLTHRU */
1133 932874 : default:
1134 : /* If no parameters remain or the parameter's code does not
1135 : match the specified code, return false. Otherwise continue
1136 : checking any remaining arguments. */
1137 932874 : arg = next_const_call_expr_arg (&iter);
1138 932874 : if (!validate_arg (arg, code))
1139 174 : goto end;
1140 : break;
1141 : }
1142 : }
1143 :
1144 : /* We need gotos here since we can only have one VA_CLOSE in a
1145 : function. */
1146 490033 : end: ;
1147 490033 : va_end (ap);
1148 :
1149 490033 : BITMAP_FREE (argmap);
1150 :
1151 490033 : if (res)
1152 489800 : for (tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (TREE_TYPE (fn)));
1153 703077 : (attrs = lookup_attribute ("nonnull_if_nonzero", attrs));
1154 213277 : attrs = TREE_CHAIN (attrs))
1155 : {
1156 213299 : tree args = TREE_VALUE (attrs);
1157 213299 : unsigned int idx = TREE_INT_CST_LOW (TREE_VALUE (args)) - 1;
1158 213299 : unsigned int idx2
1159 213299 : = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (args))) - 1;
1160 213299 : unsigned int idx3 = idx2;
1161 213299 : if (tree chain2 = TREE_CHAIN (TREE_CHAIN (args)))
1162 0 : idx3 = TREE_INT_CST_LOW (TREE_VALUE (chain2)) - 1;
1163 213299 : if (idx < (unsigned) call_expr_nargs (callexpr)
1164 213299 : && idx2 < (unsigned) call_expr_nargs (callexpr)
1165 213299 : && idx3 < (unsigned) call_expr_nargs (callexpr)
1166 213299 : && POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (callexpr, idx)))
1167 213299 : && integer_zerop (CALL_EXPR_ARG (callexpr, idx))
1168 82 : && INTEGRAL_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (callexpr, idx2)))
1169 82 : && integer_nonzerop (CALL_EXPR_ARG (callexpr, idx2))
1170 22 : && INTEGRAL_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (callexpr, idx3)))
1171 213321 : && integer_nonzerop (CALL_EXPR_ARG (callexpr, idx3)))
1172 : return false;
1173 : }
1174 :
1175 : return res;
1176 : }
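/* Editorial usage sketch (not part of the original file): callers list
   the expected argument type codes, terminated by VOID_TYPE for a fixed
   signature or by 0 for a trailing ellipsis, e.g.

     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)

   accepts exactly two pointer arguments (rejecting literal nulls for
   parameters declared nonnull), while

     validate_arglist (exp, POINTER_TYPE, 0)

   accepts one pointer followed by anything.  Both forms appear in the
   expanders below.  */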
1177 :
1178 : /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1179 : and the address of the save area. */
1180 :
1181 : static rtx
1182 512 : expand_builtin_nonlocal_goto (tree exp)
1183 : {
1184 512 : tree t_label, t_save_area;
1185 512 : rtx r_label, r_save_area, r_fp, r_sp;
1186 512 : rtx_insn *insn;
1187 :
1188 512 : if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1189 : return NULL_RTX;
1190 :
1191 512 : t_label = CALL_EXPR_ARG (exp, 0);
1192 512 : t_save_area = CALL_EXPR_ARG (exp, 1);
1193 :
1194 512 : r_label = expand_normal (t_label);
1195 512 : r_label = convert_memory_address (Pmode, r_label);
1196 512 : r_save_area = expand_normal (t_save_area);
1197 512 : r_save_area = convert_memory_address (Pmode, r_save_area);
1198 : /* Copy the address of the save location to a register just in case it was
1199 : based on the frame pointer. */
1200 512 : r_save_area = copy_to_reg (r_save_area);
1201 512 : r_fp = gen_rtx_MEM (Pmode, r_save_area);
1202 512 : r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1203 : plus_constant (Pmode, r_save_area,
1204 1024 : GET_MODE_SIZE (Pmode)));
1205 :
1206 512 : crtl->has_nonlocal_goto = 1;
1207 :
1208 : /* ??? We no longer need to pass the static chain value, afaik. */
1209 512 : if (targetm.have_nonlocal_goto ())
1210 0 : emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1211 : else
1212 : {
1213 512 : emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1214 512 : emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1215 :
1216 512 : r_label = copy_to_reg (r_label);
1217 :
1218 : /* Restore the frame pointer and stack pointer. We must use a
1219 : temporary since the setjmp buffer may be a local. */
1220 512 : r_fp = copy_to_reg (r_fp);
1221 512 : emit_stack_restore (SAVE_NONLOCAL, r_sp);
1222 :
1223 : /* Ensure the frame pointer move is not optimized. */
1224 512 : emit_insn (gen_blockage ());
1225 512 : emit_clobber (hard_frame_pointer_rtx);
1226 512 : emit_clobber (frame_pointer_rtx);
1227 512 : emit_move_insn (hard_frame_pointer_rtx, r_fp);
1228 :
1229 : /* USE of hard_frame_pointer_rtx added for consistency;
1230 : not clear if really needed. */
1231 512 : emit_use (hard_frame_pointer_rtx);
1232 512 : emit_use (stack_pointer_rtx);
1233 :
1234 : /* If the architecture is using a GP register, we must
1235 : conservatively assume that the target function makes use of it.
1236 : The prologue of functions with nonlocal gotos must therefore
1237 : initialize the GP register to the appropriate value, and we
1238 : must then make sure that this value is live at the point
1239 : of the jump. (Note that this doesn't necessarily apply
1240 : to targets with a nonlocal_goto pattern; they are free
1241 : to implement it in their own way. Note also that this is
1242 : a no-op if the GP register is a global invariant.) */
1243 512 : unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
1244 0 : if (regnum != INVALID_REGNUM && fixed_regs[regnum])
1245 0 : emit_use (pic_offset_table_rtx);
1246 :
1247 512 : emit_indirect_jump (r_label);
1248 : }
1249 :
1250 : /* Search backwards to the jump insn and mark it as a
1251 : non-local goto. */
1252 1024 : for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1253 : {
1254 1024 : if (JUMP_P (insn))
1255 : {
1256 512 : add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1257 512 : break;
1258 : }
1259 512 : else if (CALL_P (insn))
1260 : break;
1261 : }
1262 :
1263 512 : return const0_rtx;
1264 : }
1265 :
1266 : /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1267 : (not all will be used on all machines) that was passed to __builtin_setjmp.
1268 : It updates the stack pointer in that block to the current value. This is
1269 : also called directly by the SJLJ exception handling code. */
1270 :
1271 : void
1272 0 : expand_builtin_update_setjmp_buf (rtx buf_addr)
1273 : {
1274 0 : machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1275 0 : buf_addr = convert_memory_address (Pmode, buf_addr);
1276 0 : rtx stack_save
1277 0 : = gen_rtx_MEM (sa_mode,
1278 0 : memory_address
1279 : (sa_mode,
1280 : plus_constant (Pmode, buf_addr,
1281 0 : 2 * GET_MODE_SIZE (Pmode))));
1282 :
1283 0 : emit_stack_save (SAVE_NONLOCAL, &stack_save);
1284 0 : }
1285 :
1286 : /* Expand a call to __builtin_prefetch. For a target that does not support
1287 : data prefetch, evaluate the memory address argument in case it has side
1288 : effects. */
1289 :
1290 : static void
1291 2039 : expand_builtin_prefetch (tree exp)
1292 : {
1293 2039 : tree arg0, arg1, arg2;
1294 2039 : int nargs;
1295 2039 : rtx op0, op1, op2;
1296 :
1297 2039 : if (!validate_arglist (exp, POINTER_TYPE, 0))
1298 : return;
1299 :
1300 2039 : arg0 = CALL_EXPR_ARG (exp, 0);
1301 :
1302 : /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1303 : zero (read) and argument 2 (locality) defaults to 3 (high degree of
1304 : locality). */
1305 2039 : nargs = call_expr_nargs (exp);
1306 2039 : arg1 = nargs > 1 ? CALL_EXPR_ARG (exp, 1) : NULL_TREE;
1307 2001 : arg2 = nargs > 2 ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;
1308 :
1309 : /* Argument 0 is an address. */
1310 2135 : op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1311 :
1312 : /* Argument 1 (read/write flag) must be a compile-time constant int. */
1313 2039 : if (arg1 == NULL_TREE)
1314 38 : op1 = const0_rtx;
1315 2001 : else if (TREE_CODE (arg1) != INTEGER_CST)
1316 : {
1317 0 : error ("second argument to %<__builtin_prefetch%> must be a constant");
1318 0 : op1 = const0_rtx;
1319 : }
1320 : else
1321 2001 : op1 = expand_normal (arg1);
1322 : /* Argument 1 must be 0, 1 or 2. */
1323 2039 : if (!IN_RANGE (INTVAL (op1), 0, 2))
1324 : {
1325 3 : warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1326 : " using zero");
1327 3 : op1 = const0_rtx;
1328 : }
1329 :
1330 : /* Argument 2 (locality) must be a compile-time constant int. */
1331 2039 : if (arg2 == NULL_TREE)
1332 63 : op2 = GEN_INT (3);
1333 1976 : else if (TREE_CODE (arg2) != INTEGER_CST)
1334 : {
1335 0 : error ("third argument to %<__builtin_prefetch%> must be a constant");
1336 0 : op2 = const0_rtx;
1337 : }
1338 : else
1339 1976 : op2 = expand_normal (arg2);
1340 : /* Argument 2 must be 0, 1, 2, or 3. */
1341 2039 : if (!IN_RANGE (INTVAL (op2), 0, 3))
1342 : {
1343 3 : warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1344 3 : op2 = const0_rtx;
1345 : }
1346 :
1347 2039 : if (targetm.have_prefetch ())
1348 : {
1349 2039 : class expand_operand ops[3];
1350 :
1351 2039 : create_address_operand (&ops[0], op0);
1352 2039 : create_integer_operand (&ops[1], INTVAL (op1));
1353 2039 : create_integer_operand (&ops[2], INTVAL (op2));
1354 2039 : if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
1355 2039 : return;
1356 : }
1357 :
1358 : /* Don't do anything with direct references to volatile memory, but
1359 : generate code to handle other side effects. */
1360 0 : if (!MEM_P (op0) && side_effects_p (op0))
1361 0 : emit_insn (op0);
1362 : }
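/* Editorial example (not part of the original file): a bare
   __builtin_prefetch (p) is expanded as if written
   __builtin_prefetch (p, 0, 3), a read access with maximal temporal
   locality, and out-of-range second or third arguments are diagnosed
   and replaced by zero, as above.  */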
1363 :
1364 : /* Get a MEM rtx for expression EXP which is the address of an operand
1365 : to be used in a string instruction (cmpstrsi, cpymemsi, ..). LEN is
1366 : the maximum length of the block of memory that might be accessed or
1367 : NULL if unknown. */
1368 :
1369 : rtx
1370 738647 : get_memory_rtx (tree exp, tree len)
1371 : {
1372 738647 : tree orig_exp = exp, base;
1373 738647 : rtx addr, mem;
1374 :
1375 738647 : gcc_checking_assert
1376 : (ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)))));
1377 :
1378 : /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1379 : from its expression, for expr->a.b only <variable>.a.b is recorded. */
1380 738647 : if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1381 1419 : exp = TREE_OPERAND (exp, 0);
1382 :
1383 738647 : addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1384 738647 : mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1385 :
1386 : /* Get an expression we can use to find the attributes to assign to MEM.
1387 : First remove any nops. */
1388 1477294 : while (CONVERT_EXPR_P (exp)
1389 738650 : && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1390 3 : exp = TREE_OPERAND (exp, 0);
1391 :
1392 : /* Build a MEM_REF representing the whole accessed area as a byte blob,
1393 : (as builtin stringops may alias with anything). */
1394 738647 : exp = fold_build2 (MEM_REF,
1395 : build_array_type (char_type_node,
1396 : build_range_type (sizetype,
1397 : size_one_node, len)),
1398 : exp, build_int_cst (ptr_type_node, 0));
1399 :
1400 : /* If the MEM_REF has no acceptable address, try to get the base object
1401 : from the original address we got, and build an all-aliasing
1402 : unknown-sized access to that one. */
1403 738647 : if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1404 738635 : set_mem_attributes (mem, exp, 0);
1405 12 : else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1406 12 : && (base = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1407 : 0))))
1408 : {
1409 12 : unsigned int align = get_pointer_alignment (TREE_OPERAND (exp, 0));
1410 12 : exp = build_fold_addr_expr (base);
1411 12 : exp = fold_build2 (MEM_REF,
1412 : build_array_type (char_type_node,
1413 : build_range_type (sizetype,
1414 : size_zero_node,
1415 : NULL)),
1416 : exp, build_int_cst (ptr_type_node, 0));
1417 12 : set_mem_attributes (mem, exp, 0);
1418 : /* Since we stripped parts make sure the offset is unknown and the
1419 : alignment is computed from the original address. */
1420 12 : clear_mem_offset (mem);
1421 12 : set_mem_align (mem, align);
1422 : }
1423 738647 : set_mem_alias_set (mem, 0);
1424 738647 : return mem;
1425 : }
1426 :
1427 : /* Built-in functions to perform an untyped call and return. */
1428 :
1429 : /* Wrapper that implicitly applies a delta when getting or setting the
1430 : enclosed value. */
1431 : template <typename T>
1432 : class delta_type
1433 : {
1434 : T &value; T const delta;
1435 : public:
1436 2524 : delta_type (T &val, T dlt) : value (val), delta (dlt) {}
1437 1809 : operator T () const { return value + delta; }
1438 715 : T operator = (T val) const { value = val - delta; return val; }
1439 : };
1440 :
1441 : #define saved_apply_args_size \
1442 : (delta_type<int> (this_target_builtins->x_apply_args_size_plus_one, -1))
1443 : #define apply_args_mode \
1444 : (this_target_builtins->x_apply_args_mode)
1445 : #define saved_apply_result_size \
1446 : (delta_type<int> (this_target_builtins->x_apply_result_size_plus_one, -1))
1447 : #define apply_result_mode \
1448 : (this_target_builtins->x_apply_result_mode)
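/* A minimal illustration of the wrapper and macros above (editorial sketch;
   STORAGE stands in for the x_apply_*_size_plus_one fields).  Storing
   SIZE + 1 lets a zero-initialized field read back as -1, the "not yet
   computed" sentinel tested in apply_args_size below.  */

static void
delta_type_example (void)
{
  int storage = 0;                   /* zero-initialized target field */
  delta_type<int> wrapped (storage, -1);
  int before = wrapped;              /* reads 0 + (-1) == -1: not cached */
  wrapped = 16;                      /* stores 16 - (-1) == 17 */
  int after = wrapped;               /* reads 17 + (-1) == 16 again */
  (void) before; (void) after;
}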
1449 :
1450 : /* Return the size required for the block returned by __builtin_apply_args,
1451 : and initialize apply_args_mode. */
1452 :
1453 : static int
1454 947 : apply_args_size (void)
1455 : {
1456 947 : int size = saved_apply_args_size;
1457 947 : int align;
1458 947 : unsigned int regno;
1459 :
1460 : /* The values computed by this function never change. */
1461 947 : if (size < 0)
1462 : {
1463 : /* The first value is the incoming arg-pointer. */
1464 360 : size = GET_MODE_SIZE (Pmode);
1465 :
1466 : /* The second value is the structure value address unless this is
1467 : passed as an "invisible" first argument. */
1468 360 : if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1469 0 : size += GET_MODE_SIZE (Pmode);
1470 :
1471 33480 : for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1472 33120 : if (FUNCTION_ARG_REGNO_P (regno))
1473 : {
1474 5400 : fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);
1475 :
1476 5400 : if (mode != VOIDmode)
1477 : {
1478 5400 : align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1479 5400 : if (size % align != 0)
1480 1 : size = CEIL (size, align) * align;
1481 5400 : size += GET_MODE_SIZE (mode);
1482 5400 : apply_args_mode[regno] = mode;
1483 : }
1484 : else
1485 0 : apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1486 : }
1487 : else
1488 27720 : apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1489 :
1490 360 : saved_apply_args_size = size;
1491 : }
1492 947 : return size;
1493 : }
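/* Worked instance of the rounding above (editorial note): with SIZE == 10
   and a register mode with 8-byte alignment, 10 % 8 != 0, so SIZE is
   rounded to CEIL (10, 8) * 8 == 16; the register is then saved at offset
   16 and SIZE advances by GET_MODE_SIZE (mode).  */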
1494 :
1495 : /* Return the size required for the block returned by __builtin_apply,
1496 : and initialize apply_result_mode. */
1497 :
1498 : static int
1499 862 : apply_result_size (void)
1500 : {
1501 862 : int size = saved_apply_result_size;
1502 862 : int align, regno;
1503 :
1504 : /* The values computed by this function never change. */
1505 862 : if (size < 0)
1506 : {
1507 : size = 0;
1508 :
1509 33015 : for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1510 32660 : if (targetm.calls.function_value_regno_p (regno))
1511 : {
1512 2840 : fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);
1513 :
1514 2840 : if (mode != VOIDmode)
1515 : {
1516 2840 : align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1517 2840 : if (size % align != 0)
1518 : size = CEIL (size, align) * align;
1519 2840 : size += GET_MODE_SIZE (mode);
1520 2840 : apply_result_mode[regno] = mode;
1521 : }
1522 : else
1523 0 : apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1524 : }
1525 : else
1526 29820 : apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1527 :
1528 : /* Allow targets that use untyped_call and untyped_return to override
1529 : the size so that machine-specific information can be stored here. */
1530 : #ifdef APPLY_RESULT_SIZE
1531 355 : size = APPLY_RESULT_SIZE;
1532 : #endif
1533 :
1534 355 : saved_apply_result_size = size;
1535 : }
1536 862 : return size;
1537 : }
1538 :
1539 : /* Create a vector describing the result block RESULT. If SAVEP is true,
1540 : the result block is used to save the values; otherwise it is used to
1541 : restore the values. */
1542 :
1543 : static rtx
1544 483 : result_vector (int savep, rtx result)
1545 : {
1546 483 : int regno, size, align, nelts;
1547 483 : fixed_size_mode mode;
1548 483 : rtx reg, mem;
1549 483 : rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1550 :
1551 483 : size = nelts = 0;
1552 44919 : for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1553 44436 : if ((mode = apply_result_mode[regno]) != VOIDmode)
1554 : {
1555 3864 : align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1556 3864 : if (size % align != 0)
1557 0 : size = CEIL (size, align) * align;
1558 3864 : reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1559 3864 : mem = adjust_address (result, mode, size);
1560 7728 : savevec[nelts++] = (savep
1561 7728 : ? gen_rtx_SET (mem, reg)
1562 0 : : gen_rtx_SET (reg, mem));
1563 7728 : size += GET_MODE_SIZE (mode);
1564 : }
1565 483 : return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1566 : }
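/* Shape of the vector built above, for a hypothetical target whose only
   value-return register is DImode register 0 (editorial sketch): with SAVEP
   nonzero the result is

     (parallel [(set (mem:DI (reg result-address)) (reg:DI 0))])

   and with SAVEP zero each SET is reversed, so the registers are reloaded
   from the block instead.  */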
1567 :
1568 : /* Save the state required to perform an untyped call with the same
1569 : arguments as were passed to the current function. */
1570 :
1571 : static rtx
1572 464 : expand_builtin_apply_args_1 (void)
1573 : {
1574 464 : rtx registers, tem;
1575 464 : int size, align, regno;
1576 464 : fixed_size_mode mode;
1577 464 : rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1578 :
1579 : /* Create a block where the arg-pointer, structure value address,
1580 : and argument registers can be saved. */
1581 464 : registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1582 :
1583 : /* Walk past the arg-pointer and structure value address. */
1584 464 : size = GET_MODE_SIZE (Pmode);
1585 464 : if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1586 0 : size += GET_MODE_SIZE (Pmode);
1587 :
1588 : /* Save each register used in calling a function to the block. */
1589 43152 : for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1590 42688 : if ((mode = apply_args_mode[regno]) != VOIDmode)
1591 : {
1592 6960 : align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1593 6960 : if (size % align != 0)
1594 1 : size = CEIL (size, align) * align;
1595 :
1596 6960 : tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1597 :
1598 6960 : emit_move_insn (adjust_address (registers, mode, size), tem);
1599 13920 : size += GET_MODE_SIZE (mode);
1600 : }
1601 :
1602 : /* Save the arg pointer to the block. */
1603 464 : tem = copy_to_reg (crtl->args.internal_arg_pointer);
1604 :   /* We need the pointer to the arguments as the caller actually passed
1605 :      them to us, not as we might have pretended they were passed.  Make sure
1606 :      it's a valid operand, as emit_move_insn isn't expected to handle a PLUS.  */
1607 464 : if (STACK_GROWS_DOWNWARD)
1608 464 : tem
1609 464 : = force_operand (plus_constant (Pmode, tem,
1610 464 : crtl->args.pretend_args_size),
1611 : NULL_RTX);
1612 464 : emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1613 :
1614 464 : size = GET_MODE_SIZE (Pmode);
1615 :
1616 : /* Save the structure value address unless this is passed as an
1617 : "invisible" first argument. */
1618 464 : if (struct_incoming_value)
1619 0 : emit_move_insn (adjust_address (registers, Pmode, size),
1620 : copy_to_reg (struct_incoming_value));
1621 :
1622 : /* Return the address of the block. */
1623 464 : return copy_addr_to_reg (XEXP (registers, 0));
1624 : }
1625 :
1626 : /* __builtin_apply_args returns a block of memory allocated on
1627 : the stack into which is stored the arg pointer, structure
1628 : value address, static chain, and all the registers that might
1629 : possibly be used in performing a function call. The code is
1630 : moved to the start of the function so the incoming values are
1631 : saved. */
1632 :
1633 : static rtx
1634 464 : expand_builtin_apply_args (void)
1635 : {
1636 : /* Don't do __builtin_apply_args more than once in a function.
1637 : Save the result of the first call and reuse it. */
1638 464 : if (apply_args_value != 0)
1639 : return apply_args_value;
1640 464 : {
1641 : /* When this function is called, it means that registers must be
1642 : saved on entry to this function. So we migrate the
1643 : call to the first insn of this function. */
1644 464 : rtx temp;
1645 :
1646 464 : start_sequence ();
1647 464 : temp = expand_builtin_apply_args_1 ();
1648 464 : rtx_insn *seq = end_sequence ();
1649 :
1650 464 : apply_args_value = temp;
1651 :
1652 : /* Put the insns after the NOTE that starts the function.
1653 : If this is inside a start_sequence, make the outer-level insn
1654 : chain current, so the code is placed at the start of the
1655 : function. If internal_arg_pointer is a non-virtual pseudo,
1656 : it needs to be placed after the function that initializes
1657 : that pseudo. */
1658 464 : push_topmost_sequence ();
1659 464 : if (REG_P (crtl->args.internal_arg_pointer)
1660 464 : && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1661 0 : emit_insn_before (seq, parm_birth_insn);
1662 : else
1663 464 : emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1664 464 : pop_topmost_sequence ();
1665 464 : return temp;
1666 : }
1667 : }
1668 :
1669 : /* Perform an untyped call and save the state required to perform an
1670 : untyped return of whatever value was returned by the given function. */
1671 :
1672 : static rtx
1673 483 : expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1674 : {
1675 483 : int size, align, regno;
1676 483 : fixed_size_mode mode;
1677 483 : rtx incoming_args, result, reg, dest, src;
1678 483 : rtx_call_insn *call_insn;
1679 483 : rtx old_stack_level = 0;
1680 483 : rtx call_fusage = 0;
1681 483 : rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1682 :
1683 483 : arguments = convert_memory_address (Pmode, arguments);
1684 :
1685 : /* Create a block where the return registers can be saved. */
1686 483 : result = assign_stack_local (BLKmode, apply_result_size (), -1);
1687 :
1688 : /* Fetch the arg pointer from the ARGUMENTS block. */
1689 483 : incoming_args = gen_reg_rtx (Pmode);
1690 483 : emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1691 483 : if (!STACK_GROWS_DOWNWARD)
1692 : incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1693 : incoming_args, 0, OPTAB_LIB_WIDEN);
1694 :
1695 : /* Push a new argument block and copy the arguments. Do not allow
1696 : the (potential) memcpy call below to interfere with our stack
1697 : manipulations. */
1698 483 : do_pending_stack_adjust ();
1699 483 : NO_DEFER_POP;
1700 :
1701 : /* Save the stack with nonlocal if available. */
1702 483 : if (targetm.have_save_stack_nonlocal ())
1703 483 : emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1704 : else
1705 0 : emit_stack_save (SAVE_BLOCK, &old_stack_level);
1706 :
1707 : /* Allocate a block of memory onto the stack and copy the memory
1708 : arguments to the outgoing arguments address. We can pass TRUE
1709 : as the 4th argument because we just saved the stack pointer
1710 : and will restore it right after the call. */
1711 966 : allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);
1712 :
1713 : /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1714 : may have already set current_function_calls_alloca to true.
1715 : current_function_calls_alloca won't be set if argsize is zero,
1716 : so we have to guarantee need_drap is true here. */
1717 483 : if (SUPPORTS_STACK_ALIGNMENT)
1718 483 : crtl->need_drap = true;
1719 :
1720 483 : dest = virtual_outgoing_args_rtx;
1721 483 : if (!STACK_GROWS_DOWNWARD)
1722 : {
1723 : if (CONST_INT_P (argsize))
1724 : dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1725 : else
1726 : dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1727 : }
1728 483 : dest = gen_rtx_MEM (BLKmode, dest);
1729 483 : set_mem_align (dest, PARM_BOUNDARY);
1730 483 : src = gen_rtx_MEM (BLKmode, incoming_args);
1731 483 : set_mem_align (src, PARM_BOUNDARY);
1732 483 : emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1733 :
1734 : /* Refer to the argument block. */
1735 483 : apply_args_size ();
1736 483 : arguments = gen_rtx_MEM (BLKmode, arguments);
1737 483 : set_mem_align (arguments, PARM_BOUNDARY);
1738 :
1739 : /* Walk past the arg-pointer and structure value address. */
1740 483 : size = GET_MODE_SIZE (Pmode);
1741 483 : if (struct_value)
1742 0 : size += GET_MODE_SIZE (Pmode);
1743 :
1744 : /* Restore each of the registers previously saved. Make USE insns
1745 : for each of these registers for use in making the call. */
1746 44919 : for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1747 44436 : if ((mode = apply_args_mode[regno]) != VOIDmode)
1748 : {
1749 7245 : align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1750 7245 : if (size % align != 0)
1751 0 : size = CEIL (size, align) * align;
1752 7245 : reg = gen_rtx_REG (mode, regno);
1753 7245 : emit_move_insn (reg, adjust_address (arguments, mode, size));
1754 7245 : use_reg (&call_fusage, reg);
1755 14490 : size += GET_MODE_SIZE (mode);
1756 : }
1757 :
1758 : /* Restore the structure value address unless this is passed as an
1759 : "invisible" first argument. */
1760 483 : size = GET_MODE_SIZE (Pmode);
1761 483 : if (struct_value)
1762 : {
1763 0 : rtx value = gen_reg_rtx (Pmode);
1764 0 : emit_move_insn (value, adjust_address (arguments, Pmode, size));
1765 0 : emit_move_insn (struct_value, value);
1766 0 : if (REG_P (struct_value))
1767 0 : use_reg (&call_fusage, struct_value);
1768 : }
1769 :
1770 : /* All arguments and registers used for the call are set up by now! */
1771 483 : function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1772 :
1773 : /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1774 : and we don't want to load it into a register as an optimization,
1775 : because prepare_call_address already did it if it should be done. */
1776 483 : if (GET_CODE (function) != SYMBOL_REF)
1777 29 : function = memory_address (FUNCTION_MODE, function);
1778 :
1779 : /* Generate the actual call instruction and save the return value. */
1780 483 : if (targetm.have_untyped_call ())
1781 : {
1782 483 : rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1783 483 : rtx_insn *seq = targetm.gen_untyped_call (mem, result,
1784 : result_vector (1, result));
1785 5800 : for (rtx_insn *insn = seq; insn; insn = NEXT_INSN (insn))
1786 5317 : if (CALL_P (insn))
1787 483 : add_reg_note (insn, REG_UNTYPED_CALL, NULL_RTX);
1788 483 : emit_insn (seq);
1789 : }
1790 0 : else if (targetm.have_call_value ())
1791 : {
1792 : rtx valreg = 0;
1793 :
1794 : /* Locate the unique return register. It is not possible to
1795 : express a call that sets more than one return register using
1796 : call_value; use untyped_call for that. In fact, untyped_call
1797 : only needs to save the return registers in the given block. */
1798 0 : for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1799 0 : if ((mode = apply_result_mode[regno]) != VOIDmode)
1800 : {
1801 0 : gcc_assert (!valreg); /* have_untyped_call required. */
1802 :
1803 0 : valreg = gen_rtx_REG (mode, regno);
1804 : }
1805 :
1806 0 : emit_insn (targetm.gen_call_value (valreg,
1807 : gen_rtx_MEM (FUNCTION_MODE, function),
1808 : const0_rtx, NULL_RTX, const0_rtx));
1809 :
1810 0 : emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1811 : }
1812 : else
1813 0 : gcc_unreachable ();
1814 :
1815 : /* Find the CALL insn we just emitted, and attach the register usage
1816 : information. */
1817 483 : call_insn = last_call_insn ();
1818 483 : add_function_usage_to (call_insn, call_fusage);
1819 :
1820 : /* Restore the stack. */
1821 483 : if (targetm.have_save_stack_nonlocal ())
1822 483 : emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1823 : else
1824 0 : emit_stack_restore (SAVE_BLOCK, old_stack_level);
1825 483 : fixup_args_size_notes (call_insn, get_last_insn (), 0);
1826 :
1827 483 : OK_DEFER_POP;
1828 :
1829 : /* Return the address of the result block. */
1830 483 : result = copy_addr_to_reg (XEXP (result, 0));
1831 483 : return convert_memory_address (ptr_mode, result);
1832 : }
1833 :
1834 : /* Perform an untyped return. */
1835 :
1836 : static void
1837 379 : expand_builtin_return (rtx result)
1838 : {
1839 379 : int size, align, regno;
1840 379 : fixed_size_mode mode;
1841 379 : rtx reg;
1842 379 : rtx_insn *call_fusage = 0;
1843 :
1844 379 : result = convert_memory_address (Pmode, result);
1845 :
1846 379 : apply_result_size ();
1847 379 : result = gen_rtx_MEM (BLKmode, result);
1848 :
1849 379 : if (targetm.have_untyped_return ())
1850 : {
1851 0 : rtx vector = result_vector (0, result);
1852 0 : emit_jump_insn (targetm.gen_untyped_return (result, vector));
1853 0 : emit_barrier ();
1854 0 : return;
1855 : }
1856 :
1857 : /* Restore the return value and note that each value is used. */
1858 : size = 0;
1859 35247 : for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1860 34868 : if ((mode = apply_result_mode[regno]) != VOIDmode)
1861 : {
1862 3032 : align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1863 3032 : if (size % align != 0)
1864 0 : size = CEIL (size, align) * align;
1865 3032 : reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1866 3032 : emit_move_insn (reg, adjust_address (result, mode, size));
1867 :
1868 3032 : push_to_sequence (call_fusage);
1869 3032 : emit_use (reg);
1870 3032 : call_fusage = end_sequence ();
1871 6064 : size += GET_MODE_SIZE (mode);
1872 : }
1873 :
1874 : /* Put the USE insns before the return. */
1875 379 : emit_insn (call_fusage);
1876 :
1877 :   /* Return whatever values were restored by jumping directly to the end
1878 :      of the function.  */
1879 379 : expand_naked_return ();
1880 : }
1881 :
1882 : /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1883 :
1884 : int
1885 2367 : type_to_class (tree type)
1886 : {
1887 2367 : switch (TREE_CODE (type))
1888 : {
1889 : case VOID_TYPE: return void_type_class;
1890 897 : case INTEGER_TYPE: return integer_type_class;
1891 39 : case ENUMERAL_TYPE: return enumeral_type_class;
1892 38 : case BOOLEAN_TYPE: return boolean_type_class;
1893 983 : case POINTER_TYPE: return pointer_type_class;
1894 27 : case REFERENCE_TYPE: return reference_type_class;
1895 36 : case OFFSET_TYPE: return offset_type_class;
1896 132 : case REAL_TYPE: return real_type_class;
1897 40 : case COMPLEX_TYPE: return complex_type_class;
1898 20 : case FUNCTION_TYPE: return function_type_class;
1899 0 : case METHOD_TYPE: return method_type_class;
1900 44 : case RECORD_TYPE: return record_type_class;
1901 44 : case UNION_TYPE:
1902 44 : case QUAL_UNION_TYPE: return union_type_class;
1903 24 : case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1904 24 : ? string_type_class : array_type_class);
1905 0 : case LANG_TYPE: return lang_type_class;
1906 0 : case OPAQUE_TYPE: return opaque_type_class;
1907 0 : case BITINT_TYPE: return bitint_type_class;
1908 30 : case VECTOR_TYPE: return vector_type_class;
1909 3 : default: return no_type_class;
1910 : }
1911 : }
1912 :
1913 : /* Expand a call EXP to __builtin_classify_type. */
1914 :
1915 : static rtx
1916 0 : expand_builtin_classify_type (tree exp)
1917 : {
1918 0 : if (call_expr_nargs (exp))
1919 0 : return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1920 0 : return GEN_INT (no_type_class);
1921 : }
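/* Source-level view of the above (editorial sketch): the builtin maps an
   expression's type to the type_class enumeration returned by
   type_to_class.  */

static void
classify_examples (void)
{
  int i = 0;
  double d = 0.0;
  int ci = __builtin_classify_type (i);          /* integer_type_class */
  int cd = __builtin_classify_type (d);          /* real_type_class */
  int cp = __builtin_classify_type ((void *) 0); /* pointer_type_class */
  (void) ci; (void) cd; (void) cp;
}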
1922 :
1923 : /* This helper macro, meant to be used in mathfn_built_in below, determines
1924 : which among a set of builtin math functions is appropriate for a given type
1925 :    mode.  The `F' (float) and `L' (long double) variants are automatically
1926 :    generated from the 'double' case.  If a function supports the _Float<N>
1927 :    and _Float<N>X types, there are additional variants with 'F32', 'F64',
1928 : 'F128', etc. suffixes. */
1929 : #define CASE_MATHFN(MATHFN) \
1930 : CASE_CFN_##MATHFN: \
1931 : fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1932 : fcodel = BUILT_IN_##MATHFN##L ; break;
1933 : /* Similar to the above, but also add support for the _Float<N> and _Float<N>X
1934 : types. */
1935 : #define CASE_MATHFN_FLOATN(MATHFN) \
1936 : CASE_CFN_##MATHFN: \
1937 : fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1938 : fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
1939 : fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
1940 : fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
1941 : fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
1942 : break;
1943 : /* Similar to above, but appends _R after any F/L suffix. */
1944 : #define CASE_MATHFN_REENT(MATHFN) \
1945 : case CFN_BUILT_IN_##MATHFN##_R: \
1946 : case CFN_BUILT_IN_##MATHFN##F_R: \
1947 : case CFN_BUILT_IN_##MATHFN##L_R: \
1948 : fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1949 : fcodel = BUILT_IN_##MATHFN##L_R ; break;
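/* For reference (editorial note), CASE_MATHFN (CEXPI) expands, modulo
   whitespace, to:

     CASE_CFN_CEXPI:
       fcode = BUILT_IN_CEXPI; fcodef = BUILT_IN_CEXPIF;
       fcodel = BUILT_IN_CEXPIL; break;

   so each case records the double, float and long double codes for the
   type dispatch at the end of mathfn_built_in_2.  */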
1950 :
1951 : /* Return a function equivalent to FN but operating on floating-point
1952 : values of type TYPE, or END_BUILTINS if no such function exists.
1953 : This is purely an operation on function codes; it does not guarantee
1954 : that the target actually has an implementation of the function. */
1955 :
1956 : static built_in_function
1957 580251 : mathfn_built_in_2 (tree type, combined_fn fn)
1958 : {
1959 580251 : tree mtype;
1960 580251 : built_in_function fcode, fcodef, fcodel;
1961 580251 : built_in_function fcodef16 = END_BUILTINS;
1962 580251 : built_in_function fcodef32 = END_BUILTINS;
1963 580251 : built_in_function fcodef64 = END_BUILTINS;
1964 580251 : built_in_function fcodef128 = END_BUILTINS;
1965 580251 : built_in_function fcodef32x = END_BUILTINS;
1966 580251 : built_in_function fcodef64x = END_BUILTINS;
1967 580251 : built_in_function fcodef128x = END_BUILTINS;
1968 :
1969 : /* If <math.h> has been included somehow, HUGE_VAL and NAN definitions
1970 : break the uses below. */
1971 : #undef HUGE_VAL
1972 : #undef NAN
1973 :
1974 580251 : switch (fn)
1975 : {
1976 : #define SEQ_OF_CASE_MATHFN \
1977 : CASE_MATHFN_FLOATN (ACOS) \
1978 : CASE_MATHFN_FLOATN (ACOSH) \
1979 : CASE_MATHFN_FLOATN (ASIN) \
1980 : CASE_MATHFN_FLOATN (ASINH) \
1981 : CASE_MATHFN_FLOATN (ATAN) \
1982 : CASE_MATHFN_FLOATN (ATAN2) \
1983 : CASE_MATHFN_FLOATN (ATANH) \
1984 : CASE_MATHFN_FLOATN (CBRT) \
1985 : CASE_MATHFN_FLOATN (CEIL) \
1986 : CASE_MATHFN (CEXPI) \
1987 : CASE_MATHFN_FLOATN (COPYSIGN) \
1988 : CASE_MATHFN_FLOATN (COS) \
1989 : CASE_MATHFN_FLOATN (COSH) \
1990 : CASE_MATHFN (DREM) \
1991 : CASE_MATHFN_FLOATN (ERF) \
1992 : CASE_MATHFN_FLOATN (ERFC) \
1993 : CASE_MATHFN_FLOATN (EXP) \
1994 : CASE_MATHFN (EXP10) \
1995 : CASE_MATHFN_FLOATN (EXP2) \
1996 : CASE_MATHFN_FLOATN (EXPM1) \
1997 : CASE_MATHFN_FLOATN (FABS) \
1998 : CASE_MATHFN_FLOATN (FDIM) \
1999 : CASE_MATHFN_FLOATN (FLOOR) \
2000 : CASE_MATHFN_FLOATN (FMA) \
2001 : CASE_MATHFN_FLOATN (FMAX) \
2002 : CASE_MATHFN_FLOATN (FMIN) \
2003 : CASE_MATHFN_FLOATN (FMOD) \
2004 : CASE_MATHFN_FLOATN (FREXP) \
2005 : CASE_MATHFN (GAMMA) \
2006 : CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */ \
2007 : CASE_MATHFN_FLOATN (HUGE_VAL) \
2008 : CASE_MATHFN_FLOATN (HYPOT) \
2009 : CASE_MATHFN_FLOATN (ILOGB) \
2010 : CASE_MATHFN (ICEIL) \
2011 : CASE_MATHFN (IFLOOR) \
2012 : CASE_MATHFN_FLOATN (INF) \
2013 : CASE_MATHFN (IRINT) \
2014 : CASE_MATHFN (IROUND) \
2015 : CASE_MATHFN (ISINF) \
2016 : CASE_MATHFN (J0) \
2017 : CASE_MATHFN (J1) \
2018 : CASE_MATHFN (JN) \
2019 : CASE_MATHFN (LCEIL) \
2020 : CASE_MATHFN_FLOATN (LDEXP) \
2021 : CASE_MATHFN (LFLOOR) \
2022 : CASE_MATHFN_FLOATN (LGAMMA) \
2023 : CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */ \
2024 : CASE_MATHFN (LLCEIL) \
2025 : CASE_MATHFN (LLFLOOR) \
2026 : CASE_MATHFN_FLOATN (LLRINT) \
2027 : CASE_MATHFN_FLOATN (LLROUND) \
2028 : CASE_MATHFN_FLOATN (LOG) \
2029 : CASE_MATHFN_FLOATN (LOG10) \
2030 : CASE_MATHFN_FLOATN (LOG1P) \
2031 : CASE_MATHFN_FLOATN (LOG2) \
2032 : CASE_MATHFN_FLOATN (LOGB) \
2033 : CASE_MATHFN_FLOATN (LRINT) \
2034 : CASE_MATHFN_FLOATN (LROUND) \
2035 : CASE_MATHFN_FLOATN (MODF) \
2036 : CASE_MATHFN_FLOATN (NAN) \
2037 : CASE_MATHFN_FLOATN (NANS) \
2038 : CASE_MATHFN_FLOATN (NEARBYINT) \
2039 : CASE_MATHFN_FLOATN (NEXTAFTER) \
2040 : CASE_MATHFN (NEXTTOWARD) \
2041 : CASE_MATHFN_FLOATN (POW) \
2042 : CASE_MATHFN (POWI) \
2043 : CASE_MATHFN (POW10) \
2044 : CASE_MATHFN_FLOATN (REMAINDER) \
2045 : CASE_MATHFN_FLOATN (REMQUO) \
2046 : CASE_MATHFN_FLOATN (RINT) \
2047 : CASE_MATHFN_FLOATN (ROUND) \
2048 : CASE_MATHFN_FLOATN (ROUNDEVEN) \
2049 : CASE_MATHFN (SCALB) \
2050 : CASE_MATHFN_FLOATN (SCALBLN) \
2051 : CASE_MATHFN_FLOATN (SCALBN) \
2052 : CASE_MATHFN (SIGNBIT) \
2053 : CASE_MATHFN (SIGNIFICAND) \
2054 : CASE_MATHFN_FLOATN (SIN) \
2055 : CASE_MATHFN (SINCOS) \
2056 : CASE_MATHFN_FLOATN (SINH) \
2057 : CASE_MATHFN_FLOATN (SQRT) \
2058 : CASE_MATHFN_FLOATN (TAN) \
2059 : CASE_MATHFN_FLOATN (TANH) \
2060 : CASE_MATHFN_FLOATN (TGAMMA) \
2061 : CASE_MATHFN_FLOATN (TRUNC) \
2062 : CASE_MATHFN (Y0) \
2063 : CASE_MATHFN (Y1) \
2064 : CASE_MATHFN (YN)
2065 :
2066 580188 : SEQ_OF_CASE_MATHFN
2067 :
2068 : default:
2069 : return END_BUILTINS;
2070 : }
2071 :
2072 580251 : mtype = TYPE_MAIN_VARIANT (type);
2073 580251 : if (mtype == double_type_node)
2074 : return fcode;
2075 543657 : else if (mtype == float_type_node)
2076 : return fcodef;
2077 502925 : else if (mtype == long_double_type_node)
2078 : return fcodel;
2079 471657 : else if (mtype == float16_type_node)
2080 : return fcodef16;
2081 471609 : else if (mtype == float32_type_node)
2082 : return fcodef32;
2083 471609 : else if (mtype == float64_type_node)
2084 : return fcodef64;
2085 471609 : else if (mtype == float128_type_node)
2086 : return fcodef128;
2087 466013 : else if (mtype == float32x_type_node)
2088 : return fcodef32x;
2089 466013 : else if (mtype == float64x_type_node)
2090 : return fcodef64x;
2091 466013 : else if (mtype == float128x_type_node)
2092 : return fcodef128x;
2093 : else
2094 466013 : return END_BUILTINS;
2095 : }
2096 :
2097 : #undef CASE_MATHFN
2098 : #undef CASE_MATHFN_FLOATN
2099 : #undef CASE_MATHFN_REENT
2100 :
2101 : /* Return the math function equivalent to FN but operating directly on TYPE,
2102 : if available. If IMPLICIT_P is true use the implicit builtin declaration,
2103 : otherwise use the explicit declaration. If we can't do the conversion,
2104 : return null. */
2105 :
2106 : static tree
2107 580106 : mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
2108 : {
2109 580106 : built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2110 580106 : if (fcode2 == END_BUILTINS)
2111 : return NULL_TREE;
2112 :
2113 110445 : if (implicit_p && !builtin_decl_implicit_p (fcode2))
2114 : return NULL_TREE;
2115 :
2116 108400 : return builtin_decl_explicit (fcode2);
2117 : }
2118 :
2119 : /* Like mathfn_built_in_1, but always use the implicit array. */
2120 :
2121 : tree
2122 210 : mathfn_built_in (tree type, combined_fn fn)
2123 : {
2124 210 : return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2125 : }
2126 :
2127 : /* Like mathfn_built_in_1, but always use the explicit array. */
2128 :
2129 : tree
2130 0 : mathfn_built_in_explicit (tree type, combined_fn fn)
2131 : {
2132 0 : return mathfn_built_in_1 (type, fn, /*implicit=*/ 0);
2133 : }
2134 :
2135 : /* Like mathfn_built_in_1, but take a built_in_function and
2136 : always use the implicit array. */
2137 :
2138 : tree
2139 579673 : mathfn_built_in (tree type, enum built_in_function fn)
2140 : {
2141 579673 : return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
2142 : }
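/* Typical use (editorial sketch): retarget a known built-in at another
   floating-point type, e.g. when rewriting sqrt of a float value.  A
   NULL_TREE result means no suitable implicit declaration exists.  */

static void
mathfn_built_in_example (void)
{
  tree sqrtf_decl = mathfn_built_in (float_type_node, BUILT_IN_SQRT);
  if (sqrtf_decl != NULL_TREE)
    {
      /* sqrtf_decl is the declaration of BUILT_IN_SQRTF.  */
    }
}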
2143 :
2144 : /* Return the type associated with a built-in function, i.e., the one
2145 : to be passed to mathfn_built_in to get the type-specific
2146 : function. */
2147 :
2148 : tree
2149 1241 : mathfn_built_in_type (combined_fn fn)
2150 : {
2151 : #define CASE_MATHFN(MATHFN) \
2152 : case CFN_BUILT_IN_##MATHFN: \
2153 : return double_type_node; \
2154 : case CFN_BUILT_IN_##MATHFN##F: \
2155 : return float_type_node; \
2156 : case CFN_BUILT_IN_##MATHFN##L: \
2157 : return long_double_type_node;
2158 :
2159 : #define CASE_MATHFN_FLOATN(MATHFN) \
2160 : CASE_MATHFN(MATHFN) \
2161 : case CFN_BUILT_IN_##MATHFN##F16: \
2162 : return float16_type_node; \
2163 : case CFN_BUILT_IN_##MATHFN##F32: \
2164 : return float32_type_node; \
2165 : case CFN_BUILT_IN_##MATHFN##F64: \
2166 : return float64_type_node; \
2167 : case CFN_BUILT_IN_##MATHFN##F128: \
2168 : return float128_type_node; \
2169 : case CFN_BUILT_IN_##MATHFN##F32X: \
2170 : return float32x_type_node; \
2171 : case CFN_BUILT_IN_##MATHFN##F64X: \
2172 : return float64x_type_node; \
2173 : case CFN_BUILT_IN_##MATHFN##F128X: \
2174 : return float128x_type_node;
2175 :
2176 : /* Similar to above, but appends _R after any F/L suffix. */
2177 : #define CASE_MATHFN_REENT(MATHFN) \
2178 : case CFN_BUILT_IN_##MATHFN##_R: \
2179 : return double_type_node; \
2180 : case CFN_BUILT_IN_##MATHFN##F_R: \
2181 : return float_type_node; \
2182 : case CFN_BUILT_IN_##MATHFN##L_R: \
2183 : return long_double_type_node;
2184 :
2185 1241 : switch (fn)
2186 : {
2187 1239 : SEQ_OF_CASE_MATHFN
2188 :
2189 : default:
2190 : return NULL_TREE;
2191 : }
2192 :
2193 : #undef CASE_MATHFN
2194 : #undef CASE_MATHFN_FLOATN
2195 : #undef CASE_MATHFN_REENT
2196 : #undef SEQ_OF_CASE_MATHFN
2197 : }
2198 :
2199 : /* Check whether there is an internal function associated with function FN
2200 : and return type RETURN_TYPE. Return the function if so, otherwise return
2201 : IFN_LAST.
2202 :
2203 : Note that this function only tests whether the function is defined in
2204 :    internal-fn.def, not whether it is actually available on the target.  */
2205 :
2206 : static internal_fn
2207 13652213 : associated_internal_fn (built_in_function fn, tree return_type)
2208 : {
2209 13652213 : switch (fn)
2210 : {
2211 : #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2212 : CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2213 : #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2214 : CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2215 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2216 : #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2217 : CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2218 : #include "internal-fn.def"
2219 :
2220 : CASE_FLT_FN (BUILT_IN_POW10):
2221 : return IFN_EXP10;
2222 :
2223 : CASE_FLT_FN (BUILT_IN_DREM):
2224 : return IFN_REMAINDER;
2225 :
2226 18482 : CASE_FLT_FN (BUILT_IN_SCALBN):
2227 18482 : CASE_FLT_FN (BUILT_IN_SCALBLN):
2228 18482 : if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2229 : return IFN_LDEXP;
2230 : return IFN_LAST;
2231 123 : case BUILT_IN_CRC8_DATA8:
2232 123 : case BUILT_IN_CRC16_DATA8:
2233 123 : case BUILT_IN_CRC16_DATA16:
2234 123 : case BUILT_IN_CRC32_DATA8:
2235 123 : case BUILT_IN_CRC32_DATA16:
2236 123 : case BUILT_IN_CRC32_DATA32:
2237 123 : case BUILT_IN_CRC64_DATA8:
2238 123 : case BUILT_IN_CRC64_DATA16:
2239 123 : case BUILT_IN_CRC64_DATA32:
2240 123 : case BUILT_IN_CRC64_DATA64:
2241 123 : return IFN_CRC;
2242 176 : case BUILT_IN_REV_CRC8_DATA8:
2243 176 : case BUILT_IN_REV_CRC16_DATA8:
2244 176 : case BUILT_IN_REV_CRC16_DATA16:
2245 176 : case BUILT_IN_REV_CRC32_DATA8:
2246 176 : case BUILT_IN_REV_CRC32_DATA16:
2247 176 : case BUILT_IN_REV_CRC32_DATA32:
2248 176 : case BUILT_IN_REV_CRC64_DATA8:
2249 176 : case BUILT_IN_REV_CRC64_DATA16:
2250 176 : case BUILT_IN_REV_CRC64_DATA32:
2251 176 : case BUILT_IN_REV_CRC64_DATA64:
2252 176 : return IFN_CRC_REV;
2253 : default:
2254 : return IFN_LAST;
2255 : }
2256 : }
2257 :
2258 : /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2259 : return its code, otherwise return IFN_LAST. Note that this function
2260 : only tests whether the function is defined in internals.def, not whether
2261 :    only tests whether the function is defined in internal-fn.def, not whether
2262 :
2263 : internal_fn
2264 639465 : associated_internal_fn (tree fndecl)
2265 : {
2266 639465 : gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2267 639465 : return associated_internal_fn (DECL_FUNCTION_CODE (fndecl),
2268 639465 : TREE_TYPE (TREE_TYPE (fndecl)));
2269 : }
2270 :
2271 : /* Check whether there is an internal function associated with function CFN
2272 : and return type RETURN_TYPE. Return the function if so, otherwise return
2273 : IFN_LAST.
2274 :
2275 : Note that this function only tests whether the function is defined in
2276 :    internal-fn.def, not whether it is actually available on the target.  */
2277 :
2278 : internal_fn
2279 32785948 : associated_internal_fn (combined_fn cfn, tree return_type)
2280 : {
2281 32785948 : if (internal_fn_p (cfn))
2282 19773200 : return as_internal_fn (cfn);
2283 13012748 : return associated_internal_fn (as_builtin_fn (cfn), return_type);
2284 : }
2285 :
2286 : /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2287 : on the current target by a call to an internal function, return the
2288 : code of that internal function, otherwise return IFN_LAST. The caller
2289 : is responsible for ensuring that any side-effects of the built-in
2290 : call are dealt with correctly. E.g. if CALL sets errno, the caller
2291 : must decide that the errno result isn't needed or make it available
2292 : in some other way. */
2293 :
2294 : internal_fn
2295 862907 : replacement_internal_fn (gcall *call)
2296 : {
2297 862907 : if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2298 : {
2299 637271 : internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2300 637271 : if (ifn != IFN_LAST)
2301 : {
2302 59339 : tree_pair types = direct_internal_fn_types (ifn, call);
2303 59339 : optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2304 59339 : if (direct_internal_fn_supported_p (ifn, types, opt_type))
2305 37950 : return ifn;
2306 : }
2307 : }
2308 : return IFN_LAST;
2309 : }
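/* Caller-side sketch (editorial; CALL and GSI are an assumed gcall * and
   gimple_stmt_iterator positioned on a single-argument call with no
   virtual definition, so the errno caveat above is moot):

  internal_fn ifn = replacement_internal_fn (call);
  if (ifn != IFN_LAST)
    {
      gcall *repl
	= gimple_build_call_internal (ifn, 1, gimple_call_arg (call, 0));
      gimple_call_set_lhs (repl, gimple_call_lhs (call));
      gsi_replace (&gsi, repl, true);
    }
*/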
2310 :
2311 : /* Expand a call to the builtin ternary math functions (fma).
2312 : Return NULL_RTX if a normal call should be emitted rather than expanding the
2313 : function in-line. EXP is the expression that is a call to the builtin
2314 : function; if convenient, the result should be placed in TARGET.
2315 : SUBTARGET may be used as the target for computing one of EXP's
2316 : operands. */
2317 :
2318 : static rtx
2319 368 : expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2320 : {
2321 368 : optab builtin_optab;
2322 368 : rtx op0, op1, op2, result;
2323 368 : rtx_insn *insns;
2324 368 : tree fndecl = get_callee_fndecl (exp);
2325 368 : tree arg0, arg1, arg2;
2326 368 : machine_mode mode;
2327 :
2328 368 : if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2329 : return NULL_RTX;
2330 :
2331 368 : arg0 = CALL_EXPR_ARG (exp, 0);
2332 368 : arg1 = CALL_EXPR_ARG (exp, 1);
2333 368 : arg2 = CALL_EXPR_ARG (exp, 2);
2334 :
2335 368 : switch (DECL_FUNCTION_CODE (fndecl))
2336 : {
2337 368 : CASE_FLT_FN (BUILT_IN_FMA):
2338 368 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
2339 368 : builtin_optab = fma_optab; break;
2340 0 : default:
2341 0 : gcc_unreachable ();
2342 : }
2343 :
2344 : /* Make a suitable register to place result in. */
2345 368 : mode = TYPE_MODE (TREE_TYPE (exp));
2346 :
2347 : /* Before working hard, check whether the instruction is available. */
2348 368 : if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2349 : return NULL_RTX;
2350 :
2351 0 : result = gen_reg_rtx (mode);
2352 :
2353 : /* Always stabilize the argument list. */
2354 0 : CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2355 0 : CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2356 0 : CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2357 :
2358 0 : op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2359 0 : op1 = expand_normal (arg1);
2360 0 : op2 = expand_normal (arg2);
2361 :
2362 0 : start_sequence ();
2363 :
2364 : /* Compute into RESULT.
2365 : Set RESULT to wherever the result comes back. */
2366 0 : result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2367 : result, 0);
2368 :
2369 : /* If we were unable to expand via the builtin, stop the sequence
2370 :      (without outputting the insns) and call the library function
2371 : with the stabilized argument list. */
2372 0 : if (result == 0)
2373 : {
2374 0 : end_sequence ();
2375 0 : return expand_call (exp, target, target == const0_rtx);
2376 : }
2377 :
2378 : /* Output the entire sequence. */
2379 0 : insns = end_sequence ();
2380 0 : emit_insn (insns);
2381 :
2382 0 : return result;
2383 : }
2384 :
2385 : /* Expand a call to the builtin sin and cos math functions.
2386 : Return NULL_RTX if a normal call should be emitted rather than expanding the
2387 : function in-line. EXP is the expression that is a call to the builtin
2388 : function; if convenient, the result should be placed in TARGET.
2389 : SUBTARGET may be used as the target for computing one of EXP's
2390 : operands. */
2391 :
2392 : static rtx
2393 47 : expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2394 : {
2395 47 : optab builtin_optab;
2396 47 : rtx op0;
2397 47 : rtx_insn *insns;
2398 47 : tree fndecl = get_callee_fndecl (exp);
2399 47 : machine_mode mode;
2400 47 : tree arg;
2401 :
2402 47 : if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2403 : return NULL_RTX;
2404 :
2405 39 : arg = CALL_EXPR_ARG (exp, 0);
2406 :
2407 39 : switch (DECL_FUNCTION_CODE (fndecl))
2408 : {
2409 39 : CASE_FLT_FN (BUILT_IN_SIN):
2410 39 : CASE_FLT_FN (BUILT_IN_COS):
2411 39 : builtin_optab = sincos_optab; break;
2412 0 : default:
2413 0 : gcc_unreachable ();
2414 : }
2415 :
2416 : /* Make a suitable register to place result in. */
2417 39 : mode = TYPE_MODE (TREE_TYPE (exp));
2418 :
2419 :   /* Check if sincos insn is available, otherwise fall back
2420 : to sin or cos insn. */
2421 39 : if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2422 39 : switch (DECL_FUNCTION_CODE (fndecl))
2423 : {
2424 : CASE_FLT_FN (BUILT_IN_SIN):
2425 : builtin_optab = sin_optab; break;
2426 14 : CASE_FLT_FN (BUILT_IN_COS):
2427 14 : builtin_optab = cos_optab; break;
2428 0 : default:
2429 0 : gcc_unreachable ();
2430 : }
2431 :
2432 : /* Before working hard, check whether the instruction is available. */
2433 39 : if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2434 : {
2435 0 : rtx result = gen_reg_rtx (mode);
2436 :
2437 : /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2438 : need to expand the argument again. This way, we will not perform
2439 :          side-effects more than once.  */
2440 0 : CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2441 :
2442 0 : op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2443 :
2444 0 : start_sequence ();
2445 :
2446 : /* Compute into RESULT.
2447 : Set RESULT to wherever the result comes back. */
2448 0 : if (builtin_optab == sincos_optab)
2449 : {
2450 0 : int ok;
2451 :
2452 0 : switch (DECL_FUNCTION_CODE (fndecl))
2453 : {
2454 0 : CASE_FLT_FN (BUILT_IN_SIN):
2455 0 : ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2456 0 : break;
2457 0 : CASE_FLT_FN (BUILT_IN_COS):
2458 0 : ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2459 0 : break;
2460 0 : default:
2461 0 : gcc_unreachable ();
2462 : }
2463 0 : gcc_assert (ok);
2464 : }
2465 : else
2466 0 : result = expand_unop (mode, builtin_optab, op0, result, 0);
2467 :
2468 0 : if (result != 0)
2469 : {
2470 : /* Output the entire sequence. */
2471 0 : insns = end_sequence ();
2472 0 : emit_insn (insns);
2473 0 : return result;
2474 : }
2475 :
2476 : /* If we were unable to expand via the builtin, stop the sequence
2477 :      (without outputting the insns) and call the library function
2478 : with the stabilized argument list. */
2479 0 : end_sequence ();
2480 : }
2481 :
2482 39 : return expand_call (exp, target, target == const0_rtx);
2483 : }
2484 :
2485 : /* Given an interclass math builtin decl FNDECL and its argument ARG,
2486 :    return an RTL instruction code that implements the functionality.
2487 : If that isn't possible or available return CODE_FOR_nothing. */
2488 :
2489 : static enum insn_code
2490 1989751 : interclass_mathfn_icode (tree arg, tree fndecl)
2491 : {
2492 1989751 : bool errno_set = false;
2493 1989751 : optab builtin_optab = unknown_optab;
2494 1989751 : machine_mode mode;
2495 :
2496 1989751 : switch (DECL_FUNCTION_CODE (fndecl))
2497 : {
2498 4 : CASE_FLT_FN (BUILT_IN_ILOGB):
2499 4 : errno_set = true;
2500 4 : builtin_optab = ilogb_optab;
2501 4 : break;
2502 221723 : CASE_FLT_FN (BUILT_IN_ISINF):
2503 221723 : builtin_optab = isinf_optab;
2504 221723 : break;
2505 1323503 : case BUILT_IN_ISFINITE:
2506 1323503 : builtin_optab = isfinite_optab;
2507 1323503 : break;
2508 219918 : case BUILT_IN_ISNORMAL:
2509 219918 : builtin_optab = isnormal_optab;
2510 219918 : break;
2511 222526 : CASE_FLT_FN (BUILT_IN_ISNAN):
2512 222526 : builtin_optab = isnan_optab;
2513 222526 : break;
2514 : CASE_FLT_FN (BUILT_IN_FINITE):
2515 : case BUILT_IN_FINITED32:
2516 : case BUILT_IN_FINITED64:
2517 : case BUILT_IN_FINITED128:
2518 : case BUILT_IN_ISINFD32:
2519 : case BUILT_IN_ISINFD64:
2520 : case BUILT_IN_ISINFD128:
2521 : case BUILT_IN_ISNAND32:
2522 : case BUILT_IN_ISNAND64:
2523 : case BUILT_IN_ISNAND128:
2524 : /* These builtins have no optabs (yet). */
2525 : break;
2526 0 : default:
2527 0 : gcc_unreachable ();
2528 : }
2529 :
2530 : /* There's no easy way to detect the case we need to set EDOM. */
2531 1989751 : if (flag_errno_math && errno_set)
2532 : return CODE_FOR_nothing;
2533 :
2534 : /* Optab mode depends on the mode of the input argument. */
2535 1989751 : mode = TYPE_MODE (TREE_TYPE (arg));
2536 :
2537 1989751 : if (builtin_optab)
2538 1987674 : return optab_handler (builtin_optab, mode);
2539 : return CODE_FOR_nothing;
2540 : }
2541 :
2542 : /* Expand a call to one of the builtin math functions that operate on
2543 :    a floating-point argument and output an integer result (ilogb, isinf,
2544 : isnan, etc).
2545 : Return 0 if a normal call should be emitted rather than expanding the
2546 : function in-line. EXP is the expression that is a call to the builtin
2547 : function; if convenient, the result should be placed in TARGET. */
2548 :
2549 : static rtx
2550 4 : expand_builtin_interclass_mathfn (tree exp, rtx target)
2551 : {
2552 4 : enum insn_code icode = CODE_FOR_nothing;
2553 4 : rtx op0;
2554 4 : tree fndecl = get_callee_fndecl (exp);
2555 4 : machine_mode mode;
2556 4 : tree arg;
2557 :
2558 4 : if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2559 : return NULL_RTX;
2560 :
2561 4 : arg = CALL_EXPR_ARG (exp, 0);
2562 4 : icode = interclass_mathfn_icode (arg, fndecl);
2563 4 : mode = TYPE_MODE (TREE_TYPE (arg));
2564 :
2565 4 : if (icode != CODE_FOR_nothing)
2566 : {
2567 2 : class expand_operand ops[1];
2568 2 : rtx_insn *last = get_last_insn ();
2569 2 : tree orig_arg = arg;
2570 :
2571 : /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2572 : need to expand the argument again. This way, we will not perform
2573 :          side-effects more than once.  */
2574 2 : CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2575 :
2576 2 : op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2577 :
2578 2 : if (mode != GET_MODE (op0))
2579 0 : op0 = convert_to_mode (mode, op0, 0);
2580 :
2581 2 : create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2582 2 : if (maybe_legitimize_operands (icode, 0, 1, ops)
2583 2 : && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2584 1 : return ops[0].value;
2585 :
2586 1 : delete_insns_since (last);
2587 1 : CALL_EXPR_ARG (exp, 0) = orig_arg;
2588 : }
2589 :
2590 : return NULL_RTX;
2591 : }
2592 :
2593 : /* Expand a call to the builtin sincos math function.
2594 : Return NULL_RTX if a normal call should be emitted rather than expanding the
2595 : function in-line. EXP is the expression that is a call to the builtin
2596 : function. */
2597 :
2598 : static rtx
2599 3 : expand_builtin_sincos (tree exp)
2600 : {
2601 3 : rtx op0, op1, op2, target1, target2;
2602 3 : machine_mode mode;
2603 3 : tree arg, sinp, cosp;
2604 3 : int result;
2605 3 : location_t loc = EXPR_LOCATION (exp);
2606 3 : tree alias_type, alias_off;
2607 :
2608 3 : if (!validate_arglist (exp, REAL_TYPE,
2609 : POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2610 : return NULL_RTX;
2611 :
2612 3 : arg = CALL_EXPR_ARG (exp, 0);
2613 3 : sinp = CALL_EXPR_ARG (exp, 1);
2614 3 : cosp = CALL_EXPR_ARG (exp, 2);
2615 :
2616 : /* Make a suitable register to place result in. */
2617 3 : mode = TYPE_MODE (TREE_TYPE (arg));
2618 :
2619 : /* Check if sincos insn is available, otherwise emit the call. */
2620 3 : if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2621 : return NULL_RTX;
2622 :
2623 0 : target1 = gen_reg_rtx (mode);
2624 0 : target2 = gen_reg_rtx (mode);
2625 :
2626 0 : op0 = expand_normal (arg);
2627 0 : alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2628 0 : alias_off = build_int_cst (alias_type, 0);
2629 0 : op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2630 : sinp, alias_off));
2631 0 : op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2632 : cosp, alias_off));
2633 :
2634 : /* Compute into target1 and target2.
2635 : Set TARGET to wherever the result comes back. */
2636 0 : result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2637 0 : gcc_assert (result);
2638 :
2639 : /* Move target1 and target2 to the memory locations indicated
2640 : by op1 and op2. */
2641 0 : emit_move_insn (op1, target1);
2642 0 : emit_move_insn (op2, target2);
2643 :
2644 0 : return const0_rtx;
2645 : }
2646 :
2647 : /* Expand call EXP to the fegetround builtin (from C99 fenv.h), returning the
2648 :    result and setting it in TARGET.  Return NULL_RTX on failure.  */
2649 : static rtx
2650 60 : expand_builtin_fegetround (tree exp, rtx target, machine_mode target_mode)
2651 : {
2652 60 : if (!validate_arglist (exp, VOID_TYPE))
2653 : return NULL_RTX;
2654 :
2655 60 : insn_code icode = direct_optab_handler (fegetround_optab, SImode);
2656 60 : if (icode == CODE_FOR_nothing)
2657 : return NULL_RTX;
2658 :
2659 0 : if (target == 0
2660 0 : || GET_MODE (target) != target_mode
2661 0 : || !(*insn_data[icode].operand[0].predicate) (target, target_mode))
2662 0 : target = gen_reg_rtx (target_mode);
2663 :
2664 0 : rtx pat = GEN_FCN (icode) (target);
2665 0 : if (!pat)
2666 : return NULL_RTX;
2667 0 : emit_insn (pat);
2668 :
2669 0 : return target;
2670 : }
2671 :
2672 : /* Expand call EXP to either feclearexcept or feraiseexcept builtins (from C99
2673 :    fenv.h), returning the result and setting it in TARGET.  Return
2674 :    NULL_RTX on failure.  */
2675 : static rtx
2676 1894 : expand_builtin_feclear_feraise_except (tree exp, rtx target,
2677 : machine_mode target_mode, optab op_optab)
2678 : {
2679 1894 : if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
2680 : return NULL_RTX;
2681 1894 : rtx op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
2682 :
2683 1894 : insn_code icode = direct_optab_handler (op_optab, SImode);
2684 1894 : if (icode == CODE_FOR_nothing)
2685 : return NULL_RTX;
2686 :
2687 0 : if (!(*insn_data[icode].operand[1].predicate) (op0, GET_MODE (op0)))
2688 : return NULL_RTX;
2689 :
2690 0 : if (target == 0
2691 0 : || GET_MODE (target) != target_mode
2692 0 : || !(*insn_data[icode].operand[0].predicate) (target, target_mode))
2693 0 : target = gen_reg_rtx (target_mode);
2694 :
2695 0 : rtx pat = GEN_FCN (icode) (target, op0);
2696 0 : if (!pat)
2697 : return NULL_RTX;
2698 0 : emit_insn (pat);
2699 :
2700 0 : return target;
2701 : }
2702 :
2703 : /* Expand a call to the internal cexpi builtin to the sincos math function.
2704 : EXP is the expression that is a call to the builtin function; if convenient,
2705 : the result should be placed in TARGET. */
2706 :
2707 : static rtx
2708 159 : expand_builtin_cexpi (tree exp, rtx target)
2709 : {
2710 159 : tree fndecl = get_callee_fndecl (exp);
2711 159 : tree arg, type;
2712 159 : machine_mode mode;
2713 159 : rtx op0, op1, op2;
2714 159 : location_t loc = EXPR_LOCATION (exp);
2715 :
2716 159 : if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2717 : return NULL_RTX;
2718 :
2719 159 : arg = CALL_EXPR_ARG (exp, 0);
2720 159 : type = TREE_TYPE (arg);
2721 159 : mode = TYPE_MODE (TREE_TYPE (arg));
2722 :
2723 : /* Try expanding via a sincos optab, fall back to emitting a libcall
2724 :      to sincos or cexp.  We are sure we have sincos or cexp because cexpi
2725 :      is only generated when either of them is available.  */
2726 159 : if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2727 : {
2728 6 : op1 = gen_reg_rtx (mode);
2729 6 : op2 = gen_reg_rtx (mode);
2730 :
2731 6 : op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2732 :
2733 : /* Compute into op1 and op2. */
2734 6 : expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2735 : }
2736 153 : else if (targetm.libc_has_function (function_sincos, type))
2737 : {
2738 153 : tree call, fn = NULL_TREE;
2739 153 : tree top1, top2;
2740 153 : rtx op1a, op2a;
2741 :
2742 153 : if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2743 52 : fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2744 101 : else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2745 82 : fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2746 19 : else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2747 19 : fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2748 : else
2749 0 : gcc_unreachable ();
2750 :
2751 153 : op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2752 153 : op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2753 153 : op1a = copy_addr_to_reg (XEXP (op1, 0));
2754 153 : op2a = copy_addr_to_reg (XEXP (op2, 0));
2755 153 : top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2756 153 : top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2757 :
2758 : /* Make sure not to fold the sincos call again. */
2759 153 : call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2760 153 : expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2761 : call, 3, arg, top1, top2));
2762 : }
2763 : else
2764 : {
2765 0 : tree call, fn = NULL_TREE, narg;
2766 0 : tree ctype = build_complex_type (type);
2767 :
2768 0 : if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2769 0 : fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2770 0 : else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2771 0 : fn = builtin_decl_explicit (BUILT_IN_CEXP);
2772 0 : else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2773 0 : fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2774 : else
2775 0 : gcc_unreachable ();
2776 :
2777 :       /* If we don't have a decl for cexp, create one.  This is the
2778 : friendliest fallback if the user calls __builtin_cexpi
2779 : without full target C99 function support. */
2780 0 : if (fn == NULL_TREE)
2781 : {
2782 0 : tree fntype;
2783 0 : const char *name = NULL;
2784 :
2785 0 : if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2786 : name = "cexpf";
2787 0 : else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2788 : name = "cexp";
2789 0 : else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2790 0 : name = "cexpl";
2791 :
2792 0 : fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2793 0 : fn = build_fn_decl (name, fntype);
2794 : }
2795 :
2796 0 : narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2797 : build_real (type, dconst0), arg);
2798 :
2799 : /* Make sure not to fold the cexp call again. */
2800 0 : call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2801 0 : return expand_expr (build_call_nary (ctype, call, 1, narg),
2802 0 : target, VOIDmode, EXPAND_NORMAL);
2803 : }
2804 :
2805 : /* Now build the proper return type. */
2806 318 : return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2807 159 : make_tree (TREE_TYPE (arg), op2),
2808 159 : make_tree (TREE_TYPE (arg), op1)),
2809 159 : target, VOIDmode, EXPAND_NORMAL);
2810 : }
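/* The lowering above at source level (editorial sketch): since
   cexpi (x) == cos (x) + i*sin (x), a call

     _Complex double r = __builtin_cexpi (x);

   becomes, on the sincos-libcall path,

     double s, c;
     sincos (x, &s, &c);
     r = c + s*1.0i;

   matching the final COMPLEX_EXPR built from op2 (cos) and op1 (sin).  */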
2811 :
2812 : /* Conveniently construct a function call expression. FNDECL names the
2813 : function to be called, N is the number of arguments, and the "..."
2814 :    parameters are the argument expressions.  Unlike build_call_expr
2815 : this doesn't fold the call, hence it will always return a CALL_EXPR. */
2816 :
2817 : static tree
2818 136084 : build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2819 : {
2820 136084 : va_list ap;
2821 136084 : tree fntype = TREE_TYPE (fndecl);
2822 136084 : tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2823 :
2824 136084 : va_start (ap, n);
2825 136084 : fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2826 136084 : va_end (ap);
2827 136084 : SET_EXPR_LOCATION (fn, loc);
2828 136084 : return fn;
2829 : }
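/* Example use (editorial sketch; DEST, SRC, LEN and LOC assumed in scope):
   build an unfolded call back to the memcpy library function, as the string
   expanders do when punting to a libcall:

     tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
     tree call = build_call_nofold_loc (loc, fn, 3, dest, src, len);
*/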
2830 :
2831 : /* Expand the __builtin_issignaling builtin. This needs to handle
2832 : all floating point formats that do support NaNs (for those that
2833 :    all floating-point formats that do support NaNs (for those that
2834 :    don't, it just sets target to 0).  */
2835 : static rtx
2836 823 : expand_builtin_issignaling (tree exp, rtx target)
2837 : {
2838 823 : if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2839 : return NULL_RTX;
2840 :
2841 823 : tree arg = CALL_EXPR_ARG (exp, 0);
2842 823 : scalar_float_mode fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
2843 823 : const struct real_format *fmt = REAL_MODE_FORMAT (fmode);
2844 :
2845 : /* Expand the argument yielding a RTX expression. */
2846 823 : rtx temp = expand_normal (arg);
2847 :
2848 : /* If mode doesn't support NaN, always return 0.
2849 : Don't use !HONOR_SNANS (fmode) here, so there is some possibility of
2850 : __builtin_issignaling working without -fsignaling-nans. Especially
2851 : when -fno-signaling-nans is the default.
2852 :      On the other hand, MODE_HAS_NANS (fmode) is unnecessary: with
2853 :      -ffinite-math-only even __builtin_isnan or __builtin_fpclassify
2854 :      folds to 0 or a non-NaN/Inf classification.  */
2855 823 : if (!HONOR_NANS (fmode))
2856 : {
2857 0 : emit_move_insn (target, const0_rtx);
2858 0 : return target;
2859 : }
2860 :
2861 : /* Check if the back end provides an insn that handles issignaling for the
2862 : argument's mode. */
2863 823 : enum insn_code icode = optab_handler (issignaling_optab, fmode);
2864 823 : if (icode != CODE_FOR_nothing)
2865 : {
2866 155 : rtx_insn *last = get_last_insn ();
2867 155 : rtx this_target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2868 155 : if (maybe_emit_unop_insn (icode, this_target, temp, UNKNOWN))
2869 : return this_target;
2870 0 : delete_insns_since (last);
2871 : }
2872 :
2873 668 : if (DECIMAL_FLOAT_MODE_P (fmode))
2874 : {
2875 63 : scalar_int_mode imode;
2876 63 : rtx hi;
2877 63 : switch (fmt->ieee_bits)
2878 : {
2879 42 : case 32:
2880 42 : case 64:
2881 42 : imode = int_mode_for_mode (fmode).require ();
2882 42 : temp = gen_lowpart (imode, temp);
2883 42 : break;
2884 21 : case 128:
2885 21 : imode = int_mode_for_size (64, 1).require ();
2886 21 : hi = NULL_RTX;
2887 : /* For decimal128, TImode support isn't always there and even when
2888 : it is, working on the DImode high part is usually better. */
2889 21 : if (!MEM_P (temp))
2890 : {
2891 6 : if (rtx t = force_highpart_subreg (imode, temp, fmode))
2892 : hi = t;
2893 : else
2894 : {
2895 0 : scalar_int_mode imode2;
2896 0 : if (int_mode_for_mode (fmode).exists (&imode2))
2897 : {
2898 0 : rtx temp2 = gen_lowpart (imode2, temp);
2899 0 : if (rtx t = force_highpart_subreg (imode, temp2, imode2))
2900 : hi = t;
2901 : }
2902 : }
2903 0 : if (!hi)
2904 : {
2905 0 : rtx mem = assign_stack_temp (fmode, GET_MODE_SIZE (fmode));
2906 0 : emit_move_insn (mem, temp);
2907 0 : temp = mem;
2908 : }
2909 : }
2910 0 : if (!hi)
2911 : {
2912 15 : poly_int64 offset
2913 15 : = subreg_highpart_offset (imode, GET_MODE (temp));
2914 15 : hi = adjust_address (temp, imode, offset);
2915 : }
2916 : temp = hi;
2917 : break;
2918 0 : default:
2919 0 : gcc_unreachable ();
2920 : }
2921 :     /* In all of decimal{32,64,128}, the MSB is the sign bit and an sNaN
2922 :        has the 6 bits below it all set.  */
2923 63 : rtx val
2924 63 : = GEN_INT (HOST_WIDE_INT_C (0x3f) << (GET_MODE_BITSIZE (imode) - 7));
2925 63 : temp = expand_binop (imode, and_optab, temp, val,
2926 : NULL_RTX, 1, OPTAB_LIB_WIDEN);
2927 63 : temp = emit_store_flag_force (target, EQ, temp, val, imode, 1, 1);
2928 63 : return temp;
2929 : }
2930 :
2931 : /* Only PDP11 has these defined differently but doesn't support NaNs. */
2932 605 : gcc_assert (FLOAT_WORDS_BIG_ENDIAN == WORDS_BIG_ENDIAN);
2933 605 : gcc_assert (fmt->signbit_ro > 0 && fmt->b == 2);
2934 4235 : gcc_assert (MODE_COMPOSITE_P (fmode)
2935 : || (fmt->pnan == fmt->p
2936 : && fmt->signbit_ro == fmt->signbit_rw));
2937 :
2938 605 : switch (fmt->p)
2939 : {
2940 0 : case 106: /* IBM double double */
2941 : /* For IBM double double, recurse on the most significant double. */
2942 0 : gcc_assert (MODE_COMPOSITE_P (fmode));
2943 0 : temp = convert_modes (DFmode, fmode, temp, 0);
2944 0 : fmode = DFmode;
2945 0 : fmt = REAL_MODE_FORMAT (DFmode);
2946 : /* FALLTHRU */
2947 505 : case 8: /* bfloat */
2948 505 : case 11: /* IEEE half */
2949 505 : case 24: /* IEEE single */
2950 505 : case 53: /* IEEE double or Intel extended with rounding to double */
2951 505 : if (fmt->p == 53 && fmt->signbit_ro == 79)
2952 0 : goto extended;
2953 505 : {
2954 505 : scalar_int_mode imode = int_mode_for_mode (fmode).require ();
2955 505 : temp = gen_lowpart (imode, temp);
2956 505 : rtx val = GEN_INT ((HOST_WIDE_INT_M1U << (fmt->p - 2))
2957 : & ~(HOST_WIDE_INT_M1U << fmt->signbit_ro));
2958 505 : if (fmt->qnan_msb_set)
2959 : {
2960 505 : rtx mask = GEN_INT (~(HOST_WIDE_INT_M1U << fmt->signbit_ro));
2961 505 : rtx bit = GEN_INT (HOST_WIDE_INT_1U << (fmt->p - 2));
2962 : /* For non-MIPS/PA IEEE single/double/half or bfloat, expand to:
2963 : ((temp ^ bit) & mask) > val. */
2964 505 : temp = expand_binop (imode, xor_optab, temp, bit,
2965 : NULL_RTX, 1, OPTAB_LIB_WIDEN);
2966 505 : temp = expand_binop (imode, and_optab, temp, mask,
2967 : NULL_RTX, 1, OPTAB_LIB_WIDEN);
2968 505 : temp = emit_store_flag_force (target, GTU, temp, val, imode,
2969 : 1, 1);
2970 : }
2971 : else
2972 : {
2973 : /* For MIPS/PA IEEE single/double, expand to:
2974 : (temp & val) == val. */
2975 0 : temp = expand_binop (imode, and_optab, temp, val,
2976 : NULL_RTX, 1, OPTAB_LIB_WIDEN);
2977 0 : temp = emit_store_flag_force (target, EQ, temp, val, imode,
2978 : 1, 1);
2979 : }
2980 : }
2981 : break;
2982 100 : case 113: /* IEEE quad */
2983 100 : {
2984 100 : rtx hi = NULL_RTX, lo = NULL_RTX;
2985 100 : scalar_int_mode imode = int_mode_for_size (64, 1).require ();
2986 : /* For IEEE quad, TImode support isn't always there and even when
2987 : it is, working on DImode parts is usually better. */
2988 100 : if (!MEM_P (temp))
2989 : {
2990 85 : hi = force_highpart_subreg (imode, temp, fmode);
2991 85 : lo = force_lowpart_subreg (imode, temp, fmode);
2992 85 : if (!hi || !lo)
2993 : {
2994 0 : scalar_int_mode imode2;
2995 0 : if (int_mode_for_mode (fmode).exists (&imode2))
2996 : {
2997 0 : rtx temp2 = gen_lowpart (imode2, temp);
2998 0 : hi = force_highpart_subreg (imode, temp2, imode2);
2999 0 : lo = force_lowpart_subreg (imode, temp2, imode2);
3000 : }
3001 : }
3002 85 : if (!hi || !lo)
3003 : {
3004 0 : rtx mem = assign_stack_temp (fmode, GET_MODE_SIZE (fmode));
3005 0 : emit_move_insn (mem, temp);
3006 0 : temp = mem;
3007 : }
3008 : }
3009 100 : if (!hi || !lo)
3010 : {
3011 15 : poly_int64 offset
3012 15 : = subreg_highpart_offset (imode, GET_MODE (temp));
3013 15 : hi = adjust_address (temp, imode, offset);
3014 15 : offset = subreg_lowpart_offset (imode, GET_MODE (temp));
3015 15 : lo = adjust_address (temp, imode, offset);
3016 : }
3017 100 : rtx val = GEN_INT ((HOST_WIDE_INT_M1U << (fmt->p - 2 - 64))
3018 : & ~(HOST_WIDE_INT_M1U << (fmt->signbit_ro - 64)));
3019 100 : if (fmt->qnan_msb_set)
3020 : {
3021 100 : rtx mask = GEN_INT (~(HOST_WIDE_INT_M1U << (fmt->signbit_ro
3022 : - 64)));
3023 100 : rtx bit = GEN_INT (HOST_WIDE_INT_1U << (fmt->p - 2 - 64));
3024 : /* For non-MIPS/PA IEEE quad, expand to:
3025 : (((hi ^ bit) | ((lo | -lo) >> 63)) & mask) > val. */
3026 100 : rtx nlo = expand_unop (imode, neg_optab, lo, NULL_RTX, 0);
3027 100 : lo = expand_binop (imode, ior_optab, lo, nlo,
3028 : NULL_RTX, 1, OPTAB_LIB_WIDEN);
3029 100 : lo = expand_shift (RSHIFT_EXPR, imode, lo, 63, NULL_RTX, 1);
3030 100 : temp = expand_binop (imode, xor_optab, hi, bit,
3031 : NULL_RTX, 1, OPTAB_LIB_WIDEN);
3032 100 : temp = expand_binop (imode, ior_optab, temp, lo,
3033 : NULL_RTX, 1, OPTAB_LIB_WIDEN);
3034 100 : temp = expand_binop (imode, and_optab, temp, mask,
3035 : NULL_RTX, 1, OPTAB_LIB_WIDEN);
3036 100 : temp = emit_store_flag_force (target, GTU, temp, val, imode,
3037 : 1, 1);
3038 : }
3039 : else
3040 : {
3041 : /* For MIPS/PA IEEE quad, expand to:
3042 : (hi & val) == val. */
3043 0 : temp = expand_binop (imode, and_optab, hi, val,
3044 : NULL_RTX, 1, OPTAB_LIB_WIDEN);
3045 0 : temp = emit_store_flag_force (target, EQ, temp, val, imode,
3046 : 1, 1);
3047 : }
3048 : }
3049 : break;
3050 0 : case 64: /* Intel or Motorola extended */
3051 0 : extended:
3052 0 : {
3053 0 : rtx ex, hi, lo;
3054 0 : scalar_int_mode imode = int_mode_for_size (32, 1).require ();
3055 0 : scalar_int_mode iemode = int_mode_for_size (16, 1).require ();
3056 0 : if (!MEM_P (temp))
3057 : {
3058 0 : rtx mem = assign_stack_temp (fmode, GET_MODE_SIZE (fmode));
3059 0 : emit_move_insn (mem, temp);
3060 0 : temp = mem;
3061 : }
3062 0 : if (fmt->signbit_ro == 95)
3063 : {
3064 : /* Motorola, always big endian, with a 16-bit gap between the
3065 : 16-bit sign+exponent and the 64-bit mantissa. */
3066 0 : ex = adjust_address (temp, iemode, 0);
3067 0 : hi = adjust_address (temp, imode, 4);
3068 0 : lo = adjust_address (temp, imode, 8);
3069 : }
3070 0 : else if (!WORDS_BIG_ENDIAN)
3071 : {
3072 : /* Intel little endian, 64-bit mantissa followed by 16-bit
3073 : sign+exponent and then either 16 or 48 bits of gap. */
3074 0 : ex = adjust_address (temp, iemode, 8);
3075 0 : hi = adjust_address (temp, imode, 4);
3076 0 : lo = adjust_address (temp, imode, 0);
3077 : }
3078 : else
3079 : {
3080 : /* Big endian Itanium. */
3081 : ex = adjust_address (temp, iemode, 0);
3082 : hi = adjust_address (temp, imode, 2);
3083 : lo = adjust_address (temp, imode, 6);
3084 : }
3085 0 : rtx val = GEN_INT (HOST_WIDE_INT_M1U << 30);
3086 0 : gcc_assert (fmt->qnan_msb_set);
3087 0 : rtx mask = GEN_INT (0x7fff);
3088 0 : rtx bit = GEN_INT (HOST_WIDE_INT_1U << 30);
3089 : /* For Intel/Motorola extended format, expand to:
3090 : (ex & mask) == mask && ((hi ^ bit) | ((lo | -lo) >> 31)) > val. */
3091 0 : rtx nlo = expand_unop (imode, neg_optab, lo, NULL_RTX, 0);
3092 0 : lo = expand_binop (imode, ior_optab, lo, nlo,
3093 : NULL_RTX, 1, OPTAB_LIB_WIDEN);
3094 0 : lo = expand_shift (RSHIFT_EXPR, imode, lo, 31, NULL_RTX, 1);
3095 0 : temp = expand_binop (imode, xor_optab, hi, bit,
3096 : NULL_RTX, 1, OPTAB_LIB_WIDEN);
3097 0 : temp = expand_binop (imode, ior_optab, temp, lo,
3098 : NULL_RTX, 1, OPTAB_LIB_WIDEN);
3099 0 : temp = emit_store_flag_force (target, GTU, temp, val, imode, 1, 1);
3100 0 : ex = expand_binop (iemode, and_optab, ex, mask,
3101 : NULL_RTX, 1, OPTAB_LIB_WIDEN);
3102 0 : ex = emit_store_flag_force (gen_reg_rtx (GET_MODE (temp)), EQ,
3103 : ex, mask, iemode, 1, 1);
3104 0 : temp = expand_binop (GET_MODE (temp), and_optab, temp, ex,
3105 : NULL_RTX, 1, OPTAB_LIB_WIDEN);
3106 : }
3107 0 : break;
3108 0 : default:
3109 0 : gcc_unreachable ();
3110 : }
3111 :
3112 : return temp;
3113 : }
3114 :
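/* For reference, a hedged sketch (hypothetical helper) of what the
   qnan_msb_set path above computes for IEEE single, where fmt->p == 24
   and fmt->signbit_ro == 31:

     static int
     sf_issignaling (uint32_t bits)         // requires <stdint.h>
     {
       uint32_t bit = (uint32_t) 1 << 22;   // quiet-NaN bit
       uint32_t mask = 0x7fffffff;          // drop the sign bit
       uint32_t val = 0x7fc00000;           // smallest qNaN
       return ((bits ^ bit) & mask) > val;
     }

   Flipping the quiet bit moves sNaNs strictly above VAL, while qNaNs,
   infinities and finite values land at or below it.  */
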
3115 : /* Expand a call to one of the builtin rounding functions gcc defines
3116 : as an extension (lfloor and lceil). As these are gcc extensions we
3117 : do not need to worry about setting errno to EDOM.
3118 : If expanding via the optab fails, lower the expression to (int)(floor(x)).
3119 : EXP is the expression that is a call to the builtin function;
3120 : if convenient, the result should be placed in TARGET. */
3121 :
3122 : static rtx
3123 235 : expand_builtin_int_roundingfn (tree exp, rtx target)
3124 : {
3125 235 : convert_optab builtin_optab;
3126 235 : rtx op0, tmp;
3127 235 : rtx_insn *insns;
3128 235 : tree fndecl = get_callee_fndecl (exp);
3129 235 : enum built_in_function fallback_fn;
3130 235 : tree fallback_fndecl;
3131 235 : machine_mode mode;
3132 235 : tree arg;
3133 :
3134 235 : if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
3135 : return NULL_RTX;
3136 :
3137 235 : arg = CALL_EXPR_ARG (exp, 0);
3138 :
3139 235 : switch (DECL_FUNCTION_CODE (fndecl))
3140 : {
3141 : CASE_FLT_FN (BUILT_IN_ICEIL):
3142 : CASE_FLT_FN (BUILT_IN_LCEIL):
3143 : CASE_FLT_FN (BUILT_IN_LLCEIL):
3144 : builtin_optab = lceil_optab;
3145 : fallback_fn = BUILT_IN_CEIL;
3146 : break;
3147 :
3148 116 : CASE_FLT_FN (BUILT_IN_IFLOOR):
3149 116 : CASE_FLT_FN (BUILT_IN_LFLOOR):
3150 116 : CASE_FLT_FN (BUILT_IN_LLFLOOR):
3151 116 : builtin_optab = lfloor_optab;
3152 116 : fallback_fn = BUILT_IN_FLOOR;
3153 116 : break;
3154 :
3155 0 : default:
3156 0 : gcc_unreachable ();
3157 : }
3158 :
3159 : /* Make a suitable register to place result in. */
3160 235 : mode = TYPE_MODE (TREE_TYPE (exp));
3161 :
3162 235 : target = gen_reg_rtx (mode);
3163 :
3164 : /* Wrap the computation of the argument in a SAVE_EXPR, as we may
3165 : need to expand the argument again. This way, we will not perform
3166 : side-effects more than once. */
3167 235 : CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
3168 :
3169 235 : op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
3170 :
3171 235 : start_sequence ();
3172 :
3173 : /* Compute into TARGET. */
3174 235 : if (expand_sfix_optab (target, op0, builtin_optab))
3175 : {
3176 : /* Output the entire sequence. */
3177 40 : insns = end_sequence ();
3178 40 : emit_insn (insns);
3179 40 : return target;
3180 : }
3181 :
3182 : /* If we were unable to expand via the builtin, stop the sequence
3183 : (without outputting the insns). */
3184 195 : end_sequence ();
3185 :
3186 : /* Fall back to floating point rounding optab. */
3187 195 : fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
3188 :
3189 : /* For non-C99 targets we may end up without a fallback fndecl here
3190 : if the user called __builtin_lfloor directly. In that case emit
3191 : a call to the floor/ceil variant nevertheless. This should give
3192 : the best user experience on targets without full C99 support. */
3193 195 : if (fallback_fndecl == NULL_TREE)
3194 : {
3195 0 : tree fntype;
3196 0 : const char *name = NULL;
3197 :
3198 0 : switch (DECL_FUNCTION_CODE (fndecl))
3199 : {
3200 : case BUILT_IN_ICEIL:
3201 : case BUILT_IN_LCEIL:
3202 : case BUILT_IN_LLCEIL:
3203 : name = "ceil";
3204 : break;
3205 0 : case BUILT_IN_ICEILF:
3206 0 : case BUILT_IN_LCEILF:
3207 0 : case BUILT_IN_LLCEILF:
3208 0 : name = "ceilf";
3209 0 : break;
3210 0 : case BUILT_IN_ICEILL:
3211 0 : case BUILT_IN_LCEILL:
3212 0 : case BUILT_IN_LLCEILL:
3213 0 : name = "ceill";
3214 0 : break;
3215 0 : case BUILT_IN_IFLOOR:
3216 0 : case BUILT_IN_LFLOOR:
3217 0 : case BUILT_IN_LLFLOOR:
3218 0 : name = "floor";
3219 0 : break;
3220 0 : case BUILT_IN_IFLOORF:
3221 0 : case BUILT_IN_LFLOORF:
3222 0 : case BUILT_IN_LLFLOORF:
3223 0 : name = "floorf";
3224 0 : break;
3225 0 : case BUILT_IN_IFLOORL:
3226 0 : case BUILT_IN_LFLOORL:
3227 0 : case BUILT_IN_LLFLOORL:
3228 0 : name = "floorl";
3229 0 : break;
3230 0 : default:
3231 0 : gcc_unreachable ();
3232 : }
3233 :
3234 0 : fntype = build_function_type_list (TREE_TYPE (arg),
3235 0 : TREE_TYPE (arg), NULL_TREE);
3236 0 : fallback_fndecl = build_fn_decl (name, fntype);
3237 : }
3238 :
3239 195 : exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
3240 :
3241 195 : tmp = expand_normal (exp);
3242 195 : tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
3243 :
3244 : /* Truncate the result of floating point optab to integer
3245 : via expand_fix (). */
3246 195 : target = gen_reg_rtx (mode);
3247 195 : expand_fix (target, tmp, 0);
3248 :
3249 195 : return target;
3250 : }
3251 :
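/* In source terms the fallback path above behaves like (a sketch with
   a hypothetical name, assuming the C library provides floor):

     #include <math.h>

     long
     lfloor_fallback (double x)
     {
       return (long) floor (x);   // expand_fix () does the truncation
     }
 */
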
3252 : /* Expand a call to one of the builtin math functions doing integer
3253 : conversion (lrint).
3254 : Return 0 if a normal call should be emitted rather than expanding the
3255 : function in-line. EXP is the expression that is a call to the builtin
3256 : function; if convenient, the result should be placed in TARGET. */
3257 :
3258 : static rtx
3259 592 : expand_builtin_int_roundingfn_2 (tree exp, rtx target)
3260 : {
3261 592 : convert_optab builtin_optab;
3262 592 : rtx op0;
3263 592 : rtx_insn *insns;
3264 592 : tree fndecl = get_callee_fndecl (exp);
3265 592 : tree arg;
3266 592 : machine_mode mode;
3267 592 : enum built_in_function fallback_fn = BUILT_IN_NONE;
3268 :
3269 592 : if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
3270 : return NULL_RTX;
3271 :
3272 492 : arg = CALL_EXPR_ARG (exp, 0);
3273 :
3274 492 : switch (DECL_FUNCTION_CODE (fndecl))
3275 : {
3276 8 : CASE_FLT_FN (BUILT_IN_IRINT):
3277 8 : fallback_fn = BUILT_IN_LRINT;
3278 : gcc_fallthrough ();
3279 : CASE_FLT_FN (BUILT_IN_LRINT):
3280 : CASE_FLT_FN (BUILT_IN_LLRINT):
3281 : builtin_optab = lrint_optab;
3282 : break;
3283 :
3284 223 : CASE_FLT_FN (BUILT_IN_IROUND):
3285 223 : fallback_fn = BUILT_IN_LROUND;
3286 : gcc_fallthrough ();
3287 : CASE_FLT_FN (BUILT_IN_LROUND):
3288 : CASE_FLT_FN (BUILT_IN_LLROUND):
3289 : builtin_optab = lround_optab;
3290 : break;
3291 :
3292 0 : default:
3293 0 : gcc_unreachable ();
3294 : }
3295 :
3296 : /* There's no easy way to detect the case we need to set EDOM. */
3297 492 : if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
3298 : return NULL_RTX;
3299 :
3300 : /* Make a suitable register to place result in. */
3301 261 : mode = TYPE_MODE (TREE_TYPE (exp));
3302 :
3303 : /* There's no easy way to detect the case we need to set EDOM. */
3304 261 : if (!flag_errno_math)
3305 : {
3306 261 : rtx result = gen_reg_rtx (mode);
3307 :
3308 : /* Wrap the computation of the argument in a SAVE_EXPR, as we may
3309 : need to expand the argument again. This way, we will not perform
3310 : side-effects more than once. */
3311 261 : CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
3312 :
3313 261 : op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
3314 :
3315 261 : start_sequence ();
3316 :
3317 261 : if (expand_sfix_optab (result, op0, builtin_optab))
3318 : {
3319 : /* Output the entire sequence. */
3320 24 : insns = end_sequence ();
3321 24 : emit_insn (insns);
3322 24 : return result;
3323 : }
3324 :
3325 : /* If we were unable to expand via the builtin, stop the sequence
3326 : (without outputting the insns) and call to the library function
3327 : with the stabilized argument list. */
3328 237 : end_sequence ();
3329 : }
3330 :
3331 237 : if (fallback_fn != BUILT_IN_NONE)
3332 : {
3333 : /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
3334 : targets, (int) round (x) should never be transformed into
3335 : BUILT_IN_IROUND and if __builtin_iround is called directly, emit
3336 : a call to lround in the hope that the target provides at least some
3337 : C99 functions. This should give the best user experience on
3338 : targets without full C99 support.
3339 : Scalar float conversions between types with the same mode are
3340 : useless in GIMPLE, so we can end up e.g. with a _Float32 argument
3341 : passed to a float builtin; try to get the type from the prototype first. */
3342 223 : tree fallback_fndecl = NULL_TREE;
3343 223 : if (tree argtypes = TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
3344 223 : fallback_fndecl
3345 223 : = mathfn_built_in_1 (TREE_VALUE (argtypes),
3346 : as_combined_fn (fallback_fn), 0);
3347 223 : if (fallback_fndecl == NULL_TREE)
3348 0 : fallback_fndecl
3349 0 : = mathfn_built_in_1 (TREE_TYPE (arg),
3350 : as_combined_fn (fallback_fn), 0);
3351 0 : if (fallback_fndecl)
3352 : {
3353 223 : exp = build_call_nofold_loc (EXPR_LOCATION (exp),
3354 : fallback_fndecl, 1, arg);
3355 :
3356 223 : target = expand_call (exp, NULL_RTX, target == const0_rtx);
3357 223 : target = maybe_emit_group_store (target, TREE_TYPE (exp));
3358 223 : return convert_to_mode (mode, target, 0);
3359 : }
3360 : }
3361 :
3362 14 : return expand_call (exp, target, target == const0_rtx);
3363 : }
3364 :
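/* Similarly, a sketch of the BUILT_IN_IROUND fallback above: the call
   is rewritten to the long variant and the result narrowed, roughly

     #include <math.h>

     int
     iround_fallback (double x)   // hypothetical name
     {
       return (int) lround (x);   // convert_to_mode () narrows back
     }
 */
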
3365 : /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3366 : a normal call should be emitted rather than expanding the function
3367 : in-line. EXP is the expression that is a call to the builtin
3368 : function; if convenient, the result should be placed in TARGET. */
3369 :
3370 : static rtx
3371 292 : expand_builtin_powi (tree exp, rtx target)
3372 : {
3373 292 : tree arg0, arg1;
3374 292 : rtx op0, op1;
3375 292 : machine_mode mode;
3376 292 : machine_mode mode2;
3377 :
3378 292 : if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3379 : return NULL_RTX;
3380 :
3381 292 : arg0 = CALL_EXPR_ARG (exp, 0);
3382 292 : arg1 = CALL_EXPR_ARG (exp, 1);
3383 292 : mode = TYPE_MODE (TREE_TYPE (exp));
3384 :
3385 : /* Emit a libcall to libgcc. */
3386 :
3387 : /* Mode of the 2nd argument must match that of an int. */
3388 292 : mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
3389 :
3390 292 : if (target == NULL_RTX)
3391 0 : target = gen_reg_rtx (mode);
3392 :
3393 292 : op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
3394 292 : if (GET_MODE (op0) != mode)
3395 0 : op0 = convert_to_mode (mode, op0, 0);
3396 292 : op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3397 292 : if (GET_MODE (op1) != mode2)
3398 52 : op1 = convert_to_mode (mode2, op1, 0);
3399 :
3400 292 : target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3401 : target, LCT_CONST, mode,
3402 : op0, mode, op1, mode2);
3403 :
3404 292 : return target;
3405 : }
3406 :
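/* The libcall resolved through powi_optab is libgcc's __powi*2 family;
   e.g. for DFmode the expansion behaves roughly like (a sketch):

     extern double __powidf2 (double, int);   // libgcc routine

     double
     powi_example (double x)
     {
       return __powidf2 (x, 5);   // __builtin_powi (x, 5)
     }
 */
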
3407 : /* Expand expression EXP which is a call to the strlen builtin. Return
3408 : NULL_RTX if we failed and the caller should emit a normal call, otherwise
3409 : try to get the result in TARGET, if convenient. */
3410 :
3411 : static rtx
3412 13749 : expand_builtin_strlen (tree exp, rtx target,
3413 : machine_mode target_mode)
3414 : {
3415 13749 : if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3416 : return NULL_RTX;
3417 :
3418 13729 : tree src = CALL_EXPR_ARG (exp, 0);
3419 :
3420 : /* If the length can be computed at compile-time, return it. */
3421 13729 : if (tree len = c_strlen (src, 0))
3422 103 : return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3423 :
3424 : /* If the length can be computed at compile-time and is constant
3425 : integer, but there are side-effects in src, evaluate
3426 : src for side-effects, then return len.
3427 : E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3428 : can be optimized into: i++; x = 3; */
3429 13626 : tree len = c_strlen (src, 1);
3430 13626 : if (len && TREE_CODE (len) == INTEGER_CST)
3431 : {
3432 0 : expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3433 0 : return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3434 : }
3435 :
3436 13626 : unsigned int align = get_pointer_alignment (src) / BITS_PER_UNIT;
3437 :
3438 : /* If SRC is not a pointer type, don't do this operation inline. */
3439 13626 : if (align == 0)
3440 : return NULL_RTX;
3441 :
3442 : /* Bail out if we can't compute strlen in the right mode. */
3443 : machine_mode insn_mode;
3444 : enum insn_code icode = CODE_FOR_nothing;
3445 13626 : FOR_EACH_MODE_FROM (insn_mode, target_mode)
3446 : {
3447 13626 : icode = optab_handler (strlen_optab, insn_mode);
3448 13626 : if (icode != CODE_FOR_nothing)
3449 : break;
3450 : }
3451 13626 : if (insn_mode == VOIDmode)
3452 : return NULL_RTX;
3453 :
3454 : /* Make a place to hold the source address. We will not expand
3455 : the actual source until we are sure that the expansion will
3456 : not fail -- there are trees that cannot be expanded twice. */
3457 14199 : rtx src_reg = gen_reg_rtx (Pmode);
3458 :
3459 : /* Mark the beginning of the strlen sequence so we can emit the
3460 : source operand later. */
3461 13626 : rtx_insn *before_strlen = get_last_insn ();
3462 :
3463 13626 : class expand_operand ops[4];
3464 13626 : create_output_operand (&ops[0], target, insn_mode);
3465 13626 : create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3466 13626 : create_integer_operand (&ops[2], 0);
3467 13626 : create_integer_operand (&ops[3], align);
3468 13626 : if (!maybe_expand_insn (icode, 4, ops))
3469 : return NULL_RTX;
3470 :
3471 : /* Check to see if the argument was declared attribute nonstring
3472 : and if so, issue a warning since at this point it's not known
3473 : to be nul-terminated. */
3474 11 : maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3475 :
3476 : /* Now that we are assured of success, expand the source. */
3477 11 : start_sequence ();
3478 15 : rtx pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3479 11 : if (pat != src_reg)
3480 : {
3481 : #ifdef POINTERS_EXTEND_UNSIGNED
3482 15 : if (GET_MODE (pat) != Pmode)
3483 0 : pat = convert_to_mode (Pmode, pat,
3484 : POINTERS_EXTEND_UNSIGNED);
3485 : #endif
3486 11 : emit_move_insn (src_reg, pat);
3487 : }
3488 11 : pat = end_sequence ();
3489 :
3490 11 : if (before_strlen)
3491 11 : emit_insn_after (pat, before_strlen);
3492 : else
3493 0 : emit_insn_before (pat, get_insns ());
3494 :
3495 : /* Return the value in the proper mode for this function. */
3496 11 : if (GET_MODE (ops[0].value) == target_mode)
3497 : target = ops[0].value;
3498 0 : else if (target != 0)
3499 0 : convert_move (target, ops[0].value, 0);
3500 : else
3501 0 : target = convert_to_mode (target_mode, ops[0].value, 0);
3502 :
3503 : return target;
3504 : }
3505 :
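/* A couple of illustrative cases for the paths above: both
   compile-time branches fold without emitting any insn, e.g.

     size_t n = __builtin_strlen ("hello");   // folds to 5

   while the inline path only triggers on targets that define a
   strlenM insn pattern (s390, for example); everywhere else the
   expander returns NULL_RTX and a library call is emitted.  */
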
3506 : /* Expand a call EXP to the strnlen built-in, returning the result
3507 : in TARGET if convenient; return NULL_RTX on failure. */
3508 :
3509 : static rtx
3510 580 : expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
3511 : {
3512 580 : if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3513 : return NULL_RTX;
3514 :
3515 571 : tree src = CALL_EXPR_ARG (exp, 0);
3516 571 : tree bound = CALL_EXPR_ARG (exp, 1);
3517 :
3518 571 : if (!bound)
3519 : return NULL_RTX;
3520 :
3521 571 : location_t loc = UNKNOWN_LOCATION;
3522 571 : if (EXPR_HAS_LOCATION (exp))
3523 571 : loc = EXPR_LOCATION (exp);
3524 :
3525 : /* FIXME: Change c_strlen() to return sizetype instead of ssizetype
3526 : so these conversions aren't necessary. */
3527 571 : c_strlen_data lendata = { };
3528 571 : tree len = c_strlen (src, 0, &lendata, 1);
3529 571 : if (len)
3530 102 : len = fold_convert_loc (loc, TREE_TYPE (bound), len);
3531 :
3532 571 : if (TREE_CODE (bound) == INTEGER_CST)
3533 : {
3534 331 : if (!len)
3535 : return NULL_RTX;
3536 :
3537 62 : len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
3538 62 : return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3539 : }
3540 :
3541 240 : if (TREE_CODE (bound) != SSA_NAME)
3542 : return NULL_RTX;
3543 :
3544 240 : wide_int min, max;
3545 240 : int_range_max r;
3546 480 : get_range_query (cfun)->range_of_expr (r, bound,
3547 : currently_expanding_gimple_stmt);
3548 240 : if (r.varying_p () || r.undefined_p ())
3549 : return NULL_RTX;
3550 135 : min = r.lower_bound ();
3551 135 : max = r.upper_bound ();
3552 :
3553 135 : if (!len || TREE_CODE (len) != INTEGER_CST)
3554 : {
3555 95 : bool exact;
3556 95 : lendata.decl = unterminated_array (src, &len, &exact);
3557 95 : if (!lendata.decl)
3558 75 : return NULL_RTX;
3559 : }
3560 :
3561 60 : if (lendata.decl)
3562 : return NULL_RTX;
3563 :
3564 40 : if (wi::gtu_p (min, wi::to_wide (len)))
3565 7 : return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3566 :
3567 33 : len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
3568 33 : return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3569 240 : }
3570 :
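/* Worked examples of the folds above (illustrative, assuming SRC has
   no side effects):

     strnlen ("hello", 3)   // constant bound: MIN (5, 3) == 3
     strnlen ("hello", b)   // b's range known to exceed 5: folds to 5

   In the second case the range of B comes from the ranger query on
   the currently expanding statement.  */
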
3571 : /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3572 : bytes starting at DATA + OFFSET and return them reinterpreted as
3573 : a target constant. */
3574 :
3575 : static rtx
3576 171925 : builtin_memcpy_read_str (void *data, void *, HOST_WIDE_INT offset,
3577 : fixed_size_mode mode)
3578 : {
3579 : /* The REPresentation pointed to by DATA need not be a nul-terminated
3580 : string but the caller guarantees it's large enough for MODE. */
3581 171925 : const char *rep = (const char *) data;
3582 :
3583 171925 : return c_readstr (rep + offset, mode, /*nul_terminated=*/false);
3584 : }
3585 :
3586 : /* LEN specifies the length of the block for the memcpy/memset operation.
3587 : Figure out its range and store it in MIN_SIZE/MAX_SIZE.
3588 : In some cases we can make a very likely guess at the maximum size,
3589 : which we then store in PROBABLE_MAX_SIZE. */
3590 :
3591 : static void
3592 176520 : determine_block_size (tree len, rtx len_rtx,
3593 : unsigned HOST_WIDE_INT *min_size,
3594 : unsigned HOST_WIDE_INT *max_size,
3595 : unsigned HOST_WIDE_INT *probable_max_size)
3596 : {
3597 176520 : if (CONST_INT_P (len_rtx))
3598 : {
3599 85605 : *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3600 85605 : return;
3601 : }
3602 : else
3603 : {
3604 90915 : wide_int min, max;
3605 90915 : enum value_range_kind range_type = VR_UNDEFINED;
3606 :
3607 : /* Determine bounds from the type. */
3608 90915 : if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3609 90914 : *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3610 : else
3611 1 : *min_size = 0;
3612 90915 : if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3613 181830 : *probable_max_size = *max_size
3614 90915 : = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3615 : else
3616 0 : *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3617 :
3618 90915 : if (TREE_CODE (len) == SSA_NAME)
3619 : {
3620 90915 : int_range_max r;
3621 90915 : tree tmin, tmax;
3622 90915 : gimple *cg = currently_expanding_gimple_stmt;
3623 181830 : get_range_query (cfun)->range_of_expr (r, len, cg);
3624 90915 : range_type = get_legacy_range (r, tmin, tmax);
3625 90915 : if (range_type != VR_UNDEFINED)
3626 : {
3627 90915 : min = wi::to_wide (tmin);
3628 90915 : max = wi::to_wide (tmax);
3629 : }
3630 90915 : }
3631 90915 : if (range_type == VR_RANGE)
3632 : {
3633 69864 : if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3634 56750 : *min_size = min.to_uhwi ();
3635 69864 : if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3636 61513 : *probable_max_size = *max_size = max.to_uhwi ();
3637 : }
3638 21051 : else if (range_type == VR_ANTI_RANGE)
3639 : {
3640 : /* Code like
3641 :
3642 : int n;
3643 : if (n < 100)
3644 : memcpy (a, b, n)
3645 :
3646 : Produce anti range allowing negative values of N. We still
3647 : can use the information and make a guess that N is not negative.
3648 : */
3649 3913 : if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3650 3682 : *probable_max_size = min.to_uhwi () - 1;
3651 : }
3652 90915 : }
3653 90915 : gcc_checking_assert (*max_size <=
3654 : (unsigned HOST_WIDE_INT)
3655 : GET_MODE_MASK (GET_MODE (len_rtx)));
3656 : }
3657 :
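/* Putting rough numbers on the example above: with signed int N
   guarded by n < 100, the unsigned LEN operand carries an anti-range
   of roughly ~[100, UINT_MAX], so MIN is 100 and *probable_max_size
   becomes min.to_uhwi () - 1 == 99, while *max_size keeps the type's
   maximum.  (Illustrative; the exact bounds depend on the ranger.)  */
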
3658 : /* Expand a call EXP to the memcpy builtin.
3659 : Return NULL_RTX if we failed; the caller should emit a normal call,
3660 : otherwise try to get the result in TARGET, if convenient (and in
3661 : mode MODE if that's convenient). */
3662 :
3663 : static rtx
3664 105132 : expand_builtin_memcpy (tree exp, rtx target)
3665 : {
3666 105132 : if (!validate_arglist (exp,
3667 : POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3668 : return NULL_RTX;
3669 :
3670 105114 : tree dest = CALL_EXPR_ARG (exp, 0);
3671 105114 : tree src = CALL_EXPR_ARG (exp, 1);
3672 105114 : tree len = CALL_EXPR_ARG (exp, 2);
3673 :
3674 105114 : return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3675 105114 : /*retmode=*/ RETURN_BEGIN, false);
3676 : }
3677 :
3678 : /* Expand a call EXP to the memmove builtin. Return NULL_RTX if we
3679 : failed; the caller should emit a normal call. */
3680 :
3681 : static rtx
3682 17216 : expand_builtin_memmove (tree exp, rtx target)
3683 : {
3684 17216 : if (!validate_arglist (exp,
3685 : POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3686 : return NULL_RTX;
3687 :
3688 17201 : tree dest = CALL_EXPR_ARG (exp, 0);
3689 17201 : tree src = CALL_EXPR_ARG (exp, 1);
3690 17201 : tree len = CALL_EXPR_ARG (exp, 2);
3691 :
3692 17201 : return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3693 17201 : /*retmode=*/ RETURN_BEGIN, true);
3694 : }
3695 :
3696 : /* Expand a call EXP to the mempcpy builtin.
3697 : Return NULL_RTX if we failed; the caller should emit a normal call,
3698 : otherwise try to get the result in TARGET, if convenient (and in
3699 : mode MODE if that's convenient). */
3700 :
3701 : static rtx
3702 1634 : expand_builtin_mempcpy (tree exp, rtx target)
3703 : {
3704 1634 : if (!validate_arglist (exp,
3705 : POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3706 : return NULL_RTX;
3707 :
3708 1629 : tree dest = CALL_EXPR_ARG (exp, 0);
3709 1629 : tree src = CALL_EXPR_ARG (exp, 1);
3710 1629 : tree len = CALL_EXPR_ARG (exp, 2);
3711 :
3712 : /* Policy does not generally allow using compute_objsize (which
3713 : is used internally by check_memop_size) to change code generation
3714 : or drive optimization decisions.
3715 :
3716 : In this instance it is safe because the code we generate has
3717 : the same semantics regardless of the return value of
3718 : check_memop_sizes. Exactly the same amount of data is copied
3719 : and the return value is exactly the same in both cases.
3720 :
3721 : Furthermore, check_memop_size always uses mode 0 for the call to
3722 : compute_objsize, so the imprecise nature of compute_objsize is
3723 : avoided. */
3724 :
3725 : /* Avoid expanding mempcpy into memcpy when the call is determined
3726 : to overflow the buffer. This also prevents the same overflow
3727 : from being diagnosed again when expanding memcpy. */
3728 :
3729 1629 : return expand_builtin_mempcpy_args (dest, src, len,
3730 1629 : target, exp, /*retmode=*/ RETURN_END);
3731 : }
3732 :
3733 : /* Helper function to do the actual work of expanding the memory copy
3734 : family of functions (memcpy, mempcpy, stpcpy). The expansion copies
3735 : LEN bytes of memory from SRC to DEST and assigns to TARGET if
3736 : convenient. The return value is based on the RETMODE argument. */
3737 :
3738 : static rtx
3739 123967 : expand_builtin_memory_copy_args (tree dest, tree src, tree len,
3740 : rtx target, tree exp, memop_ret retmode,
3741 : bool might_overlap)
3742 : {
3743 123967 : unsigned int src_align = get_pointer_alignment (src);
3744 123967 : unsigned int dest_align = get_pointer_alignment (dest);
3745 123967 : rtx dest_mem, src_mem, dest_addr, len_rtx;
3746 123967 : HOST_WIDE_INT expected_size = -1;
3747 123967 : unsigned int expected_align = 0;
3748 123967 : unsigned HOST_WIDE_INT min_size;
3749 123967 : unsigned HOST_WIDE_INT max_size;
3750 123967 : unsigned HOST_WIDE_INT probable_max_size;
3751 :
3752 123967 : bool is_move_done;
3753 :
3754 : /* If DEST is not a pointer type, call the normal function. */
3755 123967 : if (dest_align == 0)
3756 : return NULL_RTX;
3757 :
3758 : /* If either SRC is not a pointer type, don't do this
3759 : operation in-line. */
3760 123967 : if (src_align == 0)
3761 : return NULL_RTX;
3762 :
3763 123967 : if (currently_expanding_gimple_stmt)
3764 123967 : stringop_block_profile (currently_expanding_gimple_stmt,
3765 : &expected_align, &expected_size);
3766 :
3767 123967 : if (expected_align < dest_align)
3768 123954 : expected_align = dest_align;
3769 123967 : dest_mem = get_memory_rtx (dest, len);
3770 123967 : set_mem_align (dest_mem, dest_align);
3771 123967 : len_rtx = expand_normal (len);
3772 123967 : determine_block_size (len, len_rtx, &min_size, &max_size,
3773 : &probable_max_size);
3774 :
3775 : /* Try to get the byte representation of the constant SRC points to,
3776 : with its byte size in NBYTES. */
3777 123967 : unsigned HOST_WIDE_INT nbytes;
3778 123967 : const char *rep = getbyterep (src, &nbytes);
3779 :
3780 : /* If the function's constant bound LEN_RTX is less than or equal
3781 : to the byte size of the representation of the constant argument,
3782 : and if block move would be done by pieces, we can avoid loading
3783 : the bytes from memory and only store the computed constant.
3784 : This works in the overlap (memmove) case as well because
3785 : store_by_pieces just generates a series of stores of constants
3786 : from the representation returned by getbyterep(). */
3787 123967 : if (rep
3788 31768 : && CONST_INT_P (len_rtx)
3789 30193 : && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes
3790 154160 : && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3791 : const_cast<char *> (rep),
3792 : dest_align, false))
3793 : {
3794 29713 : dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3795 : builtin_memcpy_read_str,
3796 : const_cast<char *> (rep),
3797 : dest_align, false, retmode);
3798 29713 : dest_mem = force_operand (XEXP (dest_mem, 0), target);
3799 29713 : dest_mem = convert_memory_address (ptr_mode, dest_mem);
3800 29713 : return dest_mem;
3801 : }
3802 :
3803 94254 : src_mem = get_memory_rtx (src, len);
3804 94254 : set_mem_align (src_mem, src_align);
3805 :
3806 : /* Copy word part most expediently. */
3807 94254 : enum block_op_methods method = BLOCK_OP_NORMAL;
3808 94254 : if (CALL_EXPR_TAILCALL (exp)
3809 94254 : && (retmode == RETURN_BEGIN || target == const0_rtx))
3810 : method = BLOCK_OP_TAILCALL;
3811 94254 : bool use_mempcpy_call = (targetm.libc_has_fast_function (BUILT_IN_MEMPCPY)
3812 94254 : && retmode == RETURN_END
3813 94254 : && !might_overlap
3814 95556 : && target != const0_rtx);
3815 : if (use_mempcpy_call)
3816 94254 : method = BLOCK_OP_NO_LIBCALL_RET;
3817 94254 : dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
3818 : expected_align, expected_size,
3819 : min_size, max_size, probable_max_size,
3820 : use_mempcpy_call, &is_move_done,
3821 : might_overlap, tree_ctz (len));
3822 :
3823 : /* Bail out when a mempcpy call would be expanded as libcall and when
3824 : we have a target that provides a fast implementation
3825 : of the mempcpy routine. */
3826 94254 : if (!is_move_done)
3827 : return NULL_RTX;
3828 :
3829 83373 : if (dest_addr == pc_rtx)
3830 : return NULL_RTX;
3831 :
3832 83373 : if (dest_addr == 0)
3833 : {
3834 34729 : dest_addr = force_operand (XEXP (dest_mem, 0), target);
3835 34729 : dest_addr = convert_memory_address (ptr_mode, dest_addr);
3836 : }
3837 :
3838 83373 : if (retmode != RETURN_BEGIN && target != const0_rtx)
3839 : {
3840 810 : dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
3841 : /* stpcpy pointer to last byte. */
3842 810 : if (retmode == RETURN_END_MINUS_ONE)
3843 0 : dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
3844 : }
3845 :
3846 : return dest_addr;
3847 : }
3848 :
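/* For instance (a sketch of the store_by_pieces shortcut above), on a
   little-endian target with 32-bit stores,

     memcpy (dst, "abc", 4);

   needs no load from the string literal: builtin_memcpy_read_str
   supplies the constant 0x00636261 and a single SImode store of it
   results.  */
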
3849 : static rtx
3850 1652 : expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3851 : rtx target, tree orig_exp, memop_ret retmode)
3852 : {
3853 1629 : return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
3854 0 : retmode, false);
3855 : }
3856 :
3857 : /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3858 : we failed; the caller should emit a normal call, otherwise try to
3859 : get the result in TARGET, if convenient.
3860 : Return value is based on RETMODE argument. */
3861 :
3862 : static rtx
3863 2271 : expand_movstr (tree dest, tree src, rtx target, memop_ret retmode)
3864 : {
3865 2271 : class expand_operand ops[3];
3866 2271 : rtx dest_mem;
3867 2271 : rtx src_mem;
3868 :
3869 2271 : if (!targetm.have_movstr ())
3870 : return NULL_RTX;
3871 :
3872 0 : dest_mem = get_memory_rtx (dest, NULL);
3873 0 : src_mem = get_memory_rtx (src, NULL);
3874 0 : if (retmode == RETURN_BEGIN)
3875 : {
3876 0 : target = force_reg (Pmode, XEXP (dest_mem, 0));
3877 0 : dest_mem = replace_equiv_address (dest_mem, target);
3878 : }
3879 :
3880 0 : create_output_operand (&ops[0],
3881 0 : retmode != RETURN_BEGIN ? target : NULL_RTX, Pmode);
3882 0 : create_fixed_operand (&ops[1], dest_mem);
3883 0 : create_fixed_operand (&ops[2], src_mem);
3884 0 : if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3885 : return NULL_RTX;
3886 :
3887 0 : if (retmode != RETURN_BEGIN && target != const0_rtx)
3888 : {
3889 0 : target = ops[0].value;
3890 : /* movstr is supposed to set end to the address of the NUL
3891 : terminator. If the caller requested a mempcpy-like return value,
3892 : adjust it. */
3893 0 : if (retmode == RETURN_END)
3894 : {
3895 0 : rtx tem = plus_constant (GET_MODE (target),
3896 0 : gen_lowpart (GET_MODE (target), target), 1);
3897 0 : emit_move_insn (target, force_operand (tem, NULL_RTX));
3898 : }
3899 : }
3900 : return target;
3901 : }
3902 :
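/* Return-value conventions at a glance (illustrative; mempcpy is a
   GNU extension, requires <string.h>):

     char d[8];
     char *q = stpcpy (d, "hi");      // d + 2, the NUL: RETURN_END_MINUS_ONE
     char *r = mempcpy (d, "hi", 3);  // d + 3, one past it: RETURN_END

   movstr leaves the NUL address in the output operand, hence the
   plus_constant adjustment above for RETURN_END callers.  */
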
3903 : /* Expand expression EXP, which is a call to the strcpy builtin. Return
3904 : NULL_RTX if we failed; the caller should emit a normal call, otherwise
3905 : try to get the result in TARGET, if convenient (and in mode MODE if that's
3906 : convenient). */
3907 :
3908 : static rtx
3909 1864 : expand_builtin_strcpy (tree exp, rtx target)
3910 : {
3911 1864 : if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3912 : return NULL_RTX;
3913 :
3914 1849 : tree dest = CALL_EXPR_ARG (exp, 0);
3915 1849 : tree src = CALL_EXPR_ARG (exp, 1);
3916 :
3917 1849 : return expand_builtin_strcpy_args (exp, dest, src, target);
3918 : }
3919 :
3920 : /* Helper function to do the actual work for expand_builtin_strcpy. The
3921 : arguments to the builtin_strcpy call DEST and SRC are broken out
3922 : so that this can also be called without constructing an actual CALL_EXPR.
3923 : The other arguments and return value are the same as for
3924 : expand_builtin_strcpy. */
3925 :
3926 : static rtx
3927 1849 : expand_builtin_strcpy_args (tree, tree dest, tree src, rtx target)
3928 : {
3929 1849 : return expand_movstr (dest, src, target, /*retmode=*/ RETURN_BEGIN);
3930 : }
3931 :
3932 : /* Expand a call EXP to the stpcpy builtin.
3933 : Return NULL_RTX if we failed; the caller should emit a normal call,
3934 : otherwise try to get the result in TARGET, if convenient (and in
3935 : mode MODE if that's convenient). */
3936 :
3937 : static rtx
3938 454 : expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
3939 : {
3940 454 : tree dst, src;
3941 454 : location_t loc = EXPR_LOCATION (exp);
3942 :
3943 454 : if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3944 : return NULL_RTX;
3945 :
3946 445 : dst = CALL_EXPR_ARG (exp, 0);
3947 445 : src = CALL_EXPR_ARG (exp, 1);
3948 :
3949 : /* If return value is ignored, transform stpcpy into strcpy. */
3950 445 : if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3951 : {
3952 0 : tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3953 0 : tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3954 0 : return expand_expr (result, target, mode, EXPAND_NORMAL);
3955 : }
3956 : else
3957 : {
3958 445 : tree len, lenp1;
3959 445 : rtx ret;
3960 :
3961 : /* Ensure we get an actual string whose length can be evaluated at
3962 : compile-time, not an expression containing a string. This is
3963 : because the latter will potentially produce pessimized code
3964 : when used to produce the return value. */
3965 445 : c_strlen_data lendata = { };
3966 445 : if (!c_getstr (src)
3967 445 : || !(len = c_strlen (src, 0, &lendata, 1)))
3968 422 : return expand_movstr (dst, src, target,
3969 422 : /*retmode=*/ RETURN_END_MINUS_ONE);
3970 :
3971 23 : lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3972 23 : ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3973 : target, exp,
3974 : /*retmode=*/ RETURN_END_MINUS_ONE);
3975 :
3976 23 : if (ret)
3977 : return ret;
3978 :
3979 0 : if (TREE_CODE (len) == INTEGER_CST)
3980 : {
3981 0 : rtx len_rtx = expand_normal (len);
3982 :
3983 0 : if (CONST_INT_P (len_rtx))
3984 : {
3985 0 : ret = expand_builtin_strcpy_args (exp, dst, src, target);
3986 :
3987 0 : if (ret)
3988 : {
3989 0 : if (! target)
3990 : {
3991 0 : if (mode != VOIDmode)
3992 0 : target = gen_reg_rtx (mode);
3993 : else
3994 0 : target = gen_reg_rtx (GET_MODE (ret));
3995 : }
3996 0 : if (GET_MODE (target) != GET_MODE (ret))
3997 0 : ret = gen_lowpart (GET_MODE (target), ret);
3998 :
3999 0 : ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
4000 0 : ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
4001 0 : gcc_assert (ret);
4002 :
4003 : return target;
4004 : }
4005 : }
4006 : }
4007 :
4008 0 : return expand_movstr (dst, src, target,
4009 0 : /*retmode=*/ RETURN_END_MINUS_ONE);
4010 : }
4011 : }
4012 :
4013 : /* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
4014 : arguments while being careful to avoid duplicate warnings (which could
4015 : be issued if the expander were to expand the call, resulting in it
4016 : being emitted in expand_call(). */
4017 :
4018 : static rtx
4019 454 : expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
4020 : {
4021 454 : if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
4022 : {
4023 : /* The call has been successfully expanded. Check for nonstring
4024 : arguments and issue warnings as appropriate. */
4025 23 : maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
4026 23 : return ret;
4027 : }
4028 :
4029 : return NULL_RTX;
4030 : }
4031 :
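/* The rewrites above, in source terms (a sketch assuming the usual C
   library semantics):

     stpcpy (d, s);           // result unused -> strcpy (d, s)
     p = stpcpy (d, "abc");   // -> p = mempcpy (d, "abc", 4) - 1

   where 4 is strlen ("abc") + 1 and the RETURN_END_MINUS_ONE mode
   supplies the final - 1.  */
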
4032 : /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
4033 : bytes from the constant string DATA + OFFSET and return them as a
4034 : target constant. */
4035 :
4036 : rtx
4037 9358 : builtin_strncpy_read_str (void *data, void *, HOST_WIDE_INT offset,
4038 : fixed_size_mode mode)
4039 : {
4040 9358 : const char *str = (const char *) data;
4041 :
4042 9358 : if ((unsigned HOST_WIDE_INT) offset > strlen (str))
4043 260 : return const0_rtx;
4044 :
4045 9098 : return c_readstr (str + offset, mode);
4046 : }
4047 :
4048 : /* Helper to check the sizes of sequences and the destination of calls
4049 : to __builtin_strncat and __builtin___strncat_chk. Returns true on
4050 : success (no overflow or invalid sizes), false otherwise. */
4051 :
4052 : static bool
4053 109 : check_strncat_sizes (tree exp, tree objsize)
4054 : {
4055 109 : tree dest = CALL_EXPR_ARG (exp, 0);
4056 109 : tree src = CALL_EXPR_ARG (exp, 1);
4057 109 : tree maxread = CALL_EXPR_ARG (exp, 2);
4058 :
4059 : /* Try to determine the range of lengths that the source expression
4060 : refers to. */
4061 109 : c_strlen_data lendata = { };
4062 109 : get_range_strlen (src, &lendata, /* eltsize = */ 1);
4063 :
4064 : /* Try to verify that the destination is big enough for the shortest
4065 : string. */
4066 :
4067 109 : access_data data (nullptr, exp, access_read_write, maxread, true);
4068 109 : if (!objsize && warn_stringop_overflow)
4069 : {
4070 : /* If it hasn't been provided by __strncat_chk, try to determine
4071 : the size of the destination object into which the source is
4072 : being copied. */
4073 0 : objsize = compute_objsize (dest, warn_stringop_overflow - 1, &data.dst);
4074 : }
4075 :
4076 : /* Add one for the terminating nul. */
4077 109 : tree srclen = (lendata.minlen
4078 109 : ? fold_build2 (PLUS_EXPR, size_type_node, lendata.minlen,
4079 : size_one_node)
4080 : : NULL_TREE);
4081 :
4082 : /* The strncat function copies at most MAXREAD bytes and always appends
4083 : the terminating nul so the specified upper bound should never be equal
4084 : to (or greater than) the size of the destination. */
4085 56 : if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
4086 157 : && tree_int_cst_equal (objsize, maxread))
4087 : {
4088 23 : location_t loc = EXPR_LOCATION (exp);
4089 23 : warning_at (loc, OPT_Wstringop_overflow_,
4090 : "%qD specified bound %E equals destination size",
4091 : get_callee_fndecl (exp), maxread);
4092 :
4093 23 : return false;
4094 : }
4095 :
4096 86 : if (!srclen
4097 86 : || (maxread && tree_fits_uhwi_p (maxread)
4098 33 : && tree_fits_uhwi_p (srclen)
4099 33 : && tree_int_cst_lt (maxread, srclen)))
4100 : srclen = maxread;
4101 :
4102 : /* The number of bytes to write is LEN but check_access will also
4103 : check SRCLEN if LEN's value isn't known. */
4104 86 : return check_access (exp, /*dstwrite=*/NULL_TREE, maxread, srclen,
4105 86 : objsize, data.mode, &data);
4106 : }
4107 :
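/* For example (illustrative of the diagnostic above):

     char d[8];
     strncat (d, s, sizeof d);   // bound 8 equals destination size

   draws the -Wstringop-overflow warning, since strncat always appends
   the terminating nul and so can safely copy at most sizeof d - 1
   characters.  */
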
4108 : /* Expand expression EXP, which is a call to the strncpy builtin. Return
4109 : NULL_RTX if we failed; the caller should emit a normal call. */
4110 :
4111 : static rtx
4112 2169 : expand_builtin_strncpy (tree exp, rtx target)
4113 : {
4114 2169 : location_t loc = EXPR_LOCATION (exp);
4115 :
4116 2169 : if (!validate_arglist (exp,
4117 : POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4118 : return NULL_RTX;
4119 2157 : tree dest = CALL_EXPR_ARG (exp, 0);
4120 2157 : tree src = CALL_EXPR_ARG (exp, 1);
4121 : /* The number of bytes to write (not the maximum). */
4122 2157 : tree len = CALL_EXPR_ARG (exp, 2);
4123 :
4124 : /* The length of the source sequence. */
4125 2157 : tree slen = c_strlen (src, 1);
4126 :
4127 : /* We must be passed a constant len and src parameter. */
4128 2157 : if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
4129 : return NULL_RTX;
4130 :
4131 237 : slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
4132 :
4133 : /* We're required to pad with trailing zeros if the requested
4134 : len is greater than strlen(s2)+1. In that case try to
4135 : use store_by_pieces; if it fails, punt. */
4136 237 : if (tree_int_cst_lt (slen, len))
4137 : {
4138 142 : unsigned int dest_align = get_pointer_alignment (dest);
4139 142 : const char *p = c_getstr (src);
4140 142 : rtx dest_mem;
4141 :
4142 136 : if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
4143 278 : || !can_store_by_pieces (tree_to_uhwi (len),
4144 : builtin_strncpy_read_str,
4145 : const_cast<char *> (p),
4146 : dest_align, false))
4147 14 : return NULL_RTX;
4148 :
4149 128 : dest_mem = get_memory_rtx (dest, len);
4150 128 : store_by_pieces (dest_mem, tree_to_uhwi (len),
4151 : builtin_strncpy_read_str,
4152 : const_cast<char *> (p), dest_align, false,
4153 : RETURN_BEGIN);
4154 128 : dest_mem = force_operand (XEXP (dest_mem, 0), target);
4155 128 : dest_mem = convert_memory_address (ptr_mode, dest_mem);
4156 128 : return dest_mem;
4157 : }
4158 :
4159 : return NULL_RTX;
4160 : }
4161 :
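/* An example of the padding path above (illustrative):

     strncpy (d, "ab", 8);

   must store "ab" followed by six NULs, so store_by_pieces writes all
   eight bytes, with builtin_strncpy_read_str supplying const0_rtx for
   the offsets beyond the source string.  */
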
4162 : /* Return the RTL of a register in MODE generated from PREV in the
4163 : previous iteration. */
4164 :
4165 : static rtx
4166 225168 : gen_memset_value_from_prev (by_pieces_prev *prev, fixed_size_mode mode)
4167 : {
4168 225168 : rtx target = nullptr;
4169 225168 : if (prev != nullptr && prev->data != nullptr)
4170 : {
4171 : /* Use the previous data in the same mode. */
4172 126242 : if (prev->mode == mode)
4173 225168 : return prev->data;
4174 :
4175 31115 : fixed_size_mode prev_mode = prev->mode;
4176 :
4177 : /* Don't use the previous data to write QImode if it is in a
4178 : vector mode. */
4179 31115 : if (VECTOR_MODE_P (prev_mode) && mode == QImode)
4180 : return target;
4181 :
4182 30844 : rtx prev_rtx = prev->data;
4183 :
4184 30844 : if (REG_P (prev_rtx)
4185 21742 : && HARD_REGISTER_P (prev_rtx)
4186 30844 : && lowpart_subreg_regno (REGNO (prev_rtx), prev_mode, mode) < 0)
4187 : {
4188 : /* This case occurs when PREV_MODE is a vector and when
4189 : MODE is too small to store using vector operations.
4190 : After register allocation, the code will need to move the
4191 : lowpart of the vector register into a non-vector register.
4192 :
4193 : Also, the target has chosen to use a hard register
4194 : instead of going with the default choice of using a
4195 : pseudo register. We should respect that choice and try to
4196 : avoid creating a pseudo register with the same mode as the
4197 : current hard register.
4198 :
4199 : In principle, we could just use a lowpart MODE subreg of
4200 : the vector register. However, the vector register mode might
4201 : be too wide for non-vector registers, and we already know
4202 : that the non-vector mode is too small for vector registers.
4203 : It's therefore likely that we'd need to spill to memory in
4204 : the vector mode and reload the non-vector value from there.
4205 :
4206 : Try to avoid that by reducing the vector register to the
4207 : smallest size that it can hold. This should increase the
4208 : chances that non-vector registers can hold both the inner
4209 : and outer modes of the subreg that we generate later. */
4210 0 : machine_mode m;
4211 0 : fixed_size_mode candidate;
4212 0 : FOR_EACH_MODE_IN_CLASS (m, GET_MODE_CLASS (mode))
4213 0 : if (is_a<fixed_size_mode> (m, &candidate))
4214 : {
4215 0 : if (GET_MODE_SIZE (candidate)
4216 0 : >= GET_MODE_SIZE (prev_mode))
4217 : break;
4218 0 : if (GET_MODE_SIZE (candidate) >= GET_MODE_SIZE (mode)
4219 0 : && lowpart_subreg_regno (REGNO (prev_rtx),
4220 : prev_mode, candidate) >= 0)
4221 : {
4222 0 : target = lowpart_subreg (candidate, prev_rtx,
4223 : prev_mode);
4224 0 : prev_rtx = target;
4225 0 : prev_mode = candidate;
4226 0 : break;
4227 : }
4228 : }
4229 0 : if (target == nullptr)
4230 0 : prev_rtx = copy_to_reg (prev_rtx);
4231 : }
4232 :
4233 30844 : target = lowpart_subreg (mode, prev_rtx, prev_mode);
4234 : }
4235 : return target;
4236 : }
4237 :
4238 : /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
4239 : bytes from the constant string DATA + OFFSET and return them as a
4240 : target constant. If PREV isn't nullptr, it has the RTL info from the
4241 : previous iteration. */
4242 :
4243 : rtx
4244 223936 : builtin_memset_read_str (void *data, void *prev,
4245 : HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4246 : fixed_size_mode mode)
4247 : {
4248 223936 : const char *c = (const char *) data;
4249 223936 : unsigned int size = GET_MODE_SIZE (mode);
4250 :
4251 223936 : rtx target = gen_memset_value_from_prev ((by_pieces_prev *) prev,
4252 : mode);
4253 223936 : if (target != nullptr)
4254 : return target;
4255 98500 : rtx src = gen_int_mode (*c, QImode);
4256 :
4257 98500 : if (VECTOR_MODE_P (mode))
4258 : {
4259 138186 : gcc_assert (GET_MODE_INNER (mode) == QImode);
4260 :
4261 69093 : rtx const_vec = gen_const_vec_duplicate (mode, src);
4262 69093 : if (prev == NULL)
4263 : /* Return CONST_VECTOR when called by a query function. */
4264 : return const_vec;
4265 :
4266 : /* Use the move expander with CONST_VECTOR. */
4267 52959 : target = gen_reg_rtx (mode);
4268 52959 : emit_move_insn (target, const_vec);
4269 52959 : return target;
4270 : }
4271 :
4272 29407 : char *p = XALLOCAVEC (char, size);
4273 :
4274 29407 : memset (p, *c, size);
4275 :
4276 29407 : return c_readstr (p, mode);
4277 : }
4278 :
4279 : /* Callback routine for store_by_pieces. Return the RTL of a register
4280 : containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
4281 : char value given in the RTL register data. For example, if mode is
4282 : 4 bytes wide, return the RTL for 0x01010101*data. If PREV isn't
4283 : nullptr, it has the RTL info from the previous iteration. */
4284 :
4285 : static rtx
4286 1403 : builtin_memset_gen_str (void *data, void *prev,
4287 : HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4288 : fixed_size_mode mode)
4289 : {
4290 1403 : rtx target, coeff;
4291 1403 : size_t size;
4292 1403 : char *p;
4293 :
4294 1403 : size = GET_MODE_SIZE (mode);
4295 1403 : if (size == 1)
4296 : return (rtx) data;
4297 :
4298 1232 : target = gen_memset_value_from_prev ((by_pieces_prev *) prev, mode);
4299 1232 : if (target != nullptr)
4300 : return target;
4301 :
4302 697 : if (VECTOR_MODE_P (mode))
4303 : {
4304 354 : gcc_assert (GET_MODE_INNER (mode) == QImode);
4305 :
4306 : /* vec_duplicate_optab is a precondition to pick a vector mode for
4307 : the memset expander. */
4308 177 : insn_code icode = optab_handler (vec_duplicate_optab, mode);
4309 :
4310 177 : target = gen_reg_rtx (mode);
4311 177 : class expand_operand ops[2];
4312 177 : create_output_operand (&ops[0], target, mode);
4313 177 : create_input_operand (&ops[1], (rtx) data, QImode);
4314 177 : expand_insn (icode, 2, ops);
4315 177 : if (!rtx_equal_p (target, ops[0].value))
4316 0 : emit_move_insn (target, ops[0].value);
4317 :
4318 177 : return target;
4319 : }
4320 :
4321 520 : p = XALLOCAVEC (char, size);
4322 520 : memset (p, 1, size);
4323 520 : coeff = c_readstr (p, mode);
4324 :
4325 520 : target = convert_to_mode (mode, (rtx) data, 1);
4326 520 : target = expand_mult (mode, target, coeff, NULL_RTX, 1);
4327 520 : return force_reg (mode, target);
4328 : }
4329 :
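/* A scalar sketch of the byte broadcast performed above for a 4-byte
   MODE (hypothetical helper, requires <stdint.h>):

     static uint32_t
     broadcast4 (uint8_t c)
     {
       return c * (uint32_t) 0x01010101;   // 0xab -> 0xabababab
     }

   mirroring the coeff multiplication in builtin_memset_gen_str.  */
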
4330 : /* Expand expression EXP, which is a call to the memset builtin. Return
4331 : NULL_RTX if we failed; the caller should emit a normal call, otherwise
4332 : try to get the result in TARGET, if convenient (and in mode MODE if that's
4333 : convenient). */
4334 :
4335 : rtx
4336 52562 : expand_builtin_memset (tree exp, rtx target, machine_mode mode)
4337 : {
4338 52562 : if (!validate_arglist (exp,
4339 : POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4340 : return NULL_RTX;
4341 :
4342 52553 : tree dest = CALL_EXPR_ARG (exp, 0);
4343 52553 : tree val = CALL_EXPR_ARG (exp, 1);
4344 52553 : tree len = CALL_EXPR_ARG (exp, 2);
4345 :
4346 52553 : return expand_builtin_memset_args (dest, val, len, target, mode, exp);
4347 : }
4348 :
4349 : /* Check that store_by_pieces allows BITS + LEN (so that we don't
4350 : expand something unreasonably long), and every power of 2 in
4351 : BITS. It is assumed that LEN has already been tested by
4352 : itself. */
4353 : static bool
4354 23614 : can_store_by_multiple_pieces (unsigned HOST_WIDE_INT bits,
4355 : by_pieces_constfn constfun,
4356 : void *constfundata, unsigned int align,
4357 : bool memsetp,
4358 : unsigned HOST_WIDE_INT len)
4359 : {
4360 23614 : if (bits
4361 23614 : && !can_store_by_pieces (bits + len, constfun, constfundata,
4362 : align, memsetp))
4363 : return false;
4364 :
4365 : /* BITS set are expected to be generally in the low range and
4366 : contiguous. We do NOT want to repeat the test above in case BITS
4367 : has a single bit set, so we terminate the loop when BITS == BIT.
4368 : In the unlikely case that BITS has the MSB set, also terminate in
4369 : case BIT gets shifted out. */
4370 1121 : for (unsigned HOST_WIDE_INT bit = 1; bit < bits && bit; bit <<= 1)
4371 : {
4372 840 : if ((bits & bit) == 0)
4373 504 : continue;
4374 :
4375 336 : if (!can_store_by_pieces (bit, constfun, constfundata,
4376 : align, memsetp))
4377 : return false;
4378 : }
4379 :
4380 : return true;
4381 : }
4382 :
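/* A source-level sketch of the strategy described in the comment
   below, for memset (p, 0, n) with n known to be in [0, 16) and
   ctz_len == 0 (hypothetical helper; the real code emits RTL and may
   use a loop for large maxima):

     void
     memset_upto15 (char *p, size_t n)
     {
       if (n >= 8) { memset (p, 0, 8); p += 8; n -= 8; }
       if (n >= 4) { memset (p, 0, 4); p += 4; n -= 4; }
       if (n >= 2) { memset (p, 0, 2); p += 2; n -= 2; }
       if (n >= 1) *p = 0;
     }

   where each fixed-size memset stands for one store_by_pieces run.  */
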
4383 : /* Try to store VAL (or, if NULL_RTX, VALC) in LEN bytes starting at TO.
4384 : Return TRUE if successful, FALSE otherwise. TO is assumed to be
4385 : aligned at an ALIGN-bits boundary. LEN must be a multiple of
4386 : 1<<CTZ_LEN between MIN_LEN and MAX_LEN.
4387 :
4388 : The strategy is to issue one store_by_pieces for each power of two,
4389 : from most to least significant, guarded by a test on whether there
4390 : are at least that many bytes left to copy in LEN.
4391 :
4392 : ??? Should we skip some powers of two in favor of loops? Maybe start
4393 : at the max of TO/LEN/word alignment, at least when optimizing for
4394 : size, instead of ensuring O(log len) dynamic compares? */
4395 :
4396 : bool
4397 21914 : try_store_by_multiple_pieces (rtx to, rtx len, unsigned int ctz_len,
4398 : unsigned HOST_WIDE_INT min_len,
4399 : unsigned HOST_WIDE_INT max_len,
4400 : rtx val, char valc, unsigned int align)
4401 : {
4402 21914 : int max_bits = floor_log2 (max_len);
4403 21914 : int min_bits = floor_log2 (min_len);
4404 21914 : int sctz_len = ctz_len;
4405 :
4406 21914 : gcc_checking_assert (sctz_len >= 0);
4407 :
4408 21914 : if (val)
4409 676 : valc = 1;
4410 :
4411 : /* Bits more significant than TST_BITS are part of the shared prefix
4412 : in the binary representation of both min_len and max_len. Since
4413 : they're identical, we don't need to test them in the loop. */
4414 21914 : int tst_bits = (max_bits != min_bits ? max_bits
4415 9674 : : floor_log2 (max_len ^ min_len));
4416 :
4417 : /* Save the pre-blksize values. */
4418 21914 : int orig_max_bits = max_bits;
4419 21914 : int orig_tst_bits = tst_bits;
4420 :
4421 : /* Check whether it's profitable to start by storing a fixed BLKSIZE
4422 : bytes, to lower max_bits. In the unlikely case of a constant LEN
4423 : (implied by identical MAX_LEN and MIN_LEN), we want to issue a
4424 : single store_by_pieces, but otherwise, select the minimum multiple
4425 : of the ALIGN (in bytes) and of the GCD of the possible LENs that
4426 : brings MAX_LEN below 1<<TST_BITS, if that's lower than MIN_LEN. */
4427 21914 : unsigned HOST_WIDE_INT blksize;
4428 21914 : if (max_len > min_len)
4429 : {
4430 12261 : unsigned HOST_WIDE_INT alrng = MAX (HOST_WIDE_INT_1U << ctz_len,
4431 : align / BITS_PER_UNIT);
4432 12261 : blksize = max_len - (HOST_WIDE_INT_1U << tst_bits) + alrng;
4433 12261 : blksize &= ~(alrng - 1);
4434 : }
4435 9653 : else if (max_len == min_len)
4436 : blksize = max_len;
4437 : else
4438 : /* Huh, max_len < min_len? Punt. See pr100843.c. */
4439 : return false;
4440 21913 : if (min_len >= blksize
4441 : /* ??? Maybe try smaller fixed-prefix blksizes before
4442 : punting? */
4443 21913 : && can_store_by_pieces (blksize, builtin_memset_read_str,
4444 : &valc, align, true))
4445 : {
4446 282 : min_len -= blksize;
4447 282 : min_bits = floor_log2 (min_len);
4448 282 : max_len -= blksize;
4449 282 : max_bits = floor_log2 (max_len);
4450 :
4451 282 : tst_bits = (max_bits != min_bits ? max_bits
4452 174 : : floor_log2 (max_len ^ min_len));
4453 : }
4454 : else
4455 : blksize = 0;
4456 :
4457 : /* Check that we can use store by pieces for the maximum store count
4458 : we may issue (initial fixed-size block, plus conditional
4459 : power-of-two-sized stores from max_bits to ctz_len). */
4460 21913 : unsigned HOST_WIDE_INT xlenest = blksize;
4461 21913 : if (max_bits >= 0)
4462 21739 : xlenest += ((HOST_WIDE_INT_1U << max_bits) * 2
4463 21739 : - (HOST_WIDE_INT_1U << ctz_len));
4464 21913 : bool max_loop = false;
4465 21913 : bool use_store_by_pieces = true;
4466 : /* Skip the test in case of overflow in xlenest. It shouldn't
4467 : happen because of the way max_bits and blksize are related, but
4468 : it doesn't hurt to test. */
4469 21913 : if (blksize > xlenest
4470 21913 : || !can_store_by_multiple_pieces (xlenest - blksize,
4471 : builtin_memset_read_str,
4472 : &valc, align, true, blksize))
4473 : {
4474 21739 : if (!(flag_inline_stringops & ILSOP_MEMSET))
4475 : return false;
4476 :
4477 1601 : for (max_bits = orig_max_bits;
4478 1708 : max_bits >= sctz_len;
4479 : --max_bits)
4480 : {
4481 1708 : xlenest = ((HOST_WIDE_INT_1U << max_bits) * 2
4482 1708 : - (HOST_WIDE_INT_1U << ctz_len));
4483 : /* Check that blksize plus the bits to be stored as blocks
4484 : sized at powers of two can be stored by pieces. This is
4485 : like the test above, but with smaller max_bits. Skip
4486 : orig_max_bits (it would be redundant). Also skip in case
4487 : of overflow. */
4488 1708 : if (max_bits < orig_max_bits
4489 1601 : && xlenest + blksize >= xlenest
4490 3309 : && can_store_by_multiple_pieces (xlenest,
4491 : builtin_memset_read_str,
4492 : &valc, align, true, blksize))
4493 : {
4494 : max_loop = true;
4495 : break;
4496 : }
4497 1601 : if (blksize
4498 1601 : && can_store_by_multiple_pieces (xlenest,
4499 : builtin_memset_read_str,
4500 : &valc, align, true, 0))
4501 : {
4502 0 : max_len += blksize;
4503 0 : min_len += blksize;
4504 0 : tst_bits = orig_tst_bits;
4505 0 : blksize = 0;
4506 0 : max_loop = true;
4507 0 : break;
4508 : }
4509 1601 : if (max_bits == sctz_len)
4510 : {
4511 : /* We'll get here if can_store_by_pieces refuses to
4512 : store even a single QImode. We'll fall back to
4513 : QImode stores then. */
4514 98 : if (!sctz_len)
4515 : {
4516 : blksize = 0;
4517 : max_loop = true;
4518 : use_store_by_pieces = false;
4519 : break;
4520 : }
4521 98 : --sctz_len;
4522 98 : --ctz_len;
4523 : }
4524 : }
4525 107 : if (!max_loop)
4526 : return false;
4527 : /* If the boundaries are such that min and max may run a
4528 : different number of trips in the initial loop, the remainder
4529 :          need not be between the moduli, so set tst_bits to cover all
4530 : bits. Otherwise, if the trip counts are the same, max_len
4531 : has the common prefix, and the previously-computed tst_bits
4532 : is usable. */
4533 107 : if (max_len >> max_bits > min_len >> max_bits)
4534 64 : tst_bits = max_bits;
4535 : }
4536 :
4537 281 : by_pieces_constfn constfun;
4538 281 : void *constfundata;
4539 281 : if (val)
4540 : {
4541 1 : constfun = builtin_memset_gen_str;
4542 1 : constfundata = val = force_reg (TYPE_MODE (unsigned_char_type_node),
4543 : val);
4544 : }
4545 : else
4546 : {
4547 : constfun = builtin_memset_read_str;
4548 : constfundata = &valc;
4549 : }
4550 :
4551 281 : rtx ptr = copy_addr_to_reg (XEXP (to, 0));
4552 281 : rtx rem = copy_to_mode_reg (ptr_mode, convert_to_mode (ptr_mode, len, 0));
4553 281 : to = replace_equiv_address (to, ptr);
4554 281 : set_mem_align (to, align);
4555 :
4556 281 : if (blksize)
4557 : {
4558 366 : to = store_by_pieces (to, blksize,
4559 : constfun, constfundata,
4560 : align, true,
4561 : max_len != 0 ? RETURN_END : RETURN_BEGIN);
4562 193 : if (max_len == 0)
4563 : return true;
4564 :
4565 : /* Adjust PTR, TO and REM. Since TO's address is likely
4566 : PTR+offset, we have to replace it. */
4567 20 : emit_move_insn (ptr, force_operand (XEXP (to, 0), NULL_RTX));
4568 20 : to = replace_equiv_address (to, ptr);
4569 20 : rtx rem_minus_blksize = plus_constant (ptr_mode, rem, -blksize);
4570 20 : emit_move_insn (rem, force_operand (rem_minus_blksize, NULL_RTX));
4571 : }
4572 :
4573 : /* Iterate over power-of-two block sizes from the maximum length to
4574 : the least significant bit possibly set in the length. */
4575 493 : for (int i = max_bits; i >= sctz_len; i--)
4576 : {
4577 385 : rtx_code_label *loop_label = NULL;
4578 385 : rtx_code_label *label = NULL;
4579 :
4580 385 : blksize = HOST_WIDE_INT_1U << i;
4581 :
4582 : /* If we're past the bits shared between min_ and max_len, expand
4583 : a test on the dynamic length, comparing it with the
4584 : BLKSIZE. */
4585 385 : if (i <= tst_bits)
4586 : {
4587 275 : label = gen_label_rtx ();
4588 275 : emit_cmp_and_jump_insns (rem, GEN_INT (blksize), LT, NULL,
4589 : ptr_mode, 1, label,
4590 : profile_probability::even ());
4591 : }
4592 : /* If we are at a bit that is in the prefix shared by min_ and
4593 : max_len, skip the current BLKSIZE if the bit is clear, but do
4594 : not skip the loop, even if it doesn't require
4595 : prechecking. */
4596 110 : else if ((max_len & blksize) == 0
4597 69 : && !(max_loop && i == max_bits))
4598 39 : continue;
4599 :
4600 346 : if (max_loop && i == max_bits)
4601 : {
4602 107 : loop_label = gen_label_rtx ();
4603 107 : emit_label (loop_label);
4604 : /* Since we may run this multiple times, don't assume we
4605 : know anything about the offset. */
4606 107 : clear_mem_offset (to);
4607 : }
4608 :
4609 346 : bool update_needed = i != sctz_len || loop_label;
4610 346 : rtx next_ptr = NULL_RTX;
4611 346 : if (!use_store_by_pieces)
4612 : {
4613 0 : gcc_checking_assert (blksize == 1);
4614 0 : if (!val)
4615 0 : val = gen_int_mode (valc, QImode);
4616 0 : to = change_address (to, QImode, 0);
4617 0 : emit_move_insn (to, val);
4618 0 : if (update_needed)
4619 0 : next_ptr = plus_constant (GET_MODE (ptr), ptr, blksize);
4620 : }
4621 : else
4622 : {
4623 : /* Issue a store of BLKSIZE bytes. */
4624 404 : to = store_by_pieces (to, blksize,
4625 : constfun, constfundata,
4626 : align, true,
4627 : update_needed ? RETURN_END : RETURN_BEGIN);
4628 346 : next_ptr = XEXP (to, 0);
4629 : }
4630 : /* Adjust REM and PTR, unless this is the last iteration. */
4631 346 : if (update_needed)
4632 : {
4633 288 : emit_move_insn (ptr, force_operand (next_ptr, NULL_RTX));
4634 288 : to = replace_equiv_address (to, ptr);
4635 288 : rtx rem_minus_blksize = plus_constant (ptr_mode, rem, -blksize);
4636 288 : emit_move_insn (rem, force_operand (rem_minus_blksize, NULL_RTX));
4637 : }
4638 :
4639 288 : if (loop_label)
4640 107 : emit_cmp_and_jump_insns (rem, GEN_INT (blksize), GE, NULL,
4641 : ptr_mode, 1, loop_label,
4642 : profile_probability::likely ());
4643 :
4644 346 : if (label)
4645 : {
4646 275 : emit_label (label);
4647 :
4648 : /* Given conditional stores, the offset can no longer be
4649 : known, so clear it. */
4650 275 : clear_mem_offset (to);
4651 : }
4652 : }
4653 :
4654 : return true;
4655 : }
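/* Illustrative sketch, not GCC code: the expansion strategy above,
   modeled as a plain C function.  Each "if" stands for one emitted
   compare-and-jump on the remaining length, and each memset stands
   for one store_by_pieces block; MAX_BITS plays the role of
   floor_log2 (max_len).  */

#include <stddef.h>
#include <string.h>

static void
set_by_multiple_pieces_model (unsigned char *to, int c, size_t len,
			      int max_bits)
{
  for (int i = max_bits; i >= 0; i--)
    {
      size_t blksize = (size_t) 1 << i;
      if (len >= blksize)
	{
	  memset (to, c, blksize);	/* One power-of-two-sized store.  */
	  to += blksize;
	  len -= blksize;
	}
    }
}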
4656 :
4657 : /* Helper function to do the actual work for expand_builtin_memset. The
4658 : arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4659 : so that this can also be called without constructing an actual CALL_EXPR.
4660 : The other arguments and return value are the same as for
4661 : expand_builtin_memset. */
4662 :
4663 : static rtx
4664 52553 : expand_builtin_memset_args (tree dest, tree val, tree len,
4665 : rtx target, machine_mode mode, tree orig_exp)
4666 : {
4667 52553 : tree fndecl, fn;
4668 52553 : enum built_in_function fcode;
4669 52553 : machine_mode val_mode;
4670 52553 : char c;
4671 52553 : unsigned int dest_align;
4672 52553 : rtx dest_mem, dest_addr, len_rtx;
4673 52553 : HOST_WIDE_INT expected_size = -1;
4674 52553 : unsigned int expected_align = 0;
4675 52553 : unsigned HOST_WIDE_INT min_size;
4676 52553 : unsigned HOST_WIDE_INT max_size;
4677 52553 : unsigned HOST_WIDE_INT probable_max_size;
4678 :
4679 52553 : dest_align = get_pointer_alignment (dest);
4680 :
4681 : /* If DEST is not a pointer type, don't do this operation in-line. */
4682 52553 : if (dest_align == 0)
4683 : return NULL_RTX;
4684 :
4685 52553 : if (currently_expanding_gimple_stmt)
4686 52553 : stringop_block_profile (currently_expanding_gimple_stmt,
4687 : &expected_align, &expected_size);
4688 :
4689 52553 : if (expected_align < dest_align)
4690 52548 : expected_align = dest_align;
4691 :
4692 : /* If the LEN parameter is zero, return DEST. */
4693 52553 : if (integer_zerop (len))
4694 : {
4695 : /* Evaluate and ignore VAL in case it has side-effects. */
4696 0 : expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4697 0 : return expand_expr (dest, target, mode, EXPAND_NORMAL);
4698 : }
4699 :
4700 : /* Stabilize the arguments in case we fail. */
4701 52553 : dest = builtin_save_expr (dest);
4702 52553 : val = builtin_save_expr (val);
4703 52553 : len = builtin_save_expr (len);
4704 :
4705 52553 : len_rtx = expand_normal (len);
4706 52553 : determine_block_size (len, len_rtx, &min_size, &max_size,
4707 : &probable_max_size);
4708 52553 : dest_mem = get_memory_rtx (dest, len);
4709 52553 : val_mode = TYPE_MODE (unsigned_char_type_node);
4710 :
4711 52553 : if (TREE_CODE (val) != INTEGER_CST
4712 52553 : || target_char_cast (val, &c))
4713 : {
4714 1755 : rtx val_rtx;
4715 :
4716 1755 : val_rtx = expand_normal (val);
4717 1755 : val_rtx = convert_to_mode (val_mode, val_rtx, 0);
4718 :
4719 : /* Assume that we can memset by pieces if we can store
4720 :        the coefficients by pieces (in the required modes).
4721 :        We can't pass builtin_memset_gen_str as that emits RTL.  */
4722 1755 : c = 1;
4723 1755 : if (tree_fits_uhwi_p (len)
4724 1755 : && can_store_by_pieces (tree_to_uhwi (len),
4725 : builtin_memset_read_str, &c, dest_align,
4726 : true))
4727 : {
4728 739 : val_rtx = force_reg (val_mode, val_rtx);
4729 739 : store_by_pieces (dest_mem, tree_to_uhwi (len),
4730 : builtin_memset_gen_str, val_rtx, dest_align,
4731 : true, RETURN_BEGIN);
4732 : }
4733 1016 : else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4734 : dest_align, expected_align,
4735 : expected_size, min_size, max_size,
4736 : probable_max_size)
4737 1016 : && !try_store_by_multiple_pieces (dest_mem, len_rtx,
4738 : tree_ctz (len),
4739 : min_size, max_size,
4740 : val_rtx, 0,
4741 : dest_align))
4742 675 : goto do_libcall;
4743 :
4744 1080 : dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4745 1080 : dest_mem = convert_memory_address (ptr_mode, dest_mem);
4746 1080 : return dest_mem;
4747 : }
4748 :
4749 50798 : if (c)
4750 : {
4751 12377 : if (tree_fits_uhwi_p (len)
4752 12377 : && can_store_by_pieces (tree_to_uhwi (len),
4753 : builtin_memset_read_str, &c, dest_align,
4754 : true))
4755 4866 : store_by_pieces (dest_mem, tree_to_uhwi (len),
4756 : builtin_memset_read_str, &c, dest_align, true,
4757 : RETURN_BEGIN);
4758 9306 : else if (!set_storage_via_setmem (dest_mem, len_rtx,
4759 7511 : gen_int_mode (c, val_mode),
4760 : dest_align, expected_align,
4761 : expected_size, min_size, max_size,
4762 : probable_max_size)
4763 7511 : && !try_store_by_multiple_pieces (dest_mem, len_rtx,
4764 : tree_ctz (len),
4765 : min_size, max_size,
4766 : NULL_RTX, c,
4767 : dest_align))
4768 5716 : goto do_libcall;
4769 :
4770 6661 : dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4771 6661 : dest_mem = convert_memory_address (ptr_mode, dest_mem);
4772 6661 : return dest_mem;
4773 : }
4774 :
4775 38421 : set_mem_align (dest_mem, dest_align);
4776 76842 : dest_addr = clear_storage_hints (dest_mem, len_rtx,
4777 38421 : CALL_EXPR_TAILCALL (orig_exp)
4778 : ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4779 : expected_align, expected_size,
4780 : min_size, max_size,
4781 : probable_max_size, tree_ctz (len));
4782 :
4783 38421 : if (dest_addr == 0)
4784 : {
4785 28936 : dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4786 28936 : dest_addr = convert_memory_address (ptr_mode, dest_addr);
4787 : }
4788 :
4789 : return dest_addr;
4790 :
4791 6391 : do_libcall:
4792 6391 : fndecl = get_callee_fndecl (orig_exp);
4793 6391 : fcode = DECL_FUNCTION_CODE (fndecl);
4794 6391 : if (fcode == BUILT_IN_MEMSET)
4795 6391 : fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4796 : dest, val, len);
4797 0 : else if (fcode == BUILT_IN_BZERO)
4798 0 : fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4799 : dest, len);
4800 : else
4801 0 : gcc_unreachable ();
4802 6391 : gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4803 6391 : CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4804 6391 : return expand_call (fn, target, target == const0_rtx);
4805 : }
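/* Illustrative sketch, not GCC code: for a non-constant fill value,
   builtin_memset_gen_str materializes at the RTL level a word whose
   bytes all equal the fill byte (the probe above with C set to 1
   tests whether such all-equal-byte coefficients are storable by
   pieces).  A C analogue of that broadcast:  */

#include <stdint.h>

static uint64_t
broadcast_fill_byte (unsigned char c)
{
  /* Multiplying by 0x0101...01 replicates C into every byte.  */
  return (uint64_t) c * 0x0101010101010101ULL;
}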
4806 :
4807 : /* Expand expression EXP, which is a call to the bzero builtin. Return
4808 :    NULL_RTX if we failed; the caller should emit a normal call.  */
4809 :
4810 : static rtx
4811 0 : expand_builtin_bzero (tree exp)
4812 : {
4813 0 : if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4814 : return NULL_RTX;
4815 :
4816 0 : tree dest = CALL_EXPR_ARG (exp, 0);
4817 0 : tree size = CALL_EXPR_ARG (exp, 1);
4818 :
4819 : /* New argument list transforming bzero(ptr x, int y) to
4820 : memset(ptr x, int 0, size_t y). This is done this way
4821 :      so that if it isn't expanded inline, we fall back to
4822 : calling bzero instead of memset. */
4823 :
4824 0 : location_t loc = EXPR_LOCATION (exp);
4825 :
4826 0 : return expand_builtin_memset_args (dest, integer_zero_node,
4827 : fold_convert_loc (loc,
4828 : size_type_node, size),
4829 0 : const0_rtx, VOIDmode, exp);
4830 : }
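/* Usage-level view, for illustration only: the transformation above
   treats the two calls below as equivalent, while still falling back
   to the bzero library function if inline expansion fails.  */

#include <string.h>
#include <strings.h>

static void
clear_both_ways (char *p, size_t n)
{
  bzero (p, n);		/* Expanded as...  */
  memset (p, 0, n);	/* ...this, with a zero fill value.  */
}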
4831 :
4832 : /* Try to expand cmpstr operation ICODE with the given operands.
4833 : Return the result rtx on success, otherwise return null. */
4834 :
4835 : static rtx
4836 0 : expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4837 : HOST_WIDE_INT align)
4838 : {
4839 0 : machine_mode insn_mode = insn_data[icode].operand[0].mode;
4840 :
4841 0 : if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4842 : target = NULL_RTX;
4843 :
4844 0 : class expand_operand ops[4];
4845 0 : create_output_operand (&ops[0], target, insn_mode);
4846 0 : create_fixed_operand (&ops[1], arg1_rtx);
4847 0 : create_fixed_operand (&ops[2], arg2_rtx);
4848 0 : create_integer_operand (&ops[3], align);
4849 0 : if (maybe_expand_insn (icode, 4, ops))
4850 0 : return ops[0].value;
4851 : return NULL_RTX;
4852 : }
4853 :
4854 : /* Expand expression EXP, which is a call to the memcmp built-in function.
4855 : Return NULL_RTX if we failed and the caller should emit a normal call,
4856 : otherwise try to get the result in TARGET, if convenient.
4857 : RESULT_EQ is true if we can relax the returned value to be either zero
4858 : or nonzero, without caring about the sign. */
4859 :
4860 : static rtx
4861 104242 : expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
4862 : {
4863 104242 : if (!validate_arglist (exp,
4864 : POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4865 : return NULL_RTX;
4866 :
4867 104238 : tree arg1 = CALL_EXPR_ARG (exp, 0);
4868 104238 : tree arg2 = CALL_EXPR_ARG (exp, 1);
4869 104238 : tree len = CALL_EXPR_ARG (exp, 2);
4870 :
4871 : /* Due to the performance benefit, always inline the calls first
4872 : when result_eq is false. */
4873 104238 : rtx result = NULL_RTX;
4874 104238 : enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
4875 104238 : if (!result_eq && fcode != BUILT_IN_BCMP)
4876 : {
4877 14517 : result = inline_expand_builtin_bytecmp (exp, target);
4878 14517 : if (result)
4879 : return result;
4880 : }
4881 :
4882 104232 : machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4883 104232 : location_t loc = EXPR_LOCATION (exp);
4884 :
4885 104232 : unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4886 104232 : unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4887 :
4888 : /* If we don't have POINTER_TYPE, call the function. */
4889 104232 : if (arg1_align == 0 || arg2_align == 0)
4890 : return NULL_RTX;
4891 :
4892 104232 : rtx arg1_rtx = get_memory_rtx (arg1, len);
4893 104232 : rtx arg2_rtx = get_memory_rtx (arg2, len);
4894 104232 : rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4895 :
4896 : /* Set MEM_SIZE as appropriate. */
4897 104232 : if (CONST_INT_P (len_rtx))
4898 : {
4899 80052 : set_mem_size (arg1_rtx, INTVAL (len_rtx));
4900 80052 : set_mem_size (arg2_rtx, INTVAL (len_rtx));
4901 : }
4902 :
4903 104232 : by_pieces_constfn constfn = NULL;
4904 :
4905 :   /* Try to get the byte representation of the constant that ARG2 (or,
4906 :      only when the function's result is used for equality to zero, ARG1)
4907 :      points to, with its byte size in NBYTES.  */
4908 104232 : unsigned HOST_WIDE_INT nbytes;
4909 104232 : const char *rep = getbyterep (arg2, &nbytes);
4910 104232 : if (result_eq && rep == NULL)
4911 : {
4912 : /* For equality to zero the arguments are interchangeable. */
4913 62815 : rep = getbyterep (arg1, &nbytes);
4914 62815 : if (rep != NULL)
4915 : std::swap (arg1_rtx, arg2_rtx);
4916 : }
4917 :
4918 : /* If the function's constant bound LEN_RTX is less than or equal
4919 : to the byte size of the representation of the constant argument,
4920 : and if block move would be done by pieces, we can avoid loading
4921 :      the bytes from memory and compare directly against the constant.  */
4922 41417 : if (rep
4923 28915 : && CONST_INT_P (len_rtx)
4924 28782 : && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes)
4925 104232 : constfn = builtin_memcpy_read_str;
4926 :
4927 208464 : result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
4928 104232 : TREE_TYPE (len), target,
4929 : result_eq, constfn,
4930 : const_cast<char *> (rep),
4931 : tree_ctz (len));
4932 :
4933 104232 : if (result)
4934 : {
4935 : /* Return the value in the proper mode for this function. */
4936 67803 : if (GET_MODE (result) == mode)
4937 : return result;
4938 :
4939 0 : if (target != 0)
4940 : {
4941 0 : convert_move (target, result, 0);
4942 0 : return target;
4943 : }
4944 :
4945 0 : return convert_to_mode (mode, result, 0);
4946 : }
4947 :
4948 : return NULL_RTX;
4949 : }
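/* Illustrative sketch, not GCC code: RESULT_EQ permits comparing
   wider chunks, because only zero/nonzero matters, not the sign of
   the first differing byte.  */

#include <stdint.h>
#include <string.h>

static int
bytes_differ8 (const void *a, const void *b)
{
  uint64_t x, y;
  memcpy (&x, a, sizeof x);	/* By-pieces load of 8 bytes.  */
  memcpy (&y, b, sizeof y);
  return x != y;		/* Nonzero iff some byte differs.  */
}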
4950 :
4951 : /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4952 :    if we failed; the caller should emit a normal call.  Otherwise try to get
4953 : the result in TARGET, if convenient. */
4954 :
4955 : static rtx
4956 127943 : expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4957 : {
4958 127943 : if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4959 : return NULL_RTX;
4960 :
4961 127928 : tree arg1 = CALL_EXPR_ARG (exp, 0);
4962 127928 : tree arg2 = CALL_EXPR_ARG (exp, 1);
4963 :
4964 : /* Due to the performance benefit, always inline the calls first. */
4965 127928 : rtx result = NULL_RTX;
4966 127928 : result = inline_expand_builtin_bytecmp (exp, target);
4967 127928 : if (result)
4968 : return result;
4969 :
4970 127529 : insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4971 127529 : insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4972 127529 : if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
4973 : return NULL_RTX;
4974 :
4975 127529 : unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4976 127529 : unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4977 :
4978 : /* If we don't have POINTER_TYPE, call the function. */
4979 127529 : if (arg1_align == 0 || arg2_align == 0)
4980 : return NULL_RTX;
4981 :
4982 : /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4983 127529 : arg1 = builtin_save_expr (arg1);
4984 127529 : arg2 = builtin_save_expr (arg2);
4985 :
4986 127529 : rtx arg1_rtx = get_memory_rtx (arg1, NULL);
4987 127529 : rtx arg2_rtx = get_memory_rtx (arg2, NULL);
4988 :
4989 : /* Try to call cmpstrsi. */
4990 127529 : if (cmpstr_icode != CODE_FOR_nothing)
4991 0 : result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4992 0 : MIN (arg1_align, arg2_align));
4993 :
4994 : /* Try to determine at least one length and call cmpstrnsi. */
4995 127529 : if (!result && cmpstrn_icode != CODE_FOR_nothing)
4996 : {
4997 127529 : tree len;
4998 127529 : rtx arg3_rtx;
4999 :
5000 127529 : tree len1 = c_strlen (arg1, 1);
5001 127529 : tree len2 = c_strlen (arg2, 1);
5002 :
5003 127529 : if (len1)
5004 224 : len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
5005 127529 : if (len2)
5006 125924 : len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
5007 :
5008 : /* If we don't have a constant length for the first, use the length
5009 : of the second, if we know it. We don't require a constant for
5010 : this case; some cost analysis could be done if both are available
5011 : but neither is constant. For now, assume they're equally cheap,
5012 : unless one has side effects. If both strings have constant lengths,
5013 : use the smaller. */
5014 :
5015 127529 : if (!len1)
5016 : len = len2;
5017 224 : else if (!len2)
5018 : len = len1;
5019 17 : else if (TREE_SIDE_EFFECTS (len1))
5020 : len = len2;
5021 17 : else if (TREE_SIDE_EFFECTS (len2))
5022 : len = len1;
5023 17 : else if (TREE_CODE (len1) != INTEGER_CST)
5024 : len = len2;
5025 17 : else if (TREE_CODE (len2) != INTEGER_CST)
5026 : len = len1;
5027 10 : else if (tree_int_cst_lt (len1, len2))
5028 : len = len1;
5029 : else
5030 127311 : len = len2;
5031 :
5032 : /* If both arguments have side effects, we cannot optimize. */
5033 127529 : if (len && !TREE_SIDE_EFFECTS (len))
5034 : {
5035 126131 : arg3_rtx = expand_normal (len);
5036 126131 : result = expand_cmpstrn_or_cmpmem
5037 126131 : (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
5038 126131 : arg3_rtx, MIN (arg1_align, arg2_align));
5039 : }
5040 : }
5041 :
5042 127529 : tree fndecl = get_callee_fndecl (exp);
5043 127529 : if (result)
5044 : {
5045 : /* Return the value in the proper mode for this function. */
5046 52 : machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
5047 52 : if (GET_MODE (result) == mode)
5048 : return result;
5049 0 : if (target == 0)
5050 0 : return convert_to_mode (mode, result, 0);
5051 0 : convert_move (target, result, 0);
5052 0 : return target;
5053 : }
5054 :
5055 : /* Expand the library call ourselves using a stabilized argument
5056 : list to avoid re-evaluating the function's arguments twice. */
5057 127477 : tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
5058 127477 : copy_warning (fn, exp);
5059 127477 : gcc_assert (TREE_CODE (fn) == CALL_EXPR);
5060 127477 : CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5061 127477 : return expand_call (fn, target, target == const0_rtx);
5062 : }
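/* Usage-level view, for illustration only: when one argument is a
   string of known length, strcmp need inspect at most that length
   plus one byte (the terminating NUL), which is why LEN1/LEN2 above
   have 1 added before the smaller bound is chosen.  */

#include <string.h>

static int
cmp_against_literal (const char *s)
{
  /* sizeof "hi" == 3 counts the NUL, so this equals strcmp (s, "hi").  */
  return strncmp (s, "hi", sizeof "hi");
}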
5063 :
5064 : /* Expand expression EXP, which is a call to the strncmp builtin. Return
5065 :    NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
5066 : try to get the result in TARGET, if convenient. */
5067 :
5068 : static rtx
5069 2024 : expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
5070 : ATTRIBUTE_UNUSED machine_mode mode)
5071 : {
5072 2024 : if (!validate_arglist (exp,
5073 : POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5074 : return NULL_RTX;
5075 :
5076 2018 : tree arg1 = CALL_EXPR_ARG (exp, 0);
5077 2018 : tree arg2 = CALL_EXPR_ARG (exp, 1);
5078 2018 : tree arg3 = CALL_EXPR_ARG (exp, 2);
5079 :
5080 2018 : location_t loc = EXPR_LOCATION (exp);
5081 2018 : tree len1 = c_strlen (arg1, 1);
5082 2018 : tree len2 = c_strlen (arg2, 1);
5083 :
5084 : /* Due to the performance benefit, always inline the calls first. */
5085 2018 : rtx result = NULL_RTX;
5086 2018 : result = inline_expand_builtin_bytecmp (exp, target);
5087 2018 : if (result)
5088 : return result;
5089 :
5090 : /* If c_strlen can determine an expression for one of the string
5091 : lengths, and it doesn't have side effects, then emit cmpstrnsi
5092 : using length MIN(strlen(string)+1, arg3). */
5093 1800 : insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
5094 1800 : if (cmpstrn_icode == CODE_FOR_nothing)
5095 : return NULL_RTX;
5096 :
5097 1800 : tree len;
5098 :
5099 1800 : unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
5100 1800 : unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
5101 :
5102 1800 : if (len1)
5103 160 : len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
5104 1800 : if (len2)
5105 671 : len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
5106 :
5107 1800 : tree len3 = fold_convert_loc (loc, sizetype, arg3);
5108 :
5109 : /* If we don't have a constant length for the first, use the length
5110 : of the second, if we know it. If neither string is constant length,
5111 : use the given length argument. We don't require a constant for
5112 : this case; some cost analysis could be done if both are available
5113 : but neither is constant. For now, assume they're equally cheap,
5114 : unless one has side effects. If both strings have constant lengths,
5115 : use the smaller. */
5116 :
5117 1800 : if (!len1 && !len2)
5118 : len = len3;
5119 714 : else if (!len1)
5120 : len = len2;
5121 160 : else if (!len2)
5122 : len = len1;
5123 117 : else if (TREE_SIDE_EFFECTS (len1))
5124 : len = len2;
5125 117 : else if (TREE_SIDE_EFFECTS (len2))
5126 : len = len1;
5127 117 : else if (TREE_CODE (len1) != INTEGER_CST)
5128 : len = len2;
5129 117 : else if (TREE_CODE (len2) != INTEGER_CST)
5130 : len = len1;
5131 117 : else if (tree_int_cst_lt (len1, len2))
5132 : len = len1;
5133 : else
5134 555 : len = len2;
5135 :
5136 : /* If we are not using the given length, we must incorporate it here.
5137 : The actual new length parameter will be MIN(len,arg3) in this case. */
5138 1800 : if (len != len3)
5139 : {
5140 714 : len = fold_convert_loc (loc, sizetype, len);
5141 714 : len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
5142 : }
5143 1800 : rtx arg1_rtx = get_memory_rtx (arg1, len);
5144 1800 : rtx arg2_rtx = get_memory_rtx (arg2, len);
5145 1800 : rtx arg3_rtx = expand_normal (len);
5146 1800 : result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
5147 1800 : arg2_rtx, TREE_TYPE (len), arg3_rtx,
5148 1800 : MIN (arg1_align, arg2_align));
5149 :
5150 1800 : tree fndecl = get_callee_fndecl (exp);
5151 1800 : if (result)
5152 : {
5153 : /* Return the value in the proper mode for this function. */
5154 17 : mode = TYPE_MODE (TREE_TYPE (exp));
5155 17 : if (GET_MODE (result) == mode)
5156 : return result;
5157 0 : if (target == 0)
5158 0 : return convert_to_mode (mode, result, 0);
5159 0 : convert_move (target, result, 0);
5160 0 : return target;
5161 : }
5162 :
5163 : /* Expand the library call ourselves using a stabilized argument
5164 : list to avoid re-evaluating the function's arguments twice. */
5165 1783 : tree call = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
5166 1783 : copy_warning (call, exp);
5167 1783 : gcc_assert (TREE_CODE (call) == CALL_EXPR);
5168 1783 : CALL_EXPR_TAILCALL (call) = CALL_EXPR_TAILCALL (exp);
5169 1783 : return expand_call (call, target, target == const0_rtx);
5170 : }
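/* Usage-level view, for illustration only: the effective bound chosen
   above is MIN (strlen (known_string) + 1, ARG3), since the comparison
   cannot proceed past the known string's NUL anyway.  */

#include <string.h>

static int
cmp_bounded (const char *s, size_t n)
{
  /* MIN (strlen ("abc") + 1, n); equivalent to strncmp (s, "abc", n).  */
  size_t bound = sizeof "abc" < n ? sizeof "abc" : n;
  return strncmp (s, "abc", bound);
}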
5171 :
5172 : /* Expand a call to __builtin_saveregs, generating the result in TARGET,
5173 : if that's convenient. */
5174 :
5175 : rtx
5176 0 : expand_builtin_saveregs (void)
5177 : {
5178 0 : rtx val;
5179 0 : rtx_insn *seq;
5180 :
5181 : /* Don't do __builtin_saveregs more than once in a function.
5182 : Save the result of the first call and reuse it. */
5183 0 : if (saveregs_value != 0)
5184 : return saveregs_value;
5185 :
5186 : /* When this function is called, it means that registers must be
5187 : saved on entry to this function. So we migrate the call to the
5188 : first insn of this function. */
5189 :
5190 0 : start_sequence ();
5191 :
5192 : /* Do whatever the machine needs done in this case. */
5193 0 : val = targetm.calls.expand_builtin_saveregs ();
5194 :
5195 0 : seq = end_sequence ();
5196 :
5197 0 : saveregs_value = val;
5198 :
5199 : /* Put the insns after the NOTE that starts the function. If this
5200 : is inside a start_sequence, make the outer-level insn chain current, so
5201 : the code is placed at the start of the function. */
5202 0 : push_topmost_sequence ();
5203 0 : emit_insn_after (seq, entry_of_function ());
5204 0 : pop_topmost_sequence ();
5205 :
5206 0 : return val;
5207 : }
5208 :
5209 : /* Expand a call to __builtin_next_arg. */
5210 :
5211 : static rtx
5212 21161 : expand_builtin_next_arg (void)
5213 : {
5214 : /* Checking arguments is already done in fold_builtin_next_arg
5215 : that must be called before this function. */
5216 21161 : return expand_binop (ptr_mode, add_optab,
5217 : crtl->args.internal_arg_pointer,
5218 : crtl->args.arg_offset_rtx,
5219 21161 : NULL_RTX, 0, OPTAB_LIB_WIDEN);
5220 : }
5221 :
5222 : /* Make it easier for the backends by protecting the valist argument
5223 : from multiple evaluations. */
5224 :
5225 : static tree
5226 21491 : stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
5227 : {
5228 21491 : tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
5229 :
5230 : /* The current way of determining the type of valist is completely
5231 : bogus. We should have the information on the va builtin instead. */
5232 21491 : if (!vatype)
5233 21367 : vatype = targetm.fn_abi_va_list (cfun->decl);
5234 :
5235 21491 : if (TREE_CODE (vatype) == ARRAY_TYPE)
5236 : {
5237 15829 : if (TREE_SIDE_EFFECTS (valist))
5238 0 : valist = save_expr (valist);
5239 :
5240 : /* For this case, the backends will be expecting a pointer to
5241 : vatype, but it's possible we've actually been given an array
5242 : (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
5243 : So fix it. */
5244 15829 : if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
5245 : {
5246 0 : tree p1 = build_pointer_type (TREE_TYPE (vatype));
5247 0 : valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
5248 : }
5249 : }
5250 : else
5251 : {
5252 5662 : tree pt = build_pointer_type (vatype);
5253 :
5254 5662 : if (! needs_lvalue)
5255 : {
5256 6 : if (! TREE_SIDE_EFFECTS (valist))
5257 : return valist;
5258 :
5259 0 : valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
5260 0 : TREE_SIDE_EFFECTS (valist) = 1;
5261 : }
5262 :
5263 5656 : if (TREE_SIDE_EFFECTS (valist))
5264 0 : valist = save_expr (valist);
5265 5656 : valist = fold_build2_loc (loc, MEM_REF,
5266 : vatype, valist, build_int_cst (pt, 0));
5267 : }
5268 :
5269 : return valist;
5270 : }
5271 :
5272 : /* The "standard" definition of va_list is void*. */
5273 :
5274 : tree
5275 0 : std_build_builtin_va_list (void)
5276 : {
5277 0 : return ptr_type_node;
5278 : }
5279 :
5280 : /* The "standard" abi va_list is va_list_type_node. */
5281 :
5282 : tree
5283 0 : std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
5284 : {
5285 0 : return va_list_type_node;
5286 : }
5287 :
5288 : /* The "standard" type of va_list is va_list_type_node. */
5289 :
5290 : tree
5291 502 : std_canonical_va_list_type (tree type)
5292 : {
5293 502 : tree wtype, htype;
5294 :
5295 502 : wtype = va_list_type_node;
5296 502 : htype = type;
5297 :
5298 502 : if (TREE_CODE (wtype) == ARRAY_TYPE)
5299 : {
5300 : /* If va_list is an array type, the argument may have decayed
5301 : to a pointer type, e.g. by being passed to another function.
5302 : In that case, unwrap both types so that we can compare the
5303 : underlying records. */
5304 0 : if (TREE_CODE (htype) == ARRAY_TYPE
5305 0 : || POINTER_TYPE_P (htype))
5306 : {
5307 0 : wtype = TREE_TYPE (wtype);
5308 0 : htype = TREE_TYPE (htype);
5309 : }
5310 : }
5311 502 : if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
5312 332 : return va_list_type_node;
5313 :
5314 : return NULL_TREE;
5315 : }
5316 :
5317 : /* The "standard" implementation of va_start: just assign `nextarg' to
5318 : the variable. */
5319 :
5320 : void
5321 5652 : std_expand_builtin_va_start (tree valist, rtx nextarg)
5322 : {
5323 5652 : rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
5324 5652 : convert_move (va_r, nextarg, 0);
5325 5652 : }
5326 :
5327 : /* Expand EXP, a call to __builtin_va_start. */
5328 :
5329 : static rtx
5330 21003 : expand_builtin_va_start (tree exp)
5331 : {
5332 21003 : rtx nextarg;
5333 21003 : tree valist;
5334 21003 : location_t loc = EXPR_LOCATION (exp);
5335 :
5336 21003 : if (call_expr_nargs (exp) < 2)
5337 : {
5338 0 : error_at (loc, "too few arguments to function %<va_start%>");
5339 0 : return const0_rtx;
5340 : }
5341 :
5342 21003 : if (fold_builtin_next_arg (exp, true))
5343 0 : return const0_rtx;
5344 :
5345 21003 : nextarg = expand_builtin_next_arg ();
5346 21003 : valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
5347 :
5348 21003 : if (targetm.expand_builtin_va_start)
5349 21003 : targetm.expand_builtin_va_start (valist, nextarg);
5350 : else
5351 0 : std_expand_builtin_va_start (valist, nextarg);
5352 :
5353 21003 : return const0_rtx;
5354 : }
5355 :
5356 : /* Expand EXP, a call to __builtin_va_end. */
5357 :
5358 : static rtx
5359 12121 : expand_builtin_va_end (tree exp)
5360 : {
5361 12121 : tree valist = CALL_EXPR_ARG (exp, 0);
5362 :
5363 : /* Evaluate for side effects, if needed. I hate macros that don't
5364 : do that. */
5365 12121 : if (TREE_SIDE_EFFECTS (valist))
5366 0 : expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
5367 :
5368 12121 : return const0_rtx;
5369 : }
5370 :
5371 : /* Expand EXP, a call to __builtin_va_copy. We do this as a
5372 : builtin rather than just as an assignment in stdarg.h because of the
5373 : nastiness of array-type va_list types. */
5374 :
5375 : static rtx
5376 244 : expand_builtin_va_copy (tree exp)
5377 : {
5378 244 : tree dst, src, t;
5379 244 : location_t loc = EXPR_LOCATION (exp);
5380 :
5381 244 : dst = CALL_EXPR_ARG (exp, 0);
5382 244 : src = CALL_EXPR_ARG (exp, 1);
5383 :
5384 244 : dst = stabilize_va_list_loc (loc, dst, 1);
5385 244 : src = stabilize_va_list_loc (loc, src, 0);
5386 :
5387 244 : gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
5388 :
5389 244 : if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
5390 : {
5391 0 : t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
5392 0 : TREE_SIDE_EFFECTS (t) = 1;
5393 0 : expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5394 : }
5395 : else
5396 : {
5397 244 : rtx dstb, srcb, size;
5398 :
5399 : /* Evaluate to pointers. */
5400 244 : dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
5401 244 : srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
5402 244 : size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
5403 : NULL_RTX, VOIDmode, EXPAND_NORMAL);
5404 :
5405 244 : dstb = convert_memory_address (Pmode, dstb);
5406 244 : srcb = convert_memory_address (Pmode, srcb);
5407 :
5408 : /* "Dereference" to BLKmode memories. */
5409 244 : dstb = gen_rtx_MEM (BLKmode, dstb);
5410 244 : set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5411 244 : set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5412 244 : srcb = gen_rtx_MEM (BLKmode, srcb);
5413 244 : set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5414 244 : set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5415 :
5416 : /* Copy. */
5417 244 : emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5418 : }
5419 :
5420 244 : return const0_rtx;
5421 : }
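/* Usage example, for illustration only: va_copy must be used instead
   of plain assignment precisely because va_list may be an array type,
   the nastiness this builtin handles with a block copy above.  */

#include <stdarg.h>

static int
sum_twice (int n, ...)
{
  va_list ap, ap2;
  int total = 0;
  va_start (ap, n);
  va_copy (ap2, ap);		/* Expanded via the builtin above.  */
  for (int i = 0; i < n; i++)
    total += va_arg (ap, int);
  for (int i = 0; i < n; i++)
    total += va_arg (ap2, int);	/* Re-walks the same arguments.  */
  va_end (ap2);
  va_end (ap);
  return total;
}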
5422 :
5423 : /* Expand a call to one of the builtin functions __builtin_frame_address or
5424 : __builtin_return_address. */
5425 :
5426 : static rtx
5427 15901 : expand_builtin_frame_address (tree fndecl, tree exp)
5428 : {
5429 : /* The argument must be a nonnegative integer constant.
5430 : It counts the number of frames to scan up the stack.
5431 : The value is either the frame pointer value or the return
5432 : address saved in that frame. */
5433 15901 : if (call_expr_nargs (exp) == 0)
5434 : /* Warning about missing arg was already issued. */
5435 0 : return const0_rtx;
5436 15901 : else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
5437 : {
5438 0 : error ("invalid argument to %qD", fndecl);
5439 0 : return const0_rtx;
5440 : }
5441 : else
5442 : {
5443 : /* Number of frames to scan up the stack. */
5444 15901 : unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
5445 :
5446 15901 : rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
5447 :
5448 : /* Some ports cannot access arbitrary stack frames. */
5449 15901 : if (tem == NULL)
5450 : {
5451 0 : warning (0, "unsupported argument to %qD", fndecl);
5452 0 : return const0_rtx;
5453 : }
5454 :
5455 15901 : if (count)
5456 : {
5457 : /* Warn since no effort is made to ensure that any frame
5458 : beyond the current one exists or can be safely reached. */
5459 932 : warning (OPT_Wframe_address, "calling %qD with "
5460 : "a nonzero argument is unsafe", fndecl);
5461 : }
5462 :
5463 : /* For __builtin_frame_address, return what we've got. */
5464 15901 : if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5465 : return tem;
5466 :
5467 5150 : if (!REG_P (tem)
5468 5150 : && ! CONSTANT_P (tem))
5469 5150 : tem = copy_addr_to_reg (tem);
5470 5150 : return tem;
5471 : }
5472 : }
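/* Usage example, for illustration only: level 0 is the only argument
   guaranteed safe; nonzero levels draw the -Wframe-address warning
   emitted above.  */

static void *
my_frame (void)
{
  return __builtin_frame_address (0);
}

static void *
my_return_address (void)
{
  return __builtin_return_address (0);
}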
5473 :
5474 : #if ! STACK_GROWS_DOWNWARD
5475 : # define STACK_TOPS GT
5476 : #else
5477 : # define STACK_TOPS LT
5478 : #endif
5479 :
5480 : #ifdef POINTERS_EXTEND_UNSIGNED
5481 : # define STACK_UNSIGNED POINTERS_EXTEND_UNSIGNED
5482 : #else
5483 : # define STACK_UNSIGNED true
5484 : #endif
5485 :
5486 : /* Expand a call to builtin function __builtin_stack_address. */
5487 :
5488 : static rtx
5489 3427 : expand_builtin_stack_address ()
5490 : {
5491 3427 : rtx ret = convert_to_mode (ptr_mode, copy_to_reg (stack_pointer_rtx),
5492 : STACK_UNSIGNED);
5493 :
5494 : #ifdef STACK_ADDRESS_OFFSET
5495 : /* Unbias the stack pointer, bringing it to the boundary between the
5496 : stack area claimed by the active function calling this builtin,
5497 : and stack ranges that could get clobbered if it called another
5498 : function. It should NOT encompass any stack red zone, that is
5499 :      function.  It should NOT encompass any stack red zone, which is
5500 :
5501 : On SPARC, the register save area is *not* considered active or
5502 : used by the active function, but rather as akin to the area in
5503 : which call-preserved registers are saved by callees. This
5504 : enables __strub_leave to clear what would otherwise overlap with
5505 : its own register save area.
5506 :
5507 : If the address is computed too high or too low, parts of a stack
5508 : range that should be scrubbed may be left unscrubbed, scrubbing
5509 : may corrupt active portions of the stack frame, and stack ranges
5510 : may be doubly-scrubbed by caller and callee.
5511 :
5512 : In order for it to be just right, the area delimited by
5513 : @code{__builtin_stack_address} and @code{__builtin_frame_address
5514 : (0)} should encompass caller's registers saved by the function,
5515 : local on-stack variables and @code{alloca} stack areas.
5516 : Accumulated outgoing on-stack arguments, preallocated as part of
5517 : a function's own prologue, are to be regarded as part of the
5518 : (caller) function's active area as well, whereas those pushed or
5519 : allocated temporarily for a call are regarded as part of the
5520 : callee's stack range, rather than the caller's. */
5521 : ret = plus_constant (ptr_mode, ret, STACK_ADDRESS_OFFSET);
5522 : #endif
5523 :
5524 3427 : return force_reg (ptr_mode, ret);
5525 : }
5526 :
5527 : /* Expand a call to builtin function __builtin_strub_enter. */
5528 :
5529 : static rtx
5530 2159 : expand_builtin_strub_enter (tree exp)
5531 : {
5532 2159 : if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5533 : return NULL_RTX;
5534 :
5535 2159 : if (optimize < 1 || flag_no_inline)
5536 : return NULL_RTX;
5537 :
5538 1515 : rtx stktop = expand_builtin_stack_address ();
5539 :
5540 1515 : tree wmptr = CALL_EXPR_ARG (exp, 0);
5541 1515 : tree wmtype = TREE_TYPE (TREE_TYPE (wmptr));
5542 1515 : tree wmtree = fold_build2 (MEM_REF, wmtype, wmptr,
5543 : build_int_cst (TREE_TYPE (wmptr), 0));
5544 1515 : rtx wmark = expand_expr (wmtree, NULL_RTX, ptr_mode, EXPAND_MEMORY);
5545 :
5546 1515 : emit_move_insn (wmark, stktop);
5547 :
5548 1515 : return const0_rtx;
5549 : }
5550 :
5551 : /* Expand a call to builtin function __builtin_strub_update. */
5552 :
5553 : static rtx
5554 1072 : expand_builtin_strub_update (tree exp)
5555 : {
5556 1072 : if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5557 : return NULL_RTX;
5558 :
5559 1072 : if (optimize < 2 || flag_no_inline)
5560 : return NULL_RTX;
5561 :
5562 652 : rtx stktop = expand_builtin_stack_address ();
5563 :
5564 : #ifdef RED_ZONE_SIZE
5565 : /* Here's how the strub enter, update and leave functions deal with red zones.
5566 :
5567 : If it weren't for red zones, update, called from within a strub context,
5568 : would bump the watermark to the top of the stack. Enter and leave, running
5569 : in the caller, would use the caller's top of stack address both to
5570 : initialize the watermark passed to the callee, and to start strubbing the
5571 : stack afterwards.
5572 :
5573 : Ideally, we'd update the watermark so as to cover the used amount of red
5574 : zone, and strub starting at the caller's other end of the (presumably
5575 : unused) red zone. Normally, only leaf functions use the red zone, but at
5576 : this point we can't tell whether a function is a leaf, nor can we tell how
5577 : much of the red zone it uses. Furthermore, some strub contexts may have
5578 : been inlined so that update and leave are called from the same stack frame,
5579 : and the strub builtins may all have been inlined, turning a strub function
5580 : into a leaf.
5581 :
5582 : So cleaning the range from the caller's stack pointer (one end of the red
5583 : zone) to the (potentially inlined) callee's (other end of the) red zone
5584 : could scribble over the caller's own red zone.
5585 :
5586 : We avoid this possibility by arranging for callers that are strub contexts
5587 : to use their own watermark as the strub starting point. So, if A calls B,
5588 : and B calls C, B will tell A to strub up to the end of B's red zone, and
5589 : will strub itself only the part of C's stack frame and red zone that
5590 : doesn't overlap with B's. With that, we don't need to know who's leaf and
5591 : who isn't: inlined calls will shrink their strub window to zero, each
5592 : remaining call will strub some portion of the stack, and eventually the
5593 : strub context will return to a caller that isn't a strub context itself,
5594 : that will therefore use its own stack pointer as the strub starting point.
5595 : It's not a leaf, because strub contexts can't be inlined into non-strub
5596 : contexts, so it doesn't use the red zone, and it will therefore correctly
5597 : strub up the callee's stack frame up to the end of the callee's red zone.
5598 : Neat! */
5599 652 : if (true /* (flags_from_decl_or_type (current_function_decl) & ECF_LEAF) */)
5600 : {
5601 652 : poly_int64 red_zone_size = RED_ZONE_SIZE;
5602 : #if STACK_GROWS_DOWNWARD
5603 652 : red_zone_size = -red_zone_size;
5604 : #endif
5605 652 : stktop = plus_constant (ptr_mode, stktop, red_zone_size);
5606 652 : stktop = force_reg (ptr_mode, stktop);
5607 : }
5608 : #endif
5609 :
5610 652 : tree wmptr = CALL_EXPR_ARG (exp, 0);
5611 652 : tree wmtype = TREE_TYPE (TREE_TYPE (wmptr));
5612 652 : tree wmtree = fold_build2 (MEM_REF, wmtype, wmptr,
5613 : build_int_cst (TREE_TYPE (wmptr), 0));
5614 652 : rtx wmark = expand_expr (wmtree, NULL_RTX, ptr_mode, EXPAND_MEMORY);
5615 :
5616 652 : rtx wmarkr = force_reg (ptr_mode, wmark);
5617 :
5618 652 : rtx_code_label *lab = gen_label_rtx ();
5619 652 : do_compare_rtx_and_jump (stktop, wmarkr, STACK_TOPS, STACK_UNSIGNED,
5620 : ptr_mode, NULL_RTX, lab, NULL,
5621 : profile_probability::very_likely ());
5622 652 : emit_move_insn (wmark, stktop);
5623 :
5624 : /* If this is an inlined strub function, also bump the watermark for the
5625 : enclosing function. This avoids a problem with the following scenario: A
5626 : calls B and B calls C, and both B and C get inlined into A. B allocates
5627 : temporary stack space before calling C. If we don't update A's watermark,
5628 : we may use an outdated baseline for the post-C strub_leave, erasing B's
5629 : temporary stack allocation. We only need this if we're fully expanding
5630 : strub_leave inline. */
5631 652 : tree xwmptr = (optimize > 2
5632 652 : ? strub_watermark_parm (current_function_decl)
5633 : : wmptr);
5634 652 : if (wmptr != xwmptr)
5635 : {
5636 156 : wmptr = xwmptr;
5637 156 : wmtype = TREE_TYPE (TREE_TYPE (wmptr));
5638 156 : wmtree = fold_build2 (MEM_REF, wmtype, wmptr,
5639 : build_int_cst (TREE_TYPE (wmptr), 0));
5640 156 : wmark = expand_expr (wmtree, NULL_RTX, ptr_mode, EXPAND_MEMORY);
5641 156 : wmarkr = force_reg (ptr_mode, wmark);
5642 :
5643 156 : do_compare_rtx_and_jump (stktop, wmarkr, STACK_TOPS, STACK_UNSIGNED,
5644 : ptr_mode, NULL_RTX, lab, NULL,
5645 : profile_probability::very_likely ());
5646 156 : emit_move_insn (wmark, stktop);
5647 : }
5648 :
5649 652 : emit_label (lab);
5650 :
5651 652 : return const0_rtx;
5652 : }
5653 :
5654 :
5655 : /* Expand a call to builtin function __builtin_strub_leave. */
5656 :
5657 : static rtx
5658 2729 : expand_builtin_strub_leave (tree exp)
5659 : {
5660 2729 : if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5661 : return NULL_RTX;
5662 :
5663 2729 : if (optimize < 2 || optimize_size || flag_no_inline)
5664 : return NULL_RTX;
5665 :
5666 1229 : rtx stktop = NULL_RTX;
5667 :
5668 1229 : if (tree wmptr = (optimize
5669 1229 : ? strub_watermark_parm (current_function_decl)
5670 : : NULL_TREE))
5671 : {
5672 509 : tree wmtype = TREE_TYPE (TREE_TYPE (wmptr));
5673 509 : tree wmtree = fold_build2 (MEM_REF, wmtype, wmptr,
5674 : build_int_cst (TREE_TYPE (wmptr), 0));
5675 509 : rtx wmark = expand_expr (wmtree, NULL_RTX, ptr_mode, EXPAND_MEMORY);
5676 509 : stktop = force_reg (ptr_mode, wmark);
5677 : }
5678 :
5679 509 : if (!stktop)
5680 720 : stktop = expand_builtin_stack_address ();
5681 :
5682 1229 : tree wmptr = CALL_EXPR_ARG (exp, 0);
5683 1229 : tree wmtype = TREE_TYPE (TREE_TYPE (wmptr));
5684 1229 : tree wmtree = fold_build2 (MEM_REF, wmtype, wmptr,
5685 : build_int_cst (TREE_TYPE (wmptr), 0));
5686 1229 : rtx wmark = expand_expr (wmtree, NULL_RTX, ptr_mode, EXPAND_MEMORY);
5687 :
5688 1229 : rtx wmarkr = force_reg (ptr_mode, wmark);
5689 :
5690 : #if ! STACK_GROWS_DOWNWARD
5691 : rtx base = stktop;
5692 : rtx end = wmarkr;
5693 : #else
5694 1229 : rtx base = wmarkr;
5695 1229 : rtx end = stktop;
5696 : #endif
5697 :
5698 : /* We're going to modify it, so make sure it's not e.g. the stack pointer. */
5699 1229 : base = copy_to_reg (base);
5700 :
5701 1229 : rtx_code_label *done = gen_label_rtx ();
5702 1229 : do_compare_rtx_and_jump (base, end, LT, STACK_UNSIGNED,
5703 : ptr_mode, NULL_RTX, done, NULL,
5704 : profile_probability::very_likely ());
5705 :
5706 1229 : if (optimize < 3)
5707 909 : expand_call (exp, NULL_RTX, true);
5708 : else
5709 : {
5710 :       /* Ok, now we've determined we want to clear the block inline, so
5711 : addresses to Pmode, as needed to dereference them to access ptr_mode
5712 : memory locations, so that we don't have to convert anything within the
5713 : loop. */
5714 320 : base = memory_address (ptr_mode, base);
5715 320 : end = memory_address (ptr_mode, end);
5716 :
5717 320 : rtx zero = force_operand (const0_rtx, NULL_RTX);
5718 320 : int ulen = GET_MODE_SIZE (ptr_mode);
5719 :
5720 : /* ??? It would be nice to use setmem or similar patterns here,
5721 : but they do not necessarily obey the stack growth direction,
5722 : which has security implications. We also have to avoid calls
5723 : (memset, bzero or any machine-specific ones), which are
5724 : likely unsafe here (see TARGET_STRUB_MAY_USE_MEMSET). */
5725 : #if ! STACK_GROWS_DOWNWARD
5726 : rtx incr = plus_constant (Pmode, base, ulen);
5727 : rtx dstm = gen_rtx_MEM (ptr_mode, base);
5728 :
5729 : rtx_code_label *loop = gen_label_rtx ();
5730 : emit_label (loop);
5731 : emit_move_insn (dstm, zero);
5732 : emit_move_insn (base, force_operand (incr, NULL_RTX));
5733 : #else
5734 320 : rtx decr = plus_constant (Pmode, end, -ulen);
5735 320 : rtx dstm = gen_rtx_MEM (ptr_mode, end);
5736 :
5737 320 : rtx_code_label *loop = gen_label_rtx ();
5738 320 : emit_label (loop);
5739 320 : emit_move_insn (end, force_operand (decr, NULL_RTX));
5740 320 : emit_move_insn (dstm, zero);
5741 : #endif
5742 640 : do_compare_rtx_and_jump (base, end, LT, STACK_UNSIGNED,
5743 320 : Pmode, NULL_RTX, NULL, loop,
5744 : profile_probability::very_likely ());
5745 : }
5746 :
5747 1229 : emit_label (done);
5748 :
5749 1229 : return const0_rtx;
5750 : }
5751 :
5752 : /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
5753 : failed and the caller should emit a normal call. */
5754 :
5755 : static rtx
5756 27713 : expand_builtin_alloca (tree exp)
5757 : {
5758 27713 : rtx op0;
5759 27713 : rtx result;
5760 27713 : unsigned int align;
5761 27713 : tree fndecl = get_callee_fndecl (exp);
5762 27713 : HOST_WIDE_INT max_size;
5763 27713 : enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5764 27713 : bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
5765 27713 : bool valid_arglist
5766 : = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5767 27713 : ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
5768 : VOID_TYPE)
5769 : : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
5770 27711 : ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
5771 27713 : : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
5772 :
5773 27713 : if (!valid_arglist)
5774 : return NULL_RTX;
5775 :
5776 : /* Compute the argument. */
5777 27708 : op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5778 :
5779 : /* Compute the alignment. */
5780 51517 : align = (fcode == BUILT_IN_ALLOCA
5781 23810 : ? BIGGEST_ALIGNMENT
5782 3898 : : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
5783 :
5784 : /* Compute the maximum size. */
5785 2 : max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5786 27710 : ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
5787 : : -1);
5788 :
5789 : /* Allocate the desired space. If the allocation stems from the declaration
5790 : of a variable-sized object, it cannot accumulate. */
5791 27708 : result
5792 27708 : = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
5793 27708 : result = convert_memory_address (ptr_mode, result);
5794 :
5795 : /* Dynamic allocations for variables are recorded during gimplification. */
5796 27708 : if (!alloca_for_var && (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC))
5797 0 : record_dynamic_alloc (exp);
5798 :
5799 : return result;
5800 : }
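/* Usage example, for illustration only.  Per the expansion above, the
   plain form uses BIGGEST_ALIGNMENT, while __builtin_alloca_with_align
   takes an explicit constant alignment (expressed in bits, going by
   the BITS_PER_UNIT-free handling of CALL_EXPR_ARG 1 above).  */

#include <stddef.h>
#include <string.h>

static void
use_scratch (size_t n)
{
  char *buf = __builtin_alloca (n);	/* Released on function exit.  */
  memset (buf, 0, n);
}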
5801 :
5802 : /* Emit a call to __asan_allocas_unpoison call in EXP. Add to second argument
5803 : of the call virtual_stack_dynamic_rtx - stack_pointer_rtx, which is the
5804 : STACK_DYNAMIC_OFFSET value. See motivation for this in comment to
5805 : handle_builtin_stack_restore function. */
5806 :
5807 : static rtx
5808 212 : expand_asan_emit_allocas_unpoison (tree exp)
5809 : {
5810 212 : tree arg0 = CALL_EXPR_ARG (exp, 0);
5811 212 : tree arg1 = CALL_EXPR_ARG (exp, 1);
5812 212 : rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5813 212 : rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5814 212 : rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
5815 : stack_pointer_rtx, NULL_RTX, 0,
5816 : OPTAB_LIB_WIDEN);
5817 212 : off = convert_modes (ptr_mode, Pmode, off, 0);
5818 212 : bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
5819 : OPTAB_LIB_WIDEN);
5820 212 : rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
5821 212 : ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
5822 : top, ptr_mode, bot, ptr_mode);
5823 212 : return ret;
5824 : }
5825 :
5826 : /* Expand a call to bswap builtin in EXP.
5827 : Return NULL_RTX if a normal call should be emitted rather than expanding the
5828 : function in-line. If convenient, the result should be placed in TARGET.
5829 : SUBTARGET may be used as the target for computing one of EXP's operands. */
5830 :
5831 : static rtx
5832 1204 : expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
5833 : rtx subtarget)
5834 : {
5835 1204 : tree arg;
5836 1204 : rtx op0;
5837 :
5838 1204 : if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5839 : return NULL_RTX;
5840 :
5841 1204 : arg = CALL_EXPR_ARG (exp, 0);
5842 1204 : op0 = expand_expr (arg,
5843 51 : subtarget && GET_MODE (subtarget) == target_mode
5844 : ? subtarget : NULL_RTX,
5845 : target_mode, EXPAND_NORMAL);
5846 1204 : if (GET_MODE (op0) != target_mode)
5847 0 : op0 = convert_to_mode (target_mode, op0, 1);
5848 :
5849 1204 : target = expand_unop (target_mode, bswap_optab, op0, target, 1);
5850 :
5851 1204 : gcc_assert (target);
5852 :
5853 1204 : return convert_to_mode (target_mode, target, 1);
5854 : }
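/* Usage example, for illustration only: the bswap optab reverses the
   byte order of its operand.  */

#include <stdint.h>

static uint32_t
swap32 (uint32_t x)
{
  return __builtin_bswap32 (x);	/* 0x11223344 -> 0x44332211.  */
}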
5855 :
5856 : /* Expand a call to a unary builtin in EXP.
5857 : Return NULL_RTX if a normal call should be emitted rather than expanding the
5858 : function in-line. If convenient, the result should be placed in TARGET.
5859 : SUBTARGET may be used as the target for computing one of EXP's operands. */
5860 :
5861 : static rtx
5862 755 : expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
5863 : rtx subtarget, optab op_optab)
5864 : {
5865 755 : rtx op0;
5866 :
5867 755 : if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5868 : return NULL_RTX;
5869 :
5870 : /* Compute the argument. */
5871 1510 : op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
5872 : (subtarget
5873 97 : && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
5874 97 : == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
5875 : VOIDmode, EXPAND_NORMAL);
5876 : /* Compute op, into TARGET if possible.
5877 : Set TARGET to wherever the result comes back. */
5878 755 : target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5879 : op_optab, op0, target, op_optab != clrsb_optab);
5880 755 : gcc_assert (target);
5881 :
5882 755 : return convert_to_mode (target_mode, target, 0);
5883 : }
5884 :
5885 : /* Expand a call to __builtin_expect. We just return our argument
5886 : as the builtin_expect semantic should've been already executed by
5887 :    the tree branch prediction pass.  */
5888 :
5889 : static rtx
5890 1049 : expand_builtin_expect (tree exp, rtx target)
5891 : {
5892 1049 : tree arg;
5893 :
5894 1049 : if (call_expr_nargs (exp) < 2)
5895 0 : return const0_rtx;
5896 1049 : arg = CALL_EXPR_ARG (exp, 0);
5897 :
5898 1049 : target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5899 : /* When guessing was done, the hints should be already stripped away. */
5900 1049 : gcc_assert (!flag_guess_branch_prob
5901 : || optimize == 0 || seen_error ());
5902 : return target;
5903 : }
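/* Usage example, for illustration only: the classic likely/unlikely
   macros.  By expansion time the hint has already been consumed by
   branch prediction, so the code above just forwards the first
   argument.  */

#define my_likely(x)   __builtin_expect (!!(x), 1)
#define my_unlikely(x) __builtin_expect (!!(x), 0)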
5904 :
5905 : /* Expand a call to __builtin_expect_with_probability. We just return our
5906 : argument as the builtin_expect semantic should've been already executed by
5907 :    the tree branch prediction pass.  */
5908 :
5909 : static rtx
5910 5 : expand_builtin_expect_with_probability (tree exp, rtx target)
5911 : {
5912 5 : tree arg;
5913 :
5914 5 : if (call_expr_nargs (exp) < 3)
5915 0 : return const0_rtx;
5916 5 : arg = CALL_EXPR_ARG (exp, 0);
5917 :
5918 5 : target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5919 : /* When guessing was done, the hints should be already stripped away. */
5920 5 : gcc_assert (!flag_guess_branch_prob
5921 : || optimize == 0 || seen_error ());
5922 : return target;
5923 : }
5924 :
5925 :
5926 : /* Expand a call to __builtin_assume_aligned. We just return our first
5927 : argument as the builtin_assume_aligned semantic should've been already
5928 : executed by CCP. */
5929 :
5930 : static rtx
5931 663 : expand_builtin_assume_aligned (tree exp, rtx target)
5932 : {
5933 663 : if (call_expr_nargs (exp) < 2)
5934 0 : return const0_rtx;
5935 663 : target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
5936 : EXPAND_NORMAL);
5937 663 : gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
5938 : && (call_expr_nargs (exp) < 3
5939 : || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
5940 : return target;
5941 : }
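/* Usage example, for illustration only: by this point the builtin just
   returns its first argument; the alignment fact itself was recorded
   and exploited earlier, by CCP.  */

static void
zero_pair (void *p)
{
  double *q = (double *) __builtin_assume_aligned (p, 16);
  q[0] = q[1] = 0.0;	/* May now use 16-byte-aligned accesses.  */
}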
5942 :
5943 : void
5944 38527 : expand_builtin_trap (void)
5945 : {
5946 38527 : if (targetm.have_trap ())
5947 : {
5948 38527 : rtx_insn *insn = emit_insn (targetm.gen_trap ());
5949 : /* For trap insns when not accumulating outgoing args force
5950 : REG_ARGS_SIZE note to prevent crossjumping of calls with
5951 : different args sizes. */
5952 38527 : if (!ACCUMULATE_OUTGOING_ARGS)
5953 38525 : add_args_size_note (insn, stack_pointer_delta);
5954 : }
5955 : else
5956 : {
5957 0 : tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
5958 0 : tree call_expr = build_call_expr (fn, 0);
5959 0 : expand_call (call_expr, NULL_RTX, false);
5960 : }
5961 :
5962 38527 : emit_barrier ();
5963 38527 : }
5964 :
5965 : /* Expand a call to __builtin_unreachable. We do nothing except emit
5966 : a barrier saying that control flow will not pass here.
5967 :
5968 : It is the responsibility of the program being compiled to ensure
5969 : that control flow never reaches __builtin_unreachable. */
5970 : static void
5971 4384 : expand_builtin_unreachable (void)
5972 : {
5973 : /* Use gimple_build_builtin_unreachable or builtin_decl_unreachable
5974 : to avoid this. */
5975 4384 : gcc_checking_assert (!sanitize_flags_p (SANITIZE_UNREACHABLE));
5976 4384 : emit_barrier ();
5977 4384 : }
5978 :
5979 : /* Expand EXP, a call to fabs, fabsf or fabsl.
5980 : Return NULL_RTX if a normal call should be emitted rather than expanding
5981 : the function inline. If convenient, the result should be placed
5982 : in TARGET. SUBTARGET may be used as the target for computing
5983 : the operand. */
5984 :
5985 : static rtx
5986 4 : expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5987 : {
5988 4 : machine_mode mode;
5989 4 : tree arg;
5990 4 : rtx op0;
5991 :
5992 4 : if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5993 : return NULL_RTX;
5994 :
5995 0 : arg = CALL_EXPR_ARG (exp, 0);
5996 0 : CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5997 0 : mode = TYPE_MODE (TREE_TYPE (arg));
5998 0 : op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5999 0 : return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
6000 : }
6001 :
6002 : /* Expand EXP, a call to copysign, copysignf, or copysignl.
6003 : Return NULL if a normal call should be emitted rather than expanding the
6004 : function inline. If convenient, the result should be placed in TARGET.
6005 : SUBTARGET may be used as the target for computing the operand. */
6006 :
6007 : static rtx
6008 11674 : expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
6009 : {
6010 11674 : rtx op0, op1;
6011 11674 : tree arg;
6012 :
6013 11674 : if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
6014 : return NULL_RTX;
6015 :
6016 11673 : arg = CALL_EXPR_ARG (exp, 0);
6017 11673 : op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
6018 :
6019 11673 : arg = CALL_EXPR_ARG (exp, 1);
6020 11673 : op1 = expand_normal (arg);
6021 :
6022 11673 : return expand_copysign (op0, op1, target);
6023 : }
6024 :
6025 : /* Emit a call to __builtin___clear_cache. */
6026 :
6027 : void
6028 0 : default_emit_call_builtin___clear_cache (rtx begin, rtx end)
6029 : {
6030 0 : rtx callee = gen_rtx_SYMBOL_REF (Pmode,
6031 : BUILTIN_ASM_NAME_PTR
6032 : (BUILT_IN_CLEAR_CACHE));
6033 :
6034 0 : emit_library_call (callee,
6035 : LCT_NORMAL, VOIDmode,
6036 : convert_memory_address (ptr_mode, begin), ptr_mode,
6037 : convert_memory_address (ptr_mode, end), ptr_mode);
6038 0 : }
6039 :
6040 : /* Emit a call to __builtin___clear_cache, unless the target specifies
6041 : it as do-nothing. This function can be used by trampoline
6042 : finalizers to duplicate the effects of expanding a call to the
6043 : clear_cache builtin. */
6044 :
6045 : void
6046 28 : maybe_emit_call_builtin___clear_cache (rtx begin, rtx end)
6047 : {
6048 28 : gcc_assert ((GET_MODE (begin) == ptr_mode || GET_MODE (begin) == Pmode
6049 : || CONST_INT_P (begin))
6050 : && (GET_MODE (end) == ptr_mode || GET_MODE (end) == Pmode
6051 : || CONST_INT_P (end)));
6052 :
6053 28 : if (targetm.have_clear_cache ())
6054 : {
6055 : /* We have a "clear_cache" insn, and it will handle everything. */
6056 0 : class expand_operand ops[2];
6057 :
6058 0 : create_address_operand (&ops[0], begin);
6059 0 : create_address_operand (&ops[1], end);
6060 :
6061 0 : if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
6062 0 : return;
6063 : }
6064 : else
6065 : {
6066 : #ifndef CLEAR_INSN_CACHE
6067 : /* There is no "clear_cache" insn, and __clear_cache() in libgcc
6068 : does nothing, so there is no need to call it. Do nothing. */
6069 : return;
6070 : #endif /* CLEAR_INSN_CACHE */
6071 : }
6072 :
6073 0 : targetm.calls.emit_call_builtin___clear_cache (begin, end);
6074 : }
6075 :
6076 : /* Expand a call to __builtin___clear_cache. */
6077 :
6078 : static void
6079 28 : expand_builtin___clear_cache (tree exp)
6080 : {
6081 28 : tree begin, end;
6082 28 : rtx begin_rtx, end_rtx;
6083 :
6084 : /* We must not expand to a library call. If we did, any
6085 : fallback library function in libgcc that might contain a call to
6086 : __builtin___clear_cache() would recurse infinitely. */
6087 28 : if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6088 : {
6089 0 : error ("both arguments to %<__builtin___clear_cache%> must be pointers");
6090 0 : return;
6091 : }
6092 :
6093 28 : begin = CALL_EXPR_ARG (exp, 0);
6094 30 : begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
6095 :
6096 28 : end = CALL_EXPR_ARG (exp, 1);
6097 30 : end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
6098 :
6099 28 : maybe_emit_call_builtin___clear_cache (begin_rtx, end_rtx);
6100 : }
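 :
 : /* For illustration, a JIT-style caller of this builtin might do
 :
 : memcpy (code, insns, size);
 : __builtin___clear_cache (code, (char *) code + size);
 : ((void (*) (void)) code) ();
 :
 : which is why the expansion above must never degenerate into a plain
 : library call that libgcc could itself implement with this builtin. */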
6101 :
6102 : /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
6103 :
6104 : static rtx
6105 634 : round_trampoline_addr (rtx tramp)
6106 : {
6107 634 : rtx temp, addend, mask;
6108 :
6109 : /* If we don't need too much alignment, we'll have been guaranteed
6110 : proper alignment by get_trampoline_type. */
6111 634 : if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
6112 : return tramp;
6113 :
6114 : /* Round address up to desired boundary. */
6115 0 : temp = gen_reg_rtx (Pmode);
6116 0 : addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
6117 0 : mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
6118 :
6119 0 : temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
6120 : temp, 0, OPTAB_LIB_WIDEN);
6121 0 : tramp = expand_simple_binop (Pmode, AND, temp, mask,
6122 : temp, 0, OPTAB_LIB_WIDEN);
6123 :
6124 0 : return tramp;
6125 : }
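 :
 : /* The rounding above is the usual round-up-to-a-power-of-two sequence.
 : E.g. for a TRAMPOLINE_ALIGNMENT of 128 bits (16 bytes), ADDEND is 15
 : and MASK is -16, so the two binops compute
 :
 : tramp = (tramp + 15) & -16;  */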
6126 :
6127 : static rtx
6128 295 : expand_builtin_init_trampoline (tree exp, bool onstack)
6129 : {
6130 295 : tree t_tramp, t_func, t_chain;
6131 295 : rtx m_tramp, r_tramp, r_chain, tmp;
6132 :
6133 295 : if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
6134 : POINTER_TYPE, VOID_TYPE))
6135 : return NULL_RTX;
6136 :
6137 295 : t_tramp = CALL_EXPR_ARG (exp, 0);
6138 295 : t_func = CALL_EXPR_ARG (exp, 1);
6139 295 : t_chain = CALL_EXPR_ARG (exp, 2);
6140 :
6141 295 : r_tramp = expand_normal (t_tramp);
6142 295 : m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
6143 295 : MEM_NOTRAP_P (m_tramp) = 1;
6144 :
6145 : /* If ONSTACK, the TRAMP argument should be the address of a field
6146 : within the local function's FRAME decl. Either way, let's see if
6147 : we can fill in the MEM_ATTRs for this memory. */
6148 295 : if (TREE_CODE (t_tramp) == ADDR_EXPR)
6149 295 : set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
6150 :
6151 : /* Creator of a heap trampoline is responsible for making sure the
6152 : address is aligned to at least STACK_BOUNDARY. Normally malloc
6153 : will ensure this anyhow. */
6154 295 : tmp = round_trampoline_addr (r_tramp);
6155 295 : if (tmp != r_tramp)
6156 : {
6157 0 : m_tramp = change_address (m_tramp, BLKmode, tmp);
6158 0 : set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
6159 0 : set_mem_size (m_tramp, TRAMPOLINE_SIZE);
6160 : }
6161 :
6162 : /* The FUNC argument should be the address of the nested function.
6163 : Extract the actual function decl to pass to the hook. */
6164 295 : gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
6165 295 : t_func = TREE_OPERAND (t_func, 0);
6166 295 : gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
6167 :
6168 295 : r_chain = expand_normal (t_chain);
6169 :
6170 : /* Generate insns to initialize the trampoline. */
6171 295 : targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
6172 :
6173 295 : if (onstack)
6174 : {
6175 295 : trampolines_created = 1;
6176 :
6177 295 : if (targetm.calls.custom_function_descriptors != 0)
6178 295 : warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
6179 : "trampoline generated for nested function %qD", t_func);
6180 : }
6181 :
6182 295 : return const0_rtx;
6183 : }
6184 :
6185 : static rtx
6186 339 : expand_builtin_adjust_trampoline (tree exp)
6187 : {
6188 339 : rtx tramp;
6189 :
6190 339 : if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6191 : return NULL_RTX;
6192 :
6193 339 : tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
6194 339 : tramp = round_trampoline_addr (tramp);
6195 339 : if (targetm.calls.trampoline_adjust_address)
6196 0 : tramp = targetm.calls.trampoline_adjust_address (tramp);
6197 :
6198 : return tramp;
6199 : }
6200 :
6201 : /* Expand a call to the builtin descriptor initialization routine.
6202 : A descriptor is made up of a couple of pointers to the static
6203 : chain and the code entry in this order. */
6204 :
6205 : static rtx
6206 0 : expand_builtin_init_descriptor (tree exp)
6207 : {
6208 0 : tree t_descr, t_func, t_chain;
6209 0 : rtx m_descr, r_descr, r_func, r_chain;
6210 :
6211 0 : if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
6212 : VOID_TYPE))
6213 : return NULL_RTX;
6214 :
6215 0 : t_descr = CALL_EXPR_ARG (exp, 0);
6216 0 : t_func = CALL_EXPR_ARG (exp, 1);
6217 0 : t_chain = CALL_EXPR_ARG (exp, 2);
6218 :
6219 0 : r_descr = expand_normal (t_descr);
6220 0 : m_descr = gen_rtx_MEM (BLKmode, r_descr);
6221 0 : MEM_NOTRAP_P (m_descr) = 1;
6222 0 : set_mem_align (m_descr, GET_MODE_ALIGNMENT (ptr_mode));
6223 :
6224 0 : r_func = expand_normal (t_func);
6225 0 : r_chain = expand_normal (t_chain);
6226 :
6227 : /* Generate insns to initialize the descriptor. */
6228 0 : emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
6229 0 : emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
6230 : POINTER_SIZE / BITS_PER_UNIT), r_func);
6231 :
6232 0 : return const0_rtx;
6233 : }
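 :
 : /* After the two stores above, the descriptor has the layout
 :
 : struct { void *chain; void *entry; };
 :
 : i.e. the static chain word at offset 0 and the code address one
 : pointer size beyond it. */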
6234 :
6235 : /* Expand a call to the builtin descriptor adjustment routine. */
6236 :
6237 : static rtx
6238 0 : expand_builtin_adjust_descriptor (tree exp)
6239 : {
6240 0 : rtx tramp;
6241 :
6242 0 : if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6243 : return NULL_RTX;
6244 :
6245 0 : tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
6246 :
6247 : /* Unalign the descriptor to allow runtime identification. */
6248 0 : tramp = plus_constant (ptr_mode, tramp,
6249 0 : targetm.calls.custom_function_descriptors);
6250 :
6251 0 : return force_operand (tramp, NULL_RTX);
6252 : }
6253 :
6254 : /* Expand the call EXP to the built-in signbit, signbitf or signbitl
6255 : function. The function first checks whether the back end provides
6256 : an insn to implement signbit for the respective mode. If not, it
6257 : checks whether the floating point format of the value is such that
6258 : the sign bit can be extracted; failing that, signbit is expanded as "ARG < 0.0".
6259 : EXP is the expression that is a call to the builtin function; if
6260 : convenient, the result should be placed in TARGET. */
6261 : static rtx
6262 1071 : expand_builtin_signbit (tree exp, rtx target)
6263 : {
6264 1071 : const struct real_format *fmt;
6265 1071 : scalar_float_mode fmode;
6266 1071 : scalar_int_mode rmode, imode;
6267 1071 : tree arg;
6268 1071 : int word, bitpos;
6269 1071 : enum insn_code icode;
6270 1071 : rtx temp;
6271 1071 : location_t loc = EXPR_LOCATION (exp);
6272 :
6273 1071 : if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
6274 : return NULL_RTX;
6275 :
6276 1071 : arg = CALL_EXPR_ARG (exp, 0);
6277 1071 : fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
6278 1071 : rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
6279 1071 : fmt = REAL_MODE_FORMAT (fmode);
6280 :
6281 1071 : arg = builtin_save_expr (arg);
6282 :
6283 : /* Expand the argument yielding a RTX expression. */
6284 1071 : temp = expand_normal (arg);
6285 :
6286 : /* Check if the back end provides an insn that handles signbit for the
6287 : argument's mode. */
6288 1071 : icode = optab_handler (signbit_optab, fmode);
6289 1071 : if (icode != CODE_FOR_nothing)
6290 : {
6291 10 : rtx_insn *last = get_last_insn ();
6292 10 : rtx this_target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6293 10 : if (maybe_emit_unop_insn (icode, this_target, temp, UNKNOWN))
6294 : return this_target;
6295 0 : delete_insns_since (last);
6296 : }
6297 :
6298 : /* For floating point formats without a sign bit, implement signbit
6299 : as "ARG < 0.0". */
6300 1061 : bitpos = fmt->signbit_ro;
6301 1061 : if (bitpos < 0)
6302 : {
6303 : /* But we can't do this if the format supports signed zero. */
6304 0 : gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
6305 :
6306 0 : arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
6307 0 : build_real (TREE_TYPE (arg), dconst0));
6308 0 : return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
6309 : }
6310 :
6311 2122 : if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
6312 : {
6313 957 : imode = int_mode_for_mode (fmode).require ();
6314 957 : temp = gen_lowpart (imode, temp);
6315 : }
6316 : else
6317 : {
6318 104 : imode = word_mode;
6319 : /* Handle targets with different FP word orders. */
6320 104 : if (FLOAT_WORDS_BIG_ENDIAN)
6321 : word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
6322 : else
6323 104 : word = bitpos / BITS_PER_WORD;
6324 104 : temp = operand_subword_force (temp, word, fmode);
6325 104 : bitpos = bitpos % BITS_PER_WORD;
6326 : }
6327 :
6328 : /* Force the intermediate word_mode (or narrower) result into a
6329 : register. This avoids attempting to create paradoxical SUBREGs
6330 : of floating point modes below. */
6331 1061 : temp = force_reg (imode, temp);
6332 :
6333 : /* If the bitpos is within the "result mode" lowpart, the operation
6334 : can be implemented with a single bitwise AND. Otherwise, we need
6335 : a right shift and an AND. */
6336 :
6337 2122 : if (bitpos < GET_MODE_BITSIZE (rmode))
6338 : {
6339 853 : wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
6340 :
6341 2559 : if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
6342 0 : temp = gen_lowpart (rmode, temp);
6343 1706 : temp = expand_binop (rmode, and_optab, temp,
6344 1706 : immed_wide_int_const (mask, rmode),
6345 : NULL_RTX, 1, OPTAB_LIB_WIDEN);
6346 853 : }
6347 : else
6348 : {
6349 : /* Perform a logical right shift to place the signbit in the least
6350 : significant bit, then truncate the result to the desired mode
6351 : and mask just this bit. */
6352 208 : temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
6353 208 : temp = gen_lowpart (rmode, temp);
6354 208 : temp = expand_binop (rmode, and_optab, temp, const1_rtx,
6355 : NULL_RTX, 1, OPTAB_LIB_WIDEN);
6356 : }
6357 :
6358 : return temp;
6359 : }
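 :
 : /* As a concrete example, signbit of an IEEE double on a 32-bit target
 : with a 32-bit int result takes the second branch: BITPOS 63 selects
 : word 1 and is reduced to 31, after which the lowpart case emits a
 : single AND with 0x80000000. */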
6360 :
6361 : /* Expand fork or exec calls. TARGET is the desired target of the
6362 : call. EXP is the call. FN is the
6363 : decl of the actual function. IGNORE is nonzero if the
6364 : value is to be ignored. */
6365 :
6366 : static rtx
6367 99 : expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
6368 : {
6369 99 : tree id, decl;
6370 99 : tree call;
6371 :
6372 : /* If we are not profiling, just call the function. */
6373 99 : if (!coverage_instrumentation_p ())
6374 : return NULL_RTX;
6375 :
6376 : /* Otherwise call the wrapper. This should be equivalent for the rest
6377 : of the compiler, so the code does not diverge, and the wrapper may run the
6378 : code necessary for keeping the profiling sane. */
6379 :
6380 4 : switch (DECL_FUNCTION_CODE (fn))
6381 : {
6382 4 : case BUILT_IN_FORK:
6383 4 : id = get_identifier ("__gcov_fork");
6384 4 : break;
6385 :
6386 0 : case BUILT_IN_EXECL:
6387 0 : id = get_identifier ("__gcov_execl");
6388 0 : break;
6389 :
6390 0 : case BUILT_IN_EXECV:
6391 0 : id = get_identifier ("__gcov_execv");
6392 0 : break;
6393 :
6394 0 : case BUILT_IN_EXECLP:
6395 0 : id = get_identifier ("__gcov_execlp");
6396 0 : break;
6397 :
6398 0 : case BUILT_IN_EXECLE:
6399 0 : id = get_identifier ("__gcov_execle");
6400 0 : break;
6401 :
6402 0 : case BUILT_IN_EXECVP:
6403 0 : id = get_identifier ("__gcov_execvp");
6404 0 : break;
6405 :
6406 0 : case BUILT_IN_EXECVE:
6407 0 : id = get_identifier ("__gcov_execve");
6408 0 : break;
6409 :
6410 0 : default:
6411 0 : gcc_unreachable ();
6412 : }
6413 :
6414 4 : decl = build_decl (DECL_SOURCE_LOCATION (fn),
6415 4 : FUNCTION_DECL, id, TREE_TYPE (fn));
6416 4 : DECL_EXTERNAL (decl) = 1;
6417 4 : TREE_PUBLIC (decl) = 1;
6418 4 : DECL_ARTIFICIAL (decl) = 1;
6419 4 : TREE_NOTHROW (decl) = 1;
6420 4 : DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
6421 4 : DECL_VISIBILITY_SPECIFIED (decl) = 1;
6422 4 : call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
6423 4 : return expand_call (call, target, ignore);
6424 : }
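 :
 : /* E.g. when instrumenting for coverage, a call to fork () is rewritten
 : here into a call to __gcov_fork (), a libgcov wrapper that keeps the
 : profile counters of parent and child consistent around the real fork. */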
6425 :
6426 :
6427 :
6428 : /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
6429 : the pointer in these functions is void*, the tree optimizers may remove
6430 : casts. The mode computed in expand_builtin isn't reliable either, due
6431 : to __sync_bool_compare_and_swap.
6432 :
6433 : FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
6434 : group of builtins. This gives us log2 of the mode size. */
6435 :
6436 : static inline machine_mode
6437 133612 : get_builtin_sync_mode (int fcode_diff)
6438 : {
6439 : /* The size is not negotiable, so ask not to get BLKmode in return
6440 : if the target indicates that a smaller size would be better. */
6441 133612 : return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
6442 : }
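 :
 : /* For example, BUILT_IN_SYNC_FETCH_AND_ADD_4 minus
 : BUILT_IN_SYNC_FETCH_AND_ADD_1 gives FCODE_DIFF == 2, so the mode is
 : int_mode_for_size (8 << 2, 0), i.e. the 32-bit integer mode. */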
6443 :
6444 : /* Expand the memory expression LOC and return the appropriate memory operand
6445 : for the builtin_sync operations. */
6446 :
6447 : static rtx
6448 150111 : get_builtin_sync_mem (tree loc, machine_mode mode)
6449 : {
6450 150111 : rtx addr, mem;
6451 150111 : int addr_space = TYPE_ADDR_SPACE (POINTER_TYPE_P (TREE_TYPE (loc))
6452 : ? TREE_TYPE (TREE_TYPE (loc))
6453 : : TREE_TYPE (loc));
6454 150111 : scalar_int_mode addr_mode = targetm.addr_space.address_mode (addr_space);
6455 :
6456 150111 : addr = expand_expr (loc, NULL_RTX, addr_mode, EXPAND_SUM);
6457 150111 : addr = convert_memory_address (addr_mode, addr);
6458 :
6459 : /* Note that we explicitly do not want any alias information for this
6460 : memory, so that we kill all other live memories. Otherwise we don't
6461 : satisfy the full barrier semantics of the intrinsic. */
6462 150111 : mem = gen_rtx_MEM (mode, addr);
6463 :
6464 150111 : set_mem_addr_space (mem, addr_space);
6465 :
6466 150111 : mem = validize_mem (mem);
6467 :
6468 : /* The alignment needs to be at least according to that of the mode. */
6469 150111 : set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
6470 : get_pointer_alignment (loc)));
6471 150111 : set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
6472 150111 : MEM_VOLATILE_P (mem) = 1;
6473 :
6474 150111 : return mem;
6475 : }
6476 :
6477 : /* Make sure an argument is in the right mode.
6478 : EXP is the tree argument.
6479 : MODE is the mode it should be in. */
6480 :
6481 : static rtx
6482 93823 : expand_expr_force_mode (tree exp, machine_mode mode)
6483 : {
6484 93823 : rtx val;
6485 93823 : machine_mode old_mode;
6486 :
6487 93823 : if (TREE_CODE (exp) == SSA_NAME
6488 93823 : && TYPE_MODE (TREE_TYPE (exp)) != mode)
6489 : {
6490 : /* Undo argument promotion if possible, as combine might not
6491 : be able to do it later due to MEM_VOLATILE_P uses in the
6492 : patterns. */
6493 25 : gimple *g = get_gimple_for_ssa_name (exp);
6494 25 : if (g && gimple_assign_cast_p (g))
6495 : {
6496 0 : tree rhs = gimple_assign_rhs1 (g);
6497 0 : tree_code code = gimple_assign_rhs_code (g);
6498 0 : if (CONVERT_EXPR_CODE_P (code)
6499 0 : && TYPE_MODE (TREE_TYPE (rhs)) == mode
6500 0 : && INTEGRAL_TYPE_P (TREE_TYPE (exp))
6501 0 : && INTEGRAL_TYPE_P (TREE_TYPE (rhs))
6502 0 : && (TYPE_PRECISION (TREE_TYPE (exp))
6503 0 : > TYPE_PRECISION (TREE_TYPE (rhs))))
6504 : exp = rhs;
6505 : }
6506 : }
6507 :
6508 93823 : val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
6509 : /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6510 : of CONST_INTs, where we know the old_mode only from the call argument. */
6511 :
6512 93823 : old_mode = GET_MODE (val);
6513 93823 : if (old_mode == VOIDmode)
6514 44123 : old_mode = TYPE_MODE (TREE_TYPE (exp));
6515 93823 : val = convert_modes (mode, old_mode, val, 1);
6516 93823 : return val;
6517 : }
6518 :
6519 :
6520 : /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
6521 : EXP is the CALL_EXPR. CODE is the rtx code
6522 : that corresponds to the arithmetic or logical operation from the name;
6523 : an exception here is that NOT actually means NAND. TARGET is an optional
6524 : place for us to store the results; AFTER is true if this is the
6525 : xxx_and_fetch form, i.e. if the value after the operation is returned. */
6526 :
6527 : static rtx
6528 4003 : expand_builtin_sync_operation (machine_mode mode, tree exp,
6529 : enum rtx_code code, bool after,
6530 : rtx target)
6531 : {
6532 4003 : rtx val, mem;
6533 4003 : location_t loc = EXPR_LOCATION (exp);
6534 :
6535 4003 : if (code == NOT && warn_sync_nand)
6536 : {
6537 523 : tree fndecl = get_callee_fndecl (exp);
6538 523 : enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6539 :
6540 523 : static bool warned_f_a_n, warned_n_a_f;
6541 :
6542 523 : switch (fcode)
6543 : {
6544 434 : case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6545 434 : case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6546 434 : case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6547 434 : case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6548 434 : case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6549 434 : if (warned_f_a_n)
6550 : break;
6551 :
6552 30 : fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
6553 30 : inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6554 30 : warned_f_a_n = true;
6555 30 : break;
6556 :
6557 89 : case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6558 89 : case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6559 89 : case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6560 89 : case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6561 89 : case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6562 89 : if (warned_n_a_f)
6563 : break;
6564 :
6565 22 : fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
6566 22 : inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6567 22 : warned_n_a_f = true;
6568 22 : break;
6569 :
6570 0 : default:
6571 0 : gcc_unreachable ();
6572 : }
6573 : }
6574 :
6575 : /* Expand the operands. */
6576 4003 : mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6577 4003 : val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6578 :
6579 4003 : return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
6580 4003 : after);
6581 : }
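 :
 : /* For example, __sync_fetch_and_add (p, n) reaches this point with
 : CODE == PLUS and AFTER == false, while __sync_add_and_fetch (p, n)
 : uses AFTER == true. The NAND forms warned about above compute
 : *p = ~(*p & n) since GCC 4.4, rather than the older *p = ~*p & n. */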
6582 :
6583 : /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
6584 : intrinsics. EXP is the CALL_EXPR. IS_BOOL is
6585 : true if this is the boolean form. TARGET is a place for us to store the
6586 : results; this is NOT optional if IS_BOOL is true. */
6587 :
6588 : static rtx
6589 456 : expand_builtin_compare_and_swap (machine_mode mode, tree exp,
6590 : bool is_bool, rtx target)
6591 : {
6592 456 : rtx old_val, new_val, mem;
6593 456 : rtx *pbool, *poval;
6594 :
6595 : /* Expand the operands. */
6596 456 : mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6597 456 : old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6598 456 : new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
6599 :
6600 456 : pbool = poval = NULL;
6601 456 : if (target != const0_rtx)
6602 : {
6603 430 : if (is_bool)
6604 : pbool = ⌖
6605 : else
6606 230 : poval = ⌖
6607 : }
6608 456 : if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
6609 : false, MEMMODEL_SYNC_SEQ_CST,
6610 : MEMMODEL_SYNC_SEQ_CST))
6611 : return NULL_RTX;
6612 :
6613 454 : return target;
6614 : }
6615 :
6616 : /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
6617 : general form is actually an atomic exchange, and some targets only
6618 : support a reduced form with the second argument being a constant 1.
6619 : EXP is the CALL_EXPR; TARGET is an optional place for us to store
6620 : the results. */
6621 :
6622 : static rtx
6623 326 : expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
6624 : rtx target)
6625 : {
6626 326 : rtx val, mem;
6627 :
6628 : /* Expand the operands. */
6629 326 : mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6630 326 : val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6631 :
6632 326 : return expand_sync_lock_test_and_set (target, mem, val);
6633 : }
6634 :
6635 : /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6636 :
6637 : static rtx
6638 146 : expand_builtin_sync_lock_release (machine_mode mode, tree exp)
6639 : {
6640 146 : rtx mem;
6641 :
6642 : /* Expand the operands. */
6643 146 : mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6644 :
6645 146 : return expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
6646 : }
6647 :
6648 : /* Given an integer representing an ``enum memmodel'', verify its
6649 : correctness and return the memory model enum. */
6650 :
6651 : static enum memmodel
6652 168569 : get_memmodel (tree exp)
6653 : {
6654 : /* If the parameter is not a constant, it's a run time value so we'll just
6655 : convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
6656 168569 : if (TREE_CODE (exp) != INTEGER_CST)
6657 : return MEMMODEL_SEQ_CST;
6658 :
6659 167743 : rtx op = expand_normal (exp);
6660 :
6661 167743 : unsigned HOST_WIDE_INT val = INTVAL (op);
6662 167743 : if (targetm.memmodel_check)
6663 167743 : val = targetm.memmodel_check (val);
6664 0 : else if (val & ~MEMMODEL_MASK)
6665 : return MEMMODEL_SEQ_CST;
6666 :
6667 : /* We should never see a user-explicit SYNC memory model, so >= LAST works. */
6668 167743 : if (memmodel_base (val) >= MEMMODEL_LAST)
6669 : return MEMMODEL_SEQ_CST;
6670 :
6671 : /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
6672 : be conservative and promote consume to acquire. */
6673 167742 : if (val == MEMMODEL_CONSUME)
6674 754 : val = MEMMODEL_ACQUIRE;
6675 :
6676 167742 : return (enum memmodel) val;
6677 : }
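 :
 : /* For example, a constant __ATOMIC_CONSUME argument is promoted to
 : MEMMODEL_ACQUIRE here, while a call such as
 :
 : __atomic_load_n (p, order)
 :
 : with a run time ORDER is conservatively treated as MEMMODEL_SEQ_CST. */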
6678 :
6679 : /* Expand the __atomic_exchange intrinsic:
6680 : TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
6681 : EXP is the CALL_EXPR.
6682 : TARGET is an optional place for us to store the results. */
6683 :
6684 : static rtx
6685 2889 : expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
6686 : {
6687 2889 : rtx val, mem;
6688 2889 : enum memmodel model;
6689 :
6690 2889 : model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6691 :
6692 2889 : if (!flag_inline_atomics)
6693 : return NULL_RTX;
6694 :
6695 : /* Expand the operands. */
6696 2838 : mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6697 2838 : val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6698 :
6699 2838 : return expand_atomic_exchange (target, mem, val, model);
6700 : }
6701 :
6702 : /* Expand the __atomic_compare_exchange intrinsic:
6703 : bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
6704 : TYPE desired, BOOL weak,
6705 : enum memmodel success,
6706 : enum memmodel failure)
6707 : EXP is the CALL_EXPR.
6708 : TARGET is an optional place for us to store the results. */
6709 :
6710 : static rtx
6711 9131 : expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
6712 : rtx target)
6713 : {
6714 9131 : rtx expect, desired, mem, oldval;
6715 9131 : rtx_code_label *label;
6716 9131 : tree weak;
6717 9131 : bool is_weak;
6718 :
6719 9131 : memmodel success = get_memmodel (CALL_EXPR_ARG (exp, 4));
6720 9131 : memmodel failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
6721 :
6722 9131 : if (failure > success)
6723 22 : success = MEMMODEL_SEQ_CST;
6724 :
6725 9131 : if (is_mm_release (failure) || is_mm_acq_rel (failure))
6726 : {
6727 : failure = MEMMODEL_SEQ_CST;
6728 : success = MEMMODEL_SEQ_CST;
6729 : }
6730 :
6731 :
6732 9131 : if (!flag_inline_atomics)
6733 : return NULL_RTX;
6734 :
6735 : /* Expand the operands. */
6736 9080 : mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6737 :
6738 9080 : expect = expand_normal (CALL_EXPR_ARG (exp, 1));
6739 9080 : expect = convert_memory_address (Pmode, expect);
6740 9080 : expect = gen_rtx_MEM (mode, expect);
6741 9080 : desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
6742 :
6743 9080 : weak = CALL_EXPR_ARG (exp, 3);
6744 9080 : is_weak = false;
6745 9080 : if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
6746 9080 : is_weak = true;
6747 :
6748 9080 : if (target == const0_rtx)
6749 286 : target = NULL;
6750 :
6751 : /* Lest the rtl backend create a race condition with an improper store
6752 : to memory, always create a new pseudo for OLDVAL. */
6753 9080 : oldval = NULL;
6754 :
6755 9080 : if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
6756 : is_weak, success, failure))
6757 : return NULL_RTX;
6758 :
6759 : /* Conditionally store back to EXPECT, lest we create a race condition
6760 : with an improper store to memory. */
6761 : /* ??? With a rearrangement of atomics at the gimple level, we can handle
6762 : the normal case where EXPECT is totally private, i.e. a register. At
6763 : which point the store can be unconditional. */
6764 7329 : label = gen_label_rtx ();
6765 7329 : emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
6766 7329 : GET_MODE (target), 1, label);
6767 7329 : emit_move_insn (expect, oldval);
6768 7329 : emit_label (label);
6769 :
6770 7329 : return target;
6771 : }
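 :
 : /* The sequence emitted above implements the documented contract of the
 : builtin, roughly
 :
 : bool ok = CAS (mem, *expect, desired);
 : if (!ok)
 : *expect = oldval;
 : return ok;
 :
 : where the store back to *EXPECT happens only on failure. */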
6772 :
6773 : /* Helper function for expand_ifn_atomic_compare_exchange - expand
6774 : internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
6775 : call. The weak parameter must be dropped to match the expected parameter
6776 : list and the expected argument changed from value to pointer to memory
6777 : slot. */
6778 :
6779 : static void
6780 0 : expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
6781 : {
6782 0 : unsigned int z;
6783 0 : vec<tree, va_gc> *vec;
6784 :
6785 0 : vec_alloc (vec, 5);
6786 0 : vec->quick_push (gimple_call_arg (call, 0));
6787 0 : tree expected = gimple_call_arg (call, 1);
6788 0 : rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
6789 0 : TREE_TYPE (expected));
6790 0 : rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
6791 0 : if (expd != x)
6792 0 : emit_move_insn (x, expd);
6793 0 : tree v = make_tree (TREE_TYPE (expected), x);
6794 0 : vec->quick_push (build1 (ADDR_EXPR,
6795 0 : build_pointer_type (TREE_TYPE (expected)), v));
6796 0 : vec->quick_push (gimple_call_arg (call, 2));
6797 : /* Skip the boolean weak parameter. */
6798 0 : for (z = 4; z < 6; z++)
6799 0 : vec->quick_push (gimple_call_arg (call, z));
6800 : /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}. */
6801 0 : unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
6802 0 : gcc_assert (bytes_log2 < 5);
6803 0 : built_in_function fncode
6804 : = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
6805 : + bytes_log2);
6806 0 : tree fndecl = builtin_decl_explicit (fncode);
6807 0 : tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
6808 : fndecl);
6809 0 : tree exp = build_call_vec (boolean_type_node, fn, vec);
6810 0 : tree lhs = gimple_call_lhs (call);
6811 0 : rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
6812 0 : if (lhs)
6813 : {
6814 0 : rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6815 0 : if (GET_MODE (boolret) != mode)
6816 0 : boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6817 0 : x = force_reg (mode, x);
6818 0 : write_complex_part (target, boolret, true, true);
6819 0 : write_complex_part (target, x, false, false);
6820 : }
6821 0 : }
6822 :
6823 : /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
6824 :
6825 : void
6826 13870 : expand_ifn_atomic_compare_exchange (gcall *call)
6827 : {
6828 13870 : int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
6829 13870 : gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
6830 13870 : machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
6831 :
6832 13870 : memmodel success = get_memmodel (gimple_call_arg (call, 4));
6833 13870 : memmodel failure = get_memmodel (gimple_call_arg (call, 5));
6834 :
6835 13870 : if (failure > success)
6836 0 : success = MEMMODEL_SEQ_CST;
6837 :
6838 13870 : if (is_mm_release (failure) || is_mm_acq_rel (failure))
6839 : {
6840 : failure = MEMMODEL_SEQ_CST;
6841 : success = MEMMODEL_SEQ_CST;
6842 : }
6843 :
6844 13870 : if (!flag_inline_atomics)
6845 : {
6846 0 : expand_ifn_atomic_compare_exchange_into_call (call, mode);
6847 0 : return;
6848 : }
6849 :
6850 : /* Expand the operands. */
6851 13870 : rtx mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
6852 :
6853 13870 : rtx expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
6854 13870 : rtx desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
6855 :
6856 13870 : bool is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
6857 :
6858 13870 : rtx boolret = NULL;
6859 13870 : rtx oldval = NULL;
6860 :
6861 13870 : if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
6862 : is_weak, success, failure))
6863 : {
6864 0 : expand_ifn_atomic_compare_exchange_into_call (call, mode);
6865 0 : return;
6866 : }
6867 :
6868 13870 : tree lhs = gimple_call_lhs (call);
6869 13870 : if (lhs)
6870 : {
6871 13664 : rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6872 13664 : if (GET_MODE (boolret) != mode)
6873 12195 : boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6874 13664 : write_complex_part (target, boolret, true, true);
6875 13664 : write_complex_part (target, oldval, false, false);
6876 : }
6877 : }
6878 :
6879 : /* Expand the __atomic_load intrinsic:
6880 : TYPE __atomic_load (TYPE *object, enum memmodel)
6881 : EXP is the CALL_EXPR.
6882 : TARGET is an optional place for us to store the results. */
6883 :
6884 : static rtx
6885 70190 : expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
6886 : {
6887 70190 : memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6888 70190 : if (is_mm_release (model) || is_mm_acq_rel (model))
6889 : model = MEMMODEL_SEQ_CST;
6890 :
6891 70190 : if (!flag_inline_atomics)
6892 : return NULL_RTX;
6893 :
6894 : /* Expand the operand. */
6895 70149 : rtx mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6896 :
6897 70149 : return expand_atomic_load (target, mem, model);
6898 : }
6899 :
6900 :
6901 : /* Expand the __atomic_store intrinsic:
6902 : void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
6903 : EXP is the CALL_EXPR. */
6905 :
6906 : static rtx
6907 17419 : expand_builtin_atomic_store (machine_mode mode, tree exp)
6908 : {
6909 17419 : memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6910 17419 : if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
6911 1590 : || is_mm_release (model)))
6912 : model = MEMMODEL_SEQ_CST;
6913 :
6914 17419 : if (!flag_inline_atomics)
6915 : return NULL_RTX;
6916 :
6917 : /* Expand the operands. */
6918 17388 : rtx mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6919 17388 : rtx val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6920 :
6921 17388 : return expand_atomic_store (mem, val, model, false);
6922 : }
6923 :
6924 : /* Expand the __atomic_fetch_XXX intrinsic:
6925 : TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
6926 : EXP is the CALL_EXPR.
6927 : TARGET is an optional place for us to store the results.
6928 : CODE is the rtx code for the operation: PLUS, MINUS, AND, XOR, IOR, or NOT (NAND).
6929 : FETCH_AFTER is true if returning the result of the operation.
6930 : FETCH_AFTER is false if returning the value before the operation.
6931 : IGNORE is true if the result is not used.
6932 : EXT_CALL is the correct builtin for an external call if this cannot be
6933 : resolved to an instruction sequence. */
6934 :
6935 : static rtx
6936 29022 : expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
6937 : enum rtx_code code, bool fetch_after,
6938 : bool ignore, enum built_in_function ext_call)
6939 : {
6940 29022 : rtx val, mem, ret;
6941 29022 : enum memmodel model;
6942 29022 : tree fndecl;
6943 29022 : tree addr;
6944 :
6945 29022 : model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6946 :
6947 : /* Expand the operands. */
6948 29022 : mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6949 29022 : val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6950 :
6951 : /* Only try generating instructions if inlining is turned on. */
6952 29022 : if (flag_inline_atomics)
6953 : {
6954 28479 : ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
6955 28479 : if (ret)
6956 : return ret;
6957 : }
6958 :
6959 : /* If no alternate routine is needed for the library call, just return. */
6960 1142 : if (ext_call == BUILT_IN_NONE)
6961 : return NULL_RTX;
6962 :
6963 : /* Change the call to the specified function. */
6964 279 : fndecl = get_callee_fndecl (exp);
6965 279 : addr = CALL_EXPR_FN (exp);
6966 279 : STRIP_NOPS (addr);
6967 :
6968 279 : gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
6969 279 : TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
6970 :
6971 : /* If we will emit code after the call, the call cannot be a tail call.
6972 : If it is emitted as a tail call, a barrier is emitted after it, and
6973 : then all trailing code is removed. */
6974 279 : if (!ignore)
6975 182 : CALL_EXPR_TAILCALL (exp) = 0;
6976 :
6977 : /* Expand the call here so we can emit trailing code. */
6978 279 : ret = expand_call (exp, target, ignore);
6979 :
6980 : /* Replace the original function just in case it matters. */
6981 279 : TREE_OPERAND (addr, 0) = fndecl;
6982 :
6983 : /* Then issue the arithmetic correction to return the right result. */
6984 279 : if (!ignore)
6985 : {
6986 182 : if (code == NOT)
6987 : {
6988 31 : ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
6989 : OPTAB_LIB_WIDEN);
6990 31 : ret = expand_simple_unop (mode, NOT, ret, target, true);
6991 : }
6992 : else
6993 151 : ret = expand_simple_binop (mode, code, ret, val, target, true,
6994 : OPTAB_LIB_WIDEN);
6995 : }
6996 : return ret;
6997 : }
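 :
 : /* The correction above recovers an xxx_and_fetch result from the
 : fetch_and_xxx library routine; e.g. when __atomic_add_fetch falls back
 : to the __atomic_fetch_add library call, it effectively expands to
 :
 : ret = __atomic_fetch_add (p, val, model);
 : return ret + val;
 :
 : and the NAND case returns ~(ret & val). */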
6998 :
6999 : /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
7000 :
7001 : void
7002 427 : expand_ifn_atomic_bit_test_and (gcall *call)
7003 : {
7004 427 : tree ptr = gimple_call_arg (call, 0);
7005 427 : tree bit = gimple_call_arg (call, 1);
7006 427 : tree flag = gimple_call_arg (call, 2);
7007 427 : tree lhs = gimple_call_lhs (call);
7008 427 : enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
7009 427 : machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
7010 427 : enum rtx_code code;
7011 427 : optab optab;
7012 427 : class expand_operand ops[5];
7013 :
7014 427 : gcc_assert (flag_inline_atomics);
7015 :
7016 427 : if (gimple_call_num_args (call) == 5)
7017 296 : model = get_memmodel (gimple_call_arg (call, 3));
7018 :
7019 427 : rtx mem = get_builtin_sync_mem (ptr, mode);
7020 427 : rtx val = expand_expr_force_mode (bit, mode);
7021 :
7022 427 : switch (gimple_call_internal_fn (call))
7023 : {
7024 : case IFN_ATOMIC_BIT_TEST_AND_SET:
7025 : code = IOR;
7026 : optab = atomic_bit_test_and_set_optab;
7027 : break;
7028 : case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
7029 : code = XOR;
7030 : optab = atomic_bit_test_and_complement_optab;
7031 : break;
7032 : case IFN_ATOMIC_BIT_TEST_AND_RESET:
7033 : code = AND;
7034 : optab = atomic_bit_test_and_reset_optab;
7035 : break;
7036 0 : default:
7037 0 : gcc_unreachable ();
7038 : }
7039 :
7040 427 : if (lhs == NULL_TREE)
7041 : {
7042 0 : rtx val2 = expand_simple_binop (mode, ASHIFT, const1_rtx,
7043 : val, NULL_RTX, true, OPTAB_DIRECT);
7044 0 : if (code == AND)
7045 0 : val2 = expand_simple_unop (mode, NOT, val2, NULL_RTX, true);
7046 0 : if (expand_atomic_fetch_op (const0_rtx, mem, val2, code, model, false))
7047 427 : return;
7048 : }
7049 :
7050 427 : rtx target;
7051 427 : if (lhs)
7052 427 : target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
7053 : else
7054 0 : target = gen_reg_rtx (mode);
7055 427 : enum insn_code icode = direct_optab_handler (optab, mode);
7056 427 : gcc_assert (icode != CODE_FOR_nothing);
7057 427 : create_output_operand (&ops[0], target, mode);
7058 427 : create_fixed_operand (&ops[1], mem);
7059 427 : create_convert_operand_to (&ops[2], val, mode, true);
7060 427 : create_integer_operand (&ops[3], model);
7061 427 : create_integer_operand (&ops[4], integer_onep (flag));
7062 427 : if (maybe_expand_insn (icode, 5, ops))
7063 : return;
7064 :
7065 0 : rtx bitval = val;
7066 0 : val = expand_simple_binop (mode, ASHIFT, const1_rtx,
7067 : val, NULL_RTX, true, OPTAB_DIRECT);
7068 0 : rtx maskval = val;
7069 0 : if (code == AND)
7070 0 : val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
7071 0 : rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
7072 : code, model, false);
7073 0 : if (!result)
7074 : {
7075 0 : bool is_atomic = gimple_call_num_args (call) == 5;
7076 0 : tree tcall = gimple_call_arg (call, 3 + is_atomic);
7077 0 : tree fndecl = gimple_call_addr_fndecl (tcall);
7078 0 : tree type = TREE_TYPE (TREE_TYPE (fndecl));
7079 0 : tree exp;
7080 0 : if (is_atomic)
7081 0 : exp = build_call_nary (type, tcall, 3,
7082 : ptr, make_tree (type, val),
7083 : gimple_call_arg (call, 3));
7084 : else
7085 0 : exp = build_call_nary (type, tcall, 2, ptr, make_tree (type, val));
7086 0 : result = expand_builtin (exp, gen_reg_rtx (mode), NULL_RTX,
7087 : mode, !lhs);
7088 : }
7089 0 : if (!lhs)
7090 : return;
7091 0 : if (integer_onep (flag))
7092 : {
7093 0 : result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
7094 : NULL_RTX, true, OPTAB_DIRECT);
7095 0 : result = expand_simple_binop (mode, AND, result, const1_rtx, target,
7096 : true, OPTAB_DIRECT);
7097 : }
7098 : else
7099 0 : result = expand_simple_binop (mode, AND, result, maskval, target, true,
7100 : OPTAB_DIRECT);
7101 0 : if (result != target)
7102 0 : emit_move_insn (target, result);
7103 : }
7104 :
7105 : /* Expand IFN_ATOMIC_*_FETCH_CMP_0 internal function. */
7106 :
7107 : void
7108 2087 : expand_ifn_atomic_op_fetch_cmp_0 (gcall *call)
7109 : {
7110 2087 : tree cmp = gimple_call_arg (call, 0);
7111 2087 : tree ptr = gimple_call_arg (call, 1);
7112 2087 : tree arg = gimple_call_arg (call, 2);
7113 2087 : tree lhs = gimple_call_lhs (call);
7114 2087 : enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
7115 2087 : machine_mode mode = TYPE_MODE (TREE_TYPE (cmp));
7116 2087 : optab optab;
7117 2087 : rtx_code code;
7118 2087 : class expand_operand ops[5];
7119 :
7120 2087 : gcc_assert (flag_inline_atomics);
7121 :
7122 2087 : if (gimple_call_num_args (call) == 5)
7123 1679 : model = get_memmodel (gimple_call_arg (call, 3));
7124 :
7125 2087 : rtx mem = get_builtin_sync_mem (ptr, mode);
7126 2087 : rtx op = expand_expr_force_mode (arg, mode);
7127 :
7128 2087 : switch (gimple_call_internal_fn (call))
7129 : {
7130 : case IFN_ATOMIC_ADD_FETCH_CMP_0:
7131 : code = PLUS;
7132 : optab = atomic_add_fetch_cmp_0_optab;
7133 : break;
7134 : case IFN_ATOMIC_SUB_FETCH_CMP_0:
7135 : code = MINUS;
7136 : optab = atomic_sub_fetch_cmp_0_optab;
7137 : break;
7138 : case IFN_ATOMIC_AND_FETCH_CMP_0:
7139 : code = AND;
7140 : optab = atomic_and_fetch_cmp_0_optab;
7141 : break;
7142 : case IFN_ATOMIC_OR_FETCH_CMP_0:
7143 : code = IOR;
7144 : optab = atomic_or_fetch_cmp_0_optab;
7145 : break;
7146 : case IFN_ATOMIC_XOR_FETCH_CMP_0:
7147 : code = XOR;
7148 : optab = atomic_xor_fetch_cmp_0_optab;
7149 : break;
7150 0 : default:
7151 0 : gcc_unreachable ();
7152 : }
7153 :
7154 2087 : enum rtx_code comp = UNKNOWN;
7155 2087 : switch (tree_to_uhwi (cmp))
7156 : {
7157 : case ATOMIC_OP_FETCH_CMP_0_EQ: comp = EQ; break;
7158 : case ATOMIC_OP_FETCH_CMP_0_NE: comp = NE; break;
7159 : case ATOMIC_OP_FETCH_CMP_0_GT: comp = GT; break;
7160 : case ATOMIC_OP_FETCH_CMP_0_GE: comp = GE; break;
7161 : case ATOMIC_OP_FETCH_CMP_0_LT: comp = LT; break;
7162 : case ATOMIC_OP_FETCH_CMP_0_LE: comp = LE; break;
7163 0 : default: gcc_unreachable ();
7164 : }
7165 :
7166 2087 : rtx target;
7167 2087 : if (lhs == NULL_TREE)
7168 0 : target = gen_reg_rtx (TYPE_MODE (boolean_type_node));
7169 : else
7170 2087 : target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
7171 2087 : enum insn_code icode = direct_optab_handler (optab, mode);
7172 2087 : gcc_assert (icode != CODE_FOR_nothing);
7173 2087 : create_output_operand (&ops[0], target, TYPE_MODE (boolean_type_node));
7174 2087 : create_fixed_operand (&ops[1], mem);
7175 2087 : create_convert_operand_to (&ops[2], op, mode, true);
7176 2087 : create_integer_operand (&ops[3], model);
7177 2087 : create_integer_operand (&ops[4], comp);
7178 2087 : if (maybe_expand_insn (icode, 5, ops))
7179 2055 : return;
7180 :
7181 32 : rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, op,
7182 : code, model, true);
7183 32 : if (!result)
7184 : {
7185 0 : bool is_atomic = gimple_call_num_args (call) == 5;
7186 0 : tree tcall = gimple_call_arg (call, 3 + is_atomic);
7187 0 : tree fndecl = gimple_call_addr_fndecl (tcall);
7188 0 : tree type = TREE_TYPE (TREE_TYPE (fndecl));
7189 0 : tree exp;
7190 0 : if (is_atomic)
7191 0 : exp = build_call_nary (type, tcall, 3,
7192 : ptr, arg,
7193 : gimple_call_arg (call, 3));
7194 : else
7195 0 : exp = build_call_nary (type, tcall, 2, ptr, arg);
7196 0 : result = expand_builtin (exp, gen_reg_rtx (mode), NULL_RTX,
7197 : mode, !lhs);
7198 : }
7199 :
7200 32 : if (lhs)
7201 : {
7202 32 : result = emit_store_flag_force (target, comp, result, const0_rtx, mode,
7203 : 0, 1);
7204 32 : if (result != target)
7205 32 : emit_move_insn (target, result);
7206 : }
7207 : }
7208 :
7209 : /* Expand an atomic clear operation.
7210 : void _atomic_clear (BOOL *obj, enum memmodel)
7211 : EXP is the call expression. */
7212 :
7213 : static rtx
7214 53 : expand_builtin_atomic_clear (tree exp)
7215 : {
7216 53 : machine_mode mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
7217 53 : rtx mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7218 53 : memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 1));
7219 :
7220 53 : if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
7221 : model = MEMMODEL_SEQ_CST;
7222 :
7223 : /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
7224 : Failing that, a store is issued by __atomic_store. The only way this can
7225 : fail is if the bool type is larger than a word size. Unlikely, but
7226 : handle it anyway for completeness. Assume a single threaded model since
7227 : there is no atomic support in this case, and no barriers are required. */
7228 53 : rtx ret = expand_atomic_store (mem, const0_rtx, model, true);
7229 53 : if (!ret)
7230 0 : emit_move_insn (mem, const0_rtx);
7231 53 : return const0_rtx;
7232 : }
7233 :
7234 : /* Expand an atomic test_and_set operation.
7235 : bool _atomic_test_and_set (BOOL *obj, enum memmodel)
7236 : EXP is the call expression. */
7237 :
7238 : static rtx
7239 266 : expand_builtin_atomic_test_and_set (tree exp, rtx target)
7240 : {
7241 266 : rtx mem;
7242 266 : enum memmodel model;
7243 266 : machine_mode mode;
7244 :
7245 266 : mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
7246 266 : mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7247 266 : model = get_memmodel (CALL_EXPR_ARG (exp, 1));
7248 :
7249 266 : return expand_atomic_test_and_set (target, mem, model);
7250 : }
7251 :
7252 :
7253 : /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
7254 : this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
7255 :
7256 : static tree
7257 137553 : fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
7258 : {
7259 137553 : int size;
7260 137553 : machine_mode mode;
7261 137553 : unsigned int mode_align, type_align;
7262 :
7263 137553 : if (TREE_CODE (arg0) != INTEGER_CST)
7264 : return NULL_TREE;
7265 :
7266 : /* We need a corresponding integer mode for the access to be lock-free. */
7267 137459 : size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
7268 137459 : if (!int_mode_for_size (size, 0).exists (&mode))
7269 9 : return boolean_false_node;
7270 :
7271 137450 : mode_align = GET_MODE_ALIGNMENT (mode);
7272 :
7273 137450 : if (TREE_CODE (arg1) == INTEGER_CST)
7274 : {
7275 118703 : unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
7276 :
7277 : /* Either this argument is null, or it's a fake pointer encoding
7278 : the alignment of the object. */
7279 118703 : val = least_bit_hwi (val);
7280 118703 : val *= BITS_PER_UNIT;
7281 :
7282 118703 : if (val == 0 || mode_align < val)
7283 : type_align = mode_align;
7284 : else
7285 18148 : type_align = val;
7286 : }
7287 : else
7288 : {
7289 18747 : tree ttype = TREE_TYPE (arg1);
7290 :
7291 : /* This function is usually invoked and folded immediately by the front
7292 : end before anything else has a chance to look at it. The pointer
7293 : parameter at this point is usually cast to a void *, so check for that
7294 : and look past the cast. */
7295 63 : if (CONVERT_EXPR_P (arg1)
7296 18715 : && POINTER_TYPE_P (ttype)
7297 18715 : && VOID_TYPE_P (TREE_TYPE (ttype))
7298 37462 : && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
7299 18684 : arg1 = TREE_OPERAND (arg1, 0);
7300 :
7301 18747 : ttype = TREE_TYPE (arg1);
7302 18747 : gcc_assert (POINTER_TYPE_P (ttype));
7303 :
7304 : /* Get the underlying type of the object. */
7305 18747 : ttype = TREE_TYPE (ttype);
7306 18747 : type_align = TYPE_ALIGN (ttype);
7307 : }
7308 :
7309 : /* If the object has smaller alignment, the lock free routines cannot
7310 : be used. */
7311 137450 : if (type_align < mode_align)
7312 86 : return boolean_false_node;
7313 :
7314 : /* Check if a compare_and_swap pattern exists for the mode which represents
7315 : the required size. The pattern is not allowed to fail, so the existence
7316 : of the pattern indicates support is present. Also require that an
7317 : atomic load exists for the required size. */
7318 137364 : if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
7319 137323 : return boolean_true_node;
7320 : else
7321 41 : return boolean_false_node;
7322 : }
7323 :
7324 : /* Return true if the parameters to call EXP represent an object which will
7325 : always generate lock free instructions. The first argument represents the
7326 : size of the object, and the second parameter is a pointer to the object
7327 : itself. If NULL is passed for the object, then the result is based on
7328 : typical alignment for an object of the specified size. Otherwise return
7329 : false. */
7330 :
7331 : static rtx
7332 1 : expand_builtin_atomic_always_lock_free (tree exp)
7333 : {
7334 1 : tree size;
7335 1 : tree arg0 = CALL_EXPR_ARG (exp, 0);
7336 1 : tree arg1 = CALL_EXPR_ARG (exp, 1);
7337 :
7338 1 : if (TREE_CODE (arg0) != INTEGER_CST)
7339 : {
7340 1 : error ("non-constant argument 1 to %qs", "__atomic_always_lock_free");
7341 1 : return const0_rtx;
7342 : }
7343 :
7344 0 : size = fold_builtin_atomic_always_lock_free (arg0, arg1);
7345 0 : if (size == boolean_true_node)
7346 0 : return const1_rtx;
7347 0 : return const0_rtx;
7348 : }
7349 :
7350 : /* Return a one or zero if it can be determined that object ARG1 of size ARG
7351 : is lock free on this architecture. */
7352 :
7353 : static tree
7354 36938 : fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
7355 : {
7356 36938 : if (!flag_inline_atomics)
7357 : return NULL_TREE;
7358 :
7359 : /* If it isn't always lock free, don't generate a result. */
7360 36929 : if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
7361 : return boolean_true_node;
7362 :
7363 : return NULL_TREE;
7364 : }
7365 :
7366 : /* Return true if the parameters to call EXP represent an object which will
7367 : always generate lock free instructions. The first argument represents the
7368 : size of the object, and the second parameter is a pointer to the object
7369 : itself. If NULL is passed for the object, then the result is based on
7370 : typical alignment for an object of the specified size. Otherwise return
7371 : NULL. */
7372 :
7373 : static rtx
7374 3 : expand_builtin_atomic_is_lock_free (tree exp)
7375 : {
7376 3 : tree size;
7377 3 : tree arg0 = CALL_EXPR_ARG (exp, 0);
7378 3 : tree arg1 = CALL_EXPR_ARG (exp, 1);
7379 :
7380 3 : if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
7381 : {
7382 0 : error ("non-integer argument 1 to %qs", "__atomic_is_lock_free");
7383 0 : return NULL_RTX;
7384 : }
7385 :
7386 3 : if (!flag_inline_atomics)
7387 : return NULL_RTX;
7388 :
7389 : /* If the value is known at compile time, return the RTX for it. */
7390 2 : size = fold_builtin_atomic_is_lock_free (arg0, arg1);
7391 2 : if (size == boolean_true_node)
7392 0 : return const1_rtx;
7393 :
7394 : return NULL_RTX;
7395 : }
7396 :
7397 : /* Expand the __atomic_thread_fence intrinsic:
7398 : void __atomic_thread_fence (enum memmodel)
7399 : EXP is the CALL_EXPR. */
7400 :
7401 : static void
7402 693 : expand_builtin_atomic_thread_fence (tree exp)
7403 : {
7404 693 : enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
7405 693 : expand_mem_thread_fence (model);
7406 693 : }
7407 :
7408 : /* Expand the __atomic_signal_fence intrinsic:
7409 : void __atomic_signal_fence (enum memmodel)
7410 : EXP is the CALL_EXPR. */
7411 :
7412 : static void
7413 60 : expand_builtin_atomic_signal_fence (tree exp)
7414 : {
7415 60 : enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
7416 60 : expand_mem_signal_fence (model);
7417 60 : }
7418 :
7419 : /* Expand the __sync_synchronize intrinsic. */
7420 :
7421 : static void
7422 273 : expand_builtin_sync_synchronize (void)
7423 : {
7424 0 : expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
7425 0 : }
7426 :
7427 : static rtx
7428 3 : expand_builtin_thread_pointer (tree exp, rtx target)
7429 : {
7430 3 : enum insn_code icode;
7431 3 : if (!validate_arglist (exp, VOID_TYPE))
7432 0 : return const0_rtx;
7433 3 : icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
7434 3 : if (icode != CODE_FOR_nothing)
7435 : {
7436 3 : class expand_operand op;
7437 : /* If the target is not sutitable then create a new target. */
7438 3 : if (target == NULL_RTX
7439 3 : || !REG_P (target)
7440 6 : || GET_MODE (target) != Pmode)
7441 0 : target = gen_reg_rtx (Pmode);
7442 3 : create_output_operand (&op, target, Pmode);
7443 3 : expand_insn (icode, 1, &op);
7444 3 : return target;
7445 : }
7446 0 : error ("%<__builtin_thread_pointer%> is not supported on this target");
7447 0 : return const0_rtx;
7448 : }
7449 :
7450 : static void
7451 0 : expand_builtin_set_thread_pointer (tree exp)
7452 : {
7453 0 : enum insn_code icode;
7454 0 : if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7455 : return;
7456 0 : icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
7457 0 : if (icode != CODE_FOR_nothing)
7458 : {
7459 0 : class expand_operand op;
7460 0 : rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
7461 0 : Pmode, EXPAND_NORMAL);
7462 0 : create_input_operand (&op, val, Pmode);
7463 0 : expand_insn (icode, 1, &op);
7464 0 : return;
7465 : }
7466 0 : error ("%<__builtin_set_thread_pointer%> is not supported on this target");
7467 : }
7468 :
7469 :
7470 : /* Emit code to restore the current value of stack. */
7471 :
7472 : static void
7473 1712 : expand_stack_restore (tree var)
7474 : {
7475 1712 : rtx_insn *prev;
7476 1712 : rtx sa = expand_normal (var);
7477 :
7478 1712 : sa = convert_memory_address (Pmode, sa);
7479 :
7480 1712 : prev = get_last_insn ();
7481 1712 : emit_stack_restore (SAVE_BLOCK, sa);
7482 :
7483 1712 : record_new_stack_level ();
7484 :
7485 1712 : fixup_args_size_notes (prev, get_last_insn (), 0);
7486 1712 : }
7487 :
7488 : /* Emit code to save the current value of stack. */
7489 :
7490 : static rtx
7491 1821 : expand_stack_save (void)
7492 : {
7493 1821 : rtx ret = NULL_RTX;
7494 :
7495 0 : emit_stack_save (SAVE_BLOCK, &ret);
7496 1821 : return ret;
7497 : }
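 :
 : /* These two helpers back the __builtin_stack_save/__builtin_stack_restore
 : pair that gimplification emits around variable-sized allocations,
 : conceptually
 :
 : void *sp = __builtin_stack_save ();
 : ... VLA or alloca uses ...
 : __builtin_stack_restore (sp);  */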
7498 :
7499 : /* Emit code to get the openacc gang, worker or vector id or size. */
7500 :
7501 : static rtx
7502 356 : expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
7503 : {
7504 356 : const char *name;
7505 356 : rtx fallback_retval;
7506 356 : rtx_insn *(*gen_fn) (rtx, rtx);
7507 356 : switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
7508 : {
7509 244 : case BUILT_IN_GOACC_PARLEVEL_ID:
7510 244 : name = "__builtin_goacc_parlevel_id";
7511 244 : fallback_retval = const0_rtx;
7512 244 : gen_fn = targetm.gen_oacc_dim_pos;
7513 244 : break;
7514 112 : case BUILT_IN_GOACC_PARLEVEL_SIZE:
7515 112 : name = "__builtin_goacc_parlevel_size";
7516 112 : fallback_retval = const1_rtx;
7517 112 : gen_fn = targetm.gen_oacc_dim_size;
7518 112 : break;
7519 0 : default:
7520 0 : gcc_unreachable ();
7521 : }
7522 :
7523 356 : if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
7524 : {
7525 8 : error ("%qs only supported in OpenACC code", name);
7526 8 : return const0_rtx;
7527 : }
7528 :
7529 348 : tree arg = CALL_EXPR_ARG (exp, 0);
7530 348 : if (TREE_CODE (arg) != INTEGER_CST)
7531 : {
7532 8 : error ("non-constant argument 0 to %qs", name);
7533 8 : return const0_rtx;
7534 : }
7535 :
7536 340 : int dim = TREE_INT_CST_LOW (arg);
7537 340 : switch (dim)
7538 : {
7539 324 : case GOMP_DIM_GANG:
7540 324 : case GOMP_DIM_WORKER:
7541 324 : case GOMP_DIM_VECTOR:
7542 324 : break;
7543 16 : default:
7544 16 : error ("illegal argument 0 to %qs", name);
7545 16 : return const0_rtx;
7546 : }
7547 :
7548 324 : if (ignore)
7549 : return target;
7550 :
7551 180 : if (target == NULL_RTX)
7552 0 : target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7553 :
7554 180 : if (!targetm.have_oacc_dim_size ())
7555 : {
7556 180 : emit_move_insn (target, fallback_retval);
7557 180 : return target;
7558 : }
7559 :
7560 0 : rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
7561 0 : emit_insn (gen_fn (reg, GEN_INT (dim)));
7562 0 : if (reg != target)
7563 0 : emit_move_insn (target, reg);
7564 :
7565 : return target;
7566 : }
7567 :
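     : /* Editor's sketch, not part of GCC: the function above expects a
     :    constant dimension argument taken from gomp-constants.h, e.g.
     :
     :      __builtin_goacc_parlevel_id (GOMP_DIM_WORKER);
     :      __builtin_goacc_parlevel_size (GOMP_DIM_VECTOR);
     :
     :    Calls outside OpenACC code, or with a non-constant or
     :    out-of-range dimension, are rejected by the checks above.  */
     :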
7568 : /* Expand a string compare operation as a sequence of char comparisons
7569 :    to avoid the call overhead, with the result going to TARGET if
7570 : that's convenient.
7571 :
7572 : VAR_STR is the variable string source;
7573 : CONST_STR is the constant string source;
7574 : LENGTH is the number of chars to compare;
7575 : CONST_STR_N indicates which source string is the constant string;
7576 :    MODE is the mode of the result.
7577 :
7578 :    The expansion looks like (assuming CONST_STR_N is 2, i.e., arg2 is the constant string):
7579 :
7580 : target = (int) (unsigned char) var_str[0]
7581 : - (int) (unsigned char) const_str[0];
7582 : if (target != 0)
7583 : goto ne_label;
7584 : ...
7585 : target = (int) (unsigned char) var_str[length - 2]
7586 : - (int) (unsigned char) const_str[length - 2];
7587 : if (target != 0)
7588 : goto ne_label;
7589 : target = (int) (unsigned char) var_str[length - 1]
7590 : - (int) (unsigned char) const_str[length - 1];
7591 : ne_label:
7592 : */
7593 :
7594 : static rtx
7595 623 : inline_string_cmp (rtx target, tree var_str, const char *const_str,
7596 : unsigned HOST_WIDE_INT length,
7597 : int const_str_n, machine_mode mode)
7598 : {
7599 623 : HOST_WIDE_INT offset = 0;
7600 623 : rtx var_rtx_array
7601 623 : = get_memory_rtx (var_str, build_int_cst (unsigned_type_node,length));
7602 623 : rtx var_rtx = NULL_RTX;
7603 623 : rtx const_rtx = NULL_RTX;
7604 623 : rtx result = target ? target : gen_reg_rtx (mode);
7605 623 : rtx_code_label *ne_label = gen_label_rtx ();
7606 623 : tree unit_type_node = unsigned_char_type_node;
7607 623 : scalar_int_mode unit_mode
7608 623 : = as_a <scalar_int_mode> TYPE_MODE (unit_type_node);
7609 :
7610 623 : start_sequence ();
7611 :
7612 2280 : for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
7613 : {
7614 1657 : var_rtx
7615 1657 : = adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
7616 1657 : const_rtx = c_readstr (const_str + offset, unit_mode);
7617 1657 : rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
7618 1507 : rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;
7619 :
7620 1657 : op0 = convert_modes (mode, unit_mode, op0, 1);
7621 1657 : op1 = convert_modes (mode, unit_mode, op1, 1);
7622 1657 : rtx diff = expand_simple_binop (mode, MINUS, op0, op1,
7623 : result, 1, OPTAB_WIDEN);
7624 :
7625 :       /* Force the difference into the result register.  We cannot
7626 :          reassign result here ("result = diff") or we may end up returning
7627 :          an uninitialized result when expand_simple_binop allocates a new
7628 :          pseudo-register for the return value.  */
7629 1657 : if (diff != result)
7630 0 : emit_move_insn (result, diff);
7631 :
7632 1657 : if (i < length - 1)
7633 1034 : emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
7634 : mode, true, ne_label);
7635 3314 : offset += GET_MODE_SIZE (unit_mode);
7636 : }
7637 :
7638 623 : emit_label (ne_label);
7639 623 : rtx_insn *insns = end_sequence ();
7640 623 : emit_insn (insns);
7641 :
7642 623 : return result;
7643 : }
7644 :
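     : /* Editor's sketch, not part of GCC: for strcmp (s, "hi") the loop
     :    above emits the equivalent of
     :
     :      result = (int) (unsigned char) s[0] - (int) (unsigned char) 'h';
     :      if (result != 0) goto ne_label;
     :      result = (int) (unsigned char) s[1] - (int) (unsigned char) 'i';
     :      if (result != 0) goto ne_label;
     :      result = (int) (unsigned char) s[2] - (int) (unsigned char) '\0';
     :    ne_label:
     : */
     :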
7645 : /* Inline expansion of a call to str(n)cmp or memcmp, with the result
7646 :    going to TARGET if that's convenient.
7647 :    If the call cannot be inlined, return NULL_RTX.  */
7648 :
7649 : static rtx
7650 144463 : inline_expand_builtin_bytecmp (tree exp, rtx target)
7651 : {
7652 144463 : tree fndecl = get_callee_fndecl (exp);
7653 144463 : enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7654 144463 : bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);
7655 :
7656 :   /* Do NOT apply this inline expansion when optimizing for size, at
7657 :      optimization levels below 2, or if the unused *cmp result hasn't been DCEd.  */
7658 144463 : if (optimize < 2 || optimize_insn_for_size_p () || target == const0_rtx)
7659 122024 : return NULL_RTX;
7660 :
7661 22439 : gcc_checking_assert (fcode == BUILT_IN_STRCMP
7662 : || fcode == BUILT_IN_STRNCMP
7663 : || fcode == BUILT_IN_MEMCMP);
7664 :
7665 :   /* On a target where the type of the call (int) has the same or narrower
7666 :      precision than unsigned char, give up on the inline expansion.  */
7667 22439 : if (TYPE_PRECISION (unsigned_char_type_node)
7668 22439 : >= TYPE_PRECISION (TREE_TYPE (exp)))
7669 : return NULL_RTX;
7670 :
7671 22439 : tree arg1 = CALL_EXPR_ARG (exp, 0);
7672 22439 : tree arg2 = CALL_EXPR_ARG (exp, 1);
7673 22439 : tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;
7674 :
7675 22439 : unsigned HOST_WIDE_INT len1 = 0;
7676 22439 : unsigned HOST_WIDE_INT len2 = 0;
7677 22439 : unsigned HOST_WIDE_INT len3 = 0;
7678 :
7679 : /* Get the object representation of the initializers of ARG1 and ARG2
7680 : as strings, provided they refer to constant objects, with their byte
7681 : sizes in LEN1 and LEN2, respectively. */
7682 22439 : const char *bytes1 = getbyterep (arg1, &len1);
7683 22439 : const char *bytes2 = getbyterep (arg2, &len2);
7684 :
7685 : /* Fail if neither argument refers to an initialized constant. */
7686 22439 : if (!bytes1 && !bytes2)
7687 : return NULL_RTX;
7688 :
7689 18452 : if (is_ncmp)
7690 : {
7691 : /* Fail if the memcmp/strncmp bound is not a constant. */
7692 1344 : if (!tree_fits_uhwi_p (len3_tree))
7693 : return NULL_RTX;
7694 :
7695 948 : len3 = tree_to_uhwi (len3_tree);
7696 :
7697 948 : if (fcode == BUILT_IN_MEMCMP)
7698 : {
7699 : /* Fail if the memcmp bound is greater than the size of either
7700 : of the two constant objects. */
7701 450 : if ((bytes1 && len1 < len3)
7702 450 : || (bytes2 && len2 < len3))
7703 : return NULL_RTX;
7704 : }
7705 : }
7706 :
7707 : if (fcode != BUILT_IN_MEMCMP)
7708 : {
7709 : /* For string functions (i.e., strcmp and strncmp) reduce LEN1
7710 : and LEN2 to the length of the nul-terminated string stored
7711 : in each. */
7712 17606 : if (bytes1 != NULL)
7713 236 : len1 = strnlen (bytes1, len1) + 1;
7714 17606 : if (bytes2 != NULL)
7715 17373 : len2 = strnlen (bytes2, len2) + 1;
7716 : }
7717 :
7718 : /* See inline_string_cmp. */
7719 18052 : int const_str_n;
7720 18052 : if (!len1)
7721 : const_str_n = 2;
7722 241 : else if (!len2)
7723 : const_str_n = 1;
7724 3 : else if (len2 > len1)
7725 : const_str_n = 1;
7726 : else
7727 : const_str_n = 2;
7728 :
7729 : /* For strncmp only, compute the new bound as the smallest of
7730 : the lengths of the two strings (plus 1) and the bound provided
7731 : to the function. */
7732 17813 : unsigned HOST_WIDE_INT bound = (const_str_n == 1) ? len1 : len2;
7733 18052 : if (is_ncmp && len3 < bound)
7734 520 : bound = len3;
7735 :
7736 : /* If the bound of the comparison is larger than the threshold,
7737 : do nothing. */
7738 18052 : if (bound > (unsigned HOST_WIDE_INT) param_builtin_string_cmp_inline_length)
7739 : return NULL_RTX;
7740 :
7741 623 : machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
7742 :
7743 :   /* Now expand the call inline.  */
7744 1189 : return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
7745 : (const_str_n == 1) ? bytes1 : bytes2, bound,
7746 623 : const_str_n, mode);
7747 : }
7748 :
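     : /* Editor's sketch, not part of GCC: assuming the documented default
     :    of --param builtin-string-cmp-inline-length=3, at -O2 a call such
     :    as
     :
     :      if (strcmp (s, "ab") == 0) ...
     :
     :    has bound 3 (two chars plus the nul) and is expanded inline by
     :    the function above, while strcmp (s, "abcd") exceeds the
     :    threshold and stays a library call.  */
     :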
7749 : /* Expand a call to __builtin_speculation_safe_value_<N>. MODE
7750 : represents the size of the first argument to that call, or VOIDmode
7751 : if the argument is a pointer. IGNORE will be true if the result
7752 : isn't used. */
7753 : static rtx
7754 34 : expand_speculation_safe_value (machine_mode mode, tree exp, rtx target,
7755 : bool ignore)
7756 : {
7757 34 : rtx val, failsafe;
7758 34 : unsigned nargs = call_expr_nargs (exp);
7759 :
7760 34 : tree arg0 = CALL_EXPR_ARG (exp, 0);
7761 :
7762 34 : if (mode == VOIDmode)
7763 : {
7764 4 : mode = TYPE_MODE (TREE_TYPE (arg0));
7765 4 : gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
7766 : }
7767 :
7768 34 : val = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
7769 :
7770 : /* An optional second argument can be used as a failsafe value on
7771 : some machines. If it isn't present, then the failsafe value is
7772 : assumed to be 0. */
7773 34 : if (nargs > 1)
7774 : {
7775 4 : tree arg1 = CALL_EXPR_ARG (exp, 1);
7776 4 : failsafe = expand_expr (arg1, NULL_RTX, mode, EXPAND_NORMAL);
7777 : }
7778 : else
7779 30 : failsafe = const0_rtx;
7780 :
7781 : /* If the result isn't used, the behavior is undefined. It would be
7782 : nice to emit a warning here, but path splitting means this might
7783 : happen with legitimate code. So simply drop the builtin
7784 : expansion in that case; we've handled any side-effects above. */
7785 34 : if (ignore)
7786 0 : return const0_rtx;
7787 :
7788 : /* If we don't have a suitable target, create one to hold the result. */
7789 34 : if (target == NULL || GET_MODE (target) != mode)
7790 0 : target = gen_reg_rtx (mode);
7791 :
7792 34 : if (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)
7793 0 : val = convert_modes (mode, VOIDmode, val, false);
7794 :
7795 34 : return targetm.speculation_safe_value (mode, target, val, failsafe);
7796 : }
7797 :
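     : /* Editor's sketch, not part of GCC: the canonical use of the
     :    builtin expanded above is to inhibit value speculation past a
     :    bounds check (Spectre variant 1 mitigation):
     :
     :      int
     :      load_checked (unsigned idx)
     :      {
     :        if (idx < table_size)
     :          return table[__builtin_speculation_safe_value (idx)];
     :        return 0;   // without a second argument the failsafe is 0
     :      }
     : */
     :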
7798 : /* Expand CRC* or REV_CRC* built-ins. */
7799 :
7800 : rtx
7801 36 : expand_builtin_crc_table_based (internal_fn fn, scalar_mode crc_mode,
7802 : scalar_mode data_mode, machine_mode mode,
7803 : tree exp, rtx target)
7804 : {
7805 36 : tree rhs1 = CALL_EXPR_ARG (exp, 0); // crc
7806 36 : tree rhs2 = CALL_EXPR_ARG (exp, 1); // data
7807 36 : tree rhs3 = CALL_EXPR_ARG (exp, 2); // polynomial
7808 :
7809 36 : if (!target || mode == VOIDmode)
7810 0 : target = gen_reg_rtx (crc_mode);
7811 :
7812 36 : rtx op1 = expand_normal (rhs1);
7813 36 : rtx op2 = expand_normal (rhs2);
7814 36 : rtx op3;
7815 36 : if (TREE_CODE (rhs3) != INTEGER_CST)
7816 : {
7817 1 : error ("third argument to %<crc%> builtins must be a constant");
7818 1 : op3 = const0_rtx;
7819 : }
7820 : else
7821 35 : op3 = convert_to_mode (crc_mode, expand_normal (rhs3), 0);
7822 :
7823 36 : if (CONST_INT_P (op2))
7824 35 : op2 = convert_to_mode (crc_mode, op2, 0);
7825 :
7826 36 : if (fn == IFN_CRC)
7827 19 : expand_crc_table_based (target, op1, op2, op3, data_mode);
7828 : else
7829 : /* If it's IFN_CRC_REV generate bit-reversed CRC. */
7830 17 : expand_reversed_crc_table_based (target, op1, op2, op3,
7831 : data_mode,
7832 : generate_reflecting_code_standard);
7833 36 : return target;
7834 : }
7835 :
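     : /* Editor's sketch, not part of GCC: assuming the documented builtin
     :    signatures, a call such as
     :
     :      crc = __builtin_rev_crc32_data8 (crc, byte, 0x04C11DB7);
     :
     :    (with a constant polynomial operand, as enforced above) reaches
     :    the function above as IFN_CRC_REV with SImode CRC and QImode
     :    data.  */
     :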
7836 : /* Expand an expression EXP that calls a built-in function,
7837 : with result going to TARGET if that's convenient
7838 : (and in mode MODE if that's convenient).
7839 : SUBTARGET may be used as the target for computing one of EXP's operands.
7840 : IGNORE is nonzero if the value is to be ignored. */
7841 :
7842 : rtx
7843 1966895 : expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
7844 : int ignore)
7845 : {
7846 1966895 : tree fndecl = get_callee_fndecl (exp);
7847 1966895 : machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
7848 1966895 : int flags;
7849 :
7850 1966895 : if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7851 172368 : return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7852 :
7853 :   /* When ASan is enabled, we don't want to expand some memory/string
7854 :      builtins and instead rely on libsanitizer's hooks.  This allows us
7855 :      to avoid redundant checks and be sure that a possible overflow will
7856 :      be detected by ASan.  */
7857 :
7858 1794527 : enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7859 1794527 : if (param_asan_kernel_mem_intrinsic_prefix
7860 1794527 : && sanitize_flags_p (SANITIZE_KERNEL_ADDRESS
7861 : | SANITIZE_KERNEL_HWADDRESS))
7862 42 : switch (fcode)
7863 : {
7864 42 : rtx save_decl_rtl, ret;
7865 42 : case BUILT_IN_MEMCPY:
7866 42 : case BUILT_IN_MEMMOVE:
7867 42 : case BUILT_IN_MEMSET:
7868 42 : save_decl_rtl = DECL_RTL (fndecl);
7869 42 : DECL_RTL (fndecl) = asan_memfn_rtl (fndecl);
7870 42 : ret = expand_call (exp, target, ignore);
7871 42 : DECL_RTL (fndecl) = save_decl_rtl;
7872 42 : return ret;
7873 : default:
7874 : break;
7875 : }
7876 1794485 : if (sanitize_flags_p (SANITIZE_ADDRESS | SANITIZE_HWADDRESS)
7877 1794485 : && asan_intercepted_p (fcode))
7878 803 : return expand_call (exp, target, ignore);
7879 :
7880 : /* When not optimizing, generate calls to library functions for a certain
7881 : set of builtins. */
7882 1793682 : if (!optimize
7883 421786 : && !called_as_built_in (fndecl)
7884 141049 : && fcode != BUILT_IN_FORK
7885 141049 : && fcode != BUILT_IN_EXECL
7886 141038 : && fcode != BUILT_IN_EXECV
7887 141038 : && fcode != BUILT_IN_EXECLP
7888 141036 : && fcode != BUILT_IN_EXECLE
7889 141036 : && fcode != BUILT_IN_EXECVP
7890 141035 : && fcode != BUILT_IN_EXECVE
7891 141035 : && fcode != BUILT_IN_CLEAR_CACHE
7892 141032 : && !ALLOCA_FUNCTION_CODE_P (fcode)
7893 140978 : && fcode != BUILT_IN_FREE
7894 138894 : && (fcode != BUILT_IN_MEMSET
7895 55878 : || !(flag_inline_stringops & ILSOP_MEMSET))
7896 138891 : && (fcode != BUILT_IN_MEMCPY
7897 918 : || !(flag_inline_stringops & ILSOP_MEMCPY))
7898 138755 : && (fcode != BUILT_IN_MEMMOVE
7899 348 : || !(flag_inline_stringops & ILSOP_MEMMOVE))
7900 1932437 : && (fcode != BUILT_IN_MEMCMP
7901 2504 : || !(flag_inline_stringops & ILSOP_MEMCMP)))
7902 138707 : return expand_call (exp, target, ignore);
7903 :
7904 : /* The built-in function expanders test for target == const0_rtx
7905 : to determine whether the function's result will be ignored. */
7906 1654975 : if (ignore)
7907 960896 : target = const0_rtx;
7908 :
7909 : /* If the result of a pure or const built-in function is ignored, and
7910 : none of its arguments are volatile, we can avoid expanding the
7911 : built-in call and just evaluate the arguments for side-effects. */
7912 1654975 : if (target == const0_rtx
7913 960896 : && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
7914 1660458 : && !(flags & ECF_LOOPING_CONST_OR_PURE))
7915 : {
7916 6 : bool volatilep = false;
7917 6 : tree arg;
7918 6 : call_expr_arg_iterator iter;
7919 :
7920 20 : FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7921 8 : if (TREE_THIS_VOLATILE (arg))
7922 : {
7923 : volatilep = true;
7924 : break;
7925 : }
7926 :
7927 6 : if (! volatilep)
7928 : {
7929 20 : FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7930 8 : expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
7931 6 : return const0_rtx;
7932 : }
7933 : }
7934 :
7935 1654969 : switch (fcode)
7936 : {
7937 4 : CASE_FLT_FN (BUILT_IN_FABS):
7938 4 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
7939 4 : case BUILT_IN_FABSD32:
7940 4 : case BUILT_IN_FABSD64:
7941 4 : case BUILT_IN_FABSD128:
7942 4 : case BUILT_IN_FABSD64X:
7943 4 : target = expand_builtin_fabs (exp, target, subtarget);
7944 4 : if (target)
7945 823262 : return target;
7946 831707 : break;
7947 :
7948 11674 : CASE_FLT_FN (BUILT_IN_COPYSIGN):
7949 11674 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
7950 11674 : target = expand_builtin_copysign (exp, target, subtarget);
7951 11674 : if (target)
7952 : return target;
7953 : break;
7954 :
7955 : /* Just do a normal library call if we were unable to fold
7956 : the values. */
7957 : CASE_FLT_FN (BUILT_IN_CABS):
7958 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_CABS):
7959 : break;
7960 :
7961 368 : CASE_FLT_FN (BUILT_IN_FMA):
7962 368 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
7963 368 : target = expand_builtin_mathfn_ternary (exp, target, subtarget);
7964 368 : if (target)
7965 : return target;
7966 : break;
7967 :
7968 235 : CASE_FLT_FN (BUILT_IN_ILOGB):
7969 235 : if (! flag_unsafe_math_optimizations)
7970 : break;
7971 4 : gcc_fallthrough ();
7972 4 : CASE_FLT_FN (BUILT_IN_ISINF):
7973 4 : CASE_FLT_FN (BUILT_IN_FINITE):
7974 4 : CASE_FLT_FN (BUILT_IN_ISNAN):
7975 4 : case BUILT_IN_ISFINITE:
7976 4 : case BUILT_IN_ISNORMAL:
7977 4 : target = expand_builtin_interclass_mathfn (exp, target);
7978 4 : if (target)
7979 : return target;
7980 : break;
7981 :
7982 823 : case BUILT_IN_ISSIGNALING:
7983 823 : target = expand_builtin_issignaling (exp, target);
7984 823 : if (target)
7985 : return target;
7986 : break;
7987 :
7988 235 : CASE_FLT_FN (BUILT_IN_ICEIL):
7989 235 : CASE_FLT_FN (BUILT_IN_LCEIL):
7990 235 : CASE_FLT_FN (BUILT_IN_LLCEIL):
7991 235 : CASE_FLT_FN (BUILT_IN_LFLOOR):
7992 235 : CASE_FLT_FN (BUILT_IN_IFLOOR):
7993 235 : CASE_FLT_FN (BUILT_IN_LLFLOOR):
7994 235 : target = expand_builtin_int_roundingfn (exp, target);
7995 235 : if (target)
7996 : return target;
7997 : break;
7998 :
7999 592 : CASE_FLT_FN (BUILT_IN_IRINT):
8000 592 : CASE_FLT_FN (BUILT_IN_LRINT):
8001 592 : CASE_FLT_FN (BUILT_IN_LLRINT):
8002 592 : CASE_FLT_FN (BUILT_IN_IROUND):
8003 592 : CASE_FLT_FN (BUILT_IN_LROUND):
8004 592 : CASE_FLT_FN (BUILT_IN_LLROUND):
8005 592 : target = expand_builtin_int_roundingfn_2 (exp, target);
8006 592 : if (target)
8007 : return target;
8008 : break;
8009 :
8010 292 : CASE_FLT_FN (BUILT_IN_POWI):
8011 292 : target = expand_builtin_powi (exp, target);
8012 292 : if (target)
8013 : return target;
8014 : break;
8015 :
8016 159 : CASE_FLT_FN (BUILT_IN_CEXPI):
8017 159 : target = expand_builtin_cexpi (exp, target);
8018 159 : gcc_assert (target);
8019 : return target;
8020 :
8021 991 : CASE_FLT_FN (BUILT_IN_SIN):
8022 991 : CASE_FLT_FN (BUILT_IN_COS):
8023 991 : if (! flag_unsafe_math_optimizations)
8024 : break;
8025 47 : target = expand_builtin_mathfn_3 (exp, target, subtarget);
8026 47 : if (target)
8027 : return target;
8028 : break;
8029 :
8030 153 : CASE_FLT_FN (BUILT_IN_SINCOS):
8031 153 : if (! flag_unsafe_math_optimizations)
8032 : break;
8033 3 : target = expand_builtin_sincos (exp);
8034 3 : if (target)
8035 : return target;
8036 : break;
8037 :
8038 60 : case BUILT_IN_FEGETROUND:
8039 60 : target = expand_builtin_fegetround (exp, target, target_mode);
8040 60 : if (target)
8041 : return target;
8042 : break;
8043 :
8044 1063 : case BUILT_IN_FECLEAREXCEPT:
8045 1063 : target = expand_builtin_feclear_feraise_except (exp, target, target_mode,
8046 : feclearexcept_optab);
8047 1063 : if (target)
8048 : return target;
8049 : break;
8050 :
8051 831 : case BUILT_IN_FERAISEEXCEPT:
8052 831 : target = expand_builtin_feclear_feraise_except (exp, target, target_mode,
8053 : feraiseexcept_optab);
8054 831 : if (target)
8055 : return target;
8056 : break;
8057 :
8058 464 : case BUILT_IN_APPLY_ARGS:
8059 464 : return expand_builtin_apply_args ();
8060 :
8061 : /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
8062 : FUNCTION with a copy of the parameters described by
8063 : ARGUMENTS, and ARGSIZE. It returns a block of memory
8064 : allocated on the stack into which is stored all the registers
8065 : that might possibly be used for returning the result of a
8066 : function. ARGUMENTS is the value returned by
8067 : __builtin_apply_args. ARGSIZE is the number of bytes of
8068 : arguments that must be copied. ??? How should this value be
8069 : computed? We'll also need a safe worst case value for varargs
8070 : functions. */
8071 483 : case BUILT_IN_APPLY:
8072 483 : if (!validate_arglist (exp, POINTER_TYPE,
8073 : POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
8074 483 : && !validate_arglist (exp, REFERENCE_TYPE,
8075 : POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
8076 0 : return const0_rtx;
8077 : else
8078 : {
8079 483 : rtx ops[3];
8080 :
8081 483 : ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
8082 483 : ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
8083 483 : ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
8084 :
8085 483 : return expand_builtin_apply (ops[0], ops[1], ops[2]);
8086 : }
8087 :
8088 : /* __builtin_return (RESULT) causes the function to return the
8089 : value described by RESULT. RESULT is address of the block of
8090 : memory returned by __builtin_apply. */
8091 379 : case BUILT_IN_RETURN:
8092 379 : if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
8093 379 : expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
8094 379 : return const0_rtx;
8095 :
8096 0 : case BUILT_IN_SAVEREGS:
8097 0 : return expand_builtin_saveregs ();
8098 :
8099 16 : case BUILT_IN_VA_ARG_PACK:
8100 : /* All valid uses of __builtin_va_arg_pack () are removed during
8101 : inlining. */
8102 16 : error ("invalid use of %<__builtin_va_arg_pack ()%>");
8103 16 : return const0_rtx;
8104 :
8105 0 : case BUILT_IN_VA_ARG_PACK_LEN:
8106 : /* All valid uses of __builtin_va_arg_pack_len () are removed during
8107 : inlining. */
8108 0 : error ("invalid use of %<__builtin_va_arg_pack_len ()%>");
8109 0 : return const0_rtx;
8110 :
8111 : /* Return the address of the first anonymous stack arg. */
8112 158 : case BUILT_IN_NEXT_ARG:
8113 158 : if (fold_builtin_next_arg (exp, false))
8114 0 : return const0_rtx;
8115 158 : return expand_builtin_next_arg ();
8116 :
8117 28 : case BUILT_IN_CLEAR_CACHE:
8118 28 : expand_builtin___clear_cache (exp);
8119 28 : return const0_rtx;
8120 :
8121 0 : case BUILT_IN_CLASSIFY_TYPE:
8122 0 : return expand_builtin_classify_type (exp);
8123 :
8124 0 : case BUILT_IN_CONSTANT_P:
8125 0 : return const0_rtx;
8126 :
8127 15901 : case BUILT_IN_FRAME_ADDRESS:
8128 15901 : case BUILT_IN_RETURN_ADDRESS:
8129 15901 : return expand_builtin_frame_address (fndecl, exp);
8130 :
8131 540 : case BUILT_IN_STACK_ADDRESS:
8132 540 : return expand_builtin_stack_address ();
8133 :
8134 2159 : case BUILT_IN___STRUB_ENTER:
8135 2159 : target = expand_builtin_strub_enter (exp);
8136 2159 : if (target)
8137 : return target;
8138 : break;
8139 :
8140 1072 : case BUILT_IN___STRUB_UPDATE:
8141 1072 : target = expand_builtin_strub_update (exp);
8142 1072 : if (target)
8143 : return target;
8144 : break;
8145 :
8146 2729 : case BUILT_IN___STRUB_LEAVE:
8147 2729 : target = expand_builtin_strub_leave (exp);
8148 2729 : if (target)
8149 : return target;
8150 : break;
8151 :
8152 : /* Returns the address of the area where the structure is returned.
8153 : 0 otherwise. */
8154 0 : case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
8155 0 : if (call_expr_nargs (exp) != 0
8156 0 : || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
8157 0 : || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
8158 0 : return const0_rtx;
8159 : else
8160 0 : return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
8161 :
8162 27713 : CASE_BUILT_IN_ALLOCA:
8163 27713 : target = expand_builtin_alloca (exp);
8164 27713 : if (target)
8165 : return target;
8166 : break;
8167 :
8168 212 : case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
8169 212 : return expand_asan_emit_allocas_unpoison (exp);
8170 :
8171 1821 : case BUILT_IN_STACK_SAVE:
8172 1821 : return expand_stack_save ();
8173 :
8174 1712 : case BUILT_IN_STACK_RESTORE:
8175 1712 : expand_stack_restore (CALL_EXPR_ARG (exp, 0));
8176 1712 : return const0_rtx;
8177 :
8178 1204 : case BUILT_IN_BSWAP16:
8179 1204 : case BUILT_IN_BSWAP32:
8180 1204 : case BUILT_IN_BSWAP64:
8181 1204 : case BUILT_IN_BSWAP128:
8182 1204 : target = expand_builtin_bswap (target_mode, exp, target, subtarget);
8183 1204 : if (target)
8184 : return target;
8185 : break;
8186 :
8187 0 : CASE_INT_FN (BUILT_IN_FFS):
8188 0 : target = expand_builtin_unop (target_mode, exp, target,
8189 : subtarget, ffs_optab);
8190 0 : if (target)
8191 : return target;
8192 : break;
8193 :
8194 183 : CASE_INT_FN (BUILT_IN_CLZ):
8195 183 : target = expand_builtin_unop (target_mode, exp, target,
8196 : subtarget, clz_optab);
8197 183 : if (target)
8198 : return target;
8199 : break;
8200 :
8201 50 : CASE_INT_FN (BUILT_IN_CTZ):
8202 50 : target = expand_builtin_unop (target_mode, exp, target,
8203 : subtarget, ctz_optab);
8204 50 : if (target)
8205 : return target;
8206 : break;
8207 :
8208 90 : CASE_INT_FN (BUILT_IN_CLRSB):
8209 90 : target = expand_builtin_unop (target_mode, exp, target,
8210 : subtarget, clrsb_optab);
8211 90 : if (target)
8212 : return target;
8213 : break;
8214 :
8215 421 : CASE_INT_FN (BUILT_IN_POPCOUNT):
8216 421 : target = expand_builtin_unop (target_mode, exp, target,
8217 : subtarget, popcount_optab);
8218 421 : if (target)
8219 : return target;
8220 : break;
8221 :
8222 11 : CASE_INT_FN (BUILT_IN_PARITY):
8223 11 : target = expand_builtin_unop (target_mode, exp, target,
8224 : subtarget, parity_optab);
8225 11 : if (target)
8226 : return target;
8227 : break;
8228 :
8229 13749 : case BUILT_IN_STRLEN:
8230 13749 : target = expand_builtin_strlen (exp, target, target_mode);
8231 13749 : if (target)
8232 : return target;
8233 : break;
8234 :
8235 580 : case BUILT_IN_STRNLEN:
8236 580 : target = expand_builtin_strnlen (exp, target, target_mode);
8237 580 : if (target)
8238 : return target;
8239 : break;
8240 :
8241 1864 : case BUILT_IN_STRCPY:
8242 1864 : target = expand_builtin_strcpy (exp, target);
8243 1864 : if (target)
8244 : return target;
8245 : break;
8246 :
8247 2169 : case BUILT_IN_STRNCPY:
8248 2169 : target = expand_builtin_strncpy (exp, target);
8249 2169 : if (target)
8250 : return target;
8251 : break;
8252 :
8253 454 : case BUILT_IN_STPCPY:
8254 454 : target = expand_builtin_stpcpy (exp, target, mode);
8255 454 : if (target)
8256 : return target;
8257 : break;
8258 :
8259 105132 : case BUILT_IN_MEMCPY:
8260 105132 : target = expand_builtin_memcpy (exp, target);
8261 105132 : if (target)
8262 : return target;
8263 : break;
8264 :
8265 17216 : case BUILT_IN_MEMMOVE:
8266 17216 : target = expand_builtin_memmove (exp, target);
8267 17216 : if (target)
8268 : return target;
8269 : break;
8270 :
8271 1634 : case BUILT_IN_MEMPCPY:
8272 1634 : target = expand_builtin_mempcpy (exp, target);
8273 1634 : if (target)
8274 : return target;
8275 : break;
8276 :
8277 35487 : case BUILT_IN_MEMSET:
8278 35487 : target = expand_builtin_memset (exp, target, mode);
8279 35487 : if (target)
8280 : return target;
8281 : break;
8282 :
8283 0 : case BUILT_IN_BZERO:
8284 0 : target = expand_builtin_bzero (exp);
8285 0 : if (target)
8286 : return target;
8287 : break;
8288 :
8289 : /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
8290 : back to a BUILT_IN_STRCMP. Remember to delete the 3rd parameter
8291 : when changing it to a strcmp call. */
8292 381 : case BUILT_IN_STRCMP_EQ:
8293 381 : target = expand_builtin_memcmp (exp, target, true);
8294 381 : if (target)
8295 : return target;
8296 :
8297 : /* Change this call back to a BUILT_IN_STRCMP. */
8298 25 : TREE_OPERAND (exp, 1)
8299 25 : = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));
8300 :
8301 : /* Delete the last parameter. */
8302 25 : unsigned int i;
8303 25 : vec<tree, va_gc> *arg_vec;
8304 25 : vec_alloc (arg_vec, 2);
8305 100 : for (i = 0; i < 2; i++)
8306 50 : arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
8307 25 : exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
8308 : /* FALLTHROUGH */
8309 :
8310 127943 : case BUILT_IN_STRCMP:
8311 127943 : target = expand_builtin_strcmp (exp, target);
8312 127943 : if (target)
8313 : return target;
8314 : break;
8315 :
8316 : /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
8317 : back to a BUILT_IN_STRNCMP. */
8318 37 : case BUILT_IN_STRNCMP_EQ:
8319 37 : target = expand_builtin_memcmp (exp, target, true);
8320 37 : if (target)
8321 : return target;
8322 :
8323 : /* Change it back to a BUILT_IN_STRNCMP. */
8324 0 : TREE_OPERAND (exp, 1)
8325 0 : = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
8326 : /* FALLTHROUGH */
8327 :
8328 2024 : case BUILT_IN_STRNCMP:
8329 2024 : target = expand_builtin_strncmp (exp, target, mode);
8330 2024 : if (target)
8331 : return target;
8332 : break;
8333 :
8334 103824 : case BUILT_IN_BCMP:
8335 103824 : case BUILT_IN_MEMCMP:
8336 103824 : case BUILT_IN_MEMCMP_EQ:
8337 103824 : target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
8338 103824 : if (target)
8339 : return target;
8340 36408 : if (fcode == BUILT_IN_MEMCMP_EQ)
8341 : {
8342 22263 : tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
8343 22263 : TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
8344 : }
8345 : break;
8346 :
8347 0 : case BUILT_IN_SETJMP:
8348 : /* This should have been lowered to the builtins below. */
8349 0 : gcc_unreachable ();
8350 :
8351 841 : case BUILT_IN_SETJMP_SETUP:
8352 : /* __builtin_setjmp_setup is passed a pointer to an array of five words
8353 : and the receiver label. */
8354 841 : if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
8355 : {
8356 841 : rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
8357 : VOIDmode, EXPAND_NORMAL);
8358 841 : tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
8359 841 : rtx_insn *label_r = label_rtx (label);
8360 :
8361 841 : expand_builtin_setjmp_setup (buf_addr, label_r);
8362 841 : return const0_rtx;
8363 : }
8364 : break;
8365 :
8366 841 : case BUILT_IN_SETJMP_RECEIVER:
8367 : /* __builtin_setjmp_receiver is passed the receiver label. */
8368 841 : if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
8369 : {
8370 841 : tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
8371 841 : rtx_insn *label_r = label_rtx (label);
8372 :
8373 841 : expand_builtin_setjmp_receiver (label_r);
8374 841 : nonlocal_goto_handler_labels
8375 1682 : = gen_rtx_INSN_LIST (VOIDmode, label_r,
8376 841 : nonlocal_goto_handler_labels);
8377 : /* ??? Do not let expand_label treat us as such since we would
8378 : not want to be both on the list of non-local labels and on
8379 : the list of forced labels. */
8380 841 : FORCED_LABEL (label) = 0;
8381 841 : return const0_rtx;
8382 : }
8383 : break;
8384 :
8385 : /* __builtin_longjmp is passed a pointer to an array of five words.
8386 : It's similar to the C library longjmp function but works with
8387 : __builtin_setjmp above. */
8388 391 : case BUILT_IN_LONGJMP:
8389 391 : if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
8390 : {
8391 391 : rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
8392 : VOIDmode, EXPAND_NORMAL);
8393 391 : rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
8394 :
8395 391 : if (value != const1_rtx)
8396 : {
8397 0 : error ("%<__builtin_longjmp%> second argument must be 1");
8398 0 : return const0_rtx;
8399 : }
8400 :
8401 391 : expand_builtin_longjmp (buf_addr, value);
8402 391 : return const0_rtx;
8403 : }
8404 : break;
8405 :
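     :     /* Editor's sketch, not part of GCC: __builtin_longjmp, handled
     :        above, pairs with __builtin_setjmp (lowered before expansion)
     :        as a lightweight variant of setjmp/longjmp:
     :
     :          intptr_t buf[5];
     :          if (__builtin_setjmp (buf) == 0)
     :            ... __builtin_longjmp (buf, 1);  // second argument must be 1
     :          else
     :            ... // control resumes here after the longjmp
     :        */
     :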
8406 512 : case BUILT_IN_NONLOCAL_GOTO:
8407 512 : target = expand_builtin_nonlocal_goto (exp);
8408 512 : if (target)
8409 : return target;
8410 : break;
8411 :
8412 : /* This updates the setjmp buffer that is its argument with the value
8413 : of the current stack pointer. */
8414 0 : case BUILT_IN_UPDATE_SETJMP_BUF:
8415 0 : if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
8416 : {
8417 0 : rtx buf_addr
8418 0 : = expand_normal (CALL_EXPR_ARG (exp, 0));
8419 :
8420 0 : expand_builtin_update_setjmp_buf (buf_addr);
8421 0 : return const0_rtx;
8422 : }
8423 : break;
8424 :
8425 38526 : case BUILT_IN_TRAP:
8426 38526 : case BUILT_IN_UNREACHABLE_TRAP:
8427 38526 : expand_builtin_trap ();
8428 38526 : return const0_rtx;
8429 :
8430 4384 : case BUILT_IN_UNREACHABLE:
8431 4384 : expand_builtin_unreachable ();
8432 4384 : return const0_rtx;
8433 :
8434 214 : case BUILT_IN_OBSERVABLE_CHKPT:
8435 : /* Generate no code. */
8436 214 : return const0_rtx;
8437 :
8438 1071 : CASE_FLT_FN (BUILT_IN_SIGNBIT):
8439 1071 : case BUILT_IN_SIGNBITD32:
8440 1071 : case BUILT_IN_SIGNBITD64:
8441 1071 : case BUILT_IN_SIGNBITD128:
8442 1071 : target = expand_builtin_signbit (exp, target);
8443 1071 : if (target)
8444 : return target;
8445 : break;
8446 :
8447 : /* Various hooks for the DWARF 2 __throw routine. */
8448 34 : case BUILT_IN_UNWIND_INIT:
8449 34 : expand_builtin_unwind_init ();
8450 34 : return const0_rtx;
8451 1214 : case BUILT_IN_DWARF_CFA:
8452 1214 : return virtual_cfa_rtx;
8453 : #ifdef DWARF2_UNWIND_INFO
8454 48 : case BUILT_IN_DWARF_SP_COLUMN:
8455 48 : return expand_builtin_dwarf_sp_column ();
8456 8 : case BUILT_IN_INIT_DWARF_REG_SIZES:
8457 8 : expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
8458 8 : return const0_rtx;
8459 : #endif
8460 19 : case BUILT_IN_FROB_RETURN_ADDR:
8461 19 : return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
8462 2350 : case BUILT_IN_EXTRACT_RETURN_ADDR:
8463 2350 : return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
8464 29 : case BUILT_IN_EH_RETURN:
8465 58 : expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
8466 29 : CALL_EXPR_ARG (exp, 1));
8467 29 : return const0_rtx;
8468 21 : case BUILT_IN_EH_RETURN_DATA_REGNO:
8469 21 : return expand_builtin_eh_return_data_regno (exp);
8470 2 : case BUILT_IN_EXTEND_POINTER:
8471 2 : return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
8472 94836 : case BUILT_IN_EH_POINTER:
8473 94836 : return expand_builtin_eh_pointer (exp);
8474 5031 : case BUILT_IN_EH_FILTER:
8475 5031 : return expand_builtin_eh_filter (exp);
8476 92466 : case BUILT_IN_EH_COPY_VALUES:
8477 92466 : return expand_builtin_eh_copy_values (exp);
8478 :
8479 21003 : case BUILT_IN_VA_START:
8480 21003 : return expand_builtin_va_start (exp);
8481 12121 : case BUILT_IN_VA_END:
8482 12121 : return expand_builtin_va_end (exp);
8483 244 : case BUILT_IN_VA_COPY:
8484 244 : return expand_builtin_va_copy (exp);
8485 1049 : case BUILT_IN_EXPECT:
8486 1049 : return expand_builtin_expect (exp, target);
8487 5 : case BUILT_IN_EXPECT_WITH_PROBABILITY:
8488 5 : return expand_builtin_expect_with_probability (exp, target);
8489 663 : case BUILT_IN_ASSUME_ALIGNED:
8490 663 : return expand_builtin_assume_aligned (exp, target);
8491 2039 : case BUILT_IN_PREFETCH:
8492 2039 : expand_builtin_prefetch (exp);
8493 2039 : return const0_rtx;
8494 :
8495 295 : case BUILT_IN_INIT_TRAMPOLINE:
8496 295 : return expand_builtin_init_trampoline (exp, true);
8497 0 : case BUILT_IN_INIT_HEAP_TRAMPOLINE:
8498 0 : return expand_builtin_init_trampoline (exp, false);
8499 339 : case BUILT_IN_ADJUST_TRAMPOLINE:
8500 339 : return expand_builtin_adjust_trampoline (exp);
8501 :
8502 0 : case BUILT_IN_INIT_DESCRIPTOR:
8503 0 : return expand_builtin_init_descriptor (exp);
8504 0 : case BUILT_IN_ADJUST_DESCRIPTOR:
8505 0 : return expand_builtin_adjust_descriptor (exp);
8506 :
8507 : case BUILT_IN_GCC_NESTED_PTR_CREATED:
8508 : case BUILT_IN_GCC_NESTED_PTR_DELETED:
8509 : break; /* At present, no expansion, just call the function. */
8510 :
8511 99 : case BUILT_IN_FORK:
8512 99 : case BUILT_IN_EXECL:
8513 99 : case BUILT_IN_EXECV:
8514 99 : case BUILT_IN_EXECLP:
8515 99 : case BUILT_IN_EXECLE:
8516 99 : case BUILT_IN_EXECVP:
8517 99 : case BUILT_IN_EXECVE:
8518 99 : target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
8519 99 : if (target)
8520 : return target;
8521 : break;
8522 :
8523 707 : case BUILT_IN_SYNC_FETCH_AND_ADD_1:
8524 707 : case BUILT_IN_SYNC_FETCH_AND_ADD_2:
8525 707 : case BUILT_IN_SYNC_FETCH_AND_ADD_4:
8526 707 : case BUILT_IN_SYNC_FETCH_AND_ADD_8:
8527 707 : case BUILT_IN_SYNC_FETCH_AND_ADD_16:
8528 707 : mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
8529 707 : target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
8530 707 : if (target)
8531 : return target;
8532 : break;
8533 :
8534 581 : case BUILT_IN_SYNC_FETCH_AND_SUB_1:
8535 581 : case BUILT_IN_SYNC_FETCH_AND_SUB_2:
8536 581 : case BUILT_IN_SYNC_FETCH_AND_SUB_4:
8537 581 : case BUILT_IN_SYNC_FETCH_AND_SUB_8:
8538 581 : case BUILT_IN_SYNC_FETCH_AND_SUB_16:
8539 581 : mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
8540 581 : target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
8541 581 : if (target)
8542 : return target;
8543 : break;
8544 :
8545 512 : case BUILT_IN_SYNC_FETCH_AND_OR_1:
8546 512 : case BUILT_IN_SYNC_FETCH_AND_OR_2:
8547 512 : case BUILT_IN_SYNC_FETCH_AND_OR_4:
8548 512 : case BUILT_IN_SYNC_FETCH_AND_OR_8:
8549 512 : case BUILT_IN_SYNC_FETCH_AND_OR_16:
8550 512 : mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
8551 512 : target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
8552 512 : if (target)
8553 : return target;
8554 : break;
8555 :
8556 488 : case BUILT_IN_SYNC_FETCH_AND_AND_1:
8557 488 : case BUILT_IN_SYNC_FETCH_AND_AND_2:
8558 488 : case BUILT_IN_SYNC_FETCH_AND_AND_4:
8559 488 : case BUILT_IN_SYNC_FETCH_AND_AND_8:
8560 488 : case BUILT_IN_SYNC_FETCH_AND_AND_16:
8561 488 : mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
8562 488 : target = expand_builtin_sync_operation (mode, exp, AND, false, target);
8563 488 : if (target)
8564 : return target;
8565 : break;
8566 :
8567 581 : case BUILT_IN_SYNC_FETCH_AND_XOR_1:
8568 581 : case BUILT_IN_SYNC_FETCH_AND_XOR_2:
8569 581 : case BUILT_IN_SYNC_FETCH_AND_XOR_4:
8570 581 : case BUILT_IN_SYNC_FETCH_AND_XOR_8:
8571 581 : case BUILT_IN_SYNC_FETCH_AND_XOR_16:
8572 581 : mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
8573 581 : target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
8574 581 : if (target)
8575 : return target;
8576 : break;
8577 :
8578 434 : case BUILT_IN_SYNC_FETCH_AND_NAND_1:
8579 434 : case BUILT_IN_SYNC_FETCH_AND_NAND_2:
8580 434 : case BUILT_IN_SYNC_FETCH_AND_NAND_4:
8581 434 : case BUILT_IN_SYNC_FETCH_AND_NAND_8:
8582 434 : case BUILT_IN_SYNC_FETCH_AND_NAND_16:
8583 434 : mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
8584 434 : target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
8585 434 : if (target)
8586 : return target;
8587 : break;
8588 :
8589 149 : case BUILT_IN_SYNC_ADD_AND_FETCH_1:
8590 149 : case BUILT_IN_SYNC_ADD_AND_FETCH_2:
8591 149 : case BUILT_IN_SYNC_ADD_AND_FETCH_4:
8592 149 : case BUILT_IN_SYNC_ADD_AND_FETCH_8:
8593 149 : case BUILT_IN_SYNC_ADD_AND_FETCH_16:
8594 149 : mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
8595 149 : target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
8596 149 : if (target)
8597 : return target;
8598 : break;
8599 :
8600 117 : case BUILT_IN_SYNC_SUB_AND_FETCH_1:
8601 117 : case BUILT_IN_SYNC_SUB_AND_FETCH_2:
8602 117 : case BUILT_IN_SYNC_SUB_AND_FETCH_4:
8603 117 : case BUILT_IN_SYNC_SUB_AND_FETCH_8:
8604 117 : case BUILT_IN_SYNC_SUB_AND_FETCH_16:
8605 117 : mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
8606 117 : target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
8607 117 : if (target)
8608 : return target;
8609 : break;
8610 :
8611 115 : case BUILT_IN_SYNC_OR_AND_FETCH_1:
8612 115 : case BUILT_IN_SYNC_OR_AND_FETCH_2:
8613 115 : case BUILT_IN_SYNC_OR_AND_FETCH_4:
8614 115 : case BUILT_IN_SYNC_OR_AND_FETCH_8:
8615 115 : case BUILT_IN_SYNC_OR_AND_FETCH_16:
8616 115 : mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
8617 115 : target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
8618 115 : if (target)
8619 : return target;
8620 : break;
8621 :
8622 116 : case BUILT_IN_SYNC_AND_AND_FETCH_1:
8623 116 : case BUILT_IN_SYNC_AND_AND_FETCH_2:
8624 116 : case BUILT_IN_SYNC_AND_AND_FETCH_4:
8625 116 : case BUILT_IN_SYNC_AND_AND_FETCH_8:
8626 116 : case BUILT_IN_SYNC_AND_AND_FETCH_16:
8627 116 : mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
8628 116 : target = expand_builtin_sync_operation (mode, exp, AND, true, target);
8629 116 : if (target)
8630 : return target;
8631 : break;
8632 :
8633 114 : case BUILT_IN_SYNC_XOR_AND_FETCH_1:
8634 114 : case BUILT_IN_SYNC_XOR_AND_FETCH_2:
8635 114 : case BUILT_IN_SYNC_XOR_AND_FETCH_4:
8636 114 : case BUILT_IN_SYNC_XOR_AND_FETCH_8:
8637 114 : case BUILT_IN_SYNC_XOR_AND_FETCH_16:
8638 114 : mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
8639 114 : target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
8640 114 : if (target)
8641 : return target;
8642 : break;
8643 :
8644 89 : case BUILT_IN_SYNC_NAND_AND_FETCH_1:
8645 89 : case BUILT_IN_SYNC_NAND_AND_FETCH_2:
8646 89 : case BUILT_IN_SYNC_NAND_AND_FETCH_4:
8647 89 : case BUILT_IN_SYNC_NAND_AND_FETCH_8:
8648 89 : case BUILT_IN_SYNC_NAND_AND_FETCH_16:
8649 89 : mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
8650 89 : target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
8651 89 : if (target)
8652 : return target;
8653 : break;
8654 :
8655 200 : case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
8656 200 : case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
8657 200 : case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
8658 200 : case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
8659 200 : case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
8660 200 : if (mode == VOIDmode)
8661 40 : mode = TYPE_MODE (boolean_type_node);
8662 200 : if (!target || !register_operand (target, mode))
8663 40 : target = gen_reg_rtx (mode);
8664 :
8665 200 : mode = get_builtin_sync_mode
8666 200 : (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
8667 200 : target = expand_builtin_compare_and_swap (mode, exp, true, target);
8668 200 : if (target)
8669 : return target;
8670 : break;
8671 :
8672 256 : case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
8673 256 : case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
8674 256 : case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
8675 256 : case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
8676 256 : case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
8677 256 : mode = get_builtin_sync_mode
8678 256 : (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
8679 256 : target = expand_builtin_compare_and_swap (mode, exp, false, target);
8680 256 : if (target)
8681 : return target;
8682 : break;
8683 :
8684 326 : case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
8685 326 : case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
8686 326 : case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
8687 326 : case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
8688 326 : case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
8689 326 : mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
8690 326 : target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
8691 326 : if (target)
8692 : return target;
8693 : break;
8694 :
8695 146 : case BUILT_IN_SYNC_LOCK_RELEASE_1:
8696 146 : case BUILT_IN_SYNC_LOCK_RELEASE_2:
8697 146 : case BUILT_IN_SYNC_LOCK_RELEASE_4:
8698 146 : case BUILT_IN_SYNC_LOCK_RELEASE_8:
8699 146 : case BUILT_IN_SYNC_LOCK_RELEASE_16:
8700 146 : mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
8701 146 : if (expand_builtin_sync_lock_release (mode, exp))
8702 144 : return const0_rtx;
8703 : break;
8704 :
8705 273 : case BUILT_IN_SYNC_SYNCHRONIZE:
8706 273 : expand_builtin_sync_synchronize ();
8707 273 : return const0_rtx;
8708 :
8709 2889 : case BUILT_IN_ATOMIC_EXCHANGE_1:
8710 2889 : case BUILT_IN_ATOMIC_EXCHANGE_2:
8711 2889 : case BUILT_IN_ATOMIC_EXCHANGE_4:
8712 2889 : case BUILT_IN_ATOMIC_EXCHANGE_8:
8713 2889 : case BUILT_IN_ATOMIC_EXCHANGE_16:
8714 2889 : mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
8715 2889 : target = expand_builtin_atomic_exchange (mode, exp, target);
8716 2889 : if (target)
8717 : return target;
8718 : break;
8719 :
8720 9131 : case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
8721 9131 : case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
8722 9131 : case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
8723 9131 : case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
8724 9131 : case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
8725 9131 : {
8726 9131 : unsigned int nargs, z;
8727 9131 : vec<tree, va_gc> *vec;
8728 :
8729 9131 : mode =
8730 9131 : get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
8731 9131 : target = expand_builtin_atomic_compare_exchange (mode, exp, target);
8732 9131 : if (target)
8733 7329 : return target;
8734 :
8735 : /* If this is turned into an external library call, the weak parameter
8736 : must be dropped to match the expected parameter list. */
8737 1802 : nargs = call_expr_nargs (exp);
8738 1802 : vec_alloc (vec, nargs - 1);
8739 9010 : for (z = 0; z < 3; z++)
8740 5406 : vec->quick_push (CALL_EXPR_ARG (exp, z));
8741 : /* Skip the boolean weak parameter. */
8742 5406 : for (z = 4; z < 6; z++)
8743 3604 : vec->quick_push (CALL_EXPR_ARG (exp, z));
8744 1802 : exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
8745 1802 : break;
8746 : }
8747 :
8748 70190 : case BUILT_IN_ATOMIC_LOAD_1:
8749 70190 : case BUILT_IN_ATOMIC_LOAD_2:
8750 70190 : case BUILT_IN_ATOMIC_LOAD_4:
8751 70190 : case BUILT_IN_ATOMIC_LOAD_8:
8752 70190 : case BUILT_IN_ATOMIC_LOAD_16:
8753 70190 : mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
8754 70190 : target = expand_builtin_atomic_load (mode, exp, target);
8755 70190 : if (target)
8756 : return target;
8757 : break;
8758 :
8759 17419 : case BUILT_IN_ATOMIC_STORE_1:
8760 17419 : case BUILT_IN_ATOMIC_STORE_2:
8761 17419 : case BUILT_IN_ATOMIC_STORE_4:
8762 17419 : case BUILT_IN_ATOMIC_STORE_8:
8763 17419 : case BUILT_IN_ATOMIC_STORE_16:
8764 17419 : mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
8765 17419 : target = expand_builtin_atomic_store (mode, exp);
8766 17419 : if (target)
8767 15792 : return const0_rtx;
8768 : break;
8769 :
8770 4377 : case BUILT_IN_ATOMIC_ADD_FETCH_1:
8771 4377 : case BUILT_IN_ATOMIC_ADD_FETCH_2:
8772 4377 : case BUILT_IN_ATOMIC_ADD_FETCH_4:
8773 4377 : case BUILT_IN_ATOMIC_ADD_FETCH_8:
8774 4377 : case BUILT_IN_ATOMIC_ADD_FETCH_16:
8775 4377 : {
8776 4377 : enum built_in_function lib;
8777 4377 : mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
8778 4377 : lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
8779 : (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
8780 4377 : target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
8781 : ignore, lib);
8782 4377 : if (target)
8783 : return target;
8784 : break;
8785 : }
8786 2161 : case BUILT_IN_ATOMIC_SUB_FETCH_1:
8787 2161 : case BUILT_IN_ATOMIC_SUB_FETCH_2:
8788 2161 : case BUILT_IN_ATOMIC_SUB_FETCH_4:
8789 2161 : case BUILT_IN_ATOMIC_SUB_FETCH_8:
8790 2161 : case BUILT_IN_ATOMIC_SUB_FETCH_16:
8791 2161 : {
8792 2161 : enum built_in_function lib;
8793 2161 : mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
8794 2161 : lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
8795 : (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
8796 2161 : target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
8797 : ignore, lib);
8798 2161 : if (target)
8799 : return target;
8800 : break;
8801 : }
8802 895 : case BUILT_IN_ATOMIC_AND_FETCH_1:
8803 895 : case BUILT_IN_ATOMIC_AND_FETCH_2:
8804 895 : case BUILT_IN_ATOMIC_AND_FETCH_4:
8805 895 : case BUILT_IN_ATOMIC_AND_FETCH_8:
8806 895 : case BUILT_IN_ATOMIC_AND_FETCH_16:
8807 895 : {
8808 895 : enum built_in_function lib;
8809 895 : mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
8810 895 : lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
8811 : (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
8812 895 : target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
8813 : ignore, lib);
8814 895 : if (target)
8815 : return target;
8816 : break;
8817 : }
8818 155 : case BUILT_IN_ATOMIC_NAND_FETCH_1:
8819 155 : case BUILT_IN_ATOMIC_NAND_FETCH_2:
8820 155 : case BUILT_IN_ATOMIC_NAND_FETCH_4:
8821 155 : case BUILT_IN_ATOMIC_NAND_FETCH_8:
8822 155 : case BUILT_IN_ATOMIC_NAND_FETCH_16:
8823 155 : {
8824 155 : enum built_in_function lib;
8825 155 : mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
8826 155 : lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
8827 : (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
8828 155 : target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
8829 : ignore, lib);
8830 155 : if (target)
8831 : return target;
8832 : break;
8833 : }
8834 840 : case BUILT_IN_ATOMIC_XOR_FETCH_1:
8835 840 : case BUILT_IN_ATOMIC_XOR_FETCH_2:
8836 840 : case BUILT_IN_ATOMIC_XOR_FETCH_4:
8837 840 : case BUILT_IN_ATOMIC_XOR_FETCH_8:
8838 840 : case BUILT_IN_ATOMIC_XOR_FETCH_16:
8839 840 : {
8840 840 : enum built_in_function lib;
8841 840 : mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
8842 840 : lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
8843 : (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
8844 840 : target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
8845 : ignore, lib);
8846 840 : if (target)
8847 : return target;
8848 : break;
8849 : }
8850 956 : case BUILT_IN_ATOMIC_OR_FETCH_1:
8851 956 : case BUILT_IN_ATOMIC_OR_FETCH_2:
8852 956 : case BUILT_IN_ATOMIC_OR_FETCH_4:
8853 956 : case BUILT_IN_ATOMIC_OR_FETCH_8:
8854 956 : case BUILT_IN_ATOMIC_OR_FETCH_16:
8855 956 : {
8856 956 : enum built_in_function lib;
8857 956 : mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
8858 956 : lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
8859 : (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
8860 956 : target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
8861 : ignore, lib);
8862 956 : if (target)
8863 : return target;
8864 : break;
8865 : }
8866 14649 : case BUILT_IN_ATOMIC_FETCH_ADD_1:
8867 14649 : case BUILT_IN_ATOMIC_FETCH_ADD_2:
8868 14649 : case BUILT_IN_ATOMIC_FETCH_ADD_4:
8869 14649 : case BUILT_IN_ATOMIC_FETCH_ADD_8:
8870 14649 : case BUILT_IN_ATOMIC_FETCH_ADD_16:
8871 14649 : mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
8872 14649 : target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
8873 : ignore, BUILT_IN_NONE);
8874 14649 : if (target)
8875 : return target;
8876 : break;
8877 :
8878 2103 : case BUILT_IN_ATOMIC_FETCH_SUB_1:
8879 2103 : case BUILT_IN_ATOMIC_FETCH_SUB_2:
8880 2103 : case BUILT_IN_ATOMIC_FETCH_SUB_4:
8881 2103 : case BUILT_IN_ATOMIC_FETCH_SUB_8:
8882 2103 : case BUILT_IN_ATOMIC_FETCH_SUB_16:
8883 2103 : mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
8884 2103 : target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
8885 : ignore, BUILT_IN_NONE);
8886 2103 : if (target)
8887 : return target;
8888 : break;
8889 :
8890 778 : case BUILT_IN_ATOMIC_FETCH_AND_1:
8891 778 : case BUILT_IN_ATOMIC_FETCH_AND_2:
8892 778 : case BUILT_IN_ATOMIC_FETCH_AND_4:
8893 778 : case BUILT_IN_ATOMIC_FETCH_AND_8:
8894 778 : case BUILT_IN_ATOMIC_FETCH_AND_16:
8895 778 : mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
8896 778 : target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
8897 : ignore, BUILT_IN_NONE);
8898 778 : if (target)
8899 : return target;
8900 : break;
8901 :
8902 118 : case BUILT_IN_ATOMIC_FETCH_NAND_1:
8903 118 : case BUILT_IN_ATOMIC_FETCH_NAND_2:
8904 118 : case BUILT_IN_ATOMIC_FETCH_NAND_4:
8905 118 : case BUILT_IN_ATOMIC_FETCH_NAND_8:
8906 118 : case BUILT_IN_ATOMIC_FETCH_NAND_16:
8907 118 : mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
8908 118 : target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
8909 : ignore, BUILT_IN_NONE);
8910 118 : if (target)
8911 : return target;
8912 : break;
8913 :
8914 868 : case BUILT_IN_ATOMIC_FETCH_XOR_1:
8915 868 : case BUILT_IN_ATOMIC_FETCH_XOR_2:
8916 868 : case BUILT_IN_ATOMIC_FETCH_XOR_4:
8917 868 : case BUILT_IN_ATOMIC_FETCH_XOR_8:
8918 868 : case BUILT_IN_ATOMIC_FETCH_XOR_16:
8919 868 : mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
8920 868 : target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
8921 : ignore, BUILT_IN_NONE);
8922 868 : if (target)
8923 : return target;
8924 : break;
8925 :
8926 1122 : case BUILT_IN_ATOMIC_FETCH_OR_1:
8927 1122 : case BUILT_IN_ATOMIC_FETCH_OR_2:
8928 1122 : case BUILT_IN_ATOMIC_FETCH_OR_4:
8929 1122 : case BUILT_IN_ATOMIC_FETCH_OR_8:
8930 1122 : case BUILT_IN_ATOMIC_FETCH_OR_16:
8931 1122 : mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
8932 1122 : target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
8933 : ignore, BUILT_IN_NONE);
8934 1122 : if (target)
8935 : return target;
8936 : break;
8937 :
8938 266 : case BUILT_IN_ATOMIC_TEST_AND_SET:
8939 266 : target = expand_builtin_atomic_test_and_set (exp, target);
8940 266 : if (target)
8941 : return target;
8942 : break;
8943 :
8944 53 : case BUILT_IN_ATOMIC_CLEAR:
8945 53 : return expand_builtin_atomic_clear (exp);
8946 :
8947 1 : case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8948 1 : return expand_builtin_atomic_always_lock_free (exp);
8949 :
8950 3 : case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8951 3 : target = expand_builtin_atomic_is_lock_free (exp);
8952 3 : if (target)
8953 : return target;
8954 : break;
8955 :
8956 693 : case BUILT_IN_ATOMIC_THREAD_FENCE:
8957 693 : expand_builtin_atomic_thread_fence (exp);
8958 693 : return const0_rtx;
8959 :
8960 60 : case BUILT_IN_ATOMIC_SIGNAL_FENCE:
8961 60 : expand_builtin_atomic_signal_fence (exp);
8962 60 : return const0_rtx;
8963 :
8964 618 : case BUILT_IN_OBJECT_SIZE:
8965 618 : case BUILT_IN_DYNAMIC_OBJECT_SIZE:
8966 618 : return expand_builtin_object_size (exp);
8967 :
8968 837 : case BUILT_IN_MEMCPY_CHK:
8969 837 : case BUILT_IN_MEMPCPY_CHK:
8970 837 : case BUILT_IN_MEMMOVE_CHK:
8971 837 : case BUILT_IN_MEMSET_CHK:
8972 837 : target = expand_builtin_memory_chk (exp, target, mode, fcode);
8973 837 : if (target)
8974 : return target;
8975 : break;
8976 :
8977 1135 : case BUILT_IN_STRCPY_CHK:
8978 1135 : case BUILT_IN_STPCPY_CHK:
8979 1135 : case BUILT_IN_STRNCPY_CHK:
8980 1135 : case BUILT_IN_STPNCPY_CHK:
8981 1135 : case BUILT_IN_STRCAT_CHK:
8982 1135 : case BUILT_IN_STRNCAT_CHK:
8983 1135 : case BUILT_IN_SNPRINTF_CHK:
8984 1135 : case BUILT_IN_VSNPRINTF_CHK:
8985 1135 : maybe_emit_chk_warning (exp, fcode);
8986 1135 : break;
8987 :
8988 1329 : case BUILT_IN_SPRINTF_CHK:
8989 1329 : case BUILT_IN_VSPRINTF_CHK:
8990 1329 : maybe_emit_sprintf_chk_warning (exp, fcode);
8991 1329 : break;
8992 :
8993 3 : case BUILT_IN_THREAD_POINTER:
8994 3 : return expand_builtin_thread_pointer (exp, target);
8995 :
8996 0 : case BUILT_IN_SET_THREAD_POINTER:
8997 0 : expand_builtin_set_thread_pointer (exp);
8998 0 : return const0_rtx;
8999 :
9000 : case BUILT_IN_ACC_ON_DEVICE:
9001 :     /* Do a library call if we failed to expand the builtin when
9002 :        folding.  */
9003 : break;
9004 :
9005 356 : case BUILT_IN_GOACC_PARLEVEL_ID:
9006 356 : case BUILT_IN_GOACC_PARLEVEL_SIZE:
9007 356 : return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
9008 :
9009 4 : case BUILT_IN_SPECULATION_SAFE_VALUE_PTR:
9010 4 : return expand_speculation_safe_value (VOIDmode, exp, target, ignore);
9011 :
9012 30 : case BUILT_IN_SPECULATION_SAFE_VALUE_1:
9013 30 : case BUILT_IN_SPECULATION_SAFE_VALUE_2:
9014 30 : case BUILT_IN_SPECULATION_SAFE_VALUE_4:
9015 30 : case BUILT_IN_SPECULATION_SAFE_VALUE_8:
9016 30 : case BUILT_IN_SPECULATION_SAFE_VALUE_16:
9017 30 : mode = get_builtin_sync_mode (fcode - BUILT_IN_SPECULATION_SAFE_VALUE_1);
9018 30 : return expand_speculation_safe_value (mode, exp, target, ignore);
9019 :
9020 3 : case BUILT_IN_CRC8_DATA8:
9021 3 : return expand_builtin_crc_table_based (IFN_CRC, QImode, QImode, mode,
9022 : exp, target);
9023 2 : case BUILT_IN_CRC16_DATA8:
9024 2 : return expand_builtin_crc_table_based (IFN_CRC, HImode, QImode, mode,
9025 : exp, target);
9026 2 : case BUILT_IN_CRC16_DATA16:
9027 2 : return expand_builtin_crc_table_based (IFN_CRC, HImode, HImode, mode,
9028 : exp, target);
9029 2 : case BUILT_IN_CRC32_DATA8:
9030 2 : return expand_builtin_crc_table_based (IFN_CRC, SImode, QImode, mode,
9031 : exp, target);
9032 2 : case BUILT_IN_CRC32_DATA16:
9033 2 : return expand_builtin_crc_table_based (IFN_CRC, SImode, HImode, mode,
9034 : exp, target);
9035 4 : case BUILT_IN_CRC32_DATA32:
9036 4 : return expand_builtin_crc_table_based (IFN_CRC, SImode, SImode, mode,
9037 : exp, target);
9038 1 : case BUILT_IN_CRC64_DATA8:
9039 1 : return expand_builtin_crc_table_based (IFN_CRC, DImode, QImode, mode,
9040 : exp, target);
9041 1 : case BUILT_IN_CRC64_DATA16:
9042 1 : return expand_builtin_crc_table_based (IFN_CRC, DImode, HImode, mode,
9043 : exp, target);
9044 1 : case BUILT_IN_CRC64_DATA32:
9045 1 : return expand_builtin_crc_table_based (IFN_CRC, DImode, SImode, mode,
9046 : exp, target);
9047 1 : case BUILT_IN_CRC64_DATA64:
9048 1 : return expand_builtin_crc_table_based (IFN_CRC, DImode, DImode, mode,
9049 : exp, target);
9050 2 : case BUILT_IN_REV_CRC8_DATA8:
9051 2 : return expand_builtin_crc_table_based (IFN_CRC_REV, QImode, QImode,
9052 : mode, exp, target);
9053 2 : case BUILT_IN_REV_CRC16_DATA8:
9054 2 : return expand_builtin_crc_table_based (IFN_CRC_REV, HImode, QImode,
9055 : mode, exp, target);
9056 2 : case BUILT_IN_REV_CRC16_DATA16:
9057 2 : return expand_builtin_crc_table_based (IFN_CRC_REV, HImode, HImode,
9058 : mode, exp, target);
9059 3 : case BUILT_IN_REV_CRC32_DATA8:
9060 3 : return expand_builtin_crc_table_based (IFN_CRC_REV, SImode, QImode,
9061 : mode, exp, target);
9062 2 : case BUILT_IN_REV_CRC32_DATA16:
9063 2 : return expand_builtin_crc_table_based (IFN_CRC_REV, SImode, HImode,
9064 : mode, exp, target);
9065 2 : case BUILT_IN_REV_CRC32_DATA32:
9066 2 : return expand_builtin_crc_table_based (IFN_CRC_REV, SImode, SImode,
9067 : mode, exp, target);
9068 1 : case BUILT_IN_REV_CRC64_DATA8:
9069 1 : return expand_builtin_crc_table_based (IFN_CRC_REV, DImode, QImode,
9070 : mode, exp, target);
9071 1 : case BUILT_IN_REV_CRC64_DATA16:
9072 1 : return expand_builtin_crc_table_based (IFN_CRC_REV, DImode, HImode,
9073 : mode, exp, target);
9074 1 : case BUILT_IN_REV_CRC64_DATA32:
9075 1 : return expand_builtin_crc_table_based (IFN_CRC_REV, DImode, SImode,
9076 : mode, exp, target);
9077 1 : case BUILT_IN_REV_CRC64_DATA64:
9078 1 : return expand_builtin_crc_table_based (IFN_CRC_REV, DImode, DImode,
9079 : mode, exp, target);
9080 : default: /* Just do a library call for an unknown builtin. */
9081 : break;
9082 : }
9083 :
9084 : /* The switch statement above can drop through to cause the function
9085 : to be called normally. */
9086 831707 : return expand_call (exp, target, ignore);
9087 : }
9088 :
9089 : /* Determine whether a tree node represents a call to a built-in
9090 : function. If the tree T is a call to a built-in function with
9091 : the right number of arguments of the appropriate types, return
9092 : the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
9093 : Otherwise the return value is END_BUILTINS. */
9094 :
9095 : enum built_in_function
9096 124404393 : builtin_mathfn_code (const_tree t)
9097 : {
9098 124404393 : const_tree fndecl, arg, parmlist;
9099 124404393 : const_tree argtype, parmtype;
9100 124404393 : const_call_expr_arg_iterator iter;
9101 :
9102 124404393 : if (TREE_CODE (t) != CALL_EXPR)
9103 : return END_BUILTINS;
9104 :
9105 1620955 : fndecl = get_callee_fndecl (t);
9106 1620955 : if (fndecl == NULL_TREE || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
9107 : return END_BUILTINS;
9108 :
9109 528160 : parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
9110 528160 : init_const_call_expr_arg_iterator (t, &iter);
9111 1719948 : for (; parmlist; parmlist = TREE_CHAIN (parmlist))
9112 : {
9113 : /* If a function doesn't take a variable number of arguments,
9114 : the last element in the list will have type `void'. */
9115 1191517 : parmtype = TREE_VALUE (parmlist);
9116 1191517 : if (VOID_TYPE_P (parmtype))
9117 : {
9118 527875 : if (more_const_call_expr_args_p (&iter))
9119 : return END_BUILTINS;
9120 527875 : return DECL_FUNCTION_CODE (fndecl);
9121 : }
9122 :
9123 663642 : if (! more_const_call_expr_args_p (&iter))
9124 : return END_BUILTINS;
9125 :
9126 663642 : arg = next_const_call_expr_arg (&iter);
9127 663642 : argtype = TREE_TYPE (arg);
9128 :
9129 663642 : if (SCALAR_FLOAT_TYPE_P (parmtype))
9130 : {
9131 421567 : if (! SCALAR_FLOAT_TYPE_P (argtype))
9132 : return END_BUILTINS;
9133 : }
9134 242075 : else if (COMPLEX_FLOAT_TYPE_P (parmtype))
9135 : {
9136 5185 : if (! COMPLEX_FLOAT_TYPE_P (argtype))
9137 : return END_BUILTINS;
9138 : }
9139 236890 : else if (POINTER_TYPE_P (parmtype))
9140 : {
9141 68866 : if (! POINTER_TYPE_P (argtype))
9142 : return END_BUILTINS;
9143 : }
9144 168024 : else if (INTEGRAL_TYPE_P (parmtype))
9145 : {
9146 168024 : if (! INTEGRAL_TYPE_P (argtype))
9147 : return END_BUILTINS;
9148 : }
9149 : else
9150 : return END_BUILTINS;
9151 : }
9152 :
9153 : /* Variable-length argument list. */
9154 271 : return DECL_FUNCTION_CODE (fndecl);
9155 : }
9156 :
9157 : /* Fold a call to __builtin_constant_p, if we know its argument ARG will
9158 : evaluate to a constant. */
9159 :
9160 : tree
9161 1313191 : fold_builtin_constant_p (tree arg)
9162 : {
9163 : /* We return 1 for a numeric type that's known to be a constant
9164 : value at compile-time or for an aggregate type that's a
9165 : literal constant. */
9166 1313191 : STRIP_NOPS (arg);
9167 :
9168 : /* If we know this is a constant, emit the constant of one. */
9169 1313191 : if (CONSTANT_CLASS_P (arg)
9170 1313191 : || (TREE_CODE (arg) == CONSTRUCTOR
9171 6 : && TREE_CONSTANT (arg)))
9172 23294 : return integer_one_node;
9173 1289897 : if (TREE_CODE (arg) == ADDR_EXPR)
9174 : {
9175 110 : tree op = TREE_OPERAND (arg, 0);
9176 110 : if (TREE_CODE (op) == STRING_CST
9177 110 : || (TREE_CODE (op) == ARRAY_REF
9178 57 : && integer_zerop (TREE_OPERAND (op, 1))
9179 57 : && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
9180 87 : return integer_one_node;
9181 : }
9182 :
9183 : /* If this expression has side effects, show we don't know it to be a
9184 : constant. Likewise if it's a pointer or aggregate type since in
9185 : those cases we only want literals, as those are only optimized
9186 : when generating RTL, not later.
9187 : And finally, if we are compiling an initializer, not code, we
9188 : need to return a definite result now; there's not going to be any
9189 : more optimization done. */
9190 1289810 : if (TREE_SIDE_EFFECTS (arg)
9191 1289685 : || AGGREGATE_TYPE_P (TREE_TYPE (arg))
9192 1289679 : || POINTER_TYPE_P (TREE_TYPE (arg))
9193 1289508 : || cfun == 0
9194 1289465 : || folding_initializer
9195 2579268 : || force_folding_builtin_constant_p)
9196 382 : return integer_zero_node;
9197 :
9198 : return NULL_TREE;
9199 : }
9200 :
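/* A caller-side sketch of the folding above; the example function is
   illustrative, not part of GCC.  The first two calls fold to 1 here,
   the pointer-typed and side-effecting arguments fold to 0, and
   anything else is left for later passes to resolve.  */
extern int opaque (void);

int
constant_p_example (int x)
{
  int a = __builtin_constant_p (42);        /* constant: folds to 1 */
  int b = __builtin_constant_p ("abc");     /* string literal: folds to 1 */
  int c = __builtin_constant_p (&x);        /* pointer type: folds to 0 */
  int d = __builtin_constant_p (opaque ()); /* side effects: folds to 0 */
  return a + b + c + d;
}
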
9201 : /* Create builtin_expect or builtin_expect_with_probability
9202 : with PRED and EXPECTED as its arguments and return it as a truthvalue.
9203 : The Fortran FE can also produce builtin_expect with PREDICTOR as a third
9204 : argument; builtin_expect_with_probability instead uses the third argument
9205 : as the PROBABILITY value. */
9206 :
9207 : static tree
9208 119476 : build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
9209 : tree predictor, tree probability)
9210 : {
9211 119476 : tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
9212 :
9213 119476 : fn = builtin_decl_explicit (probability == NULL_TREE ? BUILT_IN_EXPECT
9214 : : BUILT_IN_EXPECT_WITH_PROBABILITY);
9215 119476 : arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
9216 119476 : ret_type = TREE_TYPE (TREE_TYPE (fn));
9217 119476 : pred_type = TREE_VALUE (arg_types);
9218 119476 : expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
9219 :
9220 119476 : pred = fold_convert_loc (loc, pred_type, pred);
9221 119476 : expected = fold_convert_loc (loc, expected_type, expected);
9222 :
9223 119476 : if (probability)
9224 0 : call_expr = build_call_expr_loc (loc, fn, 3, pred, expected, probability);
9225 : else
9226 238952 : call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
9227 : predictor);
9228 :
9229 119476 : return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
9230 119476 : build_int_cst (ret_type, 0));
9231 : }
9232 :
9233 : /* Fold a call to builtin_expect with arguments ARG0, ARG1, ARG2, ARG3. Return
9234 : NULL_TREE if no simplification is possible. */
9235 :
9236 : tree
9237 6823000 : fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2,
9238 : tree arg3)
9239 : {
9240 6823000 : tree inner, fndecl, inner_arg0;
9241 6823000 : enum tree_code code;
9242 :
9243 : /* Distribute the expected value over short-circuiting operators.
9244 : See through the cast from truthvalue_type_node to long. */
9245 6823000 : inner_arg0 = arg0;
9246 13901081 : while (CONVERT_EXPR_P (inner_arg0)
9247 664257 : && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
9248 8151513 : && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
9249 664256 : inner_arg0 = TREE_OPERAND (inner_arg0, 0);
9250 :
9251 : /* If this is a builtin_expect within a builtin_expect keep the
9252 : inner one. See through a comparison against a constant. It
9253 : might have been added to create a truthvalue. */
9254 6823000 : inner = inner_arg0;
9255 :
9256 6823000 : if (COMPARISON_CLASS_P (inner)
9257 6823000 : && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
9258 1211953 : inner = TREE_OPERAND (inner, 0);
9259 :
9260 6823000 : if (TREE_CODE (inner) == CALL_EXPR
9261 129521 : && (fndecl = get_callee_fndecl (inner))
9262 6952521 : && fndecl_built_in_p (fndecl, BUILT_IN_EXPECT,
9263 : BUILT_IN_EXPECT_WITH_PROBABILITY))
9264 : return arg0;
9265 :
9266 6822992 : inner = inner_arg0;
9267 6822992 : code = TREE_CODE (inner);
9268 6822992 : if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
9269 : {
9270 59738 : tree op0 = TREE_OPERAND (inner, 0);
9271 59738 : tree op1 = TREE_OPERAND (inner, 1);
9272 59738 : arg1 = save_expr (arg1);
9273 :
9274 59738 : op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2, arg3);
9275 59738 : op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2, arg3);
9276 59738 : inner = build2 (code, TREE_TYPE (inner), op0, op1);
9277 :
9278 59738 : return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
9279 : }
9280 :
9281 : /* If the argument isn't invariant then there's nothing else we can do. */
9282 6763254 : if (!TREE_CONSTANT (inner_arg0))
9283 : return NULL_TREE;
9284 :
9285 : /* If we expect that a comparison against the argument will fold to
9286 : a constant, return the constant. In practice, this means a true
9287 : constant or the address of a non-weak symbol. */
9288 119262 : inner = inner_arg0;
9289 119262 : STRIP_NOPS (inner);
9290 119262 : if (TREE_CODE (inner) == ADDR_EXPR)
9291 : {
9292 1 : do
9293 : {
9294 1 : inner = TREE_OPERAND (inner, 0);
9295 : }
9296 1 : while (TREE_CODE (inner) == COMPONENT_REF
9297 1 : || TREE_CODE (inner) == ARRAY_REF);
9298 1 : if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
9299 : return NULL_TREE;
9300 : }
9301 :
9302 : /* Otherwise, ARG0 already has the proper type for the return value. */
9303 : return arg0;
9304 : }
9305 :
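/* A sketch of the short-circuit distribution above; the function is
   illustrative, not part of GCC.  The expected value is pushed onto
   each operand of && (and ||) so the hint survives gimplification.  */
long
expect_example (long a, long b)
{
  /* Folded roughly as if written
     __builtin_expect (a > 0, 1) && __builtin_expect (b > 0, 1).  */
  if (__builtin_expect (a > 0 && b > 0, 1))
    return a + b;
  return 0;
}
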
9306 : /* Fold a call to __builtin_classify_type with argument ARG. */
9307 :
9308 : static tree
9309 2080 : fold_builtin_classify_type (tree arg)
9310 : {
9311 2080 : if (arg == 0)
9312 0 : return build_int_cst (integer_type_node, no_type_class);
9313 :
9314 2080 : return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
9315 : }
9316 :
9317 : /* Fold a call EXPR (which may be null) to __builtin_strlen with argument
9318 : ARG. */
9319 :
9320 : static tree
9321 429133 : fold_builtin_strlen (location_t loc, tree expr, tree type, tree arg)
9322 : {
9323 429133 : if (!validate_arg (arg, POINTER_TYPE))
9324 : return NULL_TREE;
9325 : else
9326 : {
9327 429130 : c_strlen_data lendata = { };
9328 429130 : tree len = c_strlen (arg, 0, &lendata);
9329 :
9330 429130 : if (len)
9331 1545 : return fold_convert_loc (loc, type, len);
9332 :
9333 : /* TODO: Move this to gimple-ssa-warn-access once the pass runs
9334 : also early enough to detect invalid reads in multidimensional
9335 : arrays and struct members. */
9336 427585 : if (!lendata.decl)
9337 420407 : c_strlen (arg, 1, &lendata);
9338 :
9339 427585 : if (lendata.decl)
9340 : {
9341 7192 : if (EXPR_HAS_LOCATION (arg))
9342 2800 : loc = EXPR_LOCATION (arg);
9343 4392 : else if (loc == UNKNOWN_LOCATION)
9344 0 : loc = input_location;
9345 7192 : warn_string_no_nul (loc, expr, "strlen", arg, lendata.decl);
9346 : }
9347 :
9348 427585 : return NULL_TREE;
9349 : }
9350 : }
9351 :
9352 : /* Fold a call to __builtin_inf or __builtin_huge_val. */
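/* A sketch of the constant case above (illustrative, not part of
   GCC): a literal argument lets c_strlen compute the length at
   compile time.  */
__SIZE_TYPE__
strlen_example (void)
{
  return __builtin_strlen ("hello");   /* folds to 5 */
}
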
9353 :
9354 : static tree
9355 239775 : fold_builtin_inf (location_t loc, tree type, int warn)
9356 : {
9357 : /* __builtin_inff is intended to be usable to define INFINITY on all
9358 : targets. If an infinity is not available, INFINITY expands "to a
9359 : positive constant of type float that overflows at translation
9360 : time", footnote "In this case, using INFINITY will violate the
9361 : constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
9362 : Thus we pedwarn to ensure this constraint violation is
9363 : diagnosed. */
9364 958291 : if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
9365 0 : pedwarn (loc, 0, "target format does not support infinity");
9366 :
9367 239775 : return build_real (type, dconstinf);
9368 : }
9369 :
9370 : /* Fold function call to builtin sincos, sincosf, or sincosl. Return
9371 : NULL_TREE if no simplification can be made. */
9372 :
9373 : static tree
9374 145 : fold_builtin_sincos (location_t loc,
9375 : tree arg0, tree arg1, tree arg2)
9376 : {
9377 145 : tree type;
9378 145 : tree fndecl, call = NULL_TREE;
9379 :
9380 145 : if (!validate_arg (arg0, REAL_TYPE)
9381 145 : || !validate_arg (arg1, POINTER_TYPE)
9382 290 : || !validate_arg (arg2, POINTER_TYPE))
9383 : return NULL_TREE;
9384 :
9385 145 : type = TREE_TYPE (arg0);
9386 :
9387 : /* Calculate the result when the argument is a constant. */
9388 145 : built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
9389 145 : if (fn == END_BUILTINS)
9390 : return NULL_TREE;
9391 :
9392 : /* Canonicalize sincos to cexpi. */
9393 145 : if (TREE_CODE (arg0) == REAL_CST)
9394 : {
9395 86 : tree complex_type = build_complex_type (type);
9396 86 : call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
9397 : }
9398 86 : if (!call)
9399 : {
9400 59 : if (!targetm.libc_has_function (function_c99_math_complex, type)
9401 59 : || !builtin_decl_implicit_p (fn))
9402 : return NULL_TREE;
9403 59 : fndecl = builtin_decl_explicit (fn);
9404 59 : call = build_call_expr_loc (loc, fndecl, 1, arg0);
9405 59 : call = builtin_save_expr (call);
9406 : }
9407 :
9408 145 : tree ptype = build_pointer_type (type);
9409 145 : arg1 = fold_convert (ptype, arg1);
9410 145 : arg2 = fold_convert (ptype, arg2);
9411 145 : return build2 (COMPOUND_EXPR, void_type_node,
9412 : build2 (MODIFY_EXPR, void_type_node,
9413 : build_fold_indirect_ref_loc (loc, arg1),
9414 : fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
9415 : build2 (MODIFY_EXPR, void_type_node,
9416 : build_fold_indirect_ref_loc (loc, arg2),
9417 145 : fold_build1_loc (loc, REALPART_EXPR, type, call)));
9418 : }
9419 :
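/* A sketch of the canonicalization above (illustrative, not part of
   GCC): sincos is rewritten in terms of cexpi so later passes deal
   with a single call.  */
void
sincos_example (double x, double *sinp, double *cosp)
{
  /* Folds roughly to
     _Complex double t = __builtin_cexpi (x);
     *sinp = __imag__ t;  *cosp = __real__ t;  */
  __builtin_sincos (x, sinp, cosp);
}
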
9420 : /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
9421 : Return NULL_TREE if no simplification can be made. */
9422 :
9423 : static tree
9424 2483147 : fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
9425 : {
9426 2483147 : if (!validate_arg (arg1, POINTER_TYPE)
9427 2483147 : || !validate_arg (arg2, POINTER_TYPE)
9428 4966294 : || !validate_arg (len, INTEGER_TYPE))
9429 : return NULL_TREE;
9430 :
9431 : /* If the LEN parameter is zero, return zero. */
9432 2483147 : if (integer_zerop (len))
9433 0 : return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
9434 0 : arg1, arg2);
9435 :
9436 : /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9437 2483147 : if (operand_equal_p (arg1, arg2, 0))
9438 695 : return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9439 :
9440 : /* If the LEN parameter is one, return an expression corresponding to
9441 : (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
9442 2482452 : if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
9443 : {
9444 52884 : tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9445 52884 : tree cst_uchar_ptr_node
9446 52884 : = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9447 :
9448 52884 : tree ind1
9449 52884 : = fold_convert_loc (loc, integer_type_node,
9450 : build1 (INDIRECT_REF, cst_uchar_node,
9451 : fold_convert_loc (loc,
9452 : cst_uchar_ptr_node,
9453 : arg1)));
9454 52884 : tree ind2
9455 52884 : = fold_convert_loc (loc, integer_type_node,
9456 : build1 (INDIRECT_REF, cst_uchar_node,
9457 : fold_convert_loc (loc,
9458 : cst_uchar_ptr_node,
9459 : arg2)));
9460 52884 : return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9461 : }
9462 :
9463 : return NULL_TREE;
9464 : }
9465 :
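/* A sketch of the length-1 case above (illustrative, not part of
   GCC): the call is reduced to a single byte subtraction with no
   library call.  */
int
memcmp1_example (const void *p, const void *q)
{
  /* Folds to *(const unsigned char *) p - *(const unsigned char *) q.  */
  return __builtin_memcmp (p, q, 1);
}
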
9466 : /* Fold a call to builtin isascii with argument ARG. */
9467 :
9468 : static tree
9469 211 : fold_builtin_isascii (location_t loc, tree arg)
9470 : {
9471 211 : if (!validate_arg (arg, INTEGER_TYPE))
9472 : return NULL_TREE;
9473 : else
9474 : {
9475 : /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9476 211 : arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9477 : build_int_cst (integer_type_node,
9478 : ~ HOST_WIDE_INT_UC (0x7f)));
9479 211 : return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9480 211 : arg, integer_zero_node);
9481 : }
9482 : }
9483 :
9484 : /* Fold a call to builtin toascii with argument ARG. */
9485 :
9486 : static tree
9487 168 : fold_builtin_toascii (location_t loc, tree arg)
9488 : {
9489 168 : if (!validate_arg (arg, INTEGER_TYPE))
9490 : return NULL_TREE;
9491 :
9492 : /* Transform toascii(c) -> (c & 0x7f). */
9493 168 : return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9494 : build_int_cst (integer_type_node, 0x7f));
9495 : }
9496 :
9497 : /* Fold a call to builtin isdigit with argument ARG. */
9498 :
9499 : static tree
9500 325 : fold_builtin_isdigit (location_t loc, tree arg)
9501 : {
9502 325 : if (!validate_arg (arg, INTEGER_TYPE))
9503 : return NULL_TREE;
9504 : else
9505 : {
9506 : /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9507 : /* According to the C standard, isdigit is unaffected by locale.
9508 : However, it definitely is affected by the target character set. */
9509 313 : unsigned HOST_WIDE_INT target_digit0
9510 313 : = lang_hooks.to_target_charset ('0');
9511 :
9512 313 : if (target_digit0 == 0)
9513 : return NULL_TREE;
9514 :
9515 313 : arg = fold_convert_loc (loc, unsigned_type_node, arg);
9516 313 : arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9517 : build_int_cst (unsigned_type_node, target_digit0));
9518 313 : return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9519 : build_int_cst (unsigned_type_node, 9));
9520 : }
9521 : }
9522 :
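/* A sketch of the transform above on an ASCII target, where '0' is
   48 (illustrative, not part of GCC): both range bounds collapse
   into one unsigned comparison.  */
int
isdigit_example (int c)
{
  return __builtin_isdigit (c);   /* (unsigned) c - 48 <= 9 */
}
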
9523 : /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9524 :
9525 : static tree
9526 334668 : fold_builtin_fabs (location_t loc, tree arg, tree type)
9527 : {
9528 334668 : if (!validate_arg (arg, REAL_TYPE))
9529 : return NULL_TREE;
9530 :
9531 334583 : arg = fold_convert_loc (loc, type, arg);
9532 334583 : return fold_build1_loc (loc, ABS_EXPR, type, arg);
9533 : }
9534 :
9535 : /* Fold a call to abs, labs, llabs, imaxabs, uabs, ulabs, ullabs or uimaxabs
9536 : with argument ARG. */
9537 :
9538 : static tree
9539 93596 : fold_builtin_abs (location_t loc, tree arg, tree type)
9540 : {
9541 93596 : if (!validate_arg (arg, INTEGER_TYPE) || !INTEGRAL_TYPE_P (type))
9542 : return NULL_TREE;
9543 :
9544 93548 : if (TYPE_UNSIGNED (type))
9545 : {
9546 1168 : if (TYPE_PRECISION (TREE_TYPE (arg))
9547 1168 : != TYPE_PRECISION (type)
9548 1168 : || TYPE_UNSIGNED (TREE_TYPE (arg)))
9549 : return NULL_TREE;
9550 1168 : return fold_build1_loc (loc, ABSU_EXPR, type, arg);
9551 : }
9552 92380 : arg = fold_convert_loc (loc, type, arg);
9553 92380 : return fold_build1_loc (loc, ABS_EXPR, type, arg);
9554 : }
9555 :
9556 : /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9557 :
9558 : static tree
9559 93682 : fold_builtin_carg (location_t loc, tree arg, tree type)
9560 : {
9561 93682 : if (validate_arg (arg, COMPLEX_TYPE)
9562 93682 : && SCALAR_FLOAT_TYPE_P (TREE_TYPE (TREE_TYPE (arg))))
9563 : {
9564 93682 : tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9565 :
9566 93682 : if (atan2_fn)
9567 : {
9568 91686 : tree new_arg = builtin_save_expr (arg);
9569 91686 : tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9570 91686 : tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9571 91686 : return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9572 : }
9573 : }
9574 :
9575 : return NULL_TREE;
9576 : }
9577 :
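/* A sketch of the fold above (illustrative, not part of GCC): the
   phase angle is computed by atan2 on the parts of the argument.  */
double
carg_example (_Complex double z)
{
  return __builtin_carg (z);   /* atan2 (__imag__ z, __real__ z) */
}
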
9578 : /* Fold a call to builtin frexp, we can assume the base is 2. */
9579 :
9580 : static tree
9581 134826 : fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9582 : {
9583 134826 : if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9584 : return NULL_TREE;
9585 :
9586 134826 : STRIP_NOPS (arg0);
9587 :
9588 134826 : if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9589 : return NULL_TREE;
9590 :
9591 2210 : arg1 = build_fold_indirect_ref_loc (loc, arg1);
9592 :
9593 : /* Proceed if a valid pointer type was passed in. */
9594 2210 : if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9595 : {
9596 2210 : const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9597 2210 : tree frac, exp, res;
9598 :
9599 2210 : switch (value->cl)
9600 : {
9601 276 : case rvc_zero:
9602 276 : case rvc_nan:
9603 276 : case rvc_inf:
9604 : /* For +-0, return (*exp = 0, +-0). */
9605 : /* For +-NaN or +-Inf, *exp is unspecified, but something should
9606 : be stored there so that it isn't read from an uninitialized
9607 : object. As glibc and newlib store *exp = 0 for +-Inf/NaN, storing
9608 : 0 here as well is easiest. */
9609 276 : exp = integer_zero_node;
9610 276 : frac = arg0;
9611 276 : break;
9612 1934 : case rvc_normal:
9613 1934 : {
9614 : /* Since the frexp function always expects base 2, and in
9615 : GCC normalized significands are already in the range
9616 : [0.5, 1.0), we have exactly what frexp wants. */
9617 1934 : REAL_VALUE_TYPE frac_rvt = *value;
9618 1934 : SET_REAL_EXP (&frac_rvt, 0);
9619 1934 : frac = build_real (rettype, frac_rvt);
9620 1934 : exp = build_int_cst (integer_type_node, REAL_EXP (value));
9621 : }
9622 1934 : break;
9623 0 : default:
9624 0 : gcc_unreachable ();
9625 : }
9626 :
9627 : /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
9628 2210 : arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9629 2210 : TREE_SIDE_EFFECTS (arg1) = 1;
9630 2210 : res = fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9631 2210 : suppress_warning (res, OPT_Wunused_value);
9632 2210 : return res;
9633 : }
9634 :
9635 : return NULL_TREE;
9636 : }
9637 :
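/* A worked example of the constant folding above (illustrative, not
   part of GCC): 8.0 is 0.5 * 2**4, so the folded expression stores 4
   through the pointer and yields 0.5.  */
double
frexp_example (int *e)
{
  return __builtin_frexp (8.0, e);   /* (*e = 4, 0.5) */
}
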
9638 : /* Fold a call to builtin modf. */
9639 :
9640 : static tree
9641 64626 : fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9642 : {
9643 64626 : if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9644 : return NULL_TREE;
9645 :
9646 64626 : STRIP_NOPS (arg0);
9647 :
9648 64626 : if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9649 : return NULL_TREE;
9650 :
9651 2474 : arg1 = build_fold_indirect_ref_loc (loc, arg1);
9652 :
9653 : /* Proceed if a valid pointer type was passed in. */
9654 2474 : if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9655 : {
9656 2474 : const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9657 2474 : REAL_VALUE_TYPE trunc, frac;
9658 2474 : tree res;
9659 :
9660 2474 : switch (value->cl)
9661 : {
9662 228 : case rvc_nan:
9663 228 : case rvc_zero:
9664 : /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9665 228 : trunc = frac = *value;
9666 228 : break;
9667 96 : case rvc_inf:
9668 : /* For +-Inf, return (*arg1 = arg0, +-0). */
9669 96 : frac = dconst0;
9670 96 : frac.sign = value->sign;
9671 96 : trunc = *value;
9672 96 : break;
9673 2150 : case rvc_normal:
9674 : /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9675 2150 : real_trunc (&trunc, VOIDmode, value);
9676 2150 : real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9677 : /* If the original number was negative and already
9678 : integral, then the fractional part is -0.0. */
9679 2150 : if (value->sign && frac.cl == rvc_zero)
9680 69 : frac.sign = value->sign;
9681 : break;
9682 : }
9683 :
9684 : /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9685 2474 : arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9686 : build_real (rettype, trunc));
9687 2474 : TREE_SIDE_EFFECTS (arg1) = 1;
9688 2474 : res = fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9689 : build_real (rettype, frac));
9690 2474 : suppress_warning (res, OPT_Wunused_value);
9691 2474 : return res;
9692 : }
9693 :
9694 : return NULL_TREE;
9695 : }
9696 :
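/* A worked example of the constant folding above (illustrative, not
   part of GCC), including the negative-integral case that yields a
   fractional part of -0.0.  */
double
modf_example (double *ip)
{
  /* __builtin_modf (-2.5, ip) folds to (*ip = -2.0, -0.5);
     __builtin_modf (-2.0, ip) folds to (*ip = -2.0, -0.0).  */
  return __builtin_modf (-2.5, ip);
}
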
9697 : /* Given a location LOC, an interclass builtin function decl FNDECL
9698 : and its single argument ARG, return an folded expression computing
9699 : the same, or NULL_TREE if we either couldn't or didn't want to fold
9700 : (the latter happen if there's an RTL instruction available). */
9701 :
9702 : static tree
9703 1989747 : fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9704 : {
9705 1989747 : machine_mode mode;
9706 :
9707 1989747 : if (!validate_arg (arg, REAL_TYPE))
9708 : return NULL_TREE;
9709 :
9710 1989747 : if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9711 : return NULL_TREE;
9712 :
9713 1989747 : mode = TYPE_MODE (TREE_TYPE (arg));
9714 :
9715 13924387 : bool is_ibm_extended = MODE_COMPOSITE_P (mode);
9716 :
9717 : /* If there is no optab, try generic code. */
9718 1989747 : switch (DECL_FUNCTION_CODE (fndecl))
9719 : {
9720 221723 : tree result;
9721 :
9722 221723 : CASE_FLT_FN (BUILT_IN_ISINF):
9723 221723 : {
9724 : /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9725 221723 : tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9726 221723 : tree type = TREE_TYPE (arg);
9727 221723 : REAL_VALUE_TYPE r;
9728 221723 : char buf[128];
9729 :
9730 221723 : if (is_ibm_extended)
9731 : {
9732 : /* NaN and Inf are encoded in the high-order double value
9733 : only. The low-order value is not significant. */
9734 0 : type = double_type_node;
9735 0 : mode = DFmode;
9736 0 : arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9737 : }
9738 221723 : get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
9739 221723 : real_from_string3 (&r, buf, mode);
9740 221723 : result = build_call_expr (isgr_fn, 2,
9741 : fold_build1_loc (loc, ABS_EXPR, type, arg),
9742 : build_real (type, r));
9743 221723 : return result;
9744 : }
9745 1323671 : CASE_FLT_FN (BUILT_IN_FINITE):
9746 1323671 : case BUILT_IN_ISFINITE:
9747 1323671 : {
9748 : /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9749 1323671 : tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9750 1323671 : tree type = TREE_TYPE (arg);
9751 1323671 : REAL_VALUE_TYPE r;
9752 1323671 : char buf[128];
9753 :
9754 1323671 : if (is_ibm_extended)
9755 : {
9756 : /* NaN and Inf are encoded in the high-order double value
9757 : only. The low-order value is not significant. */
9758 0 : type = double_type_node;
9759 0 : mode = DFmode;
9760 0 : arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9761 : }
9762 1323671 : get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
9763 1323671 : real_from_string3 (&r, buf, mode);
9764 1323671 : result = build_call_expr (isle_fn, 2,
9765 : fold_build1_loc (loc, ABS_EXPR, type, arg),
9766 : build_real (type, r));
9767 : /*result = fold_build2_loc (loc, UNGT_EXPR,
9768 : TREE_TYPE (TREE_TYPE (fndecl)),
9769 : fold_build1_loc (loc, ABS_EXPR, type, arg),
9770 : build_real (type, r));
9771 : result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9772 : TREE_TYPE (TREE_TYPE (fndecl)),
9773 : result);*/
9774 1323671 : return result;
9775 : }
9776 219918 : case BUILT_IN_ISNORMAL:
9777 219918 : {
9778 : /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9779 : islessequal(fabs(x),DBL_MAX). */
9780 219918 : tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9781 219918 : tree type = TREE_TYPE (arg);
9782 219918 : tree orig_arg, max_exp, min_exp;
9783 219918 : machine_mode orig_mode = mode;
9784 219918 : REAL_VALUE_TYPE rmax, rmin;
9785 219918 : char buf[128];
9786 :
9787 219918 : orig_arg = arg = builtin_save_expr (arg);
9788 219918 : if (is_ibm_extended)
9789 : {
9790 : /* Use double to test the normal range of IBM extended
9791 : precision. Emin for IBM extended precision is
9792 : different to emin for IEEE double, being 53 higher
9793 : since the low double exponent is at least 53 lower
9794 : than the high double exponent. */
9795 0 : type = double_type_node;
9796 0 : mode = DFmode;
9797 0 : arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9798 : }
9799 219918 : arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
9800 :
9801 219918 : get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
9802 219918 : real_from_string3 (&rmax, buf, mode);
9803 219918 : if (DECIMAL_FLOAT_MODE_P (mode))
9804 1 : sprintf (buf, "1E%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
9805 : else
9806 219917 : sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
9807 219918 : real_from_string3 (&rmin, buf, orig_mode);
9808 219918 : max_exp = build_real (type, rmax);
9809 219918 : min_exp = build_real (type, rmin);
9810 :
9811 219918 : max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
9812 219918 : if (is_ibm_extended)
9813 : {
9814 : /* Testing the high end of the range is done just using
9815 : the high double, using the same test as isfinite().
9816 : For the subnormal end of the range we first test the
9817 : high double, then if its magnitude is equal to the
9818 : limit of 0x1p-969, we test whether the low double is
9819 : non-zero and opposite sign to the high double. */
9820 0 : tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
9821 0 : tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9822 0 : tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
9823 0 : tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
9824 : arg, min_exp);
9825 0 : tree as_complex = build1 (VIEW_CONVERT_EXPR,
9826 : complex_double_type_node, orig_arg);
9827 0 : tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
9828 0 : tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
9829 0 : tree zero = build_real (type, dconst0);
9830 0 : tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
9831 0 : tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
9832 0 : tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
9833 0 : tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
9834 : fold_build3 (COND_EXPR,
9835 : integer_type_node,
9836 : hilt, logt, lolt));
9837 0 : eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
9838 : eq_min, ok_lo);
9839 0 : min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
9840 : gt_min, eq_min);
9841 : }
9842 : else
9843 : {
9844 219918 : tree const isge_fn
9845 219918 : = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9846 219918 : min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
9847 : }
9848 219918 : result = fold_build2 (BIT_AND_EXPR, integer_type_node,
9849 : max_exp, min_exp);
9850 219918 : return result;
9851 : }
9852 222901 : CASE_FLT_FN (BUILT_IN_ISNAN):
9853 222901 : case BUILT_IN_ISNAND32:
9854 222901 : case BUILT_IN_ISNAND64:
9855 222901 : case BUILT_IN_ISNAND128:
9856 222901 : {
9857 : /* In IBM extended NaN and Inf are encoded in the high-order double
9858 : value only. The low-order value is not significant. */
9859 222901 : if (is_ibm_extended)
9860 0 : arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
9861 222901 : arg = builtin_save_expr (arg);
9862 222901 : tree type = TREE_TYPE (TREE_TYPE (fndecl));
9863 222901 : return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9864 : }
9865 : default:
9866 : break;
9867 : }
9868 :
9869 : return NULL_TREE;
9870 : }
9871 :
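/* A caller-side sketch of the generic expansions above for a target
   without dedicated instructions (illustrative, not part of GCC).  */
int
classify_example (double x)
{
  int a = __builtin_isinf (x);     /* isgreater (fabs (x), DBL_MAX) */
  int b = __builtin_isfinite (x);  /* islessequal (fabs (x), DBL_MAX) */
  int c = __builtin_isnan (x);     /* x unordered with itself */
  return a + b + c;
}
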
9872 : /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
9873 : ARG is the argument for the call. */
9874 :
9875 : static tree
9876 1799385 : fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9877 : {
9878 1799385 : tree type = TREE_TYPE (TREE_TYPE (fndecl));
9879 :
9880 1799385 : if (!validate_arg (arg, REAL_TYPE))
9881 : return NULL_TREE;
9882 :
9883 1799385 : switch (builtin_index)
9884 : {
9885 223378 : case BUILT_IN_ISINF:
9886 223378 : if (tree_expr_infinite_p (arg))
9887 0 : return omit_one_operand_loc (loc, type, integer_one_node, arg);
9888 223378 : if (!tree_expr_maybe_infinite_p (arg))
9889 121 : return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9890 : return NULL_TREE;
9891 :
9892 708 : case BUILT_IN_ISINF_SIGN:
9893 708 : {
9894 : /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9895 : /* In a boolean context, GCC will fold the inner COND_EXPR to
9896 : 1. So e.g. "if (isinf_sign(x))" would be folded to just
9897 : "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9898 708 : tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
9899 708 : tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
9900 708 : tree tmp = NULL_TREE;
9901 :
9902 708 : arg = builtin_save_expr (arg);
9903 :
9904 708 : if (signbit_fn && isinf_fn)
9905 : {
9906 708 : tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9907 708 : tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9908 :
9909 708 : signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9910 : signbit_call, integer_zero_node);
9911 708 : isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9912 : isinf_call, integer_zero_node);
9913 :
9914 708 : tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9915 : integer_minus_one_node, integer_one_node);
9916 708 : tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9917 : isinf_call, tmp,
9918 : integer_zero_node);
9919 : }
9920 :
9921 : return tmp;
9922 : }
9923 :
9924 1323895 : case BUILT_IN_ISFINITE:
9925 1323895 : if (tree_expr_finite_p (arg))
9926 224 : return omit_one_operand_loc (loc, type, integer_one_node, arg);
9927 1323671 : if (tree_expr_nan_p (arg) || tree_expr_infinite_p (arg))
9928 0 : return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9929 : return NULL_TREE;
9930 :
9931 223025 : case BUILT_IN_ISNAN:
9932 223025 : if (tree_expr_nan_p (arg))
9933 0 : return omit_one_operand_loc (loc, type, integer_one_node, arg);
9934 223025 : if (!tree_expr_maybe_nan_p (arg))
9935 124 : return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9936 : return NULL_TREE;
9937 :
9938 28379 : case BUILT_IN_ISSIGNALING:
9939 : /* Folding to true for REAL_CST is done in fold_const_call_ss.
9940 : Don't use tree_expr_signaling_nan_p (arg) -> integer_one_node
9941 : and !tree_expr_maybe_signaling_nan_p (arg) -> integer_zero_node
9942 : here, so there is some possibility of __builtin_issignaling working
9943 : without -fsignaling-nans. Especially when -fno-signaling-nans is
9944 : the default. */
9945 28379 : if (!tree_expr_maybe_nan_p (arg))
9946 0 : return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9947 : return NULL_TREE;
9948 :
9949 0 : default:
9950 0 : gcc_unreachable ();
9951 : }
9952 : }
9953 :
9954 : /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9955 : This builtin will generate code to return the appropriate floating
9956 : point classification depending on the value of the floating point
9957 : number passed in. The possible return values must be supplied as
9958 : int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9959 : FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
9960 : one floating-point argument, which is "type generic". */
9961 :
9962 : static tree
9963 95952 : fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
9964 : {
9965 95952 : tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9966 : arg, type, res, tmp;
9967 95952 : machine_mode mode;
9968 95952 : REAL_VALUE_TYPE r;
9969 95952 : char buf[128];
9970 :
9971 : /* Verify the required arguments in the original call. */
9972 95952 : if (nargs != 6
9973 95952 : || !validate_arg (args[0], INTEGER_TYPE)
9974 95952 : || !validate_arg (args[1], INTEGER_TYPE)
9975 95952 : || !validate_arg (args[2], INTEGER_TYPE)
9976 95952 : || !validate_arg (args[3], INTEGER_TYPE)
9977 95952 : || !validate_arg (args[4], INTEGER_TYPE)
9978 191904 : || !validate_arg (args[5], REAL_TYPE))
9979 : return NULL_TREE;
9980 :
9981 95952 : fp_nan = args[0];
9982 95952 : fp_infinite = args[1];
9983 95952 : fp_normal = args[2];
9984 95952 : fp_subnormal = args[3];
9985 95952 : fp_zero = args[4];
9986 95952 : arg = args[5];
9987 95952 : type = TREE_TYPE (arg);
9988 95952 : mode = TYPE_MODE (type);
9989 95952 : arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9990 :
9991 : /* fpclassify(x) ->
9992 : isnan(x) ? FP_NAN :
9993 : (fabs(x) == Inf ? FP_INFINITE :
9994 : (fabs(x) >= DBL_MIN ? FP_NORMAL :
9995 : (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9996 :
9997 95952 : tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9998 : build_real (type, dconst0));
9999 95952 : res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
10000 : tmp, fp_zero, fp_subnormal);
10001 :
10002 95952 : if (DECIMAL_FLOAT_MODE_P (mode))
10003 3 : sprintf (buf, "1E%d", REAL_MODE_FORMAT (mode)->emin - 1);
10004 : else
10005 95949 : sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
10006 95952 : real_from_string3 (&r, buf, mode);
10007 95952 : tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
10008 : arg, build_real (type, r));
10009 95952 : res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
10010 : fp_normal, res);
10011 :
10012 95952 : if (tree_expr_maybe_infinite_p (arg))
10013 : {
10014 95864 : tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
10015 : build_real (type, dconstinf));
10016 95864 : res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
10017 : fp_infinite, res);
10018 : }
10019 :
10020 95952 : if (tree_expr_maybe_nan_p (arg))
10021 : {
10022 95862 : tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
10023 95862 : res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
10024 : res, fp_nan);
10025 : }
10026 :
10027 : return res;
10028 : }
10029 :
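/* A usage sketch for the expansion above (illustrative, not part of
   GCC): the caller supplies the five class values in the documented
   order; 0..4 stand in for FP_NAN, FP_INFINITE, FP_NORMAL,
   FP_SUBNORMAL and FP_ZERO.  */
int
fpclassify_example (double x)
{
  return __builtin_fpclassify (0, 1, 2, 3, 4, x);
}
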
10030 : /* Fold a call to an unordered comparison function such as
10031 : __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
10032 : being called and ARG0 and ARG1 are the arguments for the call.
10033 : UNORDERED_CODE and ORDERED_CODE are comparison codes that give
10034 : the opposite of the desired result. UNORDERED_CODE is used
10035 : for modes that can hold NaNs and ORDERED_CODE is used for
10036 : the rest. */
10037 :
10038 : static tree
10039 3309212 : fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
10040 : enum tree_code unordered_code,
10041 : enum tree_code ordered_code)
10042 : {
10043 3309212 : tree type = TREE_TYPE (TREE_TYPE (fndecl));
10044 3309212 : enum tree_code code;
10045 3309212 : tree type0, type1;
10046 3309212 : enum tree_code code0, code1;
10047 3309212 : tree cmp_type = NULL_TREE;
10048 :
10049 3309212 : type0 = TREE_TYPE (arg0);
10050 3309212 : type1 = TREE_TYPE (arg1);
10051 :
10052 3309212 : code0 = TREE_CODE (type0);
10053 3309212 : code1 = TREE_CODE (type1);
10054 :
10055 3309212 : if (code0 == REAL_TYPE && code1 == REAL_TYPE)
10056 : /* Choose the wider of two real types. */
10057 3309032 : cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
10058 3309032 : ? type0 : type1;
10059 180 : else if (code0 == REAL_TYPE
10060 91 : && (code1 == INTEGER_TYPE || code1 == BITINT_TYPE))
10061 : cmp_type = type0;
10062 89 : else if ((code0 == INTEGER_TYPE || code0 == BITINT_TYPE)
10063 89 : && code1 == REAL_TYPE)
10064 135 : cmp_type = type1;
10065 :
10066 3309212 : arg0 = fold_convert_loc (loc, cmp_type, arg0);
10067 3309212 : arg1 = fold_convert_loc (loc, cmp_type, arg1);
10068 :
10069 3309212 : if (unordered_code == UNORDERED_EXPR)
10070 : {
10071 220965 : if (tree_expr_nan_p (arg0) || tree_expr_nan_p (arg1))
10072 16 : return omit_two_operands_loc (loc, type, integer_one_node, arg0, arg1);
10073 220949 : if (!tree_expr_maybe_nan_p (arg0) && !tree_expr_maybe_nan_p (arg1))
10074 141 : return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
10075 220808 : return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
10076 : }
10077 :
10078 3091098 : code = (tree_expr_maybe_nan_p (arg0) || tree_expr_maybe_nan_p (arg1))
10079 3088247 : ? unordered_code : ordered_code;
10080 3088247 : return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
10081 3088247 : fold_build2_loc (loc, code, type, arg0, arg1));
10082 : }
10083 :
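/* A caller-side sketch of the fold above (illustrative, not part of
   GCC): the result is built as the negation of the opposite
   comparison, with the unordered code used only when a NaN operand
   is possible.  */
int
isgreater_example (double x, double y)
{
  return __builtin_isgreater (x, y);   /* ! (x UNLE y) */
}
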
10084 : /* Fold a call to __builtin_iseqsig(). ARG0 and ARG1 are the arguments.
10085 : After choosing the wider floating-point type for the comparison,
10086 : the code is folded to:
10087 : SAVE_EXPR<ARG0> >= SAVE_EXPR<ARG1> && SAVE_EXPR<ARG0> <= SAVE_EXPR<ARG1> */
10088 :
10089 : static tree
10090 709 : fold_builtin_iseqsig (location_t loc, tree arg0, tree arg1)
10091 : {
10092 709 : tree type0, type1;
10093 709 : enum tree_code code0, code1;
10094 709 : tree cmp1, cmp2, cmp_type = NULL_TREE;
10095 :
10096 709 : type0 = TREE_TYPE (arg0);
10097 709 : type1 = TREE_TYPE (arg1);
10098 :
10099 709 : code0 = TREE_CODE (type0);
10100 709 : code1 = TREE_CODE (type1);
10101 :
10102 709 : if (code0 == REAL_TYPE && code1 == REAL_TYPE)
10103 : /* Choose the wider of two real types. */
10104 690 : cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
10105 690 : ? type0 : type1;
10106 19 : else if (code0 == REAL_TYPE
10107 6 : && (code1 == INTEGER_TYPE || code1 == BITINT_TYPE))
10108 : cmp_type = type0;
10109 13 : else if ((code0 == INTEGER_TYPE || code0 == BITINT_TYPE)
10110 13 : && code1 == REAL_TYPE)
10111 13 : cmp_type = type1;
10112 :
10113 709 : arg0 = builtin_save_expr (fold_convert_loc (loc, cmp_type, arg0));
10114 709 : arg1 = builtin_save_expr (fold_convert_loc (loc, cmp_type, arg1));
10115 :
10116 709 : cmp1 = fold_build2_loc (loc, GE_EXPR, integer_type_node, arg0, arg1);
10117 709 : cmp2 = fold_build2_loc (loc, LE_EXPR, integer_type_node, arg0, arg1);
10118 :
10119 709 : return fold_build2_loc (loc, TRUTH_AND_EXPR, integer_type_node, cmp1, cmp2);
10120 : }
10121 :
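/* A caller-side sketch of the fold above (illustrative, not part of
   GCC): both bounds use signaling comparisons, so a quiet NaN
   operand raises "invalid", unlike the quiet == operator.  */
int
iseqsig_example (double x, double y)
{
  return __builtin_iseqsig (x, y);   /* x >= y && x <= y */
}
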
10122 : /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
10123 : arithmetic if it can never overflow, or into internal functions that
10124 : return both the result of the arithmetic and an overflow flag in
10125 : a complex integer result, or some other check for overflow.
10126 : Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
10127 : checking part of that. */
10128 :
10129 : static tree
10130 450817 : fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
10131 : tree arg0, tree arg1, tree arg2)
10132 : {
10133 450817 : enum internal_fn ifn = IFN_LAST;
10134 : /* The code of the expression corresponding to the built-in. */
10135 450817 : enum tree_code opcode = ERROR_MARK;
10136 450817 : bool ovf_only = false;
10137 :
10138 450817 : switch (fcode)
10139 : {
10140 : case BUILT_IN_ADD_OVERFLOW_P:
10141 : ovf_only = true;
10142 : /* FALLTHRU */
10143 : case BUILT_IN_ADD_OVERFLOW:
10144 : case BUILT_IN_SADD_OVERFLOW:
10145 : case BUILT_IN_SADDL_OVERFLOW:
10146 : case BUILT_IN_SADDLL_OVERFLOW:
10147 : case BUILT_IN_UADD_OVERFLOW:
10148 : case BUILT_IN_UADDL_OVERFLOW:
10149 : case BUILT_IN_UADDLL_OVERFLOW:
10150 : opcode = PLUS_EXPR;
10151 : ifn = IFN_ADD_OVERFLOW;
10152 : break;
10153 15577 : case BUILT_IN_SUB_OVERFLOW_P:
10154 15577 : ovf_only = true;
10155 : /* FALLTHRU */
10156 37333 : case BUILT_IN_SUB_OVERFLOW:
10157 37333 : case BUILT_IN_SSUB_OVERFLOW:
10158 37333 : case BUILT_IN_SSUBL_OVERFLOW:
10159 37333 : case BUILT_IN_SSUBLL_OVERFLOW:
10160 37333 : case BUILT_IN_USUB_OVERFLOW:
10161 37333 : case BUILT_IN_USUBL_OVERFLOW:
10162 37333 : case BUILT_IN_USUBLL_OVERFLOW:
10163 37333 : opcode = MINUS_EXPR;
10164 37333 : ifn = IFN_SUB_OVERFLOW;
10165 37333 : break;
10166 15871 : case BUILT_IN_MUL_OVERFLOW_P:
10167 15871 : ovf_only = true;
10168 : /* FALLTHRU */
10169 374855 : case BUILT_IN_MUL_OVERFLOW:
10170 374855 : case BUILT_IN_SMUL_OVERFLOW:
10171 374855 : case BUILT_IN_SMULL_OVERFLOW:
10172 374855 : case BUILT_IN_SMULLL_OVERFLOW:
10173 374855 : case BUILT_IN_UMUL_OVERFLOW:
10174 374855 : case BUILT_IN_UMULL_OVERFLOW:
10175 374855 : case BUILT_IN_UMULLL_OVERFLOW:
10176 374855 : opcode = MULT_EXPR;
10177 374855 : ifn = IFN_MUL_OVERFLOW;
10178 374855 : break;
10179 0 : default:
10180 0 : gcc_unreachable ();
10181 : }
10182 :
10183 : /* For the "generic" overloads, the first two arguments can have different
10184 : types and the last argument determines the target type to use to check
10185 : for overflow. The arguments of the other overloads all have the same
10186 : type. */
10187 450817 : tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
10188 :
10189 : /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
10190 : arguments are constant, attempt to fold the built-in call into a constant
10191 : expression indicating whether or not it detected an overflow. */
10192 450817 : if (ovf_only
10193 43258 : && TREE_CODE (arg0) == INTEGER_CST
10194 13131 : && TREE_CODE (arg1) == INTEGER_CST)
10195 : /* Perform the computation in the target type and check for overflow. */
10196 7347 : return omit_one_operand_loc (loc, boolean_type_node,
10197 7347 : arith_overflowed_p (opcode, type, arg0, arg1)
10198 : ? boolean_true_node : boolean_false_node,
10199 7347 : arg2);
10200 :
10201 443470 : tree intres, ovfres;
10202 443470 : if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10203 : {
10204 55832 : intres = fold_binary_loc (loc, opcode, type,
10205 : fold_convert_loc (loc, type, arg0),
10206 : fold_convert_loc (loc, type, arg1));
10207 55832 : if (TREE_OVERFLOW (intres))
10208 1592 : intres = drop_tree_overflow (intres);
10209 111664 : ovfres = (arith_overflowed_p (opcode, type, arg0, arg1)
10210 55832 : ? boolean_true_node : boolean_false_node);
10211 : }
10212 : else
10213 : {
10214 387638 : tree ctype = build_complex_type (type);
10215 387638 : tree call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
10216 : arg0, arg1);
10217 387638 : tree tgt;
10218 387638 : if (ovf_only)
10219 : {
10220 : tgt = call;
10221 387638 : intres = NULL_TREE;
10222 : }
10223 : else
10224 : {
10225 : /* Force SAVE_EXPR even for calls which satisfy tree_invariant_p_1,
10226 : as while the call itself is const, the REALPART_EXPR store is
10227 : certainly not. And in any case, we want just one call,
10228 : not multiple calls that we would then have to CSE later. */
10229 351727 : TREE_SIDE_EFFECTS (call) = 1;
10230 351727 : tgt = save_expr (call);
10231 : }
10232 387638 : intres = build1_loc (loc, REALPART_EXPR, type, tgt);
10233 387638 : ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
10234 387638 : ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
10235 : }
10236 :
10237 443470 : if (ovf_only)
10238 35911 : return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
10239 :
10240 407559 : tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
10241 407559 : tree store
10242 407559 : = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
10243 407559 : return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
10244 : }
10245 :
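/* A caller-side sketch of the two paths above (illustrative, not
   part of GCC): the _p form with constant operands folds to a
   constant, while the form with a result pointer becomes an
   internal-fn call yielding both result and flag.  */
int
overflow_example (int a, int b, int *res)
{
  int k = __builtin_add_overflow_p (1, 2, (int) 0);  /* folds to 0 */
  return k | __builtin_mul_overflow (a, b, res);     /* IFN_MUL_OVERFLOW */
}
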
10246 : /* Fold __builtin_{clz,ctz,clrsb,ffs,parity,popcount}g into corresponding
10247 : internal function. */
10248 :
10249 : static tree
10250 235024 : fold_builtin_bit_query (location_t loc, enum built_in_function fcode,
10251 : tree arg0, tree arg1)
10252 : {
10253 235024 : enum internal_fn ifn;
10254 235024 : enum built_in_function fcodei, fcodel, fcodell;
10255 235024 : tree arg0_type = TREE_TYPE (arg0);
10256 235024 : tree cast_type = NULL_TREE;
10257 235024 : int addend = 0;
10258 :
10259 235024 : switch (fcode)
10260 : {
10261 168399 : case BUILT_IN_CLZG:
10262 168399 : if (arg1 && TREE_CODE (arg1) != INTEGER_CST)
10263 : return NULL_TREE;
10264 : ifn = IFN_CLZ;
10265 : fcodei = BUILT_IN_CLZ;
10266 : fcodel = BUILT_IN_CLZL;
10267 : fcodell = BUILT_IN_CLZLL;
10268 : break;
10269 60067 : case BUILT_IN_CTZG:
10270 60067 : if (arg1 && TREE_CODE (arg1) != INTEGER_CST)
10271 : return NULL_TREE;
10272 : ifn = IFN_CTZ;
10273 : fcodei = BUILT_IN_CTZ;
10274 : fcodel = BUILT_IN_CTZL;
10275 : fcodell = BUILT_IN_CTZLL;
10276 : break;
10277 : case BUILT_IN_CLRSBG:
10278 : ifn = IFN_CLRSB;
10279 : fcodei = BUILT_IN_CLRSB;
10280 : fcodel = BUILT_IN_CLRSBL;
10281 : fcodell = BUILT_IN_CLRSBLL;
10282 : break;
10283 83 : case BUILT_IN_FFSG:
10284 83 : ifn = IFN_FFS;
10285 83 : fcodei = BUILT_IN_FFS;
10286 83 : fcodel = BUILT_IN_FFSL;
10287 83 : fcodell = BUILT_IN_FFSLL;
10288 83 : break;
10289 76 : case BUILT_IN_PARITYG:
10290 76 : ifn = IFN_PARITY;
10291 76 : fcodei = BUILT_IN_PARITY;
10292 76 : fcodel = BUILT_IN_PARITYL;
10293 76 : fcodell = BUILT_IN_PARITYLL;
10294 76 : break;
10295 6318 : case BUILT_IN_POPCOUNTG:
10296 6318 : ifn = IFN_POPCOUNT;
10297 6318 : fcodei = BUILT_IN_POPCOUNT;
10298 6318 : fcodel = BUILT_IN_POPCOUNTL;
10299 6318 : fcodell = BUILT_IN_POPCOUNTLL;
10300 6318 : break;
10301 0 : default:
10302 0 : gcc_unreachable ();
10303 : }
10304 :
10305 234854 : if (TYPE_PRECISION (arg0_type)
10306 234854 : <= TYPE_PRECISION (long_long_unsigned_type_node))
10307 : {
10308 199306 : if (TYPE_PRECISION (arg0_type) <= TYPE_PRECISION (unsigned_type_node))
10310 67229 : cast_type = (TYPE_UNSIGNED (arg0_type)
10311 67175 : ? unsigned_type_node : integer_type_node);
10312 132131 : else if (TYPE_PRECISION (arg0_type)
10313 132131 : <= TYPE_PRECISION (long_unsigned_type_node))
10314 : {
10315 132116 : cast_type = (TYPE_UNSIGNED (arg0_type)
10316 132044 : ? long_unsigned_type_node : long_integer_type_node);
10317 : fcodei = fcodel;
10318 : }
10319 : else
10320 : {
10321 87 : cast_type = (TYPE_UNSIGNED (arg0_type)
10322 87 : ? long_long_unsigned_type_node
10323 : : long_long_integer_type_node);
10324 : fcodei = fcodell;
10325 : }
10326 : }
10327 71096 : else if (TYPE_PRECISION (arg0_type) <= MAX_FIXED_MODE_SIZE)
10328 : {
10329 35448 : cast_type
10330 35448 : = build_nonstandard_integer_type (MAX_FIXED_MODE_SIZE,
10331 35448 : TYPE_UNSIGNED (arg0_type));
10332 35448 : gcc_assert (TYPE_PRECISION (cast_type)
10333 : == 2 * TYPE_PRECISION (long_long_unsigned_type_node));
10334 : fcodei = END_BUILTINS;
10335 : }
10336 : else
10337 : fcodei = END_BUILTINS;
10338 234754 : if (cast_type)
10339 : {
10340 234754 : switch (fcode)
10341 : {
10342 168364 : case BUILT_IN_CLZG:
10343 168364 : case BUILT_IN_CLRSBG:
10344 168364 : addend = TYPE_PRECISION (arg0_type) - TYPE_PRECISION (cast_type);
10345 168364 : break;
10346 : default:
10347 : break;
10348 : }
10349 234754 : arg0 = fold_convert (cast_type, arg0);
10350 234754 : arg0_type = cast_type;
10351 : }
10352 :
10353 234854 : if (arg1)
10354 157290 : arg1 = fold_convert (integer_type_node, arg1);
10355 :
10356 234854 : tree arg2 = arg1;
10357 234854 : if (fcode == BUILT_IN_CLZG && addend)
10358 : {
10359 2556 : if (arg1)
10360 2523 : arg0 = save_expr (arg0);
10361 : arg2 = NULL_TREE;
10362 : }
10363 234854 : tree call = NULL_TREE, tem;
10364 234854 : if (TYPE_PRECISION (arg0_type) == MAX_FIXED_MODE_SIZE
10365 35535 : && (TYPE_PRECISION (arg0_type)
10366 35535 : == 2 * TYPE_PRECISION (long_long_unsigned_type_node))
10367 : /* If the target supports the optab, then don't do the expansion. */
10368 270302 : && !direct_internal_fn_supported_p (ifn, arg0_type, OPTIMIZE_FOR_BOTH))
10369 : {
10370 : /* __int128 expansions using up to 2 long long builtins. */
10371 35448 : arg0 = save_expr (arg0);
10372 35448 : tree type = (TYPE_UNSIGNED (arg0_type)
10373 35448 : ? long_long_unsigned_type_node
10374 35448 : : long_long_integer_type_node);
10375 70896 : tree hi = fold_build2 (RSHIFT_EXPR, arg0_type, arg0,
10376 : build_int_cst (integer_type_node,
10377 : MAX_FIXED_MODE_SIZE / 2));
10378 35448 : hi = fold_convert (type, hi);
10379 35448 : tree lo = fold_convert (type, arg0);
10380 35448 : switch (fcode)
10381 : {
10382 35365 : case BUILT_IN_CLZG:
10383 35365 : call = fold_builtin_bit_query (loc, fcode, lo, NULL_TREE);
10384 70730 : call = fold_build2 (PLUS_EXPR, integer_type_node, call,
10385 : build_int_cst (integer_type_node,
10386 : MAX_FIXED_MODE_SIZE / 2));
10387 35365 : if (arg2)
10388 35352 : call = fold_build3 (COND_EXPR, integer_type_node,
10389 : fold_build2 (NE_EXPR, boolean_type_node,
10390 : lo, build_zero_cst (type)),
10391 : call, arg2);
10392 35365 : call = fold_build3 (COND_EXPR, integer_type_node,
10393 : fold_build2 (NE_EXPR, boolean_type_node,
10394 : hi, build_zero_cst (type)),
10395 : fold_builtin_bit_query (loc, fcode, hi,
10396 : NULL_TREE),
10397 : call);
10398 35365 : break;
10399 27 : case BUILT_IN_CTZG:
10400 27 : call = fold_builtin_bit_query (loc, fcode, hi, NULL_TREE);
10401 54 : call = fold_build2 (PLUS_EXPR, integer_type_node, call,
10402 : build_int_cst (integer_type_node,
10403 : MAX_FIXED_MODE_SIZE / 2));
10404 27 : if (arg2)
10405 18 : call = fold_build3 (COND_EXPR, integer_type_node,
10406 : fold_build2 (NE_EXPR, boolean_type_node,
10407 : hi, build_zero_cst (type)),
10408 : call, arg2);
10409 27 : call = fold_build3 (COND_EXPR, integer_type_node,
10410 : fold_build2 (NE_EXPR, boolean_type_node,
10411 : lo, build_zero_cst (type)),
10412 : fold_builtin_bit_query (loc, fcode, lo,
10413 : NULL_TREE),
10414 : call);
10415 27 : break;
10416 9 : case BUILT_IN_CLRSBG:
10417 9 : tem = fold_builtin_bit_query (loc, fcode, lo, NULL_TREE);
10418 18 : tem = fold_build2 (PLUS_EXPR, integer_type_node, tem,
10419 : build_int_cst (integer_type_node,
10420 : MAX_FIXED_MODE_SIZE / 2));
10421 18 : tem = fold_build3 (COND_EXPR, integer_type_node,
10422 : fold_build2 (LT_EXPR, boolean_type_node,
10423 : fold_build2 (BIT_XOR_EXPR, type,
10424 : lo, hi),
10425 : build_zero_cst (type)),
10426 : build_int_cst (integer_type_node,
10427 : MAX_FIXED_MODE_SIZE / 2 - 1),
10428 : tem);
10429 9 : call = fold_builtin_bit_query (loc, fcode, hi, NULL_TREE);
10430 9 : call = save_expr (call);
10431 18 : call = fold_build3 (COND_EXPR, integer_type_node,
10432 : fold_build2 (NE_EXPR, boolean_type_node,
10433 : call,
10434 : build_int_cst (integer_type_node,
10435 : MAX_FIXED_MODE_SIZE
10436 : / 2 - 1)),
10437 : call, tem);
10438 9 : break;
10439 9 : case BUILT_IN_FFSG:
10440 9 : call = fold_builtin_bit_query (loc, fcode, hi, NULL_TREE);
10441 18 : call = fold_build2 (PLUS_EXPR, integer_type_node, call,
10442 : build_int_cst (integer_type_node,
10443 : MAX_FIXED_MODE_SIZE / 2));
10444 9 : call = fold_build3 (COND_EXPR, integer_type_node,
10445 : fold_build2 (NE_EXPR, boolean_type_node,
10446 : hi, build_zero_cst (type)),
10447 : call, integer_zero_node);
10448 9 : call = fold_build3 (COND_EXPR, integer_type_node,
10449 : fold_build2 (NE_EXPR, boolean_type_node,
10450 : lo, build_zero_cst (type)),
10451 : fold_builtin_bit_query (loc, fcode, lo,
10452 : NULL_TREE),
10453 : call);
10454 9 : break;
10455 9 : case BUILT_IN_PARITYG:
10456 9 : call = fold_builtin_bit_query (loc, fcode,
10457 : fold_build2 (BIT_XOR_EXPR, type,
10458 : lo, hi), NULL_TREE);
10459 9 : break;
10460 29 : case BUILT_IN_POPCOUNTG:
10461 29 : call = fold_build2 (PLUS_EXPR, integer_type_node,
10462 : fold_builtin_bit_query (loc, fcode, hi,
10463 : NULL_TREE),
10464 : fold_builtin_bit_query (loc, fcode, lo,
10465 : NULL_TREE));
10466 29 : break;
10467 0 : default:
10468 0 : gcc_unreachable ();
10469 : }
10470 : }
10471 : else
10472 : {
10473 : /* Only keep the second argument to IFN_CLZ/IFN_CTZ if it is the
10474 : value defined at zero during GIMPLE, or for large/huge _BitInt
10475 : (which are then lowered during bitint lowering). */
10476 199406 : if (arg2 && TREE_CODE (TREE_TYPE (arg0)) != BITINT_TYPE)
10477 : {
10478 119379 : int val;
10479 119379 : if (fcode == BUILT_IN_CLZG)
10480 : {
10481 59587 : if (CLZ_DEFINED_VALUE_AT_ZERO (SCALAR_TYPE_MODE (arg0_type),
10482 : val) != 2
10483 59617 : || wi::to_widest (arg2) != val)
10484 59557 : arg2 = NULL_TREE;
10485 : }
10486 59792 : else if (CTZ_DEFINED_VALUE_AT_ZERO (SCALAR_TYPE_MODE (arg0_type),
10487 : val) != 2
10488 59801 : || wi::to_widest (arg2) != val)
10489 59783 : arg2 = NULL_TREE;
10490 119379 : if (!direct_internal_fn_supported_p (ifn, arg0_type,
10491 : OPTIMIZE_FOR_BOTH))
10492 : arg2 = NULL_TREE;
10493 119295 : if (arg2 == NULL_TREE)
10494 119340 : arg0 = save_expr (arg0);
10495 : }
10496 199406 : if (fcodei == END_BUILTINS || arg2)
10497 221 : call = build_call_expr_internal_loc (loc, ifn, integer_type_node,
10498 : arg2 ? 2 : 1, arg0, arg2);
10499 : else
10500 199267 : call = build_call_expr_loc (loc, builtin_decl_explicit (fcodei), 1,
10501 : arg0);
10502 : }
10503 234854 : if (addend)
10504 2574 : call = fold_build2 (PLUS_EXPR, integer_type_node, call,
10505 : build_int_cst (integer_type_node, addend));
10506 234854 : if (arg1 && arg2 == NULL_TREE)
10507 121863 : call = fold_build3 (COND_EXPR, integer_type_node,
10508 : fold_build2 (NE_EXPR, boolean_type_node,
10509 : arg0, build_zero_cst (arg0_type)),
10510 : call, arg1);
10511 :
10512 : return call;
10513 : }
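 : /* For illustration (restating the double-word cases above): once the
 :    operand is split into halves LO and HI of MAX_FIXED_MODE_SIZE / 2
 :    bits each, __builtin_popcountg folds to popcount (hi) + popcount (lo),
 :    __builtin_parityg to parity (lo ^ hi), and __builtin_ffsg to
 :    lo != 0 ? ffs (lo) : (hi != 0 ? ffs (hi) + MAX_FIXED_MODE_SIZE / 2 : 0). */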
10514 :
10515 : /* Fold __builtin_{add,sub}c{,l,ll} into a pair of internal functions
10516 : that return both the result of the arithmetic and the overflow
10517 : boolean flag in a complex integer result. */
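 : /* For illustration (hypothetical operands, not from the sources):
 :      r = __builtin_addc (x, y, cin, &ovf);
 :    is folded below to roughly
 :      c1 = .ADD_OVERFLOW (x, y);
 :      c2 = .ADD_OVERFLOW (REALPART_EXPR <c1>, cin);
 :      ovf = IMAGPART_EXPR <c1> | IMAGPART_EXPR <c2>;
 :      r = REALPART_EXPR <c2>; */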
10518 :
10519 : static tree
10520 54 : fold_builtin_addc_subc (location_t loc, enum built_in_function fcode,
10521 : tree *args)
10522 : {
10523 54 : enum internal_fn ifn;
10524 :
10525 54 : switch (fcode)
10526 : {
10527 : case BUILT_IN_ADDC:
10528 : case BUILT_IN_ADDCL:
10529 : case BUILT_IN_ADDCLL:
10530 : ifn = IFN_ADD_OVERFLOW;
10531 : break;
10532 28 : case BUILT_IN_SUBC:
10533 28 : case BUILT_IN_SUBCL:
10534 28 : case BUILT_IN_SUBCLL:
10535 28 : ifn = IFN_SUB_OVERFLOW;
10536 28 : break;
10537 0 : default:
10538 0 : gcc_unreachable ();
10539 : }
10540 :
10541 54 : tree type = TREE_TYPE (args[0]);
10542 54 : tree ctype = build_complex_type (type);
10543 54 : tree call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
10544 : args[0], args[1]);
10545 : /* Force SAVE_EXPR even for calls which satisfy tree_invariant_p_1,
10546 : as while the call itself is const, the REALPART_EXPR store is
10547 : certainly not. And in any case, we want just one call, not
10548 : multiple calls that we would then have to CSE back together later. */
10549 54 : TREE_SIDE_EFFECTS (call) = 1;
10550 54 : tree tgt = save_expr (call);
10551 54 : tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
10552 54 : tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
10553 54 : call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
10554 : intres, args[2]);
10555 54 : TREE_SIDE_EFFECTS (call) = 1;
10556 54 : tgt = save_expr (call);
10557 54 : intres = build1_loc (loc, REALPART_EXPR, type, tgt);
10558 54 : tree ovfres2 = build1_loc (loc, IMAGPART_EXPR, type, tgt);
10559 54 : ovfres = build2_loc (loc, BIT_IOR_EXPR, type, ovfres, ovfres2);
10560 54 : tree mem_arg3 = build_fold_indirect_ref_loc (loc, args[3]);
10561 54 : tree store
10562 54 : = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg3, ovfres);
10563 54 : return build2_loc (loc, COMPOUND_EXPR, type, store, intres);
10564 : }
10565 :
10566 : /* Fold a call to __builtin_FILE to a constant string. */
10567 :
10568 : static inline tree
10569 6097 : fold_builtin_FILE (location_t loc)
10570 : {
10571 6097 : if (const char *fname = LOCATION_FILE (loc))
10572 : {
10573 : /* The documentation says this builtin is equivalent to the preprocessor
10574 : __FILE__ macro so it appears appropriate to use the same file prefix
10575 : mappings. */
10576 6097 : fname = remap_macro_filename (fname);
10577 6097 : return build_string_literal (fname);
10578 : }
10579 :
10580 0 : return build_string_literal ("");
10581 : }
10582 :
10583 : /* Fold a call to __builtin_FUNCTION to a constant string. */
10584 :
10585 : static inline tree
10586 68 : fold_builtin_FUNCTION ()
10587 : {
10588 68 : const char *name = "";
10589 :
10590 68 : if (current_function_decl)
10591 43 : name = lang_hooks.decl_printable_name (current_function_decl, 0);
10592 :
10593 68 : return build_string_literal (name);
10594 : }
10595 :
10596 : /* Fold a call to __builtin_LINE to an integer constant. */
10597 :
10598 : static inline tree
10599 11755 : fold_builtin_LINE (location_t loc, tree type)
10600 : {
10601 11755 : return build_int_cst (type, LOCATION_LINE (loc));
10602 : }
10603 :
10604 : /* Fold a call to built-in function FNDECL with 0 arguments.
10605 : This function returns NULL_TREE if no simplification was possible. */
10606 :
10607 : static tree
10608 25188905 : fold_builtin_0 (location_t loc, tree fndecl)
10609 : {
10610 25188905 : tree type = TREE_TYPE (TREE_TYPE (fndecl));
10611 25188905 : enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10612 25188905 : switch (fcode)
10613 : {
10614 6097 : case BUILT_IN_FILE:
10615 6097 : return fold_builtin_FILE (loc);
10616 :
10617 68 : case BUILT_IN_FUNCTION:
10618 68 : return fold_builtin_FUNCTION ();
10619 :
10620 11755 : case BUILT_IN_LINE:
10621 11755 : return fold_builtin_LINE (loc, type);
10622 :
10623 32428 : CASE_FLT_FN (BUILT_IN_INF):
10624 32428 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
10625 32428 : case BUILT_IN_INFD32:
10626 32428 : case BUILT_IN_INFD64:
10627 32428 : case BUILT_IN_INFD128:
10628 32428 : case BUILT_IN_INFD64X:
10629 32428 : return fold_builtin_inf (loc, type, true);
10630 :
10631 207347 : CASE_FLT_FN (BUILT_IN_HUGE_VAL):
10632 207347 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
10633 207347 : return fold_builtin_inf (loc, type, false);
10634 :
10635 0 : case BUILT_IN_CLASSIFY_TYPE:
10636 0 : return fold_builtin_classify_type (NULL_TREE);
10637 :
10638 21623434 : case BUILT_IN_UNREACHABLE:
10639 : /* Rewrite any explicit calls to __builtin_unreachable. */
10640 21623434 : if (sanitize_flags_p (SANITIZE_UNREACHABLE))
10641 114 : return build_builtin_unreachable (loc);
10642 : break;
10643 :
10644 : default:
10645 : break;
10646 : }
10647 : return NULL_TREE;
10648 : }
10649 :
10650 : /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
10651 : This function returns NULL_TREE if no simplification was possible. */
10652 :
10653 : static tree
10654 16718447 : fold_builtin_1 (location_t loc, tree expr, tree fndecl, tree arg0)
10655 : {
10656 16718447 : tree type = TREE_TYPE (TREE_TYPE (fndecl));
10657 16718447 : enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10658 :
10659 16718447 : if (error_operand_p (arg0))
10660 : return NULL_TREE;
10661 :
10662 16718447 : if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
10663 : return ret;
10664 :
10665 16135237 : switch (fcode)
10666 : {
10667 1286963 : case BUILT_IN_CONSTANT_P:
10668 1286963 : {
10669 1286963 : tree val = fold_builtin_constant_p (arg0);
10670 :
10671 : /* Gimplification will pull the CALL_EXPR for the builtin out of
10672 : an if condition. When not optimizing, we'll not CSE it back.
10673 : To avoid regressions such as link errors, return false now. */
10674 1286963 : if (!val && !optimize)
10675 1552 : val = integer_zero_node;
10676 :
10677 : return val;
10678 : }
10679 :
10680 2080 : case BUILT_IN_CLASSIFY_TYPE:
10681 2080 : return fold_builtin_classify_type (arg0);
10682 :
10683 429133 : case BUILT_IN_STRLEN:
10684 429133 : return fold_builtin_strlen (loc, expr, type, arg0);
10685 :
10686 334668 : CASE_FLT_FN (BUILT_IN_FABS):
10687 334668 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
10688 334668 : case BUILT_IN_FABSD32:
10689 334668 : case BUILT_IN_FABSD64:
10690 334668 : case BUILT_IN_FABSD128:
10691 334668 : case BUILT_IN_FABSD64X:
10692 334668 : return fold_builtin_fabs (loc, arg0, type);
10693 :
10694 93596 : case BUILT_IN_ABS:
10695 93596 : case BUILT_IN_LABS:
10696 93596 : case BUILT_IN_LLABS:
10697 93596 : case BUILT_IN_IMAXABS:
10698 93596 : case BUILT_IN_UABS:
10699 93596 : case BUILT_IN_ULABS:
10700 93596 : case BUILT_IN_ULLABS:
10701 93596 : case BUILT_IN_UMAXABS:
10702 93596 : return fold_builtin_abs (loc, arg0, type);
10703 :
10704 24501 : CASE_FLT_FN (BUILT_IN_CONJ):
10705 24501 : if (validate_arg (arg0, COMPLEX_TYPE)
10706 24501 : && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10707 24501 : return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
10708 : break;
10709 :
10710 764 : CASE_FLT_FN (BUILT_IN_CREAL):
10711 764 : if (validate_arg (arg0, COMPLEX_TYPE)
10712 764 : && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10713 764 : return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
10714 : break;
10715 :
10716 1884 : CASE_FLT_FN (BUILT_IN_CIMAG):
10717 1884 : if (validate_arg (arg0, COMPLEX_TYPE)
10718 1884 : && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10719 1884 : return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
10720 : break;
10721 :
10722 93682 : CASE_FLT_FN (BUILT_IN_CARG):
10723 93682 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_CARG):
10724 93682 : return fold_builtin_carg (loc, arg0, type);
10725 :
10726 211 : case BUILT_IN_ISASCII:
10727 211 : return fold_builtin_isascii (loc, arg0);
10728 :
10729 168 : case BUILT_IN_TOASCII:
10730 168 : return fold_builtin_toascii (loc, arg0);
10731 :
10732 325 : case BUILT_IN_ISDIGIT:
10733 325 : return fold_builtin_isdigit (loc, arg0);
10734 :
10735 1323895 : CASE_FLT_FN (BUILT_IN_FINITE):
10736 1323895 : case BUILT_IN_FINITED32:
10737 1323895 : case BUILT_IN_FINITED64:
10738 1323895 : case BUILT_IN_FINITED128:
10739 1323895 : case BUILT_IN_ISFINITE:
10740 1323895 : {
10741 1323895 : tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10742 1323895 : if (ret)
10743 : return ret;
10744 1323671 : return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10745 : }
10746 :
10747 223378 : CASE_FLT_FN (BUILT_IN_ISINF):
10748 223378 : case BUILT_IN_ISINFD32:
10749 223378 : case BUILT_IN_ISINFD64:
10750 223378 : case BUILT_IN_ISINFD128:
10751 223378 : {
10752 223378 : tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10753 223378 : if (ret)
10754 : return ret;
10755 223257 : return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10756 : }
10757 :
10758 219918 : case BUILT_IN_ISNORMAL:
10759 219918 : return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10760 :
10761 708 : case BUILT_IN_ISINF_SIGN:
10762 708 : return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10763 :
10764 223025 : CASE_FLT_FN (BUILT_IN_ISNAN):
10765 223025 : case BUILT_IN_ISNAND32:
10766 223025 : case BUILT_IN_ISNAND64:
10767 223025 : case BUILT_IN_ISNAND128:
10768 223025 : {
10769 223025 : tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10770 223025 : if (ret)
10771 : return ret;
10772 222901 : return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10773 : }
10774 :
10775 28379 : case BUILT_IN_ISSIGNALING:
10776 28379 : return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISSIGNALING);
10777 :
10778 591042 : case BUILT_IN_FREE:
10779 591042 : if (integer_zerop (arg0))
10780 1049 : return build_empty_stmt (loc);
10781 : break;
10782 :
10783 6677 : case BUILT_IN_CLZG:
10784 6677 : case BUILT_IN_CTZG:
10785 6677 : case BUILT_IN_CLRSBG:
10786 6677 : case BUILT_IN_FFSG:
10787 6677 : case BUILT_IN_PARITYG:
10788 6677 : case BUILT_IN_POPCOUNTG:
10789 6677 : return fold_builtin_bit_query (loc, fcode, arg0, NULL_TREE);
10790 :
10791 : default:
10792 : break;
10793 : }
10794 :
10795 : return NULL_TREE;
10796 :
10797 : }
10798 :
10799 : /* Folds a call EXPR (which may be null) to built-in function FNDECL
10800 : with 2 arguments, ARG0 and ARG1. This function returns NULL_TREE
10801 : if no simplification was possible. */
10802 :
10803 : static tree
10804 18029253 : fold_builtin_2 (location_t loc, tree expr, tree fndecl, tree arg0, tree arg1)
10805 : {
10806 18029253 : tree type = TREE_TYPE (TREE_TYPE (fndecl));
10807 18029253 : enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10808 :
10809 18029253 : if (error_operand_p (arg0)
10810 18029253 : || error_operand_p (arg1))
10811 : return NULL_TREE;
10812 :
10813 18029249 : if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
10814 : return ret;
10815 :
10816 17820033 : switch (fcode)
10817 : {
10818 6504 : CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10819 6504 : CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10820 6504 : if (validate_arg (arg0, REAL_TYPE)
10821 6504 : && validate_arg (arg1, POINTER_TYPE))
10822 6504 : return do_mpfr_lgamma_r (arg0, arg1, type);
10823 : break;
10824 :
10825 134826 : CASE_FLT_FN (BUILT_IN_FREXP):
10826 134826 : return fold_builtin_frexp (loc, arg0, arg1, type);
10827 :
10828 64626 : CASE_FLT_FN (BUILT_IN_MODF):
10829 64626 : return fold_builtin_modf (loc, arg0, arg1, type);
10830 :
10831 2560 : case BUILT_IN_STRSPN:
10832 2560 : return fold_builtin_strspn (loc, expr, arg0, arg1, type);
10833 :
10834 2465 : case BUILT_IN_STRCSPN:
10835 2465 : return fold_builtin_strcspn (loc, expr, arg0, arg1, type);
10836 :
10837 76983 : case BUILT_IN_STRPBRK:
10838 76983 : return fold_builtin_strpbrk (loc, expr, arg0, arg1, type);
10839 :
10840 6156136 : case BUILT_IN_EXPECT:
10841 6156136 : return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, NULL_TREE);
10842 :
10843 443345 : case BUILT_IN_ISGREATER:
10844 443345 : return fold_builtin_unordered_cmp (loc, fndecl,
10845 443345 : arg0, arg1, UNLE_EXPR, LE_EXPR);
10846 439941 : case BUILT_IN_ISGREATEREQUAL:
10847 439941 : return fold_builtin_unordered_cmp (loc, fndecl,
10848 439941 : arg0, arg1, UNLT_EXPR, LT_EXPR);
10849 221592 : case BUILT_IN_ISLESS:
10850 221592 : return fold_builtin_unordered_cmp (loc, fndecl,
10851 221592 : arg0, arg1, UNGE_EXPR, GE_EXPR);
10852 1763641 : case BUILT_IN_ISLESSEQUAL:
10853 1763641 : return fold_builtin_unordered_cmp (loc, fndecl,
10854 1763641 : arg0, arg1, UNGT_EXPR, GT_EXPR);
10855 219728 : case BUILT_IN_ISLESSGREATER:
10856 219728 : return fold_builtin_unordered_cmp (loc, fndecl,
10857 219728 : arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10858 220965 : case BUILT_IN_ISUNORDERED:
10859 220965 : return fold_builtin_unordered_cmp (loc, fndecl,
10860 : arg0, arg1, UNORDERED_EXPR,
10861 220965 : NOP_EXPR);
10862 :
10863 709 : case BUILT_IN_ISEQSIG:
10864 709 : return fold_builtin_iseqsig (loc, arg0, arg1);
10865 :
10866 : /* We do the folding for va_start in the expander. */
10867 : case BUILT_IN_VA_START:
10868 : break;
10869 :
10870 200163 : case BUILT_IN_OBJECT_SIZE:
10871 200163 : case BUILT_IN_DYNAMIC_OBJECT_SIZE:
10872 200163 : return fold_builtin_object_size (arg0, arg1, fcode);
10873 :
10874 100624 : case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10875 100624 : return fold_builtin_atomic_always_lock_free (arg0, arg1);
10876 :
10877 36936 : case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10878 36936 : return fold_builtin_atomic_is_lock_free (arg0, arg1);
10879 :
10880 157460 : case BUILT_IN_CLZG:
10881 157460 : case BUILT_IN_CTZG:
10882 157460 : return fold_builtin_bit_query (loc, fcode, arg0, arg1);
10883 :
10884 : default:
10885 : break;
10886 : }
10887 : return NULL_TREE;
10888 : }
10889 :
10890 : /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10891 : and ARG2.
10892 : This function returns NULL_TREE if no simplification was possible. */
10893 :
10894 : static tree
10895 7103640 : fold_builtin_3 (location_t loc, tree fndecl,
10896 : tree arg0, tree arg1, tree arg2)
10897 : {
10898 7103640 : tree type = TREE_TYPE (TREE_TYPE (fndecl));
10899 7103640 : enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10900 :
10901 7103640 : if (error_operand_p (arg0)
10902 7103640 : || error_operand_p (arg1)
10903 14207280 : || error_operand_p (arg2))
10904 : return NULL_TREE;
10905 :
10906 7103638 : if (tree ret = fold_const_call (as_combined_fn (fcode), type,
10907 : arg0, arg1, arg2))
10908 : return ret;
10909 :
10910 7086845 : switch (fcode)
10911 : {
10912 :
10913 145 : CASE_FLT_FN (BUILT_IN_SINCOS):
10914 145 : return fold_builtin_sincos (loc, arg0, arg1, arg2);
10915 :
10916 69269 : CASE_FLT_FN (BUILT_IN_REMQUO):
10917 69269 : if (validate_arg (arg0, REAL_TYPE)
10918 69269 : && validate_arg (arg1, REAL_TYPE)
10919 138538 : && validate_arg (arg2, POINTER_TYPE))
10920 69269 : return do_mpfr_remquo (arg0, arg1, arg2);
10921 : break;
10922 :
10923 2483147 : case BUILT_IN_MEMCMP:
10924 2483147 : return fold_builtin_memcmp (loc, arg0, arg1, arg2);
10925 :
10926 504546 : case BUILT_IN_EXPECT:
10927 504546 : return fold_builtin_expect (loc, arg0, arg1, arg2, NULL_TREE);
10928 :
10929 337 : case BUILT_IN_EXPECT_WITH_PROBABILITY:
10930 337 : return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, arg2);
10931 :
10932 450817 : case BUILT_IN_ADD_OVERFLOW:
10933 450817 : case BUILT_IN_SUB_OVERFLOW:
10934 450817 : case BUILT_IN_MUL_OVERFLOW:
10935 450817 : case BUILT_IN_ADD_OVERFLOW_P:
10936 450817 : case BUILT_IN_SUB_OVERFLOW_P:
10937 450817 : case BUILT_IN_MUL_OVERFLOW_P:
10938 450817 : case BUILT_IN_SADD_OVERFLOW:
10939 450817 : case BUILT_IN_SADDL_OVERFLOW:
10940 450817 : case BUILT_IN_SADDLL_OVERFLOW:
10941 450817 : case BUILT_IN_SSUB_OVERFLOW:
10942 450817 : case BUILT_IN_SSUBL_OVERFLOW:
10943 450817 : case BUILT_IN_SSUBLL_OVERFLOW:
10944 450817 : case BUILT_IN_SMUL_OVERFLOW:
10945 450817 : case BUILT_IN_SMULL_OVERFLOW:
10946 450817 : case BUILT_IN_SMULLL_OVERFLOW:
10947 450817 : case BUILT_IN_UADD_OVERFLOW:
10948 450817 : case BUILT_IN_UADDL_OVERFLOW:
10949 450817 : case BUILT_IN_UADDLL_OVERFLOW:
10950 450817 : case BUILT_IN_USUB_OVERFLOW:
10951 450817 : case BUILT_IN_USUBL_OVERFLOW:
10952 450817 : case BUILT_IN_USUBLL_OVERFLOW:
10953 450817 : case BUILT_IN_UMUL_OVERFLOW:
10954 450817 : case BUILT_IN_UMULL_OVERFLOW:
10955 450817 : case BUILT_IN_UMULLL_OVERFLOW:
10956 450817 : return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
10957 :
10958 : default:
10959 : break;
10960 : }
10961 : return NULL_TREE;
10962 : }
10963 :
10964 : /* Folds a call EXPR (which may be null) to built-in function FNDECL.
10965 : ARGS is an array of NARGS arguments. IGNORE is true if the result
10966 : of the function call is ignored. This function returns NULL_TREE
10967 : if no simplification was possible. */
10968 :
10969 : static tree
10970 69509889 : fold_builtin_n (location_t loc, tree expr, tree fndecl, tree *args,
10971 : int nargs, bool)
10972 : {
10973 69509889 : tree ret = NULL_TREE;
10974 :
10975 69509889 : switch (nargs)
10976 : {
10977 25188905 : case 0:
10978 25188905 : ret = fold_builtin_0 (loc, fndecl);
10979 25188905 : break;
10980 16718447 : case 1:
10981 16718447 : ret = fold_builtin_1 (loc, expr, fndecl, args[0]);
10982 16718447 : break;
10983 18029253 : case 2:
10984 18029253 : ret = fold_builtin_2 (loc, expr, fndecl, args[0], args[1]);
10985 18029253 : break;
10986 7103640 : case 3:
10987 7103640 : ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
10988 7103640 : break;
10989 2469644 : default:
10990 2469644 : ret = fold_builtin_varargs (loc, fndecl, args, nargs);
10991 2469644 : break;
10992 : }
10993 69509889 : if (ret)
10994 : {
10995 8046996 : ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10996 8046996 : SET_EXPR_LOCATION (ret, loc);
10997 8046996 : return ret;
10998 : }
10999 : return NULL_TREE;
11000 : }
11001 :
11002 : /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11003 : list ARGS along with N new arguments in NEWARGS. SKIP is the number
11004 : of arguments in ARGS to be omitted. OLDNARGS is the number of
11005 : elements in ARGS. */
11006 :
11007 : static tree
11008 4 : rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
11009 : int skip, tree fndecl, int n, va_list newargs)
11010 : {
11011 4 : int nargs = oldnargs - skip + n;
11012 4 : tree *buffer;
11013 :
11014 4 : if (n > 0)
11015 : {
11016 0 : int i, j;
11017 :
11018 0 : buffer = XALLOCAVEC (tree, nargs);
11019 0 : for (i = 0; i < n; i++)
11020 0 : buffer[i] = va_arg (newargs, tree);
11021 0 : for (j = skip; j < oldnargs; j++, i++)
11022 0 : buffer[i] = args[j];
11023 : }
11024 : else
11025 4 : buffer = args + skip;
11026 :
11027 4 : return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
11028 : }
11029 :
11030 : /* Return true if FNDECL shouldn't be folded right now.
11031 : If a built-in function has an always_inline inline wrapper,
11032 : defer folding it until after always_inline functions have
11033 : been inlined; otherwise e.g. -D_FORTIFY_SOURCE checking
11034 : might not be performed. */
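 : /* For illustration (a typical scenario, not taken from the sources):
 :    with -D_FORTIFY_SOURCE glibc declares e.g. memcpy as an extern
 :    inline always_inline wrapper whose body calls __builtin___memcpy_chk;
 :    folding a call to such a built-in before the wrapper body has been
 :    inlined would bypass the object-size checking. */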
11035 :
11036 : bool
11037 146696567 : avoid_folding_inline_builtin (tree fndecl)
11038 : {
11039 146696567 : return (DECL_DECLARED_INLINE_P (fndecl)
11040 11977 : && DECL_DISREGARD_INLINE_LIMITS (fndecl)
11041 11935 : && cfun
11042 11935 : && !cfun->always_inline_functions_inlined
11043 146708502 : && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
11044 : }
11045 :
11046 : /* A wrapper function for builtin folding that prevents warnings for
11047 : "statement without effect" and the like, caused by removing the
11048 : call node earlier than the warning is generated. */
11049 :
11050 : tree
11051 223417856 : fold_call_expr (location_t loc, tree exp, bool ignore)
11052 : {
11053 223417856 : tree ret = NULL_TREE;
11054 223417856 : tree fndecl = get_callee_fndecl (exp);
11055 221142631 : if (fndecl && fndecl_built_in_p (fndecl)
11056 : /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
11057 : yet. Defer folding until we see all the arguments
11058 : (after inlining). */
11059 283210072 : && !CALL_EXPR_VA_ARG_PACK (exp))
11060 : {
11061 59792186 : int nargs = call_expr_nargs (exp);
11062 :
11063 : /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
11064 : instead last argument is __builtin_va_arg_pack (). Defer folding
11065 : even in that case, until arguments are finalized. */
11066 59792186 : if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
11067 : {
11068 360932 : tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
11069 360932 : if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
11070 : return NULL_TREE;
11071 : }
11072 :
11073 59792119 : if (avoid_folding_inline_builtin (fndecl))
11074 : return NULL_TREE;
11075 :
11076 59788584 : if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11077 69354816 : return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
11078 69354816 : CALL_EXPR_ARGP (exp), ignore);
11079 : else
11080 : {
11081 25111176 : tree *args = CALL_EXPR_ARGP (exp);
11082 25111176 : ret = fold_builtin_n (loc, exp, fndecl, args, nargs, ignore);
11083 25111176 : if (ret)
11084 : return ret;
11085 : }
11086 : }
11087 : return NULL_TREE;
11088 : }
11089 :
11090 : /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
11091 : N arguments are passed in the array ARGARRAY. Return a folded
11092 : expression or NULL_TREE if no simplification was possible. */
11093 :
11094 : tree
11095 72316787 : fold_builtin_call_array (location_t loc, tree,
11096 : tree fn,
11097 : int n,
11098 : tree *argarray)
11099 : {
11100 72316787 : if (TREE_CODE (fn) != ADDR_EXPR)
11101 : return NULL_TREE;
11102 :
11103 72316787 : tree fndecl = TREE_OPERAND (fn, 0);
11104 72316787 : if (TREE_CODE (fndecl) == FUNCTION_DECL
11105 72316787 : && fndecl_built_in_p (fndecl))
11106 : {
11107 : /* If last argument is __builtin_va_arg_pack (), arguments to this
11108 : function are not finalized yet. Defer folding until they are. */
11109 71723793 : if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
11110 : {
11111 129770 : tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
11112 129770 : if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
11113 : return NULL_TREE;
11114 : }
11115 71723765 : if (avoid_folding_inline_builtin (fndecl))
11116 : return NULL_TREE;
11117 71723765 : if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11118 33292669 : return targetm.fold_builtin (fndecl, n, argarray, false);
11119 : else
11120 38431096 : return fold_builtin_n (loc, NULL_TREE, fndecl, argarray, n, false);
11121 : }
11122 :
11123 : return NULL_TREE;
11124 : }
11125 :
11126 : /* Construct a new CALL_EXPR using the tail of the argument list of EXP
11127 : along with N new arguments specified as the "..." parameters. SKIP
11128 : is the number of arguments in EXP to be omitted. This function is used
11129 : to do varargs-to-varargs transformations. */
11130 :
11131 : static tree
11132 4 : rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
11133 : {
11134 4 : va_list ap;
11135 4 : tree t;
11136 :
11137 4 : va_start (ap, n);
11138 8 : t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
11139 4 : CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
11140 4 : va_end (ap);
11141 :
11142 4 : return t;
11143 : }
11144 :
11145 : /* Validate a single argument ARG against a tree code CODE representing
11146 : a type. Return true when argument is valid. */
11147 :
11148 : static bool
11149 15191713 : validate_arg (const_tree arg, enum tree_code code)
11150 : {
11151 15191713 : if (!arg)
11152 : return false;
11153 15191690 : else if (code == POINTER_TYPE)
11154 6844046 : return POINTER_TYPE_P (TREE_TYPE (arg));
11155 8347644 : else if (code == INTEGER_TYPE)
11156 3634742 : return INTEGRAL_TYPE_P (TREE_TYPE (arg));
11157 4712902 : return code == TREE_CODE (TREE_TYPE (arg));
11158 : }
11159 :
11160 : /* This function validates the types of a function call argument list
11161 : against a specified list of tree_codes. If the last specifier is a 0,
11162 : that represents an ellipsis; otherwise the last specifier must be a
11163 : VOID_TYPE.
11164 :
11165 : This is the GIMPLE version of validate_arglist. Eventually we want to
11166 : completely convert builtins.cc to work from GIMPLE and the tree-based
11167 : validate_arglist will then be removed. */
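 : /* For illustration (a hypothetical call, not from the sources):
 :    validate_gimple_arglist (call, REAL_TYPE, POINTER_TYPE, VOID_TYPE)
 :    returns true iff CALL has exactly two arguments, the first of real
 :    and the second of pointer type. */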
11168 :
11169 : bool
11170 93 : validate_gimple_arglist (const gcall *call, ...)
11171 : {
11172 93 : enum tree_code code;
11173 93 : bool res = 0;
11174 93 : va_list ap;
11175 93 : const_tree arg;
11176 93 : size_t i;
11177 :
11178 93 : va_start (ap, call);
11179 93 : i = 0;
11180 :
11181 372 : do
11182 : {
11183 372 : code = (enum tree_code) va_arg (ap, int);
11184 372 : switch (code)
11185 : {
11186 0 : case 0:
11187 : /* This signifies an ellipsis; any further arguments are all ok. */
11188 0 : res = true;
11189 0 : goto end;
11190 93 : case VOID_TYPE:
11191 : /* This signifies an endlink, if no arguments remain, return
11192 : true, otherwise return false. */
11193 93 : res = (i == gimple_call_num_args (call));
11194 93 : goto end;
11195 279 : default:
11196 : /* If no parameters remain or the parameter's code does not
11197 : match the specified code, return false. Otherwise continue
11198 : checking any remaining arguments. */
11199 279 : arg = gimple_call_arg (call, i++);
11200 279 : if (!validate_arg (arg, code))
11201 0 : goto end;
11202 : break;
11203 : }
11204 : }
11205 : while (1);
11206 :
11207 : /* We need gotos here since we can only have one VA_CLOSE in a
11208 : function. */
11209 93 : end: ;
11210 93 : va_end (ap);
11211 :
11212 93 : return res;
11213 : }
11214 :
11215 : /* Default target-specific builtin expander that does nothing. */
11216 :
11217 : rtx
11218 0 : default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
11219 : rtx target ATTRIBUTE_UNUSED,
11220 : rtx subtarget ATTRIBUTE_UNUSED,
11221 : machine_mode mode ATTRIBUTE_UNUSED,
11222 : int ignore ATTRIBUTE_UNUSED)
11223 : {
11224 0 : return NULL_RTX;
11225 : }
11226 :
11227 : /* Returns true if EXP represents data that would potentially reside
11228 : in a readonly section. */
11229 :
11230 : bool
11231 203354 : readonly_data_expr (tree exp)
11232 : {
11233 203354 : STRIP_NOPS (exp);
11234 :
11235 203354 : if (TREE_CODE (exp) != ADDR_EXPR)
11236 : return false;
11237 :
11238 25379 : exp = get_base_address (TREE_OPERAND (exp, 0));
11239 25379 : if (!exp)
11240 : return false;
11241 :
11242 : /* Make sure we call decl_readonly_section only for trees it
11243 : can handle (since it returns true for everything it doesn't
11244 : understand). */
11245 25379 : if (TREE_CODE (exp) == STRING_CST
11246 5837 : || TREE_CODE (exp) == CONSTRUCTOR
11247 5837 : || (VAR_P (exp) && TREE_STATIC (exp)))
11248 22232 : return decl_readonly_section (exp, 0);
11249 : else
11250 : return false;
11251 : }
11252 :
11253 : /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11254 : to the call, and TYPE is its return type.
11255 :
11256 : Return NULL_TREE if no simplification was possible, otherwise return the
11257 : simplified form of the call as a tree.
11258 :
11259 : The simplified form may be a constant or other expression which
11260 : computes the same value, but in a more efficient manner (including
11261 : calls to other builtin functions).
11262 :
11263 : The call may contain arguments which need to be evaluated, but
11264 : which are not useful to determine the result of the call. In
11265 : this case we return a chain of COMPOUND_EXPRs. The LHS of each
11266 : COMPOUND_EXPR will be an argument which must be evaluated.
11267 : COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11268 : COMPOUND_EXPR in the chain will contain the tree for the simplified
11269 : form of the builtin function call. */
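 : /* For example (restating the cases handled below):
 :    strpbrk ("abc", "bd") folds to the constant "abc" + 1,
 :    strpbrk (s1, "") folds to NULL with S1 still evaluated, and
 :    strpbrk (s1, "c") is rewritten as strchr (s1, 'c'). */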
11270 :
11271 : static tree
11272 76983 : fold_builtin_strpbrk (location_t loc, tree, tree s1, tree s2, tree type)
11273 : {
11274 76983 : if (!validate_arg (s1, POINTER_TYPE)
11275 76983 : || !validate_arg (s2, POINTER_TYPE))
11276 : return NULL_TREE;
11277 :
11278 76983 : tree fn;
11279 76983 : const char *p1, *p2;
11280 :
11281 76983 : p2 = c_getstr (s2);
11282 76983 : if (p2 == NULL)
11283 : return NULL_TREE;
11284 :
11285 92 : p1 = c_getstr (s1);
11286 92 : if (p1 != NULL)
11287 : {
11288 22 : const char *r = strpbrk (p1, p2);
11289 22 : tree tem;
11290 :
11291 22 : if (r == NULL)
11292 0 : return build_int_cst (TREE_TYPE (s1), 0);
11293 :
11294 : /* Return an offset into the constant string argument. */
11295 22 : tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11296 22 : return fold_convert_loc (loc, type, tem);
11297 : }
11298 :
11299 70 : if (p2[0] == '\0')
11300 : /* strpbrk(x, "") == NULL.
11301 : Evaluate and ignore s1 in case it had side-effects. */
11302 26 : return omit_one_operand_loc (loc, type, integer_zero_node, s1);
11303 :
11304 44 : if (p2[1] != '\0')
11305 : return NULL_TREE; /* Really call strpbrk. */
11306 :
11307 76935 : fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11308 41 : if (!fn)
11309 : return NULL_TREE;
11310 :
11311 : /* New argument list transforming strpbrk(s1, s2) to
11312 : strchr(s1, s2[0]). */
11313 41 : return build_call_expr_loc (loc, fn, 2, s1,
11314 41 : build_int_cst (integer_type_node, p2[0]));
11315 : }
11316 :
11317 : /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11318 : to the call.
11319 :
11320 : Return NULL_TREE if no simplification was possible, otherwise return the
11321 : simplified form of the call as a tree.
11322 :
11323 : The simplified form may be a constant or other expression which
11324 : computes the same value, but in a more efficient manner (including
11325 : calls to other builtin functions).
11326 :
11327 : The call may contain arguments which need to be evaluated, but
11328 : which are not useful to determine the result of the call. In
11329 : this case we return a chain of COMPOUND_EXPRs. The LHS of each
11330 : COMPOUND_EXPR will be an argument which must be evaluated.
11331 : COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11332 : COMPOUND_EXPR in the chain will contain the tree for the simplified
11333 : form of the builtin function call. */
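 : /* The only simplification performed below: when either argument is
 :    known to be "", e.g. strspn (s1, ""), the result is (size_t) 0 and
 :    both arguments are still evaluated for their side effects. */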
11334 :
11335 : static tree
11336 2560 : fold_builtin_strspn (location_t loc, tree expr, tree s1, tree s2, tree type)
11337 : {
11338 2560 : if (!validate_arg (s1, POINTER_TYPE)
11339 2560 : || !validate_arg (s2, POINTER_TYPE))
11340 : return NULL_TREE;
11341 :
11342 2560 : if (!check_nul_terminated_array (expr, s1)
11343 2560 : || !check_nul_terminated_array (expr, s2))
11344 58 : return NULL_TREE;
11345 :
11346 2502 : const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11347 :
11348 : /* If either argument is "", fold the call to (size_t) 0. */
11349 2502 : if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11350 : /* Evaluate and ignore both arguments in case either one has
11351 : side-effects. */
11352 147 : return omit_two_operands_loc (loc, type, size_zero_node, s1, s2);
11353 : return NULL_TREE;
11354 : }
11355 :
11356 : /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11357 : to the call.
11358 :
11359 : Return NULL_TREE if no simplification was possible, otherwise return the
11360 : simplified form of the call as a tree.
11361 :
11362 : The simplified form may be a constant or other expression which
11363 : computes the same value, but in a more efficient manner (including
11364 : calls to other builtin functions).
11365 :
11366 : The call may contain arguments which need to be evaluated, but
11367 : which are not useful to determine the result of the call. In
11368 : this case we return a chain of COMPOUND_EXPRs. The LHS of each
11369 : COMPOUND_EXPR will be an argument which must be evaluated.
11370 : COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11371 : COMPOUND_EXPR in the chain will contain the tree for the simplified
11372 : form of the builtin function call. */
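 : /* For example (restating the cases handled below):
 :    strcspn ("", s2) folds to (size_t) 0 with S2 still evaluated, and
 :    strcspn (s1, "") is rewritten as strlen (s1). */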
11373 :
11374 : static tree
11375 2465 : fold_builtin_strcspn (location_t loc, tree expr, tree s1, tree s2, tree type)
11376 : {
11377 2465 : if (!validate_arg (s1, POINTER_TYPE)
11378 2465 : || !validate_arg (s2, POINTER_TYPE))
11379 : return NULL_TREE;
11380 :
11381 2465 : if (!check_nul_terminated_array (expr, s1)
11382 2465 : || !check_nul_terminated_array (expr, s2))
11383 58 : return NULL_TREE;
11384 :
11385 : /* If the first argument is "", fold the call to (size_t) 0. */
11386 2407 : const char *p1 = c_getstr (s1);
11387 2407 : if (p1 && *p1 == '\0')
11388 : {
11389 : /* Evaluate and ignore argument s2 in case it has
11390 : side-effects. */
11391 65 : return omit_one_operand_loc (loc, type, size_zero_node, s2);
11392 : }
11393 :
11394 : /* If the second argument is "", return __builtin_strlen(s1). */
11395 2342 : const char *p2 = c_getstr (s2);
11396 2342 : if (p2 && *p2 == '\0')
11397 : {
11398 2400 : tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11399 :
11400 : /* If the replacement _DECL isn't initialized, don't do the
11401 : transformation. */
11402 81 : if (!fn)
11403 : return NULL_TREE;
11404 :
11405 81 : return fold_convert_loc (loc, type,
11406 81 : build_call_expr_loc (loc, fn, 1, s1));
11407 : }
11408 : return NULL_TREE;
11409 : }
11410 :
11411 : /* Fold the next_arg or va_start call EXP. Returns true if an error
11412 : was produced, false otherwise. This is done so that we don't output
11413 : the error or warning twice or three times. */
11414 :
11415 : bool
11416 41950 : fold_builtin_next_arg (tree exp, bool va_start_p)
11417 : {
11418 41950 : tree fntype = TREE_TYPE (current_function_decl);
11419 41950 : int nargs = call_expr_nargs (exp);
11420 41950 : tree arg;
11421 : /* There is a good chance the current input_location points inside the
11422 : definition of the va_start macro (perhaps on the token for
11423 : builtin) in a system header, so warnings will not be emitted.
11424 : Use the location in real source code. */
11425 41950 : location_t current_location =
11426 41950 : linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
11427 : NULL);
11428 :
11429 41950 : if (!stdarg_p (fntype))
11430 : {
11431 8 : error ("%<va_start%> used in function with fixed arguments");
11432 8 : return true;
11433 : }
11434 :
11435 41942 : if (va_start_p)
11436 : {
11437 41784 : if (va_start_p && (nargs != 2))
11438 : {
11439 0 : error ("wrong number of arguments to function %<va_start%>");
11440 0 : return true;
11441 : }
11442 41784 : arg = CALL_EXPR_ARG (exp, 1);
11443 : }
11444 : /* We use __builtin_va_start (ap, 0) or __builtin_next_arg (0) once
11445 : we have checked the arguments and, if needed, issued a warning. */
11446 : else
11447 : {
11448 158 : if (nargs == 0)
11449 : {
11450 : /* Evidently an out-of-date version of <stdarg.h>; can't validate
11451 : va_start's second argument, but can still work as intended. */
11452 0 : warning_at (current_location,
11453 0 : OPT_Wvarargs,
11454 : "%<__builtin_next_arg%> called without an argument");
11455 0 : return true;
11456 : }
11457 158 : else if (nargs > 1)
11458 : {
11459 0 : error ("wrong number of arguments to function %<__builtin_next_arg%>");
11460 0 : return true;
11461 : }
11462 158 : arg = CALL_EXPR_ARG (exp, 0);
11463 : }
11464 :
11465 41942 : if (TREE_CODE (arg) == SSA_NAME
11466 41942 : && SSA_NAME_VAR (arg))
11467 : arg = SSA_NAME_VAR (arg);
11468 :
11469 : /* We destructively modify the call to be __builtin_va_start (ap, 0)
11470 : or __builtin_next_arg (0) the first time we see it, after checking
11471 : the arguments and if needed issuing a warning. */
11472 41942 : if (!integer_zerop (arg))
11473 : {
11474 7025 : tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11475 :
11476 : /* Strip off all nops for the sake of the comparison. This
11477 : is not quite the same as STRIP_NOPS. It does more.
11478 : We must also strip off INDIRECT_EXPR for C++ reference
11479 : parameters. */
11480 14052 : while (CONVERT_EXPR_P (arg)
11481 14063 : || INDIRECT_REF_P (arg))
11482 11 : arg = TREE_OPERAND (arg, 0);
11483 7025 : if (arg != last_parm)
11484 : {
11485 : /* FIXME: Sometimes the tree optimizers hand us something other
11486 : than the last argument even though the user did use the last
11487 : argument. We just warn and proceed as if it were the last
11488 : argument, so we may generate wrong code because of it. */
11490 12 : warning_at (current_location,
11491 12 : OPT_Wvarargs,
11492 : "second parameter of %<va_start%> not last named argument");
11493 : }
11494 :
11495 : /* Undefined by C99 7.15.1.4p4 (va_start):
11496 : "If the parameter parmN is declared with the register storage
11497 : class, with a function or array type, or with a type that is
11498 : not compatible with the type that results after application of
11499 : the default argument promotions, the behavior is undefined."
11500 : */
11501 7013 : else if (DECL_REGISTER (arg))
11502 : {
11503 12 : warning_at (current_location,
11504 12 : OPT_Wvarargs,
11505 : "undefined behavior when second parameter of "
11506 : "%<va_start%> is declared with %<register%> storage");
11507 : }
11508 :
11509 : /* We want to verify the second parameter just once before the tree
11510 : optimizers are run and then avoid keeping it in the tree,
11511 : as otherwise we could warn even for correct code like:
11512 : void foo (int i, ...)
11513 : { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11514 7025 : if (va_start_p)
11515 7023 : CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11516 : else
11517 2 : CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11518 : }
11519 : return false;
11520 : }
11521 :
11522 :
11523 : /* Expand a call EXP to __builtin_object_size. */
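 : /* By this point earlier folding was unable to determine the size, so
 :    e.g. __builtin_object_size (p, 0) expands to (size_t) -1 while
 :    __builtin_object_size (p, 2) expands to (size_t) 0, the documented
 :    "unknown size" results for the respective object-size types. */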
11524 :
11525 : static rtx
11526 618 : expand_builtin_object_size (tree exp)
11527 : {
11528 618 : tree ost;
11529 618 : int object_size_type;
11530 618 : tree fndecl = get_callee_fndecl (exp);
11531 :
11532 618 : if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11533 : {
11534 0 : error ("first argument of %qD must be a pointer, second integer constant",
11535 : fndecl);
11536 0 : expand_builtin_trap ();
11537 0 : return const0_rtx;
11538 : }
11539 :
11540 618 : ost = CALL_EXPR_ARG (exp, 1);
11541 618 : STRIP_NOPS (ost);
11542 :
11543 618 : if (TREE_CODE (ost) != INTEGER_CST
11544 618 : || tree_int_cst_sgn (ost) < 0
11545 1236 : || compare_tree_int (ost, 3) > 0)
11546 : {
11547 0 : error ("last argument of %qD is not integer constant between 0 and 3",
11548 : fndecl);
11549 0 : expand_builtin_trap ();
11550 0 : return const0_rtx;
11551 : }
11552 :
11553 618 : object_size_type = tree_to_shwi (ost);
11554 :
11555 618 : return object_size_type < 2 ? constm1_rtx : const0_rtx;
11556 : }
11557 :
11558 : /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11559 : FCODE is the BUILT_IN_* to use.
11560 : Return NULL_RTX if we failed; the caller should emit a normal call,
11561 : otherwise try to get the result in TARGET, if convenient (and in
11562 : mode MODE if that's convenient). */
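 : /* For illustration (hypothetical operands, not from the sources): with
 :    constant sizes that pass the check, e.g.
 :    __builtin___memcpy_chk (d, s, 16, 32), the call is expanded as plain
 :    memcpy (d, s, 16); when the constant SIZE is smaller than LEN the
 :    detected overflow makes us keep the checking call instead. */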
11563 :
11564 : static rtx
11565 837 : expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
11566 : enum built_in_function fcode)
11567 : {
11568 1410 : if (!validate_arglist (exp,
11569 : POINTER_TYPE,
11570 : fcode == BUILT_IN_MEMSET_CHK
11571 : ? INTEGER_TYPE : POINTER_TYPE,
11572 : INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11573 : return NULL_RTX;
11574 :
11575 837 : tree dest = CALL_EXPR_ARG (exp, 0);
11576 837 : tree src = CALL_EXPR_ARG (exp, 1);
11577 837 : tree len = CALL_EXPR_ARG (exp, 2);
11578 837 : tree size = CALL_EXPR_ARG (exp, 3);
11579 :
11580 : /* FIXME: Set access mode to write only for memset et al. */
11581 837 : bool sizes_ok = check_access (exp, len, /*maxread=*/NULL_TREE,
11582 : /*srcstr=*/NULL_TREE, size, access_read_write);
11583 :
11584 837 : if (!tree_fits_uhwi_p (size))
11585 : return NULL_RTX;
11586 :
11587 627 : if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
11588 : {
11589 : /* Avoid transforming the checking call to an ordinary one when
11590 : an overflow has been detected or when the call couldn't be
11591 : validated because the size is not constant. */
11592 186 : if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
11593 : return NULL_RTX;
11594 :
11595 0 : tree fn = NULL_TREE;
11596 : /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11597 : mem{cpy,pcpy,move,set} is available. */
11598 0 : switch (fcode)
11599 : {
11600 0 : case BUILT_IN_MEMCPY_CHK:
11601 0 : fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
11602 0 : break;
11603 0 : case BUILT_IN_MEMPCPY_CHK:
11604 0 : fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
11605 0 : break;
11606 0 : case BUILT_IN_MEMMOVE_CHK:
11607 0 : fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
11608 0 : break;
11609 0 : case BUILT_IN_MEMSET_CHK:
11610 0 : fn = builtin_decl_explicit (BUILT_IN_MEMSET);
11611 0 : break;
11612 : default:
11613 : break;
11614 : }
11615 :
11616 0 : if (! fn)
11617 : return NULL_RTX;
11618 :
11619 0 : fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
11620 0 : gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11621 0 : CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11622 0 : return expand_expr (fn, target, mode, EXPAND_NORMAL);
11623 : }
11624 441 : else if (fcode == BUILT_IN_MEMSET_CHK)
11625 : return NULL_RTX;
11626 : else
11627 : {
11628 293 : unsigned int dest_align = get_pointer_alignment (dest);
11629 :
11630 : /* If DEST is not a pointer type, call the normal function. */
11631 293 : if (dest_align == 0)
11632 : return NULL_RTX;
11633 :
11634 : /* If SRC and DEST are the same (and not volatile), do nothing. */
11635 293 : if (operand_equal_p (src, dest, 0))
11636 : {
11637 0 : tree expr;
11638 :
11639 0 : if (fcode != BUILT_IN_MEMPCPY_CHK)
11640 : {
11641 : /* Evaluate and ignore LEN in case it has side-effects. */
11642 0 : expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11643 0 : return expand_expr (dest, target, mode, EXPAND_NORMAL);
11644 : }
11645 :
11646 0 : expr = fold_build_pointer_plus (dest, len);
11647 0 : return expand_expr (expr, target, mode, EXPAND_NORMAL);
11648 : }
11649 :
11650 : /* __memmove_chk special case. */
11651 293 : if (fcode == BUILT_IN_MEMMOVE_CHK)
11652 : {
11653 79 : unsigned int src_align = get_pointer_alignment (src);
11654 :
11655 79 : if (src_align == 0)
11656 : return NULL_RTX;
11657 :
11658 : /* If src is categorized for a readonly section we can use
11659 : normal __memcpy_chk. */
11660 79 : if (readonly_data_expr (src))
11661 : {
11662 15 : tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
11663 15 : if (!fn)
11664 : return NULL_RTX;
11665 15 : fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
11666 : dest, src, len, size);
11667 15 : gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11668 15 : CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11669 15 : return expand_expr (fn, target, mode, EXPAND_NORMAL);
11670 : }
11671 : }
11672 278 : return NULL_RTX;
11673 : }
11674 : }
11675 :
11676 : /* Emit warning if a buffer overflow is detected at compile time. */
11677 :
11678 : static void
11679 1135 : maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11680 : {
11681 : /* The source string. */
11682 1135 : tree srcstr = NULL_TREE;
11683 : /* The size of the destination object returned by __builtin_object_size. */
11684 1135 : tree objsize = NULL_TREE;
11685 : /* The string that is being concatenated with (as in __strcat_chk)
11686 : or null if it isn't. */
11687 1135 : tree catstr = NULL_TREE;
11688 : /* The maximum length of the source sequence in a bounded operation
11689 : (such as __strncat_chk) or null if the operation isn't bounded
11690 : (such as __strcat_chk). */
11691 1135 : tree maxread = NULL_TREE;
11692 : /* The exact size of the access (such as in __strncpy_chk). */
11693 1135 : tree size = NULL_TREE;
11694 : /* The access by the function that's checked. Except for snprintf
11695 : both writing and reading is checked. */
11696 1135 : access_mode mode = access_read_write;
11697 :
11698 1135 : switch (fcode)
11699 : {
11700 276 : case BUILT_IN_STRCPY_CHK:
11701 276 : case BUILT_IN_STPCPY_CHK:
11702 276 : srcstr = CALL_EXPR_ARG (exp, 1);
11703 276 : objsize = CALL_EXPR_ARG (exp, 2);
11704 276 : break;
11705 :
11706 198 : case BUILT_IN_STRCAT_CHK:
11707 : /* For __strcat_chk the warning will be emitted only if overflowing
11708 : by at least strlen (dest) + 1 bytes. */
11709 198 : catstr = CALL_EXPR_ARG (exp, 0);
11710 198 : srcstr = CALL_EXPR_ARG (exp, 1);
11711 198 : objsize = CALL_EXPR_ARG (exp, 2);
11712 198 : break;
11713 :
11714 109 : case BUILT_IN_STRNCAT_CHK:
11715 109 : catstr = CALL_EXPR_ARG (exp, 0);
11716 109 : srcstr = CALL_EXPR_ARG (exp, 1);
11717 109 : maxread = CALL_EXPR_ARG (exp, 2);
11718 109 : objsize = CALL_EXPR_ARG (exp, 3);
11719 109 : break;
11720 :
11721 243 : case BUILT_IN_STRNCPY_CHK:
11722 243 : case BUILT_IN_STPNCPY_CHK:
11723 243 : srcstr = CALL_EXPR_ARG (exp, 1);
11724 243 : size = CALL_EXPR_ARG (exp, 2);
11725 243 : objsize = CALL_EXPR_ARG (exp, 3);
11726 243 : break;
11727 :
11728 309 : case BUILT_IN_SNPRINTF_CHK:
11729 309 : case BUILT_IN_VSNPRINTF_CHK:
11730 309 : maxread = CALL_EXPR_ARG (exp, 1);
11731 309 : objsize = CALL_EXPR_ARG (exp, 3);
11732 : /* The only checked access is the write to the destination. */
11733 309 : mode = access_write_only;
11734 309 : break;
11735 0 : default:
11736 0 : gcc_unreachable ();
11737 : }
11738 :
11739 1135 : if (catstr && maxread)
11740 : {
11741 : /* Check __strncat_chk. There is no way to determine the length
11742 : of the string to which the source string is being appended so
11743 : just warn when the length of the source string is not known. */
11744 109 : check_strncat_sizes (exp, objsize);
11745 109 : return;
11746 : }
11747 :
11748 1026 : check_access (exp, size, maxread, srcstr, objsize, mode);
11749 : }
11750 :
11751 : /* Emit warning if a buffer overflow is detected at compile time
11752 : in __sprintf_chk/__vsprintf_chk calls. */
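 : /* For illustration (hypothetical operands, not from the sources): in
 :    __builtin___sprintf_chk (buf, 0, 4, "abcde") the format contains no
 :    %, so LEN below is 5, plus one for the terminating nul, and
 :    check_access diagnoses writing 6 bytes into a 4-byte destination. */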
11753 :
11754 : static void
11755 1329 : maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11756 : {
11757 1329 : tree size, len, fmt;
11758 1329 : const char *fmt_str;
11759 1329 : int nargs = call_expr_nargs (exp);
11760 :
11761 : /* Verify the required arguments in the original call. */
11762 :
11763 1329 : if (nargs < 4)
11764 : return;
11765 1329 : size = CALL_EXPR_ARG (exp, 2);
11766 1329 : fmt = CALL_EXPR_ARG (exp, 3);
11767 :
11768 1329 : if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11769 21 : return;
11770 :
11771 : /* Check whether the format is a literal string constant. */
11772 1308 : fmt_str = c_getstr (fmt);
11773 1308 : if (fmt_str == NULL)
11774 : return;
11775 :
11776 1272 : if (!init_target_chars ())
11777 : return;
11778 :
11779 : /* If the format doesn't contain % args or %%, we know its size. */
11780 1272 : if (strchr (fmt_str, target_percent) == 0)
11781 22 : len = build_int_cstu (size_type_node, strlen (fmt_str));
11782 : /* If the format is "%s" and first ... argument is a string literal,
11783 : we know it too. */
11784 1250 : else if (fcode == BUILT_IN_SPRINTF_CHK
11785 1094 : && strcmp (fmt_str, target_percent_s) == 0)
11786 : {
11787 49 : tree arg;
11788 :
11789 49 : if (nargs < 5)
11790 : return;
11791 49 : arg = CALL_EXPR_ARG (exp, 4);
11792 49 : if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11793 : return;
11794 :
11795 45 : len = c_strlen (arg, 1);
11796 45 : if (!len || ! tree_fits_uhwi_p (len))
11797 : return;
11798 : }
11799 : else
11800 : return;
11801 :
11802 : /* Add one for the terminating nul. */
11803 34 : len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
11804 :
11805 34 : check_access (exp, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, len, size,
11806 : access_write_only);
11807 : }
11808 :
11809 : /* Fold a call to __builtin_object_size with arguments PTR and OST,
11810 : if possible. */
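 : /* For illustration (hypothetical declaration, not from the sources):
 :    given char buf[64], __builtin_object_size (&buf[16], 0) folds via
 :    compute_builtin_object_size below to (size_t) 48. */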
11811 :
11812 : static tree
11813 200163 : fold_builtin_object_size (tree ptr, tree ost, enum built_in_function fcode)
11814 : {
11815 200163 : tree bytes;
11816 200163 : int object_size_type;
11817 :
11818 200163 : if (!validate_arg (ptr, POINTER_TYPE)
11819 200163 : || !validate_arg (ost, INTEGER_TYPE))
11820 : return NULL_TREE;
11821 :
11822 200163 : STRIP_NOPS (ost);
11823 :
11824 200163 : if (TREE_CODE (ost) != INTEGER_CST
11825 200163 : || tree_int_cst_sgn (ost) < 0
11826 400326 : || compare_tree_int (ost, 3) > 0)
11827 0 : return NULL_TREE;
11828 :
11829 200163 : object_size_type = tree_to_shwi (ost);
11830 :
11831 : /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11832 : if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11833 : and (size_t) 0 for types 2 and 3. */
11834 200163 : if (TREE_SIDE_EFFECTS (ptr))
11835 603 : return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11836 :
11837 199560 : if (fcode == BUILT_IN_DYNAMIC_OBJECT_SIZE)
11838 66871 : object_size_type |= OST_DYNAMIC;
11839 :
11840 199560 : if (TREE_CODE (ptr) == ADDR_EXPR)
11841 : {
11842 12035 : compute_builtin_object_size (ptr, object_size_type, &bytes);
11843 12035 : if ((object_size_type & OST_DYNAMIC)
11844 10852 : || int_fits_type_p (bytes, size_type_node))
11845 12035 : return fold_convert (size_type_node, bytes);
11846 : }
11847 187525 : else if (TREE_CODE (ptr) == SSA_NAME)
11848 : {
11849 : /* If object size is not known yet, delay folding until
11850 : later. Maybe subsequent passes will help determining
11851 : it. */
11852 102188 : if (compute_builtin_object_size (ptr, object_size_type, &bytes)
11853 102188 : && ((object_size_type & OST_DYNAMIC)
11854 2544 : || int_fits_type_p (bytes, size_type_node)))
11855 4830 : return fold_convert (size_type_node, bytes);
11856 : }
11857 :
11858 : return NULL_TREE;
11859 : }
11860 :
11861 : /* Builtins with folding operations that operate on "..." arguments
11862 : need special handling; we need to store the arguments in a convenient
11863 : data structure before attempting any folding. Fortunately there are
11864 : only a few builtins that fall into this category. FNDECL is the
11865 : function, EXP is the CALL_EXPR for the call. */
11866 :
11867 : static tree
11868 2469644 : fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
11869 : {
11870 2469644 : enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11871 2469644 : tree ret = NULL_TREE;
11872 :
11873 2469644 : switch (fcode)
11874 : {
11875 95952 : case BUILT_IN_FPCLASSIFY:
11876 95952 : ret = fold_builtin_fpclassify (loc, args, nargs);
11877 95952 : break;
11878 :
11879 54 : case BUILT_IN_ADDC:
11880 54 : case BUILT_IN_ADDCL:
11881 54 : case BUILT_IN_ADDCLL:
11882 54 : case BUILT_IN_SUBC:
11883 54 : case BUILT_IN_SUBCL:
11884 54 : case BUILT_IN_SUBCLL:
11885 54 : return fold_builtin_addc_subc (loc, fcode, args);
11886 :
11887 : default:
11888 : break;
11889 : }
11890 95952 : if (ret)
11891 : {
11892 95952 : ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11893 95952 : SET_EXPR_LOCATION (ret, loc);
11894 95952 : suppress_warning (ret);
11895 95952 : return ret;
11896 : }
11897 : return NULL_TREE;
11898 : }
11899 :
11900 : /* Initialize format string characters in the target charset. */
11901 :
11902 : bool
11903 276469 : init_target_chars (void)
11904 : {
11905 276469 : static bool init;
11906 276469 : if (!init)
11907 : {
11908 112496 : target_newline = lang_hooks.to_target_charset ('\n');
11909 112496 : target_percent = lang_hooks.to_target_charset ('%');
11910 112496 : target_c = lang_hooks.to_target_charset ('c');
11911 112496 : target_s = lang_hooks.to_target_charset ('s');
11912 112496 : if (target_newline == 0 || target_percent == 0 || target_c == 0
11913 112496 : || target_s == 0)
11914 : return false;
11915 :
11916 112496 : target_percent_c[0] = target_percent;
11917 112496 : target_percent_c[1] = target_c;
11918 112496 : target_percent_c[2] = '\0';
11919 :
11920 112496 : target_percent_s[0] = target_percent;
11921 112496 : target_percent_s[1] = target_s;
11922 112496 : target_percent_s[2] = '\0';
11923 :
11924 112496 : target_percent_s_newline[0] = target_percent;
11925 112496 : target_percent_s_newline[1] = target_s;
11926 112496 : target_percent_s_newline[2] = target_newline;
11927 112496 : target_percent_s_newline[3] = '\0';
11928 :
11929 112496 : init = true;
11930 : }
11931 : return true;
11932 : }
11933 :
11934 : /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
11935 : and no overflow/underflow occurred. INEXACT is true if M was not
11936 : exactly calculated. TYPE is the tree type for the result. This
11937 : function assumes that you cleared the MPFR flags and then
11938 : calculated M to see if anything subsequently set a flag prior to
11939 : entering this function. Return NULL_TREE if any checks fail. */
11940 :
11941 : static tree
11942 2979 : do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
11943 : {
11944 : /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11945 : overflow/underflow occurred. If -frounding-math, proceed iff the
11946 : result of calling FUNC was exact. */
11947 4832 : if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
11948 4832 : && (!flag_rounding_math || !inexact))
11949 : {
11950 1853 : REAL_VALUE_TYPE rr;
11951 :
11952 1853 : real_from_mpfr (&rr, m, type, MPFR_RNDN);
11953 : /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
11954 : check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11955 : but the mpfr_t is not, then we underflowed in the
11956 : conversion. */
11957 1853 : if (real_isfinite (&rr)
11958 1853 : && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
11959 : {
11960 1853 : REAL_VALUE_TYPE rmode;
11961 :
11962 1853 : real_convert (&rmode, TYPE_MODE (type), &rr);
11963 : /* Proceed iff the specified mode can hold the value. */
11964 1853 : if (real_identical (&rmode, &rr))
11965 1853 : return build_real (type, rmode);
11966 : }
11967 : }
11968 : return NULL_TREE;
11969 : }
11970 :
11971 : /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
11972 : number and no overflow/underflow occurred. INEXACT is true if M
11973 : was not exactly calculated. TYPE is the tree type for the result.
11974 : This function assumes that you cleared the MPFR flags and then
11975 : calculated M to see if anything subsequently set a flag prior to
11976 : entering this function. Return NULL_TREE if any checks fail, if
11977 : FORCE_CONVERT is true, then bypass the checks. */
11978 :
11979 : static tree
11980 4129 : do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
11981 : {
11982 : /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11983 : overflow/underflow occurred. If -frounding-math, proceed iff the
11984 : result of calling FUNC was exact. */
11985 4129 : if (force_convert
11986 4129 : || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
11987 3810 : && !mpfr_overflow_p () && !mpfr_underflow_p ()
11988 3810 : && (!flag_rounding_math || !inexact)))
11989 : {
11990 3950 : REAL_VALUE_TYPE re, im;
11991 :
11992 3950 : real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), MPFR_RNDN);
11993 3950 : real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), MPFR_RNDN);
11994 : /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
11995 : check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11996 : but the mpfr_t is not, then we underflowed in the
11997 : conversion. */
11998 3950 : if (force_convert
11999 3950 : || (real_isfinite (&re) && real_isfinite (&im)
12000 3810 : && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
12001 3810 : && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
12002 : {
12003 3950 : REAL_VALUE_TYPE re_mode, im_mode;
12004 :
12005 3950 : real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
12006 3950 : real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
12007 : /* Proceed iff the specified mode can hold the value. */
12008 3950 : if (force_convert
12009 3950 : || (real_identical (&re_mode, &re)
12010 3810 : && real_identical (&im_mode, &im)))
12011 3950 : return build_complex (type, build_real (TREE_TYPE (type), re_mode),
12012 7900 : build_real (TREE_TYPE (type), im_mode));
12013 : }
12014 : }
12015 : return NULL_TREE;
12016 : }
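
/* Illustrative sketch (added for exposition, not part of GCC): the same
   protocol for complex results, using the public MPC API; MPC raises
   the underlying MPFR flags.  The function name is hypothetical.  */
#if 0
#include <mpc.h>

static void
demo_mpc_protocol (void)
{
  mpc_t m;
  mpc_init2 (m, 53);
  mpc_set_d_d (m, 0.0, 1.0, MPC_RNDNN);     /* m = i */
  mpfr_clear_flags ();
  int inexact = mpc_exp (m, m, MPC_RNDNN);  /* e^i, rounded to nearest */
  /* do_mpc_ckconv (m, type, inexact, 0) would validate the flags and
     build a COMPLEX_CST; passing force_convert = 1 skips the checks.  */
  mpc_clear (m);
}
#endif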
12017 :
12018 : /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
12019 : the value pointed to by ARG_QUO and return the remainder. The type is taken
12020 : from the type of ARG0 and is used for setting the precision of the
12021 : calculation and results. */
12022 :
12023 : static tree
12024 69269 : do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
12025 : {
12026 69269 : tree const type = TREE_TYPE (arg0);
12027 69269 : tree result = NULL_TREE;
12028 :
12029 69269 : STRIP_NOPS (arg0);
12030 69269 : STRIP_NOPS (arg1);
12031 :
12032 : /* To proceed, MPFR must exactly represent the target floating point
12033 : format, which only happens when the target base equals two. */
12034 69269 : if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12035 69269 : && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
12036 71574 : && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
12037 : {
12038 2305 : const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
12039 2305 : const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
12040 :
12041 2305 : if (real_isfinite (ra0) && real_isfinite (ra1))
12042 : {
12043 2305 : const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12044 2305 : const int prec = fmt->p;
12045 2305 : const mpfr_rnd_t rnd = fmt->round_towards_zero ? MPFR_RNDZ : MPFR_RNDN;
12046 2305 : tree result_rem;
12047 2305 : long integer_quo;
12048 2305 : mpfr_t m0, m1;
12049 :
12050 2305 : mpfr_inits2 (prec, m0, m1, NULL);
12051 2305 : mpfr_from_real (m0, ra0, MPFR_RNDN);
12052 2305 : mpfr_from_real (m1, ra1, MPFR_RNDN);
12053 2305 : mpfr_clear_flags ();
12054 2305 : mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
12055 : /* Remquo is independent of the rounding mode, so pass
12056 : inexact=0 to do_mpfr_ckconv(). */
12057 2305 : result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
12058 2305 : mpfr_clears (m0, m1, NULL);
12059 2305 : if (result_rem)
12060 : {
12061 : /* MPFR calculates quo in the host's long, so it may
12062 : return more bits in quo than the target int can hold
12063 : if sizeof (host long) > sizeof (target int). This can
12064 : happen even for native compilers in LP64 mode. In
12065 : these cases, reduce quo modulo 2^(INT_TYPE_SIZE - 1),
12066 : i.e. keep only as many low-order bits as the target
12067 : int can hold, leaving one bit for the sign. */
12068 1179 : if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
12069 1179 : integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
12070 :
12071 : /* Dereference the quo pointer argument. */
12072 1179 : arg_quo = build_fold_indirect_ref (arg_quo);
12073 : /* Proceed iff a valid pointer type was passed in. */
12074 1179 : if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
12075 : {
12076 : /* Set the value. */
12077 1179 : tree result_quo
12078 1179 : = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
12079 : build_int_cst (TREE_TYPE (arg_quo),
12080 : integer_quo));
12081 1179 : TREE_SIDE_EFFECTS (result_quo) = 1;
12082 : /* Combine the quo assignment with the rem. */
12083 1179 : result = fold_build2 (COMPOUND_EXPR, type,
12084 : result_quo, result_rem);
12085 1179 : suppress_warning (result, OPT_Wunused_value);
12086 1179 : result = non_lvalue (result);
12087 : }
12088 : }
12089 : }
12090 : }
12091 69269 : return result;
12092 : }
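
/* Worked example (added for exposition): with both arguments constant,
   a remquo call folds at compile time into a COMPOUND_EXPR that stores
   the quotient and then yields the remainder.  */
#if 0
#include <math.h>

int
demo_remquo (void)
{
  int q;
  double r = remquo (10.0, 3.0, &q);  /* folds to: q = 3, r = 1.0 */
  return q == 3 && r == 1.0;
}
#endif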
12093 :
12094 : /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
12095 : resulting value as a tree with type TYPE. The mpfr precision is
12096 : set to the precision of TYPE. We assume that this mpfr function
12097 : returns zero if the result could be calculated exactly within the
12098 : requested precision. In addition, the integer pointer represented
12099 : by ARG_SG will be dereferenced and set to the appropriate signgam
12100 : (-1,1) value. */
12101 :
12102 : static tree
12103 6504 : do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
12104 : {
12105 6504 : tree result = NULL_TREE;
12106 :
12107 6504 : STRIP_NOPS (arg);
12108 :
12109 : /* To proceed, MPFR must exactly represent the target floating point
12110 : format, which only happens when the target base equals two. Also
12111 : verify ARG is a constant and that ARG_SG is an int pointer. */
12112 6504 : if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12113 6504 : && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
12114 6470 : && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
12115 12974 : && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
12116 : {
12117 6470 : const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
12118 :
12119 : /* In addition to NaN and Inf, the argument cannot be zero or a
12120 : negative integer. */
12121 6470 : if (real_isfinite (ra)
12122 6470 : && ra->cl != rvc_zero
12123 12940 : && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
12124 : {
12125 674 : const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12126 674 : const int prec = fmt->p;
12127 674 : const mpfr_rnd_t rnd = fmt->round_towards_zero ? MPFR_RNDZ : MPFR_RNDN;
12128 674 : int inexact, sg;
12129 674 : tree result_lg;
12130 :
12131 674 : auto_mpfr m (prec);
12132 674 : mpfr_from_real (m, ra, MPFR_RNDN);
12133 674 : mpfr_clear_flags ();
12134 674 : inexact = mpfr_lgamma (m, &sg, m, rnd);
12135 674 : result_lg = do_mpfr_ckconv (m, type, inexact);
12136 674 : if (result_lg)
12137 : {
12138 674 : tree result_sg;
12139 :
12140 : /* Dereference the arg_sg pointer argument. */
12141 674 : arg_sg = build_fold_indirect_ref (arg_sg);
12142 : /* Assign the signgam value into *arg_sg. */
12143 674 : result_sg = fold_build2 (MODIFY_EXPR,
12144 : TREE_TYPE (arg_sg), arg_sg,
12145 : build_int_cst (TREE_TYPE (arg_sg), sg));
12146 674 : TREE_SIDE_EFFECTS (result_sg) = 1;
12147 : /* Combine the signgam assignment with the lgamma result. */
12148 674 : result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12149 : result_sg, result_lg));
12150 : }
12151 674 : }
12152 : }
12153 :
12154 6504 : return result;
12155 : }
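
/* Worked example (added for exposition): lgamma_r with a constant,
   positive argument folds into a store of the sign followed by the
   constant log-gamma value; gamma (0.5) = sqrt (pi) > 0, so the sign
   is 1.  */
#if 0
#include <math.h>

double
demo_lgamma_r (void)
{
  int sg;
  double r = lgamma_r (0.5, &sg);  /* folds to: sg = 1, r = log (sqrt (pi)) */
  return r + sg;
}
#endif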
12156 :
12157 : /* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
12158 : mpc function FUNC on them and return the resulting value as a tree
12159 : with type TYPE. The mpfr precision is set to the precision of
12160 : TYPE. We assume that function FUNC returns zero if the result
12161 : could be calculated exactly within the requested precision. If
12162 : DO_NONFINITE is true, then fold expressions containing Inf or NaN
12163 : in the arguments and/or results. */
12164 :
12165 : tree
12166 4481 : do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
12167 : int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
12168 : {
12169 4481 : tree result = NULL_TREE;
12170 :
12171 4481 : STRIP_NOPS (arg0);
12172 4481 : STRIP_NOPS (arg1);
12173 :
12174 : /* To proceed, MPFR must exactly represent the target floating point
12175 : format, which only happens when the target base equals two. */
12176 4481 : if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
12177 4481 : && SCALAR_FLOAT_TYPE_P (TREE_TYPE (TREE_TYPE (arg0)))
12178 4481 : && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
12179 4481 : && SCALAR_FLOAT_TYPE_P (TREE_TYPE (TREE_TYPE (arg1)))
12180 8962 : && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
12181 : {
12182 4481 : const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
12183 4481 : const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
12184 4481 : const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
12185 4481 : const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
12186 :
12187 4481 : if (do_nonfinite
12188 4481 : || (real_isfinite (re0) && real_isfinite (im0)
12189 4017 : && real_isfinite (re1) && real_isfinite (im1)))
12190 : {
12191 4129 : const struct real_format *const fmt =
12192 4129 : REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
12193 4129 : const int prec = fmt->p;
12194 4129 : const mpfr_rnd_t rnd = fmt->round_towards_zero
12195 4129 : ? MPFR_RNDZ : MPFR_RNDN;
12196 4129 : const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
12197 4129 : int inexact;
12198 4129 : mpc_t m0, m1;
12199 :
12200 4129 : mpc_init2 (m0, prec);
12201 4129 : mpc_init2 (m1, prec);
12202 4129 : mpfr_from_real (mpc_realref (m0), re0, rnd);
12203 4129 : mpfr_from_real (mpc_imagref (m0), im0, rnd);
12204 4129 : mpfr_from_real (mpc_realref (m1), re1, rnd);
12205 4129 : mpfr_from_real (mpc_imagref (m1), im1, rnd);
12206 4129 : mpfr_clear_flags ();
12207 4129 : inexact = func (m0, m0, m1, crnd);
12208 4129 : result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
12209 4129 : mpc_clear (m0);
12210 4129 : mpc_clear (m1);
12211 : }
12212 : }
12213 :
12214 4481 : return result;
12215 : }
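
/* Illustrative sketch (added for exposition): a typical caller folds a
   two-argument complex function such as cpow by passing the matching
   MPC routine as FUNC; demo_fold_cpow is a hypothetical wrapper.  */
#if 0
static tree
demo_fold_cpow (tree arg0, tree arg1, tree type)
{
  /* do_nonfinite == 0: give up when operands or result are Inf/NaN.  */
  return do_mpc_arg2 (arg0, arg1, type, 0, mpc_pow);
}
#endif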
12216 :
12217 : /* A wrapper function for builtin folding that prevents warnings for
12218 : "statement without effect" and the like, caused by removing the
12219 : call node before the warning is generated. */
12220 :
12221 : tree
12222 5967683 : fold_call_stmt (gcall *stmt, bool ignore)
12223 : {
12224 5967683 : tree ret = NULL_TREE;
12225 5967683 : tree fndecl = gimple_call_fndecl (stmt);
12226 5967683 : location_t loc = gimple_location (stmt);
12227 5967683 : if (fndecl && fndecl_built_in_p (fndecl)
12228 11935366 : && !gimple_call_va_arg_pack_p (stmt))
12229 : {
12230 5967617 : int nargs = gimple_call_num_args (stmt);
12231 5967617 : tree *args = (nargs > 0
12232 5967617 : ? gimple_call_arg_ptr (stmt, 0)
12233 : : &error_mark_node);
12234 :
12235 5967617 : if (avoid_folding_inline_builtin (fndecl))
12236 : return NULL_TREE;
12237 5967617 : if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
12238 : {
12239 0 : return targetm.fold_builtin (fndecl, nargs, args, ignore);
12240 : }
12241 : else
12242 : {
12243 5967617 : ret = fold_builtin_n (loc, NULL_TREE, fndecl, args, nargs, ignore);
12244 5967617 : if (ret)
12245 : {
12246 : /* Propagate location information from original call to
12247 : expansion of builtin. Otherwise things like
12248 : maybe_emit_chk_warning, which operate on the expansion
12249 : of a builtin, will use the wrong location information. */
12250 4924 : if (gimple_has_location (stmt))
12251 : {
12252 4923 : tree realret = ret;
12253 4923 : if (TREE_CODE (ret) == NOP_EXPR)
12254 4923 : realret = TREE_OPERAND (ret, 0);
12255 4923 : if (CAN_HAVE_LOCATION_P (realret)
12256 9142 : && !EXPR_HAS_LOCATION (realret))
12257 6 : SET_EXPR_LOCATION (realret, loc);
12258 4923 : return realret;
12259 : }
12260 : return ret;
12261 : }
12262 : }
12263 : }
12264 : return NULL_TREE;
12265 : }
12266 :
12267 : /* Look up the function in builtin_decl that corresponds to DECL
12268 : and set ASMSPEC as its user assembler name. DECL must be a
12269 : function decl that declares a builtin. */
12270 :
12271 : void
12272 148438 : set_builtin_user_assembler_name (tree decl, const char *asmspec)
12273 : {
12274 148438 : gcc_assert (fndecl_built_in_p (decl, BUILT_IN_NORMAL)
12275 : && asmspec != 0);
12276 :
12277 148438 : tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
12278 148438 : set_user_assembler_name (builtin, asmspec);
12279 :
12280 148438 : if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
12281 148438 : && INT_TYPE_SIZE < BITS_PER_WORD)
12282 : {
12283 1 : scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
12284 1 : set_user_assembler_libfunc ("ffs", asmspec);
12285 1 : set_optab_libfunc (ffs_optab, mode, "ffs");
12286 : }
12287 148438 : }
12288 :
12289 : /* Return true if DECL is a builtin that expands to a constant or similarly
12290 : simple code. */
12291 : bool
12292 32306215 : is_simple_builtin (tree decl)
12293 : {
12294 32306215 : if (decl && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
12295 31045880 : switch (DECL_FUNCTION_CODE (decl))
12296 : {
12297 : /* Builtins that expand to constants. */
12298 : case BUILT_IN_CONSTANT_P:
12299 : case BUILT_IN_EXPECT:
12300 : case BUILT_IN_OBJECT_SIZE:
12301 : case BUILT_IN_UNREACHABLE:
12302 : /* Simple register moves or loads from stack. */
12303 : case BUILT_IN_ASSUME_ALIGNED:
12304 : case BUILT_IN_RETURN_ADDRESS:
12305 : case BUILT_IN_EXTRACT_RETURN_ADDR:
12306 : case BUILT_IN_FROB_RETURN_ADDR:
12307 : case BUILT_IN_RETURN:
12308 : case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
12309 : case BUILT_IN_FRAME_ADDRESS:
12310 : case BUILT_IN_VA_END:
12311 : case BUILT_IN_STACK_SAVE:
12312 : case BUILT_IN_STACK_RESTORE:
12313 : case BUILT_IN_DWARF_CFA:
12314 : /* Exception state returns or moves registers around. */
12315 : case BUILT_IN_EH_FILTER:
12316 : case BUILT_IN_EH_POINTER:
12317 : case BUILT_IN_EH_COPY_VALUES:
12318 : return true;
12319 :
12320 28248450 : default:
12321 28248450 : return false;
12322 : }
12323 :
12324 : return false;
12325 : }
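
/* Examples (added for exposition) of why these qualify: each call
   below expands to a constant or to a single register operation.  */
#if 0
long demo1 (long x) { return __builtin_expect (x, 1); }      /* just x */
int demo2 (void) { return __builtin_constant_p (42); }       /* constant 1 */
void *demo3 (void) { return __builtin_return_address (0); }  /* one register */
#endif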
12326 :
12327 : /* Return true if DECL is a builtin that is not expensive, i.e., one that
12328 : is most probably expanded inline into reasonably simple code. This is a
12329 : superset of is_simple_builtin. */
12330 : bool
12331 19435596 : is_inexpensive_builtin (tree decl)
12332 : {
12333 19435596 : if (!decl)
12334 : return false;
12335 19420344 : else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
12336 : return true;
12337 18137752 : else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
12338 15281807 : switch (DECL_FUNCTION_CODE (decl))
12339 : {
12340 : case BUILT_IN_ABS:
12341 : CASE_BUILT_IN_ALLOCA:
12342 : case BUILT_IN_BSWAP16:
12343 : case BUILT_IN_BSWAP32:
12344 : case BUILT_IN_BSWAP64:
12345 : case BUILT_IN_BSWAP128:
12346 : case BUILT_IN_CLZ:
12347 : case BUILT_IN_CLZIMAX:
12348 : case BUILT_IN_CLZL:
12349 : case BUILT_IN_CLZLL:
12350 : case BUILT_IN_CTZ:
12351 : case BUILT_IN_CTZIMAX:
12352 : case BUILT_IN_CTZL:
12353 : case BUILT_IN_CTZLL:
12354 : case BUILT_IN_FFS:
12355 : case BUILT_IN_FFSIMAX:
12356 : case BUILT_IN_FFSL:
12357 : case BUILT_IN_FFSLL:
12358 : case BUILT_IN_IMAXABS:
12359 : case BUILT_IN_FINITE:
12360 : case BUILT_IN_FINITEF:
12361 : case BUILT_IN_FINITEL:
12362 : case BUILT_IN_FINITED32:
12363 : case BUILT_IN_FINITED64:
12364 : case BUILT_IN_FINITED128:
12365 : case BUILT_IN_FPCLASSIFY:
12366 : case BUILT_IN_ISFINITE:
12367 : case BUILT_IN_ISINF_SIGN:
12368 : case BUILT_IN_ISINF:
12369 : case BUILT_IN_ISINFF:
12370 : case BUILT_IN_ISINFL:
12371 : case BUILT_IN_ISINFD32:
12372 : case BUILT_IN_ISINFD64:
12373 : case BUILT_IN_ISINFD128:
12374 : case BUILT_IN_ISNAN:
12375 : case BUILT_IN_ISNANF:
12376 : case BUILT_IN_ISNANL:
12377 : case BUILT_IN_ISNAND32:
12378 : case BUILT_IN_ISNAND64:
12379 : case BUILT_IN_ISNAND128:
12380 : case BUILT_IN_ISNORMAL:
12381 : case BUILT_IN_ISGREATER:
12382 : case BUILT_IN_ISGREATEREQUAL:
12383 : case BUILT_IN_ISLESS:
12384 : case BUILT_IN_ISLESSEQUAL:
12385 : case BUILT_IN_ISLESSGREATER:
12386 : case BUILT_IN_ISUNORDERED:
12387 : case BUILT_IN_ISEQSIG:
12388 : case BUILT_IN_VA_ARG_PACK:
12389 : case BUILT_IN_VA_ARG_PACK_LEN:
12390 : case BUILT_IN_VA_COPY:
12391 : case BUILT_IN_TRAP:
12392 : case BUILT_IN_UNREACHABLE_TRAP:
12393 : case BUILT_IN_SAVEREGS:
12394 : case BUILT_IN_POPCOUNTL:
12395 : case BUILT_IN_POPCOUNTLL:
12396 : case BUILT_IN_POPCOUNTIMAX:
12397 : case BUILT_IN_POPCOUNT:
12398 : case BUILT_IN_PARITYL:
12399 : case BUILT_IN_PARITYLL:
12400 : case BUILT_IN_PARITYIMAX:
12401 : case BUILT_IN_PARITY:
12402 : case BUILT_IN_LABS:
12403 : case BUILT_IN_LLABS:
12404 : case BUILT_IN_PREFETCH:
12405 : case BUILT_IN_ACC_ON_DEVICE:
12406 : return true;
12407 :
12408 14545675 : default:
12409 14545675 : return is_simple_builtin (decl);
12410 : }
12411 :
12412 : return false;
12413 : }
12414 :
12415 : /* Return true if T is a constant and the value cast to a target char
12416 : can be represented by a host char.
12417 : If so, store the resulting char constant in *P. */
12418 :
12419 : bool
12420 3019 : target_char_cst_p (tree t, char *p)
12421 : {
12422 3019 : if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
12423 : return false;
12424 :
12425 1634 : *p = (char)tree_to_uhwi (t);
12426 1634 : return true;
12427 : }
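
/* Illustrative sketch (added for exposition): a string folder handling
   e.g. strchr (s, C) needs C as a host char before it can scan a
   host-side string constant.  */
#if 0
char c;
tree arg = build_int_cst (char_type_node, 'a');
if (target_char_cst_p (arg, &c))
  gcc_assert (c == 'a');  /* holds only when host and target chars agree */
#endif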
12428 :
12429 : /* Return true if the builtin DECL is implemented in a standard library.
12430 : Otherwise return false; this does not guarantee that DECL is not in a
12431 : library, as the list of builtins handled below may be incomplete. */
12432 :
12433 : bool
12434 41394 : builtin_with_linkage_p (tree decl)
12435 : {
12436 41394 : if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
12437 41116 : switch (DECL_FUNCTION_CODE (decl))
12438 : {
12439 1304 : CASE_FLT_FN (BUILT_IN_ACOS):
12440 1304 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_ACOS):
12441 1304 : CASE_FLT_FN (BUILT_IN_ACOSH):
12442 1304 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_ACOSH):
12443 1304 : CASE_FLT_FN (BUILT_IN_ASIN):
12444 1304 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_ASIN):
12445 1304 : CASE_FLT_FN (BUILT_IN_ASINH):
12446 1304 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_ASINH):
12447 1304 : CASE_FLT_FN (BUILT_IN_ATAN):
12448 1304 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_ATAN):
12449 1304 : CASE_FLT_FN (BUILT_IN_ATANH):
12450 1304 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_ATANH):
12451 1304 : CASE_FLT_FN (BUILT_IN_ATAN2):
12452 1304 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_ATAN2):
12453 1304 : CASE_FLT_FN (BUILT_IN_CBRT):
12454 1304 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_CBRT):
12455 1304 : CASE_FLT_FN (BUILT_IN_CEIL):
12456 1304 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_CEIL):
12457 1304 : CASE_FLT_FN (BUILT_IN_COPYSIGN):
12458 1304 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
12459 1304 : CASE_FLT_FN (BUILT_IN_COS):
12460 1304 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_COS):
12461 1304 : CASE_FLT_FN (BUILT_IN_COSH):
12462 1304 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_COSH):
12463 1304 : CASE_FLT_FN (BUILT_IN_ERF):
12464 1304 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_ERF):
12465 1304 : CASE_FLT_FN (BUILT_IN_ERFC):
12466 1304 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_ERFC):
12467 1304 : CASE_FLT_FN (BUILT_IN_EXP):
12468 1304 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_EXP):
12469 1304 : CASE_FLT_FN (BUILT_IN_EXP2):
12470 1304 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_EXP2):
12471 1304 : CASE_FLT_FN (BUILT_IN_EXPM1):
12472 1304 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_EXPM1):
12473 1304 : CASE_FLT_FN (BUILT_IN_FABS):
12474 1304 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
12475 1304 : CASE_FLT_FN (BUILT_IN_FDIM):
12476 1304 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_FDIM):
12477 1304 : CASE_FLT_FN (BUILT_IN_FLOOR):
12478 1304 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_FLOOR):
12479 1304 : CASE_FLT_FN (BUILT_IN_FMA):
12480 1304 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
12481 1304 : CASE_FLT_FN (BUILT_IN_FMAX):
12482 1304 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMAX):
12483 1304 : CASE_FLT_FN (BUILT_IN_FMIN):
12484 1304 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMIN):
12485 1304 : CASE_FLT_FN (BUILT_IN_FMOD):
12486 1304 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMOD):
12487 1304 : CASE_FLT_FN (BUILT_IN_FREXP):
12488 1304 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_FREXP):
12489 1304 : CASE_FLT_FN (BUILT_IN_HYPOT):
12490 1304 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_HYPOT):
12491 1304 : CASE_FLT_FN (BUILT_IN_ILOGB):
12492 1304 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_ILOGB):
12493 1304 : CASE_FLT_FN (BUILT_IN_LDEXP):
12494 1304 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_LDEXP):
12495 1304 : CASE_FLT_FN (BUILT_IN_LGAMMA):
12496 1304 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_LGAMMA):
12497 1304 : CASE_FLT_FN (BUILT_IN_LLRINT):
12498 1304 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_LLRINT):
12499 1304 : CASE_FLT_FN (BUILT_IN_LLROUND):
12500 1304 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_LLROUND):
12501 1304 : CASE_FLT_FN (BUILT_IN_LOG):
12502 1304 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_LOG):
12503 1304 : CASE_FLT_FN (BUILT_IN_LOG10):
12504 1304 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_LOG10):
12505 1304 : CASE_FLT_FN (BUILT_IN_LOG1P):
12506 1304 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_LOG1P):
12507 1304 : CASE_FLT_FN (BUILT_IN_LOG2):
12508 1304 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_LOG2):
12509 1304 : CASE_FLT_FN (BUILT_IN_LOGB):
12510 1304 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_LOGB):
12511 1304 : CASE_FLT_FN (BUILT_IN_LRINT):
12512 1304 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_LRINT):
12513 1304 : CASE_FLT_FN (BUILT_IN_LROUND):
12514 1304 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_LROUND):
12515 1304 : CASE_FLT_FN (BUILT_IN_MODF):
12516 1304 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_MODF):
12517 1304 : CASE_FLT_FN (BUILT_IN_NAN):
12518 1304 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_NAN):
12519 1304 : CASE_FLT_FN (BUILT_IN_NEARBYINT):
12520 1304 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_NEARBYINT):
12521 1304 : CASE_FLT_FN (BUILT_IN_NEXTAFTER):
12522 1304 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_NEXTAFTER):
12523 1304 : CASE_FLT_FN (BUILT_IN_NEXTTOWARD):
12524 1304 : CASE_FLT_FN (BUILT_IN_POW):
12525 1304 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_POW):
12526 1304 : CASE_FLT_FN (BUILT_IN_REMAINDER):
12527 1304 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_REMAINDER):
12528 1304 : CASE_FLT_FN (BUILT_IN_REMQUO):
12529 1304 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_REMQUO):
12530 1304 : CASE_FLT_FN (BUILT_IN_RINT):
12531 1304 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_RINT):
12532 1304 : CASE_FLT_FN (BUILT_IN_ROUND):
12533 1304 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_ROUND):
12534 1304 : CASE_FLT_FN (BUILT_IN_SCALBLN):
12535 1304 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_SCALBLN):
12536 1304 : CASE_FLT_FN (BUILT_IN_SCALBN):
12537 1304 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_SCALBN):
12538 1304 : CASE_FLT_FN (BUILT_IN_SIN):
12539 1304 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_SIN):
12540 1304 : CASE_FLT_FN (BUILT_IN_SINH):
12541 1304 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_SINH):
12542 1304 : CASE_FLT_FN (BUILT_IN_SINCOS):
12543 1304 : CASE_FLT_FN (BUILT_IN_SQRT):
12544 1304 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_SQRT):
12545 1304 : CASE_FLT_FN (BUILT_IN_TAN):
12546 1304 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_TAN):
12547 1304 : CASE_FLT_FN (BUILT_IN_TANH):
12548 1304 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_TANH):
12549 1304 : CASE_FLT_FN (BUILT_IN_TGAMMA):
12550 1304 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_TGAMMA):
12551 1304 : CASE_FLT_FN (BUILT_IN_TRUNC):
12552 1304 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_TRUNC):
12553 1304 : return true;
12554 :
12555 16 : case BUILT_IN_STPCPY:
12556 16 : case BUILT_IN_STPNCPY:
12557 : /* stpcpy is both referenced in libiberty's pex-win32.c and provided
12558 : by libiberty's stpcpy.c for MinGW targets so we need to return true
12559 : in order to be able to build libiberty in LTO mode for them. */
12560 16 : return true;
12561 :
12562 : default:
12563 : break;
12564 : }
12565 : return false;
12566 : }
12567 :
12568 : /* Return true if OFFRNG is bounded to a subrange of offset values
12569 : valid for the largest possible object. */
12570 :
12571 : bool
12572 501 : access_ref::offset_bounded () const
12573 : {
12574 501 : tree min = TYPE_MIN_VALUE (ptrdiff_type_node);
12575 501 : tree max = TYPE_MAX_VALUE (ptrdiff_type_node);
12576 975 : return wi::to_offset (min) <= offrng[0] && offrng[1] <= wi::to_offset (max);
12577 : }
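
/* Illustrative sketch (added for exposition): offrng is a pair of
   offset_int bounds, wider than ptrdiff_t, so the containment check is
   not vacuous.  */
#if 0
access_ref ref;
ref.offrng[0] = 0;
ref.offrng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
gcc_checking_assert (ref.offset_bounded ());  /* both bounds fit */
#endif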
12578 :
12579 : /* Return the fnspec string describing the known side effects of the
12580 : builtin CALLEE (empty if unknown). See tree-ssa-structalias.cc:
12581 : find_func_aliases for the list of builtins we might need to handle here. */
12582 :
12583 : attr_fnspec
12584 112783122 : builtin_fnspec (tree callee)
12585 : {
12586 112783122 : built_in_function code = DECL_FUNCTION_CODE (callee);
12587 :
12588 112783122 : switch (code)
12589 : {
12590 : /* All the following functions read memory pointed to by
12591 : their second argument and write memory pointed to by first
12592 : argument.
12593 : strcat/strncat additionally reads memory pointed to by the first
12594 : argument. */
12595 138380 : case BUILT_IN_STRCAT:
12596 138380 : case BUILT_IN_STRCAT_CHK:
12597 138380 : return "1cW 1 ";
12598 44496 : case BUILT_IN_STRNCAT:
12599 44496 : case BUILT_IN_STRNCAT_CHK:
12600 44496 : return "1cW 13";
12601 265870 : case BUILT_IN_STRCPY:
12602 265870 : case BUILT_IN_STRCPY_CHK:
12603 265870 : return "1cO 1 ";
12604 32225 : case BUILT_IN_STPCPY:
12605 32225 : case BUILT_IN_STPCPY_CHK:
12606 32225 : return ".cO 1 ";
12607 19694531 : case BUILT_IN_STRNCPY:
12608 19694531 : case BUILT_IN_MEMCPY:
12609 19694531 : case BUILT_IN_MEMMOVE:
12610 19694531 : case BUILT_IN_TM_MEMCPY:
12611 19694531 : case BUILT_IN_TM_MEMMOVE:
12612 19694531 : case BUILT_IN_STRNCPY_CHK:
12613 19694531 : case BUILT_IN_MEMCPY_CHK:
12614 19694531 : case BUILT_IN_MEMMOVE_CHK:
12615 19694531 : return "1cO313";
12616 151937 : case BUILT_IN_MEMPCPY:
12617 151937 : case BUILT_IN_MEMPCPY_CHK:
12618 151937 : return ".cO313";
12619 57714 : case BUILT_IN_STPNCPY:
12620 57714 : case BUILT_IN_STPNCPY_CHK:
12621 57714 : return ".cO313";
12622 0 : case BUILT_IN_BCOPY:
12623 0 : return ".c23O3";
12624 0 : case BUILT_IN_BZERO:
12625 0 : return ".cO2";
12626 8913288 : case BUILT_IN_MEMCMP:
12627 8913288 : case BUILT_IN_MEMCMP_EQ:
12628 8913288 : case BUILT_IN_BCMP:
12629 8913288 : case BUILT_IN_STRNCMP:
12630 8913288 : case BUILT_IN_STRNCMP_EQ:
12631 8913288 : case BUILT_IN_STRNCASECMP:
12632 8913288 : return ".cR3R3";
12633 :
12634 : /* The following functions read memory pointed to by their
12635 : first argument. */
12636 804 : CASE_BUILT_IN_TM_LOAD (1):
12637 804 : CASE_BUILT_IN_TM_LOAD (2):
12638 804 : CASE_BUILT_IN_TM_LOAD (4):
12639 804 : CASE_BUILT_IN_TM_LOAD (8):
12640 804 : CASE_BUILT_IN_TM_LOAD (FLOAT):
12641 804 : CASE_BUILT_IN_TM_LOAD (DOUBLE):
12642 804 : CASE_BUILT_IN_TM_LOAD (LDOUBLE):
12643 804 : CASE_BUILT_IN_TM_LOAD (M64):
12644 804 : CASE_BUILT_IN_TM_LOAD (M128):
12645 804 : CASE_BUILT_IN_TM_LOAD (M256):
12646 804 : case BUILT_IN_TM_LOG:
12647 804 : case BUILT_IN_TM_LOG_1:
12648 804 : case BUILT_IN_TM_LOG_2:
12649 804 : case BUILT_IN_TM_LOG_4:
12650 804 : case BUILT_IN_TM_LOG_8:
12651 804 : case BUILT_IN_TM_LOG_FLOAT:
12652 804 : case BUILT_IN_TM_LOG_DOUBLE:
12653 804 : case BUILT_IN_TM_LOG_LDOUBLE:
12654 804 : case BUILT_IN_TM_LOG_M64:
12655 804 : case BUILT_IN_TM_LOG_M128:
12656 804 : case BUILT_IN_TM_LOG_M256:
12657 804 : return ".cR ";
12658 :
12659 465611 : case BUILT_IN_INDEX:
12660 465611 : case BUILT_IN_RINDEX:
12661 465611 : case BUILT_IN_STRCHR:
12662 465611 : case BUILT_IN_STRLEN:
12663 465611 : case BUILT_IN_STRRCHR:
12664 465611 : return ".cR ";
12665 63031 : case BUILT_IN_STRNLEN:
12666 63031 : return ".cR2";
12667 :
12668 : /* These read memory pointed to by the first argument.
12669 : Allocating memory does not have any side-effects apart from
12670 : being the definition point for the pointer.
12671 : Unix98 specifies that errno is set on allocation failure. */
12672 17789 : case BUILT_IN_STRDUP:
12673 17789 : return "mCR ";
12674 13304 : case BUILT_IN_STRNDUP:
12675 13304 : return "mCR2";
12676 : /* Allocating memory does not have any side-effects apart from
12677 : being the definition point for the pointer. */
12678 10315173 : case BUILT_IN_MALLOC:
12679 10315173 : case BUILT_IN_ALIGNED_ALLOC:
12680 10315173 : case BUILT_IN_CALLOC:
12681 10315173 : case BUILT_IN_GOMP_ALLOC:
12682 10315173 : return "mC";
12683 854002 : CASE_BUILT_IN_ALLOCA:
12684 854002 : return "mc";
12685 : /* These read memory pointed to by the first argument with size
12686 : in the third argument. */
12687 675242 : case BUILT_IN_MEMCHR:
12688 675242 : return ".cR3";
12689 : /* These read memory pointed to by the first and second arguments. */
12690 11299936 : case BUILT_IN_STRSTR:
12691 11299936 : case BUILT_IN_STRPBRK:
12692 11299936 : case BUILT_IN_STRCASECMP:
12693 11299936 : case BUILT_IN_STRCSPN:
12694 11299936 : case BUILT_IN_STRSPN:
12695 11299936 : case BUILT_IN_STRCMP:
12696 11299936 : case BUILT_IN_STRCMP_EQ:
12697 11299936 : return ".cR R ";
12698 : /* Freeing memory kills the pointed-to memory. More importantly
12699 : the call has to serve as a barrier for moving loads and stores
12700 : across it. */
12701 6027509 : case BUILT_IN_STACK_RESTORE:
12702 6027509 : case BUILT_IN_FREE:
12703 6027509 : case BUILT_IN_GOMP_FREE:
12704 6027509 : return ".co ";
12705 103755 : case BUILT_IN_VA_END:
12706 103755 : return ".cO ";
12707 : /* Realloc serves both as allocation point and deallocation point. */
12708 1038931 : case BUILT_IN_REALLOC:
12709 1038931 : case BUILT_IN_GOMP_REALLOC:
12710 1038931 : return ".Cw ";
12711 15732 : case BUILT_IN_GAMMA_R:
12712 15732 : case BUILT_IN_GAMMAF_R:
12713 15732 : case BUILT_IN_GAMMAL_R:
12714 15732 : case BUILT_IN_LGAMMA_R:
12715 15732 : case BUILT_IN_LGAMMAF_R:
12716 15732 : case BUILT_IN_LGAMMAL_R:
12717 15732 : return ".C. Ot";
12718 62079 : case BUILT_IN_FREXP:
12719 62079 : case BUILT_IN_FREXPF:
12720 62079 : case BUILT_IN_FREXPL:
12721 62079 : case BUILT_IN_MODF:
12722 62079 : case BUILT_IN_MODFF:
12723 62079 : case BUILT_IN_MODFL:
12724 62079 : return ".c. Ot";
12725 7586 : case BUILT_IN_REMQUO:
12726 7586 : case BUILT_IN_REMQUOF:
12727 7586 : case BUILT_IN_REMQUOL:
12728 7586 : return ".c. . Ot";
12729 153 : case BUILT_IN_SINCOS:
12730 153 : case BUILT_IN_SINCOSF:
12731 153 : case BUILT_IN_SINCOSL:
12732 153 : return ".c. OtOt";
12733 4317008 : case BUILT_IN_MEMSET:
12734 4317008 : case BUILT_IN_MEMSET_CHK:
12735 4317008 : case BUILT_IN_TM_MEMSET:
12736 4317008 : return "1cO3";
12737 366 : CASE_BUILT_IN_TM_STORE (1):
12738 366 : CASE_BUILT_IN_TM_STORE (2):
12739 366 : CASE_BUILT_IN_TM_STORE (4):
12740 366 : CASE_BUILT_IN_TM_STORE (8):
12741 366 : CASE_BUILT_IN_TM_STORE (FLOAT):
12742 366 : CASE_BUILT_IN_TM_STORE (DOUBLE):
12743 366 : CASE_BUILT_IN_TM_STORE (LDOUBLE):
12744 366 : CASE_BUILT_IN_TM_STORE (M64):
12745 366 : CASE_BUILT_IN_TM_STORE (M128):
12746 366 : CASE_BUILT_IN_TM_STORE (M256):
12747 366 : return ".cO ";
12748 2291626 : case BUILT_IN_STACK_SAVE:
12749 2291626 : case BUILT_IN_RETURN:
12750 2291626 : case BUILT_IN_EH_POINTER:
12751 2291626 : case BUILT_IN_EH_FILTER:
12752 2291626 : case BUILT_IN_UNWIND_RESUME:
12753 2291626 : case BUILT_IN_CXA_END_CLEANUP:
12754 2291626 : case BUILT_IN_EH_COPY_VALUES:
12755 2291626 : case BUILT_IN_FRAME_ADDRESS:
12756 2291626 : case BUILT_IN_APPLY_ARGS:
12757 2291626 : case BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT:
12758 2291626 : case BUILT_IN_ASAN_AFTER_DYNAMIC_INIT:
12759 2291626 : case BUILT_IN_PREFETCH:
12760 2291626 : case BUILT_IN_DWARF_CFA:
12761 2291626 : case BUILT_IN_RETURN_ADDRESS:
12762 2291626 : return ".c";
12763 1870195 : case BUILT_IN_ASSUME_ALIGNED:
12764 1870195 : case BUILT_IN_EXPECT:
12765 1870195 : case BUILT_IN_EXPECT_WITH_PROBABILITY:
12766 1870195 : return "1cX ";
12767 : /* But posix_memalign stores a pointer into the memory pointed to
12768 : by its first argument. */
12769 5681 : case BUILT_IN_POSIX_MEMALIGN:
12770 5681 : return ".cOt";
12771 4438 : case BUILT_IN_OMP_GET_MAPPED_PTR:
12772 4438 : return ". R ";
12773 :
12774 44034730 : default:
12775 44034730 : return "";
12776 : }
12777 : }
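
/* Decoding example (added for exposition; attr-fnspec.h has the
   authoritative grammar): the first two characters describe the
   function, then each argument gets two characters.  For memcpy's
   "1cO313": '1' - returns argument 1; 'c' - const apart from the
   argument effects described; "O3" - argument 1 is only written, with
   the size given by argument 3; "13" - argument 2 is copied to the
   memory pointed to by argument 1, again with size from argument 3.  */
#if 0
attr_fnspec spec
  = builtin_fnspec (builtin_decl_explicit (BUILT_IN_MEMCPY));
/* spec now wraps "1cO313" as decoded above.  */
#endif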