Branch data Line data Source code
1 : : /* Expand builtin functions.
2 : : Copyright (C) 1988-2025 Free Software Foundation, Inc.
3 : :
4 : : This file is part of GCC.
5 : :
6 : : GCC is free software; you can redistribute it and/or modify it under
7 : : the terms of the GNU General Public License as published by the Free
8 : : Software Foundation; either version 3, or (at your option) any later
9 : : version.
10 : :
11 : : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 : : WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 : : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 : : for more details.
15 : :
16 : : You should have received a copy of the GNU General Public License
17 : : along with GCC; see the file COPYING3. If not see
18 : : <http://www.gnu.org/licenses/>. */
19 : :
20 : : /* Legacy warning! Please add no further builtin simplifications here
21 : : (apart from pure constant folding) - builtin simplifications should go
22 : : to match.pd or gimple-fold.cc instead. */
23 : :
24 : : #include "config.h"
25 : : #include "system.h"
26 : : #include "coretypes.h"
27 : : #include "backend.h"
28 : : #include "target.h"
29 : : #include "rtl.h"
30 : : #include "tree.h"
31 : : #include "memmodel.h"
32 : : #include "gimple.h"
33 : : #include "predict.h"
34 : : #include "tm_p.h"
35 : : #include "stringpool.h"
36 : : #include "tree-vrp.h"
37 : : #include "tree-ssanames.h"
38 : : #include "expmed.h"
39 : : #include "optabs.h"
40 : : #include "emit-rtl.h"
41 : : #include "recog.h"
42 : : #include "diagnostic-core.h"
43 : : #include "alias.h"
44 : : #include "fold-const.h"
45 : : #include "fold-const-call.h"
46 : : #include "gimple-ssa-warn-access.h"
47 : : #include "stor-layout.h"
48 : : #include "calls.h"
49 : : #include "varasm.h"
50 : : #include "tree-object-size.h"
51 : : #include "tree-ssa-strlen.h"
52 : : #include "realmpfr.h"
53 : : #include "cfgrtl.h"
54 : : #include "except.h"
55 : : #include "dojump.h"
56 : : #include "explow.h"
57 : : #include "stmt.h"
58 : : #include "expr.h"
59 : : #include "libfuncs.h"
60 : : #include "output.h"
61 : : #include "typeclass.h"
62 : : #include "langhooks.h"
63 : : #include "value-prof.h"
64 : : #include "builtins.h"
65 : : #include "stringpool.h"
66 : : #include "attribs.h"
67 : : #include "asan.h"
68 : : #include "internal-fn.h"
69 : : #include "case-cfn-macros.h"
70 : : #include "gimple-iterator.h"
71 : : #include "gimple-fold.h"
72 : : #include "intl.h"
73 : : #include "file-prefix-map.h" /* remap_macro_filename() */
74 : : #include "ipa-strub.h" /* strub_watermark_parm() */
75 : : #include "gomp-constants.h"
76 : : #include "omp-general.h"
77 : : #include "tree-dfa.h"
78 : : #include "gimple-ssa.h"
79 : : #include "tree-ssa-live.h"
80 : : #include "tree-outof-ssa.h"
81 : : #include "attr-fnspec.h"
82 : : #include "demangle.h"
83 : : #include "gimple-range.h"
84 : : #include "pointer-query.h"
85 : :
86 : : struct target_builtins default_target_builtins;
87 : : #if SWITCHABLE_TARGET
88 : : struct target_builtins *this_target_builtins = &default_target_builtins;
89 : : #endif
90 : :
91 : : /* Define the names of the builtin function types and codes. */
92 : : const char *const built_in_class_names[BUILT_IN_LAST]
93 : : = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
94 : :
95 : : #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
96 : : const char * built_in_names[(int) END_BUILTINS] =
97 : : {
98 : : #include "builtins.def"
99 : : };
100 : :
101 : : /* Set up an array of builtin_info_type, making sure each element's
102 : : decl is initialized to NULL_TREE. */
103 : : builtin_info_type builtin_info[(int)END_BUILTINS];
104 : :
105 : : /* Non-zero if __builtin_constant_p should be folded right away. */
106 : : bool force_folding_builtin_constant_p;
107 : :
108 : : static int target_char_cast (tree, char *);
109 : : static int apply_args_size (void);
110 : : static int apply_result_size (void);
111 : : static rtx result_vector (int, rtx);
112 : : static void expand_builtin_prefetch (tree);
113 : : static rtx expand_builtin_apply_args (void);
114 : : static rtx expand_builtin_apply_args_1 (void);
115 : : static rtx expand_builtin_apply (rtx, rtx, rtx);
116 : : static void expand_builtin_return (rtx);
117 : : static rtx expand_builtin_classify_type (tree);
118 : : static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
119 : : static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
120 : : static rtx expand_builtin_interclass_mathfn (tree, rtx);
121 : : static rtx expand_builtin_sincos (tree);
122 : : static rtx expand_builtin_fegetround (tree, rtx, machine_mode);
123 : : static rtx expand_builtin_feclear_feraise_except (tree, rtx, machine_mode,
124 : : optab);
125 : : static rtx expand_builtin_cexpi (tree, rtx);
126 : : static rtx expand_builtin_issignaling (tree, rtx);
127 : : static rtx expand_builtin_int_roundingfn (tree, rtx);
128 : : static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
129 : : static rtx expand_builtin_next_arg (void);
130 : : static rtx expand_builtin_va_start (tree);
131 : : static rtx expand_builtin_va_end (tree);
132 : : static rtx expand_builtin_va_copy (tree);
133 : : static rtx inline_expand_builtin_bytecmp (tree, rtx);
134 : : static rtx expand_builtin_strcmp (tree, rtx);
135 : : static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
136 : : static rtx expand_builtin_memcpy (tree, rtx);
137 : : static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
138 : : rtx target, tree exp,
139 : : memop_ret retmode,
140 : : bool might_overlap);
141 : : static rtx expand_builtin_memmove (tree, rtx);
142 : : static rtx expand_builtin_mempcpy (tree, rtx);
143 : : static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, memop_ret);
144 : : static rtx expand_builtin_strcpy (tree, rtx);
145 : : static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx);
146 : : static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
147 : : static rtx expand_builtin_strncpy (tree, rtx);
148 : : static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
149 : : static rtx expand_builtin_bzero (tree);
150 : : static rtx expand_builtin_strlen (tree, rtx, machine_mode);
151 : : static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
152 : : static rtx expand_builtin_alloca (tree);
153 : : static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
154 : : static rtx expand_builtin_frame_address (tree, tree);
155 : : static rtx expand_builtin_stack_address ();
156 : : static tree stabilize_va_list_loc (location_t, tree, int);
157 : : static rtx expand_builtin_expect (tree, rtx);
158 : : static rtx expand_builtin_expect_with_probability (tree, rtx);
159 : : static tree fold_builtin_classify_type (tree);
160 : : static tree fold_builtin_strlen (location_t, tree, tree, tree);
161 : : static tree fold_builtin_inf (location_t, tree, int);
162 : : static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
163 : : static bool validate_arg (const_tree, enum tree_code code);
164 : : static rtx expand_builtin_fabs (tree, rtx, rtx);
165 : : static rtx expand_builtin_signbit (tree, rtx);
166 : : static tree fold_builtin_memcmp (location_t, tree, tree, tree);
167 : : static tree fold_builtin_isascii (location_t, tree);
168 : : static tree fold_builtin_toascii (location_t, tree);
169 : : static tree fold_builtin_isdigit (location_t, tree);
170 : : static tree fold_builtin_fabs (location_t, tree, tree);
171 : : static tree fold_builtin_abs (location_t, tree, tree);
172 : : static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
173 : : enum tree_code);
174 : : static tree fold_builtin_iseqsig (location_t, tree, tree);
175 : : static tree fold_builtin_varargs (location_t, tree, tree*, int);
176 : :
177 : : static tree fold_builtin_strpbrk (location_t, tree, tree, tree, tree);
178 : : static tree fold_builtin_strspn (location_t, tree, tree, tree, tree);
179 : : static tree fold_builtin_strcspn (location_t, tree, tree, tree, tree);
180 : :
181 : : static rtx expand_builtin_object_size (tree);
182 : : static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
183 : : enum built_in_function);
184 : : static void maybe_emit_chk_warning (tree, enum built_in_function);
185 : : static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
186 : : static tree fold_builtin_object_size (tree, tree, enum built_in_function);
187 : :
188 : : unsigned HOST_WIDE_INT target_newline;
189 : : unsigned HOST_WIDE_INT target_percent;
190 : : static unsigned HOST_WIDE_INT target_c;
191 : : static unsigned HOST_WIDE_INT target_s;
192 : : char target_percent_c[3];
193 : : char target_percent_s[3];
194 : : char target_percent_s_newline[4];
195 : : static tree do_mpfr_remquo (tree, tree, tree);
196 : : static tree do_mpfr_lgamma_r (tree, tree, tree);
197 : : static void expand_builtin_sync_synchronize (void);
198 : :
199 : : /* Return true if NAME starts with __builtin_, __sync_ or __atomic_. */
200 : :
201 : : static bool
202 : 528437 : is_builtin_name (const char *name)
203 : : {
204 : 528437 : return (startswith (name, "__builtin_")
205 : 190174 : || startswith (name, "__sync_")
206 : 717756 : || startswith (name, "__atomic_"));
207 : : }
208 : :
209 : : /* Return true if NODE should be considered for inline expansion regardless
210 : : of the optimization level. This is the case whenever a function is invoked
211 : : under its "internal" name, which normally begins with the prefix "__builtin_". */
212 : :
213 : : bool
214 : 528437 : called_as_built_in (tree node)
215 : : {
216 : : /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
217 : : we want the name used to call the function, not the name it
218 : : will have. */
219 : 528437 : const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
220 : 528437 : return is_builtin_name (name);
221 : : }
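: :
: : /* Illustrative examples (not part of this file): given the prefix
: :    tests above,
: :
: :      is_builtin_name ("__builtin_memcpy")      -> true
: :      is_builtin_name ("__sync_fetch_and_add")  -> true
: :      is_builtin_name ("__atomic_load_n")       -> true
: :      is_builtin_name ("memcpy")                -> false
: :
: :    so called_as_built_in holds only for calls spelled with one of
: :    the reserved prefixes, regardless of optimization level. */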
222 : :
223 : : /* Compute values M and N such that M divides (address of EXP - N) and such
224 : : that N < M. If these numbers can be determined, store M in *ALIGNP and N in
225 : : *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT in
226 : : *ALIGNP and any bit-offset in *BITPOSP.
227 : :
228 : : Note that the address (and thus the alignment) computed here is based
229 : : on the address to which a symbol resolves, whereas DECL_ALIGN is based
230 : : on the address at which an object is actually located. These two
231 : : addresses are not always the same. For example, on ARM targets,
232 : : the address &foo of a Thumb function foo() has the lowest bit set,
233 : : whereas foo() itself starts on an even address.
234 : :
235 : : If ADDR_P is true we are taking the address of the memory reference EXP
236 : : and thus cannot rely on the access taking place. */
237 : :
238 : : bool
239 : 109656113 : get_object_alignment_2 (tree exp, unsigned int *alignp,
240 : : unsigned HOST_WIDE_INT *bitposp, bool addr_p)
241 : : {
242 : 109656113 : poly_int64 bitsize, bitpos;
243 : 109656113 : tree offset;
244 : 109656113 : machine_mode mode;
245 : 109656113 : int unsignedp, reversep, volatilep;
246 : 109656113 : unsigned int align = BITS_PER_UNIT;
247 : 109656113 : bool known_alignment = false;
248 : :
249 : : /* Get the innermost object and the constant (bitpos) and possibly
250 : : variable (offset) offset of the access. */
251 : 109656113 : exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
252 : : &unsignedp, &reversep, &volatilep);
253 : :
254 : : /* Extract alignment information from the innermost object and
255 : : possibly adjust bitpos and offset. */
256 : 109656113 : if (TREE_CODE (exp) == FUNCTION_DECL)
257 : : {
258 : : /* Function addresses can encode extra information besides their
259 : : alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
260 : : allows the low bit to be used as a virtual bit, we know
261 : : that the address itself must be at least 2-byte aligned. */
262 : : if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
263 : : align = 2 * BITS_PER_UNIT;
264 : : }
265 : 109220831 : else if (TREE_CODE (exp) == LABEL_DECL)
266 : : ;
267 : 109183886 : else if (TREE_CODE (exp) == CONST_DECL)
268 : : {
269 : : /* The alignment of a CONST_DECL is determined by its initializer. */
270 : 58600 : exp = DECL_INITIAL (exp);
271 : 58600 : align = TYPE_ALIGN (TREE_TYPE (exp));
272 : 58600 : if (CONSTANT_CLASS_P (exp))
273 : 58593 : align = targetm.constant_alignment (exp, align);
274 : :
275 : : known_alignment = true;
276 : : }
277 : 109125286 : else if (DECL_P (exp))
278 : : {
279 : 66398475 : align = DECL_ALIGN (exp);
280 : : known_alignment = true;
281 : : }
282 : 42726811 : else if (TREE_CODE (exp) == INDIRECT_REF
283 : 42714919 : || TREE_CODE (exp) == MEM_REF
284 : 5320602 : || TREE_CODE (exp) == TARGET_MEM_REF)
285 : : {
286 : 41053189 : tree addr = TREE_OPERAND (exp, 0);
287 : 41053189 : unsigned ptr_align;
288 : 41053189 : unsigned HOST_WIDE_INT ptr_bitpos;
289 : 41053189 : unsigned HOST_WIDE_INT ptr_bitmask = ~0;
290 : :
291 : : /* If the address is explicitly aligned, handle that. */
292 : 41053189 : if (TREE_CODE (addr) == BIT_AND_EXPR
293 : 41053189 : && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
294 : : {
295 : 76 : ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
296 : 76 : ptr_bitmask *= BITS_PER_UNIT;
297 : 76 : align = least_bit_hwi (ptr_bitmask);
298 : 76 : addr = TREE_OPERAND (addr, 0);
299 : : }
300 : :
301 : 41053189 : known_alignment
302 : 41053189 : = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
303 : 41053189 : align = MAX (ptr_align, align);
304 : :
305 : : /* Re-apply explicit alignment to the bitpos. */
306 : 41053189 : ptr_bitpos &= ptr_bitmask;
307 : :
308 : : /* The alignment of the pointer operand in a TARGET_MEM_REF
309 : : has to take the variable offset parts into account. */
310 : 41053189 : if (TREE_CODE (exp) == TARGET_MEM_REF)
311 : : {
312 : 3646980 : if (TMR_INDEX (exp))
313 : : {
314 : 1653023 : unsigned HOST_WIDE_INT step = 1;
315 : 1653023 : if (TMR_STEP (exp))
316 : 1506513 : step = TREE_INT_CST_LOW (TMR_STEP (exp));
317 : 1653023 : align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
318 : : }
319 : 3646980 : if (TMR_INDEX2 (exp))
320 : 42173 : align = BITS_PER_UNIT;
321 : : known_alignment = false;
322 : : }
323 : :
324 : : /* When EXP is an actual memory reference then we can use
325 : : TYPE_ALIGN of a pointer indirection to derive alignment.
326 : : Do so only if get_pointer_alignment_1 did not reveal absolute
327 : : alignment knowledge and if using that alignment would
328 : : improve the situation. */
329 : 41053189 : unsigned int talign;
330 : 41053189 : if (!addr_p && !known_alignment
331 : 38641771 : && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
332 : 79694954 : && talign > align)
333 : : align = talign;
334 : : else
335 : : {
336 : : /* Else adjust bitpos accordingly. */
337 : 10999256 : bitpos += ptr_bitpos;
338 : 10999256 : if (TREE_CODE (exp) == MEM_REF
339 : 10999256 : || TREE_CODE (exp) == TARGET_MEM_REF)
340 : 10990201 : bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
341 : : }
342 : 41053189 : }
343 : 1673622 : else if (TREE_CODE (exp) == STRING_CST)
344 : : {
345 : : /* STRING_CSTs are the only constant objects we allow not to be
346 : : wrapped inside a CONST_DECL. */
347 : 1673118 : align = TYPE_ALIGN (TREE_TYPE (exp));
348 : 1673118 : if (CONSTANT_CLASS_P (exp))
349 : 1673118 : align = targetm.constant_alignment (exp, align);
350 : :
351 : : known_alignment = true;
352 : : }
353 : :
354 : : /* If there is a non-constant offset part extract the maximum
355 : : alignment that can prevail. */
356 : 109656113 : if (offset)
357 : : {
358 : 9807805 : unsigned int trailing_zeros = tree_ctz (offset);
359 : 9807805 : if (trailing_zeros < HOST_BITS_PER_INT)
360 : : {
361 : 9807749 : unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
362 : 9807749 : if (inner)
363 : 9807749 : align = MIN (align, inner);
364 : : }
365 : : }
366 : :
367 : : /* Account for the alignment of runtime coefficients, so that the constant
368 : : bitpos is guaranteed to be accurate. */
369 : 109656113 : unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
370 : 109656113 : if (alt_align != 0 && alt_align < align)
371 : : {
372 : : align = alt_align;
373 : : known_alignment = false;
374 : : }
375 : :
376 : 109656113 : *alignp = align;
377 : 109656113 : *bitposp = bitpos.coeffs[0] & (align - 1);
378 : 109656113 : return known_alignment;
379 : : }
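: :
: : /* A worked example of the contract above, with hypothetical values:
: :    for a field at bit offset 16 within a decl whose DECL_ALIGN is 64,
: :    the function stores *ALIGNP = 64 and *BITPOSP = 16, i.e. the
: :    address of EXP is 16 bits past a 64-bit boundary. A caller sketch:
: :
: :      unsigned int align;
: :      unsigned HOST_WIDE_INT bitpos;
: :      if (get_object_alignment_2 (exp, &align, &bitpos, false))
: :        ... M and N are exact ...
: :      else
: :        ... only a conservative lower bound ...  */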
380 : :
381 : : /* For a memory reference expression EXP compute values M and N such that M
382 : : divides (&EXP - N) and such that N < M. If these numbers can be determined,
383 : : store M in *ALIGNP and N in *BITPOSP and return true. Otherwise return false
384 : : and store BITS_PER_UNIT in *ALIGNP and any bit-offset in *BITPOSP. */
385 : :
386 : : bool
387 : 86655810 : get_object_alignment_1 (tree exp, unsigned int *alignp,
388 : : unsigned HOST_WIDE_INT *bitposp)
389 : : {
390 : : /* Strip a WITH_SIZE_EXPR, get_inner_reference doesn't know how to deal
391 : : with it. */
392 : 86655810 : if (TREE_CODE (exp) == WITH_SIZE_EXPR)
393 : 0 : exp = TREE_OPERAND (exp, 0);
394 : 86655810 : return get_object_alignment_2 (exp, alignp, bitposp, false);
395 : : }
396 : :
397 : : /* Return the alignment in bits of EXP, an object. */
398 : :
399 : : unsigned int
400 : 35750737 : get_object_alignment (tree exp)
401 : : {
402 : 35750737 : unsigned HOST_WIDE_INT bitpos = 0;
403 : 35750737 : unsigned int align;
404 : :
405 : 35750737 : get_object_alignment_1 (exp, &align, &bitpos);
406 : :
407 : : /* align and bitpos now specify known low bits of the pointer.
408 : : ptr & (align - 1) == bitpos. */
409 : :
410 : 35750737 : if (bitpos != 0)
411 : 2816114 : align = least_bit_hwi (bitpos);
412 : 35750737 : return align;
413 : : }
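: :
: : /* For instance, if get_object_alignment_1 yields align == 128 and
: :    bitpos == 32 (in bits), the strongest power-of-two guarantee for
: :    the address itself is least_bit_hwi (32) == 32, i.e. a 4-byte
: :    alignment is returned. */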
414 : :
415 : : /* For a pointer valued expression EXP compute values M and N such that M
416 : : divides (EXP - N) and such that N < M. If these numbers can be determined,
417 : : store M in *ALIGNP and N in *BITPOSP and return true. Return false if
418 : : the results are just a conservative approximation.
419 : :
420 : : If EXP is not a pointer, false is returned too. */
421 : :
422 : : bool
423 : 82055057 : get_pointer_alignment_1 (tree exp, unsigned int *alignp,
424 : : unsigned HOST_WIDE_INT *bitposp)
425 : : {
426 : 82055057 : STRIP_NOPS (exp);
427 : :
428 : 82055057 : if (TREE_CODE (exp) == ADDR_EXPR)
429 : 22998787 : return get_object_alignment_2 (TREE_OPERAND (exp, 0),
430 : 22998787 : alignp, bitposp, true);
431 : 59056270 : else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
432 : : {
433 : 953449 : unsigned int align;
434 : 953449 : unsigned HOST_WIDE_INT bitpos;
435 : 953449 : bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
436 : : &align, &bitpos);
437 : 953449 : if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
438 : 2734 : bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
439 : : else
440 : : {
441 : 950715 : unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
442 : 950715 : if (trailing_zeros < HOST_BITS_PER_INT)
443 : : {
444 : 950709 : unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
445 : 950709 : if (inner)
446 : 950709 : align = MIN (align, inner);
447 : : }
448 : : }
449 : 953449 : *alignp = align;
450 : 953449 : *bitposp = bitpos & (align - 1);
451 : 953449 : return res;
452 : : }
453 : 58102821 : else if (TREE_CODE (exp) == SSA_NAME
454 : 58102821 : && POINTER_TYPE_P (TREE_TYPE (exp)))
455 : : {
456 : 56063430 : unsigned int ptr_align, ptr_misalign;
457 : 56063430 : struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
458 : :
459 : 56063430 : if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
460 : : {
461 : 5814418 : *bitposp = ptr_misalign * BITS_PER_UNIT;
462 : 5814418 : *alignp = ptr_align * BITS_PER_UNIT;
463 : : /* Make sure to return a sensible alignment when the multiplication
464 : : by BITS_PER_UNIT overflowed. */
465 : 5814418 : if (*alignp == 0)
466 : 30 : *alignp = 1u << (HOST_BITS_PER_INT - 1);
467 : : /* We cannot really tell whether this result is an approximation. */
468 : 5814418 : return false;
469 : : }
470 : : else
471 : : {
472 : 50249012 : *bitposp = 0;
473 : 50249012 : *alignp = BITS_PER_UNIT;
474 : 50249012 : return false;
475 : : }
476 : : }
477 : 2039391 : else if (TREE_CODE (exp) == INTEGER_CST)
478 : : {
479 : 16749 : *alignp = BIGGEST_ALIGNMENT;
480 : 16749 : *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
481 : 16749 : & (BIGGEST_ALIGNMENT - 1));
482 : 16749 : return true;
483 : : }
484 : :
485 : 2022642 : *bitposp = 0;
486 : 2022642 : *alignp = BITS_PER_UNIT;
487 : 2022642 : return false;
488 : : }
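: :
: : /* To illustrate the POINTER_PLUS_EXPR case above with hypothetical
: :    values: if P is known to be 16-byte aligned, then for P + 4 the
: :    recursion yields align == 128 and bitpos == 0, the constant offset
: :    contributes 4 * BITS_PER_UNIT, and the function stores
: :    *ALIGNP = 128, *BITPOSP = 32. */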
489 : :
490 : : /* Return the alignment in bits of EXP, a pointer valued expression.
491 : : The alignment returned is, by default, the alignment of the thing that
492 : : EXP points to. If EXP is not a POINTER_TYPE, BITS_PER_UNIT is returned.
493 : :
494 : : Otherwise, look at the expression to see if we can do better, i.e., if the
495 : : expression is actually pointing at an object whose alignment is tighter. */
496 : :
497 : : unsigned int
498 : 11084089 : get_pointer_alignment (tree exp)
499 : : {
500 : 11084089 : unsigned HOST_WIDE_INT bitpos = 0;
501 : 11084089 : unsigned int align;
502 : :
503 : 11084089 : get_pointer_alignment_1 (exp, &align, &bitpos);
504 : :
505 : : /* align and bitpos now specify known low bits of the pointer.
506 : : ptr & (align - 1) == bitpos. */
507 : :
508 : 11084089 : if (bitpos != 0)
509 : 93947 : align = least_bit_hwi (bitpos);
510 : :
511 : 11084089 : return align;
512 : : }
513 : :
514 : : /* Return the number of leading non-zero elements in the sequence
515 : : [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
516 : : ELTSIZE must be a power of 2 less than 8. Used by c_strlen. */
517 : :
518 : : unsigned
519 : 767509 : string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
520 : : {
521 : 767509 : gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
522 : :
523 : 767509 : unsigned n;
524 : :
525 : 767509 : if (eltsize == 1)
526 : : {
527 : : /* Optimize the common case of plain char. */
528 : 244906634 : for (n = 0; n < maxelts; n++)
529 : : {
530 : 244822853 : const char *elt = (const char*) ptr + n;
531 : 244822853 : if (!*elt)
532 : : break;
533 : : }
534 : : }
535 : : else
536 : : {
537 : 590 : for (n = 0; n < maxelts; n++)
538 : : {
539 : 566 : const char *elt = (const char*) ptr + n * eltsize;
540 : 566 : if (!memcmp (elt, "\0\0\0\0", eltsize))
541 : : break;
542 : : }
543 : : }
544 : 767509 : return n;
545 : : }
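: :
: : /* Illustrative calls (the wide case assumes 4-byte elements):
: :
: :      string_length ("ab\0cd", 1, 5)   -> 2
: :      string_length (L"a\0b",  4, 3)   -> 1
: :
: :    i.e. the number of elements before the first zero element, capped
: :    at MAXELTS. */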
546 : :
547 : : /* Compute the length of a null-terminated character string or wide
548 : : character string handling character sizes of 1, 2, and 4 bytes.
549 : : TREE_STRING_LENGTH is not the right way because it evaluates to
550 : : the size of the character array in bytes (as opposed to characters)
551 : : and because it can contain a zero byte in the middle.
552 : :
553 : : ONLY_VALUE should be nonzero if the result is not going to be emitted
554 : : into the instruction stream and zero if it is going to be expanded.
555 : : E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
556 : : is returned, otherwise NULL, since
557 : : len = c_strlen (ARG, 1); if (len) expand_expr (len, ...); would not
558 : : evaluate the side-effects.
559 : :
560 : : If ONLY_VALUE is two then we do not emit warnings about out-of-bound
561 : : accesses. Note that this implies the result is not going to be emitted
562 : : into the instruction stream.
563 : :
564 : : Additional information about the string accessed may be recorded
565 : : in DATA. For example, if ARG references an unterminated string,
566 : : then the declaration will be stored in the DECL field. If the
567 : : length of the unterminated string can be determined, it'll be
568 : : stored in the LEN field. Note this length could well be different
569 : : than what a C strlen call would return.
570 : :
571 : : ELTSIZE is 1 for normal single byte character strings, and 2 or
572 : : 4 for wide characer strings. ELTSIZE is by default 1.
573 : :
574 : : The value returned is of type `ssizetype'. */
575 : :
576 : : tree
577 : 2472683 : c_strlen (tree arg, int only_value, c_strlen_data *data, unsigned eltsize)
578 : : {
579 : : /* If we were not passed a DATA pointer, then get one to a local
580 : : structure. That avoids having to check DATA for NULL before
581 : : each time we want to use it. */
582 : 2472683 : c_strlen_data local_strlen_data = { };
583 : 2472683 : if (!data)
584 : 578086 : data = &local_strlen_data;
585 : :
586 : 2472683 : gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
587 : :
588 : 2472683 : tree src = STRIP_NOPS (arg);
589 : 2472683 : if (TREE_CODE (src) == COND_EXPR
590 : 2472683 : && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
591 : : {
592 : 687 : tree len1, len2;
593 : :
594 : 687 : len1 = c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
595 : 687 : len2 = c_strlen (TREE_OPERAND (src, 2), only_value, data, eltsize);
596 : 687 : if (tree_int_cst_equal (len1, len2))
597 : : return len1;
598 : : }
599 : :
600 : 2472517 : if (TREE_CODE (src) == COMPOUND_EXPR
601 : 2472517 : && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
602 : 45 : return c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
603 : :
604 : 2472472 : location_t loc = EXPR_LOC_OR_LOC (src, input_location);
605 : :
606 : : /* Offset from the beginning of the string in bytes. */
607 : 2472472 : tree byteoff;
608 : 2472472 : tree memsize;
609 : 2472472 : tree decl;
610 : 2472472 : src = string_constant (src, &byteoff, &memsize, &decl);
611 : 2472472 : if (src == 0)
612 : : return NULL_TREE;
613 : :
614 : : /* Determine the size of the string element. */
615 : 781104 : if (eltsize != tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src)))))
616 : : return NULL_TREE;
617 : :
618 : : /* Set MAXELTS to ARRAY_SIZE (SRC) - 1, the maximum possible
619 : : length of SRC. Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
620 : : in case the latter is less than the size of the array, such as when
621 : : SRC refers to a short string literal used to initialize a large array.
622 : : In that case, the elements of the array after the terminating NUL are
623 : : all NUL. */
624 : 771195 : HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src);
625 : 771195 : strelts = strelts / eltsize;
626 : :
627 : 771195 : if (!tree_fits_uhwi_p (memsize))
628 : : return NULL_TREE;
629 : :
630 : 771195 : HOST_WIDE_INT maxelts = tree_to_uhwi (memsize) / eltsize;
631 : :
632 : : /* PTR can point to the byte representation of any string type, including
633 : : char* and wchar_t*. */
634 : 771195 : const char *ptr = TREE_STRING_POINTER (src);
635 : :
636 : 771195 : if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
637 : : {
638 : : /* The code below works only for single byte character types. */
639 : 5258 : if (eltsize != 1)
640 : : return NULL_TREE;
641 : :
642 : : /* If the string has an internal NUL character followed by any
643 : : non-NUL characters (e.g., "foo\0bar"), we can't compute
644 : : the offset to the following NUL if we don't know where to
645 : : start searching for it. */
646 : 5258 : unsigned len = string_length (ptr, eltsize, strelts);
647 : :
648 : : /* Return when an embedded null character is found or none at all.
649 : : In the latter case, set the DECL/LEN field in the DATA structure
650 : : so that callers may examine them. */
651 : 5258 : if (len + 1 < strelts)
652 : : return NULL_TREE;
653 : 4688 : else if (len >= maxelts)
654 : : {
655 : 1102 : data->decl = decl;
656 : 1102 : data->off = byteoff;
657 : 1102 : data->minlen = ssize_int (len);
658 : 1102 : return NULL_TREE;
659 : : }
660 : :
661 : : /* For empty strings the result should be zero. */
662 : 3586 : if (len == 0)
663 : 39 : return ssize_int (0);
664 : :
665 : : /* We don't know the starting offset, but we do know that the string
666 : : has no internal zero bytes. If the offset falls within the bounds
667 : : of the string subtract the offset from the length of the string,
668 : : and return that. Otherwise the length is zero. Take care to
669 : : use SAVE_EXPR in case the OFFSET has side-effects. */
670 : 3547 : tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff)
671 : : : byteoff;
672 : 3547 : offsave = fold_convert_loc (loc, sizetype, offsave);
673 : 3547 : tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave,
674 : 3547 : size_int (len));
675 : 3547 : tree lenexp = fold_build2_loc (loc, MINUS_EXPR, sizetype, size_int (len),
676 : : offsave);
677 : 3547 : lenexp = fold_convert_loc (loc, ssizetype, lenexp);
678 : 3547 : return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp,
679 : 3547 : build_zero_cst (ssizetype));
680 : : }
681 : :
682 : : /* Offset from the beginning of the string in elements. */
683 : 765937 : HOST_WIDE_INT eltoff;
684 : :
685 : : /* We have a known offset into the string. Start searching there for
686 : : a null character if we can represent it as a single HOST_WIDE_INT. */
687 : 765937 : if (byteoff == 0)
688 : : eltoff = 0;
689 : 765937 : else if (! tree_fits_uhwi_p (byteoff) || tree_to_uhwi (byteoff) % eltsize)
690 : : eltoff = -1;
691 : : else
692 : 764749 : eltoff = tree_to_uhwi (byteoff) / eltsize;
693 : :
694 : : /* If the offset is known to be out of bounds, warn, and call strlen at
695 : : runtime. */
696 : 765937 : if (eltoff < 0 || eltoff >= maxelts)
697 : : {
698 : : /* Suppress multiple warnings for propagated constant strings. */
699 : 3686 : if (only_value != 2
700 : 3686 : && !warning_suppressed_p (arg, OPT_Warray_bounds_)
701 : 7372 : && warning_at (loc, OPT_Warray_bounds_,
702 : : "offset %qwi outside bounds of constant string",
703 : : eltoff))
704 : : {
705 : 679 : if (decl)
706 : 678 : inform (DECL_SOURCE_LOCATION (decl), "%qE declared here", decl);
707 : 679 : suppress_warning (arg, OPT_Warray_bounds_);
708 : : }
709 : 3686 : return NULL_TREE;
710 : : }
711 : :
712 : : /* If eltoff is larger than strelts but less than maxelts the
713 : : string length is zero, since the excess memory will be zero. */
714 : 762251 : if (eltoff > strelts)
715 : 0 : return ssize_int (0);
716 : :
717 : : /* Use strlen to search for the first zero byte. Since any strings
718 : : constructed with build_string will have nulls appended, we win even
719 : : if we get handed something like (char[4])"abcd".
720 : :
721 : : Since ELTOFF is our starting index into the string, no further
722 : : calculation is needed. */
723 : 762251 : unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
724 : 762251 : strelts - eltoff);
725 : :
726 : : /* Don't know what to return if there was no zero termination.
727 : : Ideally this would turn into a gcc_checking_assert over time.
728 : : Set DECL/LEN so callers can examine them. */
729 : 762251 : if (len >= maxelts - eltoff)
730 : : {
731 : 78333 : data->decl = decl;
732 : 78333 : data->off = byteoff;
733 : 78333 : data->minlen = ssize_int (len);
734 : 78333 : return NULL_TREE;
735 : : }
736 : :
737 : 683918 : return ssize_int (len);
738 : : }
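: :
: : /* A usage sketch with hypothetical trees: for ARG == &"hello"[1],
: :    c_strlen (ARG, 1) returns ssize_int (4). For &"foo\0bar"[OFF]
: :    with a variable OFF it returns NULL_TREE, since the result would
: :    depend on which side of the embedded NUL the offset lands. */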
739 : :
740 : : /* Return a constant integer corresponding to the target reading
741 : : GET_MODE_BITSIZE (MODE) bits from string constant STR. If
742 : : NULL_TERMINATED_P, reading stops after the first '\0' character and
743 : : all further ones are assumed to be zero; otherwise it reads as many
744 : : characters as needed. */
745 : :
746 : : rtx
747 : 298869 : c_readstr (const char *str, fixed_size_mode mode,
748 : : bool null_terminated_p/*=true*/)
749 : : {
750 : 298869 : auto_vec<target_unit, MAX_BITSIZE_MODE_ANY_INT / BITS_PER_UNIT> bytes;
751 : :
752 : 597738 : bytes.reserve (GET_MODE_SIZE (mode));
753 : :
754 : 298869 : target_unit ch = 1;
755 : 5173068 : for (unsigned int i = 0; i < GET_MODE_SIZE (mode); ++i)
756 : : {
757 : 2287665 : if (ch || !null_terminated_p)
758 : 2171563 : ch = (unsigned char) str[i];
759 : 2287665 : bytes.quick_push (ch);
760 : : }
761 : :
762 : 298869 : return native_decode_rtx (mode, bytes, 0);
763 : 298869 : }
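: :
: : /* For instance, on a little-endian target with 32-bit SImode,
: :    c_readstr ("hi", SImode) reads the bytes 'h', 'i', '\0', '\0'
: :    (zero-padding past the NUL) and yields a constant of value
: :    0x00006968; the byte order of the result is target-dependent. */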
764 : :
765 : : /* Cast a target constant CST to target CHAR and, if that value fits into
766 : : the host char type, return zero and put that value into the variable
767 : : pointed to by P. */
768 : :
769 : : static int
770 : 63278 : target_char_cast (tree cst, char *p)
771 : : {
772 : 63278 : unsigned HOST_WIDE_INT val, hostval;
773 : :
774 : 63278 : if (TREE_CODE (cst) != INTEGER_CST
775 : : || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
776 : : return 1;
777 : :
778 : : /* Do not care if it fits or not right here. */
779 : 63278 : val = TREE_INT_CST_LOW (cst);
780 : :
781 : 63278 : if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
782 : 63278 : val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;
783 : :
784 : 63278 : hostval = val;
785 : 63278 : if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
786 : 63278 : hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;
787 : :
788 : 63278 : if (val != hostval)
789 : : return 1;
790 : :
791 : 63278 : *p = hostval;
792 : 63278 : return 0;
793 : : }
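: :
: : /* E.g. for CST == build_int_cst (char_type_node, 'A') this stores
: :    'A' in *P and returns 0; for a non-INTEGER_CST, or a value that
: :    does not fit in the host char, it returns 1 and *P is left
: :    untouched. */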
794 : :
795 : : /* Similar to save_expr, but assumes that arbitrary code is not executed
796 : : in between the multiple evaluations. In particular, we assume that a
797 : : non-addressable local variable will not be modified. */
798 : :
799 : : static tree
800 : 1214604 : builtin_save_expr (tree exp)
801 : : {
802 : 1214604 : if (TREE_CODE (exp) == SSA_NAME
803 : 1049382 : || (TREE_ADDRESSABLE (exp) == 0
804 : 1049190 : && (TREE_CODE (exp) == PARM_DECL
805 : 460447 : || (VAR_P (exp) && !TREE_STATIC (exp)))))
806 : : return exp;
807 : :
808 : 458653 : return save_expr (exp);
809 : : }
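: :
: : /* So an SSA name, a non-addressable PARM_DECL or a non-addressable
: :    automatic VAR_DECL is returned unchanged (re-evaluation is safe
: :    under the assumption above), while anything else is wrapped in a
: :    SAVE_EXPR so it is evaluated only once. A typical use:
: :
: :      tree arg = builtin_save_expr (CALL_EXPR_ARG (exp, 0));  */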
810 : :
811 : : /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
812 : : times to get the address of either a higher stack frame, or a return
813 : : address located within it (depending on FNDECL_CODE). */
814 : :
815 : : static rtx
816 : 15583 : expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
817 : : {
818 : 15583 : int i;
819 : 15583 : rtx tem = INITIAL_FRAME_ADDRESS_RTX;
820 : 15583 : if (tem == NULL_RTX)
821 : : {
822 : : /* For a zero count with __builtin_return_address, we don't care what
823 : : frame address we return, because target-specific definitions will
824 : : override us. Therefore frame pointer elimination is OK, and using
825 : : the soft frame pointer is OK.
826 : :
827 : : For a nonzero count, or a zero count with __builtin_frame_address,
828 : : we require a stable offset from the current frame pointer to the
829 : : previous one, so we must use the hard frame pointer, and
830 : : we must disable frame pointer elimination. */
831 : 15583 : if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
832 : 4246 : tem = frame_pointer_rtx;
833 : : else
834 : : {
835 : 11337 : tem = hard_frame_pointer_rtx;
836 : :
837 : : /* Tell reload not to eliminate the frame pointer. */
838 : 11337 : crtl->accesses_prior_frames = 1;
839 : : }
840 : : }
841 : :
842 : 15583 : if (count > 0)
843 : 932 : SETUP_FRAME_ADDRESSES ();
844 : :
845 : : /* On the SPARC, the return address is not in the frame, it is in a
846 : : register. There is no way to access it off of the current frame
847 : : pointer, but it can be accessed off the previous frame pointer by
848 : : reading the value from the register window save area. */
849 : : if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
850 : : count--;
851 : :
852 : : /* Scan back COUNT frames to the specified frame. */
853 : 51459 : for (i = 0; i < count; i++)
854 : : {
855 : : /* Assume the dynamic chain pointer is in the word that the
856 : : frame address points to, unless otherwise specified. */
857 : 35876 : tem = DYNAMIC_CHAIN_ADDRESS (tem);
858 : 35876 : tem = memory_address (Pmode, tem);
859 : 35876 : tem = gen_frame_mem (Pmode, tem);
860 : 35876 : tem = copy_to_reg (tem);
861 : : }
862 : :
863 : : /* For __builtin_frame_address, return what we've got. But, on
864 : : the SPARC for example, we may have to add a bias. */
865 : 15583 : if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
866 : : return FRAME_ADDR_RTX (tem);
867 : :
868 : : /* For __builtin_return_address, get the return address from that frame. */
869 : : #ifdef RETURN_ADDR_RTX
870 : 7297 : tem = RETURN_ADDR_RTX (count, tem);
871 : : #else
872 : : tem = memory_address (Pmode,
873 : : plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
874 : : tem = gen_frame_mem (Pmode, tem);
875 : : #endif
876 : 5086 : return tem;
877 : : }
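: :
: : /* At the source level this expands, for example:
: :
: :      void *ra = __builtin_return_address (0);   ... count == 0
: :      void *fp = __builtin_frame_address (2);    ... count == 2
: :
: :    where a nonzero count walks the dynamic chain as above, and a
: :    zero count with __builtin_frame_address still forces use of the
: :    hard frame pointer. */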
878 : :
879 : : /* Alias set used for setjmp buffer. */
880 : : static alias_set_type setjmp_alias_set = -1;
881 : :
882 : : /* Construct the leading half of a __builtin_setjmp call. Control will
883 : : return to RECEIVER_LABEL. This is also called directly by the SJLJ
884 : : exception handling code. */
885 : :
886 : : void
887 : 841 : expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
888 : : {
889 : 841 : machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
890 : 841 : rtx stack_save;
891 : 841 : rtx mem;
892 : :
893 : 841 : if (setjmp_alias_set == -1)
894 : 241 : setjmp_alias_set = new_alias_set ();
895 : :
896 : 841 : buf_addr = convert_memory_address (Pmode, buf_addr);
897 : :
898 : 842 : buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
899 : :
900 : : /* We store the frame pointer and the address of receiver_label in
901 : : the buffer and use the rest of it for the stack save area, which
902 : : is machine-dependent. */
903 : :
904 : 842 : mem = gen_rtx_MEM (Pmode, buf_addr);
905 : 841 : set_mem_alias_set (mem, setjmp_alias_set);
906 : 841 : emit_move_insn (mem, hard_frame_pointer_rtx);
907 : :
908 : 842 : mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
909 : 1682 : GET_MODE_SIZE (Pmode))),
910 : 841 : set_mem_alias_set (mem, setjmp_alias_set);
911 : :
912 : 1682 : emit_move_insn (validize_mem (mem),
913 : 1682 : force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
914 : :
915 : 841 : stack_save = gen_rtx_MEM (sa_mode,
916 : : plus_constant (Pmode, buf_addr,
917 : 1682 : 2 * GET_MODE_SIZE (Pmode)));
918 : 841 : set_mem_alias_set (stack_save, setjmp_alias_set);
919 : 841 : emit_stack_save (SAVE_NONLOCAL, &stack_save);
920 : :
921 : : /* If there is further processing to do, do it. */
922 : 841 : if (targetm.have_builtin_setjmp_setup ())
923 : 0 : emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
924 : :
925 : : /* We have a nonlocal label. */
926 : 841 : cfun->has_nonlocal_label = 1;
927 : 841 : }
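: :
: : /* The resulting jmp_buf layout, in Pmode-sized words as stored
: :    above:
: :
: :      buf[0]    hard frame pointer
: :      buf[1]    address of RECEIVER_LABEL
: :      buf[2..]  machine-dependent stack save area (sa_mode)
: :
: :    expand_builtin_longjmp below reads these words back in the same
: :    order. */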
928 : :
929 : : /* Construct the trailing part of a __builtin_setjmp call. This is
930 : : also called directly by the SJLJ exception handling code.
931 : : If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler. */
932 : :
933 : : void
934 : 1345 : expand_builtin_setjmp_receiver (rtx receiver_label)
935 : : {
936 : 1345 : rtx chain;
937 : :
938 : : /* Mark the FP as used when we get here, so we have to make sure it's
939 : : marked as used by this function. */
940 : 1345 : emit_use (hard_frame_pointer_rtx);
941 : :
942 : : /* Mark the static chain as clobbered here so life information
943 : : doesn't get messed up for it. */
944 : 1345 : chain = rtx_for_static_chain (current_function_decl, true);
945 : 1345 : if (chain && REG_P (chain))
946 : 1 : emit_clobber (chain);
947 : :
948 : 1345 : if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
949 : : {
950 : : /* If the argument pointer can be eliminated in favor of the
951 : : frame pointer, we don't need to restore it. We assume here
952 : : that if such an elimination is present, it can always be used.
953 : : This is the case on all known machines; if we don't make this
954 : : assumption, we do unnecessary saving on many machines. */
955 : : size_t i;
956 : : static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
957 : :
958 : 2690 : for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
959 : 2690 : if (elim_regs[i].from == ARG_POINTER_REGNUM
960 : 2690 : && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
961 : : break;
962 : :
963 : 1345 : if (i == ARRAY_SIZE (elim_regs))
964 : : {
965 : : /* Now restore our arg pointer from the address at which it
966 : : was saved in our stack frame. */
967 : 0 : emit_move_insn (crtl->args.internal_arg_pointer,
968 : : copy_to_reg (get_arg_pointer_save_area ()));
969 : : }
970 : : }
971 : :
972 : 1345 : if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
973 : 0 : emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
974 : 1345 : else if (targetm.have_nonlocal_goto_receiver ())
975 : 0 : emit_insn (targetm.gen_nonlocal_goto_receiver ());
976 : : else
977 : : { /* Nothing */ }
978 : :
979 : : /* We must not allow the code we just generated to be reordered by
980 : : scheduling. Specifically, the update of the frame pointer must
981 : : happen immediately, not later. */
982 : 1345 : emit_insn (gen_blockage ());
983 : 1345 : }
984 : :
985 : : /* __builtin_longjmp is passed a pointer to an array of five words (not
986 : : all will be used on all machines). It operates similarly to the C
987 : : library function of the same name, but is more efficient. Much of
988 : : the code below is copied from the handling of non-local gotos. */
989 : :
990 : : static void
991 : 391 : expand_builtin_longjmp (rtx buf_addr, rtx value)
992 : : {
993 : 391 : rtx fp, lab, stack;
994 : 391 : rtx_insn *insn, *last;
995 : 391 : machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
996 : :
997 : : /* DRAP is needed for stack realign if longjmp is expanded to current
998 : : function */
999 : 391 : if (SUPPORTS_STACK_ALIGNMENT)
1000 : 391 : crtl->need_drap = true;
1001 : :
1002 : 391 : if (setjmp_alias_set == -1)
1003 : 330 : setjmp_alias_set = new_alias_set ();
1004 : :
1005 : 391 : buf_addr = convert_memory_address (Pmode, buf_addr);
1006 : :
1007 : 392 : buf_addr = force_reg (Pmode, buf_addr);
1008 : :
1009 : : /* We require that the user must pass a second argument of 1, because
1010 : : that is what builtin_setjmp will return. */
1011 : 391 : gcc_assert (value == const1_rtx);
1012 : :
1013 : 391 : last = get_last_insn ();
1014 : 391 : if (targetm.have_builtin_longjmp ())
1015 : 0 : emit_insn (targetm.gen_builtin_longjmp (buf_addr));
1016 : : else
1017 : : {
1018 : 392 : fp = gen_rtx_MEM (Pmode, buf_addr);
1019 : 392 : lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
1020 : 782 : GET_MODE_SIZE (Pmode)));
1021 : :
1022 : 391 : stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
1023 : 782 : 2 * GET_MODE_SIZE (Pmode)));
1024 : 391 : set_mem_alias_set (fp, setjmp_alias_set);
1025 : 391 : set_mem_alias_set (lab, setjmp_alias_set);
1026 : 391 : set_mem_alias_set (stack, setjmp_alias_set);
1027 : :
1028 : : /* Pick up FP, label, and SP from the block and jump. This code is
1029 : : from expand_goto in stmt.cc; see there for detailed comments. */
1030 : 391 : if (targetm.have_nonlocal_goto ())
1031 : : /* We have to pass a value to the nonlocal_goto pattern that will
1032 : : get copied into the static_chain pointer, but it does not matter
1033 : : what that value is, because builtin_setjmp does not use it. */
1034 : 0 : emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
1035 : : else
1036 : : {
1037 : 391 : emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1038 : 391 : emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1039 : :
1040 : 391 : lab = copy_to_reg (lab);
1041 : :
1042 : : /* Restore the frame pointer and stack pointer. We must use a
1043 : : temporary since the setjmp buffer may be a local. */
1044 : 391 : fp = copy_to_reg (fp);
1045 : 391 : emit_stack_restore (SAVE_NONLOCAL, stack);
1046 : :
1047 : : /* Ensure the frame pointer move is not optimized. */
1048 : 391 : emit_insn (gen_blockage ());
1049 : 391 : emit_clobber (hard_frame_pointer_rtx);
1050 : 391 : emit_clobber (frame_pointer_rtx);
1051 : 391 : emit_move_insn (hard_frame_pointer_rtx, fp);
1052 : :
1053 : 391 : emit_use (hard_frame_pointer_rtx);
1054 : 391 : emit_use (stack_pointer_rtx);
1055 : 391 : emit_indirect_jump (lab);
1056 : : }
1057 : : }
1058 : :
1059 : : /* Search backwards and mark the jump insn as a non-local goto.
1060 : : Note that this precludes the use of __builtin_longjmp to a
1061 : : __builtin_setjmp target in the same function. However, we've
1062 : : already cautioned the user that these functions are for
1063 : : internal exception handling use only. */
1064 : 782 : for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1065 : : {
1066 : 782 : gcc_assert (insn != last);
1067 : :
1068 : 782 : if (JUMP_P (insn))
1069 : : {
1070 : 391 : add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1071 : 391 : break;
1072 : : }
1073 : 391 : else if (CALL_P (insn))
1074 : : break;
1075 : : }
1076 : 391 : }
1077 : :
1078 : : static inline bool
1079 : 2261415 : more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1080 : : {
1081 : 2261415 : return (iter->i < iter->n);
1082 : : }
1083 : :
1084 : : /* This function validates the types of a function call argument list
1085 : : against a specified list of tree_codes. If the last specifier is a 0,
1086 : : that represents an ellipsis, otherwise the last specifier must be a
1087 : : VOID_TYPE. */
1088 : :
1089 : : static bool
1090 : 488342 : validate_arglist (const_tree callexpr, ...)
1091 : : {
1092 : 488342 : enum tree_code code;
1093 : 488342 : bool res = 0;
1094 : 488342 : va_list ap;
1095 : 488342 : const_call_expr_arg_iterator iter;
1096 : 488342 : const_tree arg;
1097 : :
1098 : 488342 : va_start (ap, callexpr);
1099 : 488342 : init_const_call_expr_arg_iterator (callexpr, &iter);
1100 : :
1101 : : /* Get a bitmap of pointer argument numbers declared attribute nonnull. */
1102 : 488342 : tree fn = CALL_EXPR_FN (callexpr);
1103 : 488342 : bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));
1104 : :
1105 : 488342 : for (unsigned argno = 1; ; ++argno)
1106 : : {
1107 : 1695941 : code = (enum tree_code) va_arg (ap, int);
1108 : :
1109 : 1695941 : switch (code)
1110 : : {
1111 : 2039 : case 0:
1112 : : /* This signifies an ellipses, any further arguments are all ok. */
1113 : 2039 : res = true;
1114 : 2039 : goto end;
1115 : 486070 : case VOID_TYPE:
1116 : : /* This signifies an endlink, if no arguments remain, return
1117 : : true, otherwise return false. */
1118 : 486070 : res = !more_const_call_expr_args_p (&iter);
1119 : 486070 : goto end;
1120 : 792378 : case POINTER_TYPE:
1121 : : /* The actual argument must be nonnull when either the whole
1122 : : called function has been declared nonnull, or when the formal
1123 : : argument corresponding to the actual argument has been. */
1124 : 792378 : if (argmap
1125 : 792378 : && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
1126 : : {
1127 : 280986 : arg = next_const_call_expr_arg (&iter);
1128 : 280986 : if (!validate_arg (arg, code) || integer_zerop (arg))
1129 : 59 : goto end;
1130 : : break;
1131 : : }
1132 : : /* FALLTHRU */
1133 : 926846 : default:
1134 : : /* If no parameters remain or the parameter's code does not
1135 : : match the specified code, return false. Otherwise continue
1136 : : checking any remaining arguments. */
1137 : 926846 : arg = next_const_call_expr_arg (&iter);
1138 : 926846 : if (!validate_arg (arg, code))
1139 : 174 : goto end;
1140 : : break;
1141 : : }
1142 : : }
1143 : :
1144 : : /* We need gotos here since we can only have one VA_CLOSE in a
1145 : : function. */
1146 : 488342 : end: ;
1147 : 488342 : va_end (ap);
1148 : :
1149 : 488342 : BITMAP_FREE (argmap);
1150 : :
1151 : 488342 : if (res)
1152 : 488109 : for (tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (TREE_TYPE (fn)));
1153 : 691991 : (attrs = lookup_attribute ("nonnull_if_nonzero", attrs));
1154 : 203882 : attrs = TREE_CHAIN (attrs))
1155 : : {
1156 : 203904 : tree args = TREE_VALUE (attrs);
1157 : 203904 : unsigned int idx = TREE_INT_CST_LOW (TREE_VALUE (args)) - 1;
1158 : 203904 : unsigned int idx2
1159 : 203904 : = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (args))) - 1;
1160 : 203904 : unsigned int idx3 = idx2;
1161 : 203904 : if (tree chain2 = TREE_CHAIN (TREE_CHAIN (args)))
1162 : 0 : idx3 = TREE_INT_CST_LOW (TREE_VALUE (chain2)) - 1;
1163 : 203904 : if (idx < (unsigned) call_expr_nargs (callexpr)
1164 : 203904 : && idx2 < (unsigned) call_expr_nargs (callexpr)
1165 : 203904 : && idx3 < (unsigned) call_expr_nargs (callexpr)
1166 : 203904 : && POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (callexpr, idx)))
1167 : 203904 : && integer_zerop (CALL_EXPR_ARG (callexpr, idx))
1168 : 82 : && INTEGRAL_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (callexpr, idx2)))
1169 : 82 : && integer_nonzerop (CALL_EXPR_ARG (callexpr, idx2))
1170 : 22 : && INTEGRAL_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (callexpr, idx3)))
1171 : 203926 : && integer_nonzerop (CALL_EXPR_ARG (callexpr, idx3)))
1172 : : return false;
1173 : : }
1174 : :
1175 : : return res;
1176 : : }
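: :
: : /* A typical call, as used for memcpy-style builtins elsewhere in
: :    this file:
: :
: :      if (!validate_arglist (exp,
: :                             POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE,
: :                             VOID_TYPE))
: :        return NULL_RTX;
: :
: :    The trailing VOID_TYPE marks the end of the fixed argument list;
: :    a trailing 0 would instead accept any further arguments. */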
1177 : :
1178 : : /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1179 : : and the address of the save area. */
1180 : :
1181 : : static rtx
1182 : 512 : expand_builtin_nonlocal_goto (tree exp)
1183 : : {
1184 : 512 : tree t_label, t_save_area;
1185 : 512 : rtx r_label, r_save_area, r_fp, r_sp;
1186 : 512 : rtx_insn *insn;
1187 : :
1188 : 512 : if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1189 : : return NULL_RTX;
1190 : :
1191 : 512 : t_label = CALL_EXPR_ARG (exp, 0);
1192 : 512 : t_save_area = CALL_EXPR_ARG (exp, 1);
1193 : :
1194 : 512 : r_label = expand_normal (t_label);
1195 : 512 : r_label = convert_memory_address (Pmode, r_label);
1196 : 512 : r_save_area = expand_normal (t_save_area);
1197 : 512 : r_save_area = convert_memory_address (Pmode, r_save_area);
1198 : : /* Copy the address of the save location to a register just in case it was
1199 : : based on the frame pointer. */
1200 : 512 : r_save_area = copy_to_reg (r_save_area);
1201 : 512 : r_fp = gen_rtx_MEM (Pmode, r_save_area);
1202 : 512 : r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1203 : : plus_constant (Pmode, r_save_area,
1204 : 1024 : GET_MODE_SIZE (Pmode)));
1205 : :
1206 : 512 : crtl->has_nonlocal_goto = 1;
1207 : :
1208 : : /* ??? We no longer need to pass the static chain value, afaik. */
1209 : 512 : if (targetm.have_nonlocal_goto ())
1210 : 0 : emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1211 : : else
1212 : : {
1213 : 512 : emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1214 : 512 : emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1215 : :
1216 : 512 : r_label = copy_to_reg (r_label);
1217 : :
1218 : : /* Restore the frame pointer and stack pointer. We must use a
1219 : : temporary since the setjmp buffer may be a local. */
1220 : 512 : r_fp = copy_to_reg (r_fp);
1221 : 512 : emit_stack_restore (SAVE_NONLOCAL, r_sp);
1222 : :
1223 : : /* Ensure the frame pointer move is not optimized. */
1224 : 512 : emit_insn (gen_blockage ());
1225 : 512 : emit_clobber (hard_frame_pointer_rtx);
1226 : 512 : emit_clobber (frame_pointer_rtx);
1227 : 512 : emit_move_insn (hard_frame_pointer_rtx, r_fp);
1228 : :
1229 : : /* USE of hard_frame_pointer_rtx added for consistency;
1230 : : not clear if really needed. */
1231 : 512 : emit_use (hard_frame_pointer_rtx);
1232 : 512 : emit_use (stack_pointer_rtx);
1233 : :
1234 : : /* If the architecture is using a GP register, we must
1235 : : conservatively assume that the target function makes use of it.
1236 : : The prologue of functions with nonlocal gotos must therefore
1237 : : initialize the GP register to the appropriate value, and we
1238 : : must then make sure that this value is live at the point
1239 : : of the jump. (Note that this doesn't necessarily apply
1240 : : to targets with a nonlocal_goto pattern; they are free
1241 : : to implement it in their own way. Note also that this is
1242 : : a no-op if the GP register is a global invariant.) */
1243 : 512 : unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
1244 : 0 : if (regnum != INVALID_REGNUM && fixed_regs[regnum])
1245 : 0 : emit_use (pic_offset_table_rtx);
1246 : :
1247 : 512 : emit_indirect_jump (r_label);
1248 : : }
1249 : :
1250 : : /* Search backwards to the jump insn and mark it as a
1251 : : non-local goto. */
1252 : 1024 : for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1253 : : {
1254 : 1024 : if (JUMP_P (insn))
1255 : : {
1256 : 512 : add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1257 : 512 : break;
1258 : : }
1259 : 512 : else if (CALL_P (insn))
1260 : : break;
1261 : : }
1262 : :
1263 : 512 : return const0_rtx;
1264 : : }
1265 : :
1266 : : /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1267 : : (not all will be used on all machines) that was passed to __builtin_setjmp.
1268 : : It updates the stack pointer in that block to the current value. This is
1269 : : also called directly by the SJLJ exception handling code. */
1270 : :
1271 : : void
1272 : 0 : expand_builtin_update_setjmp_buf (rtx buf_addr)
1273 : : {
1274 : 0 : machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1275 : 0 : buf_addr = convert_memory_address (Pmode, buf_addr);
1276 : 0 : rtx stack_save
1277 : 0 : = gen_rtx_MEM (sa_mode,
1278 : 0 : memory_address
1279 : : (sa_mode,
1280 : : plus_constant (Pmode, buf_addr,
1281 : 0 : 2 * GET_MODE_SIZE (Pmode))));
1282 : :
1283 : 0 : emit_stack_save (SAVE_NONLOCAL, &stack_save);
1284 : 0 : }
1285 : :
1286 : : /* Expand a call to __builtin_prefetch. For a target that does not support
1287 : : data prefetch, evaluate the memory address argument in case it has side
1288 : : effects. */
1289 : :
1290 : : static void
1291 : 2039 : expand_builtin_prefetch (tree exp)
1292 : : {
1293 : 2039 : tree arg0, arg1, arg2;
1294 : 2039 : int nargs;
1295 : 2039 : rtx op0, op1, op2;
1296 : :
1297 : 2039 : if (!validate_arglist (exp, POINTER_TYPE, 0))
1298 : : return;
1299 : :
1300 : 2039 : arg0 = CALL_EXPR_ARG (exp, 0);
1301 : :
1302 : : /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1303 : : zero (read) and argument 2 (locality) defaults to 3 (high degree of
1304 : : locality). */
1305 : 2039 : nargs = call_expr_nargs (exp);
1306 : 2039 : arg1 = nargs > 1 ? CALL_EXPR_ARG (exp, 1) : NULL_TREE;
1307 : 2001 : arg2 = nargs > 2 ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;
1308 : :
1309 : : /* Argument 0 is an address. */
1310 : 2135 : op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1311 : :
1312 : : /* Argument 1 (read/write flag) must be a compile-time constant int. */
1313 : 2039 : if (arg1 == NULL_TREE)
1314 : 38 : op1 = const0_rtx;
1315 : 2001 : else if (TREE_CODE (arg1) != INTEGER_CST)
1316 : : {
1317 : 0 : error ("second argument to %<__builtin_prefetch%> must be a constant");
1318 : 0 : op1 = const0_rtx;
1319 : : }
1320 : : else
1321 : 2001 : op1 = expand_normal (arg1);
1322 : : /* Argument 1 must be 0, 1 or 2. */
1323 : 2039 : if (!IN_RANGE (INTVAL (op1), 0, 2))
1324 : : {
1325 : 3 : warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1326 : : " using zero");
1327 : 3 : op1 = const0_rtx;
1328 : : }
1329 : :
1330 : : /* Argument 2 (locality) must be a compile-time constant int. */
1331 : 2039 : if (arg2 == NULL_TREE)
1332 : 63 : op2 = GEN_INT (3);
1333 : 1976 : else if (TREE_CODE (arg2) != INTEGER_CST)
1334 : : {
1335 : 0 : error ("third argument to %<__builtin_prefetch%> must be a constant");
1336 : 0 : op2 = const0_rtx;
1337 : : }
1338 : : else
1339 : 1976 : op2 = expand_normal (arg2);
1340 : : /* Argument 2 must be 0, 1, 2, or 3. */
1341 : 2039 : if (!IN_RANGE (INTVAL (op2), 0, 3))
1342 : : {
1343 : 3 : warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1344 : 3 : op2 = const0_rtx;
1345 : : }
1346 : :
1347 : 2039 : if (targetm.have_prefetch ())
1348 : : {
1349 : 2039 : class expand_operand ops[3];
1350 : :
1351 : 2039 : create_address_operand (&ops[0], op0);
1352 : 2039 : create_integer_operand (&ops[1], INTVAL (op1));
1353 : 2039 : create_integer_operand (&ops[2], INTVAL (op2));
1354 : 2039 : if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
1355 : 2039 : return;
1356 : : }
1357 : :
1358 : : /* Don't do anything with direct references to volatile memory, but
1359 : : generate code to handle other side effects. */
1360 : 0 : if (!MEM_P (op0) && side_effects_p (op0))
1361 : 0 : emit_insn (op0);
1362 : : }
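: :
: : /* Source-level examples of the arguments validated above:
: :
: :      __builtin_prefetch (p);          ... read, locality 3 (defaults)
: :      __builtin_prefetch (p, 1, 0);    ... write, no temporal locality
: :
: :    the read/write and locality arguments must be compile-time
: :    integer constants. */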
1363 : :
1364 : : /* Get a MEM rtx for expression EXP which is the address of an operand
1365 : : to be used in a string instruction (cmpstrsi, cpymemsi, ...). LEN is
1366 : : the maximum length of the block of memory that might be accessed or
1367 : : NULL if unknown. */
1368 : :
1369 : : rtx
1370 : 724599 : get_memory_rtx (tree exp, tree len)
1371 : : {
1372 : 724599 : tree orig_exp = exp, base;
1373 : 724599 : rtx addr, mem;
1374 : :
1375 : 724599 : gcc_checking_assert
1376 : : (ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)))));
1377 : :
1379 : : /* When EXP is an unresolved SAVE_EXPR, MEM_ATTRS can still be derived
1380 : : from its expression; for expr->a.b only <variable>.a.b is recorded. */
1380 : 724599 : if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1381 : 1409 : exp = TREE_OPERAND (exp, 0);
1382 : :
1383 : 724599 : addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1384 : 724599 : mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1385 : :
1386 : : /* Get an expression we can use to find the attributes to assign to MEM.
1387 : : First remove any nops. */
1388 : 1449198 : while (CONVERT_EXPR_P (exp)
1389 : 724602 : && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1390 : 3 : exp = TREE_OPERAND (exp, 0);
1391 : :
1393 : : /* Build a MEM_REF representing the whole accessed area as a byte blob
1394 : : (as builtin stringops may alias with anything). */
1394 : 724599 : exp = fold_build2 (MEM_REF,
1395 : : build_array_type (char_type_node,
1396 : : build_range_type (sizetype,
1397 : : size_one_node, len)),
1398 : : exp, build_int_cst (ptr_type_node, 0));
1399 : :
1400 : : /* If the MEM_REF has no acceptable address, try to get the base object
1401 : : from the original address we got, and build an all-aliasing
1402 : : unknown-sized access to that one. */
1403 : 724599 : if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1404 : 724587 : set_mem_attributes (mem, exp, 0);
1405 : 12 : else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1406 : 12 : && (base = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1407 : : 0))))
1408 : : {
1409 : 12 : unsigned int align = get_pointer_alignment (TREE_OPERAND (exp, 0));
1410 : 12 : exp = build_fold_addr_expr (base);
1411 : 12 : exp = fold_build2 (MEM_REF,
1412 : : build_array_type (char_type_node,
1413 : : build_range_type (sizetype,
1414 : : size_zero_node,
1415 : : NULL)),
1416 : : exp, build_int_cst (ptr_type_node, 0));
1417 : 12 : set_mem_attributes (mem, exp, 0);
1418 : : /* Since we stripped parts make sure the offset is unknown and the
1419 : : alignment is computed from the original address. */
1420 : 12 : clear_mem_offset (mem);
1421 : 12 : set_mem_align (mem, align);
1422 : : }
1423 : 724599 : set_mem_alias_set (mem, 0);
1424 : 724599 : return mem;
1425 : : }
1426 : :
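     : : /* Sketch of a typical use (illustrative; dst, src and len are
     : :    hypothetical trees): the string-op expanders later in this file
     : :    call, e.g. when expanding memcpy (dst, src, len),
     : :
     : :      rtx dest_mem = get_memory_rtx (dst, len);
     : :      rtx src_mem = get_memory_rtx (src, len);
     : :
     : :    obtaining BLKmode MEMs whose attributes cover the maximal accessed
     : :    range and whose alias set is 0, i.e. they may alias anything.  */
     : :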
1427 : : /* Built-in functions to perform an untyped call and return. */
1428 : :
1429 : : /* Wrapper that implicitly applies a delta when getting or setting the
1430 : : enclosed value. */
1431 : : template <typename T>
1432 : : class delta_type
1433 : : {
1434 : : T &value; T const delta;
1435 : : public:
1436 : 2524 : delta_type (T &val, T dlt) : value (val), delta (dlt) {}
1437 : 1809 : operator T () const { return value + delta; }
1438 : 715 : T operator = (T val) const { value = val - delta; return val; }
1439 : : };
1440 : :
1441 : : #define saved_apply_args_size \
1442 : : (delta_type<int> (this_target_builtins->x_apply_args_size_plus_one, -1))
1443 : : #define apply_args_mode \
1444 : : (this_target_builtins->x_apply_args_mode)
1445 : : #define saved_apply_result_size \
1446 : : (delta_type<int> (this_target_builtins->x_apply_result_size_plus_one, -1))
1447 : : #define apply_result_mode \
1448 : : (this_target_builtins->x_apply_result_mode)
1449 : :
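     : : /* Worked example of the delta wrapper above (illustrative): the
     : :    underlying field x_apply_args_size_plus_one starts at 0, so reading
     : :    saved_apply_args_size yields 0 + (-1) == -1, the "not computed yet"
     : :    sentinel tested below.  Assigning a size N stores N - (-1) == N + 1,
     : :    which keeps a legitimately cached size of 0 distinct from the
     : :    uninitialized state.  */
     : :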
1450 : : /* Return the size required for the block returned by __builtin_apply_args,
1451 : : and initialize apply_args_mode. */
1452 : :
1453 : : static int
1454 : 947 : apply_args_size (void)
1455 : : {
1456 : 947 : int size = saved_apply_args_size;
1457 : 947 : int align;
1458 : 947 : unsigned int regno;
1459 : :
1460 : : /* The values computed by this function never change. */
1461 : 947 : if (size < 0)
1462 : : {
1463 : : /* The first value is the incoming arg-pointer. */
1464 : 360 : size = GET_MODE_SIZE (Pmode);
1465 : :
1466 : : /* The second value is the structure value address unless this is
1467 : : passed as an "invisible" first argument. */
1468 : 360 : if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1469 : 0 : size += GET_MODE_SIZE (Pmode);
1470 : :
1471 : 33480 : for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1472 : 33120 : if (FUNCTION_ARG_REGNO_P (regno))
1473 : : {
1474 : 5400 : fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);
1475 : :
1476 : 5400 : if (mode != VOIDmode)
1477 : : {
1478 : 5400 : align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1479 : 5400 : if (size % align != 0)
1480 : 1 : size = CEIL (size, align) * align;
1481 : 5400 : size += GET_MODE_SIZE (mode);
1482 : 5400 : apply_args_mode[regno] = mode;
1483 : : }
1484 : : else
1485 : 0 : apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1486 : : }
1487 : : else
1488 : 27720 : apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1489 : :
1490 : 360 : saved_apply_args_size = size;
1491 : : }
1492 : 947 : return size;
1493 : : }
1494 : :
1495 : : /* Return the size required for the block returned by __builtin_apply,
1496 : : and initialize apply_result_mode. */
1497 : :
1498 : : static int
1499 : 862 : apply_result_size (void)
1500 : : {
1501 : 862 : int size = saved_apply_result_size;
1502 : 862 : int align, regno;
1503 : :
1504 : : /* The values computed by this function never change. */
1505 : 862 : if (size < 0)
1506 : : {
1507 : : size = 0;
1508 : :
1509 : 33015 : for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1510 : 32660 : if (targetm.calls.function_value_regno_p (regno))
1511 : : {
1512 : 2840 : fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);
1513 : :
1514 : 2840 : if (mode != VOIDmode)
1515 : : {
1516 : 2840 : align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1517 : 2840 : if (size % align != 0)
1518 : : size = CEIL (size, align) * align;
1519 : 2840 : size += GET_MODE_SIZE (mode);
1520 : 2840 : apply_result_mode[regno] = mode;
1521 : : }
1522 : : else
1523 : 0 : apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1524 : : }
1525 : : else
1526 : 29820 : apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1527 : :
1528 : : /* Allow targets that use untyped_call and untyped_return to override
1529 : : the size so that machine-specific information can be stored here. */
1530 : : #ifdef APPLY_RESULT_SIZE
1531 : 355 : size = APPLY_RESULT_SIZE;
1532 : : #endif
1533 : :
1534 : 355 : saved_apply_result_size = size;
1535 : : }
1536 : 862 : return size;
1537 : : }
1538 : :
1539 : : /* Create a vector describing the result block RESULT. If SAVEP is true,
1540 : : the result block is used to save the values; otherwise it is used to
1541 : : restore the values. */
1542 : :
1543 : : static rtx
1544 : 483 : result_vector (int savep, rtx result)
1545 : : {
1546 : 483 : int regno, size, align, nelts;
1547 : 483 : fixed_size_mode mode;
1548 : 483 : rtx reg, mem;
1549 : 483 : rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1550 : :
1551 : 483 : size = nelts = 0;
1552 : 44919 : for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1553 : 44436 : if ((mode = apply_result_mode[regno]) != VOIDmode)
1554 : : {
1555 : 3864 : align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1556 : 3864 : if (size % align != 0)
1557 : 0 : size = CEIL (size, align) * align;
1558 : 3864 : reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1559 : 3864 : mem = adjust_address (result, mode, size);
1560 : 7728 : savevec[nelts++] = (savep
1561 : 7728 : ? gen_rtx_SET (mem, reg)
1562 : 0 : : gen_rtx_SET (reg, mem));
1563 : 7728 : size += GET_MODE_SIZE (mode);
1564 : : }
1565 : 483 : return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1566 : : }
1567 : :
1568 : : /* Save the state required to perform an untyped call with the same
1569 : : arguments as were passed to the current function. */
1570 : :
1571 : : static rtx
1572 : 464 : expand_builtin_apply_args_1 (void)
1573 : : {
1574 : 464 : rtx registers, tem;
1575 : 464 : int size, align, regno;
1576 : 464 : fixed_size_mode mode;
1577 : 464 : rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1578 : :
1579 : : /* Create a block where the arg-pointer, structure value address,
1580 : : and argument registers can be saved. */
1581 : 464 : registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1582 : :
1583 : : /* Walk past the arg-pointer and structure value address. */
1584 : 464 : size = GET_MODE_SIZE (Pmode);
1585 : 464 : if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1586 : 0 : size += GET_MODE_SIZE (Pmode);
1587 : :
1588 : : /* Save each register used in calling a function to the block. */
1589 : 43152 : for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1590 : 42688 : if ((mode = apply_args_mode[regno]) != VOIDmode)
1591 : : {
1592 : 6960 : align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1593 : 6960 : if (size % align != 0)
1594 : 1 : size = CEIL (size, align) * align;
1595 : :
1596 : 6960 : tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1597 : :
1598 : 6960 : emit_move_insn (adjust_address (registers, mode, size), tem);
1599 : 13920 : size += GET_MODE_SIZE (mode);
1600 : : }
1601 : :
1602 : : /* Save the arg pointer to the block. */
1603 : 464 : tem = copy_to_reg (crtl->args.internal_arg_pointer);
1605 : : /* We need the pointer as the caller actually passed it to us, not
1606 : : as we might have pretended it was passed. Make sure it's a valid
1606 : : operand, as emit_move_insn isn't expected to handle a PLUS. */
1607 : 464 : if (STACK_GROWS_DOWNWARD)
1608 : 464 : tem
1609 : 464 : = force_operand (plus_constant (Pmode, tem,
1610 : 464 : crtl->args.pretend_args_size),
1611 : : NULL_RTX);
1612 : 464 : emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1613 : :
1614 : 464 : size = GET_MODE_SIZE (Pmode);
1615 : :
1616 : : /* Save the structure value address unless this is passed as an
1617 : : "invisible" first argument. */
1618 : 464 : if (struct_incoming_value)
1619 : 0 : emit_move_insn (adjust_address (registers, Pmode, size),
1620 : : copy_to_reg (struct_incoming_value));
1621 : :
1622 : : /* Return the address of the block. */
1623 : 464 : return copy_addr_to_reg (XEXP (registers, 0));
1624 : : }
1625 : :
1627 : : /* __builtin_apply_args returns a block of memory allocated on
1628 : : the stack into which are stored the arg pointer, structure
1628 : : value address, static chain, and all the registers that might
1629 : : possibly be used in performing a function call. The code is
1630 : : moved to the start of the function so the incoming values are
1631 : : saved. */
1632 : :
1633 : : static rtx
1634 : 464 : expand_builtin_apply_args (void)
1635 : : {
1636 : : /* Don't do __builtin_apply_args more than once in a function.
1637 : : Save the result of the first call and reuse it. */
1638 : 464 : if (apply_args_value != 0)
1639 : : return apply_args_value;
1640 : 464 : {
1641 : : /* When this function is called, it means that registers must be
1642 : : saved on entry to this function. So we migrate the
1643 : : call to the first insn of this function. */
1644 : 464 : rtx temp;
1645 : :
1646 : 464 : start_sequence ();
1647 : 464 : temp = expand_builtin_apply_args_1 ();
1648 : 464 : rtx_insn *seq = end_sequence ();
1649 : :
1650 : 464 : apply_args_value = temp;
1651 : :
1652 : : /* Put the insns after the NOTE that starts the function.
1653 : : If this is inside a start_sequence, make the outer-level insn
1654 : : chain current, so the code is placed at the start of the
1655 : : function. If internal_arg_pointer is a non-virtual pseudo,
1657 : : it needs to be placed after the code that initializes
1658 : : that pseudo. */
1658 : 464 : push_topmost_sequence ();
1659 : 464 : if (REG_P (crtl->args.internal_arg_pointer)
1660 : 464 : && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1661 : 0 : emit_insn_before (seq, parm_birth_insn);
1662 : : else
1663 : 464 : emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1664 : 464 : pop_topmost_sequence ();
1665 : 464 : return temp;
1666 : : }
1667 : : }
1668 : :
1669 : : /* Perform an untyped call and save the state required to perform an
1670 : : untyped return of whatever value was returned by the given function. */
1671 : :
1672 : : static rtx
1673 : 483 : expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1674 : : {
1675 : 483 : int size, align, regno;
1676 : 483 : fixed_size_mode mode;
1677 : 483 : rtx incoming_args, result, reg, dest, src;
1678 : 483 : rtx_call_insn *call_insn;
1679 : 483 : rtx old_stack_level = 0;
1680 : 483 : rtx call_fusage = 0;
1681 : 483 : rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1682 : :
1683 : 483 : arguments = convert_memory_address (Pmode, arguments);
1684 : :
1685 : : /* Create a block where the return registers can be saved. */
1686 : 483 : result = assign_stack_local (BLKmode, apply_result_size (), -1);
1687 : :
1688 : : /* Fetch the arg pointer from the ARGUMENTS block. */
1689 : 483 : incoming_args = gen_reg_rtx (Pmode);
1690 : 483 : emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1691 : 483 : if (!STACK_GROWS_DOWNWARD)
1692 : : incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1693 : : incoming_args, 0, OPTAB_LIB_WIDEN);
1694 : :
1695 : : /* Push a new argument block and copy the arguments. Do not allow
1696 : : the (potential) memcpy call below to interfere with our stack
1697 : : manipulations. */
1698 : 483 : do_pending_stack_adjust ();
1699 : 483 : NO_DEFER_POP;
1700 : :
1701 : : /* Save the stack with nonlocal if available. */
1702 : 483 : if (targetm.have_save_stack_nonlocal ())
1703 : 483 : emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1704 : : else
1705 : 0 : emit_stack_save (SAVE_BLOCK, &old_stack_level);
1706 : :
1707 : : /* Allocate a block of memory onto the stack and copy the memory
1708 : : arguments to the outgoing arguments address. We can pass TRUE
1709 : : as the 4th argument because we just saved the stack pointer
1710 : : and will restore it right after the call. */
1711 : 966 : allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);
1712 : :
1713 : : /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1714 : : may have already set current_function_calls_alloca to true.
1715 : : current_function_calls_alloca won't be set if argsize is zero,
1716 : : so we have to guarantee need_drap is true here. */
1717 : 483 : if (SUPPORTS_STACK_ALIGNMENT)
1718 : 483 : crtl->need_drap = true;
1719 : :
1720 : 483 : dest = virtual_outgoing_args_rtx;
1721 : 483 : if (!STACK_GROWS_DOWNWARD)
1722 : : {
1723 : : if (CONST_INT_P (argsize))
1724 : : dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1725 : : else
1726 : : dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1727 : : }
1728 : 483 : dest = gen_rtx_MEM (BLKmode, dest);
1729 : 483 : set_mem_align (dest, PARM_BOUNDARY);
1730 : 483 : src = gen_rtx_MEM (BLKmode, incoming_args);
1731 : 483 : set_mem_align (src, PARM_BOUNDARY);
1732 : 483 : emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1733 : :
1734 : : /* Refer to the argument block. */
1735 : 483 : apply_args_size ();
1736 : 483 : arguments = gen_rtx_MEM (BLKmode, arguments);
1737 : 483 : set_mem_align (arguments, PARM_BOUNDARY);
1738 : :
1739 : : /* Walk past the arg-pointer and structure value address. */
1740 : 483 : size = GET_MODE_SIZE (Pmode);
1741 : 483 : if (struct_value)
1742 : 0 : size += GET_MODE_SIZE (Pmode);
1743 : :
1744 : : /* Restore each of the registers previously saved. Make USE insns
1745 : : for each of these registers for use in making the call. */
1746 : 44919 : for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1747 : 44436 : if ((mode = apply_args_mode[regno]) != VOIDmode)
1748 : : {
1749 : 7245 : align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1750 : 7245 : if (size % align != 0)
1751 : 0 : size = CEIL (size, align) * align;
1752 : 7245 : reg = gen_rtx_REG (mode, regno);
1753 : 7245 : emit_move_insn (reg, adjust_address (arguments, mode, size));
1754 : 7245 : use_reg (&call_fusage, reg);
1755 : 14490 : size += GET_MODE_SIZE (mode);
1756 : : }
1757 : :
1758 : : /* Restore the structure value address unless this is passed as an
1759 : : "invisible" first argument. */
1760 : 483 : size = GET_MODE_SIZE (Pmode);
1761 : 483 : if (struct_value)
1762 : : {
1763 : 0 : rtx value = gen_reg_rtx (Pmode);
1764 : 0 : emit_move_insn (value, adjust_address (arguments, Pmode, size));
1765 : 0 : emit_move_insn (struct_value, value);
1766 : 0 : if (REG_P (struct_value))
1767 : 0 : use_reg (&call_fusage, struct_value);
1768 : : }
1769 : :
1770 : : /* All arguments and registers used for the call are set up by now! */
1771 : 483 : function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1772 : :
1773 : : /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1774 : : and we don't want to load it into a register as an optimization,
1775 : : because prepare_call_address already did it if it should be done. */
1776 : 483 : if (GET_CODE (function) != SYMBOL_REF)
1777 : 29 : function = memory_address (FUNCTION_MODE, function);
1778 : :
1779 : : /* Generate the actual call instruction and save the return value. */
1780 : 483 : if (targetm.have_untyped_call ())
1781 : : {
1782 : 483 : rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1783 : 483 : rtx_insn *seq = targetm.gen_untyped_call (mem, result,
1784 : : result_vector (1, result));
1785 : 5800 : for (rtx_insn *insn = seq; insn; insn = NEXT_INSN (insn))
1786 : 5317 : if (CALL_P (insn))
1787 : 483 : add_reg_note (insn, REG_UNTYPED_CALL, NULL_RTX);
1788 : 483 : emit_insn (seq);
1789 : : }
1790 : 0 : else if (targetm.have_call_value ())
1791 : : {
1792 : : rtx valreg = 0;
1793 : :
1794 : : /* Locate the unique return register. It is not possible to
1795 : : express a call that sets more than one return register using
1796 : : call_value; use untyped_call for that. In fact, untyped_call
1797 : : only needs to save the return registers in the given block. */
1798 : 0 : for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1799 : 0 : if ((mode = apply_result_mode[regno]) != VOIDmode)
1800 : : {
1801 : 0 : gcc_assert (!valreg); /* have_untyped_call required. */
1802 : :
1803 : 0 : valreg = gen_rtx_REG (mode, regno);
1804 : : }
1805 : :
1806 : 0 : emit_insn (targetm.gen_call_value (valreg,
1807 : : gen_rtx_MEM (FUNCTION_MODE, function),
1808 : : const0_rtx, NULL_RTX, const0_rtx));
1809 : :
1810 : 0 : emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1811 : : }
1812 : : else
1813 : 0 : gcc_unreachable ();
1814 : :
1815 : : /* Find the CALL insn we just emitted, and attach the register usage
1816 : : information. */
1817 : 483 : call_insn = last_call_insn ();
1818 : 483 : add_function_usage_to (call_insn, call_fusage);
1819 : :
1820 : : /* Restore the stack. */
1821 : 483 : if (targetm.have_save_stack_nonlocal ())
1822 : 483 : emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1823 : : else
1824 : 0 : emit_stack_restore (SAVE_BLOCK, old_stack_level);
1825 : 483 : fixup_args_size_notes (call_insn, get_last_insn (), 0);
1826 : :
1827 : 483 : OK_DEFER_POP;
1828 : :
1829 : : /* Return the address of the result block. */
1830 : 483 : result = copy_addr_to_reg (XEXP (result, 0));
1831 : 483 : return convert_memory_address (ptr_mode, result);
1832 : : }
1833 : :
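     : : /* User-level sketch of the untyped-call machinery (illustrative; fn
     : :    and the 64-byte bound are hypothetical):
     : :
     : :      void *args = __builtin_apply_args ();
     : :      void *res = __builtin_apply ((void (*) ()) fn, args, 64);
     : :      __builtin_return (res);
     : :
     : :    __builtin_apply copies the saved argument block onto a fresh
     : :    argument area, calls FN, and returns the address of the saved
     : :    return-register block that __builtin_return then restores.  */
     : :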
1834 : : /* Perform an untyped return. */
1835 : :
1836 : : static void
1837 : 379 : expand_builtin_return (rtx result)
1838 : : {
1839 : 379 : int size, align, regno;
1840 : 379 : fixed_size_mode mode;
1841 : 379 : rtx reg;
1842 : 379 : rtx_insn *call_fusage = 0;
1843 : :
1844 : 379 : result = convert_memory_address (Pmode, result);
1845 : :
1846 : 379 : apply_result_size ();
1847 : 379 : result = gen_rtx_MEM (BLKmode, result);
1848 : :
1849 : 379 : if (targetm.have_untyped_return ())
1850 : : {
1851 : 0 : rtx vector = result_vector (0, result);
1852 : 0 : emit_jump_insn (targetm.gen_untyped_return (result, vector));
1853 : 0 : emit_barrier ();
1854 : 0 : return;
1855 : : }
1856 : :
1857 : : /* Restore the return value and note that each value is used. */
1858 : : size = 0;
1859 : 35247 : for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1860 : 34868 : if ((mode = apply_result_mode[regno]) != VOIDmode)
1861 : : {
1862 : 3032 : align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1863 : 3032 : if (size % align != 0)
1864 : 0 : size = CEIL (size, align) * align;
1865 : 3032 : reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1866 : 3032 : emit_move_insn (reg, adjust_address (result, mode, size));
1867 : :
1868 : 3032 : push_to_sequence (call_fusage);
1869 : 3032 : emit_use (reg);
1870 : 3032 : call_fusage = end_sequence ();
1871 : 6064 : size += GET_MODE_SIZE (mode);
1872 : : }
1873 : :
1874 : : /* Put the USE insns before the return. */
1875 : 379 : emit_insn (call_fusage);
1876 : :
1878 : : /* Return whatever values were restored by jumping directly to the end
1878 : : of the function. */
1879 : 379 : expand_naked_return ();
1880 : : }
1881 : :
1882 : : /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1883 : :
1884 : : int
1885 : 2427 : type_to_class (tree type)
1886 : : {
1887 : 2427 : switch (TREE_CODE (type))
1888 : : {
1889 : : case VOID_TYPE: return void_type_class;
1890 : 927 : case INTEGER_TYPE: return integer_type_class;
1891 : 39 : case ENUMERAL_TYPE: return enumeral_type_class;
1892 : 38 : case BOOLEAN_TYPE: return boolean_type_class;
1893 : 1013 : case POINTER_TYPE: return pointer_type_class;
1894 : 27 : case REFERENCE_TYPE: return reference_type_class;
1895 : 36 : case OFFSET_TYPE: return offset_type_class;
1896 : 132 : case REAL_TYPE: return real_type_class;
1897 : 40 : case COMPLEX_TYPE: return complex_type_class;
1898 : 20 : case FUNCTION_TYPE: return function_type_class;
1899 : 0 : case METHOD_TYPE: return method_type_class;
1900 : 44 : case RECORD_TYPE: return record_type_class;
1901 : 44 : case UNION_TYPE:
1902 : 44 : case QUAL_UNION_TYPE: return union_type_class;
1903 : 24 : case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1904 : 24 : ? string_type_class : array_type_class);
1905 : 0 : case LANG_TYPE: return lang_type_class;
1906 : 0 : case OPAQUE_TYPE: return opaque_type_class;
1907 : 0 : case BITINT_TYPE: return bitint_type_class;
1908 : 30 : case VECTOR_TYPE: return vector_type_class;
1909 : 3 : default: return no_type_class;
1910 : : }
1911 : : }
1912 : :
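     : : /* Sketch of the mapping as observable from user code (illustrative;
     : :    the returned values are the enumerators above, as defined in
     : :    typeclass.h):
     : :
     : :      __builtin_classify_type (0)           // integer_type_class
     : :      __builtin_classify_type (0.0)         // real_type_class
     : :      __builtin_classify_type ((void *) 0)  // pointer_type_class
     : :  */
     : :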
1913 : : /* Expand a call EXP to __builtin_classify_type. */
1914 : :
1915 : : static rtx
1916 : 0 : expand_builtin_classify_type (tree exp)
1917 : : {
1918 : 0 : if (call_expr_nargs (exp))
1919 : 0 : return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1920 : 0 : return GEN_INT (no_type_class);
1921 : : }
1922 : :
1923 : : /* This helper macro, meant to be used in mathfn_built_in below, determines
1924 : : which among a set of builtin math functions is appropriate for a given type
1926 : : mode. The `F' (float) and `L' (long double) variants are automatically
1927 : : generated from the 'double' case. If a function supports the _Float<N>
1928 : : and _Float<N>X types, there are additional variants considered with
1929 : : 'F32', 'F64', 'F128', etc. suffixes. */
1929 : : #define CASE_MATHFN(MATHFN) \
1930 : : CASE_CFN_##MATHFN: \
1931 : : fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1932 : : fcodel = BUILT_IN_##MATHFN##L ; break;
1933 : : /* Similar to the above, but also add support for the _Float<N> and _Float<N>X
1934 : : types. */
1935 : : #define CASE_MATHFN_FLOATN(MATHFN) \
1936 : : CASE_CFN_##MATHFN: \
1937 : : fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1938 : : fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
1939 : : fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
1940 : : fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
1941 : : fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
1942 : : break;
1943 : : /* Similar to above, but appends _R after any F/L suffix. */
1944 : : #define CASE_MATHFN_REENT(MATHFN) \
1945 : : case CFN_BUILT_IN_##MATHFN##_R: \
1946 : : case CFN_BUILT_IN_##MATHFN##F_R: \
1947 : : case CFN_BUILT_IN_##MATHFN##L_R: \
1948 : : fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1949 : : fcodel = BUILT_IN_##MATHFN##L_R ; break;
1950 : :
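     : : /* For illustration, CASE_MATHFN (EXP10) above expands to roughly:
     : :
     : :      CASE_CFN_EXP10:
     : :        fcode = BUILT_IN_EXP10; fcodef = BUILT_IN_EXP10F;
     : :        fcodel = BUILT_IN_EXP10L; break;
     : :
     : :    while CASE_MATHFN_FLOATN additionally fills in the _Float<N> and
     : :    _Float<N>X codes (fcodef16 through fcodef128x).  */
     : :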
1951 : : /* Return a function equivalent to FN but operating on floating-point
1952 : : values of type TYPE, or END_BUILTINS if no such function exists.
1953 : : This is purely an operation on function codes; it does not guarantee
1954 : : that the target actually has an implementation of the function. */
1955 : :
1956 : : static built_in_function
1957 : 600623 : mathfn_built_in_2 (tree type, combined_fn fn)
1958 : : {
1959 : 600623 : tree mtype;
1960 : 600623 : built_in_function fcode, fcodef, fcodel;
1961 : 600623 : built_in_function fcodef16 = END_BUILTINS;
1962 : 600623 : built_in_function fcodef32 = END_BUILTINS;
1963 : 600623 : built_in_function fcodef64 = END_BUILTINS;
1964 : 600623 : built_in_function fcodef128 = END_BUILTINS;
1965 : 600623 : built_in_function fcodef32x = END_BUILTINS;
1966 : 600623 : built_in_function fcodef64x = END_BUILTINS;
1967 : 600623 : built_in_function fcodef128x = END_BUILTINS;
1968 : :
1969 : : /* If <math.h> has been included somehow, HUGE_VAL and NAN definitions
1970 : : break the uses below. */
1971 : : #undef HUGE_VAL
1972 : : #undef NAN
1973 : :
1974 : 600623 : switch (fn)
1975 : : {
1976 : : #define SEQ_OF_CASE_MATHFN \
1977 : : CASE_MATHFN_FLOATN (ACOS) \
1978 : : CASE_MATHFN_FLOATN (ACOSH) \
1979 : : CASE_MATHFN_FLOATN (ASIN) \
1980 : : CASE_MATHFN_FLOATN (ASINH) \
1981 : : CASE_MATHFN_FLOATN (ATAN) \
1982 : : CASE_MATHFN_FLOATN (ATAN2) \
1983 : : CASE_MATHFN_FLOATN (ATANH) \
1984 : : CASE_MATHFN_FLOATN (CBRT) \
1985 : : CASE_MATHFN_FLOATN (CEIL) \
1986 : : CASE_MATHFN (CEXPI) \
1987 : : CASE_MATHFN_FLOATN (COPYSIGN) \
1988 : : CASE_MATHFN_FLOATN (COS) \
1989 : : CASE_MATHFN_FLOATN (COSH) \
1990 : : CASE_MATHFN (DREM) \
1991 : : CASE_MATHFN_FLOATN (ERF) \
1992 : : CASE_MATHFN_FLOATN (ERFC) \
1993 : : CASE_MATHFN_FLOATN (EXP) \
1994 : : CASE_MATHFN (EXP10) \
1995 : : CASE_MATHFN_FLOATN (EXP2) \
1996 : : CASE_MATHFN_FLOATN (EXPM1) \
1997 : : CASE_MATHFN_FLOATN (FABS) \
1998 : : CASE_MATHFN_FLOATN (FDIM) \
1999 : : CASE_MATHFN_FLOATN (FLOOR) \
2000 : : CASE_MATHFN_FLOATN (FMA) \
2001 : : CASE_MATHFN_FLOATN (FMAX) \
2002 : : CASE_MATHFN_FLOATN (FMIN) \
2003 : : CASE_MATHFN_FLOATN (FMOD) \
2004 : : CASE_MATHFN_FLOATN (FREXP) \
2005 : : CASE_MATHFN (GAMMA) \
2006 : : CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */ \
2007 : : CASE_MATHFN_FLOATN (HUGE_VAL) \
2008 : : CASE_MATHFN_FLOATN (HYPOT) \
2009 : : CASE_MATHFN_FLOATN (ILOGB) \
2010 : : CASE_MATHFN (ICEIL) \
2011 : : CASE_MATHFN (IFLOOR) \
2012 : : CASE_MATHFN_FLOATN (INF) \
2013 : : CASE_MATHFN (IRINT) \
2014 : : CASE_MATHFN (IROUND) \
2015 : : CASE_MATHFN (ISINF) \
2016 : : CASE_MATHFN (J0) \
2017 : : CASE_MATHFN (J1) \
2018 : : CASE_MATHFN (JN) \
2019 : : CASE_MATHFN (LCEIL) \
2020 : : CASE_MATHFN_FLOATN (LDEXP) \
2021 : : CASE_MATHFN (LFLOOR) \
2022 : : CASE_MATHFN_FLOATN (LGAMMA) \
2023 : : CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */ \
2024 : : CASE_MATHFN (LLCEIL) \
2025 : : CASE_MATHFN (LLFLOOR) \
2026 : : CASE_MATHFN_FLOATN (LLRINT) \
2027 : : CASE_MATHFN_FLOATN (LLROUND) \
2028 : : CASE_MATHFN_FLOATN (LOG) \
2029 : : CASE_MATHFN_FLOATN (LOG10) \
2030 : : CASE_MATHFN_FLOATN (LOG1P) \
2031 : : CASE_MATHFN_FLOATN (LOG2) \
2032 : : CASE_MATHFN_FLOATN (LOGB) \
2033 : : CASE_MATHFN_FLOATN (LRINT) \
2034 : : CASE_MATHFN_FLOATN (LROUND) \
2035 : : CASE_MATHFN_FLOATN (MODF) \
2036 : : CASE_MATHFN_FLOATN (NAN) \
2037 : : CASE_MATHFN_FLOATN (NANS) \
2038 : : CASE_MATHFN_FLOATN (NEARBYINT) \
2039 : : CASE_MATHFN_FLOATN (NEXTAFTER) \
2040 : : CASE_MATHFN (NEXTTOWARD) \
2041 : : CASE_MATHFN_FLOATN (POW) \
2042 : : CASE_MATHFN (POWI) \
2043 : : CASE_MATHFN (POW10) \
2044 : : CASE_MATHFN_FLOATN (REMAINDER) \
2045 : : CASE_MATHFN_FLOATN (REMQUO) \
2046 : : CASE_MATHFN_FLOATN (RINT) \
2047 : : CASE_MATHFN_FLOATN (ROUND) \
2048 : : CASE_MATHFN_FLOATN (ROUNDEVEN) \
2049 : : CASE_MATHFN (SCALB) \
2050 : : CASE_MATHFN_FLOATN (SCALBLN) \
2051 : : CASE_MATHFN_FLOATN (SCALBN) \
2052 : : CASE_MATHFN (SIGNBIT) \
2053 : : CASE_MATHFN (SIGNIFICAND) \
2054 : : CASE_MATHFN_FLOATN (SIN) \
2055 : : CASE_MATHFN (SINCOS) \
2056 : : CASE_MATHFN_FLOATN (SINH) \
2057 : : CASE_MATHFN_FLOATN (SQRT) \
2058 : : CASE_MATHFN_FLOATN (TAN) \
2059 : : CASE_MATHFN_FLOATN (TANH) \
2060 : : CASE_MATHFN_FLOATN (TGAMMA) \
2061 : : CASE_MATHFN_FLOATN (TRUNC) \
2062 : : CASE_MATHFN (Y0) \
2063 : : CASE_MATHFN (Y1) \
2064 : : CASE_MATHFN (YN)
2065 : :
2066 : 600560 : SEQ_OF_CASE_MATHFN
2067 : :
2068 : : default:
2069 : : return END_BUILTINS;
2070 : : }
2071 : :
2072 : 600623 : mtype = TYPE_MAIN_VARIANT (type);
2073 : 600623 : if (mtype == double_type_node)
2074 : : return fcode;
2075 : 557377 : else if (mtype == float_type_node)
2076 : : return fcodef;
2077 : 505023 : else if (mtype == long_double_type_node)
2078 : : return fcodel;
2079 : 471299 : else if (mtype == float16_type_node)
2080 : : return fcodef16;
2081 : 471251 : else if (mtype == float32_type_node)
2082 : : return fcodef32;
2083 : 471251 : else if (mtype == float64_type_node)
2084 : : return fcodef64;
2085 : 471251 : else if (mtype == float128_type_node)
2086 : : return fcodef128;
2087 : 463156 : else if (mtype == float32x_type_node)
2088 : : return fcodef32x;
2089 : 463156 : else if (mtype == float64x_type_node)
2090 : : return fcodef64x;
2091 : 463156 : else if (mtype == float128x_type_node)
2092 : : return fcodef128x;
2093 : : else
2094 : 463156 : return END_BUILTINS;
2095 : : }
2096 : :
2097 : : #undef CASE_MATHFN
2098 : : #undef CASE_MATHFN_FLOATN
2099 : : #undef CASE_MATHFN_REENT
2100 : :
2102 : : /* Return the math function equivalent to FN but operating directly on TYPE,
2102 : : if available. If IMPLICIT_P is true use the implicit builtin declaration,
2103 : : otherwise use the explicit declaration. If we can't do the conversion,
2104 : : return null. */
2105 : :
2106 : : static tree
2107 : 600478 : mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
2108 : : {
2109 : 600478 : built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2110 : 600478 : if (fcode2 == END_BUILTINS)
2111 : : return NULL_TREE;
2112 : :
2113 : 133674 : if (implicit_p && !builtin_decl_implicit_p (fcode2))
2114 : : return NULL_TREE;
2115 : :
2116 : 129130 : return builtin_decl_explicit (fcode2);
2117 : : }
2118 : :
2119 : : /* Like mathfn_built_in_1, but always use the implicit array. */
2120 : :
2121 : : tree
2122 : 210 : mathfn_built_in (tree type, combined_fn fn)
2123 : : {
2124 : 210 : return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2125 : : }
2126 : :
2127 : : /* Like mathfn_built_in_1, but always use the explicit array. */
2128 : :
2129 : : tree
2130 : 0 : mathfn_built_in_explicit (tree type, combined_fn fn)
2131 : : {
2132 : 0 : return mathfn_built_in_1 (type, fn, /*implicit=*/ 0);
2133 : : }
2134 : :
2135 : : /* Like mathfn_built_in_1, but take a built_in_function and
2136 : : always use the implicit array. */
2137 : :
2138 : : tree
2139 : 600046 : mathfn_built_in (tree type, enum built_in_function fn)
2140 : : {
2141 : 600046 : return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
2142 : : }
2143 : :
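     : : /* Usage sketch (illustrative): asking for the float variant of sqrt,
     : :
     : :      tree fn = mathfn_built_in (float_type_node, BUILT_IN_SQRT);
     : :
     : :    yields the decl for BUILT_IN_SQRTF when it is implicitly available,
     : :    and NULL_TREE otherwise.  */
     : :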
2144 : : /* Return the type associated with a built in function, i.e., the one
2145 : : to be passed to mathfn_built_in to get the type-specific
2146 : : function. */
2147 : :
2148 : : tree
2149 : 1235 : mathfn_built_in_type (combined_fn fn)
2150 : : {
2151 : : #define CASE_MATHFN(MATHFN) \
2152 : : case CFN_BUILT_IN_##MATHFN: \
2153 : : return double_type_node; \
2154 : : case CFN_BUILT_IN_##MATHFN##F: \
2155 : : return float_type_node; \
2156 : : case CFN_BUILT_IN_##MATHFN##L: \
2157 : : return long_double_type_node;
2158 : :
2159 : : #define CASE_MATHFN_FLOATN(MATHFN) \
2160 : : CASE_MATHFN(MATHFN) \
2161 : : case CFN_BUILT_IN_##MATHFN##F16: \
2162 : : return float16_type_node; \
2163 : : case CFN_BUILT_IN_##MATHFN##F32: \
2164 : : return float32_type_node; \
2165 : : case CFN_BUILT_IN_##MATHFN##F64: \
2166 : : return float64_type_node; \
2167 : : case CFN_BUILT_IN_##MATHFN##F128: \
2168 : : return float128_type_node; \
2169 : : case CFN_BUILT_IN_##MATHFN##F32X: \
2170 : : return float32x_type_node; \
2171 : : case CFN_BUILT_IN_##MATHFN##F64X: \
2172 : : return float64x_type_node; \
2173 : : case CFN_BUILT_IN_##MATHFN##F128X: \
2174 : : return float128x_type_node;
2175 : :
2176 : : /* Similar to above, but appends _R after any F/L suffix. */
2177 : : #define CASE_MATHFN_REENT(MATHFN) \
2178 : : case CFN_BUILT_IN_##MATHFN##_R: \
2179 : : return double_type_node; \
2180 : : case CFN_BUILT_IN_##MATHFN##F_R: \
2181 : : return float_type_node; \
2182 : : case CFN_BUILT_IN_##MATHFN##L_R: \
2183 : : return long_double_type_node;
2184 : :
2185 : 1235 : switch (fn)
2186 : : {
2187 : 1235 : SEQ_OF_CASE_MATHFN
2188 : :
2189 : : default:
2190 : : return NULL_TREE;
2191 : : }
2192 : :
2193 : : #undef CASE_MATHFN
2194 : : #undef CASE_MATHFN_FLOATN
2195 : : #undef CASE_MATHFN_REENT
2196 : : #undef SEQ_OF_CASE_MATHFN
2197 : : }
2198 : :
2199 : : /* Check whether there is an internal function associated with function FN
2200 : : and return type RETURN_TYPE. Return the function if so, otherwise return
2201 : : IFN_LAST.
2202 : :
2203 : : Note that this function only tests whether the function is defined in
2205 : : internal-fn.def, not whether it is actually available on the target. */
2205 : :
2206 : : static internal_fn
2207 : 13468344 : associated_internal_fn (built_in_function fn, tree return_type)
2208 : : {
2209 : 13468344 : switch (fn)
2210 : : {
2211 : : #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2212 : : CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2213 : : #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2214 : : CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2215 : : CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2216 : : #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2217 : : CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2218 : : #include "internal-fn.def"
2219 : :
2220 : : CASE_FLT_FN (BUILT_IN_POW10):
2221 : : return IFN_EXP10;
2222 : :
2223 : : CASE_FLT_FN (BUILT_IN_DREM):
2224 : : return IFN_REMAINDER;
2225 : :
2226 : 18482 : CASE_FLT_FN (BUILT_IN_SCALBN):
2227 : 18482 : CASE_FLT_FN (BUILT_IN_SCALBLN):
2228 : 18482 : if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2229 : : return IFN_LDEXP;
2230 : : return IFN_LAST;
2231 : 73 : case BUILT_IN_CRC8_DATA8:
2232 : 73 : case BUILT_IN_CRC16_DATA8:
2233 : 73 : case BUILT_IN_CRC16_DATA16:
2234 : 73 : case BUILT_IN_CRC32_DATA8:
2235 : 73 : case BUILT_IN_CRC32_DATA16:
2236 : 73 : case BUILT_IN_CRC32_DATA32:
2237 : 73 : case BUILT_IN_CRC64_DATA8:
2238 : 73 : case BUILT_IN_CRC64_DATA16:
2239 : 73 : case BUILT_IN_CRC64_DATA32:
2240 : 73 : case BUILT_IN_CRC64_DATA64:
2241 : 73 : return IFN_CRC;
2242 : 126 : case BUILT_IN_REV_CRC8_DATA8:
2243 : 126 : case BUILT_IN_REV_CRC16_DATA8:
2244 : 126 : case BUILT_IN_REV_CRC16_DATA16:
2245 : 126 : case BUILT_IN_REV_CRC32_DATA8:
2246 : 126 : case BUILT_IN_REV_CRC32_DATA16:
2247 : 126 : case BUILT_IN_REV_CRC32_DATA32:
2248 : 126 : case BUILT_IN_REV_CRC64_DATA8:
2249 : 126 : case BUILT_IN_REV_CRC64_DATA16:
2250 : 126 : case BUILT_IN_REV_CRC64_DATA32:
2251 : 126 : case BUILT_IN_REV_CRC64_DATA64:
2252 : 126 : return IFN_CRC_REV;
2253 : : default:
2254 : : return IFN_LAST;
2255 : : }
2256 : : }
2257 : :
2258 : : /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2259 : : return its code, otherwise return IFN_LAST. Note that this function
2260 : : only tests whether the function is defined in internals.def, not whether
2261 : : it is actually available on the target. */
2262 : :
2263 : : internal_fn
2264 : 639710 : associated_internal_fn (tree fndecl)
2265 : : {
2266 : 639710 : gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2267 : 639710 : return associated_internal_fn (DECL_FUNCTION_CODE (fndecl),
2268 : 639710 : TREE_TYPE (TREE_TYPE (fndecl)));
2269 : : }
2270 : :
2271 : : /* Check whether there is an internal function associated with function CFN
2272 : : and return type RETURN_TYPE. Return the function if so, otherwise return
2273 : : IFN_LAST.
2274 : :
2275 : : Note that this function only tests whether the function is defined in
2277 : : internal-fn.def, not whether it is actually available on the target. */
2277 : :
2278 : : internal_fn
2279 : 33406523 : associated_internal_fn (combined_fn cfn, tree return_type)
2280 : : {
2281 : 33406523 : if (internal_fn_p (cfn))
2282 : 20577889 : return as_internal_fn (cfn);
2283 : 12828634 : return associated_internal_fn (as_builtin_fn (cfn), return_type);
2284 : : }
2285 : :
2286 : : /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2287 : : on the current target by a call to an internal function, return the
2288 : : code of that internal function, otherwise return IFN_LAST. The caller
2289 : : is responsible for ensuring that any side-effects of the built-in
2290 : : call are dealt with correctly. E.g. if CALL sets errno, the caller
2291 : : must decide that the errno result isn't needed or make it available
2292 : : in some other way. */
2293 : :
2294 : : internal_fn
2295 : 864407 : replacement_internal_fn (gcall *call)
2296 : : {
2297 : 864407 : if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2298 : : {
2299 : 637367 : internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2300 : 637367 : if (ifn != IFN_LAST)
2301 : : {
2302 : 62203 : tree_pair types = direct_internal_fn_types (ifn, call);
2303 : 62203 : optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2304 : 62203 : if (direct_internal_fn_supported_p (ifn, types, opt_type))
2305 : 40767 : return ifn;
2306 : : }
2307 : : }
2308 : : return IFN_LAST;
2309 : : }
2310 : :
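     : : /* For example (illustrative): for a gcall to sqrtf, this returns
     : :    IFN_SQRT when the target provides the sqrt optab for SFmode.  As
     : :    noted above, the caller remains responsible for errno, which sqrtf
     : :    may set for negative arguments.  */
     : :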
2312 : : /* Expand a call to the builtin ternary math functions (fma).
2312 : : Return NULL_RTX if a normal call should be emitted rather than expanding the
2313 : : function in-line. EXP is the expression that is a call to the builtin
2314 : : function; if convenient, the result should be placed in TARGET.
2315 : : SUBTARGET may be used as the target for computing one of EXP's
2316 : : operands. */
2317 : :
2318 : : static rtx
2319 : 364 : expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2320 : : {
2321 : 364 : optab builtin_optab;
2322 : 364 : rtx op0, op1, op2, result;
2323 : 364 : rtx_insn *insns;
2324 : 364 : tree fndecl = get_callee_fndecl (exp);
2325 : 364 : tree arg0, arg1, arg2;
2326 : 364 : machine_mode mode;
2327 : :
2328 : 364 : if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2329 : : return NULL_RTX;
2330 : :
2331 : 364 : arg0 = CALL_EXPR_ARG (exp, 0);
2332 : 364 : arg1 = CALL_EXPR_ARG (exp, 1);
2333 : 364 : arg2 = CALL_EXPR_ARG (exp, 2);
2334 : :
2335 : 364 : switch (DECL_FUNCTION_CODE (fndecl))
2336 : : {
2337 : 364 : CASE_FLT_FN (BUILT_IN_FMA):
2338 : 364 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
2339 : 364 : builtin_optab = fma_optab; break;
2340 : 0 : default:
2341 : 0 : gcc_unreachable ();
2342 : : }
2343 : :
2344 : : /* Make a suitable register to place result in. */
2345 : 364 : mode = TYPE_MODE (TREE_TYPE (exp));
2346 : :
2347 : : /* Before working hard, check whether the instruction is available. */
2348 : 364 : if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2349 : : return NULL_RTX;
2350 : :
2351 : 0 : result = gen_reg_rtx (mode);
2352 : :
2353 : : /* Always stabilize the argument list. */
2354 : 0 : CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2355 : 0 : CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2356 : 0 : CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2357 : :
2358 : 0 : op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2359 : 0 : op1 = expand_normal (arg1);
2360 : 0 : op2 = expand_normal (arg2);
2361 : :
2362 : 0 : start_sequence ();
2363 : :
2364 : : /* Compute into RESULT.
2365 : : Set RESULT to wherever the result comes back. */
2366 : 0 : result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2367 : : result, 0);
2368 : :
2369 : : /* If we were unable to expand via the builtin, stop the sequence
2371 : : (without outputting the insns) and call the library function
2371 : : with the stabilized argument list. */
2372 : 0 : if (result == 0)
2373 : : {
2374 : 0 : end_sequence ();
2375 : 0 : return expand_call (exp, target, target == const0_rtx);
2376 : : }
2377 : :
2378 : : /* Output the entire sequence. */
2379 : 0 : insns = end_sequence ();
2380 : 0 : emit_insn (insns);
2381 : :
2382 : 0 : return result;
2383 : : }
2384 : :
2385 : : /* Expand a call to the builtin sin and cos math functions.
2386 : : Return NULL_RTX if a normal call should be emitted rather than expanding the
2387 : : function in-line. EXP is the expression that is a call to the builtin
2388 : : function; if convenient, the result should be placed in TARGET.
2389 : : SUBTARGET may be used as the target for computing one of EXP's
2390 : : operands. */
2391 : :
2392 : : static rtx
2393 : 47 : expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2394 : : {
2395 : 47 : optab builtin_optab;
2396 : 47 : rtx op0;
2397 : 47 : rtx_insn *insns;
2398 : 47 : tree fndecl = get_callee_fndecl (exp);
2399 : 47 : machine_mode mode;
2400 : 47 : tree arg;
2401 : :
2402 : 47 : if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2403 : : return NULL_RTX;
2404 : :
2405 : 39 : arg = CALL_EXPR_ARG (exp, 0);
2406 : :
2407 : 39 : switch (DECL_FUNCTION_CODE (fndecl))
2408 : : {
2409 : 39 : CASE_FLT_FN (BUILT_IN_SIN):
2410 : 39 : CASE_FLT_FN (BUILT_IN_COS):
2411 : 39 : builtin_optab = sincos_optab; break;
2412 : 0 : default:
2413 : 0 : gcc_unreachable ();
2414 : : }
2415 : :
2416 : : /* Make a suitable register to place result in. */
2417 : 39 : mode = TYPE_MODE (TREE_TYPE (exp));
2418 : :
2419 : : /* Check if sincos insn is available, otherwise fallback
2420 : : to sin or cos insn. */
2421 : 39 : if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2422 : 39 : switch (DECL_FUNCTION_CODE (fndecl))
2423 : : {
2424 : : CASE_FLT_FN (BUILT_IN_SIN):
2425 : : builtin_optab = sin_optab; break;
2426 : 14 : CASE_FLT_FN (BUILT_IN_COS):
2427 : 14 : builtin_optab = cos_optab; break;
2428 : 0 : default:
2429 : 0 : gcc_unreachable ();
2430 : : }
2431 : :
2432 : : /* Before working hard, check whether the instruction is available. */
2433 : 39 : if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2434 : : {
2435 : 0 : rtx result = gen_reg_rtx (mode);
2436 : :
2437 : : /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2438 : : need to expand the argument again. This way, we will not perform
2440 : : side effects more than once. */
2440 : 0 : CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2441 : :
2442 : 0 : op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2443 : :
2444 : 0 : start_sequence ();
2445 : :
2446 : : /* Compute into RESULT.
2447 : : Set RESULT to wherever the result comes back. */
2448 : 0 : if (builtin_optab == sincos_optab)
2449 : : {
2450 : 0 : int ok;
2451 : :
2452 : 0 : switch (DECL_FUNCTION_CODE (fndecl))
2453 : : {
2454 : 0 : CASE_FLT_FN (BUILT_IN_SIN):
2455 : 0 : ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2456 : 0 : break;
2457 : 0 : CASE_FLT_FN (BUILT_IN_COS):
2458 : 0 : ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2459 : 0 : break;
2460 : 0 : default:
2461 : 0 : gcc_unreachable ();
2462 : : }
2463 : 0 : gcc_assert (ok);
2464 : : }
2465 : : else
2466 : 0 : result = expand_unop (mode, builtin_optab, op0, result, 0);
2467 : :
2468 : 0 : if (result != 0)
2469 : : {
2470 : : /* Output the entire sequence. */
2471 : 0 : insns = end_sequence ();
2472 : 0 : emit_insn (insns);
2473 : 0 : return result;
2474 : : }
2475 : :
2476 : : /* If we were unable to expand via the builtin, stop the sequence
2478 : : (without outputting the insns) and call the library function
2478 : : with the stabilized argument list. */
2479 : 0 : end_sequence ();
2480 : : }
2481 : :
2482 : 39 : return expand_call (exp, target, target == const0_rtx);
2483 : : }
2484 : :
2486 : : /* Given an interclass math builtin decl FNDECL and its argument ARG,
2487 : : return an RTL instruction code that implements the functionality.
2488 : : If that isn't possible or available, return CODE_FOR_nothing. */
2488 : :
2489 : : static enum insn_code
2490 : 1381664 : interclass_mathfn_icode (tree arg, tree fndecl)
2491 : : {
2492 : 1381664 : bool errno_set = false;
2493 : 1381664 : optab builtin_optab = unknown_optab;
2494 : 1381664 : machine_mode mode;
2495 : :
2496 : 1381664 : switch (DECL_FUNCTION_CODE (fndecl))
2497 : : {
2498 : 4 : CASE_FLT_FN (BUILT_IN_ILOGB):
2499 : 4 : errno_set = true;
2500 : 4 : builtin_optab = ilogb_optab;
2501 : 4 : break;
2502 : 266999 : CASE_FLT_FN (BUILT_IN_ISINF):
2503 : 266999 : builtin_optab = isinf_optab;
2504 : 266999 : break;
2505 : 579584 : case BUILT_IN_ISFINITE:
2506 : 579584 : builtin_optab = isfinite_optab;
2507 : 579584 : break;
2508 : 265194 : case BUILT_IN_ISNORMAL:
2509 : 265194 : builtin_optab = isnormal_optab;
2510 : 265194 : break;
2511 : 267806 : CASE_FLT_FN (BUILT_IN_ISNAN):
2512 : 267806 : builtin_optab = isnan_optab;
2513 : 267806 : break;
2514 : : CASE_FLT_FN (BUILT_IN_FINITE):
2515 : : case BUILT_IN_FINITED32:
2516 : : case BUILT_IN_FINITED64:
2517 : : case BUILT_IN_FINITED128:
2518 : : case BUILT_IN_ISINFD32:
2519 : : case BUILT_IN_ISINFD64:
2520 : : case BUILT_IN_ISINFD128:
2521 : : case BUILT_IN_ISNAND32:
2522 : : case BUILT_IN_ISNAND64:
2523 : : case BUILT_IN_ISNAND128:
2524 : : /* These builtins have no optabs (yet). */
2525 : : break;
2526 : 0 : default:
2527 : 0 : gcc_unreachable ();
2528 : : }
2529 : :
2530 : : /* There's no easy way to detect the case we need to set EDOM. */
2531 : 1381664 : if (flag_errno_math && errno_set)
2532 : : return CODE_FOR_nothing;
2533 : :
2534 : : /* Optab mode depends on the mode of the input argument. */
2535 : 1381664 : mode = TYPE_MODE (TREE_TYPE (arg));
2536 : :
2537 : 1381664 : if (builtin_optab)
2538 : 1379587 : return optab_handler (builtin_optab, mode);
2539 : : return CODE_FOR_nothing;
2540 : : }
2541 : :
2543 : : /* Expand a call to one of the builtin math functions that operate on a
2544 : : floating-point argument and output an integer result (ilogb, isinf,
2545 : : isnan, etc.).
2545 : : Return 0 if a normal call should be emitted rather than expanding the
2546 : : function in-line. EXP is the expression that is a call to the builtin
2547 : : function; if convenient, the result should be placed in TARGET. */
2548 : :
2549 : : static rtx
2550 : 4 : expand_builtin_interclass_mathfn (tree exp, rtx target)
2551 : : {
2552 : 4 : enum insn_code icode = CODE_FOR_nothing;
2553 : 4 : rtx op0;
2554 : 4 : tree fndecl = get_callee_fndecl (exp);
2555 : 4 : machine_mode mode;
2556 : 4 : tree arg;
2557 : :
2558 : 4 : if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2559 : : return NULL_RTX;
2560 : :
2561 : 4 : arg = CALL_EXPR_ARG (exp, 0);
2562 : 4 : icode = interclass_mathfn_icode (arg, fndecl);
2563 : 4 : mode = TYPE_MODE (TREE_TYPE (arg));
2564 : :
2565 : 4 : if (icode != CODE_FOR_nothing)
2566 : : {
2567 : 2 : class expand_operand ops[1];
2568 : 2 : rtx_insn *last = get_last_insn ();
2569 : 2 : tree orig_arg = arg;
2570 : :
2571 : : /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2572 : : need to expand the argument again. This way, we will not perform
2574 : : side effects more than once. */
2574 : 2 : CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2575 : :
2576 : 2 : op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2577 : :
2578 : 2 : if (mode != GET_MODE (op0))
2579 : 0 : op0 = convert_to_mode (mode, op0, 0);
2580 : :
2581 : 2 : create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2582 : 2 : if (maybe_legitimize_operands (icode, 0, 1, ops)
2583 : 2 : && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2584 : 1 : return ops[0].value;
2585 : :
2586 : 1 : delete_insns_since (last);
2587 : 1 : CALL_EXPR_ARG (exp, 0) = orig_arg;
2588 : : }
2589 : :
2590 : : return NULL_RTX;
2591 : : }
2592 : :
2593 : : /* Expand a call to the builtin sincos math function.
2594 : : Return NULL_RTX if a normal call should be emitted rather than expanding the
2595 : : function in-line. EXP is the expression that is a call to the builtin
2596 : : function. */
2597 : :
2598 : : static rtx
2599 : 3 : expand_builtin_sincos (tree exp)
2600 : : {
2601 : 3 : rtx op0, op1, op2, target1, target2;
2602 : 3 : machine_mode mode;
2603 : 3 : tree arg, sinp, cosp;
2604 : 3 : int result;
2605 : 3 : location_t loc = EXPR_LOCATION (exp);
2606 : 3 : tree alias_type, alias_off;
2607 : :
2608 : 3 : if (!validate_arglist (exp, REAL_TYPE,
2609 : : POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2610 : : return NULL_RTX;
2611 : :
2612 : 3 : arg = CALL_EXPR_ARG (exp, 0);
2613 : 3 : sinp = CALL_EXPR_ARG (exp, 1);
2614 : 3 : cosp = CALL_EXPR_ARG (exp, 2);
2615 : :
2616 : : /* Make a suitable register to place result in. */
2617 : 3 : mode = TYPE_MODE (TREE_TYPE (arg));
2618 : :
2620 : : /* Check if the sincos insn is available, otherwise emit the call. */
2620 : 3 : if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2621 : : return NULL_RTX;
2622 : :
2623 : 0 : target1 = gen_reg_rtx (mode);
2624 : 0 : target2 = gen_reg_rtx (mode);
2625 : :
2626 : 0 : op0 = expand_normal (arg);
2627 : 0 : alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2628 : 0 : alias_off = build_int_cst (alias_type, 0);
2629 : 0 : op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2630 : : sinp, alias_off));
2631 : 0 : op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2632 : : cosp, alias_off));
2633 : :
2634 : : /* Compute into target1 and target2.
2635 : : Set TARGET to wherever the result comes back. */
2636 : 0 : result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2637 : 0 : gcc_assert (result);
2638 : :
2639 : : /* Move target1 and target2 to the memory locations indicated
2640 : : by op1 and op2. */
2641 : 0 : emit_move_insn (op1, target1);
2642 : 0 : emit_move_insn (op2, target2);
2643 : :
2644 : 0 : return const0_rtx;
2645 : : }
2646 : :
2647 : : /* Expand call EXP to the fegetround builtin (from C99 fenv.h), returning the
2649 : : result in TARGET on success, or NULL_RTX on failure. */
2649 : : static rtx
2650 : 60 : expand_builtin_fegetround (tree exp, rtx target, machine_mode target_mode)
2651 : : {
2652 : 60 : if (!validate_arglist (exp, VOID_TYPE))
2653 : : return NULL_RTX;
2654 : :
2655 : 60 : insn_code icode = direct_optab_handler (fegetround_optab, SImode);
2656 : 60 : if (icode == CODE_FOR_nothing)
2657 : : return NULL_RTX;
2658 : :
2659 : 0 : if (target == 0
2660 : 0 : || GET_MODE (target) != target_mode
2661 : 0 : || !(*insn_data[icode].operand[0].predicate) (target, target_mode))
2662 : 0 : target = gen_reg_rtx (target_mode);
2663 : :
2664 : 0 : rtx pat = GEN_FCN (icode) (target);
2665 : 0 : if (!pat)
2666 : : return NULL_RTX;
2667 : 0 : emit_insn (pat);
2668 : :
2669 : 0 : return target;
2670 : : }
2671 : :
2672 : : /* Expand call EXP to either feclearexcept or feraiseexcept builtins (from C99
2674 : : fenv.h), returning the result in TARGET on success, or NULL_RTX
2675 : : on failure. */
2675 : : static rtx
2676 : 1894 : expand_builtin_feclear_feraise_except (tree exp, rtx target,
2677 : : machine_mode target_mode, optab op_optab)
2678 : : {
2679 : 1894 : if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
2680 : : return NULL_RTX;
2681 : 1894 : rtx op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
2682 : :
2683 : 1894 : insn_code icode = direct_optab_handler (op_optab, SImode);
2684 : 1894 : if (icode == CODE_FOR_nothing)
2685 : : return NULL_RTX;
2686 : :
2687 : 0 : if (!(*insn_data[icode].operand[1].predicate) (op0, GET_MODE (op0)))
2688 : : return NULL_RTX;
2689 : :
2690 : 0 : if (target == 0
2691 : 0 : || GET_MODE (target) != target_mode
2692 : 0 : || !(*insn_data[icode].operand[0].predicate) (target, target_mode))
2693 : 0 : target = gen_reg_rtx (target_mode);
2694 : :
2695 : 0 : rtx pat = GEN_FCN (icode) (target, op0);
2696 : 0 : if (!pat)
2697 : : return NULL_RTX;
2698 : 0 : emit_insn (pat);
2699 : :
2700 : 0 : return target;
2701 : : }
2702 : :
2703 : : /* Expand a call to the internal cexpi builtin to the sincos math function.
2704 : : EXP is the expression that is a call to the builtin function; if convenient,
2705 : : the result should be placed in TARGET. */
2706 : :
2707 : : static rtx
2708 : 157 : expand_builtin_cexpi (tree exp, rtx target)
2709 : : {
2710 : 157 : tree fndecl = get_callee_fndecl (exp);
2711 : 157 : tree arg, type;
2712 : 157 : machine_mode mode;
2713 : 157 : rtx op0, op1, op2;
2714 : 157 : location_t loc = EXPR_LOCATION (exp);
2715 : :
2716 : 157 : if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2717 : : return NULL_RTX;
2718 : :
2719 : 157 : arg = CALL_EXPR_ARG (exp, 0);
2720 : 157 : type = TREE_TYPE (arg);
2721 : 157 : mode = TYPE_MODE (TREE_TYPE (arg));
2722 : :
2724 : : /* Try expanding via a sincos optab; fall back to emitting a libcall
2725 : : to sincos or cexp. We are sure we have sincos or cexp because cexpi
2726 : : is only generated from sincos or cexp, or when either is available. */
2726 : 157 : if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2727 : : {
2728 : 6 : op1 = gen_reg_rtx (mode);
2729 : 6 : op2 = gen_reg_rtx (mode);
2730 : :
2731 : 6 : op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2732 : :
2733 : : /* Compute into op1 and op2. */
2734 : 6 : expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2735 : : }
2736 : 151 : else if (targetm.libc_has_function (function_sincos, type))
2737 : : {
2738 : 151 : tree call, fn = NULL_TREE;
2739 : 151 : tree top1, top2;
2740 : 151 : rtx op1a, op2a;
2741 : :
2742 : 151 : if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2743 : 50 : fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2744 : 101 : else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2745 : 82 : fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2746 : 19 : else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2747 : 19 : fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2748 : : else
2749 : 0 : gcc_unreachable ();
2750 : :
2751 : 151 : op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2752 : 151 : op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2753 : 151 : op1a = copy_addr_to_reg (XEXP (op1, 0));
2754 : 151 : op2a = copy_addr_to_reg (XEXP (op2, 0));
2755 : 151 : top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2756 : 151 : top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2757 : :
2758 : : /* Make sure not to fold the sincos call again. */
2759 : 151 : call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2760 : 151 : expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2761 : : call, 3, arg, top1, top2));
2762 : : }
2763 : : else
2764 : : {
2765 : 0 : tree call, fn = NULL_TREE, narg;
2766 : 0 : tree ctype = build_complex_type (type);
2767 : :
2768 : 0 : if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2769 : 0 : fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2770 : 0 : else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2771 : 0 : fn = builtin_decl_explicit (BUILT_IN_CEXP);
2772 : 0 : else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2773 : 0 : fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2774 : : else
2775 : 0 : gcc_unreachable ();
2776 : :
2778 : : /* If we don't have a decl for cexp, create one. This is the
2778 : : friendliest fallback if the user calls __builtin_cexpi
2779 : : without full target C99 function support. */
2780 : 0 : if (fn == NULL_TREE)
2781 : : {
2782 : 0 : tree fntype;
2783 : 0 : const char *name = NULL;
2784 : :
2785 : 0 : if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2786 : : name = "cexpf";
2787 : 0 : else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2788 : : name = "cexp";
2789 : 0 : else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2790 : 0 : name = "cexpl";
2791 : :
2792 : 0 : fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2793 : 0 : fn = build_fn_decl (name, fntype);
2794 : : }
2795 : :
2796 : 0 : narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2797 : : build_real (type, dconst0), arg);
2798 : :
2799 : : /* Make sure not to fold the cexp call again. */
2800 : 0 : call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2801 : 0 : return expand_expr (build_call_nary (ctype, call, 1, narg),
2802 : 0 : target, VOIDmode, EXPAND_NORMAL);
2803 : : }
2804 : :
2805 : : /* Now build the result in the proper complex type. */
2806 : 314 : return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2807 : 157 : make_tree (TREE_TYPE (arg), op2),
2808 : 157 : make_tree (TREE_TYPE (arg), op1)),
2809 : 157 : target, VOIDmode, EXPAND_NORMAL);
2810 : : }
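     : :
     : : /* An illustrative C-level sketch (a hedged analogue, not part of
     : :    this file) of the double-precision sincos path above, assuming
     : :    the target libc provides the GNU sincos extension: */
     : : #include <complex.h>
     : : #include <math.h>
     : :
     : : static double _Complex
     : : cexpi_via_sincos_sketch (double x)
     : : {
     : :   double s, c;
     : :   sincos (x, &s, &c);          /* one libcall computes both parts */
     : :   return c + s * _Complex_I;   /* real part cos, imaginary part sin */
     : : }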
2811 : :
2812 : : /* Conveniently construct a function call expression. FNDECL names the
2813 : : function to be called, N is the number of arguments, and the "..."
2814 : : parameters are the argument expressions. Unlike build_call_expr
2815 : : this doesn't fold the call, hence it will always return a CALL_EXPR. */
2816 : :
2817 : : static tree
2818 : 136587 : build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2819 : : {
2820 : 136587 : va_list ap;
2821 : 136587 : tree fntype = TREE_TYPE (fndecl);
2822 : 136587 : tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2823 : :
2824 : 136587 : va_start (ap, n);
2825 : 136587 : fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2826 : 136587 : va_end (ap);
2827 : 136587 : SET_EXPR_LOCATION (fn, loc);
2828 : 136587 : return fn;
2829 : : }
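     : :
     : : /* Typical use, as seen later in this file: build an unfolded call
     : :    to a known FNDECL with explicit arguments, e.g.
     : :      exp = build_call_nofold_loc (loc, fallback_fndecl, 1, arg); */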
2830 : :
2831 : : /* Expand the __builtin_issignaling builtin. This needs to handle
2832 : : all floating point formats that do support NaNs (for those that
2833 : : don't, it just sets TARGET to 0). */
2834 : :
2835 : : static rtx
2836 : 823 : expand_builtin_issignaling (tree exp, rtx target)
2837 : : {
2838 : 823 : if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2839 : : return NULL_RTX;
2840 : :
2841 : 823 : tree arg = CALL_EXPR_ARG (exp, 0);
2842 : 823 : scalar_float_mode fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
2843 : 823 : const struct real_format *fmt = REAL_MODE_FORMAT (fmode);
2844 : :
2845 : : /* Expand the argument yielding a RTX expression. */
2846 : 823 : rtx temp = expand_normal (arg);
2847 : :
2848 : : /* If the mode doesn't support NaNs, always return 0.
2849 : : Don't use !HONOR_SNANS (fmode) here, so that there is some chance
2850 : : of __builtin_issignaling working even without -fsignaling-nans,
2851 : : especially when -fno-signaling-nans is the default.
2852 : : On the other hand, checking MODE_HAS_NANS (fmode) is unnecessary:
2853 : : with -ffinite-math-only even __builtin_isnan and
2854 : : __builtin_fpclassify fold to 0 or a non-NaN/Inf classification. */
2855 : 823 : if (!HONOR_NANS (fmode))
2856 : : {
2857 : 0 : emit_move_insn (target, const0_rtx);
2858 : 0 : return target;
2859 : : }
2860 : :
2861 : : /* Check if the back end provides an insn that handles issignaling for the
2862 : : argument's mode. */
2863 : 823 : enum insn_code icode = optab_handler (issignaling_optab, fmode);
2864 : 823 : if (icode != CODE_FOR_nothing)
2865 : : {
2866 : 155 : rtx_insn *last = get_last_insn ();
2867 : 155 : rtx this_target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2868 : 155 : if (maybe_emit_unop_insn (icode, this_target, temp, UNKNOWN))
2869 : : return this_target;
2870 : 0 : delete_insns_since (last);
2871 : : }
2872 : :
2873 : 668 : if (DECIMAL_FLOAT_MODE_P (fmode))
2874 : : {
2875 : 63 : scalar_int_mode imode;
2876 : 63 : rtx hi;
2877 : 63 : switch (fmt->ieee_bits)
2878 : : {
2879 : 42 : case 32:
2880 : 42 : case 64:
2881 : 42 : imode = int_mode_for_mode (fmode).require ();
2882 : 42 : temp = gen_lowpart (imode, temp);
2883 : 42 : break;
2884 : 21 : case 128:
2885 : 21 : imode = int_mode_for_size (64, 1).require ();
2886 : 21 : hi = NULL_RTX;
2887 : : /* For decimal128, TImode support isn't always there and even when
2888 : : it is, working on the DImode high part is usually better. */
2889 : 21 : if (!MEM_P (temp))
2890 : : {
2891 : 6 : if (rtx t = force_highpart_subreg (imode, temp, fmode))
2892 : : hi = t;
2893 : : else
2894 : : {
2895 : 0 : scalar_int_mode imode2;
2896 : 0 : if (int_mode_for_mode (fmode).exists (&imode2))
2897 : : {
2898 : 0 : rtx temp2 = gen_lowpart (imode2, temp);
2899 : 0 : if (rtx t = force_highpart_subreg (imode, temp2, imode2))
2900 : : hi = t;
2901 : : }
2902 : : }
2903 : 0 : if (!hi)
2904 : : {
2905 : 0 : rtx mem = assign_stack_temp (fmode, GET_MODE_SIZE (fmode));
2906 : 0 : emit_move_insn (mem, temp);
2907 : 0 : temp = mem;
2908 : : }
2909 : : }
2910 : 0 : if (!hi)
2911 : : {
2912 : 15 : poly_int64 offset
2913 : 15 : = subreg_highpart_offset (imode, GET_MODE (temp));
2914 : 15 : hi = adjust_address (temp, imode, offset);
2915 : : }
2916 : : temp = hi;
2917 : : break;
2918 : 0 : default:
2919 : 0 : gcc_unreachable ();
2920 : : }
2921 : : /* In each of decimal{32,64,128}, the MSB is the sign bit and sNaNs
2922 : : have the 6 bits below it all set. */
2923 : 63 : rtx val
2924 : 63 : = GEN_INT (HOST_WIDE_INT_C (0x3f) << (GET_MODE_BITSIZE (imode) - 7));
2925 : 63 : temp = expand_binop (imode, and_optab, temp, val,
2926 : : NULL_RTX, 1, OPTAB_LIB_WIDEN);
2927 : 63 : temp = emit_store_flag_force (target, EQ, temp, val, imode, 1, 1);
2928 : 63 : return temp;
2929 : : }
2930 : :
2931 : : /* Only PDP11 has these defined differently but doesn't support NaNs. */
2932 : 605 : gcc_assert (FLOAT_WORDS_BIG_ENDIAN == WORDS_BIG_ENDIAN);
2933 : 605 : gcc_assert (fmt->signbit_ro > 0 && fmt->b == 2);
2934 : 4235 : gcc_assert (MODE_COMPOSITE_P (fmode)
2935 : : || (fmt->pnan == fmt->p
2936 : : && fmt->signbit_ro == fmt->signbit_rw));
2937 : :
2938 : 605 : switch (fmt->p)
2939 : : {
2940 : 0 : case 106: /* IBM double double */
2941 : : /* For IBM double double, recurse on the most significant double. */
2942 : 0 : gcc_assert (MODE_COMPOSITE_P (fmode));
2943 : 0 : temp = convert_modes (DFmode, fmode, temp, 0);
2944 : 0 : fmode = DFmode;
2945 : 0 : fmt = REAL_MODE_FORMAT (DFmode);
2946 : : /* FALLTHRU */
2947 : 505 : case 8: /* bfloat */
2948 : 505 : case 11: /* IEEE half */
2949 : 505 : case 24: /* IEEE single */
2950 : 505 : case 53: /* IEEE double or Intel extended with rounding to double */
2951 : 505 : if (fmt->p == 53 && fmt->signbit_ro == 79)
2952 : 0 : goto extended;
2953 : 505 : {
2954 : 505 : scalar_int_mode imode = int_mode_for_mode (fmode).require ();
2955 : 505 : temp = gen_lowpart (imode, temp);
2956 : 505 : rtx val = GEN_INT ((HOST_WIDE_INT_M1U << (fmt->p - 2))
2957 : : & ~(HOST_WIDE_INT_M1U << fmt->signbit_ro));
2958 : 505 : if (fmt->qnan_msb_set)
2959 : : {
2960 : 505 : rtx mask = GEN_INT (~(HOST_WIDE_INT_M1U << fmt->signbit_ro));
2961 : 505 : rtx bit = GEN_INT (HOST_WIDE_INT_1U << (fmt->p - 2));
2962 : : /* For non-MIPS/PA IEEE single/double/half or bfloat, expand to:
2963 : : ((temp ^ bit) & mask) > val. */
2964 : 505 : temp = expand_binop (imode, xor_optab, temp, bit,
2965 : : NULL_RTX, 1, OPTAB_LIB_WIDEN);
2966 : 505 : temp = expand_binop (imode, and_optab, temp, mask,
2967 : : NULL_RTX, 1, OPTAB_LIB_WIDEN);
2968 : 505 : temp = emit_store_flag_force (target, GTU, temp, val, imode,
2969 : : 1, 1);
2970 : : }
2971 : : else
2972 : : {
2973 : : /* For MIPS/PA IEEE single/double, expand to:
2974 : : (temp & val) == val. */
2975 : 0 : temp = expand_binop (imode, and_optab, temp, val,
2976 : : NULL_RTX, 1, OPTAB_LIB_WIDEN);
2977 : 0 : temp = emit_store_flag_force (target, EQ, temp, val, imode,
2978 : : 1, 1);
2979 : : }
2980 : : }
2981 : : break;
2982 : 100 : case 113: /* IEEE quad */
2983 : 100 : {
2984 : 100 : rtx hi = NULL_RTX, lo = NULL_RTX;
2985 : 100 : scalar_int_mode imode = int_mode_for_size (64, 1).require ();
2986 : : /* For IEEE quad, TImode support isn't always there and even when
2987 : : it is, working on DImode parts is usually better. */
2988 : 100 : if (!MEM_P (temp))
2989 : : {
2990 : 85 : hi = force_highpart_subreg (imode, temp, fmode);
2991 : 85 : lo = force_lowpart_subreg (imode, temp, fmode);
2992 : 85 : if (!hi || !lo)
2993 : : {
2994 : 0 : scalar_int_mode imode2;
2995 : 0 : if (int_mode_for_mode (fmode).exists (&imode2))
2996 : : {
2997 : 0 : rtx temp2 = gen_lowpart (imode2, temp);
2998 : 0 : hi = force_highpart_subreg (imode, temp2, imode2);
2999 : 0 : lo = force_lowpart_subreg (imode, temp2, imode2);
3000 : : }
3001 : : }
3002 : 85 : if (!hi || !lo)
3003 : : {
3004 : 0 : rtx mem = assign_stack_temp (fmode, GET_MODE_SIZE (fmode));
3005 : 0 : emit_move_insn (mem, temp);
3006 : 0 : temp = mem;
3007 : : }
3008 : : }
3009 : 100 : if (!hi || !lo)
3010 : : {
3011 : 15 : poly_int64 offset
3012 : 15 : = subreg_highpart_offset (imode, GET_MODE (temp));
3013 : 15 : hi = adjust_address (temp, imode, offset);
3014 : 15 : offset = subreg_lowpart_offset (imode, GET_MODE (temp));
3015 : 15 : lo = adjust_address (temp, imode, offset);
3016 : : }
3017 : 100 : rtx val = GEN_INT ((HOST_WIDE_INT_M1U << (fmt->p - 2 - 64))
3018 : : & ~(HOST_WIDE_INT_M1U << (fmt->signbit_ro - 64)));
3019 : 100 : if (fmt->qnan_msb_set)
3020 : : {
3021 : 100 : rtx mask = GEN_INT (~(HOST_WIDE_INT_M1U << (fmt->signbit_ro
3022 : : - 64)));
3023 : 100 : rtx bit = GEN_INT (HOST_WIDE_INT_1U << (fmt->p - 2 - 64));
3024 : : /* For non-MIPS/PA IEEE quad, expand to:
3025 : : (((hi ^ bit) | ((lo | -lo) >> 63)) & mask) > val. */
3026 : 100 : rtx nlo = expand_unop (imode, neg_optab, lo, NULL_RTX, 0);
3027 : 100 : lo = expand_binop (imode, ior_optab, lo, nlo,
3028 : : NULL_RTX, 1, OPTAB_LIB_WIDEN);
3029 : 100 : lo = expand_shift (RSHIFT_EXPR, imode, lo, 63, NULL_RTX, 1);
3030 : 100 : temp = expand_binop (imode, xor_optab, hi, bit,
3031 : : NULL_RTX, 1, OPTAB_LIB_WIDEN);
3032 : 100 : temp = expand_binop (imode, ior_optab, temp, lo,
3033 : : NULL_RTX, 1, OPTAB_LIB_WIDEN);
3034 : 100 : temp = expand_binop (imode, and_optab, temp, mask,
3035 : : NULL_RTX, 1, OPTAB_LIB_WIDEN);
3036 : 100 : temp = emit_store_flag_force (target, GTU, temp, val, imode,
3037 : : 1, 1);
3038 : : }
3039 : : else
3040 : : {
3041 : : /* For MIPS/PA IEEE quad, expand to:
3042 : : (hi & val) == val. */
3043 : 0 : temp = expand_binop (imode, and_optab, hi, val,
3044 : : NULL_RTX, 1, OPTAB_LIB_WIDEN);
3045 : 0 : temp = emit_store_flag_force (target, EQ, temp, val, imode,
3046 : : 1, 1);
3047 : : }
3048 : : }
3049 : : break;
3050 : 0 : case 64: /* Intel or Motorola extended */
3051 : 0 : extended:
3052 : 0 : {
3053 : 0 : rtx ex, hi, lo;
3054 : 0 : scalar_int_mode imode = int_mode_for_size (32, 1).require ();
3055 : 0 : scalar_int_mode iemode = int_mode_for_size (16, 1).require ();
3056 : 0 : if (!MEM_P (temp))
3057 : : {
3058 : 0 : rtx mem = assign_stack_temp (fmode, GET_MODE_SIZE (fmode));
3059 : 0 : emit_move_insn (mem, temp);
3060 : 0 : temp = mem;
3061 : : }
3062 : 0 : if (fmt->signbit_ro == 95)
3063 : : {
3064 : : /* Motorola, always big endian, with a 16-bit gap between the
3065 : : 16-bit sign+exponent and the 64-bit mantissa. */
3066 : 0 : ex = adjust_address (temp, iemode, 0);
3067 : 0 : hi = adjust_address (temp, imode, 4);
3068 : 0 : lo = adjust_address (temp, imode, 8);
3069 : : }
3070 : 0 : else if (!WORDS_BIG_ENDIAN)
3071 : : {
3072 : : /* Intel little endian, 64-bit mantissa followed by 16-bit
3073 : : sign+exponent and then either 16 or 48 bits of gap. */
3074 : 0 : ex = adjust_address (temp, iemode, 8);
3075 : 0 : hi = adjust_address (temp, imode, 4);
3076 : 0 : lo = adjust_address (temp, imode, 0);
3077 : : }
3078 : : else
3079 : : {
3080 : : /* Big endian Itanium. */
3081 : : ex = adjust_address (temp, iemode, 0);
3082 : : hi = adjust_address (temp, imode, 2);
3083 : : lo = adjust_address (temp, imode, 6);
3084 : : }
3085 : 0 : rtx val = GEN_INT (HOST_WIDE_INT_M1U << 30);
3086 : 0 : gcc_assert (fmt->qnan_msb_set);
3087 : 0 : rtx mask = GEN_INT (0x7fff);
3088 : 0 : rtx bit = GEN_INT (HOST_WIDE_INT_1U << 30);
3089 : : /* For Intel/Motorola extended format, expand to:
3090 : : (ex & mask) == mask && ((hi ^ bit) | ((lo | -lo) >> 31)) > val. */
3091 : 0 : rtx nlo = expand_unop (imode, neg_optab, lo, NULL_RTX, 0);
3092 : 0 : lo = expand_binop (imode, ior_optab, lo, nlo,
3093 : : NULL_RTX, 1, OPTAB_LIB_WIDEN);
3094 : 0 : lo = expand_shift (RSHIFT_EXPR, imode, lo, 31, NULL_RTX, 1);
3095 : 0 : temp = expand_binop (imode, xor_optab, hi, bit,
3096 : : NULL_RTX, 1, OPTAB_LIB_WIDEN);
3097 : 0 : temp = expand_binop (imode, ior_optab, temp, lo,
3098 : : NULL_RTX, 1, OPTAB_LIB_WIDEN);
3099 : 0 : temp = emit_store_flag_force (target, GTU, temp, val, imode, 1, 1);
3100 : 0 : ex = expand_binop (iemode, and_optab, ex, mask,
3101 : : NULL_RTX, 1, OPTAB_LIB_WIDEN);
3102 : 0 : ex = emit_store_flag_force (gen_reg_rtx (GET_MODE (temp)), EQ,
3103 : : ex, mask, iemode, 1, 1);
3104 : 0 : temp = expand_binop (GET_MODE (temp), and_optab, temp, ex,
3105 : : NULL_RTX, 1, OPTAB_LIB_WIDEN);
3106 : : }
3107 : 0 : break;
3108 : 0 : default:
3109 : 0 : gcc_unreachable ();
3110 : : }
3111 : :
3112 : : return temp;
3113 : : }
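     : :
     : : /* For illustration, a hedged C rendition of the IEEE-single test
     : :    emitted above for the non-MIPS/PA layout (qNaN MSB set),
     : :    assuming a 32-bit unsigned int; here fmt->p is 24 and
     : :    fmt->signbit_ro is 31, so BIT is 1 << 22: */
     : : #include <string.h>
     : :
     : : static int
     : : issignaling_float_sketch (float f)
     : : {
     : :   unsigned int bits;
     : :   memcpy (&bits, &f, sizeof bits);        /* reinterpret the bits */
     : :   const unsigned int bit = 0x00400000u;   /* 1 << (p - 2) */
     : :   const unsigned int mask = 0x7fffffffu;  /* drop the sign bit */
     : :   const unsigned int val = 0x7fc00000u;   /* smallest qNaN pattern */
     : :   return ((bits ^ bit) & mask) > val;     /* the test emitted above */
     : : }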
3114 : :
3115 : : /* Expand a call to one of the builtin rounding functions gcc defines
3116 : : as an extension (lfloor and lceil). As these are gcc extensions, we
3117 : : do not need to worry about setting errno to EDOM.
3118 : : If expanding via optab fails, lower expression to (int)(floor(x)).
3119 : : EXP is the expression that is a call to the builtin function;
3120 : : if convenient, the result should be placed in TARGET. */
3121 : :
3122 : : static rtx
3123 : 231 : expand_builtin_int_roundingfn (tree exp, rtx target)
3124 : : {
3125 : 231 : convert_optab builtin_optab;
3126 : 231 : rtx op0, tmp;
3127 : 231 : rtx_insn *insns;
3128 : 231 : tree fndecl = get_callee_fndecl (exp);
3129 : 231 : enum built_in_function fallback_fn;
3130 : 231 : tree fallback_fndecl;
3131 : 231 : machine_mode mode;
3132 : 231 : tree arg;
3133 : :
3134 : 231 : if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
3135 : : return NULL_RTX;
3136 : :
3137 : 231 : arg = CALL_EXPR_ARG (exp, 0);
3138 : :
3139 : 231 : switch (DECL_FUNCTION_CODE (fndecl))
3140 : : {
3141 : : CASE_FLT_FN (BUILT_IN_ICEIL):
3142 : : CASE_FLT_FN (BUILT_IN_LCEIL):
3143 : : CASE_FLT_FN (BUILT_IN_LLCEIL):
3144 : : builtin_optab = lceil_optab;
3145 : : fallback_fn = BUILT_IN_CEIL;
3146 : : break;
3147 : :
3148 : 116 : CASE_FLT_FN (BUILT_IN_IFLOOR):
3149 : 116 : CASE_FLT_FN (BUILT_IN_LFLOOR):
3150 : 116 : CASE_FLT_FN (BUILT_IN_LLFLOOR):
3151 : 116 : builtin_optab = lfloor_optab;
3152 : 116 : fallback_fn = BUILT_IN_FLOOR;
3153 : 116 : break;
3154 : :
3155 : 0 : default:
3156 : 0 : gcc_unreachable ();
3157 : : }
3158 : :
3159 : : /* Make a suitable register to place result in. */
3160 : 231 : mode = TYPE_MODE (TREE_TYPE (exp));
3161 : :
3162 : 231 : target = gen_reg_rtx (mode);
3163 : :
3164 : : /* Wrap the computation of the argument in a SAVE_EXPR, as we may
3165 : : need to expand the argument again. This way, we will not perform
3166 : : side effects more than once. */
3167 : 231 : CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
3168 : :
3169 : 231 : op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
3170 : :
3171 : 231 : start_sequence ();
3172 : :
3173 : : /* Compute into TARGET. */
3174 : 231 : if (expand_sfix_optab (target, op0, builtin_optab))
3175 : : {
3176 : : /* Output the entire sequence. */
3177 : 40 : insns = end_sequence ();
3178 : 40 : emit_insn (insns);
3179 : 40 : return target;
3180 : : }
3181 : :
3182 : : /* If we were unable to expand via the builtin, stop the sequence
3183 : : (without outputting the insns). */
3184 : 191 : end_sequence ();
3185 : :
3186 : : /* Fall back to floating point rounding optab. */
3187 : 191 : fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
3188 : :
3189 : : /* For non-C99 targets we may end up without a fallback fndecl here
3190 : : if the user called __builtin_lfloor directly. In this case emit
3191 : : a call to the floor/ceil variants nevertheless. This should result
3192 : : in the best user experience for targets without full C99 support. */
3193 : 191 : if (fallback_fndecl == NULL_TREE)
3194 : : {
3195 : 0 : tree fntype;
3196 : 0 : const char *name = NULL;
3197 : :
3198 : 0 : switch (DECL_FUNCTION_CODE (fndecl))
3199 : : {
3200 : : case BUILT_IN_ICEIL:
3201 : : case BUILT_IN_LCEIL:
3202 : : case BUILT_IN_LLCEIL:
3203 : : name = "ceil";
3204 : : break;
3205 : 0 : case BUILT_IN_ICEILF:
3206 : 0 : case BUILT_IN_LCEILF:
3207 : 0 : case BUILT_IN_LLCEILF:
3208 : 0 : name = "ceilf";
3209 : 0 : break;
3210 : 0 : case BUILT_IN_ICEILL:
3211 : 0 : case BUILT_IN_LCEILL:
3212 : 0 : case BUILT_IN_LLCEILL:
3213 : 0 : name = "ceill";
3214 : 0 : break;
3215 : 0 : case BUILT_IN_IFLOOR:
3216 : 0 : case BUILT_IN_LFLOOR:
3217 : 0 : case BUILT_IN_LLFLOOR:
3218 : 0 : name = "floor";
3219 : 0 : break;
3220 : 0 : case BUILT_IN_IFLOORF:
3221 : 0 : case BUILT_IN_LFLOORF:
3222 : 0 : case BUILT_IN_LLFLOORF:
3223 : 0 : name = "floorf";
3224 : 0 : break;
3225 : 0 : case BUILT_IN_IFLOORL:
3226 : 0 : case BUILT_IN_LFLOORL:
3227 : 0 : case BUILT_IN_LLFLOORL:
3228 : 0 : name = "floorl";
3229 : 0 : break;
3230 : 0 : default:
3231 : 0 : gcc_unreachable ();
3232 : : }
3233 : :
3234 : 0 : fntype = build_function_type_list (TREE_TYPE (arg),
3235 : 0 : TREE_TYPE (arg), NULL_TREE);
3236 : 0 : fallback_fndecl = build_fn_decl (name, fntype);
3237 : : }
3238 : :
3239 : 191 : exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
3240 : :
3241 : 191 : tmp = expand_normal (exp);
3242 : 191 : tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
3243 : :
3244 : : /* Truncate the result of floating point optab to integer
3245 : : via expand_fix (). */
3246 : 191 : target = gen_reg_rtx (mode);
3247 : 191 : expand_fix (target, tmp, 0);
3248 : :
3249 : 191 : return target;
3250 : : }
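     : :
     : : /* A hedged source-level analogue of the fallback path above: when
     : :    no lfloor pattern exists, the call becomes the floating point
     : :    rounder followed by an ordinary conversion via expand_fix: */
     : : #include <math.h>
     : :
     : : static long
     : : lfloor_fallback_sketch (double x)
     : : {
     : :   return (long) floor (x);   /* floor libcall, then truncate */
     : : }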
3251 : :
3252 : : /* Expand a call to one of the builtin math functions doing integer
3253 : : conversion (lrint).
3254 : : Return 0 if a normal call should be emitted rather than expanding the
3255 : : function in-line. EXP is the expression that is a call to the builtin
3256 : : function; if convenient, the result should be placed in TARGET. */
3257 : :
3258 : : static rtx
3259 : 591 : expand_builtin_int_roundingfn_2 (tree exp, rtx target)
3260 : : {
3261 : 591 : convert_optab builtin_optab;
3262 : 591 : rtx op0;
3263 : 591 : rtx_insn *insns;
3264 : 591 : tree fndecl = get_callee_fndecl (exp);
3265 : 591 : tree arg;
3266 : 591 : machine_mode mode;
3267 : 591 : enum built_in_function fallback_fn = BUILT_IN_NONE;
3268 : :
3269 : 591 : if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
3270 : : return NULL_RTX;
3271 : :
3272 : 491 : arg = CALL_EXPR_ARG (exp, 0);
3273 : :
3274 : 491 : switch (DECL_FUNCTION_CODE (fndecl))
3275 : : {
3276 : 8 : CASE_FLT_FN (BUILT_IN_IRINT):
3277 : 8 : fallback_fn = BUILT_IN_LRINT;
3278 : : gcc_fallthrough ();
3279 : : CASE_FLT_FN (BUILT_IN_LRINT):
3280 : : CASE_FLT_FN (BUILT_IN_LLRINT):
3281 : : builtin_optab = lrint_optab;
3282 : : break;
3283 : :
3284 : 222 : CASE_FLT_FN (BUILT_IN_IROUND):
3285 : 222 : fallback_fn = BUILT_IN_LROUND;
3286 : : gcc_fallthrough ();
3287 : : CASE_FLT_FN (BUILT_IN_LROUND):
3288 : : CASE_FLT_FN (BUILT_IN_LLROUND):
3289 : : builtin_optab = lround_optab;
3290 : : break;
3291 : :
3292 : 0 : default:
3293 : 0 : gcc_unreachable ();
3294 : : }
3295 : :
3296 : : /* There's no easy way to detect the case we need to set EDOM. */
3297 : 491 : if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
3298 : : return NULL_RTX;
3299 : :
3300 : : /* Make a suitable register to place result in. */
3301 : 260 : mode = TYPE_MODE (TREE_TYPE (exp));
3302 : :
3303 : : /* There's no easy way to detect the case we need to set EDOM. */
3304 : 260 : if (!flag_errno_math)
3305 : : {
3306 : 260 : rtx result = gen_reg_rtx (mode);
3307 : :
3308 : : /* Wrap the computation of the argument in a SAVE_EXPR, as we may
3309 : : need to expand the argument again. This way, we will not perform
3310 : : side effects more than once. */
3311 : 260 : CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
3312 : :
3313 : 260 : op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
3314 : :
3315 : 260 : start_sequence ();
3316 : :
3317 : 260 : if (expand_sfix_optab (result, op0, builtin_optab))
3318 : : {
3319 : : /* Output the entire sequence. */
3320 : 24 : insns = end_sequence ();
3321 : 24 : emit_insn (insns);
3322 : 24 : return result;
3323 : : }
3324 : :
3325 : : /* If we were unable to expand via the builtin, stop the sequence
3326 : : (without outputting the insns) and call to the library function
3327 : : with the stabilized argument list. */
3328 : 236 : end_sequence ();
3329 : : }
3330 : :
3331 : 236 : if (fallback_fn != BUILT_IN_NONE)
3332 : : {
3333 : : /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
3334 : : targets, (int) round (x) should never be transformed into
3335 : : BUILT_IN_IROUND and if __builtin_iround is called directly, emit
3336 : : a call to lround in the hope that the target provides at least some
3337 : : C99 functions. This should result in the best user experience for
3338 : : not full C99 targets.
3339 : : As scalar float conversions with same mode are useless in GIMPLE,
3340 : : we can end up e.g. with _Float32 argument passed to float builtin,
3341 : : try to get the type from the builtin prototype first. */
3342 : 222 : tree fallback_fndecl = NULL_TREE;
3343 : 222 : if (tree argtypes = TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
3344 : 222 : fallback_fndecl
3345 : 222 : = mathfn_built_in_1 (TREE_VALUE (argtypes),
3346 : : as_combined_fn (fallback_fn), 0);
3347 : 222 : if (fallback_fndecl == NULL_TREE)
3348 : 0 : fallback_fndecl
3349 : 0 : = mathfn_built_in_1 (TREE_TYPE (arg),
3350 : : as_combined_fn (fallback_fn), 0);
3351 : 0 : if (fallback_fndecl)
3352 : : {
3353 : 222 : exp = build_call_nofold_loc (EXPR_LOCATION (exp),
3354 : : fallback_fndecl, 1, arg);
3355 : :
3356 : 222 : target = expand_call (exp, NULL_RTX, target == const0_rtx);
3357 : 222 : target = maybe_emit_group_store (target, TREE_TYPE (exp));
3358 : 222 : return convert_to_mode (mode, target, 0);
3359 : : }
3360 : : }
3361 : :
3362 : 14 : return expand_call (exp, target, target == const0_rtx);
3363 : : }
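     : :
     : : /* Likewise for the __builtin_iround fallback above, in hedged C
     : :    terms: call lround, then narrow the result: */
     : : #include <math.h>
     : :
     : : static int
     : : iround_fallback_sketch (double x)
     : : {
     : :   return (int) lround (x);   /* lround libcall, then convert_to_mode */
     : : }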
3364 : :
3365 : : /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3366 : : a normal call should be emitted rather than expanding the function
3367 : : in-line. EXP is the expression that is a call to the builtin
3368 : : function; if convenient, the result should be placed in TARGET. */
3369 : :
3370 : : static rtx
3371 : 279 : expand_builtin_powi (tree exp, rtx target)
3372 : : {
3373 : 279 : tree arg0, arg1;
3374 : 279 : rtx op0, op1;
3375 : 279 : machine_mode mode;
3376 : 279 : machine_mode mode2;
3377 : :
3378 : 279 : if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3379 : : return NULL_RTX;
3380 : :
3381 : 279 : arg0 = CALL_EXPR_ARG (exp, 0);
3382 : 279 : arg1 = CALL_EXPR_ARG (exp, 1);
3383 : 279 : mode = TYPE_MODE (TREE_TYPE (exp));
3384 : :
3385 : : /* Emit a libcall to libgcc. */
3386 : :
3387 : : /* Mode of the 2nd argument must match that of an int. */
3388 : 279 : mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
3389 : :
3390 : 279 : if (target == NULL_RTX)
3391 : 0 : target = gen_reg_rtx (mode);
3392 : :
3393 : 279 : op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
3394 : 279 : if (GET_MODE (op0) != mode)
3395 : 0 : op0 = convert_to_mode (mode, op0, 0);
3396 : 279 : op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3397 : 279 : if (GET_MODE (op1) != mode2)
3398 : 39 : op1 = convert_to_mode (mode2, op1, 0);
3399 : :
3400 : 279 : target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3401 : : target, LCT_CONST, mode,
3402 : : op0, mode, op1, mode2);
3403 : :
3404 : 279 : return target;
3405 : : }
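     : :
     : : /* The libcall above resolves to libgcc's __powi* helpers (e.g.
     : :    __powidf2 for DFmode). A hedged C model of the usual
     : :    binary-exponentiation implementation: */
     : : static double
     : : powi_sketch (double x, int n)
     : : {
     : :   unsigned int un = n < 0 ? 0u - (unsigned int) n : (unsigned int) n;
     : :   double r = 1.0;
     : :   for (; un; un >>= 1)
     : :     {
     : :       if (un & 1)
     : :         r *= x;   /* fold in the current power of x */
     : :       x *= x;     /* square for the next bit */
     : :     }
     : :   return n < 0 ? 1.0 / r : r;
     : : }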
3406 : :
3407 : : /* Expand expression EXP which is a call to the strlen builtin. Return
3408 : : NULL_RTX if we failed and the caller should emit a normal call, otherwise
3409 : : try to get the result in TARGET, if convenient. */
3410 : :
3411 : : static rtx
3412 : 13857 : expand_builtin_strlen (tree exp, rtx target,
3413 : : machine_mode target_mode)
3414 : : {
3415 : 13857 : if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3416 : : return NULL_RTX;
3417 : :
3418 : 13837 : tree src = CALL_EXPR_ARG (exp, 0);
3419 : :
3420 : : /* If the length can be computed at compile-time, return it. */
3421 : 13837 : if (tree len = c_strlen (src, 0))
3422 : 177 : return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3423 : :
3424 : : /* If the length can be computed at compile-time and is constant
3425 : : integer, but there are side-effects in src, evaluate
3426 : : src for side-effects, then return len.
3427 : : E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3428 : : can be optimized into: i++; x = 3; */
3429 : 13660 : tree len = c_strlen (src, 1);
3430 : 13660 : if (len && TREE_CODE (len) == INTEGER_CST)
3431 : : {
3432 : 0 : expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3433 : 0 : return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3434 : : }
3435 : :
3436 : 13660 : unsigned int align = get_pointer_alignment (src) / BITS_PER_UNIT;
3437 : :
3438 : : /* If SRC is not a pointer type, don't do this operation inline. */
3439 : 13660 : if (align == 0)
3440 : : return NULL_RTX;
3441 : :
3442 : : /* Bail out if we can't compute strlen in the right mode. */
3443 : : machine_mode insn_mode;
3444 : : enum insn_code icode = CODE_FOR_nothing;
3445 : 13660 : FOR_EACH_MODE_FROM (insn_mode, target_mode)
3446 : : {
3447 : 13660 : icode = optab_handler (strlen_optab, insn_mode);
3448 : 13660 : if (icode != CODE_FOR_nothing)
3449 : : break;
3450 : : }
3451 : 13660 : if (insn_mode == VOIDmode)
3452 : : return NULL_RTX;
3453 : :
3454 : : /* Make a place to hold the source address. We will not expand
3455 : : the actual source until we are sure that the expansion will
3456 : : not fail -- there are trees that cannot be expanded twice. */
3457 : 14234 : rtx src_reg = gen_reg_rtx (Pmode);
3458 : :
3459 : : /* Mark the beginning of the strlen sequence so we can emit the
3460 : : source operand later. */
3461 : 13660 : rtx_insn *before_strlen = get_last_insn ();
3462 : :
3463 : 13660 : class expand_operand ops[4];
3464 : 13660 : create_output_operand (&ops[0], target, insn_mode);
3465 : 13660 : create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3466 : 13660 : create_integer_operand (&ops[2], 0);
3467 : 13660 : create_integer_operand (&ops[3], align);
3468 : 13660 : if (!maybe_expand_insn (icode, 4, ops))
3469 : : return NULL_RTX;
3470 : :
3471 : : /* Check to see if the argument was declared attribute nonstring
3472 : : and if so, issue a warning since at this point it's not known
3473 : : to be nul-terminated. */
3474 : 11 : maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3475 : :
3476 : : /* Now that we are assured of success, expand the source. */
3477 : 11 : start_sequence ();
3478 : 15 : rtx pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3479 : 11 : if (pat != src_reg)
3480 : : {
3481 : : #ifdef POINTERS_EXTEND_UNSIGNED
3482 : 15 : if (GET_MODE (pat) != Pmode)
3483 : 0 : pat = convert_to_mode (Pmode, pat,
3484 : : POINTERS_EXTEND_UNSIGNED);
3485 : : #endif
3486 : 11 : emit_move_insn (src_reg, pat);
3487 : : }
3488 : 11 : pat = end_sequence ();
3489 : :
3490 : 11 : if (before_strlen)
3491 : 11 : emit_insn_after (pat, before_strlen);
3492 : : else
3493 : 0 : emit_insn_before (pat, get_insns ());
3494 : :
3495 : : /* Return the value in the proper mode for this function. */
3496 : 11 : if (GET_MODE (ops[0].value) == target_mode)
3497 : : target = ops[0].value;
3498 : 0 : else if (target != 0)
3499 : 0 : convert_move (target, ops[0].value, 0);
3500 : : else
3501 : 0 : target = convert_to_mode (target_mode, ops[0].value, 0);
3502 : :
3503 : : return target;
3504 : : }
3505 : :
3506 : : /* Expand call EXP to the strnlen built-in, returning the result
3507 : : and setting it in TARGET, or return NULL_RTX on failure. */
3508 : :
3509 : : static rtx
3510 : 580 : expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
3511 : : {
3512 : 580 : if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3513 : : return NULL_RTX;
3514 : :
3515 : 571 : tree src = CALL_EXPR_ARG (exp, 0);
3516 : 571 : tree bound = CALL_EXPR_ARG (exp, 1);
3517 : :
3518 : 571 : if (!bound)
3519 : : return NULL_RTX;
3520 : :
3521 : 571 : location_t loc = UNKNOWN_LOCATION;
3522 : 571 : if (EXPR_HAS_LOCATION (exp))
3523 : 571 : loc = EXPR_LOCATION (exp);
3524 : :
3525 : : /* FIXME: Change c_strlen() to return sizetype instead of ssizetype
3526 : : so these conversions aren't necessary. */
3527 : 571 : c_strlen_data lendata = { };
3528 : 571 : tree len = c_strlen (src, 0, &lendata, 1);
3529 : 571 : if (len)
3530 : 102 : len = fold_convert_loc (loc, TREE_TYPE (bound), len);
3531 : :
3532 : 571 : if (TREE_CODE (bound) == INTEGER_CST)
3533 : : {
3534 : 331 : if (!len)
3535 : : return NULL_RTX;
3536 : :
3537 : 62 : len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
3538 : 62 : return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3539 : : }
3540 : :
3541 : 240 : if (TREE_CODE (bound) != SSA_NAME)
3542 : : return NULL_RTX;
3543 : :
3544 : 240 : wide_int min, max;
3545 : 240 : int_range_max r;
3546 : 480 : get_range_query (cfun)->range_of_expr (r, bound,
3547 : : currently_expanding_gimple_stmt);
3548 : 240 : if (r.varying_p () || r.undefined_p ())
3549 : : return NULL_RTX;
3550 : 135 : min = r.lower_bound ();
3551 : 135 : max = r.upper_bound ();
3552 : :
3553 : 135 : if (!len || TREE_CODE (len) != INTEGER_CST)
3554 : : {
3555 : 95 : bool exact;
3556 : 95 : lendata.decl = unterminated_array (src, &len, &exact);
3557 : 95 : if (!lendata.decl)
3558 : 75 : return NULL_RTX;
3559 : : }
3560 : :
3561 : 60 : if (lendata.decl)
3562 : : return NULL_RTX;
3563 : :
3564 : 40 : if (wi::gtu_p (min, wi::to_wide (len)))
3565 : 7 : return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3566 : :
3567 : 33 : len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
3568 : 33 : return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3569 : 240 : }
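     : :
     : : /* The constant-bound case above, seen from the source level (a
     : :    hedged example using a hypothetical helper): */
     : : #include <string.h>
     : :
     : : static size_t
     : : strnlen_fold_sketch (void)
     : : {
     : :   /* With a known source length, the MIN_EXPR built above makes
     : :      these compile-time constants: MIN (5, 3) and MIN (5, 32). */
     : :   return strnlen ("hello", 3) + strnlen ("hello", 32);   /* 3 + 5 */
     : : }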
3570 : :
3571 : : /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3572 : : bytes starting at DATA + OFFSET and return them reinterpreted as
3573 : : a target constant. */
3574 : :
3575 : : static rtx
3576 : 162424 : builtin_memcpy_read_str (void *data, void *, HOST_WIDE_INT offset,
3577 : : fixed_size_mode mode)
3578 : : {
3579 : : /* The REPresentation pointed to by DATA need not be a nul-terminated
3580 : : string but the caller guarantees it's large enough for MODE. */
3581 : 162424 : const char *rep = (const char *) data;
3582 : :
3583 : 162424 : return c_readstr (rep + offset, mode, /*nul_terminated=*/false);
3584 : : }
3585 : :
3586 : : /* LEN specifies the length of the block for a memcpy/memset operation.
3587 : : Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3588 : : In some cases we can make a very likely guess at the maximum size;
3589 : : that guess goes into PROBABLE_MAX_SIZE. */
3590 : :
3591 : : static void
3592 : 176115 : determine_block_size (tree len, rtx len_rtx,
3593 : : unsigned HOST_WIDE_INT *min_size,
3594 : : unsigned HOST_WIDE_INT *max_size,
3595 : : unsigned HOST_WIDE_INT *probable_max_size)
3596 : : {
3597 : 176115 : if (CONST_INT_P (len_rtx))
3598 : : {
3599 : 93361 : *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3600 : 93361 : return;
3601 : : }
3602 : : else
3603 : : {
3604 : 82754 : wide_int min, max;
3605 : 82754 : enum value_range_kind range_type = VR_UNDEFINED;
3606 : :
3607 : : /* Determine bounds from the type. */
3608 : 82754 : if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3609 : 82753 : *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3610 : : else
3611 : 1 : *min_size = 0;
3612 : 82754 : if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3613 : 165508 : *probable_max_size = *max_size
3614 : 82754 : = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3615 : : else
3616 : 0 : *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3617 : :
3618 : 82754 : if (TREE_CODE (len) == SSA_NAME)
3619 : : {
3620 : 82754 : int_range_max r;
3621 : 82754 : tree tmin, tmax;
3622 : 82754 : gimple *cg = currently_expanding_gimple_stmt;
3623 : 165508 : get_range_query (cfun)->range_of_expr (r, len, cg);
3624 : 82754 : range_type = get_legacy_range (r, tmin, tmax);
3625 : 82754 : if (range_type != VR_UNDEFINED)
3626 : : {
3627 : 82754 : min = wi::to_wide (tmin);
3628 : 82754 : max = wi::to_wide (tmax);
3629 : : }
3630 : 82754 : }
3631 : 82754 : if (range_type == VR_RANGE)
3632 : : {
3633 : 61405 : if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3634 : 49762 : *min_size = min.to_uhwi ();
3635 : 61405 : if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3636 : 51833 : *probable_max_size = *max_size = max.to_uhwi ();
3637 : : }
3638 : 21349 : else if (range_type == VR_ANTI_RANGE)
3639 : : {
3640 : : /* Code like
3641 : :
3642 : : int n;
3643 : : if (n < 100)
3644 : : memcpy (a, b, n)
3645 : :
3646 : : produces an anti-range allowing negative values of N. We can
3647 : : still use that information to guess that N is not negative.
3648 : : */
3649 : 3745 : if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3650 : 3666 : *probable_max_size = min.to_uhwi () - 1;
3651 : : }
3652 : 82754 : }
3653 : 82754 : gcc_checking_assert (*max_size <=
3654 : : (unsigned HOST_WIDE_INT)
3655 : : GET_MODE_MASK (GET_MODE (len_rtx)));
3656 : : }
3657 : :
3658 : : /* Expand a call EXP to the memcpy builtin.
3659 : : Return NULL_RTX if we failed; the caller should emit a normal call.
3660 : : Otherwise try to get the result in TARGET, if convenient (and in
3661 : : mode MODE if that's convenient). */
3662 : :
3663 : : static rtx
3664 : 93267 : expand_builtin_memcpy (tree exp, rtx target)
3665 : : {
3666 : 93267 : if (!validate_arglist (exp,
3667 : : POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3668 : : return NULL_RTX;
3669 : :
3670 : 93249 : tree dest = CALL_EXPR_ARG (exp, 0);
3671 : 93249 : tree src = CALL_EXPR_ARG (exp, 1);
3672 : 93249 : tree len = CALL_EXPR_ARG (exp, 2);
3673 : :
3674 : 93249 : return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3675 : 93249 : /*retmode=*/ RETURN_BEGIN, false);
3676 : : }
3677 : :
3678 : : /* Check a call EXP to the memmove built-in for validity.
3679 : : Return NULL_RTX on both success and failure. */
3680 : :
3681 : : static rtx
3682 : 16231 : expand_builtin_memmove (tree exp, rtx target)
3683 : : {
3684 : 16231 : if (!validate_arglist (exp,
3685 : : POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3686 : : return NULL_RTX;
3687 : :
3688 : 16216 : tree dest = CALL_EXPR_ARG (exp, 0);
3689 : 16216 : tree src = CALL_EXPR_ARG (exp, 1);
3690 : 16216 : tree len = CALL_EXPR_ARG (exp, 2);
3691 : :
3692 : 16216 : return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3693 : 16216 : /*retmode=*/ RETURN_BEGIN, true);
3694 : : }
3695 : :
3696 : : /* Expand a call EXP to the mempcpy builtin.
3697 : : Return NULL_RTX if we failed; the caller should emit a normal call,
3698 : : otherwise try to get the result in TARGET, if convenient (and in
3699 : : mode MODE if that's convenient). */
3700 : :
3701 : : static rtx
3702 : 1634 : expand_builtin_mempcpy (tree exp, rtx target)
3703 : : {
3704 : 1634 : if (!validate_arglist (exp,
3705 : : POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3706 : : return NULL_RTX;
3707 : :
3708 : 1629 : tree dest = CALL_EXPR_ARG (exp, 0);
3709 : 1629 : tree src = CALL_EXPR_ARG (exp, 1);
3710 : 1629 : tree len = CALL_EXPR_ARG (exp, 2);
3711 : :
3712 : : /* Policy does not generally allow using compute_objsize (which
3713 : : is used internally by check_memop_size) to change code generation
3714 : : or drive optimization decisions.
3715 : :
3716 : : In this instance it is safe because the code we generate has
3717 : : the same semantics regardless of the return value of
3718 : : check_memop_sizes. Exactly the same amount of data is copied
3719 : : and the return value is exactly the same in both cases.
3720 : :
3721 : : Furthermore, check_memop_size always uses mode 0 for the call to
3722 : : compute_objsize, so the imprecise nature of compute_objsize is
3723 : : avoided. */
3724 : :
3725 : : /* Avoid expanding mempcpy into memcpy when the call is determined
3726 : : to overflow the buffer. This also prevents the same overflow
3727 : : from being diagnosed again when expanding memcpy. */
3728 : :
3729 : 1629 : return expand_builtin_mempcpy_args (dest, src, len,
3730 : 1629 : target, exp, /*retmode=*/ RETURN_END);
3731 : : }
3732 : :
3733 : : /* Helper function to do the actual work of expanding the memory copy
3734 : : family of functions (memcpy, mempcpy, stpcpy). The expansion copies
3735 : : LEN bytes of memory from SRC to DEST and assigns to TARGET if
3736 : : convenient. The return value is based on the RETMODE argument. */
3737 : :
3738 : : static rtx
3739 : 111117 : expand_builtin_memory_copy_args (tree dest, tree src, tree len,
3740 : : rtx target, tree exp, memop_ret retmode,
3741 : : bool might_overlap)
3742 : : {
3743 : 111117 : unsigned int src_align = get_pointer_alignment (src);
3744 : 111117 : unsigned int dest_align = get_pointer_alignment (dest);
3745 : 111117 : rtx dest_mem, src_mem, dest_addr, len_rtx;
3746 : 111117 : HOST_WIDE_INT expected_size = -1;
3747 : 111117 : unsigned int expected_align = 0;
3748 : 111117 : unsigned HOST_WIDE_INT min_size;
3749 : 111117 : unsigned HOST_WIDE_INT max_size;
3750 : 111117 : unsigned HOST_WIDE_INT probable_max_size;
3751 : :
3752 : 111117 : bool is_move_done;
3753 : :
3754 : : /* If DEST is not a pointer type, call the normal function. */
3755 : 111117 : if (dest_align == 0)
3756 : : return NULL_RTX;
3757 : :
3758 : : /* If SRC is not a pointer type, don't do this
3759 : : operation in-line. */
3760 : 111117 : if (src_align == 0)
3761 : : return NULL_RTX;
3762 : :
3763 : 111117 : if (currently_expanding_gimple_stmt)
3764 : 111117 : stringop_block_profile (currently_expanding_gimple_stmt,
3765 : : &expected_align, &expected_size);
3766 : :
3767 : 111117 : if (expected_align < dest_align)
3768 : 111104 : expected_align = dest_align;
3769 : 111117 : dest_mem = get_memory_rtx (dest, len);
3770 : 111117 : set_mem_align (dest_mem, dest_align);
3771 : 111117 : len_rtx = expand_normal (len);
3772 : 111117 : determine_block_size (len, len_rtx, &min_size, &max_size,
3773 : : &probable_max_size);
3774 : :
3775 : : /* Try to get the byte representation of the constant SRC points to,
3776 : : with its byte size in NBYTES. */
3777 : 111117 : unsigned HOST_WIDE_INT nbytes;
3778 : 111117 : const char *rep = getbyterep (src, &nbytes);
3779 : :
3780 : : /* If the function's constant bound LEN_RTX is less than or equal
3781 : : to the byte size of the representation of the constant argument,
3782 : : and if block move would be done by pieces, we can avoid loading
3783 : : the bytes from memory and only store the computed constant.
3784 : : This works in the overlap (memmove) case as well because
3785 : : store_by_pieces just generates a series of stores of constants
3786 : : from the representation returned by getbyterep(). */
3787 : 111117 : if (rep
3788 : 29261 : && CONST_INT_P (len_rtx)
3789 : 27719 : && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes
3790 : 138836 : && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3791 : : CONST_CAST (char *, rep),
3792 : : dest_align, false))
3793 : : {
3794 : 27251 : dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3795 : : builtin_memcpy_read_str,
3796 : : CONST_CAST (char *, rep),
3797 : : dest_align, false, retmode);
3798 : 27251 : dest_mem = force_operand (XEXP (dest_mem, 0), target);
3799 : 27251 : dest_mem = convert_memory_address (ptr_mode, dest_mem);
3800 : 27251 : return dest_mem;
3801 : : }
3802 : :
3803 : 83866 : src_mem = get_memory_rtx (src, len);
3804 : 83866 : set_mem_align (src_mem, src_align);
3805 : :
3806 : : /* Copy word part most expediently. */
3807 : 83866 : enum block_op_methods method = BLOCK_OP_NORMAL;
3808 : 83866 : if (CALL_EXPR_TAILCALL (exp)
3809 : 83866 : && (retmode == RETURN_BEGIN || target == const0_rtx))
3810 : : method = BLOCK_OP_TAILCALL;
3811 : 83866 : bool use_mempcpy_call = (targetm.libc_has_fast_function (BUILT_IN_MEMPCPY)
3812 : 83866 : && retmode == RETURN_END
3813 : 83866 : && !might_overlap
3814 : 85168 : && target != const0_rtx);
3815 : : if (use_mempcpy_call)
3816 : 83866 : method = BLOCK_OP_NO_LIBCALL_RET;
3817 : 83866 : dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
3818 : : expected_align, expected_size,
3819 : : min_size, max_size, probable_max_size,
3820 : : use_mempcpy_call, &is_move_done,
3821 : : might_overlap, tree_ctz (len));
3822 : :
3823 : : /* Bail out when a mempcpy call would be expanded as libcall and when
3824 : : we have a target that provides a fast implementation
3825 : : of mempcpy routine. */
3826 : 83866 : if (!is_move_done)
3827 : : return NULL_RTX;
3828 : :
3829 : 73738 : if (dest_addr == pc_rtx)
3830 : : return NULL_RTX;
3831 : :
3832 : 73738 : if (dest_addr == 0)
3833 : : {
3834 : 32412 : dest_addr = force_operand (XEXP (dest_mem, 0), target);
3835 : 32412 : dest_addr = convert_memory_address (ptr_mode, dest_addr);
3836 : : }
3837 : :
3838 : 73738 : if (retmode != RETURN_BEGIN && target != const0_rtx)
3839 : : {
3840 : 810 : dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
3841 : : /* stpcpy pointer to last byte. */
3842 : 810 : if (retmode == RETURN_END_MINUS_ONE)
3843 : 0 : dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
3844 : : }
3845 : :
3846 : : return dest_addr;
3847 : : }
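     : :
     : : /* A source-level illustration of the store_by_pieces shortcut
     : :    above (hedged; the exact stores depend on alignment and the
     : :    target's by-pieces limits): */
     : : #include <string.h>
     : :
     : : static void
     : : memcpy_by_pieces_sketch (char *buf)
     : : {
     : :   /* getbyterep() yields the literal's bytes, so for a suitably
     : :      aligned BUF this becomes a single 8-byte constant store
     : :      instead of a load from .rodata followed by a copy. */
     : :   memcpy (buf, "abcdefg", 8);
     : : }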
3848 : :
3849 : : static rtx
3850 : 1652 : expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3851 : : rtx target, tree orig_exp, memop_ret retmode)
3852 : : {
3853 : 1629 : return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
3854 : 0 : retmode, false);
3855 : : }
3856 : :
3857 : : /* Expand into a movstr instruction, if one is available. Return NULL_RTX
3858 : : if we failed; the caller should emit a normal call. Otherwise try to
3859 : : get the result in TARGET, if convenient.
3860 : : The return value is based on the RETMODE argument. */
3861 : :
3862 : : static rtx
3863 : 2269 : expand_movstr (tree dest, tree src, rtx target, memop_ret retmode)
3864 : : {
3865 : 2269 : class expand_operand ops[3];
3866 : 2269 : rtx dest_mem;
3867 : 2269 : rtx src_mem;
3868 : :
3869 : 2269 : if (!targetm.have_movstr ())
3870 : : return NULL_RTX;
3871 : :
3872 : 0 : dest_mem = get_memory_rtx (dest, NULL);
3873 : 0 : src_mem = get_memory_rtx (src, NULL);
3874 : 0 : if (retmode == RETURN_BEGIN)
3875 : : {
3876 : 0 : target = force_reg (Pmode, XEXP (dest_mem, 0));
3877 : 0 : dest_mem = replace_equiv_address (dest_mem, target);
3878 : : }
3879 : :
3880 : 0 : create_output_operand (&ops[0],
3881 : 0 : retmode != RETURN_BEGIN ? target : NULL_RTX, Pmode);
3882 : 0 : create_fixed_operand (&ops[1], dest_mem);
3883 : 0 : create_fixed_operand (&ops[2], src_mem);
3884 : 0 : if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3885 : : return NULL_RTX;
3886 : :
3887 : 0 : if (retmode != RETURN_BEGIN && target != const0_rtx)
3888 : : {
3889 : 0 : target = ops[0].value;
3890 : : /* movstr is supposed to set end to the address of the NUL
3891 : : terminator. If the caller requested a mempcpy-like return value,
3892 : : adjust it. */
3893 : 0 : if (retmode == RETURN_END)
3894 : : {
3895 : 0 : rtx tem = plus_constant (GET_MODE (target),
3896 : 0 : gen_lowpart (GET_MODE (target), target), 1);
3897 : 0 : emit_move_insn (target, force_operand (tem, NULL_RTX));
3898 : : }
3899 : : }
3900 : : return target;
3901 : : }
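     : :
     : : /* For reference, the RETMODE conventions handled above, in source
     : :    terms (movstr itself yields the address of the NUL):
     : :      RETURN_BEGIN          dst                      strcpy-style
     : :      RETURN_END_MINUS_ONE  dst + strlen (src)       stpcpy-style
     : :      RETURN_END            dst + strlen (src) + 1   mempcpy-style */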
3902 : :
3903 : : /* Expand expression EXP, which is a call to the strcpy builtin. Return
3904 : : NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3905 : : try to get the result in TARGET, if convenient (and in mode MODE if that's
3906 : : convenient). */
3907 : :
3908 : : static rtx
3909 : 1862 : expand_builtin_strcpy (tree exp, rtx target)
3910 : : {
3911 : 1862 : if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3912 : : return NULL_RTX;
3913 : :
3914 : 1847 : tree dest = CALL_EXPR_ARG (exp, 0);
3915 : 1847 : tree src = CALL_EXPR_ARG (exp, 1);
3916 : :
3917 : 1847 : return expand_builtin_strcpy_args (exp, dest, src, target);
3918 : : }
3919 : :
3920 : : /* Helper function to do the actual work for expand_builtin_strcpy. The
3921 : : arguments to the builtin_strcpy call DEST and SRC are broken out
3922 : : so that this can also be called without constructing an actual CALL_EXPR.
3923 : : The other arguments and return value are the same as for
3924 : : expand_builtin_strcpy. */
3925 : :
3926 : : static rtx
3927 : 1847 : expand_builtin_strcpy_args (tree, tree dest, tree src, rtx target)
3928 : : {
3929 : 1847 : return expand_movstr (dest, src, target, /*retmode=*/ RETURN_BEGIN);
3930 : : }
3931 : :
3932 : : /* Expand a call EXP to the stpcpy builtin.
3933 : : Return NULL_RTX if we failed; the caller should emit a normal call.
3934 : : Otherwise try to get the result in TARGET, if convenient (and in
3935 : : mode MODE if that's convenient). */
3936 : :
3937 : : static rtx
3938 : 454 : expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
3939 : : {
3940 : 454 : tree dst, src;
3941 : 454 : location_t loc = EXPR_LOCATION (exp);
3942 : :
3943 : 454 : if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3944 : : return NULL_RTX;
3945 : :
3946 : 445 : dst = CALL_EXPR_ARG (exp, 0);
3947 : 445 : src = CALL_EXPR_ARG (exp, 1);
3948 : :
3949 : : /* If return value is ignored, transform stpcpy into strcpy. */
3950 : 445 : if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3951 : : {
3952 : 0 : tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3953 : 0 : tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3954 : 0 : return expand_expr (result, target, mode, EXPAND_NORMAL);
3955 : : }
3956 : : else
3957 : : {
3958 : 445 : tree len, lenp1;
3959 : 445 : rtx ret;
3960 : :
3961 : : /* Ensure we get an actual string whose length can be evaluated at
3962 : : compile-time, not an expression containing a string. This is
3963 : : because the latter will potentially produce pessimized code
3964 : : when used to produce the return value. */
3965 : 445 : c_strlen_data lendata = { };
3966 : 445 : if (!c_getstr (src)
3967 : 445 : || !(len = c_strlen (src, 0, &lendata, 1)))
3968 : 422 : return expand_movstr (dst, src, target,
3969 : 422 : /*retmode=*/ RETURN_END_MINUS_ONE);
3970 : :
3971 : 23 : lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3972 : 23 : ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3973 : : target, exp,
3974 : : /*retmode=*/ RETURN_END_MINUS_ONE);
3975 : :
3976 : 23 : if (ret)
3977 : : return ret;
3978 : :
3979 : 0 : if (TREE_CODE (len) == INTEGER_CST)
3980 : : {
3981 : 0 : rtx len_rtx = expand_normal (len);
3982 : :
3983 : 0 : if (CONST_INT_P (len_rtx))
3984 : : {
3985 : 0 : ret = expand_builtin_strcpy_args (exp, dst, src, target);
3986 : :
3987 : 0 : if (ret)
3988 : : {
3989 : 0 : if (! target)
3990 : : {
3991 : 0 : if (mode != VOIDmode)
3992 : 0 : target = gen_reg_rtx (mode);
3993 : : else
3994 : 0 : target = gen_reg_rtx (GET_MODE (ret));
3995 : : }
3996 : 0 : if (GET_MODE (target) != GET_MODE (ret))
3997 : 0 : ret = gen_lowpart (GET_MODE (target), ret);
3998 : :
3999 : 0 : ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
4000 : 0 : ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
4001 : 0 : gcc_assert (ret);
4002 : :
4003 : : return target;
4004 : : }
4005 : : }
4006 : : }
4007 : :
4008 : 0 : return expand_movstr (dst, src, target,
4009 : 0 : /*retmode=*/ RETURN_END_MINUS_ONE);
4010 : : }
4011 : : }
4012 : :
4013 : : /* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
4014 : : arguments while being careful to avoid duplicate warnings (which could
4015 : : be issued if the expander were to expand the call, resulting in it
4016 : : being emitted in expand_call(). */
4017 : :
4018 : : static rtx
4019 : 454 : expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
4020 : : {
4021 : 454 : if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
4022 : : {
4023 : : /* The call has been successfully expanded. Check for nonstring
4024 : : arguments and issue warnings as appropriate. */
4025 : 23 : maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
4026 : 23 : return ret;
4027 : : }
4028 : :
4029 : : return NULL_RTX;
4030 : : }
4031 : :
4032 : : /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
4033 : : bytes from the constant string DATA + OFFSET and return them as a
4034 : : target constant. */
4035 : :
4036 : : rtx
4037 : 9320 : builtin_strncpy_read_str (void *data, void *, HOST_WIDE_INT offset,
4038 : : fixed_size_mode mode)
4039 : : {
4040 : 9320 : const char *str = (const char *) data;
4041 : :
4042 : 9320 : if ((unsigned HOST_WIDE_INT) offset > strlen (str))
4043 : 260 : return const0_rtx;
4044 : :
4045 : 9060 : return c_readstr (str + offset, mode);
4046 : : }
4047 : :
4048 : : /* Helper to check the sizes of sequences and the destination of calls
4049 : : to __builtin_strncat and __builtin___strncat_chk. Returns true on
4050 : : success (no overflow or invalid sizes), false otherwise. */
4051 : :
4052 : : static bool
4053 : 109 : check_strncat_sizes (tree exp, tree objsize)
4054 : : {
4055 : 109 : tree dest = CALL_EXPR_ARG (exp, 0);
4056 : 109 : tree src = CALL_EXPR_ARG (exp, 1);
4057 : 109 : tree maxread = CALL_EXPR_ARG (exp, 2);
4058 : :
4059 : : /* Try to determine the range of lengths that the source expression
4060 : : refers to. */
4061 : 109 : c_strlen_data lendata = { };
4062 : 109 : get_range_strlen (src, &lendata, /* eltsize = */ 1);
4063 : :
4064 : : /* Try to verify that the destination is big enough for the shortest
4065 : : string. */
4066 : :
4067 : 109 : access_data data (nullptr, exp, access_read_write, maxread, true);
4068 : 109 : if (!objsize && warn_stringop_overflow)
4069 : : {
4070 : : /* If it hasn't been provided by __strncat_chk, try to determine
4071 : : the size of the destination object into which the source is
4072 : : being copied. */
4073 : 0 : objsize = compute_objsize (dest, warn_stringop_overflow - 1, &data.dst);
4074 : : }
4075 : :
4076 : : /* Add one for the terminating nul. */
4077 : 109 : tree srclen = (lendata.minlen
4078 : 109 : ? fold_build2 (PLUS_EXPR, size_type_node, lendata.minlen,
4079 : : size_one_node)
4080 : : : NULL_TREE);
4081 : :
4082 : : /* The strncat function copies at most MAXREAD bytes and always appends
4083 : : the terminating nul so the specified upper bound should never be equal
4084 : : to (or greater than) the size of the destination. */
4085 : 56 : if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
4086 : 157 : && tree_int_cst_equal (objsize, maxread))
4087 : : {
4088 : 23 : location_t loc = EXPR_LOCATION (exp);
4089 : 23 : warning_at (loc, OPT_Wstringop_overflow_,
4090 : : "%qD specified bound %E equals destination size",
4091 : : get_callee_fndecl (exp), maxread);
4092 : :
4093 : 23 : return false;
4094 : : }
4095 : :
4096 : 86 : if (!srclen
4097 : 86 : || (maxread && tree_fits_uhwi_p (maxread)
4098 : 33 : && tree_fits_uhwi_p (srclen)
4099 : 33 : && tree_int_cst_lt (maxread, srclen)))
4100 : : srclen = maxread;
4101 : :
4102 : : /* The number of bytes to write is LEN but check_access will also
4103 : : check SRCLEN if LEN's value isn't known. */
4104 : 86 : return check_access (exp, /*dstwrite=*/NULL_TREE, maxread, srclen,
4105 : 86 : objsize, data.mode, &data);
4106 : : }
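     : :
     : : /* The bound-equals-destination-size diagnostic above fires on the
     : :    classic strncat misuse (a hedged example; strncat always appends
     : :    the terminating nul, so the bound must leave room for it): */
     : : #include <string.h>
     : :
     : : static void
     : : strncat_misuse_sketch (char *src)
     : : {
     : :   char d[8] = "";
     : :   strncat (d, src, sizeof d);   /* warning: specified bound 8
     : :                                    equals destination size */
     : : }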
4107 : :
4108 : : /* Expand expression EXP, which is a call to the strncpy builtin. Return
4109 : : NULL_RTX if we failed; the caller should emit a normal call. */
4110 : :
4111 : : static rtx
4112 : 2169 : expand_builtin_strncpy (tree exp, rtx target)
4113 : : {
4114 : 2169 : location_t loc = EXPR_LOCATION (exp);
4115 : :
4116 : 2169 : if (!validate_arglist (exp,
4117 : : POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4118 : : return NULL_RTX;
4119 : 2157 : tree dest = CALL_EXPR_ARG (exp, 0);
4120 : 2157 : tree src = CALL_EXPR_ARG (exp, 1);
4121 : : /* The number of bytes to write (not the maximum). */
4122 : 2157 : tree len = CALL_EXPR_ARG (exp, 2);
4123 : :
4124 : : /* The length of the source sequence. */
4125 : 2157 : tree slen = c_strlen (src, 1);
4126 : :
4127 : : /* We must be passed a constant len and src parameter. */
4128 : 2157 : if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
4129 : : return NULL_RTX;
4130 : :
4131 : 237 : slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
4132 : :
4133 : : /* We're required to pad with trailing zeros if the requested
4134 : : len is greater than strlen(s2)+1. In that case try to
4135 : : use store_by_pieces; if that fails, punt. */
4136 : 237 : if (tree_int_cst_lt (slen, len))
4137 : : {
4138 : 142 : unsigned int dest_align = get_pointer_alignment (dest);
4139 : 142 : const char *p = c_getstr (src);
4140 : 142 : rtx dest_mem;
4141 : :
4142 : 136 : if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
4143 : 278 : || !can_store_by_pieces (tree_to_uhwi (len),
4144 : : builtin_strncpy_read_str,
4145 : : CONST_CAST (char *, p),
4146 : : dest_align, false))
4147 : 14 : return NULL_RTX;
4148 : :
4149 : 128 : dest_mem = get_memory_rtx (dest, len);
4150 : 128 : store_by_pieces (dest_mem, tree_to_uhwi (len),
4151 : : builtin_strncpy_read_str,
4152 : : CONST_CAST (char *, p), dest_align, false,
4153 : : RETURN_BEGIN);
4154 : 128 : dest_mem = force_operand (XEXP (dest_mem, 0), target);
4155 : 128 : dest_mem = convert_memory_address (ptr_mode, dest_mem);
4156 : 128 : return dest_mem;
4157 : : }
4158 : :
4159 : : return NULL_RTX;
4160 : : }
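     : :
     : : /* The padding case handled above, at the source level (a hedged
     : :    example with a constant source shorter than the bound): */
     : : #include <string.h>
     : :
     : : static void
     : : strncpy_pad_sketch (char *buf)
     : : {
     : :   /* Stores "hi" and then six NULs via store_by_pieces;
     : :      builtin_strncpy_read_str supplies const0_rtx past the end
     : :      of the source string to provide the zero padding. */
     : :   strncpy (buf, "hi", 8);
     : : }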
4161 : :
4162 : : /* Return the RTL of a register in MODE generated from PREV in the
4163 : : previous iteration. */
4164 : :
4165 : : static rtx
4166 : 258081 : gen_memset_value_from_prev (by_pieces_prev *prev, fixed_size_mode mode)
4167 : : {
4168 : 258081 : rtx target = nullptr;
4169 : 258081 : if (prev != nullptr && prev->data != nullptr)
4170 : : {
4171 : : /* Use the previous data in the same mode. */
4172 : 145987 : if (prev->mode == mode)
4173 : 258081 : return prev->data;
4174 : :
4175 : 35165 : fixed_size_mode prev_mode = prev->mode;
4176 : :
4177 : : /* Don't use the previous data to write QImode if it is in a
4178 : : vector mode. */
4179 : 35165 : if (VECTOR_MODE_P (prev_mode) && mode == QImode)
4180 : : return target;
4181 : :
4182 : 34897 : rtx prev_rtx = prev->data;
4183 : :
4184 : 34897 : if (REG_P (prev_rtx)
4185 : 22871 : && HARD_REGISTER_P (prev_rtx)
4186 : 34897 : && lowpart_subreg_regno (REGNO (prev_rtx), prev_mode, mode) < 0)
4187 : : {
4188 : : /* This case occurs when PREV_MODE is a vector and when
4189 : : MODE is too small to store using vector operations.
4190 : : After register allocation, the code will need to move the
4191 : : lowpart of the vector register into a non-vector register.
4192 : :
4193 : : Also, the target has chosen to use a hard register
4194 : : instead of going with the default choice of using a
4195 : : pseudo register. We should respect that choice and try to
4196 : : avoid creating a pseudo register with the same mode as the
4197 : : current hard register.
4198 : :
4199 : : In principle, we could just use a lowpart MODE subreg of
4200 : : the vector register. However, the vector register mode might
4201 : : be too wide for non-vector registers, and we already know
4202 : : that the non-vector mode is too small for vector registers.
4203 : : It's therefore likely that we'd need to spill to memory in
4204 : : the vector mode and reload the non-vector value from there.
4205 : :
4206 : : Try to avoid that by reducing the vector register to the
4207 : : smallest size that it can hold. This should increase the
4208 : : chances that non-vector registers can hold both the inner
4209 : : and outer modes of the subreg that we generate later. */
4210 : 0 : machine_mode m;
4211 : 0 : fixed_size_mode candidate;
4212 : 0 : FOR_EACH_MODE_IN_CLASS (m, GET_MODE_CLASS (mode))
4213 : 0 : if (is_a<fixed_size_mode> (m, &candidate))
4214 : : {
4215 : 0 : if (GET_MODE_SIZE (candidate)
4216 : 0 : >= GET_MODE_SIZE (prev_mode))
4217 : : break;
4218 : 0 : if (GET_MODE_SIZE (candidate) >= GET_MODE_SIZE (mode)
4219 : 0 : && lowpart_subreg_regno (REGNO (prev_rtx),
4220 : : prev_mode, candidate) >= 0)
4221 : : {
4222 : 0 : target = lowpart_subreg (candidate, prev_rtx,
4223 : : prev_mode);
4224 : 0 : prev_rtx = target;
4225 : 0 : prev_mode = candidate;
4226 : 0 : break;
4227 : : }
4228 : : }
4229 : 0 : if (target == nullptr)
4230 : 0 : prev_rtx = copy_to_reg (prev_rtx);
4231 : : }
4232 : :
4233 : 34897 : target = lowpart_subreg (mode, prev_rtx, prev_mode);
4234 : : }
4235 : : return target;
4236 : : }
4237 : :
4238 : : /* Callback routine for store_by_pieces. Return a target constant of
4239 : : GET_MODE_SIZE (MODE) bytes, each a copy of the byte DATA points
4240 : : to. If PREV isn't nullptr, it has the RTL info from the
4241 : : previous iteration. */
4242 : :
4243 : : rtx
4244 : 256771 : builtin_memset_read_str (void *data, void *prev,
4245 : : HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4246 : : fixed_size_mode mode)
4247 : : {
4248 : 256771 : const char *c = (const char *) data;
4249 : 256771 : unsigned int size = GET_MODE_SIZE (mode);
4250 : :
4251 : 256771 : rtx target = gen_memset_value_from_prev ((by_pieces_prev *) prev,
4252 : : mode);
4253 : 256771 : if (target != nullptr)
4254 : : return target;
4255 : 111632 : rtx src = gen_int_mode (*c, QImode);
4256 : :
4257 : 111632 : if (VECTOR_MODE_P (mode))
4258 : : {
4259 : 155680 : gcc_assert (GET_MODE_INNER (mode) == QImode);
4260 : :
4261 : 77840 : rtx const_vec = gen_const_vec_duplicate (mode, src);
4262 : 77840 : if (prev == NULL)
4263 : : /* Return CONST_VECTOR when called by a query function. */
4264 : : return const_vec;
4265 : :
4266 : : /* Use the move expander with CONST_VECTOR. */
4267 : 61704 : target = gen_reg_rtx (mode);
4268 : 61704 : emit_move_insn (target, const_vec);
4269 : 61704 : return target;
4270 : : }
4271 : :
4272 : 33792 : char *p = XALLOCAVEC (char, size);
4273 : :
4274 : 33792 : memset (p, *c, size);
4275 : :
4276 : 33792 : return c_readstr (p, mode);
4277 : : }
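 : : /* A worked example with assumed inputs: for *DATA == 0x41 and
 : : SImode, P becomes "AAAA" and c_readstr returns the equivalent
 : : of (const_int 0x41414141); the result is the same on either
 : : endianness since all bytes are equal. For a QImode vector mode
 : : with PREV == NULL, the CONST_VECTOR duplicate is returned
 : : directly, so query routines such as can_store_by_pieces can
 : : cost this path without emitting any RTL. */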
4278 : :
4279 : : /* Callback routine for store_by_pieces. Return the RTL of a register
4280 : : containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
4281 : : char value given in the RTL register data. For example, if mode is
4282 : : 4 bytes wide, return the RTL for 0x01010101*data. If PREV isn't
4283 : : nullptr, it has the RTL info from the previous iteration. */
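 : : /* For example (values hypothetical): with a 4-byte MODE, COEFF
 : : is read from "\1\1\1\1", i.e. 0x01010101, so a DATA register
 : : holding 0x000000AB is multiplied into 0xABABABAB, one copy of
 : : the byte in each of the four positions. */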
4284 : :
4285 : : static rtx
4286 : 1479 : builtin_memset_gen_str (void *data, void *prev,
4287 : : HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4288 : : fixed_size_mode mode)
4289 : : {
4290 : 1479 : rtx target, coeff;
4291 : 1479 : size_t size;
4292 : 1479 : char *p;
4293 : :
4294 : 1479 : size = GET_MODE_SIZE (mode);
4295 : 1479 : if (size == 1)
4296 : : return (rtx) data;
4297 : :
4298 : 1310 : target = gen_memset_value_from_prev ((by_pieces_prev *) prev, mode);
4299 : 1310 : if (target != nullptr)
4300 : : return target;
4301 : :
4302 : 730 : if (VECTOR_MODE_P (mode))
4303 : : {
4304 : 448 : gcc_assert (GET_MODE_INNER (mode) == QImode);
4305 : :
4306 : : /* Support for vec_duplicate_optab is a precondition for picking
4307 : : a vector mode in the memset expander, so ICODE is valid here. */
4308 : 224 : insn_code icode = optab_handler (vec_duplicate_optab, mode);
4309 : :
4310 : 224 : target = gen_reg_rtx (mode);
4311 : 224 : class expand_operand ops[2];
4312 : 224 : create_output_operand (&ops[0], target, mode);
4313 : 224 : create_input_operand (&ops[1], (rtx) data, QImode);
4314 : 224 : expand_insn (icode, 2, ops);
4315 : 224 : if (!rtx_equal_p (target, ops[0].value))
4316 : 0 : emit_move_insn (target, ops[0].value);
4317 : :
4318 : 224 : return target;
4319 : : }
4320 : :
4321 : 506 : p = XALLOCAVEC (char, size);
4322 : 506 : memset (p, 1, size);
4323 : 506 : coeff = c_readstr (p, mode);
4324 : :
4325 : 506 : target = convert_to_mode (mode, (rtx) data, 1);
4326 : 506 : target = expand_mult (mode, target, coeff, NULL_RTX, 1);
4327 : 506 : return force_reg (mode, target);
4328 : : }
4329 : :
4330 : : /* Expand expression EXP, which is a call to the memset builtin. Return
4331 : : NULL_RTX if we failed; the caller should emit a normal call. Otherwise
4332 : : try to get the result in TARGET, if convenient (and in mode MODE if
4333 : : that's convenient). */
4334 : :
4335 : : rtx
4336 : 65007 : expand_builtin_memset (tree exp, rtx target, machine_mode mode)
4337 : : {
4338 : 65007 : if (!validate_arglist (exp,
4339 : : POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4340 : : return NULL_RTX;
4341 : :
4342 : 64998 : tree dest = CALL_EXPR_ARG (exp, 0);
4343 : 64998 : tree val = CALL_EXPR_ARG (exp, 1);
4344 : 64998 : tree len = CALL_EXPR_ARG (exp, 2);
4345 : :
4346 : 64998 : return expand_builtin_memset_args (dest, val, len, target, mode, exp);
4347 : : }
4348 : :
4349 : : /* Check that store_by_pieces allows BITS + LEN (so that we don't
4350 : : expand something unreasonably long), and every power of 2 in
4351 : : BITS. It is assumed that LEN has already been tested by
4352 : : itself. */
4353 : : static bool
4354 : 25336 : can_store_by_multiple_pieces (unsigned HOST_WIDE_INT bits,
4355 : : by_pieces_constfn constfun,
4356 : : void *constfundata, unsigned int align,
4357 : : bool memsetp,
4358 : : unsigned HOST_WIDE_INT len)
4359 : : {
4360 : 25336 : if (bits
4361 : 25336 : && !can_store_by_pieces (bits + len, constfun, constfundata,
4362 : : align, memsetp))
4363 : : return false;
4364 : :
4365 : : /* The bits set in BITS are expected to be generally in the low range and
4366 : : contiguous. We do NOT want to repeat the test above in case BITS
4367 : : has a single bit set, so we terminate the loop when BITS == BIT.
4368 : : In the unlikely case that BITS has the MSB set, also terminate in
4369 : : case BIT gets shifted out. */
4370 : 1120 : for (unsigned HOST_WIDE_INT bit = 1; bit < bits && bit; bit <<= 1)
4371 : : {
4372 : 840 : if ((bits & bit) == 0)
4373 : 504 : continue;
4374 : :
4375 : 336 : if (!can_store_by_pieces (bit, constfun, constfundata,
4376 : : align, memsetp))
4377 : : return false;
4378 : : }
4379 : :
4380 : : return true;
4381 : : }
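 : : /* A worked example under assumed inputs: BITS == 0b0110 and
 : : LEN == 16 first test can_store_by_pieces (22, ...); the loop
 : : then skips bit 1 (clear in BITS), tests bits 2 and 4
 : : individually, and stops once BIT reaches 8, which is no longer
 : : below BITS. */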
4382 : :
4383 : : /* Try to store VAL (or, if NULL_RTX, VALC) in LEN bytes starting at TO.
4384 : : Return TRUE if successful, FALSE otherwise. TO is assumed to be
4385 : : aligned at an ALIGN-bits boundary. LEN must be a multiple of
4386 : : 1<<CTZ_LEN between MIN_LEN and MAX_LEN.
4387 : :
4388 : : The strategy is to issue one store_by_pieces for each power of two,
4389 : : from most to least significant, guarded by a test on whether there
4390 : : are at least that many bytes left to copy in LEN.
4391 : :
4392 : : ??? Should we skip some powers of two in favor of loops? Maybe start
4393 : : at the max of TO/LEN/word alignment, at least when optimizing for
4394 : : size, instead of ensuring O(log len) dynamic compares? */
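 : : /* The emitted code is roughly the following ladder (a sketch,
 : : assuming byte alignment and no fixed prefix block):
 : :
 : : if (rem >= 2^max_bits) { store 2^max_bits bytes; ptr += ...; rem -= ...; }
 : : ...
 : : if (rem >= 2^ctz_len) { store 2^ctz_len bytes; }
 : :
 : : with the tests elided for bits in the shared prefix of MIN_LEN
 : : and MAX_LEN, and with the largest store turned into a loop when
 : : -finline-stringops forces expansion. */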
4395 : :
4396 : : bool
4397 : 23636 : try_store_by_multiple_pieces (rtx to, rtx len, unsigned int ctz_len,
4398 : : unsigned HOST_WIDE_INT min_len,
4399 : : unsigned HOST_WIDE_INT max_len,
4400 : : rtx val, char valc, unsigned int align)
4401 : : {
4402 : 23636 : int max_bits = floor_log2 (max_len);
4403 : 23636 : int min_bits = floor_log2 (min_len);
4404 : 23636 : int sctz_len = ctz_len;
4405 : :
4406 : 23636 : gcc_checking_assert (sctz_len >= 0);
4407 : :
4408 : 23636 : if (val)
4409 : 516 : valc = 1;
4410 : :
4411 : : /* Bits more significant than TST_BITS are part of the shared prefix
4412 : : in the binary representation of both min_len and max_len. Since
4413 : : they're identical, we don't need to test them in the loop. */
4414 : 23636 : int tst_bits = (max_bits != min_bits ? max_bits
4415 : 10835 : : floor_log2 (max_len ^ min_len));
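 : : /* E.g., for hypothetical MIN_LEN == 20 (10100b) and
 : : MAX_LEN == 23 (10111b): both have floor_log2 == 4, and
 : : 20 ^ 23 == 3, so TST_BITS == 1 and bits 2 through 4 need no
 : : dynamic test. */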
4416 : :
4417 : : /* Save the pre-blksize values. */
4418 : 23636 : int orig_max_bits = max_bits;
4419 : 23636 : int orig_tst_bits = tst_bits;
4420 : :
4421 : : /* Check whether it's profitable to start by storing a fixed BLKSIZE
4422 : : bytes, to lower max_bits. In the unlikely case of a constant LEN
4423 : : (implied by identical MAX_LEN and MIN_LEN), we want to issue a
4424 : : single store_by_pieces, but otherwise, select the minimum multiple
4425 : : of the ALIGN (in bytes) and of the GCD of the possible LENs that
4426 : : brings MAX_LEN below 2^TST_BITS, if that's lower than MIN_LEN. */
4427 : 23636 : unsigned HOST_WIDE_INT blksize;
4428 : 23636 : if (max_len > min_len)
4429 : : {
4430 : 12814 : unsigned HOST_WIDE_INT alrng = MAX (HOST_WIDE_INT_1U << ctz_len,
4431 : : align / BITS_PER_UNIT);
4432 : 12814 : blksize = max_len - (HOST_WIDE_INT_1U << tst_bits) + alrng;
4433 : 12814 : blksize &= ~(alrng - 1);
4434 : : }
4435 : 10822 : else if (max_len == min_len)
4436 : : blksize = max_len;
4437 : : else
4438 : : /* Huh, max_len < min_len? Punt. See pr100843.c. */
4439 : : return false;
4440 : 23635 : if (min_len >= blksize
4441 : : /* ??? Maybe try smaller fixed-prefix blksizes before
4442 : : punting? */
4443 : 23635 : && can_store_by_pieces (blksize, builtin_memset_read_str,
4444 : : &valc, align, true))
4445 : : {
4446 : 284 : min_len -= blksize;
4447 : 284 : min_bits = floor_log2 (min_len);
4448 : 284 : max_len -= blksize;
4449 : 284 : max_bits = floor_log2 (max_len);
4450 : :
4451 : 284 : tst_bits = (max_bits != min_bits ? max_bits
4452 : 173 : : floor_log2 (max_len ^ min_len));
4453 : : }
4454 : : else
4455 : : blksize = 0;
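 : : /* A worked example (hypothetical values): for MIN_LEN == 5,
 : : MAX_LEN == 12, byte alignment and CTZ_LEN == 0, ALRNG == 1,
 : : TST_BITS == 3 and BLKSIZE == 12 - 8 + 1 == 5; since
 : : MIN_LEN >= 5, five bytes are stored unconditionally (assuming
 : : can_store_by_pieces agrees), leaving MIN_LEN == 0 and
 : : MAX_LEN == 7 for the guarded stores of 4, 2 and 1 below. */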
4456 : :
4457 : : /* Check that we can use store by pieces for the maximum store count
4458 : : we may issue (the initial fixed-size block, plus conditional
4459 : : power-of-two-sized stores from max_bits down to ctz_len). */
4460 : 23635 : unsigned HOST_WIDE_INT xlenest = blksize;
4461 : 23635 : if (max_bits >= 0)
4462 : 23462 : xlenest += ((HOST_WIDE_INT_1U << max_bits) * 2
4463 : 23462 : - (HOST_WIDE_INT_1U << ctz_len));
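 : : /* Note (1 << max_bits) * 2 - (1 << ctz_len) is the sum of all
 : : powers of two from ctz_len to max_bits, i.e. the worst-case
 : : total byte count of the ladder: e.g., max_bits == 3 and
 : : ctz_len == 0 give 16 - 1 == 15 == 8 + 4 + 2 + 1. */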
4464 : 23635 : bool max_loop = false;
4465 : 23635 : bool use_store_by_pieces = true;
4466 : : /* Skip the test in case of overflow in xlenest. It shouldn't
4467 : : happen because of the way max_bits and blksize are related, but
4468 : : it doesn't hurt to test. */
4469 : 23635 : if (blksize > xlenest
4470 : 23635 : || !can_store_by_multiple_pieces (xlenest - blksize,
4471 : : builtin_memset_read_str,
4472 : : &valc, align, true, blksize))
4473 : : {
4474 : 23462 : if (!(flag_inline_stringops & ILSOP_MEMSET))
4475 : : return false;
4476 : :
4477 : 1601 : for (max_bits = orig_max_bits;
4478 : 1708 : max_bits >= sctz_len;
4479 : : --max_bits)
4480 : : {
4481 : 1708 : xlenest = ((HOST_WIDE_INT_1U << max_bits) * 2
4482 : 1708 : - (HOST_WIDE_INT_1U << ctz_len));
4483 : : /* Check that blksize plus the bits to be stored as blocks
4484 : : sized at powers of two can be stored by pieces. This is
4485 : : like the test above, but with smaller max_bits. Skip
4486 : : orig_max_bits (it would be redundant). Also skip in case
4487 : : of overflow. */
4488 : 1708 : if (max_bits < orig_max_bits
4489 : 1601 : && xlenest + blksize >= xlenest
4490 : 3309 : && can_store_by_multiple_pieces (xlenest,
4491 : : builtin_memset_read_str,
4492 : : &valc, align, true, blksize))
4493 : : {
4494 : : max_loop = true;
4495 : : break;
4496 : : }
4497 : 1601 : if (blksize
4498 : 1601 : && can_store_by_multiple_pieces (xlenest,
4499 : : builtin_memset_read_str,
4500 : : &valc, align, true, 0))
4501 : : {
4502 : 0 : max_len += blksize;
4503 : 0 : min_len += blksize;
4504 : 0 : tst_bits = orig_tst_bits;
4505 : 0 : blksize = 0;
4506 : 0 : max_loop = true;
4507 : 0 : break;
4508 : : }
4509 : 1601 : if (max_bits == sctz_len)
4510 : : {
4511 : : /* We'll get here if can_store_by_pieces refuses to
4512 : : store even a single QImode. We'll fall back to
4513 : : QImode stores then. */
4514 : 98 : if (!sctz_len)
4515 : : {
4516 : : blksize = 0;
4517 : : max_loop = true;
4518 : : use_store_by_pieces = false;
4519 : : break;
4520 : : }
4521 : 98 : --sctz_len;
4522 : 98 : --ctz_len;
4523 : : }
4524 : : }
4525 : 107 : if (!max_loop)
4526 : : return false;
4527 : : /* If the boundaries are such that min and max may run a
4528 : : different number of trips in the initial loop, the remainder
4529 : : need not be between the moduli, so set tst_bits to cover all
4530 : : bits. Otherwise, if the trip counts are the same, max_len
4531 : : has the common prefix, and the previously-computed tst_bits
4532 : : is usable. */
4533 : 107 : if (max_len >> max_bits > min_len >> max_bits)
4534 : 64 : tst_bits = max_bits;
4535 : : }
4536 : :
4537 : 280 : by_pieces_constfn constfun;
4538 : 280 : void *constfundata;
4539 : 280 : if (val)
4540 : : {
4541 : 1 : constfun = builtin_memset_gen_str;
4542 : 1 : constfundata = val = force_reg (TYPE_MODE (unsigned_char_type_node),
4543 : : val);
4544 : : }
4545 : : else
4546 : : {
4547 : : constfun = builtin_memset_read_str;
4548 : : constfundata = &valc;
4549 : : }
4550 : :
4551 : 280 : rtx ptr = copy_addr_to_reg (XEXP (to, 0));
4552 : 280 : rtx rem = copy_to_mode_reg (ptr_mode, convert_to_mode (ptr_mode, len, 0));
4553 : 280 : to = replace_equiv_address (to, ptr);
4554 : 280 : set_mem_align (to, align);
4555 : :
4556 : 280 : if (blksize)
4557 : : {
4558 : 364 : to = store_by_pieces (to, blksize,
4559 : : constfun, constfundata,
4560 : : align, true,
4561 : : max_len != 0 ? RETURN_END : RETURN_BEGIN);
4562 : 192 : if (max_len == 0)
4563 : : return true;
4564 : :
4565 : : /* Adjust PTR, TO and REM. Since TO's address is likely
4566 : : PTR+offset, we have to replace it. */
4567 : 20 : emit_move_insn (ptr, force_operand (XEXP (to, 0), NULL_RTX));
4568 : 20 : to = replace_equiv_address (to, ptr);
4569 : 20 : rtx rem_minus_blksize = plus_constant (ptr_mode, rem, -blksize);
4570 : 20 : emit_move_insn (rem, force_operand (rem_minus_blksize, NULL_RTX));
4571 : : }
4572 : :
4573 : : /* Iterate over power-of-two block sizes from the maximum length to
4574 : : the least significant bit possibly set in the length. */
4575 : 493 : for (int i = max_bits; i >= sctz_len; i--)
4576 : : {
4577 : 385 : rtx_code_label *loop_label = NULL;
4578 : 385 : rtx_code_label *label = NULL;
4579 : :
4580 : 385 : blksize = HOST_WIDE_INT_1U << i;
4581 : :
4582 : : /* If we're past the bits shared between min_ and max_len, expand
4583 : : a test on the dynamic length, comparing it with the
4584 : : BLKSIZE. */
4585 : 385 : if (i <= tst_bits)
4586 : : {
4587 : 275 : label = gen_label_rtx ();
4588 : 275 : emit_cmp_and_jump_insns (rem, GEN_INT (blksize), LT, NULL,
4589 : : ptr_mode, 1, label,
4590 : : profile_probability::even ());
4591 : : }
4592 : : /* If we are at a bit that is in the prefix shared by min_ and
4593 : : max_len, skip the current BLKSIZE if the bit is clear, but do
4594 : : not skip the loop, even if it doesn't require
4595 : : prechecking. */
4596 : 110 : else if ((max_len & blksize) == 0
4597 : 69 : && !(max_loop && i == max_bits))
4598 : 39 : continue;
4599 : :
4600 : 346 : if (max_loop && i == max_bits)
4601 : : {
4602 : 107 : loop_label = gen_label_rtx ();
4603 : 107 : emit_label (loop_label);
4604 : : /* Since we may run this multiple times, don't assume we
4605 : : know anything about the offset. */
4606 : 107 : clear_mem_offset (to);
4607 : : }
4608 : :
4609 : 346 : bool update_needed = i != sctz_len || loop_label;
4610 : 346 : rtx next_ptr = NULL_RTX;
4611 : 346 : if (!use_store_by_pieces)
4612 : : {
4613 : 0 : gcc_checking_assert (blksize == 1);
4614 : 0 : if (!val)
4615 : 0 : val = gen_int_mode (valc, QImode);
4616 : 0 : to = change_address (to, QImode, 0);
4617 : 0 : emit_move_insn (to, val);
4618 : 0 : if (update_needed)
4619 : 0 : next_ptr = plus_constant (GET_MODE (ptr), ptr, blksize);
4620 : : }
4621 : : else
4622 : : {
4623 : : /* Issue a store of BLKSIZE bytes. */
4624 : 404 : to = store_by_pieces (to, blksize,
4625 : : constfun, constfundata,
4626 : : align, true,
4627 : : update_needed ? RETURN_END : RETURN_BEGIN);
4628 : 346 : next_ptr = XEXP (to, 0);
4629 : : }
4630 : : /* Adjust REM and PTR, unless this is the last iteration. */
4631 : 346 : if (update_needed)
4632 : : {
4633 : 288 : emit_move_insn (ptr, force_operand (next_ptr, NULL_RTX));
4634 : 288 : to = replace_equiv_address (to, ptr);
4635 : 288 : rtx rem_minus_blksize = plus_constant (ptr_mode, rem, -blksize);
4636 : 288 : emit_move_insn (rem, force_operand (rem_minus_blksize, NULL_RTX));
4637 : : }
4638 : :
4639 : 288 : if (loop_label)
4640 : 107 : emit_cmp_and_jump_insns (rem, GEN_INT (blksize), GE, NULL,
4641 : : ptr_mode, 1, loop_label,
4642 : : profile_probability::likely ());
4643 : :
4644 : 346 : if (label)
4645 : : {
4646 : 275 : emit_label (label);
4647 : :
4648 : : /* Given conditional stores, the offset can no longer be
4649 : : known, so clear it. */
4650 : 275 : clear_mem_offset (to);
4651 : : }
4652 : : }
4653 : :
4654 : : return true;
4655 : : }
4656 : :
4657 : : /* Helper function to do the actual work for expand_builtin_memset. The
4658 : : arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4659 : : so that this can also be called without constructing an actual CALL_EXPR.
4660 : : The other arguments and return value are the same as for
4661 : : expand_builtin_memset. */
4662 : :
4663 : : static rtx
4664 : 64998 : expand_builtin_memset_args (tree dest, tree val, tree len,
4665 : : rtx target, machine_mode mode, tree orig_exp)
4666 : : {
4667 : 64998 : tree fndecl, fn;
4668 : 64998 : enum built_in_function fcode;
4669 : 64998 : machine_mode val_mode;
4670 : 64998 : char c;
4671 : 64998 : unsigned int dest_align;
4672 : 64998 : rtx dest_mem, dest_addr, len_rtx;
4673 : 64998 : HOST_WIDE_INT expected_size = -1;
4674 : 64998 : unsigned int expected_align = 0;
4675 : 64998 : unsigned HOST_WIDE_INT min_size;
4676 : 64998 : unsigned HOST_WIDE_INT max_size;
4677 : 64998 : unsigned HOST_WIDE_INT probable_max_size;
4678 : :
4679 : 64998 : dest_align = get_pointer_alignment (dest);
4680 : :
4681 : : /* If DEST is not a pointer type, don't do this operation in-line. */
4682 : 64998 : if (dest_align == 0)
4683 : : return NULL_RTX;
4684 : :
4685 : 64998 : if (currently_expanding_gimple_stmt)
4686 : 64998 : stringop_block_profile (currently_expanding_gimple_stmt,
4687 : : &expected_align, &expected_size);
4688 : :
4689 : 64998 : if (expected_align < dest_align)
4690 : 64993 : expected_align = dest_align;
4691 : :
4692 : : /* If the LEN parameter is zero, return DEST. */
4693 : 64998 : if (integer_zerop (len))
4694 : : {
4695 : : /* Evaluate and ignore VAL in case it has side-effects. */
4696 : 0 : expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4697 : 0 : return expand_expr (dest, target, mode, EXPAND_NORMAL);
4698 : : }
4699 : :
4700 : : /* Stabilize the arguments in case we fail. */
4701 : 64998 : dest = builtin_save_expr (dest);
4702 : 64998 : val = builtin_save_expr (val);
4703 : 64998 : len = builtin_save_expr (len);
4704 : :
4705 : 64998 : len_rtx = expand_normal (len);
4706 : 64998 : determine_block_size (len, len_rtx, &min_size, &max_size,
4707 : : &probable_max_size);
4708 : 64998 : dest_mem = get_memory_rtx (dest, len);
4709 : 64998 : val_mode = TYPE_MODE (unsigned_char_type_node);
4710 : :
4711 : 64998 : if (TREE_CODE (val) != INTEGER_CST
4712 : 64998 : || target_char_cast (val, &c))
4713 : : {
4714 : 1720 : rtx val_rtx;
4715 : :
4716 : 1720 : val_rtx = expand_normal (val);
4717 : 1720 : val_rtx = convert_to_mode (val_mode, val_rtx, 0);
4718 : :
4719 : : /* Assume that we can memset by pieces if we can store
4720 : : the coefficients by pieces (in the required modes). We can't
4721 : : pass builtin_memset_gen_str as that emits RTL. */
4722 : 1720 : c = 1;
4723 : 1720 : if (tree_fits_uhwi_p (len)
4724 : 1720 : && can_store_by_pieces (tree_to_uhwi (len),
4725 : : builtin_memset_read_str, &c, dest_align,
4726 : : true))
4727 : : {
4728 : 772 : val_rtx = force_reg (val_mode, val_rtx);
4729 : 772 : store_by_pieces (dest_mem, tree_to_uhwi (len),
4730 : : builtin_memset_gen_str, val_rtx, dest_align,
4731 : : true, RETURN_BEGIN);
4732 : : }
4733 : 948 : else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4734 : : dest_align, expected_align,
4735 : : expected_size, min_size, max_size,
4736 : : probable_max_size)
4737 : 948 : && !try_store_by_multiple_pieces (dest_mem, len_rtx,
4738 : : tree_ctz (len),
4739 : : min_size, max_size,
4740 : : val_rtx, 0,
4741 : : dest_align))
4742 : 515 : goto do_libcall;
4743 : :
4744 : 1205 : dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4745 : 1205 : dest_mem = convert_memory_address (ptr_mode, dest_mem);
4746 : 1205 : return dest_mem;
4747 : : }
4748 : :
4749 : 63278 : if (c)
4750 : : {
4751 : 13049 : if (tree_fits_uhwi_p (len)
4752 : 13049 : && can_store_by_pieces (tree_to_uhwi (len),
4753 : : builtin_memset_read_str, &c, dest_align,
4754 : : true))
4755 : 4936 : store_by_pieces (dest_mem, tree_to_uhwi (len),
4756 : : builtin_memset_read_str, &c, dest_align, true,
4757 : : RETURN_BEGIN);
4758 : 9891 : else if (!set_storage_via_setmem (dest_mem, len_rtx,
4759 : 8113 : gen_int_mode (c, val_mode),
4760 : : dest_align, expected_align,
4761 : : expected_size, min_size, max_size,
4762 : : probable_max_size)
4763 : 8113 : && !try_store_by_multiple_pieces (dest_mem, len_rtx,
4764 : : tree_ctz (len),
4765 : : min_size, max_size,
4766 : : NULL_RTX, c,
4767 : : dest_align))
4768 : 6335 : goto do_libcall;
4769 : :
4770 : 6714 : dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4771 : 6714 : dest_mem = convert_memory_address (ptr_mode, dest_mem);
4772 : 6714 : return dest_mem;
4773 : : }
4774 : :
4775 : 50229 : set_mem_align (dest_mem, dest_align);
4776 : 100458 : dest_addr = clear_storage_hints (dest_mem, len_rtx,
4777 : 50229 : CALL_EXPR_TAILCALL (orig_exp)
4778 : : ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4779 : : expected_align, expected_size,
4780 : : min_size, max_size,
4781 : : probable_max_size, tree_ctz (len));
4782 : :
4783 : 50229 : if (dest_addr == 0)
4784 : : {
4785 : 39463 : dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4786 : 39463 : dest_addr = convert_memory_address (ptr_mode, dest_addr);
4787 : : }
4788 : :
4789 : : return dest_addr;
4790 : :
4791 : 6850 : do_libcall:
4792 : 6850 : fndecl = get_callee_fndecl (orig_exp);
4793 : 6850 : fcode = DECL_FUNCTION_CODE (fndecl);
4794 : 6850 : if (fcode == BUILT_IN_MEMSET)
4795 : 6850 : fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4796 : : dest, val, len);
4797 : 0 : else if (fcode == BUILT_IN_BZERO)
4798 : 0 : fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4799 : : dest, len);
4800 : : else
4801 : 0 : gcc_unreachable ();
4802 : 6850 : gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4803 : 6850 : CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4804 : 6850 : return expand_call (fn, target, target == const0_rtx);
4805 : : }
4806 : :
4807 : : /* Expand expression EXP, which is a call to the bzero builtin. Return
4808 : : NULL_RTX if we failed; the caller should emit a normal call. */
4809 : :
4810 : : static rtx
4811 : 0 : expand_builtin_bzero (tree exp)
4812 : : {
4813 : 0 : if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4814 : : return NULL_RTX;
4815 : :
4816 : 0 : tree dest = CALL_EXPR_ARG (exp, 0);
4817 : 0 : tree size = CALL_EXPR_ARG (exp, 1);
4818 : :
4819 : : /* New argument list transforming bzero(ptr x, int y) to
4820 : : memset(ptr x, int 0, size_t y). This is done this way
4821 : : so that if it isn't expanded inline, we fall back to
4822 : : calling bzero instead of memset. */
4823 : :
4824 : 0 : location_t loc = EXPR_LOCATION (exp);
4825 : :
4826 : 0 : return expand_builtin_memset_args (dest, integer_zero_node,
4827 : : fold_convert_loc (loc,
4828 : : size_type_node, size),
4829 : 0 : const0_rtx, VOIDmode, exp);
4830 : : }
4831 : :
4832 : : /* Try to expand cmpstr operation ICODE with the given operands.
4833 : : Return the result rtx on success, otherwise return null. */
4834 : :
4835 : : static rtx
4836 : 0 : expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4837 : : HOST_WIDE_INT align)
4838 : : {
4839 : 0 : machine_mode insn_mode = insn_data[icode].operand[0].mode;
4840 : :
4841 : 0 : if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4842 : : target = NULL_RTX;
4843 : :
4844 : 0 : class expand_operand ops[4];
4845 : 0 : create_output_operand (&ops[0], target, insn_mode);
4846 : 0 : create_fixed_operand (&ops[1], arg1_rtx);
4847 : 0 : create_fixed_operand (&ops[2], arg2_rtx);
4848 : 0 : create_integer_operand (&ops[3], align);
4849 : 0 : if (maybe_expand_insn (icode, 4, ops))
4850 : 0 : return ops[0].value;
4851 : : return NULL_RTX;
4852 : : }
4853 : :
4854 : : /* Expand expression EXP, which is a call to the memcmp built-in function.
4855 : : Return NULL_RTX if we failed; the caller should emit a normal call.
4856 : : Otherwise try to get the result in TARGET, if convenient.
4857 : : RESULT_EQ is true if we can relax the returned value to be either zero
4858 : : or nonzero, without caring about the sign. */
4859 : :
4860 : : static rtx
4861 : 102565 : expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
4862 : : {
4863 : 102565 : if (!validate_arglist (exp,
4864 : : POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4865 : : return NULL_RTX;
4866 : :
4867 : 102561 : tree arg1 = CALL_EXPR_ARG (exp, 0);
4868 : 102561 : tree arg2 = CALL_EXPR_ARG (exp, 1);
4869 : 102561 : tree len = CALL_EXPR_ARG (exp, 2);
4870 : :
4871 : : /* Due to the performance benefit, always inline the calls first
4872 : : when result_eq is false. */
4873 : 102561 : rtx result = NULL_RTX;
4874 : 102561 : enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
4875 : 102561 : if (!result_eq && fcode != BUILT_IN_BCMP)
4876 : : {
4877 : 14306 : result = inline_expand_builtin_bytecmp (exp, target);
4878 : 14306 : if (result)
4879 : : return result;
4880 : : }
4881 : :
4882 : 102556 : machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4883 : 102556 : location_t loc = EXPR_LOCATION (exp);
4884 : :
4885 : 102556 : unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4886 : 102556 : unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4887 : :
4888 : : /* If we don't have POINTER_TYPE, call the function. */
4889 : 102556 : if (arg1_align == 0 || arg2_align == 0)
4890 : : return NULL_RTX;
4891 : :
4892 : 102556 : rtx arg1_rtx = get_memory_rtx (arg1, len);
4893 : 102556 : rtx arg2_rtx = get_memory_rtx (arg2, len);
4894 : 102556 : rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4895 : :
4896 : : /* Set MEM_SIZE as appropriate. */
4897 : 102556 : if (CONST_INT_P (len_rtx))
4898 : : {
4899 : 78488 : set_mem_size (arg1_rtx, INTVAL (len_rtx));
4900 : 78488 : set_mem_size (arg2_rtx, INTVAL (len_rtx));
4901 : : }
4902 : :
4903 : 102556 : by_pieces_constfn constfn = NULL;
4904 : :
4905 : : /* Try to get the byte representation of the constant that ARG2 (or,
4906 : : only when the function's result is used for equality to zero, ARG1)
4907 : : points to, with its byte size in NBYTES. */
4908 : 102556 : unsigned HOST_WIDE_INT nbytes;
4909 : 102556 : const char *rep = getbyterep (arg2, &nbytes);
4910 : 102556 : if (result_eq && rep == NULL)
4911 : : {
4912 : : /* For equality to zero the arguments are interchangeable. */
4913 : 62786 : rep = getbyterep (arg1, &nbytes);
4914 : 62786 : if (rep != NULL)
4915 : : std::swap (arg1_rtx, arg2_rtx);
4916 : : }
4917 : :
4918 : : /* If the function's constant bound LEN_RTX is less than or equal
4919 : : to the byte size of the representation of the constant argument,
4920 : : and if block move would be done by pieces, we can avoid loading
4921 : : the bytes from memory and only store the computed constant result. */
4922 : 39770 : if (rep
4923 : 27401 : && CONST_INT_P (len_rtx)
4924 : 27291 : && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes)
4925 : 102556 : constfn = builtin_memcpy_read_str;
4926 : :
4927 : 205112 : result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
4928 : 102556 : TREE_TYPE (len), target,
4929 : : result_eq, constfn,
4930 : : CONST_CAST (char *, rep),
4931 : : tree_ctz (len));
4932 : :
4933 : 102556 : if (result)
4934 : : {
4935 : : /* Return the value in the proper mode for this function. */
4936 : 66285 : if (GET_MODE (result) == mode)
4937 : : return result;
4938 : :
4939 : 0 : if (target != 0)
4940 : : {
4941 : 0 : convert_move (target, result, 0);
4942 : 0 : return target;
4943 : : }
4944 : :
4945 : 0 : return convert_to_mode (mode, result, 0);
4946 : : }
4947 : :
4948 : : return NULL_RTX;
4949 : : }
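 : : /* A hypothetical instance of the constant path above: for
 : : memcmp (buf, "abc", 3), getbyterep yields REP == "abc" with its
 : : terminating NUL (NBYTES == 4 under this assumption); the
 : : constant LEN_RTX == 3 <= NBYTES, so emit_block_cmp_hints may
 : : compare by pieces against the constant bytes instead of
 : : emitting a library call. */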
4950 : :
4951 : : /* Expand expression EXP, which is a call to the strcmp builtin. Return
4952 : : NULL_RTX if we failed; the caller should emit a normal call. Otherwise
4953 : : try to get the result in TARGET, if convenient. */
4954 : :
4955 : : static rtx
4956 : 127991 : expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4957 : : {
4958 : 127991 : if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4959 : : return NULL_RTX;
4960 : :
4961 : 127976 : tree arg1 = CALL_EXPR_ARG (exp, 0);
4962 : 127976 : tree arg2 = CALL_EXPR_ARG (exp, 1);
4963 : :
4964 : : /* Due to the performance benefit, always inline the calls first. */
4965 : 127976 : rtx result = NULL_RTX;
4966 : 127976 : result = inline_expand_builtin_bytecmp (exp, target);
4967 : 127976 : if (result)
4968 : : return result;
4969 : :
4970 : 127577 : insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4971 : 127577 : insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4972 : 127577 : if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
4973 : : return NULL_RTX;
4974 : :
4975 : 127577 : unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4976 : 127577 : unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4977 : :
4978 : : /* If we don't have POINTER_TYPE, call the function. */
4979 : 127577 : if (arg1_align == 0 || arg2_align == 0)
4980 : : return NULL_RTX;
4981 : :
4982 : : /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4983 : 127577 : arg1 = builtin_save_expr (arg1);
4984 : 127577 : arg2 = builtin_save_expr (arg2);
4985 : :
4986 : 127577 : rtx arg1_rtx = get_memory_rtx (arg1, NULL);
4987 : 127577 : rtx arg2_rtx = get_memory_rtx (arg2, NULL);
4988 : :
4989 : : /* Try to call cmpstrsi. */
4990 : 127577 : if (cmpstr_icode != CODE_FOR_nothing)
4991 : 0 : result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4992 : 0 : MIN (arg1_align, arg2_align));
4993 : :
4994 : : /* Try to determine at least one length and call cmpstrnsi. */
4995 : 127577 : if (!result && cmpstrn_icode != CODE_FOR_nothing)
4996 : : {
4997 : 127577 : tree len;
4998 : 127577 : rtx arg3_rtx;
4999 : :
5000 : 127577 : tree len1 = c_strlen (arg1, 1);
5001 : 127577 : tree len2 = c_strlen (arg2, 1);
5002 : :
5003 : 127577 : if (len1)
5004 : 224 : len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
5005 : 127577 : if (len2)
5006 : 125941 : len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
5007 : :
5008 : : /* If we don't have a constant length for the first, use the length
5009 : : of the second, if we know it. We don't require a constant for
5010 : : this case; some cost analysis could be done if both are available
5011 : : but neither is constant. For now, assume they're equally cheap,
5012 : : unless one has side effects. If both strings have constant lengths,
5013 : : use the smaller. */
5014 : :
5015 : 127577 : if (!len1)
5016 : : len = len2;
5017 : 224 : else if (!len2)
5018 : : len = len1;
5019 : 17 : else if (TREE_SIDE_EFFECTS (len1))
5020 : : len = len2;
5021 : 17 : else if (TREE_SIDE_EFFECTS (len2))
5022 : : len = len1;
5023 : 17 : else if (TREE_CODE (len1) != INTEGER_CST)
5024 : : len = len2;
5025 : 17 : else if (TREE_CODE (len2) != INTEGER_CST)
5026 : : len = len1;
5027 : 10 : else if (tree_int_cst_lt (len1, len2))
5028 : : len = len1;
5029 : : else
5030 : 127359 : len = len2;
5031 : :
5032 : : /* If both arguments have side effects, we cannot optimize. */
5033 : 127577 : if (len && !TREE_SIDE_EFFECTS (len))
5034 : : {
5035 : 126148 : arg3_rtx = expand_normal (len);
5036 : 126148 : result = expand_cmpstrn_or_cmpmem
5037 : 126148 : (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
5038 : 126148 : arg3_rtx, MIN (arg1_align, arg2_align));
5039 : : }
5040 : : }
5041 : :
5042 : 127577 : tree fndecl = get_callee_fndecl (exp);
5043 : 127577 : if (result)
5044 : : {
5045 : : /* Return the value in the proper mode for this function. */
5046 : 52 : machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
5047 : 52 : if (GET_MODE (result) == mode)
5048 : : return result;
5049 : 0 : if (target == 0)
5050 : 0 : return convert_to_mode (mode, result, 0);
5051 : 0 : convert_move (target, result, 0);
5052 : 0 : return target;
5053 : : }
5054 : :
5055 : : /* Expand the library call ourselves using a stabilized argument
5056 : : list to avoid evaluating the function's arguments twice. */
5057 : 127525 : tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
5058 : 127525 : copy_warning (fn, exp);
5059 : 127525 : gcc_assert (TREE_CODE (fn) == CALL_EXPR);
5060 : 127525 : CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5061 : 127525 : return expand_call (fn, target, target == const0_rtx);
5062 : : }
5063 : :
5064 : : /* Expand expression EXP, which is a call to the strncmp builtin. Return
5065 : : NULL_RTX if we failed; the caller should emit a normal call. Otherwise
5066 : : try to get the result in TARGET, if convenient. */
5067 : :
5068 : : static rtx
5069 : 2025 : expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
5070 : : ATTRIBUTE_UNUSED machine_mode mode)
5071 : : {
5072 : 2025 : if (!validate_arglist (exp,
5073 : : POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5074 : : return NULL_RTX;
5075 : :
5076 : 2019 : tree arg1 = CALL_EXPR_ARG (exp, 0);
5077 : 2019 : tree arg2 = CALL_EXPR_ARG (exp, 1);
5078 : 2019 : tree arg3 = CALL_EXPR_ARG (exp, 2);
5079 : :
5080 : 2019 : location_t loc = EXPR_LOCATION (exp);
5081 : 2019 : tree len1 = c_strlen (arg1, 1);
5082 : 2019 : tree len2 = c_strlen (arg2, 1);
5083 : :
5084 : : /* Due to the performance benefit, always inline the calls first. */
5085 : 2019 : rtx result = NULL_RTX;
5086 : 2019 : result = inline_expand_builtin_bytecmp (exp, target);
5087 : 2019 : if (result)
5088 : : return result;
5089 : :
5090 : : /* If c_strlen can determine an expression for one of the string
5091 : : lengths, and it doesn't have side effects, then emit cmpstrnsi
5092 : : using length MIN(strlen(string)+1, arg3). */
5093 : 1801 : insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
5094 : 1801 : if (cmpstrn_icode == CODE_FOR_nothing)
5095 : : return NULL_RTX;
5096 : :
5097 : 1801 : tree len;
5098 : :
5099 : 1801 : unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
5100 : 1801 : unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
5101 : :
5102 : 1801 : if (len1)
5103 : 160 : len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
5104 : 1801 : if (len2)
5105 : 672 : len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
5106 : :
5107 : 1801 : tree len3 = fold_convert_loc (loc, sizetype, arg3);
5108 : :
5109 : : /* If we don't have a constant length for the first, use the length
5110 : : of the second, if we know it. If neither string is constant length,
5111 : : use the given length argument. We don't require a constant for
5112 : : this case; some cost analysis could be done if both are available
5113 : : but neither is constant. For now, assume they're equally cheap,
5114 : : unless one has side effects. If both strings have constant lengths,
5115 : : use the smaller. */
5116 : :
5117 : 1801 : if (!len1 && !len2)
5118 : : len = len3;
5119 : 715 : else if (!len1)
5120 : : len = len2;
5121 : 160 : else if (!len2)
5122 : : len = len1;
5123 : 117 : else if (TREE_SIDE_EFFECTS (len1))
5124 : : len = len2;
5125 : 117 : else if (TREE_SIDE_EFFECTS (len2))
5126 : : len = len1;
5127 : 117 : else if (TREE_CODE (len1) != INTEGER_CST)
5128 : : len = len2;
5129 : 117 : else if (TREE_CODE (len2) != INTEGER_CST)
5130 : : len = len1;
5131 : 117 : else if (tree_int_cst_lt (len1, len2))
5132 : : len = len1;
5133 : : else
5134 : 556 : len = len2;
5135 : :
5136 : : /* If we are not using the given length, we must incorporate it here.
5137 : : The actual new length parameter will be MIN(len,arg3) in this case. */
5138 : 1801 : if (len != len3)
5139 : : {
5140 : 715 : len = fold_convert_loc (loc, sizetype, len);
5141 : 715 : len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
5142 : : }
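 : : /* For instance (a hypothetical call): with ARG2 == "hello",
 : : LEN2 == strlen + 1 == 6; if ARG1's length is unknown, LEN == 6
 : : differs from LEN3, so the length passed to cmpstrn becomes
 : : MIN (6, ARG3), capping the comparison at the constant string's
 : : size. */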
5143 : 1801 : rtx arg1_rtx = get_memory_rtx (arg1, len);
5144 : 1801 : rtx arg2_rtx = get_memory_rtx (arg2, len);
5145 : 1801 : rtx arg3_rtx = expand_normal (len);
5146 : 1801 : result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
5147 : 1801 : arg2_rtx, TREE_TYPE (len), arg3_rtx,
5148 : 1801 : MIN (arg1_align, arg2_align));
5149 : :
5150 : 1801 : tree fndecl = get_callee_fndecl (exp);
5151 : 1801 : if (result)
5152 : : {
5153 : : /* Return the value in the proper mode for this function. */
5154 : 17 : mode = TYPE_MODE (TREE_TYPE (exp));
5155 : 17 : if (GET_MODE (result) == mode)
5156 : : return result;
5157 : 0 : if (target == 0)
5158 : 0 : return convert_to_mode (mode, result, 0);
5159 : 0 : convert_move (target, result, 0);
5160 : 0 : return target;
5161 : : }
5162 : :
5163 : : /* Expand the library call ourselves using a stabilized argument
5164 : : list to avoid evaluating the function's arguments twice. */
5165 : 1784 : tree call = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
5166 : 1784 : copy_warning (call, exp);
5167 : 1784 : gcc_assert (TREE_CODE (call) == CALL_EXPR);
5168 : 1784 : CALL_EXPR_TAILCALL (call) = CALL_EXPR_TAILCALL (exp);
5169 : 1784 : return expand_call (call, target, target == const0_rtx);
5170 : : }
5171 : :
5172 : : /* Expand a call to __builtin_saveregs, generating the result in TARGET,
5173 : : if that's convenient. */
5174 : :
5175 : : rtx
5176 : 0 : expand_builtin_saveregs (void)
5177 : : {
5178 : 0 : rtx val;
5179 : 0 : rtx_insn *seq;
5180 : :
5181 : : /* Don't do __builtin_saveregs more than once in a function.
5182 : : Save the result of the first call and reuse it. */
5183 : 0 : if (saveregs_value != 0)
5184 : : return saveregs_value;
5185 : :
5186 : : /* When this function is called, it means that registers must be
5187 : : saved on entry to this function. So we migrate the call to the
5188 : : first insn of this function. */
5189 : :
5190 : 0 : start_sequence ();
5191 : :
5192 : : /* Do whatever the machine needs done in this case. */
5193 : 0 : val = targetm.calls.expand_builtin_saveregs ();
5194 : :
5195 : 0 : seq = end_sequence ();
5196 : :
5197 : 0 : saveregs_value = val;
5198 : :
5199 : : /* Put the insns after the NOTE that starts the function. If this
5200 : : is inside a start_sequence, make the outer-level insn chain current, so
5201 : : the code is placed at the start of the function. */
5202 : 0 : push_topmost_sequence ();
5203 : 0 : emit_insn_after (seq, entry_of_function ());
5204 : 0 : pop_topmost_sequence ();
5205 : :
5206 : 0 : return val;
5207 : : }
5208 : :
5209 : : /* Expand a call to __builtin_next_arg. */
5210 : :
5211 : : static rtx
5212 : 21126 : expand_builtin_next_arg (void)
5213 : : {
5214 : : /* Checking arguments is already done in fold_builtin_next_arg
5215 : : that must be called before this function. */
5216 : 21126 : return expand_binop (ptr_mode, add_optab,
5217 : : crtl->args.internal_arg_pointer,
5218 : : crtl->args.arg_offset_rtx,
5219 : 21126 : NULL_RTX, 0, OPTAB_LIB_WIDEN);
5220 : : }
5221 : :
5222 : : /* Make it easier for the backends by protecting the valist argument
5223 : : from multiple evaluations. */
5224 : :
5225 : : static tree
5226 : 21460 : stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
5227 : : {
5228 : 21460 : tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
5229 : :
5230 : : /* The current way of determining the type of valist is completely
5231 : : bogus. We should have the information on the va builtin instead. */
5232 : 21460 : if (!vatype)
5233 : 21336 : vatype = targetm.fn_abi_va_list (cfun->decl);
5234 : :
5235 : 21460 : if (TREE_CODE (vatype) == ARRAY_TYPE)
5236 : : {
5237 : 15798 : if (TREE_SIDE_EFFECTS (valist))
5238 : 0 : valist = save_expr (valist);
5239 : :
5240 : : /* For this case, the backends will be expecting a pointer to
5241 : : vatype, but it's possible we've actually been given an array
5242 : : (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
5243 : : So fix it. */
5244 : 15798 : if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
5245 : : {
5246 : 0 : tree p1 = build_pointer_type (TREE_TYPE (vatype));
5247 : 0 : valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
5248 : : }
5249 : : }
5250 : : else
5251 : : {
5252 : 5662 : tree pt = build_pointer_type (vatype);
5253 : :
5254 : 5662 : if (! needs_lvalue)
5255 : : {
5256 : 6 : if (! TREE_SIDE_EFFECTS (valist))
5257 : : return valist;
5258 : :
5259 : 0 : valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
5260 : 0 : TREE_SIDE_EFFECTS (valist) = 1;
5261 : : }
5262 : :
5263 : 5656 : if (TREE_SIDE_EFFECTS (valist))
5264 : 0 : valist = save_expr (valist);
5265 : 5656 : valist = fold_build2_loc (loc, MEM_REF,
5266 : : vatype, valist, build_int_cst (pt, 0));
5267 : : }
5268 : :
5269 : : return valist;
5270 : : }
5271 : :
5272 : : /* The "standard" definition of va_list is void*. */
5273 : :
5274 : : tree
5275 : 0 : std_build_builtin_va_list (void)
5276 : : {
5277 : 0 : return ptr_type_node;
5278 : : }
5279 : :
5280 : : /* The "standard" abi va_list is va_list_type_node. */
5281 : :
5282 : : tree
5283 : 0 : std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
5284 : : {
5285 : 0 : return va_list_type_node;
5286 : : }
5287 : :
5288 : : /* The "standard" type of va_list is va_list_type_node. */
5289 : :
5290 : : tree
5291 : 502 : std_canonical_va_list_type (tree type)
5292 : : {
5293 : 502 : tree wtype, htype;
5294 : :
5295 : 502 : wtype = va_list_type_node;
5296 : 502 : htype = type;
5297 : :
5298 : 502 : if (TREE_CODE (wtype) == ARRAY_TYPE)
5299 : : {
5300 : : /* If va_list is an array type, the argument may have decayed
5301 : : to a pointer type, e.g. by being passed to another function.
5302 : : In that case, unwrap both types so that we can compare the
5303 : : underlying records. */
5304 : 0 : if (TREE_CODE (htype) == ARRAY_TYPE
5305 : 0 : || POINTER_TYPE_P (htype))
5306 : : {
5307 : 0 : wtype = TREE_TYPE (wtype);
5308 : 0 : htype = TREE_TYPE (htype);
5309 : : }
5310 : : }
5311 : 502 : if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
5312 : 332 : return va_list_type_node;
5313 : :
5314 : : return NULL_TREE;
5315 : : }
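 : : /* For example, on targets where va_list is an array type such as
 : : x86-64's __va_list_tag[1], a va_list function argument decays
 : : to __va_list_tag *; both unwrap above to the same record type,
 : : so the main variants match and va_list_type_node is returned. */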
5316 : :
5317 : : /* The "standard" implementation of va_start: just assign `nextarg' to
5318 : : the variable. */
5319 : :
5320 : : void
5321 : 5652 : std_expand_builtin_va_start (tree valist, rtx nextarg)
5322 : : {
5323 : 5652 : rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
5324 : 5652 : convert_move (va_r, nextarg, 0);
5325 : 5652 : }
5326 : :
5327 : : /* Expand EXP, a call to __builtin_va_start. */
5328 : :
5329 : : static rtx
5330 : 20976 : expand_builtin_va_start (tree exp)
5331 : : {
5332 : 20976 : rtx nextarg;
5333 : 20976 : tree valist;
5334 : 20976 : location_t loc = EXPR_LOCATION (exp);
5335 : :
5336 : 20976 : if (call_expr_nargs (exp) < 2)
5337 : : {
5338 : 0 : error_at (loc, "too few arguments to function %<va_start%>");
5339 : 0 : return const0_rtx;
5340 : : }
5341 : :
5342 : 20976 : if (fold_builtin_next_arg (exp, true))
5343 : 0 : return const0_rtx;
5344 : :
5345 : 20976 : nextarg = expand_builtin_next_arg ();
5346 : 20976 : valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
5347 : :
5348 : 20976 : if (targetm.expand_builtin_va_start)
5349 : 20976 : targetm.expand_builtin_va_start (valist, nextarg);
5350 : : else
5351 : 0 : std_expand_builtin_va_start (valist, nextarg);
5352 : :
5353 : 20976 : return const0_rtx;
5354 : : }
5355 : :
5356 : : /* Expand EXP, a call to __builtin_va_end. */
5357 : :
5358 : : static rtx
5359 : 12122 : expand_builtin_va_end (tree exp)
5360 : : {
5361 : 12122 : tree valist = CALL_EXPR_ARG (exp, 0);
5362 : :
5363 : : /* Evaluate for side effects, if needed. I hate macros that don't
5364 : : do that. */
5365 : 12122 : if (TREE_SIDE_EFFECTS (valist))
5366 : 0 : expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
5367 : :
5368 : 12122 : return const0_rtx;
5369 : : }
5370 : :
5371 : : /* Expand EXP, a call to __builtin_va_copy. We do this as a
5372 : : builtin rather than just as an assignment in stdarg.h because of the
5373 : : nastiness of array-type va_list types. */
5374 : :
5375 : : static rtx
5376 : 242 : expand_builtin_va_copy (tree exp)
5377 : : {
5378 : 242 : tree dst, src, t;
5379 : 242 : location_t loc = EXPR_LOCATION (exp);
5380 : :
5381 : 242 : dst = CALL_EXPR_ARG (exp, 0);
5382 : 242 : src = CALL_EXPR_ARG (exp, 1);
5383 : :
5384 : 242 : dst = stabilize_va_list_loc (loc, dst, 1);
5385 : 242 : src = stabilize_va_list_loc (loc, src, 0);
5386 : :
5387 : 242 : gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
5388 : :
5389 : 242 : if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
5390 : : {
5391 : 0 : t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
5392 : 0 : TREE_SIDE_EFFECTS (t) = 1;
5393 : 0 : expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5394 : : }
5395 : : else
5396 : : {
5397 : 242 : rtx dstb, srcb, size;
5398 : :
5399 : : /* Evaluate to pointers. */
5400 : 242 : dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
5401 : 242 : srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
5402 : 242 : size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
5403 : : NULL_RTX, VOIDmode, EXPAND_NORMAL);
5404 : :
5405 : 242 : dstb = convert_memory_address (Pmode, dstb);
5406 : 242 : srcb = convert_memory_address (Pmode, srcb);
5407 : :
5408 : : /* "Dereference" to BLKmode memories. */
5409 : 242 : dstb = gen_rtx_MEM (BLKmode, dstb);
5410 : 242 : set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5411 : 242 : set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5412 : 242 : srcb = gen_rtx_MEM (BLKmode, srcb);
5413 : 242 : set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5414 : 242 : set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5415 : :
5416 : : /* Copy. */
5417 : 242 : emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5418 : : }
5419 : :
5420 : 242 : return const0_rtx;
5421 : : }
5422 : :
5423 : : /* Expand a call to one of the builtin functions __builtin_frame_address or
5424 : : __builtin_return_address. */
5425 : :
5426 : : static rtx
5427 : 15583 : expand_builtin_frame_address (tree fndecl, tree exp)
5428 : : {
5429 : : /* The argument must be a nonnegative integer constant.
5430 : : It counts the number of frames to scan up the stack.
5431 : : The value is either the frame pointer value or the return
5432 : : address saved in that frame. */
5433 : 15583 : if (call_expr_nargs (exp) == 0)
5434 : : /* Warning about missing arg was already issued. */
5435 : 0 : return const0_rtx;
5436 : 15583 : else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
5437 : : {
5438 : 0 : error ("invalid argument to %qD", fndecl);
5439 : 0 : return const0_rtx;
5440 : : }
5441 : : else
5442 : : {
5443 : : /* Number of frames to scan up the stack. */
5444 : 15583 : unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
5445 : :
5446 : 15583 : rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
5447 : :
5448 : : /* Some ports cannot access arbitrary stack frames. */
5449 : 15583 : if (tem == NULL)
5450 : : {
5451 : 0 : warning (0, "unsupported argument to %qD", fndecl);
5452 : 0 : return const0_rtx;
5453 : : }
5454 : :
5455 : 15583 : if (count)
5456 : : {
5457 : : /* Warn since no effort is made to ensure that any frame
5458 : : beyond the current one exists or can be safely reached. */
5459 : 932 : warning (OPT_Wframe_address, "calling %qD with "
5460 : : "a nonzero argument is unsafe", fndecl);
5461 : : }
5462 : :
5463 : : /* For __builtin_frame_address, return what we've got. */
5464 : 15583 : if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5465 : : return tem;
5466 : :
5467 : 5086 : if (!REG_P (tem)
5468 : 5086 : && ! CONSTANT_P (tem))
5469 : 5086 : tem = copy_addr_to_reg (tem);
5470 : 5086 : return tem;
5471 : : }
5472 : : }
5473 : :
5474 : : #if ! STACK_GROWS_DOWNWARD
5475 : : # define STACK_TOPS GT
5476 : : #else
5477 : : # define STACK_TOPS LT
5478 : : #endif
5479 : :
5480 : : #ifdef POINTERS_EXTEND_UNSIGNED
5481 : : # define STACK_UNSIGNED POINTERS_EXTEND_UNSIGNED
5482 : : #else
5483 : : # define STACK_UNSIGNED true
5484 : : #endif
5485 : :
5486 : : /* Expand a call to builtin function __builtin_stack_address. */
5487 : :
5488 : : static rtx
5489 : 3427 : expand_builtin_stack_address ()
5490 : : {
5491 : 3427 : rtx ret = convert_to_mode (ptr_mode, copy_to_reg (stack_pointer_rtx),
5492 : : STACK_UNSIGNED);
5493 : :
5494 : : #ifdef STACK_ADDRESS_OFFSET
5495 : : /* Unbias the stack pointer, bringing it to the boundary between the
5496 : : stack area claimed by the active function calling this builtin,
5497 : : and stack ranges that could get clobbered if it called another
5498 : : function. It should NOT encompass any stack red zone, that is
5499 : : used in leaf functions.
5500 : :
5501 : : On SPARC, the register save area is *not* considered active or
5502 : : used by the active function, but rather as akin to the area in
5503 : : which call-preserved registers are saved by callees. This
5504 : : enables __strub_leave to clear what would otherwise overlap with
5505 : : its own register save area.
5506 : :
5507 : : If the address is computed too high or too low, parts of a stack
5508 : : range that should be scrubbed may be left unscrubbed, scrubbing
5509 : : may corrupt active portions of the stack frame, and stack ranges
5510 : : may be doubly-scrubbed by caller and callee.
5511 : :
5512 : : In order for it to be just right, the area delimited by
5513 : : @code{__builtin_stack_address} and @code{__builtin_frame_address
5514 : : (0)} should encompass caller's registers saved by the function,
5515 : : local on-stack variables and @code{alloca} stack areas.
5516 : : Accumulated outgoing on-stack arguments, preallocated as part of
5517 : : a function's own prologue, are to be regarded as part of the
5518 : : (caller) function's active area as well, whereas those pushed or
5519 : : allocated temporarily for a call are regarded as part of the
5520 : : callee's stack range, rather than the caller's. */
5521 : : ret = plus_constant (ptr_mode, ret, STACK_ADDRESS_OFFSET);
5522 : : #endif
5523 : :
5524 : 3427 : return force_reg (ptr_mode, ret);
5525 : : }
5526 : :
5527 : : /* Expand a call to builtin function __builtin_strub_enter. */
5528 : :
5529 : : static rtx
5530 : 2159 : expand_builtin_strub_enter (tree exp)
5531 : : {
5532 : 2159 : if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5533 : : return NULL_RTX;
5534 : :
5535 : 2159 : if (optimize < 1 || flag_no_inline)
5536 : : return NULL_RTX;
5537 : :
5538 : 1515 : rtx stktop = expand_builtin_stack_address ();
5539 : :
5540 : 1515 : tree wmptr = CALL_EXPR_ARG (exp, 0);
5541 : 1515 : tree wmtype = TREE_TYPE (TREE_TYPE (wmptr));
5542 : 1515 : tree wmtree = fold_build2 (MEM_REF, wmtype, wmptr,
5543 : : build_int_cst (TREE_TYPE (wmptr), 0));
5544 : 1515 : rtx wmark = expand_expr (wmtree, NULL_RTX, ptr_mode, EXPAND_MEMORY);
5545 : :
5546 : 1515 : emit_move_insn (wmark, stktop);
5547 : :
5548 : 1515 : return const0_rtx;
5549 : : }
5550 : :
5551 : : /* Expand a call to builtin function __builtin_strub_update. */
5552 : :
5553 : : static rtx
5554 : 1072 : expand_builtin_strub_update (tree exp)
5555 : : {
5556 : 1072 : if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5557 : : return NULL_RTX;
5558 : :
5559 : 1072 : if (optimize < 2 || flag_no_inline)
5560 : : return NULL_RTX;
5561 : :
5562 : 652 : rtx stktop = expand_builtin_stack_address ();
5563 : :
5564 : : #ifdef RED_ZONE_SIZE
5565 : : /* Here's how the strub enter, update and leave functions deal with red zones.
5566 : :
5567 : : If it weren't for red zones, update, called from within a strub context,
5568 : : would bump the watermark to the top of the stack. Enter and leave, running
5569 : : in the caller, would use the caller's top of stack address both to
5570 : : initialize the watermark passed to the callee, and to start strubbing the
5571 : : stack afterwards.
5572 : :
5573 : : Ideally, we'd update the watermark so as to cover the used amount of red
5574 : : zone, and strub starting at the caller's other end of the (presumably
5575 : : unused) red zone. Normally, only leaf functions use the red zone, but at
5576 : : this point we can't tell whether a function is a leaf, nor can we tell how
5577 : : much of the red zone it uses. Furthermore, some strub contexts may have
5578 : : been inlined so that update and leave are called from the same stack frame,
5579 : : and the strub builtins may all have been inlined, turning a strub function
5580 : : into a leaf.
5581 : :
5582 : : So cleaning the range from the caller's stack pointer (one end of the red
5583 : : zone) to the (potentially inlined) callee's (other end of the) red zone
5584 : : could scribble over the caller's own red zone.
5585 : :
5586 : : We avoid this possibility by arranging for callers that are strub contexts
5587 : : to use their own watermark as the strub starting point. So, if A calls B,
5588 : : and B calls C, B will tell A to strub up to the end of B's red zone, and
5589 : : will strub itself only the part of C's stack frame and red zone that
5590 : : doesn't overlap with B's. With that, we don't need to know who's leaf and
5591 : : who isn't: inlined calls will shrink their strub window to zero, each
5592 : : remaining call will strub some portion of the stack, and eventually the
5593 : : strub context will return to a caller that isn't a strub context itself,
5594 : : that will therefore use its own stack pointer as the strub starting point.
5595 : : It's not a leaf, because strub contexts can't be inlined into non-strub
5596 : : contexts, so it doesn't use the red zone, and it will therefore correctly
5597 : : strub up the callee's stack frame up to the end of the callee's red zone.
5598 : : Neat! */
5599 : 652 : if (true /* (flags_from_decl_or_type (current_function_decl) & ECF_LEAF) */)
5600 : : {
5601 : 652 : poly_int64 red_zone_size = RED_ZONE_SIZE;
5602 : : #if STACK_GROWS_DOWNWARD
5603 : 652 : red_zone_size = -red_zone_size;
5604 : : #endif
5605 : 652 : stktop = plus_constant (ptr_mode, stktop, red_zone_size);
5606 : 652 : stktop = force_reg (ptr_mode, stktop);
5607 : : }
5608 : : #endif
5609 : :
5610 : 652 : tree wmptr = CALL_EXPR_ARG (exp, 0);
5611 : 652 : tree wmtype = TREE_TYPE (TREE_TYPE (wmptr));
5612 : 652 : tree wmtree = fold_build2 (MEM_REF, wmtype, wmptr,
5613 : : build_int_cst (TREE_TYPE (wmptr), 0));
5614 : 652 : rtx wmark = expand_expr (wmtree, NULL_RTX, ptr_mode, EXPAND_MEMORY);
5615 : :
5616 : 652 : rtx wmarkr = force_reg (ptr_mode, wmark);
5617 : :
5618 : 652 : rtx_code_label *lab = gen_label_rtx ();
5619 : 652 : do_compare_rtx_and_jump (stktop, wmarkr, STACK_TOPS, STACK_UNSIGNED,
5620 : : ptr_mode, NULL_RTX, lab, NULL,
5621 : : profile_probability::very_likely ());
5622 : 652 : emit_move_insn (wmark, stktop);
5623 : :
5624 : : /* If this is an inlined strub function, also bump the watermark for the
5625 : : enclosing function. This avoids a problem with the following scenario: A
5626 : : calls B and B calls C, and both B and C get inlined into A. B allocates
5627 : : temporary stack space before calling C. If we don't update A's watermark,
5628 : : we may use an outdated baseline for the post-C strub_leave, erasing B's
5629 : : temporary stack allocation. We only need this if we're fully expanding
5630 : : strub_leave inline. */
5631 : 652 : tree xwmptr = (optimize > 2
5632 : 652 : ? strub_watermark_parm (current_function_decl)
5633 : : : wmptr);
5634 : 652 : if (wmptr != xwmptr)
5635 : : {
5636 : 156 : wmptr = xwmptr;
5637 : 156 : wmtype = TREE_TYPE (TREE_TYPE (wmptr));
5638 : 156 : wmtree = fold_build2 (MEM_REF, wmtype, wmptr,
5639 : : build_int_cst (TREE_TYPE (wmptr), 0));
5640 : 156 : wmark = expand_expr (wmtree, NULL_RTX, ptr_mode, EXPAND_MEMORY);
5641 : 156 : wmarkr = force_reg (ptr_mode, wmark);
5642 : :
5643 : 156 : do_compare_rtx_and_jump (stktop, wmarkr, STACK_TOPS, STACK_UNSIGNED,
5644 : : ptr_mode, NULL_RTX, lab, NULL,
5645 : : profile_probability::very_likely ());
5646 : 156 : emit_move_insn (wmark, stktop);
5647 : : }
5648 : :
5649 : 652 : emit_label (lab);
5650 : :
5651 : 652 : return const0_rtx;
5652 : : }
5653 : :
5654 : :
5655 : : /* Expand a call to builtin function __builtin_strub_leave. */
5656 : :
5657 : : static rtx
5658 : 2729 : expand_builtin_strub_leave (tree exp)
5659 : : {
5660 : 2729 : if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5661 : : return NULL_RTX;
5662 : :
5663 : 2729 : if (optimize < 2 || optimize_size || flag_no_inline)
5664 : : return NULL_RTX;
5665 : :
5666 : 1229 : rtx stktop = NULL_RTX;
5667 : :
5668 : 1229 : if (tree wmptr = (optimize
5669 : 1229 : ? strub_watermark_parm (current_function_decl)
5670 : : : NULL_TREE))
5671 : : {
5672 : 509 : tree wmtype = TREE_TYPE (TREE_TYPE (wmptr));
5673 : 509 : tree wmtree = fold_build2 (MEM_REF, wmtype, wmptr,
5674 : : build_int_cst (TREE_TYPE (wmptr), 0));
5675 : 509 : rtx wmark = expand_expr (wmtree, NULL_RTX, ptr_mode, EXPAND_MEMORY);
5676 : 509 : stktop = force_reg (ptr_mode, wmark);
5677 : : }
5678 : :
5679 : 509 : if (!stktop)
5680 : 720 : stktop = expand_builtin_stack_address ();
5681 : :
5682 : 1229 : tree wmptr = CALL_EXPR_ARG (exp, 0);
5683 : 1229 : tree wmtype = TREE_TYPE (TREE_TYPE (wmptr));
5684 : 1229 : tree wmtree = fold_build2 (MEM_REF, wmtype, wmptr,
5685 : : build_int_cst (TREE_TYPE (wmptr), 0));
5686 : 1229 : rtx wmark = expand_expr (wmtree, NULL_RTX, ptr_mode, EXPAND_MEMORY);
5687 : :
5688 : 1229 : rtx wmarkr = force_reg (ptr_mode, wmark);
5689 : :
5690 : : #if ! STACK_GROWS_DOWNWARD
5691 : : rtx base = stktop;
5692 : : rtx end = wmarkr;
5693 : : #else
5694 : 1229 : rtx base = wmarkr;
5695 : 1229 : rtx end = stktop;
5696 : : #endif
5697 : :
5698 : : /* We're going to modify it, so make sure it's not e.g. the stack pointer. */
5699 : 1229 : base = copy_to_reg (base);
5700 : :
5701 : 1229 : rtx_code_label *done = gen_label_rtx ();
5702 : 1229 : do_compare_rtx_and_jump (base, end, LT, STACK_UNSIGNED,
5703 : : ptr_mode, NULL_RTX, done, NULL,
5704 : : profile_probability::very_likely ());
5705 : :
5706 : 1229 : if (optimize < 3)
5707 : 909 : expand_call (exp, NULL_RTX, true);
5708 : : else
5709 : : {
5710 : : /* Ok, now we've determined we want to copy the block, so convert the
5711 : : addresses to Pmode, as needed to dereference them to access ptr_mode
5712 : : memory locations, so that we don't have to convert anything within the
5713 : : loop. */
5714 : 320 : base = memory_address (ptr_mode, base);
5715 : 320 : end = memory_address (ptr_mode, end);
5716 : :
5717 : 320 : rtx zero = force_operand (const0_rtx, NULL_RTX);
5718 : 320 : int ulen = GET_MODE_SIZE (ptr_mode);
5719 : :
5720 : : /* ??? It would be nice to use setmem or similar patterns here,
5721 : : but they do not necessarily obey the stack growth direction,
5722 : : which has security implications. We also have to avoid calls
5723 : : (memset, bzero or any machine-specific ones), which are
5724 : : likely unsafe here (see TARGET_STRUB_MAY_USE_MEMSET). */
5725 : : #if ! STACK_GROWS_DOWNWARD
5726 : : rtx incr = plus_constant (Pmode, base, ulen);
5727 : : rtx dstm = gen_rtx_MEM (ptr_mode, base);
5728 : :
5729 : : rtx_code_label *loop = gen_label_rtx ();
5730 : : emit_label (loop);
5731 : : emit_move_insn (dstm, zero);
5732 : : emit_move_insn (base, force_operand (incr, NULL_RTX));
5733 : : #else
5734 : 320 : rtx decr = plus_constant (Pmode, end, -ulen);
5735 : 320 : rtx dstm = gen_rtx_MEM (ptr_mode, end);
5736 : :
5737 : 320 : rtx_code_label *loop = gen_label_rtx ();
5738 : 320 : emit_label (loop);
5739 : 320 : emit_move_insn (end, force_operand (decr, NULL_RTX));
5740 : 320 : emit_move_insn (dstm, zero);
5741 : : #endif
5742 : 640 : do_compare_rtx_and_jump (base, end, LT, STACK_UNSIGNED,
5743 : 320 : Pmode, NULL_RTX, NULL, loop,
5744 : : profile_probability::very_likely ());
5745 : : }
5746 : :
5747 : 1229 : emit_label (done);
5748 : :
5749 : 1229 : return const0_rtx;
5750 : : }
5751 : :
5752 : : /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
5753 : : failed and the caller should emit a normal call. */
5754 : :
5755 : : static rtx
5756 : 27944 : expand_builtin_alloca (tree exp)
5757 : : {
5758 : 27944 : rtx op0;
5759 : 27944 : rtx result;
5760 : 27944 : unsigned int align;
5761 : 27944 : tree fndecl = get_callee_fndecl (exp);
5762 : 27944 : HOST_WIDE_INT max_size;
5763 : 27944 : enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5764 : 27944 : bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
5765 : 27944 : bool valid_arglist
5766 : : = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5767 : 27944 : ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
5768 : : VOID_TYPE)
5769 : : : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
5770 : 27942 : ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
5771 : 27944 : : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
5772 : :
5773 : 27944 : if (!valid_arglist)
5774 : : return NULL_RTX;
5775 : :
5776 : : /* Compute the argument. */
5777 : 27939 : op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5778 : :
5779 : : /* Compute the alignment. */
5780 : 52040 : align = (fcode == BUILT_IN_ALLOCA
5781 : 24102 : ? BIGGEST_ALIGNMENT
5782 : 3837 : : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
5783 : :
5784 : : /* Compute the maximum size. */
5785 : 2 : max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5786 : 27941 : ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
5787 : : : -1);
5788 : :
5789 : : /* Allocate the desired space. If the allocation stems from the declaration
5790 : : of a variable-sized object, it cannot accumulate. */
5791 : 27939 : result
5792 : 27939 : = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
5793 : 27939 : result = convert_memory_address (ptr_mode, result);
5794 : :
5795 : : /* Dynamic allocations for variables are recorded during gimplification. */
5796 : 27939 : if (!alloca_for_var && (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC))
5797 : 0 : record_dynamic_alloc (exp);
5798 : :
5799 : : return result;
5800 : : }
5801 : :
5802 : : /* Emit a call to __asan_allocas_unpoison call in EXP. Add to second argument
5803 : : of the call virtual_stack_dynamic_rtx - stack_pointer_rtx, which is the
5804 : : STACK_DYNAMIC_OFFSET value. See motivation for this in comment to
5805 : : handle_builtin_stack_restore function. */
5806 : :
5807 : : static rtx
5808 : 204 : expand_asan_emit_allocas_unpoison (tree exp)
5809 : : {
5810 : 204 : tree arg0 = CALL_EXPR_ARG (exp, 0);
5811 : 204 : tree arg1 = CALL_EXPR_ARG (exp, 1);
5812 : 204 : rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5813 : 204 : rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5814 : 204 : rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
5815 : : stack_pointer_rtx, NULL_RTX, 0,
5816 : : OPTAB_LIB_WIDEN);
5817 : 204 : off = convert_modes (ptr_mode, Pmode, off, 0);
5818 : 204 : bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
5819 : : OPTAB_LIB_WIDEN);
5820 : 204 : rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
5821 : 204 : ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
5822 : : top, ptr_mode, bot, ptr_mode);
5823 : 204 : return ret;
5824 : : }
5825 : :
5826 : : /* Expand a call to bswap builtin in EXP.
5827 : : Return NULL_RTX if a normal call should be emitted rather than expanding the
5828 : : function in-line. If convenient, the result should be placed in TARGET.
5829 : : SUBTARGET may be used as the target for computing one of EXP's operands. */
5830 : :
5831 : : static rtx
5832 : 1169 : expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
5833 : : rtx subtarget)
5834 : : {
5835 : 1169 : tree arg;
5836 : 1169 : rtx op0;
5837 : :
5838 : 1169 : if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5839 : : return NULL_RTX;
5840 : :
5841 : 1169 : arg = CALL_EXPR_ARG (exp, 0);
5842 : 1169 : op0 = expand_expr (arg,
5843 : 50 : subtarget && GET_MODE (subtarget) == target_mode
5844 : : ? subtarget : NULL_RTX,
5845 : : target_mode, EXPAND_NORMAL);
5846 : 1169 : if (GET_MODE (op0) != target_mode)
5847 : 0 : op0 = convert_to_mode (target_mode, op0, 1);
5848 : :
5849 : 1169 : target = expand_unop (target_mode, bswap_optab, op0, target, 1);
5850 : :
5851 : 1169 : gcc_assert (target);
5852 : :
5853 : 1169 : return convert_to_mode (target_mode, target, 1);
5854 : : }
5855 : :
5856 : : /* Expand a call to a unary builtin in EXP.
5857 : : Return NULL_RTX if a normal call should be emitted rather than expanding the
5858 : : function in-line. If convenient, the result should be placed in TARGET.
5859 : : SUBTARGET may be used as the target for computing one of EXP's operands. */
5860 : :
5861 : : static rtx
5862 : 751 : expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
5863 : : rtx subtarget, optab op_optab)
5864 : : {
5865 : 751 : rtx op0;
5866 : :
5867 : 751 : if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5868 : : return NULL_RTX;
5869 : :
5870 : : /* Compute the argument. */
5871 : 1502 : op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
5872 : : (subtarget
5873 : 94 : && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
5874 : 94 : == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
5875 : : VOIDmode, EXPAND_NORMAL);
5876 : : /* Compute op, into TARGET if possible.
5877 : : Set TARGET to wherever the result comes back. */
5878 : 751 : target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5879 : : op_optab, op0, target, op_optab != clrsb_optab);
5880 : 751 : gcc_assert (target);
5881 : :
5882 : 751 : return convert_to_mode (target_mode, target, 0);
5883 : : }
5884 : :
5885 : : /* Expand a call to __builtin_expect. We just return our argument
5886 : : as the builtin_expect semantic should've been already executed by
5887 : : tree branch prediction pass. */
5888 : :
5889 : : static rtx
5890 : 1017 : expand_builtin_expect (tree exp, rtx target)
5891 : : {
5892 : 1017 : tree arg;
5893 : :
5894 : 1017 : if (call_expr_nargs (exp) < 2)
5895 : 0 : return const0_rtx;
5896 : 1017 : arg = CALL_EXPR_ARG (exp, 0);
5897 : :
5898 : 1017 : target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5899 : : /* When guessing was done, the hints should be already stripped away. */
5900 : 1017 : gcc_assert (!flag_guess_branch_prob
5901 : : || optimize == 0 || seen_error ());
5902 : : return target;
5903 : : }
5904 : :
5905 : : /* Expand a call to __builtin_expect_with_probability. We just return our
5906 : : argument as the builtin_expect semantic should've been already executed by
5907 : : tree branch prediction pass. */
5908 : :
5909 : : static rtx
5910 : 5 : expand_builtin_expect_with_probability (tree exp, rtx target)
5911 : : {
5912 : 5 : tree arg;
5913 : :
5914 : 5 : if (call_expr_nargs (exp) < 3)
5915 : 0 : return const0_rtx;
5916 : 5 : arg = CALL_EXPR_ARG (exp, 0);
5917 : :
5918 : 5 : target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5919 : : /* When guessing was done, the hints should be already stripped away. */
5920 : 5 : gcc_assert (!flag_guess_branch_prob
5921 : : || optimize == 0 || seen_error ());
5922 : : return target;
5923 : : }
5924 : :
5925 : :
5926 : : /* Expand a call to __builtin_assume_aligned. We just return our first
5927 : : argument as the builtin_assume_aligned semantic should've been already
5928 : : executed by CCP. */
5929 : :
5930 : : static rtx
5931 : 664 : expand_builtin_assume_aligned (tree exp, rtx target)
5932 : : {
5933 : 664 : if (call_expr_nargs (exp) < 2)
5934 : 0 : return const0_rtx;
5935 : 664 : target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
5936 : : EXPAND_NORMAL);
5937 : 664 : gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
5938 : : && (call_expr_nargs (exp) < 3
5939 : : || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
5940 : : return target;
5941 : : }
5942 : :
5943 : : void
5944 : 38541 : expand_builtin_trap (void)
5945 : : {
5946 : 38541 : if (targetm.have_trap ())
5947 : : {
5948 : 38541 : rtx_insn *insn = emit_insn (targetm.gen_trap ());
5949 : : /* For trap insns when not accumulating outgoing args force
5950 : : REG_ARGS_SIZE note to prevent crossjumping of calls with
5951 : : different args sizes. */
5952 : 38541 : if (!ACCUMULATE_OUTGOING_ARGS)
5953 : 38539 : add_args_size_note (insn, stack_pointer_delta);
5954 : : }
5955 : : else
5956 : : {
5957 : 0 : tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
5958 : 0 : tree call_expr = build_call_expr (fn, 0);
5959 : 0 : expand_call (call_expr, NULL_RTX, false);
5960 : : }
5961 : :
5962 : 38541 : emit_barrier ();
5963 : 38541 : }
5964 : :
5965 : : /* Expand a call to __builtin_unreachable. We do nothing except emit
5966 : : a barrier saying that control flow will not pass here.
5967 : :
5968 : : It is the responsibility of the program being compiled to ensure
5969 : : that control flow does never reach __builtin_unreachable. */
5970 : : static void
5971 : 5400 : expand_builtin_unreachable (void)
5972 : : {
5973 : : /* Use gimple_build_builtin_unreachable or builtin_decl_unreachable
5974 : : to avoid this. */
5975 : 5400 : gcc_checking_assert (!sanitize_flags_p (SANITIZE_UNREACHABLE));
5976 : 5400 : emit_barrier ();
5977 : 5400 : }
5978 : :
5979 : : /* Expand EXP, a call to fabs, fabsf or fabsl.
5980 : : Return NULL_RTX if a normal call should be emitted rather than expanding
5981 : : the function inline. If convenient, the result should be placed
5982 : : in TARGET. SUBTARGET may be used as the target for computing
5983 : : the operand. */
5984 : :
5985 : : static rtx
5986 : 4 : expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5987 : : {
5988 : 4 : machine_mode mode;
5989 : 4 : tree arg;
5990 : 4 : rtx op0;
5991 : :
5992 : 4 : if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5993 : : return NULL_RTX;
5994 : :
5995 : 0 : arg = CALL_EXPR_ARG (exp, 0);
5996 : 0 : CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5997 : 0 : mode = TYPE_MODE (TREE_TYPE (arg));
5998 : 0 : op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5999 : 0 : return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
6000 : : }
6001 : :
6002 : : /* Expand EXP, a call to copysign, copysignf, or copysignl.
6003 : : Return NULL is a normal call should be emitted rather than expanding the
6004 : : function inline. If convenient, the result should be placed in TARGET.
6005 : : SUBTARGET may be used as the target for computing the operand. */
6006 : :
6007 : : static rtx
6008 : 11674 : expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
6009 : : {
6010 : 11674 : rtx op0, op1;
6011 : 11674 : tree arg;
6012 : :
6013 : 11674 : if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
6014 : : return NULL_RTX;
6015 : :
6016 : 11673 : arg = CALL_EXPR_ARG (exp, 0);
6017 : 11673 : op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
6018 : :
6019 : 11673 : arg = CALL_EXPR_ARG (exp, 1);
6020 : 11673 : op1 = expand_normal (arg);
6021 : :
6022 : 11673 : return expand_copysign (op0, op1, target);
6023 : : }
6024 : :
6025 : : /* Emit a call to __builtin___clear_cache. */
6026 : :
6027 : : void
6028 : 0 : default_emit_call_builtin___clear_cache (rtx begin, rtx end)
6029 : : {
6030 : 0 : rtx callee = gen_rtx_SYMBOL_REF (Pmode,
6031 : : BUILTIN_ASM_NAME_PTR
6032 : : (BUILT_IN_CLEAR_CACHE));
6033 : :
6034 : 0 : emit_library_call (callee,
6035 : : LCT_NORMAL, VOIDmode,
6036 : : convert_memory_address (ptr_mode, begin), ptr_mode,
6037 : : convert_memory_address (ptr_mode, end), ptr_mode);
6038 : 0 : }
6039 : :
6040 : : /* Emit a call to __builtin___clear_cache, unless the target specifies
6041 : : it as do-nothing. This function can be used by trampoline
6042 : : finalizers to duplicate the effects of expanding a call to the
6043 : : clear_cache builtin. */
6044 : :
6045 : : void
6046 : 28 : maybe_emit_call_builtin___clear_cache (rtx begin, rtx end)
6047 : : {
6048 : 28 : gcc_assert ((GET_MODE (begin) == ptr_mode || GET_MODE (begin) == Pmode
6049 : : || CONST_INT_P (begin))
6050 : : && (GET_MODE (end) == ptr_mode || GET_MODE (end) == Pmode
6051 : : || CONST_INT_P (end)));
6052 : :
6053 : 28 : if (targetm.have_clear_cache ())
6054 : : {
6055 : : /* We have a "clear_cache" insn, and it will handle everything. */
6056 : 0 : class expand_operand ops[2];
6057 : :
6058 : 0 : create_address_operand (&ops[0], begin);
6059 : 0 : create_address_operand (&ops[1], end);
6060 : :
6061 : 0 : if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
6062 : 0 : return;
6063 : : }
6064 : : else
6065 : : {
6066 : : #ifndef CLEAR_INSN_CACHE
6067 : : /* There is no "clear_cache" insn, and __clear_cache() in libgcc
6068 : : does nothing. There is no need to call it. Do nothing. */
6069 : : return;
6070 : : #endif /* CLEAR_INSN_CACHE */
6071 : : }
6072 : :
6073 : 0 : targetm.calls.emit_call_builtin___clear_cache (begin, end);
6074 : : }
6075 : :
6076 : : /* Expand a call to __builtin___clear_cache. */
6077 : :
6078 : : static void
6079 : 28 : expand_builtin___clear_cache (tree exp)
6080 : : {
6081 : 28 : tree begin, end;
6082 : 28 : rtx begin_rtx, end_rtx;
6083 : :
6084 : : /* We must not expand to a library call. If we did, any
6085 : : fallback library function in libgcc that might contain a call to
6086 : : __builtin___clear_cache() would recurse infinitely. */
6087 : 28 : if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6088 : : {
6089 : 0 : error ("both arguments to %<__builtin___clear_cache%> must be pointers");
6090 : 0 : return;
6091 : : }
6092 : :
6093 : 28 : begin = CALL_EXPR_ARG (exp, 0);
6094 : 30 : begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
6095 : :
6096 : 28 : end = CALL_EXPR_ARG (exp, 1);
6097 : 30 : end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
6098 : :
6099 : 28 : maybe_emit_call_builtin___clear_cache (begin_rtx, end_rtx);
6100 : : }
6101 : :
6102 : : /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
6103 : :
6104 : : static rtx
6105 : 634 : round_trampoline_addr (rtx tramp)
6106 : : {
6107 : 634 : rtx temp, addend, mask;
6108 : :
6109 : : /* If we don't need too much alignment, we'll have been guaranteed
6110 : : proper alignment by get_trampoline_type. */
6111 : 634 : if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
6112 : : return tramp;
6113 : :
6114 : : /* Round address up to desired boundary. */
6115 : 0 : temp = gen_reg_rtx (Pmode);
6116 : 0 : addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
6117 : 0 : mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
6118 : :
6119 : 0 : temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
6120 : : temp, 0, OPTAB_LIB_WIDEN);
6121 : 0 : tramp = expand_simple_binop (Pmode, AND, temp, mask,
6122 : : temp, 0, OPTAB_LIB_WIDEN);
6123 : :
6124 : 0 : return tramp;
6125 : : }
6126 : :
6127 : : static rtx
6128 : 295 : expand_builtin_init_trampoline (tree exp, bool onstack)
6129 : : {
6130 : 295 : tree t_tramp, t_func, t_chain;
6131 : 295 : rtx m_tramp, r_tramp, r_chain, tmp;
6132 : :
6133 : 295 : if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
6134 : : POINTER_TYPE, VOID_TYPE))
6135 : : return NULL_RTX;
6136 : :
6137 : 295 : t_tramp = CALL_EXPR_ARG (exp, 0);
6138 : 295 : t_func = CALL_EXPR_ARG (exp, 1);
6139 : 295 : t_chain = CALL_EXPR_ARG (exp, 2);
6140 : :
6141 : 295 : r_tramp = expand_normal (t_tramp);
6142 : 295 : m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
6143 : 295 : MEM_NOTRAP_P (m_tramp) = 1;
6144 : :
6145 : : /* If ONSTACK, the TRAMP argument should be the address of a field
6146 : : within the local function's FRAME decl. Either way, let's see if
6147 : : we can fill in the MEM_ATTRs for this memory. */
6148 : 295 : if (TREE_CODE (t_tramp) == ADDR_EXPR)
6149 : 295 : set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
6150 : :
6151 : : /* Creator of a heap trampoline is responsible for making sure the
6152 : : address is aligned to at least STACK_BOUNDARY. Normally malloc
6153 : : will ensure this anyhow. */
6154 : 295 : tmp = round_trampoline_addr (r_tramp);
6155 : 295 : if (tmp != r_tramp)
6156 : : {
6157 : 0 : m_tramp = change_address (m_tramp, BLKmode, tmp);
6158 : 0 : set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
6159 : 0 : set_mem_size (m_tramp, TRAMPOLINE_SIZE);
6160 : : }
6161 : :
6162 : : /* The FUNC argument should be the address of the nested function.
6163 : : Extract the actual function decl to pass to the hook. */
6164 : 295 : gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
6165 : 295 : t_func = TREE_OPERAND (t_func, 0);
6166 : 295 : gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
6167 : :
6168 : 295 : r_chain = expand_normal (t_chain);
6169 : :
6170 : : /* Generate insns to initialize the trampoline. */
6171 : 295 : targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
6172 : :
6173 : 295 : if (onstack)
6174 : : {
6175 : 295 : trampolines_created = 1;
6176 : :
6177 : 295 : if (targetm.calls.custom_function_descriptors != 0)
6178 : 295 : warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
6179 : : "trampoline generated for nested function %qD", t_func);
6180 : : }
6181 : :
6182 : 295 : return const0_rtx;
6183 : : }
6184 : :
6185 : : static rtx
6186 : 339 : expand_builtin_adjust_trampoline (tree exp)
6187 : : {
6188 : 339 : rtx tramp;
6189 : :
6190 : 339 : if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6191 : : return NULL_RTX;
6192 : :
6193 : 339 : tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
6194 : 339 : tramp = round_trampoline_addr (tramp);
6195 : 339 : if (targetm.calls.trampoline_adjust_address)
6196 : 0 : tramp = targetm.calls.trampoline_adjust_address (tramp);
6197 : :
6198 : : return tramp;
6199 : : }
6200 : :
6201 : : /* Expand a call to the builtin descriptor initialization routine.
6202 : : A descriptor is made up of a couple of pointers to the static
6203 : : chain and the code entry in this order. */
6204 : :
6205 : : static rtx
6206 : 0 : expand_builtin_init_descriptor (tree exp)
6207 : : {
6208 : 0 : tree t_descr, t_func, t_chain;
6209 : 0 : rtx m_descr, r_descr, r_func, r_chain;
6210 : :
6211 : 0 : if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
6212 : : VOID_TYPE))
6213 : : return NULL_RTX;
6214 : :
6215 : 0 : t_descr = CALL_EXPR_ARG (exp, 0);
6216 : 0 : t_func = CALL_EXPR_ARG (exp, 1);
6217 : 0 : t_chain = CALL_EXPR_ARG (exp, 2);
6218 : :
6219 : 0 : r_descr = expand_normal (t_descr);
6220 : 0 : m_descr = gen_rtx_MEM (BLKmode, r_descr);
6221 : 0 : MEM_NOTRAP_P (m_descr) = 1;
6222 : 0 : set_mem_align (m_descr, GET_MODE_ALIGNMENT (ptr_mode));
6223 : :
6224 : 0 : r_func = expand_normal (t_func);
6225 : 0 : r_chain = expand_normal (t_chain);
6226 : :
6227 : : /* Generate insns to initialize the descriptor. */
6228 : 0 : emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
6229 : 0 : emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
6230 : : POINTER_SIZE / BITS_PER_UNIT), r_func);
6231 : :
6232 : 0 : return const0_rtx;
6233 : : }
6234 : :
6235 : : /* Expand a call to the builtin descriptor adjustment routine. */
6236 : :
6237 : : static rtx
6238 : 0 : expand_builtin_adjust_descriptor (tree exp)
6239 : : {
6240 : 0 : rtx tramp;
6241 : :
6242 : 0 : if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6243 : : return NULL_RTX;
6244 : :
6245 : 0 : tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
6246 : :
6247 : : /* Unalign the descriptor to allow runtime identification. */
6248 : 0 : tramp = plus_constant (ptr_mode, tramp,
6249 : 0 : targetm.calls.custom_function_descriptors);
6250 : :
6251 : 0 : return force_operand (tramp, NULL_RTX);
6252 : : }
6253 : :
6254 : : /* Expand the call EXP to the built-in signbit, signbitf or signbitl
6255 : : function. The function first checks whether the back end provides
6256 : : an insn to implement signbit for the respective mode. If not, it
6257 : : checks whether the floating point format of the value is such that
6258 : : the sign bit can be extracted. If that is not the case, error out.
6259 : : EXP is the expression that is a call to the builtin function; if
6260 : : convenient, the result should be placed in TARGET. */
6261 : : static rtx
6262 : 1139 : expand_builtin_signbit (tree exp, rtx target)
6263 : : {
6264 : 1139 : const struct real_format *fmt;
6265 : 1139 : scalar_float_mode fmode;
6266 : 1139 : scalar_int_mode rmode, imode;
6267 : 1139 : tree arg;
6268 : 1139 : int word, bitpos;
6269 : 1139 : enum insn_code icode;
6270 : 1139 : rtx temp;
6271 : 1139 : location_t loc = EXPR_LOCATION (exp);
6272 : :
6273 : 1139 : if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
6274 : : return NULL_RTX;
6275 : :
6276 : 1139 : arg = CALL_EXPR_ARG (exp, 0);
6277 : 1139 : fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
6278 : 1139 : rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
6279 : 1139 : fmt = REAL_MODE_FORMAT (fmode);
6280 : :
6281 : 1139 : arg = builtin_save_expr (arg);
6282 : :
6283 : : /* Expand the argument yielding a RTX expression. */
6284 : 1139 : temp = expand_normal (arg);
6285 : :
6286 : : /* Check if the back end provides an insn that handles signbit for the
6287 : : argument's mode. */
6288 : 1139 : icode = optab_handler (signbit_optab, fmode);
6289 : 1139 : if (icode != CODE_FOR_nothing)
6290 : : {
6291 : 10 : rtx_insn *last = get_last_insn ();
6292 : 10 : rtx this_target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6293 : 10 : if (maybe_emit_unop_insn (icode, this_target, temp, UNKNOWN))
6294 : : return this_target;
6295 : 0 : delete_insns_since (last);
6296 : : }
6297 : :
6298 : : /* For floating point formats without a sign bit, implement signbit
6299 : : as "ARG < 0.0". */
6300 : 1129 : bitpos = fmt->signbit_ro;
6301 : 1129 : if (bitpos < 0)
6302 : : {
6303 : : /* But we can't do this if the format supports signed zero. */
6304 : 0 : gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
6305 : :
6306 : 0 : arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
6307 : 0 : build_real (TREE_TYPE (arg), dconst0));
6308 : 0 : return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
6309 : : }
6310 : :
6311 : 2258 : if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
6312 : : {
6313 : 1025 : imode = int_mode_for_mode (fmode).require ();
6314 : 1025 : temp = gen_lowpart (imode, temp);
6315 : : }
6316 : : else
6317 : : {
6318 : 104 : imode = word_mode;
6319 : : /* Handle targets with different FP word orders. */
6320 : 104 : if (FLOAT_WORDS_BIG_ENDIAN)
6321 : : word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
6322 : : else
6323 : 104 : word = bitpos / BITS_PER_WORD;
6324 : 104 : temp = operand_subword_force (temp, word, fmode);
6325 : 104 : bitpos = bitpos % BITS_PER_WORD;
6326 : : }
6327 : :
6328 : : /* Force the intermediate word_mode (or narrower) result into a
6329 : : register. This avoids attempting to create paradoxical SUBREGs
6330 : : of floating point modes below. */
6331 : 1129 : temp = force_reg (imode, temp);
6332 : :
6333 : : /* If the bitpos is within the "result mode" lowpart, the operation
6334 : : can be implement with a single bitwise AND. Otherwise, we need
6335 : : a right shift and an AND. */
6336 : :
6337 : 2258 : if (bitpos < GET_MODE_BITSIZE (rmode))
6338 : : {
6339 : 921 : wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
6340 : :
6341 : 2763 : if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
6342 : 0 : temp = gen_lowpart (rmode, temp);
6343 : 1842 : temp = expand_binop (rmode, and_optab, temp,
6344 : 1842 : immed_wide_int_const (mask, rmode),
6345 : : NULL_RTX, 1, OPTAB_LIB_WIDEN);
6346 : 921 : }
6347 : : else
6348 : : {
6349 : : /* Perform a logical right shift to place the signbit in the least
6350 : : significant bit, then truncate the result to the desired mode
6351 : : and mask just this bit. */
6352 : 208 : temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
6353 : 208 : temp = gen_lowpart (rmode, temp);
6354 : 208 : temp = expand_binop (rmode, and_optab, temp, const1_rtx,
6355 : : NULL_RTX, 1, OPTAB_LIB_WIDEN);
6356 : : }
6357 : :
6358 : : return temp;
6359 : : }
6360 : :
6361 : : /* Expand fork or exec calls. TARGET is the desired target of the
6362 : : call. EXP is the call. FN is the
6363 : : identificator of the actual function. IGNORE is nonzero if the
6364 : : value is to be ignored. */
6365 : :
6366 : : static rtx
6367 : 87 : expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
6368 : : {
6369 : 87 : tree id, decl;
6370 : 87 : tree call;
6371 : :
6372 : : /* If we are not profiling, just call the function. */
6373 : 87 : if (!coverage_instrumentation_p ())
6374 : : return NULL_RTX;
6375 : :
6376 : : /* Otherwise call the wrapper. This should be equivalent for the rest of
6377 : : compiler, so the code does not diverge, and the wrapper may run the
6378 : : code necessary for keeping the profiling sane. */
6379 : :
6380 : 4 : switch (DECL_FUNCTION_CODE (fn))
6381 : : {
6382 : 4 : case BUILT_IN_FORK:
6383 : 4 : id = get_identifier ("__gcov_fork");
6384 : 4 : break;
6385 : :
6386 : 0 : case BUILT_IN_EXECL:
6387 : 0 : id = get_identifier ("__gcov_execl");
6388 : 0 : break;
6389 : :
6390 : 0 : case BUILT_IN_EXECV:
6391 : 0 : id = get_identifier ("__gcov_execv");
6392 : 0 : break;
6393 : :
6394 : 0 : case BUILT_IN_EXECLP:
6395 : 0 : id = get_identifier ("__gcov_execlp");
6396 : 0 : break;
6397 : :
6398 : 0 : case BUILT_IN_EXECLE:
6399 : 0 : id = get_identifier ("__gcov_execle");
6400 : 0 : break;
6401 : :
6402 : 0 : case BUILT_IN_EXECVP:
6403 : 0 : id = get_identifier ("__gcov_execvp");
6404 : 0 : break;
6405 : :
6406 : 0 : case BUILT_IN_EXECVE:
6407 : 0 : id = get_identifier ("__gcov_execve");
6408 : 0 : break;
6409 : :
6410 : 0 : default:
6411 : 0 : gcc_unreachable ();
6412 : : }
6413 : :
6414 : 4 : decl = build_decl (DECL_SOURCE_LOCATION (fn),
6415 : 4 : FUNCTION_DECL, id, TREE_TYPE (fn));
6416 : 4 : DECL_EXTERNAL (decl) = 1;
6417 : 4 : TREE_PUBLIC (decl) = 1;
6418 : 4 : DECL_ARTIFICIAL (decl) = 1;
6419 : 4 : TREE_NOTHROW (decl) = 1;
6420 : 4 : DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
6421 : 4 : DECL_VISIBILITY_SPECIFIED (decl) = 1;
6422 : 4 : call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
6423 : 4 : return expand_call (call, target, ignore);
6424 : : }
6425 : :
6426 : :
6427 : :
6428 : : /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
6429 : : the pointer in these functions is void*, the tree optimizers may remove
6430 : : casts. The mode computed in expand_builtin isn't reliable either, due
6431 : : to __sync_bool_compare_and_swap.
6432 : :
6433 : : FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
6434 : : group of builtins. This gives us log2 of the mode size. */
6435 : :
6436 : : static inline machine_mode
6437 : 135542 : get_builtin_sync_mode (int fcode_diff)
6438 : : {
6439 : : /* The size is not negotiable, so ask not to get BLKmode in return
6440 : : if the target indicates that a smaller size would be better. */
6441 : 135542 : return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
6442 : : }
6443 : :
6444 : : /* Expand the memory expression LOC and return the appropriate memory operand
6445 : : for the builtin_sync operations. */
6446 : :
6447 : : static rtx
6448 : 152330 : get_builtin_sync_mem (tree loc, machine_mode mode)
6449 : : {
6450 : 152330 : rtx addr, mem;
6451 : 152330 : int addr_space = TYPE_ADDR_SPACE (POINTER_TYPE_P (TREE_TYPE (loc))
6452 : : ? TREE_TYPE (TREE_TYPE (loc))
6453 : : : TREE_TYPE (loc));
6454 : 152330 : scalar_int_mode addr_mode = targetm.addr_space.address_mode (addr_space);
6455 : :
6456 : 152330 : addr = expand_expr (loc, NULL_RTX, addr_mode, EXPAND_SUM);
6457 : 152330 : addr = convert_memory_address (addr_mode, addr);
6458 : :
6459 : : /* Note that we explicitly do not want any alias information for this
6460 : : memory, so that we kill all other live memories. Otherwise we don't
6461 : : satisfy the full barrier semantics of the intrinsic. */
6462 : 152330 : mem = gen_rtx_MEM (mode, addr);
6463 : :
6464 : 152330 : set_mem_addr_space (mem, addr_space);
6465 : :
6466 : 152330 : mem = validize_mem (mem);
6467 : :
6468 : : /* The alignment needs to be at least according to that of the mode. */
6469 : 152330 : set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
6470 : : get_pointer_alignment (loc)));
6471 : 152330 : set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
6472 : 152330 : MEM_VOLATILE_P (mem) = 1;
6473 : :
6474 : 152330 : return mem;
6475 : : }
6476 : :
6477 : : /* Make sure an argument is in the right mode.
6478 : : EXP is the tree argument.
6479 : : MODE is the mode it should be in. */
6480 : :
6481 : : static rtx
6482 : 95265 : expand_expr_force_mode (tree exp, machine_mode mode)
6483 : : {
6484 : 95265 : rtx val;
6485 : 95265 : machine_mode old_mode;
6486 : :
6487 : 95265 : if (TREE_CODE (exp) == SSA_NAME
6488 : 95265 : && TYPE_MODE (TREE_TYPE (exp)) != mode)
6489 : : {
6490 : : /* Undo argument promotion if possible, as combine might not
6491 : : be able to do it later due to MEM_VOLATILE_P uses in the
6492 : : patterns. */
6493 : 25 : gimple *g = get_gimple_for_ssa_name (exp);
6494 : 25 : if (g && gimple_assign_cast_p (g))
6495 : : {
6496 : 0 : tree rhs = gimple_assign_rhs1 (g);
6497 : 0 : tree_code code = gimple_assign_rhs_code (g);
6498 : 0 : if (CONVERT_EXPR_CODE_P (code)
6499 : 0 : && TYPE_MODE (TREE_TYPE (rhs)) == mode
6500 : 0 : && INTEGRAL_TYPE_P (TREE_TYPE (exp))
6501 : 0 : && INTEGRAL_TYPE_P (TREE_TYPE (rhs))
6502 : 0 : && (TYPE_PRECISION (TREE_TYPE (exp))
6503 : 0 : > TYPE_PRECISION (TREE_TYPE (rhs))))
6504 : : exp = rhs;
6505 : : }
6506 : : }
6507 : :
6508 : 95265 : val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
6509 : : /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6510 : : of CONST_INTs, where we know the old_mode only from the call argument. */
6511 : :
6512 : 95265 : old_mode = GET_MODE (val);
6513 : 95265 : if (old_mode == VOIDmode)
6514 : 45382 : old_mode = TYPE_MODE (TREE_TYPE (exp));
6515 : 95265 : val = convert_modes (mode, old_mode, val, 1);
6516 : 95265 : return val;
6517 : : }
6518 : :
6519 : :
6520 : : /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
6521 : : EXP is the CALL_EXPR. CODE is the rtx code
6522 : : that corresponds to the arithmetic or logical operation from the name;
6523 : : an exception here is that NOT actually means NAND. TARGET is an optional
6524 : : place for us to store the results; AFTER is true if this is the
6525 : : fetch_and_xxx form. */
6526 : :
6527 : : static rtx
6528 : 4003 : expand_builtin_sync_operation (machine_mode mode, tree exp,
6529 : : enum rtx_code code, bool after,
6530 : : rtx target)
6531 : : {
6532 : 4003 : rtx val, mem;
6533 : 4003 : location_t loc = EXPR_LOCATION (exp);
6534 : :
6535 : 4003 : if (code == NOT && warn_sync_nand)
6536 : : {
6537 : 523 : tree fndecl = get_callee_fndecl (exp);
6538 : 523 : enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6539 : :
6540 : 523 : static bool warned_f_a_n, warned_n_a_f;
6541 : :
6542 : 523 : switch (fcode)
6543 : : {
6544 : 434 : case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6545 : 434 : case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6546 : 434 : case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6547 : 434 : case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6548 : 434 : case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6549 : 434 : if (warned_f_a_n)
6550 : : break;
6551 : :
6552 : 30 : fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
6553 : 30 : inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6554 : 30 : warned_f_a_n = true;
6555 : 30 : break;
6556 : :
6557 : 89 : case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6558 : 89 : case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6559 : 89 : case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6560 : 89 : case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6561 : 89 : case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6562 : 89 : if (warned_n_a_f)
6563 : : break;
6564 : :
6565 : 22 : fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
6566 : 22 : inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6567 : 22 : warned_n_a_f = true;
6568 : 22 : break;
6569 : :
6570 : 0 : default:
6571 : 0 : gcc_unreachable ();
6572 : : }
6573 : : }
6574 : :
6575 : : /* Expand the operands. */
6576 : 4003 : mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6577 : 4003 : val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6578 : :
6579 : 4003 : return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
6580 : 4003 : after);
6581 : : }
6582 : :
6583 : : /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
6584 : : intrinsics. EXP is the CALL_EXPR. IS_BOOL is
6585 : : true if this is the boolean form. TARGET is a place for us to store the
6586 : : results; this is NOT optional if IS_BOOL is true. */
6587 : :
6588 : : static rtx
6589 : 456 : expand_builtin_compare_and_swap (machine_mode mode, tree exp,
6590 : : bool is_bool, rtx target)
6591 : : {
6592 : 456 : rtx old_val, new_val, mem;
6593 : 456 : rtx *pbool, *poval;
6594 : :
6595 : : /* Expand the operands. */
6596 : 456 : mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6597 : 456 : old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6598 : 456 : new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
6599 : :
6600 : 456 : pbool = poval = NULL;
6601 : 456 : if (target != const0_rtx)
6602 : : {
6603 : 430 : if (is_bool)
6604 : : pbool = ⌖
6605 : : else
6606 : 230 : poval = ⌖
6607 : : }
6608 : 456 : if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
6609 : : false, MEMMODEL_SYNC_SEQ_CST,
6610 : : MEMMODEL_SYNC_SEQ_CST))
6611 : : return NULL_RTX;
6612 : :
6613 : 454 : return target;
6614 : : }
6615 : :
6616 : : /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
6617 : : general form is actually an atomic exchange, and some targets only
6618 : : support a reduced form with the second argument being a constant 1.
6619 : : EXP is the CALL_EXPR; TARGET is an optional place for us to store
6620 : : the results. */
6621 : :
6622 : : static rtx
6623 : 326 : expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
6624 : : rtx target)
6625 : : {
6626 : 326 : rtx val, mem;
6627 : :
6628 : : /* Expand the operands. */
6629 : 326 : mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6630 : 326 : val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6631 : :
6632 : 326 : return expand_sync_lock_test_and_set (target, mem, val);
6633 : : }
6634 : :
6635 : : /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6636 : :
6637 : : static rtx
6638 : 158 : expand_builtin_sync_lock_release (machine_mode mode, tree exp)
6639 : : {
6640 : 158 : rtx mem;
6641 : :
6642 : : /* Expand the operands. */
6643 : 158 : mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6644 : :
6645 : 158 : return expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
6646 : : }
6647 : :
6648 : : /* Given an integer representing an ``enum memmodel'', verify its
6649 : : correctness and return the memory model enum. */
6650 : :
6651 : : static enum memmodel
6652 : 170936 : get_memmodel (tree exp)
6653 : : {
6654 : : /* If the parameter is not a constant, it's a run time value so we'll just
6655 : : convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
6656 : 170936 : if (TREE_CODE (exp) != INTEGER_CST)
6657 : : return MEMMODEL_SEQ_CST;
6658 : :
6659 : 170083 : rtx op = expand_normal (exp);
6660 : :
6661 : 170083 : unsigned HOST_WIDE_INT val = INTVAL (op);
6662 : 170083 : if (targetm.memmodel_check)
6663 : 170083 : val = targetm.memmodel_check (val);
6664 : 0 : else if (val & ~MEMMODEL_MASK)
6665 : : return MEMMODEL_SEQ_CST;
6666 : :
6667 : : /* Should never see a user explicit SYNC memodel model, so >= LAST works. */
6668 : 170083 : if (memmodel_base (val) >= MEMMODEL_LAST)
6669 : : return MEMMODEL_SEQ_CST;
6670 : :
6671 : : /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
6672 : : be conservative and promote consume to acquire. */
6673 : 170082 : if (val == MEMMODEL_CONSUME)
6674 : 750 : val = MEMMODEL_ACQUIRE;
6675 : :
6676 : 170082 : return (enum memmodel) val;
6677 : : }
6678 : :
6679 : : /* Expand the __atomic_exchange intrinsic:
6680 : : TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
6681 : : EXP is the CALL_EXPR.
6682 : : TARGET is an optional place for us to store the results. */
6683 : :
6684 : : static rtx
6685 : 2921 : expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
6686 : : {
6687 : 2921 : rtx val, mem;
6688 : 2921 : enum memmodel model;
6689 : :
6690 : 2921 : model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6691 : :
6692 : 2921 : if (!flag_inline_atomics)
6693 : : return NULL_RTX;
6694 : :
6695 : : /* Expand the operands. */
6696 : 2870 : mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6697 : 2870 : val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6698 : :
6699 : 2870 : return expand_atomic_exchange (target, mem, val, model);
6700 : : }
6701 : :
6702 : : /* Expand the __atomic_compare_exchange intrinsic:
6703 : : bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
6704 : : TYPE desired, BOOL weak,
6705 : : enum memmodel success,
6706 : : enum memmodel failure)
6707 : : EXP is the CALL_EXPR.
6708 : : TARGET is an optional place for us to store the results. */
6709 : :
6710 : : static rtx
6711 : 9233 : expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
6712 : : rtx target)
6713 : : {
6714 : 9233 : rtx expect, desired, mem, oldval;
6715 : 9233 : rtx_code_label *label;
6716 : 9233 : tree weak;
6717 : 9233 : bool is_weak;
6718 : :
6719 : 9233 : memmodel success = get_memmodel (CALL_EXPR_ARG (exp, 4));
6720 : 9233 : memmodel failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
6721 : :
6722 : 9233 : if (failure > success)
6723 : 22 : success = MEMMODEL_SEQ_CST;
6724 : :
6725 : 9233 : if (is_mm_release (failure) || is_mm_acq_rel (failure))
6726 : : {
6727 : : failure = MEMMODEL_SEQ_CST;
6728 : : success = MEMMODEL_SEQ_CST;
6729 : : }
6730 : :
6731 : :
6732 : 9233 : if (!flag_inline_atomics)
6733 : : return NULL_RTX;
6734 : :
6735 : : /* Expand the operands. */
6736 : 9182 : mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6737 : :
6738 : 9182 : expect = expand_normal (CALL_EXPR_ARG (exp, 1));
6739 : 9182 : expect = convert_memory_address (Pmode, expect);
6740 : 9182 : expect = gen_rtx_MEM (mode, expect);
6741 : 9182 : desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
6742 : :
6743 : 9182 : weak = CALL_EXPR_ARG (exp, 3);
6744 : 9182 : is_weak = false;
6745 : 9182 : if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
6746 : 9182 : is_weak = true;
6747 : :
6748 : 9182 : if (target == const0_rtx)
6749 : 281 : target = NULL;
6750 : :
6751 : : /* Lest the rtl backend create a race condition with an imporoper store
6752 : : to memory, always create a new pseudo for OLDVAL. */
6753 : 9182 : oldval = NULL;
6754 : :
6755 : 9182 : if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
6756 : : is_weak, success, failure))
6757 : : return NULL_RTX;
6758 : :
6759 : : /* Conditionally store back to EXPECT, lest we create a race condition
6760 : : with an improper store to memory. */
6761 : : /* ??? With a rearrangement of atomics at the gimple level, we can handle
6762 : : the normal case where EXPECT is totally private, i.e. a register. At
6763 : : which point the store can be unconditional. */
6764 : 7433 : label = gen_label_rtx ();
6765 : 7433 : emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
6766 : 7433 : GET_MODE (target), 1, label);
6767 : 7433 : emit_move_insn (expect, oldval);
6768 : 7433 : emit_label (label);
6769 : :
6770 : 7433 : return target;
6771 : : }
6772 : :
6773 : : /* Helper function for expand_ifn_atomic_compare_exchange - expand
6774 : : internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
6775 : : call. The weak parameter must be dropped to match the expected parameter
6776 : : list and the expected argument changed from value to pointer to memory
6777 : : slot. */
6778 : :
6779 : : static void
6780 : 0 : expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
6781 : : {
6782 : 0 : unsigned int z;
6783 : 0 : vec<tree, va_gc> *vec;
6784 : :
6785 : 0 : vec_alloc (vec, 5);
6786 : 0 : vec->quick_push (gimple_call_arg (call, 0));
6787 : 0 : tree expected = gimple_call_arg (call, 1);
6788 : 0 : rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
6789 : 0 : TREE_TYPE (expected));
6790 : 0 : rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
6791 : 0 : if (expd != x)
6792 : 0 : emit_move_insn (x, expd);
6793 : 0 : tree v = make_tree (TREE_TYPE (expected), x);
6794 : 0 : vec->quick_push (build1 (ADDR_EXPR,
6795 : 0 : build_pointer_type (TREE_TYPE (expected)), v));
6796 : 0 : vec->quick_push (gimple_call_arg (call, 2));
6797 : : /* Skip the boolean weak parameter. */
6798 : 0 : for (z = 4; z < 6; z++)
6799 : 0 : vec->quick_push (gimple_call_arg (call, z));
6800 : : /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}. */
6801 : 0 : unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
6802 : 0 : gcc_assert (bytes_log2 < 5);
6803 : 0 : built_in_function fncode
6804 : : = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
6805 : : + bytes_log2);
6806 : 0 : tree fndecl = builtin_decl_explicit (fncode);
6807 : 0 : tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
6808 : : fndecl);
6809 : 0 : tree exp = build_call_vec (boolean_type_node, fn, vec);
6810 : 0 : tree lhs = gimple_call_lhs (call);
6811 : 0 : rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
6812 : 0 : if (lhs)
6813 : : {
6814 : 0 : rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6815 : 0 : if (GET_MODE (boolret) != mode)
6816 : 0 : boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6817 : 0 : x = force_reg (mode, x);
6818 : 0 : write_complex_part (target, boolret, true, true);
6819 : 0 : write_complex_part (target, x, false, false);
6820 : : }
6821 : 0 : }
6822 : :
6823 : : /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
6824 : :
6825 : : void
6826 : 13928 : expand_ifn_atomic_compare_exchange (gcall *call)
6827 : : {
6828 : 13928 : int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
6829 : 13928 : gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
6830 : 13928 : machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
6831 : :
6832 : 13928 : memmodel success = get_memmodel (gimple_call_arg (call, 4));
6833 : 13928 : memmodel failure = get_memmodel (gimple_call_arg (call, 5));
6834 : :
6835 : 13928 : if (failure > success)
6836 : 0 : success = MEMMODEL_SEQ_CST;
6837 : :
6838 : 13928 : if (is_mm_release (failure) || is_mm_acq_rel (failure))
6839 : : {
6840 : : failure = MEMMODEL_SEQ_CST;
6841 : : success = MEMMODEL_SEQ_CST;
6842 : : }
6843 : :
6844 : 13928 : if (!flag_inline_atomics)
6845 : : {
6846 : 0 : expand_ifn_atomic_compare_exchange_into_call (call, mode);
6847 : 0 : return;
6848 : : }
6849 : :
6850 : : /* Expand the operands. */
6851 : 13928 : rtx mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
6852 : :
6853 : 13928 : rtx expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
6854 : 13928 : rtx desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
6855 : :
6856 : 13928 : bool is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
6857 : :
6858 : 13928 : rtx boolret = NULL;
6859 : 13928 : rtx oldval = NULL;
6860 : :
6861 : 13928 : if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
6862 : : is_weak, success, failure))
6863 : : {
6864 : 0 : expand_ifn_atomic_compare_exchange_into_call (call, mode);
6865 : 0 : return;
6866 : : }
6867 : :
6868 : 13928 : tree lhs = gimple_call_lhs (call);
6869 : 13928 : if (lhs)
6870 : : {
6871 : 13728 : rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6872 : 13728 : if (GET_MODE (boolret) != mode)
6873 : 12214 : boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6874 : 13728 : write_complex_part (target, boolret, true, true);
6875 : 13728 : write_complex_part (target, oldval, false, false);
6876 : : }
6877 : : }
6878 : :
6879 : : /* Expand the __atomic_load intrinsic:
6880 : : TYPE __atomic_load (TYPE *object, enum memmodel)
6881 : : EXP is the CALL_EXPR.
6882 : : TARGET is an optional place for us to store the results. */
6883 : :
6884 : : static rtx
6885 : 71001 : expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
6886 : : {
6887 : 71001 : memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6888 : 71001 : if (is_mm_release (model) || is_mm_acq_rel (model))
6889 : : model = MEMMODEL_SEQ_CST;
6890 : :
6891 : 71001 : if (!flag_inline_atomics)
6892 : : return NULL_RTX;
6893 : :
6894 : : /* Expand the operand. */
6895 : 70960 : rtx mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6896 : :
6897 : 70960 : return expand_atomic_load (target, mem, model);
6898 : : }
6899 : :
6900 : :
6901 : : /* Expand the __atomic_store intrinsic:
6902 : : void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
6903 : : EXP is the CALL_EXPR.
6904 : : TARGET is an optional place for us to store the results. */
6905 : :
6906 : : static rtx
6907 : 17539 : expand_builtin_atomic_store (machine_mode mode, tree exp)
6908 : : {
6909 : 17539 : memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6910 : 17539 : if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
6911 : 1593 : || is_mm_release (model)))
6912 : : model = MEMMODEL_SEQ_CST;
6913 : :
6914 : 17539 : if (!flag_inline_atomics)
6915 : : return NULL_RTX;
6916 : :
6917 : : /* Expand the operands. */
6918 : 17508 : rtx mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6919 : 17508 : rtx val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6920 : :
6921 : 17508 : return expand_atomic_store (mem, val, model, false);
6922 : : }
6923 : :
6924 : : /* Expand the __atomic_fetch_XXX intrinsic:
6925 : : TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
6926 : : EXP is the CALL_EXPR.
6927 : : TARGET is an optional place for us to store the results.
6928 : : CODE is the operation, PLUS, MINUS, ADD, XOR, or IOR.
6929 : : FETCH_AFTER is true if returning the result of the operation.
6930 : : FETCH_AFTER is false if returning the value before the operation.
6931 : : IGNORE is true if the result is not used.
6932 : : EXT_CALL is the correct builtin for an external call if this cannot be
6933 : : resolved to an instruction sequence. */
6934 : :
6935 : : static rtx
6936 : 29875 : expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
6937 : : enum rtx_code code, bool fetch_after,
6938 : : bool ignore, enum built_in_function ext_call)
6939 : : {
6940 : 29875 : rtx val, mem, ret;
6941 : 29875 : enum memmodel model;
6942 : 29875 : tree fndecl;
6943 : 29875 : tree addr;
6944 : :
6945 : 29875 : model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6946 : :
6947 : : /* Expand the operands. */
6948 : 29875 : mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6949 : 29875 : val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6950 : :
6951 : : /* Only try generating instructions if inlining is turned on. */
6952 : 29875 : if (flag_inline_atomics)
6953 : : {
6954 : 29332 : ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
6955 : 29332 : if (ret)
6956 : : return ret;
6957 : : }
6958 : :
6959 : : /* Return if a different routine isn't needed for the library call. */
6960 : 1142 : if (ext_call == BUILT_IN_NONE)
6961 : : return NULL_RTX;
6962 : :
6963 : : /* Change the call to the specified function. */
6964 : 279 : fndecl = get_callee_fndecl (exp);
6965 : 279 : addr = CALL_EXPR_FN (exp);
6966 : 279 : STRIP_NOPS (addr);
6967 : :
6968 : 279 : gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
6969 : 279 : TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
6970 : :
6971 : : /* If we will emit code after the call, the call cannot be a tail call.
6972 : : If it is emitted as a tail call, a barrier is emitted after it, and
6973 : : then all trailing code is removed. */
6974 : 279 : if (!ignore)
6975 : 182 : CALL_EXPR_TAILCALL (exp) = 0;
6976 : :
6977 : : /* Expand the call here so we can emit trailing code. */
6978 : 279 : ret = expand_call (exp, target, ignore);
6979 : :
6980 : : /* Replace the original function just in case it matters. */
6981 : 279 : TREE_OPERAND (addr, 0) = fndecl;
6982 : :
6983 : : /* Then issue the arithmetic correction to return the right result. */
6984 : 279 : if (!ignore)
6985 : : {
6986 : 182 : if (code == NOT)
6987 : : {
6988 : 31 : ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
6989 : : OPTAB_LIB_WIDEN);
6990 : 31 : ret = expand_simple_unop (mode, NOT, ret, target, true);
6991 : : }
6992 : : else
6993 : 151 : ret = expand_simple_binop (mode, code, ret, val, target, true,
6994 : : OPTAB_LIB_WIDEN);
6995 : : }
6996 : : return ret;
6997 : : }
6998 : :
6999 : : /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
7000 : :
7001 : : void
7002 : 427 : expand_ifn_atomic_bit_test_and (gcall *call)
7003 : : {
7004 : 427 : tree ptr = gimple_call_arg (call, 0);
7005 : 427 : tree bit = gimple_call_arg (call, 1);
7006 : 427 : tree flag = gimple_call_arg (call, 2);
7007 : 427 : tree lhs = gimple_call_lhs (call);
7008 : 427 : enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
7009 : 427 : machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
7010 : 427 : enum rtx_code code;
7011 : 427 : optab optab;
7012 : 427 : class expand_operand ops[5];
7013 : :
7014 : 427 : gcc_assert (flag_inline_atomics);
7015 : :
7016 : 427 : if (gimple_call_num_args (call) == 5)
7017 : 296 : model = get_memmodel (gimple_call_arg (call, 3));
7018 : :
7019 : 427 : rtx mem = get_builtin_sync_mem (ptr, mode);
7020 : 427 : rtx val = expand_expr_force_mode (bit, mode);
7021 : :
7022 : 427 : switch (gimple_call_internal_fn (call))
7023 : : {
7024 : : case IFN_ATOMIC_BIT_TEST_AND_SET:
7025 : : code = IOR;
7026 : : optab = atomic_bit_test_and_set_optab;
7027 : : break;
7028 : : case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
7029 : : code = XOR;
7030 : : optab = atomic_bit_test_and_complement_optab;
7031 : : break;
7032 : : case IFN_ATOMIC_BIT_TEST_AND_RESET:
7033 : : code = AND;
7034 : : optab = atomic_bit_test_and_reset_optab;
7035 : : break;
7036 : 0 : default:
7037 : 0 : gcc_unreachable ();
7038 : : }
7039 : :
7040 : 427 : if (lhs == NULL_TREE)
7041 : : {
7042 : 0 : rtx val2 = expand_simple_binop (mode, ASHIFT, const1_rtx,
7043 : : val, NULL_RTX, true, OPTAB_DIRECT);
7044 : 0 : if (code == AND)
7045 : 0 : val2 = expand_simple_unop (mode, NOT, val2, NULL_RTX, true);
7046 : 0 : if (expand_atomic_fetch_op (const0_rtx, mem, val2, code, model, false))
7047 : 427 : return;
7048 : : }
7049 : :
7050 : 427 : rtx target;
7051 : 427 : if (lhs)
7052 : 427 : target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
7053 : : else
7054 : 0 : target = gen_reg_rtx (mode);
7055 : 427 : enum insn_code icode = direct_optab_handler (optab, mode);
7056 : 427 : gcc_assert (icode != CODE_FOR_nothing);
7057 : 427 : create_output_operand (&ops[0], target, mode);
7058 : 427 : create_fixed_operand (&ops[1], mem);
7059 : 427 : create_convert_operand_to (&ops[2], val, mode, true);
7060 : 427 : create_integer_operand (&ops[3], model);
7061 : 427 : create_integer_operand (&ops[4], integer_onep (flag));
7062 : 427 : if (maybe_expand_insn (icode, 5, ops))
7063 : : return;
7064 : :
7065 : 0 : rtx bitval = val;
7066 : 0 : val = expand_simple_binop (mode, ASHIFT, const1_rtx,
7067 : : val, NULL_RTX, true, OPTAB_DIRECT);
7068 : 0 : rtx maskval = val;
7069 : 0 : if (code == AND)
7070 : 0 : val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
7071 : 0 : rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
7072 : : code, model, false);
7073 : 0 : if (!result)
7074 : : {
7075 : 0 : bool is_atomic = gimple_call_num_args (call) == 5;
7076 : 0 : tree tcall = gimple_call_arg (call, 3 + is_atomic);
7077 : 0 : tree fndecl = gimple_call_addr_fndecl (tcall);
7078 : 0 : tree type = TREE_TYPE (TREE_TYPE (fndecl));
7079 : 0 : tree exp;
7080 : 0 : if (is_atomic)
7081 : 0 : exp = build_call_nary (type, tcall, 3,
7082 : : ptr, make_tree (type, val),
7083 : : gimple_call_arg (call, 3));
7084 : : else
7085 : 0 : exp = build_call_nary (type, tcall, 2, ptr, make_tree (type, val));
7086 : 0 : result = expand_builtin (exp, gen_reg_rtx (mode), NULL_RTX,
7087 : : mode, !lhs);
7088 : : }
7089 : 0 : if (!lhs)
7090 : : return;
7091 : 0 : if (integer_onep (flag))
7092 : : {
7093 : 0 : result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
7094 : : NULL_RTX, true, OPTAB_DIRECT);
7095 : 0 : result = expand_simple_binop (mode, AND, result, const1_rtx, target,
7096 : : true, OPTAB_DIRECT);
7097 : : }
7098 : : else
7099 : 0 : result = expand_simple_binop (mode, AND, result, maskval, target, true,
7100 : : OPTAB_DIRECT);
7101 : 0 : if (result != target)
7102 : 0 : emit_move_insn (target, result);
7103 : : }
7104 : :
7105 : : /* Expand IFN_ATOMIC_*_FETCH_CMP_0 internal function. */
7106 : :
7107 : : void
7108 : 2306 : expand_ifn_atomic_op_fetch_cmp_0 (gcall *call)
7109 : : {
7110 : 2306 : tree cmp = gimple_call_arg (call, 0);
7111 : 2306 : tree ptr = gimple_call_arg (call, 1);
7112 : 2306 : tree arg = gimple_call_arg (call, 2);
7113 : 2306 : tree lhs = gimple_call_lhs (call);
7114 : 2306 : enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
7115 : 2306 : machine_mode mode = TYPE_MODE (TREE_TYPE (cmp));
7116 : 2306 : optab optab;
7117 : 2306 : rtx_code code;
7118 : 2306 : class expand_operand ops[5];
7119 : :
7120 : 2306 : gcc_assert (flag_inline_atomics);
7121 : :
7122 : 2306 : if (gimple_call_num_args (call) == 5)
7123 : 1898 : model = get_memmodel (gimple_call_arg (call, 3));
7124 : :
7125 : 2306 : rtx mem = get_builtin_sync_mem (ptr, mode);
7126 : 2306 : rtx op = expand_expr_force_mode (arg, mode);
7127 : :
7128 : 2306 : switch (gimple_call_internal_fn (call))
7129 : : {
7130 : : case IFN_ATOMIC_ADD_FETCH_CMP_0:
7131 : : code = PLUS;
7132 : : optab = atomic_add_fetch_cmp_0_optab;
7133 : : break;
7134 : : case IFN_ATOMIC_SUB_FETCH_CMP_0:
7135 : : code = MINUS;
7136 : : optab = atomic_sub_fetch_cmp_0_optab;
7137 : : break;
7138 : : case IFN_ATOMIC_AND_FETCH_CMP_0:
7139 : : code = AND;
7140 : : optab = atomic_and_fetch_cmp_0_optab;
7141 : : break;
7142 : : case IFN_ATOMIC_OR_FETCH_CMP_0:
7143 : : code = IOR;
7144 : : optab = atomic_or_fetch_cmp_0_optab;
7145 : : break;
7146 : : case IFN_ATOMIC_XOR_FETCH_CMP_0:
7147 : : code = XOR;
7148 : : optab = atomic_xor_fetch_cmp_0_optab;
7149 : : break;
7150 : 0 : default:
7151 : 0 : gcc_unreachable ();
7152 : : }
7153 : :
7154 : 2306 : enum rtx_code comp = UNKNOWN;
7155 : 2306 : switch (tree_to_uhwi (cmp))
7156 : : {
7157 : : case ATOMIC_OP_FETCH_CMP_0_EQ: comp = EQ; break;
7158 : : case ATOMIC_OP_FETCH_CMP_0_NE: comp = NE; break;
7159 : : case ATOMIC_OP_FETCH_CMP_0_GT: comp = GT; break;
7160 : : case ATOMIC_OP_FETCH_CMP_0_GE: comp = GE; break;
7161 : : case ATOMIC_OP_FETCH_CMP_0_LT: comp = LT; break;
7162 : : case ATOMIC_OP_FETCH_CMP_0_LE: comp = LE; break;
7163 : 0 : default: gcc_unreachable ();
7164 : : }
7165 : :
7166 : 2306 : rtx target;
7167 : 2306 : if (lhs == NULL_TREE)
7168 : 0 : target = gen_reg_rtx (TYPE_MODE (boolean_type_node));
7169 : : else
7170 : 2306 : target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
7171 : 2306 : enum insn_code icode = direct_optab_handler (optab, mode);
7172 : 2306 : gcc_assert (icode != CODE_FOR_nothing);
7173 : 2306 : create_output_operand (&ops[0], target, TYPE_MODE (boolean_type_node));
7174 : 2306 : create_fixed_operand (&ops[1], mem);
7175 : 2306 : create_convert_operand_to (&ops[2], op, mode, true);
7176 : 2306 : create_integer_operand (&ops[3], model);
7177 : 2306 : create_integer_operand (&ops[4], comp);
7178 : 2306 : if (maybe_expand_insn (icode, 5, ops))
7179 : 2274 : return;
7180 : :
7181 : 32 : rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, op,
7182 : : code, model, true);
7183 : 32 : if (!result)
7184 : : {
7185 : 0 : bool is_atomic = gimple_call_num_args (call) == 5;
7186 : 0 : tree tcall = gimple_call_arg (call, 3 + is_atomic);
7187 : 0 : tree fndecl = gimple_call_addr_fndecl (tcall);
7188 : 0 : tree type = TREE_TYPE (TREE_TYPE (fndecl));
7189 : 0 : tree exp;
7190 : 0 : if (is_atomic)
7191 : 0 : exp = build_call_nary (type, tcall, 3,
7192 : : ptr, arg,
7193 : : gimple_call_arg (call, 3));
7194 : : else
7195 : 0 : exp = build_call_nary (type, tcall, 2, ptr, arg);
7196 : 0 : result = expand_builtin (exp, gen_reg_rtx (mode), NULL_RTX,
7197 : : mode, !lhs);
7198 : : }
7199 : :
7200 : 32 : if (lhs)
7201 : : {
7202 : 32 : result = emit_store_flag_force (target, comp, result, const0_rtx, mode,
7203 : : 0, 1);
7204 : 32 : if (result != target)
7205 : 32 : emit_move_insn (target, result);
7206 : : }
7207 : : }
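 :            : /* For illustration, an assumed source-level pattern (not code from
 :            :    this file) behind these internal functions: a compare of an
 :            :    op-fetch result against zero, e.g.
 :            :
 :            :      if (__atomic_sub_fetch (&refcount, 1, __ATOMIC_ACQ_REL) == 0)
 :            :        destroy ();   // IFN_ATOMIC_SUB_FETCH_CMP_0, comparison EQ
 :            :
 :            :    which lets targets such as x86 reuse the flags set by a "lock sub"
 :            :    instead of emitting a separate comparison.  */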
7208 : :
7209 : : /* Expand an atomic clear operation.
7210 :            : 	void __atomic_clear (BOOL *obj, enum memmodel)
7211 : : EXP is the call expression. */
7212 : :
7213 : : static rtx
7214 : 57 : expand_builtin_atomic_clear (tree exp)
7215 : : {
7216 : 57 : machine_mode mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
7217 : 57 : rtx mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7218 : 57 : memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 1));
7219 : :
7220 : 57 : if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
7221 : : model = MEMMODEL_SEQ_CST;
7222 : :
7223 :            :   /* Try issuing an __atomic_store, allowing a fallback to
7224 :            :      __sync_lock_release; if both fail, emit a plain store.  The only
7225 :            :      way this can fail is if the bool type is larger than a word size.
7226 :            :      Unlikely, but handle it anyway for completeness.  Assume a
7227 :            :      single-threaded model in that case, since there is no atomic
 :            :      support and no barriers are required.  */
7228 : 57 : rtx ret = expand_atomic_store (mem, const0_rtx, model, true);
7229 : 57 : if (!ret)
7230 : 0 : emit_move_insn (mem, const0_rtx);
7231 : 57 : return const0_rtx;
7232 : : }
7233 : :
7234 : : /* Expand an atomic test_and_set operation.
7235 :            : 	bool __atomic_test_and_set (BOOL *obj, enum memmodel)
7236 : : EXP is the call expression. */
7237 : :
7238 : : static rtx
7239 : 274 : expand_builtin_atomic_test_and_set (tree exp, rtx target)
7240 : : {
7241 : 274 : rtx mem;
7242 : 274 : enum memmodel model;
7243 : 274 : machine_mode mode;
7244 : :
7245 : 274 : mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
7246 : 274 : mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7247 : 274 : model = get_memmodel (CALL_EXPR_ARG (exp, 1));
7248 : :
7249 : 274 : return expand_atomic_test_and_set (target, mem, model);
7250 : : }
7251 : :
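 :            : /* A minimal usage sketch for the pair above (an illustration, not
 :            :    code from this file):
 :            :
 :            :      static volatile _Bool lock_;
 :            :
 :            :      void enter (void)
 :            :      {
 :            :        while (__atomic_test_and_set (&lock_, __ATOMIC_ACQUIRE))
 :            :          ;   // spin while the previous value was "set"
 :            :      }
 :            :
 :            :      void leave (void)
 :            :      {
 :            :        __atomic_clear (&lock_, __ATOMIC_RELEASE);
 :            :      }  */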
7252 : :
7253 :            : /* Return true if the (optional) argument ARG1, an object of size ARG0,
7254 :            :    is always lock free on this architecture.  If ARG1 is NULL, use
 :            :    typical alignment for size ARG0.  */
7255 : :
7256 : : static tree
7257 : 104951 : fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
7258 : : {
7259 : 104951 : int size;
7260 : 104951 : machine_mode mode;
7261 : 104951 : unsigned int mode_align, type_align;
7262 : :
7263 : 104951 : if (TREE_CODE (arg0) != INTEGER_CST)
7264 : : return NULL_TREE;
7265 : :
7266 : : /* We need a corresponding integer mode for the access to be lock-free. */
7267 : 104857 : size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
7268 : 104857 : if (!int_mode_for_size (size, 0).exists (&mode))
7269 : 17 : return boolean_false_node;
7270 : :
7271 : 104840 : mode_align = GET_MODE_ALIGNMENT (mode);
7272 : :
7273 : 104840 : if (TREE_CODE (arg1) == INTEGER_CST)
7274 : : {
7275 : 84546 : unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
7276 : :
7277 : : /* Either this argument is null, or it's a fake pointer encoding
7278 : : the alignment of the object. */
7279 : 84546 : val = least_bit_hwi (val);
7280 : 84546 : val *= BITS_PER_UNIT;
7281 : :
7282 : 84546 : if (val == 0 || mode_align < val)
7283 : : type_align = mode_align;
7284 : : else
7285 : 19704 : type_align = val;
7286 : : }
7287 : : else
7288 : : {
7289 : 20294 : tree ttype = TREE_TYPE (arg1);
7290 : :
7291 : : /* This function is usually invoked and folded immediately by the front
7292 : : end before anything else has a chance to look at it. The pointer
7293 : : parameter at this point is usually cast to a void *, so check for that
7294 : : and look past the cast. */
7295 : 63 : if (CONVERT_EXPR_P (arg1)
7296 : 20262 : && POINTER_TYPE_P (ttype)
7297 : 20262 : && VOID_TYPE_P (TREE_TYPE (ttype))
7298 : 40556 : && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
7299 : 20231 : arg1 = TREE_OPERAND (arg1, 0);
7300 : :
7301 : 20294 : ttype = TREE_TYPE (arg1);
7302 : 20294 : gcc_assert (POINTER_TYPE_P (ttype));
7303 : :
7304 : : /* Get the underlying type of the object. */
7305 : 20294 : ttype = TREE_TYPE (ttype);
7306 : 20294 : type_align = TYPE_ALIGN (ttype);
7307 : : }
7308 : :
7309 : : /* If the object has smaller alignment, the lock free routines cannot
7310 : : be used. */
7311 : 104840 : if (type_align < mode_align)
7312 : 95 : return boolean_false_node;
7313 : :
7314 : : /* Check if a compare_and_swap pattern exists for the mode which represents
7315 : : the required size. The pattern is not allowed to fail, so the existence
7316 : : of the pattern indicates support is present. Also require that an
7317 : : atomic load exists for the required size. */
7318 : 104745 : if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
7319 : 104701 : return boolean_true_node;
7320 : : else
7321 : 44 : return boolean_false_node;
7322 : : }
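 :            : /* For illustration, what the folder above can decide at compile time
 :            :    (the answer is target-dependent; it holds on typical targets):
 :            :
 :            :      _Static_assert (__atomic_always_lock_free (sizeof (int), 0),
 :            :                      "atomic int expected to be lock free");
 :            :
 :            :    A zero second argument requests typical alignment; a "fake pointer"
 :            :    such as (void *) 2 instead encodes 2-byte alignment, which can make
 :            :    the result false for wider modes.  */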
7323 : :
7324 : : /* Return true if the parameters to call EXP represent an object which will
7325 : : always generate lock free instructions. The first argument represents the
7326 : : size of the object, and the second parameter is a pointer to the object
7327 : : itself. If NULL is passed for the object, then the result is based on
7328 : : typical alignment for an object of the specified size. Otherwise return
7329 : : false. */
7330 : :
7331 : : static rtx
7332 : 1 : expand_builtin_atomic_always_lock_free (tree exp)
7333 : : {
7334 : 1 : tree size;
7335 : 1 : tree arg0 = CALL_EXPR_ARG (exp, 0);
7336 : 1 : tree arg1 = CALL_EXPR_ARG (exp, 1);
7337 : :
7338 : 1 : if (TREE_CODE (arg0) != INTEGER_CST)
7339 : : {
7340 : 1 : error ("non-constant argument 1 to %qs", "__atomic_always_lock_free");
7341 : 1 : return const0_rtx;
7342 : : }
7343 : :
7344 : 0 : size = fold_builtin_atomic_always_lock_free (arg0, arg1);
7345 : 0 : if (size == boolean_true_node)
7346 : 0 : return const1_rtx;
7347 : 0 : return const0_rtx;
7348 : : }
7349 : :
7350 :            : /* Return boolean_true_node if it can be determined that object ARG1 of
7351 :            :    size ARG0 is lock free on this architecture; otherwise return NULL_TREE. */
7352 : :
7353 : : static tree
7354 : 40041 : fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
7355 : : {
7356 : 40041 : if (!flag_inline_atomics)
7357 : : return NULL_TREE;
7358 : :
7359 : : /* If it isn't always lock free, don't generate a result. */
7360 : 40032 : if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
7361 : : return boolean_true_node;
7362 : :
7363 : : return NULL_TREE;
7364 : : }
7365 : :
7366 :            : /* Return one if the parameters to call EXP represent an object which is
7367 :            :    known at compile time to always generate lock free instructions.  The
7368 :            :    first argument represents the size of the object, and the second is a
7369 :            :    pointer to the object itself.  If NULL is passed for the object, the
7370 :            :    result is based on typical alignment for an object of the specified
7371 :            :    size.  Otherwise return NULL_RTX and let a library call decide at run time.  */
7372 : :
7373 : : static rtx
7374 : 3 : expand_builtin_atomic_is_lock_free (tree exp)
7375 : : {
7376 : 3 : tree size;
7377 : 3 : tree arg0 = CALL_EXPR_ARG (exp, 0);
7378 : 3 : tree arg1 = CALL_EXPR_ARG (exp, 1);
7379 : :
7380 : 3 : if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
7381 : : {
7382 : 0 : error ("non-integer argument 1 to %qs", "__atomic_is_lock_free");
7383 : 0 : return NULL_RTX;
7384 : : }
7385 : :
7386 : 3 : if (!flag_inline_atomics)
7387 : : return NULL_RTX;
7388 : :
7389 : : /* If the value is known at compile time, return the RTX for it. */
7390 : 2 : size = fold_builtin_atomic_is_lock_free (arg0, arg1);
7391 : 2 : if (size == boolean_true_node)
7392 : 0 : return const1_rtx;
7393 : :
7394 : : return NULL_RTX;
7395 : : }
7396 : :
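 :            : /* Usage sketch (an illustration): unlike __atomic_always_lock_free,
 :            :    this builtin may be resolved at run time.  When the fold above
 :            :    cannot prove "always lock free", NULL_RTX is returned and a call to
 :            :    the runtime library's __atomic_is_lock_free is emitted instead:
 :            :
 :            :      if (__atomic_is_lock_free (sizeof (obj), &obj))
 :            :        fast_path ();      // compile-time or run-time answer
 :            :      else
 :            :        fallback_path ();  */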
7397 : : /* Expand the __atomic_thread_fence intrinsic:
7398 : : void __atomic_thread_fence (enum memmodel)
7399 : : EXP is the CALL_EXPR. */
7400 : :
7401 : : static void
7402 : 693 : expand_builtin_atomic_thread_fence (tree exp)
7403 : : {
7404 : 693 : enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
7405 : 693 : expand_mem_thread_fence (model);
7406 : 693 : }
7407 : :
7408 : : /* Expand the __atomic_signal_fence intrinsic:
7409 : : void __atomic_signal_fence (enum memmodel)
7410 : : EXP is the CALL_EXPR. */
7411 : :
7412 : : static void
7413 : 60 : expand_builtin_atomic_signal_fence (tree exp)
7414 : : {
7415 : 60 : enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
7416 : 60 : expand_mem_signal_fence (model);
7417 : 60 : }
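 :            : /* For illustration, the difference between the two fences:
 :            :
 :            :      __atomic_thread_fence (__ATOMIC_SEQ_CST);  // may emit a hardware
 :            :                                                 // barrier (e.g. mfence)
 :            :      __atomic_signal_fence (__ATOMIC_SEQ_CST);  // compiler-only barrier,
 :            :                                                 // ordering against a
 :            :                                                 // signal handler on the
 :            :                                                 // same thread
 :            :
 :            :    Both still constrain compiler reordering of memory accesses.  */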
7418 : :
7419 : : /* Expand the __sync_synchronize intrinsic. */
7420 : :
7421 : : static void
7422 : 273 : expand_builtin_sync_synchronize (void)
7423 : : {
7424 : 0 : expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
7425 : 0 : }
7426 : :
7427 : : static rtx
7428 : 3 : expand_builtin_thread_pointer (tree exp, rtx target)
7429 : : {
7430 : 3 : enum insn_code icode;
7431 : 3 : if (!validate_arglist (exp, VOID_TYPE))
7432 : 0 : return const0_rtx;
7433 : 3 : icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
7434 : 3 : if (icode != CODE_FOR_nothing)
7435 : : {
7436 : 3 : class expand_operand op;
7437 :            :       /* If the target is not suitable, create a new one.  */
7438 : 3 : if (target == NULL_RTX
7439 : 3 : || !REG_P (target)
7440 : 6 : || GET_MODE (target) != Pmode)
7441 : 0 : target = gen_reg_rtx (Pmode);
7442 : 3 : create_output_operand (&op, target, Pmode);
7443 : 3 : expand_insn (icode, 1, &op);
7444 : 3 : return target;
7445 : : }
7446 : 0 : error ("%<__builtin_thread_pointer%> is not supported on this target");
7447 : 0 : return const0_rtx;
7448 : : }
7449 : :
7450 : : static void
7451 : 0 : expand_builtin_set_thread_pointer (tree exp)
7452 : : {
7453 : 0 : enum insn_code icode;
7454 : 0 : if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7455 : : return;
7456 : 0 : icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
7457 : 0 : if (icode != CODE_FOR_nothing)
7458 : : {
7459 : 0 : class expand_operand op;
7460 : 0 : rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
7461 : 0 : Pmode, EXPAND_NORMAL);
7462 : 0 : create_input_operand (&op, val, Pmode);
7463 : 0 : expand_insn (icode, 1, &op);
7464 : 0 : return;
7465 : : }
7466 : 0 : error ("%<__builtin_set_thread_pointer%> is not supported on this target");
7467 : : }
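 :            : /* Usage sketch (an illustration): on targets with a dedicated TLS
 :            :    register this expands to a single register read, and TLS variables
 :            :    live at ABI-defined offsets from it:
 :            :
 :            :      void *tcb = __builtin_thread_pointer ();
 :            :
 :            :    __builtin_set_thread_pointer is the counterpart used by low-level
 :            :    runtime code that installs the thread pointer.  */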
7468 : :
7469 : :
7470 : : /* Emit code to restore the current value of stack. */
7471 : :
7472 : : static void
7473 : 1693 : expand_stack_restore (tree var)
7474 : : {
7475 : 1693 : rtx_insn *prev;
7476 : 1693 : rtx sa = expand_normal (var);
7477 : :
7478 : 1693 : sa = convert_memory_address (Pmode, sa);
7479 : :
7480 : 1693 : prev = get_last_insn ();
7481 : 1693 : emit_stack_restore (SAVE_BLOCK, sa);
7482 : :
7483 : 1693 : record_new_stack_level ();
7484 : :
7485 : 1693 : fixup_args_size_notes (prev, get_last_insn (), 0);
7486 : 1693 : }
7487 : :
7488 : : /* Emit code to save the current value of stack. */
7489 : :
7490 : : static rtx
7491 : 1794 : expand_stack_save (void)
7492 : : {
7493 : 1794 : rtx ret = NULL_RTX;
7494 : :
7495 : 0 : emit_stack_save (SAVE_BLOCK, &ret);
7496 : 1794 : return ret;
7497 : : }
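 :            : /* For illustration, where these typically come from: gimplification
 :            :    brackets variable-length-array scopes with a save/restore pair so
 :            :    that each iteration releases its allocation:
 :            :
 :            :      for (int i = 0; i < n; i++)
 :            :        {                    // __builtin_stack_save ()
 :            :          char buf[i + 1];   // VLA allocation
 :            :          use (buf);
 :            :        }                    // __builtin_stack_restore ()  */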
7498 : :
7499 : : /* Emit code to get the openacc gang, worker or vector id or size. */
7500 :            : /* Emit code to get the OpenACC gang, worker or vector id or size.  */
7501 : : static rtx
7502 : 356 : expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
7503 : : {
7504 : 356 : const char *name;
7505 : 356 : rtx fallback_retval;
7506 : 356 : rtx_insn *(*gen_fn) (rtx, rtx);
7507 : 356 : switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
7508 : : {
7509 : 244 : case BUILT_IN_GOACC_PARLEVEL_ID:
7510 : 244 : name = "__builtin_goacc_parlevel_id";
7511 : 244 : fallback_retval = const0_rtx;
7512 : 244 : gen_fn = targetm.gen_oacc_dim_pos;
7513 : 244 : break;
7514 : 112 : case BUILT_IN_GOACC_PARLEVEL_SIZE:
7515 : 112 : name = "__builtin_goacc_parlevel_size";
7516 : 112 : fallback_retval = const1_rtx;
7517 : 112 : gen_fn = targetm.gen_oacc_dim_size;
7518 : 112 : break;
7519 : 0 : default:
7520 : 0 : gcc_unreachable ();
7521 : : }
7522 : :
7523 : 356 : if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
7524 : : {
7525 : 8 : error ("%qs only supported in OpenACC code", name);
7526 : 8 : return const0_rtx;
7527 : : }
7528 : :
7529 : 348 : tree arg = CALL_EXPR_ARG (exp, 0);
7530 : 348 : if (TREE_CODE (arg) != INTEGER_CST)
7531 : : {
7532 : 8 : error ("non-constant argument 0 to %qs", name);
7533 : 8 : return const0_rtx;
7534 : : }
7535 : :
7536 : 340 : int dim = TREE_INT_CST_LOW (arg);
7537 : 340 : switch (dim)
7538 : : {
7539 : 324 : case GOMP_DIM_GANG:
7540 : 324 : case GOMP_DIM_WORKER:
7541 : 324 : case GOMP_DIM_VECTOR:
7542 : 324 : break;
7543 : 16 : default:
7544 : 16 : error ("illegal argument 0 to %qs", name);
7545 : 16 : return const0_rtx;
7546 : : }
7547 : :
7548 : 324 : if (ignore)
7549 : : return target;
7550 : :
7551 : 180 : if (target == NULL_RTX)
7552 : 0 : target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7553 : :
7554 : 180 : if (!targetm.have_oacc_dim_size ())
7555 : : {
7556 : 180 : emit_move_insn (target, fallback_retval);
7557 : 180 : return target;
7558 : : }
7559 : :
7560 : 0 : rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
7561 : 0 : emit_insn (gen_fn (reg, GEN_INT (dim)));
7562 : 0 : if (reg != target)
7563 : 0 : emit_move_insn (target, reg);
7564 : :
7565 : : return target;
7566 : : }
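 :            : /* Usage sketch (an illustration, assuming an OpenACC offloaded
 :            :    region): the argument selects the axis using the GOMP_DIM_* values
 :            :    (0 = gang, 1 = worker, 2 = vector) and must be a constant:
 :            :
 :            :      int gang_id  = __builtin_goacc_parlevel_id (0);
 :            :      int vec_size = __builtin_goacc_parlevel_size (2);
 :            :
 :            :    Outside OpenACC code, or with a non-constant or out-of-range
 :            :    argument, the checks above diagnose the call.  */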
7567 : :
7568 :            : /* Expand a string compare operation using a sequence of char comparisons
7569 :            :    to get rid of the call overhead, with the result going to TARGET if
7570 : : that's convenient.
7571 : :
7572 : : VAR_STR is the variable string source;
7573 : : CONST_STR is the constant string source;
7574 : : LENGTH is the number of chars to compare;
7575 : : CONST_STR_N indicates which source string is the constant string;
7576 :            :    MODE is the machine mode in which the result is computed.
7577 : :
7578 :            :   The expansion is (assuming const_str_n is 2, i.e., arg2 is a constant string):
7579 : :
7580 : : target = (int) (unsigned char) var_str[0]
7581 : : - (int) (unsigned char) const_str[0];
7582 : : if (target != 0)
7583 : : goto ne_label;
7584 : : ...
7585 : : target = (int) (unsigned char) var_str[length - 2]
7586 : : - (int) (unsigned char) const_str[length - 2];
7587 : : if (target != 0)
7588 : : goto ne_label;
7589 : : target = (int) (unsigned char) var_str[length - 1]
7590 : : - (int) (unsigned char) const_str[length - 1];
7591 : : ne_label:
7592 : : */
7593 : :
7594 : : static rtx
7595 : 622 : inline_string_cmp (rtx target, tree var_str, const char *const_str,
7596 : : unsigned HOST_WIDE_INT length,
7597 : : int const_str_n, machine_mode mode)
7598 : : {
7599 : 622 : HOST_WIDE_INT offset = 0;
7600 : 622 : rtx var_rtx_array
7601 : 622 : = get_memory_rtx (var_str, build_int_cst (unsigned_type_node,length));
7602 : 622 : rtx var_rtx = NULL_RTX;
7603 : 622 : rtx const_rtx = NULL_RTX;
7604 : 622 : rtx result = target ? target : gen_reg_rtx (mode);
7605 : 622 : rtx_code_label *ne_label = gen_label_rtx ();
7606 : 622 : tree unit_type_node = unsigned_char_type_node;
7607 : 622 : scalar_int_mode unit_mode
7608 : 622 : = as_a <scalar_int_mode> TYPE_MODE (unit_type_node);
7609 : :
7610 : 622 : start_sequence ();
7611 : :
7612 : 2276 : for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
7613 : : {
7614 : 1654 : var_rtx
7615 : 1654 : = adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
7616 : 1654 : const_rtx = c_readstr (const_str + offset, unit_mode);
7617 : 1654 : rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
7618 : 1504 : rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;
7619 : :
7620 : 1654 : op0 = convert_modes (mode, unit_mode, op0, 1);
7621 : 1654 : op1 = convert_modes (mode, unit_mode, op1, 1);
7622 : 1654 : rtx diff = expand_simple_binop (mode, MINUS, op0, op1,
7623 : : result, 1, OPTAB_WIDEN);
7624 : :
7625 : : /* Force the difference into result register. We cannot reassign
7626 : : result here ("result = diff") or we may end up returning
7627 : : uninitialized result when expand_simple_binop allocates a new
7628 : : pseudo-register for returning. */
7629 : 1654 : if (diff != result)
7630 : 0 : emit_move_insn (result, diff);
7631 : :
7632 : 1654 : if (i < length - 1)
7633 : 1032 : emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
7634 : : mode, true, ne_label);
7635 : 3308 : offset += GET_MODE_SIZE (unit_mode);
7636 : : }
7637 : :
7638 : 622 : emit_label (ne_label);
7639 : 622 : rtx_insn *insns = end_sequence ();
7640 : 622 : emit_insn (insns);
7641 : :
7642 : 622 : return result;
7643 : : }
7644 : :
7645 : : /* Inline expansion of a call to str(n)cmp and memcmp, with result going
7646 :            : /* Inline expansion of a call to str(n)cmp or memcmp, with the result
7647 :            :    going to TARGET if that's convenient.
7648 :            :    If the call cannot be inlined, return NULL_RTX.  */
7649 : : static rtx
7650 : 144301 : inline_expand_builtin_bytecmp (tree exp, rtx target)
7651 : : {
7652 : 144301 : tree fndecl = get_callee_fndecl (exp);
7653 : 144301 : enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7654 : 144301 : bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);
7655 : :
7656 : : /* Do NOT apply this inlining expansion when optimizing for size or
7657 : : optimization level below 2 or if unused *cmp hasn't been DCEd. */
7658 :            :      optimization level below 2, or if the result of an unused *cmp has not been DCEd.  */
7659 : 121981 : return NULL_RTX;
7660 : :
7661 : 22320 : gcc_checking_assert (fcode == BUILT_IN_STRCMP
7662 : : || fcode == BUILT_IN_STRNCMP
7663 : : || fcode == BUILT_IN_MEMCMP);
7664 : :
7665 :            :   /* On a target where the type of the call (int) has the same or narrower
7666 :            :      precision than unsigned char, give up on the inlining expansion.  */
7667 : 22320 : if (TYPE_PRECISION (unsigned_char_type_node)
7668 : 22320 : >= TYPE_PRECISION (TREE_TYPE (exp)))
7669 : : return NULL_RTX;
7670 : :
7671 : 22320 : tree arg1 = CALL_EXPR_ARG (exp, 0);
7672 : 22320 : tree arg2 = CALL_EXPR_ARG (exp, 1);
7673 : 22320 : tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;
7674 : :
7675 : 22320 : unsigned HOST_WIDE_INT len1 = 0;
7676 : 22320 : unsigned HOST_WIDE_INT len2 = 0;
7677 : 22320 : unsigned HOST_WIDE_INT len3 = 0;
7678 : :
7679 : : /* Get the object representation of the initializers of ARG1 and ARG2
7680 : : as strings, provided they refer to constant objects, with their byte
7681 : : sizes in LEN1 and LEN2, respectively. */
7682 : 22320 : const char *bytes1 = getbyterep (arg1, &len1);
7683 : 22320 : const char *bytes2 = getbyterep (arg2, &len2);
7684 : :
7685 : : /* Fail if neither argument refers to an initialized constant. */
7686 : 22320 : if (!bytes1 && !bytes2)
7687 : : return NULL_RTX;
7688 : :
7689 : 18419 : if (is_ncmp)
7690 : : {
7691 : : /* Fail if the memcmp/strncmp bound is not a constant. */
7692 : 1291 : if (!tree_fits_uhwi_p (len3_tree))
7693 : : return NULL_RTX;
7694 : :
7695 : 948 : len3 = tree_to_uhwi (len3_tree);
7696 : :
7697 : 948 : if (fcode == BUILT_IN_MEMCMP)
7698 : : {
7699 : : /* Fail if the memcmp bound is greater than the size of either
7700 : : of the two constant objects. */
7701 : 449 : if ((bytes1 && len1 < len3)
7702 : 449 : || (bytes2 && len2 < len3))
7703 : : return NULL_RTX;
7704 : : }
7705 : : }
7706 : :
7707 : : if (fcode != BUILT_IN_MEMCMP)
7708 : : {
7709 : : /* For string functions (i.e., strcmp and strncmp) reduce LEN1
7710 : : and LEN2 to the length of the nul-terminated string stored
7711 : : in each. */
7712 : 17627 : if (bytes1 != NULL)
7713 : 236 : len1 = strnlen (bytes1, len1) + 1;
7714 : 17627 : if (bytes2 != NULL)
7715 : 17394 : len2 = strnlen (bytes2, len2) + 1;
7716 : : }
7717 : :
7718 : : /* See inline_string_cmp. */
7719 : 18072 : int const_str_n;
7720 : 18072 : if (!len1)
7721 : : const_str_n = 2;
7722 : 241 : else if (!len2)
7723 : : const_str_n = 1;
7724 : 3 : else if (len2 > len1)
7725 : : const_str_n = 1;
7726 : : else
7727 : : const_str_n = 2;
7728 : :
7729 : : /* For strncmp only, compute the new bound as the smallest of
7730 : : the lengths of the two strings (plus 1) and the bound provided
7731 : : to the function. */
7732 : 17833 : unsigned HOST_WIDE_INT bound = (const_str_n == 1) ? len1 : len2;
7733 : 18072 : if (is_ncmp && len3 < bound)
7734 : 520 : bound = len3;
7735 : :
7736 : : /* If the bound of the comparison is larger than the threshold,
7737 : : do nothing. */
7738 : 18072 : if (bound > (unsigned HOST_WIDE_INT) param_builtin_string_cmp_inline_length)
7739 : : return NULL_RTX;
7740 : :
7741 : 622 : machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
7742 : :
7743 :            :   /* Now, start inline expansion of the call.  */
7744 : 1187 : return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
7745 : : (const_str_n == 1) ? bytes1 : bytes2, bound,
7746 : 622 : const_str_n, mode);
7747 : : }
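 :            : /* For illustration, the inlining decision above, assuming -O2 and the
 :            :    default of --param builtin-string-cmp-inline-length (3 at the time
 :            :    of writing):
 :            :
 :            :      int f (const char *s) { return strcmp (s, "hi"); }
 :            :        // bound = strlen ("hi") + 1 = 3 -> expanded to byte compares
 :            :      int g (const char *s) { return strcmp (s, "hello, world"); }
 :            :        // bound exceeds the threshold   -> stays a library call  */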
7748 : :
7749 : : /* Expand a call to __builtin_speculation_safe_value_<N>. MODE
7750 : : represents the size of the first argument to that call, or VOIDmode
7751 : : if the argument is a pointer. IGNORE will be true if the result
7752 : : isn't used. */
7753 : : static rtx
7754 : 34 : expand_speculation_safe_value (machine_mode mode, tree exp, rtx target,
7755 : : bool ignore)
7756 : : {
7757 : 34 : rtx val, failsafe;
7758 : 34 : unsigned nargs = call_expr_nargs (exp);
7759 : :
7760 : 34 : tree arg0 = CALL_EXPR_ARG (exp, 0);
7761 : :
7762 : 34 : if (mode == VOIDmode)
7763 : : {
7764 : 4 : mode = TYPE_MODE (TREE_TYPE (arg0));
7765 : 4 : gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
7766 : : }
7767 : :
7768 : 34 : val = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
7769 : :
7770 : : /* An optional second argument can be used as a failsafe value on
7771 : : some machines. If it isn't present, then the failsafe value is
7772 : : assumed to be 0. */
7773 : 34 : if (nargs > 1)
7774 : : {
7775 : 4 : tree arg1 = CALL_EXPR_ARG (exp, 1);
7776 : 4 : failsafe = expand_expr (arg1, NULL_RTX, mode, EXPAND_NORMAL);
7777 : : }
7778 : : else
7779 : 30 : failsafe = const0_rtx;
7780 : :
7781 : : /* If the result isn't used, the behavior is undefined. It would be
7782 : : nice to emit a warning here, but path splitting means this might
7783 : : happen with legitimate code. So simply drop the builtin
7784 : : expansion in that case; we've handled any side-effects above. */
7785 : 34 : if (ignore)
7786 : 0 : return const0_rtx;
7787 : :
7788 : : /* If we don't have a suitable target, create one to hold the result. */
7789 : 34 : if (target == NULL || GET_MODE (target) != mode)
7790 : 0 : target = gen_reg_rtx (mode);
7791 : :
7792 : 34 : if (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)
7793 : 0 : val = convert_modes (mode, VOIDmode, val, false);
7794 : :
7795 : 34 : return targetm.speculation_safe_value (mode, target, val, failsafe);
7796 : : }
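 :            : /* For illustration, the classic use (Spectre variant 1 hardening):
 :            :    force the index to its failsafe value on a misspeculated path past
 :            :    the bounds check:
 :            :
 :            :      int load (unsigned i)
 :            :      {
 :            :        if (i < TABLE_SIZE)
 :            :          return table[__builtin_speculation_safe_value (i)];
 :            :        return 0;
 :            :      }
 :            :
 :            :    On targets without speculation barriers the hook typically returns
 :            :    the value unchanged (with a diagnostic).  */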
7797 : :
7798 : : /* Expand CRC* or REV_CRC* built-ins. */
7799 : :
7800 : : rtx
7801 : 34 : expand_builtin_crc_table_based (internal_fn fn, scalar_mode crc_mode,
7802 : : scalar_mode data_mode, machine_mode mode,
7803 : : tree exp, rtx target)
7804 : : {
7805 : 34 : tree rhs1 = CALL_EXPR_ARG (exp, 0); // crc
7806 : 34 : tree rhs2 = CALL_EXPR_ARG (exp, 1); // data
7807 : 34 : tree rhs3 = CALL_EXPR_ARG (exp, 2); // polynomial
7808 : :
7809 : 34 : if (!target || mode == VOIDmode)
7810 : 0 : target = gen_reg_rtx (crc_mode);
7811 : :
7812 : 34 : rtx op1 = expand_normal (rhs1);
7813 : 34 : rtx op2 = expand_normal (rhs2);
7814 : 34 : rtx op3;
7815 : 34 : if (TREE_CODE (rhs3) != INTEGER_CST)
7816 : : {
7817 : 1 : error ("third argument to %<crc%> builtins must be a constant");
7818 : 1 : op3 = const0_rtx;
7819 : : }
7820 : : else
7821 : 33 : op3 = convert_to_mode (crc_mode, expand_normal (rhs3), 0);
7822 : :
7823 : 34 : if (CONST_INT_P (op2))
7824 : 33 : op2 = convert_to_mode (crc_mode, op2, 0);
7825 : :
7826 : 34 : if (fn == IFN_CRC)
7827 : 17 : expand_crc_table_based (target, op1, op2, op3, data_mode);
7828 : : else
7829 : : /* If it's IFN_CRC_REV generate bit-reversed CRC. */
7830 : 17 : expand_reversed_crc_table_based (target, op1, op2, op3,
7831 : : data_mode,
7832 : : generate_reflecting_code_standard);
7833 : 34 : return target;
7834 : : }
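 :            : /* Usage sketch, assuming the GCC 15-style __builtin_crc* spellings
 :            :    (arguments: current crc, new data, constant polynomial):
 :            :
 :            :      uint16_t
 :            :      crc16_step (uint16_t crc, uint8_t byte)
 :            :      {
 :            :        return __builtin_crc16_data8 (crc, byte, 0x1021);  // CRC-16/CCITT
 :            :      }
 :            :
 :            :    The bit-reversed variants (__builtin_rev_crc*) map to IFN_CRC_REV
 :            :    and expect the polynomial in reflected form.  */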
7835 : :
7836 : : /* Expand an expression EXP that calls a built-in function,
7837 : : with result going to TARGET if that's convenient
7838 : : (and in mode MODE if that's convenient).
7839 : : SUBTARGET may be used as the target for computing one of EXP's operands.
7840 : : IGNORE is nonzero if the value is to be ignored. */
7841 : :
7842 : : rtx
7843 : 1973491 : expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
7844 : : int ignore)
7845 : : {
7846 : 1973491 : tree fndecl = get_callee_fndecl (exp);
7847 : 1973491 : machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
7848 : 1973491 : int flags;
7849 : :
7850 : 1973491 : if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7851 : 172894 : return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7852 : :
7853 : : /* When ASan is enabled, we don't want to expand some memory/string
7854 : : builtins and rely on libsanitizer's hooks. This allows us to avoid
7855 : : redundant checks and be sure, that possible overflow will be detected
7856 : : by ASan. */
7857 : :
7858 : 1800597 : enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7859 : 1800597 : if (param_asan_kernel_mem_intrinsic_prefix
7860 : 1800597 : && sanitize_flags_p (SANITIZE_KERNEL_ADDRESS
7861 : : | SANITIZE_KERNEL_HWADDRESS))
7862 : 42 : switch (fcode)
7863 : : {
7864 : 42 : rtx save_decl_rtl, ret;
7865 : 42 : case BUILT_IN_MEMCPY:
7866 : 42 : case BUILT_IN_MEMMOVE:
7867 : 42 : case BUILT_IN_MEMSET:
7868 : 42 : save_decl_rtl = DECL_RTL (fndecl);
7869 : 42 : DECL_RTL (fndecl) = asan_memfn_rtl (fndecl);
7870 : 42 : ret = expand_call (exp, target, ignore);
7871 : 42 : DECL_RTL (fndecl) = save_decl_rtl;
7872 : 42 : return ret;
7873 : : default:
7874 : : break;
7875 : : }
7876 : 1800555 : if (sanitize_flags_p (SANITIZE_ADDRESS | SANITIZE_HWADDRESS)
7877 : 1800555 : && asan_intercepted_p (fcode))
7878 : 757 : return expand_call (exp, target, ignore);
7879 : :
7880 : : /* When not optimizing, generate calls to library functions for a certain
7881 : : set of builtins. */
7882 : 1799798 : if (!optimize
7883 : 421056 : && !called_as_built_in (fndecl)
7884 : 141852 : && fcode != BUILT_IN_FORK
7885 : 141852 : && fcode != BUILT_IN_EXECL
7886 : 141841 : && fcode != BUILT_IN_EXECV
7887 : 141841 : && fcode != BUILT_IN_EXECLP
7888 : 141839 : && fcode != BUILT_IN_EXECLE
7889 : 141839 : && fcode != BUILT_IN_EXECVP
7890 : 141838 : && fcode != BUILT_IN_EXECVE
7891 : 141838 : && fcode != BUILT_IN_CLEAR_CACHE
7892 : 141835 : && !ALLOCA_FUNCTION_CODE_P (fcode)
7893 : 141781 : && fcode != BUILT_IN_FREE
7894 : 139693 : && (fcode != BUILT_IN_MEMSET
7895 : 55888 : || !(flag_inline_stringops & ILSOP_MEMSET))
7896 : 139690 : && (fcode != BUILT_IN_MEMCPY
7897 : 918 : || !(flag_inline_stringops & ILSOP_MEMCPY))
7898 : 139554 : && (fcode != BUILT_IN_MEMMOVE
7899 : 347 : || !(flag_inline_stringops & ILSOP_MEMMOVE))
7900 : 1939352 : && (fcode != BUILT_IN_MEMCMP
7901 : 2504 : || !(flag_inline_stringops & ILSOP_MEMCMP)))
7902 : 139506 : return expand_call (exp, target, ignore);
7903 : :
7904 : : /* The built-in function expanders test for target == const0_rtx
7905 : : to determine whether the function's result will be ignored. */
7906 : 1660292 : if (ignore)
7907 : 969151 : target = const0_rtx;
7908 : :
7909 : : /* If the result of a pure or const built-in function is ignored, and
7910 : : none of its arguments are volatile, we can avoid expanding the
7911 : : built-in call and just evaluate the arguments for side-effects. */
7912 : 1660292 : if (target == const0_rtx
7913 : 969151 : && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
7914 : 1666776 : && !(flags & ECF_LOOPING_CONST_OR_PURE))
7915 : : {
7916 : 6 : bool volatilep = false;
7917 : 6 : tree arg;
7918 : 6 : call_expr_arg_iterator iter;
7919 : :
7920 : 20 : FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7921 : 8 : if (TREE_THIS_VOLATILE (arg))
7922 : : {
7923 : : volatilep = true;
7924 : : break;
7925 : : }
7926 : :
7927 : 6 : if (! volatilep)
7928 : : {
7929 : 20 : FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7930 : 8 : expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
7931 : 6 : return const0_rtx;
7932 : : }
7933 : : }
7934 : :
7935 : 1660286 : switch (fcode)
7936 : : {
7937 : 4 : CASE_FLT_FN (BUILT_IN_FABS):
7938 : 4 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
7939 : 4 : case BUILT_IN_FABSD32:
7940 : 4 : case BUILT_IN_FABSD64:
7941 : 4 : case BUILT_IN_FABSD128:
7942 : 4 : case BUILT_IN_FABSD64X:
7943 : 4 : target = expand_builtin_fabs (exp, target, subtarget);
7944 : 4 : if (target)
7945 : 826686 : return target;
7946 : 833600 : break;
7947 : :
7948 : 11674 : CASE_FLT_FN (BUILT_IN_COPYSIGN):
7949 : 11674 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
7950 : 11674 : target = expand_builtin_copysign (exp, target, subtarget);
7951 : 11674 : if (target)
7952 : : return target;
7953 : : break;
7954 : :
7955 : : /* Just do a normal library call if we were unable to fold
7956 : : the values. */
7957 : : CASE_FLT_FN (BUILT_IN_CABS):
7958 : : CASE_FLT_FN_FLOATN_NX (BUILT_IN_CABS):
7959 : : break;
7960 : :
7961 : 364 : CASE_FLT_FN (BUILT_IN_FMA):
7962 : 364 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
7963 : 364 : target = expand_builtin_mathfn_ternary (exp, target, subtarget);
7964 : 364 : if (target)
7965 : : return target;
7966 : : break;
7967 : :
7968 : 235 : CASE_FLT_FN (BUILT_IN_ILOGB):
7969 : 235 : if (! flag_unsafe_math_optimizations)
7970 : : break;
7971 : 4 : gcc_fallthrough ();
7972 : 4 : CASE_FLT_FN (BUILT_IN_ISINF):
7973 : 4 : CASE_FLT_FN (BUILT_IN_FINITE):
7974 : 4 : CASE_FLT_FN (BUILT_IN_ISNAN):
7975 : 4 : case BUILT_IN_ISFINITE:
7976 : 4 : case BUILT_IN_ISNORMAL:
7977 : 4 : target = expand_builtin_interclass_mathfn (exp, target);
7978 : 4 : if (target)
7979 : : return target;
7980 : : break;
7981 : :
7982 : 823 : case BUILT_IN_ISSIGNALING:
7983 : 823 : target = expand_builtin_issignaling (exp, target);
7984 : 823 : if (target)
7985 : : return target;
7986 : : break;
7987 : :
7988 : 231 : CASE_FLT_FN (BUILT_IN_ICEIL):
7989 : 231 : CASE_FLT_FN (BUILT_IN_LCEIL):
7990 : 231 : CASE_FLT_FN (BUILT_IN_LLCEIL):
7991 : 231 : CASE_FLT_FN (BUILT_IN_LFLOOR):
7992 : 231 : CASE_FLT_FN (BUILT_IN_IFLOOR):
7993 : 231 : CASE_FLT_FN (BUILT_IN_LLFLOOR):
7994 : 231 : target = expand_builtin_int_roundingfn (exp, target);
7995 : 231 : if (target)
7996 : : return target;
7997 : : break;
7998 : :
7999 : 591 : CASE_FLT_FN (BUILT_IN_IRINT):
8000 : 591 : CASE_FLT_FN (BUILT_IN_LRINT):
8001 : 591 : CASE_FLT_FN (BUILT_IN_LLRINT):
8002 : 591 : CASE_FLT_FN (BUILT_IN_IROUND):
8003 : 591 : CASE_FLT_FN (BUILT_IN_LROUND):
8004 : 591 : CASE_FLT_FN (BUILT_IN_LLROUND):
8005 : 591 : target = expand_builtin_int_roundingfn_2 (exp, target);
8006 : 591 : if (target)
8007 : : return target;
8008 : : break;
8009 : :
8010 : 279 : CASE_FLT_FN (BUILT_IN_POWI):
8011 : 279 : target = expand_builtin_powi (exp, target);
8012 : 279 : if (target)
8013 : : return target;
8014 : : break;
8015 : :
8016 : 157 : CASE_FLT_FN (BUILT_IN_CEXPI):
8017 : 157 : target = expand_builtin_cexpi (exp, target);
8018 : 157 : gcc_assert (target);
8019 : : return target;
8020 : :
8021 : 990 : CASE_FLT_FN (BUILT_IN_SIN):
8022 : 990 : CASE_FLT_FN (BUILT_IN_COS):
8023 : 990 : if (! flag_unsafe_math_optimizations)
8024 : : break;
8025 : 47 : target = expand_builtin_mathfn_3 (exp, target, subtarget);
8026 : 47 : if (target)
8027 : : return target;
8028 : : break;
8029 : :
8030 : 151 : CASE_FLT_FN (BUILT_IN_SINCOS):
8031 : 151 : if (! flag_unsafe_math_optimizations)
8032 : : break;
8033 : 3 : target = expand_builtin_sincos (exp);
8034 : 3 : if (target)
8035 : : return target;
8036 : : break;
8037 : :
8038 : 60 : case BUILT_IN_FEGETROUND:
8039 : 60 : target = expand_builtin_fegetround (exp, target, target_mode);
8040 : 60 : if (target)
8041 : : return target;
8042 : : break;
8043 : :
8044 : 1063 : case BUILT_IN_FECLEAREXCEPT:
8045 : 1063 : target = expand_builtin_feclear_feraise_except (exp, target, target_mode,
8046 : : feclearexcept_optab);
8047 : 1063 : if (target)
8048 : : return target;
8049 : : break;
8050 : :
8051 : 831 : case BUILT_IN_FERAISEEXCEPT:
8052 : 831 : target = expand_builtin_feclear_feraise_except (exp, target, target_mode,
8053 : : feraiseexcept_optab);
8054 : 831 : if (target)
8055 : : return target;
8056 : : break;
8057 : :
8058 : 464 : case BUILT_IN_APPLY_ARGS:
8059 : 464 : return expand_builtin_apply_args ();
8060 : :
8061 : : /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
8062 : : FUNCTION with a copy of the parameters described by
8063 : : ARGUMENTS, and ARGSIZE. It returns a block of memory
8064 : : allocated on the stack into which is stored all the registers
8065 : : that might possibly be used for returning the result of a
8066 : : function. ARGUMENTS is the value returned by
8067 : : __builtin_apply_args. ARGSIZE is the number of bytes of
8068 : : arguments that must be copied. ??? How should this value be
8069 : : computed? We'll also need a safe worst case value for varargs
8070 : : functions. */
8071 : 483 : case BUILT_IN_APPLY:
8072 : 483 : if (!validate_arglist (exp, POINTER_TYPE,
8073 : : POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
8074 : 483 : && !validate_arglist (exp, REFERENCE_TYPE,
8075 : : POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
8076 : 0 : return const0_rtx;
8077 : : else
8078 : : {
8079 : 483 : rtx ops[3];
8080 : :
8081 : 483 : ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
8082 : 483 : ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
8083 : 483 : ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
8084 : :
8085 : 483 : return expand_builtin_apply (ops[0], ops[1], ops[2]);
8086 : : }
8087 : :
8088 : : /* __builtin_return (RESULT) causes the function to return the
8089 : : value described by RESULT. RESULT is address of the block of
8090 : : memory returned by __builtin_apply. */
8091 : 379 : case BUILT_IN_RETURN:
8092 : 379 : if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
8093 : 379 : expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
8094 : 379 : return const0_rtx;
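 :            : /* For illustration, the documented use of this builtin family: a
 :            :    wrapper that forwards its incoming arguments to another function
 :            :    and returns whatever that function returned.  The 128 below is an
 :            :    assumed worst-case argument-block size (see the ??? note above):
 :            :
 :            :      void *args = __builtin_apply_args ();
 :            :      void *ret  = __builtin_apply ((void (*) ()) callee, args, 128);
 :            :      __builtin_return (ret);  */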
8095 : :
8096 : 0 : case BUILT_IN_SAVEREGS:
8097 : 0 : return expand_builtin_saveregs ();
8098 : :
8099 : 16 : case BUILT_IN_VA_ARG_PACK:
8100 : : /* All valid uses of __builtin_va_arg_pack () are removed during
8101 : : inlining. */
8102 : 16 : error ("invalid use of %<__builtin_va_arg_pack ()%>");
8103 : 16 : return const0_rtx;
8104 : :
8105 : 0 : case BUILT_IN_VA_ARG_PACK_LEN:
8106 : : /* All valid uses of __builtin_va_arg_pack_len () are removed during
8107 : : inlining. */
8108 : 0 : error ("invalid use of %<__builtin_va_arg_pack_len ()%>");
8109 : 0 : return const0_rtx;
8110 : :
8111 : : /* Return the address of the first anonymous stack arg. */
8112 : 150 : case BUILT_IN_NEXT_ARG:
8113 : 150 : if (fold_builtin_next_arg (exp, false))
8114 : 0 : return const0_rtx;
8115 : 150 : return expand_builtin_next_arg ();
8116 : :
8117 : 28 : case BUILT_IN_CLEAR_CACHE:
8118 : 28 : expand_builtin___clear_cache (exp);
8119 : 28 : return const0_rtx;
8120 : :
8121 : 0 : case BUILT_IN_CLASSIFY_TYPE:
8122 : 0 : return expand_builtin_classify_type (exp);
8123 : :
8124 : 0 : case BUILT_IN_CONSTANT_P:
8125 : 0 : return const0_rtx;
8126 : :
8127 : 15583 : case BUILT_IN_FRAME_ADDRESS:
8128 : 15583 : case BUILT_IN_RETURN_ADDRESS:
8129 : 15583 : return expand_builtin_frame_address (fndecl, exp);
8130 : :
8131 : 540 : case BUILT_IN_STACK_ADDRESS:
8132 : 540 : return expand_builtin_stack_address ();
8133 : :
8134 : 2159 : case BUILT_IN___STRUB_ENTER:
8135 : 2159 : target = expand_builtin_strub_enter (exp);
8136 : 2159 : if (target)
8137 : : return target;
8138 : : break;
8139 : :
8140 : 1072 : case BUILT_IN___STRUB_UPDATE:
8141 : 1072 : target = expand_builtin_strub_update (exp);
8142 : 1072 : if (target)
8143 : : return target;
8144 : : break;
8145 : :
8146 : 2729 : case BUILT_IN___STRUB_LEAVE:
8147 : 2729 : target = expand_builtin_strub_leave (exp);
8148 : 2729 : if (target)
8149 : : return target;
8150 : : break;
8151 : :
8152 : : /* Returns the address of the area where the structure is returned.
8153 : : 0 otherwise. */
8154 : 0 : case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
8155 : 0 : if (call_expr_nargs (exp) != 0
8156 : 0 : || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
8157 : 0 : || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
8158 : 0 : return const0_rtx;
8159 : : else
8160 : 0 : return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
8161 : :
8162 : 27944 : CASE_BUILT_IN_ALLOCA:
8163 : 27944 : target = expand_builtin_alloca (exp);
8164 : 27944 : if (target)
8165 : : return target;
8166 : : break;
8167 : :
8168 : 204 : case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
8169 : 204 : return expand_asan_emit_allocas_unpoison (exp);
8170 : :
8171 : 1794 : case BUILT_IN_STACK_SAVE:
8172 : 1794 : return expand_stack_save ();
8173 : :
8174 : 1693 : case BUILT_IN_STACK_RESTORE:
8175 : 1693 : expand_stack_restore (CALL_EXPR_ARG (exp, 0));
8176 : 1693 : return const0_rtx;
8177 : :
8178 : 1169 : case BUILT_IN_BSWAP16:
8179 : 1169 : case BUILT_IN_BSWAP32:
8180 : 1169 : case BUILT_IN_BSWAP64:
8181 : 1169 : case BUILT_IN_BSWAP128:
8182 : 1169 : target = expand_builtin_bswap (target_mode, exp, target, subtarget);
8183 : 1169 : if (target)
8184 : : return target;
8185 : : break;
8186 : :
8187 : 0 : CASE_INT_FN (BUILT_IN_FFS):
8188 : 0 : target = expand_builtin_unop (target_mode, exp, target,
8189 : : subtarget, ffs_optab);
8190 : 0 : if (target)
8191 : : return target;
8192 : : break;
8193 : :
8194 : 183 : CASE_INT_FN (BUILT_IN_CLZ):
8195 : 183 : target = expand_builtin_unop (target_mode, exp, target,
8196 : : subtarget, clz_optab);
8197 : 183 : if (target)
8198 : : return target;
8199 : : break;
8200 : :
8201 : 50 : CASE_INT_FN (BUILT_IN_CTZ):
8202 : 50 : target = expand_builtin_unop (target_mode, exp, target,
8203 : : subtarget, ctz_optab);
8204 : 50 : if (target)
8205 : : return target;
8206 : : break;
8207 : :
8208 : 90 : CASE_INT_FN (BUILT_IN_CLRSB):
8209 : 90 : target = expand_builtin_unop (target_mode, exp, target,
8210 : : subtarget, clrsb_optab);
8211 : 90 : if (target)
8212 : : return target;
8213 : : break;
8214 : :
8215 : 417 : CASE_INT_FN (BUILT_IN_POPCOUNT):
8216 : 417 : target = expand_builtin_unop (target_mode, exp, target,
8217 : : subtarget, popcount_optab);
8218 : 417 : if (target)
8219 : : return target;
8220 : : break;
8221 : :
8222 : 11 : CASE_INT_FN (BUILT_IN_PARITY):
8223 : 11 : target = expand_builtin_unop (target_mode, exp, target,
8224 : : subtarget, parity_optab);
8225 : 11 : if (target)
8226 : : return target;
8227 : : break;
8228 : :
8229 : 13857 : case BUILT_IN_STRLEN:
8230 : 13857 : target = expand_builtin_strlen (exp, target, target_mode);
8231 : 13857 : if (target)
8232 : : return target;
8233 : : break;
8234 : :
8235 : 580 : case BUILT_IN_STRNLEN:
8236 : 580 : target = expand_builtin_strnlen (exp, target, target_mode);
8237 : 580 : if (target)
8238 : : return target;
8239 : : break;
8240 : :
8241 : 1862 : case BUILT_IN_STRCPY:
8242 : 1862 : target = expand_builtin_strcpy (exp, target);
8243 : 1862 : if (target)
8244 : : return target;
8245 : : break;
8246 : :
8247 : 2169 : case BUILT_IN_STRNCPY:
8248 : 2169 : target = expand_builtin_strncpy (exp, target);
8249 : 2169 : if (target)
8250 : : return target;
8251 : : break;
8252 : :
8253 : 454 : case BUILT_IN_STPCPY:
8254 : 454 : target = expand_builtin_stpcpy (exp, target, mode);
8255 : 454 : if (target)
8256 : : return target;
8257 : : break;
8258 : :
8259 : 93267 : case BUILT_IN_MEMCPY:
8260 : 93267 : target = expand_builtin_memcpy (exp, target);
8261 : 93267 : if (target)
8262 : : return target;
8263 : : break;
8264 : :
8265 : 16231 : case BUILT_IN_MEMMOVE:
8266 : 16231 : target = expand_builtin_memmove (exp, target);
8267 : 16231 : if (target)
8268 : : return target;
8269 : : break;
8270 : :
8271 : 1634 : case BUILT_IN_MEMPCPY:
8272 : 1634 : target = expand_builtin_mempcpy (exp, target);
8273 : 1634 : if (target)
8274 : : return target;
8275 : : break;
8276 : :
8277 : 36179 : case BUILT_IN_MEMSET:
8278 : 36179 : target = expand_builtin_memset (exp, target, mode);
8279 : 36179 : if (target)
8280 : : return target;
8281 : : break;
8282 : :
8283 : 0 : case BUILT_IN_BZERO:
8284 : 0 : target = expand_builtin_bzero (exp);
8285 : 0 : if (target)
8286 : : return target;
8287 : : break;
8288 : :
8289 : : /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
8290 : : back to a BUILT_IN_STRCMP. Remember to delete the 3rd parameter
8291 : : when changing it to a strcmp call. */
8292 : 381 : case BUILT_IN_STRCMP_EQ:
8293 : 381 : target = expand_builtin_memcmp (exp, target, true);
8294 : 381 : if (target)
8295 : : return target;
8296 : :
8297 : : /* Change this call back to a BUILT_IN_STRCMP. */
8298 : 25 : TREE_OPERAND (exp, 1)
8299 : 25 : = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));
8300 : :
8301 : : /* Delete the last parameter. */
8302 : 25 : unsigned int i;
8303 : 25 : vec<tree, va_gc> *arg_vec;
8304 : 25 : vec_alloc (arg_vec, 2);
8305 : 100 : for (i = 0; i < 2; i++)
8306 : 50 : arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
8307 : 25 : exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
8308 : : /* FALLTHROUGH */
8309 : :
8310 : 127991 : case BUILT_IN_STRCMP:
8311 : 127991 : target = expand_builtin_strcmp (exp, target);
8312 : 127991 : if (target)
8313 : : return target;
8314 : : break;
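 :            : /* For illustration: BUILT_IN_STRCMP_EQ is introduced by earlier passes
 :            :    when the result is used only in an equality test against zero, e.g.
 :            :
 :            :      if (strcmp (s, "abc") == 0)   // equality-only use
 :            :        ...
 :            :
 :            :    so it carries a third (size) argument and can be expanded like
 :            :    memcmp_eq; the fallthrough above restores a plain strcmp call when
 :            :    that expansion fails.  */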
8315 : :
8316 : : /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
8317 : : back to a BUILT_IN_STRNCMP. */
8318 : 36 : case BUILT_IN_STRNCMP_EQ:
8319 : 36 : target = expand_builtin_memcmp (exp, target, true);
8320 : 36 : if (target)
8321 : : return target;
8322 : :
8323 : : /* Change it back to a BUILT_IN_STRNCMP. */
8324 : 0 : TREE_OPERAND (exp, 1)
8325 : 0 : = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
8326 : : /* FALLTHROUGH */
8327 : :
8328 : 2025 : case BUILT_IN_STRNCMP:
8329 : 2025 : target = expand_builtin_strncmp (exp, target, mode);
8330 : 2025 : if (target)
8331 : : return target;
8332 : : break;
8333 : :
8334 : 102148 : case BUILT_IN_BCMP:
8335 : 102148 : case BUILT_IN_MEMCMP:
8336 : 102148 : case BUILT_IN_MEMCMP_EQ:
8337 : 102148 : target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
8338 : 102148 : if (target)
8339 : : return target;
8340 : 36250 : if (fcode == BUILT_IN_MEMCMP_EQ)
8341 : : {
8342 : 22315 : tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
8343 : 22315 : TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
8344 : : }
8345 : : break;
8346 : :
8347 : 0 : case BUILT_IN_SETJMP:
8348 : : /* This should have been lowered to the builtins below. */
8349 : 0 : gcc_unreachable ();
8350 : :
8351 : 841 : case BUILT_IN_SETJMP_SETUP:
8352 : : /* __builtin_setjmp_setup is passed a pointer to an array of five words
8353 : : and the receiver label. */
8354 : 841 : if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
8355 : : {
8356 : 841 : rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
8357 : : VOIDmode, EXPAND_NORMAL);
8358 : 841 : tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
8359 : 841 : rtx_insn *label_r = label_rtx (label);
8360 : :
8361 : 841 : expand_builtin_setjmp_setup (buf_addr, label_r);
8362 : 841 : return const0_rtx;
8363 : : }
8364 : : break;
8365 : :
8366 : 841 : case BUILT_IN_SETJMP_RECEIVER:
8367 : : /* __builtin_setjmp_receiver is passed the receiver label. */
8368 : 841 : if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
8369 : : {
8370 : 841 : tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
8371 : 841 : rtx_insn *label_r = label_rtx (label);
8372 : :
8373 : 841 : expand_builtin_setjmp_receiver (label_r);
8374 : 841 : nonlocal_goto_handler_labels
8375 : 1682 : = gen_rtx_INSN_LIST (VOIDmode, label_r,
8376 : 841 : nonlocal_goto_handler_labels);
8377 : : /* ??? Do not let expand_label treat us as such since we would
8378 : : not want to be both on the list of non-local labels and on
8379 : : the list of forced labels. */
8380 : 841 : FORCED_LABEL (label) = 0;
8381 : 841 : return const0_rtx;
8382 : : }
8383 : : break;
8384 : :
8385 : : /* __builtin_longjmp is passed a pointer to an array of five words.
8386 : : It's similar to the C library longjmp function but works with
8387 : : __builtin_setjmp above. */
8388 : 391 : case BUILT_IN_LONGJMP:
8389 : 391 : if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
8390 : : {
8391 : 391 : rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
8392 : : VOIDmode, EXPAND_NORMAL);
8393 : 391 : rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
8394 : :
8395 : 391 : if (value != const1_rtx)
8396 : : {
8397 : 0 : error ("%<__builtin_longjmp%> second argument must be 1");
8398 : 0 : return const0_rtx;
8399 : : }
8400 : :
8401 : 391 : expand_builtin_longjmp (buf_addr, value);
8402 : 391 : return const0_rtx;
8403 : : }
8404 : : break;
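 :            : /* Usage sketch for the builtin setjmp/longjmp pair (distinct from the
 :            :    C library functions; note the second argument to __builtin_longjmp
 :            :    must be 1, as enforced above):
 :            :
 :            :      void *buf[5];
 :            :
 :            :      if (__builtin_setjmp (buf) == 0)
 :            :        do_work ();   // normal path
 :            :      else
 :            :        recover ();   // reached via __builtin_longjmp (buf, 1)  */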
8405 : :
8406 : 512 : case BUILT_IN_NONLOCAL_GOTO:
8407 : 512 : target = expand_builtin_nonlocal_goto (exp);
8408 : 512 : if (target)
8409 : : return target;
8410 : : break;
8411 : :
8412 : : /* This updates the setjmp buffer that is its argument with the value
8413 : : of the current stack pointer. */
8414 : 0 : case BUILT_IN_UPDATE_SETJMP_BUF:
8415 : 0 : if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
8416 : : {
8417 : 0 : rtx buf_addr
8418 : 0 : = expand_normal (CALL_EXPR_ARG (exp, 0));
8419 : :
8420 : 0 : expand_builtin_update_setjmp_buf (buf_addr);
8421 : 0 : return const0_rtx;
8422 : : }
8423 : : break;
8424 : :
8425 : 38540 : case BUILT_IN_TRAP:
8426 : 38540 : case BUILT_IN_UNREACHABLE_TRAP:
8427 : 38540 : expand_builtin_trap ();
8428 : 38540 : return const0_rtx;
8429 : :
8430 : 5400 : case BUILT_IN_UNREACHABLE:
8431 : 5400 : expand_builtin_unreachable ();
8432 : 5400 : return const0_rtx;
8433 : :
8434 : 3 : case BUILT_IN_OBSERVABLE_CHKPT:
8435 : : /* Generate no code. */
8436 : 3 : return const0_rtx;
8437 : :
8438 : 1139 : CASE_FLT_FN (BUILT_IN_SIGNBIT):
8439 : 1139 : case BUILT_IN_SIGNBITD32:
8440 : 1139 : case BUILT_IN_SIGNBITD64:
8441 : 1139 : case BUILT_IN_SIGNBITD128:
8442 : 1139 : target = expand_builtin_signbit (exp, target);
8443 : 1139 : if (target)
8444 : : return target;
8445 : : break;
8446 : :
8447 : : /* Various hooks for the DWARF 2 __throw routine. */
8448 : 34 : case BUILT_IN_UNWIND_INIT:
8449 : 34 : expand_builtin_unwind_init ();
8450 : 34 : return const0_rtx;
8451 : 1205 : case BUILT_IN_DWARF_CFA:
8452 : 1205 : return virtual_cfa_rtx;
8453 : : #ifdef DWARF2_UNWIND_INFO
8454 : 48 : case BUILT_IN_DWARF_SP_COLUMN:
8455 : 48 : return expand_builtin_dwarf_sp_column ();
8456 : 8 : case BUILT_IN_INIT_DWARF_REG_SIZES:
8457 : 8 : expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
8458 : 8 : return const0_rtx;
8459 : : #endif
8460 : 19 : case BUILT_IN_FROB_RETURN_ADDR:
8461 : 19 : return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
8462 : 2296 : case BUILT_IN_EXTRACT_RETURN_ADDR:
8463 : 2296 : return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
8464 : 29 : case BUILT_IN_EH_RETURN:
8465 : 58 : expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
8466 : 29 : CALL_EXPR_ARG (exp, 1));
8467 : 29 : return const0_rtx;
8468 : 21 : case BUILT_IN_EH_RETURN_DATA_REGNO:
8469 : 21 : return expand_builtin_eh_return_data_regno (exp);
8470 : 2 : case BUILT_IN_EXTEND_POINTER:
8471 : 2 : return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
8472 : 96002 : case BUILT_IN_EH_POINTER:
8473 : 96002 : return expand_builtin_eh_pointer (exp);
8474 : 5049 : case BUILT_IN_EH_FILTER:
8475 : 5049 : return expand_builtin_eh_filter (exp);
8476 : 105010 : case BUILT_IN_EH_COPY_VALUES:
8477 : 105010 : return expand_builtin_eh_copy_values (exp);
8478 : :
8479 : 20976 : case BUILT_IN_VA_START:
8480 : 20976 : return expand_builtin_va_start (exp);
8481 : 12122 : case BUILT_IN_VA_END:
8482 : 12122 : return expand_builtin_va_end (exp);
8483 : 242 : case BUILT_IN_VA_COPY:
8484 : 242 : return expand_builtin_va_copy (exp);
8485 : 1017 : case BUILT_IN_EXPECT:
8486 : 1017 : return expand_builtin_expect (exp, target);
8487 : 5 : case BUILT_IN_EXPECT_WITH_PROBABILITY:
8488 : 5 : return expand_builtin_expect_with_probability (exp, target);
8489 : 664 : case BUILT_IN_ASSUME_ALIGNED:
8490 : 664 : return expand_builtin_assume_aligned (exp, target);
8491 : 2039 : case BUILT_IN_PREFETCH:
8492 : 2039 : expand_builtin_prefetch (exp);
8493 : 2039 : return const0_rtx;
8494 : :
8495 : 295 : case BUILT_IN_INIT_TRAMPOLINE:
8496 : 295 : return expand_builtin_init_trampoline (exp, true);
8497 : 0 : case BUILT_IN_INIT_HEAP_TRAMPOLINE:
8498 : 0 : return expand_builtin_init_trampoline (exp, false);
8499 : 339 : case BUILT_IN_ADJUST_TRAMPOLINE:
8500 : 339 : return expand_builtin_adjust_trampoline (exp);
8501 : :
8502 : 0 : case BUILT_IN_INIT_DESCRIPTOR:
8503 : 0 : return expand_builtin_init_descriptor (exp);
8504 : 0 : case BUILT_IN_ADJUST_DESCRIPTOR:
8505 : 0 : return expand_builtin_adjust_descriptor (exp);
8506 : :
8507 : : case BUILT_IN_GCC_NESTED_PTR_CREATED:
8508 : : case BUILT_IN_GCC_NESTED_PTR_DELETED:
8509 : : break; /* At present, no expansion, just call the function. */
8510 : :
8511 : 87 : case BUILT_IN_FORK:
8512 : 87 : case BUILT_IN_EXECL:
8513 : 87 : case BUILT_IN_EXECV:
8514 : 87 : case BUILT_IN_EXECLP:
8515 : 87 : case BUILT_IN_EXECLE:
8516 : 87 : case BUILT_IN_EXECVP:
8517 : 87 : case BUILT_IN_EXECVE:
8518 : 87 : target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
8519 : 87 : if (target)
8520 : : return target;
8521 : : break;
8522 : :
8523 : 707 : case BUILT_IN_SYNC_FETCH_AND_ADD_1:
8524 : 707 : case BUILT_IN_SYNC_FETCH_AND_ADD_2:
8525 : 707 : case BUILT_IN_SYNC_FETCH_AND_ADD_4:
8526 : 707 : case BUILT_IN_SYNC_FETCH_AND_ADD_8:
8527 : 707 : case BUILT_IN_SYNC_FETCH_AND_ADD_16:
8528 : 707 : mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
8529 : 707 : target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
8530 : 707 : if (target)
8531 : : return target;
8532 : : break;
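 :            : /* For reference, the two __sync flavors expanded in the cases above
 :            :    and below: both imply a full barrier and differ only in which value
 :            :    they return:
 :            :
 :            :      int oldv = __sync_fetch_and_add (&ctr, 1);  // returns old value
 :            :      int newv = __sync_add_and_fetch (&ctr, 1);  // returns new value  */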
8533 : :
8534 : 581 : case BUILT_IN_SYNC_FETCH_AND_SUB_1:
8535 : 581 : case BUILT_IN_SYNC_FETCH_AND_SUB_2:
8536 : 581 : case BUILT_IN_SYNC_FETCH_AND_SUB_4:
8537 : 581 : case BUILT_IN_SYNC_FETCH_AND_SUB_8:
8538 : 581 : case BUILT_IN_SYNC_FETCH_AND_SUB_16:
8539 : 581 : mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
8540 : 581 : target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
8541 : 581 : if (target)
8542 : : return target;
8543 : : break;
8544 : :
8545 : 512 : case BUILT_IN_SYNC_FETCH_AND_OR_1:
8546 : 512 : case BUILT_IN_SYNC_FETCH_AND_OR_2:
8547 : 512 : case BUILT_IN_SYNC_FETCH_AND_OR_4:
8548 : 512 : case BUILT_IN_SYNC_FETCH_AND_OR_8:
8549 : 512 : case BUILT_IN_SYNC_FETCH_AND_OR_16:
8550 : 512 : mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
8551 : 512 : target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
8552 : 512 : if (target)
8553 : : return target;
8554 : : break;
8555 : :
8556 : 488 : case BUILT_IN_SYNC_FETCH_AND_AND_1:
8557 : 488 : case BUILT_IN_SYNC_FETCH_AND_AND_2:
8558 : 488 : case BUILT_IN_SYNC_FETCH_AND_AND_4:
8559 : 488 : case BUILT_IN_SYNC_FETCH_AND_AND_8:
8560 : 488 : case BUILT_IN_SYNC_FETCH_AND_AND_16:
8561 : 488 : mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
8562 : 488 : target = expand_builtin_sync_operation (mode, exp, AND, false, target);
8563 : 488 : if (target)
8564 : : return target;
8565 : : break;
8566 : :
8567 : 581 : case BUILT_IN_SYNC_FETCH_AND_XOR_1:
8568 : 581 : case BUILT_IN_SYNC_FETCH_AND_XOR_2:
8569 : 581 : case BUILT_IN_SYNC_FETCH_AND_XOR_4:
8570 : 581 : case BUILT_IN_SYNC_FETCH_AND_XOR_8:
8571 : 581 : case BUILT_IN_SYNC_FETCH_AND_XOR_16:
8572 : 581 : mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
8573 : 581 : target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
8574 : 581 : if (target)
8575 : : return target;
8576 : : break;
8577 : :
8578 : 434 : case BUILT_IN_SYNC_FETCH_AND_NAND_1:
8579 : 434 : case BUILT_IN_SYNC_FETCH_AND_NAND_2:
8580 : 434 : case BUILT_IN_SYNC_FETCH_AND_NAND_4:
8581 : 434 : case BUILT_IN_SYNC_FETCH_AND_NAND_8:
8582 : 434 : case BUILT_IN_SYNC_FETCH_AND_NAND_16:
8583 : 434 : mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
8584 : 434 : target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
8585 : 434 : if (target)
8586 : : return target;
8587 : : break;
8588 : :
8589 : 149 : case BUILT_IN_SYNC_ADD_AND_FETCH_1:
8590 : 149 : case BUILT_IN_SYNC_ADD_AND_FETCH_2:
8591 : 149 : case BUILT_IN_SYNC_ADD_AND_FETCH_4:
8592 : 149 : case BUILT_IN_SYNC_ADD_AND_FETCH_8:
8593 : 149 : case BUILT_IN_SYNC_ADD_AND_FETCH_16:
8594 : 149 : mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
8595 : 149 : target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
8596 : 149 : if (target)
8597 : : return target;
8598 : : break;
8599 : :
8600 : 117 : case BUILT_IN_SYNC_SUB_AND_FETCH_1:
8601 : 117 : case BUILT_IN_SYNC_SUB_AND_FETCH_2:
8602 : 117 : case BUILT_IN_SYNC_SUB_AND_FETCH_4:
8603 : 117 : case BUILT_IN_SYNC_SUB_AND_FETCH_8:
8604 : 117 : case BUILT_IN_SYNC_SUB_AND_FETCH_16:
8605 : 117 : mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
8606 : 117 : target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
8607 : 117 : if (target)
8608 : : return target;
8609 : : break;
8610 : :
8611 : 115 : case BUILT_IN_SYNC_OR_AND_FETCH_1:
8612 : 115 : case BUILT_IN_SYNC_OR_AND_FETCH_2:
8613 : 115 : case BUILT_IN_SYNC_OR_AND_FETCH_4:
8614 : 115 : case BUILT_IN_SYNC_OR_AND_FETCH_8:
8615 : 115 : case BUILT_IN_SYNC_OR_AND_FETCH_16:
8616 : 115 : mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
8617 : 115 : target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
8618 : 115 : if (target)
8619 : : return target;
8620 : : break;
8621 : :
8622 : 116 : case BUILT_IN_SYNC_AND_AND_FETCH_1:
8623 : 116 : case BUILT_IN_SYNC_AND_AND_FETCH_2:
8624 : 116 : case BUILT_IN_SYNC_AND_AND_FETCH_4:
8625 : 116 : case BUILT_IN_SYNC_AND_AND_FETCH_8:
8626 : 116 : case BUILT_IN_SYNC_AND_AND_FETCH_16:
8627 : 116 : mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
8628 : 116 : target = expand_builtin_sync_operation (mode, exp, AND, true, target);
8629 : 116 : if (target)
8630 : : return target;
8631 : : break;
8632 : :
8633 : 114 : case BUILT_IN_SYNC_XOR_AND_FETCH_1:
8634 : 114 : case BUILT_IN_SYNC_XOR_AND_FETCH_2:
8635 : 114 : case BUILT_IN_SYNC_XOR_AND_FETCH_4:
8636 : 114 : case BUILT_IN_SYNC_XOR_AND_FETCH_8:
8637 : 114 : case BUILT_IN_SYNC_XOR_AND_FETCH_16:
8638 : 114 : mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
8639 : 114 : target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
8640 : 114 : if (target)
8641 : : return target;
8642 : : break;
8643 : :
8644 : 89 : case BUILT_IN_SYNC_NAND_AND_FETCH_1:
8645 : 89 : case BUILT_IN_SYNC_NAND_AND_FETCH_2:
8646 : 89 : case BUILT_IN_SYNC_NAND_AND_FETCH_4:
8647 : 89 : case BUILT_IN_SYNC_NAND_AND_FETCH_8:
8648 : 89 : case BUILT_IN_SYNC_NAND_AND_FETCH_16:
8649 : 89 : mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
8650 : 89 : target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
8651 : 89 : if (target)
8652 : : return target;
8653 : : break;
8654 : :
8655 : 200 : case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
8656 : 200 : case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
8657 : 200 : case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
8658 : 200 : case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
8659 : 200 : case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
8660 : 200 : if (mode == VOIDmode)
8661 : 40 : mode = TYPE_MODE (boolean_type_node);
8662 : 200 : if (!target || !register_operand (target, mode))
8663 : 40 : target = gen_reg_rtx (mode);
8664 : :
8665 : 200 : mode = get_builtin_sync_mode
8666 : 200 : (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
8667 : 200 : target = expand_builtin_compare_and_swap (mode, exp, true, target);
8668 : 200 : if (target)
8669 : : return target;
8670 : : break;
8671 : :
8672 : 256 : case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
8673 : 256 : case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
8674 : 256 : case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
8675 : 256 : case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
8676 : 256 : case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
8677 : 256 : mode = get_builtin_sync_mode
8678 : 256 : (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
8679 : 256 : target = expand_builtin_compare_and_swap (mode, exp, false, target);
8680 : 256 : if (target)
8681 : : return target;
8682 : : break;
8683 : :
8684 : 326 : case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
8685 : 326 : case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
8686 : 326 : case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
8687 : 326 : case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
8688 : 326 : case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
8689 : 326 : mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
8690 : 326 : target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
8691 : 326 : if (target)
8692 : : return target;
8693 : : break;
8694 : :
8695 : 158 : case BUILT_IN_SYNC_LOCK_RELEASE_1:
8696 : 158 : case BUILT_IN_SYNC_LOCK_RELEASE_2:
8697 : 158 : case BUILT_IN_SYNC_LOCK_RELEASE_4:
8698 : 158 : case BUILT_IN_SYNC_LOCK_RELEASE_8:
8699 : 158 : case BUILT_IN_SYNC_LOCK_RELEASE_16:
8700 : 158 : mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
8701 : 158 : if (expand_builtin_sync_lock_release (mode, exp))
8702 : 156 : return const0_rtx;
8703 : : break;
8704 : :
8705 : 273 : case BUILT_IN_SYNC_SYNCHRONIZE:
8706 : 273 : expand_builtin_sync_synchronize ();
8707 : 273 : return const0_rtx;
8708 : :
8709 : 2921 : case BUILT_IN_ATOMIC_EXCHANGE_1:
8710 : 2921 : case BUILT_IN_ATOMIC_EXCHANGE_2:
8711 : 2921 : case BUILT_IN_ATOMIC_EXCHANGE_4:
8712 : 2921 : case BUILT_IN_ATOMIC_EXCHANGE_8:
8713 : 2921 : case BUILT_IN_ATOMIC_EXCHANGE_16:
8714 : 2921 : mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
8715 : 2921 : target = expand_builtin_atomic_exchange (mode, exp, target);
8716 : 2921 : if (target)
8717 : : return target;
8718 : : break;
8719 : :
8720 : 9233 : case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
8721 : 9233 : case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
8722 : 9233 : case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
8723 : 9233 : case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
8724 : 9233 : case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
8725 : 9233 : {
8726 : 9233 : unsigned int nargs, z;
8727 : 9233 : vec<tree, va_gc> *vec;
8728 : :
8729 : 9233 : mode =
8730 : 9233 : get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
8731 : 9233 : target = expand_builtin_atomic_compare_exchange (mode, exp, target);
8732 : 9233 : if (target)
8733 : 7433 : return target;
8734 : :
8735 : : /* If this is turned into an external library call, the weak parameter
8736 : : must be dropped to match the expected parameter list. */
8737 : 1800 : nargs = call_expr_nargs (exp);
8738 : 1800 : vec_alloc (vec, nargs - 1);
8739 : 9000 : for (z = 0; z < 3; z++)
8740 : 5400 : vec->quick_push (CALL_EXPR_ARG (exp, z));
8741 : : /* Skip the boolean weak parameter. */
8742 : 5400 : for (z = 4; z < 6; z++)
8743 : 3600 : vec->quick_push (CALL_EXPR_ARG (exp, z));
8744 : 1800 : exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
8745 : 1800 : break;
8746 : : }
8747 : :
8748 : 71001 : case BUILT_IN_ATOMIC_LOAD_1:
8749 : 71001 : case BUILT_IN_ATOMIC_LOAD_2:
8750 : 71001 : case BUILT_IN_ATOMIC_LOAD_4:
8751 : 71001 : case BUILT_IN_ATOMIC_LOAD_8:
8752 : 71001 : case BUILT_IN_ATOMIC_LOAD_16:
8753 : 71001 : mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
8754 : 71001 : target = expand_builtin_atomic_load (mode, exp, target);
8755 : 71001 : if (target)
8756 : : return target;
8757 : : break;
8758 : :
8759 : 17539 : case BUILT_IN_ATOMIC_STORE_1:
8760 : 17539 : case BUILT_IN_ATOMIC_STORE_2:
8761 : 17539 : case BUILT_IN_ATOMIC_STORE_4:
8762 : 17539 : case BUILT_IN_ATOMIC_STORE_8:
8763 : 17539 : case BUILT_IN_ATOMIC_STORE_16:
8764 : 17539 : mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
8765 : 17539 : target = expand_builtin_atomic_store (mode, exp);
8766 : 17539 : if (target)
8767 : 15912 : return const0_rtx;
8768 : : break;
8769 : :
8770 : 4377 : case BUILT_IN_ATOMIC_ADD_FETCH_1:
8771 : 4377 : case BUILT_IN_ATOMIC_ADD_FETCH_2:
8772 : 4377 : case BUILT_IN_ATOMIC_ADD_FETCH_4:
8773 : 4377 : case BUILT_IN_ATOMIC_ADD_FETCH_8:
8774 : 4377 : case BUILT_IN_ATOMIC_ADD_FETCH_16:
8775 : 4377 : {
8776 : 4377 : enum built_in_function lib;
8777 : 4377 : mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
8778 : 4377 : lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
8779 : : (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
8780 : 4377 : target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
8781 : : ignore, lib);
8782 : 4377 : if (target)
8783 : : return target;
8784 : : break;
8785 : : }
8786 : 2178 : case BUILT_IN_ATOMIC_SUB_FETCH_1:
8787 : 2178 : case BUILT_IN_ATOMIC_SUB_FETCH_2:
8788 : 2178 : case BUILT_IN_ATOMIC_SUB_FETCH_4:
8789 : 2178 : case BUILT_IN_ATOMIC_SUB_FETCH_8:
8790 : 2178 : case BUILT_IN_ATOMIC_SUB_FETCH_16:
8791 : 2178 : {
8792 : 2178 : enum built_in_function lib;
8793 : 2178 : mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
8794 : 2178 : lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
8795 : : (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
8796 : 2178 : target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
8797 : : ignore, lib);
8798 : 2178 : if (target)
8799 : : return target;
8800 : : break;
8801 : : }
8802 : 891 : case BUILT_IN_ATOMIC_AND_FETCH_1:
8803 : 891 : case BUILT_IN_ATOMIC_AND_FETCH_2:
8804 : 891 : case BUILT_IN_ATOMIC_AND_FETCH_4:
8805 : 891 : case BUILT_IN_ATOMIC_AND_FETCH_8:
8806 : 891 : case BUILT_IN_ATOMIC_AND_FETCH_16:
8807 : 891 : {
8808 : 891 : enum built_in_function lib;
8809 : 891 : mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
8810 : 891 : lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
8811 : : (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
8812 : 891 : target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
8813 : : ignore, lib);
8814 : 891 : if (target)
8815 : : return target;
8816 : : break;
8817 : : }
8818 : 155 : case BUILT_IN_ATOMIC_NAND_FETCH_1:
8819 : 155 : case BUILT_IN_ATOMIC_NAND_FETCH_2:
8820 : 155 : case BUILT_IN_ATOMIC_NAND_FETCH_4:
8821 : 155 : case BUILT_IN_ATOMIC_NAND_FETCH_8:
8822 : 155 : case BUILT_IN_ATOMIC_NAND_FETCH_16:
8823 : 155 : {
8824 : 155 : enum built_in_function lib;
8825 : 155 : mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
8826 : 155 : lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
8827 : : (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
8828 : 155 : target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
8829 : : ignore, lib);
8830 : 155 : if (target)
8831 : : return target;
8832 : : break;
8833 : : }
8834 : 836 : case BUILT_IN_ATOMIC_XOR_FETCH_1:
8835 : 836 : case BUILT_IN_ATOMIC_XOR_FETCH_2:
8836 : 836 : case BUILT_IN_ATOMIC_XOR_FETCH_4:
8837 : 836 : case BUILT_IN_ATOMIC_XOR_FETCH_8:
8838 : 836 : case BUILT_IN_ATOMIC_XOR_FETCH_16:
8839 : 836 : {
8840 : 836 : enum built_in_function lib;
8841 : 836 : mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
8842 : 836 : lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
8843 : : (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
8844 : 836 : target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
8845 : : ignore, lib);
8846 : 836 : if (target)
8847 : : return target;
8848 : : break;
8849 : : }
8850 : 953 : case BUILT_IN_ATOMIC_OR_FETCH_1:
8851 : 953 : case BUILT_IN_ATOMIC_OR_FETCH_2:
8852 : 953 : case BUILT_IN_ATOMIC_OR_FETCH_4:
8853 : 953 : case BUILT_IN_ATOMIC_OR_FETCH_8:
8854 : 953 : case BUILT_IN_ATOMIC_OR_FETCH_16:
8855 : 953 : {
8856 : 953 : enum built_in_function lib;
8857 : 953 : mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
8858 : 953 : lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
8859 : : (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
8860 : 953 : target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
8861 : : ignore, lib);
8862 : 953 : if (target)
8863 : : return target;
8864 : : break;
8865 : : }
8866 : 15213 : case BUILT_IN_ATOMIC_FETCH_ADD_1:
8867 : 15213 : case BUILT_IN_ATOMIC_FETCH_ADD_2:
8868 : 15213 : case BUILT_IN_ATOMIC_FETCH_ADD_4:
8869 : 15213 : case BUILT_IN_ATOMIC_FETCH_ADD_8:
8870 : 15213 : case BUILT_IN_ATOMIC_FETCH_ADD_16:
8871 : 15213 : mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
8872 : 15213 : target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
8873 : : ignore, BUILT_IN_NONE);
8874 : 15213 : if (target)
8875 : : return target;
8876 : : break;
8877 : :
8878 : 2393 : case BUILT_IN_ATOMIC_FETCH_SUB_1:
8879 : 2393 : case BUILT_IN_ATOMIC_FETCH_SUB_2:
8880 : 2393 : case BUILT_IN_ATOMIC_FETCH_SUB_4:
8881 : 2393 : case BUILT_IN_ATOMIC_FETCH_SUB_8:
8882 : 2393 : case BUILT_IN_ATOMIC_FETCH_SUB_16:
8883 : 2393 : mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
8884 : 2393 : target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
8885 : : ignore, BUILT_IN_NONE);
8886 : 2393 : if (target)
8887 : : return target;
8888 : : break;
8889 : :
8890 : 772 : case BUILT_IN_ATOMIC_FETCH_AND_1:
8891 : 772 : case BUILT_IN_ATOMIC_FETCH_AND_2:
8892 : 772 : case BUILT_IN_ATOMIC_FETCH_AND_4:
8893 : 772 : case BUILT_IN_ATOMIC_FETCH_AND_8:
8894 : 772 : case BUILT_IN_ATOMIC_FETCH_AND_16:
8895 : 772 : mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
8896 : 772 : target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
8897 : : ignore, BUILT_IN_NONE);
8898 : 772 : if (target)
8899 : : return target;
8900 : : break;
8901 : :
8902 : 118 : case BUILT_IN_ATOMIC_FETCH_NAND_1:
8903 : 118 : case BUILT_IN_ATOMIC_FETCH_NAND_2:
8904 : 118 : case BUILT_IN_ATOMIC_FETCH_NAND_4:
8905 : 118 : case BUILT_IN_ATOMIC_FETCH_NAND_8:
8906 : 118 : case BUILT_IN_ATOMIC_FETCH_NAND_16:
8907 : 118 : mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
8908 : 118 : target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
8909 : : ignore, BUILT_IN_NONE);
8910 : 118 : if (target)
8911 : : return target;
8912 : : break;
8913 : :
8914 : 862 : case BUILT_IN_ATOMIC_FETCH_XOR_1:
8915 : 862 : case BUILT_IN_ATOMIC_FETCH_XOR_2:
8916 : 862 : case BUILT_IN_ATOMIC_FETCH_XOR_4:
8917 : 862 : case BUILT_IN_ATOMIC_FETCH_XOR_8:
8918 : 862 : case BUILT_IN_ATOMIC_FETCH_XOR_16:
8919 : 862 : mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
8920 : 862 : target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
8921 : : ignore, BUILT_IN_NONE);
8922 : 862 : if (target)
8923 : : return target;
8924 : : break;
8925 : :
8926 : 1127 : case BUILT_IN_ATOMIC_FETCH_OR_1:
8927 : 1127 : case BUILT_IN_ATOMIC_FETCH_OR_2:
8928 : 1127 : case BUILT_IN_ATOMIC_FETCH_OR_4:
8929 : 1127 : case BUILT_IN_ATOMIC_FETCH_OR_8:
8930 : 1127 : case BUILT_IN_ATOMIC_FETCH_OR_16:
8931 : 1127 : mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
8932 : 1127 : target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
8933 : : ignore, BUILT_IN_NONE);
8934 : 1127 : if (target)
8935 : : return target;
8936 : : break;
8937 : :
8938 : 274 : case BUILT_IN_ATOMIC_TEST_AND_SET:
8939 : 274 : target = expand_builtin_atomic_test_and_set (exp, target);
8940 : 274 : if (target)
8941 : : return target;
8942 : : break;
8943 : :
8944 : 57 : case BUILT_IN_ATOMIC_CLEAR:
8945 : 57 : return expand_builtin_atomic_clear (exp);
8946 : :
8947 : 1 : case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8948 : 1 : return expand_builtin_atomic_always_lock_free (exp);
8949 : :
8950 : 3 : case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8951 : 3 : target = expand_builtin_atomic_is_lock_free (exp);
8952 : 3 : if (target)
8953 : : return target;
8954 : : break;
8955 : :
8956 : 693 : case BUILT_IN_ATOMIC_THREAD_FENCE:
8957 : 693 : expand_builtin_atomic_thread_fence (exp);
8958 : 693 : return const0_rtx;
8959 : :
8960 : 60 : case BUILT_IN_ATOMIC_SIGNAL_FENCE:
8961 : 60 : expand_builtin_atomic_signal_fence (exp);
8962 : 60 : return const0_rtx;
8963 : :
8964 : 618 : case BUILT_IN_OBJECT_SIZE:
8965 : 618 : case BUILT_IN_DYNAMIC_OBJECT_SIZE:
8966 : 618 : return expand_builtin_object_size (exp);
8967 : :
8968 : 837 : case BUILT_IN_MEMCPY_CHK:
8969 : 837 : case BUILT_IN_MEMPCPY_CHK:
8970 : 837 : case BUILT_IN_MEMMOVE_CHK:
8971 : 837 : case BUILT_IN_MEMSET_CHK:
8972 : 837 : target = expand_builtin_memory_chk (exp, target, mode, fcode);
8973 : 837 : if (target)
8974 : : return target;
8975 : : break;
8976 : :
8977 : 1135 : case BUILT_IN_STRCPY_CHK:
8978 : 1135 : case BUILT_IN_STPCPY_CHK:
8979 : 1135 : case BUILT_IN_STRNCPY_CHK:
8980 : 1135 : case BUILT_IN_STPNCPY_CHK:
8981 : 1135 : case BUILT_IN_STRCAT_CHK:
8982 : 1135 : case BUILT_IN_STRNCAT_CHK:
8983 : 1135 : case BUILT_IN_SNPRINTF_CHK:
8984 : 1135 : case BUILT_IN_VSNPRINTF_CHK:
8985 : 1135 : maybe_emit_chk_warning (exp, fcode);
8986 : 1135 : break;
8987 : :
8988 : 1329 : case BUILT_IN_SPRINTF_CHK:
8989 : 1329 : case BUILT_IN_VSPRINTF_CHK:
8990 : 1329 : maybe_emit_sprintf_chk_warning (exp, fcode);
8991 : 1329 : break;
8992 : :
8993 : 3 : case BUILT_IN_THREAD_POINTER:
8994 : 3 : return expand_builtin_thread_pointer (exp, target);
8995 : :
8996 : 0 : case BUILT_IN_SET_THREAD_POINTER:
8997 : 0 : expand_builtin_set_thread_pointer (exp);
8998 : 0 : return const0_rtx;
8999 : :
9000 : : case BUILT_IN_ACC_ON_DEVICE:
9001 : : /* Do the library call if we failed to expand the builtin when
9002 : : folding. */
9003 : : break;
9004 : :
9005 : 356 : case BUILT_IN_GOACC_PARLEVEL_ID:
9006 : 356 : case BUILT_IN_GOACC_PARLEVEL_SIZE:
9007 : 356 : return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
9008 : :
9009 : 4 : case BUILT_IN_SPECULATION_SAFE_VALUE_PTR:
9010 : 4 : return expand_speculation_safe_value (VOIDmode, exp, target, ignore);
9011 : :
9012 : 30 : case BUILT_IN_SPECULATION_SAFE_VALUE_1:
9013 : 30 : case BUILT_IN_SPECULATION_SAFE_VALUE_2:
9014 : 30 : case BUILT_IN_SPECULATION_SAFE_VALUE_4:
9015 : 30 : case BUILT_IN_SPECULATION_SAFE_VALUE_8:
9016 : 30 : case BUILT_IN_SPECULATION_SAFE_VALUE_16:
9017 : 30 : mode = get_builtin_sync_mode (fcode - BUILT_IN_SPECULATION_SAFE_VALUE_1);
9018 : 30 : return expand_speculation_safe_value (mode, exp, target, ignore);
9019 : :
9020 : 3 : case BUILT_IN_CRC8_DATA8:
9021 : 3 : return expand_builtin_crc_table_based (IFN_CRC, QImode, QImode, mode,
9022 : : exp, target);
9023 : 2 : case BUILT_IN_CRC16_DATA8:
9024 : 2 : return expand_builtin_crc_table_based (IFN_CRC, HImode, QImode, mode,
9025 : : exp, target);
9026 : 2 : case BUILT_IN_CRC16_DATA16:
9027 : 2 : return expand_builtin_crc_table_based (IFN_CRC, HImode, HImode, mode,
9028 : : exp, target);
9029 : 2 : case BUILT_IN_CRC32_DATA8:
9030 : 2 : return expand_builtin_crc_table_based (IFN_CRC, SImode, QImode, mode,
9031 : : exp, target);
9032 : 2 : case BUILT_IN_CRC32_DATA16:
9033 : 2 : return expand_builtin_crc_table_based (IFN_CRC, SImode, HImode, mode,
9034 : : exp, target);
9035 : 2 : case BUILT_IN_CRC32_DATA32:
9036 : 2 : return expand_builtin_crc_table_based (IFN_CRC, SImode, SImode, mode,
9037 : : exp, target);
9038 : 1 : case BUILT_IN_CRC64_DATA8:
9039 : 1 : return expand_builtin_crc_table_based (IFN_CRC, DImode, QImode, mode,
9040 : : exp, target);
9041 : 1 : case BUILT_IN_CRC64_DATA16:
9042 : 1 : return expand_builtin_crc_table_based (IFN_CRC, DImode, HImode, mode,
9043 : : exp, target);
9044 : 1 : case BUILT_IN_CRC64_DATA32:
9045 : 1 : return expand_builtin_crc_table_based (IFN_CRC, DImode, SImode, mode,
9046 : : exp, target);
9047 : 1 : case BUILT_IN_CRC64_DATA64:
9048 : 1 : return expand_builtin_crc_table_based (IFN_CRC, DImode, DImode, mode,
9049 : : exp, target);
9050 : 2 : case BUILT_IN_REV_CRC8_DATA8:
9051 : 2 : return expand_builtin_crc_table_based (IFN_CRC_REV, QImode, QImode,
9052 : : mode, exp, target);
9053 : 2 : case BUILT_IN_REV_CRC16_DATA8:
9054 : 2 : return expand_builtin_crc_table_based (IFN_CRC_REV, HImode, QImode,
9055 : : mode, exp, target);
9056 : 2 : case BUILT_IN_REV_CRC16_DATA16:
9057 : 2 : return expand_builtin_crc_table_based (IFN_CRC_REV, HImode, HImode,
9058 : : mode, exp, target);
9059 : 3 : case BUILT_IN_REV_CRC32_DATA8:
9060 : 3 : return expand_builtin_crc_table_based (IFN_CRC_REV, SImode, QImode,
9061 : : mode, exp, target);
9062 : 2 : case BUILT_IN_REV_CRC32_DATA16:
9063 : 2 : return expand_builtin_crc_table_based (IFN_CRC_REV, SImode, HImode,
9064 : : mode, exp, target);
9065 : 2 : case BUILT_IN_REV_CRC32_DATA32:
9066 : 2 : return expand_builtin_crc_table_based (IFN_CRC_REV, SImode, SImode,
9067 : : mode, exp, target);
9068 : 1 : case BUILT_IN_REV_CRC64_DATA8:
9069 : 1 : return expand_builtin_crc_table_based (IFN_CRC_REV, DImode, QImode,
9070 : : mode, exp, target);
9071 : 1 : case BUILT_IN_REV_CRC64_DATA16:
9072 : 1 : return expand_builtin_crc_table_based (IFN_CRC_REV, DImode, HImode,
9073 : : mode, exp, target);
9074 : 1 : case BUILT_IN_REV_CRC64_DATA32:
9075 : 1 : return expand_builtin_crc_table_based (IFN_CRC_REV, DImode, SImode,
9076 : : mode, exp, target);
9077 : 1 : case BUILT_IN_REV_CRC64_DATA64:
9078 : 1 : return expand_builtin_crc_table_based (IFN_CRC_REV, DImode, DImode,
9079 : : mode, exp, target);
9080 : : default: /* Just do a library call for an unknown builtin. */
9081 : : break;
9082 : : }
9083 : :
9084 : : /* The switch statement above can drop through to cause the function
9085 : : to be called normally. */
9086 : 833600 : return expand_call (exp, target, ignore);
9087 : : }
9088 : :
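/* Editor's sketch (illustrative, not part of GCC): why the
   __atomic_compare_exchange case above copies arguments 0-2 and 4-5 and
   skips argument 3.  The 6-argument builtin

       bool __atomic_compare_exchange_n (T *ptr, T *expected, T desired,
                                         bool weak, int success, int failure);

   falls back to a libatomic entry point that has no `weak' parameter, so
   the call must be rebuilt without that argument before expand_call runs.  */

#include <stdbool.h>
#include <stdio.h>

int
main (void)
{
  int v = 1, expected = 1;
  /* Argument 3 (the `false' weak flag) is the one dropped when this call
     becomes an out-of-line libatomic call.  */
  bool ok = __atomic_compare_exchange_n (&v, &expected, 2, false,
                                         __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
  printf ("%d %d\n", (int) ok, v);   /* prints "1 2" */
  return 0;
}
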
9089 : : /* Determine whether a tree node represents a call to a built-in
9090 : : function. If the tree T is a call to a built-in function with
9091 : : the right number of arguments of the appropriate types, return
9092 : : the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
9093 : : Otherwise the return value is END_BUILTINS. */
9094 : :
9095 : : enum built_in_function
9096 : 94498292 : builtin_mathfn_code (const_tree t)
9097 : : {
9098 : 94498292 : const_tree fndecl, arg, parmlist;
9099 : 94498292 : const_tree argtype, parmtype;
9100 : 94498292 : const_call_expr_arg_iterator iter;
9101 : :
9102 : 94498292 : if (TREE_CODE (t) != CALL_EXPR)
9103 : : return END_BUILTINS;
9104 : :
9105 : 1371957 : fndecl = get_callee_fndecl (t);
9106 : 1371957 : if (fndecl == NULL_TREE || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
9107 : : return END_BUILTINS;
9108 : :
9109 : 781341 : parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
9110 : 781341 : init_const_call_expr_arg_iterator (t, &iter);
9111 : 2556895 : for (; parmlist; parmlist = TREE_CHAIN (parmlist))
9112 : : {
9113 : : /* If a function doesn't take a variable number of arguments,
9114 : : the last element in the list will have type `void'. */
9115 : 1775345 : parmtype = TREE_VALUE (parmlist);
9116 : 1775345 : if (VOID_TYPE_P (parmtype))
9117 : : {
9118 : 781118 : if (more_const_call_expr_args_p (&iter))
9119 : : return END_BUILTINS;
9120 : 781118 : return DECL_FUNCTION_CODE (fndecl);
9121 : : }
9122 : :
9123 : 994227 : if (! more_const_call_expr_args_p (&iter))
9124 : : return END_BUILTINS;
9125 : :
9126 : 994227 : arg = next_const_call_expr_arg (&iter);
9127 : 994227 : argtype = TREE_TYPE (arg);
9128 : :
9129 : 994227 : if (SCALAR_FLOAT_TYPE_P (parmtype))
9130 : : {
9131 : 736031 : if (! SCALAR_FLOAT_TYPE_P (argtype))
9132 : : return END_BUILTINS;
9133 : : }
9134 : 258196 : else if (COMPLEX_FLOAT_TYPE_P (parmtype))
9135 : : {
9136 : 11849 : if (! COMPLEX_FLOAT_TYPE_P (argtype))
9137 : : return END_BUILTINS;
9138 : : }
9139 : 246347 : else if (POINTER_TYPE_P (parmtype))
9140 : : {
9141 : 72104 : if (! POINTER_TYPE_P (argtype))
9142 : : return END_BUILTINS;
9143 : : }
9144 : 174243 : else if (INTEGRAL_TYPE_P (parmtype))
9145 : : {
9146 : 174243 : if (! INTEGRAL_TYPE_P (argtype))
9147 : : return END_BUILTINS;
9148 : : }
9149 : : else
9150 : : return END_BUILTINS;
9151 : : }
9152 : :
9153 : : /* Variable-length argument list. */
9154 : 209 : return DECL_FUNCTION_CODE (fndecl);
9155 : : }
9156 : :
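/* Editor's sketch (a simplified, hypothetical mirror of the matching loop
   above; TC_* and arg_matches_parm_sketch are the editor's names, not GCC
   API).  An argument binds to a parameter only when both fall in the same
   type class; a parameter of any other class rejects the whole call, and a
   parameter list that ends before reaching `void' denotes varargs, whose
   trailing arguments are accepted unchecked.  */

#include <stdbool.h>

enum type_class_sketch
{
  TC_FLOAT, TC_COMPLEX_FLOAT, TC_POINTER, TC_INTEGRAL, TC_OTHER
};

static bool
arg_matches_parm_sketch (enum type_class_sketch parm,
                         enum type_class_sketch arg)
{
  /* Mirrors the SCALAR_FLOAT/COMPLEX_FLOAT/POINTER/INTEGRAL branches:
     the classes must agree, and an unrecognized parameter class fails.  */
  return parm != TC_OTHER && parm == arg;
}
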
9157 : : /* Fold a call to __builtin_constant_p, if we know its argument ARG will
9158 : : evaluate to a constant. */
9159 : :
9160 : : tree
9161 : 815136 : fold_builtin_constant_p (tree arg)
9162 : : {
9163 : : /* We return 1 for a numeric type that's known to be a constant
9164 : : value at compile-time or for an aggregate type that's a
9165 : : literal constant. */
9166 : 815136 : STRIP_NOPS (arg);
9167 : :
9168 : : /* If we know this is a constant, return the constant one. */
9169 : 815136 : if (CONSTANT_CLASS_P (arg)
9170 : 815136 : || (TREE_CODE (arg) == CONSTRUCTOR
9171 : 6 : && TREE_CONSTANT (arg)))
9172 : 26582 : return integer_one_node;
9173 : 788554 : if (TREE_CODE (arg) == ADDR_EXPR)
9174 : : {
9175 : 110 : tree op = TREE_OPERAND (arg, 0);
9176 : 110 : if (TREE_CODE (op) == STRING_CST
9177 : 110 : || (TREE_CODE (op) == ARRAY_REF
9178 : 57 : && integer_zerop (TREE_OPERAND (op, 1))
9179 : 57 : && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
9180 : 87 : return integer_one_node;
9181 : : }
9182 : :
9183 : : /* If this expression has side effects, show we don't know it to be a
9184 : : constant. Likewise if it's a pointer or aggregate type since in
9185 : : those case we only want literals, since those are only optimized
9186 : : when generating RTL, not later.
9187 : : And finally, if we are compiling an initializer, not code, we
9188 : : need to return a definite result now; there's not going to be any
9189 : : more optimization done. */
9190 : 788467 : if (TREE_SIDE_EFFECTS (arg)
9191 : 788342 : || AGGREGATE_TYPE_P (TREE_TYPE (arg))
9192 : 788330 : || POINTER_TYPE_P (TREE_TYPE (arg))
9193 : 788165 : || cfun == 0
9194 : 788115 : || folding_initializer
9195 : 1576575 : || force_folding_builtin_constant_p)
9196 : 377 : return integer_zero_node;
9197 : :
9198 : : return NULL_TREE;
9199 : : }
9200 : :
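/* Editor's sketch (illustrative; function names are the editor's): the
   decisions the fold above makes, as seen from user code.  */

int f1 (void)   { return __builtin_constant_p (3 + 4); }  /* 1: constant class */
int f2 (int *p) { return __builtin_constant_p (p); }      /* 0: pointer type --
                                                             only literals count */
int f3 (int v)  { return __builtin_constant_p (v++); }    /* 0: side effects */
int f4 (int v)  { return __builtin_constant_p (v); }      /* NULL_TREE here: the
                                                             answer is deferred to
                                                             later passes */
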
9201 : : /* Create builtin_expect or builtin_expect_with_probability
9202 : : with PRED and EXPECTED as its arguments and return it as a truthvalue.
9203 : : The Fortran FE can also produce builtin_expect with PREDICTOR as a third
9204 : : argument; builtin_expect_with_probability instead uses its third argument
9205 : : as the PROBABILITY value. */
9206 : :
9207 : : static tree
9208 : 49974 : build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
9209 : : tree predictor, tree probability)
9210 : : {
9211 : 49974 : tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
9212 : :
9213 : 49974 : fn = builtin_decl_explicit (probability == NULL_TREE ? BUILT_IN_EXPECT
9214 : : : BUILT_IN_EXPECT_WITH_PROBABILITY);
9215 : 49974 : arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
9216 : 49974 : ret_type = TREE_TYPE (TREE_TYPE (fn));
9217 : 49974 : pred_type = TREE_VALUE (arg_types);
9218 : 49974 : expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
9219 : :
9220 : 49974 : pred = fold_convert_loc (loc, pred_type, pred);
9221 : 49974 : expected = fold_convert_loc (loc, expected_type, expected);
9222 : :
9223 : 49974 : if (probability)
9224 : 0 : call_expr = build_call_expr_loc (loc, fn, 3, pred, expected, probability);
9225 : : else
9226 : 99948 : call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
9227 : : predictor);
9228 : :
9229 : 49974 : return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
9230 : 49974 : build_int_cst (ret_type, 0));
9231 : : }
9232 : :
9233 : : /* Fold a call to builtin_expect with arguments ARG0, ARG1, ARG2, ARG3. Return
9234 : : NULL_TREE if no simplification is possible. */
9235 : :
9236 : : tree
9237 : 5673522 : fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2,
9238 : : tree arg3)
9239 : : {
9240 : 5673522 : tree inner, fndecl, inner_arg0;
9241 : 5673522 : enum tree_code code;
9242 : :
9243 : : /* Distribute the expected value over short-circuiting operators.
9244 : : See through the cast from truthvalue_type_node to long. */
9245 : 5673522 : inner_arg0 = arg0;
9246 : 11480718 : while (CONVERT_EXPR_P (inner_arg0)
9247 : 420037 : && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
9248 : 6513595 : && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
9249 : 420036 : inner_arg0 = TREE_OPERAND (inner_arg0, 0);
9250 : :
9251 : : /* If this is a builtin_expect within a builtin_expect, keep the
9252 : : inner one. See through a comparison against a constant. It
9253 : : might have been added to create a truthvalue. */
9254 : 5673522 : inner = inner_arg0;
9255 : :
9256 : 5673522 : if (COMPARISON_CLASS_P (inner)
9257 : 5673522 : && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
9258 : 712438 : inner = TREE_OPERAND (inner, 0);
9259 : :
9260 : 5673522 : if (TREE_CODE (inner) == CALL_EXPR
9261 : 55289 : && (fndecl = get_callee_fndecl (inner))
9262 : 5728811 : && fndecl_built_in_p (fndecl, BUILT_IN_EXPECT,
9263 : : BUILT_IN_EXPECT_WITH_PROBABILITY))
9264 : : return arg0;
9265 : :
9266 : 5673514 : inner = inner_arg0;
9267 : 5673514 : code = TREE_CODE (inner);
9268 : 5673514 : if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
9269 : : {
9270 : 24987 : tree op0 = TREE_OPERAND (inner, 0);
9271 : 24987 : tree op1 = TREE_OPERAND (inner, 1);
9272 : 24987 : arg1 = save_expr (arg1);
9273 : :
9274 : 24987 : op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2, arg3);
9275 : 24987 : op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2, arg3);
9276 : 24987 : inner = build2 (code, TREE_TYPE (inner), op0, op1);
9277 : :
9278 : 24987 : return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
9279 : : }
9280 : :
9281 : : /* If the argument isn't invariant then there's nothing else we can do. */
9282 : 5648527 : if (!TREE_CONSTANT (inner_arg0))
9283 : : return NULL_TREE;
9284 : :
9285 : : /* If we expect that a comparison against the argument will fold to
9286 : : a constant return the constant. In practice, this means a true
9287 : : constant or the address of a non-weak symbol. */
9288 : 67153 : inner = inner_arg0;
9289 : 67153 : STRIP_NOPS (inner);
9290 : 67153 : if (TREE_CODE (inner) == ADDR_EXPR)
9291 : : {
9292 : 1 : do
9293 : : {
9294 : 1 : inner = TREE_OPERAND (inner, 0);
9295 : : }
9296 : 1 : while (TREE_CODE (inner) == COMPONENT_REF
9297 : 1 : || TREE_CODE (inner) == ARRAY_REF);
9298 : 1 : if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
9299 : : return NULL_TREE;
9300 : : }
9301 : :
9302 : : /* Otherwise, ARG0 already has the proper type for the return value. */
9303 : : return arg0;
9304 : : }
9305 : :
9306 : : /* Fold a call to __builtin_classify_type with argument ARG. */
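/* Editor's sketch (illustrative): the distribution over short-circuit
   operators performed above, written out at source level.  */

long
expect_demo (int a, int b)
{
  /* Folded as if written
       (__builtin_expect ((long) a, 1) != 0)
       && (__builtin_expect ((long) b, 1) != 0)
     so each arm of the `&&' carries its own prediction.  */
  return __builtin_expect (a && b, 1);
}
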
9307 : :
9308 : : static tree
9309 : 2140 : fold_builtin_classify_type (tree arg)
9310 : : {
9311 : 2140 : if (arg == 0)
9312 : 0 : return build_int_cst (integer_type_node, no_type_class);
9313 : :
9314 : 2140 : return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
9315 : : }
9316 : :
9317 : : /* Fold a call EXPR (which may be null) to __builtin_strlen with argument
9318 : : ARG. */
9319 : :
9320 : : static tree
9321 : 419144 : fold_builtin_strlen (location_t loc, tree expr, tree type, tree arg)
9322 : : {
9323 : 419144 : if (!validate_arg (arg, POINTER_TYPE))
9324 : : return NULL_TREE;
9325 : : else
9326 : : {
9327 : 419141 : c_strlen_data lendata = { };
9328 : 419141 : tree len = c_strlen (arg, 0, &lendata);
9329 : :
9330 : 419141 : if (len)
9331 : 2359 : return fold_convert_loc (loc, type, len);
9332 : :
9333 : : /* TODO: Move this to gimple-ssa-warn-access once the pass runs
9334 : : also early enough to detect invalid reads in multidimensional
9335 : : arrays and struct members. */
9336 : 416782 : if (!lendata.decl)
9337 : 409604 : c_strlen (arg, 1, &lendata);
9338 : :
9339 : 416782 : if (lendata.decl)
9340 : : {
9341 : 7192 : if (EXPR_HAS_LOCATION (arg))
9342 : 2800 : loc = EXPR_LOCATION (arg);
9343 : 4392 : else if (loc == UNKNOWN_LOCATION)
9344 : 0 : loc = input_location;
9345 : 7192 : warn_string_no_nul (loc, expr, "strlen", arg, lendata.decl);
9346 : : }
9347 : :
9348 : 416782 : return NULL_TREE;
9349 : : }
9350 : : }
9351 : :
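/* Editor's sketch (illustrative; ARR and the function names are the
   editor's): the two sides of the fold above.  A constant length folds
   outright; an unterminated argument is left alone but feeds the
   missing-NUL diagnostic via lendata.decl.  */

#include <string.h>

static const char arr[3] = { 'a', 'b', 'c' };   /* no terminating NUL */

unsigned long
len_const (void)
{
  return strlen ("abc");   /* c_strlen succeeds: folded to 3 */
}

unsigned long
len_unterminated (void)
{
  return strlen (arr);     /* not folded; warn_string_no_nul can flag ARR */
}
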
9352 : : /* Fold a call to __builtin_inf or __builtin_huge_val. */
9353 : :
9354 : : static tree
9355 : 230763 : fold_builtin_inf (location_t loc, tree type, int warn)
9356 : : {
9357 : : /* __builtin_inff is intended to be usable to define INFINITY on all
9358 : : targets. If an infinity is not available, INFINITY expands "to a
9359 : : positive constant of type float that overflows at translation
9360 : : time", footnote "In this case, using INFINITY will violate the
9361 : : constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
9362 : : Thus we pedwarn to ensure this constraint violation is
9363 : : diagnosed. */
9364 : 922243 : if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
9365 : 0 : pedwarn (loc, 0, "target format does not support infinity");
9366 : :
9367 : 230763 : return build_real (type, dconstinf);
9368 : : }
9369 : :
9370 : : /* Fold function call to builtin sincos, sincosf, or sincosl. Return
9371 : : NULL_TREE if no simplification can be made. */
9372 : :
9373 : : static tree
9374 : 145 : fold_builtin_sincos (location_t loc,
9375 : : tree arg0, tree arg1, tree arg2)
9376 : : {
9377 : 145 : tree type;
9378 : 145 : tree fndecl, call = NULL_TREE;
9379 : :
9380 : 145 : if (!validate_arg (arg0, REAL_TYPE)
9381 : 145 : || !validate_arg (arg1, POINTER_TYPE)
9382 : 290 : || !validate_arg (arg2, POINTER_TYPE))
9383 : : return NULL_TREE;
9384 : :
9385 : 145 : type = TREE_TYPE (arg0);
9386 : :
9387 : : /* Calculate the result when the argument is a constant. */
9388 : 145 : built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
9389 : 145 : if (fn == END_BUILTINS)
9390 : : return NULL_TREE;
9391 : :
9392 : : /* Canonicalize sincos to cexpi. */
9393 : 145 : if (TREE_CODE (arg0) == REAL_CST)
9394 : : {
9395 : 86 : tree complex_type = build_complex_type (type);
9396 : 86 : call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
9397 : : }
9398 : 86 : if (!call)
9399 : : {
9400 : 59 : if (!targetm.libc_has_function (function_c99_math_complex, type)
9401 : 59 : || !builtin_decl_implicit_p (fn))
9402 : : return NULL_TREE;
9403 : 59 : fndecl = builtin_decl_explicit (fn);
9404 : 59 : call = build_call_expr_loc (loc, fndecl, 1, arg0);
9405 : 59 : call = builtin_save_expr (call);
9406 : : }
9407 : :
9408 : 145 : tree ptype = build_pointer_type (type);
9409 : 145 : arg1 = fold_convert (ptype, arg1);
9410 : 145 : arg2 = fold_convert (ptype, arg2);
9411 : 145 : return build2 (COMPOUND_EXPR, void_type_node,
9412 : : build2 (MODIFY_EXPR, void_type_node,
9413 : : build_fold_indirect_ref_loc (loc, arg1),
9414 : : fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
9415 : : build2 (MODIFY_EXPR, void_type_node,
9416 : : build_fold_indirect_ref_loc (loc, arg2),
9417 : 145 : fold_build1_loc (loc, REALPART_EXPR, type, call)));
9418 : : }
9419 : :
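/* Editor's sketch (illustrative): the cexpi canonicalization above, using
   the identity cexpi (x) == cexp (I * x) == cos (x) + I * sin (x).  */

#include <complex.h>

void
sincos_demo (double x, double *sinp, double *cosp)
{
  double _Complex e = cexp (I * x);   /* one shared call, cf. builtin_save_expr */
  *sinp = cimag (e);                  /* IMAGPART_EXPR */
  *cosp = creal (e);                  /* REALPART_EXPR */
}
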
9420 : : /* Fold a function call to builtin memcmp with arguments ARG1, ARG2 and LEN.
9421 : : Return NULL_TREE if no simplification can be made. */
9422 : :
9423 : : static tree
9424 : 2392126 : fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
9425 : : {
9426 : 2392126 : if (!validate_arg (arg1, POINTER_TYPE)
9427 : 2392126 : || !validate_arg (arg2, POINTER_TYPE)
9428 : 4784252 : || !validate_arg (len, INTEGER_TYPE))
9429 : : return NULL_TREE;
9430 : :
9431 : : /* If the LEN parameter is zero, return zero. */
9432 : 2392126 : if (integer_zerop (len))
9433 : 0 : return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
9434 : 0 : arg1, arg2);
9435 : :
9436 : : /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9437 : 2392126 : if (operand_equal_p (arg1, arg2, 0))
9438 : 739 : return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9439 : :
9440 : : /* If the LEN parameter is one, return an expression corresponding to
9441 : : (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
9442 : 2391387 : if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
9443 : : {
9444 : 19599 : tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9445 : 19599 : tree cst_uchar_ptr_node
9446 : 19599 : = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9447 : :
9448 : 19599 : tree ind1
9449 : 19599 : = fold_convert_loc (loc, integer_type_node,
9450 : : build1 (INDIRECT_REF, cst_uchar_node,
9451 : : fold_convert_loc (loc,
9452 : : cst_uchar_ptr_node,
9453 : : arg1)));
9454 : 19599 : tree ind2
9455 : 19599 : = fold_convert_loc (loc, integer_type_node,
9456 : : build1 (INDIRECT_REF, cst_uchar_node,
9457 : : fold_convert_loc (loc,
9458 : : cst_uchar_ptr_node,
9459 : : arg2)));
9460 : 19599 : return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9461 : : }
9462 : :
9463 : : return NULL_TREE;
9464 : : }
9465 : :
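/* Editor's sketch (illustrative): the len == 1 fold above, at source
   level.  */

int
cmp_one_byte (const void *a, const void *b)
{
  /* memcmp (a, b, 1) is folded to the difference of the first bytes:  */
  return *(const unsigned char *) a - *(const unsigned char *) b;
}
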
9466 : : /* Fold a call to builtin isascii with argument ARG. */
9467 : :
9468 : : static tree
9469 : 211 : fold_builtin_isascii (location_t loc, tree arg)
9470 : : {
9471 : 211 : if (!validate_arg (arg, INTEGER_TYPE))
9472 : : return NULL_TREE;
9473 : : else
9474 : : {
9475 : : /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9476 : 211 : arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9477 : : build_int_cst (integer_type_node,
9478 : : ~ HOST_WIDE_INT_UC (0x7f)));
9479 : 211 : return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9480 : 211 : arg, integer_zero_node);
9481 : : }
9482 : : }
9483 : :
9484 : : /* Fold a call to builtin toascii with argument ARG. */
9485 : :
9486 : : static tree
9487 : 168 : fold_builtin_toascii (location_t loc, tree arg)
9488 : : {
9489 : 168 : if (!validate_arg (arg, INTEGER_TYPE))
9490 : : return NULL_TREE;
9491 : :
9492 : : /* Transform toascii(c) -> (c & 0x7f). */
9493 : 168 : return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9494 : : build_int_cst (integer_type_node, 0x7f));
9495 : : }
9496 : :
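/* Editor's sketch (illustrative; names are the editor's): the two
   bit-twiddling rewrites above in plain C.  */

int my_isascii (int c) { return (c & ~0x7f) == 0; }   /* true iff 0 <= c <= 127 */
int my_toascii (int c) { return c & 0x7f; }           /* keep the low 7 bits */
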
9497 : : /* Fold a call to builtin isdigit with argument ARG. */
9498 : :
9499 : : static tree
9500 : 325 : fold_builtin_isdigit (location_t loc, tree arg)
9501 : : {
9502 : 325 : if (!validate_arg (arg, INTEGER_TYPE))
9503 : : return NULL_TREE;
9504 : : else
9505 : : {
9506 : : /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9507 : : /* According to the C standard, isdigit is unaffected by locale.
9508 : : However, it definitely is affected by the target character set. */
9509 : 313 : unsigned HOST_WIDE_INT target_digit0
9510 : 313 : = lang_hooks.to_target_charset ('0');
9511 : :
9512 : 313 : if (target_digit0 == 0)
9513 : : return NULL_TREE;
9514 : :
9515 : 313 : arg = fold_convert_loc (loc, unsigned_type_node, arg);
9516 : 313 : arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9517 : : build_int_cst (unsigned_type_node, target_digit0));
9518 : 313 : return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9519 : : build_int_cst (unsigned_type_node, 9));
9520 : : }
9521 : : }
9522 : :
9523 : : /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
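/* Editor's sketch (illustrative): the single-comparison rewrite above.
   Subtracting '0' in unsigned arithmetic wraps any c < '0' around to a
   huge value, so one comparison replaces the usual pair of range checks.  */

int
my_isdigit (int c)
{
  return (unsigned) c - '0' <= 9;
}
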
9524 : :
9525 : : static tree
9526 : 364904 : fold_builtin_fabs (location_t loc, tree arg, tree type)
9527 : : {
9528 : 364904 : if (!validate_arg (arg, REAL_TYPE))
9529 : : return NULL_TREE;
9530 : :
9531 : 364819 : arg = fold_convert_loc (loc, type, arg);
9532 : 364819 : return fold_build1_loc (loc, ABS_EXPR, type, arg);
9533 : : }
9534 : :
9535 : : /* Fold a call to abs, labs, llabs, imaxabs, uabs, ulabs, ullabs or uimaxabs
9536 : : with argument ARG. */
9537 : :
9538 : : static tree
9539 : 94635 : fold_builtin_abs (location_t loc, tree arg, tree type)
9540 : : {
9541 : 94635 : if (!validate_arg (arg, INTEGER_TYPE))
9542 : : return NULL_TREE;
9543 : :
9544 : 94608 : if (TYPE_UNSIGNED (type))
9545 : : {
9546 : 1168 : if (TYPE_PRECISION (TREE_TYPE (arg))
9547 : 1168 : != TYPE_PRECISION (type)
9548 : 1168 : || TYPE_UNSIGNED (TREE_TYPE (arg)))
9549 : : return NULL_TREE;
9550 : 1168 : return fold_build1_loc (loc, ABSU_EXPR, type, arg);
9551 : : }
9552 : 93440 : arg = fold_convert_loc (loc, type, arg);
9553 : 93440 : return fold_build1_loc (loc, ABS_EXPR, type, arg);
9554 : : }
9555 : :
9556 : : /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9557 : :
9558 : : static tree
9559 : 116403 : fold_builtin_carg (location_t loc, tree arg, tree type)
9560 : : {
9561 : 116403 : if (validate_arg (arg, COMPLEX_TYPE)
9562 : 116403 : && SCALAR_FLOAT_TYPE_P (TREE_TYPE (TREE_TYPE (arg))))
9563 : : {
9564 : 116403 : tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9565 : :
9566 : 116403 : if (atan2_fn)
9567 : : {
9568 : 111908 : tree new_arg = builtin_save_expr (arg);
9569 : 111908 : tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9570 : 111908 : tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9571 : 111908 : return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9572 : : }
9573 : : }
9574 : :
9575 : : return NULL_TREE;
9576 : : }
9577 : :
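/* Editor's sketch (illustrative): the atan2 rewrite above.  */

#include <complex.h>
#include <math.h>

double
my_carg (double _Complex z)
{
  return atan2 (cimag (z), creal (z));   /* carg (a + b*I) == atan2 (b, a) */
}
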
9578 : : /* Fold a call to builtin frexp; we can assume the base is 2. */
9579 : :
9580 : : static tree
9581 : 115610 : fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9582 : : {
9583 : 115610 : if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9584 : : return NULL_TREE;
9585 : :
9586 : 115610 : STRIP_NOPS (arg0);
9587 : :
9588 : 115610 : if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9589 : : return NULL_TREE;
9590 : :
9591 : 2210 : arg1 = build_fold_indirect_ref_loc (loc, arg1);
9592 : :
9593 : : /* Proceed if a valid pointer type was passed in. */
9594 : 2210 : if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9595 : : {
9596 : 2210 : const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9597 : 2210 : tree frac, exp, res;
9598 : :
9599 : 2210 : switch (value->cl)
9600 : : {
9601 : 276 : case rvc_zero:
9602 : 276 : case rvc_nan:
9603 : 276 : case rvc_inf:
9604 : : /* For +-0, return (*exp = 0, +-0). */
9605 : : /* For +-NaN or +-Inf, *exp is unspecified, but something should
9606 : : be stored there so that it isn't read from an uninitialized object.
9607 : : As glibc and newlib store *exp = 0 for +-Inf/NaN, storing
9608 : : 0 here as well is easiest. */
9609 : 276 : exp = integer_zero_node;
9610 : 276 : frac = arg0;
9611 : 276 : break;
9612 : 1934 : case rvc_normal:
9613 : 1934 : {
9614 : : /* Since the frexp function always expects base 2, and in
9615 : : GCC normalized significands are already in the range
9616 : : [0.5, 1.0), we have exactly what frexp wants. */
9617 : 1934 : REAL_VALUE_TYPE frac_rvt = *value;
9618 : 1934 : SET_REAL_EXP (&frac_rvt, 0);
9619 : 1934 : frac = build_real (rettype, frac_rvt);
9620 : 1934 : exp = build_int_cst (integer_type_node, REAL_EXP (value));
9621 : : }
9622 : 1934 : break;
9623 : 0 : default:
9624 : 0 : gcc_unreachable ();
9625 : : }
9626 : :
9627 : : /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
9628 : 2210 : arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9629 : 2210 : TREE_SIDE_EFFECTS (arg1) = 1;
9630 : 2210 : res = fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9631 : 2210 : suppress_warning (res, OPT_Wunused_value);
9632 : 2210 : return res;
9633 : : }
9634 : :
9635 : : return NULL_TREE;
9636 : : }
9637 : :
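/* Editor's sketch (illustrative): values produced by the constant fold
   above.  */

#include <math.h>

void
frexp_demo (void)
{
  int e1, e2;
  double f = frexp (8.0, &e1);    /* rvc_normal: f == 0.5, e1 == 4 */
  double g = frexp (-0.0, &e2);   /* rvc_zero:   g == -0.0, e2 == 0 */
  (void) f; (void) g;
}
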
9638 : : /* Fold a call to builtin modf. */
9639 : :
9640 : : static tree
9641 : 75894 : fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9642 : : {
9643 : 75894 : if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9644 : : return NULL_TREE;
9645 : :
9646 : 75894 : STRIP_NOPS (arg0);
9647 : :
9648 : 75894 : if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9649 : : return NULL_TREE;
9650 : :
9651 : 2474 : arg1 = build_fold_indirect_ref_loc (loc, arg1);
9652 : :
9653 : : /* Proceed if a valid pointer type was passed in. */
9654 : 2474 : if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9655 : : {
9656 : 2474 : const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9657 : 2474 : REAL_VALUE_TYPE trunc, frac;
9658 : 2474 : tree res;
9659 : :
9660 : 2474 : switch (value->cl)
9661 : : {
9662 : 228 : case rvc_nan:
9663 : 228 : case rvc_zero:
9664 : : /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9665 : 228 : trunc = frac = *value;
9666 : 228 : break;
9667 : 96 : case rvc_inf:
9668 : : /* For +-Inf, return (*arg1 = arg0, +-0). */
9669 : 96 : frac = dconst0;
9670 : 96 : frac.sign = value->sign;
9671 : 96 : trunc = *value;
9672 : 96 : break;
9673 : 2150 : case rvc_normal:
9674 : : /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9675 : 2150 : real_trunc (&trunc, VOIDmode, value);
9676 : 2150 : real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9677 : : /* If the original number was negative and already
9678 : : integral, then the fractional part is -0.0. */
9679 : 2150 : if (value->sign && frac.cl == rvc_zero)
9680 : 69 : frac.sign = value->sign;
9681 : : break;
9682 : : }
9683 : :
9684 : : /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9685 : 2474 : arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9686 : : build_real (rettype, trunc));
9687 : 2474 : TREE_SIDE_EFFECTS (arg1) = 1;
9688 : 2474 : res = fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9689 : : build_real (rettype, frac));
9690 : 2474 : suppress_warning (res, OPT_Wunused_value);
9691 : 2474 : return res;
9692 : : }
9693 : :
9694 : : return NULL_TREE;
9695 : : }
9696 : :
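/* Editor's sketch (illustrative): the constant cases handled above,
   including the negative-and-already-integral corner that yields -0.0.  */

#include <math.h>

void
modf_demo (void)
{
  double ip;
  double f1 = modf (2.5, &ip);    /* rvc_normal: ip == 2.0, f1 == 0.5 */
  double f2 = modf (-3.0, &ip);   /* ip == -3.0, f2 == -0.0 */
  (void) f1; (void) f2;
}
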
9697 : : /* Given a location LOC, an interclass builtin function decl FNDECL
9698 : : and its single argument ARG, return a folded expression computing
9699 : : the same, or NULL_TREE if we either couldn't or didn't want to fold
9700 : : (the latter happens if there's an RTL instruction available). */
9701 : :
9702 : : static tree
9703 : 1381660 : fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9704 : : {
9705 : 1381660 : machine_mode mode;
9706 : :
9707 : 1381660 : if (!validate_arg (arg, REAL_TYPE))
9708 : : return NULL_TREE;
9709 : :
9710 : 1381660 : if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9711 : : return NULL_TREE;
9712 : :
9713 : 1381660 : mode = TYPE_MODE (TREE_TYPE (arg));
9714 : :
9715 : 9667778 : bool is_ibm_extended = MODE_COMPOSITE_P (mode);
9716 : :
9717 : : /* If there is no optab, try generic code. */
9718 : 1381660 : switch (DECL_FUNCTION_CODE (fndecl))
9719 : : {
9720 : 266999 : tree result;
9721 : :
9722 : 266999 : CASE_FLT_FN (BUILT_IN_ISINF):
9723 : 266999 : {
9724 : : /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9725 : 266999 : tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9726 : 266999 : tree type = TREE_TYPE (arg);
9727 : 266999 : REAL_VALUE_TYPE r;
9728 : 266999 : char buf[128];
9729 : :
9730 : 266999 : if (is_ibm_extended)
9731 : : {
9732 : : /* NaN and Inf are encoded in the high-order double value
9733 : : only. The low-order value is not significant. */
9734 : 0 : type = double_type_node;
9735 : 0 : mode = DFmode;
9736 : 0 : arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9737 : : }
9738 : 266999 : get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
9739 : 266999 : real_from_string3 (&r, buf, mode);
9740 : 266999 : result = build_call_expr (isgr_fn, 2,
9741 : : fold_build1_loc (loc, ABS_EXPR, type, arg),
9742 : : build_real (type, r));
9743 : 266999 : return result;
9744 : : }
9745 : 579752 : CASE_FLT_FN (BUILT_IN_FINITE):
9746 : 579752 : case BUILT_IN_ISFINITE:
9747 : 579752 : {
9748 : : /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9749 : 579752 : tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9750 : 579752 : tree type = TREE_TYPE (arg);
9751 : 579752 : REAL_VALUE_TYPE r;
9752 : 579752 : char buf[128];
9753 : :
9754 : 579752 : if (is_ibm_extended)
9755 : : {
9756 : : /* NaN and Inf are encoded in the high-order double value
9757 : : only. The low-order value is not significant. */
9758 : 0 : type = double_type_node;
9759 : 0 : mode = DFmode;
9760 : 0 : arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9761 : : }
9762 : 579752 : get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
9763 : 579752 : real_from_string3 (&r, buf, mode);
9764 : 579752 : result = build_call_expr (isle_fn, 2,
9765 : : fold_build1_loc (loc, ABS_EXPR, type, arg),
9766 : : build_real (type, r));
9767 : : /*result = fold_build2_loc (loc, UNGT_EXPR,
9768 : : TREE_TYPE (TREE_TYPE (fndecl)),
9769 : : fold_build1_loc (loc, ABS_EXPR, type, arg),
9770 : : build_real (type, r));
9771 : : result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9772 : : TREE_TYPE (TREE_TYPE (fndecl)),
9773 : : result);*/
9774 : 579752 : return result;
9775 : : }
9776 : 265194 : case BUILT_IN_ISNORMAL:
9777 : 265194 : {
9778 : : /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9779 : : islessequal(fabs(x),DBL_MAX). */
9780 : 265194 : tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9781 : 265194 : tree type = TREE_TYPE (arg);
9782 : 265194 : tree orig_arg, max_exp, min_exp;
9783 : 265194 : machine_mode orig_mode = mode;
9784 : 265194 : REAL_VALUE_TYPE rmax, rmin;
9785 : 265194 : char buf[128];
9786 : :
9787 : 265194 : orig_arg = arg = builtin_save_expr (arg);
9788 : 265194 : if (is_ibm_extended)
9789 : : {
9790 : : /* Use double to test the normal range of IBM extended
9791 : : precision. Emin for IBM extended precision is
9792 : : different to emin for IEEE double, being 53 higher
9793 : : since the low double exponent is at least 53 lower
9794 : : than the high double exponent. */
9795 : 0 : type = double_type_node;
9796 : 0 : mode = DFmode;
9797 : 0 : arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9798 : : }
9799 : 265194 : arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
9800 : :
9801 : 265194 : get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
9802 : 265194 : real_from_string3 (&rmax, buf, mode);
9803 : 265194 : if (DECIMAL_FLOAT_MODE_P (mode))
9804 : 1 : sprintf (buf, "1E%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
9805 : : else
9806 : 265193 : sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
9807 : 265194 : real_from_string3 (&rmin, buf, orig_mode);
9808 : 265194 : max_exp = build_real (type, rmax);
9809 : 265194 : min_exp = build_real (type, rmin);
9810 : :
9811 : 265194 : max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
9812 : 265194 : if (is_ibm_extended)
9813 : : {
9814 : : /* Testing the high end of the range is done just using
9815 : : the high double, using the same test as isfinite().
9816 : : For the subnormal end of the range we first test the
9817 : : high double, then if its magnitude is equal to the
9818 : : limit of 0x1p-969, we test whether the low double is
9819 : : non-zero and opposite sign to the high double. */
9820 : 0 : tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
9821 : 0 : tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9822 : 0 : tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
9823 : 0 : tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
9824 : : arg, min_exp);
9825 : 0 : tree as_complex = build1 (VIEW_CONVERT_EXPR,
9826 : : complex_double_type_node, orig_arg);
9827 : 0 : tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
9828 : 0 : tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
9829 : 0 : tree zero = build_real (type, dconst0);
9830 : 0 : tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
9831 : 0 : tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
9832 : 0 : tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
9833 : 0 : tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
9834 : : fold_build3 (COND_EXPR,
9835 : : integer_type_node,
9836 : : hilt, logt, lolt));
9837 : 0 : eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
9838 : : eq_min, ok_lo);
9839 : 0 : min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
9840 : : gt_min, eq_min);
9841 : : }
9842 : : else
9843 : : {
9844 : 265194 : tree const isge_fn
9845 : 265194 : = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9846 : 265194 : min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
9847 : : }
9848 : 265194 : result = fold_build2 (BIT_AND_EXPR, integer_type_node,
9849 : : max_exp, min_exp);
9850 : 265194 : return result;
9851 : : }
9852 : 268181 : CASE_FLT_FN (BUILT_IN_ISNAN):
9853 : 268181 : case BUILT_IN_ISNAND32:
9854 : 268181 : case BUILT_IN_ISNAND64:
9855 : 268181 : case BUILT_IN_ISNAND128:
9856 : 268181 : {
9857 : : /* In IBM extended NaN and Inf are encoded in the high-order double
9858 : : value only. The low-order value is not significant. */
9859 : 268181 : if (is_ibm_extended)
9860 : 0 : arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
9861 : 268181 : arg = builtin_save_expr (arg);
9862 : 268181 : tree type = TREE_TYPE (TREE_TYPE (fndecl));
9863 : 268181 : return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9864 : : }
9865 : : default:
9866 : : break;
9867 : : }
9868 : :
9869 : : return NULL_TREE;
9870 : : }
9871 : :
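/* Editor's sketch (illustrative; DBL_MAX/DBL_MIN stand in for the
   per-mode limits computed via get_max_float): the generic rewrites above
   at source level.  */

#include <float.h>
#include <math.h>

int my_isinf    (double x) { return isgreater (fabs (x), DBL_MAX); }
int my_isfinite (double x) { return islessequal (fabs (x), DBL_MAX); }
int my_isnormal (double x) { return isgreaterequal (fabs (x), DBL_MIN)
                                    & islessequal (fabs (x), DBL_MAX); }
int my_isnan    (double x) { return isunordered (x, x); }  /* UNORDERED_EXPR */
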
9872 : : /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
9873 : : ARG is the argument for the call. */
9874 : :
9875 : : static tree
9876 : 1146022 : fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9877 : : {
9878 : 1146022 : tree type = TREE_TYPE (TREE_TYPE (fndecl));
9879 : :
9880 : 1146022 : if (!validate_arg (arg, REAL_TYPE))
9881 : : return NULL_TREE;
9882 : :
9883 : 1146022 : switch (builtin_index)
9884 : : {
9885 : 268654 : case BUILT_IN_ISINF:
9886 : 268654 : if (tree_expr_infinite_p (arg))
9887 : 0 : return omit_one_operand_loc (loc, type, integer_one_node, arg);
9888 : 268654 : if (!tree_expr_maybe_infinite_p (arg))
9889 : 121 : return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9890 : : return NULL_TREE;
9891 : :
9892 : 708 : case BUILT_IN_ISINF_SIGN:
9893 : 708 : {
9894 : : /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9895 : : /* In a boolean context, GCC will fold the inner COND_EXPR to
9896 : : 1. So e.g. "if (isinf_sign(x))" would be folded to just
9897 : : "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9898 : 708 : tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
9899 : 708 : tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
9900 : 708 : tree tmp = NULL_TREE;
9901 : :
9902 : 708 : arg = builtin_save_expr (arg);
9903 : :
9904 : 708 : if (signbit_fn && isinf_fn)
9905 : : {
9906 : 708 : tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9907 : 708 : tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9908 : :
9909 : 708 : signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9910 : : signbit_call, integer_zero_node);
9911 : 708 : isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9912 : : isinf_call, integer_zero_node);
9913 : :
9914 : 708 : tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9915 : : integer_minus_one_node, integer_one_node);
9916 : 708 : tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9917 : : isinf_call, tmp,
9918 : : integer_zero_node);
9919 : : }
9920 : :
9921 : : return tmp;
9922 : : }
9923 : :
9924 : 579976 : case BUILT_IN_ISFINITE:
9925 : 579976 : if (tree_expr_finite_p (arg))
9926 : 224 : return omit_one_operand_loc (loc, type, integer_one_node, arg);
9927 : 579752 : if (tree_expr_nan_p (arg) || tree_expr_infinite_p (arg))
9928 : 0 : return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9929 : : return NULL_TREE;
9930 : :
9931 : 268305 : case BUILT_IN_ISNAN:
9932 : 268305 : if (tree_expr_nan_p (arg))
9933 : 0 : return omit_one_operand_loc (loc, type, integer_one_node, arg);
9934 : 268305 : if (!tree_expr_maybe_nan_p (arg))
9935 : 124 : return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9936 : : return NULL_TREE;
9937 : :
9938 : 28379 : case BUILT_IN_ISSIGNALING:
9939 : : /* Folding to true for REAL_CST is done in fold_const_call_ss.
9940 : : Don't use tree_expr_signaling_nan_p (arg) -> integer_one_node
9941 : : and !tree_expr_maybe_signaling_nan_p (arg) -> integer_zero_node
9942 : : here, so there is some possibility of __builtin_issignaling working
9943 : : without -fsignaling-nans. Especially when -fno-signaling-nans is
9944 : : the default. */
9945 : 28379 : if (!tree_expr_maybe_nan_p (arg))
9946 : 0 : return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9947 : : return NULL_TREE;
9948 : :
9949 : 0 : default:
9950 : 0 : gcc_unreachable ();
9951 : : }
9952 : : }
9953 : :
9954 : : /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
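/* Editor's sketch (illustrative): the isinf_sign expansion described in
   the comment above.  */

#include <math.h>

int
my_isinf_sign (double x)
{
  return isinf (x) ? (signbit (x) ? -1 : 1) : 0;
}
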
9955 : : This builtin will generate code to return the appropriate floating
9956 : : point classification depending on the value of the floating point
9957 : : number passed in. The possible return values must be supplied as
9958 : : int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9959 : : FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
9960 : : one floating point argument which is "type generic". */
9961 : :
9962 : : static tree
9963 : 115356 : fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
9964 : : {
9965 : 115356 : tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9966 : : arg, type, res, tmp;
9967 : 115356 : machine_mode mode;
9968 : 115356 : REAL_VALUE_TYPE r;
9969 : 115356 : char buf[128];
9970 : :
9971 : : /* Verify the required arguments in the original call. */
9972 : 115356 : if (nargs != 6
9973 : 115356 : || !validate_arg (args[0], INTEGER_TYPE)
9974 : 115356 : || !validate_arg (args[1], INTEGER_TYPE)
9975 : 115356 : || !validate_arg (args[2], INTEGER_TYPE)
9976 : 115356 : || !validate_arg (args[3], INTEGER_TYPE)
9977 : 115356 : || !validate_arg (args[4], INTEGER_TYPE)
9978 : 230712 : || !validate_arg (args[5], REAL_TYPE))
9979 : : return NULL_TREE;
9980 : :
9981 : 115356 : fp_nan = args[0];
9982 : 115356 : fp_infinite = args[1];
9983 : 115356 : fp_normal = args[2];
9984 : 115356 : fp_subnormal = args[3];
9985 : 115356 : fp_zero = args[4];
9986 : 115356 : arg = args[5];
9987 : 115356 : type = TREE_TYPE (arg);
9988 : 115356 : mode = TYPE_MODE (type);
9989 : 115356 : arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9990 : :
9991 : : /* fpclassify(x) ->
9992 : : isnan(x) ? FP_NAN :
9993 : : (fabs(x) == Inf ? FP_INFINITE :
9994 : : (fabs(x) >= DBL_MIN ? FP_NORMAL :
9995 : : (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9996 : :
9997 : 115356 : tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9998 : : build_real (type, dconst0));
9999 : 115356 : res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
10000 : : tmp, fp_zero, fp_subnormal);
10001 : :
10002 : 115356 : if (DECIMAL_FLOAT_MODE_P (mode))
10003 : 3 : sprintf (buf, "1E%d", REAL_MODE_FORMAT (mode)->emin - 1);
10004 : : else
10005 : 115353 : sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
10006 : 115356 : real_from_string3 (&r, buf, mode);
10007 : 115356 : tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
10008 : : arg, build_real (type, r));
10009 : 115356 : res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
10010 : : fp_normal, res);
10011 : :
10012 : 115356 : if (tree_expr_maybe_infinite_p (arg))
10013 : : {
10014 : 115268 : tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
10015 : : build_real (type, dconstinf));
10016 : 115268 : res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
10017 : : fp_infinite, res);
10018 : : }
10019 : :
10020 : 115356 : if (tree_expr_maybe_nan_p (arg))
10021 : : {
10022 : 115266 : tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
10023 : 115266 : res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
10024 : : res, fp_nan);
10025 : : }
10026 : :
10027 : : return res;
10028 : : }
10029 : :
10030 : : /* Fold a call to an unordered comparison function such as
10031 : : __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
10032 : : being called and ARG0 and ARG1 are the arguments for the call.
10033 : : UNORDERED_CODE and ORDERED_CODE are comparison codes that give
10034 : : the opposite of the desired result. UNORDERED_CODE is used
10035 : : for modes that can hold NaNs and ORDERED_CODE is used for
10036 : : the rest. */
10037 : :
10038 : : static tree
10039 : 2972777 : fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
10040 : : enum tree_code unordered_code,
10041 : : enum tree_code ordered_code)
10042 : : {
10043 : 2972777 : tree type = TREE_TYPE (TREE_TYPE (fndecl));
10044 : 2972777 : enum tree_code code;
10045 : 2972777 : tree type0, type1;
10046 : 2972777 : enum tree_code code0, code1;
10047 : 2972777 : tree cmp_type = NULL_TREE;
10048 : :
10049 : 2972777 : type0 = TREE_TYPE (arg0);
10050 : 2972777 : type1 = TREE_TYPE (arg1);
10051 : :
10052 : 2972777 : code0 = TREE_CODE (type0);
10053 : 2972777 : code1 = TREE_CODE (type1);
10054 : :
10055 : 2972777 : if (code0 == REAL_TYPE && code1 == REAL_TYPE)
10056 : : /* Choose the wider of two real types. */
10057 : 2972597 : cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
10058 : 2972597 : ? type0 : type1;
10059 : 180 : else if (code0 == REAL_TYPE
10060 : 91 : && (code1 == INTEGER_TYPE || code1 == BITINT_TYPE))
10061 : : cmp_type = type0;
10062 : 89 : else if ((code0 == INTEGER_TYPE || code0 == BITINT_TYPE)
10063 : 89 : && code1 == REAL_TYPE)
10064 : 135 : cmp_type = type1;
10065 : :
10066 : 2972777 : arg0 = fold_convert_loc (loc, cmp_type, arg0);
10067 : 2972777 : arg1 = fold_convert_loc (loc, cmp_type, arg1);
10068 : :
10069 : 2972777 : if (unordered_code == UNORDERED_EXPR)
10070 : : {
10071 : 266241 : if (tree_expr_nan_p (arg0) || tree_expr_nan_p (arg1))
10072 : 16 : return omit_two_operands_loc (loc, type, integer_one_node, arg0, arg1);
10073 : 266225 : if (!tree_expr_maybe_nan_p (arg0) && !tree_expr_maybe_nan_p (arg1))
10074 : 141 : return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
10075 : 266084 : return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
10076 : : }
10077 : :
10078 : 2709387 : code = (tree_expr_maybe_nan_p (arg0) || tree_expr_maybe_nan_p (arg1))
10079 : 2706536 : ? unordered_code : ordered_code;
10080 : 2706536 : return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
10081 : 2706536 : fold_build2_loc (loc, code, type, arg0, arg1));
10082 : : }
10083 : :
10084 : : /* Fold a call to __builtin_iseqsig(). ARG0 and ARG1 are the arguments.
10085 : : After choosing the wider floating-point type for the comparison,
10086 : : the code is folded to:
10087 : : SAVE_EXPR<ARG0> >= SAVE_EXPR<ARG1> && SAVE_EXPR<ARG0> <= SAVE_EXPR<ARG1> */
10088 : :
10089 : : static tree
10090 : 709 : fold_builtin_iseqsig (location_t loc, tree arg0, tree arg1)
10091 : : {
10092 : 709 : tree type0, type1;
10093 : 709 : enum tree_code code0, code1;
10094 : 709 : tree cmp1, cmp2, cmp_type = NULL_TREE;
10095 : :
10096 : 709 : type0 = TREE_TYPE (arg0);
10097 : 709 : type1 = TREE_TYPE (arg1);
10098 : :
10099 : 709 : code0 = TREE_CODE (type0);
10100 : 709 : code1 = TREE_CODE (type1);
10101 : :
10102 : 709 : if (code0 == REAL_TYPE && code1 == REAL_TYPE)
10103 : : /* Choose the wider of two real types. */
10104 : 690 : cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
10105 : 690 : ? type0 : type1;
10106 : 19 : else if (code0 == REAL_TYPE
10107 : 6 : && (code1 == INTEGER_TYPE || code1 == BITINT_TYPE))
10108 : : cmp_type = type0;
10109 : 13 : else if ((code0 == INTEGER_TYPE || code0 == BITINT_TYPE)
10110 : 13 : && code1 == REAL_TYPE)
10111 : 13 : cmp_type = type1;
10112 : :
10113 : 709 : arg0 = builtin_save_expr (fold_convert_loc (loc, cmp_type, arg0));
10114 : 709 : arg1 = builtin_save_expr (fold_convert_loc (loc, cmp_type, arg1));
10115 : :
10116 : 709 : cmp1 = fold_build2_loc (loc, GE_EXPR, integer_type_node, arg0, arg1);
10117 : 709 : cmp2 = fold_build2_loc (loc, LE_EXPR, integer_type_node, arg0, arg1);
10118 : :
10119 : 709 : return fold_build2_loc (loc, TRUTH_AND_EXPR, integer_type_node, cmp1, cmp2);
10120 : : }
10121 : :
10122 : : /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
10123 : : arithmetics if it can never overflow, or into internal functions that
10124 : : return both result of arithmetics and overflowed boolean flag in
10125 : : a complex integer result, or some other check for overflow.
10126 : : Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
10127 : : checking part of that. */
10128 : :
10129 : : static tree
10130 : 184547 : fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
10131 : : tree arg0, tree arg1, tree arg2)
10132 : : {
10133 : 184547 : enum internal_fn ifn = IFN_LAST;
10134 : : /* The code of the expression corresponding to the built-in. */
10135 : 184547 : enum tree_code opcode = ERROR_MARK;
10136 : 184547 : bool ovf_only = false;
10137 : :
10138 : 184547 : switch (fcode)
10139 : : {
10140 : : case BUILT_IN_ADD_OVERFLOW_P:
10141 : : ovf_only = true;
10142 : : /* FALLTHRU */
10143 : : case BUILT_IN_ADD_OVERFLOW:
10144 : : case BUILT_IN_SADD_OVERFLOW:
10145 : : case BUILT_IN_SADDL_OVERFLOW:
10146 : : case BUILT_IN_SADDLL_OVERFLOW:
10147 : : case BUILT_IN_UADD_OVERFLOW:
10148 : : case BUILT_IN_UADDL_OVERFLOW:
10149 : : case BUILT_IN_UADDLL_OVERFLOW:
10150 : : opcode = PLUS_EXPR;
10151 : : ifn = IFN_ADD_OVERFLOW;
10152 : : break;
10153 : 15577 : case BUILT_IN_SUB_OVERFLOW_P:
10154 : 15577 : ovf_only = true;
10155 : : /* FALLTHRU */
10156 : 37257 : case BUILT_IN_SUB_OVERFLOW:
10157 : 37257 : case BUILT_IN_SSUB_OVERFLOW:
10158 : 37257 : case BUILT_IN_SSUBL_OVERFLOW:
10159 : 37257 : case BUILT_IN_SSUBLL_OVERFLOW:
10160 : 37257 : case BUILT_IN_USUB_OVERFLOW:
10161 : 37257 : case BUILT_IN_USUBL_OVERFLOW:
10162 : 37257 : case BUILT_IN_USUBLL_OVERFLOW:
10163 : 37257 : opcode = MINUS_EXPR;
10164 : 37257 : ifn = IFN_SUB_OVERFLOW;
10165 : 37257 : break;
10166 : 15857 : case BUILT_IN_MUL_OVERFLOW_P:
10167 : 15857 : ovf_only = true;
10168 : : /* FALLTHRU */
10169 : 115279 : case BUILT_IN_MUL_OVERFLOW:
10170 : 115279 : case BUILT_IN_SMUL_OVERFLOW:
10171 : 115279 : case BUILT_IN_SMULL_OVERFLOW:
10172 : 115279 : case BUILT_IN_SMULLL_OVERFLOW:
10173 : 115279 : case BUILT_IN_UMUL_OVERFLOW:
10174 : 115279 : case BUILT_IN_UMULL_OVERFLOW:
10175 : 115279 : case BUILT_IN_UMULLL_OVERFLOW:
10176 : 115279 : opcode = MULT_EXPR;
10177 : 115279 : ifn = IFN_MUL_OVERFLOW;
10178 : 115279 : break;
10179 : 0 : default:
10180 : 0 : gcc_unreachable ();
10181 : : }
10182 : :
10183 : : /* For the "generic" overloads, the first two arguments can have different
10184 : : types and the last argument determines the target type to use to check
10185 : : for overflow. The arguments of the other overloads all have the same
10186 : : type. */
10187 : 184547 : tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
10188 : :
10189 : : /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
10190 : : arguments are constant, attempt to fold the built-in call into a constant
10191 : : expression indicating whether or not it detected an overflow. */
10192 : 184547 : if (ovf_only
10193 : 43244 : && TREE_CODE (arg0) == INTEGER_CST
10194 : 13131 : && TREE_CODE (arg1) == INTEGER_CST)
10195 : : /* Perform the computation in the target type and check for overflow. */
10196 : 7347 : return omit_one_operand_loc (loc, boolean_type_node,
10197 : 7347 : arith_overflowed_p (opcode, type, arg0, arg1)
10198 : : ? boolean_true_node : boolean_false_node,
10199 : 7347 : arg2);
10200 : :
10201 : 177200 : tree intres, ovfres;
10202 : 177200 : if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10203 : : {
10204 : 12381 : intres = fold_binary_loc (loc, opcode, type,
10205 : : fold_convert_loc (loc, type, arg0),
10206 : : fold_convert_loc (loc, type, arg1));
10207 : 12381 : if (TREE_OVERFLOW (intres))
10208 : 1586 : intres = drop_tree_overflow (intres);
10209 : 24762 : ovfres = (arith_overflowed_p (opcode, type, arg0, arg1)
10210 : 12381 : ? boolean_true_node : boolean_false_node);
10211 : : }
10212 : : else
10213 : : {
10214 : 164819 : tree ctype = build_complex_type (type);
10215 : 164819 : tree call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
10216 : : arg0, arg1);
10217 : 164819 : tree tgt;
10218 : 164819 : if (ovf_only)
10219 : : {
10220 : : tgt = call;
10221 : 164819 : intres = NULL_TREE;
10222 : : }
10223 : : else
10224 : : {
10225 : : /* Force SAVE_EXPR even for calls which satisfy tree_invariant_p_1,
10226 : : as while the call itself is const, the REALPART_EXPR store is
10227 : : certainly not. And in any case, we want just one call,
10228 : : not multiple and trying to CSE them later. */
10229 : 128922 : TREE_SIDE_EFFECTS (call) = 1;
10230 : 128922 : tgt = save_expr (call);
10231 : : }
10232 : 164819 : intres = build1_loc (loc, REALPART_EXPR, type, tgt);
10233 : 164819 : ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
10234 : 164819 : ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
10235 : : }
10236 : :
10237 : 177200 : if (ovf_only)
10238 : 35897 : return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
10239 : :
10240 : 141303 : tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
10241 : 141303 : tree store
10242 : 141303 : = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
10243 : 141303 : return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
10244 : : }
10245 : :
10246 : : /* Fold __builtin_{clz,ctz,clrsb,ffs,parity,popcount}g into corresponding
10247 : : internal function. */
10248 : :
10249 : : static tree
10250 : 233746 : fold_builtin_bit_query (location_t loc, enum built_in_function fcode,
10251 : : tree arg0, tree arg1)
10252 : : {
10253 : 233746 : enum internal_fn ifn;
10254 : 233746 : enum built_in_function fcodei, fcodel, fcodell;
10255 : 233746 : tree arg0_type = TREE_TYPE (arg0);
10256 : 233746 : tree cast_type = NULL_TREE;
10257 : 233746 : int addend = 0;
10258 : :
10259 : 233746 : switch (fcode)
10260 : : {
10261 : 165543 : case BUILT_IN_CLZG:
10262 : 165543 : if (arg1 && TREE_CODE (arg1) != INTEGER_CST)
10263 : : return NULL_TREE;
10264 : : ifn = IFN_CLZ;
10265 : : fcodei = BUILT_IN_CLZ;
10266 : : fcodel = BUILT_IN_CLZL;
10267 : : fcodell = BUILT_IN_CLZLL;
10268 : : break;
10269 : 48970 : case BUILT_IN_CTZG:
10270 : 48970 : if (arg1 && TREE_CODE (arg1) != INTEGER_CST)
10271 : : return NULL_TREE;
10272 : : ifn = IFN_CTZ;
10273 : : fcodei = BUILT_IN_CTZ;
10274 : : fcodel = BUILT_IN_CTZL;
10275 : : fcodell = BUILT_IN_CTZLL;
10276 : : break;
10277 : : case BUILT_IN_CLRSBG:
10278 : : ifn = IFN_CLRSB;
10279 : : fcodei = BUILT_IN_CLRSB;
10280 : : fcodel = BUILT_IN_CLRSBL;
10281 : : fcodell = BUILT_IN_CLRSBLL;
10282 : : break;
10283 : 83 : case BUILT_IN_FFSG:
10284 : 83 : ifn = IFN_FFS;
10285 : 83 : fcodei = BUILT_IN_FFS;
10286 : 83 : fcodel = BUILT_IN_FFSL;
10287 : 83 : fcodell = BUILT_IN_FFSLL;
10288 : 83 : break;
10289 : 76 : case BUILT_IN_PARITYG:
10290 : 76 : ifn = IFN_PARITY;
10291 : 76 : fcodei = BUILT_IN_PARITY;
10292 : 76 : fcodel = BUILT_IN_PARITYL;
10293 : 76 : fcodell = BUILT_IN_PARITYLL;
10294 : 76 : break;
10295 : 18993 : case BUILT_IN_POPCOUNTG:
10296 : 18993 : ifn = IFN_POPCOUNT;
10297 : 18993 : fcodei = BUILT_IN_POPCOUNT;
10298 : 18993 : fcodel = BUILT_IN_POPCOUNTL;
10299 : 18993 : fcodell = BUILT_IN_POPCOUNTLL;
10300 : 18993 : break;
10301 : 0 : default:
10302 : 0 : gcc_unreachable ();
10303 : : }
10304 : :
10305 : 233576 : if (TYPE_PRECISION (arg0_type)
10306 : 233576 : <= TYPE_PRECISION (long_long_unsigned_type_node))
10307 : : {
10308 : 202177 : if (TYPE_PRECISION (arg0_type) <= TYPE_PRECISION (unsigned_type_node))
10309 : :
10310 : 86987 : cast_type = (TYPE_UNSIGNED (arg0_type)
10311 : 86933 : ? unsigned_type_node : integer_type_node);
10312 : 115244 : else if (TYPE_PRECISION (arg0_type)
10313 : 115244 : <= TYPE_PRECISION (long_unsigned_type_node))
10314 : : {
10315 : 115265 : cast_type = (TYPE_UNSIGNED (arg0_type)
10316 : 115193 : ? long_unsigned_type_node : long_integer_type_node);
10317 : : fcodei = fcodel;
10318 : : }
10319 : : else
10320 : : {
10321 : 51 : cast_type = (TYPE_UNSIGNED (arg0_type)
10322 : 51 : ? long_long_unsigned_type_node
10323 : : : long_long_integer_type_node);
10324 : : fcodei = fcodell;
10325 : : }
10326 : : }
10327 : 62798 : else if (TYPE_PRECISION (arg0_type) <= MAX_FIXED_MODE_SIZE)
10328 : : {
10329 : 31299 : cast_type
10330 : 31299 : = build_nonstandard_integer_type (MAX_FIXED_MODE_SIZE,
10331 : 31299 : TYPE_UNSIGNED (arg0_type));
10332 : 31299 : gcc_assert (TYPE_PRECISION (cast_type)
10333 : : == 2 * TYPE_PRECISION (long_long_unsigned_type_node));
10334 : : fcodei = END_BUILTINS;
10335 : : }
10336 : : else
10337 : : fcodei = END_BUILTINS;
10338 : 233476 : if (cast_type)
10339 : : {
10340 : 233476 : switch (fcode)
10341 : : {
10342 : 165508 : case BUILT_IN_CLZG:
10343 : 165508 : case BUILT_IN_CLRSBG:
10344 : 165508 : addend = TYPE_PRECISION (arg0_type) - TYPE_PRECISION (cast_type);
10345 : 165508 : break;
10346 : : default:
10347 : : break;
10348 : : }
10349 : 233476 : arg0 = fold_convert (cast_type, arg0);
10350 : 233476 : arg0_type = cast_type;
10351 : : }
10352 : :
10353 : 233576 : if (arg1)
10354 : 151647 : arg1 = fold_convert (integer_type_node, arg1);
10355 : :
10356 : 233576 : tree arg2 = arg1;
10357 : 233576 : if (fcode == BUILT_IN_CLZG && addend)
10358 : : {
10359 : 7626 : if (arg1)
10360 : 7593 : arg0 = save_expr (arg0);
10361 : : arg2 = NULL_TREE;
10362 : : }
10363 : 233576 : tree call = NULL_TREE, tem;
10364 : 233576 : if (TYPE_PRECISION (arg0_type) == MAX_FIXED_MODE_SIZE
10365 : 31350 : && (TYPE_PRECISION (arg0_type)
10366 : 31350 : == 2 * TYPE_PRECISION (long_long_unsigned_type_node))
10367 : : /* If the target supports the optab, then don't do the expansion. */
10368 : 264875 : && !direct_internal_fn_supported_p (ifn, arg0_type, OPTIMIZE_FOR_BOTH))
10369 : : {
10370 : : /* __int128 expansions using up to 2 long long builtins. */
10371 : 31299 : arg0 = save_expr (arg0);
10372 : 31299 : tree type = (TYPE_UNSIGNED (arg0_type)
10373 : 31299 : ? long_long_unsigned_type_node
10374 : 31299 : : long_long_integer_type_node);
10375 : 62598 : tree hi = fold_build2 (RSHIFT_EXPR, arg0_type, arg0,
10376 : : build_int_cst (integer_type_node,
10377 : : MAX_FIXED_MODE_SIZE / 2));
10378 : 31299 : hi = fold_convert (type, hi);
10379 : 31299 : tree lo = fold_convert (type, arg0);
10380 : 31299 : switch (fcode)
10381 : : {
10382 : 31204 : case BUILT_IN_CLZG:
10383 : 31204 : call = fold_builtin_bit_query (loc, fcode, lo, NULL_TREE);
10384 : 62408 : call = fold_build2 (PLUS_EXPR, integer_type_node, call,
10385 : : build_int_cst (integer_type_node,
10386 : : MAX_FIXED_MODE_SIZE / 2));
10387 : 31204 : if (arg2)
10388 : 31191 : call = fold_build3 (COND_EXPR, integer_type_node,
10389 : : fold_build2 (NE_EXPR, boolean_type_node,
10390 : : lo, build_zero_cst (type)),
10391 : : call, arg2);
10392 : 31204 : call = fold_build3 (COND_EXPR, integer_type_node,
10393 : : fold_build2 (NE_EXPR, boolean_type_node,
10394 : : hi, build_zero_cst (type)),
10395 : : fold_builtin_bit_query (loc, fcode, hi,
10396 : : NULL_TREE),
10397 : : call);
10398 : 31204 : break;
10399 : 33 : case BUILT_IN_CTZG:
10400 : 33 : call = fold_builtin_bit_query (loc, fcode, hi, NULL_TREE);
10401 : 66 : call = fold_build2 (PLUS_EXPR, integer_type_node, call,
10402 : : build_int_cst (integer_type_node,
10403 : : MAX_FIXED_MODE_SIZE / 2));
10404 : 33 : if (arg2)
10405 : 24 : call = fold_build3 (COND_EXPR, integer_type_node,
10406 : : fold_build2 (NE_EXPR, boolean_type_node,
10407 : : hi, build_zero_cst (type)),
10408 : : call, arg2);
10409 : 33 : call = fold_build3 (COND_EXPR, integer_type_node,
10410 : : fold_build2 (NE_EXPR, boolean_type_node,
10411 : : lo, build_zero_cst (type)),
10412 : : fold_builtin_bit_query (loc, fcode, lo,
10413 : : NULL_TREE),
10414 : : call);
10415 : 33 : break;
10416 : 9 : case BUILT_IN_CLRSBG:
10417 : 9 : tem = fold_builtin_bit_query (loc, fcode, lo, NULL_TREE);
10418 : 18 : tem = fold_build2 (PLUS_EXPR, integer_type_node, tem,
10419 : : build_int_cst (integer_type_node,
10420 : : MAX_FIXED_MODE_SIZE / 2));
10421 : 18 : tem = fold_build3 (COND_EXPR, integer_type_node,
10422 : : fold_build2 (LT_EXPR, boolean_type_node,
10423 : : fold_build2 (BIT_XOR_EXPR, type,
10424 : : lo, hi),
10425 : : build_zero_cst (type)),
10426 : : build_int_cst (integer_type_node,
10427 : : MAX_FIXED_MODE_SIZE / 2 - 1),
10428 : : tem);
10429 : 9 : call = fold_builtin_bit_query (loc, fcode, hi, NULL_TREE);
10430 : 9 : call = save_expr (call);
10431 : 18 : call = fold_build3 (COND_EXPR, integer_type_node,
10432 : : fold_build2 (NE_EXPR, boolean_type_node,
10433 : : call,
10434 : : build_int_cst (integer_type_node,
10435 : : MAX_FIXED_MODE_SIZE
10436 : : / 2 - 1)),
10437 : : call, tem);
10438 : 9 : break;
10439 : 9 : case BUILT_IN_FFSG:
10440 : 9 : call = fold_builtin_bit_query (loc, fcode, hi, NULL_TREE);
10441 : 18 : call = fold_build2 (PLUS_EXPR, integer_type_node, call,
10442 : : build_int_cst (integer_type_node,
10443 : : MAX_FIXED_MODE_SIZE / 2));
10444 : 9 : call = fold_build3 (COND_EXPR, integer_type_node,
10445 : : fold_build2 (NE_EXPR, boolean_type_node,
10446 : : hi, build_zero_cst (type)),
10447 : : call, integer_zero_node);
10448 : 9 : call = fold_build3 (COND_EXPR, integer_type_node,
10449 : : fold_build2 (NE_EXPR, boolean_type_node,
10450 : : lo, build_zero_cst (type)),
10451 : : fold_builtin_bit_query (loc, fcode, lo,
10452 : : NULL_TREE),
10453 : : call);
10454 : 9 : break;
10455 : 9 : case BUILT_IN_PARITYG:
10456 : 9 : call = fold_builtin_bit_query (loc, fcode,
10457 : : fold_build2 (BIT_XOR_EXPR, type,
10458 : : lo, hi), NULL_TREE);
10459 : 9 : break;
10460 : 35 : case BUILT_IN_POPCOUNTG:
10461 : 35 : call = fold_build2 (PLUS_EXPR, integer_type_node,
10462 : : fold_builtin_bit_query (loc, fcode, hi,
10463 : : NULL_TREE),
10464 : : fold_builtin_bit_query (loc, fcode, lo,
10465 : : NULL_TREE));
10466 : 35 : break;
10467 : 0 : default:
10468 : 0 : gcc_unreachable ();
10469 : : }
10470 : : }
10471 : : else
10472 : : {
10473 : : /* Only keep second argument to IFN_CLZ/IFN_CTZ if it is the
10474 : : value defined at zero during GIMPLE, or for large/huge _BitInt
10475 : : (which are then lowered during bitint lowering). */
10476 : 202277 : if (arg2 && TREE_CODE (TREE_TYPE (arg0)) != BITINT_TYPE)
10477 : : {
10478 : 112821 : int val;
10479 : 112821 : if (fcode == BUILT_IN_CLZG)
10480 : : {
10481 : 64144 : if (CLZ_DEFINED_VALUE_AT_ZERO (SCALAR_TYPE_MODE (arg0_type),
10482 : : val) != 2
10483 : 64156 : || wi::to_widest (arg2) != val)
10484 : 64132 : arg2 = NULL_TREE;
10485 : : }
10486 : 48677 : else if (CTZ_DEFINED_VALUE_AT_ZERO (SCALAR_TYPE_MODE (arg0_type),
10487 : : val) != 2
10488 : 48677 : || wi::to_widest (arg2) != val)
10489 : 48677 : arg2 = NULL_TREE;
10490 : 112821 : if (!direct_internal_fn_supported_p (ifn, arg0_type,
10491 : : OPTIMIZE_FOR_BOTH))
10492 : : arg2 = NULL_TREE;
10493 : 112773 : if (arg2 == NULL_TREE)
10494 : 112809 : arg0 = save_expr (arg0);
10495 : : }
10496 : 202277 : if (fcodei == END_BUILTINS || arg2)
10497 : 194 : call = build_call_expr_internal_loc (loc, ifn, integer_type_node,
10498 : : arg2 ? 2 : 1, arg0, arg2);
10499 : : else
10500 : 202165 : call = build_call_expr_loc (loc, builtin_decl_explicit (fcodei), 1,
10501 : : arg0);
10502 : : }
10503 : 233576 : if (addend)
10504 : 7644 : call = fold_build2 (PLUS_EXPR, integer_type_node, call,
10505 : : build_int_cst (integer_type_node, addend));
10506 : 233576 : if (arg1 && arg2 == NULL_TREE)
10507 : 120402 : call = fold_build3 (COND_EXPR, integer_type_node,
10508 : : fold_build2 (NE_EXPR, boolean_type_node,
10509 : : arg0, build_zero_cst (arg0_type)),
10510 : : call, arg1);
10511 : :
10512 : : return call;
10513 : : }
10514 : :
10515 : : /* Fold __builtin_{add,sub}c{,l,ll} into pair of internal functions
10516 : : that return both result of arithmetics and overflowed boolean
10517 : : flag in a complex integer result. */
10518 : :
10519 : : static tree
10520 : 54 : fold_builtin_addc_subc (location_t loc, enum built_in_function fcode,
10521 : : tree *args)
10522 : : {
10523 : 54 : enum internal_fn ifn;
10524 : :
10525 : 54 : switch (fcode)
10526 : : {
10527 : : case BUILT_IN_ADDC:
10528 : : case BUILT_IN_ADDCL:
10529 : : case BUILT_IN_ADDCLL:
10530 : : ifn = IFN_ADD_OVERFLOW;
10531 : : break;
10532 : 28 : case BUILT_IN_SUBC:
10533 : 28 : case BUILT_IN_SUBCL:
10534 : 28 : case BUILT_IN_SUBCLL:
10535 : 28 : ifn = IFN_SUB_OVERFLOW;
10536 : 28 : break;
10537 : 0 : default:
10538 : 0 : gcc_unreachable ();
10539 : : }
10540 : :
10541 : 54 : tree type = TREE_TYPE (args[0]);
10542 : 54 : tree ctype = build_complex_type (type);
10543 : 54 : tree call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
10544 : : args[0], args[1]);
10545 : : /* Force SAVE_EXPR even for calls which satisfy tree_invariant_p_1,
10546 : : as while the call itself is const, the REALPART_EXPR store is
10547 : : certainly not. And in any case, we want just one call,
10548 : : not multiple and trying to CSE them later. */
10549 : 54 : TREE_SIDE_EFFECTS (call) = 1;
10550 : 54 : tree tgt = save_expr (call);
10551 : 54 : tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
10552 : 54 : tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
10553 : 54 : call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
10554 : : intres, args[2]);
10555 : 54 : TREE_SIDE_EFFECTS (call) = 1;
10556 : 54 : tgt = save_expr (call);
10557 : 54 : intres = build1_loc (loc, REALPART_EXPR, type, tgt);
10558 : 54 : tree ovfres2 = build1_loc (loc, IMAGPART_EXPR, type, tgt);
10559 : 54 : ovfres = build2_loc (loc, BIT_IOR_EXPR, type, ovfres, ovfres2);
10560 : 54 : tree mem_arg3 = build_fold_indirect_ref_loc (loc, args[3]);
10561 : 54 : tree store
10562 : 54 : = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg3, ovfres);
10563 : 54 : return build2_loc (loc, COMPOUND_EXPR, type, store, intres);
10564 : : }
10565 : :
10566 : : /* Fold a call to __builtin_FILE to a constant string. */
10567 : :
10568 : : static inline tree
10569 : 5852 : fold_builtin_FILE (location_t loc)
10570 : : {
10571 : 5852 : if (const char *fname = LOCATION_FILE (loc))
10572 : : {
10573 : : /* The documentation says this builtin is equivalent to the preprocessor
10574 : : __FILE__ macro so it appears appropriate to use the same file prefix
10575 : : mappings. */
10576 : 5852 : fname = remap_macro_filename (fname);
10577 : 5852 : return build_string_literal (fname);
10578 : : }
10579 : :
10580 : 0 : return build_string_literal ("");
10581 : : }
10582 : :
10583 : : /* Fold a call to __builtin_FUNCTION to a constant string. */
10584 : :
10585 : : static inline tree
10586 : 68 : fold_builtin_FUNCTION ()
10587 : : {
10588 : 68 : const char *name = "";
10589 : :
10590 : 68 : if (current_function_decl)
10591 : 43 : name = lang_hooks.decl_printable_name (current_function_decl, 0);
10592 : :
10593 : 68 : return build_string_literal (name);
10594 : : }
10595 : :
10596 : : /* Fold a call to __builtin_LINE to an integer constant. */
10597 : :
10598 : : static inline tree
10599 : 11707 : fold_builtin_LINE (location_t loc, tree type)
10600 : : {
10601 : 11707 : return build_int_cst (type, LOCATION_LINE (loc));
10602 : : }
10603 : :
10604 : : /* Fold a call to built-in function FNDECL with 0 arguments.
10605 : : This function returns NULL_TREE if no simplification was possible. */
10606 : :
10607 : : static tree
10608 : 22601631 : fold_builtin_0 (location_t loc, tree fndecl)
10609 : : {
10610 : 22601631 : tree type = TREE_TYPE (TREE_TYPE (fndecl));
10611 : 22601631 : enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10612 : 22601631 : switch (fcode)
10613 : : {
10614 : 5852 : case BUILT_IN_FILE:
10615 : 5852 : return fold_builtin_FILE (loc);
10616 : :
10617 : 68 : case BUILT_IN_FUNCTION:
10618 : 68 : return fold_builtin_FUNCTION ();
10619 : :
10620 : 11707 : case BUILT_IN_LINE:
10621 : 11707 : return fold_builtin_LINE (loc, type);
10622 : :
10623 : 35734 : CASE_FLT_FN (BUILT_IN_INF):
10624 : 35734 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
10625 : 35734 : case BUILT_IN_INFD32:
10626 : 35734 : case BUILT_IN_INFD64:
10627 : 35734 : case BUILT_IN_INFD128:
10628 : 35734 : case BUILT_IN_INFD64X:
10629 : 35734 : return fold_builtin_inf (loc, type, true);
10630 : :
10631 : 195029 : CASE_FLT_FN (BUILT_IN_HUGE_VAL):
10632 : 195029 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
10633 : 195029 : return fold_builtin_inf (loc, type, false);
10634 : :
10635 : 0 : case BUILT_IN_CLASSIFY_TYPE:
10636 : 0 : return fold_builtin_classify_type (NULL_TREE);
10637 : :
10638 : 17375917 : case BUILT_IN_UNREACHABLE:
10639 : : /* Rewrite any explicit calls to __builtin_unreachable. */
10640 : 17375917 : if (sanitize_flags_p (SANITIZE_UNREACHABLE))
10641 : 115 : return build_builtin_unreachable (loc);
10642 : : break;
10643 : :
10644 : : default:
10645 : : break;
10646 : : }
10647 : : return NULL_TREE;
10648 : : }
10649 : :
10650 : : /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
10651 : : This function returns NULL_TREE if no simplification was possible. */
10652 : :
10653 : : static tree
10654 : 15923757 : fold_builtin_1 (location_t loc, tree expr, tree fndecl, tree arg0)
10655 : : {
10656 : 15923757 : tree type = TREE_TYPE (TREE_TYPE (fndecl));
10657 : 15923757 : enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10658 : :
10659 : 15923757 : if (error_operand_p (arg0))
10660 : : return NULL_TREE;
10661 : :
10662 : 15923757 : if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
10663 : : return ret;
10664 : :
10665 : 15430698 : switch (fcode)
10666 : : {
10667 : 785132 : case BUILT_IN_CONSTANT_P:
10668 : 785132 : {
10669 : 785132 : tree val = fold_builtin_constant_p (arg0);
10670 : :
10671 : : /* Gimplification will pull the CALL_EXPR for the builtin out of
10672 : : an if condition. When not optimizing, we'll not CSE it back.
10673 : : To avoid link error types of regressions, return false now. */
10674 : 785132 : if (!val && !optimize)
10675 : 1538 : val = integer_zero_node;
10676 : :
10677 : : return val;
10678 : : }
10679 : :
10680 : 2140 : case BUILT_IN_CLASSIFY_TYPE:
10681 : 2140 : return fold_builtin_classify_type (arg0);
10682 : :
10683 : 419144 : case BUILT_IN_STRLEN:
10684 : 419144 : return fold_builtin_strlen (loc, expr, type, arg0);
10685 : :
10686 : 364904 : CASE_FLT_FN (BUILT_IN_FABS):
10687 : 364904 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
10688 : 364904 : case BUILT_IN_FABSD32:
10689 : 364904 : case BUILT_IN_FABSD64:
10690 : 364904 : case BUILT_IN_FABSD128:
10691 : 364904 : case BUILT_IN_FABSD64X:
10692 : 364904 : return fold_builtin_fabs (loc, arg0, type);
10693 : :
10694 : 94635 : case BUILT_IN_ABS:
10695 : 94635 : case BUILT_IN_LABS:
10696 : 94635 : case BUILT_IN_LLABS:
10697 : 94635 : case BUILT_IN_IMAXABS:
10698 : 94635 : case BUILT_IN_UABS:
10699 : 94635 : case BUILT_IN_ULABS:
10700 : 94635 : case BUILT_IN_ULLABS:
10701 : 94635 : case BUILT_IN_UMAXABS:
10702 : 94635 : return fold_builtin_abs (loc, arg0, type);
10703 : :
10704 : 24501 : CASE_FLT_FN (BUILT_IN_CONJ):
10705 : 24501 : if (validate_arg (arg0, COMPLEX_TYPE)
10706 : 24501 : && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10707 : 24501 : return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
10708 : : break;
10709 : :
10710 : 764 : CASE_FLT_FN (BUILT_IN_CREAL):
10711 : 764 : if (validate_arg (arg0, COMPLEX_TYPE)
10712 : 764 : && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10713 : 764 : return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
10714 : : break;
10715 : :
10716 : 1884 : CASE_FLT_FN (BUILT_IN_CIMAG):
10717 : 1884 : if (validate_arg (arg0, COMPLEX_TYPE)
10718 : 1884 : && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10719 : 1884 : return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
10720 : : break;
10721 : :
10722 : 116403 : CASE_FLT_FN (BUILT_IN_CARG):
10723 : 116403 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_CARG):
10724 : 116403 : return fold_builtin_carg (loc, arg0, type);
10725 : :
10726 : 211 : case BUILT_IN_ISASCII:
10727 : 211 : return fold_builtin_isascii (loc, arg0);
10728 : :
10729 : 168 : case BUILT_IN_TOASCII:
10730 : 168 : return fold_builtin_toascii (loc, arg0);
10731 : :
10732 : 325 : case BUILT_IN_ISDIGIT:
10733 : 325 : return fold_builtin_isdigit (loc, arg0);
10734 : :
10735 : 579976 : CASE_FLT_FN (BUILT_IN_FINITE):
10736 : 579976 : case BUILT_IN_FINITED32:
10737 : 579976 : case BUILT_IN_FINITED64:
10738 : 579976 : case BUILT_IN_FINITED128:
10739 : 579976 : case BUILT_IN_ISFINITE:
10740 : 579976 : {
10741 : 579976 : tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10742 : 579976 : if (ret)
10743 : : return ret;
10744 : 579752 : return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10745 : : }
10746 : :
10747 : 268654 : CASE_FLT_FN (BUILT_IN_ISINF):
10748 : 268654 : case BUILT_IN_ISINFD32:
10749 : 268654 : case BUILT_IN_ISINFD64:
10750 : 268654 : case BUILT_IN_ISINFD128:
10751 : 268654 : {
10752 : 268654 : tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10753 : 268654 : if (ret)
10754 : : return ret;
10755 : 268533 : return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10756 : : }
10757 : :
10758 : 265194 : case BUILT_IN_ISNORMAL:
10759 : 265194 : return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10760 : :
10761 : 708 : case BUILT_IN_ISINF_SIGN:
10762 : 708 : return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10763 : :
10764 : 268305 : CASE_FLT_FN (BUILT_IN_ISNAN):
10765 : 268305 : case BUILT_IN_ISNAND32:
10766 : 268305 : case BUILT_IN_ISNAND64:
10767 : 268305 : case BUILT_IN_ISNAND128:
10768 : 268305 : {
10769 : 268305 : tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10770 : 268305 : if (ret)
10771 : : return ret;
10772 : 268181 : return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10773 : : }
10774 : :
10775 : 28379 : case BUILT_IN_ISSIGNALING:
10776 : 28379 : return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISSIGNALING);
10777 : :
10778 : 567543 : case BUILT_IN_FREE:
10779 : 567543 : if (integer_zerop (arg0))
10780 : 1051 : return build_empty_stmt (loc);
10781 : : break;
10782 : :
10783 : 19340 : case BUILT_IN_CLZG:
10784 : 19340 : case BUILT_IN_CTZG:
10785 : 19340 : case BUILT_IN_CLRSBG:
10786 : 19340 : case BUILT_IN_FFSG:
10787 : 19340 : case BUILT_IN_PARITYG:
10788 : 19340 : case BUILT_IN_POPCOUNTG:
10789 : 19340 : return fold_builtin_bit_query (loc, fcode, arg0, NULL_TREE);
10790 : :
10791 : : default:
10792 : : break;
10793 : : }
10794 : :
10795 : : return NULL_TREE;
10796 : :
10797 : : }
10798 : :
10799 : : /* Folds a call EXPR (which may be null) to built-in function FNDECL
10800 : : with 2 arguments, ARG0 and ARG1. This function returns NULL_TREE
10801 : : if no simplification was possible. */
10802 : :
10803 : : static tree
10804 : 16806180 : fold_builtin_2 (location_t loc, tree expr, tree fndecl, tree arg0, tree arg1)
10805 : : {
10806 : 16806180 : tree type = TREE_TYPE (TREE_TYPE (fndecl));
10807 : 16806180 : enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10808 : :
10809 : 16806180 : if (error_operand_p (arg0)
10810 : 16806180 : || error_operand_p (arg1))
10811 : : return NULL_TREE;
10812 : :
10813 : 16806176 : if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
10814 : : return ret;
10815 : :
10816 : 16600173 : switch (fcode)
10817 : : {
10818 : 6504 : CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10819 : 6504 : CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10820 : 6504 : if (validate_arg (arg0, REAL_TYPE)
10821 : 6504 : && validate_arg (arg1, POINTER_TYPE))
10822 : 6504 : return do_mpfr_lgamma_r (arg0, arg1, type);
10823 : : break;
10824 : :
10825 : 115610 : CASE_FLT_FN (BUILT_IN_FREXP):
10826 : 115610 : return fold_builtin_frexp (loc, arg0, arg1, type);
10827 : :
10828 : 75894 : CASE_FLT_FN (BUILT_IN_MODF):
10829 : 75894 : return fold_builtin_modf (loc, arg0, arg1, type);
10830 : :
10831 : 2560 : case BUILT_IN_STRSPN:
10832 : 2560 : return fold_builtin_strspn (loc, expr, arg0, arg1, type);
10833 : :
10834 : 2465 : case BUILT_IN_STRCSPN:
10835 : 2465 : return fold_builtin_strcspn (loc, expr, arg0, arg1, type);
10836 : :
10837 : 83023 : case BUILT_IN_STRPBRK:
10838 : 83023 : return fold_builtin_strpbrk (loc, expr, arg0, arg1, type);
10839 : :
10840 : 5021456 : case BUILT_IN_EXPECT:
10841 : 5021456 : return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, NULL_TREE);
10842 : :
10843 : 533897 : case BUILT_IN_ISGREATER:
10844 : 533897 : return fold_builtin_unordered_cmp (loc, fndecl,
10845 : 533897 : arg0, arg1, UNLE_EXPR, LE_EXPR);
10846 : 530493 : case BUILT_IN_ISGREATEREQUAL:
10847 : 530493 : return fold_builtin_unordered_cmp (loc, fndecl,
10848 : 530493 : arg0, arg1, UNLT_EXPR, LT_EXPR);
10849 : 266868 : case BUILT_IN_ISLESS:
10850 : 266868 : return fold_builtin_unordered_cmp (loc, fndecl,
10851 : 266868 : arg0, arg1, UNGE_EXPR, GE_EXPR);
10852 : 1110274 : case BUILT_IN_ISLESSEQUAL:
10853 : 1110274 : return fold_builtin_unordered_cmp (loc, fndecl,
10854 : 1110274 : arg0, arg1, UNGT_EXPR, GT_EXPR);
10855 : 265004 : case BUILT_IN_ISLESSGREATER:
10856 : 265004 : return fold_builtin_unordered_cmp (loc, fndecl,
10857 : 265004 : arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10858 : 266241 : case BUILT_IN_ISUNORDERED:
10859 : 266241 : return fold_builtin_unordered_cmp (loc, fndecl,
10860 : : arg0, arg1, UNORDERED_EXPR,
10861 : 266241 : NOP_EXPR);
10862 : :
10863 : 709 : case BUILT_IN_ISEQSIG:
10864 : 709 : return fold_builtin_iseqsig (loc, arg0, arg1);
10865 : :
10866 : : /* We do the folding for va_start in the expander. */
10867 : : case BUILT_IN_VA_START:
10868 : : break;
10869 : :
10870 : 200060 : case BUILT_IN_OBJECT_SIZE:
10871 : 200060 : case BUILT_IN_DYNAMIC_OBJECT_SIZE:
10872 : 200060 : return fold_builtin_object_size (arg0, arg1, fcode);
10873 : :
10874 : 64919 : case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10875 : 64919 : return fold_builtin_atomic_always_lock_free (arg0, arg1);
10876 : :
10877 : 40039 : case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10878 : 40039 : return fold_builtin_atomic_is_lock_free (arg0, arg1);
10879 : :
10880 : 151817 : case BUILT_IN_CLZG:
10881 : 151817 : case BUILT_IN_CTZG:
10882 : 151817 : return fold_builtin_bit_query (loc, fcode, arg0, arg1);
10883 : :
10884 : : default:
10885 : : break;
10886 : : }
10887 : : return NULL_TREE;
10888 : : }
10889 : :
10890 : : /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10891 : : and ARG2.
10892 : : This function returns NULL_TREE if no simplification was possible. */
10893 : :
10894 : : static tree
10895 : 6217853 : fold_builtin_3 (location_t loc, tree fndecl,
10896 : : tree arg0, tree arg1, tree arg2)
10897 : : {
10898 : 6217853 : tree type = TREE_TYPE (TREE_TYPE (fndecl));
10899 : 6217853 : enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10900 : :
10901 : 6217853 : if (error_operand_p (arg0)
10902 : 6217853 : || error_operand_p (arg1)
10903 : 12435706 : || error_operand_p (arg2))
10904 : : return NULL_TREE;
10905 : :
10906 : 6217851 : if (tree ret = fold_const_call (as_combined_fn (fcode), type,
10907 : : arg0, arg1, arg2))
10908 : : return ret;
10909 : :
10910 : 6201087 : switch (fcode)
10911 : : {
10912 : :
10913 : 145 : CASE_FLT_FN (BUILT_IN_SINCOS):
10914 : 145 : return fold_builtin_sincos (loc, arg0, arg1, arg2);
10915 : :
10916 : 87209 : CASE_FLT_FN (BUILT_IN_REMQUO):
10917 : 87209 : if (validate_arg (arg0, REAL_TYPE)
10918 : 87209 : && validate_arg (arg1, REAL_TYPE)
10919 : 174418 : && validate_arg (arg2, POINTER_TYPE))
10920 : 87209 : return do_mpfr_remquo (arg0, arg1, arg2);
10921 : : break;
10922 : :
10923 : 2392126 : case BUILT_IN_MEMCMP:
10924 : 2392126 : return fold_builtin_memcmp (loc, arg0, arg1, arg2);
10925 : :
10926 : 493476 : case BUILT_IN_EXPECT:
10927 : 493476 : return fold_builtin_expect (loc, arg0, arg1, arg2, NULL_TREE);
10928 : :
10929 : 337 : case BUILT_IN_EXPECT_WITH_PROBABILITY:
10930 : 337 : return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, arg2);
10931 : :
10932 : 184547 : case BUILT_IN_ADD_OVERFLOW:
10933 : 184547 : case BUILT_IN_SUB_OVERFLOW:
10934 : 184547 : case BUILT_IN_MUL_OVERFLOW:
10935 : 184547 : case BUILT_IN_ADD_OVERFLOW_P:
10936 : 184547 : case BUILT_IN_SUB_OVERFLOW_P:
10937 : 184547 : case BUILT_IN_MUL_OVERFLOW_P:
10938 : 184547 : case BUILT_IN_SADD_OVERFLOW:
10939 : 184547 : case BUILT_IN_SADDL_OVERFLOW:
10940 : 184547 : case BUILT_IN_SADDLL_OVERFLOW:
10941 : 184547 : case BUILT_IN_SSUB_OVERFLOW:
10942 : 184547 : case BUILT_IN_SSUBL_OVERFLOW:
10943 : 184547 : case BUILT_IN_SSUBLL_OVERFLOW:
10944 : 184547 : case BUILT_IN_SMUL_OVERFLOW:
10945 : 184547 : case BUILT_IN_SMULL_OVERFLOW:
10946 : 184547 : case BUILT_IN_SMULLL_OVERFLOW:
10947 : 184547 : case BUILT_IN_UADD_OVERFLOW:
10948 : 184547 : case BUILT_IN_UADDL_OVERFLOW:
10949 : 184547 : case BUILT_IN_UADDLL_OVERFLOW:
10950 : 184547 : case BUILT_IN_USUB_OVERFLOW:
10951 : 184547 : case BUILT_IN_USUBL_OVERFLOW:
10952 : 184547 : case BUILT_IN_USUBLL_OVERFLOW:
10953 : 184547 : case BUILT_IN_UMUL_OVERFLOW:
10954 : 184547 : case BUILT_IN_UMULL_OVERFLOW:
10955 : 184547 : case BUILT_IN_UMULLL_OVERFLOW:
10956 : 184547 : return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
10957 : :
10958 : : default:
10959 : : break;
10960 : : }
10961 : : return NULL_TREE;
10962 : : }
10963 : :
10964 : : /* Folds a call EXPR (which may be null) to built-in function FNDECL.
10965 : : ARGS is an array of NARGS arguments. IGNORE is true if the result
10966 : : of the function call is ignored. This function returns NULL_TREE
10967 : : if no simplification was possible. */
10968 : :
10969 : : static tree
10970 : 64061246 : fold_builtin_n (location_t loc, tree expr, tree fndecl, tree *args,
10971 : : int nargs, bool)
10972 : : {
10973 : 64061246 : tree ret = NULL_TREE;
10974 : :
10975 : 64061246 : switch (nargs)
10976 : : {
10977 : 22601631 : case 0:
10978 : 22601631 : ret = fold_builtin_0 (loc, fndecl);
10979 : 22601631 : break;
10980 : 15923757 : case 1:
10981 : 15923757 : ret = fold_builtin_1 (loc, expr, fndecl, args[0]);
10982 : 15923757 : break;
10983 : 16806180 : case 2:
10984 : 16806180 : ret = fold_builtin_2 (loc, expr, fndecl, args[0], args[1]);
10985 : 16806180 : break;
10986 : 6217853 : case 3:
10987 : 6217853 : ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
10988 : 6217853 : break;
10989 : 2511825 : default:
10990 : 2511825 : ret = fold_builtin_varargs (loc, fndecl, args, nargs);
10991 : 2511825 : break;
10992 : : }
10993 : 64061246 : if (ret)
10994 : : {
10995 : 6662699 : ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10996 : 6662699 : SET_EXPR_LOCATION (ret, loc);
10997 : 6662699 : return ret;
10998 : : }
10999 : : return NULL_TREE;
11000 : : }
11001 : :
11002 : : /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11003 : : list ARGS along with N new arguments in NEWARGS. SKIP is the number
11004 : : of arguments in ARGS to be omitted. OLDNARGS is the number of
11005 : : elements in ARGS. */
11006 : :
11007 : : static tree
11008 : 4 : rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
11009 : : int skip, tree fndecl, int n, va_list newargs)
11010 : : {
11011 : 4 : int nargs = oldnargs - skip + n;
11012 : 4 : tree *buffer;
11013 : :
11014 : 4 : if (n > 0)
11015 : : {
11016 : 0 : int i, j;
11017 : :
11018 : 0 : buffer = XALLOCAVEC (tree, nargs);
11019 : 0 : for (i = 0; i < n; i++)
11020 : 0 : buffer[i] = va_arg (newargs, tree);
11021 : 0 : for (j = skip; j < oldnargs; j++, i++)
11022 : 0 : buffer[i] = args[j];
11023 : : }
11024 : : else
11025 : 4 : buffer = args + skip;
11026 : :
11027 : 4 : return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
11028 : : }
11029 : :
11030 : : /* Return true if FNDECL shouldn't be folded right now.
11031 : : If a built-in function has an inline attribute always_inline
11032 : : wrapper, defer folding it after always_inline functions have
11033 : : been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
11034 : : might not be performed. */
11035 : :
11036 : : bool
11037 : 144402311 : avoid_folding_inline_builtin (tree fndecl)
11038 : : {
11039 : 144402311 : return (DECL_DECLARED_INLINE_P (fndecl)
11040 : 15297 : && DECL_DISREGARD_INLINE_LIMITS (fndecl)
11041 : 15255 : && cfun
11042 : 15255 : && !cfun->always_inline_functions_inlined
11043 : 144417566 : && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
11044 : : }
11045 : :
11046 : : /* A wrapper function for builtin folding that prevents warnings for
11047 : : "statement without effect" and the like, caused by removing the
11048 : : call node earlier than the warning is generated. */
11049 : :
11050 : : tree
11051 : 184963988 : fold_call_expr (location_t loc, tree exp, bool ignore)
11052 : : {
11053 : 184963988 : tree ret = NULL_TREE;
11054 : 184963988 : tree fndecl = get_callee_fndecl (exp);
11055 : 183196520 : if (fndecl && fndecl_built_in_p (fndecl)
11056 : : /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
11057 : : yet. Defer folding until we see all the arguments
11058 : : (after inlining). */
11059 : 242725256 : && !CALL_EXPR_VA_ARG_PACK (exp))
11060 : : {
11061 : 57761243 : int nargs = call_expr_nargs (exp);
11062 : :
11063 : : /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
11064 : : instead last argument is __builtin_va_arg_pack (). Defer folding
11065 : : even in that case, until arguments are finalized. */
11066 : 57761243 : if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
11067 : : {
11068 : 262814 : tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
11069 : 262814 : if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
11070 : : return NULL_TREE;
11071 : : }
11072 : :
11073 : 57761179 : if (avoid_folding_inline_builtin (fndecl))
11074 : : return NULL_TREE;
11075 : :
11076 : 57757644 : if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11077 : 69095922 : return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
11078 : 69095922 : CALL_EXPR_ARGP (exp), ignore);
11079 : : else
11080 : : {
11081 : 23209683 : tree *args = CALL_EXPR_ARGP (exp);
11082 : 23209683 : ret = fold_builtin_n (loc, exp, fndecl, args, nargs, ignore);
11083 : 23209683 : if (ret)
11084 : : return ret;
11085 : : }
11086 : : }
11087 : : return NULL_TREE;
11088 : : }
11089 : :
11090 : : /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
11091 : : N arguments are passed in the array ARGARRAY. Return a folded
11092 : : expression or NULL_TREE if no simplification was possible. */
11093 : :
11094 : : tree
11095 : 65146306 : fold_builtin_call_array (location_t loc, tree,
11096 : : tree fn,
11097 : : int n,
11098 : : tree *argarray)
11099 : : {
11100 : 65146306 : if (TREE_CODE (fn) != ADDR_EXPR)
11101 : : return NULL_TREE;
11102 : :
11103 : 65146306 : tree fndecl = TREE_OPERAND (fn, 0);
11104 : 65146306 : if (TREE_CODE (fndecl) == FUNCTION_DECL
11105 : 65146306 : && fndecl_built_in_p (fndecl))
11106 : : {
11107 : : /* If last argument is __builtin_va_arg_pack (), arguments to this
11108 : : function are not finalized yet. Defer folding until they are. */
11109 : 64563537 : if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
11110 : : {
11111 : 108415 : tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
11112 : 108415 : if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
11113 : : return NULL_TREE;
11114 : : }
11115 : 64563510 : if (avoid_folding_inline_builtin (fndecl))
11116 : : return NULL_TREE;
11117 : 64563510 : if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11118 : 33181955 : return targetm.fold_builtin (fndecl, n, argarray, false);
11119 : : else
11120 : 31381555 : return fold_builtin_n (loc, NULL_TREE, fndecl, argarray, n, false);
11121 : : }
11122 : :
11123 : : return NULL_TREE;
11124 : : }
11125 : :
11126 : : /* Construct a new CALL_EXPR using the tail of the argument list of EXP
11127 : : along with N new arguments specified as the "..." parameters. SKIP
11128 : : is the number of arguments in EXP to be omitted. This function is used
11129 : : to do varargs-to-varargs transformations. */
11130 : :
11131 : : static tree
11132 : 4 : rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
11133 : : {
11134 : 4 : va_list ap;
11135 : 4 : tree t;
11136 : :
11137 : 4 : va_start (ap, n);
11138 : 8 : t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
11139 : 4 : CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
11140 : 4 : va_end (ap);
11141 : :
11142 : 4 : return t;
11143 : : }
11144 : :
11145 : : /* Validate a single argument ARG against a tree code CODE representing
11146 : : a type. Return true when argument is valid. */
11147 : :
11148 : : static bool
11149 : 13861540 : validate_arg (const_tree arg, enum tree_code code)
11150 : : {
11151 : 13861540 : if (!arg)
11152 : : return false;
11153 : 13861517 : else if (code == POINTER_TYPE)
11154 : 6657577 : return POINTER_TYPE_P (TREE_TYPE (arg));
11155 : 7203940 : else if (code == INTEGER_TYPE)
11156 : 3652159 : return INTEGRAL_TYPE_P (TREE_TYPE (arg));
11157 : 3551781 : return code == TREE_CODE (TREE_TYPE (arg));
11158 : : }
11159 : :
11160 : : /* This function validates the types of a function call argument list
11161 : : against a specified list of tree_codes. If the last specifier is a 0,
11162 : : that represents an ellipses, otherwise the last specifier must be a
11163 : : VOID_TYPE.
11164 : :
11165 : : This is the GIMPLE version of validate_arglist. Eventually we want to
11166 : : completely convert builtins.cc to work from GIMPLEs and the tree based
11167 : : validate_arglist will then be removed. */
11168 : :
11169 : : bool
11170 : 93 : validate_gimple_arglist (const gcall *call, ...)
11171 : : {
11172 : 93 : enum tree_code code;
11173 : 93 : bool res = 0;
11174 : 93 : va_list ap;
11175 : 93 : const_tree arg;
11176 : 93 : size_t i;
11177 : :
11178 : 93 : va_start (ap, call);
11179 : 93 : i = 0;
11180 : :
11181 : 372 : do
11182 : : {
11183 : 372 : code = (enum tree_code) va_arg (ap, int);
11184 : 372 : switch (code)
11185 : : {
11186 : 0 : case 0:
11187 : : /* This signifies an ellipses, any further arguments are all ok. */
11188 : 0 : res = true;
11189 : 0 : goto end;
11190 : 93 : case VOID_TYPE:
11191 : : /* This signifies an endlink, if no arguments remain, return
11192 : : true, otherwise return false. */
11193 : 93 : res = (i == gimple_call_num_args (call));
11194 : 93 : goto end;
11195 : 279 : default:
11196 : : /* If no parameters remain or the parameter's code does not
11197 : : match the specified code, return false. Otherwise continue
11198 : : checking any remaining arguments. */
11199 : 279 : arg = gimple_call_arg (call, i++);
11200 : 279 : if (!validate_arg (arg, code))
11201 : 0 : goto end;
11202 : : break;
11203 : : }
11204 : : }
11205 : : while (1);
11206 : :
11207 : : /* We need gotos here since we can only have one VA_CLOSE in a
11208 : : function. */
11209 : 93 : end: ;
11210 : 93 : va_end (ap);
11211 : :
11212 : 93 : return res;
11213 : : }
11214 : :
11215 : : /* Default target-specific builtin expander that does nothing. */
11216 : :
11217 : : rtx
11218 : 0 : default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
11219 : : rtx target ATTRIBUTE_UNUSED,
11220 : : rtx subtarget ATTRIBUTE_UNUSED,
11221 : : machine_mode mode ATTRIBUTE_UNUSED,
11222 : : int ignore ATTRIBUTE_UNUSED)
11223 : : {
11224 : 0 : return NULL_RTX;
11225 : : }
11226 : :
11227 : : /* Returns true is EXP represents data that would potentially reside
11228 : : in a readonly section. */
11229 : :
11230 : : bool
11231 : 186755 : readonly_data_expr (tree exp)
11232 : : {
11233 : 186755 : STRIP_NOPS (exp);
11234 : :
11235 : 186755 : if (TREE_CODE (exp) != ADDR_EXPR)
11236 : : return false;
11237 : :
11238 : 23871 : exp = get_base_address (TREE_OPERAND (exp, 0));
11239 : 23871 : if (!exp)
11240 : : return false;
11241 : :
11242 : : /* Make sure we call decl_readonly_section only for trees it
11243 : : can handle (since it returns true for everything it doesn't
11244 : : understand). */
11245 : 23871 : if (TREE_CODE (exp) == STRING_CST
11246 : 5663 : || TREE_CODE (exp) == CONSTRUCTOR
11247 : 5663 : || (VAR_P (exp) && TREE_STATIC (exp)))
11248 : 20882 : return decl_readonly_section (exp, 0);
11249 : : else
11250 : : return false;
11251 : : }
11252 : :
11253 : : /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11254 : : to the call, and TYPE is its return type.
11255 : :
11256 : : Return NULL_TREE if no simplification was possible, otherwise return the
11257 : : simplified form of the call as a tree.
11258 : :
11259 : : The simplified form may be a constant or other expression which
11260 : : computes the same value, but in a more efficient manner (including
11261 : : calls to other builtin functions).
11262 : :
11263 : : The call may contain arguments which need to be evaluated, but
11264 : : which are not useful to determine the result of the call. In
11265 : : this case we return a chain of COMPOUND_EXPRs. The LHS of each
11266 : : COMPOUND_EXPR will be an argument which must be evaluated.
11267 : : COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11268 : : COMPOUND_EXPR in the chain will contain the tree for the simplified
11269 : : form of the builtin function call. */
11270 : :
11271 : : static tree
11272 : 83023 : fold_builtin_strpbrk (location_t loc, tree, tree s1, tree s2, tree type)
11273 : : {
11274 : 83023 : if (!validate_arg (s1, POINTER_TYPE)
11275 : 83023 : || !validate_arg (s2, POINTER_TYPE))
11276 : : return NULL_TREE;
11277 : :
11278 : 83023 : tree fn;
11279 : 83023 : const char *p1, *p2;
11280 : :
11281 : 83023 : p2 = c_getstr (s2);
11282 : 83023 : if (p2 == NULL)
11283 : : return NULL_TREE;
11284 : :
11285 : 92 : p1 = c_getstr (s1);
11286 : 92 : if (p1 != NULL)
11287 : : {
11288 : 22 : const char *r = strpbrk (p1, p2);
11289 : 22 : tree tem;
11290 : :
11291 : 22 : if (r == NULL)
11292 : 0 : return build_int_cst (TREE_TYPE (s1), 0);
11293 : :
11294 : : /* Return an offset into the constant string argument. */
11295 : 22 : tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11296 : 22 : return fold_convert_loc (loc, type, tem);
11297 : : }
11298 : :
11299 : 70 : if (p2[0] == '\0')
11300 : : /* strpbrk(x, "") == NULL.
11301 : : Evaluate and ignore s1 in case it had side-effects. */
11302 : 26 : return omit_one_operand_loc (loc, type, integer_zero_node, s1);
11303 : :
11304 : 44 : if (p2[1] != '\0')
11305 : : return NULL_TREE; /* Really call strpbrk. */
11306 : :
11307 : 82975 : fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11308 : 41 : if (!fn)
11309 : : return NULL_TREE;
11310 : :
11311 : : /* New argument list transforming strpbrk(s1, s2) to
11312 : : strchr(s1, s2[0]). */
11313 : 41 : return build_call_expr_loc (loc, fn, 2, s1,
11314 : 41 : build_int_cst (integer_type_node, p2[0]));
11315 : : }
11316 : :
11317 : : /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11318 : : to the call.
11319 : :
11320 : : Return NULL_TREE if no simplification was possible, otherwise return the
11321 : : simplified form of the call as a tree.
11322 : :
11323 : : The simplified form may be a constant or other expression which
11324 : : computes the same value, but in a more efficient manner (including
11325 : : calls to other builtin functions).
11326 : :
11327 : : The call may contain arguments which need to be evaluated, but
11328 : : which are not useful to determine the result of the call. In
11329 : : this case we return a chain of COMPOUND_EXPRs. The LHS of each
11330 : : COMPOUND_EXPR will be an argument which must be evaluated.
11331 : : COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11332 : : COMPOUND_EXPR in the chain will contain the tree for the simplified
11333 : : form of the builtin function call. */
11334 : :
11335 : : static tree
11336 : 2560 : fold_builtin_strspn (location_t loc, tree expr, tree s1, tree s2, tree type)
11337 : : {
11338 : 2560 : if (!validate_arg (s1, POINTER_TYPE)
11339 : 2560 : || !validate_arg (s2, POINTER_TYPE))
11340 : : return NULL_TREE;
11341 : :
11342 : 2560 : if (!check_nul_terminated_array (expr, s1)
11343 : 2560 : || !check_nul_terminated_array (expr, s2))
11344 : 58 : return NULL_TREE;
11345 : :
11346 : 2502 : const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11347 : :
11348 : : /* If either argument is "", return NULL_TREE. */
11349 : 2502 : if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11350 : : /* Evaluate and ignore both arguments in case either one has
11351 : : side-effects. */
11352 : 147 : return omit_two_operands_loc (loc, type, size_zero_node, s1, s2);
11353 : : return NULL_TREE;
11354 : : }
11355 : :
11356 : : /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11357 : : to the call.
11358 : :
11359 : : Return NULL_TREE if no simplification was possible, otherwise return the
11360 : : simplified form of the call as a tree.
11361 : :
11362 : : The simplified form may be a constant or other expression which
11363 : : computes the same value, but in a more efficient manner (including
11364 : : calls to other builtin functions).
11365 : :
11366 : : The call may contain arguments which need to be evaluated, but
11367 : : which are not useful to determine the result of the call. In
11368 : : this case we return a chain of COMPOUND_EXPRs. The LHS of each
11369 : : COMPOUND_EXPR will be an argument which must be evaluated.
11370 : : COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11371 : : COMPOUND_EXPR in the chain will contain the tree for the simplified
11372 : : form of the builtin function call. */
11373 : :
11374 : : static tree
11375 : 2465 : fold_builtin_strcspn (location_t loc, tree expr, tree s1, tree s2, tree type)
11376 : : {
11377 : 2465 : if (!validate_arg (s1, POINTER_TYPE)
11378 : 2465 : || !validate_arg (s2, POINTER_TYPE))
11379 : : return NULL_TREE;
11380 : :
11381 : 2465 : if (!check_nul_terminated_array (expr, s1)
11382 : 2465 : || !check_nul_terminated_array (expr, s2))
11383 : 58 : return NULL_TREE;
11384 : :
11385 : : /* If the first argument is "", return NULL_TREE. */
11386 : 2407 : const char *p1 = c_getstr (s1);
11387 : 2407 : if (p1 && *p1 == '\0')
11388 : : {
11389 : : /* Evaluate and ignore argument s2 in case it has
11390 : : side-effects. */
11391 : 65 : return omit_one_operand_loc (loc, type, size_zero_node, s2);
11392 : : }
11393 : :
11394 : : /* If the second argument is "", return __builtin_strlen(s1). */
11395 : 2342 : const char *p2 = c_getstr (s2);
11396 : 2342 : if (p2 && *p2 == '\0')
11397 : : {
11398 : 2400 : tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11399 : :
11400 : : /* If the replacement _DECL isn't initialized, don't do the
11401 : : transformation. */
11402 : 81 : if (!fn)
11403 : : return NULL_TREE;
11404 : :
11405 : 81 : return fold_convert_loc (loc, type,
11406 : 81 : build_call_expr_loc (loc, fn, 1, s1));
11407 : : }
11408 : : return NULL_TREE;
11409 : : }
11410 : :
11411 : : /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11412 : : produced. False otherwise. This is done so that we don't output the error
11413 : : or warning twice or three times. */
11414 : :
11415 : : bool
11416 : 41900 : fold_builtin_next_arg (tree exp, bool va_start_p)
11417 : : {
11418 : 41900 : tree fntype = TREE_TYPE (current_function_decl);
11419 : 41900 : int nargs = call_expr_nargs (exp);
11420 : 41900 : tree arg;
11421 : : /* There is a good chance the current input_location points inside the
11422 : : definition of the va_start macro (perhaps on the token for the
11423 : : builtin) in a system header, so warnings would not be emitted.
11424 : : Use the location in real source code instead. */
11425 : 41900 : location_t current_location =
11426 : 41900 : linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
11427 : : NULL);
11428 : :
11429 : 41900 : if (!stdarg_p (fntype))
11430 : : {
11431 : 8 : error ("%<va_start%> used in function with fixed arguments");
11432 : 8 : return true;
11433 : : }
11434 : :
11435 : 41892 : if (va_start_p)
11436 : : {
11437 : 41742 : if (nargs != 2)
11438 : : {
11439 : 0 : error ("wrong number of arguments to function %<va_start%>");
11440 : 0 : return true;
11441 : : }
11442 : 41742 : arg = CALL_EXPR_ARG (exp, 1);
11443 : : }
11444 : : /* We use __builtin_va_start (ap, 0) or __builtin_next_arg (0)
11445 : : once we have checked the arguments and, if needed, issued a warning. */
11446 : : else
11447 : : {
11448 : 150 : if (nargs == 0)
11449 : : {
11450 : : /* Evidently an out-of-date version of <stdarg.h>; we can't validate
11451 : : va_start's second argument, but it can still work as intended. */
11452 : 0 : warning_at (current_location,
11453 : 0 : OPT_Wvarargs,
11454 : : "%<__builtin_next_arg%> called without an argument");
11455 : 0 : return true;
11456 : : }
11457 : 150 : else if (nargs > 1)
11458 : : {
11459 : 0 : error ("wrong number of arguments to function %<__builtin_next_arg%>");
11460 : 0 : return true;
11461 : : }
11462 : 150 : arg = CALL_EXPR_ARG (exp, 0);
11463 : : }
11464 : :
11465 : 41892 : if (TREE_CODE (arg) == SSA_NAME
11466 : 41892 : && SSA_NAME_VAR (arg))
11467 : : arg = SSA_NAME_VAR (arg);
11468 : :
11469 : : /* We destructively modify the call to be __builtin_va_start (ap, 0)
11470 : : or __builtin_next_arg (0) the first time we see it, after checking
11471 : : the arguments and if needed issuing a warning. */
11472 : 41892 : if (!integer_zerop (arg))
11473 : : {
11474 : 7014 : tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11475 : :
11476 : : /* Strip off all nops for the sake of the comparison. This
11477 : : is not quite the same as STRIP_NOPS. It does more.
11478 : : We must also strip off INDIRECT_EXPR for C++ reference
11479 : : parameters. */
11480 : 14030 : while (CONVERT_EXPR_P (arg)
11481 : 14041 : || INDIRECT_REF_P (arg))
11482 : 11 : arg = TREE_OPERAND (arg, 0);
11483 : 7014 : if (arg != last_parm)
11484 : : {
11485 : : /* FIXME: Sometimes the tree optimizers hand us something other
11486 : : than the last named argument even though the user wrote the
11487 : : last one. We just warn and carry on as if it were the last
11488 : : argument, so wrong code may be generated because of it. */
11490 : 11 : warning_at (current_location,
11491 : 11 : OPT_Wvarargs,
11492 : : "second parameter of %<va_start%> not last named argument");
11493 : : }
11494 : :
11495 : : /* Undefined by C99 7.15.1.4p4 (va_start):
11496 : : "If the parameter parmN is declared with the register storage
11497 : : class, with a function or array type, or with a type that is
11498 : : not compatible with the type that results after application of
11499 : : the default argument promotions, the behavior is undefined."
11500 : : */
11501 : 7003 : else if (DECL_REGISTER (arg))
11502 : : {
11503 : 11 : warning_at (current_location,
11504 : 11 : OPT_Wvarargs,
11505 : : "undefined behavior when second parameter of "
11506 : : "%<va_start%> is declared with %<register%> storage");
11507 : : }
11508 : :
11509 : : /* We want to verify the second parameter just once before the tree
11510 : : optimizers are run and then avoid keeping it in the tree,
11511 : : as otherwise we could warn even for correct code like:
11512 : : void foo (int i, ...)
11513 : : { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11514 : 7014 : if (va_start_p)
11515 : 7012 : CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11516 : : else
11517 : 2 : CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11518 : : }
11519 : : return false;
11520 : : }
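 : :
 : : /* A minimal sketch of user code that triggers the -Wvarargs
 : : diagnostic above (hypothetical example, not part of the GCC
 : : sources):
 : :
 : : #include <stdarg.h>
 : : void f (int a, int b, ...)
 : : {
 : : va_list ap;
 : : va_start (ap, a); // warning: second parameter of va_start
 : : // not last named argument
 : : va_end (ap);
 : : }
 : : */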
11521 : :
11522 : :
11523 : : /* Expand a call EXP to __builtin_object_size. */
11524 : :
11525 : : static rtx
11526 : 618 : expand_builtin_object_size (tree exp)
11527 : : {
11528 : 618 : tree ost;
11529 : 618 : int object_size_type;
11530 : 618 : tree fndecl = get_callee_fndecl (exp);
11531 : :
11532 : 618 : if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11533 : : {
11534 : 0 : error ("first argument of %qD must be a pointer, second integer constant",
11535 : : fndecl);
11536 : 0 : expand_builtin_trap ();
11537 : 0 : return const0_rtx;
11538 : : }
11539 : :
11540 : 618 : ost = CALL_EXPR_ARG (exp, 1);
11541 : 618 : STRIP_NOPS (ost);
11542 : :
11543 : 618 : if (TREE_CODE (ost) != INTEGER_CST
11544 : 618 : || tree_int_cst_sgn (ost) < 0
11545 : 1236 : || compare_tree_int (ost, 3) > 0)
11546 : : {
11547 : 0 : error ("last argument of %qD is not integer constant between 0 and 3",
11548 : : fndecl);
11549 : 0 : expand_builtin_trap ();
11550 : 0 : return const0_rtx;
11551 : : }
11552 : :
11553 : 618 : object_size_type = tree_to_shwi (ost);
11554 : :
11555 : 618 : return object_size_type < 2 ? constm1_rtx : const0_rtx;
11556 : : }
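 : :
 : : /* Sketch of the fallback above (hypothetical user code, not part of
 : : the GCC sources): if a __builtin_object_size call survives to RTL
 : : expansion unfolded, it degrades to the conservative constant:
 : :
 : : __builtin_object_size (p, 0); // expands to (size_t) -1
 : : __builtin_object_size (p, 2); // expands to (size_t) 0
 : : */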
11557 : :
11558 : : /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11559 : : FCODE is the BUILT_IN_* to use.
11560 : : Return NULL_RTX if we failed; the caller should emit a normal call,
11561 : : otherwise try to get the result in TARGET, if convenient (and in
11562 : : mode MODE if that's convenient). */
11563 : :
11564 : : static rtx
11565 : 837 : expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
11566 : : enum built_in_function fcode)
11567 : : {
11568 : 1410 : if (!validate_arglist (exp,
11569 : : POINTER_TYPE,
11570 : : fcode == BUILT_IN_MEMSET_CHK
11571 : : ? INTEGER_TYPE : POINTER_TYPE,
11572 : : INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11573 : : return NULL_RTX;
11574 : :
11575 : 837 : tree dest = CALL_EXPR_ARG (exp, 0);
11576 : 837 : tree src = CALL_EXPR_ARG (exp, 1);
11577 : 837 : tree len = CALL_EXPR_ARG (exp, 2);
11578 : 837 : tree size = CALL_EXPR_ARG (exp, 3);
11579 : :
11580 : : /* FIXME: Set access mode to write only for memset et al. */
11581 : 837 : bool sizes_ok = check_access (exp, len, /*maxread=*/NULL_TREE,
11582 : : /*srcstr=*/NULL_TREE, size, access_read_write);
11583 : :
11584 : 837 : if (!tree_fits_uhwi_p (size))
11585 : : return NULL_RTX;
11586 : :
11587 : 627 : if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
11588 : : {
11589 : : /* Avoid transforming the checking call to an ordinary one when
11590 : : an overflow has been detected or when the call couldn't be
11591 : : validated because the size is not constant. */
11592 : 186 : if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
11593 : : return NULL_RTX;
11594 : :
11595 : 0 : tree fn = NULL_TREE;
11596 : : /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11597 : : mem{cpy,pcpy,move,set} is available. */
11598 : 0 : switch (fcode)
11599 : : {
11600 : 0 : case BUILT_IN_MEMCPY_CHK:
11601 : 0 : fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
11602 : 0 : break;
11603 : 0 : case BUILT_IN_MEMPCPY_CHK:
11604 : 0 : fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
11605 : 0 : break;
11606 : 0 : case BUILT_IN_MEMMOVE_CHK:
11607 : 0 : fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
11608 : 0 : break;
11609 : 0 : case BUILT_IN_MEMSET_CHK:
11610 : 0 : fn = builtin_decl_explicit (BUILT_IN_MEMSET);
11611 : 0 : break;
11612 : : default:
11613 : : break;
11614 : : }
11615 : :
11616 : 0 : if (! fn)
11617 : : return NULL_RTX;
11618 : :
11619 : 0 : fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
11620 : 0 : gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11621 : 0 : CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11622 : 0 : return expand_expr (fn, target, mode, EXPAND_NORMAL);
11623 : : }
11624 : 441 : else if (fcode == BUILT_IN_MEMSET_CHK)
11625 : : return NULL_RTX;
11626 : : else
11627 : : {
11628 : 293 : unsigned int dest_align = get_pointer_alignment (dest);
11629 : :
11630 : : /* If DEST is not a pointer type, call the normal function. */
11631 : 293 : if (dest_align == 0)
11632 : : return NULL_RTX;
11633 : :
11634 : : /* If SRC and DEST are the same (and not volatile), do nothing. */
11635 : 293 : if (operand_equal_p (src, dest, 0))
11636 : : {
11637 : 0 : tree expr;
11638 : :
11639 : 0 : if (fcode != BUILT_IN_MEMPCPY_CHK)
11640 : : {
11641 : : /* Evaluate and ignore LEN in case it has side-effects. */
11642 : 0 : expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11643 : 0 : return expand_expr (dest, target, mode, EXPAND_NORMAL);
11644 : : }
11645 : :
11646 : 0 : expr = fold_build_pointer_plus (dest, len);
11647 : 0 : return expand_expr (expr, target, mode, EXPAND_NORMAL);
11648 : : }
11649 : :
11650 : : /* __memmove_chk special case. */
11651 : 293 : if (fcode == BUILT_IN_MEMMOVE_CHK)
11652 : : {
11653 : 79 : unsigned int src_align = get_pointer_alignment (src);
11654 : :
11655 : 79 : if (src_align == 0)
11656 : : return NULL_RTX;
11657 : :
11658 : : /* If src is categorized for a readonly section we can use
11659 : : normal __memcpy_chk. */
11660 : 79 : if (readonly_data_expr (src))
11661 : : {
11662 : 15 : tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
11663 : 15 : if (!fn)
11664 : : return NULL_RTX;
11665 : 15 : fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
11666 : : dest, src, len, size);
11667 : 15 : gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11668 : 15 : CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11669 : 15 : return expand_expr (fn, target, mode, EXPAND_NORMAL);
11670 : : }
11671 : : }
11672 : 278 : return NULL_RTX;
11673 : : }
11674 : : }
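 : :
 : : /* An illustrative sketch (hypothetical user code, not part of the
 : : GCC sources): with a constant length that fits the destination,
 : : the checking variant expands like the plain builtin:
 : :
 : : char buf[8];
 : : __builtin___memcpy_chk (buf, src, 4,
 : : __builtin_object_size (buf, 0)); // like memcpy (buf, src, 4)
 : : */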
11675 : :
11676 : : /* Emit warning if a buffer overflow is detected at compile time. */
11677 : :
11678 : : static void
11679 : 1135 : maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11680 : : {
11681 : : /* The source string. */
11682 : 1135 : tree srcstr = NULL_TREE;
11683 : : /* The size of the destination object returned by __builtin_object_size. */
11684 : 1135 : tree objsize = NULL_TREE;
11685 : : /* The string being appended to (as in __strcat_chk),
11686 : : or null if the operation isn't a concatenation. */
11687 : 1135 : tree catstr = NULL_TREE;
11688 : : /* The maximum length of the source sequence in a bounded operation
11689 : : (such as __strncat_chk) or null if the operation isn't bounded
11690 : : (such as __strcat_chk). */
11691 : 1135 : tree maxread = NULL_TREE;
11692 : : /* The exact size of the access (such as in __strncpy_chk). */
11693 : 1135 : tree size = NULL_TREE;
11694 : : /* The access mode of the function that's checked. Except for snprintf,
11695 : : both writing and reading are checked. */
11696 : 1135 : access_mode mode = access_read_write;
11697 : :
11698 : 1135 : switch (fcode)
11699 : : {
11700 : 276 : case BUILT_IN_STRCPY_CHK:
11701 : 276 : case BUILT_IN_STPCPY_CHK:
11702 : 276 : srcstr = CALL_EXPR_ARG (exp, 1);
11703 : 276 : objsize = CALL_EXPR_ARG (exp, 2);
11704 : 276 : break;
11705 : :
11706 : 198 : case BUILT_IN_STRCAT_CHK:
11707 : : /* For __strcat_chk the warning will be emitted only if overflowing
11708 : : by at least strlen (dest) + 1 bytes. */
11709 : 198 : catstr = CALL_EXPR_ARG (exp, 0);
11710 : 198 : srcstr = CALL_EXPR_ARG (exp, 1);
11711 : 198 : objsize = CALL_EXPR_ARG (exp, 2);
11712 : 198 : break;
11713 : :
11714 : 109 : case BUILT_IN_STRNCAT_CHK:
11715 : 109 : catstr = CALL_EXPR_ARG (exp, 0);
11716 : 109 : srcstr = CALL_EXPR_ARG (exp, 1);
11717 : 109 : maxread = CALL_EXPR_ARG (exp, 2);
11718 : 109 : objsize = CALL_EXPR_ARG (exp, 3);
11719 : 109 : break;
11720 : :
11721 : 243 : case BUILT_IN_STRNCPY_CHK:
11722 : 243 : case BUILT_IN_STPNCPY_CHK:
11723 : 243 : srcstr = CALL_EXPR_ARG (exp, 1);
11724 : 243 : size = CALL_EXPR_ARG (exp, 2);
11725 : 243 : objsize = CALL_EXPR_ARG (exp, 3);
11726 : 243 : break;
11727 : :
11728 : 309 : case BUILT_IN_SNPRINTF_CHK:
11729 : 309 : case BUILT_IN_VSNPRINTF_CHK:
11730 : 309 : maxread = CALL_EXPR_ARG (exp, 1);
11731 : 309 : objsize = CALL_EXPR_ARG (exp, 3);
11732 : : /* The only checked access is the write to the destination. */
11733 : 309 : mode = access_write_only;
11734 : 309 : break;
11735 : 0 : default:
11736 : 0 : gcc_unreachable ();
11737 : : }
11738 : :
11739 : 1135 : if (catstr && maxread)
11740 : : {
11741 : : /* Check __strncat_chk. There is no way to determine the length
11742 : : of the string to which the source string is being appended so
11743 : : just warn when the length of the source string is not known. */
11744 : 109 : check_strncat_sizes (exp, objsize);
11745 : 109 : return;
11746 : : }
11747 : :
11748 : 1026 : check_access (exp, size, maxread, srcstr, objsize, mode);
11749 : : }
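 : :
 : : /* A sketch of a call the checks above flag (hypothetical user code,
 : : not part of the GCC sources):
 : :
 : : char d[4];
 : : __builtin___strcpy_chk (d, "too long",
 : : __builtin_object_size (d, 1)); // overflow warning
 : : */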
11750 : :
11751 : : /* Emit warning if a buffer overflow is detected at compile time
11752 : : in __sprintf_chk/__vsprintf_chk calls. */
11753 : :
11754 : : static void
11755 : 1329 : maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11756 : : {
11757 : 1329 : tree size, len, fmt;
11758 : 1329 : const char *fmt_str;
11759 : 1329 : int nargs = call_expr_nargs (exp);
11760 : :
11761 : : /* Verify the required arguments in the original call. */
11762 : :
11763 : 1329 : if (nargs < 4)
11764 : : return;
11765 : 1329 : size = CALL_EXPR_ARG (exp, 2);
11766 : 1329 : fmt = CALL_EXPR_ARG (exp, 3);
11767 : :
11768 : 1329 : if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11769 : 21 : return;
11770 : :
11771 : : /* Check whether the format is a literal string constant. */
11772 : 1308 : fmt_str = c_getstr (fmt);
11773 : 1308 : if (fmt_str == NULL)
11774 : : return;
11775 : :
11776 : 1272 : if (!init_target_chars ())
11777 : : return;
11778 : :
11779 : : /* If the format doesn't contain % args or %%, we know its size. */
11780 : 1272 : if (strchr (fmt_str, target_percent) == 0)
11781 : 22 : len = build_int_cstu (size_type_node, strlen (fmt_str));
11782 : : /* If the format is "%s" and the first ... argument is a string literal,
11783 : : we know it too. */
11784 : 1250 : else if (fcode == BUILT_IN_SPRINTF_CHK
11785 : 1094 : && strcmp (fmt_str, target_percent_s) == 0)
11786 : : {
11787 : 49 : tree arg;
11788 : :
11789 : 49 : if (nargs < 5)
11790 : : return;
11791 : 49 : arg = CALL_EXPR_ARG (exp, 4);
11792 : 49 : if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11793 : : return;
11794 : :
11795 : 45 : len = c_strlen (arg, 1);
11796 : 45 : if (!len || ! tree_fits_uhwi_p (len))
11797 : : return;
11798 : : }
11799 : : else
11800 : : return;
11801 : :
11802 : : /* Add one for the terminating nul. */
11803 : 34 : len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
11804 : :
11805 : 34 : check_access (exp, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, len, size,
11806 : : access_write_only);
11807 : : }
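 : :
 : : /* A minimal sketch of a call the check above diagnoses (hypothetical
 : : user code, not part of the GCC sources): the "%s" format plus a
 : : literal argument gives a known output of 6 bytes including the
 : : terminating nul, overflowing the 4-byte destination:
 : :
 : : char buf[4];
 : : __builtin___sprintf_chk (buf, 0, __builtin_object_size (buf, 0),
 : : "%s", "hello");
 : : */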
11808 : :
11809 : : /* Fold a call to __builtin_object_size with arguments PTR and OST,
11810 : : if possible. */
11811 : :
11812 : : static tree
11813 : 200060 : fold_builtin_object_size (tree ptr, tree ost, enum built_in_function fcode)
11814 : : {
11815 : 200060 : tree bytes;
11816 : 200060 : int object_size_type;
11817 : :
11818 : 200060 : if (!validate_arg (ptr, POINTER_TYPE)
11819 : 200060 : || !validate_arg (ost, INTEGER_TYPE))
11820 : : return NULL_TREE;
11821 : :
11822 : 200060 : STRIP_NOPS (ost);
11823 : :
11824 : 200060 : if (TREE_CODE (ost) != INTEGER_CST
11825 : 200060 : || tree_int_cst_sgn (ost) < 0
11826 : 400120 : || compare_tree_int (ost, 3) > 0)
11827 : 0 : return NULL_TREE;
11828 : :
11829 : 200060 : object_size_type = tree_to_shwi (ost);
11830 : :
11831 : : /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11832 : : if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11833 : : and (size_t) 0 for types 2 and 3. */
11834 : 200060 : if (TREE_SIDE_EFFECTS (ptr))
11835 : 603 : return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11836 : :
11837 : 199457 : if (fcode == BUILT_IN_DYNAMIC_OBJECT_SIZE)
11838 : 66495 : object_size_type |= OST_DYNAMIC;
11839 : :
11840 : 199457 : if (TREE_CODE (ptr) == ADDR_EXPR)
11841 : : {
11842 : 12077 : compute_builtin_object_size (ptr, object_size_type, &bytes);
11843 : 12077 : if ((object_size_type & OST_DYNAMIC)
11844 : 10892 : || int_fits_type_p (bytes, size_type_node))
11845 : 12077 : return fold_convert (size_type_node, bytes);
11846 : : }
11847 : 187380 : else if (TREE_CODE (ptr) == SSA_NAME)
11848 : : {
11849 : : /* If object size is not known yet, delay folding until
11850 : : later. Maybe subsequent passes will help determine
11851 : : it. */
11852 : 102001 : if (compute_builtin_object_size (ptr, object_size_type, &bytes)
11853 : 102001 : && ((object_size_type & OST_DYNAMIC)
11854 : 2544 : || int_fits_type_p (bytes, size_type_node)))
11855 : 4817 : return fold_convert (size_type_node, bytes);
11856 : : }
11857 : :
11858 : : return NULL_TREE;
11859 : : }
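 : :
 : : /* An illustrative sketch of the ADDR_EXPR case above (hypothetical
 : : user code, not part of the GCC sources):
 : :
 : : char buf[16];
 : : size_t n = __builtin_object_size (&buf[4], 0); // folds to 12
 : : */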
11860 : :
11861 : : /* Builtins with folding operations that operate on "..." arguments
11862 : : need special handling; we need to store the arguments in a convenient
11863 : : data structure before attempting any folding. Fortunately there are
11864 : : only a few builtins that fall into this category. FNDECL is the
11865 : : function, EXP is the CALL_EXPR for the call. */
11866 : :
11867 : : static tree
11868 : 2511825 : fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
11869 : : {
11870 : 2511825 : enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11871 : 2511825 : tree ret = NULL_TREE;
11872 : :
11873 : 2511825 : switch (fcode)
11874 : : {
11875 : 115356 : case BUILT_IN_FPCLASSIFY:
11876 : 115356 : ret = fold_builtin_fpclassify (loc, args, nargs);
11877 : 115356 : break;
11878 : :
11879 : 54 : case BUILT_IN_ADDC:
11880 : 54 : case BUILT_IN_ADDCL:
11881 : 54 : case BUILT_IN_ADDCLL:
11882 : 54 : case BUILT_IN_SUBC:
11883 : 54 : case BUILT_IN_SUBCL:
11884 : 54 : case BUILT_IN_SUBCLL:
11885 : 54 : return fold_builtin_addc_subc (loc, fcode, args);
11886 : :
11887 : : default:
11888 : : break;
11889 : : }
11890 : 115356 : if (ret)
11891 : : {
11892 : 115356 : ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11893 : 115356 : SET_EXPR_LOCATION (ret, loc);
11894 : 115356 : suppress_warning (ret);
11895 : 115356 : return ret;
11896 : : }
11897 : : return NULL_TREE;
11898 : : }
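 : :
 : : /* A sketch of a call folded here (hypothetical user code, not part
 : : of the GCC sources): with a constant operand, fpclassify resolves
 : : to one of its classification arguments:
 : :
 : : int k = __builtin_fpclassify (nan_v, inf_v, norm_v, sub_v, zero_v,
 : : 1.0); // folds to norm_v
 : : */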
11899 : :
11900 : : /* Initialize format string characters in the target charset. */
11901 : :
11902 : : bool
11903 : 276281 : init_target_chars (void)
11904 : : {
11905 : 276281 : static bool init;
11906 : 276281 : if (!init)
11907 : : {
11908 : 112021 : target_newline = lang_hooks.to_target_charset ('\n');
11909 : 112021 : target_percent = lang_hooks.to_target_charset ('%');
11910 : 112021 : target_c = lang_hooks.to_target_charset ('c');
11911 : 112021 : target_s = lang_hooks.to_target_charset ('s');
11912 : 112021 : if (target_newline == 0 || target_percent == 0 || target_c == 0
11913 : 112021 : || target_s == 0)
11914 : : return false;
11915 : :
11916 : 112021 : target_percent_c[0] = target_percent;
11917 : 112021 : target_percent_c[1] = target_c;
11918 : 112021 : target_percent_c[2] = '\0';
11919 : :
11920 : 112021 : target_percent_s[0] = target_percent;
11921 : 112021 : target_percent_s[1] = target_s;
11922 : 112021 : target_percent_s[2] = '\0';
11923 : :
11924 : 112021 : target_percent_s_newline[0] = target_percent;
11925 : 112021 : target_percent_s_newline[1] = target_s;
11926 : 112021 : target_percent_s_newline[2] = target_newline;
11927 : 112021 : target_percent_s_newline[3] = '\0';
11928 : :
11929 : 112021 : init = true;
11930 : : }
11931 : : return true;
11932 : : }
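 : :
 : : /* Typical caller-side pattern (a sketch mirroring e.g.
 : : maybe_emit_sprintf_chk_warning above): bail out if the target
 : : charset mapping fails, then compare format strings against the
 : : cached characters:
 : :
 : : if (!init_target_chars ())
 : : return;
 : : if (strcmp (fmt_str, target_percent_s) == 0)
 : : ... the format is exactly "%s" ...
 : : */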
11933 : :
11934 : : /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
11935 : : and no overflow/underflow occurred. INEXACT is true if M was not
11936 : : exactly calculated. TYPE is the tree type for the result. This
11937 : : function assumes that the MPFR flags were cleared before M was
11938 : : calculated, so that any flag now set was raised by the calculation
11939 : : of M itself. Return NULL_TREE if any check fails. */
11940 : :
11941 : : static tree
11942 : 2979 : do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
11943 : : {
11944 : : /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11945 : : overflow/underflow occurred. If -frounding-math, proceed iff the
11946 : : calculation of M was exact. */
11947 : 4832 : if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
11948 : 4832 : && (!flag_rounding_math || !inexact))
11949 : : {
11950 : 1853 : REAL_VALUE_TYPE rr;
11951 : :
11952 : 1853 : real_from_mpfr (&rr, m, type, MPFR_RNDN);
11953 : : /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
11954 : : check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11955 : : but the mpfr_t is not, then we underflowed in the
11956 : : conversion. */
11957 : 1853 : if (real_isfinite (&rr)
11958 : 1853 : && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
11959 : : {
11960 : 1853 : REAL_VALUE_TYPE rmode;
11961 : :
11962 : 1853 : real_convert (&rmode, TYPE_MODE (type), &rr);
11963 : : /* Proceed iff the specified mode can hold the value. */
11964 : 1853 : if (real_identical (&rmode, &rr))
11965 : 1853 : return build_real (type, rmode);
11966 : : }
11967 : : }
11968 : : return NULL_TREE;
11969 : : }
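 : :
 : : /* The flag protocol described above, sketched from the caller side
 : : (compare do_mpfr_remquo and do_mpfr_lgamma_r below):
 : :
 : : mpfr_clear_flags ();
 : : inexact = mpfr_lgamma (m, &sg, m, rnd);
 : : result = do_mpfr_ckconv (m, type, inexact);
 : : */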
11970 : :
11971 : : /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
11972 : : number and no overflow/underflow occurred. INEXACT is true if M
11973 : : was not exactly calculated. TYPE is the tree type for the result.
11974 : : This function assumes that the MPFR flags were cleared before M
11975 : : was calculated, so that any flag now set was raised by the
11976 : : calculation of M itself. Return NULL_TREE if any check fails;
11977 : : if FORCE_CONVERT is true, bypass the checks. */
11978 : :
11979 : : static tree
11980 : 4130 : do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
11981 : : {
11982 : : /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11983 : : overflow/underflow occurred. If -frounding-math, proceed iff the
11984 : : calculation of M was exact. */
11985 : 4130 : if (force_convert
11986 : 4130 : || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
11987 : 3784 : && !mpfr_overflow_p () && !mpfr_underflow_p ()
11988 : 3784 : && (!flag_rounding_math || !inexact)))
11989 : : {
11990 : 3951 : REAL_VALUE_TYPE re, im;
11991 : :
11992 : 3951 : real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), MPFR_RNDN);
11993 : 3951 : real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), MPFR_RNDN);
11994 : : /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
11995 : : check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11996 : : but the mpfr_t is not, then we underflowed in the
11997 : : conversion. */
11998 : 3951 : if (force_convert
11999 : 3951 : || (real_isfinite (&re) && real_isfinite (&im)
12000 : 3784 : && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
12001 : 3784 : && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
12002 : : {
12003 : 3951 : REAL_VALUE_TYPE re_mode, im_mode;
12004 : :
12005 : 3951 : real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
12006 : 3951 : real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
12007 : : /* Proceed iff the specified mode can hold the value. */
12008 : 3951 : if (force_convert
12009 : 3951 : || (real_identical (&re_mode, &re)
12010 : 3784 : && real_identical (&im_mode, &im)))
12011 : 3951 : return build_complex (type, build_real (TREE_TYPE (type), re_mode),
12012 : 7902 : build_real (TREE_TYPE (type), im_mode));
12013 : : }
12014 : : }
12015 : : return NULL_TREE;
12016 : : }
12017 : :
12018 : : /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
12019 : : the pointer *(ARG_QUO) and return the result. The type is taken
12020 : : from the type of ARG0 and is used for setting the precision of the
12021 : : calculation and results. */
12022 : :
12023 : : static tree
12024 : 87209 : do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
12025 : : {
12026 : 87209 : tree const type = TREE_TYPE (arg0);
12027 : 87209 : tree result = NULL_TREE;
12028 : :
12029 : 87209 : STRIP_NOPS (arg0);
12030 : 87209 : STRIP_NOPS (arg1);
12031 : :
12032 : : /* To proceed, MPFR must exactly represent the target floating point
12033 : : format, which only happens when the target base equals two. */
12034 : 87209 : if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12035 : 87209 : && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
12036 : 89514 : && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
12037 : : {
12038 : 2305 : const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
12039 : 2305 : const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
12040 : :
12041 : 2305 : if (real_isfinite (ra0) && real_isfinite (ra1))
12042 : : {
12043 : 2305 : const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12044 : 2305 : const int prec = fmt->p;
12045 : 2305 : const mpfr_rnd_t rnd = fmt->round_towards_zero? MPFR_RNDZ : MPFR_RNDN;
12046 : 2305 : tree result_rem;
12047 : 2305 : long integer_quo;
12048 : 2305 : mpfr_t m0, m1;
12049 : :
12050 : 2305 : mpfr_inits2 (prec, m0, m1, NULL);
12051 : 2305 : mpfr_from_real (m0, ra0, MPFR_RNDN);
12052 : 2305 : mpfr_from_real (m1, ra1, MPFR_RNDN);
12053 : 2305 : mpfr_clear_flags ();
12054 : 2305 : mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
12055 : : /* Remquo is independent of the rounding mode, so pass
12056 : : inexact=0 to do_mpfr_ckconv(). */
12057 : 2305 : result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
12058 : 2305 : mpfr_clears (m0, m1, NULL);
12059 : 2305 : if (result_rem)
12060 : : {
12061 : : /* MPFR calculates quo in the host's long so it may
12062 : : return more bits in quo than the target int can hold
12063 : : if sizeof(host long) > sizeof(target int). This can
12064 : : happen even for native compilers in LP64 mode. In
12065 : : these cases, modulo the quo value with the largest
12066 : : number that the target int can hold while leaving one
12067 : : bit for the sign. */
12068 : 1179 : if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
12069 : 1179 : integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
12070 : :
12071 : : /* Dereference the quo pointer argument. */
12072 : 1179 : arg_quo = build_fold_indirect_ref (arg_quo);
12073 : : /* Proceed iff a valid pointer type was passed in. */
12074 : 1179 : if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
12075 : : {
12076 : : /* Set the value. */
12077 : 1179 : tree result_quo
12078 : 1179 : = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
12079 : : build_int_cst (TREE_TYPE (arg_quo),
12080 : : integer_quo));
12081 : 1179 : TREE_SIDE_EFFECTS (result_quo) = 1;
12082 : : /* Combine the quo assignment with the rem. */
12083 : 1179 : result = fold_build2 (COMPOUND_EXPR, type,
12084 : : result_quo, result_rem);
12085 : 1179 : suppress_warning (result, OPT_Wunused_value);
12086 : 1179 : result = non_lvalue (result);
12087 : : }
12088 : : }
12089 : : }
12090 : : }
12091 : 87209 : return result;
12092 : : }
12093 : :
12094 : : /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
12095 : : resulting value as a tree with type TYPE. The mpfr precision is
12096 : : set to the precision of TYPE. We assume that this mpfr function
12097 : : returns zero if the result could be calculated exactly within the
12098 : : requested precision. In addition, the integer pointer represented
12099 : : by ARG_SG will be dereferenced and set to the appropriate signgam
12100 : : (-1,1) value. */
12101 : :
12102 : : static tree
12103 : 6504 : do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
12104 : : {
12105 : 6504 : tree result = NULL_TREE;
12106 : :
12107 : 6504 : STRIP_NOPS (arg);
12108 : :
12109 : : /* To proceed, MPFR must exactly represent the target floating point
12110 : : format, which only happens when the target base equals two. Also
12111 : : verify ARG is a constant and that ARG_SG is an int pointer. */
12112 : 6504 : if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12113 : 6504 : && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
12114 : 6470 : && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
12115 : 12974 : && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
12116 : : {
12117 : 6470 : const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
12118 : :
12119 : : /* In addition to NaN and Inf, the argument cannot be zero or a
12120 : : negative integer. */
12121 : 6470 : if (real_isfinite (ra)
12122 : 6470 : && ra->cl != rvc_zero
12123 : 12940 : && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
12124 : : {
12125 : 674 : const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12126 : 674 : const int prec = fmt->p;
12127 : 674 : const mpfr_rnd_t rnd = fmt->round_towards_zero? MPFR_RNDZ : MPFR_RNDN;
12128 : 674 : int inexact, sg;
12129 : 674 : tree result_lg;
12130 : :
12131 : 674 : auto_mpfr m (prec);
12132 : 674 : mpfr_from_real (m, ra, MPFR_RNDN);
12133 : 674 : mpfr_clear_flags ();
12134 : 674 : inexact = mpfr_lgamma (m, &sg, m, rnd);
12135 : 674 : result_lg = do_mpfr_ckconv (m, type, inexact);
12136 : 674 : if (result_lg)
12137 : : {
12138 : 674 : tree result_sg;
12139 : :
12140 : : /* Dereference the arg_sg pointer argument. */
12141 : 674 : arg_sg = build_fold_indirect_ref (arg_sg);
12142 : : /* Assign the signgam value into *arg_sg. */
12143 : 674 : result_sg = fold_build2 (MODIFY_EXPR,
12144 : : TREE_TYPE (arg_sg), arg_sg,
12145 : : build_int_cst (TREE_TYPE (arg_sg), sg));
12146 : 674 : TREE_SIDE_EFFECTS (result_sg) = 1;
12147 : : /* Combine the signgam assignment with the lgamma result. */
12148 : 674 : result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12149 : : result_sg, result_lg));
12150 : : }
12151 : 674 : }
12152 : : }
12153 : :
12154 : 6504 : return result;
12155 : : }
12156 : :
12157 : : /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
12158 : : mpc function FUNC on it and return the resulting value as a tree
12159 : : with type TYPE. The mpfr precision is set to the precision of
12160 : : TYPE. We assume that function FUNC returns zero if the result
12161 : : could be calculated exactly within the requested precision. If
12162 : : DO_NONFINITE is true, then fold expressions containing Inf or NaN
12163 : : in the arguments and/or results. */
12164 : :
12165 : : tree
12166 : 4482 : do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
12167 : : int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
12168 : : {
12169 : 4482 : tree result = NULL_TREE;
12170 : :
12171 : 4482 : STRIP_NOPS (arg0);
12172 : 4482 : STRIP_NOPS (arg1);
12173 : :
12174 : : /* To proceed, MPFR must exactly represent the target floating point
12175 : : format, which only happens when the target base equals two. */
12176 : 4482 : if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
12177 : 4482 : && SCALAR_FLOAT_TYPE_P (TREE_TYPE (TREE_TYPE (arg0)))
12178 : 4482 : && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
12179 : 4482 : && SCALAR_FLOAT_TYPE_P (TREE_TYPE (TREE_TYPE (arg1)))
12180 : 8964 : && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
12181 : : {
12182 : 4482 : const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
12183 : 4482 : const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
12184 : 4482 : const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
12185 : 4482 : const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
12186 : :
12187 : 4482 : if (do_nonfinite
12188 : 4482 : || (real_isfinite (re0) && real_isfinite (im0)
12189 : 3991 : && real_isfinite (re1) && real_isfinite (im1)))
12190 : : {
12191 : 4130 : const struct real_format *const fmt =
12192 : 4130 : REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
12193 : 4130 : const int prec = fmt->p;
12194 : 4130 : const mpfr_rnd_t rnd = fmt->round_towards_zero
12195 : 4130 : ? MPFR_RNDZ : MPFR_RNDN;
12196 : 4130 : const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
12197 : 4130 : int inexact;
12198 : 4130 : mpc_t m0, m1;
12199 : :
12200 : 4130 : mpc_init2 (m0, prec);
12201 : 4130 : mpc_init2 (m1, prec);
12202 : 4130 : mpfr_from_real (mpc_realref (m0), re0, rnd);
12203 : 4130 : mpfr_from_real (mpc_imagref (m0), im0, rnd);
12204 : 4130 : mpfr_from_real (mpc_realref (m1), re1, rnd);
12205 : 4130 : mpfr_from_real (mpc_imagref (m1), im1, rnd);
12206 : 4130 : mpfr_clear_flags ();
12207 : 4130 : inexact = func (m0, m0, m1, crnd);
12208 : 4130 : result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
12209 : 4130 : mpc_clear (m0);
12210 : 4130 : mpc_clear (m1);
12211 : : }
12212 : : }
12213 : :
12214 : 4482 : return result;
12215 : : }
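 : :
 : : /* A sketch of a typical caller (hedged; the real callers live in
 : : fold-const-call.cc), folding a complex power of two COMPLEX_CST
 : : operands with DO_NONFINITE left at zero:
 : :
 : : result = do_mpc_arg2 (arg0, arg1, type, 0, mpc_pow);
 : : */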
12216 : :
12217 : : /* A wrapper function for builtin folding that prevents warnings for
12218 : : "statement without effect" and the like, caused by removing the
12219 : : call node before the warning is generated. */
12220 : :
12221 : : tree
12222 : 9470074 : fold_call_stmt (gcall *stmt, bool ignore)
12223 : : {
12224 : 9470074 : tree ret = NULL_TREE;
12225 : 9470074 : tree fndecl = gimple_call_fndecl (stmt);
12226 : 9470074 : location_t loc = gimple_location (stmt);
12227 : 9470074 : if (fndecl && fndecl_built_in_p (fndecl)
12228 : 18940148 : && !gimple_call_va_arg_pack_p (stmt))
12229 : : {
12230 : 9470008 : int nargs = gimple_call_num_args (stmt);
12231 : 9470008 : tree *args = (nargs > 0
12232 : 9470008 : ? gimple_call_arg_ptr (stmt, 0)
12233 : : : &error_mark_node);
12234 : :
12235 : 9470008 : if (avoid_folding_inline_builtin (fndecl))
12236 : : return NULL_TREE;
12237 : 9470008 : if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
12238 : : {
12239 : 0 : return targetm.fold_builtin (fndecl, nargs, args, ignore);
12240 : : }
12241 : : else
12242 : : {
12243 : 9470008 : ret = fold_builtin_n (loc, NULL_TREE, fndecl, args, nargs, ignore);
12244 : 9470008 : if (ret)
12245 : : {
12246 : : /* Propagate location information from original call to
12247 : : expansion of builtin. Otherwise things like
12248 : : maybe_emit_chk_warning, that operate on the expansion
12249 : : of a builtin, will use the wrong location information. */
12250 : 5045 : if (gimple_has_location (stmt))
12251 : : {
12252 : 5044 : tree realret = ret;
12253 : 5044 : if (TREE_CODE (ret) == NOP_EXPR)
12254 : 5044 : realret = TREE_OPERAND (ret, 0);
12255 : 5044 : if (CAN_HAVE_LOCATION_P (realret)
12256 : 9342 : && !EXPR_HAS_LOCATION (realret))
12257 : 6 : SET_EXPR_LOCATION (realret, loc);
12258 : 5044 : return realret;
12259 : : }
12260 : : return ret;
12261 : : }
12262 : : }
12263 : : }
12264 : : return NULL_TREE;
12265 : : }
12266 : :
12267 : : /* Look up the function in builtin_decl that corresponds to DECL
12268 : : and set ASMSPEC as its user assembler name. DECL must be a
12269 : : function decl that declares a builtin. */
12270 : :
12271 : : void
12272 : 149590 : set_builtin_user_assembler_name (tree decl, const char *asmspec)
12273 : : {
12274 : 149590 : gcc_assert (fndecl_built_in_p (decl, BUILT_IN_NORMAL)
12275 : : && asmspec != 0);
12276 : :
12277 : 149590 : tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
12278 : 149590 : set_user_assembler_name (builtin, asmspec);
12279 : :
12280 : 149590 : if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
12281 : 149590 : && INT_TYPE_SIZE < BITS_PER_WORD)
12282 : : {
12283 : 1 : scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
12284 : 1 : set_user_assembler_libfunc ("ffs", asmspec);
12285 : 1 : set_optab_libfunc (ffs_optab, mode, "ffs");
12286 : : }
12287 : 149590 : }
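 : :
 : : /* This is reached for declarations such as (hypothetical user code,
 : : not part of the GCC sources):
 : :
 : : extern int ffs (int) __asm__ ("my_ffs");
 : :
 : : so that calls and libcalls emitted for the builtin use the
 : : renamed symbol. */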
12288 : :
12289 : : /* Return true if DECL is a builtin that expands to a constant or similarly
12290 : : simple code. */
12291 : : bool
12292 : 32027931 : is_simple_builtin (tree decl)
12293 : : {
12294 : 32027931 : if (decl && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
12295 : 30764654 : switch (DECL_FUNCTION_CODE (decl))
12296 : : {
12297 : : /* Builtins that expand to constants. */
12298 : : case BUILT_IN_CONSTANT_P:
12299 : : case BUILT_IN_EXPECT:
12300 : : case BUILT_IN_OBJECT_SIZE:
12301 : : case BUILT_IN_UNREACHABLE:
12302 : : /* Simple register moves or loads from stack. */
12303 : : case BUILT_IN_ASSUME_ALIGNED:
12304 : : case BUILT_IN_RETURN_ADDRESS:
12305 : : case BUILT_IN_EXTRACT_RETURN_ADDR:
12306 : : case BUILT_IN_FROB_RETURN_ADDR:
12307 : : case BUILT_IN_RETURN:
12308 : : case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
12309 : : case BUILT_IN_FRAME_ADDRESS:
12310 : : case BUILT_IN_VA_END:
12311 : : case BUILT_IN_STACK_SAVE:
12312 : : case BUILT_IN_STACK_RESTORE:
12313 : : case BUILT_IN_DWARF_CFA:
12314 : : /* Exception state returns or moves registers around. */
12315 : : case BUILT_IN_EH_FILTER:
12316 : : case BUILT_IN_EH_POINTER:
12317 : : case BUILT_IN_EH_COPY_VALUES:
12318 : : return true;
12319 : :
12320 : 28124055 : default:
12321 : 28124055 : return false;
12322 : : }
12323 : :
12324 : : return false;
12325 : : }
12326 : :
12327 : : /* Return true if DECL is a builtin that is not expensive, i.e., one that
12328 : : is most probably expanded inline into reasonably simple code. This is a
12329 : : superset of is_simple_builtin. */
12330 : : bool
12331 : 19385336 : is_inexpensive_builtin (tree decl)
12332 : : {
12333 : 19385336 : if (!decl)
12334 : : return false;
12335 : 19369106 : else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
12336 : : return true;
12337 : 18083337 : else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
12338 : 15221009 : switch (DECL_FUNCTION_CODE (decl))
12339 : : {
12340 : : case BUILT_IN_ABS:
12341 : : CASE_BUILT_IN_ALLOCA:
12342 : : case BUILT_IN_BSWAP16:
12343 : : case BUILT_IN_BSWAP32:
12344 : : case BUILT_IN_BSWAP64:
12345 : : case BUILT_IN_BSWAP128:
12346 : : case BUILT_IN_CLZ:
12347 : : case BUILT_IN_CLZIMAX:
12348 : : case BUILT_IN_CLZL:
12349 : : case BUILT_IN_CLZLL:
12350 : : case BUILT_IN_CTZ:
12351 : : case BUILT_IN_CTZIMAX:
12352 : : case BUILT_IN_CTZL:
12353 : : case BUILT_IN_CTZLL:
12354 : : case BUILT_IN_FFS:
12355 : : case BUILT_IN_FFSIMAX:
12356 : : case BUILT_IN_FFSL:
12357 : : case BUILT_IN_FFSLL:
12358 : : case BUILT_IN_IMAXABS:
12359 : : case BUILT_IN_FINITE:
12360 : : case BUILT_IN_FINITEF:
12361 : : case BUILT_IN_FINITEL:
12362 : : case BUILT_IN_FINITED32:
12363 : : case BUILT_IN_FINITED64:
12364 : : case BUILT_IN_FINITED128:
12365 : : case BUILT_IN_FPCLASSIFY:
12366 : : case BUILT_IN_ISFINITE:
12367 : : case BUILT_IN_ISINF_SIGN:
12368 : : case BUILT_IN_ISINF:
12369 : : case BUILT_IN_ISINFF:
12370 : : case BUILT_IN_ISINFL:
12371 : : case BUILT_IN_ISINFD32:
12372 : : case BUILT_IN_ISINFD64:
12373 : : case BUILT_IN_ISINFD128:
12374 : : case BUILT_IN_ISNAN:
12375 : : case BUILT_IN_ISNANF:
12376 : : case BUILT_IN_ISNANL:
12377 : : case BUILT_IN_ISNAND32:
12378 : : case BUILT_IN_ISNAND64:
12379 : : case BUILT_IN_ISNAND128:
12380 : : case BUILT_IN_ISNORMAL:
12381 : : case BUILT_IN_ISGREATER:
12382 : : case BUILT_IN_ISGREATEREQUAL:
12383 : : case BUILT_IN_ISLESS:
12384 : : case BUILT_IN_ISLESSEQUAL:
12385 : : case BUILT_IN_ISLESSGREATER:
12386 : : case BUILT_IN_ISUNORDERED:
12387 : : case BUILT_IN_ISEQSIG:
12388 : : case BUILT_IN_VA_ARG_PACK:
12389 : : case BUILT_IN_VA_ARG_PACK_LEN:
12390 : : case BUILT_IN_VA_COPY:
12391 : : case BUILT_IN_TRAP:
12392 : : case BUILT_IN_UNREACHABLE_TRAP:
12393 : : case BUILT_IN_SAVEREGS:
12394 : : case BUILT_IN_POPCOUNTL:
12395 : : case BUILT_IN_POPCOUNTLL:
12396 : : case BUILT_IN_POPCOUNTIMAX:
12397 : : case BUILT_IN_POPCOUNT:
12398 : : case BUILT_IN_PARITYL:
12399 : : case BUILT_IN_PARITYLL:
12400 : : case BUILT_IN_PARITYIMAX:
12401 : : case BUILT_IN_PARITY:
12402 : : case BUILT_IN_LABS:
12403 : : case BUILT_IN_LLABS:
12404 : : case BUILT_IN_PREFETCH:
12405 : : case BUILT_IN_ACC_ON_DEVICE:
12406 : : return true;
12407 : :
12408 : 14423184 : default:
12409 : 14423184 : return is_simple_builtin (decl);
12410 : : }
12411 : :
12412 : : return false;
12413 : : }
12414 : :
12415 : : /* Return true if T is a constant and the value cast to a target char
12416 : : can be represented by a host char.
12417 : : Store the resulting char constant in *P if so. */
12418 : :
12419 : : bool
12420 : 2958 : target_char_cst_p (tree t, char *p)
12421 : : {
12422 : 2958 : if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
12423 : : return false;
12424 : :
12425 : 1663 : *p = (char)tree_to_uhwi (t);
12426 : 1663 : return true;
12427 : : }
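 : :
 : : /* Caller-side sketch (mirroring the strchr/strrchr folders in
 : : gimple-fold.cc):
 : :
 : : char c;
 : : if (target_char_cst_p (arg, &c))
 : : ... fold using the host character c ...
 : : */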
12428 : :
12429 : : /* Return true if the builtin DECL is implemented in a standard library.
12430 : : Otherwise return false, which doesn't guarantee that it is not (thus the list
12431 : : of handled builtins below may be incomplete). */
12432 : :
12433 : : bool
12434 : 40794 : builtin_with_linkage_p (tree decl)
12435 : : {
12436 : 40794 : if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
12437 : 40516 : switch (DECL_FUNCTION_CODE (decl))
12438 : : {
12439 : 1294 : CASE_FLT_FN (BUILT_IN_ACOS):
12440 : 1294 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_ACOS):
12441 : 1294 : CASE_FLT_FN (BUILT_IN_ACOSH):
12442 : 1294 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_ACOSH):
12443 : 1294 : CASE_FLT_FN (BUILT_IN_ASIN):
12444 : 1294 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_ASIN):
12445 : 1294 : CASE_FLT_FN (BUILT_IN_ASINH):
12446 : 1294 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_ASINH):
12447 : 1294 : CASE_FLT_FN (BUILT_IN_ATAN):
12448 : 1294 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_ATAN):
12449 : 1294 : CASE_FLT_FN (BUILT_IN_ATANH):
12450 : 1294 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_ATANH):
12451 : 1294 : CASE_FLT_FN (BUILT_IN_ATAN2):
12452 : 1294 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_ATAN2):
12453 : 1294 : CASE_FLT_FN (BUILT_IN_CBRT):
12454 : 1294 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_CBRT):
12455 : 1294 : CASE_FLT_FN (BUILT_IN_CEIL):
12456 : 1294 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_CEIL):
12457 : 1294 : CASE_FLT_FN (BUILT_IN_COPYSIGN):
12458 : 1294 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
12459 : 1294 : CASE_FLT_FN (BUILT_IN_COS):
12460 : 1294 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_COS):
12461 : 1294 : CASE_FLT_FN (BUILT_IN_COSH):
12462 : 1294 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_COSH):
12463 : 1294 : CASE_FLT_FN (BUILT_IN_ERF):
12464 : 1294 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_ERF):
12465 : 1294 : CASE_FLT_FN (BUILT_IN_ERFC):
12466 : 1294 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_ERFC):
12467 : 1294 : CASE_FLT_FN (BUILT_IN_EXP):
12468 : 1294 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_EXP):
12469 : 1294 : CASE_FLT_FN (BUILT_IN_EXP2):
12470 : 1294 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_EXP2):
12471 : 1294 : CASE_FLT_FN (BUILT_IN_EXPM1):
12472 : 1294 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_EXPM1):
12473 : 1294 : CASE_FLT_FN (BUILT_IN_FABS):
12474 : 1294 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
12475 : 1294 : CASE_FLT_FN (BUILT_IN_FDIM):
12476 : 1294 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_FDIM):
12477 : 1294 : CASE_FLT_FN (BUILT_IN_FLOOR):
12478 : 1294 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_FLOOR):
12479 : 1294 : CASE_FLT_FN (BUILT_IN_FMA):
12480 : 1294 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
12481 : 1294 : CASE_FLT_FN (BUILT_IN_FMAX):
12482 : 1294 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMAX):
12483 : 1294 : CASE_FLT_FN (BUILT_IN_FMIN):
12484 : 1294 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMIN):
12485 : 1294 : CASE_FLT_FN (BUILT_IN_FMOD):
12486 : 1294 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMOD):
12487 : 1294 : CASE_FLT_FN (BUILT_IN_FREXP):
12488 : 1294 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_FREXP):
12489 : 1294 : CASE_FLT_FN (BUILT_IN_HYPOT):
12490 : 1294 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_HYPOT):
12491 : 1294 : CASE_FLT_FN (BUILT_IN_ILOGB):
12492 : 1294 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_ILOGB):
12493 : 1294 : CASE_FLT_FN (BUILT_IN_LDEXP):
12494 : 1294 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_LDEXP):
12495 : 1294 : CASE_FLT_FN (BUILT_IN_LGAMMA):
12496 : 1294 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_LGAMMA):
12497 : 1294 : CASE_FLT_FN (BUILT_IN_LLRINT):
12498 : 1294 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_LLRINT):
12499 : 1294 : CASE_FLT_FN (BUILT_IN_LLROUND):
12500 : 1294 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_LLROUND):
12501 : 1294 : CASE_FLT_FN (BUILT_IN_LOG):
12502 : 1294 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_LOG):
12503 : 1294 : CASE_FLT_FN (BUILT_IN_LOG10):
12504 : 1294 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_LOG10):
12505 : 1294 : CASE_FLT_FN (BUILT_IN_LOG1P):
12506 : 1294 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_LOG1P):
12507 : 1294 : CASE_FLT_FN (BUILT_IN_LOG2):
12508 : 1294 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_LOG2):
12509 : 1294 : CASE_FLT_FN (BUILT_IN_LOGB):
12510 : 1294 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_LOGB):
12511 : 1294 : CASE_FLT_FN (BUILT_IN_LRINT):
12512 : 1294 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_LRINT):
12513 : 1294 : CASE_FLT_FN (BUILT_IN_LROUND):
12514 : 1294 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_LROUND):
12515 : 1294 : CASE_FLT_FN (BUILT_IN_MODF):
12516 : 1294 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_MODF):
12517 : 1294 : CASE_FLT_FN (BUILT_IN_NAN):
12518 : 1294 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_NAN):
12519 : 1294 : CASE_FLT_FN (BUILT_IN_NEARBYINT):
12520 : 1294 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_NEARBYINT):
12521 : 1294 : CASE_FLT_FN (BUILT_IN_NEXTAFTER):
12522 : 1294 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_NEXTAFTER):
12523 : 1294 : CASE_FLT_FN (BUILT_IN_NEXTTOWARD):
12524 : 1294 : CASE_FLT_FN (BUILT_IN_POW):
12525 : 1294 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_POW):
12526 : 1294 : CASE_FLT_FN (BUILT_IN_REMAINDER):
12527 : 1294 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_REMAINDER):
12528 : 1294 : CASE_FLT_FN (BUILT_IN_REMQUO):
12529 : 1294 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_REMQUO):
12530 : 1294 : CASE_FLT_FN (BUILT_IN_RINT):
12531 : 1294 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_RINT):
12532 : 1294 : CASE_FLT_FN (BUILT_IN_ROUND):
12533 : 1294 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_ROUND):
12534 : 1294 : CASE_FLT_FN (BUILT_IN_SCALBLN):
12535 : 1294 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_SCALBLN):
12536 : 1294 : CASE_FLT_FN (BUILT_IN_SCALBN):
12537 : 1294 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_SCALBN):
12538 : 1294 : CASE_FLT_FN (BUILT_IN_SIN):
12539 : 1294 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_SIN):
12540 : 1294 : CASE_FLT_FN (BUILT_IN_SINH):
12541 : 1294 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_SINH):
12542 : 1294 : CASE_FLT_FN (BUILT_IN_SINCOS):
12543 : 1294 : CASE_FLT_FN (BUILT_IN_SQRT):
12544 : 1294 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_SQRT):
12545 : 1294 : CASE_FLT_FN (BUILT_IN_TAN):
12546 : 1294 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_TAN):
12547 : 1294 : CASE_FLT_FN (BUILT_IN_TANH):
12548 : 1294 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_TANH):
12549 : 1294 : CASE_FLT_FN (BUILT_IN_TGAMMA):
12550 : 1294 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_TGAMMA):
12551 : 1294 : CASE_FLT_FN (BUILT_IN_TRUNC):
12552 : 1294 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_TRUNC):
12553 : 1294 : return true;
12554 : :
12555 : 16 : case BUILT_IN_STPCPY:
12556 : 16 : case BUILT_IN_STPNCPY:
12557 : : /* stpcpy is both referenced in libiberty's pex-win32.c and provided
12558 : : by libiberty's stpcpy.c for MinGW targets so we need to return true
12559 : : in order to be able to build libiberty in LTO mode for them. */
12560 : 16 : return true;
12561 : :
12562 : : default:
12563 : : break;
12564 : : }
12565 : : return false;
12566 : : }
12567 : :
12568 : : /* Return true if OFFRNG is bounded to a subrange of offset values
12569 : : valid for the largest possible object. */
12570 : :
12571 : : bool
12572 : 501 : access_ref::offset_bounded () const
12573 : : {
12574 : 501 : tree min = TYPE_MIN_VALUE (ptrdiff_type_node);
12575 : 501 : tree max = TYPE_MAX_VALUE (ptrdiff_type_node);
12576 : 975 : return wi::to_offset (min) <= offrng[0] && offrng[1] <= wi::to_offset (max);
12577 : : }
12578 : :
12579 : : /* If CALLEE has known side effects, fill in INFO and return true.
12580 : : See tree-ssa-structalias.cc:find_func_aliases
12581 : : for the list of builtins we might need to handle here. */
12582 : :
12583 : : attr_fnspec
12584 : 111451997 : builtin_fnspec (tree callee)
12585 : : {
12586 : 111451997 : built_in_function code = DECL_FUNCTION_CODE (callee);
12587 : :
12588 : 111451997 : switch (code)
12589 : : {
12590 : : /* All the following functions read memory pointed to by
12591 : : their second argument and write memory pointed to by first
12592 : : argument.
12593 : : strcat/strncat additionally reads memory pointed to by the first
12594 : : argument. */
12595 : 137176 : case BUILT_IN_STRCAT:
12596 : 137176 : case BUILT_IN_STRCAT_CHK:
12597 : 137176 : return "1cW 1 ";
12598 : 42692 : case BUILT_IN_STRNCAT:
12599 : 42692 : case BUILT_IN_STRNCAT_CHK:
12600 : 42692 : return "1cW 13";
12601 : 264130 : case BUILT_IN_STRCPY:
12602 : 264130 : case BUILT_IN_STRCPY_CHK:
12603 : 264130 : return "1cO 1 ";
12604 : 32225 : case BUILT_IN_STPCPY:
12605 : 32225 : case BUILT_IN_STPCPY_CHK:
12606 : 32225 : return ".cO 1 ";
12607 : 18912854 : case BUILT_IN_STRNCPY:
12608 : 18912854 : case BUILT_IN_MEMCPY:
12609 : 18912854 : case BUILT_IN_MEMMOVE:
12610 : 18912854 : case BUILT_IN_TM_MEMCPY:
12611 : 18912854 : case BUILT_IN_TM_MEMMOVE:
12612 : 18912854 : case BUILT_IN_STRNCPY_CHK:
12613 : 18912854 : case BUILT_IN_MEMCPY_CHK:
12614 : 18912854 : case BUILT_IN_MEMMOVE_CHK:
12615 : 18912854 : return "1cO313";
12616 : 151937 : case BUILT_IN_MEMPCPY:
12617 : 151937 : case BUILT_IN_MEMPCPY_CHK:
12618 : 151937 : return ".cO313";
12619 : 57714 : case BUILT_IN_STPNCPY:
12620 : 57714 : case BUILT_IN_STPNCPY_CHK:
12621 : 57714 : return ".cO313";
12622 : 0 : case BUILT_IN_BCOPY:
12623 : 0 : return ".c23O3";
12624 : 0 : case BUILT_IN_BZERO:
12625 : 0 : return ".cO2";
12626 : 8727162 : case BUILT_IN_MEMCMP:
12627 : 8727162 : case BUILT_IN_MEMCMP_EQ:
12628 : 8727162 : case BUILT_IN_BCMP:
12629 : 8727162 : case BUILT_IN_STRNCMP:
12630 : 8727162 : case BUILT_IN_STRNCMP_EQ:
12631 : 8727162 : case BUILT_IN_STRNCASECMP:
12632 : 8727162 : return ".cR3R3";
12633 : :
12634 : : /* The following functions read memory pointed to by their
12635 : : first argument. */
12636 : 804 : CASE_BUILT_IN_TM_LOAD (1):
12637 : 804 : CASE_BUILT_IN_TM_LOAD (2):
12638 : 804 : CASE_BUILT_IN_TM_LOAD (4):
12639 : 804 : CASE_BUILT_IN_TM_LOAD (8):
12640 : 804 : CASE_BUILT_IN_TM_LOAD (FLOAT):
12641 : 804 : CASE_BUILT_IN_TM_LOAD (DOUBLE):
12642 : 804 : CASE_BUILT_IN_TM_LOAD (LDOUBLE):
12643 : 804 : CASE_BUILT_IN_TM_LOAD (M64):
12644 : 804 : CASE_BUILT_IN_TM_LOAD (M128):
12645 : 804 : CASE_BUILT_IN_TM_LOAD (M256):
12646 : 804 : case BUILT_IN_TM_LOG:
12647 : 804 : case BUILT_IN_TM_LOG_1:
12648 : 804 : case BUILT_IN_TM_LOG_2:
12649 : 804 : case BUILT_IN_TM_LOG_4:
12650 : 804 : case BUILT_IN_TM_LOG_8:
12651 : 804 : case BUILT_IN_TM_LOG_FLOAT:
12652 : 804 : case BUILT_IN_TM_LOG_DOUBLE:
12653 : 804 : case BUILT_IN_TM_LOG_LDOUBLE:
12654 : 804 : case BUILT_IN_TM_LOG_M64:
12655 : 804 : case BUILT_IN_TM_LOG_M128:
12656 : 804 : case BUILT_IN_TM_LOG_M256:
12657 : 804 : return ".cR ";
12658 : :
12659 : 475110 : case BUILT_IN_INDEX:
12660 : 475110 : case BUILT_IN_RINDEX:
12661 : 475110 : case BUILT_IN_STRCHR:
12662 : 475110 : case BUILT_IN_STRLEN:
12663 : 475110 : case BUILT_IN_STRRCHR:
12664 : 475110 : return ".cR ";
12665 : 62911 : case BUILT_IN_STRNLEN:
12666 : 62911 : return ".cR2";
12667 : :
12668 : : /* These read memory pointed to by the first argument.
12669 : : Allocating memory does not have any side-effects apart from
12670 : : being the definition point for the pointer.
12671 : : Unix98 specifies that errno is set on allocation failure. */
12672 : 17800 : case BUILT_IN_STRDUP:
12673 : 17800 : return "mCR ";
12674 : 13328 : case BUILT_IN_STRNDUP:
12675 : 13328 : return "mCR2";
12676 : : /* Allocating memory does not have any side-effects apart from
12677 : : being the definition point for the pointer. */
12678 : 9627790 : case BUILT_IN_MALLOC:
12679 : 9627790 : case BUILT_IN_ALIGNED_ALLOC:
12680 : 9627790 : case BUILT_IN_CALLOC:
12681 : 9627790 : case BUILT_IN_GOMP_ALLOC:
12682 : 9627790 : return "mC";
12683 : 863874 : CASE_BUILT_IN_ALLOCA:
12684 : 863874 : return "mc";
12685 : : /* These read memory pointed to by the first argument with size
12686 : : in the third argument. */
12687 : 526765 : case BUILT_IN_MEMCHR:
12688 : 526765 : return ".cR3";
12689 : : /* These read memory pointed to by the first and second arguments. */
12690 : 11291775 : case BUILT_IN_STRSTR:
12691 : 11291775 : case BUILT_IN_STRPBRK:
12692 : 11291775 : case BUILT_IN_STRCASECMP:
12693 : 11291775 : case BUILT_IN_STRCSPN:
12694 : 11291775 : case BUILT_IN_STRSPN:
12695 : 11291775 : case BUILT_IN_STRCMP:
12696 : 11291775 : case BUILT_IN_STRCMP_EQ:
12697 : 11291775 : return ".cR R ";
12698 : : /* Freeing memory kills the pointed-to memory. More importantly
12699 : : the call has to serve as a barrier for moving loads and stores
12700 : : across it. */
12701 : 5796914 : case BUILT_IN_STACK_RESTORE:
12702 : 5796914 : case BUILT_IN_FREE:
12703 : 5796914 : case BUILT_IN_GOMP_FREE:
12704 : 5796914 : return ".co ";
12705 : 103113 : case BUILT_IN_VA_END:
12706 : 103113 : return ".cO ";
12707 : : /* Realloc serves both as allocation point and deallocation point. */
12708 : 988071 : case BUILT_IN_REALLOC:
12709 : 988071 : case BUILT_IN_GOMP_REALLOC:
12710 : 988071 : return ".Cw ";
12711 : 15732 : case BUILT_IN_GAMMA_R:
12712 : 15732 : case BUILT_IN_GAMMAF_R:
12713 : 15732 : case BUILT_IN_GAMMAL_R:
12714 : 15732 : case BUILT_IN_LGAMMA_R:
12715 : 15732 : case BUILT_IN_LGAMMAF_R:
12716 : 15732 : case BUILT_IN_LGAMMAL_R:
12717 : 15732 : return ".C. Ot";
12718 : 81519 : case BUILT_IN_FREXP:
12719 : 81519 : case BUILT_IN_FREXPF:
12720 : 81519 : case BUILT_IN_FREXPL:
12721 : 81519 : case BUILT_IN_MODF:
12722 : 81519 : case BUILT_IN_MODFF:
12723 : 81519 : case BUILT_IN_MODFL:
12724 : 81519 : return ".c. Ot";
12725 : 7586 : case BUILT_IN_REMQUO:
12726 : 7586 : case BUILT_IN_REMQUOF:
12727 : 7586 : case BUILT_IN_REMQUOL:
12728 : 7586 : return ".c. . Ot";
12729 : 151 : case BUILT_IN_SINCOS:
12730 : 151 : case BUILT_IN_SINCOSF:
12731 : 151 : case BUILT_IN_SINCOSL:
12732 : 151 : return ".c. OtOt";
12733 : 4294860 : case BUILT_IN_MEMSET:
12734 : 4294860 : case BUILT_IN_MEMSET_CHK:
12735 : 4294860 : case BUILT_IN_TM_MEMSET:
12736 : 4294860 : return "1cO3";
12737 : 366 : CASE_BUILT_IN_TM_STORE (1):
12738 : 366 : CASE_BUILT_IN_TM_STORE (2):
12739 : 366 : CASE_BUILT_IN_TM_STORE (4):
12740 : 366 : CASE_BUILT_IN_TM_STORE (8):
12741 : 366 : CASE_BUILT_IN_TM_STORE (FLOAT):
12742 : 366 : CASE_BUILT_IN_TM_STORE (DOUBLE):
12743 : 366 : CASE_BUILT_IN_TM_STORE (LDOUBLE):
12744 : 366 : CASE_BUILT_IN_TM_STORE (M64):
12745 : 366 : CASE_BUILT_IN_TM_STORE (M128):
12746 : 366 : CASE_BUILT_IN_TM_STORE (M256):
12747 : 366 : return ".cO ";
12748 : 2273027 : case BUILT_IN_STACK_SAVE:
12749 : 2273027 : case BUILT_IN_RETURN:
12750 : 2273027 : case BUILT_IN_EH_POINTER:
12751 : 2273027 : case BUILT_IN_EH_FILTER:
12752 : 2273027 : case BUILT_IN_UNWIND_RESUME:
12753 : 2273027 : case BUILT_IN_CXA_END_CLEANUP:
12754 : 2273027 : case BUILT_IN_EH_COPY_VALUES:
12755 : 2273027 : case BUILT_IN_FRAME_ADDRESS:
12756 : 2273027 : case BUILT_IN_APPLY_ARGS:
12757 : 2273027 : case BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT:
12758 : 2273027 : case BUILT_IN_ASAN_AFTER_DYNAMIC_INIT:
12759 : 2273027 : case BUILT_IN_PREFETCH:
12760 : 2273027 : case BUILT_IN_DWARF_CFA:
12761 : 2273027 : case BUILT_IN_RETURN_ADDRESS:
12762 : 2273027 : return ".c";
12763 : 1711889 : case BUILT_IN_ASSUME_ALIGNED:
12764 : 1711889 : case BUILT_IN_EXPECT:
12765 : 1711889 : case BUILT_IN_EXPECT_WITH_PROBABILITY:
12766 : 1711889 : return "1cX ";
12767 : : /* But posix_memalign stores a pointer into the memory pointed to
12768 : : by its first argument. */
12769 : 5681 : case BUILT_IN_POSIX_MEMALIGN:
12770 : 5681 : return ".cOt";
12771 : 4438 : case BUILT_IN_OMP_GET_MAPPED_PTR:
12772 : 4438 : return ". R ";
12773 : :
12774 : 44962603 : default:
12775 : 44962603 : return "";
12776 : : }
12777 : : }
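 : :
 : : /* A rough decoding of one of the strings above, for orientation;
 : : attr-fnspec.h holds the authoritative grammar. Each string packs
 : : a two-character descriptor for the function and its return value,
 : : followed by two characters per argument. For strnlen's ".cR2":
 : :
 : : "." nothing is known about the return value
 : : "c" no memory side effects beyond those described
 : : "R2" argument 1 is only read, with the access size given
 : : by argument 2
 : :
 : : memset's "1cO3" likewise records that the result equals argument 1
 : : ("1") and that argument 1 is only written, with the size given by
 : : argument 3 ("O3"). */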