Branch data Line data Source code
1 : : /* Expand builtin functions.
2 : : Copyright (C) 1988-2025 Free Software Foundation, Inc.
3 : :
4 : : This file is part of GCC.
5 : :
6 : : GCC is free software; you can redistribute it and/or modify it under
7 : : the terms of the GNU General Public License as published by the Free
8 : : Software Foundation; either version 3, or (at your option) any later
9 : : version.
10 : :
11 : : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 : : WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 : : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 : : for more details.
15 : :
16 : : You should have received a copy of the GNU General Public License
17 : : along with GCC; see the file COPYING3. If not see
18 : : <http://www.gnu.org/licenses/>. */
19 : :
20 : : /* Legacy warning! Please add no further builtin simplifications here
21 : : (apart from pure constant folding) - builtin simplifications should go
22 : : to match.pd or gimple-fold.cc instead. */
23 : :
24 : : #include "config.h"
25 : : #include "system.h"
26 : : #include "coretypes.h"
27 : : #include "backend.h"
28 : : #include "target.h"
29 : : #include "rtl.h"
30 : : #include "tree.h"
31 : : #include "memmodel.h"
32 : : #include "gimple.h"
33 : : #include "predict.h"
34 : : #include "tm_p.h"
35 : : #include "stringpool.h"
36 : : #include "tree-vrp.h"
37 : : #include "tree-ssanames.h"
38 : : #include "expmed.h"
39 : : #include "optabs.h"
40 : : #include "emit-rtl.h"
41 : : #include "recog.h"
42 : : #include "diagnostic-core.h"
43 : : #include "alias.h"
44 : : #include "fold-const.h"
45 : : #include "fold-const-call.h"
46 : : #include "gimple-ssa-warn-access.h"
47 : : #include "stor-layout.h"
48 : : #include "calls.h"
49 : : #include "varasm.h"
50 : : #include "tree-object-size.h"
51 : : #include "tree-ssa-strlen.h"
52 : : #include "realmpfr.h"
53 : : #include "cfgrtl.h"
54 : : #include "except.h"
55 : : #include "dojump.h"
56 : : #include "explow.h"
57 : : #include "stmt.h"
58 : : #include "expr.h"
59 : : #include "libfuncs.h"
60 : : #include "output.h"
61 : : #include "typeclass.h"
62 : : #include "langhooks.h"
63 : : #include "value-prof.h"
64 : : #include "builtins.h"
65 : : #include "stringpool.h"
66 : : #include "attribs.h"
67 : : #include "asan.h"
68 : : #include "internal-fn.h"
69 : : #include "case-cfn-macros.h"
70 : : #include "gimple-iterator.h"
71 : : #include "gimple-fold.h"
72 : : #include "intl.h"
73 : : #include "file-prefix-map.h" /* remap_macro_filename() */
74 : : #include "ipa-strub.h" /* strub_watermark_parm() */
75 : : #include "gomp-constants.h"
76 : : #include "omp-general.h"
77 : : #include "tree-dfa.h"
78 : : #include "gimple-ssa.h"
79 : : #include "tree-ssa-live.h"
80 : : #include "tree-outof-ssa.h"
81 : : #include "attr-fnspec.h"
82 : : #include "demangle.h"
83 : : #include "gimple-range.h"
84 : : #include "pointer-query.h"
85 : :
86 : : struct target_builtins default_target_builtins;
87 : : #if SWITCHABLE_TARGET
88 : : struct target_builtins *this_target_builtins = &default_target_builtins;
89 : : #endif
90 : :
91 : : /* Define the names of the builtin function types and codes. */
92 : : const char *const built_in_class_names[BUILT_IN_LAST]
93 : : = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
94 : :
95 : : #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
96 : : const char * built_in_names[(int) END_BUILTINS] =
97 : : {
98 : : #include "builtins.def"
99 : : };
100 : :
101 : : /* Setup an array of builtin_info_type, make sure each element decl is
102 : : initialized to NULL_TREE. */
103 : : builtin_info_type builtin_info[(int)END_BUILTINS];
104 : :
105 : : /* Non-zero if __builtin_constant_p should be folded right away. */
106 : : bool force_folding_builtin_constant_p;
107 : :
108 : : static int target_char_cast (tree, char *);
109 : : static int apply_args_size (void);
110 : : static int apply_result_size (void);
111 : : static rtx result_vector (int, rtx);
112 : : static void expand_builtin_prefetch (tree);
113 : : static rtx expand_builtin_apply_args (void);
114 : : static rtx expand_builtin_apply_args_1 (void);
115 : : static rtx expand_builtin_apply (rtx, rtx, rtx);
116 : : static void expand_builtin_return (rtx);
117 : : static rtx expand_builtin_classify_type (tree);
118 : : static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
119 : : static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
120 : : static rtx expand_builtin_interclass_mathfn (tree, rtx);
121 : : static rtx expand_builtin_sincos (tree);
122 : : static rtx expand_builtin_fegetround (tree, rtx, machine_mode);
123 : : static rtx expand_builtin_feclear_feraise_except (tree, rtx, machine_mode,
124 : : optab);
125 : : static rtx expand_builtin_cexpi (tree, rtx);
126 : : static rtx expand_builtin_issignaling (tree, rtx);
127 : : static rtx expand_builtin_int_roundingfn (tree, rtx);
128 : : static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
129 : : static rtx expand_builtin_next_arg (void);
130 : : static rtx expand_builtin_va_start (tree);
131 : : static rtx expand_builtin_va_end (tree);
132 : : static rtx expand_builtin_va_copy (tree);
133 : : static rtx inline_expand_builtin_bytecmp (tree, rtx);
134 : : static rtx expand_builtin_strcmp (tree, rtx);
135 : : static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
136 : : static rtx expand_builtin_memcpy (tree, rtx);
137 : : static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
138 : : rtx target, tree exp,
139 : : memop_ret retmode,
140 : : bool might_overlap);
141 : : static rtx expand_builtin_memmove (tree, rtx);
142 : : static rtx expand_builtin_mempcpy (tree, rtx);
143 : : static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, memop_ret);
144 : : static rtx expand_builtin_strcpy (tree, rtx);
145 : : static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx);
146 : : static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
147 : : static rtx expand_builtin_strncpy (tree, rtx);
148 : : static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
149 : : static rtx expand_builtin_bzero (tree);
150 : : static rtx expand_builtin_strlen (tree, rtx, machine_mode);
151 : : static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
152 : : static rtx expand_builtin_alloca (tree);
153 : : static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
154 : : static rtx expand_builtin_frame_address (tree, tree);
155 : : static rtx expand_builtin_stack_address ();
156 : : static tree stabilize_va_list_loc (location_t, tree, int);
157 : : static rtx expand_builtin_expect (tree, rtx);
158 : : static rtx expand_builtin_expect_with_probability (tree, rtx);
159 : : static tree fold_builtin_constant_p (tree);
160 : : static tree fold_builtin_classify_type (tree);
161 : : static tree fold_builtin_strlen (location_t, tree, tree, tree);
162 : : static tree fold_builtin_inf (location_t, tree, int);
163 : : static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
164 : : static bool validate_arg (const_tree, enum tree_code code);
165 : : static rtx expand_builtin_fabs (tree, rtx, rtx);
166 : : static rtx expand_builtin_signbit (tree, rtx);
167 : : static tree fold_builtin_memcmp (location_t, tree, tree, tree);
168 : : static tree fold_builtin_isascii (location_t, tree);
169 : : static tree fold_builtin_toascii (location_t, tree);
170 : : static tree fold_builtin_isdigit (location_t, tree);
171 : : static tree fold_builtin_fabs (location_t, tree, tree);
172 : : static tree fold_builtin_abs (location_t, tree, tree);
173 : : static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
174 : : enum tree_code);
175 : : static tree fold_builtin_iseqsig (location_t, tree, tree);
176 : : static tree fold_builtin_varargs (location_t, tree, tree*, int);
177 : :
178 : : static tree fold_builtin_strpbrk (location_t, tree, tree, tree, tree);
179 : : static tree fold_builtin_strspn (location_t, tree, tree, tree, tree);
180 : : static tree fold_builtin_strcspn (location_t, tree, tree, tree, tree);
181 : :
182 : : static rtx expand_builtin_object_size (tree);
183 : : static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
184 : : enum built_in_function);
185 : : static void maybe_emit_chk_warning (tree, enum built_in_function);
186 : : static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
187 : : static tree fold_builtin_object_size (tree, tree, enum built_in_function);
188 : :
189 : : unsigned HOST_WIDE_INT target_newline;
190 : : unsigned HOST_WIDE_INT target_percent;
191 : : static unsigned HOST_WIDE_INT target_c;
192 : : static unsigned HOST_WIDE_INT target_s;
193 : : char target_percent_c[3];
194 : : char target_percent_s[3];
195 : : char target_percent_s_newline[4];
196 : : static tree do_mpfr_remquo (tree, tree, tree);
197 : : static tree do_mpfr_lgamma_r (tree, tree, tree);
198 : : static void expand_builtin_sync_synchronize (void);
199 : :
200 : : /* Return true if NAME starts with __builtin_ or __sync_. */
201 : :
202 : : static bool
203 : 527787 : is_builtin_name (const char *name)
204 : : {
205 : 527787 : return (startswith (name, "__builtin_")
206 : 189469 : || startswith (name, "__sync_")
207 : 716394 : || startswith (name, "__atomic_"));
208 : : }
209 : :
210 : : /* Return true if NODE should be considered for inline expansion regardless
211 : : of the optimization level. This means whenever a function is invoked with
212 : : its "internal" name, which normally contains the prefix "__builtin". */
213 : :
214 : : bool
215 : 527787 : called_as_built_in (tree node)
216 : : {
217 : : /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
218 : : we want the name used to call the function, not the name it
219 : : will have. */
220 : 527787 : const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
221 : 527787 : return is_builtin_name (name);
222 : : }
223 : :
224 : : /* Compute values M and N such that M divides (address of EXP - N) and such
225 : : that N < M. If these numbers can be determined, store M in alignp and N in
226 : : *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
227 : : *alignp and any bit-offset to *bitposp.
228 : :
229 : : Note that the address (and thus the alignment) computed here is based
230 : : on the address to which a symbol resolves, whereas DECL_ALIGN is based
231 : : on the address at which an object is actually located. These two
232 : : addresses are not always the same. For example, on ARM targets,
233 : : the address &foo of a Thumb function foo() has the lowest bit set,
234 : : whereas foo() itself starts on an even address.
235 : :
236 : : If ADDR_P is true we are taking the address of the memory reference EXP
237 : : and thus cannot rely on the access taking place. */
238 : :
239 : : bool
240 : 109543573 : get_object_alignment_2 (tree exp, unsigned int *alignp,
241 : : unsigned HOST_WIDE_INT *bitposp, bool addr_p)
242 : : {
243 : 109543573 : poly_int64 bitsize, bitpos;
244 : 109543573 : tree offset;
245 : 109543573 : machine_mode mode;
246 : 109543573 : int unsignedp, reversep, volatilep;
247 : 109543573 : unsigned int align = BITS_PER_UNIT;
248 : 109543573 : bool known_alignment = false;
249 : :
250 : : /* Get the innermost object and the constant (bitpos) and possibly
251 : : variable (offset) offset of the access. */
252 : 109543573 : exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
253 : : &unsignedp, &reversep, &volatilep);
254 : :
255 : : /* Extract alignment information from the innermost object and
256 : : possibly adjust bitpos and offset. */
257 : 109543573 : if (TREE_CODE (exp) == FUNCTION_DECL)
258 : : {
259 : : /* Function addresses can encode extra information besides their
260 : : alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
261 : : allows the low bit to be used as a virtual bit, we know
262 : : that the address itself must be at least 2-byte aligned. */
263 : : if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
264 : : align = 2 * BITS_PER_UNIT;
265 : : }
266 : 109219871 : else if (TREE_CODE (exp) == LABEL_DECL)
267 : : ;
268 : 109182899 : else if (TREE_CODE (exp) == CONST_DECL)
269 : : {
270 : : /* The alignment of a CONST_DECL is determined by its initializer. */
271 : 58542 : exp = DECL_INITIAL (exp);
272 : 58542 : align = TYPE_ALIGN (TREE_TYPE (exp));
273 : 58542 : if (CONSTANT_CLASS_P (exp))
274 : 58535 : align = targetm.constant_alignment (exp, align);
275 : :
276 : : known_alignment = true;
277 : : }
278 : 109124357 : else if (DECL_P (exp))
279 : : {
280 : 66310779 : align = DECL_ALIGN (exp);
281 : : known_alignment = true;
282 : : }
283 : 42813578 : else if (TREE_CODE (exp) == INDIRECT_REF
284 : 42801998 : || TREE_CODE (exp) == MEM_REF
285 : 5452983 : || TREE_CODE (exp) == TARGET_MEM_REF)
286 : : {
287 : 41013565 : tree addr = TREE_OPERAND (exp, 0);
288 : 41013565 : unsigned ptr_align;
289 : 41013565 : unsigned HOST_WIDE_INT ptr_bitpos;
290 : 41013565 : unsigned HOST_WIDE_INT ptr_bitmask = ~0;
291 : :
292 : : /* If the address is explicitely aligned, handle that. */
293 : 41013565 : if (TREE_CODE (addr) == BIT_AND_EXPR
294 : 41013565 : && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
295 : : {
296 : 76 : ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
297 : 76 : ptr_bitmask *= BITS_PER_UNIT;
298 : 76 : align = least_bit_hwi (ptr_bitmask);
299 : 76 : addr = TREE_OPERAND (addr, 0);
300 : : }
301 : :
302 : 41013565 : known_alignment
303 : 41013565 : = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
304 : 41013565 : align = MAX (ptr_align, align);
305 : :
306 : : /* Re-apply explicit alignment to the bitpos. */
307 : 41013565 : ptr_bitpos &= ptr_bitmask;
308 : :
309 : : /* The alignment of the pointer operand in a TARGET_MEM_REF
310 : : has to take the variable offset parts into account. */
311 : 41013565 : if (TREE_CODE (exp) == TARGET_MEM_REF)
312 : : {
313 : 3652970 : if (TMR_INDEX (exp))
314 : : {
315 : 1650039 : unsigned HOST_WIDE_INT step = 1;
316 : 1650039 : if (TMR_STEP (exp))
317 : 1505081 : step = TREE_INT_CST_LOW (TMR_STEP (exp));
318 : 1650039 : align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
319 : : }
320 : 3652970 : if (TMR_INDEX2 (exp))
321 : 42129 : align = BITS_PER_UNIT;
322 : : known_alignment = false;
323 : : }
324 : :
325 : : /* When EXP is an actual memory reference then we can use
326 : : TYPE_ALIGN of a pointer indirection to derive alignment.
327 : : Do so only if get_pointer_alignment_1 did not reveal absolute
328 : : alignment knowledge and if using that alignment would
329 : : improve the situation. */
330 : 41013565 : unsigned int talign;
331 : 41013565 : if (!addr_p && !known_alignment
332 : 38548348 : && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
333 : 79561907 : && talign > align)
334 : : align = talign;
335 : : else
336 : : {
337 : : /* Else adjust bitpos accordingly. */
338 : 10933532 : bitpos += ptr_bitpos;
339 : 10933532 : if (TREE_CODE (exp) == MEM_REF
340 : 10933532 : || TREE_CODE (exp) == TARGET_MEM_REF)
341 : 10924757 : bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
342 : : }
343 : 41013565 : }
344 : 1800013 : else if (TREE_CODE (exp) == STRING_CST)
345 : : {
346 : : /* STRING_CST are the only constant objects we allow to be not
347 : : wrapped inside a CONST_DECL. */
348 : 1799519 : align = TYPE_ALIGN (TREE_TYPE (exp));
349 : 1799519 : if (CONSTANT_CLASS_P (exp))
350 : 1799519 : align = targetm.constant_alignment (exp, align);
351 : :
352 : : known_alignment = true;
353 : : }
354 : :
355 : : /* If there is a non-constant offset part extract the maximum
356 : : alignment that can prevail. */
357 : 109543573 : if (offset)
358 : : {
359 : 9761303 : unsigned int trailing_zeros = tree_ctz (offset);
360 : 9761303 : if (trailing_zeros < HOST_BITS_PER_INT)
361 : : {
362 : 9761247 : unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
363 : 9761247 : if (inner)
364 : 9761247 : align = MIN (align, inner);
365 : : }
366 : : }
367 : :
368 : : /* Account for the alignment of runtime coefficients, so that the constant
369 : : bitpos is guaranteed to be accurate. */
370 : 109543573 : unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
371 : 109543573 : if (alt_align != 0 && alt_align < align)
372 : : {
373 : : align = alt_align;
374 : : known_alignment = false;
375 : : }
376 : :
377 : 109543573 : *alignp = align;
378 : 109543573 : *bitposp = bitpos.coeffs[0] & (align - 1);
379 : 109543573 : return known_alignment;
380 : : }
381 : :
382 : : /* For a memory reference expression EXP compute values M and N such that M
383 : : divides (&EXP - N) and such that N < M. If these numbers can be determined,
384 : : store M in alignp and N in *BITPOSP and return true. Otherwise return false
385 : : and store BITS_PER_UNIT to *alignp and any bit-offset to *bitposp. */
386 : :
387 : : bool
388 : 86561612 : get_object_alignment_1 (tree exp, unsigned int *alignp,
389 : : unsigned HOST_WIDE_INT *bitposp)
390 : : {
391 : : /* Strip a WITH_SIZE_EXPR, get_inner_reference doesn't know how to deal
392 : : with it. */
393 : 86561612 : if (TREE_CODE (exp) == WITH_SIZE_EXPR)
394 : 0 : exp = TREE_OPERAND (exp, 0);
395 : 86561612 : return get_object_alignment_2 (exp, alignp, bitposp, false);
396 : : }
397 : :
398 : : /* Return the alignment in bits of EXP, an object. */
399 : :
400 : : unsigned int
401 : 35300665 : get_object_alignment (tree exp)
402 : : {
403 : 35300665 : unsigned HOST_WIDE_INT bitpos = 0;
404 : 35300665 : unsigned int align;
405 : :
406 : 35300665 : get_object_alignment_1 (exp, &align, &bitpos);
407 : :
408 : : /* align and bitpos now specify known low bits of the pointer.
409 : : ptr & (align - 1) == bitpos. */
410 : :
411 : 35300665 : if (bitpos != 0)
412 : 2623894 : align = least_bit_hwi (bitpos);
413 : 35300665 : return align;
414 : : }
415 : :
416 : : /* For a pointer valued expression EXP compute values M and N such that M
417 : : divides (EXP - N) and such that N < M. If these numbers can be determined,
418 : : store M in alignp and N in *BITPOSP and return true. Return false if
419 : : the results are just a conservative approximation.
420 : :
421 : : If EXP is not a pointer, false is returned too. */
422 : :
423 : : bool
424 : 82154264 : get_pointer_alignment_1 (tree exp, unsigned int *alignp,
425 : : unsigned HOST_WIDE_INT *bitposp)
426 : : {
427 : 82154264 : STRIP_NOPS (exp);
428 : :
429 : 82154264 : if (TREE_CODE (exp) == ADDR_EXPR)
430 : 22980441 : return get_object_alignment_2 (TREE_OPERAND (exp, 0),
431 : 22980441 : alignp, bitposp, true);
432 : 59173823 : else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
433 : : {
434 : 952922 : unsigned int align;
435 : 952922 : unsigned HOST_WIDE_INT bitpos;
436 : 952922 : bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
437 : : &align, &bitpos);
438 : 952922 : if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
439 : 2736 : bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
440 : : else
441 : : {
442 : 950186 : unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
443 : 950186 : if (trailing_zeros < HOST_BITS_PER_INT)
444 : : {
445 : 950180 : unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
446 : 950180 : if (inner)
447 : 950180 : align = MIN (align, inner);
448 : : }
449 : : }
450 : 952922 : *alignp = align;
451 : 952922 : *bitposp = bitpos & (align - 1);
452 : 952922 : return res;
453 : : }
454 : 58220901 : else if (TREE_CODE (exp) == SSA_NAME
455 : 58220901 : && POINTER_TYPE_P (TREE_TYPE (exp)))
456 : : {
457 : 56165994 : unsigned int ptr_align, ptr_misalign;
458 : 56165994 : struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
459 : :
460 : 56165994 : if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
461 : : {
462 : 5753926 : *bitposp = ptr_misalign * BITS_PER_UNIT;
463 : 5753926 : *alignp = ptr_align * BITS_PER_UNIT;
464 : : /* Make sure to return a sensible alignment when the multiplication
465 : : by BITS_PER_UNIT overflowed. */
466 : 5753926 : if (*alignp == 0)
467 : 30 : *alignp = 1u << (HOST_BITS_PER_INT - 1);
468 : : /* We cannot really tell whether this result is an approximation. */
469 : 5753926 : return false;
470 : : }
471 : : else
472 : : {
473 : 50412068 : *bitposp = 0;
474 : 50412068 : *alignp = BITS_PER_UNIT;
475 : 50412068 : return false;
476 : : }
477 : : }
478 : 2054907 : else if (TREE_CODE (exp) == INTEGER_CST)
479 : : {
480 : 16619 : *alignp = BIGGEST_ALIGNMENT;
481 : 16619 : *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
482 : 16619 : & (BIGGEST_ALIGNMENT - 1));
483 : 16619 : return true;
484 : : }
485 : :
486 : 2038288 : *bitposp = 0;
487 : 2038288 : *alignp = BITS_PER_UNIT;
488 : 2038288 : return false;
489 : : }
490 : :
491 : : /* Return the alignment in bits of EXP, a pointer valued expression.
492 : : The alignment returned is, by default, the alignment of the thing that
493 : : EXP points to. If it is not a POINTER_TYPE, 0 is returned.
494 : :
495 : : Otherwise, look at the expression to see if we can do better, i.e., if the
496 : : expression is actually pointing at an object whose alignment is tighter. */
497 : :
498 : : unsigned int
499 : 11451005 : get_pointer_alignment (tree exp)
500 : : {
501 : 11451005 : unsigned HOST_WIDE_INT bitpos = 0;
502 : 11451005 : unsigned int align;
503 : :
504 : 11451005 : get_pointer_alignment_1 (exp, &align, &bitpos);
505 : :
506 : : /* align and bitpos now specify known low bits of the pointer.
507 : : ptr & (align - 1) == bitpos. */
508 : :
509 : 11451005 : if (bitpos != 0)
510 : 103664 : align = least_bit_hwi (bitpos);
511 : :
512 : 11451005 : return align;
513 : : }
514 : :
515 : : /* Return the number of leading non-zero elements in the sequence
516 : : [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
517 : : ELTSIZE must be a power of 2 less than 8. Used by c_strlen. */
518 : :
519 : : unsigned
520 : 805243 : string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
521 : : {
522 : 805243 : gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
523 : :
524 : 805243 : unsigned n;
525 : :
526 : 805243 : if (eltsize == 1)
527 : : {
528 : : /* Optimize the common case of plain char. */
529 : 245311873 : for (n = 0; n < maxelts; n++)
530 : : {
531 : 245222702 : const char *elt = (const char*) ptr + n;
532 : 245222702 : if (!*elt)
533 : : break;
534 : : }
535 : : }
536 : : else
537 : : {
538 : 590 : for (n = 0; n < maxelts; n++)
539 : : {
540 : 566 : const char *elt = (const char*) ptr + n * eltsize;
541 : 566 : if (!memcmp (elt, "\0\0\0\0", eltsize))
542 : : break;
543 : : }
544 : : }
545 : 805243 : return n;
546 : : }
547 : :
548 : : /* Compute the length of a null-terminated character string or wide
549 : : character string handling character sizes of 1, 2, and 4 bytes.
550 : : TREE_STRING_LENGTH is not the right way because it evaluates to
551 : : the size of the character array in bytes (as opposed to characters)
552 : : and because it can contain a zero byte in the middle.
553 : :
554 : : ONLY_VALUE should be nonzero if the result is not going to be emitted
555 : : into the instruction stream and zero if it is going to be expanded.
556 : : E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
557 : : is returned, otherwise NULL, since
558 : : len = c_strlen (ARG, 1); if (len) expand_expr (len, ...); would not
559 : : evaluate the side-effects.
560 : :
561 : : If ONLY_VALUE is two then we do not emit warnings about out-of-bound
562 : : accesses. Note that this implies the result is not going to be emitted
563 : : into the instruction stream.
564 : :
565 : : Additional information about the string accessed may be recorded
566 : : in DATA. For example, if ARG references an unterminated string,
567 : : then the declaration will be stored in the DECL field. If the
568 : : length of the unterminated string can be determined, it'll be
569 : : stored in the LEN field. Note this length could well be different
570 : : than what a C strlen call would return.
571 : :
572 : : ELTSIZE is 1 for normal single byte character strings, and 2 or
573 : : 4 for wide characer strings. ELTSIZE is by default 1.
574 : :
575 : : The value returned is of type `ssizetype'. */
576 : :
577 : : tree
578 : 2551412 : c_strlen (tree arg, int only_value, c_strlen_data *data, unsigned eltsize)
579 : : {
580 : : /* If we were not passed a DATA pointer, then get one to a local
581 : : structure. That avoids having to check DATA for NULL before
582 : : each time we want to use it. */
583 : 2551412 : c_strlen_data local_strlen_data = { };
584 : 2551412 : if (!data)
585 : 624120 : data = &local_strlen_data;
586 : :
587 : 2551412 : gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
588 : :
589 : 2551412 : tree src = STRIP_NOPS (arg);
590 : 2551412 : if (TREE_CODE (src) == COND_EXPR
591 : 2551412 : && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
592 : : {
593 : 687 : tree len1, len2;
594 : :
595 : 687 : len1 = c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
596 : 687 : len2 = c_strlen (TREE_OPERAND (src, 2), only_value, data, eltsize);
597 : 687 : if (tree_int_cst_equal (len1, len2))
598 : : return len1;
599 : : }
600 : :
601 : 2551246 : if (TREE_CODE (src) == COMPOUND_EXPR
602 : 2551246 : && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
603 : 45 : return c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
604 : :
605 : 2551201 : location_t loc = EXPR_LOC_OR_LOC (src, input_location);
606 : :
607 : : /* Offset from the beginning of the string in bytes. */
608 : 2551201 : tree byteoff;
609 : 2551201 : tree memsize;
610 : 2551201 : tree decl;
611 : 2551201 : src = string_constant (src, &byteoff, &memsize, &decl);
612 : 2551201 : if (src == 0)
613 : : return NULL_TREE;
614 : :
615 : : /* Determine the size of the string element. */
616 : 819752 : if (eltsize != tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src)))))
617 : : return NULL_TREE;
618 : :
619 : : /* Set MAXELTS to ARRAY_SIZE (SRC) - 1, the maximum possible
620 : : length of SRC. Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
621 : : in case the latter is less than the size of the array, such as when
622 : : SRC refers to a short string literal used to initialize a large array.
623 : : In that case, the elements of the array after the terminating NUL are
624 : : all NUL. */
625 : 808963 : HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src);
626 : 808963 : strelts = strelts / eltsize;
627 : :
628 : 808963 : if (!tree_fits_uhwi_p (memsize))
629 : : return NULL_TREE;
630 : :
631 : 808963 : HOST_WIDE_INT maxelts = tree_to_uhwi (memsize) / eltsize;
632 : :
633 : : /* PTR can point to the byte representation of any string type, including
634 : : char* and wchar_t*. */
635 : 808963 : const char *ptr = TREE_STRING_POINTER (src);
636 : :
637 : 808963 : if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
638 : : {
639 : : /* The code below works only for single byte character types. */
640 : 5363 : if (eltsize != 1)
641 : : return NULL_TREE;
642 : :
643 : : /* If the string has an internal NUL character followed by any
644 : : non-NUL characters (e.g., "foo\0bar"), we can't compute
645 : : the offset to the following NUL if we don't know where to
646 : : start searching for it. */
647 : 5363 : unsigned len = string_length (ptr, eltsize, strelts);
648 : :
649 : : /* Return when an embedded null character is found or none at all.
650 : : In the latter case, set the DECL/LEN field in the DATA structure
651 : : so that callers may examine them. */
652 : 5363 : if (len + 1 < strelts)
653 : : return NULL_TREE;
654 : 4793 : else if (len >= maxelts)
655 : : {
656 : 1149 : data->decl = decl;
657 : 1149 : data->off = byteoff;
658 : 1149 : data->minlen = ssize_int (len);
659 : 1149 : return NULL_TREE;
660 : : }
661 : :
662 : : /* For empty strings the result should be zero. */
663 : 3644 : if (len == 0)
664 : 39 : return ssize_int (0);
665 : :
666 : : /* We don't know the starting offset, but we do know that the string
667 : : has no internal zero bytes. If the offset falls within the bounds
668 : : of the string subtract the offset from the length of the string,
669 : : and return that. Otherwise the length is zero. Take care to
670 : : use SAVE_EXPR in case the OFFSET has side-effects. */
671 : 3605 : tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff)
672 : : : byteoff;
673 : 3605 : offsave = fold_convert_loc (loc, sizetype, offsave);
674 : 3605 : tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave,
675 : 3605 : size_int (len));
676 : 3605 : tree lenexp = fold_build2_loc (loc, MINUS_EXPR, sizetype, size_int (len),
677 : : offsave);
678 : 3605 : lenexp = fold_convert_loc (loc, ssizetype, lenexp);
679 : 3605 : return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp,
680 : 3605 : build_zero_cst (ssizetype));
681 : : }
682 : :
683 : : /* Offset from the beginning of the string in elements. */
684 : 803600 : HOST_WIDE_INT eltoff;
685 : :
686 : : /* We have a known offset into the string. Start searching there for
687 : : a null character if we can represent it as a single HOST_WIDE_INT. */
688 : 803600 : if (byteoff == 0)
689 : : eltoff = 0;
690 : 803600 : else if (! tree_fits_uhwi_p (byteoff) || tree_to_uhwi (byteoff) % eltsize)
691 : : eltoff = -1;
692 : : else
693 : 802412 : eltoff = tree_to_uhwi (byteoff) / eltsize;
694 : :
695 : : /* If the offset is known to be out of bounds, warn, and call strlen at
696 : : runtime. */
697 : 803600 : if (eltoff < 0 || eltoff >= maxelts)
698 : : {
699 : : /* Suppress multiple warnings for propagated constant strings. */
700 : 3720 : if (only_value != 2
701 : 3720 : && !warning_suppressed_p (arg, OPT_Warray_bounds_)
702 : 7440 : && warning_at (loc, OPT_Warray_bounds_,
703 : : "offset %qwi outside bounds of constant string",
704 : : eltoff))
705 : : {
706 : 679 : if (decl)
707 : 678 : inform (DECL_SOURCE_LOCATION (decl), "%qE declared here", decl);
708 : 679 : suppress_warning (arg, OPT_Warray_bounds_);
709 : : }
710 : 3720 : return NULL_TREE;
711 : : }
712 : :
713 : : /* If eltoff is larger than strelts but less than maxelts the
714 : : string length is zero, since the excess memory will be zero. */
715 : 799880 : if (eltoff > strelts)
716 : 0 : return ssize_int (0);
717 : :
718 : : /* Use strlen to search for the first zero byte. Since any strings
719 : : constructed with build_string will have nulls appended, we win even
720 : : if we get handed something like (char[4])"abcd".
721 : :
722 : : Since ELTOFF is our starting index into the string, no further
723 : : calculation is needed. */
724 : 799880 : unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
725 : 799880 : strelts - eltoff);
726 : :
727 : : /* Don't know what to return if there was no zero termination.
728 : : Ideally this would turn into a gcc_checking_assert over time.
729 : : Set DECL/LEN so callers can examine them. */
730 : 799880 : if (len >= maxelts - eltoff)
731 : : {
732 : 83135 : data->decl = decl;
733 : 83135 : data->off = byteoff;
734 : 83135 : data->minlen = ssize_int (len);
735 : 83135 : return NULL_TREE;
736 : : }
737 : :
738 : 716745 : return ssize_int (len);
739 : : }
740 : :
741 : : /* Return a constant integer corresponding to target reading
742 : : GET_MODE_BITSIZE (MODE) bits from string constant STR. If
743 : : NULL_TERMINATED_P, reading stops after '\0' character, all further ones
744 : : are assumed to be zero, otherwise it reads as many characters
745 : : as needed. */
746 : :
747 : : rtx
748 : 307494 : c_readstr (const char *str, fixed_size_mode mode,
749 : : bool null_terminated_p/*=true*/)
750 : : {
751 : 307494 : auto_vec<target_unit, MAX_BITSIZE_MODE_ANY_INT / BITS_PER_UNIT> bytes;
752 : :
753 : 614988 : bytes.reserve (GET_MODE_SIZE (mode));
754 : :
755 : 307494 : target_unit ch = 1;
756 : 5337524 : for (unsigned int i = 0; i < GET_MODE_SIZE (mode); ++i)
757 : : {
758 : 2361268 : if (ch || !null_terminated_p)
759 : 2276663 : ch = (unsigned char) str[i];
760 : 2361268 : bytes.quick_push (ch);
761 : : }
762 : :
763 : 307494 : return native_decode_rtx (mode, bytes, 0);
764 : 307494 : }
765 : :
766 : : /* Cast a target constant CST to target CHAR and if that value fits into
767 : : host char type, return zero and put that value into variable pointed to by
768 : : P. */
769 : :
770 : : static int
771 : 34264 : target_char_cast (tree cst, char *p)
772 : : {
773 : 34264 : unsigned HOST_WIDE_INT val, hostval;
774 : :
775 : 34264 : if (TREE_CODE (cst) != INTEGER_CST
776 : : || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
777 : : return 1;
778 : :
779 : : /* Do not care if it fits or not right here. */
780 : 34264 : val = TREE_INT_CST_LOW (cst);
781 : :
782 : 34264 : if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
783 : 34264 : val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;
784 : :
785 : 34264 : hostval = val;
786 : 34264 : if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
787 : 34264 : hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;
788 : :
789 : 34264 : if (val != hostval)
790 : : return 1;
791 : :
792 : 34264 : *p = hostval;
793 : 34264 : return 0;
794 : : }
795 : :
796 : : /* Similar to save_expr, but assumes that arbitrary code is not executed
797 : : in between the multiple evaluations. In particular, we assume that a
798 : : non-addressable local variable will not be modified. */
799 : :
800 : : static tree
801 : 1125661 : builtin_save_expr (tree exp)
802 : : {
803 : 1125661 : if (TREE_CODE (exp) == SSA_NAME
804 : 961143 : || (TREE_ADDRESSABLE (exp) == 0
805 : 960951 : && (TREE_CODE (exp) == PARM_DECL
806 : 373220 : || (VAR_P (exp) && !TREE_STATIC (exp)))))
807 : : return exp;
808 : :
809 : 371430 : return save_expr (exp);
810 : : }
811 : :
812 : : /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
813 : : times to get the address of either a higher stack frame, or a return
814 : : address located within it (depending on FNDECL_CODE). */
815 : :
816 : : static rtx
817 : 15583 : expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
818 : : {
819 : 15583 : int i;
820 : 15583 : rtx tem = INITIAL_FRAME_ADDRESS_RTX;
821 : 15583 : if (tem == NULL_RTX)
822 : : {
823 : : /* For a zero count with __builtin_return_address, we don't care what
824 : : frame address we return, because target-specific definitions will
825 : : override us. Therefore frame pointer elimination is OK, and using
826 : : the soft frame pointer is OK.
827 : :
828 : : For a nonzero count, or a zero count with __builtin_frame_address,
829 : : we require a stable offset from the current frame pointer to the
830 : : previous one, so we must use the hard frame pointer, and
831 : : we must disable frame pointer elimination. */
832 : 15583 : if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
833 : 4246 : tem = frame_pointer_rtx;
834 : : else
835 : : {
836 : 11337 : tem = hard_frame_pointer_rtx;
837 : :
838 : : /* Tell reload not to eliminate the frame pointer. */
839 : 11337 : crtl->accesses_prior_frames = 1;
840 : : }
841 : : }
842 : :
843 : 15583 : if (count > 0)
844 : 932 : SETUP_FRAME_ADDRESSES ();
845 : :
846 : : /* On the SPARC, the return address is not in the frame, it is in a
847 : : register. There is no way to access it off of the current frame
848 : : pointer, but it can be accessed off the previous frame pointer by
849 : : reading the value from the register window save area. */
850 : : if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
851 : : count--;
852 : :
853 : : /* Scan back COUNT frames to the specified frame. */
854 : 51459 : for (i = 0; i < count; i++)
855 : : {
856 : : /* Assume the dynamic chain pointer is in the word that the
857 : : frame address points to, unless otherwise specified. */
858 : 35876 : tem = DYNAMIC_CHAIN_ADDRESS (tem);
859 : 35876 : tem = memory_address (Pmode, tem);
860 : 35876 : tem = gen_frame_mem (Pmode, tem);
861 : 35876 : tem = copy_to_reg (tem);
862 : : }
863 : :
864 : : /* For __builtin_frame_address, return what we've got. But, on
865 : : the SPARC for example, we may have to add a bias. */
866 : 15583 : if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
867 : : return FRAME_ADDR_RTX (tem);
868 : :
869 : : /* For __builtin_return_address, get the return address from that frame. */
870 : : #ifdef RETURN_ADDR_RTX
871 : 7297 : tem = RETURN_ADDR_RTX (count, tem);
872 : : #else
873 : : tem = memory_address (Pmode,
874 : : plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
875 : : tem = gen_frame_mem (Pmode, tem);
876 : : #endif
877 : 5086 : return tem;
878 : : }
879 : :
880 : : /* Alias set used for setjmp buffer. */
881 : : static alias_set_type setjmp_alias_set = -1;
882 : :
883 : : /* Construct the leading half of a __builtin_setjmp call. Control will
884 : : return to RECEIVER_LABEL. This is also called directly by the SJLJ
885 : : exception handling code. */
886 : :
887 : : void
888 : 841 : expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
889 : : {
890 : 841 : machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
891 : 841 : rtx stack_save;
892 : 841 : rtx mem;
893 : :
894 : 841 : if (setjmp_alias_set == -1)
895 : 241 : setjmp_alias_set = new_alias_set ();
896 : :
897 : 841 : buf_addr = convert_memory_address (Pmode, buf_addr);
898 : :
899 : 842 : buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
900 : :
901 : : /* We store the frame pointer and the address of receiver_label in
902 : : the buffer and use the rest of it for the stack save area, which
903 : : is machine-dependent. */
904 : :
905 : 842 : mem = gen_rtx_MEM (Pmode, buf_addr);
906 : 841 : set_mem_alias_set (mem, setjmp_alias_set);
907 : 841 : emit_move_insn (mem, hard_frame_pointer_rtx);
908 : :
909 : 842 : mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
910 : 1682 : GET_MODE_SIZE (Pmode))),
911 : 841 : set_mem_alias_set (mem, setjmp_alias_set);
912 : :
913 : 1682 : emit_move_insn (validize_mem (mem),
914 : 1682 : force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
915 : :
916 : 841 : stack_save = gen_rtx_MEM (sa_mode,
917 : : plus_constant (Pmode, buf_addr,
918 : 1682 : 2 * GET_MODE_SIZE (Pmode)));
919 : 841 : set_mem_alias_set (stack_save, setjmp_alias_set);
920 : 841 : emit_stack_save (SAVE_NONLOCAL, &stack_save);
921 : :
922 : : /* If there is further processing to do, do it. */
923 : 841 : if (targetm.have_builtin_setjmp_setup ())
924 : 0 : emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
925 : :
926 : : /* We have a nonlocal label. */
927 : 841 : cfun->has_nonlocal_label = 1;
928 : 841 : }
929 : :
930 : : /* Construct the trailing part of a __builtin_setjmp call. This is
931 : : also called directly by the SJLJ exception handling code.
932 : : If RECEIVER_LABEL is NULL, instead contruct a nonlocal goto handler. */
933 : :
934 : : void
935 : 1845 : expand_builtin_setjmp_receiver (rtx receiver_label)
936 : : {
937 : 1845 : rtx chain;
938 : :
939 : : /* Mark the FP as used when we get here, so we have to make sure it's
940 : : marked as used by this function. */
941 : 1845 : emit_use (hard_frame_pointer_rtx);
942 : :
943 : : /* Mark the static chain as clobbered here so life information
944 : : doesn't get messed up for it. */
945 : 1845 : chain = rtx_for_static_chain (current_function_decl, true);
946 : 1845 : if (chain && REG_P (chain))
947 : 2 : emit_clobber (chain);
948 : :
949 : 1845 : if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
950 : : {
951 : : /* If the argument pointer can be eliminated in favor of the
952 : : frame pointer, we don't need to restore it. We assume here
953 : : that if such an elimination is present, it can always be used.
954 : : This is the case on all known machines; if we don't make this
955 : : assumption, we do unnecessary saving on many machines. */
956 : : size_t i;
957 : : static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
958 : :
959 : 3690 : for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
960 : 3690 : if (elim_regs[i].from == ARG_POINTER_REGNUM
961 : 3690 : && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
962 : : break;
963 : :
964 : 1845 : if (i == ARRAY_SIZE (elim_regs))
965 : : {
966 : : /* Now restore our arg pointer from the address at which it
967 : : was saved in our stack frame. */
968 : 0 : emit_move_insn (crtl->args.internal_arg_pointer,
969 : : copy_to_reg (get_arg_pointer_save_area ()));
970 : : }
971 : : }
972 : :
973 : 1845 : if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
974 : 0 : emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
975 : 1845 : else if (targetm.have_nonlocal_goto_receiver ())
976 : 0 : emit_insn (targetm.gen_nonlocal_goto_receiver ());
977 : : else
978 : : { /* Nothing */ }
979 : :
980 : : /* We must not allow the code we just generated to be reordered by
981 : : scheduling. Specifically, the update of the frame pointer must
982 : : happen immediately, not later. */
983 : 1845 : emit_insn (gen_blockage ());
984 : 1845 : }
985 : :
986 : : /* __builtin_longjmp is passed a pointer to an array of five words (not
987 : : all will be used on all machines). It operates similarly to the C
988 : : library function of the same name, but is more efficient. Much of
989 : : the code below is copied from the handling of non-local gotos. */
990 : :
991 : : static void
992 : 391 : expand_builtin_longjmp (rtx buf_addr, rtx value)
993 : : {
994 : 391 : rtx fp, lab, stack;
995 : 391 : rtx_insn *insn, *last;
996 : 391 : machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
997 : :
998 : : /* DRAP is needed for stack realign if longjmp is expanded to current
999 : : function */
1000 : 391 : if (SUPPORTS_STACK_ALIGNMENT)
1001 : 391 : crtl->need_drap = true;
1002 : :
1003 : 391 : if (setjmp_alias_set == -1)
1004 : 330 : setjmp_alias_set = new_alias_set ();
1005 : :
1006 : 391 : buf_addr = convert_memory_address (Pmode, buf_addr);
1007 : :
1008 : 392 : buf_addr = force_reg (Pmode, buf_addr);
1009 : :
1010 : : /* We require that the user must pass a second argument of 1, because
1011 : : that is what builtin_setjmp will return. */
1012 : 391 : gcc_assert (value == const1_rtx);
1013 : :
1014 : 391 : last = get_last_insn ();
1015 : 391 : if (targetm.have_builtin_longjmp ())
1016 : 0 : emit_insn (targetm.gen_builtin_longjmp (buf_addr));
1017 : : else
1018 : : {
1019 : 392 : fp = gen_rtx_MEM (Pmode, buf_addr);
1020 : 392 : lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
1021 : 782 : GET_MODE_SIZE (Pmode)));
1022 : :
1023 : 391 : stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
1024 : 782 : 2 * GET_MODE_SIZE (Pmode)));
1025 : 391 : set_mem_alias_set (fp, setjmp_alias_set);
1026 : 391 : set_mem_alias_set (lab, setjmp_alias_set);
1027 : 391 : set_mem_alias_set (stack, setjmp_alias_set);
1028 : :
1029 : : /* Pick up FP, label, and SP from the block and jump. This code is
1030 : : from expand_goto in stmt.cc; see there for detailed comments. */
1031 : 391 : if (targetm.have_nonlocal_goto ())
1032 : : /* We have to pass a value to the nonlocal_goto pattern that will
1033 : : get copied into the static_chain pointer, but it does not matter
1034 : : what that value is, because builtin_setjmp does not use it. */
1035 : 0 : emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
1036 : : else
1037 : : {
1038 : 391 : emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1039 : 391 : emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1040 : :
1041 : 391 : lab = copy_to_reg (lab);
1042 : :
1043 : : /* Restore the frame pointer and stack pointer. We must use a
1044 : : temporary since the setjmp buffer may be a local. */
1045 : 391 : fp = copy_to_reg (fp);
1046 : 391 : emit_stack_restore (SAVE_NONLOCAL, stack);
1047 : :
1048 : : /* Ensure the frame pointer move is not optimized. */
1049 : 391 : emit_insn (gen_blockage ());
1050 : 391 : emit_clobber (hard_frame_pointer_rtx);
1051 : 391 : emit_clobber (frame_pointer_rtx);
1052 : 391 : emit_move_insn (hard_frame_pointer_rtx, fp);
1053 : :
1054 : 391 : emit_use (hard_frame_pointer_rtx);
1055 : 391 : emit_use (stack_pointer_rtx);
1056 : 391 : emit_indirect_jump (lab);
1057 : : }
1058 : : }
1059 : :
1060 : : /* Search backwards and mark the jump insn as a non-local goto.
1061 : : Note that this precludes the use of __builtin_longjmp to a
1062 : : __builtin_setjmp target in the same function. However, we've
1063 : : already cautioned the user that these functions are for
1064 : : internal exception handling use only. */
1065 : 782 : for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1066 : : {
1067 : 782 : gcc_assert (insn != last);
1068 : :
1069 : 782 : if (JUMP_P (insn))
1070 : : {
1071 : 391 : add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1072 : 391 : break;
1073 : : }
1074 : 391 : else if (CALL_P (insn))
1075 : : break;
1076 : : }
1077 : 391 : }
1078 : :
1079 : : static inline bool
1080 : 2211409 : more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1081 : : {
1082 : 2211409 : return (iter->i < iter->n);
1083 : : }
1084 : :
1085 : : /* This function validates the types of a function call argument list
1086 : : against a specified list of tree_codes. If the last specifier is a 0,
1087 : : that represents an ellipsis, otherwise the last specifier must be a
1088 : : VOID_TYPE. */
1089 : :
1090 : : static bool
1091 : 463276 : validate_arglist (const_tree callexpr, ...)
1092 : : {
1093 : 463276 : enum tree_code code;
1094 : 463276 : bool res = 0;
1095 : 463276 : va_list ap;
1096 : 463276 : const_call_expr_arg_iterator iter;
1097 : 463276 : const_tree arg;
1098 : :
1099 : 463276 : va_start (ap, callexpr);
1100 : 463276 : init_const_call_expr_arg_iterator (callexpr, &iter);
1101 : :
1102 : : /* Get a bitmap of pointer argument numbers declared attribute nonnull. */
1103 : 463276 : tree fn = CALL_EXPR_FN (callexpr);
1104 : 463276 : bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));
1105 : :
1106 : 463276 : for (unsigned argno = 1; ; ++argno)
1107 : : {
1108 : 1594027 : code = (enum tree_code) va_arg (ap, int);
1109 : :
1110 : 1594027 : switch (code)
1111 : : {
1112 : 2037 : case 0:
1113 : : /* This signifies an ellipses, any further arguments are all ok. */
1114 : 2037 : res = true;
1115 : 2037 : goto end;
1116 : 461006 : case VOID_TYPE:
1117 : : /* This signifies an endlink, if no arguments remain, return
1118 : : true, otherwise return false. */
1119 : 461006 : res = !more_const_call_expr_args_p (&iter);
1120 : 461006 : goto end;
1121 : 770135 : case POINTER_TYPE:
1122 : : /* The actual argument must be nonnull when either the whole
1123 : : called function has been declared nonnull, or when the formal
1124 : : argument corresponding to the actual argument has been. */
1125 : 770135 : if (argmap
1126 : 770135 : && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
1127 : : {
1128 : 281426 : arg = next_const_call_expr_arg (&iter);
1129 : 281426 : if (!validate_arg (arg, code) || integer_zerop (arg))
1130 : 59 : goto end;
1131 : : break;
1132 : : }
1133 : : /* FALLTHRU */
1134 : 849558 : default:
1135 : : /* If no parameters remain or the parameter's code does not
1136 : : match the specified code, return false. Otherwise continue
1137 : : checking any remaining arguments. */
1138 : 849558 : arg = next_const_call_expr_arg (&iter);
1139 : 849558 : if (!validate_arg (arg, code))
1140 : 174 : goto end;
1141 : : break;
1142 : : }
1143 : : }
1144 : :
1145 : : /* We need gotos here since we can only have one VA_CLOSE in a
1146 : : function. */
1147 : 463276 : end: ;
1148 : 463276 : va_end (ap);
1149 : :
1150 : 463276 : BITMAP_FREE (argmap);
1151 : :
1152 : 463276 : if (res)
1153 : 463043 : for (tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (TREE_TYPE (fn)));
1154 : 644807 : (attrs = lookup_attribute ("nonnull_if_nonzero", attrs));
1155 : 181764 : attrs = TREE_CHAIN (attrs))
1156 : : {
1157 : 181786 : tree args = TREE_VALUE (attrs);
1158 : 181786 : unsigned int idx = TREE_INT_CST_LOW (TREE_VALUE (args)) - 1;
1159 : 181786 : unsigned int idx2
1160 : 181786 : = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (args))) - 1;
1161 : 181786 : unsigned int idx3 = idx2;
1162 : 181786 : if (tree chain2 = TREE_CHAIN (TREE_CHAIN (args)))
1163 : 0 : idx3 = TREE_INT_CST_LOW (TREE_VALUE (chain2)) - 1;
1164 : 181786 : if (idx < (unsigned) call_expr_nargs (callexpr)
1165 : 181786 : && idx2 < (unsigned) call_expr_nargs (callexpr)
1166 : 181786 : && idx3 < (unsigned) call_expr_nargs (callexpr)
1167 : 181786 : && POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (callexpr, idx)))
1168 : 181786 : && integer_zerop (CALL_EXPR_ARG (callexpr, idx))
1169 : 82 : && INTEGRAL_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (callexpr, idx2)))
1170 : 82 : && integer_nonzerop (CALL_EXPR_ARG (callexpr, idx2))
1171 : 22 : && INTEGRAL_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (callexpr, idx3)))
1172 : 181808 : && integer_nonzerop (CALL_EXPR_ARG (callexpr, idx3)))
1173 : : return false;
1174 : : }
1175 : :
1176 : : return res;
1177 : : }
1178 : :
1179 : : /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1180 : : and the address of the save area. */
1181 : :
1182 : : static rtx
1183 : 510 : expand_builtin_nonlocal_goto (tree exp)
1184 : : {
1185 : 510 : tree t_label, t_save_area;
1186 : 510 : rtx r_label, r_save_area, r_fp, r_sp;
1187 : 510 : rtx_insn *insn;
1188 : :
1189 : 510 : if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1190 : : return NULL_RTX;
1191 : :
1192 : 510 : t_label = CALL_EXPR_ARG (exp, 0);
1193 : 510 : t_save_area = CALL_EXPR_ARG (exp, 1);
1194 : :
1195 : 510 : r_label = expand_normal (t_label);
1196 : 510 : r_label = convert_memory_address (Pmode, r_label);
1197 : 510 : r_save_area = expand_normal (t_save_area);
1198 : 510 : r_save_area = convert_memory_address (Pmode, r_save_area);
1199 : : /* Copy the address of the save location to a register just in case it was
1200 : : based on the frame pointer. */
1201 : 510 : r_save_area = copy_to_reg (r_save_area);
1202 : 510 : r_fp = gen_rtx_MEM (Pmode, r_save_area);
1203 : 510 : r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1204 : : plus_constant (Pmode, r_save_area,
1205 : 1020 : GET_MODE_SIZE (Pmode)));
1206 : :
1207 : 510 : crtl->has_nonlocal_goto = 1;
1208 : :
1209 : : /* ??? We no longer need to pass the static chain value, afaik. */
1210 : 510 : if (targetm.have_nonlocal_goto ())
1211 : 0 : emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1212 : : else
1213 : : {
1214 : 510 : emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1215 : 510 : emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1216 : :
1217 : 510 : r_label = copy_to_reg (r_label);
1218 : :
1219 : : /* Restore the frame pointer and stack pointer. We must use a
1220 : : temporary since the setjmp buffer may be a local. */
1221 : 510 : r_fp = copy_to_reg (r_fp);
1222 : 510 : emit_stack_restore (SAVE_NONLOCAL, r_sp);
1223 : :
1224 : : /* Ensure the frame pointer move is not optimized. */
1225 : 510 : emit_insn (gen_blockage ());
1226 : 510 : emit_clobber (hard_frame_pointer_rtx);
1227 : 510 : emit_clobber (frame_pointer_rtx);
1228 : 510 : emit_move_insn (hard_frame_pointer_rtx, r_fp);
1229 : :
1230 : : /* USE of hard_frame_pointer_rtx added for consistency;
1231 : : not clear if really needed. */
1232 : 510 : emit_use (hard_frame_pointer_rtx);
1233 : 510 : emit_use (stack_pointer_rtx);
1234 : :
1235 : : /* If the architecture is using a GP register, we must
1236 : : conservatively assume that the target function makes use of it.
1237 : : The prologue of functions with nonlocal gotos must therefore
1238 : : initialize the GP register to the appropriate value, and we
1239 : : must then make sure that this value is live at the point
1240 : : of the jump. (Note that this doesn't necessarily apply
1241 : : to targets with a nonlocal_goto pattern; they are free
1242 : : to implement it in their own way. Note also that this is
1243 : : a no-op if the GP register is a global invariant.) */
1244 : 510 : unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
1245 : 0 : if (regnum != INVALID_REGNUM && fixed_regs[regnum])
1246 : 0 : emit_use (pic_offset_table_rtx);
1247 : :
1248 : 510 : emit_indirect_jump (r_label);
1249 : : }
1250 : :
1251 : : /* Search backwards to the jump insn and mark it as a
1252 : : non-local goto. */
1253 : 1020 : for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1254 : : {
1255 : 1020 : if (JUMP_P (insn))
1256 : : {
1257 : 510 : add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1258 : 510 : break;
1259 : : }
1260 : 510 : else if (CALL_P (insn))
1261 : : break;
1262 : : }
1263 : :
1264 : 510 : return const0_rtx;
1265 : : }
1266 : :
1267 : : /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1268 : : (not all will be used on all machines) that was passed to __builtin_setjmp.
1269 : : It updates the stack pointer in that block to the current value. This is
1270 : : also called directly by the SJLJ exception handling code. */
1271 : :
1272 : : void
1273 : 0 : expand_builtin_update_setjmp_buf (rtx buf_addr)
1274 : : {
1275 : 0 : machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1276 : 0 : buf_addr = convert_memory_address (Pmode, buf_addr);
1277 : 0 : rtx stack_save
1278 : 0 : = gen_rtx_MEM (sa_mode,
1279 : 0 : memory_address
1280 : : (sa_mode,
1281 : : plus_constant (Pmode, buf_addr,
1282 : 0 : 2 * GET_MODE_SIZE (Pmode))));
1283 : :
1284 : 0 : emit_stack_save (SAVE_NONLOCAL, &stack_save);
1285 : 0 : }
1286 : :
1287 : : /* Expand a call to __builtin_prefetch. For a target that does not support
1288 : : data prefetch, evaluate the memory address argument in case it has side
1289 : : effects. */
1290 : :
1291 : : static void
1292 : 2037 : expand_builtin_prefetch (tree exp)
1293 : : {
1294 : 2037 : tree arg0, arg1, arg2;
1295 : 2037 : int nargs;
1296 : 2037 : rtx op0, op1, op2;
1297 : :
1298 : 2037 : if (!validate_arglist (exp, POINTER_TYPE, 0))
1299 : : return;
1300 : :
1301 : 2037 : arg0 = CALL_EXPR_ARG (exp, 0);
1302 : :
1303 : : /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1304 : : zero (read) and argument 2 (locality) defaults to 3 (high degree of
1305 : : locality). */
1306 : 2037 : nargs = call_expr_nargs (exp);
1307 : 2037 : arg1 = nargs > 1 ? CALL_EXPR_ARG (exp, 1) : NULL_TREE;
1308 : 2001 : arg2 = nargs > 2 ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;
1309 : :
1310 : : /* Argument 0 is an address. */
1311 : 2133 : op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1312 : :
1313 : : /* Argument 1 (read/write flag) must be a compile-time constant int. */
1314 : 2037 : if (arg1 == NULL_TREE)
1315 : 36 : op1 = const0_rtx;
1316 : 2001 : else if (TREE_CODE (arg1) != INTEGER_CST)
1317 : : {
1318 : 0 : error ("second argument to %<__builtin_prefetch%> must be a constant");
1319 : 0 : op1 = const0_rtx;
1320 : : }
1321 : : else
1322 : 2001 : op1 = expand_normal (arg1);
1323 : : /* Argument 1 must be 0, 1 or 2. */
1324 : 2037 : if (!IN_RANGE (INTVAL (op1), 0, 2))
1325 : : {
1326 : 3 : warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1327 : : " using zero");
1328 : 3 : op1 = const0_rtx;
1329 : : }
1330 : :
1331 : : /* Argument 2 (locality) must be a compile-time constant int. */
1332 : 2037 : if (arg2 == NULL_TREE)
1333 : 61 : op2 = GEN_INT (3);
1334 : 1976 : else if (TREE_CODE (arg2) != INTEGER_CST)
1335 : : {
1336 : 0 : error ("third argument to %<__builtin_prefetch%> must be a constant");
1337 : 0 : op2 = const0_rtx;
1338 : : }
1339 : : else
1340 : 1976 : op2 = expand_normal (arg2);
1341 : : /* Argument 2 must be 0, 1, 2, or 3. */
1342 : 2037 : if (!IN_RANGE (INTVAL (op2), 0, 3))
1343 : : {
1344 : 3 : warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1345 : 3 : op2 = const0_rtx;
1346 : : }
1347 : :
1348 : 2037 : if (targetm.have_prefetch ())
1349 : : {
1350 : 2037 : class expand_operand ops[3];
1351 : :
1352 : 2037 : create_address_operand (&ops[0], op0);
1353 : 2037 : create_integer_operand (&ops[1], INTVAL (op1));
1354 : 2037 : create_integer_operand (&ops[2], INTVAL (op2));
1355 : 2037 : if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
1356 : 2037 : return;
1357 : : }
1358 : :
1359 : : /* Don't do anything with direct references to volatile memory, but
1360 : : generate code to handle other side effects. */
1361 : 0 : if (!MEM_P (op0) && side_effects_p (op0))
1362 : 0 : emit_insn (op0);
1363 : : }
1364 : :
1365 : : /* Get a MEM rtx for expression EXP which is the address of an operand
1366 : : to be used in a string instruction (cmpstrsi, cpymemsi, ..). LEN is
1367 : : the maximum length of the block of memory that might be accessed or
1368 : : NULL if unknown. */
1369 : :
1370 : : rtx
1371 : 699372 : get_memory_rtx (tree exp, tree len)
1372 : : {
1373 : 699372 : tree orig_exp = exp, base;
1374 : 699372 : rtx addr, mem;
1375 : :
1376 : 699372 : gcc_checking_assert
1377 : : (ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)))));
1378 : :
1379 : : /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1380 : : from its expression, for expr->a.b only <variable>.a.b is recorded. */
1381 : 699372 : if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1382 : 1339 : exp = TREE_OPERAND (exp, 0);
1383 : :
1384 : 699372 : addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1385 : 699372 : mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1386 : :
1387 : : /* Get an expression we can use to find the attributes to assign to MEM.
1388 : : First remove any nops. */
1389 : 1398744 : while (CONVERT_EXPR_P (exp)
1390 : 699372 : && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1391 : 0 : exp = TREE_OPERAND (exp, 0);
1392 : :
1393 : : /* Build a MEM_REF representing the whole accessed area as a byte blob,
1394 : : (as builtin stringops may alias with anything). */
1395 : 699372 : exp = fold_build2 (MEM_REF,
1396 : : build_array_type (char_type_node,
1397 : : build_range_type (sizetype,
1398 : : size_one_node, len)),
1399 : : exp, build_int_cst (ptr_type_node, 0));
1400 : :
1401 : : /* If the MEM_REF has no acceptable address, try to get the base object
1402 : : from the original address we got, and build an all-aliasing
1403 : : unknown-sized access to that one. */
1404 : 699372 : if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1405 : 699360 : set_mem_attributes (mem, exp, 0);
1406 : 12 : else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1407 : 12 : && (base = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1408 : : 0))))
1409 : : {
1410 : 12 : unsigned int align = get_pointer_alignment (TREE_OPERAND (exp, 0));
1411 : 12 : exp = build_fold_addr_expr (base);
1412 : 12 : exp = fold_build2 (MEM_REF,
1413 : : build_array_type (char_type_node,
1414 : : build_range_type (sizetype,
1415 : : size_zero_node,
1416 : : NULL)),
1417 : : exp, build_int_cst (ptr_type_node, 0));
1418 : 12 : set_mem_attributes (mem, exp, 0);
1419 : : /* Since we stripped parts make sure the offset is unknown and the
1420 : : alignment is computed from the original address. */
1421 : 12 : clear_mem_offset (mem);
1422 : 12 : set_mem_align (mem, align);
1423 : : }
1424 : 699372 : set_mem_alias_set (mem, 0);
1425 : 699372 : return mem;
1426 : : }
1427 : :
1428 : : /* Built-in functions to perform an untyped call and return. */
1429 : :
1430 : : /* Wrapper that implicitly applies a delta when getting or setting the
1431 : : enclosed value. */
1432 : : template <typename T>
1433 : : class delta_type
1434 : : {
1435 : : T &value; T const delta;
1436 : : public:
1437 : 2524 : delta_type (T &val, T dlt) : value (val), delta (dlt) {}
1438 : 1809 : operator T () const { return value + delta; }
1439 : 715 : T operator = (T val) const { value = val - delta; return val; }
1440 : : };
1441 : :
1442 : : #define saved_apply_args_size \
1443 : : (delta_type<int> (this_target_builtins->x_apply_args_size_plus_one, -1))
1444 : : #define apply_args_mode \
1445 : : (this_target_builtins->x_apply_args_mode)
1446 : : #define saved_apply_result_size \
1447 : : (delta_type<int> (this_target_builtins->x_apply_result_size_plus_one, -1))
1448 : : #define apply_result_mode \
1449 : : (this_target_builtins->x_apply_result_mode)
1450 : :
1451 : : /* Return the size required for the block returned by __builtin_apply_args,
1452 : : and initialize apply_args_mode. */
1453 : :
1454 : : static int
1455 : 947 : apply_args_size (void)
1456 : : {
1457 : 947 : int size = saved_apply_args_size;
1458 : 947 : int align;
1459 : 947 : unsigned int regno;
1460 : :
1461 : : /* The values computed by this function never change. */
1462 : 947 : if (size < 0)
1463 : : {
1464 : : /* The first value is the incoming arg-pointer. */
1465 : 360 : size = GET_MODE_SIZE (Pmode);
1466 : :
1467 : : /* The second value is the structure value address unless this is
1468 : : passed as an "invisible" first argument. */
1469 : 360 : if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1470 : 0 : size += GET_MODE_SIZE (Pmode);
1471 : :
1472 : 33480 : for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1473 : 33120 : if (FUNCTION_ARG_REGNO_P (regno))
1474 : : {
1475 : 5400 : fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);
1476 : :
1477 : 5400 : if (mode != VOIDmode)
1478 : : {
1479 : 5400 : align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1480 : 5400 : if (size % align != 0)
1481 : 1 : size = CEIL (size, align) * align;
1482 : 5400 : size += GET_MODE_SIZE (mode);
1483 : 5400 : apply_args_mode[regno] = mode;
1484 : : }
1485 : : else
1486 : 0 : apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1487 : : }
1488 : : else
1489 : 27720 : apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1490 : :
1491 : 360 : saved_apply_args_size = size;
1492 : : }
1493 : 947 : return size;
1494 : : }
1495 : :
1496 : : /* Return the size required for the block returned by __builtin_apply,
1497 : : and initialize apply_result_mode. */
1498 : :
1499 : : static int
1500 : 862 : apply_result_size (void)
1501 : : {
1502 : 862 : int size = saved_apply_result_size;
1503 : 862 : int align, regno;
1504 : :
1505 : : /* The values computed by this function never change. */
1506 : 862 : if (size < 0)
1507 : : {
1508 : : size = 0;
1509 : :
1510 : 33015 : for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1511 : 32660 : if (targetm.calls.function_value_regno_p (regno))
1512 : : {
1513 : 2840 : fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);
1514 : :
1515 : 2840 : if (mode != VOIDmode)
1516 : : {
1517 : 2840 : align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1518 : 2840 : if (size % align != 0)
1519 : : size = CEIL (size, align) * align;
1520 : 2840 : size += GET_MODE_SIZE (mode);
1521 : 2840 : apply_result_mode[regno] = mode;
1522 : : }
1523 : : else
1524 : 0 : apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1525 : : }
1526 : : else
1527 : 29820 : apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1528 : :
1529 : : /* Allow targets that use untyped_call and untyped_return to override
1530 : : the size so that machine-specific information can be stored here. */
1531 : : #ifdef APPLY_RESULT_SIZE
1532 : 355 : size = APPLY_RESULT_SIZE;
1533 : : #endif
1534 : :
1535 : 355 : saved_apply_result_size = size;
1536 : : }
1537 : 862 : return size;
1538 : : }
1539 : :
1540 : : /* Create a vector describing the result block RESULT. If SAVEP is true,
1541 : : the result block is used to save the values; otherwise it is used to
1542 : : restore the values. */
1543 : :
1544 : : static rtx
1545 : 483 : result_vector (int savep, rtx result)
1546 : : {
1547 : 483 : int regno, size, align, nelts;
1548 : 483 : fixed_size_mode mode;
1549 : 483 : rtx reg, mem;
 : : /* Worst case: one SET per hard register. */
1550 : 483 : rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1551 : :
1552 : 483 : size = nelts = 0;
 : : /* For each value-returning register (apply_result_mode[] was
 : : filled in by apply_result_size), emit either
 : : (set mem reg) when saving, or (set reg mem) when restoring.
 : : SIZE tracks the aligned offset of each slot in RESULT, using
 : : the same layout as apply_result_size computed. */
1553 : 44919 : for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1554 : 44436 : if ((mode = apply_result_mode[regno]) != VOIDmode)
1555 : : {
1556 : 3864 : align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1557 : 3864 : if (size % align != 0)
1558 : 0 : size = CEIL (size, align) * align;
 : : /* When restoring, the value lives in the callee's outgoing
 : : register, hence INCOMING_REGNO. */
1559 : 3864 : reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1560 : 3864 : mem = adjust_address (result, mode, size);
1561 : 7728 : savevec[nelts++] = (savep
1562 : 7728 : ? gen_rtx_SET (mem, reg)
1563 : 0 : : gen_rtx_SET (reg, mem));
1564 : 7728 : size += GET_MODE_SIZE (mode);
1565 : : }
1566 : 483 : return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec))
;
1568 : :
1569 : : /* Save the state required to perform an untyped call with the same
1570 : : arguments as were passed to the current function. */
1571 : :
1572 : : static rtx
1573 : 464 : expand_builtin_apply_args_1 (void)
1574 : : {
1575 : 464 : rtx registers, tem;
1576 : 464 : int size, align, regno;
1577 : 464 : fixed_size_mode mode;
1578 : 464 : rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1579 : :
1580 : : /* Create a block where the arg-pointer, structure value address,
1581 : : and argument registers can be saved. */
1582 : 464 : registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1583 : :
1584 : : /* Walk past the arg-pointer and structure value address. */
 : : /* SIZE is the running byte offset into REGISTERS; the first
 : : Pmode slot is reserved for the arg pointer (stored below),
 : : the optional second for the struct-value address. */
1585 : 464 : size = GET_MODE_SIZE (Pmode);
1586 : 464 : if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1587 : 0 : size += GET_MODE_SIZE (Pmode);
1588 : :
1589 : : /* Save each register used in calling a function to the block. */
1590 : 43152 : for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1591 : 42688 : if ((mode = apply_args_mode[regno]) != VOIDmode)
1592 : : {
1593 : 6960 : align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1594 : 6960 : if (size % align != 0)
1595 : 1 : size = CEIL (size, align) * align;
1596 : :
 : : /* Use the incoming-side register: we are saving the values
 : : our caller passed to us. */
1597 : 6960 : tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1598 : :
1599 : 6960 : emit_move_insn (adjust_address (registers, mode, size), tem);
1600 : 13920 : size += GET_MODE_SIZE (mode);
1601 : : }
1602 : :
1603 : : /* Save the arg pointer to the block. */
1604 : 464 : tem = copy_to_reg (crtl->args.internal_arg_pointer);
1605 : : /* We need the pointer as the caller actually passed them to us, not
1606 : : as we might have pretended they were passed. Make sure it's a valid
1607 : : operand, as emit_move_insn isn't expected to handle a PLUS. */
1608 : 464 : if (STACK_GROWS_DOWNWARD)
1609 : 464 : tem
1610 : 464 : = force_operand (plus_constant (Pmode, tem,
1611 : 464 : crtl->args.pretend_args_size),
1612 : : NULL_RTX);
1613 : 464 : emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1614 : :
 : : /* Offset of the struct-value slot, directly after the arg
 : : pointer saved at offset 0. */
1615 : 464 : size = GET_MODE_SIZE (Pmode);
1616 : :
1617 : : /* Save the structure value address unless this is passed as an
1618 : : "invisible" first argument. */
1619 : 464 : if (struct_incoming_value)
1620 : 0 : emit_move_insn (adjust_address (registers, Pmode, size),
1621 : : copy_to_reg (struct_incoming_value));
1622 : :
1623 : : /* Return the address of the block. */
1624 : 464 : return copy_addr_to_reg (XEXP (registers, 0));
1625 : : }
1626 : :
1627 : : /* __builtin_apply_args returns block of memory allocated on
1628 : : the stack into which is stored the arg pointer, structure
1629 : : value address, static chain, and all the registers that might
1630 : : possibly be used in performing a function call. The code is
1631 : : moved to the start of the function so the incoming values are
1632 : : saved. */
1633 : :
1634 : : static rtx
1635 : 464 : expand_builtin_apply_args (void)
1636 : : {
1637 : : /* Don't do __builtin_apply_args more than once in a function.
1638 : : Save the result of the first call and reuse it. */
1639 : 464 : if (apply_args_value != 0)
1640 : : return apply_args_value;
1641 : 464 : {
1642 : : /* When this function is called, it means that registers must be
1643 : : saved on entry to this function. So we migrate the
1644 : : call to the first insn of this function. */
1645 : 464 : rtx temp;
1646 : :
 : : /* Build the register-saving code in a detached sequence so it
 : : can be spliced in at function entry below. */
1647 : 464 : start_sequence ();
1648 : 464 : temp = expand_builtin_apply_args_1 ();
1649 : 464 : rtx_insn *seq = end_sequence ();
1650 : :
 : : /* Cache the result block address for any later uses of
 : : __builtin_apply_args in this function (see the early return). */
1651 : 464 : apply_args_value = temp;
1652 : :
1653 : : /* Put the insns after the NOTE that starts the function.
1654 : : If this is inside a start_sequence, make the outer-level insn
1655 : : chain current, so the code is placed at the start of the
1656 : : function. If internal_arg_pointer is a non-virtual pseudo,
1657 : : it needs to be placed after the function that initializes
1658 : : that pseudo. */
1659 : 464 : push_topmost_sequence ();
1660 : 464 : if (REG_P (crtl->args.internal_arg_pointer)
1661 : 464 : && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1662 : 0 : emit_insn_before (seq, parm_birth_insn);
1663 : : else
1664 : 464 : emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1665 : 464 : pop_topmost_sequence ();
1666 : 464 : return temp;
1667 : : }
1668 : : }
1669 : :
1670 : : /* Perform an untyped call and save the state required to perform an
1671 : : untyped return of whatever value was returned by the given function. */
1672 : :
1673 : : static rtx
1674 : 483 : expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1675 : : {
 : : /* FUNCTION is the callee address, ARGUMENTS the block built by
 : : __builtin_apply_args, ARGSIZE the number of bytes of stack
 : : arguments to copy. Returns the address (in ptr_mode) of a
 : : block holding the callee's raw return registers. */
1676 : 483 : int size, align, regno;
1677 : 483 : fixed_size_mode mode;
1678 : 483 : rtx incoming_args, result, reg, dest, src;
1679 : 483 : rtx_call_insn *call_insn;
1680 : 483 : rtx old_stack_level = 0;
1681 : 483 : rtx call_fusage = 0;
1682 : 483 : rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1683 : :
1684 : 483 : arguments = convert_memory_address (Pmode, arguments);
1685 : :
1686 : : /* Create a block where the return registers can be saved. */
1687 : 483 : result = assign_stack_local (BLKmode, apply_result_size (), -1);
1688 : :
1689 : : /* Fetch the arg pointer from the ARGUMENTS block. */
1690 : 483 : incoming_args = gen_reg_rtx (Pmode);
1691 : 483 : emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1692 : 483 : if (!STACK_GROWS_DOWNWARD)
1693 : : incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1694 : : incoming_args, 0, OPTAB_LIB_WIDEN);
1695 : :
1696 : : /* Push a new argument block and copy the arguments. Do not allow
1697 : : the (potential) memcpy call below to interfere with our stack
1698 : : manipulations. */
 : : /* NO_DEFER_POP here is paired with OK_DEFER_POP at the end, and
 : : the stack save here is paired with the restore after the call. */
1699 : 483 : do_pending_stack_adjust ();
1700 : 483 : NO_DEFER_POP;
1701 : :
1702 : : /* Save the stack with nonlocal if available. */
1703 : 483 : if (targetm.have_save_stack_nonlocal ())
1704 : 483 : emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1705 : : else
1706 : 0 : emit_stack_save (SAVE_BLOCK, &old_stack_level);
1707 : :
1708 : : /* Allocate a block of memory onto the stack and copy the memory
1709 : : arguments to the outgoing arguments address. We can pass TRUE
1710 : : as the 4th argument because we just saved the stack pointer
1711 : : and will restore it right after the call. */
1712 : 966 : allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);
1713 : :
1714 : : /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1715 : : may have already set current_function_calls_alloca to true.
1716 : : current_function_calls_alloca won't be set if argsize is zero,
1717 : : so we have to guarantee need_drap is true here. */
1718 : 483 : if (SUPPORTS_STACK_ALIGNMENT)
1719 : 483 : crtl->need_drap = true;
1720 : :
1721 : 483 : dest = virtual_outgoing_args_rtx;
1722 : 483 : if (!STACK_GROWS_DOWNWARD)
1723 : : {
1724 : : if (CONST_INT_P (argsize))
1725 : : dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1726 : : else
1727 : : dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1728 : : }
 : : /* Block-copy the caller's stack arguments into our fresh
 : : outgoing-argument area. */
1729 : 483 : dest = gen_rtx_MEM (BLKmode, dest);
1730 : 483 : set_mem_align (dest, PARM_BOUNDARY);
1731 : 483 : src = gen_rtx_MEM (BLKmode, incoming_args);
1732 : 483 : set_mem_align (src, PARM_BOUNDARY);
1733 : 483 : emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1734 : :
1735 : : /* Refer to the argument block. */
 : : /* Called for its side effect of initializing apply_args_mode[]. */
1736 : 483 : apply_args_size ();
1737 : 483 : arguments = gen_rtx_MEM (BLKmode, arguments);
1738 : 483 : set_mem_align (arguments, PARM_BOUNDARY);
1739 : :
1740 : : /* Walk past the arg-pointer and structure value address. */
1741 : 483 : size = GET_MODE_SIZE (Pmode);
1742 : 483 : if (struct_value)
1743 : 0 : size += GET_MODE_SIZE (Pmode);
1744 : :
1745 : : /* Restore each of the registers previously saved. Make USE insns
1746 : : for each of these registers for use in making the call. */
1747 : 44919 : for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1748 : 44436 : if ((mode = apply_args_mode[regno]) != VOIDmode)
1749 : : {
1750 : 7245 : align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1751 : 7245 : if (size % align != 0)
1752 : 0 : size = CEIL (size, align) * align;
1753 : 7245 : reg = gen_rtx_REG (mode, regno);
1754 : 7245 : emit_move_insn (reg, adjust_address (arguments, mode, size));
1755 : 7245 : use_reg (&call_fusage, reg);
1756 : 14490 : size += GET_MODE_SIZE (mode);
1757 : : }
1758 : :
1759 : : /* Restore the structure value address unless this is passed as an
1760 : : "invisible" first argument. */
1761 : 483 : size = GET_MODE_SIZE (Pmode);
1762 : 483 : if (struct_value)
1763 : : {
1764 : 0 : rtx value = gen_reg_rtx (Pmode);
1765 : 0 : emit_move_insn (value, adjust_address (arguments, Pmode, size));
1766 : 0 : emit_move_insn (struct_value, value);
1767 : 0 : if (REG_P (struct_value))
1768 : 0 : use_reg (&call_fusage, struct_value);
1769 : : }
1770 : :
1771 : : /* All arguments and registers used for the call are set up by now! */
1772 : 483 : function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1773 : :
1774 : : /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1775 : : and we don't want to load it into a register as an optimization,
1776 : : because prepare_call_address already did it if it should be done. */
1777 : 483 : if (GET_CODE (function) != SYMBOL_REF)
1778 : 29 : function = memory_address (FUNCTION_MODE, function);
1779 : :
1780 : : /* Generate the actual call instruction and save the return value. */
1781 : 483 : if (targetm.have_untyped_call ())
1782 : : {
1783 : 483 : rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1784 : 483 : rtx_insn *seq = targetm.gen_untyped_call (mem, result,
1785 : : result_vector (1, result));
1786 : 5800 : for (rtx_insn *insn = seq; insn; insn = NEXT_INSN (insn))
1787 : 5317 : if (CALL_P (insn))
1788 : 483 : add_reg_note (insn, REG_UNTYPED_CALL, NULL_RTX);
1789 : 483 : emit_insn (seq);
1790 : : }
1791 : 0 : else if (targetm.have_call_value ())
1792 : : {
1793 : : rtx valreg = 0;
1794 : :
1795 : : /* Locate the unique return register. It is not possible to
1796 : : express a call that sets more than one return register using
1797 : : call_value; use untyped_call for that. In fact, untyped_call
1798 : : only needs to save the return registers in the given block. */
1799 : 0 : for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1800 : 0 : if ((mode = apply_result_mode[regno]) != VOIDmode)
1801 : : {
1802 : 0 : gcc_assert (!valreg); /* have_untyped_call required. */
1803 : :
1804 : 0 : valreg = gen_rtx_REG (mode, regno);
1805 : : }
1806 : :
1807 : 0 : emit_insn (targetm.gen_call_value (valreg,
1808 : : gen_rtx_MEM (FUNCTION_MODE, function),
1809 : : const0_rtx, NULL_RTX, const0_rtx));
1810 : :
1811 : 0 : emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1812 : : }
1813 : : else
1814 : 0 : gcc_unreachable ();
1815 : :
1816 : : /* Find the CALL insn we just emitted, and attach the register usage
1817 : : information. */
1818 : 483 : call_insn = last_call_insn ();
1819 : 483 : add_function_usage_to (call_insn, call_fusage);
1820 : :
1821 : : /* Restore the stack. */
1822 : 483 : if (targetm.have_save_stack_nonlocal ())
1823 : 483 : emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1824 : : else
1825 : 0 : emit_stack_restore (SAVE_BLOCK, old_stack_level);
1826 : 483 : fixup_args_size_notes (call_insn, get_last_insn (), 0);
1827 : :
1828 : 483 : OK_DEFER_POP;
1829 : :
1830 : : /* Return the address of the result block. */
1831 : 483 : result = copy_addr_to_reg (XEXP (result, 0));
1832 : 483 : return convert_memory_address (ptr_mode, result);
1833 : : }
1834 : :
1835 : : /* Perform an untyped return. */
1836 : :
1837 : : static void
1838 : 379 : expand_builtin_return (rtx result)
1839 : : {
 : : /* RESULT is the address of a block saved by __builtin_apply;
 : : reload the return registers from it and return directly. */
1840 : 379 : int size, align, regno;
1841 : 379 : fixed_size_mode mode;
1842 : 379 : rtx reg;
1843 : 379 : rtx_insn *call_fusage = 0;
1844 : :
1845 : 379 : result = convert_memory_address (Pmode, result);
1846 : :
 : : /* Called for its side effect of initializing apply_result_mode[]. */
1847 : 379 : apply_result_size ();
1848 : 379 : result = gen_rtx_MEM (BLKmode, result);
1849 : :
 : : /* Targets with a dedicated untyped_return pattern handle the
 : : whole restore themselves. */
1850 : 379 : if (targetm.have_untyped_return ())
1851 : : {
1852 : 0 : rtx vector = result_vector (0, result);
1853 : 0 : emit_jump_insn (targetm.gen_untyped_return (result, vector));
1854 : 0 : emit_barrier ();
1855 : 0 : return;
1856 : : }
1857 : :
1858 : : /* Restore the return value and note that each value is used. */
1859 : : size = 0;
1860 : 35247 : for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1861 : 34868 : if ((mode = apply_result_mode[regno]) != VOIDmode)
1862 : : {
1863 : 3032 : align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1864 : 3032 : if (size % align != 0)
1865 : 0 : size = CEIL (size, align) * align;
1866 : 3032 : reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1867 : 3032 : emit_move_insn (reg, adjust_address (result, mode, size));
1868 : :
 : : /* Accumulate USE insns in a side sequence so they can all be
 : : emitted together just before the return. */
1869 : 3032 : push_to_sequence (call_fusage);
1870 : 3032 : emit_use (reg);
1871 : 3032 : call_fusage = end_sequence ();
1872 : 6064 : size += GET_MODE_SIZE (mode);
1873 : : }
1874 : :
1875 : : /* Put the USE insns before the return. */
1876 : 379 : emit_insn (call_fusage);
1877 : :
1878 : : /* Return whatever values was restored by jumping directly to the end
1879 : : of the function. */
1880 : 379 : expand_naked_return ();
1881 : : }
1882 : :
1883 : : /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1884 : :
1885 : : int
1886 : 2383 : type_to_class (tree type)
1887 : : {
 : : /* Map a front-end type to the __builtin_classify_type class
 : : codes. Unrecognized tree codes fall through to no_type_class. */
1888 : 2383 : switch (TREE_CODE (type))
1889 : : {
1890 : : case VOID_TYPE: return void_type_class;
1891 : 905 : case INTEGER_TYPE: return integer_type_class;
1892 : 39 : case ENUMERAL_TYPE: return enumeral_type_class;
1893 : 38 : case BOOLEAN_TYPE: return boolean_type_class;
1894 : 991 : case POINTER_TYPE: return pointer_type_class;
1895 : 27 : case REFERENCE_TYPE: return reference_type_class;
1896 : 36 : case OFFSET_TYPE: return offset_type_class;
1897 : 132 : case REAL_TYPE: return real_type_class;
1898 : 40 : case COMPLEX_TYPE: return complex_type_class;
1899 : 20 : case FUNCTION_TYPE: return function_type_class;
1900 : 0 : case METHOD_TYPE: return method_type_class;
1901 : 44 : case RECORD_TYPE: return record_type_class;
1902 : 44 : case UNION_TYPE:
1903 : 44 : case QUAL_UNION_TYPE: return union_type_class;
 : : /* Arrays flagged as strings classify separately. */
1904 : 24 : case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1905 : 24 : ? string_type_class : array_type_class);
1906 : 0 : case LANG_TYPE: return lang_type_class;
1907 : 0 : case OPAQUE_TYPE: return opaque_type_class;
1908 : 0 : case BITINT_TYPE: return bitint_type_class;
1909 : 30 : case VECTOR_TYPE: return vector_type_class;
1910 : 3 : default: return no_type_class;
1911 : : }
1912 : : }
1913 : :
1914 : : /* Expand a call EXP to __builtin_classify_type. */
1915 : :
1916 : : static rtx
1917 : 0 : expand_builtin_classify_type (tree exp)
1918 : : {
 : : /* Classify the (static) type of the first argument; with no
 : : arguments the result is no_type_class. */
1919 : 0 : if (call_expr_nargs (exp))
1920 : 0 : return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1921 : 0 : return GEN_INT (no_type_class);
1922 : : }
1923 : :
1924 : : /* This helper macro, meant to be used in mathfn_built_in below, determines
1925 : : which among a set of builtin math functions is appropriate for a given type
1926 : : mode. The `F' (float) and `L' (long double) are automatically generated
1927 : : from the 'double' case. If a function supports the _Float<N> and _Float<N>X
1928 : : types, there are additional types that are considered with 'F32', 'F64',
1929 : : 'F128', etc. suffixes. */
 : : /* Each CASE_MATHFN* macro expands to switch cases that set the
 : : fcode* locals of mathfn_built_in_2; they are #undef'd again
 : : after that function. */
1930 : : #define CASE_MATHFN(MATHFN) \
1931 : : CASE_CFN_##MATHFN: \
1932 : : fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1933 : : fcodel = BUILT_IN_##MATHFN##L ; break;
1934 : : /* Similar to the above, but also add support for the _Float<N> and _Float<N>X
1935 : : types. */
1936 : : #define CASE_MATHFN_FLOATN(MATHFN) \
1937 : : CASE_CFN_##MATHFN: \
1938 : : fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1939 : : fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
1940 : : fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
1941 : : fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
1942 : : fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
1943 : : break;
1944 : : /* Similar to above, but appends _R after any F/L suffix. */
1945 : : #define CASE_MATHFN_REENT(MATHFN) \
1946 : : case CFN_BUILT_IN_##MATHFN##_R: \
1947 : : case CFN_BUILT_IN_##MATHFN##F_R: \
1948 : : case CFN_BUILT_IN_##MATHFN##L_R: \
1949 : : fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1950 : : fcodel = BUILT_IN_##MATHFN##L_R ; break;
1951 : :
1952 : : /* Return a function equivalent to FN but operating on floating-point
1953 : : values of type TYPE, or END_BUILTINS if no such function exists.
1954 : : This is purely an operation on function codes; it does not guarantee
1955 : : that the target actually has an implementation of the function. */
1956 : :
1957 : : static built_in_function
1958 : 597830 : mathfn_built_in_2 (tree type, combined_fn fn)
1959 : : {
1960 : 597830 : tree mtype;
1961 : 597830 : built_in_function fcode, fcodef, fcodel;
 : : /* The _Float<N>/_Float<N>X codes default to END_BUILTINS, so a
 : : function handled only by CASE_MATHFN (no FLOATN variants)
 : : yields END_BUILTINS when TYPE is one of those types. */
1962 : 597830 : built_in_function fcodef16 = END_BUILTINS;
1963 : 597830 : built_in_function fcodef32 = END_BUILTINS;
1964 : 597830 : built_in_function fcodef64 = END_BUILTINS;
1965 : 597830 : built_in_function fcodef128 = END_BUILTINS;
1966 : 597830 : built_in_function fcodef32x = END_BUILTINS;
1967 : 597830 : built_in_function fcodef64x = END_BUILTINS;
1968 : 597830 : built_in_function fcodef128x = END_BUILTINS;
1969 : :
1970 : : /* If <math.h> has been included somehow, HUGE_VAL and NAN definitions
1971 : : break the uses below. */
1972 : : #undef HUGE_VAL
1973 : : #undef NAN
1974 : :
1975 : 597830 : switch (fn)
1976 : : {
 : : /* SEQ_OF_CASE_MATHFN is reused by mathfn_built_in_type below
 : : with the CASE_MATHFN* macros redefined. */
1977 : : #define SEQ_OF_CASE_MATHFN \
1978 : : CASE_MATHFN_FLOATN (ACOS) \
1979 : : CASE_MATHFN_FLOATN (ACOSH) \
1980 : : CASE_MATHFN_FLOATN (ASIN) \
1981 : : CASE_MATHFN_FLOATN (ASINH) \
1982 : : CASE_MATHFN_FLOATN (ATAN) \
1983 : : CASE_MATHFN_FLOATN (ATAN2) \
1984 : : CASE_MATHFN_FLOATN (ATANH) \
1985 : : CASE_MATHFN_FLOATN (CBRT) \
1986 : : CASE_MATHFN_FLOATN (CEIL) \
1987 : : CASE_MATHFN (CEXPI) \
1988 : : CASE_MATHFN_FLOATN (COPYSIGN) \
1989 : : CASE_MATHFN_FLOATN (COS) \
1990 : : CASE_MATHFN_FLOATN (COSH) \
1991 : : CASE_MATHFN (DREM) \
1992 : : CASE_MATHFN_FLOATN (ERF) \
1993 : : CASE_MATHFN_FLOATN (ERFC) \
1994 : : CASE_MATHFN_FLOATN (EXP) \
1995 : : CASE_MATHFN (EXP10) \
1996 : : CASE_MATHFN_FLOATN (EXP2) \
1997 : : CASE_MATHFN_FLOATN (EXPM1) \
1998 : : CASE_MATHFN_FLOATN (FABS) \
1999 : : CASE_MATHFN_FLOATN (FDIM) \
2000 : : CASE_MATHFN_FLOATN (FLOOR) \
2001 : : CASE_MATHFN_FLOATN (FMA) \
2002 : : CASE_MATHFN_FLOATN (FMAX) \
2003 : : CASE_MATHFN_FLOATN (FMIN) \
2004 : : CASE_MATHFN_FLOATN (FMOD) \
2005 : : CASE_MATHFN_FLOATN (FREXP) \
2006 : : CASE_MATHFN (GAMMA) \
2007 : : CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */ \
2008 : : CASE_MATHFN_FLOATN (HUGE_VAL) \
2009 : : CASE_MATHFN_FLOATN (HYPOT) \
2010 : : CASE_MATHFN_FLOATN (ILOGB) \
2011 : : CASE_MATHFN (ICEIL) \
2012 : : CASE_MATHFN (IFLOOR) \
2013 : : CASE_MATHFN_FLOATN (INF) \
2014 : : CASE_MATHFN (IRINT) \
2015 : : CASE_MATHFN (IROUND) \
2016 : : CASE_MATHFN (ISINF) \
2017 : : CASE_MATHFN (J0) \
2018 : : CASE_MATHFN (J1) \
2019 : : CASE_MATHFN (JN) \
2020 : : CASE_MATHFN (LCEIL) \
2021 : : CASE_MATHFN_FLOATN (LDEXP) \
2022 : : CASE_MATHFN (LFLOOR) \
2023 : : CASE_MATHFN_FLOATN (LGAMMA) \
2024 : : CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */ \
2025 : : CASE_MATHFN (LLCEIL) \
2026 : : CASE_MATHFN (LLFLOOR) \
2027 : : CASE_MATHFN_FLOATN (LLRINT) \
2028 : : CASE_MATHFN_FLOATN (LLROUND) \
2029 : : CASE_MATHFN_FLOATN (LOG) \
2030 : : CASE_MATHFN_FLOATN (LOG10) \
2031 : : CASE_MATHFN_FLOATN (LOG1P) \
2032 : : CASE_MATHFN_FLOATN (LOG2) \
2033 : : CASE_MATHFN_FLOATN (LOGB) \
2034 : : CASE_MATHFN_FLOATN (LRINT) \
2035 : : CASE_MATHFN_FLOATN (LROUND) \
2036 : : CASE_MATHFN_FLOATN (MODF) \
2037 : : CASE_MATHFN_FLOATN (NAN) \
2038 : : CASE_MATHFN_FLOATN (NANS) \
2039 : : CASE_MATHFN_FLOATN (NEARBYINT) \
2040 : : CASE_MATHFN_FLOATN (NEXTAFTER) \
2041 : : CASE_MATHFN (NEXTTOWARD) \
2042 : : CASE_MATHFN_FLOATN (POW) \
2043 : : CASE_MATHFN (POWI) \
2044 : : CASE_MATHFN (POW10) \
2045 : : CASE_MATHFN_FLOATN (REMAINDER) \
2046 : : CASE_MATHFN_FLOATN (REMQUO) \
2047 : : CASE_MATHFN_FLOATN (RINT) \
2048 : : CASE_MATHFN_FLOATN (ROUND) \
2049 : : CASE_MATHFN_FLOATN (ROUNDEVEN) \
2050 : : CASE_MATHFN (SCALB) \
2051 : : CASE_MATHFN_FLOATN (SCALBLN) \
2052 : : CASE_MATHFN_FLOATN (SCALBN) \
2053 : : CASE_MATHFN (SIGNBIT) \
2054 : : CASE_MATHFN (SIGNIFICAND) \
2055 : : CASE_MATHFN_FLOATN (SIN) \
2056 : : CASE_MATHFN (SINCOS) \
2057 : : CASE_MATHFN_FLOATN (SINH) \
2058 : : CASE_MATHFN_FLOATN (SQRT) \
2059 : : CASE_MATHFN_FLOATN (TAN) \
2060 : : CASE_MATHFN_FLOATN (TANH) \
2061 : : CASE_MATHFN_FLOATN (TGAMMA) \
2062 : : CASE_MATHFN_FLOATN (TRUNC) \
2063 : : CASE_MATHFN (Y0) \
2064 : : CASE_MATHFN (Y1) \
2065 : : CASE_MATHFN (YN)
2066 : :
2067 : 597767 : SEQ_OF_CASE_MATHFN
2068 : :
2069 : : default:
2070 : : return END_BUILTINS;
2071 : : }
2072 : :
 : : /* Pick the function-code variant matching TYPE's main variant. */
2073 : 597830 : mtype = TYPE_MAIN_VARIANT (type);
2074 : 597830 : if (mtype == double_type_node)
2075 : : return fcode;
2076 : 554707 : else if (mtype == float_type_node)
2077 : : return fcodef;
2078 : 502684 : else if (mtype == long_double_type_node)
2079 : : return fcodel;
2080 : 468873 : else if (mtype == float16_type_node)
2081 : : return fcodef16;
2082 : 468825 : else if (mtype == float32_type_node)
2083 : : return fcodef32;
2084 : 468825 : else if (mtype == float64_type_node)
2085 : : return fcodef64;
2086 : 468825 : else if (mtype == float128_type_node)
2087 : : return fcodef128;
2088 : 460842 : else if (mtype == float32x_type_node)
2089 : : return fcodef32x;
2090 : 460842 : else if (mtype == float64x_type_node)
2091 : : return fcodef64x;
2092 : 460842 : else if (mtype == float128x_type_node)
2093 : : return fcodef128x;
2094 : : else
2095 : 460842 : return END_BUILTINS;
2096 : : }
2097 : :
2098 : : #undef CASE_MATHFN
2099 : : #undef CASE_MATHFN_FLOATN
2100 : : #undef CASE_MATHFN_REENT
2101 : :
2102 : : /* Return mathematic function equivalent to FN but operating directly on TYPE,
2103 : : if available. If IMPLICIT_P is true use the implicit builtin declaration,
2104 : : otherwise use the explicit declaration. If we can't do the conversion,
2105 : : return null. */
2106 : :
2107 : : static tree
2108 : 597685 : mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
2109 : : {
2110 : 597685 : built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2111 : 597685 : if (fcode2 == END_BUILTINS)
2112 : : return NULL_TREE;
2113 : :
 : : /* An implicit lookup also requires the builtin to be usable
 : : without an explicit declaration in the translation unit. */
2114 : 133195 : if (implicit_p && !builtin_decl_implicit_p (fcode2))
2115 : : return NULL_TREE;
2116 : :
2117 : 128763 : return builtin_decl_explicit (fcode2);
2118 : : }
2119 : :
2120 : : /* Like mathfn_built_in_1, but always use the implicit array. */
2121 : :
2122 : : tree
2123 : 210 : mathfn_built_in (tree type, combined_fn fn)
2124 : : {
 : : /* Convenience wrapper: implicit-declaration lookup. */
2125 : 210 : return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2126 : : }
2127 : :
2128 : : /* Like mathfn_built_in_1, but always use the explicit array. */
2129 : :
2130 : : tree
2131 : 0 : mathfn_built_in_explicit (tree type, combined_fn fn)
2132 : : {
 : : /* Convenience wrapper: explicit-declaration lookup. */
2133 : 0 : return mathfn_built_in_1 (type, fn, /*implicit=*/ 0);
2134 : : }
2135 : :
2136 : : /* Like mathfn_built_in_1, but take a built_in_function and
2137 : : always use the implicit array. */
2138 : :
2139 : : tree
2140 : 597254 : mathfn_built_in (tree type, enum built_in_function fn)
2141 : : {
 : : /* Overload taking a built_in_function; same implicit lookup. */
2142 : 597254 : return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
2143 : : }
2144 : :
2145 : : /* Return the type associated with a built in function, i.e., the one
2146 : : to be passed to mathfn_built_in to get the type-specific
2147 : : function. */
2148 : :
2149 : : tree
2150 : 1238 : mathfn_built_in_type (combined_fn fn)
2151 : : {
 : : /* Redefine the CASE_MATHFN* macros so that the shared
 : : SEQ_OF_CASE_MATHFN list maps each suffixed variant to its
 : : floating-point type instead of to a function code. */
2152 : : #define CASE_MATHFN(MATHFN) \
2153 : : case CFN_BUILT_IN_##MATHFN: \
2154 : : return double_type_node; \
2155 : : case CFN_BUILT_IN_##MATHFN##F: \
2156 : : return float_type_node; \
2157 : : case CFN_BUILT_IN_##MATHFN##L: \
2158 : : return long_double_type_node;
2159 : :
2160 : : #define CASE_MATHFN_FLOATN(MATHFN) \
2161 : : CASE_MATHFN(MATHFN) \
2162 : : case CFN_BUILT_IN_##MATHFN##F16: \
2163 : : return float16_type_node; \
2164 : : case CFN_BUILT_IN_##MATHFN##F32: \
2165 : : return float32_type_node; \
2166 : : case CFN_BUILT_IN_##MATHFN##F64: \
2167 : : return float64_type_node; \
2168 : : case CFN_BUILT_IN_##MATHFN##F128: \
2169 : : return float128_type_node; \
2170 : : case CFN_BUILT_IN_##MATHFN##F32X: \
2171 : : return float32x_type_node; \
2172 : : case CFN_BUILT_IN_##MATHFN##F64X: \
2173 : : return float64x_type_node; \
2174 : : case CFN_BUILT_IN_##MATHFN##F128X: \
2175 : : return float128x_type_node;
2176 : :
2177 : : /* Similar to above, but appends _R after any F/L suffix. */
2178 : : #define CASE_MATHFN_REENT(MATHFN) \
2179 : : case CFN_BUILT_IN_##MATHFN##_R: \
2180 : : return double_type_node; \
2181 : : case CFN_BUILT_IN_##MATHFN##F_R: \
2182 : : return float_type_node; \
2183 : : case CFN_BUILT_IN_##MATHFN##L_R: \
2184 : : return long_double_type_node;
2185 : :
2186 : 1238 : switch (fn)
2187 : : {
2188 : 1238 : SEQ_OF_CASE_MATHFN
2189 : :
2190 : : default:
2191 : : return NULL_TREE;
2192 : : }
2193 : :
2194 : : #undef CASE_MATHFN
2195 : : #undef CASE_MATHFN_FLOATN
2196 : : #undef CASE_MATHFN_REENT
2197 : : #undef SEQ_OF_CASE_MATHFN
2198 : : }
2199 : :
2200 : : /* Check whether there is an internal function associated with function FN
2201 : : and return type RETURN_TYPE. Return the function if so, otherwise return
2202 : : IFN_LAST.
2203 : :
2204 : : Note that this function only tests whether the function is defined in
2205 : : internals.def, not whether it is actually available on the target. */
2206 : :
2207 : : static internal_fn
2208 : 14210295 : associated_internal_fn (built_in_function fn, tree return_type)
2209 : : {
 : : /* RETURN_TYPE is only consulted for the SCALBN/SCALBLN cases
 : : below, to check the radix of the floating-point format. */
2210 : 14210295 : switch (fn)
2211 : : {
2212 : : #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2213 : : CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2214 : : #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2215 : : CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2216 : : CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2217 : : #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2218 : : CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2219 : : #include "internal-fn.def"
2220 : :
2221 : : CASE_FLT_FN (BUILT_IN_POW10):
2222 : : return IFN_EXP10;
2223 : :
2224 : : CASE_FLT_FN (BUILT_IN_DREM):
2225 : : return IFN_REMAINDER;
2226 : :
 : : /* scalbn/scalbln are ldexp only when the radix is 2. */
2227 : 19564 : CASE_FLT_FN (BUILT_IN_SCALBN):
2228 : 19564 : CASE_FLT_FN (BUILT_IN_SCALBLN):
2229 : 19564 : if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2230 : : return IFN_LDEXP;
2231 : : return IFN_LAST;
2232 : 75 : case BUILT_IN_CRC8_DATA8:
2233 : 75 : case BUILT_IN_CRC16_DATA8:
2234 : 75 : case BUILT_IN_CRC16_DATA16:
2235 : 75 : case BUILT_IN_CRC32_DATA8:
2236 : 75 : case BUILT_IN_CRC32_DATA16:
2237 : 75 : case BUILT_IN_CRC32_DATA32:
2238 : 75 : case BUILT_IN_CRC64_DATA8:
2239 : 75 : case BUILT_IN_CRC64_DATA16:
2240 : 75 : case BUILT_IN_CRC64_DATA32:
2241 : 75 : case BUILT_IN_CRC64_DATA64:
2242 : 75 : return IFN_CRC;
2243 : 132 : case BUILT_IN_REV_CRC8_DATA8:
2244 : 132 : case BUILT_IN_REV_CRC16_DATA8:
2245 : 132 : case BUILT_IN_REV_CRC16_DATA16:
2246 : 132 : case BUILT_IN_REV_CRC32_DATA8:
2247 : 132 : case BUILT_IN_REV_CRC32_DATA16:
2248 : 132 : case BUILT_IN_REV_CRC32_DATA32:
2249 : 132 : case BUILT_IN_REV_CRC64_DATA8:
2250 : 132 : case BUILT_IN_REV_CRC64_DATA16:
2251 : 132 : case BUILT_IN_REV_CRC64_DATA32:
2252 : 132 : case BUILT_IN_REV_CRC64_DATA64:
2253 : 132 : return IFN_CRC_REV;
2254 : : default:
2255 : : return IFN_LAST;
2256 : : }
2257 : : }
2258 : :
2259 : : /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2260 : : return its code, otherwise return IFN_LAST. Note that this function
2261 : : only tests whether the function is defined in internals.def, not whether
2262 : : it is actually available on the target. */
2263 : :
2264 : : internal_fn
2265 : 642376 : associated_internal_fn (tree fndecl)
2266 : : {
 : : /* Delegate to the (code, return-type) overload above. */
2267 : 642376 : gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2268 : 642376 : return associated_internal_fn (DECL_FUNCTION_CODE (fndecl),
2269 : 642376 : TREE_TYPE (TREE_TYPE (fndecl)));
2270 : : }
2271 : :
2272 : : /* Check whether there is an internal function associated with function CFN
2273 : : and return type RETURN_TYPE. Return the function if so, otherwise return
2274 : : IFN_LAST.
2275 : :
2276 : : Note that this function only tests whether the function is defined in
2277 : : internals.def, not whether it is actually available on the target. */
2278 : :
2279 : : internal_fn
2280 : 32699724 : associated_internal_fn (combined_fn cfn, tree return_type)
2281 : : {
 : : /* A combined_fn that already names an internal function maps to
 : : itself; otherwise map the builtin code. */
2282 : 32699724 : if (internal_fn_p (cfn))
2283 : 19131805 : return as_internal_fn (cfn);
2284 : 13567919 : return associated_internal_fn (as_builtin_fn (cfn), return_type);
2285 : : }
2286 : :
2287 : : /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2288 : : on the current target by a call to an internal function, return the
2289 : : code of that internal function, otherwise return IFN_LAST. The caller
2290 : : is responsible for ensuring that any side-effects of the built-in
2291 : : call are dealt with correctly. E.g. if CALL sets errno, the caller
2292 : : must decide that the errno result isn't needed or make it available
2293 : : in some other way. */
2294 : :
2295 : : internal_fn
2296 : 866072 : replacement_internal_fn (gcall *call)
2297 : : {
2298 : 866072 : if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2299 : : {
2300 : 640036 : internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2301 : 640036 : if (ifn != IFN_LAST)
2302 : : {
 : : /* Unlike associated_internal_fn, also require the internal
 : : function to be directly supported for this call's types
 : : under the optimization settings of CALL's block. */
2303 : 61971 : tree_pair types = direct_internal_fn_types (ifn, call);
2304 : 61971 : optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2305 : 61971 : if (direct_internal_fn_supported_p (ifn, types, opt_type))
2306 : 40548 : return ifn;
2307 : : }
2308 : : }
2309 : : return IFN_LAST;
2310 : : }
2311 : :
2312 : : /* Expand a call to the builtin trinary math functions (fma).
2313 : : Return NULL_RTX if a normal call should be emitted rather than expanding the
2314 : : function in-line. EXP is the expression that is a call to the builtin
2315 : : function; if convenient, the result should be placed in TARGET.
2316 : : SUBTARGET may be used as the target for computing one of EXP's
2317 : : operands. */
2318 : :
2319 : : static rtx
2320 : 364 : expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2321 : : {
2322 : 364 : optab builtin_optab;
2323 : 364 : rtx op0, op1, op2, result;
2324 : 364 : rtx_insn *insns;
2325 : 364 : tree fndecl = get_callee_fndecl (exp);
2326 : 364 : tree arg0, arg1, arg2;
2327 : 364 : machine_mode mode;
2328 : :
2329 : 364 : if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2330 : : return NULL_RTX;
2331 : :
2332 : 364 : arg0 = CALL_EXPR_ARG (exp, 0);
2333 : 364 : arg1 = CALL_EXPR_ARG (exp, 1);
2334 : 364 : arg2 = CALL_EXPR_ARG (exp, 2);
2335 : :
2336 : 364 : switch (DECL_FUNCTION_CODE (fndecl))
2337 : : {
2338 : 364 : CASE_FLT_FN (BUILT_IN_FMA):
2339 : 364 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
2340 : 364 : builtin_optab = fma_optab; break;
2341 : 0 : default:
2342 : 0 : gcc_unreachable ();
2343 : : }
2344 : :
2345 : : /* Make a suitable register to place result in. */
2346 : 364 : mode = TYPE_MODE (TREE_TYPE (exp));
2347 : :
2348 : : /* Before working hard, check whether the instruction is available. */
2349 : 364 : if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2350 : : return NULL_RTX;
2351 : :
 : : /* NOTE(review): this register is passed to expand_ternary_op as a
 : : suggested target below; RESULT is rebound to whatever that call
 : : returns. */
2352 : 0 : result = gen_reg_rtx (mode);
2353 : :
2354 : : /* Always stabilize the argument list. */
 : : /* The save_exprs are written back into EXP so the library-call
 : : fallback below re-evaluates each argument only once. */
2355 : 0 : CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2356 : 0 : CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2357 : 0 : CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2358 : :
2359 : 0 : op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2360 : 0 : op1 = expand_normal (arg1);
2361 : 0 : op2 = expand_normal (arg2);
2362 : :
2363 : 0 : start_sequence ();
2364 : :
2365 : : /* Compute into RESULT.
2366 : : Set RESULT to wherever the result comes back. */
2367 : 0 : result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2368 : : result, 0);
2369 : :
2370 : : /* If we were unable to expand via the builtin, stop the sequence
2371 : : (without outputting the insns) and call to the library function
2372 : : with the stabilized argument list. */
2373 : 0 : if (result == 0)
2374 : : {
2375 : 0 : end_sequence ();
2376 : 0 : return expand_call (exp, target, target == const0_rtx);
2377 : : }
2378 : :
2379 : : /* Output the entire sequence. */
2380 : 0 : insns = end_sequence ();
2381 : 0 : emit_insn (insns);
2382 : :
2383 : 0 : return result;
2384 : : }
2385 : :
2386 : : /* Expand a call to the builtin sin and cos math functions.
2387 : : Return NULL_RTX if a normal call should be emitted rather than expanding the
2388 : : function in-line. EXP is the expression that is a call to the builtin
2389 : : function; if convenient, the result should be placed in TARGET.
2390 : : SUBTARGET may be used as the target for computing one of EXP's
2391 : : operands. */
2392 : :
static rtx
expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  machine_mode mode;
  tree arg;

  /* Require exactly one floating-point argument; otherwise punt to a
     normal call.  */
  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  /* Both sin and cos first try the combined sincos optab.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      builtin_optab = sincos_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Check if sincos insn is available, otherwise fallback
     to sin or cos insn.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_FLT_FN (BUILT_IN_SIN):
	builtin_optab = sin_optab; break;
      CASE_FLT_FN (BUILT_IN_COS):
	builtin_optab = cos_optab; break;
      default:
	gcc_unreachable ();
      }

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into RESULT.
	 Set RESULT to wherever the result comes back.  */
      if (builtin_optab == sincos_optab)
	{
	  int ok;

	  /* The sincos pattern has two outputs; request only the one
	     this builtin needs and pass 0 for the other.  */
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_SIN):
	      ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_COS):
	      ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  gcc_assert (ok);
	}
      else
	result = expand_unop (mode, builtin_optab, op0, result, 0);

      if (result != 0)
	{
	  /* Output the entire sequence.  */
	  insns = end_sequence ();
	  emit_insn (insns);
	  return result;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  /* No usable insn: fall back to an ordinary library call.  */
  return expand_call (exp, target, target == const0_rtx);
}
2485 : :
2486 : : /* Given an interclass math builtin decl FNDECL and it's argument ARG
2487 : : return an RTL instruction code that implements the functionality.
2488 : : If that isn't possible or available return CODE_FOR_nothing. */
2489 : :
2490 : : static enum insn_code
2491 : 1373674 : interclass_mathfn_icode (tree arg, tree fndecl)
2492 : : {
2493 : 1373674 : bool errno_set = false;
2494 : 1373674 : optab builtin_optab = unknown_optab;
2495 : 1373674 : machine_mode mode;
2496 : :
2497 : 1373674 : switch (DECL_FUNCTION_CODE (fndecl))
2498 : : {
2499 : 4 : CASE_FLT_FN (BUILT_IN_ILOGB):
2500 : 4 : errno_set = true;
2501 : 4 : builtin_optab = ilogb_optab;
2502 : 4 : break;
2503 : 266404 : CASE_FLT_FN (BUILT_IN_ISINF):
2504 : 266404 : builtin_optab = isinf_optab;
2505 : 266404 : break;
2506 : 573370 : case BUILT_IN_ISFINITE:
2507 : 573370 : builtin_optab = isfinite_optab;
2508 : 573370 : break;
2509 : 264620 : case BUILT_IN_ISNORMAL:
2510 : 264620 : builtin_optab = isnormal_optab;
2511 : 264620 : break;
2512 : 267193 : CASE_FLT_FN (BUILT_IN_ISNAN):
2513 : 267193 : builtin_optab = isnan_optab;
2514 : 267193 : break;
2515 : : CASE_FLT_FN (BUILT_IN_FINITE):
2516 : : case BUILT_IN_FINITED32:
2517 : : case BUILT_IN_FINITED64:
2518 : : case BUILT_IN_FINITED128:
2519 : : case BUILT_IN_ISINFD32:
2520 : : case BUILT_IN_ISINFD64:
2521 : : case BUILT_IN_ISINFD128:
2522 : : case BUILT_IN_ISNAND32:
2523 : : case BUILT_IN_ISNAND64:
2524 : : case BUILT_IN_ISNAND128:
2525 : : /* These builtins have no optabs (yet). */
2526 : : break;
2527 : 0 : default:
2528 : 0 : gcc_unreachable ();
2529 : : }
2530 : :
2531 : : /* There's no easy way to detect the case we need to set EDOM. */
2532 : 1373674 : if (flag_errno_math && errno_set)
2533 : : return CODE_FOR_nothing;
2534 : :
2535 : : /* Optab mode depends on the mode of the input argument. */
2536 : 1373674 : mode = TYPE_MODE (TREE_TYPE (arg));
2537 : :
2538 : 1373674 : if (builtin_optab)
2539 : 1371591 : return optab_handler (builtin_optab, mode);
2540 : : return CODE_FOR_nothing;
2541 : : }
2542 : :
2543 : : /* Expand a call to one of the builtin math functions that operate on
2544 : : floating point argument and output an integer result (ilogb, isinf,
2545 : : isnan, etc).
2546 : : Return 0 if a normal call should be emitted rather than expanding the
2547 : : function in-line. EXP is the expression that is a call to the builtin
2548 : : function; if convenient, the result should be placed in TARGET. */
2549 : :
2550 : : static rtx
2551 : 4 : expand_builtin_interclass_mathfn (tree exp, rtx target)
2552 : : {
2553 : 4 : enum insn_code icode = CODE_FOR_nothing;
2554 : 4 : rtx op0;
2555 : 4 : tree fndecl = get_callee_fndecl (exp);
2556 : 4 : machine_mode mode;
2557 : 4 : tree arg;
2558 : :
2559 : 4 : if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2560 : : return NULL_RTX;
2561 : :
2562 : 4 : arg = CALL_EXPR_ARG (exp, 0);
2563 : 4 : icode = interclass_mathfn_icode (arg, fndecl);
2564 : 4 : mode = TYPE_MODE (TREE_TYPE (arg));
2565 : :
2566 : 4 : if (icode != CODE_FOR_nothing)
2567 : : {
2568 : 2 : class expand_operand ops[1];
2569 : 2 : rtx_insn *last = get_last_insn ();
2570 : 2 : tree orig_arg = arg;
2571 : :
2572 : : /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2573 : : need to expand the argument again. This way, we will not perform
2574 : : side-effects more the once. */
2575 : 2 : CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2576 : :
2577 : 2 : op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2578 : :
2579 : 2 : if (mode != GET_MODE (op0))
2580 : 0 : op0 = convert_to_mode (mode, op0, 0);
2581 : :
2582 : 2 : create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2583 : 2 : if (maybe_legitimize_operands (icode, 0, 1, ops)
2584 : 2 : && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2585 : 1 : return ops[0].value;
2586 : :
2587 : 1 : delete_insns_since (last);
2588 : 1 : CALL_EXPR_ARG (exp, 0) = orig_arg;
2589 : : }
2590 : :
2591 : : return NULL_RTX;
2592 : : }
2593 : :
2594 : : /* Expand a call to the builtin sincos math function.
2595 : : Return NULL_RTX if a normal call should be emitted rather than expanding the
2596 : : function in-line. EXP is the expression that is a call to the builtin
2597 : : function. */
2598 : :
2599 : : static rtx
2600 : 3 : expand_builtin_sincos (tree exp)
2601 : : {
2602 : 3 : rtx op0, op1, op2, target1, target2;
2603 : 3 : machine_mode mode;
2604 : 3 : tree arg, sinp, cosp;
2605 : 3 : int result;
2606 : 3 : location_t loc = EXPR_LOCATION (exp);
2607 : 3 : tree alias_type, alias_off;
2608 : :
2609 : 3 : if (!validate_arglist (exp, REAL_TYPE,
2610 : : POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2611 : : return NULL_RTX;
2612 : :
2613 : 3 : arg = CALL_EXPR_ARG (exp, 0);
2614 : 3 : sinp = CALL_EXPR_ARG (exp, 1);
2615 : 3 : cosp = CALL_EXPR_ARG (exp, 2);
2616 : :
2617 : : /* Make a suitable register to place result in. */
2618 : 3 : mode = TYPE_MODE (TREE_TYPE (arg));
2619 : :
2620 : : /* Check if sincos insn is available, otherwise emit the call. */
2621 : 3 : if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2622 : : return NULL_RTX;
2623 : :
2624 : 0 : target1 = gen_reg_rtx (mode);
2625 : 0 : target2 = gen_reg_rtx (mode);
2626 : :
2627 : 0 : op0 = expand_normal (arg);
2628 : 0 : alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2629 : 0 : alias_off = build_int_cst (alias_type, 0);
2630 : 0 : op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2631 : : sinp, alias_off));
2632 : 0 : op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2633 : : cosp, alias_off));
2634 : :
2635 : : /* Compute into target1 and target2.
2636 : : Set TARGET to wherever the result comes back. */
2637 : 0 : result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2638 : 0 : gcc_assert (result);
2639 : :
2640 : : /* Move target1 and target2 to the memory locations indicated
2641 : : by op1 and op2. */
2642 : 0 : emit_move_insn (op1, target1);
2643 : 0 : emit_move_insn (op2, target2);
2644 : :
2645 : 0 : return const0_rtx;
2646 : : }
2647 : :
2648 : : /* Expand call EXP to the fegetround builtin (from C99 fenv.h), returning the
2649 : : result and setting it in TARGET. Otherwise return NULL_RTX on failure. */
2650 : : static rtx
2651 : 60 : expand_builtin_fegetround (tree exp, rtx target, machine_mode target_mode)
2652 : : {
2653 : 60 : if (!validate_arglist (exp, VOID_TYPE))
2654 : : return NULL_RTX;
2655 : :
2656 : 60 : insn_code icode = direct_optab_handler (fegetround_optab, SImode);
2657 : 60 : if (icode == CODE_FOR_nothing)
2658 : : return NULL_RTX;
2659 : :
2660 : 0 : if (target == 0
2661 : 0 : || GET_MODE (target) != target_mode
2662 : 0 : || !(*insn_data[icode].operand[0].predicate) (target, target_mode))
2663 : 0 : target = gen_reg_rtx (target_mode);
2664 : :
2665 : 0 : rtx pat = GEN_FCN (icode) (target);
2666 : 0 : if (!pat)
2667 : : return NULL_RTX;
2668 : 0 : emit_insn (pat);
2669 : :
2670 : 0 : return target;
2671 : : }
2672 : :
2673 : : /* Expand call EXP to either feclearexcept or feraiseexcept builtins (from C99
2674 : : fenv.h), returning the result and setting it in TARGET. Otherwise return
2675 : : NULL_RTX on failure. */
2676 : : static rtx
2677 : 1894 : expand_builtin_feclear_feraise_except (tree exp, rtx target,
2678 : : machine_mode target_mode, optab op_optab)
2679 : : {
2680 : 1894 : if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
2681 : : return NULL_RTX;
2682 : 1894 : rtx op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
2683 : :
2684 : 1894 : insn_code icode = direct_optab_handler (op_optab, SImode);
2685 : 1894 : if (icode == CODE_FOR_nothing)
2686 : : return NULL_RTX;
2687 : :
2688 : 0 : if (!(*insn_data[icode].operand[1].predicate) (op0, GET_MODE (op0)))
2689 : : return NULL_RTX;
2690 : :
2691 : 0 : if (target == 0
2692 : 0 : || GET_MODE (target) != target_mode
2693 : 0 : || !(*insn_data[icode].operand[0].predicate) (target, target_mode))
2694 : 0 : target = gen_reg_rtx (target_mode);
2695 : :
2696 : 0 : rtx pat = GEN_FCN (icode) (target, op0);
2697 : 0 : if (!pat)
2698 : : return NULL_RTX;
2699 : 0 : emit_insn (pat);
2700 : :
2701 : 0 : return target;
2702 : : }
2703 : :
2704 : : /* Expand a call to the internal cexpi builtin to the sincos math function.
2705 : : EXP is the expression that is a call to the builtin function; if convenient,
2706 : : the result should be placed in TARGET. */
2707 : :
static rtx
expand_builtin_cexpi (tree exp, rtx target)
{
  tree fndecl = get_callee_fndecl (exp);
  tree arg, type;
  machine_mode mode;
  rtx op0, op1, op2;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Try expanding via a sincos optab, fall back to emitting a libcall
     to sincos or cexp.  We are sure we have sincos or cexp because cexpi
     is only generated from sincos, cexp or if we have either of them.  */
  if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
    {
      /* OP1 ends up holding the sine and OP2 the cosine (see the final
	 COMPLEX_EXPR below, where OP2 is the real part).  */
      op1 = gen_reg_rtx (mode);
      op2 = gen_reg_rtx (mode);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      /* Compute into op1 and op2.  */
      expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
    }
  else if (targetm.libc_has_function (function_sincos, type))
    {
      /* Emit a call to sincos (arg, &sin_tmp, &cos_tmp) and read the
	 results back from the two stack temporaries.  */
      tree call, fn = NULL_TREE;
      tree top1, top2;
      rtx op1a, op2a;

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = builtin_decl_explicit (BUILT_IN_SINCOS);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
      else
	gcc_unreachable ();

      op1 = assign_temp (TREE_TYPE (arg), 1, 1);
      op2 = assign_temp (TREE_TYPE (arg), 1, 1);
      op1a = copy_addr_to_reg (XEXP (op1, 0));
      op2a = copy_addr_to_reg (XEXP (op2, 0));
      top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
      top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);

      /* Make sure not to fold the sincos call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
				      call, 3, arg, top1, top2));
    }
  else
    {
      /* Last resort: call cexp (0 + arg*i) and return its value directly.  */
      tree call, fn = NULL_TREE, narg;
      tree ctype = build_complex_type (type);

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = builtin_decl_explicit (BUILT_IN_CEXPF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = builtin_decl_explicit (BUILT_IN_CEXP);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = builtin_decl_explicit (BUILT_IN_CEXPL);
      else
	gcc_unreachable ();

      /* If we don't have a decl for cexp create one.  This is the
	 friendliest fallback if the user calls __builtin_cexpi
	 without full target C99 function support.  */
      if (fn == NULL_TREE)
	{
	  tree fntype;
	  const char *name = NULL;

	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	    name = "cexpf";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	    name = "cexp";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	    name = "cexpl";

	  fntype = build_function_type_list (ctype, ctype, NULL_TREE);
	  fn = build_fn_decl (name, fntype);
	}

      narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
			      build_real (type, dconst0), arg);

      /* Make sure not to fold the cexp call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      return expand_expr (build_call_nary (ctype, call, 1, narg),
			  target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now build the proper return type: a complex value whose real part
     is OP2 (cosine) and imaginary part OP1 (sine).  Both of the first
     two branches above set OP1/OP2; the third branch returned early.  */
  return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
			      make_tree (TREE_TYPE (arg), op2),
			      make_tree (TREE_TYPE (arg), op1)),
		      target, VOIDmode, EXPAND_NORMAL);
}
2812 : :
/* Conveniently construct a function call expression.  FNDECL names the
   function to be called, N is the number of arguments, and the "..."
   parameters are the argument expressions.  Unlike build_call_expr
   this doesn't fold the call, hence it will always return a CALL_EXPR.  */
2817 : :
2818 : : static tree
2819 : 136373 : build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2820 : : {
2821 : 136373 : va_list ap;
2822 : 136373 : tree fntype = TREE_TYPE (fndecl);
2823 : 136373 : tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2824 : :
2825 : 136373 : va_start (ap, n);
2826 : 136373 : fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2827 : 136373 : va_end (ap);
2828 : 136373 : SET_EXPR_LOCATION (fn, loc);
2829 : 136373 : return fn;
2830 : : }
2831 : :
2832 : : /* Expand the __builtin_issignaling builtin. This needs to handle
2833 : : all floating point formats that do support NaNs (for those that
2834 : : don't it just sets target to 0). */
2835 : :
static rtx
expand_builtin_issignaling (tree exp, rtx target)
{
  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree arg = CALL_EXPR_ARG (exp, 0);
  scalar_float_mode fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
  const struct real_format *fmt = REAL_MODE_FORMAT (fmode);

  /* Expand the argument yielding a RTX expression.  */
  rtx temp = expand_normal (arg);

  /* If mode doesn't support NaN, always return 0.
     Don't use !HONOR_SNANS (fmode) here, so there is some possibility of
     __builtin_issignaling working without -fsignaling-nans.  Especially
     when -fno-signaling-nans is the default.
     On the other side, MODE_HAS_NANS (fmode) is unnecessary, with
     -ffinite-math-only even __builtin_isnan or __builtin_fpclassify
     fold to 0 or non-NaN/Inf classification.  */
  /* NOTE(review): TARGET is stored into directly here, so this path
     assumes the caller passed a usable TARGET — verify against the
     expand_builtin caller.  */
  if (!HONOR_NANS (fmode))
    {
      emit_move_insn (target, const0_rtx);
      return target;
    }

  /* Check if the back end provides an insn that handles issignaling for the
     argument's mode.  */
  enum insn_code icode = optab_handler (issignaling_optab, fmode);
  if (icode != CODE_FOR_nothing)
    {
      rtx_insn *last = get_last_insn ();
      rtx this_target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_emit_unop_insn (icode, this_target, temp, UNKNOWN))
	return this_target;
      /* The insn didn't match after all: discard it and fall through to
	 the open-coded bit tests below.  */
      delete_insns_since (last);
    }

  if (DECIMAL_FLOAT_MODE_P (fmode))
    {
      scalar_int_mode imode;
      rtx hi;
      switch (fmt->ieee_bits)
	{
	case 32:
	case 64:
	  /* decimal32/64 fit in a host integer mode; work on the whole
	     value reinterpreted as an integer.  */
	  imode = int_mode_for_mode (fmode).require ();
	  temp = gen_lowpart (imode, temp);
	  break;
	case 128:
	  imode = int_mode_for_size (64, 1).require ();
	  hi = NULL_RTX;
	  /* For decimal128, TImode support isn't always there and even when
	     it is, working on the DImode high part is usually better.  */
	  if (!MEM_P (temp))
	    {
	      if (rtx t = force_highpart_subreg (imode, temp, fmode))
		hi = t;
	      else
		{
		  scalar_int_mode imode2;
		  if (int_mode_for_mode (fmode).exists (&imode2))
		    {
		      rtx temp2 = gen_lowpart (imode2, temp);
		      if (rtx t = force_highpart_subreg (imode, temp2, imode2))
			hi = t;
		    }
		}
	      /* No usable subreg: spill to a stack temporary and read the
		 high part from memory instead.  */
	      if (!hi)
		{
		  rtx mem = assign_stack_temp (fmode, GET_MODE_SIZE (fmode));
		  emit_move_insn (mem, temp);
		  temp = mem;
		}
	    }
	  if (!hi)
	    {
	      poly_int64 offset
		= subreg_highpart_offset (imode, GET_MODE (temp));
	      hi = adjust_address (temp, imode, offset);
	    }
	  temp = hi;
	  break;
	default:
	  gcc_unreachable ();
	}
      /* In all of decimal{32,64,128}, there is MSB sign bit and sNaN
	 have 6 bits below it all set.  */
      rtx val
	= GEN_INT (HOST_WIDE_INT_C (0x3f) << (GET_MODE_BITSIZE (imode) - 7));
      temp = expand_binop (imode, and_optab, temp, val,
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
      temp = emit_store_flag_force (target, EQ, temp, val, imode, 1, 1);
      return temp;
    }

  /* Only PDP11 has these defined differently but doesn't support NaNs.  */
  gcc_assert (FLOAT_WORDS_BIG_ENDIAN == WORDS_BIG_ENDIAN);
  gcc_assert (fmt->signbit_ro > 0 && fmt->b == 2);
  gcc_assert (MODE_COMPOSITE_P (fmode)
	      || (fmt->pnan == fmt->p
		  && fmt->signbit_ro == fmt->signbit_rw));

  /* Dispatch on the format's precision, which identifies the binary
     floating-point layout.  */
  switch (fmt->p)
    {
    case 106:			/* IBM double double */
      /* For IBM double double, recurse on the most significant double.  */
      gcc_assert (MODE_COMPOSITE_P (fmode));
      temp = convert_modes (DFmode, fmode, temp, 0);
      fmode = DFmode;
      fmt = REAL_MODE_FORMAT (DFmode);
      /* FALLTHRU */
    case 8:			/* bfloat */
    case 11:			/* IEEE half */
    case 24:			/* IEEE single */
    case 53:			/* IEEE double or Intel extended with rounding to double */
      if (fmt->p == 53 && fmt->signbit_ro == 79)
	goto extended;
      {
	scalar_int_mode imode = int_mode_for_mode (fmode).require ();
	temp = gen_lowpart (imode, temp);
	/* VAL is the largest non-NaN significand pattern with the
	   quiet bit clear, sign masked off.  */
	rtx val = GEN_INT ((HOST_WIDE_INT_M1U << (fmt->p - 2))
			   & ~(HOST_WIDE_INT_M1U << fmt->signbit_ro));
	if (fmt->qnan_msb_set)
	  {
	    rtx mask = GEN_INT (~(HOST_WIDE_INT_M1U << fmt->signbit_ro));
	    rtx bit = GEN_INT (HOST_WIDE_INT_1U << (fmt->p - 2));
	    /* For non-MIPS/PA IEEE single/double/half or bfloat, expand to:
	       ((temp ^ bit) & mask) > val.  */
	    temp = expand_binop (imode, xor_optab, temp, bit,
				 NULL_RTX, 1, OPTAB_LIB_WIDEN);
	    temp = expand_binop (imode, and_optab, temp, mask,
				 NULL_RTX, 1, OPTAB_LIB_WIDEN);
	    temp = emit_store_flag_force (target, GTU, temp, val, imode,
					  1, 1);
	  }
	else
	  {
	    /* For MIPS/PA IEEE single/double, expand to:
	       (temp & val) == val.  */
	    temp = expand_binop (imode, and_optab, temp, val,
				 NULL_RTX, 1, OPTAB_LIB_WIDEN);
	    temp = emit_store_flag_force (target, EQ, temp, val, imode,
					  1, 1);
	  }
      }
      break;
    case 113:			/* IEEE quad */
      {
	rtx hi = NULL_RTX, lo = NULL_RTX;
	scalar_int_mode imode = int_mode_for_size (64, 1).require ();
	/* For IEEE quad, TImode support isn't always there and even when
	   it is, working on DImode parts is usually better.  */
	if (!MEM_P (temp))
	  {
	    hi = force_highpart_subreg (imode, temp, fmode);
	    lo = force_lowpart_subreg (imode, temp, fmode);
	    if (!hi || !lo)
	      {
		scalar_int_mode imode2;
		if (int_mode_for_mode (fmode).exists (&imode2))
		  {
		    rtx temp2 = gen_lowpart (imode2, temp);
		    hi = force_highpart_subreg (imode, temp2, imode2);
		    lo = force_lowpart_subreg (imode, temp2, imode2);
		  }
	      }
	    /* No usable subregs: spill to memory and use MEM accesses.  */
	    if (!hi || !lo)
	      {
		rtx mem = assign_stack_temp (fmode, GET_MODE_SIZE (fmode));
		emit_move_insn (mem, temp);
		temp = mem;
	      }
	  }
	if (!hi || !lo)
	  {
	    poly_int64 offset
	      = subreg_highpart_offset (imode, GET_MODE (temp));
	    hi = adjust_address (temp, imode, offset);
	    offset = subreg_lowpart_offset (imode, GET_MODE (temp));
	    lo = adjust_address (temp, imode, offset);
	  }
	/* VAL/MASK/BIT are the 64-bit high-part analogues of the
	   single/double case above.  */
	rtx val = GEN_INT ((HOST_WIDE_INT_M1U << (fmt->p - 2 - 64))
			   & ~(HOST_WIDE_INT_M1U << (fmt->signbit_ro - 64)));
	if (fmt->qnan_msb_set)
	  {
	    rtx mask = GEN_INT (~(HOST_WIDE_INT_M1U << (fmt->signbit_ro
							- 64)));
	    rtx bit = GEN_INT (HOST_WIDE_INT_1U << (fmt->p - 2 - 64));
	    /* For non-MIPS/PA IEEE quad, expand to:
	       (((hi ^ bit) | ((lo | -lo) >> 63)) & mask) > val.  */
	    rtx nlo = expand_unop (imode, neg_optab, lo, NULL_RTX, 0);
	    lo = expand_binop (imode, ior_optab, lo, nlo,
			       NULL_RTX, 1, OPTAB_LIB_WIDEN);
	    lo = expand_shift (RSHIFT_EXPR, imode, lo, 63, NULL_RTX, 1);
	    temp = expand_binop (imode, xor_optab, hi, bit,
				 NULL_RTX, 1, OPTAB_LIB_WIDEN);
	    temp = expand_binop (imode, ior_optab, temp, lo,
				 NULL_RTX, 1, OPTAB_LIB_WIDEN);
	    temp = expand_binop (imode, and_optab, temp, mask,
				 NULL_RTX, 1, OPTAB_LIB_WIDEN);
	    temp = emit_store_flag_force (target, GTU, temp, val, imode,
					  1, 1);
	  }
	else
	  {
	    /* For MIPS/PA IEEE quad, expand to:
	       (hi & val) == val.  */
	    temp = expand_binop (imode, and_optab, hi, val,
				 NULL_RTX, 1, OPTAB_LIB_WIDEN);
	    temp = emit_store_flag_force (target, EQ, temp, val, imode,
					  1, 1);
	  }
      }
      break;
    case 64:			/* Intel or Motorola extended */
    extended:
      {
	rtx ex, hi, lo;
	scalar_int_mode imode = int_mode_for_size (32, 1).require ();
	scalar_int_mode iemode = int_mode_for_size (16, 1).require ();
	/* The extended formats are only examined piecewise from memory.  */
	if (!MEM_P (temp))
	  {
	    rtx mem = assign_stack_temp (fmode, GET_MODE_SIZE (fmode));
	    emit_move_insn (mem, temp);
	    temp = mem;
	  }
	if (fmt->signbit_ro == 95)
	  {
	    /* Motorola, always big endian, with 16-bit gap in between
	       16-bit sign+exponent and 64-bit mantissa.  */
	    ex = adjust_address (temp, iemode, 0);
	    hi = adjust_address (temp, imode, 4);
	    lo = adjust_address (temp, imode, 8);
	  }
	else if (!WORDS_BIG_ENDIAN)
	  {
	    /* Intel little endian, 64-bit mantissa followed by 16-bit
	       sign+exponent and then either 16 or 48 bits of gap.  */
	    ex = adjust_address (temp, iemode, 8);
	    hi = adjust_address (temp, imode, 4);
	    lo = adjust_address (temp, imode, 0);
	  }
	else
	  {
	    /* Big endian Itanium.  */
	    ex = adjust_address (temp, iemode, 0);
	    hi = adjust_address (temp, imode, 2);
	    lo = adjust_address (temp, imode, 6);
	  }
	rtx val = GEN_INT (HOST_WIDE_INT_M1U << 30);
	gcc_assert (fmt->qnan_msb_set);
	rtx mask = GEN_INT (0x7fff);
	rtx bit = GEN_INT (HOST_WIDE_INT_1U << 30);
	/* For Intel/Motorola extended format, expand to:
	   (ex & mask) == mask && ((hi ^ bit) | ((lo | -lo) >> 31)) > val.  */
	rtx nlo = expand_unop (imode, neg_optab, lo, NULL_RTX, 0);
	lo = expand_binop (imode, ior_optab, lo, nlo,
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
	lo = expand_shift (RSHIFT_EXPR, imode, lo, 31, NULL_RTX, 1);
	temp = expand_binop (imode, xor_optab, hi, bit,
			     NULL_RTX, 1, OPTAB_LIB_WIDEN);
	temp = expand_binop (imode, ior_optab, temp, lo,
			     NULL_RTX, 1, OPTAB_LIB_WIDEN);
	temp = emit_store_flag_force (target, GTU, temp, val, imode, 1, 1);
	ex = expand_binop (iemode, and_optab, ex, mask,
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
	ex = emit_store_flag_force (gen_reg_rtx (GET_MODE (temp)), EQ,
				    ex, mask, iemode, 1, 1);
	temp = expand_binop (GET_MODE (temp), and_optab, temp, ex,
			     NULL_RTX, 1, OPTAB_LIB_WIDEN);
      }
      break;
    default:
      gcc_unreachable ();
    }

  return temp;
}
3115 : :
/* Expand a call to one of the builtin rounding functions gcc defines
   as an extension (lfloor and lceil).  As these are gcc extensions we
   do not need to worry about setting errno to EDOM.
   If expanding via optab fails, lower expression to (int)(floor(x)).
   EXP is the expression that is a call to the builtin function;
   if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, tmp;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fallback_fn;
  tree fallback_fndecl;
  machine_mode mode;
  tree arg;

  /* The builtin takes exactly one floating-point argument.  */
  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  /* Pick the optab for the direct float->integer rounding conversion
     and remember which plain rounding function to fall back on.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ICEIL):
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
      builtin_optab = lceil_optab;
      fallback_fn = BUILT_IN_CEIL;
      break;

    CASE_FLT_FN (BUILT_IN_IFLOOR):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      builtin_optab = lfloor_optab;
      fallback_fn = BUILT_IN_FLOOR;
      break;

    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

  /* Expand tentatively into a throw-away sequence so the insns can be
     discarded if the optab turns out not to be supported.  */
  start_sequence ();

  /* Compute into TARGET.  */
  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns).  */
  end_sequence ();

  /* Fall back to floating point rounding optab.  */
  fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);

  /* For non-C99 targets we may end up without a fallback fndecl here
     if the user called __builtin_lfloor directly.  In this case emit
     a call to the floor/ceil variants nevertheless.  This should result
     in the best user experience for not full C99 targets.  */
  if (fallback_fndecl == NULL_TREE)
    {
      tree fntype;
      const char *name = NULL;

      /* Map the builtin code to the libm function name matching the
	 argument's precision.  */
      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_ICEIL:
	case BUILT_IN_LCEIL:
	case BUILT_IN_LLCEIL:
	  name = "ceil";
	  break;
	case BUILT_IN_ICEILF:
	case BUILT_IN_LCEILF:
	case BUILT_IN_LLCEILF:
	  name = "ceilf";
	  break;
	case BUILT_IN_ICEILL:
	case BUILT_IN_LCEILL:
	case BUILT_IN_LLCEILL:
	  name = "ceill";
	  break;
	case BUILT_IN_IFLOOR:
	case BUILT_IN_LFLOOR:
	case BUILT_IN_LLFLOOR:
	  name = "floor";
	  break;
	case BUILT_IN_IFLOORF:
	case BUILT_IN_LFLOORF:
	case BUILT_IN_LLFLOORF:
	  name = "floorf";
	  break;
	case BUILT_IN_IFLOORL:
	case BUILT_IN_LFLOORL:
	case BUILT_IN_LLFLOORL:
	  name = "floorl";
	  break;
	default:
	  gcc_unreachable ();
	}

      fntype = build_function_type_list (TREE_TYPE (arg),
					 TREE_TYPE (arg), NULL_TREE);
      fallback_fndecl = build_fn_decl (name, fntype);
    }

  exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);

  tmp = expand_normal (exp);
  tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));

  /* Truncate the result of floating point optab to integer
     via expand_fix ().  */
  target = gen_reg_rtx (mode);
  expand_fix (target, tmp, 0);

  return target;
}
3252 : :
/* Expand a call to one of the builtin math functions doing integer
   conversion (lrint).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn_2 (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg;
  machine_mode mode;
  /* BUILT_IN_NONE means there is no long-int variant to fall back on.  */
  enum built_in_function fallback_fn = BUILT_IN_NONE;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  /* Select the conversion optab; the int-returning variants also record
     a long-returning fallback builtin.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_IRINT):
      fallback_fn = BUILT_IN_LRINT;
      gcc_fallthrough ();
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
      builtin_optab = lrint_optab;
      break;

    CASE_FLT_FN (BUILT_IN_IROUND):
      fallback_fn = BUILT_IN_LROUND;
      gcc_fallthrough ();
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      builtin_optab = lround_optab;
      break;

    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
    return NULL_RTX;

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (!flag_errno_math)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

      /* Expand tentatively; discard the insns if the optab is missing.  */
      start_sequence ();

      if (expand_sfix_optab (result, op0, builtin_optab))
	{
	  /* Output the entire sequence.  */
	  insns = end_sequence ();
	  emit_insn (insns);
	  return result;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  if (fallback_fn != BUILT_IN_NONE)
    {
      /* Fall back to rounding to long int.  Use implicit_p 0 - for non-C99
	 targets, (int) round (x) should never be transformed into
	 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
	 a call to lround in the hope that the target provides at least some
	 C99 functions.  This should result in the best user experience for
	 not full C99 targets.
	 As scalar float conversions with same mode are useless in GIMPLE,
	 we can end up e.g. with _Float32 argument passed to float builtin,
	 try to get the type from the builtin prototype first.  */
      tree fallback_fndecl = NULL_TREE;
      if (tree argtypes = TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
	fallback_fndecl
	  = mathfn_built_in_1 (TREE_VALUE (argtypes),
			       as_combined_fn (fallback_fn), 0);
      if (fallback_fndecl == NULL_TREE)
	fallback_fndecl
	  = mathfn_built_in_1 (TREE_TYPE (arg),
			       as_combined_fn (fallback_fn), 0);
      if (fallback_fndecl)
	{
	  exp = build_call_nofold_loc (EXPR_LOCATION (exp),
				       fallback_fndecl, 1, arg);

	  target = expand_call (exp, NULL_RTX, target == const0_rtx);
	  target = maybe_emit_group_store (target, TREE_TYPE (exp));
	  /* The fallback returns long; narrow/widen to the original
	     result mode.  */
	  return convert_to_mode (mode, target, 0);
	}
    }

  /* No in-line expansion possible: emit the call as-is.  */
  return expand_call (exp, target, target == const0_rtx);
}
3365 : :
3366 : : /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3367 : : a normal call should be emitted rather than expanding the function
3368 : : in-line. EXP is the expression that is a call to the builtin
3369 : : function; if convenient, the result should be placed in TARGET. */
3370 : :
3371 : : static rtx
3372 : 279 : expand_builtin_powi (tree exp, rtx target)
3373 : : {
3374 : 279 : tree arg0, arg1;
3375 : 279 : rtx op0, op1;
3376 : 279 : machine_mode mode;
3377 : 279 : machine_mode mode2;
3378 : :
3379 : 279 : if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3380 : : return NULL_RTX;
3381 : :
3382 : 279 : arg0 = CALL_EXPR_ARG (exp, 0);
3383 : 279 : arg1 = CALL_EXPR_ARG (exp, 1);
3384 : 279 : mode = TYPE_MODE (TREE_TYPE (exp));
3385 : :
3386 : : /* Emit a libcall to libgcc. */
3387 : :
3388 : : /* Mode of the 2nd argument must match that of an int. */
3389 : 279 : mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
3390 : :
3391 : 279 : if (target == NULL_RTX)
3392 : 0 : target = gen_reg_rtx (mode);
3393 : :
3394 : 279 : op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
3395 : 279 : if (GET_MODE (op0) != mode)
3396 : 0 : op0 = convert_to_mode (mode, op0, 0);
3397 : 279 : op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3398 : 279 : if (GET_MODE (op1) != mode2)
3399 : 39 : op1 = convert_to_mode (mode2, op1, 0);
3400 : :
3401 : 279 : target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3402 : : target, LCT_CONST, mode,
3403 : : op0, mode, op1, mode2);
3404 : :
3405 : 279 : return target;
3406 : : }
3407 : :
/* Expand expression EXP which is a call to the strlen builtin.  Return
   NULL_RTX if we failed and the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strlen (tree exp, rtx target,
		       machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree src = CALL_EXPR_ARG (exp, 0);

  /* If the length can be computed at compile-time, return it.  */
  if (tree len = c_strlen (src, 0))
    return expand_expr (len, target, target_mode, EXPAND_NORMAL);

  /* If the length can be computed at compile-time and is constant
     integer, but there are side-effects in src, evaluate
     src for side-effects, then return len.
     E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
     can be optimized into: i++; x = 3;  */
  tree len = c_strlen (src, 1);
  if (len && TREE_CODE (len) == INTEGER_CST)
    {
      expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (len, target, target_mode, EXPAND_NORMAL);
    }

  unsigned int align = get_pointer_alignment (src) / BITS_PER_UNIT;

  /* If SRC is not a pointer type, don't do this operation inline.  */
  if (align == 0)
    return NULL_RTX;

  /* Bail out if we can't compute strlen in the right mode.  Scan wider
     modes starting from TARGET_MODE until one has a strlen pattern.  */
  machine_mode insn_mode;
  enum insn_code icode = CODE_FOR_nothing;
  FOR_EACH_MODE_FROM (insn_mode, target_mode)
    {
      icode = optab_handler (strlen_optab, insn_mode);
      if (icode != CODE_FOR_nothing)
	break;
    }
  if (insn_mode == VOIDmode)
    return NULL_RTX;

  /* Make a place to hold the source address.  We will not expand
     the actual source until we are sure that the expansion will
     not fail -- there are trees that cannot be expanded twice.  */
  rtx src_reg = gen_reg_rtx (Pmode);

  /* Mark the beginning of the strlen sequence so we can emit the
     source operand later.  */
  rtx_insn *before_strlen = get_last_insn ();

  class expand_operand ops[4];
  create_output_operand (&ops[0], target, insn_mode);
  create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
  create_integer_operand (&ops[2], 0);
  create_integer_operand (&ops[3], align);
  if (!maybe_expand_insn (icode, 4, ops))
    return NULL_RTX;

  /* Check to see if the argument was declared attribute nonstring
     and if so, issue a warning since at this point it's not known
     to be nul-terminated.  */
  maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);

  /* Now that we are assured of success, expand the source.  */
  start_sequence ();
  rtx pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
  if (pat != src_reg)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      if (GET_MODE (pat) != Pmode)
	pat = convert_to_mode (Pmode, pat,
			       POINTERS_EXTEND_UNSIGNED);
#endif
      emit_move_insn (src_reg, pat);
    }
  pat = end_sequence ();

  /* Splice the source-address computation in front of the strlen insn
     that was emitted above.  */
  if (before_strlen)
    emit_insn_after (pat, before_strlen);
  else
    emit_insn_before (pat, get_insns ());

  /* Return the value in the proper mode for this function.  */
  if (GET_MODE (ops[0].value) == target_mode)
    target = ops[0].value;
  else if (target != 0)
    convert_move (target, ops[0].value, 0);
  else
    target = convert_to_mode (target_mode, ops[0].value, 0);

  return target;
}
3506 : :
/* Expand call EXP to the strnlen built-in, returning the result
   and setting it in TARGET.  Otherwise return NULL_RTX on failure.  */

static rtx
expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree src = CALL_EXPR_ARG (exp, 0);
  tree bound = CALL_EXPR_ARG (exp, 1);

  if (!bound)
    return NULL_RTX;

  location_t loc = UNKNOWN_LOCATION;
  if (EXPR_HAS_LOCATION (exp))
    loc = EXPR_LOCATION (exp);

  /* FIXME: Change c_strlen() to return sizetype instead of ssizetype
     so these conversions aren't necessary.  */
  c_strlen_data lendata = { };
  tree len = c_strlen (src, 0, &lendata, 1);
  if (len)
    len = fold_convert_loc (loc, TREE_TYPE (bound), len);

  /* With a constant bound, the result is simply min (strlen, bound)
     when the string length is known.  */
  if (TREE_CODE (bound) == INTEGER_CST)
    {
      if (!len)
	return NULL_RTX;

      len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
      return expand_expr (len, target, target_mode, EXPAND_NORMAL);
    }

  if (TREE_CODE (bound) != SSA_NAME)
    return NULL_RTX;

  /* Query the range of the non-constant bound from the ranger.  */
  wide_int min, max;
  int_range_max r;
  get_range_query (cfun)->range_of_expr (r, bound,
					 currently_expanding_gimple_stmt);
  if (r.varying_p () || r.undefined_p ())
    return NULL_RTX;
  min = r.lower_bound ();
  max = r.upper_bound ();

  if (!len || TREE_CODE (len) != INTEGER_CST)
    {
      bool exact;
      lendata.decl = unterminated_array (src, &len, &exact);
      if (!lendata.decl)
	return NULL_RTX;
    }

  /* Give up on arrays known not to be nul-terminated.  */
  if (lendata.decl)
    return NULL_RTX;

  /* If the bound is provably greater than the string length, the
     result is the string length itself.  */
  if (wi::gtu_p (min, wi::to_wide (len)))
    return expand_expr (len, target, target_mode, EXPAND_NORMAL);

  len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
  return expand_expr (len, target, target_mode, EXPAND_NORMAL);
}
3571 : :
3572 : : /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3573 : : bytes from bytes at DATA + OFFSET and return it reinterpreted as
3574 : : a target constant. */
3575 : :
3576 : : static rtx
3577 : 174692 : builtin_memcpy_read_str (void *data, void *, HOST_WIDE_INT offset,
3578 : : fixed_size_mode mode)
3579 : : {
3580 : : /* The REPresentation pointed to by DATA need not be a nul-terminated
3581 : : string but the caller guarantees it's large enough for MODE. */
3582 : 174692 : const char *rep = (const char *) data;
3583 : :
3584 : 174692 : return c_readstr (rep + offset, mode, /*nul_terminated=*/false);
3585 : : }
3586 : :
/* LEN specify length of the block of memcpy/memset operation.
   Figure out its range and put it into MIN_SIZE/MAX_SIZE.
   In some cases we can make very likely guess on max size, then we
   set it into PROBABLE_MAX_SIZE.  */

static void
determine_block_size (tree len, rtx len_rtx,
		      unsigned HOST_WIDE_INT *min_size,
		      unsigned HOST_WIDE_INT *max_size,
		      unsigned HOST_WIDE_INT *probable_max_size)
{
  /* A compile-time constant length gives an exact answer.  */
  if (CONST_INT_P (len_rtx))
    {
      *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
      return;
    }
  else
    {
      wide_int min, max;
      enum value_range_kind range_type = VR_UNDEFINED;

      /* Determine bounds from the type.  */
      if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
	*min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
      else
	*min_size = 0;
      if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
	*probable_max_size = *max_size
	  = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
      else
	*probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));

      /* Refine the type-derived bounds with the value range computed
	 for the SSA name, if available.  */
      if (TREE_CODE (len) == SSA_NAME)
	{
	  int_range_max r;
	  tree tmin, tmax;
	  gimple *cg = currently_expanding_gimple_stmt;
	  get_range_query (cfun)->range_of_expr (r, len, cg);
	  range_type = get_legacy_range (r, tmin, tmax);
	  if (range_type != VR_UNDEFINED)
	    {
	      min = wi::to_wide (tmin);
	      max = wi::to_wide (tmax);
	    }
	}
      if (range_type == VR_RANGE)
	{
	  /* Tighten each bound only when the range improves on it.  */
	  if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
	    *min_size = min.to_uhwi ();
	  if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
	    *probable_max_size = *max_size = max.to_uhwi ();
	}
      else if (range_type == VR_ANTI_RANGE)
	{
	  /* Code like

	     int n;
	     if (n < 100)
	       memcpy (a, b, n)

	     Produce anti range allowing negative values of N.  We still
	     can use the information and make a guess that N is not negative.
	     */
	  if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
	    *probable_max_size = min.to_uhwi () - 1;
	}
    }
  gcc_checking_assert (*max_size <=
		       (unsigned HOST_WIDE_INT)
		       GET_MODE_MASK (GET_MODE (len_rtx)));
}
3658 : :
3659 : : /* Expand a call EXP to the memcpy builtin.
3660 : : Return NULL_RTX if we failed, the caller should emit a normal call,
3661 : : otherwise try to get the result in TARGET, if convenient (and in
3662 : : mode MODE if that's convenient). */
3663 : :
3664 : : static rtx
3665 : 96198 : expand_builtin_memcpy (tree exp, rtx target)
3666 : : {
3667 : 96198 : if (!validate_arglist (exp,
3668 : : POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3669 : : return NULL_RTX;
3670 : :
3671 : 96180 : tree dest = CALL_EXPR_ARG (exp, 0);
3672 : 96180 : tree src = CALL_EXPR_ARG (exp, 1);
3673 : 96180 : tree len = CALL_EXPR_ARG (exp, 2);
3674 : :
3675 : 96180 : return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3676 : 96180 : /*retmode=*/ RETURN_BEGIN, false);
3677 : : }
3678 : :
3679 : : /* Check a call EXP to the memmove built-in for validity.
3680 : : Return NULL_RTX on both success and failure. */
3681 : :
3682 : : static rtx
3683 : 16171 : expand_builtin_memmove (tree exp, rtx target)
3684 : : {
3685 : 16171 : if (!validate_arglist (exp,
3686 : : POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3687 : : return NULL_RTX;
3688 : :
3689 : 16156 : tree dest = CALL_EXPR_ARG (exp, 0);
3690 : 16156 : tree src = CALL_EXPR_ARG (exp, 1);
3691 : 16156 : tree len = CALL_EXPR_ARG (exp, 2);
3692 : :
3693 : 16156 : return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3694 : 16156 : /*retmode=*/ RETURN_BEGIN, true);
3695 : : }
3696 : :
3697 : : /* Expand a call EXP to the mempcpy builtin.
3698 : : Return NULL_RTX if we failed; the caller should emit a normal call,
3699 : : otherwise try to get the result in TARGET, if convenient (and in
3700 : : mode MODE if that's convenient). */
3701 : :
3702 : : static rtx
3703 : 1637 : expand_builtin_mempcpy (tree exp, rtx target)
3704 : : {
3705 : 1637 : if (!validate_arglist (exp,
3706 : : POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3707 : : return NULL_RTX;
3708 : :
3709 : 1632 : tree dest = CALL_EXPR_ARG (exp, 0);
3710 : 1632 : tree src = CALL_EXPR_ARG (exp, 1);
3711 : 1632 : tree len = CALL_EXPR_ARG (exp, 2);
3712 : :
3713 : : /* Policy does not generally allow using compute_objsize (which
3714 : : is used internally by check_memop_size) to change code generation
3715 : : or drive optimization decisions.
3716 : :
3717 : : In this instance it is safe because the code we generate has
3718 : : the same semantics regardless of the return value of
3719 : : check_memop_sizes. Exactly the same amount of data is copied
3720 : : and the return value is exactly the same in both cases.
3721 : :
3722 : : Furthermore, check_memop_size always uses mode 0 for the call to
3723 : : compute_objsize, so the imprecise nature of compute_objsize is
3724 : : avoided. */
3725 : :
3726 : : /* Avoid expanding mempcpy into memcpy when the call is determined
3727 : : to overflow the buffer. This also prevents the same overflow
3728 : : from being diagnosed again when expanding memcpy. */
3729 : :
3730 : 1632 : return expand_builtin_mempcpy_args (dest, src, len,
3731 : 1632 : target, exp, /*retmode=*/ RETURN_END);
3732 : : }
3733 : :
/* Helper function to do the actual work for expand of memory copy family
   functions (memcpy, mempcpy, stpcpy).  Expanding should assign LEN bytes
   of memory from SRC to DEST and assign to TARGET if convenient.  Return
   value is based on RETMODE argument.  */

static rtx
expand_builtin_memory_copy_args (tree dest, tree src, tree len,
				 rtx target, tree exp, memop_ret retmode,
				 bool might_overlap)
{
  unsigned int src_align = get_pointer_alignment (src);
  unsigned int dest_align = get_pointer_alignment (dest);
  rtx dest_mem, src_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;
  unsigned HOST_WIDE_INT min_size;
  unsigned HOST_WIDE_INT max_size;
  unsigned HOST_WIDE_INT probable_max_size;

  /* Set by emit_block_move_hints: false when the move would have to be
     emitted as a libcall we want the caller to emit instead.  */
  bool is_move_done;

  /* If DEST is not a pointer type, call the normal function.  */
  if (dest_align == 0)
    return NULL_RTX;

  /* If either SRC is not a pointer type, don't do this
     operation in-line.  */
  if (src_align == 0)
    return NULL_RTX;

  /* Profile feedback may supply a likely alignment and size.  */
  if (currently_expanding_gimple_stmt)
    stringop_block_profile (currently_expanding_gimple_stmt,
			    &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;
  dest_mem = get_memory_rtx (dest, len);
  set_mem_align (dest_mem, dest_align);
  len_rtx = expand_normal (len);
  determine_block_size (len, len_rtx, &min_size, &max_size,
			&probable_max_size);

  /* Try to get the byte representation of the constant SRC points to,
     with its byte size in NBYTES.  */
  unsigned HOST_WIDE_INT nbytes;
  const char *rep = getbyterep (src, &nbytes);

  /* If the function's constant bound LEN_RTX is less than or equal
     to the byte size of the representation of the constant argument,
     and if block move would be done by pieces, we can avoid loading
     the bytes from memory and only store the computed constant.
     This works in the overlap (memmove) case as well because
     store_by_pieces just generates a series of stores of constants
     from the representation returned by getbyterep().  */
  if (rep
      && CONST_INT_P (len_rtx)
      && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes
      && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
			      CONST_CAST (char *, rep),
			      dest_align, false))
    {
      dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				  builtin_memcpy_read_str,
				  CONST_CAST (char *, rep),
				  dest_align, false, retmode);
      dest_mem = force_operand (XEXP (dest_mem, 0), target);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  src_mem = get_memory_rtx (src, len);
  set_mem_align (src_mem, src_align);

  /* Copy word part most expediently.  */
  enum block_op_methods method = BLOCK_OP_NORMAL;
  if (CALL_EXPR_TAILCALL (exp)
      && (retmode == RETURN_BEGIN || target == const0_rtx))
    method = BLOCK_OP_TAILCALL;
  bool use_mempcpy_call = (targetm.libc_has_fast_function (BUILT_IN_MEMPCPY)
			   && retmode == RETURN_END
			   && !might_overlap
			   && target != const0_rtx);
  if (use_mempcpy_call)
    method = BLOCK_OP_NO_LIBCALL_RET;
  dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
				     expected_align, expected_size,
				     min_size, max_size, probable_max_size,
				     use_mempcpy_call, &is_move_done,
				     might_overlap, tree_ctz (len));

  /* Bail out when a mempcpy call would be expanded as libcall and when
     we have a target that provides a fast implementation
     of mempcpy routine.  */
  if (!is_move_done)
    return NULL_RTX;

  if (dest_addr == pc_rtx)
    return NULL_RTX;

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), target);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  if (retmode != RETURN_BEGIN && target != const0_rtx)
    {
      /* mempcpy-style return: the address one past the copied block.  */
      dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
      /* stpcpy pointer to last byte.  */
      if (retmode == RETURN_END_MINUS_ONE)
	dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
    }

  return dest_addr;
}
3849 : :
3850 : : static rtx
3851 : 1655 : expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3852 : : rtx target, tree orig_exp, memop_ret retmode)
3853 : : {
3854 : 1632 : return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
3855 : 0 : retmode, false);
3856 : : }
3857 : :
/* Expand into a movstr instruction, if one is available.  Return NULL_RTX if
   we failed, the caller should emit a normal call, otherwise try to
   get the result in TARGET, if convenient.
   Return value is based on RETMODE argument.  */

static rtx
expand_movstr (tree dest, tree src, rtx target, memop_ret retmode)
{
  class expand_operand ops[3];
  rtx dest_mem;
  rtx src_mem;

  /* Only usable when the target provides a movstr pattern.  */
  if (!targetm.have_movstr ())
    return NULL_RTX;

  dest_mem = get_memory_rtx (dest, NULL);
  src_mem = get_memory_rtx (src, NULL);
  if (retmode == RETURN_BEGIN)
    {
      /* The return value is the destination start, so capture it in a
	 register before the pattern advances the destination.  */
      target = force_reg (Pmode, XEXP (dest_mem, 0));
      dest_mem = replace_equiv_address (dest_mem, target);
    }

  create_output_operand (&ops[0],
			 retmode != RETURN_BEGIN ? target : NULL_RTX, Pmode);
  create_fixed_operand (&ops[1], dest_mem);
  create_fixed_operand (&ops[2], src_mem);
  if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
    return NULL_RTX;

  if (retmode != RETURN_BEGIN && target != const0_rtx)
    {
      target = ops[0].value;
      /* movstr is supposed to set end to the address of the NUL
	 terminator.  If the caller requested a mempcpy-like return value,
	 adjust it.  */
      if (retmode == RETURN_END)
	{
	  rtx tem = plus_constant (GET_MODE (target),
				   gen_lowpart (GET_MODE (target), target), 1);
	  emit_move_insn (target, force_operand (tem, NULL_RTX));
	}
    }
  return target;
}
3903 : :
3904 : : /* Expand expression EXP, which is a call to the strcpy builtin. Return
3905 : : NULL_RTX if we failed the caller should emit a normal call, otherwise
3906 : : try to get the result in TARGET, if convenient (and in mode MODE if that's
3907 : : convenient). */
3908 : :
3909 : : static rtx
3910 : 1862 : expand_builtin_strcpy (tree exp, rtx target)
3911 : : {
3912 : 1862 : if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3913 : : return NULL_RTX;
3914 : :
3915 : 1847 : tree dest = CALL_EXPR_ARG (exp, 0);
3916 : 1847 : tree src = CALL_EXPR_ARG (exp, 1);
3917 : :
3918 : 1847 : return expand_builtin_strcpy_args (exp, dest, src, target);
3919 : : }
3920 : :
3921 : : /* Helper function to do the actual work for expand_builtin_strcpy. The
3922 : : arguments to the builtin_strcpy call DEST and SRC are broken out
3923 : : so that this can also be called without constructing an actual CALL_EXPR.
3924 : : The other arguments and return value are the same as for
3925 : : expand_builtin_strcpy. */
3926 : :
3927 : : static rtx
3928 : 1847 : expand_builtin_strcpy_args (tree, tree dest, tree src, rtx target)
3929 : : {
3930 : 1847 : return expand_movstr (dest, src, target, /*retmode=*/ RETURN_BEGIN);
3931 : : }
3932 : :
/* Expand a call EXP to the stpcpy builtin.
   Return NULL_RTX if we failed the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
{
  tree dst, src;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* If return value is ignored, transform stpcpy into strcpy.  */
  if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      tree len, lenp1;
      rtx ret;

      /* Ensure we get an actual string whose length can be evaluated at
	 compile-time, not an expression containing a string.  This is
	 because the latter will potentially produce pessimized code
	 when used to produce the return value.  */
      c_strlen_data lendata = { };
      if (!c_getstr (src)
	  || !(len = c_strlen (src, 0, &lendata, 1)))
	return expand_movstr (dst, src, target,
			      /*retmode=*/ RETURN_END_MINUS_ONE);

      /* Known-length source: lower stpcpy (d, s) to
	 mempcpy (d, s, strlen (s) + 1) - 1.  */
      lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
      ret = expand_builtin_mempcpy_args (dst, src, lenp1,
					 target, exp,
					 /*retmode=*/ RETURN_END_MINUS_ONE);

      if (ret)
	return ret;

      if (TREE_CODE (len) == INTEGER_CST)
	{
	  rtx len_rtx = expand_normal (len);

	  if (CONST_INT_P (len_rtx))
	    {
	      /* mempcpy failed; try strcpy and compute the end pointer
		 ourselves by adding the constant length.  */
	      ret = expand_builtin_strcpy_args (exp, dst, src, target);

	      if (ret)
		{
		  if (! target)
		    {
		      if (mode != VOIDmode)
			target = gen_reg_rtx (mode);
		      else
			target = gen_reg_rtx (GET_MODE (ret));
		    }
		  if (GET_MODE (target) != GET_MODE (ret))
		    ret = gen_lowpart (GET_MODE (target), ret);

		  ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
		  ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
		  gcc_assert (ret);

		  return target;
		}
	    }
	}

      /* Last resort: the movstr pattern, if the target has one.  */
      return expand_movstr (dst, src, target,
			    /*retmode=*/ RETURN_END_MINUS_ONE);
    }
}
4013 : :
4014 : : /* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
4015 : : arguments while being careful to avoid duplicate warnings (which could
4016 : : be issued if the expander were to expand the call, resulting in it
4017 : : being emitted in expand_call()). */
4018 : :
4019 : : static rtx
4020 : 454 : expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
4021 : : {
4022 : 454 : if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
4023 : : {
4024 : : /* The call has been successfully expanded. Check for nonstring
4025 : : arguments and issue warnings as appropriate. */
4026 : 23 : maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
4027 : 23 : return ret;
4028 : : }
4029 : :
 : : /* Expansion failed; the caller emits a normal library call,
 : : which will do its own diagnostics. */
4030 : : return NULL_RTX;
4031 : : }
4032 : :
4033 : : /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
4034 : : bytes from constant string DATA + OFFSET and return it as target
4035 : : constant. */
4036 : :
4037 : : rtx
4038 : 9310 : builtin_strncpy_read_str (void *data, void *, HOST_WIDE_INT offset,
4039 : : fixed_size_mode mode)
4040 : : {
4041 : 9310 : const char *str = (const char *) data;
4042 : :
 : : /* Offsets wholly past the terminating nul correspond to strncpy's
 : : required zero padding, so read as all-zero. */
4043 : 9310 : if ((unsigned HOST_WIDE_INT) offset > strlen (str))
4044 : 260 : return const0_rtx;
4045 : :
4046 : 9050 : return c_readstr (str + offset, mode);
4047 : : }
4048 : :
4049 : : /* Helper to check the sizes of sequences and the destination of calls
4050 : : to __builtin_strncat and __builtin___strncat_chk. Returns true on
4051 : : success (no overflow or invalid sizes), false otherwise. */
4052 : :
4053 : : static bool
4054 : 109 : check_strncat_sizes (tree exp, tree objsize)
4055 : : {
4056 : 109 : tree dest = CALL_EXPR_ARG (exp, 0);
4057 : 109 : tree src = CALL_EXPR_ARG (exp, 1);
4058 : 109 : tree maxread = CALL_EXPR_ARG (exp, 2);
4059 : :
4060 : : /* Try to determine the range of lengths that the source expression
4061 : : refers to. */
4062 : 109 : c_strlen_data lendata = { };
4063 : 109 : get_range_strlen (src, &lendata, /* eltsize = */ 1);
4064 : :
4065 : : /* Try to verify that the destination is big enough for the shortest
4066 : : string. */
4067 : :
4068 : 109 : access_data data (nullptr, exp, access_read_write, maxread, true);
4069 : 109 : if (!objsize && warn_stringop_overflow)
4070 : : {
4071 : : /* If it hasn't been provided by __strncat_chk, try to determine
4072 : : the size of the destination object into which the source is
4073 : : being copied. */
4074 : 0 : objsize = compute_objsize (dest, warn_stringop_overflow - 1, &data.dst);
4075 : : }
4076 : :
4077 : : /* Add one for the terminating nul. */
4078 : 109 : tree srclen = (lendata.minlen
4079 : 109 : ? fold_build2 (PLUS_EXPR, size_type_node, lendata.minlen,
4080 : : size_one_node)
4081 : : : NULL_TREE);
4082 : :
4083 : : /* The strncat function copies at most MAXREAD bytes and always appends
4084 : : the terminating nul so the specified upper bound should never be equal
4085 : : to (or greater than) the size of the destination. */
4086 : 56 : if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
4087 : 157 : && tree_int_cst_equal (objsize, maxread))
4088 : : {
4089 : 23 : location_t loc = EXPR_LOCATION (exp);
4090 : 23 : warning_at (loc, OPT_Wstringop_overflow_,
4091 : : "%qD specified bound %E equals destination size",
4092 : : get_callee_fndecl (exp), maxread);
4093 : :
4094 : 23 : return false;
4095 : : }
4096 : :
 : : /* Use MAXREAD as the source length when no (constant) source length
 : : is known, or when the bound is smaller than the source length. */
4097 : 86 : if (!srclen
4098 : 86 : || (maxread && tree_fits_uhwi_p (maxread)
4099 : 33 : && tree_fits_uhwi_p (srclen)
4100 : 33 : && tree_int_cst_lt (maxread, srclen)))
4101 : : srclen = maxread;
4102 : :
4103 : : /* The number of bytes to write is LEN but check_access will also
4104 : : check SRCLEN if LEN's value isn't known. */
4105 : 86 : return check_access (exp, /*dstwrite=*/NULL_TREE, maxread, srclen,
4106 : 86 : objsize, data.mode, &data);
4107 : : }
4108 : :
4109 : : /* Expand expression EXP, which is a call to the strncpy builtin. Return
4110 : : NULL_RTX if we failed; the caller should emit a normal call. */
4111 : :
4112 : : static rtx
4113 : 2168 : expand_builtin_strncpy (tree exp, rtx target)
4114 : : {
4115 : 2168 : location_t loc = EXPR_LOCATION (exp);
4116 : :
4117 : 2168 : if (!validate_arglist (exp,
4118 : : POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4119 : : return NULL_RTX;
4120 : 2156 : tree dest = CALL_EXPR_ARG (exp, 0);
4121 : 2156 : tree src = CALL_EXPR_ARG (exp, 1);
4122 : : /* The number of bytes to write (not the maximum). */
4123 : 2156 : tree len = CALL_EXPR_ARG (exp, 2);
4124 : :
4125 : : /* The length of the source sequence. */
4126 : 2156 : tree slen = c_strlen (src, 1);
4127 : :
4128 : : /* We must be passed a constant len and src parameter. */
4129 : 2156 : if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
4130 : : return NULL_RTX;
4131 : :
 : : /* SLEN now counts the terminating nul as well. */
4132 : 237 : slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
4133 : :
4134 : : /* We're required to pad with trailing zeros if the requested
4135 : : len is greater than strlen(s2)+1. In that case try to
4136 : : use store_by_pieces, if it fails, punt. */
4137 : 237 : if (tree_int_cst_lt (slen, len))
4138 : : {
4139 : 142 : unsigned int dest_align = get_pointer_alignment (dest);
4140 : 142 : const char *p = c_getstr (src);
4141 : 142 : rtx dest_mem;
4142 : :
4143 : 136 : if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
4144 : 278 : || !can_store_by_pieces (tree_to_uhwi (len),
4145 : : builtin_strncpy_read_str,
4146 : : CONST_CAST (char *, p),
4147 : : dest_align, false))
4148 : 14 : return NULL_RTX;
4149 : :
4150 : 128 : dest_mem = get_memory_rtx (dest, len);
4151 : 128 : store_by_pieces (dest_mem, tree_to_uhwi (len),
4152 : : builtin_strncpy_read_str,
4153 : : CONST_CAST (char *, p), dest_align, false,
4154 : : RETURN_BEGIN);
 : : /* strncpy returns DEST; materialize its address in ptr_mode. */
4155 : 128 : dest_mem = force_operand (XEXP (dest_mem, 0), target);
4156 : 128 : dest_mem = convert_memory_address (ptr_mode, dest_mem);
4157 : 128 : return dest_mem;
4158 : : }
4159 : :
4160 : : return NULL_RTX;
4161 : : }
4162 : :
4163 : : /* Return the RTL of a register in MODE generated from PREV in the
4164 : : previous iteration. */
4165 : :
4166 : : static rtx
4167 : 197997 : gen_memset_value_from_prev (by_pieces_prev *prev, fixed_size_mode mode)
4168 : : {
 : : /* Returns nullptr when PREV's value cannot be reused in MODE;
 : : the caller then materializes the value from scratch. */
4169 : 197997 : rtx target = nullptr;
4170 : 197997 : if (prev != nullptr && prev->data != nullptr)
4171 : : {
4172 : : /* Use the previous data in the same mode. */
4173 : 112222 : if (prev->mode == mode)
4174 : 197997 : return prev->data;
4175 : :
4176 : 29958 : fixed_size_mode prev_mode = prev->mode;
4177 : :
4178 : : /* Don't use the previous data to write QImode if it is in a
4179 : : vector mode. */
4180 : 29958 : if (VECTOR_MODE_P (prev_mode) && mode == QImode)
4181 : : return target;
4182 : :
4183 : 29695 : rtx prev_rtx = prev->data;
4184 : :
4185 : 29695 : if (REG_P (prev_rtx)
4186 : 18836 : && HARD_REGISTER_P (prev_rtx)
4187 : 29695 : && lowpart_subreg_regno (REGNO (prev_rtx), prev_mode, mode) < 0)
4188 : : {
4189 : : /* This case occurs when PREV_MODE is a vector and when
4190 : : MODE is too small to store using vector operations.
4191 : : After register allocation, the code will need to move the
4192 : : lowpart of the vector register into a non-vector register.
4193 : :
4194 : : Also, the target has chosen to use a hard register
4195 : : instead of going with the default choice of using a
4196 : : pseudo register. We should respect that choice and try to
4197 : : avoid creating a pseudo register with the same mode as the
4198 : : current hard register.
4199 : :
4200 : : In principle, we could just use a lowpart MODE subreg of
4201 : : the vector register. However, the vector register mode might
4202 : : be too wide for non-vector registers, and we already know
4203 : : that the non-vector mode is too small for vector registers.
4204 : : It's therefore likely that we'd need to spill to memory in
4205 : : the vector mode and reload the non-vector value from there.
4206 : :
4207 : : Try to avoid that by reducing the vector register to the
4208 : : smallest size that it can hold. This should increase the
4209 : : chances that non-vector registers can hold both the inner
4210 : : and outer modes of the subreg that we generate later. */
4211 : 0 : machine_mode m;
4212 : 0 : fixed_size_mode candidate;
4213 : 0 : FOR_EACH_MODE_IN_CLASS (m, GET_MODE_CLASS (mode))
4214 : 0 : if (is_a<fixed_size_mode> (m, &candidate))
4215 : : {
4216 : 0 : if (GET_MODE_SIZE (candidate)
4217 : 0 : >= GET_MODE_SIZE (prev_mode))
4218 : : break;
4219 : 0 : if (GET_MODE_SIZE (candidate) >= GET_MODE_SIZE (mode)
4220 : 0 : && lowpart_subreg_regno (REGNO (prev_rtx),
4221 : : prev_mode, candidate) >= 0)
4222 : : {
4223 : 0 : target = lowpart_subreg (candidate, prev_rtx,
4224 : : prev_mode);
4225 : 0 : prev_rtx = target;
4226 : 0 : prev_mode = candidate;
4227 : 0 : break;
4228 : : }
4229 : : }
 : : /* No suitable narrower mode found: copy into a pseudo so a
 : : lowpart subreg of it can be taken below. */
4230 : 0 : if (target == nullptr)
4231 : 0 : prev_rtx = copy_to_reg (prev_rtx);
4232 : : }
4233 : :
4234 : 29695 : target = lowpart_subreg (mode, prev_rtx, prev_mode);
4235 : : }
4236 : : return target;
4237 : : }
4238 : :
4239 : : /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
4240 : : bytes from constant string DATA + OFFSET and return it as target
4241 : : constant. If PREV isn't nullptr, it has the RTL info from the
4242 : : previous iteration. */
4243 : :
4244 : : rtx
4245 : 196687 : builtin_memset_read_str (void *data, void *prev,
4246 : : HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4247 : : fixed_size_mode mode)
4248 : : {
 : : /* DATA points to the single fill byte; OFFSET is irrelevant
 : : because every position holds the same value. */
4249 : 196687 : const char *c = (const char *) data;
4250 : 196687 : unsigned int size = GET_MODE_SIZE (mode);
4251 : :
 : : /* Reuse the value produced in the previous iteration if possible. */
4252 : 196687 : rtx target = gen_memset_value_from_prev ((by_pieces_prev *) prev,
4253 : : mode);
4254 : 196687 : if (target != nullptr)
4255 : : return target;
4256 : 85308 : rtx src = gen_int_mode (*c, QImode);
4257 : :
4258 : 85308 : if (VECTOR_MODE_P (mode))
4259 : : {
4260 : 114710 : gcc_assert (GET_MODE_INNER (mode) == QImode);
4261 : :
4262 : 57355 : rtx const_vec = gen_const_vec_duplicate (mode, src);
4263 : 57355 : if (prev == NULL)
4264 : : /* Return CONST_VECTOR when called by a query function. */
4265 : : return const_vec;
4266 : :
4267 : : /* Use the move expander with CONST_VECTOR. */
4268 : 41454 : target = gen_reg_rtx (mode);
4269 : 41454 : emit_move_insn (target, const_vec);
4270 : 41454 : return target;
4271 : : }
4272 : :
 : : /* Scalar mode: build a SIZE-byte buffer of the fill byte and read
 : : it back as a MODE-wide constant. */
4273 : 27953 : char *p = XALLOCAVEC (char, size);
4274 : :
4275 : 27953 : memset (p, *c, size);
4276 : :
4277 : 27953 : return c_readstr (p, mode);
4278 : : }
4279 : :
4280 : : /* Callback routine for store_by_pieces. Return the RTL of a register
4281 : : containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
4282 : : char value given in the RTL register data. For example, if mode is
4283 : : 4 bytes wide, return the RTL for 0x01010101*data. If PREV isn't
4284 : : nullptr, it has the RTL info from the previous iteration. */
4285 : :
4286 : : static rtx
4287 : 1479 : builtin_memset_gen_str (void *data, void *prev,
4288 : : HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4289 : : fixed_size_mode mode)
4290 : : {
4291 : 1479 : rtx target, coeff;
4292 : 1479 : size_t size;
4293 : 1479 : char *p;
4294 : :
4295 : 1479 : size = GET_MODE_SIZE (mode);
4296 : 1479 : if (size == 1)
4297 : : return (rtx) data;
4298 : :
4299 : 1310 : target = gen_memset_value_from_prev ((by_pieces_prev *) prev, mode);
4300 : 1310 : if (target != nullptr)
4301 : : return target;
4302 : :
4303 : 730 : if (VECTOR_MODE_P (mode))
4304 : : {
4305 : 448 : gcc_assert (GET_MODE_INNER (mode) == QImode);
4306 : :
4307 : : /* vec_duplicate_optab is a precondition to pick a vector mode for
4308 : : the memset expander. */
4309 : 224 : insn_code icode = optab_handler (vec_duplicate_optab, mode);
4310 : :
4311 : 224 : target = gen_reg_rtx (mode);
4312 : 224 : class expand_operand ops[2];
4313 : 224 : create_output_operand (&ops[0], target, mode);
4314 : 224 : create_input_operand (&ops[1], (rtx) data, QImode);
4315 : 224 : expand_insn (icode, 2, ops);
4316 : 224 : if (!rtx_equal_p (target, ops[0].value))
4317 : 0 : emit_move_insn (target, ops[0].value);
4318 : :
4319 : 224 : return target;
4320 : : }
4321 : :
4322 : 506 : p = XALLOCAVEC (char, size);
4323 : 506 : memset (p, 1, size);
4324 : 506 : coeff = c_readstr (p, mode);
4325 : :
4326 : 506 : target = convert_to_mode (mode, (rtx) data, 1);
4327 : 506 : target = expand_mult (mode, target, coeff, NULL_RTX, 1);
4328 : 506 : return force_reg (mode, target);
4329 : : }
4330 : :
4331 : : /* Expand expression EXP, which is a call to the memset builtin. Return
4332 : : NULL_RTX if we failed; the caller should emit a normal call, otherwise
4333 : : try to get the result in TARGET, if convenient (and in mode MODE if that's
4334 : : convenient). */
4335 : :
4336 : : rtx
4337 : 35995 : expand_builtin_memset (tree exp, rtx target, machine_mode mode)
4338 : : {
4339 : 35995 : if (!validate_arglist (exp,
4340 : : POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4341 : : return NULL_RTX;
4342 : :
 : : /* Break out the arguments and delegate to the common worker that
 : : is also used by bzero expansion. */
4343 : 35986 : tree dest = CALL_EXPR_ARG (exp, 0);
4344 : 35986 : tree val = CALL_EXPR_ARG (exp, 1);
4345 : 35986 : tree len = CALL_EXPR_ARG (exp, 2);
4346 : :
4347 : 35986 : return expand_builtin_memset_args (dest, val, len, target, mode, exp);
4348 : : }
4350 : : /* Check that store_by_pieces allows BITS + LEN (so that we don't
4351 : : expand something too unreasonably long), and every power of 2 in
4352 : : BITS. It is assumed that LEN has already been tested by
4353 : : itself. */
4354 : : static bool
4355 : 23075 : can_store_by_multiple_pieces (unsigned HOST_WIDE_INT bits,
4356 : : by_pieces_constfn constfun,
4357 : : void *constfundata, unsigned int align,
4358 : : bool memsetp,
4359 : : unsigned HOST_WIDE_INT len)
4360 : : {
 : : /* First check the combined size BITS + LEN as a whole. */
4361 : 23075 : if (bits
4362 : 23075 : && !can_store_by_pieces (bits + len, constfun, constfundata,
4363 : : align, memsetp))
4364 : : return false;
4365 : :
4366 : : /* BITS set are expected to be generally in the low range and
4367 : : contiguous. We do NOT want to repeat the test above in case BITS
4368 : : has a single bit set, so we terminate the loop when BITS == BIT.
4369 : : In the unlikely case that BITS has the MSB set, also terminate in
4370 : : case BIT gets shifted out. */
4371 : 1116 : for (unsigned HOST_WIDE_INT bit = 1; bit < bits && bit; bit <<= 1)
4372 : : {
4373 : 840 : if ((bits & bit) == 0)
4374 : 504 : continue;
4375 : :
 : : /* Each power of two present in BITS must be storable on its
 : : own, since it may be issued as a separate conditional store. */
4376 : 336 : if (!can_store_by_pieces (bit, constfun, constfundata,
4377 : : align, memsetp))
4378 : : return false;
4379 : : }
4380 : :
4381 : : return true;
4382 : : }
4383 : :
4384 : : /* Try to store VAL (or, if NULL_RTX, VALC) in LEN bytes starting at TO.
4385 : : Return TRUE if successful, FALSE otherwise. TO is assumed to be
4386 : : aligned at an ALIGN-bits boundary. LEN must be a multiple of
4387 : : 1<<CTZ_LEN between MIN_LEN and MAX_LEN.
4388 : :
4389 : : The strategy is to issue one store_by_pieces for each power of two,
4390 : : from most to least significant, guarded by a test on whether there
4391 : : are at least that many bytes left to copy in LEN.
4392 : :
4393 : : ??? Should we skip some powers of two in favor of loops? Maybe start
4394 : : at the max of TO/LEN/word alignment, at least when optimizing for
4395 : : size, instead of ensuring O(log len) dynamic compares? */
4396 : :
4397 : : bool
4398 : 21375 : try_store_by_multiple_pieces (rtx to, rtx len, unsigned int ctz_len,
4399 : : unsigned HOST_WIDE_INT min_len,
4400 : : unsigned HOST_WIDE_INT max_len,
4401 : : rtx val, char valc, unsigned int align)
4402 : : {
4403 : 21375 : int max_bits = floor_log2 (max_len);
4404 : 21375 : int min_bits = floor_log2 (min_len);
4405 : 21375 : int sctz_len = ctz_len;
4406 : :
4407 : 21375 : gcc_checking_assert (sctz_len >= 0);
4408 : :
4409 : 21375 : if (val)
4410 : 518 : valc = 1;
4411 : :
4412 : : /* Bits more significant than TST_BITS are part of the shared prefix
4413 : : in the binary representation of both min_len and max_len. Since
4414 : : they're identical, we don't need to test them in the loop. */
4415 : 21375 : int tst_bits = (max_bits != min_bits ? max_bits
4416 : 8828 : : floor_log2 (max_len ^ min_len));
4417 : :
4418 : : /* Save the pre-blksize values. */
4419 : 21375 : int orig_max_bits = max_bits;
4420 : 21375 : int orig_tst_bits = tst_bits;
4421 : :
4422 : : /* Check whether it's profitable to start by storing a fixed BLKSIZE
4423 : : bytes, to lower max_bits. In the unlikely case of a constant LEN
4424 : : (implied by identical MAX_LEN and MIN_LEN), we want to issue a
4425 : : single store_by_pieces, but otherwise, select the minimum multiple
4426 : : of the ALIGN (in bytes) and of the MCD of the possible LENs, that
4427 : : brings MAX_LEN below TST_BITS, if that's lower than min_len. */
4428 : 21375 : unsigned HOST_WIDE_INT blksize;
4429 : 21375 : if (max_len > min_len)
4430 : : {
4431 : 12560 : unsigned HOST_WIDE_INT alrng = MAX (HOST_WIDE_INT_1U << ctz_len,
4432 : : align / BITS_PER_UNIT);
4433 : 12560 : blksize = max_len - (HOST_WIDE_INT_1U << tst_bits) + alrng;
4434 : 12560 : blksize &= ~(alrng - 1);
4435 : : }
4436 : 8815 : else if (max_len == min_len)
4437 : : blksize = max_len;
4438 : : else
4439 : : /* Huh, max_len < min_len? Punt. See pr100843.c. */
4440 : : return false;
4441 : 21374 : if (min_len >= blksize
4442 : : /* ??? Maybe try smaller fixed-prefix blksizes before
4443 : : punting? */
4444 : 21374 : && can_store_by_pieces (blksize, builtin_memset_read_str,
4445 : : &valc, align, true))
4446 : : {
4447 : 279 : min_len -= blksize;
4448 : 279 : min_bits = floor_log2 (min_len);
4449 : 279 : max_len -= blksize;
4450 : 279 : max_bits = floor_log2 (max_len);
4451 : :
4452 : 279 : tst_bits = (max_bits != min_bits ? max_bits
4453 : 169 : : floor_log2 (max_len ^ min_len));
4454 : : }
4455 : : else
4456 : : blksize = 0;
4457 : :
4458 : : /* Check that we can use store by pieces for the maximum store count
4459 : : we may issue (initial fixed-size block, plus conditional
4460 : : power-of-two-sized from max_bits to ctz_len. */
4461 : 21374 : unsigned HOST_WIDE_INT xlenest = blksize;
4462 : 21374 : if (max_bits >= 0)
4463 : 21205 : xlenest += ((HOST_WIDE_INT_1U << max_bits) * 2
4464 : 21205 : - (HOST_WIDE_INT_1U << ctz_len));
4465 : 21374 : bool max_loop = false;
4466 : 21374 : bool use_store_by_pieces = true;
4467 : : /* Skip the test in case of overflow in xlenest. It shouldn't
4468 : : happen because of the way max_bits and blksize are related, but
4469 : : it doesn't hurt to test. */
4470 : 21374 : if (blksize > xlenest
4471 : 21374 : || !can_store_by_multiple_pieces (xlenest - blksize,
4472 : : builtin_memset_read_str,
4473 : : &valc, align, true, blksize))
4474 : : {
4475 : 21205 : if (!(flag_inline_stringops & ILSOP_MEMSET))
4476 : : return false;
4477 : :
4478 : 1601 : for (max_bits = orig_max_bits;
4479 : 1708 : max_bits >= sctz_len;
4480 : : --max_bits)
4481 : : {
4482 : 1708 : xlenest = ((HOST_WIDE_INT_1U << max_bits) * 2
4483 : 1708 : - (HOST_WIDE_INT_1U << ctz_len));
4484 : : /* Check that blksize plus the bits to be stored as blocks
4485 : : sized at powers of two can be stored by pieces. This is
4486 : : like the test above, but with smaller max_bits. Skip
4487 : : orig_max_bits (it would be redundant). Also skip in case
4488 : : of overflow. */
4489 : 1708 : if (max_bits < orig_max_bits
4490 : 1601 : && xlenest + blksize >= xlenest
4491 : 3309 : && can_store_by_multiple_pieces (xlenest,
4492 : : builtin_memset_read_str,
4493 : : &valc, align, true, blksize))
4494 : : {
4495 : : max_loop = true;
4496 : : break;
4497 : : }
4498 : 1601 : if (blksize
4499 : 1601 : && can_store_by_multiple_pieces (xlenest,
4500 : : builtin_memset_read_str,
4501 : : &valc, align, true, 0))
4502 : : {
4503 : 0 : max_len += blksize;
4504 : 0 : min_len += blksize;
4505 : 0 : tst_bits = orig_tst_bits;
4506 : 0 : blksize = 0;
4507 : 0 : max_loop = true;
4508 : 0 : break;
4509 : : }
4510 : 1601 : if (max_bits == sctz_len)
4511 : : {
4512 : : /* We'll get here if can_store_by_pieces refuses to
4513 : : store even a single QImode. We'll fall back to
4514 : : QImode stores then. */
4515 : 98 : if (!sctz_len)
4516 : : {
4517 : : blksize = 0;
4518 : : max_loop = true;
4519 : : use_store_by_pieces = false;
4520 : : break;
4521 : : }
4522 : 98 : --sctz_len;
4523 : 98 : --ctz_len;
4524 : : }
4525 : : }
4526 : 107 : if (!max_loop)
4527 : : return false;
4528 : : /* If the boundaries are such that min and max may run a
4529 : : different number of trips in the initial loop, the remainder
4530 : : needs not be between the moduli, so set tst_bits to cover all
4531 : : bits. Otherwise, if the trip counts are the same, max_len
4532 : : has the common prefix, and the previously-computed tst_bits
4533 : : is usable. */
4534 : 107 : if (max_len >> max_bits > min_len >> max_bits)
4535 : 64 : tst_bits = max_bits;
4536 : : }
4537 : :
4538 : 276 : by_pieces_constfn constfun;
4539 : 276 : void *constfundata;
4540 : 276 : if (val)
4541 : : {
4542 : 1 : constfun = builtin_memset_gen_str;
4543 : 1 : constfundata = val = force_reg (TYPE_MODE (unsigned_char_type_node),
4544 : : val);
4545 : : }
4546 : : else
4547 : : {
4548 : : constfun = builtin_memset_read_str;
4549 : : constfundata = &valc;
4550 : : }
4551 : :
4552 : 276 : rtx ptr = copy_addr_to_reg (XEXP (to, 0));
4553 : 276 : rtx rem = copy_to_mode_reg (ptr_mode, convert_to_mode (ptr_mode, len, 0));
4554 : 276 : to = replace_equiv_address (to, ptr);
4555 : 276 : set_mem_align (to, align);
4556 : :
4557 : 276 : if (blksize)
4558 : : {
4559 : 356 : to = store_by_pieces (to, blksize,
4560 : : constfun, constfundata,
4561 : : align, true,
4562 : : max_len != 0 ? RETURN_END : RETURN_BEGIN);
4563 : 188 : if (max_len == 0)
4564 : : return true;
4565 : :
4566 : : /* Adjust PTR, TO and REM. Since TO's address is likely
4567 : : PTR+offset, we have to replace it. */
4568 : 20 : emit_move_insn (ptr, force_operand (XEXP (to, 0), NULL_RTX));
4569 : 20 : to = replace_equiv_address (to, ptr);
4570 : 20 : rtx rem_minus_blksize = plus_constant (ptr_mode, rem, -blksize);
4571 : 20 : emit_move_insn (rem, force_operand (rem_minus_blksize, NULL_RTX));
4572 : : }
4573 : :
4574 : : /* Iterate over power-of-two block sizes from the maximum length to
4575 : : the least significant bit possibly set in the length. */
4576 : 493 : for (int i = max_bits; i >= sctz_len; i--)
4577 : : {
4578 : 385 : rtx_code_label *loop_label = NULL;
4579 : 385 : rtx_code_label *label = NULL;
4580 : :
4581 : 385 : blksize = HOST_WIDE_INT_1U << i;
4582 : :
4583 : : /* If we're past the bits shared between min_ and max_len, expand
4584 : : a test on the dynamic length, comparing it with the
4585 : : BLKSIZE. */
4586 : 385 : if (i <= tst_bits)
4587 : : {
4588 : 275 : label = gen_label_rtx ();
4589 : 275 : emit_cmp_and_jump_insns (rem, GEN_INT (blksize), LT, NULL,
4590 : : ptr_mode, 1, label,
4591 : : profile_probability::even ());
4592 : : }
4593 : : /* If we are at a bit that is in the prefix shared by min_ and
4594 : : max_len, skip the current BLKSIZE if the bit is clear, but do
4595 : : not skip the loop, even if it doesn't require
4596 : : prechecking. */
4597 : 110 : else if ((max_len & blksize) == 0
4598 : 69 : && !(max_loop && i == max_bits))
4599 : 39 : continue;
4600 : :
4601 : 346 : if (max_loop && i == max_bits)
4602 : : {
4603 : 107 : loop_label = gen_label_rtx ();
4604 : 107 : emit_label (loop_label);
4605 : : /* Since we may run this multiple times, don't assume we
4606 : : know anything about the offset. */
4607 : 107 : clear_mem_offset (to);
4608 : : }
4609 : :
4610 : 346 : bool update_needed = i != sctz_len || loop_label;
4611 : 346 : rtx next_ptr = NULL_RTX;
4612 : 346 : if (!use_store_by_pieces)
4613 : : {
4614 : 0 : gcc_checking_assert (blksize == 1);
4615 : 0 : if (!val)
4616 : 0 : val = gen_int_mode (valc, QImode);
4617 : 0 : to = change_address (to, QImode, 0);
4618 : 0 : emit_move_insn (to, val);
4619 : 0 : if (update_needed)
4620 : 0 : next_ptr = plus_constant (GET_MODE (ptr), ptr, blksize);
4621 : : }
4622 : : else
4623 : : {
4624 : : /* Issue a store of BLKSIZE bytes. */
4625 : 404 : to = store_by_pieces (to, blksize,
4626 : : constfun, constfundata,
4627 : : align, true,
4628 : : update_needed ? RETURN_END : RETURN_BEGIN);
4629 : 346 : next_ptr = XEXP (to, 0);
4630 : : }
4631 : : /* Adjust REM and PTR, unless this is the last iteration. */
4632 : 346 : if (update_needed)
4633 : : {
4634 : 288 : emit_move_insn (ptr, force_operand (next_ptr, NULL_RTX));
4635 : 288 : to = replace_equiv_address (to, ptr);
4636 : 288 : rtx rem_minus_blksize = plus_constant (ptr_mode, rem, -blksize);
4637 : 288 : emit_move_insn (rem, force_operand (rem_minus_blksize, NULL_RTX));
4638 : : }
4639 : :
4640 : 288 : if (loop_label)
4641 : 107 : emit_cmp_and_jump_insns (rem, GEN_INT (blksize), GE, NULL,
4642 : : ptr_mode, 1, loop_label,
4643 : : profile_probability::likely ());
4644 : :
4645 : 346 : if (label)
4646 : : {
4647 : 275 : emit_label (label);
4648 : :
4649 : : /* Given conditional stores, the offset can no longer be
4650 : : known, so clear it. */
4651 : 275 : clear_mem_offset (to);
4652 : : }
4653 : : }
4654 : :
4655 : : return true;
4656 : : }
4657 : :
4658 : : /* Helper function to do the actual work for expand_builtin_memset. The
4659 : : arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4660 : : so that this can also be called without constructing an actual CALL_EXPR.
4661 : : The other arguments and return value are the same as for
4662 : : expand_builtin_memset. */
4663 : :
4664 : : static rtx
4665 : 35986 : expand_builtin_memset_args (tree dest, tree val, tree len,
4666 : : rtx target, machine_mode mode, tree orig_exp)
4667 : : {
4668 : 35986 : tree fndecl, fn;
4669 : 35986 : enum built_in_function fcode;
4670 : 35986 : machine_mode val_mode;
4671 : 35986 : char c;
4672 : 35986 : unsigned int dest_align;
4673 : 35986 : rtx dest_mem, dest_addr, len_rtx;
4674 : 35986 : HOST_WIDE_INT expected_size = -1;
4675 : 35986 : unsigned int expected_align = 0;
4676 : 35986 : unsigned HOST_WIDE_INT min_size;
4677 : 35986 : unsigned HOST_WIDE_INT max_size;
4678 : 35986 : unsigned HOST_WIDE_INT probable_max_size;
4679 : :
4680 : 35986 : dest_align = get_pointer_alignment (dest);
4681 : :
4682 : : /* If DEST is not a pointer type, don't do this operation in-line. */
4683 : 35986 : if (dest_align == 0)
4684 : : return NULL_RTX;
4685 : :
4686 : 35986 : if (currently_expanding_gimple_stmt)
4687 : 35986 : stringop_block_profile (currently_expanding_gimple_stmt,
4688 : : &expected_align, &expected_size);
4689 : :
4690 : 35986 : if (expected_align < dest_align)
4691 : 35981 : expected_align = dest_align;
4692 : :
4693 : : /* If the LEN parameter is zero, return DEST. */
4694 : 35986 : if (integer_zerop (len))
4695 : : {
4696 : : /* Evaluate and ignore VAL in case it has side-effects. */
4697 : 0 : expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4698 : 0 : return expand_expr (dest, target, mode, EXPAND_NORMAL);
4699 : : }
4700 : :
4701 : : /* Stabilize the arguments in case we fail. */
4702 : 35986 : dest = builtin_save_expr (dest);
4703 : 35986 : val = builtin_save_expr (val);
4704 : 35986 : len = builtin_save_expr (len);
4705 : :
4706 : 35986 : len_rtx = expand_normal (len);
4707 : 35986 : determine_block_size (len, len_rtx, &min_size, &max_size,
4708 : : &probable_max_size);
4709 : 35986 : dest_mem = get_memory_rtx (dest, len);
4710 : 35986 : val_mode = TYPE_MODE (unsigned_char_type_node);
4711 : :
 : : /* Case 1: VAL is not a known constant byte. */
4712 : 35986 : if (TREE_CODE (val) != INTEGER_CST
4713 : 35986 : || target_char_cast (val, &c))
4714 : : {
4715 : 1722 : rtx val_rtx;
4716 : :
4717 : 1722 : val_rtx = expand_normal (val);
4718 : 1722 : val_rtx = convert_to_mode (val_mode, val_rtx, 0);
4719 : :
4720 : : /* Assume that we can memset by pieces if we can store
4721 : : * the coefficients by pieces (in the required modes).
4722 : : * We can't pass builtin_memset_gen_str as that emits RTL. */
4723 : 1722 : c = 1;
4724 : 1722 : if (tree_fits_uhwi_p (len)
4725 : 1722 : && can_store_by_pieces (tree_to_uhwi (len),
4726 : : builtin_memset_read_str, &c, dest_align,
4727 : : true))
4728 : : {
4729 : 772 : val_rtx = force_reg (val_mode, val_rtx);
4730 : 772 : store_by_pieces (dest_mem, tree_to_uhwi (len),
4731 : : builtin_memset_gen_str, val_rtx, dest_align,
4732 : : true, RETURN_BEGIN);
4733 : : }
4734 : 950 : else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4735 : : dest_align, expected_align,
4736 : : expected_size, min_size, max_size,
4737 : : probable_max_size)
4738 : 950 : && !try_store_by_multiple_pieces (dest_mem, len_rtx,
4739 : : tree_ctz (len),
4740 : : min_size, max_size,
4741 : : val_rtx, 0,
4742 : : dest_align))
4743 : 517 : goto do_libcall;
4744 : :
4745 : 1205 : dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4746 : 1205 : dest_mem = convert_memory_address (ptr_mode, dest_mem);
4747 : 1205 : return dest_mem;
4748 : : }
4749 : :
 : : /* Case 2: VAL is a constant nonzero byte C. */
4750 : 34264 : if (c)
4751 : : {
4752 : 12807 : if (tree_fits_uhwi_p (len)
4753 : 12807 : && can_store_by_pieces (tree_to_uhwi (len),
4754 : : builtin_memset_read_str, &c, dest_align,
4755 : : true))
4756 : 4868 : store_by_pieces (dest_mem, tree_to_uhwi (len),
4757 : : builtin_memset_read_str, &c, dest_align, true,
4758 : : RETURN_BEGIN);
4759 : 9698 : else if (!set_storage_via_setmem (dest_mem, len_rtx,
4760 : 7939 : gen_int_mode (c, val_mode),
4761 : : dest_align, expected_align,
4762 : : expected_size, min_size, max_size,
4763 : : probable_max_size)
4764 : 7939 : && !try_store_by_multiple_pieces (dest_mem, len_rtx,
4765 : : tree_ctz (len),
4766 : : min_size, max_size,
4767 : : NULL_RTX, c,
4768 : : dest_align))
4769 : 6180 : goto do_libcall;
4770 : :
4771 : 6627 : dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4772 : 6627 : dest_mem = convert_memory_address (ptr_mode, dest_mem);
4773 : 6627 : return dest_mem;
4774 : : }
4775 : :
 : : /* Case 3: zero fill; use the block-clear expander. */
4776 : 21457 : set_mem_align (dest_mem, dest_align);
4777 : 42914 : dest_addr = clear_storage_hints (dest_mem, len_rtx,
4778 : 21457 : CALL_EXPR_TAILCALL (orig_exp)
4779 : : ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4780 : : expected_align, expected_size,
4781 : : min_size, max_size,
4782 : : probable_max_size, tree_ctz (len));
4783 : :
4784 : 21457 : if (dest_addr == 0)
4785 : : {
4786 : 12761 : dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4787 : 12761 : dest_addr = convert_memory_address (ptr_mode, dest_addr);
4788 : : }
4789 : :
4790 : : return dest_addr;
4791 : :
 : : /* Inline expansion failed: emit an explicit call to the original
 : : builtin (memset or bzero), preserving tail-call status. */
4792 : 6697 : do_libcall:
4793 : 6697 : fndecl = get_callee_fndecl (orig_exp);
4794 : 6697 : fcode = DECL_FUNCTION_CODE (fndecl);
4795 : 6697 : if (fcode == BUILT_IN_MEMSET)
4796 : 6697 : fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4797 : : dest, val, len);
4798 : 0 : else if (fcode == BUILT_IN_BZERO)
4799 : 0 : fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4800 : : dest, len);
4801 : : else
4802 : 0 : gcc_unreachable ();
4803 : 6697 : gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4804 : 6697 : CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4805 : 6697 : return expand_call (fn, target, target == const0_rtx);
4806 : : }
4808 : : /* Expand expression EXP, which is a call to the bzero builtin. Return
4809 : : NULL_RTX if we failed the caller should emit a normal call. */
4810 : :
 4812 : : static rtx
 4813 : 0 : expand_builtin_bzero (tree exp)
 4814 : : {
 4815 : 0 : if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
 4816 : : return NULL_RTX;
 4817 : :
 4818 : 0 : tree dest = CALL_EXPR_ARG (exp, 0);
 4819 : 0 : tree size = CALL_EXPR_ARG (exp, 1);
 4820 : :
 4821 : : /* New argument list transforming bzero(ptr x, int y) to
 4822 : : memset(ptr x, int 0, size_t y). This is done this way
 4823 : : so that if it isn't expanded inline, we fallback to
 4824 : : calling bzero instead of memset. */
 4825 : :
 4826 : 0 : location_t loc = EXPR_LOCATION (exp);
 4827 : :
 : : /* VAL_RTX is const0_rtx because bzero always stores zero bytes;
 : : passing EXP as ORIG_EXP lets the expansion fall back to a bzero
 : : library call rather than memset (see comment above). */
 4828 : 0 : return expand_builtin_memset_args (dest, integer_zero_node,
 4829 : : fold_convert_loc (loc,
 4830 : : size_type_node, size),
 4831 : 0 : const0_rtx, VOIDmode, exp);
 4832 : : }
4832 : :
4833 : : /* Try to expand cmpstr operation ICODE with the given operands.
4834 : : Return the result rtx on success, otherwise return null. */
4835 : :
 4837 : : static rtx
 4838 : 0 : expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
 4839 : : HOST_WIDE_INT align)
 4840 : : {
 4841 : 0 : machine_mode insn_mode = insn_data[icode].operand[0].mode;
 4842 : :
 : : /* A hard register (or a non-register) is unsuitable as the output
 : : operand here; drop it so the expander picks a fresh pseudo. */
 4843 : 0 : if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
 4844 : : target = NULL_RTX;
 4845 : :
 : : /* Operands: result, the two memory blocks, and the common alignment. */
 4846 : 0 : class expand_operand ops[4];
 4847 : 0 : create_output_operand (&ops[0], target, insn_mode);
 4848 : 0 : create_fixed_operand (&ops[1], arg1_rtx);
 4849 : 0 : create_fixed_operand (&ops[2], arg2_rtx);
 4850 : 0 : create_integer_operand (&ops[3], align);
 4851 : 0 : if (maybe_expand_insn (icode, 4, ops))
 4852 : 0 : return ops[0].value;
 4853 : : return NULL_RTX;
 4854 : : }
4854 : :
4855 : : /* Expand expression EXP, which is a call to the memcmp built-in function.
4856 : : Return NULL_RTX if we failed and the caller should emit a normal call,
4857 : : otherwise try to get the result in TARGET, if convenient.
4858 : : RESULT_EQ is true if we can relax the returned value to be either zero
4859 : : or nonzero, without caring about the sign. */
4860 : :
 4862 : : static rtx
 4863 : 102852 : expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
 4864 : : {
 4865 : 102852 : if (!validate_arglist (exp,
 4866 : : POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
 4867 : : return NULL_RTX;
 4868 : :
 4869 : 102848 : tree arg1 = CALL_EXPR_ARG (exp, 0);
 4870 : 102848 : tree arg2 = CALL_EXPR_ARG (exp, 1);
 4871 : 102848 : tree len = CALL_EXPR_ARG (exp, 2);
 4872 : :
 4873 : : /* Due to the performance benefit, always inline the calls first
 4874 : : when result_eq is false. */
 4875 : 102848 : rtx result = NULL_RTX;
 4876 : 102848 : enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
 4877 : 102848 : if (!result_eq && fcode != BUILT_IN_BCMP)
 4878 : : {
 4879 : 14302 : result = inline_expand_builtin_bytecmp (exp, target);
 4880 : 14302 : if (result)
 4881 : : return result;
 4882 : : }
 4883 : :
 4884 : 102843 : machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
 4885 : 102843 : location_t loc = EXPR_LOCATION (exp);
 4886 : :
 4887 : 102843 : unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
 4888 : 102843 : unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
 4889 : :
 4890 : : /* If we don't have POINTER_TYPE, call the function. */
 4891 : 102843 : if (arg1_align == 0 || arg2_align == 0)
 4892 : : return NULL_RTX;
 4893 : :
 4894 : 102843 : rtx arg1_rtx = get_memory_rtx (arg1, len);
 4895 : 102843 : rtx arg2_rtx = get_memory_rtx (arg2, len);
 4896 : 102843 : rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
 4897 : :
 4898 : : /* Set MEM_SIZE as appropriate. */
 4899 : 102843 : if (CONST_INT_P (len_rtx))
 4900 : : {
 4901 : 78305 : set_mem_size (arg1_rtx, INTVAL (len_rtx));
 4902 : 78305 : set_mem_size (arg2_rtx, INTVAL (len_rtx));
 4903 : : }
 4904 : :
 4905 : 102843 : by_pieces_constfn constfn = NULL;
 4906 : :
 4907 : : /* Try to get the byte representation of the constant ARG2 (or, only
 4908 : : when the function's result is used for equality to zero, ARG1)
 4909 : : points to, with its byte size in NBYTES. */
 4910 : 102843 : unsigned HOST_WIDE_INT nbytes;
 4911 : 102843 : const char *rep = getbyterep (arg2, &nbytes);
 4912 : 102843 : if (result_eq && rep == NULL)
 4913 : : {
 4914 : : /* For equality to zero the arguments are interchangeable. */
 4915 : 62922 : rep = getbyterep (arg1, &nbytes);
 4916 : 62922 : if (rep != NULL)
 4917 : : std::swap (arg1_rtx, arg2_rtx);
 4918 : : }
 4919 : :
 4920 : : /* If the function's constant bound LEN_RTX is less than or equal
 4921 : : to the byte size of the representation of the constant argument,
 4922 : : and if block move would be done by pieces, we can avoid loading
 4923 : : the bytes from memory and only store the computed constant result. */
 4924 : 39921 : if (rep
 4925 : 27599 : && CONST_INT_P (len_rtx)
 4926 : 27443 : && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes)
 4927 : 102843 : constfn = builtin_memcpy_read_str;
 4928 : :
 : : /* Expand the block comparison; a NULL result means we failed and
 : : the caller emits a normal library call (see function comment). */
 4929 : 205686 : result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
 4930 : 102843 : TREE_TYPE (len), target,
 4931 : : result_eq, constfn,
 4932 : : CONST_CAST (char *, rep),
 4933 : : tree_ctz (len));
 4934 : :
 4935 : 102843 : if (result)
 4936 : : {
 4937 : : /* Return the value in the proper mode for this function. */
 4938 : 66236 : if (GET_MODE (result) == mode)
 4939 : : return result;
 4940 : :
 4941 : 0 : if (target != 0)
 4942 : : {
 4943 : 0 : convert_move (target, result, 0);
 4944 : 0 : return target;
 4945 : : }
 4946 : :
 4947 : 0 : return convert_to_mode (mode, result, 0);
 4948 : : }
 4949 : :
 4950 : : return NULL_RTX;
 4951 : : }
4951 : :
4952 : : /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4953 : : if we failed the caller should emit a normal call, otherwise try to get
4954 : : the result in TARGET, if convenient. */
4955 : :
 4957 : : static rtx
 4958 : 127931 : expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
 4959 : : {
 4960 : 127931 : if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
 4961 : : return NULL_RTX;
 4962 : :
 4963 : 127916 : tree arg1 = CALL_EXPR_ARG (exp, 0);
 4964 : 127916 : tree arg2 = CALL_EXPR_ARG (exp, 1);
 4965 : :
 4966 : : /* Due to the performance benefit, always inline the calls first. */
 4967 : 127916 : rtx result = NULL_RTX;
 4968 : 127916 : result = inline_expand_builtin_bytecmp (exp, target);
 4969 : 127916 : if (result)
 4970 : : return result;
 4971 : :
 4972 : 127517 : insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
 4973 : 127517 : insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
 4974 : 127517 : if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
 4975 : : return NULL_RTX;
 4976 : :
 4977 : 127517 : unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
 4978 : 127517 : unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
 4979 : :
 4980 : : /* If we don't have POINTER_TYPE, call the function. */
 4981 : 127517 : if (arg1_align == 0 || arg2_align == 0)
 4982 : : return NULL_RTX;
 4983 : :
 4984 : : /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
 4985 : 127517 : arg1 = builtin_save_expr (arg1);
 4986 : 127517 : arg2 = builtin_save_expr (arg2);
 4987 : :
 4988 : 127517 : rtx arg1_rtx = get_memory_rtx (arg1, NULL);
 4989 : 127517 : rtx arg2_rtx = get_memory_rtx (arg2, NULL);
 4990 : :
 4991 : : /* Try to call cmpstrsi. */
 4992 : 127517 : if (cmpstr_icode != CODE_FOR_nothing)
 4993 : 0 : result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
 4994 : 0 : MIN (arg1_align, arg2_align));
 4995 : :
 4996 : : /* Try to determine at least one length and call cmpstrnsi. */
 4997 : 127517 : if (!result && cmpstrn_icode != CODE_FOR_nothing)
 4998 : : {
 4999 : 127517 : tree len;
 5000 : 127517 : rtx arg3_rtx;
 5001 : :
 5002 : 127517 : tree len1 = c_strlen (arg1, 1);
 5003 : 127517 : tree len2 = c_strlen (arg2, 1);
 5004 : :
 : : /* Add one to each length to cover the terminating NUL, so the
 : : whole comparable prefix (including the terminator) is used. */
 5005 : 127517 : if (len1)
 5006 : 224 : len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
 5007 : 127517 : if (len2)
 5008 : 125889 : len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
 5009 : :
 5010 : : /* If we don't have a constant length for the first, use the length
 5011 : : of the second, if we know it. We don't require a constant for
 5012 : : this case; some cost analysis could be done if both are available
 5013 : : but neither is constant. For now, assume they're equally cheap,
 5014 : : unless one has side effects. If both strings have constant lengths,
 5015 : : use the smaller. */
 5016 : :
 5017 : 127517 : if (!len1)
 5018 : : len = len2;
 5019 : 224 : else if (!len2)
 5020 : : len = len1;
 5021 : 17 : else if (TREE_SIDE_EFFECTS (len1))
 5022 : : len = len2;
 5023 : 17 : else if (TREE_SIDE_EFFECTS (len2))
 5024 : : len = len1;
 5025 : 17 : else if (TREE_CODE (len1) != INTEGER_CST)
 5026 : : len = len2;
 5027 : 17 : else if (TREE_CODE (len2) != INTEGER_CST)
 5028 : : len = len1;
 5029 : 10 : else if (tree_int_cst_lt (len1, len2))
 5030 : : len = len1;
 5031 : : else
 5032 : 127299 : len = len2;
 5033 : :
 5034 : : /* If both arguments have side effects, we cannot optimize. */
 5035 : 127517 : if (len && !TREE_SIDE_EFFECTS (len))
 5036 : : {
 5037 : 126096 : arg3_rtx = expand_normal (len);
 5038 : 126096 : result = expand_cmpstrn_or_cmpmem
 5039 : 126096 : (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
 5040 : 126096 : arg3_rtx, MIN (arg1_align, arg2_align));
 5041 : : }
 5042 : : }
 5043 : :
 5044 : 127517 : tree fndecl = get_callee_fndecl (exp);
 5045 : 127517 : if (result)
 5046 : : {
 5047 : : /* Return the value in the proper mode for this function. */
 5048 : 52 : machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
 5049 : 52 : if (GET_MODE (result) == mode)
 5050 : : return result;
 5051 : 0 : if (target == 0)
 5052 : 0 : return convert_to_mode (mode, result, 0);
 5053 : 0 : convert_move (target, result, 0);
 5054 : 0 : return target;
 5055 : : }
 5056 : :
 5057 : : /* Expand the library call ourselves using a stabilized argument
 5058 : : list to avoid re-evaluating the function's arguments twice. */
 5059 : 127465 : tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
 5060 : 127465 : copy_warning (fn, exp);
 5061 : 127465 : gcc_assert (TREE_CODE (fn) == CALL_EXPR);
 5062 : 127465 : CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
 5063 : 127465 : return expand_call (fn, target, target == const0_rtx);
 5064 : : }
5064 : :
5065 : : /* Expand expression EXP, which is a call to the strncmp builtin. Return
5066 : : NULL_RTX if we failed the caller should emit a normal call, otherwise
5067 : : try to get the result in TARGET, if convenient. */
5068 : :
 5070 : : static rtx
 5071 : 2029 : expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
 5072 : : ATTRIBUTE_UNUSED machine_mode mode)
 5073 : : {
 5074 : 2029 : if (!validate_arglist (exp,
 5075 : : POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
 5076 : : return NULL_RTX;
 5077 : :
 5078 : 2023 : tree arg1 = CALL_EXPR_ARG (exp, 0);
 5079 : 2023 : tree arg2 = CALL_EXPR_ARG (exp, 1);
 5080 : 2023 : tree arg3 = CALL_EXPR_ARG (exp, 2);
 5081 : :
 5082 : 2023 : location_t loc = EXPR_LOCATION (exp);
 5083 : 2023 : tree len1 = c_strlen (arg1, 1);
 5084 : 2023 : tree len2 = c_strlen (arg2, 1);
 5085 : :
 5086 : : /* Due to the performance benefit, always inline the calls first. */
 5087 : 2023 : rtx result = NULL_RTX;
 5088 : 2023 : result = inline_expand_builtin_bytecmp (exp, target);
 5089 : 2023 : if (result)
 5090 : : return result;
 5091 : :
 5092 : : /* If c_strlen can determine an expression for one of the string
 5093 : : lengths, and it doesn't have side effects, then emit cmpstrnsi
 5094 : : using length MIN(strlen(string)+1, arg3). */
 5095 : 1805 : insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
 5096 : 1805 : if (cmpstrn_icode == CODE_FOR_nothing)
 5097 : : return NULL_RTX;
 5098 : :
 5099 : 1805 : tree len;
 5100 : :
 5101 : 1805 : unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
 5102 : 1805 : unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
 5103 : :
 : : /* Add one to each known length to include the terminating NUL. */
 5104 : 1805 : if (len1)
 5105 : 160 : len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
 5106 : 1805 : if (len2)
 5107 : 676 : len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
 5108 : :
 5109 : 1805 : tree len3 = fold_convert_loc (loc, sizetype, arg3);
 5110 : :
 5111 : : /* If we don't have a constant length for the first, use the length
 5112 : : of the second, if we know it. If neither string is constant length,
 5113 : : use the given length argument. We don't require a constant for
 5114 : : this case; some cost analysis could be done if both are available
 5115 : : but neither is constant. For now, assume they're equally cheap,
 5116 : : unless one has side effects. If both strings have constant lengths,
 5117 : : use the smaller. */
 5118 : :
 5119 : 1805 : if (!len1 && !len2)
 5120 : : len = len3;
 5121 : 719 : else if (!len1)
 5122 : : len = len2;
 5123 : 160 : else if (!len2)
 5124 : : len = len1;
 5125 : 117 : else if (TREE_SIDE_EFFECTS (len1))
 5126 : : len = len2;
 5127 : 117 : else if (TREE_SIDE_EFFECTS (len2))
 5128 : : len = len1;
 5129 : 117 : else if (TREE_CODE (len1) != INTEGER_CST)
 5130 : : len = len2;
 5131 : 117 : else if (TREE_CODE (len2) != INTEGER_CST)
 5132 : : len = len1;
 5133 : 117 : else if (tree_int_cst_lt (len1, len2))
 5134 : : len = len1;
 5135 : : else
 5136 : 560 : len = len2;
 5137 : :
 5138 : : /* If we are not using the given length, we must incorporate it here.
 5139 : : The actual new length parameter will be MIN(len,arg3) in this case. */
 5140 : 1805 : if (len != len3)
 5141 : : {
 5142 : 719 : len = fold_convert_loc (loc, sizetype, len);
 5143 : 719 : len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
 5144 : : }
 5145 : 1805 : rtx arg1_rtx = get_memory_rtx (arg1, len);
 5146 : 1805 : rtx arg2_rtx = get_memory_rtx (arg2, len);
 5147 : 1805 : rtx arg3_rtx = expand_normal (len);
 5148 : 1805 : result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
 5149 : 1805 : arg2_rtx, TREE_TYPE (len), arg3_rtx,
 5150 : 1805 : MIN (arg1_align, arg2_align));
 5151 : :
 5152 : 1805 : tree fndecl = get_callee_fndecl (exp);
 5153 : 1805 : if (result)
 5154 : : {
 5155 : : /* Return the value in the proper mode for this function. */
 5156 : 21 : mode = TYPE_MODE (TREE_TYPE (exp));
 5157 : 21 : if (GET_MODE (result) == mode)
 5158 : : return result;
 5159 : 0 : if (target == 0)
 5160 : 0 : return convert_to_mode (mode, result, 0);
 5161 : 0 : convert_move (target, result, 0);
 5162 : 0 : return target;
 5163 : : }
 5164 : :
 5165 : : /* Expand the library call ourselves using a stabilized argument
 5166 : : list to avoid re-evaluating the function's arguments twice. */
 5167 : 1784 : tree call = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
 5168 : 1784 : copy_warning (call, exp);
 5169 : 1784 : gcc_assert (TREE_CODE (call) == CALL_EXPR);
 5170 : 1784 : CALL_EXPR_TAILCALL (call) = CALL_EXPR_TAILCALL (exp);
 5171 : 1784 : return expand_call (call, target, target == const0_rtx);
 5172 : : }
5172 : :
5173 : : /* Expand a call to __builtin_saveregs, generating the result in TARGET,
5174 : : if that's convenient. */
5175 : :
 5177 : : rtx
 5178 : 0 : expand_builtin_saveregs (void)
 5179 : : {
 5180 : 0 : rtx val;
 5181 : 0 : rtx_insn *seq;
 5182 : :
 5183 : : /* Don't do __builtin_saveregs more than once in a function.
 5184 : : Save the result of the first call and reuse it. */
 5185 : 0 : if (saveregs_value != 0)
 5186 : : return saveregs_value;
 5187 : :
 5188 : : /* When this function is called, it means that registers must be
 5189 : : saved on entry to this function. So we migrate the call to the
 5190 : : first insn of this function. */
 5191 : :
 5192 : 0 : start_sequence ();
 5193 : :
 5194 : : /* Do whatever the machine needs done in this case. */
 5195 : 0 : val = targetm.calls.expand_builtin_saveregs ();
 5196 : :
 5197 : 0 : seq = end_sequence ();
 5198 : :
 : : /* Cache the value so subsequent calls in this function reuse it. */
 5199 : 0 : saveregs_value = val;
 5200 : :
 5201 : : /* Put the insns after the NOTE that starts the function. If this
 5202 : : is inside a start_sequence, make the outer-level insn chain current, so
 5203 : : the code is placed at the start of the function. */
 5204 : 0 : push_topmost_sequence ();
 5205 : 0 : emit_insn_after (seq, entry_of_function ());
 5206 : 0 : pop_topmost_sequence ();
 5207 : :
 5208 : 0 : return val;
 5209 : : }
5209 : :
5210 : : /* Expand a call to __builtin_next_arg. */
5211 : :
 5213 : : static rtx
 5214 : 21042 : expand_builtin_next_arg (void)
 5215 : : {
 5216 : : /* Checking arguments is already done in fold_builtin_next_arg
 5217 : : that must be called before this function. */
 : : /* The address past the last named argument is the internal arg
 : : pointer plus the target's argument offset. */
 5218 : 21042 : return expand_binop (ptr_mode, add_optab,
 5219 : : crtl->args.internal_arg_pointer,
 5220 : : crtl->args.arg_offset_rtx,
 5221 : 21042 : NULL_RTX, 0, OPTAB_LIB_WIDEN);
 5222 : : }
5222 : :
5223 : : /* Make it easier for the backends by protecting the valist argument
5224 : : from multiple evaluations. */
5225 : :
 5227 : : static tree
 5228 : 21376 : stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
 5229 : : {
 5230 : 21376 : tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
 5231 : :
 5232 : : /* The current way of determining the type of valist is completely
 5233 : : bogus. We should have the information on the va builtin instead. */
 5234 : 21376 : if (!vatype)
 5235 : 21252 : vatype = targetm.fn_abi_va_list (cfun->decl);
 5236 : :
 5237 : 21376 : if (TREE_CODE (vatype) == ARRAY_TYPE)
 5238 : : {
 5239 : 15714 : if (TREE_SIDE_EFFECTS (valist))
 5240 : 0 : valist = save_expr (valist);
 5241 : :
 5242 : : /* For this case, the backends will be expecting a pointer to
 5243 : : vatype, but it's possible we've actually been given an array
 5244 : : (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
 5245 : : So fix it. */
 5246 : 15714 : if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
 5247 : : {
 5248 : 0 : tree p1 = build_pointer_type (TREE_TYPE (vatype));
 5249 : 0 : valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
 5250 : : }
 5251 : : }
 5252 : : else
 5253 : : {
 5254 : 5662 : tree pt = build_pointer_type (vatype);
 5255 : :
 5256 : 5662 : if (! needs_lvalue)
 5257 : : {
 5258 : 6 : if (! TREE_SIDE_EFFECTS (valist))
 5259 : : return valist;
 5260 : :
 5261 : 0 : valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
 5262 : 0 : TREE_SIDE_EFFECTS (valist) = 1;
 5263 : : }
 5264 : :
 5265 : 5264 : if (TREE_SIDE_EFFECTS (valist))
 5266 : 0 : valist = save_expr (valist);
 : : /* Build a dereference of VALIST so later code sees an lvalue of
 : : the canonical va_list type. */
 5267 : 5656 : valist = fold_build2_loc (loc, MEM_REF,
 5268 : : vatype, valist, build_int_cst (pt, 0));
 5269 : : }
 5270 : :
 5271 : : return valist;
 5272 : : }
5272 : :
5273 : : /* The "standard" definition of va_list is void*. */
5274 : :
 5276 : : tree
 5277 : 0 : std_build_builtin_va_list (void)
 5278 : : {
 : : /* The default ABI treats va_list as a plain void pointer. */
 5279 : 0 : return ptr_type_node;
 5280 : : }
5280 : :
5281 : : /* The "standard" abi va_list is va_list_type_node. */
5282 : :
 5284 : : tree
 5285 : 0 : std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
 5286 : : {
 : : /* One va_list type for every function ABI in the default case. */
 5287 : 0 : return va_list_type_node;
 5288 : : }
5288 : :
5289 : : /* The "standard" type of va_list is va_list_type_node. */
5290 : :
 5292 : : tree
 5293 : 502 : std_canonical_va_list_type (tree type)
 5294 : : {
 5295 : 502 : tree wtype, htype;
 5296 : :
 5297 : 502 : wtype = va_list_type_node;
 5298 : 502 : htype = type;
 5299 : :
 5300 : 502 : if (TREE_CODE (wtype) == ARRAY_TYPE)
 5301 : : {
 5302 : : /* If va_list is an array type, the argument may have decayed
 5303 : : to a pointer type, e.g. by being passed to another function.
 5304 : : In that case, unwrap both types so that we can compare the
 5305 : : underlying records. */
 5306 : 0 : if (TREE_CODE (htype) == ARRAY_TYPE
 5307 : 0 : || POINTER_TYPE_P (htype))
 5308 : : {
 5309 : 0 : wtype = TREE_TYPE (wtype);
 5310 : 0 : htype = TREE_TYPE (htype);
 5311 : : }
 5312 : : }
 5313 : 502 : if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
 5314 : 332 : return va_list_type_node;
 5315 : :
 : : /* TYPE is not a variant of va_list; tell the caller so. */
 5316 : : return NULL_TREE;
 5317 : : }
5317 : :
5318 : : /* The "standard" implementation of va_start: just assign `nextarg' to
5319 : : the variable. */
5320 : :
 5322 : : void
 5323 : 5652 : std_expand_builtin_va_start (tree valist, rtx nextarg)
 5324 : : {
 : : /* Expand VALIST as a writable lvalue and store NEXTARG into it. */
 5325 : 5652 : rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
 5326 : 5652 : convert_move (va_r, nextarg, 0);
 5327 : : }
5327 : :
5328 : : /* Expand EXP, a call to __builtin_va_start. */
5329 : :
 5331 : : static rtx
 5332 : 20892 : expand_builtin_va_start (tree exp)
 5333 : : {
 5334 : 20892 : rtx nextarg;
 5335 : 20892 : tree valist;
 5336 : 20892 : location_t loc = EXPR_LOCATION (exp);
 5337 : :
 5338 : 20892 : if (call_expr_nargs (exp) < 2)
 5339 : : {
 5340 : 0 : error_at (loc, "too few arguments to function %<va_start%>");
 5341 : 0 : return const0_rtx;
 5342 : : }
 5343 : :
 5344 : 20892 : if (fold_builtin_next_arg (exp, true))
 5345 : 0 : return const0_rtx;
 5346 : :
 5347 : 20892 : nextarg = expand_builtin_next_arg ();
 5348 : 20892 : valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
 5349 : :
 : : /* Prefer the target hook when the backend provides one; otherwise
 : : fall back to the standard implementation. */
 5350 : 20892 : if (targetm.expand_builtin_va_start)
 5351 : 20892 : targetm.expand_builtin_va_start (valist, nextarg);
 5352 : : else
 5353 : 0 : std_expand_builtin_va_start (valist, nextarg);
 5354 : :
 5355 : 20892 : return const0_rtx;
 5356 : : }
5356 : :
5357 : : /* Expand EXP, a call to __builtin_va_end. */
5358 : :
 5360 : : static rtx
 5361 : 12086 : expand_builtin_va_end (tree exp)
 5362 : : {
 5363 : 12086 : tree valist = CALL_EXPR_ARG (exp, 0);
 5364 : :
 5365 : : /* Evaluate for side effects, if needed. I hate macros that don't
 5366 : : do that. */
 : : /* Other than that, va_end expands to nothing. */
 5367 : 12086 : if (TREE_SIDE_EFFECTS (valist))
 5368 : 0 : expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
 5369 : :
 5370 : 12086 : return const0_rtx;
 5371 : : }
5371 : :
5372 : : /* Expand EXP, a call to __builtin_va_copy. We do this as a
5373 : : builtin rather than just as an assignment in stdarg.h because of the
5374 : : nastiness of array-type va_list types. */
5375 : :
 5377 : : static rtx
 5378 : 242 : expand_builtin_va_copy (tree exp)
 5379 : : {
 5380 : 242 : tree dst, src, t;
 5381 : 242 : location_t loc = EXPR_LOCATION (exp);
 5382 : :
 5383 : 242 : dst = CALL_EXPR_ARG (exp, 0);
 5384 : 242 : src = CALL_EXPR_ARG (exp, 1);
 5385 : :
 5386 : 242 : dst = stabilize_va_list_loc (loc, dst, 1);
 5387 : 242 : src = stabilize_va_list_loc (loc, src, 0);
 5388 : :
 5389 : 242 : gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
 5390 : :
 : : /* Non-array va_list types can be copied with a simple assignment;
 : : array types need a block copy of the underlying storage. */
 5391 : 242 : if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
 5392 : : {
 5393 : 0 : t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
 5394 : 0 : TREE_SIDE_EFFECTS (t) = 1;
 5395 : 0 : expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
 5396 : : }
 5397 : : else
 5398 : : {
 5399 : 242 : rtx dstb, srcb, size;
 5400 : :
 5401 : : /* Evaluate to pointers. */
 5402 : 242 : dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
 5403 : 242 : srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
 5404 : 242 : size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
 5405 : : NULL_RTX, VOIDmode, EXPAND_NORMAL);
 5406 : :
 5407 : 242 : dstb = convert_memory_address (Pmode, dstb);
 5408 : 242 : srcb = convert_memory_address (Pmode, srcb);
 5409 : :
 5410 : : /* "Dereference" to BLKmode memories. */
 5411 : 242 : dstb = gen_rtx_MEM (BLKmode, dstb);
 5412 : 242 : set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
 5413 : 242 : set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
 5414 : 242 : srcb = gen_rtx_MEM (BLKmode, srcb);
 5415 : 242 : set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
 5416 : 242 : set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
 5417 : :
 5418 : : /* Copy. */
 5419 : 242 : emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
 5420 : : }
 5421 : :
 5422 : 242 : return const0_rtx;
 5423 : : }
5423 : :
5424 : : /* Expand a call to one of the builtin functions __builtin_frame_address or
5425 : : __builtin_return_address. */
5426 : :
 5428 : : static rtx
 5429 : 15583 : expand_builtin_frame_address (tree fndecl, tree exp)
 5430 : : {
 5431 : : /* The argument must be a nonnegative integer constant.
 5432 : : It counts the number of frames to scan up the stack.
 5433 : : The value is either the frame pointer value or the return
 5434 : : address saved in that frame. */
 5435 : 15583 : if (call_expr_nargs (exp) == 0)
 5436 : : /* Warning about missing arg was already issued. */
 5437 : 0 : return const0_rtx;
 5438 : 15583 : else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
 5439 : : {
 5440 : 0 : error ("invalid argument to %qD", fndecl);
 5441 : 0 : return const0_rtx;
 5442 : : }
 5443 : : else
 5444 : : {
 5445 : : /* Number of frames to scan up the stack. */
 5446 : 15583 : unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
 5447 : :
 5448 : 15583 : rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
 5449 : :
 5450 : : /* Some ports cannot access arbitrary stack frames. */
 5451 : 15583 : if (tem == NULL)
 5452 : : {
 5453 : 0 : warning (0, "unsupported argument to %qD", fndecl);
 5454 : 0 : return const0_rtx;
 5455 : : }
 5456 : :
 5457 : 15583 : if (count)
 5458 : : {
 5459 : : /* Warn since no effort is made to ensure that any frame
 5460 : : beyond the current one exists or can be safely reached. */
 5461 : 932 : warning (OPT_Wframe_address, "calling %qD with "
 5462 : : "a nonzero argument is unsafe", fndecl);
 5463 : : }
 5464 : :
 5465 : : /* For __builtin_frame_address, return what we've got. */
 5466 : 15583 : if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
 5467 : : return tem;
 5468 : :
 : : /* __builtin_return_address: force the address into a register
 : : unless it is already a register or constant. */
 5469 : 5086 : if (!REG_P (tem)
 5470 : 5086 : && ! CONSTANT_P (tem))
 5471 : 5086 : tem = copy_addr_to_reg (tem);
 5472 : 5086 : return tem;
 5473 : : }
 5474 : : }
5474 : :
5475 : : #if ! STACK_GROWS_DOWNWARD
5476 : : # define STACK_TOPS GT
5477 : : #else
5478 : : # define STACK_TOPS LT
5479 : : #endif
5480 : :
5481 : : #ifdef POINTERS_EXTEND_UNSIGNED
5482 : : # define STACK_UNSIGNED POINTERS_EXTEND_UNSIGNED
5483 : : #else
5484 : : # define STACK_UNSIGNED true
5485 : : #endif
5486 : :
5487 : : /* Expand a call to builtin function __builtin_stack_address. */
5488 : :
 5490 : : static rtx
 5491 : 3427 : expand_builtin_stack_address ()
 5492 : : {
 : : /* Copy the stack pointer to a register, converting to ptr_mode with
 : : the target's pointer extension (STACK_UNSIGNED). */
 5493 : 3427 : rtx ret = convert_to_mode (ptr_mode, copy_to_reg (stack_pointer_rtx),
 5494 : : STACK_UNSIGNED);
 5495 : :
 5496 : : #ifdef STACK_ADDRESS_OFFSET
 5497 : : /* Unbias the stack pointer, bringing it to the boundary between the
 5498 : : stack area claimed by the active function calling this builtin,
 5499 : : and stack ranges that could get clobbered if it called another
 5500 : : function. It should NOT encompass any stack red zone, that is
 5501 : : used in leaf functions.
 5502 : :
 5503 : : On SPARC, the register save area is *not* considered active or
 5504 : : used by the active function, but rather as akin to the area in
 5505 : : which call-preserved registers are saved by callees. This
 5506 : : enables __strub_leave to clear what would otherwise overlap with
 5507 : : its own register save area.
 5508 : :
 5509 : : If the address is computed too high or too low, parts of a stack
 5510 : : range that should be scrubbed may be left unscrubbed, scrubbing
 5511 : : may corrupt active portions of the stack frame, and stack ranges
 5512 : : may be doubly-scrubbed by caller and callee.
 5513 : :
 5514 : : In order for it to be just right, the area delimited by
 5515 : : @code{__builtin_stack_address} and @code{__builtin_frame_address
 5516 : : (0)} should encompass caller's registers saved by the function,
 5517 : : local on-stack variables and @code{alloca} stack areas.
 5518 : : Accumulated outgoing on-stack arguments, preallocated as part of
 5519 : : a function's own prologue, are to be regarded as part of the
 5520 : : (caller) function's active area as well, whereas those pushed or
 5521 : : allocated temporarily for a call are regarded as part of the
 5522 : : callee's stack range, rather than the caller's. */
 5523 : : ret = plus_constant (ptr_mode, ret, STACK_ADDRESS_OFFSET);
 5524 : : #endif
 5525 : :
 5526 : 3427 : return force_reg (ptr_mode, ret);
 5527 : : }
5527 : :
5528 : : /* Expand a call to builtin function __builtin_strub_enter. */
5529 : :
 5531 : : static rtx
 5532 : 2159 : expand_builtin_strub_enter (tree exp)
 5533 : : {
 5534 : 2159 : if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
 5535 : : return NULL_RTX;
 5536 : :
 : : /* Only expand inline when optimizing and inlining is enabled;
 : : otherwise let a library call handle it. */
 5537 : 2159 : if (optimize < 1 || flag_no_inline)
 5538 : : return NULL_RTX;
 5539 : :
 5540 : 1515 : rtx stktop = expand_builtin_stack_address ();
 5541 : :
 : : /* Dereference the watermark pointer argument and initialize the
 : : watermark with the current top of the stack. */
 5542 : 1515 : tree wmptr = CALL_EXPR_ARG (exp, 0);
 5543 : 1515 : tree wmtype = TREE_TYPE (TREE_TYPE (wmptr));
 5544 : 1515 : tree wmtree = fold_build2 (MEM_REF, wmtype, wmptr,
 5545 : : build_int_cst (TREE_TYPE (wmptr), 0));
 5546 : 1515 : rtx wmark = expand_expr (wmtree, NULL_RTX, ptr_mode, EXPAND_MEMORY);
 5547 : :
 5548 : 1515 : emit_move_insn (wmark, stktop);
 5549 : :
 5550 : 1515 : return const0_rtx;
 5551 : : }
5551 : :
5552 : : /* Expand a call to builtin function __builtin_strub_update. */
5553 : :
5554 : : static rtx
5555 : 1072 : expand_builtin_strub_update (tree exp)
5556 : : {
5557 : 1072 : if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5558 : : return NULL_RTX;
5559 : :
5560 : 1072 : if (optimize < 2 || flag_no_inline)
5561 : : return NULL_RTX;
5562 : :
5563 : 652 : rtx stktop = expand_builtin_stack_address ();
5564 : :
5565 : : #ifdef RED_ZONE_SIZE
5566 : : /* Here's how the strub enter, update and leave functions deal with red zones.
5567 : :
5568 : : If it weren't for red zones, update, called from within a strub context,
5569 : : would bump the watermark to the top of the stack. Enter and leave, running
5570 : : in the caller, would use the caller's top of stack address both to
5571 : : initialize the watermark passed to the callee, and to start strubbing the
5572 : : stack afterwards.
5573 : :
5574 : : Ideally, we'd update the watermark so as to cover the used amount of red
5575 : : zone, and strub starting at the caller's other end of the (presumably
5576 : : unused) red zone. Normally, only leaf functions use the red zone, but at
5577 : : this point we can't tell whether a function is a leaf, nor can we tell how
5578 : : much of the red zone it uses. Furthermore, some strub contexts may have
5579 : : been inlined so that update and leave are called from the same stack frame,
5580 : : and the strub builtins may all have been inlined, turning a strub function
5581 : : into a leaf.
5582 : :
5583 : : So cleaning the range from the caller's stack pointer (one end of the red
5584 : : zone) to the (potentially inlined) callee's (other end of the) red zone
5585 : : could scribble over the caller's own red zone.
5586 : :
5587 : : We avoid this possibility by arranging for callers that are strub contexts
5588 : : to use their own watermark as the strub starting point. So, if A calls B,
5589 : : and B calls C, B will tell A to strub up to the end of B's red zone, and
5590 : : will strub itself only the part of C's stack frame and red zone that
5591 : : doesn't overlap with B's. With that, we don't need to know who's leaf and
5592 : : who isn't: inlined calls will shrink their strub window to zero, each
5593 : : remaining call will strub some portion of the stack, and eventually the
5594 : : strub context will return to a caller that isn't a strub context itself,
5595 : : that will therefore use its own stack pointer as the strub starting point.
5596 : : It's not a leaf, because strub contexts can't be inlined into non-strub
5597 : : contexts, so it doesn't use the red zone, and it will therefore correctly
5598 : : strub up the callee's stack frame up to the end of the callee's red zone.
5599 : : Neat! */
5600 : 652 : if (true /* (flags_from_decl_or_type (current_function_decl) & ECF_LEAF) */)
5601 : : {
5602 : 652 : poly_int64 red_zone_size = RED_ZONE_SIZE;
5603 : : #if STACK_GROWS_DOWNWARD
5604 : 652 : red_zone_size = -red_zone_size;
5605 : : #endif
5606 : 652 : stktop = plus_constant (ptr_mode, stktop, red_zone_size);
5607 : 652 : stktop = force_reg (ptr_mode, stktop);
5608 : : }
5609 : : #endif
5610 : :
5611 : 652 : tree wmptr = CALL_EXPR_ARG (exp, 0);
5612 : 652 : tree wmtype = TREE_TYPE (TREE_TYPE (wmptr));
5613 : 652 : tree wmtree = fold_build2 (MEM_REF, wmtype, wmptr,
5614 : : build_int_cst (TREE_TYPE (wmptr), 0));
5615 : 652 : rtx wmark = expand_expr (wmtree, NULL_RTX, ptr_mode, EXPAND_MEMORY);
5616 : :
5617 : 652 : rtx wmarkr = force_reg (ptr_mode, wmark);
5618 : :
5619 : 652 : rtx_code_label *lab = gen_label_rtx ();
5620 : 652 : do_compare_rtx_and_jump (stktop, wmarkr, STACK_TOPS, STACK_UNSIGNED,
5621 : : ptr_mode, NULL_RTX, lab, NULL,
5622 : : profile_probability::very_likely ());
5623 : 652 : emit_move_insn (wmark, stktop);
5624 : :
5625 : : /* If this is an inlined strub function, also bump the watermark for the
5626 : : enclosing function. This avoids a problem with the following scenario: A
5627 : : calls B and B calls C, and both B and C get inlined into A. B allocates
5628 : : temporary stack space before calling C. If we don't update A's watermark,
5629 : : we may use an outdated baseline for the post-C strub_leave, erasing B's
5630 : : temporary stack allocation. We only need this if we're fully expanding
5631 : : strub_leave inline. */
5632 : 652 : tree xwmptr = (optimize > 2
5633 : 652 : ? strub_watermark_parm (current_function_decl)
5634 : : : wmptr);
5635 : 652 : if (wmptr != xwmptr)
5636 : : {
5637 : 156 : wmptr = xwmptr;
5638 : 156 : wmtype = TREE_TYPE (TREE_TYPE (wmptr));
5639 : 156 : wmtree = fold_build2 (MEM_REF, wmtype, wmptr,
5640 : : build_int_cst (TREE_TYPE (wmptr), 0));
5641 : 156 : wmark = expand_expr (wmtree, NULL_RTX, ptr_mode, EXPAND_MEMORY);
5642 : 156 : wmarkr = force_reg (ptr_mode, wmark);
5643 : :
5644 : 156 : do_compare_rtx_and_jump (stktop, wmarkr, STACK_TOPS, STACK_UNSIGNED,
5645 : : ptr_mode, NULL_RTX, lab, NULL,
5646 : : profile_probability::very_likely ());
5647 : 156 : emit_move_insn (wmark, stktop);
5648 : : }
5649 : :
5650 : 652 : emit_label (lab);
5651 : :
5652 : 652 : return const0_rtx;
5653 : : }
5654 : :
5655 : :
5656 : : /* Expand a call to builtin function __builtin_strub_leave. */
5657 : :
5658 : : static rtx
5659 : 2729 : expand_builtin_strub_leave (tree exp)
5660 : : {
5661 : 2729 : if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5662 : : return NULL_RTX;
5663 : :
5664 : 2729 : if (optimize < 2 || optimize_size || flag_no_inline)
5665 : : return NULL_RTX;
5666 : :
5667 : 1229 : rtx stktop = NULL_RTX;
5668 : :
5669 : 1229 : if (tree wmptr = (optimize
5670 : 1229 : ? strub_watermark_parm (current_function_decl)
5671 : : : NULL_TREE))
5672 : : {
5673 : 509 : tree wmtype = TREE_TYPE (TREE_TYPE (wmptr));
5674 : 509 : tree wmtree = fold_build2 (MEM_REF, wmtype, wmptr,
5675 : : build_int_cst (TREE_TYPE (wmptr), 0));
5676 : 509 : rtx wmark = expand_expr (wmtree, NULL_RTX, ptr_mode, EXPAND_MEMORY);
5677 : 509 : stktop = force_reg (ptr_mode, wmark);
5678 : : }
5679 : :
5680 : 509 : if (!stktop)
5681 : 720 : stktop = expand_builtin_stack_address ();
5682 : :
5683 : 1229 : tree wmptr = CALL_EXPR_ARG (exp, 0);
5684 : 1229 : tree wmtype = TREE_TYPE (TREE_TYPE (wmptr));
5685 : 1229 : tree wmtree = fold_build2 (MEM_REF, wmtype, wmptr,
5686 : : build_int_cst (TREE_TYPE (wmptr), 0));
5687 : 1229 : rtx wmark = expand_expr (wmtree, NULL_RTX, ptr_mode, EXPAND_MEMORY);
5688 : :
5689 : 1229 : rtx wmarkr = force_reg (ptr_mode, wmark);
5690 : :
5691 : : #if ! STACK_GROWS_DOWNWARD
5692 : : rtx base = stktop;
5693 : : rtx end = wmarkr;
5694 : : #else
5695 : 1229 : rtx base = wmarkr;
5696 : 1229 : rtx end = stktop;
5697 : : #endif
5698 : :
5699 : : /* We're going to modify it, so make sure it's not e.g. the stack pointer. */
5700 : 1229 : base = copy_to_reg (base);
5701 : :
5702 : 1229 : rtx_code_label *done = gen_label_rtx ();
5703 : 1229 : do_compare_rtx_and_jump (base, end, LT, STACK_UNSIGNED,
5704 : : ptr_mode, NULL_RTX, done, NULL,
5705 : : profile_probability::very_likely ());
5706 : :
5707 : 1229 : if (optimize < 3)
5708 : 909 : expand_call (exp, NULL_RTX, true);
5709 : : else
5710 : : {
5711 : : /* Ok, now we've determined we want to copy the block, so convert the
5712 : : addresses to Pmode, as needed to dereference them to access ptr_mode
5713 : : memory locations, so that we don't have to convert anything within the
5714 : : loop. */
5715 : 320 : base = memory_address (ptr_mode, base);
5716 : 320 : end = memory_address (ptr_mode, end);
5717 : :
5718 : 320 : rtx zero = force_operand (const0_rtx, NULL_RTX);
5719 : 320 : int ulen = GET_MODE_SIZE (ptr_mode);
5720 : :
5721 : : /* ??? It would be nice to use setmem or similar patterns here,
5722 : : but they do not necessarily obey the stack growth direction,
5723 : : which has security implications. We also have to avoid calls
5724 : : (memset, bzero or any machine-specific ones), which are
5725 : : likely unsafe here (see TARGET_STRUB_MAY_USE_MEMSET). */
5726 : : #if ! STACK_GROWS_DOWNWARD
5727 : : rtx incr = plus_constant (Pmode, base, ulen);
5728 : : rtx dstm = gen_rtx_MEM (ptr_mode, base);
5729 : :
5730 : : rtx_code_label *loop = gen_label_rtx ();
5731 : : emit_label (loop);
5732 : : emit_move_insn (dstm, zero);
5733 : : emit_move_insn (base, force_operand (incr, NULL_RTX));
5734 : : #else
5735 : 320 : rtx decr = plus_constant (Pmode, end, -ulen);
5736 : 320 : rtx dstm = gen_rtx_MEM (ptr_mode, end);
5737 : :
5738 : 320 : rtx_code_label *loop = gen_label_rtx ();
5739 : 320 : emit_label (loop);
5740 : 320 : emit_move_insn (end, force_operand (decr, NULL_RTX));
5741 : 320 : emit_move_insn (dstm, zero);
5742 : : #endif
5743 : 640 : do_compare_rtx_and_jump (base, end, LT, STACK_UNSIGNED,
5744 : 320 : Pmode, NULL_RTX, NULL, loop,
5745 : : profile_probability::very_likely ());
5746 : : }
5747 : :
5748 : 1229 : emit_label (done);
5749 : :
5750 : 1229 : return const0_rtx;
5751 : : }
5752 : :
5753 : : /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
5754 : : failed and the caller should emit a normal call. */
5755 : :
5756 : : static rtx
5757 : 28233 : expand_builtin_alloca (tree exp)
5758 : : {
5759 : 28233 : rtx op0;
5760 : 28233 : rtx result;
5761 : 28233 : unsigned int align;
5762 : 28233 : tree fndecl = get_callee_fndecl (exp);
5763 : 28233 : HOST_WIDE_INT max_size;
5764 : 28233 : enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5765 : 28233 : bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
5766 : 28233 : bool valid_arglist
5767 : : = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5768 : 28233 : ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
5769 : : VOID_TYPE)
5770 : : : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
5771 : 28231 : ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
5772 : 28233 : : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
5773 : :
5774 : 28233 : if (!valid_arglist)
5775 : : return NULL_RTX;
5776 : :
5777 : : /* Compute the argument. */
5778 : 28228 : op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5779 : :
5780 : : /* Compute the alignment. */
5781 : 52641 : align = (fcode == BUILT_IN_ALLOCA
5782 : 24414 : ? BIGGEST_ALIGNMENT
5783 : 3814 : : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
5784 : :
5785 : : /* Compute the maximum size. */
5786 : 2 : max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5787 : 28230 : ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
5788 : : : -1);
5789 : :
5790 : : /* Allocate the desired space. If the allocation stems from the declaration
5791 : : of a variable-sized object, it cannot accumulate. */
5792 : 28228 : result
5793 : 28228 : = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
5794 : 28228 : result = convert_memory_address (ptr_mode, result);
5795 : :
5796 : : /* Dynamic allocations for variables are recorded during gimplification. */
5797 : 28228 : if (!alloca_for_var && (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC))
5798 : 0 : record_dynamic_alloc (exp);
5799 : :
5800 : : return result;
5801 : : }
5802 : :
5803 : : /* Emit a call to __asan_allocas_unpoison call in EXP. Add to second argument
5804 : : of the call virtual_stack_dynamic_rtx - stack_pointer_rtx, which is the
5805 : : STACK_DYNAMIC_OFFSET value. See motivation for this in comment to
5806 : : handle_builtin_stack_restore function. */
5807 : :
5808 : : static rtx
5809 : 203 : expand_asan_emit_allocas_unpoison (tree exp)
5810 : : {
5811 : 203 : tree arg0 = CALL_EXPR_ARG (exp, 0);
5812 : 203 : tree arg1 = CALL_EXPR_ARG (exp, 1);
5813 : 203 : rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5814 : 203 : rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5815 : 203 : rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
5816 : : stack_pointer_rtx, NULL_RTX, 0,
5817 : : OPTAB_LIB_WIDEN);
5818 : 203 : off = convert_modes (ptr_mode, Pmode, off, 0);
5819 : 203 : bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
5820 : : OPTAB_LIB_WIDEN);
5821 : 203 : rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
5822 : 203 : ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
5823 : : top, ptr_mode, bot, ptr_mode);
5824 : 203 : return ret;
5825 : : }
5826 : :
5827 : : /* Expand a call to bswap builtin in EXP.
5828 : : Return NULL_RTX if a normal call should be emitted rather than expanding the
5829 : : function in-line. If convenient, the result should be placed in TARGET.
5830 : : SUBTARGET may be used as the target for computing one of EXP's operands. */
5831 : :
5832 : : static rtx
5833 : 1163 : expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
5834 : : rtx subtarget)
5835 : : {
5836 : 1163 : tree arg;
5837 : 1163 : rtx op0;
5838 : :
5839 : 1163 : if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5840 : : return NULL_RTX;
5841 : :
5842 : 1163 : arg = CALL_EXPR_ARG (exp, 0);
5843 : 1163 : op0 = expand_expr (arg,
5844 : 50 : subtarget && GET_MODE (subtarget) == target_mode
5845 : : ? subtarget : NULL_RTX,
5846 : : target_mode, EXPAND_NORMAL);
5847 : 1163 : if (GET_MODE (op0) != target_mode)
5848 : 0 : op0 = convert_to_mode (target_mode, op0, 1);
5849 : :
5850 : 1163 : target = expand_unop (target_mode, bswap_optab, op0, target, 1);
5851 : :
5852 : 1163 : gcc_assert (target);
5853 : :
5854 : 1163 : return convert_to_mode (target_mode, target, 1);
5855 : : }
5856 : :
5857 : : /* Expand a call to a unary builtin in EXP.
5858 : : Return NULL_RTX if a normal call should be emitted rather than expanding the
5859 : : function in-line. If convenient, the result should be placed in TARGET.
5860 : : SUBTARGET may be used as the target for computing one of EXP's operands. */
5861 : :
5862 : : static rtx
5863 : 753 : expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
5864 : : rtx subtarget, optab op_optab)
5865 : : {
5866 : 753 : rtx op0;
5867 : :
5868 : 753 : if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5869 : : return NULL_RTX;
5870 : :
5871 : : /* Compute the argument. */
5872 : 1506 : op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
5873 : : (subtarget
5874 : 96 : && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
5875 : 96 : == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
5876 : : VOIDmode, EXPAND_NORMAL);
5877 : : /* Compute op, into TARGET if possible.
5878 : : Set TARGET to wherever the result comes back. */
5879 : 753 : target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5880 : : op_optab, op0, target, op_optab != clrsb_optab);
5881 : 753 : gcc_assert (target);
5882 : :
5883 : 753 : return convert_to_mode (target_mode, target, 0);
5884 : : }
5885 : :
5886 : : /* Expand a call to __builtin_expect. We just return our argument
5887 : : as the builtin_expect semantic should've been already executed by
5888 : : tree branch prediction pass. */
5889 : :
5890 : : static rtx
5891 : 1025 : expand_builtin_expect (tree exp, rtx target)
5892 : : {
5893 : 1025 : tree arg;
5894 : :
5895 : 1025 : if (call_expr_nargs (exp) < 2)
5896 : 0 : return const0_rtx;
5897 : 1025 : arg = CALL_EXPR_ARG (exp, 0);
5898 : :
5899 : 1025 : target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5900 : : /* When guessing was done, the hints should be already stripped away. */
5901 : 1025 : gcc_assert (!flag_guess_branch_prob
5902 : : || optimize == 0 || seen_error ());
5903 : : return target;
5904 : : }
5905 : :
5906 : : /* Expand a call to __builtin_expect_with_probability. We just return our
5907 : : argument as the builtin_expect semantic should've been already executed by
5908 : : tree branch prediction pass. */
5909 : :
5910 : : static rtx
5911 : 5 : expand_builtin_expect_with_probability (tree exp, rtx target)
5912 : : {
5913 : 5 : tree arg;
5914 : :
5915 : 5 : if (call_expr_nargs (exp) < 3)
5916 : 0 : return const0_rtx;
5917 : 5 : arg = CALL_EXPR_ARG (exp, 0);
5918 : :
5919 : 5 : target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5920 : : /* When guessing was done, the hints should be already stripped away. */
5921 : 5 : gcc_assert (!flag_guess_branch_prob
5922 : : || optimize == 0 || seen_error ());
5923 : : return target;
5924 : : }
5925 : :
5926 : :
5927 : : /* Expand a call to __builtin_assume_aligned. We just return our first
5928 : : argument as the builtin_assume_aligned semantic should've been already
5929 : : executed by CCP. */
5930 : :
5931 : : static rtx
5932 : 75 : expand_builtin_assume_aligned (tree exp, rtx target)
5933 : : {
5934 : 75 : if (call_expr_nargs (exp) < 2)
5935 : 0 : return const0_rtx;
5936 : 75 : target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
5937 : : EXPAND_NORMAL);
5938 : 75 : gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
5939 : : && (call_expr_nargs (exp) < 3
5940 : : || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
5941 : : return target;
5942 : : }
5943 : :
5944 : : void
5945 : 38504 : expand_builtin_trap (void)
5946 : : {
5947 : 38504 : if (targetm.have_trap ())
5948 : : {
5949 : 38504 : rtx_insn *insn = emit_insn (targetm.gen_trap ());
5950 : : /* For trap insns when not accumulating outgoing args force
5951 : : REG_ARGS_SIZE note to prevent crossjumping of calls with
5952 : : different args sizes. */
5953 : 38504 : if (!ACCUMULATE_OUTGOING_ARGS)
5954 : 38502 : add_args_size_note (insn, stack_pointer_delta);
5955 : : }
5956 : : else
5957 : : {
5958 : 0 : tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
5959 : 0 : tree call_expr = build_call_expr (fn, 0);
5960 : 0 : expand_call (call_expr, NULL_RTX, false);
5961 : : }
5962 : :
5963 : 38504 : emit_barrier ();
5964 : 38504 : }
5965 : :
5966 : : /* Expand a call to __builtin_unreachable. We do nothing except emit
5967 : : a barrier saying that control flow will not pass here.
5968 : :
5969 : : It is the responsibility of the program being compiled to ensure
5970 : : that control flow does never reach __builtin_unreachable. */
5971 : : static void
5972 : 5311 : expand_builtin_unreachable (void)
5973 : : {
5974 : : /* Use gimple_build_builtin_unreachable or builtin_decl_unreachable
5975 : : to avoid this. */
5976 : 5311 : gcc_checking_assert (!sanitize_flags_p (SANITIZE_UNREACHABLE));
5977 : 5311 : emit_barrier ();
5978 : 5311 : }
5979 : :
5980 : : /* Expand EXP, a call to fabs, fabsf or fabsl.
5981 : : Return NULL_RTX if a normal call should be emitted rather than expanding
5982 : : the function inline. If convenient, the result should be placed
5983 : : in TARGET. SUBTARGET may be used as the target for computing
5984 : : the operand. */
5985 : :
5986 : : static rtx
5987 : 4 : expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5988 : : {
5989 : 4 : machine_mode mode;
5990 : 4 : tree arg;
5991 : 4 : rtx op0;
5992 : :
5993 : 4 : if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5994 : : return NULL_RTX;
5995 : :
5996 : 0 : arg = CALL_EXPR_ARG (exp, 0);
5997 : 0 : CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5998 : 0 : mode = TYPE_MODE (TREE_TYPE (arg));
5999 : 0 : op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
6000 : 0 : return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
6001 : : }
6002 : :
6003 : : /* Expand EXP, a call to copysign, copysignf, or copysignl.
6004 : : Return NULL is a normal call should be emitted rather than expanding the
6005 : : function inline. If convenient, the result should be placed in TARGET.
6006 : : SUBTARGET may be used as the target for computing the operand. */
6007 : :
6008 : : static rtx
6009 : 11674 : expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
6010 : : {
6011 : 11674 : rtx op0, op1;
6012 : 11674 : tree arg;
6013 : :
6014 : 11674 : if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
6015 : : return NULL_RTX;
6016 : :
6017 : 11673 : arg = CALL_EXPR_ARG (exp, 0);
6018 : 11673 : op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
6019 : :
6020 : 11673 : arg = CALL_EXPR_ARG (exp, 1);
6021 : 11673 : op1 = expand_normal (arg);
6022 : :
6023 : 11673 : return expand_copysign (op0, op1, target);
6024 : : }
6025 : :
6026 : : /* Emit a call to __builtin___clear_cache. */
6027 : :
6028 : : void
6029 : 0 : default_emit_call_builtin___clear_cache (rtx begin, rtx end)
6030 : : {
6031 : 0 : rtx callee = gen_rtx_SYMBOL_REF (Pmode,
6032 : : BUILTIN_ASM_NAME_PTR
6033 : : (BUILT_IN_CLEAR_CACHE));
6034 : :
6035 : 0 : emit_library_call (callee,
6036 : : LCT_NORMAL, VOIDmode,
6037 : : convert_memory_address (ptr_mode, begin), ptr_mode,
6038 : : convert_memory_address (ptr_mode, end), ptr_mode);
6039 : 0 : }
6040 : :
6041 : : /* Emit a call to __builtin___clear_cache, unless the target specifies
6042 : : it as do-nothing. This function can be used by trampoline
6043 : : finalizers to duplicate the effects of expanding a call to the
6044 : : clear_cache builtin. */
6045 : :
6046 : : void
6047 : 28 : maybe_emit_call_builtin___clear_cache (rtx begin, rtx end)
6048 : : {
6049 : 28 : gcc_assert ((GET_MODE (begin) == ptr_mode || GET_MODE (begin) == Pmode
6050 : : || CONST_INT_P (begin))
6051 : : && (GET_MODE (end) == ptr_mode || GET_MODE (end) == Pmode
6052 : : || CONST_INT_P (end)));
6053 : :
6054 : 28 : if (targetm.have_clear_cache ())
6055 : : {
6056 : : /* We have a "clear_cache" insn, and it will handle everything. */
6057 : 0 : class expand_operand ops[2];
6058 : :
6059 : 0 : create_address_operand (&ops[0], begin);
6060 : 0 : create_address_operand (&ops[1], end);
6061 : :
6062 : 0 : if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
6063 : 0 : return;
6064 : : }
6065 : : else
6066 : : {
6067 : : #ifndef CLEAR_INSN_CACHE
6068 : : /* There is no "clear_cache" insn, and __clear_cache() in libgcc
6069 : : does nothing. There is no need to call it. Do nothing. */
6070 : : return;
6071 : : #endif /* CLEAR_INSN_CACHE */
6072 : : }
6073 : :
6074 : 0 : targetm.calls.emit_call_builtin___clear_cache (begin, end);
6075 : : }
6076 : :
6077 : : /* Expand a call to __builtin___clear_cache. */
6078 : :
6079 : : static void
6080 : 28 : expand_builtin___clear_cache (tree exp)
6081 : : {
6082 : 28 : tree begin, end;
6083 : 28 : rtx begin_rtx, end_rtx;
6084 : :
6085 : : /* We must not expand to a library call. If we did, any
6086 : : fallback library function in libgcc that might contain a call to
6087 : : __builtin___clear_cache() would recurse infinitely. */
6088 : 28 : if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6089 : : {
6090 : 0 : error ("both arguments to %<__builtin___clear_cache%> must be pointers");
6091 : 0 : return;
6092 : : }
6093 : :
6094 : 28 : begin = CALL_EXPR_ARG (exp, 0);
6095 : 30 : begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
6096 : :
6097 : 28 : end = CALL_EXPR_ARG (exp, 1);
6098 : 30 : end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
6099 : :
6100 : 28 : maybe_emit_call_builtin___clear_cache (begin_rtx, end_rtx);
6101 : : }
6102 : :
6103 : : /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
6104 : :
6105 : : static rtx
6106 : 634 : round_trampoline_addr (rtx tramp)
6107 : : {
6108 : 634 : rtx temp, addend, mask;
6109 : :
6110 : : /* If we don't need too much alignment, we'll have been guaranteed
6111 : : proper alignment by get_trampoline_type. */
6112 : 634 : if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
6113 : : return tramp;
6114 : :
6115 : : /* Round address up to desired boundary. */
6116 : 0 : temp = gen_reg_rtx (Pmode);
6117 : 0 : addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
6118 : 0 : mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
6119 : :
6120 : 0 : temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
6121 : : temp, 0, OPTAB_LIB_WIDEN);
6122 : 0 : tramp = expand_simple_binop (Pmode, AND, temp, mask,
6123 : : temp, 0, OPTAB_LIB_WIDEN);
6124 : :
6125 : 0 : return tramp;
6126 : : }
6127 : :
6128 : : static rtx
6129 : 295 : expand_builtin_init_trampoline (tree exp, bool onstack)
6130 : : {
6131 : 295 : tree t_tramp, t_func, t_chain;
6132 : 295 : rtx m_tramp, r_tramp, r_chain, tmp;
6133 : :
6134 : 295 : if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
6135 : : POINTER_TYPE, VOID_TYPE))
6136 : : return NULL_RTX;
6137 : :
6138 : 295 : t_tramp = CALL_EXPR_ARG (exp, 0);
6139 : 295 : t_func = CALL_EXPR_ARG (exp, 1);
6140 : 295 : t_chain = CALL_EXPR_ARG (exp, 2);
6141 : :
6142 : 295 : r_tramp = expand_normal (t_tramp);
6143 : 295 : m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
6144 : 295 : MEM_NOTRAP_P (m_tramp) = 1;
6145 : :
6146 : : /* If ONSTACK, the TRAMP argument should be the address of a field
6147 : : within the local function's FRAME decl. Either way, let's see if
6148 : : we can fill in the MEM_ATTRs for this memory. */
6149 : 295 : if (TREE_CODE (t_tramp) == ADDR_EXPR)
6150 : 295 : set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
6151 : :
6152 : : /* Creator of a heap trampoline is responsible for making sure the
6153 : : address is aligned to at least STACK_BOUNDARY. Normally malloc
6154 : : will ensure this anyhow. */
6155 : 295 : tmp = round_trampoline_addr (r_tramp);
6156 : 295 : if (tmp != r_tramp)
6157 : : {
6158 : 0 : m_tramp = change_address (m_tramp, BLKmode, tmp);
6159 : 0 : set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
6160 : 0 : set_mem_size (m_tramp, TRAMPOLINE_SIZE);
6161 : : }
6162 : :
6163 : : /* The FUNC argument should be the address of the nested function.
6164 : : Extract the actual function decl to pass to the hook. */
6165 : 295 : gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
6166 : 295 : t_func = TREE_OPERAND (t_func, 0);
6167 : 295 : gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
6168 : :
6169 : 295 : r_chain = expand_normal (t_chain);
6170 : :
6171 : : /* Generate insns to initialize the trampoline. */
6172 : 295 : targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
6173 : :
6174 : 295 : if (onstack)
6175 : : {
6176 : 295 : trampolines_created = 1;
6177 : :
6178 : 295 : if (targetm.calls.custom_function_descriptors != 0)
6179 : 295 : warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
6180 : : "trampoline generated for nested function %qD", t_func);
6181 : : }
6182 : :
6183 : 295 : return const0_rtx;
6184 : : }
6185 : :
6186 : : static rtx
6187 : 339 : expand_builtin_adjust_trampoline (tree exp)
6188 : : {
6189 : 339 : rtx tramp;
6190 : :
6191 : 339 : if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6192 : : return NULL_RTX;
6193 : :
6194 : 339 : tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
6195 : 339 : tramp = round_trampoline_addr (tramp);
6196 : 339 : if (targetm.calls.trampoline_adjust_address)
6197 : 0 : tramp = targetm.calls.trampoline_adjust_address (tramp);
6198 : :
6199 : : return tramp;
6200 : : }
6201 : :
6202 : : /* Expand a call to the builtin descriptor initialization routine.
6203 : : A descriptor is made up of a couple of pointers to the static
6204 : : chain and the code entry in this order. */
6205 : :
6206 : : static rtx
6207 : 0 : expand_builtin_init_descriptor (tree exp)
6208 : : {
6209 : 0 : tree t_descr, t_func, t_chain;
6210 : 0 : rtx m_descr, r_descr, r_func, r_chain;
6211 : :
6212 : 0 : if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
6213 : : VOID_TYPE))
6214 : : return NULL_RTX;
6215 : :
6216 : 0 : t_descr = CALL_EXPR_ARG (exp, 0);
6217 : 0 : t_func = CALL_EXPR_ARG (exp, 1);
6218 : 0 : t_chain = CALL_EXPR_ARG (exp, 2);
6219 : :
6220 : 0 : r_descr = expand_normal (t_descr);
6221 : 0 : m_descr = gen_rtx_MEM (BLKmode, r_descr);
6222 : 0 : MEM_NOTRAP_P (m_descr) = 1;
6223 : 0 : set_mem_align (m_descr, GET_MODE_ALIGNMENT (ptr_mode));
6224 : :
6225 : 0 : r_func = expand_normal (t_func);
6226 : 0 : r_chain = expand_normal (t_chain);
6227 : :
6228 : : /* Generate insns to initialize the descriptor. */
6229 : 0 : emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
6230 : 0 : emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
6231 : : POINTER_SIZE / BITS_PER_UNIT), r_func);
6232 : :
6233 : 0 : return const0_rtx;
6234 : : }
6235 : :
6236 : : /* Expand a call to the builtin descriptor adjustment routine. */
6237 : :
6238 : : static rtx
6239 : 0 : expand_builtin_adjust_descriptor (tree exp)
6240 : : {
6241 : 0 : rtx tramp;
6242 : :
6243 : 0 : if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6244 : : return NULL_RTX;
6245 : :
6246 : 0 : tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
6247 : :
6248 : : /* Unalign the descriptor to allow runtime identification. */
6249 : 0 : tramp = plus_constant (ptr_mode, tramp,
6250 : 0 : targetm.calls.custom_function_descriptors);
6251 : :
6252 : 0 : return force_operand (tramp, NULL_RTX);
6253 : : }
6254 : :
6255 : : /* Expand the call EXP to the built-in signbit, signbitf or signbitl
6256 : : function. The function first checks whether the back end provides
6257 : : an insn to implement signbit for the respective mode. If not, it
6258 : : checks whether the floating point format of the value is such that
6259 : : the sign bit can be extracted. If that is not the case, error out.
6260 : : EXP is the expression that is a call to the builtin function; if
6261 : : convenient, the result should be placed in TARGET. */
6262 : : static rtx
6263 : 1133 : expand_builtin_signbit (tree exp, rtx target)
6264 : : {
6265 : 1133 : const struct real_format *fmt;
6266 : 1133 : scalar_float_mode fmode;
6267 : 1133 : scalar_int_mode rmode, imode;
6268 : 1133 : tree arg;
6269 : 1133 : int word, bitpos;
6270 : 1133 : enum insn_code icode;
6271 : 1133 : rtx temp;
6272 : 1133 : location_t loc = EXPR_LOCATION (exp);
6273 : :
6274 : 1133 : if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
6275 : : return NULL_RTX;
6276 : :
6277 : 1133 : arg = CALL_EXPR_ARG (exp, 0);
6278 : 1133 : fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
6279 : 1133 : rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
6280 : 1133 : fmt = REAL_MODE_FORMAT (fmode);
6281 : :
6282 : 1133 : arg = builtin_save_expr (arg);
6283 : :
6284 : : /* Expand the argument yielding a RTX expression. */
6285 : 1133 : temp = expand_normal (arg);
6286 : :
6287 : : /* Check if the back end provides an insn that handles signbit for the
6288 : : argument's mode. */
6289 : 1133 : icode = optab_handler (signbit_optab, fmode);
6290 : 1133 : if (icode != CODE_FOR_nothing)
6291 : : {
6292 : 10 : rtx_insn *last = get_last_insn ();
6293 : 10 : rtx this_target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6294 : 10 : if (maybe_emit_unop_insn (icode, this_target, temp, UNKNOWN))
6295 : : return this_target;
6296 : 0 : delete_insns_since (last);
6297 : : }
6298 : :
6299 : : /* For floating point formats without a sign bit, implement signbit
6300 : : as "ARG < 0.0". */
6301 : 1123 : bitpos = fmt->signbit_ro;
6302 : 1123 : if (bitpos < 0)
6303 : : {
6304 : : /* But we can't do this if the format supports signed zero. */
6305 : 0 : gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
6306 : :
6307 : 0 : arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
6308 : 0 : build_real (TREE_TYPE (arg), dconst0));
6309 : 0 : return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
6310 : : }
6311 : :
6312 : 2246 : if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
6313 : : {
6314 : 1019 : imode = int_mode_for_mode (fmode).require ();
6315 : 1019 : temp = gen_lowpart (imode, temp);
6316 : : }
6317 : : else
6318 : : {
6319 : 104 : imode = word_mode;
6320 : : /* Handle targets with different FP word orders. */
6321 : 104 : if (FLOAT_WORDS_BIG_ENDIAN)
6322 : : word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
6323 : : else
6324 : 104 : word = bitpos / BITS_PER_WORD;
6325 : 104 : temp = operand_subword_force (temp, word, fmode);
6326 : 104 : bitpos = bitpos % BITS_PER_WORD;
6327 : : }
6328 : :
6329 : : /* Force the intermediate word_mode (or narrower) result into a
6330 : : register. This avoids attempting to create paradoxical SUBREGs
6331 : : of floating point modes below. */
6332 : 1123 : temp = force_reg (imode, temp);
6333 : :
6334 : : /* If the bitpos is within the "result mode" lowpart, the operation
6335 : : can be implement with a single bitwise AND. Otherwise, we need
6336 : : a right shift and an AND. */
6337 : :
6338 : 2246 : if (bitpos < GET_MODE_BITSIZE (rmode))
6339 : : {
6340 : 915 : wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
6341 : :
6342 : 2745 : if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
6343 : 0 : temp = gen_lowpart (rmode, temp);
6344 : 1830 : temp = expand_binop (rmode, and_optab, temp,
6345 : 1830 : immed_wide_int_const (mask, rmode),
6346 : : NULL_RTX, 1, OPTAB_LIB_WIDEN);
6347 : 915 : }
6348 : : else
6349 : : {
6350 : : /* Perform a logical right shift to place the signbit in the least
6351 : : significant bit, then truncate the result to the desired mode
6352 : : and mask just this bit. */
6353 : 208 : temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
6354 : 208 : temp = gen_lowpart (rmode, temp);
6355 : 208 : temp = expand_binop (rmode, and_optab, temp, const1_rtx,
6356 : : NULL_RTX, 1, OPTAB_LIB_WIDEN);
6357 : : }
6358 : :
6359 : : return temp;
6360 : : }
6361 : :
6362 : : /* Expand fork or exec calls. TARGET is the desired target of the
6363 : : call. EXP is the call. FN is the
6364 : : identifier of the actual function. IGNORE is nonzero if the
6365 : : value is to be ignored.
 : :
 : : Returns NULL_RTX when no coverage instrumentation is active, which
 : : tells the caller to expand EXP as an ordinary call. Otherwise the
 : : call is redirected to the matching libgcov __gcov_* wrapper so the
 : : profiling runtime can flush/merge counters around fork/exec. */
6366 : :
6367 : : static rtx
6368 : 87 : expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
6369 : : {
6370 : 87 : tree id, decl;
6371 : 87 : tree call;
6372 : :
6373 : : /* If we are not profiling, just call the function. */
6374 : 87 : if (!coverage_instrumentation_p ())
6375 : : return NULL_RTX;
6376 : :
6377 : : /* Otherwise call the wrapper. This should be equivalent for the rest of
6378 : : compiler, so the code does not diverge, and the wrapper may run the
6379 : : code necessary for keeping the profiling sane. */
6380 : :
 : : /* Map the builtin code to its libgcov wrapper name. */
6381 : 4 : switch (DECL_FUNCTION_CODE (fn))
6382 : : {
6383 : 4 : case BUILT_IN_FORK:
6384 : 4 : id = get_identifier ("__gcov_fork");
6385 : 4 : break;
6386 : :
6387 : 0 : case BUILT_IN_EXECL:
6388 : 0 : id = get_identifier ("__gcov_execl");
6389 : 0 : break;
6390 : :
6391 : 0 : case BUILT_IN_EXECV:
6392 : 0 : id = get_identifier ("__gcov_execv");
6393 : 0 : break;
6394 : :
6395 : 0 : case BUILT_IN_EXECLP:
6396 : 0 : id = get_identifier ("__gcov_execlp");
6397 : 0 : break;
6398 : :
6399 : 0 : case BUILT_IN_EXECLE:
6400 : 0 : id = get_identifier ("__gcov_execle");
6401 : 0 : break;
6402 : :
6403 : 0 : case BUILT_IN_EXECVP:
6404 : 0 : id = get_identifier ("__gcov_execvp");
6405 : 0 : break;
6406 : :
6407 : 0 : case BUILT_IN_EXECVE:
6408 : 0 : id = get_identifier ("__gcov_execve");
6409 : 0 : break;
6410 : :
6411 : 0 : default:
6412 : 0 : gcc_unreachable ();
6413 : : }
6414 : :
 : : /* Synthesize an external declaration for the wrapper, reusing the
 : : type of the original builtin so the call signature matches. */
6415 : 4 : decl = build_decl (DECL_SOURCE_LOCATION (fn),
6416 : 4 : FUNCTION_DECL, id, TREE_TYPE (fn));
6417 : 4 : DECL_EXTERNAL (decl) = 1;
6418 : 4 : TREE_PUBLIC (decl) = 1;
6419 : 4 : DECL_ARTIFICIAL (decl) = 1;
6420 : 4 : TREE_NOTHROW (decl) = 1;
6421 : 4 : DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
6422 : 4 : DECL_VISIBILITY_SPECIFIED (decl) = 1;
 : : /* Rebuild the CALL_EXPR with the wrapper as callee, keeping all
 : : original arguments, then expand it normally. */
6423 : 4 : call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
6424 : 4 : return expand_call (call, target, ignore);
6425 : : }
6426 : :
6427 : :
6428 : :
6429 : : /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
6430 : : the pointer in these functions is void*, the tree optimizers may remove
6431 : : casts. The mode computed in expand_builtin isn't reliable either, due
6432 : : to __sync_bool_compare_and_swap.
6433 : :
6434 : : FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
6435 : : group of builtins. This gives us log2 of the mode size. */
6436 : :
6437 : : static inline machine_mode
6438 : 135349 : get_builtin_sync_mode (int fcode_diff)
6439 : : {
6440 : : /* The size is not negotiable, so ask not to get BLKmode in return
6441 : : if the target indicates that a smaller size would be better. */
 : : /* BITS_PER_UNIT << fcode_diff is the access size in bits of the
 : : FOO_N variant; require () asserts a suitable int mode exists. */
6442 : 135349 : return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
6443 : : }
6444 : :
6445 : : /* Expand the memory expression LOC and return the appropriate memory operand
6446 : : for the builtin_sync operations.
 : :
 : : The returned MEM is volatile, carries the memory-barrier alias set,
 : : and is aligned at least to MODE's natural alignment. */
6447 : :
6448 : : static rtx
6449 : 151988 : get_builtin_sync_mem (tree loc, machine_mode mode)
6450 : : {
6451 : 151988 : rtx addr, mem;
 : : /* LOC may be a pointer or already a dereferenced object; pick the
 : : address space from the pointed-to type when it is a pointer. */
6452 : 151988 : int addr_space = TYPE_ADDR_SPACE (POINTER_TYPE_P (TREE_TYPE (loc))
6453 : : ? TREE_TYPE (TREE_TYPE (loc))
6454 : : : TREE_TYPE (loc));
6455 : 151988 : scalar_int_mode addr_mode = targetm.addr_space.address_mode (addr_space);
6456 : :
6457 : 151988 : addr = expand_expr (loc, NULL_RTX, addr_mode, EXPAND_SUM);
6458 : 151988 : addr = convert_memory_address (addr_mode, addr);
6459 : :
6460 : : /* Note that we explicitly do not want any alias information for this
6461 : : memory, so that we kill all other live memories. Otherwise we don't
6462 : : satisfy the full barrier semantics of the intrinsic. */
6463 : 151988 : mem = gen_rtx_MEM (mode, addr);
6464 : :
6465 : 151988 : set_mem_addr_space (mem, addr_space);
6466 : :
6467 : 151988 : mem = validize_mem (mem);
6468 : :
6469 : : /* The alignment needs to be at least according to that of the mode. */
6470 : 151988 : set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
6471 : : get_pointer_alignment (loc)));
6472 : 151988 : set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
 : : /* Volatile keeps the optimizers from moving or deleting the access. */
6473 : 151988 : MEM_VOLATILE_P (mem) = 1;
6474 : :
6475 : 151988 : return mem;
6476 : : }
6477 : :
6478 : : /* Make sure an argument is in the right mode.
6479 : : EXP is the tree argument.
6480 : : MODE is the mode it should be in.
 : :
 : : Returns EXP expanded as an rtx in MODE, undoing argument promotion
 : : where possible and converting CONST_INTs using EXP's type mode. */
6481 : :
6482 : : static rtx
6483 : 94888 : expand_expr_force_mode (tree exp, machine_mode mode)
6484 : : {
6485 : 94888 : rtx val;
6486 : 94888 : machine_mode old_mode;
6487 : :
6488 : 94888 : if (TREE_CODE (exp) == SSA_NAME
6489 : 94888 : && TYPE_MODE (TREE_TYPE (exp)) != mode)
6490 : : {
6491 : : /* Undo argument promotion if possible, as combine might not
6492 : : be able to do it later due to MEM_VOLATILE_P uses in the
6493 : : patterns. */
6494 : 25 : gimple *g = get_gimple_for_ssa_name (exp);
6495 : 25 : if (g && gimple_assign_cast_p (g))
6496 : : {
6497 : 6 : tree rhs = gimple_assign_rhs1 (g);
6498 : 6 : tree_code code = gimple_assign_rhs_code (g);
 : : /* Only peel a widening integral conversion whose source is
 : : already in the wanted mode; narrowing would lose bits. */
6499 : 6 : if (CONVERT_EXPR_CODE_P (code)
6500 : 6 : && TYPE_MODE (TREE_TYPE (rhs)) == mode
6501 : 6 : && INTEGRAL_TYPE_P (TREE_TYPE (exp))
6502 : 6 : && INTEGRAL_TYPE_P (TREE_TYPE (rhs))
6503 : 12 : && (TYPE_PRECISION (TREE_TYPE (exp))
6504 : 6 : > TYPE_PRECISION (TREE_TYPE (rhs))))
6505 : : exp = rhs;
6506 : : }
6507 : : }
6508 : :
6509 : 94888 : val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
6510 : : /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6511 : : of CONST_INTs, where we know the old_mode only from the call argument. */
6512 : :
6513 : 94888 : old_mode = GET_MODE (val);
6514 : 94888 : if (old_mode == VOIDmode)
6515 : 45279 : old_mode = TYPE_MODE (TREE_TYPE (exp));
6516 : 94888 : val = convert_modes (mode, old_mode, val, 1);
6517 : 94888 : return val;
6518 : : }
6519 : :
6520 : :
6521 : : /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
6522 : : EXP is the CALL_EXPR. CODE is the rtx code
6523 : : that corresponds to the arithmetic or logical operation from the name;
6524 : : an exception here is that NOT actually means NAND. TARGET is an optional
6525 : : place for us to store the results; AFTER is true if this is the
6526 : : fetch_and_xxx form. */
6527 : :
6528 : : static rtx
6529 : 3997 : expand_builtin_sync_operation (machine_mode mode, tree exp,
6530 : : enum rtx_code code, bool after,
6531 : : rtx target)
6532 : : {
6533 : 3997 : rtx val, mem;
6534 : 3997 : location_t loc = EXPR_LOCATION (exp);
6535 : :
 : : /* __sync_*_nand changed meaning in GCC 4.4; warn once per variant. */
6536 : 3997 : if (code == NOT && warn_sync_nand)
6537 : : {
6538 : 517 : tree fndecl = get_callee_fndecl (exp);
6539 : 517 : enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6540 : :
 : : /* Static flags so each note is issued at most once per run. */
6541 : 517 : static bool warned_f_a_n, warned_n_a_f;
6542 : :
6543 : 517 : switch (fcode)
6544 : : {
6545 : 434 : case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6546 : 434 : case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6547 : 434 : case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6548 : 434 : case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6549 : 434 : case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6550 : 434 : if (warned_f_a_n)
6551 : : break;
6552 : :
6553 : 30 : fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
6554 : 30 : inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6555 : 30 : warned_f_a_n = true;
6556 : 30 : break;
6557 : :
6558 : 83 : case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6559 : 83 : case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6560 : 83 : case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6561 : 83 : case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6562 : 83 : case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6563 : 83 : if (warned_n_a_f)
6564 : : break;
6565 : :
6566 : 16 : fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
6567 : 16 : inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6568 : 16 : warned_n_a_f = true;
6569 : 16 : break;
6570 : :
6571 : 0 : default:
6572 : 0 : gcc_unreachable ();
6573 : : }
6574 : : }
6575 : :
6576 : : /* Expand the operands. */
6577 : 3997 : mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6578 : 3997 : val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6579 : :
 : : /* __sync operations always have full seq-cst barrier semantics. */
6580 : 3997 : return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
6581 : 3997 : after);
6582 : : }
6583 : :
6584 : : /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
6585 : : intrinsics. EXP is the CALL_EXPR. IS_BOOL is
6586 : : true if this is the boolean form. TARGET is a place for us to store the
6587 : : results; this is NOT optional if IS_BOOL is true.
 : :
 : : Returns TARGET on success, NULL_RTX if the target cannot expand the
 : : compare-and-swap inline. */
6588 : :
6589 : : static rtx
6590 : 456 : expand_builtin_compare_and_swap (machine_mode mode, tree exp,
6591 : : bool is_bool, rtx target)
6592 : : {
6593 : 456 : rtx old_val, new_val, mem;
6594 : 456 : rtx *pbool, *poval;
6595 : :
6596 : : /* Expand the operands. */
6597 : 456 : mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6598 : 456 : old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6599 : 456 : new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
6600 : :
 : : /* Request only the result the caller actually consumes: the success
 : : flag for the bool form, the old value otherwise; neither if the
 : : result is ignored (TARGET == const0_rtx). */
6601 : 456 : pbool = poval = NULL;
6602 : 456 : if (target != const0_rtx)
6603 : : {
6604 : 430 : if (is_bool)
6605 : : pbool = &target;
6606 : : else
6607 : 230 : poval = &target;
6608 : : }
6609 : 456 : if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
6610 : : false, MEMMODEL_SYNC_SEQ_CST,
6611 : : MEMMODEL_SYNC_SEQ_CST))
6612 : : return NULL_RTX;
6613 : :
6614 : 454 : return target;
6615 : : }
6616 : :
6617 : : /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
6618 : : general form is actually an atomic exchange, and some targets only
6619 : : support a reduced form with the second argument being a constant 1.
6620 : : EXP is the CALL_EXPR; TARGET is an optional place for us to store
6621 : : the results. */
6622 : :
6623 : : static rtx
6624 : 326 : expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
6625 : : rtx target)
6626 : : {
6627 : 326 : rtx val, mem;
6628 : :
6629 : : /* Expand the operands. */
6630 : 326 : mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6631 : 326 : val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6632 : :
 : : /* The helper handles the constant-1 reduced form where needed. */
6633 : 326 : return expand_sync_lock_test_and_set (target, mem, val);
6634 : : }
6635 : :
6636 : : /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR.
 : : Stores zero with sync release semantics; the 'true' argument lets
 : : expand_atomic_store fall back to a lock-release pattern. */
6637 : :
6638 : : static rtx
6639 : 158 : expand_builtin_sync_lock_release (machine_mode mode, tree exp)
6640 : : {
6641 : 158 : rtx mem;
6642 : :
6643 : : /* Expand the operands. */
6644 : 158 : mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6645 : :
6646 : 158 : return expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
6647 : : }
6648 : :
6649 : : /* Given an integer representing an ``enum memmodel'', verify its
6650 : : correctness and return the memory model enum.
 : :
 : : Any value that is not a valid compile-time model degrades
 : : conservatively to MEMMODEL_SEQ_CST rather than erroring out. */
6651 : :
6652 : : static enum memmodel
6653 : 170442 : get_memmodel (tree exp)
6654 : : {
6655 : : /* If the parameter is not a constant, it's a run time value so we'll just
6656 : : convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
6657 : 170442 : if (TREE_CODE (exp) != INTEGER_CST)
6658 : : return MEMMODEL_SEQ_CST;
6659 : :
6660 : 169599 : rtx op = expand_normal (exp);
6661 : :
6662 : 169599 : unsigned HOST_WIDE_INT val = INTVAL (op);
 : : /* Let the target scrub/validate target-specific model bits first. */
6663 : 169599 : if (targetm.memmodel_check)
6664 : 169599 : val = targetm.memmodel_check (val);
6665 : 0 : else if (val & ~MEMMODEL_MASK)
6666 : : return MEMMODEL_SEQ_CST;
6667 : :
6668 : : /* Should never see a user explicit SYNC memory model, so >= LAST works. */
6669 : 169599 : if (memmodel_base (val) >= MEMMODEL_LAST)
6670 : : return MEMMODEL_SEQ_CST;
6671 : :
6672 : : /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
6673 : : be conservative and promote consume to acquire. */
6674 : 169598 : if (val == MEMMODEL_CONSUME)
6675 : 750 : val = MEMMODEL_ACQUIRE;
6676 : :
6677 : 169598 : return (enum memmodel) val;
6678 : : }
6679 : :
6680 : : /* Expand the __atomic_exchange intrinsic:
6681 : : TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
6682 : : EXP is the CALL_EXPR.
6683 : : TARGET is an optional place for us to store the results.
 : : Returns NULL_RTX when atomics may not be inlined, so the caller
 : : emits a library call instead. */
6684 : :
6685 : : static rtx
6686 : 2811 : expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
6687 : : {
6688 : 2811 : rtx val, mem;
6689 : 2811 : enum memmodel model;
6690 : :
6691 : 2811 : model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6692 : :
6693 : 2811 : if (!flag_inline_atomics)
6694 : : return NULL_RTX;
6695 : :
6696 : : /* Expand the operands. */
6697 : 2760 : mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6698 : 2760 : val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6699 : :
6700 : 2760 : return expand_atomic_exchange (target, mem, val, model);
6701 : : }
6702 : :
6703 : : /* Expand the __atomic_compare_exchange intrinsic:
6704 : : bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
6705 : : TYPE desired, BOOL weak,
6706 : : enum memmodel success,
6707 : : enum memmodel failure)
6708 : : EXP is the CALL_EXPR.
6709 : : TARGET is an optional place for us to store the results. */
6710 : :
6711 : : static rtx
6712 : 9204 : expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
6713 : : rtx target)
6714 : : {
6715 : 9204 : rtx expect, desired, mem, oldval;
6716 : 9204 : rtx_code_label *label;
6717 : 9204 : tree weak;
6718 : 9204 : bool is_weak;
6719 : :
6720 : 9204 : memmodel success = get_memmodel (CALL_EXPR_ARG (exp, 4));
6721 : 9204 : memmodel failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
6722 : :
 : : /* C11 requires the failure model to be no stronger than success and
 : : never a release-flavored model; sanitize invalid combinations. */
6723 : 9204 : if (failure > success)
6724 : 22 : success = MEMMODEL_SEQ_CST;
6725 : :
6726 : 9204 : if (is_mm_release (failure) || is_mm_acq_rel (failure))
6727 : : {
6728 : : failure = MEMMODEL_SEQ_CST;
6729 : : success = MEMMODEL_SEQ_CST;
6730 : : }
6731 : :
6732 : :
6733 : 9204 : if (!flag_inline_atomics)
6734 : : return NULL_RTX;
6735 : :
6736 : : /* Expand the operands. */
6737 : 9153 : mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6738 : :
 : : /* EXPECT is passed by address; form a MEM for the pointed-to slot. */
6739 : 9153 : expect = expand_normal (CALL_EXPR_ARG (exp, 1));
6740 : 9153 : expect = convert_memory_address (Pmode, expect);
6741 : 9153 : expect = gen_rtx_MEM (mode, expect);
6742 : 9153 : desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
6743 : :
6744 : 9153 : weak = CALL_EXPR_ARG (exp, 3);
6745 : 9153 : is_weak = false;
6746 : 9153 : if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
6747 : 9153 : is_weak = true;
6748 : :
6749 : 9153 : if (target == const0_rtx)
6750 : 282 : target = NULL;
6751 : :
6752 : : /* Lest the rtl backend create a race condition with an improper store
6753 : : to memory, always create a new pseudo for OLDVAL. */
6754 : 9153 : oldval = NULL;
6755 : :
6756 : 9153 : if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
6757 : : is_weak, success, failure))
6758 : : return NULL_RTX;
6759 : :
6760 : : /* Conditionally store back to EXPECT, lest we create a race condition
6761 : : with an improper store to memory. */
6762 : : /* ??? With a rearrangement of atomics at the gimple level, we can handle
6763 : : the normal case where EXPECT is totally private, i.e. a register. At
6764 : : which point the store can be unconditional. */
6765 : 7410 : label = gen_label_rtx ();
6766 : 7410 : emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
6767 : 7410 : GET_MODE (target), 1, label);
6768 : 7410 : emit_move_insn (expect, oldval);
6769 : 7410 : emit_label (label);
6770 : :
6771 : 7410 : return target;
6772 : : }
6773 : :
6774 : : /* Helper function for expand_ifn_atomic_compare_exchange - expand
6775 : : internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
6776 : : call. The weak parameter must be dropped to match the expected parameter
6777 : : list and the expected argument changed from value to pointer to memory
6778 : : slot. */
6779 : :
6780 : : static void
6781 : 0 : expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
6782 : : {
6783 : 0 : unsigned int z;
6784 : 0 : vec<tree, va_gc> *vec;
6785 : :
6786 : 0 : vec_alloc (vec, 5);
6787 : 0 : vec->quick_push (gimple_call_arg (call, 0));
 : : /* Spill the by-value EXPECTED argument into a stack temporary so
 : : it can be passed by address, as the library function requires. */
6788 : 0 : tree expected = gimple_call_arg (call, 1);
6789 : 0 : rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
6790 : 0 : TREE_TYPE (expected));
6791 : 0 : rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
6792 : 0 : if (expd != x)
6793 : 0 : emit_move_insn (x, expd);
6794 : 0 : tree v = make_tree (TREE_TYPE (expected), x);
6795 : 0 : vec->quick_push (build1 (ADDR_EXPR,
6796 : 0 : build_pointer_type (TREE_TYPE (expected)), v));
6797 : 0 : vec->quick_push (gimple_call_arg (call, 2));
6798 : : /* Skip the boolean weak parameter. */
6799 : 0 : for (z = 4; z < 6; z++)
6800 : 0 : vec->quick_push (gimple_call_arg (call, z));
6801 : : /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}. */
6802 : 0 : unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
6803 : 0 : gcc_assert (bytes_log2 < 5);
6804 : 0 : built_in_function fncode
6805 : : = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
6806 : : + bytes_log2);
6807 : 0 : tree fndecl = builtin_decl_explicit (fncode);
6808 : 0 : tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
6809 : : fndecl);
6810 : 0 : tree exp = build_call_vec (boolean_type_node, fn, vec);
6811 : 0 : tree lhs = gimple_call_lhs (call);
6812 : 0 : rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
 : : /* The IFN's lhs is a complex pair {success, oldval}; reassemble it
 : : from the call's bool result and the stack slot. */
6813 : 0 : if (lhs)
6814 : : {
6815 : 0 : rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6816 : 0 : if (GET_MODE (boolret) != mode)
6817 : 0 : boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6818 : 0 : x = force_reg (mode, x);
6819 : 0 : write_complex_part (target, boolret, true, true);
6820 : 0 : write_complex_part (target, x, false, false);
6821 : : }
6822 : 0 : }
6823 : :
6824 : : /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function.
 : : Argument 3 packs the access size in its low 8 bits and the weak
 : : flag in bit 8; arguments 4/5 are the success/failure models. */
6825 : :
6826 : : void
6827 : 13798 : expand_ifn_atomic_compare_exchange (gcall *call)
6828 : : {
6829 : 13798 : int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
6830 : 13798 : gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
6831 : 13798 : machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
6832 : :
6833 : 13798 : memmodel success = get_memmodel (gimple_call_arg (call, 4));
6834 : 13798 : memmodel failure = get_memmodel (gimple_call_arg (call, 5));
6835 : :
 : : /* Same sanitization as expand_builtin_atomic_compare_exchange. */
6836 : 13798 : if (failure > success)
6837 : 0 : success = MEMMODEL_SEQ_CST;
6838 : :
6839 : 13798 : if (is_mm_release (failure) || is_mm_acq_rel (failure))
6840 : : {
6841 : : failure = MEMMODEL_SEQ_CST;
6842 : : success = MEMMODEL_SEQ_CST;
6843 : : }
6844 : :
6845 : 13798 : if (!flag_inline_atomics)
6846 : : {
6847 : 0 : expand_ifn_atomic_compare_exchange_into_call (call, mode);
6848 : 0 : return;
6849 : : }
6850 : :
6851 : : /* Expand the operands. */
6852 : 13798 : rtx mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
6853 : :
6854 : 13798 : rtx expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
6855 : 13798 : rtx desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
6856 : :
6857 : 13798 : bool is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
6858 : :
6859 : 13798 : rtx boolret = NULL;
6860 : 13798 : rtx oldval = NULL;
6861 : :
6862 : 13798 : if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
6863 : : is_weak, success, failure))
6864 : : {
 : : /* Target can't do it inline; fall back to the library call. */
6865 : 0 : expand_ifn_atomic_compare_exchange_into_call (call, mode);
6866 : 0 : return;
6867 : : }
6868 : :
 : : /* Store the {success, oldval} pair into the complex-typed lhs. */
6869 : 13798 : tree lhs = gimple_call_lhs (call);
6870 : 13798 : if (lhs)
6871 : : {
6872 : 13598 : rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6873 : 13598 : if (GET_MODE (boolret) != mode)
6874 : 12108 : boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6875 : 13598 : write_complex_part (target, boolret, true, true);
6876 : 13598 : write_complex_part (target, oldval, false, false);
6877 : : }
6878 : : }
6879 : :
6880 : : /* Expand the __atomic_load intrinsic:
6881 : : TYPE __atomic_load (TYPE *object, enum memmodel)
6882 : : EXP is the CALL_EXPR.
6883 : : TARGET is an optional place for us to store the results.
 : : Returns NULL_RTX when inlining atomics is disabled. */
6884 : :
6885 : : static rtx
6886 : 70922 : expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
6887 : : {
6888 : 70922 : memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 1));
 : : /* Release-flavored models are invalid for a load; use seq-cst. */
6889 : 70922 : if (is_mm_release (model) || is_mm_acq_rel (model))
6890 : : model = MEMMODEL_SEQ_CST;
6891 : :
6892 : 70922 : if (!flag_inline_atomics)
6893 : : return NULL_RTX;
6894 : :
6895 : : /* Expand the operand. */
6896 : 70881 : rtx mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6897 : :
6898 : 70881 : return expand_atomic_load (target, mem, model);
6899 : : }
6900 : :
6901 : :
6902 : : /* Expand the __atomic_store intrinsic:
6903 : : void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
6904 : : EXP is the CALL_EXPR.
6905 : : TARGET is an optional place for us to store the results. */
6906 : :
6907 : : static rtx
6908 : 17481 : expand_builtin_atomic_store (machine_mode mode, tree exp)
6909 : : {
6910 : 17481 : memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6911 : 17481 : if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
6912 : 1585 : || is_mm_release (model)))
6913 : : model = MEMMODEL_SEQ_CST;
6914 : :
6915 : 17481 : if (!flag_inline_atomics)
6916 : : return NULL_RTX;
6917 : :
6918 : : /* Expand the operands. */
6919 : 17450 : rtx mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6920 : 17450 : rtx val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6921 : :
6922 : 17450 : return expand_atomic_store (mem, val, model, false);
6923 : : }
6924 : :
6925 : : /* Expand the __atomic_fetch_XXX intrinsic:
6926 : : TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
6927 : : EXP is the CALL_EXPR.
6928 : : TARGET is an optional place for us to store the results.
6929 : : CODE is the operation, PLUS, MINUS, AND, XOR, IOR, or NOT (NAND).
6930 : : FETCH_AFTER is true if returning the result of the operation.
6931 : : FETCH_AFTER is false if returning the value before the operation.
6932 : : IGNORE is true if the result is not used.
6933 : : EXT_CALL is the correct builtin for an external call if this cannot be
6934 : : resolved to an instruction sequence. */
6935 : :
6936 : : static rtx
6937 : 29964 : expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
6938 : : enum rtx_code code, bool fetch_after,
6939 : : bool ignore, enum built_in_function ext_call)
6940 : : {
6941 : 29964 : rtx val, mem, ret;
6942 : 29964 : enum memmodel model;
6943 : 29964 : tree fndecl;
6944 : 29964 : tree addr;
6945 : :
6946 : 29964 : model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6947 : :
6948 : : /* Expand the operands. */
6949 : 29964 : mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6950 : 29964 : val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6951 : :
6952 : : /* Only try generating instructions if inlining is turned on. */
6953 : 29964 : if (flag_inline_atomics)
6954 : : {
6955 : 29421 : ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
6956 : 29421 : if (ret)
6957 : : return ret;
6958 : : }
6959 : :
6960 : : /* Return if a different routine isn't needed for the library call. */
6961 : 1142 : if (ext_call == BUILT_IN_NONE)
6962 : : return NULL_RTX;
6963 : :
6964 : : /* Change the call to the specified function. */
 : : /* Temporarily redirect EXP's callee to EXT_CALL (e.g. turning an
 : : OP_fetch into a fetch_OP library call), then fix up the result
 : : arithmetically below. */
6965 : 279 : fndecl = get_callee_fndecl (exp);
6966 : 279 : addr = CALL_EXPR_FN (exp);
6967 : 279 : STRIP_NOPS (addr);
6968 : :
6969 : 279 : gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
6970 : 279 : TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
6971 : :
6972 : : /* If we will emit code after the call, the call cannot be a tail call.
6973 : : If it is emitted as a tail call, a barrier is emitted after it, and
6974 : : then all trailing code is removed. */
6975 : 279 : if (!ignore)
6976 : 182 : CALL_EXPR_TAILCALL (exp) = 0;
6977 : :
6978 : : /* Expand the call here so we can emit trailing code. */
6979 : 279 : ret = expand_call (exp, target, ignore);
6980 : :
6981 : : /* Replace the original function just in case it matters. */
6982 : 279 : TREE_OPERAND (addr, 0) = fndecl;
6983 : :
6984 : : /* Then issue the arithmetic correction to return the right result. */
6985 : 279 : if (!ignore)
6986 : : {
 : : /* NAND needs AND followed by NOT; other ops re-apply CODE. */
6987 : 182 : if (code == NOT)
6988 : : {
6989 : 31 : ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
6990 : : OPTAB_LIB_WIDEN);
6991 : 31 : ret = expand_simple_unop (mode, NOT, ret, target, true);
6992 : : }
6993 : : else
6994 : 151 : ret = expand_simple_binop (mode, code, ret, val, target, true,
6995 : : OPTAB_LIB_WIDEN);
6996 : : }
6997 : : return ret;
6998 : : }
6999 : :
7000 : : /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function.
 : : Arguments: ptr, bit index, flag (nonzero: produce the tested bit,
 : : zero: produce the masked old value), optional memmodel, and a
 : : fallback function address for the out-of-line path. */
7001 : :
7002 : : void
7003 : 427 : expand_ifn_atomic_bit_test_and (gcall *call)
7004 : : {
7005 : 427 : tree ptr = gimple_call_arg (call, 0);
7006 : 427 : tree bit = gimple_call_arg (call, 1);
7007 : 427 : tree flag = gimple_call_arg (call, 2);
7008 : 427 : tree lhs = gimple_call_lhs (call);
7009 : 427 : enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
7010 : 427 : machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
7011 : 427 : enum rtx_code code;
7012 : 427 : optab optab;
7013 : 427 : class expand_operand ops[5];
7014 : :
7015 : 427 : gcc_assert (flag_inline_atomics);
7016 : :
 : : /* A 5-argument call is the __atomic_ form carrying its own model. */
7017 : 427 : if (gimple_call_num_args (call) == 5)
7018 : 296 : model = get_memmodel (gimple_call_arg (call, 3));
7019 : :
7020 : 427 : rtx mem = get_builtin_sync_mem (ptr, mode);
7021 : 427 : rtx val = expand_expr_force_mode (bit, mode);
7022 : :
7023 : 427 : switch (gimple_call_internal_fn (call))
7024 : : {
7025 : : case IFN_ATOMIC_BIT_TEST_AND_SET:
7026 : : code = IOR;
7027 : : optab = atomic_bit_test_and_set_optab;
7028 : : break;
7029 : : case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
7030 : : code = XOR;
7031 : : optab = atomic_bit_test_and_complement_optab;
7032 : : break;
7033 : : case IFN_ATOMIC_BIT_TEST_AND_RESET:
7034 : : code = AND;
7035 : : optab = atomic_bit_test_and_reset_optab;
7036 : : break;
7037 : 0 : default:
7038 : 0 : gcc_unreachable ();
7039 : : }
7040 : :
 : : /* No lhs: a plain atomic RMW of the mask suffices if available. */
7041 : 427 : if (lhs == NULL_TREE)
7042 : : {
7043 : 0 : rtx val2 = expand_simple_binop (mode, ASHIFT, const1_rtx,
7044 : : val, NULL_RTX, true, OPTAB_DIRECT);
7045 : 0 : if (code == AND)
7046 : 0 : val2 = expand_simple_unop (mode, NOT, val2, NULL_RTX, true);
7047 : 0 : if (expand_atomic_fetch_op (const0_rtx, mem, val2, code, model, false))
7048 : 427 : return;
7049 : : }
7050 : :
7051 : 427 : rtx target;
7052 : 427 : if (lhs)
7053 : 427 : target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
7054 : : else
7055 : 0 : target = gen_reg_rtx (mode);
 : : /* Prefer the target's dedicated bit-test-and-* pattern. */
7056 : 427 : enum insn_code icode = direct_optab_handler (optab, mode);
7057 : 427 : gcc_assert (icode != CODE_FOR_nothing);
7058 : 427 : create_output_operand (&ops[0], target, mode);
7059 : 427 : create_fixed_operand (&ops[1], mem);
7060 : 427 : create_convert_operand_to (&ops[2], val, mode, true);
7061 : 427 : create_integer_operand (&ops[3], model);
7062 : 427 : create_integer_operand (&ops[4], integer_onep (flag));
7063 : 427 : if (maybe_expand_insn (icode, 5, ops))
7064 : : return;
7065 : :
 : : /* Pattern unavailable: fall back to fetch_op (or a library call)
 : : and extract the requested bit/mask from the old value. */
7066 : 0 : rtx bitval = val;
7067 : 0 : val = expand_simple_binop (mode, ASHIFT, const1_rtx,
7068 : : val, NULL_RTX, true, OPTAB_DIRECT);
7069 : 0 : rtx maskval = val;
7070 : 0 : if (code == AND)
7071 : 0 : val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
7072 : 0 : rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
7073 : : code, model, false);
7074 : 0 : if (!result)
7075 : : {
7076 : 0 : bool is_atomic = gimple_call_num_args (call) == 5;
7077 : 0 : tree tcall = gimple_call_arg (call, 3 + is_atomic);
7078 : 0 : tree fndecl = gimple_call_addr_fndecl (tcall);
7079 : 0 : tree type = TREE_TYPE (TREE_TYPE (fndecl));
7080 : 0 : tree exp = build_call_nary (type, tcall, 2 + is_atomic, ptr,
7081 : : make_tree (type, val),
7082 : : is_atomic
7083 : 0 : ? gimple_call_arg (call, 3)
7084 : : : integer_zero_node);
7085 : 0 : result = expand_builtin (exp, gen_reg_rtx (mode), NULL_RTX,
7086 : : mode, !lhs);
7087 : : }
7088 : 0 : if (!lhs)
7089 : : return;
7090 : 0 : if (integer_onep (flag))
7091 : : {
7092 : 0 : result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
7093 : : NULL_RTX, true, OPTAB_DIRECT);
7094 : 0 : result = expand_simple_binop (mode, AND, result, const1_rtx, target,
7095 : : true, OPTAB_DIRECT);
7096 : : }
7097 : : else
7098 : 0 : result = expand_simple_binop (mode, AND, result, maskval, target, true,
7099 : : OPTAB_DIRECT);
7100 : 0 : if (result != target)
7101 : 0 : emit_move_insn (target, result);
7102 : : }
7103 : :
7104 : : /* Expand IFN_ATOMIC_*_FETCH_CMP_0 internal function.
 : : Arguments: comparison kind, ptr, operand, optional memmodel, and a
 : : fallback function address. The lhs receives the boolean result of
 : : comparing the post-operation value against zero. */
7105 : :
7106 : : void
7107 : 2303 : expand_ifn_atomic_op_fetch_cmp_0 (gcall *call)
7108 : : {
7109 : 2303 : tree cmp = gimple_call_arg (call, 0);
7110 : 2303 : tree ptr = gimple_call_arg (call, 1);
7111 : 2303 : tree arg = gimple_call_arg (call, 2);
7112 : 2303 : tree lhs = gimple_call_lhs (call);
7113 : 2303 : enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
7114 : 2303 : machine_mode mode = TYPE_MODE (TREE_TYPE (cmp));
7115 : 2303 : optab optab;
7116 : 2303 : rtx_code code;
7117 : 2303 : class expand_operand ops[5];
7118 : :
7119 : 2303 : gcc_assert (flag_inline_atomics);
7120 : :
 : : /* A 5-argument call is the __atomic_ form carrying its own model. */
7121 : 2303 : if (gimple_call_num_args (call) == 5)
7122 : 1895 : model = get_memmodel (gimple_call_arg (call, 3));
7123 : :
7124 : 2303 : rtx mem = get_builtin_sync_mem (ptr, mode);
7125 : 2303 : rtx op = expand_expr_force_mode (arg, mode);
7126 : :
7127 : 2303 : switch (gimple_call_internal_fn (call))
7128 : : {
7129 : : case IFN_ATOMIC_ADD_FETCH_CMP_0:
7130 : : code = PLUS;
7131 : : optab = atomic_add_fetch_cmp_0_optab;
7132 : : break;
7133 : : case IFN_ATOMIC_SUB_FETCH_CMP_0:
7134 : : code = MINUS;
7135 : : optab = atomic_sub_fetch_cmp_0_optab;
7136 : : break;
7137 : : case IFN_ATOMIC_AND_FETCH_CMP_0:
7138 : : code = AND;
7139 : : optab = atomic_and_fetch_cmp_0_optab;
7140 : : break;
7141 : : case IFN_ATOMIC_OR_FETCH_CMP_0:
7142 : : code = IOR;
7143 : : optab = atomic_or_fetch_cmp_0_optab;
7144 : : break;
7145 : : case IFN_ATOMIC_XOR_FETCH_CMP_0:
7146 : : code = XOR;
7147 : : optab = atomic_xor_fetch_cmp_0_optab;
7148 : : break;
7149 : 0 : default:
7150 : 0 : gcc_unreachable ();
7151 : : }
7152 : :
 : : /* Decode the comparison against zero encoded in argument 0. */
7153 : 2303 : enum rtx_code comp = UNKNOWN;
7154 : 2303 : switch (tree_to_uhwi (cmp))
7155 : : {
7156 : : case ATOMIC_OP_FETCH_CMP_0_EQ: comp = EQ; break;
7157 : : case ATOMIC_OP_FETCH_CMP_0_NE: comp = NE; break;
7158 : : case ATOMIC_OP_FETCH_CMP_0_GT: comp = GT; break;
7159 : : case ATOMIC_OP_FETCH_CMP_0_GE: comp = GE; break;
7160 : : case ATOMIC_OP_FETCH_CMP_0_LT: comp = LT; break;
7161 : : case ATOMIC_OP_FETCH_CMP_0_LE: comp = LE; break;
7162 : 0 : default: gcc_unreachable ();
7163 : : }
7164 : :
7165 : 2303 : rtx target;
7166 : 2303 : if (lhs == NULL_TREE)
7167 : 0 : target = gen_reg_rtx (TYPE_MODE (boolean_type_node));
7168 : : else
7169 : 2303 : target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
 : : /* Prefer the target's fused op-fetch-and-compare pattern. */
7170 : 2303 : enum insn_code icode = direct_optab_handler (optab, mode);
7171 : 2303 : gcc_assert (icode != CODE_FOR_nothing);
7172 : 2303 : create_output_operand (&ops[0], target, TYPE_MODE (boolean_type_node));
7173 : 2303 : create_fixed_operand (&ops[1], mem);
7174 : 2303 : create_convert_operand_to (&ops[2], op, mode, true);
7175 : 2303 : create_integer_operand (&ops[3], model);
7176 : 2303 : create_integer_operand (&ops[4], comp);
7177 : 2303 : if (maybe_expand_insn (icode, 5, ops))
7178 : 2271 : return;
7179 : :
 : : /* Pattern unavailable: compute op-fetch (or call the library
 : : fallback) and emit the comparison separately. */
7180 : 32 : rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, op,
7181 : : code, model, true);
7182 : 32 : if (!result)
7183 : : {
7184 : 0 : bool is_atomic = gimple_call_num_args (call) == 5;
7185 : 0 : tree tcall = gimple_call_arg (call, 3 + is_atomic);
7186 : 0 : tree fndecl = gimple_call_addr_fndecl (tcall);
7187 : 0 : tree type = TREE_TYPE (TREE_TYPE (fndecl));
7188 : 0 : tree exp = build_call_nary (type, tcall,
7189 : : 2 + is_atomic, ptr, arg,
7190 : : is_atomic
7191 : 0 : ? gimple_call_arg (call, 3)
7192 : : : integer_zero_node);
7193 : 0 : result = expand_builtin (exp, gen_reg_rtx (mode), NULL_RTX,
7194 : : mode, !lhs);
7195 : : }
7196 : :
7197 : 32 : if (lhs)
7198 : : {
7199 : 32 : result = emit_store_flag_force (target, comp, result, const0_rtx, mode,
7200 : : 0, 1);
7201 : 32 : if (result != target)
7202 : 32 : emit_move_insn (target, result);
7203 : : }
7204 : : }
7205 : :
7206 : : /* Expand an atomic clear operation.
7207 : : void __atomic_clear (BOOL *obj, enum memmodel)
7208 : : EXP is the call expression. */
7209 : :
7210 : : static rtx
7211 : 57 : expand_builtin_atomic_clear (tree exp)
7212 : : {
7213 : 57 : machine_mode mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
7214 : 57 : rtx mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7215 : 57 : memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 1));
7216 : :
 : : /* Acquire-flavored models are invalid for a clear (a pure store);
 : : degrade them to seq-cst. */
7217 : 57 : if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
7218 : : model = MEMMODEL_SEQ_CST;
7219 : :
7220 : : /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
7221 : : Failing that, a store is issued by __atomic_store. The only way this can
7222 : : fail is if the bool type is larger than a word size. Unlikely, but
7223 : : handle it anyway for completeness. Assume a single threaded model since
7224 : : there is no atomic support in this case, and no barriers are required. */
7225 : 57 : rtx ret = expand_atomic_store (mem, const0_rtx, model, true);
7226 : 57 : if (!ret)
7227 : 0 : emit_move_insn (mem, const0_rtx);
7228 : 57 : return const0_rtx;
7229 : : }
7230 : :
/* Expand an atomic test_and_set operation.
	bool __atomic_test_and_set (BOOL *obj, enum memmodel)
   EXP is the call expression.  */
7234 : :
7235 : : static rtx
7236 : 258 : expand_builtin_atomic_test_and_set (tree exp, rtx target)
7237 : : {
7238 : 258 : rtx mem;
7239 : 258 : enum memmodel model;
7240 : 258 : machine_mode mode;
7241 : :
7242 : 258 : mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
7243 : 258 : mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7244 : 258 : model = get_memmodel (CALL_EXPR_ARG (exp, 1));
7245 : :
7246 : 258 : return expand_atomic_test_and_set (target, mem, model);
7247 : : }
7248 : :
7249 : :
7250 : : /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
7251 : : this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
7252 : :
static tree
fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
{
  int size;
  machine_mode mode;
  unsigned int mode_align, type_align;

  /* Without a compile-time constant size the answer cannot be "always";
     return NULL_TREE so the call is left for the runtime.  */
  if (TREE_CODE (arg0) != INTEGER_CST)
    return NULL_TREE;

  /* We need a corresponding integer mode for the access to be lock-free. */
  size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
  if (!int_mode_for_size (size, 0).exists (&mode))
    return boolean_false_node;

  mode_align = GET_MODE_ALIGNMENT (mode);

  if (TREE_CODE (arg1) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));

      /* Either this argument is null, or it's a fake pointer encoding
	 the alignment of the object.  The lowest set bit gives the
	 guaranteed alignment in bytes.  */
      val = least_bit_hwi (val);
      val *= BITS_PER_UNIT;

      /* A null pointer (val == 0) means "typical alignment": assume the
	 mode's natural alignment.  */
      if (val == 0 || mode_align < val)
	type_align = mode_align;
      else
	type_align = val;
    }
  else
    {
      tree ttype = TREE_TYPE (arg1);

      /* This function is usually invoked and folded immediately by the front
	 end before anything else has a chance to look at it.  The pointer
	 parameter at this point is usually cast to a void *, so check for that
	 and look past the cast.  */
      if (CONVERT_EXPR_P (arg1)
	  && POINTER_TYPE_P (ttype)
	  && VOID_TYPE_P (TREE_TYPE (ttype))
	  && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
	arg1 = TREE_OPERAND (arg1, 0);

      ttype = TREE_TYPE (arg1);
      gcc_assert (POINTER_TYPE_P (ttype));

      /* Get the underlying type of the object. */
      ttype = TREE_TYPE (ttype);
      type_align = TYPE_ALIGN (ttype);
    }

  /* If the object has smaller alignment, the lock free routines cannot
     be used. */
  if (type_align < mode_align)
    return boolean_false_node;

  /* Check if a compare_and_swap pattern exists for the mode which represents
     the required size.  The pattern is not allowed to fail, so the existence
     of the pattern indicates support is present.  Also require that an
     atomic load exists for the required size. */
  if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
    return boolean_true_node;
  else
    return boolean_false_node;
}
7320 : :
7321 : : /* Return true if the parameters to call EXP represent an object which will
7322 : : always generate lock free instructions. The first argument represents the
7323 : : size of the object, and the second parameter is a pointer to the object
7324 : : itself. If NULL is passed for the object, then the result is based on
7325 : : typical alignment for an object of the specified size. Otherwise return
7326 : : false. */
7327 : :
7328 : : static rtx
7329 : 1 : expand_builtin_atomic_always_lock_free (tree exp)
7330 : : {
7331 : 1 : tree size;
7332 : 1 : tree arg0 = CALL_EXPR_ARG (exp, 0);
7333 : 1 : tree arg1 = CALL_EXPR_ARG (exp, 1);
7334 : :
7335 : 1 : if (TREE_CODE (arg0) != INTEGER_CST)
7336 : : {
7337 : 1 : error ("non-constant argument 1 to %qs", "__atomic_always_lock_free");
7338 : 1 : return const0_rtx;
7339 : : }
7340 : :
7341 : 0 : size = fold_builtin_atomic_always_lock_free (arg0, arg1);
7342 : 0 : if (size == boolean_true_node)
7343 : 0 : return const1_rtx;
7344 : 0 : return const0_rtx;
7345 : : }
7346 : :
/* Return boolean_true_node if it can be determined that object ARG1 of
   size ARG0 is lock free on this architecture; otherwise NULL_TREE.  */
7349 : :
7350 : : static tree
7351 : 40137 : fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
7352 : : {
7353 : 40137 : if (!flag_inline_atomics)
7354 : : return NULL_TREE;
7355 : :
7356 : : /* If it isn't always lock free, don't generate a result. */
7357 : 40128 : if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
7358 : : return boolean_true_node;
7359 : :
7360 : : return NULL_TREE;
7361 : : }
7362 : :
/* Return true if the parameters to call EXP represent an object which will
   always generate lock free instructions.  The first argument represents the
   size of the object, and the second parameter is a pointer to the object
   itself.  If NULL is passed for the object, then the result is based on
   typical alignment for an object of the specified size.  Otherwise return
   NULL_RTX.  */
7369 : :
7370 : : static rtx
7371 : 3 : expand_builtin_atomic_is_lock_free (tree exp)
7372 : : {
7373 : 3 : tree size;
7374 : 3 : tree arg0 = CALL_EXPR_ARG (exp, 0);
7375 : 3 : tree arg1 = CALL_EXPR_ARG (exp, 1);
7376 : :
7377 : 3 : if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
7378 : : {
7379 : 0 : error ("non-integer argument 1 to %qs", "__atomic_is_lock_free");
7380 : 0 : return NULL_RTX;
7381 : : }
7382 : :
7383 : 3 : if (!flag_inline_atomics)
7384 : : return NULL_RTX;
7385 : :
7386 : : /* If the value is known at compile time, return the RTX for it. */
7387 : 2 : size = fold_builtin_atomic_is_lock_free (arg0, arg1);
7388 : 2 : if (size == boolean_true_node)
7389 : 0 : return const1_rtx;
7390 : :
7391 : : return NULL_RTX;
7392 : : }
7393 : :
7394 : : /* Expand the __atomic_thread_fence intrinsic:
7395 : : void __atomic_thread_fence (enum memmodel)
7396 : : EXP is the CALL_EXPR. */
7397 : :
7398 : : static void
7399 : 694 : expand_builtin_atomic_thread_fence (tree exp)
7400 : : {
7401 : 694 : enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
7402 : 694 : expand_mem_thread_fence (model);
7403 : 694 : }
7404 : :
7405 : : /* Expand the __atomic_signal_fence intrinsic:
7406 : : void __atomic_signal_fence (enum memmodel)
7407 : : EXP is the CALL_EXPR. */
7408 : :
7409 : : static void
7410 : 60 : expand_builtin_atomic_signal_fence (tree exp)
7411 : : {
7412 : 60 : enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
7413 : 60 : expand_mem_signal_fence (model);
7414 : 60 : }
7415 : :
7416 : : /* Expand the __sync_synchronize intrinsic. */
7417 : :
static void
expand_builtin_sync_synchronize (void)
{
  /* __sync_synchronize is always a full sequentially-consistent barrier.  */
  expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
}
7423 : :
/* Expand a call to __builtin_thread_pointer.  Return the thread pointer
   in TARGET if convenient, otherwise in a fresh Pmode register.  */

static rtx
expand_builtin_thread_pointer (tree exp, rtx target)
{
  enum insn_code icode;
  if (!validate_arglist (exp, VOID_TYPE))
    return const0_rtx;
  icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
  if (icode != CODE_FOR_nothing)
    {
      class expand_operand op;
      /* If the target is not suitable then create a new target.  */
      if (target == NULL_RTX
	  || !REG_P (target)
	  || GET_MODE (target) != Pmode)
	target = gen_reg_rtx (Pmode);
      create_output_operand (&op, target, Pmode);
      expand_insn (icode, 1, &op);
      return target;
    }
  error ("%<__builtin_thread_pointer%> is not supported on this target");
  return const0_rtx;
}
7446 : :
/* Expand a call to __builtin_set_thread_pointer.  The single pointer
   argument is the new thread-pointer value; the builtin has no result.  */

static void
expand_builtin_set_thread_pointer (tree exp)
{
  enum insn_code icode;
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return;
  icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
  if (icode != CODE_FOR_nothing)
    {
      class expand_operand op;
      rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
			     Pmode, EXPAND_NORMAL);
      create_input_operand (&op, val, Pmode);
      expand_insn (icode, 1, &op);
      return;
    }
  error ("%<__builtin_set_thread_pointer%> is not supported on this target");
}
7465 : :
7466 : :
7467 : : /* Emit code to restore the current value of stack. */
7468 : :
7469 : : static void
7470 : 1673 : expand_stack_restore (tree var)
7471 : : {
7472 : 1673 : rtx_insn *prev;
7473 : 1673 : rtx sa = expand_normal (var);
7474 : :
7475 : 1673 : sa = convert_memory_address (Pmode, sa);
7476 : :
7477 : 1673 : prev = get_last_insn ();
7478 : 1673 : emit_stack_restore (SAVE_BLOCK, sa);
7479 : :
7480 : 1673 : record_new_stack_level ();
7481 : :
7482 : 1673 : fixup_args_size_notes (prev, get_last_insn (), 0);
7483 : 1673 : }
7484 : :
7485 : : /* Emit code to save the current value of stack. */
7486 : :
7487 : : static rtx
7488 : 1819 : expand_stack_save (void)
7489 : : {
7490 : 1819 : rtx ret = NULL_RTX;
7491 : :
7492 : 0 : emit_stack_save (SAVE_BLOCK, &ret);
7493 : 1819 : return ret;
7494 : : }
7495 : :
7496 : : /* Emit code to get the openacc gang, worker or vector id or size. */
7497 : :
static rtx
expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
{
  const char *name;
  rtx fallback_retval;
  rtx_insn *(*gen_fn) (rtx, rtx);
  /* Select the diagnostic name, the fallback value for targets without
     OpenACC dimension support, and the target insn generator.  */
  switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
    {
    case BUILT_IN_GOACC_PARLEVEL_ID:
      name = "__builtin_goacc_parlevel_id";
      fallback_retval = const0_rtx;
      gen_fn = targetm.gen_oacc_dim_pos;
      break;
    case BUILT_IN_GOACC_PARLEVEL_SIZE:
      name = "__builtin_goacc_parlevel_size";
      fallback_retval = const1_rtx;
      gen_fn = targetm.gen_oacc_dim_size;
      break;
    default:
      gcc_unreachable ();
    }

  /* These builtins are only meaningful inside OpenACC offloaded code.  */
  if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
    {
      error ("%qs only supported in OpenACC code", name);
      return const0_rtx;
    }

  tree arg = CALL_EXPR_ARG (exp, 0);
  if (TREE_CODE (arg) != INTEGER_CST)
    {
      error ("non-constant argument 0 to %qs", name);
      return const0_rtx;
    }

  /* The dimension must name one of the three OpenACC parallelism levels.  */
  int dim = TREE_INT_CST_LOW (arg);
  switch (dim)
    {
    case GOMP_DIM_GANG:
    case GOMP_DIM_WORKER:
    case GOMP_DIM_VECTOR:
      break;
    default:
      error ("illegal argument 0 to %qs", name);
      return const0_rtx;
    }

  /* Result unused: arguments were already validated above, nothing to emit.  */
  if (ignore)
    return target;

  if (target == NULL_RTX)
    target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

  /* Without target support the id is always 0 and the size always 1.  */
  if (!targetm.have_oacc_dim_size ())
    {
      emit_move_insn (target, fallback_retval);
      return target;
    }

  /* The dim insns want a register destination; go through a temporary
     if TARGET is memory.  */
  rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
  emit_insn (gen_fn (reg, GEN_INT (dim)));
  if (reg != target)
    emit_move_insn (target, reg);

  return target;
}
7564 : :
7565 : : /* Expand a string compare operation using a sequence of char comparison
7566 : : to get rid of the calling overhead, with result going to TARGET if
7567 : : that's convenient.
7568 : :
7569 : : VAR_STR is the variable string source;
7570 : : CONST_STR is the constant string source;
7571 : : LENGTH is the number of chars to compare;
7572 : : CONST_STR_N indicates which source string is the constant string;
7573 : : IS_MEMCMP indicates whether it's a memcmp or strcmp.
7574 : :
7575 : : to: (assume const_str_n is 2, i.e., arg2 is a constant string)
7576 : :
7577 : : target = (int) (unsigned char) var_str[0]
7578 : : - (int) (unsigned char) const_str[0];
7579 : : if (target != 0)
7580 : : goto ne_label;
7581 : : ...
7582 : : target = (int) (unsigned char) var_str[length - 2]
7583 : : - (int) (unsigned char) const_str[length - 2];
7584 : : if (target != 0)
7585 : : goto ne_label;
7586 : : target = (int) (unsigned char) var_str[length - 1]
7587 : : - (int) (unsigned char) const_str[length - 1];
7588 : : ne_label:
7589 : : */
7590 : :
static rtx
inline_string_cmp (rtx target, tree var_str, const char *const_str,
		   unsigned HOST_WIDE_INT length,
		   int const_str_n, machine_mode mode)
{
  HOST_WIDE_INT offset = 0;
  rtx var_rtx_array
    = get_memory_rtx (var_str, build_int_cst (unsigned_type_node,length));
  rtx var_rtx = NULL_RTX;
  rtx const_rtx = NULL_RTX;
  rtx result = target ? target : gen_reg_rtx (mode);
  rtx_code_label *ne_label = gen_label_rtx ();
  tree unit_type_node = unsigned_char_type_node;
  scalar_int_mode unit_mode
    = as_a <scalar_int_mode> TYPE_MODE (unit_type_node);

  start_sequence ();

  /* Compare one byte per iteration; branch to NE_LABEL as soon as a
     difference is found, so RESULT holds the first nonzero byte delta.  */
  for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
    {
      var_rtx
	= adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
      const_rtx = c_readstr (const_str + offset, unit_mode);
      /* CONST_STR_N records which operand position held the constant
	 string, so the original call's operand order is preserved.  */
      rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
      rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;

      /* Zero-extend both bytes to MODE before subtracting.  */
      op0 = convert_modes (mode, unit_mode, op0, 1);
      op1 = convert_modes (mode, unit_mode, op1, 1);
      rtx diff = expand_simple_binop (mode, MINUS, op0, op1,
				      result, 1, OPTAB_WIDEN);

      /* Force the difference into result register.  We cannot reassign
	 result here ("result = diff") or we may end up returning
	 uninitialized result when expand_simple_binop allocates a new
	 pseudo-register for returning.  */
      if (diff != result)
	emit_move_insn (result, diff);

      /* No branch after the last byte; RESULT is the final answer.  */
      if (i < length - 1)
	emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
				 mode, true, ne_label);
      offset += GET_MODE_SIZE (unit_mode);
    }

  emit_label (ne_label);
  rtx_insn *insns = end_sequence ();
  emit_insn (insns);

  return result;
}
7641 : :
/* Inline expansion of a call to str(n)cmp or memcmp, with result going
   to TARGET if that's convenient.
   If the call cannot be inlined, return NULL_RTX.  */
7645 : :
static rtx
inline_expand_builtin_bytecmp (tree exp, rtx target)
{
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);

  /* Do NOT apply this inlining expansion when optimizing for size or
     optimization level below 2 or if unused *cmp hasn't been DCEd.  */
  if (optimize < 2 || optimize_insn_for_size_p () || target == const0_rtx)
    return NULL_RTX;

  gcc_checking_assert (fcode == BUILT_IN_STRCMP
		       || fcode == BUILT_IN_STRNCMP
		       || fcode == BUILT_IN_MEMCMP);

  /* On a target where the type of the call (int) has same or narrower
     precision than unsigned char, give up the inlining expansion.  */
  if (TYPE_PRECISION (unsigned_char_type_node)
      >= TYPE_PRECISION (TREE_TYPE (exp)))
    return NULL_RTX;

  tree arg1 = CALL_EXPR_ARG (exp, 0);
  tree arg2 = CALL_EXPR_ARG (exp, 1);
  tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;

  unsigned HOST_WIDE_INT len1 = 0;
  unsigned HOST_WIDE_INT len2 = 0;
  unsigned HOST_WIDE_INT len3 = 0;

  /* Get the object representation of the initializers of ARG1 and ARG2
     as strings, provided they refer to constant objects, with their byte
     sizes in LEN1 and LEN2, respectively.  */
  const char *bytes1 = getbyterep (arg1, &len1);
  const char *bytes2 = getbyterep (arg2, &len2);

  /* Fail if neither argument refers to an initialized constant.  */
  if (!bytes1 && !bytes2)
    return NULL_RTX;

  if (is_ncmp)
    {
      /* Fail if the memcmp/strncmp bound is not a constant.  */
      if (!tree_fits_uhwi_p (len3_tree))
	return NULL_RTX;

      len3 = tree_to_uhwi (len3_tree);

      if (fcode == BUILT_IN_MEMCMP)
	{
	  /* Fail if the memcmp bound is greater than the size of either
	     of the two constant objects.  */
	  if ((bytes1 && len1 < len3)
	      || (bytes2 && len2 < len3))
	    return NULL_RTX;
	}
    }

  if (fcode != BUILT_IN_MEMCMP)
    {
      /* For string functions (i.e., strcmp and strncmp) reduce LEN1
	 and LEN2 to the length of the nul-terminated string stored
	 in each.  */
      if (bytes1 != NULL)
	len1 = strnlen (bytes1, len1) + 1;
      if (bytes2 != NULL)
	len2 = strnlen (bytes2, len2) + 1;
    }

  /* See inline_string_cmp.  CONST_STR_N records which argument is the
     known-constant string: 1 for ARG1, 2 for ARG2.  */
  int const_str_n;
  if (!len1)
    const_str_n = 2;
  else if (!len2)
    const_str_n = 1;
  else if (len2 > len1)
    const_str_n = 1;
  else
    const_str_n = 2;

  /* For strncmp only, compute the new bound as the smallest of
     the lengths of the two strings (plus 1) and the bound provided
     to the function.  */
  unsigned HOST_WIDE_INT bound = (const_str_n == 1) ? len1 : len2;
  if (is_ncmp && len3 < bound)
    bound = len3;

  /* If the bound of the comparison is larger than the threshold,
     do nothing.  */
  if (bound > (unsigned HOST_WIDE_INT) param_builtin_string_cmp_inline_length)
    return NULL_RTX;

  machine_mode mode = TYPE_MODE (TREE_TYPE (exp));

  /* Now, start inline expansion the call.  */
  return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
			    (const_str_n == 1) ? bytes1 : bytes2, bound,
			    const_str_n, mode);
}
7745 : :
7746 : : /* Expand a call to __builtin_speculation_safe_value_<N>. MODE
7747 : : represents the size of the first argument to that call, or VOIDmode
7748 : : if the argument is a pointer. IGNORE will be true if the result
7749 : : isn't used. */
static rtx
expand_speculation_safe_value (machine_mode mode, tree exp, rtx target,
			       bool ignore)
{
  rtx val, failsafe;
  unsigned nargs = call_expr_nargs (exp);

  tree arg0 = CALL_EXPR_ARG (exp, 0);

  /* A VOIDmode MODE marks the pointer variant; use the argument's own
     (integral) type mode instead.  */
  if (mode == VOIDmode)
    {
      mode = TYPE_MODE (TREE_TYPE (arg0));
      gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
    }

  val = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);

  /* An optional second argument can be used as a failsafe value on
     some machines.  If it isn't present, then the failsafe value is
     assumed to be 0.  */
  if (nargs > 1)
    {
      tree arg1 = CALL_EXPR_ARG (exp, 1);
      failsafe = expand_expr (arg1, NULL_RTX, mode, EXPAND_NORMAL);
    }
  else
    failsafe = const0_rtx;

  /* If the result isn't used, the behavior is undefined.  It would be
     nice to emit a warning here, but path splitting means this might
     happen with legitimate code.  So simply drop the builtin
     expansion in that case; we've handled any side-effects above.  */
  if (ignore)
    return const0_rtx;

  /* If we don't have a suitable target, create one to hold the result.  */
  if (target == NULL || GET_MODE (target) != mode)
    target = gen_reg_rtx (mode);

  if (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)
    val = convert_modes (mode, VOIDmode, val, false);

  /* Let the target emit its speculation barrier / value-laundering code.  */
  return targetm.speculation_safe_value (mode, target, val, failsafe);
}
7794 : :
7795 : : /* Expand CRC* or REV_CRC* built-ins. */
7796 : :
rtx
expand_builtin_crc_table_based (internal_fn fn, scalar_mode crc_mode,
				scalar_mode data_mode, machine_mode mode,
				tree exp, rtx target)
{
  tree rhs1 = CALL_EXPR_ARG (exp, 0); // crc
  tree rhs2 = CALL_EXPR_ARG (exp, 1); // data
  tree rhs3 = CALL_EXPR_ARG (exp, 2); // polynomial

  /* MODE is the mode of the call's result; VOIDmode means the result
     is unused, so fall back to a fresh CRC_MODE register.  */
  if (!target || mode == VOIDmode)
    target = gen_reg_rtx (crc_mode);

  rtx op1 = expand_normal (rhs1);
  rtx op2 = expand_normal (rhs2);
  rtx op3;
  /* The polynomial must be a compile-time constant; diagnose and use 0
     so expansion can continue after the error.  */
  if (TREE_CODE (rhs3) != INTEGER_CST)
    {
      error ("third argument to %<crc%> builtins must be a constant");
      op3 = const0_rtx;
    }
  else
    op3 = convert_to_mode (crc_mode, expand_normal (rhs3), 0);

  if (CONST_INT_P (op2))
    op2 = convert_to_mode (crc_mode, op2, 0);

  if (fn == IFN_CRC)
    expand_crc_table_based (target, op1, op2, op3, data_mode);
  else
    /* If it's IFN_CRC_REV generate bit-reversed CRC.  */
    expand_reversed_crc_table_based (target, op1, op2, op3,
				     data_mode,
				     generate_reflecting_code_standard);
  return target;
}
7832 : :
7833 : : /* Expand an expression EXP that calls a built-in function,
7834 : : with result going to TARGET if that's convenient
7835 : : (and in mode MODE if that's convenient).
7836 : : SUBTARGET may be used as the target for computing one of EXP's operands.
7837 : : IGNORE is nonzero if the value is to be ignored. */
7838 : :
7839 : : rtx
7840 : 1983799 : expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
7841 : : int ignore)
7842 : : {
7843 : 1983799 : tree fndecl = get_callee_fndecl (exp);
7844 : 1983799 : machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
7845 : 1983799 : int flags;
7846 : :
7847 : 1983799 : if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7848 : 170751 : return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7849 : :
7850 : : /* When ASan is enabled, we don't want to expand some memory/string
7851 : : builtins and rely on libsanitizer's hooks. This allows us to avoid
7852 : : redundant checks and be sure, that possible overflow will be detected
7853 : : by ASan. */
7854 : :
7855 : 1813048 : enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7856 : 1813048 : if (param_asan_kernel_mem_intrinsic_prefix
7857 : 1813048 : && sanitize_flags_p (SANITIZE_KERNEL_ADDRESS
7858 : : | SANITIZE_KERNEL_HWADDRESS))
7859 : 42 : switch (fcode)
7860 : : {
7861 : 42 : rtx save_decl_rtl, ret;
7862 : 42 : case BUILT_IN_MEMCPY:
7863 : 42 : case BUILT_IN_MEMMOVE:
7864 : 42 : case BUILT_IN_MEMSET:
7865 : 42 : save_decl_rtl = DECL_RTL (fndecl);
7866 : 42 : DECL_RTL (fndecl) = asan_memfn_rtl (fndecl);
7867 : 42 : ret = expand_call (exp, target, ignore);
7868 : 42 : DECL_RTL (fndecl) = save_decl_rtl;
7869 : 42 : return ret;
7870 : : default:
7871 : : break;
7872 : : }
7873 : 1813006 : if (sanitize_flags_p (SANITIZE_ADDRESS | SANITIZE_HWADDRESS)
7874 : 1813006 : && asan_intercepted_p (fcode))
7875 : 757 : return expand_call (exp, target, ignore);
7876 : :
7877 : : /* When not optimizing, generate calls to library functions for a certain
7878 : : set of builtins. */
7879 : 1812249 : if (!optimize
7880 : 420625 : && !called_as_built_in (fndecl)
7881 : 141297 : && fcode != BUILT_IN_FORK
7882 : 141297 : && fcode != BUILT_IN_EXECL
7883 : 141286 : && fcode != BUILT_IN_EXECV
7884 : 141286 : && fcode != BUILT_IN_EXECLP
7885 : 141284 : && fcode != BUILT_IN_EXECLE
7886 : 141284 : && fcode != BUILT_IN_EXECVP
7887 : 141283 : && fcode != BUILT_IN_EXECVE
7888 : 141283 : && fcode != BUILT_IN_CLEAR_CACHE
7889 : 141280 : && !ALLOCA_FUNCTION_CODE_P (fcode)
7890 : 141226 : && fcode != BUILT_IN_FREE
7891 : 139143 : && (fcode != BUILT_IN_MEMSET
7892 : 55889 : || !(flag_inline_stringops & ILSOP_MEMSET))
7893 : 139140 : && (fcode != BUILT_IN_MEMCPY
7894 : 918 : || !(flag_inline_stringops & ILSOP_MEMCPY))
7895 : 139004 : && (fcode != BUILT_IN_MEMMOVE
7896 : 347 : || !(flag_inline_stringops & ILSOP_MEMMOVE))
7897 : 1951253 : && (fcode != BUILT_IN_MEMCMP
7898 : 2504 : || !(flag_inline_stringops & ILSOP_MEMCMP)))
7899 : 138956 : return expand_call (exp, target, ignore);
7900 : :
7901 : : /* The built-in function expanders test for target == const0_rtx
7902 : : to determine whether the function's result will be ignored. */
7903 : 1673293 : if (ignore)
7904 : 978807 : target = const0_rtx;
7905 : :
7906 : : /* If the result of a pure or const built-in function is ignored, and
7907 : : none of its arguments are volatile, we can avoid expanding the
7908 : : built-in call and just evaluate the arguments for side-effects. */
7909 : 1673293 : if (target == const0_rtx
7910 : 978807 : && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
7911 : 1679681 : && !(flags & ECF_LOOPING_CONST_OR_PURE))
7912 : : {
7913 : 6 : bool volatilep = false;
7914 : 6 : tree arg;
7915 : 6 : call_expr_arg_iterator iter;
7916 : :
7917 : 20 : FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7918 : 8 : if (TREE_THIS_VOLATILE (arg))
7919 : : {
7920 : : volatilep = true;
7921 : : break;
7922 : : }
7923 : :
7924 : 6 : if (! volatilep)
7925 : : {
7926 : 20 : FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7927 : 8 : expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
7928 : 6 : return const0_rtx;
7929 : : }
7930 : : }
7931 : :
7932 : 1673287 : switch (fcode)
7933 : : {
7934 : 4 : CASE_FLT_FN (BUILT_IN_FABS):
7935 : 4 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
7936 : 4 : case BUILT_IN_FABSD32:
7937 : 4 : case BUILT_IN_FABSD64:
7938 : 4 : case BUILT_IN_FABSD128:
7939 : 4 : case BUILT_IN_FABSD64X:
7940 : 4 : target = expand_builtin_fabs (exp, target, subtarget);
7941 : 4 : if (target)
7942 : 833251 : return target;
7943 : 840036 : break;
7944 : :
7945 : 11674 : CASE_FLT_FN (BUILT_IN_COPYSIGN):
7946 : 11674 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
7947 : 11674 : target = expand_builtin_copysign (exp, target, subtarget);
7948 : 11674 : if (target)
7949 : : return target;
7950 : : break;
7951 : :
7952 : : /* Just do a normal library call if we were unable to fold
7953 : : the values. */
7954 : : CASE_FLT_FN (BUILT_IN_CABS):
7955 : : CASE_FLT_FN_FLOATN_NX (BUILT_IN_CABS):
7956 : : break;
7957 : :
7958 : 364 : CASE_FLT_FN (BUILT_IN_FMA):
7959 : 364 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
7960 : 364 : target = expand_builtin_mathfn_ternary (exp, target, subtarget);
7961 : 364 : if (target)
7962 : : return target;
7963 : : break;
7964 : :
7965 : 235 : CASE_FLT_FN (BUILT_IN_ILOGB):
7966 : 235 : if (! flag_unsafe_math_optimizations)
7967 : : break;
7968 : 4 : gcc_fallthrough ();
7969 : 4 : CASE_FLT_FN (BUILT_IN_ISINF):
7970 : 4 : CASE_FLT_FN (BUILT_IN_FINITE):
7971 : 4 : CASE_FLT_FN (BUILT_IN_ISNAN):
7972 : 4 : case BUILT_IN_ISFINITE:
7973 : 4 : case BUILT_IN_ISNORMAL:
7974 : 4 : target = expand_builtin_interclass_mathfn (exp, target);
7975 : 4 : if (target)
7976 : : return target;
7977 : : break;
7978 : :
7979 : 823 : case BUILT_IN_ISSIGNALING:
7980 : 823 : target = expand_builtin_issignaling (exp, target);
7981 : 823 : if (target)
7982 : : return target;
7983 : : break;
7984 : :
7985 : 231 : CASE_FLT_FN (BUILT_IN_ICEIL):
7986 : 231 : CASE_FLT_FN (BUILT_IN_LCEIL):
7987 : 231 : CASE_FLT_FN (BUILT_IN_LLCEIL):
7988 : 231 : CASE_FLT_FN (BUILT_IN_LFLOOR):
7989 : 231 : CASE_FLT_FN (BUILT_IN_IFLOOR):
7990 : 231 : CASE_FLT_FN (BUILT_IN_LLFLOOR):
7991 : 231 : target = expand_builtin_int_roundingfn (exp, target);
7992 : 231 : if (target)
7993 : : return target;
7994 : : break;
7995 : :
7996 : 590 : CASE_FLT_FN (BUILT_IN_IRINT):
7997 : 590 : CASE_FLT_FN (BUILT_IN_LRINT):
7998 : 590 : CASE_FLT_FN (BUILT_IN_LLRINT):
7999 : 590 : CASE_FLT_FN (BUILT_IN_IROUND):
8000 : 590 : CASE_FLT_FN (BUILT_IN_LROUND):
8001 : 590 : CASE_FLT_FN (BUILT_IN_LLROUND):
8002 : 590 : target = expand_builtin_int_roundingfn_2 (exp, target);
8003 : 590 : if (target)
8004 : : return target;
8005 : : break;
8006 : :
8007 : 279 : CASE_FLT_FN (BUILT_IN_POWI):
8008 : 279 : target = expand_builtin_powi (exp, target);
8009 : 279 : if (target)
8010 : : return target;
8011 : : break;
8012 : :
8013 : 158 : CASE_FLT_FN (BUILT_IN_CEXPI):
8014 : 158 : target = expand_builtin_cexpi (exp, target);
8015 : 158 : gcc_assert (target);
8016 : : return target;
8017 : :
8018 : 994 : CASE_FLT_FN (BUILT_IN_SIN):
8019 : 994 : CASE_FLT_FN (BUILT_IN_COS):
8020 : 994 : if (! flag_unsafe_math_optimizations)
8021 : : break;
8022 : 47 : target = expand_builtin_mathfn_3 (exp, target, subtarget);
8023 : 47 : if (target)
8024 : : return target;
8025 : : break;
8026 : :
8027 : 152 : CASE_FLT_FN (BUILT_IN_SINCOS):
8028 : 152 : if (! flag_unsafe_math_optimizations)
8029 : : break;
8030 : 3 : target = expand_builtin_sincos (exp);
8031 : 3 : if (target)
8032 : : return target;
8033 : : break;
8034 : :
8035 : 60 : case BUILT_IN_FEGETROUND:
8036 : 60 : target = expand_builtin_fegetround (exp, target, target_mode);
8037 : 60 : if (target)
8038 : : return target;
8039 : : break;
8040 : :
8041 : 1063 : case BUILT_IN_FECLEAREXCEPT:
8042 : 1063 : target = expand_builtin_feclear_feraise_except (exp, target, target_mode,
8043 : : feclearexcept_optab);
8044 : 1063 : if (target)
8045 : : return target;
8046 : : break;
8047 : :
8048 : 831 : case BUILT_IN_FERAISEEXCEPT:
8049 : 831 : target = expand_builtin_feclear_feraise_except (exp, target, target_mode,
8050 : : feraiseexcept_optab);
8051 : 831 : if (target)
8052 : : return target;
8053 : : break;
8054 : :
8055 : 464 : case BUILT_IN_APPLY_ARGS:
8056 : 464 : return expand_builtin_apply_args ();
8057 : :
8058 : : /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
8059 : : FUNCTION with a copy of the parameters described by
8060 : : ARGUMENTS, and ARGSIZE. It returns a block of memory
8061 : : allocated on the stack into which is stored all the registers
8062 : : that might possibly be used for returning the result of a
8063 : : function. ARGUMENTS is the value returned by
8064 : : __builtin_apply_args. ARGSIZE is the number of bytes of
8065 : : arguments that must be copied. ??? How should this value be
8066 : : computed? We'll also need a safe worst case value for varargs
8067 : : functions. */
8068 : 483 : case BUILT_IN_APPLY:
8069 : 483 : if (!validate_arglist (exp, POINTER_TYPE,
8070 : : POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
8071 : 483 : && !validate_arglist (exp, REFERENCE_TYPE,
8072 : : POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
8073 : 0 : return const0_rtx;
8074 : : else
8075 : : {
8076 : 483 : rtx ops[3];
8077 : :
8078 : 483 : ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
8079 : 483 : ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
8080 : 483 : ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
8081 : :
8082 : 483 : return expand_builtin_apply (ops[0], ops[1], ops[2]);
8083 : : }
8084 : :
8085 : : /* __builtin_return (RESULT) causes the function to return the
8086 : : value described by RESULT. RESULT is address of the block of
8087 : : memory returned by __builtin_apply. */
8088 : 379 : case BUILT_IN_RETURN:
8089 : 379 : if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
8090 : 379 : expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
8091 : 379 : return const0_rtx;
8092 : :
8093 : 0 : case BUILT_IN_SAVEREGS:
8094 : 0 : return expand_builtin_saveregs ();
8095 : :
8096 : 16 : case BUILT_IN_VA_ARG_PACK:
8097 : : /* All valid uses of __builtin_va_arg_pack () are removed during
8098 : : inlining. */
8099 : 16 : error ("invalid use of %<__builtin_va_arg_pack ()%>");
8100 : 16 : return const0_rtx;
8101 : :
8102 : 0 : case BUILT_IN_VA_ARG_PACK_LEN:
8103 : : /* All valid uses of __builtin_va_arg_pack_len () are removed during
8104 : : inlining. */
8105 : 0 : error ("invalid use of %<__builtin_va_arg_pack_len ()%>");
8106 : 0 : return const0_rtx;
8107 : :
8108 : : /* Return the address of the first anonymous stack arg. */
8109 : 150 : case BUILT_IN_NEXT_ARG:
8110 : 150 : if (fold_builtin_next_arg (exp, false))
8111 : 0 : return const0_rtx;
8112 : 150 : return expand_builtin_next_arg ();
8113 : :
8114 : 28 : case BUILT_IN_CLEAR_CACHE:
8115 : 28 : expand_builtin___clear_cache (exp);
8116 : 28 : return const0_rtx;
8117 : :
8118 : 0 : case BUILT_IN_CLASSIFY_TYPE:
8119 : 0 : return expand_builtin_classify_type (exp);
8120 : :
8121 : 0 : case BUILT_IN_CONSTANT_P:
8122 : 0 : return const0_rtx;
8123 : :
8124 : 15583 : case BUILT_IN_FRAME_ADDRESS:
8125 : 15583 : case BUILT_IN_RETURN_ADDRESS:
8126 : 15583 : return expand_builtin_frame_address (fndecl, exp);
8127 : :
8128 : 540 : case BUILT_IN_STACK_ADDRESS:
8129 : 540 : return expand_builtin_stack_address ();
8130 : :
8131 : 2159 : case BUILT_IN___STRUB_ENTER:
8132 : 2159 : target = expand_builtin_strub_enter (exp);
8133 : 2159 : if (target)
8134 : : return target;
8135 : : break;
8136 : :
8137 : 1072 : case BUILT_IN___STRUB_UPDATE:
8138 : 1072 : target = expand_builtin_strub_update (exp);
8139 : 1072 : if (target)
8140 : : return target;
8141 : : break;
8142 : :
8143 : 2729 : case BUILT_IN___STRUB_LEAVE:
8144 : 2729 : target = expand_builtin_strub_leave (exp);
8145 : 2729 : if (target)
8146 : : return target;
8147 : : break;
8148 : :
8149 : : /* Returns the address of the area where the structure is returned.
8150 : : 0 otherwise. */
8151 : 0 : case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
8152 : 0 : if (call_expr_nargs (exp) != 0
8153 : 0 : || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
8154 : 0 : || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
8155 : 0 : return const0_rtx;
8156 : : else
8157 : 0 : return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
8158 : :
8159 : 28233 : CASE_BUILT_IN_ALLOCA:
8160 : 28233 : target = expand_builtin_alloca (exp);
8161 : 28233 : if (target)
8162 : : return target;
8163 : : break;
8164 : :
8165 : 203 : case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
8166 : 203 : return expand_asan_emit_allocas_unpoison (exp);
8167 : :
8168 : 1819 : case BUILT_IN_STACK_SAVE:
8169 : 1819 : return expand_stack_save ();
8170 : :
8171 : 1673 : case BUILT_IN_STACK_RESTORE:
8172 : 1673 : expand_stack_restore (CALL_EXPR_ARG (exp, 0));
8173 : 1673 : return const0_rtx;
8174 : :
8175 : 1163 : case BUILT_IN_BSWAP16:
8176 : 1163 : case BUILT_IN_BSWAP32:
8177 : 1163 : case BUILT_IN_BSWAP64:
8178 : 1163 : case BUILT_IN_BSWAP128:
8179 : 1163 : target = expand_builtin_bswap (target_mode, exp, target, subtarget);
8180 : 1163 : if (target)
8181 : : return target;
8182 : : break;
8183 : :
8184 : 0 : CASE_INT_FN (BUILT_IN_FFS):
8185 : 0 : target = expand_builtin_unop (target_mode, exp, target,
8186 : : subtarget, ffs_optab);
8187 : 0 : if (target)
8188 : : return target;
8189 : : break;
8190 : :
8191 : 183 : CASE_INT_FN (BUILT_IN_CLZ):
8192 : 183 : target = expand_builtin_unop (target_mode, exp, target,
8193 : : subtarget, clz_optab);
8194 : 183 : if (target)
8195 : : return target;
8196 : : break;
8197 : :
8198 : 50 : CASE_INT_FN (BUILT_IN_CTZ):
8199 : 50 : target = expand_builtin_unop (target_mode, exp, target,
8200 : : subtarget, ctz_optab);
8201 : 50 : if (target)
8202 : : return target;
8203 : : break;
8204 : :
8205 : 90 : CASE_INT_FN (BUILT_IN_CLRSB):
8206 : 90 : target = expand_builtin_unop (target_mode, exp, target,
8207 : : subtarget, clrsb_optab);
8208 : 90 : if (target)
8209 : : return target;
8210 : : break;
8211 : :
8212 : 419 : CASE_INT_FN (BUILT_IN_POPCOUNT):
8213 : 419 : target = expand_builtin_unop (target_mode, exp, target,
8214 : : subtarget, popcount_optab);
8215 : 419 : if (target)
8216 : : return target;
8217 : : break;
8218 : :
8219 : 11 : CASE_INT_FN (BUILT_IN_PARITY):
8220 : 11 : target = expand_builtin_unop (target_mode, exp, target,
8221 : : subtarget, parity_optab);
8222 : 11 : if (target)
8223 : : return target;
8224 : : break;
8225 : :
8226 : 14425 : case BUILT_IN_STRLEN:
8227 : 14425 : target = expand_builtin_strlen (exp, target, target_mode);
8228 : 14425 : if (target)
8229 : : return target;
8230 : : break;
8231 : :
8232 : 580 : case BUILT_IN_STRNLEN:
8233 : 580 : target = expand_builtin_strnlen (exp, target, target_mode);
8234 : 580 : if (target)
8235 : : return target;
8236 : : break;
8237 : :
8238 : 1862 : case BUILT_IN_STRCPY:
8239 : 1862 : target = expand_builtin_strcpy (exp, target);
8240 : 1862 : if (target)
8241 : : return target;
8242 : : break;
8243 : :
8244 : 2168 : case BUILT_IN_STRNCPY:
8245 : 2168 : target = expand_builtin_strncpy (exp, target);
8246 : 2168 : if (target)
8247 : : return target;
8248 : : break;
8249 : :
8250 : 454 : case BUILT_IN_STPCPY:
8251 : 454 : target = expand_builtin_stpcpy (exp, target, mode);
8252 : 454 : if (target)
8253 : : return target;
8254 : : break;
8255 : :
8256 : 96198 : case BUILT_IN_MEMCPY:
8257 : 96198 : target = expand_builtin_memcpy (exp, target);
8258 : 96198 : if (target)
8259 : : return target;
8260 : : break;
8261 : :
8262 : 16171 : case BUILT_IN_MEMMOVE:
8263 : 16171 : target = expand_builtin_memmove (exp, target);
8264 : 16171 : if (target)
8265 : : return target;
8266 : : break;
8267 : :
8268 : 1637 : case BUILT_IN_MEMPCPY:
8269 : 1637 : target = expand_builtin_mempcpy (exp, target);
8270 : 1637 : if (target)
8271 : : return target;
8272 : : break;
8273 : :
8274 : 35810 : case BUILT_IN_MEMSET:
8275 : 35810 : target = expand_builtin_memset (exp, target, mode);
8276 : 35810 : if (target)
8277 : : return target;
8278 : : break;
8279 : :
8280 : 0 : case BUILT_IN_BZERO:
8281 : 0 : target = expand_builtin_bzero (exp);
8282 : 0 : if (target)
8283 : : return target;
8284 : : break;
8285 : :
8286 : : /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
8287 : : back to a BUILT_IN_STRCMP. Remember to delete the 3rd parameter
8288 : : when changing it to a strcmp call. */
8289 : 381 : case BUILT_IN_STRCMP_EQ:
8290 : 381 : target = expand_builtin_memcmp (exp, target, true);
8291 : 381 : if (target)
8292 : : return target;
8293 : :
8294 : : /* Change this call back to a BUILT_IN_STRCMP. */
8295 : 22 : TREE_OPERAND (exp, 1)
8296 : 22 : = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));
8297 : :
8298 : : /* Delete the last parameter. */
8299 : 22 : unsigned int i;
8300 : 22 : vec<tree, va_gc> *arg_vec;
8301 : 22 : vec_alloc (arg_vec, 2);
8302 : 88 : for (i = 0; i < 2; i++)
8303 : 44 : arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
8304 : 22 : exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
8305 : : /* FALLTHROUGH */
8306 : :
8307 : 127931 : case BUILT_IN_STRCMP:
8308 : 127931 : target = expand_builtin_strcmp (exp, target);
8309 : 127931 : if (target)
8310 : : return target;
8311 : : break;
8312 : :
8313 : : /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
8314 : : back to a BUILT_IN_STRNCMP. */
8315 : 36 : case BUILT_IN_STRNCMP_EQ:
8316 : 36 : target = expand_builtin_memcmp (exp, target, true);
8317 : 36 : if (target)
8318 : : return target;
8319 : :
8320 : : /* Change it back to a BUILT_IN_STRNCMP. */
8321 : 0 : TREE_OPERAND (exp, 1)
8322 : 0 : = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
8323 : : /* FALLTHROUGH */
8324 : :
8325 : 2029 : case BUILT_IN_STRNCMP:
8326 : 2029 : target = expand_builtin_strncmp (exp, target, mode);
8327 : 2029 : if (target)
8328 : : return target;
8329 : : break;
8330 : :
8331 : 102435 : case BUILT_IN_BCMP:
8332 : 102435 : case BUILT_IN_MEMCMP:
8333 : 102435 : case BUILT_IN_MEMCMP_EQ:
8334 : 102435 : target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
8335 : 102435 : if (target)
8336 : : return target;
8337 : 36589 : if (fcode == BUILT_IN_MEMCMP_EQ)
8338 : : {
8339 : 22658 : tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
8340 : 22658 : TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
8341 : : }
8342 : : break;
8343 : :
8344 : 0 : case BUILT_IN_SETJMP:
8345 : : /* This should have been lowered to the builtins below. */
8346 : 0 : gcc_unreachable ();
8347 : :
8348 : 841 : case BUILT_IN_SETJMP_SETUP:
8349 : : /* __builtin_setjmp_setup is passed a pointer to an array of five words
8350 : : and the receiver label. */
8351 : 841 : if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
8352 : : {
8353 : 841 : rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
8354 : : VOIDmode, EXPAND_NORMAL);
8355 : 841 : tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
8356 : 841 : rtx_insn *label_r = label_rtx (label);
8357 : :
8358 : 841 : expand_builtin_setjmp_setup (buf_addr, label_r);
8359 : 841 : return const0_rtx;
8360 : : }
8361 : : break;
8362 : :
8363 : 841 : case BUILT_IN_SETJMP_RECEIVER:
8364 : : /* __builtin_setjmp_receiver is passed the receiver label. */
8365 : 841 : if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
8366 : : {
8367 : 841 : tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
8368 : 841 : rtx_insn *label_r = label_rtx (label);
8369 : :
8370 : 841 : expand_builtin_setjmp_receiver (label_r);
8371 : 841 : nonlocal_goto_handler_labels
8372 : 1682 : = gen_rtx_INSN_LIST (VOIDmode, label_r,
8373 : 841 : nonlocal_goto_handler_labels);
8374 : : /* ??? Do not let expand_label treat us as such since we would
8375 : : not want to be both on the list of non-local labels and on
8376 : : the list of forced labels. */
8377 : 841 : FORCED_LABEL (label) = 0;
8378 : 841 : return const0_rtx;
8379 : : }
8380 : : break;
8381 : :
8382 : : /* __builtin_longjmp is passed a pointer to an array of five words.
8383 : : It's similar to the C library longjmp function but works with
8384 : : __builtin_setjmp above. */
8385 : 391 : case BUILT_IN_LONGJMP:
8386 : 391 : if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
8387 : : {
8388 : 391 : rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
8389 : : VOIDmode, EXPAND_NORMAL);
8390 : 391 : rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
8391 : :
8392 : 391 : if (value != const1_rtx)
8393 : : {
8394 : 0 : error ("%<__builtin_longjmp%> second argument must be 1");
8395 : 0 : return const0_rtx;
8396 : : }
8397 : :
8398 : 391 : expand_builtin_longjmp (buf_addr, value);
8399 : 391 : return const0_rtx;
8400 : : }
8401 : : break;
8402 : :
8403 : 510 : case BUILT_IN_NONLOCAL_GOTO:
8404 : 510 : target = expand_builtin_nonlocal_goto (exp);
8405 : 510 : if (target)
8406 : : return target;
8407 : : break;
8408 : :
8409 : : /* This updates the setjmp buffer that is its argument with the value
8410 : : of the current stack pointer. */
8411 : 0 : case BUILT_IN_UPDATE_SETJMP_BUF:
8412 : 0 : if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
8413 : : {
8414 : 0 : rtx buf_addr
8415 : 0 : = expand_normal (CALL_EXPR_ARG (exp, 0));
8416 : :
8417 : 0 : expand_builtin_update_setjmp_buf (buf_addr);
8418 : 0 : return const0_rtx;
8419 : : }
8420 : : break;
8421 : :
8422 : 38503 : case BUILT_IN_TRAP:
8423 : 38503 : case BUILT_IN_UNREACHABLE_TRAP:
8424 : 38503 : expand_builtin_trap ();
8425 : 38503 : return const0_rtx;
8426 : :
8427 : 5311 : case BUILT_IN_UNREACHABLE:
8428 : 5311 : expand_builtin_unreachable ();
8429 : 5311 : return const0_rtx;
8430 : :
8431 : 1133 : CASE_FLT_FN (BUILT_IN_SIGNBIT):
8432 : 1133 : case BUILT_IN_SIGNBITD32:
8433 : 1133 : case BUILT_IN_SIGNBITD64:
8434 : 1133 : case BUILT_IN_SIGNBITD128:
8435 : 1133 : target = expand_builtin_signbit (exp, target);
8436 : 1133 : if (target)
8437 : : return target;
8438 : : break;
8439 : :
8440 : : /* Various hooks for the DWARF 2 __throw routine. */
8441 : 34 : case BUILT_IN_UNWIND_INIT:
8442 : 34 : expand_builtin_unwind_init ();
8443 : 34 : return const0_rtx;
8444 : 1203 : case BUILT_IN_DWARF_CFA:
8445 : 1203 : return virtual_cfa_rtx;
8446 : : #ifdef DWARF2_UNWIND_INFO
8447 : 48 : case BUILT_IN_DWARF_SP_COLUMN:
8448 : 48 : return expand_builtin_dwarf_sp_column ();
8449 : 8 : case BUILT_IN_INIT_DWARF_REG_SIZES:
8450 : 8 : expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
8451 : 8 : return const0_rtx;
8452 : : #endif
8453 : 19 : case BUILT_IN_FROB_RETURN_ADDR:
8454 : 19 : return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
8455 : 2296 : case BUILT_IN_EXTRACT_RETURN_ADDR:
8456 : 2296 : return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
8457 : 29 : case BUILT_IN_EH_RETURN:
8458 : 58 : expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
8459 : 29 : CALL_EXPR_ARG (exp, 1));
8460 : 29 : return const0_rtx;
8461 : 21 : case BUILT_IN_EH_RETURN_DATA_REGNO:
8462 : 21 : return expand_builtin_eh_return_data_regno (exp);
8463 : 2 : case BUILT_IN_EXTEND_POINTER:
8464 : 2 : return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
8465 : 99568 : case BUILT_IN_EH_POINTER:
8466 : 99568 : return expand_builtin_eh_pointer (exp);
8467 : 5030 : case BUILT_IN_EH_FILTER:
8468 : 5030 : return expand_builtin_eh_filter (exp);
8469 : 112192 : case BUILT_IN_EH_COPY_VALUES:
8470 : 112192 : return expand_builtin_eh_copy_values (exp);
8471 : :
8472 : 20892 : case BUILT_IN_VA_START:
8473 : 20892 : return expand_builtin_va_start (exp);
8474 : 12086 : case BUILT_IN_VA_END:
8475 : 12086 : return expand_builtin_va_end (exp);
8476 : 242 : case BUILT_IN_VA_COPY:
8477 : 242 : return expand_builtin_va_copy (exp);
8478 : 1025 : case BUILT_IN_EXPECT:
8479 : 1025 : return expand_builtin_expect (exp, target);
8480 : 5 : case BUILT_IN_EXPECT_WITH_PROBABILITY:
8481 : 5 : return expand_builtin_expect_with_probability (exp, target);
8482 : 75 : case BUILT_IN_ASSUME_ALIGNED:
8483 : 75 : return expand_builtin_assume_aligned (exp, target);
8484 : 2037 : case BUILT_IN_PREFETCH:
8485 : 2037 : expand_builtin_prefetch (exp);
8486 : 2037 : return const0_rtx;
8487 : :
8488 : 295 : case BUILT_IN_INIT_TRAMPOLINE:
8489 : 295 : return expand_builtin_init_trampoline (exp, true);
8490 : 0 : case BUILT_IN_INIT_HEAP_TRAMPOLINE:
8491 : 0 : return expand_builtin_init_trampoline (exp, false);
8492 : 339 : case BUILT_IN_ADJUST_TRAMPOLINE:
8493 : 339 : return expand_builtin_adjust_trampoline (exp);
8494 : :
8495 : 0 : case BUILT_IN_INIT_DESCRIPTOR:
8496 : 0 : return expand_builtin_init_descriptor (exp);
8497 : 0 : case BUILT_IN_ADJUST_DESCRIPTOR:
8498 : 0 : return expand_builtin_adjust_descriptor (exp);
8499 : :
8500 : : case BUILT_IN_GCC_NESTED_PTR_CREATED:
8501 : : case BUILT_IN_GCC_NESTED_PTR_DELETED:
8502 : : break; /* At present, no expansion, just call the function. */
8503 : :
8504 : 87 : case BUILT_IN_FORK:
8505 : 87 : case BUILT_IN_EXECL:
8506 : 87 : case BUILT_IN_EXECV:
8507 : 87 : case BUILT_IN_EXECLP:
8508 : 87 : case BUILT_IN_EXECLE:
8509 : 87 : case BUILT_IN_EXECVP:
8510 : 87 : case BUILT_IN_EXECVE:
8511 : 87 : target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
8512 : 87 : if (target)
8513 : : return target;
8514 : : break;
8515 : :
8516 : 707 : case BUILT_IN_SYNC_FETCH_AND_ADD_1:
8517 : 707 : case BUILT_IN_SYNC_FETCH_AND_ADD_2:
8518 : 707 : case BUILT_IN_SYNC_FETCH_AND_ADD_4:
8519 : 707 : case BUILT_IN_SYNC_FETCH_AND_ADD_8:
8520 : 707 : case BUILT_IN_SYNC_FETCH_AND_ADD_16:
8521 : 707 : mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
8522 : 707 : target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
8523 : 707 : if (target)
8524 : : return target;
8525 : : break;
8526 : :
8527 : 581 : case BUILT_IN_SYNC_FETCH_AND_SUB_1:
8528 : 581 : case BUILT_IN_SYNC_FETCH_AND_SUB_2:
8529 : 581 : case BUILT_IN_SYNC_FETCH_AND_SUB_4:
8530 : 581 : case BUILT_IN_SYNC_FETCH_AND_SUB_8:
8531 : 581 : case BUILT_IN_SYNC_FETCH_AND_SUB_16:
8532 : 581 : mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
8533 : 581 : target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
8534 : 581 : if (target)
8535 : : return target;
8536 : : break;
8537 : :
8538 : 512 : case BUILT_IN_SYNC_FETCH_AND_OR_1:
8539 : 512 : case BUILT_IN_SYNC_FETCH_AND_OR_2:
8540 : 512 : case BUILT_IN_SYNC_FETCH_AND_OR_4:
8541 : 512 : case BUILT_IN_SYNC_FETCH_AND_OR_8:
8542 : 512 : case BUILT_IN_SYNC_FETCH_AND_OR_16:
8543 : 512 : mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
8544 : 512 : target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
8545 : 512 : if (target)
8546 : : return target;
8547 : : break;
8548 : :
8549 : 488 : case BUILT_IN_SYNC_FETCH_AND_AND_1:
8550 : 488 : case BUILT_IN_SYNC_FETCH_AND_AND_2:
8551 : 488 : case BUILT_IN_SYNC_FETCH_AND_AND_4:
8552 : 488 : case BUILT_IN_SYNC_FETCH_AND_AND_8:
8553 : 488 : case BUILT_IN_SYNC_FETCH_AND_AND_16:
8554 : 488 : mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
8555 : 488 : target = expand_builtin_sync_operation (mode, exp, AND, false, target);
8556 : 488 : if (target)
8557 : : return target;
8558 : : break;
8559 : :
8560 : 581 : case BUILT_IN_SYNC_FETCH_AND_XOR_1:
8561 : 581 : case BUILT_IN_SYNC_FETCH_AND_XOR_2:
8562 : 581 : case BUILT_IN_SYNC_FETCH_AND_XOR_4:
8563 : 581 : case BUILT_IN_SYNC_FETCH_AND_XOR_8:
8564 : 581 : case BUILT_IN_SYNC_FETCH_AND_XOR_16:
8565 : 581 : mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
8566 : 581 : target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
8567 : 581 : if (target)
8568 : : return target;
8569 : : break;
8570 : :
8571 : 434 : case BUILT_IN_SYNC_FETCH_AND_NAND_1:
8572 : 434 : case BUILT_IN_SYNC_FETCH_AND_NAND_2:
8573 : 434 : case BUILT_IN_SYNC_FETCH_AND_NAND_4:
8574 : 434 : case BUILT_IN_SYNC_FETCH_AND_NAND_8:
8575 : 434 : case BUILT_IN_SYNC_FETCH_AND_NAND_16:
8576 : 434 : mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
8577 : 434 : target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
8578 : 434 : if (target)
8579 : : return target;
8580 : : break;
8581 : :
8582 : 149 : case BUILT_IN_SYNC_ADD_AND_FETCH_1:
8583 : 149 : case BUILT_IN_SYNC_ADD_AND_FETCH_2:
8584 : 149 : case BUILT_IN_SYNC_ADD_AND_FETCH_4:
8585 : 149 : case BUILT_IN_SYNC_ADD_AND_FETCH_8:
8586 : 149 : case BUILT_IN_SYNC_ADD_AND_FETCH_16:
8587 : 149 : mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
8588 : 149 : target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
8589 : 149 : if (target)
8590 : : return target;
8591 : : break;
8592 : :
8593 : 117 : case BUILT_IN_SYNC_SUB_AND_FETCH_1:
8594 : 117 : case BUILT_IN_SYNC_SUB_AND_FETCH_2:
8595 : 117 : case BUILT_IN_SYNC_SUB_AND_FETCH_4:
8596 : 117 : case BUILT_IN_SYNC_SUB_AND_FETCH_8:
8597 : 117 : case BUILT_IN_SYNC_SUB_AND_FETCH_16:
8598 : 117 : mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
8599 : 117 : target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
8600 : 117 : if (target)
8601 : : return target;
8602 : : break;
8603 : :
8604 : 115 : case BUILT_IN_SYNC_OR_AND_FETCH_1:
8605 : 115 : case BUILT_IN_SYNC_OR_AND_FETCH_2:
8606 : 115 : case BUILT_IN_SYNC_OR_AND_FETCH_4:
8607 : 115 : case BUILT_IN_SYNC_OR_AND_FETCH_8:
8608 : 115 : case BUILT_IN_SYNC_OR_AND_FETCH_16:
8609 : 115 : mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
8610 : 115 : target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
8611 : 115 : if (target)
8612 : : return target;
8613 : : break;
8614 : :
8615 : 116 : case BUILT_IN_SYNC_AND_AND_FETCH_1:
8616 : 116 : case BUILT_IN_SYNC_AND_AND_FETCH_2:
8617 : 116 : case BUILT_IN_SYNC_AND_AND_FETCH_4:
8618 : 116 : case BUILT_IN_SYNC_AND_AND_FETCH_8:
8619 : 116 : case BUILT_IN_SYNC_AND_AND_FETCH_16:
8620 : 116 : mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
8621 : 116 : target = expand_builtin_sync_operation (mode, exp, AND, true, target);
8622 : 116 : if (target)
8623 : : return target;
8624 : : break;
8625 : :
8626 : 114 : case BUILT_IN_SYNC_XOR_AND_FETCH_1:
8627 : 114 : case BUILT_IN_SYNC_XOR_AND_FETCH_2:
8628 : 114 : case BUILT_IN_SYNC_XOR_AND_FETCH_4:
8629 : 114 : case BUILT_IN_SYNC_XOR_AND_FETCH_8:
8630 : 114 : case BUILT_IN_SYNC_XOR_AND_FETCH_16:
8631 : 114 : mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
8632 : 114 : target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
8633 : 114 : if (target)
8634 : : return target;
8635 : : break;
8636 : :
8637 : 83 : case BUILT_IN_SYNC_NAND_AND_FETCH_1:
8638 : 83 : case BUILT_IN_SYNC_NAND_AND_FETCH_2:
8639 : 83 : case BUILT_IN_SYNC_NAND_AND_FETCH_4:
8640 : 83 : case BUILT_IN_SYNC_NAND_AND_FETCH_8:
8641 : 83 : case BUILT_IN_SYNC_NAND_AND_FETCH_16:
8642 : 83 : mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
8643 : 83 : target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
8644 : 83 : if (target)
8645 : : return target;
8646 : : break;
8647 : :
8648 : 200 : case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
8649 : 200 : case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
8650 : 200 : case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
8651 : 200 : case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
8652 : 200 : case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
8653 : 200 : if (mode == VOIDmode)
8654 : 40 : mode = TYPE_MODE (boolean_type_node);
8655 : 200 : if (!target || !register_operand (target, mode))
8656 : 40 : target = gen_reg_rtx (mode);
8657 : :
8658 : 200 : mode = get_builtin_sync_mode
8659 : 200 : (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
8660 : 200 : target = expand_builtin_compare_and_swap (mode, exp, true, target);
8661 : 200 : if (target)
8662 : : return target;
8663 : : break;
8664 : :
8665 : 256 : case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
8666 : 256 : case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
8667 : 256 : case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
8668 : 256 : case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
8669 : 256 : case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
8670 : 256 : mode = get_builtin_sync_mode
8671 : 256 : (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
8672 : 256 : target = expand_builtin_compare_and_swap (mode, exp, false, target);
8673 : 256 : if (target)
8674 : : return target;
8675 : : break;
8676 : :
8677 : 326 : case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
8678 : 326 : case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
8679 : 326 : case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
8680 : 326 : case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
8681 : 326 : case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
8682 : 326 : mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
8683 : 326 : target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
8684 : 326 : if (target)
8685 : : return target;
8686 : : break;
8687 : :
8688 : 158 : case BUILT_IN_SYNC_LOCK_RELEASE_1:
8689 : 158 : case BUILT_IN_SYNC_LOCK_RELEASE_2:
8690 : 158 : case BUILT_IN_SYNC_LOCK_RELEASE_4:
8691 : 158 : case BUILT_IN_SYNC_LOCK_RELEASE_8:
8692 : 158 : case BUILT_IN_SYNC_LOCK_RELEASE_16:
8693 : 158 : mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
8694 : 158 : if (expand_builtin_sync_lock_release (mode, exp))
8695 : 156 : return const0_rtx;
8696 : : break;
8697 : :
8698 : 281 : case BUILT_IN_SYNC_SYNCHRONIZE:
8699 : 281 : expand_builtin_sync_synchronize ();
8700 : 281 : return const0_rtx;
8701 : :
8702 : 2811 : case BUILT_IN_ATOMIC_EXCHANGE_1:
8703 : 2811 : case BUILT_IN_ATOMIC_EXCHANGE_2:
8704 : 2811 : case BUILT_IN_ATOMIC_EXCHANGE_4:
8705 : 2811 : case BUILT_IN_ATOMIC_EXCHANGE_8:
8706 : 2811 : case BUILT_IN_ATOMIC_EXCHANGE_16:
8707 : 2811 : mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
8708 : 2811 : target = expand_builtin_atomic_exchange (mode, exp, target);
8709 : 2811 : if (target)
8710 : : return target;
8711 : : break;
8712 : :
8713 : 9204 : case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
8714 : 9204 : case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
8715 : 9204 : case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
8716 : 9204 : case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
8717 : 9204 : case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
8718 : 9204 : {
8719 : 9204 : unsigned int nargs, z;
8720 : 9204 : vec<tree, va_gc> *vec;
8721 : :
8722 : 9204 : mode =
8723 : 9204 : get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
8724 : 9204 : target = expand_builtin_atomic_compare_exchange (mode, exp, target);
8725 : 9204 : if (target)
8726 : 7410 : return target;
8727 : :
8728 : : /* If this is turned into an external library call, the weak parameter
8729 : : must be dropped to match the expected parameter list. */
8730 : 1794 : nargs = call_expr_nargs (exp);
8731 : 1794 : vec_alloc (vec, nargs - 1);
8732 : 8970 : for (z = 0; z < 3; z++)
8733 : 5382 : vec->quick_push (CALL_EXPR_ARG (exp, z));
8734 : : /* Skip the boolean weak parameter. */
8735 : 5382 : for (z = 4; z < 6; z++)
8736 : 3588 : vec->quick_push (CALL_EXPR_ARG (exp, z));
8737 : 1794 : exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
8738 : 1794 : break;
8739 : : }
8740 : :
8741 : 70922 : case BUILT_IN_ATOMIC_LOAD_1:
8742 : 70922 : case BUILT_IN_ATOMIC_LOAD_2:
8743 : 70922 : case BUILT_IN_ATOMIC_LOAD_4:
8744 : 70922 : case BUILT_IN_ATOMIC_LOAD_8:
8745 : 70922 : case BUILT_IN_ATOMIC_LOAD_16:
8746 : 70922 : mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
8747 : 70922 : target = expand_builtin_atomic_load (mode, exp, target);
8748 : 70922 : if (target)
8749 : : return target;
8750 : : break;
8751 : :
8752 : 17481 : case BUILT_IN_ATOMIC_STORE_1:
8753 : 17481 : case BUILT_IN_ATOMIC_STORE_2:
8754 : 17481 : case BUILT_IN_ATOMIC_STORE_4:
8755 : 17481 : case BUILT_IN_ATOMIC_STORE_8:
8756 : 17481 : case BUILT_IN_ATOMIC_STORE_16:
8757 : 17481 : mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
8758 : 17481 : target = expand_builtin_atomic_store (mode, exp);
8759 : 17481 : if (target)
8760 : 15854 : return const0_rtx;
8761 : : break;
8762 : :
8763 : 4377 : case BUILT_IN_ATOMIC_ADD_FETCH_1:
8764 : 4377 : case BUILT_IN_ATOMIC_ADD_FETCH_2:
8765 : 4377 : case BUILT_IN_ATOMIC_ADD_FETCH_4:
8766 : 4377 : case BUILT_IN_ATOMIC_ADD_FETCH_8:
8767 : 4377 : case BUILT_IN_ATOMIC_ADD_FETCH_16:
8768 : 4377 : {
8769 : 4377 : enum built_in_function lib;
8770 : 4377 : mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
8771 : 4377 : lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
8772 : : (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
8773 : 4377 : target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
8774 : : ignore, lib);
8775 : 4377 : if (target)
8776 : : return target;
8777 : : break;
8778 : : }
8779 : 2166 : case BUILT_IN_ATOMIC_SUB_FETCH_1:
8780 : 2166 : case BUILT_IN_ATOMIC_SUB_FETCH_2:
8781 : 2166 : case BUILT_IN_ATOMIC_SUB_FETCH_4:
8782 : 2166 : case BUILT_IN_ATOMIC_SUB_FETCH_8:
8783 : 2166 : case BUILT_IN_ATOMIC_SUB_FETCH_16:
8784 : 2166 : {
8785 : 2166 : enum built_in_function lib;
8786 : 2166 : mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
8787 : 2166 : lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
8788 : : (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
8789 : 2166 : target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
8790 : : ignore, lib);
8791 : 2166 : if (target)
8792 : : return target;
8793 : : break;
8794 : : }
8795 : 891 : case BUILT_IN_ATOMIC_AND_FETCH_1:
8796 : 891 : case BUILT_IN_ATOMIC_AND_FETCH_2:
8797 : 891 : case BUILT_IN_ATOMIC_AND_FETCH_4:
8798 : 891 : case BUILT_IN_ATOMIC_AND_FETCH_8:
8799 : 891 : case BUILT_IN_ATOMIC_AND_FETCH_16:
8800 : 891 : {
8801 : 891 : enum built_in_function lib;
8802 : 891 : mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
8803 : 891 : lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
8804 : : (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
8805 : 891 : target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
8806 : : ignore, lib);
8807 : 891 : if (target)
8808 : : return target;
8809 : : break;
8810 : : }
8811 : 155 : case BUILT_IN_ATOMIC_NAND_FETCH_1:
8812 : 155 : case BUILT_IN_ATOMIC_NAND_FETCH_2:
8813 : 155 : case BUILT_IN_ATOMIC_NAND_FETCH_4:
8814 : 155 : case BUILT_IN_ATOMIC_NAND_FETCH_8:
8815 : 155 : case BUILT_IN_ATOMIC_NAND_FETCH_16:
8816 : 155 : {
8817 : 155 : enum built_in_function lib;
8818 : 155 : mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
8819 : 155 : lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
8820 : : (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
8821 : 155 : target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
8822 : : ignore, lib);
8823 : 155 : if (target)
8824 : : return target;
8825 : : break;
8826 : : }
8827 : 836 : case BUILT_IN_ATOMIC_XOR_FETCH_1:
8828 : 836 : case BUILT_IN_ATOMIC_XOR_FETCH_2:
8829 : 836 : case BUILT_IN_ATOMIC_XOR_FETCH_4:
8830 : 836 : case BUILT_IN_ATOMIC_XOR_FETCH_8:
8831 : 836 : case BUILT_IN_ATOMIC_XOR_FETCH_16:
8832 : 836 : {
8833 : 836 : enum built_in_function lib;
8834 : 836 : mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
8835 : 836 : lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
8836 : : (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
8837 : 836 : target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
8838 : : ignore, lib);
8839 : 836 : if (target)
8840 : : return target;
8841 : : break;
8842 : : }
8843 : 953 : case BUILT_IN_ATOMIC_OR_FETCH_1:
8844 : 953 : case BUILT_IN_ATOMIC_OR_FETCH_2:
8845 : 953 : case BUILT_IN_ATOMIC_OR_FETCH_4:
8846 : 953 : case BUILT_IN_ATOMIC_OR_FETCH_8:
8847 : 953 : case BUILT_IN_ATOMIC_OR_FETCH_16:
8848 : 953 : {
8849 : 953 : enum built_in_function lib;
8850 : 953 : mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
8851 : 953 : lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
8852 : : (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
8853 : 953 : target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
8854 : : ignore, lib);
8855 : 953 : if (target)
8856 : : return target;
8857 : : break;
8858 : : }
8859 : 15313 : case BUILT_IN_ATOMIC_FETCH_ADD_1:
8860 : 15313 : case BUILT_IN_ATOMIC_FETCH_ADD_2:
8861 : 15313 : case BUILT_IN_ATOMIC_FETCH_ADD_4:
8862 : 15313 : case BUILT_IN_ATOMIC_FETCH_ADD_8:
8863 : 15313 : case BUILT_IN_ATOMIC_FETCH_ADD_16:
8864 : 15313 : mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
8865 : 15313 : target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
8866 : : ignore, BUILT_IN_NONE);
8867 : 15313 : if (target)
8868 : : return target;
8869 : : break;
8870 : :
8871 : 2376 : case BUILT_IN_ATOMIC_FETCH_SUB_1:
8872 : 2376 : case BUILT_IN_ATOMIC_FETCH_SUB_2:
8873 : 2376 : case BUILT_IN_ATOMIC_FETCH_SUB_4:
8874 : 2376 : case BUILT_IN_ATOMIC_FETCH_SUB_8:
8875 : 2376 : case BUILT_IN_ATOMIC_FETCH_SUB_16:
8876 : 2376 : mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
8877 : 2376 : target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
8878 : : ignore, BUILT_IN_NONE);
8879 : 2376 : if (target)
8880 : : return target;
8881 : : break;
8882 : :
8883 : 772 : case BUILT_IN_ATOMIC_FETCH_AND_1:
8884 : 772 : case BUILT_IN_ATOMIC_FETCH_AND_2:
8885 : 772 : case BUILT_IN_ATOMIC_FETCH_AND_4:
8886 : 772 : case BUILT_IN_ATOMIC_FETCH_AND_8:
8887 : 772 : case BUILT_IN_ATOMIC_FETCH_AND_16:
8888 : 772 : mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
8889 : 772 : target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
8890 : : ignore, BUILT_IN_NONE);
8891 : 772 : if (target)
8892 : : return target;
8893 : : break;
8894 : :
8895 : 118 : case BUILT_IN_ATOMIC_FETCH_NAND_1:
8896 : 118 : case BUILT_IN_ATOMIC_FETCH_NAND_2:
8897 : 118 : case BUILT_IN_ATOMIC_FETCH_NAND_4:
8898 : 118 : case BUILT_IN_ATOMIC_FETCH_NAND_8:
8899 : 118 : case BUILT_IN_ATOMIC_FETCH_NAND_16:
8900 : 118 : mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
8901 : 118 : target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
8902 : : ignore, BUILT_IN_NONE);
8903 : 118 : if (target)
8904 : : return target;
8905 : : break;
8906 : :
8907 : 862 : case BUILT_IN_ATOMIC_FETCH_XOR_1:
8908 : 862 : case BUILT_IN_ATOMIC_FETCH_XOR_2:
8909 : 862 : case BUILT_IN_ATOMIC_FETCH_XOR_4:
8910 : 862 : case BUILT_IN_ATOMIC_FETCH_XOR_8:
8911 : 862 : case BUILT_IN_ATOMIC_FETCH_XOR_16:
8912 : 862 : mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
8913 : 862 : target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
8914 : : ignore, BUILT_IN_NONE);
8915 : 862 : if (target)
8916 : : return target;
8917 : : break;
8918 : :
8919 : 1145 : case BUILT_IN_ATOMIC_FETCH_OR_1:
8920 : 1145 : case BUILT_IN_ATOMIC_FETCH_OR_2:
8921 : 1145 : case BUILT_IN_ATOMIC_FETCH_OR_4:
8922 : 1145 : case BUILT_IN_ATOMIC_FETCH_OR_8:
8923 : 1145 : case BUILT_IN_ATOMIC_FETCH_OR_16:
8924 : 1145 : mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
8925 : 1145 : target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
8926 : : ignore, BUILT_IN_NONE);
8927 : 1145 : if (target)
8928 : : return target;
8929 : : break;
8930 : :
8931 : 258 : case BUILT_IN_ATOMIC_TEST_AND_SET:
8932 : 258 : target = expand_builtin_atomic_test_and_set (exp, target);
8933 : 258 : if (target)
8934 : : return target;
8935 : : break;
8936 : :
8937 : 57 : case BUILT_IN_ATOMIC_CLEAR:
8938 : 57 : return expand_builtin_atomic_clear (exp);
8939 : :
8940 : 1 : case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8941 : 1 : return expand_builtin_atomic_always_lock_free (exp);
8942 : :
8943 : 3 : case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8944 : 3 : target = expand_builtin_atomic_is_lock_free (exp);
8945 : 3 : if (target)
8946 : : return target;
8947 : : break;
8948 : :
8949 : 694 : case BUILT_IN_ATOMIC_THREAD_FENCE:
8950 : 694 : expand_builtin_atomic_thread_fence (exp);
8951 : 694 : return const0_rtx;
8952 : :
8953 : 60 : case BUILT_IN_ATOMIC_SIGNAL_FENCE:
8954 : 60 : expand_builtin_atomic_signal_fence (exp);
8955 : 60 : return const0_rtx;
8956 : :
8957 : 617 : case BUILT_IN_OBJECT_SIZE:
8958 : 617 : case BUILT_IN_DYNAMIC_OBJECT_SIZE:
8959 : 617 : return expand_builtin_object_size (exp);
8960 : :
8961 : 837 : case BUILT_IN_MEMCPY_CHK:
8962 : 837 : case BUILT_IN_MEMPCPY_CHK:
8963 : 837 : case BUILT_IN_MEMMOVE_CHK:
8964 : 837 : case BUILT_IN_MEMSET_CHK:
8965 : 837 : target = expand_builtin_memory_chk (exp, target, mode, fcode);
8966 : 837 : if (target)
8967 : : return target;
8968 : : break;
8969 : :
8970 : 1135 : case BUILT_IN_STRCPY_CHK:
8971 : 1135 : case BUILT_IN_STPCPY_CHK:
8972 : 1135 : case BUILT_IN_STRNCPY_CHK:
8973 : 1135 : case BUILT_IN_STPNCPY_CHK:
8974 : 1135 : case BUILT_IN_STRCAT_CHK:
8975 : 1135 : case BUILT_IN_STRNCAT_CHK:
8976 : 1135 : case BUILT_IN_SNPRINTF_CHK:
8977 : 1135 : case BUILT_IN_VSNPRINTF_CHK:
8978 : 1135 : maybe_emit_chk_warning (exp, fcode);
8979 : 1135 : break;
8980 : :
8981 : 1329 : case BUILT_IN_SPRINTF_CHK:
8982 : 1329 : case BUILT_IN_VSPRINTF_CHK:
8983 : 1329 : maybe_emit_sprintf_chk_warning (exp, fcode);
8984 : 1329 : break;
8985 : :
8986 : 3 : case BUILT_IN_THREAD_POINTER:
8987 : 3 : return expand_builtin_thread_pointer (exp, target);
8988 : :
8989 : 0 : case BUILT_IN_SET_THREAD_POINTER:
8990 : 0 : expand_builtin_set_thread_pointer (exp);
8991 : 0 : return const0_rtx;
8992 : :
8993 : : case BUILT_IN_ACC_ON_DEVICE:
8994 : : /* Do library call, if we failed to expand the builtin when
8995 : : folding. */
8996 : : break;
8997 : :
8998 : 356 : case BUILT_IN_GOACC_PARLEVEL_ID:
8999 : 356 : case BUILT_IN_GOACC_PARLEVEL_SIZE:
9000 : 356 : return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
9001 : :
9002 : 4 : case BUILT_IN_SPECULATION_SAFE_VALUE_PTR:
9003 : 4 : return expand_speculation_safe_value (VOIDmode, exp, target, ignore);
9004 : :
9005 : 30 : case BUILT_IN_SPECULATION_SAFE_VALUE_1:
9006 : 30 : case BUILT_IN_SPECULATION_SAFE_VALUE_2:
9007 : 30 : case BUILT_IN_SPECULATION_SAFE_VALUE_4:
9008 : 30 : case BUILT_IN_SPECULATION_SAFE_VALUE_8:
9009 : 30 : case BUILT_IN_SPECULATION_SAFE_VALUE_16:
9010 : 30 : mode = get_builtin_sync_mode (fcode - BUILT_IN_SPECULATION_SAFE_VALUE_1);
9011 : 30 : return expand_speculation_safe_value (mode, exp, target, ignore);
9012 : :
9013 : 3 : case BUILT_IN_CRC8_DATA8:
9014 : 3 : return expand_builtin_crc_table_based (IFN_CRC, QImode, QImode, mode,
9015 : : exp, target);
9016 : 2 : case BUILT_IN_CRC16_DATA8:
9017 : 2 : return expand_builtin_crc_table_based (IFN_CRC, HImode, QImode, mode,
9018 : : exp, target);
9019 : 2 : case BUILT_IN_CRC16_DATA16:
9020 : 2 : return expand_builtin_crc_table_based (IFN_CRC, HImode, HImode, mode,
9021 : : exp, target);
9022 : 2 : case BUILT_IN_CRC32_DATA8:
9023 : 2 : return expand_builtin_crc_table_based (IFN_CRC, SImode, QImode, mode,
9024 : : exp, target);
9025 : 2 : case BUILT_IN_CRC32_DATA16:
9026 : 2 : return expand_builtin_crc_table_based (IFN_CRC, SImode, HImode, mode,
9027 : : exp, target);
9028 : 2 : case BUILT_IN_CRC32_DATA32:
9029 : 2 : return expand_builtin_crc_table_based (IFN_CRC, SImode, SImode, mode,
9030 : : exp, target);
9031 : 1 : case BUILT_IN_CRC64_DATA8:
9032 : 1 : return expand_builtin_crc_table_based (IFN_CRC, DImode, QImode, mode,
9033 : : exp, target);
9034 : 1 : case BUILT_IN_CRC64_DATA16:
9035 : 1 : return expand_builtin_crc_table_based (IFN_CRC, DImode, HImode, mode,
9036 : : exp, target);
9037 : 1 : case BUILT_IN_CRC64_DATA32:
9038 : 1 : return expand_builtin_crc_table_based (IFN_CRC, DImode, SImode, mode,
9039 : : exp, target);
9040 : 1 : case BUILT_IN_CRC64_DATA64:
9041 : 1 : return expand_builtin_crc_table_based (IFN_CRC, DImode, DImode, mode,
9042 : : exp, target);
9043 : 2 : case BUILT_IN_REV_CRC8_DATA8:
9044 : 2 : return expand_builtin_crc_table_based (IFN_CRC_REV, QImode, QImode,
9045 : : mode, exp, target);
9046 : 2 : case BUILT_IN_REV_CRC16_DATA8:
9047 : 2 : return expand_builtin_crc_table_based (IFN_CRC_REV, HImode, QImode,
9048 : : mode, exp, target);
9049 : 2 : case BUILT_IN_REV_CRC16_DATA16:
9050 : 2 : return expand_builtin_crc_table_based (IFN_CRC_REV, HImode, HImode,
9051 : : mode, exp, target);
9052 : 3 : case BUILT_IN_REV_CRC32_DATA8:
9053 : 3 : return expand_builtin_crc_table_based (IFN_CRC_REV, SImode, QImode,
9054 : : mode, exp, target);
9055 : 2 : case BUILT_IN_REV_CRC32_DATA16:
9056 : 2 : return expand_builtin_crc_table_based (IFN_CRC_REV, SImode, HImode,
9057 : : mode, exp, target);
9058 : 2 : case BUILT_IN_REV_CRC32_DATA32:
9059 : 2 : return expand_builtin_crc_table_based (IFN_CRC_REV, SImode, SImode,
9060 : : mode, exp, target);
9061 : 1 : case BUILT_IN_REV_CRC64_DATA8:
9062 : 1 : return expand_builtin_crc_table_based (IFN_CRC_REV, DImode, QImode,
9063 : : mode, exp, target);
9064 : 1 : case BUILT_IN_REV_CRC64_DATA16:
9065 : 1 : return expand_builtin_crc_table_based (IFN_CRC_REV, DImode, HImode,
9066 : : mode, exp, target);
9067 : 1 : case BUILT_IN_REV_CRC64_DATA32:
9068 : 1 : return expand_builtin_crc_table_based (IFN_CRC_REV, DImode, SImode,
9069 : : mode, exp, target);
9070 : 1 : case BUILT_IN_REV_CRC64_DATA64:
9071 : 1 : return expand_builtin_crc_table_based (IFN_CRC_REV, DImode, DImode,
9072 : : mode, exp, target);
9073 : : default: /* just do library call, if unknown builtin */
9074 : : break;
9075 : : }
9076 : :
9077 : : /* The switch statement above can drop through to cause the function
9078 : : to be called normally. */
9079 : 840036 : return expand_call (exp, target, ignore);
9080 : : }
9081 : :
/* Determine whether a tree node represents a call to a built-in
   function.  If the tree T is a call to a built-in function with
   the right number of arguments of the appropriate types, return
   the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
   Otherwise the return value is END_BUILTINS.  */

enum built_in_function
builtin_mathfn_code (const_tree t)
{
  const_tree fndecl, arg, parmlist;
  const_tree argtype, parmtype;
  const_call_expr_arg_iterator iter;

  /* Only CALL_EXPRs can name a builtin.  */
  if (TREE_CODE (t) != CALL_EXPR)
    return END_BUILTINS;

  fndecl = get_callee_fndecl (t);
  if (fndecl == NULL_TREE || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
    return END_BUILTINS;

  /* Walk the declared parameter types and the actual arguments in
     lockstep, checking that each argument's type class matches the
     corresponding parameter's type class.  */
  parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  init_const_call_expr_arg_iterator (t, &iter);
  for (; parmlist; parmlist = TREE_CHAIN (parmlist))
    {
      /* If a function doesn't take a variable number of arguments,
	 the last element in the list will have type `void'.  */
      parmtype = TREE_VALUE (parmlist);
      if (VOID_TYPE_P (parmtype))
	{
	  /* Excess actual arguments mean the call doesn't match.  */
	  if (more_const_call_expr_args_p (&iter))
	    return END_BUILTINS;
	  return DECL_FUNCTION_CODE (fndecl);
	}

      /* Too few actual arguments.  */
      if (! more_const_call_expr_args_p (&iter))
	return END_BUILTINS;

      arg = next_const_call_expr_arg (&iter);
      argtype = TREE_TYPE (arg);

      /* Require the argument to be in the same broad type class as the
	 parameter: scalar float, complex float, pointer, or integral.  */
      if (SCALAR_FLOAT_TYPE_P (parmtype))
	{
	  if (! SCALAR_FLOAT_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (COMPLEX_FLOAT_TYPE_P (parmtype))
	{
	  if (! COMPLEX_FLOAT_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (POINTER_TYPE_P (parmtype))
	{
	  if (! POINTER_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (INTEGRAL_TYPE_P (parmtype))
	{
	  if (! INTEGRAL_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else
	/* Parameter type class we don't handle.  */
	return END_BUILTINS;
    }

  /* Variable-length argument list.  */
  return DECL_FUNCTION_CODE (fndecl);
}
9149 : :
/* Fold a call to __builtin_constant_p, if we know its argument ARG will
   evaluate to a constant.  Return integer_one_node or integer_zero_node
   for a definite answer, or NULL_TREE to defer the decision until RTL
   expansion.  */

static tree
fold_builtin_constant_p (tree arg)
{
  /* We return 1 for a numeric type that's known to be a constant
     value at compile-time or for an aggregate type that's a
     literal constant.  */
  STRIP_NOPS (arg);

  /* If we know this is a constant, emit the constant of one.  */
  if (CONSTANT_CLASS_P (arg)
      || (TREE_CODE (arg) == CONSTRUCTOR
	  && TREE_CONSTANT (arg)))
    return integer_one_node;
  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (arg, 0);
      /* The address of a string literal, possibly via an ARRAY_REF at
	 index zero, is a compile-time constant.  */
      if (TREE_CODE (op) == STRING_CST
	  || (TREE_CODE (op) == ARRAY_REF
	      && integer_zerop (TREE_OPERAND (op, 1))
	      && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
	return integer_one_node;
    }

  /* If this expression has side effects, show we don't know it to be a
     constant.  Likewise if it's a pointer or aggregate type since in
     those case we only want literals, since those are only optimized
     when generating RTL, not later.
     And finally, if we are compiling an initializer, not code, we
     need to return a definite result now; there's not going to be any
     more optimization done.  */
  if (TREE_SIDE_EFFECTS (arg)
      || AGGREGATE_TYPE_P (TREE_TYPE (arg))
      || POINTER_TYPE_P (TREE_TYPE (arg))
      || cfun == 0
      || folding_initializer
      || force_folding_builtin_constant_p)
    return integer_zero_node;

  /* Not decidable here; leave the call for later passes or expansion.  */
  return NULL_TREE;
}
9193 : :
/* Create builtin_expect or builtin_expect_with_probability
   with PRED and EXPECTED as its arguments and return it as a truthvalue.
   Fortran FE can also produce builtin_expect with PREDICTOR as third
   argument.  builtin_expect_with_probability instead uses the third
   argument as the PROBABILITY value.  Exactly one of PREDICTOR and
   PROBABILITY is expected to be used (either may be NULL_TREE).  */

static tree
build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
				tree predictor, tree probability)
{
  tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;

  /* PROBABILITY selects which of the two builtins to emit.  */
  fn = builtin_decl_explicit (probability == NULL_TREE ? BUILT_IN_EXPECT
			      : BUILT_IN_EXPECT_WITH_PROBABILITY);
  arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
  ret_type = TREE_TYPE (TREE_TYPE (fn));
  pred_type = TREE_VALUE (arg_types);
  expected_type = TREE_VALUE (TREE_CHAIN (arg_types));

  /* Convert the operands to the builtin's declared parameter types.  */
  pred = fold_convert_loc (loc, pred_type, pred);
  expected = fold_convert_loc (loc, expected_type, expected);

  if (probability)
    call_expr = build_call_expr_loc (loc, fn, 3, pred, expected, probability);
  else
    call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
				     predictor);

  /* Turn the call into a truthvalue: CALL != 0.  */
  return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
		 build_int_cst (ret_type, 0));
}
9225 : :
/* Fold a call to builtin_expect with arguments ARG0, ARG1, ARG2, ARG3.
   ARG0 is the predicate, ARG1 the expected value, ARG2 the optional
   predictor, ARG3 the optional probability.  Return NULL_TREE if no
   simplification is possible.  */

tree
fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2,
		     tree arg3)
{
  tree inner, fndecl, inner_arg0;
  enum tree_code code;

  /* Distribute the expected value over short-circuiting operators.
     See through the cast from truthvalue_type_node to long.  */
  inner_arg0 = arg0;
  while (CONVERT_EXPR_P (inner_arg0)
	 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
    inner_arg0 = TREE_OPERAND (inner_arg0, 0);

  /* If this is a builtin_expect within a builtin_expect keep the
     inner one.  See through a comparison against a constant.  It
     might have been added to create a truthvalue.  */
  inner = inner_arg0;

  if (COMPARISON_CLASS_P (inner)
      && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
    inner = TREE_OPERAND (inner, 0);

  if (TREE_CODE (inner) == CALL_EXPR
      && (fndecl = get_callee_fndecl (inner))
      && fndecl_built_in_p (fndecl, BUILT_IN_EXPECT,
			    BUILT_IN_EXPECT_WITH_PROBABILITY))
    return arg0;

  /* For a short-circuit operator, distribute the expectation onto both
     operands:  __builtin_expect (a && b, E)
	      -> __builtin_expect (a, E) && __builtin_expect (b, E).  */
  inner = inner_arg0;
  code = TREE_CODE (inner);
  if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
    {
      tree op0 = TREE_OPERAND (inner, 0);
      tree op1 = TREE_OPERAND (inner, 1);
      /* ARG1 is used twice below; evaluate it only once.  */
      arg1 = save_expr (arg1);

      op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2, arg3);
      op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2, arg3);
      inner = build2 (code, TREE_TYPE (inner), op0, op1);

      return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
    }

  /* If the argument isn't invariant then there's nothing else we can do.  */
  if (!TREE_CONSTANT (inner_arg0))
    return NULL_TREE;

  /* If we expect that a comparison against the argument will fold to
     a constant return the constant.  In practice, this means a true
     constant or the address of a non-weak symbol.  */
  inner = inner_arg0;
  STRIP_NOPS (inner);
  if (TREE_CODE (inner) == ADDR_EXPR)
    {
      do
	{
	  inner = TREE_OPERAND (inner, 0);
	}
      while (TREE_CODE (inner) == COMPONENT_REF
	     || TREE_CODE (inner) == ARRAY_REF);
      /* The address of a weak symbol may still compare equal to null,
	 so it is not a usable compile-time constant.  */
      if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
	return NULL_TREE;
    }

  /* Otherwise, ARG0 already has the proper type for the return value.  */
  return arg0;
}
9298 : :
9299 : : /* Fold a call to __builtin_classify_type with argument ARG. */
9300 : :
9301 : : static tree
9302 : 2096 : fold_builtin_classify_type (tree arg)
9303 : : {
9304 : 2096 : if (arg == 0)
9305 : 0 : return build_int_cst (integer_type_node, no_type_class);
9306 : :
9307 : 2096 : return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
9308 : : }
9309 : :
/* Fold a call EXPR (which may be null) to __builtin_strlen with argument
   ARG.  TYPE is the return type of the call.  Return the folded length
   as a constant, or NULL_TREE if the length is not known at compile
   time (possibly after diagnosing an unterminated argument).  */

static tree
fold_builtin_strlen (location_t loc, tree expr, tree type, tree arg)
{
  if (!validate_arg (arg, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      c_strlen_data lendata = { };
      tree len = c_strlen (arg, 0, &lendata);

      if (len)
	return fold_convert_loc (loc, type, len);

      /* TODO: Move this to gimple-ssa-warn-access once the pass runs
	 also early enough to detect invalid reads in multidimensional
	 arrays and struct members.  */
      if (!lendata.decl)
	/* Retry in "warning mode" to populate lendata.decl for an
	   argument that refers to an unterminated array.  */
	c_strlen (arg, 1, &lendata);

      if (lendata.decl)
	{
	  /* Prefer the argument's own location for the diagnostic.  */
	  if (EXPR_HAS_LOCATION (arg))
	    loc = EXPR_LOCATION (arg);
	  else if (loc == UNKNOWN_LOCATION)
	    loc = input_location;
	  warn_string_no_nul (loc, expr, "strlen", arg, lendata.decl);
	}

      return NULL_TREE;
    }
}
9344 : :
9345 : : /* Fold a call to __builtin_inf or __builtin_huge_val. */
9346 : :
9347 : : static tree
9348 : 231081 : fold_builtin_inf (location_t loc, tree type, int warn)
9349 : : {
9350 : : /* __builtin_inff is intended to be usable to define INFINITY on all
9351 : : targets. If an infinity is not available, INFINITY expands "to a
9352 : : positive constant of type float that overflows at translation
9353 : : time", footnote "In this case, using INFINITY will violate the
9354 : : constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
9355 : : Thus we pedwarn to ensure this constraint violation is
9356 : : diagnosed. */
9357 : 923515 : if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
9358 : 0 : pedwarn (loc, 0, "target format does not support infinity");
9359 : :
9360 : 231081 : return build_real (type, dconstinf);
9361 : : }
9362 : :
/* Fold function call to builtin sincos, sincosf, or sincosl, with
   arguments ARG0 (the angle) and ARG1/ARG2 (pointers receiving sin and
   cos respectively).  Canonicalizes to cexpi and stores its imag/real
   parts.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_sincos (location_t loc,
		     tree arg0, tree arg1, tree arg2)
{
  tree type;
  tree fndecl, call = NULL_TREE;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  type = TREE_TYPE (arg0);

  /* Find the cexpi variant matching TYPE; without it we cannot fold.  */
  built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
  if (fn == END_BUILTINS)
    return NULL_TREE;

  /* Canonicalize sincos to cexpi.  For a constant argument, try constant
     folding first; CALL stays NULL_TREE if that fails.  */
  if (TREE_CODE (arg0) == REAL_CST)
    {
      tree complex_type = build_complex_type (type);
      call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
    }
  if (!call)
    {
      /* Emit a real cexpi call only if the C library provides it.  */
      if (!targetm.libc_has_function (function_c99_math_complex, type)
	  || !builtin_decl_implicit_p (fn))
	return NULL_TREE;
      fndecl = builtin_decl_explicit (fn);
      call = build_call_expr_loc (loc, fndecl, 1, arg0);
      /* CALL is used twice below (imag and real part); evaluate once.  */
      call = builtin_save_expr (call);
    }

  /* Build (*arg1 = cimag (cexpi (arg0)), *arg2 = creal (cexpi (arg0))).  */
  tree ptype = build_pointer_type (type);
  arg1 = fold_convert (ptype, arg1);
  arg2 = fold_convert (ptype, arg2);
  return build2 (COMPOUND_EXPR, void_type_node,
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg1),
			 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg2),
			 fold_build1_loc (loc, REALPART_EXPR, type, call)));
}
9412 : :
/* Fold function call to builtin memcmp with arguments ARG1 and ARG2
   and length LEN.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
{
  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  The pointer operands
     are still kept for their side effects.  */
  if (integer_zerop (len))
    return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
				  arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
    {
      /* memcmp compares bytes as unsigned char, so build a const
	 unsigned char pointer type for the dereferences.  */
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg1)));
      tree ind2
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg2)));
      return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
9458 : :
9459 : : /* Fold a call to builtin isascii with argument ARG. */
9460 : :
9461 : : static tree
9462 : 211 : fold_builtin_isascii (location_t loc, tree arg)
9463 : : {
9464 : 211 : if (!validate_arg (arg, INTEGER_TYPE))
9465 : : return NULL_TREE;
9466 : : else
9467 : : {
9468 : : /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9469 : 211 : arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9470 : : build_int_cst (integer_type_node,
9471 : : ~ HOST_WIDE_INT_UC (0x7f)));
9472 : 211 : return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9473 : 211 : arg, integer_zero_node);
9474 : : }
9475 : : }
9476 : :
9477 : : /* Fold a call to builtin toascii with argument ARG. */
9478 : :
9479 : : static tree
9480 : 168 : fold_builtin_toascii (location_t loc, tree arg)
9481 : : {
9482 : 168 : if (!validate_arg (arg, INTEGER_TYPE))
9483 : : return NULL_TREE;
9484 : :
9485 : : /* Transform toascii(c) -> (c & 0x7f). */
9486 : 168 : return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9487 : : build_int_cst (integer_type_node, 0x7f));
9488 : : }
9489 : :
9490 : : /* Fold a call to builtin isdigit with argument ARG. */
9491 : :
9492 : : static tree
9493 : 325 : fold_builtin_isdigit (location_t loc, tree arg)
9494 : : {
9495 : 325 : if (!validate_arg (arg, INTEGER_TYPE))
9496 : : return NULL_TREE;
9497 : : else
9498 : : {
9499 : : /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9500 : : /* According to the C standard, isdigit is unaffected by locale.
9501 : : However, it definitely is affected by the target character set. */
9502 : 313 : unsigned HOST_WIDE_INT target_digit0
9503 : 313 : = lang_hooks.to_target_charset ('0');
9504 : :
9505 : 313 : if (target_digit0 == 0)
9506 : : return NULL_TREE;
9507 : :
9508 : 313 : arg = fold_convert_loc (loc, unsigned_type_node, arg);
9509 : 313 : arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9510 : : build_int_cst (unsigned_type_node, target_digit0));
9511 : 313 : return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9512 : : build_int_cst (unsigned_type_node, 9));
9513 : : }
9514 : : }
9515 : :
9516 : : /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9517 : :
9518 : : static tree
9519 : 366288 : fold_builtin_fabs (location_t loc, tree arg, tree type)
9520 : : {
9521 : 366288 : if (!validate_arg (arg, REAL_TYPE))
9522 : : return NULL_TREE;
9523 : :
9524 : 366203 : arg = fold_convert_loc (loc, type, arg);
9525 : 366203 : return fold_build1_loc (loc, ABS_EXPR, type, arg);
9526 : : }
9527 : :
9528 : : /* Fold a call to abs, labs, llabs, imaxabs, uabs, ulabs, ullabs or uimaxabs
9529 : : with argument ARG. */
9530 : :
9531 : : static tree
9532 : 95871 : fold_builtin_abs (location_t loc, tree arg, tree type)
9533 : : {
9534 : 95871 : if (!validate_arg (arg, INTEGER_TYPE))
9535 : : return NULL_TREE;
9536 : :
9537 : 95844 : if (TYPE_UNSIGNED (type))
9538 : : {
9539 : 1168 : if (TYPE_PRECISION (TREE_TYPE (arg))
9540 : 1168 : != TYPE_PRECISION (type)
9541 : 1168 : || TYPE_UNSIGNED (TREE_TYPE (arg)))
9542 : : return NULL_TREE;
9543 : 1168 : return fold_build1_loc (loc, ABSU_EXPR, type, arg);
9544 : : }
9545 : 94676 : arg = fold_convert_loc (loc, type, arg);
9546 : 94676 : return fold_build1_loc (loc, ABS_EXPR, type, arg);
9547 : : }
9548 : :
9549 : : /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9550 : :
9551 : : static tree
9552 : 115962 : fold_builtin_carg (location_t loc, tree arg, tree type)
9553 : : {
9554 : 115962 : if (validate_arg (arg, COMPLEX_TYPE)
9555 : 115962 : && SCALAR_FLOAT_TYPE_P (TREE_TYPE (TREE_TYPE (arg))))
9556 : : {
9557 : 115962 : tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9558 : :
9559 : 115962 : if (atan2_fn)
9560 : : {
9561 : 111579 : tree new_arg = builtin_save_expr (arg);
9562 : 111579 : tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9563 : 111579 : tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9564 : 111579 : return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9565 : : }
9566 : : }
9567 : :
9568 : : return NULL_TREE;
9569 : : }
9570 : :
/* Fold a call to builtin frexp (ARG0, ARG1); we can assume the base is 2.
   RETTYPE is the real return type.  Folds only when ARG0 is a constant;
   the result stores the exponent through ARG1 and yields the fraction.
   Return NULL_TREE if no folding is done.  */

static tree
fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  /* Only fold a constant argument that didn't overflow.  */
  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      tree frac, exp, res;

      switch (value->cl)
	{
	case rvc_zero:
	case rvc_nan:
	case rvc_inf:
	  /* For +-0, return (*exp = 0, +-0).  */
	  /* For +-NaN or +-Inf, *exp is unspecified, but something should
	     be stored there so that it isn't read from uninitialized object.
	     As glibc and newlib store *exp = 0 for +-Inf/NaN, storing
	     0 here as well is easiest.  */
	  exp = integer_zero_node;
	  frac = arg0;
	  break;
	case rvc_normal:
	  {
	    /* Since the frexp function always expects base 2, and in
	       GCC normalized significands are already in the range
	       [0.5, 1.0), we have exactly what frexp wants.  */
	    REAL_VALUE_TYPE frac_rvt = *value;
	    SET_REAL_EXP (&frac_rvt, 0);
	    frac = build_real (rettype, frac_rvt);
	    exp = build_int_cst (integer_type_node, REAL_EXP (value));
	  }
	  break;
	default:
	  gcc_unreachable ();
	}

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
      TREE_SIDE_EFFECTS (arg1) = 1;
      res = fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
      /* The store is intentionally the "unused" left operand.  */
      suppress_warning (res, OPT_Wunused_value);
      return res;
    }

  return NULL_TREE;
}
9630 : :
9631 : : /* Fold a call to builtin modf. */
9632 : :
9633 : : static tree
9634 : 75851 : fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9635 : : {
9636 : 75851 : if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9637 : : return NULL_TREE;
9638 : :
9639 : 75851 : STRIP_NOPS (arg0);
9640 : :
9641 : 75851 : if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9642 : : return NULL_TREE;
9643 : :
9644 : 2474 : arg1 = build_fold_indirect_ref_loc (loc, arg1);
9645 : :
9646 : : /* Proceed if a valid pointer type was passed in. */
9647 : 2474 : if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9648 : : {
9649 : 2474 : const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9650 : 2474 : REAL_VALUE_TYPE trunc, frac;
9651 : 2474 : tree res;
9652 : :
9653 : 2474 : switch (value->cl)
9654 : : {
9655 : 228 : case rvc_nan:
9656 : 228 : case rvc_zero:
9657 : : /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9658 : 228 : trunc = frac = *value;
9659 : 228 : break;
9660 : 96 : case rvc_inf:
9661 : : /* For +-Inf, return (*arg1 = arg0, +-0). */
9662 : 96 : frac = dconst0;
9663 : 96 : frac.sign = value->sign;
9664 : 96 : trunc = *value;
9665 : 96 : break;
9666 : 2150 : case rvc_normal:
9667 : : /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9668 : 2150 : real_trunc (&trunc, VOIDmode, value);
9669 : 2150 : real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9670 : : /* If the original number was negative and already
9671 : : integral, then the fractional part is -0.0. */
9672 : 2150 : if (value->sign && frac.cl == rvc_zero)
9673 : 69 : frac.sign = value->sign;
9674 : : break;
9675 : : }
9676 : :
9677 : : /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9678 : 2474 : arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9679 : : build_real (rettype, trunc));
9680 : 2474 : TREE_SIDE_EFFECTS (arg1) = 1;
9681 : 2474 : res = fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9682 : : build_real (rettype, frac));
9683 : 2474 : suppress_warning (res, OPT_Wunused_value);
9684 : 2474 : return res;
9685 : : }
9686 : :
9687 : : return NULL_TREE;
9688 : : }
9689 : :
/* Given a location LOC, an interclass builtin function decl FNDECL
   and its single argument ARG, return a folded expression computing
   the same, or NULL_TREE if we either couldn't or didn't want to fold
   (the latter happen if there's an RTL instruction available).  */

static tree
fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
{
  machine_mode mode;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* If the target has an instruction for this classification, prefer
     expanding that at RTL time over open-coding the test here.  */
  if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
    return NULL_TREE;

  mode = TYPE_MODE (TREE_TYPE (arg));

  /* IBM extended (double-double) values need special handling: the
     classification is decided by the high-order double alone.  */
  bool is_ibm_extended = MODE_COMPOSITE_P (mode);

  /* If there is no optab, try generic code.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
      tree result;

    CASE_FLT_FN (BUILT_IN_ISINF):
      {
	/* isinf(x) -> isgreater(fabs(x),DBL_MAX).  */
	tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
	tree type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	if (is_ibm_extended)
	  {
	    /* NaN and Inf are encoded in the high-order double value
	       only.  The low-order value is not significant.  */
	    type = double_type_node;
	    mode = DFmode;
	    arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
	  }
	/* Build the largest finite value of MODE; anything greater in
	   magnitude must be an infinity (isgreater is false for NaN).  */
	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
	real_from_string3 (&r, buf, mode);
	result = build_call_expr (isgr_fn, 2,
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	return result;
      }
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_ISFINITE:
      {
	/* isfinite(x) -> islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
	tree type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	if (is_ibm_extended)
	  {
	    /* NaN and Inf are encoded in the high-order double value
	       only.  The low-order value is not significant.  */
	    type = double_type_node;
	    mode = DFmode;
	    arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
	  }
	/* islessequal is an unordered comparison, so it is false for
	   NaN as well as for infinities — exactly !isfinite.  */
	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
	real_from_string3 (&r, buf, mode);
	result = build_call_expr (isle_fn, 2,
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	/*result = fold_build2_loc (loc, UNGT_EXPR,
				  TREE_TYPE (TREE_TYPE (fndecl)),
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
				  TREE_TYPE (TREE_TYPE (fndecl)),
				  result);*/
	return result;
      }
    case BUILT_IN_ISNORMAL:
      {
	/* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
	   islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
	tree type = TREE_TYPE (arg);
	tree orig_arg, max_exp, min_exp;
	machine_mode orig_mode = mode;
	REAL_VALUE_TYPE rmax, rmin;
	char buf[128];

	/* ARG is evaluated twice below; wrap it so side effects run
	   only once.  Keep the unwrapped SAVE_EXPR in ORIG_ARG for the
	   IBM-extended VIEW_CONVERT below.  */
	orig_arg = arg = builtin_save_expr (arg);
	if (is_ibm_extended)
	  {
	    /* Use double to test the normal range of IBM extended
	       precision.  Emin for IBM extended precision is
	       different to emin for IEEE double, being 53 higher
	       since the low double exponent is at least 53 lower
	       than the high double exponent.  */
	    type = double_type_node;
	    mode = DFmode;
	    arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
	  }
	arg = fold_build1_loc (loc, ABS_EXPR, type, arg);

	/* RMAX is the largest finite value; RMIN is the smallest
	   normal value (radix ** (emin - 1)) of the original mode.  */
	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
	real_from_string3 (&rmax, buf, mode);
	if (DECIMAL_FLOAT_MODE_P (mode))
	  sprintf (buf, "1E%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
	else
	  sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
	real_from_string3 (&rmin, buf, orig_mode);
	max_exp = build_real (type, rmax);
	min_exp = build_real (type, rmin);

	max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
	if (is_ibm_extended)
	  {
	    /* Testing the high end of the range is done just using
	       the high double, using the same test as isfinite().
	       For the subnormal end of the range we first test the
	       high double, then if its magnitude is equal to the
	       limit of 0x1p-969, we test whether the low double is
	       non-zero and opposite sign to the high double.  */
	    tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
	    tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
	    tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
	    tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
				       arg, min_exp);
	    tree as_complex = build1 (VIEW_CONVERT_EXPR,
				      complex_double_type_node, orig_arg);
	    tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
	    tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
	    tree zero = build_real (type, dconst0);
	    tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
	    tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
	    tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
	    tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
				      fold_build3 (COND_EXPR,
						   integer_type_node,
						   hilt, logt, lolt));
	    eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
				  eq_min, ok_lo);
	    min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
				   gt_min, eq_min);
	  }
	else
	  {
	    tree const isge_fn
	      = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
	    min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
	  }
	result = fold_build2 (BIT_AND_EXPR, integer_type_node,
			      max_exp, min_exp);
	return result;
      }
    CASE_FLT_FN (BUILT_IN_ISNAN):
    case BUILT_IN_ISNAND32:
    case BUILT_IN_ISNAND64:
    case BUILT_IN_ISNAND128:
      {
	/* In IBM extended NaN and Inf are encoded in the high-order double
	   value only.  The low-order value is not significant.  */
	if (is_ibm_extended)
	  arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
	/* isnan(x) -> x unordered x; only NaN compares unordered with
	   itself.  SAVE_EXPR so ARG's side effects run once.  */
	arg = builtin_save_expr (arg);
	tree type = TREE_TYPE (TREE_TYPE (fndecl));
	return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
      }
    default:
      break;
    }

  return NULL_TREE;
}
9864 : :
9865 : : /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9866 : : ARG is the argument for the call. */
9867 : :
9868 : : static tree
9869 : 1139077 : fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9870 : : {
9871 : 1139077 : tree type = TREE_TYPE (TREE_TYPE (fndecl));
9872 : :
9873 : 1139077 : if (!validate_arg (arg, REAL_TYPE))
9874 : : return NULL_TREE;
9875 : :
9876 : 1139077 : switch (builtin_index)
9877 : : {
9878 : 268065 : case BUILT_IN_ISINF:
9879 : 268065 : if (tree_expr_infinite_p (arg))
9880 : 0 : return omit_one_operand_loc (loc, type, integer_one_node, arg);
9881 : 268065 : if (!tree_expr_maybe_infinite_p (arg))
9882 : 121 : return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9883 : : return NULL_TREE;
9884 : :
9885 : 704 : case BUILT_IN_ISINF_SIGN:
9886 : 704 : {
9887 : : /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9888 : : /* In a boolean context, GCC will fold the inner COND_EXPR to
9889 : : 1. So e.g. "if (isinf_sign(x))" would be folded to just
9890 : : "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9891 : 704 : tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
9892 : 704 : tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
9893 : 704 : tree tmp = NULL_TREE;
9894 : :
9895 : 704 : arg = builtin_save_expr (arg);
9896 : :
9897 : 704 : if (signbit_fn && isinf_fn)
9898 : : {
9899 : 704 : tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9900 : 704 : tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9901 : :
9902 : 704 : signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9903 : : signbit_call, integer_zero_node);
9904 : 704 : isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9905 : : isinf_call, integer_zero_node);
9906 : :
9907 : 704 : tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9908 : : integer_minus_one_node, integer_one_node);
9909 : 704 : tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9910 : : isinf_call, tmp,
9911 : : integer_zero_node);
9912 : : }
9913 : :
9914 : : return tmp;
9915 : : }
9916 : :
9917 : 573762 : case BUILT_IN_ISFINITE:
9918 : 573762 : if (tree_expr_finite_p (arg))
9919 : 224 : return omit_one_operand_loc (loc, type, integer_one_node, arg);
9920 : 573538 : if (tree_expr_nan_p (arg) || tree_expr_infinite_p (arg))
9921 : 0 : return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9922 : : return NULL_TREE;
9923 : :
9924 : 267692 : case BUILT_IN_ISNAN:
9925 : 267692 : if (tree_expr_nan_p (arg))
9926 : 0 : return omit_one_operand_loc (loc, type, integer_one_node, arg);
9927 : 267692 : if (!tree_expr_maybe_nan_p (arg))
9928 : 124 : return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9929 : : return NULL_TREE;
9930 : :
9931 : 28854 : case BUILT_IN_ISSIGNALING:
9932 : : /* Folding to true for REAL_CST is done in fold_const_call_ss.
9933 : : Don't use tree_expr_signaling_nan_p (arg) -> integer_one_node
9934 : : and !tree_expr_maybe_signaling_nan_p (arg) -> integer_zero_node
9935 : : here, so there is some possibility of __builtin_issignaling working
9936 : : without -fsignaling-nans. Especially when -fno-signaling-nans is
9937 : : the default. */
9938 : 28854 : if (!tree_expr_maybe_nan_p (arg))
9939 : 0 : return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9940 : : return NULL_TREE;
9941 : :
9942 : 0 : default:
9943 : 0 : gcc_unreachable ();
9944 : : }
9945 : : }
9946 : :
9947 : : /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9948 : : This builtin will generate code to return the appropriate floating
9949 : : point classification depending on the value of the floating point
9950 : : number passed in. The possible return values must be supplied as
9951 : : int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9952 : : FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
9953 : : one floating point argument which is "type generic". */
9954 : :
9955 : : static tree
9956 : 115110 : fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
9957 : : {
9958 : 115110 : tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9959 : : arg, type, res, tmp;
9960 : 115110 : machine_mode mode;
9961 : 115110 : REAL_VALUE_TYPE r;
9962 : 115110 : char buf[128];
9963 : :
9964 : : /* Verify the required arguments in the original call. */
9965 : 115110 : if (nargs != 6
9966 : 115110 : || !validate_arg (args[0], INTEGER_TYPE)
9967 : 115110 : || !validate_arg (args[1], INTEGER_TYPE)
9968 : 115110 : || !validate_arg (args[2], INTEGER_TYPE)
9969 : 115110 : || !validate_arg (args[3], INTEGER_TYPE)
9970 : 115110 : || !validate_arg (args[4], INTEGER_TYPE)
9971 : 230220 : || !validate_arg (args[5], REAL_TYPE))
9972 : : return NULL_TREE;
9973 : :
9974 : 115110 : fp_nan = args[0];
9975 : 115110 : fp_infinite = args[1];
9976 : 115110 : fp_normal = args[2];
9977 : 115110 : fp_subnormal = args[3];
9978 : 115110 : fp_zero = args[4];
9979 : 115110 : arg = args[5];
9980 : 115110 : type = TREE_TYPE (arg);
9981 : 115110 : mode = TYPE_MODE (type);
9982 : 115110 : arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9983 : :
9984 : : /* fpclassify(x) ->
9985 : : isnan(x) ? FP_NAN :
9986 : : (fabs(x) == Inf ? FP_INFINITE :
9987 : : (fabs(x) >= DBL_MIN ? FP_NORMAL :
9988 : : (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9989 : :
9990 : 115110 : tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9991 : : build_real (type, dconst0));
9992 : 115110 : res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9993 : : tmp, fp_zero, fp_subnormal);
9994 : :
9995 : 115110 : if (DECIMAL_FLOAT_MODE_P (mode))
9996 : 3 : sprintf (buf, "1E%d", REAL_MODE_FORMAT (mode)->emin - 1);
9997 : : else
9998 : 115107 : sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9999 : 115110 : real_from_string3 (&r, buf, mode);
10000 : 115110 : tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
10001 : : arg, build_real (type, r));
10002 : 115110 : res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
10003 : : fp_normal, res);
10004 : :
10005 : 115110 : if (tree_expr_maybe_infinite_p (arg))
10006 : : {
10007 : 115022 : tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
10008 : : build_real (type, dconstinf));
10009 : 115022 : res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
10010 : : fp_infinite, res);
10011 : : }
10012 : :
10013 : 115110 : if (tree_expr_maybe_nan_p (arg))
10014 : : {
10015 : 115020 : tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
10016 : 115020 : res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
10017 : : res, fp_nan);
10018 : : }
10019 : :
10020 : : return res;
10021 : : }
10022 : :
10023 : : /* Fold a call to an unordered comparison function such as
10024 : : __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
10025 : : being called and ARG0 and ARG1 are the arguments for the call.
10026 : : UNORDERED_CODE and ORDERED_CODE are comparison codes that give
10027 : : the opposite of the desired result. UNORDERED_CODE is used
10028 : : for modes that can hold NaNs and ORDERED_CODE is used for
10029 : : the rest. */
10030 : :
10031 : : static tree
10032 : 2961376 : fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
10033 : : enum tree_code unordered_code,
10034 : : enum tree_code ordered_code)
10035 : : {
10036 : 2961376 : tree type = TREE_TYPE (TREE_TYPE (fndecl));
10037 : 2961376 : enum tree_code code;
10038 : 2961376 : tree type0, type1;
10039 : 2961376 : enum tree_code code0, code1;
10040 : 2961376 : tree cmp_type = NULL_TREE;
10041 : :
10042 : 2961376 : type0 = TREE_TYPE (arg0);
10043 : 2961376 : type1 = TREE_TYPE (arg1);
10044 : :
10045 : 2961376 : code0 = TREE_CODE (type0);
10046 : 2961376 : code1 = TREE_CODE (type1);
10047 : :
10048 : 2961376 : if (code0 == REAL_TYPE && code1 == REAL_TYPE)
10049 : : /* Choose the wider of two real types. */
10050 : 2961196 : cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
10051 : 2961196 : ? type0 : type1;
10052 : 180 : else if (code0 == REAL_TYPE
10053 : 91 : && (code1 == INTEGER_TYPE || code1 == BITINT_TYPE))
10054 : : cmp_type = type0;
10055 : 89 : else if ((code0 == INTEGER_TYPE || code0 == BITINT_TYPE)
10056 : 89 : && code1 == REAL_TYPE)
10057 : 135 : cmp_type = type1;
10058 : :
10059 : 2961376 : arg0 = fold_convert_loc (loc, cmp_type, arg0);
10060 : 2961376 : arg1 = fold_convert_loc (loc, cmp_type, arg1);
10061 : :
10062 : 2961376 : if (unordered_code == UNORDERED_EXPR)
10063 : : {
10064 : 265667 : if (tree_expr_nan_p (arg0) || tree_expr_nan_p (arg1))
10065 : 16 : return omit_two_operands_loc (loc, type, integer_one_node, arg0, arg1);
10066 : 265651 : if (!tree_expr_maybe_nan_p (arg0) && !tree_expr_maybe_nan_p (arg1))
10067 : 141 : return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
10068 : 265510 : return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
10069 : : }
10070 : :
10071 : 2698560 : code = (tree_expr_maybe_nan_p (arg0) || tree_expr_maybe_nan_p (arg1))
10072 : 2695709 : ? unordered_code : ordered_code;
10073 : 2695709 : return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
10074 : 2695709 : fold_build2_loc (loc, code, type, arg0, arg1));
10075 : : }
10076 : :
10077 : : /* Fold a call to __builtin_iseqsig(). ARG0 and ARG1 are the arguments.
10078 : : After choosing the wider floating-point type for the comparison,
10079 : : the code is folded to:
10080 : : SAVE_EXPR<ARG0> >= SAVE_EXPR<ARG1> && SAVE_EXPR<ARG0> <= SAVE_EXPR<ARG1> */
10081 : :
10082 : : static tree
10083 : 702 : fold_builtin_iseqsig (location_t loc, tree arg0, tree arg1)
10084 : : {
10085 : 702 : tree type0, type1;
10086 : 702 : enum tree_code code0, code1;
10087 : 702 : tree cmp1, cmp2, cmp_type = NULL_TREE;
10088 : :
10089 : 702 : type0 = TREE_TYPE (arg0);
10090 : 702 : type1 = TREE_TYPE (arg1);
10091 : :
10092 : 702 : code0 = TREE_CODE (type0);
10093 : 702 : code1 = TREE_CODE (type1);
10094 : :
10095 : 702 : if (code0 == REAL_TYPE && code1 == REAL_TYPE)
10096 : : /* Choose the wider of two real types. */
10097 : 690 : cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
10098 : 690 : ? type0 : type1;
10099 : 12 : else if (code0 == REAL_TYPE
10100 : 6 : && (code1 == INTEGER_TYPE || code1 == BITINT_TYPE))
10101 : : cmp_type = type0;
10102 : 6 : else if ((code0 == INTEGER_TYPE || code0 == BITINT_TYPE)
10103 : 6 : && code1 == REAL_TYPE)
10104 : 6 : cmp_type = type1;
10105 : :
10106 : 702 : arg0 = builtin_save_expr (fold_convert_loc (loc, cmp_type, arg0));
10107 : 702 : arg1 = builtin_save_expr (fold_convert_loc (loc, cmp_type, arg1));
10108 : :
10109 : 702 : cmp1 = fold_build2_loc (loc, GE_EXPR, integer_type_node, arg0, arg1);
10110 : 702 : cmp2 = fold_build2_loc (loc, LE_EXPR, integer_type_node, arg0, arg1);
10111 : :
10112 : 702 : return fold_build2_loc (loc, TRUTH_AND_EXPR, integer_type_node, cmp1, cmp2);
10113 : : }
10114 : :
/* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
   arithmetics if it can never overflow, or into internal functions that
   return both result of arithmetics and overflowed boolean flag in
   a complex integer result, or some other check for overflow.
   Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
   checking part of that.

   FCODE identifies the builtin; ARG0 and ARG1 are the operands and ARG2
   is the result pointer (or, for the _overflow_p forms, the discarded
   third operand whose type selects the overflow check type).  */

static tree
fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
			     tree arg0, tree arg1, tree arg2)
{
  enum internal_fn ifn = IFN_LAST;
  /* The code of the expression corresponding to the built-in.  */
  enum tree_code opcode = ERROR_MARK;
  /* True for the __builtin_{add,sub,mul}_overflow_p forms, which only
     report overflow and never store a result.  */
  bool ovf_only = false;

  switch (fcode)
    {
    case BUILT_IN_ADD_OVERFLOW_P:
      ovf_only = true;
      /* FALLTHRU */
    case BUILT_IN_ADD_OVERFLOW:
    case BUILT_IN_SADD_OVERFLOW:
    case BUILT_IN_SADDL_OVERFLOW:
    case BUILT_IN_SADDLL_OVERFLOW:
    case BUILT_IN_UADD_OVERFLOW:
    case BUILT_IN_UADDL_OVERFLOW:
    case BUILT_IN_UADDLL_OVERFLOW:
      opcode = PLUS_EXPR;
      ifn = IFN_ADD_OVERFLOW;
      break;
    case BUILT_IN_SUB_OVERFLOW_P:
      ovf_only = true;
      /* FALLTHRU */
    case BUILT_IN_SUB_OVERFLOW:
    case BUILT_IN_SSUB_OVERFLOW:
    case BUILT_IN_SSUBL_OVERFLOW:
    case BUILT_IN_SSUBLL_OVERFLOW:
    case BUILT_IN_USUB_OVERFLOW:
    case BUILT_IN_USUBL_OVERFLOW:
    case BUILT_IN_USUBLL_OVERFLOW:
      opcode = MINUS_EXPR;
      ifn = IFN_SUB_OVERFLOW;
      break;
    case BUILT_IN_MUL_OVERFLOW_P:
      ovf_only = true;
      /* FALLTHRU */
    case BUILT_IN_MUL_OVERFLOW:
    case BUILT_IN_SMUL_OVERFLOW:
    case BUILT_IN_SMULL_OVERFLOW:
    case BUILT_IN_SMULLL_OVERFLOW:
    case BUILT_IN_UMUL_OVERFLOW:
    case BUILT_IN_UMULL_OVERFLOW:
    case BUILT_IN_UMULLL_OVERFLOW:
      opcode = MULT_EXPR;
      ifn = IFN_MUL_OVERFLOW;
      break;
    default:
      gcc_unreachable ();
    }

  /* For the "generic" overloads, the first two arguments can have different
     types and the last argument determines the target type to use to check
     for overflow.  The arguments of the other overloads all have the same
     type.  */
  tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));

  /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
     arguments are constant, attempt to fold the built-in call into a constant
     expression indicating whether or not it detected an overflow.  */
  if (ovf_only
      && TREE_CODE (arg0) == INTEGER_CST
      && TREE_CODE (arg1) == INTEGER_CST)
    /* Perform the computation in the target type and check for overflow.  */
    return omit_one_operand_loc (loc, boolean_type_node,
				 arith_overflowed_p (opcode, type, arg0, arg1)
				 ? boolean_true_node : boolean_false_node,
				 arg2);

  tree intres, ovfres;
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* Both operands constant: compute the (wrapped) result and the
	 overflow flag directly, no internal function needed.  */
      intres = fold_binary_loc (loc, opcode, type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type, arg1));
      if (TREE_OVERFLOW (intres))
	intres = drop_tree_overflow (intres);
      ovfres = (arith_overflowed_p (opcode, type, arg0, arg1)
		? boolean_true_node : boolean_false_node);
    }
  else
    {
      /* Emit IFN_{ADD,SUB,MUL}_OVERFLOW returning a complex value:
	 real part is the result, imaginary part the overflow flag.  */
      tree ctype = build_complex_type (type);
      tree call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
						arg0, arg1);
      tree tgt;
      if (ovf_only)
	{
	  tgt = call;
	  /* NOTE(review): intres is unconditionally overwritten just
	     below; this assignment looks redundant — confirm against
	     upstream before removing.  */
	  intres = NULL_TREE;
	}
      else
	{
	  /* Force SAVE_EXPR even for calls which satisfy tree_invariant_p_1,
	     as while the call itself is const, the REALPART_EXPR store is
	     certainly not.  And in any case, we want just one call,
	     not multiple and trying to CSE them later.  */
	  TREE_SIDE_EFFECTS (call) = 1;
	  tgt = save_expr (call);
	}
      intres = build1_loc (loc, REALPART_EXPR, type, tgt);
      ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
      ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
    }

  /* The _overflow_p forms only return the flag; ARG2 is evaluated for
     its side effects but its value is unused.  */
  if (ovf_only)
    return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);

  /* Otherwise store the result through ARG2 and yield the flag:
     (*arg2 = intres, ovfres).  */
  tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
  tree store
    = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
  return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
}
10238 : :
10239 : : /* Fold __builtin_{clz,ctz,clrsb,ffs,parity,popcount}g into corresponding
10240 : : internal function. */
10241 : :
10242 : : static tree
10243 : 232932 : fold_builtin_bit_query (location_t loc, enum built_in_function fcode,
10244 : : tree arg0, tree arg1)
10245 : : {
10246 : 232932 : enum internal_fn ifn;
10247 : 232932 : enum built_in_function fcodei, fcodel, fcodell;
10248 : 232932 : tree arg0_type = TREE_TYPE (arg0);
10249 : 232932 : tree cast_type = NULL_TREE;
10250 : 232932 : int addend = 0;
10251 : :
10252 : 232932 : switch (fcode)
10253 : : {
10254 : 165628 : case BUILT_IN_CLZG:
10255 : 165628 : if (arg1 && TREE_CODE (arg1) != INTEGER_CST)
10256 : : return NULL_TREE;
10257 : : ifn = IFN_CLZ;
10258 : : fcodei = BUILT_IN_CLZ;
10259 : : fcodel = BUILT_IN_CLZL;
10260 : : fcodell = BUILT_IN_CLZLL;
10261 : : break;
10262 : 48581 : case BUILT_IN_CTZG:
10263 : 48581 : if (arg1 && TREE_CODE (arg1) != INTEGER_CST)
10264 : : return NULL_TREE;
10265 : : ifn = IFN_CTZ;
10266 : : fcodei = BUILT_IN_CTZ;
10267 : : fcodel = BUILT_IN_CTZL;
10268 : : fcodell = BUILT_IN_CTZLL;
10269 : : break;
10270 : : case BUILT_IN_CLRSBG:
10271 : : ifn = IFN_CLRSB;
10272 : : fcodei = BUILT_IN_CLRSB;
10273 : : fcodel = BUILT_IN_CLRSBL;
10274 : : fcodell = BUILT_IN_CLRSBLL;
10275 : : break;
10276 : 83 : case BUILT_IN_FFSG:
10277 : 83 : ifn = IFN_FFS;
10278 : 83 : fcodei = BUILT_IN_FFS;
10279 : 83 : fcodel = BUILT_IN_FFSL;
10280 : 83 : fcodell = BUILT_IN_FFSLL;
10281 : 83 : break;
10282 : 76 : case BUILT_IN_PARITYG:
10283 : 76 : ifn = IFN_PARITY;
10284 : 76 : fcodei = BUILT_IN_PARITY;
10285 : 76 : fcodel = BUILT_IN_PARITYL;
10286 : 76 : fcodell = BUILT_IN_PARITYLL;
10287 : 76 : break;
10288 : 18483 : case BUILT_IN_POPCOUNTG:
10289 : 18483 : ifn = IFN_POPCOUNT;
10290 : 18483 : fcodei = BUILT_IN_POPCOUNT;
10291 : 18483 : fcodel = BUILT_IN_POPCOUNTL;
10292 : 18483 : fcodell = BUILT_IN_POPCOUNTLL;
10293 : 18483 : break;
10294 : 0 : default:
10295 : 0 : gcc_unreachable ();
10296 : : }
10297 : :
10298 : 232810 : if (TYPE_PRECISION (arg0_type)
10299 : 232810 : <= TYPE_PRECISION (long_long_unsigned_type_node))
10300 : : {
10301 : 201327 : if (TYPE_PRECISION (arg0_type) <= TYPE_PRECISION (unsigned_type_node))
10302 : :
10303 : 86425 : cast_type = (TYPE_UNSIGNED (arg0_type)
10304 : 86371 : ? unsigned_type_node : integer_type_node);
10305 : 114956 : else if (TYPE_PRECISION (arg0_type)
10306 : 114956 : <= TYPE_PRECISION (long_unsigned_type_node))
10307 : : {
10308 : 114977 : cast_type = (TYPE_UNSIGNED (arg0_type)
10309 : 114905 : ? long_unsigned_type_node : long_integer_type_node);
10310 : : fcodei = fcodel;
10311 : : }
10312 : : else
10313 : : {
10314 : 51 : cast_type = (TYPE_UNSIGNED (arg0_type)
10315 : 51 : ? long_long_unsigned_type_node
10316 : : : long_long_integer_type_node);
10317 : : fcodei = fcodell;
10318 : : }
10319 : : }
10320 : 62966 : else if (TYPE_PRECISION (arg0_type) <= MAX_FIXED_MODE_SIZE)
10321 : : {
10322 : 31383 : cast_type
10323 : 31383 : = build_nonstandard_integer_type (MAX_FIXED_MODE_SIZE,
10324 : 31383 : TYPE_UNSIGNED (arg0_type));
10325 : 31383 : gcc_assert (TYPE_PRECISION (cast_type)
10326 : : == 2 * TYPE_PRECISION (long_long_unsigned_type_node));
10327 : : fcodei = END_BUILTINS;
10328 : : }
10329 : : else
10330 : : fcodei = END_BUILTINS;
10331 : 232710 : if (cast_type)
10332 : : {
10333 : 232710 : switch (fcode)
10334 : : {
10335 : 165617 : case BUILT_IN_CLZG:
10336 : 165617 : case BUILT_IN_CLRSBG:
10337 : 165617 : addend = TYPE_PRECISION (arg0_type) - TYPE_PRECISION (cast_type);
10338 : 165617 : break;
10339 : : default:
10340 : : break;
10341 : : }
10342 : 232710 : arg0 = fold_convert (cast_type, arg0);
10343 : 232710 : arg0_type = cast_type;
10344 : : }
10345 : :
10346 : 232810 : if (arg1)
10347 : 151245 : arg1 = fold_convert (integer_type_node, arg1);
10348 : :
10349 : 232810 : tree arg2 = arg1;
10350 : 232810 : if (fcode == BUILT_IN_CLZG && addend)
10351 : : {
10352 : 7422 : if (arg1)
10353 : 7389 : arg0 = save_expr (arg0);
10354 : : arg2 = NULL_TREE;
10355 : : }
10356 : 232810 : tree call = NULL_TREE, tem;
10357 : 232810 : if (TYPE_PRECISION (arg0_type) == MAX_FIXED_MODE_SIZE
10358 : 31434 : && (TYPE_PRECISION (arg0_type)
10359 : 31434 : == 2 * TYPE_PRECISION (long_long_unsigned_type_node))
10360 : : /* If the target supports the optab, then don't do the expansion. */
10361 : 264193 : && !direct_internal_fn_supported_p (ifn, arg0_type, OPTIMIZE_FOR_BOTH))
10362 : : {
10363 : : /* __int128 expansions using up to 2 long long builtins. */
10364 : 31383 : arg0 = save_expr (arg0);
10365 : 31383 : tree type = (TYPE_UNSIGNED (arg0_type)
10366 : 31383 : ? long_long_unsigned_type_node
10367 : 31383 : : long_long_integer_type_node);
10368 : 62766 : tree hi = fold_build2 (RSHIFT_EXPR, arg0_type, arg0,
10369 : : build_int_cst (integer_type_node,
10370 : : MAX_FIXED_MODE_SIZE / 2));
10371 : 31383 : hi = fold_convert (type, hi);
10372 : 31383 : tree lo = fold_convert (type, arg0);
10373 : 31383 : switch (fcode)
10374 : : {
10375 : 31288 : case BUILT_IN_CLZG:
10376 : 31288 : call = fold_builtin_bit_query (loc, fcode, lo, NULL_TREE);
10377 : 62576 : call = fold_build2 (PLUS_EXPR, integer_type_node, call,
10378 : : build_int_cst (integer_type_node,
10379 : : MAX_FIXED_MODE_SIZE / 2));
10380 : 31288 : if (arg2)
10381 : 31275 : call = fold_build3 (COND_EXPR, integer_type_node,
10382 : : fold_build2 (NE_EXPR, boolean_type_node,
10383 : : lo, build_zero_cst (type)),
10384 : : call, arg2);
10385 : 31288 : call = fold_build3 (COND_EXPR, integer_type_node,
10386 : : fold_build2 (NE_EXPR, boolean_type_node,
10387 : : hi, build_zero_cst (type)),
10388 : : fold_builtin_bit_query (loc, fcode, hi,
10389 : : NULL_TREE),
10390 : : call);
10391 : 31288 : break;
10392 : 33 : case BUILT_IN_CTZG:
10393 : 33 : call = fold_builtin_bit_query (loc, fcode, hi, NULL_TREE);
10394 : 66 : call = fold_build2 (PLUS_EXPR, integer_type_node, call,
10395 : : build_int_cst (integer_type_node,
10396 : : MAX_FIXED_MODE_SIZE / 2));
10397 : 33 : if (arg2)
10398 : 24 : call = fold_build3 (COND_EXPR, integer_type_node,
10399 : : fold_build2 (NE_EXPR, boolean_type_node,
10400 : : hi, build_zero_cst (type)),
10401 : : call, arg2);
10402 : 33 : call = fold_build3 (COND_EXPR, integer_type_node,
10403 : : fold_build2 (NE_EXPR, boolean_type_node,
10404 : : lo, build_zero_cst (type)),
10405 : : fold_builtin_bit_query (loc, fcode, lo,
10406 : : NULL_TREE),
10407 : : call);
10408 : 33 : break;
10409 : 9 : case BUILT_IN_CLRSBG:
10410 : 9 : tem = fold_builtin_bit_query (loc, fcode, lo, NULL_TREE);
10411 : 18 : tem = fold_build2 (PLUS_EXPR, integer_type_node, tem,
10412 : : build_int_cst (integer_type_node,
10413 : : MAX_FIXED_MODE_SIZE / 2));
10414 : 18 : tem = fold_build3 (COND_EXPR, integer_type_node,
10415 : : fold_build2 (LT_EXPR, boolean_type_node,
10416 : : fold_build2 (BIT_XOR_EXPR, type,
10417 : : lo, hi),
10418 : : build_zero_cst (type)),
10419 : : build_int_cst (integer_type_node,
10420 : : MAX_FIXED_MODE_SIZE / 2 - 1),
10421 : : tem);
10422 : 9 : call = fold_builtin_bit_query (loc, fcode, hi, NULL_TREE);
10423 : 9 : call = save_expr (call);
10424 : 18 : call = fold_build3 (COND_EXPR, integer_type_node,
10425 : : fold_build2 (NE_EXPR, boolean_type_node,
10426 : : call,
10427 : : build_int_cst (integer_type_node,
10428 : : MAX_FIXED_MODE_SIZE
10429 : : / 2 - 1)),
10430 : : call, tem);
10431 : 9 : break;
10432 : 9 : case BUILT_IN_FFSG:
10433 : 9 : call = fold_builtin_bit_query (loc, fcode, hi, NULL_TREE);
10434 : 18 : call = fold_build2 (PLUS_EXPR, integer_type_node, call,
10435 : : build_int_cst (integer_type_node,
10436 : : MAX_FIXED_MODE_SIZE / 2));
10437 : 9 : call = fold_build3 (COND_EXPR, integer_type_node,
10438 : : fold_build2 (NE_EXPR, boolean_type_node,
10439 : : hi, build_zero_cst (type)),
10440 : : call, integer_zero_node);
10441 : 9 : call = fold_build3 (COND_EXPR, integer_type_node,
10442 : : fold_build2 (NE_EXPR, boolean_type_node,
10443 : : lo, build_zero_cst (type)),
10444 : : fold_builtin_bit_query (loc, fcode, lo,
10445 : : NULL_TREE),
10446 : : call);
10447 : 9 : break;
10448 : 9 : case BUILT_IN_PARITYG:
10449 : 9 : call = fold_builtin_bit_query (loc, fcode,
10450 : : fold_build2 (BIT_XOR_EXPR, type,
10451 : : lo, hi), NULL_TREE);
10452 : 9 : break;
10453 : 35 : case BUILT_IN_POPCOUNTG:
10454 : 35 : call = fold_build2 (PLUS_EXPR, integer_type_node,
10455 : : fold_builtin_bit_query (loc, fcode, hi,
10456 : : NULL_TREE),
10457 : : fold_builtin_bit_query (loc, fcode, lo,
10458 : : NULL_TREE));
10459 : 35 : break;
10460 : 0 : default:
10461 : 0 : gcc_unreachable ();
10462 : : }
10463 : : }
10464 : : else
10465 : : {
10466 : : /* Only keep second argument to IFN_CLZ/IFN_CTZ if it is the
10467 : : value defined at zero during GIMPLE, or for large/huge _BitInt
10468 : : (which are then lowered during bitint lowering). */
10469 : 201427 : if (arg2 && TREE_CODE (TREE_TYPE (arg0)) != BITINT_TYPE)
10470 : : {
10471 : 112539 : int val;
10472 : 112539 : if (fcode == BUILT_IN_CLZG)
10473 : : {
10474 : 64216 : if (CLZ_DEFINED_VALUE_AT_ZERO (SCALAR_TYPE_MODE (arg0_type),
10475 : : val) != 2
10476 : 64228 : || wi::to_widest (arg2) != val)
10477 : 64204 : arg2 = NULL_TREE;
10478 : : }
10479 : 48323 : else if (CTZ_DEFINED_VALUE_AT_ZERO (SCALAR_TYPE_MODE (arg0_type),
10480 : : val) != 2
10481 : 48323 : || wi::to_widest (arg2) != val)
10482 : 48323 : arg2 = NULL_TREE;
10483 : 112539 : if (!direct_internal_fn_supported_p (ifn, arg0_type,
10484 : : OPTIMIZE_FOR_BOTH))
10485 : : arg2 = NULL_TREE;
10486 : 112491 : if (arg2 == NULL_TREE)
10487 : 112527 : arg0 = save_expr (arg0);
10488 : : }
10489 : 201427 : if (fcodei == END_BUILTINS || arg2)
10490 : 194 : call = build_call_expr_internal_loc (loc, ifn, integer_type_node,
10491 : : arg2 ? 2 : 1, arg0, arg2);
10492 : : else
10493 : 201315 : call = build_call_expr_loc (loc, builtin_decl_explicit (fcodei), 1,
10494 : : arg0);
10495 : : }
10496 : 232810 : if (addend)
10497 : 7440 : call = fold_build2 (PLUS_EXPR, integer_type_node, call,
10498 : : build_int_cst (integer_type_node, addend));
10499 : 232810 : if (arg1 && arg2 == NULL_TREE)
10500 : 119916 : call = fold_build3 (COND_EXPR, integer_type_node,
10501 : : fold_build2 (NE_EXPR, boolean_type_node,
10502 : : arg0, build_zero_cst (arg0_type)),
10503 : : call, arg1);
10504 : :
10505 : : return call;
10506 : : }
10507 : :
10508 : : /* Fold __builtin_{add,sub}c{,l,ll} into pair of internal functions
10509 : : that return both result of arithmetics and overflowed boolean
10510 : : flag in a complex integer result. */
      : : /* NOTE(review): ARGS holds the four builtin operands: the two
      : : chained IFN_{ADD,SUB}_OVERFLOW calls below combine ARGS[0] with
      : : ARGS[1], then the intermediate result with ARGS[2] (the carry-in);
      : : the two IMAGPART overflow flags are ORed and stored through the
      : : pointer ARGS[3], and the final REALPART result is returned. */
10511 : :
10512 : : static tree
10513 : 54 : fold_builtin_addc_subc (location_t loc, enum built_in_function fcode,
10514 : : tree *args)
10515 : : {
10516 : 54 : enum internal_fn ifn;
10517 : :
      : : /* Map the six user-visible builtins onto the two overflow IFNs. */
10518 : 54 : switch (fcode)
10519 : : {
10520 : : case BUILT_IN_ADDC:
10521 : : case BUILT_IN_ADDCL:
10522 : : case BUILT_IN_ADDCLL:
10523 : : ifn = IFN_ADD_OVERFLOW;
10524 : : break;
10525 : 28 : case BUILT_IN_SUBC:
10526 : 28 : case BUILT_IN_SUBCL:
10527 : 28 : case BUILT_IN_SUBCLL:
10528 : 28 : ifn = IFN_SUB_OVERFLOW;
10529 : 28 : break;
10530 : 0 : default:
10531 : 0 : gcc_unreachable ();
10532 : : }
10533 : :
      : : /* The IFN returns a complex value: REALPART is the arithmetic
      : : result, IMAGPART the overflow flag. */
10534 : 54 : tree type = TREE_TYPE (args[0]);
10535 : 54 : tree ctype = build_complex_type (type);
10536 : 54 : tree call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
10537 : : args[0], args[1]);
10538 : : /* Force SAVE_EXPR even for calls which satisfy tree_invariant_p_1,
10539 : : as while the call itself is const, the REALPART_EXPR store is
10540 : : certainly not. And in any case, we want just one call,
10541 : : not multiple and trying to CSE them later. */
10542 : 54 : TREE_SIDE_EFFECTS (call) = 1;
10543 : 54 : tree tgt = save_expr (call);
10544 : 54 : tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
10545 : 54 : tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
      : : /* Second chained operation folds in the carry argument. */
10546 : 54 : call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
10547 : : intres, args[2]);
10548 : 54 : TREE_SIDE_EFFECTS (call) = 1;
10549 : 54 : tgt = save_expr (call);
10550 : 54 : intres = build1_loc (loc, REALPART_EXPR, type, tgt);
10551 : 54 : tree ovfres2 = build1_loc (loc, IMAGPART_EXPR, type, tgt);
      : : /* Either step overflowing sets the carry-out. */
10552 : 54 : ovfres = build2_loc (loc, BIT_IOR_EXPR, type, ovfres, ovfres2);
10553 : 54 : tree mem_arg3 = build_fold_indirect_ref_loc (loc, args[3]);
10554 : 54 : tree store
10555 : 54 : = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg3, ovfres);
10556 : 54 : return build2_loc (loc, COMPOUND_EXPR, type, store, intres);
10557 : : }
10558 : :
10559 : : /* Fold a call to __builtin_FILE to a constant string. */
      : : /* NOTE(review): always folds; falls back to "" when LOC carries
      : : no file name. */
10560 : :
10561 : : static inline tree
10562 : 5843 : fold_builtin_FILE (location_t loc)
10563 : : {
10564 : 5843 : if (const char *fname = LOCATION_FILE (loc))
10565 : : {
10566 : : /* The documentation says this builtin is equivalent to the preprocessor
10567 : : __FILE__ macro so it appears appropriate to use the same file prefix
10568 : : mappings. */
10569 : 5843 : fname = remap_macro_filename (fname);
10570 : 5843 : return build_string_literal (fname);
10571 : : }
10572 : :
10573 : 0 : return build_string_literal ("");
10574 : : }
10575 : :
10576 : : /* Fold a call to __builtin_FUNCTION to a constant string. */
      : : /* NOTE(review): yields "" outside any function (e.g. at file scope
      : : initializers), otherwise the printable name of the current
      : : function per the language frontend. */
10577 : :
10578 : : static inline tree
10579 : 68 : fold_builtin_FUNCTION ()
10580 : : {
10581 : 68 : const char *name = "";
10582 : :
10583 : 68 : if (current_function_decl)
10584 : 43 : name = lang_hooks.decl_printable_name (current_function_decl, 0);
10585 : :
10586 : 68 : return build_string_literal (name);
10587 : : }
10588 : :
10589 : : /* Fold a call to __builtin_LINE to an integer constant. */
      : : /* NOTE(review): TYPE is the builtin's declared return type; the
      : : line number of LOC is emitted as a constant of that type. */
10590 : :
10591 : : static inline tree
10592 : 11701 : fold_builtin_LINE (location_t loc, tree type)
10593 : : {
10594 : 11701 : return build_int_cst (type, LOCATION_LINE (loc));
10595 : : }
10596 : :
10597 : : /* Fold a call to built-in function FNDECL with 0 arguments.
10598 : : This function returns NULL_TREE if no simplification was possible. */
10599 : :
10600 : : static tree
10601 : 22854181 : fold_builtin_0 (location_t loc, tree fndecl)
10602 : : {
10603 : 22854181 : tree type = TREE_TYPE (TREE_TYPE (fndecl));
10604 : 22854181 : enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10605 : 22854181 : switch (fcode)
10606 : : {
      : : /* Source-location builtins fold to constants immediately. */
10607 : 5843 : case BUILT_IN_FILE:
10608 : 5843 : return fold_builtin_FILE (loc);
10609 : :
10610 : 68 : case BUILT_IN_FUNCTION:
10611 : 68 : return fold_builtin_FUNCTION ();
10612 : :
10613 : 11701 : case BUILT_IN_LINE:
10614 : 11701 : return fold_builtin_LINE (loc, type);
10615 : :
      : : /* inf/huge_val differ only in whether a warning is wanted when
      : : the target has no infinity (second argument of
      : : fold_builtin_inf). */
10616 : 34993 : CASE_FLT_FN (BUILT_IN_INF):
10617 : 34993 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
10618 : 34993 : case BUILT_IN_INFD32:
10619 : 34993 : case BUILT_IN_INFD64:
10620 : 34993 : case BUILT_IN_INFD128:
10621 : 34993 : case BUILT_IN_INFD64X:
10622 : 34993 : return fold_builtin_inf (loc, type, true);
10623 : :
10624 : 196088 : CASE_FLT_FN (BUILT_IN_HUGE_VAL):
10625 : 196088 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
10626 : 196088 : return fold_builtin_inf (loc, type, false);
10627 : :
10628 : 0 : case BUILT_IN_CLASSIFY_TYPE:
10629 : 0 : return fold_builtin_classify_type (NULL_TREE);
10630 : :
10631 : 17455188 : case BUILT_IN_UNREACHABLE:
10632 : : /* Rewrite any explicit calls to __builtin_unreachable. */
10633 : 17455188 : if (sanitize_flags_p (SANITIZE_UNREACHABLE))
10634 : 115 : return build_builtin_unreachable (loc);
10635 : : break;
10636 : :
10637 : : default:
10638 : : break;
10639 : : }
10640 : : return NULL_TREE;
10641 : : }
10642 : :
10643 : : /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
10644 : : This function returns NULL_TREE if no simplification was possible. */
      : : /* NOTE(review): EXPR is the original CALL_EXPR (may be null); it is
      : : only forwarded to foldings such as fold_builtin_strlen that can
      : : emit diagnostics against the call. */
10645 : :
10646 : : static tree
10647 : 16116429 : fold_builtin_1 (location_t loc, tree expr, tree fndecl, tree arg0)
10648 : : {
10649 : 16116429 : tree type = TREE_TYPE (TREE_TYPE (fndecl));
10650 : 16116429 : enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10651 : :
10652 : 16116429 : if (error_operand_p (arg0))
10653 : : return NULL_TREE;
10654 : :
      : : /* Pure constant folding is tried first, for any builtin. */
10655 : 16116429 : if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
10656 : : return ret;
10657 : :
10658 : 15622276 : switch (fcode)
10659 : : {
10660 : 796947 : case BUILT_IN_CONSTANT_P:
10661 : 796947 : {
10662 : 796947 : tree val = fold_builtin_constant_p (arg0);
10663 : :
10664 : : /* Gimplification will pull the CALL_EXPR for the builtin out of
10665 : : an if condition. When not optimizing, we'll not CSE it back.
10666 : : To avoid link error types of regressions, return false now. */
10667 : 796947 : if (!val && !optimize)
10668 : 1178 : val = integer_zero_node;
10669 : :
10670 : : return val;
10671 : : }
10672 : :
10673 : 2096 : case BUILT_IN_CLASSIFY_TYPE:
10674 : 2096 : return fold_builtin_classify_type (arg0);
10675 : :
10676 : 427862 : case BUILT_IN_STRLEN:
10677 : 427862 : return fold_builtin_strlen (loc, expr, type, arg0);
10678 : :
10679 : 366288 : CASE_FLT_FN (BUILT_IN_FABS):
10680 : 366288 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
10681 : 366288 : case BUILT_IN_FABSD32:
10682 : 366288 : case BUILT_IN_FABSD64:
10683 : 366288 : case BUILT_IN_FABSD128:
10684 : 366288 : case BUILT_IN_FABSD64X:
10685 : 366288 : return fold_builtin_fabs (loc, arg0, type);
10686 : :
10687 : 95871 : case BUILT_IN_ABS:
10688 : 95871 : case BUILT_IN_LABS:
10689 : 95871 : case BUILT_IN_LLABS:
10690 : 95871 : case BUILT_IN_IMAXABS:
10691 : 95871 : case BUILT_IN_UABS:
10692 : 95871 : case BUILT_IN_ULABS:
10693 : 95871 : case BUILT_IN_ULLABS:
10694 : 95871 : case BUILT_IN_UMAXABS:
10695 : 95871 : return fold_builtin_abs (loc, arg0, type);
10696 : :
      : : /* Complex builtins fold directly to the corresponding tree codes
      : : once the argument is verified to be a real-valued complex. */
10697 : 24501 : CASE_FLT_FN (BUILT_IN_CONJ):
10698 : 24501 : if (validate_arg (arg0, COMPLEX_TYPE)
10699 : 24501 : && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10700 : 24501 : return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
10701 : : break;
10702 : :
10703 : 764 : CASE_FLT_FN (BUILT_IN_CREAL):
10704 : 764 : if (validate_arg (arg0, COMPLEX_TYPE)
10705 : 764 : && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10706 : 764 : return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
10707 : : break;
10708 : :
10709 : 1884 : CASE_FLT_FN (BUILT_IN_CIMAG):
10710 : 1884 : if (validate_arg (arg0, COMPLEX_TYPE)
10711 : 1884 : && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10712 : 1884 : return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
10713 : : break;
10714 : :
10715 : 115962 : CASE_FLT_FN (BUILT_IN_CARG):
10716 : 115962 : CASE_FLT_FN_FLOATN_NX (BUILT_IN_CARG):
10717 : 115962 : return fold_builtin_carg (loc, arg0, type);
10718 : :
10719 : 211 : case BUILT_IN_ISASCII:
10720 : 211 : return fold_builtin_isascii (loc, arg0);
10721 : :
10722 : 168 : case BUILT_IN_TOASCII:
10723 : 168 : return fold_builtin_toascii (loc, arg0);
10724 : :
10725 : 325 : case BUILT_IN_ISDIGIT:
10726 : 325 : return fold_builtin_isdigit (loc, arg0);
10727 : :
      : : /* Classification builtins: try the generic classify folding first,
      : : then fall back to the interclass mathfn expansion. */
10728 : 573762 : CASE_FLT_FN (BUILT_IN_FINITE):
10729 : 573762 : case BUILT_IN_FINITED32:
10730 : 573762 : case BUILT_IN_FINITED64:
10731 : 573762 : case BUILT_IN_FINITED128:
10732 : 573762 : case BUILT_IN_ISFINITE:
10733 : 573762 : {
10734 : 573762 : tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10735 : 573762 : if (ret)
10736 : : return ret;
10737 : 573538 : return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10738 : : }
10739 : :
10740 : 268065 : CASE_FLT_FN (BUILT_IN_ISINF):
10741 : 268065 : case BUILT_IN_ISINFD32:
10742 : 268065 : case BUILT_IN_ISINFD64:
10743 : 268065 : case BUILT_IN_ISINFD128:
10744 : 268065 : {
10745 : 268065 : tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10746 : 268065 : if (ret)
10747 : : return ret;
10748 : 267944 : return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10749 : : }
10750 : :
10751 : 264620 : case BUILT_IN_ISNORMAL:
10752 : 264620 : return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10753 : :
10754 : 704 : case BUILT_IN_ISINF_SIGN:
10755 : 704 : return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10756 : :
10757 : 267692 : CASE_FLT_FN (BUILT_IN_ISNAN):
10758 : 267692 : case BUILT_IN_ISNAND32:
10759 : 267692 : case BUILT_IN_ISNAND64:
10760 : 267692 : case BUILT_IN_ISNAND128:
10761 : 267692 : {
10762 : 267692 : tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10763 : 267692 : if (ret)
10764 : : return ret;
10765 : 267568 : return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10766 : : }
10767 : :
10768 : 28854 : case BUILT_IN_ISSIGNALING:
10769 : 28854 : return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISSIGNALING);
10770 : :
      : : /* free (NULL) is a no-op, drop the call entirely. */
10771 : 600510 : case BUILT_IN_FREE:
10772 : 600510 : if (integer_zerop (arg0))
10773 : 1049 : return build_empty_stmt (loc);
10774 : : break;
10775 : :
10776 : 18808 : case BUILT_IN_CLZG:
10777 : 18808 : case BUILT_IN_CTZG:
10778 : 18808 : case BUILT_IN_CLRSBG:
10779 : 18808 : case BUILT_IN_FFSG:
10780 : 18808 : case BUILT_IN_PARITYG:
10781 : 18808 : case BUILT_IN_POPCOUNTG:
10782 : 18808 : return fold_builtin_bit_query (loc, fcode, arg0, NULL_TREE);
10783 : :
10784 : : default:
10785 : : break;
10786 : : }
10787 : :
10788 : : return NULL_TREE;
10789 : :
10790 : : }
10791 : :
10792 : : /* Folds a call EXPR (which may be null) to built-in function FNDECL
10793 : : with 2 arguments, ARG0 and ARG1. This function returns NULL_TREE
10794 : : if no simplification was possible. */
10795 : :
10796 : : static tree
10797 : 16948653 : fold_builtin_2 (location_t loc, tree expr, tree fndecl, tree arg0, tree arg1)
10798 : : {
10799 : 16948653 : tree type = TREE_TYPE (TREE_TYPE (fndecl));
10800 : 16948653 : enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10801 : :
      : : /* Bail out early on erroneous operands from a broken input. */
10802 : 16948653 : if (error_operand_p (arg0)
10803 : 16948653 : || error_operand_p (arg1))
10804 : : return NULL_TREE;
10805 : :
10806 : 16948649 : if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
10807 : : return ret;
10808 : :
10809 : 16742321 : switch (fcode)
10810 : : {
10811 : 6720 : CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10812 : 6720 : CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10813 : 6720 : if (validate_arg (arg0, REAL_TYPE)
10814 : 6720 : && validate_arg (arg1, POINTER_TYPE))
10815 : 6720 : return do_mpfr_lgamma_r (arg0, arg1, type);
10816 : : break;
10817 : :
10818 : 115598 : CASE_FLT_FN (BUILT_IN_FREXP):
10819 : 115598 : return fold_builtin_frexp (loc, arg0, arg1, type);
10820 : :
10821 : 75851 : CASE_FLT_FN (BUILT_IN_MODF):
10822 : 75851 : return fold_builtin_modf (loc, arg0, arg1, type);
10823 : :
10824 : 2655 : case BUILT_IN_STRSPN:
10825 : 2655 : return fold_builtin_strspn (loc, expr, arg0, arg1, type);
10826 : :
10827 : 2553 : case BUILT_IN_STRCSPN:
10828 : 2553 : return fold_builtin_strcspn (loc, expr, arg0, arg1, type);
10829 : :
10830 : 83357 : case BUILT_IN_STRPBRK:
10831 : 83357 : return fold_builtin_strpbrk (loc, expr, arg0, arg1, type);
10832 : :
10833 : 4985468 : case BUILT_IN_EXPECT:
10834 : 4985468 : return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, NULL_TREE);
10835 : :
      : : /* Unordered comparisons: each builtin is given the tree code for
      : : the unordered form and the code for the ordered complement. */
10836 : 532728 : case BUILT_IN_ISGREATER:
10837 : 532728 : return fold_builtin_unordered_cmp (loc, fndecl,
10838 : 532728 : arg0, arg1, UNLE_EXPR, LE_EXPR);
10839 : 529345 : case BUILT_IN_ISGREATEREQUAL:
10840 : 529345 : return fold_builtin_unordered_cmp (loc, fndecl,
10841 : 529345 : arg0, arg1, UNLT_EXPR, LT_EXPR);
10842 : 266294 : case BUILT_IN_ISLESS:
10843 : 266294 : return fold_builtin_unordered_cmp (loc, fndecl,
10844 : 266294 : arg0, arg1, UNGE_EXPR, GE_EXPR);
10845 : 1102912 : case BUILT_IN_ISLESSEQUAL:
10846 : 1102912 : return fold_builtin_unordered_cmp (loc, fndecl,
10847 : 1102912 : arg0, arg1, UNGT_EXPR, GT_EXPR);
10848 : 264430 : case BUILT_IN_ISLESSGREATER:
10849 : 264430 : return fold_builtin_unordered_cmp (loc, fndecl,
10850 : 264430 : arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10851 : 265667 : case BUILT_IN_ISUNORDERED:
10852 : 265667 : return fold_builtin_unordered_cmp (loc, fndecl,
10853 : : arg0, arg1, UNORDERED_EXPR,
10854 : 265667 : NOP_EXPR);
10855 : :
10856 : 702 : case BUILT_IN_ISEQSIG:
10857 : 702 : return fold_builtin_iseqsig (loc, arg0, arg1);
10858 : :
10859 : : /* We do the folding for va_start in the expander. */
10860 : : case BUILT_IN_VA_START:
10861 : : break;
10862 : :
10863 : 199826 : case BUILT_IN_OBJECT_SIZE:
10864 : 199826 : case BUILT_IN_DYNAMIC_OBJECT_SIZE:
10865 : 199826 : return fold_builtin_object_size (arg0, arg1, fcode);
10866 : :
10867 : 50467 : case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10868 : 50467 : return fold_builtin_atomic_always_lock_free (arg0, arg1);
10869 : :
10870 : 40135 : case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10871 : 40135 : return fold_builtin_atomic_is_lock_free (arg0, arg1);
10872 : :
      : : /* Two-argument type-generic clz/ctz carry a value-at-zero. */
10873 : 151367 : case BUILT_IN_CLZG:
10874 : 151367 : case BUILT_IN_CTZG:
10875 : 151367 : return fold_builtin_bit_query (loc, fcode, arg0, arg1);
10876 : :
10877 : : default:
10878 : : break;
10879 : : }
10880 : : return NULL_TREE;
10881 : : }
10882 : :
10883 : : /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10884 : : and ARG2.
10885 : : This function returns NULL_TREE if no simplification was possible. */
10886 : :
10887 : : static tree
10888 : 6231488 : fold_builtin_3 (location_t loc, tree fndecl,
10889 : : tree arg0, tree arg1, tree arg2)
10890 : : {
10891 : 6231488 : tree type = TREE_TYPE (TREE_TYPE (fndecl));
10892 : 6231488 : enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10893 : :
10894 : 6231488 : if (error_operand_p (arg0)
10895 : 6231488 : || error_operand_p (arg1)
10896 : 12462976 : || error_operand_p (arg2))
10897 : : return NULL_TREE;
10898 : :
      : : /* Pure constant folding first, as in the 1- and 2-arg cases. */
10899 : 6231486 : if (tree ret = fold_const_call (as_combined_fn (fcode), type,
10900 : : arg0, arg1, arg2))
10901 : : return ret;
10902 : :
10903 : 6214747 : switch (fcode)
10904 : : {
10905 : :
10906 : 145 : CASE_FLT_FN (BUILT_IN_SINCOS):
10907 : 145 : return fold_builtin_sincos (loc, arg0, arg1, arg2);
10908 : :
10909 : 86884 : CASE_FLT_FN (BUILT_IN_REMQUO):
10910 : 86884 : if (validate_arg (arg0, REAL_TYPE)
10911 : 86884 : && validate_arg (arg1, REAL_TYPE)
10912 : 173768 : && validate_arg (arg2, POINTER_TYPE))
10913 : 86884 : return do_mpfr_remquo (arg0, arg1, arg2);
10914 : : break;
10915 : :
10916 : 2484331 : case BUILT_IN_MEMCMP:
10917 : 2484331 : return fold_builtin_memcmp (loc, arg0, arg1, arg2);
10918 : :
      : : /* 3-arg __builtin_expect passes a probability either as the
      : : expected value slot or the probability slot of the helper. */
10919 : 489135 : case BUILT_IN_EXPECT:
10920 : 489135 : return fold_builtin_expect (loc, arg0, arg1, arg2, NULL_TREE);
10921 : :
10922 : 337 : case BUILT_IN_EXPECT_WITH_PROBABILITY:
10923 : 337 : return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, arg2);
10924 : :
10925 : 183068 : case BUILT_IN_ADD_OVERFLOW:
10926 : 183068 : case BUILT_IN_SUB_OVERFLOW:
10927 : 183068 : case BUILT_IN_MUL_OVERFLOW:
10928 : 183068 : case BUILT_IN_ADD_OVERFLOW_P:
10929 : 183068 : case BUILT_IN_SUB_OVERFLOW_P:
10930 : 183068 : case BUILT_IN_MUL_OVERFLOW_P:
10931 : 183068 : case BUILT_IN_SADD_OVERFLOW:
10932 : 183068 : case BUILT_IN_SADDL_OVERFLOW:
10933 : 183068 : case BUILT_IN_SADDLL_OVERFLOW:
10934 : 183068 : case BUILT_IN_SSUB_OVERFLOW:
10935 : 183068 : case BUILT_IN_SSUBL_OVERFLOW:
10936 : 183068 : case BUILT_IN_SSUBLL_OVERFLOW:
10937 : 183068 : case BUILT_IN_SMUL_OVERFLOW:
10938 : 183068 : case BUILT_IN_SMULL_OVERFLOW:
10939 : 183068 : case BUILT_IN_SMULLL_OVERFLOW:
10940 : 183068 : case BUILT_IN_UADD_OVERFLOW:
10941 : 183068 : case BUILT_IN_UADDL_OVERFLOW:
10942 : 183068 : case BUILT_IN_UADDLL_OVERFLOW:
10943 : 183068 : case BUILT_IN_USUB_OVERFLOW:
10944 : 183068 : case BUILT_IN_USUBL_OVERFLOW:
10945 : 183068 : case BUILT_IN_USUBLL_OVERFLOW:
10946 : 183068 : case BUILT_IN_UMUL_OVERFLOW:
10947 : 183068 : case BUILT_IN_UMULL_OVERFLOW:
10948 : 183068 : case BUILT_IN_UMULLL_OVERFLOW:
10949 : 183068 : return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
10950 : :
10951 : : default:
10952 : : break;
10953 : : }
10954 : : return NULL_TREE;
10955 : : }
10956 : :
10957 : : /* Folds a call EXPR (which may be null) to built-in function FNDECL.
10958 : : ARGS is an array of NARGS arguments. IGNORE is true if the result
10959 : : of the function call is ignored. This function returns NULL_TREE
10960 : : if no simplification was possible. */
      : : /* NOTE(review): the trailing bool parameter (historically IGNORE)
      : : is currently unused by all dispatched helpers. */
10961 : :
10962 : : static tree
10963 : 64794557 : fold_builtin_n (location_t loc, tree expr, tree fndecl, tree *args,
10964 : : int nargs, bool)
10965 : : {
10966 : 64794557 : tree ret = NULL_TREE;
10967 : :
      : : /* Dispatch on arity; 4+ arguments go to the varargs folder. */
10968 : 64794557 : switch (nargs)
10969 : : {
10970 : 22854181 : case 0:
10971 : 22854181 : ret = fold_builtin_0 (loc, fndecl);
10972 : 22854181 : break;
10973 : 16116429 : case 1:
10974 : 16116429 : ret = fold_builtin_1 (loc, expr, fndecl, args[0]);
10975 : 16116429 : break;
10976 : 16948653 : case 2:
10977 : 16948653 : ret = fold_builtin_2 (loc, expr, fndecl, args[0], args[1]);
10978 : 16948653 : break;
10979 : 6231488 : case 3:
10980 : 6231488 : ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
10981 : 6231488 : break;
10982 : 2643806 : default:
10983 : 2643806 : ret = fold_builtin_varargs (loc, fndecl, args, nargs);
10984 : 2643806 : break;
10985 : : }
      : : /* Wrap any folded result in a located NOP_EXPR so the replacement
      : : carries the call's source location. */
10986 : 64794557 : if (ret)
10987 : : {
10988 : 6629844 : ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10989 : 6629844 : SET_EXPR_LOCATION (ret, loc);
10990 : 6629844 : return ret;
10991 : : }
10992 : : return NULL_TREE;
10993 : : }
10994 : :
10995 : : /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10996 : : list ARGS along with N new arguments in NEWARGS. SKIP is the number
10997 : : of arguments in ARGS to be omitted. OLDNARGS is the number of
10998 : : elements in ARGS. */
11000 : :
11001 : : static tree
11002 : 4 : rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
11003 : : int skip, tree fndecl, int n, va_list newargs)
11004 : : {
11005 : 4 : int nargs = oldnargs - skip + n;
11006 : 4 : tree *buffer;
11007 : :
11008 : 4 : if (n > 0)
11009 : : {
11010 : 0 : int i, j;
11011 : :
      : : /* New arguments precede the retained tail of the old ones. */
11012 : 0 : buffer = XALLOCAVEC (tree, nargs);
11013 : 0 : for (i = 0; i < n; i++)
11014 : 0 : buffer[i] = va_arg (newargs, tree);
11015 : 0 : for (j = skip; j < oldnargs; j++, i++)
11016 : 0 : buffer[i] = args[j];
11017 : : }
11018 : : else
      : : /* Nothing new to add: reuse the old array past the skipped
      : : prefix, avoiding a copy. */
11019 : 4 : buffer = args + skip;
11020 : :
11021 : 4 : return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
11022 : : }
11022 : :
11023 : : /* Return true if FNDECL shouldn't be folded right now.
11024 : : If a built-in function has an inline attribute always_inline
11025 : : wrapper, defer folding it after always_inline functions have
11026 : : been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
11027 : : might not be performed. */
11028 : :
11029 : : bool
11030 : 144537934 : avoid_folding_inline_builtin (tree fndecl)
11031 : : {
      : : /* Conditions are ordered cheapest-first; the attribute lookup is
      : : only reached for declared-inline, disregard-limits builtins in a
      : : function body whose always_inline callees aren't inlined yet. */
11032 : 144537934 : return (DECL_DECLARED_INLINE_P (fndecl)
11033 : 9443 : && DECL_DISREGARD_INLINE_LIMITS (fndecl)
11034 : 9401 : && cfun
11035 : 9401 : && !cfun->always_inline_functions_inlined
11036 : 144547335 : && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
11037 : : }
11038 : :
11039 : : /* A wrapper function for builtin folding that prevents warnings for
11040 : : "statement without effect" and the like, caused by removing the
11041 : : call node earlier than the warning is generated. */
11042 : :
11043 : : tree
11044 : 183462330 : fold_call_expr (location_t loc, tree exp, bool ignore)
11045 : : {
11046 : 183462330 : tree ret = NULL_TREE;
11047 : 183462330 : tree fndecl = get_callee_fndecl (exp);
11048 : 181682472 : if (fndecl && fndecl_built_in_p (fndecl)
11049 : : /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
11050 : : yet. Defer folding until we see all the arguments
11051 : : (after inlining). */
11052 : 240187329 : && !CALL_EXPR_VA_ARG_PACK (exp))
11053 : : {
11054 : 56724974 : int nargs = call_expr_nargs (exp);
11055 : :
11056 : : /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
11057 : : instead last argument is __builtin_va_arg_pack (). Defer folding
11058 : : even in that case, until arguments are finalized. */
11059 : 56724974 : if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
11060 : : {
11061 : 255037 : tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
11062 : 255037 : if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
11063 : : return NULL_TREE;
11064 : : }
11065 : :
11066 : 56724910 : if (avoid_folding_inline_builtin (fndecl))
11067 : : return NULL_TREE;
11068 : :
      : : /* Machine-dependent builtins are folded by the target hook;
      : : everything else goes through the generic arity dispatcher. */
11069 : 56721375 : if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11070 : 67163114 : return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
11071 : 67163114 : CALL_EXPR_ARGP (exp), ignore);
11072 : : else
11073 : : {
11074 : 23139818 : tree *args = CALL_EXPR_ARGP (exp);
11075 : 23139818 : ret = fold_builtin_n (loc, exp, fndecl, args, nargs, ignore);
11076 : 23139818 : if (ret)
11077 : : return ret;
11078 : : }
11079 : : }
11080 : : return NULL_TREE;
11081 : : }
11082 : :
11083 : : /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
11084 : : N arguments are passed in the array ARGARRAY. Return a folded
11085 : : expression or NULL_TREE if no simplification was possible. */
      : : /* NOTE(review): unlike fold_call_expr, this entry point builds the
      : : call from an argument array before any CALL_EXPR exists, hence
      : : the NULL_TREE expr passed down to fold_builtin_n. */
11086 : :
11087 : : tree
11088 : 64264466 : fold_builtin_call_array (location_t loc, tree,
11089 : : tree fn,
11090 : : int n,
11091 : : tree *argarray)
11092 : : {
11093 : 64264466 : if (TREE_CODE (fn) != ADDR_EXPR)
11094 : : return NULL_TREE;
11095 : :
11096 : 64264466 : tree fndecl = TREE_OPERAND (fn, 0);
11097 : 64264466 : if (TREE_CODE (fndecl) == FUNCTION_DECL
11098 : 64264466 : && fndecl_built_in_p (fndecl))
11099 : : {
11100 : : /* If last argument is __builtin_va_arg_pack (), arguments to this
11101 : : function are not finalized yet. Defer folding until they are. */
11102 : 63686222 : if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
11103 : : {
11104 : 103186 : tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
11105 : 103186 : if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
11106 : : return NULL_TREE;
11107 : : }
11108 : 63686195 : if (avoid_folding_inline_builtin (fndecl))
11109 : : return NULL_TREE;
11110 : 63686195 : if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11111 : 32434858 : return targetm.fold_builtin (fndecl, n, argarray, false);
11112 : : else
11113 : 31251337 : return fold_builtin_n (loc, NULL_TREE, fndecl, argarray, n, false);
11114 : : }
11115 : :
11116 : : return NULL_TREE;
11117 : : }
11118 : :
11119 : : /* Construct a new CALL_EXPR using the tail of the argument list of EXP
11120 : : along with N new arguments specified as the "..." parameters. SKIP
11121 : : is the number of arguments in EXP to be omitted. This function is used
11122 : : to do varargs-to-varargs transformations. */
      : : /* NOTE(review): thin va_list wrapper around
      : : rewrite_call_expr_valist; see that function for the argument
      : : ordering rules. */
11124 : :
11125 : : static tree
11126 : 4 : rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
11127 : : {
11128 : 4 : va_list ap;
11129 : 4 : tree t;
11130 : :
11131 : 4 : va_start (ap, n);
11132 : 8 : t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
11133 : 4 : CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
11134 : 4 : va_end (ap);
11135 : :
11136 : 4 : return t;
11137 : : }
11137 : :
11138 : : /* Validate a single argument ARG against a tree code CODE representing
11139 : : a type. Return true when argument is valid. */
      : : /* NOTE(review): POINTER_TYPE and INTEGER_TYPE are matched loosely
      : : via the POINTER_TYPE_P / INTEGRAL_TYPE_P predicates (so e.g.
      : : reference and enum/boolean types also qualify); any other CODE
      : : requires an exact tree-code match. */
11141 : :
11142 : : static bool
11143 : 14055706 : validate_arg (const_tree arg, enum tree_code code)
11144 : : {
11145 : 14055706 : if (!arg)
11146 : : return false;
11147 : 14055683 : else if (code == POINTER_TYPE)
11148 : 6829098 : return POINTER_TYPE_P (TREE_TYPE (arg));
11149 : 7226585 : else if (code == INTEGER_TYPE)
11150 : 3689537 : return INTEGRAL_TYPE_P (TREE_TYPE (arg));
11151 : 3537048 : return code == TREE_CODE (TREE_TYPE (arg));
11152 : : }
11152 : :
11153 : : /* This function validates the types of a function call argument list
11154 : : against a specified list of tree_codes. If the last specifier is a 0,
11155 : : that represents an ellipses, otherwise the last specifier must be a
11156 : : VOID_TYPE.
11157 : :
11158 : : This is the GIMPLE version of validate_arglist. Eventually we want to
11159 : : completely convert builtins.cc to work from GIMPLEs and the tree based
11160 : : validate_arglist will then be removed. */
11162 : :
11163 : : bool
11164 : 93 : validate_gimple_arglist (const gcall *call, ...)
11165 : : {
11166 : 93 : enum tree_code code;
11167 : 93 : bool res = 0;
11168 : 93 : va_list ap;
11169 : 93 : const_tree arg;
11170 : 93 : size_t i;
11171 : :
11172 : 93 : va_start (ap, call);
11173 : 93 : i = 0;
11174 : :
      : : /* Walk the varargs specifier list in lockstep with the call's
      : : actual arguments. */
11175 : 372 : do
11176 : : {
11177 : 372 : code = (enum tree_code) va_arg (ap, int);
11178 : 372 : switch (code)
11179 : : {
11180 : 0 : case 0:
11181 : : /* This signifies an ellipses, any further arguments are all ok. */
11182 : 0 : res = true;
11183 : 0 : goto end;
11184 : 93 : case VOID_TYPE:
11185 : : /* This signifies an endlink, if no arguments remain, return
11186 : : true, otherwise return false. */
11187 : 93 : res = (i == gimple_call_num_args (call));
11188 : 93 : goto end;
11189 : 279 : default:
11190 : : /* If no parameters remain or the parameter's code does not
11191 : : match the specified code, return false. Otherwise continue
11192 : : checking any remaining arguments. */
11193 : 279 : arg = gimple_call_arg (call, i++);
11194 : 279 : if (!validate_arg (arg, code))
11195 : 0 : goto end;
11196 : : break;
11197 : : }
11198 : : }
11199 : : while (1);
11200 : :
11201 : : /* We need gotos here since we can only have one VA_CLOSE in a
11202 : : function. */
11203 : 93 : end: ;
11204 : 93 : va_end (ap);
11205 : :
11206 : 93 : return res;
11207 : : }
11207 : :
11208 : : /* Default target-specific builtin expander that does nothing. */
      : : /* NOTE(review): targets without md builtins use this as the
      : : TARGET_EXPAND_BUILTIN hook; NULL_RTX tells the caller no
      : : expansion happened. */
11209 : :
11210 : : rtx
11211 : 0 : default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
11212 : : rtx target ATTRIBUTE_UNUSED,
11213 : : rtx subtarget ATTRIBUTE_UNUSED,
11214 : : machine_mode mode ATTRIBUTE_UNUSED,
11215 : : int ignore ATTRIBUTE_UNUSED)
11216 : : {
11217 : 0 : return NULL_RTX;
11218 : : }
11219 : :
11220 : : /* Returns true is EXP represents data that would potentially reside
11221 : : in a readonly section. */
      : : /* NOTE(review): conservative — only ADDR_EXPRs whose base is a
      : : string constant, constructor, or static variable are even
      : : considered; everything else answers false. */
11222 : :
11223 : : bool
11224 : 197943 : readonly_data_expr (tree exp)
11225 : : {
11226 : 197943 : STRIP_NOPS (exp);
11227 : :
11228 : 197943 : if (TREE_CODE (exp) != ADDR_EXPR)
11229 : : return false;
11230 : :
11231 : 23870 : exp = get_base_address (TREE_OPERAND (exp, 0));
11232 : 23870 : if (!exp)
11233 : : return false;
11234 : :
11235 : : /* Make sure we call decl_readonly_section only for trees it
11236 : : can handle (since it returns true for everything it doesn't
11237 : : understand). */
11238 : 23870 : if (TREE_CODE (exp) == STRING_CST
11239 : 5792 : || TREE_CODE (exp) == CONSTRUCTOR
11240 : 5792 : || (VAR_P (exp) && TREE_STATIC (exp)))
11241 : 20822 : return decl_readonly_section (exp, 0);
11242 : : else
11243 : : return false;
11244 : : }
11245 : :
11246 : : /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11247 : : to the call, and TYPE is its return type.
11248 : :
11249 : : Return NULL_TREE if no simplification was possible, otherwise return the
11250 : : simplified form of the call as a tree.
11251 : :
11252 : : The simplified form may be a constant or other expression which
11253 : : computes the same value, but in a more efficient manner (including
11254 : : calls to other builtin functions).
11255 : :
11256 : : The call may contain arguments which need to be evaluated, but
11257 : : which are not useful to determine the result of the call. In
11258 : : this case we return a chain of COMPOUND_EXPRs. The LHS of each
11259 : : COMPOUND_EXPR will be an argument which must be evaluated.
11260 : : COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11261 : : COMPOUND_EXPR in the chain will contain the tree for the simplified
11262 : : form of the builtin function call. */
11263 : :
11264 : : static tree
11265 : 83357 : fold_builtin_strpbrk (location_t loc, tree, tree s1, tree s2, tree type)
11266 : : {
11267 : 83357 : if (!validate_arg (s1, POINTER_TYPE)
11268 : 83357 : || !validate_arg (s2, POINTER_TYPE))
11269 : : return NULL_TREE;
11270 : :
11271 : 83357 : tree fn;
11272 : 83357 : const char *p1, *p2;
11273 : :
11274 : 83357 : p2 = c_getstr (s2);
11275 : 83357 : if (p2 == NULL)
11276 : : return NULL_TREE;
11277 : :
11278 : 92 : p1 = c_getstr (s1);
11279 : 92 : if (p1 != NULL)
11280 : : {
11281 : 22 : const char *r = strpbrk (p1, p2);
11282 : 22 : tree tem;
11283 : :
11284 : 22 : if (r == NULL)
11285 : 0 : return build_int_cst (TREE_TYPE (s1), 0);
11286 : :
11287 : : /* Return an offset into the constant string argument. */
11288 : 22 : tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11289 : 22 : return fold_convert_loc (loc, type, tem);
11290 : : }
11291 : :
11292 : 70 : if (p2[0] == '\0')
11293 : : /* strpbrk(x, "") == NULL.
11294 : : Evaluate and ignore s1 in case it had side-effects. */
11295 : 26 : return omit_one_operand_loc (loc, type, integer_zero_node, s1);
11296 : :
11297 : 44 : if (p2[1] != '\0')
11298 : : return NULL_TREE; /* Really call strpbrk. */
11299 : :
11300 : 83309 : fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11301 : 41 : if (!fn)
11302 : : return NULL_TREE;
11303 : :
11304 : : /* New argument list transforming strpbrk(s1, s2) to
11305 : : strchr(s1, s2[0]). */
11306 : 41 : return build_call_expr_loc (loc, fn, 2, s1,
11307 : 41 : build_int_cst (integer_type_node, p2[0]));
11308 : : }
11309 : :
11310 : : /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11311 : : to the call.
11312 : :
11313 : : Return NULL_TREE if no simplification was possible, otherwise return the
11314 : : simplified form of the call as a tree.
11315 : :
11316 : : The simplified form may be a constant or other expression which
11317 : : computes the same value, but in a more efficient manner (including
11318 : : calls to other builtin functions).
11319 : :
11320 : : The call may contain arguments which need to be evaluated, but
11321 : : which are not useful to determine the result of the call. In
11322 : : this case we return a chain of COMPOUND_EXPRs. The LHS of each
11323 : : COMPOUND_EXPR will be an argument which must be evaluated.
11324 : : COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11325 : : COMPOUND_EXPR in the chain will contain the tree for the simplified
11326 : : form of the builtin function call. */
11327 : :
11328 : : static tree
11329 : 2655 : fold_builtin_strspn (location_t loc, tree expr, tree s1, tree s2, tree type)
11330 : : {
11331 : 2655 : if (!validate_arg (s1, POINTER_TYPE)
11332 : 2655 : || !validate_arg (s2, POINTER_TYPE))
11333 : : return NULL_TREE;
11334 : :
11335 : 2655 : if (!check_nul_terminated_array (expr, s1)
11336 : 2655 : || !check_nul_terminated_array (expr, s2))
11337 : 60 : return NULL_TREE;
11338 : :
11339 : 2595 : const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11340 : :
11341 : : /* If either argument is "", return NULL_TREE. */
11342 : 2595 : if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11343 : : /* Evaluate and ignore both arguments in case either one has
11344 : : side-effects. */
11345 : 147 : return omit_two_operands_loc (loc, type, size_zero_node, s1, s2);
11346 : : return NULL_TREE;
11347 : : }
11348 : :
11349 : : /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11350 : : to the call.
11351 : :
11352 : : Return NULL_TREE if no simplification was possible, otherwise return the
11353 : : simplified form of the call as a tree.
11354 : :
11355 : : The simplified form may be a constant or other expression which
11356 : : computes the same value, but in a more efficient manner (including
11357 : : calls to other builtin functions).
11358 : :
11359 : : The call may contain arguments which need to be evaluated, but
11360 : : which are not useful to determine the result of the call. In
11361 : : this case we return a chain of COMPOUND_EXPRs. The LHS of each
11362 : : COMPOUND_EXPR will be an argument which must be evaluated.
11363 : : COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11364 : : COMPOUND_EXPR in the chain will contain the tree for the simplified
11365 : : form of the builtin function call. */
11366 : :
11367 : : static tree
11368 : 2553 : fold_builtin_strcspn (location_t loc, tree expr, tree s1, tree s2, tree type)
11369 : : {
11370 : 2553 : if (!validate_arg (s1, POINTER_TYPE)
11371 : 2553 : || !validate_arg (s2, POINTER_TYPE))
11372 : : return NULL_TREE;
11373 : :
11374 : 2553 : if (!check_nul_terminated_array (expr, s1)
11375 : 2553 : || !check_nul_terminated_array (expr, s2))
11376 : 60 : return NULL_TREE;
11377 : :
11378 : : /* If the first argument is "", return NULL_TREE. */
11379 : 2493 : const char *p1 = c_getstr (s1);
11380 : 2493 : if (p1 && *p1 == '\0')
11381 : : {
11382 : : /* Evaluate and ignore argument s2 in case it has
11383 : : side-effects. */
11384 : 65 : return omit_one_operand_loc (loc, type, size_zero_node, s2);
11385 : : }
11386 : :
11387 : : /* If the second argument is "", return __builtin_strlen(s1). */
11388 : 2428 : const char *p2 = c_getstr (s2);
11389 : 2428 : if (p2 && *p2 == '\0')
11390 : : {
11391 : 2488 : tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11392 : :
11393 : : /* If the replacement _DECL isn't initialized, don't do the
11394 : : transformation. */
11395 : 81 : if (!fn)
11396 : : return NULL_TREE;
11397 : :
11398 : 81 : return fold_convert_loc (loc, type,
11399 : 81 : build_call_expr_loc (loc, fn, 1, s1));
11400 : : }
11401 : : return NULL_TREE;
11402 : : }
11403 : :
11404 : : /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11405 : : produced. False otherwise. This is done so that we don't output the error
11406 : : or warning twice or three times. */
11407 : :
11408 : : bool
11409 : 41731 : fold_builtin_next_arg (tree exp, bool va_start_p)
11410 : : {
11411 : 41731 : tree fntype = TREE_TYPE (current_function_decl);
11412 : 41731 : int nargs = call_expr_nargs (exp);
11413 : 41731 : tree arg;
11414 : : /* There is good chance the current input_location points inside the
11415 : : definition of the va_start macro (perhaps on the token for
11416 : : builtin) in a system header, so warnings will not be emitted.
11417 : : Use the location in real source code. */
11418 : 41731 : location_t current_location =
11419 : 41731 : linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
11420 : : NULL);
11421 : :
11422 : 41731 : if (!stdarg_p (fntype))
11423 : : {
11424 : 8 : error ("%<va_start%> used in function with fixed arguments");
11425 : 8 : return true;
11426 : : }
11427 : :
11428 : 41723 : if (va_start_p)
11429 : : {
11430 : 41573 : if (va_start_p && (nargs != 2))
11431 : : {
11432 : 0 : error ("wrong number of arguments to function %<va_start%>");
11433 : 0 : return true;
11434 : : }
11435 : 41573 : arg = CALL_EXPR_ARG (exp, 1);
11436 : : }
11437 : : /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11438 : : when we checked the arguments and if needed issued a warning. */
11439 : : else
11440 : : {
11441 : 150 : if (nargs == 0)
11442 : : {
11443 : : /* Evidently an out of date version of <stdarg.h>; can't validate
11444 : : va_start's second argument, but can still work as intended. */
11445 : 0 : warning_at (current_location,
11446 : 0 : OPT_Wvarargs,
11447 : : "%<__builtin_next_arg%> called without an argument");
11448 : 0 : return true;
11449 : : }
11450 : 150 : else if (nargs > 1)
11451 : : {
11452 : 0 : error ("wrong number of arguments to function %<__builtin_next_arg%>");
11453 : 0 : return true;
11454 : : }
11455 : 150 : arg = CALL_EXPR_ARG (exp, 0);
11456 : : }
11457 : :
11458 : 41723 : if (TREE_CODE (arg) == SSA_NAME
11459 : 41723 : && SSA_NAME_VAR (arg))
11460 : : arg = SSA_NAME_VAR (arg);
11461 : :
11462 : : /* We destructively modify the call to be __builtin_va_start (ap, 0)
11463 : : or __builtin_next_arg (0) the first time we see it, after checking
11464 : : the arguments and if needed issuing a warning. */
11465 : 41723 : if (!integer_zerop (arg))
11466 : : {
11467 : 7089 : tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11468 : :
11469 : : /* Strip off all nops for the sake of the comparison. This
11470 : : is not quite the same as STRIP_NOPS. It does more.
11471 : : We must also strip off INDIRECT_EXPR for C++ reference
11472 : : parameters. */
11473 : 14181 : while (CONVERT_EXPR_P (arg)
11474 : 14195 : || INDIRECT_REF_P (arg))
11475 : 14 : arg = TREE_OPERAND (arg, 0);
11476 : 7089 : if (arg != last_parm)
11477 : : {
11478 : : /* FIXME: Sometimes with the tree optimizers we can get the
11479 : : not the last argument even though the user used the last
11480 : : argument. We just warn and set the arg to be the last
11481 : : argument so that we will get wrong-code because of
11482 : : it. */
11483 : 12 : warning_at (current_location,
11484 : 12 : OPT_Wvarargs,
11485 : : "second parameter of %<va_start%> not last named argument");
11486 : : }
11487 : :
11488 : : /* Undefined by C99 7.15.1.4p4 (va_start):
11489 : : "If the parameter parmN is declared with the register storage
11490 : : class, with a function or array type, or with a type that is
11491 : : not compatible with the type that results after application of
11492 : : the default argument promotions, the behavior is undefined."
11493 : : */
11494 : 7077 : else if (DECL_REGISTER (arg))
11495 : : {
11496 : 12 : warning_at (current_location,
11497 : 12 : OPT_Wvarargs,
11498 : : "undefined behavior when second parameter of "
11499 : : "%<va_start%> is declared with %<register%> storage");
11500 : : }
11501 : :
11502 : : /* We want to verify the second parameter just once before the tree
11503 : : optimizers are run and then avoid keeping it in the tree,
11504 : : as otherwise we could warn even for correct code like:
11505 : : void foo (int i, ...)
11506 : : { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11507 : 7089 : if (va_start_p)
11508 : 7087 : CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11509 : : else
11510 : 2 : CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11511 : : }
11512 : : return false;
11513 : : }
11514 : :
11515 : :
11516 : : /* Expand a call EXP to __builtin_object_size. */
11517 : :
11518 : : static rtx
11519 : 617 : expand_builtin_object_size (tree exp)
11520 : : {
11521 : 617 : tree ost;
11522 : 617 : int object_size_type;
11523 : 617 : tree fndecl = get_callee_fndecl (exp);
11524 : :
11525 : 617 : if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11526 : : {
11527 : 0 : error ("first argument of %qD must be a pointer, second integer constant",
11528 : : fndecl);
11529 : 0 : expand_builtin_trap ();
11530 : 0 : return const0_rtx;
11531 : : }
11532 : :
11533 : 617 : ost = CALL_EXPR_ARG (exp, 1);
11534 : 617 : STRIP_NOPS (ost);
11535 : :
11536 : 617 : if (TREE_CODE (ost) != INTEGER_CST
11537 : 617 : || tree_int_cst_sgn (ost) < 0
11538 : 1234 : || compare_tree_int (ost, 3) > 0)
11539 : : {
11540 : 0 : error ("last argument of %qD is not integer constant between 0 and 3",
11541 : : fndecl);
11542 : 0 : expand_builtin_trap ();
11543 : 0 : return const0_rtx;
11544 : : }
11545 : :
11546 : 617 : object_size_type = tree_to_shwi (ost);
11547 : :
11548 : 617 : return object_size_type < 2 ? constm1_rtx : const0_rtx;
11549 : : }
11550 : :
11551 : : /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11552 : : FCODE is the BUILT_IN_* to use.
11553 : : Return NULL_RTX if we failed; the caller should emit a normal call,
11554 : : otherwise try to get the result in TARGET, if convenient (and in
11555 : : mode MODE if that's convenient). */
11556 : :
static rtx
expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
			   enum built_in_function fcode)
{
  /* The second argument is the fill byte for memset but a source
     pointer for the copy/move variants.  */
  if (!validate_arglist (exp,
			 POINTER_TYPE,
			 fcode == BUILT_IN_MEMSET_CHK
			 ? INTEGER_TYPE : POINTER_TYPE,
			 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);
  /* SIZE is the destination object size as passed by the _chk caller.  */
  tree size = CALL_EXPR_ARG (exp, 3);

  /* FIXME: Set access mode to write only for memset et al.  */
  bool sizes_ok = check_access (exp, len, /*maxread=*/NULL_TREE,
				/*srcstr=*/NULL_TREE, size, access_read_write);

  /* A non-constant object size means the runtime check must stay.  */
  if (!tree_fits_uhwi_p (size))
    return NULL_RTX;

  if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
    {
      /* Avoid transforming the checking call to an ordinary one when
	 an overflow has been detected or when the call couldn't be
	 validated because the size is not constant.  */
      if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
	return NULL_RTX;

      tree fn = NULL_TREE;
      /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
	 mem{cpy,pcpy,move,set} is available.  */
      switch (fcode)
	{
	case BUILT_IN_MEMCPY_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
	  break;
	case BUILT_IN_MEMPCPY_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
	  break;
	case BUILT_IN_MEMMOVE_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
	  break;
	case BUILT_IN_MEMSET_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMSET);
	  break;
	default:
	  break;
	}

      if (! fn)
	return NULL_RTX;

      /* Expand the unchecked equivalent, propagating the tail-call
	 marking from the original call.  */
      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_expr (fn, target, mode, EXPAND_NORMAL);
    }
  else if (fcode == BUILT_IN_MEMSET_CHK)
    return NULL_RTX;
  else
    {
      unsigned int dest_align = get_pointer_alignment (dest);

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
	return NULL_RTX;

      /* If SRC and DEST are the same (and not volatile), do nothing.  */
      if (operand_equal_p (src, dest, 0))
	{
	  tree expr;

	  if (fcode != BUILT_IN_MEMPCPY_CHK)
	    {
	      /* Evaluate and ignore LEN in case it has side-effects.  */
	      expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
	      return expand_expr (dest, target, mode, EXPAND_NORMAL);
	    }

	  /* mempcpy returns DEST + LEN rather than DEST.  */
	  expr = fold_build_pointer_plus (dest, len);
	  return expand_expr (expr, target, mode, EXPAND_NORMAL);
	}

      /* __memmove_chk special case.  */
      if (fcode == BUILT_IN_MEMMOVE_CHK)
	{
	  unsigned int src_align = get_pointer_alignment (src);

	  if (src_align == 0)
	    return NULL_RTX;

	  /* If src is categorized for a readonly section we can use
	     normal __memcpy_chk.  */
	  if (readonly_data_expr (src))
	    {
	      tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return NULL_RTX;
	      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
					  dest, src, len, size);
	      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
	      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
	      return expand_expr (fn, target, mode, EXPAND_NORMAL);
	    }
	}
      return NULL_RTX;
    }
}
11668 : :
11669 : : /* Emit warning if a buffer overflow is detected at compile time. */
11670 : :
static void
maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
{
  /* The source string.  */
  tree srcstr = NULL_TREE;
  /* The size of the destination object returned by __builtin_object_size.  */
  tree objsize = NULL_TREE;
  /* The string that is being concatenated with (as in __strcat_chk)
     or null if it isn't.  */
  tree catstr = NULL_TREE;
  /* The maximum length of the source sequence in a bounded operation
     (such as __strncat_chk) or null if the operation isn't bounded
     (such as __strcat_chk).  */
  tree maxread = NULL_TREE;
  /* The exact size of the access (such as in __strncpy_chk).  */
  tree size = NULL_TREE;
  /* The access by the function that's checked.  Except for snprintf
     both writing and reading is checked.  */
  access_mode mode = access_read_write;

  /* Pick out the operands relevant to the check from the argument
     positions each _chk variant uses.  */
  switch (fcode)
    {
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      srcstr = CALL_EXPR_ARG (exp, 1);
      objsize = CALL_EXPR_ARG (exp, 2);
      break;

    case BUILT_IN_STRCAT_CHK:
      /* For __strcat_chk the warning will be emitted only if overflowing
	 by at least strlen (dest) + 1 bytes.  */
      catstr = CALL_EXPR_ARG (exp, 0);
      srcstr = CALL_EXPR_ARG (exp, 1);
      objsize = CALL_EXPR_ARG (exp, 2);
      break;

    case BUILT_IN_STRNCAT_CHK:
      catstr = CALL_EXPR_ARG (exp, 0);
      srcstr = CALL_EXPR_ARG (exp, 1);
      maxread = CALL_EXPR_ARG (exp, 2);
      objsize = CALL_EXPR_ARG (exp, 3);
      break;

    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      srcstr = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 2);
      objsize = CALL_EXPR_ARG (exp, 3);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      maxread = CALL_EXPR_ARG (exp, 1);
      objsize = CALL_EXPR_ARG (exp, 3);
      /* The only checked access is the write to the destination.  */
      mode = access_write_only;
      break;
    default:
      gcc_unreachable ();
    }

  if (catstr && maxread)
    {
      /* Check __strncat_chk.  There is no way to determine the length
	 of the string to which the source string is being appended so
	 just warn when the length of the source string is not known.  */
      check_strncat_sizes (exp, objsize);
      return;
    }

  check_access (exp, size, maxread, srcstr, objsize, mode);
}
11743 : :
11744 : : /* Emit warning if a buffer overflow is detected at compile time
11745 : : in __sprintf_chk/__vsprintf_chk calls. */
11746 : :
static void
maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
{
  tree size, len, fmt;
  const char *fmt_str;
  int nargs = call_expr_nargs (exp);

  /* Verify the required arguments in the original call.  */

  if (nargs < 4)
    return;
  /* Argument 2 is the destination object size, argument 3 the format.  */
  size = CALL_EXPR_ARG (exp, 2);
  fmt = CALL_EXPR_ARG (exp, 3);

  /* An unknown or unlimited ((size_t) -1) size means nothing to check.  */
  if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
    return;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return;

  if (!init_target_chars ())
    return;

  /* If the format doesn't contain % args or %%, we know its size.  */
  if (strchr (fmt_str, target_percent) == 0)
    len = build_int_cstu (size_type_node, strlen (fmt_str));
  /* If the format is "%s" and first ... argument is a string literal,
     we know it too.  */
  else if (fcode == BUILT_IN_SPRINTF_CHK
	   && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree arg;

      if (nargs < 5)
	return;
      arg = CALL_EXPR_ARG (exp, 4);
      if (! POINTER_TYPE_P (TREE_TYPE (arg)))
	return;

      /* c_strlen only yields a usable length for a constant string.  */
      len = c_strlen (arg, 1);
      if (!len || ! tree_fits_uhwi_p (len))
	return;
    }
  else
    return;

  /* Add one for the terminating nul.  */
  len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);

  check_access (exp, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, len, size,
		access_write_only);
}
11801 : :
11802 : : /* Fold a call to __builtin_object_size with arguments PTR and OST,
11803 : : if possible. */
11804 : :
static tree
fold_builtin_object_size (tree ptr, tree ost, enum built_in_function fcode)
{
  tree bytes;
  int object_size_type;

  if (!validate_arg (ptr, POINTER_TYPE)
      || !validate_arg (ost, INTEGER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (ost);

  /* The object-size type must be a constant between 0 and 3.  */
  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    return NULL_TREE;

  object_size_type = tree_to_shwi (ost);

  /* __builtin_object_size doesn't evaluate side-effects in its arguments;
     if there are any side-effects, it returns (size_t) -1 for types 0 and 1
     and (size_t) 0 for types 2 and 3.  */
  if (TREE_SIDE_EFFECTS (ptr))
    return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);

  /* __builtin_dynamic_object_size may produce a non-constant result.  */
  if (fcode == BUILT_IN_DYNAMIC_OBJECT_SIZE)
    object_size_type |= OST_DYNAMIC;

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      compute_builtin_object_size (ptr, object_size_type, &bytes);
      /* Fold only when the result fits size_t; dynamic results are
	 used as computed.  */
      if ((object_size_type & OST_DYNAMIC)
	  || int_fits_type_p (bytes, size_type_node))
	return fold_convert (size_type_node, bytes);
    }
  else if (TREE_CODE (ptr) == SSA_NAME)
    {
      /* If object size is not known yet, delay folding until
	 later.  Maybe subsequent passes will help determining
	 it.  */
      if (compute_builtin_object_size (ptr, object_size_type, &bytes)
	  && ((object_size_type & OST_DYNAMIC)
	      || int_fits_type_p (bytes, size_type_node)))
	return fold_convert (size_type_node, bytes);
    }

  return NULL_TREE;
}
11853 : :
11854 : : /* Builtins with folding operations that operate on "..." arguments
11855 : : need special handling; we need to store the arguments in a convenient
11856 : : data structure before attempting any folding. Fortunately there are
11857 : : only a few builtins that fall into this category. FNDECL is the
11858 : : function, EXP is the CALL_EXPR for the call. */
11859 : :
11860 : : static tree
11861 : 2643806 : fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
11862 : : {
11863 : 2643806 : enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11864 : 2643806 : tree ret = NULL_TREE;
11865 : :
11866 : 2643806 : switch (fcode)
11867 : : {
11868 : 115110 : case BUILT_IN_FPCLASSIFY:
11869 : 115110 : ret = fold_builtin_fpclassify (loc, args, nargs);
11870 : 115110 : break;
11871 : :
11872 : 54 : case BUILT_IN_ADDC:
11873 : 54 : case BUILT_IN_ADDCL:
11874 : 54 : case BUILT_IN_ADDCLL:
11875 : 54 : case BUILT_IN_SUBC:
11876 : 54 : case BUILT_IN_SUBCL:
11877 : 54 : case BUILT_IN_SUBCLL:
11878 : 54 : return fold_builtin_addc_subc (loc, fcode, args);
11879 : :
11880 : : default:
11881 : : break;
11882 : : }
11883 : 115110 : if (ret)
11884 : : {
11885 : 115110 : ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11886 : 115110 : SET_EXPR_LOCATION (ret, loc);
11887 : 115110 : suppress_warning (ret);
11888 : 115110 : return ret;
11889 : : }
11890 : : return NULL_TREE;
11891 : : }
11892 : :
11893 : : /* Initialize format string characters in the target charset. */
11894 : :
11895 : : bool
11896 : 290893 : init_target_chars (void)
11897 : : {
11898 : 290893 : static bool init;
11899 : 290893 : if (!init)
11900 : : {
11901 : 111591 : target_newline = lang_hooks.to_target_charset ('\n');
11902 : 111591 : target_percent = lang_hooks.to_target_charset ('%');
11903 : 111591 : target_c = lang_hooks.to_target_charset ('c');
11904 : 111591 : target_s = lang_hooks.to_target_charset ('s');
11905 : 111591 : if (target_newline == 0 || target_percent == 0 || target_c == 0
11906 : 111591 : || target_s == 0)
11907 : : return false;
11908 : :
11909 : 111591 : target_percent_c[0] = target_percent;
11910 : 111591 : target_percent_c[1] = target_c;
11911 : 111591 : target_percent_c[2] = '\0';
11912 : :
11913 : 111591 : target_percent_s[0] = target_percent;
11914 : 111591 : target_percent_s[1] = target_s;
11915 : 111591 : target_percent_s[2] = '\0';
11916 : :
11917 : 111591 : target_percent_s_newline[0] = target_percent;
11918 : 111591 : target_percent_s_newline[1] = target_s;
11919 : 111591 : target_percent_s_newline[2] = target_newline;
11920 : 111591 : target_percent_s_newline[3] = '\0';
11921 : :
11922 : 111591 : init = true;
11923 : : }
11924 : : return true;
11925 : : }
11926 : :
11927 : : /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
11928 : : and no overflow/underflow occurred. INEXACT is true if M was not
11929 : : exactly calculated. TYPE is the tree type for the result. This
11930 : : function assumes that you cleared the MPFR flags and then
11931 : : calculated M to see if anything subsequently set a flag prior to
11932 : : entering this function. Return NULL_TREE if any checks fail. */
11933 : :
static tree
do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
      && (!flag_rounding_math || !inexact))
    {
      REAL_VALUE_TYPE rr;

      real_from_mpfr (&rr, m, type, MPFR_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpfr_t is not, then we underflowed in the
	 conversion.  */
      if (real_isfinite (&rr)
	  && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
	{
	  REAL_VALUE_TYPE rmode;

	  real_convert (&rmode, TYPE_MODE (type), &rr);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (real_identical (&rmode, &rr))
	    return build_real (type, rmode);
	}
    }
  /* Some check failed: the caller must keep the runtime call.  */
  return NULL_TREE;
}
11963 : :
11964 : : /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
11965 : : number and no overflow/underflow occurred. INEXACT is true if M
11966 : : was not exactly calculated. TYPE is the tree type for the result.
11967 : : This function assumes that you cleared the MPFR flags and then
11968 : : calculated M to see if anything subsequently set a flag prior to
11969 : : entering this function. Return NULL_TREE if any checks fail, if
11970 : : FORCE_CONVERT is true, then bypass the checks. */
11971 : :
static tree
do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  FORCE_CONVERT bypasses every
     check and unconditionally builds the complex constant.  */
  if (force_convert
      || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
	  && !mpfr_overflow_p () && !mpfr_underflow_p ()
	  && (!flag_rounding_math || !inexact)))
    {
      REAL_VALUE_TYPE re, im;

      /* Convert real and imaginary parts separately; TYPE is the
	 complex type, so its element type is TREE_TYPE (type).  */
      real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), MPFR_RNDN);
      real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), MPFR_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpfr_t is not, then we underflowed in the
	 conversion.  */
      if (force_convert
	  || (real_isfinite (&re) && real_isfinite (&im)
	      && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
	      && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
	{
	  REAL_VALUE_TYPE re_mode, im_mode;

	  real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
	  real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (force_convert
	      || (real_identical (&re_mode, &re)
		  && real_identical (&im_mode, &im)))
	    return build_complex (type, build_real (TREE_TYPE (type), re_mode),
				  build_real (TREE_TYPE (type), im_mode));
	}
    }
  /* Some check failed: the caller must keep the runtime call.  */
  return NULL_TREE;
}
12010 : :
12011 : : /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
12012 : : the pointer *(ARG_QUO) and return the result. The type is taken
12013 : : from the type of ARG0 and is used for setting the precision of the
12014 : : calculation and results. */
12015 : :
static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  /* The result type and the precision of the computation are both taken
     from ARG0; remquo's two floating-point operands share one type.  */
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      /* Only fold for finite operands; Inf/NaN inputs are left for the
	 runtime library to handle.  */
      if (real_isfinite (ra0) && real_isfinite (ra1))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mpfr_rnd_t rnd = fmt->round_towards_zero? MPFR_RNDZ : MPFR_RNDN;
	  tree result_rem;
	  long integer_quo;
	  mpfr_t m0, m1;

	  mpfr_inits2 (prec, m0, m1, NULL);
	  mpfr_from_real (m0, ra0, MPFR_RNDN);
	  mpfr_from_real (m1, ra1, MPFR_RNDN);
	  /* Clear the MPFR exception flags right before the operation so
	     do_mpfr_ckconv can inspect only the flags it raised.  */
	  mpfr_clear_flags ();
	  mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
	  /* Remquo is independent of the rounding mode, so pass
	     inexact=0 to do_mpfr_ckconv().  */
	  result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
	  mpfr_clears (m0, m1, NULL);
	  if (result_rem)
	    {
	      /* MPFR calculates quo in the host's long so it may
		 return more bits in quo than the target int can hold
		 if sizeof(host long) > sizeof(target int).  This can
		 happen even for native compilers in LP64 mode.  In
		 these cases, modulo the quo value with the largest
		 number that the target int can hold while leaving one
		 bit for the sign.  */
	      if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
		integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));

	      /* Dereference the quo pointer argument.  */
	      arg_quo = build_fold_indirect_ref (arg_quo);
	      /* Proceed iff a valid pointer type was passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
		{
		  /* Set the value.  */
		  tree result_quo
		    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
				   build_int_cst (TREE_TYPE (arg_quo),
						  integer_quo));
		  TREE_SIDE_EFFECTS (result_quo) = 1;
		  /* Combine the quo assignment with the rem.  The whole
		     call folds to (*quo = q, rem): the store happens
		     first, the remainder is the expression's value.  */
		  result = fold_build2 (COMPOUND_EXPR, type,
					result_quo, result_rem);
		  suppress_warning (result, OPT_Wunused_value);
		  result = non_lvalue (result);
		}
	    }
	}
    }
  return result;
}
12086 : :
12087 : : /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
12088 : : resulting value as a tree with type TYPE. The mpfr precision is
12089 : : set to the precision of TYPE. We assume that this mpfr function
12090 : : returns zero if the result could be calculated exactly within the
12091 : : requested precision. In addition, the integer pointer represented
12092 : : by ARG_SG will be dereferenced and set to the appropriate signgam
12093 : : (-1,1) value. */
12094 : :
static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
	 negative integer.  */
      if (real_isfinite (ra)
	  && ra->cl != rvc_zero
	  && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mpfr_rnd_t rnd = fmt->round_towards_zero? MPFR_RNDZ : MPFR_RNDN;
	  int inexact, sg;
	  tree result_lg;

	  /* auto_mpfr releases the MPFR value automatically on scope exit.  */
	  auto_mpfr m (prec);
	  mpfr_from_real (m, ra, MPFR_RNDN);
	  /* Clear the MPFR exception flags right before the operation so
	     do_mpfr_ckconv can inspect only the flags it raised.  */
	  mpfr_clear_flags ();
	  inexact = mpfr_lgamma (m, &sg, m, rnd);
	  result_lg = do_mpfr_ckconv (m, type, inexact);
	  if (result_lg)
	    {
	      tree result_sg;

	      /* Dereference the arg_sg pointer argument.  */
	      arg_sg = build_fold_indirect_ref (arg_sg);
	      /* Assign the signgam value into *arg_sg.  */
	      result_sg = fold_build2 (MODIFY_EXPR,
				       TREE_TYPE (arg_sg), arg_sg,
				       build_int_cst (TREE_TYPE (arg_sg), sg));
	      TREE_SIDE_EFFECTS (result_sg) = 1;
	      /* Combine the signgam assignment with the lgamma result:
		 the call folds to (*arg_sg = sg, lgamma_value).  */
	      result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						result_sg, result_lg));
	    }
	}
    }

  return result;
}
12149 : :
12150 : : /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
12151 : : mpc function FUNC on it and return the resulting value as a tree
12152 : : with type TYPE. The mpfr precision is set to the precision of
12153 : : TYPE. We assume that function FUNC returns zero if the result
12154 : : could be calculated exactly within the requested precision. If
12155 : : DO_NONFINITE is true, then fold expressions containing Inf or NaN
12156 : : in the arguments and/or results. */
12157 : :
tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
	     int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && SCALAR_FLOAT_TYPE_P (TREE_TYPE (TREE_TYPE (arg0)))
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && SCALAR_FLOAT_TYPE_P (TREE_TYPE (TREE_TYPE (arg1)))
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      /* Unless DO_NONFINITE, all four components must be finite.  */
      if (do_nonfinite
	  || (real_isfinite (re0) && real_isfinite (im0)
	      && real_isfinite (re1) && real_isfinite (im1)))
	{
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  /* Matching scalar and complex rounding modes.  */
	  const mpfr_rnd_t rnd = fmt->round_towards_zero
				 ? MPFR_RNDZ : MPFR_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m0, m1;

	  mpc_init2 (m0, prec);
	  mpc_init2 (m1, prec);
	  mpfr_from_real (mpc_realref (m0), re0, rnd);
	  mpfr_from_real (mpc_imagref (m0), im0, rnd);
	  mpfr_from_real (mpc_realref (m1), re1, rnd);
	  mpfr_from_real (mpc_imagref (m1), im1, rnd);
	  /* Clear the MPFR exception flags right before the operation so
	     do_mpc_ckconv can inspect only the flags it raised.  */
	  mpfr_clear_flags ();
	  /* FUNC computes in place; m0 doubles as first source and result.  */
	  inexact = func (m0, m0, m1, crnd);
	  result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
	  mpc_clear (m0);
	  mpc_clear (m1);
	}
    }

  return result;
}
12209 : :
12210 : : /* A wrapper function for builtin folding that prevents warnings for
12211 : : "statement without effect" and the like, caused by removing the
12212 : : call node earlier than the warning is generated. */
12213 : :
tree
fold_call_stmt (gcall *stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  /* Only direct calls to builtins are foldable; calls carrying
     __builtin_va_arg_pack must be left alone.  */
  if (fndecl && fndecl_built_in_p (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      /* For a zero-argument call hand the folders a harmless dummy
	 pointer rather than an out-of-range gimple_call_arg_ptr.  */
      tree *args = (nargs > 0
		    ? gimple_call_arg_ptr (stmt, 0)
		    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	{
	  /* Machine-dependent builtins are folded by the target hook.  */
	  return targetm.fold_builtin (fndecl, nargs, args, ignore);
	}
      else
	{
	  ret = fold_builtin_n (loc, NULL_TREE, fndecl, args, nargs, ignore);
	  if (ret)
	    {
	      /* Propagate location information from original call to
		 expansion of builtin.  Otherwise things like
		 maybe_emit_chk_warning, that operate on the expansion
		 of a builtin, will use the wrong location information.  */
	      if (gimple_has_location (stmt))
		{
		  tree realret = ret;
		  /* Look through a wrapping NOP_EXPR so the location
		     lands on the expression the warnings inspect.  */
		  if (TREE_CODE (ret) == NOP_EXPR)
		    realret = TREE_OPERAND (ret, 0);
		  if (CAN_HAVE_LOCATION_P (realret)
		      && !EXPR_HAS_LOCATION (realret))
		    SET_EXPR_LOCATION (realret, loc);
		  return realret;
		}
	      return ret;
	    }
	}
    }
  return NULL_TREE;
}
12259 : :
12260 : : /* Look up the function in builtin_decl that corresponds to DECL
12261 : : and set ASMSPEC as its user assembler name. DECL must be a
12262 : : function decl that declares a builtin. */
12263 : :
12264 : : void
12265 : 150664 : set_builtin_user_assembler_name (tree decl, const char *asmspec)
12266 : : {
12267 : 150664 : gcc_assert (fndecl_built_in_p (decl, BUILT_IN_NORMAL)
12268 : : && asmspec != 0);
12269 : :
12270 : 150664 : tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
12271 : 150664 : set_user_assembler_name (builtin, asmspec);
12272 : :
12273 : 150664 : if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
12274 : 150664 : && INT_TYPE_SIZE < BITS_PER_WORD)
12275 : : {
12276 : 1 : scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
12277 : 1 : set_user_assembler_libfunc ("ffs", asmspec);
12278 : 1 : set_optab_libfunc (ffs_optab, mode, "ffs");
12279 : : }
12280 : 150664 : }
12281 : :
12282 : : /* Return true if DECL is a builtin that expands to a constant or similarly
12283 : : simple code. */
12284 : : bool
12285 : 31939501 : is_simple_builtin (tree decl)
12286 : : {
12287 : 31939501 : if (decl && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
12288 : 30696696 : switch (DECL_FUNCTION_CODE (decl))
12289 : : {
12290 : : /* Builtins that expand to constants. */
12291 : : case BUILT_IN_CONSTANT_P:
12292 : : case BUILT_IN_EXPECT:
12293 : : case BUILT_IN_OBJECT_SIZE:
12294 : : case BUILT_IN_UNREACHABLE:
12295 : : /* Simple register moves or loads from stack. */
12296 : : case BUILT_IN_ASSUME_ALIGNED:
12297 : : case BUILT_IN_RETURN_ADDRESS:
12298 : : case BUILT_IN_EXTRACT_RETURN_ADDR:
12299 : : case BUILT_IN_FROB_RETURN_ADDR:
12300 : : case BUILT_IN_RETURN:
12301 : : case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
12302 : : case BUILT_IN_FRAME_ADDRESS:
12303 : : case BUILT_IN_VA_END:
12304 : : case BUILT_IN_STACK_SAVE:
12305 : : case BUILT_IN_STACK_RESTORE:
12306 : : case BUILT_IN_DWARF_CFA:
12307 : : /* Exception state returns or moves registers around. */
12308 : : case BUILT_IN_EH_FILTER:
12309 : : case BUILT_IN_EH_POINTER:
12310 : : case BUILT_IN_EH_COPY_VALUES:
12311 : : return true;
12312 : :
12313 : 28001679 : default:
12314 : 28001679 : return false;
12315 : : }
12316 : :
12317 : : return false;
12318 : : }
12319 : :
12320 : : /* Return true if DECL is a builtin that is not expensive, i.e., they are
12321 : : most probably expanded inline into reasonably simple code. This is a
12322 : : superset of is_simple_builtin. */
12323 : : bool
12324 : 19348456 : is_inexpensive_builtin (tree decl)
12325 : : {
12326 : 19348456 : if (!decl)
12327 : : return false;
12328 : 19332227 : else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
12329 : : return true;
12330 : 18067154 : else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
12331 : 15160394 : switch (DECL_FUNCTION_CODE (decl))
12332 : : {
12333 : : case BUILT_IN_ABS:
12334 : : CASE_BUILT_IN_ALLOCA:
12335 : : case BUILT_IN_BSWAP16:
12336 : : case BUILT_IN_BSWAP32:
12337 : : case BUILT_IN_BSWAP64:
12338 : : case BUILT_IN_BSWAP128:
12339 : : case BUILT_IN_CLZ:
12340 : : case BUILT_IN_CLZIMAX:
12341 : : case BUILT_IN_CLZL:
12342 : : case BUILT_IN_CLZLL:
12343 : : case BUILT_IN_CTZ:
12344 : : case BUILT_IN_CTZIMAX:
12345 : : case BUILT_IN_CTZL:
12346 : : case BUILT_IN_CTZLL:
12347 : : case BUILT_IN_FFS:
12348 : : case BUILT_IN_FFSIMAX:
12349 : : case BUILT_IN_FFSL:
12350 : : case BUILT_IN_FFSLL:
12351 : : case BUILT_IN_IMAXABS:
12352 : : case BUILT_IN_FINITE:
12353 : : case BUILT_IN_FINITEF:
12354 : : case BUILT_IN_FINITEL:
12355 : : case BUILT_IN_FINITED32:
12356 : : case BUILT_IN_FINITED64:
12357 : : case BUILT_IN_FINITED128:
12358 : : case BUILT_IN_FPCLASSIFY:
12359 : : case BUILT_IN_ISFINITE:
12360 : : case BUILT_IN_ISINF_SIGN:
12361 : : case BUILT_IN_ISINF:
12362 : : case BUILT_IN_ISINFF:
12363 : : case BUILT_IN_ISINFL:
12364 : : case BUILT_IN_ISINFD32:
12365 : : case BUILT_IN_ISINFD64:
12366 : : case BUILT_IN_ISINFD128:
12367 : : case BUILT_IN_ISNAN:
12368 : : case BUILT_IN_ISNANF:
12369 : : case BUILT_IN_ISNANL:
12370 : : case BUILT_IN_ISNAND32:
12371 : : case BUILT_IN_ISNAND64:
12372 : : case BUILT_IN_ISNAND128:
12373 : : case BUILT_IN_ISNORMAL:
12374 : : case BUILT_IN_ISGREATER:
12375 : : case BUILT_IN_ISGREATEREQUAL:
12376 : : case BUILT_IN_ISLESS:
12377 : : case BUILT_IN_ISLESSEQUAL:
12378 : : case BUILT_IN_ISLESSGREATER:
12379 : : case BUILT_IN_ISUNORDERED:
12380 : : case BUILT_IN_ISEQSIG:
12381 : : case BUILT_IN_VA_ARG_PACK:
12382 : : case BUILT_IN_VA_ARG_PACK_LEN:
12383 : : case BUILT_IN_VA_COPY:
12384 : : case BUILT_IN_TRAP:
12385 : : case BUILT_IN_UNREACHABLE_TRAP:
12386 : : case BUILT_IN_SAVEREGS:
12387 : : case BUILT_IN_POPCOUNTL:
12388 : : case BUILT_IN_POPCOUNTLL:
12389 : : case BUILT_IN_POPCOUNTIMAX:
12390 : : case BUILT_IN_POPCOUNT:
12391 : : case BUILT_IN_PARITYL:
12392 : : case BUILT_IN_PARITYLL:
12393 : : case BUILT_IN_PARITYIMAX:
12394 : : case BUILT_IN_PARITY:
12395 : : case BUILT_IN_LABS:
12396 : : case BUILT_IN_LLABS:
12397 : : case BUILT_IN_PREFETCH:
12398 : : case BUILT_IN_ACC_ON_DEVICE:
12399 : : return true;
12400 : :
12401 : 14374061 : default:
12402 : 14374061 : return is_simple_builtin (decl);
12403 : : }
12404 : :
12405 : : return false;
12406 : : }
12407 : :
12408 : : /* Return true if T is a constant and the value cast to a target char
12409 : : can be represented by a host char.
12410 : : Store the casted char constant in *P if so. */
12411 : :
12412 : : bool
12413 : 2766 : target_char_cst_p (tree t, char *p)
12414 : : {
12415 : 2766 : if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
12416 : : return false;
12417 : :
12418 : 1623 : *p = (char)tree_to_uhwi (t);
12419 : 1623 : return true;
12420 : : }
12421 : :
12422 : : /* Return true if the builtin DECL is implemented in a standard library.
12423 : : Otherwise return false which doesn't guarantee it is not (thus the list
12424 : : of handled builtins below may be incomplete). */
12425 : :
bool
builtin_with_linkage_p (tree decl)
{
  /* Only normal builtins are recognized; everything else conservatively
     reports false (which, per the contract above, does not prove the
     function lacks a library implementation).  */
  if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      /* The C99/C11 <math.h> functions, across the float/double/long
	 double variants (CASE_FLT_FN) and the _FloatN/_FloatNx variants
	 (CASE_FLT_FN_FLOATN_NX) where defined.  */
      CASE_FLT_FN (BUILT_IN_ACOS):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_ACOS):
      CASE_FLT_FN (BUILT_IN_ACOSH):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_ACOSH):
      CASE_FLT_FN (BUILT_IN_ASIN):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_ASIN):
      CASE_FLT_FN (BUILT_IN_ASINH):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_ASINH):
      CASE_FLT_FN (BUILT_IN_ATAN):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_ATAN):
      CASE_FLT_FN (BUILT_IN_ATANH):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_ATANH):
      CASE_FLT_FN (BUILT_IN_ATAN2):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_ATAN2):
      CASE_FLT_FN (BUILT_IN_CBRT):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_CBRT):
      CASE_FLT_FN (BUILT_IN_CEIL):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_CEIL):
      CASE_FLT_FN (BUILT_IN_COPYSIGN):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
      CASE_FLT_FN (BUILT_IN_COS):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_COS):
      CASE_FLT_FN (BUILT_IN_COSH):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_COSH):
      CASE_FLT_FN (BUILT_IN_ERF):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_ERF):
      CASE_FLT_FN (BUILT_IN_ERFC):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_ERFC):
      CASE_FLT_FN (BUILT_IN_EXP):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_EXP):
      CASE_FLT_FN (BUILT_IN_EXP2):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_EXP2):
      CASE_FLT_FN (BUILT_IN_EXPM1):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_EXPM1):
      CASE_FLT_FN (BUILT_IN_FABS):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
      CASE_FLT_FN (BUILT_IN_FDIM):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FDIM):
      CASE_FLT_FN (BUILT_IN_FLOOR):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FLOOR):
      CASE_FLT_FN (BUILT_IN_FMA):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
      CASE_FLT_FN (BUILT_IN_FMAX):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMAX):
      CASE_FLT_FN (BUILT_IN_FMIN):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMIN):
      CASE_FLT_FN (BUILT_IN_FMOD):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMOD):
      CASE_FLT_FN (BUILT_IN_FREXP):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FREXP):
      CASE_FLT_FN (BUILT_IN_HYPOT):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_HYPOT):
      CASE_FLT_FN (BUILT_IN_ILOGB):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_ILOGB):
      CASE_FLT_FN (BUILT_IN_LDEXP):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_LDEXP):
      CASE_FLT_FN (BUILT_IN_LGAMMA):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_LGAMMA):
      CASE_FLT_FN (BUILT_IN_LLRINT):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_LLRINT):
      CASE_FLT_FN (BUILT_IN_LLROUND):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_LLROUND):
      CASE_FLT_FN (BUILT_IN_LOG):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_LOG):
      CASE_FLT_FN (BUILT_IN_LOG10):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_LOG10):
      CASE_FLT_FN (BUILT_IN_LOG1P):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_LOG1P):
      CASE_FLT_FN (BUILT_IN_LOG2):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_LOG2):
      CASE_FLT_FN (BUILT_IN_LOGB):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_LOGB):
      CASE_FLT_FN (BUILT_IN_LRINT):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_LRINT):
      CASE_FLT_FN (BUILT_IN_LROUND):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_LROUND):
      CASE_FLT_FN (BUILT_IN_MODF):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_MODF):
      CASE_FLT_FN (BUILT_IN_NAN):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_NAN):
      CASE_FLT_FN (BUILT_IN_NEARBYINT):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_NEARBYINT):
      CASE_FLT_FN (BUILT_IN_NEXTAFTER):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_NEXTAFTER):
      CASE_FLT_FN (BUILT_IN_NEXTTOWARD):
      CASE_FLT_FN (BUILT_IN_POW):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_POW):
      CASE_FLT_FN (BUILT_IN_REMAINDER):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_REMAINDER):
      CASE_FLT_FN (BUILT_IN_REMQUO):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_REMQUO):
      CASE_FLT_FN (BUILT_IN_RINT):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_RINT):
      CASE_FLT_FN (BUILT_IN_ROUND):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_ROUND):
      CASE_FLT_FN (BUILT_IN_SCALBLN):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_SCALBLN):
      CASE_FLT_FN (BUILT_IN_SCALBN):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_SCALBN):
      CASE_FLT_FN (BUILT_IN_SIN):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_SIN):
      CASE_FLT_FN (BUILT_IN_SINH):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_SINH):
      CASE_FLT_FN (BUILT_IN_SINCOS):
      CASE_FLT_FN (BUILT_IN_SQRT):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_SQRT):
      CASE_FLT_FN (BUILT_IN_TAN):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_TAN):
      CASE_FLT_FN (BUILT_IN_TANH):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_TANH):
      CASE_FLT_FN (BUILT_IN_TGAMMA):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_TGAMMA):
      CASE_FLT_FN (BUILT_IN_TRUNC):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_TRUNC):
	return true;

      case BUILT_IN_STPCPY:
      case BUILT_IN_STPNCPY:
	/* stpcpy is both referenced in libiberty's pex-win32.c and provided
	   by libiberty's stpcpy.c for MinGW targets so we need to return true
	   in order to be able to build libiberty in LTO mode for them.  */
	return true;

      default:
	break;
      }
  return false;
}
12560 : :
12561 : : /* Return true if OFFRNG is bounded to a subrange of offset values
12562 : : valid for the largest possible object. */
12563 : :
12564 : : bool
12565 : 501 : access_ref::offset_bounded () const
12566 : : {
12567 : 501 : tree min = TYPE_MIN_VALUE (ptrdiff_type_node);
12568 : 501 : tree max = TYPE_MAX_VALUE (ptrdiff_type_node);
12569 : 975 : return wi::to_offset (min) <= offrng[0] && offrng[1] <= wi::to_offset (max);
12570 : : }
12571 : :
12572 : : /* If CALLEE has known side effects, fill in INFO and return true.
12573 : : See tree-ssa-structalias.cc:find_func_aliases
12574 : : for the list of builtins we might need to handle here. */
12575 : :
attr_fnspec
builtin_fnspec (tree callee)
{
  built_in_function code = DECL_FUNCTION_CODE (callee);

  /* The returned strings use the attr_fnspec encoding (two characters
     for the return value, then two per argument).  NOTE(review): see
     attr-fnspec.h for the exact letter meanings — the notes below
     paraphrase the intent stated in the surrounding comments.  */
  switch (code)
    {
      /* All the following functions read memory pointed to by
	 their second argument and write memory pointed to by first
	 argument.
	 strcat/strncat additionally reads memory pointed to by the first
	 argument.  */
      case BUILT_IN_STRCAT:
      case BUILT_IN_STRCAT_CHK:
	return "1cW 1 ";
      case BUILT_IN_STRNCAT:
      case BUILT_IN_STRNCAT_CHK:
	return "1cW 13";
      case BUILT_IN_STRCPY:
      case BUILT_IN_STRCPY_CHK:
	return "1cO 1 ";
      case BUILT_IN_STPCPY:
      case BUILT_IN_STPCPY_CHK:
	return ".cO 1 ";
      case BUILT_IN_STRNCPY:
      case BUILT_IN_MEMCPY:
      case BUILT_IN_MEMMOVE:
      case BUILT_IN_TM_MEMCPY:
      case BUILT_IN_TM_MEMMOVE:
      case BUILT_IN_STRNCPY_CHK:
      case BUILT_IN_MEMCPY_CHK:
      case BUILT_IN_MEMMOVE_CHK:
	return "1cO313";
      case BUILT_IN_MEMPCPY:
      case BUILT_IN_MEMPCPY_CHK:
	return ".cO313";
      case BUILT_IN_STPNCPY:
      case BUILT_IN_STPNCPY_CHK:
	return ".cO313";
      case BUILT_IN_BCOPY:
	return ".c23O3";
      case BUILT_IN_BZERO:
	return ".cO2";
      case BUILT_IN_MEMCMP:
      case BUILT_IN_MEMCMP_EQ:
      case BUILT_IN_BCMP:
      case BUILT_IN_STRNCMP:
      case BUILT_IN_STRNCMP_EQ:
      case BUILT_IN_STRNCASECMP:
	return ".cR3R3";

      /* The following functions read memory pointed to by their
	 first argument.  */
      CASE_BUILT_IN_TM_LOAD (1):
      CASE_BUILT_IN_TM_LOAD (2):
      CASE_BUILT_IN_TM_LOAD (4):
      CASE_BUILT_IN_TM_LOAD (8):
      CASE_BUILT_IN_TM_LOAD (FLOAT):
      CASE_BUILT_IN_TM_LOAD (DOUBLE):
      CASE_BUILT_IN_TM_LOAD (LDOUBLE):
      CASE_BUILT_IN_TM_LOAD (M64):
      CASE_BUILT_IN_TM_LOAD (M128):
      CASE_BUILT_IN_TM_LOAD (M256):
      case BUILT_IN_TM_LOG:
      case BUILT_IN_TM_LOG_1:
      case BUILT_IN_TM_LOG_2:
      case BUILT_IN_TM_LOG_4:
      case BUILT_IN_TM_LOG_8:
      case BUILT_IN_TM_LOG_FLOAT:
      case BUILT_IN_TM_LOG_DOUBLE:
      case BUILT_IN_TM_LOG_LDOUBLE:
      case BUILT_IN_TM_LOG_M64:
      case BUILT_IN_TM_LOG_M128:
      case BUILT_IN_TM_LOG_M256:
	return ".cR ";

      case BUILT_IN_INDEX:
      case BUILT_IN_RINDEX:
      case BUILT_IN_STRCHR:
      case BUILT_IN_STRLEN:
      case BUILT_IN_STRRCHR:
	return ".cR ";
      case BUILT_IN_STRNLEN:
	return ".cR2";

      /* These read memory pointed to by the first argument.
	 Allocating memory does not have any side-effects apart from
	 being the definition point for the pointer.
	 Unix98 specifies that errno is set on allocation failure.  */
      case BUILT_IN_STRDUP:
	return "mCR ";
      case BUILT_IN_STRNDUP:
	return "mCR2";
      /* Allocating memory does not have any side-effects apart from
	 being the definition point for the pointer.  */
      case BUILT_IN_MALLOC:
      case BUILT_IN_ALIGNED_ALLOC:
      case BUILT_IN_CALLOC:
      case BUILT_IN_GOMP_ALLOC:
	return "mC";
      CASE_BUILT_IN_ALLOCA:
	return "mc";
      /* These read memory pointed to by the first argument with size
	 in the third argument.  */
      case BUILT_IN_MEMCHR:
	return ".cR3";
      /* These read memory pointed to by the first and second arguments.  */
      case BUILT_IN_STRSTR:
      case BUILT_IN_STRPBRK:
      case BUILT_IN_STRCASECMP:
      case BUILT_IN_STRCSPN:
      case BUILT_IN_STRSPN:
      case BUILT_IN_STRCMP:
      case BUILT_IN_STRCMP_EQ:
	return ".cR R ";
      /* Freeing memory kills the pointed-to memory.  More importantly
	 the call has to serve as a barrier for moving loads and stores
	 across it.  */
      case BUILT_IN_STACK_RESTORE:
      case BUILT_IN_FREE:
      case BUILT_IN_GOMP_FREE:
	return ".co ";
      case BUILT_IN_VA_END:
	return ".cO ";
      /* Realloc serves both as allocation point and deallocation point.  */
      case BUILT_IN_REALLOC:
      case BUILT_IN_GOMP_REALLOC:
	return ".Cw ";
      case BUILT_IN_GAMMA_R:
      case BUILT_IN_GAMMAF_R:
      case BUILT_IN_GAMMAL_R:
      case BUILT_IN_LGAMMA_R:
      case BUILT_IN_LGAMMAF_R:
      case BUILT_IN_LGAMMAL_R:
	return ".C. Ot";
      case BUILT_IN_FREXP:
      case BUILT_IN_FREXPF:
      case BUILT_IN_FREXPL:
      case BUILT_IN_MODF:
      case BUILT_IN_MODFF:
      case BUILT_IN_MODFL:
	return ".c. Ot";
      case BUILT_IN_REMQUO:
      case BUILT_IN_REMQUOF:
      case BUILT_IN_REMQUOL:
	return ".c. . Ot";
      case BUILT_IN_SINCOS:
      case BUILT_IN_SINCOSF:
      case BUILT_IN_SINCOSL:
	return ".c. OtOt";
      case BUILT_IN_MEMSET:
      case BUILT_IN_MEMSET_CHK:
      case BUILT_IN_TM_MEMSET:
	return "1cO3";
      CASE_BUILT_IN_TM_STORE (1):
      CASE_BUILT_IN_TM_STORE (2):
      CASE_BUILT_IN_TM_STORE (4):
      CASE_BUILT_IN_TM_STORE (8):
      CASE_BUILT_IN_TM_STORE (FLOAT):
      CASE_BUILT_IN_TM_STORE (DOUBLE):
      CASE_BUILT_IN_TM_STORE (LDOUBLE):
      CASE_BUILT_IN_TM_STORE (M64):
      CASE_BUILT_IN_TM_STORE (M128):
      CASE_BUILT_IN_TM_STORE (M256):
	return ".cO ";
      /* These builtins have no argument-dependent memory behavior
	 beyond what ".c" (const-ish, no user memory read/written
	 through pointer arguments) expresses.  */
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_RETURN:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_UNWIND_RESUME:
      case BUILT_IN_CXA_END_CLEANUP:
      case BUILT_IN_EH_COPY_VALUES:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_APPLY_ARGS:
      case BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT:
      case BUILT_IN_ASAN_AFTER_DYNAMIC_INIT:
      case BUILT_IN_PREFETCH:
      case BUILT_IN_DWARF_CFA:
      case BUILT_IN_RETURN_ADDRESS:
	return ".c";
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_EXPECT:
      case BUILT_IN_EXPECT_WITH_PROBABILITY:
	return "1cX ";
      /* But posix_memalign stores a pointer into the memory pointed to
	 by its first argument.  */
      case BUILT_IN_POSIX_MEMALIGN:
	return ".cOt";
      case BUILT_IN_OMP_GET_MAPPED_PTR:
	return ". R ";

      default:
	/* Unknown builtin: no fnspec, callers fall back to conservative
	   assumptions.  */
	return "";
    }
}
|