Line data Source code
1 : /* Fold a constant sub-tree into a single node for C-compiler
2 : Copyright (C) 1987-2026 Free Software Foundation, Inc.
3 :
4 : This file is part of GCC.
5 :
6 : GCC is free software; you can redistribute it and/or modify it under
7 : the terms of the GNU General Public License as published by the Free
8 : Software Foundation; either version 3, or (at your option) any later
9 : version.
10 :
11 : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 : WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 : for more details.
15 :
16 : You should have received a copy of the GNU General Public License
17 : along with GCC; see the file COPYING3. If not see
18 : <http://www.gnu.org/licenses/>. */
19 :
20 : /*@@ This file should be rewritten to use an arbitrary precision
21 : @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 : @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 : @@ The routines that translate from the ap rep should
24 : @@ warn if precision et. al. is lost.
25 : @@ This would also make life easier when this technology is used
26 : @@ for cross-compilers. */
27 :
28 : /* The entry points in this file are fold, size_int and size_binop.
29 :
30 : fold takes a tree as argument and returns a simplified tree.
31 :
32 : size_binop takes a tree code for an arithmetic operation
33 : and two operands that are trees, and produces a tree for the
34 : result, assuming the type comes from `sizetype'.
35 :
36 : size_int takes an integer value, and creates a tree constant
37 : with type from `sizetype'.
38 :
39 : Note: Since the folders get called on non-gimple code as well as
40 : gimple code, we need to handle GIMPLE tuples as well as their
41 : corresponding tree equivalents. */
42 :
43 : #define INCLUDE_ALGORITHM
44 : #include "config.h"
45 : #include "system.h"
46 : #include "coretypes.h"
47 : #include "backend.h"
48 : #include "target.h"
49 : #include "rtl.h"
50 : #include "tree.h"
51 : #include "gimple.h"
52 : #include "predict.h"
53 : #include "memmodel.h"
54 : #include "tm_p.h"
55 : #include "tree-ssa-operands.h"
56 : #include "optabs-query.h"
57 : #include "cgraph.h"
58 : #include "diagnostic-core.h"
59 : #include "flags.h"
60 : #include "alias.h"
61 : #include "fold-const.h"
62 : #include "fold-const-call.h"
63 : #include "stor-layout.h"
64 : #include "calls.h"
65 : #include "tree-iterator.h"
66 : #include "expr.h"
67 : #include "intl.h"
68 : #include "langhooks.h"
69 : #include "tree-eh.h"
70 : #include "gimplify.h"
71 : #include "tree-dfa.h"
72 : #include "builtins.h"
73 : #include "generic-match.h"
74 : #include "gimple-iterator.h"
75 : #include "gimple-fold.h"
76 : #include "tree-into-ssa.h"
77 : #include "md5.h"
78 : #include "case-cfn-macros.h"
79 : #include "stringpool.h"
80 : #include "tree-vrp.h"
81 : #include "tree-ssanames.h"
82 : #include "selftest.h"
83 : #include "stringpool.h"
84 : #include "attribs.h"
85 : #include "tree-vector-builder.h"
86 : #include "vec-perm-indices.h"
87 : #include "asan.h"
88 : #include "gimple-range.h"
89 : #include "optabs-tree.h"
90 :
91 : /* Nonzero if we are folding constants inside an initializer or a C++
92 : manifestly-constant-evaluated context; zero otherwise.
93 : Should be used when folding in initializer enables additional
94 : optimizations. */
95 : int folding_initializer = 0;
96 :
97 : /* Nonzero if we are folding C++ manifestly-constant-evaluated context; zero
98 : otherwise.
99 : Should be used when certain constructs shouldn't be optimized
100 : during folding in that context. */
101 : bool folding_cxx_constexpr = false;
102 :
103 : /* The following constants represent a bit based encoding of GCC's
104 : comparison operators. This encoding simplifies transformations
105 : on relational comparison operators, such as AND and OR. */
106 : enum comparison_code {
107 : COMPCODE_FALSE = 0,
108 : COMPCODE_LT = 1,
109 : COMPCODE_EQ = 2,
110 : COMPCODE_LE = 3,
111 : COMPCODE_GT = 4,
112 : COMPCODE_LTGT = 5,
113 : COMPCODE_GE = 6,
114 : COMPCODE_ORD = 7,
115 : COMPCODE_UNORD = 8,
116 : COMPCODE_UNLT = 9,
117 : COMPCODE_UNEQ = 10,
118 : COMPCODE_UNLE = 11,
119 : COMPCODE_UNGT = 12,
120 : COMPCODE_NE = 13,
121 : COMPCODE_UNGE = 14,
122 : COMPCODE_TRUE = 15
123 : };
124 :
125 : static bool negate_expr_p (tree);
126 : static tree negate_expr (tree);
127 : static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
128 : static enum comparison_code comparison_to_compcode (enum tree_code);
129 : static enum tree_code compcode_to_comparison (enum comparison_code);
130 : static bool twoval_comparison_p (tree, tree *, tree *);
131 : static tree eval_subst (location_t, tree, tree, tree, tree, tree);
132 : static tree optimize_bit_field_compare (location_t, enum tree_code,
133 : tree, tree, tree);
134 : static bool simple_operand_p (const_tree);
135 : static tree range_binop (enum tree_code, tree, tree, int, tree, int);
136 : static tree range_predecessor (tree);
137 : static tree range_successor (tree);
138 : static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
139 : static tree fold_cond_expr_with_comparison (location_t, tree, enum tree_code,
140 : tree, tree, tree, tree);
141 : static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
142 : static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
143 : static tree fold_binary_op_with_conditional_arg (location_t,
144 : enum tree_code, tree,
145 : tree, tree,
146 : tree, tree, int);
147 : static tree fold_negate_const (tree, tree);
148 : static tree fold_not_const (const_tree, tree);
149 : static tree fold_relational_const (enum tree_code, tree, tree, tree);
150 : static tree fold_convert_const (enum tree_code, tree, tree);
151 : static tree fold_view_convert_expr (tree, tree);
152 : static tree fold_negate_expr (location_t, tree);
153 :
154 : /* This is a helper function to detect min/max for some operands of COND_EXPR.
155 : The form is "(exp0 CMP cst1) ? exp0 : cst2". */
tree_code
minmax_from_comparison (tree_code cmp, tree exp0,
			const widest_int cst1,
			const widest_int cst2)
{
  /* Detect a MIN/MAX pattern in "(EXP0 CMP CST1) ? EXP0 : CST2" where both
     bounds are integer constants.  Returns MIN_EXPR, MAX_EXPR, or ERROR_MARK
     when no min/max form applies.  */
  if (cst1 == cst2)
    {
      if (cmp == LE_EXPR || cmp == LT_EXPR)
	return MIN_EXPR;
      if (cmp == GT_EXPR || cmp == GE_EXPR)
	return MAX_EXPR;
    }
  if (cst1 == cst2 - 1)
    {
      /* X <= Y - 1 equals to X < Y.  */
      if (cmp == LE_EXPR)
	return MIN_EXPR;
      /* X > Y - 1 equals to X >= Y.  */
      if (cmp == GT_EXPR)
	return MAX_EXPR;
      /* a != MIN_RANGE<a> ? a : MIN_RANGE<a>+1 -> MAX_EXPR<MIN_RANGE<a>+1, a>
	 Only valid for SSA names, whose value range we can query.  */
      if (cmp == NE_EXPR && TREE_CODE (exp0) == SSA_NAME)
	{
	  int_range_max r;
	  get_range_query (cfun)->range_of_expr (r, exp0);
	  if (r.undefined_p ())
	    r.set_varying (TREE_TYPE (exp0));

	  /* If EXP0's lower bound is exactly CST1, the NE test excludes only
	     the minimum, so selecting CST2 (= CST1 + 1) is a MAX.  */
	  widest_int min = widest_int::from (r.lower_bound (),
					     TYPE_SIGN (TREE_TYPE (exp0)));
	  if (min == cst1)
	    return MAX_EXPR;
	}
    }
  if (cst1 == cst2 + 1)
    {
      /* X < Y + 1 equals to X <= Y.  */
      if (cmp == LT_EXPR)
	return MIN_EXPR;
      /* X >= Y + 1 equals to X > Y.  */
      if (cmp == GE_EXPR)
	return MAX_EXPR;
      /* a != MAX_RANGE<a> ? a : MAX_RANGE<a>-1 -> MIN_EXPR<MAX_RANGE<a>-1, a> */
      if (cmp == NE_EXPR && TREE_CODE (exp0) == SSA_NAME)
	{
	  int_range_max r;
	  get_range_query (cfun)->range_of_expr (r, exp0);
	  if (r.undefined_p ())
	    r.set_varying (TREE_TYPE (exp0));

	  /* Mirror of the case above: NE against the upper bound plus
	     selecting CST2 (= CST1 - 1) is a MIN.  */
	  widest_int max = widest_int::from (r.upper_bound (),
					     TYPE_SIGN (TREE_TYPE (exp0)));
	  if (max == cst1)
	    return MIN_EXPR;
	}
    }
  return ERROR_MARK;
}
214 :
215 :
216 : /* This is a helper function to detect min/max for some operands of COND_EXPR.
217 : The form is "(EXP0 CMP EXP1) ? EXP2 : EXP3". */
218 : tree_code
219 146652 : minmax_from_comparison (tree_code cmp, tree exp0, tree exp1, tree exp2, tree exp3)
220 : {
221 146652 : if (HONOR_NANS (exp0) || HONOR_SIGNED_ZEROS (exp0))
222 11 : return ERROR_MARK;
223 :
224 146641 : if (!operand_equal_p (exp0, exp2))
225 : return ERROR_MARK;
226 :
227 146641 : if (operand_equal_p (exp1, exp3))
228 : {
229 14129 : if (cmp == LT_EXPR || cmp == LE_EXPR)
230 : return MIN_EXPR;
231 12024 : if (cmp == GT_EXPR || cmp == GE_EXPR)
232 : return MAX_EXPR;
233 : }
234 132644 : if (TREE_CODE (exp3) == INTEGER_CST
235 132560 : && TREE_CODE (exp1) == INTEGER_CST)
236 132018 : return minmax_from_comparison (cmp, exp0, wi::to_widest (exp1), wi::to_widest (exp3));
237 : return ERROR_MARK;
238 : }
239 :
240 : /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
241 : Otherwise, return LOC. */
242 :
243 : static location_t
244 2931153 : expr_location_or (tree t, location_t loc)
245 : {
246 931769 : location_t tloc = EXPR_LOCATION (t);
247 2915458 : return tloc == UNKNOWN_LOCATION ? loc : tloc;
248 : }
249 :
250 : /* Similar to protected_set_expr_location, but never modify x in place,
251 : if location can and needs to be set, unshare it. */
252 :
253 : tree
254 12140455 : protected_set_expr_location_unshare (tree x, location_t loc)
255 : {
256 12140455 : if (CAN_HAVE_LOCATION_P (x)
257 11256862 : && EXPR_LOCATION (x) != loc
258 7246434 : && !(TREE_CODE (x) == SAVE_EXPR
259 3623432 : || TREE_CODE (x) == TARGET_EXPR
260 : || TREE_CODE (x) == BIND_EXPR))
261 : {
262 3622686 : x = copy_node (x);
263 3622686 : SET_EXPR_LOCATION (x, loc);
264 : }
265 12140455 : return x;
266 : }
267 :
268 : /* This is nonzero if we should defer warnings about undefined
269 : overflow. This facility exists because these warnings are a
270 : special case. The code to estimate loop iterations does not want
271 : to issue any warnings, since it works with expressions which do not
272 : occur in user code. Various bits of cleanup code call fold(), but
273 : only use the result if it has certain characteristics (e.g., is a
274 : constant); that code only wants to issue a warning if the result is
275 : used. */
276 :
277 : static int fold_deferring_overflow_warnings;
278 :
279 : /* If a warning about undefined overflow is deferred, this is the
280 : warning. Note that this may cause us to turn two warnings into
281 : one, but that is fine since it is sufficient to only give one
282 : warning per expression. */
283 :
284 : static const char* fold_deferred_overflow_warning;
285 :
286 : /* If a warning about undefined overflow is deferred, this is the
287 : level at which the warning should be emitted. */
288 :
289 : static enum warn_strict_overflow_code fold_deferred_overflow_code;
290 :
291 : /* Start deferring overflow warnings. We could use a stack here to
292 : permit nested calls, but at present it is not necessary. */
293 :
294 : void
295 1194620999 : fold_defer_overflow_warnings (void)
296 : {
297 1194620999 : ++fold_deferring_overflow_warnings;
298 1194620999 : }
299 :
300 : /* Stop deferring overflow warnings. If there is a pending warning,
301 : and ISSUE is true, then issue the warning if appropriate. STMT is
302 : the statement with which the warning should be associated (used for
303 : location information); STMT may be NULL. CODE is the level of the
304 : warning--a warn_strict_overflow_code value. This function will use
305 : the smaller of CODE and the deferred code when deciding whether to
306 : issue the warning. CODE may be zero to mean to always use the
307 : deferred code. */
308 :
void
fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      /* Still nested inside another deferral: just tighten the recorded
	 warning level if CODE is smaller, and leave the warning pending.  */
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  /* Outermost undefer: consume the pending warning (clear it so it is
     emitted at most once).  */
  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (warning_suppressed_p (stmt, OPT_Wstrict_overflow))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  /* Attach the warning to STMT's location if available, otherwise to the
     current input location.  */
  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}
349 :
350 : /* Stop deferring overflow warnings, ignoring any deferred
351 : warnings. */
352 :
void
fold_undefer_and_ignore_overflow_warnings (void)
{
  /* ISSUE == false makes the undefer discard any pending warning.  */
  fold_undefer_overflow_warnings (false, NULL, 0);
}
358 :
359 : /* Whether we are deferring overflow warnings. */
360 :
bool
fold_deferring_overflow_warnings_p (void)
{
  /* True while at least one fold_defer_overflow_warnings call is
     outstanding.  */
  return fold_deferring_overflow_warnings > 0;
}
366 :
367 : /* This is called when we fold something based on the fact that signed
368 : overflow is undefined. */
369 :
370 : void
371 839682 : fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
372 : {
373 839682 : if (fold_deferring_overflow_warnings > 0)
374 : {
375 832106 : if (fold_deferred_overflow_warning == NULL
376 373839 : || wc < fold_deferred_overflow_code)
377 : {
378 463505 : fold_deferred_overflow_warning = gmsgid;
379 463505 : fold_deferred_overflow_code = wc;
380 : }
381 : }
382 7576 : else if (issue_strict_overflow_warning (wc))
383 6 : warning (OPT_Wstrict_overflow, gmsgid);
384 839682 : }
385 :
386 : /* Return true if the built-in mathematical function specified by CODE
387 : is odd, i.e. -f(x) == f(-x). */
388 :
bool
negate_mathfn_p (combined_fn fn)
{
  /* Odd functions: -f(x) == f(-x) for all of these, so the negation can be
     pushed into the argument.  */
  switch (fn)
    {
    CASE_CFN_ASIN:
    CASE_CFN_ASIN_FN:
    CASE_CFN_ASINH:
    CASE_CFN_ASINH_FN:
    CASE_CFN_ASINPI:
    CASE_CFN_ASINPI_FN:
    CASE_CFN_ATAN:
    CASE_CFN_ATAN_FN:
    CASE_CFN_ATANH:
    CASE_CFN_ATANH_FN:
    CASE_CFN_ATANPI:
    CASE_CFN_ATANPI_FN:
    CASE_CFN_CASIN:
    CASE_CFN_CASIN_FN:
    CASE_CFN_CASINH:
    CASE_CFN_CASINH_FN:
    CASE_CFN_CATAN:
    CASE_CFN_CATAN_FN:
    CASE_CFN_CATANH:
    CASE_CFN_CATANH_FN:
    CASE_CFN_CBRT:
    CASE_CFN_CBRT_FN:
    CASE_CFN_CPROJ:
    CASE_CFN_CPROJ_FN:
    CASE_CFN_CSIN:
    CASE_CFN_CSIN_FN:
    CASE_CFN_CSINH:
    CASE_CFN_CSINH_FN:
    CASE_CFN_CTAN:
    CASE_CFN_CTAN_FN:
    CASE_CFN_CTANH:
    CASE_CFN_CTANH_FN:
    CASE_CFN_ERF:
    CASE_CFN_ERF_FN:
    CASE_CFN_LLROUND:
    CASE_CFN_LLROUND_FN:
    CASE_CFN_LROUND:
    CASE_CFN_LROUND_FN:
    CASE_CFN_ROUND:
    CASE_CFN_ROUNDEVEN:
    CASE_CFN_ROUNDEVEN_FN:
    CASE_CFN_SIN:
    CASE_CFN_SIN_FN:
    CASE_CFN_SINH:
    CASE_CFN_SINH_FN:
    CASE_CFN_SINPI:
    CASE_CFN_SINPI_FN:
    CASE_CFN_TAN:
    CASE_CFN_TAN_FN:
    CASE_CFN_TANH:
    CASE_CFN_TANH_FN:
    CASE_CFN_TANPI:
    CASE_CFN_TANPI_FN:
    CASE_CFN_TRUNC:
    CASE_CFN_TRUNC_FN:
      return true;

    /* Round-to-integer functions that follow the current rounding mode are
       only treated as odd when -frounding-math is off; presumably because
       negation flips the rounding direction under directed rounding.  */
    CASE_CFN_LLRINT:
    CASE_CFN_LLRINT_FN:
    CASE_CFN_LRINT:
    CASE_CFN_LRINT_FN:
    CASE_CFN_NEARBYINT:
    CASE_CFN_NEARBYINT_FN:
    CASE_CFN_RINT:
    CASE_CFN_RINT_FN:
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
466 :
467 : /* Check whether we may negate an integer constant T without causing
468 : overflow. */
469 :
470 : bool
471 3121249 : may_negate_without_overflow_p (const_tree t)
472 : {
473 3121249 : tree type;
474 :
475 3121249 : gcc_assert (TREE_CODE (t) == INTEGER_CST);
476 :
477 3121249 : type = TREE_TYPE (t);
478 3121249 : if (TYPE_UNSIGNED (type))
479 : return false;
480 :
481 3121249 : return !wi::only_sign_bit_p (wi::to_wide (t));
482 : }
483 :
484 : /* Determine whether an expression T can be cheaply negated using
485 : the function negate_expr without introducing undefined overflow. */
486 :
487 : static bool
488 27970130 : negate_expr_p (tree t)
489 : {
490 28125316 : tree type;
491 :
492 28125316 : if (t == 0)
493 : return false;
494 :
495 28125316 : type = TREE_TYPE (t);
496 :
497 28125316 : STRIP_SIGN_NOPS (t);
498 28125316 : switch (TREE_CODE (t))
499 : {
500 1634585 : case INTEGER_CST:
501 1634585 : if (INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type))
502 : return true;
503 :
504 : /* Check that -CST will not overflow type. */
505 373859 : return may_negate_without_overflow_p (t);
506 539 : case BIT_NOT_EXPR:
507 539 : return (INTEGRAL_TYPE_P (type)
508 539 : && TYPE_OVERFLOW_WRAPS (type));
509 :
510 : case FIXED_CST:
511 : return true;
512 :
513 1308 : case NEGATE_EXPR:
514 1308 : return !TYPE_OVERFLOW_SANITIZED (type);
515 :
516 1253893 : case REAL_CST:
517 : /* We want to canonicalize to positive real constants. Pretend
518 : that only negative ones can be easily negated. */
519 1253893 : return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
520 :
521 454 : case COMPLEX_CST:
522 454 : return negate_expr_p (TREE_REALPART (t))
523 572 : && negate_expr_p (TREE_IMAGPART (t));
524 :
525 108 : case VECTOR_CST:
526 108 : {
527 108 : if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
528 : return true;
529 :
530 : /* Steps don't prevent negation. */
531 108 : unsigned int count = vector_cst_encoded_nelts (t);
532 216 : for (unsigned int i = 0; i < count; ++i)
533 108 : if (!negate_expr_p (VECTOR_CST_ENCODED_ELT (t, i)))
534 : return false;
535 :
536 : return true;
537 : }
538 :
539 702 : case COMPLEX_EXPR:
540 702 : return negate_expr_p (TREE_OPERAND (t, 0))
541 702 : && negate_expr_p (TREE_OPERAND (t, 1));
542 :
543 33 : case CONJ_EXPR:
544 33 : return negate_expr_p (TREE_OPERAND (t, 0));
545 :
546 1535321 : case PLUS_EXPR:
547 1535321 : if (HONOR_SIGN_DEPENDENT_ROUNDING (type)
548 1535315 : || HONOR_SIGNED_ZEROS (type)
549 2792643 : || (ANY_INTEGRAL_TYPE_P (type)
550 1257136 : && ! TYPE_OVERFLOW_WRAPS (type)))
551 725685 : return false;
552 : /* -(A + B) -> (-B) - A. */
553 809636 : if (negate_expr_p (TREE_OPERAND (t, 1)))
554 : return true;
555 : /* -(A + B) -> (-A) - B. */
556 147289 : return negate_expr_p (TREE_OPERAND (t, 0));
557 :
558 251734 : case MINUS_EXPR:
559 : /* We can't turn -(A-B) into B-A when we honor signed zeros. */
560 251734 : return !HONOR_SIGN_DEPENDENT_ROUNDING (type)
561 251734 : && !HONOR_SIGNED_ZEROS (type)
562 335640 : && (! ANY_INTEGRAL_TYPE_P (type)
563 83683 : || TYPE_OVERFLOW_WRAPS (type));
564 :
565 2327015 : case MULT_EXPR:
566 2327015 : if (TYPE_UNSIGNED (type))
567 : break;
568 : /* INT_MIN/n * n doesn't overflow while negating one operand it does
569 : if n is a (negative) power of two. */
570 3990874 : if (INTEGRAL_TYPE_P (TREE_TYPE (t))
571 162839 : && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
572 2155928 : && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
573 0 : && (wi::popcount
574 1995437 : (wi::abs (wi::to_wide (TREE_OPERAND (t, 0))))) != 1)
575 160491 : || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
576 137542 : && (wi::popcount
577 4105467 : (wi::abs (wi::to_wide (TREE_OPERAND (t, 1))))) != 1)))
578 : break;
579 :
580 : /* Fall through. */
581 :
582 2252851 : case RDIV_EXPR:
583 2252851 : if (! HONOR_SIGN_DEPENDENT_ROUNDING (t))
584 2252850 : return negate_expr_p (TREE_OPERAND (t, 1))
585 2252850 : || negate_expr_p (TREE_OPERAND (t, 0));
586 : break;
587 :
588 2489 : case TRUNC_DIV_EXPR:
589 2489 : case ROUND_DIV_EXPR:
590 2489 : case EXACT_DIV_EXPR:
591 2489 : if (TYPE_UNSIGNED (type))
592 : break;
593 : /* In general we can't negate A in A / B, because if A is INT_MIN and
594 : B is not 1 we change the sign of the result. */
595 542 : if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
596 542 : && negate_expr_p (TREE_OPERAND (t, 0)))
597 : return true;
598 : /* In general we can't negate B in A / B, because if A is INT_MIN and
599 : B is 1, we may turn this into INT_MIN / -1 which is undefined
600 : and actually traps on some architectures. */
601 754 : if (! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
602 377 : || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
603 669 : || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
604 282 : && ! integer_onep (TREE_OPERAND (t, 1))))
605 367 : return negate_expr_p (TREE_OPERAND (t, 1));
606 : break;
607 :
608 5173079 : case NOP_EXPR:
609 : /* Negate -((double)float) as (double)(-float). */
610 5173079 : if (SCALAR_FLOAT_TYPE_P (type))
611 : {
612 7645 : tree tem = strip_float_extensions (t);
613 7645 : if (tem != t)
614 : return negate_expr_p (tem);
615 : }
616 : break;
617 :
618 1271032 : case CALL_EXPR:
619 : /* Negate -f(x) as f(-x). */
620 1271032 : if (negate_mathfn_p (get_call_combined_fn (t)))
621 63 : return negate_expr_p (CALL_EXPR_ARG (t, 0));
622 : break;
623 :
624 643 : case RSHIFT_EXPR:
625 : /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
626 643 : if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
627 : {
628 498 : tree op1 = TREE_OPERAND (t, 1);
629 498 : if (wi::to_wide (op1) == element_precision (type) - 1)
630 : return true;
631 : }
632 : break;
633 :
634 : default:
635 : break;
636 : }
637 : return false;
638 : }
639 :
640 : /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
641 : simplification is possible.
642 : If negate_expr_p would return true for T, NULL_TREE will never be
643 : returned. */
644 :
static tree
fold_negate_expr_1 (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      /* Accept the folded constant unless the negation introduced an
	 overflow that the signed-integer-overflow sanitizer should see.  */
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && !TYPE_OVERFLOW_TRAPS (type)
	      && TYPE_OVERFLOW_WRAPS (type))
	  || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
	return tem;
      break;

    case POLY_INT_CST:
    case REAL_CST:
    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	/* Negate a complex constant elementwise.  */
	tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
	tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
	if (rpart && ipart)
	  return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
	/* Negate each encoded element; give up if any element resists.  */
	tree_vector_builder elts;
	elts.new_unary_operation (type, t, true);
	unsigned int count = elts.encoded_nelts ();
	for (unsigned int i = 0; i < count; ++i)
	  {
	    tree elt = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
	    if (elt == NULL_TREE)
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      /* --X -> X, unless the sanitizer must observe the inner negation.  */
      if (!TYPE_OVERFLOW_SANITIZED (type))
	return TREE_OPERAND (t, 0);
      break;

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (type)
	  && !HONOR_SIGNED_ZEROS (type))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (type)
	  && !HONOR_SIGNED_ZEROS (type))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      /* Push the negation into whichever operand of A*B or A/B can absorb
	 it (second operand preferred).  */
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (type))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* In general we can't negate A in A / B, because if A is INT_MIN and
	 B is not 1 we change the sign of the result.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
	  && negate_expr_p (TREE_OPERAND (t, 0)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				negate_expr (TREE_OPERAND (t, 0)),
				TREE_OPERAND (t, 1));
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if ((! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
	   || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	   || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	       && ! integer_onep (TREE_OPERAND (t, 1))))
	  && negate_expr_p (TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				TREE_OPERAND (t, 0),
				negate_expr (TREE_OPERAND (t, 1)));
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (SCALAR_FLOAT_TYPE_P (type))
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::to_wide (op1) == element_precision (type) - 1)
	    {
	      /* Flip the signedness so the shift fills with the opposite
		 bit, then convert back.  */
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}
840 :
841 : /* A wrapper for fold_negate_expr_1. */
842 :
843 : static tree
844 39632148 : fold_negate_expr (location_t loc, tree t)
845 : {
846 39632148 : tree type = TREE_TYPE (t);
847 39632148 : STRIP_SIGN_NOPS (t);
848 39632148 : tree tem = fold_negate_expr_1 (loc, t);
849 39632148 : if (tem == NULL_TREE)
850 : return NULL_TREE;
851 33158518 : return fold_convert_loc (loc, type, tem);
852 : }
853 :
854 : /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
855 : negated in a simpler way. Also allow for T to be NULL_TREE, in which case
856 : return NULL_TREE. */
857 :
858 : static tree
859 3881087 : negate_expr (tree t)
860 : {
861 3881087 : tree type, tem;
862 3881087 : location_t loc;
863 :
864 3881087 : if (t == NULL_TREE)
865 : return NULL_TREE;
866 :
867 3881087 : loc = EXPR_LOCATION (t);
868 3881087 : type = TREE_TYPE (t);
869 3881087 : STRIP_SIGN_NOPS (t);
870 :
871 3881087 : tem = fold_negate_expr (loc, t);
872 3881087 : if (!tem)
873 1939726 : tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
874 3881087 : return fold_convert_loc (loc, type, tem);
875 : }
876 :
877 : /* Split a tree IN into a constant, literal and variable parts that could be
878 : combined with CODE to make IN. "constant" means an expression with
879 : TREE_CONSTANT but that isn't an actual constant. CODE must be a
880 : commutative arithmetic operation. Store the constant part into *CONP,
881 : the literal in *LITP and return the variable part. If a part isn't
882 : present, set it to null. If the tree does not decompose in this way,
883 : return the entire tree as the variable part and the other parts as null.
884 :
885 : If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
886 : case, we negate an operand that was subtracted. Except if it is a
887 : literal for which we use *MINUS_LITP instead.
888 :
889 : If NEGATE_P is true, we are negating all of IN, again except a literal
890 : for which we use *MINUS_LITP instead. If a variable part is of pointer
891 : type, it is negated after converting to TYPE. This prevents us from
892 : generating illegal MINUS pointer expression. LOC is the location of
893 : the converted variable part.
894 :
895 : If IN is itself a literal or constant, return it as appropriate.
896 :
897 : Note that we do not guarantee that any of the three values will be the
898 : same type as IN, but they will have the same signedness and mode. */
899 :
static tree
split_tree (tree in, tree type, enum tree_code code,
	    tree *minus_varp, tree *conp, tree *minus_conp,
	    tree *litp, tree *minus_litp, int negate_p)
{
  tree var = 0;
  /* Clear all output slots up front; callers test them for null.  */
  *minus_varp = 0;
  *conp = 0;
  *minus_conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == POINTER_PLUS_EXPR)
		   || (code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR
		       && (TREE_CODE (in) == PLUS_EXPR
			   || TREE_CODE (in) == POINTER_PLUS_EXPR)))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      /* For MINUS_EXPR the second operand is implicitly negated; the
	 neg_*_p flags record which decomposed part inherits that sign.  */
      bool neg1_p = TREE_CODE (in) == MINUS_EXPR;
      bool neg_litp_p = false, neg_conp_p = false, neg_var_p = false;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p && *conp)
	*minus_conp = *conp, *conp = 0;
      if (neg_var_p && var)
	*minus_varp = var, var = 0;
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else if (TREE_CODE (in) == BIT_NOT_EXPR
	   && code == PLUS_EXPR)
    {
      /* -1 - X is folded to ~X, undo that here.  Do _not_ do this
	 when IN is constant.  */
      *litp = build_minus_one_cst (type);
      *minus_varp = TREE_OPERAND (in, 0);
    }
  else
    var = in;

  if (negate_p)
    {
      /* Negating all of IN: swap each part with its minus counterpart.  */
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      if (*conp)
	*minus_conp = *conp, *conp = 0;
      else if (*minus_conp)
	*conp = *minus_conp, *minus_conp = 0;
      if (var)
	*minus_varp = var, var = 0;
      else if (*minus_varp)
	var = *minus_varp, *minus_varp = 0;
    }

  /* Re-associating dropped any dependence on the original overflowed
     constants, so clear the overflow flag on the literal parts.  */
  if (*litp
      && TREE_OVERFLOW_P (*litp))
    *litp = drop_tree_overflow (*litp);
  if (*minus_litp
      && TREE_OVERFLOW_P (*minus_litp))
    *minus_litp = drop_tree_overflow (*minus_litp);

  return var;
}
1004 :
1005 : /* Re-associate trees split by the above function. T1 and T2 are
1006 : either expressions to associate or null. Return the new
1007 : expression, if any. LOC is the location of the new expression. If
1008 : we build an operation, do it in TYPE and with CODE. */
1009 :
1010 : static tree
1011 21115918 : associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
1012 : {
1013 21115918 : if (t1 == 0)
1014 : {
1015 13373580 : gcc_assert (t2 == 0 || code != MINUS_EXPR);
1016 : return t2;
1017 : }
1018 7742338 : else if (t2 == 0)
1019 : return t1;
1020 :
1021 : /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1022 : try to fold this since we will have infinite recursion. But do
1023 : deal with any NEGATE_EXPRs. */
1024 4305878 : if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1025 3400957 : || TREE_CODE (t1) == PLUS_EXPR || TREE_CODE (t2) == PLUS_EXPR
1026 3333682 : || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1027 : {
1028 1704428 : if (code == PLUS_EXPR)
1029 : {
1030 964467 : if (TREE_CODE (t1) == NEGATE_EXPR)
1031 54 : return build2_loc (loc, MINUS_EXPR, type,
1032 : fold_convert_loc (loc, type, t2),
1033 : fold_convert_loc (loc, type,
1034 108 : TREE_OPERAND (t1, 0)));
1035 964413 : else if (TREE_CODE (t2) == NEGATE_EXPR)
1036 1 : return build2_loc (loc, MINUS_EXPR, type,
1037 : fold_convert_loc (loc, type, t1),
1038 : fold_convert_loc (loc, type,
1039 2 : TREE_OPERAND (t2, 0)));
1040 964412 : else if (integer_zerop (t2))
1041 37061 : return fold_convert_loc (loc, type, t1);
1042 : }
1043 739961 : else if (code == MINUS_EXPR)
1044 : {
1045 716681 : if (integer_zerop (t2))
1046 0 : return fold_convert_loc (loc, type, t1);
1047 : }
1048 :
1049 1667312 : return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
1050 1667312 : fold_convert_loc (loc, type, t2));
1051 : }
1052 :
1053 2601450 : return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
1054 2601450 : fold_convert_loc (loc, type, t2));
1055 : }
1056 :
1057 : /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
1058 : for use in int_const_binop, size_binop and size_diffop. */
1059 :
1060 : static bool
1061 2802400137 : int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
1062 : {
1063 2802400137 : if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
1064 : return false;
1065 2802400137 : if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
1066 : return false;
1067 :
1068 2802400137 : switch (code)
1069 : {
1070 : case LSHIFT_EXPR:
1071 : case RSHIFT_EXPR:
1072 : case LROTATE_EXPR:
1073 : case RROTATE_EXPR:
1074 : return true;
1075 :
1076 2802400137 : default:
1077 2802400137 : break;
1078 : }
1079 :
1080 2802400137 : return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
1081 2802400137 : && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
1082 5604800274 : && TYPE_MODE (type1) == TYPE_MODE (type2);
1083 : }
1084 :
1085 : /* Combine two wide ints ARG1 and ARG2 under operation CODE to produce
1086 : a new constant in RES. Return FALSE if we don't know how to
1087 : evaluate CODE at compile-time. */
1088 :
1089 : bool
1090 1545187179 : wide_int_binop (wide_int &res,
1091 : enum tree_code code, const wide_int &arg1, const wide_int &arg2,
1092 : signop sign, wi::overflow_type *overflow)
1093 : {
1094 1545187179 : wide_int tmp;
1095 1545187179 : *overflow = wi::OVF_NONE;
1096 1545187179 : switch (code)
1097 : {
1098 2293793 : case BIT_IOR_EXPR:
1099 2293793 : res = wi::bit_or (arg1, arg2);
1100 2293793 : break;
1101 :
1102 87701 : case BIT_XOR_EXPR:
1103 87701 : res = wi::bit_xor (arg1, arg2);
1104 87701 : break;
1105 :
1106 26431658 : case BIT_AND_EXPR:
1107 26431658 : res = wi::bit_and (arg1, arg2);
1108 26431658 : break;
1109 :
1110 16984483 : case LSHIFT_EXPR:
1111 16984483 : if (wi::neg_p (arg2))
1112 : return false;
1113 16954163 : res = wi::lshift (arg1, arg2);
1114 16954163 : break;
1115 :
1116 8479672 : case RSHIFT_EXPR:
1117 8479672 : if (wi::neg_p (arg2))
1118 : return false;
1119 : /* It's unclear from the C standard whether shifts can overflow.
1120 : The following code ignores overflow; perhaps a C standard
1121 : interpretation ruling is needed. */
1122 8479476 : res = wi::rshift (arg1, arg2, sign);
1123 8479476 : break;
1124 :
1125 1888 : case RROTATE_EXPR:
1126 1888 : case LROTATE_EXPR:
1127 1888 : if (wi::neg_p (arg2))
1128 : {
1129 14 : tmp = -arg2;
1130 14 : if (code == RROTATE_EXPR)
1131 : code = LROTATE_EXPR;
1132 : else
1133 : code = RROTATE_EXPR;
1134 : }
1135 : else
1136 1874 : tmp = arg2;
1137 :
1138 1874 : if (code == RROTATE_EXPR)
1139 1701 : res = wi::rrotate (arg1, tmp);
1140 : else
1141 187 : res = wi::lrotate (arg1, tmp);
1142 : break;
1143 :
1144 244183639 : case PLUS_EXPR:
1145 244183639 : res = wi::add (arg1, arg2, sign, overflow);
1146 244183639 : break;
1147 :
1148 73822089 : case MINUS_EXPR:
1149 73822089 : res = wi::sub (arg1, arg2, sign, overflow);
1150 73822089 : break;
1151 :
1152 429577563 : case MULT_EXPR:
1153 429577563 : res = wi::mul (arg1, arg2, sign, overflow);
1154 429577563 : break;
1155 :
1156 5288 : case MULT_HIGHPART_EXPR:
1157 5288 : res = wi::mul_high (arg1, arg2, sign);
1158 5288 : break;
1159 :
1160 380635409 : case TRUNC_DIV_EXPR:
1161 380635409 : case EXACT_DIV_EXPR:
1162 380635409 : if (arg2 == 0)
1163 : return false;
1164 380629456 : res = wi::div_trunc (arg1, arg2, sign, overflow);
1165 380629456 : break;
1166 :
1167 84333558 : case FLOOR_DIV_EXPR:
1168 84333558 : if (arg2 == 0)
1169 : return false;
1170 84333558 : res = wi::div_floor (arg1, arg2, sign, overflow);
1171 84333558 : break;
1172 :
1173 93872993 : case CEIL_DIV_EXPR:
1174 93872993 : if (arg2 == 0)
1175 : return false;
1176 93872993 : res = wi::div_ceil (arg1, arg2, sign, overflow);
1177 93872993 : break;
1178 :
1179 0 : case ROUND_DIV_EXPR:
1180 0 : if (arg2 == 0)
1181 : return false;
1182 0 : res = wi::div_round (arg1, arg2, sign, overflow);
1183 0 : break;
1184 :
1185 1209649 : case TRUNC_MOD_EXPR:
1186 1209649 : if (arg2 == 0)
1187 : return false;
1188 1208540 : res = wi::mod_trunc (arg1, arg2, sign, overflow);
1189 1208540 : break;
1190 :
1191 72773917 : case FLOOR_MOD_EXPR:
1192 72773917 : if (arg2 == 0)
1193 : return false;
1194 72773917 : res = wi::mod_floor (arg1, arg2, sign, overflow);
1195 72773917 : break;
1196 :
1197 178 : case CEIL_MOD_EXPR:
1198 178 : if (arg2 == 0)
1199 : return false;
1200 178 : res = wi::mod_ceil (arg1, arg2, sign, overflow);
1201 178 : break;
1202 :
1203 0 : case ROUND_MOD_EXPR:
1204 0 : if (arg2 == 0)
1205 : return false;
1206 0 : res = wi::mod_round (arg1, arg2, sign, overflow);
1207 0 : break;
1208 :
1209 28387 : case MIN_EXPR:
1210 28387 : res = wi::min (arg1, arg2, sign);
1211 28387 : break;
1212 :
1213 110465187 : case MAX_EXPR:
1214 110465187 : res = wi::max (arg1, arg2, sign);
1215 110465187 : break;
1216 :
1217 : default:
1218 : return false;
1219 : }
1220 : return true;
1221 1545187179 : }
1222 :
1223 : /* Returns true if we know who is smaller or equal, ARG1 or ARG2, and set the
1224 : min value to RES. */
1225 : bool
1226 0 : can_min_p (const_tree arg1, const_tree arg2, poly_wide_int &res)
1227 : {
1228 0 : if (known_le (wi::to_poly_widest (arg1), wi::to_poly_widest (arg2)))
1229 : {
1230 0 : res = wi::to_poly_wide (arg1);
1231 0 : return true;
1232 : }
1233 0 : else if (known_le (wi::to_poly_widest (arg2), wi::to_poly_widest (arg1)))
1234 : {
1235 0 : res = wi::to_poly_wide (arg2);
1236 0 : return true;
1237 : }
1238 :
1239 : return false;
1240 : }
1241 :
1242 : /* Combine two poly int's ARG1 and ARG2 under operation CODE to
1243 : produce a new constant in RES. Return FALSE if we don't know how
1244 : to evaluate CODE at compile-time. */
1245 :
1246 : bool
1247 1545187179 : poly_int_binop (poly_wide_int &res, enum tree_code code,
1248 : const_tree arg1, const_tree arg2,
1249 : signop sign, wi::overflow_type *overflow)
1250 : {
1251 1545187179 : gcc_assert (poly_int_tree_p (arg1) && poly_int_tree_p (arg2));
1252 :
1253 1545187179 : if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
1254 : {
1255 1545187179 : wide_int warg1 = wi::to_wide (arg1), wi_res;
1256 1545187179 : wide_int warg2 = wi::to_wide (arg2, TYPE_PRECISION (TREE_TYPE (arg1)));
1257 1545187179 : if (!wide_int_binop (wi_res, code, warg1, warg2, sign, overflow))
1258 : return NULL_TREE;
1259 1545149474 : res = wi_res;
1260 1545149474 : return true;
1261 1545187396 : }
1262 :
1263 : gcc_assert (NUM_POLY_INT_COEFFS != 1);
1264 :
1265 : switch (code)
1266 : {
1267 : case PLUS_EXPR:
1268 : res = wi::add (wi::to_poly_wide (arg1),
1269 : wi::to_poly_wide (arg2), sign, overflow);
1270 : break;
1271 :
1272 : case MINUS_EXPR:
1273 : res = wi::sub (wi::to_poly_wide (arg1),
1274 : wi::to_poly_wide (arg2), sign, overflow);
1275 : break;
1276 :
1277 : case MULT_EXPR:
1278 : if (TREE_CODE (arg2) == INTEGER_CST)
1279 : res = wi::mul (wi::to_poly_wide (arg1),
1280 : wi::to_wide (arg2), sign, overflow);
1281 : else if (TREE_CODE (arg1) == INTEGER_CST)
1282 : res = wi::mul (wi::to_poly_wide (arg2),
1283 : wi::to_wide (arg1), sign, overflow);
1284 : else
1285 : return NULL_TREE;
1286 : break;
1287 :
1288 : case LSHIFT_EXPR:
1289 : if (TREE_CODE (arg2) == INTEGER_CST)
1290 : res = wi::to_poly_wide (arg1) << wi::to_wide (arg2);
1291 : else
1292 : return false;
1293 : break;
1294 :
1295 : case BIT_AND_EXPR:
1296 : if (TREE_CODE (arg2) != INTEGER_CST
1297 : || !can_and_p (wi::to_poly_wide (arg1), wi::to_wide (arg2),
1298 : &res))
1299 : return false;
1300 : break;
1301 :
1302 : case BIT_IOR_EXPR:
1303 : if (TREE_CODE (arg2) != INTEGER_CST
1304 : || !can_ior_p (wi::to_poly_wide (arg1), wi::to_wide (arg2),
1305 : &res))
1306 : return false;
1307 : break;
1308 :
1309 : case MIN_EXPR:
1310 : if (!can_min_p (arg1, arg2, res))
1311 : return false;
1312 : break;
1313 :
1314 : default:
1315 : return false;
1316 : }
1317 : return true;
1318 : }
1319 :
1320 : /* Combine two integer constants ARG1 and ARG2 under operation CODE to
1321 : produce a new constant. Return NULL_TREE if we don't know how to
1322 : evaluate CODE at compile-time. */
1323 :
1324 : tree
1325 1545187179 : int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2,
1326 : int overflowable)
1327 : {
1328 1545187179 : poly_wide_int poly_res;
1329 1545187179 : tree type = TREE_TYPE (arg1);
1330 1545187179 : signop sign = TYPE_SIGN (type);
1331 1545187179 : wi::overflow_type overflow = wi::OVF_NONE;
1332 :
1333 1545187179 : if (!poly_int_tree_p (arg1)
1334 1545187179 : || !poly_int_tree_p (arg2)
1335 3090374358 : || !poly_int_binop (poly_res, code, arg1, arg2, sign, &overflow))
1336 37705 : return NULL_TREE;
1337 1545149474 : return force_fit_type (type, poly_res, overflowable,
1338 1545149474 : (((sign == SIGNED || overflowable == -1)
1339 1545149474 : && overflow)
1340 1545149474 : | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2)));
1341 1545187179 : }
1342 :
1343 : /* Return true if binary operation OP distributes over addition in operand
1344 : OPNO, with the other operand being held constant. OPNO counts from 1. */
1345 :
1346 : static bool
1347 185042 : distributes_over_addition_p (tree_code op, int opno)
1348 : {
1349 0 : switch (op)
1350 : {
1351 : case PLUS_EXPR:
1352 : case MINUS_EXPR:
1353 : case MULT_EXPR:
1354 : return true;
1355 :
1356 0 : case LSHIFT_EXPR:
1357 0 : return opno == 1;
1358 :
1359 3852 : default:
1360 3852 : return false;
1361 : }
1362 : }
1363 :
1364 : /* OP is the INDEXth operand to CODE (counting from zero) and OTHER_OP
1365 : is the other operand. Try to use the value of OP to simplify the
1366 : operation in one step, without having to process individual elements. */
1367 : static tree
1368 438155 : simplify_const_binop (tree_code code, tree op, tree other_op,
1369 : int index ATTRIBUTE_UNUSED)
1370 : {
1371 : /* AND, IOR as well as XOR with a zerop can be simplified directly. */
1372 438155 : if (TREE_CODE (op) == VECTOR_CST && TREE_CODE (other_op) == VECTOR_CST)
1373 : {
1374 355383 : if (integer_zerop (other_op))
1375 : {
1376 26971 : if (code == BIT_IOR_EXPR || code == BIT_XOR_EXPR)
1377 : return op;
1378 25950 : else if (code == BIT_AND_EXPR)
1379 : return other_op;
1380 : }
1381 : }
1382 :
1383 : return NULL_TREE;
1384 : }
1385 :
1386 : /* If ARG1 and ARG2 are constants, and if performing CODE on them would
1387 : be an elementwise vector operation, try to fold the operation to a
1388 : constant vector, using ELT_CONST_BINOP to fold each element. Return
1389 : the folded value on success, otherwise return null. */
tree
vector_const_binop (tree_code code, tree arg1, tree arg2,
		    tree (*elt_const_binop) (enum tree_code, tree, tree))
{
  /* Vector OP vector: both operands must have the same element count.  */
  if (TREE_CODE (arg1) == VECTOR_CST && TREE_CODE (arg2) == VECTOR_CST
      && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)),
		   TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2))))
    {
      tree type = TREE_TYPE (arg1);
      bool step_ok_p;
      if (VECTOR_CST_STEPPED_P (arg1)
	  && VECTOR_CST_STEPPED_P (arg2))
	/* We can operate directly on the encoding if:

	      a3 - a2 == a2 - a1 && b3 - b2 == b2 - b1
	    implies
	      (a3 op b3) - (a2 op b2) == (a2 op b2) - (a1 op b1)

	   Addition and subtraction are the supported operators
	   for which this is true.  */
	step_ok_p = (code == PLUS_EXPR || code == MINUS_EXPR);
      else if (VECTOR_CST_STEPPED_P (arg1))
	/* We can operate directly on stepped encodings if:

	     a3 - a2 == a2 - a1
	   implies:
	     (a3 op c) - (a2 op c) == (a2 op c) - (a1 op c)

	   which is true if (x -> x op c) distributes over addition.  */
	step_ok_p = distributes_over_addition_p (code, 1);
      else
	/* Similarly in reverse.  */
	step_ok_p = distributes_over_addition_p (code, 2);
      tree_vector_builder elts;
      if (!elts.new_binary_operation (type, arg1, arg2, step_ok_p))
	return NULL_TREE;
      /* Only the encoded elements need to be folded; the builder
	 extrapolates the rest.  */
      unsigned int count = elts.encoded_nelts ();
      for (unsigned int i = 0; i < count; ++i)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);
	  tree elem2 = VECTOR_CST_ELT (arg2, i);

	  tree elt = elt_const_binop (code, elem1, elem2);

	  /* It is possible that const_binop cannot handle the given
	     code and return NULL_TREE.  */
	  if (elt == NULL_TREE)
	    return NULL_TREE;
	  elts.quick_push (elt);
	}

      return elts.build ();
    }

  /* Vector OP integer scalar: apply the scalar to every element.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      bool step_ok_p = distributes_over_addition_p (code, 1);
      tree_vector_builder elts;
      if (!elts.new_unary_operation (type, arg1, step_ok_p))
	return NULL_TREE;
      unsigned int count = elts.encoded_nelts ();
      for (unsigned int i = 0; i < count; ++i)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);

	  tree elt = elt_const_binop (code, elem1, arg2);

	  /* It is possible that const_binop cannot handle the given
	     code and return NULL_TREE.  */
	  if (elt == NULL_TREE)
	    return NULL_TREE;
	  elts.quick_push (elt);
	}

      return elts.build ();
    }
  return NULL_TREE;
}
1470 :
1471 : /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1472 : constant. We assume ARG1 and ARG2 have the same data type, or at least
1473 : are the same kind of constant and the same machine mode. Return zero if
1474 : combining the constants is not allowed in the current operating mode. */
1475 :
static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* Integer (and poly_int) constants.  */
  if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
    {
      /* POINTER_PLUS takes a sizetype offset; convert it to the pointer
	 operand's type and fold as an ordinary addition.  */
      if (code == POINTER_PLUS_EXPR)
	return int_const_binop (PLUS_EXPR,
				arg1, fold_convert (TREE_TYPE (arg1), arg2));

      return int_const_binop (code, arg1, arg2);
    }

  /* Floating-point constants.  */
  if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a signaling NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISSIGNALING_NAN (d1)
	      || REAL_VALUE_ISSIGNALING_NAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && real_equal (&d2, &dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	{
	  /* Make resulting NaN value to be qNaN when flag_signaling_nans
	     is off.  */
	  d1.signalling = 0;
	  t = build_real (type, d1);
	  return t;
	}
      else if (REAL_VALUE_ISNAN (d2))
	{
	  /* Make resulting NaN value to be qNaN when flag_signaling_nans
	     is off.  */
	  d2.signalling = 0;
	  t = build_real (type, d2);
	  return t;
	}

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 both operands are not NaN but the result is NaN, and
	 flag_trapping_math.  Such operations should raise an
	 invalid operation exception.  */
      if (flag_trapping_math
	  && MODE_HAS_NANS (mode)
	  && REAL_VALUE_ISNAN (result)
	  && !REAL_VALUE_ISNAN (d1)
	  && !REAL_VALUE_ISNAN (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may dependent upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  /* Fixed-point constants.  */
  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      bool sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  if (TREE_CODE (arg2) != FIXED_CST)
	    return NULL_TREE;
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  {
	    /* The shift count is an INTEGER_CST; smuggle it to
	       fixed_arithmetic through a SImode fixed value.  */
	    if (TREE_CODE (arg2) != INTEGER_CST)
	      return NULL_TREE;
	    wi::tree_to_wide_ref w2 = wi::to_wide (arg2);
	    f2.data.high = w2.elt (1);
	    f2.data.low = w2.ulow ();
	    f2.mode = SImode;
	  }
	  break;

	default:
	  return NULL_TREE;
	}

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
      return t;
    }

  /* Complex constants: fold real and imaginary parts recursively.  */
  if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2);
	  imag = const_binop (code, i1, i2);
	  break;

	case MULT_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_mul);

	  /* (r1 + i1*i) * (r2 + i2*i) expanded for integral complex.  */
	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2),
			      const_binop (MULT_EXPR, i1, i2));
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2),
			      const_binop (MULT_EXPR, i1, r2));
	  break;

	case RDIV_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_div);
	  /* Fallthru.  */
	case TRUNC_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case FLOOR_DIV_EXPR:
	case ROUND_DIV_EXPR:
	  if (flag_complex_method == 0)
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.cc:expand_complex_div_straight().

		 Expand complex division to scalars, straightforward algorithm.
		 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
		 t = br*br + bi*bi
	      */
	      tree magsquared
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r2, r2),
			       const_binop (MULT_EXPR, i2, i2));
	      tree t1
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r1, r2),
			       const_binop (MULT_EXPR, i1, i2));
	      tree t2
		= const_binop (MINUS_EXPR,
			       const_binop (MULT_EXPR, i1, r2),
			       const_binop (MULT_EXPR, r1, i2));

	      real = const_binop (code, t1, magsquared);
	      imag = const_binop (code, t2, magsquared);
	    }
	  else
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.cc:expand_complex_div_wide().

		 Expand complex division to scalars, modified algorithm to minimize
		 overflow with wide input ranges.  */
	      tree compare = fold_build2 (LT_EXPR, boolean_type_node,
					  fold_abs_const (r2, TREE_TYPE (type)),
					  fold_abs_const (i2, TREE_TYPE (type)));

	      if (integer_nonzerop (compare))
		{
		  /* In the TRUE branch, we compute
		     ratio = br/bi;
		     div = (br * ratio) + bi;
		     tr = (ar * ratio) + ai;
		     ti = (ai * ratio) - ar;
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, r2, i2);
		  tree div = const_binop (PLUS_EXPR, i2,
					  const_binop (MULT_EXPR, r2, ratio));
		  real = const_binop (MULT_EXPR, r1, ratio);
		  real = const_binop (PLUS_EXPR, real, i1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, i1, ratio);
		  imag = const_binop (MINUS_EXPR, imag, r1);
		  imag = const_binop (code, imag, div);
		}
	      else
		{
		  /* In the FALSE branch, we compute
		     ratio = d/c;
		     divisor = (d * ratio) + c;
		     tr = (b * ratio) + a;
		     ti = b - (a * ratio);
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, i2, r2);
		  tree div = const_binop (PLUS_EXPR, r2,
					  const_binop (MULT_EXPR, i2, ratio));

		  real = const_binop (MULT_EXPR, i1, ratio);
		  real = const_binop (PLUS_EXPR, real, r1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, r1, ratio);
		  imag = const_binop (MINUS_EXPR, i1, imag);
		  imag = const_binop (code, imag, div);
		}
	    }
	  break;

	default:
	  return NULL_TREE;
	}

      /* Any recursive fold may have failed; only build the complex
	 constant when both parts folded.  */
      if (real && imag)
	return build_complex (type, real, imag);
    }

  /* Try whole-operand vector simplifications before going element-wise.  */
  tree simplified;
  if ((simplified = simplify_const_binop (code, arg1, arg2, 0)))
    return simplified;

  if (commutative_tree_code (code)
      && (simplified = simplify_const_binop (code, arg2, arg1, 1)))
    return simplified;

  return vector_const_binop (code, arg1, arg2, const_binop);
}
1784 :
1785 : /* Overload that adds a TYPE parameter to be able to dispatch
1786 : to fold_relational_const. */
1787 :
1788 : tree
1789 267965345 : const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
1790 : {
1791 267965345 : if (TREE_CODE_CLASS (code) == tcc_comparison)
1792 72438718 : return fold_relational_const (code, type, arg1, arg2);
1793 :
1794 : /* ??? Until we make the const_binop worker take the type of the
1795 : result as argument put those cases that need it here. */
1796 195526627 : switch (code)
1797 : {
1798 18 : case VEC_SERIES_EXPR:
1799 18 : if (CONSTANT_CLASS_P (arg1)
1800 18 : && CONSTANT_CLASS_P (arg2))
1801 18 : return build_vec_series (type, arg1, arg2);
1802 : return NULL_TREE;
1803 :
1804 267523 : case COMPLEX_EXPR:
1805 267523 : if ((TREE_CODE (arg1) == REAL_CST
1806 257006 : && TREE_CODE (arg2) == REAL_CST)
1807 10519 : || (TREE_CODE (arg1) == INTEGER_CST
1808 10517 : && TREE_CODE (arg2) == INTEGER_CST))
1809 267521 : return build_complex (type, arg1, arg2);
1810 : return NULL_TREE;
1811 :
1812 100042 : case POINTER_DIFF_EXPR:
1813 100042 : if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
1814 : {
1815 198856 : poly_offset_int res = (wi::to_poly_offset (arg1)
1816 99428 : - wi::to_poly_offset (arg2));
1817 99428 : return force_fit_type (type, res, 1,
1818 99428 : TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1819 : }
1820 : return NULL_TREE;
1821 :
1822 14579 : case VEC_PACK_TRUNC_EXPR:
1823 14579 : case VEC_PACK_FIX_TRUNC_EXPR:
1824 14579 : case VEC_PACK_FLOAT_EXPR:
1825 14579 : {
1826 14579 : unsigned int HOST_WIDE_INT out_nelts, in_nelts, i;
1827 :
1828 14579 : if (TREE_CODE (arg1) != VECTOR_CST
1829 14579 : || TREE_CODE (arg2) != VECTOR_CST)
1830 : return NULL_TREE;
1831 :
1832 14579 : if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
1833 : return NULL_TREE;
1834 :
1835 14579 : out_nelts = in_nelts * 2;
1836 14579 : gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
1837 : && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1838 :
1839 14579 : tree_vector_builder elts (type, out_nelts, 1);
1840 188791 : for (i = 0; i < out_nelts; i++)
1841 : {
1842 174224 : tree elt = (i < in_nelts
1843 174224 : ? VECTOR_CST_ELT (arg1, i)
1844 87106 : : VECTOR_CST_ELT (arg2, i - in_nelts));
1845 175268 : elt = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
1846 : ? NOP_EXPR
1847 : : code == VEC_PACK_FLOAT_EXPR
1848 1044 : ? FLOAT_EXPR : FIX_TRUNC_EXPR,
1849 174224 : TREE_TYPE (type), elt);
1850 174224 : if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1851 12 : return NULL_TREE;
1852 174212 : elts.quick_push (elt);
1853 : }
1854 :
1855 14567 : return elts.build ();
1856 14579 : }
1857 :
1858 206 : case VEC_WIDEN_MULT_LO_EXPR:
1859 206 : case VEC_WIDEN_MULT_HI_EXPR:
1860 206 : case VEC_WIDEN_MULT_EVEN_EXPR:
1861 206 : case VEC_WIDEN_MULT_ODD_EXPR:
1862 206 : {
1863 206 : unsigned HOST_WIDE_INT out_nelts, in_nelts, out, ofs, scale;
1864 :
1865 206 : if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
1866 : return NULL_TREE;
1867 :
1868 206 : if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
1869 : return NULL_TREE;
1870 206 : out_nelts = in_nelts / 2;
1871 206 : gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
1872 : && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1873 :
1874 206 : if (code == VEC_WIDEN_MULT_LO_EXPR)
1875 : scale = 0, ofs = BYTES_BIG_ENDIAN ? out_nelts : 0;
1876 : else if (code == VEC_WIDEN_MULT_HI_EXPR)
1877 : scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : out_nelts;
1878 : else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
1879 : scale = 1, ofs = 0;
1880 : else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
1881 206 : scale = 1, ofs = 1;
1882 :
1883 206 : tree_vector_builder elts (type, out_nelts, 1);
1884 738 : for (out = 0; out < out_nelts; out++)
1885 : {
1886 532 : unsigned int in = (out << scale) + ofs;
1887 532 : tree t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
1888 : VECTOR_CST_ELT (arg1, in));
1889 532 : tree t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
1890 : VECTOR_CST_ELT (arg2, in));
1891 :
1892 532 : if (t1 == NULL_TREE || t2 == NULL_TREE)
1893 0 : return NULL_TREE;
1894 532 : tree elt = const_binop (MULT_EXPR, t1, t2);
1895 532 : if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1896 : return NULL_TREE;
1897 532 : elts.quick_push (elt);
1898 : }
1899 :
1900 206 : return elts.build ();
1901 206 : }
1902 :
1903 195144259 : default:;
1904 : }
1905 :
1906 195144259 : if (TREE_CODE_CLASS (code) != tcc_binary)
1907 : return NULL_TREE;
1908 :
1909 : /* Make sure type and arg0 have the same saturating flag. */
1910 192348040 : gcc_checking_assert (TYPE_SATURATING (type)
1911 : == TYPE_SATURATING (TREE_TYPE (arg1)));
1912 :
1913 192348040 : return const_binop (code, arg1, arg2);
1914 : }
1915 :
/* Compute CODE ARG0 with resulting type TYPE with ARG0 being constant.
   Return zero if computing the constant is not possible.  */
1918 :
tree
const_unop (enum tree_code code, tree type, tree arg0)
{
  /* Don't perform the operation, other than NEGATE and ABS, if
     flag_signaling_nans is on and the operand is a signaling NaN.
     Folding would lose the invalid-operation exception the SNaN
     should raise at run time.  */
  if (TREE_CODE (arg0) == REAL_CST
      && HONOR_SNANS (arg0)
      && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
      && code != NEGATE_EXPR
      && code != ABS_EXPR
      && code != ABSU_EXPR)
    return NULL_TREE;

  switch (code)
    {
    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIXED_CONVERT_EXPR:
      /* All conversions share one constant-folding helper.  */
      return fold_convert_const (code, type, arg0);

    case ADDR_SPACE_CONVERT_EXPR:
      /* If the source address is 0, and the source address space
	 cannot have a valid object at 0, fold to dest type null.  */
      if (integer_zerop (arg0)
	  && !(targetm.addr_space.zero_address_valid
	       (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
	return fold_convert_const (code, type, arg0);
      break;

    case VIEW_CONVERT_EXPR:
      return fold_view_convert_expr (type, arg0);

    case NEGATE_EXPR:
      {
	/* Can't call fold_negate_const directly here as that doesn't
	   handle all cases and we might not be able to negate some
	   constants.  */
	tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
	/* Only accept the result if it actually folded to a constant.  */
	if (tem && CONSTANT_CLASS_P (tem))
	  return tem;
	break;
      }

    case ABS_EXPR:
    case ABSU_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
	return fold_abs_const (arg0, type);
      break;

    case CONJ_EXPR:
      /* Complex conjugate: negate the imaginary part only.  */
      if (TREE_CODE (arg0) == COMPLEX_CST)
	{
	  tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
					  TREE_TYPE (type));
	  return build_complex (type, TREE_REALPART (arg0), ipart);
	}
      break;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return fold_not_const (arg0, type);
      else if (POLY_INT_CST_P (arg0))
	return wide_int_to_tree (type, ~poly_int_cst_value (arg0));
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
	{
	  tree elem;

	  /* This can cope with stepped encodings because ~x == -1 - x.  */
	  tree_vector_builder elements;
	  elements.new_unary_operation (type, arg0, true);
	  unsigned int i, count = elements.encoded_nelts ();
	  for (i = 0; i < count; ++i)
	    {
	      elem = VECTOR_CST_ELT (arg0, i);
	      elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
	      /* Bail out of the loop if any element fails to fold;
		 the i == count check below then rejects the vector.  */
	      if (elem == NULL_TREE)
		break;
	      elements.quick_push (elem);
	    }
	  if (i == count)
	    return elements.build ();
	}
      break;

    case TRUTH_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return constant_boolean_node (integer_zerop (arg0), type);
      break;

    case REALPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_REALPART (arg0));
      break;

    case IMAGPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_IMAGPART (arg0));
      break;

    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
    case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
      {
	unsigned HOST_WIDE_INT out_nelts, in_nelts, i;
	enum tree_code subcode;

	if (TREE_CODE (arg0) != VECTOR_CST)
	  return NULL_TREE;

	/* Only constant-length vectors can be unpacked element-wise.  */
	if (!VECTOR_CST_NELTS (arg0).is_constant (&in_nelts))
	  return NULL_TREE;
	out_nelts = in_nelts / 2;
	gcc_assert (known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));

	/* LO selects the low-numbered half on little-endian and the
	   high-numbered half on big-endian targets; HI the opposite.  */
	unsigned int offset = 0;
	if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
				   || code == VEC_UNPACK_FLOAT_LO_EXPR
				   || code == VEC_UNPACK_FIX_TRUNC_LO_EXPR))
	  offset = out_nelts;

	/* Map the vector opcode onto the scalar conversion to apply
	   per element.  */
	if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
	  subcode = NOP_EXPR;
	else if (code == VEC_UNPACK_FLOAT_LO_EXPR
		 || code == VEC_UNPACK_FLOAT_HI_EXPR)
	  subcode = FLOAT_EXPR;
	else
	  subcode = FIX_TRUNC_EXPR;

	tree_vector_builder elts (type, out_nelts, 1);
	for (i = 0; i < out_nelts; i++)
	  {
	    tree elt = fold_convert_const (subcode, TREE_TYPE (type),
					   VECTOR_CST_ELT (arg0, i + offset));
	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    case VEC_DUPLICATE_EXPR:
      if (CONSTANT_CLASS_P (arg0))
	return build_vector_from_val (type, arg0);
      return NULL_TREE;

    default:
      break;
    }

  return NULL_TREE;
}
2076 :
2077 : /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
2078 : indicates which particular sizetype to create. */
2079 :
2080 : tree
2081 3568388019 : size_int_kind (poly_int64 number, enum size_type_kind kind)
2082 : {
2083 3568388019 : return build_int_cst (sizetype_tab[(int) kind], number);
2084 : }
2085 :
2086 : /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
2087 : is a tree code. The type of the result is taken from the operands.
2088 : Both must be equivalent integer types, ala int_binop_types_match_p.
2089 : If the operands are constant, so is the result. */
2090 :
2091 : tree
2092 2762819274 : size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
2093 : {
2094 2762819274 : tree type = TREE_TYPE (arg0);
2095 :
2096 2762819274 : if (arg0 == error_mark_node || arg1 == error_mark_node)
2097 : return error_mark_node;
2098 :
2099 2762819274 : gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
2100 : TREE_TYPE (arg1)));
2101 :
2102 : /* Handle the special case of two poly_int constants faster. */
2103 2762819274 : if (poly_int_tree_p (arg0) && poly_int_tree_p (arg1))
2104 : {
2105 : /* And some specific cases even faster than that. */
2106 2730135372 : if (code == PLUS_EXPR)
2107 : {
2108 1261019441 : if (integer_zerop (arg0)
2109 1261019441 : && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
2110 : return arg1;
2111 313208792 : if (integer_zerop (arg1)
2112 313208792 : && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
2113 : return arg0;
2114 : }
2115 1469115931 : else if (code == MINUS_EXPR)
2116 : {
2117 118875009 : if (integer_zerop (arg1)
2118 118875009 : && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
2119 : return arg0;
2120 : }
2121 1350240922 : else if (code == MULT_EXPR)
2122 : {
2123 614998858 : if (integer_onep (arg0)
2124 614998858 : && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
2125 : return arg1;
2126 : }
2127 :
2128 : /* Handle general case of two integer constants. For sizetype
2129 : constant calculations we always want to know about overflow,
2130 : even in the unsigned case. */
2131 1318152259 : tree res = int_const_binop (code, arg0, arg1, -1);
2132 1318152259 : if (res != NULL_TREE)
2133 : return res;
2134 : }
2135 :
2136 32683902 : return fold_build2_loc (loc, code, type, arg0, arg1);
2137 : }
2138 :
2139 : /* Given two values, either both of sizetype or both of bitsizetype,
2140 : compute the difference between the two values. Return the value
2141 : in signed type corresponding to the type of the operands. */
2142 :
2143 : tree
2144 39580863 : size_diffop_loc (location_t loc, tree arg0, tree arg1)
2145 : {
2146 39580863 : tree type = TREE_TYPE (arg0);
2147 39580863 : tree ctype;
2148 :
2149 39580863 : gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
2150 : TREE_TYPE (arg1)));
2151 :
2152 : /* If the type is already signed, just do the simple thing. */
2153 39580863 : if (!TYPE_UNSIGNED (type))
2154 10344186 : return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
2155 :
2156 29236677 : if (type == sizetype)
2157 29236677 : ctype = ssizetype;
2158 0 : else if (type == bitsizetype)
2159 0 : ctype = sbitsizetype;
2160 : else
2161 0 : ctype = signed_type_for (type);
2162 :
2163 : /* If either operand is not a constant, do the conversions to the signed
2164 : type and subtract. The hardware will do the right thing with any
2165 : overflow in the subtraction. */
2166 29236677 : if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
2167 17570 : return size_binop_loc (loc, MINUS_EXPR,
2168 : fold_convert_loc (loc, ctype, arg0),
2169 17570 : fold_convert_loc (loc, ctype, arg1));
2170 :
2171 : /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
2172 : Otherwise, subtract the other way, convert to CTYPE (we know that can't
2173 : overflow) and negate (which can't either). Special-case a result
2174 : of zero while we're here. */
2175 29219107 : if (tree_int_cst_equal (arg0, arg1))
2176 25999540 : return build_int_cst (ctype, 0);
2177 3219567 : else if (tree_int_cst_lt (arg1, arg0))
2178 2127650 : return fold_convert_loc (loc, ctype,
2179 2127650 : size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
2180 : else
2181 1091917 : return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
2182 : fold_convert_loc (loc, ctype,
2183 : size_binop_loc (loc,
2184 : MINUS_EXPR,
2185 : arg1, arg0)));
2186 : }
2187 :
2188 : /* Convert integer constant ARG1 to TYPE, which is an integral or offset
2189 : or pointer type. */
2190 :
2191 : tree
2192 1490878375 : int_const_convert (tree type, const_tree arg1, int overflowable)
2193 : {
2194 : /* Given an integer constant, make new constant with new type,
2195 : appropriately sign-extended or truncated. Use widest_int
2196 : so that any extension is done according ARG1's type. */
2197 1490878375 : tree arg1_type = TREE_TYPE (arg1);
2198 1490878375 : unsigned prec = MAX (TYPE_PRECISION (arg1_type), TYPE_PRECISION (type));
2199 1490878375 : return force_fit_type (type, wide_int::from (wi::to_wide (arg1), prec,
2200 1490878375 : TYPE_SIGN (arg1_type)),
2201 : overflowable,
2202 1490878375 : TREE_OVERFLOW (arg1));
2203 : }
2204 :
2205 : /* A subroutine of fold_convert_const handling conversions a REAL_CST
2206 : to an integer type. */
2207 :
/* A subroutine of fold_convert_const handling conversions a REAL_CST
   to an integer type.  Returns NULL_TREE when the fold must be left to
   the runtime (trapping math with an out-of-range value).  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  bool overflow = false;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  wide_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  /* Only truncation is handled; any other CODE is a caller bug.  */
  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = true;
      val = wi::zero (TYPE_PRECISION (type));
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  On overflow, saturate to the respective bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (real_less (&r, &l))
	{
	  overflow = true;
	  val = wi::to_wide (lt);
	}
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
	{
	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
	  if (real_less (&u, &r))
	    {
	      overflow = true;
	      val = wi::to_wide (ut);
	    }
	}
    }

  /* In range: do the actual conversion.  */
  if (! overflow)
    val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));

  /* According to IEEE standard, for conversions from floating point to
     integer.  When a NaN or infinite operand cannot be represented in the
     destination format and this cannot otherwise be indicated, the invalid
     operation exception shall be signaled.  When a numeric operand would
     convert to an integer outside the range of the destination format, the
     invalid operation exception shall be signaled if this situation cannot
     otherwise be indicated.  */
  if (!flag_trapping_math || !overflow)
    t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  else
    t = NULL_TREE;

  return t;
}
2289 :
2290 : /* A subroutine of fold_convert_const handling conversions of a
2291 : FIXED_CST to an integer type. */
2292 :
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  scalar_mode mode;

  /* Right shift FIXED_CST to temp by fbit, dropping the fractional
     bits (this rounds toward negative infinity for signed modes).  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
			  HOST_BITS_PER_DOUBLE_INT,
			  SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
				HOST_BITS_PER_DOUBLE_INT,
				SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      /* A shift by the full double_int width would be undefined;
	 the integer part is simply zero in that case.  */
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     By checking if the fractional bits are not zero to add 1 to temp.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  Overflow is flagged when
     a negative value is converted to a narrower-signedness type.  */
  t = force_fit_type (type, temp, -1,
		      (temp.is_negative ()
		       && (TYPE_UNSIGNED (type)
			   < TYPE_UNSIGNED (TREE_TYPE (arg1))))
		      | TREE_OVERFLOW (arg1));

  return t;
}
2337 :
2338 : /* A subroutine of fold_convert_const handling conversions a REAL_CST
2339 : to another floating point type. */
2340 :
/* A subroutine of fold_convert_const handling conversions a REAL_CST
   to another floating point type.  Returns NULL_TREE when folding would
   lose an SNaN signal or disobey the runtime rounding mode.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  /* If the underlying modes are the same, simply treat it as
     copy and rebuild with TREE_REAL_CST information and the
     given type.  */
  if (TYPE_MODE (type) == TYPE_MODE (TREE_TYPE (arg1)))
    {
      t = build_real (type, TREE_REAL_CST (arg1));
      return t;
    }

  /* Don't perform the operation if flag_signaling_nans is on
     and the operand is a signaling NaN.  */
  if (HONOR_SNANS (arg1)
      && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
    return NULL_TREE;

  /* With flag_rounding_math we should respect the current rounding mode
     unless the conversion is exact.  */
  if (HONOR_SIGN_DEPENDENT_ROUNDING (arg1)
      && !exact_real_truncate (TYPE_MODE (type), &TREE_REAL_CST (arg1)))
    return NULL_TREE;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	   && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
	   && REAL_VALUE_ISINF (value)
	   && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    /* Otherwise just propagate the operand's overflow flag.  */
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
2391 :
2392 : /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2393 : to a floating point type. */
2394 :
2395 : static tree
2396 0 : fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2397 : {
2398 0 : REAL_VALUE_TYPE value;
2399 0 : tree t;
2400 :
2401 0 : real_convert_from_fixed (&value, SCALAR_FLOAT_TYPE_MODE (type),
2402 0 : &TREE_FIXED_CST (arg1));
2403 0 : t = build_real (type, value);
2404 :
2405 0 : TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2406 0 : return t;
2407 : }
2408 :
2409 : /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2410 : to another fixed-point type. */
2411 :
2412 : static tree
2413 0 : fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2414 : {
2415 0 : FIXED_VALUE_TYPE value;
2416 0 : tree t;
2417 0 : bool overflow_p;
2418 :
2419 0 : overflow_p = fixed_convert (&value, SCALAR_TYPE_MODE (type),
2420 0 : &TREE_FIXED_CST (arg1), TYPE_SATURATING (type));
2421 0 : t = build_fixed (type, value);
2422 :
2423 : /* Propagate overflow flags. */
2424 0 : if (overflow_p | TREE_OVERFLOW (arg1))
2425 0 : TREE_OVERFLOW (t) = 1;
2426 0 : return t;
2427 : }
2428 :
2429 : /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
2430 : to a fixed-point type. */
2431 :
2432 : static tree
2433 0 : fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2434 : {
2435 0 : FIXED_VALUE_TYPE value;
2436 0 : tree t;
2437 0 : bool overflow_p;
2438 0 : double_int di;
2439 :
2440 0 : gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2441 :
2442 0 : di.low = TREE_INT_CST_ELT (arg1, 0);
2443 0 : if (TREE_INT_CST_NUNITS (arg1) == 1)
2444 0 : di.high = (HOST_WIDE_INT) di.low < 0 ? HOST_WIDE_INT_M1 : 0;
2445 : else
2446 0 : di.high = TREE_INT_CST_ELT (arg1, 1);
2447 :
2448 0 : overflow_p = fixed_convert_from_int (&value, SCALAR_TYPE_MODE (type), di,
2449 0 : TYPE_UNSIGNED (TREE_TYPE (arg1)),
2450 0 : TYPE_SATURATING (type));
2451 0 : t = build_fixed (type, value);
2452 :
2453 : /* Propagate overflow flags. */
2454 0 : if (overflow_p | TREE_OVERFLOW (arg1))
2455 0 : TREE_OVERFLOW (t) = 1;
2456 0 : return t;
2457 : }
2458 :
2459 : /* A subroutine of fold_convert_const handling conversions a REAL_CST
2460 : to a fixed-point type. */
2461 :
2462 : static tree
2463 0 : fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2464 : {
2465 0 : FIXED_VALUE_TYPE value;
2466 0 : tree t;
2467 0 : bool overflow_p;
2468 :
2469 0 : overflow_p = fixed_convert_from_real (&value, SCALAR_TYPE_MODE (type),
2470 0 : &TREE_REAL_CST (arg1),
2471 0 : TYPE_SATURATING (type));
2472 0 : t = build_fixed (type, value);
2473 :
2474 : /* Propagate overflow flags. */
2475 0 : if (overflow_p | TREE_OVERFLOW (arg1))
2476 0 : TREE_OVERFLOW (t) = 1;
2477 0 : return t;
2478 : }
2479 :
2480 : /* Attempt to fold type conversion operation CODE of expression ARG1 to
2481 : type TYPE. If no simplification can be done return NULL_TREE. */
2482 :
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  tree arg_type = TREE_TYPE (arg1);
  /* Identical types: nothing to convert.  */
  if (arg_type == type)
    return arg1;

  /* We can't widen types, since the runtime value could overflow the
     original type before being extended to the new type.  */
  if (POLY_INT_CST_P (arg1)
      && (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
      && TYPE_PRECISION (type) <= TYPE_PRECISION (arg_type))
    return build_poly_int_cst (type,
			       poly_wide_int::from (poly_int_cst_value (arg1),
						    TYPE_PRECISION (type),
						    TYPE_SIGN (arg_type)));

  /* Dispatch on the destination type's class, then on the constant's
     kind within each class.  */
  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return int_const_convert (type, arg1, !POINTER_TYPE_P (arg_type));
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (SCALAR_FLOAT_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	{
	  tree res = build_real_from_int_cst (type, arg1);
	  /* Avoid the folding if flag_rounding_math is on and the
	     conversion is not exact.  */
	  if (HONOR_SIGN_DEPENDENT_ROUNDING (type))
	    {
	      /* Round-trip the result back to an integer; a mismatch
		 means the conversion was inexact.  */
	      bool fail = false;
	      wide_int w = real_to_integer (&TREE_REAL_CST (res), &fail,
					    TYPE_PRECISION (TREE_TYPE (arg1)));
	      if (fail || wi::ne_p (w, wi::to_wide (arg1)))
		return NULL_TREE;
	    }
	  return res;
	}
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (FIXED_POINT_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_fixed_from_real (type, arg1);
    }
  else if (VECTOR_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == VECTOR_CST
	  && known_eq (TYPE_VECTOR_SUBPARTS (type), VECTOR_CST_NELTS (arg1)))
	{
	  tree elttype = TREE_TYPE (type);
	  tree arg1_elttype = TREE_TYPE (TREE_TYPE (arg1));
	  /* We can't handle steps directly when extending, since the
	     values need to wrap at the original precision first.  */
	  bool step_ok_p
	    = (INTEGRAL_TYPE_P (elttype)
	       && INTEGRAL_TYPE_P (arg1_elttype)
	       && TYPE_PRECISION (elttype) <= TYPE_PRECISION (arg1_elttype));
	  tree_vector_builder v;
	  if (!v.new_unary_operation (type, arg1, step_ok_p))
	    return NULL_TREE;
	  /* Recursively convert each encoded element.  */
	  unsigned int len = v.encoded_nelts ();
	  for (unsigned int i = 0; i < len; ++i)
	    {
	      tree elt = VECTOR_CST_ELT (arg1, i);
	      tree cvt = fold_convert_const (code, elttype, elt);
	      if (cvt == NULL_TREE)
		return NULL_TREE;
	      v.quick_push (cvt);
	    }
	  return v.build ();
	}
    }
  else if (TREE_CODE (type) == NULLPTR_TYPE && integer_zerop (arg1))
    return build_zero_cst (type);
  return NULL_TREE;
}
2573 :
2574 : /* Construct a vector of zero elements of vector type TYPE. */
2575 :
2576 : static tree
2577 17083 : build_zero_vector (tree type)
2578 : {
2579 17083 : tree t;
2580 :
2581 17083 : t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2582 17083 : return build_vector_from_val (type, t);
2583 : }
2584 :
2585 : /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
2586 :
2587 : bool
2588 5452 : fold_convertible_p (const_tree type, const_tree arg)
2589 : {
2590 5452 : const_tree orig = TREE_TYPE (arg);
2591 :
2592 5452 : if (type == orig)
2593 : return true;
2594 :
2595 5452 : if (TREE_CODE (arg) == ERROR_MARK
2596 5452 : || TREE_CODE (type) == ERROR_MARK
2597 5452 : || TREE_CODE (orig) == ERROR_MARK)
2598 : return false;
2599 :
2600 5452 : if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2601 : return true;
2602 :
2603 5452 : switch (TREE_CODE (type))
2604 : {
2605 3787 : case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2606 3787 : case POINTER_TYPE: case REFERENCE_TYPE:
2607 3787 : case OFFSET_TYPE:
2608 3787 : return (INTEGRAL_TYPE_P (orig)
2609 374 : || (POINTER_TYPE_P (orig)
2610 239 : && TYPE_PRECISION (type) <= TYPE_PRECISION (orig))
2611 3922 : || TREE_CODE (orig) == OFFSET_TYPE);
2612 :
2613 130 : case REAL_TYPE:
2614 130 : case FIXED_POINT_TYPE:
2615 130 : case VOID_TYPE:
2616 130 : return TREE_CODE (type) == TREE_CODE (orig);
2617 :
2618 209 : case VECTOR_TYPE:
2619 209 : return (VECTOR_TYPE_P (orig)
2620 322 : && known_eq (TYPE_VECTOR_SUBPARTS (type),
2621 : TYPE_VECTOR_SUBPARTS (orig))
2622 226 : && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2623 :
2624 : default:
2625 : return false;
2626 : }
2627 : }
2628 :
2629 : /* Convert expression ARG to type TYPE. Used by the middle-end for
2630 : simple conversions in preference to calling the front-end's convert. */
2631 :
2632 : tree
2633 2238991292 : fold_convert_loc (location_t loc, tree type, tree arg)
2634 : {
2635 2238991292 : tree orig = TREE_TYPE (arg);
2636 2238991292 : tree tem;
2637 :
2638 2238991292 : if (type == orig)
2639 : return arg;
2640 :
2641 1515216777 : if (TREE_CODE (arg) == ERROR_MARK
2642 1515215759 : || TREE_CODE (type) == ERROR_MARK
2643 1515215758 : || TREE_CODE (orig) == ERROR_MARK)
2644 1019 : return error_mark_node;
2645 :
2646 1515215758 : switch (TREE_CODE (type))
2647 : {
2648 100172556 : case POINTER_TYPE:
2649 100172556 : case REFERENCE_TYPE:
2650 : /* Handle conversions between pointers to different address spaces. */
2651 100172556 : if (POINTER_TYPE_P (orig)
2652 100172556 : && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2653 85378122 : != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2654 124 : return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2655 : /* fall through */
2656 :
2657 1483776884 : case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2658 1483776884 : case OFFSET_TYPE: case BITINT_TYPE:
2659 1483776884 : if (TREE_CODE (arg) == INTEGER_CST)
2660 : {
2661 1260021791 : tem = fold_convert_const (NOP_EXPR, type, arg);
2662 1260021791 : if (tem != NULL_TREE)
2663 : return tem;
2664 : }
2665 223755093 : if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2666 2381 : || TREE_CODE (orig) == OFFSET_TYPE)
2667 223755093 : return fold_build1_loc (loc, NOP_EXPR, type, arg);
2668 0 : if (TREE_CODE (orig) == COMPLEX_TYPE)
2669 0 : return fold_convert_loc (loc, type,
2670 : fold_build1_loc (loc, REALPART_EXPR,
2671 0 : TREE_TYPE (orig), arg));
2672 0 : gcc_assert (VECTOR_TYPE_P (orig)
2673 : && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2674 0 : return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2675 :
2676 541215 : case REAL_TYPE:
2677 541215 : if (TREE_CODE (arg) == INTEGER_CST)
2678 : {
2679 57300 : tem = fold_convert_const (FLOAT_EXPR, type, arg);
2680 57300 : if (tem != NULL_TREE)
2681 : return tem;
2682 : }
2683 483915 : else if (TREE_CODE (arg) == REAL_CST)
2684 : {
2685 117302 : tem = fold_convert_const (NOP_EXPR, type, arg);
2686 117302 : if (tem != NULL_TREE)
2687 : return tem;
2688 : }
2689 366613 : else if (TREE_CODE (arg) == FIXED_CST)
2690 : {
2691 0 : tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2692 0 : if (tem != NULL_TREE)
2693 : return tem;
2694 : }
2695 :
2696 366615 : switch (TREE_CODE (orig))
2697 : {
2698 651 : case INTEGER_TYPE: case BITINT_TYPE:
2699 651 : case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2700 651 : case POINTER_TYPE: case REFERENCE_TYPE:
2701 651 : return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2702 :
2703 365964 : case REAL_TYPE:
2704 365964 : return fold_build1_loc (loc, NOP_EXPR, type, arg);
2705 :
2706 0 : case FIXED_POINT_TYPE:
2707 0 : return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2708 :
2709 0 : case COMPLEX_TYPE:
2710 0 : tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2711 0 : return fold_convert_loc (loc, type, tem);
2712 :
2713 0 : default:
2714 0 : gcc_unreachable ();
2715 : }
2716 :
2717 0 : case FIXED_POINT_TYPE:
2718 0 : if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2719 0 : || TREE_CODE (arg) == REAL_CST)
2720 : {
2721 0 : tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2722 0 : if (tem != NULL_TREE)
2723 0 : goto fold_convert_exit;
2724 : }
2725 :
2726 0 : switch (TREE_CODE (orig))
2727 : {
2728 0 : case FIXED_POINT_TYPE:
2729 0 : case INTEGER_TYPE:
2730 0 : case ENUMERAL_TYPE:
2731 0 : case BOOLEAN_TYPE:
2732 0 : case REAL_TYPE:
2733 0 : case BITINT_TYPE:
2734 0 : return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2735 :
2736 0 : case COMPLEX_TYPE:
2737 0 : tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2738 0 : return fold_convert_loc (loc, type, tem);
2739 :
2740 0 : default:
2741 0 : gcc_unreachable ();
2742 : }
2743 :
2744 2263 : case COMPLEX_TYPE:
2745 2263 : switch (TREE_CODE (orig))
2746 : {
2747 584 : case INTEGER_TYPE: case BITINT_TYPE:
2748 584 : case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2749 584 : case POINTER_TYPE: case REFERENCE_TYPE:
2750 584 : case REAL_TYPE:
2751 584 : case FIXED_POINT_TYPE:
2752 1168 : return fold_build2_loc (loc, COMPLEX_EXPR, type,
2753 584 : fold_convert_loc (loc, TREE_TYPE (type), arg),
2754 584 : fold_convert_loc (loc, TREE_TYPE (type),
2755 584 : integer_zero_node));
2756 1679 : case COMPLEX_TYPE:
2757 1679 : {
2758 1679 : tree rpart, ipart;
2759 :
2760 1679 : if (TREE_CODE (arg) == COMPLEX_EXPR)
2761 : {
2762 1534 : rpart = fold_convert_loc (loc, TREE_TYPE (type),
2763 1534 : TREE_OPERAND (arg, 0));
2764 1534 : ipart = fold_convert_loc (loc, TREE_TYPE (type),
2765 1534 : TREE_OPERAND (arg, 1));
2766 1534 : return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2767 : }
2768 :
2769 145 : arg = save_expr (arg);
2770 145 : rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2771 145 : ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2772 145 : rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2773 145 : ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2774 145 : return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2775 : }
2776 :
2777 0 : default:
2778 0 : gcc_unreachable ();
2779 : }
2780 :
2781 30781868 : case VECTOR_TYPE:
2782 30781868 : if (integer_zerop (arg))
2783 17083 : return build_zero_vector (type);
2784 30764785 : gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2785 30764785 : gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2786 : || VECTOR_TYPE_P (orig));
2787 30764785 : return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2788 :
2789 109666 : case VOID_TYPE:
2790 109666 : tem = fold_ignored_result (arg);
2791 109666 : return fold_build1_loc (loc, NOP_EXPR, type, tem);
2792 :
2793 63 : case NULLPTR_TYPE:
2794 63 : if (integer_zerop (arg))
2795 17 : return build_zero_cst (type);
2796 : /* FALLTHRU */
2797 3721 : default:
2798 3721 : if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2799 3721 : return fold_build1_loc (loc, NOP_EXPR, type, arg);
2800 0 : gcc_unreachable ();
2801 : }
2802 0 : fold_convert_exit:
2803 0 : tem = protected_set_expr_location_unshare (tem, loc);
2804 0 : return tem;
2805 : }
2806 :
2807 : /* Return false if expr can be assumed not to be an lvalue, true
2808 : otherwise. */
2809 :
2810 : static bool
2811 62164808 : maybe_lvalue_p (const_tree x)
2812 : {
2813 : /* We only need to wrap lvalue tree codes. */
2814 62164808 : switch (TREE_CODE (x))
2815 : {
2816 : case VAR_DECL:
2817 : case PARM_DECL:
2818 : case RESULT_DECL:
2819 : case LABEL_DECL:
2820 : case FUNCTION_DECL:
2821 : case SSA_NAME:
2822 : case COMPOUND_LITERAL_EXPR:
2823 :
2824 : case COMPONENT_REF:
2825 : case MEM_REF:
2826 : case INDIRECT_REF:
2827 : case ARRAY_REF:
2828 : case ARRAY_RANGE_REF:
2829 : case BIT_FIELD_REF:
2830 : case OBJ_TYPE_REF:
2831 :
2832 : case REALPART_EXPR:
2833 : case IMAGPART_EXPR:
2834 : case PREINCREMENT_EXPR:
2835 : case PREDECREMENT_EXPR:
2836 : case SAVE_EXPR:
2837 : case TRY_CATCH_EXPR:
2838 : case WITH_CLEANUP_EXPR:
2839 : case COMPOUND_EXPR:
2840 : case MODIFY_EXPR:
2841 : case TARGET_EXPR:
2842 : case COND_EXPR:
2843 : case BIND_EXPR:
2844 : case VIEW_CONVERT_EXPR:
2845 : break;
2846 :
2847 46391174 : default:
2848 : /* Assume the worst for front-end tree codes. */
2849 46391174 : if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2850 : break;
2851 : return false;
2852 : }
2853 :
2854 15977155 : return true;
2855 : }
2856 :
2857 : /* Return an expr equal to X but certainly not valid as an lvalue. */
2858 :
2859 : tree
2860 49297433 : non_lvalue_loc (location_t loc, tree x)
2861 : {
2862 : /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2863 : us. */
2864 49297433 : if (in_gimple_form)
2865 : return x;
2866 :
2867 13501258 : if (! maybe_lvalue_p (x))
2868 : return x;
2869 3392094 : return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2870 : }
2871 :
2872 : /* Given a tree comparison code, return the code that is the logical inverse.
2873 : It is generally not safe to do this for floating-point comparisons, except
2874 : for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2875 : ERROR_MARK in this case. */
2876 :
2877 : enum tree_code
2878 123147183 : invert_tree_comparison (enum tree_code code, bool honor_nans)
2879 : {
2880 123147183 : if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2881 987088 : && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2882 : return ERROR_MARK;
2883 :
2884 122397150 : switch (code)
2885 : {
2886 : case EQ_EXPR:
2887 : return NE_EXPR;
2888 53170663 : case NE_EXPR:
2889 53170663 : return EQ_EXPR;
2890 11721908 : case GT_EXPR:
2891 11721908 : return honor_nans ? UNLE_EXPR : LE_EXPR;
2892 16676918 : case GE_EXPR:
2893 16676918 : return honor_nans ? UNLT_EXPR : LT_EXPR;
2894 7497454 : case LT_EXPR:
2895 7497454 : return honor_nans ? UNGE_EXPR : GE_EXPR;
2896 7683289 : case LE_EXPR:
2897 7683289 : return honor_nans ? UNGT_EXPR : GT_EXPR;
2898 252 : case LTGT_EXPR:
2899 252 : return UNEQ_EXPR;
2900 289 : case UNEQ_EXPR:
2901 289 : return LTGT_EXPR;
2902 : case UNGT_EXPR:
2903 : return LE_EXPR;
2904 : case UNGE_EXPR:
2905 : return LT_EXPR;
2906 : case UNLT_EXPR:
2907 : return GE_EXPR;
2908 : case UNLE_EXPR:
2909 : return GT_EXPR;
2910 209235 : case ORDERED_EXPR:
2911 209235 : return UNORDERED_EXPR;
2912 56010 : case UNORDERED_EXPR:
2913 56010 : return ORDERED_EXPR;
2914 0 : default:
2915 0 : gcc_unreachable ();
2916 : }
2917 : }
2918 :
2919 : /* Similar, but return the comparison that results if the operands are
2920 : swapped. This is safe for floating-point. */
2921 :
2922 : enum tree_code
2923 167264518 : swap_tree_comparison (enum tree_code code)
2924 : {
2925 167264518 : switch (code)
2926 : {
2927 : case EQ_EXPR:
2928 : case NE_EXPR:
2929 : case ORDERED_EXPR:
2930 : case UNORDERED_EXPR:
2931 : case LTGT_EXPR:
2932 : case UNEQ_EXPR:
2933 : return code;
2934 38417976 : case GT_EXPR:
2935 38417976 : return LT_EXPR;
2936 11448687 : case GE_EXPR:
2937 11448687 : return LE_EXPR;
2938 22299688 : case LT_EXPR:
2939 22299688 : return GT_EXPR;
2940 17396267 : case LE_EXPR:
2941 17396267 : return GE_EXPR;
2942 229910 : case UNGT_EXPR:
2943 229910 : return UNLT_EXPR;
2944 19431 : case UNGE_EXPR:
2945 19431 : return UNLE_EXPR;
2946 344236 : case UNLT_EXPR:
2947 344236 : return UNGT_EXPR;
2948 105957 : case UNLE_EXPR:
2949 105957 : return UNGE_EXPR;
2950 0 : default:
2951 0 : gcc_unreachable ();
2952 : }
2953 : }
2954 :
2955 :
2956 : /* Convert a comparison tree code from an enum tree_code representation
2957 : into a compcode bit-based encoding. This function is the inverse of
2958 : compcode_to_comparison. */
2959 :
2960 : static enum comparison_code
2961 56020 : comparison_to_compcode (enum tree_code code)
2962 : {
2963 56020 : switch (code)
2964 : {
2965 : case LT_EXPR:
2966 : return COMPCODE_LT;
2967 : case EQ_EXPR:
2968 : return COMPCODE_EQ;
2969 : case LE_EXPR:
2970 : return COMPCODE_LE;
2971 : case GT_EXPR:
2972 : return COMPCODE_GT;
2973 : case NE_EXPR:
2974 : return COMPCODE_NE;
2975 : case GE_EXPR:
2976 : return COMPCODE_GE;
2977 : case ORDERED_EXPR:
2978 : return COMPCODE_ORD;
2979 : case UNORDERED_EXPR:
2980 : return COMPCODE_UNORD;
2981 : case UNLT_EXPR:
2982 : return COMPCODE_UNLT;
2983 : case UNEQ_EXPR:
2984 : return COMPCODE_UNEQ;
2985 : case UNLE_EXPR:
2986 : return COMPCODE_UNLE;
2987 : case UNGT_EXPR:
2988 : return COMPCODE_UNGT;
2989 : case LTGT_EXPR:
2990 : return COMPCODE_LTGT;
2991 : case UNGE_EXPR:
2992 : return COMPCODE_UNGE;
2993 0 : default:
2994 0 : gcc_unreachable ();
2995 : }
2996 : }
2997 :
2998 : /* Convert a compcode bit-based encoding of a comparison operator back
2999 : to GCC's enum tree_code representation. This function is the
3000 : inverse of comparison_to_compcode. */
3001 :
3002 : static enum tree_code
3003 13952 : compcode_to_comparison (enum comparison_code code)
3004 : {
3005 13952 : switch (code)
3006 : {
3007 : case COMPCODE_LT:
3008 : return LT_EXPR;
3009 : case COMPCODE_EQ:
3010 : return EQ_EXPR;
3011 : case COMPCODE_LE:
3012 : return LE_EXPR;
3013 : case COMPCODE_GT:
3014 : return GT_EXPR;
3015 : case COMPCODE_NE:
3016 : return NE_EXPR;
3017 : case COMPCODE_GE:
3018 : return GE_EXPR;
3019 : case COMPCODE_ORD:
3020 : return ORDERED_EXPR;
3021 : case COMPCODE_UNORD:
3022 : return UNORDERED_EXPR;
3023 : case COMPCODE_UNLT:
3024 : return UNLT_EXPR;
3025 : case COMPCODE_UNEQ:
3026 : return UNEQ_EXPR;
3027 : case COMPCODE_UNLE:
3028 : return UNLE_EXPR;
3029 : case COMPCODE_UNGT:
3030 : return UNGT_EXPR;
3031 : case COMPCODE_LTGT:
3032 : return LTGT_EXPR;
3033 : case COMPCODE_UNGE:
3034 : return UNGE_EXPR;
3035 0 : default:
3036 0 : gcc_unreachable ();
3037 : }
3038 : }
3039 :
3040 : /* Return true if COND1 tests the opposite condition of COND2. */
3041 :
3042 : bool
3043 1735677 : inverse_conditions_p (const_tree cond1, const_tree cond2)
3044 : {
3045 1735677 : return (COMPARISON_CLASS_P (cond1)
3046 1645512 : && COMPARISON_CLASS_P (cond2)
3047 1634558 : && (invert_tree_comparison
3048 1634558 : (TREE_CODE (cond1),
3049 3269116 : HONOR_NANS (TREE_OPERAND (cond1, 0))) == TREE_CODE (cond2))
3050 67445 : && operand_equal_p (TREE_OPERAND (cond1, 0),
3051 67445 : TREE_OPERAND (cond2, 0), 0)
3052 1757235 : && operand_equal_p (TREE_OPERAND (cond1, 1),
3053 21558 : TREE_OPERAND (cond2, 1), 0));
3054 : }
3055 :
/* Return a tree for the comparison which is the combination of
   doing the AND or OR (depending on CODE) of the two operations LCODE
   and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
   the possibility of trapping if the mode has NaNs, and return NULL_TREE
   if this makes the transformation invalid.  */

tree
combine_comparisons (location_t loc,
		     enum tree_code code, enum tree_code lcode,
		     enum tree_code rcode, tree truth_type,
		     tree ll_arg, tree lr_arg)
{
  bool honor_nans = HONOR_NANS (ll_arg);
  enum comparison_code lcompcode = comparison_to_compcode (lcode);
  enum comparison_code rcompcode = comparison_to_compcode (rcode);
  int compcode;

  /* In the compcode encoding, conjunction and disjunction of two
     comparisons correspond directly to bitwise AND and OR of their
     encodings.  Any other combining code is not handled here.  */
  switch (code)
    {
    case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
      compcode = lcompcode & rcompcode;
      break;

    case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
      compcode = lcompcode | rcompcode;
      break;

    default:
      return NULL_TREE;
    }

  if (!honor_nans)
    {
      /* Eliminate unordered comparisons, as well as LTGT and ORD
	 which are not used unless the mode has NaNs.  */
      compcode &= ~COMPCODE_UNORD;
      if (compcode == COMPCODE_LTGT)
	compcode = COMPCODE_NE;
      else if (compcode == COMPCODE_ORD)
	compcode = COMPCODE_TRUE;
    }
  else if (flag_trapping_math)
    {
      /* Check that the original operation and the optimized ones will trap
	 under the same condition.  */
      /* An ordered comparison other than EQ traps on unordered operands;
	 compute whether each side and the combined comparison traps.  */
      bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
		   && (lcompcode != COMPCODE_EQ)
		   && (lcompcode != COMPCODE_ORD);
      bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
		   && (rcompcode != COMPCODE_EQ)
		   && (rcompcode != COMPCODE_ORD);
      bool trap = (compcode & COMPCODE_UNORD) == 0
		  && (compcode != COMPCODE_EQ)
		  && (compcode != COMPCODE_ORD);

      /* In a short-circuited boolean expression the LHS might be
	 such that the RHS, if evaluated, will never trap.  For
	 example, in ORD (x, y) && (x < y), we evaluate the RHS only
	 if neither x nor y is NaN.  (This is a mixed blessing: for
	 example, the expression above will never trap, hence
	 optimizing it to x < y would be invalid).  */
      if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
	  || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
	rtrap = false;

      /* If the comparison was short-circuited, and only the RHS
	 trapped, we may now generate a spurious trap.  */
      if (rtrap && !ltrap
	  && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
	return NULL_TREE;

      /* If we changed the conditions that cause a trap, we lose.  */
      if ((ltrap || rtrap) != trap)
	return NULL_TREE;
    }

  /* COMPCODE_TRUE/FALSE fold to constants; anything else maps back to
     a single comparison tree code.  */
  if (compcode == COMPCODE_TRUE)
    return constant_boolean_node (true, truth_type);
  else if (compcode == COMPCODE_FALSE)
    return constant_boolean_node (false, truth_type);
  else
    {
      enum tree_code tcode;

      tcode = compcode_to_comparison ((enum comparison_code) compcode);
      return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
    }
}
3144 :
3145 : /* Return nonzero if two operands (typically of the same tree node)
3146 : are necessarily equal. FLAGS modifies behavior as follows:
3147 :
3148 : If OEP_ONLY_CONST is set, only return nonzero for constants.
3149 : This function tests whether the operands are indistinguishable;
3150 : it does not test whether they are equal using C's == operation.
3151 : The distinction is important for IEEE floating point, because
3152 : (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
3153 : (2) two NaNs may be indistinguishable, but NaN!=NaN.
3154 :
3155 : If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
3156 : even though it may hold multiple values during a function.
3157 : This is because a GCC tree node guarantees that nothing else is
3158 : executed between the evaluation of its "operands" (which may often
3159 : be evaluated in arbitrary order). Hence if the operands themselves
3160 : don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
3161 : same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
3162 : unset means assuming isochronic (or instantaneous) tree equivalence.
3163 : Unless comparing arbitrary expression trees, such as from different
3164 : statements, this flag can usually be left unset.
3165 :
3166 : If OEP_PURE_SAME is set, then pure functions with identical arguments
3167 : are considered the same. It is used when the caller has other ways
3168 : to ensure that global memory is unchanged in between.
3169 :
3170 : If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
3171 : not values of expressions.
3172 :
3173 : If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
3174 : such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.
3175 :
3176 : If OEP_BITWISE is set, then require the values to be bitwise identical
3177 : rather than simply numerically equal. Do not take advantage of things
3178 : like math-related flags or undefined behavior; only return true for
3179 : values that are provably bitwise identical in all circumstances.
3180 :
3181 : If OEP_ASSUME_WRAPV is set, then require the values to be bitwise identical
3182 : under two's compliment arithmetic (ignoring any possible Undefined Behaviour)
3183 : rather than just numerically equivalent. The compared expressions must
3184 : however perform the same operations but may do intermediate computations in
3185 : differing signs. Because this comparison ignores any possible UB it cannot
3186 : be used blindly without ensuring that the context you are using it in itself
3187 : doesn't guarantee that there will be no UB. Conditional expressions are
3188 : excluded from this relaxation.
3189 :
3190 : When OEP_ASSUME_WRAPV is used operand_compare::hash_operand may return
3191 : differing hashes even for cases where operand_compare::operand_equal_p
3192 : compares equal.
3193 :
3194 : Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
3195 : any operand with side effect. This is unnecesarily conservative in the
3196 : case we know that arg0 and arg1 are in disjoint code paths (such as in
3197 : ?: operator). In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
3198 : addresses with TREE_CONSTANT flag set so we know that &var == &var
3199 : even if var is volatile. */
3200 :
3201 : bool
3202 7141377561 : operand_compare::operand_equal_p (const_tree arg0, const_tree arg1,
3203 : unsigned int flags)
3204 : {
3205 7141377561 : return operand_equal_p (TREE_TYPE (arg0), arg0, TREE_TYPE (arg1), arg1, flags);
3206 : }
3207 :
3208 : /* The same as operand_equal_p however the type of ARG0 and ARG1 are assumed to
3209 : be the TYPE0 and TYPE1 respectively. TYPE0 and TYPE1 represent the type the
3210 : expression is being compared under for equality. This means that they can
3211 : differ from the actual TREE_TYPE (..) value of ARG0 and ARG1. */
3212 :
3213 : bool
3214 7142108262 : operand_compare::operand_equal_p (tree type0, const_tree arg0,
3215 : tree type1, const_tree arg1,
3216 : unsigned int flags)
3217 : {
3218 7142108262 : bool r;
3219 7142108262 : if (verify_hash_value (arg0, arg1, flags, &r))
3220 3004494597 : return r;
3221 :
3222 4137613665 : STRIP_ANY_LOCATION_WRAPPER (arg0);
3223 4137613665 : STRIP_ANY_LOCATION_WRAPPER (arg1);
3224 :
3225 : /* If either is ERROR_MARK, they aren't equal. */
3226 4137613665 : if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
3227 4137613053 : || type0 == error_mark_node
3228 4137613051 : || type1 == error_mark_node)
3229 : return false;
3230 :
3231 : /* Similar, if either does not have a type (like a template id),
3232 : they aren't equal. */
3233 4137613050 : if (!type0 || !type1)
3234 : return false;
3235 :
3236 : /* Bitwise identity makes no sense if the values have different layouts. */
3237 4137610374 : if ((flags & OEP_BITWISE)
3238 4137610374 : && !tree_nop_conversion_p (type0, type1))
3239 : return false;
3240 :
3241 : /* We cannot consider pointers to different address space equal. */
3242 4137610374 : if (POINTER_TYPE_P (type0)
3243 617066992 : && POINTER_TYPE_P (type1)
3244 4659863801 : && (TYPE_ADDR_SPACE (TREE_TYPE (type0))
3245 522253427 : != TYPE_ADDR_SPACE (TREE_TYPE (type1))))
3246 : return false;
3247 :
3248 : /* Check equality of integer constants before bailing out due to
3249 : precision differences. */
3250 4137610183 : if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
3251 : {
3252 : /* Address of INTEGER_CST is not defined; check that we did not forget
3253 : to drop the OEP_ADDRESS_OF flags. */
3254 656371683 : gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3255 656371683 : return tree_int_cst_equal (arg0, arg1);
3256 : }
3257 :
3258 3481238500 : if ((flags & OEP_ASSUME_WRAPV)
3259 2075261 : && (CONVERT_EXPR_P (arg0) || CONVERT_EXPR_P (arg1)))
3260 : {
3261 786050 : const_tree t_arg0 = arg0;
3262 786050 : const_tree t_arg1 = arg1;
3263 786050 : STRIP_NOPS (arg0);
3264 786050 : STRIP_NOPS (arg1);
3265 : /* Only recurse if the conversion was one that was valid to strip. */
3266 786050 : if (t_arg0 != arg0 || t_arg1 != arg1)
3267 730701 : return operand_equal_p (type0, arg0, type1, arg1, flags);
3268 : }
3269 :
3270 3480507799 : if (!(flags & OEP_ADDRESS_OF))
3271 : {
3272 : /* Check if we are checking an operation where the two's compliment
3273 : bitwise representation of the result is not the same between signed and
3274 : unsigned arithmetic. */
3275 3096958609 : bool enforce_signedness = true;
3276 3096958609 : if (flags & OEP_ASSUME_WRAPV)
3277 : {
3278 1255429 : switch (TREE_CODE (arg0))
3279 : {
3280 : case PLUS_EXPR:
3281 : case MINUS_EXPR:
3282 : case MULT_EXPR:
3283 : case BIT_IOR_EXPR:
3284 : case BIT_XOR_EXPR:
3285 : case BIT_AND_EXPR:
3286 : case BIT_NOT_EXPR:
3287 : case ABS_EXPR:
3288 : CASE_CONVERT:
3289 : case SSA_NAME:
3290 : case INTEGER_CST:
3291 : case VAR_DECL:
3292 : case PARM_DECL:
3293 : case RESULT_DECL:
3294 3096958609 : enforce_signedness = false;
3295 : break;
3296 :
3297 : default:
3298 : break;
3299 : }
3300 : }
3301 :
3302 : /* If both types don't have the same signedness, then we can't consider
3303 : them equal. We must check this before the STRIP_NOPS calls
3304 : because they may change the signedness of the arguments. As pointers
3305 : strictly don't have a signedness, require either two pointers or
3306 : two non-pointers as well. */
3307 3096958609 : if (POINTER_TYPE_P (type0) != POINTER_TYPE_P (type1)
3308 3096958609 : || (TYPE_UNSIGNED (type0) != TYPE_UNSIGNED (type1)
3309 141732002 : && enforce_signedness))
3310 : return false;
3311 :
3312 : /* If both types don't have the same precision, then it is not safe
3313 : to strip NOPs. */
3314 2799097960 : if (element_precision (type0) != element_precision (type1))
3315 : return false;
3316 :
3317 2650957375 : STRIP_NOPS (arg0);
3318 2650957375 : STRIP_NOPS (arg1);
3319 :
3320 2650957375 : type0 = TREE_TYPE (arg0);
3321 2650957375 : type1 = TREE_TYPE (arg1);
3322 : }
3323 : #if 0
3324 : /* FIXME: Fortran FE currently produce ADDR_EXPR of NOP_EXPR. Enable the
3325 : sanity check once the issue is solved. */
3326 : else
3327 : /* Addresses of conversions and SSA_NAMEs (and many other things)
3328 : are not defined. Check that we did not forget to drop the
3329 : OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
3330 : gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
3331 : && TREE_CODE (arg0) != SSA_NAME);
3332 : #endif
3333 :
3334 : /* In case both args are comparisons but with different comparison
3335 : code, try to swap the comparison operands of one arg to produce
3336 : a match and compare that variant. */
3337 3034506565 : if (TREE_CODE (arg0) != TREE_CODE (arg1)
3338 1242845684 : && COMPARISON_CLASS_P (arg0)
3339 6781922 : && COMPARISON_CLASS_P (arg1))
3340 : {
3341 5085234 : enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3342 :
3343 5085234 : if (TREE_CODE (arg0) == swap_code)
3344 2212174 : return operand_equal_p (TREE_OPERAND (arg0, 0),
3345 2212174 : TREE_OPERAND (arg1, 1), flags)
3346 2231391 : && operand_equal_p (TREE_OPERAND (arg0, 1),
3347 19217 : TREE_OPERAND (arg1, 0), flags);
3348 : }
3349 :
3350 3032294391 : if (TREE_CODE (arg0) != TREE_CODE (arg1))
3351 : {
3352 : /* NOP_EXPR and CONVERT_EXPR are considered equal. */
3353 1240633510 : if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
3354 : ;
3355 1240573867 : else if (flags & OEP_ADDRESS_OF)
3356 : {
3357 : /* If we are interested in comparing addresses ignore
3358 : MEM_REF wrappings of the base that can appear just for
3359 : TBAA reasons. */
3360 47772865 : if (TREE_CODE (arg0) == MEM_REF
3361 7392322 : && DECL_P (arg1)
3362 5080030 : && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
3363 1083012 : && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
3364 48319751 : && integer_zerop (TREE_OPERAND (arg0, 1)))
3365 : return true;
3366 47551566 : else if (TREE_CODE (arg1) == MEM_REF
3367 30133055 : && DECL_P (arg0)
3368 10960481 : && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
3369 2187238 : && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
3370 48095763 : && integer_zerop (TREE_OPERAND (arg1, 1)))
3371 : return true;
3372 47172512 : return false;
3373 : }
3374 : else
3375 : return false;
3376 : }
3377 :
3378 : /* When not checking adddresses, this is needed for conversions and for
3379 : COMPONENT_REF. Might as well play it safe and always test this. */
3380 1791720524 : if (TREE_CODE (type0) == ERROR_MARK
3381 1791720524 : || TREE_CODE (type1) == ERROR_MARK
3382 3583441048 : || (TYPE_MODE (type0) != TYPE_MODE (type1)
3383 24404989 : && !(flags & OEP_ADDRESS_OF)))
3384 3704232 : return false;
3385 :
3386 : /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3387 : We don't care about side effects in that case because the SAVE_EXPR
3388 : takes care of that for us. In all other cases, two expressions are
3389 : equal if they have no side effects. If we have two identical
3390 : expressions with side effects that should be treated the same due
3391 : to the only side effects being identical SAVE_EXPR's, that will
3392 : be detected in the recursive calls below.
3393 : If we are taking an invariant address of two identical objects
3394 : they are necessarily equal as well. */
3395 317986954 : if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3396 2106003094 : && (TREE_CODE (arg0) == SAVE_EXPR
3397 317959674 : || (flags & OEP_MATCH_SIDE_EFFECTS)
3398 283310576 : || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3399 : return true;
3400 :
3401 : /* Next handle constant cases, those for which we can return 1 even
3402 : if ONLY_CONST is set. */
3403 1470183443 : if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3404 21591205 : switch (TREE_CODE (arg0))
3405 : {
3406 151 : case INTEGER_CST:
3407 151 : return tree_int_cst_equal (arg0, arg1);
3408 :
3409 0 : case FIXED_CST:
3410 0 : return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3411 : TREE_FIXED_CST (arg1));
3412 :
3413 3570042 : case REAL_CST:
3414 3570042 : if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
3415 : return true;
3416 :
3417 2555093 : if (!(flags & OEP_BITWISE) && !HONOR_SIGNED_ZEROS (arg0))
3418 : {
3419 : /* If we do not distinguish between signed and unsigned zero,
3420 : consider them equal. */
3421 14140 : if (real_zerop (arg0) && real_zerop (arg1))
3422 : return true;
3423 : }
3424 2555084 : return false;
3425 :
3426 785126 : case VECTOR_CST:
3427 785126 : {
3428 785126 : if (VECTOR_CST_LOG2_NPATTERNS (arg0)
3429 785126 : != VECTOR_CST_LOG2_NPATTERNS (arg1))
3430 : return false;
3431 :
3432 764535 : if (VECTOR_CST_NELTS_PER_PATTERN (arg0)
3433 764535 : != VECTOR_CST_NELTS_PER_PATTERN (arg1))
3434 : return false;
3435 :
3436 732289 : unsigned int count = vector_cst_encoded_nelts (arg0);
3437 1076374 : for (unsigned int i = 0; i < count; ++i)
3438 1692522 : if (!operand_equal_p (VECTOR_CST_ENCODED_ELT (arg0, i),
3439 846261 : VECTOR_CST_ENCODED_ELT (arg1, i), flags))
3440 : return false;
3441 : return true;
3442 : }
3443 :
3444 13696 : case COMPLEX_CST:
3445 13696 : return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3446 : flags)
3447 13696 : && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3448 : flags));
3449 :
3450 1696949 : case STRING_CST:
3451 1696949 : return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3452 1696949 : && ! memcmp (TREE_STRING_POINTER (arg0),
3453 1264609 : TREE_STRING_POINTER (arg1),
3454 1264609 : TREE_STRING_LENGTH (arg0)));
3455 :
3456 0 : case RAW_DATA_CST:
3457 0 : return (RAW_DATA_LENGTH (arg0) == RAW_DATA_LENGTH (arg1)
3458 0 : && ! memcmp (RAW_DATA_POINTER (arg0),
3459 0 : RAW_DATA_POINTER (arg1),
3460 0 : RAW_DATA_LENGTH (arg0)));
3461 :
3462 14379800 : case ADDR_EXPR:
3463 14379800 : gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3464 14379800 : return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3465 : flags | OEP_ADDRESS_OF
3466 14379800 : | OEP_MATCH_SIDE_EFFECTS);
3467 176828 : case CONSTRUCTOR:
3468 176828 : {
3469 : /* In GIMPLE empty constructors are allowed in initializers of
3470 : aggregates. */
3471 176828 : if (!CONSTRUCTOR_NELTS (arg0) && !CONSTRUCTOR_NELTS (arg1))
3472 : return true;
3473 :
3474 : /* See sem_variable::equals in ipa-icf for a similar approach. */
3475 137904 : if (TREE_CODE (type0) != TREE_CODE (type1))
3476 : return false;
3477 137904 : else if (TREE_CODE (type0) == ARRAY_TYPE)
3478 : {
3479 : /* For arrays, check that the sizes all match. */
3480 44 : const HOST_WIDE_INT siz0 = int_size_in_bytes (type0);
3481 44 : if (TYPE_MODE (type0) != TYPE_MODE (type1)
3482 44 : || siz0 < 0
3483 88 : || siz0 != int_size_in_bytes (type1))
3484 0 : return false;
3485 : }
3486 137860 : else if (!types_compatible_p (type0, type1))
3487 : return false;
3488 :
3489 137904 : vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3490 137904 : vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3491 413712 : if (vec_safe_length (v0) != vec_safe_length (v1))
3492 : return false;
3493 :
3494 : /* Address of CONSTRUCTOR is defined in GENERIC to mean the value
3495 : of the CONSTRUCTOR referenced indirectly. */
3496 137904 : flags &= ~(OEP_ADDRESS_OF | OEP_ASSUME_WRAPV);
3497 :
3498 353389743 : for (unsigned idx = 0; idx < vec_safe_length (v0); ++idx)
3499 : {
3500 196500 : constructor_elt *c0 = &(*v0)[idx];
3501 196500 : constructor_elt *c1 = &(*v1)[idx];
3502 :
3503 : /* Check that the values are the same... */
3504 196500 : if (c0->value != c1->value
3505 196500 : && !operand_equal_p (c0->value, c1->value, flags))
3506 : return false;
3507 :
3508 : /* ... and that they apply to the same field! */
3509 107936 : if (c0->index != c1->index
3510 107936 : && (TREE_CODE (type0) == ARRAY_TYPE
3511 0 : ? !operand_equal_p (c0->index, c1->index, flags)
3512 0 : : !operand_equal_p (DECL_FIELD_OFFSET (c0->index),
3513 0 : DECL_FIELD_OFFSET (c1->index),
3514 : flags)
3515 0 : || !operand_equal_p (DECL_FIELD_BIT_OFFSET (c0->index),
3516 0 : DECL_FIELD_BIT_OFFSET (c1->index),
3517 : flags)))
3518 0 : return false;
3519 : }
3520 :
3521 : return true;
3522 : }
3523 :
3524 : default:
3525 : break;
3526 : }
3527 :
3528 : /* Don't handle more cases for OEP_BITWISE, since we can't guarantee that
3529 : two instances of undefined behavior will give identical results. */
3530 1449560851 : if (flags & (OEP_ONLY_CONST | OEP_BITWISE))
3531 : return false;
3532 :
3533 : /* Define macros to test an operand from arg0 and arg1 for equality and a
3534 : variant that allows null and views null as being different from any
3535 : non-null value. In the latter case, if either is null, the both
3536 : must be; otherwise, do the normal comparison. */
3537 : #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3538 : TREE_OPERAND (arg1, N), flags)
3539 :
3540 : #define OP_SAME_WITH_NULL(N) \
3541 : ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3542 : ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3543 :
3544 1449560851 : switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3545 : {
3546 7656112 : case tcc_unary:
3547 : /* Two conversions are equal only if signedness and modes match. */
3548 7656112 : switch (TREE_CODE (arg0))
3549 : {
3550 7307206 : CASE_CONVERT:
3551 7307206 : case FIX_TRUNC_EXPR:
3552 7307206 : if (TYPE_UNSIGNED (type0) != TYPE_UNSIGNED (type1))
3553 : return false;
3554 : break;
3555 : default:
3556 : break;
3557 : }
3558 :
3559 7656091 : return OP_SAME_WITH_NULL (0);
3560 :
3561 :
3562 22358666 : case tcc_comparison:
3563 22358666 : case tcc_binary:
3564 22358666 : if (OP_SAME (0) && OP_SAME (1))
3565 : return true;
3566 :
3567 : /* For commutative ops, allow the other order. */
3568 16411618 : return (commutative_tree_code (TREE_CODE (arg0))
3569 12600050 : && operand_equal_p (TREE_OPERAND (arg0, 0),
3570 12600050 : TREE_OPERAND (arg1, 1), flags)
3571 16628295 : && operand_equal_p (TREE_OPERAND (arg0, 1),
3572 216677 : TREE_OPERAND (arg1, 0), flags));
3573 :
3574 871055007 : case tcc_reference:
3575 : /* If either of the pointer (or reference) expressions we are
3576 : dereferencing contain a side effect, these cannot be equal,
3577 : but their addresses can be. */
3578 871055007 : if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
3579 871055007 : && (TREE_SIDE_EFFECTS (arg0)
3580 809308619 : || TREE_SIDE_EFFECTS (arg1)))
3581 : return false;
3582 :
3583 870561511 : switch (TREE_CODE (arg0))
3584 : {
3585 5865919 : case INDIRECT_REF:
3586 5865919 : if (!(flags & OEP_ADDRESS_OF))
3587 : {
3588 5844164 : if (TYPE_ALIGN (type0) != TYPE_ALIGN (type1))
3589 : return false;
3590 : /* Verify that the access types are compatible. */
3591 5838686 : if (TYPE_MAIN_VARIANT (type0) != TYPE_MAIN_VARIANT (type1))
3592 : return false;
3593 : }
3594 5801451 : flags &= ~(OEP_ADDRESS_OF | OEP_ASSUME_WRAPV);
3595 5801451 : return OP_SAME (0);
3596 :
3597 654825 : case IMAGPART_EXPR:
3598 : /* Require the same offset. */
3599 654825 : if (!operand_equal_p (TYPE_SIZE (type0),
3600 654825 : TYPE_SIZE (type1),
3601 : flags & ~(OEP_ADDRESS_OF | OEP_ASSUME_WRAPV)))
3602 : return false;
3603 :
3604 : /* Fallthru. */
3605 2425958 : case REALPART_EXPR:
3606 2425958 : case VIEW_CONVERT_EXPR:
3607 2425958 : return OP_SAME (0);
3608 :
3609 78228046 : case TARGET_MEM_REF:
3610 78228046 : case MEM_REF:
3611 78228046 : if (!(flags & OEP_ADDRESS_OF))
3612 : {
3613 : /* Require equal access sizes */
3614 16290058 : if (TYPE_SIZE (type0) != TYPE_SIZE (type1)
3615 16290058 : && (!TYPE_SIZE (type0)
3616 1117222 : || !TYPE_SIZE (type1)
3617 1112285 : || !operand_equal_p (TYPE_SIZE (type0),
3618 1112285 : TYPE_SIZE (type1),
3619 : flags)))
3620 1113834 : return false;
3621 : /* Verify that access happens in similar types. */
3622 15176224 : if (!types_compatible_p (type0, type1))
3623 : return false;
3624 : /* Verify that accesses are TBAA compatible. */
3625 14833217 : if (!alias_ptr_types_compatible_p
3626 14833217 : (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3627 14833217 : TREE_TYPE (TREE_OPERAND (arg1, 1)))
3628 13967073 : || (MR_DEPENDENCE_CLIQUE (arg0)
3629 13967073 : != MR_DEPENDENCE_CLIQUE (arg1))
3630 27096166 : || (MR_DEPENDENCE_BASE (arg0)
3631 12262949 : != MR_DEPENDENCE_BASE (arg1)))
3632 : return false;
3633 : /* Verify that alignment is compatible. */
3634 11769396 : if (TYPE_ALIGN (type0) != TYPE_ALIGN (type1))
3635 : return false;
3636 : }
3637 73538466 : flags &= ~(OEP_ADDRESS_OF | OEP_ASSUME_WRAPV);
3638 125208142 : return (OP_SAME (0) && OP_SAME (1)
3639 : /* TARGET_MEM_REF require equal extra operands. */
3640 97858192 : && (TREE_CODE (arg0) != TARGET_MEM_REF
3641 580923 : || (OP_SAME_WITH_NULL (2)
3642 274305 : && OP_SAME_WITH_NULL (3)
3643 268422 : && OP_SAME_WITH_NULL (4))));
3644 :
3645 38220339 : case ARRAY_REF:
3646 38220339 : case ARRAY_RANGE_REF:
3647 38220339 : if (!OP_SAME (0))
3648 : return false;
3649 33374703 : flags &= ~(OEP_ADDRESS_OF | OEP_ASSUME_WRAPV);
3650 : /* Compare the array index by value if it is constant first as we
3651 : may have different types but same value here. */
3652 33374703 : return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3653 33374703 : TREE_OPERAND (arg1, 1))
3654 30368073 : || OP_SAME (1))
3655 6045282 : && OP_SAME_WITH_NULL (2)
3656 6043738 : && OP_SAME_WITH_NULL (3)
3657 : /* Compare low bound and element size as with OEP_ADDRESS_OF
3658 : we have to account for the offset of the ref. */
3659 42441082 : && (TREE_TYPE (TREE_OPERAND (arg0, 0))
3660 3021869 : == TREE_TYPE (TREE_OPERAND (arg1, 0))
3661 2686 : || (operand_equal_p (array_ref_low_bound
3662 2686 : (const_cast<tree> (arg0)),
3663 : array_ref_low_bound
3664 2686 : (const_cast<tree> (arg1)),
3665 : flags)
3666 2686 : && operand_equal_p (array_ref_element_size
3667 2686 : (const_cast<tree> (arg0)),
3668 : array_ref_element_size
3669 2686 : (const_cast<tree> (arg1)),
3670 : flags))));
3671 :
3672 745043212 : case COMPONENT_REF:
3673 : /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3674 : may be NULL when we're called to compare MEM_EXPRs. */
3675 745043212 : if (!OP_SAME_WITH_NULL (0))
3676 : return false;
3677 57040247 : {
3678 57040247 : bool compare_address = flags & OEP_ADDRESS_OF;
3679 :
3680 : /* Most of time we only need to compare FIELD_DECLs for equality.
3681 : However when determining address look into actual offsets.
3682 : These may match for unions and unshared record types. */
3683 57040247 : flags &= ~(OEP_ADDRESS_OF | OEP_ASSUME_WRAPV);
3684 57040247 : if (!OP_SAME (1))
3685 : {
3686 33166086 : if (compare_address
3687 591851 : && (flags & OEP_ADDRESS_OF_SAME_FIELD) == 0)
3688 : {
3689 591848 : tree field0 = TREE_OPERAND (arg0, 1);
3690 591848 : tree field1 = TREE_OPERAND (arg1, 1);
3691 :
3692 : /* Non-FIELD_DECL operands can appear in C++ templates. */
3693 591848 : if (TREE_CODE (field0) != FIELD_DECL
3694 591848 : || TREE_CODE (field1) != FIELD_DECL)
3695 : return false;
3696 :
3697 591848 : if (!DECL_FIELD_OFFSET (field0)
3698 591848 : || !DECL_FIELD_OFFSET (field1))
3699 3 : return field0 == field1;
3700 :
3701 591845 : if (!operand_equal_p (DECL_FIELD_OFFSET (field0),
3702 591845 : DECL_FIELD_OFFSET (field1), flags)
3703 779272 : || !operand_equal_p (DECL_FIELD_BIT_OFFSET (field0),
3704 187427 : DECL_FIELD_BIT_OFFSET (field1),
3705 : flags))
3706 550999 : return false;
3707 : }
3708 : else
3709 : return false;
3710 : }
3711 : }
3712 23915007 : return OP_SAME_WITH_NULL (2);
3713 :
3714 777989 : case BIT_FIELD_REF:
3715 777989 : if (!OP_SAME (0))
3716 : return false;
3717 525479 : flags &= ~(OEP_ADDRESS_OF | OEP_ASSUME_WRAPV);
3718 525479 : return OP_SAME (1) && OP_SAME (2);
3719 :
3720 : default:
3721 : return false;
3722 : }
3723 :
3724 54947592 : case tcc_expression:
3725 54947592 : switch (TREE_CODE (arg0))
3726 : {
3727 49633005 : case ADDR_EXPR:
3728 : /* Be sure we pass right ADDRESS_OF flag. */
3729 49633005 : gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3730 49633005 : return operand_equal_p (TREE_OPERAND (arg0, 0),
3731 49633005 : TREE_OPERAND (arg1, 0),
3732 49633005 : flags | OEP_ADDRESS_OF);
3733 :
3734 562643 : case TRUTH_NOT_EXPR:
3735 562643 : return OP_SAME (0);
3736 :
3737 73999 : case TRUTH_ANDIF_EXPR:
3738 73999 : case TRUTH_ORIF_EXPR:
3739 73999 : return OP_SAME (0) && OP_SAME (1);
3740 :
3741 0 : case WIDEN_MULT_PLUS_EXPR:
3742 0 : case WIDEN_MULT_MINUS_EXPR:
3743 0 : if (!OP_SAME (2))
3744 : return false;
3745 : /* The multiplcation operands are commutative. */
3746 : /* FALLTHRU */
3747 :
3748 46023 : case TRUTH_AND_EXPR:
3749 46023 : case TRUTH_OR_EXPR:
3750 46023 : case TRUTH_XOR_EXPR:
3751 46023 : if (OP_SAME (0) && OP_SAME (1))
3752 : return true;
3753 :
3754 : /* Otherwise take into account this is a commutative operation. */
3755 46005 : return (operand_equal_p (TREE_OPERAND (arg0, 0),
3756 46005 : TREE_OPERAND (arg1, 1), flags)
3757 46008 : && operand_equal_p (TREE_OPERAND (arg0, 1),
3758 3 : TREE_OPERAND (arg1, 0), flags));
3759 :
3760 199114 : case COND_EXPR:
3761 199114 : if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
3762 42813 : return false;
3763 156301 : flags &= ~(OEP_ADDRESS_OF | OEP_ASSUME_WRAPV);
3764 156301 : return OP_SAME (0);
3765 :
3766 4 : case BIT_INSERT_EXPR:
3767 : /* BIT_INSERT_EXPR has an implict operand as the type precision
3768 : of op1. Need to check to make sure they are the same. */
3769 4 : if (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
3770 1 : && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
3771 5 : && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 1)))
3772 1 : != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 1))))
3773 : return false;
3774 : /* FALLTHRU */
3775 :
3776 191 : case VEC_COND_EXPR:
3777 191 : case DOT_PROD_EXPR:
3778 191 : return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3779 :
3780 37218 : case MODIFY_EXPR:
3781 37218 : case INIT_EXPR:
3782 37218 : case COMPOUND_EXPR:
3783 37218 : case PREDECREMENT_EXPR:
3784 37218 : case PREINCREMENT_EXPR:
3785 37218 : case POSTDECREMENT_EXPR:
3786 37218 : case POSTINCREMENT_EXPR:
3787 37218 : if (flags & OEP_LEXICOGRAPHIC)
3788 165 : return OP_SAME (0) && OP_SAME (1);
3789 : return false;
3790 :
3791 333065 : case CLEANUP_POINT_EXPR:
3792 333065 : case EXPR_STMT:
3793 333065 : case SAVE_EXPR:
3794 333065 : if (flags & OEP_LEXICOGRAPHIC)
3795 208 : return OP_SAME (0);
3796 : return false;
3797 :
3798 79237 : case OBJ_TYPE_REF:
3799 : /* Virtual table reference. */
3800 158474 : if (!operand_equal_p (OBJ_TYPE_REF_EXPR (arg0),
3801 79237 : OBJ_TYPE_REF_EXPR (arg1), flags))
3802 : return false;
3803 13853 : flags &= ~(OEP_ADDRESS_OF | OEP_ASSUME_WRAPV);
3804 13853 : if (tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg0))
3805 13853 : != tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg1)))
3806 : return false;
3807 13853 : if (!operand_equal_p (OBJ_TYPE_REF_OBJECT (arg0),
3808 13853 : OBJ_TYPE_REF_OBJECT (arg1), flags))
3809 : return false;
3810 13853 : if (virtual_method_call_p (arg0))
3811 : {
3812 13853 : if (!virtual_method_call_p (arg1))
3813 : return false;
3814 13853 : return types_same_for_odr (obj_type_ref_class (arg0),
3815 27706 : obj_type_ref_class (arg1));
3816 : }
3817 : return false;
3818 :
3819 : default:
3820 : return false;
3821 : }
3822 :
3823 3873382 : case tcc_vl_exp:
3824 3873382 : switch (TREE_CODE (arg0))
3825 : {
3826 3873382 : case CALL_EXPR:
3827 3873382 : if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3828 3873382 : != (CALL_EXPR_FN (arg1) == NULL_TREE))
3829 : /* If not both CALL_EXPRs are either internal or normal function
3830 : functions, then they are not equal. */
3831 : return false;
3832 3873382 : else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3833 : {
3834 : /* If the CALL_EXPRs call different internal functions, then they
3835 : are not equal. */
3836 2 : if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3837 : return false;
3838 : }
3839 : else
3840 : {
3841 : /* If the CALL_EXPRs call different functions, then they are not
3842 : equal. */
3843 3873380 : if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3844 : flags))
3845 : return false;
3846 : }
3847 :
3848 : /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3849 2183315 : {
3850 2183315 : unsigned int cef = call_expr_flags (arg0);
3851 2183315 : if (flags & OEP_PURE_SAME)
3852 0 : cef &= ECF_CONST | ECF_PURE;
3853 : else
3854 2183315 : cef &= ECF_CONST;
3855 2183315 : if (!cef && !(flags & OEP_LEXICOGRAPHIC))
3856 : return false;
3857 : }
3858 :
3859 : /* Now see if all the arguments are the same. */
3860 33244 : {
3861 33244 : const_call_expr_arg_iterator iter0, iter1;
3862 33244 : const_tree a0, a1;
3863 66488 : for (a0 = first_const_call_expr_arg (arg0, &iter0),
3864 33244 : a1 = first_const_call_expr_arg (arg1, &iter1);
3865 41357 : a0 && a1;
3866 8113 : a0 = next_const_call_expr_arg (&iter0),
3867 8113 : a1 = next_const_call_expr_arg (&iter1))
3868 34744 : if (! operand_equal_p (a0, a1, flags))
3869 : return false;
3870 :
3871 : /* If we get here and both argument lists are exhausted
3872 : then the CALL_EXPRs are equal. */
3873 6613 : return ! (a0 || a1);
3874 : }
3875 : default:
3876 : return false;
3877 : }
3878 :
3879 164268508 : case tcc_declaration:
3880 : /* Consider __builtin_sqrt equal to sqrt. */
3881 164268508 : if (TREE_CODE (arg0) == FUNCTION_DECL)
3882 6899599 : return (fndecl_built_in_p (arg0) && fndecl_built_in_p (arg1)
3883 286199 : && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3884 6290052 : && (DECL_UNCHECKED_FUNCTION_CODE (arg0)
3885 286199 : == DECL_UNCHECKED_FUNCTION_CODE (arg1)));
3886 :
3887 157978456 : if (DECL_P (arg0)
3888 157978456 : && (flags & OEP_DECL_NAME)
3889 35 : && (flags & OEP_LEXICOGRAPHIC))
3890 : {
3891 : /* Consider decls with the same name equal. The caller needs
3892 : to make sure they refer to the same entity (such as a function
3893 : formal parameter). */
3894 35 : tree a0name = DECL_NAME (arg0);
3895 35 : tree a1name = DECL_NAME (arg1);
3896 70 : const char *a0ns = a0name ? IDENTIFIER_POINTER (a0name) : NULL;
3897 70 : const char *a1ns = a1name ? IDENTIFIER_POINTER (a1name) : NULL;
3898 60 : return a0ns && a1ns && strcmp (a0ns, a1ns) == 0;
3899 : }
3900 : return false;
3901 :
3902 322296554 : case tcc_exceptional:
3903 322296554 : if (TREE_CODE (arg0) == CONSTRUCTOR)
3904 : {
3905 19317 : if (CONSTRUCTOR_NO_CLEARING (arg0) != CONSTRUCTOR_NO_CLEARING (arg1))
3906 : return false;
3907 :
3908 : /* In GIMPLE constructors are used only to build vectors from
3909 : elements. Individual elements in the constructor must be
3910 : indexed in increasing order and form an initial sequence.
3911 :
3912 : We make no effort to compare nonconstant ones in GENERIC. */
3913 19317 : if (!VECTOR_TYPE_P (type0) || !VECTOR_TYPE_P (type1))
3914 : return false;
3915 :
3916 : /* Be sure that vectors constructed have the same representation.
3917 : We only tested element precision and modes to match.
3918 : Vectors may be BLKmode and thus also check that the number of
3919 : parts match. */
3920 619 : if (maybe_ne (TYPE_VECTOR_SUBPARTS (type0),
3921 1238 : TYPE_VECTOR_SUBPARTS (type1)))
3922 : return false;
3923 :
3924 619 : vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3925 619 : vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3926 619 : unsigned int len = vec_safe_length (v0);
3927 :
3928 1238 : if (len != vec_safe_length (v1))
3929 : return false;
3930 :
3931 3605 : for (unsigned int i = 0; i < len; i++)
3932 : {
3933 3188 : constructor_elt *c0 = &(*v0)[i];
3934 3188 : constructor_elt *c1 = &(*v1)[i];
3935 :
3936 3188 : if (!operand_equal_p (c0->value, c1->value, flags)
3937 : /* In GIMPLE the indexes can be either NULL or matching i.
3938 : Double check this so we won't get false
3939 : positives for GENERIC. */
3940 2986 : || (c0->index
3941 2588 : && (TREE_CODE (c0->index) != INTEGER_CST
3942 2588 : || compare_tree_int (c0->index, i)))
3943 6174 : || (c1->index
3944 2588 : && (TREE_CODE (c1->index) != INTEGER_CST
3945 2588 : || compare_tree_int (c1->index, i))))
3946 202 : return false;
3947 : }
3948 : return true;
3949 : }
3950 322277237 : else if (TREE_CODE (arg0) == STATEMENT_LIST
3951 3116 : && (flags & OEP_LEXICOGRAPHIC))
3952 : {
3953 : /* Compare the STATEMENT_LISTs. */
3954 16 : tree_stmt_iterator tsi1, tsi2;
3955 16 : tree body1 = const_cast<tree> (arg0);
3956 16 : tree body2 = const_cast<tree> (arg1);
3957 56 : for (tsi1 = tsi_start (body1), tsi2 = tsi_start (body2); ;
3958 40 : tsi_next (&tsi1), tsi_next (&tsi2))
3959 : {
3960 : /* The lists don't have the same number of statements. */
3961 56 : if (tsi_end_p (tsi1) ^ tsi_end_p (tsi2))
3962 : return false;
3963 56 : if (tsi_end_p (tsi1) && tsi_end_p (tsi2))
3964 : return true;
3965 40 : if (!operand_equal_p (tsi_stmt (tsi1), tsi_stmt (tsi2),
3966 : flags & (OEP_LEXICOGRAPHIC
3967 : | OEP_NO_HASH_CHECK)))
3968 : return false;
3969 : }
3970 : }
3971 : return false;
3972 :
3973 3104844 : case tcc_statement:
3974 3104844 : switch (TREE_CODE (arg0))
3975 : {
3976 52 : case RETURN_EXPR:
3977 52 : if (flags & OEP_LEXICOGRAPHIC)
3978 52 : return OP_SAME_WITH_NULL (0);
3979 : return false;
3980 4 : case DEBUG_BEGIN_STMT:
3981 4 : if (flags & OEP_LEXICOGRAPHIC)
3982 : return true;
3983 : return false;
3984 : default:
3985 : return false;
3986 : }
3987 :
3988 : default:
3989 : return false;
3990 : }
3991 :
3992 : #undef OP_SAME
3993 : #undef OP_SAME_WITH_NULL
3994 : }
3995 :
/* Generate a hash value for an expression.  This can be used iteratively
   by passing a previous result as the HSTATE argument.

   The hash is kept consistent with operand_equal_p: two trees that compare
   equal under the same FLAGS must receive the same hash (verify_hash_value
   asserts exactly this).  FLAGS is the same OEP_* set used for the
   comparison.  */

void
operand_compare::hash_operand (const_tree t, inchash::hash &hstate,
			       unsigned int flags)
{
  int i;
  enum tree_code code;
  enum tree_code_class tclass;

  /* NULL and error_mark_node hash as a fixed value.  */
  if (t == NULL_TREE || t == error_mark_node)
    {
      hstate.merge_hash (0);
      return;
    }

  STRIP_ANY_LOCATION_WRAPPER (t);

  /* operand_equal_p looks through no-op conversions except when comparing
     addresses, so strip them here too to keep hashes in sync.  */
  if (!(flags & OEP_ADDRESS_OF))
    STRIP_NOPS (t);

  code = TREE_CODE (t);

  switch (code)
    {
    /* Alas, constants aren't shared, so we can't rely on pointer
       identity.  */
    case VOID_CST:
      hstate.merge_hash (0);
      return;
    case INTEGER_CST:
      gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
      for (i = 0; i < TREE_INT_CST_EXT_NUNITS (t); i++)
	hstate.add_hwi (TREE_INT_CST_ELT (t, i));
      return;
    case REAL_CST:
      {
	unsigned int val2;
	/* Hash +0.0 and -0.0 identically when signed zeros are not
	   honored, matching operand_equal_p.  */
	if (!HONOR_SIGNED_ZEROS (t) && real_zerop (t))
	  val2 = rvc_zero;
	else
	  val2 = real_hash (TREE_REAL_CST_PTR (t));
	hstate.merge_hash (val2);
	return;
      }
    case FIXED_CST:
      {
	unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
	hstate.merge_hash (val2);
	return;
      }
    case STRING_CST:
      hstate.add ((const void *) TREE_STRING_POINTER (t),
		  TREE_STRING_LENGTH (t));
      return;
    case RAW_DATA_CST:
      hstate.add ((const void *) RAW_DATA_POINTER (t),
		  RAW_DATA_LENGTH (t));
      return;
    case COMPLEX_CST:
      hash_operand (TREE_REALPART (t), hstate, flags);
      hash_operand (TREE_IMAGPART (t), hstate, flags);
      return;
    case VECTOR_CST:
      {
	/* Hash the encoded representation, not the expanded elements.  */
	hstate.add_int (VECTOR_CST_NPATTERNS (t));
	hstate.add_int (VECTOR_CST_NELTS_PER_PATTERN (t));
	unsigned int count = vector_cst_encoded_nelts (t);
	for (unsigned int i = 0; i < count; ++i)
	  hash_operand (VECTOR_CST_ENCODED_ELT (t, i), hstate, flags);
	return;
      }
    case SSA_NAME:
      /* We can just compare by pointer.  */
      hstate.add_hwi (SSA_NAME_VERSION (t));
      return;
    case PLACEHOLDER_EXPR:
      /* The node itself doesn't matter.  */
      return;
    case BLOCK:
    case OMP_CLAUSE:
    case OMP_NEXT_VARIANT:
    case OMP_TARGET_DEVICE_MATCHES:
      /* Ignore.  */
      return;
    case TREE_LIST:
      /* A list of expressions, for a CALL_EXPR or as the elements of a
	 VECTOR_CST.  */
      for (; t; t = TREE_CHAIN (t))
	hash_operand (TREE_VALUE (t), hstate, flags);
      return;
    case CONSTRUCTOR:
      {
	unsigned HOST_WIDE_INT idx;
	tree field, value;
	flags &= ~(OEP_ADDRESS_OF | OEP_ASSUME_WRAPV);
	hstate.add_int (CONSTRUCTOR_NO_CLEARING (t));
	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
	  {
	    /* In GIMPLE the indexes can be either NULL or matching i.  */
	    if (field == NULL_TREE)
	      field = bitsize_int (idx);
	    if (TREE_CODE (field) == FIELD_DECL)
	      {
		/* Hash FIELD_DECLs by their offsets so fields of distinct
		   but layout-identical records can match.  */
		hash_operand (DECL_FIELD_OFFSET (field), hstate, flags);
		hash_operand (DECL_FIELD_BIT_OFFSET (field), hstate, flags);
	      }
	    else
	      hash_operand (field, hstate, flags);
	    hash_operand (value, hstate, flags);
	  }
	return;
      }
    case STATEMENT_LIST:
      {
	tree_stmt_iterator i;
	for (i = tsi_start (const_cast<tree> (t));
	     !tsi_end_p (i); tsi_next (&i))
	  hash_operand (tsi_stmt (i), hstate, flags);
	return;
      }
    case TREE_VEC:
      for (i = 0; i < TREE_VEC_LENGTH (t); ++i)
	hash_operand (TREE_VEC_ELT (t, i), hstate, flags);
      return;
    case IDENTIFIER_NODE:
      hstate.add_object (IDENTIFIER_HASH_VALUE (t));
      return;
    case FUNCTION_DECL:
      /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
	 Otherwise nodes that compare equal according to operand_equal_p might
	 get different hash codes.  However, don't do this for machine specific
	 or front end builtins, since the function code is overloaded in those
	 cases.  */
      if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
	  && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
	{
	  t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
	  code = TREE_CODE (t);
	}
      /* FALL THROUGH */
    default:
      if (POLY_INT_CST_P (t))
	{
	  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
	    hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
	  return;
	}
      tclass = TREE_CODE_CLASS (code);

      if (tclass == tcc_declaration)
	{
	  /* DECL's have a unique ID */
	  hstate.add_hwi (DECL_UID (t));
	}
      else if (tclass == tcc_comparison && !commutative_tree_code (code))
	{
	  /* For comparisons that can be swapped, use the lower
	     tree code.  */
	  enum tree_code ccode = swap_tree_comparison (code);
	  if (code < ccode)
	    ccode = code;
	  hstate.add_object (ccode);
	  /* When the canonical code differs, the operands are hashed in
	     swapped order so e.g. a < b and b > a hash the same.  */
	  hash_operand (TREE_OPERAND (t, ccode != code), hstate, flags);
	  hash_operand (TREE_OPERAND (t, ccode == code), hstate, flags);
	}
      else if (CONVERT_EXPR_CODE_P (code))
	{
	  /* NOP_EXPR and CONVERT_EXPR are considered equal by
	     operand_equal_p.  */
	  enum tree_code ccode = NOP_EXPR;
	  hstate.add_object (ccode);

	  /* Don't hash the type, that can lead to having nodes which
	     compare equal according to operand_equal_p, but which
	     have different hash codes.  Make sure to include signedness
	     in the hash computation.  */
	  hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
	  hash_operand (TREE_OPERAND (t, 0), hstate, flags);
	}
      /* For OEP_ADDRESS_OF, hash MEM_EXPR[&decl, 0] the same as decl.  */
      else if (code == MEM_REF
	       && (flags & OEP_ADDRESS_OF) != 0
	       && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
	       && DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
	       && integer_zerop (TREE_OPERAND (t, 1)))
	hash_operand (TREE_OPERAND (TREE_OPERAND (t, 0), 0),
		      hstate, flags);
      /* Don't ICE on FE specific trees, or their arguments etc.
	 during operand_equal_p hash verification.  */
      else if (!IS_EXPR_CODE_CLASS (tclass))
	gcc_assert (flags & OEP_HASH_CHECK);
      else
	{
	  /* SFLAGS is used for all operands past the first; the per-code
	     cases below decide which of FLAGS/SFLAGS drop OEP_ADDRESS_OF
	     and OEP_ASSUME_WRAPV, mirroring operand_equal_p.  */
	  unsigned int sflags = flags;

	  hstate.add_object (code);

	  switch (code)
	    {
	    case ADDR_EXPR:
	      gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
	      flags |= OEP_ADDRESS_OF;
	      sflags = flags;
	      break;

	    case INDIRECT_REF:
	    case MEM_REF:
	    case TARGET_MEM_REF:
	      flags &= ~(OEP_ADDRESS_OF | OEP_ASSUME_WRAPV);
	      sflags = flags;
	      break;

	    case COMPONENT_REF:
	      if (sflags & OEP_ADDRESS_OF)
		{
		  /* For addresses, hash by the field's offsets rather than
		     by the FIELD_DECL identity.  */
		  hash_operand (TREE_OPERAND (t, 0), hstate, flags);
		  hash_operand (DECL_FIELD_OFFSET (TREE_OPERAND (t, 1)),
				hstate, flags & ~OEP_ADDRESS_OF);
		  hash_operand (DECL_FIELD_BIT_OFFSET (TREE_OPERAND (t, 1)),
				hstate, flags & ~OEP_ADDRESS_OF);
		  return;
		}
	      break;
	    case ARRAY_REF:
	    case ARRAY_RANGE_REF:
	    case BIT_FIELD_REF:
	      sflags &= ~(OEP_ADDRESS_OF | OEP_ASSUME_WRAPV);
	      break;

	    case COND_EXPR:
	      flags &= ~(OEP_ADDRESS_OF | OEP_ASSUME_WRAPV);
	      break;

	    case WIDEN_MULT_PLUS_EXPR:
	    case WIDEN_MULT_MINUS_EXPR:
	      {
		/* The multiplication operands are commutative.  */
		inchash::hash one, two;
		hash_operand (TREE_OPERAND (t, 0), one, flags);
		hash_operand (TREE_OPERAND (t, 1), two, flags);
		hstate.add_commutative (one, two);
		hash_operand (TREE_OPERAND (t, 2), hstate, flags);
		return;
	      }

	    case CALL_EXPR:
	      /* Internal calls have no fndecl; distinguish them by the
		 internal function number.  */
	      if (CALL_EXPR_FN (t) == NULL_TREE)
		hstate.add_int (CALL_EXPR_IFN (t));
	      break;

	    case TARGET_EXPR:
	      /* For TARGET_EXPR, just hash on the TARGET_EXPR_SLOT.
		 Usually different TARGET_EXPRs just should use
		 different temporaries in their slots.  */
	      hash_operand (TARGET_EXPR_SLOT (t), hstate, flags);
	      return;

	    case OBJ_TYPE_REF:
	      /* Virtual table reference.  */
	      inchash::add_expr (OBJ_TYPE_REF_EXPR (t), hstate, flags);
	      flags &= ~(OEP_ADDRESS_OF | OEP_ASSUME_WRAPV);
	      inchash::add_expr (OBJ_TYPE_REF_TOKEN (t), hstate, flags);
	      inchash::add_expr (OBJ_TYPE_REF_OBJECT (t), hstate, flags);
	      if (!virtual_method_call_p (t))
		return;
	      if (tree c = obj_type_ref_class (t))
		{
		  c = TYPE_NAME (TYPE_MAIN_VARIANT (c));
		  /* We compute mangled names only when free_lang_data is run.
		     In that case we can hash precisely.  */
		  if (TREE_CODE (c) == TYPE_DECL
		      && DECL_ASSEMBLER_NAME_SET_P (c))
		    hstate.add_object
			(IDENTIFIER_HASH_VALUE
			     (DECL_ASSEMBLER_NAME (c)));
		}
	      return;
	    default:
	      break;
	    }

	  /* Don't hash the type, that can lead to having nodes which
	     compare equal according to operand_equal_p, but which
	     have different hash codes.  */
	  if (code == NON_LVALUE_EXPR)
	    {
	      /* Make sure to include signedness in the hash computation.  */
	      hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
	      hash_operand (TREE_OPERAND (t, 0), hstate, flags);
	    }

	  else if (commutative_tree_code (code))
	    {
	      /* It's a commutative expression.  We want to hash it the same
		 however it appears.  We do this by first hashing both operands
		 and then rehashing based on the order of their independent
		 hashes.  */
	      inchash::hash one, two;
	      hash_operand (TREE_OPERAND (t, 0), one, flags);
	      hash_operand (TREE_OPERAND (t, 1), two, flags);
	      hstate.add_commutative (one, two);
	    }
	  else
	    /* Only the first operand keeps FLAGS; the rest use SFLAGS.  */
	    for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
	      hash_operand (TREE_OPERAND (t, i), hstate,
			    i == 0 ? flags : sflags);
	}
      return;
    }
}
4308 :
4309 : bool
4310 7147154585 : operand_compare::verify_hash_value (const_tree arg0, const_tree arg1,
4311 : unsigned int flags, bool *ret)
4312 : {
4313 : /* When checking and unless comparing DECL names, verify that if
4314 : the outermost operand_equal_p call returns non-zero then ARG0
4315 : and ARG1 have the same hash value. */
4316 7147154585 : if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
4317 : {
4318 3006683840 : if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
4319 : {
4320 463715008 : if (arg0 != arg1 && !(flags & (OEP_DECL_NAME | OEP_ASSUME_WRAPV)))
4321 : {
4322 82380505 : inchash::hash hstate0 (0), hstate1 (0);
4323 82380505 : hash_operand (arg0, hstate0, flags | OEP_HASH_CHECK);
4324 82380505 : hash_operand (arg1, hstate1, flags | OEP_HASH_CHECK);
4325 82380505 : hashval_t h0 = hstate0.end ();
4326 82380505 : hashval_t h1 = hstate1.end ();
4327 82380505 : gcc_assert (h0 == h1);
4328 : }
4329 463715008 : *ret = true;
4330 : }
4331 : else
4332 2542968832 : *ret = false;
4333 :
4334 3006683840 : return true;
4335 : }
4336 :
4337 : return false;
4338 : }
4339 :
4340 :
/* Shared instance backing the plain operand_equal_p/add_expr wrappers.  */
static operand_compare default_compare_instance;

/* Convenience wrapper around the operand_compare class because usually we do
   not need to play with the valueizer.  */

bool
operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
{
  return default_compare_instance.operand_equal_p (arg0, arg1, flags);
}
4351 :
namespace inchash
{

/* Generate a hash value for an expression.  This can be used iteratively
   by passing a previous result as the HSTATE argument.

   This function is intended to produce the same hash for expressions which
   would compare equal using operand_equal_p.  Delegates to the default
   operand_compare instance.  */
void
add_expr (const_tree t, inchash::hash &hstate, unsigned int flags)
{
  default_compare_instance.hash_operand (t, hstate, flags);
}

}
4368 : /* Similar to operand_equal_p, but see if ARG0 might be a variant of ARG1
4369 : with a different signedness or a narrower precision. */
4370 :
4371 : static bool
4372 22109872 : operand_equal_for_comparison_p (tree arg0, tree arg1)
4373 : {
4374 22109872 : if (operand_equal_p (arg0, arg1, 0))
4375 : return true;
4376 :
4377 42278834 : if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
4378 36305426 : || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
4379 : return false;
4380 :
4381 : /* Discard any conversions that don't change the modes of ARG0 and ARG1
4382 : and see if the inner values are the same. This removes any
4383 : signedness comparison, which doesn't matter here. */
4384 6356502 : tree op0 = arg0;
4385 6356502 : tree op1 = arg1;
4386 6356502 : STRIP_NOPS (op0);
4387 6356502 : STRIP_NOPS (op1);
4388 6356502 : if (operand_equal_p (op0, op1, 0))
4389 : return true;
4390 :
4391 : /* Discard a single widening conversion from ARG1 and see if the inner
4392 : value is the same as ARG0. */
4393 5345306 : if (CONVERT_EXPR_P (arg1)
4394 834997 : && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0)))
4395 834949 : && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))
4396 834949 : < TYPE_PRECISION (TREE_TYPE (arg1))
4397 6475766 : && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
4398 : return true;
4399 :
4400 : return false;
4401 : }
4402 :
4403 : /* See if ARG is an expression that is either a comparison or is performing
4404 : arithmetic on comparisons. The comparisons must only be comparing
4405 : two different values, which will be stored in *CVAL1 and *CVAL2; if
4406 : they are nonzero it means that some operands have already been found.
4407 : No variables may be used anywhere else in the expression except in the
4408 : comparisons.
4409 :
4410 : If this is true, return 1. Otherwise, return zero. */
4411 :
4412 : static bool
4413 65828052 : twoval_comparison_p (tree arg, tree *cval1, tree *cval2)
4414 : {
4415 69969103 : enum tree_code code = TREE_CODE (arg);
4416 69969103 : enum tree_code_class tclass = TREE_CODE_CLASS (code);
4417 :
4418 : /* We can handle some of the tcc_expression cases here. */
4419 69969103 : if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
4420 : tclass = tcc_unary;
4421 69301214 : else if (tclass == tcc_expression
4422 742235 : && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
4423 742235 : || code == COMPOUND_EXPR))
4424 : tclass = tcc_binary;
4425 :
4426 69290495 : switch (tclass)
4427 : {
4428 4141051 : case tcc_unary:
4429 4141051 : return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2);
4430 :
4431 5528172 : case tcc_binary:
4432 5528172 : return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
4433 5528172 : && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2));
4434 :
4435 : case tcc_constant:
4436 : return true;
4437 :
4438 731516 : case tcc_expression:
4439 731516 : if (code == COND_EXPR)
4440 713 : return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
4441 713 : && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2)
4442 777 : && twoval_comparison_p (TREE_OPERAND (arg, 2), cval1, cval2));
4443 : return false;
4444 :
4445 672310 : case tcc_comparison:
4446 : /* First see if we can handle the first operand, then the second. For
4447 : the second operand, we know *CVAL1 can't be zero. It must be that
4448 : one side of the comparison is each of the values; test for the
4449 : case where this isn't true by failing if the two operands
4450 : are the same. */
4451 :
4452 672310 : if (operand_equal_p (TREE_OPERAND (arg, 0),
4453 672310 : TREE_OPERAND (arg, 1), 0))
4454 : return false;
4455 :
4456 672310 : if (*cval1 == 0)
4457 670305 : *cval1 = TREE_OPERAND (arg, 0);
4458 2005 : else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
4459 : ;
4460 1886 : else if (*cval2 == 0)
4461 0 : *cval2 = TREE_OPERAND (arg, 0);
4462 1886 : else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
4463 : ;
4464 : else
4465 : return false;
4466 :
4467 670424 : if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
4468 : ;
4469 670424 : else if (*cval2 == 0)
4470 670305 : *cval2 = TREE_OPERAND (arg, 1);
4471 119 : else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
4472 : ;
4473 : else
4474 : return false;
4475 :
4476 : return true;
4477 :
4478 : default:
4479 : return false;
4480 : }
4481 : }
4482 :
4483 : /* ARG is a tree that is known to contain just arithmetic operations and
4484 : comparisons. Evaluate the operations in the tree substituting NEW0 for
4485 : any occurrence of OLD0 as an operand of a comparison and likewise for
4486 : NEW1 and OLD1. */
4487 :
static tree
eval_subst (location_t loc, tree arg, tree old0, tree new0,
	    tree old1, tree new1)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class tclass = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
    tclass = tcc_unary;
  else if (tclass == tcc_expression
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    tclass = tcc_binary;

  switch (tclass)
    {
    case tcc_unary:
      /* Rebuild the unary operation around the substituted operand.  */
      return fold_build1_loc (loc, code, type,
			      eval_subst (loc, TREE_OPERAND (arg, 0),
					  old0, new0, old1, new1));

    case tcc_binary:
      /* Rebuild the binary operation with both operands substituted.  */
      return fold_build2_loc (loc, code, type,
			      eval_subst (loc, TREE_OPERAND (arg, 0),
					  old0, new0, old1, new1),
			      eval_subst (loc, TREE_OPERAND (arg, 1),
					  old0, new0, old1, new1));

    case tcc_expression:
      switch (code)
	{
	case SAVE_EXPR:
	  return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
			     old1, new1);

	case COMPOUND_EXPR:
	  /* Only the value (second) operand matters for the result.  */
	  return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
			     old1, new1);

	case COND_EXPR:
	  return fold_build3_loc (loc, code, type,
				  eval_subst (loc, TREE_OPERAND (arg, 0),
					      old0, new0, old1, new1),
				  eval_subst (loc, TREE_OPERAND (arg, 1),
					      old0, new0, old1, new1),
				  eval_subst (loc, TREE_OPERAND (arg, 2),
					      old0, new0, old1, new1));
	default:
	  break;
	}
      /* Fall through to the comparison case for any other tcc_expression
	 code - ??? (it is treated as a two-operand expression there).  */

    case tcc_comparison:
      {
	tree arg0 = TREE_OPERAND (arg, 0);
	tree arg1 = TREE_OPERAND (arg, 1);

	/* We need to check both for exact equality and tree equality.  The
	   former will be true if the operand has a side-effect.  In that
	   case, we know the operand occurred exactly once.  */

	if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
	  arg0 = new0;
	else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
	  arg0 = new1;

	if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
	  arg1 = new0;
	else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
	  arg1 = new1;

	return fold_build2_loc (loc, code, type, arg0, arg1);
      }

    default:
      /* Leaves (and anything we don't understand) are returned unchanged.  */
      return arg;
    }
}
4567 :
4568 : /* Return a tree for the case when the result of an expression is RESULT
4569 : converted to TYPE and OMITTED was previously an operand of the expression
4570 : but is now not needed (e.g., we folded OMITTED * 0).
4571 :
4572 : If OMITTED has side effects, we must evaluate it. Otherwise, just do
4573 : the conversion of RESULT to TYPE. */
4574 :
4575 : tree
4576 289086 : omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
4577 : {
4578 289086 : tree t = fold_convert_loc (loc, type, result);
4579 :
4580 : /* If the resulting operand is an empty statement, just return the omitted
4581 : statement casted to void. */
4582 289086 : if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
4583 0 : return build1_loc (loc, NOP_EXPR, void_type_node,
4584 0 : fold_ignored_result (omitted));
4585 :
4586 289086 : if (TREE_SIDE_EFFECTS (omitted))
4587 19343 : return build2_loc (loc, COMPOUND_EXPR, type,
4588 19343 : fold_ignored_result (omitted), t);
4589 :
4590 269743 : return non_lvalue_loc (loc, t);
4591 : }
4592 :
4593 : /* Return a tree for the case when the result of an expression is RESULT
4594 : converted to TYPE and OMITTED1 and OMITTED2 were previously operands
4595 : of the expression but are now not needed.
4596 :
4597 : If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
4598 : If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
4599 : evaluated before OMITTED2. Otherwise, if neither has side effects,
4600 : just do the conversion of RESULT to TYPE. */
4601 :
4602 : tree
4603 5574 : omit_two_operands_loc (location_t loc, tree type, tree result,
4604 : tree omitted1, tree omitted2)
4605 : {
4606 5574 : tree t = fold_convert_loc (loc, type, result);
4607 :
4608 5574 : if (TREE_SIDE_EFFECTS (omitted2))
4609 69 : t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
4610 5574 : if (TREE_SIDE_EFFECTS (omitted1))
4611 176 : t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
4612 :
4613 5574 : return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
4614 : }
4615 :
4616 :
4617 : /* Return a simplified tree node for the truth-negation of ARG. This
4618 : never alters ARG itself. We assume that ARG is an operation that
4619 : returns a truth value (0 or 1).
4620 :
4621 : FIXME: one would think we would fold the result, but it causes
4622 : problems with the dominator optimizer. */
4623 :
static tree
fold_truth_not_expr (location_t loc, tree arg)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  location_t loc1, loc2;

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    {
      tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
      /* With trapping math, inverting an ordered FP comparison could
	 change whether the comparison traps on NaN, so refuse.  */
      if (FLOAT_TYPE_P (op_type)
	  && flag_trapping_math
	  && code != ORDERED_EXPR && code != UNORDERED_EXPR
	  && code != NE_EXPR && code != EQ_EXPR)
	return NULL_TREE;

      code = invert_tree_comparison (code, HONOR_NANS (op_type));
      if (code == ERROR_MARK)
	return NULL_TREE;

      tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
			     TREE_OPERAND (arg, 1));
      copy_warning (ret, arg);
      return ret;
    }

  switch (code)
    {
    case INTEGER_CST:
      return constant_boolean_node (integer_zerop (arg), type);

    case TRUTH_AND_EXPR:
      /* De Morgan: !(a && b) --> !a || !b.  */
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_OR_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_OR_EXPR:
      /* De Morgan: !(a || b) --> !a && !b.  */
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_AND_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
	 unless the second operand is a TRUTH_NOT_EXPR in which case our
	 result is the XOR of the first operand with the inside of the
	 negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
	return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
			   TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
	return build2_loc (loc, TRUTH_XOR_EXPR, type,
			   invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
			   TREE_OPERAND (arg, 1));

    case TRUTH_ANDIF_EXPR:
      /* De Morgan for the short-circuit form: !(a && b) --> !a || !b.  */
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_ORIF_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_ORIF_EXPR:
      /* De Morgan for the short-circuit form: !(a || b) --> !a && !b.  */
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_NOT_EXPR:
      /* Double negation cancels.  */
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      {
	tree arg1 = TREE_OPERAND (arg, 1);
	tree arg2 = TREE_OPERAND (arg, 2);

	loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
	loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);

	/* A COND_EXPR may have a throw as one operand, which
	   then has void type.  Just leave void operands
	   as they are.  */
	return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
			   VOID_TYPE_P (TREE_TYPE (arg1))
			   ? arg1 : invert_truthvalue_loc (loc1, arg1),
			   VOID_TYPE_P (TREE_TYPE (arg2))
			   ? arg2 : invert_truthvalue_loc (loc2, arg2));
      }

    case COMPOUND_EXPR:
      /* Only the value (second) operand needs inverting.  */
      loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, COMPOUND_EXPR, type,
			 TREE_OPERAND (arg, 0),
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));

    case NON_LVALUE_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));

    CASE_CONVERT:
      /* A conversion from bool keeps the truth value; just wrap it.  */
      if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
	return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);

      /* fall through */

    case FLOAT_EXPR:
      /* Push the negation inside the conversion.  */
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return build1_loc (loc, TREE_CODE (arg), type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));

    case BIT_AND_EXPR:
      /* !(x & 1) --> (x & 1) == 0; only valid when the mask is 1.  */
      if (!integer_onep (TREE_OPERAND (arg, 1)))
	return NULL_TREE;
      return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));

    case SAVE_EXPR:
      return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);

    case CLEANUP_POINT_EXPR:
      /* Invert inside so the cleanup point stays outermost.  */
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return build1_loc (loc, CLEANUP_POINT_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));

    default:
      return NULL_TREE;
    }
}
4760 :
4761 : /* Fold the truth-negation of ARG. This never alters ARG itself. We
4762 : assume that ARG is an operation that returns a truth value (0 or 1
4763 : for scalars, 0 or -1 for vectors). Return the folded expression if
4764 : folding is successful. Otherwise, return NULL_TREE. */
4765 :
4766 : static tree
4767 1999384 : fold_invert_truthvalue (location_t loc, tree arg)
4768 : {
4769 1999384 : tree type = TREE_TYPE (arg);
4770 3998744 : return fold_unary_loc (loc, VECTOR_TYPE_P (type)
4771 : ? BIT_NOT_EXPR
4772 : : TRUTH_NOT_EXPR,
4773 1999384 : type, arg);
4774 : }
4775 :
4776 : /* Return a simplified tree node for the truth-negation of ARG. This
4777 : never alters ARG itself. We assume that ARG is an operation that
4778 : returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
4779 :
4780 : tree
4781 44017580 : invert_truthvalue_loc (location_t loc, tree arg)
4782 : {
4783 44017580 : if (TREE_CODE (arg) == ERROR_MARK)
4784 : return arg;
4785 :
4786 44017580 : tree type = TREE_TYPE (arg);
4787 88035160 : return fold_build1_loc (loc, VECTOR_TYPE_P (type)
4788 : ? BIT_NOT_EXPR
4789 : : TRUTH_NOT_EXPR,
4790 44017580 : type, arg);
4791 : }
4792 :
4793 : /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
4794 : starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
4795 : and uses reverse storage order if REVERSEP is nonzero. ORIG_INNER
4796 : is the original memory reference used to preserve the alias set of
4797 : the access. */
4798 :
tree
make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type,
		    HOST_WIDE_INT bitsize, poly_int64 bitpos,
		    int unsignedp, int reversep)
{
  tree result, bftype;

  /* Attempt not to lose the access path if possible.  If ORIG_INNER is a
     COMPONENT_REF whose base resolves to INNER and the requested bits lie
     wholly within that component, reference the component instead and
     rebase BITPOS relative to it.  */
  if (TREE_CODE (orig_inner) == COMPONENT_REF)
    {
      tree ninner = TREE_OPERAND (orig_inner, 0);
      machine_mode nmode;
      poly_int64 nbitsize, nbitpos;
      tree noffset;
      int nunsignedp, nreversep, nvolatilep = 0;
      tree base = get_inner_reference (ninner, &nbitsize, &nbitpos,
				       &noffset, &nmode, &nunsignedp,
				       &nreversep, &nvolatilep);
      if (base == inner
	  && noffset == NULL_TREE
	  && known_subrange_p (bitpos, bitsize, nbitpos, nbitsize)
	  && !reversep
	  && !nreversep
	  && !nvolatilep)
	{
	  inner = ninner;
	  bitpos -= nbitpos;
	}
    }

  /* Preserve the alias set of the original access: if it was alias set 0
     but INNER's isn't, wrap INNER in a zero-offset MEM_REF so the new
     reference also conflicts with everything.  */
  alias_set_type iset = get_alias_set (orig_inner);
  if (iset == 0 && get_alias_set (inner) != iset)
    inner = fold_build2 (MEM_REF, TREE_TYPE (inner),
			 build_fold_addr_expr (inner),
			 build_int_cst (ptr_type_node, 0));

  /* If the field covers the whole of an integral or pointer object starting
     at bit 0, no bit-field extraction is needed; a conversion suffices.  */
  if (known_eq (bitpos, 0) && !reversep)
    {
      tree size = TYPE_SIZE (TREE_TYPE (inner));
      if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
	   || POINTER_TYPE_P (TREE_TYPE (inner)))
	  && tree_fits_shwi_p (size)
	  && tree_to_shwi (size) == bitsize)
	return fold_convert_loc (loc, type, inner);
    }

  /* Pick the type of the BIT_FIELD_REF itself: TYPE if its precision and
     signedness match exactly, otherwise a nonstandard integer type of the
     right width (converted back to TYPE below).  */
  bftype = type;
  if (TYPE_PRECISION (bftype) != bitsize
      || TYPE_UNSIGNED (bftype) == !unsignedp)
    bftype = build_nonstandard_integer_type (bitsize, 0);

  result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
		       bitsize_int (bitsize), bitsize_int (bitpos));
  REF_REVERSE_STORAGE_ORDER (result) = reversep;

  if (bftype != type)
    result = fold_convert_loc (loc, type, result);

  return result;
}
4859 :
4860 : /* Optimize a bit-field compare.
4861 :
4862 : There are two cases: First is a compare against a constant and the
4863 : second is a comparison of two items where the fields are at the same
4864 : bit position relative to the start of a chunk (byte, halfword, word)
4865 : large enough to contain it. In these cases we can avoid the shift
4866 : implicit in bitfield extractions.
4867 :
4868 : For constants, we emit a compare of the shifted constant with the
4869 : BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
4870 : compared. For two fields at the same position, we do the ANDs with the
4871 : similar mask and compare the result of the ANDs.
4872 :
4873 : CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
4874 : COMPARE_TYPE is the type of the comparison, and LHS and RHS
4875 : are the left and right operands of the comparison, respectively.
4876 :
4877 : If the optimization described above can be done, we return the resulting
4878 : tree. Otherwise we return zero. */
4879 :
static tree
optimize_bit_field_compare (location_t loc, enum tree_code code,
			    tree compare_type, tree lhs, tree rhs)
{
  poly_int64 plbitpos, plbitsize, rbitpos, rbitsize;
  HOST_WIDE_INT lbitpos, lbitsize, nbitpos, nbitsize;
  tree type = TREE_TYPE (lhs);
  tree unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  machine_mode lmode, rmode;
  scalar_int_mode nmode;
  int lunsignedp, runsignedp;
  int lreversep, rreversep;
  int lvolatilep = 0, rvolatilep = 0;
  tree linner, rinner = NULL_TREE;
  tree mask;
  tree offset;

  /* Get all the information about the extractions being done.  If the bit size
     is the same as the size of the underlying object, we aren't doing an
     extraction at all and so can do nothing.  We also don't want to
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
     then will no longer be able to replace it.  */
  linner = get_inner_reference (lhs, &plbitsize, &plbitpos, &offset, &lmode,
				&lunsignedp, &lreversep, &lvolatilep);
  if (linner == lhs
      || !known_size_p (plbitsize)
      || !plbitsize.is_constant (&lbitsize)
      || !plbitpos.is_constant (&lbitpos)
      || known_eq (lbitsize, GET_MODE_BITSIZE (lmode))
      || offset != 0
      || TREE_CODE (linner) == PLACEHOLDER_EXPR
      || lvolatilep)
    return 0;

  if (const_p)
    rreversep = lreversep;
  else
    {
      /* If this is not a constant, we can only do something if bit positions,
	 sizes, signedness and storage order are the same.  */
      rinner
	= get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
			       &runsignedp, &rreversep, &rvolatilep);

      if (rinner == rhs
	  || maybe_ne (lbitpos, rbitpos)
	  || maybe_ne (lbitsize, rbitsize)
	  || lunsignedp != runsignedp
	  || lreversep != rreversep
	  || offset != 0
	  || TREE_CODE (rinner) == PLACEHOLDER_EXPR
	  || rvolatilep)
	return 0;
    }

  /* Honor the C++ memory model and mimic what RTL expansion does.  */
  poly_uint64 bitstart = 0;
  poly_uint64 bitend = 0;
  if (TREE_CODE (lhs) == COMPONENT_REF)
    {
      get_bit_range (&bitstart, &bitend, lhs, &plbitpos, &offset);
      if (!plbitpos.is_constant (&lbitpos) || offset != NULL_TREE)
	return 0;
    }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  if (!get_best_mode (lbitsize, lbitpos, bitstart, bitend,
		      const_p ? TYPE_ALIGN (TREE_TYPE (linner))
		      : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
			     TYPE_ALIGN (TREE_TYPE (rinner))),
		      BITS_PER_WORD, false, &nmode))
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);

  /* Compute the bit position and size for the new reference and our offset
     within it.  If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  nbitsize = GET_MODE_BITSIZE (nmode);
  nbitpos = lbitpos & ~ (nbitsize - 1);
  lbitpos -= nbitpos;
  if (nbitsize == lbitsize)
    return 0;

  /* For big-endian storage the field's offset counts from the other end
     of the chunk.  */
  if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
    lbitpos = nbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field: all-ones shifted
     so that exactly LBITSIZE bits remain, positioned at LBITPOS.  */
  mask = build_int_cst_type (unsigned_type, -1);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
  mask = const_binop (RSHIFT_EXPR, mask,
		      size_int (nbitsize - lbitsize - lbitpos));

  if (! const_p)
    {
      if (nbitpos < 0)
	return 0;

      /* If not comparing with constant, just rework the comparison
	 and return.  */
      tree t1 = make_bit_field_ref (loc, linner, lhs, unsigned_type,
				    nbitsize, nbitpos, 1, lreversep);
      t1 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t1, mask);
      tree t2 = make_bit_field_ref (loc, rinner, rhs, unsigned_type,
				    nbitsize, nbitpos, 1, rreversep);
      t2 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t2, mask);
      return fold_build2_loc (loc, code, compare_type, t1, t2);
    }

  /* Otherwise, we are handling the constant case.  See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */

  if (lunsignedp)
    {
      if (wi::lrshift (wi::to_wide (rhs), lbitsize) != 0)
	{
	  warning (0, "comparison is always %d due to width of bit-field",
		   code == NE_EXPR);
	  return constant_boolean_node (code == NE_EXPR, compare_type);
	}
    }
  else
    {
      wide_int tem = wi::arshift (wi::to_wide (rhs), lbitsize - 1);
      if (tem != 0 && tem != -1)
	{
	  warning (0, "comparison is always %d due to width of bit-field",
		   code == NE_EXPR);
	  return constant_boolean_node (code == NE_EXPR, compare_type);
	}
    }

  if (nbitpos < 0)
    return 0;

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = build_int_cst (type, 0);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (loc, linner, lhs, unsigned_type,
			    nbitsize, nbitpos, 1, lreversep);

  rhs = const_binop (BIT_AND_EXPR,
		     const_binop (LSHIFT_EXPR,
				  fold_convert_loc (loc, unsigned_type, rhs),
				  size_int (lbitpos)),
		     mask);

  lhs = build2_loc (loc, code, compare_type,
		    build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
  return lhs;
}
5048 :
5049 : /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
5050 : represents the sign bit of EXP's type. If EXP represents a sign
5051 : or zero extension, also test VAL against the unextended type.
5052 : The return value is the (sub)expression whose sign bit is VAL,
5053 : or NULL_TREE otherwise. */
5054 :
5055 : tree
5056 2179 : sign_bit_p (tree exp, const_tree val)
5057 : {
5058 2179 : int width;
5059 2179 : tree t;
5060 :
5061 : /* Tree EXP must have an integral type. */
5062 2179 : t = TREE_TYPE (exp);
5063 2179 : if (! INTEGRAL_TYPE_P (t))
5064 : return NULL_TREE;
5065 :
5066 : /* Tree VAL must be an integer constant. */
5067 1833 : if (TREE_CODE (val) != INTEGER_CST
5068 1833 : || TREE_OVERFLOW (val))
5069 : return NULL_TREE;
5070 :
5071 1451 : width = TYPE_PRECISION (t);
5072 1451 : if (wi::only_sign_bit_p (wi::to_wide (val), width))
5073 : return exp;
5074 :
5075 : /* Handle extension from a narrower type. */
5076 814 : if (TREE_CODE (exp) == NOP_EXPR
5077 814 : && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
5078 0 : return sign_bit_p (TREE_OPERAND (exp, 0), val);
5079 :
5080 : return NULL_TREE;
5081 : }
5082 :
5083 : /* Subroutine for fold_truth_andor_1 and simple_condition_p: determine if an
5084 : operand is simple enough to be evaluated unconditionally. */
5085 :
5086 : static bool
5087 66788190 : simple_operand_p (const_tree exp)
5088 : {
5089 : /* Strip any conversions that don't change the machine mode. */
5090 66788190 : STRIP_NOPS (exp);
5091 :
5092 66788190 : return (CONSTANT_CLASS_P (exp)
5093 46803995 : || TREE_CODE (exp) == SSA_NAME
5094 84442881 : || (DECL_P (exp)
5095 6088537 : && ! TREE_ADDRESSABLE (exp)
5096 5997063 : && ! TREE_THIS_VOLATILE (exp)
5097 5997063 : && ! DECL_NONLOCAL (exp)
5098 : /* Don't regard global variables as simple. They may be
5099 : allocated in ways unknown to the compiler (shared memory,
5100 : #pragma weak, etc). */
5101 5995410 : && ! TREE_PUBLIC (exp)
5102 5974795 : && ! DECL_EXTERNAL (exp)
5103 : /* DECL_VALUE_EXPR will expand to something non-simple. */
5104 5974795 : && ! ((VAR_P (exp)
5105 : || TREE_CODE (exp) == PARM_DECL
5106 : || TREE_CODE (exp) == RESULT_DECL)
5107 5974795 : && DECL_HAS_VALUE_EXPR_P (exp))
5108 : /* Weakrefs are not safe to be read, since they can be NULL.
5109 : They are !TREE_PUBLIC && !DECL_EXTERNAL but still
5110 : have DECL_WEAK flag set. */
5111 5974206 : && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
5112 : /* Loading a static variable is unduly expensive, but global
5113 : registers aren't expensive. */
5114 5974206 : && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
5115 : }
5116 :
5117 : /* Determine if an operand is simple enough to be evaluated unconditionally.
5118 : In addition to simple_operand_p, we assume that comparisons, conversions,
5119 : and logic-not operations are simple, if their operands are simple, too. */
5120 :
5121 : bool
5122 8319014 : simple_condition_p (tree exp)
5123 : {
5124 8408826 : enum tree_code code;
5125 :
5126 8408826 : if (TREE_SIDE_EFFECTS (exp) || generic_expr_could_trap_p (exp))
5127 6011503 : return false;
5128 :
5129 2431066 : while (CONVERT_EXPR_P (exp))
5130 33743 : exp = TREE_OPERAND (exp, 0);
5131 :
5132 2397323 : code = TREE_CODE (exp);
5133 :
5134 2397323 : if (TREE_CODE_CLASS (code) == tcc_comparison)
5135 1823180 : return (simple_operand_p (TREE_OPERAND (exp, 0))
5136 1823180 : && simple_operand_p (TREE_OPERAND (exp, 1)));
5137 :
5138 574143 : if (code == TRUTH_NOT_EXPR)
5139 89812 : return simple_condition_p (TREE_OPERAND (exp, 0));
5140 :
5141 484331 : return simple_operand_p (exp);
5142 : }
5143 :
5144 :
5145 : /* The following functions are subroutines to fold_range_test and allow it to
5146 : try to change a logical combination of comparisons into a range test.
5147 :
5148 : For example, both
5149 : X == 2 || X == 3 || X == 4 || X == 5
5150 : and
5151 : X >= 2 && X <= 5
5152 : are converted to
5153 : (unsigned) (X - 2) <= 3
5154 :
5155 : We describe each set of comparisons as being either inside or outside
5156 : a range, using a variable named like IN_P, and then describe the
5157 : range with a lower and upper bound. If one of the bounds is omitted,
5158 : it represents either the highest or lowest value of the type.
5159 :
5160 : In the comments below, we represent a range by two numbers in brackets
5161 : preceded by a "+" to designate being inside that range, or a "-" to
5162 : designate being outside that range, so the condition can be inverted by
5163 : flipping the prefix. An omitted bound is represented by a "-". For
5164 : example, "- [-, 10]" means being outside the range starting at the lowest
5165 : possible value and ending at 10, in other words, being greater than 10.
5166 : The range "+ [-, -]" is always true and hence the range "- [-, -]" is
5167 : always false.
5168 :
5169 : We set up things so that the missing bounds are handled in a consistent
5170 : manner so neither a missing bound nor "true" and "false" need to be
5171 : handled using a special case. */
5172 :
5173 : /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
5174 : of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
5175 : and UPPER1_P are nonzero if the respective argument is an upper bound
5176 : and zero for a lower. TYPE, if nonzero, is the type of the result; it
5177 : must be specified for a comparison. ARG1 will be converted to ARG0's
5178 : type if both are specified. */
5179 :
5180 : static tree
5181 25489071 : range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
5182 : tree arg1, int upper1_p)
5183 : {
5184 25489071 : tree tem;
5185 25489071 : int result;
5186 25489071 : int sgn0, sgn1;
5187 :
5188 : /* If neither arg represents infinity, do the normal operation.
5189 : Else, if not a comparison, return infinity. Else handle the special
5190 : comparison rules. Note that most of the cases below won't occur, but
5191 : are handled for consistency. */
5192 :
5193 25489071 : if (arg0 != 0 && arg1 != 0)
5194 : {
5195 13874716 : tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
5196 : arg0, fold_convert (TREE_TYPE (arg0), arg1));
5197 13874716 : STRIP_NOPS (tem);
5198 13874716 : return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
5199 : }
5200 :
5201 11614355 : if (TREE_CODE_CLASS (code) != tcc_comparison)
5202 : return 0;
5203 :
5204 : /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
5205 : for neither. In real maths, we cannot assume open ended ranges are
5206 : the same. But, this is computer arithmetic, where numbers are finite.
5207 : We can therefore make the transformation of any unbounded range with
5208 : the value Z, Z being greater than any representable number. This permits
5209 : us to treat unbounded ranges as equal. */
5210 11605918 : sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
5211 11605918 : sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
5212 11605918 : switch (code)
5213 : {
5214 5427557 : case EQ_EXPR:
5215 5427557 : result = sgn0 == sgn1;
5216 5427557 : break;
5217 0 : case NE_EXPR:
5218 0 : result = sgn0 != sgn1;
5219 0 : break;
5220 430240 : case LT_EXPR:
5221 430240 : result = sgn0 < sgn1;
5222 430240 : break;
5223 2659969 : case LE_EXPR:
5224 2659969 : result = sgn0 <= sgn1;
5225 2659969 : break;
5226 3088152 : case GT_EXPR:
5227 3088152 : result = sgn0 > sgn1;
5228 3088152 : break;
5229 0 : case GE_EXPR:
5230 0 : result = sgn0 >= sgn1;
5231 0 : break;
5232 0 : default:
5233 0 : gcc_unreachable ();
5234 : }
5235 :
5236 11605918 : return constant_boolean_node (result, type);
5237 : }
5238 :
5239 : /* Helper routine for make_range. Perform one step for it, return
5240 : new expression if the loop should continue or NULL_TREE if it should
5241 : stop. */
5242 :
tree
make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
		 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
		 bool *strict_overflow_p)
{
  /* ARG0/ARG1 are the operands of an expression with tree code CODE and
     type EXP_TYPE; *P_LOW, *P_HIGH and *P_IN_P describe the range known
     so far for that expression.  On success the range is refined in
     place and the expression to continue with is returned; NULL_TREE
     means the walk must stop with the current range.  */
  tree arg0_type = TREE_TYPE (arg0);
  tree n_low, n_high, low = *p_low, high = *p_high;
  int in_p = *p_in_p, n_in_p;

  switch (code)
    {
    case TRUTH_NOT_EXPR:
      /* We can only do something if the range is testing for zero.  */
      if (low == NULL_TREE || high == NULL_TREE
	  || ! integer_zerop (low) || ! integer_zerop (high))
	return NULL_TREE;
      /* Logical negation just flips the in/out sense of the range.  */
      *p_in_p = ! in_p;
      return arg0;

    case EQ_EXPR: case NE_EXPR:
    case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
      /* We can only do something if the range is testing for zero
	 and if the second operand is an integer constant.  Note that
	 saying something is "in" the range we make is done by
	 complementing IN_P since it will set in the initial case of
	 being not equal to zero; "out" is leaving it alone.  */
      if (low == NULL_TREE || high == NULL_TREE
	  || ! integer_zerop (low) || ! integer_zerop (high)
	  || TREE_CODE (arg1) != INTEGER_CST)
	return NULL_TREE;

      /* Translate the comparison against constant C into a range;
	 a NULL bound means "unbounded" on that side.  */
      switch (code)
	{
	case NE_EXPR:  /* - [c, c]  */
	  low = high = arg1;
	  break;
	case EQ_EXPR:  /* + [c, c]  */
	  in_p = ! in_p, low = high = arg1;
	  break;
	case GT_EXPR:  /* - [-, c] */
	  low = 0, high = arg1;
	  break;
	case GE_EXPR:  /* + [c, -] */
	  in_p = ! in_p, low = arg1, high = 0;
	  break;
	case LT_EXPR:  /* - [c, -] */
	  low = arg1, high = 0;
	  break;
	case LE_EXPR:  /* + [-, c] */
	  in_p = ! in_p, low = 0, high = arg1;
	  break;
	default:
	  gcc_unreachable ();
	}

      /* If this is an unsigned comparison, we also know that EXP is
	 greater than or equal to zero.  We base the range tests we make
	 on that fact, so we record it here so we can parse existing
	 range tests.  We test arg0_type since often the return type
	 of, e.g. EQ_EXPR, is boolean.  */
      if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
	{
	  if (! merge_ranges (&n_in_p, &n_low, &n_high,
			      in_p, low, high, 1,
			      build_int_cst (arg0_type, 0),
			      NULL_TREE))
	    return NULL_TREE;

	  in_p = n_in_p, low = n_low, high = n_high;

	  /* If the high bound is missing, but we have a nonzero low
	     bound, reverse the range so it goes from zero to the low bound
	     minus 1.  */
	  if (high == 0 && low && ! integer_zerop (low))
	    {
	      in_p = ! in_p;
	      high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
				  build_int_cst (TREE_TYPE (low), 1), 0);
	      low = build_int_cst (arg0_type, 0);
	    }
	}

      *p_low = low;
      *p_high = high;
      *p_in_p = in_p;
      return arg0;

    case NEGATE_EXPR:
      /* If flag_wrapv and ARG0_TYPE is signed, make sure
	 low and high are non-NULL, then normalize will DTRT.  */
      if (!TYPE_UNSIGNED (arg0_type)
	  && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
	{
	  if (low == NULL_TREE)
	    low = TYPE_MIN_VALUE (arg0_type);
	  if (high == NULL_TREE)
	    high = TYPE_MAX_VALUE (arg0_type);
	}

      /* (-x) IN [a,b] -> x in [-b, -a]  */
      n_low = range_binop (MINUS_EXPR, exp_type,
			   build_int_cst (exp_type, 0),
			   0, high, 1);
      n_high = range_binop (MINUS_EXPR, exp_type,
			    build_int_cst (exp_type, 0),
			    0, low, 0);
      if (n_high != 0 && TREE_OVERFLOW (n_high))
	return NULL_TREE;
      goto normalize;

    case BIT_NOT_EXPR:
      /* ~ X -> -X - 1  */
      /* Rewrite and let the caller process the MINUS_EXPR form.  */
      return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
			 build_int_cst (exp_type, 1));

    case PLUS_EXPR:
    case MINUS_EXPR:
      if (TREE_CODE (arg1) != INTEGER_CST)
	return NULL_TREE;

      /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
	 move a constant to the other side.  */
      if (!TYPE_UNSIGNED (arg0_type)
	  && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
	return NULL_TREE;

      /* If EXP is signed, any overflow in the computation is undefined,
	 so we don't worry about it so long as our computations on
	 the bounds don't overflow.  For unsigned, overflow is defined
	 and this is exactly the right thing.  */
      n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
			   arg0_type, low, 0, arg1, 0);
      n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
			    arg0_type, high, 1, arg1, 0);
      if ((n_low != 0 && TREE_OVERFLOW (n_low))
	  || (n_high != 0 && TREE_OVERFLOW (n_high)))
	return NULL_TREE;

      if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
	*strict_overflow_p = true;

    normalize:
      /* Check for an unsigned range which has wrapped around the maximum
	 value thus making n_high < n_low, and normalize it.  */
      if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
	{
	  low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
			     build_int_cst (TREE_TYPE (n_high), 1), 0);
	  high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
			      build_int_cst (TREE_TYPE (n_low), 1), 0);

	  /* If the range is of the form +/- [ x+1, x ], we won't
	     be able to normalize it.  But then, it represents the
	     whole range or the empty set, so make it
	     +/- [ -, - ].  */
	  if (tree_int_cst_equal (n_low, low)
	      && tree_int_cst_equal (n_high, high))
	    low = high = 0;
	  else
	    in_p = ! in_p;
	}
      else
	low = n_low, high = n_high;

      *p_low = low;
      *p_high = high;
      *p_in_p = in_p;
      return arg0;

    CASE_CONVERT:
    case NON_LVALUE_EXPR:
      /* Punt on a narrowing conversion: the inner value has bits the
	 range on the outer type cannot describe.  */
      if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
	return NULL_TREE;

      if (! INTEGRAL_TYPE_P (arg0_type)
	  || (low != 0 && ! int_fits_type_p (low, arg0_type))
	  || (high != 0 && ! int_fits_type_p (high, arg0_type)))
	return NULL_TREE;

      n_low = low, n_high = high;

      if (n_low != 0)
	n_low = fold_convert_loc (loc, arg0_type, n_low);

      if (n_high != 0)
	n_high = fold_convert_loc (loc, arg0_type, n_high);

      /* If we're converting arg0 from an unsigned type, to exp,
	 a signed type, we will be doing the comparison as unsigned.
	 The tests above have already verified that LOW and HIGH
	 are both positive.

	 So we have to ensure that we will handle large unsigned
	 values the same way that the current signed bounds treat
	 negative values.  */

      if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
	{
	  tree high_positive;
	  tree equiv_type;
	  /* For fixed-point modes, we need to pass the saturating flag
	     as the 2nd parameter.  */
	  if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
	    equiv_type
	      = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
						TYPE_SATURATING (arg0_type));
	  else if (TREE_CODE (arg0_type) == BITINT_TYPE)
	    equiv_type = arg0_type;
	  else
	    equiv_type
	      = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);

	  /* A range without an upper bound is, naturally, unbounded.
	     Since convert would have cropped a very large value, use
	     the max value for the destination type.  */
	  high_positive
	    = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
	      : TYPE_MAX_VALUE (arg0_type);

	  /* When the precisions match, the signed-positive portion is
	     [0, unsigned-max >> 1].  */
	  if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
	    high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
					     fold_convert_loc (loc, arg0_type,
							       high_positive),
					     build_int_cst (arg0_type, 1));

	  /* If the low bound is specified, "and" the range with the
	     range for which the original unsigned value will be
	     positive.  */
	  if (low != 0)
	    {
	      if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
				  1, fold_convert_loc (loc, arg0_type,
						       integer_zero_node),
				  high_positive))
		return NULL_TREE;

	      in_p = (n_in_p == in_p);
	    }
	  else
	    {
	      /* Otherwise, "or" the range with the range of the input
		 that will be interpreted as negative.  */
	      if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
				  1, fold_convert_loc (loc, arg0_type,
						       integer_zero_node),
				  high_positive))
		return NULL_TREE;

	      in_p = (in_p != n_in_p);
	    }
	}

      /* Otherwise, if we are converting arg0 from signed type, to exp,
	 an unsigned type, we will do the comparison as signed.  If
	 high is non-NULL, we punt above if it doesn't fit in the signed
	 type, so if we get through here, +[-, high] or +[low, high] are
	 equivalent to +[-, n_high] or +[n_low, n_high].  Similarly,
	 +[-, -] or -[-, -] are equivalent too.  But if low is specified and
	 high is not, the +[low, -] range is equivalent to union of
	 +[n_low, -] and +[-, -1] ranges, so +[low, -] is equivalent to
	 -[0, n_low-1] and similarly -[low, -] to +[0, n_low-1], except for
	 low being 0, which should be treated as [-, -].  */
      else if (TYPE_UNSIGNED (exp_type)
	       && !TYPE_UNSIGNED (arg0_type)
	       && low
	       && !high)
	{
	  if (integer_zerop (low))
	    n_low = NULL_TREE;
	  else
	    {
	      n_high = fold_build2_loc (loc, PLUS_EXPR, arg0_type,
					n_low, build_int_cst (arg0_type, -1));
	      n_low = build_zero_cst (arg0_type);
	      in_p = !in_p;
	    }
	}

      *p_low = n_low;
      *p_high = n_high;
      *p_in_p = in_p;
      return arg0;

    default:
      return NULL_TREE;
    }
}
5530 :
5531 : /* Given EXP, a logical expression, set the range it is testing into
5532 : variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
5533 : actually being tested. *PLOW and *PHIGH will be made of the same
5534 : type as the returned expression. If EXP is not a comparison, we
5535 : will most likely not be returning a useful value and range. Set
5536 : *STRICT_OVERFLOW_P to true if the return value is only valid
5537 : because signed overflow is undefined; otherwise, do not change
5538 : *STRICT_OVERFLOW_P. */
5539 :
tree
make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
	    bool *strict_overflow_p)
{
  tree arg0, arg1 = NULL_TREE;
  enum tree_code code;
  tree exp_type, nexp;
  int in_p;
  tree low, high;
  location_t loc = EXPR_LOCATION (exp);

  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We "continue"
     the outer loop when we've changed something; otherwise we "break"
     the switch, which will "break" the while.  */

  in_p = 0;
  low = high = build_int_cst (TREE_TYPE (exp), 0);

  while (1)
    {
      code = TREE_CODE (exp);
      exp_type = TREE_TYPE (exp);
      arg0 = NULL_TREE;

      /* Pick out the operands make_range_step needs; ARG1 is only
	 meaningful for binary-like codes.  */
      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
	{
	  if (TREE_OPERAND_LENGTH (exp) > 0)
	    arg0 = TREE_OPERAND (exp, 0);
	  if (TREE_CODE_CLASS (code) == tcc_binary
	      || TREE_CODE_CLASS (code) == tcc_comparison
	      || (TREE_CODE_CLASS (code) == tcc_expression
		  && TREE_OPERAND_LENGTH (exp) > 1))
	    arg1 = TREE_OPERAND (exp, 1);
	}
      if (arg0 == NULL_TREE)
	break;

      /* Each successful step refines LOW/HIGH/IN_P and hands back the
	 sub-expression to keep walking into.  */
      nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
			      &high, &in_p, strict_overflow_p);
      if (nexp == NULL_TREE)
	break;
      exp = nexp;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
						 exp, 0, low, 0))
		      && integer_onep (range_binop (LE_EXPR, integer_type_node,
						    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}
5600 :
5601 : /* Returns TRUE if [LOW, HIGH] range check can be optimized to
5602 : a bitwise check i.e. when
5603 : LOW == 0xXX...X00...0
5604 : HIGH == 0xXX...X11...1
5605 : Return corresponding mask in MASK and stem in VALUE. */
5606 :
5607 : static bool
5608 125 : maskable_range_p (const_tree low, const_tree high, tree type, tree *mask,
5609 : tree *value)
5610 : {
5611 125 : if (TREE_CODE (low) != INTEGER_CST
5612 125 : || TREE_CODE (high) != INTEGER_CST)
5613 : return false;
5614 :
5615 125 : unsigned prec = TYPE_PRECISION (type);
5616 125 : wide_int lo = wi::to_wide (low, prec);
5617 125 : wide_int hi = wi::to_wide (high, prec);
5618 :
5619 125 : wide_int end_mask = lo ^ hi;
5620 250 : if ((end_mask & (end_mask + 1)) != 0
5621 235 : || (lo & end_mask) != 0)
5622 : return false;
5623 :
5624 86 : wide_int stem_mask = ~end_mask;
5625 86 : wide_int stem = lo & stem_mask;
5626 86 : if (stem != (hi & stem_mask))
5627 : return false;
5628 :
5629 86 : *mask = wide_int_to_tree (type, stem_mask);
5630 86 : *value = wide_int_to_tree (type, stem);
5631 :
5632 86 : return true;
5633 211 : }
5634 :
5635 : /* Helper routine for build_range_check and match.pd. Return the type to
5636 : perform the check or NULL if it shouldn't be optimized. */
5637 :
5638 : tree
5639 675650 : range_check_type (tree etype)
5640 : {
5641 : /* First make sure that arithmetics in this type is valid, then make sure
5642 : that it wraps around. */
5643 675650 : if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
5644 73278 : etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype), 1);
5645 :
5646 675650 : if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_UNSIGNED (etype))
5647 : {
5648 448493 : tree utype, minv, maxv;
5649 :
5650 : /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
5651 : for the type in question, as we rely on this here. */
5652 448493 : utype = unsigned_type_for (etype);
5653 448493 : maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
5654 448493 : maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
5655 448493 : build_int_cst (TREE_TYPE (maxv), 1), 1);
5656 448493 : minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
5657 :
5658 448493 : if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
5659 : minv, 1, maxv, 1)))
5660 : etype = utype;
5661 : else
5662 0 : return NULL_TREE;
5663 : }
5664 227157 : else if (POINTER_TYPE_P (etype)
5665 : || TREE_CODE (etype) == OFFSET_TYPE
5666 : /* Right now all BITINT_TYPEs satisfy
5667 : (unsigned) max + 1 == (unsigned) min, so no need to verify
5668 : that like for INTEGER_TYPEs. */
5669 : || TREE_CODE (etype) == BITINT_TYPE)
5670 1365 : etype = unsigned_type_for (etype);
5671 : return etype;
5672 : }
5673 :
5674 : /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
5675 : type, TYPE, return an expression to test if EXP is in (or out of, depending
5676 : on IN_P) the range. Return 0 if the test couldn't be created. */
5677 :
tree
build_range_check (location_t loc, tree type, tree exp, int in_p,
		   tree low, tree high)
{
  tree etype = TREE_TYPE (exp), mask, value;

  /* Disable this optimization for function pointer expressions
     on targets that require function pointer canonicalization.  */
  if (targetm.have_canonicalize_funcptr_for_compare ()
      && POINTER_TYPE_P (etype)
      && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (etype)))
    return NULL_TREE;

  /* An "out of range" test is built as the negation of the
     corresponding "in range" test.  */
  if (! in_p)
    {
      value = build_range_check (loc, type, exp, 1, low, high);
      if (value != 0)
	return invert_truthvalue_loc (loc, value);

      return 0;
    }

  /* [-, -] contains everything; the check is trivially true (but EXP
     must still be evaluated for side effects).  */
  if (low == 0 && high == 0)
    return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);

  /* One-sided ranges become a single comparison.  */
  if (low == 0)
    return fold_build2_loc (loc, LE_EXPR, type, exp,
			    fold_convert_loc (loc, etype, high));

  if (high == 0)
    return fold_build2_loc (loc, GE_EXPR, type, exp,
			    fold_convert_loc (loc, etype, low));

  /* A singleton range [c, c] is an equality test.  */
  if (operand_equal_p (low, high, 0))
    return fold_build2_loc (loc, EQ_EXPR, type, exp,
			    fold_convert_loc (loc, etype, low));

  /* A range whose bounds share a stem and differ only in a block of
     low-order bits can be tested with a mask compare.  */
  if (TREE_CODE (exp) == BIT_AND_EXPR
      && maskable_range_p (low, high, etype, &mask, &value))
    return fold_build2_loc (loc, EQ_EXPR, type,
			    fold_build2_loc (loc, BIT_AND_EXPR, etype,
					     exp, mask),
			    value);

  /* [0, high] on a signed type: redo the check unsigned so a single
     <= comparison also rules out negative values.  */
  if (integer_zerop (low))
    {
      if (! TYPE_UNSIGNED (etype))
	{
	  etype = unsigned_type_for (etype);
	  high = fold_convert_loc (loc, etype, high);
	  exp = fold_convert_loc (loc, etype, exp);
	}
      return build_range_check (loc, type, exp, 1, 0, high);
    }

  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
    {
      int prec = TYPE_PRECISION (etype);

      if (wi::mask <widest_int> (prec - 1, false) == wi::to_widest (high))
	{
	  if (TYPE_UNSIGNED (etype))
	    {
	      tree signed_etype = signed_type_for (etype);
	      if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
		etype
		  = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
	      else
		etype = signed_etype;
	      exp = fold_convert_loc (loc, etype, exp);
	    }
	  return fold_build2_loc (loc, GT_EXPR, type, exp,
				  build_int_cst (etype, 0));
	}
    }

  /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
     This requires wrap-around arithmetics for the type of the expression.  */
  etype = range_check_type (etype);
  if (etype == NULL_TREE)
    return NULL_TREE;

  high = fold_convert_loc (loc, etype, high);
  low = fold_convert_loc (loc, etype, low);
  exp = fold_convert_loc (loc, etype, exp);

  value = const_binop (MINUS_EXPR, high, low);

  if (value != 0 && !TREE_OVERFLOW (value))
    return build_range_check (loc, type,
			      fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
			      1, build_int_cst (etype, 0), value);

  return 0;
}
5774 :
5775 : /* Return the predecessor of VAL in its type, handling the infinite case. */
5776 :
5777 : static tree
5778 187079 : range_predecessor (tree val)
5779 : {
5780 187079 : tree type = TREE_TYPE (val);
5781 :
5782 187079 : if (INTEGRAL_TYPE_P (type)
5783 187079 : && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
5784 : return 0;
5785 : else
5786 187079 : return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
5787 187079 : build_int_cst (TREE_TYPE (val), 1), 0);
5788 : }
5789 :
5790 : /* Return the successor of VAL in its type, handling the infinite case. */
5791 :
5792 : static tree
5793 1858741 : range_successor (tree val)
5794 : {
5795 1858741 : tree type = TREE_TYPE (val);
5796 :
5797 1858741 : if (INTEGRAL_TYPE_P (type)
5798 1858741 : && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
5799 : return 0;
5800 : else
5801 1858732 : return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
5802 1858732 : build_int_cst (TREE_TYPE (val), 1), 0);
5803 : }
5804 :
5805 : /* Given two ranges, see if we can merge them into one. Return 1 if we
5806 : can, 0 if we can't. Set the output range into the specified parameters. */
5807 :
bool
merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
	      tree high0, int in1_p, tree low1, tree high1)
{
  /* Each input range is (INx_P, LOWx, HIGHx): include or exclude the
     interval [LOWx, HIGHx], with a null bound meaning unbounded on
     that side.  On success the combined range goes into *PIN_P, *PLOW,
     *PHIGH.  */
  bool no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
  int lowequal = ((low0 == 0 && low1 == 0)
		  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
		   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
				 low0, 0, low1, 0))
      || (lowequal
	  && integer_onep (range_binop (GT_EXPR, integer_type_node,
					high1, 1, high0, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* If the second range is != high1 where high1 is the type maximum of
     the type, try first merging with < high1 range.  */
  if (low1
      && high1
      && TREE_CODE (low1) == INTEGER_CST
      && (TREE_CODE (TREE_TYPE (low1)) == INTEGER_TYPE
	  || (TREE_CODE (TREE_TYPE (low1)) == ENUMERAL_TYPE
	      && known_eq (TYPE_PRECISION (TREE_TYPE (low1)),
			   GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low1))))))
      && operand_equal_p (low1, high1, 0))
    {
      if (tree_int_cst_equal (low1, TYPE_MAX_VALUE (TREE_TYPE (low1)))
	  && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
			   !in1_p, NULL_TREE, range_predecessor (low1)))
	return true;
      /* Similarly for the second range != low1 where low1 is the type minimum
	 of the type, try first merging with > low1 range.  */
      if (tree_int_cst_equal (low1, TYPE_MIN_VALUE (TREE_TYPE (low1)))
	  && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
			   !in1_p, range_successor (low1), NULL_TREE))
	return true;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
					  high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
				      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
	 is a subset it is the result.  Otherwise, the range is from the start
	 of the second to the end of the first.  */
      if (no_overlap)
	in_p = 0, low = high = 0;
      else if (subset)
	in_p = 1, low = low1, high = high1;
      else
	in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
	 equal, the result is false.  If the second range is a subset of the
	 first, and the ranges begin at the same place, we go from just after
	 the end of the second range to the end of the first.  If the second
	 range is not a subset of the first, or if it is a subset and both
	 ranges end at the same place, the range starts at the start of the
	 first range and ends just before the second range.
	 Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
	in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
	in_p = 0, low = high = 0;
      else if (subset && lowequal)
	{
	  low = range_successor (high1);
	  high = high0;
	  in_p = 1;
	  if (low == 0)
	    {
	      /* We are in the weird situation where high0 > high1 but
		 high1 has no successor.  Punt.  */
	      return 0;
	    }
	}
      else if (! subset || highequal)
	{
	  low = low0;
	  high = range_predecessor (low1);
	  in_p = 1;
	  if (high == 0)
	    {
	      /* low0 < low1 but low1 has no predecessor.  Punt.  */
	      return 0;
	    }
	}
      else
	return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
	 is a subset of the first, the result is false.  Otherwise,
	 the range starts just after the first range and ends at the
	 end of the second.  */
      if (no_overlap)
	in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
	in_p = 0, low = high = 0;
      else
	{
	  low = range_successor (high0);
	  high = high1;
	  in_p = 1;
	  if (low == 0)
	    {
	      /* high1 > high0 but high0 has no successor.  Punt.  */
	      return 0;
	    }
	}
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
	 is if they don't overlap.  In that case, the only time we have a
	 range is if they are adjacent.  If the second is a subset of the
	 first, the result is the first.  Otherwise, the range to exclude
	 starts at the beginning of the first range and ends at the end of the
	 second.  */
      if (no_overlap)
	{
	  if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
					 range_successor (high0),
					 1, low1, 0)))
	    in_p = 0, low = low0, high = high1;
	  else
	    {
	      /* Canonicalize - [min, x] into - [-, x].  */
	      if (low0 && TREE_CODE (low0) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (low0)))
		  {
		  case ENUMERAL_TYPE:
		    if (maybe_ne (TYPE_PRECISION (TREE_TYPE (low0)),
				  GET_MODE_BITSIZE
				    (TYPE_MODE (TREE_TYPE (low0)))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (low0,
					    TYPE_MIN_VALUE (TREE_TYPE (low0))))
		      low0 = 0;
		    break;
		  case POINTER_TYPE:
		    if (TYPE_UNSIGNED (TREE_TYPE (low0))
			&& integer_zerop (low0))
		      low0 = 0;
		    break;
		  default:
		    break;
		  }

	      /* Canonicalize - [x, max] into - [x, -].  */
	      if (high1 && TREE_CODE (high1) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (high1)))
		  {
		  case ENUMERAL_TYPE:
		    if (maybe_ne (TYPE_PRECISION (TREE_TYPE (high1)),
				  GET_MODE_BITSIZE
				    (TYPE_MODE (TREE_TYPE (high1)))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (high1,
					    TYPE_MAX_VALUE (TREE_TYPE (high1))))
		      high1 = 0;
		    break;
		  case POINTER_TYPE:
		    if (TYPE_UNSIGNED (TREE_TYPE (high1))
			&& integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
						       high1, 1,
						       build_int_cst (TREE_TYPE (high1), 1),
						       1)))
		      high1 = 0;
		    break;
		  default:
		    break;
		  }

	      /* The ranges might be also adjacent between the maximum and
		 minimum values of the given type.  For
		 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
		 return + [x + 1, y - 1].  */
	      if (low0 == 0 && high1 == 0)
		{
		  low = range_successor (high0);
		  high = range_predecessor (low1);
		  if (low == 0 || high == 0)
		    return 0;

		  in_p = 1;
		}
	      else
		return 0;
	    }
	}
      else if (subset)
	in_p = 0, low = low0, high = high0;
      else
	in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
6041 :
6042 :
6043 : /* Subroutine of fold, looking inside expressions of the form
6044 : A op B ? A : C, where (ARG00, COMP_CODE, ARG01), ARG1 and ARG2
6045 : are the three operands of the COND_EXPR. This function is
6046 : being used also to optimize A op B ? C : A, by reversing the
6047 : comparison first.
6048 :
6049 : Return a folded expression whose code is not a COND_EXPR
6050 : anymore, or NULL_TREE if no folding opportunity is found. */
6051 :
6052 : static tree
6053 488223 : fold_cond_expr_with_comparison (location_t loc, tree type,
6054 : enum tree_code comp_code,
6055 : tree arg00, tree arg01, tree arg1, tree arg2)
6056 : {
6057 488223 : tree arg1_type = TREE_TYPE (arg1);
6058 488223 : tree tem;
6059 :
6060 488223 : STRIP_NOPS (arg1);
6061 488223 : STRIP_NOPS (arg2);
6062 :
6063 : /* If we have A op 0 ? A : -A, consider applying the following
6064 : transformations:
6065 :
6066 : A == 0? A : -A same as -A
6067 : A != 0? A : -A same as A
6068 : A >= 0? A : -A same as abs (A)
6069 : A > 0? A : -A same as abs (A)
6070 : A <= 0? A : -A same as -abs (A)
6071 : A < 0? A : -A same as -abs (A)
6072 :
6073 : None of these transformations work for modes with signed
6074 : zeros. If A is +/-0, the first two transformations will
6075 : change the sign of the result (from +0 to -0, or vice
6076 : versa). The last four will fix the sign of the result,
6077 : even though the original expressions could be positive or
6078 : negative, depending on the sign of A.
6079 :
6080 : Note that all these transformations are correct if A is
6081 : NaN, since the two alternatives (A and -A) are also NaNs. */
6082 488223 : if (!HONOR_SIGNED_ZEROS (type)
6083 976456 : && (FLOAT_TYPE_P (TREE_TYPE (arg01))
6084 488223 : ? real_zerop (arg01)
6085 487163 : : integer_zerop (arg01))
6086 1312158 : && ((TREE_CODE (arg2) == NEGATE_EXPR
6087 1497 : && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
6088 : /* In the case that A is of the form X-Y, '-A' (arg2) may
6089 : have already been folded to Y-X, check for that. */
6090 334439 : || (TREE_CODE (arg1) == MINUS_EXPR
6091 1718 : && TREE_CODE (arg2) == MINUS_EXPR
6092 0 : && operand_equal_p (TREE_OPERAND (arg1, 0),
6093 0 : TREE_OPERAND (arg2, 1), 0)
6094 0 : && operand_equal_p (TREE_OPERAND (arg1, 1),
6095 0 : TREE_OPERAND (arg2, 0), 0))))
6096 1273 : switch (comp_code)
6097 : {
6098 0 : case EQ_EXPR:
6099 0 : case UNEQ_EXPR:
6100 0 : tem = fold_convert_loc (loc, arg1_type, arg1);
6101 0 : return fold_convert_loc (loc, type, negate_expr (tem));
6102 0 : case NE_EXPR:
6103 0 : case LTGT_EXPR:
6104 0 : return fold_convert_loc (loc, type, arg1);
6105 0 : case UNGE_EXPR:
6106 0 : case UNGT_EXPR:
6107 0 : if (flag_trapping_math)
6108 : break;
6109 : /* Fall through. */
6110 1273 : case GE_EXPR:
6111 1273 : case GT_EXPR:
6112 1273 : if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
6113 : break;
6114 1257 : tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
6115 1257 : return fold_convert_loc (loc, type, tem);
6116 0 : case UNLE_EXPR:
6117 0 : case UNLT_EXPR:
6118 0 : if (flag_trapping_math)
6119 : break;
6120 : /* FALLTHRU */
6121 0 : case LE_EXPR:
6122 0 : case LT_EXPR:
6123 0 : if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
6124 : break;
6125 0 : if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg1))
6126 0 : && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
6127 : {
6128 : /* A <= 0 ? A : -A for A INT_MIN is valid, but -abs(INT_MIN)
6129 : is not, invokes UB both in abs and in the negation of it.
6130 : So, use ABSU_EXPR instead. */
6131 0 : tree utype = unsigned_type_for (TREE_TYPE (arg1));
6132 0 : tem = fold_build1_loc (loc, ABSU_EXPR, utype, arg1);
6133 0 : tem = negate_expr (tem);
6134 0 : return fold_convert_loc (loc, type, tem);
6135 : }
6136 : else
6137 : {
6138 0 : tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
6139 0 : return negate_expr (fold_convert_loc (loc, type, tem));
6140 : }
6141 0 : default:
6142 0 : gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
6143 : break;
6144 : }
6145 :
6146 : /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
6147 : A == 0 ? A : 0 is always 0 unless A is -0. Note that
6148 : both transformations are correct when A is NaN: A != 0
6149 : is then true, and A == 0 is false. */
6150 :
6151 486966 : if (!HONOR_SIGNED_ZEROS (type)
6152 486966 : && integer_zerop (arg01) && integer_zerop (arg2))
6153 : {
6154 256900 : if (comp_code == NE_EXPR)
6155 145 : return fold_convert_loc (loc, type, arg1);
6156 256755 : else if (comp_code == EQ_EXPR)
6157 0 : return build_zero_cst (type);
6158 : }
6159 :
6160 : /* Try some transformations of A op B ? A : B.
6161 :
6162 : A == B? A : B same as B
6163 : A != B? A : B same as A
6164 : A >= B? A : B same as max (A, B)
6165 : A > B? A : B same as max (B, A)
6166 : A <= B? A : B same as min (A, B)
6167 : A < B? A : B same as min (B, A)
6168 :
6169 : As above, these transformations don't work in the presence
6170 : of signed zeros. For example, if A and B are zeros of
6171 : opposite sign, the first two transformations will change
6172 : the sign of the result. In the last four, the original
6173 : expressions give different results for (A=+0, B=-0) and
6174 : (A=-0, B=+0), but the transformed expressions do not.
6175 :
6176 : The first two transformations are correct if either A or B
6177 : is a NaN. In the first transformation, the condition will
6178 : be false, and B will indeed be chosen. In the case of the
6179 : second transformation, the condition A != B will be true,
6180 : and A will be chosen.
6181 :
6182 : The conversions to max() and min() are not correct if B is
6183 : a number and A is not. The conditions in the original
6184 : expressions will be false, so all four give B. The min()
6185 : and max() versions would give a NaN instead. */
6186 486821 : if (!HONOR_SIGNED_ZEROS (type)
6187 486821 : && operand_equal_for_comparison_p (arg01, arg2)
6188 : /* Avoid these transformations if the COND_EXPR may be used
6189 : as an lvalue in the C++ front-end. PR c++/19199. */
6190 752246 : && (in_gimple_form
6191 17004 : || VECTOR_TYPE_P (type)
6192 16942 : || (! lang_GNU_CXX ()
6193 14427 : && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
6194 2515 : || ! maybe_lvalue_p (arg1)
6195 2494 : || ! maybe_lvalue_p (arg2)))
6196 : {
6197 263732 : tree comp_op0 = arg00;
6198 263732 : tree comp_op1 = arg01;
6199 263732 : tree comp_type = TREE_TYPE (comp_op0);
6200 :
6201 263732 : switch (comp_code)
6202 : {
6203 0 : case EQ_EXPR:
6204 0 : return fold_convert_loc (loc, type, arg2);
6205 1 : case NE_EXPR:
6206 1 : return fold_convert_loc (loc, type, arg1);
6207 5942 : case LE_EXPR:
6208 5942 : case LT_EXPR:
6209 5942 : case UNLE_EXPR:
6210 5942 : case UNLT_EXPR:
6211 : /* In C++ a ?: expression can be an lvalue, so put the
6212 : operand which will be used if they are equal first
6213 : so that we can convert this back to the
6214 : corresponding COND_EXPR. */
6215 5942 : if (!HONOR_NANS (arg1))
6216 : {
6217 5942 : comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
6218 5942 : comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
6219 5942 : tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
6220 5942 : ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
6221 4599 : : fold_build2_loc (loc, MIN_EXPR, comp_type,
6222 : comp_op1, comp_op0);
6223 5942 : return fold_convert_loc (loc, type, tem);
6224 : }
6225 : break;
6226 257789 : case GE_EXPR:
6227 257789 : case GT_EXPR:
6228 257789 : case UNGE_EXPR:
6229 257789 : case UNGT_EXPR:
6230 257789 : if (!HONOR_NANS (arg1))
6231 : {
6232 257787 : comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
6233 257787 : comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
6234 257787 : tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
6235 257787 : ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
6236 3613 : : fold_build2_loc (loc, MAX_EXPR, comp_type,
6237 : comp_op1, comp_op0);
6238 257787 : return fold_convert_loc (loc, type, tem);
6239 : }
6240 : break;
6241 0 : case UNEQ_EXPR:
6242 0 : if (!HONOR_NANS (arg1))
6243 0 : return fold_convert_loc (loc, type, arg2);
6244 : break;
6245 0 : case LTGT_EXPR:
6246 0 : if (!HONOR_NANS (arg1))
6247 0 : return fold_convert_loc (loc, type, arg1);
6248 : break;
6249 0 : default:
6250 0 : gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
6251 : break;
6252 : }
6253 : }
6254 :
6255 : return NULL_TREE;
6256 : }
6257 :
6258 :
6259 :
#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
/* Fallback when the target does not define this macro: prefer turning
   short-circuit TRUTH_ANDIF/ORIF into non-short-circuit TRUTH_AND/OR
   whenever the target's branch cost (for speed-optimized code, not
   predictable branches) is at least 2.  */
#define LOGICAL_OP_NON_SHORT_CIRCUIT \
  (BRANCH_COST (optimize_function_for_speed_p (cfun), \
		false) >= 2)
#endif
6265 :
6266 : /* EXP is some logical combination of boolean tests. See if we can
6267 : merge it into some range test. Return the new tree if so. */
6268 :
6269 : static tree
6270 26271588 : fold_range_test (location_t loc, enum tree_code code, tree type,
6271 : tree op0, tree op1)
6272 : {
6273 26271588 : int or_op = (code == TRUTH_ORIF_EXPR
6274 26271588 : || code == TRUTH_OR_EXPR);
6275 26271588 : int in0_p, in1_p, in_p;
6276 26271588 : tree low0, low1, low, high0, high1, high;
6277 26271588 : bool strict_overflow_p = false;
6278 26271588 : tree tem, lhs, rhs;
6279 26271588 : const char * const warnmsg = G_("assuming signed overflow does not occur "
6280 : "when simplifying range test");
6281 :
6282 26271588 : if (!INTEGRAL_TYPE_P (type))
6283 : return 0;
6284 :
6285 26271588 : lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
6286 : /* If op0 is known true or false and this is a short-circuiting
6287 : operation we must not merge with op1 since that makes side-effects
6288 : unconditional. So special-case this. */
6289 26271588 : if (!lhs
6290 2 : && ((code == TRUTH_ORIF_EXPR && in0_p)
6291 1 : || (code == TRUTH_ANDIF_EXPR && !in0_p)))
6292 : return op0;
6293 26271586 : rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
6294 :
6295 : /* If this is an OR operation, invert both sides; we will invert
6296 : again at the end. */
6297 26271586 : if (or_op)
6298 12146646 : in0_p = ! in0_p, in1_p = ! in1_p;
6299 :
6300 : /* If both expressions are the same, if we can merge the ranges, and we
6301 : can build the range test, return it or it inverted. If one of the
6302 : ranges is always true or always false, consider it to be the same
6303 : expression as the other. */
6304 26225343 : if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
6305 1509489 : && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
6306 : in1_p, low1, high1)
6307 27422189 : && (tem = (build_range_check (loc, type,
6308 : lhs != 0 ? lhs
6309 0 : : rhs != 0 ? rhs : integer_zero_node,
6310 : in_p, low, high))) != 0)
6311 : {
6312 1150603 : if (strict_overflow_p)
6313 315 : fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
6314 1150603 : return or_op ? invert_truthvalue_loc (loc, tem) : tem;
6315 : }
6316 :
6317 : /* On machines where the branch cost is expensive, if this is a
6318 : short-circuited branch and the underlying object on both sides
6319 : is the same, make a non-short-circuit operation. */
6320 25120983 : bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
6321 25120983 : if (param_logical_op_non_short_circuit != -1)
6322 7745 : logical_op_non_short_circuit
6323 7745 : = param_logical_op_non_short_circuit;
6324 25120983 : if (logical_op_non_short_circuit
6325 25117107 : && !sanitize_coverage_p ()
6326 25117104 : && lhs != 0 && rhs != 0
6327 25116796 : && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6328 31869604 : && operand_equal_p (lhs, rhs, 0))
6329 : {
6330 : /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
6331 : unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
6332 : which cases we can't do this. */
6333 329105 : if (simple_operand_p (lhs))
6334 364110 : return build2_loc (loc, code == TRUTH_ANDIF_EXPR
6335 : ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
6336 182568 : type, op0, op1);
6337 :
6338 146537 : else if (!lang_hooks.decls.global_bindings_p ()
6339 146537 : && !CONTAINS_PLACEHOLDER_P (lhs))
6340 : {
6341 145884 : tree common = save_expr (lhs);
6342 :
6343 254916 : if ((lhs = build_range_check (loc, type, common,
6344 109032 : or_op ? ! in0_p : in0_p,
6345 : low0, high0)) != 0
6346 254916 : && (rhs = build_range_check (loc, type, common,
6347 109032 : or_op ? ! in1_p : in1_p,
6348 : low1, high1)) != 0)
6349 : {
6350 145884 : if (strict_overflow_p)
6351 0 : fold_overflow_warning (warnmsg,
6352 : WARN_STRICT_OVERFLOW_COMPARISON);
6353 254916 : return build2_loc (loc, code == TRUTH_ANDIF_EXPR
6354 : ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
6355 145884 : type, lhs, rhs);
6356 : }
6357 : }
6358 : }
6359 :
6360 : return 0;
6361 : }
6362 :
6363 : /* For an expression that has the form
6364 : (A && B) || ~B
6365 : or
6366 : (A || B) && ~B,
6367 : we can drop one of the inner expressions and simplify to
6368 : A || ~B
6369 : or
6370 : A && ~B
6371 : LOC is the location of the resulting expression. OP is the inner
6372 : logical operation; the left-hand side in the examples above, while CMPOP
6373 : is the right-hand side. RHS_ONLY is used to prevent us from accidentally
6374 : removing a condition that guards another, as in
6375 : (A != NULL && A->...) || A == NULL
6376 : which we must not transform. If RHS_ONLY is true, only eliminate the
6377 : right-most operand of the inner logical operation. */
6378 :
6379 : static tree
6380 139026 : merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
6381 : bool rhs_only)
6382 : {
6383 139026 : enum tree_code code = TREE_CODE (cmpop);
6384 139026 : enum tree_code truthop_code = TREE_CODE (op);
6385 139026 : tree lhs = TREE_OPERAND (op, 0);
6386 139026 : tree rhs = TREE_OPERAND (op, 1);
6387 139026 : tree orig_lhs = lhs, orig_rhs = rhs;
6388 139026 : enum tree_code rhs_code = TREE_CODE (rhs);
6389 139026 : enum tree_code lhs_code = TREE_CODE (lhs);
6390 139026 : enum tree_code inv_code;
6391 :
6392 139026 : if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
6393 : return NULL_TREE;
6394 :
6395 110756 : if (TREE_CODE_CLASS (code) != tcc_comparison)
6396 : return NULL_TREE;
6397 :
6398 38509 : tree type = TREE_TYPE (TREE_OPERAND (cmpop, 0));
6399 :
6400 38509 : if (rhs_code == truthop_code)
6401 : {
6402 33 : tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
6403 33 : if (newrhs != NULL_TREE)
6404 : {
6405 0 : rhs = newrhs;
6406 0 : rhs_code = TREE_CODE (rhs);
6407 : }
6408 : }
6409 38509 : if (lhs_code == truthop_code && !rhs_only)
6410 : {
6411 464 : tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
6412 464 : if (newlhs != NULL_TREE)
6413 : {
6414 0 : lhs = newlhs;
6415 0 : lhs_code = TREE_CODE (lhs);
6416 : }
6417 : }
6418 :
6419 38509 : inv_code = invert_tree_comparison (code, HONOR_NANS (type));
6420 38509 : if (inv_code == rhs_code
6421 922 : && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
6422 38545 : && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
6423 : return lhs;
6424 38496 : if (!rhs_only && inv_code == lhs_code
6425 604 : && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
6426 38588 : && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
6427 : return rhs;
6428 38405 : if (rhs != orig_rhs || lhs != orig_lhs)
6429 0 : return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
6430 0 : lhs, rhs);
6431 : return NULL_TREE;
6432 : }
6433 :
/* Find ways of folding logical expressions of LHS and RHS:
   Try to merge two comparisons to the same innermost item.
   Look for range tests like "ch >= '0' && ch <= '9'".
   Look for combinations of simple terms on machines with expensive branches
   and evaluate the RHS unconditionally.

   We check for both normal comparisons and the BIT_AND_EXPRs made this by
   function and the one above.

   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.

   TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
   two operands.

   We return the simplified tree or 0 if no optimization is possible.  */

static tree
fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
		    tree lhs, tree rhs)
{
  /* If this is the "or" of two comparisons, we can do something if
     the comparisons are NE_EXPR.  If this is the "and", we can do something
     if the comparisons are EQ_EXPR.  I.e.,
	(a->b == 2 && a->c == 4) can become (a->new == NEW).

     WANTED_CODE is this operation code.  For single bit fields, we can
     convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
     comparison for one-bit fields.  */

  enum tree_code lcode, rcode;
  tree ll_arg, lr_arg, rl_arg, rr_arg;
  tree result;

  /* Start by getting the comparison codes.  Fail if anything is volatile.
     If one operand is a BIT_AND_EXPR with the constant one, treat it as if
     it were surrounded with a NE_EXPR.  */

  if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
    return 0;

  lcode = TREE_CODE (lhs);
  rcode = TREE_CODE (rhs);

  /* Canonicalize (x & 1) on either side to (x & 1) != 0 so the rest of
     the function only has to deal with comparisons.  */
  if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
    {
      lhs = build2 (NE_EXPR, truth_type, lhs,
		    build_int_cst (TREE_TYPE (lhs), 0));
      lcode = NE_EXPR;
    }

  if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
    {
      rhs = build2 (NE_EXPR, truth_type, rhs,
		    build_int_cst (TREE_TYPE (rhs), 0));
      rcode = NE_EXPR;
    }

  /* Everything below requires a comparison on both sides.  */
  if (TREE_CODE_CLASS (lcode) != tcc_comparison
      || TREE_CODE_CLASS (rcode) != tcc_comparison)
    return 0;

  ll_arg = TREE_OPERAND (lhs, 0);
  lr_arg = TREE_OPERAND (lhs, 1);
  rl_arg = TREE_OPERAND (rhs, 0);
  rr_arg = TREE_OPERAND (rhs, 1);

  /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
  if (simple_operand_p (ll_arg)
      && simple_operand_p (lr_arg))
    {
      /* Same operands in the same order: combine the two comparison
	 codes directly.  */
      if (operand_equal_p (ll_arg, rl_arg, 0)
	  && operand_equal_p (lr_arg, rr_arg, 0))
	{
	  result = combine_comparisons (loc, code, lcode, rcode,
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
      /* Same operands but swapped on the right: swap the right-hand
	 comparison code first, then combine.  */
      else if (operand_equal_p (ll_arg, rr_arg, 0)
	       && operand_equal_p (lr_arg, rl_arg, 0))
	{
	  result = combine_comparisons (loc, code, lcode,
					swap_tree_comparison (rcode),
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
    }

  /* From here on, only the AND/OR distinction matters; drop the
     short-circuit (ANDIF/ORIF) variant.  */
  code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
	  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);

  /* If the RHS can be evaluated unconditionally and its operands are
     simple, it wins to evaluate the RHS unconditionally on machines
     with expensive branches.  In this case, this isn't a comparison
     that can be merged.  */

  if (BRANCH_COST (optimize_function_for_speed_p (cfun),
		   false) >= 2
      && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
      && simple_operand_p (rl_arg)
      && simple_operand_p (rr_arg))
    {
      /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
      if (code == TRUTH_OR_EXPR
	  && lcode == NE_EXPR && integer_zerop (lr_arg)
	  && rcode == NE_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
	return build2_loc (loc, NE_EXPR, truth_type,
			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
				   ll_arg, rl_arg),
			   build_int_cst (TREE_TYPE (ll_arg), 0));

      /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
      if (code == TRUTH_AND_EXPR
	  && lcode == EQ_EXPR && integer_zerop (lr_arg)
	  && rcode == EQ_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
	return build2_loc (loc, EQ_EXPR, truth_type,
			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
				   ll_arg, rl_arg),
			   build_int_cst (TREE_TYPE (ll_arg), 0));
    }

  return 0;
}
6563 :
6564 : /* T is an integer expression that is being multiplied, divided, or taken a
6565 : modulus (CODE says which and what kind of divide or modulus) by a
6566 : constant C. See if we can eliminate that operation by folding it with
6567 : other operations already in T. WIDE_TYPE, if non-null, is a type that
6568 : should be used for the computation if wider than our type.
6569 :
6570 : For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6571 : (X * 2) + (Y * 4). We must, however, be assured that either the original
6572 : expression would not overflow or that overflow is undefined for the type
6573 : in the language in question.
6574 :
6575 : If we return a non-null expression, it is an equivalent form of the
6576 : original computation, but need not be in the original type.
6577 :
6578 : We set *STRICT_OVERFLOW_P to true if the return values depends on
6579 : signed overflow being undefined. Otherwise we do not change
6580 : *STRICT_OVERFLOW_P. */
6581 :
6582 : static tree
6583 100396720 : extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6584 : bool *strict_overflow_p)
6585 : {
6586 : /* To avoid exponential search depth, refuse to allow recursion past
6587 : three levels. Beyond that (1) it's highly unlikely that we'll find
6588 : something interesting and (2) we've probably processed it before
6589 : when we built the inner expression. */
6590 :
6591 100396720 : static int depth;
6592 100396720 : tree ret;
6593 :
6594 100396720 : if (depth > 3)
6595 : return NULL;
6596 :
6597 97029031 : depth++;
6598 97029031 : ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6599 97029031 : depth--;
6600 :
6601 97029031 : return ret;
6602 : }
6603 :
6604 : static tree
6605 97029031 : extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6606 : bool *strict_overflow_p)
6607 : {
6608 97029031 : tree type = TREE_TYPE (t);
6609 97029031 : enum tree_code tcode = TREE_CODE (t);
6610 97029031 : tree ctype = type;
6611 97029031 : if (wide_type)
6612 : {
6613 31902745 : if (TREE_CODE (type) == BITINT_TYPE
6614 31902626 : || TREE_CODE (wide_type) == BITINT_TYPE)
6615 : {
6616 119 : if (TYPE_PRECISION (wide_type) > TYPE_PRECISION (type))
6617 8926181 : ctype = wide_type;
6618 : }
6619 31902626 : else if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (wide_type))
6620 63805252 : > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type)))
6621 8926181 : ctype = wide_type;
6622 : }
6623 97029031 : tree t1, t2;
6624 97029031 : bool same_p = tcode == code;
6625 97029031 : tree op0 = NULL_TREE, op1 = NULL_TREE;
6626 97029031 : bool sub_strict_overflow_p;
6627 :
6628 : /* Don't deal with constants of zero here; they confuse the code below. */
6629 97029031 : if (integer_zerop (c))
6630 : return NULL_TREE;
6631 :
6632 96981548 : if (TREE_CODE_CLASS (tcode) == tcc_unary)
6633 38332098 : op0 = TREE_OPERAND (t, 0);
6634 :
6635 96981548 : if (TREE_CODE_CLASS (tcode) == tcc_binary)
6636 11942380 : op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6637 :
6638 : /* Note that we need not handle conditional operations here since fold
6639 : already handles those cases. So just do arithmetic here. */
6640 96981548 : switch (tcode)
6641 : {
6642 4258902 : case INTEGER_CST:
6643 : /* For a constant, we can always simplify if we are a multiply
6644 : or (for divide and modulus) if it is a multiple of our constant. */
6645 4258902 : if (code == MULT_EXPR
6646 5497653 : || wi::multiple_of_p (wi::to_wide (t), wi::to_wide (c),
6647 1238751 : TYPE_SIGN (type)))
6648 : {
6649 3453358 : tree tem = const_binop (code, fold_convert (ctype, t),
6650 : fold_convert (ctype, c));
6651 : /* If the multiplication overflowed, we lost information on it.
6652 : See PR68142 and PR69845. */
6653 3453358 : if (TREE_OVERFLOW (tem))
6654 : return NULL_TREE;
6655 : return tem;
6656 : }
6657 : break;
6658 :
6659 37692501 : CASE_CONVERT: case NON_LVALUE_EXPR:
6660 37692501 : if (!INTEGRAL_TYPE_P (TREE_TYPE (op0)))
6661 : break;
6662 : /* If op0 is an expression ... */
6663 36434058 : if ((COMPARISON_CLASS_P (op0)
6664 : || UNARY_CLASS_P (op0)
6665 36434058 : || BINARY_CLASS_P (op0)
6666 33490836 : || VL_EXP_CLASS_P (op0)
6667 33432135 : || EXPRESSION_CLASS_P (op0))
6668 : /* ... and has wrapping overflow, and its type is smaller
6669 : than ctype, then we cannot pass through as widening. */
6670 36582759 : && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
6671 1076917 : && (TYPE_PRECISION (ctype)
6672 1076917 : > TYPE_PRECISION (TREE_TYPE (op0))))
6673 : /* ... or this is a truncation (t is narrower than op0),
6674 : then we cannot pass through this narrowing. */
6675 2552981 : || (TYPE_PRECISION (type)
6676 2552981 : < TYPE_PRECISION (TREE_TYPE (op0)))
6677 : /* ... or signedness changes for division or modulus,
6678 : then we cannot pass through this conversion. */
6679 2522812 : || (code != MULT_EXPR
6680 122366 : && (TYPE_UNSIGNED (ctype)
6681 122366 : != TYPE_UNSIGNED (TREE_TYPE (op0))))
6682 : /* ... or has undefined overflow while the converted to
6683 : type has not, we cannot do the operation in the inner type
6684 : as that would introduce undefined overflow. */
6685 2425691 : || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
6686 1910587 : && !TYPE_OVERFLOW_UNDEFINED (type))))
6687 : break;
6688 :
6689 : /* Pass the constant down and see if we can make a simplification. If
6690 : we can, replace this expression with the inner simplification for
6691 : possible later conversion to our or some other type. */
6692 33926999 : if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6693 33926999 : && TREE_CODE (t2) == INTEGER_CST
6694 33926999 : && !TREE_OVERFLOW (t2)
6695 69114335 : && (t1 = extract_muldiv (op0, t2, code,
6696 : code == MULT_EXPR ? ctype : NULL_TREE,
6697 : strict_overflow_p)) != 0)
6698 : return t1;
6699 : break;
6700 :
6701 185 : case ABS_EXPR:
6702 : /* If widening the type changes it from signed to unsigned, then we
6703 : must avoid building ABS_EXPR itself as unsigned. */
6704 185 : if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6705 : {
6706 0 : tree cstype = (*signed_type_for) (ctype);
6707 0 : if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6708 : != 0)
6709 : {
6710 0 : t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6711 0 : return fold_convert (ctype, t1);
6712 : }
6713 : break;
6714 : }
6715 : /* If the constant is negative, we cannot simplify this. */
6716 185 : if (tree_int_cst_sgn (c) == -1)
6717 : break;
6718 : /* FALLTHROUGH */
6719 51305 : case NEGATE_EXPR:
6720 : /* For division and modulus, type can't be unsigned, as e.g.
6721 : (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6722 : For signed types, even with wrapping overflow, this is fine. */
6723 51305 : if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6724 : break;
6725 49503 : if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6726 : != 0)
6727 1 : return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6728 : break;
6729 :
6730 783 : case MIN_EXPR: case MAX_EXPR:
6731 : /* If widening the type changes the signedness, then we can't perform
6732 : this optimization as that changes the result. */
6733 783 : if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6734 : break;
6735 :
6736 : /* Punt for multiplication altogether.
6737 : MAX (1U + INT_MAX, 1U) * 2U is not equivalent to
6738 : MAX ((1U + INT_MAX) * 2U, 1U * 2U), the former is
6739 : 0U, the latter is 2U.
6740 : MAX (INT_MIN / 2, 0) * -2 is not equivalent to
6741 : MIN (INT_MIN / 2 * -2, 0 * -2), the former is
6742 : well defined 0, the latter invokes UB.
6743 : MAX (INT_MIN / 2, 5) * 5 is not equivalent to
6744 : MAX (INT_MIN / 2 * 5, 5 * 5), the former is
6745 : well defined 25, the latter invokes UB. */
6746 783 : if (code == MULT_EXPR)
6747 : break;
6748 : /* For division/modulo, punt on c being -1 for MAX, as
6749 : MAX (INT_MIN, 0) / -1 is not equivalent to
6750 : MIN (INT_MIN / -1, 0 / -1), the former is well defined
6751 : 0, the latter invokes UB (or for -fwrapv is INT_MIN).
6752 : MIN (INT_MIN, 0) / -1 already invokes UB, so the
6753 : transformation won't make it worse. */
6754 8 : else if (tcode == MAX_EXPR && integer_minus_onep (c))
6755 : break;
6756 :
6757 : /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6758 8 : sub_strict_overflow_p = false;
6759 8 : if ((t1 = extract_muldiv (op0, c, code, wide_type,
6760 : &sub_strict_overflow_p)) != 0
6761 8 : && (t2 = extract_muldiv (op1, c, code, wide_type,
6762 : &sub_strict_overflow_p)) != 0)
6763 : {
6764 0 : if (tree_int_cst_sgn (c) < 0)
6765 0 : tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6766 0 : if (sub_strict_overflow_p)
6767 0 : *strict_overflow_p = true;
6768 0 : return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6769 : fold_convert (ctype, t2));
6770 : }
6771 : break;
6772 :
6773 1343 : case LSHIFT_EXPR: case RSHIFT_EXPR:
6774 : /* If the second operand is constant, this is a multiplication
6775 : or floor division, by a power of two, so we can treat it that
6776 : way unless the multiplier or divisor overflows. Signed
6777 : left-shift overflow is implementation-defined rather than
6778 : undefined in C90, so do not convert signed left shift into
6779 : multiplication. */
6780 1343 : if (TREE_CODE (op1) == INTEGER_CST
6781 1327 : && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6782 : /* const_binop may not detect overflow correctly,
6783 : so check for it explicitly here. */
6784 1209 : && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
6785 1352 : wi::to_wide (op1))
6786 1200 : && (t1 = fold_convert (ctype,
6787 : const_binop (LSHIFT_EXPR, size_one_node,
6788 : op1))) != 0
6789 2543 : && !TREE_OVERFLOW (t1))
6790 2198 : return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6791 : ? MULT_EXPR : FLOOR_DIV_EXPR,
6792 : ctype,
6793 : fold_convert (ctype, op0),
6794 : t1),
6795 1200 : c, code, wide_type, strict_overflow_p);
6796 : break;
6797 :
6798 8192328 : case PLUS_EXPR: case MINUS_EXPR:
6799 : /* See if we can eliminate the operation on both sides. If we can, we
6800 : can return a new PLUS or MINUS. If we can't, the only remaining
6801 : cases where we can do anything are if the second operand is a
6802 : constant. */
6803 8192328 : sub_strict_overflow_p = false;
6804 8192328 : t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6805 8192328 : t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6806 815105 : if (t1 != 0 && t2 != 0
6807 276015 : && TYPE_OVERFLOW_WRAPS (ctype)
6808 8459118 : && (code == MULT_EXPR
6809 : /* If not multiplication, we can only do this if both operands
6810 : are divisible by c. */
6811 0 : || (multiple_of_p (ctype, op0, c)
6812 0 : && multiple_of_p (ctype, op1, c))))
6813 : {
6814 266790 : if (sub_strict_overflow_p)
6815 0 : *strict_overflow_p = true;
6816 266790 : return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6817 : fold_convert (ctype, t2));
6818 : }
6819 :
6820 : /* If this was a subtraction, negate OP1 and set it to be an addition.
6821 : This simplifies the logic below. */
6822 7925538 : if (tcode == MINUS_EXPR)
6823 : {
6824 2183826 : tcode = PLUS_EXPR, op1 = negate_expr (op1);
6825 : /* If OP1 was not easily negatable, the constant may be OP0. */
6826 2183826 : if (TREE_CODE (op0) == INTEGER_CST)
6827 : {
6828 367697 : std::swap (op0, op1);
6829 367697 : std::swap (t1, t2);
6830 : }
6831 : }
6832 :
6833 7925538 : if (TREE_CODE (op1) != INTEGER_CST)
6834 : break;
6835 :
6836 : /* If either OP1 or C are negative, this optimization is not safe for
6837 : some of the division and remainder types while for others we need
6838 : to change the code. */
6839 3592035 : if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6840 : {
6841 174287 : if (code == CEIL_DIV_EXPR)
6842 : code = FLOOR_DIV_EXPR;
6843 174285 : else if (code == FLOOR_DIV_EXPR)
6844 : code = CEIL_DIV_EXPR;
6845 173872 : else if (code != MULT_EXPR
6846 173872 : && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6847 : break;
6848 : }
6849 :
6850 : /* If it's a multiply or a division/modulus operation of a multiple
6851 : of our constant, do the operation and verify it doesn't overflow. */
6852 3586923 : if (code == MULT_EXPR
6853 4821298 : || wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6854 1234375 : TYPE_SIGN (type)))
6855 : {
6856 2781964 : op1 = const_binop (code, fold_convert (ctype, op1),
6857 : fold_convert (ctype, c));
6858 : /* We allow the constant to overflow with wrapping semantics. */
6859 2781964 : if (op1 == 0
6860 2781964 : || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6861 : break;
6862 : }
6863 : else
6864 : break;
6865 :
6866 : /* If we have an unsigned type, we cannot widen the operation since it
6867 : will change the result if the original computation overflowed. */
6868 2778624 : if (TYPE_UNSIGNED (ctype) && ctype != type)
6869 : break;
6870 :
6871 : /* The last case is if we are a multiply. In that case, we can
6872 : apply the distributive law to commute the multiply and addition
6873 : if the multiplication of the constants doesn't overflow
6874 : and overflow is defined. With undefined overflow
6875 : op0 * c might overflow, while (op0 + orig_op1) * c doesn't.
6876 : But fold_plusminus_mult_expr would factor back any power-of-two
6877 : value so do not distribute in the first place in this case. */
6878 2778624 : if (code == MULT_EXPR
6879 2349943 : && TYPE_OVERFLOW_WRAPS (ctype)
6880 4792600 : && !(tree_fits_shwi_p (c) && pow2p_hwi (absu_hwi (tree_to_shwi (c)))))
6881 548163 : return fold_build2 (tcode, ctype,
6882 : fold_build2 (code, ctype,
6883 : fold_convert (ctype, op0),
6884 : fold_convert (ctype, c)),
6885 : op1);
6886 :
6887 : break;
6888 :
6889 2314046 : case MULT_EXPR:
6890 : /* We have a special case here if we are doing something like
6891 : (C * 8) % 4 since we know that's zero. */
6892 2314046 : if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6893 2314046 : || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6894 : /* If the multiplication can overflow we cannot optimize this. */
6895 10982 : && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6896 338 : && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6897 2325028 : && wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6898 293 : TYPE_SIGN (type)))
6899 : {
6900 8 : *strict_overflow_p = true;
6901 8 : return omit_one_operand (type, integer_zero_node, op0);
6902 : }
6903 :
6904 : /* ... fall through ... */
6905 :
6906 2596926 : case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6907 2596926 : case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6908 : /* If we can extract our operation from the LHS, do so and return a
6909 : new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6910 : do something only if the second operand is a constant. */
6911 2596926 : if (same_p
6912 2081483 : && TYPE_OVERFLOW_WRAPS (ctype)
6913 4501469 : && (t1 = extract_muldiv (op0, c, code, wide_type,
6914 : strict_overflow_p)) != 0)
6915 65996 : return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6916 : fold_convert (ctype, op1));
6917 2530930 : else if (tcode == MULT_EXPR && code == MULT_EXPR
6918 2006451 : && TYPE_OVERFLOW_WRAPS (ctype)
6919 4360489 : && (t1 = extract_muldiv (op1, c, code, wide_type,
6920 : strict_overflow_p)) != 0)
6921 938316 : return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6922 : fold_convert (ctype, t1));
6923 1592614 : else if (TREE_CODE (op1) != INTEGER_CST)
6924 : return 0;
6925 :
6926 : /* If these are the same operation types, we can associate them
6927 : assuming no overflow. */
6928 715583 : if (tcode == code)
6929 : {
6930 200662 : bool overflow_p = false;
6931 200662 : wi::overflow_type overflow_mul;
6932 200662 : signop sign = TYPE_SIGN (ctype);
6933 200662 : unsigned prec = TYPE_PRECISION (ctype);
6934 401324 : wide_int mul = wi::mul (wi::to_wide (op1, prec),
6935 200662 : wi::to_wide (c, prec),
6936 200662 : sign, &overflow_mul);
6937 200662 : overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6938 200662 : if (overflow_mul
6939 1291 : && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6940 : overflow_p = true;
6941 200595 : if (!overflow_p)
6942 200595 : return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6943 : wide_int_to_tree (ctype, mul));
6944 200662 : }
6945 :
6946 : /* If these operations "cancel" each other, we have the main
6947 : optimizations of this pass, which occur when either constant is a
6948 : multiple of the other, in which case we replace this with either an
6949 : operation or CODE or TCODE.
6950 :
6951 : If we have an unsigned type, we cannot do this since it will change
6952 : the result if the original computation overflowed. */
6953 514988 : if (TYPE_OVERFLOW_UNDEFINED (ctype)
6954 95745 : && !TYPE_OVERFLOW_SANITIZED (ctype)
6955 610690 : && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6956 95662 : || (tcode == MULT_EXPR
6957 95662 : && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6958 852 : && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6959 848 : && code != MULT_EXPR)))
6960 : {
6961 882 : if (wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6962 882 : TYPE_SIGN (type)))
6963 : {
6964 106 : *strict_overflow_p = true;
6965 106 : return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6966 : fold_convert (ctype,
6967 : const_binop (TRUNC_DIV_EXPR,
6968 : op1, c)));
6969 : }
6970 776 : else if (wi::multiple_of_p (wi::to_wide (c), wi::to_wide (op1),
6971 776 : TYPE_SIGN (type)))
6972 : {
6973 64 : *strict_overflow_p = true;
6974 64 : return fold_build2 (code, ctype, fold_convert (ctype, op0),
6975 : fold_convert (ctype,
6976 : const_binop (TRUNC_DIV_EXPR,
6977 : c, op1)));
6978 : }
6979 : }
6980 : break;
6981 :
6982 : default:
6983 : break;
6984 : }
6985 :
6986 : return 0;
6987 : }
6988 :
6989 : /* Return a node which has the indicated constant VALUE (either 0 or
6990 : 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6991 : and is of the indicated TYPE. */
6992 :
6993 : tree
6994 106195659 : constant_boolean_node (bool value, tree type)
6995 : {
6996 106195659 : if (type == integer_type_node)
6997 22513030 : return value ? integer_one_node : integer_zero_node;
6998 83682629 : else if (type == boolean_type_node)
6999 79294907 : return value ? boolean_true_node : boolean_false_node;
7000 4387722 : else if (VECTOR_TYPE_P (type))
7001 872 : return build_vector_from_val (type,
7002 872 : build_int_cst (TREE_TYPE (type),
7003 1361 : value ? -1 : 0));
7004 : else
7005 4386850 : return fold_convert (type, value ? integer_one_node : integer_zero_node);
7006 : }
7007 :
7008 :
7009 : /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
7010 : Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
7011 : CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
7012 : expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
7013 : COND is the first argument to CODE; otherwise (as in the example
7014 : given here), it is the second argument. TYPE is the type of the
7015 : original expression. Return NULL_TREE if no simplification is
7016 : possible. */
7017 :
7018 : static tree
7019 1063495 : fold_binary_op_with_conditional_arg (location_t loc,
7020 : enum tree_code code,
7021 : tree type, tree op0, tree op1,
7022 : tree cond, tree arg, int cond_first_p)
7023 : {
7024 1063495 : tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
7025 1063495 : tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
7026 1063495 : tree test, true_value, false_value;
7027 1063495 : tree lhs = NULL_TREE;
7028 1063495 : tree rhs = NULL_TREE;
7029 1063495 : enum tree_code cond_code = COND_EXPR;
7030 :
7031 : /* Do not move possibly trapping operations into the conditional as this
7032 : pessimizes code and causes gimplification issues when applied late. */
7033 1082498 : if (operation_could_trap_p (code, FLOAT_TYPE_P (type),
7034 186633 : ANY_INTEGRAL_TYPE_P (type)
7035 1065991 : && TYPE_OVERFLOW_TRAPS (type), op1))
7036 : return NULL_TREE;
7037 :
7038 1044328 : if (TREE_CODE (cond) == COND_EXPR
7039 456163 : || TREE_CODE (cond) == VEC_COND_EXPR)
7040 : {
7041 590498 : test = TREE_OPERAND (cond, 0);
7042 590498 : true_value = TREE_OPERAND (cond, 1);
7043 590498 : false_value = TREE_OPERAND (cond, 2);
7044 : /* If this operand throws an expression, then it does not make
7045 : sense to try to perform a logical or arithmetic operation
7046 : involving it. */
7047 590498 : if (VOID_TYPE_P (TREE_TYPE (true_value)))
7048 7463 : lhs = true_value;
7049 590498 : if (VOID_TYPE_P (TREE_TYPE (false_value)))
7050 6 : rhs = false_value;
7051 : }
7052 453830 : else if (!(TREE_CODE (type) != VECTOR_TYPE
7053 453676 : && VECTOR_TYPE_P (TREE_TYPE (cond))))
7054 : {
7055 451818 : tree testtype = TREE_TYPE (cond);
7056 451818 : test = cond;
7057 451818 : true_value = constant_boolean_node (true, testtype);
7058 451818 : false_value = constant_boolean_node (false, testtype);
7059 : }
7060 : else
7061 : /* Detect the case of mixing vector and scalar types - bail out. */
7062 : return NULL_TREE;
7063 :
7064 1042316 : if (VECTOR_TYPE_P (TREE_TYPE (test)))
7065 2487 : cond_code = VEC_COND_EXPR;
7066 :
7067 : /* This transformation is only worthwhile if we don't have to wrap ARG
7068 : in a SAVE_EXPR and the operation can be simplified without recursing
7069 : on at least one of the branches once its pushed inside the COND_EXPR. */
7070 1042316 : if (!TREE_CONSTANT (arg)
7071 1042316 : && (TREE_SIDE_EFFECTS (arg)
7072 569015 : || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
7073 564749 : || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
7074 : return NULL_TREE;
7075 :
7076 488815 : arg = fold_convert_loc (loc, arg_type, arg);
7077 488815 : if (lhs == 0)
7078 : {
7079 482784 : true_value = fold_convert_loc (loc, cond_type, true_value);
7080 482784 : if (cond_first_p)
7081 473154 : lhs = fold_build2_loc (loc, code, type, true_value, arg);
7082 : else
7083 9630 : lhs = fold_build2_loc (loc, code, type, arg, true_value);
7084 : }
7085 488815 : if (rhs == 0)
7086 : {
7087 488809 : false_value = fold_convert_loc (loc, cond_type, false_value);
7088 488809 : if (cond_first_p)
7089 478618 : rhs = fold_build2_loc (loc, code, type, false_value, arg);
7090 : else
7091 10191 : rhs = fold_build2_loc (loc, code, type, arg, false_value);
7092 : }
7093 :
7094 : /* Check that we have simplified at least one of the branches. */
7095 488815 : if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
7096 : return NULL_TREE;
7097 :
7098 469097 : return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
7099 : }
7100 :
7101 :
7102 : /* Subroutine of fold() that checks for the addition of ARG +/- 0.0.
7103 :
7104 : If !NEGATE, return true if ZERO_ARG is +/-0.0 and, for all ARG of
7105 : type TYPE, ARG + ZERO_ARG is the same as ARG. If NEGATE, return true
7106 : if ARG - ZERO_ARG is the same as X.
7107 :
7108 : If ARG is NULL, check for any value of type TYPE.
7109 :
7110 : X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
7111 : and finite. The problematic cases are when X is zero, and its mode
7112 : has signed zeros. In the case of rounding towards -infinity,
7113 : X - 0 is not the same as X because 0 - 0 is -0. In other rounding
7114 : modes, X + 0 is not the same as X because -0 + 0 is 0. */
7115 :
7116 : bool
7117 642752 : fold_real_zero_addition_p (const_tree type, const_tree arg,
7118 : const_tree zero_arg, int negate)
7119 : {
7120 642752 : if (!real_zerop (zero_arg))
7121 : return false;
7122 :
7123 : /* Don't allow the fold with -fsignaling-nans. */
7124 642071 : if (arg ? tree_expr_maybe_signaling_nan_p (arg) : HONOR_SNANS (type))
7125 : return false;
7126 :
7127 : /* Allow the fold if zeros aren't signed, or their sign isn't important. */
7128 638733 : if (!HONOR_SIGNED_ZEROS (type))
7129 : return true;
7130 :
7131 : /* There is no case that is safe for all rounding modes. */
7132 621824 : if (HONOR_SIGN_DEPENDENT_ROUNDING (type))
7133 : return false;
7134 :
7135 : /* In a vector or complex, we would need to check the sign of all zeros. */
7136 621161 : if (TREE_CODE (zero_arg) == VECTOR_CST)
7137 1524 : zero_arg = uniform_vector_p (zero_arg);
7138 621161 : if (!zero_arg || TREE_CODE (zero_arg) != REAL_CST)
7139 1178 : return false;
7140 :
7141 : /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
7142 619983 : if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (zero_arg)))
7143 254 : negate = !negate;
7144 :
7145 : /* The mode has signed zeros, and we have to honor their sign.
7146 : In this situation, there are only two cases we can return true for.
7147 : (i) X - 0 is the same as X with default rounding.
7148 : (ii) X + 0 is X when X can't possibly be -0.0. */
7149 619983 : return negate || (arg && !tree_expr_maybe_real_minus_zero_p (arg));
7150 : }
7151 :
7152 : /* Subroutine of match.pd that optimizes comparisons of a division by
7153 : a nonzero integer constant against an integer constant, i.e.
7154 : X/C1 op C2.
7155 :
7156 : CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
7157 : GE_EXPR or LE_EXPR. ARG01 and ARG1 must be a INTEGER_CST. */
7158 :
7159 : enum tree_code
7160 1779323 : fold_div_compare (enum tree_code code, tree c1, tree c2, tree *lo,
7161 : tree *hi, bool *neg_overflow)
7162 : {
7163 1779323 : tree prod, tmp, type = TREE_TYPE (c1);
7164 1779323 : signop sign = TYPE_SIGN (type);
7165 1779323 : wi::overflow_type overflow;
7166 :
7167 : /* We have to do this the hard way to detect unsigned overflow.
7168 : prod = int_const_binop (MULT_EXPR, c1, c2); */
7169 1779323 : wide_int val = wi::mul (wi::to_wide (c1), wi::to_wide (c2), sign, &overflow);
7170 1779323 : prod = force_fit_type (type, val, -1, overflow);
7171 1779323 : *neg_overflow = false;
7172 :
7173 1779323 : if (sign == UNSIGNED)
7174 : {
7175 1749974 : tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7176 1749974 : *lo = prod;
7177 :
7178 : /* Likewise *hi = int_const_binop (PLUS_EXPR, prod, tmp). */
7179 1749974 : val = wi::add (wi::to_wide (prod), wi::to_wide (tmp), sign, &overflow);
7180 1749974 : *hi = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (prod));
7181 : }
7182 29349 : else if (tree_int_cst_sgn (c1) >= 0)
7183 : {
7184 27946 : tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7185 27946 : switch (tree_int_cst_sgn (c2))
7186 : {
7187 4824 : case -1:
7188 4824 : *neg_overflow = true;
7189 4824 : *lo = int_const_binop (MINUS_EXPR, prod, tmp);
7190 4824 : *hi = prod;
7191 4824 : break;
7192 :
7193 14685 : case 0:
7194 14685 : *lo = fold_negate_const (tmp, type);
7195 14685 : *hi = tmp;
7196 14685 : break;
7197 :
7198 8437 : case 1:
7199 8437 : *hi = int_const_binop (PLUS_EXPR, prod, tmp);
7200 8437 : *lo = prod;
7201 8437 : break;
7202 :
7203 0 : default:
7204 0 : gcc_unreachable ();
7205 : }
7206 : }
7207 : else
7208 : {
7209 : /* A negative divisor reverses the relational operators. */
7210 1403 : code = swap_tree_comparison (code);
7211 :
7212 1403 : tmp = int_const_binop (PLUS_EXPR, c1, build_int_cst (type, 1));
7213 1403 : switch (tree_int_cst_sgn (c2))
7214 : {
7215 132 : case -1:
7216 132 : *hi = int_const_binop (MINUS_EXPR, prod, tmp);
7217 132 : *lo = prod;
7218 132 : break;
7219 :
7220 173 : case 0:
7221 173 : *hi = fold_negate_const (tmp, type);
7222 173 : *lo = tmp;
7223 173 : break;
7224 :
7225 1098 : case 1:
7226 1098 : *neg_overflow = true;
7227 1098 : *lo = int_const_binop (PLUS_EXPR, prod, tmp);
7228 1098 : *hi = prod;
7229 1098 : break;
7230 :
7231 0 : default:
7232 0 : gcc_unreachable ();
7233 : }
7234 : }
7235 :
7236 1779323 : if (code != EQ_EXPR && code != NE_EXPR)
7237 : return code;
7238 :
7239 16550 : if (TREE_OVERFLOW (*lo)
7240 16550 : || operand_equal_p (*lo, TYPE_MIN_VALUE (type), 0))
7241 708 : *lo = NULL_TREE;
7242 16550 : if (TREE_OVERFLOW (*hi)
7243 16550 : || operand_equal_p (*hi, TYPE_MAX_VALUE (type), 0))
7244 95 : *hi = NULL_TREE;
7245 :
7246 : return code;
7247 1779323 : }
7248 :
7249 : /* Test whether it is preferable to swap two operands, ARG0 and
7250 : ARG1, for example because ARG0 is an integer constant and ARG1
7251 : isn't. */
7252 :
7253 : bool
7254 1598042014 : tree_swap_operands_p (const_tree arg0, const_tree arg1)
7255 : {
7256 1598042014 : if (CONSTANT_CLASS_P (arg1))
7257 : return false;
7258 514739054 : if (CONSTANT_CLASS_P (arg0))
7259 : return true;
7260 :
7261 474022203 : STRIP_NOPS (arg0);
7262 474022203 : STRIP_NOPS (arg1);
7263 :
7264 474022203 : if (TREE_CONSTANT (arg1))
7265 : return false;
7266 458813266 : if (TREE_CONSTANT (arg0))
7267 : return true;
7268 :
7269 : /* Put addresses in arg1. */
7270 458046008 : if (TREE_CODE (arg1) == ADDR_EXPR)
7271 : return false;
7272 441058955 : if (TREE_CODE (arg0) == ADDR_EXPR)
7273 : return true;
7274 :
7275 : /* It is preferable to swap two SSA_NAME to ensure a canonical form
7276 : for commutative and comparison operators. Ensuring a canonical
7277 : form allows the optimizers to find additional redundancies without
7278 : having to explicitly check for both orderings. */
7279 440694216 : if (TREE_CODE (arg0) == SSA_NAME
7280 327181719 : && TREE_CODE (arg1) == SSA_NAME
7281 762044451 : && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
7282 : return true;
7283 :
7284 : /* Put SSA_NAMEs last. */
7285 418145683 : if (TREE_CODE (arg1) == SSA_NAME)
7286 : return false;
7287 104704000 : if (TREE_CODE (arg0) == SSA_NAME)
7288 : return true;
7289 :
7290 : /* Put variables last. */
7291 98872516 : if (DECL_P (arg1))
7292 : return false;
7293 53397776 : if (DECL_P (arg0))
7294 : return true;
7295 :
7296 : return false;
7297 : }
7298 :
7299 :
7300 : /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7301 : means A >= Y && A != MAX, but in this case we know that
7302 : A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7303 :
7304 : static tree
7305 24755438 : fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7306 : {
7307 24755438 : tree a, typea, type = TREE_TYPE (bound), a1, diff, y;
7308 :
7309 24755438 : if (TREE_CODE (bound) == LT_EXPR)
7310 4951959 : a = TREE_OPERAND (bound, 0);
7311 19803479 : else if (TREE_CODE (bound) == GT_EXPR)
7312 2792283 : a = TREE_OPERAND (bound, 1);
7313 : else
7314 : return NULL_TREE;
7315 :
7316 7744242 : typea = TREE_TYPE (a);
7317 7744242 : if (!INTEGRAL_TYPE_P (typea)
7318 462413 : && !POINTER_TYPE_P (typea))
7319 : return NULL_TREE;
7320 :
7321 7572511 : if (TREE_CODE (ineq) == LT_EXPR)
7322 : {
7323 1495846 : a1 = TREE_OPERAND (ineq, 1);
7324 1495846 : y = TREE_OPERAND (ineq, 0);
7325 : }
7326 6076665 : else if (TREE_CODE (ineq) == GT_EXPR)
7327 : {
7328 1144499 : a1 = TREE_OPERAND (ineq, 0);
7329 1144499 : y = TREE_OPERAND (ineq, 1);
7330 : }
7331 : else
7332 : return NULL_TREE;
7333 :
7334 2640345 : if (TREE_TYPE (a1) != typea)
7335 : return NULL_TREE;
7336 :
7337 1842811 : if (POINTER_TYPE_P (typea))
7338 : {
7339 : /* Convert the pointer types into integer before taking the difference. */
7340 10422 : tree ta = fold_convert_loc (loc, ssizetype, a);
7341 10422 : tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7342 10422 : diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7343 : }
7344 : else
7345 1832389 : diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7346 :
7347 1842811 : if (!diff || !integer_onep (diff))
7348 1832607 : return NULL_TREE;
7349 :
7350 10204 : return fold_build2_loc (loc, GE_EXPR, type, a, y);
7351 : }
7352 :
7353 : /* Fold a sum or difference of at least one multiplication.
7354 : Returns the folded tree or NULL if no simplification could be made. */
7355 :
7356 : static tree
7357 8889630 : fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7358 : tree arg0, tree arg1)
7359 : {
7360 8889630 : tree arg00, arg01, arg10, arg11;
7361 8889630 : tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7362 :
7363 : /* (A * C) +- (B * C) -> (A+-B) * C.
7364 : (A * C) +- A -> A * (C+-1).
7365 : We are most concerned about the case where C is a constant,
7366 : but other combinations show up during loop reduction. Since
7367 : it is not difficult, try all four possibilities. */
7368 :
7369 8889630 : if (TREE_CODE (arg0) == MULT_EXPR)
7370 : {
7371 7866369 : arg00 = TREE_OPERAND (arg0, 0);
7372 7866369 : arg01 = TREE_OPERAND (arg0, 1);
7373 : }
7374 1023261 : else if (TREE_CODE (arg0) == INTEGER_CST)
7375 : {
7376 74532 : arg00 = build_one_cst (type);
7377 74532 : arg01 = arg0;
7378 : }
7379 : else
7380 : {
7381 : /* We cannot generate constant 1 for fract. */
7382 948729 : if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7383 0 : return NULL_TREE;
7384 948729 : arg00 = arg0;
7385 948729 : arg01 = build_one_cst (type);
7386 : }
7387 8889630 : if (TREE_CODE (arg1) == MULT_EXPR)
7388 : {
7389 2416384 : arg10 = TREE_OPERAND (arg1, 0);
7390 2416384 : arg11 = TREE_OPERAND (arg1, 1);
7391 : }
7392 6473246 : else if (TREE_CODE (arg1) == INTEGER_CST)
7393 : {
7394 3464967 : arg10 = build_one_cst (type);
7395 : /* As we canonicalize A - 2 to A + -2 get rid of that sign for
7396 : the purpose of this canonicalization. */
7397 6706010 : if (wi::neg_p (wi::to_wide (arg1), TYPE_SIGN (TREE_TYPE (arg1)))
7398 227212 : && negate_expr_p (arg1)
7399 3688891 : && code == PLUS_EXPR)
7400 : {
7401 223924 : arg11 = negate_expr (arg1);
7402 223924 : code = MINUS_EXPR;
7403 : }
7404 : else
7405 : arg11 = arg1;
7406 : }
7407 : else
7408 : {
7409 : /* We cannot generate constant 1 for fract. */
7410 3008279 : if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7411 0 : return NULL_TREE;
7412 3008279 : arg10 = arg1;
7413 3008279 : arg11 = build_one_cst (type);
7414 : }
7415 8889630 : same = NULL_TREE;
7416 :
7417 : /* Prefer factoring a common non-constant. */
7418 8889630 : if (operand_equal_p (arg00, arg10, 0))
7419 : same = arg00, alt0 = arg01, alt1 = arg11;
7420 8885706 : else if (operand_equal_p (arg01, arg11, 0))
7421 : same = arg01, alt0 = arg00, alt1 = arg10;
7422 8768032 : else if (operand_equal_p (arg00, arg11, 0))
7423 : same = arg00, alt0 = arg01, alt1 = arg10;
7424 8767994 : else if (operand_equal_p (arg01, arg10, 0))
7425 : same = arg01, alt0 = arg00, alt1 = arg11;
7426 :
7427 : /* No identical multiplicands; see if we can find a common
7428 : power-of-two factor in non-power-of-two multiplies. This
7429 : can help in multi-dimensional array access. */
7430 8763146 : else if (tree_fits_shwi_p (arg01) && tree_fits_shwi_p (arg11))
7431 : {
7432 7350062 : HOST_WIDE_INT int01 = tree_to_shwi (arg01);
7433 7350062 : HOST_WIDE_INT int11 = tree_to_shwi (arg11);
7434 7350062 : HOST_WIDE_INT tmp;
7435 7350062 : bool swap = false;
7436 7350062 : tree maybe_same;
7437 :
7438 : /* Move min of absolute values to int11. */
7439 7350062 : if (absu_hwi (int01) < absu_hwi (int11))
7440 : {
7441 : tmp = int01, int01 = int11, int11 = tmp;
7442 : alt0 = arg00, arg00 = arg10, arg10 = alt0;
7443 : maybe_same = arg01;
7444 : swap = true;
7445 : }
7446 : else
7447 3779569 : maybe_same = arg11;
7448 :
7449 7350062 : const unsigned HOST_WIDE_INT factor = absu_hwi (int11);
7450 7350062 : if (factor > 1
7451 9709258 : && pow2p_hwi (factor)
7452 2113784 : && (int01 & (factor - 1)) == 0
7453 : /* The remainder should not be a constant, otherwise we
7454 : end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7455 : increased the number of multiplications necessary. */
7456 8651233 : && TREE_CODE (arg10) != INTEGER_CST)
7457 : {
7458 1167672 : alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7459 1167672 : build_int_cst (TREE_TYPE (arg00),
7460 1167672 : int01 / int11));
7461 1167672 : alt1 = arg10;
7462 1167672 : same = maybe_same;
7463 1167672 : if (swap)
7464 1054894 : maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7465 : }
7466 : }
7467 :
7468 1294156 : if (!same)
7469 7595474 : return NULL_TREE;
7470 :
7471 7 : if (! ANY_INTEGRAL_TYPE_P (type)
7472 1294156 : || TYPE_OVERFLOW_WRAPS (type)
7473 : /* We are neither factoring zero nor minus one. */
7474 1421510 : || TREE_CODE (same) == INTEGER_CST)
7475 1282591 : return fold_build2_loc (loc, MULT_EXPR, type,
7476 : fold_build2_loc (loc, code, type,
7477 : fold_convert_loc (loc, type, alt0),
7478 : fold_convert_loc (loc, type, alt1)),
7479 1282591 : fold_convert_loc (loc, type, same));
7480 :
7481 : /* Same may be zero and thus the operation 'code' may overflow. Likewise
7482 : same may be minus one and thus the multiplication may overflow. Perform
7483 : the sum operation in an unsigned type. */
7484 11565 : tree utype = unsigned_type_for (type);
7485 11565 : tree tem = fold_build2_loc (loc, code, utype,
7486 : fold_convert_loc (loc, utype, alt0),
7487 : fold_convert_loc (loc, utype, alt1));
7488 : /* If the sum evaluated to a constant that is not -INF the multiplication
7489 : cannot overflow. */
7490 23130 : if (TREE_CODE (tem) == INTEGER_CST
7491 18269 : && (wi::to_wide (tem)
7492 18269 : != wi::min_value (TYPE_PRECISION (utype), SIGNED)))
7493 3339 : return fold_build2_loc (loc, MULT_EXPR, type,
7494 3339 : fold_convert (type, tem), same);
7495 :
7496 : /* Do not resort to unsigned multiplication because
7497 : we lose the no-overflow property of the expression. */
7498 : return NULL_TREE;
7499 : }
7500 :
7501 :
7502 : /* Subroutine of native_encode_int. Encode the integer VAL with type TYPE
7503 : into the buffer PTR of length LEN bytes.
7504 : Return the number of bytes placed in the buffer, or zero
7505 : upon failure. */
7506 :
7507 : int
7508 53484189 : native_encode_wide_int (tree type, const wide_int_ref &val,
7509 : unsigned char *ptr, int len, int off)
7510 : {
7511 53484189 : int total_bytes;
7512 53484189 : if (TREE_CODE (type) == BITINT_TYPE)
7513 : {
7514 17143 : struct bitint_info info;
7515 17143 : bool ok = targetm.c.bitint_type_info (TYPE_PRECISION (type), &info);
7516 17143 : gcc_assert (ok);
7517 17143 : scalar_int_mode limb_mode = as_a <scalar_int_mode> (info.limb_mode);
7518 17143 : if (TYPE_PRECISION (type) > GET_MODE_PRECISION (limb_mode))
7519 : {
7520 17020 : total_bytes = tree_to_uhwi (TYPE_SIZE_UNIT (type));
7521 : /* More work is needed when adding _BitInt support to PDP endian
7522 : if limb is smaller than word, or if _BitInt limb ordering doesn't
7523 : match target endianity here. */
7524 17020 : gcc_checking_assert (info.big_endian == WORDS_BIG_ENDIAN
7525 : && (BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
7526 : || (GET_MODE_SIZE (limb_mode)
7527 : >= UNITS_PER_WORD)));
7528 : }
7529 : else
7530 246 : total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7531 : }
7532 : else
7533 106934092 : total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7534 53484189 : int byte, offset, word, words;
7535 53484189 : unsigned char value;
7536 :
7537 53484189 : if ((off == -1 && total_bytes > len) || off >= total_bytes)
7538 : return 0;
7539 53483701 : if (off == -1)
7540 52587160 : off = 0;
7541 :
7542 53483701 : if (ptr == NULL)
7543 : /* Dry run. */
7544 2763617 : return MIN (len, total_bytes - off);
7545 :
7546 : words = total_bytes / UNITS_PER_WORD;
7547 :
7548 245775823 : for (byte = 0; byte < total_bytes; byte++)
7549 : {
7550 195055739 : int bitpos = byte * BITS_PER_UNIT;
7551 : /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7552 : number of bytes. */
7553 195055739 : value = wi::extract_uhwi (val, bitpos, BITS_PER_UNIT);
7554 :
7555 195055739 : if (total_bytes > UNITS_PER_WORD)
7556 : {
7557 195055739 : word = byte / UNITS_PER_WORD;
7558 195055739 : if (WORDS_BIG_ENDIAN)
7559 : word = (words - 1) - word;
7560 195055739 : offset = word * UNITS_PER_WORD;
7561 195055739 : if (BYTES_BIG_ENDIAN)
7562 : offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7563 : else
7564 195055739 : offset += byte % UNITS_PER_WORD;
7565 : }
7566 : else
7567 : offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7568 195055739 : if (offset >= off && offset - off < len)
7569 193763324 : ptr[offset - off] = value;
7570 : }
7571 50720084 : return MIN (len, total_bytes - off);
7572 : }
7573 :
7574 : /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7575 : specified by EXPR into the buffer PTR of length LEN bytes.
7576 : Return the number of bytes placed in the buffer, or zero
7577 : upon failure. */
7578 :
7579 : static int
7580 53484189 : native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7581 : {
7582 53484189 : return native_encode_wide_int (TREE_TYPE (expr), wi::to_widest (expr),
7583 53484189 : ptr, len, off);
7584 : }
7585 :
7586 :
7587 : /* Subroutine of native_encode_expr. Encode the FIXED_CST
7588 : specified by EXPR into the buffer PTR of length LEN bytes.
7589 : Return the number of bytes placed in the buffer, or zero
7590 : upon failure. */
7591 :
7592 : static int
7593 0 : native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7594 : {
7595 0 : tree type = TREE_TYPE (expr);
7596 0 : scalar_mode mode = SCALAR_TYPE_MODE (type);
7597 0 : int total_bytes = GET_MODE_SIZE (mode);
7598 0 : FIXED_VALUE_TYPE value;
7599 0 : tree i_value, i_type;
7600 :
7601 0 : if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7602 : return 0;
7603 :
7604 0 : i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7605 :
7606 0 : if (NULL_TREE == i_type || TYPE_PRECISION (i_type) != total_bytes)
7607 : return 0;
7608 :
7609 0 : value = TREE_FIXED_CST (expr);
7610 0 : i_value = double_int_to_tree (i_type, value.data);
7611 :
7612 0 : return native_encode_int (i_value, ptr, len, off);
7613 : }
7614 :
7615 :
7616 : /* Subroutine of native_encode_expr. Encode the REAL_CST
7617 : specified by EXPR into the buffer PTR of length LEN bytes.
7618 : Return the number of bytes placed in the buffer, or zero
7619 : upon failure. */
7620 :
7621 : int
7622 830012 : native_encode_real (scalar_float_mode mode, const REAL_VALUE_TYPE *val,
7623 : unsigned char *ptr, int len, int off)
7624 : {
7625 830012 : int total_bytes = GET_MODE_SIZE (mode);
7626 830012 : int byte, offset, word, words, bitpos;
7627 830012 : unsigned char value;
7628 :
7629 : /* There are always 32 bits in each long, no matter the size of
7630 : the hosts long. We handle floating point representations with
7631 : up to 192 bits. */
7632 830012 : long tmp[6];
7633 :
7634 830012 : if ((off == -1 && total_bytes > len) || off >= total_bytes)
7635 : return 0;
7636 828192 : if (off == -1)
7637 687965 : off = 0;
7638 :
7639 828192 : if (ptr == NULL)
7640 : /* Dry run. */
7641 130857 : return MIN (len, total_bytes - off);
7642 :
7643 697335 : words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7644 :
7645 697335 : real_to_target (tmp, val, mode);
7646 :
7647 6700273 : for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7648 6002938 : bitpos += BITS_PER_UNIT)
7649 : {
7650 6002938 : byte = (bitpos / BITS_PER_UNIT) & 3;
7651 6002938 : value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7652 :
7653 6002938 : if (UNITS_PER_WORD < 4)
7654 : {
7655 : word = byte / UNITS_PER_WORD;
7656 : if (WORDS_BIG_ENDIAN)
7657 : word = (words - 1) - word;
7658 : offset = word * UNITS_PER_WORD;
7659 : if (BYTES_BIG_ENDIAN)
7660 : offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7661 : else
7662 : offset += byte % UNITS_PER_WORD;
7663 : }
7664 : else
7665 : {
7666 6002938 : offset = byte;
7667 6002938 : if (BYTES_BIG_ENDIAN)
7668 : {
7669 : /* Reverse bytes within each long, or within the entire float
7670 : if it's smaller than a long (for HFmode). */
7671 : offset = MIN (3, total_bytes - 1) - offset;
7672 : gcc_assert (offset >= 0);
7673 : }
7674 : }
7675 6002938 : offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7676 6002938 : if (offset >= off
7677 5999670 : && offset - off < len)
7678 5982202 : ptr[offset - off] = value;
7679 : }
7680 697335 : return MIN (len, total_bytes - off);
7681 : }
7682 :
7683 : /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7684 : specified by EXPR into the buffer PTR of length LEN bytes.
7685 : Return the number of bytes placed in the buffer, or zero
7686 : upon failure. */
7687 :
7688 : static int
7689 15404 : native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7690 : {
7691 15404 : int rsize, isize;
7692 15404 : tree part;
7693 :
7694 15404 : part = TREE_REALPART (expr);
7695 15404 : rsize = native_encode_expr (part, ptr, len, off);
7696 15404 : if (off == -1 && rsize == 0)
7697 : return 0;
7698 15404 : part = TREE_IMAGPART (expr);
7699 15404 : if (off != -1)
7700 30483 : off = MAX (0, off - GET_MODE_SIZE (SCALAR_TYPE_MODE (TREE_TYPE (part))));
7701 15404 : isize = native_encode_expr (part, ptr ? ptr + rsize : NULL,
7702 : len - rsize, off);
7703 15404 : if (off == -1 && isize != rsize)
7704 : return 0;
7705 15404 : return rsize + isize;
7706 : }
7707 :
7708 : /* Like native_encode_vector, but only encode the first COUNT elements.
7709 : The other arguments are as for native_encode_vector. */
7710 :
7711 : static int
7712 879626 : native_encode_vector_part (const_tree expr, unsigned char *ptr, int len,
7713 : int off, unsigned HOST_WIDE_INT count)
7714 : {
7715 879626 : tree itype = TREE_TYPE (TREE_TYPE (expr));
7716 1759252 : if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (expr))
7717 880644 : && TYPE_PRECISION (itype) <= BITS_PER_UNIT)
7718 : {
7719 : /* This is the only case in which elements can be smaller than a byte.
7720 : Element 0 is always in the lsb of the containing byte. */
7721 940 : unsigned int elt_bits = TYPE_PRECISION (itype);
7722 940 : int total_bytes = CEIL (elt_bits * count, BITS_PER_UNIT);
7723 940 : if ((off == -1 && total_bytes > len) || off >= total_bytes)
7724 : return 0;
7725 :
7726 940 : if (off == -1)
7727 940 : off = 0;
7728 :
7729 : /* Zero the buffer and then set bits later where necessary. */
7730 940 : int extract_bytes = MIN (len, total_bytes - off);
7731 940 : if (ptr)
7732 940 : memset (ptr, 0, extract_bytes);
7733 :
7734 940 : unsigned int elts_per_byte = BITS_PER_UNIT / elt_bits;
7735 940 : unsigned int first_elt = off * elts_per_byte;
7736 940 : unsigned int extract_elts = extract_bytes * elts_per_byte;
7737 940 : unsigned int elt_mask = (1 << elt_bits) - 1;
7738 17333 : for (unsigned int i = 0; i < extract_elts; ++i)
7739 : {
7740 16393 : tree elt = VECTOR_CST_ELT (expr, first_elt + i);
7741 16393 : if (TREE_CODE (elt) != INTEGER_CST)
7742 : return 0;
7743 :
7744 16393 : if (ptr && integer_nonzerop (elt))
7745 : {
7746 8441 : unsigned int bit = i * elt_bits;
7747 8441 : ptr[bit / BITS_PER_UNIT] |= elt_mask << (bit % BITS_PER_UNIT);
7748 : }
7749 : }
7750 : return extract_bytes;
7751 : }
7752 :
7753 878686 : int offset = 0;
7754 878686 : int size = GET_MODE_SIZE (SCALAR_TYPE_MODE (itype));
7755 2888754 : for (unsigned HOST_WIDE_INT i = 0; i < count; i++)
7756 : {
7757 2555998 : if (off >= size)
7758 : {
7759 22147 : off -= size;
7760 22147 : continue;
7761 : }
7762 2533851 : tree elem = VECTOR_CST_ELT (expr, i);
7763 2533851 : int res = native_encode_expr (elem, ptr ? ptr + offset : NULL,
7764 : len - offset, off);
7765 2533851 : if ((off == -1 && res != size) || res == 0)
7766 : return 0;
7767 2533322 : offset += res;
7768 2533322 : if (offset >= len)
7769 545401 : return (off == -1 && i < count - 1) ? 0 : offset;
7770 1987921 : if (off != -1)
7771 377734 : off = 0;
7772 : }
7773 : return offset;
7774 : }
7775 :
7776 : /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7777 : specified by EXPR into the buffer PTR of length LEN bytes.
7778 : Return the number of bytes placed in the buffer, or zero
7779 : upon failure. */
7780 :
7781 : static int
7782 758880 : native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7783 : {
7784 758880 : unsigned HOST_WIDE_INT count;
7785 758880 : if (!VECTOR_CST_NELTS (expr).is_constant (&count))
7786 : return 0;
7787 758880 : return native_encode_vector_part (expr, ptr, len, off, count);
7788 : }
7789 :
7790 :
7791 : /* Subroutine of native_encode_expr. Encode the STRING_CST
7792 : specified by EXPR into the buffer PTR of length LEN bytes.
7793 : Return the number of bytes placed in the buffer, or zero
7794 : upon failure. */
7795 :
7796 : static int
7797 140228 : native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7798 : {
7799 140228 : tree type = TREE_TYPE (expr);
7800 :
7801 : /* Wide-char strings are encoded in target byte-order so native
7802 : encoding them is trivial. */
7803 140228 : if (BITS_PER_UNIT != CHAR_BIT
7804 140228 : || TREE_CODE (type) != ARRAY_TYPE
7805 140228 : || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7806 280456 : || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7807 : return 0;
7808 :
7809 140228 : HOST_WIDE_INT total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
7810 140228 : if ((off == -1 && total_bytes > len) || off >= total_bytes)
7811 : return 0;
7812 139356 : if (off == -1)
7813 56132 : off = 0;
7814 139356 : len = MIN (total_bytes - off, len);
7815 139356 : if (ptr == NULL)
7816 : /* Dry run. */;
7817 : else
7818 : {
7819 139356 : int written = 0;
7820 139356 : if (off < TREE_STRING_LENGTH (expr))
7821 : {
7822 138881 : written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7823 138881 : memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7824 : }
7825 139356 : memset (ptr + written, 0, len - written);
7826 : }
7827 : return len;
7828 : }
7829 :
7830 : /* Subroutine of native_encode_expr. Encode the CONSTRUCTOR
7831 : specified by EXPR into the buffer PTR of length LEN bytes.
7832 : Return the number of bytes placed in the buffer, or zero
7833 : upon failure. */
7834 :
7835 : static int
7836 45358 : native_encode_constructor (const_tree expr, unsigned char *ptr, int len, int off)
7837 : {
7838 : /* We are only concerned with zero-initialization constructors here. That's
7839 : all we expect to see in GIMPLE, so that's all native_encode_expr should
7840 : deal with. For more general handling of constructors, there is
7841 : native_encode_initializer. */
7842 45358 : if (CONSTRUCTOR_NELTS (expr))
7843 : return 0;
7844 :
7845 : /* Wide-char strings are encoded in target byte-order so native
7846 : encoding them is trivial. */
7847 84520 : if (BITS_PER_UNIT != CHAR_BIT
7848 42260 : || !tree_fits_shwi_p (TYPE_SIZE_UNIT (TREE_TYPE (expr))))
7849 : return 0;
7850 :
7851 42260 : HOST_WIDE_INT total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
7852 42260 : if ((off == -1 && total_bytes > len) || off >= total_bytes)
7853 : return 0;
7854 42260 : if (off == -1)
7855 0 : off = 0;
7856 42260 : len = MIN (total_bytes - off, len);
7857 42260 : if (ptr == NULL)
7858 : /* Dry run. */;
7859 : else
7860 42260 : memset (ptr, 0, len);
7861 : return len;
7862 : }
7863 :
7864 : /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST, REAL_CST,
7865 : FIXED_CST, COMPLEX_CST, STRING_CST, or VECTOR_CST specified by EXPR into
7866 : the buffer PTR of size LEN bytes. If PTR is NULL, don't actually store
7867 : anything, just do a dry run. Fail either if OFF is -1 and LEN isn't
7868 : sufficient to encode the entire EXPR, or if OFF is out of bounds.
7869 : Otherwise, start at byte offset OFF and encode at most LEN bytes.
7870 : Return the number of bytes placed in the buffer, or zero upon failure. */
7871 :
7872 : int
7873 71478244 : native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7874 : {
7875 : /* We don't support starting at negative offset and -1 is special. */
7876 71478244 : if (off < -1)
7877 : return 0;
7878 :
7879 71478232 : switch (TREE_CODE (expr))
7880 : {
7881 53481953 : case INTEGER_CST:
7882 53481953 : return native_encode_int (expr, ptr, len, off);
7883 :
7884 830012 : case REAL_CST:
7885 830012 : return native_encode_real (SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (expr)),
7886 1660024 : TREE_REAL_CST_PTR (expr), ptr, len, off);
7887 :
7888 0 : case FIXED_CST:
7889 0 : return native_encode_fixed (expr, ptr, len, off);
7890 :
7891 15404 : case COMPLEX_CST:
7892 15404 : return native_encode_complex (expr, ptr, len, off);
7893 :
7894 758880 : case VECTOR_CST:
7895 758880 : return native_encode_vector (expr, ptr, len, off);
7896 :
7897 140228 : case STRING_CST:
7898 140228 : return native_encode_string (expr, ptr, len, off);
7899 :
7900 45358 : case CONSTRUCTOR:
7901 45358 : return native_encode_constructor (expr, ptr, len, off);
7902 :
7903 : default:
7904 : return 0;
7905 : }
7906 : }
7907 :
7908 : /* Try to find a type whose byte size is smaller or equal to LEN bytes larger
7909 : or equal to FIELDSIZE bytes, with underlying mode precision/size multiple
7910 : of BITS_PER_UNIT. As native_{interpret,encode}_int works in term of
7911 : machine modes, we can't just use build_nonstandard_integer_type. */
7912 :
7913 : tree
7914 541 : find_bitfield_repr_type (int fieldsize, int len)
7915 : {
7916 541 : machine_mode mode;
7917 1063 : for (int pass = 0; pass < 2; pass++)
7918 : {
7919 802 : enum mode_class mclass = pass ? MODE_PARTIAL_INT : MODE_INT;
7920 4510 : FOR_EACH_MODE_IN_CLASS (mode, mclass)
7921 7976 : if (known_ge (GET_MODE_SIZE (mode), fieldsize)
7922 7286 : && known_eq (GET_MODE_PRECISION (mode),
7923 : GET_MODE_BITSIZE (mode))
7924 11274 : && known_le (GET_MODE_SIZE (mode), len))
7925 : {
7926 280 : tree ret = lang_hooks.types.type_for_mode (mode, 1);
7927 280 : if (ret && TYPE_MODE (ret) == mode)
7928 : return ret;
7929 : }
7930 : }
7931 :
7932 522 : for (int i = 0; i < NUM_INT_N_ENTS; i ++)
7933 261 : if (int_n_enabled_p[i]
7934 261 : && int_n_data[i].bitsize >= (unsigned) (BITS_PER_UNIT * fieldsize)
7935 261 : && int_n_trees[i].unsigned_type)
7936 : {
7937 261 : tree ret = int_n_trees[i].unsigned_type;
7938 261 : mode = TYPE_MODE (ret);
7939 522 : if (known_ge (GET_MODE_SIZE (mode), fieldsize)
7940 522 : && known_eq (GET_MODE_PRECISION (mode),
7941 : GET_MODE_BITSIZE (mode))
7942 783 : && known_le (GET_MODE_SIZE (mode), len))
7943 : return ret;
7944 : }
7945 :
7946 : return NULL_TREE;
7947 : }
7948 :
7949 : /* Similar to native_encode_expr, but also handle CONSTRUCTORs, VCEs,
7950 : NON_LVALUE_EXPRs and nops. If MASK is non-NULL (then PTR has
7951 : to be non-NULL and OFF zero), then in addition to filling the
7952 : bytes pointed by PTR with the value also clear any bits pointed
7953 : by MASK that are known to be initialized, keep them as is for
7954 : e.g. uninitialized padding bits or uninitialized fields. */
7955 :
7956 : int
7957 47640762 : native_encode_initializer (tree init, unsigned char *ptr, int len,
7958 : int off, unsigned char *mask)
7959 : {
7960 47640762 : int r;
7961 :
7962 : /* We don't support starting at negative offset and -1 is special. */
7963 47640762 : if (off < -1 || init == NULL_TREE)
7964 : return 0;
7965 :
7966 47640762 : gcc_assert (mask == NULL || (off == 0 && ptr));
7967 :
7968 47640762 : STRIP_NOPS (init);
7969 47640762 : switch (TREE_CODE (init))
7970 : {
7971 0 : case VIEW_CONVERT_EXPR:
7972 0 : case NON_LVALUE_EXPR:
7973 0 : return native_encode_initializer (TREE_OPERAND (init, 0), ptr, len, off,
7974 0 : mask);
7975 45515161 : default:
7976 45515161 : r = native_encode_expr (init, ptr, len, off);
7977 45515161 : if (mask)
7978 1384 : memset (mask, 0, r);
7979 : return r;
7980 2125601 : case CONSTRUCTOR:
7981 2125601 : tree type = TREE_TYPE (init);
7982 2125601 : HOST_WIDE_INT total_bytes = int_size_in_bytes (type);
7983 2125601 : if (total_bytes < 0)
7984 : return 0;
7985 2125601 : if ((off == -1 && total_bytes > len) || off >= total_bytes)
7986 : return 0;
7987 2125601 : int o = off == -1 ? 0 : off;
7988 2125601 : if (TREE_CODE (type) == ARRAY_TYPE)
7989 : {
7990 300912 : tree min_index;
7991 300912 : unsigned HOST_WIDE_INT cnt;
7992 300912 : HOST_WIDE_INT curpos = 0, fieldsize, valueinit = -1;
7993 300912 : constructor_elt *ce;
7994 :
7995 300912 : if (!TYPE_DOMAIN (type)
7996 300912 : || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (type))) != INTEGER_CST)
7997 : return 0;
7998 :
7999 300912 : fieldsize = int_size_in_bytes (TREE_TYPE (type));
8000 300912 : if (fieldsize <= 0)
8001 : return 0;
8002 :
8003 300912 : min_index = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
8004 300912 : if (ptr)
8005 300912 : memset (ptr, '\0', MIN (total_bytes - off, len));
8006 :
8007 44985985 : for (cnt = 0; ; cnt++)
8008 : {
8009 45286897 : tree val = NULL_TREE, index = NULL_TREE;
8010 45286897 : HOST_WIDE_INT pos = curpos, count = 0;
8011 45286897 : bool full = false;
8012 45286897 : if (vec_safe_iterate (CONSTRUCTOR_ELTS (init), cnt, &ce))
8013 : {
8014 45179697 : val = ce->value;
8015 45179697 : index = ce->index;
8016 : }
8017 107200 : else if (mask == NULL
8018 228 : || CONSTRUCTOR_NO_CLEARING (init)
8019 107428 : || curpos >= total_bytes)
8020 : break;
8021 : else
8022 : pos = total_bytes;
8023 :
8024 45179697 : if (index && TREE_CODE (index) == RANGE_EXPR)
8025 : {
8026 16 : if (TREE_CODE (TREE_OPERAND (index, 0)) != INTEGER_CST
8027 16 : || TREE_CODE (TREE_OPERAND (index, 1)) != INTEGER_CST)
8028 0 : return 0;
8029 16 : offset_int wpos
8030 16 : = wi::sext (wi::to_offset (TREE_OPERAND (index, 0))
8031 32 : - wi::to_offset (min_index),
8032 16 : TYPE_PRECISION (sizetype));
8033 16 : wpos *= fieldsize;
8034 16 : if (!wi::fits_shwi_p (pos))
8035 : return 0;
8036 16 : pos = wpos.to_shwi ();
8037 16 : offset_int wcount
8038 16 : = wi::sext (wi::to_offset (TREE_OPERAND (index, 1))
8039 32 : - wi::to_offset (TREE_OPERAND (index, 0)),
8040 16 : TYPE_PRECISION (sizetype));
8041 16 : if (!wi::fits_shwi_p (wcount))
8042 : return 0;
8043 16 : count = wcount.to_shwi ();
8044 16 : }
8045 44630536 : else if (index)
8046 : {
8047 44630536 : if (TREE_CODE (index) != INTEGER_CST)
8048 0 : return 0;
8049 44630536 : offset_int wpos
8050 44630536 : = wi::sext (wi::to_offset (index)
8051 89261072 : - wi::to_offset (min_index),
8052 44630536 : TYPE_PRECISION (sizetype));
8053 44630536 : wpos *= fieldsize;
8054 44630536 : if (!wi::fits_shwi_p (wpos))
8055 : return 0;
8056 44630536 : pos = wpos.to_shwi ();
8057 : }
8058 :
8059 45180368 : if (mask && !CONSTRUCTOR_NO_CLEARING (init) && curpos != pos)
8060 : {
8061 14 : if (valueinit == -1)
8062 : {
8063 14 : tree zero = build_zero_cst (TREE_TYPE (type));
8064 28 : r = native_encode_initializer (zero, ptr + curpos,
8065 : fieldsize, 0,
8066 14 : mask + curpos);
8067 14 : if (TREE_CODE (zero) == CONSTRUCTOR)
8068 0 : ggc_free (zero);
8069 14 : if (!r)
8070 : return 0;
8071 14 : valueinit = curpos;
8072 14 : curpos += fieldsize;
8073 : }
8074 44 : while (curpos != pos)
8075 : {
8076 30 : memcpy (ptr + curpos, ptr + valueinit, fieldsize);
8077 30 : memcpy (mask + curpos, mask + valueinit, fieldsize);
8078 30 : curpos += fieldsize;
8079 : }
8080 : }
8081 :
8082 45179711 : curpos = pos;
8083 45179711 : if (val && TREE_CODE (val) == RAW_DATA_CST)
8084 : {
8085 486 : if (count)
8086 : return 0;
8087 486 : if (off == -1
8088 486 : || (curpos >= off
8089 0 : && (curpos + RAW_DATA_LENGTH (val)
8090 0 : <= (HOST_WIDE_INT) off + len)))
8091 : {
8092 486 : if (ptr)
8093 486 : memcpy (ptr + (curpos - o), RAW_DATA_POINTER (val),
8094 486 : RAW_DATA_LENGTH (val));
8095 486 : if (mask)
8096 0 : memset (mask + curpos, 0, RAW_DATA_LENGTH (val));
8097 : }
8098 0 : else if (curpos + RAW_DATA_LENGTH (val) > off
8099 0 : && curpos < (HOST_WIDE_INT) off + len)
8100 : {
8101 : /* Partial overlap. */
8102 0 : unsigned char *p = NULL;
8103 0 : int no = 0;
8104 0 : int l;
8105 0 : gcc_assert (mask == NULL);
8106 0 : if (curpos >= off)
8107 : {
8108 0 : if (ptr)
8109 0 : p = ptr + curpos - off;
8110 0 : l = MIN ((HOST_WIDE_INT) off + len - curpos,
8111 : RAW_DATA_LENGTH (val));
8112 : }
8113 : else
8114 : {
8115 0 : p = ptr;
8116 0 : no = off - curpos;
8117 0 : l = len;
8118 : }
8119 0 : if (p)
8120 0 : memcpy (p, RAW_DATA_POINTER (val) + no, l);
8121 : }
8122 486 : curpos += RAW_DATA_LENGTH (val);
8123 486 : val = NULL_TREE;
8124 : }
8125 486 : if (val)
8126 45257251 : do
8127 : {
8128 45257251 : if (off == -1
8129 654797 : || (curpos >= off
8130 216386 : && (curpos + fieldsize
8131 216386 : <= (HOST_WIDE_INT) off + len)))
8132 : {
8133 44787509 : if (full)
8134 : {
8135 78040 : if (ptr)
8136 78040 : memcpy (ptr + (curpos - o), ptr + (pos - o),
8137 : fieldsize);
8138 78040 : if (mask)
8139 0 : memcpy (mask + curpos, mask + pos, fieldsize);
8140 : }
8141 89604636 : else if (!native_encode_initializer (val,
8142 : ptr
8143 44709469 : ? ptr + curpos - o
8144 : : NULL,
8145 : fieldsize,
8146 : off == -1 ? -1
8147 : : 0,
8148 : mask
8149 643 : ? mask + curpos
8150 : : NULL))
8151 : return 0;
8152 : else
8153 : {
8154 : full = true;
8155 : pos = curpos;
8156 : }
8157 : }
8158 469742 : else if (curpos + fieldsize > off
8159 33804 : && curpos < (HOST_WIDE_INT) off + len)
8160 : {
8161 : /* Partial overlap. */
8162 9299 : unsigned char *p = NULL;
8163 9299 : int no = 0;
8164 9299 : int l;
8165 9299 : gcc_assert (mask == NULL);
8166 9299 : if (curpos >= off)
8167 : {
8168 6826 : if (ptr)
8169 6826 : p = ptr + curpos - off;
8170 6826 : l = MIN ((HOST_WIDE_INT) off + len - curpos,
8171 : fieldsize);
8172 : }
8173 : else
8174 : {
8175 2473 : p = ptr;
8176 2473 : no = off - curpos;
8177 2473 : l = len;
8178 : }
8179 9299 : if (!native_encode_initializer (val, p, l, no, NULL))
8180 : return 0;
8181 : }
8182 45063525 : curpos += fieldsize;
8183 : }
8184 45063525 : while (count-- != 0);
8185 44985985 : }
8186 107186 : return MIN (total_bytes - off, len);
8187 : }
8188 1824689 : else if (TREE_CODE (type) == RECORD_TYPE
8189 1824689 : || TREE_CODE (type) == UNION_TYPE)
8190 : {
8191 1824689 : unsigned HOST_WIDE_INT cnt;
8192 1824689 : constructor_elt *ce;
8193 1824689 : tree fld_base = TYPE_FIELDS (type);
8194 1824689 : tree to_free = NULL_TREE;
8195 :
8196 1824689 : gcc_assert (TREE_CODE (type) == RECORD_TYPE || mask == NULL);
8197 1824689 : if (ptr != NULL)
8198 1824689 : memset (ptr, '\0', MIN (total_bytes - o, len));
8199 327932 : for (cnt = 0; ; cnt++)
8200 : {
8201 2152621 : tree val = NULL_TREE, field = NULL_TREE;
8202 2152621 : HOST_WIDE_INT pos = 0, fieldsize;
8203 2152621 : unsigned HOST_WIDE_INT bpos = 0, epos = 0;
8204 :
8205 2152621 : if (to_free)
8206 : {
8207 0 : ggc_free (to_free);
8208 0 : to_free = NULL_TREE;
8209 : }
8210 :
8211 2152621 : if (vec_safe_iterate (CONSTRUCTOR_ELTS (init), cnt, &ce))
8212 : {
8213 356131 : val = ce->value;
8214 356131 : field = ce->index;
8215 356131 : if (field == NULL_TREE)
8216 : return 0;
8217 :
8218 356131 : pos = int_byte_position (field);
8219 356131 : if (off != -1 && (HOST_WIDE_INT) off + len <= pos)
8220 1496 : continue;
8221 : }
8222 1796490 : else if (mask == NULL
8223 1796490 : || CONSTRUCTOR_NO_CLEARING (init))
8224 : break;
8225 : else
8226 : pos = total_bytes;
8227 :
8228 356652 : if (mask && !CONSTRUCTOR_NO_CLEARING (init))
8229 : {
8230 : tree fld;
8231 10295 : for (fld = fld_base; fld; fld = DECL_CHAIN (fld))
8232 : {
8233 9892 : if (TREE_CODE (fld) != FIELD_DECL)
8234 8829 : continue;
8235 1063 : if (fld == field)
8236 : break;
8237 146 : if (DECL_PADDING_P (fld))
8238 87 : continue;
8239 59 : if (DECL_SIZE_UNIT (fld) == NULL_TREE
8240 59 : || !tree_fits_shwi_p (DECL_SIZE_UNIT (fld)))
8241 : return 0;
8242 59 : if (integer_zerop (DECL_SIZE_UNIT (fld)))
8243 2 : continue;
8244 : break;
8245 : }
8246 1377 : if (fld == NULL_TREE)
8247 : {
8248 403 : if (ce == NULL)
8249 : break;
8250 : return 0;
8251 : }
8252 974 : fld_base = DECL_CHAIN (fld);
8253 974 : if (fld != field)
8254 : {
8255 57 : cnt--;
8256 57 : field = fld;
8257 57 : pos = int_byte_position (field);
8258 57 : val = build_zero_cst (TREE_TYPE (fld));
8259 57 : if (TREE_CODE (val) == CONSTRUCTOR)
8260 0 : to_free = val;
8261 : }
8262 : }
8263 :
8264 354692 : if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
8265 13792 : && TYPE_DOMAIN (TREE_TYPE (field))
8266 368484 : && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
8267 : {
8268 81 : if (mask || off != -1)
8269 : return 0;
8270 81 : if (val == NULL_TREE)
8271 0 : continue;
8272 81 : if (TREE_CODE (TREE_TYPE (val)) != ARRAY_TYPE)
8273 : return 0;
8274 81 : fieldsize = int_size_in_bytes (TREE_TYPE (val));
8275 81 : if (fieldsize < 0
8276 81 : || (int) fieldsize != fieldsize
8277 81 : || (pos + fieldsize) > INT_MAX)
8278 : return 0;
8279 81 : if (pos + fieldsize > total_bytes)
8280 : {
8281 81 : if (ptr != NULL && total_bytes < len)
8282 81 : memset (ptr + total_bytes, '\0',
8283 81 : MIN (pos + fieldsize, len) - total_bytes);
8284 : total_bytes = pos + fieldsize;
8285 : }
8286 : }
8287 : else
8288 : {
8289 354611 : if (DECL_SIZE_UNIT (field) == NULL_TREE
8290 354611 : || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
8291 : return 0;
8292 354611 : fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
8293 : }
8294 354692 : if (fieldsize == 0)
8295 1 : continue;
8296 :
8297 : /* Prepare to deal with integral bit-fields and filter out other
8298 : bit-fields that do not start and end on a byte boundary. */
8299 354691 : if (DECL_BIT_FIELD (field))
8300 : {
8301 2701 : if (!tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (field)))
8302 : return 0;
8303 2701 : bpos = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
8304 2701 : if (INTEGRAL_TYPE_P (TREE_TYPE (field)))
8305 : {
8306 2701 : bpos %= BITS_PER_UNIT;
8307 2701 : fieldsize = TYPE_PRECISION (TREE_TYPE (field)) + bpos;
8308 2701 : epos = fieldsize % BITS_PER_UNIT;
8309 2701 : fieldsize += BITS_PER_UNIT - 1;
8310 2701 : fieldsize /= BITS_PER_UNIT;
8311 : }
8312 0 : else if (bpos % BITS_PER_UNIT
8313 0 : || DECL_SIZE (field) == NULL_TREE
8314 0 : || !tree_fits_shwi_p (DECL_SIZE (field))
8315 0 : || tree_to_shwi (DECL_SIZE (field)) % BITS_PER_UNIT)
8316 : return 0;
8317 : }
8318 :
8319 354691 : if (off != -1 && pos + fieldsize <= off)
8320 3408 : continue;
8321 :
8322 351283 : if (val == NULL_TREE)
8323 0 : continue;
8324 :
8325 351283 : if (DECL_BIT_FIELD (field)
8326 351283 : && INTEGRAL_TYPE_P (TREE_TYPE (field)))
8327 : {
8328 : /* FIXME: Handle PDP endian. */
8329 2497 : if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
8330 261 : return 0;
8331 :
8332 2497 : if (TREE_CODE (val) == NON_LVALUE_EXPR)
8333 6 : val = TREE_OPERAND (val, 0);
8334 2497 : if (TREE_CODE (val) != INTEGER_CST)
8335 : return 0;
8336 :
8337 2497 : tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
8338 2497 : tree repr_type = NULL_TREE;
8339 2497 : HOST_WIDE_INT rpos = 0;
8340 2497 : if (repr && INTEGRAL_TYPE_P (TREE_TYPE (repr)))
8341 : {
8342 1968 : rpos = int_byte_position (repr);
8343 1968 : repr_type = TREE_TYPE (repr);
8344 : }
8345 : else
8346 : {
8347 529 : repr_type = find_bitfield_repr_type (fieldsize, len);
8348 529 : if (repr_type == NULL_TREE)
8349 : return 0;
8350 268 : HOST_WIDE_INT repr_size = int_size_in_bytes (repr_type);
8351 268 : gcc_assert (repr_size > 0 && repr_size <= len);
8352 268 : if (pos + repr_size <= o + len)
8353 : rpos = pos;
8354 : else
8355 : {
8356 14 : rpos = o + len - repr_size;
8357 14 : gcc_assert (rpos <= pos);
8358 : }
8359 : }
8360 :
8361 2236 : if (rpos > pos)
8362 : return 0;
8363 2236 : wide_int w = wi::to_wide (val, TYPE_PRECISION (repr_type));
8364 2236 : int diff = (TYPE_PRECISION (repr_type)
8365 2236 : - TYPE_PRECISION (TREE_TYPE (field)));
8366 2236 : HOST_WIDE_INT bitoff = (pos - rpos) * BITS_PER_UNIT + bpos;
8367 2236 : if (!BYTES_BIG_ENDIAN)
8368 2236 : w = wi::lshift (w, bitoff);
8369 : else
8370 : w = wi::lshift (w, diff - bitoff);
8371 2236 : val = wide_int_to_tree (repr_type, w);
8372 :
8373 2236 : unsigned char buf[MAX_BITSIZE_MODE_ANY_INT
8374 : / BITS_PER_UNIT + 1];
8375 2236 : int l = native_encode_int (val, buf, sizeof buf, 0);
8376 2236 : if (l * BITS_PER_UNIT != TYPE_PRECISION (repr_type))
8377 0 : return 0;
8378 :
8379 2236 : if (ptr == NULL)
8380 0 : continue;
8381 :
8382 : /* If the bitfield does not start at byte boundary, handle
8383 : the partial byte at the start. */
8384 2236 : if (bpos
8385 1344 : && (off == -1 || (pos >= off && len >= 1)))
8386 : {
8387 1269 : if (!BYTES_BIG_ENDIAN)
8388 : {
8389 1269 : int msk = (1 << bpos) - 1;
8390 1269 : buf[pos - rpos] &= ~msk;
8391 1269 : buf[pos - rpos] |= ptr[pos - o] & msk;
8392 1269 : if (mask)
8393 : {
8394 147 : if (fieldsize > 1 || epos == 0)
8395 129 : mask[pos] &= msk;
8396 : else
8397 18 : mask[pos] &= (msk | ~((1 << epos) - 1));
8398 : }
8399 : }
8400 : else
8401 : {
8402 : int msk = (1 << (BITS_PER_UNIT - bpos)) - 1;
8403 : buf[pos - rpos] &= msk;
8404 : buf[pos - rpos] |= ptr[pos - o] & ~msk;
8405 : if (mask)
8406 : {
8407 : if (fieldsize > 1 || epos == 0)
8408 : mask[pos] &= ~msk;
8409 : else
8410 : mask[pos] &= (~msk
8411 : | ((1 << (BITS_PER_UNIT - epos))
8412 : - 1));
8413 : }
8414 : }
8415 : }
8416 : /* If the bitfield does not end at byte boundary, handle
8417 : the partial byte at the end. */
8418 2236 : if (epos
8419 1714 : && (off == -1
8420 1004 : || pos + fieldsize <= (HOST_WIDE_INT) off + len))
8421 : {
8422 1611 : if (!BYTES_BIG_ENDIAN)
8423 : {
8424 1611 : int msk = (1 << epos) - 1;
8425 1611 : buf[pos - rpos + fieldsize - 1] &= msk;
8426 1611 : buf[pos - rpos + fieldsize - 1]
8427 1611 : |= ptr[pos + fieldsize - 1 - o] & ~msk;
8428 1611 : if (mask && (fieldsize > 1 || bpos == 0))
8429 156 : mask[pos + fieldsize - 1] &= ~msk;
8430 : }
8431 : else
8432 : {
8433 : int msk = (1 << (BITS_PER_UNIT - epos)) - 1;
8434 : buf[pos - rpos + fieldsize - 1] &= ~msk;
8435 : buf[pos - rpos + fieldsize - 1]
8436 : |= ptr[pos + fieldsize - 1 - o] & msk;
8437 : if (mask && (fieldsize > 1 || bpos == 0))
8438 : mask[pos + fieldsize - 1] &= msk;
8439 : }
8440 : }
8441 2236 : if (off == -1
8442 1301 : || (pos >= off
8443 1212 : && (pos + fieldsize <= (HOST_WIDE_INT) off + len)))
8444 : {
8445 2045 : memcpy (ptr + pos - o, buf + (pos - rpos), fieldsize);
8446 2045 : if (mask && (fieldsize > (bpos != 0) + (epos != 0)))
8447 75 : memset (mask + pos + (bpos != 0), 0,
8448 75 : fieldsize - (bpos != 0) - (epos != 0));
8449 : }
8450 : else
8451 : {
8452 : /* Partial overlap. */
8453 191 : HOST_WIDE_INT fsz = fieldsize;
8454 191 : gcc_assert (mask == NULL);
8455 191 : if (pos < off)
8456 : {
8457 89 : fsz -= (off - pos);
8458 89 : pos = off;
8459 : }
8460 191 : if (pos + fsz > (HOST_WIDE_INT) off + len)
8461 104 : fsz = (HOST_WIDE_INT) off + len - pos;
8462 191 : memcpy (ptr + pos - off, buf + (pos - rpos), fsz);
8463 : }
8464 2236 : continue;
8465 2236 : }
8466 :
8467 348786 : if (off == -1
8468 19890 : || (pos >= off
8469 19060 : && (pos + fieldsize <= (HOST_WIDE_INT) off + len)))
8470 : {
8471 339253 : int fldsize = fieldsize;
8472 10357 : if (off == -1)
8473 : {
8474 328896 : tree fld = DECL_CHAIN (field);
8475 4775077 : while (fld)
8476 : {
8477 4460891 : if (TREE_CODE (fld) == FIELD_DECL)
8478 : break;
8479 4446181 : fld = DECL_CHAIN (fld);
8480 : }
8481 328896 : if (fld == NULL_TREE)
8482 314186 : fldsize = len - pos;
8483 : }
8484 350446 : r = native_encode_initializer (val, ptr ? ptr + pos - o
8485 : : NULL,
8486 : fldsize,
8487 : off == -1 ? -1 : 0,
8488 836 : mask ? mask + pos : NULL);
8489 339253 : if (!r)
8490 : return 0;
8491 319770 : if (off == -1
8492 317817 : && fldsize != fieldsize
8493 311 : && r > fieldsize
8494 48 : && pos + r > total_bytes)
8495 327932 : total_bytes = pos + r;
8496 : }
8497 : else
8498 : {
8499 : /* Partial overlap. */
8500 9533 : unsigned char *p = NULL;
8501 9533 : int no = 0;
8502 9533 : int l;
8503 9533 : gcc_assert (mask == NULL);
8504 9533 : if (pos >= off)
8505 : {
8506 8703 : if (ptr)
8507 8703 : p = ptr + pos - off;
8508 8703 : l = MIN ((HOST_WIDE_INT) off + len - pos,
8509 : fieldsize);
8510 : }
8511 : else
8512 : {
8513 830 : p = ptr;
8514 830 : no = off - pos;
8515 830 : l = len;
8516 : }
8517 9533 : if (!native_encode_initializer (val, p, l, no, NULL))
8518 : return 0;
8519 : }
8520 327932 : }
8521 1796433 : return MIN (total_bytes - off, len);
8522 : }
8523 : return 0;
8524 : }
8525 : }
8526 :
8527 :
8528 : /* Subroutine of native_interpret_expr. Interpret the contents of
8529 : the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
8530 : If the buffer cannot be interpreted, return NULL_TREE. */
8531 :
static tree
native_interpret_int (tree type, const unsigned char *ptr, int len)
{
  int total_bytes;
  if (TREE_CODE (type) == BITINT_TYPE)
    {
      struct bitint_info info;
      bool ok = targetm.c.bitint_type_info (TYPE_PRECISION (type), &info);
      gcc_assert (ok);
      scalar_int_mode limb_mode = as_a <scalar_int_mode> (info.limb_mode);
      if (TYPE_PRECISION (type) > GET_MODE_PRECISION (limb_mode))
	{
	  /* Multi-limb _BitInt: the value spans the type's full size.  */
	  total_bytes = tree_to_uhwi (TYPE_SIZE_UNIT (type));
	  /* More work is needed when adding _BitInt support to PDP endian
	     if limb is smaller than word, or if _BitInt limb ordering doesn't
	     match target endianity here.  */
	  gcc_checking_assert (info.big_endian == WORDS_BIG_ENDIAN
			       && (BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
				   || (GET_MODE_SIZE (limb_mode)
				       >= UNITS_PER_WORD)));
	}
      else
	/* Single-limb _BitInt behaves like an ordinary integer mode.  */
	total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
    }
  else
    total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));

  /* The buffer must contain the whole value.  */
  if (total_bytes > len)
    return NULL_TREE;

  wide_int result = wi::from_buffer (ptr, total_bytes);

  return wide_int_to_tree (type, result);
}
8566 :
8567 :
8568 : /* Subroutine of native_interpret_expr. Interpret the contents of
8569 : the buffer PTR of length LEN as a FIXED_CST of type TYPE.
8570 : If the buffer cannot be interpreted, return NULL_TREE. */
8571 :
8572 : static tree
8573 0 : native_interpret_fixed (tree type, const unsigned char *ptr, int len)
8574 : {
8575 0 : scalar_mode mode = SCALAR_TYPE_MODE (type);
8576 0 : int total_bytes = GET_MODE_SIZE (mode);
8577 0 : double_int result;
8578 0 : FIXED_VALUE_TYPE fixed_value;
8579 :
8580 0 : if (total_bytes > len
8581 0 : || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
8582 : return NULL_TREE;
8583 :
8584 0 : result = double_int::from_buffer (ptr, total_bytes);
8585 0 : fixed_value = fixed_from_double_int (result, mode);
8586 :
8587 0 : return build_fixed (type, fixed_value);
8588 : }
8589 :
8590 :
8591 : /* Subroutine of native_interpret_expr. Interpret the contents of
8592 : the buffer PTR of length LEN as a REAL_CST of type TYPE.
8593 : If the buffer cannot be interpreted, return NULL_TREE. */
8594 :
tree
native_interpret_real (tree type, const unsigned char *ptr, int len)
{
  scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
  int total_bytes = GET_MODE_SIZE (mode);
  unsigned char value;
  /* There are always 32 bits in each long, no matter the size of
     the hosts long.  We handle floating point representations with
     up to 192 bits.  */
  REAL_VALUE_TYPE r;
  long tmp[6];

  /* 24 bytes == 192 bits is the limit of the 6-long TMP buffer.  */
  if (total_bytes > len || total_bytes > 24)
    return NULL_TREE;
  int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;

  memset (tmp, 0, sizeof (tmp));
  /* Gather the target bytes into TMP in the 32-bit-long layout that
     real_from_target expects, accounting for target endianness.  */
  for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      /* Both OFFSET and BYTE index within a long;
	 bitpos indexes the whole float.  */
      int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
      if (UNITS_PER_WORD < 4)
	{
	  int word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	{
	  offset = byte;
	  if (BYTES_BIG_ENDIAN)
	    {
	      /* Reverse bytes within each long, or within the entire float
		 if it's smaller than a long (for HFmode).  */
	      offset = MIN (3, total_bytes - 1) - offset;
	      gcc_assert (offset >= 0);
	    }
	}
      value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];

      tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
    }

  real_from_target (&r, tmp, mode);
  return build_real (type, r);
}
8648 :
8649 :
8650 : /* Subroutine of native_interpret_expr. Interpret the contents of
8651 : the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
8652 : If the buffer cannot be interpreted, return NULL_TREE. */
8653 :
8654 : static tree
8655 1583 : native_interpret_complex (tree type, const unsigned char *ptr, int len)
8656 : {
8657 1583 : tree etype, rpart, ipart;
8658 1583 : int size;
8659 :
8660 1583 : etype = TREE_TYPE (type);
8661 1583 : size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
8662 1583 : if (size * 2 > len)
8663 : return NULL_TREE;
8664 1550 : rpart = native_interpret_expr (etype, ptr, size);
8665 1550 : if (!rpart)
8666 : return NULL_TREE;
8667 1369 : ipart = native_interpret_expr (etype, ptr+size, size);
8668 1369 : if (!ipart)
8669 : return NULL_TREE;
8670 1369 : return build_complex (type, rpart, ipart);
8671 : }
8672 :
8673 : /* Read a vector of type TYPE from the target memory image given by BYTES,
8674 : which contains LEN bytes. The vector is known to be encodable using
8675 : NPATTERNS interleaved patterns with NELTS_PER_PATTERN elements each.
8676 :
8677 : Return the vector on success, otherwise return null. */
8678 :
static tree
native_interpret_vector_part (tree type, const unsigned char *bytes,
			      unsigned int len, unsigned int npatterns,
			      unsigned int nelts_per_pattern)
{
  tree elt_type = TREE_TYPE (type);
  if (VECTOR_BOOLEAN_TYPE_P (type)
      && TYPE_PRECISION (elt_type) <= BITS_PER_UNIT)
    {
      /* This is the only case in which elements can be smaller than a byte.
	 Element 0 is always in the lsb of the containing byte.  */
      unsigned int elt_bits = TYPE_PRECISION (elt_type);
      /* All encoded elements must be present in BYTES.  */
      if (elt_bits * npatterns * nelts_per_pattern > len * BITS_PER_UNIT)
	return NULL_TREE;

      tree_vector_builder builder (type, npatterns, nelts_per_pattern);
      for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
	{
	  unsigned int bit_index = i * elt_bits;
	  unsigned int byte_index = bit_index / BITS_PER_UNIT;
	  unsigned int lsb = bit_index % BITS_PER_UNIT;
	  /* A set low bit yields an all-ones boolean element, clear
	     yields zero.  */
	  builder.quick_push (bytes[byte_index] & (1 << lsb)
			      ? build_all_ones_cst (elt_type)
			      : build_zero_cst (elt_type));
	}
      return builder.build ();
    }

  /* General case: whole-byte elements decoded one at a time.  */
  unsigned int elt_bytes = tree_to_uhwi (TYPE_SIZE_UNIT (elt_type));
  if (elt_bytes * npatterns * nelts_per_pattern > len)
    return NULL_TREE;

  tree_vector_builder builder (type, npatterns, nelts_per_pattern);
  for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
    {
      tree elt = native_interpret_expr (elt_type, bytes, elt_bytes);
      if (!elt)
	return NULL_TREE;
      builder.quick_push (elt);
      bytes += elt_bytes;
    }
  return builder.build ();
}
8722 :
8723 : /* Subroutine of native_interpret_expr. Interpret the contents of
8724 : the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
8725 : If the buffer cannot be interpreted, return NULL_TREE. */
8726 :
8727 : static tree
8728 76187 : native_interpret_vector (tree type, const unsigned char *ptr, unsigned int len)
8729 : {
8730 76187 : unsigned HOST_WIDE_INT size;
8731 :
8732 76187 : if (!tree_to_poly_uint64 (TYPE_SIZE_UNIT (type)).is_constant (&size)
8733 76187 : || size > len)
8734 2 : return NULL_TREE;
8735 :
8736 76185 : unsigned HOST_WIDE_INT count = TYPE_VECTOR_SUBPARTS (type).to_constant ();
8737 76185 : return native_interpret_vector_part (type, ptr, len, count, 1);
8738 : }
8739 :
8740 :
8741 : /* Subroutine of fold_view_convert_expr. Interpret the contents of
8742 : the buffer PTR of length LEN as a constant of type TYPE. For
8743 : INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
8744 : we return a REAL_CST, etc... If the buffer cannot be interpreted,
8745 : return NULL_TREE. */
8746 :
tree
native_interpret_expr (tree type, const unsigned char *ptr, int len)
{
  /* Dispatch on the type code to the per-kind interpreter.  */
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case OFFSET_TYPE:
    case BITINT_TYPE:
      return native_interpret_int (type, ptr, len);

    case REAL_TYPE:
      if (tree ret = native_interpret_real (type, ptr, len))
	{
	  /* For floating point values in composite modes, punt if this
	     folding doesn't preserve bit representation.  As the mode doesn't
	     have fixed precision while GCC pretends it does, there could be
	     valid values that GCC can't really represent accurately.
	     See PR95450.  Even for other modes, e.g. x86 XFmode can have some
	     bit combinations which GCC doesn't preserve.  */
	  /* BUF holds two halves: the re-encoded constant in buf[0..23] and
	     the padding-masked original bytes in buf[24..47].  Assumes no
	     scalar float mode is wider than 24 bytes -- TODO confirm.  */
	  unsigned char buf[24 * 2];
	  scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
	  int total_bytes = GET_MODE_SIZE (mode);
	  memcpy (buf + 24, ptr, total_bytes);
	  clear_type_padding_in_mask (type, buf + 24);
	  /* Round-trip: re-encode RET and require the bytes to match the
	     (padding-cleared) input exactly.  */
	  if (native_encode_expr (ret, buf, total_bytes, 0) != total_bytes
	      || memcmp (buf + 24, buf, total_bytes) != 0)
	    return NULL_TREE;
	  return ret;
	}
      return NULL_TREE;

    case FIXED_POINT_TYPE:
      return native_interpret_fixed (type, ptr, len);

    case COMPLEX_TYPE:
      return native_interpret_complex (type, ptr, len);

    case VECTOR_TYPE:
      return native_interpret_vector (type, ptr, len);

    default:
      return NULL_TREE;
    }
}
8795 :
8796 : /* Returns true if we can interpret the contents of a native encoding
8797 : as TYPE. */
8798 :
8799 : bool
8800 377785 : can_native_interpret_type_p (tree type)
8801 : {
8802 377785 : switch (TREE_CODE (type))
8803 : {
8804 : case INTEGER_TYPE:
8805 : case ENUMERAL_TYPE:
8806 : case BOOLEAN_TYPE:
8807 : case POINTER_TYPE:
8808 : case REFERENCE_TYPE:
8809 : case FIXED_POINT_TYPE:
8810 : case REAL_TYPE:
8811 : case COMPLEX_TYPE:
8812 : case VECTOR_TYPE:
8813 : case OFFSET_TYPE:
8814 : return true;
8815 79990 : default:
8816 79990 : return false;
8817 : }
8818 : }
8819 :
8820 : /* Attempt to interpret aggregate of TYPE from bytes encoded in target
8821 : byte order at PTR + OFF with LEN bytes. Does not handle unions. */
8822 :
tree
native_interpret_aggregate (tree type, const unsigned char *ptr, int off,
			    int len)
{
  vec<constructor_elt, va_gc> *elts = NULL;
  if (TREE_CODE (type) == ARRAY_TYPE)
    {
      /* Arrays: decode CNT elements of EltSZ bytes each, all of which
	 must fit inside the LEN-byte window.  */
      HOST_WIDE_INT eltsz = int_size_in_bytes (TREE_TYPE (type));
      if (eltsz < 0 || eltsz > len || TYPE_DOMAIN (type) == NULL_TREE)
	return NULL_TREE;

      HOST_WIDE_INT cnt = 0;
      if (TYPE_MAX_VALUE (TYPE_DOMAIN (type)))
	{
	  if (!tree_fits_shwi_p (TYPE_MAX_VALUE (TYPE_DOMAIN (type))))
	    return NULL_TREE;
	  cnt = tree_to_shwi (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) + 1;
	}
      /* Zero-sized elements contribute nothing; avoid an infinite
	 walk over them.  */
      if (eltsz == 0)
	cnt = 0;
      HOST_WIDE_INT pos = 0;
      for (HOST_WIDE_INT i = 0; i < cnt; i++, pos += eltsz)
	{
	  tree v = NULL_TREE;
	  if (pos >= len || pos + eltsz > len)
	    return NULL_TREE;
	  if (can_native_interpret_type_p (TREE_TYPE (type)))
	    {
	      /* Scalar-like element: decode it directly.  */
	      v = native_interpret_expr (TREE_TYPE (type),
					 ptr + off + pos, eltsz);
	      if (v == NULL_TREE)
		return NULL_TREE;
	    }
	  else if (TREE_CODE (TREE_TYPE (type)) == RECORD_TYPE
		   || TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
	    /* Nested aggregate: recurse with an adjusted offset.  */
	    v = native_interpret_aggregate (TREE_TYPE (type), ptr, off + pos,
					    eltsz);
	  if (v == NULL_TREE)
	    return NULL_TREE;
	  CONSTRUCTOR_APPEND_ELT (elts, size_int (i), v);
	}
      return build_constructor (type, elts);
    }
  /* Unions are documented as unhandled; only RECORD_TYPE remains.  */
  if (TREE_CODE (type) != RECORD_TYPE)
    return NULL_TREE;
  for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    {
      /* Skip non-fields, padding fields and empty-typed fields.  */
      if (TREE_CODE (field) != FIELD_DECL || DECL_PADDING_P (field)
	  || is_empty_type (TREE_TYPE (field)))
	continue;
      /* FLD is the decl actually read from memory: the field itself, its
	 bit-field representative, or NULL_TREE once already decoded.  */
      tree fld = field;
      HOST_WIDE_INT bitoff = 0, pos = 0, sz = 0;
      int diff = 0;
      tree v = NULL_TREE;
      if (DECL_BIT_FIELD (field))
	{
	  fld = DECL_BIT_FIELD_REPRESENTATIVE (field);
	  if (fld && INTEGRAL_TYPE_P (TREE_TYPE (fld)))
	    {
	      /* Compute BITOFF, the bit position of FIELD inside its
		 representative, and DIFF, the spare precision above it.  */
	      poly_int64 bitoffset;
	      poly_uint64 field_offset, fld_offset;
	      if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
		  && poly_int_tree_p (DECL_FIELD_OFFSET (fld), &fld_offset))
		bitoffset = (field_offset - fld_offset) * BITS_PER_UNIT;
	      else
		bitoffset = 0;
	      bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
			    - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)));
	      diff = (TYPE_PRECISION (TREE_TYPE (fld))
		      - TYPE_PRECISION (TREE_TYPE (field)));
	      if (!bitoffset.is_constant (&bitoff)
		  || bitoff < 0
		  || bitoff > diff)
		return NULL_TREE;
	    }
	  else
	    {
	      /* No usable representative: synthesize one from the field's
		 byte-rounded size and decode it right here.  */
	      if (!tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (field)))
		return NULL_TREE;
	      int fieldsize = TYPE_PRECISION (TREE_TYPE (field));
	      int bpos = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
	      bpos %= BITS_PER_UNIT;
	      fieldsize += bpos;
	      fieldsize += BITS_PER_UNIT - 1;
	      fieldsize /= BITS_PER_UNIT;
	      tree repr_type = find_bitfield_repr_type (fieldsize, len);
	      if (repr_type == NULL_TREE)
		return NULL_TREE;
	      sz = int_size_in_bytes (repr_type);
	      if (sz < 0 || sz > len)
		return NULL_TREE;
	      pos = int_byte_position (field);
	      if (pos < 0 || pos > len || pos + fieldsize > len)
		return NULL_TREE;
	      HOST_WIDE_INT rpos;
	      /* If the representative would run past the buffer, slide it
		 back so it ends exactly at LEN.  */
	      if (pos + sz <= len)
		rpos = pos;
	      else
		{
		  rpos = len - sz;
		  gcc_assert (rpos <= pos);
		}
	      bitoff = (HOST_WIDE_INT) (pos - rpos) * BITS_PER_UNIT + bpos;
	      pos = rpos;
	      diff = (TYPE_PRECISION (repr_type)
		      - TYPE_PRECISION (TREE_TYPE (field)));
	      v = native_interpret_expr (repr_type, ptr + off + pos, sz);
	      if (v == NULL_TREE)
		return NULL_TREE;
	      /* Mark the field as already decoded.  */
	      fld = NULL_TREE;
	    }
	}

      if (fld)
	{
	  /* Validate FLD's size and byte position against the buffer.  */
	  sz = int_size_in_bytes (TREE_TYPE (fld));
	  if (sz < 0 || sz > len)
	    return NULL_TREE;
	  tree byte_pos = byte_position (fld);
	  if (!tree_fits_shwi_p (byte_pos))
	    return NULL_TREE;
	  pos = tree_to_shwi (byte_pos);
	  if (pos < 0 || pos > len || pos + sz > len)
	    return NULL_TREE;
	}
      if (fld == NULL_TREE)
	/* Already handled above.  */;
      else if (can_native_interpret_type_p (TREE_TYPE (fld)))
	{
	  v = native_interpret_expr (TREE_TYPE (fld),
				     ptr + off + pos, sz);
	  if (v == NULL_TREE)
	    return NULL_TREE;
	}
      else if (TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE
	       || TREE_CODE (TREE_TYPE (fld)) == ARRAY_TYPE)
	v = native_interpret_aggregate (TREE_TYPE (fld), ptr, off + pos, sz);
      if (v == NULL_TREE)
	return NULL_TREE;
      /* For bit-fields decoded through a representative, shift the
	 field's bits down to the low end of its own type.  */
      if (fld != field)
	{
	  if (TREE_CODE (v) != INTEGER_CST)
	    return NULL_TREE;

	  /* FIXME: Figure out how to handle PDP endian bitfields.  */
	  if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
	    return NULL_TREE;
	  if (!BYTES_BIG_ENDIAN)
	    v = wide_int_to_tree (TREE_TYPE (field),
				  wi::lrshift (wi::to_wide (v), bitoff));
	  else
	    v = wide_int_to_tree (TREE_TYPE (field),
				  wi::lrshift (wi::to_wide (v),
					       diff - bitoff));
	}
      CONSTRUCTOR_APPEND_ELT (elts, field, v);
    }
  return build_constructor (type, elts);
}
8982 :
8983 : /* Routines for manipulation of native_encode_expr encoded data if the encoded
8984 : or extracted constant positions and/or sizes aren't byte aligned. */
8985 :
8986 : /* Shift left the bytes in PTR of SZ elements by AMNT bits, carrying over the
8987 : bits between adjacent elements. AMNT should be within
8988 : [0, BITS_PER_UNIT).
8989 : Example, AMNT = 2:
8990 : 00011111|11100000 << 2 = 01111111|10000000
8991 : PTR[1] | PTR[0] PTR[1] | PTR[0]. */
8992 :
8993 : void
8994 30546 : shift_bytes_in_array_left (unsigned char *ptr, unsigned int sz,
8995 : unsigned int amnt)
8996 : {
8997 30546 : if (amnt == 0)
8998 : return;
8999 :
9000 17829 : unsigned char carry_over = 0U;
9001 17829 : unsigned char carry_mask = (~0U) << (unsigned char) (BITS_PER_UNIT - amnt);
9002 17829 : unsigned char clear_mask = (~0U) << amnt;
9003 :
9004 99307 : for (unsigned int i = 0; i < sz; i++)
9005 : {
9006 81478 : unsigned prev_carry_over = carry_over;
9007 81478 : carry_over = (ptr[i] & carry_mask) >> (BITS_PER_UNIT - amnt);
9008 :
9009 81478 : ptr[i] <<= amnt;
9010 81478 : if (i != 0)
9011 : {
9012 63649 : ptr[i] &= clear_mask;
9013 63649 : ptr[i] |= prev_carry_over;
9014 : }
9015 : }
9016 : }
9017 :
9018 : /* Like shift_bytes_in_array_left but for big-endian.
9019 : Shift right the bytes in PTR of SZ elements by AMNT bits, carrying over the
9020 : bits between adjacent elements. AMNT should be within
9021 : [0, BITS_PER_UNIT).
9022 : Example, AMNT = 2:
9023 : 00011111|11100000 >> 2 = 00000111|11111000
9024 : PTR[0] | PTR[1] PTR[0] | PTR[1]. */
9025 :
9026 : void
9027 8 : shift_bytes_in_array_right (unsigned char *ptr, unsigned int sz,
9028 : unsigned int amnt)
9029 : {
9030 8 : if (amnt == 0)
9031 : return;
9032 :
9033 4 : unsigned char carry_over = 0U;
9034 4 : unsigned char carry_mask = ~(~0U << amnt);
9035 :
9036 12 : for (unsigned int i = 0; i < sz; i++)
9037 : {
9038 8 : unsigned prev_carry_over = carry_over;
9039 8 : carry_over = ptr[i] & carry_mask;
9040 :
9041 8 : carry_over <<= (unsigned char) BITS_PER_UNIT - amnt;
9042 8 : ptr[i] >>= amnt;
9043 8 : ptr[i] |= prev_carry_over;
9044 : }
9045 : }
9046 :
9047 : /* Try to view-convert VECTOR_CST EXPR to VECTOR_TYPE TYPE by operating
9048 : directly on the VECTOR_CST encoding, in a way that works for variable-
9049 : length vectors. Return the resulting VECTOR_CST on success or null
9050 : on failure. */
9051 :
static tree
fold_view_convert_vector_encoding (tree type, tree expr)
{
  tree expr_type = TREE_TYPE (expr);
  poly_uint64 type_bits, expr_bits;
  /* Both vector sizes must be (poly-)constant trees.  */
  if (!poly_int_tree_p (TYPE_SIZE (type), &type_bits)
      || !poly_int_tree_p (TYPE_SIZE (expr_type), &expr_bits))
    return NULL_TREE;

  poly_uint64 type_units = TYPE_VECTOR_SUBPARTS (type);
  poly_uint64 expr_units = TYPE_VECTOR_SUBPARTS (expr_type);
  unsigned int type_elt_bits = vector_element_size (type_bits, type_units);
  unsigned int expr_elt_bits = vector_element_size (expr_bits, expr_units);

  /* We can only preserve the semantics of a stepped pattern if the new
     vector element is an integer of the same size.  */
  if (VECTOR_CST_STEPPED_P (expr)
      && (!INTEGRAL_TYPE_P (type) || type_elt_bits != expr_elt_bits))
    return NULL_TREE;

  /* The number of bits needed to encode one element from every pattern
     of the original vector.  */
  unsigned int expr_sequence_bits
    = VECTOR_CST_NPATTERNS (expr) * expr_elt_bits;

  /* The number of bits needed to encode one element from every pattern
     of the result.  */
  unsigned int type_sequence_bits
    = least_common_multiple (expr_sequence_bits, type_elt_bits);

  /* Don't try to read more bytes than are available, which can happen
     for constant-sized vectors if TYPE has larger elements than EXPR_TYPE.
     The general VIEW_CONVERT handling can cope with that case, so there's
     no point complicating things here.  */
  unsigned int nelts_per_pattern = VECTOR_CST_NELTS_PER_PATTERN (expr);
  unsigned int buffer_bytes = CEIL (nelts_per_pattern * type_sequence_bits,
				    BITS_PER_UNIT);
  unsigned int buffer_bits = buffer_bytes * BITS_PER_UNIT;
  if (known_gt (buffer_bits, expr_bits))
    return NULL_TREE;

  /* Get enough bytes of EXPR to form the new encoding.  */
  auto_vec<unsigned char, 128> buffer (buffer_bytes);
  buffer.quick_grow (buffer_bytes);
  /* A short write means the source elements could not all be encoded.  */
  if (native_encode_vector_part (expr, buffer.address (), buffer_bytes, 0,
				 buffer_bits / expr_elt_bits)
      != (int) buffer_bytes)
    return NULL_TREE;

  /* Reencode the bytes as TYPE.  */
  unsigned int type_npatterns = type_sequence_bits / type_elt_bits;
  return native_interpret_vector_part (type, &buffer[0], buffer.length (),
				       type_npatterns, nelts_per_pattern);
}
9106 :
9107 : /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
9108 : TYPE at compile-time. If we're unable to perform the conversion
9109 : return NULL_TREE. */
9110 :
9111 : static tree
9112 14888911 : fold_view_convert_expr (tree type, tree expr)
9113 : {
9114 14888911 : unsigned char buffer[128];
9115 14888911 : unsigned char *buf;
9116 14888911 : int len;
9117 14888911 : HOST_WIDE_INT l;
9118 :
9119 : /* Check that the host and target are sane. */
9120 14888911 : if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
9121 : return NULL_TREE;
9122 :
9123 14888911 : if (VECTOR_TYPE_P (type) && TREE_CODE (expr) == VECTOR_CST)
9124 129785 : if (tree res = fold_view_convert_vector_encoding (type, expr))
9125 : return res;
9126 :
9127 14768184 : l = int_size_in_bytes (type);
9128 14768184 : if (l > (int) sizeof (buffer)
9129 14768184 : && l <= WIDE_INT_MAX_PRECISION / BITS_PER_UNIT)
9130 : {
9131 0 : buf = XALLOCAVEC (unsigned char, l);
9132 0 : len = l;
9133 : }
9134 : else
9135 : {
9136 : buf = buffer;
9137 : len = sizeof (buffer);
9138 : }
9139 14768184 : len = native_encode_expr (expr, buf, len);
9140 14768184 : if (len == 0)
9141 : return NULL_TREE;
9142 :
9143 1720138 : return native_interpret_expr (type, buf, len);
9144 : }
9145 :
9146 : /* Build an expression for the address of T. Folds away INDIRECT_REF
9147 : to avoid confusing the gimplify process. */
9148 :
tree
build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
{
  /* The size of the object is not relevant when talking about its address.  */
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);

  if (INDIRECT_REF_P (t))
    {
      /* &*p folds to p, converted to PTRTYPE if needed.  */
      t = TREE_OPERAND (t, 0);

      if (TREE_TYPE (t) != ptrtype)
	t = build1_loc (loc, NOP_EXPR, ptrtype, t);
    }
  else if (TREE_CODE (t) == MEM_REF
	   && integer_zerop (TREE_OPERAND (t, 1)))
    {
      /* &MEM[p, 0] folds to p.  */
      t = TREE_OPERAND (t, 0);

      if (TREE_TYPE (t) != ptrtype)
	t = fold_convert_loc (loc, ptrtype, t);
    }
  else if (TREE_CODE (t) == MEM_REF
	   && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
    /* &MEM[cst, off] folds to cst p+ off.  */
    return fold_binary (POINTER_PLUS_EXPR, ptrtype,
			TREE_OPERAND (t, 0),
			convert_to_ptrofftype (TREE_OPERAND (t, 1)));
  else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
    {
      /* &VIEW_CONVERT (x) folds to &x, converted to PTRTYPE if needed.  */
      t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));

      if (TREE_TYPE (t) != ptrtype)
	t = fold_convert_loc (loc, ptrtype, t);
    }
  else
    /* No folding opportunity: build a plain ADDR_EXPR.  */
    t = build1_loc (loc, ADDR_EXPR, ptrtype, t);

  return t;
}
9188 :
9189 : /* Build an expression for the address of T. */
9190 :
9191 : tree
9192 616678285 : build_fold_addr_expr_loc (location_t loc, tree t)
9193 : {
9194 616678285 : tree ptrtype = build_pointer_type (TREE_TYPE (t));
9195 :
9196 616678285 : return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
9197 : }
9198 :
9199 : /* Fold a unary expression of code CODE and type TYPE with operand
9200 : OP0. Return the folded expression if folding is successful.
9201 : Otherwise, return NULL_TREE. */
9202 :
9203 : tree
9204 2147438819 : fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
9205 : {
9206 2147438819 : tree tem;
9207 2147438819 : tree arg0;
9208 2147438819 : enum tree_code_class kind = TREE_CODE_CLASS (code);
9209 :
9210 2147438819 : gcc_assert (IS_EXPR_CODE_CLASS (kind)
9211 : && TREE_CODE_LENGTH (code) == 1);
9212 :
9213 2147438819 : arg0 = op0;
9214 2147438819 : if (arg0)
9215 : {
9216 2147425609 : if (CONVERT_EXPR_CODE_P (code)
9217 : || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
9218 : {
9219 : /* Don't use STRIP_NOPS, because signedness of argument type
9220 : matters. */
9221 1175065939 : STRIP_SIGN_NOPS (arg0);
9222 : }
9223 : else
9224 : {
9225 : /* Strip any conversions that don't change the mode. This
9226 : is safe for every expression, except for a comparison
9227 : expression because its signedness is derived from its
9228 : operands.
9229 :
9230 : Note that this is done as an internal manipulation within
9231 : the constant folder, in order to find the simplest
9232 : representation of the arguments so that their form can be
9233 : studied. In any cases, the appropriate type conversions
9234 : should be put back in the tree that will get out of the
9235 : constant folder. */
9236 972359670 : STRIP_NOPS (arg0);
9237 : }
9238 :
9239 2147425609 : if (CONSTANT_CLASS_P (arg0))
9240 : {
9241 341193678 : tree tem = const_unop (code, type, arg0);
9242 341193678 : if (tem)
9243 : {
9244 295479892 : if (TREE_TYPE (tem) != type)
9245 77265 : tem = fold_convert_loc (loc, type, tem);
9246 295479892 : return tem;
9247 : }
9248 : }
9249 : }
9250 :
9251 1851958927 : tem = generic_simplify (loc, code, type, op0);
9252 1851958927 : if (tem)
9253 : return tem;
9254 :
9255 1393160282 : if (TREE_CODE_CLASS (code) == tcc_unary)
9256 : {
9257 760876333 : if (TREE_CODE (arg0) == COMPOUND_EXPR)
9258 1056432 : return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9259 : fold_build1_loc (loc, code, type,
9260 1056432 : fold_convert_loc (loc, TREE_TYPE (op0),
9261 2112864 : TREE_OPERAND (arg0, 1))));
9262 759819901 : else if (TREE_CODE (arg0) == COND_EXPR)
9263 : {
9264 735597 : tree arg01 = TREE_OPERAND (arg0, 1);
9265 735597 : tree arg02 = TREE_OPERAND (arg0, 2);
9266 735597 : if (! VOID_TYPE_P (TREE_TYPE (arg01)))
9267 731415 : arg01 = fold_build1_loc (loc, code, type,
9268 : fold_convert_loc (loc,
9269 731415 : TREE_TYPE (op0), arg01));
9270 735597 : if (! VOID_TYPE_P (TREE_TYPE (arg02)))
9271 725679 : arg02 = fold_build1_loc (loc, code, type,
9272 : fold_convert_loc (loc,
9273 725679 : TREE_TYPE (op0), arg02));
9274 735597 : tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
9275 : arg01, arg02);
9276 :
9277 : /* If this was a conversion, and all we did was to move into
9278 : inside the COND_EXPR, bring it back out. But leave it if
9279 : it is a conversion from integer to integer and the
9280 : result precision is no wider than a word since such a
9281 : conversion is cheap and may be optimized away by combine,
9282 : while it couldn't if it were outside the COND_EXPR. Then return
9283 : so we don't get into an infinite recursion loop taking the
9284 : conversion out and then back in. */
9285 :
9286 735597 : if ((CONVERT_EXPR_CODE_P (code)
9287 10133 : || code == NON_LVALUE_EXPR)
9288 725483 : && TREE_CODE (tem) == COND_EXPR
9289 707130 : && TREE_CODE (TREE_OPERAND (tem, 1)) == code
9290 644717 : && TREE_CODE (TREE_OPERAND (tem, 2)) == code
9291 490861 : && ! VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (tem, 1)))
9292 490649 : && ! VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (tem, 2)))
9293 490649 : && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
9294 490649 : == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
9295 1246072 : && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
9296 20712 : && (INTEGRAL_TYPE_P
9297 : (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
9298 20672 : && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
9299 20559 : || flag_syntax_only))
9300 469244 : tem = build1_loc (loc, code, type,
9301 : build3 (COND_EXPR,
9302 469244 : TREE_TYPE (TREE_OPERAND
9303 : (TREE_OPERAND (tem, 1), 0)),
9304 469244 : TREE_OPERAND (tem, 0),
9305 469244 : TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
9306 469244 : TREE_OPERAND (TREE_OPERAND (tem, 2),
9307 : 0)));
9308 735597 : return tem;
9309 : }
9310 : }
9311 :
9312 1391368253 : switch (code)
9313 : {
9314 48658541 : case NON_LVALUE_EXPR:
9315 48658541 : if (!maybe_lvalue_p (op0))
9316 36077667 : return fold_convert_loc (loc, type, op0);
9317 : return NULL_TREE;
9318 :
9319 699555488 : CASE_CONVERT:
9320 699555488 : case FLOAT_EXPR:
9321 699555488 : case FIX_TRUNC_EXPR:
9322 699555488 : if (COMPARISON_CLASS_P (op0))
9323 : {
9324 : /* If we have (type) (a CMP b) and type is an integral type, return
9325 : new expression involving the new type. Canonicalize
9326 : (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
9327 : non-integral type.
9328 : Do not fold the result as that would not simplify further, also
9329 : folding again results in recursions. */
9330 567447 : if (TREE_CODE (type) == BOOLEAN_TYPE)
9331 149376 : return build2_loc (loc, TREE_CODE (op0), type,
9332 149376 : TREE_OPERAND (op0, 0),
9333 298752 : TREE_OPERAND (op0, 1));
9334 418071 : else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
9335 7621 : && TREE_CODE (type) != VECTOR_TYPE)
9336 7621 : return build3_loc (loc, COND_EXPR, type, op0,
9337 : constant_boolean_node (true, type),
9338 7621 : constant_boolean_node (false, type));
9339 : }
9340 :
9341 : /* Handle (T *)&A.B.C for A being of type T and B and C
9342 : living at offset zero. This occurs frequently in
9343 : C++ upcasting and then accessing the base. */
9344 699398491 : if (TREE_CODE (op0) == ADDR_EXPR
9345 217528211 : && POINTER_TYPE_P (type)
9346 910303350 : && handled_component_p (TREE_OPERAND (op0, 0)))
9347 : {
9348 52242311 : poly_int64 bitsize, bitpos;
9349 52242311 : tree offset;
9350 52242311 : machine_mode mode;
9351 52242311 : int unsignedp, reversep, volatilep;
9352 52242311 : tree base
9353 52242311 : = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
9354 : &offset, &mode, &unsignedp, &reversep,
9355 : &volatilep);
9356 : /* If the reference was to a (constant) zero offset, we can use
9357 : the address of the base if it has the same base type
9358 : as the result type and the pointer type is unqualified. */
9359 52242311 : if (!offset
9360 52037539 : && known_eq (bitpos, 0)
9361 35649634 : && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
9362 35649634 : == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
9363 52253730 : && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
9364 11218 : return fold_convert_loc (loc, type,
9365 11218 : build_fold_addr_expr_loc (loc, base));
9366 : }
9367 :
9368 699387273 : if (TREE_CODE (op0) == MODIFY_EXPR
9369 285148 : && TREE_CONSTANT (TREE_OPERAND (op0, 1))
9370 : /* Detect assigning a bitfield. */
9371 699389225 : && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
9372 118 : && DECL_BIT_FIELD
9373 : (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
9374 : {
9375 : /* Don't leave an assignment inside a conversion
9376 : unless assigning a bitfield. */
9377 1904 : tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
9378 : /* First do the assignment, then return converted constant. */
9379 1904 : tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
9380 1904 : suppress_warning (tem /* What warning? */);
9381 1904 : TREE_USED (tem) = 1;
9382 1904 : return tem;
9383 : }
9384 :
9385 : /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
9386 : constants (if x has signed type, the sign bit cannot be set
9387 : in c). This folds extension into the BIT_AND_EXPR.
9388 : ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
9389 : very likely don't have maximal range for their precision and this
9390 : transformation effectively doesn't preserve non-maximal ranges. */
9391 699385369 : if (TREE_CODE (type) == INTEGER_TYPE
9392 263099919 : && TREE_CODE (op0) == BIT_AND_EXPR
9393 700269631 : && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
9394 : {
9395 556735 : tree and_expr = op0;
9396 556735 : tree and0 = TREE_OPERAND (and_expr, 0);
9397 556735 : tree and1 = TREE_OPERAND (and_expr, 1);
9398 556735 : int change = 0;
9399 :
9400 556735 : if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
9401 556735 : || (TYPE_PRECISION (type)
9402 173336 : <= TYPE_PRECISION (TREE_TYPE (and_expr))))
9403 : change = 1;
9404 13738 : else if (TYPE_PRECISION (TREE_TYPE (and1))
9405 : <= HOST_BITS_PER_WIDE_INT
9406 13738 : && tree_fits_uhwi_p (and1))
9407 : {
9408 12632 : unsigned HOST_WIDE_INT cst;
9409 :
9410 12632 : cst = tree_to_uhwi (and1);
9411 25264 : cst &= HOST_WIDE_INT_M1U
9412 12632 : << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
9413 12632 : change = (cst == 0);
9414 12632 : if (change
9415 12632 : && !flag_syntax_only
9416 25264 : && (load_extend_op (TYPE_MODE (TREE_TYPE (and0)))
9417 : == ZERO_EXTEND))
9418 : {
9419 : tree uns = unsigned_type_for (TREE_TYPE (and0));
9420 : and0 = fold_convert_loc (loc, uns, and0);
9421 : and1 = fold_convert_loc (loc, uns, and1);
9422 : }
9423 : }
9424 12632 : if (change)
9425 : {
9426 555629 : tree and1_type = TREE_TYPE (and1);
9427 555629 : unsigned prec = MAX (TYPE_PRECISION (and1_type),
9428 : TYPE_PRECISION (type));
9429 555629 : tem = force_fit_type (type,
9430 555629 : wide_int::from (wi::to_wide (and1), prec,
9431 555629 : TYPE_SIGN (and1_type)),
9432 555629 : 0, TREE_OVERFLOW (and1));
9433 555629 : return fold_build2_loc (loc, BIT_AND_EXPR, type,
9434 555629 : fold_convert_loc (loc, type, and0), tem);
9435 : }
9436 : }
9437 :
9438 : /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
9439 : cast (T1)X will fold away. We assume that this happens when X itself
9440 : is a cast. */
9441 698829740 : if (POINTER_TYPE_P (type)
9442 401051879 : && TREE_CODE (arg0) == POINTER_PLUS_EXPR
9443 711726210 : && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
9444 : {
9445 9888975 : tree arg00 = TREE_OPERAND (arg0, 0);
9446 9888975 : tree arg01 = TREE_OPERAND (arg0, 1);
9447 :
9448 : /* If -fsanitize=alignment, avoid this optimization in GENERIC
9449 : when the pointed type needs higher alignment than
9450 : the p+ first operand's pointed type. */
9451 9888975 : if (!in_gimple_form
9452 9870392 : && sanitize_flags_p (SANITIZE_ALIGNMENT)
9453 9890189 : && (min_align_of_type (TREE_TYPE (type))
9454 607 : > min_align_of_type (TREE_TYPE (TREE_TYPE (arg00)))))
9455 : return NULL_TREE;
9456 :
9457 : /* Similarly, avoid this optimization in GENERIC for -fsanitize=null
9458 : when type is a reference type and arg00's type is not,
9459 : because arg00 could be validly nullptr and if arg01 doesn't return,
9460 : we don't want false positive binding of reference to nullptr. */
9461 9888908 : if (TREE_CODE (type) == REFERENCE_TYPE
9462 6755034 : && !in_gimple_form
9463 6755014 : && sanitize_flags_p (SANITIZE_NULL)
9464 9889339 : && TREE_CODE (TREE_TYPE (arg00)) != REFERENCE_TYPE)
9465 : return NULL_TREE;
9466 :
9467 9888477 : arg00 = fold_convert_loc (loc, type, arg00);
9468 9888477 : return fold_build_pointer_plus_loc (loc, arg00, arg01);
9469 : }
9470 :
9471 : /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
9472 : of the same precision, and X is an integer type not narrower than
9473 : types T1 or T2, i.e. the cast (T2)X isn't an extension. */
9474 688940765 : if (INTEGRAL_TYPE_P (type)
9475 270698379 : && TREE_CODE (op0) == BIT_NOT_EXPR
9476 599496 : && INTEGRAL_TYPE_P (TREE_TYPE (op0))
9477 599496 : && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
9478 689262640 : && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
9479 : {
9480 320168 : tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
9481 387058 : if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
9482 387056 : && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
9483 260905 : return fold_build1_loc (loc, BIT_NOT_EXPR, type,
9484 260905 : fold_convert_loc (loc, type, tem));
9485 : }
9486 :
9487 : /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
9488 : type of X and Y (integer types only). */
9489 688679860 : if (INTEGRAL_TYPE_P (type)
9490 270437474 : && TREE_CODE (op0) == MULT_EXPR
9491 9675450 : && INTEGRAL_TYPE_P (TREE_TYPE (op0))
9492 9655474 : && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0))
9493 688748265 : && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
9494 20530 : || !sanitize_flags_p (SANITIZE_SI_OVERFLOW)))
9495 : {
9496 : /* Be careful not to introduce new overflows. */
9497 68365 : tree mult_type;
9498 68365 : if (TYPE_OVERFLOW_WRAPS (type))
9499 : mult_type = type;
9500 : else
9501 2107 : mult_type = unsigned_type_for (type);
9502 :
9503 68365 : if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
9504 : {
9505 136730 : tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
9506 : fold_convert_loc (loc, mult_type,
9507 68365 : TREE_OPERAND (op0, 0)),
9508 : fold_convert_loc (loc, mult_type,
9509 68365 : TREE_OPERAND (op0, 1)));
9510 68365 : return fold_convert_loc (loc, type, tem);
9511 : }
9512 : }
9513 :
9514 : return NULL_TREE;
9515 :
9516 230460415 : case VIEW_CONVERT_EXPR:
9517 230460415 : if (TREE_CODE (op0) == MEM_REF)
9518 : {
9519 1793 : if (TYPE_ALIGN (TREE_TYPE (op0)) != TYPE_ALIGN (type))
9520 5 : type = build_aligned_type (type, TYPE_ALIGN (TREE_TYPE (op0)));
9521 1793 : tem = fold_build2_loc (loc, MEM_REF, type,
9522 1793 : TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
9523 1793 : REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
9524 1793 : return tem;
9525 : }
9526 :
9527 : return NULL_TREE;
9528 :
9529 4395056 : case NEGATE_EXPR:
9530 4395056 : tem = fold_negate_expr (loc, arg0);
9531 4395056 : if (tem)
9532 1552 : return fold_convert_loc (loc, type, tem);
9533 : return NULL_TREE;
9534 :
9535 3690752 : case ABS_EXPR:
9536 : /* Convert fabs((double)float) into (double)fabsf(float). */
9537 3690752 : if (TREE_CODE (arg0) == NOP_EXPR
9538 15971 : && TREE_CODE (type) == REAL_TYPE)
9539 : {
9540 15915 : tree targ0 = strip_float_extensions (arg0);
9541 15915 : if (targ0 != arg0)
9542 15711 : return fold_convert_loc (loc, type,
9543 : fold_build1_loc (loc, ABS_EXPR,
9544 15711 : TREE_TYPE (targ0),
9545 15711 : targ0));
9546 : }
9547 : return NULL_TREE;
9548 :
9549 2778408 : case BIT_NOT_EXPR:
9550 : /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
9551 2778408 : if (TREE_CODE (arg0) == BIT_XOR_EXPR
9552 2780093 : && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
9553 : fold_convert_loc (loc, type,
9554 1685 : TREE_OPERAND (arg0, 0)))))
9555 14 : return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
9556 : fold_convert_loc (loc, type,
9557 28 : TREE_OPERAND (arg0, 1)));
9558 2778394 : else if (TREE_CODE (arg0) == BIT_XOR_EXPR
9559 2780065 : && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
9560 : fold_convert_loc (loc, type,
9561 1671 : TREE_OPERAND (arg0, 1)))))
9562 23 : return fold_build2_loc (loc, BIT_XOR_EXPR, type,
9563 : fold_convert_loc (loc, type,
9564 46 : TREE_OPERAND (arg0, 0)), tem);
9565 :
9566 : return NULL_TREE;
9567 :
9568 52302005 : case TRUTH_NOT_EXPR:
9569 : /* Note that the operand of this must be an int
9570 : and its values must be 0 or 1.
9571 : ("true" is a fixed value perhaps depending on the language,
9572 : but we don't handle values other than 1 correctly yet.) */
9573 52302005 : tem = fold_truth_not_expr (loc, arg0);
9574 52302005 : if (!tem)
9575 : return NULL_TREE;
9576 34401531 : return fold_convert_loc (loc, type, tem);
9577 :
9578 87629050 : case INDIRECT_REF:
9579 : /* Fold *&X to X if X is an lvalue. */
9580 87629050 : if (TREE_CODE (op0) == ADDR_EXPR)
9581 : {
9582 7326 : tree op00 = TREE_OPERAND (op0, 0);
9583 7326 : if ((VAR_P (op00)
9584 : || TREE_CODE (op00) == PARM_DECL
9585 : || TREE_CODE (op00) == RESULT_DECL)
9586 6155 : && !TREE_READONLY (op00))
9587 : return op00;
9588 : }
9589 : return NULL_TREE;
9590 :
9591 : default:
9592 : return NULL_TREE;
9593 : } /* switch (code) */
9594 : }
9595 :
9596 :
9597 : /* If the operation was a conversion do _not_ mark a resulting constant
9598 : with TREE_OVERFLOW if the original constant was not. These conversions
9599 : have implementation defined behavior and retaining the TREE_OVERFLOW
9600 : flag here would confuse later passes such as VRP. */
9601 : tree
9602 0 : fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
9603 : tree type, tree op0)
9604 : {
9605 0 : tree res = fold_unary_loc (loc, code, type, op0);
9606 0 : if (res
9607 0 : && TREE_CODE (res) == INTEGER_CST
9608 0 : && TREE_CODE (op0) == INTEGER_CST
9609 0 : && CONVERT_EXPR_CODE_P (code))
9610 0 : TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
9611 :
9612 0 : return res;
9613 : }
9614 :
/* Fold a binary bitwise/truth expression of code CODE and type TYPE with
   operands OP0 and OP1.  LOC is the location of the resulting expression.
   ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */
static tree
fold_truth_andor (location_t loc, enum tree_code code, tree type,
		  tree arg0, tree arg1, tree op0, tree op1)
{
  tree tem;

  /* We only do these simplifications if we are optimizing.  */
  if (!optimize)
    return NULL_TREE;

  /* Check for things like (A || B) && (A || C).  We can convert this
     to A || (B && C).  Note that either operator can be any of the four
     truth and/or operations and the transformation will still be
     valid.   Also note that we only care about order for the
     ANDIF and ORIF operators.  If B contains side effects, this
     might change the truth-value of A.  */
  if (TREE_CODE (arg0) == TREE_CODE (arg1)
      && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
	  || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
	  || TREE_CODE (arg0) == TRUTH_AND_EXPR
	  || TREE_CODE (arg0) == TRUTH_OR_EXPR)
      && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
    {
      /* The whole expression is (a00 OP a01) CODE (a10 OP a11) where OP
	 is the truth code both operands share.  */
      tree a00 = TREE_OPERAND (arg0, 0);
      tree a01 = TREE_OPERAND (arg0, 1);
      tree a10 = TREE_OPERAND (arg1, 0);
      tree a11 = TREE_OPERAND (arg1, 1);
      /* Operands may be freely swapped only when both the inner and the
	 outer operations are the non-short-circuit AND/OR forms.  */
      bool commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
			   || TREE_CODE (arg0) == TRUTH_AND_EXPR)
			  && (code == TRUTH_AND_EXPR
			      || code == TRUTH_OR_EXPR));

      if (operand_equal_p (a00, a10, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
				fold_build2_loc (loc, code, type, a01, a11));
      else if (commutative && operand_equal_p (a00, a11, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
				fold_build2_loc (loc, code, type, a01, a10));
      else if (commutative && operand_equal_p (a01, a10, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
				fold_build2_loc (loc, code, type, a00, a11));

      /* This case is tricky because we must either have commutative
	 operators or else A10 must not have side-effects.  */

      else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
	       && operand_equal_p (a01, a11, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type,
				fold_build2_loc (loc, code, type, a00, a10),
				a01);
    }

  /* See if we can build a range comparison.  */
  if ((tem = fold_range_test (loc, code, type, op0, op1)) != 0)
    return tem;

  /* Try to absorb an arm of ARG0 that is the opposite of a condition
     tested by ARG1, e.g. (A || B) && C where C implies !A.  */
  if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
      || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
    {
      tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
      if (tem)
	return fold_build2_loc (loc, code, type, tem, arg1);
    }

  /* Same, but with the mergeable opposite arm inside ARG1.  */
  if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
      || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
    {
      tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
      if (tem)
	return fold_build2_loc (loc, code, type, arg0, tem);
    }

  /* Check for the possibility of merging component references.  If our
     lhs is another similar operation, try to merge its rhs with our
     rhs.  Then try to merge our lhs and rhs.  */
  if (TREE_CODE (arg0) == code
      && (tem = fold_truth_andor_1 (loc, code, type,
				    TREE_OPERAND (arg0, 1), arg1)) != 0)
    return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

  if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
    return tem;

  /* Decide whether to prefer non-short-circuit evaluation; the target
     macro may be overridden by the --param knob when it is not -1.  */
  bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
  if (param_logical_op_non_short_circuit != -1)
    logical_op_non_short_circuit
      = param_logical_op_non_short_circuit;
  if (logical_op_non_short_circuit
      && !sanitize_coverage_p ()
      && (code == TRUTH_AND_EXPR
	  || code == TRUTH_ANDIF_EXPR
	  || code == TRUTH_OR_EXPR
	  || code == TRUTH_ORIF_EXPR))
    {
      /* NCODE is the non-short-circuit variant of CODE; ICODE is the
	 corresponding short-circuit (AND-IF/OR-IF) variant.  */
      enum tree_code ncode, icode;

      ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
	      ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
      icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;

      /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
	 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
	 We don't want to pack more than two leafs to a non-IF AND/OR
	 expression.
	 If tree-code of left-hand operand isn't an AND/OR-IF code and not
	 equal to IF-CODE, then we don't want to add right-hand operand.
	 If the inner right-hand side of left-hand operand has
	 side-effects, or isn't simple, then we can't add to it,
	 as otherwise we might destroy if-sequence.  */
      if (TREE_CODE (arg0) == icode
	  && simple_condition_p (arg1)
	  /* Needed for sequence points to handle trappings, and
	     side-effects.  */
	  && simple_condition_p (TREE_OPERAND (arg0, 1)))
	{
	  tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
				 arg1);
	  return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
				  tem);
	}
      /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
	 or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C).  */
      else if (TREE_CODE (arg1) == icode
	  && simple_condition_p (arg0)
	  /* Needed for sequence points to handle trappings, and
	     side-effects.  */
	  && simple_condition_p (TREE_OPERAND (arg1, 0)))
	{
	  tem = fold_build2_loc (loc, ncode, type,
				 arg0, TREE_OPERAND (arg1, 0));
	  return fold_build2_loc (loc, icode, type, tem,
				  TREE_OPERAND (arg1, 1));
	}
      /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
	 into (A OR B).
	 For sequence point consistency, we need to check for trapping,
	 and side-effects.  */
      else if (code == icode && simple_condition_p (arg0)
	       && simple_condition_p (arg1))
	return fold_build2_loc (loc, ncode, type, arg0, arg1);
    }

  return NULL_TREE;
}
9764 :
/* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
   by changing CODE to reduce the magnitude of constants involved in
   ARG0 of the comparison.
   Returns a canonicalized comparison tree if a simplification was
   possible, otherwise returns NULL_TREE.
   Set *STRICT_OVERFLOW_P to true if the canonicalization is only
   valid if signed overflow is undefined.  */

static tree
maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
				 tree arg0, tree arg1,
				 bool *strict_overflow_p)
{
  enum tree_code code0 = TREE_CODE (arg0);
  tree t, cst0 = NULL_TREE;
  int sgn0;

  /* Match A +- CST code arg1.  We can change this only if overflow
     is undefined.  */
  if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
	 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
	/* In principle pointers also have undefined overflow behavior,
	   but that causes problems elsewhere.  */
	&& !POINTER_TYPE_P (TREE_TYPE (arg0))
	&& (code0 == MINUS_EXPR
	    || code0 == PLUS_EXPR)
	&& TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
    return NULL_TREE;

  /* Identify the constant in arg0 and its sign.  */
  cst0 = TREE_OPERAND (arg0, 1);
  sgn0 = tree_int_cst_sgn (cst0);

  /* Overflowed constants and zero will cause problems.  */
  if (integer_zerop (cst0)
      || TREE_OVERFLOW (cst0))
    return NULL_TREE;

  /* See if we can reduce the magnitude of the constant in
     arg0 by changing the comparison code.  Each transform trades a
     strict comparison for a non-strict one (or vice versa) so the
     constant can move one step towards zero.  */
  /* A - CST < arg1  ->  A - CST-1 <= arg1.  */
  if (code == LT_EXPR
      && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
    code = LE_EXPR;
  /* A + CST > arg1  ->  A + CST-1 >= arg1.  */
  else if (code == GT_EXPR
	   && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
    code = GE_EXPR;
  /* A + CST <= arg1  ->  A + CST-1 < arg1.  */
  else if (code == LE_EXPR
	   && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
    code = LT_EXPR;
  /* A - CST >= arg1  ->  A - CST-1 > arg1.  */
  else if (code == GE_EXPR
	   && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
    code = GT_EXPR;
  else
    return NULL_TREE;
  /* All of the above transforms rely on signed overflow being
     undefined; tell the caller so it can warn appropriately.  */
  *strict_overflow_p = true;

  /* Now build the constant reduced in magnitude.  But not if that
     would produce one outside of its types range.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
      && ((sgn0 == 1
	   && TYPE_MIN_VALUE (TREE_TYPE (cst0))
	   && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
	  || (sgn0 == -1
	      && TYPE_MAX_VALUE (TREE_TYPE (cst0))
	      && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
    return NULL_TREE;

  /* Step CST0 one unit towards zero and rebuild the comparison with
     the adjusted code.  */
  t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
		       cst0, build_int_cst (TREE_TYPE (cst0), 1));
  t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
  t = fold_convert (TREE_TYPE (arg1), t);

  return fold_build2_loc (loc, code, type, t, arg1);
}
9843 :
9844 : /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
9845 : overflow further. Try to decrease the magnitude of constants involved
9846 : by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
9847 : and put sole constants at the second argument position.
9848 : Returns the canonicalized tree if changed, otherwise NULL_TREE. */
9849 :
9850 : static tree
9851 97910252 : maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
9852 : tree arg0, tree arg1)
9853 : {
9854 97910252 : tree t;
9855 97910252 : bool strict_overflow_p;
9856 97910252 : const char * const warnmsg = G_("assuming signed overflow does not occur "
9857 : "when reducing constant in comparison");
9858 :
9859 : /* Try canonicalization by simplifying arg0. */
9860 97910252 : strict_overflow_p = false;
9861 97910252 : t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
9862 : &strict_overflow_p);
9863 97910252 : if (t)
9864 : {
9865 752884 : if (strict_overflow_p)
9866 752884 : fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9867 752884 : return t;
9868 : }
9869 :
9870 : /* Try canonicalization by simplifying arg1 using the swapped
9871 : comparison. */
9872 97157368 : code = swap_tree_comparison (code);
9873 97157368 : strict_overflow_p = false;
9874 97157368 : t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
9875 : &strict_overflow_p);
9876 97157368 : if (t && strict_overflow_p)
9877 40849 : fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9878 : return t;
9879 : }
9880 :
/* Return whether BASE + OFFSET + BITPOS may wrap around the address
   space.  This is used to avoid issuing overflow warnings for
   expressions like &p->x which cannot wrap.  */

static bool
pointer_may_wrap_p (tree base, tree offset, poly_int64 bitpos)
{
  /* Conservatively assume wrapping when BASE is not a pointer.  */
  if (!POINTER_TYPE_P (TREE_TYPE (base)))
    return true;

  /* A negative bit position moves below the base address.  */
  if (maybe_lt (bitpos, 0))
    return true;

  /* Collect the constant part of OFFSET; non-constant or overflowed
     offsets may wrap.  */
  poly_wide_int wi_offset;
  int precision = TYPE_PRECISION (TREE_TYPE (base));
  if (offset == NULL_TREE)
    wi_offset = wi::zero (precision);
  else if (!poly_int_tree_p (offset) || TREE_OVERFLOW (offset))
    return true;
  else
    wi_offset = wi::to_poly_wide (offset);

  /* Compute the total byte displacement OFFSET + BITPOS/BITS_PER_UNIT;
     if that addition itself overflows, the address may wrap.  */
  wi::overflow_type overflow;
  poly_wide_int units = wi::shwi (bits_to_bytes_round_down (bitpos),
				  precision);
  poly_wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
  if (overflow)
    return true;

  /* If the displacement stays within the size of the pointed-to type,
     the access cannot wrap.  */
  poly_uint64 total_hwi, size;
  if (!total.to_uhwi (&total_hwi)
      || !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (base))),
			   &size)
      || known_eq (size, 0U))
    return true;

  if (known_le (total_hwi, size))
    return false;

  /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
     array.  */
  if (TREE_CODE (base) == ADDR_EXPR
      && poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (base, 0))),
			  &size)
      && maybe_ne (size, 0U)
      && known_le (total_hwi, size))
    return false;

  return true;
}
9931 :
9932 : /* Return a positive integer when the symbol DECL is known to have
9933 : a nonzero address, zero when it's known not to (e.g., it's a weak
9934 : symbol), and a negative integer when the symbol is not yet in the
9935 : symbol table and so whether or not its address is zero is unknown.
9936 : For function local objects always return positive integer. */
9937 : static int
9938 11929227 : maybe_nonzero_address (tree decl)
9939 : {
9940 11929227 : if (!DECL_P (decl))
9941 : return -1;
9942 :
9943 : /* Normally, don't do anything for variables and functions before symtab is
9944 : built; it is quite possible that DECL will be declared weak later.
9945 : But if folding_initializer, we need a constant answer now, so create
9946 : the symtab entry and prevent later weak declaration. */
9947 9348080 : if (decl_in_symtab_p (decl))
9948 : {
9949 4138499 : if (struct symtab_node *symbol
9950 4138499 : = (folding_initializer
9951 4138499 : ? symtab_node::get_create (decl)
9952 4121887 : : symtab_node::get (decl)))
9953 4119574 : return symbol->nonzero_address ();
9954 : }
9955 5209581 : else if (folding_cxx_constexpr)
9956 : /* Anything that doesn't go in the symtab has non-zero address. */
9957 : return 1;
9958 :
9959 : /* Function local objects are never NULL. */
9960 5117354 : if (DECL_CONTEXT (decl)
9961 5100909 : && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
9962 10214828 : && auto_var_in_fn_p (decl, DECL_CONTEXT (decl)))
9963 : return 1;
9964 :
9965 : return -1;
9966 : }
9967 :
9968 : /* Subroutine of fold_binary. This routine performs all of the
9969 : transformations that are common to the equality/inequality
9970 : operators (EQ_EXPR and NE_EXPR) and the ordering operators
9971 : (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
9972 : fold_binary should call fold_binary. Fold a comparison with
9973 : tree code CODE and type TYPE with operands OP0 and OP1. Return
9974 : the folded comparison or NULL_TREE. */
9975 :
9976 : static tree
9977 97982739 : fold_comparison (location_t loc, enum tree_code code, tree type,
9978 : tree op0, tree op1)
9979 : {
9980 97982739 : const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
9981 97982739 : tree arg0, arg1, tem;
9982 :
9983 97982739 : arg0 = op0;
9984 97982739 : arg1 = op1;
9985 :
9986 97982739 : STRIP_SIGN_NOPS (arg0);
9987 97982739 : STRIP_SIGN_NOPS (arg1);
9988 :
9989 : /* For comparisons of pointers we can decompose it to a compile time
9990 : comparison of the base objects and the offsets into the object.
9991 : This requires at least one operand being an ADDR_EXPR or a
9992 : POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
9993 181816673 : if (POINTER_TYPE_P (TREE_TYPE (arg0))
9994 98243095 : && (TREE_CODE (arg0) == ADDR_EXPR
9995 14358172 : || TREE_CODE (arg1) == ADDR_EXPR
9996 12835801 : || TREE_CODE (arg0) == POINTER_PLUS_EXPR
9997 12056875 : || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
9998 : {
9999 2361409 : tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
10000 2361409 : poly_int64 bitsize, bitpos0 = 0, bitpos1 = 0;
10001 2361409 : machine_mode mode;
10002 2361409 : int volatilep, reversep, unsignedp;
10003 2361409 : bool indirect_base0 = false, indirect_base1 = false;
10004 :
10005 : /* Get base and offset for the access. Strip ADDR_EXPR for
10006 : get_inner_reference, but put it back by stripping INDIRECT_REF
10007 : off the base object if possible. indirect_baseN will be true
10008 : if baseN is not an address but refers to the object itself. */
10009 2361409 : base0 = arg0;
10010 2361409 : if (TREE_CODE (arg0) == ADDR_EXPR)
10011 : {
10012 50989 : base0
10013 50989 : = get_inner_reference (TREE_OPERAND (arg0, 0),
10014 : &bitsize, &bitpos0, &offset0, &mode,
10015 : &unsignedp, &reversep, &volatilep);
10016 50989 : if (INDIRECT_REF_P (base0))
10017 2011 : base0 = TREE_OPERAND (base0, 0);
10018 : else
10019 : indirect_base0 = true;
10020 : }
10021 2310420 : else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10022 : {
10023 840944 : base0 = TREE_OPERAND (arg0, 0);
10024 840944 : STRIP_SIGN_NOPS (base0);
10025 840944 : if (TREE_CODE (base0) == ADDR_EXPR)
10026 : {
10027 37369 : base0
10028 37369 : = get_inner_reference (TREE_OPERAND (base0, 0),
10029 : &bitsize, &bitpos0, &offset0, &mode,
10030 : &unsignedp, &reversep, &volatilep);
10031 37369 : if (INDIRECT_REF_P (base0))
10032 20 : base0 = TREE_OPERAND (base0, 0);
10033 : else
10034 : indirect_base0 = true;
10035 : }
10036 840944 : if (offset0 == NULL_TREE || integer_zerop (offset0))
10037 840944 : offset0 = TREE_OPERAND (arg0, 1);
10038 : else
10039 0 : offset0 = size_binop (PLUS_EXPR, offset0,
10040 : TREE_OPERAND (arg0, 1));
10041 840944 : if (poly_int_tree_p (offset0))
10042 : {
10043 680433 : poly_offset_int tem = wi::sext (wi::to_poly_offset (offset0),
10044 680433 : TYPE_PRECISION (sizetype));
10045 680433 : tem <<= LOG2_BITS_PER_UNIT;
10046 680433 : tem += bitpos0;
10047 680433 : if (tem.to_shwi (&bitpos0))
10048 680431 : offset0 = NULL_TREE;
10049 : }
10050 : }
10051 :
10052 2361409 : base1 = arg1;
10053 2361409 : if (TREE_CODE (arg1) == ADDR_EXPR)
10054 : {
10055 1550175 : base1
10056 1550175 : = get_inner_reference (TREE_OPERAND (arg1, 0),
10057 : &bitsize, &bitpos1, &offset1, &mode,
10058 : &unsignedp, &reversep, &volatilep);
10059 1550175 : if (INDIRECT_REF_P (base1))
10060 66982 : base1 = TREE_OPERAND (base1, 0);
10061 : else
10062 : indirect_base1 = true;
10063 : }
10064 811234 : else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10065 : {
10066 89301 : base1 = TREE_OPERAND (arg1, 0);
10067 89301 : STRIP_SIGN_NOPS (base1);
10068 89301 : if (TREE_CODE (base1) == ADDR_EXPR)
10069 : {
10070 10168 : base1
10071 10168 : = get_inner_reference (TREE_OPERAND (base1, 0),
10072 : &bitsize, &bitpos1, &offset1, &mode,
10073 : &unsignedp, &reversep, &volatilep);
10074 10168 : if (INDIRECT_REF_P (base1))
10075 0 : base1 = TREE_OPERAND (base1, 0);
10076 : else
10077 : indirect_base1 = true;
10078 : }
10079 89301 : if (offset1 == NULL_TREE || integer_zerop (offset1))
10080 89277 : offset1 = TREE_OPERAND (arg1, 1);
10081 : else
10082 24 : offset1 = size_binop (PLUS_EXPR, offset1,
10083 : TREE_OPERAND (arg1, 1));
10084 89301 : if (poly_int_tree_p (offset1))
10085 : {
10086 77120 : poly_offset_int tem = wi::sext (wi::to_poly_offset (offset1),
10087 77120 : TYPE_PRECISION (sizetype));
10088 77120 : tem <<= LOG2_BITS_PER_UNIT;
10089 77120 : tem += bitpos1;
10090 77120 : if (tem.to_shwi (&bitpos1))
10091 77120 : offset1 = NULL_TREE;
10092 : }
10093 : }
10094 :
10095 : /* If we have equivalent bases we might be able to simplify. */
10096 2361409 : if (indirect_base0 == indirect_base1
10097 3181115 : && operand_equal_p (base0, base1,
10098 : indirect_base0 ? OEP_ADDRESS_OF : 0))
10099 : {
10100 : /* We can fold this expression to a constant if the non-constant
10101 : offset parts are equal. */
10102 21208 : if ((offset0 == offset1
10103 6974 : || (offset0 && offset1
10104 2642 : && operand_equal_p (offset0, offset1, 0)))
10105 21208 : && (equality_code
10106 9176 : || (indirect_base0
10107 5961 : && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
10108 3215 : || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
10109 : {
10110 14196 : if (!equality_code
10111 9138 : && maybe_ne (bitpos0, bitpos1)
10112 23334 : && (pointer_may_wrap_p (base0, offset0, bitpos0)
10113 1617 : || pointer_may_wrap_p (base1, offset1, bitpos1)))
10114 7806 : fold_overflow_warning (("assuming pointer wraparound does not "
10115 : "occur when comparing P +- C1 with "
10116 : "P +- C2"),
10117 : WARN_STRICT_OVERFLOW_CONDITIONAL);
10118 :
10119 14196 : switch (code)
10120 : {
10121 56 : case EQ_EXPR:
10122 56 : if (known_eq (bitpos0, bitpos1))
10123 50570 : return constant_boolean_node (true, type);
10124 21 : if (known_ne (bitpos0, bitpos1))
10125 21 : return constant_boolean_node (false, type);
10126 : break;
10127 5002 : case NE_EXPR:
10128 5002 : if (known_ne (bitpos0, bitpos1))
10129 4997 : return constant_boolean_node (true, type);
10130 5 : if (known_eq (bitpos0, bitpos1))
10131 5 : return constant_boolean_node (false, type);
10132 : break;
10133 2261 : case LT_EXPR:
10134 2261 : if (known_lt (bitpos0, bitpos1))
10135 2131 : return constant_boolean_node (true, type);
10136 130 : if (known_ge (bitpos0, bitpos1))
10137 130 : return constant_boolean_node (false, type);
10138 : break;
10139 1631 : case LE_EXPR:
10140 1631 : if (known_le (bitpos0, bitpos1))
10141 181 : return constant_boolean_node (true, type);
10142 1450 : if (known_gt (bitpos0, bitpos1))
10143 1450 : return constant_boolean_node (false, type);
10144 : break;
10145 3455 : case GE_EXPR:
10146 3455 : if (known_ge (bitpos0, bitpos1))
10147 1363 : return constant_boolean_node (true, type);
10148 2092 : if (known_lt (bitpos0, bitpos1))
10149 2092 : return constant_boolean_node (false, type);
10150 : break;
10151 1791 : case GT_EXPR:
10152 1791 : if (known_gt (bitpos0, bitpos1))
10153 1740 : return constant_boolean_node (true, type);
10154 51 : if (known_le (bitpos0, bitpos1))
10155 51 : return constant_boolean_node (false, type);
10156 : break;
10157 : default:;
10158 : }
10159 : }
10160 : /* We can simplify the comparison to a comparison of the variable
10161 : offset parts if the constant offset parts are equal.
10162 : Be careful to use signed sizetype here because otherwise we
10163 : mess with array offsets in the wrong way. This is possible
10164 : because pointer arithmetic is restricted to retain within an
10165 : object and overflow on pointer differences is undefined as of
10166 : 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
10167 7012 : else if (known_eq (bitpos0, bitpos1)
10168 7012 : && (equality_code
10169 5163 : || (indirect_base0
10170 267 : && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
10171 4896 : || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
10172 : {
10173 : /* By converting to signed sizetype we cover middle-end pointer
10174 : arithmetic which operates on unsigned pointer types of size
10175 : type size and ARRAY_REF offsets which are properly sign or
10176 : zero extended from their type in case it is narrower than
10177 : sizetype. */
10178 5278 : if (offset0 == NULL_TREE)
10179 0 : offset0 = build_int_cst (ssizetype, 0);
10180 : else
10181 5278 : offset0 = fold_convert_loc (loc, ssizetype, offset0);
10182 5278 : if (offset1 == NULL_TREE)
10183 2668 : offset1 = build_int_cst (ssizetype, 0);
10184 : else
10185 2610 : offset1 = fold_convert_loc (loc, ssizetype, offset1);
10186 :
10187 5278 : if (!equality_code
10188 5278 : && (pointer_may_wrap_p (base0, offset0, bitpos0)
10189 0 : || pointer_may_wrap_p (base1, offset1, bitpos1)))
10190 5163 : fold_overflow_warning (("assuming pointer wraparound does not "
10191 : "occur when comparing P +- C1 with "
10192 : "P +- C2"),
10193 : WARN_STRICT_OVERFLOW_COMPARISON);
10194 :
10195 5278 : return fold_build2_loc (loc, code, type, offset0, offset1);
10196 : }
10197 : }
10198 : /* For equal offsets we can simplify to a comparison of the
10199 : base addresses. */
10200 2340201 : else if (known_eq (bitpos0, bitpos1)
10201 53609 : && (indirect_base0
10202 971253 : ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
10203 13646 : && (indirect_base1
10204 182951 : ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
10205 2559785 : && ((offset0 == offset1)
10206 5978 : || (offset0 && offset1
10207 5708 : && operand_equal_p (offset0, offset1, 0))))
10208 : {
10209 31033 : if (indirect_base0)
10210 3186 : base0 = build_fold_addr_expr_loc (loc, base0);
10211 31033 : if (indirect_base1)
10212 4470 : base1 = build_fold_addr_expr_loc (loc, base1);
10213 31033 : return fold_build2_loc (loc, code, type, base0, base1);
10214 : }
10215 : /* Comparison between an ordinary (non-weak) symbol and a null
10216 : pointer can be eliminated since such symbols must have a non
10217 : null address. In C, relational expressions between pointers
10218 : to objects and null pointers are undefined. The results
10219 : below follow the C++ rules with the additional property that
10220 : every object pointer compares greater than a null pointer.
10221 : */
10222 2309168 : else if (((DECL_P (base0)
10223 248475 : && maybe_nonzero_address (base0) > 0
10224 : /* Avoid folding references to struct members at offset 0 to
10225 : prevent tests like '&ptr->firstmember == 0' from getting
10226 : eliminated. When ptr is null, although the -> expression
10227 : is strictly speaking invalid, GCC retains it as a matter
10228 : of QoI. See PR c/44555. */
10229 234280 : && (offset0 == NULL_TREE && known_ne (bitpos0, 0)))
10230 2287135 : || CONSTANT_CLASS_P (base0))
10231 26824 : && indirect_base0
10232 : /* The caller guarantees that when one of the arguments is
10233 : constant (i.e., null in this case) it is second. */
10234 2333133 : && integer_zerop (arg1))
10235 : {
10236 63 : switch (code)
10237 : {
10238 24 : case EQ_EXPR:
10239 24 : case LE_EXPR:
10240 24 : case LT_EXPR:
10241 24 : return constant_boolean_node (false, type);
10242 39 : case GE_EXPR:
10243 39 : case GT_EXPR:
10244 39 : case NE_EXPR:
10245 39 : return constant_boolean_node (true, type);
10246 0 : default:
10247 0 : gcc_unreachable ();
10248 : }
10249 : }
10250 : }
10251 :
10252 : /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
10253 : X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
10254 : the resulting offset is smaller in absolute value than the
10255 : original one and has the same sign. */
10256 191429189 : if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10257 152422268 : && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
10258 36453666 : && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
10259 2364050 : && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10260 1947767 : && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
10261 1947767 : && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
10262 174310379 : && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10263 164674 : && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
10264 : {
10265 164674 : tree const1 = TREE_OPERAND (arg0, 1);
10266 164674 : tree const2 = TREE_OPERAND (arg1, 1);
10267 164674 : tree variable1 = TREE_OPERAND (arg0, 0);
10268 164674 : tree variable2 = TREE_OPERAND (arg1, 0);
10269 164674 : tree cst;
10270 164674 : const char * const warnmsg = G_("assuming signed overflow does not "
10271 : "occur when combining constants around "
10272 : "a comparison");
10273 :
10274 : /* Put the constant on the side where it doesn't overflow and is
10275 : of lower absolute value and of same sign than before. */
10276 164675 : cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
10277 : ? MINUS_EXPR : PLUS_EXPR,
10278 : const2, const1);
10279 164674 : if (!TREE_OVERFLOW (cst)
10280 164658 : && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
10281 186591 : && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
10282 : {
10283 5669 : fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
10284 5669 : return fold_build2_loc (loc, code, type,
10285 : variable1,
10286 5669 : fold_build2_loc (loc, TREE_CODE (arg1),
10287 5669 : TREE_TYPE (arg1),
10288 5669 : variable2, cst));
10289 : }
10290 :
10291 159006 : cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
10292 : ? MINUS_EXPR : PLUS_EXPR,
10293 : const1, const2);
10294 159005 : if (!TREE_OVERFLOW (cst)
10295 158989 : && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
10296 175253 : && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
10297 : {
10298 16248 : fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
10299 16248 : return fold_build2_loc (loc, code, type,
10300 16248 : fold_build2_loc (loc, TREE_CODE (arg0),
10301 16248 : TREE_TYPE (arg0),
10302 : variable1, cst),
10303 16248 : variable2);
10304 : }
10305 : }
10306 :
10307 97910252 : tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
10308 97910252 : if (tem)
10309 : return tem;
10310 :
10311 : /* If we are comparing an expression that just has comparisons
10312 : of two integer values, arithmetic expressions of those comparisons,
10313 : and constants, we can simplify it. There are only three cases
10314 : to check: the two values can either be equal, the first can be
10315 : greater, or the second can be greater. Fold the expression for
10316 : those three values. Since each value must be 0 or 1, we have
10317 : eight possibilities, each of which corresponds to the constant 0
10318 : or 1 or one of the six possible comparisons.
10319 :
10320 : This handles common cases like (a > b) == 0 but also handles
10321 : expressions like ((x > y) - (y > x)) > 0, which supposedly
10322 : occur in macroized code. */
10323 :
10324 97116519 : if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
10325 : {
10326 60225671 : tree cval1 = 0, cval2 = 0;
10327 :
10328 60225671 : if (twoval_comparison_p (arg0, &cval1, &cval2)
10329 : /* Don't handle degenerate cases here; they should already
10330 : have been handled anyway. */
10331 668682 : && cval1 != 0 && cval2 != 0
10332 667495 : && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
10333 667495 : && TREE_TYPE (cval1) == TREE_TYPE (cval2)
10334 667489 : && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
10335 58 : && TYPE_MAX_VALUE (TREE_TYPE (cval1))
10336 58 : && TYPE_MAX_VALUE (TREE_TYPE (cval2))
10337 60225729 : && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
10338 58 : TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
10339 : {
10340 58 : tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
10341 58 : tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
10342 :
10343 : /* We can't just pass T to eval_subst in case cval1 or cval2
10344 : was the same as ARG1. */
10345 :
10346 58 : tree high_result
10347 58 : = fold_build2_loc (loc, code, type,
10348 : eval_subst (loc, arg0, cval1, maxval,
10349 : cval2, minval),
10350 : arg1);
10351 58 : tree equal_result
10352 58 : = fold_build2_loc (loc, code, type,
10353 : eval_subst (loc, arg0, cval1, maxval,
10354 : cval2, maxval),
10355 : arg1);
10356 58 : tree low_result
10357 58 : = fold_build2_loc (loc, code, type,
10358 : eval_subst (loc, arg0, cval1, minval,
10359 : cval2, maxval),
10360 : arg1);
10361 :
10362 : /* All three of these results should be 0 or 1. Confirm they are.
10363 : Then use those values to select the proper code to use. */
10364 :
10365 58 : if (TREE_CODE (high_result) == INTEGER_CST
10366 49 : && TREE_CODE (equal_result) == INTEGER_CST
10367 39 : && TREE_CODE (low_result) == INTEGER_CST)
10368 : {
10369 : /* Make a 3-bit mask with the high-order bit being the
10370 : value for `>', the next for '=', and the low for '<'. */
10371 39 : switch ((integer_onep (high_result) * 4)
10372 39 : + (integer_onep (equal_result) * 2)
10373 39 : + integer_onep (low_result))
10374 : {
10375 21 : case 0:
10376 : /* Always false. */
10377 39 : return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10378 : case 1:
10379 : code = LT_EXPR;
10380 : break;
10381 2 : case 2:
10382 2 : code = EQ_EXPR;
10383 2 : break;
10384 0 : case 3:
10385 0 : code = LE_EXPR;
10386 0 : break;
10387 0 : case 4:
10388 0 : code = GT_EXPR;
10389 0 : break;
10390 1 : case 5:
10391 1 : code = NE_EXPR;
10392 1 : break;
10393 0 : case 6:
10394 0 : code = GE_EXPR;
10395 0 : break;
10396 15 : case 7:
10397 : /* Always true. */
10398 15 : return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10399 : }
10400 :
10401 3 : return fold_build2_loc (loc, code, type, cval1, cval2);
10402 : }
10403 : }
10404 : }
10405 :
10406 : return NULL_TREE;
10407 : }
10408 :
10409 :
10410          : /* Subroutine of fold_binary.  Optimize complex multiplications of the
10411          :    form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2).  The
10412          :    argument EXPR represents the expression "z" of type TYPE.  Returns
10413          :    a COMPLEX_EXPR of the form (r*r + i*i) + 0i.  */
10414          :
10415          : static tree
10416        2 : fold_mult_zconjz (location_t loc, tree type, tree expr)
10417          : {
10418        2 :   tree itype = TREE_TYPE (type);
10419        2 :   tree rpart, ipart, tem;
10420          :
               :   /* If EXPR already exposes its real and imaginary parts, reuse
               :      them directly.  */
10421        2 :   if (TREE_CODE (expr) == COMPLEX_EXPR)
10422          :     {
10423        0 :       rpart = TREE_OPERAND (expr, 0);
10424        0 :       ipart = TREE_OPERAND (expr, 1);
10425          :     }
10426        2 :   else if (TREE_CODE (expr) == COMPLEX_CST)
10427          :     {
10428        0 :       rpart = TREE_REALPART (expr);
10429        0 :       ipart = TREE_IMAGPART (expr);
10430          :     }
10431          :   else
10432          :     {
               :       /* Otherwise wrap EXPR in a SAVE_EXPR so the two part
               :          extractions below evaluate it only once.  */
10433        2 :       expr = save_expr (expr);
10434        2 :       rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
10435        2 :       ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
10436          :     }
10437          :
               :   /* Each part appears twice in the squares below; guard against
               :      double evaluation.  */
10438        2 :   rpart = save_expr (rpart);
10439        2 :   ipart = save_expr (ipart);
               :   /* z * conj(z) == (r*r + i*i) with a zero imaginary part.  */
10440        2 :   tem = fold_build2_loc (loc, PLUS_EXPR, itype,
               :                          fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
               :                          fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
10443        2 :   return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
10444        2 :                           build_zero_cst (itype));
10445          : }
10445 :
10446 :
10447          : /* Helper function for fold_vec_perm.  Store elements of VECTOR_CST or
10448          :    CONSTRUCTOR ARG into array ELTS, which has NELTS elements, and return
10449          :    true if successful.  Fails (returning false) when ARG is neither a
10450          :    VECTOR_CST nor a CONSTRUCTOR, or cannot be expanded into NELTS scalars.  */
10451          :
10452          : static bool
10453    12645 : vec_cst_ctor_to_array (tree arg, unsigned int nelts, tree *elts)
10454          : {
10455    12645 :   unsigned HOST_WIDE_INT i, nunits;
10456          :
               :   /* A VECTOR_CST with a compile-time constant element count: copy the
               :      elements out directly.  */
10457    12645 :   if (TREE_CODE (arg) == VECTOR_CST
10458    12645 :       && VECTOR_CST_NELTS (arg).is_constant (&nunits))
10459          :     {
10460     2120 :       for (i = 0; i < nunits; ++i)
10461     1710 :         elts[i] = VECTOR_CST_ELT (arg, i);
10462          :     }
10463    12235 :   else if (TREE_CODE (arg) == CONSTRUCTOR)
10464          :     {
10465          :       constructor_elt *elt;
10466          :
               :       /* Punt if the constructor overflows NELTS or contains nested
               :          vector values (only scalar elements are supported here).  */
10467    38079 :       FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
10468    31842 :         if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
10469     5998 :           return false;
10470          :         else
10471    25844 :           elts[i] = elt->value;
10472          :     }
10473          :   else
10474          :     return false;
               :   /* Elements not supplied explicitly default to zero of the vector's
               :      element type.  */
10475     7421 :   for (; i < nelts; i++)
10476     1548 :     elts[i]
10477      774 :       = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
10478          :   return true;
10479          : }
10479 :
10480          : /* Helper routine for fold_vec_perm_cst to check if SEL is a suitable
10481          :    mask for VLA vec_perm folding.
10482          :    REASON if specified, will contain the reason why SEL is not suitable.
10483          :    Used only for debugging and unit-testing.  */
10484          :
10485          : static bool
10486     8447 : valid_mask_for_fold_vec_perm_cst_p (tree arg0, tree arg1,
10487          :                                     const vec_perm_indices &sel,
10488          :                                     const char **reason = NULL)
10489          : {
10490     8447 :   unsigned sel_npatterns = sel.encoding ().npatterns ();
10491     8447 :   unsigned sel_nelts_per_pattern = sel.encoding ().nelts_per_pattern ();
10492          :
               :   /* The pattern counts of the selector and both inputs must all be
               :      powers of 2 for the multiple_p checks below to behave.  */
10493    16894 :   if (!(pow2p_hwi (sel_npatterns)
10494     8447 :         && pow2p_hwi (VECTOR_CST_NPATTERNS (arg0))
10495     8447 :         && pow2p_hwi (VECTOR_CST_NPATTERNS (arg1))))
10496          :     {
10497        0 :       if (reason)
10498        0 :         *reason = "npatterns is not power of 2";
10499        0 :       return false;
10500          :     }
10501          :
10502          :   /* We want to avoid cases where sel.length is not a multiple of npatterns.
10503          :      For eg: sel.length = 2 + 2x, and sel npatterns = 4.  */
10504     8447 :   poly_uint64 esel;
10505     8447 :   if (!multiple_p (sel.length (), sel_npatterns, &esel))
10506          :     {
10507        0 :       if (reason)
10508        0 :         *reason = "sel.length is not multiple of sel_npatterns";
10509        0 :       return false;
10510          :     }
10511          :
               :   /* With fewer than 3 elements per pattern there is no stepped series
               :      to validate; any such mask is acceptable.  */
10512     8447 :   if (sel_nelts_per_pattern < 3)
10513          :     return true;
10514          :
10515     5600 :   for (unsigned pattern = 0; pattern < sel_npatterns; pattern++)
10516          :     {
               :       /* a1 and a2 are the 2nd and 3rd encoded elements of this pattern;
               :          the step of the implied series is a2 - a1.  */
10517     4245 :       poly_uint64 a1 = sel[pattern + sel_npatterns];
10518     4245 :       poly_uint64 a2 = sel[pattern + 2 * sel_npatterns];
10519     4245 :       HOST_WIDE_INT step;
10520     4245 :       if (!poly_int64 (a2 - a1).is_constant (&step))
10521          :         {
10522          :           if (reason)
10523          :             *reason = "step is not constant";
10524     1012 :           return false;
10525          :         }
10526          :       // FIXME: Punt on step < 0 for now, revisit later.
10527     4245 :       if (step < 0)
10528          :         return false;
10529     4191 :       if (step == 0)
10530        0 :         continue;
10531          :
10532     4191 :       if (!pow2p_hwi (step))
10533          :         {
10534        0 :           if (reason)
10535        0 :             *reason = "step is not power of 2";
10536        0 :           return false;
10537          :         }
10538          :
10539          :       /* Ensure that stepped sequence of the pattern selects elements
10540          :          only from the same input vector.  */
10541     4191 :       uint64_t q1, qe;
10542     4191 :       poly_uint64 r1, re;
               :       /* ae is the last element of the stepped series (esel encoded
               :          elements per pattern).  */
10543     4191 :       poly_uint64 ae = a1 + (esel - 2) * step;
10544     4191 :       poly_uint64 arg_len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
10545          :
               :       /* The first and last elements must fall in the same input vector,
               :          i.e. have the same quotient when divided by the input length.  */
10546     4191 :       if (!(can_div_trunc_p (a1, arg_len, &q1, &r1)
10547     4191 :             && can_div_trunc_p (ae, arg_len, &qe, &re)
10548          :             && q1 == qe))
10549          :         {
10550      360 :           if (reason)
10551        0 :             *reason = "crossed input vectors";
10552      360 :           return false;
10553          :         }
10554          :
10555          :       /* Ensure that the stepped sequence always selects from the same
10556          :          input pattern.  */
               :       /* Even quotient selects from ARG0, odd from ARG1.  */
10557     3831 :       tree arg = ((q1 & 1) == 0) ? arg0 : arg1;
10558     3831 :       unsigned arg_npatterns = VECTOR_CST_NPATTERNS (arg);
10559          :
10560     3831 :       if (!multiple_p (step, arg_npatterns))
10561          :         {
10562      596 :           if (reason)
10563        0 :             *reason = "step is not multiple of npatterns";
10564      596 :           return false;
10565          :         }
10566          :
10567          :       /* If a1 chooses base element from arg, ensure that it's a natural
10568          :          stepped sequence, ie, (arg[2] - arg[1]) == (arg[1] - arg[0])
10569          :          to preserve arg's encoding.  */
10570          :
10571     3235 :       if (maybe_lt (r1, arg_npatterns))
10572          :         {
10573        2 :           unsigned HOST_WIDE_INT index;
10574        2 :           if (!r1.is_constant (&index))
10575        2 :             return false;
10576          :
10577        2 :           tree arg_elem0 = vector_cst_elt (arg, index);
10578        2 :           tree arg_elem1 = vector_cst_elt (arg, index + arg_npatterns);
10579        2 :           tree arg_elem2 = vector_cst_elt (arg, index + arg_npatterns * 2);
10580          :
               :           /* Two consecutive differences of ARG's series must be equal
               :             (and constant-foldable) for the encoding to be preserved.  */
10581        2 :           tree step1, step2;
10582        2 :           if (!(step1 = const_binop (MINUS_EXPR, arg_elem1, arg_elem0))
10583        2 :               || !(step2 = const_binop (MINUS_EXPR, arg_elem2, arg_elem1))
10584        4 :               || !operand_equal_p (step1, step2, 0))
10585          :             {
10586        2 :               if (reason)
10587        0 :                 *reason = "not a natural stepped sequence";
10588        2 :               return false;
10589          :             }
10590          :         }
10591          :     }
10592          :
10593          :   return true;
10594          : }
10595 :
10596          : /* Try to fold permutation of ARG0 and ARG1 with SEL selector when
10597          :    the input vectors are VECTOR_CST.  Return NULL_TREE otherwise.
10598          :    REASON has same purpose as described in
10599          :    valid_mask_for_fold_vec_perm_cst_p.  */
10600          :
10601          : static tree
10602     8447 : fold_vec_perm_cst (tree type, tree arg0, tree arg1, const vec_perm_indices &sel,
10603          :                    const char **reason = NULL)
10604          : {
10605     8447 :   unsigned res_npatterns, res_nelts_per_pattern;
10606     8447 :   unsigned HOST_WIDE_INT res_nelts;
10607          :
10608          :   /* First try to implement the fold in a VLA-friendly way.
10609          :
10610          :      (1) If the selector is simply a duplication of N elements, the
10611          :          result is likewise a duplication of N elements.
10612          :
10613          :      (2) If the selector is N elements followed by a duplication
10614          :          of N elements, the result is too.
10615          :
10616          :      (3) If the selector is N elements followed by an interleaving
10617          :          of N linear series, the situation is more complex.
10618          :
10619          :          valid_mask_for_fold_vec_perm_cst_p detects whether we
10620          :          can handle this case.  If we can, then each of the N linear
10621          :          series either (a) selects the same element each time or
10622          :          (b) selects a linear series from one of the input patterns.
10623          :
10624          :          If (b) holds for one of the linear series, the result
10625          :          will contain a linear series, and so the result will have
10626          :          the same shape as the selector.  If (a) holds for all of
10627          :          the linear series, the result will be the same as (2) above.
10628          :
10629          :          (b) can only hold if one of the input patterns has a
10630          :          stepped encoding.  */
10631          :
10632     8447 :   if (valid_mask_for_fold_vec_perm_cst_p (arg0, arg1, sel, reason))
10633          :     {
               :       /* The result inherits the selector's encoding shape ...  */
10634     7435 :       res_npatterns = sel.encoding ().npatterns ();
10635     7435 :       res_nelts_per_pattern = sel.encoding ().nelts_per_pattern ();
               :       /* ... except that when neither input has a stepped encoding
               :          (nelts_per_pattern < 3), case (a) above applies and two
               :          encoded elements per pattern suffice.  */
10636     7435 :       if (res_nelts_per_pattern == 3
10637     1355 :           && VECTOR_CST_NELTS_PER_PATTERN (arg0) < 3
10638     8309 :           && VECTOR_CST_NELTS_PER_PATTERN (arg1) < 3)
10639          :         res_nelts_per_pattern = 2;
10640     7435 :       res_nelts = res_npatterns * res_nelts_per_pattern;
10641          :     }
               :   /* Mask unsuitable for VLA folding: fall back to folding every
               :      element individually, which requires a fixed-length result.  */
10642     1012 :   else if (TYPE_VECTOR_SUBPARTS (type).is_constant (&res_nelts))
10643          :     {
10644     1012 :       res_npatterns = res_nelts;
10645     1012 :       res_nelts_per_pattern = 1;
10646          :     }
10647          :   else
10648          :     return NULL_TREE;
10649          :
10650     8447 :   tree_vector_builder out_elts (type, res_npatterns, res_nelts_per_pattern);
10651    53702 :   for (unsigned i = 0; i < res_nelts; i++)
10652          :     {
10653    45255 :       poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
10654    45255 :       uint64_t q;
10655    45255 :       poly_uint64 r;
10656    45255 :       unsigned HOST_WIDE_INT index;
10657          :
10658          :       /* Punt if sel[i] /trunc_div len cannot be determined,
10659          :          because the input vector to be chosen will depend on
10660          :          runtime vector length.
10661          :          For example if len == 4 + 4x, and sel[i] == 4,
10662          :          If len at runtime equals 4, we choose arg1[0].
10663          :          For any other value of len > 4 at runtime, we choose arg0[4].
10664          :          which makes the element choice dependent on runtime vector length.  */
10665    45255 :       if (!can_div_trunc_p (sel[i], len, &q, &r))
10666          :         {
10667          :           if (reason)
10668          :             *reason = "cannot divide selector element by arg len";
10669          :           return NULL_TREE;
10670          :         }
10671          :
10672          :       /* sel[i] % len will give the index of element in the chosen input
10673          :          vector.  For example if sel[i] == 5 + 4x and len == 4 + 4x,
10674          :          we will choose arg1[1] since (5 + 4x) % (4 + 4x) == 1.  */
10675    45255 :       if (!r.is_constant (&index))
10676          :         {
10677          :           if (reason)
10678          :             *reason = "remainder is not constant";
10679          :           return NULL_TREE;
10680          :         }
10681          :
               :       /* Even quotient selects from ARG0, odd from ARG1.  */
10682    45255 :       tree arg = ((q & 1) == 0) ? arg0 : arg1;
10683    45255 :       tree elem = vector_cst_elt (arg, index);
10684    45255 :       out_elts.quick_push (elem);
10685          :     }
10686          :
10687     8447 :   return out_elts.build ();
10688     8447 : }
10689 :
10690          : /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
10691          :    selector.  Return the folded VECTOR_CST or CONSTRUCTOR if successful,
10692          :    NULL_TREE otherwise.  */
10693          :
10694          : tree
10695    28365 : fold_vec_perm (tree type, tree arg0, tree arg1, const vec_perm_indices &sel)
10696          : {
10697    28365 :   unsigned int i;
10698    28365 :   unsigned HOST_WIDE_INT nelts;
10699          :
10700    28365 :   gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (type), sel.length ())
10701          :               && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)),
10702          :                            TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1))));
10703          :
               :   /* The element types of both inputs must match the result's
               :      element type.  */
10704    28365 :   if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
10705    28365 :       || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
10706          :     return NULL_TREE;
10707          :
               :   /* Two VECTOR_CST inputs take the VLA-aware constant-folding path.  */
10708    17752 :   if (TREE_CODE (arg0) == VECTOR_CST
10709     8721 :       && TREE_CODE (arg1) == VECTOR_CST)
10710     8447 :     return fold_vec_perm_cst (type, arg0, arg1, sel);
10711          :
10712          :   /* For fall back case, we want to ensure we have VLS vectors
10713          :      with equal length.  */
10714     9305 :   if (!sel.length ().is_constant (&nelts)
10715     9305 :       || !known_eq (sel.length (), TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))))
10716        0 :     return NULL_TREE;
10717          :
               :   /* Flatten both inputs into one 2*NELTS element array so selector
               :      values in [0, 2*NELTS) index it directly.  */
10718     9305 :   tree *in_elts = XALLOCAVEC (tree, nelts * 2);
10719     9305 :   if (!vec_cst_ctor_to_array (arg0, nelts, in_elts)
10720     9305 :       || !vec_cst_ctor_to_array (arg1, nelts, in_elts + nelts))
10721     5998 :     return NULL_TREE;
10722          :
10723     3307 :   vec<constructor_elt, va_gc> *v;
10724     3307 :   vec_alloc (v, nelts);
10725    17339 :   for (i = 0; i < nelts; i++)
10726          :     {
               :       /* Each selector element must be a compile-time constant to
               :          index IN_ELTS.  */
10727    14032 :       HOST_WIDE_INT index;
10728    14032 :       if (!sel[i].is_constant (&index))
10729          :         return NULL_TREE;
10730    14032 :       CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, in_elts[index]);
10731          :     }
10732     3307 :   return build_constructor (type, v);
10733          : }
10734 :
10735          : /* Try to fold a pointer difference of type TYPE between two address
10736          :    expressions of array references AREF0 and AREF1 using location LOC.
10737          :    Return a simplified expression for the difference or NULL_TREE.
10738          :    USE_POINTER_DIFF selects POINTER_DIFF_EXPR over MINUS_EXPR when
10739          :    recursing through pointer indirections.  */
10740          :
10741          : static tree
10742       39 : fold_addr_of_array_ref_difference (location_t loc, tree type,
10743          :                                    tree aref0, tree aref1,
10744          :                                    bool use_pointer_diff)
10745          : {
10746       39 :   tree base0 = TREE_OPERAND (aref0, 0);
10747       39 :   tree base1 = TREE_OPERAND (aref1, 0);
10748       39 :   tree base_offset = build_int_cst (type, 0);
10749          :
10750          :   /* If the bases are array references as well, recurse.  If the bases
10751          :      are pointer indirections compute the difference of the pointers.
10752          :      If the bases are equal, we are set.  */
10753       39 :   if ((TREE_CODE (base0) == ARRAY_REF
10754        1 :        && TREE_CODE (base1) == ARRAY_REF
10755        1 :        && (base_offset
10756        1 :            = fold_addr_of_array_ref_difference (loc, type, base0, base1,
10757          :                                                 use_pointer_diff)))
10758       38 :       || (INDIRECT_REF_P (base0)
10759        7 :           && INDIRECT_REF_P (base1)
10760        7 :           && (base_offset
10761          :                 = use_pointer_diff
10762        8 :                   ? fold_binary_loc (loc, POINTER_DIFF_EXPR, type,
10763        1 :                                      TREE_OPERAND (base0, 0),
10764        1 :                                      TREE_OPERAND (base1, 0))
10765       12 :                   : fold_binary_loc (loc, MINUS_EXPR, type,
10766        6 :                                      fold_convert (type,
10767          :                                                    TREE_OPERAND (base0, 0)),
10768        6 :                                      fold_convert (type,
10769          :                                                    TREE_OPERAND (base1, 0)))))
10770       70 :       || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
10771          :     {
               :       /* &base[i0] - &base[i1] == base_offset + (i0 - i1) * element_size.  */
10772       15 :       tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
10773       15 :       tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
10774       15 :       tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
10775       15 :       tree diff = fold_build2_loc (loc, MINUS_EXPR, type, op0, op1);
10776       15 :       return fold_build2_loc (loc, PLUS_EXPR, type,
10777          :                               base_offset,
10778          :                               fold_build2_loc (loc, MULT_EXPR, type,
10779       15 :                                                diff, esz));
10780          :     }
10781          :   return NULL_TREE;
10782          : }
10781 :
10782          : /* If the real or vector real constant CST of type TYPE has an exact
10783          :    inverse, return it, else return NULL.  */
10784          :
10785          : tree
10786  1116163 : exact_inverse (tree type, tree cst)
10787          : {
10788  1116163 :   REAL_VALUE_TYPE r;
10789  1116163 :   tree unit_type;
10790  1116163 :   machine_mode mode;
10791          :
10792  1116163 :   switch (TREE_CODE (cst))
10793          :     {
               :     /* Scalar real constant: 1/CST must be exactly representable
               :        in TYPE's mode.  */
10794  1115598 :     case REAL_CST:
10795  1115598 :       r = TREE_REAL_CST (cst);
10796          :
10797  1115598 :       if (exact_real_inverse (TYPE_MODE (type), &r))
10798   319384 :         return build_real (type, r);
10799          :
10800          :       return NULL_TREE;
10801          :
               :     /* Vector real constant: every encoded element must have an
               :        exact inverse.  */
10802      565 :     case VECTOR_CST:
10803      565 :       {
10804      565 :         unit_type = TREE_TYPE (type);
10805      565 :         mode = TYPE_MODE (unit_type);
10806          :
10807      565 :         tree_vector_builder elts;
10808      565 :         if (!elts.new_unary_operation (type, cst, false))
10809          :           return NULL_TREE;
10810      565 :         unsigned int count = elts.encoded_nelts ();
10811      625 :         for (unsigned int i = 0; i < count; ++i)
10812          :           {
10813      565 :             r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
10814      565 :             if (!exact_real_inverse (mode, &r))
10815          :               return NULL_TREE;
10816       60 :             elts.quick_push (build_real (unit_type, r));
10817          :           }
10818          :
10819       60 :         return elts.build ();
10820      565 :       }
10821          :
10822          :     default:
10823          :       return NULL_TREE;
10824          :     }
10825          : }
10826 :
10827          : /* Mask out the tz least significant bits of X of type TYPE where
10828          :    tz is the number of trailing zeroes in Y.  */
10829          : static wide_int
10830   137716 : mask_with_tz (tree type, const wide_int &x, const wide_int &y)
10831          : {
10832   137716 :   int tz = wi::ctz (y);
               :   /* Clear the TZ low bits of X; when Y is odd (tz == 0) X is
               :      returned unchanged.  */
10833   137716 :   if (tz > 0)
10834     6473 :     return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
10835   131243 :   return x;
10836          : }
10837 :
10838          : /* Return true when T is an address and is known to be nonzero.
10839          :    For floating point we further ensure that T is not denormal.
10840          :    Similar logic is present in nonzero_address in rtlanal.h.
10841          :
10842          :    If the return value is based on the assumption that signed overflow
10843          :    is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
10844          :    change *STRICT_OVERFLOW_P.  */
10845          :
10846          : static bool
10847 148179697 : tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
10848          : {
10849 148541160 :   tree type = TREE_TYPE (t);
10850 148541160 :   enum tree_code code;
10851          :
10852          :   /* Doing something useful for floating point would need more work.  */
10853 148541160 :   if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
10854          :     return false;
10855          :
               :   /* First dispatch on the tree code class; arity-specific helpers
               :      handle most cases.  */
10856 148422294 :   code = TREE_CODE (t);
10857 148422294 :   switch (TREE_CODE_CLASS (code))
10858          :     {
10859   946024 :     case tcc_unary:
10860   946024 :       return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
10861   946024 :                                          strict_overflow_p);
10862  2990888 :     case tcc_binary:
10863  2990888 :     case tcc_comparison:
10864  2990888 :       return tree_binary_nonzero_warnv_p (code, type,
10865  2990888 :                                           TREE_OPERAND (t, 0),
10866  2990888 :                                           TREE_OPERAND (t, 1),
10867  2990888 :                                           strict_overflow_p);
10868 14917566 :     case tcc_constant:
10869 14917566 :     case tcc_declaration:
10870 14917566 :     case tcc_reference:
10871 14917566 :       return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10872          :
10873 129567816 :     default:
10874 129567816 :       break;
10875          :     }
10876          :
               :   /* Codes not covered by a class above get routed to the same
               :      helpers individually.  */
10877 129567816 :   switch (code)
10878          :     {
10879   667057 :     case TRUTH_NOT_EXPR:
10880   667057 :       return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
10881   667057 :                                          strict_overflow_p);
10882          :
10883    65493 :     case TRUTH_AND_EXPR:
10884    65493 :     case TRUTH_OR_EXPR:
10885    65493 :     case TRUTH_XOR_EXPR:
10886    65493 :       return tree_binary_nonzero_warnv_p (code, type,
10887    65493 :                                           TREE_OPERAND (t, 0),
10888    65493 :                                           TREE_OPERAND (t, 1),
10889    65493 :                                           strict_overflow_p);
10890          :
10891 124969469 :     case COND_EXPR:
10892 124969469 :     case CONSTRUCTOR:
10893 124969469 :     case OBJ_TYPE_REF:
10894 124969469 :     case ADDR_EXPR:
10895 124969469 :     case WITH_SIZE_EXPR:
10896 124969469 :     case SSA_NAME:
10897 124969469 :       return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10898          :
               :     /* For compounds, the value is that of the second operand.  */
10899    85303 :     case COMPOUND_EXPR:
10900    85303 :     case MODIFY_EXPR:
10901    85303 :     case BIND_EXPR:
10902    85303 :       return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
10903    85303 :                                         strict_overflow_p);
10904          :
10905   276160 :     case SAVE_EXPR:
10906   276160 :       return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
10907   276160 :                                         strict_overflow_p);
10908          :
10909  3393003 :     case CALL_EXPR:
10910  3393003 :       {
10911  3393003 :         tree fndecl = get_callee_fndecl (t);
10912  3393003 :         if (!fndecl) return false;
               :         /* A throwing operator new never returns NULL (given
               :            -fdelete-null-pointer-checks and not -fcheck-new).  */
10913  3391065 :         if (flag_delete_null_pointer_checks && !flag_check_new
10914  3391065 :             && DECL_IS_OPERATOR_NEW_P (fndecl)
10915  3391759 :             && !TREE_NOTHROW (fndecl))
10916          :           return true;
               :         /* Likewise for functions declared returns_nonnull.  */
10917  3391759 :         if (flag_delete_null_pointer_checks
10918  6782824 :             && lookup_attribute ("returns_nonnull",
10919  3391065 :                                  TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
10920          :           return true;
10921  3391751 :         return alloca_call_p (t);
10922          :       }
10923          :
10924          :     default:
10925          :       break;
10926          :     }
10927          :   return false;
10928          : }
10929 :
10930          : /* Return true when T is an address and is known to be nonzero.
10931          :    Handle warnings about undefined signed overflow.  */
10932          :
10933          : bool
10934 146991320 : tree_expr_nonzero_p (tree t)
10935          : {
10936 146991320 :   bool ret, strict_overflow_p;
10937          :
10938 146991320 :   strict_overflow_p = false;
10939 146991320 :   ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
               :   /* Emit a -Wstrict-overflow note when the answer relied on signed
               :      overflow being undefined.  */
10940 146991320 :   if (strict_overflow_p)
10941        0 :     fold_overflow_warning (("assuming signed overflow does not occur when "
10942          :                             "determining that expression is always "
10943          :                             "non-zero"),
10944          :                            WARN_STRICT_OVERFLOW_MISC);
10945 146991320 :   return ret;
10946          : }
10947 :
10948          : /* Return true if T is known not to be equal to an integer W.
10949          :    If STMT is specified, the check is if T on STMT is not equal
10950          :    to W.  */
10951          :
10952          : bool
10953 97659834 : expr_not_equal_to (tree t, const wide_int &w, gimple *stmt /* = NULL */)
10954          : {
10955 97659834 :   int_range_max vr;
10956 97659834 :   switch (TREE_CODE (t))
10957          :     {
               :     /* A constant can be compared directly.  */
10958  1073191 :     case INTEGER_CST:
10959  1073191 :       return wi::to_wide (t) != w;
10960          :
10961 96585561 :     case SSA_NAME:
10962 96585561 :       if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
10963          :         return false;
10964          :
               :       /* Ask the range query whether W lies outside T's value range.  */
10965 193171122 :       get_range_query (cfun)->range_of_expr (vr, t, stmt);
10966 96585561 :       if (!vr.undefined_p () && !vr.contains_p (w))
10967          :         return true;
10968          :       /* If T has some known zero bits and W has any of those bits set,
10969          :          then T is known not to be equal to W.  */
10970 96471875 :       if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
10971 192943310 :                               TYPE_PRECISION (TREE_TYPE (t))), 0))
10972          :         return true;
10973          :       return false;
10974          :
10975          :     default:
10976          :       return false;
10977          :     }
10978 97659834 : }
10979 :
10980 : /* Fold a binary expression of code CODE and type TYPE with operands
10981 : OP0 and OP1. LOC is the location of the resulting expression.
10982 : Return the folded expression if folding is successful. Otherwise,
10983 : return NULL_TREE. */
10984 :
10985 : tree
10986 939824842 : fold_binary_loc (location_t loc, enum tree_code code, tree type,
10987 : tree op0, tree op1)
10988 : {
10989 939824842 : enum tree_code_class kind = TREE_CODE_CLASS (code);
10990 939824842 : tree arg0, arg1, tem;
10991 939824842 : tree t1 = NULL_TREE;
10992 939824842 : bool strict_overflow_p;
10993 939824842 : unsigned int prec;
10994 :
10995 939824842 : gcc_assert (IS_EXPR_CODE_CLASS (kind)
10996 : && TREE_CODE_LENGTH (code) == 2
10997 : && op0 != NULL_TREE
10998 : && op1 != NULL_TREE);
10999 :
11000 939824842 : arg0 = op0;
11001 939824842 : arg1 = op1;
11002 :
11003 : /* Strip any conversions that don't change the mode. This is
11004 : safe for every expression, except for a comparison expression
11005 : because its signedness is derived from its operands. So, in
11006 : the latter case, only strip conversions that don't change the
11007 : signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
11008 : preserved.
11009 :
11010 : Note that this is done as an internal manipulation within the
11011 : constant folder, in order to find the simplest representation
11012 : of the arguments so that their form can be studied. In any
11013 : cases, the appropriate type conversions should be put back in
11014 : the tree that will get out of the constant folder. */
11015 :
11016 939824842 : if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
11017 : {
11018 212773080 : STRIP_SIGN_NOPS (arg0);
11019 212773080 : STRIP_SIGN_NOPS (arg1);
11020 : }
11021 : else
11022 : {
11023 727051762 : STRIP_NOPS (arg0);
11024 727051762 : STRIP_NOPS (arg1);
11025 : }
11026 :
11027 : /* Note that TREE_CONSTANT isn't enough: static var addresses are
11028 : constant but we can't do arithmetic on them. */
11029 939824842 : if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
11030 : {
11031 241850551 : tem = const_binop (code, type, arg0, arg1);
11032 241850551 : if (tem != NULL_TREE)
11033 : {
11034 239013903 : if (TREE_TYPE (tem) != type)
11035 3827806 : tem = fold_convert_loc (loc, type, tem);
11036 239013903 : return tem;
11037 : }
11038 : }
11039 :
11040 : /* If this is a commutative operation, and ARG0 is a constant, move it
11041 : to ARG1 to reduce the number of tests below. */
11042 700810939 : if (commutative_tree_code (code)
11043 700810939 : && tree_swap_operands_p (arg0, arg1))
11044 33217735 : return fold_build2_loc (loc, code, type, op1, op0);
11045 :
11046 : /* Likewise if this is a comparison, and ARG0 is a constant, move it
11047 : to ARG1 to reduce the number of tests below. */
11048 667593204 : if (kind == tcc_comparison
11049 667593204 : && tree_swap_operands_p (arg0, arg1))
11050 8885877 : return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
11051 :
11052 658707327 : tem = generic_simplify (loc, code, type, op0, op1);
11053 658707327 : if (tem)
11054 : return tem;
11055 :
11056 : /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
11057 :
11058 : First check for cases where an arithmetic operation is applied to a
11059 : compound, conditional, or comparison operation. Push the arithmetic
11060 : operation inside the compound or conditional to see if any folding
11061 : can then be done. Convert comparison to conditional for this purpose.
11062 : The also optimizes non-constant cases that used to be done in
11063 : expand_expr.
11064 :
11065 : Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
11066 : one of the operands is a comparison and the other is a comparison, a
11067 : BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
11068 : code below would make the expression more complex. Change it to a
11069 : TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
11070 : TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
11071 :
11072 555519645 : if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
11073 : || code == EQ_EXPR || code == NE_EXPR)
11074 67743035 : && !VECTOR_TYPE_P (TREE_TYPE (arg0))
11075 67168482 : && ((truth_value_p (TREE_CODE (arg0))
11076 1256741 : && (truth_value_p (TREE_CODE (arg1))
11077 962296 : || (TREE_CODE (arg1) == BIT_AND_EXPR
11078 45 : && integer_onep (TREE_OPERAND (arg1, 1)))))
11079 66874021 : || (truth_value_p (TREE_CODE (arg1))
11080 6509 : && (truth_value_p (TREE_CODE (arg0))
11081 6509 : || (TREE_CODE (arg0) == BIT_AND_EXPR
11082 177 : && integer_onep (TREE_OPERAND (arg0, 1)))))))
11083 : {
11084 367944 : tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
11085 73469 : : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
11086 : : TRUTH_XOR_EXPR,
11087 : boolean_type_node,
11088 : fold_convert_loc (loc, boolean_type_node, arg0),
11089 : fold_convert_loc (loc, boolean_type_node, arg1));
11090 :
11091 294475 : if (code == EQ_EXPR)
11092 67011 : tem = invert_truthvalue_loc (loc, tem);
11093 :
11094 294475 : return fold_convert_loc (loc, type, tem);
11095 : }
11096 :
11097 555225170 : if (TREE_CODE_CLASS (code) == tcc_binary
11098 322383439 : || TREE_CODE_CLASS (code) == tcc_comparison)
11099 : {
11100 337300473 : if (TREE_CODE (arg0) == COMPOUND_EXPR)
11101 : {
11102 81541 : tem = fold_build2_loc (loc, code, type,
11103 81541 : fold_convert_loc (loc, TREE_TYPE (op0),
11104 81541 : TREE_OPERAND (arg0, 1)), op1);
11105 81541 : return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
11106 81541 : tem);
11107 : }
11108 337218932 : if (TREE_CODE (arg1) == COMPOUND_EXPR)
11109 : {
11110 3142 : tem = fold_build2_loc (loc, code, type, op0,
11111 3142 : fold_convert_loc (loc, TREE_TYPE (op1),
11112 3142 : TREE_OPERAND (arg1, 1)));
11113 3142 : return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
11114 3142 : tem);
11115 : }
11116 :
11117 337215790 : if (TREE_CODE (arg0) == COND_EXPR
11118 336842571 : || TREE_CODE (arg0) == VEC_COND_EXPR
11119 336840455 : || COMPARISON_CLASS_P (arg0))
11120 : {
11121 818016 : tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
11122 : arg0, arg1,
11123 : /*cond_first_p=*/1);
11124 818016 : if (tem != NULL_TREE)
11125 : return tem;
11126 : }
11127 :
11128 336754408 : if (TREE_CODE (arg1) == COND_EXPR
11129 336520464 : || TREE_CODE (arg1) == VEC_COND_EXPR
11130 336520240 : || COMPARISON_CLASS_P (arg1))
11131 : {
11132 245479 : tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
11133 : arg1, arg0,
11134 : /*cond_first_p=*/0);
11135 245479 : if (tem != NULL_TREE)
11136 : return tem;
11137 : }
11138 : }
11139 :
11140 554671390 : switch (code)
11141 : {
11142 57908200 : case MEM_REF:
11143 : /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
11144 57908200 : if (TREE_CODE (arg0) == ADDR_EXPR
11145 57908200 : && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
11146 : {
11147 858751 : tree iref = TREE_OPERAND (arg0, 0);
11148 858751 : return fold_build2 (MEM_REF, type,
11149 : TREE_OPERAND (iref, 0),
11150 : int_const_binop (PLUS_EXPR, arg1,
11151 : TREE_OPERAND (iref, 1)));
11152 : }
11153 :
11154 : /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
11155 57049449 : if (TREE_CODE (arg0) == ADDR_EXPR
11156 57049449 : && handled_component_p (TREE_OPERAND (arg0, 0)))
11157 : {
11158 5761490 : tree base;
11159 5761490 : poly_int64 coffset;
11160 5761490 : base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
11161 : &coffset);
11162 5761490 : if (!base)
11163 : return NULL_TREE;
11164 5757513 : return fold_build2 (MEM_REF, type,
11165 : build1 (ADDR_EXPR, TREE_TYPE (arg0), base),
11166 : int_const_binop (PLUS_EXPR, arg1,
11167 : size_int (coffset)));
11168 : }
11169 :
11170 : return NULL_TREE;
11171 :
11172 52639670 : case POINTER_PLUS_EXPR:
11173 : /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
11174 105278924 : if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
11175 105270595 : && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
11176 35605 : return fold_convert_loc (loc, type,
11177 : fold_build2_loc (loc, PLUS_EXPR, sizetype,
11178 : fold_convert_loc (loc, sizetype,
11179 : arg1),
11180 : fold_convert_loc (loc, sizetype,
11181 35605 : arg0)));
11182 :
11183 : return NULL_TREE;
11184 :
11185 62800428 : case PLUS_EXPR:
11186 62800428 : if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
11187 : {
11188 : /* X + (X / CST) * -CST is X % CST. */
11189 51715712 : if (TREE_CODE (arg1) == MULT_EXPR
11190 2341673 : && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
11191 51721767 : && operand_equal_p (arg0,
11192 6055 : TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
11193 : {
11194 214 : tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
11195 214 : tree cst1 = TREE_OPERAND (arg1, 1);
11196 214 : tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
11197 : cst1, cst0);
11198 214 : if (sum && integer_zerop (sum))
11199 214 : return fold_convert_loc (loc, type,
11200 : fold_build2_loc (loc, TRUNC_MOD_EXPR,
11201 214 : TREE_TYPE (arg0), arg0,
11202 214 : cst0));
11203 : }
11204 : }
11205 :
11206 : /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
11207 : one. Make sure the type is not saturating and has the signedness of
11208 : the stripped operands, as fold_plusminus_mult_expr will re-associate.
11209 : ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
11210 62800214 : if ((TREE_CODE (arg0) == MULT_EXPR
11211 51374314 : || TREE_CODE (arg1) == MULT_EXPR)
11212 12743549 : && !TYPE_SATURATING (type)
11213 12743549 : && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
11214 12339196 : && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
11215 74474278 : && (!FLOAT_TYPE_P (type) || flag_associative_math))
11216 : {
11217 8525366 : tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
11218 8525366 : if (tem)
11219 : return tem;
11220 : }
11221 :
11222 61565187 : if (! FLOAT_TYPE_P (type))
11223 : {
11224 : /* Reassociate (plus (plus (mult) (foo)) (mult)) as
11225 : (plus (plus (mult) (mult)) (foo)) so that we can
11226 : take advantage of the factoring cases below. */
11227 281339 : if (ANY_INTEGRAL_TYPE_P (type)
11228 50482819 : && TYPE_OVERFLOW_WRAPS (type)
11229 50482819 : && (((TREE_CODE (arg0) == PLUS_EXPR
11230 32417042 : || TREE_CODE (arg0) == MINUS_EXPR)
11231 3477428 : && TREE_CODE (arg1) == MULT_EXPR)
11232 31903024 : || ((TREE_CODE (arg1) == PLUS_EXPR
11233 31903024 : || TREE_CODE (arg1) == MINUS_EXPR)
11234 433387 : && TREE_CODE (arg0) == MULT_EXPR)))
11235 : {
11236 563334 : tree parg0, parg1, parg, marg;
11237 563334 : enum tree_code pcode;
11238 :
11239 563334 : if (TREE_CODE (arg1) == MULT_EXPR)
11240 : parg = arg0, marg = arg1;
11241 : else
11242 49316 : parg = arg1, marg = arg0;
11243 563334 : pcode = TREE_CODE (parg);
11244 563334 : parg0 = TREE_OPERAND (parg, 0);
11245 563334 : parg1 = TREE_OPERAND (parg, 1);
11246 563334 : STRIP_NOPS (parg0);
11247 563334 : STRIP_NOPS (parg1);
11248 :
11249 563334 : if (TREE_CODE (parg0) == MULT_EXPR
11250 265890 : && TREE_CODE (parg1) != MULT_EXPR)
11251 228986 : return fold_build2_loc (loc, pcode, type,
11252 : fold_build2_loc (loc, PLUS_EXPR, type,
11253 : fold_convert_loc (loc, type,
11254 : parg0),
11255 : fold_convert_loc (loc, type,
11256 : marg)),
11257 228986 : fold_convert_loc (loc, type, parg1));
11258 334348 : if (TREE_CODE (parg0) != MULT_EXPR
11259 297444 : && TREE_CODE (parg1) == MULT_EXPR)
11260 99843 : return
11261 99843 : fold_build2_loc (loc, PLUS_EXPR, type,
11262 : fold_convert_loc (loc, type, parg0),
11263 : fold_build2_loc (loc, pcode, type,
11264 : fold_convert_loc (loc, type, marg),
11265 : fold_convert_loc (loc, type,
11266 99843 : parg1)));
11267 : }
11268 : }
11269 : else
11270 : {
11271 : /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
11272 : to __complex__ ( x, y ). This is not the same for SNaNs or
11273 : if signed zeros are involved. */
11274 11082368 : if (!HONOR_SNANS (arg0)
11275 11080708 : && !HONOR_SIGNED_ZEROS (arg0)
11276 11103450 : && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11277 : {
11278 3086 : tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11279 3086 : tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
11280 3086 : tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
11281 3086 : bool arg0rz = false, arg0iz = false;
11282 128 : if ((arg0r && (arg0rz = real_zerop (arg0r)))
11283 3190 : || (arg0i && (arg0iz = real_zerop (arg0i))))
11284 : {
11285 86 : tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
11286 86 : tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
11287 86 : if (arg0rz && arg1i && real_zerop (arg1i))
11288 : {
11289 22 : tree rp = arg1r ? arg1r
11290 0 : : build1 (REALPART_EXPR, rtype, arg1);
11291 22 : tree ip = arg0i ? arg0i
11292 0 : : build1 (IMAGPART_EXPR, rtype, arg0);
11293 22 : return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11294 : }
11295 64 : else if (arg0iz && arg1r && real_zerop (arg1r))
11296 : {
11297 53 : tree rp = arg0r ? arg0r
11298 0 : : build1 (REALPART_EXPR, rtype, arg0);
11299 53 : tree ip = arg1i ? arg1i
11300 0 : : build1 (IMAGPART_EXPR, rtype, arg1);
11301 53 : return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11302 : }
11303 : }
11304 : }
11305 :
11306 : /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
11307 : We associate floats only if the user has specified
11308 : -fassociative-math. */
11309 11082293 : if (flag_associative_math
11310 20985 : && TREE_CODE (arg1) == PLUS_EXPR
11311 36 : && TREE_CODE (arg0) != MULT_EXPR)
11312 : {
11313 21 : tree tree10 = TREE_OPERAND (arg1, 0);
11314 21 : tree tree11 = TREE_OPERAND (arg1, 1);
11315 21 : if (TREE_CODE (tree11) == MULT_EXPR
11316 5 : && TREE_CODE (tree10) == MULT_EXPR)
11317 : {
11318 1 : tree tree0;
11319 1 : tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
11320 1 : return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
11321 : }
11322 : }
11323 : /* Convert (b*c + d*e) + a into b*c + (d*e +a).
11324 : We associate floats only if the user has specified
11325 : -fassociative-math. */
11326 11082292 : if (flag_associative_math
11327 20984 : && TREE_CODE (arg0) == PLUS_EXPR
11328 1221 : && TREE_CODE (arg1) != MULT_EXPR)
11329 : {
11330 831 : tree tree00 = TREE_OPERAND (arg0, 0);
11331 831 : tree tree01 = TREE_OPERAND (arg0, 1);
11332 831 : if (TREE_CODE (tree01) == MULT_EXPR
11333 49 : && TREE_CODE (tree00) == MULT_EXPR)
11334 : {
11335 9 : tree tree0;
11336 9 : tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
11337 9 : return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
11338 : }
11339 : }
11340 : }
11341 :
11342 11081461 : bit_rotate:
11343 : /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
11344 : is a rotate of A by C1 bits. */
11345 : /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
11346 : is a rotate of A by B bits.
11347 : Similarly for (A << B) | (A >> (-B & C3)) where C3 is Z-1,
11348 : though in this case CODE must be | and not + or ^, otherwise
11349 : it doesn't return A when B is 0. */
11350 64351045 : {
11351 64351045 : enum tree_code code0, code1;
11352 64351045 : tree rtype;
11353 64351045 : code0 = TREE_CODE (arg0);
11354 64351045 : code1 = TREE_CODE (arg1);
11355 93443 : if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
11356 64334753 : || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
11357 39586 : && operand_equal_p (TREE_OPERAND (arg0, 0),
11358 39586 : TREE_OPERAND (arg1, 0), 0)
11359 36819 : && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
11360 36819 : TYPE_UNSIGNED (rtype))
11361 : /* Only create rotates in complete modes. Other cases are not
11362 : expanded properly. */
11363 64377766 : && (element_precision (rtype)
11364 53442 : == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
11365 : {
11366 26651 : tree tree01, tree11;
11367 26651 : tree orig_tree01, orig_tree11;
11368 26651 : enum tree_code code01, code11;
11369 :
11370 26651 : tree01 = orig_tree01 = TREE_OPERAND (arg0, 1);
11371 26651 : tree11 = orig_tree11 = TREE_OPERAND (arg1, 1);
11372 26651 : STRIP_NOPS (tree01);
11373 26651 : STRIP_NOPS (tree11);
11374 26651 : code01 = TREE_CODE (tree01);
11375 26651 : code11 = TREE_CODE (tree11);
11376 26651 : if (code11 != MINUS_EXPR
11377 25965 : && (code01 == MINUS_EXPR || code01 == BIT_AND_EXPR))
11378 : {
11379 1446 : std::swap (code0, code1);
11380 1446 : std::swap (code01, code11);
11381 1446 : std::swap (tree01, tree11);
11382 1446 : std::swap (orig_tree01, orig_tree11);
11383 : }
11384 53302 : if (code01 == INTEGER_CST
11385 3152 : && code11 == INTEGER_CST
11386 32953 : && (wi::to_widest (tree01) + wi::to_widest (tree11)
11387 32953 : == element_precision (rtype)))
11388 : {
11389 6022 : tem = build2_loc (loc, LROTATE_EXPR,
11390 3011 : rtype, TREE_OPERAND (arg0, 0),
11391 : code0 == LSHIFT_EXPR
11392 : ? orig_tree01 : orig_tree11);
11393 3011 : return fold_convert_loc (loc, type, tem);
11394 : }
11395 23640 : else if (code11 == MINUS_EXPR)
11396 : {
11397 941 : tree tree110, tree111;
11398 941 : tree110 = TREE_OPERAND (tree11, 0);
11399 941 : tree111 = TREE_OPERAND (tree11, 1);
11400 941 : STRIP_NOPS (tree110);
11401 941 : STRIP_NOPS (tree111);
11402 941 : if (TREE_CODE (tree110) == INTEGER_CST
11403 930 : && compare_tree_int (tree110,
11404 930 : element_precision (rtype)) == 0
11405 1855 : && operand_equal_p (tree01, tree111, 0))
11406 : {
11407 777 : tem = build2_loc (loc, (code0 == LSHIFT_EXPR
11408 : ? LROTATE_EXPR : RROTATE_EXPR),
11409 558 : rtype, TREE_OPERAND (arg0, 0),
11410 : orig_tree01);
11411 558 : return fold_convert_loc (loc, type, tem);
11412 : }
11413 : }
11414 22699 : else if (code == BIT_IOR_EXPR
11415 21585 : && code11 == BIT_AND_EXPR
11416 44209 : && pow2p_hwi (element_precision (rtype)))
11417 : {
11418 21510 : tree tree110, tree111;
11419 21510 : tree110 = TREE_OPERAND (tree11, 0);
11420 21510 : tree111 = TREE_OPERAND (tree11, 1);
11421 21510 : STRIP_NOPS (tree110);
11422 21510 : STRIP_NOPS (tree111);
11423 21510 : if (TREE_CODE (tree110) == NEGATE_EXPR
11424 21087 : && TREE_CODE (tree111) == INTEGER_CST
11425 21087 : && compare_tree_int (tree111,
11426 21087 : element_precision (rtype) - 1) == 0
11427 42583 : && operand_equal_p (tree01, TREE_OPERAND (tree110, 0), 0))
11428 : {
11429 31505 : tem = build2_loc (loc, (code0 == LSHIFT_EXPR
11430 : ? LROTATE_EXPR : RROTATE_EXPR),
11431 21027 : rtype, TREE_OPERAND (arg0, 0),
11432 : orig_tree01);
11433 21027 : return fold_convert_loc (loc, type, tem);
11434 : }
11435 : }
11436 : }
11437 : }
11438 :
11439 159729896 : associate:
11440 : /* In most languages, can't associate operations on floats through
11441 : parentheses. Rather than remember where the parentheses were, we
11442 : don't associate floats at all, unless the user has specified
11443 : -fassociative-math.
11444 : And, we need to make sure type is not saturating. */
11445 :
11446 159729896 : if ((! FLOAT_TYPE_P (type) || flag_associative_math)
11447 120466198 : && !TYPE_SATURATING (type)
11448 280196094 : && !TYPE_OVERFLOW_SANITIZED (type))
11449 : {
11450 120438005 : tree var0, minus_var0, con0, minus_con0, lit0, minus_lit0;
11451 120438005 : tree var1, minus_var1, con1, minus_con1, lit1, minus_lit1;
11452 120438005 : tree atype = type;
11453 120438005 : bool ok = true;
11454 :
11455 : /* Split both trees into variables, constants, and literals. Then
11456 : associate each group together, the constants with literals,
11457 : then the result with variables. This increases the chances of
11458 : literals being recombined later and of generating relocatable
11459 : expressions for the sum of a constant and literal. */
11460 120438005 : var0 = split_tree (arg0, type, code,
11461 : &minus_var0, &con0, &minus_con0,
11462 : &lit0, &minus_lit0, 0);
11463 120438005 : var1 = split_tree (arg1, type, code,
11464 : &minus_var1, &con1, &minus_con1,
11465 : &lit1, &minus_lit1, code == MINUS_EXPR);
11466 :
11467 : /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
11468 120438005 : if (code == MINUS_EXPR)
11469 13442428 : code = PLUS_EXPR;
11470 :
11471 : /* With undefined overflow prefer doing association in a type
11472 : which wraps on overflow, if that is one of the operand types. */
11473 120437774 : if ((POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
11474 239634491 : && !TYPE_OVERFLOW_WRAPS (type))
11475 : {
11476 60682672 : if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11477 60039679 : && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11478 774051 : atype = TREE_TYPE (arg0);
11479 59099067 : else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
11480 58869988 : && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
11481 235553 : atype = TREE_TYPE (arg1);
11482 30586359 : gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
11483 : }
11484 :
11485 : /* With undefined overflow we can only associate constants with one
11486 : variable, and constants whose association doesn't overflow. */
11487 120437774 : if ((POINTER_TYPE_P (atype) || INTEGRAL_TYPE_P (atype))
11488 239634491 : && !TYPE_OVERFLOW_WRAPS (atype))
11489 : {
11490 29576755 : if ((var0 && var1) || (minus_var0 && minus_var1))
11491 : {
11492 : /* ??? If split_tree would handle NEGATE_EXPR we could
11493 : simply reject these cases and the allowed cases would
11494 : be the var0/minus_var1 ones. */
11495 1237 : tree tmp0 = var0 ? var0 : minus_var0;
11496 5561864 : tree tmp1 = var1 ? var1 : minus_var1;
11497 5561864 : bool one_neg = false;
11498 :
11499 5561864 : if (TREE_CODE (tmp0) == NEGATE_EXPR)
11500 : {
11501 1639 : tmp0 = TREE_OPERAND (tmp0, 0);
11502 1639 : one_neg = !one_neg;
11503 : }
11504 4921838 : if (CONVERT_EXPR_P (tmp0)
11505 664046 : && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
11506 6224880 : && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
11507 663016 : <= TYPE_PRECISION (atype)))
11508 650377 : tmp0 = TREE_OPERAND (tmp0, 0);
11509 5561864 : if (TREE_CODE (tmp1) == NEGATE_EXPR)
11510 : {
11511 168 : tmp1 = TREE_OPERAND (tmp1, 0);
11512 168 : one_neg = !one_neg;
11513 : }
11514 5242751 : if (CONVERT_EXPR_P (tmp1)
11515 383127 : && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
11516 5944867 : && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
11517 383003 : <= TYPE_PRECISION (atype)))
11518 366352 : tmp1 = TREE_OPERAND (tmp1, 0);
11519 : /* The only case we can still associate with two variables
11520 : is if they cancel out. */
11521 5561864 : if (!one_neg
11522 5561864 : || !operand_equal_p (tmp0, tmp1, 0))
11523 : ok = false;
11524 : }
11525 23641769 : else if ((var0 && minus_var1
11526 4065135 : && ! operand_equal_p (var0, minus_var1, 0))
11527 43591526 : || (minus_var0 && var1
11528 11367 : && ! operand_equal_p (minus_var0, var1, 0)))
11529 : ok = false;
11530 : }
11531 :
11532 : /* Only do something if we found more than two objects. Otherwise,
11533 : nothing has changed and we risk infinite recursion. */
11534 : if (ok
11535 110799712 : && ((var0 != 0) + (var1 != 0)
11536 110799712 : + (minus_var0 != 0) + (minus_var1 != 0)
11537 110799712 : + (con0 != 0) + (con1 != 0)
11538 110799712 : + (minus_con0 != 0) + (minus_con1 != 0)
11539 110799712 : + (lit0 != 0) + (lit1 != 0)
11540 110799712 : + (minus_lit0 != 0) + (minus_lit1 != 0)) > 2)
11541 : {
11542 2053407 : int var0_origin = (var0 != 0) + 2 * (var1 != 0);
11543 4106814 : int minus_var0_origin
11544 2053407 : = (minus_var0 != 0) + 2 * (minus_var1 != 0);
11545 2053407 : int con0_origin = (con0 != 0) + 2 * (con1 != 0);
11546 4106814 : int minus_con0_origin
11547 2053407 : = (minus_con0 != 0) + 2 * (minus_con1 != 0);
11548 2053407 : int lit0_origin = (lit0 != 0) + 2 * (lit1 != 0);
11549 4106814 : int minus_lit0_origin
11550 2053407 : = (minus_lit0 != 0) + 2 * (minus_lit1 != 0);
11551 2053407 : var0 = associate_trees (loc, var0, var1, code, atype);
11552 2053407 : minus_var0 = associate_trees (loc, minus_var0, minus_var1,
11553 : code, atype);
11554 2053407 : con0 = associate_trees (loc, con0, con1, code, atype);
11555 2053407 : minus_con0 = associate_trees (loc, minus_con0, minus_con1,
11556 : code, atype);
11557 2053407 : lit0 = associate_trees (loc, lit0, lit1, code, atype);
11558 2053407 : minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
11559 : code, atype);
11560 :
11561 2053407 : if (minus_var0 && var0)
11562 : {
11563 1372318 : var0_origin |= minus_var0_origin;
11564 1372318 : var0 = associate_trees (loc, var0, minus_var0,
11565 : MINUS_EXPR, atype);
11566 1372318 : minus_var0 = 0;
11567 1372318 : minus_var0_origin = 0;
11568 : }
11569 2053407 : if (minus_con0 && con0)
11570 : {
11571 3560 : con0_origin |= minus_con0_origin;
11572 3560 : con0 = associate_trees (loc, con0, minus_con0,
11573 : MINUS_EXPR, atype);
11574 3560 : minus_con0 = 0;
11575 3560 : minus_con0_origin = 0;
11576 : }
11577 :
11578 : /* Preserve the MINUS_EXPR if the negative part of the literal is
11579 : greater than the positive part. Otherwise, the multiplicative
11580 : folding code (i.e extract_muldiv) may be fooled in case
11581 : unsigned constants are subtracted, like in the following
11582 : example: ((X*2 + 4) - 8U)/2. */
11583 2053407 : if (minus_lit0 && lit0)
11584 : {
11585 229132 : if (TREE_CODE (lit0) == INTEGER_CST
11586 229132 : && TREE_CODE (minus_lit0) == INTEGER_CST
11587 229132 : && tree_int_cst_lt (lit0, minus_lit0)
11588 : /* But avoid ending up with only negated parts. */
11589 288114 : && (var0 || con0))
11590 : {
11591 54806 : minus_lit0_origin |= lit0_origin;
11592 54806 : minus_lit0 = associate_trees (loc, minus_lit0, lit0,
11593 : MINUS_EXPR, atype);
11594 54806 : lit0 = 0;
11595 54806 : lit0_origin = 0;
11596 : }
11597 : else
11598 : {
11599 174326 : lit0_origin |= minus_lit0_origin;
11600 174326 : lit0 = associate_trees (loc, lit0, minus_lit0,
11601 : MINUS_EXPR, atype);
11602 174326 : minus_lit0 = 0;
11603 174326 : minus_lit0_origin = 0;
11604 : }
11605 : }
11606 :
11607 : /* Don't introduce overflows through reassociation. */
11608 1362402 : if ((lit0 && TREE_OVERFLOW_P (lit0))
11609 3415768 : || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0)))
11610 2053407 : return NULL_TREE;
11611 :
11612 : /* Eliminate lit0 and minus_lit0 to con0 and minus_con0. */
11613 2053366 : con0_origin |= lit0_origin;
11614 2053366 : con0 = associate_trees (loc, con0, lit0, code, atype);
11615 2053366 : minus_con0_origin |= minus_lit0_origin;
11616 2053366 : minus_con0 = associate_trees (loc, minus_con0, minus_lit0,
11617 : code, atype);
11618 :
11619 : /* Eliminate minus_con0. */
11620 2053366 : if (minus_con0)
11621 : {
11622 695748 : if (con0)
11623 : {
11624 15682 : con0_origin |= minus_con0_origin;
11625 15682 : con0 = associate_trees (loc, con0, minus_con0,
11626 : MINUS_EXPR, atype);
11627 : }
11628 680066 : else if (var0)
11629 : {
11630 680066 : var0_origin |= minus_con0_origin;
11631 680066 : var0 = associate_trees (loc, var0, minus_con0,
11632 : MINUS_EXPR, atype);
11633 : }
11634 : else
11635 0 : gcc_unreachable ();
11636 : }
11637 :
11638 : /* Eliminate minus_var0. */
11639 2053366 : if (minus_var0)
11640 : {
11641 336297 : if (con0)
11642 : {
11643 336297 : con0_origin |= minus_var0_origin;
11644 336297 : con0 = associate_trees (loc, con0, minus_var0,
11645 : MINUS_EXPR, atype);
11646 : }
11647 : else
11648 0 : gcc_unreachable ();
11649 : }
11650 :
11651 : /* Reassociate only if there has been any actual association
11652 : between subtrees from op0 and subtrees from op1 in at
11653 : least one of the operands, otherwise we risk infinite
11654 : recursion. See PR114084. */
11655 2053366 : if (var0_origin != 3 && con0_origin != 3)
11656 : return NULL_TREE;
11657 :
11658 2051689 : return
11659 2051689 : fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
11660 2051689 : code, atype));
11661 : }
11662 : }
11663 :
11664 : return NULL_TREE;
11665 :
11666 24851671 : case POINTER_DIFF_EXPR:
11667 24851671 : case MINUS_EXPR:
11668 : /* Fold &a[i] - &a[j] to i-j. */
11669 24851671 : if (TREE_CODE (arg0) == ADDR_EXPR
11670 45528 : && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
11671 6177 : && TREE_CODE (arg1) == ADDR_EXPR
11672 24852275 : && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
11673 : {
11674 38 : tree tem = fold_addr_of_array_ref_difference (loc, type,
11675 38 : TREE_OPERAND (arg0, 0),
11676 38 : TREE_OPERAND (arg1, 0),
11677 : code
11678 : == POINTER_DIFF_EXPR);
11679 38 : if (tem)
11680 : return tem;
11681 : }
11682 :
11683 : /* Further transformations are not for pointers. */
11684 24851657 : if (code == POINTER_DIFF_EXPR)
11685 : return NULL_TREE;
11686 :
11687 : /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
11688 21206021 : if (TREE_CODE (arg0) == NEGATE_EXPR
11689 141081 : && negate_expr_p (op1)
11690 : /* If arg0 is e.g. unsigned int and type is int, then this could
11691 : introduce UB, because if A is INT_MIN at runtime, the original
11692 : expression can be well defined while the latter is not.
11693 : See PR83269. */
11694 21206856 : && !(ANY_INTEGRAL_TYPE_P (type)
11695 835 : && TYPE_OVERFLOW_UNDEFINED (type)
11696 823 : && ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11697 823 : && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
11698 828 : return fold_build2_loc (loc, MINUS_EXPR, type, negate_expr (op1),
11699 : fold_convert_loc (loc, type,
11700 1656 : TREE_OPERAND (arg0, 0)));
11701 :
11702 : /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
11703 : __complex__ ( x, -y ). This is not the same for SNaNs or if
11704 : signed zeros are involved. */
11705 21205193 : if (!HONOR_SNANS (arg0)
11706 21204042 : && !HONOR_SIGNED_ZEROS (arg0)
11707 35475762 : && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11708 : {
11709 53 : tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11710 53 : tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
11711 53 : tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
11712 53 : bool arg0rz = false, arg0iz = false;
11713 25 : if ((arg0r && (arg0rz = real_zerop (arg0r)))
11714 69 : || (arg0i && (arg0iz = real_zerop (arg0i))))
11715 : {
11716 25 : tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
11717 25 : tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
11718 25 : if (arg0rz && arg1i && real_zerop (arg1i))
11719 : {
11720 9 : tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
11721 : arg1r ? arg1r
11722 0 : : build1 (REALPART_EXPR, rtype, arg1));
11723 9 : tree ip = arg0i ? arg0i
11724 0 : : build1 (IMAGPART_EXPR, rtype, arg0);
11725 9 : return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11726 : }
11727 16 : else if (arg0iz && arg1r && real_zerop (arg1r))
11728 : {
11729 15 : tree rp = arg0r ? arg0r
11730 0 : : build1 (REALPART_EXPR, rtype, arg0);
11731 15 : tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
11732 : arg1i ? arg1i
11733 0 : : build1 (IMAGPART_EXPR, rtype, arg1));
11734 15 : return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11735 : }
11736 : }
11737 : }
11738 :
11739 : /* A - B -> A + (-B) if B is easily negatable. */
11740 21205169 : if (negate_expr_p (op1)
11741 772290 : && ! TYPE_OVERFLOW_SANITIZED (type)
11742 21974955 : && ((FLOAT_TYPE_P (type)
11743 : /* Avoid this transformation if B is a positive REAL_CST. */
11744 65 : && (TREE_CODE (op1) != REAL_CST
11745 0 : || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
11746 769721 : || INTEGRAL_TYPE_P (type)))
11747 769594 : return fold_build2_loc (loc, PLUS_EXPR, type,
11748 : fold_convert_loc (loc, type, arg0),
11749 769594 : negate_expr (op1));
11750 :
11751 : /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
11752 : one. Make sure the type is not saturating and has the signedness of
11753 : the stripped operands, as fold_plusminus_mult_expr will re-associate.
11754 : ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
11755 20435575 : if ((TREE_CODE (arg0) == MULT_EXPR
11756 19190815 : || TREE_CODE (arg1) == MULT_EXPR)
11757 2556701 : && !TYPE_SATURATING (type)
11758 2556701 : && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
11759 2421896 : && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
11760 22801880 : && (!FLOAT_TYPE_P (type) || flag_associative_math))
11761 : {
11762 364264 : tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
11763 364264 : if (tem)
11764 : return tem;
11765 : }
11766 :
11767 20384672 : goto associate;
11768 :
11769 66276906 : case MULT_EXPR:
11770 66276906 : if (! FLOAT_TYPE_P (type))
11771 : {
11772 : /* Transform x * -C into -x * C if x is easily negatable. */
11773 44979326 : if (TREE_CODE (op1) == INTEGER_CST
11774 42100729 : && tree_int_cst_sgn (op1) == -1
11775 217113 : && negate_expr_p (op0)
11776 336 : && negate_expr_p (op1)
11777 320 : && (tem = negate_expr (op1)) != op1
11778 44979646 : && ! TREE_OVERFLOW (tem))
11779 320 : return fold_build2_loc (loc, MULT_EXPR, type,
11780 : fold_convert_loc (loc, type,
11781 320 : negate_expr (op0)), tem);
11782 :
11783 44979006 : strict_overflow_p = false;
11784 44979006 : if (TREE_CODE (arg1) == INTEGER_CST
11785 44979006 : && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11786 : &strict_overflow_p)) != 0)
11787 : {
11788 593109 : if (strict_overflow_p)
11789 10 : fold_overflow_warning (("assuming signed overflow does not "
11790 : "occur when simplifying "
11791 : "multiplication"),
11792 : WARN_STRICT_OVERFLOW_MISC);
11793 593109 : return fold_convert_loc (loc, type, tem);
11794 : }
11795 :
11796 : /* Optimize z * conj(z) for integer complex numbers. */
11797 44385897 : if (TREE_CODE (arg0) == CONJ_EXPR
11798 44385897 : && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11799 1 : return fold_mult_zconjz (loc, type, arg1);
11800 44385896 : if (TREE_CODE (arg1) == CONJ_EXPR
11801 44385896 : && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11802 0 : return fold_mult_zconjz (loc, type, arg0);
11803 : }
11804 : else
11805 : {
11806 : /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
11807 : This is not the same for NaNs or if signed zeros are
11808 : involved. */
11809 21297580 : if (!HONOR_NANS (arg0)
11810 32793 : && !HONOR_SIGNED_ZEROS (arg0)
11811 32493 : && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11812 3637 : && TREE_CODE (arg1) == COMPLEX_CST
11813 21297805 : && real_zerop (TREE_REALPART (arg1)))
11814 : {
11815 218 : tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11816 218 : if (real_onep (TREE_IMAGPART (arg1)))
11817 : {
11818 208 : if (TREE_CODE (arg0) != COMPLEX_EXPR)
11819 63 : arg0 = save_expr (arg0);
11820 208 : tree iarg0 = fold_build1_loc (loc, IMAGPART_EXPR,
11821 : rtype, arg0);
11822 208 : tree rarg0 = fold_build1_loc (loc, REALPART_EXPR,
11823 : rtype, arg0);
11824 208 : return fold_build2_loc (loc, COMPLEX_EXPR, type,
11825 : negate_expr (iarg0),
11826 208 : rarg0);
11827 : }
11828 10 : else if (real_minus_onep (TREE_IMAGPART (arg1)))
11829 : {
11830 10 : if (TREE_CODE (arg0) != COMPLEX_EXPR)
11831 0 : arg0 = save_expr (arg0);
11832 10 : tree iarg0 = fold_build1_loc (loc, IMAGPART_EXPR,
11833 : rtype, arg0);
11834 10 : tree rarg0 = fold_build1_loc (loc, REALPART_EXPR,
11835 : rtype, arg0);
11836 10 : return fold_build2_loc (loc, COMPLEX_EXPR, type,
11837 : iarg0,
11838 10 : negate_expr (rarg0));
11839 : }
11840 : }
11841 :
11842 : /* Optimize z * conj(z) for floating point complex numbers.
11843 : Guarded by flag_unsafe_math_optimizations as non-finite
11844 : imaginary components don't produce scalar results. */
11845 21297362 : if (flag_unsafe_math_optimizations
11846 32322 : && TREE_CODE (arg0) == CONJ_EXPR
11847 21297364 : && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11848 1 : return fold_mult_zconjz (loc, type, arg1);
11849 21297361 : if (flag_unsafe_math_optimizations
11850 32321 : && TREE_CODE (arg1) == CONJ_EXPR
11851 21297365 : && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11852 0 : return fold_mult_zconjz (loc, type, arg0);
11853 : }
11854 65683257 : goto associate;
11855 :
11856 2224271 : case BIT_IOR_EXPR:
11857 : /* Canonicalize (X & C1) | C2. */
11858 2224271 : if (TREE_CODE (arg0) == BIT_AND_EXPR
11859 308852 : && TREE_CODE (arg1) == INTEGER_CST
11860 2488521 : && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11861 : {
11862 264242 : int width = TYPE_PRECISION (type), w;
11863 264242 : wide_int c1 = wi::to_wide (TREE_OPERAND (arg0, 1));
11864 264242 : wide_int c2 = wi::to_wide (arg1);
11865 :
11866 : /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11867 264242 : if ((c1 & c2) == c1)
11868 0 : return omit_one_operand_loc (loc, type, arg1,
11869 0 : TREE_OPERAND (arg0, 0));
11870 :
11871 264242 : wide_int msk = wi::mask (width, false,
11872 264242 : TYPE_PRECISION (TREE_TYPE (arg1)));
11873 :
11874 : /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11875 264242 : if (wi::bit_and_not (msk, c1 | c2) == 0)
11876 : {
11877 6 : tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11878 6 : return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
11879 : }
11880 :
11881 : /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11882 : unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11883 : mode which allows further optimizations. */
11884 264236 : c1 &= msk;
11885 264236 : c2 &= msk;
11886 264236 : wide_int c3 = wi::bit_and_not (c1, c2);
11887 797714 : for (w = BITS_PER_UNIT; w <= width; w <<= 1)
11888 : {
11889 533720 : wide_int mask = wi::mask (w, false,
11890 533720 : TYPE_PRECISION (type));
11891 1067440 : if (((c1 | c2) & mask) == mask
11892 1067440 : && wi::bit_and_not (c1, mask) == 0)
11893 : {
11894 242 : c3 = mask;
11895 242 : break;
11896 : }
11897 533720 : }
11898 :
11899 264236 : if (c3 != c1)
11900 : {
11901 558 : tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11902 1116 : tem = fold_build2_loc (loc, BIT_AND_EXPR, type, tem,
11903 558 : wide_int_to_tree (type, c3));
11904 558 : return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
11905 : }
11906 265364 : }
11907 :
11908 : /* See if this can be simplified into a rotate first. If that
11909 : is unsuccessful continue in the association code. */
11910 2223707 : goto bit_rotate;
11911 :
11912 891065 : case BIT_XOR_EXPR:
11913 : /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11914 891065 : if (TREE_CODE (arg0) == BIT_AND_EXPR
11915 2433 : && INTEGRAL_TYPE_P (type)
11916 1828 : && integer_onep (TREE_OPERAND (arg0, 1))
11917 891068 : && integer_onep (arg1))
11918 0 : return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11919 0 : build_zero_cst (TREE_TYPE (arg0)));
11920 :
11921 : /* See if this can be simplified into a rotate first. If that
11922 : is unsuccessful continue in the association code. */
11923 891065 : goto bit_rotate;
11924 :
11925 8887129 : case BIT_AND_EXPR:
11926 : /* Fold !X & 1 as X == 0. */
11927 8887129 : if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11928 8887129 : && integer_onep (arg1))
11929 : {
11930 0 : tem = TREE_OPERAND (arg0, 0);
11931 0 : return fold_build2_loc (loc, EQ_EXPR, type, tem,
11932 0 : build_zero_cst (TREE_TYPE (tem)));
11933 : }
11934 :
11935 : /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11936 : multiple of 1 << CST. */
11937 8887129 : if (TREE_CODE (arg1) == INTEGER_CST)
11938 : {
11939 7121534 : wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
11940 7121534 : wide_int ncst1 = -cst1;
11941 7121534 : if ((cst1 & ncst1) == ncst1
11942 7266603 : && multiple_of_p (type, arg0,
11943 7266603 : wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11944 467 : return fold_convert_loc (loc, type, arg0);
11945 7121534 : }
11946 :
11947 : /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11948 : bits from CST2. */
11949 8886662 : if (TREE_CODE (arg1) == INTEGER_CST
11950 7121067 : && TREE_CODE (arg0) == MULT_EXPR
11951 9024420 : && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11952 : {
11953 137716 : wi::tree_to_wide_ref warg1 = wi::to_wide (arg1);
11954 137716 : wide_int masked
11955 137716 : = mask_with_tz (type, warg1, wi::to_wide (TREE_OPERAND (arg0, 1)));
11956 :
11957 137716 : if (masked == 0)
11958 5100 : return omit_two_operands_loc (loc, type, build_zero_cst (type),
11959 5100 : arg0, arg1);
11960 132616 : else if (masked != warg1)
11961 : {
11962 : /* Avoid the transform if arg1 is a mask of some
11963 : mode which allows further optimizations. */
11964 662 : int pop = wi::popcount (warg1);
11965 695 : if (!(pop >= BITS_PER_UNIT
11966 59 : && pow2p_hwi (pop)
11967 728 : && wi::mask (pop, false, warg1.get_precision ()) == warg1))
11968 1258 : return fold_build2_loc (loc, code, type, op0,
11969 1258 : wide_int_to_tree (type, masked));
11970 : }
11971 137716 : }
11972 :
11973 : /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11974 7115338 : if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11975 9471020 : && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11976 : {
11977 515899 : prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11978 :
11979 515899 : wide_int mask = wide_int::from (wi::to_wide (arg1), prec, UNSIGNED);
11980 515899 : if (mask == -1)
11981 2447 : return
11982 2447 : fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11983 515899 : }
11984 :
11985 8878486 : goto associate;
11986 :
11987 5827511 : case RDIV_EXPR:
11988 : /* Don't touch a floating-point divide by zero unless the mode
11989 : of the constant can represent infinity. */
11990 5827511 : if (TREE_CODE (arg1) == REAL_CST
11991 2930453 : && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11992 5827511 : && real_zerop (arg1))
11993 0 : return NULL_TREE;
11994 :
11995 : /* (-A) / (-B) -> A / B */
11996 5827511 : if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11997 6 : return fold_build2_loc (loc, RDIV_EXPR, type,
11998 3 : TREE_OPERAND (arg0, 0),
11999 3 : negate_expr (arg1));
12000 5827508 : if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
12001 0 : return fold_build2_loc (loc, RDIV_EXPR, type,
12002 : negate_expr (arg0),
12003 0 : TREE_OPERAND (arg1, 0));
12004 : return NULL_TREE;
12005 :
12006 2347507 : case TRUNC_DIV_EXPR:
12007 : /* Fall through */
12008 :
12009 2347507 : case FLOOR_DIV_EXPR:
12010 : /* Simplify A / (B << N) where A and B are positive and B is
12011 : a power of 2, to A >> (N + log2(B)). */
12012 2347507 : strict_overflow_p = false;
12013 2347507 : if (TREE_CODE (arg1) == LSHIFT_EXPR
12014 2347507 : && (TYPE_UNSIGNED (type)
12015 8 : || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12016 : {
12017 17 : tree sval = TREE_OPERAND (arg1, 0);
12018 17 : if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12019 : {
12020 16 : tree sh_cnt = TREE_OPERAND (arg1, 1);
12021 16 : tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
12022 16 : wi::exact_log2 (wi::to_wide (sval)));
12023 :
12024 16 : if (strict_overflow_p)
12025 0 : fold_overflow_warning (("assuming signed overflow does not "
12026 : "occur when simplifying A / (B << N)"),
12027 : WARN_STRICT_OVERFLOW_MISC);
12028 :
12029 16 : sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12030 : sh_cnt, pow2);
12031 16 : return fold_build2_loc (loc, RSHIFT_EXPR, type,
12032 16 : fold_convert_loc (loc, type, arg0), sh_cnt);
12033 : }
12034 : }
12035 :
12036 : /* Fall through */
12037 :
12038 3823560 : case ROUND_DIV_EXPR:
12039 3823560 : case CEIL_DIV_EXPR:
12040 3823560 : case EXACT_DIV_EXPR:
12041 3823560 : if (integer_zerop (arg1))
12042 : return NULL_TREE;
12043 :
12044 : /* Convert -A / -B to A / B when the type is signed and overflow is
12045 : undefined. */
12046 3820532 : if ((!ANY_INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12047 1062091 : && TREE_CODE (op0) == NEGATE_EXPR
12048 3820594 : && negate_expr_p (op1))
12049 : {
12050 30 : if (ANY_INTEGRAL_TYPE_P (type))
12051 30 : fold_overflow_warning (("assuming signed overflow does not occur "
12052 : "when distributing negation across "
12053 : "division"),
12054 : WARN_STRICT_OVERFLOW_MISC);
12055 60 : return fold_build2_loc (loc, code, type,
12056 : fold_convert_loc (loc, type,
12057 30 : TREE_OPERAND (arg0, 0)),
12058 30 : negate_expr (op1));
12059 : }
12060 3820502 : if ((!ANY_INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12061 1062061 : && TREE_CODE (arg1) == NEGATE_EXPR
12062 3820746 : && negate_expr_p (op0))
12063 : {
12064 36 : if (ANY_INTEGRAL_TYPE_P (type))
12065 36 : fold_overflow_warning (("assuming signed overflow does not occur "
12066 : "when distributing negation across "
12067 : "division"),
12068 : WARN_STRICT_OVERFLOW_MISC);
12069 36 : return fold_build2_loc (loc, code, type,
12070 : negate_expr (op0),
12071 : fold_convert_loc (loc, type,
12072 72 : TREE_OPERAND (arg1, 0)));
12073 : }
12074 :
12075 : /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12076 : operation, EXACT_DIV_EXPR.
12077 :
12078 : Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12079 : At one time others generated faster code, it's not clear if they do
12080 : after the last round to changes to the DIV code in expmed.cc. */
12081 3820466 : if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12082 3820466 : && multiple_of_p (type, arg0, arg1))
12083 0 : return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
12084 : fold_convert (type, arg0),
12085 0 : fold_convert (type, arg1));
12086 :
12087 3820466 : strict_overflow_p = false;
12088 3820466 : if (TREE_CODE (arg1) == INTEGER_CST
12089 3820466 : && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12090 : &strict_overflow_p)) != 0)
12091 : {
12092 9143 : if (strict_overflow_p)
12093 157 : fold_overflow_warning (("assuming signed overflow does not occur "
12094 : "when simplifying division"),
12095 : WARN_STRICT_OVERFLOW_MISC);
12096 9143 : return fold_convert_loc (loc, type, tem);
12097 : }
12098 :
12099 : return NULL_TREE;
12100 :
12101 889790 : case CEIL_MOD_EXPR:
12102 889790 : case FLOOR_MOD_EXPR:
12103 889790 : case ROUND_MOD_EXPR:
12104 889790 : case TRUNC_MOD_EXPR:
12105 889790 : strict_overflow_p = false;
12106 889790 : if (TREE_CODE (arg1) == INTEGER_CST
12107 889790 : && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12108 : &strict_overflow_p)) != 0)
12109 : {
12110 0 : if (strict_overflow_p)
12111 0 : fold_overflow_warning (("assuming signed overflow does not occur "
12112 : "when simplifying modulus"),
12113 : WARN_STRICT_OVERFLOW_MISC);
12114 0 : return fold_convert_loc (loc, type, tem);
12115 : }
12116 :
12117 : return NULL_TREE;
12118 :
12119 2588276 : case LROTATE_EXPR:
12120 2588276 : case RROTATE_EXPR:
12121 2588276 : case RSHIFT_EXPR:
12122 2588276 : case LSHIFT_EXPR:
12123 : /* Since negative shift count is not well-defined,
12124 : don't try to compute it in the compiler. */
12125 2588276 : if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12126 : return NULL_TREE;
12127 :
12128 2587239 : prec = element_precision (type);
12129 :
12130 : /* If we have a rotate of a bit operation with the rotate count and
12131 : the second operand of the bit operation both constant,
12132 : permute the two operations. */
12133 2741 : if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12134 2207 : && (TREE_CODE (arg0) == BIT_AND_EXPR
12135 2207 : || TREE_CODE (arg0) == BIT_IOR_EXPR
12136 2207 : || TREE_CODE (arg0) == BIT_XOR_EXPR)
12137 2587239 : && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12138 : {
12139 0 : tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12140 0 : tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12141 0 : return fold_build2_loc (loc, TREE_CODE (arg0), type,
12142 : fold_build2_loc (loc, code, type,
12143 : arg00, arg1),
12144 : fold_build2_loc (loc, code, type,
12145 0 : arg01, arg1));
12146 : }
12147 :
12148 : return NULL_TREE;
12149 :
12150 457032 : case MIN_EXPR:
12151 457032 : case MAX_EXPR:
12152 457032 : goto associate;
12153 :
12154 7878501 : case TRUTH_ANDIF_EXPR:
12155 : /* Note that the operands of this must be ints
12156 : and their values must be 0 or 1.
12157 : ("true" is a fixed value perhaps depending on the language.) */
12158 : /* If first arg is constant zero, return it. */
12159 7878501 : if (integer_zerop (arg0))
12160 1925562 : return fold_convert_loc (loc, type, arg0);
12161 : /* FALLTHRU */
12162 16639345 : case TRUTH_AND_EXPR:
12163 : /* If either arg is constant true, drop it. */
12164 16639345 : if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12165 1563042 : return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12166 840392 : if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12167 : /* Preserve sequence points. */
12168 15863968 : && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12169 747970 : return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12170 : /* If second arg is constant zero, result is zero, but first arg
12171 : must be evaluated. */
12172 14328333 : if (integer_zerop (arg1))
12173 52727 : return omit_one_operand_loc (loc, type, arg1, arg0);
12174 : /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12175 : case will be handled here. */
12176 14275606 : if (integer_zerop (arg0))
12177 0 : return omit_one_operand_loc (loc, type, arg0, arg1);
12178 :
12179 : /* !X && X is always false. */
12180 14275606 : if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12181 14275606 : && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12182 0 : return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12183 : /* X && !X is always false. */
12184 14275606 : if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12185 14275606 : && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12186 0 : return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12187 :
12188 : /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12189 : means A >= Y && A != MAX, but in this case we know that
12190 : A < X <= MAX. */
12191 :
12192 14275606 : if (!TREE_SIDE_EFFECTS (arg0)
12193 14275606 : && !TREE_SIDE_EFFECTS (arg1))
12194 : {
12195 12377941 : tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12196 12377941 : if (tem && !operand_equal_p (tem, arg0, 0))
12197 444 : return fold_convert (type,
12198 : fold_build2_loc (loc, code, TREE_TYPE (arg1),
12199 : tem, arg1));
12200 :
12201 12377497 : tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12202 12377497 : if (tem && !operand_equal_p (tem, arg1, 0))
12203 9760 : return fold_convert (type,
12204 : fold_build2_loc (loc, code, TREE_TYPE (arg0),
12205 : arg0, tem));
12206 : }
12207 :
12208 14265402 : if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12209 : != NULL_TREE)
12210 : return tem;
12211 :
12212 : return NULL_TREE;
12213 :
12214 3951933 : case TRUTH_ORIF_EXPR:
12215 : /* Note that the operands of this must be ints
12216 : and their values must be 0 or true.
12217 : ("true" is a fixed value perhaps depending on the language.) */
12218 : /* If first arg is constant true, return it. */
12219 3951933 : if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12220 212990 : return fold_convert_loc (loc, type, arg0);
12221 : /* FALLTHRU */
12222 13144836 : case TRUTH_OR_EXPR:
12223 : /* If either arg is constant zero, drop it. */
12224 13144836 : if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12225 299557 : return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12226 488290 : if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12227 : /* Preserve sequence points. */
12228 13279044 : && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12229 422633 : return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12230 : /* If second arg is constant true, result is true, but we must
12231 : evaluate first arg. */
12232 12422646 : if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12233 54525 : return omit_one_operand_loc (loc, type, arg1, arg0);
12234 : /* Likewise for first arg, but note this only occurs here for
12235 : TRUTH_OR_EXPR. */
12236 12368121 : if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12237 0 : return omit_one_operand_loc (loc, type, arg0, arg1);
12238 :
12239 : /* !X || X is always true. */
12240 12368121 : if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12241 12368121 : && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12242 0 : return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12243 : /* X || !X is always true. */
12244 12368121 : if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12245 12368121 : && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12246 1 : return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12247 :
12248 : /* (X && !Y) || (!X && Y) is X ^ Y */
12249 12368120 : if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12250 1615 : && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12251 : {
12252 669 : tree a0, a1, l0, l1, n0, n1;
12253 :
12254 669 : a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12255 669 : a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12256 :
12257 669 : l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12258 669 : l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12259 :
12260 669 : n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12261 669 : n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12262 :
12263 669 : if ((operand_equal_p (n0, a0, 0)
12264 18 : && operand_equal_p (n1, a1, 0))
12265 677 : || (operand_equal_p (n0, a1, 0)
12266 3 : && operand_equal_p (n1, a0, 0)))
12267 13 : return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12268 : }
12269 :
12270 12368107 : if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12271 : != NULL_TREE)
12272 : return tem;
12273 :
12274 : return NULL_TREE;
12275 :
12276 72581 : case TRUTH_XOR_EXPR:
12277 : /* If the second arg is constant zero, drop it. */
12278 72581 : if (integer_zerop (arg1))
12279 0 : return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12280 : /* If the second arg is constant true, this is a logical inversion. */
12281 72581 : if (integer_onep (arg1))
12282 : {
12283 0 : tem = invert_truthvalue_loc (loc, arg0);
12284 0 : return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12285 : }
12286 : /* Identical arguments cancel to zero. */
12287 72581 : if (operand_equal_p (arg0, arg1, 0))
12288 0 : return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12289 :
12290 : /* !X ^ X is always true. */
12291 72581 : if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12292 72581 : && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12293 0 : return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12294 :
12295 : /* X ^ !X is always true. */
12296 72581 : if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12297 72581 : && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12298 0 : return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12299 :
12300 : return NULL_TREE;
12301 :
12302 55918554 : case EQ_EXPR:
12303 55918554 : case NE_EXPR:
12304 55918554 : STRIP_NOPS (arg0);
12305 55918554 : STRIP_NOPS (arg1);
12306 :
12307 55918554 : tem = fold_comparison (loc, code, type, op0, op1);
12308 55918554 : if (tem != NULL_TREE)
12309 : return tem;
12310 :
12311 : /* bool_var != 1 becomes !bool_var. */
12312 57060279 : if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12313 55950981 : && code == NE_EXPR)
12314 38731 : return fold_convert_loc (loc, type,
12315 : fold_build1_loc (loc, TRUTH_NOT_EXPR,
12316 77462 : TREE_TYPE (arg0), arg0));
12317 :
12318 : /* bool_var == 0 becomes !bool_var. */
12319 56982817 : if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12320 56809609 : && code == EQ_EXPR)
12321 216045 : return fold_convert_loc (loc, type,
12322 : fold_build1_loc (loc, TRUTH_NOT_EXPR,
12323 432090 : TREE_TYPE (arg0), arg0));
12324 :
12325 : /* !exp != 0 becomes !exp */
12326 667599 : if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12327 56319647 : && code == NE_EXPR)
12328 659904 : return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12329 :
12330 : /* If this is an EQ or NE comparison with zero and ARG0 is
12331 : (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12332 : two operations, but the latter can be done in one less insn
12333 : on machines that have only two-operand insns or on which a
12334 : constant cannot be the first operand. */
12335 54992856 : if (TREE_CODE (arg0) == BIT_AND_EXPR
12336 54992856 : && integer_zerop (arg1))
12337 : {
12338 1617000 : tree arg00 = TREE_OPERAND (arg0, 0);
12339 1617000 : tree arg01 = TREE_OPERAND (arg0, 1);
12340 1617000 : if (TREE_CODE (arg00) == LSHIFT_EXPR
12341 1617000 : && integer_onep (TREE_OPERAND (arg00, 0)))
12342 : {
12343 4394 : tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12344 4394 : arg01, TREE_OPERAND (arg00, 1));
12345 4394 : tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12346 4394 : build_one_cst (TREE_TYPE (arg0)));
12347 4394 : return fold_build2_loc (loc, code, type,
12348 4394 : fold_convert_loc (loc, TREE_TYPE (arg1),
12349 4394 : tem), arg1);
12350 : }
12351 1612606 : else if (TREE_CODE (arg01) == LSHIFT_EXPR
12352 1612606 : && integer_onep (TREE_OPERAND (arg01, 0)))
12353 : {
12354 305 : tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12355 305 : arg00, TREE_OPERAND (arg01, 1));
12356 305 : tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12357 305 : build_one_cst (TREE_TYPE (arg0)));
12358 305 : return fold_build2_loc (loc, code, type,
12359 305 : fold_convert_loc (loc, TREE_TYPE (arg1),
12360 305 : tem), arg1);
12361 : }
12362 : }
12363 :
12364 : /* If this is a comparison of a field, we may be able to simplify it. */
12365 54988157 : if ((TREE_CODE (arg0) == COMPONENT_REF
12366 54988157 : || TREE_CODE (arg0) == BIT_FIELD_REF)
12367 : /* Handle the constant case even without -O
12368 : to make sure the warnings are given. */
12369 7925573 : && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12370 : {
12371 7626108 : t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12372 7626108 : if (t1)
12373 : return t1;
12374 : }
12375 :
12376 : /* Optimize comparisons of strlen vs zero to a compare of the
12377 : first character of the string vs zero. To wit,
12378 : strlen(ptr) == 0 => *ptr == 0
12379 : strlen(ptr) != 0 => *ptr != 0
12380 : Other cases should reduce to one of these two (or a constant)
12381 : due to the return value of strlen being unsigned. */
12382 52214377 : if (TREE_CODE (arg0) == CALL_EXPR && integer_zerop (arg1))
12383 : {
12384 3332860 : tree fndecl = get_callee_fndecl (arg0);
12385 :
12386 3332860 : if (fndecl
12387 3331783 : && fndecl_built_in_p (fndecl, BUILT_IN_STRLEN)
12388 537 : && call_expr_nargs (arg0) == 1
12389 3333397 : && (TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0)))
12390 : == POINTER_TYPE))
12391 : {
12392 537 : tree ptrtype
12393 537 : = build_pointer_type (build_qualified_type (char_type_node,
12394 : TYPE_QUAL_CONST));
12395 1074 : tree ptr = fold_convert_loc (loc, ptrtype,
12396 537 : CALL_EXPR_ARG (arg0, 0));
12397 537 : tree iref = build_fold_indirect_ref_loc (loc, ptr);
12398 537 : return fold_build2_loc (loc, code, type, iref,
12399 537 : build_int_cst (TREE_TYPE (iref), 0));
12400 : }
12401 : }
12402 : /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12403 : (X & C) == 0 when C is a single bit. */
12404 52213840 : if (TREE_CODE (arg0) == BIT_AND_EXPR
12405 1794980 : && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12406 968 : && integer_zerop (arg1)
12407 52214410 : && integer_pow2p (TREE_OPERAND (arg0, 1)))
12408 : {
12409 255 : tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12410 255 : TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12411 255 : TREE_OPERAND (arg0, 1));
12412 395 : return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12413 : type, tem,
12414 255 : fold_convert_loc (loc, TREE_TYPE (arg0),
12415 255 : arg1));
12416 : }
12417 :
12418 : /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12419 : constant C is a power of two, i.e. a single bit. */
12420 52213585 : if (TREE_CODE (arg0) == BIT_XOR_EXPR
12421 4662 : && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12422 0 : && integer_zerop (arg1)
12423 0 : && integer_pow2p (TREE_OPERAND (arg0, 1))
12424 52213585 : && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12425 0 : TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12426 : {
12427 0 : tree arg00 = TREE_OPERAND (arg0, 0);
12428 0 : return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12429 0 : arg00, build_int_cst (TREE_TYPE (arg00), 0));
12430 : }
12431 :
12432 :       /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12433 :          when C is a power of two, i.e. a single bit.  */
12434 52213585 : if (TREE_CODE (arg0) == BIT_AND_EXPR
12435 1794725 : && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12436 36208 : && integer_zerop (arg1)
12437 36208 : && integer_pow2p (TREE_OPERAND (arg0, 1))
12438 52247140 : && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12439 33555 : TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12440 : {
12441 0 : tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12442 0 : tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12443 0 : arg000, TREE_OPERAND (arg0, 1));
12444 0 : return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12445 0 : tem, build_int_cst (TREE_TYPE (tem), 0));
12446 : }
12447 :
12448 52213585 : if (TREE_CODE (arg0) == BIT_XOR_EXPR
12449 4662 : && TREE_CODE (arg1) == BIT_XOR_EXPR)
12450 : {
12451 482 : tree arg00 = TREE_OPERAND (arg0, 0);
12452 482 : tree arg01 = TREE_OPERAND (arg0, 1);
12453 482 : tree arg10 = TREE_OPERAND (arg1, 0);
12454 482 : tree arg11 = TREE_OPERAND (arg1, 1);
12455 482 : tree itype = TREE_TYPE (arg0);
12456 :
12457 : /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12458 : operand_equal_p guarantees no side-effects so we don't need
12459 : to use omit_one_operand on Z. */
12460 482 : if (operand_equal_p (arg01, arg11, 0))
12461 8 : return fold_build2_loc (loc, code, type, arg00,
12462 8 : fold_convert_loc (loc, TREE_TYPE (arg00),
12463 8 : arg10));
12464 474 : if (operand_equal_p (arg01, arg10, 0))
12465 0 : return fold_build2_loc (loc, code, type, arg00,
12466 0 : fold_convert_loc (loc, TREE_TYPE (arg00),
12467 0 : arg11));
12468 474 : if (operand_equal_p (arg00, arg11, 0))
12469 0 : return fold_build2_loc (loc, code, type, arg01,
12470 0 : fold_convert_loc (loc, TREE_TYPE (arg01),
12471 0 : arg10));
12472 474 : if (operand_equal_p (arg00, arg10, 0))
12473 0 : return fold_build2_loc (loc, code, type, arg01,
12474 0 : fold_convert_loc (loc, TREE_TYPE (arg01),
12475 0 : arg11));
12476 :
12477 : /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12478 474 : if (TREE_CODE (arg01) == INTEGER_CST
12479 8 : && TREE_CODE (arg11) == INTEGER_CST)
12480 : {
12481 8 : tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
12482 : fold_convert_loc (loc, itype, arg11));
12483 8 : tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
12484 8 : return fold_build2_loc (loc, code, type, tem,
12485 8 : fold_convert_loc (loc, itype, arg10));
12486 : }
12487 : }
12488 :
12489 : /* Attempt to simplify equality/inequality comparisons of complex
12490 : values. Only lower the comparison if the result is known or
12491 : can be simplified to a single scalar comparison. */
12492 52213569 : if ((TREE_CODE (arg0) == COMPLEX_EXPR
12493 52211042 : || TREE_CODE (arg0) == COMPLEX_CST)
12494 2527 : && (TREE_CODE (arg1) == COMPLEX_EXPR
12495 2335 : || TREE_CODE (arg1) == COMPLEX_CST))
12496 : {
12497 1726 : tree real0, imag0, real1, imag1;
12498 1726 : tree rcond, icond;
12499 :
12500 1726 : if (TREE_CODE (arg0) == COMPLEX_EXPR)
12501 : {
12502 1726 : real0 = TREE_OPERAND (arg0, 0);
12503 1726 : imag0 = TREE_OPERAND (arg0, 1);
12504 : }
12505 : else
12506 : {
12507 0 : real0 = TREE_REALPART (arg0);
12508 0 : imag0 = TREE_IMAGPART (arg0);
12509 : }
12510 :
12511 1726 : if (TREE_CODE (arg1) == COMPLEX_EXPR)
12512 : {
12513 192 : real1 = TREE_OPERAND (arg1, 0);
12514 192 : imag1 = TREE_OPERAND (arg1, 1);
12515 : }
12516 : else
12517 : {
12518 1534 : real1 = TREE_REALPART (arg1);
12519 1534 : imag1 = TREE_IMAGPART (arg1);
12520 : }
12521 :
12522 1726 : rcond = fold_binary_loc (loc, code, type, real0, real1);
12523 1726 : if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12524 : {
12525 11 : if (integer_zerop (rcond))
12526 : {
12527 11 : if (code == EQ_EXPR)
12528 0 : return omit_two_operands_loc (loc, type, boolean_false_node,
12529 0 : imag0, imag1);
12530 11 : return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12531 : }
12532 : else
12533 : {
12534 0 : if (code == NE_EXPR)
12535 0 : return omit_two_operands_loc (loc, type, boolean_true_node,
12536 0 : imag0, imag1);
12537 0 : return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12538 : }
12539 : }
12540 :
12541 1715 : icond = fold_binary_loc (loc, code, type, imag0, imag1);
12542 1715 : if (icond && TREE_CODE (icond) == INTEGER_CST)
12543 : {
12544 9 : if (integer_zerop (icond))
12545 : {
12546 7 : if (code == EQ_EXPR)
12547 1 : return omit_two_operands_loc (loc, type, boolean_false_node,
12548 1 : real0, real1);
12549 6 : return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12550 : }
12551 : else
12552 : {
12553 2 : if (code == NE_EXPR)
12554 1 : return omit_two_operands_loc (loc, type, boolean_true_node,
12555 1 : real0, real1);
12556 1 : return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
12557 : }
12558 : }
12559 : }
12560 :
12561 : return NULL_TREE;
12562 :
12563 42064185 : case LT_EXPR:
12564 42064185 : case GT_EXPR:
12565 42064185 : case LE_EXPR:
12566 42064185 : case GE_EXPR:
12567 42064185 : tem = fold_comparison (loc, code, type, op0, op1);
12568 42064185 : if (tem != NULL_TREE)
12569 : return tem;
12570 :
12571 : /* Transform comparisons of the form X +- C CMP X. */
12572 41208944 : if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12573 4861584 : && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12574 50123 : && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12575 41208960 : && !HONOR_SNANS (arg0))
12576 : {
12577 14 : tree arg01 = TREE_OPERAND (arg0, 1);
12578 14 : enum tree_code code0 = TREE_CODE (arg0);
12579 14 : int is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12580 :
12581 : /* (X - c) > X becomes false. */
12582 14 : if (code == GT_EXPR
12583 4 : && ((code0 == MINUS_EXPR && is_positive >= 0)
12584 0 : || (code0 == PLUS_EXPR && is_positive <= 0)))
12585 4 : return constant_boolean_node (0, type);
12586 :
12587 : /* Likewise (X + c) < X becomes false. */
12588 10 : if (code == LT_EXPR
12589 3 : && ((code0 == PLUS_EXPR && is_positive >= 0)
12590 0 : || (code0 == MINUS_EXPR && is_positive <= 0)))
12591 3 : return constant_boolean_node (0, type);
12592 :
12593 : /* Convert (X - c) <= X to true. */
12594 7 : if (!HONOR_NANS (arg1)
12595 6 : && code == LE_EXPR
12596 11 : && ((code0 == MINUS_EXPR && is_positive >= 0)
12597 0 : || (code0 == PLUS_EXPR && is_positive <= 0)))
12598 4 : return constant_boolean_node (1, type);
12599 :
12600 : /* Convert (X + c) >= X to true. */
12601 3 : if (!HONOR_NANS (arg1)
12602 2 : && code == GE_EXPR
12603 5 : && ((code0 == PLUS_EXPR && is_positive >= 0)
12604 0 : || (code0 == MINUS_EXPR && is_positive <= 0)))
12605 2 : return constant_boolean_node (1, type);
12606 : }
12607 :
12608 : /* If we are comparing an ABS_EXPR with a constant, we can
12609 : convert all the cases into explicit comparisons, but they may
12610 : well not be faster than doing the ABS and one comparison.
12611 : But ABS (X) <= C is a range comparison, which becomes a subtraction
12612 : and a comparison, and is probably faster. */
12613 41208931 : if (code == LE_EXPR
12614 8193427 : && TREE_CODE (arg1) == INTEGER_CST
12615 5849058 : && TREE_CODE (arg0) == ABS_EXPR
12616 818 : && ! TREE_SIDE_EFFECTS (arg0)
12617 818 : && (tem = negate_expr (arg1)) != 0
12618 818 : && TREE_CODE (tem) == INTEGER_CST
12619 41209749 : && !TREE_OVERFLOW (tem))
12620 1636 : return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
12621 : build2 (GE_EXPR, type,
12622 818 : TREE_OPERAND (arg0, 0), tem),
12623 : build2 (LE_EXPR, type,
12624 1636 : TREE_OPERAND (arg0, 0), arg1));
12625 :
12626 : /* Convert ABS_EXPR<x> >= 0 to true. */
12627 41208113 : strict_overflow_p = false;
12628 41208113 : if (code == GE_EXPR
12629 4633529 : && (integer_zerop (arg1)
12630 3386240 : || (! HONOR_NANS (arg0)
12631 2748636 : && real_zerop (arg1)))
12632 42455643 : && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12633 : {
12634 1147 : if (strict_overflow_p)
12635 6 : fold_overflow_warning (("assuming signed overflow does not occur "
12636 : "when simplifying comparison of "
12637 : "absolute value and zero"),
12638 : WARN_STRICT_OVERFLOW_CONDITIONAL);
12639 1147 : return omit_one_operand_loc (loc, type,
12640 : constant_boolean_node (true, type),
12641 1147 : arg0);
12642 : }
12643 :
12644 : /* Convert ABS_EXPR<x> < 0 to false. */
12645 41206966 : strict_overflow_p = false;
12646 41206966 : if (code == LT_EXPR
12647 13690433 : && (integer_zerop (arg1) || real_zerop (arg1))
12648 44416806 : && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12649 : {
12650 3177 : if (strict_overflow_p)
12651 207 : fold_overflow_warning (("assuming signed overflow does not occur "
12652 : "when simplifying comparison of "
12653 : "absolute value and zero"),
12654 : WARN_STRICT_OVERFLOW_CONDITIONAL);
12655 3177 : return omit_one_operand_loc (loc, type,
12656 : constant_boolean_node (false, type),
12657 3177 : arg0);
12658 : }
12659 :
12660 : /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12661 : and similarly for >= into !=. */
12662 41203789 : if ((code == LT_EXPR || code == GE_EXPR)
12663 18319638 : && TYPE_UNSIGNED (TREE_TYPE (arg0))
12664 6288067 : && TREE_CODE (arg1) == LSHIFT_EXPR
12665 41205292 : && integer_onep (TREE_OPERAND (arg1, 0)))
12666 4054 : return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12667 1355 : build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12668 1355 : TREE_OPERAND (arg1, 1)),
12669 2710 : build_zero_cst (TREE_TYPE (arg0)));
12670 :
12671 : /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
12672 : otherwise Y might be >= # of bits in X's type and thus e.g.
12673 : (unsigned char) (1 << Y) for Y 15 might be 0.
12674 : If the cast is widening, then 1 << Y should have unsigned type,
12675 : otherwise if Y is number of bits in the signed shift type minus 1,
12676 : we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
12677 : 31 might be 0xffffffff80000000. */
12678 41202434 : if ((code == LT_EXPR || code == GE_EXPR)
12679 18318283 : && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12680 5516554 : || VECTOR_INTEGER_TYPE_P (TREE_TYPE (arg0)))
12681 12824288 : && TYPE_UNSIGNED (TREE_TYPE (arg0))
12682 4826882 : && CONVERT_EXPR_P (arg1)
12683 1080933 : && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12684 42 : && (element_precision (TREE_TYPE (arg1))
12685 21 : >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
12686 14 : && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
12687 14 : || (element_precision (TREE_TYPE (arg1))
12688 7 : == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
12689 41202441 : && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12690 : {
12691 7 : tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12692 7 : TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
12693 21 : return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12694 7 : fold_convert_loc (loc, TREE_TYPE (arg0), tem),
12695 14 : build_zero_cst (TREE_TYPE (arg0)));
12696 : }
12697 :
12698 : return NULL_TREE;
12699 :
12700 6051672 : case UNORDERED_EXPR:
12701 6051672 : case ORDERED_EXPR:
12702 6051672 : case UNLT_EXPR:
12703 6051672 : case UNLE_EXPR:
12704 6051672 : case UNGT_EXPR:
12705 6051672 : case UNGE_EXPR:
12706 6051672 : case UNEQ_EXPR:
12707 6051672 : case LTGT_EXPR:
12708 : /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
12709 6051672 : {
12710 6051672 : tree targ0 = strip_float_extensions (arg0);
12711 6051672 : tree targ1 = strip_float_extensions (arg1);
12712 6051672 : tree newtype = TREE_TYPE (targ0);
12713 :
12714 6051672 : if (element_precision (TREE_TYPE (targ1)) > element_precision (newtype))
12715 1289 : newtype = TREE_TYPE (targ1);
12716 :
12717 6051672 : if (element_precision (newtype) < element_precision (TREE_TYPE (arg0))
12718 6051672 : && (!VECTOR_TYPE_P (type) || is_truth_type_for (newtype, type)))
12719 328 : return fold_build2_loc (loc, code, type,
12720 : fold_convert_loc (loc, newtype, targ0),
12721 328 : fold_convert_loc (loc, newtype, targ1));
12722 : }
12723 :
12724 : return NULL_TREE;
12725 :
12726 8801513 : case COMPOUND_EXPR:
12727 : /* When pedantic, a compound expression can be neither an lvalue
12728 : nor an integer constant expression. */
12729 8801513 : if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
12730 : return NULL_TREE;
12731 : /* Don't let (0, 0) be null pointer constant. */
12732 481410 : tem = integer_zerop (arg1) ? build1_loc (loc, NOP_EXPR, type, arg1)
12733 481410 : : fold_convert_loc (loc, type, arg1);
12734 : return tem;
12735 :
12736 : default:
12737 : return NULL_TREE;
12738 : } /* switch (code) */
12739 : }
12740 :
12741 : /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
12742 : ((A & N) + B) & M -> (A + B) & M
12743 : Similarly if (N & M) == 0,
12744 : ((A | N) + B) & M -> (A + B) & M
12745 : and for - instead of + (or unary - instead of +)
12746 : and/or ^ instead of |.
12747 : If B is constant and (B & M) == 0, fold into A & M.
12748 :
12749 : This function is a helper for match.pd patterns. Return non-NULL
12750 : type in which the simplified operation should be performed only
12751 : if any optimization is possible.
12752 :
12753 : ARG1 is M above, ARG00 is left operand of +/-, if CODE00 is BIT_*_EXPR,
12754 : then ARG00{0,1} are operands of that bitop, otherwise CODE00 is ERROR_MARK.
12755 : Similarly for ARG01, CODE01 and ARG01{0,1}, just for the right operand of
12756 : +/-. */
12757 : tree
12758 1226884 : fold_bit_and_mask (tree type, tree arg1, enum tree_code code,
12759 : 		   tree arg00, enum tree_code code00, tree arg000, tree arg001,
12760 : 		   tree arg01, enum tree_code code01, tree arg010, tree arg011,
12761 : 		   tree *pmop)
12762 : {
12763 1226884 :   gcc_assert (TREE_CODE (arg1) == INTEGER_CST);
12764 1226884 :   gcc_assert (code == PLUS_EXPR || code == MINUS_EXPR || code == NEGATE_EXPR);
12765 1226884 :   wi::tree_to_wide_ref cst1 = wi::to_wide (arg1); /* cst1 is M.  */
12766 2453768 :   if (~cst1 == 0 /* Reject all-ones M: nothing to mask away.  */
12767 3675704 :       || (cst1 & (cst1 + 1)) != 0 /* M must be a low-bit mask (1LL << cst) - 1.  */
12768 1053020 :       || !INTEGRAL_TYPE_P (type)
12769 1053020 :       || (!TYPE_OVERFLOW_WRAPS (type)
12770 41439 : 	  && TREE_CODE (type) != INTEGER_TYPE)
12771 4557334 :       || (wi::max_value (type) & cst1) != cst1) /* M must fit in TYPE's value bits.  */
12772 :     return NULL_TREE;
12773 :
12774 1053020 :   enum tree_code codes[2] = { code00, code01 };
12775 1053020 :   tree arg0xx[4] = { arg000, arg001, arg010, arg011 };
12776 1053020 :   int which = 0;
12777 1053020 :   wide_int cst0;
12778 :
12779 :   /* Now we know that arg0 is (C + D) or (C - D) or -C and
12780 :      arg1 (M) is == (1LL << cst) - 1.
12781 :      Store C into PMOP[0] and D into PMOP[1].  */
12782 1053020 :   pmop[0] = arg00;
12783 1053020 :   pmop[1] = arg01;
12784 1053020 :   which = code != NEGATE_EXPR; /* NEGATE_EXPR has one operand: only inspect pmop[0].  */
12785 :
12786 3158164 :   for (; which >= 0; which--)
12787 2105144 :     switch (codes[which])
12788 :       {
12789 20698 :       case BIT_AND_EXPR:
12790 20698 :       case BIT_IOR_EXPR:
12791 20698 :       case BIT_XOR_EXPR:
12792 20698 : 	gcc_assert (TREE_CODE (arg0xx[2 * which + 1]) == INTEGER_CST);
12793 20698 : 	cst0 = wi::to_wide (arg0xx[2 * which + 1]) & cst1; /* N & M.  */
12794 20698 : 	if (codes[which] == BIT_AND_EXPR)
12795 : 	  {
12796 20584 : 	    if (cst0 != cst1)
12797 : 	      break;
12798 : 	  }
12799 114 : 	else if (cst0 != 0)
12800 : 	  break;
12801 : 	/* If C or D is of the form (A & N) where
12802 : 	   (N & M) == M, or of the form (A | N) or
12803 : 	   (A ^ N) where (N & M) == 0, replace it with A.  */
12804 19157 : 	pmop[which] = arg0xx[2 * which];
12805 19157 : 	break;
12806 2084446 :       case ERROR_MARK:
12807 2084446 : 	if (TREE_CODE (pmop[which]) != INTEGER_CST)
12808 : 	  break;
12809 : 	/* If C or D is a N where (N & M) == 0, it can be
12810 : 	   omitted (replaced with 0).  */
12811 890398 : 	if ((code == PLUS_EXPR
12812 210300 : 	     || (code == MINUS_EXPR && which == 0))
12813 655499 : 	    && (cst1 & wi::to_wide (pmop[which])) == 0)
12814 138947 : 	  pmop[which] = build_int_cst (type, 0);
12815 : 	/* Similarly, with C - N where (-N & M) == 0.  */
12816 890398 : 	if (code == MINUS_EXPR
12817 445199 : 	    && which == 1
12818 648268 : 	    && (cst1 & -wi::to_wide (pmop[which])) == 0)
12819 193800 : 	  pmop[which] = build_int_cst (type, 0);
12820 : 	break;
12821 0 :       default:
12822 0 : 	gcc_unreachable ();
12823 :       }
12824 :
12825 :   /* Only build anything new if we optimized one or both arguments above.  */
12826 1053020 :   if (pmop[0] == arg00 && pmop[1] == arg01)
12827 :     return NULL_TREE;
12828 :
12829 351158 :   if (TYPE_OVERFLOW_WRAPS (type))
12830 :     return type;
12831 :   else
12832 2359 :     return unsigned_type_for (type); /* Use a wrapping type so dropping bits cannot create UB overflow.  */
12833 1053020 : }
12834 :
12835 : /* Used by contains_label_[p1]. */
12836 :
12837 : struct contains_label_data
12838 : {
12839 :   hash_set<tree> *pset;		/* Nodes already visited; shared with walk_tree so
12840 : 				   nested walks do not revisit subtrees.  */
12841 :   bool inside_switch_p;		/* True while walking the body of a SWITCH_EXPR.  */
12842 : };
12842 :
12843 : /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
12844 : a LABEL_EXPR or CASE_LABEL_EXPR not inside of another SWITCH_EXPR; otherwise
12845 : return NULL_TREE. Do not check the subtrees of GOTO_EXPR. */
12846 :
12847 : static tree
12848 4180331 : contains_label_1 (tree *tp, int *walk_subtrees, void *data)
12849 : {
12850 4180331 :   contains_label_data *d = (contains_label_data *) data;
12851 4180331 :   switch (TREE_CODE (*tp))
12852 :     {
12853 :     case LABEL_EXPR:
12854 :       return *tp;
12855 :
12856 0 :     case CASE_LABEL_EXPR:
12857 0 :       if (!d->inside_switch_p) /* A case label inside its own switch is not reachable from outside.  */
12858 : 	return *tp;
12859 :       return NULL_TREE;
12860 :
12861 0 :     case SWITCH_EXPR:
12862 0 :       if (!d->inside_switch_p)
12863 : 	{
12864 0 : 	  if (walk_tree (&SWITCH_COND (*tp), contains_label_1, data, d->pset))
12865 0 : 	    return *tp;
12866 0 : 	  d->inside_switch_p = true;
12867 0 : 	  if (walk_tree (&SWITCH_BODY (*tp), contains_label_1, data, d->pset))
12868 0 : 	    return *tp;
12869 0 : 	  d->inside_switch_p = false;
12870 0 : 	  *walk_subtrees = 0; /* Both operands walked explicitly above.  */
12871 : 	}
12872 :       return NULL_TREE;
12873 :
12874 6525 :     case GOTO_EXPR:
12875 6525 :       *walk_subtrees = 0; /* The label operand of a goto is a use, not a definition.  */
12876 6525 :       return NULL_TREE;
12877 :
12878 :     default:
12879 :       return NULL_TREE;
12880 :     }
12881 : }
12882 :
12883 : /* Return whether the sub-tree ST contains a label which is accessible from
12884 : outside the sub-tree. */
12885 :
12886 : static bool
12887 305460 : contains_label_p (tree st)
12888 : {
12889 305460 :   hash_set<tree> pset;
12890 305460 :   contains_label_data data = { &pset, false }; /* Not inside any switch initially.  */
12891 305460 :   return walk_tree (&st, contains_label_1, &data, &pset) != NULL_TREE;
12892 305460 : }
12893 :
12894 : /* Fold a ternary expression of code CODE and type TYPE with operands
12895 : OP0, OP1, and OP2. Return the folded expression if folding is
12896 : successful. Otherwise, return NULL_TREE. */
12897 :
12898 : tree
12899 43471014 : fold_ternary_loc (location_t loc, enum tree_code code, tree type,
12900 : 		  tree op0, tree op1, tree op2)
12901 : {
12902 43471014 :   tree tem;
12903 43471014 :   tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
12904 43471014 :   enum tree_code_class kind = TREE_CODE_CLASS (code);
12905 :
12906 43471014 :   gcc_assert (IS_EXPR_CODE_CLASS (kind)
12907 : 	      && TREE_CODE_LENGTH (code) == 3);
12908 :
12909 :   /* If this is a commutative operation, and OP0 is a constant, move it
12910 :      to OP1 to reduce the number of tests below.  */
12911 43471014 :   if (commutative_ternary_tree_code (code)
12912 43471014 :       && tree_swap_operands_p (op0, op1))
12913 28 :     return fold_build3_loc (loc, code, type, op1, op0, op2);
12914 :
12915 43470986 :   tem = generic_simplify (loc, code, type, op0, op1, op2);
12916 43470986 :   if (tem)
12917 :     return tem;
12918 :
12919 :   /* Strip any conversions that don't change the mode.  This is safe
12920 :      for every expression, except for a comparison expression because
12921 :      its signedness is derived from its operands.  So, in the latter
12922 :      case, only strip conversions that don't change the signedness.
12923 :
12924 :      Note that this is done as an internal manipulation within the
12925 :      constant folder, in order to find the simplest representation of
12926 :      the arguments so that their form can be studied.  In any cases,
12927 :      the appropriate type conversions should be put back in the tree
12928 :      that will get out of the constant folder.  */
12929 42516336 :   if (op0)
12930 :     {
12931 42450666 :       arg0 = op0;
12932 42450666 :       STRIP_NOPS (arg0);
12933 :     }
12934 :
12935 42516336 :   if (op1)
12936 :     {
12937 42516336 :       arg1 = op1;
12938 42516336 :       STRIP_NOPS (arg1);
12939 :     }
12940 :
12941 42516336 :   if (op2)
12942 :     {
12943 15669556 :       arg2 = op2;
12944 15669556 :       STRIP_NOPS (arg2);
12945 :     }
12946 :
12947 42516336 :   switch (code)
12948 :     {
12949 26846305 :     case COMPONENT_REF:
12950 26846305 :       if (TREE_CODE (arg0) == CONSTRUCTOR
12951 26846305 : 	  && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
12952 : 	{
12953 : 	  unsigned HOST_WIDE_INT idx;
12954 : 	  tree field, value;
12955 884 : 	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
12956 679 : 	    if (field == arg1)
12957 : 	      return value;
12958 : 	}
12959 :       return NULL_TREE;
12960 :
12961 13554079 :     case COND_EXPR:
12962 13554079 :     case VEC_COND_EXPR:
12963 :       /* Pedantic ANSI C says that a conditional expression is never an lvalue,
12964 : 	 so all simple results must be passed through pedantic_non_lvalue.  */
12965 13554079 :       if (TREE_CODE (arg0) == INTEGER_CST)
12966 : 	{
12967 428698 : 	  tree unused_op = integer_zerop (arg0) ? op1 : op2; /* The branch being dropped.  */
12968 428698 : 	  tem = integer_zerop (arg0) ? op2 : op1;
12969 : 	  /* Only optimize constant conditions when the selected branch
12970 : 	     has the same type as the COND_EXPR.  This avoids optimizing
12971 : 	     away "c ? x : throw", where the throw has a void type.
12972 : 	     Avoid throwing away that operand which contains label.  */
12973 428698 : 	  if ((!TREE_SIDE_EFFECTS (unused_op)
12974 305460 : 	       || !contains_label_p (unused_op))
12975 729337 : 	      && (! VOID_TYPE_P (TREE_TYPE (tem))
12976 346533 : 		  || VOID_TYPE_P (type)))
12977 414673 : 	    return protected_set_expr_location_unshare (tem, loc);
12978 14025 : 	  return NULL_TREE;
12979 : 	}
12980 13125381 :       else if (TREE_CODE (arg0) == VECTOR_CST)
12981 : 	{
12982 9901 : 	  unsigned HOST_WIDE_INT nelts;
12983 9901 : 	  if ((TREE_CODE (arg1) == VECTOR_CST
12984 7467 : 	       || TREE_CODE (arg1) == CONSTRUCTOR)
12985 2434 : 	      && (TREE_CODE (arg2) == VECTOR_CST
12986 0 : 		  || TREE_CODE (arg2) == CONSTRUCTOR)
12987 19802 : 	      && TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts))
12988 : 	    {
12989 2434 : 	      vec_perm_builder sel (nelts, nelts, 1);
12990 25166 : 	      for (unsigned int i = 0; i < nelts; i++)
12991 : 		{
12992 22732 : 		  tree val = VECTOR_CST_ELT (arg0, i);
12993 22732 : 		  if (integer_all_onesp (val))
12994 11435 : 		    sel.quick_push (i); /* Lane i selected from arg1.  */
12995 11297 : 		  else if (integer_zerop (val))
12996 11297 : 		    sel.quick_push (nelts + i); /* Lane i selected from arg2.  */
12997 : 		  else /* Currently unreachable.  */
12998 1742 : 		    return NULL_TREE;
12999 : 		}
13000 2434 : 	      vec_perm_indices indices (sel, 2, nelts);
13001 2434 : 	      tree t = fold_vec_perm (type, arg1, arg2, indices);
13002 2434 : 	      if (t != NULL_TREE)
13003 1742 : 		return t;
13004 4176 : 	    }
13005 : 	}
13006 :
13007 :       /* If we have A op B ? A : C, we may be able to convert this to a
13008 : 	 simpler expression, depending on the operation and the values
13009 : 	 of B and C.  Signed zeros prevent all of these transformations,
13010 : 	 for reasons given above each one.
13011 :
13012 : 	 Also try swapping the arguments and inverting the conditional.  */
13013 13123639 :       if (COMPARISON_CLASS_P (arg0)
13014 10814833 : 	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op1)
13015 13253423 : 	  && !HONOR_SIGNED_ZEROS (op1))
13016 : 	{
13017 119074 : 	  tem = fold_cond_expr_with_comparison (loc, type, TREE_CODE (arg0),
13018 119074 : 						TREE_OPERAND (arg0, 0),
13019 119074 : 						TREE_OPERAND (arg0, 1),
13020 : 						op1, op2);
13021 119074 : 	  if (tem)
13022 : 	    return tem;
13023 : 	}
13024 :
13025 13117024 :       if (COMPARISON_CLASS_P (arg0)
13026 10808218 : 	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op2)
13027 13566944 : 	  && !HONOR_SIGNED_ZEROS (op2))
13028 : 	{
13029 369149 : 	  enum tree_code comp_code = TREE_CODE (arg0);
13030 369149 : 	  tree arg00 = TREE_OPERAND (arg0, 0);
13031 369149 : 	  tree arg01 = TREE_OPERAND (arg0, 1);
13032 369149 : 	  comp_code = invert_tree_comparison (comp_code, HONOR_NANS (arg00));
13033 369149 : 	  if (comp_code != ERROR_MARK) /* Inversion can fail for NaN-honoring comparisons.  */
13034 369149 : 	    tem = fold_cond_expr_with_comparison (loc, type, comp_code,
13035 : 						  arg00,
13036 : 						  arg01,
13037 : 						  op2, op1);
13038 369149 : 	  if (tem)
13039 : 	    return tem;
13040 : 	}
13041 :
13042 :       /* If the second operand is simpler than the third, swap them
13043 : 	 since that produces better jump optimization results.  */
13044 12858507 :       if (truth_value_p (TREE_CODE (arg0))
13045 12858507 : 	  && tree_swap_operands_p (op1, op2))
13046 : 	{
13047 1962825 : 	  location_t loc0 = expr_location_or (arg0, loc);
13048 : 	  /* See if this can be inverted.  If it can't, possibly because
13049 : 	     it was a floating-point inequality comparison, don't do
13050 : 	     anything.  */
13051 1962825 : 	  tem = fold_invert_truthvalue (loc0, arg0);
13052 1962825 : 	  if (tem)
13053 1265986 : 	    return fold_build3_loc (loc, code, type, tem, op2, op1);
13054 : 	}
13055 :
13056 :       /* Convert A ? 1 : 0 to simply A.  */
13057 11592521 :       if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
13058 11261598 : 				 : (integer_onep (op1)
13059 399431 : 				    && !VECTOR_TYPE_P (type)))
13060 600957 : 	  && integer_zerop (op2)
13061 : 	  /* If we try to convert OP0 to our type, the
13062 : 	     call to fold will try to move the conversion inside
13063 : 	     a COND, which will recurse.  In that case, the COND_EXPR
13064 : 	     is probably the best choice, so leave it alone.  */
13065 12511223 : 	  && type == TREE_TYPE (arg0))
13066 37198 : 	return protected_set_expr_location_unshare (arg0, loc);
13067 :
13068 :       /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
13069 : 	 over COND_EXPR in cases such as floating point comparisons.  */
13070 11555323 :       if (integer_zerop (op1)
13071 345055 : 	  && code == COND_EXPR
13072 343293 : 	  && integer_onep (op2)
13073 30623 : 	  && !VECTOR_TYPE_P (type)
13074 11585946 : 	  && truth_value_p (TREE_CODE (arg0)))
13075 29070 : 	return fold_convert_loc (loc, type,
13076 29070 : 				 invert_truthvalue_loc (loc, arg0));
13077 :
13078 :       /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>).  */
13079 11526253 :       if (TREE_CODE (arg0) == LT_EXPR
13080 1328053 : 	  && integer_zerop (TREE_OPERAND (arg0, 1))
13081 36087 : 	  && integer_zerop (op2)
13082 11527196 : 	  && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13083 : 	{
13084 : 	  /* sign_bit_p looks through both zero and sign extensions,
13085 : 	     but for this optimization only sign extensions are
13086 : 	     usable.  */
13087 56 : 	  tree tem2 = TREE_OPERAND (arg0, 0);
13088 56 : 	  while (tem != tem2)
13089 : 	    {
13090 0 : 	      if (TREE_CODE (tem2) != NOP_EXPR
13091 0 : 		  || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
13092 : 		{
13093 : 		  tem = NULL_TREE;
13094 : 		  break;
13095 : 		}
13096 0 : 	      tem2 = TREE_OPERAND (tem2, 0);
13097 : 	    }
13098 : 	  /* sign_bit_p only checks ARG1 bits within A's precision.
13099 : 	     If <sign bit of A> has wider type than A, bits outside
13100 : 	     of A's precision in <sign bit of A> need to be checked.
13101 : 	     If they are all 0, this optimization needs to be done
13102 : 	     in unsigned A's type, if they are all 1 in signed A's type,
13103 : 	     otherwise this can't be done.  */
13104 56 : 	  if (tem
13105 56 : 	      && TYPE_PRECISION (TREE_TYPE (tem))
13106 56 : 		 < TYPE_PRECISION (TREE_TYPE (arg1))
13107 112 : 	      && TYPE_PRECISION (TREE_TYPE (tem))
13108 56 : 		 < TYPE_PRECISION (type))
13109 : 	    {
13110 56 : 	      int inner_width, outer_width;
13111 56 : 	      tree tem_type;
13112 :
13113 56 : 	      inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13114 56 : 	      outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13115 56 : 	      if (outer_width > TYPE_PRECISION (type))
13116 0 : 		outer_width = TYPE_PRECISION (type);
13117 :
13118 56 : 	      wide_int mask = wi::shifted_mask
13119 56 : 		(inner_width, outer_width - inner_width, false,
13120 56 : 		 TYPE_PRECISION (TREE_TYPE (arg1))); /* Bits of ARG1 above TEM's precision.  */
13121 :
13122 56 : 	      wide_int common = mask & wi::to_wide (arg1);
13123 56 : 	      if (common == mask)
13124 : 		{
13125 28 : 		  tem_type = signed_type_for (TREE_TYPE (tem));
13126 28 : 		  tem = fold_convert_loc (loc, tem_type, tem);
13127 : 		}
13128 28 : 	      else if (common == 0)
13129 : 		{
13130 0 : 		  tem_type = unsigned_type_for (TREE_TYPE (tem));
13131 0 : 		  tem = fold_convert_loc (loc, tem_type, tem);
13132 : 		}
13133 : 	      else
13134 : 		tem = NULL;
13135 56 : 	    }
13136 :
13137 56 : 	  if (tem)
13138 28 : 	    return
13139 56 : 	      fold_convert_loc (loc, type,
13140 : 				fold_build2_loc (loc, BIT_AND_EXPR,
13141 28 : 						 TREE_TYPE (tem), tem,
13142 : 						 fold_convert_loc (loc,
13143 28 : 								   TREE_TYPE (tem),
13144 28 : 								   arg1)));
13145 : 	}
13146 :
13147 :       /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N).  A & 1 was
13148 : 	 already handled above.  */
13149 11526225 :       if (TREE_CODE (arg0) == BIT_AND_EXPR
13150 347 : 	  && integer_onep (TREE_OPERAND (arg0, 1))
13151 3 : 	  && integer_zerop (op2)
13152 11526225 : 	  && integer_pow2p (arg1))
13153 : 	{
13154 0 : 	  tree tem = TREE_OPERAND (arg0, 0);
13155 0 : 	  STRIP_NOPS (tem);
13156 0 : 	  if (TREE_CODE (tem) == RSHIFT_EXPR
13157 0 : 	      && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
13158 0 : 	      && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
13159 0 : 		 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
13160 0 : 	    return fold_build2_loc (loc, BIT_AND_EXPR, type,
13161 : 				    fold_convert_loc (loc, type,
13162 0 : 						      TREE_OPERAND (tem, 0)),
13163 0 : 				    op1);
13164 : 	}
13165 :
13166 :       /* A & N ? N : 0 is simply A & N if N is a power of two.  This
13167 : 	 is probably obsolete because the first operand should be a
13168 : 	 truth value (that's why we have the two cases above), but let's
13169 : 	 leave it in until we can confirm this for all front-ends.  */
13170 11526225 :       if (integer_zerop (op2)
13171 1986204 : 	  && TREE_CODE (arg0) == NE_EXPR
13172 529089 : 	  && integer_zerop (TREE_OPERAND (arg0, 1))
13173 302054 : 	  && integer_pow2p (arg1)
13174 30973 : 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13175 91 : 	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13176 : 			      arg1, OEP_ONLY_CONST)
13177 : 	  /* operand_equal_p compares just value, not precision, so e.g.
13178 : 	     arg1 could be 8-bit -128 and be power of two, but BIT_AND_EXPR
13179 : 	     second operand 32-bit -128, which is not a power of two (or vice
13180 : 	     versa.  */
13181 11526225 : 	  && integer_pow2p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)))
13182 0 : 	return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
13183 :
13184 :       /* Disable the transformations below for vectors, since
13185 : 	 fold_binary_op_with_conditional_arg may undo them immediately,
13186 : 	 yielding an infinite loop.  */
13187 11526225 :       if (code == VEC_COND_EXPR)
13188 : 	return NULL_TREE;
13189 :
13190 :       /* Convert A ? B : 0 into A && B if A and B are truth values.  */
13191 11195302 :       if (integer_zerop (op2)
13192 1718955 : 	  && truth_value_p (TREE_CODE (arg0))
13193 1566908 : 	  && truth_value_p (TREE_CODE (arg1))
13194 11228036 : 	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13195 32734 : 	return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
13196 : 							   : TRUTH_ANDIF_EXPR,
13197 32734 : 				type, fold_convert_loc (loc, type, arg0), op1);
13198 :
13199 :       /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
13200 11162568 :       if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2) /* NOTE(review): code can't be VEC_COND_EXPR here (returned above), so the VEC arm is vestigial.  */
13201 525980 : 	  && truth_value_p (TREE_CODE (arg0))
13202 287129 : 	  && truth_value_p (TREE_CODE (arg1))
13203 11199099 : 	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13204 : 	{
13205 36531 : 	  location_t loc0 = expr_location_or (arg0, loc);
13206 : 	  /* Only perform transformation if ARG0 is easily inverted.  */
13207 36531 : 	  tem = fold_invert_truthvalue (loc0, arg0);
13208 36531 : 	  if (tem)
13209 36267 : 	    return fold_build2_loc (loc, code == VEC_COND_EXPR
13210 : 				    ? BIT_IOR_EXPR
13211 : 				    : TRUTH_ORIF_EXPR,
13212 : 				    type, fold_convert_loc (loc, type, tem),
13213 36267 : 				    op1);
13214 : 	}
13215 :
13216 :       /* Convert A ? 0 : B into !A && B if A and B are truth values.  */
13217 11126301 :       if (integer_zerop (arg1)
13218 314300 : 	  && truth_value_p (TREE_CODE (arg0))
13219 80453 : 	  && truth_value_p (TREE_CODE (op2))
13220 11126329 : 	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13221 : 	{
13222 28 : 	  location_t loc0 = expr_location_or (arg0, loc);
13223 : 	  /* Only perform transformation if ARG0 is easily inverted.  */
13224 28 : 	  tem = fold_invert_truthvalue (loc0, arg0);
13225 28 : 	  if (tem)
13226 0 : 	    return fold_build2_loc (loc, code == VEC_COND_EXPR
13227 : 				    ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
13228 : 				    type, fold_convert_loc (loc, type, tem),
13229 0 : 				    op2);
13230 : 	}
13231 :
13232 :       /* Convert A ? 1 : B into A || B if A and B are truth values.  */
13233 11126301 :       if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1) /* NOTE(review): VEC arm vestigial here too, see above.  */
13234 362233 : 	  && truth_value_p (TREE_CODE (arg0))
13235 257632 : 	  && truth_value_p (TREE_CODE (op2))
13236 11126487 : 	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13237 186 : 	return fold_build2_loc (loc, code == VEC_COND_EXPR
13238 : 				? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
13239 186 : 				type, fold_convert_loc (loc, type, arg0), op2);
13240 :
13241 :       return NULL_TREE;
13242 :
13243 0 :     case CALL_EXPR:
13244 :       /* CALL_EXPRs used to be ternary exprs.  Catch any mistaken uses
13245 : 	 of fold_ternary on them.  */
13246 0 :       gcc_unreachable ();
13247 :
13248 646163 :     case BIT_FIELD_REF:
13249 646163 :       if (TREE_CODE (arg0) == VECTOR_CST
13250 26222 : 	  && (type == TREE_TYPE (TREE_TYPE (arg0))
13251 1866 : 	      || (VECTOR_TYPE_P (type)
13252 1155 : 		  && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0))))
13253 25491 : 	  && tree_fits_uhwi_p (op1)
13254 671654 : 	  && tree_fits_uhwi_p (op2))
13255 : 	{
13256 25491 : 	  tree eltype = TREE_TYPE (TREE_TYPE (arg0));
13257 25491 : 	  unsigned HOST_WIDE_INT width
13258 25491 : 	    = (TREE_CODE (eltype) == BOOLEAN_TYPE
13259 25491 : 	       ? TYPE_PRECISION (eltype) : tree_to_uhwi (TYPE_SIZE (eltype)));
13260 25491 : 	  unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1); /* Bit size of the reference.  */
13261 25491 : 	  unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2); /* Bit position.  */
13262 :
13263 25491 : 	  if (n != 0
13264 25491 : 	      && (idx % width) == 0
13265 25491 : 	      && (n % width) == 0
13266 50982 : 	      && known_le ((idx + n) / width,
13267 : 			   TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))))
13268 : 	    {
13269 25491 : 	      idx = idx / width;
13270 25491 : 	      n = n / width;
13271 :
13272 25491 : 	      if (TREE_CODE (arg0) == VECTOR_CST)
13273 : 		{
13274 25491 : 		  if (n == 1)
13275 : 		    {
13276 24360 : 		      tem = VECTOR_CST_ELT (arg0, idx);
13277 24360 : 		      if (VECTOR_TYPE_P (type))
13278 4 : 			tem = fold_build1 (VIEW_CONVERT_EXPR, type, tem);
13279 24360 : 		      return tem;
13280 : 		    }
13281 :
13282 1131 : 		  tree_vector_builder vals (type, n, 1);
13283 6951 : 		  for (unsigned i = 0; i < n; ++i)
13284 5820 : 		    vals.quick_push (VECTOR_CST_ELT (arg0, idx + i));
13285 1131 : 		  return vals.build ();
13286 1131 : 		}
13287 : 	    }
13288 : 	}
13289 :
13290 :       /* On constants we can use native encode/interpret to constant
13291 : 	 fold (nearly) all BIT_FIELD_REFs.  */
13292 620672 :       if (CONSTANT_CLASS_P (arg0)
13293 1532 : 	  && can_native_interpret_type_p (type)
13294 : 	  && BITS_PER_UNIT == 8
13295 1532 : 	  && tree_fits_uhwi_p (op1)
13296 622204 : 	  && tree_fits_uhwi_p (op2))
13297 : 	{
13298 1532 : 	  unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13299 1532 : 	  unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
13300 : 	  /* Limit us to a reasonable amount of work.  To relax the
13301 : 	     other limitations we need bit-shifting of the buffer
13302 : 	     and rounding up the size.  */
13303 1532 : 	  if (bitpos % BITS_PER_UNIT == 0
13304 1532 : 	      && bitsize % BITS_PER_UNIT == 0
13305 1532 : 	      && bitsize <= MAX_BITSIZE_MODE_ANY_MODE)
13306 : 	    {
13307 1532 : 	      unsigned char b[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
13308 1532 : 	      unsigned HOST_WIDE_INT len
13309 1532 : 		= native_encode_expr (arg0, b, bitsize / BITS_PER_UNIT,
13310 1532 : 				      bitpos / BITS_PER_UNIT);
13311 1532 : 	      if (len > 0
13312 1532 : 		  && len * BITS_PER_UNIT >= bitsize)
13313 : 		{
13314 1532 : 		  tree v = native_interpret_expr (type, b,
13315 : 						  bitsize / BITS_PER_UNIT);
13316 1532 : 		  if (v)
13317 1526 : 		    return v;
13318 : 		}
13319 : 	    }
13320 : 	}
13321 :
13322 :       return NULL_TREE;
13323 :
13324 731877 :     case VEC_PERM_EXPR:
13325 :       /* Perform constant folding of BIT_INSERT_EXPR.  */
13326 731877 :       if (TREE_CODE (arg2) == VECTOR_CST
13327 720545 : 	  && TREE_CODE (op0) == VECTOR_CST
13328 15342 : 	  && TREE_CODE (op1) == VECTOR_CST)
13329 : 	{
13330 : 	  /* Build a vector of integers from the tree mask.  */
13331 3951 : 	  vec_perm_builder builder;
13332 3951 : 	  if (!tree_to_vec_perm_builder (&builder, arg2))
13333 : 	    return NULL_TREE;
13334 :
13335 : 	  /* Create a vec_perm_indices for the integer vector.  */
13336 3951 : 	  poly_uint64 nelts = TYPE_VECTOR_SUBPARTS (type);
13337 3951 : 	  bool single_arg = (op0 == op1);
13338 7902 : 	  vec_perm_indices sel (builder, single_arg ? 1 : 2, nelts);
13339 3951 : 	  return fold_vec_perm (type, op0, op1, sel);
13340 7902 : 	}
13341 :       return NULL_TREE;
13342 :
13343 14647 :     case BIT_INSERT_EXPR:
13344 :       /* Perform (partial) constant folding of BIT_INSERT_EXPR.  */
13345 14647 :       if (TREE_CODE (arg0) == INTEGER_CST
13346 14 : 	  && TREE_CODE (arg1) == INTEGER_CST)
13347 : 	{
13348 2 : 	  unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13349 2 : 	  unsigned bitsize = TYPE_PRECISION (TREE_TYPE (arg1));
13350 2 : 	  if (BYTES_BIG_ENDIAN)
13351 : 	    bitpos = TYPE_PRECISION (type) - bitpos - bitsize;
13352 2 : 	  wide_int tem = (wi::to_wide (arg0)
13353 4 : 			  & wi::shifted_mask (bitpos, bitsize, true,
13354 4 : 					      TYPE_PRECISION (type))); /* Clear the field being inserted into.  */
13355 2 : 	  wide_int tem2
13356 4 : 	    = wi::lshift (wi::zext (wi::to_wide (arg1, TYPE_PRECISION (type)),
13357 2 : 				    bitsize), bitpos); /* The inserted bits, shifted into place.  */
13358 2 : 	  return wide_int_to_tree (type, wi::bit_or (tem, tem2));
13359 2 : 	}
13360 14645 :       else if (TREE_CODE (arg0) == VECTOR_CST
13361 906 : 	       && CONSTANT_CLASS_P (arg1)
13362 14947 : 	       && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0)),
13363 302 : 				      TREE_TYPE (arg1)))
13364 : 	{
13365 302 : 	  unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13366 302 : 	  unsigned HOST_WIDE_INT elsize
13367 302 : 	    = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1)));
13368 302 : 	  if (bitpos % elsize == 0) /* Only fold element-aligned insertions.  */
13369 : 	    {
13370 302 : 	      unsigned k = bitpos / elsize;
13371 302 : 	      unsigned HOST_WIDE_INT nelts;
13372 302 : 	      if (operand_equal_p (VECTOR_CST_ELT (arg0, k), arg1, 0))
13373 43471014 : 		return arg0;
13374 290 : 	      else if (VECTOR_CST_NELTS (arg0).is_constant (&nelts))
13375 : 		{
13376 290 : 		  tree_vector_builder elts (type, nelts, 1);
13377 290 : 		  elts.quick_grow (nelts);
13378 1306 : 		  for (unsigned HOST_WIDE_INT i = 0; i < nelts; ++i)
13379 1016 : 		    elts[i] = (i == k ? arg1 : VECTOR_CST_ELT (arg0, i));
13380 290 : 		  return elts.build ();
13381 290 : 		}
13382 : 	    }
13383 : 	}
13384 :       return NULL_TREE;
13385 :
13386 :     default:
13387 :       return NULL_TREE;
13388 :     } /* switch (code) */
13389 : }
13390 :
13391 : /* Gets the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
13392 : of an array (or vector). *CTOR_IDX if non-NULL is updated with the
13393 : constructor element index of the value returned. If the element is
13394 : not found NULL_TREE is returned and *CTOR_IDX is updated to
13395 : the index of the element after the ACCESS_INDEX position (which
13396 : may be outside of the CTOR array). */
13397 :
13398 : tree
13399 702073 : get_array_ctor_element_at_index (tree ctor, offset_int access_index,
13400 : 				 unsigned *ctor_idx)
13401 : {
13402 702073 :   tree index_type = NULL_TREE;
13403 702073 :   signop index_sgn = UNSIGNED;
13404 702073 :   offset_int low_bound = 0;
13405 :
13406 702073 :   if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
13407 :     {
13408 702073 :       tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
13409 702073 :       if (domain_type && TYPE_MIN_VALUE (domain_type))
13410 : 	{
13411 : 	  /* Static constructors for variably sized objects makes no sense.  */
13412 702073 : 	  gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
13413 702073 : 	  index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
13414 : 	  /* ??? When it is obvious that the range is signed, treat it so.  */
13415 702073 : 	  if (TYPE_UNSIGNED (index_type)
13416 351459 : 	      && TYPE_MAX_VALUE (domain_type)
13417 1053501 : 	      && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type),
13418 351428 : 				  TYPE_MIN_VALUE (domain_type))) /* max < min: wrapped, really signed.  */
13419 : 	    {
13420 0 : 	      index_sgn = SIGNED;
13421 0 : 	      low_bound
13422 0 : 		= offset_int::from (wi::to_wide (TYPE_MIN_VALUE (domain_type)),
13423 : 				    SIGNED);
13424 : 	    }
13425 : 	  else
13426 : 	    {
13427 702073 : 	      index_sgn = TYPE_SIGN (index_type);
13428 702073 : 	      low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
13429 : 	    }
13430 : 	}
13431 :     }
13432 :
13433 702073 :   if (index_type)
13434 702073 :     access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
13435 : 			    index_sgn);
13436 :
13437 702073 :   offset_int index = low_bound;
13438 702073 :   if (index_type)
13439 702073 :     index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
13440 :
13441 702073 :   offset_int max_index = index;
13442 702073 :   unsigned cnt;
13443 702073 :   tree cfield, cval;
13444 702073 :   bool first_p = true; /* First element with NULL index starts at low_bound.  */
13445 :
13446 14589533 :   FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
13447 :     {
13448 :       /* Array constructor might explicitly set index, or specify a range,
13449 : 	 or leave index NULL meaning that it is next index after previous
13450 : 	 one.  */
13451 14588400 :       if (cfield)
13452 : 	{
13453 6517656 : 	  if (TREE_CODE (cfield) == INTEGER_CST)
13454 13033862 : 	    max_index = index
13455 6516931 : 	      = offset_int::from (wi::to_wide (cfield), index_sgn);
13456 : 	  else
13457 : 	    {
13458 725 : 	      gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
13459 725 : 	      index = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 0)),
13460 : 					index_sgn);
13461 725 : 	      max_index
13462 725 : 	        = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 1)),
13463 : 				    index_sgn);
13464 725 : 	      gcc_checking_assert (wi::le_p (index, max_index, index_sgn));
13465 : 	    }
13466 : 	}
13467 8070744 :       else if (!first_p)
13468 : 	{
13469 7839506 : 	  index = max_index + 1;
13470 7839506 : 	  if (index_type)
13471 7839506 : 	    index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
13472 7839506 : 	  gcc_checking_assert (wi::gt_p (index, max_index, index_sgn));
13473 7839506 : 	  max_index = index;
13474 : 	}
13475 :       else
13476 : 	first_p = false;
13477 :
13478 14588400 :       if (TREE_CODE (cval) == RAW_DATA_CST)
13479 2630 : 	max_index += RAW_DATA_LENGTH (cval) - 1; /* A RAW_DATA_CST covers multiple elements.  */
13480 :
13481 :       /* Do we have match?  */
13482 14588400 :       if (wi::cmp (access_index, index, index_sgn) >= 0)
13483 : 	{
13484 14588112 : 	  if (wi::cmp (access_index, max_index, index_sgn) <= 0)
13485 : 	    {
13486 700824 : 	      if (ctor_idx)
13487 700824 : 		*ctor_idx = cnt;
13488 700824 : 	      return cval;
13489 : 	    }
13490 : 	}
13491 288 :       else if (in_gimple_form)
13492 : 	/* We're past the element we search for.  Note during parsing
13493 : 	   the elements might not be sorted.
13494 : 	   ??? We should use a binary search and a flag on the
13495 : 	   CONSTRUCTOR as to whether elements are sorted in declaration
13496 : 	   order.  */
13497 : 	break;
13498 :     }
13499 1249 :   if (ctor_idx)
13500 1249 :     *ctor_idx = cnt;
13501 :   return NULL_TREE;
13502 : }
13503 :
13504 : /* Perform constant folding and related simplification of EXPR.
13505 : The related simplifications include x*1 => x, x*0 => 0, etc.,
13506 : and application of the associative law.
13507 : NOP_EXPR conversions may be removed freely (as long as we
13508 : are careful not to change the type of the overall expression).
13509 : We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13510 : but we can constant-fold them if they have constant operands. */
13511 :
13512 : #ifdef ENABLE_FOLD_CHECKING
13513 : # define fold(x) fold_1 (x)
13514 : static tree fold_1 (tree);
13515 : static
13516 : #endif
tree
fold (tree expr)
{
  const tree t = expr;
  enum tree_code code = TREE_CODE (t);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree tem;
  location_t loc = EXPR_LOCATION (expr);

  /* Return right away if a constant.  */
  if (kind == tcc_constant)
    return t;

  /* CALL_EXPR-like objects with variable numbers of operands are
     treated specially.  */
  if (kind == tcc_vl_exp)
    {
      if (code == CALL_EXPR)
	{
	  tem = fold_call_expr (loc, expr, false);
	  return tem ? tem : expr;
	}
      return expr;
    }

  /* Fixed-arity expressions dispatch to the matching worker; each
     worker returns NULL_TREE when no simplification applies, in which
     case the original expression is returned unchanged.  */
  if (IS_EXPR_CODE_CLASS (kind))
    {
      tree type = TREE_TYPE (t);
      tree op0, op1, op2;

      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  op0 = TREE_OPERAND (t, 0);
	  tem = fold_unary_loc (loc, code, type, op0);
	  return tem ? tem : expr;
	case 2:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  tem = fold_binary_loc (loc, code, type, op0, op1);
	  return tem ? tem : expr;
	case 3:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  op2 = TREE_OPERAND (t, 2);
	  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
	  return tem ? tem : expr;
	default:
	  break;
	}
    }

  switch (code)
    {
    case ARRAY_REF:
      /* Fold a constant index into a CONSTRUCTOR to the indexed
	 element, when the element can be extracted safely.  */
      {
	tree op0 = TREE_OPERAND (t, 0);
	tree op1 = TREE_OPERAND (t, 1);

	if (TREE_CODE (op1) == INTEGER_CST
	    && TREE_CODE (op0) == CONSTRUCTOR
	    && ! type_contains_placeholder_p (TREE_TYPE (op0)))
	  {
	    unsigned int idx;
	    tree val
	      = get_array_ctor_element_at_index (op0, wi::to_offset (op1),
						 &idx);
	    if (val)
	      {
		if (TREE_CODE (val) != RAW_DATA_CST)
		  return val;
		/* The element is embedded in a RAW_DATA_CST blob that
		   covers several consecutive array elements; we need a
		   constant INTEGER_CST start index to locate the byte.  */
		if (CONSTRUCTOR_ELT (op0, idx)->index == NULL_TREE
		    || (TREE_CODE (CONSTRUCTOR_ELT (op0, idx)->index)
			!= INTEGER_CST))
		  return t;
		/* Byte offset of the accessed element within the blob.  */
		offset_int o
		  = (wi::to_offset (op1)
		     - wi::to_offset (CONSTRUCTOR_ELT (op0, idx)->index));
		gcc_checking_assert (o < RAW_DATA_LENGTH (val));
		return build_int_cst (TREE_TYPE (val),
				      RAW_DATA_UCHAR_ELT (val, o.to_uhwi ()));
	      }
	  }

	return t;
      }

    /* Return a VECTOR_CST if possible.  */
    case CONSTRUCTOR:
      {
	tree type = TREE_TYPE (t);
	if (TREE_CODE (type) != VECTOR_TYPE)
	  return t;

	/* All elements must already be constants.  */
	unsigned i;
	tree val;
	FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
	  if (! CONSTANT_CLASS_P (val))
	    return t;

	return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
      }

    case CONST_DECL:
      /* Replace a constant declaration by (the fold of) its value.  */
      return fold (DECL_INITIAL (t));

    default:
      return t;
    } /* switch (code) */
}
13627 :
13628 : #ifdef ENABLE_FOLD_CHECKING
13629 : #undef fold
13630 :
13631 : static void fold_checksum_tree (const_tree, struct md5_ctx *,
13632 : hash_table<nofree_ptr_hash<const tree_node> > *);
13633 : static void fold_check_failed (const_tree, const_tree);
13634 : void print_fold_checksum (const_tree);
13635 :
13636 : /* When --enable-checking=fold, compute a digest of expr before
13637 : and after actual fold call to see if fold did not accidentally
13638 : change original expr. */
13639 :
13640 : tree
13641 : fold (tree expr)
13642 : {
13643 : tree ret;
13644 : struct md5_ctx ctx;
13645 : unsigned char checksum_before[16], checksum_after[16];
13646 : hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13647 :
13648 : md5_init_ctx (&ctx);
13649 : fold_checksum_tree (expr, &ctx, &ht);
13650 : md5_finish_ctx (&ctx, checksum_before);
13651 : ht.empty ();
13652 :
13653 : ret = fold_1 (expr);
13654 :
13655 : md5_init_ctx (&ctx);
13656 : fold_checksum_tree (expr, &ctx, &ht);
13657 : md5_finish_ctx (&ctx, checksum_after);
13658 :
13659 : if (memcmp (checksum_before, checksum_after, 16))
13660 : fold_check_failed (expr, ret);
13661 :
13662 : return ret;
13663 : }
13664 :
13665 : void
13666 : print_fold_checksum (const_tree expr)
13667 : {
13668 : struct md5_ctx ctx;
13669 : unsigned char checksum[16], cnt;
13670 : hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13671 :
13672 : md5_init_ctx (&ctx);
13673 : fold_checksum_tree (expr, &ctx, &ht);
13674 : md5_finish_ctx (&ctx, checksum);
13675 : for (cnt = 0; cnt < 16; ++cnt)
13676 : fprintf (stderr, "%02x", checksum[cnt]);
13677 : putc ('\n', stderr);
13678 : }
13679 :
/* Report that fold modified its input tree in place; always a hard
   internal error in an --enable-checking=fold build.  EXPR and RET are
   accepted for debugger inspection only and are otherwise unused.  */

static void
fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
{
  internal_error ("fold check: original tree changed by fold");
}
13685 :
/* Accumulate into CTX an MD5 digest of EXPR and of every tree node
   reachable from it, using HT to visit each node at most once.  Fields
   that fold is legitimately allowed to mutate (assembler names, symtab
   pointers, type caches, the no-warning bit, ...) are masked out by
   hashing a scrubbed stack copy of the node instead of the original.  */

static void
fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
		    hash_table<nofree_ptr_hash <const tree_node> > *ht)
{
  const tree_node **slot;
  enum tree_code code;
  union tree_node *buf;
  int i, len;

 recursive_label:
  if (expr == NULL)
    return;
  /* Hash each node only once; cycles are common in tree graphs.  */
  slot = ht->find_slot (expr, INSERT);
  if (*slot != NULL)
    return;
  *slot = expr;
  code = TREE_CODE (expr);
  if (TREE_CODE_CLASS (code) == tcc_declaration
      && HAS_DECL_ASSEMBLER_NAME_P (expr))
    {
      /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified.  */
      size_t sz = tree_size (expr);
      buf = XALLOCAVAR (union tree_node, sz);
      memcpy ((char *) buf, expr, sz);
      SET_DECL_ASSEMBLER_NAME ((tree) buf, NULL);
      buf->decl_with_vis.symtab_node = NULL;
      buf->base.nowarning_flag = 0;
      expr = (tree) buf;
    }
  else if (TREE_CODE_CLASS (code) == tcc_type
	   && (TYPE_POINTER_TO (expr)
	       || TYPE_REFERENCE_TO (expr)
	       || TYPE_CACHED_VALUES_P (expr)
	       || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
	       || TYPE_NEXT_VARIANT (expr)
	       || TYPE_ALIAS_SET_KNOWN_P (expr)))
    {
      /* Allow these fields to be modified.  */
      tree tmp;
      size_t sz = tree_size (expr);
      buf = XALLOCAVAR (union tree_node, sz);
      memcpy ((char *) buf, expr, sz);
      expr = tmp = (tree) buf;
      TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
      TYPE_POINTER_TO (tmp) = NULL;
      TYPE_REFERENCE_TO (tmp) = NULL;
      TYPE_NEXT_VARIANT (tmp) = NULL;
      TYPE_ALIAS_SET (tmp) = -1;
      if (TYPE_CACHED_VALUES_P (tmp))
	{
	  TYPE_CACHED_VALUES_P (tmp) = 0;
	  TYPE_CACHED_VALUES (tmp) = NULL;
	}
    }
  else if (warning_suppressed_p (expr) && (DECL_P (expr) || EXPR_P (expr)))
    {
      /* Allow the no-warning bit to be set.  Perhaps we shouldn't allow
	 that and change builtins.cc etc. instead - see PR89543.  */
      size_t sz = tree_size (expr);
      buf = XALLOCAVAR (union tree_node, sz);
      memcpy ((char *) buf, expr, sz);
      buf->base.nowarning_flag = 0;
      expr = (tree) buf;
    }
  /* Hash the (possibly scrubbed) node body itself, then recurse into
     the trees it references.  */
  md5_process_bytes (expr, tree_size (expr), ctx);
  if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
    fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
  if (TREE_CODE_CLASS (code) != tcc_type
      && TREE_CODE_CLASS (code) != tcc_declaration
      && code != TREE_LIST
      && code != SSA_NAME
      && CODE_CONTAINS_STRUCT (code, TS_COMMON))
    fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_constant:
      switch (code)
	{
	case STRING_CST:
	  md5_process_bytes (TREE_STRING_POINTER (expr),
			     TREE_STRING_LENGTH (expr), ctx);
	  break;
	case COMPLEX_CST:
	  fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
	  fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
	  break;
	case VECTOR_CST:
	  len = vector_cst_encoded_nelts (expr);
	  for (i = 0; i < len; ++i)
	    fold_checksum_tree (VECTOR_CST_ENCODED_ELT (expr, i), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_exceptional:
      switch (code)
	{
	case TREE_LIST:
	  fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
	  fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
	  /* Iterate down the chain instead of recursing, to bound stack
	     depth on long lists.  */
	  expr = TREE_CHAIN (expr);
	  goto recursive_label;
	  break;
	case TREE_VEC:
	  for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
	    fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_expression:
    case tcc_reference:
    case tcc_comparison:
    case tcc_unary:
    case tcc_binary:
    case tcc_statement:
    case tcc_vl_exp:
      len = TREE_OPERAND_LENGTH (expr);
      for (i = 0; i < len; ++i)
	fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
      break;
    case tcc_declaration:
      fold_checksum_tree (DECL_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
	{
	  fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
	  fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
	  fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
	  fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
	  fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
	}

      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
	{
	  if (TREE_CODE (expr) == FUNCTION_DECL)
	    {
	      fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
	      fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
	    }
	  fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
	}
      break;
    case tcc_type:
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
	fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
      if (INTEGRAL_TYPE_P (expr)
	  || SCALAR_FLOAT_TYPE_P (expr))
	{
	  fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
	  fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
	}
      fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
      if (RECORD_OR_UNION_TYPE_P (expr))
	fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
      fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
      break;
    default:
      break;
    }
}
13853 :
13854 : /* Helper function for outputting the checksum of a tree T. When
13855 : debugging with gdb, you can "define mynext" to be "next" followed
13856 : by "call debug_fold_checksum (op0)", then just trace down till the
13857 : outputs differ. */
13858 :
13859 : DEBUG_FUNCTION void
13860 : debug_fold_checksum (const_tree t)
13861 : {
13862 : int i;
13863 : unsigned char checksum[16];
13864 : struct md5_ctx ctx;
13865 : hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13866 :
13867 : md5_init_ctx (&ctx);
13868 : fold_checksum_tree (t, &ctx, &ht);
13869 : md5_finish_ctx (&ctx, checksum);
13870 : ht.empty ();
13871 :
13872 : for (i = 0; i < 16; i++)
13873 : fprintf (stderr, "%d ", checksum[i]);
13874 :
13875 : fprintf (stderr, "\n");
13876 : }
13877 :
13878 : #endif
13879 :
13880 : /* Fold a unary tree expression with code CODE of type TYPE with an
13881 : operand OP0. LOC is the location of the resulting expression.
13882 : Return a folded expression if successful. Otherwise, return a tree
13883 : expression with code CODE of type TYPE with an operand OP0. */
13884 :
13885 : tree
13886 1021438260 : fold_build1_loc (location_t loc,
13887 : enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13888 : {
13889 1021438260 : tree tem;
13890 : #ifdef ENABLE_FOLD_CHECKING
13891 : unsigned char checksum_before[16], checksum_after[16];
13892 : struct md5_ctx ctx;
13893 : hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13894 :
13895 : md5_init_ctx (&ctx);
13896 : fold_checksum_tree (op0, &ctx, &ht);
13897 : md5_finish_ctx (&ctx, checksum_before);
13898 : ht.empty ();
13899 : #endif
13900 :
13901 1021438260 : tem = fold_unary_loc (loc, code, type, op0);
13902 1021438260 : if (!tem)
13903 524108911 : tem = build1_loc (loc, code, type, op0 PASS_MEM_STAT);
13904 :
13905 : #ifdef ENABLE_FOLD_CHECKING
13906 : md5_init_ctx (&ctx);
13907 : fold_checksum_tree (op0, &ctx, &ht);
13908 : md5_finish_ctx (&ctx, checksum_after);
13909 :
13910 : if (memcmp (checksum_before, checksum_after, 16))
13911 : fold_check_failed (op0, tem);
13912 : #endif
13913 1021438260 : return tem;
13914 : }
13915 :
13916 : /* Fold a binary tree expression with code CODE of type TYPE with
13917 : operands OP0 and OP1. LOC is the location of the resulting
13918 : expression. Return a folded expression if successful. Otherwise,
13919 : return a tree expression with code CODE of type TYPE with operands
13920 : OP0 and OP1. */
13921 :
tree
fold_build2_loc (location_t loc,
		 enum tree_code code, tree type, tree op0, tree op1
		 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  /* Digest both operands before folding so any in-place modification
     can be detected afterwards.  */
  unsigned char checksum_before_op0[16],
		checksum_before_op1[16],
		checksum_after_op0[16],
		checksum_after_op1[16];
  struct md5_ctx ctx;
  hash_table<nofree_ptr_hash<const tree_node> > ht (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  ht.empty ();

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  ht.empty ();
#endif

  /* Fold if possible; otherwise construct the plain expression.  */
  tem = fold_binary_loc (loc, code, type, op0, op1);
  if (!tem)
    tem = build2_loc (loc, code, type, op0, op1 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  /* Re-digest each operand and fail hard on any change.  */
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  ht.empty ();

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_op1);

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);
#endif
  return tem;
}
13969 :
13970 : /* Fold a ternary tree expression with code CODE of type TYPE with
13971 : operands OP0, OP1, and OP2. Return a folded expression if
13972 : successful. Otherwise, return a tree expression with code CODE of
13973 : type TYPE with operands OP0, OP1, and OP2. */
13974 :
tree
fold_build3_loc (location_t loc, enum tree_code code, tree type,
		 tree op0, tree op1, tree op2 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  /* Digest all three operands before folding so any in-place
     modification can be detected afterwards.  */
  unsigned char checksum_before_op0[16],
		checksum_before_op1[16],
		checksum_before_op2[16],
		checksum_after_op0[16],
		checksum_after_op1[16],
		checksum_after_op2[16];
  struct md5_ctx ctx;
  hash_table<nofree_ptr_hash<const tree_node> > ht (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  ht.empty ();

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  ht.empty ();

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_op2);
  ht.empty ();
#endif

  /* Variable-arity codes must go through fold_build_call_array_loc.  */
  gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
  if (!tem)
    tem = build3_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  /* Re-digest each operand and fail hard on any change.  */
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  ht.empty ();

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  ht.empty ();

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_op2);

  if (memcmp (checksum_before_op2, checksum_after_op2, 16))
    fold_check_failed (op2, tem);
#endif
  return tem;
}
14037 :
14038 : /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14039 : arguments in ARGARRAY, and a null static chain.
14040 : Return a folded expression if successful. Otherwise, return a CALL_EXPR
14041 : of type TYPE from the given operands as constructed by build_call_array. */
14042 :
tree
fold_build_call_array_loc (location_t loc, tree type, tree fn,
			   int nargs, tree *argarray)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  /* Digest the callee and the argument list before folding so any
     in-place modification can be detected afterwards.  */
  unsigned char checksum_before_fn[16],
		checksum_before_arglist[16],
		checksum_after_fn[16],
		checksum_after_arglist[16];
  struct md5_ctx ctx;
  hash_table<nofree_ptr_hash<const tree_node> > ht (32);
  int i;

  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_fn);
  ht.empty ();

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_arglist);
  ht.empty ();
#endif

  /* Fold the call if possible; otherwise build a plain CALL_EXPR.  */
  tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
  if (!tem)
    tem = build_call_array_loc (loc, type, fn, nargs, argarray);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_fn);
  ht.empty ();

  if (memcmp (checksum_before_fn, checksum_after_fn, 16))
    fold_check_failed (fn, tem);

  /* All arguments share one digest, so a change is detected but not
     attributed to a particular argument (hence NULL_TREE below).  */
  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_arglist);

  if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
    fold_check_failed (NULL_TREE, tem);
#endif
  return tem;
}
14092 :
14093 : /* Perform constant folding and related simplification of initializer
14094 : expression EXPR. These behave identically to "fold_buildN" but ignore
14095 : potential run-time traps and exceptions that fold must preserve. */
14096 :
14097 : #define START_FOLD_INIT \
14098 : int saved_signaling_nans = flag_signaling_nans;\
14099 : int saved_trapping_math = flag_trapping_math;\
14100 : int saved_rounding_math = flag_rounding_math;\
14101 : int saved_trapv = flag_trapv;\
14102 : int saved_folding_initializer = folding_initializer;\
14103 : flag_signaling_nans = 0;\
14104 : flag_trapping_math = 0;\
14105 : flag_rounding_math = 0;\
14106 : flag_trapv = 0;\
14107 : folding_initializer = 1;
14108 :
14109 : #define END_FOLD_INIT \
14110 : flag_signaling_nans = saved_signaling_nans;\
14111 : flag_trapping_math = saved_trapping_math;\
14112 : flag_rounding_math = saved_rounding_math;\
14113 : flag_trapv = saved_trapv;\
14114 : folding_initializer = saved_folding_initializer;
14115 :
14116 : tree
14117 543133 : fold_init (tree expr)
14118 : {
14119 543133 : tree result;
14120 543133 : START_FOLD_INIT;
14121 :
14122 543133 : result = fold (expr);
14123 :
14124 543133 : END_FOLD_INIT;
14125 543133 : return result;
14126 : }
14127 :
14128 : tree
14129 2988819 : fold_build1_initializer_loc (location_t loc, enum tree_code code,
14130 : tree type, tree op)
14131 : {
14132 2988819 : tree result;
14133 2988819 : START_FOLD_INIT;
14134 :
14135 2988819 : result = fold_build1_loc (loc, code, type, op);
14136 :
14137 2988819 : END_FOLD_INIT;
14138 2988819 : return result;
14139 : }
14140 :
14141 : tree
14142 50475 : fold_build2_initializer_loc (location_t loc, enum tree_code code,
14143 : tree type, tree op0, tree op1)
14144 : {
14145 50475 : tree result;
14146 50475 : START_FOLD_INIT;
14147 :
14148 50475 : result = fold_build2_loc (loc, code, type, op0, op1);
14149 :
14150 50475 : END_FOLD_INIT;
14151 50475 : return result;
14152 : }
14153 :
14154 : tree
14155 3486 : fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14156 : int nargs, tree *argarray)
14157 : {
14158 3486 : tree result;
14159 3486 : START_FOLD_INIT;
14160 :
14161 3486 : result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14162 :
14163 3486 : END_FOLD_INIT;
14164 3486 : return result;
14165 : }
14166 :
14167 : tree
14168 45063714 : fold_binary_initializer_loc (location_t loc, tree_code code, tree type,
14169 : tree lhs, tree rhs)
14170 : {
14171 45063714 : tree result;
14172 45063714 : START_FOLD_INIT;
14173 :
14174 45063714 : result = fold_binary_loc (loc, code, type, lhs, rhs);
14175 :
14176 45063714 : END_FOLD_INIT;
14177 45063714 : return result;
14178 : }
14179 :
14180 : #undef START_FOLD_INIT
14181 : #undef END_FOLD_INIT
14182 :
14183 : /* Determine if first argument is a multiple of second argument. Return
14184 : false if it is not, or we cannot easily determined it to be.
14185 :
14186 : An example of the sort of thing we care about (at this point; this routine
14187 : could surely be made more general, and expanded to do what the *_DIV_EXPR's
14188 : fold cases do now) is discovering that
14189 :
14190 : SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14191 :
14192 : is a multiple of
14193 :
14194 : SAVE_EXPR (J * 8)
14195 :
14196 : when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14197 :
14198 : This code also handles discovering that
14199 :
14200 : SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14201 :
14202 : is a multiple of 8 so we don't have to worry about dealing with a
14203 : possible remainder.
14204 :
14205 : Note that we *look* inside a SAVE_EXPR only to determine how it was
14206 : calculated; it is not safe for fold to do much of anything else with the
14207 : internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14208 : at run time. For example, the latter example above *cannot* be implemented
14209 : as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14210 : evaluation time of the original SAVE_EXPR is not necessarily the same at
14211 : the time the new expression is evaluated. The only optimization of this
14212 : sort that would be valid is changing
14213 :
14214 : SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14215 :
14216 : divided by 8 to
14217 :
14218 : SAVE_EXPR (I) * SAVE_EXPR (J)
14219 :
14220 : (where the same SAVE_EXPR (J) is used in the original and the
14221 : transformed version).
14222 :
14223 : NOWRAP specifies whether all outer operations in TYPE should
14224 : be considered not wrapping. Any type conversion within TOP acts
14225 : as a barrier and we will fall back to NOWRAP being false.
14226 : NOWRAP is mostly used to treat expressions in TYPE_SIZE and friends
14227 : as not wrapping even though they are generally using unsigned arithmetic. */
14228 :
bool
multiple_of_p (tree type, const_tree top, const_tree bottom, bool nowrap)
{
  gimple *stmt;
  tree op1, op2;

  /* Anything is trivially a multiple of itself.  */
  if (operand_equal_p (top, bottom, 0))
    return true;

  if (TREE_CODE (type) != INTEGER_TYPE)
    return false;

  switch (TREE_CODE (top))
    {
    case BIT_AND_EXPR:
      /* Bitwise and provides a power of two multiple.  If the mask is
	 a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
      if (!integer_pow2p (bottom))
	return false;
      return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom, nowrap)
	      || multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap));

    case MULT_EXPR:
      /* If the multiplication can wrap we cannot recurse further unless
	 the bottom is a power of two which is where wrapping does not
	 matter.  */
      if (!nowrap
	  && !TYPE_OVERFLOW_UNDEFINED (type)
	  && !integer_pow2p (bottom))
	return false;
      if (TREE_CODE (bottom) == INTEGER_CST)
	{
	  /* Canonicalize a constant factor into OP2.  */
	  op1 = TREE_OPERAND (top, 0);
	  op2 = TREE_OPERAND (top, 1);
	  if (TREE_CODE (op1) == INTEGER_CST)
	    std::swap (op1, op2);
	  if (TREE_CODE (op2) == INTEGER_CST)
	    {
	      if (multiple_of_p (type, op2, bottom, nowrap))
		return true;
	      /* Handle multiple_of_p ((x * 2 + 2) * 4, 8).  */
	      if (multiple_of_p (type, bottom, op2, nowrap))
		{
		  /* BOTTOM divides by OP2 exactly here; check OP1
		     against the quotient BOTTOM / OP2.  */
		  widest_int w = wi::sdiv_trunc (wi::to_widest (bottom),
						 wi::to_widest (op2));
		  if (wi::fits_to_tree_p (w, TREE_TYPE (bottom)))
		    {
		      op2 = wide_int_to_tree (TREE_TYPE (bottom), w);
		      return multiple_of_p (type, op1, op2, nowrap);
		    }
		}
	      return multiple_of_p (type, op1, bottom, nowrap);
	    }
	}
      /* No constant factor: either operand being a multiple suffices.  */
      return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom, nowrap)
	      || multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap));

    case LSHIFT_EXPR:
      /* Handle X << CST as X * (1 << CST) and only process the constant.  */
      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
	{
	  op1 = TREE_OPERAND (top, 1);
	  /* Only in-range shift counts; larger ones would be UB.  */
	  if (wi::to_widest (op1) < TYPE_PRECISION (type))
	    {
	      wide_int mul_op
		= wi::one (TYPE_PRECISION (type)) << wi::to_wide (op1);
	      return multiple_of_p (type,
				    wide_int_to_tree (type, mul_op), bottom,
				    nowrap);
	    }
	}
      return false;

    case MINUS_EXPR:
    case PLUS_EXPR:
      /* If the addition or subtraction can wrap we cannot recurse further
	 unless bottom is a power of two which is where wrapping does not
	 matter.  */
      if (!nowrap
	  && !TYPE_OVERFLOW_UNDEFINED (type)
	  && !integer_pow2p (bottom))
	return false;

      /* Handle cases like op0 + 0xfffffffd as op0 - 3 if the expression has
	 unsigned type.  For example, (X / 3) + 0xfffffffd is multiple of 3,
	 but 0xfffffffd is not.  */
      op1 = TREE_OPERAND (top, 1);
      if (TREE_CODE (top) == PLUS_EXPR
	  && nowrap
	  && TYPE_UNSIGNED (type)
	  && TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sign_bit (op1))
	op1 = fold_build1 (NEGATE_EXPR, type, op1);

      /* It is impossible to prove if op0 +- op1 is multiple of bottom
	 precisely, so be conservative here checking if both op0 and op1
	 are multiple of bottom.  Note we check the second operand first
	 since it's usually simpler.  */
      return (multiple_of_p (type, op1, bottom, nowrap)
	      && multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap));

    CASE_CONVERT:
      /* Can't handle conversions from non-integral or wider integral type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
	  || (TYPE_PRECISION (type)
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
	return false;
      /* NOWRAP only extends to operations in the outermost type so
	 make sure to strip it off here.  */
      return multiple_of_p (TREE_TYPE (TREE_OPERAND (top, 0)),
			    TREE_OPERAND (top, 0), bottom, false);

    case SAVE_EXPR:
      /* Look through SAVE_EXPR to how its value was computed.  */
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap);

    case COND_EXPR:
      /* Both arms must be multiples of BOTTOM.  */
      return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom, nowrap)
	      && multiple_of_p (type, TREE_OPERAND (top, 2), bottom, nowrap));

    case INTEGER_CST:
      /* Division by zero is rejected here, not a multiple.  */
      if (TREE_CODE (bottom) != INTEGER_CST || integer_zerop (bottom))
	return false;
      return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
				SIGNED);

    case SSA_NAME:
      if (TREE_CODE (bottom) == INTEGER_CST
	  && (stmt = SSA_NAME_DEF_STMT (top)) != NULL
	  && gimple_code (stmt) == GIMPLE_ASSIGN)
	{
	  enum tree_code code = gimple_assign_rhs_code (stmt);

	  /* Check for special cases to see if top is defined as multiple
	     of bottom:

	       top = (X & ~(bottom - 1) ; bottom is power of 2

	     or

	       Y = X % bottom
	       top = X - Y.  */
	  if (code == BIT_AND_EXPR
	      && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
	      && TREE_CODE (op2) == INTEGER_CST
	      && integer_pow2p (bottom)
	      && wi::multiple_of_p (wi::to_widest (op2),
				    wi::to_widest (bottom), SIGNED))
	    return true;

	  op1 = gimple_assign_rhs1 (stmt);
	  if (code == MINUS_EXPR
	      && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
	      && TREE_CODE (op2) == SSA_NAME
	      && (stmt = SSA_NAME_DEF_STMT (op2)) != NULL
	      && gimple_code (stmt) == GIMPLE_ASSIGN
	      && (code = gimple_assign_rhs_code (stmt)) == TRUNC_MOD_EXPR
	      && operand_equal_p (op1, gimple_assign_rhs1 (stmt), 0)
	      && operand_equal_p (bottom, gimple_assign_rhs2 (stmt), 0))
	    return true;
	}

      /* fall through */

    default:
      /* Poly-int constants are handled by the generic multiple test.  */
      if (POLY_INT_CST_P (top) && poly_int_tree_p (bottom))
	return multiple_p (wi::to_poly_widest (top),
			   wi::to_poly_widest (bottom));

      return false;
    }
}
14399 :
/* Return true if expression X cannot be (or contain) a NaN or infinity.
   This function returns true for integer expressions, and returns
   false if uncertain.  */

bool
tree_expr_finite_p (const_tree x)
{
  /* If the mode honors neither NaNs nor infinities, every value of the
     mode is finite by construction.  */
  machine_mode mode = element_mode (x);
  if (!HONOR_NANS (mode) && !HONOR_INFINITIES (mode))
    return true;
  switch (TREE_CODE (x))
    {
    case REAL_CST:
      /* Literal: inspect the value directly.  */
      return real_isfinite (TREE_REAL_CST_PTR (x));
    case COMPLEX_CST:
      /* A complex constant is finite iff both parts are.  */
      return tree_expr_finite_p (TREE_REALPART (x))
	     && tree_expr_finite_p (TREE_IMAGPART (x));
    case FLOAT_EXPR:
      /* Conversion from an integer always yields a finite value.  */
      return true;
    case ABS_EXPR:
    case CONVERT_EXPR:
    case NON_LVALUE_EXPR:
    case NEGATE_EXPR:
    case SAVE_EXPR:
      /* These operations are finite exactly when their operand is.  */
      return tree_expr_finite_p (TREE_OPERAND (x, 0));
    case MIN_EXPR:
    case MAX_EXPR:
      /* Min/max of two finite values is finite.  */
      return tree_expr_finite_p (TREE_OPERAND (x, 0))
	     && tree_expr_finite_p (TREE_OPERAND (x, 1));
    case COND_EXPR:
      /* Finite only if both possible results are finite.  */
      return tree_expr_finite_p (TREE_OPERAND (x, 1))
	     && tree_expr_finite_p (TREE_OPERAND (x, 2));
    case CALL_EXPR:
      switch (get_call_combined_fn (x))
	{
	CASE_CFN_FABS:
	CASE_CFN_FABS_FN:
	  /* fabs preserves finiteness of its argument.  */
	  return tree_expr_finite_p (CALL_EXPR_ARG (x, 0));
	CASE_CFN_FMAX:
	CASE_CFN_FMAX_FN:
	CASE_CFN_FMIN:
	CASE_CFN_FMIN_FN:
	  /* fmax/fmin of two finite arguments is finite.  */
	  return tree_expr_finite_p (CALL_EXPR_ARG (x, 0))
		 && tree_expr_finite_p (CALL_EXPR_ARG (x, 1));
	default:
	  /* Unknown call: be conservative.  */
	  return false;
	}

    default:
      /* Anything else is uncertain.  */
      return false;
    }
}
14452 :
14453 : /* Return true if expression X evaluates to an infinity.
14454 : This function returns false for integer expressions. */
14455 :
14456 : bool
14457 1547049 : tree_expr_infinite_p (const_tree x)
14458 : {
14459 1547499 : if (!HONOR_INFINITIES (x))
14460 : return false;
14461 1547384 : switch (TREE_CODE (x))
14462 : {
14463 0 : case REAL_CST:
14464 0 : return real_isinf (TREE_REAL_CST_PTR (x));
14465 450 : case ABS_EXPR:
14466 450 : case NEGATE_EXPR:
14467 450 : case NON_LVALUE_EXPR:
14468 450 : case SAVE_EXPR:
14469 450 : return tree_expr_infinite_p (TREE_OPERAND (x, 0));
14470 0 : case COND_EXPR:
14471 0 : return tree_expr_infinite_p (TREE_OPERAND (x, 1))
14472 0 : && tree_expr_infinite_p (TREE_OPERAND (x, 2));
14473 : default:
14474 : return false;
14475 : }
14476 : }
14477 :
14478 : /* Return true if expression X could evaluate to an infinity.
14479 : This function returns false for integer expressions, and returns
14480 : true if uncertain. */
14481 :
14482 : bool
14483 319733 : tree_expr_maybe_infinite_p (const_tree x)
14484 : {
14485 319741 : if (!HONOR_INFINITIES (x))
14486 : return false;
14487 319414 : switch (TREE_CODE (x))
14488 : {
14489 177 : case REAL_CST:
14490 177 : return real_isinf (TREE_REAL_CST_PTR (x));
14491 : case FLOAT_EXPR:
14492 : return false;
14493 8 : case ABS_EXPR:
14494 8 : case NEGATE_EXPR:
14495 8 : return tree_expr_maybe_infinite_p (TREE_OPERAND (x, 0));
14496 1 : case COND_EXPR:
14497 1 : return tree_expr_maybe_infinite_p (TREE_OPERAND (x, 1))
14498 1 : || tree_expr_maybe_infinite_p (TREE_OPERAND (x, 2));
14499 : default:
14500 : return true;
14501 : }
14502 : }
14503 :
14504 : /* Return true if expression X evaluates to a signaling NaN.
14505 : This function returns false for integer expressions. */
14506 :
14507 : bool
14508 389 : tree_expr_signaling_nan_p (const_tree x)
14509 : {
14510 389 : if (!HONOR_SNANS (x))
14511 : return false;
14512 124 : switch (TREE_CODE (x))
14513 : {
14514 124 : case REAL_CST:
14515 124 : return real_issignaling_nan (TREE_REAL_CST_PTR (x));
14516 0 : case NON_LVALUE_EXPR:
14517 0 : case SAVE_EXPR:
14518 0 : return tree_expr_signaling_nan_p (TREE_OPERAND (x, 0));
14519 0 : case COND_EXPR:
14520 0 : return tree_expr_signaling_nan_p (TREE_OPERAND (x, 1))
14521 0 : && tree_expr_signaling_nan_p (TREE_OPERAND (x, 2));
14522 : default:
14523 : return false;
14524 : }
14525 : }
14526 :
/* Return true if expression X could evaluate to a signaling NaN.
   This function returns false for integer expressions, and returns
   true if uncertain.  */

bool
tree_expr_maybe_signaling_nan_p (const_tree x)
{
  /* Without sNaN support in the mode no sNaN can appear.  */
  if (!HONOR_SNANS (x))
    return false;
  switch (TREE_CODE (x))
    {
    case REAL_CST:
      /* Literal: inspect the value directly.  */
      return real_issignaling_nan (TREE_REAL_CST_PTR (x));
    case FLOAT_EXPR:
      /* Conversion from an integer never produces a NaN.  */
      return false;
    case ABS_EXPR:
    case CONVERT_EXPR:
    case NEGATE_EXPR:
    case NON_LVALUE_EXPR:
    case SAVE_EXPR:
      /* These pass a possible sNaN through from their operand.  */
      return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 0));
    case MIN_EXPR:
    case MAX_EXPR:
      /* Either operand could be the one delivering the sNaN.  */
      return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 0))
	     || tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 1));
    case COND_EXPR:
      /* Either arm could be selected.  */
      return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 1))
	     || tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 2));
    case CALL_EXPR:
      switch (get_call_combined_fn (x))
	{
	CASE_CFN_FABS:
	CASE_CFN_FABS_FN:
	  return tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 0));
	CASE_CFN_FMAX:
	CASE_CFN_FMAX_FN:
	CASE_CFN_FMIN:
	CASE_CFN_FMIN_FN:
	  return tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 0))
		 || tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 1));
	default:
	  /* Unknown call: conservatively assume it could.  */
	  return true;
	}
    default:
      /* Anything else is uncertain.  */
      return true;
    }
}
14574 :
14575 : /* Return true if expression X evaluates to a NaN.
14576 : This function returns false for integer expressions. */
14577 :
14578 : bool
14579 4384305 : tree_expr_nan_p (const_tree x)
14580 : {
14581 4711767 : if (!HONOR_NANS (x))
14582 : return false;
14583 4711417 : switch (TREE_CODE (x))
14584 : {
14585 3774 : case REAL_CST:
14586 3774 : return real_isnan (TREE_REAL_CST_PTR (x));
14587 327462 : case NON_LVALUE_EXPR:
14588 327462 : case SAVE_EXPR:
14589 327462 : return tree_expr_nan_p (TREE_OPERAND (x, 0));
14590 900 : case COND_EXPR:
14591 900 : return tree_expr_nan_p (TREE_OPERAND (x, 1))
14592 900 : && tree_expr_nan_p (TREE_OPERAND (x, 2));
14593 : default:
14594 : return false;
14595 : }
14596 : }
14597 :
/* Return true if expression X could evaluate to a NaN.
   This function returns false for integer expressions, and returns
   true if uncertain.  */

bool
tree_expr_maybe_nan_p (const_tree x)
{
  /* Without NaN support in the mode no NaN can appear.  */
  if (!HONOR_NANS (x))
    return false;
  switch (TREE_CODE (x))
    {
    case REAL_CST:
      /* Literal: inspect the value directly.  */
      return real_isnan (TREE_REAL_CST_PTR (x));
    case FLOAT_EXPR:
      /* Conversion from an integer never produces a NaN.  */
      return false;
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      /* Arithmetic can only produce a NaN from a NaN operand or from
	 infinities (e.g. inf - inf), so the result is NaN-free when
	 both operands are known finite.  */
      return !tree_expr_finite_p (TREE_OPERAND (x, 0))
	     || !tree_expr_finite_p (TREE_OPERAND (x, 1));
    case ABS_EXPR:
    case CONVERT_EXPR:
    case NEGATE_EXPR:
    case NON_LVALUE_EXPR:
    case SAVE_EXPR:
      /* These pass a possible NaN through from their operand.  */
      return tree_expr_maybe_nan_p (TREE_OPERAND (x, 0));
    case MIN_EXPR:
    case MAX_EXPR:
      /* Either operand could deliver the NaN.  */
      return tree_expr_maybe_nan_p (TREE_OPERAND (x, 0))
	     || tree_expr_maybe_nan_p (TREE_OPERAND (x, 1));
    case COND_EXPR:
      /* Either arm could be selected.  */
      return tree_expr_maybe_nan_p (TREE_OPERAND (x, 1))
	     || tree_expr_maybe_nan_p (TREE_OPERAND (x, 2));
    case CALL_EXPR:
      switch (get_call_combined_fn (x))
	{
	CASE_CFN_FABS:
	CASE_CFN_FABS_FN:
	  return tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 0));
	CASE_CFN_FMAX:
	CASE_CFN_FMAX_FN:
	CASE_CFN_FMIN:
	CASE_CFN_FMIN_FN:
	  return tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 0))
		 || tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 1));
	default:
	  /* Unknown call: conservatively assume it could.  */
	  return true;
	}
    default:
      /* Anything else is uncertain.  */
      return true;
    }
}
14650 :
/* Return true if expression X could evaluate to -0.0.
   This function returns true if uncertain.  */

bool
tree_expr_maybe_real_minus_zero_p (const_tree x)
{
  /* If the mode does not distinguish -0.0 from +0.0 there is nothing
     to worry about.  */
  if (!HONOR_SIGNED_ZEROS (x))
    return false;
  switch (TREE_CODE (x))
    {
    case REAL_CST:
      /* Literal: inspect the value directly.  */
      return REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (x));
    case INTEGER_CST:
    case FLOAT_EXPR:
    case ABS_EXPR:
      /* Integers, int-to-float conversions and absolute values never
	 yield -0.0.  */
      return false;
    case NON_LVALUE_EXPR:
    case SAVE_EXPR:
      /* These wrappers leave the value untouched.  */
      return tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x, 0));
    case COND_EXPR:
      /* Either arm could be selected.  */
      return tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x, 1))
	     || tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x, 2));
    case CALL_EXPR:
      switch (get_call_combined_fn (x))
	{
	CASE_CFN_FABS:
	CASE_CFN_FABS_FN:
	  /* fabs always produces a non-negative sign.  */
	  return false;
	default:
	  break;
	}
      /* FALLTHRU - unknown calls are handled conservatively below.  */
    default:
      break;
    }
  /* Ideally !(tree_expr_nonzero_p (X) || tree_expr_nonnegative_p (X))
   * but currently those predicates require tree and not const_tree.  */
  return true;
}
14689 :
/* The nonnegative_warnv_p helpers below must recurse through RECURSE so
   that the depth limit is accounted for; poison direct calls of
   tree_expr_nonnegative_warnv_p into a compile-time error.  RECURSE
   parenthesizes the function name to suppress the poisoning macro at
   the one place the real function is meant to be called.  */
#define tree_expr_nonnegative_warnv_p(X, Y) \
  _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0

#define RECURSE(X) \
  ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
14695 :
14696 : /* Return true if CODE or TYPE is known to be non-negative. */
14697 :
14698 : static bool
14699 41810411 : tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14700 : {
14701 41810411 : if (!VECTOR_TYPE_P (type)
14702 41791505 : && (TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14703 83600922 : && truth_value_p (code))
14704 : /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14705 : have a signed:1 type (where the value is -1 and 0). */
14706 : return true;
14707 : return false;
14708 : }
14709 :
/* Return true if (CODE OP0) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

bool
tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
				bool *strict_overflow_p, int depth)
{
  /* Unsigned types hold only non-negative values.  */
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case ABS_EXPR:
      /* We can't return 1 if flag_wrapv is set because
	 ABS_EXPR<INT_MIN> = INT_MIN.  */
      if (!ANY_INTEGRAL_TYPE_P (type))
	return true;
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  /* Relies on signed overflow being undefined - record that.  */
	  *strict_overflow_p = true;
	  return true;
	}
      break;

    case NON_LVALUE_EXPR:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      /* These forward the sign of their operand.  */
      return RECURSE (op0);

    CASE_CONVERT:
      {
	tree inner_type = TREE_TYPE (op0);
	tree outer_type = type;

	if (SCALAR_FLOAT_TYPE_P (outer_type))
	  {
	    /* float -> float: sign is preserved.  */
	    if (SCALAR_FLOAT_TYPE_P (inner_type))
	      return RECURSE (op0);
	    /* int -> float: unsigned sources are trivially
	       non-negative; otherwise ask about the operand.  */
	    if (INTEGRAL_TYPE_P (inner_type))
	      {
		if (TYPE_UNSIGNED (inner_type))
		  return true;
		return RECURSE (op0);
	      }
	  }
	else if (INTEGRAL_TYPE_P (outer_type))
	  {
	    /* float -> int: sign follows the operand.  */
	    if (SCALAR_FLOAT_TYPE_P (inner_type))
	      return RECURSE (op0);
	    /* int -> int: a zero-extension from a strictly narrower
	       unsigned type cannot set the sign bit.  */
	    if (INTEGRAL_TYPE_P (inner_type))
	      return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
		      && TYPE_UNSIGNED (inner_type);
	  }
      }
      break;

    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
14775 :
/* Return true if (CODE OP0 OP1) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

bool
tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
				 tree op1, bool *strict_overflow_p,
				 int depth)
{
  /* Unsigned types hold only non-negative values.  */
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      /* For floats, the sum of two non-negative values is non-negative.  */
      if (FLOAT_TYPE_P (type))
	return RECURSE (op0) && RECURSE (op1);

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
	 both unsigned and at least 2 bits shorter than the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && TREE_CODE (op0) == NOP_EXPR
	  && TREE_CODE (op1) == NOP_EXPR)
	{
	  tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
	  tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
	    {
	      /* +1 bit for the carry of the addition.  */
	      unsigned int prec = MAX (TYPE_PRECISION (inner1),
				       TYPE_PRECISION (inner2)) + 1;
	      return prec < TYPE_PRECISION (type);
	    }
	}
      break;

    case MULT_EXPR:
      if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	{
	  /* x * x is always non-negative for floating point x
	     or without overflow.  */
	  if (operand_equal_p (op0, op1, 0)
	      || (RECURSE (op0) && RECURSE (op1)))
	    {
	      /* Only the integral case depends on undefined overflow;
		 record that assumption for the caller's warning.  */
	      if (ANY_INTEGRAL_TYPE_P (type)
		  && TYPE_OVERFLOW_UNDEFINED (type))
		*strict_overflow_p = true;
	      return true;
	    }
	}

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
	 both unsigned and their total bits is shorter than the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
	  && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
	{
	  tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
	    ? TREE_TYPE (TREE_OPERAND (op0, 0))
	    : TREE_TYPE (op0);
	  tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
	    ? TREE_TYPE (TREE_OPERAND (op1, 0))
	    : TREE_TYPE (op1);

	  bool unsigned0 = TYPE_UNSIGNED (inner0);
	  bool unsigned1 = TYPE_UNSIGNED (inner1);

	  /* A non-negative constant operand counts as unsigned here.  */
	  if (TREE_CODE (op0) == INTEGER_CST)
	    unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;

	  if (TREE_CODE (op1) == INTEGER_CST)
	    unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;

	  if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
	      && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
	    {
	      /* For constants use the minimal precision actually needed
		 rather than the declared type precision.  */
	      unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
		? tree_int_cst_min_precision (op0, UNSIGNED)
		: TYPE_PRECISION (inner0);

	      unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
		? tree_int_cst_min_precision (op1, UNSIGNED)
		: TYPE_PRECISION (inner1);

	      return precision0 + precision1 < TYPE_PRECISION (type);
	    }
	}
      return false;

    case BIT_AND_EXPR:
      /* ANDing with one non-negative operand clears the sign bit.  */
      return RECURSE (op0) || RECURSE (op1);

    case MAX_EXPR:
      /* Usually RECURSE (op0) || RECURSE (op1) but NaNs complicate
	 things.  */
      if (tree_expr_maybe_nan_p (op0) || tree_expr_maybe_nan_p (op1))
	return RECURSE (op0) && RECURSE (op1);
      return RECURSE (op0) || RECURSE (op1);

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case MIN_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      /* Non-negative if both operands are.  */
      return RECURSE (op0) && RECURSE (op1);

    case TRUNC_MOD_EXPR:
      /* Truncating modulus takes its sign from the dividend.  */
      return RECURSE (op0);

    case FLOOR_MOD_EXPR:
      /* Flooring modulus takes its sign from the divisor.  */
      return RECURSE (op1);

    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
14902 :
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

bool
tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
{
  /* Unsigned types hold only non-negative values.  */
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case FIXED_CST:
      return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));

    case COND_EXPR:
      /* Both possible results must be non-negative.  */
      return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));

    case SSA_NAME:
      /* Limit the depth of recursion to avoid quadratic behavior.
	 This is expected to catch almost all occurrences in practice.
	 If this code misses important cases that unbounded recursion
	 would not, passes that need this information could be revised
	 to provide it through dataflow propagation.  */
      return (!name_registered_for_update_p (t)
	      && depth < param_max_ssa_name_query_depth
	      && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
						  strict_overflow_p, depth));

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
    }
}
14943 :
/* Return true if a call to FN with result TYPE and arguments ARG0 and
   ARG1 (ARG1 may be NULL_TREE for one-argument functions) is known to
   be non-negative.  If the return value is based on the assumption
   that signed overflow is undefined, set *STRICT_OVERFLOW_P to true;
   otherwise, don't change *STRICT_OVERFLOW_P.  DEPTH is the current
   nesting depth of the query.  */

bool
tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
			       bool *strict_overflow_p, int depth)
{
  switch (fn)
    {
    CASE_CFN_ACOS:
    CASE_CFN_ACOS_FN:
    CASE_CFN_ACOSH:
    CASE_CFN_ACOSH_FN:
    CASE_CFN_ACOSPI:
    CASE_CFN_ACOSPI_FN:
    CASE_CFN_CABS:
    CASE_CFN_CABS_FN:
    CASE_CFN_COSH:
    CASE_CFN_COSH_FN:
    CASE_CFN_ERFC:
    CASE_CFN_ERFC_FN:
    CASE_CFN_EXP:
    CASE_CFN_EXP_FN:
    CASE_CFN_EXP10:
    CASE_CFN_EXP2:
    CASE_CFN_EXP2_FN:
    CASE_CFN_FABS:
    CASE_CFN_FABS_FN:
    CASE_CFN_FDIM:
    CASE_CFN_FDIM_FN:
    CASE_CFN_HYPOT:
    CASE_CFN_HYPOT_FN:
    CASE_CFN_POW10:
    CASE_CFN_FFS:
    CASE_CFN_PARITY:
    CASE_CFN_POPCOUNT:
    CASE_CFN_CLRSB:
    case CFN_BUILT_IN_BSWAP16:
    case CFN_BUILT_IN_BSWAP32:
    case CFN_BUILT_IN_BSWAP64:
    case CFN_BUILT_IN_BSWAP128:
      /* Always true.  */
      return true;

    CASE_CFN_CLZ:
    CASE_CFN_CTZ:
      /* The two-argument forms return the second argument for a zero
	 input, so the result is only known non-negative if that
	 argument is.  */
      if (arg1)
	return RECURSE (arg1);
      return true;

    CASE_CFN_SQRT:
    CASE_CFN_SQRT_FN:
      /* sqrt(-0.0) is -0.0.  */
      if (!HONOR_SIGNED_ZEROS (type))
	return true;
      return RECURSE (arg0);

    CASE_CFN_ASINH:
    CASE_CFN_ASINH_FN:
    CASE_CFN_ASINPI:
    CASE_CFN_ASINPI_FN:
    CASE_CFN_ATAN:
    CASE_CFN_ATAN_FN:
    CASE_CFN_ATANH:
    CASE_CFN_ATANH_FN:
    CASE_CFN_ATANPI:
    CASE_CFN_ATANPI_FN:
    CASE_CFN_CBRT:
    CASE_CFN_CBRT_FN:
    CASE_CFN_CEIL:
    CASE_CFN_CEIL_FN:
    CASE_CFN_ERF:
    CASE_CFN_ERF_FN:
    CASE_CFN_EXPM1:
    CASE_CFN_EXPM1_FN:
    CASE_CFN_FLOOR:
    CASE_CFN_FLOOR_FN:
    CASE_CFN_FMOD:
    CASE_CFN_FMOD_FN:
    CASE_CFN_FREXP:
    CASE_CFN_FREXP_FN:
    CASE_CFN_ICEIL:
    CASE_CFN_IFLOOR:
    CASE_CFN_IRINT:
    CASE_CFN_IROUND:
    CASE_CFN_LCEIL:
    CASE_CFN_LDEXP:
    CASE_CFN_LFLOOR:
    CASE_CFN_LLCEIL:
    CASE_CFN_LLFLOOR:
    CASE_CFN_LLRINT:
    CASE_CFN_LLRINT_FN:
    CASE_CFN_LLROUND:
    CASE_CFN_LLROUND_FN:
    CASE_CFN_LRINT:
    CASE_CFN_LRINT_FN:
    CASE_CFN_LROUND:
    CASE_CFN_LROUND_FN:
    CASE_CFN_MODF:
    CASE_CFN_MODF_FN:
    CASE_CFN_NEARBYINT:
    CASE_CFN_NEARBYINT_FN:
    CASE_CFN_RINT:
    CASE_CFN_RINT_FN:
    CASE_CFN_ROUND:
    CASE_CFN_ROUND_FN:
    CASE_CFN_ROUNDEVEN:
    CASE_CFN_ROUNDEVEN_FN:
    CASE_CFN_SCALB:
    CASE_CFN_SCALBLN:
    CASE_CFN_SCALBLN_FN:
    CASE_CFN_SCALBN:
    CASE_CFN_SCALBN_FN:
    CASE_CFN_SIGNBIT:
    CASE_CFN_SIGNIFICAND:
    CASE_CFN_SINH:
    CASE_CFN_SINH_FN:
    CASE_CFN_TANH:
    CASE_CFN_TANH_FN:
    CASE_CFN_TRUNC:
    CASE_CFN_TRUNC_FN:
      /* True if the 1st argument is nonnegative.  */
      return RECURSE (arg0);

    CASE_CFN_FMAX:
    CASE_CFN_FMAX_FN:
      /* Usually RECURSE (arg0) || RECURSE (arg1) but NaNs complicate
	 things.  In the presence of sNaNs, we're only guaranteed to be
	 non-negative if both operands are non-negative.  In the presence
	 of qNaNs, we're non-negative if either operand is non-negative
	 and can't be a qNaN, or if both operands are non-negative.  */
      if (tree_expr_maybe_signaling_nan_p (arg0)
	  || tree_expr_maybe_signaling_nan_p (arg1))
	return RECURSE (arg0) && RECURSE (arg1);
      return RECURSE (arg0) ? (!tree_expr_maybe_nan_p (arg0)
			       || RECURSE (arg1))
			    : (RECURSE (arg1)
			       && !tree_expr_maybe_nan_p (arg1));

    CASE_CFN_FMIN:
    CASE_CFN_FMIN_FN:
      /* True if the 1st AND 2nd arguments are nonnegative.  */
      return RECURSE (arg0) && RECURSE (arg1);

    CASE_CFN_COPYSIGN:
    CASE_CFN_COPYSIGN_FN:
      /* True if the 2nd argument is nonnegative.  */
      return RECURSE (arg1);

    CASE_CFN_POWI:
      /* True if the 1st argument is nonnegative or the second
	 argument is an even integer.  */
      if (TREE_CODE (arg1) == INTEGER_CST
	  && (TREE_INT_CST_LOW (arg1) & 1) == 0)
	return true;
      return RECURSE (arg0);

    CASE_CFN_POW:
    CASE_CFN_POW_FN:
      /* True if the 1st argument is nonnegative or the second
	 argument is an even integer valued real.  */
      if (TREE_CODE (arg1) == REAL_CST)
	{
	  REAL_VALUE_TYPE c;
	  HOST_WIDE_INT n;

	  /* Round-trip the constant through an integer to verify it is
	     an exactly representable even integer.  */
	  c = TREE_REAL_CST (arg1);
	  n = real_to_integer (&c);
	  if ((n & 1) == 0)
	    {
	      REAL_VALUE_TYPE cint;
	      real_from_integer (&cint, VOIDmode, n, SIGNED);
	      if (real_identical (&c, &cint))
		return true;
	    }
	}
      return RECURSE (arg0);

    default:
      break;
    }
  /* Unknown functions fall back to the generic code/type check.  */
  return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
}
15129 :
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.
   Handles the expression codes not covered by the unary/binary/single
   helpers above.  */

static bool
tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
{
  enum tree_code code = TREE_CODE (t);
  /* Unsigned types hold only non-negative values.  */
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (code)
    {
    case TARGET_EXPR:
      {
	tree temp = TARGET_EXPR_SLOT (t);
	t = TARGET_EXPR_INITIAL (t);

	/* If the initializer is non-void, then it's a normal expression
	   that will be assigned to the slot.  */
	if (!VOID_TYPE_P (TREE_TYPE (t)))
	  return RECURSE (t);

	/* Otherwise, the initializer sets the slot in some way.  One common
	   way is an assignment statement at the end of the initializer.  */
	while (1)
	  {
	    if (TREE_CODE (t) == BIND_EXPR)
	      t = expr_last (BIND_EXPR_BODY (t));
	    else if (TREE_CODE (t) == TRY_FINALLY_EXPR
		     || TREE_CODE (t) == TRY_CATCH_EXPR)
	      t = expr_last (TREE_OPERAND (t, 0));
	    else if (TREE_CODE (t) == STATEMENT_LIST)
	      t = expr_last (t);
	    else
	      break;
	  }
	/* Only an assignment into the slot itself tells us the value.  */
	if (TREE_CODE (t) == MODIFY_EXPR
	    && TREE_OPERAND (t, 0) == temp)
	  return RECURSE (TREE_OPERAND (t, 1));

	return false;
      }

    case CALL_EXPR:
      {
	/* Only the first two arguments are inspected by the call
	   predicate; missing arguments are passed as NULL_TREE.  */
	tree arg0 = call_expr_nargs (t) > 0 ?  CALL_EXPR_ARG (t, 0) : NULL_TREE;
	tree arg1 = call_expr_nargs (t) > 1 ?  CALL_EXPR_ARG (t, 1) : NULL_TREE;

	return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
					      get_call_combined_fn (t),
					      arg0,
					      arg1,
					      strict_overflow_p, depth);
      }
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      /* The value of a sequence/assignment is its second operand.  */
      return RECURSE (TREE_OPERAND (t, 1));

    case BIND_EXPR:
      /* The value of a bind is its last body expression.  */
      return RECURSE (expr_last (TREE_OPERAND (t, 1)));

    case SAVE_EXPR:
      /* A save_expr has the value of its operand.  */
      return RECURSE (TREE_OPERAND (t, 0));

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
    }
}
15200 :
15201 : #undef RECURSE
15202 : #undef tree_expr_nonnegative_warnv_p
15203 :
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.
   This is the entry point; it dispatches by tree code class to the
   unary/binary/single/invalid helpers above.  */

bool
tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
{
  enum tree_code code;
  if (error_operand_p (t))
    return false;

  /* First dispatch on the code's class, which fixes the arity.  */
  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t),
					      TREE_OPERAND (t, 0),
					      TREE_OPERAND (t, 1),
					      strict_overflow_p, depth);

    case tcc_unary:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
					     TREE_TYPE (t),
					     TREE_OPERAND (t, 0),
					     strict_overflow_p, depth);

    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);

    default:
      break;
    }

  /* Codes whose class does not determine the handling are dispatched
     individually.  */
  switch (code)
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t),
					      TREE_OPERAND (t, 0),
					      TREE_OPERAND (t, 1),
					      strict_overflow_p, depth);
    case TRUTH_NOT_EXPR:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
					     TREE_TYPE (t),
					     TREE_OPERAND (t, 0),
					     strict_overflow_p, depth);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);

    default:
      return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
    }
}
15270 :
15271 : /* Return true if `t' is known to be non-negative. Handle warnings
15272 : about undefined signed overflow. */
15273 :
15274 : bool
15275 21665697 : tree_expr_nonnegative_p (tree t)
15276 : {
15277 21665697 : bool ret, strict_overflow_p;
15278 :
15279 21665697 : strict_overflow_p = false;
15280 21665697 : ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
            :   /* The _warnv_p worker records whether its answer relied on signed
            :      overflow being undefined; emit the -Wstrict-overflow style
            :      diagnostic here on the caller's behalf.  */
15281 21665697 : if (strict_overflow_p)
15282 10302 : fold_overflow_warning (("assuming signed overflow does not occur when "
15283 : "determining that expression is always "
15284 : "non-negative"),
15285 : WARN_STRICT_OVERFLOW_MISC);
15286 21665697 : return ret;
15287 : }
15288 :
15289 :
15290 : /* Return true when (CODE OP0) is an address and is known to be nonzero.
15291 : For floating point we further ensure that T is not denormal.
15292 : Similar logic is present in nonzero_address in rtlanal.h.
15293 :
15294 : If the return value is based on the assumption that signed overflow
15295 : is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15296 : change *STRICT_OVERFLOW_P. */
15297 :
15298 : bool
15299 1613081 : tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15300 : bool *strict_overflow_p)
15301 : {
15302 1613081 : switch (code)
15303 : {
            :   /* |x| is nonzero exactly when x is.  */
15304 1 : case ABS_EXPR:
15305 1 : return tree_expr_nonzero_warnv_p (op0,
15306 1 : strict_overflow_p);
15307 :
15308 886898 : case NOP_EXPR:
15309 886898 : {
15310 886898 : tree inner_type = TREE_TYPE (op0);
15311 886898 : tree outer_type = type;
15312 :
            :   /* A widening (or same-width) conversion preserves nonzero-ness;
            :      a narrowing conversion may truncate a nonzero value to zero,
            :      so require outer precision >= inner precision.  */
15313 886898 : return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15314 886898 : && tree_expr_nonzero_warnv_p (op0,
15315 : strict_overflow_p));
15316 : }
            :   /* Unreachable: the braced case above always returns.  */
15317 28113 : break;
15318 :
15319 28113 : case NON_LVALUE_EXPR:
15320 28113 : return tree_expr_nonzero_warnv_p (op0,
15321 28113 : strict_overflow_p);
15322 :
15323 : default:
15324 : break;
15325 : }
15326 :
            :   /* Conservative answer for codes not handled above.  */
15327 : return false;
15328 : }
15329 :
15330 : /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15331 : For floating point we further ensure that T is not denormal.
15332 : Similar logic is present in nonzero_address in rtlanal.h.
15333 :
15334 : If the return value is based on the assumption that signed overflow
15335 : is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15336 : change *STRICT_OVERFLOW_P. */
15337 :
15338 : bool
15339 3056381 : tree_binary_nonzero_warnv_p (enum tree_code code,
15340 : tree type,
15341 : tree op0,
15342 : tree op1, bool *strict_overflow_p)
15343 : {
15344 3056381 : bool sub_strict_overflow_p;
15345 3056381 : switch (code)
15346 : {
15347 473157 : case POINTER_PLUS_EXPR:
15348 473157 : case PLUS_EXPR:
15349 473157 : if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
15350 : {
15351 : /* With the presence of negative values it is hard
15352 : to say something. */
15353 104058 : sub_strict_overflow_p = false;
15354 104058 : if (!tree_expr_nonnegative_warnv_p (op0,
15355 : &sub_strict_overflow_p)
15356 104058 : || !tree_expr_nonnegative_warnv_p (op1,
15357 : &sub_strict_overflow_p))
15358 101684 : return false;
15359 : /* One of operands must be positive and the other non-negative. */
15360 : /* We don't set *STRICT_OVERFLOW_P here: even if this value
15361 : overflows, on a twos-complement machine the sum of two
15362 : nonnegative numbers can never be zero. */
15363 2374 : return (tree_expr_nonzero_warnv_p (op0,
15364 : strict_overflow_p)
15365 2374 : || tree_expr_nonzero_warnv_p (op1,
15366 : strict_overflow_p));
15367 : }
15368 : break;
15369 :
            :   /* nonzero * nonzero is nonzero only if the product cannot wrap
            :      to zero, i.e. when signed overflow is undefined; hence the
            :      answer depends on that assumption and we flag it.  */
15370 20884 : case MULT_EXPR:
15371 20884 : if (TYPE_OVERFLOW_UNDEFINED (type))
15372 : {
15373 458 : if (tree_expr_nonzero_warnv_p (op0,
15374 : strict_overflow_p)
15375 458 : && tree_expr_nonzero_warnv_p (op1,
15376 : strict_overflow_p))
15377 : {
15378 0 : *strict_overflow_p = true;
15379 0 : return true;
15380 : }
15381 : }
15382 : break;
15383 :
            :   /* NOTE(review): when both MIN operands are nonzero this only
            :      propagates the strict-overflow flag and then falls through to
            :      the conservative `return false' at the end.  MIN of two nonzero
            :      values is one of them and hence nonzero, so a `return true;'
            :      looks intended here — confirm against upstream fold-const.cc.  */
15384 12114 : case MIN_EXPR:
15385 12114 : sub_strict_overflow_p = false;
15386 12114 : if (tree_expr_nonzero_warnv_p (op0,
15387 : &sub_strict_overflow_p)
15388 12114 : && tree_expr_nonzero_warnv_p (op1,
15389 : &sub_strict_overflow_p))
15390 : {
15391 0 : if (sub_strict_overflow_p)
15392 0 : *strict_overflow_p = true;
15393 : }
15394 : break;
15395 :
15396 44 : case MAX_EXPR:
15397 44 : sub_strict_overflow_p = false;
15398 44 : if (tree_expr_nonzero_warnv_p (op0,
15399 : &sub_strict_overflow_p))
15400 : {
15401 0 : if (sub_strict_overflow_p)
15402 0 : *strict_overflow_p = true;
15403 :
15404 : /* When both operands are nonzero, then MAX must be too. */
15405 0 : if (tree_expr_nonzero_warnv_p (op1,
15406 : strict_overflow_p))
15407 : return true;
15408 :
15409 : /* MAX where operand 0 is positive is positive. */
15410 0 : return tree_expr_nonnegative_warnv_p (op0,
15411 0 : strict_overflow_p);
15412 : }
15413 : /* MAX where operand 1 is positive is positive. */
15414 44 : else if (tree_expr_nonzero_warnv_p (op1,
15415 : &sub_strict_overflow_p)
15416 44 : && tree_expr_nonnegative_warnv_p (op1,
15417 : &sub_strict_overflow_p))
15418 : {
15419 0 : if (sub_strict_overflow_p)
15420 0 : *strict_overflow_p = true;
15421 0 : return true;
15422 : }
15423 : break;
15424 :
            :   /* x | y has at least the set bits of each operand, so it is
            :      nonzero if either operand is.  */
15425 262220 : case BIT_IOR_EXPR:
15426 262220 : return (tree_expr_nonzero_warnv_p (op1,
15427 : strict_overflow_p)
15428 262220 : || tree_expr_nonzero_warnv_p (op0,
15429 : strict_overflow_p));
15430 :
15431 : default:
15432 : break;
15433 : }
15434 :
15435 : return false;
15436 : }
15437 :
15438 : /* Return true when T is an address and is known to be nonzero.
15439 : For floating point we further ensure that T is not denormal.
15440 : Similar logic is present in nonzero_address in rtlanal.h.
15441 :
15442 : If the return value is based on the assumption that signed overflow
15443 : is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15444 : change *STRICT_OVERFLOW_P. */
15445 :
15446 : bool
15447 151067676 : tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15448 : {
15449 151067676 : bool sub_strict_overflow_p;
15450 151067676 : switch (TREE_CODE (t))
15451 : {
15452 1142203 : case INTEGER_CST:
15453 1142203 : return !integer_zerop (t);
15454 :
15455 11680752 : case ADDR_EXPR:
15456 11680752 : {
15457 11680752 : tree base = TREE_OPERAND (t, 0);
15458 :
15459 11680752 : if (!DECL_P (base))
15460 6024720 : base = get_base_address (base);
15461 :
15462 11680752 : if (base && TREE_CODE (base) == TARGET_EXPR)
15463 774 : base = TARGET_EXPR_SLOT (base);
15464 :
15465 774 : if (!base)
15466 0 : return false;
15467 :
15468 : /* For objects in symbol table check if we know they are non-zero.
15469 : Don't do anything for variables and functions before symtab is built;
15470 : it is quite possible that they will be declared weak later. */
            :   /* maybe_nonzero_address appears to be tri-state: negative means
            :      "unknown", otherwise 0/1 is the definitive answer — the >= 0
            :      test below relies on that.  TODO(review): confirm contract.  */
15471 11680752 : int nonzero_addr = maybe_nonzero_address (base);
15472 11680752 : if (nonzero_addr >= 0)
15473 8961164 : return nonzero_addr;
15474 :
15475 : /* Constants are never weak. */
15476 2719588 : if (CONSTANT_CLASS_P (base))
15477 : return true;
15478 :
15479 : return false;
15480 : }
15481 :
            :   /* A COND_EXPR is nonzero when both of its arms are.  */
15482 36704 : case COND_EXPR:
15483 36704 : sub_strict_overflow_p = false;
15484 36704 : if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15485 : &sub_strict_overflow_p)
15486 36704 : && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15487 : &sub_strict_overflow_p))
15488 : {
15489 1222 : if (sub_strict_overflow_p)
15490 0 : *strict_overflow_p = true;
15491 1222 : return true;
15492 : }
15493 : break;
15494 :
            :   /* For integral SSA names, query range information recorded by
            :      earlier passes.  */
15495 124432405 : case SSA_NAME:
15496 124432405 : if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
15497 : break;
15498 96523127 : return expr_not_equal_to (t, wi::zero (TYPE_PRECISION (TREE_TYPE (t))));
15499 :
15500 : default:
15501 : break;
15502 : }
15503 : return false;
15504 : }
15505 :
            : /* Poison direct calls to integer_valued_real_p inside the helpers
            :    below: any plain call expands to a hard compile error, forcing
            :    recursion to go through RECURSE, which increments DEPTH.  The
            :    parenthesized (integer_valued_real_p) in RECURSE suppresses
            :    macro expansion so the real function is still reachable.  */
15506 : #define integer_valued_real_p(X) \
15507 : _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
15508 :
15509 : #define RECURSE(X) \
15510 : ((integer_valued_real_p) (X, depth + 1))
15511 :
15512 : /* Return true if the floating point result of (CODE OP0) has an
15513 : integer value. We also allow +Inf, -Inf and NaN to be considered
15514 : integer values. Return false for signaling NaN.
15515 :
15516 : DEPTH is the current nesting depth of the query. */
15517 :
15518 : bool
15519 14964 : integer_valued_real_unary_p (tree_code code, tree op0, int depth)
15520 : {
15521 14964 : switch (code)
15522 : {
            :   /* An int-to-float conversion always yields an integer value.  */
15523 : case FLOAT_EXPR:
15524 : return true;
15525 :
15526 1403 : case ABS_EXPR:
15527 1403 : return RECURSE (op0);
15528 :
15529 9841 : CASE_CONVERT:
15530 9841 : {
15531 9841 : tree type = TREE_TYPE (op0);
            :   /* Converting from an integer type trivially gives an integer;
            :      a float-to-float conversion is integral iff its operand is.  */
15532 9841 : if (TREE_CODE (type) == INTEGER_TYPE)
15533 : return true;
15534 9841 : if (SCALAR_FLOAT_TYPE_P (type))
15535 9841 : return RECURSE (op0);
15536 : break;
15537 : }
15538 :
15539 : default:
15540 : break;
15541 : }
15542 : return false;
15543 : }
15544 :
15545 : /* Return true if the floating point result of (CODE OP0 OP1) has an
15546 : integer value. We also allow +Inf, -Inf and NaN to be considered
15547 : integer values. Return false for signaling NaN.
15548 :
15549 : DEPTH is the current nesting depth of the query. */
15550 :
15551 : bool
15552 12986 : integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
15553 : {
15554 12986 : switch (code)
15555 : {
            :   /* These operations map integer operands to integer results
            :      (ignoring overflow to +/-Inf, which is still "integral" by
            :      this predicate's definition).  */
15556 7328 : case PLUS_EXPR:
15557 7328 : case MINUS_EXPR:
15558 7328 : case MULT_EXPR:
15559 7328 : case MIN_EXPR:
15560 7328 : case MAX_EXPR:
15561 7328 : return RECURSE (op0) && RECURSE (op1);
15562 :
15563 : default:
15564 : break;
15565 : }
15566 : return false;
15567 : }
15568 :
15569 : /* Return true if the floating point result of calling FNDECL with arguments
15570 : ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and NaN to be
15571 : considered integer values. Return false for signaling NaN. If FNDECL
15572 : takes fewer than 2 arguments, the remaining ARGn are null.
15573 :
15574 : DEPTH is the current nesting depth of the query. */
15575 :
15576 : bool
15577 1212 : integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
15578 : {
15579 1212 : switch (fn)
15580 : {
            :   /* Rounding builtins produce an integral value regardless of
            :      their argument.  */
15581 : CASE_CFN_CEIL:
15582 : CASE_CFN_CEIL_FN:
15583 : CASE_CFN_FLOOR:
15584 : CASE_CFN_FLOOR_FN:
15585 : CASE_CFN_NEARBYINT:
15586 : CASE_CFN_NEARBYINT_FN:
15587 : CASE_CFN_RINT:
15588 : CASE_CFN_RINT_FN:
15589 : CASE_CFN_ROUND:
15590 : CASE_CFN_ROUND_FN:
15591 : CASE_CFN_ROUNDEVEN:
15592 : CASE_CFN_ROUNDEVEN_FN:
15593 : CASE_CFN_TRUNC:
15594 : CASE_CFN_TRUNC_FN:
15595 : return true;
15596 :
            :   /* fmin/fmax return one of their arguments, so the result is
            :      integral when both arguments are.  */
15597 336 : CASE_CFN_FMIN:
15598 336 : CASE_CFN_FMIN_FN:
15599 336 : CASE_CFN_FMAX:
15600 336 : CASE_CFN_FMAX_FN:
15601 336 : return RECURSE (arg0) && RECURSE (arg1);
15602 :
15603 : default:
15604 : break;
15605 : }
15606 : return false;
15607 : }
15608 :
15609 : /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
15610 : has an integer value. We also allow +Inf, -Inf and NaN to be
15611 : considered integer values. Return false for signaling NaN.
15612 :
15613 : DEPTH is the current nesting depth of the query. */
15614 :
15615 : bool
15616 127327 : integer_valued_real_single_p (tree t, int depth)
15617 : {
15618 127327 : switch (TREE_CODE (t))
15619 : {
            :   /* A literal is integral iff its value has no fractional part
            :      in the constant's mode.  */
15620 2204 : case REAL_CST:
15621 2204 : return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
15622 :
15623 0 : case COND_EXPR:
15624 0 : return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
15625 :
15626 89271 : case SSA_NAME:
15627 : /* Limit the depth of recursion to avoid quadratic behavior.
15628 : This is expected to catch almost all occurrences in practice.
15629 : If this code misses important cases that unbounded recursion
15630 : would not, passes that need this information could be revised
15631 : to provide it through dataflow propagation. */
15632 89271 : return (!name_registered_for_update_p (t)
15633 89271 : && depth < param_max_ssa_name_query_depth
15634 177792 : && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
15635 : depth));
15636 :
15637 : default:
15638 : break;
15639 : }
15640 : return false;
15641 : }
15642 :
15643 : /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
15644 : has an integer value. We also allow +Inf, -Inf and NaN to be
15645 : considered integer values. Return false for signaling NaN.
15646 :
15647 : DEPTH is the current nesting depth of the query. */
15648 :
15649 : static bool
15650 0 : integer_valued_real_invalid_p (tree t, int depth)
15651 : {
15652 0 : switch (TREE_CODE (t))
15653 : {
            :   /* These wrappers evaluate to operand 1; the answer is that
            :      operand's answer.  */
15654 0 : case COMPOUND_EXPR:
15655 0 : case MODIFY_EXPR:
15656 0 : case BIND_EXPR:
15657 0 : return RECURSE (TREE_OPERAND (t, 1));
15658 :
            :   /* A SAVE_EXPR evaluates to its single operand.  */
15659 0 : case SAVE_EXPR:
15660 0 : return RECURSE (TREE_OPERAND (t, 0));
15661 :
15662 : default:
15663 : break;
15664 : }
15665 : return false;
15666 : }
15667 :
            : /* End of the RECURSE-only region: restore the real
            :    integer_valued_real_p name for the definition below.  */
15668 : #undef RECURSE
15669 : #undef integer_valued_real_p
15670 :
15671 : /* Return true if the floating point expression T has an integer value.
15672 : We also allow +Inf, -Inf and NaN to be considered integer values.
15673 : Return false for signaling NaN.
15674 :
15675 : DEPTH is the current nesting depth of the query. */
15676 :
15677 : bool
15678 96187 : integer_valued_real_p (tree t, int depth)
15679 : {
15680 96187 : if (t == error_mark_node)
15681 : return false;
15682 :
15683 96187 : STRIP_ANY_LOCATION_WRAPPER (t);
15684 :
            :   /* Two-stage dispatch: first by tree-code class, then by the
            :      specific codes that don't classify usefully.  */
15685 96187 : tree_code code = TREE_CODE (t)
15686 96187 : switch (TREE_CODE_CLASS (code))
15687 : {
15688 0 : case tcc_binary:
15689 0 : case tcc_comparison:
15690 0 : return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
15691 0 : TREE_OPERAND (t, 1), depth);
15692 :
15693 0 : case tcc_unary:
15694 0 : return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
15695 :
15696 8397 : case tcc_constant:
15697 8397 : case tcc_declaration:
15698 8397 : case tcc_reference:
15699 8397 : return integer_valued_real_single_p (t, depth);
15700 :
15701 87790 : default:
15702 87790 : break;
15703 : }
15704 :
15705 87790 : switch (code)
15706 : {
15707 87790 : case COND_EXPR:
15708 87790 : case SSA_NAME:
15709 87790 : return integer_valued_real_single_p (t, depth);
15710 :
15711 0 : case CALL_EXPR:
15712 0 : {
            :   /* Pass the first two arguments if present; the callee expects
            :      NULL_TREE for missing ones.  */
15713 0 : tree arg0 = (call_expr_nargs (t) > 0
15714 0 : ? CALL_EXPR_ARG (t, 0)
15715 0 : : NULL_TREE);
15716 0 : tree arg1 = (call_expr_nargs (t) > 1
15717 0 : ? CALL_EXPR_ARG (t, 1)
15718 0 : : NULL_TREE);
15719 0 : return integer_valued_real_call_p (get_call_combined_fn (t),
15720 0 : arg0, arg1, depth);
15721 : }
15722 :
15723 0 : default:
15724 0 : return integer_valued_real_invalid_p (t, depth);
15725 : }
15726 : }
15727 :
15728 : /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15729 : attempt to fold the expression to a constant without modifying TYPE,
15730 : OP0 or OP1.
15731 :
15732 : If the expression could be simplified to a constant, then return
15733 : the constant. If the expression would not be simplified to a
15734 : constant, then return NULL_TREE. */
15735 :
15736 : tree
15737 15700210 : fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15738 : {
            :   /* Accept the fold result only when it simplified all the way to
            :      something TREE_CONSTANT; otherwise report failure.  */
15739 15700210 : tree tem = fold_binary (code, type, op0, op1);
15740 15700210 : return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15741 : }
15742 :
15743 : /* Given the components of a unary expression CODE, TYPE and OP0,
15744 : attempt to fold the expression to a constant without modifying
15745 : TYPE or OP0.
15746 :
15747 : If the expression could be simplified to a constant, then return
15748 : the constant. If the expression would not be simplified to a
15749 : constant, then return NULL_TREE. */
15750 :
15751 : tree
15752 0 : fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15753 : {
            :   /* Unary analogue of fold_binary_to_constant: NULL_TREE unless the
            :      fold produced a TREE_CONSTANT node.  */
15754 0 : tree tem = fold_unary (code, type, op0);
15755 0 : return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15756 : }
15757 :
15758 : /* If EXP represents referencing an element in a constant string
15759 : (either via pointer arithmetic or array indexing), return the
15760 : tree representing the value accessed, otherwise return NULL. */
15761 :
15762 : tree
15763 218167077 : fold_read_from_constant_string (tree exp)
15764 : {
15765 218167077 : if ((INDIRECT_REF_P (exp)
15766 218167058 : || TREE_CODE (exp) == ARRAY_REF)
15767 230561998 : && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15768 : {
15769 9217899 : tree exp1 = TREE_OPERAND (exp, 0);
15770 9217899 : tree index;
15771 9217899 : tree string;
15772 9217899 : location_t loc = EXPR_LOCATION (exp);
15773 :
15774 9217899 : if (INDIRECT_REF_P (exp))
15775 0 : string = string_constant (exp1, &index, NULL, NULL);
15776 : else
15777 : {
15778 9217899 : tree low_bound = array_ref_low_bound (exp);
15779 9217899 : index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15780 :
15781 : /* Optimize the special-case of a zero lower bound.
15782 :
15783 : We convert the low_bound to sizetype to avoid some problems
15784 : with constant folding. (E.g. suppose the lower bound is 1,
15785 : and its mode is QI. Without the conversion, (ARRAY
15786 : +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15787 : +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15788 9217899 : if (! integer_zerop (low_bound))
15789 149609 : index = size_diffop_loc (loc, index,
15790 : fold_convert_loc (loc, sizetype, low_bound))
15791 :
15792 : string = exp1;
15793 : }
15794 :
            :   /* Only fold when the accessed element really is a single byte of
            :      a STRING_CST and the index is a known in-bounds constant.  */
15795 9217899 : scalar_int_mode char_mode;
15796 9217899 : if (string
15797 9217899 : && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15798 9217899 : && TREE_CODE (string) == STRING_CST
15799 146542 : && tree_fits_uhwi_p (index)
15800 142628 : && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15801 9360346 : && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))),
15802 : &char_mode)
15803 18435798 : && GET_MODE_SIZE (char_mode) == 1)
15804 282358 : return build_int_cst_type (TREE_TYPE (exp),
15805 141179 : (TREE_STRING_POINTER (string)
15806 141179 : [TREE_INT_CST_LOW (index)]));
15807 : }
15808 : return NULL;
15809 : }
15810 :
15811 : /* Folds a read from vector element at IDX of vector ARG. */
15812 :
15813 : tree
15814 5805 : fold_read_from_vector (tree arg, poly_uint64 idx)
15815 : {
15816 5805 : unsigned HOST_WIDE_INT i;
            :   /* Require a compile-time constant index known to be in range.  */
15817 5805 : if (known_lt (idx, TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)))
15818 5805 : && known_ge (idx, 0u)
15819 5805 : && idx.is_constant (&i))
15820 : {
15821 5805 : if (TREE_CODE (arg) == VECTOR_CST)
15822 1974 : return VECTOR_CST_ELT (arg, i);
15823 3831 : else if (TREE_CODE (arg) == CONSTRUCTOR)
15824 : {
            :   /* Give up on constructors built from sub-vectors, where element
            :      I does not correspond to constructor element I.  */
15825 1748 : if (CONSTRUCTOR_NELTS (arg)
15826 1708 : && VECTOR_TYPE_P (TREE_TYPE (CONSTRUCTOR_ELT (arg, 0)->value)))
15827 : return NULL_TREE;
            :   /* Elements beyond the explicit initializers are implicitly
            :      zero.  */
15828 1746 : if (i >= CONSTRUCTOR_NELTS (arg))
15829 40 : return build_zero_cst (TREE_TYPE (TREE_TYPE (arg)));
15830 1706 : return CONSTRUCTOR_ELT (arg, i)->value;
15831 : }
15832 : }
15833 : return NULL_TREE;
15834 : }
15835 :
15836 : /* Return the tree for neg (ARG0) when ARG0 is known to be either
15837 : an integer constant, real, or fixed-point constant.
15838 :
15839 : TYPE is the type of the result. */
15840 :
15841 : static tree
15842 32299284 : fold_negate_const (tree arg0, tree type)
15843 : {
15844 32299284 : tree t = NULL_TREE;
15845 :
15846 32299284 : switch (TREE_CODE (arg0))
15847 : {
15848 1988825 : case REAL_CST:
15849 1988825 : t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15850 1988825 : break;
15851 :
15852 0 : case FIXED_CST:
15853 0 : {
15854 0 : FIXED_VALUE_TYPE f;
15855 0 : bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15856 0 : &(TREE_FIXED_CST (arg0)), NULL,
15857 0 : TYPE_SATURATING (type));
15858 0 : t = build_fixed (type, f);
15859 : /* Propagate overflow flags. */
15860 0 : if (overflow_p | TREE_OVERFLOW (arg0))
15861 0 : TREE_OVERFLOW (t) = 1;
15862 0 : break;
15863 : }
15864 :
            :   /* The default arm handles INTEGER_CST and POLY_INT_CST via the
            :      poly-int machinery; negating the minimum signed value overflows
            :      and is recorded on the result through force_fit_type.  */
15865 30310459 : default:
15866 30310459 : if (poly_int_tree_p (arg0))
15867 : {
15868 30310459 : wi::overflow_type overflow;
15869 30310459 : poly_wide_int res = wi::neg (wi::to_poly_wide (arg0), &overflow);
15870 30310459 : t = force_fit_type (type, res, 1,
15871 270738 : (overflow && ! TYPE_UNSIGNED (type))
15872 30571077 : || TREE_OVERFLOW (arg0));
15873 30310459 : break;
15874 30310459 : }
15875 :
            :   /* Callers guarantee ARG0 is one of the constant kinds above.  */
15876 0 : gcc_unreachable ();
15877 : }
15878 :
15879 32299284 : return t;
15880 : }
15881 :
15882 : /* Return the tree for abs (ARG0) when ARG0 is known to be either
15883 : an integer constant or real constant.
15884 :
15885 : TYPE is the type of the result. */
15886 :
15887 : tree
15888 34564 : fold_abs_const (tree arg0, tree type)
15889 : {
15890 34564 : tree t = NULL_TREE;
15891 :
15892 34564 : switch (TREE_CODE (arg0))
15893 : {
15894 7007 : case INTEGER_CST:
15895 7007 : {
15896 : /* If the value is unsigned or non-negative, then the absolute value
15897 : is the same as the ordinary value. */
15898 7007 : wide_int val = wi::to_wide (arg0);
15899 7007 : wi::overflow_type overflow = wi::OVF_NONE;
15900 7007 : if (!wi::neg_p (val, TYPE_SIGN (TREE_TYPE (arg0))))
15901 : ;
15902 :
15903 : /* If the value is negative, then the absolute value is
15904 : its negation. */
            :   /* Note: negating the most-negative value overflows; the flag is
            :      captured here and folded into TREE_OVERFLOW below.  */
15905 : else
15906 3112 : val = wi::neg (val, &overflow);
15907 :
15908 : /* Force to the destination type, set TREE_OVERFLOW for signed
15909 : TYPE only. */
15910 7007 : t = force_fit_type (type, val, 1, overflow | TREE_OVERFLOW (arg0));
15911 7007 : }
15912 7007 : break;
15913 :
15914 27557 : case REAL_CST:
15915 27557 : if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15916 7338 : t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15917 : else
15918 : t = arg0;
15919 : break;
15920 :
15921 0 : default:
15922 0 : gcc_unreachable ();
15923 : }
15924 :
15925 34564 : return t;
15926 : }
15927 :
15928 : /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15929 : constant. TYPE is the type of the result. */
15930 :
15931 : static tree
15932 2337267 : fold_not_const (const_tree arg0, tree type)
15933 : {
15934 2337267 : gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15935 :
            :   /* Bitwise complement cannot overflow; just refit the inverted
            :      bits into TYPE, propagating any pre-existing overflow flag.  */
15936 2337267 : return force_fit_type (type, ~wi::to_wide (arg0), 0, TREE_OVERFLOW (arg0));
15937 : }
15938 :
15939 : /* Given CODE, a relational operator, the target type, TYPE and two
15940 : constant operands OP0 and OP1, return the result of the
15941 : relational operation. If the result is not a compile time
15942 : constant, then return NULL_TREE. */
15943 :
15944 : static tree
15945 72536469 : fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15946 : {
15947 72536469 : int result, invert;
15948 :
15949 : /* From here on, the only cases we handle are when the result is
15950 : known to be a constant. */
15951 :
15952 72536469 : if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15953 : {
15954 1189788 : const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15955 1189788 : const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15956 :
15957 : /* Handle the cases where either operand is a NaN. */
15958 1189788 : if (real_isnan (c0) || real_isnan (c1))
15959 : {
15960 13300 : switch (code)
15961 : {
15962 : case EQ_EXPR:
15963 : case ORDERED_EXPR:
15964 : result = 0;
15965 : break;
15966 :
15967 : case NE_EXPR:
15968 : case UNORDERED_EXPR:
15969 : case UNLT_EXPR:
15970 : case UNLE_EXPR:
15971 : case UNGT_EXPR:
15972 : case UNGE_EXPR:
15973 : case UNEQ_EXPR:
15974 6558 : result = 1;
15975 : break;
15976 :
            :   /* Signaling comparisons against a NaN would raise an exception;
            :      don't fold them away when -ftrapping-math is in effect.  */
15977 6761 : case LT_EXPR:
15978 6761 : case LE_EXPR:
15979 6761 : case GT_EXPR:
15980 6761 : case GE_EXPR:
15981 6761 : case LTGT_EXPR:
15982 6761 : if (flag_trapping_math)
15983 : return NULL_TREE;
15984 : result = 0;
15985 : break;
15986 :
15987 0 : default:
15988 0 : gcc_unreachable ();
15989 : }
15990 :
15991 6558 : return constant_boolean_node (result, type);
15992 : }
15993 :
15994 1176488 : return constant_boolean_node (real_compare (code, c0, c1), type);
15995 : }
15996 :
15997 71346681 : if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15998 : {
15999 0 : const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
16000 0 : const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
16001 0 : return constant_boolean_node (fixed_compare (code, c0, c1), type);
16002 : }
16003 :
16004 : /* Handle equality/inequality of complex constants. */
            :   /* Complex EQ is the conjunction of the part-wise comparisons,
            :      NE the disjunction; other relations on complex are not folded.  */
16005 71346681 : if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
16006 : {
16007 58578 : tree rcond = fold_relational_const (code, type,
16008 29289 : TREE_REALPART (op0),
16009 29289 : TREE_REALPART (op1));
16010 117156 : tree icond = fold_relational_const (code, type,
16011 29289 : TREE_IMAGPART (op0),
16012 29289 : TREE_IMAGPART (op1));
16013 29289 : if (code == EQ_EXPR)
16014 307 : return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
16015 28982 : else if (code == NE_EXPR)
16016 28982 : return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
16017 : else
16018 : return NULL_TREE;
16019 : }
16020 :
16021 71317392 : if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
16022 : {
16023 9222 : if (!VECTOR_TYPE_P (type))
16024 : {
16025 : /* Have vector comparison with scalar boolean result. */
16026 213 : gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
16027 : && known_eq (VECTOR_CST_NELTS (op0),
16028 : VECTOR_CST_NELTS (op1)));
16029 213 : unsigned HOST_WIDE_INT nunits;
16030 213 : if (!VECTOR_CST_NELTS (op0).is_constant (&nunits))
16031 : return NULL_TREE;
            :   /* Element-wise: any mismatching lane decides the whole
            :      comparison.  */
16032 948 : for (unsigned i = 0; i < nunits; i++)
16033 : {
16034 848 : tree elem0 = VECTOR_CST_ELT (op0, i);
16035 848 : tree elem1 = VECTOR_CST_ELT (op1, i);
16036 848 : tree tmp = fold_relational_const (EQ_EXPR, type, elem0, elem1);
16037 848 : if (tmp == NULL_TREE)
16038 : return NULL_TREE;
16039 848 : if (integer_zerop (tmp))
16040 113 : return constant_boolean_node (code == NE_EXPR, type);
16041 : }
16042 100 : return constant_boolean_node (code == EQ_EXPR, type);
16043 : }
            :   /* Vector result: build a lane-wise mask of 0 / -1 values.  */
16044 9009 : tree_vector_builder elts;
16045 9009 : if (!elts.new_binary_operation (type, op0, op1, false))
16046 : return NULL_TREE;
16047 9009 : unsigned int count = elts.encoded_nelts ();
16048 47334 : for (unsigned i = 0; i < count; i++)
16049 : {
16050 38325 : tree elem_type = TREE_TYPE (type);
16051 38325 : tree elem0 = VECTOR_CST_ELT (op0, i);
16052 38325 : tree elem1 = VECTOR_CST_ELT (op1, i);
16053 :
16054 38325 : tree tem = fold_relational_const (code, elem_type,
16055 : elem0, elem1);
16056 :
16057 38325 : if (tem == NULL_TREE)
16058 : return NULL_TREE;
16059 :
16060 38325 : elts.quick_push (build_int_cst (elem_type,
16061 56978 : integer_zerop (tem) ? 0 : -1));
16062 : }
16063 :
16064 9009 : return elts.build ();
16065 9009 : }
16066 :
16067 : /* From here on we only handle LT, LE, GT, GE, EQ and NE.
16068 :
16069 : To compute GT, swap the arguments and do LT.
16070 : To compute GE, do LT and invert the result.
16071 : To compute LE, swap the arguments, do LT and invert the result.
16072 : To compute NE, do EQ and invert the result.
16073 :
16074 : Therefore, the code below must handle only EQ and LT. */
16075 :
16076 71308170 : if (code == LE_EXPR || code == GT_EXPR)
16077 : {
16078 13904405 : std::swap (op0, op1);
16079 13904405 : code = swap_tree_comparison (code);
16080 : }
16081 :
16082 : /* Note that it is safe to invert for real values here because we
16083 : have already handled the one case that it matters. */
16084 :
16085 71308170 : invert = 0;
16086 71308170 : if (code == NE_EXPR || code == GE_EXPR)
16087 : {
16088 32695932 : invert = 1;
16089 32695932 : code = invert_tree_comparison (code, false);
16090 : }
16091 :
16092 : /* Compute a result for LT or EQ if args permit;
16093 : Otherwise return T. */
16094 71308170 : if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
16095 : {
16096 71286652 : if (code == EQ_EXPR)
16097 34470949 : result = tree_int_cst_equal (op0, op1);
16098 : else
16099 36815703 : result = tree_int_cst_lt (op0, op1);
16100 : }
16101 : else
16102 : return NULL_TREE;
16103 :
16104 71286652 : if (invert)
16105 32694659 : result ^= 1;
16106 71286652 : return constant_boolean_node (result, type);
16107 : }
16108 :
16109 : /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
16110 : indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
16111 : itself. */
16112 :
16113 : tree
16114 168910049 : fold_build_cleanup_point_expr (tree type, tree expr)
16115 : {
16116 : /* If the expression does not have side effects then we don't have to wrap
16117 : it with a cleanup point expression. */
16118 168910049 : if (!TREE_SIDE_EFFECTS (expr))
16119 : return expr;
16120 :
16121 : /* If the expression is a return, check to see if the expression inside the
16122 : return has no side effects or the right hand side of the modify expression
16123 : inside the return. If either don't have side effects set we don't need to
16124 : wrap the expression in a cleanup point expression. Note we don't check the
16125 : left hand side of the modify because it should always be a return decl. */
16126 140996725 : if (TREE_CODE (expr) == RETURN_EXPR)
16127 : {
16128 50848430 : tree op = TREE_OPERAND (expr, 0);
16129 50848430 : if (!op || !TREE_SIDE_EFFECTS (op))
16130 : return expr;
            :   /* OP is a MODIFY_EXPR here; inspect its right-hand side.  */
16131 49558658 : op = TREE_OPERAND (op, 1);
16132 49558658 : if (!TREE_SIDE_EFFECTS (op))
16133 : return expr;
16134 : }
16135 :
16136 114443752 : return build1_loc (EXPR_LOCATION (expr), CLEANUP_POINT_EXPR, type, expr);
16137 : }
16138 :
16139 : /* Given a pointer value OP0 and a type TYPE, return a simplified version
16140 : of an indirection through OP0, or NULL_TREE if no simplification is
16141 : possible. */
16142 :
16143 : tree
16144 24505750 : fold_indirect_ref_1 (location_t loc, tree type, tree op0)
16145 : {
16146 24505750 : tree sub = op0;
16147 24505750 : tree subtype;
16148 24505750 : poly_uint64 const_op01;
16149 :
16150 24505750 : STRIP_NOPS (sub);
16151 24505750 : subtype = TREE_TYPE (sub);
16152 24505750 : if (!POINTER_TYPE_P (subtype)
16153 24505750 : || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0)))
16154 : return NULL_TREE;
16155 :
16156 24359189 : if (TREE_CODE (sub) == ADDR_EXPR)
16157 : {
16158 5979138 : tree op = TREE_OPERAND (sub, 0);
16159 5979138 : tree optype = TREE_TYPE (op);
16160 :
16161 : /* *&CONST_DECL -> to the value of the const decl. */
16162 5979138 : if (TREE_CODE (op) == CONST_DECL)
16163 3018 : return DECL_INITIAL (op);
16164 : /* *&p => p; make sure to handle *&"str"[cst] here. */
16165 5976120 : if (type == optype)
16166 : {
16167 4805506 : tree fop = fold_read_from_constant_string (op);
16168 4805506 : if (fop)
16169 : return fop;
16170 : else
16171 4761614 : return op;
16172 : }
16173 : /* *(foo *)&fooarray => fooarray[0] */
16174 1170614 : else if (TREE_CODE (optype) == ARRAY_TYPE
16175 13577 : && type == TREE_TYPE (optype)
16176 1183046 : && (!in_gimple_form
16177 2955 : || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16178 : {
16179 12432 : tree type_domain = TYPE_DOMAIN (optype);
16180 12432 : tree min_val = size_zero_node;
16181 12432 : if (type_domain && TYPE_MIN_VALUE (type_domain))
16182 12394 : min_val = TYPE_MIN_VALUE (type_domain);
16183 12432 : if (in_gimple_form
16184 2955 : && TREE_CODE (min_val) != INTEGER_CST)
16185 : return NULL_TREE;
16186 12432 : return build4_loc (loc, ARRAY_REF, type, op, min_val,
16187 12432 : NULL_TREE, NULL_TREE);
16188 : }
16189 : /* *(foo *)&complexfoo => __real__ complexfoo */
16190 1158182 : else if (TREE_CODE (optype) == COMPLEX_TYPE
16191 1158182 : && type == TREE_TYPE (optype))
16192 0 : return fold_build1_loc (loc, REALPART_EXPR, type, op);
16193 : /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16194 1158182 : else if (VECTOR_TYPE_P (optype)
16195 1158182 : && type == TREE_TYPE (optype))
16196 : {
16197 70 : tree part_width = TYPE_SIZE (type);
16198 70 : tree index = bitsize_int (0);
16199 70 : return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width,
16200 70 : index);
16201 : }
16202 : }
16203 :
16204 19538163 : if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16205 19538163 : && poly_int_tree_p (TREE_OPERAND (sub, 1), &const_op01))
16206 : {
16207 259593 : tree op00 = TREE_OPERAND (sub, 0);
16208 259593 : tree op01 = TREE_OPERAND (sub, 1);
16209 :
16210 259593 : STRIP_NOPS (op00);
16211 259593 : if (TREE_CODE (op00) == ADDR_EXPR)
16212 : {
16213 2201 : tree op00type;
16214 2201 : op00 = TREE_OPERAND (op00, 0);
16215 2201 : op00type = TREE_TYPE (op00);
16216 :
16217 : /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16218 2201 : if (VECTOR_TYPE_P (op00type)
16219 240 : && type == TREE_TYPE (op00type)
16220 : /* POINTER_PLUS_EXPR second operand is sizetype, unsigned,
16221 : but we want to treat offsets with MSB set as negative.
16222 : For the code below negative offsets are invalid and
16223 : TYPE_SIZE of the element is something unsigned, so
16224 : check whether op01 fits into poly_int64, which implies
16225 : it is from 0 to INTTYPE_MAXIMUM (HOST_WIDE_INT), and
16226 : then just use poly_uint64 because we want to treat the
16227 : value as unsigned. */
16228 2394 : && tree_fits_poly_int64_p (op01))
16229 : {
16230 179 : tree part_width = TYPE_SIZE (type);
16231 179 : poly_uint64 max_offset
16232 179 : = (tree_to_uhwi (part_width) / BITS_PER_UNIT
16233 179 : * TYPE_VECTOR_SUBPARTS (op00type));
16234 179 : if (known_lt (const_op01, max_offset))
16235 : {
16236 179 : tree index = bitsize_int (const_op01 * BITS_PER_UNIT);
16237 179 : return fold_build3_loc (loc,
16238 : BIT_FIELD_REF, type, op00,
16239 179 : part_width, index);
16240 : }
16241 : }
16242 : /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16243 2022 : else if (TREE_CODE (op00type) == COMPLEX_TYPE
16244 2022 : && type == TREE_TYPE (op00type))
16245 : {
16246 0 : if (known_eq (wi::to_poly_offset (TYPE_SIZE_UNIT (type)),
16247 : const_op01))
16248 0 : return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16249 : }
16250 : /* ((foo *)&fooarray)[1] => fooarray[1] */
16251 2022 : else if (TREE_CODE (op00type) == ARRAY_TYPE
16252 2022 : && type == TREE_TYPE (op00type))
16253 : {
16254 722 : tree type_domain = TYPE_DOMAIN (op00type);
16255 722 : tree min_val = size_zero_node;
16256 722 : if (type_domain && TYPE_MIN_VALUE (type_domain))
16257 721 : min_val = TYPE_MIN_VALUE (type_domain);
16258 722 : poly_uint64 type_size, index;
16259 722 : if (poly_int_tree_p (min_val)
16260 722 : && poly_int_tree_p (TYPE_SIZE_UNIT (type), &type_size)
16261 722 : && multiple_p (const_op01, type_size, &index))
16262 : {
16263 722 : poly_offset_int off = index + wi::to_poly_offset (min_val);
16264 722 : op01 = wide_int_to_tree (sizetype, off);
16265 722 : return build4_loc (loc, ARRAY_REF, type, op00, op01,
16266 : NULL_TREE, NULL_TREE);
16267 : }
16268 : }
16269 : }
16270 : }
16271 :
16272 : /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16273 19537262 : if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16274 666106 : && type == TREE_TYPE (TREE_TYPE (subtype))
16275 19540288 : && (!in_gimple_form
16276 12 : || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16277 : {
16278 3025 : tree type_domain;
16279 3025 : tree min_val = size_zero_node;
16280 3025 : sub = build_fold_indirect_ref_loc (loc, sub);
16281 3025 : type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16282 3025 : if (type_domain && TYPE_MIN_VALUE (type_domain))
16283 3025 : min_val = TYPE_MIN_VALUE (type_domain);
16284 3025 : if (in_gimple_form
16285 11 : && TREE_CODE (min_val) != INTEGER_CST)
16286 : return NULL_TREE;
16287 3025 : return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16288 3025 : NULL_TREE);
16289 : }
16290 :
16291 : return NULL_TREE;
16292 : }
16293 :
16294 : /* Builds an expression for an indirection through T, simplifying some
16295 : cases. */
16296 :
16297 : tree
16298 14040184 : build_fold_indirect_ref_loc (location_t loc, tree t)
16299 : {
16300 14040184 : tree type = TREE_TYPE (TREE_TYPE (t));
16301 14040184 : tree sub = fold_indirect_ref_1 (loc, type, t);
16302 :
16303 14040184 : if (sub)
16304 : return sub;
16305 :
16306 9238432 : return build1_loc (loc, INDIRECT_REF, type, t);
16307 : }
16308 :
16309 : /* Given an INDIRECT_REF T, return either T or a simplified version. */
16310 :
16311 : tree
16312 10136063 : fold_indirect_ref_loc (location_t loc, tree t)
16313 : {
16314 10136063 : tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16315 :
16316 10136063 : if (sub)
16317 : return sub;
16318 : else
16319 10115306 : return t;
16320 : }
16321 :
16322 : /* Strip non-trapping, non-side-effecting tree nodes from an expression
16323 : whose result is ignored. The type of the returned tree need not be
16324 : the same as the original expression. */
16325 :
16326 : tree
16327 142236 : fold_ignored_result (tree t)
16328 : {
16329 142236 : if (!TREE_SIDE_EFFECTS (t))
16330 16994 : return integer_zero_node;
16331 :
16332 165414 : for (;;)
16333 165414 : switch (TREE_CODE_CLASS (TREE_CODE (t)))
16334 : {
16335 3838 : case tcc_unary:
16336 3838 : t = TREE_OPERAND (t, 0);
16337 3838 : break;
16338 :
16339 5019 : case tcc_binary:
16340 5019 : case tcc_comparison:
16341 5019 : if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16342 3129 : t = TREE_OPERAND (t, 0);
16343 1890 : else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16344 18 : t = TREE_OPERAND (t, 1);
16345 : else
16346 : return t;
16347 : break;
16348 :
16349 108794 : case tcc_expression:
16350 108794 : switch (TREE_CODE (t))
16351 : {
16352 33172 : case COMPOUND_EXPR:
16353 33172 : if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16354 : return t;
16355 32893 : t = TREE_OPERAND (t, 0);
16356 32893 : break;
16357 :
16358 381 : case COND_EXPR:
16359 381 : if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16360 381 : || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16361 : return t;
16362 294 : t = TREE_OPERAND (t, 0);
16363 294 : break;
16364 :
16365 : default:
16366 : return t;
16367 : }
16368 : break;
16369 :
16370 : default:
16371 : return t;
16372 : }
16373 : }
16374 :
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.  */

tree
round_up_loc (location_t loc, tree value, unsigned int divisor)
{
  tree div = NULL_TREE;

  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (pow2_or_zerop (divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
	{
	  wide_int val = wi::to_wide (value);
	  bool overflow_p;

	  /* Already aligned: nothing to do.  */
	  if ((val & (divisor - 1)) == 0)
	    return value;

	  /* Round up as (VALUE + DIVISOR - 1) & -DIVISOR.  The cast of
	     -DIVISOR to int sign-extends the mask to the full wide_int
	     width.  A zero result here means the addition wrapped, so
	     flag the overflow.  */
	  overflow_p = TREE_OVERFLOW (value);
	  val += divisor - 1;
	  val &= (int) -divisor;
	  if (val == 0)
	    overflow_p = true;

	  return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
	}
      else
	{
	  tree t;

	  /* Build the same (VALUE + (DIVISOR - 1)) & -DIVISOR
	     computation as a tree for a non-constant VALUE.  */
	  t = build_int_cst (TREE_TYPE (value), divisor - 1);
	  value = size_binop_loc (loc, PLUS_EXPR, value, t);
	  t = build_int_cst (TREE_TYPE (value), - (int) divisor);
	  value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
	}
    }
  else
    {
      /* Non-power-of-two divisor: round up by a ceiling division
	 followed by a multiplication.  */
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
16436 :
16437 : /* Likewise, but round down. */
16438 :
16439 : tree
16440 22605322 : round_down_loc (location_t loc, tree value, int divisor)
16441 : {
16442 22605322 : tree div = NULL_TREE;
16443 :
16444 22605322 : gcc_assert (divisor > 0);
16445 22605322 : if (divisor == 1)
16446 : return value;
16447 :
16448 : /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16449 : have to do anything. Only do this when we are not given a const,
16450 : because in that case, this check is more expensive than just
16451 : doing it. */
16452 22605322 : if (TREE_CODE (value) != INTEGER_CST)
16453 : {
16454 0 : div = build_int_cst (TREE_TYPE (value), divisor);
16455 :
16456 0 : if (multiple_of_p (TREE_TYPE (value), value, div))
16457 : return value;
16458 : }
16459 :
16460 : /* If divisor is a power of two, simplify this to bit manipulation. */
16461 22605322 : if (pow2_or_zerop (divisor))
16462 : {
16463 22605322 : tree t;
16464 :
16465 22605322 : t = build_int_cst (TREE_TYPE (value), -divisor);
16466 22605322 : value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16467 : }
16468 : else
16469 : {
16470 0 : if (!div)
16471 0 : div = build_int_cst (TREE_TYPE (value), divisor);
16472 0 : value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16473 0 : value = size_binop_loc (loc, MULT_EXPR, value, div);
16474 : }
16475 :
16476 : return value;
16477 : }
16478 :
/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  *PBITPOS receives the constant part of the
   offset in bits; *POFFSET receives any remaining symbolic byte offset
   (or NULL_TREE if the offset is fully constant).  */

static tree
split_address_to_core_and_offset (tree exp,
				  poly_int64 *pbitpos, tree *poffset)
{
  tree core;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  poly_int64 bitsize;
  location_t loc = EXPR_LOCATION (exp);

  /* Look through an SSA name defined by an ADDR_EXPR assignment so the
     address can be decomposed below.  */
  if (TREE_CODE (exp) == SSA_NAME)
    if (gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (exp)))
      if (gimple_assign_rhs_code (def) == ADDR_EXPR)
	exp = gimple_assign_rhs1 (def);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
				  poffset, &mode, &unsignedp, &reversep,
				  &volatilep);
      /* If we are left with MEM[a + CST] strip that and add it to the
	 pbitpos and return a.  */
      if (TREE_CODE (core) == MEM_REF)
	{
	  poly_offset_int tem;
	  tem = wi::to_poly_offset (TREE_OPERAND (core, 1));
	  tem <<= LOG2_BITS_PER_UNIT;
	  tem += *pbitpos;
	  /* Only strip the MEM_REF when the combined bit offset fits in
	     a poly_int64; otherwise fall through and take the address of
	     the MEM_REF itself as the core.  */
	  if (tem.to_shwi (pbitpos))
	    return TREE_OPERAND (core, 0);
	}
      core = build_fold_addr_expr_loc (loc, core);
    }
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      core = TREE_OPERAND (exp, 0);
      STRIP_NOPS (core);
      *pbitpos = 0;
      *poffset = TREE_OPERAND (exp, 1);
      /* A constant offset is folded into *PBITPOS (sign-extended from
	 its type and scaled to bits); *POFFSET is then cleared.  */
      if (poly_int_tree_p (*poffset))
	{
	  poly_offset_int tem
	    = wi::sext (wi::to_poly_offset (*poffset),
			TYPE_PRECISION (TREE_TYPE (*poffset)));
	  tem <<= LOG2_BITS_PER_UNIT;
	  if (tem.to_shwi (pbitpos))
	    *poffset = NULL_TREE;
	}
    }
  else
    {
      /* Anything else is its own core with a zero offset.  */
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
16541 :
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, poly_int64 *diff)
{
  tree core1, core2;
  poly_int64 bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  /* Both bit offsets must be whole bytes and both cores must be the
     same object for the difference to be a known constant.  */
  poly_int64 bytepos1, bytepos2;
  if (!multiple_p (bitpos1, BITS_PER_UNIT, &bytepos1)
      || !multiple_p (bitpos2, BITS_PER_UNIT, &bytepos2)
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
	toffset2 = fold_convert (type, toffset2);

      /* Two symbolic offsets may still fold to a constant difference
	 (e.g. identical expressions).  */
      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
	return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
	 be a constant.  */
      return false;
    }
  else
    *diff = 0;

  /* Add the difference of the constant parts.  */
  *diff += bytepos1 - bytepos2;
  return true;
}
16585 :
16586 : /* Return OFF converted to a pointer offset type suitable as offset for
16587 : POINTER_PLUS_EXPR. Use location LOC for this conversion. */
16588 : tree
16589 32101825 : convert_to_ptrofftype_loc (location_t loc, tree off)
16590 : {
16591 32101825 : if (ptrofftype_p (TREE_TYPE (off)))
16592 : return off;
16593 4050848 : return fold_convert_loc (loc, sizetype, off);
16594 : }
16595 :
16596 : /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
16597 : tree
16598 28117761 : fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
16599 : {
16600 28117761 : return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
16601 28117761 : ptr, convert_to_ptrofftype_loc (loc, off));
16602 : }
16603 :
16604 : /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
16605 : tree
16606 162916 : fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
16607 : {
16608 162916 : return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
16609 162916 : ptr, size_int (off));
16610 : }
16611 :
/* Return a pointer to a NUL-terminated string containing the sequence
   of bytes corresponding to the representation of the object referred to
   by SRC (or a subsequence of such bytes within it if SRC is a reference
   to an initialized constant array plus some constant offset).
   Set *STRSIZE the number of bytes in the constant sequence including
   the terminating NUL byte.  *STRSIZE is equal to sizeof(A) - OFFSET
   where A is the array that stores the constant sequence that SRC points
   to and OFFSET is the byte offset of SRC from the beginning of A.  SRC
   need not point to a string or even an array of characters but may point
   to an object of any type.  */

const char *
getbyterep (tree src, unsigned HOST_WIDE_INT *strsize)
{
  /* The offset into the array A storing the string, and A's byte size.  */
  tree offset_node;
  tree mem_size;

  if (strsize)
    *strsize = 0;

  /* With a size request consider the full byte representation; without
     one, only a string constant qualifies.  */
  if (strsize)
    src = byte_representation (src, &offset_node, &mem_size, NULL);
  else
    src = string_constant (src, &offset_node, &mem_size, NULL);
  if (!src)
    return NULL;

  /* The offset must be a known constant that fits in an unsigned
     HOST_WIDE_INT.  */
  unsigned HOST_WIDE_INT offset = 0;
  if (offset_node != NULL_TREE)
    {
      if (!tree_fits_uhwi_p (offset_node))
	return NULL;
      else
	offset = tree_to_uhwi (offset_node);
    }

  if (!tree_fits_uhwi_p (mem_size))
    return NULL;

  /* ARRAY_SIZE is the byte size of the array the constant sequence
     is stored in and equal to sizeof A.  INIT_BYTES is the number
     of bytes in the constant sequence used to initialize the array,
     including any embedded NULs as well as the terminating NUL (for
     strings), but not including any trailing zeros/NULs past
     the terminating one appended implicitly to a string literal to
     zero out the remainder of the array it's stored in.  For example,
     given:
       const char a[7] = "abc\0d";
       n = strlen (a + 1);
     ARRAY_SIZE is 7, INIT_BYTES is 6, and OFFSET is 1.  For a valid
     (i.e., nul-terminated) string with no embedded nuls, INIT_BYTES
     is equal to strlen (A) + 1.  */
  const unsigned HOST_WIDE_INT array_size = tree_to_uhwi (mem_size);
  unsigned HOST_WIDE_INT init_bytes = TREE_STRING_LENGTH (src);
  const char *string = TREE_STRING_POINTER (src);

  /* Ideally this would turn into a gcc_checking_assert over time.  */
  if (init_bytes > array_size)
    init_bytes = array_size;

  /* Fail for an empty initializer or an offset past the object.  */
  if (init_bytes == 0 || offset >= array_size)
    return NULL;

  if (strsize)
    {
      /* Compute and store the number of characters from the beginning
	 of the substring at OFFSET to the end, including the terminating
	 nul.  Offsets past the initial length refer to null strings.  */
      if (offset < init_bytes)
	*strsize = init_bytes - offset;
      else
	*strsize = 1;
    }
  else
    {
      tree eltype = TREE_TYPE (TREE_TYPE (src));
      /* Support only properly NUL-terminated single byte strings.  */
      if (tree_to_uhwi (TYPE_SIZE_UNIT (eltype)) != 1)
	return NULL;
      if (string[init_bytes - 1] != '\0')
	return NULL;
    }

  return offset < init_bytes ? string + offset : "";
}
16698 :
16699 : /* Return a pointer to a NUL-terminated string corresponding to
16700 : the expression STR referencing a constant string, possibly
16701 : involving a constant offset. Return null if STR either doesn't
16702 : reference a constant string or if it involves a nonconstant
16703 : offset. */
16704 :
16705 : const char *
16706 7659084 : c_getstr (tree str)
16707 : {
16708 7659084 : return getbyterep (str, NULL);
16709 : }
16710 :
/* Helper for tree_nonzero_bits.  Given a tree T, compute which bits in T
   may be nonzero, with precision PREC, the precision of T's type.  */

static wide_int
tree_nonzero_bits (const_tree t, unsigned prec)
{
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      /* A constant's nonzero bits are exactly its value.  */
      return wi::to_wide (t);
    case SSA_NAME:
      /* Use the nonzero-bits info recorded on the SSA name.  */
      return get_nonzero_bits (t);
    case NON_LVALUE_EXPR:
    case SAVE_EXPR:
      /* These wrappers don't change the value.  */
      return tree_nonzero_bits (TREE_OPERAND (t, 0), prec);
    case BIT_AND_EXPR:
      /* Only bits possibly nonzero in both operands survive an AND.  */
      return wi::bit_and (tree_nonzero_bits (TREE_OPERAND (t, 0), prec),
			  tree_nonzero_bits (TREE_OPERAND (t, 1), prec));
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 0), prec),
			 tree_nonzero_bits (TREE_OPERAND (t, 1), prec));
    case COND_EXPR:
      /* Either arm may be chosen, so union their possible bits.  */
      return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 1), prec),
			 tree_nonzero_bits (TREE_OPERAND (t, 2), prec));
    CASE_CONVERT:
      if (TREE_TYPE (t) != error_mark_node
	  && !error_operand_p (TREE_OPERAND (t, 0)))
	{
	  tree op0 = TREE_OPERAND (t, 0);
	  tree inner_type = TREE_TYPE (op0);
	  unsigned inner_prec = TYPE_PRECISION (inner_type);
	  /* Extend or truncate the inner bits to PREC, respecting the
	     signedness of the inner type.  */
	  return wide_int::from (tree_nonzero_bits (op0, inner_prec),
				 prec, TYPE_SIGN (inner_type));
	}
      break;
    case PLUS_EXPR:
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
	{
	  /* If the operands share no possibly-nonzero bits, the addition
	     cannot carry and behaves like a bitwise OR.  */
	  wide_int nzbits1 = tree_nonzero_bits (TREE_OPERAND (t, 0), prec);
	  wide_int nzbits2 = tree_nonzero_bits (TREE_OPERAND (t, 1), prec);
	  if (wi::bit_and (nzbits1, nzbits2) == 0)
	    return wi::bit_or (nzbits1, nzbits2);
	}
      break;
    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	  && TREE_TYPE (t) != error_mark_node)
	{
	  tree type = TREE_TYPE (t);
	  wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0), prec);
	  wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1), prec);
	  /* A negative shift count shifts in the opposite direction.  */
	  return wi::neg_p (arg1)
		 ? wi::rshift (nzbits, -arg1, TYPE_SIGN (type))
		 : wi::lshift (nzbits, arg1);
	}
      break;
    case RSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	  && TREE_TYPE (t) != error_mark_node)
	{
	  tree type = TREE_TYPE (t);
	  wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0), prec);
	  wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1), prec);
	  /* A negative shift count shifts in the opposite direction.  */
	  return wi::neg_p (arg1)
		 ? wi::lshift (nzbits, -arg1)
		 : wi::rshift (nzbits, arg1, TYPE_SIGN (type));
	}
      break;
    default:
      break;
    }

  /* Unknown expression: conservatively assume every bit may be set.  */
  return wi::shwi (-1, prec);
}
16786 :
16787 : /* Given a tree T, compute which bits in T may be nonzero. */
16788 :
16789 : wide_int
16790 168455288 : tree_nonzero_bits (const_tree t)
16791 : {
16792 168455288 : if (error_operand_p (t))
16793 0 : return wi::shwi (-1, 64);
16794 168455288 : return tree_nonzero_bits (t, TYPE_PRECISION (TREE_TYPE (t)));
16795 : }
16796 :
/* Helper function for address compare simplifications in match.pd.
   OP0 and OP1 are ADDR_EXPR operands being compared by CODE.
   TYPE is the type of comparison operands.
   BASE0, BASE1, OFF0 and OFF1 are set by the function.
   GENERIC is true if GENERIC folding and false for GIMPLE folding.
   Returns 0 if OP0 is known to be unequal to OP1 regardless of OFF{0,1},
   1 if bases are known to be equal and OP0 cmp OP1 depends on OFF0 cmp OFF1,
   and 2 if unknown.  */

int
address_compare (tree_code code, tree type, tree op0, tree op1,
		 tree &base0, tree &base1, poly_int64 &off0, poly_int64 &off1,
		 bool generic)
{
  /* In GIMPLE the operands may be SSA names; look through to the
     defining ADDR_EXPR.  */
  if (TREE_CODE (op0) == SSA_NAME)
    op0 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (op0));
  if (TREE_CODE (op1) == SSA_NAME)
    op1 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (op1));
  gcc_checking_assert (TREE_CODE (op0) == ADDR_EXPR);
  gcc_checking_assert (TREE_CODE (op1) == ADDR_EXPR);
  base0 = get_addr_base_and_unit_offset (TREE_OPERAND (op0, 0), &off0);
  base1 = get_addr_base_and_unit_offset (TREE_OPERAND (op1, 0), &off1);
  /* Fold a MEM_REF base's constant offset into OFF{0,1} so the base
     becomes the underlying pointer.  */
  if (base0 && TREE_CODE (base0) == MEM_REF)
    {
      off0 += mem_ref_offset (base0).force_shwi ();
      base0 = TREE_OPERAND (base0, 0);
    }
  if (base1 && TREE_CODE (base1) == MEM_REF)
    {
      off1 += mem_ref_offset (base1).force_shwi ();
      base1 = TREE_OPERAND (base1, 0);
    }
  if (base0 == NULL_TREE || base1 == NULL_TREE)
    return 2;

  int equal = 2;
  /* Punt in GENERIC on variables with value expressions;
     the value expressions might point to fields/elements
     of other vars etc.  */
  if (generic
      && ((VAR_P (base0) && DECL_HAS_VALUE_EXPR_P (base0))
	  || (VAR_P (base1) && DECL_HAS_VALUE_EXPR_P (base1))))
    return 2;
  else if (decl_in_symtab_p (base0) && decl_in_symtab_p (base1))
    {
      /* Let the symbol table decide whether the two decls can have
	 the same address.  */
      symtab_node *node0 = symtab_node::get_create (base0);
      symtab_node *node1 = symtab_node::get_create (base1);
      equal = node0->equal_address_to (node1);
    }
  else if ((DECL_P (base0)
	    || TREE_CODE (base0) == SSA_NAME
	    || TREE_CODE (base0) == STRING_CST)
	   && (DECL_P (base1)
	       || TREE_CODE (base1) == SSA_NAME
	       || TREE_CODE (base1) == STRING_CST))
    equal = (base0 == base1);
  /* Assume different STRING_CSTs with the same content will be
     merged.  */
  if (equal == 0
      && TREE_CODE (base0) == STRING_CST
      && TREE_CODE (base1) == STRING_CST
      && TREE_STRING_LENGTH (base0) == TREE_STRING_LENGTH (base1)
      && memcmp (TREE_STRING_POINTER (base0), TREE_STRING_POINTER (base1),
		 TREE_STRING_LENGTH (base0)) == 0)
    equal = 1;
  if (equal == 1)
    {
      if (code == EQ_EXPR
	  || code == NE_EXPR
	  /* If the offsets are equal we can ignore overflow.  */
	  || known_eq (off0, off1)
	  || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
	  /* Or if we compare using pointers to decls or strings.  */
	  || (POINTER_TYPE_P (type)
	      && (DECL_P (base0) || TREE_CODE (base0) == STRING_CST)))
	return 1;
      return 2;
    }
  if (equal != 0)
    return equal;
  if (code != EQ_EXPR && code != NE_EXPR)
    return 2;

  /* At this point we know (or assume) the two pointers point at
     different objects.  */
  HOST_WIDE_INT ioff0 = -1, ioff1 = -1;
  off0.is_constant (&ioff0);
  off1.is_constant (&ioff1);
  /* Punt on non-zero offsets from functions.  */
  if ((TREE_CODE (base0) == FUNCTION_DECL && ioff0)
      || (TREE_CODE (base1) == FUNCTION_DECL && ioff1))
    return 2;
  /* Or if the bases are neither decls nor string literals.  */
  if (!DECL_P (base0) && TREE_CODE (base0) != STRING_CST)
    return 2;
  if (!DECL_P (base1) && TREE_CODE (base1) != STRING_CST)
    return 2;
  /* For initializers, assume addresses of different functions are
     different.  */
  if (folding_initializer
      && TREE_CODE (base0) == FUNCTION_DECL
      && TREE_CODE (base1) == FUNCTION_DECL)
    return 0;

  /* Compute whether one address points to the start of one
     object and another one to the end of another one.  */
  poly_int64 size0 = 0, size1 = 0;
  if (TREE_CODE (base0) == STRING_CST)
    {
      if (ioff0 < 0 || ioff0 > TREE_STRING_LENGTH (base0))
	equal = 2;
      else
	size0 = TREE_STRING_LENGTH (base0);
    }
  else if (TREE_CODE (base0) == FUNCTION_DECL)
    size0 = 1;
  else
    {
      tree sz0 = DECL_SIZE_UNIT (base0);
      if (!tree_fits_poly_int64_p (sz0))
	equal = 2;
      else
	size0 = tree_to_poly_int64 (sz0);
    }
  if (TREE_CODE (base1) == STRING_CST)
    {
      if (ioff1 < 0 || ioff1 > TREE_STRING_LENGTH (base1))
	equal = 2;
      else
	size1 = TREE_STRING_LENGTH (base1);
    }
  else if (TREE_CODE (base1) == FUNCTION_DECL)
    size1 = 1;
  else
    {
      tree sz1 = DECL_SIZE_UNIT (base1);
      if (!tree_fits_poly_int64_p (sz1))
	equal = 2;
      else
	size1 = tree_to_poly_int64 (sz1);
    }
  if (equal == 0)
    {
      /* If one offset is pointing (or could be) to the beginning of one
	 object and the other is pointing to one past the last byte of the
	 other object, punt.  */
      if (maybe_eq (off0, 0) && maybe_eq (off1, size1))
	equal = 2;
      else if (maybe_eq (off1, 0) && maybe_eq (off0, size0))
	equal = 2;
      /* If both offsets are the same, there are some cases we know that are
	 ok.  Either if we know they aren't zero, or if we know both sizes
	 are not zero.  */
      if (equal == 2
	  && known_eq (off0, off1)
	  && (known_ne (off0, 0)
	      || (known_ne (size0, 0) && known_ne (size1, 0))))
	equal = 0;
    }

  /* At this point, equal is 2 if either one or both pointers are out of
     bounds of their object, or one points to start of its object and the
     other points to end of its object.  This is unspecified behavior
     e.g. in C++.  Otherwise equal is 0.  */
  if (folding_cxx_constexpr && equal)
    return equal;

  /* When both pointers point to string literals, even when equal is 0,
     due to tail merging of string literals the pointers might be the same.  */
  if (TREE_CODE (base0) == STRING_CST && TREE_CODE (base1) == STRING_CST)
    {
      if (ioff0 < 0
	  || ioff1 < 0
	  || ioff0 > TREE_STRING_LENGTH (base0)
	  || ioff1 > TREE_STRING_LENGTH (base1))
	return 2;

      /* If the bytes in the string literals starting at the pointers
	 differ, the pointers need to be different.  */
      if (memcmp (TREE_STRING_POINTER (base0) + ioff0,
		  TREE_STRING_POINTER (base1) + ioff1,
		  MIN (TREE_STRING_LENGTH (base0) - ioff0,
		       TREE_STRING_LENGTH (base1) - ioff1)) == 0)
	{
	  HOST_WIDE_INT ioffmin = MIN (ioff0, ioff1);
	  if (memcmp (TREE_STRING_POINTER (base0) + ioff0 - ioffmin,
		      TREE_STRING_POINTER (base1) + ioff1 - ioffmin,
		      ioffmin) == 0)
	    /* If even the bytes in the string literal before the
	       pointers are the same, the string literals could be
	       tail merged.  */
	    return 2;
	}
      return 0;
    }

  if (folding_cxx_constexpr)
    return 0;

  /* If this is a pointer comparison, ignore for now even
     valid equalities where one pointer is the offset zero
     of one object and the other to one past end of another one.  */
  if (!INTEGRAL_TYPE_P (type))
    return 0;

  /* Assume that string literals can't be adjacent to variables
     (automatic or global).  */
  if (TREE_CODE (base0) == STRING_CST || TREE_CODE (base1) == STRING_CST)
    return 0;

  /* Assume that automatic variables can't be adjacent to global
     variables.  */
  if (is_global_var (base0) != is_global_var (base1))
    return 0;

  return equal;
}
17014 :
17015 : /* Return the single non-zero element of a CONSTRUCTOR or NULL_TREE. */
17016 : tree
17017 52 : ctor_single_nonzero_element (const_tree t)
17018 : {
17019 52 : unsigned HOST_WIDE_INT idx;
17020 52 : constructor_elt *ce;
17021 52 : tree elt = NULL_TREE;
17022 :
17023 52 : if (TREE_CODE (t) != CONSTRUCTOR)
17024 : return NULL_TREE;
17025 113 : for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (t), idx, &ce); idx++)
17026 110 : if (!integer_zerop (ce->value) && !real_zerop (ce->value))
17027 : {
17028 101 : if (elt)
17029 : return NULL_TREE;
17030 52 : elt = ce->value;
17031 : }
17032 : return elt;
17033 : }
17034 :
17035 : #if CHECKING_P
17036 :
17037 : namespace selftest {
17038 :
17039 : /* Helper functions for writing tests of folding trees. */
17040 :
17041 : /* Verify that the binary op (LHS CODE RHS) folds to CONSTANT. */
17042 :
17043 : static void
17044 16 : assert_binop_folds_to_const (tree lhs, enum tree_code code, tree rhs,
17045 : tree constant)
17046 : {
17047 16 : ASSERT_EQ (constant, fold_build2 (code, TREE_TYPE (lhs), lhs, rhs));
17048 16 : }
17049 :
/* Verify that the binary op (LHS CODE RHS) folds to a NON_LVALUE_EXPR
   wrapping WRAPPED_EXPR, i.e. the fold simplifies the operation away
   but marks the result as not being an lvalue.  */

static void
assert_binop_folds_to_nonlvalue (tree lhs, enum tree_code code, tree rhs,
				 tree wrapped_expr)
{
  tree result = fold_build2 (code, TREE_TYPE (lhs), lhs, rhs);
  /* The fold must produce a new wrapper node, not WRAPPED_EXPR itself.  */
  ASSERT_NE (wrapped_expr, result);
  ASSERT_EQ (NON_LVALUE_EXPR, TREE_CODE (result));
  ASSERT_EQ (wrapped_expr, TREE_OPERAND (result, 0));
}
17062 :
17063 : /* Verify that various arithmetic binary operations are folded
17064 : correctly. */
17065 :
17066 : static void
17067 4 : test_arithmetic_folding ()
17068 : {
17069 4 : tree type = integer_type_node;
17070 4 : tree x = create_tmp_var_raw (type, "x");
17071 4 : tree zero = build_zero_cst (type);
17072 4 : tree one = build_int_cst (type, 1);
17073 :
17074 : /* Addition. */
17075 : /* 1 <-- (0 + 1) */
17076 4 : assert_binop_folds_to_const (zero, PLUS_EXPR, one,
17077 : one);
17078 4 : assert_binop_folds_to_const (one, PLUS_EXPR, zero,
17079 : one);
17080 :
17081 : /* (nonlvalue)x <-- (x + 0) */
17082 4 : assert_binop_folds_to_nonlvalue (x, PLUS_EXPR, zero,
17083 : x);
17084 :
17085 : /* Subtraction. */
17086 : /* 0 <-- (x - x) */
17087 4 : assert_binop_folds_to_const (x, MINUS_EXPR, x,
17088 : zero);
17089 4 : assert_binop_folds_to_nonlvalue (x, MINUS_EXPR, zero,
17090 : x);
17091 :
17092 : /* Multiplication. */
17093 : /* 0 <-- (x * 0) */
17094 4 : assert_binop_folds_to_const (x, MULT_EXPR, zero,
17095 : zero);
17096 :
17097 : /* (nonlvalue)x <-- (x * 1) */
17098 4 : assert_binop_folds_to_nonlvalue (x, MULT_EXPR, one,
17099 : x);
17100 4 : }
17101 :
17102 : namespace test_operand_equality {
17103 :
17104 : /* Verify structural equality. */
17105 :
17106 : /* Execute fold_vec_perm_cst unit tests. */
17107 :
17108 : static void
17109 4 : test ()
17110 : {
17111 4 : tree stype = integer_type_node;
17112 4 : tree utype = unsigned_type_node;
17113 4 : tree x = create_tmp_var_raw (stype, "x");
17114 4 : tree y = create_tmp_var_raw (stype, "y");
17115 4 : tree z = create_tmp_var_raw (stype, "z");
17116 4 : tree four = build_int_cst (stype, 4);
17117 4 : tree lhs1 = fold_build2 (PLUS_EXPR, stype, x, y);
17118 4 : tree rhs1 = fold_convert (stype,
17119 : fold_build2 (PLUS_EXPR, utype,
17120 : fold_convert (utype, x),
17121 : fold_convert (utype, y)));
17122 :
17123 : /* (int)((unsigned x) + (unsigned y)) == x + y. */
17124 4 : ASSERT_TRUE (operand_equal_p (lhs1, rhs1, OEP_ASSUME_WRAPV));
17125 4 : ASSERT_FALSE (operand_equal_p (lhs1, rhs1, 0));
17126 :
17127 : /* (int)(unsigned) x == x. */
17128 4 : tree lhs2 = build1 (NOP_EXPR, stype,
17129 : build1 (NOP_EXPR, utype, x));
17130 4 : tree rhs2 = x;
17131 4 : ASSERT_TRUE (operand_equal_p (lhs2, rhs2, OEP_ASSUME_WRAPV));
17132 4 : ASSERT_TRUE (operand_equal_p (lhs2, rhs2, 0));
17133 :
17134 : /* (unsigned x) + (unsigned y) == x + y. */
17135 4 : tree lhs3 = lhs1;
17136 4 : tree rhs3 = fold_build2 (PLUS_EXPR, utype,
17137 : fold_convert (utype, x),
17138 : fold_convert (utype, y));
17139 4 : ASSERT_TRUE (operand_equal_p (lhs3, rhs3, OEP_ASSUME_WRAPV));
17140 4 : ASSERT_FALSE (operand_equal_p (lhs3, rhs3, 0));
17141 :
17142 : /* (unsigned x) / (unsigned y) == x / y. */
17143 4 : tree lhs4 = fold_build2 (TRUNC_DIV_EXPR, stype, x, y);;
17144 4 : tree rhs4 = fold_build2 (TRUNC_DIV_EXPR, utype,
17145 : fold_convert (utype, x),
17146 : fold_convert (utype, y));
17147 4 : ASSERT_FALSE (operand_equal_p (lhs4, rhs4, OEP_ASSUME_WRAPV));
17148 4 : ASSERT_FALSE (operand_equal_p (lhs4, rhs4, 0));
17149 :
17150 : /* (long x) / 4 == (long)(x / 4). */
17151 4 : tree lstype = long_long_integer_type_node;
17152 4 : tree lfour = build_int_cst (lstype, 4);
17153 4 : tree lhs5 = fold_build2 (TRUNC_DIV_EXPR, lstype,
17154 : fold_build1 (VIEW_CONVERT_EXPR, lstype, x), lfour);
17155 4 : tree rhs5 = fold_build1 (VIEW_CONVERT_EXPR, lstype,
17156 : fold_build2 (TRUNC_DIV_EXPR, stype, x, four));
17157 4 : ASSERT_FALSE (operand_equal_p (lhs5, rhs5, OEP_ASSUME_WRAPV));
17158 4 : ASSERT_FALSE (operand_equal_p (lhs5, rhs5, 0));
17159 :
17160 : /* (unsigned x) / 4 == x / 4. */
17161 4 : tree lhs6 = fold_build2 (TRUNC_DIV_EXPR, stype, x, four);;
17162 4 : tree rhs6 = fold_build2 (TRUNC_DIV_EXPR, utype,
17163 : fold_convert (utype, x),
17164 : fold_convert (utype, four));
17165 4 : ASSERT_FALSE (operand_equal_p (lhs6, rhs6, OEP_ASSUME_WRAPV));
17166 4 : ASSERT_FALSE (operand_equal_p (lhs6, rhs6, 0));
17167 :
17168 : /* a / (int)((unsigned)b - (unsigned)c)) == a / (b - c). */
17169 4 : tree lhs7 = fold_build2 (TRUNC_DIV_EXPR, stype, x, lhs1);
17170 4 : tree rhs7 = fold_build2 (TRUNC_DIV_EXPR, stype, x, rhs1);
17171 4 : ASSERT_TRUE (operand_equal_p (lhs7, rhs7, OEP_ASSUME_WRAPV));
17172 4 : ASSERT_FALSE (operand_equal_p (lhs7, rhs7, 0));
17173 :
17174 : /* (unsigned x) + 4 == x + 4. */
17175 4 : tree lhs8 = fold_build2 (PLUS_EXPR, stype, x, four);
17176 4 : tree rhs8 = fold_build2 (PLUS_EXPR, utype,
17177 : fold_convert (utype, x),
17178 : fold_convert (utype, four));
17179 4 : ASSERT_TRUE (operand_equal_p (lhs8, rhs8, OEP_ASSUME_WRAPV));
17180 4 : ASSERT_FALSE (operand_equal_p (lhs8, rhs8, 0));
17181 :
17182 : /* (unsigned x) + 4 == 4 + x. */
17183 4 : tree lhs9 = fold_build2 (PLUS_EXPR, stype, four, x);
17184 4 : tree rhs9 = fold_build2 (PLUS_EXPR, utype,
17185 : fold_convert (utype, x),
17186 : fold_convert (utype, four));
17187 4 : ASSERT_TRUE (operand_equal_p (lhs9, rhs9, OEP_ASSUME_WRAPV));
17188 4 : ASSERT_FALSE (operand_equal_p (lhs9, rhs9, 0));
17189 :
17190 : /* ((unsigned x) + 4) * (unsigned y)) + z == ((4 + x) * y) + z. */
17191 4 : tree lhs10 = fold_build2 (PLUS_EXPR, stype,
17192 : fold_build2 (MULT_EXPR, stype,
17193 : fold_build2 (PLUS_EXPR, stype, four, x),
17194 : y),
17195 : z);
17196 4 : tree rhs10 = fold_build2 (MULT_EXPR, utype,
17197 : fold_build2 (PLUS_EXPR, utype,
17198 : fold_convert (utype, x),
17199 : fold_convert (utype, four)),
17200 : fold_convert (utype, y));
17201 4 : rhs10 = fold_build2 (PLUS_EXPR, stype, fold_convert (stype, rhs10), z);
17202 4 : ASSERT_TRUE (operand_equal_p (lhs10, rhs10, OEP_ASSUME_WRAPV));
17203 4 : ASSERT_FALSE (operand_equal_p (lhs10, rhs10, 0));
17204 4 : }
17205 : }
17206 :
17207 : namespace test_fold_vec_perm_cst {
17208 :
/* Build a VECTOR_CST corresponding to VMODE, with an encoding given by
   NPATTERNS, NELTS_PER_PATTERN and STEP.  Fill it with randomized
   elements, using rand () % THRESHOLD.  With NELTS_PER_PATTERN == 3 the
   trailing elements of each pattern step by STEP.  If NATURAL_STEPPED,
   the second element of each pattern is also derived from the first by
   adding STEP, making each pattern one natural stepped sequence.  */

static tree
build_vec_cst_rand (machine_mode vmode, unsigned npatterns,
		    unsigned nelts_per_pattern,
		    int step = 0, bool natural_stepped = false,
		    int threshold = 100)
{
  tree inner_type = lang_hooks.types.type_for_mode (GET_MODE_INNER (vmode), 1);
  tree vectype = build_vector_type_for_mode (inner_type, vmode);
  tree_vector_builder builder (vectype, npatterns, nelts_per_pattern);

  // Fill a0 (the first element) for each pattern.
  for (unsigned i = 0; i < npatterns; i++)
    builder.quick_push (build_int_cst (inner_type, rand () % threshold));

  if (nelts_per_pattern == 1)
    return builder.build ();

  // Fill a1 (the second element) for each pattern.
  for (unsigned i = 0; i < npatterns; i++)
    {
      tree a1;
      if (natural_stepped)
	{
	  /* a1 = a0 + step, so the pattern is naturally stepped from
	     its base element onwards.  */
	  tree a0 = builder[i];
	  wide_int a0_val = wi::to_wide (a0);
	  wide_int a1_val = a0_val + step;
	  a1 = wide_int_to_tree (inner_type, a1_val);
	}
      else
	a1 = build_int_cst (inner_type, rand () % threshold);
      builder.quick_push (a1);
    }
  if (nelts_per_pattern == 2)
    return builder.build ();

  /* Remaining elements: each is the element one pattern earlier
     plus STEP.  */
  for (unsigned i = npatterns * 2; i < npatterns * nelts_per_pattern; i++)
    {
      tree prev_elem = builder[i - npatterns];
      wide_int prev_elem_val = wi::to_wide (prev_elem);
      wide_int val = prev_elem_val + step;
      builder.quick_push (wide_int_to_tree (inner_type, val));
    }

  return builder.build ();
}
17258 :
/* Validate result of VEC_PERM_EXPR folding for the unit-tests below,
   when the result is VLA.  Check that RES is non-NULL, that its encoding
   is no larger than (NPATTERNS, NELTS_PER_PATTERN), and that its first
   NPATTERNS * NELTS_PER_PATTERN elements match EXPECTED_RES.  */

static void
validate_res (unsigned npatterns, unsigned nelts_per_pattern,
	      tree res, tree *expected_res)
{
  /* Actual npatterns and encoded_elts in res may be less than expected due
     to canonicalization.  */
  ASSERT_TRUE (res != NULL_TREE);
  ASSERT_TRUE (VECTOR_CST_NPATTERNS (res) <= npatterns);
  ASSERT_TRUE (vector_cst_encoded_nelts (res) <= npatterns * nelts_per_pattern);

  for (unsigned i = 0; i < npatterns * nelts_per_pattern; i++)
    ASSERT_TRUE (operand_equal_p (VECTOR_CST_ELT (res, i), expected_res[i], 0));
}
17275 :
/* Validate result of VEC_PERM_EXPR folding for the unit-tests below,
   when the result is VLS.  Check that RES has exactly EXPECTED_NELTS
   elements and that each matches EXPECTED_RES element-wise.  */

static void
validate_res_vls (tree res, tree *expected_res, unsigned expected_nelts)
{
  ASSERT_TRUE (known_eq (VECTOR_CST_NELTS (res), expected_nelts));
  for (unsigned i = 0; i < expected_nelts; i++)
    ASSERT_TRUE (operand_equal_p (VECTOR_CST_ELT (res, i), expected_res[i], 0));
}
17286 :
17287 : /* Helper routine to push multiple elements into BUILDER. */
17288 : template<unsigned N>
17289 0 : static void builder_push_elems (vec_perm_builder& builder,
17290 : poly_uint64 (&elems)[N])
17291 : {
17292 0 : for (unsigned i = 0; i < N; i++)
17293 0 : builder.quick_push (elems[i]);
17294 0 : }
17295 :
17296 : #define ARG0(index) vector_cst_elt (arg0, index)
17297 : #define ARG1(index) vector_cst_elt (arg1, index)
17298 :
/* Test cases where the result is VNx4SI (variable-length) and the input
   vectors are V4SI (fixed-length).  */

static void
test_vnx4si_v4si (machine_mode vnx4si_mode, machine_mode v4si_mode)
{
  /* Repeat each case with 10 randomized input vectors.  */
  for (int i = 0; i < 10; i++)
    {
      /* Case 1:
	 sel = { 0, 4, 1, 5, ... }
	 res = { arg0[0], arg1[0], arg0[1], arg1[1], ...} // (4, 1) */
      {
	tree arg0 = build_vec_cst_rand (v4si_mode, 4, 1, 0);
	tree arg1 = build_vec_cst_rand (v4si_mode, 4, 1, 0);

	tree inner_type
	  = lang_hooks.types.type_for_mode (GET_MODE_INNER (vnx4si_mode), 1);
	tree res_type = build_vector_type_for_mode (inner_type, vnx4si_mode);

	poly_uint64 res_len = TYPE_VECTOR_SUBPARTS (res_type);
	vec_perm_builder builder (res_len, 4, 1);
	poly_uint64 mask_elems[] = { 0, 4, 1, 5 };
	builder_push_elems (builder, mask_elems);

	vec_perm_indices sel (builder, 2, res_len);
	tree res = fold_vec_perm_cst (res_type, arg0, arg1, sel);

	tree expected_res[] = { ARG0(0), ARG1(0), ARG0(1), ARG1(1) };
	validate_res (4, 1, res, expected_res);
      }

      /* Case 2: Same as case 1, but contains an out of bounds access which
	 should wrap around (selector elements are taken modulo 2*len).
	 sel = {0, 8, 4, 12, ...} (4, 1)
	 res = { arg0[0], arg0[0], arg1[0], arg1[0], ... } (4, 1). */
      {
	tree arg0 = build_vec_cst_rand (v4si_mode, 4, 1, 0);
	tree arg1 = build_vec_cst_rand (v4si_mode, 4, 1, 0);

	tree inner_type
	  = lang_hooks.types.type_for_mode (GET_MODE_INNER (vnx4si_mode), 1);
	tree res_type = build_vector_type_for_mode (inner_type, vnx4si_mode);

	poly_uint64 res_len = TYPE_VECTOR_SUBPARTS (res_type);
	vec_perm_builder builder (res_len, 4, 1);
	poly_uint64 mask_elems[] = { 0, 8, 4, 12 };
	builder_push_elems (builder, mask_elems);

	vec_perm_indices sel (builder, 2, res_len);
	tree res = fold_vec_perm_cst (res_type, arg0, arg1, sel);

	tree expected_res[] = { ARG0(0), ARG0(0), ARG1(0), ARG1(0) };
	validate_res (4, 1, res, expected_res);
      }
    }
}
17354 :
/* Test cases where the result is V4SI (fixed-length) and the input
   vectors are VNx4SI (variable-length).  */

static void
test_v4si_vnx4si (machine_mode v4si_mode, machine_mode vnx4si_mode)
{
  /* Repeat each case with 10 randomized input vectors.  */
  for (int i = 0; i < 10; i++)
    {
      /* Case 1:
	 sel = { 0, 1, 2, 3}
	 res = { arg0[0], arg0[1], arg0[2], arg0[3] }. */
      {
	tree arg0 = build_vec_cst_rand (vnx4si_mode, 4, 1);
	tree arg1 = build_vec_cst_rand (vnx4si_mode, 4, 1);

	tree inner_type
	  = lang_hooks.types.type_for_mode (GET_MODE_INNER (v4si_mode), 1);
	tree res_type = build_vector_type_for_mode (inner_type, v4si_mode);

	poly_uint64 res_len = TYPE_VECTOR_SUBPARTS (res_type);
	vec_perm_builder builder (res_len, 4, 1);
	poly_uint64 mask_elems[] = {0, 1, 2, 3};
	builder_push_elems (builder, mask_elems);

	vec_perm_indices sel (builder, 2, res_len);
	tree res = fold_vec_perm_cst (res_type, arg0, arg1, sel);

	tree expected_res[] = { ARG0(0), ARG0(1), ARG0(2), ARG0(3) };
	validate_res_vls (res, expected_res, 4);
      }

      /* Case 2: Same as Case 1, but crossing input vector.
	 sel = {0, 2, 4, 6}
	 In this case, the index 4 is ambiguous since len = 4 + 4x.
	 Since we cannot determine, which vector to choose from during
	 compile time, should return NULL_TREE.  */
      {
	tree arg0 = build_vec_cst_rand (vnx4si_mode, 4, 1);
	tree arg1 = build_vec_cst_rand (vnx4si_mode, 4, 1);

	tree inner_type
	  = lang_hooks.types.type_for_mode (GET_MODE_INNER (v4si_mode), 1);
	tree res_type = build_vector_type_for_mode (inner_type, v4si_mode);

	poly_uint64 res_len = TYPE_VECTOR_SUBPARTS (res_type);
	vec_perm_builder builder (res_len, 4, 1);
	poly_uint64 mask_elems[] = {0, 2, 4, 6};
	builder_push_elems (builder, mask_elems);

	vec_perm_indices sel (builder, 2, res_len);
	const char *reason;
	tree res = fold_vec_perm_cst (res_type, arg0, arg1, sel, &reason);

	ASSERT_TRUE (res == NULL_TREE);
	ASSERT_TRUE (!strcmp (reason, "cannot divide selector element by arg len"));
      }
    }
}
17412 :
/* Tests that apply to all vector lengths: selecting the first element
   of either input vector with a duplicated (1, 1) mask.  */

static void
test_all_nunits (machine_mode vmode)
{
  /* Test with 10 different inputs.  */
  for (int i = 0; i < 10; i++)
    {
      tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
      tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
      poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));

      /* Case 1: mask = {0, ...} // (1, 1)
	 res = { arg0[0], ... } // (1, 1) */
      {
	vec_perm_builder builder (len, 1, 1);
	builder.quick_push (0);
	vec_perm_indices sel (builder, 2, len);
	tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
	tree expected_res[] = { ARG0(0) };
	validate_res (1, 1, res, expected_res);
      }

      /* Case 2: mask = {len, ...} // (1, 1)
	 res = { arg1[0], ... } // (1, 1) */
      {
	vec_perm_builder builder (len, 1, 1);
	builder.quick_push (len);
	vec_perm_indices sel (builder, 2, len);
	tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);

	tree expected_res[] = { ARG1(0) };
	validate_res (1, 1, res, expected_res);
      }
    }
}
17449 :
/* Test all vectors which contain at-least 2 elements.  Covers both
   successful folds and masks that fold_vec_perm_cst must reject.  */

static void
test_nunits_min_2 (machine_mode vmode)
{
  /* Repeat each case with 10 randomized input vectors.  */
  for (int i = 0; i < 10; i++)
    {
      /* Case 1: mask = { 0, len, ... } // (2, 1)
	 res = { arg0[0], arg1[0], ... } // (2, 1) */
      {
	tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
	tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
	poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));

	vec_perm_builder builder (len, 2, 1);
	poly_uint64 mask_elems[] = { 0, len };
	builder_push_elems (builder, mask_elems);

	vec_perm_indices sel (builder, 2, len);
	tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);

	tree expected_res[] = { ARG0(0), ARG1(0) };
	validate_res (2, 1, res, expected_res);
      }

      /* Case 2: mask = { 0, len, 1, len+1, ... } // (2, 2)
	 res = { arg0[0], arg1[0], arg0[1], arg1[1], ... } // (2, 2) */
      {
	tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
	tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
	poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));

	vec_perm_builder builder (len, 2, 2);
	poly_uint64 mask_elems[] = { 0, len, 1, len + 1 };
	builder_push_elems (builder, mask_elems);

	vec_perm_indices sel (builder, 2, len);
	tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);

	tree expected_res[] = { ARG0(0), ARG1(0), ARG0(1), ARG1(1) };
	validate_res (2, 2, res, expected_res);
      }

      /* Case 4: mask = {0, 0, 1, ...} // (1, 3)
	 Test that the stepped sequence of the pattern selects from
	 same input pattern. Since input vectors have npatterns = 2,
	 and step (a2 - a1) = 1, step is not a multiple of npatterns
	 in input vector. So return NULL_TREE. */
      {
	tree arg0 = build_vec_cst_rand (vmode, 2, 3, 1, true);
	tree arg1 = build_vec_cst_rand (vmode, 2, 3, 1);
	poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));

	vec_perm_builder builder (len, 1, 3);
	poly_uint64 mask_elems[] = { 0, 0, 1 };
	builder_push_elems (builder, mask_elems);

	vec_perm_indices sel (builder, 2, len);
	const char *reason;
	tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel,
				      &reason);
	ASSERT_TRUE (res == NULL_TREE);
	ASSERT_TRUE (!strcmp (reason, "step is not multiple of npatterns"));
      }

      /* Case 5: mask = {len, 0, 1, ...} // (1, 3)
	 Test that stepped sequence of the pattern selects from arg0.
	 res = { arg1[0], arg0[0], arg0[1], ... } // (1, 3) */
      {
	tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1, true);
	tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
	poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));

	vec_perm_builder builder (len, 1, 3);
	poly_uint64 mask_elems[] = { len, 0, 1 };
	builder_push_elems (builder, mask_elems);

	vec_perm_indices sel (builder, 2, len);
	tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);

	tree expected_res[] = { ARG1(0), ARG0(0), ARG0(1) };
	validate_res (1, 3, res, expected_res);
      }

      /* Case 6: PR111648 - a1 chooses base element from input vector arg.
	 In this case ensure that arg has a natural stepped sequence
	 to preserve arg's encoding.

	 As a concrete example, consider:
	 arg0: { -16, -9, -10, ... } // (1, 3)
	 arg1: { -12, -5, -6, ... } // (1, 3)
	 sel = { 0, len, len + 1, ... } // (1, 3)

	 This will create res with following encoding:
	 res = { arg0[0], arg1[0], arg1[1], ... } // (1, 3)
	     = { -16, -12, -5, ... }

	 The step in above encoding would be: (-5) - (-12) = 7
	 And hence res[3] would be computed as -5 + 7 = 2.
	 instead of arg1[2], ie, -6.
	 Ensure that valid_mask_for_fold_vec_perm_cst returns false
	 for this case. */
      {
	tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
	tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
	poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));

	vec_perm_builder builder (len, 1, 3);
	poly_uint64 mask_elems[] = { 0, len, len+1 };
	builder_push_elems (builder, mask_elems);

	vec_perm_indices sel (builder, 2, len);
	const char *reason;
	/* FIXME: It may happen that build_vec_cst_rand may build a natural
	   stepped pattern, even if we didn't explicitly tell it to.  So folding
	   may not always fail, but if it does, ensure that's because arg1 does
	   not have a natural stepped sequence (and not due to other reason).  */
	tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel, &reason);
	if (res == NULL_TREE)
	  ASSERT_TRUE (!strcmp (reason, "not a natural stepped sequence"));
      }

      /* Case 7: Same as Case 6, except that arg1 contains natural stepped
	 sequence and thus folding should be valid for this case. */
      {
	tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
	tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1, true);
	poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));

	vec_perm_builder builder (len, 1, 3);
	poly_uint64 mask_elems[] = { 0, len, len+1 };
	builder_push_elems (builder, mask_elems);

	vec_perm_indices sel (builder, 2, len);
	tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);

	tree expected_res[] = { ARG0(0), ARG1(0), ARG1(1) };
	validate_res (1, 3, res, expected_res);
      }

      /* Case 8: Same as aarch64/sve/slp_3.c:
	 arg0, arg1 are dup vectors.
	 sel = { 0, len, 1, len+1, 2, len+2, ... } // (2, 3)
	 So res = { arg0[0], arg1[0], ... } // (2, 1)

	 In this case, since the input vectors are dup, only the first two
	 elements per pattern in sel are considered significant. */
      {
	tree arg0 = build_vec_cst_rand (vmode, 1, 1);
	tree arg1 = build_vec_cst_rand (vmode, 1, 1);
	poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));

	vec_perm_builder builder (len, 2, 3);
	poly_uint64 mask_elems[] = { 0, len, 1, len + 1, 2, len + 2 };
	builder_push_elems (builder, mask_elems);

	vec_perm_indices sel (builder, 2, len);
	tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);

	tree expected_res[] = { ARG0(0), ARG1(0) };
	validate_res (2, 1, res, expected_res);
      }
    }
}
17614 :
/* Test all vectors which contain at-least 4 elements.  Covers both
   successful folds and masks that fold_vec_perm_cst must reject.  */

static void
test_nunits_min_4 (machine_mode vmode)
{
  /* Repeat each case with 10 randomized input vectors.  */
  for (int i = 0; i < 10; i++)
    {
      /* Case 1: mask = { 0, len, 1, len+1, ... } // (4, 1)
	 res: { arg0[0], arg1[0], arg0[1], arg1[1], ... } // (4, 1) */
      {
	tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
	tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
	poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));

	vec_perm_builder builder (len, 4, 1);
	poly_uint64 mask_elems[] = { 0, len, 1, len + 1 };
	builder_push_elems (builder, mask_elems);

	vec_perm_indices sel (builder, 2, len);
	tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);

	tree expected_res[] = { ARG0(0), ARG1(0), ARG0(1), ARG1(1) };
	validate_res (4, 1, res, expected_res);
      }

      /* Case 2: sel = {0, 1, 2, ...} // (1, 3)
	 res: { arg0[0], arg0[1], arg0[2], ... } // (1, 3) */
      {
	tree arg0 = build_vec_cst_rand (vmode, 1, 3, 2);
	tree arg1 = build_vec_cst_rand (vmode, 1, 3, 2);
	poly_uint64 arg0_len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));

	vec_perm_builder builder (arg0_len, 1, 3);
	poly_uint64 mask_elems[] = {0, 1, 2};
	builder_push_elems (builder, mask_elems);

	vec_perm_indices sel (builder, 2, arg0_len);
	tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
	tree expected_res[] = { ARG0(0), ARG0(1), ARG0(2) };
	validate_res (1, 3, res, expected_res);
      }

      /* Case 3: sel = {len, len+1, len+2, ...} // (1, 3)
	 res: { arg1[0], arg1[1], arg1[2], ... } // (1, 3) */
      {
	tree arg0 = build_vec_cst_rand (vmode, 1, 3, 2);
	tree arg1 = build_vec_cst_rand (vmode, 1, 3, 2);
	poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));

	vec_perm_builder builder (len, 1, 3);
	poly_uint64 mask_elems[] = {len, len + 1, len + 2};
	builder_push_elems (builder, mask_elems);

	vec_perm_indices sel (builder, 2, len);
	tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
	tree expected_res[] = { ARG1(0), ARG1(1), ARG1(2) };
	validate_res (1, 3, res, expected_res);
      }

      /* Case 4:
	 sel = { len, 0, 2, ... } // (1, 3)
	 This should return NULL because we cross the input vectors.
	 Because,
	 Let's assume len = C + Cx
	 a1 = 0
	 S = 2
	 esel = arg0_len / sel_npatterns = C + Cx
	 ae = 0 + (esel - 2) * S
	    = 0 + (C + Cx - 2) * 2
	    = 2(C-2) + 2Cx

	 For C >= 4:
	 Let q1 = a1 / arg0_len = 0 / (C + Cx) = 0
	 Let qe = ae / arg0_len = (2(C-2) + 2Cx) / (C + Cx) = 1
	 Since q1 != qe, we cross input vectors.
	 So return NULL_TREE. */
      {
	tree arg0 = build_vec_cst_rand (vmode, 1, 3, 2);
	tree arg1 = build_vec_cst_rand (vmode, 1, 3, 2);
	poly_uint64 arg0_len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));

	vec_perm_builder builder (arg0_len, 1, 3);
	poly_uint64 mask_elems[] = { arg0_len, 0, 2 };
	builder_push_elems (builder, mask_elems);

	vec_perm_indices sel (builder, 2, arg0_len);
	const char *reason;
	tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel, &reason);
	ASSERT_TRUE (res == NULL_TREE);
	ASSERT_TRUE (!strcmp (reason, "crossed input vectors"));
      }

      /* Case 5: npatterns(arg0) = 4 > npatterns(sel) = 2
	 mask = { 0, len, 1, len + 1, ...} // (2, 2)
	 res = { arg0[0], arg1[0], arg0[1], arg1[1], ... } // (2, 2)

	 Note that fold_vec_perm_cst will set
	 res_npatterns = max(4, max(4, 2)) = 4
	 However after canonicalizing, we will end up with shape (2, 2). */
      {
	tree arg0 = build_vec_cst_rand (vmode, 4, 1);
	tree arg1 = build_vec_cst_rand (vmode, 4, 1);
	poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));

	vec_perm_builder builder (len, 2, 2);
	poly_uint64 mask_elems[] = { 0, len, 1, len + 1 };
	builder_push_elems (builder, mask_elems);

	vec_perm_indices sel (builder, 2, len);
	tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
	tree expected_res[] = { ARG0(0), ARG1(0), ARG0(1), ARG1(1) };
	validate_res (2, 2, res, expected_res);
      }

      /* Case 6: Test combination in sel, where one pattern is dup and other
	 is stepped sequence.
	 sel = { 0, 0, 0, 1, 0, 2, ... } // (2, 3)
	 res = { arg0[0], arg0[0], arg0[0],
		 arg0[1], arg0[0], arg0[2], ... } // (2, 3) */
      {
	tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
	tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
	poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));

	vec_perm_builder builder (len, 2, 3);
	poly_uint64 mask_elems[] = { 0, 0, 0, 1, 0, 2 };
	builder_push_elems (builder, mask_elems);

	vec_perm_indices sel (builder, 2, len);
	tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);

	tree expected_res[] = { ARG0(0), ARG0(0), ARG0(0),
				ARG0(1), ARG0(0), ARG0(2) };
	validate_res (2, 3, res, expected_res);
      }

      /* Case 7: PR111048: Check that we set arg_npatterns correctly,
	 when arg0, arg1 and sel have different number of patterns.
	 arg0 is of shape (1, 1)
	 arg1 is of shape (4, 1)
	 sel is of shape (2, 3) = {1, len, 2, len+1, 3, len+2, ...}

	 In this case the pattern: {len, len+1, len+2, ...} chooses arg1.
	 However,
	 step = (len+2) - (len+1) = 1
	 arg_npatterns = VECTOR_CST_NPATTERNS (arg1) = 4
	 Since step is not a multiple of arg_npatterns,
	 valid_mask_for_fold_vec_perm_cst should return false,
	 and thus fold_vec_perm_cst should return NULL_TREE. */
      {
	tree arg0 = build_vec_cst_rand (vmode, 1, 1);
	tree arg1 = build_vec_cst_rand (vmode, 4, 1);
	poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));

	vec_perm_builder builder (len, 2, 3);
	poly_uint64 mask_elems[] = { 0, len, 1, len + 1, 2, len + 2 };
	builder_push_elems (builder, mask_elems);

	vec_perm_indices sel (builder, 2, len);
	const char *reason;
	tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel, &reason);

	ASSERT_TRUE (res == NULL_TREE);
	ASSERT_TRUE (!strcmp (reason, "step is not multiple of npatterns"));
      }

      /* Case 8: PR111754: When input vector is not a stepped sequence,
	 check that the result is not a stepped sequence either, even
	 if sel has a stepped sequence. */
      {
	tree arg0 = build_vec_cst_rand (vmode, 1, 2);
	poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));

	vec_perm_builder builder (len, 1, 3);
	poly_uint64 mask_elems[] = { 0, 1, 2 };
	builder_push_elems (builder, mask_elems);

	vec_perm_indices sel (builder, 1, len);
	tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg0, sel);

	tree expected_res[] = { ARG0(0), ARG0(1) };
	validate_res (sel.encoding ().npatterns (), 2, res, expected_res);
      }

      /* Case 9: If sel doesn't contain a stepped sequence,
	 check that the result has same encoding as sel, irrespective
	 of shape of input vectors. */
      {
	tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
	tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
	poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));

	vec_perm_builder builder (len, 1, 2);
	poly_uint64 mask_elems[] = { 0, len };
	builder_push_elems (builder, mask_elems);

	vec_perm_indices sel (builder, 2, len);
	tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);

	tree expected_res[] = { ARG0(0), ARG1(0) };
	validate_res (sel.encoding ().npatterns (),
		      sel.encoding ().nelts_per_pattern (), res, expected_res);
      }
    }
}
17820 :
/* Test all vectors which contain at-least 8 elements.  */

static void
test_nunits_min_8 (machine_mode vmode)
{
  /* Repeat with 10 randomized input vectors.  */
  for (int i = 0; i < 10; i++)
    {
      /* Case 1: sel_npatterns (4) > input npatterns (2)
	 sel: { 0, 0, 1, len, 2, 0, 3, len, 4, 0, 5, len, ...} // (4, 3)
	 res: { arg0[0], arg0[0], arg0[1], arg1[0],
		arg0[2], arg0[0], arg0[3], arg1[0],
		arg0[4], arg0[0], arg0[5], arg1[0], ... } // (4, 3) */
      {
	tree arg0 = build_vec_cst_rand (vmode, 2, 3, 2);
	tree arg1 = build_vec_cst_rand (vmode, 2, 3, 2);
	poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));

	vec_perm_builder builder(len, 4, 3);
	poly_uint64 mask_elems[] = { 0, 0, 1, len, 2, 0, 3, len,
				     4, 0, 5, len };
	builder_push_elems (builder, mask_elems);

	vec_perm_indices sel (builder, 2, len);
	tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);

	tree expected_res[] = { ARG0(0), ARG0(0), ARG0(1), ARG1(0),
				ARG0(2), ARG0(0), ARG0(3), ARG1(0),
				ARG0(4), ARG0(0), ARG0(5), ARG1(0) };
	validate_res (4, 3, res, expected_res);
      }
    }
}
17853 :
17854 : /* Test vectors for which nunits[0] <= 4. */
17855 :
17856 : static void
17857 0 : test_nunits_max_4 (machine_mode vmode)
17858 : {
17859 : /* Case 1: mask = {0, 4, ...} // (1, 2)
17860 : This should return NULL_TREE because the index 4 may choose
17861 : from either arg0 or arg1 depending on vector length. */
17862 0 : {
17863 0 : tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
17864 0 : tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
17865 0 : poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17866 :
17867 0 : vec_perm_builder builder (len, 1, 2);
      :     /* For nunits <= 4, index 4 is ambiguous: it may land in arg0 or
      :        arg1 depending on the runtime vector length.  */
17868 0 : poly_uint64 mask_elems[] = {0, 4};
17869 0 : builder_push_elems (builder, mask_elems);
17870 :
17871 0 : vec_perm_indices sel (builder, 2, len);
17872 0 : const char *reason;
      :     /* Expect folding to be rejected, with the diagnostic string filled
      :        in via the optional REASON out-parameter.  */
17873 0 : tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel, &reason);
17874 0 : ASSERT_TRUE (res == NULL_TREE);
17875 0 : ASSERT_TRUE (reason != NULL);
17876 0 : ASSERT_TRUE (!strcmp (reason, "cannot divide selector element by arg len"));
17877 0 : }
17878 0 : }
17879 :
17880 : #undef ARG0
17881 : #undef ARG1
17882 :
17883 : /* Return true if SIZE is of the form C + Cx and C is power of 2. */
17884 :
17885 : static bool
17886 0 : is_simple_vla_size (poly_uint64 size)
17887 : {
      :   /* Reject fixed-length sizes and sizes whose constant coefficient is
      :      not a power of 2.  */
17888 124 : if (size.is_constant ()
17889 : || !pow2p_hwi (size.coeffs[0]))
17890 0 : return false;
      :   /* Accept only the C + Cx shape: coeffs[1] must equal coeffs[0] and
      :      any higher coefficients must be zero.  */
17891 : for (unsigned i = 1; i < ARRAY_SIZE (size.coeffs); ++i)
17892 : if (size.coeffs[i] != (i <= 1 ? size.coeffs[0] : 0))
17893 : return false;
17894 : return true;
17895 : }
17896 :
17897 : /* Execute fold_vec_perm_cst unit tests. */
17898 :
17899 : static void
17900 4 : test ()
17901 : {
17902 4 : machine_mode vnx4si_mode = E_VOIDmode;
17903 4 : machine_mode v4si_mode = E_VOIDmode;
17904 :
17905 4 : machine_mode vmode;
17906 128 : FOR_EACH_MODE_IN_CLASS (vmode, MODE_VECTOR_INT)
17907 : {
17908 : /* Obtain modes corresponding to VNx4SI and V4SI,
17909 : to call mixed mode tests below.
17910 : FIXME: Is there a better way to do this ? */
17911 124 : if (GET_MODE_INNER (vmode) == SImode)
17912 : {
17913 124 : poly_uint64 nunits = GET_MODE_NUNITS (vmode);
17914 124 : if (is_simple_vla_size (nunits)
17915 : && nunits.coeffs[0] == 4)
17916 : vnx4si_mode = vmode;
17917 124 : else if (known_eq (nunits, poly_uint64 (4)))
17918 124 : v4si_mode = vmode;
17919 : }
17920 :
      :       /* The per-nunits tests below are only meaningful for supported
      :          VLA (C + Cx) modes; skip fixed-length or unsupported modes.  */
17921 124 : if (!is_simple_vla_size (GET_MODE_NUNITS (vmode))
17922 : || !targetm.vector_mode_supported_p (vmode))
17923 124 : continue;
17924 :
      :       /* Dispatch to the test groups whose minimum (or maximum) element
      :          count requirement the mode's constant coefficient satisfies.  */
17925 : poly_uint64 nunits = GET_MODE_NUNITS (vmode);
17926 : test_all_nunits (vmode);
17927 : if (nunits.coeffs[0] >= 2)
17928 : test_nunits_min_2 (vmode);
17929 : if (nunits.coeffs[0] >= 4)
17930 : test_nunits_min_4 (vmode);
17931 : if (nunits.coeffs[0] >= 8)
17932 : test_nunits_min_8 (vmode);
17933 :
17934 : if (nunits.coeffs[0] <= 4)
17935 : test_nunits_max_4 (vmode);
17936 : }
17937 :
      :   /* Mixed VLA/VLS permutation tests, only when both modes exist and are
      :      supported by the target.  */
17938 4 : if (vnx4si_mode != E_VOIDmode && v4si_mode != E_VOIDmode
17939 : && targetm.vector_mode_supported_p (vnx4si_mode)
17940 : && targetm.vector_mode_supported_p (v4si_mode))
17941 : {
17942 : test_vnx4si_v4si (vnx4si_mode, v4si_mode);
17943 : test_v4si_vnx4si (v4si_mode, vnx4si_mode);
17944 : }
17945 4 : }
17946 : } // end of test_fold_vec_perm_cst namespace
17947 :
17948 : /* Verify that various binary operations on vectors are folded
17949 : correctly. */
17950 :
17951 : static void
17952 4 : test_vector_folding ()
17953 : {
17954 4 : tree inner_type = integer_type_node;
17955 4 : tree type = build_vector_type (inner_type, 4);
17956 4 : tree zero = build_zero_cst (type);
17957 4 : tree one = build_one_cst (type);
      :   /* index = { 0, 1, 2, 3 }: differs from ONE in some but not all
      :      elements, so element-wise and whole-vector comparisons diverge.  */
17958 4 : tree index = build_index_vector (type, 0, 1);
17959 :
17960 : /* Verify equality tests that return a scalar boolean result. */
17961 4 : tree res_type = boolean_type_node;
17962 4 : ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, one)));
17963 4 : ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero)));
17964 4 : ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one)));
17965 4 : ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
      :   /* index != one holds because at least one element differs, even
      :      though index[1] == 1.  */
17966 4 : ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, index, one)));
17967 4 : ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
17968 : index, one)));
17969 4 : ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type,
17970 : index, index)));
17971 4 : ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
17972 : index, index)));
17973 4 : }
17974 :
17975 : /* Verify folding of VEC_DUPLICATE_EXPRs. */
17976 :
17977 : static void
17978 4 : test_vec_duplicate_folding ()
17979 : {
17980 4 : scalar_int_mode int_mode = SCALAR_INT_TYPE_MODE (ssizetype);
17981 4 : machine_mode vec_mode = targetm.vectorize.preferred_simd_mode (int_mode);
17982 : /* This will be 1 if VEC_MODE isn't a vector mode. */
17983 8 : poly_uint64 nunits = GET_MODE_NUNITS (vec_mode);
17984 :
      :   /* Folding VEC_DUPLICATE_EXPR of the constant 5 must yield the same
      :      tree as building a vector constant from that value directly.  */
17985 4 : tree type = build_vector_type (ssizetype, nunits);
17986 4 : tree dup5_expr = fold_unary (VEC_DUPLICATE_EXPR, type, ssize_int (5));
17987 4 : tree dup5_cst = build_vector_from_val (type, ssize_int (5));
17988 4 : ASSERT_TRUE (operand_equal_p (dup5_expr, dup5_cst, 0));
17989 4 : }
17990 :
17991 : /* Run all of the selftests within this file. */
17992 :
17993 : void
17994 4 : fold_const_cc_tests ()
17995 : {
      :   /* Entry point called by the selftest framework; runs every fold-const
      :      unit-test group defined in this file.  */
17996 4 : test_arithmetic_folding ();
17997 4 : test_vector_folding ();
17998 4 : test_vec_duplicate_folding ();
17999 4 : test_fold_vec_perm_cst::test ();
18000 4 : test_operand_equality::test ();
18001 4 : }
18002 :
18003 : } // namespace selftest
18004 :
18005 : #endif /* CHECKING_P */
|