Branch data Line data Source code
1 : : /* Fold a constant sub-tree into a single node for C-compiler
2 : : Copyright (C) 1987-2025 Free Software Foundation, Inc.
3 : :
4 : : This file is part of GCC.
5 : :
6 : : GCC is free software; you can redistribute it and/or modify it under
7 : : the terms of the GNU General Public License as published by the Free
8 : : Software Foundation; either version 3, or (at your option) any later
9 : : version.
10 : :
11 : : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 : : WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 : : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 : : for more details.
15 : :
16 : : You should have received a copy of the GNU General Public License
17 : : along with GCC; see the file COPYING3. If not see
18 : : <http://www.gnu.org/licenses/>. */
19 : :
20 : : /*@@ This file should be rewritten to use an arbitrary precision
21 : : @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 : : @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 : : @@ The routines that translate from the ap rep should
      24 :             :   @@ warn if precision etc. is lost.
25 : : @@ This would also make life easier when this technology is used
26 : : @@ for cross-compilers. */
27 : :
28 : : /* The entry points in this file are fold, size_int_wide and size_binop.
29 : :
30 : : fold takes a tree as argument and returns a simplified tree.
31 : :
32 : : size_binop takes a tree code for an arithmetic operation
33 : : and two operands that are trees, and produces a tree for the
34 : : result, assuming the type comes from `sizetype'.
35 : :
36 : : size_int takes an integer value, and creates a tree constant
37 : : with type from `sizetype'.
38 : :
39 : : Note: Since the folders get called on non-gimple code as well as
40 : : gimple code, we need to handle GIMPLE tuples as well as their
41 : : corresponding tree equivalents. */
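/* A minimal usage sketch of these entry points (illustrative only; it
   assumes the usual tree-building helpers from tree.h and fold-const.h):

     tree bytes = size_binop (MULT_EXPR, size_int (4), size_int (8));
     // An INTEGER_CST of type sizetype with value 32.

     tree sum = fold_build2 (PLUS_EXPR, integer_type_node,
                             build_int_cst (integer_type_node, 1),
                             build_int_cst (integer_type_node, 2));
     // Folds directly to an INTEGER_CST 3 rather than a PLUS_EXPR node.  */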
42 : :
43 : : #define INCLUDE_ALGORITHM
44 : : #include "config.h"
45 : : #include "system.h"
46 : : #include "coretypes.h"
47 : : #include "backend.h"
48 : : #include "target.h"
49 : : #include "rtl.h"
50 : : #include "tree.h"
51 : : #include "gimple.h"
52 : : #include "predict.h"
53 : : #include "memmodel.h"
54 : : #include "tm_p.h"
55 : : #include "tree-ssa-operands.h"
56 : : #include "optabs-query.h"
57 : : #include "cgraph.h"
58 : : #include "diagnostic-core.h"
59 : : #include "flags.h"
60 : : #include "alias.h"
61 : : #include "fold-const.h"
62 : : #include "fold-const-call.h"
63 : : #include "stor-layout.h"
64 : : #include "calls.h"
65 : : #include "tree-iterator.h"
66 : : #include "expr.h"
67 : : #include "intl.h"
68 : : #include "langhooks.h"
69 : : #include "tree-eh.h"
70 : : #include "gimplify.h"
71 : : #include "tree-dfa.h"
72 : : #include "builtins.h"
73 : : #include "generic-match.h"
74 : : #include "gimple-iterator.h"
75 : : #include "gimple-fold.h"
76 : : #include "tree-into-ssa.h"
77 : : #include "md5.h"
78 : : #include "case-cfn-macros.h"
79 : : #include "stringpool.h"
80 : : #include "tree-vrp.h"
81 : : #include "tree-ssanames.h"
82 : : #include "selftest.h"
83 : : #include "stringpool.h"
84 : : #include "attribs.h"
85 : : #include "tree-vector-builder.h"
86 : : #include "vec-perm-indices.h"
87 : : #include "asan.h"
88 : : #include "gimple-range.h"
89 : :
90 : : /* Nonzero if we are folding constants inside an initializer or a C++
91 : : manifestly-constant-evaluated context; zero otherwise.
      92 :             :    Should be used when folding in an initializer enables additional
93 : : optimizations. */
94 : : int folding_initializer = 0;
95 : :
      96 :             : /* True if we are folding in a C++ manifestly-constant-evaluated context;
      97 :             :    false otherwise.
98 : : Should be used when certain constructs shouldn't be optimized
99 : : during folding in that context. */
100 : : bool folding_cxx_constexpr = false;
101 : :
102 : : /* The following constants represent a bit based encoding of GCC's
     103 :             :    comparison operators.  This encoding simplifies transformations on
     104 :             :    relational comparisons, such as combining them with AND and OR.  */
105 : : enum comparison_code {
106 : : COMPCODE_FALSE = 0,
107 : : COMPCODE_LT = 1,
108 : : COMPCODE_EQ = 2,
109 : : COMPCODE_LE = 3,
110 : : COMPCODE_GT = 4,
111 : : COMPCODE_LTGT = 5,
112 : : COMPCODE_GE = 6,
113 : : COMPCODE_ORD = 7,
114 : : COMPCODE_UNORD = 8,
115 : : COMPCODE_UNLT = 9,
116 : : COMPCODE_UNEQ = 10,
117 : : COMPCODE_UNLE = 11,
118 : : COMPCODE_UNGT = 12,
119 : : COMPCODE_NE = 13,
120 : : COMPCODE_UNGE = 14,
121 : : COMPCODE_TRUE = 15
122 : : };
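/* Worked illustration of the encoding (not used by the code itself):
   COMPCODE_LT | COMPCODE_EQ is 1 | 2, i.e. 3 (COMPCODE_LE), mirroring the
   fact that (a < b) || (a == b) is (a <= b).  Likewise COMPCODE_LE &
   COMPCODE_GE is 3 & 6, i.e. 2 (COMPCODE_EQ), mirroring (a <= b) && (a >= b)
   being (a == b).  The bit with value 8 records the unordered (NaN) case,
   so COMPCODE_UNLT == COMPCODE_UNORD | COMPCODE_LT.  */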
123 : :
124 : : static bool negate_expr_p (tree);
125 : : static tree negate_expr (tree);
126 : : static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
127 : : static enum comparison_code comparison_to_compcode (enum tree_code);
128 : : static enum tree_code compcode_to_comparison (enum comparison_code);
129 : : static bool twoval_comparison_p (tree, tree *, tree *);
130 : : static tree eval_subst (location_t, tree, tree, tree, tree, tree);
131 : : static tree optimize_bit_field_compare (location_t, enum tree_code,
132 : : tree, tree, tree);
133 : : static bool simple_operand_p (const_tree);
134 : : static tree range_binop (enum tree_code, tree, tree, int, tree, int);
135 : : static tree range_predecessor (tree);
136 : : static tree range_successor (tree);
137 : : static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
138 : : static tree fold_cond_expr_with_comparison (location_t, tree, enum tree_code,
139 : : tree, tree, tree, tree);
140 : : static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
141 : : static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
142 : : static tree fold_binary_op_with_conditional_arg (location_t,
143 : : enum tree_code, tree,
144 : : tree, tree,
145 : : tree, tree, int);
146 : : static tree fold_negate_const (tree, tree);
147 : : static tree fold_not_const (const_tree, tree);
148 : : static tree fold_relational_const (enum tree_code, tree, tree, tree);
149 : : static tree fold_convert_const (enum tree_code, tree, tree);
150 : : static tree fold_view_convert_expr (tree, tree);
151 : : static tree fold_negate_expr (location_t, tree);
152 : :
153 : : /* This is a helper function to detect min/max for some operands of COND_EXPR.
154 : : The form is "(EXP0 CMP EXP1) ? EXP2 : EXP3". */
155 : : tree_code
156 : 134807 : minmax_from_comparison (tree_code cmp, tree exp0, tree exp1, tree exp2, tree exp3)
157 : : {
158 : 134807 : enum tree_code code = ERROR_MARK;
159 : :
160 : 134807 : if (HONOR_NANS (exp0) || HONOR_SIGNED_ZEROS (exp0))
161 : 11 : return ERROR_MARK;
162 : :
163 : 134796 : if (!operand_equal_p (exp0, exp2))
164 : : return ERROR_MARK;
165 : :
166 : 134796 : if (TREE_CODE (exp3) == INTEGER_CST && TREE_CODE (exp1) == INTEGER_CST)
167 : : {
168 : 132177 : if (wi::to_widest (exp1) == (wi::to_widest (exp3) - 1))
169 : : {
     170 :             :       /* X <= Y - 1 is the same as X < Y.  */
171 : 76843 : if (cmp == LE_EXPR)
172 : : code = LT_EXPR;
     173 :             :       /* X > Y - 1 is the same as X >= Y.  */
174 : 76480 : if (cmp == GT_EXPR)
175 : : code = GE_EXPR;
176 : : /* a != MIN_RANGE<a> ? a : MIN_RANGE<a>+1 -> MAX_EXPR<MIN_RANGE<a>+1, a> */
177 : 67130 : if (cmp == NE_EXPR && TREE_CODE (exp0) == SSA_NAME)
178 : : {
179 : 16809 : int_range_max r;
180 : 33618 : get_range_query (cfun)->range_of_expr (r, exp0);
181 : 16809 : if (r.undefined_p ())
182 : 0 : r.set_varying (TREE_TYPE (exp0));
183 : :
184 : 16809 : widest_int min = widest_int::from (r.lower_bound (),
185 : 33618 : TYPE_SIGN (TREE_TYPE (exp0)));
186 : 16809 : if (min == wi::to_widest (exp1))
187 : 578 : code = MAX_EXPR;
188 : 16809 : }
189 : : }
190 : 132177 : if (wi::to_widest (exp1) == (wi::to_widest (exp3) + 1))
191 : : {
     192 :             :       /* X < Y + 1 is the same as X <= Y.  */
193 : 988 : if (cmp == LT_EXPR)
194 : : code = LE_EXPR;
     195 :             :       /* X >= Y + 1 is the same as X > Y.  */
196 : 960 : if (cmp == GE_EXPR)
197 : : code = GT_EXPR;
198 : : /* a != MAX_RANGE<a> ? a : MAX_RANGE<a>-1 -> MIN_EXPR<MIN_RANGE<a>-1, a> */
199 : 866 : if (cmp == NE_EXPR && TREE_CODE (exp0) == SSA_NAME)
200 : : {
201 : 479 : int_range_max r;
202 : 958 : get_range_query (cfun)->range_of_expr (r, exp0);
203 : 479 : if (r.undefined_p ())
204 : 0 : r.set_varying (TREE_TYPE (exp0));
205 : :
206 : 479 : widest_int max = widest_int::from (r.upper_bound (),
207 : 958 : TYPE_SIGN (TREE_TYPE (exp0)));
208 : 479 : if (max == wi::to_widest (exp1))
209 : 43 : code = MIN_EXPR;
210 : 479 : }
211 : : }
212 : : }
213 : 132177 : if (code != ERROR_MARK
214 : 134796 : || operand_equal_p (exp1, exp3))
215 : : {
216 : 24110 : if (cmp == LT_EXPR || cmp == LE_EXPR)
217 : : code = MIN_EXPR;
218 : 21586 : if (cmp == GT_EXPR || cmp == GE_EXPR)
219 : 20852 : code = MAX_EXPR;
220 : : }
221 : : return code;
222 : : }
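/* Two concrete cases (illustrative only, for integer operands): for
   "x < y ? x : y" the caller passes cmp == LT_EXPR with exp1 and exp3
   both equal to y, so the function returns MIN_EXPR.  For
   "x <= 9 ? x : 10" the constants differ by one, the comparison is first
   rewritten as x < 10, and the result is again MIN_EXPR.  */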
223 : :
224 : : /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
225 : : Otherwise, return LOC. */
226 : :
227 : : static location_t
228 : 2191507 : expr_location_or (tree t, location_t loc)
229 : : {
230 : 609609 : location_t tloc = EXPR_LOCATION (t);
231 : 2177260 : return tloc == UNKNOWN_LOCATION ? loc : tloc;
232 : : }
233 : :
234 : : /* Similar to protected_set_expr_location, but never modify x in place,
235 : : if location can and needs to be set, unshare it. */
236 : :
237 : : tree
238 : 3839399 : protected_set_expr_location_unshare (tree x, location_t loc)
239 : : {
240 : 3839399 : if (CAN_HAVE_LOCATION_P (x)
241 : 3353379 : && EXPR_LOCATION (x) != loc
242 : 1591342 : && !(TREE_CODE (x) == SAVE_EXPR
243 : 795890 : || TREE_CODE (x) == TARGET_EXPR
244 : : || TREE_CODE (x) == BIND_EXPR))
245 : : {
246 : 795140 : x = copy_node (x);
247 : 795140 : SET_EXPR_LOCATION (x, loc);
248 : : }
249 : 3839399 : return x;
250 : : }
251 : :
252 : : /* If ARG2 divides ARG1 with zero remainder, carries out the exact
253 : : division and returns the quotient. Otherwise returns
254 : : NULL_TREE. */
255 : :
256 : : tree
257 : 0 : div_if_zero_remainder (const_tree arg1, const_tree arg2)
258 : : {
259 : 0 : widest_int quo;
260 : :
261 : 0 : if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
262 : : SIGNED, &quo))
263 : 0 : return wide_int_to_tree (TREE_TYPE (arg1), quo);
264 : :
265 : : return NULL_TREE;
266 : 0 : }
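/* For instance (illustrative only), INTEGER_CST arguments 12 and 4 yield
   an INTEGER_CST 3, while 13 and 4 leave a nonzero remainder and the
   result is NULL_TREE.  */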
267 : :
268 : : /* This is nonzero if we should defer warnings about undefined
269 : : overflow. This facility exists because these warnings are a
270 : : special case. The code to estimate loop iterations does not want
271 : : to issue any warnings, since it works with expressions which do not
272 : : occur in user code. Various bits of cleanup code call fold(), but
273 : : only use the result if it has certain characteristics (e.g., is a
274 : : constant); that code only wants to issue a warning if the result is
275 : : used. */
276 : :
277 : : static int fold_deferring_overflow_warnings;
278 : :
279 : : /* If a warning about undefined overflow is deferred, this is the
280 : : warning. Note that this may cause us to turn two warnings into
281 : : one, but that is fine since it is sufficient to only give one
282 : : warning per expression. */
283 : :
284 : : static const char* fold_deferred_overflow_warning;
285 : :
286 : : /* If a warning about undefined overflow is deferred, this is the
287 : : level at which the warning should be emitted. */
288 : :
289 : : static enum warn_strict_overflow_code fold_deferred_overflow_code;
290 : :
291 : : /* Start deferring overflow warnings. We could use a stack here to
292 : : permit nested calls, but at present it is not necessary. */
293 : :
294 : : void
295 : 1068830910 : fold_defer_overflow_warnings (void)
296 : : {
297 : 1068830910 : ++fold_deferring_overflow_warnings;
298 : 1068830910 : }
299 : :
300 : : /* Stop deferring overflow warnings. If there is a pending warning,
301 : : and ISSUE is true, then issue the warning if appropriate. STMT is
302 : : the statement with which the warning should be associated (used for
303 : : location information); STMT may be NULL. CODE is the level of the
304 : : warning--a warn_strict_overflow_code value. This function will use
305 : : the smaller of CODE and the deferred code when deciding whether to
306 : : issue the warning. CODE may be zero to mean to always use the
307 : : deferred code. */
308 : :
309 : : void
310 : 1068830910 : fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
311 : : {
312 : 1068830910 : const char *warnmsg;
313 : 1068830910 : location_t locus;
314 : :
315 : 1068830910 : gcc_assert (fold_deferring_overflow_warnings > 0);
316 : 1068830910 : --fold_deferring_overflow_warnings;
317 : 1068830910 : if (fold_deferring_overflow_warnings > 0)
318 : : {
319 : 8268368 : if (fold_deferred_overflow_warning != NULL
320 : 1776137 : && code != 0
321 : 0 : && code < (int) fold_deferred_overflow_code)
322 : 0 : fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
323 : 8268368 : return;
324 : : }
325 : :
326 : 1060562542 : warnmsg = fold_deferred_overflow_warning;
327 : 1060562542 : fold_deferred_overflow_warning = NULL;
328 : :
329 : 1060562542 : if (!issue || warnmsg == NULL)
330 : : return;
331 : :
332 : 10680 : if (warning_suppressed_p (stmt, OPT_Wstrict_overflow))
333 : : return;
334 : :
335 : : /* Use the smallest code level when deciding to issue the
336 : : warning. */
337 : 10680 : if (code == 0 || code > (int) fold_deferred_overflow_code)
338 : 10680 : code = fold_deferred_overflow_code;
339 : :
340 : 10680 : if (!issue_strict_overflow_warning (code))
341 : : return;
342 : :
343 : 0 : if (stmt == NULL)
344 : : locus = input_location;
345 : : else
346 : 0 : locus = gimple_location (stmt);
347 : 0 : warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
348 : : }
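/* A typical caller pattern, in the spirit of the loop-iteration code
   mentioned above (a sketch only; `stmt', `code', `type' and the operands
   stand for whatever the caller already has):

     fold_defer_overflow_warnings ();
     tree folded = fold_binary (code, type, op0, op1);
     bool used = folded != NULL_TREE && TREE_CODE (folded) == INTEGER_CST;
     fold_undefer_overflow_warnings (used, stmt, 0);

   so a -Wstrict-overflow diagnostic is emitted only when the folded
   result is actually kept.  */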
349 : :
350 : : /* Stop deferring overflow warnings, ignoring any deferred
351 : : warnings. */
352 : :
353 : : void
354 : 174044660 : fold_undefer_and_ignore_overflow_warnings (void)
355 : : {
356 : 174044660 : fold_undefer_overflow_warnings (false, NULL, 0);
357 : 174044660 : }
358 : :
359 : : /* Whether we are deferring overflow warnings. */
360 : :
361 : : bool
362 : 303091955 : fold_deferring_overflow_warnings_p (void)
363 : : {
364 : 303091955 : return fold_deferring_overflow_warnings > 0;
365 : : }
366 : :
367 : : /* This is called when we fold something based on the fact that signed
368 : : overflow is undefined. */
369 : :
370 : : void
371 : 1707739 : fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
372 : : {
373 : 1707739 : if (fold_deferring_overflow_warnings > 0)
374 : : {
375 : 1654233 : if (fold_deferred_overflow_warning == NULL
376 : 738992 : || wc < fold_deferred_overflow_code)
377 : : {
378 : 937871 : fold_deferred_overflow_warning = gmsgid;
379 : 937871 : fold_deferred_overflow_code = wc;
380 : : }
381 : : }
382 : 53506 : else if (issue_strict_overflow_warning (wc))
383 : 7 : warning (OPT_Wstrict_overflow, gmsgid);
384 : 1707739 : }
385 : :
     386 :             : /* Return true if the built-in mathematical function specified by FN
387 : : is odd, i.e. -f(x) == f(-x). */
388 : :
389 : : bool
390 : 1883732 : negate_mathfn_p (combined_fn fn)
391 : : {
392 : 1883732 : switch (fn)
393 : : {
394 : : CASE_CFN_ASIN:
395 : : CASE_CFN_ASIN_FN:
396 : : CASE_CFN_ASINH:
397 : : CASE_CFN_ASINH_FN:
398 : : CASE_CFN_ATAN:
399 : : CASE_CFN_ATAN_FN:
400 : : CASE_CFN_ATANH:
401 : : CASE_CFN_ATANH_FN:
402 : : CASE_CFN_CASIN:
403 : : CASE_CFN_CASIN_FN:
404 : : CASE_CFN_CASINH:
405 : : CASE_CFN_CASINH_FN:
406 : : CASE_CFN_CATAN:
407 : : CASE_CFN_CATAN_FN:
408 : : CASE_CFN_CATANH:
409 : : CASE_CFN_CATANH_FN:
410 : : CASE_CFN_CBRT:
411 : : CASE_CFN_CBRT_FN:
412 : : CASE_CFN_CPROJ:
413 : : CASE_CFN_CPROJ_FN:
414 : : CASE_CFN_CSIN:
415 : : CASE_CFN_CSIN_FN:
416 : : CASE_CFN_CSINH:
417 : : CASE_CFN_CSINH_FN:
418 : : CASE_CFN_CTAN:
419 : : CASE_CFN_CTAN_FN:
420 : : CASE_CFN_CTANH:
421 : : CASE_CFN_CTANH_FN:
422 : : CASE_CFN_ERF:
423 : : CASE_CFN_ERF_FN:
424 : : CASE_CFN_LLROUND:
425 : : CASE_CFN_LLROUND_FN:
426 : : CASE_CFN_LROUND:
427 : : CASE_CFN_LROUND_FN:
428 : : CASE_CFN_ROUND:
429 : : CASE_CFN_ROUNDEVEN:
430 : : CASE_CFN_ROUNDEVEN_FN:
431 : : CASE_CFN_SIN:
432 : : CASE_CFN_SIN_FN:
433 : : CASE_CFN_SINH:
434 : : CASE_CFN_SINH_FN:
435 : : CASE_CFN_TAN:
436 : : CASE_CFN_TAN_FN:
437 : : CASE_CFN_TANH:
438 : : CASE_CFN_TANH_FN:
439 : : CASE_CFN_TRUNC:
440 : : CASE_CFN_TRUNC_FN:
441 : : return true;
442 : :
443 : 390 : CASE_CFN_LLRINT:
444 : 390 : CASE_CFN_LLRINT_FN:
445 : 390 : CASE_CFN_LRINT:
446 : 390 : CASE_CFN_LRINT_FN:
447 : 390 : CASE_CFN_NEARBYINT:
448 : 390 : CASE_CFN_NEARBYINT_FN:
449 : 390 : CASE_CFN_RINT:
450 : 390 : CASE_CFN_RINT_FN:
451 : 390 : return !flag_rounding_math;
452 : :
453 : 1879759 : default:
454 : 1879759 : break;
455 : : }
456 : 1879759 : return false;
457 : : }
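/* For example (illustrative only), the sin family is odd, so -sin(x) may
   be folded to sin(-x) and the CASE_CFN_SIN entries above return true;
   cos is even rather than odd and is therefore not listed.  The
   rint/nearbyint group is odd only when the rounding mode is not
   directional, hence the !flag_rounding_math guard.  */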
458 : :
459 : : /* Check whether we may negate an integer constant T without causing
460 : : overflow. */
461 : :
462 : : bool
463 : 2922168 : may_negate_without_overflow_p (const_tree t)
464 : : {
465 : 2922168 : tree type;
466 : :
467 : 2922168 : gcc_assert (TREE_CODE (t) == INTEGER_CST);
468 : :
469 : 2922168 : type = TREE_TYPE (t);
470 : 2922168 : if (TYPE_UNSIGNED (type))
471 : : return false;
472 : :
473 : 2922168 : return !wi::only_sign_bit_p (wi::to_wide (t));
474 : : }
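/* Concretely (illustrative only): for a 32-bit signed constant, only
   INT_MIN (just the sign bit set) makes this return false, since -INT_MIN
   is not representable in the same type.  Unsigned constants also return
   false here; negate_expr_p handles them separately via its TYPE_UNSIGNED
   check.  */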
475 : :
476 : : /* Determine whether an expression T can be cheaply negated using
477 : : the function negate_expr without introducing undefined overflow. */
478 : :
479 : : static bool
480 : 25153448 : negate_expr_p (tree t)
481 : : {
482 : 25304035 : tree type;
483 : :
484 : 25304035 : if (t == 0)
485 : : return false;
486 : :
487 : 25304035 : type = TREE_TYPE (t);
488 : :
489 : 25304035 : STRIP_SIGN_NOPS (t);
490 : 25304035 : switch (TREE_CODE (t))
491 : : {
492 : 1408125 : case INTEGER_CST:
493 : 1408125 : if (INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type))
494 : : return true;
495 : :
496 : : /* Check that -CST will not overflow type. */
497 : 355863 : return may_negate_without_overflow_p (t);
498 : 515 : case BIT_NOT_EXPR:
499 : 515 : return (INTEGRAL_TYPE_P (type)
500 : 515 : && TYPE_OVERFLOW_WRAPS (type));
501 : :
502 : : case FIXED_CST:
503 : : return true;
504 : :
505 : 1308 : case NEGATE_EXPR:
506 : 1308 : return !TYPE_OVERFLOW_SANITIZED (type);
507 : :
508 : 1302468 : case REAL_CST:
509 : : /* We want to canonicalize to positive real constants. Pretend
510 : : that only negative ones can be easily negated. */
511 : 1302468 : return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
512 : :
513 : 452 : case COMPLEX_CST:
514 : 452 : return negate_expr_p (TREE_REALPART (t))
515 : 568 : && negate_expr_p (TREE_IMAGPART (t));
516 : :
517 : 97 : case VECTOR_CST:
518 : 97 : {
519 : 97 : if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
520 : : return true;
521 : :
522 : : /* Steps don't prevent negation. */
523 : 97 : unsigned int count = vector_cst_encoded_nelts (t);
524 : 194 : for (unsigned int i = 0; i < count; ++i)
525 : 97 : if (!negate_expr_p (VECTOR_CST_ENCODED_ELT (t, i)))
526 : : return false;
527 : :
528 : : return true;
529 : : }
530 : :
531 : 705 : case COMPLEX_EXPR:
532 : 705 : return negate_expr_p (TREE_OPERAND (t, 0))
533 : 705 : && negate_expr_p (TREE_OPERAND (t, 1));
534 : :
535 : 33 : case CONJ_EXPR:
536 : 33 : return negate_expr_p (TREE_OPERAND (t, 0));
537 : :
538 : 1403709 : case PLUS_EXPR:
539 : 1403709 : if (HONOR_SIGN_DEPENDENT_ROUNDING (type)
540 : 1403703 : || HONOR_SIGNED_ZEROS (type)
541 : 2517304 : || (ANY_INTEGRAL_TYPE_P (type)
542 : 1113409 : && ! TYPE_OVERFLOW_WRAPS (type)))
543 : 706249 : return false;
544 : : /* -(A + B) -> (-B) - A. */
545 : 697460 : if (negate_expr_p (TREE_OPERAND (t, 1)))
546 : : return true;
547 : : /* -(A + B) -> (-A) - B. */
548 : 138390 : return negate_expr_p (TREE_OPERAND (t, 0));
549 : :
550 : 245049 : case MINUS_EXPR:
551 : : /* We can't turn -(A-B) into B-A when we honor signed zeros. */
552 : 245049 : return !HONOR_SIGN_DEPENDENT_ROUNDING (type)
553 : 245049 : && !HONOR_SIGNED_ZEROS (type)
554 : 314918 : && (! ANY_INTEGRAL_TYPE_P (type)
555 : 69646 : || TYPE_OVERFLOW_WRAPS (type));
556 : :
557 : 2356933 : case MULT_EXPR:
558 : 2356933 : if (TYPE_UNSIGNED (type))
559 : : break;
     560 :             :       /* (INT_MIN/n) * n doesn't overflow, but it does after negating one
     561 :             :          operand when n is a (negative) power of two.  */
562 : 4110946 : if (INTEGRAL_TYPE_P (TREE_TYPE (t))
563 : 143334 : && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
564 : 2196482 : && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
565 : 0 : && (wi::popcount
566 : 2055473 : (wi::abs (wi::to_wide (TREE_OPERAND (t, 0))))) != 1)
567 : 141009 : || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
568 : 118582 : && (wi::popcount
569 : 4207101 : (wi::abs (wi::to_wide (TREE_OPERAND (t, 1))))) != 1)))
570 : : break;
571 : :
572 : : /* Fall through. */
573 : :
574 : 2341284 : case RDIV_EXPR:
575 : 2341284 : if (! HONOR_SIGN_DEPENDENT_ROUNDING (t))
576 : 2341283 : return negate_expr_p (TREE_OPERAND (t, 1))
577 : 2341283 : || negate_expr_p (TREE_OPERAND (t, 0));
578 : : break;
579 : :
580 : 1758 : case TRUNC_DIV_EXPR:
581 : 1758 : case ROUND_DIV_EXPR:
582 : 1758 : case EXACT_DIV_EXPR:
583 : 1758 : if (TYPE_UNSIGNED (type))
584 : : break;
585 : : /* In general we can't negate A in A / B, because if A is INT_MIN and
586 : : B is not 1 we change the sign of the result. */
587 : 484 : if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
588 : 484 : && negate_expr_p (TREE_OPERAND (t, 0)))
589 : : return true;
590 : : /* In general we can't negate B in A / B, because if A is INT_MIN and
591 : : B is 1, we may turn this into INT_MIN / -1 which is undefined
592 : : and actually traps on some architectures. */
593 : 638 : if (! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
594 : 319 : || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
595 : 553 : || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
596 : 227 : && ! integer_onep (TREE_OPERAND (t, 1))))
597 : 312 : return negate_expr_p (TREE_OPERAND (t, 1));
598 : : break;
599 : :
600 : 4113137 : case NOP_EXPR:
601 : : /* Negate -((double)float) as (double)(-float). */
602 : 4113137 : if (SCALAR_FLOAT_TYPE_P (type))
603 : : {
604 : 12004 : tree tem = strip_float_extensions (t);
605 : 12004 : if (tem != t)
606 : : return negate_expr_p (tem);
607 : : }
608 : : break;
609 : :
610 : 932992 : case CALL_EXPR:
611 : : /* Negate -f(x) as f(-x). */
612 : 932992 : if (negate_mathfn_p (get_call_combined_fn (t)))
613 : 59 : return negate_expr_p (CALL_EXPR_ARG (t, 0));
614 : : break;
615 : :
616 : 637 : case RSHIFT_EXPR:
617 : : /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
618 : 637 : if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
619 : : {
620 : 492 : tree op1 = TREE_OPERAND (t, 1);
621 : 492 : if (wi::to_wide (op1) == element_precision (type) - 1)
622 : : return true;
623 : : }
624 : : break;
625 : :
626 : : default:
627 : : break;
628 : : }
629 : : return false;
630 : : }
631 : :
632 : : /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
     633 :             : /* Given an expression T, return a folded tree for -T, or NULL_TREE if no
     634 :             :    simplification is possible.
635 : : returned. */
636 : :
637 : : static tree
638 : 35344931 : fold_negate_expr_1 (location_t loc, tree t)
639 : : {
640 : 35344931 : tree type = TREE_TYPE (t);
641 : 35344931 : tree tem;
642 : :
643 : 35344931 : switch (TREE_CODE (t))
644 : : {
645 : : /* Convert - (~A) to A + 1. */
646 : 138 : case BIT_NOT_EXPR:
647 : 138 : if (INTEGRAL_TYPE_P (type))
648 : 138 : return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
649 : 138 : build_one_cst (type));
650 : : break;
651 : :
652 : 27462573 : case INTEGER_CST:
653 : 27462573 : tem = fold_negate_const (t, type);
654 : 27462573 : if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
655 : 6384 : || (ANY_INTEGRAL_TYPE_P (type)
656 : 6384 : && !TYPE_OVERFLOW_TRAPS (type)
657 : 6384 : && TYPE_OVERFLOW_WRAPS (type))
658 : 27468264 : || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
659 : : return tem;
660 : : break;
661 : :
662 : 1966622 : case POLY_INT_CST:
663 : 1966622 : case REAL_CST:
664 : 1966622 : case FIXED_CST:
665 : 1966622 : tem = fold_negate_const (t, type);
666 : 1966622 : return tem;
667 : :
668 : 66122 : case COMPLEX_CST:
669 : 66122 : {
670 : 66122 : tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
671 : 66122 : tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
672 : 66122 : if (rpart && ipart)
673 : 66122 : return build_complex (type, rpart, ipart);
674 : : }
675 : : break;
676 : :
677 : 47880 : case VECTOR_CST:
678 : 47880 : {
679 : 47880 : tree_vector_builder elts;
680 : 47880 : elts.new_unary_operation (type, t, true);
681 : 47880 : unsigned int count = elts.encoded_nelts ();
682 : 116912 : for (unsigned int i = 0; i < count; ++i)
683 : : {
684 : 69032 : tree elt = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
685 : 69032 : if (elt == NULL_TREE)
686 : 0 : return NULL_TREE;
687 : 69032 : elts.quick_push (elt);
688 : : }
689 : :
690 : 47880 : return elts.build ();
691 : 47880 : }
692 : :
693 : 78 : case COMPLEX_EXPR:
694 : 78 : if (negate_expr_p (t))
695 : 40 : return fold_build2_loc (loc, COMPLEX_EXPR, type,
696 : 20 : fold_negate_expr (loc, TREE_OPERAND (t, 0)),
697 : 40 : fold_negate_expr (loc, TREE_OPERAND (t, 1)));
698 : : break;
699 : :
700 : 21 : case CONJ_EXPR:
701 : 21 : if (negate_expr_p (t))
702 : 21 : return fold_build1_loc (loc, CONJ_EXPR, type,
703 : 42 : fold_negate_expr (loc, TREE_OPERAND (t, 0)));
704 : : break;
705 : :
706 : 1234 : case NEGATE_EXPR:
707 : 1234 : if (!TYPE_OVERFLOW_SANITIZED (type))
708 : 1221 : return TREE_OPERAND (t, 0);
709 : : break;
710 : :
711 : 613641 : case PLUS_EXPR:
712 : 613641 : if (!HONOR_SIGN_DEPENDENT_ROUNDING (type)
713 : 613641 : && !HONOR_SIGNED_ZEROS (type))
714 : : {
715 : : /* -(A + B) -> (-B) - A. */
716 : 613531 : if (negate_expr_p (TREE_OPERAND (t, 1)))
717 : : {
718 : 562692 : tem = negate_expr (TREE_OPERAND (t, 1));
719 : 562692 : return fold_build2_loc (loc, MINUS_EXPR, type,
720 : 1125384 : tem, TREE_OPERAND (t, 0));
721 : : }
722 : :
723 : : /* -(A + B) -> (-A) - B. */
724 : 50839 : if (negate_expr_p (TREE_OPERAND (t, 0)))
725 : : {
726 : 898 : tem = negate_expr (TREE_OPERAND (t, 0));
727 : 898 : return fold_build2_loc (loc, MINUS_EXPR, type,
728 : 1796 : tem, TREE_OPERAND (t, 1));
729 : : }
730 : : }
731 : : break;
732 : :
733 : 142577 : case MINUS_EXPR:
734 : : /* - (A - B) -> B - A */
735 : 142577 : if (!HONOR_SIGN_DEPENDENT_ROUNDING (type)
736 : 142577 : && !HONOR_SIGNED_ZEROS (type))
737 : 64826 : return fold_build2_loc (loc, MINUS_EXPR, type,
738 : 129652 : TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
739 : : break;
740 : :
741 : 113314 : case MULT_EXPR:
742 : 113314 : if (TYPE_UNSIGNED (type))
743 : : break;
744 : :
745 : : /* Fall through. */
746 : :
747 : 27328 : case RDIV_EXPR:
748 : 27328 : if (! HONOR_SIGN_DEPENDENT_ROUNDING (type))
749 : : {
750 : 27328 : tem = TREE_OPERAND (t, 1);
751 : 27328 : if (negate_expr_p (tem))
752 : 47866 : return fold_build2_loc (loc, TREE_CODE (t), type,
753 : 47866 : TREE_OPERAND (t, 0), negate_expr (tem));
754 : 3395 : tem = TREE_OPERAND (t, 0);
755 : 3395 : if (negate_expr_p (tem))
756 : 68 : return fold_build2_loc (loc, TREE_CODE (t), type,
757 : 136 : negate_expr (tem), TREE_OPERAND (t, 1));
758 : : }
759 : : break;
760 : :
761 : 1463 : case TRUNC_DIV_EXPR:
762 : 1463 : case ROUND_DIV_EXPR:
763 : 1463 : case EXACT_DIV_EXPR:
764 : 1463 : if (TYPE_UNSIGNED (type))
765 : : break;
766 : : /* In general we can't negate A in A / B, because if A is INT_MIN and
767 : : B is not 1 we change the sign of the result. */
768 : 663 : if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
769 : 663 : && negate_expr_p (TREE_OPERAND (t, 0)))
770 : 323 : return fold_build2_loc (loc, TREE_CODE (t), type,
771 : 323 : negate_expr (TREE_OPERAND (t, 0)),
772 : 646 : TREE_OPERAND (t, 1));
773 : : /* In general we can't negate B in A / B, because if A is INT_MIN and
774 : : B is 1, we may turn this into INT_MIN / -1 which is undefined
775 : : and actually traps on some architectures. */
776 : 680 : if ((! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
777 : 340 : || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
778 : 256 : || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
779 : 233 : && ! integer_onep (TREE_OPERAND (t, 1))))
780 : 657 : && negate_expr_p (TREE_OPERAND (t, 1)))
781 : 622 : return fold_build2_loc (loc, TREE_CODE (t), type,
782 : 311 : TREE_OPERAND (t, 0),
783 : 622 : negate_expr (TREE_OPERAND (t, 1)));
784 : : break;
785 : :
786 : 1563384 : case NOP_EXPR:
787 : : /* Convert -((double)float) into (double)(-float). */
788 : 1563384 : if (SCALAR_FLOAT_TYPE_P (type))
789 : : {
790 : 10945 : tem = strip_float_extensions (t);
791 : 10945 : if (tem != t && negate_expr_p (tem))
792 : 0 : return fold_convert_loc (loc, type, negate_expr (tem));
793 : : }
794 : : break;
795 : :
796 : 294835 : case CALL_EXPR:
797 : : /* Negate -f(x) as f(-x). */
798 : 294835 : if (negate_mathfn_p (get_call_combined_fn (t))
799 : 296122 : && negate_expr_p (CALL_EXPR_ARG (t, 0)))
800 : : {
801 : 1191 : tree fndecl, arg;
802 : :
803 : 1191 : fndecl = get_callee_fndecl (t);
804 : 1191 : arg = negate_expr (CALL_EXPR_ARG (t, 0));
805 : 1191 : return build_call_expr_loc (loc, fndecl, 1, arg);
806 : : }
807 : : break;
808 : :
809 : 352 : case RSHIFT_EXPR:
810 : : /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
811 : 352 : if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
812 : : {
813 : 334 : tree op1 = TREE_OPERAND (t, 1);
814 : 334 : if (wi::to_wide (op1) == element_precision (type) - 1)
815 : : {
816 : 72 : tree ntype = TYPE_UNSIGNED (type)
817 : 72 : ? signed_type_for (type)
818 : 72 : : unsigned_type_for (type);
819 : 72 : tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
820 : 72 : temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
821 : 72 : return fold_convert_loc (loc, type, temp);
822 : : }
823 : : }
824 : : break;
825 : :
826 : : default:
827 : : break;
828 : : }
829 : :
830 : : return NULL_TREE;
831 : : }
832 : :
833 : : /* A wrapper for fold_negate_expr_1. */
834 : :
835 : : static tree
836 : 35344931 : fold_negate_expr (location_t loc, tree t)
837 : : {
838 : 35344931 : tree type = TREE_TYPE (t);
839 : 35344931 : STRIP_SIGN_NOPS (t);
840 : 35344931 : tree tem = fold_negate_expr_1 (loc, t);
841 : 35344931 : if (tem == NULL_TREE)
842 : : return NULL_TREE;
843 : 30198803 : return fold_convert_loc (loc, type, tem);
844 : : }
845 : :
846 : : /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
847 : : negated in a simpler way. Also allow for T to be NULL_TREE, in which case
848 : : return NULL_TREE. */
849 : :
850 : : static tree
851 : 3138328 : negate_expr (tree t)
852 : : {
853 : 3138328 : tree type, tem;
854 : 3138328 : location_t loc;
855 : :
856 : 3138328 : if (t == NULL_TREE)
857 : : return NULL_TREE;
858 : :
859 : 3138328 : loc = EXPR_LOCATION (t);
860 : 3138328 : type = TREE_TYPE (t);
861 : 3138328 : STRIP_SIGN_NOPS (t);
862 : :
863 : 3138328 : tem = fold_negate_expr (loc, t);
864 : 3138328 : if (!tem)
865 : 1510252 : tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
866 : 3138328 : return fold_convert_loc (loc, type, tem);
867 : : }
868 : :
869 : : /* Split a tree IN into a constant, literal and variable parts that could be
870 : : combined with CODE to make IN. "constant" means an expression with
871 : : TREE_CONSTANT but that isn't an actual constant. CODE must be a
872 : : commutative arithmetic operation. Store the constant part into *CONP,
873 : : the literal in *LITP and return the variable part. If a part isn't
874 : : present, set it to null. If the tree does not decompose in this way,
875 : : return the entire tree as the variable part and the other parts as null.
876 : :
877 : : If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
878 : : case, we negate an operand that was subtracted. Except if it is a
879 : : literal for which we use *MINUS_LITP instead.
880 : :
881 : : If NEGATE_P is true, we are negating all of IN, again except a literal
882 : : for which we use *MINUS_LITP instead. If a variable part is of pointer
883 : : type, it is negated after converting to TYPE. This prevents us from
884 : : generating illegal MINUS pointer expression. LOC is the location of
885 : : the converted variable part.
886 : :
887 : : If IN is itself a literal or constant, return it as appropriate.
888 : :
889 : : Note that we do not guarantee that any of the three values will be the
890 : : same type as IN, but they will have the same signedness and mode. */
891 : :
892 : : static tree
893 : 200857960 : split_tree (tree in, tree type, enum tree_code code,
894 : : tree *minus_varp, tree *conp, tree *minus_conp,
895 : : tree *litp, tree *minus_litp, int negate_p)
896 : : {
897 : 200857960 : tree var = 0;
898 : 200857960 : *minus_varp = 0;
899 : 200857960 : *conp = 0;
900 : 200857960 : *minus_conp = 0;
901 : 200857960 : *litp = 0;
902 : 200857960 : *minus_litp = 0;
903 : :
904 : : /* Strip any conversions that don't change the machine mode or signedness. */
905 : 200857960 : STRIP_SIGN_NOPS (in);
906 : :
907 : 200857960 : if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
908 : 128320570 : || TREE_CODE (in) == FIXED_CST)
909 : 72537390 : *litp = in;
910 : 128320570 : else if (TREE_CODE (in) == code
911 : 128320570 : || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
912 : 124160320 : && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
913 : : /* We can associate addition and subtraction together (even
914 : : though the C standard doesn't say so) for integers because
915 : : the value is not affected. For reals, the value might be
916 : : affected, so we can't. */
917 : 124160320 : && ((code == PLUS_EXPR && TREE_CODE (in) == POINTER_PLUS_EXPR)
918 : 53232983 : || (code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
919 : 122645272 : || (code == MINUS_EXPR
920 : 19411945 : && (TREE_CODE (in) == PLUS_EXPR
921 : 17796062 : || TREE_CODE (in) == POINTER_PLUS_EXPR)))))
922 : : {
923 : 7601521 : tree op0 = TREE_OPERAND (in, 0);
924 : 7601521 : tree op1 = TREE_OPERAND (in, 1);
925 : 7601521 : bool neg1_p = TREE_CODE (in) == MINUS_EXPR;
926 : 7601521 : bool neg_litp_p = false, neg_conp_p = false, neg_var_p = false;
927 : :
928 : : /* First see if either of the operands is a literal, then a constant. */
929 : 7601521 : if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
930 : 7397275 : || TREE_CODE (op0) == FIXED_CST)
931 : 204246 : *litp = op0, op0 = 0;
932 : 7397275 : else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
933 : 4952713 : || TREE_CODE (op1) == FIXED_CST)
934 : 2444562 : *litp = op1, neg_litp_p = neg1_p, op1 = 0;
935 : :
936 : 7601521 : if (op0 != 0 && TREE_CONSTANT (op0))
937 : 14515 : *conp = op0, op0 = 0;
938 : 7587006 : else if (op1 != 0 && TREE_CONSTANT (op1))
939 : 43605 : *conp = op1, neg_conp_p = neg1_p, op1 = 0;
940 : :
941 : : /* If we haven't dealt with either operand, this is not a case we can
942 : : decompose. Otherwise, VAR is either of the ones remaining, if any. */
943 : 7601521 : if (op0 != 0 && op1 != 0)
944 : : var = in;
945 : 2699937 : else if (op0 != 0)
946 : : var = op0;
947 : : else
948 : 218761 : var = op1, neg_var_p = neg1_p;
949 : :
950 : : /* Now do any needed negations. */
951 : 7601521 : if (neg_litp_p)
952 : 17720 : *minus_litp = *litp, *litp = 0;
953 : 7601521 : if (neg_conp_p && *conp)
954 : 3237 : *minus_conp = *conp, *conp = 0;
955 : 7601521 : if (neg_var_p && var)
956 : 208408 : *minus_varp = var, var = 0;
957 : : }
958 : 120719049 : else if (TREE_CONSTANT (in))
959 : 738283 : *conp = in;
960 : 119980766 : else if (TREE_CODE (in) == BIT_NOT_EXPR
961 : 425710 : && code == PLUS_EXPR)
962 : : {
963 : : /* -1 - X is folded to ~X, undo that here. Do _not_ do this
964 : : when IN is constant. */
965 : 338123 : *litp = build_minus_one_cst (type);
966 : 338123 : *minus_varp = TREE_OPERAND (in, 0);
967 : : }
968 : : else
969 : : var = in;
970 : :
971 : 200857960 : if (negate_p)
972 : : {
973 : 10628047 : if (*litp)
974 : 1057073 : *minus_litp = *litp, *litp = 0;
975 : 9570974 : else if (*minus_litp)
976 : 162 : *litp = *minus_litp, *minus_litp = 0;
977 : 10628047 : if (*conp)
978 : 23337 : *minus_conp = *conp, *conp = 0;
979 : 10604710 : else if (*minus_conp)
980 : 0 : *conp = *minus_conp, *minus_conp = 0;
981 : 10628047 : if (var)
982 : 10593025 : *minus_varp = var, var = 0;
983 : 35022 : else if (*minus_varp)
984 : 757 : var = *minus_varp, *minus_varp = 0;
985 : : }
986 : :
987 : 200857960 : if (*litp
988 : 200857960 : && TREE_OVERFLOW_P (*litp))
989 : 19949 : *litp = drop_tree_overflow (*litp);
990 : 200857960 : if (*minus_litp
991 : 200857960 : && TREE_OVERFLOW_P (*minus_litp))
992 : 0 : *minus_litp = drop_tree_overflow (*minus_litp);
993 : :
994 : 200857960 : return var;
995 : : }
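/* Worked example (illustrative only): splitting IN = "x + 7" with
   CODE == PLUS_EXPR returns x as the variable part and sets *LITP to 7,
   leaving the remaining outputs null.  Splitting "x - 7" under PLUS_EXPR
   sets *MINUS_LITP to 7 instead.  With NEGATE_P nonzero the parts move to
   their negated slots, so negating "x + 7" yields *MINUS_VARP == x and
   *MINUS_LITP == 7, with a null variable part returned.  */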
996 : :
997 : : /* Re-associate trees split by the above function. T1 and T2 are
998 : : either expressions to associate or null. Return the new
999 : : expression, if any. LOC is the location of the new expression. If
1000 : : we build an operation, do it in TYPE and with CODE. */
1001 : :
1002 : : static tree
1003 : 17888375 : associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
1004 : : {
1005 : 17888375 : if (t1 == 0)
1006 : : {
1007 : 11343042 : gcc_assert (t2 == 0 || code != MINUS_EXPR);
1008 : : return t2;
1009 : : }
1010 : 6545333 : else if (t2 == 0)
1011 : : return t1;
1012 : :
1013 : : /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1014 : : try to fold this since we will have infinite recursion. But do
1015 : : deal with any NEGATE_EXPRs. */
1016 : 3655013 : if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1017 : 2889886 : || TREE_CODE (t1) == PLUS_EXPR || TREE_CODE (t2) == PLUS_EXPR
1018 : 2839142 : || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1019 : : {
1020 : 1426963 : if (code == PLUS_EXPR)
1021 : : {
1022 : 827012 : if (TREE_CODE (t1) == NEGATE_EXPR)
1023 : 54 : return build2_loc (loc, MINUS_EXPR, type,
1024 : : fold_convert_loc (loc, type, t2),
1025 : : fold_convert_loc (loc, type,
1026 : 108 : TREE_OPERAND (t1, 0)));
1027 : 826958 : else if (TREE_CODE (t2) == NEGATE_EXPR)
1028 : 1 : return build2_loc (loc, MINUS_EXPR, type,
1029 : : fold_convert_loc (loc, type, t1),
1030 : : fold_convert_loc (loc, type,
1031 : 2 : TREE_OPERAND (t2, 0)));
1032 : 826957 : else if (integer_zerop (t2))
1033 : 24863 : return fold_convert_loc (loc, type, t1);
1034 : : }
1035 : 599951 : else if (code == MINUS_EXPR)
1036 : : {
1037 : 578710 : if (integer_zerop (t2))
1038 : 0 : return fold_convert_loc (loc, type, t1);
1039 : : }
1040 : :
1041 : 1402045 : return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
1042 : 1402045 : fold_convert_loc (loc, type, t2));
1043 : : }
1044 : :
1045 : 2228050 : return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
1046 : 2228050 : fold_convert_loc (loc, type, t2));
1047 : : }
1048 : :
1049 : : /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
1050 : : for use in int_const_binop, size_binop and size_diffop. */
1051 : :
1052 : : static bool
1053 : 2025082036 : int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
1054 : : {
1055 : 2025082036 : if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
1056 : : return false;
1057 : 2025082036 : if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
1058 : : return false;
1059 : :
1060 : 2025082036 : switch (code)
1061 : : {
1062 : : case LSHIFT_EXPR:
1063 : : case RSHIFT_EXPR:
1064 : : case LROTATE_EXPR:
1065 : : case RROTATE_EXPR:
1066 : : return true;
1067 : :
1068 : 2025082036 : default:
1069 : 2025082036 : break;
1070 : : }
1071 : :
1072 : 2025082036 : return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
1073 : 2025082036 : && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
1074 : 4050164072 : && TYPE_MODE (type1) == TYPE_MODE (type2);
1075 : : }
1076 : :
1077 : : /* Combine two wide ints ARG1 and ARG2 under operation CODE to produce
1078 : : a new constant in RES. Return FALSE if we don't know how to
1079 : : evaluate CODE at compile-time. */
1080 : :
1081 : : bool
1082 : 1282857720 : wide_int_binop (wide_int &res,
1083 : : enum tree_code code, const wide_int &arg1, const wide_int &arg2,
1084 : : signop sign, wi::overflow_type *overflow)
1085 : : {
1086 : 1282857720 : wide_int tmp;
1087 : 1282857720 : *overflow = wi::OVF_NONE;
1088 : 1282857720 : switch (code)
1089 : : {
1090 : 1969901 : case BIT_IOR_EXPR:
1091 : 1969901 : res = wi::bit_or (arg1, arg2);
1092 : 1969901 : break;
1093 : :
1094 : 77378 : case BIT_XOR_EXPR:
1095 : 77378 : res = wi::bit_xor (arg1, arg2);
1096 : 77378 : break;
1097 : :
1098 : 17212765 : case BIT_AND_EXPR:
1099 : 17212765 : res = wi::bit_and (arg1, arg2);
1100 : 17212765 : break;
1101 : :
1102 : 11965668 : case LSHIFT_EXPR:
1103 : 11965668 : if (wi::neg_p (arg2))
1104 : : return false;
1105 : 11935356 : res = wi::lshift (arg1, arg2);
1106 : 11935356 : break;
1107 : :
1108 : 7427066 : case RSHIFT_EXPR:
1109 : 7427066 : if (wi::neg_p (arg2))
1110 : : return false;
1111 : : /* It's unclear from the C standard whether shifts can overflow.
1112 : : The following code ignores overflow; perhaps a C standard
1113 : : interpretation ruling is needed. */
1114 : 7426872 : res = wi::rshift (arg1, arg2, sign);
1115 : 7426872 : break;
1116 : :
1117 : 1138 : case RROTATE_EXPR:
1118 : 1138 : case LROTATE_EXPR:
1119 : 1138 : if (wi::neg_p (arg2))
1120 : : {
1121 : 14 : tmp = -arg2;
1122 : 14 : if (code == RROTATE_EXPR)
1123 : : code = LROTATE_EXPR;
1124 : : else
1125 : : code = RROTATE_EXPR;
1126 : : }
1127 : : else
1128 : 1124 : tmp = arg2;
1129 : :
1130 : 1124 : if (code == RROTATE_EXPR)
1131 : 956 : res = wi::rrotate (arg1, tmp);
1132 : : else
1133 : 182 : res = wi::lrotate (arg1, tmp);
1134 : : break;
1135 : :
1136 : 193988276 : case PLUS_EXPR:
1137 : 193988276 : res = wi::add (arg1, arg2, sign, overflow);
1138 : 193988276 : break;
1139 : :
1140 : 68227667 : case MINUS_EXPR:
1141 : 68227667 : res = wi::sub (arg1, arg2, sign, overflow);
1142 : 68227667 : break;
1143 : :
1144 : 432315659 : case MULT_EXPR:
1145 : 432315659 : res = wi::mul (arg1, arg2, sign, overflow);
1146 : 432315659 : break;
1147 : :
1148 : 4760 : case MULT_HIGHPART_EXPR:
1149 : 4760 : res = wi::mul_high (arg1, arg2, sign);
1150 : 4760 : break;
1151 : :
1152 : 270985706 : case TRUNC_DIV_EXPR:
1153 : 270985706 : case EXACT_DIV_EXPR:
1154 : 270985706 : if (arg2 == 0)
1155 : : return false;
1156 : 270979821 : res = wi::div_trunc (arg1, arg2, sign, overflow);
1157 : 270979821 : break;
1158 : :
1159 : 61858815 : case FLOOR_DIV_EXPR:
1160 : 61858815 : if (arg2 == 0)
1161 : : return false;
1162 : 61858815 : res = wi::div_floor (arg1, arg2, sign, overflow);
1163 : 61858815 : break;
1164 : :
1165 : 95287827 : case CEIL_DIV_EXPR:
1166 : 95287827 : if (arg2 == 0)
1167 : : return false;
1168 : 95287827 : res = wi::div_ceil (arg1, arg2, sign, overflow);
1169 : 95287827 : break;
1170 : :
1171 : 0 : case ROUND_DIV_EXPR:
1172 : 0 : if (arg2 == 0)
1173 : : return false;
1174 : 0 : res = wi::div_round (arg1, arg2, sign, overflow);
1175 : 0 : break;
1176 : :
1177 : 628535 : case TRUNC_MOD_EXPR:
1178 : 628535 : if (arg2 == 0)
1179 : : return false;
1180 : 627429 : res = wi::mod_trunc (arg1, arg2, sign, overflow);
1181 : 627429 : break;
1182 : :
1183 : 51367509 : case FLOOR_MOD_EXPR:
1184 : 51367509 : if (arg2 == 0)
1185 : : return false;
1186 : 51367509 : res = wi::mod_floor (arg1, arg2, sign, overflow);
1187 : 51367509 : break;
1188 : :
1189 : 178 : case CEIL_MOD_EXPR:
1190 : 178 : if (arg2 == 0)
1191 : : return false;
1192 : 178 : res = wi::mod_ceil (arg1, arg2, sign, overflow);
1193 : 178 : break;
1194 : :
1195 : 0 : case ROUND_MOD_EXPR:
1196 : 0 : if (arg2 == 0)
1197 : : return false;
1198 : 0 : res = wi::mod_round (arg1, arg2, sign, overflow);
1199 : 0 : break;
1200 : :
1201 : 25878 : case MIN_EXPR:
1202 : 25878 : res = wi::min (arg1, arg2, sign);
1203 : 25878 : break;
1204 : :
1205 : 69512842 : case MAX_EXPR:
1206 : 69512842 : res = wi::max (arg1, arg2, sign);
1207 : 69512842 : break;
1208 : :
1209 : : default:
1210 : : return false;
1211 : : }
1212 : : return true;
1213 : 1282857720 : }
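/* Usage sketch (illustrative only, assuming the wide-int helpers from
   wide-int.h):

     wide_int a = wi::shwi (5, 32), b = wi::shwi (7, 32);
     wide_int res;
     wi::overflow_type ovf;
     bool ok = wide_int_binop (res, PLUS_EXPR, a, b, SIGNED, &ovf);
     // ok is true, res is the 32-bit value 12, ovf is wi::OVF_NONE.

   A zero divisor or a negative shift count makes the function return
   false instead of producing a value.  */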
1214 : :
    1215 :             : /* Returns true if we know which of ARG1 and ARG2 is smaller or equal, and
    1216 :             :    sets RES to that minimum value.  */
1217 : : bool
1218 : 0 : can_min_p (const_tree arg1, const_tree arg2, poly_wide_int &res)
1219 : : {
1220 : 0 : if (known_le (wi::to_poly_widest (arg1), wi::to_poly_widest (arg2)))
1221 : : {
1222 : 0 : res = wi::to_poly_wide (arg1);
1223 : 0 : return true;
1224 : : }
1225 : 0 : else if (known_le (wi::to_poly_widest (arg2), wi::to_poly_widest (arg1)))
1226 : : {
1227 : 0 : res = wi::to_poly_wide (arg2);
1228 : 0 : return true;
1229 : : }
1230 : :
1231 : : return false;
1232 : : }
1233 : :
1234 : : /* Combine two poly int's ARG1 and ARG2 under operation CODE to
1235 : : produce a new constant in RES. Return FALSE if we don't know how
1236 : : to evaluate CODE at compile-time. */
1237 : :
1238 : : bool
1239 : 1282857720 : poly_int_binop (poly_wide_int &res, enum tree_code code,
1240 : : const_tree arg1, const_tree arg2,
1241 : : signop sign, wi::overflow_type *overflow)
1242 : : {
1243 : 1282857720 : gcc_assert (poly_int_tree_p (arg1) && poly_int_tree_p (arg2));
1244 : :
1245 : 1282857720 : if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
1246 : : {
1247 : 1282857720 : wide_int warg1 = wi::to_wide (arg1), wi_res;
1248 : 1282857720 : wide_int warg2 = wi::to_wide (arg2, TYPE_PRECISION (TREE_TYPE (arg1)));
1249 : 1282857720 : if (!wide_int_binop (wi_res, code, warg1, warg2, sign, overflow))
1250 : : return NULL_TREE;
1251 : 1282820071 : res = wi_res;
1252 : 1282820071 : return true;
1253 : 1282857888 : }
1254 : :
1255 : : gcc_assert (NUM_POLY_INT_COEFFS != 1);
1256 : :
1257 : : switch (code)
1258 : : {
1259 : : case PLUS_EXPR:
1260 : : res = wi::add (wi::to_poly_wide (arg1),
1261 : : wi::to_poly_wide (arg2), sign, overflow);
1262 : : break;
1263 : :
1264 : : case MINUS_EXPR:
1265 : : res = wi::sub (wi::to_poly_wide (arg1),
1266 : : wi::to_poly_wide (arg2), sign, overflow);
1267 : : break;
1268 : :
1269 : : case MULT_EXPR:
1270 : : if (TREE_CODE (arg2) == INTEGER_CST)
1271 : : res = wi::mul (wi::to_poly_wide (arg1),
1272 : : wi::to_wide (arg2), sign, overflow);
1273 : : else if (TREE_CODE (arg1) == INTEGER_CST)
1274 : : res = wi::mul (wi::to_poly_wide (arg2),
1275 : : wi::to_wide (arg1), sign, overflow);
1276 : : else
1277 : : return NULL_TREE;
1278 : : break;
1279 : :
1280 : : case LSHIFT_EXPR:
1281 : : if (TREE_CODE (arg2) == INTEGER_CST)
1282 : : res = wi::to_poly_wide (arg1) << wi::to_wide (arg2);
1283 : : else
1284 : : return false;
1285 : : break;
1286 : :
1287 : : case BIT_IOR_EXPR:
1288 : : if (TREE_CODE (arg2) != INTEGER_CST
1289 : : || !can_ior_p (wi::to_poly_wide (arg1), wi::to_wide (arg2),
1290 : : &res))
1291 : : return false;
1292 : : break;
1293 : :
1294 : : case MIN_EXPR:
1295 : : if (!can_min_p (arg1, arg2, res))
1296 : : return false;
1297 : : break;
1298 : :
1299 : : default:
1300 : : return false;
1301 : : }
1302 : : return true;
1303 : : }
1304 : :
1305 : : /* Combine two integer constants ARG1 and ARG2 under operation CODE to
1306 : : produce a new constant. Return NULL_TREE if we don't know how to
1307 : : evaluate CODE at compile-time. */
1308 : :
1309 : : tree
1310 : 1282857720 : int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2,
1311 : : int overflowable)
1312 : : {
1313 : 1282857720 : poly_wide_int poly_res;
1314 : 1282857720 : tree type = TREE_TYPE (arg1);
1315 : 1282857720 : signop sign = TYPE_SIGN (type);
1316 : 1282857720 : wi::overflow_type overflow = wi::OVF_NONE;
1317 : :
1318 : 1282857720 : if (!poly_int_tree_p (arg1)
1319 : 1282857720 : || !poly_int_tree_p (arg2)
1320 : 2565715440 : || !poly_int_binop (poly_res, code, arg1, arg2, sign, &overflow))
1321 : 37649 : return NULL_TREE;
1322 : 1282820071 : return force_fit_type (type, poly_res, overflowable,
1323 : 1282820071 : (((sign == SIGNED || overflowable == -1)
1324 : 1282820071 : && overflow)
1325 : 1282820071 : | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2)));
1326 : 1282857720 : }
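/* For instance (illustrative only):

     tree two = build_int_cst (integer_type_node, 2);
     tree three = build_int_cst (integer_type_node, 3);
     tree sum = int_const_binop (PLUS_EXPR, two, three);
     // sum is an INTEGER_CST of integer_type_node with value 5.

   A division by zero or an unhandled CODE yields NULL_TREE, and signed
   overflow is recorded on the result via force_fit_type, subject to
   OVERFLOWABLE.  */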
1327 : :
1328 : : /* Return true if binary operation OP distributes over addition in operand
1329 : : OPNO, with the other operand being held constant. OPNO counts from 1. */
1330 : :
1331 : : static bool
1332 : 171188 : distributes_over_addition_p (tree_code op, int opno)
1333 : : {
1334 : 0 : switch (op)
1335 : : {
1336 : : case PLUS_EXPR:
1337 : : case MINUS_EXPR:
1338 : : case MULT_EXPR:
1339 : : return true;
1340 : :
1341 : 0 : case LSHIFT_EXPR:
1342 : 0 : return opno == 1;
1343 : :
1344 : 3365 : default:
1345 : 3365 : return false;
1346 : : }
1347 : : }
1348 : :
1349 : : /* OP is the INDEXth operand to CODE (counting from zero) and OTHER_OP
1350 : : is the other operand. Try to use the value of OP to simplify the
1351 : : operation in one step, without having to process individual elements. */
1352 : : static tree
1353 : 395206 : simplify_const_binop (tree_code code, tree op, tree other_op,
1354 : : int index ATTRIBUTE_UNUSED)
1355 : : {
1356 : : /* AND, IOR as well as XOR with a zerop can be simplified directly. */
1357 : 395206 : if (TREE_CODE (op) == VECTOR_CST && TREE_CODE (other_op) == VECTOR_CST)
1358 : : {
1359 : 331082 : if (integer_zerop (other_op))
1360 : : {
1361 : 23624 : if (code == BIT_IOR_EXPR || code == BIT_XOR_EXPR)
1362 : : return op;
1363 : 22857 : else if (code == BIT_AND_EXPR)
1364 : : return other_op;
1365 : : }
1366 : : }
1367 : :
1368 : : return NULL_TREE;
1369 : : }
1370 : :
1371 : : /* If ARG1 and ARG2 are constants, and if performing CODE on them would
1372 : : be an elementwise vector operation, try to fold the operation to a
1373 : : constant vector, using ELT_CONST_BINOP to fold each element. Return
1374 : : the folded value on success, otherwise return null. */
1375 : : tree
1376 : 231310 : vector_const_binop (tree_code code, tree arg1, tree arg2,
1377 : : tree (*elt_const_binop) (enum tree_code, tree, tree))
1378 : : {
1379 : 173234 : if (TREE_CODE (arg1) == VECTOR_CST && TREE_CODE (arg2) == VECTOR_CST
1380 : 398517 : && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)),
1381 : : TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2))))
1382 : : {
1383 : 167207 : tree type = TREE_TYPE (arg1);
1384 : 167207 : bool step_ok_p;
1385 : 167207 : if (VECTOR_CST_STEPPED_P (arg1)
1386 : 167207 : && VECTOR_CST_STEPPED_P (arg2))
1387 : : /* We can operate directly on the encoding if:
1388 : :
1389 : : a3 - a2 == a2 - a1 && b3 - b2 == b2 - b1
1390 : : implies
1391 : : (a3 op b3) - (a2 op b2) == (a2 op b2) - (a1 op b1)
1392 : :
1393 : : Addition and subtraction are the supported operators
1394 : : for which this is true. */
1395 : 2046 : step_ok_p = (code == PLUS_EXPR || code == MINUS_EXPR);
1396 : 165161 : else if (VECTOR_CST_STEPPED_P (arg1))
1397 : : /* We can operate directly on stepped encodings if:
1398 : :
1399 : : a3 - a2 == a2 - a1
1400 : : implies:
1401 : : (a3 op c) - (a2 op c) == (a2 op c) - (a1 op c)
1402 : :
1403 : : which is true if (x -> x op c) distributes over addition. */
1404 : 48348 : step_ok_p = distributes_over_addition_p (code, 1);
1405 : : else
1406 : : /* Similarly in reverse. */
1407 : 116813 : step_ok_p = distributes_over_addition_p (code, 2);
1408 : 167207 : tree_vector_builder elts;
1409 : 167207 : if (!elts.new_binary_operation (type, arg1, arg2, step_ok_p))
1410 : : return NULL_TREE;
1411 : 167207 : unsigned int count = elts.encoded_nelts ();
1412 : 621169 : for (unsigned int i = 0; i < count; ++i)
1413 : : {
1414 : 454289 : tree elem1 = VECTOR_CST_ELT (arg1, i);
1415 : 454289 : tree elem2 = VECTOR_CST_ELT (arg2, i);
1416 : :
1417 : 454289 : tree elt = elt_const_binop (code, elem1, elem2);
1418 : :
1419 : : /* It is possible that const_binop cannot handle the given
    1420 :             :              code and returns NULL_TREE.  */
1421 : 454289 : if (elt == NULL_TREE)
1422 : 327 : return NULL_TREE;
1423 : 453962 : elts.quick_push (elt);
1424 : : }
1425 : :
1426 : 166880 : return elts.build ();
1427 : 167207 : }
1428 : :
1429 : 64103 : if (TREE_CODE (arg1) == VECTOR_CST
1430 : 6027 : && TREE_CODE (arg2) == INTEGER_CST)
1431 : : {
1432 : 6027 : tree type = TREE_TYPE (arg1);
1433 : 6027 : bool step_ok_p = distributes_over_addition_p (code, 1);
1434 : 6027 : tree_vector_builder elts;
1435 : 6027 : if (!elts.new_unary_operation (type, arg1, step_ok_p))
1436 : : return NULL_TREE;
1437 : 6027 : unsigned int count = elts.encoded_nelts ();
1438 : 30112 : for (unsigned int i = 0; i < count; ++i)
1439 : : {
1440 : 24172 : tree elem1 = VECTOR_CST_ELT (arg1, i);
1441 : :
1442 : 24172 : tree elt = elt_const_binop (code, elem1, arg2);
1443 : :
1444 : : /* It is possible that const_binop cannot handle the given
    1445 :             :              code and returns NULL_TREE.  */
1446 : 24172 : if (elt == NULL_TREE)
1447 : 87 : return NULL_TREE;
1448 : 24085 : elts.quick_push (elt);
1449 : : }
1450 : :
1451 : 5940 : return elts.build ();
1452 : 6027 : }
1453 : : return NULL_TREE;
1454 : : }
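/* Worked example (illustrative only): adding the stepped encodings
   {1, 2, 3, ...} and {10, 20, 30, ...} can be done on the encoded
   elements alone, because the sum {11, 22, 33, ...} keeps the
   "a3 - a2 == a2 - a1" property.  A right shift by a constant does not
   distribute over addition, so step_ok_p is false there and the elements
   are expanded in full instead.  */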
1455 : :
1456 : : /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1457 : : constant. We assume ARG1 and ARG2 have the same data type, or at least
1458 : : are the same kind of constant and the same machine mode. Return zero if
1459 : : combining the constants is not allowed in the current operating mode. */
1460 : :
1461 : : static tree
1462 : 163491264 : const_binop (enum tree_code code, tree arg1, tree arg2)
1463 : : {
1464 : : /* Sanity check for the recursive cases. */
1465 : 163491264 : if (!arg1 || !arg2)
1466 : : return NULL_TREE;
1467 : :
1468 : 163490000 : STRIP_NOPS (arg1);
1469 : 163490000 : STRIP_NOPS (arg2);
1470 : :
1471 : 163490000 : if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
1472 : : {
1473 : 157948487 : if (code == POINTER_PLUS_EXPR)
1474 : 96279 : return int_const_binop (PLUS_EXPR,
1475 : 192558 : arg1, fold_convert (TREE_TYPE (arg1), arg2));
1476 : :
1477 : 157852208 : return int_const_binop (code, arg1, arg2);
1478 : : }
1479 : :
1480 : 5541513 : if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
1481 : : {
1482 : 5294818 : machine_mode mode;
1483 : 5294818 : REAL_VALUE_TYPE d1;
1484 : 5294818 : REAL_VALUE_TYPE d2;
1485 : 5294818 : REAL_VALUE_TYPE value;
1486 : 5294818 : REAL_VALUE_TYPE result;
1487 : 5294818 : bool inexact;
1488 : 5294818 : tree t, type;
1489 : :
1490 : : /* The following codes are handled by real_arithmetic. */
1491 : 5294818 : switch (code)
1492 : : {
1493 : 5294818 : case PLUS_EXPR:
1494 : 5294818 : case MINUS_EXPR:
1495 : 5294818 : case MULT_EXPR:
1496 : 5294818 : case RDIV_EXPR:
1497 : 5294818 : case MIN_EXPR:
1498 : 5294818 : case MAX_EXPR:
1499 : 5294818 : break;
1500 : :
1501 : : default:
1502 : : return NULL_TREE;
1503 : : }
1504 : :
1505 : 5294818 : d1 = TREE_REAL_CST (arg1);
1506 : 5294818 : d2 = TREE_REAL_CST (arg2);
1507 : :
1508 : 5294818 : type = TREE_TYPE (arg1);
1509 : 5294818 : mode = TYPE_MODE (type);
1510 : :
1511 : : /* Don't perform operation if we honor signaling NaNs and
1512 : : either operand is a signaling NaN. */
1513 : 5294818 : if (HONOR_SNANS (mode)
1514 : 5294818 : && (REAL_VALUE_ISSIGNALING_NAN (d1)
1515 : 3600 : || REAL_VALUE_ISSIGNALING_NAN (d2)))
1516 : 33 : return NULL_TREE;
1517 : :
1518 : : /* Don't perform operation if it would raise a division
1519 : : by zero exception. */
1520 : 5294785 : if (code == RDIV_EXPR
1521 : 2353492 : && real_equal (&d2, &dconst0)
1522 : 5305167 : && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1523 : 7094 : return NULL_TREE;
1524 : :
1525 : : /* If either operand is a NaN, just return it. Otherwise, set up
1526 : : for floating-point trap; we return an overflow. */
1527 : 5287691 : if (REAL_VALUE_ISNAN (d1))
1528 : : {
1529 : : /* Make resulting NaN value to be qNaN when flag_signaling_nans
1530 : : is off. */
1531 : 243 : d1.signalling = 0;
1532 : 243 : t = build_real (type, d1);
1533 : 243 : return t;
1534 : : }
1535 : 5287448 : else if (REAL_VALUE_ISNAN (d2))
1536 : : {
1537 : : /* Make resulting NaN value to be qNaN when flag_signaling_nans
1538 : : is off. */
1539 : 65 : d2.signalling = 0;
1540 : 65 : t = build_real (type, d2);
1541 : 65 : return t;
1542 : : }
1543 : :
1544 : 5287383 : inexact = real_arithmetic (&value, code, &d1, &d2);
1545 : 5287383 : real_convert (&result, mode, &value);
1546 : :
1547 : : /* Don't constant fold this floating point operation if
1548 : : both operands are not NaN but the result is NaN, and
1549 : : flag_trapping_math. Such operations should raise an
1550 : : invalid operation exception. */
1551 : 5287383 : if (flag_trapping_math
1552 : 20517964 : && MODE_HAS_NANS (mode)
1553 : 5271031 : && REAL_VALUE_ISNAN (result)
1554 : 2432 : && !REAL_VALUE_ISNAN (d1)
1555 : 5289815 : && !REAL_VALUE_ISNAN (d2))
1556 : 2432 : return NULL_TREE;
1557 : :
1558 : : /* Don't constant fold this floating point operation if
1559 : : flag_trapping_math is set and the result has overflowed. */
1560 : 5284951 : if (flag_trapping_math
1561 : 20508542 : && MODE_HAS_INFINITIES (mode)
1562 : 5268599 : && REAL_VALUE_ISINF (result)
1563 : 7466 : && !REAL_VALUE_ISINF (d1)
1564 : 5291821 : && !REAL_VALUE_ISINF (d2))
1565 : 4587 : return NULL_TREE;
1566 : :
1567 : : /* Don't constant fold this floating point operation if the
1568 : : result may depend upon the run-time rounding mode and
1569 : : flag_rounding_math is set, or if GCC's software emulation
1570 : : is unable to accurately represent the result. */
1571 : 5280364 : if ((flag_rounding_math
1572 : 35816656 : || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1573 : 5280364 : && (inexact || !real_identical (&result, &value)))
1574 : 1107 : return NULL_TREE;
1575 : :
1576 : 5279257 : t = build_real (type, result);
1577 : :
1578 : 5279257 : TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1579 : 5279257 : return t;
1580 : : }
1581 : :
1582 : 246695 : if (TREE_CODE (arg1) == FIXED_CST)
1583 : : {
1584 : 0 : FIXED_VALUE_TYPE f1;
1585 : 0 : FIXED_VALUE_TYPE f2;
1586 : 0 : FIXED_VALUE_TYPE result;
1587 : 0 : tree t, type;
1588 : 0 : bool sat_p;
1589 : 0 : bool overflow_p;
1590 : :
1591 : : /* The following codes are handled by fixed_arithmetic. */
1592 : 0 : switch (code)
1593 : : {
1594 : 0 : case PLUS_EXPR:
1595 : 0 : case MINUS_EXPR:
1596 : 0 : case MULT_EXPR:
1597 : 0 : case TRUNC_DIV_EXPR:
1598 : 0 : if (TREE_CODE (arg2) != FIXED_CST)
1599 : : return NULL_TREE;
1600 : 0 : f2 = TREE_FIXED_CST (arg2);
1601 : 0 : break;
1602 : :
1603 : 0 : case LSHIFT_EXPR:
1604 : 0 : case RSHIFT_EXPR:
1605 : 0 : {
1606 : 0 : if (TREE_CODE (arg2) != INTEGER_CST)
1607 : 0 : return NULL_TREE;
1608 : 0 : wi::tree_to_wide_ref w2 = wi::to_wide (arg2);
1609 : 0 : f2.data.high = w2.elt (1);
1610 : 0 : f2.data.low = w2.ulow ();
1611 : 0 : f2.mode = SImode;
1612 : : }
1613 : 0 : break;
1614 : :
1615 : : default:
1616 : : return NULL_TREE;
1617 : : }
1618 : :
1619 : 0 : f1 = TREE_FIXED_CST (arg1);
1620 : 0 : type = TREE_TYPE (arg1);
1621 : 0 : sat_p = TYPE_SATURATING (type);
1622 : 0 : overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1623 : 0 : t = build_fixed (type, result);
1624 : : /* Propagate overflow flags. */
1625 : 0 : if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1626 : 0 : TREE_OVERFLOW (t) = 1;
1627 : 0 : return t;
1628 : : }
1629 : :
1630 : 246695 : if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
1631 : : {
1632 : 11103 : tree type = TREE_TYPE (arg1);
1633 : 11103 : tree r1 = TREE_REALPART (arg1);
1634 : 11103 : tree i1 = TREE_IMAGPART (arg1);
1635 : 11103 : tree r2 = TREE_REALPART (arg2);
1636 : 11103 : tree i2 = TREE_IMAGPART (arg2);
1637 : 11103 : tree real, imag;
1638 : :
1639 : 11103 : switch (code)
1640 : : {
1641 : 5258 : case PLUS_EXPR:
1642 : 5258 : case MINUS_EXPR:
1643 : 5258 : real = const_binop (code, r1, r2);
1644 : 5258 : imag = const_binop (code, i1, i2);
1645 : 5258 : break;
1646 : :
1647 : 3894 : case MULT_EXPR:
1648 : 3894 : if (COMPLEX_FLOAT_TYPE_P (type))
1649 : 2742 : return do_mpc_arg2 (arg1, arg2, type,
1650 : : /* do_nonfinite= */ folding_initializer,
1651 : 2742 : mpc_mul);
1652 : :
1653 : 1152 : real = const_binop (MINUS_EXPR,
1654 : : const_binop (MULT_EXPR, r1, r2),
1655 : : const_binop (MULT_EXPR, i1, i2));
1656 : 1152 : imag = const_binop (PLUS_EXPR,
1657 : : const_binop (MULT_EXPR, r1, i2),
1658 : : const_binop (MULT_EXPR, i1, r2));
1659 : 1152 : break;
1660 : :
1661 : 1697 : case RDIV_EXPR:
1662 : 1697 : if (COMPLEX_FLOAT_TYPE_P (type))
1663 : 1697 : return do_mpc_arg2 (arg1, arg2, type,
1664 : : /* do_nonfinite= */ folding_initializer,
1665 : 1697 : mpc_div);
1666 : : /* Fallthru. */
1667 : 254 : case TRUNC_DIV_EXPR:
1668 : 254 : case CEIL_DIV_EXPR:
1669 : 254 : case FLOOR_DIV_EXPR:
1670 : 254 : case ROUND_DIV_EXPR:
1671 : 254 : if (flag_complex_method == 0)
1672 : : {
1673 : : /* Keep this algorithm in sync with
1674 : : tree-complex.cc:expand_complex_div_straight().
1675 : :
1676 : : Expand complex division to scalars, straightforward algorithm.
1677 : : a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1678 : : t = br*br + bi*bi
1679 : : */
1680 : 0 : tree magsquared
1681 : 0 : = const_binop (PLUS_EXPR,
1682 : : const_binop (MULT_EXPR, r2, r2),
1683 : : const_binop (MULT_EXPR, i2, i2));
1684 : 0 : tree t1
1685 : 0 : = const_binop (PLUS_EXPR,
1686 : : const_binop (MULT_EXPR, r1, r2),
1687 : : const_binop (MULT_EXPR, i1, i2));
1688 : 0 : tree t2
1689 : 0 : = const_binop (MINUS_EXPR,
1690 : : const_binop (MULT_EXPR, i1, r2),
1691 : : const_binop (MULT_EXPR, r1, i2));
1692 : :
1693 : 0 : real = const_binop (code, t1, magsquared);
1694 : 0 : imag = const_binop (code, t2, magsquared);
1695 : : }
1696 : : else
1697 : : {
1698 : : /* Keep this algorithm in sync with
1699 : : tree-complex.cc:expand_complex_div_wide().
1700 : :
1701 : : Expand complex division to scalars, modified algorithm to minimize
1702 : : overflow with wide input ranges. */
1703 : 254 : tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1704 : : fold_abs_const (r2, TREE_TYPE (type)),
1705 : : fold_abs_const (i2, TREE_TYPE (type)));
1706 : :
1707 : 254 : if (integer_nonzerop (compare))
1708 : : {
1709 : : /* In the TRUE branch, we compute
1710 : : ratio = br/bi;
1711 : : div = (br * ratio) + bi;
1712 : : tr = (ar * ratio) + ai;
1713 : : ti = (ai * ratio) - ar;
1714 : : tr = tr / div;
1715 : : ti = ti / div; */
1716 : 48 : tree ratio = const_binop (code, r2, i2);
1717 : 48 : tree div = const_binop (PLUS_EXPR, i2,
1718 : : const_binop (MULT_EXPR, r2, ratio));
1719 : 48 : real = const_binop (MULT_EXPR, r1, ratio);
1720 : 48 : real = const_binop (PLUS_EXPR, real, i1);
1721 : 48 : real = const_binop (code, real, div);
1722 : :
1723 : 48 : imag = const_binop (MULT_EXPR, i1, ratio);
1724 : 48 : imag = const_binop (MINUS_EXPR, imag, r1);
1725 : 48 : imag = const_binop (code, imag, div);
1726 : : }
1727 : : else
1728 : : {
1729 : : /* In the FALSE branch, we compute
1730 : : ratio = bi/br;
1731 : : div = (bi * ratio) + br;
1732 : : tr = (ai * ratio) + ar;
1733 : : ti = ai - (ar * ratio);
1734 : : tr = tr / div;
1735 : : ti = ti / div; */
1736 : 206 : tree ratio = const_binop (code, i2, r2);
1737 : 206 : tree div = const_binop (PLUS_EXPR, r2,
1738 : : const_binop (MULT_EXPR, i2, ratio));
1739 : :
1740 : 206 : real = const_binop (MULT_EXPR, i1, ratio);
1741 : 206 : real = const_binop (PLUS_EXPR, real, r1);
1742 : 206 : real = const_binop (code, real, div);
1743 : :
1744 : 206 : imag = const_binop (MULT_EXPR, r1, ratio);
1745 : 206 : imag = const_binop (MINUS_EXPR, i1, imag);
1746 : 206 : imag = const_binop (code, imag, div);
1747 : : }
1748 : : }
1749 : : break;
1750 : :
1751 : : default:
1752 : : return NULL_TREE;
1753 : : }
1754 : :
1755 : 6664 : if (real && imag)
1756 : 6506 : return build_complex (type, real, imag);
1757 : : }
1758 : :
1759 : 235750 : tree simplified;
1760 : 235750 : if ((simplified = simplify_const_binop (code, arg1, arg2, 0)))
1761 : : return simplified;
1762 : :
1763 : 235524 : if (commutative_tree_code (code)
1764 : 235524 : && (simplified = simplify_const_binop (code, arg2, arg1, 1)))
1765 : : return simplified;
1766 : :
1767 : 231310 : return vector_const_binop (code, arg1, arg2, const_binop);
1768 : : }
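
The two complex-division strategies folded above (flag_complex_method == 0 selects the straight form, otherwise the wide form) can be checked with ordinary doubles. Below is a minimal standalone sketch added by the editor, not part of fold-const.cc; all names are local to the example, and the branch selection mirrors the TRUE/FALSE comments above.

#include <cmath>
#include <cstdio>

struct cplx { double re, im; };

/* Straight form: a/b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t),
   with t = br*br + bi*bi.  */
static cplx
div_straight (cplx a, cplx b)
{
  double t = b.re * b.re + b.im * b.im;
  return { (a.re * b.re + a.im * b.im) / t, (a.im * b.re - a.re * b.im) / t };
}

/* Wide form: scale by ratio = smaller/larger of |br|, |bi| so the
   divisor stays in range for wide inputs.  */
static cplx
div_wide (cplx a, cplx b)
{
  if (std::fabs (b.re) < std::fabs (b.im))
    {
      double ratio = b.re / b.im, div = b.im + b.re * ratio;
      return { (a.re * ratio + a.im) / div, (a.im * ratio - a.re) / div };
    }
  double ratio = b.im / b.re, div = b.re + b.im * ratio;
  return { (a.im * ratio + a.re) / div, (a.im - a.re * ratio) / div };
}

int
main ()
{
  cplx a = { 1.0, 2.0 }, b = { 3.0, 4.0 };
  cplx s = div_straight (a, b), w = div_wide (a, b);
  std::printf ("straight: %g%+gi\n", s.re, s.im);   /* 0.44+0.08i */
  std::printf ("wide:     %g%+gi\n", w.re, w.im);   /* 0.44+0.08i */

  /* With a huge divisor, br*br + bi*bi overflows to infinity, so the
     straight form collapses to 0; the wide form still recovers the
     tiny nonzero quotient.  */
  cplx big = { 1e300, 1e300 };
  std::printf ("straight: %g   wide: %g\n",
               div_straight (a, big).re, div_wide (a, big).re);
  return 0;
}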
1769 : :
1770 : : /* Overload that adds a TYPE parameter to be able to dispatch
1771 : : to fold_relational_const. */
1772 : :
1773 : : tree
1774 : 210058758 : const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
1775 : : {
1776 : 210058758 : if (TREE_CODE_CLASS (code) == tcc_comparison)
1777 : 52873478 : return fold_relational_const (code, type, arg1, arg2);
1778 : :
1779 : : /* ??? Until we make the const_binop worker take the type of the
1780 : : result as an argument, put those cases that need it here. */
1781 : 157185280 : switch (code)
1782 : : {
1783 : 18 : case VEC_SERIES_EXPR:
1784 : 18 : if (CONSTANT_CLASS_P (arg1)
1785 : 18 : && CONSTANT_CLASS_P (arg2))
1786 : 18 : return build_vec_series (type, arg1, arg2);
1787 : : return NULL_TREE;
1788 : :
1789 : 283285 : case COMPLEX_EXPR:
1790 : 283285 : if ((TREE_CODE (arg1) == REAL_CST
1791 : 272698 : && TREE_CODE (arg2) == REAL_CST)
1792 : 10588 : || (TREE_CODE (arg1) == INTEGER_CST
1793 : 10587 : && TREE_CODE (arg2) == INTEGER_CST))
1794 : 283284 : return build_complex (type, arg1, arg2);
1795 : : return NULL_TREE;
1796 : :
1797 : 9200 : case POINTER_DIFF_EXPR:
1798 : 9200 : if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
1799 : : {
1800 : 17766 : poly_offset_int res = (wi::to_poly_offset (arg1)
1801 : 8883 : - wi::to_poly_offset (arg2));
1802 : 8883 : return force_fit_type (type, res, 1,
1803 : 8883 : TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1804 : : }
1805 : : return NULL_TREE;
1806 : :
1807 : 11639 : case VEC_PACK_TRUNC_EXPR:
1808 : 11639 : case VEC_PACK_FIX_TRUNC_EXPR:
1809 : 11639 : case VEC_PACK_FLOAT_EXPR:
1810 : 11639 : {
1811 : 11639 : unsigned int HOST_WIDE_INT out_nelts, in_nelts, i;
1812 : :
1813 : 11639 : if (TREE_CODE (arg1) != VECTOR_CST
1814 : 11639 : || TREE_CODE (arg2) != VECTOR_CST)
1815 : : return NULL_TREE;
1816 : :
1817 : 11639 : if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
1818 : : return NULL_TREE;
1819 : :
1820 : 11639 : out_nelts = in_nelts * 2;
1821 : 11639 : gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
1822 : : && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1823 : :
1824 : 11639 : tree_vector_builder elts (type, out_nelts, 1);
1825 : 142711 : for (i = 0; i < out_nelts; i++)
1826 : : {
1827 : 131090 : tree elt = (i < in_nelts
1828 : 131090 : ? VECTOR_CST_ELT (arg1, i)
1829 : 65536 : : VECTOR_CST_ELT (arg2, i - in_nelts));
1830 : 131936 : elt = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
1831 : : ? NOP_EXPR
1832 : : : code == VEC_PACK_FLOAT_EXPR
1833 : 846 : ? FLOAT_EXPR : FIX_TRUNC_EXPR,
1834 : 131090 : TREE_TYPE (type), elt);
1835 : 131090 : if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1836 : 18 : return NULL_TREE;
1837 : 131072 : elts.quick_push (elt);
1838 : : }
1839 : :
1840 : 11621 : return elts.build ();
1841 : 11639 : }
1842 : :
1843 : 186 : case VEC_WIDEN_MULT_LO_EXPR:
1844 : 186 : case VEC_WIDEN_MULT_HI_EXPR:
1845 : 186 : case VEC_WIDEN_MULT_EVEN_EXPR:
1846 : 186 : case VEC_WIDEN_MULT_ODD_EXPR:
1847 : 186 : {
1848 : 186 : unsigned HOST_WIDE_INT out_nelts, in_nelts, out, ofs, scale;
1849 : :
1850 : 186 : if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
1851 : : return NULL_TREE;
1852 : :
1853 : 186 : if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
1854 : : return NULL_TREE;
1855 : 186 : out_nelts = in_nelts / 2;
1856 : 186 : gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
1857 : : && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1858 : :
1859 : 186 : if (code == VEC_WIDEN_MULT_LO_EXPR)
1860 : : scale = 0, ofs = BYTES_BIG_ENDIAN ? out_nelts : 0;
1861 : : else if (code == VEC_WIDEN_MULT_HI_EXPR)
1862 : : scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : out_nelts;
1863 : : else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
1864 : : scale = 1, ofs = 0;
1865 : : else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
1866 : 186 : scale = 1, ofs = 1;
1867 : :
1868 : 186 : tree_vector_builder elts (type, out_nelts, 1);
1869 : 646 : for (out = 0; out < out_nelts; out++)
1870 : : {
1871 : 460 : unsigned int in = (out << scale) + ofs;
1872 : 460 : tree t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
1873 : : VECTOR_CST_ELT (arg1, in));
1874 : 460 : tree t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
1875 : : VECTOR_CST_ELT (arg2, in));
1876 : :
1877 : 460 : if (t1 == NULL_TREE || t2 == NULL_TREE)
1878 : 0 : return NULL_TREE;
1879 : 460 : tree elt = const_binop (MULT_EXPR, t1, t2);
1880 : 460 : if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1881 : : return NULL_TREE;
1882 : 460 : elts.quick_push (elt);
1883 : : }
1884 : :
1885 : 186 : return elts.build ();
1886 : 186 : }
1887 : :
1888 : 156880952 : default:;
1889 : : }
1890 : :
1891 : 156880952 : if (TREE_CODE_CLASS (code) != tcc_binary)
1892 : : return NULL_TREE;
1893 : :
1894 : : /* Make sure type and arg0 have the same saturating flag. */
1895 : 154410122 : gcc_checking_assert (TYPE_SATURATING (type)
1896 : : == TYPE_SATURATING (TREE_TYPE (arg1)));
1897 : :
1898 : 154410122 : return const_binop (code, arg1, arg2);
1899 : : }
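
The lane arithmetic in the VEC_WIDEN_MULT_* case above is easy to misread. The following standalone sketch is an editor's addition (not GCC code); it prints which input lane each output lane reads for an assumed little-endian target with eight input elements, using the same (out << scale) + ofs formula.

#include <cstdio>

int
main ()
{
  const unsigned out_nelts = 4;
  const bool bytes_big_endian = false;   /* assume a little-endian target */
  struct variant { const char *name; unsigned scale, ofs; };
  const variant variants[] = {
    { "LO",   0, bytes_big_endian ? out_nelts : 0 },
    { "HI",   0, bytes_big_endian ? 0 : out_nelts },
    { "EVEN", 1, 0 },
    { "ODD",  1, 1 },
  };
  for (const variant &v : variants)
    {
      std::printf ("%-4s reads input lanes:", v.name);
      for (unsigned out = 0; out < out_nelts; out++)
        std::printf (" %u", (out << v.scale) + v.ofs);   /* in = (out << scale) + ofs */
      std::printf ("\n");
    }
  return 0;
}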
1900 : :
1901 : : /* Compute CODE ARG0 with resulting type TYPE, ARG0 being a constant.
1902 : : Return zero if computing the constant is not possible. */
1903 : :
1904 : : tree
1905 : 266210699 : const_unop (enum tree_code code, tree type, tree arg0)
1906 : : {
1907 : : /* Don't perform the operation, other than NEGATE, ABS and ABSU, if
1908 : : flag_signaling_nans is on and the operand is a signaling NaN. */
1909 : 266210699 : if (TREE_CODE (arg0) == REAL_CST
1910 : 10660272 : && HONOR_SNANS (arg0)
1911 : 7345 : && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
1912 : 454 : && code != NEGATE_EXPR
1913 : 454 : && code != ABS_EXPR
1914 : 266211118 : && code != ABSU_EXPR)
1915 : : return NULL_TREE;
1916 : :
1917 : 266210280 : switch (code)
1918 : : {
1919 : 182702182 : CASE_CONVERT:
1920 : 182702182 : case FLOAT_EXPR:
1921 : 182702182 : case FIX_TRUNC_EXPR:
1922 : 182702182 : case FIXED_CONVERT_EXPR:
1923 : 182702182 : return fold_convert_const (code, type, arg0);
1924 : :
1925 : 0 : case ADDR_SPACE_CONVERT_EXPR:
1926 : : /* If the source address is 0, and the source address space
1927 : : cannot have a valid object at 0, fold to dest type null. */
1928 : 0 : if (integer_zerop (arg0)
1929 : 0 : && !(targetm.addr_space.zero_address_valid
1930 : 0 : (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
1931 : 0 : return fold_convert_const (code, type, arg0);
1932 : : break;
1933 : :
1934 : 12111980 : case VIEW_CONVERT_EXPR:
1935 : 12111980 : return fold_view_convert_expr (type, arg0);
1936 : :
1937 : 28514053 : case NEGATE_EXPR:
1938 : 28514053 : {
1939 : : /* Can't call fold_negate_const directly here as that doesn't
1940 : : handle all cases and we might not be able to negate some
1941 : : constants. */
1942 : 28514053 : tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
1943 : 28514053 : if (tem && CONSTANT_CLASS_P (tem))
1944 : : return tem;
1945 : : break;
1946 : : }
1947 : :
1948 : 33006 : case ABS_EXPR:
1949 : 33006 : case ABSU_EXPR:
1950 : 33006 : if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
1951 : 32723 : return fold_abs_const (arg0, type);
1952 : : break;
1953 : :
1954 : 24552 : case CONJ_EXPR:
1955 : 24552 : if (TREE_CODE (arg0) == COMPLEX_CST)
1956 : : {
1957 : 24549 : tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
1958 : 24549 : TREE_TYPE (type));
1959 : 24549 : return build_complex (type, TREE_REALPART (arg0), ipart);
1960 : : }
1961 : : break;
1962 : :
1963 : 2217912 : case BIT_NOT_EXPR:
1964 : 2217912 : if (TREE_CODE (arg0) == INTEGER_CST)
1965 : 2217014 : return fold_not_const (arg0, type);
1966 : 898 : else if (POLY_INT_CST_P (arg0))
1967 : : return wide_int_to_tree (type, ~poly_int_cst_value (arg0));
1968 : : /* Perform BIT_NOT_EXPR on each element individually. */
1969 : 898 : else if (TREE_CODE (arg0) == VECTOR_CST)
1970 : : {
1971 : 291 : tree elem;
1972 : :
1973 : : /* This can cope with stepped encodings because ~x == -1 - x. */
1974 : 291 : tree_vector_builder elements;
1975 : 291 : elements.new_unary_operation (type, arg0, true);
1976 : 291 : unsigned int i, count = elements.encoded_nelts ();
1977 : 1628 : for (i = 0; i < count; ++i)
1978 : : {
1979 : 1337 : elem = VECTOR_CST_ELT (arg0, i);
1980 : 1337 : elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
1981 : 1337 : if (elem == NULL_TREE)
1982 : : break;
1983 : 1337 : elements.quick_push (elem);
1984 : : }
1985 : 291 : if (i == count)
1986 : 291 : return elements.build ();
1987 : 291 : }
1988 : : break;
1989 : :
1990 : 8639044 : case TRUTH_NOT_EXPR:
1991 : 8639044 : if (TREE_CODE (arg0) == INTEGER_CST)
1992 : 8338404 : return constant_boolean_node (integer_zerop (arg0), type);
1993 : : break;
1994 : :
1995 : 194382 : case REALPART_EXPR:
1996 : 194382 : if (TREE_CODE (arg0) == COMPLEX_CST)
1997 : 194181 : return fold_convert (type, TREE_REALPART (arg0));
1998 : : break;
1999 : :
2000 : 198233 : case IMAGPART_EXPR:
2001 : 198233 : if (TREE_CODE (arg0) == COMPLEX_CST)
2002 : 198045 : return fold_convert (type, TREE_IMAGPART (arg0));
2003 : : break;
2004 : :
2005 : 11506 : case VEC_UNPACK_LO_EXPR:
2006 : 11506 : case VEC_UNPACK_HI_EXPR:
2007 : 11506 : case VEC_UNPACK_FLOAT_LO_EXPR:
2008 : 11506 : case VEC_UNPACK_FLOAT_HI_EXPR:
2009 : 11506 : case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
2010 : 11506 : case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
2011 : 11506 : {
2012 : 11506 : unsigned HOST_WIDE_INT out_nelts, in_nelts, i;
2013 : 11506 : enum tree_code subcode;
2014 : :
2015 : 11506 : if (TREE_CODE (arg0) != VECTOR_CST)
2016 : : return NULL_TREE;
2017 : :
2018 : 11506 : if (!VECTOR_CST_NELTS (arg0).is_constant (&in_nelts))
2019 : : return NULL_TREE;
2020 : 11506 : out_nelts = in_nelts / 2;
2021 : 11506 : gcc_assert (known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
2022 : :
2023 : 11506 : unsigned int offset = 0;
2024 : 11506 : if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
2025 : 11506 : || code == VEC_UNPACK_FLOAT_LO_EXPR
2026 : : || code == VEC_UNPACK_FIX_TRUNC_LO_EXPR))
2027 : 5743 : offset = out_nelts;
2028 : :
2029 : 11506 : if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
2030 : : subcode = NOP_EXPR;
2031 : 7192 : else if (code == VEC_UNPACK_FLOAT_LO_EXPR
2032 : 7192 : || code == VEC_UNPACK_FLOAT_HI_EXPR)
2033 : : subcode = FLOAT_EXPR;
2034 : : else
2035 : 0 : subcode = FIX_TRUNC_EXPR;
2036 : :
2037 : 11506 : tree_vector_builder elts (type, out_nelts, 1);
2038 : 52304 : for (i = 0; i < out_nelts; i++)
2039 : : {
2040 : 40798 : tree elt = fold_convert_const (subcode, TREE_TYPE (type),
2041 : 40798 : VECTOR_CST_ELT (arg0, i + offset));
2042 : 40798 : if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
2043 : 0 : return NULL_TREE;
2044 : 40798 : elts.quick_push (elt);
2045 : : }
2046 : :
2047 : 11506 : return elts.build ();
2048 : 11506 : }
2049 : :
2050 : 4 : case VEC_DUPLICATE_EXPR:
2051 : 4 : if (CONSTANT_CLASS_P (arg0))
2052 : 4 : return build_vector_from_val (type, arg0);
2053 : : return NULL_TREE;
2054 : :
2055 : : default:
2056 : : break;
2057 : : }
2058 : :
2059 : : return NULL_TREE;
2060 : : }
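
The stepped-encoding remark in the BIT_NOT_EXPR case rests on the identity ~x == -1 - x. Here is a small standalone check added by the editor (not GCC code) that bitwise NOT maps an arithmetic series to another arithmetic series, which is why transforming only the encoded elements is enough.

#include <cassert>
#include <cstdint>
#include <cstdio>

int
main ()
{
  const int32_t base = 5, step = 3, nelts = 8;
  int32_t v[nelts], n[nelts];
  for (int i = 0; i < nelts; i++)
    {
      v[i] = base + i * step;
      n[i] = ~v[i];
      assert (n[i] == -1 - v[i]);        /* ~x == -1 - x in two's complement */
    }
  /* The NOT-ed series is again "stepped": constant difference -step.  */
  for (int i = 1; i < nelts; i++)
    assert (n[i] - n[i - 1] == -step);
  std::printf ("~(%d + i*%d) == %d + i*%d for every lane\n",
               base, step, n[0], -step);
  return 0;
}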
2061 : :
2062 : : /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
2063 : : indicates which particular sizetype to create. */
2064 : :
2065 : : tree
2066 : 2970047534 : size_int_kind (poly_int64 number, enum size_type_kind kind)
2067 : : {
2068 : 2970047534 : return build_int_cst (sizetype_tab[(int) kind], number);
2069 : : }
2070 : :
2071 : : /* Combine operands ARG0 and ARG1 with arithmetic operation CODE. CODE
2072 : : is a tree code. The type of the result is taken from the operands.
2073 : : Both must be equivalent integer types, a la int_binop_types_match_p.
2074 : : If the operands are constant, so is the result. */
2075 : :
2076 : : tree
2077 : 1993841257 : size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
2078 : : {
2079 : 1993841257 : tree type = TREE_TYPE (arg0);
2080 : :
2081 : 1993841257 : if (arg0 == error_mark_node || arg1 == error_mark_node)
2082 : : return error_mark_node;
2083 : :
2084 : 1993841257 : gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
2085 : : TREE_TYPE (arg1)));
2086 : :
2087 : : /* Handle the special case of two poly_int constants faster. */
2088 : 1993841257 : if (poly_int_tree_p (arg0) && poly_int_tree_p (arg1))
2089 : : {
2090 : : /* And some specific cases even faster than that. */
2091 : 1968163020 : if (code == PLUS_EXPR)
2092 : : {
2093 : 894974940 : if (integer_zerop (arg0)
2094 : 894974940 : && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
2095 : : return arg1;
2096 : 250153154 : if (integer_zerop (arg1)
2097 : 250153154 : && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
2098 : : return arg0;
2099 : : }
2100 : 1073188080 : else if (code == MINUS_EXPR)
2101 : : {
2102 : 98284163 : if (integer_zerop (arg1)
2103 : 98284163 : && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
2104 : : return arg0;
2105 : : }
2106 : 974903917 : else if (code == MULT_EXPR)
2107 : : {
2108 : 433807576 : if (integer_onep (arg0)
2109 : 433807576 : && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
2110 : : return arg1;
2111 : : }
2112 : :
2113 : : /* Handle general case of two integer constants. For sizetype
2114 : : constant calculations we always want to know about overflow,
2115 : : even in the unsigned case. */
2116 : 978792675 : tree res = int_const_binop (code, arg0, arg1, -1);
2117 : 978792675 : if (res != NULL_TREE)
2118 : : return res;
2119 : : }
2120 : :
2121 : 25678237 : return fold_build2_loc (loc, code, type, arg0, arg1);
2122 : : }
2123 : :
2124 : : /* Given two values, either both of sizetype or both of bitsizetype,
2125 : : compute the difference between the two values. Return the value
2126 : : in the signed type corresponding to the type of the operands. */
2127 : :
2128 : : tree
2129 : 31240779 : size_diffop_loc (location_t loc, tree arg0, tree arg1)
2130 : : {
2131 : 31240779 : tree type = TREE_TYPE (arg0);
2132 : 31240779 : tree ctype;
2133 : :
2134 : 31240779 : gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
2135 : : TREE_TYPE (arg1)));
2136 : :
2137 : : /* If the type is already signed, just do the simple thing. */
2138 : 31240779 : if (!TYPE_UNSIGNED (type))
2139 : 9261616 : return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
2140 : :
2141 : 21979163 : if (type == sizetype)
2142 : 21979163 : ctype = ssizetype;
2143 : 0 : else if (type == bitsizetype)
2144 : 0 : ctype = sbitsizetype;
2145 : : else
2146 : 0 : ctype = signed_type_for (type);
2147 : :
2148 : : /* If either operand is not a constant, do the conversions to the signed
2149 : : type and subtract. The hardware will do the right thing with any
2150 : : overflow in the subtraction. */
2151 : 21979163 : if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
2152 : 8795 : return size_binop_loc (loc, MINUS_EXPR,
2153 : : fold_convert_loc (loc, ctype, arg0),
2154 : 8795 : fold_convert_loc (loc, ctype, arg1));
2155 : :
2156 : : /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
2157 : : Otherwise, subtract the other way, convert to CTYPE (we know that can't
2158 : : overflow) and negate (which can't either). Special-case a result
2159 : : of zero while we're here. */
2160 : 21970368 : if (tree_int_cst_equal (arg0, arg1))
2161 : 18766391 : return build_int_cst (ctype, 0);
2162 : 3203977 : else if (tree_int_cst_lt (arg1, arg0))
2163 : 2081545 : return fold_convert_loc (loc, ctype,
2164 : 2081545 : size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
2165 : : else
2166 : 1122432 : return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
2167 : : fold_convert_loc (loc, ctype,
2168 : : size_binop_loc (loc,
2169 : : MINUS_EXPR,
2170 : : arg1, arg0)));
2171 : : }
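
The constant path of size_diffop_loc can be mimicked with ordinary integers. The sketch below is an editor's addition, not GCC code; uint64_t stands in for sizetype, int64_t for ssizetype, and the magnitude of the difference is assumed to fit the signed type, as the code above assumes for object sizes.

#include <cstdint>
#include <cstdio>

/* Subtract in the direction that cannot wrap, then negate if needed,
   so the conversion to the signed type never overflows (under the
   assumption stated above).  */
static int64_t
size_diff (uint64_t a, uint64_t b)
{
  if (a == b)
    return 0;
  if (b < a)
    return (int64_t) (a - b);
  return -(int64_t) (b - a);
}

int
main ()
{
  std::printf ("%lld %lld\n",
               (long long) size_diff (40, 16),    /* 24 */
               (long long) size_diff (16, 40));   /* -24 */
  return 0;
}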
2172 : :
2173 : : /* A subroutine of fold_convert_const handling conversions of an
2174 : : INTEGER_CST to another integer type. */
2175 : :
2176 : : static tree
2177 : 1087352825 : fold_convert_const_int_from_int (tree type, const_tree arg1)
2178 : : {
2179 : : /* Given an integer constant, make a new constant with the new type,
2180 : : appropriately sign-extended or truncated. Use a wide-enough
2181 : : precision so that any extension is done according to ARG1's type. */
2182 : 1087352825 : tree arg1_type = TREE_TYPE (arg1);
2183 : 1087352825 : unsigned prec = MAX (TYPE_PRECISION (arg1_type), TYPE_PRECISION (type));
2184 : 1087352825 : return force_fit_type (type, wide_int::from (wi::to_wide (arg1), prec,
2185 : 1087352825 : TYPE_SIGN (arg1_type)),
2186 : 1087352825 : !POINTER_TYPE_P (TREE_TYPE (arg1)),
2187 : 1087352825 : TREE_OVERFLOW (arg1));
2188 : : }
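
The extend-then-truncate rule in the comment above can be reproduced with plain masks and shifts. This standalone sketch is an editor's addition; convert_int and its parameters are hypothetical names for the example. It shows that the extension follows the source type's sign before the value is refitted to the destination precision.

#include <cstdint>
#include <cstdio>

/* Extend VAL (of SRC_PREC bits, signed if SRC_SIGNED) to 64 bits using
   the source sign, then keep only the low DST_PREC bits.  */
static uint64_t
convert_int (uint64_t val, unsigned src_prec, bool src_signed, unsigned dst_prec)
{
  uint64_t ext;
  if (src_signed && ((val >> (src_prec - 1)) & 1))
    ext = val | (~UINT64_C (0) << src_prec);        /* sign-extend */
  else
    ext = val & ((UINT64_C (1) << src_prec) - 1);   /* zero-extend */
  if (dst_prec < 64)
    ext &= (UINT64_C (1) << dst_prec) - 1;          /* truncate to destination */
  return ext;
}

int
main ()
{
  /* (int8_t) -1 -> 16 bits: sign-extend first, giving 0xffff.  */
  std::printf ("0x%llx\n", (unsigned long long) convert_int (0xff, 8, true, 16));
  /* (uint8_t) 0xff -> 16 bits: zero-extend, giving 0x00ff.  */
  std::printf ("0x%llx\n", (unsigned long long) convert_int (0xff, 8, false, 16));
  /* 16-bit 0x1234 -> 8 bits: keep the low byte, 0x34.  */
  std::printf ("0x%llx\n", (unsigned long long) convert_int (0x1234, 16, true, 8));
  return 0;
}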
2189 : :
2190 : : /* A subroutine of fold_convert_const handling conversions a REAL_CST
2191 : : to an integer type. */
2192 : :
2193 : : static tree
2194 : 35303 : fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2195 : : {
2196 : 35303 : bool overflow = false;
2197 : 35303 : tree t;
2198 : :
2199 : : /* The following code implements the floating point to integer
2200 : : conversion rules required by the Java Language Specification:
2201 : : IEEE NaNs are mapped to zero and values that overflow
2202 : : the target precision saturate, i.e. values greater than
2203 : : INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2204 : : are mapped to INT_MIN. These semantics are allowed by the
2205 : : C and C++ standards, which simply state that the behavior of
2206 : : FP-to-integer conversion is unspecified upon overflow. */
2207 : :
2208 : 35303 : wide_int val;
2209 : 35303 : REAL_VALUE_TYPE r;
2210 : 35303 : REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2211 : :
2212 : 35303 : switch (code)
2213 : : {
2214 : 35303 : case FIX_TRUNC_EXPR:
2215 : 35303 : real_trunc (&r, VOIDmode, &x);
2216 : 35303 : break;
2217 : :
2218 : 0 : default:
2219 : 0 : gcc_unreachable ();
2220 : : }
2221 : :
2222 : : /* If R is NaN, return zero and show we have an overflow. */
2223 : 35303 : if (REAL_VALUE_ISNAN (r))
2224 : : {
2225 : 3638 : overflow = true;
2226 : 3638 : val = wi::zero (TYPE_PRECISION (type));
2227 : : }
2228 : :
2229 : : /* See if R is less than the lower bound or greater than the
2230 : : upper bound. */
2231 : :
2232 : 35303 : if (! overflow)
2233 : : {
2234 : 31665 : tree lt = TYPE_MIN_VALUE (type);
2235 : 31665 : REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2236 : 31665 : if (real_less (&r, &l))
2237 : : {
2238 : 1974 : overflow = true;
2239 : 1974 : val = wi::to_wide (lt);
2240 : : }
2241 : : }
2242 : :
2243 : 35303 : if (! overflow)
2244 : : {
2245 : 29691 : tree ut = TYPE_MAX_VALUE (type);
2246 : 29691 : if (ut)
2247 : : {
2248 : 29691 : REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2249 : 29691 : if (real_less (&u, &r))
2250 : : {
2251 : 1927 : overflow = true;
2252 : 1927 : val = wi::to_wide (ut);
2253 : : }
2254 : : }
2255 : : }
2256 : :
2257 : 35303 : if (! overflow)
2258 : 27764 : val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
2259 : :
2260 : : /* According to the IEEE standard, for conversions from floating point
2261 : : to integer: when a NaN or infinite operand cannot be represented in the
2262 : : destination format and this cannot otherwise be indicated, the invalid
2263 : : operation exception shall be signaled. When a numeric operand would
2264 : : convert to an integer outside the range of the destination format, the
2265 : : invalid operation exception shall be signaled if this situation cannot
2266 : : otherwise be indicated. */
2267 : 35303 : if (!flag_trapping_math || !overflow)
2268 : 28020 : t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
2269 : : else
2270 : : t = NULL_TREE;
2271 : :
2272 : 35303 : return t;
2273 : 35303 : }
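
As a rough standalone illustration (editor's addition, not GCC code), the saturating value computation above looks like the following for double to int32_t: NaN becomes zero, out-of-range values clamp to the type's minimum or maximum, and in-range values truncate toward zero. The trapping-math early-out is not modeled here.

#include <cmath>
#include <cstdint>
#include <cstdio>
#include <limits>

static int32_t
sat_fix_trunc (double x)
{
  if (std::isnan (x))
    return 0;                                       /* NaN maps to zero */
  double t = std::trunc (x);                        /* round toward zero */
  if (t < (double) std::numeric_limits<int32_t>::min ())
    return std::numeric_limits<int32_t>::min ();    /* saturate low */
  if (t > (double) std::numeric_limits<int32_t>::max ())
    return std::numeric_limits<int32_t>::max ();    /* saturate high */
  return (int32_t) t;
}

int
main ()
{
  const double tests[] = { 3.7, -3.7, 1e30, -1e30, NAN };
  for (double d : tests)
    std::printf ("%g -> %d\n", d, sat_fix_trunc (d));
  return 0;
}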
2274 : :
2275 : : /* A subroutine of fold_convert_const handling conversions of a
2276 : : FIXED_CST to an integer type. */
2277 : :
2278 : : static tree
2279 : 0 : fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2280 : : {
2281 : 0 : tree t;
2282 : 0 : double_int temp, temp_trunc;
2283 : 0 : scalar_mode mode;
2284 : :
2285 : : /* Right shift FIXED_CST to temp by fbit. */
2286 : 0 : temp = TREE_FIXED_CST (arg1).data;
2287 : 0 : mode = TREE_FIXED_CST (arg1).mode;
2288 : 0 : if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
2289 : : {
2290 : 0 : temp = temp.rshift (GET_MODE_FBIT (mode),
2291 : : HOST_BITS_PER_DOUBLE_INT,
2292 : 0 : SIGNED_FIXED_POINT_MODE_P (mode));
2293 : :
2294 : : /* Left shift temp to temp_trunc by fbit. */
2295 : 0 : temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
2296 : : HOST_BITS_PER_DOUBLE_INT,
2297 : 0 : SIGNED_FIXED_POINT_MODE_P (mode));
2298 : : }
2299 : : else
2300 : : {
2301 : 0 : temp = double_int_zero;
2302 : 0 : temp_trunc = double_int_zero;
2303 : : }
2304 : :
2305 : : /* If FIXED_CST is negative, we need to round the value toward 0:
2306 : : if the fractional bits are not zero, add 1 to temp. */
2307 : 0 : if (SIGNED_FIXED_POINT_MODE_P (mode)
2308 : 0 : && temp_trunc.is_negative ()
2309 : 0 : && TREE_FIXED_CST (arg1).data != temp_trunc)
2310 : 0 : temp += double_int_one;
2311 : :
2312 : : /* Given a fixed-point constant, make new constant with new type,
2313 : : appropriately sign-extended or truncated. */
2314 : 0 : t = force_fit_type (type, temp, -1,
2315 : 0 : (temp.is_negative ()
2316 : 0 : && (TYPE_UNSIGNED (type)
2317 : 0 : < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2318 : 0 : | TREE_OVERFLOW (arg1));
2319 : :
2320 : 0 : return t;
2321 : : }
2322 : :
2323 : : /* A subroutine of fold_convert_const handling conversions a REAL_CST
2324 : : to another floating point type. */
2325 : :
2326 : : static tree
2327 : 1975811 : fold_convert_const_real_from_real (tree type, const_tree arg1)
2328 : : {
2329 : 1975811 : REAL_VALUE_TYPE value;
2330 : 1975811 : tree t;
2331 : :
2332 : : /* If the underlying modes are the same, simply treat this as a
2333 : : copy and rebuild with the TREE_REAL_CST information and the
2334 : : given type. */
2335 : 1975811 : if (TYPE_MODE (type) == TYPE_MODE (TREE_TYPE (arg1)))
2336 : : {
2337 : 97472 : t = build_real (type, TREE_REAL_CST (arg1));
2338 : 97472 : return t;
2339 : : }
2340 : :
2341 : : /* Don't perform the operation if flag_signaling_nans is on
2342 : : and the operand is a signaling NaN. */
2343 : 1878339 : if (HONOR_SNANS (arg1)
2344 : 1878699 : && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
2345 : : return NULL_TREE;
2346 : :
2347 : : /* With flag_rounding_math we should respect the current rounding mode
2348 : : unless the conversion is exact. */
2349 : 1878339 : if (HONOR_SIGN_DEPENDENT_ROUNDING (arg1)
2350 : 1878995 : && !exact_real_truncate (TYPE_MODE (type), &TREE_REAL_CST (arg1)))
2351 : 509 : return NULL_TREE;
2352 : :
2353 : 1877830 : real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2354 : 1877830 : t = build_real (type, value);
2355 : :
2356 : : /* If converting an infinity or NAN to a representation that doesn't
2357 : : have one, set the overflow bit so that we can produce some kind of
2358 : : error message at the appropriate point if necessary. It's not the
2359 : : most user-friendly message, but it's better than nothing. */
2360 : 1877830 : if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2361 : 1985572 : && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2362 : 0 : TREE_OVERFLOW (t) = 1;
2363 : 1877830 : else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2364 : 1979852 : && !MODE_HAS_NANS (TYPE_MODE (type)))
2365 : 0 : TREE_OVERFLOW (t) = 1;
2366 : : /* Regular overflow: the conversion produced an infinity in a mode
2367 : : that can't represent infinities. */
2368 : 9385677 : else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2369 : 0 : && REAL_VALUE_ISINF (value)
2370 : 1877830 : && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2371 : 0 : TREE_OVERFLOW (t) = 1;
2372 : : else
2373 : 1877830 : TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2374 : : return t;
2375 : : }
2376 : :
2377 : : /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2378 : : to a floating point type. */
2379 : :
2380 : : static tree
2381 : 0 : fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2382 : : {
2383 : 0 : REAL_VALUE_TYPE value;
2384 : 0 : tree t;
2385 : :
2386 : 0 : real_convert_from_fixed (&value, SCALAR_FLOAT_TYPE_MODE (type),
2387 : 0 : &TREE_FIXED_CST (arg1));
2388 : 0 : t = build_real (type, value);
2389 : :
2390 : 0 : TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2391 : 0 : return t;
2392 : : }
2393 : :
2394 : : /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2395 : : to another fixed-point type. */
2396 : :
2397 : : static tree
2398 : 0 : fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2399 : : {
2400 : 0 : FIXED_VALUE_TYPE value;
2401 : 0 : tree t;
2402 : 0 : bool overflow_p;
2403 : :
2404 : 0 : overflow_p = fixed_convert (&value, SCALAR_TYPE_MODE (type),
2405 : 0 : &TREE_FIXED_CST (arg1), TYPE_SATURATING (type));
2406 : 0 : t = build_fixed (type, value);
2407 : :
2408 : : /* Propagate overflow flags. */
2409 : 0 : if (overflow_p | TREE_OVERFLOW (arg1))
2410 : 0 : TREE_OVERFLOW (t) = 1;
2411 : 0 : return t;
2412 : : }
2413 : :
2414 : : /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
2415 : : to a fixed-point type. */
2416 : :
2417 : : static tree
2418 : 0 : fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2419 : : {
2420 : 0 : FIXED_VALUE_TYPE value;
2421 : 0 : tree t;
2422 : 0 : bool overflow_p;
2423 : 0 : double_int di;
2424 : :
2425 : 0 : gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2426 : :
2427 : 0 : di.low = TREE_INT_CST_ELT (arg1, 0);
2428 : 0 : if (TREE_INT_CST_NUNITS (arg1) == 1)
2429 : 0 : di.high = (HOST_WIDE_INT) di.low < 0 ? HOST_WIDE_INT_M1 : 0;
2430 : : else
2431 : 0 : di.high = TREE_INT_CST_ELT (arg1, 1);
2432 : :
2433 : 0 : overflow_p = fixed_convert_from_int (&value, SCALAR_TYPE_MODE (type), di,
2434 : 0 : TYPE_UNSIGNED (TREE_TYPE (arg1)),
2435 : 0 : TYPE_SATURATING (type));
2436 : 0 : t = build_fixed (type, value);
2437 : :
2438 : : /* Propagate overflow flags. */
2439 : 0 : if (overflow_p | TREE_OVERFLOW (arg1))
2440 : 0 : TREE_OVERFLOW (t) = 1;
2441 : 0 : return t;
2442 : : }
2443 : :
2444 : : /* A subroutine of fold_convert_const handling conversions a REAL_CST
2445 : : to a fixed-point type. */
2446 : :
2447 : : static tree
2448 : 0 : fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2449 : : {
2450 : 0 : FIXED_VALUE_TYPE value;
2451 : 0 : tree t;
2452 : 0 : bool overflow_p;
2453 : :
2454 : 0 : overflow_p = fixed_convert_from_real (&value, SCALAR_TYPE_MODE (type),
2455 : 0 : &TREE_REAL_CST (arg1),
2456 : 0 : TYPE_SATURATING (type));
2457 : 0 : t = build_fixed (type, value);
2458 : :
2459 : : /* Propagate overflow flags. */
2460 : 0 : if (overflow_p | TREE_OVERFLOW (arg1))
2461 : 0 : TREE_OVERFLOW (t) = 1;
2462 : 0 : return t;
2463 : : }
2464 : :
2465 : : /* Attempt to fold type conversion operation CODE of expression ARG1 to
2466 : : type TYPE. If no simplification can be done return NULL_TREE. */
2467 : :
2468 : : static tree
2469 : 1144890228 : fold_convert_const (enum tree_code code, tree type, tree arg1)
2470 : : {
2471 : 1144890228 : tree arg_type = TREE_TYPE (arg1);
2472 : 1144890228 : if (arg_type == type)
2473 : : return arg1;
2474 : :
2475 : : /* We can't widen types, since the runtime value could overflow the
2476 : : original type before being extended to the new type. */
2477 : 1134555776 : if (POLY_INT_CST_P (arg1)
2478 : : && (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2479 : : && TYPE_PRECISION (type) <= TYPE_PRECISION (arg_type))
2480 : : return build_poly_int_cst (type,
2481 : : poly_wide_int::from (poly_int_cst_value (arg1),
2482 : : TYPE_PRECISION (type),
2483 : : TYPE_SIGN (arg_type)));
2484 : :
2485 : 1134555776 : if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2486 : : || TREE_CODE (type) == OFFSET_TYPE)
2487 : : {
2488 : 1103378165 : if (TREE_CODE (arg1) == INTEGER_CST)
2489 : 1087352825 : return fold_convert_const_int_from_int (type, arg1);
2490 : 16025340 : else if (TREE_CODE (arg1) == REAL_CST)
2491 : 35303 : return fold_convert_const_int_from_real (code, type, arg1);
2492 : 15990037 : else if (TREE_CODE (arg1) == FIXED_CST)
2493 : 0 : return fold_convert_const_int_from_fixed (type, arg1);
2494 : : }
2495 : : else if (SCALAR_FLOAT_TYPE_P (type))
2496 : : {
2497 : 31124931 : if (TREE_CODE (arg1) == INTEGER_CST)
2498 : : {
2499 : 24113959 : tree res = build_real_from_int_cst (type, arg1);
2500 : : /* Avoid the folding if flag_rounding_math is on and the
2501 : : conversion is not exact. */
2502 : 24113959 : if (HONOR_SIGN_DEPENDENT_ROUNDING (type))
2503 : : {
2504 : 2885 : bool fail = false;
2505 : 5770 : wide_int w = real_to_integer (&TREE_REAL_CST (res), &fail,
2506 : 2885 : TYPE_PRECISION (TREE_TYPE (arg1)));
2507 : 2885 : if (fail || wi::ne_p (w, wi::to_wide (arg1)))
2508 : 1730 : return NULL_TREE;
2509 : 2885 : }
2510 : 24112229 : return res;
2511 : : }
2512 : 7010972 : else if (TREE_CODE (arg1) == REAL_CST)
2513 : 1975811 : return fold_convert_const_real_from_real (type, arg1);
2514 : 5035161 : else if (TREE_CODE (arg1) == FIXED_CST)
2515 : 0 : return fold_convert_const_real_from_fixed (type, arg1);
2516 : : }
2517 : : else if (FIXED_POINT_TYPE_P (type))
2518 : : {
2519 : 0 : if (TREE_CODE (arg1) == FIXED_CST)
2520 : 0 : return fold_convert_const_fixed_from_fixed (type, arg1);
2521 : 0 : else if (TREE_CODE (arg1) == INTEGER_CST)
2522 : 0 : return fold_convert_const_fixed_from_int (type, arg1);
2523 : 0 : else if (TREE_CODE (arg1) == REAL_CST)
2524 : 0 : return fold_convert_const_fixed_from_real (type, arg1);
2525 : : }
2526 : : else if (VECTOR_TYPE_P (type))
2527 : : {
2528 : 6900 : if (TREE_CODE (arg1) == VECTOR_CST
2529 : 6900 : && known_eq (TYPE_VECTOR_SUBPARTS (type), VECTOR_CST_NELTS (arg1)))
2530 : : {
2531 : 6900 : tree elttype = TREE_TYPE (type);
2532 : 6900 : tree arg1_elttype = TREE_TYPE (TREE_TYPE (arg1));
2533 : : /* We can't handle steps directly when extending, since the
2534 : : values need to wrap at the original precision first. */
2535 : 6900 : bool step_ok_p
2536 : 6900 : = (INTEGRAL_TYPE_P (elttype)
2537 : 257 : && INTEGRAL_TYPE_P (arg1_elttype)
2538 : 7113 : && TYPE_PRECISION (elttype) <= TYPE_PRECISION (arg1_elttype));
2539 : 6900 : tree_vector_builder v;
2540 : 6900 : if (!v.new_unary_operation (type, arg1, step_ok_p))
2541 : : return NULL_TREE;
2542 : 6900 : unsigned int len = v.encoded_nelts ();
2543 : 39312 : for (unsigned int i = 0; i < len; ++i)
2544 : : {
2545 : 32412 : tree elt = VECTOR_CST_ELT (arg1, i);
2546 : 32412 : tree cvt = fold_convert_const (code, elttype, elt);
2547 : 32412 : if (cvt == NULL_TREE)
2548 : 0 : return NULL_TREE;
2549 : 32412 : v.quick_push (cvt);
2550 : : }
2551 : 6900 : return v.build ();
2552 : 6900 : }
2553 : : }
2554 : : return NULL_TREE;
2555 : : }
2556 : :
2557 : : /* Construct a vector of zero elements of vector type TYPE. */
2558 : :
2559 : : static tree
2560 : 16783 : build_zero_vector (tree type)
2561 : : {
2562 : 16783 : tree t;
2563 : :
2564 : 16783 : t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2565 : 16783 : return build_vector_from_val (type, t);
2566 : : }
2567 : :
2568 : : /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
2569 : :
2570 : : bool
2571 : 165819 : fold_convertible_p (const_tree type, const_tree arg)
2572 : : {
2573 : 165819 : const_tree orig = TREE_TYPE (arg);
2574 : :
2575 : 165819 : if (type == orig)
2576 : : return true;
2577 : :
2578 : 165819 : if (TREE_CODE (arg) == ERROR_MARK
2579 : 165819 : || TREE_CODE (type) == ERROR_MARK
2580 : 165819 : || TREE_CODE (orig) == ERROR_MARK)
2581 : : return false;
2582 : :
2583 : 165819 : if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2584 : : return true;
2585 : :
2586 : 165819 : switch (TREE_CODE (type))
2587 : : {
2588 : 165458 : case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2589 : 165458 : case POINTER_TYPE: case REFERENCE_TYPE:
2590 : 165458 : case OFFSET_TYPE:
2591 : 165458 : return (INTEGRAL_TYPE_P (orig)
2592 : 228 : || (POINTER_TYPE_P (orig)
2593 : 93 : && TYPE_PRECISION (type) <= TYPE_PRECISION (orig))
2594 : 165593 : || TREE_CODE (orig) == OFFSET_TYPE);
2595 : :
2596 : 42 : case REAL_TYPE:
2597 : 42 : case FIXED_POINT_TYPE:
2598 : 42 : case VOID_TYPE:
2599 : 42 : return TREE_CODE (type) == TREE_CODE (orig);
2600 : :
2601 : 201 : case VECTOR_TYPE:
2602 : 201 : return (VECTOR_TYPE_P (orig)
2603 : 306 : && known_eq (TYPE_VECTOR_SUBPARTS (type),
2604 : : TYPE_VECTOR_SUBPARTS (orig))
2605 : 210 : && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2606 : :
2607 : : default:
2608 : : return false;
2609 : : }
2610 : : }
2611 : :
2612 : : /* Convert expression ARG to type TYPE. Used by the middle-end for
2613 : : simple conversions in preference to calling the front-end's convert. */
2614 : :
2615 : : tree
2616 : 1779701996 : fold_convert_loc (location_t loc, tree type, tree arg)
2617 : : {
2618 : 1779701996 : tree orig = TREE_TYPE (arg);
2619 : 1779701996 : tree tem;
2620 : :
2621 : 1779701996 : if (type == orig)
2622 : : return arg;
2623 : :
2624 : 1161666584 : if (TREE_CODE (arg) == ERROR_MARK
2625 : 1161665576 : || TREE_CODE (type) == ERROR_MARK
2626 : 1161665575 : || TREE_CODE (orig) == ERROR_MARK)
2627 : 1009 : return error_mark_node;
2628 : :
2629 : 1161665575 : switch (TREE_CODE (type))
2630 : : {
2631 : 53159803 : case POINTER_TYPE:
2632 : 53159803 : case REFERENCE_TYPE:
2633 : : /* Handle conversions between pointers to different address spaces. */
2634 : 53159803 : if (POINTER_TYPE_P (orig)
2635 : 53159803 : && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2636 : 43982899 : != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2637 : 125 : return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2638 : : /* fall through */
2639 : :
2640 : 1129510598 : case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2641 : 1129510598 : case OFFSET_TYPE: case BITINT_TYPE:
2642 : 1129510598 : if (TREE_CODE (arg) == INTEGER_CST)
2643 : : {
2644 : 961792588 : tem = fold_convert_const (NOP_EXPR, type, arg);
2645 : 961792588 : if (tem != NULL_TREE)
2646 : : return tem;
2647 : : }
2648 : 167718010 : if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2649 : 3088 : || TREE_CODE (orig) == OFFSET_TYPE)
2650 : 167718010 : return fold_build1_loc (loc, NOP_EXPR, type, arg);
2651 : 0 : if (TREE_CODE (orig) == COMPLEX_TYPE)
2652 : 0 : return fold_convert_loc (loc, type,
2653 : : fold_build1_loc (loc, REALPART_EXPR,
2654 : 0 : TREE_TYPE (orig), arg));
2655 : 0 : gcc_assert (VECTOR_TYPE_P (orig)
2656 : : && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2657 : 0 : return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2658 : :
2659 : 533362 : case REAL_TYPE:
2660 : 533362 : if (TREE_CODE (arg) == INTEGER_CST)
2661 : : {
2662 : 57264 : tem = fold_convert_const (FLOAT_EXPR, type, arg);
2663 : 57264 : if (tem != NULL_TREE)
2664 : : return tem;
2665 : : }
2666 : 476098 : else if (TREE_CODE (arg) == REAL_CST)
2667 : : {
2668 : 116191 : tem = fold_convert_const (NOP_EXPR, type, arg);
2669 : 116191 : if (tem != NULL_TREE)
2670 : : return tem;
2671 : : }
2672 : 359907 : else if (TREE_CODE (arg) == FIXED_CST)
2673 : : {
2674 : 0 : tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2675 : 0 : if (tem != NULL_TREE)
2676 : : return tem;
2677 : : }
2678 : :
2679 : 359909 : switch (TREE_CODE (orig))
2680 : : {
2681 : 636 : case INTEGER_TYPE: case BITINT_TYPE:
2682 : 636 : case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2683 : 636 : case POINTER_TYPE: case REFERENCE_TYPE:
2684 : 636 : return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2685 : :
2686 : 359273 : case REAL_TYPE:
2687 : 359273 : return fold_build1_loc (loc, NOP_EXPR, type, arg);
2688 : :
2689 : 0 : case FIXED_POINT_TYPE:
2690 : 0 : return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2691 : :
2692 : 0 : case COMPLEX_TYPE:
2693 : 0 : tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2694 : 0 : return fold_convert_loc (loc, type, tem);
2695 : :
2696 : 0 : default:
2697 : 0 : gcc_unreachable ();
2698 : : }
2699 : :
2700 : 0 : case FIXED_POINT_TYPE:
2701 : 0 : if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2702 : 0 : || TREE_CODE (arg) == REAL_CST)
2703 : : {
2704 : 0 : tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2705 : 0 : if (tem != NULL_TREE)
2706 : 0 : goto fold_convert_exit;
2707 : : }
2708 : :
2709 : 0 : switch (TREE_CODE (orig))
2710 : : {
2711 : 0 : case FIXED_POINT_TYPE:
2712 : 0 : case INTEGER_TYPE:
2713 : 0 : case ENUMERAL_TYPE:
2714 : 0 : case BOOLEAN_TYPE:
2715 : 0 : case REAL_TYPE:
2716 : 0 : case BITINT_TYPE:
2717 : 0 : return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2718 : :
2719 : 0 : case COMPLEX_TYPE:
2720 : 0 : tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2721 : 0 : return fold_convert_loc (loc, type, tem);
2722 : :
2723 : 0 : default:
2724 : 0 : gcc_unreachable ();
2725 : : }
2726 : :
2727 : 2254 : case COMPLEX_TYPE:
2728 : 2254 : switch (TREE_CODE (orig))
2729 : : {
2730 : 584 : case INTEGER_TYPE: case BITINT_TYPE:
2731 : 584 : case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2732 : 584 : case POINTER_TYPE: case REFERENCE_TYPE:
2733 : 584 : case REAL_TYPE:
2734 : 584 : case FIXED_POINT_TYPE:
2735 : 1168 : return fold_build2_loc (loc, COMPLEX_EXPR, type,
2736 : 584 : fold_convert_loc (loc, TREE_TYPE (type), arg),
2737 : 584 : fold_convert_loc (loc, TREE_TYPE (type),
2738 : 584 : integer_zero_node));
2739 : 1670 : case COMPLEX_TYPE:
2740 : 1670 : {
2741 : 1670 : tree rpart, ipart;
2742 : :
2743 : 1670 : if (TREE_CODE (arg) == COMPLEX_EXPR)
2744 : : {
2745 : 1534 : rpart = fold_convert_loc (loc, TREE_TYPE (type),
2746 : 1534 : TREE_OPERAND (arg, 0));
2747 : 1534 : ipart = fold_convert_loc (loc, TREE_TYPE (type),
2748 : 1534 : TREE_OPERAND (arg, 1));
2749 : 1534 : return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2750 : : }
2751 : :
2752 : 136 : arg = save_expr (arg);
2753 : 136 : rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2754 : 136 : ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2755 : 136 : rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2756 : 136 : ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2757 : 136 : return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2758 : : }
2759 : :
2760 : 0 : default:
2761 : 0 : gcc_unreachable ();
2762 : : }
2763 : :
2764 : 31506548 : case VECTOR_TYPE:
2765 : 31506548 : if (integer_zerop (arg))
2766 : 16783 : return build_zero_vector (type);
2767 : 31489765 : gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2768 : 31489765 : gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2769 : : || VECTOR_TYPE_P (orig));
2770 : 31489765 : return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2771 : :
2772 : 109443 : case VOID_TYPE:
2773 : 109443 : tem = fold_ignored_result (arg);
2774 : 109443 : return fold_build1_loc (loc, NOP_EXPR, type, tem);
2775 : :
2776 : 3245 : default:
2777 : 3245 : if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2778 : 3245 : return fold_build1_loc (loc, NOP_EXPR, type, arg);
2779 : 0 : gcc_unreachable ();
2780 : : }
2781 : 0 : fold_convert_exit:
2782 : 0 : tem = protected_set_expr_location_unshare (tem, loc);
2783 : 0 : return tem;
2784 : : }
2785 : :
2786 : : /* Return false if expr can be assumed not to be an lvalue, true
2787 : : otherwise. */
2788 : :
2789 : : static bool
2790 : 47724503 : maybe_lvalue_p (const_tree x)
2791 : : {
2792 : : /* We only need to wrap lvalue tree codes. */
2793 : 47724503 : switch (TREE_CODE (x))
2794 : : {
2795 : : case VAR_DECL:
2796 : : case PARM_DECL:
2797 : : case RESULT_DECL:
2798 : : case LABEL_DECL:
2799 : : case FUNCTION_DECL:
2800 : : case SSA_NAME:
2801 : : case COMPOUND_LITERAL_EXPR:
2802 : :
2803 : : case COMPONENT_REF:
2804 : : case MEM_REF:
2805 : : case INDIRECT_REF:
2806 : : case ARRAY_REF:
2807 : : case ARRAY_RANGE_REF:
2808 : : case BIT_FIELD_REF:
2809 : : case OBJ_TYPE_REF:
2810 : :
2811 : : case REALPART_EXPR:
2812 : : case IMAGPART_EXPR:
2813 : : case PREINCREMENT_EXPR:
2814 : : case PREDECREMENT_EXPR:
2815 : : case SAVE_EXPR:
2816 : : case TRY_CATCH_EXPR:
2817 : : case WITH_CLEANUP_EXPR:
2818 : : case COMPOUND_EXPR:
2819 : : case MODIFY_EXPR:
2820 : : case TARGET_EXPR:
2821 : : case COND_EXPR:
2822 : : case BIND_EXPR:
2823 : : case VIEW_CONVERT_EXPR:
2824 : : break;
2825 : :
2826 : 35940771 : default:
2827 : : /* Assume the worst for front-end tree codes. */
2828 : 35940771 : if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2829 : : break;
2830 : : return false;
2831 : : }
2832 : :
2833 : 11863327 : return true;
2834 : : }
2835 : :
2836 : : /* Return an expr equal to X but certainly not valid as an lvalue. */
2837 : :
2838 : : tree
2839 : 39272789 : non_lvalue_loc (location_t loc, tree x)
2840 : : {
2841 : : /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2842 : : us. */
2843 : 39272789 : if (in_gimple_form)
2844 : : return x;
2845 : :
2846 : 8737217 : if (! maybe_lvalue_p (x))
2847 : : return x;
2848 : 1922349 : return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2849 : : }
2850 : :
2851 : : /* Given a tree comparison code, return the code that is the logical inverse.
2852 : : It is generally not safe to do this for floating-point comparisons, except
2853 : : for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2854 : : ERROR_MARK in this case. */
2855 : :
2856 : : enum tree_code
2857 : 107403187 : invert_tree_comparison (enum tree_code code, bool honor_nans)
2858 : : {
2859 : 107403187 : if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2860 : 1040655 : && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2861 : : return ERROR_MARK;
2862 : :
2863 : 106617195 : switch (code)
2864 : : {
2865 : : case EQ_EXPR:
2866 : : return NE_EXPR;
2867 : 47866854 : case NE_EXPR:
2868 : 47866854 : return EQ_EXPR;
2869 : 10574470 : case GT_EXPR:
2870 : 10574470 : return honor_nans ? UNLE_EXPR : LE_EXPR;
2871 : 11877469 : case GE_EXPR:
2872 : 11877469 : return honor_nans ? UNLT_EXPR : LT_EXPR;
2873 : 6722337 : case LT_EXPR:
2874 : 6722337 : return honor_nans ? UNGE_EXPR : GE_EXPR;
2875 : 6971169 : case LE_EXPR:
2876 : 6971169 : return honor_nans ? UNGT_EXPR : GT_EXPR;
2877 : 252 : case LTGT_EXPR:
2878 : 252 : return UNEQ_EXPR;
2879 : 289 : case UNEQ_EXPR:
2880 : 289 : return LTGT_EXPR;
2881 : : case UNGT_EXPR:
2882 : : return LE_EXPR;
2883 : : case UNGE_EXPR:
2884 : : return LT_EXPR;
2885 : : case UNLT_EXPR:
2886 : : return GE_EXPR;
2887 : : case UNLE_EXPR:
2888 : : return GT_EXPR;
2889 : 229607 : case ORDERED_EXPR:
2890 : 229607 : return UNORDERED_EXPR;
2891 : 52842 : case UNORDERED_EXPR:
2892 : 52842 : return ORDERED_EXPR;
2893 : 0 : default:
2894 : 0 : gcc_unreachable ();
2895 : : }
2896 : : }
2897 : :
2898 : : /* Similar, but return the comparison that results if the operands are
2899 : : swapped. This is safe for floating-point. */
2900 : :
2901 : : enum tree_code
2902 : 136572336 : swap_tree_comparison (enum tree_code code)
2903 : : {
2904 : 136572336 : switch (code)
2905 : : {
2906 : : case EQ_EXPR:
2907 : : case NE_EXPR:
2908 : : case ORDERED_EXPR:
2909 : : case UNORDERED_EXPR:
2910 : : case LTGT_EXPR:
2911 : : case UNEQ_EXPR:
2912 : : return code;
2913 : 32079809 : case GT_EXPR:
2914 : 32079809 : return LT_EXPR;
2915 : 9760491 : case GE_EXPR:
2916 : 9760491 : return LE_EXPR;
2917 : 19020818 : case LT_EXPR:
2918 : 19020818 : return GT_EXPR;
2919 : 13696409 : case LE_EXPR:
2920 : 13696409 : return GE_EXPR;
2921 : 258221 : case UNGT_EXPR:
2922 : 258221 : return UNLT_EXPR;
2923 : 18591 : case UNGE_EXPR:
2924 : 18591 : return UNLE_EXPR;
2925 : 386752 : case UNLT_EXPR:
2926 : 386752 : return UNGT_EXPR;
2927 : 128647 : case UNLE_EXPR:
2928 : 128647 : return UNGE_EXPR;
2929 : 0 : default:
2930 : 0 : gcc_unreachable ();
2931 : : }
2932 : : }
2933 : :
2934 : :
2935 : : /* Convert a comparison tree code from an enum tree_code representation
2936 : : into a compcode bit-based encoding. This function is the inverse of
2937 : : compcode_to_comparison. */
2938 : :
2939 : : static enum comparison_code
2940 : 53784 : comparison_to_compcode (enum tree_code code)
2941 : : {
2942 : 53784 : switch (code)
2943 : : {
2944 : : case LT_EXPR:
2945 : : return COMPCODE_LT;
2946 : : case EQ_EXPR:
2947 : : return COMPCODE_EQ;
2948 : : case LE_EXPR:
2949 : : return COMPCODE_LE;
2950 : : case GT_EXPR:
2951 : : return COMPCODE_GT;
2952 : : case NE_EXPR:
2953 : : return COMPCODE_NE;
2954 : : case GE_EXPR:
2955 : : return COMPCODE_GE;
2956 : : case ORDERED_EXPR:
2957 : : return COMPCODE_ORD;
2958 : : case UNORDERED_EXPR:
2959 : : return COMPCODE_UNORD;
2960 : : case UNLT_EXPR:
2961 : : return COMPCODE_UNLT;
2962 : : case UNEQ_EXPR:
2963 : : return COMPCODE_UNEQ;
2964 : : case UNLE_EXPR:
2965 : : return COMPCODE_UNLE;
2966 : : case UNGT_EXPR:
2967 : : return COMPCODE_UNGT;
2968 : : case LTGT_EXPR:
2969 : : return COMPCODE_LTGT;
2970 : : case UNGE_EXPR:
2971 : : return COMPCODE_UNGE;
2972 : 0 : default:
2973 : 0 : gcc_unreachable ();
2974 : : }
2975 : : }
2976 : :
2977 : : /* Convert a compcode bit-based encoding of a comparison operator back
2978 : : to GCC's enum tree_code representation. This function is the
2979 : : inverse of comparison_to_compcode. */
2980 : :
2981 : : static enum tree_code
2982 : 13196 : compcode_to_comparison (enum comparison_code code)
2983 : : {
2984 : 13196 : switch (code)
2985 : : {
2986 : : case COMPCODE_LT:
2987 : : return LT_EXPR;
2988 : : case COMPCODE_EQ:
2989 : : return EQ_EXPR;
2990 : : case COMPCODE_LE:
2991 : : return LE_EXPR;
2992 : : case COMPCODE_GT:
2993 : : return GT_EXPR;
2994 : : case COMPCODE_NE:
2995 : : return NE_EXPR;
2996 : : case COMPCODE_GE:
2997 : : return GE_EXPR;
2998 : : case COMPCODE_ORD:
2999 : : return ORDERED_EXPR;
3000 : : case COMPCODE_UNORD:
3001 : : return UNORDERED_EXPR;
3002 : : case COMPCODE_UNLT:
3003 : : return UNLT_EXPR;
3004 : : case COMPCODE_UNEQ:
3005 : : return UNEQ_EXPR;
3006 : : case COMPCODE_UNLE:
3007 : : return UNLE_EXPR;
3008 : : case COMPCODE_UNGT:
3009 : : return UNGT_EXPR;
3010 : : case COMPCODE_LTGT:
3011 : : return LTGT_EXPR;
3012 : : case COMPCODE_UNGE:
3013 : : return UNGE_EXPR;
3014 : 0 : default:
3015 : 0 : gcc_unreachable ();
3016 : : }
3017 : : }
3018 : :
3019 : : /* Return true if COND1 tests the opposite condition of COND2. */
3020 : :
3021 : : bool
3022 : 1279703 : inverse_conditions_p (const_tree cond1, const_tree cond2)
3023 : : {
3024 : 1279703 : return (COMPARISON_CLASS_P (cond1)
3025 : 1190891 : && COMPARISON_CLASS_P (cond2)
3026 : 1186844 : && (invert_tree_comparison
3027 : 1186844 : (TREE_CODE (cond1),
3028 : 2373688 : HONOR_NANS (TREE_OPERAND (cond1, 0))) == TREE_CODE (cond2))
3029 : 70504 : && operand_equal_p (TREE_OPERAND (cond1, 0),
3030 : 70504 : TREE_OPERAND (cond2, 0), 0)
3031 : 1302253 : && operand_equal_p (TREE_OPERAND (cond1, 1),
3032 : 22550 : TREE_OPERAND (cond2, 1), 0));
3033 : : }
3034 : :
3035 : : /* Return a tree for the comparison which is the combination of
3036 : : doing the AND or OR (depending on CODE) of the two operations LCODE
3037 : : and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
3038 : : the possibility of trapping if the mode has NaNs, and return NULL_TREE
3039 : : if this makes the transformation invalid. */
3040 : :
3041 : : tree
3042 : 26892 : combine_comparisons (location_t loc,
3043 : : enum tree_code code, enum tree_code lcode,
3044 : : enum tree_code rcode, tree truth_type,
3045 : : tree ll_arg, tree lr_arg)
3046 : : {
3047 : 26892 : bool honor_nans = HONOR_NANS (ll_arg);
3048 : 26892 : enum comparison_code lcompcode = comparison_to_compcode (lcode);
3049 : 26892 : enum comparison_code rcompcode = comparison_to_compcode (rcode);
3050 : 26892 : int compcode;
3051 : :
3052 : 26892 : switch (code)
3053 : : {
3054 : 17618 : case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
3055 : 17618 : compcode = lcompcode & rcompcode;
3056 : 17618 : break;
3057 : :
3058 : 9274 : case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
3059 : 9274 : compcode = lcompcode | rcompcode;
3060 : 9274 : break;
3061 : :
3062 : : default:
3063 : : return NULL_TREE;
3064 : : }
3065 : :
3066 : 26892 : if (!honor_nans)
3067 : : {
3068 : : /* Eliminate unordered comparisons, as well as LTGT and ORD
3069 : : which are not used unless the mode has NaNs. */
3070 : 21817 : compcode &= ~COMPCODE_UNORD;
3071 : 21817 : if (compcode == COMPCODE_LTGT)
3072 : : compcode = COMPCODE_NE;
3073 : 20897 : else if (compcode == COMPCODE_ORD)
3074 : : compcode = COMPCODE_TRUE;
3075 : : }
3076 : 5075 : else if (flag_trapping_math)
3077 : : {
3078 : : /* Check that the original operation and the optimized ones will trap
3079 : : under the same condition. */
3080 : 8308 : bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
3081 : 3518 : && (lcompcode != COMPCODE_EQ)
3082 : 4154 : && (lcompcode != COMPCODE_ORD);
3083 : 8308 : bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
3084 : 3666 : && (rcompcode != COMPCODE_EQ)
3085 : 4154 : && (rcompcode != COMPCODE_ORD);
3086 : 8308 : bool trap = (compcode & COMPCODE_UNORD) == 0
3087 : 3731 : && (compcode != COMPCODE_EQ)
3088 : 4154 : && (compcode != COMPCODE_ORD);
3089 : :
3090 : : /* In a short-circuited boolean expression the LHS might be
3091 : : such that the RHS, if evaluated, will never trap. For
3092 : : example, in ORD (x, y) && (x < y), we evaluate the RHS only
3093 : : if neither x nor y is NaN. (This is a mixed blessing: for
3094 : : example, the expression above will never trap, hence
3095 : : optimizing it to x < y would be invalid). */
3096 : 4154 : if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
3097 : 3753 : || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
3098 : 4154 : rtrap = false;
3099 : :
3100 : : /* If the comparison was short-circuited, and only the RHS
3101 : : trapped, we may now generate a spurious trap. */
3102 : 4154 : if (rtrap && !ltrap
3103 : 118 : && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3104 : : return NULL_TREE;
3105 : :
3106 : : /* If we changed the conditions that cause a trap, we lose. */
3107 : 4036 : if ((ltrap || rtrap) != trap)
3108 : : return NULL_TREE;
3109 : : }
3110 : :
3111 : 21313 : if (compcode == COMPCODE_TRUE)
3112 : 1254 : return constant_boolean_node (true, truth_type);
3113 : 22205 : else if (compcode == COMPCODE_FALSE)
3114 : 9009 : return constant_boolean_node (false, truth_type);
3115 : : else
3116 : : {
3117 : 13196 : enum tree_code tcode;
3118 : :
3119 : 13196 : tcode = compcode_to_comparison ((enum comparison_code) compcode);
3120 : 13196 : return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
3121 : : }
3122 : : }
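An editor's sketch of the combination performed above, assuming hypothetical integer trees `x' and `y' (so NaNs are not honored):

  tree t = combine_comparisons (UNKNOWN_LOCATION, TRUTH_ORIF_EXPR,
                                LT_EXPR, EQ_EXPR, boolean_type_node, x, y);
  /* (x < y) || (x == y) over identical operands combines to x <= y, so T is
     equivalent to fold_build2 (LE_EXPR, boolean_type_node, x, y).  With
     NaN-honoring operands and -ftrapping-math, combinations that would
     change the trapping behavior return NULL_TREE instead.  */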
3123 : :
3124 : : /* Return nonzero if two operands (typically of the same tree node)
3125 : : are necessarily equal. FLAGS modifies behavior as follows:
3126 : :
3127 : : If OEP_ONLY_CONST is set, only return nonzero for constants.
3128 : : This function tests whether the operands are indistinguishable;
3129 : : it does not test whether they are equal using C's == operation.
3130 : : The distinction is important for IEEE floating point, because
3131 : : (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
3132 : : (2) two NaNs may be indistinguishable, but NaN!=NaN.
3133 : :
3134 : : If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
3135 : : even though it may hold multiple values during a function.
3136 : : This is because a GCC tree node guarantees that nothing else is
3137 : : executed between the evaluation of its "operands" (which may often
3138 : : be evaluated in arbitrary order). Hence if the operands themselves
3139 : : don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
3140 : : same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
3141 : : unset means assuming isochronic (or instantaneous) tree equivalence.
3142 : : Unless comparing arbitrary expression trees, such as from different
3143 : : statements, this flag can usually be left unset.
3144 : :
3145 : : If OEP_PURE_SAME is set, then pure functions with identical arguments
3146 : : are considered the same. It is used when the caller has other ways
3147 : : to ensure that global memory is unchanged in between.
3148 : :
3149 : : If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
3150 : : not values of expressions.
3151 : :
3152 : : If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
3153 : : such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.
3154 : :
3155 : : If OEP_BITWISE is set, then require the values to be bitwise identical
3156 : : rather than simply numerically equal. Do not take advantage of things
3157 : : like math-related flags or undefined behavior; only return true for
3158 : : values that are provably bitwise identical in all circumstances.
3159 : :
3160 : : If OEP_ASSUME_WRAPV is set, then require the values to be bitwise identical
3161 : : under two's complement arithmetic (ignoring any possible undefined behavior)
3162 : : rather than just numerically equivalent. The compared expressions must
3163 : : still perform the same operations, but they may do intermediate computations
3164 : : with differing signedness. Because this comparison ignores any possible UB,
3165 : : it must not be used blindly: the caller has to ensure that the context in
3166 : : which it is used itself guarantees that there will be no UB. Conditional
3167 : : expressions are excluded from this relaxation.
3168 : :
3169 : : When OEP_ASSUME_WRAPV is used operand_compare::hash_operand may return
3170 : : differing hashes even for cases where operand_compare::operand_equal_p
3171 : : compares equal.
3172 : :
3173 : : Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
3174 : : any operand with side effects. This is unnecessarily conservative in the
3175 : : case where we know that arg0 and arg1 are in disjoint code paths (such as
3176 : : in the ?: operator). In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
3177 : : addresses with TREE_CONSTANT flag set so we know that &var == &var
3178 : : even if var is volatile. */
3179 : :
3180 : : bool
3181 : 6561556496 : operand_compare::operand_equal_p (const_tree arg0, const_tree arg1,
3182 : : unsigned int flags)
3183 : : {
3184 : 6561556496 : return operand_equal_p (TREE_TYPE (arg0), arg0, TREE_TYPE (arg1), arg1, flags);
3185 : : }
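A brief editor's sketch of the flag semantics documented above, assuming a GCC-internal context:

  /* Two INTEGER_CSTs of the same value compare equal even under
     OEP_ONLY_CONST.  */
  tree c1 = build_int_cst (integer_type_node, 42);
  tree c2 = build_int_cst (integer_type_node, 42);
  gcc_checking_assert (operand_equal_p (c1, c2, OEP_ONLY_CONST));
  /* A non-volatile VAR_DECL `v' matches itself only when OEP_ONLY_CONST is
     unset: operand_equal_p (v, v, 0) is true, while
     operand_equal_p (v, v, OEP_ONLY_CONST) is false.  */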
3186 : :
3187 : : /* The same as operand_equal_p, except that the types of ARG0 and ARG1 are
3188 : : assumed to be TYPE0 and TYPE1 respectively. TYPE0 and TYPE1 represent the
3189 : : types the expressions are being compared under for equality, so they can
3190 : : differ from the actual TREE_TYPE (..) values of ARG0 and ARG1. */
3191 : :
3192 : : bool
3193 : 6562242962 : operand_compare::operand_equal_p (tree type0, const_tree arg0,
3194 : : tree type1, const_tree arg1,
3195 : : unsigned int flags)
3196 : : {
3197 : 6562242962 : bool r;
3198 : 6562242962 : if (verify_hash_value (arg0, arg1, flags, &r))
3199 : 2732810469 : return r;
3200 : :
3201 : 3829432493 : STRIP_ANY_LOCATION_WRAPPER (arg0);
3202 : 3829432493 : STRIP_ANY_LOCATION_WRAPPER (arg1);
3203 : :
3204 : : /* If either is ERROR_MARK, they aren't equal. */
3205 : 3829432493 : if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
3206 : 3829431666 : || type0 == error_mark_node
3207 : 3829431666 : || type1 == error_mark_node)
3208 : : return false;
3209 : :
3210 : : /* Similar, if either does not have a type (like a template id),
3211 : : /* Similarly, if either does not have a type (like a template id),
3212 : 3829431666 : if (!type0 || !type1)
3213 : : return false;
3214 : :
3215 : : /* Bitwise identity makes no sense if the values have different layouts. */
3216 : 3829429406 : if ((flags & OEP_BITWISE)
3217 : 3829429406 : && !tree_nop_conversion_p (type0, type1))
3218 : : return false;
3219 : :
3220 : : /* We cannot consider pointers to different address space equal. */
3221 : 3829429406 : if (POINTER_TYPE_P (type0)
3222 : 539816131 : && POINTER_TYPE_P (type1)
3223 : 4279040082 : && (TYPE_ADDR_SPACE (TREE_TYPE (type0))
3224 : 449610676 : != TYPE_ADDR_SPACE (TREE_TYPE (type1))))
3225 : : return false;
3226 : :
3227 : : /* Check equality of integer constants before bailing out due to
3228 : : precision differences. */
3229 : 3829429205 : if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
3230 : : {
3231 : : /* Address of INTEGER_CST is not defined; check that we did not forget
3232 : : to drop the OEP_ADDRESS_OF flags. */
3233 : 593485906 : gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3234 : 593485906 : return tree_int_cst_equal (arg0, arg1);
3235 : : }
3236 : :
3237 : 3235943299 : if ((flags & OEP_ASSUME_WRAPV)
3238 : 1961954 : && (CONVERT_EXPR_P (arg0) || CONVERT_EXPR_P (arg1)))
3239 : : {
3240 : 739235 : const_tree t_arg0 = arg0;
3241 : 739235 : const_tree t_arg1 = arg1;
3242 : 739235 : STRIP_NOPS (arg0);
3243 : 739235 : STRIP_NOPS (arg1);
3244 : : /* Only recurse if the conversion was one that was valid to strip. */
3245 : 739235 : if (t_arg0 != arg0 || t_arg1 != arg1)
3246 : 686466 : return operand_equal_p (type0, arg0, type1, arg1, flags);
3247 : : }
3248 : :
3249 : 3235256833 : if (!(flags & OEP_ADDRESS_OF))
3250 : : {
3251 : : /* Check whether this is an operation for which the two's complement
3252 : : bitwise representation of the result differs between signed and
3253 : : unsigned arithmetic. */
3254 : 2896690841 : bool enforce_signedness = true;
3255 : 2896690841 : if (flags & OEP_ASSUME_WRAPV)
3256 : : {
3257 : 1197786 : switch (TREE_CODE (arg0))
3258 : : {
3259 : : case PLUS_EXPR:
3260 : : case MINUS_EXPR:
3261 : : case MULT_EXPR:
3262 : : case BIT_IOR_EXPR:
3263 : : case BIT_XOR_EXPR:
3264 : : case BIT_AND_EXPR:
3265 : : case BIT_NOT_EXPR:
3266 : : case ABS_EXPR:
3267 : : CASE_CONVERT:
3268 : : case SSA_NAME:
3269 : : case INTEGER_CST:
3270 : : case VAR_DECL:
3271 : : case PARM_DECL:
3272 : : case RESULT_DECL:
3273 : 2896690841 : enforce_signedness = false;
3274 : : break;
3275 : :
3276 : : default:
3277 : : break;
3278 : : }
3279 : : }
3280 : :
3281 : : /* If both types don't have the same signedness, then we can't consider
3282 : : them equal. We must check this before the STRIP_NOPS calls
3283 : : because they may change the signedness of the arguments. As pointers
3284 : : strictly don't have a signedness, require either two pointers or
3285 : : two non-pointers as well. */
3286 : 2896690841 : if (POINTER_TYPE_P (type0) != POINTER_TYPE_P (type1)
3287 : 2896690841 : || (TYPE_UNSIGNED (type0) != TYPE_UNSIGNED (type1)
3288 : 131011023 : && enforce_signedness))
3289 : : return false;
3290 : :
3291 : : /* If both types don't have the same precision, then it is not safe
3292 : : to strip NOPs. */
3293 : 2609898995 : if (element_precision (type0) != element_precision (type1))
3294 : : return false;
3295 : :
3296 : 2455285239 : STRIP_NOPS (arg0);
3297 : 2455285239 : STRIP_NOPS (arg1);
3298 : :
3299 : 2455285239 : type0 = TREE_TYPE (arg0);
3300 : 2455285239 : type1 = TREE_TYPE (arg1);
3301 : : }
3302 : : #if 0
3303 : : /* FIXME: Fortran FE currently produce ADDR_EXPR of NOP_EXPR. Enable the
3304 : : sanity check once the issue is solved. */
3305 : : else
3306 : : /* Addresses of conversions and SSA_NAMEs (and many other things)
3307 : : are not defined. Check that we did not forget to drop the
3308 : : OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
3309 : : gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
3310 : : && TREE_CODE (arg0) != SSA_NAME);
3311 : : #endif
3312 : :
3313 : : /* In case both args are comparisons but with different comparison
3314 : : code, try to swap the comparison operands of one arg to produce
3315 : : a match and compare that variant. */
3316 : 2793851231 : if (TREE_CODE (arg0) != TREE_CODE (arg1)
3317 : 1103911200 : && COMPARISON_CLASS_P (arg0)
3318 : 5794866 : && COMPARISON_CLASS_P (arg1))
3319 : : {
3320 : 4276309 : enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3321 : :
3322 : 4276309 : if (TREE_CODE (arg0) == swap_code)
3323 : 1900446 : return operand_equal_p (TREE_OPERAND (arg0, 0),
3324 : 1900446 : TREE_OPERAND (arg1, 1), flags)
3325 : 1919860 : && operand_equal_p (TREE_OPERAND (arg0, 1),
3326 : 19414 : TREE_OPERAND (arg1, 0), flags);
3327 : : }
3328 : :
3329 : 2791950785 : if (TREE_CODE (arg0) != TREE_CODE (arg1))
3330 : : {
3331 : : /* NOP_EXPR and CONVERT_EXPR are considered equal. */
3332 : 1102010754 : if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
3333 : : ;
3334 : 1101994263 : else if (flags & OEP_ADDRESS_OF)
3335 : : {
3336 : : /* If we are interested in comparing addresses ignore
3337 : : MEM_REF wrappings of the base that can appear just for
3338 : : TBAA reasons. */
3339 : 31288527 : if (TREE_CODE (arg0) == MEM_REF
3340 : 6079250 : && DECL_P (arg1)
3341 : 4372389 : && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
3342 : 897025 : && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
3343 : 31707059 : && integer_zerop (TREE_OPERAND (arg0, 1)))
3344 : : return true;
3345 : 31084919 : else if (TREE_CODE (arg1) == MEM_REF
3346 : 16339396 : && DECL_P (arg0)
3347 : 9482731 : && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
3348 : 1755190 : && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
3349 : 31586001 : && integer_zerop (TREE_OPERAND (arg1, 1)))
3350 : : return true;
3351 : 30733922 : return false;
3352 : : }
3353 : : else
3354 : : return false;
3355 : : }
3356 : :
3357 : : /* When not checking addresses, this is needed for conversions and for
3358 : : COMPONENT_REF. Might as well play it safe and always test this. */
3359 : 1689956522 : if (TREE_CODE (type0) == ERROR_MARK
3360 : 1689956522 : || TREE_CODE (type1) == ERROR_MARK
3361 : 3379913044 : || (TYPE_MODE (type0) != TYPE_MODE (type1)
3362 : 21106571 : && !(flags & OEP_ADDRESS_OF)))
3363 : 3673484 : return false;
3364 : :
3365 : : /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3366 : : We don't care about side effects in that case because the SAVE_EXPR
3367 : : takes care of that for us. In all other cases, two expressions are
3368 : : equal if they have no side effects. If we have two identical
3369 : : expressions with side effects that should be treated the same due
3370 : : to the only side effects being identical SAVE_EXPR's, that will
3371 : : be detected in the recursive calls below.
3372 : : If we are taking an invariant address of two identical objects
3373 : : they are necessarily equal as well. */
3374 : 289417702 : if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3375 : 1975700588 : && (TREE_CODE (arg0) == SAVE_EXPR
3376 : 289404871 : || (flags & OEP_MATCH_SIDE_EFFECTS)
3377 : 259880602 : || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3378 : : return true;
3379 : :
3380 : : /* Next handle constant cases, those for which we can return 1 even
3381 : : if ONLY_CONST is set. */
3382 : 1397002810 : if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3383 : 17498240 : switch (TREE_CODE (arg0))
3384 : : {
3385 : 153 : case INTEGER_CST:
3386 : 153 : return tree_int_cst_equal (arg0, arg1);
3387 : :
3388 : 0 : case FIXED_CST:
3389 : 0 : return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3390 : : TREE_FIXED_CST (arg1));
3391 : :
3392 : 3610577 : case REAL_CST:
3393 : 3610577 : if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
3394 : : return true;
3395 : :
3396 : 2608675 : if (!(flags & OEP_BITWISE) && !HONOR_SIGNED_ZEROS (arg0))
3397 : : {
3398 : : /* If we do not distinguish between signed and unsigned zero,
3399 : : consider them equal. */
3400 : 14913 : if (real_zerop (arg0) && real_zerop (arg1))
3401 : : return true;
3402 : : }
3403 : 2608670 : return false;
3404 : :
3405 : 655096 : case VECTOR_CST:
3406 : 655096 : {
3407 : 655096 : if (VECTOR_CST_LOG2_NPATTERNS (arg0)
3408 : 655096 : != VECTOR_CST_LOG2_NPATTERNS (arg1))
3409 : : return false;
3410 : :
3411 : 643598 : if (VECTOR_CST_NELTS_PER_PATTERN (arg0)
3412 : 643598 : != VECTOR_CST_NELTS_PER_PATTERN (arg1))
3413 : : return false;
3414 : :
3415 : 621495 : unsigned int count = vector_cst_encoded_nelts (arg0);
3416 : 826372 : for (unsigned int i = 0; i < count; ++i)
3417 : 1328322 : if (!operand_equal_p (VECTOR_CST_ENCODED_ELT (arg0, i),
3418 : 664161 : VECTOR_CST_ENCODED_ELT (arg1, i), flags))
3419 : : return false;
3420 : : return true;
3421 : : }
3422 : :
3423 : 12030 : case COMPLEX_CST:
3424 : 12030 : return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3425 : : flags)
3426 : 12030 : && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3427 : : flags));
3428 : :
3429 : 1059550 : case STRING_CST:
3430 : 1059550 : return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3431 : 1059550 : && ! memcmp (TREE_STRING_POINTER (arg0),
3432 : 650815 : TREE_STRING_POINTER (arg1),
3433 : 650815 : TREE_STRING_LENGTH (arg0)));
3434 : :
3435 : 0 : case RAW_DATA_CST:
3436 : 0 : return (RAW_DATA_LENGTH (arg0) == RAW_DATA_LENGTH (arg1)
3437 : 0 : && ! memcmp (RAW_DATA_POINTER (arg0),
3438 : 0 : RAW_DATA_POINTER (arg1),
3439 : 0 : RAW_DATA_LENGTH (arg0)));
3440 : :
3441 : 11283531 : case ADDR_EXPR:
3442 : 11283531 : gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3443 : 11283531 : return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3444 : : flags | OEP_ADDRESS_OF
3445 : 11283531 : | OEP_MATCH_SIDE_EFFECTS);
3446 : 164256 : case CONSTRUCTOR:
3447 : 164256 : {
3448 : : /* In GIMPLE empty constructors are allowed in initializers of
3449 : : aggregates. */
3450 : 164256 : if (!CONSTRUCTOR_NELTS (arg0) && !CONSTRUCTOR_NELTS (arg1))
3451 : : return true;
3452 : :
3453 : : /* See sem_variable::equals in ipa-icf for a similar approach. */
3454 : 135269 : if (TREE_CODE (type0) != TREE_CODE (type1))
3455 : : return false;
3456 : 135269 : else if (TREE_CODE (type0) == ARRAY_TYPE)
3457 : : {
3458 : : /* For arrays, check that the sizes all match. */
3459 : 2 : const HOST_WIDE_INT siz0 = int_size_in_bytes (type0);
3460 : 2 : if (TYPE_MODE (type0) != TYPE_MODE (type1)
3461 : 2 : || siz0 < 0
3462 : 4 : || siz0 != int_size_in_bytes (type1))
3463 : 0 : return false;
3464 : : }
3465 : 135267 : else if (!types_compatible_p (type0, type1))
3466 : : return false;
3467 : :
3468 : 135269 : vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3469 : 135269 : vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3470 : 405807 : if (vec_safe_length (v0) != vec_safe_length (v1))
3471 : : return false;
3472 : :
3473 : : /* Address of CONSTRUCTOR is defined in GENERIC to mean the value
3474 : : of the CONSTRUCTOR referenced indirectly. */
3475 : 135269 : flags &= ~(OEP_ADDRESS_OF | OEP_ASSUME_WRAPV);
3476 : :
3477 : 323332182 : for (unsigned idx = 0; idx < vec_safe_length (v0); ++idx)
3478 : : {
3479 : 193010 : constructor_elt *c0 = &(*v0)[idx];
3480 : 193010 : constructor_elt *c1 = &(*v1)[idx];
3481 : :
3482 : : /* Check that the values are the same... */
3483 : 193010 : if (c0->value != c1->value
3484 : 193010 : && !operand_equal_p (c0->value, c1->value, flags))
3485 : : return false;
3486 : :
3487 : : /* ... and that they apply to the same field! */
3488 : 106975 : if (c0->index != c1->index
3489 : 106975 : && (TREE_CODE (type0) == ARRAY_TYPE
3490 : 0 : ? !operand_equal_p (c0->index, c1->index, flags)
3491 : 0 : : !operand_equal_p (DECL_FIELD_OFFSET (c0->index),
3492 : 0 : DECL_FIELD_OFFSET (c1->index),
3493 : : flags)
3494 : 0 : || !operand_equal_p (DECL_FIELD_BIT_OFFSET (c0->index),
3495 : 0 : DECL_FIELD_BIT_OFFSET (c1->index),
3496 : : flags)))
3497 : 0 : return false;
3498 : : }
3499 : :
3500 : : return true;
3501 : : }
3502 : :
3503 : : default:
3504 : : break;
3505 : : }
3506 : :
3507 : : /* Don't handle more cases for OEP_BITWISE, since we can't guarantee that
3508 : : two instances of undefined behavior will give identical results. */
3509 : 1380217617 : if (flags & (OEP_ONLY_CONST | OEP_BITWISE))
3510 : : return false;
3511 : :
3512 : : /* Define macros to test an operand from arg0 and arg1 for equality and a
3513 : : variant that allows null and views null as being different from any
3514 : : non-null value. In the latter case, if either is null, they both
3515 : : must be; otherwise, do the normal comparison. */
3516 : : #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3517 : : TREE_OPERAND (arg1, N), flags)
3518 : :
3519 : : #define OP_SAME_WITH_NULL(N) \
3520 : : ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3521 : : ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3522 : :
3523 : 1380217617 : switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3524 : : {
3525 : 6921979 : case tcc_unary:
3526 : : /* Two conversions are equal only if signedness and modes match. */
3527 : 6921979 : switch (TREE_CODE (arg0))
3528 : : {
3529 : 6563648 : CASE_CONVERT:
3530 : 6563648 : case FIX_TRUNC_EXPR:
3531 : 6563648 : if (TYPE_UNSIGNED (type0) != TYPE_UNSIGNED (type1))
3532 : : return false;
3533 : : break;
3534 : : default:
3535 : : break;
3536 : : }
3537 : :
3538 : 6921958 : return OP_SAME_WITH_NULL (0);
3539 : :
3540 : :
3541 : 20708516 : case tcc_comparison:
3542 : 20708516 : case tcc_binary:
3543 : 20708516 : if (OP_SAME (0) && OP_SAME (1))
3544 : : return true;
3545 : :
3546 : : /* For commutative ops, allow the other order. */
3547 : 15386629 : return (commutative_tree_code (TREE_CODE (arg0))
3548 : 11800167 : && operand_equal_p (TREE_OPERAND (arg0, 0),
3549 : 11800167 : TREE_OPERAND (arg1, 1), flags)
3550 : 15589129 : && operand_equal_p (TREE_OPERAND (arg0, 1),
3551 : 202500 : TREE_OPERAND (arg1, 0), flags));
3552 : :
3553 : 866145321 : case tcc_reference:
3554 : : /* If either of the pointer (or reference) expressions we are
3555 : : dereferencing contain a side effect, these cannot be equal,
3556 : : but their addresses can be. */
3557 : 866145321 : if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
3558 : 866145321 : && (TREE_SIDE_EFFECTS (arg0)
3559 : 804434247 : || TREE_SIDE_EFFECTS (arg1)))
3560 : : return false;
3561 : :
3562 : 865954552 : switch (TREE_CODE (arg0))
3563 : : {
3564 : 3617686 : case INDIRECT_REF:
3565 : 3617686 : if (!(flags & OEP_ADDRESS_OF))
3566 : : {
3567 : 3595421 : if (TYPE_ALIGN (type0) != TYPE_ALIGN (type1))
3568 : : return false;
3569 : : /* Verify that the access types are compatible. */
3570 : 3593656 : if (TYPE_MAIN_VARIANT (type0) != TYPE_MAIN_VARIANT (type1))
3571 : : return false;
3572 : : }
3573 : 3559218 : flags &= ~(OEP_ADDRESS_OF | OEP_ASSUME_WRAPV);
3574 : 3559218 : return OP_SAME (0);
3575 : :
3576 : 653100 : case IMAGPART_EXPR:
3577 : : /* Require the same offset. */
3578 : 653100 : if (!operand_equal_p (TYPE_SIZE (type0),
3579 : 653100 : TYPE_SIZE (type1),
3580 : : flags & ~(OEP_ADDRESS_OF | OEP_ASSUME_WRAPV)))
3581 : : return false;
3582 : :
3583 : : /* Fallthru. */
3584 : 2399711 : case REALPART_EXPR:
3585 : 2399711 : case VIEW_CONVERT_EXPR:
3586 : 2399711 : return OP_SAME (0);
3587 : :
3588 : 59121849 : case TARGET_MEM_REF:
3589 : 59121849 : case MEM_REF:
3590 : 59121849 : if (!(flags & OEP_ADDRESS_OF))
3591 : : {
3592 : : /* Require equal access sizes */
3593 : 14337302 : if (TYPE_SIZE (type0) != TYPE_SIZE (type1)
3594 : 14337302 : && (!TYPE_SIZE (type0)
3595 : 1044270 : || !TYPE_SIZE (type1)
3596 : 1040156 : || !operand_equal_p (TYPE_SIZE (type0),
3597 : 1040156 : TYPE_SIZE (type1),
3598 : : flags)))
3599 : 1044596 : return false;
3600 : : /* Verify that access happens in similar types. */
3601 : 13292706 : if (!types_compatible_p (type0, type1))
3602 : : return false;
3603 : : /* Verify that accesses are TBAA compatible. */
3604 : 12992918 : if (!alias_ptr_types_compatible_p
3605 : 12992918 : (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3606 : 12992918 : TREE_TYPE (TREE_OPERAND (arg1, 1)))
3607 : 12203425 : || (MR_DEPENDENCE_CLIQUE (arg0)
3608 : 12203425 : != MR_DEPENDENCE_CLIQUE (arg1))
3609 : 23317812 : || (MR_DEPENDENCE_BASE (arg0)
3610 : 10324894 : != MR_DEPENDENCE_BASE (arg1)))
3611 : : return false;
3612 : : /* Verify that alignment is compatible. */
3613 : 10022033 : if (TYPE_ALIGN (type0) != TYPE_ALIGN (type1))
3614 : : return false;
3615 : : }
3616 : 54696318 : flags &= ~(OEP_ADDRESS_OF | OEP_ASSUME_WRAPV);
3617 : 94979650 : return (OP_SAME (0) && OP_SAME (1)
3618 : : /* TARGET_MEM_REF requires equal extra operands. */
3619 : 78425643 : && (TREE_CODE (arg0) != TARGET_MEM_REF
3620 : 499491 : || (OP_SAME_WITH_NULL (2)
3621 : 255838 : && OP_SAME_WITH_NULL (3)
3622 : 251112 : && OP_SAME_WITH_NULL (4))));
3623 : :
3624 : 38160727 : case ARRAY_REF:
3625 : 38160727 : case ARRAY_RANGE_REF:
3626 : 38160727 : if (!OP_SAME (0))
3627 : : return false;
3628 : 33245712 : flags &= ~(OEP_ADDRESS_OF | OEP_ASSUME_WRAPV);
3629 : : /* Compare the array index by value first if it is constant, as we
3630 : : may have different types but the same value here. */
3631 : 33245712 : return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3632 : 33245712 : TREE_OPERAND (arg1, 1))
3633 : 30280458 : || OP_SAME (1))
3634 : 5955060 : && OP_SAME_WITH_NULL (2)
3635 : 5954878 : && OP_SAME_WITH_NULL (3)
3636 : : /* Compare low bound and element size as with OEP_ADDRESS_OF
3637 : : we have to account for the offset of the ref. */
3638 : 42178120 : && (TREE_TYPE (TREE_OPERAND (arg0, 0))
3639 : 2977439 : == TREE_TYPE (TREE_OPERAND (arg1, 0))
3640 : 2541 : || (operand_equal_p (array_ref_low_bound
3641 : 2541 : (CONST_CAST_TREE (arg0)),
3642 : : array_ref_low_bound
3643 : 2541 : (CONST_CAST_TREE (arg1)), flags)
3644 : 2541 : && operand_equal_p (array_ref_element_size
3645 : 2541 : (CONST_CAST_TREE (arg0)),
3646 : : array_ref_element_size
3647 : 2541 : (CONST_CAST_TREE (arg1)),
3648 : : flags))));
3649 : :
3650 : 762181910 : case COMPONENT_REF:
3651 : : /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3652 : : may be NULL when we're called to compare MEM_EXPRs. */
3653 : 762181910 : if (!OP_SAME_WITH_NULL (0))
3654 : : return false;
3655 : 64640036 : {
3656 : 64640036 : bool compare_address = flags & OEP_ADDRESS_OF;
3657 : :
3658 : : /* Most of the time we only need to compare FIELD_DECLs for equality.
3659 : : However, when determining an address, look into the actual offsets;
3660 : : these may match for unions and unshared record types. */
3661 : 64640036 : flags &= ~(OEP_ADDRESS_OF | OEP_ASSUME_WRAPV);
3662 : 64640036 : if (!OP_SAME (1))
3663 : : {
3664 : 37285600 : if (compare_address
3665 : 719350 : && (flags & OEP_ADDRESS_OF_SAME_FIELD) == 0)
3666 : : {
3667 : 719347 : tree field0 = TREE_OPERAND (arg0, 1);
3668 : 719347 : tree field1 = TREE_OPERAND (arg1, 1);
3669 : :
3670 : : /* Non-FIELD_DECL operands can appear in C++ templates. */
3671 : 719347 : if (TREE_CODE (field0) != FIELD_DECL
3672 : 719347 : || TREE_CODE (field1) != FIELD_DECL)
3673 : : return false;
3674 : :
3675 : 719347 : if (!DECL_FIELD_OFFSET (field0)
3676 : 719347 : || !DECL_FIELD_OFFSET (field1))
3677 : 3 : return field0 == field1;
3678 : :
3679 : 719344 : if (!operand_equal_p (DECL_FIELD_OFFSET (field0),
3680 : 719344 : DECL_FIELD_OFFSET (field1), flags)
3681 : 954108 : || !operand_equal_p (DECL_FIELD_BIT_OFFSET (field0),
3682 : 234764 : DECL_FIELD_BIT_OFFSET (field1),
3683 : : flags))
3684 : 681931 : return false;
3685 : : }
3686 : : else
3687 : : return false;
3688 : : }
3689 : : }
3690 : 27391849 : return OP_SAME_WITH_NULL (2);
3691 : :
3692 : 472621 : case BIT_FIELD_REF:
3693 : 472621 : if (!OP_SAME (0))
3694 : : return false;
3695 : 374060 : flags &= ~(OEP_ADDRESS_OF | OEP_ASSUME_WRAPV);
3696 : 374060 : return OP_SAME (1) && OP_SAME (2);
3697 : :
3698 : : default:
3699 : : return false;
3700 : : }
3701 : :
3702 : 34153331 : case tcc_expression:
3703 : 34153331 : switch (TREE_CODE (arg0))
3704 : : {
3705 : 30877640 : case ADDR_EXPR:
3706 : : /* Be sure we pass right ADDRESS_OF flag. */
3707 : 30877640 : gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3708 : 30877640 : return operand_equal_p (TREE_OPERAND (arg0, 0),
3709 : 30877640 : TREE_OPERAND (arg1, 0),
3710 : 30877640 : flags | OEP_ADDRESS_OF);
3711 : :
3712 : 632831 : case TRUTH_NOT_EXPR:
3713 : 632831 : return OP_SAME (0);
3714 : :
3715 : 36703 : case TRUTH_ANDIF_EXPR:
3716 : 36703 : case TRUTH_ORIF_EXPR:
3717 : 36703 : return OP_SAME (0) && OP_SAME (1);
3718 : :
3719 : 0 : case WIDEN_MULT_PLUS_EXPR:
3720 : 0 : case WIDEN_MULT_MINUS_EXPR:
3721 : 0 : if (!OP_SAME (2))
3722 : : return false;
3723 : : /* The multiplication operands are commutative. */
3724 : : /* FALLTHRU */
3725 : :
3726 : 18687 : case TRUTH_AND_EXPR:
3727 : 18687 : case TRUTH_OR_EXPR:
3728 : 18687 : case TRUTH_XOR_EXPR:
3729 : 18687 : if (OP_SAME (0) && OP_SAME (1))
3730 : : return true;
3731 : :
3732 : : /* Otherwise take into account this is a commutative operation. */
3733 : 18669 : return (operand_equal_p (TREE_OPERAND (arg0, 0),
3734 : 18669 : TREE_OPERAND (arg1, 1), flags)
3735 : 18672 : && operand_equal_p (TREE_OPERAND (arg0, 1),
3736 : 3 : TREE_OPERAND (arg1, 0), flags));
3737 : :
3738 : 108562 : case COND_EXPR:
3739 : 108562 : if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
3740 : 46506 : return false;
3741 : 62056 : flags &= ~(OEP_ADDRESS_OF | OEP_ASSUME_WRAPV);
3742 : 62056 : return OP_SAME (0);
3743 : :
3744 : 4 : case BIT_INSERT_EXPR:
3745 : : /* BIT_INSERT_EXPR has an implicit operand in the type precision
3746 : : of op1. We need to check that they are the same. */
3747 : 4 : if (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
3748 : 1 : && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
3749 : 5 : && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 1)))
3750 : 1 : != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 1))))
3751 : : return false;
3752 : : /* FALLTHRU */
3753 : :
3754 : 191 : case VEC_COND_EXPR:
3755 : 191 : case DOT_PROD_EXPR:
3756 : 191 : return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3757 : :
3758 : 19878 : case MODIFY_EXPR:
3759 : 19878 : case INIT_EXPR:
3760 : 19878 : case COMPOUND_EXPR:
3761 : 19878 : case PREDECREMENT_EXPR:
3762 : 19878 : case PREINCREMENT_EXPR:
3763 : 19878 : case POSTDECREMENT_EXPR:
3764 : 19878 : case POSTINCREMENT_EXPR:
3765 : 19878 : if (flags & OEP_LEXICOGRAPHIC)
3766 : 163 : return OP_SAME (0) && OP_SAME (1);
3767 : : return false;
3768 : :
3769 : 92052 : case CLEANUP_POINT_EXPR:
3770 : 92052 : case EXPR_STMT:
3771 : 92052 : case SAVE_EXPR:
3772 : 92052 : if (flags & OEP_LEXICOGRAPHIC)
3773 : 208 : return OP_SAME (0);
3774 : : return false;
3775 : :
3776 : 98993 : case OBJ_TYPE_REF:
3777 : : /* Virtual table reference. */
3778 : 197986 : if (!operand_equal_p (OBJ_TYPE_REF_EXPR (arg0),
3779 : 98993 : OBJ_TYPE_REF_EXPR (arg1), flags))
3780 : : return false;
3781 : 1003 : flags &= ~(OEP_ADDRESS_OF | OEP_ASSUME_WRAPV);
3782 : 1003 : if (tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg0))
3783 : 1003 : != tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg1)))
3784 : : return false;
3785 : 1003 : if (!operand_equal_p (OBJ_TYPE_REF_OBJECT (arg0),
3786 : 1003 : OBJ_TYPE_REF_OBJECT (arg1), flags))
3787 : : return false;
3788 : 1003 : if (virtual_method_call_p (arg0))
3789 : : {
3790 : 1003 : if (!virtual_method_call_p (arg1))
3791 : : return false;
3792 : 1003 : return types_same_for_odr (obj_type_ref_class (arg0),
3793 : 2006 : obj_type_ref_class (arg1));
3794 : : }
3795 : : return false;
3796 : :
3797 : : default:
3798 : : return false;
3799 : : }
3800 : :
3801 : 2681649 : case tcc_vl_exp:
3802 : 2681649 : switch (TREE_CODE (arg0))
3803 : : {
3804 : 2681649 : case CALL_EXPR:
3805 : 2681649 : if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3806 : 2681649 : != (CALL_EXPR_FN (arg1) == NULL_TREE))
3807 : : /* If one CALL_EXPR is an internal function call and the other is a
3808 : : normal function call, then they are not equal. */
3809 : : return false;
3810 : 2681649 : else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3811 : : {
3812 : : /* If the CALL_EXPRs call different internal functions, then they
3813 : : are not equal. */
3814 : 4 : if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3815 : : return false;
3816 : : }
3817 : : else
3818 : : {
3819 : : /* If the CALL_EXPRs call different functions, then they are not
3820 : : equal. */
3821 : 2681645 : if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3822 : : flags))
3823 : : return false;
3824 : : }
3825 : :
3826 : : /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3827 : 1787886 : {
3828 : 1787886 : unsigned int cef = call_expr_flags (arg0);
3829 : 1787886 : if (flags & OEP_PURE_SAME)
3830 : 0 : cef &= ECF_CONST | ECF_PURE;
3831 : : else
3832 : 1787886 : cef &= ECF_CONST;
3833 : 1787886 : if (!cef && !(flags & OEP_LEXICOGRAPHIC))
3834 : : return false;
3835 : : }
3836 : :
3837 : : /* Now see if all the arguments are the same. */
3838 : 31919 : {
3839 : 31919 : const_call_expr_arg_iterator iter0, iter1;
3840 : 31919 : const_tree a0, a1;
3841 : 63838 : for (a0 = first_const_call_expr_arg (arg0, &iter0),
3842 : 31919 : a1 = first_const_call_expr_arg (arg1, &iter1);
3843 : 40005 : a0 && a1;
3844 : 8086 : a0 = next_const_call_expr_arg (&iter0),
3845 : 8086 : a1 = next_const_call_expr_arg (&iter1))
3846 : 33407 : if (! operand_equal_p (a0, a1, flags))
3847 : : return false;
3848 : :
3849 : : /* If we get here and both argument lists are exhausted
3850 : : then the CALL_EXPRs are equal. */
3851 : 6598 : return ! (a0 || a1);
3852 : : }
3853 : : default:
3854 : : return false;
3855 : : }
3856 : :
3857 : 152349494 : case tcc_declaration:
3858 : : /* Consider __builtin_sqrt equal to sqrt. */
3859 : 152349494 : if (TREE_CODE (arg0) == FUNCTION_DECL)
3860 : 6027633 : return (fndecl_built_in_p (arg0) && fndecl_built_in_p (arg1)
3861 : 330585 : && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3862 : 5418896 : && (DECL_UNCHECKED_FUNCTION_CODE (arg0)
3863 : 330585 : == DECL_UNCHECKED_FUNCTION_CODE (arg1)));
3864 : :
3865 : 146930598 : if (DECL_P (arg0)
3866 : : && (flags & OEP_DECL_NAME)
3867 : 146930598 : && (flags & OEP_LEXICOGRAPHIC))
3868 : : {
3869 : : /* Consider decls with the same name equal. The caller needs
3870 : : to make sure they refer to the same entity (such as a function
3871 : : formal parameter). */
3872 : 35 : tree a0name = DECL_NAME (arg0);
3873 : 35 : tree a1name = DECL_NAME (arg1);
3874 : 70 : const char *a0ns = a0name ? IDENTIFIER_POINTER (a0name) : NULL;
3875 : 70 : const char *a1ns = a1name ? IDENTIFIER_POINTER (a1name) : NULL;
3876 : 60 : return a0ns && a1ns && strcmp (a0ns, a1ns) == 0;
3877 : : }
3878 : : return false;
3879 : :
3880 : 295152190 : case tcc_exceptional:
3881 : 295152190 : if (TREE_CODE (arg0) == CONSTRUCTOR)
3882 : : {
3883 : 19402 : if (CONSTRUCTOR_NO_CLEARING (arg0) != CONSTRUCTOR_NO_CLEARING (arg1))
3884 : : return false;
3885 : :
3886 : : /* In GIMPLE constructors are used only to build vectors from
3887 : : elements. Individual elements in the constructor must be
3888 : : indexed in increasing order and form an initial sequence.
3889 : :
3890 : : We make no effort to compare nonconstant ones in GENERIC. */
3891 : 19402 : if (!VECTOR_TYPE_P (type0) || !VECTOR_TYPE_P (type1))
3892 : : return false;
3893 : :
3894 : : /* Be sure that the constructed vectors have the same representation.
3895 : : So far we have only checked that the element precisions and modes
3896 : : match. Vectors may be BLKmode, so also check that the numbers of
3897 : : parts match. */
3898 : 513 : if (maybe_ne (TYPE_VECTOR_SUBPARTS (type0),
3899 : 1026 : TYPE_VECTOR_SUBPARTS (type1)))
3900 : : return false;
3901 : :
3902 : 513 : vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3903 : 513 : vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3904 : 513 : unsigned int len = vec_safe_length (v0);
3905 : :
3906 : 1026 : if (len != vec_safe_length (v1))
3907 : : return false;
3908 : :
3909 : 1859 : for (unsigned int i = 0; i < len; i++)
3910 : : {
3911 : 1516 : constructor_elt *c0 = &(*v0)[i];
3912 : 1516 : constructor_elt *c1 = &(*v1)[i];
3913 : :
3914 : 1516 : if (!operand_equal_p (c0->value, c1->value, flags)
3915 : : /* In GIMPLE the indexes can be either NULL or matching i.
3916 : : Double check this so we won't get false
3917 : : positives for GENERIC. */
3918 : 1346 : || (c0->index
3919 : 868 : && (TREE_CODE (c0->index) != INTEGER_CST
3920 : 868 : || compare_tree_int (c0->index, i)))
3921 : 2862 : || (c1->index
3922 : 868 : && (TREE_CODE (c1->index) != INTEGER_CST
3923 : 868 : || compare_tree_int (c1->index, i))))
3924 : 170 : return false;
3925 : : }
3926 : : return true;
3927 : : }
3928 : 295132788 : else if (TREE_CODE (arg0) == STATEMENT_LIST
3929 : 2945 : && (flags & OEP_LEXICOGRAPHIC))
3930 : : {
3931 : : /* Compare the STATEMENT_LISTs. */
3932 : 16 : tree_stmt_iterator tsi1, tsi2;
3933 : 16 : tree body1 = CONST_CAST_TREE (arg0);
3934 : 16 : tree body2 = CONST_CAST_TREE (arg1);
3935 : 56 : for (tsi1 = tsi_start (body1), tsi2 = tsi_start (body2); ;
3936 : 40 : tsi_next (&tsi1), tsi_next (&tsi2))
3937 : : {
3938 : : /* The lists don't have the same number of statements. */
3939 : 56 : if (tsi_end_p (tsi1) ^ tsi_end_p (tsi2))
3940 : : return false;
3941 : 56 : if (tsi_end_p (tsi1) && tsi_end_p (tsi2))
3942 : : return true;
3943 : 40 : if (!operand_equal_p (tsi_stmt (tsi1), tsi_stmt (tsi2),
3944 : : flags & (OEP_LEXICOGRAPHIC
3945 : : | OEP_NO_HASH_CHECK)))
3946 : : return false;
3947 : : }
3948 : : }
3949 : : return false;
3950 : :
3951 : 2104953 : case tcc_statement:
3952 : 2104953 : switch (TREE_CODE (arg0))
3953 : : {
3954 : 52 : case RETURN_EXPR:
3955 : 52 : if (flags & OEP_LEXICOGRAPHIC)
3956 : 52 : return OP_SAME_WITH_NULL (0);
3957 : : return false;
3958 : 4 : case DEBUG_BEGIN_STMT:
3959 : 4 : if (flags & OEP_LEXICOGRAPHIC)
3960 : : return true;
3961 : : return false;
3962 : : default:
3963 : : return false;
3964 : : }
3965 : :
3966 : : default:
3967 : : return false;
3968 : : }
3969 : :
3970 : : #undef OP_SAME
3971 : : #undef OP_SAME_WITH_NULL
3972 : : }
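An editor's illustration of the commutativity handling in the tcc_binary case above, assuming distinct, non-side-effecting integer trees `a' and `b':

  tree s1 = build2 (PLUS_EXPR, integer_type_node, a, b);
  tree s2 = build2 (PLUS_EXPR, integer_type_node, b, a);
  gcc_checking_assert (operand_equal_p (s1, s2, 0));
  /* MINUS_EXPR is not a commutative_tree_code, so the analogous pair built
     with MINUS_EXPR would compare unequal.  */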
3973 : :
3974 : : /* Generate a hash value for an expression. This can be used iteratively
3975 : : by passing a previous result as the HSTATE argument. */
3976 : :
3977 : : void
3978 : 2652902481 : operand_compare::hash_operand (const_tree t, inchash::hash &hstate,
3979 : : unsigned int flags)
3980 : : {
3981 : 2652902481 : int i;
3982 : 2652902481 : enum tree_code code;
3983 : 2652902481 : enum tree_code_class tclass;
3984 : :
3985 : 2652902481 : if (t == NULL_TREE || t == error_mark_node)
3986 : : {
3987 : 81338457 : hstate.merge_hash (0);
3988 : 81338457 : return;
3989 : : }
3990 : :
3991 : 2571564024 : STRIP_ANY_LOCATION_WRAPPER (t);
3992 : :
3993 : 2571564024 : if (!(flags & OEP_ADDRESS_OF))
3994 : 2340987209 : STRIP_NOPS (t);
3995 : :
3996 : 2571564024 : code = TREE_CODE (t);
3997 : :
3998 : 2571564024 : switch (code)
3999 : : {
4000 : : /* Alas, constants aren't shared, so we can't rely on pointer
4001 : : identity. */
4002 : 72 : case VOID_CST:
4003 : 72 : hstate.merge_hash (0);
4004 : 72 : return;
4005 : 742239168 : case INTEGER_CST:
4006 : 742239168 : gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
4007 : 1508822984 : for (i = 0; i < TREE_INT_CST_EXT_NUNITS (t); i++)
4008 : 766583816 : hstate.add_hwi (TREE_INT_CST_ELT (t, i));
4009 : : return;
4010 : 15270017 : case REAL_CST:
4011 : 15270017 : {
4012 : 15270017 : unsigned int val2;
4013 : 15270017 : if (!HONOR_SIGNED_ZEROS (t) && real_zerop (t))
4014 : : val2 = rvc_zero;
4015 : : else
4016 : 15052314 : val2 = real_hash (TREE_REAL_CST_PTR (t));
4017 : 15270017 : hstate.merge_hash (val2);
4018 : 15270017 : return;
4019 : : }
4020 : 0 : case FIXED_CST:
4021 : 0 : {
4022 : 0 : unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
4023 : 0 : hstate.merge_hash (val2);
4024 : 0 : return;
4025 : : }
4026 : 9029033 : case STRING_CST:
4027 : 9029033 : hstate.add ((const void *) TREE_STRING_POINTER (t),
4028 : 9029033 : TREE_STRING_LENGTH (t));
4029 : 9029033 : return;
4030 : 186 : case RAW_DATA_CST:
4031 : 186 : hstate.add ((const void *) RAW_DATA_POINTER (t),
4032 : 186 : RAW_DATA_LENGTH (t));
4033 : 186 : return;
4034 : 208509 : case COMPLEX_CST:
4035 : 208509 : hash_operand (TREE_REALPART (t), hstate, flags);
4036 : 208509 : hash_operand (TREE_IMAGPART (t), hstate, flags);
4037 : 208509 : return;
4038 : 2706903 : case VECTOR_CST:
4039 : 2706903 : {
4040 : 2706903 : hstate.add_int (VECTOR_CST_NPATTERNS (t));
4041 : 2706903 : hstate.add_int (VECTOR_CST_NELTS_PER_PATTERN (t));
4042 : 2706903 : unsigned int count = vector_cst_encoded_nelts (t);
4043 : 8526457 : for (unsigned int i = 0; i < count; ++i)
4044 : 5819554 : hash_operand (VECTOR_CST_ENCODED_ELT (t, i), hstate, flags);
4045 : : return;
4046 : : }
4047 : 808235041 : case SSA_NAME:
4048 : : /* We can just compare by pointer. */
4049 : 808235041 : hstate.add_hwi (SSA_NAME_VERSION (t));
4050 : 808235041 : return;
4051 : : case PLACEHOLDER_EXPR:
4052 : : /* The node itself doesn't matter. */
4053 : : return;
4054 : : case BLOCK:
4055 : : case OMP_CLAUSE:
4056 : : case OMP_NEXT_VARIANT:
4057 : : case OMP_TARGET_DEVICE_MATCHES:
4058 : : /* Ignore. */
4059 : : return;
4060 : : case TREE_LIST:
4061 : : /* A list of expressions, for a CALL_EXPR or as the elements of a
4062 : : VECTOR_CST. */
4063 : 271270 : for (; t; t = TREE_CHAIN (t))
4064 : 135635 : hash_operand (TREE_VALUE (t), hstate, flags);
4065 : : return;
4066 : 4791438 : case CONSTRUCTOR:
4067 : 4791438 : {
4068 : 4791438 : unsigned HOST_WIDE_INT idx;
4069 : 4791438 : tree field, value;
4070 : 4791438 : flags &= ~(OEP_ADDRESS_OF | OEP_ASSUME_WRAPV);
4071 : 4791438 : hstate.add_int (CONSTRUCTOR_NO_CLEARING (t));
4072 : 19418076 : FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
4073 : : {
4074 : : /* In GIMPLE the indexes can be either NULL or matching i. */
4075 : 14626638 : if (field == NULL_TREE)
4076 : 1082582 : field = bitsize_int (idx);
4077 : 14626638 : if (TREE_CODE (field) == FIELD_DECL)
4078 : : {
4079 : 9873974 : hash_operand (DECL_FIELD_OFFSET (field), hstate, flags);
4080 : 9873974 : hash_operand (DECL_FIELD_BIT_OFFSET (field), hstate, flags);
4081 : : }
4082 : : else
4083 : 4752664 : hash_operand (field, hstate, flags);
4084 : 14626638 : hash_operand (value, hstate, flags);
4085 : : }
4086 : : return;
4087 : : }
4088 : 182 : case STATEMENT_LIST:
4089 : 182 : {
4090 : 182 : tree_stmt_iterator i;
4091 : 182 : for (i = tsi_start (CONST_CAST_TREE (t));
4092 : 550 : !tsi_end_p (i); tsi_next (&i))
4093 : 368 : hash_operand (tsi_stmt (i), hstate, flags);
4094 : 182 : return;
4095 : : }
4096 : : case TREE_VEC:
4097 : 24 : for (i = 0; i < TREE_VEC_LENGTH (t); ++i)
4098 : 12 : hash_operand (TREE_VEC_ELT (t, i), hstate, flags);
4099 : : return;
4100 : 4 : case IDENTIFIER_NODE:
4101 : 4 : hstate.add_object (IDENTIFIER_HASH_VALUE (t));
4102 : 4 : return;
4103 : 19142297 : case FUNCTION_DECL:
4104 : : /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
4105 : : Otherwise nodes that compare equal according to operand_equal_p might
4106 : : get different hash codes. However, don't do this for machine specific
4107 : : or front end builtins, since the function code is overloaded in those
4108 : : cases. */
4109 : 19142297 : if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
4110 : 19142297 : && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
4111 : : {
4112 : 6628304 : t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
4113 : 6628304 : code = TREE_CODE (t);
4114 : : }
4115 : : /* FALL THROUGH */
4116 : 988947712 : default:
4117 : 988947712 : if (POLY_INT_CST_P (t))
4118 : : {
4119 : : for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
4120 : : hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
4121 : : return;
4122 : : }
4123 : 988947712 : tclass = TREE_CODE_CLASS (code);
4124 : :
4125 : 988947712 : if (tclass == tcc_declaration)
4126 : : {
4127 : : /* DECLs have a unique ID. */
4128 : 677551931 : hstate.add_hwi (DECL_UID (t));
4129 : : }
4130 : 311395781 : else if (tclass == tcc_comparison && !commutative_tree_code (code))
4131 : : {
4132 : : /* For comparisons that can be swapped, use the lower
4133 : : tree code. */
4134 : 141635 : enum tree_code ccode = swap_tree_comparison (code);
4135 : 141635 : if (code < ccode)
4136 : 60625 : ccode = code;
4137 : 141635 : hstate.add_object (ccode);
4138 : 141635 : hash_operand (TREE_OPERAND (t, ccode != code), hstate, flags);
4139 : 141635 : hash_operand (TREE_OPERAND (t, ccode == code), hstate, flags);
4140 : : }
4141 : 311254146 : else if (CONVERT_EXPR_CODE_P (code))
4142 : : {
4143 : : /* NOP_EXPR and CONVERT_EXPR are considered equal by
4144 : : operand_equal_p. */
4145 : 5064148 : enum tree_code ccode = NOP_EXPR;
4146 : 5064148 : hstate.add_object (ccode);
4147 : :
4148 : : /* Don't hash the type, that can lead to having nodes which
4149 : : compare equal according to operand_equal_p, but which
4150 : : have different hash codes. Make sure to include signedness
4151 : : in the hash computation. */
4152 : 5064148 : hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
4153 : 5064148 : hash_operand (TREE_OPERAND (t, 0), hstate, flags);
4154 : : }
4155 : : /* For OEP_ADDRESS_OF, hash MEM_EXPR[&decl, 0] the same as decl. */
4156 : 306189998 : else if (code == MEM_REF
4157 : 72158229 : && (flags & OEP_ADDRESS_OF) != 0
4158 : 64039489 : && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
4159 : 9949813 : && DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
4160 : 315995724 : && integer_zerop (TREE_OPERAND (t, 1)))
4161 : 4839742 : hash_operand (TREE_OPERAND (TREE_OPERAND (t, 0), 0),
4162 : : hstate, flags);
4163 : : /* Don't ICE on FE specific trees, or their arguments etc.
4164 : : during operand_equal_p hash verification. */
4165 : 301350256 : else if (!IS_EXPR_CODE_CLASS (tclass))
4166 : 252 : gcc_assert (flags & OEP_HASH_CHECK);
4167 : : else
4168 : : {
4169 : 301350004 : unsigned int sflags = flags;
4170 : :
4171 : 301350004 : hstate.add_object (code);
4172 : :
4173 : 301350004 : switch (code)
4174 : : {
4175 : 113122568 : case ADDR_EXPR:
4176 : 113122568 : gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
4177 : 113122568 : flags |= OEP_ADDRESS_OF;
4178 : 113122568 : sflags = flags;
4179 : 113122568 : break;
4180 : :
4181 : 71486139 : case INDIRECT_REF:
4182 : 71486139 : case MEM_REF:
4183 : 71486139 : case TARGET_MEM_REF:
4184 : 71486139 : flags &= ~(OEP_ADDRESS_OF | OEP_ASSUME_WRAPV);
4185 : 71486139 : sflags = flags;
4186 : 71486139 : break;
4187 : :
4188 : 79957560 : case COMPONENT_REF:
4189 : 79957560 : if (sflags & OEP_ADDRESS_OF)
4190 : : {
4191 : 35076513 : hash_operand (TREE_OPERAND (t, 0), hstate, flags);
4192 : 35076513 : hash_operand (DECL_FIELD_OFFSET (TREE_OPERAND (t, 1)),
4193 : : hstate, flags & ~OEP_ADDRESS_OF);
4194 : 35076513 : hash_operand (DECL_FIELD_BIT_OFFSET (TREE_OPERAND (t, 1)),
4195 : : hstate, flags & ~OEP_ADDRESS_OF);
4196 : 35076513 : return;
4197 : : }
4198 : : break;
4199 : 14879770 : case ARRAY_REF:
4200 : 14879770 : case ARRAY_RANGE_REF:
4201 : 14879770 : case BIT_FIELD_REF:
4202 : 14879770 : sflags &= ~(OEP_ADDRESS_OF | OEP_ASSUME_WRAPV);
4203 : 14879770 : break;
4204 : :
4205 : 8354 : case COND_EXPR:
4206 : 8354 : flags &= ~(OEP_ADDRESS_OF | OEP_ASSUME_WRAPV);
4207 : 8354 : break;
4208 : :
4209 : 0 : case WIDEN_MULT_PLUS_EXPR:
4210 : 0 : case WIDEN_MULT_MINUS_EXPR:
4211 : 0 : {
4212 : : /* The multiplication operands are commutative. */
4213 : 0 : inchash::hash one, two;
4214 : 0 : hash_operand (TREE_OPERAND (t, 0), one, flags);
4215 : 0 : hash_operand (TREE_OPERAND (t, 1), two, flags);
4216 : 0 : hstate.add_commutative (one, two);
4217 : 0 : hash_operand (TREE_OPERAND (t, 2), hstate, flags);
4218 : 0 : return;
4219 : : }
4220 : :
4221 : 66645 : case CALL_EXPR:
4222 : 66645 : if (CALL_EXPR_FN (t) == NULL_TREE)
4223 : 2 : hstate.add_int (CALL_EXPR_IFN (t));
4224 : : break;
4225 : :
4226 : 72 : case TARGET_EXPR:
4227 : : /* For TARGET_EXPR, just hash on the TARGET_EXPR_SLOT.
4228 : : Usually different TARGET_EXPRs should just use
4229 : : different temporaries in their slots. */
4230 : 72 : hash_operand (TARGET_EXPR_SLOT (t), hstate, flags);
4231 : 72 : return;
4232 : :
4233 : 193855 : case OBJ_TYPE_REF:
4234 : : /* Virtual table reference. */
4235 : 193855 : inchash::add_expr (OBJ_TYPE_REF_EXPR (t), hstate, flags);
4236 : 193855 : flags &= ~(OEP_ADDRESS_OF | OEP_ASSUME_WRAPV);
4237 : 193855 : inchash::add_expr (OBJ_TYPE_REF_TOKEN (t), hstate, flags);
4238 : 193855 : inchash::add_expr (OBJ_TYPE_REF_OBJECT (t), hstate, flags);
4239 : 193855 : if (!virtual_method_call_p (t))
4240 : : return;
4241 : 193840 : if (tree c = obj_type_ref_class (t))
4242 : : {
4243 : 193840 : c = TYPE_NAME (TYPE_MAIN_VARIANT (c));
4244 : : /* We compute mangled names only when free_lang_data is run.
4245 : : In that case we can hash precisely. */
4246 : 193840 : if (TREE_CODE (c) == TYPE_DECL
4247 : 193840 : && DECL_ASSEMBLER_NAME_SET_P (c))
4248 : 674 : hstate.add_object
4249 : 674 : (IDENTIFIER_HASH_VALUE
4250 : : (DECL_ASSEMBLER_NAME (c)));
4251 : : }
4252 : 193840 : return;
4253 : : default:
4254 : : break;
4255 : : }
4256 : :
4257 : : /* Don't hash the type, that can lead to having nodes which
4258 : : compare equal according to operand_equal_p, but which
4259 : : have different hash codes. */
4260 : 266079564 : if (code == NON_LVALUE_EXPR)
4261 : : {
4262 : : /* Make sure to include signedness in the hash computation. */
4263 : 0 : hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
4264 : 0 : hash_operand (TREE_OPERAND (t, 0), hstate, flags);
4265 : : }
4266 : :
4267 : 266079564 : else if (commutative_tree_code (code))
4268 : : {
4269 : : /* It's a commutative expression. We want to hash it the same
4270 : : however it appears. We do this by first hashing both operands
4271 : : and then rehashing based on the order of their independent
4272 : : hashes. */
4273 : 15986201 : inchash::hash one, two;
4274 : 15986201 : hash_operand (TREE_OPERAND (t, 0), one, flags);
4275 : 15986201 : hash_operand (TREE_OPERAND (t, 1), two, flags);
4276 : 15986201 : hstate.add_commutative (one, two);
4277 : : }
4278 : : else
4279 : 716798332 : for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
4280 : 683316619 : hash_operand (TREE_OPERAND (t, i), hstate,
4281 : : i == 0 ? flags : sflags);
4282 : : }
4283 : : return;
4284 : : }
4285 : : }
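An editor's sketch of the iterative use mentioned in the comment before hash_operand, assuming hypothetical trees `t1' and `t2':

  operand_compare cmp;
  inchash::hash hstate (0);
  cmp.hash_operand (t1, hstate, 0);   /* Mix T1 into the running state.  */
  cmp.hash_operand (t2, hstate, 0);   /* Then T2, reusing the same state.  */
  hashval_t h = hstate.end ();        /* Combined hash of both operands.  */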
4286 : :
4287 : : bool
4288 : 6566765675 : operand_compare::verify_hash_value (const_tree arg0, const_tree arg1,
4289 : : unsigned int flags, bool *ret)
4290 : : {
4291 : : /* When checking and unless comparing DECL names, verify that if
4292 : : the outermost operand_equal_p call returns non-zero then ARG0
4293 : : and ARG1 have the same hash value. */
4294 : 6566765675 : if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
4295 : : {
4296 : 2734798111 : if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
4297 : : {
4298 : 410955171 : if (arg0 != arg1 && !(flags & (OEP_DECL_NAME | OEP_ASSUME_WRAPV)))
4299 : : {
4300 : 82021501 : inchash::hash hstate0 (0), hstate1 (0);
4301 : 82021501 : hash_operand (arg0, hstate0, flags | OEP_HASH_CHECK);
4302 : 82021501 : hash_operand (arg1, hstate1, flags | OEP_HASH_CHECK);
4303 : 82021501 : hashval_t h0 = hstate0.end ();
4304 : 82021501 : hashval_t h1 = hstate1.end ();
4305 : 82021501 : gcc_assert (h0 == h1);
4306 : : }
4307 : 410955171 : *ret = true;
4308 : : }
4309 : : else
4310 : 2323842940 : *ret = false;
4311 : :
4312 : 2734798111 : return true;
4313 : : }
4314 : :
4315 : : return false;
4316 : : }
4317 : :
4318 : :
4319 : : static operand_compare default_compare_instance;
4320 : :
4321 : : /* Convenience wrapper around the operand_compare class because usually we do
4322 : : not need to play with the valueizer. */
4323 : :
4324 : : bool
4325 : 2732819238 : operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
4326 : : {
4327 : 2732819238 : return default_compare_instance.operand_equal_p (arg0, arg1, flags);
4328 : : }
4329 : :
4330 : : namespace inchash
4331 : : {
4332 : :
4333 : : /* Generate a hash value for an expression. This can be used iteratively
4334 : : by passing a previous result as the HSTATE argument.
4335 : :
4336 : : This function is intended to produce the same hash for expressions which
4337 : : would compare equal using operand_equal_p. */
4338 : : void
4339 : 1826806984 : add_expr (const_tree t, inchash::hash &hstate, unsigned int flags)
4340 : : {
4341 : 1826806984 : default_compare_instance.hash_operand (t, hstate, flags);
4342 : 1826806984 : }
4343 : :
4344 : : }
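An editor's sketch of the invariant stated above, assuming distinct, non-side-effecting integer trees `a' and `b': expressions that compare equal under operand_equal_p must hash identically.

  inchash::hash h1 (0), h2 (0);
  inchash::add_expr (build2 (PLUS_EXPR, integer_type_node, a, b), h1, 0);
  inchash::add_expr (build2 (PLUS_EXPR, integer_type_node, b, a), h2, 0);
  /* The two PLUS_EXPRs are operand_equal_p, and commutative operands are
     hashed order-independently, so h1.end () == h2.end ().  */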
4345 : :
4346 : : /* Similar to operand_equal_p, but see if ARG0 might be a variant of ARG1
4347 : : with a different signedness or a narrower precision. */
4348 : :
4349 : : static bool
4350 : 16418429 : operand_equal_for_comparison_p (tree arg0, tree arg1)
4351 : : {
4352 : 16418429 : if (operand_equal_p (arg0, arg1, 0))
4353 : : return true;
4354 : :
4355 : 31292002 : if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
4356 : 26521915 : || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
4357 : : return false;
4358 : :
4359 : : /* Discard any conversions that don't change the modes of ARG0 and ARG1
4360 : : and see if the inner values are the same. This removes any
4361 : : signedness comparison, which doesn't matter here. */
4362 : 4954128 : tree op0 = arg0;
4363 : 4954128 : tree op1 = arg1;
4364 : 4954128 : STRIP_NOPS (op0);
4365 : 4954128 : STRIP_NOPS (op1);
4366 : 4954128 : if (operand_equal_p (op0, op1, 0))
4367 : : return true;
4368 : :
4369 : : /* Discard a single widening conversion from ARG1 and see if the inner
4370 : : value is the same as ARG0. */
4371 : 4097617 : if (CONVERT_EXPR_P (arg1)
4372 : 675258 : && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0)))
4373 : 675212 : && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))
4374 : 675212 : < TYPE_PRECISION (TREE_TYPE (arg1))
4375 : 5051113 : && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
4376 : : return true;
4377 : :
4378 : : return false;
4379 : : }
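An editor's sketch, assuming a hypothetical `int' VAR_DECL `x' and a target where long is wider than int: a single widening conversion of X is accepted as a comparison variant even though plain operand_equal_p rejects it because the precisions differ.

  tree widened = build1 (NOP_EXPR, long_integer_type_node, x);   /* (long) x */
  gcc_checking_assert (operand_equal_for_comparison_p (x, widened));
  gcc_checking_assert (!operand_equal_p (x, widened, 0));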
4380 : :
4381 : : /* See if ARG is an expression that is either a comparison or is performing
4382 : : arithmetic on comparisons. The comparisons must only be comparing
4383 : : two different values, which will be stored in *CVAL1 and *CVAL2; if
4384 : : they are nonzero it means that some operands have already been found.
4385 : : No variables may be used anywhere else in the expression except in the
4386 : : comparisons.
4387 : :
4388 : : If this is true, return true. Otherwise, return false. */
4389 : :
4390 : : static bool
4391 : 50652180 : twoval_comparison_p (tree arg, tree *cval1, tree *cval2)
4392 : : {
4393 : 53889050 : enum tree_code code = TREE_CODE (arg);
4394 : 53889050 : enum tree_code_class tclass = TREE_CODE_CLASS (code);
4395 : :
4396 : : /* We can handle some of the tcc_expression cases here. */
4397 : 53889050 : if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
4398 : : tclass = tcc_unary;
4399 : 53334488 : else if (tclass == tcc_expression
4400 : 539695 : && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
4401 : 539695 : || code == COMPOUND_EXPR))
4402 : : tclass = tcc_binary;
4403 : :
4404 : 53323744 : switch (tclass)
4405 : : {
4406 : 3236870 : case tcc_unary:
4407 : 3236870 : return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2);
4408 : :
4409 : 4417045 : case tcc_binary:
4410 : 4417045 : return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
4411 : 4417045 : && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2));
4412 : :
4413 : : case tcc_constant:
4414 : : return true;
4415 : :
4416 : 528951 : case tcc_expression:
4417 : 528951 : if (code == COND_EXPR)
4418 : 759 : return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
4419 : 759 : && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2)
4420 : 823 : && twoval_comparison_p (TREE_OPERAND (arg, 2), cval1, cval2));
4421 : : return false;
4422 : :
4423 : 559118 : case tcc_comparison:
4424 : : /* First see if we can handle the first operand, then the second. For
4425 : : the second operand, we know *CVAL1 can't be zero. It must be that
4426 : : one side of the comparison is each of the values; test for the
4427 : : case where this isn't true by failing if the two operands
4428 : : are the same. */
4429 : :
4430 : 559118 : if (operand_equal_p (TREE_OPERAND (arg, 0),
4431 : 559118 : TREE_OPERAND (arg, 1), 0))
4432 : : return false;
4433 : :
4434 : 559118 : if (*cval1 == 0)
4435 : 557067 : *cval1 = TREE_OPERAND (arg, 0);
4436 : 2051 : else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
4437 : : ;
4438 : 1932 : else if (*cval2 == 0)
4439 : 0 : *cval2 = TREE_OPERAND (arg, 0);
4440 : 1932 : else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
4441 : : ;
4442 : : else
4443 : : return false;
4444 : :
4445 : 557186 : if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
4446 : : ;
4447 : 557186 : else if (*cval2 == 0)
4448 : 557067 : *cval2 = TREE_OPERAND (arg, 1);
4449 : 119 : else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
4450 : : ;
4451 : : else
4452 : : return false;
4453 : :
4454 : : return true;
4455 : :
4456 : : default:
4457 : : return false;
4458 : : }
4459 : : }
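An editor's sketch, assuming distinct, non-side-effecting integer trees `a' and `b': an expression whose comparisons involve exactly the two values A and B satisfies twoval_comparison_p.

  tree cval1 = NULL_TREE, cval2 = NULL_TREE;
  tree expr = build2 (BIT_IOR_EXPR, boolean_type_node,
                      build2 (LT_EXPR, boolean_type_node, a, b),
                      build2 (EQ_EXPR, boolean_type_node, a, b));
  /* Returns true and sets CVAL1 = A, CVAL2 = B; an expression that used a
     third variable outside a comparison would return false.  */
  gcc_checking_assert (twoval_comparison_p (expr, &cval1, &cval2));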
4460 : :
4461 : : /* ARG is a tree that is known to contain just arithmetic operations and
4462 : : comparisons. Evaluate the operations in the tree substituting NEW0 for
4463 : : any occurrence of OLD0 as an operand of a comparison and likewise for
4464 : : NEW1 and OLD1. */
4465 : :
4466 : : static tree
4467 : 702 : eval_subst (location_t loc, tree arg, tree old0, tree new0,
4468 : : tree old1, tree new1)
4469 : : {
4470 : 702 : tree type = TREE_TYPE (arg);
4471 : 702 : enum tree_code code = TREE_CODE (arg);
4472 : 702 : enum tree_code_class tclass = TREE_CODE_CLASS (code);
4473 : :
4474 : : /* We can handle some of the tcc_expression cases here. */
4475 : 702 : if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
4476 : : tclass = tcc_unary;
4477 : 702 : else if (tclass == tcc_expression
4478 : 18 : && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
4479 : : tclass = tcc_binary;
4480 : :
4481 : 693 : switch (tclass)
4482 : : {
4483 : 165 : case tcc_unary:
4484 : 165 : return fold_build1_loc (loc, code, type,
4485 : 165 : eval_subst (loc, TREE_OPERAND (arg, 0),
4486 : 165 : old0, new0, old1, new1));
4487 : :
4488 : 168 : case tcc_binary:
4489 : 336 : return fold_build2_loc (loc, code, type,
4490 : 168 : eval_subst (loc, TREE_OPERAND (arg, 0),
4491 : : old0, new0, old1, new1),
4492 : 168 : eval_subst (loc, TREE_OPERAND (arg, 1),
4493 : 168 : old0, new0, old1, new1));
4494 : :
4495 : 9 : case tcc_expression:
4496 : 9 : switch (code)
4497 : : {
4498 : 0 : case SAVE_EXPR:
4499 : 0 : return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
4500 : 0 : old1, new1);
4501 : :
4502 : 0 : case COMPOUND_EXPR:
4503 : 0 : return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
4504 : 0 : old1, new1);
4505 : :
4506 : 9 : case COND_EXPR:
4507 : 27 : return fold_build3_loc (loc, code, type,
4508 : 9 : eval_subst (loc, TREE_OPERAND (arg, 0),
4509 : : old0, new0, old1, new1),
4510 : 9 : eval_subst (loc, TREE_OPERAND (arg, 1),
4511 : : old0, new0, old1, new1),
4512 : 9 : eval_subst (loc, TREE_OPERAND (arg, 2),
4513 : 9 : old0, new0, old1, new1));
4514 : : default:
4515 : : break;
4516 : : }
4517 : : /* Fall through - ??? */
4518 : :
4519 : 180 : case tcc_comparison:
4520 : 180 : {
4521 : 180 : tree arg0 = TREE_OPERAND (arg, 0);
4522 : 180 : tree arg1 = TREE_OPERAND (arg, 1);
4523 : :
4524 : : /* We need to check both for exact equality and tree equality. The
4525 : : former will be true if the operand has a side-effect. In that
4526 : : case, we know the operand occurred exactly once. */
4527 : :
4528 : 180 : if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
4529 : : arg0 = new0;
4530 : 0 : else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
4531 : : arg0 = new1;
4532 : :
4533 : 180 : if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
4534 : : arg1 = new0;
4535 : 180 : else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
4536 : : arg1 = new1;
4537 : :
4538 : 180 : return fold_build2_loc (loc, code, type, arg0, arg1);
4539 : : }
4540 : :
4541 : : default:
4542 : : return arg;
4543 : : }
4544 : : }
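
   Illustrative sketch (not part of fold-const.cc): the twoval_comparison_p /
   eval_subst machinery above lets fold reason about expressions built purely
   from comparisons of the same two values by substituting each possible
   ordering of those values.  A brute-force C check of the kind of identity
   this targets, over a small domain chosen only for the example:

   #include <assert.h>

   int main (void)
   {
     for (int x = -8; x <= 8; x++)
       for (int y = -8; y <= 8; y++)
         {
           /* Exactly one ordering of x and y holds, so the disjunction of
              all three is always 1 and the conjunction of two exclusive
              orderings is always 0 -- facts fold can prove symbolically by
              substituting the three orderings via eval_subst.  */
           assert (((x < y) || (x == y) || (x > y)) == 1);
           assert (((x < y) && (x > y)) == 0);
         }
     return 0;
   }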
4545 : :
4546 : : /* Return a tree for the case when the result of an expression is RESULT
4547 : : converted to TYPE and OMITTED was previously an operand of the expression
4548 : : but is now not needed (e.g., we folded OMITTED * 0).
4549 : :
4550 : : If OMITTED has side effects, we must evaluate it. Otherwise, just do
4551 : : the conversion of RESULT to TYPE. */
4552 : :
4553 : : tree
4554 : 274418 : omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
4555 : : {
4556 : 274418 : tree t = fold_convert_loc (loc, type, result);
4557 : :
4558 : : /* If the resulting operand is an empty statement, just return the omitted
4559 : : statement casted to void. */
4560 : :    statement cast to void.  */
4561 : 0 : return build1_loc (loc, NOP_EXPR, void_type_node,
4562 : 0 : fold_ignored_result (omitted));
4563 : :
4564 : 274418 : if (TREE_SIDE_EFFECTS (omitted))
4565 : 13007 : return build2_loc (loc, COMPOUND_EXPR, type,
4566 : 13007 : fold_ignored_result (omitted), t);
4567 : :
4568 : 261411 : return non_lvalue_loc (loc, t);
4569 : : }
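
   A minimal user-level sketch (not from GCC) of the semantics the
   COMPOUND_EXPR built above preserves: when a fold discards an operand
   that has side effects (e.g. folding f () * 0), the operand must still
   be evaluated:

   #include <assert.h>

   static int calls;
   static int f (void) { calls++; return 42; }

   int main (void)
   {
     /* Conceptually folded as (f (), 0): the multiplication disappears,
        but the call's side effect is kept by the compound expression.  */
     int r = f () * 0;
     assert (r == 0 && calls == 1);
     return 0;
   }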
4570 : :
4571 : : /* Return a tree for the case when the result of an expression is RESULT
4572 : : converted to TYPE and OMITTED1 and OMITTED2 were previously operands
4573 : : of the expression but are now not needed.
4574 : :
4575 : : If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
4576 : : If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
4577 : : evaluated before OMITTED2. Otherwise, if neither has side effects,
4578 : : just do the conversion of RESULT to TYPE. */
4579 : :
4580 : : tree
4581 : 6978 : omit_two_operands_loc (location_t loc, tree type, tree result,
4582 : : tree omitted1, tree omitted2)
4583 : : {
4584 : 6978 : tree t = fold_convert_loc (loc, type, result);
4585 : :
4586 : 6978 : if (TREE_SIDE_EFFECTS (omitted2))
4587 : 68 : t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
4588 : 6978 : if (TREE_SIDE_EFFECTS (omitted1))
4589 : 175 : t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
4590 : :
4591 : 6978 : return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
4592 : : }
4593 : :
4594 : :
4595 : : /* Return a simplified tree node for the truth-negation of ARG. This
4596 : : never alters ARG itself. We assume that ARG is an operation that
4597 : : returns a truth value (0 or 1).
4598 : :
4599 : : FIXME: one would think we would fold the result, but it causes
4600 : : problems with the dominator optimizer. */
4601 : :
4602 : : static tree
4603 : 44958090 : fold_truth_not_expr (location_t loc, tree arg)
4604 : : {
4605 : 44958090 : tree type = TREE_TYPE (arg);
4606 : 44958090 : enum tree_code code = TREE_CODE (arg);
4607 : 44958090 : location_t loc1, loc2;
4608 : :
4609 : : /* If this is a comparison, we can simply invert it, except for
4610 : : floating-point non-equality comparisons, in which case we just
4611 : : enclose a TRUTH_NOT_EXPR around what we have. */
4612 : :
4613 : 44958090 : if (TREE_CODE_CLASS (code) == tcc_comparison)
4614 : : {
4615 : 34907759 : tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
4616 : 29321010 : if (FLOAT_TYPE_P (op_type)
4617 : 5596846 : && flag_trapping_math
4618 : 5566968 : && code != ORDERED_EXPR && code != UNORDERED_EXPR
4619 : 40435110 : && code != NE_EXPR && code != EQ_EXPR)
4620 : : return NULL_TREE;
4621 : :
4622 : 30060799 : code = invert_tree_comparison (code, HONOR_NANS (op_type));
4623 : 30060799 : if (code == ERROR_MARK)
4624 : : return NULL_TREE;
4625 : :
4626 : 30060799 : tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
4627 : 30060799 : TREE_OPERAND (arg, 1));
4628 : 30060799 : copy_warning (ret, arg);
4629 : 30060799 : return ret;
4630 : : }
4631 : :
4632 : 10050331 : switch (code)
4633 : : {
4634 : 0 : case INTEGER_CST:
4635 : 0 : return constant_boolean_node (integer_zerop (arg), type);
4636 : :
4637 : 47090 : case TRUTH_AND_EXPR:
4638 : 47090 : loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4639 : 47090 : loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4640 : 94180 : return build2_loc (loc, TRUTH_OR_EXPR, type,
4641 : 47090 : invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4642 : 94180 : invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4643 : :
4644 : 2442 : case TRUTH_OR_EXPR:
4645 : 2442 : loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4646 : 2442 : loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4647 : 4884 : return build2_loc (loc, TRUTH_AND_EXPR, type,
4648 : 2442 : invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4649 : 4884 : invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4650 : :
4651 : 28303 : case TRUTH_XOR_EXPR:
4652 : : /* Here we can invert either operand. We invert the first operand
4653 : : unless the second operand is a TRUTH_NOT_EXPR in which case our
4654 : : result is the XOR of the first operand with the inside of the
4655 : : negation of the second operand. */
4656 : :
4657 : 28303 : if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
4658 : 7 : return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
4659 : 14 : TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
4660 : : else
4661 : 28296 : return build2_loc (loc, TRUTH_XOR_EXPR, type,
4662 : 28296 : invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
4663 : 56592 : TREE_OPERAND (arg, 1));
4664 : :
4665 : 228457 : case TRUTH_ANDIF_EXPR:
4666 : 228457 : loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4667 : 228457 : loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4668 : 456914 : return build2_loc (loc, TRUTH_ORIF_EXPR, type,
4669 : 228457 : invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4670 : 456914 : invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4671 : :
4672 : 18733 : case TRUTH_ORIF_EXPR:
4673 : 18733 : loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4674 : 18733 : loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4675 : 37466 : return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
4676 : 18733 : invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4677 : 37466 : invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4678 : :
4679 : 775925 : case TRUTH_NOT_EXPR:
4680 : 775925 : return TREE_OPERAND (arg, 0);
4681 : :
4682 : 7945 : case COND_EXPR:
4683 : 7945 : {
4684 : 7945 : tree arg1 = TREE_OPERAND (arg, 1);
4685 : 7945 : tree arg2 = TREE_OPERAND (arg, 2);
4686 : :
4687 : 7945 : loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4688 : 7945 : loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
4689 : :
4690 : : /* A COND_EXPR may have a throw as one operand, which
4691 : : then has void type. Just leave void operands
4692 : : as they are. */
4693 : 7945 : return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
4694 : 7945 : VOID_TYPE_P (TREE_TYPE (arg1))
4695 : 7945 : ? arg1 : invert_truthvalue_loc (loc1, arg1),
4696 : 7945 : VOID_TYPE_P (TREE_TYPE (arg2))
4697 : 15887 : ? arg2 : invert_truthvalue_loc (loc2, arg2));
4698 : : }
4699 : :
4700 : 140 : case COMPOUND_EXPR:
4701 : 140 : loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4702 : 280 : return build2_loc (loc, COMPOUND_EXPR, type,
4703 : 140 : TREE_OPERAND (arg, 0),
4704 : 280 : invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
4705 : :
4706 : 0 : case NON_LVALUE_EXPR:
4707 : 0 : loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4708 : 0 : return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
4709 : :
4710 : 73450 : CASE_CONVERT:
4711 : 73450 : if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
4712 : 73386 : return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
4713 : :
4714 : : /* fall through */
4715 : :
4716 : 64 : case FLOAT_EXPR:
4717 : 64 : loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4718 : 64 : return build1_loc (loc, TREE_CODE (arg), type,
4719 : 128 : invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4720 : :
4721 : 652 : case BIT_AND_EXPR:
4722 : 652 : if (!integer_onep (TREE_OPERAND (arg, 1)))
4723 : : return NULL_TREE;
4724 : 0 : return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
4725 : :
4726 : 2 : case SAVE_EXPR:
4727 : 2 : return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
4728 : :
4729 : 71 : case CLEANUP_POINT_EXPR:
4730 : 71 : loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4731 : 71 : return build1_loc (loc, CLEANUP_POINT_EXPR, type,
4732 : 142 : invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4733 : :
4734 : : default:
4735 : : return NULL_TREE;
4736 : : }
4737 : : }
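
   A small standalone check (illustrative only) of why the tcc_comparison
   case above refuses to invert ordered floating-point comparisons when
   NaNs are honored: with a NaN operand, both a < b and a >= b are false,
   so rewriting !(a < b) as a >= b would be wrong.  For integers the
   inversion is always exact:

   #include <assert.h>
   #include <math.h>

   int main (void)
   {
     double a = NAN, b = 1.0;
     assert (!(a < b));    /* comparison with NaN is false ...        */
     assert (!(a >= b));   /* ... and so is the naively inverted one. */

     int x = 3, y = 5;
     assert (!(x < y) == (x >= y));   /* integer inversion is exact. */
     return 0;
   }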
4738 : :
4739 : : /* Fold the truth-negation of ARG. This never alters ARG itself. We
4740 : : assume that ARG is an operation that returns a truth value (0 or 1
4741 : : for scalars, 0 or -1 for vectors). Return the folded expression if
4742 : : folding is successful. Otherwise, return NULL_TREE. */
4743 : :
4744 : : static tree
4745 : 1581898 : fold_invert_truthvalue (location_t loc, tree arg)
4746 : : {
4747 : 1581898 : tree type = TREE_TYPE (arg);
4748 : 3163772 : return fold_unary_loc (loc, VECTOR_TYPE_P (type)
4749 : : ? BIT_NOT_EXPR
4750 : : : TRUTH_NOT_EXPR,
4751 : 1581898 : type, arg);
4752 : : }
4753 : :
4754 : : /* Return a simplified tree node for the truth-negation of ARG. This
4755 : : never alters ARG itself. We assume that ARG is an operation that
4756 : : returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
4757 : :
4758 : : tree
4759 : 38255373 : invert_truthvalue_loc (location_t loc, tree arg)
4760 : : {
4761 : 38255373 : if (TREE_CODE (arg) == ERROR_MARK)
4762 : : return arg;
4763 : :
4764 : 38255373 : tree type = TREE_TYPE (arg);
4765 : 76510746 : return fold_build1_loc (loc, VECTOR_TYPE_P (type)
4766 : : ? BIT_NOT_EXPR
4767 : : : TRUTH_NOT_EXPR,
4768 : 38255373 : type, arg);
4769 : : }
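
   Sketch (not from GCC) of why vector truth values are negated with
   BIT_NOT_EXPR above: vector comparisons produce all-zeros / all-ones
   lane masks, and for values restricted to 0 and -1 the bitwise
   complement is the logical complement:

   #include <assert.h>
   #include <stdint.h>

   int main (void)
   {
     int32_t true_lane = -1, false_lane = 0;
     assert (~true_lane == false_lane);
     assert (~false_lane == true_lane);
     return 0;
   }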
4770 : :
4771 : : /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
4772 : : starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
4773 : : and uses reverse storage order if REVERSEP is nonzero. ORIG_INNER
4774 : : is the original memory reference used to preserve the alias set of
4775 : : the access. */
4776 : :
4777 : : tree
4778 : 637414 : make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type,
4779 : : HOST_WIDE_INT bitsize, poly_int64 bitpos,
4780 : : int unsignedp, int reversep)
4781 : : {
4782 : 637414 : tree result, bftype;
4783 : :
4784 : : /* Attempt not to lose the access path if possible. */
4785 : 637414 : if (TREE_CODE (orig_inner) == COMPONENT_REF)
4786 : : {
4787 : 633644 : tree ninner = TREE_OPERAND (orig_inner, 0);
4788 : 633644 : machine_mode nmode;
4789 : 633644 : poly_int64 nbitsize, nbitpos;
4790 : 633644 : tree noffset;
4791 : 633644 : int nunsignedp, nreversep, nvolatilep = 0;
4792 : 633644 : tree base = get_inner_reference (ninner, &nbitsize, &nbitpos,
4793 : : &noffset, &nmode, &nunsignedp,
4794 : : &nreversep, &nvolatilep);
4795 : 633644 : if (base == inner
4796 : 633516 : && noffset == NULL_TREE
4797 : 633516 : && known_subrange_p (bitpos, bitsize, nbitpos, nbitsize)
4798 : 633502 : && !reversep
4799 : 633430 : && !nreversep
4800 : 1267074 : && !nvolatilep)
4801 : : {
4802 : 633430 : inner = ninner;
4803 : 633644 : bitpos -= nbitpos;
4804 : : }
4805 : : }
4806 : :
4807 : 637414 : alias_set_type iset = get_alias_set (orig_inner);
4808 : 637414 : if (iset == 0 && get_alias_set (inner) != iset)
4809 : 209 : inner = fold_build2 (MEM_REF, TREE_TYPE (inner),
4810 : : build_fold_addr_expr (inner),
4811 : : build_int_cst (ptr_type_node, 0));
4812 : :
4813 : 637414 : if (known_eq (bitpos, 0) && !reversep)
4814 : : {
4815 : 322690 : tree size = TYPE_SIZE (TREE_TYPE (inner));
4816 : 645380 : if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
4817 : 322492 : || POINTER_TYPE_P (TREE_TYPE (inner)))
4818 : 202 : && tree_fits_shwi_p (size)
4819 : 322892 : && tree_to_shwi (size) == bitsize)
4820 : 179 : return fold_convert_loc (loc, type, inner);
4821 : : }
4822 : :
4823 : 637235 : bftype = type;
4824 : 637235 : if (TYPE_PRECISION (bftype) != bitsize
4825 : 637235 : || TYPE_UNSIGNED (bftype) == !unsignedp)
4826 : 399 : bftype = build_nonstandard_integer_type (bitsize, 0);
4827 : :
4828 : 637235 : result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
4829 : 637235 : bitsize_int (bitsize), bitsize_int (bitpos));
4830 : 637235 : REF_REVERSE_STORAGE_ORDER (result) = reversep;
4831 : :
4832 : 637235 : if (bftype != type)
4833 : 399 : result = fold_convert_loc (loc, type, result);
4834 : :
4835 : : return result;
4836 : : }
4837 : :
4838 : : /* Optimize a bit-field compare.
4839 : :
4840 : : There are two cases: First is a compare against a constant and the
4841 : : second is a comparison of two items where the fields are at the same
4842 : : bit position relative to the start of a chunk (byte, halfword, word)
4843 : : large enough to contain it. In these cases we can avoid the shift
4844 : : implicit in bitfield extractions.
4845 : :
4846 : : For constants, we emit a compare of the shifted constant with the
4847 : : BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
4848 : : compared. For two fields at the same position, we do the ANDs with the
4849 : : similar mask and compare the result of the ANDs.
4850 : :
4851 : : CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
4852 : : COMPARE_TYPE is the type of the comparison, and LHS and RHS
4853 : : are the left and right operands of the comparison, respectively.
4854 : :
4855 : : If the optimization described above can be done, we return the resulting
4856 : : tree. Otherwise we return zero. */
4857 : :
4858 : : static tree
4859 : 3757554 : optimize_bit_field_compare (location_t loc, enum tree_code code,
4860 : : tree compare_type, tree lhs, tree rhs)
4861 : : {
4862 : 3757554 : poly_int64 plbitpos, plbitsize, rbitpos, rbitsize;
4863 : 3757554 : HOST_WIDE_INT lbitpos, lbitsize, nbitpos, nbitsize;
4864 : 3757554 : tree type = TREE_TYPE (lhs);
4865 : 3757554 : tree unsigned_type;
4866 : 3757554 : int const_p = TREE_CODE (rhs) == INTEGER_CST;
4867 : 3757554 : machine_mode lmode, rmode;
4868 : 3757554 : scalar_int_mode nmode;
4869 : 3757554 : int lunsignedp, runsignedp;
4870 : 3757554 : int lreversep, rreversep;
4871 : 3757554 : int lvolatilep = 0, rvolatilep = 0;
4872 : 3757554 : tree linner, rinner = NULL_TREE;
4873 : 3757554 : tree mask;
4874 : 3757554 : tree offset;
4875 : :
4876 : : /* Get all the information about the extractions being done. If the bit size
4877 : : is the same as the size of the underlying object, we aren't doing an
4878 : : extraction at all and so can do nothing. We also don't want to
4879 : : do anything if the inner expression is a PLACEHOLDER_EXPR since we
4880 : : then will no longer be able to replace it. */
4881 : 3757554 : linner = get_inner_reference (lhs, &plbitsize, &plbitpos, &offset, &lmode,
4882 : : &lunsignedp, &lreversep, &lvolatilep);
4883 : 3757554 : if (linner == lhs
4884 : 3757554 : || !known_size_p (plbitsize)
4885 : 3757554 : || !plbitsize.is_constant (&lbitsize)
4886 : 3757554 : || !plbitpos.is_constant (&lbitpos)
4887 : 7515108 : || known_eq (lbitsize, GET_MODE_BITSIZE (lmode))
4888 : 636327 : || offset != 0
4889 : 636302 : || TREE_CODE (linner) == PLACEHOLDER_EXPR
4890 : 4393856 : || lvolatilep)
4891 : 3121312 : return 0;
4892 : :
4893 : 636242 : if (const_p)
4894 : 623110 : rreversep = lreversep;
4895 : : else
4896 : : {
4897 : : /* If this is not a constant, we can only do something if bit positions,
4898 : : sizes, signedness and storage order are the same. */
4899 : 13132 : rinner
4900 : 13132 : = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
4901 : : &runsignedp, &rreversep, &rvolatilep);
4902 : :
4903 : 13132 : if (rinner == rhs
4904 : 12968 : || maybe_ne (lbitpos, rbitpos)
4905 : 12934 : || maybe_ne (lbitsize, rbitsize)
4906 : 12934 : || lunsignedp != runsignedp
4907 : 12934 : || lreversep != rreversep
4908 : 12934 : || offset != 0
4909 : 12934 : || TREE_CODE (rinner) == PLACEHOLDER_EXPR
4910 : 26066 : || rvolatilep)
4911 : : return 0;
4912 : : }
4913 : :
4914 : : /* Honor the C++ memory model and mimic what RTL expansion does. */
4915 : 636044 : poly_uint64 bitstart = 0;
4916 : 636044 : poly_uint64 bitend = 0;
4917 : 636044 : if (TREE_CODE (lhs) == COMPONENT_REF)
4918 : : {
4919 : 636044 : get_bit_range (&bitstart, &bitend, lhs, &plbitpos, &offset);
4920 : 636044 : if (!plbitpos.is_constant (&lbitpos) || offset != NULL_TREE)
4921 : : return 0;
4922 : : }
4923 : :
4924 : : /* See if we can find a mode to refer to this field. We should be able to,
4925 : : but fail if we can't. */
4926 : 1272088 : if (!get_best_mode (lbitsize, lbitpos, bitstart, bitend,
4927 : 623110 : const_p ? TYPE_ALIGN (TREE_TYPE (linner))
4928 : 12934 : : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
4929 : : TYPE_ALIGN (TREE_TYPE (rinner))),
4930 : 636044 : BITS_PER_WORD, false, &nmode))
4931 : : return 0;
4932 : :
4933 : : /* Set signed and unsigned types of the precision of this mode for the
4934 : : shifts below. */
4935 : 634307 : unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
4936 : :
4937 : : /* Compute the bit position and size for the new reference and our offset
4938 : : within it. If the new reference is the same size as the original, we
4939 : : won't optimize anything, so return zero. */
4940 : 634307 : nbitsize = GET_MODE_BITSIZE (nmode);
4941 : 634307 : nbitpos = lbitpos & ~ (nbitsize - 1);
4942 : 634307 : lbitpos -= nbitpos;
4943 : 634307 : if (nbitsize == lbitsize)
4944 : : return 0;
4945 : :
4946 : 622755 : if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
4947 : 54 : lbitpos = nbitsize - lbitsize - lbitpos;
4948 : :
4949 : : /* Make the mask to be used against the extracted field. */
4950 : 622755 : mask = build_int_cst_type (unsigned_type, -1);
4951 : 622755 : mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
4952 : 622755 : mask = const_binop (RSHIFT_EXPR, mask,
4953 : 622755 : size_int (nbitsize - lbitsize - lbitpos));
4954 : :
4955 : 622755 : if (! const_p)
4956 : : {
4957 : 10099 : if (nbitpos < 0)
4958 : : return 0;
4959 : :
4960 : : /* If not comparing with constant, just rework the comparison
4961 : : and return. */
4962 : 10099 : tree t1 = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4963 : : nbitsize, nbitpos, 1, lreversep);
4964 : 10099 : t1 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t1, mask);
4965 : 10099 : tree t2 = make_bit_field_ref (loc, rinner, rhs, unsigned_type,
4966 : : nbitsize, nbitpos, 1, rreversep);
4967 : 10099 : t2 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t2, mask);
4968 : 10099 : return fold_build2_loc (loc, code, compare_type, t1, t2);
4969 : : }
4970 : :
4971 : : /* Otherwise, we are handling the constant case. See if the constant is too
4972 : : big for the field. Warn and return a tree for 0 (false) if so. We do
4973 : : this not only for its own sake, but to avoid having to test for this
4974 : : error case below. If we didn't, we might generate wrong code.
4975 : :
4976 : : For unsigned fields, the constant shifted right by the field length should
4977 : : be all zero. For signed fields, the high-order bits should agree with
4978 : : the sign bit. */
4979 : :
4980 : 612656 : if (lunsignedp)
4981 : : {
4982 : 611537 : if (wi::lrshift (wi::to_wide (rhs), lbitsize) != 0)
4983 : : {
4984 : 0 : warning (0, "comparison is always %d due to width of bit-field",
4985 : : code == NE_EXPR);
4986 : 0 : return constant_boolean_node (code == NE_EXPR, compare_type);
4987 : : }
4988 : : }
4989 : : else
4990 : : {
4991 : 1119 : wide_int tem = wi::arshift (wi::to_wide (rhs), lbitsize - 1);
4992 : 1119 : if (tem != 0 && tem != -1)
4993 : : {
4994 : 0 : warning (0, "comparison is always %d due to width of bit-field",
4995 : : code == NE_EXPR);
4996 : 0 : return constant_boolean_node (code == NE_EXPR, compare_type);
4997 : : }
4998 : 1119 : }
4999 : :
5000 : 612656 : if (nbitpos < 0)
5001 : : return 0;
5002 : :
5003 : : /* Single-bit compares should always be against zero. */
5004 : 612656 : if (lbitsize == 1 && ! integer_zerop (rhs))
5005 : : {
5006 : 175 : code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
5007 : 175 : rhs = build_int_cst (type, 0);
5008 : : }
5009 : :
5010 : : /* Make a new bitfield reference, shift the constant over the
5011 : : appropriate number of bits and mask it with the computed mask
5012 : : (in case this was a signed field). If we changed it, make a new one. */
5013 : 612656 : lhs = make_bit_field_ref (loc, linner, lhs, unsigned_type,
5014 : : nbitsize, nbitpos, 1, lreversep);
5015 : :
5016 : 612656 : rhs = const_binop (BIT_AND_EXPR,
5017 : : const_binop (LSHIFT_EXPR,
5018 : : fold_convert_loc (loc, unsigned_type, rhs),
5019 : 612656 : size_int (lbitpos)),
5020 : : mask);
5021 : :
5022 : 612656 : lhs = build2_loc (loc, code, compare_type,
5023 : : build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
5024 : 612656 : return lhs;
5025 : : }
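
   Illustrative sketch of the constant case handled above (not GCC code):
   testing a 3-bit field at bit position 2 of a byte against the constant 5.
   The naive form extracts the field (shift + mask); the folded form masks
   the containing byte and compares it against the constant shifted into
   place, avoiding the shift implicit in the extraction:

   #include <assert.h>

   int main (void)
   {
     for (unsigned byte = 0; byte < 256; byte++)
       {
         unsigned field = (byte >> 2) & 7u;            /* field extraction */
         int naive = (field == 5u);
         int folded = ((byte & 0x1cu) == (5u << 2));   /* mask-and-compare */
         assert (naive == folded);
       }
     return 0;
   }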
5026 : :
5027 : : /* Subroutine for fold: determine if VAL is the INTEGER_CST that
5028 : : represents the sign bit of EXP's type. If EXP represents a sign
5029 : : or zero extension, also test VAL against the unextended type.
5030 : : The return value is the (sub)expression whose sign bit is VAL,
5031 : : or NULL_TREE otherwise. */
5032 : :
5033 : : tree
5034 : 2165 : sign_bit_p (tree exp, const_tree val)
5035 : : {
5036 : 2165 : int width;
5037 : 2165 : tree t;
5038 : :
5039 : : /* Tree EXP must have an integral type. */
5040 : 2165 : t = TREE_TYPE (exp);
5041 : 2165 : if (! INTEGRAL_TYPE_P (t))
5042 : : return NULL_TREE;
5043 : :
5044 : : /* Tree VAL must be an integer constant. */
5045 : 1833 : if (TREE_CODE (val) != INTEGER_CST
5046 : 1833 : || TREE_OVERFLOW (val))
5047 : : return NULL_TREE;
5048 : :
5049 : 1444 : width = TYPE_PRECISION (t);
5050 : 1444 : if (wi::only_sign_bit_p (wi::to_wide (val), width))
5051 : : return exp;
5052 : :
5053 : : /* Handle extension from a narrower type. */
5054 : 807 : if (TREE_CODE (exp) == NOP_EXPR
5055 : 807 : && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
5056 : 0 : return sign_bit_p (TREE_OPERAND (exp, 0), val);
5057 : :
5058 : : return NULL_TREE;
5059 : : }
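
   Sketch (hypothetical helper, not from GCC) of the test sign_bit_p
   performs: a WIDTH-bit constant is "the sign bit" exactly when only its
   top bit is set, which is what lets fold turn a masked test of that bit
   into a sign comparison:

   #include <assert.h>
   #include <stdint.h>

   static int only_sign_bit_p (unsigned val, unsigned width)
   {
     return val == (1u << (width - 1));
   }

   int main (void)
   {
     assert (only_sign_bit_p (0x80u, 8));
     assert (!only_sign_bit_p (0x40u, 8));
     assert (!only_sign_bit_p (0xc0u, 8));

     /* On a two's complement target (assumed here, as for all GCC targets),
        (x & 0x80) != 0 is the same as a signed 8-bit sign test.  */
     for (unsigned v = 0; v < 256; v++)
       assert (((v & 0x80u) != 0) == ((int8_t) v < 0));
     return 0;
   }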
5060 : :
5061 : : /* Subroutine for fold_truth_andor_1 and simple_condition_p: determine if an
5062 : : operand is simple enough to be evaluated unconditionally. */
5063 : :
5064 : : static bool
5065 : 59693983 : simple_operand_p (const_tree exp)
5066 : : {
5067 : : /* Strip any conversions that don't change the machine mode. */
5068 : 59693983 : STRIP_NOPS (exp);
5069 : :
5070 : 59693983 : return (CONSTANT_CLASS_P (exp)
5071 : 41139494 : || TREE_CODE (exp) == SSA_NAME
5072 : 72913891 : || (DECL_P (exp)
5073 : : && ! TREE_ADDRESSABLE (exp)
5074 : 4273457 : && ! TREE_THIS_VOLATILE (exp)
5075 : 4191609 : && ! DECL_NONLOCAL (exp)
5076 : : /* Don't regard global variables as simple. They may be
5077 : : allocated in ways unknown to the compiler (shared memory,
5078 : : #pragma weak, etc). */
5079 : 4190157 : && ! TREE_PUBLIC (exp)
5080 : 4169633 : && ! DECL_EXTERNAL (exp)
5081 : : /* Weakrefs are not safe to be read, since they can be NULL.
5082 : : They are !TREE_PUBLIC && !DECL_EXTERNAL but still
5083 : : have DECL_WEAK flag set. */
5084 : 4169633 : && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
5085 : : /* Loading a static variable is unduly expensive, but global
5086 : : registers aren't expensive. */
5087 : 4169633 : && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
5088 : : }
5089 : :
5090 : : /* Determine if an operand is simple enough to be evaluated unconditionally.
5091 : : In addition to simple_operand_p, we assume that comparisons, conversions,
5092 : : and logic-not operations are simple, if their operands are simple, too. */
5093 : :
5094 : : bool
5095 : 5529045 : simple_condition_p (tree exp)
5096 : : {
5097 : 5596116 : enum tree_code code;
5098 : :
5099 : 5596116 : if (TREE_SIDE_EFFECTS (exp) || generic_expr_could_trap_p (exp))
5100 : 4013838 : return false;
5101 : :
5102 : 1590576 : while (CONVERT_EXPR_P (exp))
5103 : 8298 : exp = TREE_OPERAND (exp, 0);
5104 : :
5105 : 1582278 : code = TREE_CODE (exp);
5106 : :
5107 : 1582278 : if (TREE_CODE_CLASS (code) == tcc_comparison)
5108 : 1203272 : return (simple_operand_p (TREE_OPERAND (exp, 0))
5109 : 1203272 : && simple_operand_p (TREE_OPERAND (exp, 1)));
5110 : :
5111 : 379006 : if (code == TRUTH_NOT_EXPR)
5112 : 67071 : return simple_condition_p (TREE_OPERAND (exp, 0));
5113 : :
5114 : 311935 : return simple_operand_p (exp);
5115 : : }
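
   A small example (not from GCC) of what "simple enough to be evaluated
   unconditionally" buys: conditions without side effects that cannot trap
   may be evaluated eagerly, so a short-circuit && of two such conditions
   can be folded into a plain bitwise AND of their 0/1 values.  A condition
   like (p && *p == 3) does not qualify, since *p must not be evaluated
   when p is null:

   #include <assert.h>

   int main (void)
   {
     for (int x = -2; x <= 2; x++)
       for (int y = -2; y <= 2; y++)
         assert (((x > 0) && (y > 0)) == ((x > 0) & (y > 0)));
     return 0;
   }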
5116 : :
5117 : :
5118 : : /* The following functions are subroutines to fold_range_test and allow it to
5119 : : try to change a logical combination of comparisons into a range test.
5120 : :
5121 : : For example, both
5122 : : X == 2 || X == 3 || X == 4 || X == 5
5123 : : and
5124 : : X >= 2 && X <= 5
5125 : : are converted to
5126 : : (unsigned) (X - 2) <= 3
5127 : :
5128 : : We describe each set of comparisons as being either inside or outside
5129 : : a range, using a variable named like IN_P, and then describe the
5130 : : range with a lower and upper bound. If one of the bounds is omitted,
5131 : : it represents either the highest or lowest value of the type.
5132 : :
5133 : : In the comments below, we represent a range by two numbers in brackets
5134 : : preceded by a "+" to designate being inside that range, or a "-" to
5135 : : designate being outside that range, so the condition can be inverted by
5136 : : flipping the prefix. An omitted bound is represented by a "-". For
5137 : : example, "- [-, 10]" means being outside the range starting at the lowest
5138 : : possible value and ending at 10, in other words, being greater than 10.
5139 : : The range "+ [-, -]" is always true and hence the range "- [-, -]" is
5140 : : always false.
5141 : :
5142 : : We set up things so that the missing bounds are handled in a consistent
5143 : : manner so neither a missing bound nor "true" and "false" need to be
5144 : : handled using a special case. */
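
   An exhaustive check (illustrative only) of the example in the comment
   above: the OR-of-equalities form, the AND-of-bounds form, and the single
   unsigned range test all agree:

   #include <assert.h>

   int main (void)
   {
     for (int x = -1000; x <= 1000; x++)
       {
         int ors = (x == 2 || x == 3 || x == 4 || x == 5);
         int ands = (x >= 2 && x <= 5);
         int range = ((unsigned) (x - 2) <= 3u);
         assert (ors == ands && ands == range);
       }
     return 0;
   }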
5145 : :
5146 : : /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
5147 : : of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
5148 : : and UPPER1_P are nonzero if the respective argument is an upper bound
5149 : : and zero for a lower. TYPE, if nonzero, is the type of the result; it
5150 : : must be specified for a comparison. ARG1 will be converted to ARG0's
5151 : : type if both are specified. */
5152 : :
5153 : : static tree
5154 : 20026761 : range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
5155 : : tree arg1, int upper1_p)
5156 : : {
5157 : 20026761 : tree tem;
5158 : 20026761 : int result;
5159 : 20026761 : int sgn0, sgn1;
5160 : :
5161 : : /* If neither arg represents infinity, do the normal operation.
5162 : : Else, if not a comparison, return infinity. Else handle the special
5163 : : comparison rules. Note that most of the cases below won't occur, but
5164 : : are handled for consistency. */
5165 : :
5166 : 20026761 : if (arg0 != 0 && arg1 != 0)
5167 : : {
5168 : 10401038 : tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
5169 : : arg0, fold_convert (TREE_TYPE (arg0), arg1));
5170 : 10401038 : STRIP_NOPS (tem);
5171 : 10401038 : return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
5172 : : }
5173 : :
5174 : 9625723 : if (TREE_CODE_CLASS (code) != tcc_comparison)
5175 : : return 0;
5176 : :
5177 : : /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
5178 : : for neither. In real maths, we cannot assume open ended ranges are
5179 : : the same. But, this is computer arithmetic, where numbers are finite.
5180 : : We can therefore make the transformation of any unbounded range with
5181 : : the value Z, Z being greater than any representable number. This permits
5182 : : us to treat unbounded ranges as equal. */
5183 : 9617179 : sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
5184 : 9617179 : sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
5185 : 9617179 : switch (code)
5186 : : {
5187 : 4459773 : case EQ_EXPR:
5188 : 4459773 : result = sgn0 == sgn1;
5189 : 4459773 : break;
5190 : 0 : case NE_EXPR:
5191 : 0 : result = sgn0 != sgn1;
5192 : 0 : break;
5193 : 388841 : case LT_EXPR:
5194 : 388841 : result = sgn0 < sgn1;
5195 : 388841 : break;
5196 : 2191561 : case LE_EXPR:
5197 : 2191561 : result = sgn0 <= sgn1;
5198 : 2191561 : break;
5199 : 2577004 : case GT_EXPR:
5200 : 2577004 : result = sgn0 > sgn1;
5201 : 2577004 : break;
5202 : 0 : case GE_EXPR:
5203 : 0 : result = sgn0 >= sgn1;
5204 : 0 : break;
5205 : 0 : default:
5206 : 0 : gcc_unreachable ();
5207 : : }
5208 : :
5209 : 9617179 : return constant_boolean_node (result, type);
5210 : : }
5211 : :
5212 : : /* Helper routine for make_range. Perform one step for it, return
5213 : : new expression if the loop should continue or NULL_TREE if it should
5214 : : stop. */
5215 : :
5216 : : tree
5217 : 53982074 : make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
5218 : : tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
5219 : : bool *strict_overflow_p)
5220 : : {
5221 : 53982074 : tree arg0_type = TREE_TYPE (arg0);
5222 : 53982074 : tree n_low, n_high, low = *p_low, high = *p_high;
5223 : 53982074 : int in_p = *p_in_p, n_in_p;
5224 : :
5225 : 53982074 : switch (code)
5226 : : {
5227 : 1545946 : case TRUTH_NOT_EXPR:
5228 : : /* We can only do something if the range is testing for zero. */
5229 : 1545946 : if (low == NULL_TREE || high == NULL_TREE
5230 : 1545946 : || ! integer_zerop (low) || ! integer_zerop (high))
5231 : 0 : return NULL_TREE;
5232 : 1545946 : *p_in_p = ! in_p;
5233 : 1545946 : return arg0;
5234 : :
5235 : 42933174 : case EQ_EXPR: case NE_EXPR:
5236 : 42933174 : case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
5237 : : /* We can only do something if the range is testing for zero
5238 : : and if the second operand is an integer constant. Note that
5239 : : saying something is "in" the range we make is done by
5240 : : complementing IN_P since it will set in the initial case of
5241 : : being not equal to zero; "out" is leaving it alone. */
5242 : 42933174 : if (low == NULL_TREE || high == NULL_TREE
5243 : 42933174 : || ! integer_zerop (low) || ! integer_zerop (high)
5244 : 85866255 : || TREE_CODE (arg1) != INTEGER_CST)
5245 : 16008107 : return NULL_TREE;
5246 : :
5247 : 26925067 : switch (code)
5248 : : {
5249 : : case NE_EXPR: /* - [c, c] */
5250 : : low = high = arg1;
5251 : : break;
5252 : 7023814 : case EQ_EXPR: /* + [c, c] */
5253 : 7023814 : in_p = ! in_p, low = high = arg1;
5254 : 7023814 : break;
5255 : 2043528 : case GT_EXPR: /* - [-, c] */
5256 : 2043528 : low = 0, high = arg1;
5257 : 2043528 : break;
5258 : 847856 : case GE_EXPR: /* + [c, -] */
5259 : 847856 : in_p = ! in_p, low = arg1, high = 0;
5260 : 847856 : break;
5261 : 5201271 : case LT_EXPR: /* - [c, -] */
5262 : 5201271 : low = arg1, high = 0;
5263 : 5201271 : break;
5264 : 4119338 : case LE_EXPR: /* + [-, c] */
5265 : 4119338 : in_p = ! in_p, low = 0, high = arg1;
5266 : 4119338 : break;
5267 : 0 : default:
5268 : 0 : gcc_unreachable ();
5269 : : }
5270 : :
5271 : : /* If this is an unsigned comparison, we also know that EXP is
5272 : : greater than or equal to zero. We base the range tests we make
5273 : : on that fact, so we record it here so we can parse existing
5274 : : range tests. We test arg0_type since often the return type
5275 : : of, e.g. EQ_EXPR, is boolean. */
5276 : 26925067 : if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
5277 : : {
5278 : 1683155 : if (! merge_ranges (&n_in_p, &n_low, &n_high,
5279 : : in_p, low, high, 1,
5280 : 1683155 : build_int_cst (arg0_type, 0),
5281 : : NULL_TREE))
5282 : : return NULL_TREE;
5283 : :
5284 : 1683146 : in_p = n_in_p, low = n_low, high = n_high;
5285 : :
5286 : : /* If the high bound is missing, but we have a nonzero low
5287 : : bound, reverse the range so it goes from zero to the low bound
5288 : : minus 1. */
5289 : 1683146 : if (high == 0 && low && ! integer_zerop (low))
5290 : : {
5291 : 812975 : in_p = ! in_p;
5292 : 812975 : high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
5293 : 812975 : build_int_cst (TREE_TYPE (low), 1), 0);
5294 : 812975 : low = build_int_cst (arg0_type, 0);
5295 : : }
5296 : : }
5297 : :
5298 : 26925058 : *p_low = low;
5299 : 26925058 : *p_high = high;
5300 : 26925058 : *p_in_p = in_p;
5301 : 26925058 : return arg0;
5302 : :
5303 : 183 : case NEGATE_EXPR:
5304 : : /* If flag_wrapv and ARG0_TYPE is signed, make sure
5305 : : low and high are non-NULL, then normalize will DTRT. */
5306 : 183 : if (!TYPE_UNSIGNED (arg0_type)
5307 : 183 : && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
5308 : : {
5309 : 95 : if (low == NULL_TREE)
5310 : 12 : low = TYPE_MIN_VALUE (arg0_type);
5311 : 95 : if (high == NULL_TREE)
5312 : 47 : high = TYPE_MAX_VALUE (arg0_type);
5313 : : }
5314 : :
5315 : : /* (-x) IN [a,b] -> x in [-b, -a] */
5316 : 183 : n_low = range_binop (MINUS_EXPR, exp_type,
5317 : 183 : build_int_cst (exp_type, 0),
5318 : : 0, high, 1);
5319 : 183 : n_high = range_binop (MINUS_EXPR, exp_type,
5320 : 183 : build_int_cst (exp_type, 0),
5321 : : 0, low, 0);
5322 : 183 : if (n_high != 0 && TREE_OVERFLOW (n_high))
5323 : : return NULL_TREE;
5324 : 171 : goto normalize;
5325 : :
5326 : 0 : case BIT_NOT_EXPR:
5327 : : /* ~ X -> -X - 1 */
5328 : 0 : return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
5329 : 0 : build_int_cst (exp_type, 1));
5330 : :
5331 : 770064 : case PLUS_EXPR:
5332 : 770064 : case MINUS_EXPR:
5333 : 770064 : if (TREE_CODE (arg1) != INTEGER_CST)
5334 : : return NULL_TREE;
5335 : :
5336 : : /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
5337 : : move a constant to the other side. */
5338 : 636373 : if (!TYPE_UNSIGNED (arg0_type)
5339 : 636373 : && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
5340 : : return NULL_TREE;
5341 : :
5342 : : /* If EXP is signed, any overflow in the computation is undefined,
5343 : : so we don't worry about it so long as our computations on
5344 : : the bounds don't overflow. For unsigned, overflow is defined
5345 : : and this is exactly the right thing. */
5346 : 829259 : n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5347 : : arg0_type, low, 0, arg1, 0);
5348 : 414998 : n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5349 : : arg0_type, high, 1, arg1, 0);
5350 : 411884 : if ((n_low != 0 && TREE_OVERFLOW (n_low))
5351 : 826869 : || (n_high != 0 && TREE_OVERFLOW (n_high)))
5352 : : return NULL_TREE;
5353 : :
5354 : 414985 : if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
5355 : 20831 : *strict_overflow_p = true;
5356 : :
5357 : 0 : normalize:
5358 : : /* Check for an unsigned range which has wrapped around the maximum
5359 : : value thus making n_high < n_low, and normalize it. */
5360 : 415156 : if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
5361 : : {
5362 : 153748 : low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
5363 : 153748 : build_int_cst (TREE_TYPE (n_high), 1), 0);
5364 : 153748 : high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
5365 : 153748 : build_int_cst (TREE_TYPE (n_low), 1), 0);
5366 : :
5367 : : /* If the range is of the form +/- [ x+1, x ], we won't
5368 : : be able to normalize it. But then, it represents the
5369 : : whole range or the empty set, so make it
5370 : : +/- [ -, - ]. */
5371 : 153748 : if (tree_int_cst_equal (n_low, low)
5372 : 153748 : && tree_int_cst_equal (n_high, high))
5373 : : low = high = 0;
5374 : : else
5375 : 153748 : in_p = ! in_p;
5376 : : }
5377 : : else
5378 : 261408 : low = n_low, high = n_high;
5379 : :
5380 : 415156 : *p_low = low;
5381 : 415156 : *p_high = high;
5382 : 415156 : *p_in_p = in_p;
5383 : 415156 : return arg0;
5384 : :
5385 : 2303645 : CASE_CONVERT:
5386 : 2303645 : case NON_LVALUE_EXPR:
5387 : 2303645 : if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
5388 : : return NULL_TREE;
5389 : :
5390 : 975560 : if (! INTEGRAL_TYPE_P (arg0_type)
5391 : 947231 : || (low != 0 && ! int_fits_type_p (low, arg0_type))
5392 : 835208 : || (high != 0 && ! int_fits_type_p (high, arg0_type)))
5393 : : return NULL_TREE;
5394 : :
5395 : 814127 : n_low = low, n_high = high;
5396 : :
5397 : 814127 : if (n_low != 0)
5398 : 709606 : n_low = fold_convert_loc (loc, arg0_type, n_low);
5399 : :
5400 : 814127 : if (n_high != 0)
5401 : 732342 : n_high = fold_convert_loc (loc, arg0_type, n_high);
5402 : :
5403 : : /* If we're converting arg0 from an unsigned type, to exp,
5404 : : a signed type, we will be doing the comparison as unsigned.
5405 : : The tests above have already verified that LOW and HIGH
5406 : : are both positive.
5407 : :
5408 : : So we have to ensure that we will handle large unsigned
5409 : : values the same way that the current signed bounds treat
5410 : : negative values. */
5411 : :
5412 : 814127 : if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
5413 : : {
5414 : 211674 : tree high_positive;
5415 : 211674 : tree equiv_type;
5416 : : /* For fixed-point modes, we need to pass the saturating flag
5417 : : as the 2nd parameter. */
5418 : 211674 : if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
5419 : 0 : equiv_type
5420 : 0 : = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
5421 : 0 : TYPE_SATURATING (arg0_type));
5422 : 211674 : else if (TREE_CODE (arg0_type) == BITINT_TYPE)
5423 : : equiv_type = arg0_type;
5424 : : else
5425 : 211666 : equiv_type
5426 : 211666 : = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
5427 : :
5428 : : /* A range without an upper bound is, naturally, unbounded.
5429 : : Since convert would have cropped a very large value, use
5430 : : the max value for the destination type. */
5431 : 211674 : high_positive
5432 : 211674 : = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
5433 : 0 : : TYPE_MAX_VALUE (arg0_type);
5434 : :
5435 : 211674 : if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
5436 : 190914 : high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
5437 : : fold_convert_loc (loc, arg0_type,
5438 : : high_positive),
5439 : 190914 : build_int_cst (arg0_type, 1));
5440 : :
5441 : : /* If the low bound is specified, "and" the range with the
5442 : : range for which the original unsigned value will be
5443 : : positive. */
5444 : 211674 : if (low != 0)
5445 : : {
5446 : 112404 : if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
5447 : : 1, fold_convert_loc (loc, arg0_type,
5448 : : integer_zero_node),
5449 : : high_positive))
5450 : : return NULL_TREE;
5451 : :
5452 : 112404 : in_p = (n_in_p == in_p);
5453 : : }
5454 : : else
5455 : : {
5456 : : /* Otherwise, "or" the range with the range of the input
5457 : : that will be interpreted as negative. */
5458 : 99270 : if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
5459 : : 1, fold_convert_loc (loc, arg0_type,
5460 : : integer_zero_node),
5461 : : high_positive))
5462 : : return NULL_TREE;
5463 : :
5464 : 99270 : in_p = (in_p != n_in_p);
5465 : : }
5466 : : }
5467 : :
5468 : : /* Otherwise, if we are converting arg0 from signed type, to exp,
5469 : : an unsigned type, we will do the comparison as signed. If
5470 : : high is non-NULL, we punt above if it doesn't fit in the signed
5471 : : type, so if we get through here, +[-, high] or +[low, high] are
5472 : : equivalent to +[-, n_high] or +[n_low, n_high]. Similarly,
5473 : : +[-, -] or -[-, -] are equivalent too. But if low is specified and
5474 : : high is not, the +[low, -] range is equivalent to union of
5475 : : +[n_low, -] and +[-, -1] ranges, so +[low, -] is equivalent to
5476 : : -[0, n_low-1] and similarly -[low, -] to +[0, n_low-1], except for
5477 : : low being 0, which should be treated as [-, -]. */
5478 : 602453 : else if (TYPE_UNSIGNED (exp_type)
5479 : 585008 : && !TYPE_UNSIGNED (arg0_type)
5480 : 318630 : && low
5481 : 921083 : && !high)
5482 : : {
5483 : 12 : if (integer_zerop (low))
5484 : 12 : n_low = NULL_TREE;
5485 : : else
5486 : : {
5487 : 0 : n_high = fold_build2_loc (loc, PLUS_EXPR, arg0_type,
5488 : 0 : n_low, build_int_cst (arg0_type, -1));
5489 : 0 : n_low = build_zero_cst (arg0_type);
5490 : 0 : in_p = !in_p;
5491 : : }
5492 : : }
5493 : :
5494 : 814127 : *p_low = n_low;
5495 : 814127 : *p_high = n_high;
5496 : 814127 : *p_in_p = in_p;
5497 : 814127 : return arg0;
5498 : :
5499 : : default:
5500 : : return NULL_TREE;
5501 : : }
5502 : : }
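
   Sketch (not GCC code) of the PLUS_EXPR step and the "normalize" label
   above, for an unsigned 8-bit type: (x + c) in [lo, hi] is rewritten as
   x in [lo - c, hi - c] computed mod 256, and when the shifted bounds wrap
   (n_high < n_low) the test is normalized to the complement of
   [n_high + 1, n_low - 1].  The constants below are arbitrary and chosen
   so that the wrapped case is exercised:

   #include <assert.h>
   #include <stdint.h>

   int main (void)
   {
     uint8_t c = 15, lo = 10, hi = 20;
     uint8_t n_lo = (uint8_t) (lo - c), n_hi = (uint8_t) (hi - c);

     for (unsigned xv = 0; xv < 256; xv++)
       {
         uint8_t x = (uint8_t) xv;
         int orig = ((uint8_t) (x + c) >= lo) && ((uint8_t) (x + c) <= hi);
         int shifted;
         if (n_lo <= n_hi)
           shifted = (x >= n_lo && x <= n_hi);
         else
           /* Wrapped: being in range means lying outside the gap.  */
           shifted = !(x >= (uint8_t) (n_hi + 1) && x <= (uint8_t) (n_lo - 1));
         assert (orig == shifted);
       }
     return 0;
   }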
5503 : :
5504 : : /* Given EXP, a logical expression, set the range it is testing into
5505 : : variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
5506 : : actually being tested. *PLOW and *PHIGH will be made of the same
5507 : : type as the returned expression. If EXP is not a comparison, we
5508 : : will most likely not be returning a useful value and range. Set
5509 : : *STRICT_OVERFLOW_P to true if the return value is only valid
5510 : : because signed overflow is undefined; otherwise, do not change
5511 : : *STRICT_OVERFLOW_P. */
5512 : :
5513 : : tree
5514 : 45232522 : make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
5515 : : bool *strict_overflow_p)
5516 : : {
5517 : 45232522 : enum tree_code code;
5518 : 45232522 : tree arg0, arg1 = NULL_TREE;
5519 : 45232522 : tree exp_type, nexp;
5520 : 45232522 : int in_p;
5521 : 45232522 : tree low, high;
5522 : 45232522 : location_t loc = EXPR_LOCATION (exp);
5523 : :
5524 : : /* Start with simply saying "EXP != 0" and then look at the code of EXP
5525 : : and see if we can refine the range. Some of the cases below may not
5526 : : happen, but it doesn't seem worth worrying about this. We "continue"
5527 : : the outer loop when we've changed something; otherwise we "break"
5528 : : the switch, which will "break" the while. */
5529 : :
5530 : 45232522 : in_p = 0;
5531 : 45232522 : low = high = build_int_cst (TREE_TYPE (exp), 0);
5532 : :
5533 : 72176277 : while (1)
5534 : : {
5535 : 72176277 : code = TREE_CODE (exp);
5536 : 72176277 : exp_type = TREE_TYPE (exp);
5537 : 72176277 : arg0 = NULL_TREE;
5538 : :
5539 : 72176277 : if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
5540 : : {
5541 : 49636539 : if (TREE_OPERAND_LENGTH (exp) > 0)
5542 : 49636539 : arg0 = TREE_OPERAND (exp, 0);
5543 : 49636539 : if (TREE_CODE_CLASS (code) == tcc_binary
5544 : 46925186 : || TREE_CODE_CLASS (code) == tcc_comparison
5545 : 57329734 : || (TREE_CODE_CLASS (code) == tcc_expression
5546 : 2515492 : && TREE_OPERAND_LENGTH (exp) > 1))
5547 : 42900687 : arg1 = TREE_OPERAND (exp, 1);
5548 : : }
5549 : 49636539 : if (arg0 == NULL_TREE)
5550 : : break;
5551 : :
5552 : 49636525 : nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
5553 : : &high, &in_p, strict_overflow_p);
5554 : 49636525 : if (nexp == NULL_TREE)
5555 : : break;
5556 : : exp = nexp;
5557 : : }
5558 : :
5559 : : /* If EXP is a constant, we can evaluate whether this is true or false. */
5560 : 45232522 : if (TREE_CODE (exp) == INTEGER_CST)
5561 : : {
5562 : 33029 : in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
5563 : : exp, 0, low, 0))
5564 : 33029 : && integer_onep (range_binop (LE_EXPR, integer_type_node,
5565 : : exp, 1, high, 1)));
5566 : 33029 : low = high = 0;
5567 : 33029 : exp = 0;
5568 : : }
5569 : :
5570 : 45232522 : *pin_p = in_p, *plow = low, *phigh = high;
5571 : 45232522 : return exp;
5572 : : }
5573 : :
5574 : : /* Returns TRUE if [LOW, HIGH] range check can be optimized to
5575 : : a bitwise check i.e. when
5576 : : LOW == 0xXX...X00...0
5577 : : HIGH == 0xXX...X11...1
5578 : : Return corresponding mask in MASK and stem in VALUE. */
5579 : :
5580 : : static bool
5581 : 131 : maskable_range_p (const_tree low, const_tree high, tree type, tree *mask,
5582 : : tree *value)
5583 : : {
5584 : 131 : if (TREE_CODE (low) != INTEGER_CST
5585 : 131 : || TREE_CODE (high) != INTEGER_CST)
5586 : : return false;
5587 : :
5588 : 131 : unsigned prec = TYPE_PRECISION (type);
5589 : 131 : wide_int lo = wi::to_wide (low, prec);
5590 : 131 : wide_int hi = wi::to_wide (high, prec);
5591 : :
5592 : 131 : wide_int end_mask = lo ^ hi;
5593 : 262 : if ((end_mask & (end_mask + 1)) != 0
5594 : 241 : || (lo & end_mask) != 0)
5595 : : return false;
5596 : :
5597 : 86 : wide_int stem_mask = ~end_mask;
5598 : 86 : wide_int stem = lo & stem_mask;
5599 : 86 : if (stem != (hi & stem_mask))
5600 : : return false;
5601 : :
5602 : 86 : *mask = wide_int_to_tree (type, stem_mask);
5603 : 86 : *value = wide_int_to_tree (type, stem);
5604 : :
5605 : 86 : return true;
5606 : 217 : }
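
   A concrete instance (illustrative only) of the maskable pattern above:
   for [0x30, 0x3f] the bounds share the stem 0x30 and differ only in a
   trailing block of ones, so the range check collapses to a single
   mask-and-compare:

   #include <assert.h>

   int main (void)
   {
     for (unsigned x = 0; x < 256; x++)
       assert ((x >= 0x30u && x <= 0x3fu) == ((x & 0xf0u) == 0x30u));
     return 0;
   }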
5607 : :
5608 : : /* Helper routine for build_range_check and match.pd. Return the type to
5609 : : perform the check or NULL if it shouldn't be optimized. */
5610 : :
5611 : : tree
5612 : 518658 : range_check_type (tree etype)
5613 : : {
5614 : : /* First make sure that arithmetics in this type is valid, then make sure
5615 : : that it wraps around. */
5616 : 518658 : if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
5617 : 58350 : etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype), 1);
5618 : :
5619 : 518658 : if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_UNSIGNED (etype))
5620 : : {
5621 : 382899 : tree utype, minv, maxv;
5622 : :
5623 : : /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
5624 : : for the type in question, as we rely on this here. */
5625 : 382899 : utype = unsigned_type_for (etype);
5626 : 382899 : maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
5627 : 382899 : maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
5628 : 382899 : build_int_cst (TREE_TYPE (maxv), 1), 1);
5629 : 382899 : minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
5630 : :
5631 : 382899 : if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
5632 : : minv, 1, maxv, 1)))
5633 : : etype = utype;
5634 : : else
5635 : 0 : return NULL_TREE;
5636 : : }
5637 : 135759 : else if (POINTER_TYPE_P (etype)
5638 : : || TREE_CODE (etype) == OFFSET_TYPE
5639 : : /* Right now all BITINT_TYPEs satisfy
5640 : : (unsigned) max + 1 == (unsigned) min, so no need to verify
5641 : : that like for INTEGER_TYPEs. */
5642 : : || TREE_CODE (etype) == BITINT_TYPE)
5643 : 1357 : etype = unsigned_type_for (etype);
5644 : : return etype;
5645 : : }
5646 : :
5647 : : /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
5648 : : type, TYPE, return an expression to test if EXP is in (or out of, depending
5649 : : on IN_P) the range. Return 0 if the test couldn't be created. */
5650 : :
5651 : : tree
5652 : 1384712 : build_range_check (location_t loc, tree type, tree exp, int in_p,
5653 : : tree low, tree high)
5654 : : {
5655 : 2347441 : tree etype = TREE_TYPE (exp), mask, value;
5656 : :
5657 : : /* Disable this optimization for function pointer expressions
5658 : : on targets that require function pointer canonicalization. */
5659 : 2347441 : if (targetm.have_canonicalize_funcptr_for_compare ()
5660 : 0 : && POINTER_TYPE_P (etype)
5661 : 2347441 : && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (etype)))
5662 : : return NULL_TREE;
5663 : :
5664 : 2347441 : if (! in_p)
5665 : : {
5666 : 339466 : value = build_range_check (loc, type, exp, 1, low, high);
5667 : 339466 : if (value != 0)
5668 : 339466 : return invert_truthvalue_loc (loc, value);
5669 : :
5670 : : return 0;
5671 : : }
5672 : :
5673 : 2007975 : if (low == 0 && high == 0)
5674 : 135368 : return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
5675 : :
5676 : 1872607 : if (low == 0)
5677 : 634568 : return fold_build2_loc (loc, LE_EXPR, type, exp,
5678 : 634568 : fold_convert_loc (loc, etype, high));
5679 : :
5680 : 1238039 : if (high == 0)
5681 : 74959 : return fold_build2_loc (loc, GE_EXPR, type, exp,
5682 : 74959 : fold_convert_loc (loc, etype, low));
5683 : :
5684 : 1163080 : if (operand_equal_p (low, high, 0))
5685 : 200113 : return fold_build2_loc (loc, EQ_EXPR, type, exp,
5686 : 200113 : fold_convert_loc (loc, etype, low));
5687 : :
5688 : 962967 : if (TREE_CODE (exp) == BIT_AND_EXPR
5689 : 962967 : && maskable_range_p (low, high, etype, &mask, &value))
5690 : 86 : return fold_build2_loc (loc, EQ_EXPR, type,
5691 : : fold_build2_loc (loc, BIT_AND_EXPR, etype,
5692 : : exp, mask),
5693 : 86 : value);
5694 : :
5695 : 962881 : if (integer_zerop (low))
5696 : : {
5697 : 531757 : if (! TYPE_UNSIGNED (etype))
5698 : : {
5699 : 92970 : etype = unsigned_type_for (etype);
5700 : 92970 : high = fold_convert_loc (loc, etype, high);
5701 : 92970 : exp = fold_convert_loc (loc, etype, exp);
5702 : : }
5703 : 531757 : return build_range_check (loc, type, exp, 1, 0, high);
5704 : : }
5705 : :
5706 : : /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
5707 : 431124 : if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
5708 : : {
5709 : 97846 : int prec = TYPE_PRECISION (etype);
5710 : :
5711 : 97846 : if (wi::mask <widest_int> (prec - 1, false) == wi::to_widest (high))
5712 : : {
5713 : 152 : if (TYPE_UNSIGNED (etype))
5714 : : {
5715 : 146 : tree signed_etype = signed_type_for (etype);
5716 : 146 : if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
5717 : 0 : etype
5718 : 0 : = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
5719 : : else
5720 : : etype = signed_etype;
5721 : 146 : exp = fold_convert_loc (loc, etype, exp);
5722 : : }
5723 : 152 : return fold_build2_loc (loc, GT_EXPR, type, exp,
5724 : 152 : build_int_cst (etype, 0));
5725 : : }
5726 : : }
5727 : :
5728 : : /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
5729 : : This requires wrap-around arithmetics for the type of the expression. */
5730 : 430972 : etype = range_check_type (etype);
5731 : 430972 : if (etype == NULL_TREE)
5732 : : return NULL_TREE;
5733 : :
5734 : 430972 : high = fold_convert_loc (loc, etype, high);
5735 : 430972 : low = fold_convert_loc (loc, etype, low);
5736 : 430972 : exp = fold_convert_loc (loc, etype, exp);
5737 : :
5738 : 430972 : value = const_binop (MINUS_EXPR, high, low);
5739 : :
5740 : 430972 : if (value != 0 && !TREE_OVERFLOW (value))
5741 : 430972 : return build_range_check (loc, type,
5742 : : fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
5743 : 430972 : 1, build_int_cst (etype, 0), value);
5744 : :
5745 : : return 0;
5746 : : }
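
   Sketch (not GCC code) of the (c >= 1) && (c <= 127) -> (signed char) c > 0
   special case handled in build_range_check above, verified exhaustively
   for an 8-bit unsigned operand.  A two's complement reinterpretation is
   assumed, as on all GCC targets:

   #include <assert.h>
   #include <stdint.h>

   int main (void)
   {
     for (unsigned v = 0; v < 256; v++)
       {
         uint8_t c = (uint8_t) v;
         assert ((c >= 1 && c <= 127) == ((int8_t) c > 0));
       }
     return 0;
   }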
5747 : :
5748 : : /* Return the predecessor of VAL in its type, handling the infinite case. */
5749 : :
5750 : : static tree
5751 : 152229 : range_predecessor (tree val)
5752 : : {
5753 : 152229 : tree type = TREE_TYPE (val);
5754 : :
5755 : 152229 : if (INTEGRAL_TYPE_P (type)
5756 : 152229 : && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
5757 : : return 0;
5758 : : else
5759 : 152229 : return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
5760 : 152229 : build_int_cst (TREE_TYPE (val), 1), 0);
5761 : : }
5762 : :
5763 : : /* Return the successor of VAL in its type, handling the infinite case. */
5764 : :
5765 : : static tree
5766 : 1414623 : range_successor (tree val)
5767 : : {
5768 : 1414623 : tree type = TREE_TYPE (val);
5769 : :
5770 : 1414623 : if (INTEGRAL_TYPE_P (type)
5771 : 1414623 : && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
5772 : : return 0;
5773 : : else
5774 : 1414614 : return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
5775 : 1414614 : build_int_cst (TREE_TYPE (val), 1), 0);
5776 : : }
5777 : :
5778 : : /* Given two ranges, see if we can merge them into one. Return 1 if we
5779 : : can, 0 if we can't. Set the output range into the specified parameters. */
5780 : :
5781 : : bool
5782 : 3100353 : merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
5783 : : tree high0, int in1_p, tree low1, tree high1)
5784 : : {
5785 : 3100353 : bool no_overlap;
5786 : 3100353 : int subset;
5787 : 3100353 : int temp;
5788 : 3100353 : tree tem;
5789 : 3100353 : int in_p;
5790 : 3100353 : tree low, high;
5791 : 3100353 : int lowequal = ((low0 == 0 && low1 == 0)
5792 : 3100353 : || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5793 : 3100353 : low0, 0, low1, 0)));
5794 : 3100353 : int highequal = ((high0 == 0 && high1 == 0)
5795 : 3100353 : || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5796 : 3100353 : high0, 1, high1, 1)));
5797 : :
5798 : : /* Make range 0 be the range that starts first, or ends last if they
5799 : : start at the same value. Swap them if it isn't. */
5800 : 3100353 : if (integer_onep (range_binop (GT_EXPR, integer_type_node,
5801 : : low0, 0, low1, 0))
5802 : 3100353 : || (lowequal
5803 : 532199 : && integer_onep (range_binop (GT_EXPR, integer_type_node,
5804 : : high1, 1, high0, 1))))
5805 : : {
5806 : : temp = in0_p, in0_p = in1_p, in1_p = temp;
5807 : : tem = low0, low0 = low1, low1 = tem;
5808 : : tem = high0, high0 = high1, high1 = tem;
5809 : : }
5810 : :
5811 : : /* If the second range is != high1 where high1 is the type maximum of
5812 : : the type, try first merging with < high1 range. */
5813 : 3100353 : if (low1
5814 : 3100353 : && high1
5815 : 817766 : && TREE_CODE (low1) == INTEGER_CST
5816 : 817766 : && (TREE_CODE (TREE_TYPE (low1)) == INTEGER_TYPE
5817 : 126187 : || (TREE_CODE (TREE_TYPE (low1)) == ENUMERAL_TYPE
5818 : 166858 : && known_eq (TYPE_PRECISION (TREE_TYPE (low1)),
5819 : : GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low1))))))
5820 : 3875361 : && operand_equal_p (low1, high1, 0))
5821 : : {
5822 : 459948 : if (tree_int_cst_equal (low1, TYPE_MAX_VALUE (TREE_TYPE (low1)))
5823 : 459948 : && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5824 : : !in1_p, NULL_TREE, range_predecessor (low1)))
5825 : : return true;
5826 : : /* Similarly for the second range != low1 where low1 is the type minimum
5827 : : of the type, try first merging with > low1 range. */
5828 : 372344 : if (tree_int_cst_equal (low1, TYPE_MIN_VALUE (TREE_TYPE (low1)))
5829 : 372344 : && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5830 : : !in1_p, range_successor (low1), NULL_TREE))
5831 : : return true;
5832 : : }
5833 : :
5834 : : /* Now flag two cases, whether the ranges are disjoint or whether the
5835 : : second range is totally subsumed in the first. Note that the tests
5836 : : below are simplified by the ones above. */
5837 : 2948433 : no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
5838 : : high0, 1, low1, 0));
5839 : 2948433 : subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
5840 : : high1, 1, high0, 1));
5841 : :
5842 : : /* We now have four cases, depending on whether we are including or
5843 : : excluding the two ranges. */
5844 : 2948433 : if (in0_p && in1_p)
5845 : : {
5846 : : /* If they don't overlap, the result is false. If the second range
5847 : : is a subset it is the result. Otherwise, the range is from the start
5848 : : of the second to the end of the first. */
5849 : 1264587 : if (no_overlap)
5850 : : in_p = 0, low = high = 0;
5851 : 1262326 : else if (subset)
5852 : : in_p = 1, low = low1, high = high1;
5853 : : else
5854 : 1133672 : in_p = 1, low = low1, high = high0;
5855 : : }
5856 : :
5857 : 1683846 : else if (in0_p && ! in1_p)
5858 : : {
5859 : : /* If they don't overlap, the result is the first range. If they are
5860 : : equal, the result is false. If the second range is a subset of the
5861 : : first, and the ranges begin at the same place, we go from just after
5862 : : the end of the second range to the end of the first. If the second
5863 : : range is not a subset of the first, or if it is a subset and both
5864 : : ranges end at the same place, the range starts at the start of the
5865 : : first range and ends just before the second range.
5866 : : Otherwise, we can't describe this as a single range. */
5867 : 296437 : if (no_overlap)
5868 : : in_p = 1, low = low0, high = high0;
5869 : 290972 : else if (lowequal && highequal)
5870 : : in_p = 0, low = high = 0;
5871 : 290002 : else if (subset && lowequal)
5872 : : {
5873 : 205366 : low = range_successor (high1);
5874 : 205366 : high = high0;
5875 : 205366 : in_p = 1;
5876 : 205366 : if (low == 0)
5877 : : {
5878 : : /* We are in the weird situation where high0 > high1 but
5879 : : high1 has no successor. Punt. */
5880 : : return 0;
5881 : : }
5882 : : }
5883 : 84636 : else if (! subset || highequal)
5884 : : {
5885 : 56783 : low = low0;
5886 : 56783 : high = range_predecessor (low1);
5887 : 56783 : in_p = 1;
5888 : 56783 : if (high == 0)
5889 : : {
5890 : : /* low0 < low1 but low1 has no predecessor. Punt. */
5891 : : return 0;
5892 : : }
5893 : : }
5894 : : else
5895 : : return 0;
5896 : : }
5897 : :
5898 : 1387409 : else if (! in0_p && in1_p)
5899 : : {
5900 : : /* If they don't overlap, the result is the second range. If the second
5901 : : is a subset of the first, the result is false. Otherwise,
5902 : : the range starts just after the first range and ends at the
5903 : : end of the second. */
5904 : 1077685 : if (no_overlap)
5905 : : in_p = 1, low = low1, high = high1;
5906 : 1072562 : else if (subset || highequal)
5907 : : in_p = 0, low = high = 0;
5908 : : else
5909 : : {
5910 : 955514 : low = range_successor (high0);
5911 : 955514 : high = high1;
5912 : 955514 : in_p = 1;
5913 : 955514 : if (low == 0)
5914 : : {
5915 : : /* high1 > high0 but high0 has no successor. Punt. */
5916 : : return 0;
5917 : : }
5918 : : }
5919 : : }
5920 : :
5921 : : else
5922 : : {
5923 : : /* The case where we are excluding both ranges. Here the complex case
5924 : : is if they don't overlap. In that case, the only time we have a
5925 : : range is if they are adjacent. If the second is a subset of the
5926 : : first, the result is the first. Otherwise, the range to exclude
5927 : : starts at the beginning of the first range and ends at the end of the
5928 : : second. */
5929 : 309724 : if (no_overlap)
5930 : : {
5931 : 188886 : if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5932 : : range_successor (high0),
5933 : : 1, low1, 0)))
5934 : : in_p = 0, low = low0, high = high1;
5935 : : else
5936 : : {
5937 : : /* Canonicalize - [min, x] into - [-, x]. */
5938 : 146933 : if (low0 && TREE_CODE (low0) == INTEGER_CST)
5939 : 145671 : switch (TREE_CODE (TREE_TYPE (low0)))
5940 : : {
5941 : 51187 : case ENUMERAL_TYPE:
5942 : 51187 : if (maybe_ne (TYPE_PRECISION (TREE_TYPE (low0)),
5943 : : GET_MODE_BITSIZE
5944 : 102374 : (TYPE_MODE (TREE_TYPE (low0)))))
5945 : : break;
5946 : : /* FALLTHROUGH */
5947 : 145478 : case INTEGER_TYPE:
5948 : 145478 : if (tree_int_cst_equal (low0,
5949 : 145478 : TYPE_MIN_VALUE (TREE_TYPE (low0))))
5950 : 11281 : low0 = 0;
5951 : : break;
5952 : 193 : case POINTER_TYPE:
5953 : 193 : if (TYPE_UNSIGNED (TREE_TYPE (low0))
5954 : 193 : && integer_zerop (low0))
5955 : : low0 = 0;
5956 : : break;
5957 : : default:
5958 : : break;
5959 : : }
5960 : :
5961 : : /* Canonicalize - [x, max] into - [x, -]. */
5962 : 146933 : if (high1 && TREE_CODE (high1) == INTEGER_CST)
5963 : 146727 : switch (TREE_CODE (TREE_TYPE (high1)))
5964 : : {
5965 : 51193 : case ENUMERAL_TYPE:
5966 : 51193 : if (maybe_ne (TYPE_PRECISION (TREE_TYPE (high1)),
5967 : : GET_MODE_BITSIZE
5968 : 102386 : (TYPE_MODE (TREE_TYPE (high1)))))
5969 : : break;
5970 : : /* FALLTHROUGH */
5971 : 146534 : case INTEGER_TYPE:
5972 : 146534 : if (tree_int_cst_equal (high1,
5973 : 146534 : TYPE_MAX_VALUE (TREE_TYPE (high1))))
5974 : 7641 : high1 = 0;
5975 : : break;
5976 : 193 : case POINTER_TYPE:
5977 : 193 : if (TYPE_UNSIGNED (TREE_TYPE (high1))
5978 : 386 : && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5979 : : high1, 1,
5980 : 193 : build_int_cst (TREE_TYPE (high1), 1),
5981 : : 1)))
5982 : 133 : high1 = 0;
5983 : : break;
5984 : : default:
5985 : : break;
5986 : : }
5987 : :
5988 : : /* The ranges might also be adjacent between the maximum and
5989 : : minimum values of the given type. For
5990 : : - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5991 : : return + [x + 1, y - 1]. */
5992 : 146933 : if (low0 == 0 && high1 == 0)
5993 : : {
5994 : 541 : low = range_successor (high0);
5995 : 541 : high = range_predecessor (low1);
5996 : 541 : if (low == 0 || high == 0)
5997 : : return 0;
5998 : :
5999 : : in_p = 1;
6000 : : }
6001 : : else
6002 : : return 0;
6003 : : }
6004 : : }
6005 : 120838 : else if (subset)
6006 : : in_p = 0, low = low0, high = high0;
6007 : : else
6008 : 15389 : in_p = 0, low = low0, high = high1;
6009 : : }
6010 : :
6011 : 2774179 : *pin_p = in_p, *plow = low, *phigh = high;
6012 : 2774179 : return 1;
6013 : : }
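/* Illustrative sketch (assumed source-level C, not code from this file): the
   net effect of merge_ranges when its caller handles an OR of two touching
   range tests; the ranges are inverted, merged as adjacent excluded ranges,
   and the result inverted back.  */
int range_or_before (int x) { return (x >= 0 && x <= 9) || (x >= 10 && x <= 99); }
int range_or_after  (int x) { return x >= 0 && x <= 99; }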
6014 : :
6015 : :
6016 : : /* Subroutine of fold, looking inside expressions of the form
6017 : : A op B ? A : C, where (ARG00, COMP_CODE, ARG01), ARG1 and ARG2
6018 : : are the three operands of the COND_EXPR. This function is
6019 : : being used also to optimize A op B ? C : A, by reversing the
6020 : : comparison first.
6021 : :
6022 : : Return a folded expression whose code is not a COND_EXPR
6023 : : anymore, or NULL_TREE if no folding opportunity is found. */
6024 : :
6025 : : static tree
6026 : 432526 : fold_cond_expr_with_comparison (location_t loc, tree type,
6027 : : enum tree_code comp_code,
6028 : : tree arg00, tree arg01, tree arg1, tree arg2)
6029 : : {
6030 : 432526 : tree arg1_type = TREE_TYPE (arg1);
6031 : 432526 : tree tem;
6032 : :
6033 : 432526 : STRIP_NOPS (arg1);
6034 : 432526 : STRIP_NOPS (arg2);
6035 : :
6036 : : /* If we have A op 0 ? A : -A, consider applying the following
6037 : : transformations:
6038 : :
6039 : : A == 0? A : -A same as -A
6040 : : A != 0? A : -A same as A
6041 : : A >= 0? A : -A same as abs (A)
6042 : : A > 0? A : -A same as abs (A)
6043 : : A <= 0? A : -A same as -abs (A)
6044 : : A < 0? A : -A same as -abs (A)
6045 : :
6046 : : None of these transformations work for modes with signed
6047 : : zeros. If A is +/-0, the first two transformations will
6048 : : change the sign of the result (from +0 to -0, or vice
6049 : : versa). The last four will fix the sign of the result,
6050 : : even though the original expressions could be positive or
6051 : : negative, depending on the sign of A.
6052 : :
6053 : : Note that all these transformations are correct if A is
6054 : : NaN, since the two alternatives (A and -A) are also NaNs. */
6055 : 432526 : if (!HONOR_SIGNED_ZEROS (type)
6056 : 865062 : && (FLOAT_TYPE_P (TREE_TYPE (arg01))
6057 : 432526 : ? real_zerop (arg01)
6058 : 431466 : : integer_zerop (arg01))
6059 : 1146134 : && ((TREE_CODE (arg2) == NEGATE_EXPR
6060 : 1459 : && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
6061 : : /* In the case that A is of the form X-Y, '-A' (arg2) may
6062 : : have already been folded to Y-X, check for that. */
6063 : 279847 : || (TREE_CODE (arg1) == MINUS_EXPR
6064 : 1718 : && TREE_CODE (arg2) == MINUS_EXPR
6065 : 0 : && operand_equal_p (TREE_OPERAND (arg1, 0),
6066 : 0 : TREE_OPERAND (arg2, 1), 0)
6067 : 0 : && operand_equal_p (TREE_OPERAND (arg1, 1),
6068 : 0 : TREE_OPERAND (arg2, 0), 0))))
6069 : 1235 : switch (comp_code)
6070 : : {
6071 : 0 : case EQ_EXPR:
6072 : 0 : case UNEQ_EXPR:
6073 : 0 : tem = fold_convert_loc (loc, arg1_type, arg1);
6074 : 0 : return fold_convert_loc (loc, type, negate_expr (tem));
6075 : 0 : case NE_EXPR:
6076 : 0 : case LTGT_EXPR:
6077 : 0 : return fold_convert_loc (loc, type, arg1);
6078 : 0 : case UNGE_EXPR:
6079 : 0 : case UNGT_EXPR:
6080 : 0 : if (flag_trapping_math)
6081 : : break;
6082 : : /* Fall through. */
6083 : 1235 : case GE_EXPR:
6084 : 1235 : case GT_EXPR:
6085 : 1235 : if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
6086 : : break;
6087 : 1219 : tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
6088 : 1219 : return fold_convert_loc (loc, type, tem);
6089 : 0 : case UNLE_EXPR:
6090 : 0 : case UNLT_EXPR:
6091 : 0 : if (flag_trapping_math)
6092 : : break;
6093 : : /* FALLTHRU */
6094 : 0 : case LE_EXPR:
6095 : 0 : case LT_EXPR:
6096 : 0 : if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
6097 : : break;
6098 : 0 : if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg1))
6099 : 0 : && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
6100 : : {
6101 : : /* A <= 0 ? A : -A is valid even when A is INT_MIN, but -abs(INT_MIN)
6102 : : is not: it invokes UB both in abs and in the negation of its result.
6103 : : So use ABSU_EXPR instead. */
6104 : 0 : tree utype = unsigned_type_for (TREE_TYPE (arg1));
6105 : 0 : tem = fold_build1_loc (loc, ABSU_EXPR, utype, arg1);
6106 : 0 : tem = negate_expr (tem);
6107 : 0 : return fold_convert_loc (loc, type, tem);
6108 : : }
6109 : : else
6110 : : {
6111 : 0 : tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
6112 : 0 : return negate_expr (fold_convert_loc (loc, type, tem));
6113 : : }
6114 : 0 : default:
6115 : 0 : gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
6116 : : break;
6117 : : }
6118 : :
6119 : : /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
6120 : : A == 0 ? A : 0 is always 0 unless A is -0. Note that
6121 : : both transformations are correct when A is NaN: A != 0
6122 : : is then true, and A == 0 is false. */
6123 : :
6124 : 431307 : if (!HONOR_SIGNED_ZEROS (type)
6125 : 431307 : && integer_zerop (arg01) && integer_zerop (arg2))
6126 : : {
6127 : 234180 : if (comp_code == NE_EXPR)
6128 : 145 : return fold_convert_loc (loc, type, arg1);
6129 : 234035 : else if (comp_code == EQ_EXPR)
6130 : 0 : return build_zero_cst (type);
6131 : : }
6132 : :
6133 : : /* Try some transformations of A op B ? A : B.
6134 : :
6135 : : A == B? A : B same as B
6136 : : A != B? A : B same as A
6137 : : A >= B? A : B same as max (A, B)
6138 : : A > B? A : B same as max (B, A)
6139 : : A <= B? A : B same as min (A, B)
6140 : : A < B? A : B same as min (B, A)
6141 : :
6142 : : As above, these transformations don't work in the presence
6143 : : of signed zeros. For example, if A and B are zeros of
6144 : : opposite sign, the first two transformations will change
6145 : : the sign of the result. In the last four, the original
6146 : : expressions give different results for (A=+0, B=-0) and
6147 : : (A=-0, B=+0), but the transformed expressions do not.
6148 : :
6149 : : The first two transformations are correct if either A or B
6150 : : is a NaN. In the first transformation, the condition will
6151 : : be false, and B will indeed be chosen. In the case of the
6152 : : second transformation, the condition A != B will be true,
6153 : : and A will be chosen.
6154 : :
6155 : : The conversions to max() and min() are not correct if B is
6156 : : a number and A is not. The conditions in the original
6157 : : expressions will be false, so all four give B. The min()
6158 : : and max() versions would give a NaN instead. */
6159 : 431162 : if (!HONOR_SIGNED_ZEROS (type)
6160 : 431162 : && operand_equal_for_comparison_p (arg01, arg2)
6161 : : /* Avoid these transformations if the COND_EXPR may be used
6162 : : as an lvalue in the C++ front-end. PR c++/19199. */
6163 : 673675 : && (in_gimple_form
6164 : 15975 : || VECTOR_TYPE_P (type)
6165 : 15913 : || (! lang_GNU_CXX ()
6166 : 13487 : && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
6167 : 2426 : || ! maybe_lvalue_p (arg1)
6168 : 2407 : || ! maybe_lvalue_p (arg2)))
6169 : : {
6170 : 240862 : tree comp_op0 = arg00;
6171 : 240862 : tree comp_op1 = arg01;
6172 : 240862 : tree comp_type = TREE_TYPE (comp_op0);
6173 : :
6174 : 240862 : switch (comp_code)
6175 : : {
6176 : 0 : case EQ_EXPR:
6177 : 0 : return fold_convert_loc (loc, type, arg2);
6178 : 1 : case NE_EXPR:
6179 : 1 : return fold_convert_loc (loc, type, arg1);
6180 : 5846 : case LE_EXPR:
6181 : 5846 : case LT_EXPR:
6182 : 5846 : case UNLE_EXPR:
6183 : 5846 : case UNLT_EXPR:
6184 : : /* In C++ a ?: expression can be an lvalue, so put the
6185 : : operand which will be used if they are equal first
6186 : : so that we can convert this back to the
6187 : : corresponding COND_EXPR. */
6188 : 5846 : if (!HONOR_NANS (arg1))
6189 : : {
6190 : 5846 : comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
6191 : 5846 : comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
6192 : 11692 : tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
6193 : 5846 : ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
6194 : 4480 : : fold_build2_loc (loc, MIN_EXPR, comp_type,
6195 : : comp_op1, comp_op0);
6196 : 5846 : return fold_convert_loc (loc, type, tem);
6197 : : }
6198 : : break;
6199 : 235015 : case GE_EXPR:
6200 : 235015 : case GT_EXPR:
6201 : 235015 : case UNGE_EXPR:
6202 : 235015 : case UNGT_EXPR:
6203 : 235015 : if (!HONOR_NANS (arg1))
6204 : : {
6205 : 235013 : comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
6206 : 235013 : comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
6207 : 470026 : tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
6208 : 235013 : ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
6209 : 3474 : : fold_build2_loc (loc, MAX_EXPR, comp_type,
6210 : : comp_op1, comp_op0);
6211 : 235013 : return fold_convert_loc (loc, type, tem);
6212 : : }
6213 : : break;
6214 : 0 : case UNEQ_EXPR:
6215 : 0 : if (!HONOR_NANS (arg1))
6216 : 0 : return fold_convert_loc (loc, type, arg2);
6217 : : break;
6218 : 0 : case LTGT_EXPR:
6219 : 0 : if (!HONOR_NANS (arg1))
6220 : 0 : return fold_convert_loc (loc, type, arg1);
6221 : : break;
6222 : 0 : default:
6223 : 0 : gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
6224 : : break;
6225 : : }
6226 : : }
6227 : :
6228 : : return NULL_TREE;
6229 : : }
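/* Illustrative sketch (assumed source-level C, not code from this file) of
   two of the transforms handled above: A > 0 ? A : -A folds to abs (A), and
   the A op B ? A : B shapes fold to MIN_EXPR/MAX_EXPR internally.  */
#include <stdlib.h>
int abs_shape_before (int a) { return a > 0 ? a : -a; }
int abs_shape_after  (int a) { return abs (a); }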
6230 : :
6231 : :
6232 : :
6233 : : #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
6234 : : #define LOGICAL_OP_NON_SHORT_CIRCUIT \
6235 : : (BRANCH_COST (optimize_function_for_speed_p (cfun), \
6236 : : false) >= 2)
6237 : : #endif
6238 : :
6239 : : /* EXP is some logical combination of boolean tests. See if we can
6240 : : merge it into some range test. Return the new tree if so. */
6241 : :
6242 : : static tree
6243 : 22615755 : fold_range_test (location_t loc, enum tree_code code, tree type,
6244 : : tree op0, tree op1)
6245 : : {
6246 : 22615755 : int or_op = (code == TRUTH_ORIF_EXPR
6247 : 22615755 : || code == TRUTH_OR_EXPR);
6248 : 22615755 : int in0_p, in1_p, in_p;
6249 : 22615755 : tree low0, low1, low, high0, high1, high;
6250 : 22615755 : bool strict_overflow_p = false;
6251 : 22615755 : tree tem, lhs, rhs;
6252 : 22615755 : const char * const warnmsg = G_("assuming signed overflow does not occur "
6253 : : "when simplifying range test");
6254 : :
6255 : 22615755 : if (!INTEGRAL_TYPE_P (type))
6256 : : return 0;
6257 : :
6258 : 22615755 : lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
6259 : : /* If op0 is known true or false and this is a short-circuiting
6260 : : operation we must not merge with op1 since that makes side-effects
6261 : : unconditional. So special-case this. */
6262 : 22615755 : if (!lhs
6263 : 2 : && ((code == TRUTH_ORIF_EXPR && in0_p)
6264 : 1 : || (code == TRUTH_ANDIF_EXPR && !in0_p)))
6265 : : return op0;
6266 : 22615753 : rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
6267 : :
6268 : : /* If this is an OR operation, invert both sides; we will invert
6269 : : again at the end. */
6270 : 22615753 : if (or_op)
6271 : 11018813 : in0_p = ! in0_p, in1_p = ! in1_p;
6272 : :
6273 : : /* If both expressions are the same, if we can merge the ranges, and we
6274 : : can build the range test, return it, possibly inverted. If one of the
6275 : : ranges is always true or always false, consider it to be the same
6276 : : expression as the other. */
6277 : 22582728 : if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
6278 : 1017146 : && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
6279 : : in1_p, low1, high1)
6280 : 23485944 : && (tem = (build_range_check (loc, type,
6281 : : lhs != 0 ? lhs
6282 : 0 : : rhs != 0 ? rhs : integer_zero_node,
6283 : : in_p, low, high))) != 0)
6284 : : {
6285 : 870191 : if (strict_overflow_p)
6286 : 259 : fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
6287 : 870191 : return or_op ? invert_truthvalue_loc (loc, tem) : tem;
6288 : : }
6289 : :
6290 : : /* On machines where the branch cost is expensive, if this is a
6291 : : short-circuited branch and the underlying object on both sides
6292 : : is the same, make a non-short-circuit operation. */
6293 : 21745562 : bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
6294 : 21745562 : if (param_logical_op_non_short_circuit != -1)
6295 : 7777 : logical_op_non_short_circuit
6296 : 7777 : = param_logical_op_non_short_circuit;
6297 : 21745562 : if (logical_op_non_short_circuit
6298 : 21741674 : && !sanitize_coverage_p ()
6299 : 21741671 : && lhs != 0 && rhs != 0
6300 : 21741448 : && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6301 : 26046565 : && operand_equal_p (lhs, rhs, 0))
6302 : : {
6303 : : /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
6304 : : unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
6305 : : which cases we can't do this. */
6306 : 107978 : if (simple_operand_p (lhs))
6307 : 63079 : return build2_loc (loc, code == TRUTH_ANDIF_EXPR
6308 : : ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
6309 : 32053 : type, op0, op1);
6310 : :
6311 : 75925 : else if (!lang_hooks.decls.global_bindings_p ()
6312 : 75925 : && !CONTAINS_PLACEHOLDER_P (lhs))
6313 : : {
6314 : 75268 : tree common = save_expr (lhs);
6315 : :
6316 : 126695 : if ((lhs = build_range_check (loc, type, common,
6317 : 51427 : or_op ? ! in0_p : in0_p,
6318 : : low0, high0)) != 0
6319 : 126695 : && (rhs = build_range_check (loc, type, common,
6320 : 51427 : or_op ? ! in1_p : in1_p,
6321 : : low1, high1)) != 0)
6322 : : {
6323 : 75268 : if (strict_overflow_p)
6324 : 0 : fold_overflow_warning (warnmsg,
6325 : : WARN_STRICT_OVERFLOW_COMPARISON);
6326 : 126695 : return build2_loc (loc, code == TRUTH_ANDIF_EXPR
6327 : : ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
6328 : 75268 : type, lhs, rhs);
6329 : : }
6330 : : }
6331 : : }
6332 : :
6333 : : return 0;
6334 : : }
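/* Illustrative sketch (assumed source-level C, not code from this file): the
   "ch >= '0' && ch <= '9'" range test named above becomes a single unsigned
   comparison once build_range_check subtracts the low bound.  */
int is_digit_two_tests (int ch) { return ch >= '0' && ch <= '9'; }
int is_digit_one_test  (int ch) { return (unsigned) (ch - '0') <= 9u; }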
6335 : :
6336 : : /* For an expression that has the form
6337 : : (A && B) || ~B
6338 : : or
6339 : : (A || B) && ~B,
6340 : : we can drop one of the inner expressions and simplify to
6341 : : A || ~B
6342 : : or
6343 : : A && ~B
6344 : : LOC is the location of the resulting expression. OP is the inner
6345 : : logical operation; the left-hand side in the examples above, while CMPOP
6346 : : is the right-hand side. RHS_ONLY is used to prevent us from accidentally
6347 : : removing a condition that guards another, as in
6348 : : (A != NULL && A->...) || A == NULL
6349 : : which we must not transform. If RHS_ONLY is true, only eliminate the
6350 : : right-most operand of the inner logical operation. */
6351 : :
6352 : : static tree
6353 : 132691 : merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
6354 : : bool rhs_only)
6355 : : {
6356 : 132691 : enum tree_code code = TREE_CODE (cmpop);
6357 : 132691 : enum tree_code truthop_code = TREE_CODE (op);
6358 : 132691 : tree lhs = TREE_OPERAND (op, 0);
6359 : 132691 : tree rhs = TREE_OPERAND (op, 1);
6360 : 132691 : tree orig_lhs = lhs, orig_rhs = rhs;
6361 : 132691 : enum tree_code rhs_code = TREE_CODE (rhs);
6362 : 132691 : enum tree_code lhs_code = TREE_CODE (lhs);
6363 : 132691 : enum tree_code inv_code;
6364 : :
6365 : 132691 : if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
6366 : : return NULL_TREE;
6367 : :
6368 : 88689 : if (TREE_CODE_CLASS (code) != tcc_comparison)
6369 : : return NULL_TREE;
6370 : :
6371 : 53938 : tree type = TREE_TYPE (TREE_OPERAND (cmpop, 0));
6372 : :
6373 : 53938 : if (rhs_code == truthop_code)
6374 : : {
6375 : 29 : tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
6376 : 29 : if (newrhs != NULL_TREE)
6377 : : {
6378 : 0 : rhs = newrhs;
6379 : 0 : rhs_code = TREE_CODE (rhs);
6380 : : }
6381 : : }
6382 : 53938 : if (lhs_code == truthop_code && !rhs_only)
6383 : : {
6384 : 460 : tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
6385 : 460 : if (newlhs != NULL_TREE)
6386 : : {
6387 : 0 : lhs = newlhs;
6388 : 0 : lhs_code = TREE_CODE (lhs);
6389 : : }
6390 : : }
6391 : :
6392 : 53938 : inv_code = invert_tree_comparison (code, HONOR_NANS (type));
6393 : 53938 : if (inv_code == rhs_code
6394 : 699 : && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
6395 : 53974 : && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
6396 : : return lhs;
6397 : 53925 : if (!rhs_only && inv_code == lhs_code
6398 : 615 : && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
6399 : 54002 : && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
6400 : : return rhs;
6401 : 53849 : if (rhs != orig_rhs || lhs != orig_lhs)
6402 : 0 : return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
6403 : 0 : lhs, rhs);
6404 : : return NULL_TREE;
6405 : : }
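/* Illustrative sketch (assumed source-level C, not code from this file) of
   the (A && B) || ~B shape described above, with a comparison for B as this
   function requires; the redundant inner test of B is dropped.  */
int guarded_before (int a, int b) { return (a != 0 && b < 10) || b >= 10; }
int guarded_after  (int a, int b) { return a != 0 || b >= 10; }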
6406 : :
6407 : : /* Find ways of folding logical expressions of LHS and RHS:
6408 : : Try to merge two comparisons to the same innermost item.
6409 : : Look for range tests like "ch >= '0' && ch <= '9'".
6410 : : Look for combinations of simple terms on machines with expensive branches
6411 : : and evaluate the RHS unconditionally.
6412 : :
6413 : : We check for both normal comparisons and the BIT_AND_EXPRs made by this
6414 : : function and the one above.
6415 : :
6416 : : CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
6417 : : TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
6418 : :
6419 : : TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
6420 : : two operands.
6421 : :
6422 : : We return the simplified tree or 0 if no optimization is possible. */
6423 : :
6424 : : static tree
6425 : 22331643 : fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
6426 : : tree lhs, tree rhs)
6427 : : {
6428 : : /* If this is the "or" of two comparisons, we can do something if
6429 : : the comparisons are NE_EXPR. If this is the "and", we can do something
6430 : : if the comparisons are EQ_EXPR. I.e.,
6431 : : (a->b == 2 && a->c == 4) can become (a->new == NEW).
6432 : :
6433 : : WANTED_CODE is this operation code. For single bit fields, we can
6434 : : convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
6435 : : comparison for one-bit fields. */
6436 : :
6437 : 22331643 : enum tree_code lcode, rcode;
6438 : 22331643 : tree ll_arg, lr_arg, rl_arg, rr_arg;
6439 : 22331643 : tree result;
6440 : :
6441 : : /* Start by getting the comparison codes. Fail if anything is volatile.
6442 : : If one operand is a BIT_AND_EXPR with the constant one, treat it as if
6443 : : it were surrounded with a NE_EXPR. */
6444 : :
6445 : 22331643 : if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
6446 : : return 0;
6447 : :
6448 : 20180779 : lcode = TREE_CODE (lhs);
6449 : 20180779 : rcode = TREE_CODE (rhs);
6450 : :
6451 : 20180779 : if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
6452 : : {
6453 : 0 : lhs = build2 (NE_EXPR, truth_type, lhs,
6454 : 0 : build_int_cst (TREE_TYPE (lhs), 0));
6455 : 0 : lcode = NE_EXPR;
6456 : : }
6457 : :
6458 : 20180779 : if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
6459 : : {
6460 : 0 : rhs = build2 (NE_EXPR, truth_type, rhs,
6461 : 0 : build_int_cst (TREE_TYPE (rhs), 0));
6462 : 0 : rcode = NE_EXPR;
6463 : : }
6464 : :
6465 : 20180779 : if (TREE_CODE_CLASS (lcode) != tcc_comparison
6466 : 17977712 : || TREE_CODE_CLASS (rcode) != tcc_comparison)
6467 : : return 0;
6468 : :
6469 : 16873383 : ll_arg = TREE_OPERAND (lhs, 0);
6470 : 16873383 : lr_arg = TREE_OPERAND (lhs, 1);
6471 : 16873383 : rl_arg = TREE_OPERAND (rhs, 0);
6472 : 16873383 : rr_arg = TREE_OPERAND (rhs, 1);
6473 : :
6474 : : /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
6475 : 16873383 : if (simple_operand_p (ll_arg)
6476 : 16873383 : && simple_operand_p (lr_arg))
6477 : : {
6478 : 13754937 : if (operand_equal_p (ll_arg, rl_arg, 0)
6479 : 13754937 : && operand_equal_p (lr_arg, rr_arg, 0))
6480 : : {
6481 : 19879 : result = combine_comparisons (loc, code, lcode, rcode,
6482 : : truth_type, ll_arg, lr_arg);
6483 : 19879 : if (result)
6484 : : return result;
6485 : : }
6486 : 13735058 : else if (operand_equal_p (ll_arg, rr_arg, 0)
6487 : 13735058 : && operand_equal_p (lr_arg, rl_arg, 0))
6488 : : {
6489 : 286 : result = combine_comparisons (loc, code, lcode,
6490 : : swap_tree_comparison (rcode),
6491 : : truth_type, ll_arg, lr_arg);
6492 : 286 : if (result)
6493 : : return result;
6494 : : }
6495 : : }
6496 : :
6497 : 33707606 : code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
6498 : 16853803 : ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
6499 : :
6500 : : /* If the RHS can be evaluated unconditionally and its operands are
6501 : : simple, it wins to evaluate the RHS unconditionally on machines
6502 : : with expensive branches. In this case, this isn't a comparison
6503 : : that can be merged. */
6504 : :
6505 : 16853803 : if (BRANCH_COST (optimize_function_for_speed_p (cfun),
6506 : : false) >= 2
6507 : 16853698 : && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
6508 : 15880165 : && simple_operand_p (rl_arg)
6509 : 26224263 : && simple_operand_p (rr_arg))
6510 : : {
6511 : : /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
6512 : 10453142 : if (code == TRUTH_OR_EXPR
6513 : 1419524 : && lcode == NE_EXPR && integer_zerop (lr_arg)
6514 : 566336 : && rcode == NE_EXPR && integer_zerop (rr_arg)
6515 : 23625 : && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
6516 : 10472557 : && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
6517 : 38298 : return build2_loc (loc, NE_EXPR, truth_type,
6518 : 19149 : build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
6519 : : ll_arg, rl_arg),
6520 : 19149 : build_int_cst (TREE_TYPE (ll_arg), 0));
6521 : :
6522 : : /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
6523 : 10433993 : if (code == TRUTH_AND_EXPR
6524 : 1543807 : && lcode == EQ_EXPR && integer_zerop (lr_arg)
6525 : 697794 : && rcode == EQ_EXPR && integer_zerop (rr_arg)
6526 : 4948 : && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
6527 : 10435530 : && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
6528 : 2608 : return build2_loc (loc, EQ_EXPR, truth_type,
6529 : 1304 : build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
6530 : : ll_arg, rl_arg),
6531 : 1304 : build_int_cst (TREE_TYPE (ll_arg), 0));
6532 : : }
6533 : :
6534 : : return 0;
6535 : : }
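/* Illustrative sketch (assumed source-level C, not code from this file) of
   the two branch-removing folds just above: when both operands are simple
   and branches are costly, the short-circuit test is replaced by a bitwise
   OR and a single compare.  */
int any_nonzero_before (int a, int b) { return a != 0 || b != 0; }
int any_nonzero_after  (int a, int b) { return (a | b) != 0; }
int both_zero_before   (int a, int b) { return a == 0 && b == 0; }
int both_zero_after    (int a, int b) { return (a | b) == 0; }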
6536 : :
6537 : : /* T is an integer expression that is being multiplied, divided, or taken a
6538 : : modulus (CODE says which and what kind of divide or modulus) by a
6539 : : constant C. See if we can eliminate that operation by folding it with
6540 : : other operations already in T. WIDE_TYPE, if non-null, is a type that
6541 : : should be used for the computation if wider than our type.
6542 : :
6543 : : For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6544 : : (X * 2) + (Y * 4). We must, however, be assured that either the original
6545 : : expression would not overflow or that overflow is undefined for the type
6546 : : in the language in question.
6547 : :
6548 : : If we return a non-null expression, it is an equivalent form of the
6549 : : original computation, but need not be in the original type.
6550 : :
6551 : : We set *STRICT_OVERFLOW_P to true if the return values depends on
6552 : : signed overflow being undefined. Otherwise we do not change
6553 : : *STRICT_OVERFLOW_P. */
6554 : :
6555 : : static tree
6556 : 84451912 : extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6557 : : bool *strict_overflow_p)
6558 : : {
6559 : : /* To avoid exponential search depth, refuse to allow recursion past
6560 : : three levels. Beyond that (1) it's highly unlikely that we'll find
6561 : : something interesting and (2) we've probably processed it before
6562 : : when we built the inner expression. */
6563 : :
6564 : 84451912 : static int depth;
6565 : 84451912 : tree ret;
6566 : :
6567 : 84451912 : if (depth > 3)
6568 : : return NULL;
6569 : :
6570 : 81250613 : depth++;
6571 : 81250613 : ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6572 : 81250613 : depth--;
6573 : :
6574 : 81250613 : return ret;
6575 : : }
6576 : :
6577 : : static tree
6578 : 81250613 : extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6579 : : bool *strict_overflow_p)
6580 : : {
6581 : 81250613 : tree type = TREE_TYPE (t);
6582 : 81250613 : enum tree_code tcode = TREE_CODE (t);
6583 : 81250613 : tree ctype = type;
6584 : 81250613 : if (wide_type)
6585 : : {
6586 : 26623345 : if (TREE_CODE (type) == BITINT_TYPE
6587 : 26623235 : || TREE_CODE (wide_type) == BITINT_TYPE)
6588 : : {
6589 : 110 : if (TYPE_PRECISION (wide_type) > TYPE_PRECISION (type))
6590 : 8506482 : ctype = wide_type;
6591 : : }
6592 : 26623235 : else if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (wide_type))
6593 : 53246470 : > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type)))
6594 : 8506482 : ctype = wide_type;
6595 : : }
6596 : 81250613 : tree t1, t2;
6597 : 81250613 : bool same_p = tcode == code;
6598 : 81250613 : tree op0 = NULL_TREE, op1 = NULL_TREE;
6599 : 81250613 : bool sub_strict_overflow_p;
6600 : :
6601 : : /* Don't deal with constants of zero here; they confuse the code below. */
6602 : 81250613 : if (integer_zerop (c))
6603 : : return NULL_TREE;
6604 : :
6605 : 81228796 : if (TREE_CODE_CLASS (tcode) == tcc_unary)
6606 : 32067569 : op0 = TREE_OPERAND (t, 0);
6607 : :
6608 : 81228796 : if (TREE_CODE_CLASS (tcode) == tcc_binary)
6609 : 10346555 : op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6610 : :
6611 : : /* Note that we need not handle conditional operations here since fold
6612 : : already handles those cases. So just do arithmetic here. */
6613 : 81228796 : switch (tcode)
6614 : : {
6615 : 3840615 : case INTEGER_CST:
6616 : : /* For a constant, we can always simplify if we are a multiply
6617 : : or (for divide and modulus) if it is a multiple of our constant. */
6618 : 3840615 : if (code == MULT_EXPR
6619 : 4808991 : || wi::multiple_of_p (wi::to_wide (t), wi::to_wide (c),
6620 : 968376 : TYPE_SIGN (type)))
6621 : : {
6622 : 3181475 : tree tem = const_binop (code, fold_convert (ctype, t),
6623 : : fold_convert (ctype, c));
6624 : : /* If the multiplication overflowed, we lost information on it.
6625 : : See PR68142 and PR69845. */
6626 : 3181475 : if (TREE_OVERFLOW (tem))
6627 : : return NULL_TREE;
6628 : : return tem;
6629 : : }
6630 : : break;
6631 : :
6632 : 31473266 : CASE_CONVERT: case NON_LVALUE_EXPR:
6633 : 31473266 : if (!INTEGRAL_TYPE_P (TREE_TYPE (op0)))
6634 : : break;
6635 : : /* If op0 is an expression ... */
6636 : 30533399 : if ((COMPARISON_CLASS_P (op0)
6637 : : || UNARY_CLASS_P (op0)
6638 : 30533399 : || BINARY_CLASS_P (op0)
6639 : 27724517 : || VL_EXP_CLASS_P (op0)
6640 : 27688381 : || EXPRESSION_CLASS_P (op0))
6641 : : /* ... and has wrapping overflow, and its type is smaller
6642 : : than ctype, then we cannot pass through as widening. */
6643 : 30642192 : && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
6644 : 1033446 : && (TYPE_PRECISION (ctype)
6645 : 1033446 : > TYPE_PRECISION (TREE_TYPE (op0))))
6646 : : /* ... or this is a truncation (t is narrower than op0),
6647 : : then we cannot pass through this narrowing. */
6648 : 2386951 : || (TYPE_PRECISION (type)
6649 : 2386951 : < TYPE_PRECISION (TREE_TYPE (op0)))
6650 : : /* ... or signedness changes for division or modulus,
6651 : : then we cannot pass through this conversion. */
6652 : 2361186 : || (code != MULT_EXPR
6653 : 97671 : && (TYPE_UNSIGNED (ctype)
6654 : 97671 : != TYPE_UNSIGNED (TREE_TYPE (op0))))
6655 : : /* ... or has undefined overflow while the converted to
6656 : : type has not, we cannot do the operation in the inner type
6657 : : as that would introduce undefined overflow. */
6658 : 2282813 : || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
6659 : 1798932 : && !TYPE_OVERFLOW_UNDEFINED (type))))
6660 : : break;
6661 : :
6662 : : /* Pass the constant down and see if we can make a simplification. If
6663 : : we can, replace this expression with the inner simplification for
6664 : : possible later conversion to our or some other type. */
6665 : 28152908 : if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6666 : 28152908 : && TREE_CODE (t2) == INTEGER_CST
6667 : 28152908 : && !TREE_OVERFLOW (t2)
6668 : 57047141 : && (t1 = extract_muldiv (op0, t2, code,
6669 : : code == MULT_EXPR ? ctype : NULL_TREE,
6670 : : strict_overflow_p)) != 0)
6671 : : return t1;
6672 : : break;
6673 : :
6674 : 419 : case ABS_EXPR:
6675 : : /* If widening the type changes it from signed to unsigned, then we
6676 : : must avoid building ABS_EXPR itself as unsigned. */
6677 : 419 : if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6678 : : {
6679 : 0 : tree cstype = (*signed_type_for) (ctype);
6680 : 0 : if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6681 : : != 0)
6682 : : {
6683 : 0 : t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6684 : 0 : return fold_convert (ctype, t1);
6685 : : }
6686 : : break;
6687 : : }
6688 : : /* If the constant is negative, we cannot simplify this. */
6689 : 419 : if (tree_int_cst_sgn (c) == -1)
6690 : : break;
6691 : : /* FALLTHROUGH */
6692 : 49960 : case NEGATE_EXPR:
6693 : : /* For division and modulus, type can't be unsigned, as e.g.
6694 : : (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6695 : : For signed types, even with wrapping overflow, this is fine. */
6696 : 49960 : if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6697 : : break;
6698 : 48519 : if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6699 : : != 0)
6700 : 0 : return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6701 : : break;
6702 : :
6703 : 788 : case MIN_EXPR: case MAX_EXPR:
6704 : : /* If widening the type changes the signedness, then we can't perform
6705 : : this optimization as that changes the result. */
6706 : 788 : if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6707 : : break;
6708 : :
6709 : : /* Punt for multiplication altogether.
6710 : : MAX (1U + INT_MAX, 1U) * 2U is not equivalent to
6711 : : MAX ((1U + INT_MAX) * 2U, 1U * 2U), the former is
6712 : : 0U, the latter is 2U.
6713 : : MAX (INT_MIN / 2, 0) * -2 is not equivalent to
6714 : : MIN (INT_MIN / 2 * -2, 0 * -2), the former is
6715 : : well defined 0, the latter invokes UB.
6716 : : MAX (INT_MIN / 2, 5) * 5 is not equivalent to
6717 : : MAX (INT_MIN / 2 * 5, 5 * 5), the former is
6718 : : well defined 25, the latter invokes UB. */
6719 : 788 : if (code == MULT_EXPR)
6720 : : break;
6721 : : /* For division/modulo, punt on c being -1 for MAX, as
6722 : : MAX (INT_MIN, 0) / -1 is not equivalent to
6723 : : MIN (INT_MIN / -1, 0 / -1), the former is well defined
6724 : : 0, the latter invokes UB (or for -fwrapv is INT_MIN).
6725 : : MIN (INT_MIN, 0) / -1 already invokes UB, so the
6726 : : transformation won't make it worse. */
6727 : 8 : else if (tcode == MAX_EXPR && integer_minus_onep (c))
6728 : : break;
6729 : :
6730 : : /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6731 : 8 : sub_strict_overflow_p = false;
6732 : 8 : if ((t1 = extract_muldiv (op0, c, code, wide_type,
6733 : : &sub_strict_overflow_p)) != 0
6734 : 8 : && (t2 = extract_muldiv (op1, c, code, wide_type,
6735 : : &sub_strict_overflow_p)) != 0)
6736 : : {
6737 : 0 : if (tree_int_cst_sgn (c) < 0)
6738 : 0 : tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6739 : 0 : if (sub_strict_overflow_p)
6740 : 0 : *strict_overflow_p = true;
6741 : 0 : return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6742 : : fold_convert (ctype, t2));
6743 : : }
6744 : : break;
6745 : :
6746 : 1176 : case LSHIFT_EXPR: case RSHIFT_EXPR:
6747 : : /* If the second operand is constant, this is a multiplication
6748 : : or floor division, by a power of two, so we can treat it that
6749 : : way unless the multiplier or divisor overflows. Signed
6750 : : left-shift overflow is implementation-defined rather than
6751 : : undefined in C90, so do not convert signed left shift into
6752 : : multiplication. */
6753 : 1176 : if (TREE_CODE (op1) == INTEGER_CST
6754 : 1160 : && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6755 : : /* const_binop may not detect overflow correctly,
6756 : : so check for it explicitly here. */
6757 : 1043 : && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
6758 : 1185 : wi::to_wide (op1))
6759 : 1034 : && (t1 = fold_convert (ctype,
6760 : : const_binop (LSHIFT_EXPR, size_one_node,
6761 : : op1))) != 0
6762 : 2210 : && !TREE_OVERFLOW (t1))
6763 : 1866 : return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6764 : : ? MULT_EXPR : FLOOR_DIV_EXPR,
6765 : : ctype,
6766 : : fold_convert (ctype, op0),
6767 : : t1),
6768 : 1034 : c, code, wide_type, strict_overflow_p);
6769 : : break;
6770 : :
6771 : 7214244 : case PLUS_EXPR: case MINUS_EXPR:
6772 : : /* See if we can eliminate the operation on both sides. If we can, we
6773 : : can return a new PLUS or MINUS. If we can't, the only remaining
6774 : : cases where we can do anything are if the second operand is a
6775 : : constant. */
6776 : 7214244 : sub_strict_overflow_p = false;
6777 : 7214244 : t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6778 : 7214244 : t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6779 : 780436 : if (t1 != 0 && t2 != 0
6780 : 272826 : && TYPE_OVERFLOW_WRAPS (ctype)
6781 : 7478312 : && (code == MULT_EXPR
6782 : : /* If not multiplication, we can only do this if both operands
6783 : : are divisible by c. */
6784 : 0 : || (multiple_of_p (ctype, op0, c)
6785 : 0 : && multiple_of_p (ctype, op1, c))))
6786 : : {
6787 : 264068 : if (sub_strict_overflow_p)
6788 : 0 : *strict_overflow_p = true;
6789 : 264068 : return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6790 : : fold_convert (ctype, t2));
6791 : : }
6792 : :
6793 : : /* If this was a subtraction, negate OP1 and set it to be an addition.
6794 : : This simplifies the logic below. */
6795 : 6950176 : if (tcode == MINUS_EXPR)
6796 : : {
6797 : 1680859 : tcode = PLUS_EXPR, op1 = negate_expr (op1);
6798 : : /* If OP1 was not easily negatable, the constant may be OP0. */
6799 : 1680859 : if (TREE_CODE (op0) == INTEGER_CST)
6800 : : {
6801 : 276669 : std::swap (op0, op1);
6802 : 276669 : std::swap (t1, t2);
6803 : : }
6804 : : }
6805 : :
6806 : 6950176 : if (TREE_CODE (op1) != INTEGER_CST)
6807 : : break;
6808 : :
6809 : : /* If either OP1 or C is negative, this optimization is not safe for
6810 : : some of the division and remainder types while for others we need
6811 : : to change the code. */
6812 : 3161385 : if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6813 : : {
6814 : 164615 : if (code == CEIL_DIV_EXPR)
6815 : : code = FLOOR_DIV_EXPR;
6816 : 164613 : else if (code == FLOOR_DIV_EXPR)
6817 : : code = CEIL_DIV_EXPR;
6818 : 164348 : else if (code != MULT_EXPR
6819 : 164348 : && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6820 : : break;
6821 : : }
6822 : :
6823 : : /* If it's a multiply or a division/modulus operation of a multiple
6824 : : of our constant, do the operation and verify it doesn't overflow. */
6825 : 3157549 : if (code == MULT_EXPR
6826 : 4122677 : || wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6827 : 965128 : TYPE_SIGN (type)))
6828 : : {
6829 : 2498030 : op1 = const_binop (code, fold_convert (ctype, op1),
6830 : : fold_convert (ctype, c));
6831 : : /* We allow the constant to overflow with wrapping semantics. */
6832 : 2498030 : if (op1 == 0
6833 : 2498030 : || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6834 : : break;
6835 : : }
6836 : : else
6837 : : break;
6838 : :
6839 : : /* If we have an unsigned type, we cannot widen the operation since it
6840 : : will change the result if the original computation overflowed. */
6841 : 2494719 : if (TYPE_UNSIGNED (ctype) && ctype != type)
6842 : : break;
6843 : :
6844 : : /* The last case is if we are a multiply. In that case, we can
6845 : : apply the distributive law to commute the multiply and addition
6846 : : if the multiplication of the constants doesn't overflow
6847 : : and overflow is defined. With undefined overflow
6848 : : op0 * c might overflow, while (op0 + orig_op1) * c doesn't.
6849 : : But fold_plusminus_mult_expr would factor back any power-of-two
6850 : : value so do not distribute in the first place in this case. */
6851 : 2494719 : if (code == MULT_EXPR
6852 : 2189697 : && TYPE_OVERFLOW_WRAPS (ctype)
6853 : 4363119 : && !(tree_fits_shwi_p (c) && pow2p_hwi (absu_hwi (tree_to_shwi (c)))))
6854 : 443760 : return fold_build2 (tcode, ctype,
6855 : : fold_build2 (code, ctype,
6856 : : fold_convert (ctype, op0),
6857 : : fold_convert (ctype, c)),
6858 : : op1);
6859 : :
6860 : : break;
6861 : :
6862 : 2102639 : case MULT_EXPR:
6863 : : /* We have a special case here if we are doing something like
6864 : : (C * 8) % 4 since we know that's zero. */
6865 : 2102639 : if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6866 : 2102639 : || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6867 : : /* If the multiplication can overflow we cannot optimize this. */
6868 : 10604 : && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6869 : 346 : && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6870 : 2113243 : && wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6871 : 301 : TYPE_SIGN (type)))
6872 : : {
6873 : 8 : *strict_overflow_p = true;
6874 : 8 : return omit_one_operand (type, integer_zero_node, op0);
6875 : : }
6876 : :
6877 : : /* ... fall through ... */
6878 : :
6879 : 2279208 : case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6880 : 2279208 : case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6881 : : /* If we can extract our operation from the LHS, do so and return a
6882 : : new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6883 : : do something only if the second operand is a constant. */
6884 : 2279208 : if (same_p
6885 : 1972685 : && TYPE_OVERFLOW_WRAPS (ctype)
6886 : 4078174 : && (t1 = extract_muldiv (op0, c, code, wide_type,
6887 : : strict_overflow_p)) != 0)
6888 : 57378 : return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6889 : : fold_convert (ctype, op1));
6890 : 2221830 : else if (tcode == MULT_EXPR && code == MULT_EXPR
6891 : 1913166 : && TYPE_OVERFLOW_WRAPS (ctype)
6892 : 3961325 : && (t1 = extract_muldiv (op1, c, code, wide_type,
6893 : : strict_overflow_p)) != 0)
6894 : 920866 : return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6895 : : fold_convert (ctype, t1));
6896 : 1300964 : else if (TREE_CODE (op1) != INTEGER_CST)
6897 : : return 0;
6898 : :
6899 : : /* If these are the same operation types, we can associate them
6900 : : assuming no overflow. */
6901 : 501424 : if (tcode == code)
6902 : : {
6903 : 195391 : bool overflow_p = false;
6904 : 195391 : wi::overflow_type overflow_mul;
6905 : 195391 : signop sign = TYPE_SIGN (ctype);
6906 : 195391 : unsigned prec = TYPE_PRECISION (ctype);
6907 : 390782 : wide_int mul = wi::mul (wi::to_wide (op1, prec),
6908 : 195391 : wi::to_wide (c, prec),
6909 : 195391 : sign, &overflow_mul);
6910 : 195391 : overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6911 : 195391 : if (overflow_mul
6912 : 1200 : && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6913 : : overflow_p = true;
6914 : 195325 : if (!overflow_p)
6915 : 195325 : return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6916 : : wide_int_to_tree (ctype, mul));
6917 : 195391 : }
6918 : :
6919 : : /* If these operations "cancel" each other, we have the main
6920 : : optimizations of this pass, which occur when either constant is a
6921 : : multiple of the other, in which case we replace this with either an
6922 : : operation or CODE or TCODE.
6923 : :
6924 : : If we have an unsigned type, we cannot do this since it will change
6925 : : the result if the original computation overflowed. */
6926 : 306099 : if (TYPE_OVERFLOW_UNDEFINED (ctype)
6927 : 28183 : && !TYPE_OVERFLOW_SANITIZED (ctype)
6928 : 334239 : && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6929 : 28100 : || (tcode == MULT_EXPR
6930 : 28100 : && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6931 : 872 : && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6932 : 848 : && code != MULT_EXPR)))
6933 : : {
6934 : 883 : if (wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6935 : 883 : TYPE_SIGN (type)))
6936 : : {
6937 : 106 : *strict_overflow_p = true;
6938 : 106 : return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6939 : : fold_convert (ctype,
6940 : : const_binop (TRUNC_DIV_EXPR,
6941 : : op1, c)));
6942 : : }
6943 : 777 : else if (wi::multiple_of_p (wi::to_wide (c), wi::to_wide (op1),
6944 : 777 : TYPE_SIGN (type)))
6945 : : {
6946 : 64 : *strict_overflow_p = true;
6947 : 64 : return fold_build2 (code, ctype, fold_convert (ctype, op0),
6948 : : fold_convert (ctype,
6949 : : const_binop (TRUNC_DIV_EXPR,
6950 : : c, op1)));
6951 : : }
6952 : : }
6953 : : break;
6954 : :
6955 : : default:
6956 : : break;
6957 : : }
6958 : :
6959 : : return 0;
6960 : : }
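/* Illustrative sketch (assumed source-level C, not code from this file) of
   the example in extract_muldiv's leading comment; the fold requires that
   the original computation not overflow, which undefined signed overflow
   lets the compiler assume here.  */
long muldiv_before (long x, long y) { return (x * 8 + y * 16) / 4; }
long muldiv_after  (long x, long y) { return x * 2 + y * 4; }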
6961 : :
6962 : : /* Return a node which has the indicated constant VALUE (either 0 or
6963 : : 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6964 : : and is of the indicated TYPE. */
6965 : :
6966 : : tree
6967 : 79373949 : constant_boolean_node (bool value, tree type)
6968 : : {
6969 : 79373949 : if (type == integer_type_node)
6970 : 17746383 : return value ? integer_one_node : integer_zero_node;
6971 : 61627566 : else if (type == boolean_type_node)
6972 : 57763940 : return value ? boolean_true_node : boolean_false_node;
6973 : 3863626 : else if (VECTOR_TYPE_P (type))
6974 : 584 : return build_vector_from_val (type,
6975 : 584 : build_int_cst (TREE_TYPE (type),
6976 : 923 : value ? -1 : 0));
6977 : : else
6978 : 3863042 : return fold_convert (type, value ? integer_one_node : integer_zero_node);
6979 : : }
6980 : :
6981 : :
6982 : : /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6983 : : Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6984 : : CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6985 : : expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6986 : : COND is the first argument to CODE; otherwise (as in the example
6987 : : given here), it is the second argument. TYPE is the type of the
6988 : : original expression. Return NULL_TREE if no simplification is
6989 : : possible. */
6990 : :
6991 : : static tree
6992 : 931641 : fold_binary_op_with_conditional_arg (location_t loc,
6993 : : enum tree_code code,
6994 : : tree type, tree op0, tree op1,
6995 : : tree cond, tree arg, int cond_first_p)
6996 : : {
6997 : 931641 : tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6998 : 931641 : tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6999 : 931641 : tree test, true_value, false_value;
7000 : 931641 : tree lhs = NULL_TREE;
7001 : 931641 : tree rhs = NULL_TREE;
7002 : 931641 : enum tree_code cond_code = COND_EXPR;
7003 : :
7004 : : /* Do not move possibly trapping operations into the conditional as this
7005 : : pessimizes code and causes gimplification issues when applied late. */
7006 : 951460 : if (operation_could_trap_p (code, FLOAT_TYPE_P (type),
7007 : 197365 : ANY_INTEGRAL_TYPE_P (type)
7008 : 934225 : && TYPE_OVERFLOW_TRAPS (type), op1))
7009 : : return NULL_TREE;
7010 : :
7011 : 911656 : if (TREE_CODE (cond) == COND_EXPR
7012 : 343179 : || TREE_CODE (cond) == VEC_COND_EXPR)
7013 : : {
7014 : 571028 : test = TREE_OPERAND (cond, 0);
7015 : 571028 : true_value = TREE_OPERAND (cond, 1);
7016 : 571028 : false_value = TREE_OPERAND (cond, 2);
7017 : : /* If this operand is a throw expression (and hence has void type),
7018 : : it does not make sense to try to perform a logical or arithmetic
7019 : : operation involving it. */
7020 : 571028 : if (VOID_TYPE_P (TREE_TYPE (true_value)))
7021 : 7463 : lhs = true_value;
7022 : 571028 : if (VOID_TYPE_P (TREE_TYPE (false_value)))
7023 : 6 : rhs = false_value;
7024 : : }
7025 : 340628 : else if (!(TREE_CODE (type) != VECTOR_TYPE
7026 : 340604 : && VECTOR_TYPE_P (TREE_TYPE (cond))))
7027 : : {
7028 : 339079 : tree testtype = TREE_TYPE (cond);
7029 : 339079 : test = cond;
7030 : 339079 : true_value = constant_boolean_node (true, testtype);
7031 : 339079 : false_value = constant_boolean_node (false, testtype);
7032 : : }
7033 : : else
7034 : : /* Detect the case of mixing vector and scalar types - bail out. */
7035 : : return NULL_TREE;
7036 : :
7037 : 910107 : if (VECTOR_TYPE_P (TREE_TYPE (test)))
7038 : 2575 : cond_code = VEC_COND_EXPR;
7039 : :
7040 : : /* This transformation is only worthwhile if we don't have to wrap ARG
7041 : : in a SAVE_EXPR and the operation can be simplified without recursing
7042 : : on at least one of the branches once its pushed inside the COND_EXPR. */
7043 : 910107 : if (!TREE_CONSTANT (arg)
7044 : 910107 : && (TREE_SIDE_EFFECTS (arg)
7045 : 434360 : || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
7046 : 429899 : || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
7047 : : return NULL_TREE;
7048 : :
7049 : 490415 : arg = fold_convert_loc (loc, arg_type, arg);
7050 : 490415 : if (lhs == 0)
7051 : : {
7052 : 484384 : true_value = fold_convert_loc (loc, cond_type, true_value);
7053 : 484384 : if (cond_first_p)
7054 : 474206 : lhs = fold_build2_loc (loc, code, type, true_value, arg);
7055 : : else
7056 : 10178 : lhs = fold_build2_loc (loc, code, type, arg, true_value);
7057 : : }
7058 : 490415 : if (rhs == 0)
7059 : : {
7060 : 490409 : false_value = fold_convert_loc (loc, cond_type, false_value);
7061 : 490409 : if (cond_first_p)
7062 : 479670 : rhs = fold_build2_loc (loc, code, type, false_value, arg);
7063 : : else
7064 : 10739 : rhs = fold_build2_loc (loc, code, type, arg, false_value);
7065 : : }
7066 : :
7067 : : /* Check that we have simplified at least one of the branches. */
7068 : 490415 : if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
7069 : : return NULL_TREE;
7070 : :
7071 : 471654 : return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
7072 : : }
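/* Illustrative sketch (assumed source-level C, not code from this file):
   the operation is pushed into the COND_EXPR arms only when at least one
   arm then simplifies, as the a + 0 arm does here.  */
int cond_arg_before (int b, int a) { return a + (b ? 0 : 1); }
int cond_arg_after  (int b, int a) { return b ? a : a + 1; }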
7073 : :
7074 : :
7075 : : /* Subroutine of fold() that checks for the addition of ARG +/- 0.0.
7076 : :
7077 : : If !NEGATE, return true if ZERO_ARG is +/-0.0 and, for all ARG of
7078 : : type TYPE, ARG + ZERO_ARG is the same as ARG. If NEGATE, return true
7079 : : if ARG - ZERO_ARG is the same as X.
7080 : :
7081 : : If ARG is NULL, check for any value of type TYPE.
7082 : :
7083 : : X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
7084 : : and finite. The problematic cases are when X is zero, and its mode
7085 : : has signed zeros. In the case of rounding towards -infinity,
7086 : : X - 0 is not the same as X because 0 - 0 is -0. In other rounding
7087 : : modes, X + 0 is not the same as X because -0 + 0 is 0. */
7088 : :
7089 : : bool
7090 : 643188 : fold_real_zero_addition_p (const_tree type, const_tree arg,
7091 : : const_tree zero_arg, int negate)
7092 : : {
7093 : 643188 : if (!real_zerop (zero_arg))
7094 : : return false;
7095 : :
7096 : : /* Don't allow the fold with -fsignaling-nans. */
7097 : 642402 : if (arg ? tree_expr_maybe_signaling_nan_p (arg) : HONOR_SNANS (type))
7098 : : return false;
7099 : :
7100 : : /* Allow the fold if zeros aren't signed, or their sign isn't important. */
7101 : 639062 : if (!HONOR_SIGNED_ZEROS (type))
7102 : : return true;
7103 : :
7104 : : /* There is no case that is safe for all rounding modes. */
7105 : 622503 : if (HONOR_SIGN_DEPENDENT_ROUNDING (type))
7106 : : return false;
7107 : :
7108 : : /* In a vector or complex, we would need to check the sign of all zeros. */
7109 : 621840 : if (TREE_CODE (zero_arg) == VECTOR_CST)
7110 : 1088 : zero_arg = uniform_vector_p (zero_arg);
7111 : 621840 : if (!zero_arg || TREE_CODE (zero_arg) != REAL_CST)
7112 : 1178 : return false;
7113 : :
7114 : : /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
7115 : 620662 : if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (zero_arg)))
7116 : 226 : negate = !negate;
7117 : :
7118 : : /* The mode has signed zeros, and we have to honor their sign.
7119 : : In this situation, there are only two cases we can return true for.
7120 : : (i) X - 0 is the same as X with default rounding.
7121 : : (ii) X + 0 is X when X can't possibly be -0.0. */
7122 : 620662 : return negate || (arg && !tree_expr_maybe_real_minus_zero_p (arg));
7123 : : }
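/* Illustrative sketch (assumed source-level C, not code from this file) of
   why the sign of zero matters above: with default rounding, x - 0.0 is
   always x, but (-0.0) + 0.0 is +0.0, so x + 0.0 folds to x only when x
   cannot be -0.0.  */
double sub_zero (double x) { return x - 0.0; }   /* foldable to x */
double add_zero (double x) { return x + 0.0; }   /* kept if x may be -0.0 */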
7124 : :
7125 : : /* Subroutine of match.pd that optimizes comparisons of a division by
7126 : : a nonzero integer constant against an integer constant, i.e.
7127 : : X/C1 op C2.
7128 : :
7129 : : CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
7130 : : GE_EXPR or LE_EXPR. C1 and C2 must be INTEGER_CSTs. */
7131 : :
7132 : : enum tree_code
7133 : 1600428 : fold_div_compare (enum tree_code code, tree c1, tree c2, tree *lo,
7134 : : tree *hi, bool *neg_overflow)
7135 : : {
7136 : 1600428 : tree prod, tmp, type = TREE_TYPE (c1);
7137 : 1600428 : signop sign = TYPE_SIGN (type);
7138 : 1600428 : wi::overflow_type overflow;
7139 : :
7140 : : /* We have to do this the hard way to detect unsigned overflow.
7141 : : prod = int_const_binop (MULT_EXPR, c1, c2); */
7142 : 1600428 : wide_int val = wi::mul (wi::to_wide (c1), wi::to_wide (c2), sign, &overflow);
7143 : 1600428 : prod = force_fit_type (type, val, -1, overflow);
7144 : 1600428 : *neg_overflow = false;
7145 : :
7146 : 1600428 : if (sign == UNSIGNED)
7147 : : {
7148 : 1573444 : tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7149 : 1573444 : *lo = prod;
7150 : :
7151 : : /* Likewise *hi = int_const_binop (PLUS_EXPR, prod, tmp). */
7152 : 1573444 : val = wi::add (wi::to_wide (prod), wi::to_wide (tmp), sign, &overflow);
7153 : 1573444 : *hi = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (prod));
7154 : : }
7155 : 26984 : else if (tree_int_cst_sgn (c1) >= 0)
7156 : : {
7157 : 25593 : tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7158 : 25593 : switch (tree_int_cst_sgn (c2))
7159 : : {
7160 : 4776 : case -1:
7161 : 4776 : *neg_overflow = true;
7162 : 4776 : *lo = int_const_binop (MINUS_EXPR, prod, tmp);
7163 : 4776 : *hi = prod;
7164 : 4776 : break;
7165 : :
7166 : 12451 : case 0:
7167 : 12451 : *lo = fold_negate_const (tmp, type);
7168 : 12451 : *hi = tmp;
7169 : 12451 : break;
7170 : :
7171 : 8366 : case 1:
7172 : 8366 : *hi = int_const_binop (PLUS_EXPR, prod, tmp);
7173 : 8366 : *lo = prod;
7174 : 8366 : break;
7175 : :
7176 : 0 : default:
7177 : 0 : gcc_unreachable ();
7178 : : }
7179 : : }
7180 : : else
7181 : : {
7182 : : /* A negative divisor reverses the relational operators. */
7183 : 1391 : code = swap_tree_comparison (code);
7184 : :
7185 : 1391 : tmp = int_const_binop (PLUS_EXPR, c1, build_int_cst (type, 1));
7186 : 1391 : switch (tree_int_cst_sgn (c2))
7187 : : {
7188 : 132 : case -1:
7189 : 132 : *hi = int_const_binop (MINUS_EXPR, prod, tmp);
7190 : 132 : *lo = prod;
7191 : 132 : break;
7192 : :
7193 : 161 : case 0:
7194 : 161 : *hi = fold_negate_const (tmp, type);
7195 : 161 : *lo = tmp;
7196 : 161 : break;
7197 : :
7198 : 1098 : case 1:
7199 : 1098 : *neg_overflow = true;
7200 : 1098 : *lo = int_const_binop (PLUS_EXPR, prod, tmp);
7201 : 1098 : *hi = prod;
7202 : 1098 : break;
7203 : :
7204 : 0 : default:
7205 : 0 : gcc_unreachable ();
7206 : : }
7207 : : }
7208 : :
7209 : 1600428 : if (code != EQ_EXPR && code != NE_EXPR)
7210 : : return code;
7211 : :
7212 : 15993 : if (TREE_OVERFLOW (*lo)
7213 : 15993 : || operand_equal_p (*lo, TYPE_MIN_VALUE (type), 0))
7214 : 727 : *lo = NULL_TREE;
7215 : 15993 : if (TREE_OVERFLOW (*hi)
7216 : 15993 : || operand_equal_p (*hi, TYPE_MAX_VALUE (type), 0))
7217 : 92 : *hi = NULL_TREE;
7218 : :
7219 : : return code;
7220 : 1600428 : }
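/* Illustrative sketch (assumed source-level C, not code from this file):
   for unsigned X, X / 4 == 3 becomes the range check 12 <= X <= 15, i.e.
   *lo = C1 * C2 and *hi = *lo + C1 - 1 as computed above.  */
int div_cmp_before (unsigned x) { return x / 4 == 3; }
int div_cmp_after  (unsigned x) { return x >= 12 && x <= 15; }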
7221 : :
7222 : : /* Test whether it is preferable to swap two operands, ARG0 and
7223 : : ARG1, for example because ARG0 is an integer constant and ARG1
7224 : : isn't. */
7225 : :
7226 : : bool
7227 : 1420905307 : tree_swap_operands_p (const_tree arg0, const_tree arg1)
7228 : : {
7229 : 1420905307 : if (CONSTANT_CLASS_P (arg1))
7230 : : return false;
7231 : 453335601 : if (CONSTANT_CLASS_P (arg0))
7232 : : return true;
7233 : :
7234 : 418685513 : STRIP_NOPS (arg0);
7235 : 418685513 : STRIP_NOPS (arg1);
7236 : :
7237 : 418685513 : if (TREE_CONSTANT (arg1))
7238 : : return false;
7239 : 406548169 : if (TREE_CONSTANT (arg0))
7240 : : return true;
7241 : :
7242 : : /* It is preferable to swap two SSA_NAME to ensure a canonical form
7243 : : for commutative and comparison operators. Ensuring a canonical
7244 : : form allows the optimizers to find additional redundancies without
7245 : : having to explicitly check for both orderings. */
7246 : 406062218 : if (TREE_CODE (arg0) == SSA_NAME
7247 : 297712811 : && TREE_CODE (arg1) == SSA_NAME
7248 : 697826909 : && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
7249 : : return true;
7250 : :
7251 : : /* Put SSA_NAMEs last. */
7252 : 383190976 : if (TREE_CODE (arg1) == SSA_NAME)
7253 : : return false;
7254 : 94014887 : if (TREE_CODE (arg0) == SSA_NAME)
7255 : : return true;
7256 : :
7257 : : /* Put variables last. */
7258 : 88066767 : if (DECL_P (arg1))
7259 : : return false;
7260 : 47442689 : if (DECL_P (arg0))
7261 : : return true;
7262 : :
7263 : : return false;
7264 : : }
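A hedged sketch of the usual caller pattern (assumed, not copied from this file; op0, op1 and cmp_code are hypothetical caller variables): a folder canonicalizes a commutative or comparison operation by swapping operands whenever the predicate says so, so constants, DECLs and SSA_NAMEs end up in a predictable position.

    if (tree_swap_operands_p (op0, op1))
      {
        std::swap (op0, op1);
        /* For comparisons the comparison code has to be swapped as well,
           e.g. with swap_tree_comparison as done elsewhere in this file.  */
        cmp_code = swap_tree_comparison (cmp_code);
      }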
7265 : :
7266 : :
7267 : : /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7268 : : means A >= Y && A != MAX, but in this case we know that
7269 : : A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7270 : :
7271 : : static tree
7272 : 21621782 : fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7273 : : {
7274 : 21621782 : tree a, typea, type = TREE_TYPE (bound), a1, diff, y;
7275 : :
7276 : 21621782 : if (TREE_CODE (bound) == LT_EXPR)
7277 : 4757818 : a = TREE_OPERAND (bound, 0);
7278 : 16863964 : else if (TREE_CODE (bound) == GT_EXPR)
7279 : 2353946 : a = TREE_OPERAND (bound, 1);
7280 : : else
7281 : : return NULL_TREE;
7282 : :
7283 : 7111764 : typea = TREE_TYPE (a);
7284 : 7111764 : if (!INTEGRAL_TYPE_P (typea)
7285 : 326189 : && !POINTER_TYPE_P (typea))
7286 : : return NULL_TREE;
7287 : :
7288 : 6932556 : if (TREE_CODE (ineq) == LT_EXPR)
7289 : : {
7290 : 1404460 : a1 = TREE_OPERAND (ineq, 1);
7291 : 1404460 : y = TREE_OPERAND (ineq, 0);
7292 : : }
7293 : 5528096 : else if (TREE_CODE (ineq) == GT_EXPR)
7294 : : {
7295 : 989713 : a1 = TREE_OPERAND (ineq, 0);
7296 : 989713 : y = TREE_OPERAND (ineq, 1);
7297 : : }
7298 : : else
7299 : : return NULL_TREE;
7300 : :
7301 : 2394173 : if (TREE_TYPE (a1) != typea)
7302 : : return NULL_TREE;
7303 : :
7304 : 1670185 : if (POINTER_TYPE_P (typea))
7305 : : {
7306 : :       /* Convert the pointer types into integers before taking the difference.  */
7307 : 8321 : tree ta = fold_convert_loc (loc, ssizetype, a);
7308 : 8321 : tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7309 : 8321 : diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7310 : : }
7311 : : else
7312 : 1661864 : diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7313 : :
7314 : 1670185 : if (!diff || !integer_onep (diff))
7315 : 1659790 : return NULL_TREE;
7316 : :
7317 : 10395 : return fold_build2_loc (loc, GE_EXPR, type, a, y);
7318 : : }
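The identity behind the transform is that, absent wrap-around, A + 1 > Y is the same as A >= Y; the BOUND A < X is what rules the wrap-around out. A tiny exhaustive check of the identity over a small range, with hypothetical values (plain C, not the GCC API):

    #include <assert.h>

    int
    main (void)
    {
      for (int a = -8; a < 8; a++)
        for (int y = -8; y < 8; y++)
          assert ((a + 1 > y) == (a >= y));   /* no overflow in this range */
      return 0;
    }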
7319 : :
7320 : : /* Fold a sum or difference of at least one multiplication.
7321 : : Returns the folded tree or NULL if no simplification could be made. */
7322 : :
7323 : : static tree
7324 : 8467288 : fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7325 : : tree arg0, tree arg1)
7326 : : {
7327 : 8467288 : tree arg00, arg01, arg10, arg11;
7328 : 8467288 : tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7329 : :
7330 : : /* (A * C) +- (B * C) -> (A+-B) * C.
7331 : : (A * C) +- A -> A * (C+-1).
7332 : : We are most concerned about the case where C is a constant,
7333 : : but other combinations show up during loop reduction. Since
7334 : : it is not difficult, try all four possibilities. */
7335 : :
7336 : 8467288 : if (TREE_CODE (arg0) == MULT_EXPR)
7337 : : {
7338 : 7288947 : arg00 = TREE_OPERAND (arg0, 0);
7339 : 7288947 : arg01 = TREE_OPERAND (arg0, 1);
7340 : : }
7341 : 1178341 : else if (TREE_CODE (arg0) == INTEGER_CST)
7342 : : {
7343 : 70109 : arg00 = build_one_cst (type);
7344 : 70109 : arg01 = arg0;
7345 : : }
7346 : : else
7347 : : {
7348 : : /* We cannot generate constant 1 for fract. */
7349 : 1108232 : if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7350 : 0 : return NULL_TREE;
7351 : 1108232 : arg00 = arg0;
7352 : 1108232 : arg01 = build_one_cst (type);
7353 : : }
7354 : 8467288 : if (TREE_CODE (arg1) == MULT_EXPR)
7355 : : {
7356 : 2484270 : arg10 = TREE_OPERAND (arg1, 0);
7357 : 2484270 : arg11 = TREE_OPERAND (arg1, 1);
7358 : : }
7359 : 5983018 : else if (TREE_CODE (arg1) == INTEGER_CST)
7360 : : {
7361 : 3328795 : arg10 = build_one_cst (type);
7362 : : /* As we canonicalize A - 2 to A + -2 get rid of that sign for
7363 : : the purpose of this canonicalization. */
7364 : 6436134 : if (wi::neg_p (wi::to_wide (arg1), TYPE_SIGN (TREE_TYPE (arg1)))
7365 : 224744 : && negate_expr_p (arg1)
7366 : 3550251 : && code == PLUS_EXPR)
7367 : : {
7368 : 221456 : arg11 = negate_expr (arg1);
7369 : 221456 : code = MINUS_EXPR;
7370 : : }
7371 : : else
7372 : : arg11 = arg1;
7373 : : }
7374 : : else
7375 : : {
7376 : : /* We cannot generate constant 1 for fract. */
7377 : 2654223 : if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7378 : 0 : return NULL_TREE;
7379 : 2654223 : arg10 = arg1;
7380 : 2654223 : arg11 = build_one_cst (type);
7381 : : }
7382 : 8467288 : same = NULL_TREE;
7383 : :
7384 : : /* Prefer factoring a common non-constant. */
7385 : 8467288 : if (operand_equal_p (arg00, arg10, 0))
7386 : : same = arg00, alt0 = arg01, alt1 = arg11;
7387 : 8463670 : else if (operand_equal_p (arg01, arg11, 0))
7388 : : same = arg01, alt0 = arg00, alt1 = arg10;
7389 : 8394838 : else if (operand_equal_p (arg00, arg11, 0))
7390 : : same = arg00, alt0 = arg01, alt1 = arg10;
7391 : 8394762 : else if (operand_equal_p (arg01, arg10, 0))
7392 : : same = arg01, alt0 = arg00, alt1 = arg11;
7393 : :
7394 : : /* No identical multiplicands; see if we can find a common
7395 : : power-of-two factor in non-power-of-two multiplies. This
7396 : : can help in multi-dimensional array access. */
7397 : 8390482 : else if (tree_fits_shwi_p (arg01) && tree_fits_shwi_p (arg11))
7398 : : {
7399 : 7066131 : HOST_WIDE_INT int01 = tree_to_shwi (arg01);
7400 : 7066131 : HOST_WIDE_INT int11 = tree_to_shwi (arg11);
7401 : 7066131 : HOST_WIDE_INT tmp;
7402 : 7066131 : bool swap = false;
7403 : 7066131 : tree maybe_same;
7404 : :
7405 : : /* Move min of absolute values to int11. */
7406 : 7066131 : if (absu_hwi (int01) < absu_hwi (int11))
7407 : : {
7408 : : tmp = int01, int01 = int11, int11 = tmp;
7409 : : alt0 = arg00, arg00 = arg10, arg10 = alt0;
7410 : : maybe_same = arg01;
7411 : : swap = true;
7412 : : }
7413 : : else
7414 : 3430457 : maybe_same = arg11;
7415 : :
7416 : 7066131 : const unsigned HOST_WIDE_INT factor = absu_hwi (int11);
7417 : 7066131 : if (factor > 1
7418 : 9337811 : && pow2p_hwi (factor)
7419 : 2072530 : && (int01 & (factor - 1)) == 0
7420 : : /* The remainder should not be a constant, otherwise we
7421 : :                 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7422 : :                 increase the number of multiplications needed.  */
7423 : 8339306 : && TREE_CODE (arg10) != INTEGER_CST)
7424 : : {
7425 : 1125201 : alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7426 : 1125201 : build_int_cst (TREE_TYPE (arg00),
7427 : 1125201 : int01 / int11));
7428 : 1125201 : alt1 = arg10;
7429 : 1125201 : same = maybe_same;
7430 : 1125201 : if (swap)
7431 : 1022761 : maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7432 : : }
7433 : : }
7434 : :
7435 : 1202007 : if (!same)
7436 : 7265281 : return NULL_TREE;
7437 : :
7438 : 7 : if (! ANY_INTEGRAL_TYPE_P (type)
7439 : 1202007 : || TYPE_OVERFLOW_WRAPS (type)
7440 : : /* We are neither factoring zero nor minus one. */
7441 : 1322971 : || TREE_CODE (same) == INTEGER_CST)
7442 : 1190642 : return fold_build2_loc (loc, MULT_EXPR, type,
7443 : : fold_build2_loc (loc, code, type,
7444 : : fold_convert_loc (loc, type, alt0),
7445 : : fold_convert_loc (loc, type, alt1)),
7446 : 1190642 : fold_convert_loc (loc, type, same));
7447 : :
7448 : : /* Same may be zero and thus the operation 'code' may overflow. Likewise
7449 : : same may be minus one and thus the multiplication may overflow. Perform
7450 : : the sum operation in an unsigned type. */
7451 : 11365 : tree utype = unsigned_type_for (type);
7452 : 11365 : tree tem = fold_build2_loc (loc, code, utype,
7453 : : fold_convert_loc (loc, utype, alt0),
7454 : : fold_convert_loc (loc, utype, alt1));
7455 : :   /* If the sum evaluated to a constant that is not -INF, the multiplication
7456 : : cannot overflow. */
7457 : 22730 : if (TREE_CODE (tem) == INTEGER_CST
7458 : 17907 : && (wi::to_wide (tem)
7459 : 17907 : != wi::min_value (TYPE_PRECISION (utype), SIGNED)))
7460 : 3258 : return fold_build2_loc (loc, MULT_EXPR, type,
7461 : 3258 : fold_convert (type, tem), same);
7462 : :
7463 : : /* Do not resort to unsigned multiplication because
7464 : : we lose the no-overflow property of the expression. */
7465 : : return NULL_TREE;
7466 : : }
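Two illustrative instances of the folds above, written as ordinary C in a hypothetical function; unsigned types are used so the overflow-wrapping condition checked near the end of the function holds for the non-constant common factor.

    unsigned long
    factored (unsigned long a, unsigned long b, unsigned long c,
              unsigned long i, unsigned long j)
    {
      /* (A * C) + (B * C) -> (A + B) * C: identical multiplicand C.  */
      unsigned long s1 = a * c + b * c;   /* folds to (a + b) * c      */
      /* No identical multiplicand, but 4 divides 8, so the common
         power-of-two factor is pulled out.  */
      unsigned long s2 = i * 4 + j * 8;   /* folds to (i + j * 2) * 4  */
      return s1 + s2;
    }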
7467 : :
7468 : : /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7469 : : specified by EXPR into the buffer PTR of length LEN bytes.
7470 : : Return the number of bytes placed in the buffer, or zero
7471 : : upon failure. */
7472 : :
7473 : : static int
7474 : 16922560 : native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7475 : : {
7476 : 16922560 : tree type = TREE_TYPE (expr);
7477 : 16922560 : int total_bytes;
7478 : 16922560 : if (TREE_CODE (type) == BITINT_TYPE)
7479 : : {
7480 : 16610 : struct bitint_info info;
7481 : 16610 : bool ok = targetm.c.bitint_type_info (TYPE_PRECISION (type), &info);
7482 : 16610 : gcc_assert (ok);
7483 : 16610 : scalar_int_mode limb_mode = as_a <scalar_int_mode> (info.limb_mode);
7484 : 16610 : if (TYPE_PRECISION (type) > GET_MODE_PRECISION (limb_mode))
7485 : : {
7486 : 16539 : total_bytes = tree_to_uhwi (TYPE_SIZE_UNIT (type));
7487 : :          /* More work is needed when adding _BitInt support for PDP endian
7488 : :             if the limb is smaller than a word, or if the _BitInt limb
7489 : :             ordering doesn't match the target endianness here.  */
7490 : 16539 : gcc_checking_assert (info.big_endian == WORDS_BIG_ENDIAN
7491 : : && (BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
7492 : : || (GET_MODE_SIZE (limb_mode)
7493 : : >= UNITS_PER_WORD)));
7494 : : }
7495 : : else
7496 : 142 : total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7497 : : }
7498 : : else
7499 : 33811900 : total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7500 : 16922560 : int byte, offset, word, words;
7501 : 16922560 : unsigned char value;
7502 : :
7503 : 16922560 : if ((off == -1 && total_bytes > len) || off >= total_bytes)
7504 : : return 0;
7505 : 16922079 : if (off == -1)
7506 : 15857581 : off = 0;
7507 : :
7508 : 16922079 : if (ptr == NULL)
7509 : : /* Dry run. */
7510 : 2531925 : return MIN (len, total_bytes - off);
7511 : :
7512 : : words = total_bytes / UNITS_PER_WORD;
7513 : :
7514 : 74942480 : for (byte = 0; byte < total_bytes; byte++)
7515 : : {
7516 : 60552326 : int bitpos = byte * BITS_PER_UNIT;
7517 : : /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7518 : : number of bytes. */
7519 : 60552326 : value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7520 : :
7521 : 60552326 : if (total_bytes > UNITS_PER_WORD)
7522 : : {
7523 : 60552326 : word = byte / UNITS_PER_WORD;
7524 : 60552326 : if (WORDS_BIG_ENDIAN)
7525 : : word = (words - 1) - word;
7526 : 60552326 : offset = word * UNITS_PER_WORD;
7527 : 60552326 : if (BYTES_BIG_ENDIAN)
7528 : : offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7529 : : else
7530 : 60552326 : offset += byte % UNITS_PER_WORD;
7531 : : }
7532 : : else
7533 : : offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7534 : 60552326 : if (offset >= off && offset - off < len)
7535 : 58547277 : ptr[offset - off] = value;
7536 : : }
7537 : 14390154 : return MIN (len, total_bytes - off);
7538 : : }
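A host-side analogy of the byte loop above (a sketch with a hypothetical helper name, not the GCC API): on a little-endian target with little-endian word order, byte BYTE of the encoding is simply bits [8*BYTE, 8*BYTE + 7] of the value, which is what wi::extract_uhwi pulls out on each iteration.

    #include <stdint.h>
    #include <stddef.h>

    static void
    encode_le (uint64_t val, unsigned char *ptr, size_t total_bytes)
    {
      for (size_t byte = 0; byte < total_bytes; byte++)
        ptr[byte] = (unsigned char) (val >> (byte * 8));  /* byte 0 is the lsb */
    }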
7539 : :
7540 : :
7541 : : /* Subroutine of native_encode_expr. Encode the FIXED_CST
7542 : : specified by EXPR into the buffer PTR of length LEN bytes.
7543 : : Return the number of bytes placed in the buffer, or zero
7544 : : upon failure. */
7545 : :
7546 : : static int
7547 : 0 : native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7548 : : {
7549 : 0 : tree type = TREE_TYPE (expr);
7550 : 0 : scalar_mode mode = SCALAR_TYPE_MODE (type);
7551 : 0 : int total_bytes = GET_MODE_SIZE (mode);
7552 : 0 : FIXED_VALUE_TYPE value;
7553 : 0 : tree i_value, i_type;
7554 : :
7555 : 0 : if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7556 : : return 0;
7557 : :
7558 : 0 : i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7559 : :
7560 : 0 : if (NULL_TREE == i_type || TYPE_PRECISION (i_type) != total_bytes)
7561 : : return 0;
7562 : :
7563 : 0 : value = TREE_FIXED_CST (expr);
7564 : 0 : i_value = double_int_to_tree (i_type, value.data);
7565 : :
7566 : 0 : return native_encode_int (i_value, ptr, len, off);
7567 : : }
7568 : :
7569 : :
7570 : : /* Subroutine of native_encode_expr. Encode the REAL_CST
7571 : : specified by EXPR into the buffer PTR of length LEN bytes.
7572 : : Return the number of bytes placed in the buffer, or zero
7573 : : upon failure. */
7574 : :
7575 : : static int
7576 : 544480 : native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7577 : : {
7578 : 544480 : tree type = TREE_TYPE (expr);
7579 : 544480 : int total_bytes = GET_MODE_SIZE (SCALAR_FLOAT_TYPE_MODE (type));
7580 : 544480 : int byte, offset, word, words, bitpos;
7581 : 544480 : unsigned char value;
7582 : :
7583 : : /* There are always 32 bits in each long, no matter the size of
7584 : :      the host's long.  We handle floating point representations with
7585 : : up to 192 bits. */
7586 : 544480 : long tmp[6];
7587 : :
7588 : 544480 : if ((off == -1 && total_bytes > len) || off >= total_bytes)
7589 : : return 0;
7590 : 542685 : if (off == -1)
7591 : 411347 : off = 0;
7592 : :
7593 : 542685 : if (ptr == NULL)
7594 : : /* Dry run. */
7595 : 135233 : return MIN (len, total_bytes - off);
7596 : :
7597 : 407452 : words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7598 : :
7599 : 407452 : real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7600 : :
7601 : 3295108 : for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7602 : 2887656 : bitpos += BITS_PER_UNIT)
7603 : : {
7604 : 2887656 : byte = (bitpos / BITS_PER_UNIT) & 3;
7605 : 2887656 : value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7606 : :
7607 : 2887656 : if (UNITS_PER_WORD < 4)
7608 : : {
7609 : : word = byte / UNITS_PER_WORD;
7610 : : if (WORDS_BIG_ENDIAN)
7611 : : word = (words - 1) - word;
7612 : : offset = word * UNITS_PER_WORD;
7613 : : if (BYTES_BIG_ENDIAN)
7614 : : offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7615 : : else
7616 : : offset += byte % UNITS_PER_WORD;
7617 : : }
7618 : : else
7619 : : {
7620 : 2887656 : offset = byte;
7621 : 2887656 : if (BYTES_BIG_ENDIAN)
7622 : : {
7623 : : /* Reverse bytes within each long, or within the entire float
7624 : : if it's smaller than a long (for HFmode). */
7625 : : offset = MIN (3, total_bytes - 1) - offset;
7626 : : gcc_assert (offset >= 0);
7627 : : }
7628 : : }
7629 : 2887656 : offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7630 : 2887656 : if (offset >= off
7631 : 2884380 : && offset - off < len)
7632 : 2866980 : ptr[offset - off] = value;
7633 : : }
7634 : 407452 : return MIN (len, total_bytes - off);
7635 : : }
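As a sanity check of the byte layout produced above: IEEE single-precision 1.0f is the bit pattern 0x3f800000, so on a little-endian target the four encoded bytes are 00 00 80 3f. A host-side check, assuming a little-endian IEEE-754 host (plain C, not the GCC API):

    #include <assert.h>
    #include <string.h>

    int
    main (void)
    {
      float f = 1.0f;
      unsigned char buf[4];
      memcpy (buf, &f, sizeof buf);
      /* 1.0f = sign 0, biased exponent 127, mantissa 0 -> 0x3f800000.  */
      assert (buf[0] == 0x00 && buf[1] == 0x00
              && buf[2] == 0x80 && buf[3] == 0x3f);
      return 0;
    }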
7636 : :
7637 : : /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7638 : : specified by EXPR into the buffer PTR of length LEN bytes.
7639 : : Return the number of bytes placed in the buffer, or zero
7640 : : upon failure. */
7641 : :
7642 : : static int
7643 : 20150 : native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7644 : : {
7645 : 20150 : int rsize, isize;
7646 : 20150 : tree part;
7647 : :
7648 : 20150 : part = TREE_REALPART (expr);
7649 : 20150 : rsize = native_encode_expr (part, ptr, len, off);
7650 : 20150 : if (off == -1 && rsize == 0)
7651 : : return 0;
7652 : 20150 : part = TREE_IMAGPART (expr);
7653 : 20150 : if (off != -1)
7654 : 39869 : off = MAX (0, off - GET_MODE_SIZE (SCALAR_TYPE_MODE (TREE_TYPE (part))));
7655 : 20150 : isize = native_encode_expr (part, ptr ? ptr + rsize : NULL,
7656 : : len - rsize, off);
7657 : 20150 : if (off == -1 && isize != rsize)
7658 : : return 0;
7659 : 20150 : return rsize + isize;
7660 : : }
7661 : :
7662 : : /* Like native_encode_vector, but only encode the first COUNT elements.
7663 : : The other arguments are as for native_encode_vector. */
7664 : :
7665 : : static int
7666 : 832198 : native_encode_vector_part (const_tree expr, unsigned char *ptr, int len,
7667 : : int off, unsigned HOST_WIDE_INT count)
7668 : : {
7669 : 832198 : tree itype = TREE_TYPE (TREE_TYPE (expr));
7670 : 1664396 : if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (expr))
7671 : 832510 : && TYPE_PRECISION (itype) <= BITS_PER_UNIT)
7672 : : {
7673 : : /* This is the only case in which elements can be smaller than a byte.
7674 : : Element 0 is always in the lsb of the containing byte. */
7675 : 236 : unsigned int elt_bits = TYPE_PRECISION (itype);
7676 : 236 : int total_bytes = CEIL (elt_bits * count, BITS_PER_UNIT);
7677 : 236 : if ((off == -1 && total_bytes > len) || off >= total_bytes)
7678 : : return 0;
7679 : :
7680 : 236 : if (off == -1)
7681 : 236 : off = 0;
7682 : :
7683 : : /* Zero the buffer and then set bits later where necessary. */
7684 : 236 : int extract_bytes = MIN (len, total_bytes - off);
7685 : 236 : if (ptr)
7686 : 236 : memset (ptr, 0, extract_bytes);
7687 : :
7688 : 236 : unsigned int elts_per_byte = BITS_PER_UNIT / elt_bits;
7689 : 236 : unsigned int first_elt = off * elts_per_byte;
7690 : 236 : unsigned int extract_elts = extract_bytes * elts_per_byte;
7691 : 236 : unsigned int elt_mask = (1 << elt_bits) - 1;
7692 : 2821 : for (unsigned int i = 0; i < extract_elts; ++i)
7693 : : {
7694 : 2585 : tree elt = VECTOR_CST_ELT (expr, first_elt + i);
7695 : 2585 : if (TREE_CODE (elt) != INTEGER_CST)
7696 : : return 0;
7697 : :
7698 : 2585 : if (ptr && integer_nonzerop (elt))
7699 : : {
7700 : 1066 : unsigned int bit = i * elt_bits;
7701 : 1066 : ptr[bit / BITS_PER_UNIT] |= elt_mask << (bit % BITS_PER_UNIT);
7702 : : }
7703 : : }
7704 : : return extract_bytes;
7705 : : }
7706 : :
7707 : 831962 : int offset = 0;
7708 : 831962 : int size = GET_MODE_SIZE (SCALAR_TYPE_MODE (itype));
7709 : 2661164 : for (unsigned HOST_WIDE_INT i = 0; i < count; i++)
7710 : : {
7711 : 2351316 : if (off >= size)
7712 : : {
7713 : 19726 : off -= size;
7714 : 19726 : continue;
7715 : : }
7716 : 2331590 : tree elem = VECTOR_CST_ELT (expr, i);
7717 : 2331590 : int res = native_encode_expr (elem, ptr ? ptr + offset : NULL,
7718 : : len - offset, off);
7719 : 2331590 : if ((off == -1 && res != size) || res == 0)
7720 : : return 0;
7721 : 2331067 : offset += res;
7722 : 2331067 : if (offset >= len)
7723 : 1043182 : return (off == -1 && i < count - 1) ? 0 : offset;
7724 : 1809476 : if (off != -1)
7725 : 316087 : off = 0;
7726 : : }
7727 : : return offset;
7728 : : }
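The VECTOR_BOOLEAN_TYPE_P branch above packs sub-byte elements with element 0 in the least significant bit of the first byte. A standalone sketch of the same packing for 1-bit elements, assuming 8-bit bytes (hypothetical helper, not the GCC API):

    #include <string.h>
    #include <stddef.h>

    static void
    pack_bool_elts (const unsigned char *elts, size_t count, unsigned char *ptr)
    {
      memset (ptr, 0, (count + 7) / 8);          /* zero, then set bits     */
      for (size_t i = 0; i < count; i++)
        if (elts[i])
          ptr[i / 8] |= 1u << (i % 8);           /* element 0 in the lsb    */
    }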
7729 : :
7730 : : /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7731 : : specified by EXPR into the buffer PTR of length LEN bytes.
7732 : : Return the number of bytes placed in the buffer, or zero
7733 : : upon failure. */
7734 : :
7735 : : static int
7736 : 717394 : native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7737 : : {
7738 : 717394 : unsigned HOST_WIDE_INT count;
7739 : 717394 : if (!VECTOR_CST_NELTS (expr).is_constant (&count))
7740 : : return 0;
7741 : 717394 : return native_encode_vector_part (expr, ptr, len, off, count);
7742 : : }
7743 : :
7744 : :
7745 : : /* Subroutine of native_encode_expr. Encode the STRING_CST
7746 : : specified by EXPR into the buffer PTR of length LEN bytes.
7747 : : Return the number of bytes placed in the buffer, or zero
7748 : : upon failure. */
7749 : :
7750 : : static int
7751 : 123713 : native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7752 : : {
7753 : 123713 : tree type = TREE_TYPE (expr);
7754 : :
7755 : :   /* Wide-char strings are encoded in target byte order, so encoding
7756 : :      them natively is trivial.  */
7757 : 123713 : if (BITS_PER_UNIT != CHAR_BIT
7758 : 123713 : || TREE_CODE (type) != ARRAY_TYPE
7759 : 123713 : || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7760 : 247426 : || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7761 : : return 0;
7762 : :
7763 : 123713 : HOST_WIDE_INT total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
7764 : 123713 : if ((off == -1 && total_bytes > len) || off >= total_bytes)
7765 : : return 0;
7766 : 122867 : if (off == -1)
7767 : 56607 : off = 0;
7768 : 122867 : len = MIN (total_bytes - off, len);
7769 : 122867 : if (ptr == NULL)
7770 : : /* Dry run. */;
7771 : : else
7772 : : {
7773 : 122867 : int written = 0;
7774 : 122867 : if (off < TREE_STRING_LENGTH (expr))
7775 : : {
7776 : 122391 : written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7777 : 122391 : memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7778 : : }
7779 : 122867 : memset (ptr + written, 0, len - written);
7780 : : }
7781 : : return len;
7782 : : }
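The effect of the code above is copy-then-zero-pad: the bytes the STRING_CST provides are copied and the rest of the array type's size is cleared. A host-side analogy (hypothetical helper, not the GCC API), where SRC_LEN plays the role of TREE_STRING_LENGTH and TOTAL the role of the array size in bytes:

    #include <string.h>
    #include <stddef.h>

    static void
    encode_padded (const char *src, size_t src_len,
                   unsigned char *ptr, size_t total)
    {
      size_t written = src_len < total ? src_len : total;
      memcpy (ptr, src, written);                 /* the string's own bytes */
      memset (ptr + written, 0, total - written); /* trailing zero padding  */
    }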
7783 : :
7784 : : /* Subroutine of native_encode_expr. Encode the CONSTRUCTOR
7785 : : specified by EXPR into the buffer PTR of length LEN bytes.
7786 : : Return the number of bytes placed in the buffer, or zero
7787 : : upon failure. */
7788 : :
7789 : : static int
7790 : 46290 : native_encode_constructor (const_tree expr, unsigned char *ptr, int len, int off)
7791 : : {
7792 : : /* We are only concerned with zero-initialization constructors here. That's
7793 : : all we expect to see in GIMPLE, so that's all native_encode_expr should
7794 : : deal with. For more general handling of constructors, there is
7795 : : native_encode_initializer. */
7796 : 46290 : if (CONSTRUCTOR_NELTS (expr))
7797 : : return 0;
7798 : :
7799 : :   /* Punt if target bytes are not CHAR_BIT bits wide or if the byte size
7800 : :      of the constructor's type is not a compile-time constant.  */
7801 : 85526 : if (BITS_PER_UNIT != CHAR_BIT
7802 : 42763 : || !tree_fits_shwi_p (TYPE_SIZE_UNIT (TREE_TYPE (expr))))
7803 : : return 0;
7804 : :
7805 : 42763 : HOST_WIDE_INT total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
7806 : 42763 : if ((off == -1 && total_bytes > len) || off >= total_bytes)
7807 : : return 0;
7808 : 42763 : if (off == -1)
7809 : 0 : off = 0;
7810 : 42763 : len = MIN (total_bytes - off, len);
7811 : 42763 : if (ptr == NULL)
7812 : : /* Dry run. */;
7813 : : else
7814 : 42763 : memset (ptr, 0, len);
7815 : : return len;
7816 : : }
7817 : :
7818 : : /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST, REAL_CST,
7819 : : FIXED_CST, COMPLEX_CST, STRING_CST, or VECTOR_CST specified by EXPR into
7820 : : the buffer PTR of size LEN bytes. If PTR is NULL, don't actually store
7821 : : anything, just do a dry run. Fail either if OFF is -1 and LEN isn't
7822 : : sufficient to encode the entire EXPR, or if OFF is out of bounds.
7823 : : Otherwise, start at byte offset OFF and encode at most LEN bytes.
7824 : : Return the number of bytes placed in the buffer, or zero upon failure. */
7825 : :
7826 : : int
7827 : 30077158 : native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7828 : : {
7829 : : /* We don't support starting at negative offset and -1 is special. */
7830 : 30077158 : if (off < -1)
7831 : : return 0;
7832 : :
7833 : 30077146 : switch (TREE_CODE (expr))
7834 : : {
7835 : 16920582 : case INTEGER_CST:
7836 : 16920582 : return native_encode_int (expr, ptr, len, off);
7837 : :
7838 : 544480 : case REAL_CST:
7839 : 544480 : return native_encode_real (expr, ptr, len, off);
7840 : :
7841 : 0 : case FIXED_CST:
7842 : 0 : return native_encode_fixed (expr, ptr, len, off);
7843 : :
7844 : 20150 : case COMPLEX_CST:
7845 : 20150 : return native_encode_complex (expr, ptr, len, off);
7846 : :
7847 : 717394 : case VECTOR_CST:
7848 : 717394 : return native_encode_vector (expr, ptr, len, off);
7849 : :
7850 : 123713 : case STRING_CST:
7851 : 123713 : return native_encode_string (expr, ptr, len, off);
7852 : :
7853 : 46290 : case CONSTRUCTOR:
7854 : 46290 : return native_encode_constructor (expr, ptr, len, off);
7855 : :
7856 : : default:
7857 : : return 0;
7858 : : }
7859 : : }
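A hedged caller sketch (an assumed pattern inside GCC, with a hypothetical EXPR and the usual system includes): encode a constant into a fixed-size buffer; a zero return means the buffer was too small or the tree is not encodable, and passing a null buffer instead would perform the dry run described above.

    unsigned char buf[64];
    int encoded = native_encode_expr (expr, buf, sizeof buf, -1);
    if (encoded != 0)
      {
        /* buf[0 .. encoded - 1] now holds the target byte image of EXPR.  */
      }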
7860 : :
7861 : : /* Try to find a type whose byte size is smaller than or equal to LEN bytes
7862 : :    and larger than or equal to FIELDSIZE bytes, with underlying mode
7863 : :    precision/size a multiple of BITS_PER_UNIT.  As native_{interpret,encode}_int
7864 : :    works in terms of machine modes, we can't just use build_nonstandard_integer_type.  */
7865 : :
7866 : : tree
7867 : 541 : find_bitfield_repr_type (int fieldsize, int len)
7868 : : {
7869 : 541 : machine_mode mode;
7870 : 1063 : for (int pass = 0; pass < 2; pass++)
7871 : : {
7872 : 802 : enum mode_class mclass = pass ? MODE_PARTIAL_INT : MODE_INT;
7873 : 4510 : FOR_EACH_MODE_IN_CLASS (mode, mclass)
7874 : 7976 : if (known_ge (GET_MODE_SIZE (mode), fieldsize)
7875 : 7286 : && known_eq (GET_MODE_PRECISION (mode),
7876 : : GET_MODE_BITSIZE (mode))
7877 : 11274 : && known_le (GET_MODE_SIZE (mode), len))
7878 : : {
7879 : 280 : tree ret = lang_hooks.types.type_for_mode (mode, 1);
7880 : 280 : if (ret && TYPE_MODE (ret) == mode)
7881 : : return ret;
7882 : : }
7883 : : }
7884 : :
7885 : 522 : for (int i = 0; i < NUM_INT_N_ENTS; i ++)
7886 : 261 : if (int_n_enabled_p[i]
7887 : 261 : && int_n_data[i].bitsize >= (unsigned) (BITS_PER_UNIT * fieldsize)
7888 : 261 : && int_n_trees[i].unsigned_type)
7889 : : {
7890 : 261 : tree ret = int_n_trees[i].unsigned_type;
7891 : 261 : mode = TYPE_MODE (ret);
7892 : 522 : if (known_ge (GET_MODE_SIZE (mode), fieldsize)
7893 : 522 : && known_eq (GET_MODE_PRECISION (mode),
7894 : : GET_MODE_BITSIZE (mode))
7895 : 783 : && known_le (GET_MODE_SIZE (mode), len))
7896 : : return ret;
7897 : : }
7898 : :
7899 : : return NULL_TREE;
7900 : : }
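A hedged illustration on a typical target with 8-bit bytes and a 32-bit SImode (hypothetical calls, results may differ per target): a 3-byte bit-field region has no exact integer mode, so the smallest mode that both covers it and fits in the buffer is picked.

    tree repr = find_bitfield_repr_type (3, 8);  /* typically a 32-bit type   */
    tree none = find_bitfield_repr_type (3, 2);  /* NULL_TREE: nothing fits
                                                    in a 2-byte buffer        */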
7901 : :
7902 : : /* Similar to native_encode_expr, but also handle CONSTRUCTORs, VCEs,
7903 : :    NON_LVALUE_EXPRs and nops.  If MASK is non-NULL (then PTR has
7904 : :    to be non-NULL and OFF zero), then in addition to filling the
7905 : :    bytes pointed to by PTR with the value, also clear any bits pointed
7906 : :    to by MASK that are known to be initialized, and keep the rest as is
7907 : :    for e.g. uninitialized padding bits or uninitialized fields.  */
7908 : :
7909 : : int
7910 : 8066364 : native_encode_initializer (tree init, unsigned char *ptr, int len,
7911 : : int off, unsigned char *mask)
7912 : : {
7913 : 8066364 : int r;
7914 : :
7915 : : /* We don't support starting at negative offset and -1 is special. */
7916 : 8066364 : if (off < -1 || init == NULL_TREE)
7917 : : return 0;
7918 : :
7919 : 8066364 : gcc_assert (mask == NULL || (off == 0 && ptr));
7920 : :
7921 : 8066364 : STRIP_NOPS (init);
7922 : 8066364 : switch (TREE_CODE (init))
7923 : : {
7924 : 0 : case VIEW_CONVERT_EXPR:
7925 : 0 : case NON_LVALUE_EXPR:
7926 : 0 : return native_encode_initializer (TREE_OPERAND (init, 0), ptr, len, off,
7927 : 0 : mask);
7928 : 7296293 : default:
7929 : 7296293 : r = native_encode_expr (init, ptr, len, off);
7930 : 7296293 : if (mask)
7931 : 1667 : memset (mask, 0, r);
7932 : : return r;
7933 : 770071 : case CONSTRUCTOR:
7934 : 770071 : tree type = TREE_TYPE (init);
7935 : 770071 : HOST_WIDE_INT total_bytes = int_size_in_bytes (type);
7936 : 770071 : if (total_bytes < 0)
7937 : : return 0;
7938 : 770071 : if ((off == -1 && total_bytes > len) || off >= total_bytes)
7939 : : return 0;
7940 : 770068 : int o = off == -1 ? 0 : off;
7941 : 770068 : if (TREE_CODE (type) == ARRAY_TYPE)
7942 : : {
7943 : 242474 : tree min_index;
7944 : 242474 : unsigned HOST_WIDE_INT cnt;
7945 : 242474 : HOST_WIDE_INT curpos = 0, fieldsize, valueinit = -1;
7946 : 242474 : constructor_elt *ce;
7947 : :
7948 : 242474 : if (!TYPE_DOMAIN (type)
7949 : 242474 : || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (type))) != INTEGER_CST)
7950 : : return 0;
7951 : :
7952 : 242474 : fieldsize = int_size_in_bytes (TREE_TYPE (type));
7953 : 242474 : if (fieldsize <= 0)
7954 : : return 0;
7955 : :
7956 : 242474 : min_index = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
7957 : 242474 : if (ptr)
7958 : 242474 : memset (ptr, '\0', MIN (total_bytes - off, len));
7959 : :
7960 : 7214240 : for (cnt = 0; ; cnt++)
7961 : : {
7962 : 7456714 : tree val = NULL_TREE, index = NULL_TREE;
7963 : 7456714 : HOST_WIDE_INT pos = curpos, count = 0;
7964 : 7456714 : bool full = false;
7965 : 7456714 : if (vec_safe_iterate (CONSTRUCTOR_ELTS (init), cnt, &ce))
7966 : : {
7967 : 7417284 : val = ce->value;
7968 : 7417284 : index = ce->index;
7969 : : }
7970 : 39430 : else if (mask == NULL
7971 : 228 : || CONSTRUCTOR_NO_CLEARING (init)
7972 : 39658 : || curpos >= total_bytes)
7973 : : break;
7974 : : else
7975 : : pos = total_bytes;
7976 : :
7977 : 7417284 : if (index && TREE_CODE (index) == RANGE_EXPR)
7978 : : {
7979 : 18 : if (TREE_CODE (TREE_OPERAND (index, 0)) != INTEGER_CST
7980 : 18 : || TREE_CODE (TREE_OPERAND (index, 1)) != INTEGER_CST)
7981 : 0 : return 0;
7982 : 18 : offset_int wpos
7983 : 18 : = wi::sext (wi::to_offset (TREE_OPERAND (index, 0))
7984 : 36 : - wi::to_offset (min_index),
7985 : 18 : TYPE_PRECISION (sizetype));
7986 : 18 : wpos *= fieldsize;
7987 : 18 : if (!wi::fits_shwi_p (pos))
7988 : : return 0;
7989 : 18 : pos = wpos.to_shwi ();
7990 : 18 : offset_int wcount
7991 : 18 : = wi::sext (wi::to_offset (TREE_OPERAND (index, 1))
7992 : 36 : - wi::to_offset (TREE_OPERAND (index, 0)),
7993 : 18 : TYPE_PRECISION (sizetype));
7994 : 18 : if (!wi::fits_shwi_p (wcount))
7995 : : return 0;
7996 : 18 : count = wcount.to_shwi ();
7997 : 18 : }
7998 : 6834025 : else if (index)
7999 : : {
8000 : 6834025 : if (TREE_CODE (index) != INTEGER_CST)
8001 : 0 : return 0;
8002 : 6834025 : offset_int wpos
8003 : 6834025 : = wi::sext (wi::to_offset (index)
8004 : 13668050 : - wi::to_offset (min_index),
8005 : 6834025 : TYPE_PRECISION (sizetype));
8006 : 6834025 : wpos *= fieldsize;
8007 : 6834025 : if (!wi::fits_shwi_p (wpos))
8008 : : return 0;
8009 : 6834025 : pos = wpos.to_shwi ();
8010 : : }
8011 : :
8012 : 7417949 : if (mask && !CONSTRUCTOR_NO_CLEARING (init) && curpos != pos)
8013 : : {
8014 : 14 : if (valueinit == -1)
8015 : : {
8016 : 14 : tree zero = build_zero_cst (TREE_TYPE (type));
8017 : 28 : r = native_encode_initializer (zero, ptr + curpos,
8018 : : fieldsize, 0,
8019 : 14 : mask + curpos);
8020 : 14 : if (TREE_CODE (zero) == CONSTRUCTOR)
8021 : 0 : ggc_free (zero);
8022 : 14 : if (!r)
8023 : : return 0;
8024 : 14 : valueinit = curpos;
8025 : 14 : curpos += fieldsize;
8026 : : }
8027 : 44 : while (curpos != pos)
8028 : : {
8029 : 30 : memcpy (ptr + curpos, ptr + valueinit, fieldsize);
8030 : 30 : memcpy (mask + curpos, mask + valueinit, fieldsize);
8031 : 30 : curpos += fieldsize;
8032 : : }
8033 : : }
8034 : :
8035 : 7417298 : curpos = pos;
8036 : 7417298 : if (val && TREE_CODE (val) == RAW_DATA_CST)
8037 : : {
8038 : 480 : if (count)
8039 : : return 0;
8040 : 480 : if (off == -1
8041 : 480 : || (curpos >= off
8042 : 0 : && (curpos + RAW_DATA_LENGTH (val)
8043 : 0 : <= (HOST_WIDE_INT) off + len)))
8044 : : {
8045 : 480 : if (ptr)
8046 : 480 : memcpy (ptr + (curpos - o), RAW_DATA_POINTER (val),
8047 : 480 : RAW_DATA_LENGTH (val));
8048 : 480 : if (mask)
8049 : 0 : memset (mask + curpos, 0, RAW_DATA_LENGTH (val));
8050 : : }
8051 : 0 : else if (curpos + RAW_DATA_LENGTH (val) > off
8052 : 0 : && curpos < (HOST_WIDE_INT) off + len)
8053 : : {
8054 : : /* Partial overlap. */
8055 : 0 : unsigned char *p = NULL;
8056 : 0 : int no = 0;
8057 : 0 : int l;
8058 : 0 : gcc_assert (mask == NULL);
8059 : 0 : if (curpos >= off)
8060 : : {
8061 : 0 : if (ptr)
8062 : 0 : p = ptr + curpos - off;
8063 : 0 : l = MIN ((HOST_WIDE_INT) off + len - curpos,
8064 : : RAW_DATA_LENGTH (val));
8065 : : }
8066 : : else
8067 : : {
8068 : 0 : p = ptr;
8069 : 0 : no = off - curpos;
8070 : 0 : l = len;
8071 : : }
8072 : 0 : if (p)
8073 : 0 : memcpy (p, RAW_DATA_POINTER (val) + no, l);
8074 : : }
8075 : 480 : curpos += RAW_DATA_LENGTH (val);
8076 : 480 : val = NULL_TREE;
8077 : : }
8078 : 480 : if (val)
8079 : 7494856 : do
8080 : : {
8081 : 7494856 : if (off == -1
8082 : 608943 : || (curpos >= off
8083 : 221047 : && (curpos + fieldsize
8084 : 221047 : <= (HOST_WIDE_INT) off + len)))
8085 : : {
8086 : 7087136 : if (full)
8087 : : {
8088 : 78052 : if (ptr)
8089 : 78052 : memcpy (ptr + (curpos - o), ptr + (pos - o),
8090 : : fieldsize);
8091 : 78052 : if (mask)
8092 : 0 : memcpy (mask + curpos, mask + pos, fieldsize);
8093 : : }
8094 : 14220028 : else if (!native_encode_initializer (val,
8095 : : ptr
8096 : 7009084 : ? ptr + curpos - o
8097 : : : NULL,
8098 : : fieldsize,
8099 : : off == -1 ? -1
8100 : : : 0,
8101 : : mask
8102 : 637 : ? mask + curpos
8103 : : : NULL))
8104 : : return 0;
8105 : : else
8106 : : {
8107 : : full = true;
8108 : : pos = curpos;
8109 : : }
8110 : : }
8111 : 407720 : else if (curpos + fieldsize > off
8112 : 20326 : && curpos < (HOST_WIDE_INT) off + len)
8113 : : {
8114 : : /* Partial overlap. */
8115 : 649 : unsigned char *p = NULL;
8116 : 649 : int no = 0;
8117 : 649 : int l;
8118 : 649 : gcc_assert (mask == NULL);
8119 : 649 : if (curpos >= off)
8120 : : {
8121 : 147 : if (ptr)
8122 : 147 : p = ptr + curpos - off;
8123 : 147 : l = MIN ((HOST_WIDE_INT) off + len - curpos,
8124 : : fieldsize);
8125 : : }
8126 : : else
8127 : : {
8128 : 502 : p = ptr;
8129 : 502 : no = off - curpos;
8130 : 502 : l = len;
8131 : : }
8132 : 649 : if (!native_encode_initializer (val, p, l, no, NULL))
8133 : : return 0;
8134 : : }
8135 : 7291798 : curpos += fieldsize;
8136 : : }
8137 : 7291798 : while (count-- != 0);
8138 : 7214240 : }
8139 : 39416 : return MIN (total_bytes - off, len);
8140 : : }
8141 : 527594 : else if (TREE_CODE (type) == RECORD_TYPE
8142 : 527594 : || TREE_CODE (type) == UNION_TYPE)
8143 : : {
8144 : 527594 : unsigned HOST_WIDE_INT cnt;
8145 : 527594 : constructor_elt *ce;
8146 : 527594 : tree fld_base = TYPE_FIELDS (type);
8147 : 527594 : tree to_free = NULL_TREE;
8148 : :
8149 : 527594 : gcc_assert (TREE_CODE (type) == RECORD_TYPE || mask == NULL);
8150 : 527594 : if (ptr != NULL)
8151 : 527594 : memset (ptr, '\0', MIN (total_bytes - o, len));
8152 : 91341 : for (cnt = 0; ; cnt++)
8153 : : {
8154 : 618935 : tree val = NULL_TREE, field = NULL_TREE;
8155 : 618935 : HOST_WIDE_INT pos = 0, fieldsize;
8156 : 618935 : unsigned HOST_WIDE_INT bpos = 0, epos = 0;
8157 : :
8158 : 618935 : if (to_free)
8159 : : {
8160 : 0 : ggc_free (to_free);
8161 : 0 : to_free = NULL_TREE;
8162 : : }
8163 : :
8164 : 618935 : if (vec_safe_iterate (CONSTRUCTOR_ELTS (init), cnt, &ce))
8165 : : {
8166 : 97376 : val = ce->value;
8167 : 97376 : field = ce->index;
8168 : 97376 : if (field == NULL_TREE)
8169 : : return 0;
8170 : :
8171 : 97376 : pos = int_byte_position (field);
8172 : 97376 : if (off != -1 && (HOST_WIDE_INT) off + len <= pos)
8173 : 1039 : continue;
8174 : : }
8175 : 521559 : else if (mask == NULL
8176 : 521559 : || CONSTRUCTOR_NO_CLEARING (init))
8177 : : break;
8178 : : else
8179 : : pos = total_bytes;
8180 : :
8181 : 98516 : if (mask && !CONSTRUCTOR_NO_CLEARING (init))
8182 : : {
8183 : : tree fld;
8184 : 11525 : for (fld = fld_base; fld; fld = DECL_CHAIN (fld))
8185 : : {
8186 : 11032 : if (TREE_CODE (fld) != FIELD_DECL)
8187 : 9885 : continue;
8188 : 1147 : if (fld == field)
8189 : : break;
8190 : 146 : if (DECL_PADDING_P (fld))
8191 : 87 : continue;
8192 : 59 : if (DECL_SIZE_UNIT (fld) == NULL_TREE
8193 : 59 : || !tree_fits_shwi_p (DECL_SIZE_UNIT (fld)))
8194 : : return 0;
8195 : 59 : if (integer_zerop (DECL_SIZE_UNIT (fld)))
8196 : 2 : continue;
8197 : : break;
8198 : : }
8199 : 1551 : if (fld == NULL_TREE)
8200 : : {
8201 : 493 : if (ce == NULL)
8202 : : break;
8203 : : return 0;
8204 : : }
8205 : 1058 : fld_base = DECL_CHAIN (fld);
8206 : 1058 : if (fld != field)
8207 : : {
8208 : 57 : cnt--;
8209 : 57 : field = fld;
8210 : 57 : pos = int_byte_position (field);
8211 : 57 : val = build_zero_cst (TREE_TYPE (fld));
8212 : 57 : if (TREE_CODE (val) == CONSTRUCTOR)
8213 : 0 : to_free = val;
8214 : : }
8215 : : }
8216 : :
8217 : 96394 : if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
8218 : 4396 : && TYPE_DOMAIN (TREE_TYPE (field))
8219 : 100790 : && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
8220 : : {
8221 : 81 : if (mask || off != -1)
8222 : : return 0;
8223 : 81 : if (val == NULL_TREE)
8224 : 0 : continue;
8225 : 81 : if (TREE_CODE (TREE_TYPE (val)) != ARRAY_TYPE)
8226 : : return 0;
8227 : 81 : fieldsize = int_size_in_bytes (TREE_TYPE (val));
8228 : 81 : if (fieldsize < 0
8229 : 81 : || (int) fieldsize != fieldsize
8230 : 81 : || (pos + fieldsize) > INT_MAX)
8231 : : return 0;
8232 : 81 : if (pos + fieldsize > total_bytes)
8233 : : {
8234 : 81 : if (ptr != NULL && total_bytes < len)
8235 : 81 : memset (ptr + total_bytes, '\0',
8236 : 81 : MIN (pos + fieldsize, len) - total_bytes);
8237 : : total_bytes = pos + fieldsize;
8238 : : }
8239 : : }
8240 : : else
8241 : : {
8242 : 96313 : if (DECL_SIZE_UNIT (field) == NULL_TREE
8243 : 96313 : || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
8244 : : return 0;
8245 : 96313 : fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
8246 : : }
8247 : 96394 : if (fieldsize == 0)
8248 : 1 : continue;
8249 : :
8250 : : /* Prepare to deal with integral bit-fields and filter out other
8251 : : bit-fields that do not start and end on a byte boundary. */
8252 : 96393 : if (DECL_BIT_FIELD (field))
8253 : : {
8254 : 2431 : if (!tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (field)))
8255 : : return 0;
8256 : 2431 : bpos = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
8257 : 2431 : if (INTEGRAL_TYPE_P (TREE_TYPE (field)))
8258 : : {
8259 : 2431 : bpos %= BITS_PER_UNIT;
8260 : 2431 : fieldsize = TYPE_PRECISION (TREE_TYPE (field)) + bpos;
8261 : 2431 : epos = fieldsize % BITS_PER_UNIT;
8262 : 2431 : fieldsize += BITS_PER_UNIT - 1;
8263 : 2431 : fieldsize /= BITS_PER_UNIT;
8264 : : }
8265 : 0 : else if (bpos % BITS_PER_UNIT
8266 : 0 : || DECL_SIZE (field) == NULL_TREE
8267 : 0 : || !tree_fits_shwi_p (DECL_SIZE (field))
8268 : 0 : || tree_to_shwi (DECL_SIZE (field)) % BITS_PER_UNIT)
8269 : : return 0;
8270 : : }
8271 : :
8272 : 96393 : if (off != -1 && pos + fieldsize <= off)
8273 : 1298 : continue;
8274 : :
8275 : 95095 : if (val == NULL_TREE)
8276 : 0 : continue;
8277 : :
8278 : 95095 : if (DECL_BIT_FIELD (field)
8279 : 95095 : && INTEGRAL_TYPE_P (TREE_TYPE (field)))
8280 : : {
8281 : : /* FIXME: Handle PDP endian. */
8282 : 2239 : if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
8283 : 261 : return 0;
8284 : :
8285 : 2239 : if (TREE_CODE (val) == NON_LVALUE_EXPR)
8286 : 6 : val = TREE_OPERAND (val, 0);
8287 : 2239 : if (TREE_CODE (val) != INTEGER_CST)
8288 : : return 0;
8289 : :
8290 : 2239 : tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
8291 : 2239 : tree repr_type = NULL_TREE;
8292 : 2239 : HOST_WIDE_INT rpos = 0;
8293 : 2239 : if (repr && INTEGRAL_TYPE_P (TREE_TYPE (repr)))
8294 : : {
8295 : 1710 : rpos = int_byte_position (repr);
8296 : 1710 : repr_type = TREE_TYPE (repr);
8297 : : }
8298 : : else
8299 : : {
8300 : 529 : repr_type = find_bitfield_repr_type (fieldsize, len);
8301 : 529 : if (repr_type == NULL_TREE)
8302 : : return 0;
8303 : 268 : HOST_WIDE_INT repr_size = int_size_in_bytes (repr_type);
8304 : 268 : gcc_assert (repr_size > 0 && repr_size <= len);
8305 : 268 : if (pos + repr_size <= o + len)
8306 : : rpos = pos;
8307 : : else
8308 : : {
8309 : 14 : rpos = o + len - repr_size;
8310 : 14 : gcc_assert (rpos <= pos);
8311 : : }
8312 : : }
8313 : :
8314 : 1978 : if (rpos > pos)
8315 : : return 0;
8316 : 1978 : wide_int w = wi::to_wide (val, TYPE_PRECISION (repr_type));
8317 : 1978 : int diff = (TYPE_PRECISION (repr_type)
8318 : 1978 : - TYPE_PRECISION (TREE_TYPE (field)));
8319 : 1978 : HOST_WIDE_INT bitoff = (pos - rpos) * BITS_PER_UNIT + bpos;
8320 : 1978 : if (!BYTES_BIG_ENDIAN)
8321 : 1978 : w = wi::lshift (w, bitoff);
8322 : : else
8323 : : w = wi::lshift (w, diff - bitoff);
8324 : 1978 : val = wide_int_to_tree (repr_type, w);
8325 : :
8326 : 1978 : unsigned char buf[MAX_BITSIZE_MODE_ANY_INT
8327 : : / BITS_PER_UNIT + 1];
8328 : 1978 : int l = native_encode_int (val, buf, sizeof buf, 0);
8329 : 1978 : if (l * BITS_PER_UNIT != TYPE_PRECISION (repr_type))
8330 : 0 : return 0;
8331 : :
8332 : 1978 : if (ptr == NULL)
8333 : 0 : continue;
8334 : :
8335 : :                /* If the bitfield does not start at a byte boundary, handle
8336 : : the partial byte at the start. */
8337 : 1978 : if (bpos
8338 : 1164 : && (off == -1 || (pos >= off && len >= 1)))
8339 : : {
8340 : 1101 : if (!BYTES_BIG_ENDIAN)
8341 : : {
8342 : 1101 : int msk = (1 << bpos) - 1;
8343 : 1101 : buf[pos - rpos] &= ~msk;
8344 : 1101 : buf[pos - rpos] |= ptr[pos - o] & msk;
8345 : 1101 : if (mask)
8346 : : {
8347 : 147 : if (fieldsize > 1 || epos == 0)
8348 : 129 : mask[pos] &= msk;
8349 : : else
8350 : 18 : mask[pos] &= (msk | ~((1 << epos) - 1));
8351 : : }
8352 : : }
8353 : : else
8354 : : {
8355 : : int msk = (1 << (BITS_PER_UNIT - bpos)) - 1;
8356 : : buf[pos - rpos] &= msk;
8357 : : buf[pos - rpos] |= ptr[pos - o] & ~msk;
8358 : : if (mask)
8359 : : {
8360 : : if (fieldsize > 1 || epos == 0)
8361 : : mask[pos] &= ~msk;
8362 : : else
8363 : : mask[pos] &= (~msk
8364 : : | ((1 << (BITS_PER_UNIT - epos))
8365 : : - 1));
8366 : : }
8367 : : }
8368 : : }
8369 : :                /* If the bitfield does not end at a byte boundary, handle
8370 : : the partial byte at the end. */
8371 : 1978 : if (epos
8372 : 1507 : && (off == -1
8373 : 920 : || pos + fieldsize <= (HOST_WIDE_INT) off + len))
8374 : : {
8375 : 1416 : if (!BYTES_BIG_ENDIAN)
8376 : : {
8377 : 1416 : int msk = (1 << epos) - 1;
8378 : 1416 : buf[pos - rpos + fieldsize - 1] &= msk;
8379 : 1416 : buf[pos - rpos + fieldsize - 1]
8380 : 1416 : |= ptr[pos + fieldsize - 1 - o] & ~msk;
8381 : 1416 : if (mask && (fieldsize > 1 || bpos == 0))
8382 : 156 : mask[pos + fieldsize - 1] &= ~msk;
8383 : : }
8384 : : else
8385 : : {
8386 : : int msk = (1 << (BITS_PER_UNIT - epos)) - 1;
8387 : : buf[pos - rpos + fieldsize - 1] &= ~msk;
8388 : : buf[pos - rpos + fieldsize - 1]
8389 : : |= ptr[pos + fieldsize - 1 - o] & msk;
8390 : : if (mask && (fieldsize > 1 || bpos == 0))
8391 : : mask[pos + fieldsize - 1] &= msk;
8392 : : }
8393 : : }
8394 : 1978 : if (off == -1
8395 : 1181 : || (pos >= off
8396 : 1104 : && (pos + fieldsize <= (HOST_WIDE_INT) off + len)))
8397 : : {
8398 : 1811 : memcpy (ptr + pos - o, buf + (pos - rpos), fieldsize);
8399 : 1811 : if (mask && (fieldsize > (bpos != 0) + (epos != 0)))
8400 : 75 : memset (mask + pos + (bpos != 0), 0,
8401 : 75 : fieldsize - (bpos != 0) - (epos != 0));
8402 : : }
8403 : : else
8404 : : {
8405 : : /* Partial overlap. */
8406 : 167 : HOST_WIDE_INT fsz = fieldsize;
8407 : 167 : gcc_assert (mask == NULL);
8408 : 167 : if (pos < off)
8409 : : {
8410 : 77 : fsz -= (off - pos);
8411 : 77 : pos = off;
8412 : : }
8413 : 167 : if (pos + fsz > (HOST_WIDE_INT) off + len)
8414 : 92 : fsz = (HOST_WIDE_INT) off + len - pos;
8415 : 167 : memcpy (ptr + pos - off, buf + (pos - rpos), fsz);
8416 : : }
8417 : 1978 : continue;
8418 : 1978 : }
8419 : :
8420 : 92856 : if (off == -1
8421 : 2480 : || (pos >= off
8422 : 2106 : && (pos + fieldsize <= (HOST_WIDE_INT) off + len)))
8423 : : {
8424 : 91983 : int fldsize = fieldsize;
8425 : 1607 : if (off == -1)
8426 : : {
8427 : 90376 : tree fld = DECL_CHAIN (field);
8428 : 1163900 : while (fld)
8429 : : {
8430 : 1082159 : if (TREE_CODE (fld) == FIELD_DECL)
8431 : : break;
8432 : 1073524 : fld = DECL_CHAIN (fld);
8433 : : }
8434 : 90376 : if (fld == NULL_TREE)
8435 : 81741 : fldsize = len - pos;
8436 : : }
8437 : 94408 : r = native_encode_initializer (val, ptr ? ptr + pos - o
8438 : : : NULL,
8439 : : fldsize,
8440 : : off == -1 ? -1 : 0,
8441 : 818 : mask ? mask + pos : NULL);
8442 : 91983 : if (!r)
8443 : : return 0;
8444 : 86485 : if (off == -1
8445 : 85103 : && fldsize != fieldsize
8446 : 324 : && r > fieldsize
8447 : 54 : && pos + r > total_bytes)
8448 : 91341 : total_bytes = pos + r;
8449 : : }
8450 : : else
8451 : : {
8452 : : /* Partial overlap. */
8453 : 873 : unsigned char *p = NULL;
8454 : 873 : int no = 0;
8455 : 873 : int l;
8456 : 873 : gcc_assert (mask == NULL);
8457 : 873 : if (pos >= off)
8458 : : {
8459 : 499 : if (ptr)
8460 : 499 : p = ptr + pos - off;
8461 : 499 : l = MIN ((HOST_WIDE_INT) off + len - pos,
8462 : : fieldsize);
8463 : : }
8464 : : else
8465 : : {
8466 : 374 : p = ptr;
8467 : 374 : no = off - pos;
8468 : 374 : l = len;
8469 : : }
8470 : 873 : if (!native_encode_initializer (val, p, l, no, NULL))
8471 : : return 0;
8472 : : }
8473 : 91341 : }
8474 : 521502 : return MIN (total_bytes - off, len);
8475 : : }
8476 : : return 0;
8477 : : }
8478 : : }
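A hedged caller sketch of the MASK mode documented above (hypothetical INIT small enough for the buffers, usual includes assumed): the mask starts as all-ones, the call clears the bits it knows are initialized, and any bits still set afterwards mark uninitialized padding or fields.

    unsigned char val[16], mask[16];
    memset (mask, 0xff, sizeof mask);
    int r = native_encode_initializer (init, val, sizeof val, 0, mask);
    /* If r is nonzero, bits still set in mask[0 .. r - 1] were not
       written by the initializer.  */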
8479 : :
8480 : :
8481 : : /* Subroutine of native_interpret_expr. Interpret the contents of
8482 : : the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
8483 : : If the buffer cannot be interpreted, return NULL_TREE. */
8484 : :
8485 : : static tree
8486 : 4052772 : native_interpret_int (tree type, const unsigned char *ptr, int len)
8487 : : {
8488 : 4052772 : int total_bytes;
8489 : 4052772 : if (TREE_CODE (type) == BITINT_TYPE)
8490 : : {
8491 : 17 : struct bitint_info info;
8492 : 17 : bool ok = targetm.c.bitint_type_info (TYPE_PRECISION (type), &info);
8493 : 17 : gcc_assert (ok);
8494 : 17 : scalar_int_mode limb_mode = as_a <scalar_int_mode> (info.limb_mode);
8495 : 17 : if (TYPE_PRECISION (type) > GET_MODE_PRECISION (limb_mode))
8496 : : {
8497 : 17 : total_bytes = tree_to_uhwi (TYPE_SIZE_UNIT (type));
8498 : :            /* More work is needed when adding _BitInt support for PDP endian
8499 : :               if the limb is smaller than a word, or if the _BitInt limb
8500 : :               ordering doesn't match the target endianness here.  */
8501 : 17 : gcc_checking_assert (info.big_endian == WORDS_BIG_ENDIAN
8502 : : && (BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
8503 : : || (GET_MODE_SIZE (limb_mode)
8504 : : >= UNITS_PER_WORD)));
8505 : : }
8506 : : else
8507 : 0 : total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
8508 : : }
8509 : : else
8510 : 8105510 : total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
8511 : :
8512 : 4052772 : if (total_bytes > len)
8513 : : return NULL_TREE;
8514 : :
8515 : 4052555 : wide_int result = wi::from_buffer (ptr, total_bytes);
8516 : :
8517 : 4052555 : return wide_int_to_tree (type, result);
8518 : 4052555 : }
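This is the inverse of the byte loop in native_encode_int; a host-side analogy of what wi::from_buffer does for a little-endian target (a sketch with a hypothetical helper name, not the GCC API):

    #include <stdint.h>
    #include <stddef.h>

    static uint64_t
    decode_le (const unsigned char *ptr, size_t total_bytes)
    {
      uint64_t val = 0;
      for (size_t byte = 0; byte < total_bytes; byte++)
        val |= (uint64_t) ptr[byte] << (byte * 8);   /* byte 0 is the lsb */
      return val;
    }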
8519 : :
8520 : :
8521 : : /* Subroutine of native_interpret_expr. Interpret the contents of
8522 : : the buffer PTR of length LEN as a FIXED_CST of type TYPE.
8523 : : If the buffer cannot be interpreted, return NULL_TREE. */
8524 : :
8525 : : static tree
8526 : 0 : native_interpret_fixed (tree type, const unsigned char *ptr, int len)
8527 : : {
8528 : 0 : scalar_mode mode = SCALAR_TYPE_MODE (type);
8529 : 0 : int total_bytes = GET_MODE_SIZE (mode);
8530 : 0 : double_int result;
8531 : 0 : FIXED_VALUE_TYPE fixed_value;
8532 : :
8533 : 0 : if (total_bytes > len
8534 : 0 : || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
8535 : : return NULL_TREE;
8536 : :
8537 : 0 : result = double_int::from_buffer (ptr, total_bytes);
8538 : 0 : fixed_value = fixed_from_double_int (result, mode);
8539 : :
8540 : 0 : return build_fixed (type, fixed_value);
8541 : : }
8542 : :
8543 : :
8544 : : /* Subroutine of native_interpret_expr. Interpret the contents of
8545 : : the buffer PTR of length LEN as a REAL_CST of type TYPE.
8546 : : If the buffer cannot be interpreted, return NULL_TREE. */
8547 : :
8548 : : tree
8549 : 31239 : native_interpret_real (tree type, const unsigned char *ptr, int len)
8550 : : {
8551 : 31239 : scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
8552 : 31239 : int total_bytes = GET_MODE_SIZE (mode);
8553 : 31239 : unsigned char value;
8554 : : /* There are always 32 bits in each long, no matter the size of
8555 : :      the host's long.  We handle floating point representations with
8556 : : up to 192 bits. */
8557 : 31239 : REAL_VALUE_TYPE r;
8558 : 31239 : long tmp[6];
8559 : :
8560 : 31239 : if (total_bytes > len || total_bytes > 24)
8561 : : return NULL_TREE;
8562 : 31178 : int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
8563 : :
8564 : 31178 : memset (tmp, 0, sizeof (tmp));
8565 : 219928 : for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
8566 : 188750 : bitpos += BITS_PER_UNIT)
8567 : : {
8568 : : /* Both OFFSET and BYTE index within a long;
8569 : : bitpos indexes the whole float. */
8570 : 188750 : int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
8571 : 188750 : if (UNITS_PER_WORD < 4)
8572 : : {
8573 : : int word = byte / UNITS_PER_WORD;
8574 : : if (WORDS_BIG_ENDIAN)
8575 : : word = (words - 1) - word;
8576 : : offset = word * UNITS_PER_WORD;
8577 : : if (BYTES_BIG_ENDIAN)
8578 : : offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
8579 : : else
8580 : : offset += byte % UNITS_PER_WORD;
8581 : : }
8582 : : else
8583 : : {
8584 : 188750 : offset = byte;
8585 : 188750 : if (BYTES_BIG_ENDIAN)
8586 : : {
8587 : : /* Reverse bytes within each long, or within the entire float
8588 : : if it's smaller than a long (for HFmode). */
8589 : : offset = MIN (3, total_bytes - 1) - offset;
8590 : : gcc_assert (offset >= 0);
8591 : : }
8592 : : }
8593 : 188750 : value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
8594 : :
8595 : 188750 : tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
8596 : : }
8597 : :
8598 : 31178 : real_from_target (&r, tmp, mode);
8599 : 31178 : return build_real (type, r);
8600 : : }
8601 : :
8602 : :
8603 : : /* Subroutine of native_interpret_expr. Interpret the contents of
8604 : : the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
8605 : : If the buffer cannot be interpreted, return NULL_TREE. */
8606 : :
8607 : : static tree
8608 : 1295 : native_interpret_complex (tree type, const unsigned char *ptr, int len)
8609 : : {
8610 : 1295 : tree etype, rpart, ipart;
8611 : 1295 : int size;
8612 : :
8613 : 1295 : etype = TREE_TYPE (type);
8614 : 1295 : size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
8615 : 1295 : if (size * 2 > len)
8616 : : return NULL_TREE;
8617 : 1260 : rpart = native_interpret_expr (etype, ptr, size);
8618 : 1260 : if (!rpart)
8619 : : return NULL_TREE;
8620 : 1259 : ipart = native_interpret_expr (etype, ptr+size, size);
8621 : 1259 : if (!ipart)
8622 : : return NULL_TREE;
8623 : 1259 : return build_complex (type, rpart, ipart);
8624 : : }
8625 : :
8626 : : /* Read a vector of type TYPE from the target memory image given by BYTES,
8627 : : which contains LEN bytes. The vector is known to be encodable using
8628 : : NPATTERNS interleaved patterns with NELTS_PER_PATTERN elements each.
8629 : :
8630 : : Return the vector on success, otherwise return null. */
8631 : :
8632 : : static tree
8633 : 187842 : native_interpret_vector_part (tree type, const unsigned char *bytes,
8634 : : unsigned int len, unsigned int npatterns,
8635 : : unsigned int nelts_per_pattern)
8636 : : {
8637 : 187842 : tree elt_type = TREE_TYPE (type);
8638 : 187842 : if (VECTOR_BOOLEAN_TYPE_P (type)
8639 : 187845 : && TYPE_PRECISION (elt_type) <= BITS_PER_UNIT)
8640 : : {
8641 : : /* This is the only case in which elements can be smaller than a byte.
8642 : : Element 0 is always in the lsb of the containing byte. */
8643 : 1 : unsigned int elt_bits = TYPE_PRECISION (elt_type);
8644 : 1 : if (elt_bits * npatterns * nelts_per_pattern > len * BITS_PER_UNIT)
8645 : : return NULL_TREE;
8646 : :
8647 : 1 : tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8648 : 17 : for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8649 : : {
8650 : 16 : unsigned int bit_index = i * elt_bits;
8651 : 16 : unsigned int byte_index = bit_index / BITS_PER_UNIT;
8652 : 16 : unsigned int lsb = bit_index % BITS_PER_UNIT;
8653 : 32 : builder.quick_push (bytes[byte_index] & (1 << lsb)
8654 : 17 : ? build_all_ones_cst (elt_type)
8655 : 1 : : build_zero_cst (elt_type));
8656 : : }
8657 : 1 : return builder.build ();
8658 : 1 : }
8659 : :
8660 : 187841 : unsigned int elt_bytes = tree_to_uhwi (TYPE_SIZE_UNIT (elt_type));
8661 : 187841 : if (elt_bytes * npatterns * nelts_per_pattern > len)
8662 : : return NULL_TREE;
8663 : :
8664 : 187841 : tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8665 : 731221 : for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8666 : : {
8667 : 543418 : tree elt = native_interpret_expr (elt_type, bytes, elt_bytes);
8668 : 543418 : if (!elt)
8669 : 38 : return NULL_TREE;
8670 : 543380 : builder.quick_push (elt);
8671 : 543380 : bytes += elt_bytes;
8672 : : }
8673 : 187803 : return builder.build ();
8674 : 187841 : }
8675 : :
8676 : : /* Subroutine of native_interpret_expr. Interpret the contents of
8677 : : the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
8678 : : If the buffer cannot be interpreted, return NULL_TREE. */
8679 : :
8680 : : static tree
8681 : 73038 : native_interpret_vector (tree type, const unsigned char *ptr, unsigned int len)
8682 : : {
8683 : 73038 : unsigned HOST_WIDE_INT size;
8684 : :
8685 : 73038 : if (!tree_to_poly_uint64 (TYPE_SIZE_UNIT (type)).is_constant (&size)
8686 : 73038 : || size > len)
8687 : 0 : return NULL_TREE;
8688 : :
8689 : 73038 : unsigned HOST_WIDE_INT count = TYPE_VECTOR_SUBPARTS (type).to_constant ();
8690 : 73038 : return native_interpret_vector_part (type, ptr, len, count, 1);
8691 : : }
8692 : :
8693 : :
8694 : : /* Subroutine of fold_view_convert_expr. Interpret the contents of
8695 : : the buffer PTR of length LEN as a constant of type TYPE. For
8696 : : INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
8697 : : we return a REAL_CST, etc... If the buffer cannot be interpreted,
8698 : : return NULL_TREE. */
8699 : :
8700 : : tree
8701 : 4211362 : native_interpret_expr (tree type, const unsigned char *ptr, int len)
8702 : : {
8703 : 4211362 : switch (TREE_CODE (type))
8704 : : {
8705 : 4052772 : case INTEGER_TYPE:
8706 : 4052772 : case ENUMERAL_TYPE:
8707 : 4052772 : case BOOLEAN_TYPE:
8708 : 4052772 : case POINTER_TYPE:
8709 : 4052772 : case REFERENCE_TYPE:
8710 : 4052772 : case OFFSET_TYPE:
8711 : 4052772 : case BITINT_TYPE:
8712 : 4052772 : return native_interpret_int (type, ptr, len);
8713 : :
8714 : 30103 : case REAL_TYPE:
8715 : 30103 : if (tree ret = native_interpret_real (type, ptr, len))
8716 : : {
8717 : : /* For floating point values in composite modes, punt if this
8718 : : folding doesn't preserve bit representation. As the mode doesn't
8719 : : have fixed precision while GCC pretends it does, there could be
8720 : : valid values that GCC can't really represent accurately.
8721 : : See PR95450. Even for other modes, e.g. x86 XFmode can have some
8722 : : bit combinationations which GCC doesn't preserve. */
8723 : 30042 : unsigned char buf[24 * 2];
8724 : 30042 : scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
8725 : 30042 : int total_bytes = GET_MODE_SIZE (mode);
8726 : 30042 : memcpy (buf + 24, ptr, total_bytes);
8727 : 30042 : clear_type_padding_in_mask (type, buf + 24);
8728 : 30042 : if (native_encode_expr (ret, buf, total_bytes, 0) != total_bytes
8729 : 30042 : || memcmp (buf + 24, buf, total_bytes) != 0)
8730 : 156 : return NULL_TREE;
8731 : : return ret;
8732 : : }
8733 : : return NULL_TREE;
8734 : :
8735 : 0 : case FIXED_POINT_TYPE:
8736 : 0 : return native_interpret_fixed (type, ptr, len);
8737 : :
8738 : 1295 : case COMPLEX_TYPE:
8739 : 1295 : return native_interpret_complex (type, ptr, len);
8740 : :
8741 : 73038 : case VECTOR_TYPE:
8742 : 73038 : return native_interpret_vector (type, ptr, len);
8743 : :
8744 : : default:
8745 : : return NULL_TREE;
8746 : : }
8747 : : }
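A hedged sketch of the round trip the two halves enable (assumed caller pattern with hypothetical EXPR and TYPE): encode a constant into its target byte image and reinterpret those bytes as another type, which is how a VIEW_CONVERT_EXPR of a constant can be folded.

    unsigned char buf[64];
    int len = native_encode_expr (expr, buf, sizeof buf, -1);
    tree folded = len != 0 ? native_interpret_expr (type, buf, len) : NULL_TREE;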
8748 : :
8749 : : /* Returns true if we can interpret the contents of a native encoding
8750 : : as TYPE. */
8751 : :
8752 : : bool
8753 : 372456 : can_native_interpret_type_p (tree type)
8754 : : {
8755 : 372456 : switch (TREE_CODE (type))
8756 : : {
8757 : : case INTEGER_TYPE:
8758 : : case ENUMERAL_TYPE:
8759 : : case BOOLEAN_TYPE:
8760 : : case POINTER_TYPE:
8761 : : case REFERENCE_TYPE:
8762 : : case FIXED_POINT_TYPE:
8763 : : case REAL_TYPE:
8764 : : case COMPLEX_TYPE:
8765 : : case VECTOR_TYPE:
8766 : : case OFFSET_TYPE:
8767 : : return true;
8768 : 83765 : default:
8769 : 83765 : return false;
8770 : : }
8771 : : }
8772 : :
8773 : : /* Attempt to interpret an aggregate of TYPE from bytes encoded in target
8774 : : byte order at PTR + OFF with LEN bytes. Does not handle unions. */
8775 : :
8776 : : tree
8777 : 659 : native_interpret_aggregate (tree type, const unsigned char *ptr, int off,
8778 : : int len)
8779 : : {
8780 : 659 : vec<constructor_elt, va_gc> *elts = NULL;
8781 : 659 : if (TREE_CODE (type) == ARRAY_TYPE)
8782 : : {
8783 : 197 : HOST_WIDE_INT eltsz = int_size_in_bytes (TREE_TYPE (type));
8784 : 394 : if (eltsz < 0 || eltsz > len || TYPE_DOMAIN (type) == NULL_TREE)
8785 : : return NULL_TREE;
8786 : :
8787 : 197 : HOST_WIDE_INT cnt = 0;
8788 : 197 : if (TYPE_MAX_VALUE (TYPE_DOMAIN (type)))
8789 : : {
8790 : 197 : if (!tree_fits_shwi_p (TYPE_MAX_VALUE (TYPE_DOMAIN (type))))
8791 : : return NULL_TREE;
8792 : 197 : cnt = tree_to_shwi (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) + 1;
8793 : : }
8794 : 197 : if (eltsz == 0)
8795 : 0 : cnt = 0;
8796 : 197 : HOST_WIDE_INT pos = 0;
8797 : 636 : for (HOST_WIDE_INT i = 0; i < cnt; i++, pos += eltsz)
8798 : : {
8799 : 439 : tree v = NULL_TREE;
8800 : 439 : if (pos >= len || pos + eltsz > len)
8801 : 659 : return NULL_TREE;
8802 : 439 : if (can_native_interpret_type_p (TREE_TYPE (type)))
8803 : : {
8804 : 367 : v = native_interpret_expr (TREE_TYPE (type),
8805 : 367 : ptr + off + pos, eltsz);
8806 : 367 : if (v == NULL_TREE)
8807 : : return NULL_TREE;
8808 : : }
8809 : 72 : else if (TREE_CODE (TREE_TYPE (type)) == RECORD_TYPE
8810 : 72 : || TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
8811 : 72 : v = native_interpret_aggregate (TREE_TYPE (type), ptr, off + pos,
8812 : : eltsz);
8813 : 72 : if (v == NULL_TREE)
8814 : 0 : return NULL_TREE;
8815 : 439 : CONSTRUCTOR_APPEND_ELT (elts, size_int (i), v);
8816 : : }
8817 : 197 : return build_constructor (type, elts);
8818 : : }
8819 : 462 : if (TREE_CODE (type) != RECORD_TYPE)
8820 : : return NULL_TREE;
8821 : 7514 : for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
8822 : : {
8823 : 1250 : if (TREE_CODE (field) != FIELD_DECL || DECL_PADDING_P (field)
8824 : 8302 : || is_empty_type (TREE_TYPE (field)))
8825 : 5892 : continue;
8826 : 1160 : tree fld = field;
8827 : 1160 : HOST_WIDE_INT bitoff = 0, pos = 0, sz = 0;
8828 : 1160 : int diff = 0;
8829 : 1160 : tree v = NULL_TREE;
8830 : 1160 : if (DECL_BIT_FIELD (field))
8831 : : {
8832 : 180 : fld = DECL_BIT_FIELD_REPRESENTATIVE (field);
8833 : 180 : if (fld && INTEGRAL_TYPE_P (TREE_TYPE (fld)))
8834 : : {
8835 : 168 : poly_int64 bitoffset;
8836 : 168 : poly_uint64 field_offset, fld_offset;
8837 : 168 : if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
8838 : 336 : && poly_int_tree_p (DECL_FIELD_OFFSET (fld), &fld_offset))
8839 : 168 : bitoffset = (field_offset - fld_offset) * BITS_PER_UNIT;
8840 : : else
8841 : : bitoffset = 0;
8842 : 168 : bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
8843 : 168 : - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)));
8844 : 168 : diff = (TYPE_PRECISION (TREE_TYPE (fld))
8845 : 168 : - TYPE_PRECISION (TREE_TYPE (field)));
8846 : 168 : if (!bitoffset.is_constant (&bitoff)
8847 : 168 : || bitoff < 0
8848 : 168 : || bitoff > diff)
8849 : 0 : return NULL_TREE;
8850 : : }
8851 : : else
8852 : : {
8853 : 12 : if (!tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (field)))
8854 : : return NULL_TREE;
8855 : 12 : int fieldsize = TYPE_PRECISION (TREE_TYPE (field));
8856 : 12 : int bpos = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
8857 : 12 : bpos %= BITS_PER_UNIT;
8858 : 12 : fieldsize += bpos;
8859 : 12 : fieldsize += BITS_PER_UNIT - 1;
8860 : 12 : fieldsize /= BITS_PER_UNIT;
8861 : 12 : tree repr_type = find_bitfield_repr_type (fieldsize, len);
8862 : 12 : if (repr_type == NULL_TREE)
8863 : : return NULL_TREE;
8864 : 12 : sz = int_size_in_bytes (repr_type);
8865 : 12 : if (sz < 0 || sz > len)
8866 : : return NULL_TREE;
8867 : 12 : pos = int_byte_position (field);
8868 : 12 : if (pos < 0 || pos > len || pos + fieldsize > len)
8869 : : return NULL_TREE;
8870 : 12 : HOST_WIDE_INT rpos;
8871 : 12 : if (pos + sz <= len)
8872 : : rpos = pos;
8873 : : else
8874 : : {
8875 : 0 : rpos = len - sz;
8876 : 0 : gcc_assert (rpos <= pos);
8877 : : }
8878 : 12 : bitoff = (HOST_WIDE_INT) (pos - rpos) * BITS_PER_UNIT + bpos;
8879 : 12 : pos = rpos;
8880 : 12 : diff = (TYPE_PRECISION (repr_type)
8881 : 12 : - TYPE_PRECISION (TREE_TYPE (field)));
8882 : 12 : v = native_interpret_expr (repr_type, ptr + off + pos, sz);
8883 : 12 : if (v == NULL_TREE)
8884 : : return NULL_TREE;
8885 : : fld = NULL_TREE;
8886 : : }
8887 : : }
8888 : :
8889 : 168 : if (fld)
8890 : : {
8891 : 1148 : sz = int_size_in_bytes (TREE_TYPE (fld));
8892 : 1148 : if (sz < 0 || sz > len)
8893 : : return NULL_TREE;
8894 : 1148 : tree byte_pos = byte_position (fld);
8895 : 1148 : if (!tree_fits_shwi_p (byte_pos))
8896 : : return NULL_TREE;
8897 : 1148 : pos = tree_to_shwi (byte_pos);
8898 : 1148 : if (pos < 0 || pos > len || pos + sz > len)
8899 : : return NULL_TREE;
8900 : : }
8901 : 1148 : if (fld == NULL_TREE)
8902 : : /* Already handled above. */;
8903 : 1148 : else if (can_native_interpret_type_p (TREE_TYPE (fld)))
8904 : : {
8905 : 956 : v = native_interpret_expr (TREE_TYPE (fld),
8906 : 956 : ptr + off + pos, sz);
8907 : 956 : if (v == NULL_TREE)
8908 : : return NULL_TREE;
8909 : : }
8910 : 192 : else if (TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE
8911 : 192 : || TREE_CODE (TREE_TYPE (fld)) == ARRAY_TYPE)
8912 : 192 : v = native_interpret_aggregate (TREE_TYPE (fld), ptr, off + pos, sz);
8913 : 204 : if (v == NULL_TREE)
8914 : : return NULL_TREE;
8915 : 1160 : if (fld != field)
8916 : : {
8917 : 180 : if (TREE_CODE (v) != INTEGER_CST)
8918 : : return NULL_TREE;
8919 : :
8920 : : /* FIXME: Figure out how to handle PDP endian bitfields. */
8921 : 180 : if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
8922 : : return NULL_TREE;
8923 : 180 : if (!BYTES_BIG_ENDIAN)
8924 : 180 : v = wide_int_to_tree (TREE_TYPE (field),
8925 : 360 : wi::lrshift (wi::to_wide (v), bitoff));
8926 : : else
8927 : : v = wide_int_to_tree (TREE_TYPE (field),
8928 : : wi::lrshift (wi::to_wide (v),
8929 : : diff - bitoff));
8930 : : }
8931 : 1160 : CONSTRUCTOR_APPEND_ELT (elts, field, v);
8932 : : }
8933 : 462 : return build_constructor (type, elts);
8934 : : }
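
For bit-fields, the loop above reads the DECL_BIT_FIELD_REPRESENTATIVE as an integer and then shifts the field's bits down by their offset inside that representative (little-endian case; the big-endian branch shifts by diff - bitoff instead). The sketch below shows the little-endian extraction on a hypothetical packed struct; the layout assumed is the usual little-endian Itanium-ABI one and is an assumption of the example, not something the code above relies on being portable.

#include <cassert>
#include <cstdint>
#include <cstring>

struct packed_flags            /* hypothetical example type */
{
  unsigned a : 3;
  unsigned b : 5;
  unsigned c : 8;
};

/* Read the 32-bit representative that covers the field, shift its bits down
   by the field's offset within the representative, and mask to the field's
   precision -- the little-endian branch of the code above in miniature.  */
static unsigned
extract_bitfield (const unsigned char *bytes, unsigned bit_off, unsigned precision)
{
  std::uint32_t repr;
  std::memcpy (&repr, bytes, sizeof (repr));
  repr >>= bit_off;
  return repr & ((1u << precision) - 1);
}

int
main ()
{
  packed_flags f = { 5, 17, 200 };
  unsigned char buf[sizeof (std::uint32_t)] = { 0 };
  std::memcpy (buf, &f, sizeof (f) < sizeof (buf) ? sizeof (f) : sizeof (buf));
  /* Little-endian layout assumed: a at bit 0, b at bit 3, c at bit 8.  */
  assert (extract_bitfield (buf, 0, 3) == 5);
  assert (extract_bitfield (buf, 3, 5) == 17);
  assert (extract_bitfield (buf, 8, 8) == 200);
  return 0;
}
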
8935 : :
8936 : : /* Routines for manipulation of native_encode_expr encoded data if the encoded
8937 : : or extracted constant positions and/or sizes aren't byte aligned. */
8938 : :
8939 : : /* Shift left the bytes in PTR of SZ elements by AMNT bits, carrying over the
8940 : : bits between adjacent elements. AMNT should be within
8941 : : [0, BITS_PER_UNIT).
8942 : : Example, AMNT = 2:
8943 : : 00011111|11100000 << 2 = 01111111|10000000
8944 : : PTR[1] | PTR[0] PTR[1] | PTR[0]. */
8945 : :
8946 : : void
8947 : 20541 : shift_bytes_in_array_left (unsigned char *ptr, unsigned int sz,
8948 : : unsigned int amnt)
8949 : : {
8950 : 20541 : if (amnt == 0)
8951 : : return;
8952 : :
8953 : 12236 : unsigned char carry_over = 0U;
8954 : 12236 : unsigned char carry_mask = (~0U) << (unsigned char) (BITS_PER_UNIT - amnt);
8955 : 12236 : unsigned char clear_mask = (~0U) << amnt;
8956 : :
8957 : 69993 : for (unsigned int i = 0; i < sz; i++)
8958 : : {
8959 : 57757 : unsigned prev_carry_over = carry_over;
8960 : 57757 : carry_over = (ptr[i] & carry_mask) >> (BITS_PER_UNIT - amnt);
8961 : :
8962 : 57757 : ptr[i] <<= amnt;
8963 : 57757 : if (i != 0)
8964 : : {
8965 : 45521 : ptr[i] &= clear_mask;
8966 : 45521 : ptr[i] |= prev_carry_over;
8967 : : }
8968 : : }
8969 : : }
8970 : :
8971 : : /* Like shift_bytes_in_array_left but for big-endian.
8972 : : Shift right the bytes in PTR of SZ elements by AMNT bits, carrying over the
8973 : : bits between adjacent elements. AMNT should be within
8974 : : [0, BITS_PER_UNIT).
8975 : : Example, AMNT = 2:
8976 : : 00011111|11100000 >> 2 = 00000111|11111000
8977 : : PTR[0] | PTR[1] PTR[0] | PTR[1]. */
8978 : :
8979 : : void
8980 : 8 : shift_bytes_in_array_right (unsigned char *ptr, unsigned int sz,
8981 : : unsigned int amnt)
8982 : : {
8983 : 8 : if (amnt == 0)
8984 : : return;
8985 : :
8986 : 4 : unsigned char carry_over = 0U;
8987 : 4 : unsigned char carry_mask = ~(~0U << amnt);
8988 : :
8989 : 12 : for (unsigned int i = 0; i < sz; i++)
8990 : : {
8991 : 8 : unsigned prev_carry_over = carry_over;
8992 : 8 : carry_over = ptr[i] & carry_mask;
8993 : :
8994 : 8 : carry_over <<= (unsigned char) BITS_PER_UNIT - amnt;
8995 : 8 : ptr[i] >>= amnt;
8996 : 8 : ptr[i] |= prev_carry_over;
8997 : : }
8998 : : }
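
Both helpers propagate carry bits between adjacent bytes; only the direction differs with endianness. The standalone restatement below is a simplification of the same logic (after the shift the vacated bits are already zero, so no explicit clear mask is needed) and reproduces the AMNT = 2 examples from the comments above; it is a sketch, not the GCC functions themselves.

#include <cassert>

static void
shl_bytes (unsigned char *ptr, unsigned sz, unsigned amnt)   /* little-endian */
{
  if (amnt == 0)
    return;
  unsigned char carry = 0;
  for (unsigned i = 0; i < sz; i++)
    {
      unsigned char next_carry = ptr[i] >> (8 - amnt);
      ptr[i] = (ptr[i] << amnt) | carry;
      carry = next_carry;
    }
}

static void
shr_bytes (unsigned char *ptr, unsigned sz, unsigned amnt)   /* big-endian */
{
  if (amnt == 0)
    return;
  unsigned char carry = 0;
  for (unsigned i = 0; i < sz; i++)
    {
      unsigned char next_carry = ptr[i] << (8 - amnt);
      ptr[i] = (ptr[i] >> amnt) | carry;
      carry = next_carry;
    }
}

int
main ()
{
  unsigned char le[2] = { 0xe0, 0x1f };     /* PTR[1]|PTR[0] = 00011111|11100000 */
  shl_bytes (le, 2, 2);
  assert (le[0] == 0x80 && le[1] == 0x7f);  /* 01111111|10000000 */

  unsigned char be[2] = { 0x1f, 0xe0 };     /* PTR[0]|PTR[1] = 00011111|11100000 */
  shr_bytes (be, 2, 2);
  assert (be[0] == 0x07 && be[1] == 0xf8);  /* 00000111|11111000 */
  return 0;
}
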
8999 : :
9000 : : /* Try to view-convert VECTOR_CST EXPR to VECTOR_TYPE TYPE by operating
9001 : : directly on the VECTOR_CST encoding, in a way that works for variable-
9002 : : length vectors. Return the resulting VECTOR_CST on success or null
9003 : : on failure. */
9004 : :
9005 : : static tree
9006 : 121853 : fold_view_convert_vector_encoding (tree type, tree expr)
9007 : : {
9008 : 121853 : tree expr_type = TREE_TYPE (expr);
9009 : 121853 : poly_uint64 type_bits, expr_bits;
9010 : 121853 : if (!poly_int_tree_p (TYPE_SIZE (type), &type_bits)
9011 : 121853 : || !poly_int_tree_p (TYPE_SIZE (expr_type), &expr_bits))
9012 : 0 : return NULL_TREE;
9013 : :
9014 : 121853 : poly_uint64 type_units = TYPE_VECTOR_SUBPARTS (type);
9015 : 121853 : poly_uint64 expr_units = TYPE_VECTOR_SUBPARTS (expr_type);
9016 : 121853 : unsigned int type_elt_bits = vector_element_size (type_bits, type_units);
9017 : 121853 : unsigned int expr_elt_bits = vector_element_size (expr_bits, expr_units);
9018 : :
9019 : : /* We can only preserve the semantics of a stepped pattern if the new
9020 : : vector element is an integer of the same size. */
9021 : 121853 : if (VECTOR_CST_STEPPED_P (expr)
9022 : 121853 : && (!INTEGRAL_TYPE_P (type) || type_elt_bits != expr_elt_bits))
9023 : : return NULL_TREE;
9024 : :
9025 : : /* The number of bits needed to encode one element from every pattern
9026 : : of the original vector. */
9027 : 114804 : unsigned int expr_sequence_bits
9028 : 114804 : = VECTOR_CST_NPATTERNS (expr) * expr_elt_bits;
9029 : :
9030 : : /* The number of bits needed to encode one element from every pattern
9031 : : of the result. */
9032 : 114804 : unsigned int type_sequence_bits
9033 : 114804 : = least_common_multiple (expr_sequence_bits, type_elt_bits);
9034 : :
9035 : : /* Don't try to read more bytes than are available, which can happen
9036 : : for constant-sized vectors if TYPE has larger elements than EXPR_TYPE.
9037 : : The general VIEW_CONVERT handling can cope with that case, so there's
9038 : : no point complicating things here. */
9039 : 114804 : unsigned int nelts_per_pattern = VECTOR_CST_NELTS_PER_PATTERN (expr);
9040 : 114804 : unsigned int buffer_bytes = CEIL (nelts_per_pattern * type_sequence_bits,
9041 : : BITS_PER_UNIT);
9042 : 114804 : unsigned int buffer_bits = buffer_bytes * BITS_PER_UNIT;
9043 : 114804 : if (known_gt (buffer_bits, expr_bits))
9044 : : return NULL_TREE;
9045 : :
9046 : : /* Get enough bytes of EXPR to form the new encoding. */
9047 : 114804 : auto_vec<unsigned char, 128> buffer (buffer_bytes);
9048 : 114804 : buffer.quick_grow (buffer_bytes);
9049 : 114804 : if (native_encode_vector_part (expr, buffer.address (), buffer_bytes, 0,
9050 : 114804 : buffer_bits / expr_elt_bits)
9051 : : != (int) buffer_bytes)
9052 : : return NULL_TREE;
9053 : :
9054 : : /* Reencode the bytes as TYPE. */
9055 : 114804 : unsigned int type_npatterns = type_sequence_bits / type_elt_bits;
9056 : 229608 : return native_interpret_vector_part (type, &buffer[0], buffer.length (),
9057 : 114804 : type_npatterns, nelts_per_pattern);
9058 : 114804 : }
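
The buffer sizing above boils down to: encode just enough elements that both the source pattern width and the destination element width divide the bit count evenly. A worked numeric sketch of that arithmetic follows; the vector shapes are hypothetical, chosen only to make the numbers concrete, and std::lcm stands in for least_common_multiple.

#include <cstdio>
#include <numeric>

int
main ()
{
  unsigned expr_elt_bits = 32, expr_npatterns = 4, nelts_per_pattern = 1;
  unsigned type_elt_bits = 64;

  unsigned expr_sequence_bits = expr_npatterns * expr_elt_bits;         /* 128 */
  unsigned type_sequence_bits
    = std::lcm (expr_sequence_bits, type_elt_bits);                     /* 128 */
  unsigned buffer_bytes
    = (nelts_per_pattern * type_sequence_bits + 7) / 8;                 /* 16  */
  unsigned type_npatterns = type_sequence_bits / type_elt_bits;         /* 2   */

  std::printf ("encode %u bytes, reinterpret as %u patterns x %u elts/pattern\n",
               buffer_bytes, type_npatterns, nelts_per_pattern);
  return 0;
}
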
9059 : :
9060 : : /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
9061 : : TYPE at compile-time. If we're unable to perform the conversion
9062 : : return NULL_TREE. */
9063 : :
9064 : : static tree
9065 : 12111980 : fold_view_convert_expr (tree type, tree expr)
9066 : : {
9067 : 12111980 : unsigned char buffer[128];
9068 : 12111980 : unsigned char *buf;
9069 : 12111980 : int len;
9070 : 12111980 : HOST_WIDE_INT l;
9071 : :
9072 : : /* Check that the host and target are sane. */
9073 : 12111980 : if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
9074 : : return NULL_TREE;
9075 : :
9076 : 12111980 : if (VECTOR_TYPE_P (type) && TREE_CODE (expr) == VECTOR_CST)
9077 : 121853 : if (tree res = fold_view_convert_vector_encoding (type, expr))
9078 : : return res;
9079 : :
9080 : 11997195 : l = int_size_in_bytes (type);
9081 : 11997195 : if (l > (int) sizeof (buffer)
9082 : 11997195 : && l <= WIDE_INT_MAX_PRECISION / BITS_PER_UNIT)
9083 : : {
9084 : 0 : buf = XALLOCAVEC (unsigned char, l);
9085 : 0 : len = l;
9086 : : }
9087 : : else
9088 : : {
9089 : : buf = buffer;
9090 : : len = sizeof (buffer);
9091 : : }
9092 : 11997195 : len = native_encode_expr (expr, buf, len);
9093 : 11997195 : if (len == 0)
9094 : : return NULL_TREE;
9095 : :
9096 : 3304142 : return native_interpret_expr (type, buf, len);
9097 : : }
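
Stripped of the GCC plumbing, fold_view_convert_expr is "serialize the constant into a byte buffer, then reinterpret the bytes in the destination type". Here is a standalone scalar sketch of that idea, with plain memcpy standing in for native_encode_expr and native_interpret_expr; it is an illustration, not the routine above.

#include <cstdint>
#include <cstdio>
#include <cstring>

int
main ()
{
  float f = 1.0f;
  unsigned char buffer[sizeof (float)];
  std::memcpy (buffer, &f, sizeof (f));        /* native_encode_expr analogue */

  std::uint32_t bits;
  std::memcpy (&bits, buffer, sizeof (bits));  /* native_interpret_expr analogue */
  std::printf ("VIEW_CONVERT_EXPR<uint32_t>(1.0f) = 0x%08x\n", (unsigned) bits);
  return 0;
}
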
9098 : :
9099 : : /* Build an expression for the address of T. Folds away INDIRECT_REF
9100 : : to avoid confusing the gimplify process. */
9101 : :
9102 : : tree
9103 : 378767416 : build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
9104 : : {
9105 : : /* The size of the object is not relevant when talking about its address. */
9106 : 378767416 : if (TREE_CODE (t) == WITH_SIZE_EXPR)
9107 : 0 : t = TREE_OPERAND (t, 0);
9108 : :
9109 : 378767416 : if (INDIRECT_REF_P (t))
9110 : : {
9111 : 43594170 : t = TREE_OPERAND (t, 0);
9112 : :
9113 : 43594170 : if (TREE_TYPE (t) != ptrtype)
9114 : 27563249 : t = build1_loc (loc, NOP_EXPR, ptrtype, t);
9115 : : }
9116 : 335173246 : else if (TREE_CODE (t) == MEM_REF
9117 : 335173246 : && integer_zerop (TREE_OPERAND (t, 1)))
9118 : : {
9119 : 1515262 : t = TREE_OPERAND (t, 0);
9120 : :
9121 : 1515262 : if (TREE_TYPE (t) != ptrtype)
9122 : 1000150 : t = fold_convert_loc (loc, ptrtype, t);
9123 : : }
9124 : 333657984 : else if (TREE_CODE (t) == MEM_REF
9125 : 333657984 : && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
9126 : 21 : return fold_binary (POINTER_PLUS_EXPR, ptrtype,
9127 : : TREE_OPERAND (t, 0),
9128 : : convert_to_ptrofftype (TREE_OPERAND (t, 1)));
9129 : 333657963 : else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
9130 : : {
9131 : 21883159 : t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
9132 : :
9133 : 21883159 : if (TREE_TYPE (t) != ptrtype)
9134 : 12994 : t = fold_convert_loc (loc, ptrtype, t);
9135 : : }
9136 : : else
9137 : 311774804 : t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
9138 : :
9139 : : return t;
9140 : : }
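
At the source level, the INDIRECT_REF and zero-offset MEM_REF cases above amount to the familiar identity that &*p is just p, possibly behind a pointer cast. A trivial standalone check of that identity (illustration only):

#include <cassert>

int
main ()
{
  int x = 42;
  int *p = &x;
  assert (&*p == p);                    /* ADDR_EXPR of INDIRECT_REF folds to p */
  assert ((void *) &*p == (void *) p);  /* at most a pointer cast remains */
  return 0;
}
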
9141 : :
9142 : : /* Build an expression for the address of T. */
9143 : :
9144 : : tree
9145 : 345810740 : build_fold_addr_expr_loc (location_t loc, tree t)
9146 : : {
9147 : 345810740 : tree ptrtype = build_pointer_type (TREE_TYPE (t));
9148 : :
9149 : 345810740 : return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
9150 : : }
9151 : :
9152 : : /* Fold a unary expression of code CODE and type TYPE with operand
9153 : : OP0. Return the folded expression if folding is successful.
9154 : : Otherwise, return NULL_TREE. */
9155 : :
9156 : : tree
9157 : 1658269776 : fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
9158 : : {
9159 : 1658269776 : tree tem;
9160 : 1658269776 : tree arg0;
9161 : 1658269776 : enum tree_code_class kind = TREE_CODE_CLASS (code);
9162 : :
9163 : 1658269776 : gcc_assert (IS_EXPR_CODE_CLASS (kind)
9164 : : && TREE_CODE_LENGTH (code) == 1);
9165 : :
9166 : 1658269776 : arg0 = op0;
9167 : 1658269776 : if (arg0)
9168 : : {
9169 : 1658256635 : if (CONVERT_EXPR_CODE_P (code)
9170 : : || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
9171 : : {
9172 : : /* Don't use STRIP_NOPS, because signedness of argument type
9173 : : matters. */
9174 : 851271189 : STRIP_SIGN_NOPS (arg0);
9175 : : }
9176 : : else
9177 : : {
9178 : : /* Strip any conversions that don't change the mode. This
9179 : : is safe for every expression, except for a comparison
9180 : : expression because its signedness is derived from its
9181 : : operands.
9182 : :
9183 : : Note that this is done as an internal manipulation within
9184 : : the constant folder, in order to find the simplest
9185 : : representation of the arguments so that their form can be
9186 : : studied. In any case, the appropriate type conversions
9187 : : should be put back in the tree that will get out of the
9188 : : constant folder. */
9189 : 806985446 : STRIP_NOPS (arg0);
9190 : : }
9191 : :
9192 : 1658256635 : if (CONSTANT_CLASS_P (arg0))
9193 : : {
9194 : 226492321 : tree tem = const_unop (code, type, arg0);
9195 : 226492321 : if (tem)
9196 : : {
9197 : 191352643 : if (TREE_TYPE (tem) != type)
9198 : 65010 : tem = fold_convert_loc (loc, type, tem);
9199 : 191352643 : return tem;
9200 : : }
9201 : : }
9202 : : }
9203 : :
9204 : 1466917133 : tem = generic_simplify (loc, code, type, op0);
9205 : 1466917133 : if (tem)
9206 : : return tem;
9207 : :
9208 : 1112765988 : if (TREE_CODE_CLASS (code) == tcc_unary)
9209 : : {
9210 : 587278658 : if (TREE_CODE (arg0) == COMPOUND_EXPR)
9211 : 980430 : return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9212 : : fold_build1_loc (loc, code, type,
9213 : 980430 : fold_convert_loc (loc, TREE_TYPE (op0),
9214 : 1960860 : TREE_OPERAND (arg0, 1))));
9215 : 586298228 : else if (TREE_CODE (arg0) == COND_EXPR)
9216 : : {
9217 : 335464 : tree arg01 = TREE_OPERAND (arg0, 1);
9218 : 335464 : tree arg02 = TREE_OPERAND (arg0, 2);
9219 : 335464 : if (! VOID_TYPE_P (TREE_TYPE (arg01)))
9220 : 331282 : arg01 = fold_build1_loc (loc, code, type,
9221 : : fold_convert_loc (loc,
9222 : 331282 : TREE_TYPE (op0), arg01));
9223 : 335464 : if (! VOID_TYPE_P (TREE_TYPE (arg02)))
9224 : 335461 : arg02 = fold_build1_loc (loc, code, type,
9225 : : fold_convert_loc (loc,
9226 : 335461 : TREE_TYPE (op0), arg02));
9227 : 335464 : tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
9228 : : arg01, arg02);
9229 : :
9230 : : /* If this was a conversion, and all we did was to move into
9231 : : inside the COND_EXPR, bring it back out. But leave it if
9232 : : it is a conversion from integer to integer and the
9233 : : result precision is no wider than a word since such a
9234 : : conversion is cheap and may be optimized away by combine,
9235 : : while it couldn't if it were outside the COND_EXPR. Then return
9236 : : so we don't get into an infinite recursion loop taking the
9237 : : conversion out and then back in. */
9238 : :
9239 : 335464 : if ((CONVERT_EXPR_CODE_P (code)
9240 : 10007 : || code == NON_LVALUE_EXPR)
9241 : 325476 : && TREE_CODE (tem) == COND_EXPR
9242 : 311690 : && TREE_CODE (TREE_OPERAND (tem, 1)) == code
9243 : 283334 : && TREE_CODE (TREE_OPERAND (tem, 2)) == code
9244 : 151546 : && ! VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (tem, 1)))
9245 : 151334 : && ! VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (tem, 2)))
9246 : 151334 : && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
9247 : 151334 : == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
9248 : 492382 : && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
9249 : 6449 : && (INTEGRAL_TYPE_P
9250 : : (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
9251 : 6409 : && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
9252 : 6384 : || flag_syntax_only))
9253 : 144125 : tem = build1_loc (loc, code, type,
9254 : : build3 (COND_EXPR,
9255 : 144125 : TREE_TYPE (TREE_OPERAND
9256 : : (TREE_OPERAND (tem, 1), 0)),
9257 : 144125 : TREE_OPERAND (tem, 0),
9258 : 144125 : TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
9259 : 144125 : TREE_OPERAND (TREE_OPERAND (tem, 2),
9260 : : 0)));
9261 : 335464 : return tem;
9262 : : }
9263 : : }
9264 : :
9265 : 1111450094 : switch (code)
9266 : : {
9267 : 38982453 : case NON_LVALUE_EXPR:
9268 : 38982453 : if (!maybe_lvalue_p (op0))
9269 : 29045533 : return fold_convert_loc (loc, type, op0);
9270 : : return NULL_TREE;
9271 : :
9272 : 538530336 : CASE_CONVERT:
9273 : 538530336 : case FLOAT_EXPR:
9274 : 538530336 : case FIX_TRUNC_EXPR:
9275 : 538530336 : if (COMPARISON_CLASS_P (op0))
9276 : : {
9277 : : /* If we have (type) (a CMP b) and type is an integral type, return
9278 : : new expression involving the new type. Canonicalize
9279 : : (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
9280 : : non-integral type.
9281 : : Do not fold the result as that would not simplify further, also
9282 : : folding again results in recursions. */
9283 : 359304 : if (TREE_CODE (type) == BOOLEAN_TYPE)
9284 : 70648 : return build2_loc (loc, TREE_CODE (op0), type,
9285 : 70648 : TREE_OPERAND (op0, 0),
9286 : 141296 : TREE_OPERAND (op0, 1));
9287 : 288656 : else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
9288 : 5952 : && TREE_CODE (type) != VECTOR_TYPE)
9289 : 5952 : return build3_loc (loc, COND_EXPR, type, op0,
9290 : : constant_boolean_node (true, type),
9291 : 5952 : constant_boolean_node (false, type));
9292 : : }
9293 : :
9294 : : /* Handle (T *)&A.B.C for A being of type T and B and C
9295 : : living at offset zero. This occurs frequently in
9296 : : C++ upcasting and then accessing the base. */
9297 : 538453736 : if (TREE_CODE (op0) == ADDR_EXPR
9298 : 106287639 : && POINTER_TYPE_P (type)
9299 : 638612968 : && handled_component_p (TREE_OPERAND (op0, 0)))
9300 : : {
9301 : 25589774 : poly_int64 bitsize, bitpos;
9302 : 25589774 : tree offset;
9303 : 25589774 : machine_mode mode;
9304 : 25589774 : int unsignedp, reversep, volatilep;
9305 : 25589774 : tree base
9306 : 25589774 : = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
9307 : : &offset, &mode, &unsignedp, &reversep,
9308 : : &volatilep);
9309 : : /* If the reference was to a (constant) zero offset, we can use
9310 : : the address of the base if it has the same base type
9311 : : as the result type and the pointer type is unqualified. */
9312 : 25589774 : if (!offset
9313 : 25493320 : && known_eq (bitpos, 0)
9314 : 16463575 : && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
9315 : 16463575 : == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
9316 : 25595652 : && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
9317 : 5678 : return fold_convert_loc (loc, type,
9318 : 5678 : build_fold_addr_expr_loc (loc, base));
9319 : : }
9320 : :
9321 : 538448058 : if (TREE_CODE (op0) == MODIFY_EXPR
9322 : 253406 : && TREE_CONSTANT (TREE_OPERAND (op0, 1))
9323 : : /* Detect assigning a bitfield. */
9324 : 538449827 : && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
9325 : 110 : && DECL_BIT_FIELD
9326 : : (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
9327 : : {
9328 : : /* Don't leave an assignment inside a conversion
9329 : : unless assigning a bitfield. */
9330 : 1720 : tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
9331 : : /* First do the assignment, then return converted constant. */
9332 : 1720 : tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
9333 : 1720 : suppress_warning (tem /* What warning? */);
9334 : 1720 : TREE_USED (tem) = 1;
9335 : 1720 : return tem;
9336 : : }
9337 : :
9338 : : /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
9339 : : constants (if x has signed type, the sign bit cannot be set
9340 : : in c). This folds extension into the BIT_AND_EXPR.
9341 : : ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
9342 : : very likely don't have maximal range for their precision and this
9343 : : transformation effectively doesn't preserve non-maximal ranges. */
9344 : 538446338 : if (TREE_CODE (type) == INTEGER_TYPE
9345 : 255286879 : && TREE_CODE (op0) == BIT_AND_EXPR
9346 : 538911962 : && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
9347 : : {
9348 : 234005 : tree and_expr = op0;
9349 : 234005 : tree and0 = TREE_OPERAND (and_expr, 0);
9350 : 234005 : tree and1 = TREE_OPERAND (and_expr, 1);
9351 : 234005 : int change = 0;
9352 : :
9353 : 234005 : if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
9354 : 234005 : || (TYPE_PRECISION (type)
9355 : 116145 : <= TYPE_PRECISION (TREE_TYPE (and_expr))))
9356 : : change = 1;
9357 : 67894 : else if (TYPE_PRECISION (TREE_TYPE (and1))
9358 : : <= HOST_BITS_PER_WIDE_INT
9359 : 67894 : && tree_fits_uhwi_p (and1))
9360 : : {
9361 : 66798 : unsigned HOST_WIDE_INT cst;
9362 : :
9363 : 66798 : cst = tree_to_uhwi (and1);
9364 : 133596 : cst &= HOST_WIDE_INT_M1U
9365 : 66798 : << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
9366 : 66798 : change = (cst == 0);
9367 : 66798 : if (change
9368 : 66798 : && !flag_syntax_only
9369 : 132741 : && (load_extend_op (TYPE_MODE (TREE_TYPE (and0)))
9370 : : == ZERO_EXTEND))
9371 : : {
9372 : : tree uns = unsigned_type_for (TREE_TYPE (and0));
9373 : : and0 = fold_convert_loc (loc, uns, and0);
9374 : : and1 = fold_convert_loc (loc, uns, and1);
9375 : : }
9376 : : }
9377 : 66798 : if (change)
9378 : : {
9379 : 232909 : tree and1_type = TREE_TYPE (and1);
9380 : 232909 : unsigned prec = MAX (TYPE_PRECISION (and1_type),
9381 : : TYPE_PRECISION (type));
9382 : 232909 : tem = force_fit_type (type,
9383 : 232909 : wide_int::from (wi::to_wide (and1), prec,
9384 : 232909 : TYPE_SIGN (and1_type)),
9385 : 232909 : 0, TREE_OVERFLOW (and1));
9386 : 232909 : return fold_build2_loc (loc, BIT_AND_EXPR, type,
9387 : 232909 : fold_convert_loc (loc, type, and0), tem);
9388 : : }
9389 : : }
9390 : :
9391 : : /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
9392 : : cast (T1)X will fold away. We assume that this happens when X itself
9393 : : is a cast. */
9394 : 538213429 : if (POINTER_TYPE_P (type)
9395 : 249300131 : && TREE_CODE (arg0) == POINTER_PLUS_EXPR
9396 : 541870489 : && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
9397 : : {
9398 : 1541890 : tree arg00 = TREE_OPERAND (arg0, 0);
9399 : 1541890 : tree arg01 = TREE_OPERAND (arg0, 1);
9400 : :
9401 : : /* If -fsanitize=alignment, avoid this optimization in GENERIC
9402 : : when the pointed type needs higher alignment than
9403 : : the p+ first operand's pointed type. */
9404 : 1541890 : if (!in_gimple_form
9405 : 1529366 : && sanitize_flags_p (SANITIZE_ALIGNMENT)
9406 : 1542758 : && (min_align_of_type (TREE_TYPE (type))
9407 : 434 : > min_align_of_type (TREE_TYPE (TREE_TYPE (arg00)))))
9408 : : return NULL_TREE;
9409 : :
9410 : : /* Similarly, avoid this optimization in GENERIC for -fsanitize=null
9411 : : when type is a reference type and arg00's type is not,
9412 : : because arg00 could be validly nullptr and if arg01 doesn't return,
9413 : : we don't want false positive binding of reference to nullptr. */
9414 : 1541823 : if (TREE_CODE (type) == REFERENCE_TYPE
9415 : 1034379 : && !in_gimple_form
9416 : 1034362 : && sanitize_flags_p (SANITIZE_NULL)
9417 : 1542106 : && TREE_CODE (TREE_TYPE (arg00)) != REFERENCE_TYPE)
9418 : : return NULL_TREE;
9419 : :
9420 : 1541540 : arg00 = fold_convert_loc (loc, type, arg00);
9421 : 1541540 : return fold_build_pointer_plus_loc (loc, arg00, arg01);
9422 : : }
9423 : :
9424 : : /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
9425 : : of the same precision, and X is an integer type not narrower than
9426 : : types T1 or T2, i.e. the cast (T2)X isn't an extension. */
9427 : 536671539 : if (INTEGRAL_TYPE_P (type)
9428 : 260149681 : && TREE_CODE (op0) == BIT_NOT_EXPR
9429 : 498977 : && INTEGRAL_TYPE_P (TREE_TYPE (op0))
9430 : 498977 : && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
9431 : 536979829 : && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
9432 : : {
9433 : 306652 : tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
9434 : 372536 : if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
9435 : 372534 : && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
9436 : 247868 : return fold_build1_loc (loc, BIT_NOT_EXPR, type,
9437 : 247868 : fold_convert_loc (loc, type, tem));
9438 : : }
9439 : :
9440 : : /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
9441 : : type of X and Y (integer types only). */
9442 : 536423671 : if (INTEGRAL_TYPE_P (type)
9443 : 259901813 : && TREE_CODE (op0) == MULT_EXPR
9444 : 7485235 : && INTEGRAL_TYPE_P (TREE_TYPE (op0))
9445 : 7464558 : && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0))
9446 : 536482582 : && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
9447 : 13250 : || !sanitize_flags_p (SANITIZE_SI_OVERFLOW)))
9448 : : {
9449 : : /* Be careful not to introduce new overflows. */
9450 : 58871 : tree mult_type;
9451 : 58871 : if (TYPE_OVERFLOW_WRAPS (type))
9452 : : mult_type = type;
9453 : : else
9454 : 1964 : mult_type = unsigned_type_for (type);
9455 : :
9456 : 58871 : if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
9457 : : {
9458 : 117742 : tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
9459 : : fold_convert_loc (loc, mult_type,
9460 : 58871 : TREE_OPERAND (op0, 0)),
9461 : : fold_convert_loc (loc, mult_type,
9462 : 58871 : TREE_OPERAND (op0, 1)));
9463 : 58871 : return fold_convert_loc (loc, type, tem);
9464 : : }
9465 : : }
9466 : :
9467 : : return NULL_TREE;
9468 : :
9469 : 236261835 : case VIEW_CONVERT_EXPR:
9470 : 236261835 : if (TREE_CODE (op0) == MEM_REF)
9471 : : {
9472 : 1 : if (TYPE_ALIGN (TREE_TYPE (op0)) != TYPE_ALIGN (type))
9473 : 1 : type = build_aligned_type (type, TYPE_ALIGN (TREE_TYPE (op0)));
9474 : 1 : tem = fold_build2_loc (loc, MEM_REF, type,
9475 : 1 : TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
9476 : 1 : REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
9477 : 1 : return tem;
9478 : : }
9479 : :
9480 : : return NULL_TREE;
9481 : :
9482 : 3491213 : case NEGATE_EXPR:
9483 : 3491213 : tem = fold_negate_expr (loc, arg0);
9484 : 3491213 : if (tem)
9485 : 1547 : return fold_convert_loc (loc, type, tem);
9486 : : return NULL_TREE;
9487 : :
9488 : 2494382 : case ABS_EXPR:
9489 : : /* Convert fabs((double)float) into (double)fabsf(float). */
9490 : 2494382 : if (TREE_CODE (arg0) == NOP_EXPR
9491 : 18685 : && TREE_CODE (type) == REAL_TYPE)
9492 : : {
9493 : 18649 : tree targ0 = strip_float_extensions (arg0);
9494 : 18649 : if (targ0 != arg0)
9495 : 18445 : return fold_convert_loc (loc, type,
9496 : : fold_build1_loc (loc, ABS_EXPR,
9497 : 18445 : TREE_TYPE (targ0),
9498 : 18445 : targ0));
9499 : : }
9500 : : return NULL_TREE;
9501 : :
9502 : 2458555 : case BIT_NOT_EXPR:
9503 : : /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
9504 : 2458555 : if (TREE_CODE (arg0) == BIT_XOR_EXPR
9505 : 2460240 : && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
9506 : : fold_convert_loc (loc, type,
9507 : 1685 : TREE_OPERAND (arg0, 0)))))
9508 : 14 : return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
9509 : : fold_convert_loc (loc, type,
9510 : 28 : TREE_OPERAND (arg0, 1)));
9511 : 2458541 : else if (TREE_CODE (arg0) == BIT_XOR_EXPR
9512 : 2460212 : && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
9513 : : fold_convert_loc (loc, type,
9514 : 1671 : TREE_OPERAND (arg0, 1)))))
9515 : 23 : return fold_build2_loc (loc, BIT_XOR_EXPR, type,
9516 : : fold_convert_loc (loc, type,
9517 : 46 : TREE_OPERAND (arg0, 0)), tem);
9518 : :
9519 : : return NULL_TREE;
9520 : :
9521 : 44958090 : case TRUTH_NOT_EXPR:
9522 : : /* Note that the operand of this must be an int
9523 : : and its values must be 0 or 1.
9524 : : ("true" is a fixed value perhaps depending on the language,
9525 : : but we don't handle values other than 1 correctly yet.) */
9526 : 44958090 : tem = fold_truth_not_expr (loc, arg0);
9527 : 44958090 : if (!tem)
9528 : : return NULL_TREE;
9529 : 31243357 : return fold_convert_loc (loc, type, tem);
9530 : :
9531 : 59539737 : case INDIRECT_REF:
9532 : : /* Fold *&X to X if X is an lvalue. */
9533 : 59539737 : if (TREE_CODE (op0) == ADDR_EXPR)
9534 : : {
9535 : 6127 : tree op00 = TREE_OPERAND (op0, 0);
9536 : 6127 : if ((VAR_P (op00)
9537 : : || TREE_CODE (op00) == PARM_DECL
9538 : : || TREE_CODE (op00) == RESULT_DECL)
9539 : 5049 : && !TREE_READONLY (op00))
9540 : : return op00;
9541 : : }
9542 : : return NULL_TREE;
9543 : :
9544 : : default:
9545 : : return NULL_TREE;
9546 : : } /* switch (code) */
9547 : : }
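
Several of the conversion rewrites above are easiest to see as source-level identities. The standalone spot check below exercises three of them for sample values; it is an illustration only, since the folder applies them under the precision and signedness conditions spelled out in the comments above.

#include <cassert>
#include <cmath>
#include <cstdint>

int
main ()
{
  /* (T)(x & c) == (T)x & (T)c when zero-extension cannot change the masked
     bits (the BIT_AND_EXPR narrowing case).  */
  std::uint8_t x = 0xAB;
  std::uint8_t c = 0x3C;
  assert ((std::uint32_t) (std::uint8_t) (x & c)
          == ((std::uint32_t) x & (std::uint32_t) c));

  /* fabs ((double) f) == (double) fabsf (f): absolute value commutes with
     the float -> double extension (the ABS_EXPR case).  */
  float f = -2.5f;
  assert (std::fabs ((double) f) == (double) std::fabsf (f));

  /* (T1)(~(T2)X) == ~(T1)X when T1 and T2 have the same precision
     (the BIT_NOT_EXPR-under-conversion case).  */
  std::uint32_t y = 0x12345678u;
  assert ((std::uint32_t) ~ (std::int32_t) y == ~y);
  return 0;
}
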
9548 : :
9549 : :
9550 : : /* If the operation was a conversion do _not_ mark a resulting constant
9551 : : with TREE_OVERFLOW if the original constant was not. These conversions
9552 : : have implementation defined behavior and retaining the TREE_OVERFLOW
9553 : : flag here would confuse later passes such as VRP. */
9554 : : tree
9555 : 0 : fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
9556 : : tree type, tree op0)
9557 : : {
9558 : 0 : tree res = fold_unary_loc (loc, code, type, op0);
9559 : 0 : if (res
9560 : 0 : && TREE_CODE (res) == INTEGER_CST
9561 : 0 : && TREE_CODE (op0) == INTEGER_CST
9562 : 0 : && CONVERT_EXPR_CODE_P (code))
9563 : 0 : TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
9564 : :
9565 : 0 : return res;
9566 : : }
9567 : :
9568 : : /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
9569 : : operands OP0 and OP1. LOC is the location of the resulting expression.
9570 : : ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
9571 : : Return the folded expression if folding is successful. Otherwise,
9572 : : return NULL_TREE. */
9573 : : static tree
9574 : 22877799 : fold_truth_andor (location_t loc, enum tree_code code, tree type,
9575 : : tree arg0, tree arg1, tree op0, tree op1)
9576 : : {
9577 : 22877799 : tree tem;
9578 : :
9579 : : /* We only do these simplifications if we are optimizing. */
9580 : 22877799 : if (!optimize)
9581 : : return NULL_TREE;
9582 : :
9583 : : /* Check for things like (A || B) && (A || C). We can convert this
9584 : : to A || (B && C). Note that either operator can be any of the four
9585 : : truth and/or operations and the transformation will still be
9586 : : valid. Also note that we only care about order for the
9587 : : ANDIF and ORIF operators. If B contains side effects, this
9588 : : might change the truth-value of A. */
9589 : 22616177 : if (TREE_CODE (arg0) == TREE_CODE (arg1)
9590 : 4952198 : && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
9591 : : || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
9592 : : || TREE_CODE (arg0) == TRUTH_AND_EXPR
9593 : 4952198 : || TREE_CODE (arg0) == TRUTH_OR_EXPR)
9594 : 22642533 : && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
9595 : : {
9596 : 25838 : tree a00 = TREE_OPERAND (arg0, 0);
9597 : 25838 : tree a01 = TREE_OPERAND (arg0, 1);
9598 : 25838 : tree a10 = TREE_OPERAND (arg1, 0);
9599 : 25838 : tree a11 = TREE_OPERAND (arg1, 1);
9600 : 51676 : bool commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
9601 : 25838 : || TREE_CODE (arg0) == TRUTH_AND_EXPR)
9602 : 25838 : && (code == TRUTH_AND_EXPR
9603 : 8705 : || code == TRUTH_OR_EXPR));
9604 : :
9605 : 25838 : if (operand_equal_p (a00, a10, 0))
9606 : 379 : return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
9607 : 379 : fold_build2_loc (loc, code, type, a01, a11));
9608 : 25459 : else if (commutative && operand_equal_p (a00, a11, 0))
9609 : 0 : return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
9610 : 0 : fold_build2_loc (loc, code, type, a01, a10));
9611 : 25459 : else if (commutative && operand_equal_p (a01, a10, 0))
9612 : 0 : return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
9613 : 0 : fold_build2_loc (loc, code, type, a00, a11));
9614 : :
9615 : : /* This case is tricky because we must either have commutative
9616 : : operators or else A10 must not have side-effects. */
9617 : :
9618 : 25424 : else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
9619 : 50355 : && operand_equal_p (a01, a11, 0))
9620 : 43 : return fold_build2_loc (loc, TREE_CODE (arg0), type,
9621 : : fold_build2_loc (loc, code, type, a00, a10),
9622 : 43 : a01);
9623 : : }
9624 : :
9625 : : /* See if we can build a range comparison. */
9626 : 22615755 : if ((tem = fold_range_test (loc, code, type, op0, op1)) != 0)
9627 : : return tem;
9628 : :
9629 : 21638241 : if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
9630 : 21636253 : || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
9631 : : {
9632 : 18652 : tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
9633 : 18652 : if (tem)
9634 : 13 : return fold_build2_loc (loc, code, type, tem, arg1);
9635 : : }
9636 : :
9637 : 21638228 : if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
9638 : 21630859 : || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
9639 : : {
9640 : 113550 : tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
9641 : 113550 : if (tem)
9642 : 76 : return fold_build2_loc (loc, code, type, arg0, tem);
9643 : : }
9644 : :
9645 : : /* Check for the possibility of merging component references. If our
9646 : : lhs is another similar operation, try to merge its rhs with our
9647 : : rhs. Then try to merge our lhs and rhs. */
9648 : 21638152 : if (TREE_CODE (arg0) == code
9649 : 22331728 : && (tem = fold_truth_andor_1 (loc, code, type,
9650 : 693576 : TREE_OPERAND (arg0, 1), arg1)) != 0)
9651 : 85 : return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9652 : :
9653 : 21638067 : if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
9654 : : return tem;
9655 : :
9656 : 21598119 : bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
9657 : 21598119 : if (param_logical_op_non_short_circuit != -1)
9658 : 7694 : logical_op_non_short_circuit
9659 : 7694 : = param_logical_op_non_short_circuit;
9660 : 21598119 : if (logical_op_non_short_circuit
9661 : 21594259 : && !sanitize_coverage_p ()
9662 : 21598119 : && (code == TRUTH_AND_EXPR
9663 : 21594256 : || code == TRUTH_ANDIF_EXPR
9664 : 10336392 : || code == TRUTH_OR_EXPR
9665 : 10336392 : || code == TRUTH_ORIF_EXPR))
9666 : : {
9667 : 21594256 : enum tree_code ncode, icode;
9668 : :
9669 : 54446376 : ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
9670 : 21594256 : ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
9671 : 11257864 : icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
9672 : :
9673 : : /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
9674 : : or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
9675 : : or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
9676 : : We don't want to pack more than two leaves into a non-IF AND/OR
9677 : : If tree-code of left-hand operand isn't an AND/OR-IF code and not
9678 : : equal to IF-CODE, then we don't want to add right-hand operand.
9679 : : If the inner right-hand side of left-hand operand has
9680 : : side-effects, or isn't simple, then we can't add to it,
9681 : : as otherwise we might destroy if-sequence. */
9682 : 21594256 : if (TREE_CODE (arg0) == icode
9683 : 685689 : && simple_condition_p (arg1)
9684 : : /* Needed for sequence points to handle trappings, and
9685 : : side-effects. */
9686 : 21641607 : && simple_condition_p (TREE_OPERAND (arg0, 1)))
9687 : : {
9688 : 40714 : tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
9689 : : arg1);
9690 : 40714 : return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
9691 : 40714 : tem);
9692 : : }
9693 : : /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
9694 : : or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
9695 : 21553542 : else if (TREE_CODE (arg1) == icode
9696 : 2418 : && simple_condition_p (arg0)
9697 : : /* Needed for sequence points to handle trappings, and
9698 : : side-effects. */
9699 : 21554146 : && simple_condition_p (TREE_OPERAND (arg1, 0)))
9700 : : {
9701 : 21 : tem = fold_build2_loc (loc, ncode, type,
9702 : 21 : arg0, TREE_OPERAND (arg1, 0));
9703 : 21 : return fold_build2_loc (loc, icode, type, tem,
9704 : 42 : TREE_OPERAND (arg1, 1));
9705 : : }
9706 : : /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
9707 : : into (A OR B).
9708 : : For sequence point consistency, we need to check for trapping,
9709 : : and side-effects. */
9710 : 4149806 : else if (code == icode && simple_condition_p (arg0)
9711 : 22196222 : && simple_condition_p (arg1))
9712 : 350308 : return fold_build2_loc (loc, ncode, type, arg0, arg1);
9713 : : }
9714 : :
9715 : : return NULL_TREE;
9716 : : }
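
The first transformation above rests on a plain Boolean identity, valid only because B is required to be free of side effects; the later AND-IF/OR-IF packing is ordinary re-association. An exhaustive standalone check over all operand values (side-effect-free operands assumed, as the code above requires):

#include <cassert>

int
main ()
{
  for (int a = 0; a <= 1; a++)
    for (int b = 0; b <= 1; b++)
      for (int c = 0; c <= 1; c++)
        {
          /* (A || B) && (A || C)  ==  A || (B && C)  */
          assert (((a || b) && (a || c)) == (a || (b && c)));
          /* ((A && B) && C)  ==  (A && (B && C))  -- the re-association case */
          assert (((a && b) && c) == (a && (b && c)));
        }
  return 0;
}
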
9717 : :
9718 : : /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
9719 : : by changing CODE to reduce the magnitude of constants involved in
9720 : : ARG0 of the comparison.
9721 : : Returns a canonicalized comparison tree if a simplification was
9722 : : possible, otherwise returns NULL_TREE.
9723 : : Set *STRICT_OVERFLOW_P to true if the canonicalization is only
9724 : : valid if signed overflow is undefined. */
9725 : :
9726 : : static tree
9727 : 152918725 : maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
9728 : : tree arg0, tree arg1,
9729 : : bool *strict_overflow_p)
9730 : : {
9731 : 152918725 : enum tree_code code0 = TREE_CODE (arg0);
9732 : 152918725 : tree t, cst0 = NULL_TREE;
9733 : 152918725 : int sgn0;
9734 : :
9735 : : /* Match A +- CST code arg1. We can change this only if overflow
9736 : : is undefined. */
9737 : 152918725 : if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9738 : 116073948 : && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
9739 : : /* In principle pointers also have undefined overflow behavior,
9740 : : but that causes problems elsewhere. */
9741 : 57849622 : && !POINTER_TYPE_P (TREE_TYPE (arg0))
9742 : 57849622 : && (code0 == MINUS_EXPR
9743 : 57849622 : || code0 == PLUS_EXPR)
9744 : 2422279 : && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
9745 : : return NULL_TREE;
9746 : :
9747 : : /* Identify the constant in arg0 and its sign. */
9748 : 2020623 : cst0 = TREE_OPERAND (arg0, 1);
9749 : 2020623 : sgn0 = tree_int_cst_sgn (cst0);
9750 : :
9751 : : /* Overflowed constants and zero will cause problems. */
9752 : 2020623 : if (integer_zerop (cst0)
9753 : 2020623 : || TREE_OVERFLOW (cst0))
9754 : : return NULL_TREE;
9755 : :
9756 : : /* See if we can reduce the magnitude of the constant in
9757 : : arg0 by changing the comparison code. */
9758 : : /* A - CST < arg1 -> A - CST-1 <= arg1. */
9759 : 2020623 : if (code == LT_EXPR
9760 : 1128398 : && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
9761 : : code = LE_EXPR;
9762 : : /* A + CST > arg1 -> A + CST-1 >= arg1. */
9763 : 1822786 : else if (code == GT_EXPR
9764 : 542413 : && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9765 : : code = GE_EXPR;
9766 : : /* A + CST <= arg1 -> A + CST-1 < arg1. */
9767 : 1655578 : else if (code == LE_EXPR
9768 : 619089 : && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9769 : : code = LT_EXPR;
9770 : : /* A - CST >= arg1 -> A - CST-1 > arg1. */
9771 : 1443930 : else if (code == GE_EXPR
9772 : 480440 : && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
9773 : : code = GT_EXPR;
9774 : : else
9775 : : return NULL_TREE;
9776 : 796801 : *strict_overflow_p = true;
9777 : :
9778 : : /* Now build the constant reduced in magnitude. But not if that
9779 : : would produce one outside of its types range. */
9780 : 1593602 : if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
9781 : 1593602 : && ((sgn0 == 1
9782 : 378718 : && TYPE_MIN_VALUE (TREE_TYPE (cst0))
9783 : 378718 : && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
9784 : 796801 : || (sgn0 == -1
9785 : 418083 : && TYPE_MAX_VALUE (TREE_TYPE (cst0))
9786 : 418083 : && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
9787 : 0 : return NULL_TREE;
9788 : :
9789 : 1175519 : t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
9790 : 796801 : cst0, build_int_cst (TREE_TYPE (cst0), 1));
9791 : 796801 : t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
9792 : 796801 : t = fold_convert (TREE_TYPE (arg1), t);
9793 : :
9794 : 796801 : return fold_build2_loc (loc, code, type, t, arg1);
9795 : : }
9796 : :
9797 : : /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
9798 : : overflow further. Try to decrease the magnitude of constants involved
9799 : : by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
9800 : : and put sole constants at the second argument position.
9801 : : Returns the canonicalized tree if changed, otherwise NULL_TREE. */
9802 : :
9803 : : static tree
9804 : 76837075 : maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
9805 : : tree arg0, tree arg1)
9806 : : {
9807 : 76837075 : tree t;
9808 : 76837075 : bool strict_overflow_p;
9809 : 76837075 : const char * const warnmsg = G_("assuming signed overflow does not occur "
9810 : : "when reducing constant in comparison");
9811 : :
9812 : : /* Try canonicalization by simplifying arg0. */
9813 : 76837075 : strict_overflow_p = false;
9814 : 76837075 : t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
9815 : : &strict_overflow_p);
9816 : 76837075 : if (t)
9817 : : {
9818 : 755425 : if (strict_overflow_p)
9819 : 755425 : fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9820 : 755425 : return t;
9821 : : }
9822 : :
9823 : : /* Try canonicalization by simplifying arg1 using the swapped
9824 : : comparison. */
9825 : 76081650 : code = swap_tree_comparison (code);
9826 : 76081650 : strict_overflow_p = false;
9827 : 76081650 : t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
9828 : : &strict_overflow_p);
9829 : 76081650 : if (t && strict_overflow_p)
9830 : 41376 : fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9831 : : return t;
9832 : : }
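
Concretely, the canonicalization rewrites A - 5 < B as A - 4 <= B (and A + 5 > B as A + 4 >= B), shrinking the constant by one while flipping strictness. The identity only holds when the signed arithmetic cannot wrap, which is why strict_overflow_p is set and the warning above is emitted. The standalone check below uses sample values chosen so that no overflow occurs; it is an illustration of the identity, not of the undefined-overflow assumption itself.

#include <cassert>

int
main ()
{
  for (int a = -10; a <= 10; a++)
    for (int b = -10; b <= 10; b++)
      {
        assert ((a - 5 < b) == (a - 4 <= b));   /* LT -> LE, smaller constant */
        assert ((a + 5 > b) == (a + 4 >= b));   /* GT -> GE, smaller constant */
      }
  return 0;
}
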
9833 : :
9834 : : /* Return whether BASE + OFFSET + BITPOS may wrap around the address
9835 : : space. This is used to avoid issuing overflow warnings for
9836 : : expressions like &p->x which cannot wrap. */
9837 : :
9838 : : static bool
9839 : 18107 : pointer_may_wrap_p (tree base, tree offset, poly_int64 bitpos)
9840 : : {
9841 : 18107 : if (!POINTER_TYPE_P (TREE_TYPE (base)))
9842 : : return true;
9843 : :
9844 : 10317 : if (maybe_lt (bitpos, 0))
9845 : : return true;
9846 : :
9847 : : poly_wide_int wi_offset;
9848 : 9469 : int precision = TYPE_PRECISION (TREE_TYPE (base));
9849 : 9469 : if (offset == NULL_TREE)
9850 : 4870 : wi_offset = wi::zero (precision);
9851 : 4599 : else if (!poly_int_tree_p (offset) || TREE_OVERFLOW (offset))
9852 : : return true;
9853 : : else
9854 : 0 : wi_offset = wi::to_poly_wide (offset);
9855 : :
9856 : 4870 : wi::overflow_type overflow;
9857 : 4870 : poly_wide_int units = wi::shwi (bits_to_bytes_round_down (bitpos),
9858 : 4870 : precision);
9859 : 4870 : poly_wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
9860 : 4870 : if (overflow)
9861 : : return true;
9862 : :
9863 : 4870 : poly_uint64 total_hwi, size;
9864 : 4870 : if (!total.to_uhwi (&total_hwi)
9865 : 4870 : || !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (base))),
9866 : : &size)
9867 : 9626 : || known_eq (size, 0U))
9868 : 114 : return true;
9869 : :
9870 : 4756 : if (known_le (total_hwi, size))
9871 : : return false;
9872 : :
9873 : : /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
9874 : : array. */
9875 : 1121 : if (TREE_CODE (base) == ADDR_EXPR
9876 : 0 : && poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (base, 0))),
9877 : : &size)
9878 : 0 : && maybe_ne (size, 0U)
9879 : 1121 : && known_le (total_hwi, size))
9880 : : return false;
9881 : :
9882 : : return true;
9883 : 9469 : }
9884 : :
9885 : : /* Return a positive integer when the symbol DECL is known to have
9886 : : a nonzero address, zero when it's known not to (e.g., it's a weak
9887 : : symbol), and a negative integer when the symbol is not yet in the
9888 : : symbol table and so whether or not its address is zero is unknown.
9889 : : For function local objects, always return a positive integer. */
9890 : : static int
9891 : 10101015 : maybe_nonzero_address (tree decl)
9892 : : {
9893 : : /* Normally, don't do anything for variables and functions before symtab is
9894 : : built; it is quite possible that DECL will be declared weak later.
9895 : : But if folding_initializer, we need a constant answer now, so create
9896 : : the symtab entry and prevent later weak declaration. */
9897 : 10101015 : if (DECL_P (decl) && decl_in_symtab_p (decl))
9898 : 7215726 : if (struct symtab_node *symbol
9899 : 3607863 : = (folding_initializer
9900 : 3607863 : ? symtab_node::get_create (decl)
9901 : 3607863 : : symtab_node::get (decl)))
9902 : 3588911 : return symbol->nonzero_address ();
9903 : :
9904 : : /* Function local objects are never NULL. */
9905 : 6512104 : if (DECL_P (decl)
9906 : 6512104 : && (DECL_CONTEXT (decl)
9907 : 4402829 : && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
9908 : 4399610 : && auto_var_in_fn_p (decl, DECL_CONTEXT (decl))))
9909 : : return 1;
9910 : :
9911 : : return -1;
9912 : : }
9913 : :
9914 : : /* Subroutine of fold_binary. This routine performs all of the
9915 : : transformations that are common to the equality/inequality
9916 : : operators (EQ_EXPR and NE_EXPR) and the ordering operators
9917 : : (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
9918 : : fold_binary should call fold_binary. Fold a comparison with
9919 : : tree code CODE and type TYPE with operands OP0 and OP1. Return
9920 : : the folded comparison or NULL_TREE. */
9921 : :
9922 : : static tree
9923 : 76896713 : fold_comparison (location_t loc, enum tree_code code, tree type,
9924 : : tree op0, tree op1)
9925 : : {
9926 : 76896713 : const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
9927 : 76896713 : tree arg0, arg1, tem;
9928 : :
9929 : 76896713 : arg0 = op0;
9930 : 76896713 : arg1 = op1;
9931 : :
9932 : 76896713 : STRIP_SIGN_NOPS (arg0);
9933 : 76896713 : STRIP_SIGN_NOPS (arg1);
9934 : :
9935 : : /* For comparisons of pointers we can decompose it to a compile time
9936 : : comparison of the base objects and the offsets into the object.
9937 : : This requires at least one operand being an ADDR_EXPR or a
9938 : : POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
9939 : 142791707 : if (POINTER_TYPE_P (TREE_TYPE (arg0))
9940 : 77113657 : && (TREE_CODE (arg0) == ADDR_EXPR
9941 : 10540042 : || TREE_CODE (arg1) == ADDR_EXPR
9942 : 9923725 : || TREE_CODE (arg0) == POINTER_PLUS_EXPR
9943 : 9406072 : || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
9944 : : {
9945 : 1820391 : tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
9946 : 1820391 : poly_int64 bitsize, bitpos0 = 0, bitpos1 = 0;
9947 : 1820391 : machine_mode mode;
9948 : 1820391 : int volatilep, reversep, unsignedp;
9949 : 1820391 : bool indirect_base0 = false, indirect_base1 = false;
9950 : :
9951 : : /* Get base and offset for the access. Strip ADDR_EXPR for
9952 : : get_inner_reference, but put it back by stripping INDIRECT_REF
9953 : : off the base object if possible. indirect_baseN will be true
9954 : : if baseN is not an address but refers to the object itself. */
9955 : 1820391 : base0 = arg0;
9956 : 1820391 : if (TREE_CODE (arg0) == ADDR_EXPR)
9957 : : {
9958 : 678621 : base0
9959 : 678621 : = get_inner_reference (TREE_OPERAND (arg0, 0),
9960 : : &bitsize, &bitpos0, &offset0, &mode,
9961 : : &unsignedp, &reversep, &volatilep);
9962 : 678621 : if (INDIRECT_REF_P (base0))
9963 : 68760 : base0 = TREE_OPERAND (base0, 0);
9964 : : else
9965 : : indirect_base0 = true;
9966 : : }
9967 : 1141770 : else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9968 : : {
9969 : 552480 : base0 = TREE_OPERAND (arg0, 0);
9970 : 552480 : STRIP_SIGN_NOPS (base0);
9971 : 552480 : if (TREE_CODE (base0) == ADDR_EXPR)
9972 : : {
9973 : 30971 : base0
9974 : 30971 : = get_inner_reference (TREE_OPERAND (base0, 0),
9975 : : &bitsize, &bitpos0, &offset0, &mode,
9976 : : &unsignedp, &reversep, &volatilep);
9977 : 30971 : if (INDIRECT_REF_P (base0))
9978 : 8 : base0 = TREE_OPERAND (base0, 0);
9979 : : else
9980 : : indirect_base0 = true;
9981 : : }
9982 : 552480 : if (offset0 == NULL_TREE || integer_zerop (offset0))
9983 : 552480 : offset0 = TREE_OPERAND (arg0, 1);
9984 : : else
9985 : 0 : offset0 = size_binop (PLUS_EXPR, offset0,
9986 : : TREE_OPERAND (arg0, 1));
9987 : 552480 : if (poly_int_tree_p (offset0))
9988 : : {
9989 : 464632 : poly_offset_int tem = wi::sext (wi::to_poly_offset (offset0),
9990 : 464632 : TYPE_PRECISION (sizetype));
9991 : 464632 : tem <<= LOG2_BITS_PER_UNIT;
9992 : 464632 : tem += bitpos0;
9993 : 464632 : if (tem.to_shwi (&bitpos0))
9994 : 464629 : offset0 = NULL_TREE;
9995 : : }
9996 : : }
9997 : :
9998 : 1820391 : base1 = arg1;
9999 : 1820391 : if (TREE_CODE (arg1) == ADDR_EXPR)
10000 : : {
10001 : 640366 : base1
10002 : 640366 : = get_inner_reference (TREE_OPERAND (arg1, 0),
10003 : : &bitsize, &bitpos1, &offset1, &mode,
10004 : : &unsignedp, &reversep, &volatilep);
10005 : 640366 : if (INDIRECT_REF_P (base1))
10006 : 1642 : base1 = TREE_OPERAND (base1, 0);
10007 : : else
10008 : : indirect_base1 = true;
10009 : : }
10010 : 1180025 : else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10011 : : {
10012 : 72292 : base1 = TREE_OPERAND (arg1, 0);
10013 : 72292 : STRIP_SIGN_NOPS (base1);
10014 : 72292 : if (TREE_CODE (base1) == ADDR_EXPR)
10015 : : {
10016 : 12374 : base1
10017 : 12374 : = get_inner_reference (TREE_OPERAND (base1, 0),
10018 : : &bitsize, &bitpos1, &offset1, &mode,
10019 : : &unsignedp, &reversep, &volatilep);
10020 : 12374 : if (INDIRECT_REF_P (base1))
10021 : 6 : base1 = TREE_OPERAND (base1, 0);
10022 : : else
10023 : : indirect_base1 = true;
10024 : : }
10025 : 72292 : if (offset1 == NULL_TREE || integer_zerop (offset1))
10026 : 72276 : offset1 = TREE_OPERAND (arg1, 1);
10027 : : else
10028 : 16 : offset1 = size_binop (PLUS_EXPR, offset1,
10029 : : TREE_OPERAND (arg1, 1));
10030 : 72292 : if (poly_int_tree_p (offset1))
10031 : : {
10032 : 62267 : poly_offset_int tem = wi::sext (wi::to_poly_offset (offset1),
10033 : 62267 : TYPE_PRECISION (sizetype));
10034 : 62267 : tem <<= LOG2_BITS_PER_UNIT;
10035 : 62267 : tem += bitpos1;
10036 : 62267 : if (tem.to_shwi (&bitpos1))
10037 : 62267 : offset1 = NULL_TREE;
10038 : : }
10039 : : }
10040 : :
10041 : : /* If we have equivalent bases we might be able to simplify. */
10042 : 1820391 : if (indirect_base0 == indirect_base1
10043 : 2382911 : && operand_equal_p (base0, base1,
10044 : : indirect_base0 ? OEP_ADDRESS_OF : 0))
10045 : : {
10046 : : /* We can fold this expression to a constant if the non-constant
10047 : : offset parts are equal. */
10048 : 20816 : if ((offset0 == offset1
10049 : 6449 : || (offset0 && offset1
10050 : 2485 : && operand_equal_p (offset0, offset1, 0)))
10051 : 20816 : && (equality_code
10052 : 11223 : || (indirect_base0
10053 : 7553 : && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
10054 : 3670 : || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
10055 : : {
10056 : 14330 : if (!equality_code
10057 : 11186 : && maybe_ne (bitpos0, bitpos1)
10058 : 25485 : && (pointer_may_wrap_p (base0, offset0, bitpos0)
10059 : 2093 : || pointer_may_wrap_p (base1, offset1, bitpos1)))
10060 : 9613 : fold_overflow_warning (("assuming pointer wraparound does not "
10061 : : "occur when comparing P +- C1 with "
10062 : : "P +- C2"),
10063 : : WARN_STRICT_OVERFLOW_CONDITIONAL);
10064 : :
10065 : 14330 : switch (code)
10066 : : {
10067 : 194 : case EQ_EXPR:
10068 : 194 : if (known_eq (bitpos0, bitpos1))
10069 : 38227 : return constant_boolean_node (true, type);
10070 : 157 : if (known_ne (bitpos0, bitpos1))
10071 : 157 : return constant_boolean_node (false, type);
10072 : : break;
10073 : 2950 : case NE_EXPR:
10074 : 2950 : if (known_ne (bitpos0, bitpos1))
10075 : 2945 : return constant_boolean_node (true, type);
10076 : 5 : if (known_eq (bitpos0, bitpos1))
10077 : 5 : return constant_boolean_node (false, type);
10078 : : break;
10079 : 2757 : case LT_EXPR:
10080 : 2757 : if (known_lt (bitpos0, bitpos1))
10081 : 2546 : return constant_boolean_node (true, type);
10082 : 211 : if (known_ge (bitpos0, bitpos1))
10083 : 211 : return constant_boolean_node (false, type);
10084 : : break;
10085 : 4361 : case LE_EXPR:
10086 : 4361 : if (known_le (bitpos0, bitpos1))
10087 : 1808 : return constant_boolean_node (true, type);
10088 : 2553 : if (known_gt (bitpos0, bitpos1))
10089 : 2553 : return constant_boolean_node (false, type);
10090 : : break;
10091 : 2352 : case GE_EXPR:
10092 : 2352 : if (known_ge (bitpos0, bitpos1))
10093 : 581 : return constant_boolean_node (true, type);
10094 : 1771 : if (known_lt (bitpos0, bitpos1))
10095 : 1771 : return constant_boolean_node (false, type);
10096 : : break;
10097 : 1716 : case GT_EXPR:
10098 : 1716 : if (known_gt (bitpos0, bitpos1))
10099 : 1699 : return constant_boolean_node (true, type);
10100 : 17 : if (known_le (bitpos0, bitpos1))
10101 : 17 : return constant_boolean_node (false, type);
10102 : : break;
10103 : : default:;
10104 : : }
10105 : : }
10106 : : /* We can simplify the comparison to a comparison of the variable
10107 : : offset parts if the constant offset parts are equal.
10108 : : Be careful to use signed sizetype here because otherwise we
10109 : : mess with array offsets in the wrong way. This is possible
10110 : : because pointer arithmetic is restricted to remain within an
10111 : : object and overflow on pointer differences is undefined as of
10112 : : 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
10113 : 6486 : else if (known_eq (bitpos0, bitpos1)
10114 : 6486 : && (equality_code
10115 : 4859 : || (indirect_base0
10116 : 260 : && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
10117 : 4599 : || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
10118 : : {
10119 : : /* By converting to signed sizetype we cover middle-end pointer
10120 : : arithmetic which operates on unsigned pointer types of size
10121 : : type size and ARRAY_REF offsets which are properly sign or
10122 : : zero extended from their type in case it is narrower than
10123 : : sizetype. */
10124 : 4971 : if (offset0 == NULL_TREE)
10125 : 55 : offset0 = build_int_cst (ssizetype, 0);
10126 : : else
10127 : 4916 : offset0 = fold_convert_loc (loc, ssizetype, offset0);
10128 : 4971 : if (offset1 == NULL_TREE)
10129 : 2463 : offset1 = build_int_cst (ssizetype, 0);
10130 : : else
10131 : 2508 : offset1 = fold_convert_loc (loc, ssizetype, offset1);
10132 : :
10133 : 4971 : if (!equality_code
10134 : 4971 : && (pointer_may_wrap_p (base0, offset0, bitpos0)
10135 : 0 : || pointer_may_wrap_p (base1, offset1, bitpos1)))
10136 : 4859 : fold_overflow_warning (("assuming pointer wraparound does not "
10137 : : "occur when comparing P +- C1 with "
10138 : : "P +- C2"),
10139 : : WARN_STRICT_OVERFLOW_COMPARISON);
10140 : :
10141 : 4971 : return fold_build2_loc (loc, code, type, offset0, offset1);
10142 : : }
10143 : : }
10144 : : /* For equal offsets we can simplify to a comparison of the
10145 : : base addresses. */
10146 : 1799575 : else if (known_eq (bitpos0, bitpos1)
10147 : 115187 : && (indirect_base0
10148 : 742218 : ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
10149 : 9269 : && (indirect_base1
10150 : 197678 : ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
10151 : 2020529 : && ((offset0 == offset1)
10152 : 4706 : || (offset0 && offset1
10153 : 4485 : && operand_equal_p (offset0, offset1, 0))))
10154 : : {
10155 : 18900 : if (indirect_base0)
10156 : 1594 : base0 = build_fold_addr_expr_loc (loc, base0);
10157 : 18900 : if (indirect_base1)
10158 : 2431 : base1 = build_fold_addr_expr_loc (loc, base1);
10159 : 18900 : return fold_build2_loc (loc, code, type, base0, base1);
10160 : : }
10161 : : /* Comparison between an ordinary (non-weak) symbol and a null
10162 : : pointer can be eliminated since such symbols must have a
10163 : : non-null address. In C, relational expressions between pointers
10164 : : to objects and null pointers are undefined. The results
10165 : : below follow the C++ rules with the additional property that
10166 : : every object pointer compares greater than a null pointer.
10167 : : */
10168 : 1780675 : else if (((DECL_P (base0)
10169 : 584870 : && maybe_nonzero_address (base0) > 0
10170 : : /* Avoid folding references to struct members at offset 0 to
10171 : : prevent tests like '&ptr->firstmember == 0' from getting
10172 : : eliminated. When ptr is null, although the -> expression
10173 : : is strictly speaking invalid, GCC retains it as a matter
10174 : : of QoI. See PR c/44555. */
10175 : 572442 : && (offset0 == NULL_TREE && known_ne (bitpos0, 0)))
10176 : 1363083 : || CONSTANT_CLASS_P (base0))
10177 : 421850 : && indirect_base0
10178 : : /* The caller guarantees that when one of the arguments is
10179 : : constant (i.e., null in this case) it is second. */
10180 : 2199844 : && integer_zerop (arg1))
10181 : : {
10182 : 26 : switch (code)
10183 : : {
10184 : 24 : case EQ_EXPR:
10185 : 24 : case LE_EXPR:
10186 : 24 : case LT_EXPR:
10187 : 24 : return constant_boolean_node (false, type);
10188 : 2 : case GE_EXPR:
10189 : 2 : case GT_EXPR:
10190 : 2 : case NE_EXPR:
10191 : 2 : return constant_boolean_node (true, type);
10192 : 0 : default:
10193 : 0 : gcc_unreachable ();
10194 : : }
10195 : : }
10196 : : }
10197 : :
10198 : : /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
10199 : : X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
10200 : : the resulting offset is smaller in absolute value than the
10201 : : original one and has the same sign. */
10202 : 151170763 : if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10203 : 116872202 : && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
10204 : 29323937 : && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
10205 : 2178007 : && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10206 : 1825468 : && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
10207 : 1825468 : && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
10208 : 135463568 : && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10209 : 166707 : && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
10210 : : {
10211 : 166707 : tree const1 = TREE_OPERAND (arg0, 1);
10212 : 166707 : tree const2 = TREE_OPERAND (arg1, 1);
10213 : 166707 : tree variable1 = TREE_OPERAND (arg0, 0);
10214 : 166707 : tree variable2 = TREE_OPERAND (arg1, 0);
10215 : 166707 : tree cst;
10216 : 166707 : const char * const warnmsg = G_("assuming signed overflow does not "
10217 : : "occur when combining constants around "
10218 : : "a comparison");
10219 : :
10220 : : /* Put the constant on the side where it doesn't overflow and is
10221 : : of lower absolute value and of the same sign as before. */
10222 : 166708 : cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
10223 : : ? MINUS_EXPR : PLUS_EXPR,
10224 : : const2, const1);
10225 : 166707 : if (!TREE_OVERFLOW (cst)
10226 : 166697 : && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
10227 : 188118 : && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
10228 : : {
10229 : 5485 : fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
10230 : 5485 : return fold_build2_loc (loc, code, type,
10231 : : variable1,
10232 : 5485 : fold_build2_loc (loc, TREE_CODE (arg1),
10233 : 5485 : TREE_TYPE (arg1),
10234 : 5485 : variable2, cst));
10235 : : }
10236 : :
10237 : 161223 : cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
10238 : : ? MINUS_EXPR : PLUS_EXPR,
10239 : : const1, const2);
10240 : 161222 : if (!TREE_OVERFLOW (cst)
10241 : 161212 : && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
10242 : 177148 : && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
10243 : : {
10244 : 15926 : fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
10245 : 15926 : return fold_build2_loc (loc, code, type,
10246 : 15926 : fold_build2_loc (loc, TREE_CODE (arg0),
10247 : 15926 : TREE_TYPE (arg0),
10248 : : variable1, cst),
10249 : 15926 : variable2);
10250 : : }
10251 : : }
10252 : :
10253 : 76837075 : tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
10254 : 76837075 : if (tem)
10255 : : return tem;
10256 : :
10257 : : /* If we are comparing an expression that just has comparisons
10258 : : of two integer values, arithmetic expressions of those comparisons,
10259 : : and constants, we can simplify it. There are only three cases
10260 : : to check: the two values can either be equal, the first can be
10261 : : greater, or the second can be greater. Fold the expression for
10262 : : those three values. Since each value must be 0 or 1, we have
10263 : : eight possibilities, each of which corresponds to the constant 0
10264 : : or 1 or one of the six possible comparisons.
10265 : :
10266 : : This handles common cases like (a > b) == 0 but also handles
10267 : : expressions like ((x > y) - (y > x)) > 0, which supposedly
10268 : : occur in macroized code. */
10269 : :
10270 : 76040274 : if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
10271 : : {
10272 : 46166006 : tree cval1 = 0, cval2 = 0;
10273 : :
10274 : 46166006 : if (twoval_comparison_p (arg0, &cval1, &cval2)
10275 : : /* Don't handle degenerate cases here; they should already
10276 : : have been handled anyway. */
10277 : 555320 : && cval1 != 0 && cval2 != 0
10278 : 554177 : && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
10279 : 554177 : && TREE_TYPE (cval1) == TREE_TYPE (cval2)
10280 : 554171 : && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
10281 : 58 : && TYPE_MAX_VALUE (TREE_TYPE (cval1))
10282 : 58 : && TYPE_MAX_VALUE (TREE_TYPE (cval2))
10283 : 46166064 : && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
10284 : 58 : TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
10285 : : {
10286 : 58 : tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
10287 : 58 : tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
10288 : :
10289 : : /* We can't just pass T to eval_subst in case cval1 or cval2
10290 : : was the same as ARG1. */
10291 : :
10292 : 58 : tree high_result
10293 : 58 : = fold_build2_loc (loc, code, type,
10294 : : eval_subst (loc, arg0, cval1, maxval,
10295 : : cval2, minval),
10296 : : arg1);
10297 : 58 : tree equal_result
10298 : 58 : = fold_build2_loc (loc, code, type,
10299 : : eval_subst (loc, arg0, cval1, maxval,
10300 : : cval2, maxval),
10301 : : arg1);
10302 : 58 : tree low_result
10303 : 58 : = fold_build2_loc (loc, code, type,
10304 : : eval_subst (loc, arg0, cval1, minval,
10305 : : cval2, maxval),
10306 : : arg1);
10307 : :
10308 : : /* All three of these results should be 0 or 1. Confirm they are.
10309 : : Then use those values to select the proper code to use. */
10310 : :
10311 : 58 : if (TREE_CODE (high_result) == INTEGER_CST
10312 : 49 : && TREE_CODE (equal_result) == INTEGER_CST
10313 : 39 : && TREE_CODE (low_result) == INTEGER_CST)
10314 : : {
10315 : : /* Make a 3-bit mask with the high-order bit being the
10316 : : value for `>', the next for '=', and the low for '<'. */
10317 : 39 : switch ((integer_onep (high_result) * 4)
10318 : 39 : + (integer_onep (equal_result) * 2)
10319 : 39 : + integer_onep (low_result))
10320 : : {
10321 : 21 : case 0:
10322 : : /* Always false. */
10323 : 39 : return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10324 : : case 1:
10325 : : code = LT_EXPR;
10326 : : break;
10327 : 2 : case 2:
10328 : 2 : code = EQ_EXPR;
10329 : 2 : break;
10330 : 0 : case 3:
10331 : 0 : code = LE_EXPR;
10332 : 0 : break;
10333 : 0 : case 4:
10334 : 0 : code = GT_EXPR;
10335 : 0 : break;
10336 : 1 : case 5:
10337 : 1 : code = NE_EXPR;
10338 : 1 : break;
10339 : 0 : case 6:
10340 : 0 : code = GE_EXPR;
10341 : 0 : break;
10342 : 15 : case 7:
10343 : : /* Always true. */
10344 : 15 : return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10345 : : }
10346 : :
10347 : 3 : return fold_build2_loc (loc, code, type, cval1, cval2);
10348 : : }
10349 : : }
10350 : : }
10351 : :
10352 : : return NULL_TREE;
10353 : : }
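/* Editorial illustration (not part of fold-const.cc): the source-level
   shape of the X +- C1 CMP Y +- C2 transform handled above.  With signed
   operands, where overflow is undefined, the constants may be combined on
   one side when the combined constant is smaller in absolute value and
   keeps its sign.  The function names below are made up for this sketch.  */
static int cmp_before (int x, int y) { return x + 7 < y + 9; }
static int cmp_after  (int x, int y) { return x < y + 2; }  /* 2 == 9 - 7 */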
10354 : :
10355 : :
10356 : : /* Subroutine of fold_binary. Optimize complex multiplications of the
10357 : : form z * conj(z) to pow(realpart(z),2) + pow(imagpart(z),2). The
10358 : : argument EXPR represents the expression "z" of type TYPE. */
10359 : :
10360 : : static tree
10361 : 2 : fold_mult_zconjz (location_t loc, tree type, tree expr)
10362 : : {
10363 : 2 : tree itype = TREE_TYPE (type);
10364 : 2 : tree rpart, ipart, tem;
10365 : :
10366 : 2 : if (TREE_CODE (expr) == COMPLEX_EXPR)
10367 : : {
10368 : 0 : rpart = TREE_OPERAND (expr, 0);
10369 : 0 : ipart = TREE_OPERAND (expr, 1);
10370 : : }
10371 : 2 : else if (TREE_CODE (expr) == COMPLEX_CST)
10372 : : {
10373 : 0 : rpart = TREE_REALPART (expr);
10374 : 0 : ipart = TREE_IMAGPART (expr);
10375 : : }
10376 : : else
10377 : : {
10378 : 2 : expr = save_expr (expr);
10379 : 2 : rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
10380 : 2 : ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
10381 : : }
10382 : :
10383 : 2 : rpart = save_expr (rpart);
10384 : 2 : ipart = save_expr (ipart);
10385 : 2 : tem = fold_build2_loc (loc, PLUS_EXPR, itype,
10386 : : fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
10387 : : fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
10388 : 2 : return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
10389 : 2 : build_zero_cst (itype));
10390 : : }
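/* Editorial illustration (not part of fold-const.cc): what the z * conj(z)
   fold computes, written out by hand with GCC's __real__/__imag__
   extensions.  The result is re*re + im*im with a zero imaginary part.  */
static __complex__ double
mult_zconjz_by_hand (__complex__ double z)
{
  double re = __real__ z, im = __imag__ z;
  __complex__ double res = 0;
  __real__ res = re * re + im * im;   /* __imag__ res stays 0.  */
  return res;
}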
10391 : :
10392 : :
10393 : : /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
10394 : : CONSTRUCTOR ARG into array ELTS, which has NELTS elements, and return
10395 : : true if successful. */
10396 : :
10397 : : static bool
10398 : 10563 : vec_cst_ctor_to_array (tree arg, unsigned int nelts, tree *elts)
10399 : : {
10400 : 10563 : unsigned HOST_WIDE_INT i, nunits;
10401 : :
10402 : 10563 : if (TREE_CODE (arg) == VECTOR_CST
10403 : 10563 : && VECTOR_CST_NELTS (arg).is_constant (&nunits))
10404 : : {
10405 : 2006 : for (i = 0; i < nunits; ++i)
10406 : 1630 : elts[i] = VECTOR_CST_ELT (arg, i);
10407 : : }
10408 : 10187 : else if (TREE_CODE (arg) == CONSTRUCTOR)
10409 : : {
10410 : : constructor_elt *elt;
10411 : :
10412 : 34683 : FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
10413 : 29184 : if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
10414 : 4688 : return false;
10415 : : else
10416 : 24496 : elts[i] = elt->value;
10417 : : }
10418 : : else
10419 : : return false;
10420 : 6649 : for (; i < nelts; i++)
10421 : 1548 : elts[i]
10422 : 774 : = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
10423 : : return true;
10424 : : }
10425 : :
10426 : : /* Helper routine for fold_vec_perm_cst to check if SEL is a suitable
10427 : : mask for VLA vec_perm folding.
10428 : : REASON, if specified, will contain the reason why SEL is not suitable.
10429 : : Used only for debugging and unit-testing. */
10430 : :
10431 : : static bool
10432 : 7051 : valid_mask_for_fold_vec_perm_cst_p (tree arg0, tree arg1,
10433 : : const vec_perm_indices &sel,
10434 : : const char **reason = NULL)
10435 : : {
10436 : 7051 : unsigned sel_npatterns = sel.encoding ().npatterns ();
10437 : 7051 : unsigned sel_nelts_per_pattern = sel.encoding ().nelts_per_pattern ();
10438 : :
10439 : 14102 : if (!(pow2p_hwi (sel_npatterns)
10440 : 7051 : && pow2p_hwi (VECTOR_CST_NPATTERNS (arg0))
10441 : 7051 : && pow2p_hwi (VECTOR_CST_NPATTERNS (arg1))))
10442 : : {
10443 : 0 : if (reason)
10444 : 0 : *reason = "npatterns is not power of 2";
10445 : 0 : return false;
10446 : : }
10447 : :
10448 : : /* We want to avoid cases where sel.length is not a multiple of npatterns.
10449 : : For example: sel.length = 2 + 2x and sel npatterns = 4. */
10450 : 7051 : poly_uint64 esel;
10451 : 7051 : if (!multiple_p (sel.length (), sel_npatterns, &esel))
10452 : : {
10453 : 0 : if (reason)
10454 : 0 : *reason = "sel.length is not multiple of sel_npatterns";
10455 : 0 : return false;
10456 : : }
10457 : :
10458 : 7051 : if (sel_nelts_per_pattern < 3)
10459 : : return true;
10460 : :
10461 : 4441 : for (unsigned pattern = 0; pattern < sel_npatterns; pattern++)
10462 : : {
10463 : 3482 : poly_uint64 a1 = sel[pattern + sel_npatterns];
10464 : 3482 : poly_uint64 a2 = sel[pattern + 2 * sel_npatterns];
10465 : 3482 : HOST_WIDE_INT step;
10466 : 3482 : if (!poly_int64 (a2 - a1).is_constant (&step))
10467 : : {
10468 : : if (reason)
10469 : : *reason = "step is not constant";
10470 : 803 : return false;
10471 : : }
10472 : : // FIXME: Punt on step < 0 for now, revisit later.
10473 : 3482 : if (step < 0)
10474 : : return false;
10475 : 3418 : if (step == 0)
10476 : 0 : continue;
10477 : :
10478 : 3418 : if (!pow2p_hwi (step))
10479 : : {
10480 : 0 : if (reason)
10481 : 0 : *reason = "step is not power of 2";
10482 : 0 : return false;
10483 : : }
10484 : :
10485 : : /* Ensure that the stepped sequence of the pattern selects elements
10486 : : only from the same input vector. */
10487 : 3418 : uint64_t q1, qe;
10488 : 3418 : poly_uint64 r1, re;
10489 : 3418 : poly_uint64 ae = a1 + (esel - 2) * step;
10490 : 3418 : poly_uint64 arg_len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
10491 : :
10492 : 3418 : if (!(can_div_trunc_p (a1, arg_len, &q1, &r1)
10493 : 3418 : && can_div_trunc_p (ae, arg_len, &qe, &re)
10494 : : && q1 == qe))
10495 : : {
10496 : 298 : if (reason)
10497 : 0 : *reason = "crossed input vectors";
10498 : 298 : return false;
10499 : : }
10500 : :
10501 : : /* Ensure that the stepped sequence always selects from the same
10502 : : input pattern. */
10503 : 3120 : tree arg = ((q1 & 1) == 0) ? arg0 : arg1;
10504 : 3120 : unsigned arg_npatterns = VECTOR_CST_NPATTERNS (arg);
10505 : :
10506 : 3120 : if (!multiple_p (step, arg_npatterns))
10507 : : {
10508 : 439 : if (reason)
10509 : 0 : *reason = "step is not multiple of npatterns";
10510 : 439 : return false;
10511 : : }
10512 : :
10513 : : /* If a1 chooses a base element from arg, ensure that it's a natural
10514 : : stepped sequence, i.e. (arg[2] - arg[1]) == (arg[1] - arg[0]),
10515 : : to preserve arg's encoding. */
10516 : :
10517 : 2681 : if (maybe_lt (r1, arg_npatterns))
10518 : : {
10519 : 2 : unsigned HOST_WIDE_INT index;
10520 : 2 : if (!r1.is_constant (&index))
10521 : 2 : return false;
10522 : :
10523 : 2 : tree arg_elem0 = vector_cst_elt (arg, index);
10524 : 2 : tree arg_elem1 = vector_cst_elt (arg, index + arg_npatterns);
10525 : 2 : tree arg_elem2 = vector_cst_elt (arg, index + arg_npatterns * 2);
10526 : :
10527 : 2 : tree step1, step2;
10528 : 2 : if (!(step1 = const_binop (MINUS_EXPR, arg_elem1, arg_elem0))
10529 : 2 : || !(step2 = const_binop (MINUS_EXPR, arg_elem2, arg_elem1))
10530 : 4 : || !operand_equal_p (step1, step2, 0))
10531 : : {
10532 : 2 : if (reason)
10533 : 0 : *reason = "not a natural stepped sequence";
10534 : 2 : return false;
10535 : : }
10536 : : }
10537 : : }
10538 : :
10539 : : return true;
10540 : : }
10541 : :
10542 : : /* Try to fold a permutation of ARG0 and ARG1 with the SEL selector when
10543 : : the input vectors are VECTOR_CST. Return NULL_TREE otherwise.
10544 : : REASON has the same purpose as described in
10545 : : valid_mask_for_fold_vec_perm_cst_p. */
10546 : :
10547 : : static tree
10548 : 7051 : fold_vec_perm_cst (tree type, tree arg0, tree arg1, const vec_perm_indices &sel,
10549 : : const char **reason = NULL)
10550 : : {
10551 : 7051 : unsigned res_npatterns, res_nelts_per_pattern;
10552 : 7051 : unsigned HOST_WIDE_INT res_nelts;
10553 : :
10554 : : /* First try to implement the fold in a VLA-friendly way.
10555 : :
10556 : : (1) If the selector is simply a duplication of N elements, the
10557 : : result is likewise a duplication of N elements.
10558 : :
10559 : : (2) If the selector is N elements followed by a duplication
10560 : : of N elements, the result is too.
10561 : :
10562 : : (3) If the selector is N elements followed by an interleaving
10563 : : of N linear series, the situation is more complex.
10564 : :
10565 : : valid_mask_for_fold_vec_perm_cst_p detects whether we
10566 : : can handle this case. If we can, then each of the N linear
10567 : : series either (a) selects the same element each time or
10568 : : (b) selects a linear series from one of the input patterns.
10569 : :
10570 : : If (b) holds for one of the linear series, the result
10571 : : will contain a linear series, and so the result will have
10572 : : the same shape as the selector. If (a) holds for all of
10573 : : the linear series, the result will be the same as (2) above.
10574 : :
10575 : : (b) can only hold if one of the input patterns has a
10576 : : stepped encoding. */
10577 : :
10578 : 7051 : if (valid_mask_for_fold_vec_perm_cst_p (arg0, arg1, sel, reason))
10579 : : {
10580 : 6248 : res_npatterns = sel.encoding ().npatterns ();
10581 : 6248 : res_nelts_per_pattern = sel.encoding ().nelts_per_pattern ();
10582 : 6248 : if (res_nelts_per_pattern == 3
10583 : 959 : && VECTOR_CST_NELTS_PER_PATTERN (arg0) < 3
10584 : 6746 : && VECTOR_CST_NELTS_PER_PATTERN (arg1) < 3)
10585 : : res_nelts_per_pattern = 2;
10586 : 6248 : res_nelts = res_npatterns * res_nelts_per_pattern;
10587 : : }
10588 : 803 : else if (TYPE_VECTOR_SUBPARTS (type).is_constant (&res_nelts))
10589 : : {
10590 : 803 : res_npatterns = res_nelts;
10591 : 803 : res_nelts_per_pattern = 1;
10592 : : }
10593 : : else
10594 : : return NULL_TREE;
10595 : :
10596 : 7051 : tree_vector_builder out_elts (type, res_npatterns, res_nelts_per_pattern);
10597 : 43542 : for (unsigned i = 0; i < res_nelts; i++)
10598 : : {
10599 : 36491 : poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
10600 : 36491 : uint64_t q;
10601 : 36491 : poly_uint64 r;
10602 : 36491 : unsigned HOST_WIDE_INT index;
10603 : :
10604 : : /* Punt if sel[i] / len (truncating division) cannot be determined,
10605 : : because the input vector to be chosen would depend on the
10606 : : runtime vector length.
10607 : : For example, if len == 4 + 4x and sel[i] == 4:
10608 : : if len at runtime equals 4, we choose arg1[0];
10609 : : for any other value of len > 4 at runtime, we choose arg0[4],
10610 : : which makes the element choice dependent on the runtime vector length. */
10611 : 36491 : if (!can_div_trunc_p (sel[i], len, &q, &r))
10612 : : {
10613 : : if (reason)
10614 : : *reason = "cannot divide selector element by arg len";
10615 : : return NULL_TREE;
10616 : : }
10617 : :
10618 : : /* sel[i] % len will give the index of the element in the chosen input
10619 : : vector. For example if sel[i] == 5 + 4x and len == 4 + 4x,
10620 : : we will choose arg1[1] since (5 + 4x) % (4 + 4x) == 1. */
10621 : 36491 : if (!r.is_constant (&index))
10622 : : {
10623 : : if (reason)
10624 : : *reason = "remainder is not constant";
10625 : : return NULL_TREE;
10626 : : }
10627 : :
10628 : 36491 : tree arg = ((q & 1) == 0) ? arg0 : arg1;
10629 : 36491 : tree elem = vector_cst_elt (arg, index);
10630 : 36491 : out_elts.quick_push (elem);
10631 : : }
10632 : :
10633 : 7051 : return out_elts.build ();
10634 : 7051 : }
10635 : :
10636 : : /* Attempt to fold a vector permutation of ARG0 and ARG1 using the SEL
10637 : : selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
10638 : : NULL_TREE otherwise. */
10639 : :
10640 : : tree
10641 : 19824 : fold_vec_perm (tree type, tree arg0, tree arg1, const vec_perm_indices &sel)
10642 : : {
10643 : 19824 : unsigned int i;
10644 : 19824 : unsigned HOST_WIDE_INT nelts;
10645 : :
10646 : 19824 : gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (type), sel.length ())
10647 : : && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)),
10648 : : TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1))));
10649 : :
10650 : 19824 : if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
10651 : 19824 : || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
10652 : : return NULL_TREE;
10653 : :
10654 : 14649 : if (TREE_CODE (arg0) == VECTOR_CST
10655 : 7338 : && TREE_CODE (arg1) == VECTOR_CST)
10656 : 7051 : return fold_vec_perm_cst (type, arg0, arg1, sel);
10657 : :
10658 : : /* For the fallback case, we want to ensure we have VLS vectors
10659 : : of equal length. */
10660 : 7598 : if (!sel.length ().is_constant (&nelts))
10661 : : return NULL_TREE;
10662 : :
10663 : 7598 : gcc_assert (known_eq (sel.length (),
10664 : : TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))));
10665 : 7598 : tree *in_elts = XALLOCAVEC (tree, nelts * 2);
10666 : 7598 : if (!vec_cst_ctor_to_array (arg0, nelts, in_elts)
10667 : 7598 : || !vec_cst_ctor_to_array (arg1, nelts, in_elts + nelts))
10668 : 4688 : return NULL_TREE;
10669 : :
10670 : 2910 : vec<constructor_elt, va_gc> *v;
10671 : 2910 : vec_alloc (v, nelts);
10672 : 16140 : for (i = 0; i < nelts; i++)
10673 : : {
10674 : 13230 : HOST_WIDE_INT index;
10675 : 13230 : if (!sel[i].is_constant (&index))
10676 : : return NULL_TREE;
10677 : 13230 : CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, in_elts[index]);
10678 : : }
10679 : 2910 : return build_constructor (type, v);
10680 : : }
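/* Editorial illustration (not part of fold-const.cc): a source-level
   permutation with constant operands that fold_vec_perm can evaluate at
   compile time.  The typedef and function name are made up for this sketch.  */
typedef int v4si __attribute__ ((vector_size (16)));
static v4si
shuffle_example (void)
{
  v4si a = { 0, 1, 2, 3 }, b = { 4, 5, 6, 7 };
  v4si sel = { 0, 4, 1, 5 };              /* interleave the low halves */
  return __builtin_shuffle (a, b, sel);   /* folds to { 0, 4, 1, 5 } */
}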
10681 : :
10682 : : /* Try to fold a pointer difference of type TYPE between two address
10683 : : expressions of array references AREF0 and AREF1 using location LOC. Return a
10684 : : simplified expression for the difference or NULL_TREE. */
10685 : :
10686 : : static tree
10687 : 39 : fold_addr_of_array_ref_difference (location_t loc, tree type,
10688 : : tree aref0, tree aref1,
10689 : : bool use_pointer_diff)
10690 : : {
10691 : 39 : tree base0 = TREE_OPERAND (aref0, 0);
10692 : 39 : tree base1 = TREE_OPERAND (aref1, 0);
10693 : 39 : tree base_offset = build_int_cst (type, 0);
10694 : :
10695 : : /* If the bases are array references as well, recurse. If the bases
10696 : : are pointer indirections, compute the difference of the pointers.
10697 : : If the bases are equal, we are set. */
10698 : 39 : if ((TREE_CODE (base0) == ARRAY_REF
10699 : 1 : && TREE_CODE (base1) == ARRAY_REF
10700 : 1 : && (base_offset
10701 : 1 : = fold_addr_of_array_ref_difference (loc, type, base0, base1,
10702 : : use_pointer_diff)))
10703 : 38 : || (INDIRECT_REF_P (base0)
10704 : 7 : && INDIRECT_REF_P (base1)
10705 : 7 : && (base_offset
10706 : 7 : = use_pointer_diff
10707 : 8 : ? fold_binary_loc (loc, POINTER_DIFF_EXPR, type,
10708 : 1 : TREE_OPERAND (base0, 0),
10709 : 1 : TREE_OPERAND (base1, 0))
10710 : 12 : : fold_binary_loc (loc, MINUS_EXPR, type,
10711 : 6 : fold_convert (type,
10712 : : TREE_OPERAND (base0, 0)),
10713 : 6 : fold_convert (type,
10714 : : TREE_OPERAND (base1, 0)))))
10715 : 70 : || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
10716 : : {
10717 : 15 : tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
10718 : 15 : tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
10719 : 15 : tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
10720 : 15 : tree diff = fold_build2_loc (loc, MINUS_EXPR, type, op0, op1);
10721 : 15 : return fold_build2_loc (loc, PLUS_EXPR, type,
10722 : : base_offset,
10723 : : fold_build2_loc (loc, MULT_EXPR, type,
10724 : 15 : diff, esz));
10725 : : }
10726 : : return NULL_TREE;
10727 : : }
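/* Editorial illustration (not part of fold-const.cc): the kind of
   difference of array-element addresses the routine above simplifies.
   The array and function names are made up for this sketch.  */
static double sketch_array[16];
static long
aref_diff (long i, long j)
{
  return &sketch_array[i] - &sketch_array[j];   /* simplifies to i - j */
}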
10728 : :
10729 : : /* If the real or vector real constant CST of type TYPE has an exact
10730 : : inverse, return it; otherwise return NULL_TREE. */
10731 : :
10732 : : tree
10733 : 1096481 : exact_inverse (tree type, tree cst)
10734 : : {
10735 : 1096481 : REAL_VALUE_TYPE r;
10736 : 1096481 : tree unit_type;
10737 : 1096481 : machine_mode mode;
10738 : :
10739 : 1096481 : switch (TREE_CODE (cst))
10740 : : {
10741 : 1095956 : case REAL_CST:
10742 : 1095956 : r = TREE_REAL_CST (cst);
10743 : :
10744 : 1095956 : if (exact_real_inverse (TYPE_MODE (type), &r))
10745 : 313417 : return build_real (type, r);
10746 : :
10747 : : return NULL_TREE;
10748 : :
10749 : 525 : case VECTOR_CST:
10750 : 525 : {
10751 : 525 : unit_type = TREE_TYPE (type);
10752 : 525 : mode = TYPE_MODE (unit_type);
10753 : :
10754 : 525 : tree_vector_builder elts;
10755 : 525 : if (!elts.new_unary_operation (type, cst, false))
10756 : : return NULL_TREE;
10757 : 525 : unsigned int count = elts.encoded_nelts ();
10758 : 585 : for (unsigned int i = 0; i < count; ++i)
10759 : : {
10760 : 525 : r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
10761 : 525 : if (!exact_real_inverse (mode, &r))
10762 : : return NULL_TREE;
10763 : 60 : elts.quick_push (build_real (unit_type, r));
10764 : : }
10765 : :
10766 : 60 : return elts.build ();
10767 : 525 : }
10768 : :
10769 : : default:
10770 : : return NULL_TREE;
10771 : : }
10772 : : }
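/* Editorial illustration (not part of fold-const.cc): 0.25 has an exact
   reciprocal (4.0), so a division by it may be turned into a multiplication
   without changing the result; 0.3 has no exact reciprocal, so that
   division is left alone.  */
static double div_exact   (double x) { return x / 0.25; }  /* may become x * 4.0 */
static double div_inexact (double x) { return x / 0.3;  }  /* stays a division */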
10773 : :
10774 : : /* Mask out the tz least significant bits of X of type TYPE where
10775 : : tz is the number of trailing zeroes in Y. */
10776 : : static wide_int
10777 : 105088 : mask_with_tz (tree type, const wide_int &x, const wide_int &y)
10778 : : {
10779 : 105088 : int tz = wi::ctz (y);
10780 : 105088 : if (tz > 0)
10781 : 7681 : return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
10782 : 97407 : return x;
10783 : : }
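/* Editorial illustration (not part of fold-const.cc): mask_with_tz on
   ordinary unsigned ints.  With y == 8 (three trailing zero bits), the
   three low bits of x are cleared.  Y must be nonzero for __builtin_ctz.  */
static unsigned
mask_with_tz_by_hand (unsigned x, unsigned y)
{
  int tz = __builtin_ctz (y);
  return tz > 0 ? x & ~((1u << tz) - 1) : x;
}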
10784 : :
10785 : : /* Return true when T is an address and is known to be nonzero.
10786 : : For floating point we further ensure that T is not denormal.
10787 : : Similar logic is present in nonzero_address in rtlanal.h.
10788 : :
10789 : : If the return value is based on the assumption that signed overflow
10790 : : is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
10791 : : change *STRICT_OVERFLOW_P. */
10792 : :
10793 : : static bool
10794 : 136577607 : tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
10795 : : {
10796 : 136837163 : tree type = TREE_TYPE (t);
10797 : 136837163 : enum tree_code code;
10798 : :
10799 : : /* Doing something useful for floating point would need more work. */
10800 : 136837163 : if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
10801 : : return false;
10802 : :
10803 : 136743719 : code = TREE_CODE (t);
10804 : 136743719 : switch (TREE_CODE_CLASS (code))
10805 : : {
10806 : 743646 : case tcc_unary:
10807 : 743646 : return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
10808 : 743646 : strict_overflow_p);
10809 : 2553451 : case tcc_binary:
10810 : 2553451 : case tcc_comparison:
10811 : 2553451 : return tree_binary_nonzero_warnv_p (code, type,
10812 : 2553451 : TREE_OPERAND (t, 0),
10813 : 2553451 : TREE_OPERAND (t, 1),
10814 : 2553451 : strict_overflow_p);
10815 : 10998861 : case tcc_constant:
10816 : 10998861 : case tcc_declaration:
10817 : 10998861 : case tcc_reference:
10818 : 10998861 : return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10819 : :
10820 : 122447761 : default:
10821 : 122447761 : break;
10822 : : }
10823 : :
10824 : 122447761 : switch (code)
10825 : : {
10826 : 553730 : case TRUTH_NOT_EXPR:
10827 : 553730 : return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
10828 : 553730 : strict_overflow_p);
10829 : :
10830 : 72903 : case TRUTH_AND_EXPR:
10831 : 72903 : case TRUTH_OR_EXPR:
10832 : 72903 : case TRUTH_XOR_EXPR:
10833 : 72903 : return tree_binary_nonzero_warnv_p (code, type,
10834 : 72903 : TREE_OPERAND (t, 0),
10835 : 72903 : TREE_OPERAND (t, 1),
10836 : 72903 : strict_overflow_p);
10837 : :
10838 : 119054444 : case COND_EXPR:
10839 : 119054444 : case CONSTRUCTOR:
10840 : 119054444 : case OBJ_TYPE_REF:
10841 : 119054444 : case ADDR_EXPR:
10842 : 119054444 : case WITH_SIZE_EXPR:
10843 : 119054444 : case SSA_NAME:
10844 : 119054444 : return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10845 : :
10846 : 80416 : case COMPOUND_EXPR:
10847 : 80416 : case MODIFY_EXPR:
10848 : 80416 : case BIND_EXPR:
10849 : 80416 : return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
10850 : 80416 : strict_overflow_p);
10851 : :
10852 : 179140 : case SAVE_EXPR:
10853 : 179140 : return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
10854 : 179140 : strict_overflow_p);
10855 : :
10856 : 2433403 : case CALL_EXPR:
10857 : 2433403 : {
10858 : 2433403 : tree fndecl = get_callee_fndecl (t);
10859 : 2433403 : if (!fndecl) return false;
10860 : 2431713 : if (flag_delete_null_pointer_checks && !flag_check_new
10861 : 2431704 : && DECL_IS_OPERATOR_NEW_P (fndecl)
10862 : 2432423 : && !TREE_NOTHROW (fndecl))
10863 : : return true;
10864 : 2432333 : if (flag_delete_null_pointer_checks
10865 : 4864010 : && lookup_attribute ("returns_nonnull",
10866 : 2431677 : TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
10867 : : return true;
10868 : 2432325 : return alloca_call_p (t);
10869 : : }
10870 : :
10871 : : default:
10872 : : break;
10873 : : }
10874 : : return false;
10875 : : }
10876 : :
10877 : : /* Return true when T is an address and is known to be nonzero.
10878 : : Handle warnings about undefined signed overflow. */
10879 : :
10880 : : bool
10881 : 135620435 : tree_expr_nonzero_p (tree t)
10882 : : {
10883 : 135620435 : bool ret, strict_overflow_p;
10884 : :
10885 : 135620435 : strict_overflow_p = false;
10886 : 135620435 : ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
10887 : 135620435 : if (strict_overflow_p)
10888 : 0 : fold_overflow_warning (("assuming signed overflow does not occur when "
10889 : : "determining that expression is always "
10890 : : "non-zero"),
10891 : : WARN_STRICT_OVERFLOW_MISC);
10892 : 135620435 : return ret;
10893 : : }
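/* Editorial illustration (not part of fold-const.cc): the address of an
   ordinary (non-weak) object is known to be nonzero, so the comparison
   below is typically folded to 1 (subject to -fdelete-null-pointer-checks).
   The names are made up for this sketch.  */
extern int sketch_global;
static int
addr_is_nonzero (void)
{
  return &sketch_global != 0;   /* typically folds to 1 */
}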
10894 : :
10895 : : /* Return true if T is known not to be equal to an integer W. */
10896 : :
10897 : : bool
10898 : 93720778 : expr_not_equal_to (tree t, const wide_int &w)
10899 : : {
10900 : 93720778 : int_range_max vr;
10901 : 93720778 : switch (TREE_CODE (t))
10902 : : {
10903 : 1065314 : case INTEGER_CST:
10904 : 1065314 : return wi::to_wide (t) != w;
10905 : :
10906 : 92654409 : case SSA_NAME:
10907 : 92654409 : if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
10908 : : return false;
10909 : :
10910 : 185308818 : get_range_query (cfun)->range_of_expr (vr, t);
10911 : 92654409 : if (!vr.undefined_p () && !vr.contains_p (w))
10912 : : return true;
10913 : : /* If T has some known zero bits and W has any of those bits set,
10914 : : then T is known not to be equal to W. */
10915 : 92569702 : if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
10916 : 185139012 : TYPE_PRECISION (TREE_TYPE (t))), 0))
10917 : : return true;
10918 : : return false;
10919 : :
10920 : : default:
10921 : : return false;
10922 : : }
10923 : 93720778 : }
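/* Editorial sketch (assumed caller, not from this file): a typical use of
   expr_not_equal_to is ruling out a specific value, e.g. a divisor of -1,
   before applying a simplification; match.pd uses it in this style.  */
static bool
divisor_is_not_minus_one (tree op)
{
  return expr_not_equal_to (op, wi::minus_one (TYPE_PRECISION (TREE_TYPE (op))));
}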
10924 : :
10925 : : /* Fold a binary expression of code CODE and type TYPE with operands
10926 : : OP0 and OP1. LOC is the location of the resulting expression.
10927 : : Return the folded expression if folding is successful. Otherwise,
10928 : : return NULL_TREE. */
10929 : :
10930 : : tree
10931 : 733572757 : fold_binary_loc (location_t loc, enum tree_code code, tree type,
10932 : : tree op0, tree op1)
10933 : : {
10934 : 733572757 : enum tree_code_class kind = TREE_CODE_CLASS (code);
10935 : 733572757 : tree arg0, arg1, tem;
10936 : 733572757 : tree t1 = NULL_TREE;
10937 : 733572757 : bool strict_overflow_p;
10938 : 733572757 : unsigned int prec;
10939 : :
10940 : 733572757 : gcc_assert (IS_EXPR_CODE_CLASS (kind)
10941 : : && TREE_CODE_LENGTH (code) == 2
10942 : : && op0 != NULL_TREE
10943 : : && op1 != NULL_TREE);
10944 : :
10945 : 733572757 : arg0 = op0;
10946 : 733572757 : arg1 = op1;
10947 : :
10948 : : /* Strip any conversions that don't change the mode. This is
10949 : : safe for every expression, except for a comparison expression
10950 : : because its signedness is derived from its operands. So, in
10951 : : the latter case, only strip conversions that don't change the
10952 : : signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
10953 : : preserved.
10954 : :
10955 : : Note that this is done as an internal manipulation within the
10956 : : constant folder, in order to find the simplest representation
10957 : : of the arguments so that their form can be studied. In any
10958 : : case, the appropriate type conversions should be put back in
10959 : : the tree that will get out of the constant folder. */
10960 : :
10961 : 733572757 : if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
10962 : : {
10963 : 159763303 : STRIP_SIGN_NOPS (arg0);
10964 : 159763303 : STRIP_SIGN_NOPS (arg1);
10965 : : }
10966 : : else
10967 : : {
10968 : 573809454 : STRIP_NOPS (arg0);
10969 : 573809454 : STRIP_NOPS (arg1);
10970 : : }
10971 : :
10972 : : /* Note that TREE_CONSTANT isn't enough: static var addresses are
10973 : : constant but we can't do arithmetic on them. */
10974 : 733572757 : if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
10975 : : {
10976 : 186485205 : tem = const_binop (code, type, arg0, arg1);
10977 : 186485205 : if (tem != NULL_TREE)
10978 : : {
10979 : 183974046 : if (TREE_TYPE (tem) != type)
10980 : 1538817 : tem = fold_convert_loc (loc, type, tem);
10981 : 183974046 : return tem;
10982 : : }
10983 : : }
10984 : :
10985 : : /* If this is a commutative operation, and ARG0 is a constant, move it
10986 : : to ARG1 to reduce the number of tests below. */
10987 : 549598711 : if (commutative_tree_code (code)
10988 : 549598711 : && tree_swap_operands_p (arg0, arg1))
10989 : 27884713 : return fold_build2_loc (loc, code, type, op1, op0);
10990 : :
10991 : : /* Likewise if this is a comparison, and ARG0 is a constant, move it
10992 : : to ARG1 to reduce the number of tests below. */
10993 : 521713998 : if (kind == tcc_comparison
10994 : 521713998 : && tree_swap_operands_p (arg0, arg1))
10995 : 6887793 : return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
10996 : :
10997 : 514826205 : tem = generic_simplify (loc, code, type, op0, op1);
10998 : 514826205 : if (tem)
10999 : : return tem;
11000 : :
11001 : : /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
11002 : :
11003 : : First check for cases where an arithmetic operation is applied to a
11004 : : compound, conditional, or comparison operation. Push the arithmetic
11005 : : operation inside the compound or conditional to see if any folding
11006 : : can then be done. Convert comparison to conditional for this purpose.
11007 : : This also optimizes non-constant cases that used to be done in
11008 : : expand_expr.
11009 : :
11010 : : Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
11011 : : one of the operands is a comparison and the other is a comparison, a
11012 : : BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
11013 : : code below would make the expression more complex. Change it to a
11014 : : TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
11015 : : TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
11016 : :
11017 : 436655304 : if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
11018 : : || code == EQ_EXPR || code == NE_EXPR)
11019 : 49802292 : && !VECTOR_TYPE_P (TREE_TYPE (arg0))
11020 : 49246822 : && ((truth_value_p (TREE_CODE (arg0))
11021 : 1130435 : && (truth_value_p (TREE_CODE (arg1))
11022 : 848501 : || (TREE_CODE (arg1) == BIT_AND_EXPR
11023 : 40 : && integer_onep (TREE_OPERAND (arg1, 1)))))
11024 : 48964872 : || (truth_value_p (TREE_CODE (arg1))
11025 : 6528 : && (truth_value_p (TREE_CODE (arg0))
11026 : 6528 : || (TREE_CODE (arg0) == BIT_AND_EXPR
11027 : 167 : && integer_onep (TREE_OPERAND (arg0, 1)))))))
11028 : : {
11029 : 315610 : tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
11030 : 33646 : : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
11031 : : : TRUTH_XOR_EXPR,
11032 : : boolean_type_node,
11033 : : fold_convert_loc (loc, boolean_type_node, arg0),
11034 : : fold_convert_loc (loc, boolean_type_node, arg1));
11035 : :
11036 : 281964 : if (code == EQ_EXPR)
11037 : 28291 : tem = invert_truthvalue_loc (loc, tem);
11038 : :
11039 : 281964 : return fold_convert_loc (loc, type, tem);
11040 : : }
11041 : :
11042 : 436373340 : if (TREE_CODE_CLASS (code) == tcc_binary
11043 : 250427599 : || TREE_CODE_CLASS (code) == tcc_comparison)
11044 : : {
11045 : 268486768 : if (TREE_CODE (arg0) == COMPOUND_EXPR)
11046 : : {
11047 : 79724 : tem = fold_build2_loc (loc, code, type,
11048 : 79724 : fold_convert_loc (loc, TREE_TYPE (op0),
11049 : 79724 : TREE_OPERAND (arg0, 1)), op1);
11050 : 79724 : return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
11051 : 79724 : tem);
11052 : : }
11053 : 268407044 : if (TREE_CODE (arg1) == COMPOUND_EXPR)
11054 : : {
11055 : 3120 : tem = fold_build2_loc (loc, code, type, op0,
11056 : 3120 : fold_convert_loc (loc, TREE_TYPE (op1),
11057 : 3120 : TREE_OPERAND (arg1, 1)));
11058 : 3120 : return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
11059 : 3120 : tem);
11060 : : }
11061 : :
11062 : 268403924 : if (TREE_CODE (arg0) == COND_EXPR
11063 : 268036123 : || TREE_CODE (arg0) == VEC_COND_EXPR
11064 : 268033889 : || COMPARISON_CLASS_P (arg0))
11065 : : {
11066 : 701115 : tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
11067 : : arg0, arg1,
11068 : : /*cond_first_p=*/1);
11069 : 701115 : if (tem != NULL_TREE)
11070 : : return tem;
11071 : : }
11072 : :
11073 : 267940550 : if (TREE_CODE (arg1) == COND_EXPR
11074 : 267720058 : || TREE_CODE (arg1) == VEC_COND_EXPR
11075 : 267719734 : || COMPARISON_CLASS_P (arg1))
11076 : : {
11077 : 230526 : tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
11078 : : arg1, arg0,
11079 : : /*cond_first_p=*/0);
11080 : 230526 : if (tem != NULL_TREE)
11081 : : return tem;
11082 : : }
11083 : : }
11084 : :
11085 : 435818842 : switch (code)
11086 : : {
11087 : 44833746 : case MEM_REF:
11088 : : /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
11089 : 44833746 : if (TREE_CODE (arg0) == ADDR_EXPR
11090 : 44833746 : && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
11091 : : {
11092 : 705065 : tree iref = TREE_OPERAND (arg0, 0);
11093 : 705065 : return fold_build2 (MEM_REF, type,
11094 : : TREE_OPERAND (iref, 0),
11095 : : int_const_binop (PLUS_EXPR, arg1,
11096 : : TREE_OPERAND (iref, 1)));
11097 : : }
11098 : :
11099 : : /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
11100 : 44128681 : if (TREE_CODE (arg0) == ADDR_EXPR
11101 : 44128681 : && handled_component_p (TREE_OPERAND (arg0, 0)))
11102 : : {
11103 : 2103950 : tree base;
11104 : 2103950 : poly_int64 coffset;
11105 : 2103950 : base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
11106 : : &coffset);
11107 : 2103950 : if (!base)
11108 : : return NULL_TREE;
11109 : 2100383 : return fold_build2 (MEM_REF, type,
11110 : : build1 (ADDR_EXPR, TREE_TYPE (arg0), base),
11111 : : int_const_binop (PLUS_EXPR, arg1,
11112 : : size_int (coffset)));
11113 : : }
11114 : :
11115 : : return NULL_TREE;
11116 : :
11117 : 27836722 : case POINTER_PLUS_EXPR:
11118 : : /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
11119 : 55673024 : if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
11120 : 55663886 : && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
11121 : 34476 : return fold_convert_loc (loc, type,
11122 : : fold_build2_loc (loc, PLUS_EXPR, sizetype,
11123 : : fold_convert_loc (loc, sizetype,
11124 : : arg1),
11125 : : fold_convert_loc (loc, sizetype,
11126 : 34476 : arg0)));
11127 : :
11128 : : return NULL_TREE;
11129 : :
11130 : 57092789 : case PLUS_EXPR:
11131 : 57092789 : if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
11132 : : {
11133 : : /* X + (X / CST) * -CST is X % CST. */
11134 : 45723113 : if (TREE_CODE (arg1) == MULT_EXPR
11135 : 2427271 : && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
11136 : 45729135 : && operand_equal_p (arg0,
11137 : 6022 : TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
11138 : : {
11139 : 172 : tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
11140 : 172 : tree cst1 = TREE_OPERAND (arg1, 1);
11141 : 172 : tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
11142 : : cst1, cst0);
11143 : 172 : if (sum && integer_zerop (sum))
11144 : 172 : return fold_convert_loc (loc, type,
11145 : : fold_build2_loc (loc, TRUNC_MOD_EXPR,
11146 : 172 : TREE_TYPE (arg0), arg0,
11147 : 172 : cst0));
11148 : : }
11149 : : }
11150 : :
11151 : : /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
11152 : : one. Make sure the type is not saturating and has the signedness of
11153 : : the stripped operands, as fold_plusminus_mult_expr will re-associate.
11154 : : ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
11155 : 57092617 : if ((TREE_CODE (arg0) == MULT_EXPR
11156 : 46322571 : || TREE_CODE (arg1) == MULT_EXPR)
11157 : 12296895 : && !TYPE_SATURATING (type)
11158 : 12296895 : && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
11159 : 11908593 : && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
11160 : 68391630 : && (!FLOAT_TYPE_P (type) || flag_associative_math))
11161 : : {
11162 : 8138366 : tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
11163 : 8138366 : if (tem)
11164 : : return tem;
11165 : : }
11166 : :
11167 : 55947802 : if (! FLOAT_TYPE_P (type))
11168 : : {
11169 : : /* Reassociate (plus (plus (mult) (foo)) (mult)) as
11170 : : (plus (plus (mult) (mult)) (foo)) so that we can
11171 : : take advantage of the factoring cases below. */
11172 : 273722 : if (ANY_INTEGRAL_TYPE_P (type)
11173 : 44580472 : && TYPE_OVERFLOW_WRAPS (type)
11174 : 44580472 : && (((TREE_CODE (arg0) == PLUS_EXPR
11175 : 27773102 : || TREE_CODE (arg0) == MINUS_EXPR)
11176 : 3160000 : && TREE_CODE (arg1) == MULT_EXPR)
11177 : 27287828 : || ((TREE_CODE (arg1) == PLUS_EXPR
11178 : 27287828 : || TREE_CODE (arg1) == MINUS_EXPR)
11179 : 390781 : && TREE_CODE (arg0) == MULT_EXPR)))
11180 : : {
11181 : 543805 : tree parg0, parg1, parg, marg;
11182 : 543805 : enum tree_code pcode;
11183 : :
11184 : 543805 : if (TREE_CODE (arg1) == MULT_EXPR)
11185 : : parg = arg0, marg = arg1;
11186 : : else
11187 : 58531 : parg = arg1, marg = arg0;
11188 : 543805 : pcode = TREE_CODE (parg);
11189 : 543805 : parg0 = TREE_OPERAND (parg, 0);
11190 : 543805 : parg1 = TREE_OPERAND (parg, 1);
11191 : 543805 : STRIP_NOPS (parg0);
11192 : 543805 : STRIP_NOPS (parg1);
11193 : :
11194 : 543805 : if (TREE_CODE (parg0) == MULT_EXPR
11195 : 240556 : && TREE_CODE (parg1) != MULT_EXPR)
11196 : 213601 : return fold_build2_loc (loc, pcode, type,
11197 : : fold_build2_loc (loc, PLUS_EXPR, type,
11198 : : fold_convert_loc (loc, type,
11199 : : parg0),
11200 : : fold_convert_loc (loc, type,
11201 : : marg)),
11202 : 213601 : fold_convert_loc (loc, type, parg1));
11203 : 330204 : if (TREE_CODE (parg0) != MULT_EXPR
11204 : 303249 : && TREE_CODE (parg1) == MULT_EXPR)
11205 : 113684 : return
11206 : 113684 : fold_build2_loc (loc, PLUS_EXPR, type,
11207 : : fold_convert_loc (loc, type, parg0),
11208 : : fold_build2_loc (loc, pcode, type,
11209 : : fold_convert_loc (loc, type, marg),
11210 : : fold_convert_loc (loc, type,
11211 : 113684 : parg1)));
11212 : : }
11213 : : }
11214 : : else
11215 : : {
11216 : : /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
11217 : : to __complex__ ( x, y ). This is not the same for SNaNs or
11218 : : if signed zeros are involved. */
11219 : 11367330 : if (!HONOR_SNANS (arg0)
11220 : 11366166 : && !HONOR_SIGNED_ZEROS (arg0)
11221 : 11386044 : && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11222 : : {
11223 : 3086 : tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11224 : 3086 : tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
11225 : 3086 : tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
11226 : 3086 : bool arg0rz = false, arg0iz = false;
11227 : 128 : if ((arg0r && (arg0rz = real_zerop (arg0r)))
11228 : 3190 : || (arg0i && (arg0iz = real_zerop (arg0i))))
11229 : : {
11230 : 86 : tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
11231 : 86 : tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
11232 : 86 : if (arg0rz && arg1i && real_zerop (arg1i))
11233 : : {
11234 : 22 : tree rp = arg1r ? arg1r
11235 : 0 : : build1 (REALPART_EXPR, rtype, arg1);
11236 : 22 : tree ip = arg0i ? arg0i
11237 : 0 : : build1 (IMAGPART_EXPR, rtype, arg0);
11238 : 22 : return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11239 : : }
11240 : 64 : else if (arg0iz && arg1r && real_zerop (arg1r))
11241 : : {
11242 : 53 : tree rp = arg0r ? arg0r
11243 : 0 : : build1 (REALPART_EXPR, rtype, arg0);
11244 : 53 : tree ip = arg1i ? arg1i
11245 : 0 : : build1 (IMAGPART_EXPR, rtype, arg1);
11246 : 53 : return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11247 : : }
11248 : : }
11249 : : }
11250 : :
11251 : : /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
11252 : : We associate floats only if the user has specified
11253 : : -fassociative-math. */
11254 : 11367255 : if (flag_associative_math
11255 : 18617 : && TREE_CODE (arg1) == PLUS_EXPR
11256 : 38 : && TREE_CODE (arg0) != MULT_EXPR)
11257 : : {
11258 : 21 : tree tree10 = TREE_OPERAND (arg1, 0);
11259 : 21 : tree tree11 = TREE_OPERAND (arg1, 1);
11260 : 21 : if (TREE_CODE (tree11) == MULT_EXPR
11261 : 5 : && TREE_CODE (tree10) == MULT_EXPR)
11262 : : {
11263 : 1 : tree tree0;
11264 : 1 : tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
11265 : 1 : return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
11266 : : }
11267 : : }
11268 : : /* Convert (b*c + d*e) + a into b*c + (d*e + a).
11269 : : We associate floats only if the user has specified
11270 : : -fassociative-math. */
11271 : 11367254 : if (flag_associative_math
11272 : 18616 : && TREE_CODE (arg0) == PLUS_EXPR
11273 : 1182 : && TREE_CODE (arg1) != MULT_EXPR)
11274 : : {
11275 : 796 : tree tree00 = TREE_OPERAND (arg0, 0);
11276 : 796 : tree tree01 = TREE_OPERAND (arg0, 1);
11277 : 796 : if (TREE_CODE (tree01) == MULT_EXPR
11278 : 51 : && TREE_CODE (tree00) == MULT_EXPR)
11279 : : {
11280 : 11 : tree tree0;
11281 : 11 : tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
11282 : 11 : return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
11283 : : }
11284 : : }
11285 : : }
11286 : :
11287 : 11366458 : bit_rotate:
11288 : : /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
11289 : : is a rotate of A by C1 bits. */
11290 : : /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
11291 : : is a rotate of A by B bits.
11292 : : Similarly for (A << B) | (A >> (-B & C3)) where C3 is Z-1,
11293 : : though in this case CODE must be | and not + or ^, otherwise
11294 : : it doesn't return A when B is 0. */
11295 : 58021025 : {
11296 : 58021025 : enum tree_code code0, code1;
11297 : 58021025 : tree rtype;
11298 : 58021025 : code0 = TREE_CODE (arg0);
11299 : 58021025 : code1 = TREE_CODE (arg1);
11300 : 51591 : if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
11301 : 58005026 : || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
11302 : 38989 : && operand_equal_p (TREE_OPERAND (arg0, 0),
11303 : 38989 : TREE_OPERAND (arg1, 0), 0)
11304 : 36306 : && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
11305 : 36306 : TYPE_UNSIGNED (rtype))
11306 : : /* Only create rotates in complete modes. Other cases are not
11307 : : expanded properly. */
11308 : 58047187 : && (element_precision (rtype)
11309 : 52324 : == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
11310 : : {
11311 : 26100 : tree tree01, tree11;
11312 : 26100 : tree orig_tree01, orig_tree11;
11313 : 26100 : enum tree_code code01, code11;
11314 : :
11315 : 26100 : tree01 = orig_tree01 = TREE_OPERAND (arg0, 1);
11316 : 26100 : tree11 = orig_tree11 = TREE_OPERAND (arg1, 1);
11317 : 26100 : STRIP_NOPS (tree01);
11318 : 26100 : STRIP_NOPS (tree11);
11319 : 26100 : code01 = TREE_CODE (tree01);
11320 : 26100 : code11 = TREE_CODE (tree11);
11321 : 26100 : if (code11 != MINUS_EXPR
11322 : 25416 : && (code01 == MINUS_EXPR || code01 == BIT_AND_EXPR))
11323 : : {
11324 : 1478 : std::swap (code0, code1);
11325 : 1478 : std::swap (code01, code11);
11326 : 1478 : std::swap (tree01, tree11);
11327 : 1478 : std::swap (orig_tree01, orig_tree11);
11328 : : }
11329 : 52200 : if (code01 == INTEGER_CST
11330 : 3151 : && code11 == INTEGER_CST
11331 : 32400 : && (wi::to_widest (tree01) + wi::to_widest (tree11)
11332 : 32400 : == element_precision (rtype)))
11333 : : {
11334 : 6020 : tem = build2_loc (loc, LROTATE_EXPR,
11335 : 3010 : rtype, TREE_OPERAND (arg0, 0),
11336 : : code0 == LSHIFT_EXPR
11337 : : ? orig_tree01 : orig_tree11);
11338 : 3010 : return fold_convert_loc (loc, type, tem);
11339 : : }
11340 : 23090 : else if (code11 == MINUS_EXPR)
11341 : : {
11342 : 939 : tree tree110, tree111;
11343 : 939 : tree110 = TREE_OPERAND (tree11, 0);
11344 : 939 : tree111 = TREE_OPERAND (tree11, 1);
11345 : 939 : STRIP_NOPS (tree110);
11346 : 939 : STRIP_NOPS (tree111);
11347 : 939 : if (TREE_CODE (tree110) == INTEGER_CST
11348 : 928 : && compare_tree_int (tree110,
11349 : 928 : element_precision (rtype)) == 0
11350 : 1851 : && operand_equal_p (tree01, tree111, 0))
11351 : : {
11352 : 775 : tem = build2_loc (loc, (code0 == LSHIFT_EXPR
11353 : : ? LROTATE_EXPR : RROTATE_EXPR),
11354 : 556 : rtype, TREE_OPERAND (arg0, 0),
11355 : : orig_tree01);
11356 : 556 : return fold_convert_loc (loc, type, tem);
11357 : : }
11358 : : }
11359 : 22151 : else if (code == BIT_IOR_EXPR
11360 : 21037 : && code11 == BIT_AND_EXPR
11361 : 43113 : && pow2p_hwi (element_precision (rtype)))
11362 : : {
11363 : 20962 : tree tree110, tree111;
11364 : 20962 : tree110 = TREE_OPERAND (tree11, 0);
11365 : 20962 : tree111 = TREE_OPERAND (tree11, 1);
11366 : 20962 : STRIP_NOPS (tree110);
11367 : 20962 : STRIP_NOPS (tree111);
11368 : 20962 : if (TREE_CODE (tree110) == NEGATE_EXPR
11369 : 20475 : && TREE_CODE (tree111) == INTEGER_CST
11370 : 20475 : && compare_tree_int (tree111,
11371 : 20475 : element_precision (rtype) - 1) == 0
11372 : 41423 : && operand_equal_p (tree01, TREE_OPERAND (tree110, 0), 0))
11373 : : {
11374 : 30539 : tem = build2_loc (loc, (code0 == LSHIFT_EXPR
11375 : : ? LROTATE_EXPR : RROTATE_EXPR),
11376 : 20383 : rtype, TREE_OPERAND (arg0, 0),
11377 : : orig_tree01);
11378 : 20383 : return fold_convert_loc (loc, type, tem);
11379 : : }
11380 : : }
11381 : : }
11382 : : }
11383 : :
11384 : 140586556 : associate:
11385 : : /* In most languages, we can't associate operations on floats through
11386 : : parentheses. Rather than remember where the parentheses were, we
11387 : : don't associate floats at all, unless the user has specified
11388 : : -fassociative-math.
11389 : : And, we need to make sure type is not saturating. */
11390 : :
11391 : 140586556 : if ((! FLOAT_TYPE_P (type) || flag_associative_math)
11392 : 100456990 : && !TYPE_SATURATING (type)
11393 : 241043546 : && !TYPE_OVERFLOW_SANITIZED (type))
11394 : : {
11395 : 100428980 : tree var0, minus_var0, con0, minus_con0, lit0, minus_lit0;
11396 : 100428980 : tree var1, minus_var1, con1, minus_con1, lit1, minus_lit1;
11397 : 100428980 : tree atype = type;
11398 : 100428980 : bool ok = true;
11399 : :
11400 : : /* Split both trees into variables, constants, and literals. Then
11401 : : associate each group together, the constants with literals,
11402 : : then the result with variables. This increases the chances of
11403 : : literals being recombined later and of generating relocatable
11404 : : expressions for the sum of a constant and literal. */
11405 : 100428980 : var0 = split_tree (arg0, type, code,
11406 : : &minus_var0, &con0, &minus_con0,
11407 : : &lit0, &minus_lit0, 0);
11408 : 100428980 : var1 = split_tree (arg1, type, code,
11409 : : &minus_var1, &con1, &minus_con1,
11410 : : &lit1, &minus_lit1, code == MINUS_EXPR);
11411 : :
11412 : : /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
11413 : 100428980 : if (code == MINUS_EXPR)
11414 : 10628047 : code = PLUS_EXPR;
11415 : :
11416 : : /* With undefined overflow prefer doing association in a type
11417 : : which wraps on overflow, if that is one of the operand types. */
11418 : 100428749 : if ((POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
11419 : 199651824 : && !TYPE_OVERFLOW_WRAPS (type))
11420 : : {
11421 : 56367797 : if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11422 : 55751949 : && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11423 : 720020 : atype = TREE_TYPE (arg0);
11424 : 54928127 : else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
11425 : 54743619 : && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
11426 : 205333 : atype = TREE_TYPE (arg1);
11427 : 28439356 : gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
11428 : : }
11429 : :
11430 : : /* With undefined overflow we can only associate constants with one
11431 : : variable, and constants whose association doesn't overflow. */
11432 : 100428749 : if ((POINTER_TYPE_P (atype) || INTEGRAL_TYPE_P (atype))
11433 : 199651824 : && !TYPE_OVERFLOW_WRAPS (atype))
11434 : : {
11435 : 27514003 : if ((var0 && var1) || (minus_var0 && minus_var1))
11436 : : {
11437 : : /* ??? If split_tree would handle NEGATE_EXPR we could
11438 : : simply reject these cases and the allowed cases would
11439 : : be the var0/minus_var1 ones. */
11440 : 1227 : tree tmp0 = var0 ? var0 : minus_var0;
11441 : 5163205 : tree tmp1 = var1 ? var1 : minus_var1;
11442 : 5163205 : bool one_neg = false;
11443 : :
11444 : 5163205 : if (TREE_CODE (tmp0) == NEGATE_EXPR)
11445 : : {
11446 : 1656 : tmp0 = TREE_OPERAND (tmp0, 0);
11447 : 1656 : one_neg = !one_neg;
11448 : : }
11449 : 4646541 : if (CONVERT_EXPR_P (tmp0)
11450 : 531647 : && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
11451 : 5694575 : && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
11452 : 531370 : <= TYPE_PRECISION (atype)))
11453 : 519833 : tmp0 = TREE_OPERAND (tmp0, 0);
11454 : 5163205 : if (TREE_CODE (tmp1) == NEGATE_EXPR)
11455 : : {
11456 : 170 : tmp1 = TREE_OPERAND (tmp1, 0);
11457 : 170 : one_neg = !one_neg;
11458 : : }
11459 : 4855949 : if (CONVERT_EXPR_P (tmp1)
11460 : 328325 : && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
11461 : 5491420 : && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
11462 : 328215 : <= TYPE_PRECISION (atype)))
11463 : 318790 : tmp1 = TREE_OPERAND (tmp1, 0);
11464 : : /* The only case we can still associate with two variables
11465 : : is if they cancel out. */
11466 : 5163205 : if (!one_neg
11467 : 5163205 : || !operand_equal_p (tmp0, tmp1, 0))
11468 : : ok = false;
11469 : : }
11470 : 22025671 : else if ((var0 && minus_var1
11471 : 3663853 : && ! operand_equal_p (var0, minus_var1, 0))
11472 : 40712617 : || (minus_var0 && var1
11473 : 11326 : && ! operand_equal_p (minus_var0, var1, 0)))
11474 : : ok = false;
11475 : : }
11476 : :
11477 : : /* Only do something if we found more than two objects. Otherwise,
11478 : : nothing has changed and we risk infinite recursion. */
11479 : : if (ok
11480 : 91590668 : && ((var0 != 0) + (var1 != 0)
11481 : 91590668 : + (minus_var0 != 0) + (minus_var1 != 0)
11482 : 91590668 : + (con0 != 0) + (con1 != 0)
11483 : 91590668 : + (minus_con0 != 0) + (minus_con1 != 0)
11484 : 91590668 : + (lit0 != 0) + (lit1 != 0)
11485 : 91590668 : + (minus_lit0 != 0) + (minus_lit1 != 0)) > 2)
11486 : : {
11487 : 1744835 : int var0_origin = (var0 != 0) + 2 * (var1 != 0);
11488 : 3489670 : int minus_var0_origin
11489 : 1744835 : = (minus_var0 != 0) + 2 * (minus_var1 != 0);
11490 : 1744835 : int con0_origin = (con0 != 0) + 2 * (con1 != 0);
11491 : 3489670 : int minus_con0_origin
11492 : 1744835 : = (minus_con0 != 0) + 2 * (minus_con1 != 0);
11493 : 1744835 : int lit0_origin = (lit0 != 0) + 2 * (lit1 != 0);
11494 : 3489670 : int minus_lit0_origin
11495 : 1744835 : = (minus_lit0 != 0) + 2 * (minus_lit1 != 0);
11496 : 1744835 : var0 = associate_trees (loc, var0, var1, code, atype);
11497 : 1744835 : minus_var0 = associate_trees (loc, minus_var0, minus_var1,
11498 : : code, atype);
11499 : 1744835 : con0 = associate_trees (loc, con0, con1, code, atype);
11500 : 1744835 : minus_con0 = associate_trees (loc, minus_con0, minus_con1,
11501 : : code, atype);
11502 : 1744835 : lit0 = associate_trees (loc, lit0, lit1, code, atype);
11503 : 1744835 : minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
11504 : : code, atype);
11505 : :
11506 : 1744835 : if (minus_var0 && var0)
11507 : : {
11508 : 1149421 : var0_origin |= minus_var0_origin;
11509 : 1149421 : var0 = associate_trees (loc, var0, minus_var0,
11510 : : MINUS_EXPR, atype);
11511 : 1149421 : minus_var0 = 0;
11512 : 1149421 : minus_var0_origin = 0;
11513 : : }
11514 : 1744835 : if (minus_con0 && con0)
11515 : : {
11516 : 3586 : con0_origin |= minus_con0_origin;
11517 : 3586 : con0 = associate_trees (loc, con0, minus_con0,
11518 : : MINUS_EXPR, atype);
11519 : 3586 : minus_con0 = 0;
11520 : 3586 : minus_con0_origin = 0;
11521 : : }
11522 : :
11523 : : /* Preserve the MINUS_EXPR if the negative part of the literal is
11524 : : greater than the positive part. Otherwise, the multiplicative
11525 : :              folding code (i.e. extract_muldiv) may be fooled when
11526 : : unsigned constants are subtracted, like in the following
11527 : : example: ((X*2 + 4) - 8U)/2. */
11528 : 1744835 : if (minus_lit0 && lit0)
11529 : : {
11530 : 178113 : if (TREE_CODE (lit0) == INTEGER_CST
11531 : 178113 : && TREE_CODE (minus_lit0) == INTEGER_CST
11532 : 178113 : && tree_int_cst_lt (lit0, minus_lit0)
11533 : : /* But avoid ending up with only negated parts. */
11534 : 231467 : && (var0 || con0))
11535 : : {
11536 : 49381 : minus_lit0_origin |= lit0_origin;
11537 : 49381 : minus_lit0 = associate_trees (loc, minus_lit0, lit0,
11538 : : MINUS_EXPR, atype);
11539 : 49381 : lit0 = 0;
11540 : 49381 : lit0_origin = 0;
11541 : : }
11542 : : else
11543 : : {
11544 : 128732 : lit0_origin |= minus_lit0_origin;
11545 : 128732 : lit0 = associate_trees (loc, lit0, minus_lit0,
11546 : : MINUS_EXPR, atype);
11547 : 128732 : minus_lit0 = 0;
11548 : 128732 : minus_lit0_origin = 0;
11549 : : }
11550 : : }
11551 : :
11552 : : /* Don't introduce overflows through reassociation. */
11553 : 1186906 : if ((lit0 && TREE_OVERFLOW_P (lit0))
11554 : 2931703 : || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0)))
11555 : 1744835 : return NULL_TREE;
11556 : :
11557 : : /* Eliminate lit0 and minus_lit0 to con0 and minus_con0. */
11558 : 1744797 : con0_origin |= lit0_origin;
11559 : 1744797 : con0 = associate_trees (loc, con0, lit0, code, atype);
11560 : 1744797 : minus_con0_origin |= minus_lit0_origin;
11561 : 1744797 : minus_con0 = associate_trees (loc, minus_con0, minus_lit0,
11562 : : code, atype);
11563 : :
11564 : : /* Eliminate minus_con0. */
11565 : 1744797 : if (minus_con0)
11566 : : {
11567 : 555292 : if (con0)
11568 : : {
11569 : 8159 : con0_origin |= minus_con0_origin;
11570 : 8159 : con0 = associate_trees (loc, con0, minus_con0,
11571 : : MINUS_EXPR, atype);
11572 : : }
11573 : 547133 : else if (var0)
11574 : : {
11575 : 547133 : var0_origin |= minus_con0_origin;
11576 : 547133 : var0 = associate_trees (loc, var0, minus_con0,
11577 : : MINUS_EXPR, atype);
11578 : : }
11579 : : else
11580 : 0 : gcc_unreachable ();
11581 : : }
11582 : :
11583 : : /* Eliminate minus_var0. */
11584 : 1744797 : if (minus_var0)
11585 : : {
11586 : 300254 : if (con0)
11587 : : {
11588 : 300254 : con0_origin |= minus_var0_origin;
11589 : 300254 : con0 = associate_trees (loc, con0, minus_var0,
11590 : : MINUS_EXPR, atype);
11591 : : }
11592 : : else
11593 : 0 : gcc_unreachable ();
11594 : : }
11595 : :
11596 : : /* Reassociate only if there has been any actual association
11597 : : between subtrees from op0 and subtrees from op1 in at
11598 : : least one of the operands, otherwise we risk infinite
11599 : : recursion. See PR114084. */
11600 : 1744797 : if (var0_origin != 3 && con0_origin != 3)
11601 : : return NULL_TREE;
11602 : :
11603 : 1743105 : return
11604 : 1743105 : fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
11605 : 1743105 : code, atype));
11606 : : }
11607 : : }
11608 : :
11609 : : return NULL_TREE;
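/* Illustrative sketch (plain C, not part of fold-const.cc; function names
   are made up): the reassociation above splits the operands into variable,
   constant and literal parts and re-combines them, so the two functions
   below can fold to the same expression when the type wraps on overflow.  */
unsigned reassoc_before (unsigned x, unsigned y) { return (x + 3u) + (y + 4u); }
unsigned reassoc_after  (unsigned x, unsigned y) { return (x + y) + 7u; }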
11610 : :
11611 : 20875807 : case POINTER_DIFF_EXPR:
11612 : 20875807 : case MINUS_EXPR:
11613 : : /* Fold &a[i] - &a[j] to i-j. */
11614 : 20875807 : if (TREE_CODE (arg0) == ADDR_EXPR
11615 : 34726 : && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
11616 : 5951 : && TREE_CODE (arg1) == ADDR_EXPR
11617 : 20876351 : && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
11618 : : {
11619 : 38 : tree tem = fold_addr_of_array_ref_difference (loc, type,
11620 : 38 : TREE_OPERAND (arg0, 0),
11621 : 38 : TREE_OPERAND (arg1, 0),
11622 : : code
11623 : : == POINTER_DIFF_EXPR);
11624 : 38 : if (tem)
11625 : : return tem;
11626 : : }
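/* Illustrative sketch (plain C; names are made up): the fold above rewrites
   the difference of two addresses into the same array as the difference of
   the indices.  */
#include <stddef.h>
static int a[32];
ptrdiff_t addr_diff  (int i, int j) { return &a[i] - &a[j]; }
ptrdiff_t index_diff (int i, int j) { return (ptrdiff_t) i - (ptrdiff_t) j; }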
11627 : :
11628 : : /* Further transformations are not for pointers. */
11629 : 20875793 : if (code == POINTER_DIFF_EXPR)
11630 : : return NULL_TREE;
11631 : :
11632 : : /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
11633 : 18453106 : if (TREE_CODE (arg0) == NEGATE_EXPR
11634 : 145089 : && negate_expr_p (op1)
11635 : : /* If arg0 is e.g. unsigned int and type is int, then this could
11636 : : introduce UB, because if A is INT_MIN at runtime, the original
11637 : : expression can be well defined while the latter is not.
11638 : : See PR83269. */
11639 : 18453940 : && !(ANY_INTEGRAL_TYPE_P (type)
11640 : 834 : && TYPE_OVERFLOW_UNDEFINED (type)
11641 : 822 : && ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11642 : 822 : && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
11643 : 827 : return fold_build2_loc (loc, MINUS_EXPR, type, negate_expr (op1),
11644 : : fold_convert_loc (loc, type,
11645 : 1654 : TREE_OPERAND (arg0, 0)));
11646 : :
11647 : : /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
11648 : : __complex__ ( x, -y ). This is not the same for SNaNs or if
11649 : : signed zeros are involved. */
11650 : 18452279 : if (!HONOR_SNANS (arg0)
11651 : 18451415 : && !HONOR_SIGNED_ZEROS (arg0)
11652 : 29781218 : && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11653 : : {
11654 : 53 : tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11655 : 53 : tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
11656 : 53 : tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
11657 : 53 : bool arg0rz = false, arg0iz = false;
11658 : 25 : if ((arg0r && (arg0rz = real_zerop (arg0r)))
11659 : 69 : || (arg0i && (arg0iz = real_zerop (arg0i))))
11660 : : {
11661 : 25 : tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
11662 : 25 : tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
11663 : 25 : if (arg0rz && arg1i && real_zerop (arg1i))
11664 : : {
11665 : 9 : tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
11666 : : arg1r ? arg1r
11667 : 0 : : build1 (REALPART_EXPR, rtype, arg1));
11668 : 9 : tree ip = arg0i ? arg0i
11669 : 0 : : build1 (IMAGPART_EXPR, rtype, arg0);
11670 : 9 : return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11671 : : }
11672 : 16 : else if (arg0iz && arg1r && real_zerop (arg1r))
11673 : : {
11674 : 15 : tree rp = arg0r ? arg0r
11675 : 0 : : build1 (REALPART_EXPR, rtype, arg0);
11676 : 15 : tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
11677 : : arg1i ? arg1i
11678 : 0 : : build1 (IMAGPART_EXPR, rtype, arg1));
11679 : 15 : return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11680 : : }
11681 : : }
11682 : : }
11683 : :
11684 : : /* A - B -> A + (-B) if B is easily negatable. */
11685 : 18452255 : if (negate_expr_p (op1)
11686 : 646916 : && ! TYPE_OVERFLOW_SANITIZED (type)
11687 : 19096688 : && ((FLOAT_TYPE_P (type)
11688 : : /* Avoid this transformation if B is a positive REAL_CST. */
11689 : 65 : && (TREE_CODE (op1) != REAL_CST
11690 : 0 : || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
11691 : 644368 : || INTEGRAL_TYPE_P (type)))
11692 : 644254 : return fold_build2_loc (loc, PLUS_EXPR, type,
11693 : : fold_convert_loc (loc, type, arg0),
11694 : 644254 : negate_expr (op1));
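/* Illustrative sketch (plain C; names are made up): "A - B -> A + (-B)"
   when B is easily negated, e.g. a negative integer constant.  */
int sub_before (int a) { return a - (-5); }
int sub_after  (int a) { return a + 5; }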
11695 : :
11696 : : /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
11697 : :        being 1.  Make sure the type is not saturating and has the signedness of
11698 : : the stripped operands, as fold_plusminus_mult_expr will re-associate.
11699 : : ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
11700 : 17808001 : if ((TREE_CODE (arg0) == MULT_EXPR
11701 : 16534176 : || TREE_CODE (arg1) == MULT_EXPR)
11702 : 2595484 : && !TYPE_SATURATING (type)
11703 : 2595484 : && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
11704 : 2467129 : && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
11705 : 20225899 : && (!FLOAT_TYPE_P (type) || flag_associative_math))
11706 : : {
11707 : 328922 : tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
11708 : 328922 : if (tem)
11709 : : return tem;
11710 : : }
11711 : :
11712 : 17758916 : goto associate;
11713 : :
11714 : 59427816 : case MULT_EXPR:
11715 : 59427816 : if (! FLOAT_TYPE_P (type))
11716 : : {
11717 : : /* Transform x * -C into -x * C if x is easily negatable. */
11718 : 37740427 : if (TREE_CODE (op1) == INTEGER_CST
11719 : 35101742 : && tree_int_cst_sgn (op1) == -1
11720 : 194918 : && negate_expr_p (op0)
11721 : 336 : && negate_expr_p (op1)
11722 : 320 : && (tem = negate_expr (op1)) != op1
11723 : 37740747 : && ! TREE_OVERFLOW (tem))
11724 : 320 : return fold_build2_loc (loc, MULT_EXPR, type,
11725 : : fold_convert_loc (loc, type,
11726 : 320 : negate_expr (op0)), tem);
11727 : :
11728 : 37740107 : strict_overflow_p = false;
11729 : 37740107 : if (TREE_CODE (arg1) == INTEGER_CST
11730 : 37740107 : && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11731 : : &strict_overflow_p)) != 0)
11732 : : {
11733 : 486098 : if (strict_overflow_p)
11734 : 10 : fold_overflow_warning (("assuming signed overflow does not "
11735 : : "occur when simplifying "
11736 : : "multiplication"),
11737 : : WARN_STRICT_OVERFLOW_MISC);
11738 : 486098 : return fold_convert_loc (loc, type, tem);
11739 : : }
11740 : :
11741 : : /* Optimize z * conj(z) for integer complex numbers. */
11742 : 37254009 : if (TREE_CODE (arg0) == CONJ_EXPR
11743 : 37254009 : && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11744 : 1 : return fold_mult_zconjz (loc, type, arg1);
11745 : 37254008 : if (TREE_CODE (arg1) == CONJ_EXPR
11746 : 37254008 : && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11747 : 0 : return fold_mult_zconjz (loc, type, arg0);
11748 : : }
11749 : : else
11750 : : {
11751 : : /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
11752 : : This is not the same for NaNs or if signed zeros are
11753 : : involved. */
11754 : 21687389 : if (!HONOR_NANS (arg0)
11755 : 32719 : && !HONOR_SIGNED_ZEROS (arg0)
11756 : 32431 : && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11757 : 3642 : && TREE_CODE (arg1) == COMPLEX_CST
11758 : 21687619 : && real_zerop (TREE_REALPART (arg1)))
11759 : : {
11760 : 223 : tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11761 : 223 : if (real_onep (TREE_IMAGPART (arg1)))
11762 : : {
11763 : 213 : if (TREE_CODE (arg0) != COMPLEX_EXPR)
11764 : 68 : arg0 = save_expr (arg0);
11765 : 213 : tree iarg0 = fold_build1_loc (loc, IMAGPART_EXPR,
11766 : : rtype, arg0);
11767 : 213 : tree rarg0 = fold_build1_loc (loc, REALPART_EXPR,
11768 : : rtype, arg0);
11769 : 213 : return fold_build2_loc (loc, COMPLEX_EXPR, type,
11770 : : negate_expr (iarg0),
11771 : 213 : rarg0);
11772 : : }
11773 : 10 : else if (real_minus_onep (TREE_IMAGPART (arg1)))
11774 : : {
11775 : 10 : if (TREE_CODE (arg0) != COMPLEX_EXPR)
11776 : 0 : arg0 = save_expr (arg0);
11777 : 10 : tree iarg0 = fold_build1_loc (loc, IMAGPART_EXPR,
11778 : : rtype, arg0);
11779 : 10 : tree rarg0 = fold_build1_loc (loc, REALPART_EXPR,
11780 : : rtype, arg0);
11781 : 10 : return fold_build2_loc (loc, COMPLEX_EXPR, type,
11782 : : iarg0,
11783 : 10 : negate_expr (rarg0));
11784 : : }
11785 : : }
11786 : :
11787 : : /* Optimize z * conj(z) for floating point complex numbers.
11788 : : Guarded by flag_unsafe_math_optimizations as non-finite
11789 : : imaginary components don't produce scalar results. */
11790 : 21687166 : if (flag_unsafe_math_optimizations
11791 : 32252 : && TREE_CODE (arg0) == CONJ_EXPR
11792 : 21687168 : && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11793 : 1 : return fold_mult_zconjz (loc, type, arg1);
11794 : 21687165 : if (flag_unsafe_math_optimizations
11795 : 32251 : && TREE_CODE (arg1) == CONJ_EXPR
11796 : 21687169 : && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11797 : 0 : return fold_mult_zconjz (loc, type, arg0);
11798 : : }
11799 : 58941173 : goto associate;
11800 : :
11801 : 1754888 : case BIT_IOR_EXPR:
11802 : : /* Canonicalize (X & C1) | C2. */
11803 : 1754888 : if (TREE_CODE (arg0) == BIT_AND_EXPR
11804 : 117092 : && TREE_CODE (arg1) == INTEGER_CST
11805 : 1829682 : && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11806 : : {
11807 : 74786 : int width = TYPE_PRECISION (type), w;
11808 : 74786 : wide_int c1 = wi::to_wide (TREE_OPERAND (arg0, 1));
11809 : 74786 : wide_int c2 = wi::to_wide (arg1);
11810 : :
11811 : : /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11812 : 74786 : if ((c1 & c2) == c1)
11813 : 0 : return omit_one_operand_loc (loc, type, arg1,
11814 : 0 : TREE_OPERAND (arg0, 0));
11815 : :
11816 : 74786 : wide_int msk = wi::mask (width, false,
11817 : 74786 : TYPE_PRECISION (TREE_TYPE (arg1)));
11818 : :
11819 : : /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11820 : 74786 : if (wi::bit_and_not (msk, c1 | c2) == 0)
11821 : : {
11822 : 6 : tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11823 : 6 : return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
11824 : : }
11825 : :
11826 : : /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11827 : : unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11828 : : mode which allows further optimizations. */
11829 : 74780 : c1 &= msk;
11830 : 74780 : c2 &= msk;
11831 : 74780 : wide_int c3 = wi::bit_and_not (c1, c2);
11832 : 231527 : for (w = BITS_PER_UNIT; w <= width; w <<= 1)
11833 : : {
11834 : 156989 : wide_int mask = wi::mask (w, false,
11835 : 156989 : TYPE_PRECISION (type));
11836 : 313978 : if (((c1 | c2) & mask) == mask
11837 : 313978 : && wi::bit_and_not (c1, mask) == 0)
11838 : : {
11839 : 242 : c3 = mask;
11840 : 242 : break;
11841 : : }
11842 : 156989 : }
11843 : :
11844 : 74780 : if (c3 != c1)
11845 : : {
11846 : 562 : tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11847 : 1124 : tem = fold_build2_loc (loc, BIT_AND_EXPR, type, tem,
11848 : 562 : wide_int_to_tree (type, c3));
11849 : 562 : return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
11850 : : }
11851 : 75916 : }
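/* Illustrative sketch (plain C; names are made up): with C1 = 0xf0 and
   C2 = 0x0f, C1 | C2 covers every bit an unsigned char can carry, so the
   inner mask can be dropped and "(x & C1) | C2" becomes "x | C2".  */
unsigned char ior_before (unsigned char x) { return (x & 0xf0) | 0x0f; }
unsigned char ior_after  (unsigned char x) { return x | 0x0f; }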
11852 : :
11853 : : /* See if this can be simplified into a rotate first. If that
11854 : : is unsuccessful continue in the association code. */
11855 : 1754320 : goto bit_rotate;
11856 : :
11857 : 646275 : case BIT_XOR_EXPR:
11858 : : /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11859 : 646275 : if (TREE_CODE (arg0) == BIT_AND_EXPR
11860 : 2420 : && INTEGRAL_TYPE_P (type)
11861 : 1815 : && integer_onep (TREE_OPERAND (arg0, 1))
11862 : 646278 : && integer_onep (arg1))
11863 : 0 : return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11864 : 0 : build_zero_cst (TREE_TYPE (arg0)));
11865 : :
11866 : : /* See if this can be simplified into a rotate first. If that
11867 : : is unsuccessful continue in the association code. */
11868 : 646275 : goto bit_rotate;
11869 : :
11870 : 5489436 : case BIT_AND_EXPR:
11871 : : /* Fold !X & 1 as X == 0. */
11872 : 5489436 : if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11873 : 5489436 : && integer_onep (arg1))
11874 : : {
11875 : 0 : tem = TREE_OPERAND (arg0, 0);
11876 : 0 : return fold_build2_loc (loc, EQ_EXPR, type, tem,
11877 : 0 : build_zero_cst (TREE_TYPE (tem)));
11878 : : }
11879 : :
11880 : : /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11881 : : multiple of 1 << CST. */
11882 : 5489436 : if (TREE_CODE (arg1) == INTEGER_CST)
11883 : : {
11884 : 3942605 : wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
11885 : 3942605 : wide_int ncst1 = -cst1;
11886 : 3942605 : if ((cst1 & ncst1) == ncst1
11887 : 4087421 : && multiple_of_p (type, arg0,
11888 : 4087421 : wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11889 : 466 : return fold_convert_loc (loc, type, arg0);
11890 : 3942605 : }
11891 : :
11892 : : /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11893 : : bits from CST2. */
11894 : 5488970 : if (TREE_CODE (arg1) == INTEGER_CST
11895 : 3942139 : && TREE_CODE (arg0) == MULT_EXPR
11896 : 5594088 : && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11897 : : {
11898 : 105088 : wi::tree_to_wide_ref warg1 = wi::to_wide (arg1);
11899 : 105088 : wide_int masked
11900 : 105088 : = mask_with_tz (type, warg1, wi::to_wide (TREE_OPERAND (arg0, 1)));
11901 : :
11902 : 105088 : if (masked == 0)
11903 : 6523 : return omit_two_operands_loc (loc, type, build_zero_cst (type),
11904 : 6523 : arg0, arg1);
11905 : 98565 : else if (masked != warg1)
11906 : : {
11907 : : /* Avoid the transform if arg1 is a mask of some
11908 : : mode which allows further optimizations. */
11909 : 549 : int pop = wi::popcount (warg1);
11910 : 571 : if (!(pop >= BITS_PER_UNIT
11911 : 48 : && pow2p_hwi (pop)
11912 : 593 : && wi::mask (pop, false, warg1.get_precision ()) == warg1))
11913 : 1054 : return fold_build2_loc (loc, code, type, op0,
11914 : 1054 : wide_int_to_tree (type, masked));
11915 : : }
11916 : 105088 : }
11917 : :
11918 : : /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11919 : 3935089 : if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11920 : 5772345 : && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11921 : : {
11922 : 146442 : prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11923 : :
11924 : 146442 : wide_int mask = wide_int::from (wi::to_wide (arg1), prec, UNSIGNED);
11925 : 146442 : if (mask == -1)
11926 : 538 : return
11927 : 538 : fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11928 : 146442 : }
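/* Illustrative sketch (plain C; names are made up): zero-extending an
   unsigned char and masking with 0377 keeps every bit, so the mask is
   redundant.  */
int octal_mask_before (unsigned char c) { return (int) c & 0377; }
int octal_mask_after  (unsigned char c) { return (int) c; }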
11929 : :
11930 : 5481382 : goto associate;
11931 : :
11932 : 6024978 : case RDIV_EXPR:
11933 : : /* Don't touch a floating-point divide by zero unless the mode
11934 : : of the constant can represent infinity. */
11935 : 6024978 : if (TREE_CODE (arg1) == REAL_CST
11936 : 2864295 : && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11937 : 6024978 : && real_zerop (arg1))
11938 : 0 : return NULL_TREE;
11939 : :
11940 : : /* (-A) / (-B) -> A / B */
11941 : 6024978 : if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11942 : 6 : return fold_build2_loc (loc, RDIV_EXPR, type,
11943 : 3 : TREE_OPERAND (arg0, 0),
11944 : 3 : negate_expr (arg1));
11945 : 6024975 : if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11946 : 0 : return fold_build2_loc (loc, RDIV_EXPR, type,
11947 : : negate_expr (arg0),
11948 : 0 : TREE_OPERAND (arg1, 0));
11949 : : return NULL_TREE;
11950 : :
11951 : 1905044 : case TRUNC_DIV_EXPR:
11952 : : /* Fall through */
11953 : :
11954 : 1905044 : case FLOOR_DIV_EXPR:
11955 : : /* Simplify A / (B << N) where A and B are positive and B is
11956 : : a power of 2, to A >> (N + log2(B)). */
11957 : 1905044 : strict_overflow_p = false;
11958 : 1905044 : if (TREE_CODE (arg1) == LSHIFT_EXPR
11959 : 1905044 : && (TYPE_UNSIGNED (type)
11960 : 8 : || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11961 : : {
11962 : 17 : tree sval = TREE_OPERAND (arg1, 0);
11963 : 17 : if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11964 : : {
11965 : 16 : tree sh_cnt = TREE_OPERAND (arg1, 1);
11966 : 16 : tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
11967 : 16 : wi::exact_log2 (wi::to_wide (sval)));
11968 : :
11969 : 16 : if (strict_overflow_p)
11970 : 0 : fold_overflow_warning (("assuming signed overflow does not "
11971 : : "occur when simplifying A / (B << N)"),
11972 : : WARN_STRICT_OVERFLOW_MISC);
11973 : :
11974 : 16 : sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11975 : : sh_cnt, pow2);
11976 : 16 : return fold_build2_loc (loc, RSHIFT_EXPR, type,
11977 : 16 : fold_convert_loc (loc, type, arg0), sh_cnt);
11978 : : }
11979 : : }
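/* Illustrative sketch (plain C; names are made up, and n + 2 is assumed to
   stay below the bit width): dividing by (4 << n) is a right shift by
   n + log2(4) for unsigned operands.  */
unsigned div_shift_before (unsigned a, unsigned n) { return a / (4u << n); }
unsigned div_shift_after  (unsigned a, unsigned n) { return a >> (n + 2); }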
11980 : :
11981 : : /* Fall through */
11982 : :
11983 : 3014323 : case ROUND_DIV_EXPR:
11984 : 3014323 : case CEIL_DIV_EXPR:
11985 : 3014323 : case EXACT_DIV_EXPR:
11986 : 3014323 : if (integer_zerop (arg1))
11987 : : return NULL_TREE;
11988 : :
11989 : : /* Convert -A / -B to A / B when the type is signed and overflow is
11990 : : undefined. */
11991 : 3011366 : if ((!ANY_INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11992 : 817151 : && TREE_CODE (op0) == NEGATE_EXPR
11993 : 3011428 : && negate_expr_p (op1))
11994 : : {
11995 : 30 : if (ANY_INTEGRAL_TYPE_P (type))
11996 : 30 : fold_overflow_warning (("assuming signed overflow does not occur "
11997 : : "when distributing negation across "
11998 : : "division"),
11999 : : WARN_STRICT_OVERFLOW_MISC);
12000 : 60 : return fold_build2_loc (loc, code, type,
12001 : : fold_convert_loc (loc, type,
12002 : 30 : TREE_OPERAND (arg0, 0)),
12003 : 30 : negate_expr (op1));
12004 : : }
12005 : 3011336 : if ((!ANY_INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12006 : 817121 : && TREE_CODE (arg1) == NEGATE_EXPR
12007 : 3011580 : && negate_expr_p (op0))
12008 : : {
12009 : 36 : if (ANY_INTEGRAL_TYPE_P (type))
12010 : 36 : fold_overflow_warning (("assuming signed overflow does not occur "
12011 : : "when distributing negation across "
12012 : : "division"),
12013 : : WARN_STRICT_OVERFLOW_MISC);
12014 : 36 : return fold_build2_loc (loc, code, type,
12015 : : negate_expr (op0),
12016 : : fold_convert_loc (loc, type,
12017 : 72 : TREE_OPERAND (arg1, 0)));
12018 : : }
12019 : :
12020 : : /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12021 : : operation, EXACT_DIV_EXPR.
12022 : :
12023 : : Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12024 : :       At one time the others generated faster code; it's not clear whether
12025 : :       they do after the last round of changes to the DIV code in expmed.cc.  */
12026 : 3011300 : if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12027 : 3011300 : && multiple_of_p (type, arg0, arg1))
12028 : 0 : return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
12029 : : fold_convert (type, arg0),
12030 : 0 : fold_convert (type, arg1));
12031 : :
12032 : 3011300 : strict_overflow_p = false;
12033 : 3011300 : if (TREE_CODE (arg1) == INTEGER_CST
12034 : 3011300 : && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12035 : : &strict_overflow_p)) != 0)
12036 : : {
12037 : 2248 : if (strict_overflow_p)
12038 : 157 : fold_overflow_warning (("assuming signed overflow does not occur "
12039 : : "when simplifying division"),
12040 : : WARN_STRICT_OVERFLOW_MISC);
12041 : 2248 : return fold_convert_loc (loc, type, tem);
12042 : : }
12043 : :
12044 : : return NULL_TREE;
12045 : :
12046 : 586217 : case CEIL_MOD_EXPR:
12047 : 586217 : case FLOOR_MOD_EXPR:
12048 : 586217 : case ROUND_MOD_EXPR:
12049 : 586217 : case TRUNC_MOD_EXPR:
12050 : 586217 : strict_overflow_p = false;
12051 : 586217 : if (TREE_CODE (arg1) == INTEGER_CST
12052 : 586217 : && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12053 : : &strict_overflow_p)) != 0)
12054 : : {
12055 : 0 : if (strict_overflow_p)
12056 : 0 : fold_overflow_warning (("assuming signed overflow does not occur "
12057 : : "when simplifying modulus"),
12058 : : WARN_STRICT_OVERFLOW_MISC);
12059 : 0 : return fold_convert_loc (loc, type, tem);
12060 : : }
12061 : :
12062 : : return NULL_TREE;
12063 : :
12064 : 1909843 : case LROTATE_EXPR:
12065 : 1909843 : case RROTATE_EXPR:
12066 : 1909843 : case RSHIFT_EXPR:
12067 : 1909843 : case LSHIFT_EXPR:
12068 : :       /* Since a negative shift count is not well-defined,
12069 : :        don't try to evaluate the shift in the compiler.  */
12070 : 1909843 : if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12071 : : return NULL_TREE;
12072 : :
12073 : 1908886 : prec = element_precision (type);
12074 : :
12075 : : /* If we have a rotate of a bit operation with the rotate count and
12076 : : the second operand of the bit operation both constant,
12077 : : permute the two operations. */
12078 : 2284 : if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12079 : 1831 : && (TREE_CODE (arg0) == BIT_AND_EXPR
12080 : 1831 : || TREE_CODE (arg0) == BIT_IOR_EXPR
12081 : 1831 : || TREE_CODE (arg0) == BIT_XOR_EXPR)
12082 : 1908886 : && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12083 : : {
12084 : 0 : tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12085 : 0 : tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12086 : 0 : return fold_build2_loc (loc, TREE_CODE (arg0), type,
12087 : : fold_build2_loc (loc, code, type,
12088 : : arg00, arg1),
12089 : : fold_build2_loc (loc, code, type,
12090 : 0 : arg01, arg1));
12091 : : }
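/* Illustrative sketch (plain C; rotr32 is a made-up helper): a rotate of a
   bit operation with both operands constant lets the rotate distribute over
   the bit operation.  */
#include <stdint.h>
static inline uint32_t rotr32 (uint32_t x, unsigned n)
{ return (x >> (n & 31)) | (x << ((32 - n) & 31)); }
uint32_t rot_before (uint32_t x) { return rotr32 (x & 0xffu, 8); }
uint32_t rot_after  (uint32_t x) { return rotr32 (x, 8) & rotr32 (0xffu, 8); }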
12092 : :
12093 : : return NULL_TREE;
12094 : :
12095 : 408009 : case MIN_EXPR:
12096 : 408009 : case MAX_EXPR:
12097 : 408009 : goto associate;
12098 : :
12099 : 5247679 : case TRUTH_ANDIF_EXPR:
12100 : : /* Note that the operands of this must be ints
12101 : : and their values must be 0 or 1.
12102 : : ("true" is a fixed value perhaps depending on the language.) */
12103 : : /* If first arg is constant zero, return it. */
12104 : 5247679 : if (integer_zerop (arg0))
12105 : 1005630 : return fold_convert_loc (loc, type, arg0);
12106 : : /* FALLTHRU */
12107 : 14337181 : case TRUTH_AND_EXPR:
12108 : : /* If either arg is constant true, drop it. */
12109 : 14337181 : if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12110 : 1914175 : return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12111 : 733184 : if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12112 : : /* Preserve sequence points. */
12113 : 13110636 : && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12114 : 662193 : return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12115 : : /* If second arg is constant zero, result is zero, but first arg
12116 : : must be evaluated. */
12117 : 11760813 : if (integer_zerop (arg1))
12118 : 45554 : return omit_one_operand_loc (loc, type, arg1, arg0);
12119 : : /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12120 : : case will be handled here. */
12121 : 11715259 : if (integer_zerop (arg0))
12122 : 0 : return omit_one_operand_loc (loc, type, arg0, arg1);
12123 : :
12124 : : /* !X && X is always false. */
12125 : 11715259 : if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12126 : 11715259 : && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12127 : 0 : return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12128 : : /* X && !X is always false. */
12129 : 11715259 : if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12130 : 11715259 : && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12131 : 0 : return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12132 : :
12133 : : /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12134 : : means A >= Y && A != MAX, but in this case we know that
12135 : : A < X <= MAX. */
12136 : :
12137 : 11715259 : if (!TREE_SIDE_EFFECTS (arg0)
12138 : 11715259 : && !TREE_SIDE_EFFECTS (arg1))
12139 : : {
12140 : 10811124 : tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12141 : 10811124 : if (tem && !operand_equal_p (tem, arg0, 0))
12142 : 466 : return fold_convert (type,
12143 : : fold_build2_loc (loc, code, TREE_TYPE (arg1),
12144 : : tem, arg1));
12145 : :
12146 : 10810658 : tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12147 : 10810658 : if (tem && !operand_equal_p (tem, arg1, 0))
12148 : 9929 : return fold_convert (type,
12149 : : fold_build2_loc (loc, code, TREE_TYPE (arg0),
12150 : : arg0, tem));
12151 : : }
12152 : :
12153 : 11704864 : if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12154 : : != NULL_TREE)
12155 : : return tem;
12156 : :
12157 : : return NULL_TREE;
12158 : :
12159 : 2932908 : case TRUTH_ORIF_EXPR:
12160 : : /* Note that the operands of this must be ints
12161 : : and their values must be 0 or true.
12162 : : ("true" is a fixed value perhaps depending on the language.) */
12163 : : /* If first arg is constant true, return it. */
12164 : 2932908 : if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12165 : 121309 : return fold_convert_loc (loc, type, arg0);
12166 : : /* FALLTHRU */
12167 : 11747506 : case TRUTH_OR_EXPR:
12168 : : /* If either arg is constant zero, drop it. */
12169 : 11747506 : if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12170 : 142376 : return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12171 : 443049 : if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12172 : : /* Preserve sequence points. */
12173 : 12009068 : && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12174 : 393070 : return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12175 : : /* If second arg is constant true, result is true, but we must
12176 : : evaluate first arg. */
12177 : 11212060 : if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12178 : 39111 : return omit_one_operand_loc (loc, type, arg1, arg0);
12179 : : /* Likewise for first arg, but note this only occurs here for
12180 : : TRUTH_OR_EXPR. */
12181 : 11172949 : if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12182 : 0 : return omit_one_operand_loc (loc, type, arg0, arg1);
12183 : :
12184 : : /* !X || X is always true. */
12185 : 11172949 : if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12186 : 11172949 : && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12187 : 0 : return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12188 : : /* X || !X is always true. */
12189 : 11172949 : if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12190 : 11172949 : && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12191 : 1 : return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12192 : :
12193 : : /* (X && !Y) || (!X && Y) is X ^ Y */
12194 : 11172948 : if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12195 : 1574 : && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12196 : : {
12197 : 655 : tree a0, a1, l0, l1, n0, n1;
12198 : :
12199 : 655 : a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12200 : 655 : a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12201 : :
12202 : 655 : l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12203 : 655 : l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12204 : :
12205 : 655 : n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12206 : 655 : n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12207 : :
12208 : 655 : if ((operand_equal_p (n0, a0, 0)
12209 : 18 : && operand_equal_p (n1, a1, 0))
12210 : 663 : || (operand_equal_p (n0, a1, 0)
12211 : 3 : && operand_equal_p (n1, a0, 0)))
12212 : 13 : return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12213 : : }
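/* Illustrative sketch (plain C; names are made up): "(X && !Y) || (!X && Y)"
   is the logical exclusive or of the two truth values.  */
int truth_xor_before (int x, int y) { return (x && !y) || (!x && y); }
int truth_xor_after  (int x, int y) { return !x != !y; }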
12214 : :
12215 : 11172935 : if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12216 : : != NULL_TREE)
12217 : : return tem;
12218 : :
12219 : : return NULL_TREE;
12220 : :
12221 : 33196 : case TRUTH_XOR_EXPR:
12222 : : /* If the second arg is constant zero, drop it. */
12223 : 33196 : if (integer_zerop (arg1))
12224 : 0 : return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12225 : : /* If the second arg is constant true, this is a logical inversion. */
12226 : 33196 : if (integer_onep (arg1))
12227 : : {
12228 : 0 : tem = invert_truthvalue_loc (loc, arg0);
12229 : 0 : return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12230 : : }
12231 : : /* Identical arguments cancel to zero. */
12232 : 33196 : if (operand_equal_p (arg0, arg1, 0))
12233 : 0 : return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12234 : :
12235 : : /* !X ^ X is always true. */
12236 : 33196 : if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12237 : 33196 : && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12238 : 0 : return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12239 : :
12240 : : /* X ^ !X is always true. */
12241 : 33196 : if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12242 : 33196 : && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12243 : 0 : return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12244 : :
12245 : : return NULL_TREE;
12246 : :
12247 : 41861107 : case EQ_EXPR:
12248 : 41861107 : case NE_EXPR:
12249 : 41861107 : STRIP_NOPS (arg0);
12250 : 41861107 : STRIP_NOPS (arg1);
12251 : :
12252 : 41861107 : tem = fold_comparison (loc, code, type, op0, op1);
12253 : 41861107 : if (tem != NULL_TREE)
12254 : : return tem;
12255 : :
12256 : : /* bool_var != 1 becomes !bool_var. */
12257 : 42946447 : if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12258 : 41932262 : && code == NE_EXPR)
12259 : 73230 : return fold_convert_loc (loc, type,
12260 : : fold_build1_loc (loc, TRUTH_NOT_EXPR,
12261 : 146460 : TREE_TYPE (arg0), arg0));
12262 : :
12263 : : /* bool_var == 0 becomes !bool_var. */
12264 : 42799987 : if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12265 : 42631853 : && code == EQ_EXPR)
12266 : 197517 : return fold_convert_loc (loc, type,
12267 : : fold_build1_loc (loc, TRUTH_NOT_EXPR,
12268 : 395034 : TREE_TYPE (arg0), arg0));
12269 : :
12270 : : /* !exp != 0 becomes !exp */
12271 : 554282 : if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12272 : 42137103 : && code == NE_EXPR)
12273 : 549418 : return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12274 : :
12275 : : /* If this is an EQ or NE comparison with zero and ARG0 is
12276 : : (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12277 : : two operations, but the latter can be done in one less insn
12278 : : on machines that have only two-operand insns or on which a
12279 : : constant cannot be the first operand. */
12280 : 41034116 : if (TREE_CODE (arg0) == BIT_AND_EXPR
12281 : 41034116 : && integer_zerop (arg1))
12282 : : {
12283 : 1286546 : tree arg00 = TREE_OPERAND (arg0, 0);
12284 : 1286546 : tree arg01 = TREE_OPERAND (arg0, 1);
12285 : 1286546 : if (TREE_CODE (arg00) == LSHIFT_EXPR
12286 : 1286546 : && integer_onep (TREE_OPERAND (arg00, 0)))
12287 : : {
12288 : 4231 : tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12289 : 4231 : arg01, TREE_OPERAND (arg00, 1));
12290 : 4231 : tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12291 : 4231 : build_one_cst (TREE_TYPE (arg0)));
12292 : 4231 : return fold_build2_loc (loc, code, type,
12293 : 4231 : fold_convert_loc (loc, TREE_TYPE (arg1),
12294 : 4231 : tem), arg1);
12295 : : }
12296 : 1282315 : else if (TREE_CODE (arg01) == LSHIFT_EXPR
12297 : 1282315 : && integer_onep (TREE_OPERAND (arg01, 0)))
12298 : : {
12299 : 847 : tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12300 : 847 : arg00, TREE_OPERAND (arg01, 1));
12301 : 847 : tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12302 : 847 : build_one_cst (TREE_TYPE (arg0)));
12303 : 847 : return fold_build2_loc (loc, code, type,
12304 : 847 : fold_convert_loc (loc, TREE_TYPE (arg1),
12305 : 847 : tem), arg1);
12306 : : }
12307 : : }
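/* Illustrative sketch (plain C; names are made up, foo < 32 assumed):
   shifting the value instead of the constant needs one instruction less on
   two-operand machines.  */
int bit_test_before (unsigned bar, unsigned foo) { return ((1u << foo) & bar) != 0; }
int bit_test_after  (unsigned bar, unsigned foo) { return ((bar >> foo) & 1u) != 0; }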
12308 : :
12309 : : /* If this is a comparison of a field, we may be able to simplify it. */
12310 : 41029038 : if ((TREE_CODE (arg0) == COMPONENT_REF
12311 : 41029038 : || TREE_CODE (arg0) == BIT_FIELD_REF)
12312 : : /* Handle the constant case even without -O
12313 : : to make sure the warnings are given. */
12314 : 4045183 : && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12315 : : {
12316 : 3757554 : t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12317 : 3757554 : if (t1)
12318 : : return t1;
12319 : : }
12320 : :
12321 : : /* Optimize comparisons of strlen vs zero to a compare of the
12322 : : first character of the string vs zero. To wit,
12323 : : strlen(ptr) == 0 => *ptr == 0
12324 : : strlen(ptr) != 0 => *ptr != 0
12325 : : Other cases should reduce to one of these two (or a constant)
12326 : : due to the return value of strlen being unsigned. */
12327 : 40406283 : if (TREE_CODE (arg0) == CALL_EXPR && integer_zerop (arg1))
12328 : : {
12329 : 2396630 : tree fndecl = get_callee_fndecl (arg0);
12330 : :
12331 : 2396630 : if (fndecl
12332 : 2395744 : && fndecl_built_in_p (fndecl, BUILT_IN_STRLEN)
12333 : 537 : && call_expr_nargs (arg0) == 1
12334 : 2397167 : && (TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0)))
12335 : : == POINTER_TYPE))
12336 : : {
12337 : 537 : tree ptrtype
12338 : 537 : = build_pointer_type (build_qualified_type (char_type_node,
12339 : : TYPE_QUAL_CONST));
12340 : 1074 : tree ptr = fold_convert_loc (loc, ptrtype,
12341 : 537 : CALL_EXPR_ARG (arg0, 0));
12342 : 537 : tree iref = build_fold_indirect_ref_loc (loc, ptr);
12343 : 537 : return fold_build2_loc (loc, code, type, iref,
12344 : 537 : build_int_cst (TREE_TYPE (iref), 0));
12345 : : }
12346 : : }
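/* Illustrative sketch (plain C; names are made up): comparing strlen against
   zero only needs the first character.  */
#include <string.h>
int is_empty_before (const char *p) { return strlen (p) == 0; }
int is_empty_after  (const char *p) { return *p == 0; }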
12347 : :
12348 : : /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12349 : : of X. Similarly fold (X >> C) == 0 into X >= 0. */
12350 : 40405746 : if (TREE_CODE (arg0) == RSHIFT_EXPR
12351 : 29234 : && integer_zerop (arg1)
12352 : 40417003 : && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12353 : : {
12354 : 9234 : tree arg00 = TREE_OPERAND (arg0, 0);
12355 : 9234 : tree arg01 = TREE_OPERAND (arg0, 1);
12356 : 9234 : tree itype = TREE_TYPE (arg00);
12357 : 9234 : if (wi::to_wide (arg01) == element_precision (itype) - 1)
12358 : : {
12359 : 885 : if (TYPE_UNSIGNED (itype))
12360 : : {
12361 : 810 : itype = signed_type_for (itype);
12362 : 810 : arg00 = fold_convert_loc (loc, itype, arg00);
12363 : : }
12364 : 1739 : return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12365 : 885 : type, arg00, build_zero_cst (itype));
12366 : : }
12367 : : }
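/* Illustrative sketch (plain C; names are made up, 32-bit int and the usual
   modulo conversion to signed assumed): shifting away all but the top bit
   and testing for nonzero is a sign test.  */
int top_bit_before (unsigned x) { return (x >> 31) != 0; }
int top_bit_after  (unsigned x) { return (int) x < 0; }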
12368 : :
12369 : : /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12370 : : (X & C) == 0 when C is a single bit. */
12371 : 40404861 : if (TREE_CODE (arg0) == BIT_AND_EXPR
12372 : 1437437 : && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12373 : 853 : && integer_zerop (arg1)
12374 : 40405316 : && integer_pow2p (TREE_OPERAND (arg0, 1)))
12375 : : {
12376 : 140 : tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12377 : 140 : TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12378 : 140 : TREE_OPERAND (arg0, 1));
12379 : 280 : return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12380 : : type, tem,
12381 : 140 : fold_convert_loc (loc, TREE_TYPE (arg0),
12382 : 140 : arg1));
12383 : : }
12384 : :
12385 : : /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12386 : : constant C is a power of two, i.e. a single bit. */
12387 : 40404721 : if (TREE_CODE (arg0) == BIT_XOR_EXPR
12388 : 4165 : && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12389 : 0 : && integer_zerop (arg1)
12390 : 0 : && integer_pow2p (TREE_OPERAND (arg0, 1))
12391 : 40404721 : && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12392 : 0 : TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12393 : : {
12394 : 0 : tree arg00 = TREE_OPERAND (arg0, 0);
12395 : 0 : return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12396 : 0 : arg00, build_int_cst (TREE_TYPE (arg00), 0));
12397 : : }
12398 : :
12399 : : /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12400 : :        when C is a power of two, i.e. a single bit.  */
12401 : 40404721 : if (TREE_CODE (arg0) == BIT_AND_EXPR
12402 : 1437297 : && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12403 : 10676 : && integer_zerop (arg1)
12404 : 10676 : && integer_pow2p (TREE_OPERAND (arg0, 1))
12405 : 40412803 : && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12406 : 8082 : TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12407 : : {
12408 : 0 : tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12409 : 0 : tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12410 : 0 : arg000, TREE_OPERAND (arg0, 1));
12411 : 0 : return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12412 : 0 : tem, build_int_cst (TREE_TYPE (tem), 0));
12413 : : }
12414 : :
12415 : 40404721 : if (TREE_CODE (arg0) == BIT_XOR_EXPR
12416 : 4165 : && TREE_CODE (arg1) == BIT_XOR_EXPR)
12417 : : {
12418 : 426 : tree arg00 = TREE_OPERAND (arg0, 0);
12419 : 426 : tree arg01 = TREE_OPERAND (arg0, 1);
12420 : 426 : tree arg10 = TREE_OPERAND (arg1, 0);
12421 : 426 : tree arg11 = TREE_OPERAND (arg1, 1);
12422 : 426 : tree itype = TREE_TYPE (arg0);
12423 : :
12424 : : /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12425 : : operand_equal_p guarantees no side-effects so we don't need
12426 : : to use omit_one_operand on Z. */
12427 : 426 : if (operand_equal_p (arg01, arg11, 0))
12428 : 8 : return fold_build2_loc (loc, code, type, arg00,
12429 : 8 : fold_convert_loc (loc, TREE_TYPE (arg00),
12430 : 8 : arg10));
12431 : 418 : if (operand_equal_p (arg01, arg10, 0))
12432 : 0 : return fold_build2_loc (loc, code, type, arg00,
12433 : 0 : fold_convert_loc (loc, TREE_TYPE (arg00),
12434 : 0 : arg11));
12435 : 418 : if (operand_equal_p (arg00, arg11, 0))
12436 : 0 : return fold_build2_loc (loc, code, type, arg01,
12437 : 0 : fold_convert_loc (loc, TREE_TYPE (arg01),
12438 : 0 : arg10));
12439 : 418 : if (operand_equal_p (arg00, arg10, 0))
12440 : 0 : return fold_build2_loc (loc, code, type, arg01,
12441 : 0 : fold_convert_loc (loc, TREE_TYPE (arg01),
12442 : 0 : arg11));
12443 : :
12444 : : /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12445 : 418 : if (TREE_CODE (arg01) == INTEGER_CST
12446 : 8 : && TREE_CODE (arg11) == INTEGER_CST)
12447 : : {
12448 : 8 : tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
12449 : : fold_convert_loc (loc, itype, arg11));
12450 : 8 : tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
12451 : 8 : return fold_build2_loc (loc, code, type, tem,
12452 : 8 : fold_convert_loc (loc, itype, arg10));
12453 : : }
12454 : : }
12455 : :
12456 : : /* Attempt to simplify equality/inequality comparisons of complex
12457 : : values. Only lower the comparison if the result is known or
12458 : : can be simplified to a single scalar comparison. */
12459 : 40404705 : if ((TREE_CODE (arg0) == COMPLEX_EXPR
12460 : 40402256 : || TREE_CODE (arg0) == COMPLEX_CST)
12461 : 2449 : && (TREE_CODE (arg1) == COMPLEX_EXPR
12462 : 2269 : || TREE_CODE (arg1) == COMPLEX_CST))
12463 : : {
12464 : 1704 : tree real0, imag0, real1, imag1;
12465 : 1704 : tree rcond, icond;
12466 : :
12467 : 1704 : if (TREE_CODE (arg0) == COMPLEX_EXPR)
12468 : : {
12469 : 1704 : real0 = TREE_OPERAND (arg0, 0);
12470 : 1704 : imag0 = TREE_OPERAND (arg0, 1);
12471 : : }
12472 : : else
12473 : : {
12474 : 0 : real0 = TREE_REALPART (arg0);
12475 : 0 : imag0 = TREE_IMAGPART (arg0);
12476 : : }
12477 : :
12478 : 1704 : if (TREE_CODE (arg1) == COMPLEX_EXPR)
12479 : : {
12480 : 180 : real1 = TREE_OPERAND (arg1, 0);
12481 : 180 : imag1 = TREE_OPERAND (arg1, 1);
12482 : : }
12483 : : else
12484 : : {
12485 : 1524 : real1 = TREE_REALPART (arg1);
12486 : 1524 : imag1 = TREE_IMAGPART (arg1);
12487 : : }
12488 : :
12489 : 1704 : rcond = fold_binary_loc (loc, code, type, real0, real1);
12490 : 1704 : if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12491 : : {
12492 : 11 : if (integer_zerop (rcond))
12493 : : {
12494 : 11 : if (code == EQ_EXPR)
12495 : 0 : return omit_two_operands_loc (loc, type, boolean_false_node,
12496 : 0 : imag0, imag1);
12497 : 11 : return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12498 : : }
12499 : : else
12500 : : {
12501 : 0 : if (code == NE_EXPR)
12502 : 0 : return omit_two_operands_loc (loc, type, boolean_true_node,
12503 : 0 : imag0, imag1);
12504 : 0 : return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12505 : : }
12506 : : }
12507 : :
12508 : 1693 : icond = fold_binary_loc (loc, code, type, imag0, imag1);
12509 : 1693 : if (icond && TREE_CODE (icond) == INTEGER_CST)
12510 : : {
12511 : 9 : if (integer_zerop (icond))
12512 : : {
12513 : 7 : if (code == EQ_EXPR)
12514 : 1 : return omit_two_operands_loc (loc, type, boolean_false_node,
12515 : 1 : real0, real1);
12516 : 6 : return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12517 : : }
12518 : : else
12519 : : {
12520 : 2 : if (code == NE_EXPR)
12521 : 1 : return omit_two_operands_loc (loc, type, boolean_true_node,
12522 : 1 : real0, real1);
12523 : 1 : return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
12524 : : }
12525 : : }
12526 : : }
12527 : :
12528 : : return NULL_TREE;
12529 : :
12530 : 35035606 : case LT_EXPR:
12531 : 35035606 : case GT_EXPR:
12532 : 35035606 : case LE_EXPR:
12533 : 35035606 : case GE_EXPR:
12534 : 35035606 : tem = fold_comparison (loc, code, type, op0, op1);
12535 : 35035606 : if (tem != NULL_TREE)
12536 : : return tem;
12537 : :
12538 : : /* Transform comparisons of the form X +- C CMP X. */
12539 : 34185954 : if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12540 : 4151343 : && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12541 : 44167 : && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12542 : 34185972 : && !HONOR_SNANS (arg0))
12543 : : {
12544 : 16 : tree arg01 = TREE_OPERAND (arg0, 1);
12545 : 16 : enum tree_code code0 = TREE_CODE (arg0);
12546 : 16 : int is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12547 : :
12548 : : /* (X - c) > X becomes false. */
12549 : 16 : if (code == GT_EXPR
12550 : 7 : && ((code0 == MINUS_EXPR && is_positive >= 0)
12551 : 3 : || (code0 == PLUS_EXPR && is_positive <= 0)))
12552 : 4 : return constant_boolean_node (0, type);
12553 : :
12554 : : /* Likewise (X + c) < X becomes false. */
12555 : 12 : if (code == LT_EXPR
12556 : 2 : && ((code0 == PLUS_EXPR && is_positive >= 0)
12557 : 0 : || (code0 == MINUS_EXPR && is_positive <= 0)))
12558 : 2 : return constant_boolean_node (0, type);
12559 : :
12560 : : /* Convert (X - c) <= X to true. */
12561 : 10 : if (!HONOR_NANS (arg1)
12562 : 6 : && code == LE_EXPR
12563 : 14 : && ((code0 == MINUS_EXPR && is_positive >= 0)
12564 : 0 : || (code0 == PLUS_EXPR && is_positive <= 0)))
12565 : 4 : return constant_boolean_node (1, type);
12566 : :
12567 : : /* Convert (X + c) >= X to true. */
12568 : 6 : if (!HONOR_NANS (arg1)
12569 : 2 : && code == GE_EXPR
12570 : 8 : && ((code0 == PLUS_EXPR && is_positive >= 0)
12571 : 0 : || (code0 == MINUS_EXPR && is_positive <= 0)))
12572 : 2 : return constant_boolean_node (1, type);
12573 : : }
12574 : :
12575 : : /* If we are comparing an ABS_EXPR with a constant, we can
12576 : : convert all the cases into explicit comparisons, but they may
12577 : : well not be faster than doing the ABS and one comparison.
12578 : : But ABS (X) <= C is a range comparison, which becomes a subtraction
12579 : : and a comparison, and is probably faster. */
12580 : 34185942 : if (code == LE_EXPR
12581 : 6485138 : && TREE_CODE (arg1) == INTEGER_CST
12582 : 4538443 : && TREE_CODE (arg0) == ABS_EXPR
12583 : 584 : && ! TREE_SIDE_EFFECTS (arg0)
12584 : 584 : && (tem = negate_expr (arg1)) != 0
12585 : 584 : && TREE_CODE (tem) == INTEGER_CST
12586 : 34186526 : && !TREE_OVERFLOW (tem))
12587 : 1168 : return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
12588 : : build2 (GE_EXPR, type,
12589 : 584 : TREE_OPERAND (arg0, 0), tem),
12590 : : build2 (LE_EXPR, type,
12591 : 1168 : TREE_OPERAND (arg0, 0), arg1));
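/* Illustrative sketch (plain C; names are made up, INT_MIN ignored since
   abs () is undefined there): "ABS (X) <= C" as a range check.  */
#include <stdlib.h>
int abs_le_before (int x) { return abs (x) <= 100; }
int abs_le_after  (int x) { return x >= -100 && x <= 100; }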
12592 : :
12593 : : /* Convert ABS_EXPR<x> >= 0 to true. */
12594 : 34185358 : strict_overflow_p = false;
12595 : 34185358 : if (code == GE_EXPR
12596 : 3567166 : && (integer_zerop (arg1)
12597 : 2769271 : || (! HONOR_NANS (arg0)
12598 : 2173887 : && real_zerop (arg1)))
12599 : 34983466 : && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12600 : : {
12601 : 1173 : if (strict_overflow_p)
12602 : 6 : fold_overflow_warning (("assuming signed overflow does not occur "
12603 : : "when simplifying comparison of "
12604 : : "absolute value and zero"),
12605 : : WARN_STRICT_OVERFLOW_CONDITIONAL);
12606 : 1173 : return omit_one_operand_loc (loc, type,
12607 : : constant_boolean_node (true, type),
12608 : 1173 : arg0);
12609 : : }
12610 : :
12611 : : /* Convert ABS_EXPR<x> < 0 to false. */
12612 : 34184185 : strict_overflow_p = false;
12613 : 34184185 : if (code == LT_EXPR
12614 : 11381032 : && (integer_zerop (arg1) || real_zerop (arg1))
12615 : 36810428 : && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12616 : : {
12617 : 3416 : if (strict_overflow_p)
12618 : 285 : fold_overflow_warning (("assuming signed overflow does not occur "
12619 : : "when simplifying comparison of "
12620 : : "absolute value and zero"),
12621 : : WARN_STRICT_OVERFLOW_CONDITIONAL);
12622 : 3416 : return omit_one_operand_loc (loc, type,
12623 : : constant_boolean_node (false, type),
12624 : 3416 : arg0);
12625 : : }
12626 : :
12627 : : /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12628 : : and similarly for >= into !=. */
12629 : 34180769 : if ((code == LT_EXPR || code == GE_EXPR)
12630 : 14943609 : && TYPE_UNSIGNED (TREE_TYPE (arg0))
12631 : 4269646 : && TREE_CODE (arg1) == LSHIFT_EXPR
12632 : 34182090 : && integer_onep (TREE_OPERAND (arg1, 0)))
12633 : 3440 : return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12634 : 1151 : build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12635 : 1151 : TREE_OPERAND (arg1, 1)),
12636 : 2302 : build_zero_cst (TREE_TYPE (arg0)));
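/* Illustrative sketch (plain C; names are made up, y < 32 assumed): for
   unsigned X, "X < (1 << Y)" holds exactly when no bit at position Y or
   above is set.  */
int below_pow2_before (unsigned x, unsigned y) { return x < (1u << y); }
int below_pow2_after  (unsigned x, unsigned y) { return (x >> y) == 0; }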
12637 : :
12638 : : /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
12639 : : otherwise Y might be >= # of bits in X's type and thus e.g.
12640 : : (unsigned char) (1 << Y) for Y 15 might be 0.
12641 : : If the cast is widening, then 1 << Y should have unsigned type,
12642 : : otherwise if Y is number of bits in the signed shift type minus 1,
12643 : : we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
12644 : : 31 might be 0xffffffff80000000. */
12645 : 34179618 : if ((code == LT_EXPR || code == GE_EXPR)
12646 : 14942458 : && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12647 : 5244586 : || VECTOR_INTEGER_TYPE_P (TREE_TYPE (arg0)))
12648 : 9719781 : && TYPE_UNSIGNED (TREE_TYPE (arg0))
12649 : 3118597 : && CONVERT_EXPR_P (arg1)
12650 : 1004324 : && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12651 : 42 : && (element_precision (TREE_TYPE (arg1))
12652 : 21 : >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
12653 : 14 : && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
12654 : 14 : || (element_precision (TREE_TYPE (arg1))
12655 : 7 : == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
12656 : 34179625 : && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12657 : : {
12658 : 7 : tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12659 : 7 : TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
12660 : 21 : return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12661 : 7 : fold_convert_loc (loc, TREE_TYPE (arg0), tem),
12662 : 14 : build_zero_cst (TREE_TYPE (arg0)));
12663 : : }
12664 : :
12665 : : return NULL_TREE;
12666 : :
12667 : 5223850 : case UNORDERED_EXPR:
12668 : 5223850 : case ORDERED_EXPR:
12669 : 5223850 : case UNLT_EXPR:
12670 : 5223850 : case UNLE_EXPR:
12671 : 5223850 : case UNGT_EXPR:
12672 : 5223850 : case UNGE_EXPR:
12673 : 5223850 : case UNEQ_EXPR:
12674 : 5223850 : case LTGT_EXPR:
12675 : : /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
12676 : 5223850 : {
12677 : 5223850 : tree targ0 = strip_float_extensions (arg0);
12678 : 5223850 : tree targ1 = strip_float_extensions (arg1);
12679 : 5223850 : tree newtype = TREE_TYPE (targ0);
12680 : :
12681 : 5223850 : if (element_precision (TREE_TYPE (targ1)) > element_precision (newtype))
12682 : 1213 : newtype = TREE_TYPE (targ1);
12683 : :
12684 : 5223850 : if (element_precision (newtype) < element_precision (TREE_TYPE (arg0))
12685 : 5223850 : && (!VECTOR_TYPE_P (type) || is_truth_type_for (newtype, type)))
12686 : 317 : return fold_build2_loc (loc, code, type,
12687 : : fold_convert_loc (loc, newtype, targ0),
12688 : 317 : fold_convert_loc (loc, newtype, targ1));
12689 : : }
12690 : :
12691 : : return NULL_TREE;
12692 : :
12693 : 5327549 : case COMPOUND_EXPR:
12694 : : /* When pedantic, a compound expression can be neither an lvalue
12695 : : nor an integer constant expression. */
12696 : 5327549 : if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
12697 : : return NULL_TREE;
12698 : :       /* Don't let (0, 0) be a null pointer constant.  */
12699 : 463761 : tem = integer_zerop (arg1) ? build1_loc (loc, NOP_EXPR, type, arg1)
12700 : 463761 : : fold_convert_loc (loc, type, arg1);
12701 : : return tem;
12702 : :
12703 : : default:
12704 : : return NULL_TREE;
12705 : : } /* switch (code) */
12706 : : }
12707 : :
12708 : : /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
12709 : : ((A & N) + B) & M -> (A + B) & M
12710 : : Similarly if (N & M) == 0,
12711 : : ((A | N) + B) & M -> (A + B) & M
12712 : : and for - instead of + (or unary - instead of +)
12713 : : and/or ^ instead of |.
12714 : : If B is constant and (B & M) == 0, fold into A & M.
12715 : :
12716 : : This function is a helper for match.pd patterns. Return non-NULL
12717 : : type in which the simplified operation should be performed only
12718 : : if any optimization is possible.
12719 : :
12720 : : ARG1 is M above, ARG00 is left operand of +/-, if CODE00 is BIT_*_EXPR,
12721 : : then ARG00{0,1} are operands of that bitop, otherwise CODE00 is ERROR_MARK.
12722 : : Similarly for ARG01, CODE01 and ARG01{0,1}, just for the right operand of
12723 : : +/-. */
12724 : : tree
12725 : 1145560 : fold_bit_and_mask (tree type, tree arg1, enum tree_code code,
12726 : : tree arg00, enum tree_code code00, tree arg000, tree arg001,
12727 : : tree arg01, enum tree_code code01, tree arg010, tree arg011,
12728 : : tree *pmop)
12729 : : {
12730 : 1145560 : gcc_assert (TREE_CODE (arg1) == INTEGER_CST);
12731 : 1145560 : gcc_assert (code == PLUS_EXPR || code == MINUS_EXPR || code == NEGATE_EXPR);
12732 : 1145560 : wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
12733 : 2291120 : if (~cst1 == 0
12734 : 3432202 : || (cst1 & (cst1 + 1)) != 0
12735 : 939938 : || !INTEGRAL_TYPE_P (type)
12736 : 939938 : || (!TYPE_OVERFLOW_WRAPS (type)
12737 : 40904 : && TREE_CODE (type) != INTEGER_TYPE)
12738 : 4168757 : || (wi::max_value (type) & cst1) != cst1)
12739 : : return NULL_TREE;
12740 : :
12741 : 939938 : enum tree_code codes[2] = { code00, code01 };
12742 : 939938 : tree arg0xx[4] = { arg000, arg001, arg010, arg011 };
12743 : 939938 : int which = 0;
12744 : 939938 : wide_int cst0;
12745 : :
12746 : : /* Now we know that arg0 is (C + D) or (C - D) or -C and
12747 : : arg1 (M) is == (1LL << cst) - 1.
12748 : : Store C into PMOP[0] and D into PMOP[1]. */
12749 : 939938 : pmop[0] = arg00;
12750 : 939938 : pmop[1] = arg01;
12751 : 939938 : which = code != NEGATE_EXPR;
12752 : :
12753 : 2818920 : for (; which >= 0; which--)
12754 : 1878982 : switch (codes[which])
12755 : : {
12756 : 21472 : case BIT_AND_EXPR:
12757 : 21472 : case BIT_IOR_EXPR:
12758 : 21472 : case BIT_XOR_EXPR:
12759 : 21472 : gcc_assert (TREE_CODE (arg0xx[2 * which + 1]) == INTEGER_CST);
12760 : 21472 : cst0 = wi::to_wide (arg0xx[2 * which + 1]) & cst1;
12761 : 21472 : if (codes[which] == BIT_AND_EXPR)
12762 : : {
12763 : 21358 : if (cst0 != cst1)
12764 : : break;
12765 : : }
12766 : 114 : else if (cst0 != 0)
12767 : : break;
12768 : : /* If C or D is of the form (A & N) where
12769 : : (N & M) == M, or of the form (A | N) or
12770 : : (A ^ N) where (N & M) == 0, replace it with A. */
12771 : 19928 : pmop[which] = arg0xx[2 * which];
12772 : 19928 : break;
12773 : 1857510 : case ERROR_MARK:
12774 : 1857510 : if (TREE_CODE (pmop[which]) != INTEGER_CST)
12775 : : break;
12776 : : /* If C or D is a N where (N & M) == 0, it can be
12777 : : omitted (replaced with 0). */
12778 : 780972 : if ((code == PLUS_EXPR
12779 : 171283 : || (code == MINUS_EXPR && which == 0))
12780 : 561769 : && (cst1 & wi::to_wide (pmop[which])) == 0)
12781 : 125381 : pmop[which] = build_int_cst (type, 0);
12782 : : /* Similarly, with C - N where (-N & M) == 0. */
12783 : 780972 : if (code == MINUS_EXPR
12784 : 390486 : && which == 1
12785 : 554995 : && (cst1 & -wi::to_wide (pmop[which])) == 0)
12786 : 156362 : pmop[which] = build_int_cst (type, 0);
12787 : : break;
12788 : 0 : default:
12789 : 0 : gcc_unreachable ();
12790 : : }
12791 : :
12792 : : /* Only build anything new if we optimized one or both arguments above. */
12793 : 939938 : if (pmop[0] == arg00 && pmop[1] == arg01)
12794 : : return NULL_TREE;
12795 : :
12796 : 300127 : if (TYPE_OVERFLOW_WRAPS (type))
12797 : : return type;
12798 : : else
12799 : 2237 : return unsigned_type_for (type);
12800 : 939938 : }
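/* Illustrative sketch (plain C; names are made up): with M = 0xff and
   N = 0x1ff we have (N & M) == M, so the low byte of ((a & N) + b) equals
   the low byte of (a + b) and the inner mask can be dropped.  */
unsigned bit_and_mask_before (unsigned a, unsigned b) { return ((a & 0x1ffu) + b) & 0xffu; }
unsigned bit_and_mask_after  (unsigned a, unsigned b) { return (a + b) & 0xffu; }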
12801 : :
12802 : : /* Used by contains_label_[p1]. */
12803 : : /* Used by contains_label_p and contains_label_1.  */
12804 : : struct contains_label_data
12805 : : {
12806 : : hash_set<tree> *pset;
12807 : : bool inside_switch_p;
12808 : : };
12809 : :
12810 : : /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
12811 : : a LABEL_EXPR or CASE_LABEL_EXPR not inside of another SWITCH_EXPR; otherwise
12812 : : return NULL_TREE. Do not check the subtrees of GOTO_EXPR. */
12813 : :
12814 : : static tree
12815 : 2301174 : contains_label_1 (tree *tp, int *walk_subtrees, void *data)
12816 : : {
12817 : 2301174 : contains_label_data *d = (contains_label_data *) data;
12818 : 2301174 : switch (TREE_CODE (*tp))
12819 : : {
12820 : : case LABEL_EXPR:
12821 : : return *tp;
12822 : :
12823 : 0 : case CASE_LABEL_EXPR:
12824 : 0 : if (!d->inside_switch_p)
12825 : : return *tp;
12826 : : return NULL_TREE;
12827 : :
12828 : 0 : case SWITCH_EXPR:
12829 : 0 : if (!d->inside_switch_p)
12830 : : {
12831 : 0 : if (walk_tree (&SWITCH_COND (*tp), contains_label_1, data, d->pset))
12832 : 0 : return *tp;
12833 : 0 : d->inside_switch_p = true;
12834 : 0 : if (walk_tree (&SWITCH_BODY (*tp), contains_label_1, data, d->pset))
12835 : 0 : return *tp;
12836 : 0 : d->inside_switch_p = false;
12837 : 0 : *walk_subtrees = 0;
12838 : : }
12839 : : return NULL_TREE;
12840 : :
12841 : 5898 : case GOTO_EXPR:
12842 : 5898 : *walk_subtrees = 0;
12843 : 5898 : return NULL_TREE;
12844 : :
12845 : : default:
12846 : : return NULL_TREE;
12847 : : }
12848 : : }
12849 : :
12850 : : /* Return whether the sub-tree ST contains a label which is accessible from
12851 : : outside the sub-tree. */
12852 : :
12853 : : static bool
12854 : 200523 : contains_label_p (tree st)
12855 : : {
12856 : 200523 : hash_set<tree> pset;
12857 : 200523 : contains_label_data data = { &pset, false };
12858 : 200523 : return walk_tree (&st, contains_label_1, &data, &pset) != NULL_TREE;
12859 : 200523 : }
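For intuition about what contains_label_p guards against, here is a C-level analogue (an editor's sketch, not taken from the GCC sources): the arm of a conditional can be dead and still own a label that is reached by a goto from outside, so simply discarding that arm would strand the jump.

    int f (void)
    {
      int r = 0;
      if (0)
        {
        lab:              /* label inside the never-taken arm ...          */
          return r + 1;
        }
      r = 1;
      goto lab;           /* ... yet still reachable from outside the arm  */
    }

This is why fold_ternary_loc below only drops a side-effecting unused arm of a constant COND_EXPR after contains_label_p confirms the arm holds no such label.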
12860 : :
12861 : : /* Fold a ternary expression of code CODE and type TYPE with operands
12862 : : OP0, OP1, and OP2. Return the folded expression if folding is
12863 : : successful. Otherwise, return NULL_TREE. */
12864 : :
12865 : : tree
12866 : 28872975 : fold_ternary_loc (location_t loc, enum tree_code code, tree type,
12867 : : tree op0, tree op1, tree op2)
12868 : : {
12869 : 28872975 : tree tem;
12870 : 28872975 : tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
12871 : 28872975 : enum tree_code_class kind = TREE_CODE_CLASS (code);
12872 : :
12873 : 28872975 : gcc_assert (IS_EXPR_CODE_CLASS (kind)
12874 : : && TREE_CODE_LENGTH (code) == 3);
12875 : :
12876 : : /* If this is a commutative operation, and OP0 is a constant, move it
12877 : : to OP1 to reduce the number of tests below. */
12878 : 28872975 : if (commutative_ternary_tree_code (code)
12879 : 28872975 : && tree_swap_operands_p (op0, op1))
12880 : 41 : return fold_build3_loc (loc, code, type, op1, op0, op2);
12881 : :
12882 : 28872934 : tem = generic_simplify (loc, code, type, op0, op1, op2);
12883 : 28872934 : if (tem)
12884 : : return tem;
12885 : :
12886 : : /* Strip any conversions that don't change the mode. This is safe
12887 : : for every expression, except for a comparison expression because
12888 : : its signedness is derived from its operands. So, in the latter
12889 : : case, only strip conversions that don't change the signedness.
12890 : :
12891 : : Note that this is done as an internal manipulation within the
12892 : : constant folder, in order to find the simplest representation of
12893 : : the arguments so that their form can be studied. In any cases,
12894 : : the appropriate type conversions should be put back in the tree
12895 : : that will get out of the constant folder. */
12896 : 28001382 : if (op0)
12897 : : {
12898 : 27937597 : arg0 = op0;
12899 : 27937597 : STRIP_NOPS (arg0);
12900 : : }
12901 : :
12902 : 28001382 : if (op1)
12903 : : {
12904 : 28001382 : arg1 = op1;
12905 : 28001382 : STRIP_NOPS (arg1);
12906 : : }
12907 : :
12908 : 28001382 : if (op2)
12909 : : {
12910 : 11885263 : arg2 = op2;
12911 : 11885263 : STRIP_NOPS (arg2);
12912 : : }
12913 : :
12914 : 28001382 : switch (code)
12915 : : {
12916 : 16115648 : case COMPONENT_REF:
12917 : 16115648 : if (TREE_CODE (arg0) == CONSTRUCTOR
12918 : 16115648 : && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
12919 : : {
12920 : : unsigned HOST_WIDE_INT idx;
12921 : : tree field, value;
12922 : 856 : FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
12923 : 665 : if (field == arg1)
12924 : : return value;
12925 : : }
12926 : : return NULL_TREE;
12927 : :
12928 : 9898965 : case COND_EXPR:
12929 : 9898965 : case VEC_COND_EXPR:
12930 : : /* Pedantic ANSI C says that a conditional expression is never an lvalue,
12931 : : so all simple results must be passed through pedantic_non_lvalue. */
12932 : 9898965 : if (TREE_CODE (arg0) == INTEGER_CST)
12933 : : {
12934 : 325998 : tree unused_op = integer_zerop (arg0) ? op1 : op2;
12935 : 325998 : tem = integer_zerop (arg0) ? op2 : op1;
12936 : : /* Only optimize constant conditions when the selected branch
12937 : : has the same type as the COND_EXPR. This avoids optimizing
12938 : : away "c ? x : throw", where the throw has a void type.
12939 : : Avoid throwing away that operand which contains label. */
12940 : 325998 : if ((!TREE_SIDE_EFFECTS (unused_op)
12941 : 200523 : || !contains_label_p (unused_op))
12942 : 522174 : && (! VOID_TYPE_P (TREE_TYPE (tem))
12943 : 246168 : || VOID_TYPE_P (type)))
12944 : 315249 : return protected_set_expr_location_unshare (tem, loc);
12945 : 10749 : return NULL_TREE;
12946 : : }
12947 : 9572967 : else if (TREE_CODE (arg0) == VECTOR_CST)
12948 : : {
12949 : 1338 : unsigned HOST_WIDE_INT nelts;
12950 : 1338 : if ((TREE_CODE (arg1) == VECTOR_CST
12951 : 212 : || TREE_CODE (arg1) == CONSTRUCTOR)
12952 : 1126 : && (TREE_CODE (arg2) == VECTOR_CST
12953 : 6 : || TREE_CODE (arg2) == CONSTRUCTOR)
12954 : 2676 : && TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts))
12955 : : {
12956 : 1120 : vec_perm_builder sel (nelts, nelts, 1);
12957 : 11842 : for (unsigned int i = 0; i < nelts; i++)
12958 : : {
12959 : 10722 : tree val = VECTOR_CST_ELT (arg0, i);
12960 : 10722 : if (integer_all_onesp (val))
12961 : 5833 : sel.quick_push (i);
12962 : 4889 : else if (integer_zerop (val))
12963 : 4889 : sel.quick_push (nelts + i);
12964 : : else /* Currently unreachable. */
12965 : 1088 : return NULL_TREE;
12966 : : }
12967 : 1120 : vec_perm_indices indices (sel, 2, nelts);
12968 : 1120 : tree t = fold_vec_perm (type, arg1, arg2, indices);
12969 : 1120 : if (t != NULL_TREE)
12970 : 1088 : return t;
12971 : 2208 : }
12972 : : }
12973 : :
12974 : : /* If we have A op B ? A : C, we may be able to convert this to a
12975 : : simpler expression, depending on the operation and the values
12976 : : of B and C. Signed zeros prevent all of these transformations,
12977 : : for reasons given above each one.
12978 : :
12979 : : Also try swapping the arguments and inverting the conditional. */
12980 : 9571879 : if (COMPARISON_CLASS_P (arg0)
12981 : 7996861 : && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op1)
12982 : 9689878 : && !HONOR_SIGNED_ZEROS (op1))
12983 : : {
12984 : 107600 : tem = fold_cond_expr_with_comparison (loc, type, TREE_CODE (arg0),
12985 : 107600 : TREE_OPERAND (arg0, 0),
12986 : 107600 : TREE_OPERAND (arg0, 1),
12987 : : op1, op2);
12988 : 107600 : if (tem)
12989 : : return tem;
12990 : : }
12991 : :
12992 : 9565424 : if (COMPARISON_CLASS_P (arg0)
12993 : 7990406 : && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op2)
12994 : 9977491 : && !HONOR_SIGNED_ZEROS (op2))
12995 : : {
12996 : 324926 : enum tree_code comp_code = TREE_CODE (arg0);
12997 : 324926 : tree arg00 = TREE_OPERAND (arg0, 0);
12998 : 324926 : tree arg01 = TREE_OPERAND (arg0, 1);
12999 : 324926 : comp_code = invert_tree_comparison (comp_code, HONOR_NANS (arg00));
13000 : 324926 : if (comp_code != ERROR_MARK)
13001 : 324926 : tem = fold_cond_expr_with_comparison (loc, type, comp_code,
13002 : : arg00,
13003 : : arg01,
13004 : : op2, op1);
13005 : 324926 : if (tem)
13006 : : return tem;
13007 : : }
13008 : :
13009 : : /* If the second operand is simpler than the third, swap them
13010 : : since that produces better jump optimization results. */
13011 : 9329655 : if (truth_value_p (TREE_CODE (arg0))
13012 : 9329655 : && tree_swap_operands_p (op1, op2))
13013 : : {
13014 : 1547749 : location_t loc0 = expr_location_or (arg0, loc);
13015 : : /* See if this can be inverted. If it can't, possibly because
13016 : : it was a floating-point inequality comparison, don't do
13017 : : anything. */
13018 : 1547749 : tem = fold_invert_truthvalue (loc0, arg0);
13019 : 1547749 : if (tem)
13020 : 1009400 : return fold_build3_loc (loc, code, type, tem, op2, op1);
13021 : : }
13022 : :
13023 : : /* Convert A ? 1 : 0 to simply A. */
13024 : 8320255 : if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
13025 : 8016062 : : (integer_onep (op1)
13026 : 368790 : && !VECTOR_TYPE_P (type)))
13027 : 564673 : && integer_zerop (op2)
13028 : : /* If we try to convert OP0 to our type, the
13029 : : call to fold will try to move the conversion inside
13030 : : a COND, which will recurse. In that case, the COND_EXPR
13031 : : is probably the best choice, so leave it alone. */
13032 : 9177566 : && type == TREE_TYPE (arg0))
13033 : 29422 : return protected_set_expr_location_unshare (arg0, loc);
13034 : :
13035 : : /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13036 : : over COND_EXPR in cases such as floating point comparisons. */
13037 : 8290833 : if (integer_zerop (op1)
13038 : 242263 : && code == COND_EXPR
13039 : 240343 : && integer_onep (op2)
13040 : 32833 : && !VECTOR_TYPE_P (type)
13041 : 8323666 : && truth_value_p (TREE_CODE (arg0)))
13042 : 31257 : return fold_convert_loc (loc, type,
13043 : 31257 : invert_truthvalue_loc (loc, arg0));
13044 : :
13045 : : /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13046 : 8259576 : if (TREE_CODE (arg0) == LT_EXPR
13047 : 970666 : && integer_zerop (TREE_OPERAND (arg0, 1))
13048 : 14719 : && integer_zerop (op2)
13049 : 8260512 : && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13050 : : {
13051 : : /* sign_bit_p looks through both zero and sign extensions,
13052 : : but for this optimization only sign extensions are
13053 : : usable. */
13054 : 56 : tree tem2 = TREE_OPERAND (arg0, 0);
13055 : 56 : while (tem != tem2)
13056 : : {
13057 : 0 : if (TREE_CODE (tem2) != NOP_EXPR
13058 : 0 : || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
13059 : : {
13060 : : tem = NULL_TREE;
13061 : : break;
13062 : : }
13063 : 0 : tem2 = TREE_OPERAND (tem2, 0);
13064 : : }
13065 : : /* sign_bit_p only checks ARG1 bits within A's precision.
13066 : : If <sign bit of A> has wider type than A, bits outside
13067 : : of A's precision in <sign bit of A> need to be checked.
13068 : : If they are all 0, this optimization needs to be done
13069 : : in unsigned A's type; if they are all 1, in signed A's type;
13070 : : otherwise this can't be done. */
13071 : 56 : if (tem
13072 : 56 : && TYPE_PRECISION (TREE_TYPE (tem))
13073 : 56 : < TYPE_PRECISION (TREE_TYPE (arg1))
13074 : 112 : && TYPE_PRECISION (TREE_TYPE (tem))
13075 : 56 : < TYPE_PRECISION (type))
13076 : : {
13077 : 56 : int inner_width, outer_width;
13078 : 56 : tree tem_type;
13079 : :
13080 : 56 : inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13081 : 56 : outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13082 : 56 : if (outer_width > TYPE_PRECISION (type))
13083 : 0 : outer_width = TYPE_PRECISION (type);
13084 : :
13085 : 56 : wide_int mask = wi::shifted_mask
13086 : 56 : (inner_width, outer_width - inner_width, false,
13087 : 56 : TYPE_PRECISION (TREE_TYPE (arg1)));
13088 : :
13089 : 56 : wide_int common = mask & wi::to_wide (arg1);
13090 : 56 : if (common == mask)
13091 : : {
13092 : 28 : tem_type = signed_type_for (TREE_TYPE (tem));
13093 : 28 : tem = fold_convert_loc (loc, tem_type, tem);
13094 : : }
13095 : 28 : else if (common == 0)
13096 : : {
13097 : 0 : tem_type = unsigned_type_for (TREE_TYPE (tem));
13098 : 0 : tem = fold_convert_loc (loc, tem_type, tem);
13099 : : }
13100 : : else
13101 : : tem = NULL;
13102 : 56 : }
13103 : :
13104 : 56 : if (tem)
13105 : 28 : return
13106 : 56 : fold_convert_loc (loc, type,
13107 : : fold_build2_loc (loc, BIT_AND_EXPR,
13108 : 28 : TREE_TYPE (tem), tem,
13109 : : fold_convert_loc (loc,
13110 : 28 : TREE_TYPE (tem),
13111 : 28 : arg1)));
13112 : : }
13113 : :
13114 : : /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13115 : : already handled above. */
13116 : 8259548 : if (TREE_CODE (arg0) == BIT_AND_EXPR
13117 : 355 : && integer_onep (TREE_OPERAND (arg0, 1))
13118 : 3 : && integer_zerop (op2)
13119 : 8259548 : && integer_pow2p (arg1))
13120 : : {
13121 : 0 : tree tem = TREE_OPERAND (arg0, 0);
13122 : 0 : STRIP_NOPS (tem);
13123 : 0 : if (TREE_CODE (tem) == RSHIFT_EXPR
13124 : 0 : && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
13125 : 0 : && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
13126 : 0 : == tree_to_uhwi (TREE_OPERAND (tem, 1)))
13127 : 0 : return fold_build2_loc (loc, BIT_AND_EXPR, type,
13128 : : fold_convert_loc (loc, type,
13129 : 0 : TREE_OPERAND (tem, 0)),
13130 : 0 : op1);
13131 : : }
13132 : :
13133 : : /* A & N ? N : 0 is simply A & N if N is a power of two. This
13134 : : is probably obsolete because the first operand should be a
13135 : : truth value (that's why we have the two cases above), but let's
13136 : : leave it in until we can confirm this for all front-ends. */
13137 : 8259548 : if (integer_zerop (op2)
13138 : 1681598 : && TREE_CODE (arg0) == NE_EXPR
13139 : 385239 : && integer_zerop (TREE_OPERAND (arg0, 1))
13140 : 215343 : && integer_pow2p (arg1)
13141 : 30217 : && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13142 : 91 : && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13143 : : arg1, OEP_ONLY_CONST)
13144 : : /* operand_equal_p compares just value, not precision, so e.g.
13145 : : arg1 could be 8-bit -128 and be power of two, but BIT_AND_EXPR
13146 : : second operand 32-bit -128, which is not a power of two (or vice
13147 : : versa). */
13148 : 8259548 : && integer_pow2p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)))
13149 : 0 : return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
13150 : :
13151 : : /* Disable the transformations below for vectors, since
13152 : : fold_binary_op_with_conditional_arg may undo them immediately,
13153 : : yielding an infinite loop. */
13154 : 8259548 : if (code == VEC_COND_EXPR)
13155 : : return NULL_TREE;
13156 : :
13157 : : /* Convert A ? B : 0 into A && B if A and B are truth values. */
13158 : 7955355 : if (integer_zerop (op2)
13159 : 1428852 : && truth_value_p (TREE_CODE (arg0))
13160 : 1313099 : && truth_value_p (TREE_CODE (arg1))
13161 : 7985853 : && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13162 : 30498 : return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
13163 : : : TRUTH_ANDIF_EXPR,
13164 : 30498 : type, fold_convert_loc (loc, type, arg0), op1);
13165 : :
13166 : : /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13167 : 7924857 : if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
13168 : 431552 : && truth_value_p (TREE_CODE (arg0))
13169 : 292663 : && truth_value_p (TREE_CODE (arg1))
13170 : 7958978 : && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13171 : : {
13172 : 34121 : location_t loc0 = expr_location_or (arg0, loc);
13173 : : /* Only perform transformation if ARG0 is easily inverted. */
13174 : 34121 : tem = fold_invert_truthvalue (loc0, arg0);
13175 : 34121 : if (tem)
13176 : 33857 : return fold_build2_loc (loc, code == VEC_COND_EXPR
13177 : : ? BIT_IOR_EXPR
13178 : : : TRUTH_ORIF_EXPR,
13179 : : type, fold_convert_loc (loc, type, tem),
13180 : 33857 : op1);
13181 : : }
13182 : :
13183 : : /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13184 : 7891000 : if (integer_zerop (arg1)
13185 : 209165 : && truth_value_p (TREE_CODE (arg0))
13186 : 46376 : && truth_value_p (TREE_CODE (op2))
13187 : 7891028 : && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13188 : : {
13189 : 28 : location_t loc0 = expr_location_or (arg0, loc);
13190 : : /* Only perform transformation if ARG0 is easily inverted. */
13191 : 28 : tem = fold_invert_truthvalue (loc0, arg0);
13192 : 28 : if (tem)
13193 : 0 : return fold_build2_loc (loc, code == VEC_COND_EXPR
13194 : : ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
13195 : : type, fold_convert_loc (loc, type, tem),
13196 : 0 : op2);
13197 : : }
13198 : :
13199 : : /* Convert A ? 1 : B into A || B if A and B are truth values. */
13200 : 7891000 : if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
13201 : 339368 : && truth_value_p (TREE_CODE (arg0))
13202 : 262889 : && truth_value_p (TREE_CODE (op2))
13203 : 7891186 : && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13204 : 186 : return fold_build2_loc (loc, code == VEC_COND_EXPR
13205 : : ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
13206 : 186 : type, fold_convert_loc (loc, type, arg0), op2);
13207 : :
13208 : : return NULL_TREE;
13209 : :
13210 : 0 : case CALL_EXPR:
13211 : : /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13212 : : of fold_ternary on them. */
13213 : 0 : gcc_unreachable ();
13214 : :
13215 : 626790 : case BIT_FIELD_REF:
13216 : 626790 : if (TREE_CODE (arg0) == VECTOR_CST
13217 : 27490 : && (type == TREE_TYPE (TREE_TYPE (arg0))
13218 : 1631 : || (VECTOR_TYPE_P (type)
13219 : 1028 : && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0))))
13220 : 26867 : && tree_fits_uhwi_p (op1)
13221 : 653657 : && tree_fits_uhwi_p (op2))
13222 : : {
13223 : 26867 : tree eltype = TREE_TYPE (TREE_TYPE (arg0));
13224 : 26867 : unsigned HOST_WIDE_INT width
13225 : 26867 : = (TREE_CODE (eltype) == BOOLEAN_TYPE
13226 : 26867 : ? TYPE_PRECISION (eltype) : tree_to_uhwi (TYPE_SIZE (eltype)));
13227 : 26867 : unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
13228 : 26867 : unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
13229 : :
13230 : 26867 : if (n != 0
13231 : 26867 : && (idx % width) == 0
13232 : 26867 : && (n % width) == 0
13233 : 53734 : && known_le ((idx + n) / width,
13234 : : TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))))
13235 : : {
13236 : 26867 : idx = idx / width;
13237 : 26867 : n = n / width;
13238 : :
13239 : 26867 : if (TREE_CODE (arg0) == VECTOR_CST)
13240 : : {
13241 : 26867 : if (n == 1)
13242 : : {
13243 : 25863 : tem = VECTOR_CST_ELT (arg0, idx);
13244 : 25863 : if (VECTOR_TYPE_P (type))
13245 : 4 : tem = fold_build1 (VIEW_CONVERT_EXPR, type, tem);
13246 : 25863 : return tem;
13247 : : }
13248 : :
13249 : 1004 : tree_vector_builder vals (type, n, 1);
13250 : 6292 : for (unsigned i = 0; i < n; ++i)
13251 : 5288 : vals.quick_push (VECTOR_CST_ELT (arg0, idx + i));
13252 : 1004 : return vals.build ();
13253 : 1004 : }
13254 : : }
13255 : : }
13256 : :
13257 : : /* On constants we can use native encode/interpret to constant
13258 : : fold (nearly) all BIT_FIELD_REFs. */
13259 : 599923 : if (CONSTANT_CLASS_P (arg0)
13260 : 1407 : && can_native_interpret_type_p (type)
13261 : : && BITS_PER_UNIT == 8
13262 : 1407 : && tree_fits_uhwi_p (op1)
13263 : 601330 : && tree_fits_uhwi_p (op2))
13264 : : {
13265 : 1407 : unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13266 : 1407 : unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
13267 : : /* Limit us to a reasonable amount of work. To relax the
13268 : : other limitations we need bit-shifting of the buffer
13269 : : and rounding up the size. */
13270 : 1407 : if (bitpos % BITS_PER_UNIT == 0
13271 : 1407 : && bitsize % BITS_PER_UNIT == 0
13272 : 1407 : && bitsize <= MAX_BITSIZE_MODE_ANY_MODE)
13273 : : {
13274 : 1407 : unsigned char b[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
13275 : 1407 : unsigned HOST_WIDE_INT len
13276 : 1407 : = native_encode_expr (arg0, b, bitsize / BITS_PER_UNIT,
13277 : 1407 : bitpos / BITS_PER_UNIT);
13278 : 1407 : if (len > 0
13279 : 1407 : && len * BITS_PER_UNIT >= bitsize)
13280 : : {
13281 : 1407 : tree v = native_interpret_expr (type, b,
13282 : : bitsize / BITS_PER_UNIT);
13283 : 1407 : if (v)
13284 : 1401 : return v;
13285 : : }
13286 : : }
13287 : : }
13288 : :
13289 : : return NULL_TREE;
13290 : :
13291 : 666697 : case VEC_PERM_EXPR:
13292 : : /* Perform constant folding of VEC_PERM_EXPR. */
13293 : 666697 : if (TREE_CODE (arg2) == VECTOR_CST
13294 : 655468 : && TREE_CODE (op0) == VECTOR_CST
13295 : 13128 : && TREE_CODE (op1) == VECTOR_CST)
13296 : : {
13297 : : /* Build a vector of integers from the tree mask. */
13298 : 2049 : vec_perm_builder builder;
13299 : 2049 : if (!tree_to_vec_perm_builder (&builder, arg2))
13300 : : return NULL_TREE;
13301 : :
13302 : : /* Create a vec_perm_indices for the integer vector. */
13303 : 2049 : poly_uint64 nelts = TYPE_VECTOR_SUBPARTS (type);
13304 : 2049 : bool single_arg = (op0 == op1);
13305 : 4098 : vec_perm_indices sel (builder, single_arg ? 1 : 2, nelts);
13306 : 2049 : return fold_vec_perm (type, op0, op1, sel);
13307 : 4098 : }
13308 : : return NULL_TREE;
13309 : :
13310 : 12504 : case BIT_INSERT_EXPR:
13311 : : /* Perform (partial) constant folding of BIT_INSERT_EXPR. */
13312 : 12504 : if (TREE_CODE (arg0) == INTEGER_CST
13313 : 14 : && TREE_CODE (arg1) == INTEGER_CST)
13314 : : {
13315 : 2 : unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13316 : 2 : unsigned bitsize = TYPE_PRECISION (TREE_TYPE (arg1));
13317 : 2 : if (BYTES_BIG_ENDIAN)
13318 : : bitpos = TYPE_PRECISION (type) - bitpos - bitsize;
13319 : 2 : wide_int tem = (wi::to_wide (arg0)
13320 : 4 : & wi::shifted_mask (bitpos, bitsize, true,
13321 : 4 : TYPE_PRECISION (type)));
13322 : 2 : wide_int tem2
13323 : 4 : = wi::lshift (wi::zext (wi::to_wide (arg1, TYPE_PRECISION (type)),
13324 : 2 : bitsize), bitpos);
13325 : 2 : return wide_int_to_tree (type, wi::bit_or (tem, tem2));
13326 : 2 : }
13327 : 12502 : else if (TREE_CODE (arg0) == VECTOR_CST
13328 : 799 : && CONSTANT_CLASS_P (arg1)
13329 : 12697 : && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0)),
13330 : 195 : TREE_TYPE (arg1)))
13331 : : {
13332 : 195 : unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13333 : 195 : unsigned HOST_WIDE_INT elsize
13334 : 195 : = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1)));
13335 : 195 : if (bitpos % elsize == 0)
13336 : : {
13337 : 195 : unsigned k = bitpos / elsize;
13338 : 195 : unsigned HOST_WIDE_INT nelts;
13339 : 195 : if (operand_equal_p (VECTOR_CST_ELT (arg0, k), arg1, 0))
13340 : 28872975 : return arg0;
13341 : 186 : else if (VECTOR_CST_NELTS (arg0).is_constant (&nelts))
13342 : : {
13343 : 186 : tree_vector_builder elts (type, nelts, 1);
13344 : 186 : elts.quick_grow (nelts);
13345 : 810 : for (unsigned HOST_WIDE_INT i = 0; i < nelts; ++i)
13346 : 624 : elts[i] = (i == k ? arg1 : VECTOR_CST_ELT (arg0, i));
13347 : 186 : return elts.build ();
13348 : 186 : }
13349 : : }
13350 : : }
13351 : : return NULL_TREE;
13352 : :
13353 : : default:
13354 : : return NULL_TREE;
13355 : : } /* switch (code) */
13356 : : }
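Several of the COND_EXPR rewrites above are plain Boolean identities. A throwaway check of the truth-value cases, as an editor's sketch in standalone C (fold itself of course rewrites trees rather than evaluating ints):

    #include <assert.h>

    int main (void)
    {
      for (int a = 0; a <= 1; a++)
        for (int b = 0; b <= 1; b++)
          {
            assert ((a ? b : 0) == (a && b));   /* A ? B : 0  ->  A && B  */
            assert ((a ? 1 : b) == (a || b));   /* A ? 1 : B  ->  A || B  */
            assert ((a ? 0 : b) == (!a && b));  /* A ? 0 : B  ->  !A && B */
            assert ((a ? b : 1) == (!a || b));  /* A ? B : 1  ->  !A || B */
            assert ((a ? 1 : 0) == a);          /* A ? 1 : 0  ->  A       */
            assert ((a ? 0 : 1) == !a);         /* A ? 0 : 1  ->  !A      */
          }
      return 0;
    }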
13357 : :
13358 : : /* Gets the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
13359 : : of an array (or vector). *CTOR_IDX, if non-NULL, is updated with the
13360 : : constructor element index of the value returned. If the element is
13361 : : not found, NULL_TREE is returned and *CTOR_IDX is updated to
13362 : : the index of the element after the ACCESS_INDEX position (which
13363 : : may be outside of the CTOR array). */
13364 : :
13365 : : tree
13366 : 682742 : get_array_ctor_element_at_index (tree ctor, offset_int access_index,
13367 : : unsigned *ctor_idx)
13368 : : {
13369 : 682742 : tree index_type = NULL_TREE;
13370 : 682742 : signop index_sgn = UNSIGNED;
13371 : 682742 : offset_int low_bound = 0;
13372 : :
13373 : 682742 : if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
13374 : : {
13375 : 682742 : tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
13376 : 682742 : if (domain_type && TYPE_MIN_VALUE (domain_type))
13377 : : {
13378 : : /* Static constructors for variably sized objects make no sense. */
13379 : 682742 : gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
13380 : 682742 : index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
13381 : : /* ??? When it is obvious that the range is signed, treat it so. */
13382 : 682742 : if (TYPE_UNSIGNED (index_type)
13383 : 341340 : && TYPE_MAX_VALUE (domain_type)
13384 : 1024048 : && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type),
13385 : 341306 : TYPE_MIN_VALUE (domain_type)))
13386 : : {
13387 : 0 : index_sgn = SIGNED;
13388 : 0 : low_bound
13389 : 0 : = offset_int::from (wi::to_wide (TYPE_MIN_VALUE (domain_type)),
13390 : : SIGNED);
13391 : : }
13392 : : else
13393 : : {
13394 : 682742 : index_sgn = TYPE_SIGN (index_type);
13395 : 682742 : low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
13396 : : }
13397 : : }
13398 : : }
13399 : :
13400 : 682742 : if (index_type)
13401 : 682742 : access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
13402 : : index_sgn);
13403 : :
13404 : 682742 : offset_int index = low_bound;
13405 : 682742 : if (index_type)
13406 : 682742 : index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
13407 : :
13408 : 682742 : offset_int max_index = index;
13409 : 682742 : unsigned cnt;
13410 : 682742 : tree cfield, cval;
13411 : 682742 : bool first_p = true;
13412 : :
13413 : 11306586 : FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
13414 : : {
13415 : : /* Array constructor might explicitly set index, or specify a range,
13416 : : or leave index NULL meaning that it is next index after previous
13417 : : one. */
13418 : 11305327 : if (cfield)
13419 : : {
13420 : 3198971 : if (TREE_CODE (cfield) == INTEGER_CST)
13421 : 6396522 : max_index = index
13422 : 3198261 : = offset_int::from (wi::to_wide (cfield), index_sgn);
13423 : : else
13424 : : {
13425 : 710 : gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
13426 : 710 : index = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 0)),
13427 : : index_sgn);
13428 : 710 : max_index
13429 : 710 : = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 1)),
13430 : : index_sgn);
13431 : 710 : gcc_checking_assert (wi::le_p (index, max_index, index_sgn));
13432 : : }
13433 : : }
13434 : 8106356 : else if (!first_p)
13435 : : {
13436 : 7853213 : index = max_index + 1;
13437 : 7853213 : if (index_type)
13438 : 7853213 : index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
13439 : 7853213 : gcc_checking_assert (wi::gt_p (index, max_index, index_sgn));
13440 : 7853213 : max_index = index;
13441 : : }
13442 : : else
13443 : : first_p = false;
13444 : :
13445 : 11305327 : if (TREE_CODE (cval) == RAW_DATA_CST)
13446 : 1678 : max_index += RAW_DATA_LENGTH (cval) - 1;
13447 : :
13448 : : /* Do we have match? */
13449 : 11305327 : if (wi::cmp (access_index, index, index_sgn) >= 0)
13450 : : {
13451 : 11305036 : if (wi::cmp (access_index, max_index, index_sgn) <= 0)
13452 : : {
13453 : 681364 : if (ctor_idx)
13454 : 681364 : *ctor_idx = cnt;
13455 : 681364 : return cval;
13456 : : }
13457 : : }
13458 : 291 : else if (in_gimple_form)
13459 : : /* We're past the element we search for. Note during parsing
13460 : : the elements might not be sorted.
13461 : : ??? We should use a binary search and a flag on the
13462 : : CONSTRUCTOR as to whether elements are sorted in declaration
13463 : : order. */
13464 : : break;
13465 : : }
13466 : 1378 : if (ctor_idx)
13467 : 1378 : *ctor_idx = cnt;
13468 : : return NULL_TREE;
13469 : : }
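The three index shapes the loop above distinguishes correspond, roughly, to the following C-level initializer forms. This is an editor's sketch with a hypothetical array; the exact CONSTRUCTOR layout is up to the front end:

    /* Illustrates the three cfield shapes handled above.  */
    static const int a[10] = {
      [2] = 5,          /* explicit index: cfield is an INTEGER_CST             */
      7,                /* no designator:  cfield is NULL, "next index" (a[3])  */
      [5 ... 8] = 9,    /* GNU range designator: cfield is a RANGE_EXPR         */
    };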
13470 : :
13471 : : /* Perform constant folding and related simplification of EXPR.
13472 : : The related simplifications include x*1 => x, x*0 => 0, etc.,
13473 : : and application of the associative law.
13474 : : NOP_EXPR conversions may be removed freely (as long as we
13475 : : are careful not to change the type of the overall expression).
13476 : : We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13477 : : but we can constant-fold them if they have constant operands. */
13478 : :
13479 : : #ifdef ENABLE_FOLD_CHECKING
13480 : : # define fold(x) fold_1 (x)
13481 : : static tree fold_1 (tree);
13482 : : static
13483 : : #endif
13484 : : tree
13485 : 1251967453 : fold (tree expr)
13486 : : {
13487 : 1252083484 : const tree t = expr;
13488 : 1252083484 : enum tree_code code = TREE_CODE (t);
13489 : 1252083484 : enum tree_code_class kind = TREE_CODE_CLASS (code);
13490 : 1252083484 : tree tem;
13491 : 1252083484 : location_t loc = EXPR_LOCATION (expr);
13492 : :
13493 : : /* Return right away if a constant. */
13494 : 1252083484 : if (kind == tcc_constant)
13495 : : return t;
13496 : :
13497 : : /* CALL_EXPR-like objects with variable numbers of operands are
13498 : : treated specially. */
13499 : 1136069670 : if (kind == tcc_vl_exp)
13500 : : {
13501 : 154930484 : if (code == CALL_EXPR)
13502 : : {
13503 : 154930186 : tem = fold_call_expr (loc, expr, false);
13504 : 307441062 : return tem ? tem : expr;
13505 : : }
13506 : : return expr;
13507 : : }
13508 : :
13509 : 981139186 : if (IS_EXPR_CODE_CLASS (kind))
13510 : : {
13511 : 979276323 : tree type = TREE_TYPE (t);
13512 : 979276323 : tree op0, op1, op2;
13513 : :
13514 : 979276323 : switch (TREE_CODE_LENGTH (code))
13515 : : {
13516 : 890966129 : case 1:
13517 : 890966129 : op0 = TREE_OPERAND (t, 0);
13518 : 890966129 : tem = fold_unary_loc (loc, code, type, op0);
13519 : 1536972531 : return tem ? tem : expr;
13520 : 79718361 : case 2:
13521 : 79718361 : op0 = TREE_OPERAND (t, 0);
13522 : 79718361 : op1 = TREE_OPERAND (t, 1);
13523 : 79718361 : tem = fold_binary_loc (loc, code, type, op0, op1);
13524 : 151534379 : return tem ? tem : expr;
13525 : 4128069 : case 3:
13526 : 4128069 : op0 = TREE_OPERAND (t, 0);
13527 : 4128069 : op1 = TREE_OPERAND (t, 1);
13528 : 4128069 : op2 = TREE_OPERAND (t, 2);
13529 : 4128069 : tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13530 : 8060017 : return tem ? tem : expr;
13531 : : default:
13532 : : break;
13533 : : }
13534 : : }
13535 : :
13536 : 6326627 : switch (code)
13537 : : {
13538 : 4361055 : case ARRAY_REF:
13539 : 4361055 : {
13540 : 4361055 : tree op0 = TREE_OPERAND (t, 0);
13541 : 4361055 : tree op1 = TREE_OPERAND (t, 1);
13542 : :
13543 : 4361055 : if (TREE_CODE (op1) == INTEGER_CST
13544 : 2851991 : && TREE_CODE (op0) == CONSTRUCTOR
13545 : 4361062 : && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13546 : : {
13547 : 7 : unsigned int idx;
13548 : 7 : tree val
13549 : 7 : = get_array_ctor_element_at_index (op0, wi::to_offset (op1),
13550 : : &idx);
13551 : 7 : if (val)
13552 : : {
13553 : 7 : if (TREE_CODE (val) != RAW_DATA_CST)
13554 : : return val;
13555 : 2 : if (CONSTRUCTOR_ELT (op0, idx)->index == NULL_TREE
13556 : 2 : || (TREE_CODE (CONSTRUCTOR_ELT (op0, idx)->index)
13557 : : != INTEGER_CST))
13558 : : return t;
13559 : 2 : offset_int o
13560 : 2 : = (wi::to_offset (op1)
13561 : 2 : - wi::to_offset (CONSTRUCTOR_ELT (op0, idx)->index));
13562 : 2 : gcc_checking_assert (o < RAW_DATA_LENGTH (val));
13563 : 2 : return build_int_cst (TREE_TYPE (val),
13564 : 2 : RAW_DATA_UCHAR_ELT (val, o.to_uhwi ()));
13565 : : }
13566 : : }
13567 : :
13568 : : return t;
13569 : : }
13570 : :
13571 : : /* Return a VECTOR_CST if possible. */
13572 : 105083 : case CONSTRUCTOR:
13573 : 105083 : {
13574 : 105083 : tree type = TREE_TYPE (t);
13575 : 105083 : if (TREE_CODE (type) != VECTOR_TYPE)
13576 : : return t;
13577 : :
13578 : : unsigned i;
13579 : : tree val;
13580 : 247778 : FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
13581 : 214364 : if (! CONSTANT_CLASS_P (val))
13582 : : return t;
13583 : :
13584 : 33414 : return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
13585 : : }
13586 : :
13587 : 116031 : case CONST_DECL:
13588 : 116031 : return fold (DECL_INITIAL (t));
13589 : :
13590 : : default:
13591 : : return t;
13592 : : } /* switch (code) */
13593 : : }
13594 : :
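As one concrete source-level trigger for the CONSTRUCTOR case above: with the GNU vector extension, a braced initializer whose elements are all constants can reach fold as a CONSTRUCTOR of INTEGER_CSTs, which the code collapses into a single VECTOR_CST. The snippet is an editor's sketch; the exact trees built are front-end dependent:

    /* All elements are constants, so the CONSTRUCTOR can become a VECTOR_CST.  */
    typedef int v4si __attribute__ ((vector_size (16)));
    static v4si four = { 1, 2, 3, 4 };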
13595 : : #ifdef ENABLE_FOLD_CHECKING
13596 : : #undef fold
13597 : :
13598 : : static void fold_checksum_tree (const_tree, struct md5_ctx *,
13599 : : hash_table<nofree_ptr_hash<const tree_node> > *);
13600 : : static void fold_check_failed (const_tree, const_tree);
13601 : : void print_fold_checksum (const_tree);
13602 : :
13603 : : /* When --enable-checking=fold, compute a digest of expr before
13604 : : and after the actual fold call to verify that fold did not
13605 : : accidentally change the original expr. */
13606 : :
13607 : : tree
13608 : : fold (tree expr)
13609 : : {
13610 : : tree ret;
13611 : : struct md5_ctx ctx;
13612 : : unsigned char checksum_before[16], checksum_after[16];
13613 : : hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13614 : :
13615 : : md5_init_ctx (&ctx);
13616 : : fold_checksum_tree (expr, &ctx, &ht);
13617 : : md5_finish_ctx (&ctx, checksum_before);
13618 : : ht.empty ();
13619 : :
13620 : : ret = fold_1 (expr);
13621 : :
13622 : : md5_init_ctx (&ctx);
13623 : : fold_checksum_tree (expr, &ctx, &ht);
13624 : : md5_finish_ctx (&ctx, checksum_after);
13625 : :
13626 : : if (memcmp (checksum_before, checksum_after, 16))
13627 : : fold_check_failed (expr, ret);
13628 : :
13629 : : return ret;
13630 : : }
13631 : :
13632 : : void
13633 : : print_fold_checksum (const_tree expr)
13634 : : {
13635 : : struct md5_ctx ctx;
13636 : : unsigned char checksum[16], cnt;
13637 : : hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13638 : :
13639 : : md5_init_ctx (&ctx);
13640 : : fold_checksum_tree (expr, &ctx, &ht);
13641 : : md5_finish_ctx (&ctx, checksum);
13642 : : for (cnt = 0; cnt < 16; ++cnt)
13643 : : fprintf (stderr, "%02x", checksum[cnt]);
13644 : : putc ('\n', stderr);
13645 : : }
13646 : :
13647 : : static void
13648 : : fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13649 : : {
13650 : : internal_error ("fold check: original tree changed by fold");
13651 : : }
13652 : :
13653 : : static void
13654 : : fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
13655 : : hash_table<nofree_ptr_hash <const tree_node> > *ht)
13656 : : {
13657 : : const tree_node **slot;
13658 : : enum tree_code code;
13659 : : union tree_node *buf;
13660 : : int i, len;
13661 : :
13662 : : recursive_label:
13663 : : if (expr == NULL)
13664 : : return;
13665 : : slot = ht->find_slot (expr, INSERT);
13666 : : if (*slot != NULL)
13667 : : return;
13668 : : *slot = expr;
13669 : : code = TREE_CODE (expr);
13670 : : if (TREE_CODE_CLASS (code) == tcc_declaration
13671 : : && HAS_DECL_ASSEMBLER_NAME_P (expr))
13672 : : {
13673 : : /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
13674 : : size_t sz = tree_size (expr);
13675 : : buf = XALLOCAVAR (union tree_node, sz);
13676 : : memcpy ((char *) buf, expr, sz);
13677 : : SET_DECL_ASSEMBLER_NAME ((tree) buf, NULL);
13678 : : buf->decl_with_vis.symtab_node = NULL;
13679 : : buf->base.nowarning_flag = 0;
13680 : : expr = (tree) buf;
13681 : : }
13682 : : else if (TREE_CODE_CLASS (code) == tcc_type
13683 : : && (TYPE_POINTER_TO (expr)
13684 : : || TYPE_REFERENCE_TO (expr)
13685 : : || TYPE_CACHED_VALUES_P (expr)
13686 : : || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
13687 : : || TYPE_NEXT_VARIANT (expr)
13688 : : || TYPE_ALIAS_SET_KNOWN_P (expr)))
13689 : : {
13690 : : /* Allow these fields to be modified. */
13691 : : tree tmp;
13692 : : size_t sz = tree_size (expr);
13693 : : buf = XALLOCAVAR (union tree_node, sz);
13694 : : memcpy ((char *) buf, expr, sz);
13695 : : expr = tmp = (tree) buf;
13696 : : TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13697 : : TYPE_POINTER_TO (tmp) = NULL;
13698 : : TYPE_REFERENCE_TO (tmp) = NULL;
13699 : : TYPE_NEXT_VARIANT (tmp) = NULL;
13700 : : TYPE_ALIAS_SET (tmp) = -1;
13701 : : if (TYPE_CACHED_VALUES_P (tmp))
13702 : : {
13703 : : TYPE_CACHED_VALUES_P (tmp) = 0;
13704 : : TYPE_CACHED_VALUES (tmp) = NULL;
13705 : : }
13706 : : }
13707 : : else if (warning_suppressed_p (expr) && (DECL_P (expr) || EXPR_P (expr)))
13708 : : {
13709 : : /* Allow the no-warning bit to be set. Perhaps we shouldn't allow
13710 : : that and change builtins.cc etc. instead - see PR89543. */
13711 : : size_t sz = tree_size (expr);
13712 : : buf = XALLOCAVAR (union tree_node, sz);
13713 : : memcpy ((char *) buf, expr, sz);
13714 : : buf->base.nowarning_flag = 0;
13715 : : expr = (tree) buf;
13716 : : }
13717 : : md5_process_bytes (expr, tree_size (expr), ctx);
13718 : : if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
13719 : : fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13720 : : if (TREE_CODE_CLASS (code) != tcc_type
13721 : : && TREE_CODE_CLASS (code) != tcc_declaration
13722 : : && code != TREE_LIST
13723 : : && code != SSA_NAME
13724 : : && CODE_CONTAINS_STRUCT (code, TS_COMMON))
13725 : : fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13726 : : switch (TREE_CODE_CLASS (code))
13727 : : {
13728 : : case tcc_constant:
13729 : : switch (code)
13730 : : {
13731 : : case STRING_CST:
13732 : : md5_process_bytes (TREE_STRING_POINTER (expr),
13733 : : TREE_STRING_LENGTH (expr), ctx);
13734 : : break;
13735 : : case COMPLEX_CST:
13736 : : fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13737 : : fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13738 : : break;
13739 : : case VECTOR_CST:
13740 : : len = vector_cst_encoded_nelts (expr);
13741 : : for (i = 0; i < len; ++i)
13742 : : fold_checksum_tree (VECTOR_CST_ENCODED_ELT (expr, i), ctx, ht);
13743 : : break;
13744 : : default:
13745 : : break;
13746 : : }
13747 : : break;
13748 : : case tcc_exceptional:
13749 : : switch (code)
13750 : : {
13751 : : case TREE_LIST:
13752 : : fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13753 : : fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13754 : : expr = TREE_CHAIN (expr);
13755 : : goto recursive_label;
13756 : : break;
13757 : : case TREE_VEC:
13758 : : for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13759 : : fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13760 : : break;
13761 : : default:
13762 : : break;
13763 : : }
13764 : : break;
13765 : : case tcc_expression:
13766 : : case tcc_reference:
13767 : : case tcc_comparison:
13768 : : case tcc_unary:
13769 : : case tcc_binary:
13770 : : case tcc_statement:
13771 : : case tcc_vl_exp:
13772 : : len = TREE_OPERAND_LENGTH (expr);
13773 : : for (i = 0; i < len; ++i)
13774 : : fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13775 : : break;
13776 : : case tcc_declaration:
13777 : : fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13778 : : fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13779 : : if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13780 : : {
13781 : : fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13782 : : fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13783 : : fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13784 : : fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13785 : : fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13786 : : }
13787 : :
13788 : : if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13789 : : {
13790 : : if (TREE_CODE (expr) == FUNCTION_DECL)
13791 : : {
13792 : : fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13793 : : fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
13794 : : }
13795 : : fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13796 : : }
13797 : : break;
13798 : : case tcc_type:
13799 : : if (TREE_CODE (expr) == ENUMERAL_TYPE)
13800 : : fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13801 : : fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13802 : : fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13803 : : fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13804 : : fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13805 : : if (INTEGRAL_TYPE_P (expr)
13806 : : || SCALAR_FLOAT_TYPE_P (expr))
13807 : : {
13808 : : fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13809 : : fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13810 : : }
13811 : : fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13812 : : if (RECORD_OR_UNION_TYPE_P (expr))
13813 : : fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13814 : : fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13815 : : break;
13816 : : default:
13817 : : break;
13818 : : }
13819 : : }
13820 : :
13821 : : /* Helper function for outputting the checksum of a tree T. When
13822 : : debugging with gdb, you can "define mynext" to be "next" followed
13823 : : by "call debug_fold_checksum (op0)", then just trace down till the
13824 : : outputs differ. */
13825 : :
13826 : : DEBUG_FUNCTION void
13827 : : debug_fold_checksum (const_tree t)
13828 : : {
13829 : : int i;
13830 : : unsigned char checksum[16];
13831 : : struct md5_ctx ctx;
13832 : : hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13833 : :
13834 : : md5_init_ctx (&ctx);
13835 : : fold_checksum_tree (t, &ctx, &ht);
13836 : : md5_finish_ctx (&ctx, checksum);
13837 : : ht.empty ();
13838 : :
13839 : : for (i = 0; i < 16; i++)
13840 : : fprintf (stderr, "%d ", checksum[i]);
13841 : :
13842 : : fprintf (stderr, "\n");
13843 : : }
13844 : :
13845 : : #endif
13846 : :
13847 : : /* Fold a unary tree expression with code CODE of type TYPE with an
13848 : : operand OP0. LOC is the location of the resulting expression.
13849 : : Return a folded expression if successful. Otherwise, return a tree
13850 : : expression with code CODE of type TYPE with an operand OP0. */
13851 : :
13852 : : tree
13853 : 743211180 : fold_build1_loc (location_t loc,
13854 : : enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13855 : : {
13856 : 743211180 : tree tem;
13857 : : #ifdef ENABLE_FOLD_CHECKING
13858 : : unsigned char checksum_before[16], checksum_after[16];
13859 : : struct md5_ctx ctx;
13860 : : hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13861 : :
13862 : : md5_init_ctx (&ctx);
13863 : : fold_checksum_tree (op0, &ctx, &ht);
13864 : : md5_finish_ctx (&ctx, checksum_before);
13865 : : ht.empty ();
13866 : : #endif
13867 : :
13868 : 743211180 : tem = fold_unary_loc (loc, code, type, op0);
13869 : 743211180 : if (!tem)
13870 : 399483081 : tem = build1_loc (loc, code, type, op0 PASS_MEM_STAT);
13871 : :
13872 : : #ifdef ENABLE_FOLD_CHECKING
13873 : : md5_init_ctx (&ctx);
13874 : : fold_checksum_tree (op0, &ctx, &ht);
13875 : : md5_finish_ctx (&ctx, checksum_after);
13876 : :
13877 : : if (memcmp (checksum_before, checksum_after, 16))
13878 : : fold_check_failed (op0, tem);
13879 : : #endif
13880 : 743211180 : return tem;
13881 : : }
13882 : :
13883 : : /* Fold a binary tree expression with code CODE of type TYPE with
13884 : : operands OP0 and OP1. LOC is the location of the resulting
13885 : : expression. Return a folded expression if successful. Otherwise,
13886 : : return a tree expression with code CODE of type TYPE with operands
13887 : : OP0 and OP1. */
13888 : :
13889 : : tree
13890 : 522308759 : fold_build2_loc (location_t loc,
13891 : : enum tree_code code, tree type, tree op0, tree op1
13892 : : MEM_STAT_DECL)
13893 : : {
13894 : 522308759 : tree tem;
13895 : : #ifdef ENABLE_FOLD_CHECKING
13896 : : unsigned char checksum_before_op0[16],
13897 : : checksum_before_op1[16],
13898 : : checksum_after_op0[16],
13899 : : checksum_after_op1[16];
13900 : : struct md5_ctx ctx;
13901 : : hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13902 : :
13903 : : md5_init_ctx (&ctx);
13904 : : fold_checksum_tree (op0, &ctx, &ht);
13905 : : md5_finish_ctx (&ctx, checksum_before_op0);
13906 : : ht.empty ();
13907 : :
13908 : : md5_init_ctx (&ctx);
13909 : : fold_checksum_tree (op1, &ctx, &ht);
13910 : : md5_finish_ctx (&ctx, checksum_before_op1);
13911 : : ht.empty ();
13912 : : #endif
13913 : :
13914 : 522308759 : tem = fold_binary_loc (loc, code, type, op0, op1);
13915 : 522308759 : if (!tem)
13916 : 291910161 : tem = build2_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
13917 : :
13918 : : #ifdef ENABLE_FOLD_CHECKING
13919 : : md5_init_ctx (&ctx);
13920 : : fold_checksum_tree (op0, &ctx, &ht);
13921 : : md5_finish_ctx (&ctx, checksum_after_op0);
13922 : : ht.empty ();
13923 : :
13924 : : if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13925 : : fold_check_failed (op0, tem);
13926 : :
13927 : : md5_init_ctx (&ctx);
13928 : : fold_checksum_tree (op1, &ctx, &ht);
13929 : : md5_finish_ctx (&ctx, checksum_after_op1);
13930 : :
13931 : : if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13932 : : fold_check_failed (op1, tem);
13933 : : #endif
13934 : 522308759 : return tem;
13935 : : }
13936 : :
13937 : : /* Fold a ternary tree expression with code CODE of type TYPE with
13938 : : operands OP0, OP1, and OP2. Return a folded expression if
13939 : : successful. Otherwise, return a tree expression with code CODE of
13940 : : type TYPE with operands OP0, OP1, and OP2. */
13941 : :
13942 : : tree
13943 : 23200747 : fold_build3_loc (location_t loc, enum tree_code code, tree type,
13944 : : tree op0, tree op1, tree op2 MEM_STAT_DECL)
13945 : : {
13946 : 23200747 : tree tem;
13947 : : #ifdef ENABLE_FOLD_CHECKING
13948 : : unsigned char checksum_before_op0[16],
13949 : : checksum_before_op1[16],
13950 : : checksum_before_op2[16],
13951 : : checksum_after_op0[16],
13952 : : checksum_after_op1[16],
13953 : : checksum_after_op2[16];
13954 : : struct md5_ctx ctx;
13955 : : hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13956 : :
13957 : : md5_init_ctx (&ctx);
13958 : : fold_checksum_tree (op0, &ctx, &ht);
13959 : : md5_finish_ctx (&ctx, checksum_before_op0);
13960 : : ht.empty ();
13961 : :
13962 : : md5_init_ctx (&ctx);
13963 : : fold_checksum_tree (op1, &ctx, &ht);
13964 : : md5_finish_ctx (&ctx, checksum_before_op1);
13965 : : ht.empty ();
13966 : :
13967 : : md5_init_ctx (&ctx);
13968 : : fold_checksum_tree (op2, &ctx, &ht);
13969 : : md5_finish_ctx (&ctx, checksum_before_op2);
13970 : : ht.empty ();
13971 : : #endif
13972 : :
13973 : 23200747 : gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
13974 : 23200747 : tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13975 : 23200747 : if (!tem)
13976 : 20848885 : tem = build3_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
13977 : :
13978 : : #ifdef ENABLE_FOLD_CHECKING
13979 : : md5_init_ctx (&ctx);
13980 : : fold_checksum_tree (op0, &ctx, &ht);
13981 : : md5_finish_ctx (&ctx, checksum_after_op0);
13982 : : ht.empty ();
13983 : :
13984 : : if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13985 : : fold_check_failed (op0, tem);
13986 : :
13987 : : md5_init_ctx (&ctx);
13988 : : fold_checksum_tree (op1, &ctx, &ht);
13989 : : md5_finish_ctx (&ctx, checksum_after_op1);
13990 : : ht.empty ();
13991 : :
13992 : : if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13993 : : fold_check_failed (op1, tem);
13994 : :
13995 : : md5_init_ctx (&ctx);
13996 : : fold_checksum_tree (op2, &ctx, &ht);
13997 : : md5_finish_ctx (&ctx, checksum_after_op2);
13998 : :
13999 : : if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14000 : : fold_check_failed (op2, tem);
14001 : : #endif
14002 : 23200747 : return tem;
14003 : : }
14004 : :
14005 : : /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14006 : : arguments in ARGARRAY, and a null static chain.
14007 : : Return a folded expression if successful. Otherwise, return a CALL_EXPR
14008 : : of type TYPE from the given operands as constructed by build_call_array. */
14009 : :
14010 : : tree
14011 : 49722660 : fold_build_call_array_loc (location_t loc, tree type, tree fn,
14012 : : int nargs, tree *argarray)
14013 : : {
14014 : 49722660 : tree tem;
14015 : : #ifdef ENABLE_FOLD_CHECKING
14016 : : unsigned char checksum_before_fn[16],
14017 : : checksum_before_arglist[16],
14018 : : checksum_after_fn[16],
14019 : : checksum_after_arglist[16];
14020 : : struct md5_ctx ctx;
14021 : : hash_table<nofree_ptr_hash<const tree_node> > ht (32);
14022 : : int i;
14023 : :
14024 : : md5_init_ctx (&ctx);
14025 : : fold_checksum_tree (fn, &ctx, &ht);
14026 : : md5_finish_ctx (&ctx, checksum_before_fn);
14027 : : ht.empty ();
14028 : :
14029 : : md5_init_ctx (&ctx);
14030 : : for (i = 0; i < nargs; i++)
14031 : : fold_checksum_tree (argarray[i], &ctx, &ht);
14032 : : md5_finish_ctx (&ctx, checksum_before_arglist);
14033 : : ht.empty ();
14034 : : #endif
14035 : :
14036 : 49722660 : tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14037 : 49722660 : if (!tem)
14038 : 48154111 : tem = build_call_array_loc (loc, type, fn, nargs, argarray);
14039 : :
14040 : : #ifdef ENABLE_FOLD_CHECKING
14041 : : md5_init_ctx (&ctx);
14042 : : fold_checksum_tree (fn, &ctx, &ht);
14043 : : md5_finish_ctx (&ctx, checksum_after_fn);
14044 : : ht.empty ();
14045 : :
14046 : : if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14047 : : fold_check_failed (fn, tem);
14048 : :
14049 : : md5_init_ctx (&ctx);
14050 : : for (i = 0; i < nargs; i++)
14051 : : fold_checksum_tree (argarray[i], &ctx, &ht);
14052 : : md5_finish_ctx (&ctx, checksum_after_arglist);
14053 : :
14054 : : if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14055 : : fold_check_failed (NULL_TREE, tem);
14056 : : #endif
14057 : 49722660 : return tem;
14058 : : }
14059 : :
14060 : : /* Perform constant folding and related simplification of initializer
14061 : : expression EXPR. The initializer variants below behave identically to
14062 : : fold and the "fold_buildN" routines, but ignore potential run-time traps
14063 : : and exceptions that fold must otherwise preserve. */
14063 : :
14064 : : #define START_FOLD_INIT \
14065 : : int saved_signaling_nans = flag_signaling_nans;\
14066 : : int saved_trapping_math = flag_trapping_math;\
14067 : : int saved_rounding_math = flag_rounding_math;\
14068 : : int saved_trapv = flag_trapv;\
14069 : : int saved_folding_initializer = folding_initializer;\
14070 : : flag_signaling_nans = 0;\
14071 : : flag_trapping_math = 0;\
14072 : : flag_rounding_math = 0;\
14073 : : flag_trapv = 0;\
14074 : : folding_initializer = 1;
14075 : :
14076 : : #define END_FOLD_INIT \
14077 : : flag_signaling_nans = saved_signaling_nans;\
14078 : : flag_trapping_math = saved_trapping_math;\
14079 : : flag_rounding_math = saved_rounding_math;\
14080 : : flag_trapv = saved_trapv;\
14081 : : folding_initializer = saved_folding_initializer;
14082 : :
14083 : : tree
14084 : 543577 : fold_init (tree expr)
14085 : : {
14086 : 543577 : tree result;
14087 : 543577 : START_FOLD_INIT;
14088 : :
14089 : 543577 : result = fold (expr);
14090 : :
14091 : 543577 : END_FOLD_INIT;
14092 : 543577 : return result;
14093 : : }
14094 : :
14095 : : tree
14096 : 2988145 : fold_build1_initializer_loc (location_t loc, enum tree_code code,
14097 : : tree type, tree op)
14098 : : {
14099 : 2988145 : tree result;
14100 : 2988145 : START_FOLD_INIT;
14101 : :
14102 : 2988145 : result = fold_build1_loc (loc, code, type, op);
14103 : :
14104 : 2988145 : END_FOLD_INIT;
14105 : 2988145 : return result;
14106 : : }
14107 : :
14108 : : tree
14109 : 50445 : fold_build2_initializer_loc (location_t loc, enum tree_code code,
14110 : : tree type, tree op0, tree op1)
14111 : : {
14112 : 50445 : tree result;
14113 : 50445 : START_FOLD_INIT;
14114 : :
14115 : 50445 : result = fold_build2_loc (loc, code, type, op0, op1);
14116 : :
14117 : 50445 : END_FOLD_INIT;
14118 : 50445 : return result;
14119 : : }
14120 : :
14121 : : tree
14122 : 3487 : fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14123 : : int nargs, tree *argarray)
14124 : : {
14125 : 3487 : tree result;
14126 : 3487 : START_FOLD_INIT;
14127 : :
14128 : 3487 : result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14129 : :
14130 : 3487 : END_FOLD_INIT;
14131 : 3487 : return result;
14132 : : }
14133 : :
14134 : : tree
14135 : 16996251 : fold_binary_initializer_loc (location_t loc, tree_code code, tree type,
14136 : : tree lhs, tree rhs)
14137 : : {
14138 : 16996251 : tree result;
14139 : 16996251 : START_FOLD_INIT;
14140 : :
14141 : 16996251 : result = fold_binary_loc (loc, code, type, lhs, rhs);
14142 : :
14143 : 16996251 : END_FOLD_INIT;
14144 : 16996251 : return result;
14145 : : }
14146 : :
14147 : : #undef START_FOLD_INIT
14148 : : #undef END_FOLD_INIT
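The START_FOLD_INIT/END_FOLD_INIT pair clears flags such as flag_rounding_math and flag_trapping_math because an initializer has to be reduced to a compile-time constant even under options that normally make fold preserve run-time floating-point behaviour. A hedged source-level example of the kind of initializer this serves:

    /* Must be folded to a constant at translation time even when options
       like -frounding-math would otherwise keep fold from evaluating
       floating-point expressions.  */
    static const double third = 1.0 / 3.0;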
14149 : :
14150 : : /* Determine if the first argument is a multiple of the second argument.
14151 : : Return false if it is not, or if we cannot easily determine it to be.
14152 : :
14153 : : An example of the sort of thing we care about (at this point; this routine
14154 : : could surely be made more general, and expanded to do what the *_DIV_EXPR's
14155 : : fold cases do now) is discovering that
14156 : :
14157 : : SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14158 : :
14159 : : is a multiple of
14160 : :
14161 : : SAVE_EXPR (J * 8)
14162 : :
14163 : : when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14164 : :
14165 : : This code also handles discovering that
14166 : :
14167 : : SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14168 : :
14169 : : is a multiple of 8 so we don't have to worry about dealing with a
14170 : : possible remainder.
14171 : :
14172 : : Note that we *look* inside a SAVE_EXPR only to determine how it was
14173 : : calculated; it is not safe for fold to do much of anything else with the
14174 : : internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14175 : : at run time. For example, the latter example above *cannot* be implemented
14176 : : as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14177 : : evaluation time of the original SAVE_EXPR is not necessarily the same at
14178 : : the time the new expression is evaluated. The only optimization of this
14179 : : sort that would be valid is changing
14180 : :
14181 : : SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14182 : :
14183 : : divided by 8 to
14184 : :
14185 : : SAVE_EXPR (I) * SAVE_EXPR (J)
14186 : :
14187 : : (where the same SAVE_EXPR (J) is used in the original and the
14188 : : transformed version).
14189 : :
14190 : : NOWRAP specifies whether all outer operations in TYPE should
14191 : : be considered not wrapping. Any type conversion within TOP acts
14192 : : as a barrier and we will fall back to NOWRAP being false.
14193 : : NOWRAP is mostly used to treat expressions in TYPE_SIZE and friends
14194 : : as not wrapping even though they are generally using unsigned arithmetic. */
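A worked example of why the NOWRAP and power-of-two caveats below matter, as an editor's sketch in standalone C rather than GCC code: once unsigned arithmetic wraps, "TOP is syntactically BOTTOM times something" no longer guarantees divisibility, except when BOTTOM is a power of two, which always divides the 2**N modulus.

    #include <assert.h>
    #include <stdint.h>

    int main (void)
    {
      /* 6 * a is a multiple of 6 as a mathematical integer, but after the
         32-bit wrap-around it is not: 6 * 0x2AAAAAAB == 0x100000002, which
         truncates to 2.  A power-of-two divisor survives the wrap because
         2**k divides the 2**32 modulus.  */
      uint32_t a = 0x2AAAAAABu;
      uint32_t p = 6u * a;
      assert (p == 2u);
      assert (p % 6u != 0);   /* no longer a multiple of 6 ...        */
      assert (p % 2u == 0);   /* ... but still of the power of two 2  */
      return 0;
    }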
14195 : :
14196 : : bool
14197 : 1412971 : multiple_of_p (tree type, const_tree top, const_tree bottom, bool nowrap)
14198 : : {
14199 : 1412971 : gimple *stmt;
14200 : 1412971 : tree op1, op2;
14201 : :
14202 : 1412971 : if (operand_equal_p (top, bottom, 0))
14203 : : return true;
14204 : :
14205 : 979210 : if (TREE_CODE (type) != INTEGER_TYPE)
14206 : : return false;
14207 : :
14208 : 979205 : switch (TREE_CODE (top))
14209 : : {
14210 : 642 : case BIT_AND_EXPR:
14211 : : /* Bitwise and provides a power of two multiple. If the mask is
14212 : : a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14213 : 642 : if (!integer_pow2p (bottom))
14214 : : return false;
14215 : 642 : return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom, nowrap)
14216 : 642 : || multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap));
14217 : :
14218 : 345793 : case MULT_EXPR:
14219 : : /* If the multiplication can wrap we cannot recurse further unless
14220 : : the bottom is a power of two which is where wrapping does not
14221 : : matter. */
14222 : 345793 : if (!nowrap
14223 : 13803 : && !TYPE_OVERFLOW_UNDEFINED (type)
14224 : 350437 : && !integer_pow2p (bottom))
14225 : : return false;
14226 : 345379 : if (TREE_CODE (bottom) == INTEGER_CST)
14227 : : {
14228 : 343671 : op1 = TREE_OPERAND (top, 0);
14229 : 343671 : op2 = TREE_OPERAND (top, 1);
14230 : 343671 : if (TREE_CODE (op1) == INTEGER_CST)
14231 : 0 : std::swap (op1, op2);
14232 : 343671 : if (TREE_CODE (op2) == INTEGER_CST)
14233 : : {
14234 : 333719 : if (multiple_of_p (type, op2, bottom, nowrap))
14235 : : return true;
14236 : : /* Handle multiple_of_p ((x * 2 + 2) * 4, 8). */
14237 : 3374 : if (multiple_of_p (type, bottom, op2, nowrap))
14238 : : {
14239 : 2272 : widest_int w = wi::sdiv_trunc (wi::to_widest (bottom),
14240 : 2272 : wi::to_widest (op2));
14241 : 2272 : if (wi::fits_to_tree_p (w, TREE_TYPE (bottom)))
14242 : : {
14243 : 2272 : op2 = wide_int_to_tree (TREE_TYPE (bottom), w);
14244 : 2272 : return multiple_of_p (type, op1, op2, nowrap);
14245 : : }
14246 : 2272 : }
14247 : 1102 : return multiple_of_p (type, op1, bottom, nowrap);
14248 : : }
14249 : : }
14250 : 11660 : return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom, nowrap)
14251 : 11660 : || multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap));
14252 : :
14253 : 284 : case LSHIFT_EXPR:
14254 : : /* Handle X << CST as X * (1 << CST) and only process the constant. */
14255 : 284 : if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14256 : : {
14257 : 284 : op1 = TREE_OPERAND (top, 1);
14258 : 284 : if (wi::to_widest (op1) < TYPE_PRECISION (type))
14259 : : {
14260 : 284 : wide_int mul_op
14261 : 284 : = wi::one (TYPE_PRECISION (type)) << wi::to_wide (op1);
14262 : 568 : return multiple_of_p (type,
14263 : 568 : wide_int_to_tree (type, mul_op), bottom,
14264 : : nowrap);
14265 : 284 : }
14266 : : }
14267 : : return false;
14268 : :
14269 : 194756 : case MINUS_EXPR:
14270 : 194756 : case PLUS_EXPR:
14271 : : /* If the addition or subtraction can wrap we cannot recurse further
14272 : : unless bottom is a power of two which is where wrapping does not
14273 : : matter. */
14274 : 194756 : if (!nowrap
14275 : 155764 : && !TYPE_OVERFLOW_UNDEFINED (type)
14276 : 349106 : && !integer_pow2p (bottom))
14277 : : return false;
14278 : :
14279 : : /* Handle cases like op0 + 0xfffffffd as op0 - 3 if the expression has
14280 : : unsigned type. For example, (X / 3) + 0xfffffffd is a multiple of 3,
14281 : : but 0xfffffffd is not. */
14282 : 169698 : op1 = TREE_OPERAND (top, 1);
14283 : 169698 : if (TREE_CODE (top) == PLUS_EXPR
14284 : 164603 : && nowrap
14285 : 33965 : && TYPE_UNSIGNED (type)
14286 : 203102 : && TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sign_bit (op1))
14287 : 29485 : op1 = fold_build1 (NEGATE_EXPR, type, op1);
14288 : :
14289 : : /* It is impossible to prove precisely whether op0 +- op1 is a multiple
14290 : : of bottom, so be conservative here and check that both op0 and op1
14291 : : are multiples of bottom. Note we check the second operand first
14292 : : since it's usually simpler. */
14293 : 169698 : return (multiple_of_p (type, op1, bottom, nowrap)
14294 : 169698 : && multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap));
14295 : :
14296 : 142157 : CASE_CONVERT:
14297 : : /* Can't handle conversions from non-integral or wider integral type. */
14298 : 142157 : if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14299 : 142157 : || (TYPE_PRECISION (type)
14300 : 50069 : < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14301 : : return false;
14302 : : /* NOWRAP only extends to operations in the outermost type so
14303 : : make sure to strip it off here. */
14304 : 49803 : return multiple_of_p (TREE_TYPE (TREE_OPERAND (top, 0)),
14305 : 99606 : TREE_OPERAND (top, 0), bottom, false);
14306 : :
14307 : 12405 : case SAVE_EXPR:
14308 : 12405 : return multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap);
14309 : :
14310 : 0 : case COND_EXPR:
14311 : 0 : return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom, nowrap)
14312 : 0 : && multiple_of_p (type, TREE_OPERAND (top, 2), bottom, nowrap));
14313 : :
14314 : 113060 : case INTEGER_CST:
14315 : 113060 : if (TREE_CODE (bottom) != INTEGER_CST || integer_zerop (bottom))
14316 : 2698 : return false;
14317 : 110362 : return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
14318 : : SIGNED);
14319 : :
14320 : 58266 : case SSA_NAME:
14321 : 58266 : if (TREE_CODE (bottom) == INTEGER_CST
14322 : 55454 : && (stmt = SSA_NAME_DEF_STMT (top)) != NULL
14323 : 113720 : && gimple_code (stmt) == GIMPLE_ASSIGN)
14324 : : {
14325 : 24405 : enum tree_code code = gimple_assign_rhs_code (stmt);
14326 : :
14327 : : /* Check for special cases to see if top is defined as multiple
14328 : : of bottom:
14329 : :
14330 : : top = (X & ~(bottom - 1)) ; bottom is a power of 2
14331 : :
14332 : : or
14333 : :
14334 : : Y = X % bottom
14335 : : top = X - Y. */
14336 : 24405 : if (code == BIT_AND_EXPR
14337 : 712 : && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
14338 : 712 : && TREE_CODE (op2) == INTEGER_CST
14339 : 604 : && integer_pow2p (bottom)
14340 : 25009 : && wi::multiple_of_p (wi::to_widest (op2),
14341 : 604 : wi::to_widest (bottom), SIGNED))
14342 : 595 : return true;
14343 : :
14344 : 23810 : op1 = gimple_assign_rhs1 (stmt);
14345 : 23810 : if (code == MINUS_EXPR
14346 : 2136 : && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
14347 : 2136 : && TREE_CODE (op2) == SSA_NAME
14348 : 2136 : && (stmt = SSA_NAME_DEF_STMT (op2)) != NULL
14349 : 2136 : && gimple_code (stmt) == GIMPLE_ASSIGN
14350 : 1816 : && (code = gimple_assign_rhs_code (stmt)) == TRUNC_MOD_EXPR
14351 : 62 : && operand_equal_p (op1, gimple_assign_rhs1 (stmt), 0)
14352 : 23872 : && operand_equal_p (bottom, gimple_assign_rhs2 (stmt), 0))
14353 : : return true;
14354 : : }
14355 : :
14356 : : /* fall through */
14357 : :
14358 : : default:
14359 : : if (POLY_INT_CST_P (top) && poly_int_tree_p (bottom))
14360 : : return multiple_p (wi::to_poly_widest (top),
14361 : : wi::to_poly_widest (bottom));
14362 : :
14363 : : return false;
14364 : : }
14365 : : }
14366 : :
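The MULT_EXPR case above recognizes, for example, that (x * 2 + 2) * 4 is a multiple of 8: it divides BOTTOM (8) by the constant factor (4) and recurses on the other operand against the quotient (2). A quick standalone check of the underlying arithmetic fact, in plain C++ on 64-bit integers rather than GCC trees:

    /* Sanity check, not GCC code: (x * 2 + 2) * 4 is a multiple of 8 for any
       x, because x * 2 + 2 is even and the remaining factor 4 supplies the
       rest of the 8.  */
    #include <cassert>
    #include <cstdint>

    int main ()
    {
      for (int64_t x = -1000; x <= 1000; ++x)
        assert (((x * 2 + 2) * 4) % 8 == 0);
      return 0;
    }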
14367 : : /* Return true if expression X cannot be (or contain) a NaN or infinity.
14368 : : This function returns true for integer expressions, and returns
14369 : : false if uncertain. */
14370 : :
14371 : : bool
14372 : 533198 : tree_expr_finite_p (const_tree x)
14373 : : {
14374 : 533206 : machine_mode mode = element_mode (x);
14375 : 533206 : if (!HONOR_NANS (mode) && !HONOR_INFINITIES (mode))
14376 : : return true;
14377 : 533012 : switch (TREE_CODE (x))
14378 : : {
14379 : 478 : case REAL_CST:
14380 : 478 : return real_isfinite (TREE_REAL_CST_PTR (x));
14381 : 0 : case COMPLEX_CST:
14382 : 0 : return tree_expr_finite_p (TREE_REALPART (x))
14383 : 0 : && tree_expr_finite_p (TREE_IMAGPART (x));
14384 : : case FLOAT_EXPR:
14385 : : return true;
14386 : 8 : case ABS_EXPR:
14387 : 8 : case CONVERT_EXPR:
14388 : 8 : case NON_LVALUE_EXPR:
14389 : 8 : case NEGATE_EXPR:
14390 : 8 : case SAVE_EXPR:
14391 : 8 : return tree_expr_finite_p (TREE_OPERAND (x, 0));
14392 : 0 : case MIN_EXPR:
14393 : 0 : case MAX_EXPR:
14394 : 0 : return tree_expr_finite_p (TREE_OPERAND (x, 0))
14395 : 0 : && tree_expr_finite_p (TREE_OPERAND (x, 1));
14396 : 0 : case COND_EXPR:
14397 : 0 : return tree_expr_finite_p (TREE_OPERAND (x, 1))
14398 : 0 : && tree_expr_finite_p (TREE_OPERAND (x, 2));
14399 : 62 : case CALL_EXPR:
14400 : 62 : switch (get_call_combined_fn (x))
14401 : : {
14402 : 0 : CASE_CFN_FABS:
14403 : 0 : CASE_CFN_FABS_FN:
14404 : 0 : return tree_expr_finite_p (CALL_EXPR_ARG (x, 0));
14405 : 0 : CASE_CFN_FMAX:
14406 : 0 : CASE_CFN_FMAX_FN:
14407 : 0 : CASE_CFN_FMIN:
14408 : 0 : CASE_CFN_FMIN_FN:
14409 : 0 : return tree_expr_finite_p (CALL_EXPR_ARG (x, 0))
14410 : 0 : && tree_expr_finite_p (CALL_EXPR_ARG (x, 1));
14411 : : default:
14412 : : return false;
14413 : : }
14414 : :
14415 : : default:
14416 : : return false;
14417 : : }
14418 : : }
14419 : :
14420 : : /* Return true if expression X evaluates to an infinity.
14421 : : This function returns false for integer expressions. */
14422 : :
14423 : : bool
14424 : 770289 : tree_expr_infinite_p (const_tree x)
14425 : : {
14426 : 770739 : if (!HONOR_INFINITIES (x))
14427 : : return false;
14428 : 770624 : switch (TREE_CODE (x))
14429 : : {
14430 : 0 : case REAL_CST:
14431 : 0 : return real_isinf (TREE_REAL_CST_PTR (x));
14432 : 450 : case ABS_EXPR:
14433 : 450 : case NEGATE_EXPR:
14434 : 450 : case NON_LVALUE_EXPR:
14435 : 450 : case SAVE_EXPR:
14436 : 450 : return tree_expr_infinite_p (TREE_OPERAND (x, 0));
14437 : 0 : case COND_EXPR:
14438 : 0 : return tree_expr_infinite_p (TREE_OPERAND (x, 1))
14439 : 0 : && tree_expr_infinite_p (TREE_OPERAND (x, 2));
14440 : : default:
14441 : : return false;
14442 : : }
14443 : : }
14444 : :
14445 : : /* Return true if expression X could evaluate to an infinity.
14446 : : This function returns false for integer expressions, and returns
14447 : : true if uncertain. */
14448 : :
14449 : : bool
14450 : 359490 : tree_expr_maybe_infinite_p (const_tree x)
14451 : : {
14452 : 359498 : if (!HONOR_INFINITIES (x))
14453 : : return false;
14454 : 359170 : switch (TREE_CODE (x))
14455 : : {
14456 : 166 : case REAL_CST:
14457 : 166 : return real_isinf (TREE_REAL_CST_PTR (x));
14458 : : case FLOAT_EXPR:
14459 : : return false;
14460 : 8 : case ABS_EXPR:
14461 : 8 : case NEGATE_EXPR:
14462 : 8 : return tree_expr_maybe_infinite_p (TREE_OPERAND (x, 0));
14463 : 1 : case COND_EXPR:
14464 : 1 : return tree_expr_maybe_infinite_p (TREE_OPERAND (x, 1))
14465 : 1 : || tree_expr_maybe_infinite_p (TREE_OPERAND (x, 2));
14466 : : default:
14467 : : return true;
14468 : : }
14469 : : }
14470 : :
14471 : : /* Return true if expression X evaluates to a signaling NaN.
14472 : : This function returns false for integer expressions. */
14473 : :
14474 : : bool
14475 : 391 : tree_expr_signaling_nan_p (const_tree x)
14476 : : {
14477 : 391 : if (!HONOR_SNANS (x))
14478 : : return false;
14479 : 124 : switch (TREE_CODE (x))
14480 : : {
14481 : 124 : case REAL_CST:
14482 : 124 : return real_issignaling_nan (TREE_REAL_CST_PTR (x));
14483 : 0 : case NON_LVALUE_EXPR:
14484 : 0 : case SAVE_EXPR:
14485 : 0 : return tree_expr_signaling_nan_p (TREE_OPERAND (x, 0));
14486 : 0 : case COND_EXPR:
14487 : 0 : return tree_expr_signaling_nan_p (TREE_OPERAND (x, 1))
14488 : 0 : && tree_expr_signaling_nan_p (TREE_OPERAND (x, 2));
14489 : : default:
14490 : : return false;
14491 : : }
14492 : : }
14493 : :
14494 : : /* Return true if expression X could evaluate to a signaling NaN.
14495 : : This function returns false for integer expressions, and returns
14496 : : true if uncertain. */
14497 : :
14498 : : bool
14499 : 724875 : tree_expr_maybe_signaling_nan_p (const_tree x)
14500 : : {
14501 : 724875 : if (!HONOR_SNANS (x))
14502 : : return false;
14503 : 5034 : switch (TREE_CODE (x))
14504 : : {
14505 : 1456 : case REAL_CST:
14506 : 1456 : return real_issignaling_nan (TREE_REAL_CST_PTR (x));
14507 : : case FLOAT_EXPR:
14508 : : return false;
14509 : 0 : case ABS_EXPR:
14510 : 0 : case CONVERT_EXPR:
14511 : 0 : case NEGATE_EXPR:
14512 : 0 : case NON_LVALUE_EXPR:
14513 : 0 : case SAVE_EXPR:
14514 : 0 : return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 0));
14515 : 0 : case MIN_EXPR:
14516 : 0 : case MAX_EXPR:
14517 : 0 : return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 0))
14518 : 0 : || tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 1));
14519 : 0 : case COND_EXPR:
14520 : 0 : return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 1))
14521 : 0 : || tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 2));
14522 : 0 : case CALL_EXPR:
14523 : 0 : switch (get_call_combined_fn (x))
14524 : : {
14525 : 0 : CASE_CFN_FABS:
14526 : 0 : CASE_CFN_FABS_FN:
14527 : 0 : return tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 0));
14528 : 0 : CASE_CFN_FMAX:
14529 : 0 : CASE_CFN_FMAX_FN:
14530 : 0 : CASE_CFN_FMIN:
14531 : 0 : CASE_CFN_FMIN_FN:
14532 : 0 : return tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 0))
14533 : 0 : || tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 1));
14534 : : default:
14535 : : return true;
14536 : : }
14537 : : default:
14538 : : return true;
14539 : : }
14540 : : }
14541 : :
14542 : : /* Return true if expression X evaluates to a NaN.
14543 : : This function returns false for integer expressions. */
14544 : :
14545 : : bool
14546 : 3875415 : tree_expr_nan_p (const_tree x)
14547 : : {
14548 : 4242647 : if (!HONOR_NANS (x))
14549 : : return false;
14550 : 4242297 : switch (TREE_CODE (x))
14551 : : {
14552 : 3795 : case REAL_CST:
14553 : 3795 : return real_isnan (TREE_REAL_CST_PTR (x));
14554 : 367232 : case NON_LVALUE_EXPR:
14555 : 367232 : case SAVE_EXPR:
14556 : 367232 : return tree_expr_nan_p (TREE_OPERAND (x, 0));
14557 : 900 : case COND_EXPR:
14558 : 900 : return tree_expr_nan_p (TREE_OPERAND (x, 1))
14559 : 900 : && tree_expr_nan_p (TREE_OPERAND (x, 2));
14560 : : default:
14561 : : return false;
14562 : : }
14563 : : }
14564 : :
14565 : : /* Return true if expression X could evaluate to a NaN.
14566 : : This function returns false for integer expressions, and returns
14567 : : true if uncertain. */
14568 : :
14569 : : bool
14570 : 4704996 : tree_expr_maybe_nan_p (const_tree x)
14571 : : {
14572 : 6568075 : if (!HONOR_NANS (x))
14573 : : return false;
14574 : 6404909 : switch (TREE_CODE (x))
14575 : : {
14576 : 3242 : case REAL_CST:
14577 : 3242 : return real_isnan (TREE_REAL_CST_PTR (x));
14578 : : case FLOAT_EXPR:
14579 : : return false;
14580 : 13684 : case PLUS_EXPR:
14581 : 13684 : case MINUS_EXPR:
14582 : 13684 : case MULT_EXPR:
14583 : 13684 : return !tree_expr_finite_p (TREE_OPERAND (x, 0))
14584 : 13684 : || !tree_expr_finite_p (TREE_OPERAND (x, 1));
14585 : 1863079 : case ABS_EXPR:
14586 : 1863079 : case CONVERT_EXPR:
14587 : 1863079 : case NEGATE_EXPR:
14588 : 1863079 : case NON_LVALUE_EXPR:
14589 : 1863079 : case SAVE_EXPR:
14590 : 1863079 : return tree_expr_maybe_nan_p (TREE_OPERAND (x, 0));
14591 : 168 : case MIN_EXPR:
14592 : 168 : case MAX_EXPR:
14593 : 168 : return tree_expr_maybe_nan_p (TREE_OPERAND (x, 0))
14594 : 168 : || tree_expr_maybe_nan_p (TREE_OPERAND (x, 1));
14595 : 557 : case COND_EXPR:
14596 : 557 : return tree_expr_maybe_nan_p (TREE_OPERAND (x, 1))
14597 : 557 : || tree_expr_maybe_nan_p (TREE_OPERAND (x, 2));
14598 : 1064 : case CALL_EXPR:
14599 : 1064 : switch (get_call_combined_fn (x))
14600 : : {
14601 : 0 : CASE_CFN_FABS:
14602 : 0 : CASE_CFN_FABS_FN:
14603 : 0 : return tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 0));
14604 : 108 : CASE_CFN_FMAX:
14605 : 108 : CASE_CFN_FMAX_FN:
14606 : 108 : CASE_CFN_FMIN:
14607 : 108 : CASE_CFN_FMIN_FN:
14608 : 108 : return tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 0))
14609 : 108 : || tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 1));
14610 : : default:
14611 : : return true;
14612 : : }
14613 : : default:
14614 : : return true;
14615 : : }
14616 : : }
14617 : :
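The PLUS_EXPR/MINUS_EXPR/MULT_EXPR case of tree_expr_maybe_nan_p above asks whether the operands are finite rather than whether they might themselves be NaNs, because two non-NaN operands can still produce a NaN. A small standalone demonstration in plain C++ (no GCC internals involved):

    #include <cmath>
    #include <cstdio>
    #include <limits>

    int main ()
    {
      double pinf = std::numeric_limits<double>::infinity ();
      /* Neither operand is a NaN ...  */
      std::printf ("%d %d\n", std::isnan (pinf), std::isnan (-pinf));  /* 0 0 */
      /* ... yet the sum and the product are.  */
      std::printf ("%d %d\n", std::isnan (pinf + -pinf),
                   std::isnan (pinf * 0.0));                           /* 1 1 */
      return 0;
    }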
14618 : : /* Return true if expression X could evaluate to -0.0.
14619 : : This function returns true if uncertain. */
14620 : :
14621 : : bool
14622 : 601092 : tree_expr_maybe_real_minus_zero_p (const_tree x)
14623 : : {
14624 : 601092 : if (!HONOR_SIGNED_ZEROS (x))
14625 : : return false;
14626 : 601092 : switch (TREE_CODE (x))
14627 : : {
14628 : 0 : case REAL_CST:
14629 : 0 : return REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (x));
14630 : : case INTEGER_CST:
14631 : : case FLOAT_EXPR:
14632 : : case ABS_EXPR:
14633 : : return false;
14634 : 0 : case NON_LVALUE_EXPR:
14635 : 0 : case SAVE_EXPR:
14636 : 0 : return tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x, 0));
14637 : 0 : case COND_EXPR:
14638 : 0 : return tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x, 1))
14639 : 0 : || tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x, 2));
14640 : 1 : case CALL_EXPR:
14641 : 1 : switch (get_call_combined_fn (x))
14642 : : {
14643 : : CASE_CFN_FABS:
14644 : : CASE_CFN_FABS_FN:
14645 : : return false;
14646 : : default:
14647 : : break;
14648 : : }
14649 : : default:
14650 : : break;
14651 : : }
14652 : : /* Ideally !(tree_expr_nonzero_p (X) || tree_expr_nonnegative_p (X))
14653 : : but currently those predicates require tree and not const_tree. */
14654 : : return true;
14655 : : }
14656 : :
14657 : : #define tree_expr_nonnegative_warnv_p(X, Y) \
14658 : : _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
14659 : :
14660 : : #define RECURSE(X) \
14661 : : ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
14662 : :
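The two macros above poison the public name within this part of the file so that every recursive call must go through RECURSE, the only place DEPTH is incremented; that is what keeps the recursion bounded by param_max_ssa_name_query_depth. A minimal standalone sketch of the same trick on a made-up function (depth_limited_p and the hard-coded limit of 8 are illustrative, not GCC's):

    #include <cstdio>

    static bool depth_limited_p (long v, int depth);

    #define depth_limited_p(V) \
      _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
    #define RECURSE(V) ((depth_limited_p) (V, depth + 1))

    static bool
    (depth_limited_p) (long v, int depth)
    {
      if (depth > 8)            /* stand-in for param_max_ssa_name_query_depth */
        return false;           /* give up conservatively */
      if (v >= 0)
        return true;
      return RECURSE (-v - 1);  /* toy recursion; the point is the depth + 1 */
    }

    #undef RECURSE
    #undef depth_limited_p

    int main ()
    {
      std::printf ("%d %d\n", depth_limited_p (5, 0), depth_limited_p (-7, 0));
      return 0;
    }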
14663 : : /* Return true if CODE or TYPE is known to be non-negative. */
14664 : :
14665 : : static bool
14666 : 35210087 : tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14667 : : {
14668 : 35210087 : if (!VECTOR_TYPE_P (type)
14669 : 35192325 : && (TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14670 : 70401699 : && truth_value_p (code))
14671 : : /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14672 : : have a signed:1 type (where the values are -1 and 0). */
14673 : : return true;
14674 : : return false;
14675 : : }
14676 : :
14677 : : /* Return true if (CODE OP0) is known to be non-negative. If the return
14678 : : value is based on the assumption that signed overflow is undefined,
14679 : : set *STRICT_OVERFLOW_P to true; otherwise, don't change
14680 : : *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14681 : :
14682 : : bool
14683 : 11991803 : tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14684 : : bool *strict_overflow_p, int depth)
14685 : : {
14686 : 11991803 : if (TYPE_UNSIGNED (type))
14687 : : return true;
14688 : :
14689 : 4724539 : switch (code)
14690 : : {
14691 : 315194 : case ABS_EXPR:
14692 : : /* We can't return 1 if flag_wrapv is set because
14693 : : ABS_EXPR<INT_MIN> = INT_MIN. */
14694 : 315194 : if (!ANY_INTEGRAL_TYPE_P (type))
14695 : : return true;
14696 : 23156 : if (TYPE_OVERFLOW_UNDEFINED (type))
14697 : : {
14698 : 22243 : *strict_overflow_p = true;
14699 : 22243 : return true;
14700 : : }
14701 : : break;
14702 : :
14703 : 84403 : case NON_LVALUE_EXPR:
14704 : 84403 : case FLOAT_EXPR:
14705 : 84403 : case FIX_TRUNC_EXPR:
14706 : 84403 : return RECURSE (op0);
14707 : :
14708 : 4227762 : CASE_CONVERT:
14709 : 4227762 : {
14710 : 4227762 : tree inner_type = TREE_TYPE (op0);
14711 : 4227762 : tree outer_type = type;
14712 : :
14713 : 4227762 : if (SCALAR_FLOAT_TYPE_P (outer_type))
14714 : : {
14715 : 256688 : if (SCALAR_FLOAT_TYPE_P (inner_type))
14716 : 256688 : return RECURSE (op0);
14717 : 0 : if (INTEGRAL_TYPE_P (inner_type))
14718 : : {
14719 : 0 : if (TYPE_UNSIGNED (inner_type))
14720 : : return true;
14721 : 0 : return RECURSE (op0);
14722 : : }
14723 : : }
14724 : 3971074 : else if (INTEGRAL_TYPE_P (outer_type))
14725 : : {
14726 : 3971037 : if (SCALAR_FLOAT_TYPE_P (inner_type))
14727 : 0 : return RECURSE (op0);
14728 : 3971037 : if (INTEGRAL_TYPE_P (inner_type))
14729 : 3814298 : return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14730 : 3814298 : && TYPE_UNSIGNED (inner_type);
14731 : : }
14732 : : }
14733 : : break;
14734 : :
14735 : 97180 : default:
14736 : 97180 : return tree_simple_nonnegative_warnv_p (code, type);
14737 : : }
14738 : :
14739 : : /* We don't know sign of `t', so be conservative and return false. */
14740 : : return false;
14741 : : }
14742 : :
14743 : : /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14744 : : value is based on the assumption that signed overflow is undefined,
14745 : : set *STRICT_OVERFLOW_P to true; otherwise, don't change
14746 : : *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14747 : :
14748 : : bool
14749 : 34568961 : tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14750 : : tree op1, bool *strict_overflow_p,
14751 : : int depth)
14752 : : {
14753 : 34568961 : if (TYPE_UNSIGNED (type))
14754 : : return true;
14755 : :
14756 : 13317466 : switch (code)
14757 : : {
14758 : 5289274 : case POINTER_PLUS_EXPR:
14759 : 5289274 : case PLUS_EXPR:
14760 : 5289274 : if (FLOAT_TYPE_P (type))
14761 : 48117 : return RECURSE (op0) && RECURSE (op1);
14762 : :
14763 : : /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14764 : : both unsigned and at least 2 bits shorter than the result. */
14765 : 5241157 : if (TREE_CODE (type) == INTEGER_TYPE
14766 : 5235103 : && TREE_CODE (op0) == NOP_EXPR
14767 : 7700 : && TREE_CODE (op1) == NOP_EXPR)
14768 : : {
14769 : 205 : tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14770 : 205 : tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14771 : 205 : if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14772 : 308 : && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14773 : : {
14774 : 95 : unsigned int prec = MAX (TYPE_PRECISION (inner1),
14775 : 95 : TYPE_PRECISION (inner2)) + 1;
14776 : 95 : return prec < TYPE_PRECISION (type);
14777 : : }
14778 : : }
14779 : : break;
14780 : :
14781 : 1296304 : case MULT_EXPR:
14782 : 1296304 : if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
14783 : : {
14784 : : /* x * x is always non-negative for floating point x
14785 : : or without overflow. */
14786 : 1237512 : if (operand_equal_p (op0, op1, 0)
14787 : 1237512 : || (RECURSE (op0) && RECURSE (op1)))
14788 : : {
14789 : 1234 : if (ANY_INTEGRAL_TYPE_P (type)
14790 : 12293 : && TYPE_OVERFLOW_UNDEFINED (type))
14791 : 11059 : *strict_overflow_p = true;
14792 : 12272 : return true;
14793 : : }
14794 : : }
14795 : :
14796 : : /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14797 : : both unsigned and their combined width is less than that of the result. */
14798 : 1284032 : if (TREE_CODE (type) == INTEGER_TYPE
14799 : 1213883 : && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14800 : 151 : && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14801 : : {
14802 : 115 : tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14803 : 115 : ? TREE_TYPE (TREE_OPERAND (op0, 0))
14804 : 0 : : TREE_TYPE (op0);
14805 : 115 : tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14806 : 183 : ? TREE_TYPE (TREE_OPERAND (op1, 0))
14807 : 68 : : TREE_TYPE (op1);
14808 : :
14809 : 115 : bool unsigned0 = TYPE_UNSIGNED (inner0);
14810 : 115 : bool unsigned1 = TYPE_UNSIGNED (inner1);
14811 : :
14812 : 115 : if (TREE_CODE (op0) == INTEGER_CST)
14813 : 0 : unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14814 : :
14815 : 115 : if (TREE_CODE (op1) == INTEGER_CST)
14816 : 68 : unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14817 : :
14818 : 115 : if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14819 : 7 : && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14820 : : {
14821 : 0 : unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14822 : 0 : ? tree_int_cst_min_precision (op0, UNSIGNED)
14823 : 0 : : TYPE_PRECISION (inner0);
14824 : :
14825 : 0 : unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14826 : 0 : ? tree_int_cst_min_precision (op1, UNSIGNED)
14827 : 0 : : TYPE_PRECISION (inner1);
14828 : :
14829 : 0 : return precision0 + precision1 < TYPE_PRECISION (type);
14830 : : }
14831 : : }
14832 : : return false;
14833 : :
14834 : 92446 : case BIT_AND_EXPR:
14835 : 92446 : return RECURSE (op0) || RECURSE (op1);
14836 : :
14837 : 75679 : case MAX_EXPR:
14838 : : /* Usually RECURSE (op0) || RECURSE (op1) but NaNs complicate
14839 : : things. */
14840 : 75679 : if (tree_expr_maybe_nan_p (op0) || tree_expr_maybe_nan_p (op1))
14841 : 76 : return RECURSE (op0) && RECURSE (op1);
14842 : 75603 : return RECURSE (op0) || RECURSE (op1);
14843 : :
14844 : 728092 : case BIT_IOR_EXPR:
14845 : 728092 : case BIT_XOR_EXPR:
14846 : 728092 : case MIN_EXPR:
14847 : 728092 : case RDIV_EXPR:
14848 : 728092 : case TRUNC_DIV_EXPR:
14849 : 728092 : case CEIL_DIV_EXPR:
14850 : 728092 : case FLOOR_DIV_EXPR:
14851 : 728092 : case ROUND_DIV_EXPR:
14852 : 728092 : return RECURSE (op0) && RECURSE (op1);
14853 : :
14854 : 91614 : case TRUNC_MOD_EXPR:
14855 : 91614 : return RECURSE (op0);
14856 : :
14857 : 253 : case FLOOR_MOD_EXPR:
14858 : 253 : return RECURSE (op1);
14859 : :
14860 : 5743804 : case CEIL_MOD_EXPR:
14861 : 5743804 : case ROUND_MOD_EXPR:
14862 : 5743804 : default:
14863 : 5743804 : return tree_simple_nonnegative_warnv_p (code, type);
14864 : : }
14865 : :
14866 : : /* We don't know sign of `t', so be conservative and return false. */
14867 : : return false;
14868 : : }
14869 : :
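A quick numeric check of the zero_extend argument used in the PLUS_EXPR case above, assuming 16-bit operands zero-extended into a 32-bit signed type (plain C++, not GCC trees): the sum is at most 2 * 0xffff, well below 2^31, so its sign bit can never be set.

    #include <cassert>
    #include <cstdint>

    int main ()
    {
      for (uint32_t a = 0; a <= UINT16_MAX; a += 257)   /* sampled 16-bit values */
        for (uint32_t b = 0; b <= UINT16_MAX; b += 263)
          assert ((int32_t) a + (int32_t) b >= 0);      /* never negative */
      return 0;
    }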
14870 : : /* Return true if T is known to be non-negative. If the return
14871 : : value is based on the assumption that signed overflow is undefined,
14872 : : set *STRICT_OVERFLOW_P to true; otherwise, don't change
14873 : : *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14874 : :
14875 : : bool
14876 : 43178735 : tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
14877 : : {
14878 : 43178735 : if (TYPE_UNSIGNED (TREE_TYPE (t)))
14879 : : return true;
14880 : :
14881 : 29823268 : switch (TREE_CODE (t))
14882 : : {
14883 : 3407186 : case INTEGER_CST:
14884 : 3407186 : return tree_int_cst_sgn (t) >= 0;
14885 : :
14886 : 947093 : case REAL_CST:
14887 : 947093 : return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14888 : :
14889 : 0 : case FIXED_CST:
14890 : 0 : return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14891 : :
14892 : 500 : case COND_EXPR:
14893 : 500 : return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
14894 : :
14895 : 16044793 : case SSA_NAME:
14896 : : /* Limit the depth of recursion to avoid quadratic behavior.
14897 : : This is expected to catch almost all occurrences in practice.
14898 : : If this code misses important cases that unbounded recursion
14899 : : would not, passes that need this information could be revised
14900 : : to provide it through dataflow propagation. */
14901 : 16044793 : return (!name_registered_for_update_p (t)
14902 : 16044792 : && depth < param_max_ssa_name_query_depth
14903 : 30772833 : && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
14904 : : strict_overflow_p, depth));
14905 : :
14906 : 9423696 : default:
14907 : 9423696 : return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
14908 : : }
14909 : : }
14910 : :
14911 : : /* Return true if T is known to be non-negative. If the return
14912 : : value is based on the assumption that signed overflow is undefined,
14913 : : set *STRICT_OVERFLOW_P to true; otherwise, don't change
14914 : : *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14915 : :
14916 : : bool
14917 : 20085664 : tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
14918 : : bool *strict_overflow_p, int depth)
14919 : : {
14920 : 20085664 : switch (fn)
14921 : : {
14922 : : CASE_CFN_ACOS:
14923 : : CASE_CFN_ACOS_FN:
14924 : : CASE_CFN_ACOSH:
14925 : : CASE_CFN_ACOSH_FN:
14926 : : CASE_CFN_CABS:
14927 : : CASE_CFN_CABS_FN:
14928 : : CASE_CFN_COSH:
14929 : : CASE_CFN_COSH_FN:
14930 : : CASE_CFN_ERFC:
14931 : : CASE_CFN_ERFC_FN:
14932 : : CASE_CFN_EXP:
14933 : : CASE_CFN_EXP_FN:
14934 : : CASE_CFN_EXP10:
14935 : : CASE_CFN_EXP2:
14936 : : CASE_CFN_EXP2_FN:
14937 : : CASE_CFN_FABS:
14938 : : CASE_CFN_FABS_FN:
14939 : : CASE_CFN_FDIM:
14940 : : CASE_CFN_FDIM_FN:
14941 : : CASE_CFN_HYPOT:
14942 : : CASE_CFN_HYPOT_FN:
14943 : : CASE_CFN_POW10:
14944 : : CASE_CFN_FFS:
14945 : : CASE_CFN_PARITY:
14946 : : CASE_CFN_POPCOUNT:
14947 : : CASE_CFN_CLRSB:
14948 : : case CFN_BUILT_IN_BSWAP16:
14949 : : case CFN_BUILT_IN_BSWAP32:
14950 : : case CFN_BUILT_IN_BSWAP64:
14951 : : case CFN_BUILT_IN_BSWAP128:
14952 : : /* Always true. */
14953 : : return true;
14954 : :
14955 : 1015 : CASE_CFN_CLZ:
14956 : 1015 : CASE_CFN_CTZ:
14957 : 1015 : if (arg1)
14958 : 0 : return RECURSE (arg1);
14959 : : return true;
14960 : :
14961 : 1007 : CASE_CFN_SQRT:
14962 : 1007 : CASE_CFN_SQRT_FN:
14963 : : /* sqrt(-0.0) is -0.0. */
14964 : 1007 : if (!HONOR_SIGNED_ZEROS (type))
14965 : : return true;
14966 : 975 : return RECURSE (arg0);
14967 : :
14968 : 98788 : CASE_CFN_ASINH:
14969 : 98788 : CASE_CFN_ASINH_FN:
14970 : 98788 : CASE_CFN_ATAN:
14971 : 98788 : CASE_CFN_ATAN_FN:
14972 : 98788 : CASE_CFN_ATANH:
14973 : 98788 : CASE_CFN_ATANH_FN:
14974 : 98788 : CASE_CFN_CBRT:
14975 : 98788 : CASE_CFN_CBRT_FN:
14976 : 98788 : CASE_CFN_CEIL:
14977 : 98788 : CASE_CFN_CEIL_FN:
14978 : 98788 : CASE_CFN_ERF:
14979 : 98788 : CASE_CFN_ERF_FN:
14980 : 98788 : CASE_CFN_EXPM1:
14981 : 98788 : CASE_CFN_EXPM1_FN:
14982 : 98788 : CASE_CFN_FLOOR:
14983 : 98788 : CASE_CFN_FLOOR_FN:
14984 : 98788 : CASE_CFN_FMOD:
14985 : 98788 : CASE_CFN_FMOD_FN:
14986 : 98788 : CASE_CFN_FREXP:
14987 : 98788 : CASE_CFN_FREXP_FN:
14988 : 98788 : CASE_CFN_ICEIL:
14989 : 98788 : CASE_CFN_IFLOOR:
14990 : 98788 : CASE_CFN_IRINT:
14991 : 98788 : CASE_CFN_IROUND:
14992 : 98788 : CASE_CFN_LCEIL:
14993 : 98788 : CASE_CFN_LDEXP:
14994 : 98788 : CASE_CFN_LFLOOR:
14995 : 98788 : CASE_CFN_LLCEIL:
14996 : 98788 : CASE_CFN_LLFLOOR:
14997 : 98788 : CASE_CFN_LLRINT:
14998 : 98788 : CASE_CFN_LLRINT_FN:
14999 : 98788 : CASE_CFN_LLROUND:
15000 : 98788 : CASE_CFN_LLROUND_FN:
15001 : 98788 : CASE_CFN_LRINT:
15002 : 98788 : CASE_CFN_LRINT_FN:
15003 : 98788 : CASE_CFN_LROUND:
15004 : 98788 : CASE_CFN_LROUND_FN:
15005 : 98788 : CASE_CFN_MODF:
15006 : 98788 : CASE_CFN_MODF_FN:
15007 : 98788 : CASE_CFN_NEARBYINT:
15008 : 98788 : CASE_CFN_NEARBYINT_FN:
15009 : 98788 : CASE_CFN_RINT:
15010 : 98788 : CASE_CFN_RINT_FN:
15011 : 98788 : CASE_CFN_ROUND:
15012 : 98788 : CASE_CFN_ROUND_FN:
15013 : 98788 : CASE_CFN_ROUNDEVEN:
15014 : 98788 : CASE_CFN_ROUNDEVEN_FN:
15015 : 98788 : CASE_CFN_SCALB:
15016 : 98788 : CASE_CFN_SCALBLN:
15017 : 98788 : CASE_CFN_SCALBLN_FN:
15018 : 98788 : CASE_CFN_SCALBN:
15019 : 98788 : CASE_CFN_SCALBN_FN:
15020 : 98788 : CASE_CFN_SIGNBIT:
15021 : 98788 : CASE_CFN_SIGNIFICAND:
15022 : 98788 : CASE_CFN_SINH:
15023 : 98788 : CASE_CFN_SINH_FN:
15024 : 98788 : CASE_CFN_TANH:
15025 : 98788 : CASE_CFN_TANH_FN:
15026 : 98788 : CASE_CFN_TRUNC:
15027 : 98788 : CASE_CFN_TRUNC_FN:
15028 : : /* True if the 1st argument is nonnegative. */
15029 : 98788 : return RECURSE (arg0);
15030 : :
15031 : 1296 : CASE_CFN_FMAX:
15032 : 1296 : CASE_CFN_FMAX_FN:
15033 : : /* Usually RECURSE (arg0) || RECURSE (arg1) but NaNs complicate
15034 : : things. In the presence of sNaNs, we're only guaranteed to be
15035 : : non-negative if both operands are non-negative. In the presence
15036 : : of qNaNs, we're non-negative if either operand is non-negative
15037 : : and can't be a qNaN, or if both operands are non-negative. */
15038 : 1296 : if (tree_expr_maybe_signaling_nan_p (arg0)
15039 : 1296 : || tree_expr_maybe_signaling_nan_p (arg1))
15040 : 136 : return RECURSE (arg0) && RECURSE (arg1);
15041 : 1160 : return RECURSE (arg0) ? (!tree_expr_maybe_nan_p (arg0)
15042 : 331 : || RECURSE (arg1))
15043 : 829 : : (RECURSE (arg1)
15044 : 829 : && !tree_expr_maybe_nan_p (arg1));
15045 : :
15046 : 908 : CASE_CFN_FMIN:
15047 : 908 : CASE_CFN_FMIN_FN:
15048 : : /* True if the 1st AND 2nd arguments are nonnegative. */
15049 : 908 : return RECURSE (arg0) && RECURSE (arg1);
15050 : :
15051 : 1407 : CASE_CFN_COPYSIGN:
15052 : 1407 : CASE_CFN_COPYSIGN_FN:
15053 : : /* True if the 2nd argument is nonnegative. */
15054 : 1407 : return RECURSE (arg1);
15055 : :
15056 : 2510 : CASE_CFN_POWI:
15057 : : /* True if the 1st argument is nonnegative or the second
15058 : : argument is an even integer. */
15059 : 2510 : if (TREE_CODE (arg1) == INTEGER_CST
15060 : 2510 : && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15061 : : return true;
15062 : 2429 : return RECURSE (arg0);
15063 : :
15064 : 4898 : CASE_CFN_POW:
15065 : 4898 : CASE_CFN_POW_FN:
15066 : : /* True if the 1st argument is nonnegative or the second
15067 : : argument is an even integer valued real. */
15068 : 4898 : if (TREE_CODE (arg1) == REAL_CST)
15069 : : {
15070 : 2139 : REAL_VALUE_TYPE c;
15071 : 2139 : HOST_WIDE_INT n;
15072 : :
15073 : 2139 : c = TREE_REAL_CST (arg1);
15074 : 2139 : n = real_to_integer (&c);
15075 : 2139 : if ((n & 1) == 0)
15076 : : {
15077 : 1498 : REAL_VALUE_TYPE cint;
15078 : 1498 : real_from_integer (&cint, VOIDmode, n, SIGNED);
15079 : 1498 : if (real_identical (&c, &cint))
15080 : 488 : return true;
15081 : : }
15082 : : }
15083 : 4410 : return RECURSE (arg0);
15084 : :
15085 : 19943743 : default:
15086 : 19943743 : break;
15087 : : }
15088 : 19943743 : return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
15089 : : }
15090 : :
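A standalone check of the CASE_CFN_POW / CASE_CFN_POWI rule above: when the exponent is an even integer the result is never negative, whatever the sign of the base, so only that property of ARG1 needs to be established. This uses the C library pow on plain doubles, not the folder itself:

    #include <cassert>
    #include <cmath>

    int main ()
    {
      for (double x = -3.0; x <= 3.0; x += 0.5)
        for (int n = -4; n <= 4; n += 2)          /* even exponents only */
          assert (!(std::pow (x, n) < 0.0));      /* also accepts +Inf results */
      return 0;
    }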
15091 : : /* Return true if T is known to be non-negative. If the return
15092 : : value is based on the assumption that signed overflow is undefined,
15093 : : set *STRICT_OVERFLOW_P to true; otherwise, don't change
15094 : : *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
15095 : :
15096 : : static bool
15097 : 880425 : tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
15098 : : {
15099 : 880425 : enum tree_code code = TREE_CODE (t);
15100 : 880425 : if (TYPE_UNSIGNED (TREE_TYPE (t)))
15101 : : return true;
15102 : :
15103 : 724869 : switch (code)
15104 : : {
15105 : 203 : case TARGET_EXPR:
15106 : 203 : {
15107 : 203 : tree temp = TARGET_EXPR_SLOT (t);
15108 : 203 : t = TARGET_EXPR_INITIAL (t);
15109 : :
15110 : : /* If the initializer is non-void, then it's a normal expression
15111 : : that will be assigned to the slot. */
15112 : 203 : if (!VOID_TYPE_P (TREE_TYPE (t)))
15113 : 0 : return RECURSE (t);
15114 : :
15115 : : /* Otherwise, the initializer sets the slot in some way. One common
15116 : : way is an assignment statement at the end of the initializer. */
15117 : 405 : while (1)
15118 : : {
15119 : 405 : if (TREE_CODE (t) == BIND_EXPR)
15120 : 202 : t = expr_last (BIND_EXPR_BODY (t));
15121 : 203 : else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15122 : 203 : || TREE_CODE (t) == TRY_CATCH_EXPR)
15123 : 0 : t = expr_last (TREE_OPERAND (t, 0));
15124 : 203 : else if (TREE_CODE (t) == STATEMENT_LIST)
15125 : 0 : t = expr_last (t);
15126 : : else
15127 : : break;
15128 : : }
15129 : 203 : if (TREE_CODE (t) == MODIFY_EXPR
15130 : 203 : && TREE_OPERAND (t, 0) == temp)
15131 : 202 : return RECURSE (TREE_OPERAND (t, 1));
15132 : :
15133 : : return false;
15134 : : }
15135 : :
15136 : 331399 : case CALL_EXPR:
15137 : 331399 : {
15138 : 331399 : tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15139 : 331399 : tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15140 : :
15141 : 331399 : return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15142 : : get_call_combined_fn (t),
15143 : : arg0,
15144 : : arg1,
15145 : 331399 : strict_overflow_p, depth);
15146 : : }
15147 : 767 : case COMPOUND_EXPR:
15148 : 767 : case MODIFY_EXPR:
15149 : 767 : return RECURSE (TREE_OPERAND (t, 1));
15150 : :
15151 : 9 : case BIND_EXPR:
15152 : 9 : return RECURSE (expr_last (TREE_OPERAND (t, 1)));
15153 : :
15154 : 390827 : case SAVE_EXPR:
15155 : 390827 : return RECURSE (TREE_OPERAND (t, 0));
15156 : :
15157 : 1664 : default:
15158 : 1664 : return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
15159 : : }
15160 : : }
15161 : :
15162 : : #undef RECURSE
15163 : : #undef tree_expr_nonnegative_warnv_p
15164 : :
15165 : : /* Return true if T is known to be non-negative. If the return
15166 : : value is based on the assumption that signed overflow is undefined,
15167 : : set *STRICT_OVERFLOW_P to true; otherwise, don't change
15168 : : *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
15169 : :
15170 : : bool
15171 : 22593460 : tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
15172 : : {
15173 : 22593460 : enum tree_code code;
15174 : 22593460 : if (t == error_mark_node)
15175 : : return false;
15176 : :
15177 : 22593460 : code = TREE_CODE (t);
15178 : 22593460 : switch (TREE_CODE_CLASS (code))
15179 : : {
15180 : 909084 : case tcc_binary:
15181 : 909084 : case tcc_comparison:
15182 : 909084 : return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15183 : 909084 : TREE_TYPE (t),
15184 : 909084 : TREE_OPERAND (t, 0),
15185 : 909084 : TREE_OPERAND (t, 1),
15186 : 909084 : strict_overflow_p, depth);
15187 : :
15188 : 1491913 : case tcc_unary:
15189 : 1491913 : return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15190 : 1491913 : TREE_TYPE (t),
15191 : 1491913 : TREE_OPERAND (t, 0),
15192 : 1491913 : strict_overflow_p, depth);
15193 : :
15194 : 10784606 : case tcc_constant:
15195 : 10784606 : case tcc_declaration:
15196 : 10784606 : case tcc_reference:
15197 : 10784606 : return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
15198 : :
15199 : 9407857 : default:
15200 : 9407857 : break;
15201 : : }
15202 : :
15203 : 9407857 : switch (code)
15204 : : {
15205 : 7 : case TRUTH_AND_EXPR:
15206 : 7 : case TRUTH_OR_EXPR:
15207 : 7 : case TRUTH_XOR_EXPR:
15208 : 7 : return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15209 : 7 : TREE_TYPE (t),
15210 : 7 : TREE_OPERAND (t, 0),
15211 : 7 : TREE_OPERAND (t, 1),
15212 : 7 : strict_overflow_p, depth);
15213 : 72 : case TRUTH_NOT_EXPR:
15214 : 72 : return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15215 : 72 : TREE_TYPE (t),
15216 : 72 : TREE_OPERAND (t, 0),
15217 : 72 : strict_overflow_p, depth);
15218 : :
15219 : 8527353 : case COND_EXPR:
15220 : 8527353 : case CONSTRUCTOR:
15221 : 8527353 : case OBJ_TYPE_REF:
15222 : 8527353 : case ADDR_EXPR:
15223 : 8527353 : case WITH_SIZE_EXPR:
15224 : 8527353 : case SSA_NAME:
15225 : 8527353 : return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
15226 : :
15227 : 880425 : default:
15228 : 880425 : return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
15229 : : }
15230 : : }
15231 : :
15232 : : /* Return true if `t' is known to be non-negative. Handle warnings
15233 : : about undefined signed overflow. */
15234 : :
15235 : : bool
15236 : 15612306 : tree_expr_nonnegative_p (tree t)
15237 : : {
15238 : 15612306 : bool ret, strict_overflow_p;
15239 : :
15240 : 15612306 : strict_overflow_p = false;
15241 : 15612306 : ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15242 : 15612306 : if (strict_overflow_p)
15243 : 22577 : fold_overflow_warning (("assuming signed overflow does not occur when "
15244 : : "determining that expression is always "
15245 : : "non-negative"),
15246 : : WARN_STRICT_OVERFLOW_MISC);
15247 : 15612306 : return ret;
15248 : : }
15249 : :
15250 : :
15251 : : /* Return true when (CODE OP0) is an address and is known to be nonzero.
15252 : : For floating point we further ensure that T is not denormal.
15253 : : Similar logic is present in nonzero_address in rtlanal.h.
15254 : :
15255 : : If the return value is based on the assumption that signed overflow
15256 : : is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15257 : : change *STRICT_OVERFLOW_P. */
15258 : :
15259 : : bool
15260 : 1297376 : tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15261 : : bool *strict_overflow_p)
15262 : : {
15263 : 1297376 : switch (code)
15264 : : {
15265 : 1 : case ABS_EXPR:
15266 : 1 : return tree_expr_nonzero_warnv_p (op0,
15267 : 1 : strict_overflow_p);
15268 : :
15269 : 684578 : case NOP_EXPR:
15270 : 684578 : {
15271 : 684578 : tree inner_type = TREE_TYPE (op0);
15272 : 684578 : tree outer_type = type;
15273 : :
15274 : 684578 : return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15275 : 684578 : && tree_expr_nonzero_warnv_p (op0,
15276 : : strict_overflow_p));
15277 : : }
15278 : 28101 : break;
15279 : :
15280 : 28101 : case NON_LVALUE_EXPR:
15281 : 28101 : return tree_expr_nonzero_warnv_p (op0,
15282 : 28101 : strict_overflow_p);
15283 : :
15284 : : default:
15285 : : break;
15286 : : }
15287 : :
15288 : : return false;
15289 : : }
15290 : :
15291 : : /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15292 : : For floating point we further ensure that T is not denormal.
15293 : : Similar logic is present in nonzero_address in rtlanal.h.
15294 : :
15295 : : If the return value is based on the assumption that signed overflow
15296 : : is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15297 : : change *STRICT_OVERFLOW_P. */
15298 : :
15299 : : bool
15300 : 2626354 : tree_binary_nonzero_warnv_p (enum tree_code code,
15301 : : tree type,
15302 : : tree op0,
15303 : : tree op1, bool *strict_overflow_p)
15304 : : {
15305 : 2626354 : bool sub_strict_overflow_p;
15306 : 2626354 : switch (code)
15307 : : {
15308 : 411596 : case POINTER_PLUS_EXPR:
15309 : 411596 : case PLUS_EXPR:
15310 : 411596 : if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
15311 : : {
15312 : : /* In the presence of negative values it is hard
15313 : : to say anything definite. */
15314 : 93572 : sub_strict_overflow_p = false;
15315 : 93572 : if (!tree_expr_nonnegative_warnv_p (op0,
15316 : : &sub_strict_overflow_p)
15317 : 93572 : || !tree_expr_nonnegative_warnv_p (op1,
15318 : : &sub_strict_overflow_p))
15319 : 91173 : return false;
15320 : : /* One of the operands must be positive and the other non-negative. */
15321 : : /* We don't set *STRICT_OVERFLOW_P here: even if this value
15322 : : overflows, on a twos-complement machine the sum of two
15323 : : nonnegative numbers can never be zero. */
15324 : 2399 : return (tree_expr_nonzero_warnv_p (op0,
15325 : : strict_overflow_p)
15326 : 2399 : || tree_expr_nonzero_warnv_p (op1,
15327 : : strict_overflow_p));
15328 : : }
15329 : : break;
15330 : :
15331 : 16907 : case MULT_EXPR:
15332 : 16907 : if (TYPE_OVERFLOW_UNDEFINED (type))
15333 : : {
15334 : 415 : if (tree_expr_nonzero_warnv_p (op0,
15335 : : strict_overflow_p)
15336 : 415 : && tree_expr_nonzero_warnv_p (op1,
15337 : : strict_overflow_p))
15338 : : {
15339 : 0 : *strict_overflow_p = true;
15340 : 0 : return true;
15341 : : }
15342 : : }
15343 : : break;
15344 : :
15345 : 9645 : case MIN_EXPR:
15346 : 9645 : sub_strict_overflow_p = false;
15347 : 9645 : if (tree_expr_nonzero_warnv_p (op0,
15348 : : &sub_strict_overflow_p)
15349 : 9645 : && tree_expr_nonzero_warnv_p (op1,
15350 : : &sub_strict_overflow_p))
15351 : : {
15352 : 0 : if (sub_strict_overflow_p)
15353 : 0 : *strict_overflow_p = true;
15354 : : }
15355 : : break;
15356 : :
15357 : 42 : case MAX_EXPR:
15358 : 42 : sub_strict_overflow_p = false;
15359 : 42 : if (tree_expr_nonzero_warnv_p (op0,
15360 : : &sub_strict_overflow_p))
15361 : : {
15362 : 0 : if (sub_strict_overflow_p)
15363 : 0 : *strict_overflow_p = true;
15364 : :
15365 : : /* When both operands are nonzero, then MAX must be too. */
15366 : 0 : if (tree_expr_nonzero_warnv_p (op1,
15367 : : strict_overflow_p))
15368 : : return true;
15369 : :
15370 : : /* MAX where operand 0 is positive is positive. */
15371 : 0 : return tree_expr_nonnegative_warnv_p (op0,
15372 : 0 : strict_overflow_p);
15373 : : }
15374 : : /* MAX where operand 1 is positive is positive. */
15375 : 42 : else if (tree_expr_nonzero_warnv_p (op1,
15376 : : &sub_strict_overflow_p)
15377 : 42 : && tree_expr_nonnegative_warnv_p (op1,
15378 : : &sub_strict_overflow_p))
15379 : : {
15380 : 0 : if (sub_strict_overflow_p)
15381 : 0 : *strict_overflow_p = true;
15382 : 0 : return true;
15383 : : }
15384 : : break;
15385 : :
15386 : 240277 : case BIT_IOR_EXPR:
15387 : 240277 : return (tree_expr_nonzero_warnv_p (op1,
15388 : : strict_overflow_p)
15389 : 240277 : || tree_expr_nonzero_warnv_p (op0,
15390 : : strict_overflow_p));
15391 : :
15392 : : default:
15393 : : break;
15394 : : }
15395 : :
15396 : : return false;
15397 : : }
15398 : :
15399 : : /* Return true when T is an address and is known to be nonzero.
15400 : : For floating point we further ensure that T is not denormal.
15401 : : Similar logic is present in nonzero_address in rtlanal.h.
15402 : :
15403 : : If the return value is based on the assumption that signed overflow
15404 : : is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15405 : : change *STRICT_OVERFLOW_P. */
15406 : :
15407 : : bool
15408 : 139413205 : tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15409 : : {
15410 : 139413205 : bool sub_strict_overflow_p;
15411 : 139413205 : switch (TREE_CODE (t))
15412 : : {
15413 : 1116693 : case INTEGER_CST:
15414 : 1116693 : return !integer_zerop (t);
15415 : :
15416 : 9516145 : case ADDR_EXPR:
15417 : 9516145 : {
15418 : 9516145 : tree base = TREE_OPERAND (t, 0);
15419 : :
15420 : 9516145 : if (!DECL_P (base))
15421 : 4633256 : base = get_base_address (base);
15422 : :
15423 : 9516145 : if (base && TREE_CODE (base) == TARGET_EXPR)
15424 : 657 : base = TARGET_EXPR_SLOT (base);
15425 : :
15426 : 657 : if (!base)
15427 : 0 : return false;
15428 : :
15429 : : /* For objects in the symbol table check if we know they are non-zero.
15430 : : Don't do anything for variables and functions before symtab is built;
15431 : : it is quite possible that they will be declared weak later. */
15432 : 9516145 : int nonzero_addr = maybe_nonzero_address (base);
15433 : 9516145 : if (nonzero_addr >= 0)
15434 : 7295486 : return nonzero_addr;
15435 : :
15436 : : /* Constants are never weak. */
15437 : 2220659 : if (CONSTANT_CLASS_P (base))
15438 : : return true;
15439 : :
15440 : : return false;
15441 : : }
15442 : :
15443 : 30109 : case COND_EXPR:
15444 : 30109 : sub_strict_overflow_p = false;
15445 : 30109 : if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15446 : : &sub_strict_overflow_p)
15447 : 30109 : && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15448 : : &sub_strict_overflow_p))
15449 : : {
15450 : 1221 : if (sub_strict_overflow_p)
15451 : 0 : *strict_overflow_p = true;
15452 : 1221 : return true;
15453 : : }
15454 : : break;
15455 : :
15456 : 118868044 : case SSA_NAME:
15457 : 118868044 : if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
15458 : : break;
15459 : 92594831 : return expr_not_equal_to (t, wi::zero (TYPE_PRECISION (TREE_TYPE (t))));
15460 : :
15461 : : default:
15462 : : break;
15463 : : }
15464 : : return false;
15465 : : }
15466 : :
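The reason the ADDR_EXPR case above consults the symbol table rather than assuming every address is nonzero: a weak, undefined symbol may legitimately resolve to a null address. A small GCC-specific example (maybe_missing is a made-up name):

    #include <cstdio>

    extern int maybe_missing __attribute__ ((weak));   /* may stay undefined */

    int main ()
    {
      /* Not foldable to "nonzero": if nothing defines maybe_missing, its
         address is null at run time.  */
      std::printf ("%s\n", &maybe_missing != 0 ? "nonzero" : "zero");
      return 0;
    }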
15467 : : #define integer_valued_real_p(X) \
15468 : : _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
15469 : :
15470 : : #define RECURSE(X) \
15471 : : ((integer_valued_real_p) (X, depth + 1))
15472 : :
15473 : : /* Return true if the floating point result of (CODE OP0) has an
15474 : : integer value. We also allow +Inf, -Inf and NaN to be considered
15475 : : integer values. Return false for signaling NaN.
15476 : :
15477 : : DEPTH is the current nesting depth of the query. */
15478 : :
15479 : : bool
15480 : 14773 : integer_valued_real_unary_p (tree_code code, tree op0, int depth)
15481 : : {
15482 : 14773 : switch (code)
15483 : : {
15484 : : case FLOAT_EXPR:
15485 : : return true;
15486 : :
15487 : 1413 : case ABS_EXPR:
15488 : 1413 : return RECURSE (op0);
15489 : :
15490 : 9685 : CASE_CONVERT:
15491 : 9685 : {
15492 : 9685 : tree type = TREE_TYPE (op0);
15493 : 9685 : if (TREE_CODE (type) == INTEGER_TYPE)
15494 : : return true;
15495 : 9685 : if (SCALAR_FLOAT_TYPE_P (type))
15496 : 9685 : return RECURSE (op0);
15497 : : break;
15498 : : }
15499 : :
15500 : : default:
15501 : : break;
15502 : : }
15503 : : return false;
15504 : : }
15505 : :
15506 : : /* Return true if the floating point result of (CODE OP0 OP1) has an
15507 : : integer value. We also allow +Inf, -Inf and NaN to be considered
15508 : : integer values. Return false for signaling NaN.
15509 : :
15510 : : DEPTH is the current nesting depth of the query. */
15511 : :
15512 : : bool
15513 : 13783 : integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
15514 : : {
15515 : 13783 : switch (code)
15516 : : {
15517 : 7226 : case PLUS_EXPR:
15518 : 7226 : case MINUS_EXPR:
15519 : 7226 : case MULT_EXPR:
15520 : 7226 : case MIN_EXPR:
15521 : 7226 : case MAX_EXPR:
15522 : 7226 : return RECURSE (op0) && RECURSE (op1);
15523 : :
15524 : : default:
15525 : : break;
15526 : : }
15527 : : return false;
15528 : : }
15529 : :
15530 : : /* Return true if the floating point result of calling FN with arguments
15531 : : ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and NaN to be
15532 : : considered integer values. Return false for signaling NaN. If FN
15533 : : takes fewer than 2 arguments, the remaining ARGn are null.
15534 : :
15535 : : DEPTH is the current nesting depth of the query. */
15536 : :
15537 : : bool
15538 : 916 : integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
15539 : : {
15540 : 916 : switch (fn)
15541 : : {
15542 : : CASE_CFN_CEIL:
15543 : : CASE_CFN_CEIL_FN:
15544 : : CASE_CFN_FLOOR:
15545 : : CASE_CFN_FLOOR_FN:
15546 : : CASE_CFN_NEARBYINT:
15547 : : CASE_CFN_NEARBYINT_FN:
15548 : : CASE_CFN_RINT:
15549 : : CASE_CFN_RINT_FN:
15550 : : CASE_CFN_ROUND:
15551 : : CASE_CFN_ROUND_FN:
15552 : : CASE_CFN_ROUNDEVEN:
15553 : : CASE_CFN_ROUNDEVEN_FN:
15554 : : CASE_CFN_TRUNC:
15555 : : CASE_CFN_TRUNC_FN:
15556 : : return true;
15557 : :
15558 : 336 : CASE_CFN_FMIN:
15559 : 336 : CASE_CFN_FMIN_FN:
15560 : 336 : CASE_CFN_FMAX:
15561 : 336 : CASE_CFN_FMAX_FN:
15562 : 336 : return RECURSE (arg0) && RECURSE (arg1);
15563 : :
15564 : : default:
15565 : : break;
15566 : : }
15567 : : return false;
15568 : : }
15569 : :
15570 : : /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
15571 : : has an integer value. We also allow +Inf, -Inf and NaN to be
15572 : : considered integer values. Return false for signaling NaN.
15573 : :
15574 : : DEPTH is the current nesting depth of the query. */
15575 : :
15576 : : bool
15577 : 127823 : integer_valued_real_single_p (tree t, int depth)
15578 : : {
15579 : 127823 : switch (TREE_CODE (t))
15580 : : {
15581 : 2197 : case REAL_CST:
15582 : 2197 : return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
15583 : :
15584 : 0 : case COND_EXPR:
15585 : 0 : return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
15586 : :
15587 : 89255 : case SSA_NAME:
15588 : : /* Limit the depth of recursion to avoid quadratic behavior.
15589 : : This is expected to catch almost all occurrences in practice.
15590 : : If this code misses important cases that unbounded recursion
15591 : : would not, passes that need this information could be revised
15592 : : to provide it through dataflow propagation. */
15593 : 89255 : return (!name_registered_for_update_p (t)
15594 : 89255 : && depth < param_max_ssa_name_query_depth
15595 : 177733 : && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
15596 : : depth));
15597 : :
15598 : : default:
15599 : : break;
15600 : : }
15601 : : return false;
15602 : : }
15603 : :
15604 : : /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
15605 : : has an integer value. We also allow +Inf, -Inf and NaN to be
15606 : : considered integer values. Return false for signaling NaN.
15607 : :
15608 : : DEPTH is the current nesting depth of the query. */
15609 : :
15610 : : static bool
15611 : 0 : integer_valued_real_invalid_p (tree t, int depth)
15612 : : {
15613 : 0 : switch (TREE_CODE (t))
15614 : : {
15615 : 0 : case COMPOUND_EXPR:
15616 : 0 : case MODIFY_EXPR:
15617 : 0 : case BIND_EXPR:
15618 : 0 : return RECURSE (TREE_OPERAND (t, 1));
15619 : :
15620 : 0 : case SAVE_EXPR:
15621 : 0 : return RECURSE (TREE_OPERAND (t, 0));
15622 : :
15623 : : default:
15624 : : break;
15625 : : }
15626 : : return false;
15627 : : }
15628 : :
15629 : : #undef RECURSE
15630 : : #undef integer_valued_real_p
15631 : :
15632 : : /* Return true if the floating point expression T has an integer value.
15633 : : We also allow +Inf, -Inf and NaN to be considered integer values.
15634 : : Return false for signaling NaN.
15635 : :
15636 : : DEPTH is the current nesting depth of the query. */
15637 : :
15638 : : bool
15639 : 96153 : integer_valued_real_p (tree t, int depth)
15640 : : {
15641 : 96153 : if (t == error_mark_node)
15642 : : return false;
15643 : :
15644 : 96153 : STRIP_ANY_LOCATION_WRAPPER (t);
15645 : :
15646 : 96153 : tree_code code = TREE_CODE (t);
15647 : 96153 : switch (TREE_CODE_CLASS (code))
15648 : : {
15649 : 0 : case tcc_binary:
15650 : 0 : case tcc_comparison:
15651 : 0 : return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
15652 : 0 : TREE_OPERAND (t, 1), depth);
15653 : :
15654 : 0 : case tcc_unary:
15655 : 0 : return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
15656 : :
15657 : 8334 : case tcc_constant:
15658 : 8334 : case tcc_declaration:
15659 : 8334 : case tcc_reference:
15660 : 8334 : return integer_valued_real_single_p (t, depth);
15661 : :
15662 : 87819 : default:
15663 : 87819 : break;
15664 : : }
15665 : :
15666 : 87819 : switch (code)
15667 : : {
15668 : 87819 : case COND_EXPR:
15669 : 87819 : case SSA_NAME:
15670 : 87819 : return integer_valued_real_single_p (t, depth);
15671 : :
15672 : 0 : case CALL_EXPR:
15673 : 0 : {
15674 : 0 : tree arg0 = (call_expr_nargs (t) > 0
15675 : 0 : ? CALL_EXPR_ARG (t, 0)
15676 : 0 : : NULL_TREE);
15677 : 0 : tree arg1 = (call_expr_nargs (t) > 1
15678 : 0 : ? CALL_EXPR_ARG (t, 1)
15679 : 0 : : NULL_TREE);
15680 : 0 : return integer_valued_real_call_p (get_call_combined_fn (t),
15681 : 0 : arg0, arg1, depth);
15682 : : }
15683 : :
15684 : 0 : default:
15685 : 0 : return integer_valued_real_invalid_p (t, depth);
15686 : : }
15687 : : }
15688 : :
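The facts the integer_valued_real_* predicates encode can be sanity-checked on plain doubles; whole_p below is only for this illustration and is not a GCC helper:

    #include <cassert>
    #include <cmath>

    static bool whole_p (double x) { return std::trunc (x) == x; }

    int main ()
    {
      assert (whole_p (std::floor (2.7)));               /* rounding functions */
      assert (whole_p (std::ceil (-2.7)));
      assert (whole_p ((double) 12345));                 /* FLOAT_EXPR analogue */
      assert (whole_p (std::fmin (std::floor (1.9),      /* min/max of integer- */
                                  std::trunc (7.2))));   /* valued operands */
      return 0;
    }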
15689 : : /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15690 : : attempt to fold the expression to a constant without modifying TYPE,
15691 : : OP0 or OP1.
15692 : :
15693 : : If the expression could be simplified to a constant, then return
15694 : : the constant. If the expression would not be simplified to a
15695 : : constant, then return NULL_TREE. */
15696 : :
15697 : : tree
15698 : 14110978 : fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15699 : : {
15700 : 14110978 : tree tem = fold_binary (code, type, op0, op1);
15701 : 14110978 : return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15702 : : }
15703 : :
15704 : : /* Given the components of a unary expression CODE, TYPE and OP0,
15705 : : attempt to fold the expression to a constant without modifying
15706 : : TYPE or OP0.
15707 : :
15708 : : If the expression could be simplified to a constant, then return
15709 : : the constant. If the expression would not be simplified to a
15710 : : constant, then return NULL_TREE. */
15711 : :
15712 : : tree
15713 : 0 : fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15714 : : {
15715 : 0 : tree tem = fold_unary (code, type, op0);
15716 : 0 : return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15717 : : }
15718 : :
15719 : : /* If EXP represents referencing an element in a constant string
15720 : : (either via pointer arithmetic or array indexing), return the
15721 : : tree representing the value accessed, otherwise return NULL. */
15722 : :
15723 : : tree
15724 : 149861963 : fold_read_from_constant_string (tree exp)
15725 : : {
15726 : 149861963 : if ((INDIRECT_REF_P (exp)
15727 : 149861945 : || TREE_CODE (exp) == ARRAY_REF)
15728 : 160433609 : && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15729 : : {
15730 : 7457275 : tree exp1 = TREE_OPERAND (exp, 0);
15731 : 7457275 : tree index;
15732 : 7457275 : tree string;
15733 : 7457275 : location_t loc = EXPR_LOCATION (exp);
15734 : :
15735 : 7457275 : if (INDIRECT_REF_P (exp))
15736 : 0 : string = string_constant (exp1, &index, NULL, NULL);
15737 : : else
15738 : : {
15739 : 7457275 : tree low_bound = array_ref_low_bound (exp);
15740 : 7457275 : index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15741 : :
15742 : : /* Optimize the special-case of a zero lower bound.
15743 : :
15744 : : We convert the low_bound to sizetype to avoid some problems
15745 : : with constant folding. (E.g. suppose the lower bound is 1,
15746 : : and its mode is QI. Without the conversion, (ARRAY
15747 : : +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15748 : : +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15749 : 7457275 : if (! integer_zerop (low_bound))
15750 : 136653 : index = size_diffop_loc (loc, index,
15751 : : fold_convert_loc (loc, sizetype, low_bound));
15752 : :
15753 : : string = exp1;
15754 : : }
15755 : :
15756 : 7457275 : scalar_int_mode char_mode;
15757 : 7457275 : if (string
15758 : 7457275 : && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15759 : 7457275 : && TREE_CODE (string) == STRING_CST
15760 : 63778 : && tree_fits_uhwi_p (index)
15761 : 58676 : && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15762 : 7515975 : && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))),
15763 : : &char_mode)
15764 : 14914550 : && GET_MODE_SIZE (char_mode) == 1)
15765 : 114044 : return build_int_cst_type (TREE_TYPE (exp),
15766 : 57022 : (TREE_STRING_POINTER (string)
15767 : 57022 : [TREE_INT_CST_LOW (index)]));
15768 : : }
15769 : : return NULL;
15770 : : }
15771 : :
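Seen from the source level, the access this routine is meant to fold is a constant index applied to a string literal: the load disappears and only the character constant remains. A small example (other parts of the folder may perform the same simplification, so treat it as an illustration rather than a test of this exact function):

    #include <cstdio>

    static char third (void)
    {
      return "abcdef"[2];   /* ARRAY_REF of a STRING_CST with a constant index */
    }

    int main ()
    {
      std::printf ("%c\n", third ());   /* prints 'c'; the read is folded away */
      return 0;
    }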
15772 : : /* Folds a read from vector element at IDX of vector ARG. */
15773 : :
15774 : : tree
15775 : 4782 : fold_read_from_vector (tree arg, poly_uint64 idx)
15776 : : {
15777 : 4782 : unsigned HOST_WIDE_INT i;
15778 : 4782 : if (known_lt (idx, TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)))
15779 : 4782 : && known_ge (idx, 0u)
15780 : 4782 : && idx.is_constant (&i))
15781 : : {
15782 : 4782 : if (TREE_CODE (arg) == VECTOR_CST)
15783 : 1338 : return VECTOR_CST_ELT (arg, i);
15784 : 3444 : else if (TREE_CODE (arg) == CONSTRUCTOR)
15785 : : {
15786 : 1421 : if (CONSTRUCTOR_NELTS (arg)
15787 : 1381 : && VECTOR_TYPE_P (TREE_TYPE (CONSTRUCTOR_ELT (arg, 0)->value)))
15788 : : return NULL_TREE;
15789 : 1419 : if (i >= CONSTRUCTOR_NELTS (arg))
15790 : 40 : return build_zero_cst (TREE_TYPE (TREE_TYPE (arg)));
15791 : 1379 : return CONSTRUCTOR_ELT (arg, i)->value;
15792 : : }
15793 : : }
15794 : : return NULL_TREE;
15795 : : }
15796 : :
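At the source level the vector case corresponds to a constant subscript into a vector whose value becomes a VECTOR_CST, for example with GCC's vector_size extension. The example below is illustrative; whether this exact routine or another part of the match machinery performs the fold depends on the pipeline:

    #include <cstdio>

    typedef int v4si __attribute__ ((vector_size (16)));

    static int third_lane (void)
    {
      v4si v = { 2, 3, 4, 5 };
      return v[2];          /* lane 2; folds to the constant 4 */
    }

    int main ()
    {
      std::printf ("%d\n", third_lane ());
      return 0;
    }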
15797 : : /* Return the tree for neg (ARG0) when ARG0 is known to be either
15798 : : an integer constant, real, or fixed-point constant.
15799 : :
15800 : : TYPE is the type of the result. */
15801 : :
15802 : : static tree
15803 : 29466356 : fold_negate_const (tree arg0, tree type)
15804 : : {
15805 : 29466356 : tree t = NULL_TREE;
15806 : :
15807 : 29466356 : switch (TREE_CODE (arg0))
15808 : : {
15809 : 1991159 : case REAL_CST:
15810 : 1991159 : t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15811 : 1991159 : break;
15812 : :
15813 : 0 : case FIXED_CST:
15814 : 0 : {
15815 : 0 : FIXED_VALUE_TYPE f;
15816 : 0 : bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15817 : 0 : &(TREE_FIXED_CST (arg0)), NULL,
15818 : 0 : TYPE_SATURATING (type));
15819 : 0 : t = build_fixed (type, f);
15820 : : /* Propagate overflow flags. */
15821 : 0 : if (overflow_p | TREE_OVERFLOW (arg0))
15822 : 0 : TREE_OVERFLOW (t) = 1;
15823 : 0 : break;
15824 : : }
15825 : :
15826 : 27475197 : default:
15827 : 27475197 : if (poly_int_tree_p (arg0))
15828 : : {
15829 : 27475197 : wi::overflow_type overflow;
15830 : 27475197 : poly_wide_int res = wi::neg (wi::to_poly_wide (arg0), &overflow);
15831 : 27475197 : t = force_fit_type (type, res, 1,
15832 : 203830 : (overflow && ! TYPE_UNSIGNED (type))
15833 : 27672600 : || TREE_OVERFLOW (arg0));
15834 : 27475197 : break;
15835 : 27475197 : }
15836 : :
15837 : 0 : gcc_unreachable ();
15838 : : }
15839 : :
15840 : 29466356 : return t;
15841 : : }
15842 : :
15843 : : /* Return the tree for abs (ARG0) when ARG0 is known to be either
15844 : : an integer constant or real constant.
15845 : :
15846 : : TYPE is the type of the result. */
15847 : :
15848 : : tree
15849 : 33231 : fold_abs_const (tree arg0, tree type)
15850 : : {
15851 : 33231 : tree t = NULL_TREE;
15852 : :
15853 : 33231 : switch (TREE_CODE (arg0))
15854 : : {
15855 : 6270 : case INTEGER_CST:
15856 : 6270 : {
15857 : : /* If the value is unsigned or non-negative, then the absolute value
15858 : : is the same as the ordinary value. */
15859 : 6270 : wide_int val = wi::to_wide (arg0);
15860 : 6270 : wi::overflow_type overflow = wi::OVF_NONE;
15861 : 6270 : if (!wi::neg_p (val, TYPE_SIGN (TREE_TYPE (arg0))))
15862 : : ;
15863 : :
15864 : : /* If the value is negative, then the absolute value is
15865 : : its negation. */
15866 : : else
15867 : 2841 : val = wi::neg (val, &overflow);
15868 : :
15869 : : /* Force to the destination type, set TREE_OVERFLOW for signed
15870 : : TYPE only. */
15871 : 6270 : t = force_fit_type (type, val, 1, overflow | TREE_OVERFLOW (arg0));
15872 : 6270 : }
15873 : 6270 : break;
15874 : :
15875 : 26961 : case REAL_CST:
15876 : 26961 : if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15877 : 7183 : t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15878 : : else
15879 : : t = arg0;
15880 : : break;
15881 : :
15882 : 0 : default:
15883 : 0 : gcc_unreachable ();
15884 : : }
15885 : :
15886 : 33231 : return t;
15887 : : }
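             /* Worked example of the INTEGER_CST overflow handling (a sketch
                assuming a 32-bit signed TYPE): the absolute value of the most
                negative constant is not representable, so the negation wraps
                and the result is flagged:

                  tree m = build_int_cst (integer_type_node, INT_MIN);
                  tree a = fold_abs_const (m, integer_type_node);

                A is numerically INT_MIN again, but force_fit_type has set
                TREE_OVERFLOW on it because wi::neg reported the overflow.  */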
15888 : :
15889 : : /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15890 : : constant. TYPE is the type of the result. */
15891 : :
15892 : : static tree
15893 : 2217014 : fold_not_const (const_tree arg0, tree type)
15894 : : {
15895 : 2217014 : gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15896 : :
15897 : 2217014 : return force_fit_type (type, ~wi::to_wide (arg0), 0, TREE_OVERFLOW (arg0));
15898 : : }
15899 : :
15900 : : /* Given CODE, a relational operator, the target type, TYPE and two
15901 : : constant operands OP0 and OP1, return the result of the
15902 : : relational operation. If the result is not a compile time
15903 : : constant, then return NULL_TREE. */
15904 : :
15905 : : static tree
15906 : 52947815 : fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15907 : : {
15908 : 52947815 : int result, invert;
15909 : :
15910 : : /* From here on, the only cases we handle are when the result is
15911 : : known to be a constant. */
15912 : :
15913 : 52947815 : if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15914 : : {
15915 : 1088534 : const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15916 : 1088534 : const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15917 : :
15918 : : /* Handle the cases where either operand is a NaN. */
15919 : 1088534 : if (real_isnan (c0) || real_isnan (c1))
15920 : : {
15921 : 14827 : switch (code)
15922 : : {
15923 : : case EQ_EXPR:
15924 : : case ORDERED_EXPR:
15925 : : result = 0;
15926 : : break;
15927 : :
15928 : : case NE_EXPR:
15929 : : case UNORDERED_EXPR:
15930 : : case UNLT_EXPR:
15931 : : case UNLE_EXPR:
15932 : : case UNGT_EXPR:
15933 : : case UNGE_EXPR:
15934 : : case UNEQ_EXPR:
15935 : 6572 : result = 1;
15936 : : break;
15937 : :
15938 : 8277 : case LT_EXPR:
15939 : 8277 : case LE_EXPR:
15940 : 8277 : case GT_EXPR:
15941 : 8277 : case GE_EXPR:
15942 : 8277 : case LTGT_EXPR:
15943 : 8277 : if (flag_trapping_math)
15944 : : return NULL_TREE;
15945 : : result = 0;
15946 : : break;
15947 : :
15948 : 0 : default:
15949 : 0 : gcc_unreachable ();
15950 : : }
15951 : :
15952 : 6572 : return constant_boolean_node (result, type);
15953 : : }
15954 : :
15955 : 1073707 : return constant_boolean_node (real_compare (code, c0, c1), type);
15956 : : }
15957 : :
15958 : 51859281 : if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15959 : : {
15960 : 0 : const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15961 : 0 : const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15962 : 0 : return constant_boolean_node (fixed_compare (code, c0, c1), type);
15963 : : }
15964 : :
15965 : : /* Handle equality/inequality of complex constants. */
15966 : 51859281 : if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15967 : : {
15968 : 58072 : tree rcond = fold_relational_const (code, type,
15969 : 29036 : TREE_REALPART (op0),
15970 : 29036 : TREE_REALPART (op1));
15971 : 116144 : tree icond = fold_relational_const (code, type,
15972 : 29036 : TREE_IMAGPART (op0),
15973 : 29036 : TREE_IMAGPART (op1));
15974 : 29036 : if (code == EQ_EXPR)
15975 : 280 : return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15976 : 28756 : else if (code == NE_EXPR)
15977 : 28756 : return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15978 : : else
15979 : : return NULL_TREE;
15980 : : }
15981 : :
15982 : 51830245 : if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
15983 : : {
15984 : 4916 : if (!VECTOR_TYPE_P (type))
15985 : : {
15986 : : /* Have vector comparison with scalar boolean result. */
15987 : 142 : gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
15988 : : && known_eq (VECTOR_CST_NELTS (op0),
15989 : : VECTOR_CST_NELTS (op1)));
15990 : 142 : unsigned HOST_WIDE_INT nunits;
15991 : 142 : if (!VECTOR_CST_NELTS (op0).is_constant (&nunits))
15992 : : return NULL_TREE;
15993 : 805 : for (unsigned i = 0; i < nunits; i++)
15994 : : {
15995 : 718 : tree elem0 = VECTOR_CST_ELT (op0, i);
15996 : 718 : tree elem1 = VECTOR_CST_ELT (op1, i);
15997 : 718 : tree tmp = fold_relational_const (EQ_EXPR, type, elem0, elem1);
15998 : 718 : if (tmp == NULL_TREE)
15999 : : return NULL_TREE;
16000 : 718 : if (integer_zerop (tmp))
16001 : 55 : return constant_boolean_node (code == NE_EXPR, type);
16002 : : }
16003 : 87 : return constant_boolean_node (code == EQ_EXPR, type);
16004 : : }
16005 : 4774 : tree_vector_builder elts;
16006 : 4774 : if (!elts.new_binary_operation (type, op0, op1, false))
16007 : : return NULL_TREE;
16008 : 4774 : unsigned int count = elts.encoded_nelts ();
16009 : 20321 : for (unsigned i = 0; i < count; i++)
16010 : : {
16011 : 15547 : tree elem_type = TREE_TYPE (type);
16012 : 15547 : tree elem0 = VECTOR_CST_ELT (op0, i);
16013 : 15547 : tree elem1 = VECTOR_CST_ELT (op1, i);
16014 : :
16015 : 15547 : tree tem = fold_relational_const (code, elem_type,
16016 : : elem0, elem1);
16017 : :
16018 : 15547 : if (tem == NULL_TREE)
16019 : : return NULL_TREE;
16020 : :
16021 : 15547 : elts.quick_push (build_int_cst (elem_type,
16022 : 21987 : integer_zerop (tem) ? 0 : -1));
16023 : : }
16024 : :
16025 : 4774 : return elts.build ();
16026 : 4774 : }
16027 : :
16028 : : /* From here on we only handle LT, LE, GT, GE, EQ and NE.
16029 : :
16030 : : To compute GT, swap the arguments and do LT.
16031 : : To compute GE, do LT and invert the result.
16032 : : To compute LE, swap the arguments, do LT and invert the result.
16033 : : To compute NE, do EQ and invert the result.
16034 : :
16035 : : Therefore, the code below must handle only EQ and LT. */
16036 : :
16037 : 51825329 : if (code == LE_EXPR || code == GT_EXPR)
16038 : : {
16039 : 10274874 : std::swap (op0, op1);
16040 : 10274874 : code = swap_tree_comparison (code);
16041 : : }
16042 : :
16043 : : /* Note that it is safe to invert for real values here because we
16044                 : :      have already handled the one case where it matters.  */
16045 : :
16046 : 51825329 : invert = 0;
16047 : 51825329 : if (code == NE_EXPR || code == GE_EXPR)
16048 : : {
16049 : 25318995 : invert = 1;
16050 : 25318995 : code = invert_tree_comparison (code, false);
16051 : : }
16052 : :
16053 : : /* Compute a result for LT or EQ if args permit;
16054                 : :      otherwise return NULL_TREE.  */
16055 : 51825329 : if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
16056 : : {
16057 : 51807984 : if (code == EQ_EXPR)
16058 : 25906098 : result = tree_int_cst_equal (op0, op1);
16059 : : else
16060 : 25901886 : result = tree_int_cst_lt (op0, op1);
16061 : : }
16062 : : else
16063 : : return NULL_TREE;
16064 : :
16065 : 51807984 : if (invert)
16066 : 25316797 : result ^= 1;
16067 : 51807984 : return constant_boolean_node (result, type);
16068 : : }
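             /* The NaN branch above follows IEEE 754 semantics; in plain C
                (sketch, using nan () from <math.h>):

                  double n = nan ("");
                  int a = (n == n);     // 0: EQ with a NaN operand is false
                  int b = (n != n);     // 1: NE is true
                  int c = (n < 1.0);    // 0, but may raise FE_INVALID

                which is why the ordered comparisons are only folded when
                trapping math is disabled (flag_trapping_math clear).  */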
16069 : :
16070 : : /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
16071 : : indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
16072 : : itself. */
16073 : :
16074 : : tree
16075 : 108738077 : fold_build_cleanup_point_expr (tree type, tree expr)
16076 : : {
16077 : : /* If the expression does not have side effects then we don't have to wrap
16078 : : it with a cleanup point expression. */
16079 : 108738077 : if (!TREE_SIDE_EFFECTS (expr))
16080 : : return expr;
16081 : :
16082                 : :   /* If the expression is a return, check whether the expression inside the
16083                 : :      return, or the right-hand side of the modify expression inside the
16084                 : :      return, has side effects.  If either of them has none, we don't need to
16085                 : :      wrap the expression in a cleanup point expression.  Note we don't check
16086                 : :      the left-hand side of the modify because it should always be the return decl.  */
16087 : 94895564 : if (TREE_CODE (expr) == RETURN_EXPR)
16088 : : {
16089 : 34000060 : tree op = TREE_OPERAND (expr, 0);
16090 : 34000060 : if (!op || !TREE_SIDE_EFFECTS (op))
16091 : : return expr;
16092 : 33389149 : op = TREE_OPERAND (op, 1);
16093 : 33389149 : if (!TREE_SIDE_EFFECTS (op))
16094 : : return expr;
16095 : : }
16096 : :
16097 : 77018193 : return build1_loc (EXPR_LOCATION (expr), CLEANUP_POINT_EXPR, type, expr);
16098 : : }
16099 : :
16100 : : /* Given a pointer value OP0 and a type TYPE, return a simplified version
16101 : : of an indirection through OP0, or NULL_TREE if no simplification is
16102 : : possible. */
16103 : :
16104 : : tree
16105 : 16424272 : fold_indirect_ref_1 (location_t loc, tree type, tree op0)
16106 : : {
16107 : 16424272 : tree sub = op0;
16108 : 16424272 : tree subtype;
16109 : 16424272 : poly_uint64 const_op01;
16110 : :
16111 : 16424272 : STRIP_NOPS (sub);
16112 : 16424272 : subtype = TREE_TYPE (sub);
16113 : 16424272 : if (!POINTER_TYPE_P (subtype)
16114 : 16424272 : || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0)))
16115 : : return NULL_TREE;
16116 : :
16117 : 16280888 : if (TREE_CODE (sub) == ADDR_EXPR)
16118 : : {
16119 : 2661829 : tree op = TREE_OPERAND (sub, 0);
16120 : 2661829 : tree optype = TREE_TYPE (op);
16121 : :
16122 : : /* *&CONST_DECL -> to the value of the const decl. */
16123 : 2661829 : if (TREE_CODE (op) == CONST_DECL)
16124 : 2717 : return DECL_INITIAL (op);
16125 : : /* *&p => p; make sure to handle *&"str"[cst] here. */
16126 : 2659112 : if (type == optype)
16127 : : {
16128 : 1804974 : tree fop = fold_read_from_constant_string (op);
16129 : 1804974 : if (fop)
16130 : : return fop;
16131 : : else
16132 : 1762800 : return op;
16133 : : }
16134 : : /* *(foo *)&fooarray => fooarray[0] */
16135 : 854138 : else if (TREE_CODE (optype) == ARRAY_TYPE
16136 : 11266 : && type == TREE_TYPE (optype)
16137 : 864451 : && (!in_gimple_form
16138 : 993 : || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16139 : : {
16140 : 10313 : tree type_domain = TYPE_DOMAIN (optype);
16141 : 10313 : tree min_val = size_zero_node;
16142 : 10313 : if (type_domain && TYPE_MIN_VALUE (type_domain))
16143 : 10275 : min_val = TYPE_MIN_VALUE (type_domain);
16144 : 10313 : if (in_gimple_form
16145 : 993 : && TREE_CODE (min_val) != INTEGER_CST)
16146 : : return NULL_TREE;
16147 : 10313 : return build4_loc (loc, ARRAY_REF, type, op, min_val,
16148 : 10313 : NULL_TREE, NULL_TREE);
16149 : : }
16150 : : /* *(foo *)&complexfoo => __real__ complexfoo */
16151 : 843825 : else if (TREE_CODE (optype) == COMPLEX_TYPE
16152 : 843825 : && type == TREE_TYPE (optype))
16153 : 0 : return fold_build1_loc (loc, REALPART_EXPR, type, op);
16154 : : /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16155 : 843825 : else if (VECTOR_TYPE_P (optype)
16156 : 843825 : && type == TREE_TYPE (optype))
16157 : : {
16158 : 70 : tree part_width = TYPE_SIZE (type);
16159 : 70 : tree index = bitsize_int (0);
16160 : 70 : return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width,
16161 : 70 : index);
16162 : : }
16163 : : }
16164 : :
16165 : 14462814 : if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16166 : 14462814 : && poly_int_tree_p (TREE_OPERAND (sub, 1), &const_op01))
16167 : : {
16168 : 262671 : tree op00 = TREE_OPERAND (sub, 0);
16169 : 262671 : tree op01 = TREE_OPERAND (sub, 1);
16170 : :
16171 : 262671 : STRIP_NOPS (op00);
16172 : 262671 : if (TREE_CODE (op00) == ADDR_EXPR)
16173 : : {
16174 : 2679 : tree op00type;
16175 : 2679 : op00 = TREE_OPERAND (op00, 0);
16176 : 2679 : op00type = TREE_TYPE (op00);
16177 : :
16178 : : /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16179 : 2679 : if (VECTOR_TYPE_P (op00type)
16180 : 240 : && type == TREE_TYPE (op00type)
16181 : : /* POINTER_PLUS_EXPR second operand is sizetype, unsigned,
16182 : : but we want to treat offsets with MSB set as negative.
16183 : : For the code below negative offsets are invalid and
16184 : : TYPE_SIZE of the element is something unsigned, so
16185 : : check whether op01 fits into poly_int64, which implies
16186 : : it is from 0 to INTTYPE_MAXIMUM (HOST_WIDE_INT), and
16187 : : then just use poly_uint64 because we want to treat the
16188 : : value as unsigned. */
16189 : 2872 : && tree_fits_poly_int64_p (op01))
16190 : : {
16191 : 179 : tree part_width = TYPE_SIZE (type);
16192 : 179 : poly_uint64 max_offset
16193 : 179 : = (tree_to_uhwi (part_width) / BITS_PER_UNIT
16194 : 179 : * TYPE_VECTOR_SUBPARTS (op00type));
16195 : 179 : if (known_lt (const_op01, max_offset))
16196 : : {
16197 : 179 : tree index = bitsize_int (const_op01 * BITS_PER_UNIT);
16198 : 179 : return fold_build3_loc (loc,
16199 : : BIT_FIELD_REF, type, op00,
16200 : 179 : part_width, index);
16201 : : }
16202 : : }
16203 : : /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16204 : 2500 : else if (TREE_CODE (op00type) == COMPLEX_TYPE
16205 : 2500 : && type == TREE_TYPE (op00type))
16206 : : {
16207 : 0 : if (known_eq (wi::to_poly_offset (TYPE_SIZE_UNIT (type)),
16208 : : const_op01))
16209 : 0 : return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16210 : : }
16211 : : /* ((foo *)&fooarray)[1] => fooarray[1] */
16212 : 2500 : else if (TREE_CODE (op00type) == ARRAY_TYPE
16213 : 2500 : && type == TREE_TYPE (op00type))
16214 : : {
16215 : 1621 : tree type_domain = TYPE_DOMAIN (op00type);
16216 : 1621 : tree min_val = size_zero_node;
16217 : 1621 : if (type_domain && TYPE_MIN_VALUE (type_domain))
16218 : 1620 : min_val = TYPE_MIN_VALUE (type_domain);
16219 : 1621 : poly_uint64 type_size, index;
16220 : 1621 : if (poly_int_tree_p (min_val)
16221 : 1621 : && poly_int_tree_p (TYPE_SIZE_UNIT (type), &type_size)
16222 : 1621 : && multiple_p (const_op01, type_size, &index))
16223 : : {
16224 : 1621 : poly_offset_int off = index + wi::to_poly_offset (min_val);
16225 : 1621 : op01 = wide_int_to_tree (sizetype, off);
16226 : 1621 : return build4_loc (loc, ARRAY_REF, type, op00, op01,
16227 : : NULL_TREE, NULL_TREE);
16228 : : }
16229 : : }
16230 : : }
16231 : : }
16232 : :
16233 : : /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16234 : 14461014 : if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16235 : 630218 : && type == TREE_TYPE (TREE_TYPE (subtype))
16236 : 14463985 : && (!in_gimple_form
16237 : 12 : || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16238 : : {
16239 : 2970 : tree type_domain;
16240 : 2970 : tree min_val = size_zero_node;
16241 : 2970 : sub = build_fold_indirect_ref_loc (loc, sub);
16242 : 2970 : type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16243 : 2970 : if (type_domain && TYPE_MIN_VALUE (type_domain))
16244 : 2970 : min_val = TYPE_MIN_VALUE (type_domain);
16245 : 2970 : if (in_gimple_form
16246 : 11 : && TREE_CODE (min_val) != INTEGER_CST)
16247 : : return NULL_TREE;
16248 : 2970 : return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16249 : 2970 : NULL_TREE);
16250 : : }
16251 : :
16252 : : return NULL_TREE;
16253 : : }
16254 : :
16255 : : /* Builds an expression for an indirection through T, simplifying some
16256 : : cases. */
16257 : :
16258 : : tree
16259 : 6355247 : build_fold_indirect_ref_loc (location_t loc, tree t)
16260 : : {
16261 : 6355247 : tree type = TREE_TYPE (TREE_TYPE (t));
16262 : 6355247 : tree sub = fold_indirect_ref_1 (loc, type, t);
16263 : :
16264 : 6355247 : if (sub)
16265 : : return sub;
16266 : :
16267 : 4547797 : return build1_loc (loc, INDIRECT_REF, type, t);
16268 : : }
16269 : :
16270 : : /* Given an INDIRECT_REF T, return either T or a simplified version. */
16271 : :
16272 : : tree
16273 : 9788354 : fold_indirect_ref_loc (location_t loc, tree t)
16274 : : {
16275 : 9788354 : tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16276 : :
16277 : 9788354 : if (sub)
16278 : : return sub;
16279 : : else
16280 : 9773228 : return t;
16281 : : }
16282 : :
16283 : : /* Strip non-trapping, non-side-effecting tree nodes from an expression
16284 : : whose result is ignored. The type of the returned tree need not be
16285 : : the same as the original expression. */
16286 : :
16287 : : tree
16288 : 132453 : fold_ignored_result (tree t)
16289 : : {
16290 : 132453 : if (!TREE_SIDE_EFFECTS (t))
16291 : 18256 : return integer_zero_node;
16292 : :
16293 : 151519 : for (;;)
16294 : 151519 : switch (TREE_CODE_CLASS (TREE_CODE (t)))
16295 : : {
16296 : 3758 : case tcc_unary:
16297 : 3758 : t = TREE_OPERAND (t, 0);
16298 : 3758 : break;
16299 : :
16300 : 4929 : case tcc_binary:
16301 : 4929 : case tcc_comparison:
16302 : 4929 : if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16303 : 3027 : t = TREE_OPERAND (t, 0);
16304 : 1902 : else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16305 : 23 : t = TREE_OPERAND (t, 1);
16306 : : else
16307 : : return t;
16308 : : break;
16309 : :
16310 : 98490 : case tcc_expression:
16311 : 98490 : switch (TREE_CODE (t))
16312 : : {
16313 : 30496 : case COMPOUND_EXPR:
16314 : 30496 : if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16315 : : return t;
16316 : 30219 : t = TREE_OPERAND (t, 0);
16317 : 30219 : break;
16318 : :
16319 : 382 : case COND_EXPR:
16320 : 382 : if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16321 : 382 : || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16322 : : return t;
16323 : 295 : t = TREE_OPERAND (t, 0);
16324 : 295 : break;
16325 : :
16326 : : default:
16327 : : return t;
16328 : : }
16329 : : break;
16330 : :
16331 : : default:
16332 : : return t;
16333 : : }
16334 : : }
16335 : :
16336 : : /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
16337 : :
16338 : : tree
16339 : 2571760357 : round_up_loc (location_t loc, tree value, unsigned int divisor)
16340 : : {
16341 : 2571760357 : tree div = NULL_TREE;
16342 : :
16343 : 2571760357 : if (divisor == 1)
16344 : : return value;
16345 : :
16346 : : /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16347 : : have to do anything. Only do this when we are not given a const,
16348 : : because in that case, this check is more expensive than just
16349 : : doing it. */
16350 : 1597450222 : if (TREE_CODE (value) != INTEGER_CST)
16351 : : {
16352 : 319753 : div = build_int_cst (TREE_TYPE (value), divisor);
16353 : :
16354 : 319753 : if (multiple_of_p (TREE_TYPE (value), value, div))
16355 : : return value;
16356 : : }
16357 : :
16358 : : /* If divisor is a power of two, simplify this to bit manipulation. */
16359 : 1597132248 : if (pow2_or_zerop (divisor))
16360 : : {
16361 : 1597132248 : if (TREE_CODE (value) == INTEGER_CST)
16362 : : {
16363 : 1597130469 : wide_int val = wi::to_wide (value);
16364 : 1597130469 : bool overflow_p;
16365 : :
16366 : 1597130469 : if ((val & (divisor - 1)) == 0)
16367 : : return value;
16368 : :
16369 : 3442724 : overflow_p = TREE_OVERFLOW (value);
16370 : 3442724 : val += divisor - 1;
16371 : 3442724 : val &= (int) -divisor;
16372 : 3442724 : if (val == 0)
16373 : 0 : overflow_p = true;
16374 : :
16375 : 3442724 : return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
16376 : 1597130469 : }
16377 : : else
16378 : : {
16379 : 1779 : tree t;
16380 : :
16381 : 1779 : t = build_int_cst (TREE_TYPE (value), divisor - 1);
16382 : 1779 : value = size_binop_loc (loc, PLUS_EXPR, value, t);
16383 : 1779 : t = build_int_cst (TREE_TYPE (value), - (int) divisor);
16384 : 1779 : value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16385 : : }
16386 : : }
16387 : : else
16388 : : {
16389 : 0 : if (!div)
16390 : 0 : div = build_int_cst (TREE_TYPE (value), divisor);
16391 : 0 : value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16392 : 0 : value = size_binop_loc (loc, MULT_EXPR, value, div);
16393 : : }
16394 : :
16395 : : return value;
16396 : : }
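             /* Worked example of the power-of-two path (plain C sketch; the
                values 13 and 8 are arbitrary):

                  unsigned value = 13, divisor = 8;
                  value += divisor - 1;   // 20
                  value &= -divisor;      // 16, i.e. 13 rounded up to 8

                which is exactly the add-and-mask sequence the INTEGER_CST and
                non-constant branches above compute.  */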
16397 : :
16398 : : /* Likewise, but round down. */
16399 : :
16400 : : tree
16401 : 15836795 : round_down_loc (location_t loc, tree value, int divisor)
16402 : : {
16403 : 15836795 : tree div = NULL_TREE;
16404 : :
16405 : 15836795 : gcc_assert (divisor > 0);
16406 : 15836795 : if (divisor == 1)
16407 : : return value;
16408 : :
16409 : : /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16410 : : have to do anything. Only do this when we are not given a const,
16411 : : because in that case, this check is more expensive than just
16412 : : doing it. */
16413 : 15836795 : if (TREE_CODE (value) != INTEGER_CST)
16414 : : {
16415 : 0 : div = build_int_cst (TREE_TYPE (value), divisor);
16416 : :
16417 : 0 : if (multiple_of_p (TREE_TYPE (value), value, div))
16418 : : return value;
16419 : : }
16420 : :
16421 : : /* If divisor is a power of two, simplify this to bit manipulation. */
16422 : 15836795 : if (pow2_or_zerop (divisor))
16423 : : {
16424 : 15836795 : tree t;
16425 : :
16426 : 15836795 : t = build_int_cst (TREE_TYPE (value), -divisor);
16427 : 15836795 : value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16428 : : }
16429 : : else
16430 : : {
16431 : 0 : if (!div)
16432 : 0 : div = build_int_cst (TREE_TYPE (value), divisor);
16433 : 0 : value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16434 : 0 : value = size_binop_loc (loc, MULT_EXPR, value, div);
16435 : : }
16436 : :
16437 : : return value;
16438 : : }
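             /* The rounding-down counterpart only needs the mask (plain C
                sketch):

                  unsigned value = 13, divisor = 8;
                  value &= -divisor;      // 8, i.e. 13 rounded down to 8  */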
16439 : :
16440 : : /* Returns the pointer to the base of the object addressed by EXP and
16441                 : :    extracts the information about the offset of the access, storing it
16442                 : :    in *PBITPOS and *POFFSET.  */
16443 : :
16444 : : static tree
16445 : 1225548 : split_address_to_core_and_offset (tree exp,
16446 : : poly_int64 *pbitpos, tree *poffset)
16447 : : {
16448 : 1225548 : tree core;
16449 : 1225548 : machine_mode mode;
16450 : 1225548 : int unsignedp, reversep, volatilep;
16451 : 1225548 : poly_int64 bitsize;
16452 : 1225548 : location_t loc = EXPR_LOCATION (exp);
16453 : :
16454 : 1225548 : if (TREE_CODE (exp) == SSA_NAME)
16455 : 399658 : if (gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (exp)))
16456 : 318559 : if (gimple_assign_rhs_code (def) == ADDR_EXPR)
16457 : 31093 : exp = gimple_assign_rhs1 (def);
16458 : :
16459 : 1225548 : if (TREE_CODE (exp) == ADDR_EXPR)
16460 : : {
16461 : 729836 : core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16462 : : poffset, &mode, &unsignedp, &reversep,
16463 : : &volatilep);
16464 : 729836 : core = build_fold_addr_expr_loc (loc, core);
16465 : : }
16466 : 495712 : else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
16467 : : {
16468 : 34964 : core = TREE_OPERAND (exp, 0);
16469 : 34964 : STRIP_NOPS (core);
16470 : 34964 : *pbitpos = 0;
16471 : 34964 : *poffset = TREE_OPERAND (exp, 1);
16472 : 34964 : if (poly_int_tree_p (*poffset))
16473 : : {
16474 : 34877 : poly_offset_int tem
16475 : 34877 : = wi::sext (wi::to_poly_offset (*poffset),
16476 : 34877 : TYPE_PRECISION (TREE_TYPE (*poffset)));
16477 : 34877 : tem <<= LOG2_BITS_PER_UNIT;
16478 : 34877 : if (tem.to_shwi (pbitpos))
16479 : 34877 : *poffset = NULL_TREE;
16480 : : }
16481 : : }
16482 : : else
16483 : : {
16484 : 460748 : core = exp;
16485 : 460748 : *pbitpos = 0;
16486 : 460748 : *poffset = NULL_TREE;
16487 : : }
16488 : :
16489 : 1225548 : return core;
16490 : : }
16491 : :
16492 : : /* Returns true if addresses of E1 and E2 differ by a constant, false
16493 : : otherwise. If they do, E1 - E2 is stored in *DIFF. */
16494 : :
16495 : : bool
16496 : 612774 : ptr_difference_const (tree e1, tree e2, poly_int64 *diff)
16497 : : {
16498 : 612774 : tree core1, core2;
16499 : 612774 : poly_int64 bitpos1, bitpos2;
16500 : 612774 : tree toffset1, toffset2, tdiff, type;
16501 : :
16502 : 612774 : core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16503 : 612774 : core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16504 : :
16505 : 612774 : poly_int64 bytepos1, bytepos2;
16506 : 612774 : if (!multiple_p (bitpos1, BITS_PER_UNIT, &bytepos1)
16507 : 1068817 : || !multiple_p (bitpos2, BITS_PER_UNIT, &bytepos2)
16508 : 1225548 : || !operand_equal_p (core1, core2, 0))
16509 : 456043 : return false;
16510 : :
16511 : 156731 : if (toffset1 && toffset2)
16512 : : {
16513 : 26 : type = TREE_TYPE (toffset1);
16514 : 26 : if (type != TREE_TYPE (toffset2))
16515 : 0 : toffset2 = fold_convert (type, toffset2);
16516 : :
16517 : 26 : tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16518 : 26 : if (!cst_and_fits_in_hwi (tdiff))
16519 : : return false;
16520 : :
16521 : 12 : *diff = int_cst_value (tdiff);
16522 : : }
16523 : 156705 : else if (toffset1 || toffset2)
16524 : : {
16525 : : /* If only one of the offsets is non-constant, the difference cannot
16526 : : be a constant. */
16527 : : return false;
16528 : : }
16529 : : else
16530 : 139005 : *diff = 0;
16531 : :
16532 : 139017 : *diff += bytepos1 - bytepos2;
16533 : 139017 : return true;
16534 : : }
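             /* Usage sketch (hypothetical trees; `a' is an array of 4-byte
                ints and E1/E2 are the ADDR_EXPRs shown in the comments):

                  poly_int64 diff;
                  // e1 = &a[3], e2 = &a[1]
                  if (ptr_difference_const (e1, e2, &diff))
                    gcc_checking_assert (known_eq (diff, 8));

                Both addresses share the core &a, so only the byte offsets
                (12 and 4) differ; distinct bases, or a variable offset on one
                side only, make the function return false.  */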
16535 : :
16536 : : /* Return OFF converted to a pointer offset type suitable as offset for
16537 : : POINTER_PLUS_EXPR. Use location LOC for this conversion. */
16538 : : tree
16539 : 16884132 : convert_to_ptrofftype_loc (location_t loc, tree off)
16540 : : {
16541 : 16884132 : if (ptrofftype_p (TREE_TYPE (off)))
16542 : : return off;
16543 : 2093433 : return fold_convert_loc (loc, sizetype, off);
16544 : : }
16545 : :
16546 : : /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
16547 : : tree
16548 : 15000903 : fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
16549 : : {
16550 : 15000903 : return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
16551 : 15000903 : ptr, convert_to_ptrofftype_loc (loc, off));
16552 : : }
16553 : :
16554 : : /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
16555 : : tree
16556 : 159549 : fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
16557 : : {
16558 : 159549 : return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
16559 : 159549 : ptr, size_int (off));
16560 : : }
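             /* Both helpers build a POINTER_PLUS_EXPR whose offset operand is
                a byte count of sizetype, e.g. (sketch; LOC and PTR are a
                hypothetical location and a tree of type int *):

                  tree p4 = fold_build_pointer_plus_hwi_loc (loc, ptr, 16);

                which corresponds to the C expression (int *)((char *)ptr + 16),
                i.e. ptr + 4 for 4-byte int.  */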
16561 : :
16562 : : /* Return a pointer to a NUL-terminated string containing the sequence
16563 : : of bytes corresponding to the representation of the object referred to
16564 : : by SRC (or a subsequence of such bytes within it if SRC is a reference
16565 : : to an initialized constant array plus some constant offset).
16566 : : Set *STRSIZE the number of bytes in the constant sequence including
16567 : : the terminating NUL byte. *STRSIZE is equal to sizeof(A) - OFFSET
16568                 : :    Set *STRSIZE to the number of bytes in the constant sequence including
16569 : : to and OFFSET is the byte offset of SRC from the beginning of A. SRC
16570 : : need not point to a string or even an array of characters but may point
16571 : : to an object of any type. */
16572 : :
16573 : : const char *
16574 : 12167807 : getbyterep (tree src, unsigned HOST_WIDE_INT *strsize)
16575 : : {
16576 : : /* The offset into the array A storing the string, and A's byte size. */
16577 : 12167807 : tree offset_node;
16578 : 12167807 : tree mem_size;
16579 : :
16580 : 12167807 : if (strsize)
16581 : 4837580 : *strsize = 0;
16582 : :
16583 : 12167807 : if (strsize)
16584 : 4837580 : src = byte_representation (src, &offset_node, &mem_size, NULL);
16585 : : else
16586 : 7330227 : src = string_constant (src, &offset_node, &mem_size, NULL);
16587 : 12167807 : if (!src)
16588 : : return NULL;
16589 : :
16590 : 2708100 : unsigned HOST_WIDE_INT offset = 0;
16591 : 2708100 : if (offset_node != NULL_TREE)
16592 : : {
16593 : 2708100 : if (!tree_fits_uhwi_p (offset_node))
16594 : : return NULL;
16595 : : else
16596 : 2706336 : offset = tree_to_uhwi (offset_node);
16597 : : }
16598 : :
16599 : 2706336 : if (!tree_fits_uhwi_p (mem_size))
16600 : : return NULL;
16601 : :
16602 : : /* ARRAY_SIZE is the byte size of the array the constant sequence
16603 : : is stored in and equal to sizeof A. INIT_BYTES is the number
16604 : : of bytes in the constant sequence used to initialize the array,
16605 : : including any embedded NULs as well as the terminating NUL (for
16606 : : strings), but not including any trailing zeros/NULs past
16607 : : the terminating one appended implicitly to a string literal to
16608 : : zero out the remainder of the array it's stored in. For example,
16609 : : given:
16610 : : const char a[7] = "abc\0d";
16611 : : n = strlen (a + 1);
16612 : : ARRAY_SIZE is 7, INIT_BYTES is 6, and OFFSET is 1. For a valid
16613 : : (i.e., nul-terminated) string with no embedded nuls, INIT_BYTES
16614 : : is equal to strlen (A) + 1. */
16615 : 2706336 : const unsigned HOST_WIDE_INT array_size = tree_to_uhwi (mem_size);
16616 : 2706336 : unsigned HOST_WIDE_INT init_bytes = TREE_STRING_LENGTH (src);
16617 : 2706336 : const char *string = TREE_STRING_POINTER (src);
16618 : :
16619 : : /* Ideally this would turn into a gcc_checking_assert over time. */
16620 : 2706336 : if (init_bytes > array_size)
16621 : : init_bytes = array_size;
16622 : :
16623 : 2706336 : if (init_bytes == 0 || offset >= array_size)
16624 : : return NULL;
16625 : :
16626 : 2705147 : if (strsize)
16627 : : {
16628 : : /* Compute and store the number of characters from the beginning
16629 : : of the substring at OFFSET to the end, including the terminating
16630 : : nul. Offsets past the initial length refer to null strings. */
16631 : 1559184 : if (offset < init_bytes)
16632 : 1559184 : *strsize = init_bytes - offset;
16633 : : else
16634 : 0 : *strsize = 1;
16635 : : }
16636 : : else
16637 : : {
16638 : 1145963 : tree eltype = TREE_TYPE (TREE_TYPE (src));
16639 : : /* Support only properly NUL-terminated single byte strings. */
16640 : 1145963 : if (tree_to_uhwi (TYPE_SIZE_UNIT (eltype)) != 1)
16641 : : return NULL;
16642 : 1141494 : if (string[init_bytes - 1] != '\0')
16643 : : return NULL;
16644 : : }
16645 : :
16646 : 2679212 : return offset < init_bytes ? string + offset : "";
16647 : : }
16648 : :
16649 : : /* Return a pointer to a NUL-terminated string corresponding to
16650 : : the expression STR referencing a constant string, possibly
16651 : : involving a constant offset. Return null if STR either doesn't
16652 : : reference a constant string or if it involves a nonconstant
16653 : : offset. */
16654 : :
16655 : : const char *
16656 : 7330227 : c_getstr (tree str)
16657 : : {
16658 : 7330227 : return getbyterep (str, NULL);
16659 : : }
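             /* Usage sketch for c_getstr (STR is a hypothetical reference to
                a string literal plus a constant offset, e.g. &"hello"[2]):

                  const char *p = c_getstr (str);

                Here P is non-null and points at "llo"; a nonconstant offset,
                a non-literal base, or a string that is not properly
                NUL-terminated makes getbyterep, and hence c_getstr, return
                NULL.  */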
16660 : :
16661 : : /* Given a tree T, compute which bits in T may be nonzero. */
16662 : :
16663 : : wide_int
16664 : 220391218 : tree_nonzero_bits (const_tree t)
16665 : : {
16666 : 220391218 : switch (TREE_CODE (t))
16667 : : {
16668 : 8191687 : case INTEGER_CST:
16669 : 8191687 : return wi::to_wide (t);
16670 : 124411780 : case SSA_NAME:
16671 : 124411780 : return get_nonzero_bits (t);
16672 : 240521 : case NON_LVALUE_EXPR:
16673 : 240521 : case SAVE_EXPR:
16674 : 240521 : return tree_nonzero_bits (TREE_OPERAND (t, 0));
16675 : 465102 : case BIT_AND_EXPR:
16676 : 930204 : return wi::bit_and (tree_nonzero_bits (TREE_OPERAND (t, 0)),
16677 : 1395306 : tree_nonzero_bits (TREE_OPERAND (t, 1)));
16678 : 4192 : case BIT_IOR_EXPR:
16679 : 4192 : case BIT_XOR_EXPR:
16680 : 8384 : return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 0)),
16681 : 12576 : tree_nonzero_bits (TREE_OPERAND (t, 1)));
16682 : 61822 : case COND_EXPR:
16683 : 123644 : return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 1)),
16684 : 185466 : tree_nonzero_bits (TREE_OPERAND (t, 2)));
16685 : 45353376 : CASE_CONVERT:
16686 : 90706752 : return wide_int::from (tree_nonzero_bits (TREE_OPERAND (t, 0)),
16687 : 45353376 : TYPE_PRECISION (TREE_TYPE (t)),
16688 : 136060128 : TYPE_SIGN (TREE_TYPE (TREE_OPERAND (t, 0))));
16689 : 12997056 : case PLUS_EXPR:
16690 : 12997056 : if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
16691 : : {
16692 : 12997056 : wide_int nzbits1 = tree_nonzero_bits (TREE_OPERAND (t, 0));
16693 : 12997056 : wide_int nzbits2 = tree_nonzero_bits (TREE_OPERAND (t, 1));
16694 : 12997056 : if (wi::bit_and (nzbits1, nzbits2) == 0)
16695 : 481873 : return wi::bit_or (nzbits1, nzbits2);
16696 : 12997056 : }
16697 : : break;
16698 : 150712 : case LSHIFT_EXPR:
16699 : 150712 : if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
16700 : : {
16701 : 82766 : tree type = TREE_TYPE (t);
16702 : 82766 : wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
16703 : 165532 : wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
16704 : 82766 : TYPE_PRECISION (type));
16705 : 82766 : return wi::neg_p (arg1)
16706 : 165532 : ? wi::rshift (nzbits, -arg1, TYPE_SIGN (type))
16707 : 82766 : : wi::lshift (nzbits, arg1);
16708 : 82766 : }
16709 : : break;
16710 : 61341 : case RSHIFT_EXPR:
16711 : 61341 : if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
16712 : : {
16713 : 59789 : tree type = TREE_TYPE (t);
16714 : 59789 : wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
16715 : 119578 : wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
16716 : 59789 : TYPE_PRECISION (type));
16717 : 59789 : return wi::neg_p (arg1)
16718 : 119578 : ? wi::lshift (nzbits, -arg1)
16719 : 59789 : : wi::rshift (nzbits, arg1, TYPE_SIGN (type));
16720 : 59789 : }
16721 : : break;
16722 : : default:
16723 : : break;
16724 : : }
16725 : :
16726 : 41038310 : return wi::shwi (-1, TYPE_PRECISION (TREE_TYPE (t)));
16727 : : }
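             /* Worked example of the recursion above, for the GENERIC tree
                ((x & 0xff) << 4) with a 32-bit X (a sketch; X is any SSA name
                or variable whose own nonzero bits are unknown):

                  tree_nonzero_bits (x)               == 0xffffffff
                  tree_nonzero_bits (x & 0xff)        == 0x000000ff   (BIT_AND_EXPR)
                  tree_nonzero_bits ((x & 0xff) << 4) == 0x00000ff0   (LSHIFT_EXPR)

                so the low four bits are known zero, which is the kind of fact
                callers use to fold a test like (((x & 0xff) << 4) & 0xf) == 0
                to true.  */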
16728 : :
16729 : : /* Helper function for address compare simplifications in match.pd.
16730 : : OP0 and OP1 are ADDR_EXPR operands being compared by CODE.
16731 : : TYPE is the type of comparison operands.
16732 : : BASE0, BASE1, OFF0 and OFF1 are set by the function.
16733 : : GENERIC is true if GENERIC folding and false for GIMPLE folding.
16734 : : Returns 0 if OP0 is known to be unequal to OP1 regardless of OFF{0,1},
16735 : : 1 if bases are known to be equal and OP0 cmp OP1 depends on OFF0 cmp OFF1,
16736 : : and 2 if unknown. */
16737 : :
16738 : : int
16739 : 1013972 : address_compare (tree_code code, tree type, tree op0, tree op1,
16740 : : tree &base0, tree &base1, poly_int64 &off0, poly_int64 &off1,
16741 : : bool generic)
16742 : : {
16743 : 1013972 : if (TREE_CODE (op0) == SSA_NAME)
16744 : 16171 : op0 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (op0));
16745 : 1013972 : if (TREE_CODE (op1) == SSA_NAME)
16746 : 7767 : op1 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (op1));
16747 : 1013972 : gcc_checking_assert (TREE_CODE (op0) == ADDR_EXPR);
16748 : 1013972 : gcc_checking_assert (TREE_CODE (op1) == ADDR_EXPR);
16749 : 1013972 : base0 = get_addr_base_and_unit_offset (TREE_OPERAND (op0, 0), &off0);
16750 : 1013972 : base1 = get_addr_base_and_unit_offset (TREE_OPERAND (op1, 0), &off1);
16751 : 1013972 : if (base0 && TREE_CODE (base0) == MEM_REF)
16752 : : {
16753 : 17552 : off0 += mem_ref_offset (base0).force_shwi ();
16754 : 17552 : base0 = TREE_OPERAND (base0, 0);
16755 : : }
16756 : 1013972 : if (base1 && TREE_CODE (base1) == MEM_REF)
16757 : : {
16758 : 4252 : off1 += mem_ref_offset (base1).force_shwi ();
16759 : 4252 : base1 = TREE_OPERAND (base1, 0);
16760 : : }
16761 : 1013972 : if (base0 == NULL_TREE || base1 == NULL_TREE)
16762 : : return 2;
16763 : :
16764 : 1006689 : int equal = 2;
16765 : : /* Punt in GENERIC on variables with value expressions;
16766 : : the value expressions might point to fields/elements
16767 : : of other vars etc. */
16768 : 1006689 : if (generic
16769 : 1006689 : && ((VAR_P (base0) && DECL_HAS_VALUE_EXPR_P (base0))
16770 : 901897 : || (VAR_P (base1) && DECL_HAS_VALUE_EXPR_P (base1))))
16771 : : return 2;
16772 : 1006126 : else if (decl_in_symtab_p (base0) && decl_in_symtab_p (base1))
16773 : : {
16774 : 150074 : symtab_node *node0 = symtab_node::get_create (base0);
16775 : 150074 : symtab_node *node1 = symtab_node::get_create (base1);
16776 : 150074 : equal = node0->equal_address_to (node1);
16777 : : }
16778 : 856052 : else if ((DECL_P (base0)
16779 : 109508 : || TREE_CODE (base0) == SSA_NAME
16780 : 92863 : || TREE_CODE (base0) == STRING_CST)
16781 : 855507 : && (DECL_P (base1)
16782 : 96765 : || TREE_CODE (base1) == SSA_NAME
16783 : 92739 : || TREE_CODE (base1) == STRING_CST))
16784 : 855485 : equal = (base0 == base1);
16785 : : /* Assume different STRING_CSTs with the same content will be
16786 : : merged. */
16787 : 1005559 : if (equal == 0
16788 : 46275 : && TREE_CODE (base0) == STRING_CST
16789 : 17134 : && TREE_CODE (base1) == STRING_CST
16790 : 17105 : && TREE_STRING_LENGTH (base0) == TREE_STRING_LENGTH (base1)
16791 : 1005559 : && memcmp (TREE_STRING_POINTER (base0), TREE_STRING_POINTER (base1),
16792 : 6099 : TREE_STRING_LENGTH (base0)) == 0)
16793 : : equal = 1;
16794 : 1001674 : if (equal == 1)
16795 : : {
16796 : 939890 : if (code == EQ_EXPR
16797 : 939890 : || code == NE_EXPR
16798 : : /* If the offsets are equal we can ignore overflow. */
16799 : 59518 : || known_eq (off0, off1)
16800 : 118868 : || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
16801 : : /* Or if we compare using pointers to decls or strings. */
16802 : 999324 : || (POINTER_TYPE_P (type)
16803 : 0 : && (DECL_P (base0) || TREE_CODE (base0) == STRING_CST)))
16804 : : return 1;
16805 : : return 2;
16806 : : }
16807 : 66236 : if (equal != 0)
16808 : : return equal;
16809 : 41823 : if (code != EQ_EXPR && code != NE_EXPR)
16810 : : return 2;
16811 : :
16812 : : /* At this point we know (or assume) the two pointers point at
16813 : : different objects. */
16814 : 37714 : HOST_WIDE_INT ioff0 = -1, ioff1 = -1;
16815 : 37714 : off0.is_constant (&ioff0);
16816 : 37714 : off1.is_constant (&ioff1);
16817 : : /* Punt on non-zero offsets from functions. */
16818 : 37714 : if ((TREE_CODE (base0) == FUNCTION_DECL && ioff0)
16819 : 37714 : || (TREE_CODE (base1) == FUNCTION_DECL && ioff1))
16820 : : return 2;
16821 : : /* Or if the bases are neither decls nor string literals. */
16822 : 37714 : if (!DECL_P (base0) && TREE_CODE (base0) != STRING_CST)
16823 : : return 2;
16824 : 21553 : if (!DECL_P (base1) && TREE_CODE (base1) != STRING_CST)
16825 : : return 2;
16826 : : /* For initializers, assume addresses of different functions are
16827 : : different. */
16828 : 20112 : if (folding_initializer
16829 : 520 : && TREE_CODE (base0) == FUNCTION_DECL
16830 : 14 : && TREE_CODE (base1) == FUNCTION_DECL)
16831 : : return 0;
16832 : :
16833 : : /* Compute whether one address points to the start of one
16834 : : object and another one to the end of another one. */
16835 : 20098 : poly_int64 size0 = 0, size1 = 0;
16836 : 20098 : if (TREE_CODE (base0) == STRING_CST)
16837 : : {
16838 : 12658 : if (ioff0 < 0 || ioff0 > TREE_STRING_LENGTH (base0))
16839 : : equal = 2;
16840 : : else
16841 : : size0 = TREE_STRING_LENGTH (base0);
16842 : : }
16843 : 7440 : else if (TREE_CODE (base0) == FUNCTION_DECL)
16844 : : size0 = 1;
16845 : : else
16846 : : {
16847 : 7367 : tree sz0 = DECL_SIZE_UNIT (base0);
16848 : 7367 : if (!tree_fits_poly_int64_p (sz0))
16849 : : equal = 2;
16850 : : else
16851 : 7367 : size0 = tree_to_poly_int64 (sz0);
16852 : : }
16853 : 20098 : if (TREE_CODE (base1) == STRING_CST)
16854 : : {
16855 : 12779 : if (ioff1 < 0 || ioff1 > TREE_STRING_LENGTH (base1))
16856 : : equal = 2;
16857 : : else
16858 : : size1 = TREE_STRING_LENGTH (base1);
16859 : : }
16860 : 7319 : else if (TREE_CODE (base1) == FUNCTION_DECL)
16861 : : size1 = 1;
16862 : : else
16863 : : {
16864 : 7250 : tree sz1 = DECL_SIZE_UNIT (base1);
16865 : 7250 : if (!tree_fits_poly_int64_p (sz1))
16866 : : equal = 2;
16867 : : else
16868 : 7250 : size1 = tree_to_poly_int64 (sz1);
16869 : : }
16870 : 20098 : if (equal == 0)
16871 : : {
16872 : : /* If one offset is pointing (or could be) to the beginning of one
16873 : : object and the other is pointing to one past the last byte of the
16874 : : other object, punt. */
16875 : 20086 : if (maybe_eq (off0, 0) && maybe_eq (off1, size1))
16876 : : equal = 2;
16877 : 19945 : else if (maybe_eq (off1, 0) && maybe_eq (off0, size0))
16878 : : equal = 2;
16879                 : :      /* If both offsets are the same, there are some cases we know are
16880                 : :         OK: either we know the offsets aren't zero, or we know both sizes
16881                 : :         are nonzero.  */
16882 : : if (equal == 2
16883 : 274 : && known_eq (off0, off1)
16884 : 22 : && (known_ne (off0, 0)
16885 : 22 : || (known_ne (size0, 0) && known_ne (size1, 0))))
16886 : : equal = 0;
16887 : : }
16888 : :
16889 : : /* At this point, equal is 2 if either one or both pointers are out of
16890 : : bounds of their object, or one points to start of its object and the
16891 : : other points to end of its object. This is unspecified behavior
16892 : : e.g. in C++. Otherwise equal is 0. */
16893 : 20098 : if (folding_cxx_constexpr && equal)
16894 : : return equal;
16895 : :
16896 : : /* When both pointers point to string literals, even when equal is 0,
16897 : : due to tail merging of string literals the pointers might be the same. */
16898 : 20035 : if (TREE_CODE (base0) == STRING_CST && TREE_CODE (base1) == STRING_CST)
16899 : : {
16900 : 12635 : if (ioff0 < 0
16901 : 12635 : || ioff1 < 0
16902 : 12635 : || ioff0 > TREE_STRING_LENGTH (base0)
16903 : 25258 : || ioff1 > TREE_STRING_LENGTH (base1))
16904 : : return 2;
16905 : :
16906 : : /* If the bytes in the string literals starting at the pointers
16907 : : differ, the pointers need to be different. */
16908 : 12623 : if (memcmp (TREE_STRING_POINTER (base0) + ioff0,
16909 : 12623 : TREE_STRING_POINTER (base1) + ioff1,
16910 : 12623 : MIN (TREE_STRING_LENGTH (base0) - ioff0,
16911 : : TREE_STRING_LENGTH (base1) - ioff1)) == 0)
16912 : : {
16913 : 3739 : HOST_WIDE_INT ioffmin = MIN (ioff0, ioff1);
16914 : 3739 : if (memcmp (TREE_STRING_POINTER (base0) + ioff0 - ioffmin,
16915 : 3739 : TREE_STRING_POINTER (base1) + ioff1 - ioffmin,
16916 : : ioffmin) == 0)
16917 : : /* If even the bytes in the string literal before the
16918 : : pointers are the same, the string literals could be
16919 : : tail merged. */
16920 : : return 2;
16921 : : }
16922 : : return 0;
16923 : : }
16924 : :
16925 : 7400 : if (folding_cxx_constexpr)
16926 : : return 0;
16927 : :
16928 : : /* If this is a pointer comparison, ignore for now even
16929 : : valid equalities where one pointer is the offset zero
16930 : : of one object and the other to one past end of another one. */
16931 : 7000 : if (!INTEGRAL_TYPE_P (type))
16932 : : return 0;
16933 : :
16934 : : /* Assume that string literals can't be adjacent to variables
16935 : : (automatic or global). */
16936 : 299 : if (TREE_CODE (base0) == STRING_CST || TREE_CODE (base1) == STRING_CST)
16937 : : return 0;
16938 : :
16939 : : /* Assume that automatic variables can't be adjacent to global
16940 : : variables. */
16941 : 295 : if (is_global_var (base0) != is_global_var (base1))
16942 : : return 0;
16943 : :
16944 : : return equal;
16945 : : }
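             /* Illustration of the return values (hypothetical file-scope
                arrays, EQ_EXPR comparison of their element addresses):

                  static char a[4], b[4];

                  &a[1] vs &a[3]  -> 1   same base; the caller now compares
                                         off0 (1) with off1 (3)
                  &a[1] vs &b[1]  -> 0   distinct objects, neither address on
                                         an object boundary, so EQ folds to
                                         false and NE to true

                Addresses that may sit on a boundary (offset zero of one object
                versus one past the end of the other) are what the size0/size1
                bookkeeping above guards against.  */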
16946 : :
16947 : : /* Return the single non-zero element of a CONSTRUCTOR or NULL_TREE. */
16948 : : tree
16949 : 44 : ctor_single_nonzero_element (const_tree t)
16950 : : {
16951 : 44 : unsigned HOST_WIDE_INT idx;
16952 : 44 : constructor_elt *ce;
16953 : 44 : tree elt = NULL_TREE;
16954 : :
16955 : 44 : if (TREE_CODE (t) != CONSTRUCTOR)
16956 : : return NULL_TREE;
16957 : 97 : for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (t), idx, &ce); idx++)
16958 : 94 : if (!integer_zerop (ce->value) && !real_zerop (ce->value))
16959 : : {
16960 : 85 : if (elt)
16961 : : return NULL_TREE;
16962 : 44 : elt = ce->value;
16963 : : }
16964 : : return elt;
16965 : : }
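             /* Example (sketch of CONSTRUCTOR element lists):

                  { 0, 0, 3.0, 0 }   -> the REAL_CST 3.0
                  { 0, 1, 3.0, 0 }   -> NULL_TREE (two nonzero elements)
                  not a CONSTRUCTOR  -> NULL_TREE  */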
16966 : :
16967 : : #if CHECKING_P
16968 : :
16969 : : namespace selftest {
16970 : :
16971 : : /* Helper functions for writing tests of folding trees. */
16972 : :
16973 : : /* Verify that the binary op (LHS CODE RHS) folds to CONSTANT. */
16974 : :
16975 : : static void
16976 : 16 : assert_binop_folds_to_const (tree lhs, enum tree_code code, tree rhs,
16977 : : tree constant)
16978 : : {
16979 : 16 : ASSERT_EQ (constant, fold_build2 (code, TREE_TYPE (lhs), lhs, rhs));
16980 : 16 : }
16981 : :
16982 : : /* Verify that the binary op (LHS CODE RHS) folds to an NON_LVALUE_EXPR
16983 : : wrapping WRAPPED_EXPR. */
16984 : :
16985 : : static void
16986 : 12 : assert_binop_folds_to_nonlvalue (tree lhs, enum tree_code code, tree rhs,
16987 : : tree wrapped_expr)
16988 : : {
16989 : 12 : tree result = fold_build2 (code, TREE_TYPE (lhs), lhs, rhs);
16990 : 12 : ASSERT_NE (wrapped_expr, result);
16991 : 12 : ASSERT_EQ (NON_LVALUE_EXPR, TREE_CODE (result));
16992 : 12 : ASSERT_EQ (wrapped_expr, TREE_OPERAND (result, 0));
16993 : 12 : }
16994 : :
16995 : : /* Verify that various arithmetic binary operations are folded
16996 : : correctly. */
16997 : :
16998 : : static void
16999 : 4 : test_arithmetic_folding ()
17000 : : {
17001 : 4 : tree type = integer_type_node;
17002 : 4 : tree x = create_tmp_var_raw (type, "x");
17003 : 4 : tree zero = build_zero_cst (type);
17004 : 4 : tree one = build_int_cst (type, 1);
17005 : :
17006 : : /* Addition. */
17007 : : /* 1 <-- (0 + 1) */
17008 : 4 : assert_binop_folds_to_const (zero, PLUS_EXPR, one,
17009 : : one);
17010 : 4 : assert_binop_folds_to_const (one, PLUS_EXPR, zero,
17011 : : one);
17012 : :
17013 : : /* (nonlvalue)x <-- (x + 0) */
17014 : 4 : assert_binop_folds_to_nonlvalue (x, PLUS_EXPR, zero,
17015 : : x);
17016 : :
17017 : : /* Subtraction. */
17018 : : /* 0 <-- (x - x) */
17019 : 4 : assert_binop_folds_to_const (x, MINUS_EXPR, x,
17020 : : zero);
17021 : 4 : assert_binop_folds_to_nonlvalue (x, MINUS_EXPR, zero,
17022 : : x);
17023 : :
17024 : : /* Multiplication. */
17025 : : /* 0 <-- (x * 0) */
17026 : 4 : assert_binop_folds_to_const (x, MULT_EXPR, zero,
17027 : : zero);
17028 : :
17029 : : /* (nonlvalue)x <-- (x * 1) */
17030 : 4 : assert_binop_folds_to_nonlvalue (x, MULT_EXPR, one,
17031 : : x);
17032 : 4 : }
17033 : :
17034 : : namespace test_operand_equality {
17035 : :
17036                 : : /* Verify structural equality of trees under the various
17037                 : :    operand_equal_p flag combinations exercised below.  */
17038                 : :
17039 : :
17040 : : static void
17041 : 4 : test ()
17042 : : {
17043 : 4 : tree stype = integer_type_node;
17044 : 4 : tree utype = unsigned_type_node;
17045 : 4 : tree x = create_tmp_var_raw (stype, "x");
17046 : 4 : tree y = create_tmp_var_raw (stype, "y");
17047 : 4 : tree z = create_tmp_var_raw (stype, "z");
17048 : 4 : tree four = build_int_cst (stype, 4);
17049 : 4 : tree lhs1 = fold_build2 (PLUS_EXPR, stype, x, y);
17050 : 4 : tree rhs1 = fold_convert (stype,
17051 : : fold_build2 (PLUS_EXPR, utype,
17052 : : fold_convert (utype, x),
17053 : : fold_convert (utype, y)));
17054 : :
17055 : : /* (int)((unsigned x) + (unsigned y)) == x + y. */
17056 : 4 : ASSERT_TRUE (operand_equal_p (lhs1, rhs1, OEP_ASSUME_WRAPV));
17057 : 4 : ASSERT_FALSE (operand_equal_p (lhs1, rhs1, 0));
17058 : :
17059 : : /* (int)(unsigned) x == x. */
17060 : 4 : tree lhs2 = build1 (NOP_EXPR, stype,
17061 : : build1 (NOP_EXPR, utype, x));
17062 : 4 : tree rhs2 = x;
17063 : 4 : ASSERT_TRUE (operand_equal_p (lhs2, rhs2, OEP_ASSUME_WRAPV));
17064 : 4 : ASSERT_TRUE (operand_equal_p (lhs2, rhs2, 0));
17065 : :
17066 : : /* (unsigned x) + (unsigned y) == x + y. */
17067 : 4 : tree lhs3 = lhs1;
17068 : 4 : tree rhs3 = fold_build2 (PLUS_EXPR, utype,
17069 : : fold_convert (utype, x),
17070 : : fold_convert (utype, y));
17071 : 4 : ASSERT_TRUE (operand_equal_p (lhs3, rhs3, OEP_ASSUME_WRAPV));
17072 : 4 : ASSERT_FALSE (operand_equal_p (lhs3, rhs3, 0));
17073 : :
17074 : : /* (unsigned x) / (unsigned y) == x / y. */
17075                 : 4 :   tree lhs4 = fold_build2 (TRUNC_DIV_EXPR, stype, x, y);
17076 : 4 : tree rhs4 = fold_build2 (TRUNC_DIV_EXPR, utype,
17077 : : fold_convert (utype, x),
17078 : : fold_convert (utype, y));
17079 : 4 : ASSERT_FALSE (operand_equal_p (lhs4, rhs4, OEP_ASSUME_WRAPV));
17080 : 4 : ASSERT_FALSE (operand_equal_p (lhs4, rhs4, 0));
17081 : :
17082 : : /* (long x) / 4 == (long)(x / 4). */
17083 : 4 : tree lstype = long_long_integer_type_node;
17084 : 4 : tree lfour = build_int_cst (lstype, 4);
17085 : 4 : tree lhs5 = fold_build2 (TRUNC_DIV_EXPR, lstype,
17086 : : fold_build1 (VIEW_CONVERT_EXPR, lstype, x), lfour);
17087 : 4 : tree rhs5 = fold_build1 (VIEW_CONVERT_EXPR, lstype,
17088 : : fold_build2 (TRUNC_DIV_EXPR, stype, x, four));
17089 : 4 : ASSERT_FALSE (operand_equal_p (lhs5, rhs5, OEP_ASSUME_WRAPV));
17090 : 4 : ASSERT_FALSE (operand_equal_p (lhs5, rhs5, 0));
17091 : :
17092 : : /* (unsigned x) / 4 == x / 4. */
17093                 : 4 :   tree lhs6 = fold_build2 (TRUNC_DIV_EXPR, stype, x, four);
17094 : 4 : tree rhs6 = fold_build2 (TRUNC_DIV_EXPR, utype,
17095 : : fold_convert (utype, x),
17096 : : fold_convert (utype, four));
17097 : 4 : ASSERT_FALSE (operand_equal_p (lhs6, rhs6, OEP_ASSUME_WRAPV));
17098 : 4 : ASSERT_FALSE (operand_equal_p (lhs6, rhs6, 0));
17099 : :
17100 : : /* a / (int)((unsigned)b - (unsigned)c)) == a / (b - c). */
17101 : 4 : tree lhs7 = fold_build2 (TRUNC_DIV_EXPR, stype, x, lhs1);
17102 : 4 : tree rhs7 = fold_build2 (TRUNC_DIV_EXPR, stype, x, rhs1);
17103 : 4 : ASSERT_TRUE (operand_equal_p (lhs7, rhs7, OEP_ASSUME_WRAPV));
17104 : 4 : ASSERT_FALSE (operand_equal_p (lhs7, rhs7, 0));
17105 : :
17106 : : /* (unsigned x) + 4 == x + 4. */
17107 : 4 : tree lhs8 = fold_build2 (PLUS_EXPR, stype, x, four);
17108 : 4 : tree rhs8 = fold_build2 (PLUS_EXPR, utype,
17109 : : fold_convert (utype, x),
17110 : : fold_convert (utype, four));
17111 : 4 : ASSERT_TRUE (operand_equal_p (lhs8, rhs8, OEP_ASSUME_WRAPV));
17112 : 4 : ASSERT_FALSE (operand_equal_p (lhs8, rhs8, 0));
17113 : :
17114 : : /* (unsigned x) + 4 == 4 + x. */
17115 : 4 : tree lhs9 = fold_build2 (PLUS_EXPR, stype, four, x);
17116 : 4 : tree rhs9 = fold_build2 (PLUS_EXPR, utype,
17117 : : fold_convert (utype, x),
17118 : : fold_convert (utype, four));
17119 : 4 : ASSERT_TRUE (operand_equal_p (lhs9, rhs9, OEP_ASSUME_WRAPV));
17120 : 4 : ASSERT_FALSE (operand_equal_p (lhs9, rhs9, 0));
17121 : :
17122 : : /* ((unsigned x) + 4) * (unsigned y)) + z == ((4 + x) * y) + z. */
17123 : 4 : tree lhs10 = fold_build2 (PLUS_EXPR, stype,
17124 : : fold_build2 (MULT_EXPR, stype,
17125 : : fold_build2 (PLUS_EXPR, stype, four, x),
17126 : : y),
17127 : : z);
17128 : 4 : tree rhs10 = fold_build2 (MULT_EXPR, utype,
17129 : : fold_build2 (PLUS_EXPR, utype,
17130 : : fold_convert (utype, x),
17131 : : fold_convert (utype, four)),
17132 : : fold_convert (utype, y));
17133 : 4 : rhs10 = fold_build2 (PLUS_EXPR, stype, fold_convert (stype, rhs10), z);
17134 : 4 : ASSERT_TRUE (operand_equal_p (lhs10, rhs10, OEP_ASSUME_WRAPV));
17135 : 4 : ASSERT_FALSE (operand_equal_p (lhs10, rhs10, 0));
17136 : 4 : }
17137 : : }
17138 : :
17139 : : namespace test_fold_vec_perm_cst {
17140 : :
17141                 : : /* Build a VECTOR_CST corresponding to VMODE, with encoding
17142                 : :    given by NPATTERNS, NELTS_PER_PATTERN and STEP.
17143 : : Fill it with randomized elements, using rand() % THRESHOLD. */
17144 : :
17145 : : static tree
17146 : 0 : build_vec_cst_rand (machine_mode vmode, unsigned npatterns,
17147 : : unsigned nelts_per_pattern,
17148 : : int step = 0, bool natural_stepped = false,
17149 : : int threshold = 100)
17150 : : {
17151 : 0 : tree inner_type = lang_hooks.types.type_for_mode (GET_MODE_INNER (vmode), 1);
17152 : 0 : tree vectype = build_vector_type_for_mode (inner_type, vmode);
17153 : 0 : tree_vector_builder builder (vectype, npatterns, nelts_per_pattern);
17154 : :
17155 : : // Fill a0 for each pattern
17156 : 0 : for (unsigned i = 0; i < npatterns; i++)
17157 : 0 : builder.quick_push (build_int_cst (inner_type, rand () % threshold));
17158 : :
17159 : 0 : if (nelts_per_pattern == 1)
17160 : 0 : return builder.build ();
17161 : :
17162 : : // Fill a1 for each pattern
17163 : 0 : for (unsigned i = 0; i < npatterns; i++)
17164 : : {
17165 : 0 : tree a1;
17166 : 0 : if (natural_stepped)
17167 : : {
17168 : 0 : tree a0 = builder[i];
17169 : 0 : wide_int a0_val = wi::to_wide (a0);
17170 : 0 : wide_int a1_val = a0_val + step;
17171 : 0 : a1 = wide_int_to_tree (inner_type, a1_val);
17172 : 0 : }
17173 : : else
17174 : 0 : a1 = build_int_cst (inner_type, rand () % threshold);
17175 : 0 : builder.quick_push (a1);
17176 : : }
17177 : 0 : if (nelts_per_pattern == 2)
17178 : 0 : return builder.build ();
17179 : :
17180 : 0 : for (unsigned i = npatterns * 2; i < npatterns * nelts_per_pattern; i++)
17181 : : {
17182 : 0 : tree prev_elem = builder[i - npatterns];
17183 : 0 : wide_int prev_elem_val = wi::to_wide (prev_elem);
17184 : 0 : wide_int val = prev_elem_val + step;
17185 : 0 : builder.quick_push (wide_int_to_tree (inner_type, val));
17186 : 0 : }
17187 : :
17188 : 0 : return builder.build ();
17189 : 0 : }
17190 : :
17191 : : /* Validate result of VEC_PERM_EXPR folding for the unit-tests below,
17192 : : when result is VLA. */
17193 : :
17194 : : static void
17195 : 0 : validate_res (unsigned npatterns, unsigned nelts_per_pattern,
17196 : : tree res, tree *expected_res)
17197 : : {
17198 : : /* Actual npatterns and encoded_elts in res may be less than expected due
17199 : : to canonicalization. */
17200 : 0 : ASSERT_TRUE (res != NULL_TREE);
17201 : 0 : ASSERT_TRUE (VECTOR_CST_NPATTERNS (res) <= npatterns);
17202 : 0 : ASSERT_TRUE (vector_cst_encoded_nelts (res) <= npatterns * nelts_per_pattern);
17203 : :
17204 : 0 : for (unsigned i = 0; i < npatterns * nelts_per_pattern; i++)
17205 : 0 : ASSERT_TRUE (operand_equal_p (VECTOR_CST_ELT (res, i), expected_res[i], 0));
17206 : 0 : }
17207 : :
17208 : : /* Validate result of VEC_PERM_EXPR folding for the unit-tests below,
17209 : : when the result is VLS. */
17210 : :
17211 : : static void
17212 : 0 : validate_res_vls (tree res, tree *expected_res, unsigned expected_nelts)
17213 : : {
17214 : 0 : ASSERT_TRUE (known_eq (VECTOR_CST_NELTS (res), expected_nelts));
17215 : 0 : for (unsigned i = 0; i < expected_nelts; i++)
17216 : 0 : ASSERT_TRUE (operand_equal_p (VECTOR_CST_ELT (res, i), expected_res[i], 0));
17217 : 0 : }
17218 : :
17219 : : /* Helper routine to push multiple elements into BUILDER. */
17220 : : template<unsigned N>
17221 : 0 : static void builder_push_elems (vec_perm_builder& builder,
17222 : : poly_uint64 (&elems)[N])
17223 : : {
17224 : 0 : for (unsigned i = 0; i < N; i++)
17225 : 0 : builder.quick_push (elems[i]);
17226 : 0 : }
17227 : :
17228 : : #define ARG0(index) vector_cst_elt (arg0, index)
17229 : : #define ARG1(index) vector_cst_elt (arg1, index)
17230 : :
17231 : : /* Test cases where result is VNx4SI and input vectors are V4SI. */
17232 : :
17233 : : static void
17234 : 0 : test_vnx4si_v4si (machine_mode vnx4si_mode, machine_mode v4si_mode)
17235 : : {
17236 : 0 : for (int i = 0; i < 10; i++)
17237 : : {
17238 : : /* Case 1:
17239 : : sel = { 0, 4, 1, 5, ... }
17240                 : :	 res = { arg0[0], arg1[0], arg0[1], arg1[1], ...} // (4, 1)  */
17241 : 0 : {
17242 : 0 : tree arg0 = build_vec_cst_rand (v4si_mode, 4, 1, 0);
17243 : 0 : tree arg1 = build_vec_cst_rand (v4si_mode, 4, 1, 0);
17244 : :
17245 : 0 : tree inner_type
17246 : 0 : = lang_hooks.types.type_for_mode (GET_MODE_INNER (vnx4si_mode), 1);
17247 : 0 : tree res_type = build_vector_type_for_mode (inner_type, vnx4si_mode);
17248 : :
17249 : 0 : poly_uint64 res_len = TYPE_VECTOR_SUBPARTS (res_type);
17250 : 0 : vec_perm_builder builder (res_len, 4, 1);
17251 : 0 : poly_uint64 mask_elems[] = { 0, 4, 1, 5 };
17252 : 0 : builder_push_elems (builder, mask_elems);
17253 : :
17254 : 0 : vec_perm_indices sel (builder, 2, res_len);
17255 : 0 : tree res = fold_vec_perm_cst (res_type, arg0, arg1, sel);
17256 : :
17257 : 0 : tree expected_res[] = { ARG0(0), ARG1(0), ARG0(1), ARG1(1) };
17258 : 0 : validate_res (4, 1, res, expected_res);
17259 : 0 : }
17260 : :
17261 : : /* Case 2: Same as case 1, but contains an out of bounds access which
17262 : : should wrap around.
17263 : : sel = {0, 8, 4, 12, ...} (4, 1)
17264 : : res = { arg0[0], arg0[0], arg1[0], arg1[0], ... } (4, 1). */
17265 : 0 : {
17266 : 0 : tree arg0 = build_vec_cst_rand (v4si_mode, 4, 1, 0);
17267 : 0 : tree arg1 = build_vec_cst_rand (v4si_mode, 4, 1, 0);
17268 : :
17269 : 0 : tree inner_type
17270 : 0 : = lang_hooks.types.type_for_mode (GET_MODE_INNER (vnx4si_mode), 1);
17271 : 0 : tree res_type = build_vector_type_for_mode (inner_type, vnx4si_mode);
17272 : :
17273 : 0 : poly_uint64 res_len = TYPE_VECTOR_SUBPARTS (res_type);
17274 : 0 : vec_perm_builder builder (res_len, 4, 1);
17275 : 0 : poly_uint64 mask_elems[] = { 0, 8, 4, 12 };
17276 : 0 : builder_push_elems (builder, mask_elems);
17277 : :
17278 : 0 : vec_perm_indices sel (builder, 2, res_len);
17279 : 0 : tree res = fold_vec_perm_cst (res_type, arg0, arg1, sel);
17280 : :
17281 : 0 : tree expected_res[] = { ARG0(0), ARG0(0), ARG1(0), ARG1(0) };
17282 : 0 : validate_res (4, 1, res, expected_res);
17283 : 0 : }
17284 : : }
17285 : 0 : }
17286 : :
17287 : : /* Test cases where result is V4SI and input vectors are VNx4SI. */
17288 : :
17289 : : static void
17290 : 0 : test_v4si_vnx4si (machine_mode v4si_mode, machine_mode vnx4si_mode)
17291 : : {
17292 : 0 : for (int i = 0; i < 10; i++)
17293 : : {
17294 : : /* Case 1:
17295 : : sel = { 0, 1, 2, 3}
17296 : : res = { arg0[0], arg0[1], arg0[2], arg0[3] }. */
17297 : 0 : {
17298 : 0 : tree arg0 = build_vec_cst_rand (vnx4si_mode, 4, 1);
17299 : 0 : tree arg1 = build_vec_cst_rand (vnx4si_mode, 4, 1);
17300 : :
17301 : 0 : tree inner_type
17302 : 0 : = lang_hooks.types.type_for_mode (GET_MODE_INNER (v4si_mode), 1);
17303 : 0 : tree res_type = build_vector_type_for_mode (inner_type, v4si_mode);
17304 : :
17305 : 0 : poly_uint64 res_len = TYPE_VECTOR_SUBPARTS (res_type);
17306 : 0 : vec_perm_builder builder (res_len, 4, 1);
17307 : 0 : poly_uint64 mask_elems[] = {0, 1, 2, 3};
17308 : 0 : builder_push_elems (builder, mask_elems);
17309 : :
17310 : 0 : vec_perm_indices sel (builder, 2, res_len);
17311 : 0 : tree res = fold_vec_perm_cst (res_type, arg0, arg1, sel);
17312 : :
17313 : 0 : tree expected_res[] = { ARG0(0), ARG0(1), ARG0(2), ARG0(3) };
17314 : 0 : validate_res_vls (res, expected_res, 4);
17315 : 0 : }
17316 : :
17317                 : :       /* Case 2: Same as Case 1, but crossing the input vectors.
17318                 : :          sel = { 0, 2, 4, 6 }
17319                 : :          In this case, the index 4 is ambiguous since len = 4 + 4x.
17320                 : :          Since we cannot determine at compile time which vector to choose
17321                 : :          from, folding should return NULL_TREE.  */
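                 : :       /* As an illustration, if the runtime length were 4 (x == 0),
                 : :          index 4 would select arg1[0], but for any larger length (x > 0)
                 : :          it would select arg0[4]; the quotient 4 / (4 + 4x) therefore has
                 : :          no single compile-time value.  */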
17322 : 0 : {
17323 : 0 : tree arg0 = build_vec_cst_rand (vnx4si_mode, 4, 1);
17324 : 0 : tree arg1 = build_vec_cst_rand (vnx4si_mode, 4, 1);
17325 : :
17326 : 0 : tree inner_type
17327 : 0 : = lang_hooks.types.type_for_mode (GET_MODE_INNER (v4si_mode), 1);
17328 : 0 : tree res_type = build_vector_type_for_mode (inner_type, v4si_mode);
17329 : :
17330 : 0 : poly_uint64 res_len = TYPE_VECTOR_SUBPARTS (res_type);
17331 : 0 : vec_perm_builder builder (res_len, 4, 1);
17332 : 0 : poly_uint64 mask_elems[] = {0, 2, 4, 6};
17333 : 0 : builder_push_elems (builder, mask_elems);
17334 : :
17335 : 0 : vec_perm_indices sel (builder, 2, res_len);
17336 : 0 : const char *reason;
17337 : 0 : tree res = fold_vec_perm_cst (res_type, arg0, arg1, sel, &reason);
17338 : :
17339 : 0 : ASSERT_TRUE (res == NULL_TREE);
17340 : 0 : ASSERT_TRUE (!strcmp (reason, "cannot divide selector element by arg len"));
17341 : 0 : }
17342 : : }
17343 : 0 : }
17344 : :
17345                 : : /* Test cases that apply to input vectors of any length.  */
17346 : :
17347 : : static void
17348 : 0 : test_all_nunits (machine_mode vmode)
17349 : : {
17350 : : /* Test with 10 different inputs. */
17351 : 0 : for (int i = 0; i < 10; i++)
17352 : : {
17353 : 0 : tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
17354 : 0 : tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
17355 : 0 : poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17356 : :
17357 : : /* Case 1: mask = {0, ...} // (1, 1)
17358 : : res = { arg0[0], ... } // (1, 1) */
17359 : 0 : {
17360 : 0 : vec_perm_builder builder (len, 1, 1);
17361 : 0 : builder.quick_push (0);
17362 : 0 : vec_perm_indices sel (builder, 2, len);
17363 : 0 : tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17364 : 0 : tree expected_res[] = { ARG0(0) };
17365 : 0 : validate_res (1, 1, res, expected_res);
17366 : 0 : }
17367 : :
17368 : : /* Case 2: mask = {len, ...} // (1, 1)
17369 : : res = { arg1[0], ... } // (1, 1) */
17370 : 0 : {
17371 : 0 : vec_perm_builder builder (len, 1, 1);
17372 : 0 : builder.quick_push (len);
17373 : 0 : vec_perm_indices sel (builder, 2, len);
17374 : 0 : tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17375 : :
17376 : 0 : tree expected_res[] = { ARG1(0) };
17377 : 0 : validate_res (1, 1, res, expected_res);
17378 : 0 : }
17379 : : }
17380 : 0 : }
17381 : :
17382                 : : /* Test all vectors which contain at least 2 elements.  */
17383 : :
17384 : : static void
17385 : 0 : test_nunits_min_2 (machine_mode vmode)
17386 : : {
17387 : 0 : for (int i = 0; i < 10; i++)
17388 : : {
17389 : : /* Case 1: mask = { 0, len, ... } // (2, 1)
17390 : : res = { arg0[0], arg1[0], ... } // (2, 1) */
17391 : 0 : {
17392 : 0 : tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
17393 : 0 : tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
17394 : 0 : poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17395 : :
17396 : 0 : vec_perm_builder builder (len, 2, 1);
17397 : 0 : poly_uint64 mask_elems[] = { 0, len };
17398 : 0 : builder_push_elems (builder, mask_elems);
17399 : :
17400 : 0 : vec_perm_indices sel (builder, 2, len);
17401 : 0 : tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17402 : :
17403 : 0 : tree expected_res[] = { ARG0(0), ARG1(0) };
17404 : 0 : validate_res (2, 1, res, expected_res);
17405 : 0 : }
17406 : :
17407 : : /* Case 2: mask = { 0, len, 1, len+1, ... } // (2, 2)
17408 : : res = { arg0[0], arg1[0], arg0[1], arg1[1], ... } // (2, 2) */
17409 : 0 : {
17410 : 0 : tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
17411 : 0 : tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
17412 : 0 : poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17413 : :
17414 : 0 : vec_perm_builder builder (len, 2, 2);
17415 : 0 : poly_uint64 mask_elems[] = { 0, len, 1, len + 1 };
17416 : 0 : builder_push_elems (builder, mask_elems);
17417 : :
17418 : 0 : vec_perm_indices sel (builder, 2, len);
17419 : 0 : tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17420 : :
17421 : 0 : tree expected_res[] = { ARG0(0), ARG1(0), ARG0(1), ARG1(1) };
17422 : 0 : validate_res (2, 2, res, expected_res);
17423 : 0 : }
17424 : :
17425                 : :       /* Case 4: mask = { 0, 0, 1, ... } // (1, 3)
17426                 : :          Test that the stepped sequence of the pattern selects from the
17427                 : :          same input pattern.  Since the input vectors have npatterns = 2
17428                 : :          and the step (a2 - a1) = 1, the step is not a multiple of the
17429                 : :          input vector's npatterns, so folding should return NULL_TREE.  */
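                 : :       /* Concretely, the encoding { 0, 0, 1, ... } extends to the
                 : :          selector { 0, 0, 1, 2, 3, ... }; with arg0 built from two
                 : :          interleaved patterns, consecutive indices alternate between
                 : :          arg0's patterns, so the stepped part of the result cannot in
                 : :          general be derived by extending a single pattern of arg0's
                 : :          encoding.  */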
17430 : 0 : {
17431 : 0 : tree arg0 = build_vec_cst_rand (vmode, 2, 3, 1, true);
17432 : 0 : tree arg1 = build_vec_cst_rand (vmode, 2, 3, 1);
17433 : 0 : poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17434 : :
17435 : 0 : vec_perm_builder builder (len, 1, 3);
17436 : 0 : poly_uint64 mask_elems[] = { 0, 0, 1 };
17437 : 0 : builder_push_elems (builder, mask_elems);
17438 : :
17439 : 0 : vec_perm_indices sel (builder, 2, len);
17440 : 0 : const char *reason;
17441 : 0 : tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel,
17442 : : &reason);
17443 : 0 : ASSERT_TRUE (res == NULL_TREE);
17444 : 0 : ASSERT_TRUE (!strcmp (reason, "step is not multiple of npatterns"));
17445 : 0 : }
17446 : :
17447 : : /* Case 5: mask = {len, 0, 1, ...} // (1, 3)
17448                 : :          Test that the stepped sequence of the pattern selects from arg0.
17449 : : res = { arg1[0], arg0[0], arg0[1], ... } // (1, 3) */
17450 : 0 : {
17451 : 0 : tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1, true);
17452 : 0 : tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
17453 : 0 : poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17454 : :
17455 : 0 : vec_perm_builder builder (len, 1, 3);
17456 : 0 : poly_uint64 mask_elems[] = { len, 0, 1 };
17457 : 0 : builder_push_elems (builder, mask_elems);
17458 : :
17459 : 0 : vec_perm_indices sel (builder, 2, len);
17460 : 0 : tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17461 : :
17462 : 0 : tree expected_res[] = { ARG1(0), ARG0(0), ARG0(1) };
17463 : 0 : validate_res (1, 3, res, expected_res);
17464 : 0 : }
17465 : :
17466                 : :       /* Case 6: PR111648 - a1 chooses the base element from an input
17467                 : :          vector arg.  In this case, ensure that arg has a natural stepped
17468                 : :          sequence, so that arg's encoding is preserved.
17469 : :
17470 : : As a concrete example, consider:
17471 : : arg0: { -16, -9, -10, ... } // (1, 3)
17472 : : arg1: { -12, -5, -6, ... } // (1, 3)
17473 : : sel = { 0, len, len + 1, ... } // (1, 3)
17474 : :
17475 : : This will create res with following encoding:
17476 : : res = { arg0[0], arg1[0], arg1[1], ... } // (1, 3)
17477 : : = { -16, -12, -5, ... }
17478 : :
17479                 : :          The step in the above encoding would be (-5) - (-12) = 7,
17480                 : :          and hence res[3] would be computed as -5 + 7 = 2
17481                 : :          instead of arg1[2], i.e., -6.
17482 : : Ensure that valid_mask_for_fold_vec_perm_cst returns false
17483 : : for this case. */
17484 : 0 : {
17485 : 0 : tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
17486 : 0 : tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
17487 : 0 : poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17488 : :
17489 : 0 : vec_perm_builder builder (len, 1, 3);
17490 : 0 : poly_uint64 mask_elems[] = { 0, len, len+1 };
17491 : 0 : builder_push_elems (builder, mask_elems);
17492 : :
17493 : 0 : vec_perm_indices sel (builder, 2, len);
17494 : 0 : const char *reason;
17495                 : :          /* FIXME: build_vec_cst_rand may happen to build a natural stepped
17496                 : :             pattern, even if we didn't explicitly tell it to.  So folding may
17497                 : :             not always fail; but if it does, ensure that it is because arg1
17498                 : :             lacks a natural stepped sequence (and not for some other reason).  */
17499 : 0 : tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel, &reason);
17500 : 0 : if (res == NULL_TREE)
17501 : 0 : ASSERT_TRUE (!strcmp (reason, "not a natural stepped sequence"));
17502 : 0 : }
17503 : :
17504 : : /* Case 7: Same as Case 6, except that arg1 contains natural stepped
17505 : : sequence and thus folding should be valid for this case. */
17506 : 0 : {
17507 : 0 : tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
17508 : 0 : tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1, true);
17509 : 0 : poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17510 : :
17511 : 0 : vec_perm_builder builder (len, 1, 3);
17512 : 0 : poly_uint64 mask_elems[] = { 0, len, len+1 };
17513 : 0 : builder_push_elems (builder, mask_elems);
17514 : :
17515 : 0 : vec_perm_indices sel (builder, 2, len);
17516 : 0 : tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17517 : :
17518 : 0 : tree expected_res[] = { ARG0(0), ARG1(0), ARG1(1) };
17519 : 0 : validate_res (1, 3, res, expected_res);
17520 : 0 : }
17521 : :
17522 : : /* Case 8: Same as aarch64/sve/slp_3.c:
17523 : : arg0, arg1 are dup vectors.
17524 : : sel = { 0, len, 1, len+1, 2, len+2, ... } // (2, 3)
17525 : : So res = { arg0[0], arg1[0], ... } // (2, 1)
17526 : :
17527 : : In this case, since the input vectors are dup, only the first two
17528 : : elements per pattern in sel are considered significant. */
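                 : :       /* For instance, since arg0[1] == arg0[0] and arg1[1] == arg1[0]
                 : :          for dup inputs, the full result { arg0[0], arg1[0], arg0[1],
                 : :          arg1[1], ... } collapses to the dup encoding
                 : :          { arg0[0], arg1[0], ... }, which validate_res accepts as (2, 1).  */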
17529 : 0 : {
17530 : 0 : tree arg0 = build_vec_cst_rand (vmode, 1, 1);
17531 : 0 : tree arg1 = build_vec_cst_rand (vmode, 1, 1);
17532 : 0 : poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17533 : :
17534 : 0 : vec_perm_builder builder (len, 2, 3);
17535 : 0 : poly_uint64 mask_elems[] = { 0, len, 1, len + 1, 2, len + 2 };
17536 : 0 : builder_push_elems (builder, mask_elems);
17537 : :
17538 : 0 : vec_perm_indices sel (builder, 2, len);
17539 : 0 : tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17540 : :
17541 : 0 : tree expected_res[] = { ARG0(0), ARG1(0) };
17542 : 0 : validate_res (2, 1, res, expected_res);
17543 : 0 : }
17544 : : }
17545 : 0 : }
17546 : :
17547                 : : /* Test all vectors which contain at least 4 elements.  */
17548 : :
17549 : : static void
17550 : 0 : test_nunits_min_4 (machine_mode vmode)
17551 : : {
17552 : 0 : for (int i = 0; i < 10; i++)
17553 : : {
17554 : : /* Case 1: mask = { 0, len, 1, len+1, ... } // (4, 1)
17555 : : res: { arg0[0], arg1[0], arg0[1], arg1[1], ... } // (4, 1) */
17556 : 0 : {
17557 : 0 : tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
17558 : 0 : tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
17559 : 0 : poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17560 : :
17561 : 0 : vec_perm_builder builder (len, 4, 1);
17562 : 0 : poly_uint64 mask_elems[] = { 0, len, 1, len + 1 };
17563 : 0 : builder_push_elems (builder, mask_elems);
17564 : :
17565 : 0 : vec_perm_indices sel (builder, 2, len);
17566 : 0 : tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17567 : :
17568 : 0 : tree expected_res[] = { ARG0(0), ARG1(0), ARG0(1), ARG1(1) };
17569 : 0 : validate_res (4, 1, res, expected_res);
17570 : 0 : }
17571 : :
17572 : : /* Case 2: sel = {0, 1, 2, ...} // (1, 3)
17573 : : res: { arg0[0], arg0[1], arg0[2], ... } // (1, 3) */
17574 : 0 : {
17575 : 0 : tree arg0 = build_vec_cst_rand (vmode, 1, 3, 2);
17576 : 0 : tree arg1 = build_vec_cst_rand (vmode, 1, 3, 2);
17577 : 0 : poly_uint64 arg0_len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17578 : :
17579 : 0 : vec_perm_builder builder (arg0_len, 1, 3);
17580 : 0 : poly_uint64 mask_elems[] = {0, 1, 2};
17581 : 0 : builder_push_elems (builder, mask_elems);
17582 : :
17583 : 0 : vec_perm_indices sel (builder, 2, arg0_len);
17584 : 0 : tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17585 : 0 : tree expected_res[] = { ARG0(0), ARG0(1), ARG0(2) };
17586 : 0 : validate_res (1, 3, res, expected_res);
17587 : 0 : }
17588 : :
17589 : : /* Case 3: sel = {len, len+1, len+2, ...} // (1, 3)
17590 : : res: { arg1[0], arg1[1], arg1[2], ... } // (1, 3) */
17591 : 0 : {
17592 : 0 : tree arg0 = build_vec_cst_rand (vmode, 1, 3, 2);
17593 : 0 : tree arg1 = build_vec_cst_rand (vmode, 1, 3, 2);
17594 : 0 : poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17595 : :
17596 : 0 : vec_perm_builder builder (len, 1, 3);
17597 : 0 : poly_uint64 mask_elems[] = {len, len + 1, len + 2};
17598 : 0 : builder_push_elems (builder, mask_elems);
17599 : :
17600 : 0 : vec_perm_indices sel (builder, 2, len);
17601 : 0 : tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17602 : 0 : tree expected_res[] = { ARG1(0), ARG1(1), ARG1(2) };
17603 : 0 : validate_res (1, 3, res, expected_res);
17604 : 0 : }
17605 : :
17606 : : /* Case 4:
17607 : : sel = { len, 0, 2, ... } // (1, 3)
17608                 : :          This should return NULL_TREE because the stepped sequence crosses
17609                 : :          the input vectors.  To see why,
17610                 : :          let's assume len = C + Cx
17611 : : a1 = 0
17612 : : S = 2
17613 : : esel = arg0_len / sel_npatterns = C + Cx
17614 : : ae = 0 + (esel - 2) * S
17615 : : = 0 + (C + Cx - 2) * 2
17616 : : = 2(C-2) + 2Cx
17617 : :
17618 : : For C >= 4:
17619 : : Let q1 = a1 / arg0_len = 0 / (C + Cx) = 0
17620 : : Let qe = ae / arg0_len = (2(C-2) + 2Cx) / (C + Cx) = 1
17621                 : :          Since q1 != qe, the stepped sequence crosses the input vectors.
17622 : : So return NULL_TREE. */
17623 : 0 : {
17624 : 0 : tree arg0 = build_vec_cst_rand (vmode, 1, 3, 2);
17625 : 0 : tree arg1 = build_vec_cst_rand (vmode, 1, 3, 2);
17626 : 0 : poly_uint64 arg0_len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17627 : :
17628 : 0 : vec_perm_builder builder (arg0_len, 1, 3);
17629 : 0 : poly_uint64 mask_elems[] = { arg0_len, 0, 2 };
17630 : 0 : builder_push_elems (builder, mask_elems);
17631 : :
17632 : 0 : vec_perm_indices sel (builder, 2, arg0_len);
17633 : 0 : const char *reason;
17634 : 0 : tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel, &reason);
17635 : 0 : ASSERT_TRUE (res == NULL_TREE);
17636 : 0 : ASSERT_TRUE (!strcmp (reason, "crossed input vectors"));
17637 : 0 : }
17638 : :
17639 : : /* Case 5: npatterns(arg0) = 4 > npatterns(sel) = 2
17640 : : mask = { 0, len, 1, len + 1, ...} // (2, 2)
17641 : : res = { arg0[0], arg1[0], arg0[1], arg1[1], ... } // (2, 2)
17642 : :
17643 : : Note that fold_vec_perm_cst will set
17644 : : res_npatterns = max(4, max(4, 2)) = 4
17645 : : However after canonicalizing, we will end up with shape (2, 2). */
17646 : 0 : {
17647 : 0 : tree arg0 = build_vec_cst_rand (vmode, 4, 1);
17648 : 0 : tree arg1 = build_vec_cst_rand (vmode, 4, 1);
17649 : 0 : poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17650 : :
17651 : 0 : vec_perm_builder builder (len, 2, 2);
17652 : 0 : poly_uint64 mask_elems[] = { 0, len, 1, len + 1 };
17653 : 0 : builder_push_elems (builder, mask_elems);
17654 : :
17655 : 0 : vec_perm_indices sel (builder, 2, len);
17656 : 0 : tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17657 : 0 : tree expected_res[] = { ARG0(0), ARG1(0), ARG0(1), ARG1(1) };
17658 : 0 : validate_res (2, 2, res, expected_res);
17659 : 0 : }
17660 : :
17661                 : :       /* Case 6: Test a combination in sel where one pattern is a dup and
17662                 : :          the other is a stepped sequence.
17663 : : sel = { 0, 0, 0, 1, 0, 2, ... } // (2, 3)
17664 : : res = { arg0[0], arg0[0], arg0[0],
17665 : : arg0[1], arg0[0], arg0[2], ... } // (2, 3) */
17666 : 0 : {
17667 : 0 : tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
17668 : 0 : tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
17669 : 0 : poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17670 : :
17671 : 0 : vec_perm_builder builder (len, 2, 3);
17672 : 0 : poly_uint64 mask_elems[] = { 0, 0, 0, 1, 0, 2 };
17673 : 0 : builder_push_elems (builder, mask_elems);
17674 : :
17675 : 0 : vec_perm_indices sel (builder, 2, len);
17676 : 0 : tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17677 : :
17678 : 0 : tree expected_res[] = { ARG0(0), ARG0(0), ARG0(0),
17679 : 0 : ARG0(1), ARG0(0), ARG0(2) };
17680 : 0 : validate_res (2, 3, res, expected_res);
17681 : 0 : }
17682 : :
17683                 : :       /* Case 7: PR111048: Check that we set arg_npatterns correctly
17684                 : :          when arg0, arg1 and sel have different numbers of patterns.
17685 : : arg0 is of shape (1, 1)
17686 : : arg1 is of shape (4, 1)
17687                 : :          sel is of shape (2, 3) = { 0, len, 1, len+1, 2, len+2, ... }
17688 : :
17689 : : In this case the pattern: {len, len+1, len+2, ...} chooses arg1.
17690 : : However,
17691 : : step = (len+2) - (len+1) = 1
17692 : : arg_npatterns = VECTOR_CST_NPATTERNS (arg1) = 4
17693 : : Since step is not a multiple of arg_npatterns,
17694 : : valid_mask_for_fold_vec_perm_cst should return false,
17695 : : and thus fold_vec_perm_cst should return NULL_TREE. */
17696 : 0 : {
17697 : 0 : tree arg0 = build_vec_cst_rand (vmode, 1, 1);
17698 : 0 : tree arg1 = build_vec_cst_rand (vmode, 4, 1);
17699 : 0 : poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17700 : :
17701 : 0 : vec_perm_builder builder (len, 2, 3);
17702 : 0 : poly_uint64 mask_elems[] = { 0, len, 1, len + 1, 2, len + 2 };
17703 : 0 : builder_push_elems (builder, mask_elems);
17704 : :
17705 : 0 : vec_perm_indices sel (builder, 2, len);
17706 : 0 : const char *reason;
17707 : 0 : tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel, &reason);
17708 : :
17709 : 0 : ASSERT_TRUE (res == NULL_TREE);
17710 : 0 : ASSERT_TRUE (!strcmp (reason, "step is not multiple of npatterns"));
17711 : 0 : }
17712 : :
17713                 : :       /* Case 8: PR111754: When the input vector is not a stepped sequence,
17714                 : :          check that the result is not a stepped sequence either, even if sel
17715                 : :          has a stepped sequence.  */
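                 : :       /* For instance, with arg0 of the form { a, b, b, b, ... } (a (1, 2)
                 : :          encoding), permuting it with the stepped selector { 0, 1, 2, ... }
                 : :          should yield { a, b, b, ... } again, keeping nelts_per_pattern at 2
                 : :          rather than introducing a step.  */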
17716 : 0 : {
17717 : 0 : tree arg0 = build_vec_cst_rand (vmode, 1, 2);
17718 : 0 : poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17719 : :
17720 : 0 : vec_perm_builder builder (len, 1, 3);
17721 : 0 : poly_uint64 mask_elems[] = { 0, 1, 2 };
17722 : 0 : builder_push_elems (builder, mask_elems);
17723 : :
17724 : 0 : vec_perm_indices sel (builder, 1, len);
17725 : 0 : tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg0, sel);
17726 : :
17727 : 0 : tree expected_res[] = { ARG0(0), ARG0(1) };
17728 : 0 : validate_res (sel.encoding ().npatterns (), 2, res, expected_res);
17729 : 0 : }
17730 : :
17731 : : /* Case 9: If sel doesn't contain a stepped sequence,
17732 : : check that the result has same encoding as sel, irrespective
17733 : : of shape of input vectors. */
17734 : 0 : {
17735 : 0 : tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
17736 : 0 : tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
17737 : 0 : poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17738 : :
17739 : 0 : vec_perm_builder builder (len, 1, 2);
17740 : 0 : poly_uint64 mask_elems[] = { 0, len };
17741 : 0 : builder_push_elems (builder, mask_elems);
17742 : :
17743 : 0 : vec_perm_indices sel (builder, 2, len);
17744 : 0 : tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17745 : :
17746 : 0 : tree expected_res[] = { ARG0(0), ARG1(0) };
17747 : 0 : validate_res (sel.encoding ().npatterns (),
17748 : 0 : sel.encoding ().nelts_per_pattern (), res, expected_res);
17749 : 0 : }
17750 : : }
17751 : 0 : }
17752 : :
17753                 : : /* Test all vectors which contain at least 8 elements.  */
17754 : :
17755 : : static void
17756 : 0 : test_nunits_min_8 (machine_mode vmode)
17757 : : {
17758 : 0 : for (int i = 0; i < 10; i++)
17759 : : {
17760 : : /* Case 1: sel_npatterns (4) > input npatterns (2)
17761 : : sel: { 0, 0, 1, len, 2, 0, 3, len, 4, 0, 5, len, ...} // (4, 3)
17762                 : :          res: { arg0[0], arg0[0], arg0[1], arg1[0],
17763 : : arg0[2], arg0[0], arg0[3], arg1[0],
17764 : : arg0[4], arg0[0], arg0[5], arg1[0], ... } // (4, 3) */
17765 : 0 : {
17766 : 0 : tree arg0 = build_vec_cst_rand (vmode, 2, 3, 2);
17767 : 0 : tree arg1 = build_vec_cst_rand (vmode, 2, 3, 2);
17768 : 0 : poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17769 : :
17770 : 0 : vec_perm_builder builder(len, 4, 3);
17771 : 0 : poly_uint64 mask_elems[] = { 0, 0, 1, len, 2, 0, 3, len,
17772 : 0 : 4, 0, 5, len };
17773 : 0 : builder_push_elems (builder, mask_elems);
17774 : :
17775 : 0 : vec_perm_indices sel (builder, 2, len);
17776 : 0 : tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17777 : :
17778 : 0 : tree expected_res[] = { ARG0(0), ARG0(0), ARG0(1), ARG1(0),
17779 : 0 : ARG0(2), ARG0(0), ARG0(3), ARG1(0),
17780 : 0 : ARG0(4), ARG0(0), ARG0(5), ARG1(0) };
17781 : 0 : validate_res (4, 3, res, expected_res);
17782 : 0 : }
17783 : : }
17784 : 0 : }
17785 : :
17786                 : : /* Test vectors for which nunits.coeffs[0] <= 4.  */
17787 : :
17788 : : static void
17789 : 0 : test_nunits_max_4 (machine_mode vmode)
17790 : : {
17791 : : /* Case 1: mask = {0, 4, ...} // (1, 2)
17792 : : This should return NULL_TREE because the index 4 may choose
17793 : : from either arg0 or arg1 depending on vector length. */
17794 : 0 : {
17795 : 0 : tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
17796 : 0 : tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
17797 : 0 : poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17798 : :
17799 : 0 : vec_perm_builder builder (len, 1, 2);
17800 : 0 : poly_uint64 mask_elems[] = {0, 4};
17801 : 0 : builder_push_elems (builder, mask_elems);
17802 : :
17803 : 0 : vec_perm_indices sel (builder, 2, len);
17804 : 0 : const char *reason;
17805 : 0 : tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel, &reason);
17806 : 0 : ASSERT_TRUE (res == NULL_TREE);
17807 : 0 : ASSERT_TRUE (reason != NULL);
17808 : 0 : ASSERT_TRUE (!strcmp (reason, "cannot divide selector element by arg len"));
17809 : 0 : }
17810 : 0 : }
17811 : :
17812 : : #undef ARG0
17813 : : #undef ARG1
17814 : :
17815                 : : /* Return true if SIZE is of the form C + Cx and C is a power of 2.  */
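                 : : /* For example, a VLA size such as 4 + 4x satisfies this predicate, whereas
                 : :    the fixed size 4 (constant) or a VLA size such as 3 + 3x (leading
                 : :    coefficient not a power of 2) does not.  */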
17816 : :
17817 : : static bool
17818 : 0 : is_simple_vla_size (poly_uint64 size)
17819 : : {
17820 : 128 : if (size.is_constant ()
17821 : : || !pow2p_hwi (size.coeffs[0]))
17822 : 0 : return false;
17823 : : for (unsigned i = 1; i < ARRAY_SIZE (size.coeffs); ++i)
17824 : : if (size.coeffs[i] != (i <= 1 ? size.coeffs[0] : 0))
17825 : : return false;
17826 : : return true;
17827 : : }
17828 : :
17829 : : /* Execute fold_vec_perm_cst unit tests. */
17830 : :
17831 : : static void
17832 : 4 : test ()
17833 : : {
17834 : 4 : machine_mode vnx4si_mode = E_VOIDmode;
17835 : 4 : machine_mode v4si_mode = E_VOIDmode;
17836 : :
17837 : 4 : machine_mode vmode;
17838 : 132 : FOR_EACH_MODE_IN_CLASS (vmode, MODE_VECTOR_INT)
17839 : : {
17840                 : :       /* Obtain modes corresponding to VNx4SI and V4SI,
17841                 : :          to call the mixed mode tests below.
17842                 : :          FIXME: Is there a better way to do this?  */
17843 : 128 : if (GET_MODE_INNER (vmode) == SImode)
17844 : : {
17845 : 128 : poly_uint64 nunits = GET_MODE_NUNITS (vmode);
17846 : 128 : if (is_simple_vla_size (nunits)
17847 : : && nunits.coeffs[0] == 4)
17848 : : vnx4si_mode = vmode;
17849 : 128 : else if (known_eq (nunits, poly_uint64 (4)))
17850 : 128 : v4si_mode = vmode;
17851 : : }
17852 : :
17853 : 128 : if (!is_simple_vla_size (GET_MODE_NUNITS (vmode))
17854 : : || !targetm.vector_mode_supported_p (vmode))
17855 : 128 : continue;
17856 : :
17857 : : poly_uint64 nunits = GET_MODE_NUNITS (vmode);
17858 : : test_all_nunits (vmode);
17859 : : if (nunits.coeffs[0] >= 2)
17860 : : test_nunits_min_2 (vmode);
17861 : : if (nunits.coeffs[0] >= 4)
17862 : : test_nunits_min_4 (vmode);
17863 : : if (nunits.coeffs[0] >= 8)
17864 : : test_nunits_min_8 (vmode);
17865 : :
17866 : : if (nunits.coeffs[0] <= 4)
17867 : : test_nunits_max_4 (vmode);
17868 : : }
17869 : :
17870 : 4 : if (vnx4si_mode != E_VOIDmode && v4si_mode != E_VOIDmode
17871 : : && targetm.vector_mode_supported_p (vnx4si_mode)
17872 : : && targetm.vector_mode_supported_p (v4si_mode))
17873 : : {
17874 : : test_vnx4si_v4si (vnx4si_mode, v4si_mode);
17875 : : test_v4si_vnx4si (v4si_mode, vnx4si_mode);
17876 : : }
17877 : 4 : }
17878 : : } // end of test_fold_vec_perm_cst namespace
17879 : :
17880 : : /* Verify that various binary operations on vectors are folded
17881 : : correctly. */
17882 : :
17883 : : static void
17884 : 4 : test_vector_folding ()
17885 : : {
17886 : 4 : tree inner_type = integer_type_node;
17887 : 4 : tree type = build_vector_type (inner_type, 4);
17888 : 4 : tree zero = build_zero_cst (type);
17889 : 4 : tree one = build_one_cst (type);
17890 : 4 : tree index = build_index_vector (type, 0, 1);
17891 : :
17892 : : /* Verify equality tests that return a scalar boolean result. */
17893 : 4 : tree res_type = boolean_type_node;
17894 : 4 : ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, one)));
17895 : 4 : ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero)));
17896 : 4 : ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one)));
17897 : 4 : ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
17898 : 4 : ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, index, one)));
17899 : 4 : ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
17900 : : index, one)));
17901 : 4 : ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type,
17902 : : index, index)));
17903 : 4 : ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
17904 : : index, index)));
17905 : 4 : }
17906 : :
17907 : : /* Verify folding of VEC_DUPLICATE_EXPRs. */
17908 : :
17909 : : static void
17910 : 4 : test_vec_duplicate_folding ()
17911 : : {
17912 : 4 : scalar_int_mode int_mode = SCALAR_INT_TYPE_MODE (ssizetype);
17913 : 4 : machine_mode vec_mode = targetm.vectorize.preferred_simd_mode (int_mode);
17914 : : /* This will be 1 if VEC_MODE isn't a vector mode. */
17915 : 8 : poly_uint64 nunits = GET_MODE_NUNITS (vec_mode);
17916 : :
17917 : 4 : tree type = build_vector_type (ssizetype, nunits);
17918 : 4 : tree dup5_expr = fold_unary (VEC_DUPLICATE_EXPR, type, ssize_int (5));
17919 : 4 : tree dup5_cst = build_vector_from_val (type, ssize_int (5));
17920 : 4 : ASSERT_TRUE (operand_equal_p (dup5_expr, dup5_cst, 0));
17921 : 4 : }
17922 : :
17923 : : /* Run all of the selftests within this file. */
17924 : :
17925 : : void
17926 : 4 : fold_const_cc_tests ()
17927 : : {
17928 : 4 : test_arithmetic_folding ();
17929 : 4 : test_vector_folding ();
17930 : 4 : test_vec_duplicate_folding ();
17931 : 4 : test_fold_vec_perm_cst::test ();
17932 : 4 : test_operand_equality::test ();
17933 : 4 : }
17934 : :
17935 : : } // namespace selftest
17936 : :
17937 : : #endif /* CHECKING_P */
|