Branch data Line data Source code
1 : : /* Fold a constant sub-tree into a single node for C-compiler
2 : : Copyright (C) 1987-2024 Free Software Foundation, Inc.
3 : :
4 : : This file is part of GCC.
5 : :
6 : : GCC is free software; you can redistribute it and/or modify it under
7 : : the terms of the GNU General Public License as published by the Free
8 : : Software Foundation; either version 3, or (at your option) any later
9 : : version.
10 : :
11 : : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 : : WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 : : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 : : for more details.
15 : :
16 : : You should have received a copy of the GNU General Public License
17 : : along with GCC; see the file COPYING3. If not see
18 : : <http://www.gnu.org/licenses/>. */
19 : :
20 : : /*@@ This file should be rewritten to use an arbitrary precision
21 : : @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 : : @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 : : @@ The routines that translate from the ap rep should
24 : : @@ warn if precision et. al. is lost.
25 : : @@ This would also make life easier when this technology is used
26 : : @@ for cross-compilers. */
27 : :
28 : : /* The entry points in this file are fold, size_int_wide and size_binop.
29 : :
30 : : fold takes a tree as argument and returns a simplified tree.
31 : :
32 : : size_binop takes a tree code for an arithmetic operation
33 : : and two operands that are trees, and produces a tree for the
34 : : result, assuming the type comes from `sizetype'.
35 : :
36 : : size_int takes an integer value, and creates a tree constant
37 : : with type from `sizetype'.
38 : :
39 : : Note: Since the folders get called on non-gimple code as well as
40 : : gimple code, we need to handle GIMPLE tuples as well as their
41 : : corresponding tree equivalents. */
42 : :
43 : : #define INCLUDE_ALGORITHM
44 : : #include "config.h"
45 : : #include "system.h"
46 : : #include "coretypes.h"
47 : : #include "backend.h"
48 : : #include "target.h"
49 : : #include "rtl.h"
50 : : #include "tree.h"
51 : : #include "gimple.h"
52 : : #include "predict.h"
53 : : #include "memmodel.h"
54 : : #include "tm_p.h"
55 : : #include "tree-ssa-operands.h"
56 : : #include "optabs-query.h"
57 : : #include "cgraph.h"
58 : : #include "diagnostic-core.h"
59 : : #include "flags.h"
60 : : #include "alias.h"
61 : : #include "fold-const.h"
62 : : #include "fold-const-call.h"
63 : : #include "stor-layout.h"
64 : : #include "calls.h"
65 : : #include "tree-iterator.h"
66 : : #include "expr.h"
67 : : #include "intl.h"
68 : : #include "langhooks.h"
69 : : #include "tree-eh.h"
70 : : #include "gimplify.h"
71 : : #include "tree-dfa.h"
72 : : #include "builtins.h"
73 : : #include "generic-match.h"
74 : : #include "gimple-iterator.h"
75 : : #include "gimple-fold.h"
76 : : #include "tree-into-ssa.h"
77 : : #include "md5.h"
78 : : #include "case-cfn-macros.h"
79 : : #include "stringpool.h"
80 : : #include "tree-vrp.h"
81 : : #include "tree-ssanames.h"
82 : : #include "selftest.h"
83 : : #include "stringpool.h"
84 : : #include "attribs.h"
85 : : #include "tree-vector-builder.h"
86 : : #include "vec-perm-indices.h"
87 : : #include "asan.h"
88 : : #include "gimple-range.h"
89 : :
90 : : /* Nonzero if we are folding constants inside an initializer or a C++
91 : : manifestly-constant-evaluated context; zero otherwise.
92 : : Should be used when folding in initializer enables additional
93 : : optimizations. */
94 : : int folding_initializer = 0;
95 : :
96 : : /* Nonzero if we are folding C++ manifestly-constant-evaluated context; zero
97 : : otherwise.
98 : : Should be used when certain constructs shouldn't be optimized
99 : : during folding in that context. */
100 : : bool folding_cxx_constexpr = false;
101 : :
102 : : /* The following constants represent a bit based encoding of GCC's
103 : : comparison operators. This encoding simplifies transformations
104 : : on relational comparison operators, such as AND and OR. */
105 : : enum comparison_code {
106 : : COMPCODE_FALSE = 0,
107 : : COMPCODE_LT = 1,
108 : : COMPCODE_EQ = 2,
109 : : COMPCODE_LE = 3,
110 : : COMPCODE_GT = 4,
111 : : COMPCODE_LTGT = 5,
112 : : COMPCODE_GE = 6,
113 : : COMPCODE_ORD = 7,
114 : : COMPCODE_UNORD = 8,
115 : : COMPCODE_UNLT = 9,
116 : : COMPCODE_UNEQ = 10,
117 : : COMPCODE_UNLE = 11,
118 : : COMPCODE_UNGT = 12,
119 : : COMPCODE_NE = 13,
120 : : COMPCODE_UNGE = 14,
121 : : COMPCODE_TRUE = 15
122 : : };
123 : :
124 : : static bool negate_expr_p (tree);
125 : : static tree negate_expr (tree);
126 : : static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
127 : : static enum comparison_code comparison_to_compcode (enum tree_code);
128 : : static enum tree_code compcode_to_comparison (enum comparison_code);
129 : : static bool twoval_comparison_p (tree, tree *, tree *);
130 : : static tree eval_subst (location_t, tree, tree, tree, tree, tree);
131 : : static tree optimize_bit_field_compare (location_t, enum tree_code,
132 : : tree, tree, tree);
133 : : static bool simple_operand_p (const_tree);
134 : : static tree range_binop (enum tree_code, tree, tree, int, tree, int);
135 : : static tree range_predecessor (tree);
136 : : static tree range_successor (tree);
137 : : static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
138 : : static tree fold_cond_expr_with_comparison (location_t, tree, enum tree_code,
139 : : tree, tree, tree, tree);
140 : : static tree unextend (tree, int, int, tree);
141 : : static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
142 : : static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
143 : : static tree fold_binary_op_with_conditional_arg (location_t,
144 : : enum tree_code, tree,
145 : : tree, tree,
146 : : tree, tree, int);
147 : : static tree fold_negate_const (tree, tree);
148 : : static tree fold_not_const (const_tree, tree);
149 : : static tree fold_relational_const (enum tree_code, tree, tree, tree);
150 : : static tree fold_convert_const (enum tree_code, tree, tree);
151 : : static tree fold_view_convert_expr (tree, tree);
152 : : static tree fold_negate_expr (location_t, tree);
153 : :
154 : : /* This is a helper function to detect min/max for some operands of COND_EXPR.
155 : : The form is "(EXP0 CMP EXP1) ? EXP2 : EXP3". */
156 : : tree_code
157 : 130540 : minmax_from_comparison (tree_code cmp, tree exp0, tree exp1, tree exp2, tree exp3)
158 : : {
159 : 130540 : enum tree_code code = ERROR_MARK;
160 : :
161 : 130540 : if (HONOR_NANS (exp0) || HONOR_SIGNED_ZEROS (exp0))
162 : 11 : return ERROR_MARK;
163 : :
164 : 130529 : if (!operand_equal_p (exp0, exp2))
165 : : return ERROR_MARK;
166 : :
167 : 130529 : if (TREE_CODE (exp3) == INTEGER_CST && TREE_CODE (exp1) == INTEGER_CST)
168 : : {
169 : 127908 : if (wi::to_widest (exp1) == (wi::to_widest (exp3) - 1))
170 : : {
171 : : /* X <= Y - 1 equals to X < Y. */
172 : 74553 : if (cmp == LE_EXPR)
173 : : code = LT_EXPR;
174 : : /* X > Y - 1 equals to X >= Y. */
175 : 74220 : if (cmp == GT_EXPR)
176 : : code = GE_EXPR;
177 : : /* a != MIN_RANGE<a> ? a : MIN_RANGE<a>+1 -> MAX_EXPR<MIN_RANGE<a>+1, a> */
178 : 65143 : if (cmp == NE_EXPR && TREE_CODE (exp0) == SSA_NAME)
179 : : {
180 : 15040 : value_range r;
181 : 30080 : get_range_query (cfun)->range_of_expr (r, exp0);
182 : 15040 : if (r.undefined_p ())
183 : 1 : r.set_varying (TREE_TYPE (exp0));
184 : :
185 : 15040 : widest_int min = widest_int::from (r.lower_bound (),
186 : 30080 : TYPE_SIGN (TREE_TYPE (exp0)));
187 : 15040 : if (min == wi::to_widest (exp1))
188 : 737 : code = MAX_EXPR;
189 : 15040 : }
190 : : }
191 : 127908 : if (wi::to_widest (exp1) == (wi::to_widest (exp3) + 1))
192 : : {
193 : : /* X < Y + 1 equals to X <= Y. */
194 : 988 : if (cmp == LT_EXPR)
195 : : code = LE_EXPR;
196 : : /* X >= Y + 1 equals to X > Y. */
197 : 959 : if (cmp == GE_EXPR)
198 : : code = GT_EXPR;
199 : : /* a != MAX_RANGE<a> ? a : MAX_RANGE<a>-1 -> MIN_EXPR<MIN_RANGE<a>-1, a> */
200 : 865 : if (cmp == NE_EXPR && TREE_CODE (exp0) == SSA_NAME)
201 : : {
202 : 479 : value_range r;
203 : 958 : get_range_query (cfun)->range_of_expr (r, exp0);
204 : 479 : if (r.undefined_p ())
205 : 0 : r.set_varying (TREE_TYPE (exp0));
206 : :
207 : 479 : widest_int max = widest_int::from (r.upper_bound (),
208 : 958 : TYPE_SIGN (TREE_TYPE (exp0)));
209 : 479 : if (max == wi::to_widest (exp1))
210 : 43 : code = MIN_EXPR;
211 : 479 : }
212 : : }
213 : : }
214 : 127908 : if (code != ERROR_MARK
215 : 130529 : || operand_equal_p (exp1, exp3))
216 : : {
217 : 23365 : if (cmp == LT_EXPR || cmp == LE_EXPR)
218 : 2359 : code = MIN_EXPR;
219 : 23365 : if (cmp == GT_EXPR || cmp == GE_EXPR)
220 : 20115 : code = MAX_EXPR;
221 : : }
222 : : return code;
223 : : }
224 : :
225 : : /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
226 : : Otherwise, return LOC. */
227 : :
228 : : static location_t
229 : 2082410 : expr_location_or (tree t, location_t loc)
230 : : {
231 : 580922 : location_t tloc = EXPR_LOCATION (t);
232 : 2068624 : return tloc == UNKNOWN_LOCATION ? loc : tloc;
233 : : }
234 : :
235 : : /* Similar to protected_set_expr_location, but never modify x in place,
236 : : if location can and needs to be set, unshare it. */
237 : :
238 : : tree
239 : 3520890 : protected_set_expr_location_unshare (tree x, location_t loc)
240 : : {
241 : 3520890 : if (CAN_HAVE_LOCATION_P (x)
242 : 3109242 : && EXPR_LOCATION (x) != loc
243 : 1435603 : && !(TREE_CODE (x) == SAVE_EXPR
244 : 718020 : || TREE_CODE (x) == TARGET_EXPR
245 : : || TREE_CODE (x) == BIND_EXPR))
246 : : {
247 : 717271 : x = copy_node (x);
248 : 717271 : SET_EXPR_LOCATION (x, loc);
249 : : }
250 : 3520890 : return x;
251 : : }
252 : :
253 : : /* If ARG2 divides ARG1 with zero remainder, carries out the exact
254 : : division and returns the quotient. Otherwise returns
255 : : NULL_TREE. */
256 : :
257 : : tree
258 : 0 : div_if_zero_remainder (const_tree arg1, const_tree arg2)
259 : : {
260 : 0 : widest_int quo;
261 : :
262 : 0 : if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
263 : : SIGNED, &quo))
264 : 0 : return wide_int_to_tree (TREE_TYPE (arg1), quo);
265 : :
266 : : return NULL_TREE;
267 : 0 : }
268 : :
269 : : /* This is nonzero if we should defer warnings about undefined
270 : : overflow. This facility exists because these warnings are a
271 : : special case. The code to estimate loop iterations does not want
272 : : to issue any warnings, since it works with expressions which do not
273 : : occur in user code. Various bits of cleanup code call fold(), but
274 : : only use the result if it has certain characteristics (e.g., is a
275 : : constant); that code only wants to issue a warning if the result is
276 : : used. */
277 : :
278 : : static int fold_deferring_overflow_warnings;
279 : :
280 : : /* If a warning about undefined overflow is deferred, this is the
281 : : warning. Note that this may cause us to turn two warnings into
282 : : one, but that is fine since it is sufficient to only give one
283 : : warning per expression. */
284 : :
285 : : static const char* fold_deferred_overflow_warning;
286 : :
287 : : /* If a warning about undefined overflow is deferred, this is the
288 : : level at which the warning should be emitted. */
289 : :
290 : : static enum warn_strict_overflow_code fold_deferred_overflow_code;
291 : :
292 : : /* Start deferring overflow warnings. We could use a stack here to
293 : : permit nested calls, but at present it is not necessary. */
294 : :
295 : : void
296 : 1018822954 : fold_defer_overflow_warnings (void)
297 : : {
298 : 1018822954 : ++fold_deferring_overflow_warnings;
299 : 1018822954 : }
300 : :
301 : : /* Stop deferring overflow warnings. If there is a pending warning,
302 : : and ISSUE is true, then issue the warning if appropriate. STMT is
303 : : the statement with which the warning should be associated (used for
304 : : location information); STMT may be NULL. CODE is the level of the
305 : : warning--a warn_strict_overflow_code value. This function will use
306 : : the smaller of CODE and the deferred code when deciding whether to
307 : : issue the warning. CODE may be zero to mean to always use the
308 : : deferred code. */
309 : :
310 : : void
311 : 1018822954 : fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
312 : : {
313 : 1018822954 : const char *warnmsg;
314 : 1018822954 : location_t locus;
315 : :
316 : 1018822954 : gcc_assert (fold_deferring_overflow_warnings > 0);
317 : 1018822954 : --fold_deferring_overflow_warnings;
318 : 1018822954 : if (fold_deferring_overflow_warnings > 0)
319 : : {
320 : 7561049 : if (fold_deferred_overflow_warning != NULL
321 : 1610151 : && code != 0
322 : 0 : && code < (int) fold_deferred_overflow_code)
323 : 0 : fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
324 : 7561049 : return;
325 : : }
326 : :
327 : 1011261905 : warnmsg = fold_deferred_overflow_warning;
328 : 1011261905 : fold_deferred_overflow_warning = NULL;
329 : :
330 : 1011261905 : if (!issue || warnmsg == NULL)
331 : : return;
332 : :
333 : 10051 : if (warning_suppressed_p (stmt, OPT_Wstrict_overflow))
334 : : return;
335 : :
336 : : /* Use the smallest code level when deciding to issue the
337 : : warning. */
338 : 10051 : if (code == 0 || code > (int) fold_deferred_overflow_code)
339 : 10051 : code = fold_deferred_overflow_code;
340 : :
341 : 10051 : if (!issue_strict_overflow_warning (code))
342 : : return;
343 : :
344 : 0 : if (stmt == NULL)
345 : : locus = input_location;
346 : : else
347 : 0 : locus = gimple_location (stmt);
348 : 0 : warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
349 : : }
350 : :
351 : : /* Stop deferring overflow warnings, ignoring any deferred
352 : : warnings. */
353 : :
354 : : void
355 : 162758332 : fold_undefer_and_ignore_overflow_warnings (void)
356 : : {
357 : 162758332 : fold_undefer_overflow_warnings (false, NULL, 0);
358 : 162758332 : }
359 : :
360 : : /* Whether we are deferring overflow warnings. */
361 : :
362 : : bool
363 : 295270818 : fold_deferring_overflow_warnings_p (void)
364 : : {
365 : 295270818 : return fold_deferring_overflow_warnings > 0;
366 : : }
367 : :
368 : : /* This is called when we fold something based on the fact that signed
369 : : overflow is undefined. */
370 : :
371 : : void
372 : 1590564 : fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
373 : : {
374 : 1590564 : if (fold_deferring_overflow_warnings > 0)
375 : : {
376 : 1512930 : if (fold_deferred_overflow_warning == NULL
377 : 684339 : || wc < fold_deferred_overflow_code)
378 : : {
379 : 844235 : fold_deferred_overflow_warning = gmsgid;
380 : 844235 : fold_deferred_overflow_code = wc;
381 : : }
382 : : }
383 : 77634 : else if (issue_strict_overflow_warning (wc))
384 : 7 : warning (OPT_Wstrict_overflow, gmsgid);
385 : 1590564 : }
386 : :
387 : : /* Return true if the built-in mathematical function specified by CODE
388 : : is odd, i.e. -f(x) == f(-x). */
389 : :
390 : : bool
391 : 1821026 : negate_mathfn_p (combined_fn fn)
392 : : {
393 : 1821026 : switch (fn)
394 : : {
395 : : CASE_CFN_ASIN:
396 : : CASE_CFN_ASIN_FN:
397 : : CASE_CFN_ASINH:
398 : : CASE_CFN_ASINH_FN:
399 : : CASE_CFN_ATAN:
400 : : CASE_CFN_ATAN_FN:
401 : : CASE_CFN_ATANH:
402 : : CASE_CFN_ATANH_FN:
403 : : CASE_CFN_CASIN:
404 : : CASE_CFN_CASIN_FN:
405 : : CASE_CFN_CASINH:
406 : : CASE_CFN_CASINH_FN:
407 : : CASE_CFN_CATAN:
408 : : CASE_CFN_CATAN_FN:
409 : : CASE_CFN_CATANH:
410 : : CASE_CFN_CATANH_FN:
411 : : CASE_CFN_CBRT:
412 : : CASE_CFN_CBRT_FN:
413 : : CASE_CFN_CPROJ:
414 : : CASE_CFN_CPROJ_FN:
415 : : CASE_CFN_CSIN:
416 : : CASE_CFN_CSIN_FN:
417 : : CASE_CFN_CSINH:
418 : : CASE_CFN_CSINH_FN:
419 : : CASE_CFN_CTAN:
420 : : CASE_CFN_CTAN_FN:
421 : : CASE_CFN_CTANH:
422 : : CASE_CFN_CTANH_FN:
423 : : CASE_CFN_ERF:
424 : : CASE_CFN_ERF_FN:
425 : : CASE_CFN_LLROUND:
426 : : CASE_CFN_LLROUND_FN:
427 : : CASE_CFN_LROUND:
428 : : CASE_CFN_LROUND_FN:
429 : : CASE_CFN_ROUND:
430 : : CASE_CFN_ROUNDEVEN:
431 : : CASE_CFN_ROUNDEVEN_FN:
432 : : CASE_CFN_SIN:
433 : : CASE_CFN_SIN_FN:
434 : : CASE_CFN_SINH:
435 : : CASE_CFN_SINH_FN:
436 : : CASE_CFN_TAN:
437 : : CASE_CFN_TAN_FN:
438 : : CASE_CFN_TANH:
439 : : CASE_CFN_TANH_FN:
440 : : CASE_CFN_TRUNC:
441 : : CASE_CFN_TRUNC_FN:
442 : : return true;
443 : :
444 : 330 : CASE_CFN_LLRINT:
445 : 330 : CASE_CFN_LLRINT_FN:
446 : 330 : CASE_CFN_LRINT:
447 : 330 : CASE_CFN_LRINT_FN:
448 : 330 : CASE_CFN_NEARBYINT:
449 : 330 : CASE_CFN_NEARBYINT_FN:
450 : 330 : CASE_CFN_RINT:
451 : 330 : CASE_CFN_RINT_FN:
452 : 330 : return !flag_rounding_math;
453 : :
454 : 1817206 : default:
455 : 1817206 : break;
456 : : }
457 : 1817206 : return false;
458 : : }
459 : :
460 : : /* Check whether we may negate an integer constant T without causing
461 : : overflow. */
462 : :
463 : : bool
464 : 2884168 : may_negate_without_overflow_p (const_tree t)
465 : : {
466 : 2884168 : tree type;
467 : :
468 : 2884168 : gcc_assert (TREE_CODE (t) == INTEGER_CST);
469 : :
470 : 2884168 : type = TREE_TYPE (t);
471 : 2884168 : if (TYPE_UNSIGNED (type))
472 : : return false;
473 : :
474 : 2884168 : return !wi::only_sign_bit_p (wi::to_wide (t));
475 : : }
476 : :
477 : : /* Determine whether an expression T can be cheaply negated using
478 : : the function negate_expr without introducing undefined overflow. */
479 : :
480 : : static bool
481 : 23972315 : negate_expr_p (tree t)
482 : : {
483 : 24108042 : tree type;
484 : :
485 : 24108042 : if (t == 0)
486 : : return false;
487 : :
488 : 24108042 : type = TREE_TYPE (t);
489 : :
490 : 24108042 : STRIP_SIGN_NOPS (t);
491 : 24108042 : switch (TREE_CODE (t))
492 : : {
493 : 1316650 : case INTEGER_CST:
494 : 1316650 : if (INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type))
495 : : return true;
496 : :
497 : : /* Check that -CST will not overflow type. */
498 : 388044 : return may_negate_without_overflow_p (t);
499 : 535 : case BIT_NOT_EXPR:
500 : 535 : return (INTEGRAL_TYPE_P (type)
501 : 535 : && TYPE_OVERFLOW_WRAPS (type));
502 : :
503 : : case FIXED_CST:
504 : : return true;
505 : :
506 : 1307 : case NEGATE_EXPR:
507 : 1307 : return !TYPE_OVERFLOW_SANITIZED (type);
508 : :
509 : 1289318 : case REAL_CST:
510 : : /* We want to canonicalize to positive real constants. Pretend
511 : : that only negative ones can be easily negated. */
512 : 1289318 : return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
513 : :
514 : 474 : case COMPLEX_CST:
515 : 474 : return negate_expr_p (TREE_REALPART (t))
516 : 600 : && negate_expr_p (TREE_IMAGPART (t));
517 : :
518 : 110 : case VECTOR_CST:
519 : 110 : {
520 : 110 : if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
521 : : return true;
522 : :
523 : : /* Steps don't prevent negation. */
524 : 110 : unsigned int count = vector_cst_encoded_nelts (t);
525 : 220 : for (unsigned int i = 0; i < count; ++i)
526 : 110 : if (!negate_expr_p (VECTOR_CST_ENCODED_ELT (t, i)))
527 : : return false;
528 : :
529 : : return true;
530 : : }
531 : :
532 : 763 : case COMPLEX_EXPR:
533 : 763 : return negate_expr_p (TREE_OPERAND (t, 0))
534 : 763 : && negate_expr_p (TREE_OPERAND (t, 1));
535 : :
536 : 33 : case CONJ_EXPR:
537 : 33 : return negate_expr_p (TREE_OPERAND (t, 0));
538 : :
539 : 1321115 : case PLUS_EXPR:
540 : 1321115 : if (HONOR_SIGN_DEPENDENT_ROUNDING (type)
541 : 1321109 : || HONOR_SIGNED_ZEROS (type)
542 : 2355946 : || (ANY_INTEGRAL_TYPE_P (type)
543 : 1034645 : && ! TYPE_OVERFLOW_WRAPS (type)))
544 : 700863 : return false;
545 : : /* -(A + B) -> (-B) - A. */
546 : 620252 : if (negate_expr_p (TREE_OPERAND (t, 1)))
547 : : return true;
548 : : /* -(A + B) -> (-A) - B. */
549 : 124683 : return negate_expr_p (TREE_OPERAND (t, 0));
550 : :
551 : 261122 : case MINUS_EXPR:
552 : : /* We can't turn -(A-B) into B-A when we honor signed zeros. */
553 : 261122 : return !HONOR_SIGN_DEPENDENT_ROUNDING (type)
554 : 261122 : && !HONOR_SIGNED_ZEROS (type)
555 : 349445 : && (! ANY_INTEGRAL_TYPE_P (type)
556 : 88100 : || TYPE_OVERFLOW_WRAPS (type));
557 : :
558 : 2328537 : case MULT_EXPR:
559 : 2328537 : if (TYPE_UNSIGNED (type))
560 : : break;
561 : : /* INT_MIN/n * n doesn't overflow while negating one operand it does
562 : : if n is a (negative) power of two. */
563 : 4075244 : if (INTEGRAL_TYPE_P (TREE_TYPE (t))
564 : 150747 : && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
565 : 2186004 : && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
566 : 0 : && (wi::popcount
567 : 2037622 : (wi::abs (wi::to_wide (TREE_OPERAND (t, 0))))) != 1)
568 : 148382 : || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
569 : 130720 : && (wi::popcount
570 : 2299062 : (wi::abs (wi::to_wide (TREE_OPERAND (t, 1))))) != 1)))
571 : : break;
572 : :
573 : : /* Fall through. */
574 : :
575 : 2305255 : case RDIV_EXPR:
576 : 2305255 : if (! HONOR_SIGN_DEPENDENT_ROUNDING (t))
577 : 2305254 : return negate_expr_p (TREE_OPERAND (t, 1))
578 : 2305254 : || negate_expr_p (TREE_OPERAND (t, 0));
579 : : break;
580 : :
581 : 2469 : case TRUNC_DIV_EXPR:
582 : 2469 : case ROUND_DIV_EXPR:
583 : 2469 : case EXACT_DIV_EXPR:
584 : 2469 : if (TYPE_UNSIGNED (type))
585 : : break;
586 : : /* In general we can't negate A in A / B, because if A is INT_MIN and
587 : : B is not 1 we change the sign of the result. */
588 : 466 : if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
589 : 466 : && negate_expr_p (TREE_OPERAND (t, 0)))
590 : : return true;
591 : : /* In general we can't negate B in A / B, because if A is INT_MIN and
592 : : B is 1, we may turn this into INT_MIN / -1 which is undefined
593 : : and actually traps on some architectures. */
594 : 596 : if (! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
595 : 298 : || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
596 : 511 : || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
597 : 206 : && ! integer_onep (TREE_OPERAND (t, 1))))
598 : 291 : return negate_expr_p (TREE_OPERAND (t, 1));
599 : : break;
600 : :
601 : 3623178 : case NOP_EXPR:
602 : : /* Negate -((double)float) as (double)(-float). */
603 : 3623178 : if (SCALAR_FLOAT_TYPE_P (type))
604 : : {
605 : 10872 : tree tem = strip_float_extensions (t);
606 : 10872 : if (tem != t)
607 : : return negate_expr_p (tem);
608 : : }
609 : : break;
610 : :
611 : 901081 : case CALL_EXPR:
612 : : /* Negate -f(x) as f(-x). */
613 : 901081 : if (negate_mathfn_p (get_call_combined_fn (t)))
614 : 59 : return negate_expr_p (CALL_EXPR_ARG (t, 0));
615 : : break;
616 : :
617 : 673 : case RSHIFT_EXPR:
618 : : /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
619 : 673 : if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
620 : : {
621 : 498 : tree op1 = TREE_OPERAND (t, 1);
622 : 498 : if (wi::to_wide (op1) == element_precision (type) - 1)
623 : : return true;
624 : : }
625 : : break;
626 : :
627 : : default:
628 : : break;
629 : : }
630 : : return false;
631 : : }
632 : :
633 : : /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
634 : : simplification is possible.
635 : : If negate_expr_p would return true for T, NULL_TREE will never be
636 : : returned. */
637 : :
638 : : static tree
639 : 32287986 : fold_negate_expr_1 (location_t loc, tree t)
640 : : {
641 : 32287986 : tree type = TREE_TYPE (t);
642 : 32287986 : tree tem;
643 : :
644 : 32287986 : switch (TREE_CODE (t))
645 : : {
646 : : /* Convert - (~A) to A + 1. */
647 : 146 : case BIT_NOT_EXPR:
648 : 146 : if (INTEGRAL_TYPE_P (type))
649 : 146 : return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
650 : 146 : build_one_cst (type));
651 : : break;
652 : :
653 : 24855874 : case INTEGER_CST:
654 : 24855874 : tem = fold_negate_const (t, type);
655 : 24855874 : if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
656 : 6418 : || (ANY_INTEGRAL_TYPE_P (type)
657 : 6418 : && !TYPE_OVERFLOW_TRAPS (type)
658 : 6418 : && TYPE_OVERFLOW_WRAPS (type))
659 : 24861599 : || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
660 : : return tem;
661 : : break;
662 : :
663 : 1935039 : case POLY_INT_CST:
664 : 1935039 : case REAL_CST:
665 : 1935039 : case FIXED_CST:
666 : 1935039 : tem = fold_negate_const (t, type);
667 : 1935039 : return tem;
668 : :
669 : 66126 : case COMPLEX_CST:
670 : 66126 : {
671 : 66126 : tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
672 : 66126 : tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
673 : 66126 : if (rpart && ipart)
674 : 66126 : return build_complex (type, rpart, ipart);
675 : : }
676 : : break;
677 : :
678 : 35890 : case VECTOR_CST:
679 : 35890 : {
680 : 35890 : tree_vector_builder elts;
681 : 35890 : elts.new_unary_operation (type, t, true);
682 : 35890 : unsigned int count = elts.encoded_nelts ();
683 : 85314 : for (unsigned int i = 0; i < count; ++i)
684 : : {
685 : 49424 : tree elt = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
686 : 49424 : if (elt == NULL_TREE)
687 : 0 : return NULL_TREE;
688 : 49424 : elts.quick_push (elt);
689 : : }
690 : :
691 : 35890 : return elts.build ();
692 : 35890 : }
693 : :
694 : 78 : case COMPLEX_EXPR:
695 : 78 : if (negate_expr_p (t))
696 : 40 : return fold_build2_loc (loc, COMPLEX_EXPR, type,
697 : 20 : fold_negate_expr (loc, TREE_OPERAND (t, 0)),
698 : 40 : fold_negate_expr (loc, TREE_OPERAND (t, 1)));
699 : : break;
700 : :
701 : 21 : case CONJ_EXPR:
702 : 21 : if (negate_expr_p (t))
703 : 21 : return fold_build1_loc (loc, CONJ_EXPR, type,
704 : 42 : fold_negate_expr (loc, TREE_OPERAND (t, 0)));
705 : : break;
706 : :
707 : 1239 : case NEGATE_EXPR:
708 : 1239 : if (!TYPE_OVERFLOW_SANITIZED (type))
709 : 1224 : return TREE_OPERAND (t, 0);
710 : : break;
711 : :
712 : 544192 : case PLUS_EXPR:
713 : 544192 : if (!HONOR_SIGN_DEPENDENT_ROUNDING (type)
714 : 544192 : && !HONOR_SIGNED_ZEROS (type))
715 : : {
716 : : /* -(A + B) -> (-B) - A. */
717 : 544082 : if (negate_expr_p (TREE_OPERAND (t, 1)))
718 : : {
719 : 497625 : tem = negate_expr (TREE_OPERAND (t, 1));
720 : 497625 : return fold_build2_loc (loc, MINUS_EXPR, type,
721 : 995250 : tem, TREE_OPERAND (t, 0));
722 : : }
723 : :
724 : : /* -(A + B) -> (-A) - B. */
725 : 46457 : if (negate_expr_p (TREE_OPERAND (t, 0)))
726 : : {
727 : 976 : tem = negate_expr (TREE_OPERAND (t, 0));
728 : 976 : return fold_build2_loc (loc, MINUS_EXPR, type,
729 : 1952 : tem, TREE_OPERAND (t, 1));
730 : : }
731 : : }
732 : : break;
733 : :
734 : 140171 : case MINUS_EXPR:
735 : : /* - (A - B) -> B - A */
736 : 140171 : if (!HONOR_SIGN_DEPENDENT_ROUNDING (type)
737 : 140171 : && !HONOR_SIGNED_ZEROS (type))
738 : 63448 : return fold_build2_loc (loc, MINUS_EXPR, type,
739 : 126896 : TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
740 : : break;
741 : :
742 : 161333 : case MULT_EXPR:
743 : 161333 : if (TYPE_UNSIGNED (type))
744 : : break;
745 : :
746 : : /* Fall through. */
747 : :
748 : 72203 : case RDIV_EXPR:
749 : 72203 : if (! HONOR_SIGN_DEPENDENT_ROUNDING (type))
750 : : {
751 : 72203 : tem = TREE_OPERAND (t, 1);
752 : 72203 : if (negate_expr_p (tem))
753 : 137566 : return fold_build2_loc (loc, TREE_CODE (t), type,
754 : 137566 : TREE_OPERAND (t, 0), negate_expr (tem));
755 : 3420 : tem = TREE_OPERAND (t, 0);
756 : 3420 : if (negate_expr_p (tem))
757 : 63 : return fold_build2_loc (loc, TREE_CODE (t), type,
758 : 126 : negate_expr (tem), TREE_OPERAND (t, 1));
759 : : }
760 : : break;
761 : :
762 : 1677 : case TRUNC_DIV_EXPR:
763 : 1677 : case ROUND_DIV_EXPR:
764 : 1677 : case EXACT_DIV_EXPR:
765 : 1677 : if (TYPE_UNSIGNED (type))
766 : : break;
767 : : /* In general we can't negate A in A / B, because if A is INT_MIN and
768 : : B is not 1 we change the sign of the result. */
769 : 646 : if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
770 : 646 : && negate_expr_p (TREE_OPERAND (t, 0)))
771 : 327 : return fold_build2_loc (loc, TREE_CODE (t), type,
772 : 327 : negate_expr (TREE_OPERAND (t, 0)),
773 : 654 : TREE_OPERAND (t, 1));
774 : : /* In general we can't negate B in A / B, because if A is INT_MIN and
775 : : B is 1, we may turn this into INT_MIN / -1 which is undefined
776 : : and actually traps on some architectures. */
777 : 638 : if ((! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
778 : 319 : || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
779 : 235 : || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
780 : 212 : && ! integer_onep (TREE_OPERAND (t, 1))))
781 : 615 : && negate_expr_p (TREE_OPERAND (t, 1)))
782 : 580 : return fold_build2_loc (loc, TREE_CODE (t), type,
783 : 290 : TREE_OPERAND (t, 0),
784 : 580 : negate_expr (TREE_OPERAND (t, 1)));
785 : : break;
786 : :
787 : 1361505 : case NOP_EXPR:
788 : : /* Convert -((double)float) into (double)(-float). */
789 : 1361505 : if (SCALAR_FLOAT_TYPE_P (type))
790 : : {
791 : 10828 : tem = strip_float_extensions (t);
792 : 10828 : if (tem != t && negate_expr_p (tem))
793 : 0 : return fold_convert_loc (loc, type, negate_expr (tem));
794 : : }
795 : : break;
796 : :
797 : 290398 : case CALL_EXPR:
798 : : /* Negate -f(x) as f(-x). */
799 : 290398 : if (negate_mathfn_p (get_call_combined_fn (t))
800 : 291687 : && negate_expr_p (CALL_EXPR_ARG (t, 0)))
801 : : {
802 : 1191 : tree fndecl, arg;
803 : :
804 : 1191 : fndecl = get_callee_fndecl (t);
805 : 1191 : arg = negate_expr (CALL_EXPR_ARG (t, 0));
806 : 1191 : return build_call_expr_loc (loc, fndecl, 1, arg);
807 : : }
808 : : break;
809 : :
810 : 250 : case RSHIFT_EXPR:
811 : : /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
812 : 250 : if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
813 : : {
814 : 233 : tree op1 = TREE_OPERAND (t, 1);
815 : 233 : if (wi::to_wide (op1) == element_precision (type) - 1)
816 : : {
817 : 72 : tree ntype = TYPE_UNSIGNED (type)
818 : 72 : ? signed_type_for (type)
819 : 72 : : unsigned_type_for (type);
820 : 72 : tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
821 : 72 : temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
822 : 72 : return fold_convert_loc (loc, type, temp);
823 : : }
824 : : }
825 : : break;
826 : :
827 : : default:
828 : : break;
829 : : }
830 : :
831 : : return NULL_TREE;
832 : : }
833 : :
834 : : /* A wrapper for fold_negate_expr_1. */
835 : :
836 : : static tree
837 : 32287986 : fold_negate_expr (location_t loc, tree t)
838 : : {
839 : 32287986 : tree type = TREE_TYPE (t);
840 : 32287986 : STRIP_SIGN_NOPS (t);
841 : 32287986 : tree tem = fold_negate_expr_1 (loc, t);
842 : 32287986 : if (tem == NULL_TREE)
843 : : return NULL_TREE;
844 : 27526998 : return fold_convert_loc (loc, type, tem);
845 : : }
846 : :
847 : : /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
848 : : negated in a simpler way. Also allow for T to be NULL_TREE, in which case
849 : : return NULL_TREE. */
850 : :
851 : : static tree
852 : 2781691 : negate_expr (tree t)
853 : : {
854 : 2781691 : tree type, tem;
855 : 2781691 : location_t loc;
856 : :
857 : 2781691 : if (t == NULL_TREE)
858 : : return NULL_TREE;
859 : :
860 : 2781691 : loc = EXPR_LOCATION (t);
861 : 2781691 : type = TREE_TYPE (t);
862 : 2781691 : STRIP_SIGN_NOPS (t);
863 : :
864 : 2781691 : tem = fold_negate_expr (loc, t);
865 : 2781691 : if (!tem)
866 : 1215153 : tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
867 : 2781691 : return fold_convert_loc (loc, type, tem);
868 : : }
869 : :
870 : : /* Split a tree IN into a constant, literal and variable parts that could be
871 : : combined with CODE to make IN. "constant" means an expression with
872 : : TREE_CONSTANT but that isn't an actual constant. CODE must be a
873 : : commutative arithmetic operation. Store the constant part into *CONP,
874 : : the literal in *LITP and return the variable part. If a part isn't
875 : : present, set it to null. If the tree does not decompose in this way,
876 : : return the entire tree as the variable part and the other parts as null.
877 : :
878 : : If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
879 : : case, we negate an operand that was subtracted. Except if it is a
880 : : literal for which we use *MINUS_LITP instead.
881 : :
882 : : If NEGATE_P is true, we are negating all of IN, again except a literal
883 : : for which we use *MINUS_LITP instead. If a variable part is of pointer
884 : : type, it is negated after converting to TYPE. This prevents us from
885 : : generating illegal MINUS pointer expression. LOC is the location of
886 : : the converted variable part.
887 : :
888 : : If IN is itself a literal or constant, return it as appropriate.
889 : :
890 : : Note that we do not guarantee that any of the three values will be the
891 : : same type as IN, but they will have the same signedness and mode. */
892 : :
893 : : static tree
894 : 184269824 : split_tree (tree in, tree type, enum tree_code code,
895 : : tree *minus_varp, tree *conp, tree *minus_conp,
896 : : tree *litp, tree *minus_litp, int negate_p)
897 : : {
898 : 184269824 : tree var = 0;
899 : 184269824 : *minus_varp = 0;
900 : 184269824 : *conp = 0;
901 : 184269824 : *minus_conp = 0;
902 : 184269824 : *litp = 0;
903 : 184269824 : *minus_litp = 0;
904 : :
905 : : /* Strip any conversions that don't change the machine mode or signedness. */
906 : 184269824 : STRIP_SIGN_NOPS (in);
907 : :
908 : 184269824 : if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
909 : 118007581 : || TREE_CODE (in) == FIXED_CST)
910 : 66262243 : *litp = in;
911 : 118007581 : else if (TREE_CODE (in) == code
912 : 118007581 : || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
913 : 114344823 : && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
914 : : /* We can associate addition and subtraction together (even
915 : : though the C standard doesn't say so) for integers because
916 : : the value is not affected. For reals, the value might be
917 : : affected, so we can't. */
918 : 114344823 : && ((code == PLUS_EXPR && TREE_CODE (in) == POINTER_PLUS_EXPR)
919 : 47464515 : || (code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
920 : 112943107 : || (code == MINUS_EXPR
921 : 17931603 : && (TREE_CODE (in) == PLUS_EXPR
922 : 16497340 : || TREE_CODE (in) == POINTER_PLUS_EXPR)))))
923 : : {
924 : 6788313 : tree op0 = TREE_OPERAND (in, 0);
925 : 6788313 : tree op1 = TREE_OPERAND (in, 1);
926 : 6788313 : bool neg1_p = TREE_CODE (in) == MINUS_EXPR;
927 : 6788313 : bool neg_litp_p = false, neg_conp_p = false, neg_var_p = false;
928 : :
929 : : /* First see if either of the operands is a literal, then a constant. */
930 : 6788313 : if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
931 : 6598217 : || TREE_CODE (op0) == FIXED_CST)
932 : 190096 : *litp = op0, op0 = 0;
933 : 6598217 : else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
934 : 4339082 : || TREE_CODE (op1) == FIXED_CST)
935 : 2259135 : *litp = op1, neg_litp_p = neg1_p, op1 = 0;
936 : :
937 : 6788313 : if (op0 != 0 && TREE_CONSTANT (op0))
938 : 13217 : *conp = op0, op0 = 0;
939 : 6775096 : else if (op1 != 0 && TREE_CONSTANT (op1))
940 : 32009 : *conp = op1, neg_conp_p = neg1_p, op1 = 0;
941 : :
942 : : /* If we haven't dealt with either operand, this is not a case we can
943 : : decompose. Otherwise, VAR is either of the ones remaining, if any. */
944 : 6788313 : if (op0 != 0 && op1 != 0)
945 : : var = in;
946 : 2482620 : else if (op0 != 0)
947 : : var = op0;
948 : : else
949 : 203313 : var = op1, neg_var_p = neg1_p;
950 : :
951 : : /* Now do any needed negations. */
952 : 6788313 : if (neg_litp_p)
953 : 21032 : *minus_litp = *litp, *litp = 0;
954 : 6788313 : if (neg_conp_p && *conp)
955 : 5563 : *minus_conp = *conp, *conp = 0;
956 : 6788313 : if (neg_var_p && var)
957 : 191087 : *minus_varp = var, var = 0;
958 : : }
959 : 111219268 : else if (TREE_CONSTANT (in))
960 : 517186 : *conp = in;
961 : 110702082 : else if (TREE_CODE (in) == BIT_NOT_EXPR
962 : 370050 : && code == PLUS_EXPR)
963 : : {
964 : : /* -1 - X is folded to ~X, undo that here. Do _not_ do this
965 : : when IN is constant. */
966 : 286013 : *litp = build_minus_one_cst (type);
967 : 286013 : *minus_varp = TREE_OPERAND (in, 0);
968 : : }
969 : : else
970 : : var = in;
971 : :
972 : 184269824 : if (negate_p)
973 : : {
974 : 9775471 : if (*litp)
975 : 949941 : *minus_litp = *litp, *litp = 0;
976 : 8825530 : else if (*minus_litp)
977 : 149 : *litp = *minus_litp, *minus_litp = 0;
978 : 9775471 : if (*conp)
979 : 25152 : *minus_conp = *conp, *conp = 0;
980 : 9750319 : else if (*minus_conp)
981 : 0 : *conp = *minus_conp, *minus_conp = 0;
982 : 9775471 : if (var)
983 : 9738605 : *minus_varp = var, var = 0;
984 : 36866 : else if (*minus_varp)
985 : 1287 : var = *minus_varp, *minus_varp = 0;
986 : : }
987 : :
988 : 184269824 : if (*litp
989 : 184269824 : && TREE_OVERFLOW_P (*litp))
990 : 13680 : *litp = drop_tree_overflow (*litp);
991 : 184269824 : if (*minus_litp
992 : 184269824 : && TREE_OVERFLOW_P (*minus_litp))
993 : 0 : *minus_litp = drop_tree_overflow (*minus_litp);
994 : :
995 : 184269824 : return var;
996 : : }
997 : :
998 : : /* Re-associate trees split by the above function. T1 and T2 are
999 : : either expressions to associate or null. Return the new
1000 : : expression, if any. LOC is the location of the new expression. If
1001 : : we build an operation, do it in TYPE and with CODE. */
1002 : :
1003 : : static tree
1004 : 15839857 : associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
1005 : : {
1006 : 15839857 : if (t1 == 0)
1007 : : {
1008 : 10054518 : gcc_assert (t2 == 0 || code != MINUS_EXPR);
1009 : : return t2;
1010 : : }
1011 : 5785339 : else if (t2 == 0)
1012 : : return t1;
1013 : :
1014 : : /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1015 : : try to fold this since we will have infinite recursion. But do
1016 : : deal with any NEGATE_EXPRs. */
1017 : 3235312 : if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1018 : 2560282 : || TREE_CODE (t1) == PLUS_EXPR || TREE_CODE (t2) == PLUS_EXPR
1019 : 2514521 : || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1020 : : {
1021 : 1260908 : if (code == PLUS_EXPR)
1022 : : {
1023 : 726771 : if (TREE_CODE (t1) == NEGATE_EXPR)
1024 : 46 : return build2_loc (loc, MINUS_EXPR, type,
1025 : : fold_convert_loc (loc, type, t2),
1026 : : fold_convert_loc (loc, type,
1027 : 92 : TREE_OPERAND (t1, 0)));
1028 : 726725 : else if (TREE_CODE (t2) == NEGATE_EXPR)
1029 : 0 : return build2_loc (loc, MINUS_EXPR, type,
1030 : : fold_convert_loc (loc, type, t1),
1031 : : fold_convert_loc (loc, type,
1032 : 0 : TREE_OPERAND (t2, 0)));
1033 : 726725 : else if (integer_zerop (t2))
1034 : 23634 : return fold_convert_loc (loc, type, t1);
1035 : : }
1036 : 534137 : else if (code == MINUS_EXPR)
1037 : : {
1038 : 511619 : if (integer_zerop (t2))
1039 : 0 : return fold_convert_loc (loc, type, t1);
1040 : : }
1041 : :
1042 : 1237228 : return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
1043 : 1237228 : fold_convert_loc (loc, type, t2));
1044 : : }
1045 : :
1046 : 1974404 : return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
1047 : 1974404 : fold_convert_loc (loc, type, t2));
1048 : : }
1049 : :
1050 : : /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
1051 : : for use in int_const_binop, size_binop and size_diffop. */
1052 : :
1053 : : static bool
1054 : 2008703722 : int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
1055 : : {
1056 : 2008703722 : if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
1057 : : return false;
1058 : 2008703722 : if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
1059 : : return false;
1060 : :
1061 : 2008703722 : switch (code)
1062 : : {
1063 : : case LSHIFT_EXPR:
1064 : : case RSHIFT_EXPR:
1065 : : case LROTATE_EXPR:
1066 : : case RROTATE_EXPR:
1067 : : return true;
1068 : :
1069 : 2008703722 : default:
1070 : 2008703722 : break;
1071 : : }
1072 : :
1073 : 2008703722 : return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
1074 : 2008703722 : && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
1075 : 4017407444 : && TYPE_MODE (type1) == TYPE_MODE (type2);
1076 : : }
1077 : :
1078 : : /* Combine two wide ints ARG1 and ARG2 under operation CODE to produce
1079 : : a new constant in RES. Return FALSE if we don't know how to
1080 : : evaluate CODE at compile-time. */
1081 : :
1082 : : bool
1083 : 1317947264 : wide_int_binop (wide_int &res,
1084 : : enum tree_code code, const wide_int &arg1, const wide_int &arg2,
1085 : : signop sign, wi::overflow_type *overflow)
1086 : : {
1087 : 1317947264 : wide_int tmp;
1088 : 1317947264 : *overflow = wi::OVF_NONE;
1089 : 1317947264 : switch (code)
1090 : : {
1091 : 2043640 : case BIT_IOR_EXPR:
1092 : 2043640 : res = wi::bit_or (arg1, arg2);
1093 : 2043640 : break;
1094 : :
1095 : 75877 : case BIT_XOR_EXPR:
1096 : 75877 : res = wi::bit_xor (arg1, arg2);
1097 : 75877 : break;
1098 : :
1099 : 18725313 : case BIT_AND_EXPR:
1100 : 18725313 : res = wi::bit_and (arg1, arg2);
1101 : 18725313 : break;
1102 : :
1103 : 12727892 : case LSHIFT_EXPR:
1104 : 12727892 : if (wi::neg_p (arg2))
1105 : : return false;
1106 : 12697667 : res = wi::lshift (arg1, arg2);
1107 : 12697667 : break;
1108 : :
1109 : 8165496 : case RSHIFT_EXPR:
1110 : 8165496 : if (wi::neg_p (arg2))
1111 : : return false;
1112 : : /* It's unclear from the C standard whether shifts can overflow.
1113 : : The following code ignores overflow; perhaps a C standard
1114 : : interpretation ruling is needed. */
1115 : 8165221 : res = wi::rshift (arg1, arg2, sign);
1116 : 8165221 : break;
1117 : :
1118 : 923 : case RROTATE_EXPR:
1119 : 923 : case LROTATE_EXPR:
1120 : 923 : if (wi::neg_p (arg2))
1121 : : {
1122 : 14 : tmp = -arg2;
1123 : 14 : if (code == RROTATE_EXPR)
1124 : : code = LROTATE_EXPR;
1125 : : else
1126 : : code = RROTATE_EXPR;
1127 : : }
1128 : : else
1129 : 909 : tmp = arg2;
1130 : :
1131 : 909 : if (code == RROTATE_EXPR)
1132 : 844 : res = wi::rrotate (arg1, tmp);
1133 : : else
1134 : 79 : res = wi::lrotate (arg1, tmp);
1135 : : break;
1136 : :
1137 : 180394429 : case PLUS_EXPR:
1138 : 180394429 : res = wi::add (arg1, arg2, sign, overflow);
1139 : 180394429 : break;
1140 : :
1141 : 65064579 : case MINUS_EXPR:
1142 : 65064579 : res = wi::sub (arg1, arg2, sign, overflow);
1143 : 65064579 : break;
1144 : :
1145 : 475170424 : case MULT_EXPR:
1146 : 475170424 : res = wi::mul (arg1, arg2, sign, overflow);
1147 : 475170424 : break;
1148 : :
1149 : 4392 : case MULT_HIGHPART_EXPR:
1150 : 4392 : res = wi::mul_high (arg1, arg2, sign);
1151 : 4392 : break;
1152 : :
1153 : 270158548 : case TRUNC_DIV_EXPR:
1154 : 270158548 : case EXACT_DIV_EXPR:
1155 : 270158548 : if (arg2 == 0)
1156 : : return false;
1157 : 270152757 : res = wi::div_trunc (arg1, arg2, sign, overflow);
1158 : 270152757 : break;
1159 : :
1160 : 62173137 : case FLOOR_DIV_EXPR:
1161 : 62173137 : if (arg2 == 0)
1162 : : return false;
1163 : 62173137 : res = wi::div_floor (arg1, arg2, sign, overflow);
1164 : 62173137 : break;
1165 : :
1166 : 97740041 : case CEIL_DIV_EXPR:
1167 : 97740041 : if (arg2 == 0)
1168 : : return false;
1169 : 97740041 : res = wi::div_ceil (arg1, arg2, sign, overflow);
1170 : 97740041 : break;
1171 : :
1172 : 0 : case ROUND_DIV_EXPR:
1173 : 0 : if (arg2 == 0)
1174 : : return false;
1175 : 0 : res = wi::div_round (arg1, arg2, sign, overflow);
1176 : 0 : break;
1177 : :
1178 : 581953 : case TRUNC_MOD_EXPR:
1179 : 581953 : if (arg2 == 0)
1180 : : return false;
1181 : 580873 : res = wi::mod_trunc (arg1, arg2, sign, overflow);
1182 : 580873 : break;
1183 : :
1184 : 52475518 : case FLOOR_MOD_EXPR:
1185 : 52475518 : if (arg2 == 0)
1186 : : return false;
1187 : 52475518 : res = wi::mod_floor (arg1, arg2, sign, overflow);
1188 : 52475518 : break;
1189 : :
1190 : 178 : case CEIL_MOD_EXPR:
1191 : 178 : if (arg2 == 0)
1192 : : return false;
1193 : 178 : res = wi::mod_ceil (arg1, arg2, sign, overflow);
1194 : 178 : break;
1195 : :
1196 : 0 : case ROUND_MOD_EXPR:
1197 : 0 : if (arg2 == 0)
1198 : : return false;
1199 : 0 : res = wi::mod_round (arg1, arg2, sign, overflow);
1200 : 0 : break;
1201 : :
1202 : 24458 : case MIN_EXPR:
1203 : 24458 : res = wi::min (arg1, arg2, sign);
1204 : 24458 : break;
1205 : :
1206 : 72420314 : case MAX_EXPR:
1207 : 72420314 : res = wi::max (arg1, arg2, sign);
1208 : 72420314 : break;
1209 : :
1210 : : default:
1211 : : return false;
1212 : : }
1213 : : return true;
1214 : 1317947264 : }
1215 : :
1216 : : /* Returns true if we know who is smaller or equal, ARG1 or ARG2, and set the
1217 : : min value to RES. */
1218 : : bool
1219 : 0 : can_min_p (const_tree arg1, const_tree arg2, poly_wide_int &res)
1220 : : {
1221 : 0 : if (known_le (wi::to_poly_widest (arg1), wi::to_poly_widest (arg2)))
1222 : : {
1223 : 0 : res = wi::to_poly_wide (arg1);
1224 : 0 : return true;
1225 : : }
1226 : 0 : else if (known_le (wi::to_poly_widest (arg2), wi::to_poly_widest (arg1)))
1227 : : {
1228 : 0 : res = wi::to_poly_wide (arg2);
1229 : 0 : return true;
1230 : : }
1231 : :
1232 : : return false;
1233 : : }
1234 : :
1235 : : /* Combine two poly int's ARG1 and ARG2 under operation CODE to
1236 : : produce a new constant in RES. Return FALSE if we don't know how
1237 : : to evaluate CODE at compile-time. */
1238 : :
1239 : : static bool
1240 : 0 : poly_int_binop (poly_wide_int &res, enum tree_code code,
1241 : : const_tree arg1, const_tree arg2,
1242 : : signop sign, wi::overflow_type *overflow)
1243 : : {
1244 : 0 : gcc_assert (NUM_POLY_INT_COEFFS != 1);
1245 : : gcc_assert (poly_int_tree_p (arg1) && poly_int_tree_p (arg2));
1246 : : switch (code)
1247 : : {
1248 : : case PLUS_EXPR:
1249 : : res = wi::add (wi::to_poly_wide (arg1),
1250 : : wi::to_poly_wide (arg2), sign, overflow);
1251 : : break;
1252 : :
1253 : : case MINUS_EXPR:
1254 : : res = wi::sub (wi::to_poly_wide (arg1),
1255 : : wi::to_poly_wide (arg2), sign, overflow);
1256 : : break;
1257 : :
1258 : : case MULT_EXPR:
1259 : : if (TREE_CODE (arg2) == INTEGER_CST)
1260 : : res = wi::mul (wi::to_poly_wide (arg1),
1261 : : wi::to_wide (arg2), sign, overflow);
1262 : : else if (TREE_CODE (arg1) == INTEGER_CST)
1263 : : res = wi::mul (wi::to_poly_wide (arg2),
1264 : : wi::to_wide (arg1), sign, overflow);
1265 : : else
1266 : : return NULL_TREE;
1267 : : break;
1268 : :
1269 : : case LSHIFT_EXPR:
1270 : : if (TREE_CODE (arg2) == INTEGER_CST)
1271 : : res = wi::to_poly_wide (arg1) << wi::to_wide (arg2);
1272 : : else
1273 : : return false;
1274 : : break;
1275 : :
1276 : : case BIT_IOR_EXPR:
1277 : : if (TREE_CODE (arg2) != INTEGER_CST
1278 : : || !can_ior_p (wi::to_poly_wide (arg1), wi::to_wide (arg2),
1279 : : &res))
1280 : : return false;
1281 : : break;
1282 : :
1283 : : case MIN_EXPR:
1284 : : if (!can_min_p (arg1, arg2, res))
1285 : : return false;
1286 : : break;
1287 : :
1288 : : default:
1289 : : return false;
1290 : : }
1291 : : return true;
1292 : : }
1293 : :
1294 : : /* Combine two integer constants ARG1 and ARG2 under operation CODE to
1295 : : produce a new constant. Return NULL_TREE if we don't know how to
1296 : : evaluate CODE at compile-time. */
1297 : :
1298 : : tree
1299 : 1317947264 : int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2,
1300 : : int overflowable)
1301 : : {
1302 : 1317947264 : poly_wide_int poly_res;
1303 : 1317947264 : tree type = TREE_TYPE (arg1);
1304 : 1317947264 : signop sign = TYPE_SIGN (type);
1305 : 1317947264 : wi::overflow_type overflow = wi::OVF_NONE;
1306 : :
1307 : 1317947264 : if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
1308 : : {
1309 : 1317947264 : wide_int warg1 = wi::to_wide (arg1), res;
1310 : 1317947264 : wide_int warg2 = wi::to_wide (arg2, TYPE_PRECISION (type));
1311 : 1317947264 : if (!wide_int_binop (res, code, warg1, warg2, sign, &overflow))
1312 : 37523 : return NULL_TREE;
1313 : 1317909741 : poly_res = res;
1314 : 1317947434 : }
1315 : 0 : else if (!poly_int_tree_p (arg1)
1316 : 0 : || !poly_int_tree_p (arg2)
1317 : 0 : || !poly_int_binop (poly_res, code, arg1, arg2, sign, &overflow))
1318 : : return NULL_TREE;
1319 : 1317909741 : return force_fit_type (type, poly_res, overflowable,
1320 : 1317909741 : (((sign == SIGNED || overflowable == -1)
1321 : 1317909741 : && overflow)
1322 : 1317909741 : | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2)));
1323 : 1317947264 : }
1324 : :
1325 : : /* Return true if binary operation OP distributes over addition in operand
1326 : : OPNO, with the other operand being held constant. OPNO counts from 1. */
1327 : :
1328 : : static bool
1329 : 105483 : distributes_over_addition_p (tree_code op, int opno)
1330 : : {
1331 : 0 : switch (op)
1332 : : {
1333 : : case PLUS_EXPR:
1334 : : case MINUS_EXPR:
1335 : : case MULT_EXPR:
1336 : : return true;
1337 : :
1338 : 0 : case LSHIFT_EXPR:
1339 : 0 : return opno == 1;
1340 : :
1341 : 3322 : default:
1342 : 3322 : return false;
1343 : : }
1344 : : }
1345 : :
1346 : : /* OP is the INDEXth operand to CODE (counting from zero) and OTHER_OP
1347 : : is the other operand. Try to use the value of OP to simplify the
1348 : : operation in one step, without having to process individual elements. */
1349 : : static tree
1350 : 266945 : simplify_const_binop (tree_code code, tree op, tree other_op,
1351 : : int index ATTRIBUTE_UNUSED)
1352 : : {
1353 : : /* AND, IOR as well as XOR with a zerop can be simplified directly. */
1354 : 266945 : if (TREE_CODE (op) == VECTOR_CST && TREE_CODE (other_op) == VECTOR_CST)
1355 : : {
1356 : 200389 : if (integer_zerop (other_op))
1357 : : {
1358 : 12664 : if (code == BIT_IOR_EXPR || code == BIT_XOR_EXPR)
1359 : : return op;
1360 : 12079 : else if (code == BIT_AND_EXPR)
1361 : : return other_op;
1362 : : }
1363 : : }
1364 : :
1365 : : return NULL_TREE;
1366 : : }
1367 : :
1368 : :
1369 : : /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1370 : : constant. We assume ARG1 and ARG2 have the same data type, or at least
1371 : : are the same kind of constant and the same machine mode. Return zero if
1372 : : combining the constants is not allowed in the current operating mode. */
1373 : :
1374 : : static tree
1375 : 156300595 : const_binop (enum tree_code code, tree arg1, tree arg2)
1376 : : {
1377 : : /* Sanity check for the recursive cases. */
1378 : 156300595 : if (!arg1 || !arg2)
1379 : : return NULL_TREE;
1380 : :
1381 : 156299331 : STRIP_NOPS (arg1);
1382 : 156299331 : STRIP_NOPS (arg2);
1383 : :
1384 : 156299331 : if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
1385 : : {
1386 : 151409769 : if (code == POINTER_PLUS_EXPR)
1387 : 99732 : return int_const_binop (PLUS_EXPR,
1388 : 199464 : arg1, fold_convert (TREE_TYPE (arg1), arg2));
1389 : :
1390 : 151310037 : return int_const_binop (code, arg1, arg2);
1391 : : }
1392 : :
1393 : 4889562 : if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
1394 : : {
1395 : 4706484 : machine_mode mode;
1396 : 4706484 : REAL_VALUE_TYPE d1;
1397 : 4706484 : REAL_VALUE_TYPE d2;
1398 : 4706484 : REAL_VALUE_TYPE value;
1399 : 4706484 : REAL_VALUE_TYPE result;
1400 : 4706484 : bool inexact;
1401 : 4706484 : tree t, type;
1402 : :
1403 : : /* The following codes are handled by real_arithmetic. */
1404 : 4706484 : switch (code)
1405 : : {
1406 : 4706484 : case PLUS_EXPR:
1407 : 4706484 : case MINUS_EXPR:
1408 : 4706484 : case MULT_EXPR:
1409 : 4706484 : case RDIV_EXPR:
1410 : 4706484 : case MIN_EXPR:
1411 : 4706484 : case MAX_EXPR:
1412 : 4706484 : break;
1413 : :
1414 : : default:
1415 : : return NULL_TREE;
1416 : : }
1417 : :
1418 : 4706484 : d1 = TREE_REAL_CST (arg1);
1419 : 4706484 : d2 = TREE_REAL_CST (arg2);
1420 : :
1421 : 4706484 : type = TREE_TYPE (arg1);
1422 : 4706484 : mode = TYPE_MODE (type);
1423 : :
1424 : : /* Don't perform operation if we honor signaling NaNs and
1425 : : either operand is a signaling NaN. */
1426 : 4706484 : if (HONOR_SNANS (mode)
1427 : 4706484 : && (REAL_VALUE_ISSIGNALING_NAN (d1)
1428 : 3676 : || REAL_VALUE_ISSIGNALING_NAN (d2)))
1429 : 33 : return NULL_TREE;
1430 : :
1431 : : /* Don't perform operation if it would raise a division
1432 : : by zero exception. */
1433 : 4706451 : if (code == RDIV_EXPR
1434 : 2324107 : && real_equal (&d2, &dconst0)
1435 : 4717544 : && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1436 : 7817 : return NULL_TREE;
1437 : :
1438 : : /* If either operand is a NaN, just return it. Otherwise, set up
1439 : : for floating-point trap; we return an overflow. */
1440 : 4698634 : if (REAL_VALUE_ISNAN (d1))
1441 : : {
1442 : : /* Make resulting NaN value to be qNaN when flag_signaling_nans
1443 : : is off. */
1444 : 231 : d1.signalling = 0;
1445 : 231 : t = build_real (type, d1);
1446 : 231 : return t;
1447 : : }
1448 : 4698403 : else if (REAL_VALUE_ISNAN (d2))
1449 : : {
1450 : : /* Make resulting NaN value to be qNaN when flag_signaling_nans
1451 : : is off. */
1452 : 75 : d2.signalling = 0;
1453 : 75 : t = build_real (type, d2);
1454 : 75 : return t;
1455 : : }
1456 : :
1457 : 4698328 : inexact = real_arithmetic (&value, code, &d1, &d2);
1458 : 4698328 : real_convert (&result, mode, &value);
1459 : :
1460 : : /* Don't constant fold this floating point operation if
1461 : : both operands are not NaN but the result is NaN, and
1462 : : flag_trapping_math. Such operations should raise an
1463 : : invalid operation exception. */
1464 : 4698328 : if (flag_trapping_math
1465 : 18356166 : && MODE_HAS_NANS (mode)
1466 : 4680547 : && REAL_VALUE_ISNAN (result)
1467 : 2531 : && !REAL_VALUE_ISNAN (d1)
1468 : 4700859 : && !REAL_VALUE_ISNAN (d2))
1469 : 2531 : return NULL_TREE;
1470 : :
1471 : : /* Don't constant fold this floating point operation if
1472 : : the result has overflowed and flag_trapping_math. */
1473 : 4695797 : if (flag_trapping_math
1474 : 18346447 : && MODE_HAS_INFINITIES (mode)
1475 : 4678016 : && REAL_VALUE_ISINF (result)
1476 : 7233 : && !REAL_VALUE_ISINF (d1)
1477 : 4702399 : && !REAL_VALUE_ISINF (d2))
1478 : 4319 : return NULL_TREE;
1479 : :
1480 : : /* Don't constant fold this floating point operation if the
1481 : : result may dependent upon the run-time rounding mode and
1482 : : flag_rounding_math is set, or if GCC's software emulation
1483 : : is unable to accurately represent the result. */
1484 : 4691478 : if ((flag_rounding_math
1485 : 32094069 : || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1486 : 4691478 : && (inexact || !real_identical (&result, &value)))
1487 : 1164 : return NULL_TREE;
1488 : :
1489 : 4690314 : t = build_real (type, result);
1490 : :
1491 : 4690314 : TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1492 : 4690314 : return t;
1493 : : }
1494 : :
1495 : 183078 : if (TREE_CODE (arg1) == FIXED_CST)
1496 : : {
1497 : 0 : FIXED_VALUE_TYPE f1;
1498 : 0 : FIXED_VALUE_TYPE f2;
1499 : 0 : FIXED_VALUE_TYPE result;
1500 : 0 : tree t, type;
1501 : 0 : bool sat_p;
1502 : 0 : bool overflow_p;
1503 : :
1504 : : /* The following codes are handled by fixed_arithmetic. */
1505 : 0 : switch (code)
1506 : : {
1507 : 0 : case PLUS_EXPR:
1508 : 0 : case MINUS_EXPR:
1509 : 0 : case MULT_EXPR:
1510 : 0 : case TRUNC_DIV_EXPR:
1511 : 0 : if (TREE_CODE (arg2) != FIXED_CST)
1512 : : return NULL_TREE;
1513 : 0 : f2 = TREE_FIXED_CST (arg2);
1514 : 0 : break;
1515 : :
1516 : 0 : case LSHIFT_EXPR:
1517 : 0 : case RSHIFT_EXPR:
1518 : 0 : {
1519 : 0 : if (TREE_CODE (arg2) != INTEGER_CST)
1520 : 0 : return NULL_TREE;
1521 : 0 : wi::tree_to_wide_ref w2 = wi::to_wide (arg2);
1522 : 0 : f2.data.high = w2.elt (1);
1523 : 0 : f2.data.low = w2.ulow ();
1524 : 0 : f2.mode = SImode;
1525 : : }
1526 : 0 : break;
1527 : :
1528 : : default:
1529 : : return NULL_TREE;
1530 : : }
1531 : :
1532 : 0 : f1 = TREE_FIXED_CST (arg1);
1533 : 0 : type = TREE_TYPE (arg1);
1534 : 0 : sat_p = TYPE_SATURATING (type);
1535 : 0 : overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1536 : 0 : t = build_fixed (type, result);
1537 : : /* Propagate overflow flags. */
1538 : 0 : if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1539 : 0 : TREE_OVERFLOW (t) = 1;
1540 : 0 : return t;
1541 : : }
1542 : :
1543 : 183078 : if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
1544 : : {
1545 : 11166 : tree type = TREE_TYPE (arg1);
1546 : 11166 : tree r1 = TREE_REALPART (arg1);
1547 : 11166 : tree i1 = TREE_IMAGPART (arg1);
1548 : 11166 : tree r2 = TREE_REALPART (arg2);
1549 : 11166 : tree i2 = TREE_IMAGPART (arg2);
1550 : 11166 : tree real, imag;
1551 : :
1552 : 11166 : switch (code)
1553 : : {
1554 : 5328 : case PLUS_EXPR:
1555 : 5328 : case MINUS_EXPR:
1556 : 5328 : real = const_binop (code, r1, r2);
1557 : 5328 : imag = const_binop (code, i1, i2);
1558 : 5328 : break;
1559 : :
1560 : 3887 : case MULT_EXPR:
1561 : 3887 : if (COMPLEX_FLOAT_TYPE_P (type))
1562 : 2737 : return do_mpc_arg2 (arg1, arg2, type,
1563 : : /* do_nonfinite= */ folding_initializer,
1564 : 2737 : mpc_mul);
1565 : :
1566 : 1150 : real = const_binop (MINUS_EXPR,
1567 : : const_binop (MULT_EXPR, r1, r2),
1568 : : const_binop (MULT_EXPR, i1, i2));
1569 : 1150 : imag = const_binop (PLUS_EXPR,
1570 : : const_binop (MULT_EXPR, r1, i2),
1571 : : const_binop (MULT_EXPR, i1, r2));
1572 : 1150 : break;
1573 : :
1574 : 1697 : case RDIV_EXPR:
1575 : 1697 : if (COMPLEX_FLOAT_TYPE_P (type))
1576 : 1697 : return do_mpc_arg2 (arg1, arg2, type,
1577 : : /* do_nonfinite= */ folding_initializer,
1578 : 1697 : mpc_div);
1579 : : /* Fallthru. */
1580 : 254 : case TRUNC_DIV_EXPR:
1581 : 254 : case CEIL_DIV_EXPR:
1582 : 254 : case FLOOR_DIV_EXPR:
1583 : 254 : case ROUND_DIV_EXPR:
1584 : 254 : if (flag_complex_method == 0)
1585 : : {
1586 : : /* Keep this algorithm in sync with
1587 : : tree-complex.cc:expand_complex_div_straight().
1588 : :
1589 : : Expand complex division to scalars, straightforward algorithm.
1590 : : a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1591 : : t = br*br + bi*bi
1592 : : */
1593 : 0 : tree magsquared
1594 : 0 : = const_binop (PLUS_EXPR,
1595 : : const_binop (MULT_EXPR, r2, r2),
1596 : : const_binop (MULT_EXPR, i2, i2));
1597 : 0 : tree t1
1598 : 0 : = const_binop (PLUS_EXPR,
1599 : : const_binop (MULT_EXPR, r1, r2),
1600 : : const_binop (MULT_EXPR, i1, i2));
1601 : 0 : tree t2
1602 : 0 : = const_binop (MINUS_EXPR,
1603 : : const_binop (MULT_EXPR, i1, r2),
1604 : : const_binop (MULT_EXPR, r1, i2));
1605 : :
1606 : 0 : real = const_binop (code, t1, magsquared);
1607 : 0 : imag = const_binop (code, t2, magsquared);
1608 : : }
1609 : : else
1610 : : {
1611 : : /* Keep this algorithm in sync with
1612 : : tree-complex.cc:expand_complex_div_wide().
1613 : :
1614 : : Expand complex division to scalars, modified algorithm to minimize
1615 : : overflow with wide input ranges. */
1616 : 254 : tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1617 : : fold_abs_const (r2, TREE_TYPE (type)),
1618 : : fold_abs_const (i2, TREE_TYPE (type)));
1619 : :
1620 : 254 : if (integer_nonzerop (compare))
1621 : : {
1622 : : /* In the TRUE branch, we compute
1623 : : ratio = br/bi;
1624 : : div = (br * ratio) + bi;
1625 : : tr = (ar * ratio) + ai;
1626 : : ti = (ai * ratio) - ar;
1627 : : tr = tr / div;
1628 : : ti = ti / div; */
1629 : 48 : tree ratio = const_binop (code, r2, i2);
1630 : 48 : tree div = const_binop (PLUS_EXPR, i2,
1631 : : const_binop (MULT_EXPR, r2, ratio));
1632 : 48 : real = const_binop (MULT_EXPR, r1, ratio);
1633 : 48 : real = const_binop (PLUS_EXPR, real, i1);
1634 : 48 : real = const_binop (code, real, div);
1635 : :
1636 : 48 : imag = const_binop (MULT_EXPR, i1, ratio);
1637 : 48 : imag = const_binop (MINUS_EXPR, imag, r1);
1638 : 48 : imag = const_binop (code, imag, div);
1639 : : }
1640 : : else
1641 : : {
1642 : : /* In the FALSE branch, we compute
1643 : : ratio = d/c;
1644 : : divisor = (d * ratio) + c;
1645 : : tr = (b * ratio) + a;
1646 : : ti = b - (a * ratio);
1647 : : tr = tr / div;
1648 : : ti = ti / div; */
1649 : 206 : tree ratio = const_binop (code, i2, r2);
1650 : 206 : tree div = const_binop (PLUS_EXPR, r2,
1651 : : const_binop (MULT_EXPR, i2, ratio));
1652 : :
1653 : 206 : real = const_binop (MULT_EXPR, i1, ratio);
1654 : 206 : real = const_binop (PLUS_EXPR, real, r1);
1655 : 206 : real = const_binop (code, real, div);
1656 : :
1657 : 206 : imag = const_binop (MULT_EXPR, r1, ratio);
1658 : 206 : imag = const_binop (MINUS_EXPR, i1, imag);
1659 : 206 : imag = const_binop (code, imag, div);
1660 : : }
1661 : : }
1662 : : break;
1663 : :
1664 : : default:
1665 : : return NULL_TREE;
1666 : : }
1667 : :
1668 : 6732 : if (real && imag)
1669 : 6574 : return build_complex (type, real, imag);
1670 : : }
1671 : :
1672 : 172070 : tree simplified;
1673 : 172070 : if ((simplified = simplify_const_binop (code, arg1, arg2, 0)))
1674 : : return simplified;
1675 : :
1676 : 171825 : if (commutative_tree_code (code)
1677 : 171825 : && (simplified = simplify_const_binop (code, arg2, arg1, 1)))
1678 : : return simplified;
1679 : :
1680 : 167829 : if (TREE_CODE (arg1) == VECTOR_CST
1681 : 107415 : && TREE_CODE (arg2) == VECTOR_CST
1682 : 269124 : && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)),
1683 : : TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2))))
1684 : : {
1685 : 101295 : tree type = TREE_TYPE (arg1);
1686 : 101295 : bool step_ok_p;
1687 : 101295 : if (VECTOR_CST_STEPPED_P (arg1)
1688 : 101295 : && VECTOR_CST_STEPPED_P (arg2))
1689 : : /* We can operate directly on the encoding if:
1690 : :
1691 : : a3 - a2 == a2 - a1 && b3 - b2 == b2 - b1
1692 : : implies
1693 : : (a3 op b3) - (a2 op b2) == (a2 op b2) - (a1 op b1)
1694 : :
1695 : : Addition and subtraction are the supported operators
1696 : : for which this is true. */
1697 : 1932 : step_ok_p = (code == PLUS_EXPR || code == MINUS_EXPR);
1698 : 99363 : else if (VECTOR_CST_STEPPED_P (arg1))
1699 : : /* We can operate directly on stepped encodings if:
1700 : :
1701 : : a3 - a2 == a2 - a1
1702 : : implies:
1703 : : (a3 op c) - (a2 op c) == (a2 op c) - (a1 op c)
1704 : :
1705 : : which is true if (x -> x op c) distributes over addition. */
1706 : 43263 : step_ok_p = distributes_over_addition_p (code, 1);
1707 : : else
1708 : : /* Similarly in reverse. */
1709 : 56100 : step_ok_p = distributes_over_addition_p (code, 2);
1710 : 101295 : tree_vector_builder elts;
1711 : 101295 : if (!elts.new_binary_operation (type, arg1, arg2, step_ok_p))
1712 : : return NULL_TREE;
1713 : 101295 : unsigned int count = elts.encoded_nelts ();
1714 : 407412 : for (unsigned int i = 0; i < count; ++i)
1715 : : {
1716 : 306444 : tree elem1 = VECTOR_CST_ELT (arg1, i);
1717 : 306444 : tree elem2 = VECTOR_CST_ELT (arg2, i);
1718 : :
1719 : 306444 : tree elt = const_binop (code, elem1, elem2);
1720 : :
1721 : : /* It is possible that const_binop cannot handle the given
1722 : : code and return NULL_TREE */
1723 : 306444 : if (elt == NULL_TREE)
1724 : 327 : return NULL_TREE;
1725 : 306117 : elts.quick_push (elt);
1726 : : }
1727 : :
1728 : 100968 : return elts.build ();
1729 : 101295 : }
1730 : :
1731 : : /* Shifts allow a scalar offset for a vector. */
1732 : 66534 : if (TREE_CODE (arg1) == VECTOR_CST
1733 : 6120 : && TREE_CODE (arg2) == INTEGER_CST)
1734 : : {
1735 : 6120 : tree type = TREE_TYPE (arg1);
1736 : 6120 : bool step_ok_p = distributes_over_addition_p (code, 1);
1737 : 6120 : tree_vector_builder elts;
1738 : 6120 : if (!elts.new_unary_operation (type, arg1, step_ok_p))
1739 : : return NULL_TREE;
1740 : 6120 : unsigned int count = elts.encoded_nelts ();
1741 : 29727 : for (unsigned int i = 0; i < count; ++i)
1742 : : {
1743 : 23694 : tree elem1 = VECTOR_CST_ELT (arg1, i);
1744 : :
1745 : 23694 : tree elt = const_binop (code, elem1, arg2);
1746 : :
1747 : : /* It is possible that const_binop cannot handle the given
1748 : : code and return NULL_TREE. */
1749 : 23694 : if (elt == NULL_TREE)
1750 : 87 : return NULL_TREE;
1751 : 23607 : elts.quick_push (elt);
1752 : : }
1753 : :
1754 : 6033 : return elts.build ();
1755 : 6120 : }
1756 : : return NULL_TREE;
1757 : : }
1758 : :
1759 : : /* Overload that adds a TYPE parameter to be able to dispatch
1760 : : to fold_relational_const. */
1761 : :
1762 : : tree
1763 : 198824739 : const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
1764 : : {
1765 : 198824739 : if (TREE_CODE_CLASS (code) == tcc_comparison)
1766 : 50493521 : return fold_relational_const (code, type, arg1, arg2);
1767 : :
1768 : : /* ??? Until we make the const_binop worker take the type of the
1769 : : result as argument put those cases that need it here. */
1770 : 148331218 : switch (code)
1771 : : {
1772 : 18 : case VEC_SERIES_EXPR:
1773 : 18 : if (CONSTANT_CLASS_P (arg1)
1774 : 18 : && CONSTANT_CLASS_P (arg2))
1775 : 18 : return build_vec_series (type, arg1, arg2);
1776 : : return NULL_TREE;
1777 : :
1778 : 282804 : case COMPLEX_EXPR:
1779 : 282804 : if ((TREE_CODE (arg1) == REAL_CST
1780 : 271947 : && TREE_CODE (arg2) == REAL_CST)
1781 : 10858 : || (TREE_CODE (arg1) == INTEGER_CST
1782 : 10857 : && TREE_CODE (arg2) == INTEGER_CST))
1783 : 282803 : return build_complex (type, arg1, arg2);
1784 : : return NULL_TREE;
1785 : :
1786 : 5786 : case POINTER_DIFF_EXPR:
1787 : 5786 : if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
1788 : : {
1789 : 10948 : poly_offset_int res = (wi::to_poly_offset (arg1)
1790 : 5474 : - wi::to_poly_offset (arg2));
1791 : 5474 : return force_fit_type (type, res, 1,
1792 : 5474 : TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1793 : : }
1794 : : return NULL_TREE;
1795 : :
1796 : 8467 : case VEC_PACK_TRUNC_EXPR:
1797 : 8467 : case VEC_PACK_FIX_TRUNC_EXPR:
1798 : 8467 : case VEC_PACK_FLOAT_EXPR:
1799 : 8467 : {
1800 : 8467 : unsigned int HOST_WIDE_INT out_nelts, in_nelts, i;
1801 : :
1802 : 8467 : if (TREE_CODE (arg1) != VECTOR_CST
1803 : 8467 : || TREE_CODE (arg2) != VECTOR_CST)
1804 : : return NULL_TREE;
1805 : :
1806 : 8467 : if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
1807 : : return NULL_TREE;
1808 : :
1809 : 8467 : out_nelts = in_nelts * 2;
1810 : 8467 : gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
1811 : : && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1812 : :
1813 : 8467 : tree_vector_builder elts (type, out_nelts, 1);
1814 : 113279 : for (i = 0; i < out_nelts; i++)
1815 : : {
1816 : 104824 : tree elt = (i < in_nelts
1817 : 104824 : ? VECTOR_CST_ELT (arg1, i)
1818 : 52406 : : VECTOR_CST_ELT (arg2, i - in_nelts));
1819 : 105552 : elt = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
1820 : : ? NOP_EXPR
1821 : : : code == VEC_PACK_FLOAT_EXPR
1822 : 728 : ? FLOAT_EXPR : FIX_TRUNC_EXPR,
1823 : 104824 : TREE_TYPE (type), elt);
1824 : 104824 : if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1825 : 12 : return NULL_TREE;
1826 : 104812 : elts.quick_push (elt);
1827 : : }
1828 : :
1829 : 8455 : return elts.build ();
1830 : 8467 : }
1831 : :
1832 : 206 : case VEC_WIDEN_MULT_LO_EXPR:
1833 : 206 : case VEC_WIDEN_MULT_HI_EXPR:
1834 : 206 : case VEC_WIDEN_MULT_EVEN_EXPR:
1835 : 206 : case VEC_WIDEN_MULT_ODD_EXPR:
1836 : 206 : {
1837 : 206 : unsigned HOST_WIDE_INT out_nelts, in_nelts, out, ofs, scale;
1838 : :
1839 : 206 : if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
1840 : : return NULL_TREE;
1841 : :
1842 : 206 : if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
1843 : : return NULL_TREE;
1844 : 206 : out_nelts = in_nelts / 2;
1845 : 206 : gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
1846 : : && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1847 : :
1848 : 206 : if (code == VEC_WIDEN_MULT_LO_EXPR)
1849 : : scale = 0, ofs = BYTES_BIG_ENDIAN ? out_nelts : 0;
1850 : : else if (code == VEC_WIDEN_MULT_HI_EXPR)
1851 : : scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : out_nelts;
1852 : : else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
1853 : : scale = 1, ofs = 0;
1854 : : else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
1855 : 206 : scale = 1, ofs = 1;
1856 : :
1857 : 206 : tree_vector_builder elts (type, out_nelts, 1);
1858 : 722 : for (out = 0; out < out_nelts; out++)
1859 : : {
1860 : 516 : unsigned int in = (out << scale) + ofs;
1861 : 516 : tree t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
1862 : : VECTOR_CST_ELT (arg1, in));
1863 : 516 : tree t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
1864 : : VECTOR_CST_ELT (arg2, in));
1865 : :
1866 : 516 : if (t1 == NULL_TREE || t2 == NULL_TREE)
1867 : 0 : return NULL_TREE;
1868 : 516 : tree elt = const_binop (MULT_EXPR, t1, t2);
1869 : 516 : if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1870 : : return NULL_TREE;
1871 : 516 : elts.quick_push (elt);
1872 : : }
1873 : :
1874 : 206 : return elts.build ();
1875 : 206 : }
1876 : :
1877 : 148033937 : default:;
1878 : : }
1879 : :
1880 : 148033937 : if (TREE_CODE_CLASS (code) != tcc_binary)
1881 : : return NULL_TREE;
1882 : :
1883 : : /* Make sure type and arg0 have the same saturating flag. */
1884 : 145649888 : gcc_checking_assert (TYPE_SATURATING (type)
1885 : : == TYPE_SATURATING (TREE_TYPE (arg1)));
1886 : :
1887 : 145649888 : return const_binop (code, arg1, arg2);
1888 : : }
1889 : :
1890 : : /* Compute CODE ARG1 with resulting type TYPE with ARG1 being constant.
1891 : : Return zero if computing the constants is not possible. */
1892 : :
1893 : : tree
1894 : 260566398 : const_unop (enum tree_code code, tree type, tree arg0)
1895 : : {
1896 : : /* Don't perform the operation, other than NEGATE and ABS, if
1897 : : flag_signaling_nans is on and the operand is a signaling NaN. */
1898 : 260566398 : if (TREE_CODE (arg0) == REAL_CST
1899 : 10503563 : && HONOR_SNANS (arg0)
1900 : 7207 : && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
1901 : 466 : && code != NEGATE_EXPR
1902 : 466 : && code != ABS_EXPR
1903 : 260566829 : && code != ABSU_EXPR)
1904 : : return NULL_TREE;
1905 : :
1906 : 260565967 : switch (code)
1907 : : {
1908 : 179039214 : CASE_CONVERT:
1909 : 179039214 : case FLOAT_EXPR:
1910 : 179039214 : case FIX_TRUNC_EXPR:
1911 : 179039214 : case FIXED_CONVERT_EXPR:
1912 : 179039214 : return fold_convert_const (code, type, arg0);
1913 : :
1914 : 0 : case ADDR_SPACE_CONVERT_EXPR:
1915 : : /* If the source address is 0, and the source address space
1916 : : cannot have a valid object at 0, fold to dest type null. */
1917 : 0 : if (integer_zerop (arg0)
1918 : 0 : && !(targetm.addr_space.zero_address_valid
1919 : 0 : (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
1920 : 0 : return fold_convert_const (code, type, arg0);
1921 : : break;
1922 : :
1923 : 11602826 : case VIEW_CONVERT_EXPR:
1924 : 11602826 : return fold_view_convert_expr (type, arg0);
1925 : :
1926 : 25921373 : case NEGATE_EXPR:
1927 : 25921373 : {
1928 : : /* Can't call fold_negate_const directly here as that doesn't
1929 : : handle all cases and we might not be able to negate some
1930 : : constants. */
1931 : 25921373 : tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
1932 : 25921373 : if (tem && CONSTANT_CLASS_P (tem))
1933 : : return tem;
1934 : : break;
1935 : : }
1936 : :
1937 : 31292 : case ABS_EXPR:
1938 : 31292 : case ABSU_EXPR:
1939 : 31292 : if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
1940 : 31021 : return fold_abs_const (arg0, type);
1941 : : break;
1942 : :
1943 : 24553 : case CONJ_EXPR:
1944 : 24553 : if (TREE_CODE (arg0) == COMPLEX_CST)
1945 : : {
1946 : 24549 : tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
1947 : 24549 : TREE_TYPE (type));
1948 : 24549 : return build_complex (type, TREE_REALPART (arg0), ipart);
1949 : : }
1950 : : break;
1951 : :
1952 : 2174930 : case BIT_NOT_EXPR:
1953 : 2174930 : if (TREE_CODE (arg0) == INTEGER_CST)
1954 : 2174021 : return fold_not_const (arg0, type);
1955 : 909 : else if (POLY_INT_CST_P (arg0))
1956 : : return wide_int_to_tree (type, -poly_int_cst_value (arg0));
1957 : : /* Perform BIT_NOT_EXPR on each element individually. */
1958 : 909 : else if (TREE_CODE (arg0) == VECTOR_CST)
1959 : : {
1960 : 286 : tree elem;
1961 : :
1962 : : /* This can cope with stepped encodings because ~x == -1 - x. */
1963 : 286 : tree_vector_builder elements;
1964 : 286 : elements.new_unary_operation (type, arg0, true);
1965 : 286 : unsigned int i, count = elements.encoded_nelts ();
1966 : 1618 : for (i = 0; i < count; ++i)
1967 : : {
1968 : 1332 : elem = VECTOR_CST_ELT (arg0, i);
1969 : 1332 : elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
1970 : 1332 : if (elem == NULL_TREE)
1971 : : break;
1972 : 1332 : elements.quick_push (elem);
1973 : : }
1974 : 286 : if (i == count)
1975 : 286 : return elements.build ();
1976 : 286 : }
1977 : : break;
1978 : :
1979 : 7995514 : case TRUTH_NOT_EXPR:
1980 : 7995514 : if (TREE_CODE (arg0) == INTEGER_CST)
1981 : 7723901 : return constant_boolean_node (integer_zerop (arg0), type);
1982 : : break;
1983 : :
1984 : 194099 : case REALPART_EXPR:
1985 : 194099 : if (TREE_CODE (arg0) == COMPLEX_CST)
1986 : 193898 : return fold_convert (type, TREE_REALPART (arg0));
1987 : : break;
1988 : :
1989 : 197897 : case IMAGPART_EXPR:
1990 : 197897 : if (TREE_CODE (arg0) == COMPLEX_CST)
1991 : 197711 : return fold_convert (type, TREE_IMAGPART (arg0));
1992 : : break;
1993 : :
1994 : 8452 : case VEC_UNPACK_LO_EXPR:
1995 : 8452 : case VEC_UNPACK_HI_EXPR:
1996 : 8452 : case VEC_UNPACK_FLOAT_LO_EXPR:
1997 : 8452 : case VEC_UNPACK_FLOAT_HI_EXPR:
1998 : 8452 : case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
1999 : 8452 : case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
2000 : 8452 : {
2001 : 8452 : unsigned HOST_WIDE_INT out_nelts, in_nelts, i;
2002 : 8452 : enum tree_code subcode;
2003 : :
2004 : 8452 : if (TREE_CODE (arg0) != VECTOR_CST)
2005 : : return NULL_TREE;
2006 : :
2007 : 8452 : if (!VECTOR_CST_NELTS (arg0).is_constant (&in_nelts))
2008 : : return NULL_TREE;
2009 : 8452 : out_nelts = in_nelts / 2;
2010 : 8452 : gcc_assert (known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
2011 : :
2012 : 8452 : unsigned int offset = 0;
2013 : 8452 : if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
2014 : 8452 : || code == VEC_UNPACK_FLOAT_LO_EXPR
2015 : : || code == VEC_UNPACK_FIX_TRUNC_LO_EXPR))
2016 : 4225 : offset = out_nelts;
2017 : :
2018 : 8452 : if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
2019 : : subcode = NOP_EXPR;
2020 : 4650 : else if (code == VEC_UNPACK_FLOAT_LO_EXPR
2021 : 4650 : || code == VEC_UNPACK_FLOAT_HI_EXPR)
2022 : : subcode = FLOAT_EXPR;
2023 : : else
2024 : 0 : subcode = FIX_TRUNC_EXPR;
2025 : :
2026 : 8452 : tree_vector_builder elts (type, out_nelts, 1);
2027 : 42388 : for (i = 0; i < out_nelts; i++)
2028 : : {
2029 : 33936 : tree elt = fold_convert_const (subcode, TREE_TYPE (type),
2030 : 33936 : VECTOR_CST_ELT (arg0, i + offset));
2031 : 33936 : if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
2032 : 0 : return NULL_TREE;
2033 : 33936 : elts.quick_push (elt);
2034 : : }
2035 : :
2036 : 8452 : return elts.build ();
2037 : 8452 : }
2038 : :
2039 : 4 : case VEC_DUPLICATE_EXPR:
2040 : 4 : if (CONSTANT_CLASS_P (arg0))
2041 : 4 : return build_vector_from_val (type, arg0);
2042 : : return NULL_TREE;
2043 : :
2044 : : default:
2045 : : break;
2046 : : }
2047 : :
2048 : : return NULL_TREE;
2049 : : }
2050 : :
2051 : : /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
2052 : : indicates which particular sizetype to create. */
2053 : :
2054 : : tree
2055 : 3183262933 : size_int_kind (poly_int64 number, enum size_type_kind kind)
2056 : : {
2057 : 3183262933 : return build_int_cst (sizetype_tab[(int) kind], number);
2058 : : }
2059 : :
2060 : : /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
2061 : : is a tree code. The type of the result is taken from the operands.
2062 : : Both must be equivalent integer types, ala int_binop_types_match_p.
2063 : : If the operands are constant, so is the result. */
2064 : :
2065 : : tree
2066 : 1976662320 : size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
2067 : : {
2068 : 1976662320 : tree type = TREE_TYPE (arg0);
2069 : :
2070 : 1976662320 : if (arg0 == error_mark_node || arg1 == error_mark_node)
2071 : : return error_mark_node;
2072 : :
2073 : 1976662320 : gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
2074 : : TREE_TYPE (arg1)));
2075 : :
2076 : : /* Handle the special case of two poly_int constants faster. */
2077 : 1976662320 : if (poly_int_tree_p (arg0) && poly_int_tree_p (arg1))
2078 : : {
2079 : : /* And some specific cases even faster than that. */
2080 : 1953980112 : if (code == PLUS_EXPR)
2081 : : {
2082 : 887049744 : if (integer_zerop (arg0)
2083 : 887049744 : && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
2084 : : return arg1;
2085 : 240680363 : if (integer_zerop (arg1)
2086 : 240680363 : && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
2087 : : return arg0;
2088 : : }
2089 : 1066930368 : else if (code == MINUS_EXPR)
2090 : : {
2091 : 92749997 : if (integer_zerop (arg1)
2092 : 92749997 : && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
2093 : : return arg0;
2094 : : }
2095 : 974180371 : else if (code == MULT_EXPR)
2096 : : {
2097 : 425019684 : if (integer_onep (arg0)
2098 : 425019684 : && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
2099 : : return arg1;
2100 : : }
2101 : :
2102 : : /* Handle general case of two integer constants. For sizetype
2103 : : constant calculations we always want to know about overflow,
2104 : : even in the unsigned case. */
2105 : 972409104 : tree res = int_const_binop (code, arg0, arg1, -1);
2106 : 972409104 : if (res != NULL_TREE)
2107 : : return res;
2108 : : }
2109 : :
2110 : 22682208 : return fold_build2_loc (loc, code, type, arg0, arg1);
2111 : : }
2112 : :
2113 : : /* Given two values, either both of sizetype or both of bitsizetype,
2114 : : compute the difference between the two values. Return the value
2115 : : in signed type corresponding to the type of the operands. */
2116 : :
2117 : : tree
2118 : 32041402 : size_diffop_loc (location_t loc, tree arg0, tree arg1)
2119 : : {
2120 : 32041402 : tree type = TREE_TYPE (arg0);
2121 : 32041402 : tree ctype;
2122 : :
2123 : 32041402 : gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
2124 : : TREE_TYPE (arg1)));
2125 : :
2126 : : /* If the type is already signed, just do the simple thing. */
2127 : 32041402 : if (!TYPE_UNSIGNED (type))
2128 : 9249272 : return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
2129 : :
2130 : 22792130 : if (type == sizetype)
2131 : 22792130 : ctype = ssizetype;
2132 : 0 : else if (type == bitsizetype)
2133 : 0 : ctype = sbitsizetype;
2134 : : else
2135 : 0 : ctype = signed_type_for (type);
2136 : :
2137 : : /* If either operand is not a constant, do the conversions to the signed
2138 : : type and subtract. The hardware will do the right thing with any
2139 : : overflow in the subtraction. */
2140 : 22792130 : if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
2141 : 7863 : return size_binop_loc (loc, MINUS_EXPR,
2142 : : fold_convert_loc (loc, ctype, arg0),
2143 : 7863 : fold_convert_loc (loc, ctype, arg1));
2144 : :
2145 : : /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
2146 : : Otherwise, subtract the other way, convert to CTYPE (we know that can't
2147 : : overflow) and negate (which can't either). Special-case a result
2148 : : of zero while we're here. */
2149 : 22784267 : if (tree_int_cst_equal (arg0, arg1))
2150 : 19568179 : return build_int_cst (ctype, 0);
2151 : 3216088 : else if (tree_int_cst_lt (arg1, arg0))
2152 : 2085330 : return fold_convert_loc (loc, ctype,
2153 : 2085330 : size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
2154 : : else
2155 : 1130758 : return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
2156 : : fold_convert_loc (loc, ctype,
2157 : : size_binop_loc (loc,
2158 : : MINUS_EXPR,
2159 : : arg1, arg0)));
2160 : : }
2161 : :
2162 : : /* A subroutine of fold_convert_const handling conversions of an
2163 : : INTEGER_CST to another integer type. */
2164 : :
2165 : : static tree
2166 : 1100363980 : fold_convert_const_int_from_int (tree type, const_tree arg1)
2167 : : {
2168 : : /* Given an integer constant, make new constant with new type,
2169 : : appropriately sign-extended or truncated. Use widest_int
2170 : : so that any extension is done according ARG1's type. */
2171 : 1100363980 : tree arg1_type = TREE_TYPE (arg1);
2172 : 1100363980 : unsigned prec = MAX (TYPE_PRECISION (arg1_type), TYPE_PRECISION (type));
2173 : 1100363980 : return force_fit_type (type, wide_int::from (wi::to_wide (arg1), prec,
2174 : 1100363980 : TYPE_SIGN (arg1_type)),
2175 : 1100363980 : !POINTER_TYPE_P (TREE_TYPE (arg1)),
2176 : 1100363980 : TREE_OVERFLOW (arg1));
2177 : : }
2178 : :
2179 : : /* A subroutine of fold_convert_const handling conversions a REAL_CST
2180 : : to an integer type. */
2181 : :
2182 : : static tree
2183 : 28401 : fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2184 : : {
2185 : 28401 : bool overflow = false;
2186 : 28401 : tree t;
2187 : :
2188 : : /* The following code implements the floating point to integer
2189 : : conversion rules required by the Java Language Specification,
2190 : : that IEEE NaNs are mapped to zero and values that overflow
2191 : : the target precision saturate, i.e. values greater than
2192 : : INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2193 : : are mapped to INT_MIN. These semantics are allowed by the
2194 : : C and C++ standards that simply state that the behavior of
2195 : : FP-to-integer conversion is unspecified upon overflow. */
2196 : :
2197 : 28401 : wide_int val;
2198 : 28401 : REAL_VALUE_TYPE r;
2199 : 28401 : REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2200 : :
2201 : 28401 : switch (code)
2202 : : {
2203 : 28401 : case FIX_TRUNC_EXPR:
2204 : 28401 : real_trunc (&r, VOIDmode, &x);
2205 : 28401 : break;
2206 : :
2207 : 0 : default:
2208 : 0 : gcc_unreachable ();
2209 : : }
2210 : :
2211 : : /* If R is NaN, return zero and show we have an overflow. */
2212 : 28401 : if (REAL_VALUE_ISNAN (r))
2213 : : {
2214 : 280 : overflow = true;
2215 : 280 : val = wi::zero (TYPE_PRECISION (type));
2216 : : }
2217 : :
2218 : : /* See if R is less than the lower bound or greater than the
2219 : : upper bound. */
2220 : :
2221 : 28401 : if (! overflow)
2222 : : {
2223 : 28121 : tree lt = TYPE_MIN_VALUE (type);
2224 : 28121 : REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2225 : 28121 : if (real_less (&r, &l))
2226 : : {
2227 : 316 : overflow = true;
2228 : 316 : val = wi::to_wide (lt);
2229 : : }
2230 : : }
2231 : :
2232 : 28401 : if (! overflow)
2233 : : {
2234 : 27805 : tree ut = TYPE_MAX_VALUE (type);
2235 : 27805 : if (ut)
2236 : : {
2237 : 27805 : REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2238 : 27805 : if (real_less (&u, &r))
2239 : : {
2240 : 386 : overflow = true;
2241 : 386 : val = wi::to_wide (ut);
2242 : : }
2243 : : }
2244 : : }
2245 : :
2246 : 28401 : if (! overflow)
2247 : 27419 : val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
2248 : :
2249 : 28401 : t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
2250 : 28401 : return t;
2251 : 28401 : }
2252 : :
2253 : : /* A subroutine of fold_convert_const handling conversions of a
2254 : : FIXED_CST to an integer type. */
2255 : :
2256 : : static tree
2257 : 0 : fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2258 : : {
2259 : 0 : tree t;
2260 : 0 : double_int temp, temp_trunc;
2261 : 0 : scalar_mode mode;
2262 : :
2263 : : /* Right shift FIXED_CST to temp by fbit. */
2264 : 0 : temp = TREE_FIXED_CST (arg1).data;
2265 : 0 : mode = TREE_FIXED_CST (arg1).mode;
2266 : 0 : if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
2267 : : {
2268 : 0 : temp = temp.rshift (GET_MODE_FBIT (mode),
2269 : : HOST_BITS_PER_DOUBLE_INT,
2270 : 0 : SIGNED_FIXED_POINT_MODE_P (mode));
2271 : :
2272 : : /* Left shift temp to temp_trunc by fbit. */
2273 : 0 : temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
2274 : : HOST_BITS_PER_DOUBLE_INT,
2275 : 0 : SIGNED_FIXED_POINT_MODE_P (mode));
2276 : : }
2277 : : else
2278 : : {
2279 : 0 : temp = double_int_zero;
2280 : 0 : temp_trunc = double_int_zero;
2281 : : }
2282 : :
2283 : : /* If FIXED_CST is negative, we need to round the value toward 0.
2284 : : By checking if the fractional bits are not zero to add 1 to temp. */
2285 : 0 : if (SIGNED_FIXED_POINT_MODE_P (mode)
2286 : 0 : && temp_trunc.is_negative ()
2287 : 0 : && TREE_FIXED_CST (arg1).data != temp_trunc)
2288 : 0 : temp += double_int_one;
2289 : :
2290 : : /* Given a fixed-point constant, make new constant with new type,
2291 : : appropriately sign-extended or truncated. */
2292 : 0 : t = force_fit_type (type, temp, -1,
2293 : 0 : (temp.is_negative ()
2294 : 0 : && (TYPE_UNSIGNED (type)
2295 : 0 : < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2296 : 0 : | TREE_OVERFLOW (arg1));
2297 : :
2298 : 0 : return t;
2299 : : }
2300 : :
2301 : : /* A subroutine of fold_convert_const handling conversions a REAL_CST
2302 : : to another floating point type. */
2303 : :
2304 : : static tree
2305 : 1932928 : fold_convert_const_real_from_real (tree type, const_tree arg1)
2306 : : {
2307 : 1932928 : REAL_VALUE_TYPE value;
2308 : 1932928 : tree t;
2309 : :
2310 : : /* If the underlying modes are the same, simply treat it as
2311 : : copy and rebuild with TREE_REAL_CST information and the
2312 : : given type. */
2313 : 1932928 : if (TYPE_MODE (type) == TYPE_MODE (TREE_TYPE (arg1)))
2314 : : {
2315 : 98073 : t = build_real (type, TREE_REAL_CST (arg1));
2316 : 98073 : return t;
2317 : : }
2318 : :
2319 : : /* Don't perform the operation if flag_signaling_nans is on
2320 : : and the operand is a signaling NaN. */
2321 : 1834855 : if (HONOR_SNANS (arg1)
2322 : 1835195 : && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
2323 : : return NULL_TREE;
2324 : :
2325 : : /* With flag_rounding_math we should respect the current rounding mode
2326 : : unless the conversion is exact. */
2327 : 1834855 : if (HONOR_SIGN_DEPENDENT_ROUNDING (arg1)
2328 : 1835505 : && !exact_real_truncate (TYPE_MODE (type), &TREE_REAL_CST (arg1)))
2329 : 515 : return NULL_TREE;
2330 : :
2331 : 1834340 : real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2332 : 1834340 : t = build_real (type, value);
2333 : :
2334 : : /* If converting an infinity or NAN to a representation that doesn't
2335 : : have one, set the overflow bit so that we can produce some kind of
2336 : : error message at the appropriate point if necessary. It's not the
2337 : : most user-friendly message, but it's better than nothing. */
2338 : 1834340 : if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2339 : 1939353 : && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2340 : 0 : TREE_OVERFLOW (t) = 1;
2341 : 1834340 : else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2342 : 1933606 : && !MODE_HAS_NANS (TYPE_MODE (type)))
2343 : 0 : TREE_OVERFLOW (t) = 1;
2344 : : /* Regular overflow, conversion produced an infinity in a mode that
2345 : : can't represent them. */
2346 : 9167921 : else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2347 : 0 : && REAL_VALUE_ISINF (value)
2348 : 1834340 : && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2349 : 0 : TREE_OVERFLOW (t) = 1;
2350 : : else
2351 : 1834340 : TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2352 : : return t;
2353 : : }
2354 : :
2355 : : /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2356 : : to a floating point type. */
2357 : :
2358 : : static tree
2359 : 0 : fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2360 : : {
2361 : 0 : REAL_VALUE_TYPE value;
2362 : 0 : tree t;
2363 : :
2364 : 0 : real_convert_from_fixed (&value, SCALAR_FLOAT_TYPE_MODE (type),
2365 : 0 : &TREE_FIXED_CST (arg1));
2366 : 0 : t = build_real (type, value);
2367 : :
2368 : 0 : TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2369 : 0 : return t;
2370 : : }
2371 : :
2372 : : /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2373 : : to another fixed-point type. */
2374 : :
2375 : : static tree
2376 : 0 : fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2377 : : {
2378 : 0 : FIXED_VALUE_TYPE value;
2379 : 0 : tree t;
2380 : 0 : bool overflow_p;
2381 : :
2382 : 0 : overflow_p = fixed_convert (&value, SCALAR_TYPE_MODE (type),
2383 : 0 : &TREE_FIXED_CST (arg1), TYPE_SATURATING (type));
2384 : 0 : t = build_fixed (type, value);
2385 : :
2386 : : /* Propagate overflow flags. */
2387 : 0 : if (overflow_p | TREE_OVERFLOW (arg1))
2388 : 0 : TREE_OVERFLOW (t) = 1;
2389 : 0 : return t;
2390 : : }
2391 : :
2392 : : /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
2393 : : to a fixed-point type. */
2394 : :
2395 : : static tree
2396 : 0 : fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2397 : : {
2398 : 0 : FIXED_VALUE_TYPE value;
2399 : 0 : tree t;
2400 : 0 : bool overflow_p;
2401 : 0 : double_int di;
2402 : :
2403 : 0 : gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2404 : :
2405 : 0 : di.low = TREE_INT_CST_ELT (arg1, 0);
2406 : 0 : if (TREE_INT_CST_NUNITS (arg1) == 1)
2407 : 0 : di.high = (HOST_WIDE_INT) di.low < 0 ? HOST_WIDE_INT_M1 : 0;
2408 : : else
2409 : 0 : di.high = TREE_INT_CST_ELT (arg1, 1);
2410 : :
2411 : 0 : overflow_p = fixed_convert_from_int (&value, SCALAR_TYPE_MODE (type), di,
2412 : 0 : TYPE_UNSIGNED (TREE_TYPE (arg1)),
2413 : 0 : TYPE_SATURATING (type));
2414 : 0 : t = build_fixed (type, value);
2415 : :
2416 : : /* Propagate overflow flags. */
2417 : 0 : if (overflow_p | TREE_OVERFLOW (arg1))
2418 : 0 : TREE_OVERFLOW (t) = 1;
2419 : 0 : return t;
2420 : : }
2421 : :
2422 : : /* A subroutine of fold_convert_const handling conversions a REAL_CST
2423 : : to a fixed-point type. */
2424 : :
2425 : : static tree
2426 : 0 : fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2427 : : {
2428 : 0 : FIXED_VALUE_TYPE value;
2429 : 0 : tree t;
2430 : 0 : bool overflow_p;
2431 : :
2432 : 0 : overflow_p = fixed_convert_from_real (&value, SCALAR_TYPE_MODE (type),
2433 : 0 : &TREE_REAL_CST (arg1),
2434 : 0 : TYPE_SATURATING (type));
2435 : 0 : t = build_fixed (type, value);
2436 : :
2437 : : /* Propagate overflow flags. */
2438 : 0 : if (overflow_p | TREE_OVERFLOW (arg1))
2439 : 0 : TREE_OVERFLOW (t) = 1;
2440 : 0 : return t;
2441 : : }
2442 : :
2443 : : /* Attempt to fold type conversion operation CODE of expression ARG1 to
2444 : : type TYPE. If no simplification can be done return NULL_TREE. */
2445 : :
2446 : : static tree
2447 : 1157208195 : fold_convert_const (enum tree_code code, tree type, tree arg1)
2448 : : {
2449 : 1157208195 : tree arg_type = TREE_TYPE (arg1);
2450 : 1157208195 : if (arg_type == type)
2451 : : return arg1;
2452 : :
2453 : : /* We can't widen types, since the runtime value could overflow the
2454 : : original type before being extended to the new type. */
2455 : 1146813609 : if (POLY_INT_CST_P (arg1)
2456 : : && (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2457 : : && TYPE_PRECISION (type) <= TYPE_PRECISION (arg_type))
2458 : : return build_poly_int_cst (type,
2459 : : poly_wide_int::from (poly_int_cst_value (arg1),
2460 : : TYPE_PRECISION (type),
2461 : : TYPE_SIGN (arg_type)));
2462 : :
2463 : 1146813609 : if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2464 : : || TREE_CODE (type) == OFFSET_TYPE)
2465 : : {
2466 : 1116165387 : if (TREE_CODE (arg1) == INTEGER_CST)
2467 : 1100363980 : return fold_convert_const_int_from_int (type, arg1);
2468 : 15801407 : else if (TREE_CODE (arg1) == REAL_CST)
2469 : 28401 : return fold_convert_const_int_from_real (code, type, arg1);
2470 : 15773006 : else if (TREE_CODE (arg1) == FIXED_CST)
2471 : 0 : return fold_convert_const_int_from_fixed (type, arg1);
2472 : : }
2473 : : else if (SCALAR_FLOAT_TYPE_P (type))
2474 : : {
2475 : 30606466 : if (TREE_CODE (arg1) == INTEGER_CST)
2476 : : {
2477 : 23752086 : tree res = build_real_from_int_cst (type, arg1);
2478 : : /* Avoid the folding if flag_rounding_math is on and the
2479 : : conversion is not exact. */
2480 : 23752086 : if (HONOR_SIGN_DEPENDENT_ROUNDING (type))
2481 : : {
2482 : 2248 : bool fail = false;
2483 : 4496 : wide_int w = real_to_integer (&TREE_REAL_CST (res), &fail,
2484 : 2248 : TYPE_PRECISION (TREE_TYPE (arg1)));
2485 : 2248 : if (fail || wi::ne_p (w, wi::to_wide (arg1)))
2486 : 1216 : return NULL_TREE;
2487 : 2248 : }
2488 : 23750870 : return res;
2489 : : }
2490 : 6854380 : else if (TREE_CODE (arg1) == REAL_CST)
2491 : 1932928 : return fold_convert_const_real_from_real (type, arg1);
2492 : 4921452 : else if (TREE_CODE (arg1) == FIXED_CST)
2493 : 0 : return fold_convert_const_real_from_fixed (type, arg1);
2494 : : }
2495 : : else if (FIXED_POINT_TYPE_P (type))
2496 : : {
2497 : 0 : if (TREE_CODE (arg1) == FIXED_CST)
2498 : 0 : return fold_convert_const_fixed_from_fixed (type, arg1);
2499 : 0 : else if (TREE_CODE (arg1) == INTEGER_CST)
2500 : 0 : return fold_convert_const_fixed_from_int (type, arg1);
2501 : 0 : else if (TREE_CODE (arg1) == REAL_CST)
2502 : 0 : return fold_convert_const_fixed_from_real (type, arg1);
2503 : : }
2504 : : else if (VECTOR_TYPE_P (type))
2505 : : {
2506 : 3141 : if (TREE_CODE (arg1) == VECTOR_CST
2507 : 3141 : && known_eq (TYPE_VECTOR_SUBPARTS (type), VECTOR_CST_NELTS (arg1)))
2508 : : {
2509 : 3141 : tree elttype = TREE_TYPE (type);
2510 : 3141 : tree arg1_elttype = TREE_TYPE (TREE_TYPE (arg1));
2511 : : /* We can't handle steps directly when extending, since the
2512 : : values need to wrap at the original precision first. */
2513 : 3141 : bool step_ok_p
2514 : 3141 : = (INTEGRAL_TYPE_P (elttype)
2515 : 259 : && INTEGRAL_TYPE_P (arg1_elttype)
2516 : 3356 : && TYPE_PRECISION (elttype) <= TYPE_PRECISION (arg1_elttype));
2517 : 3141 : tree_vector_builder v;
2518 : 3141 : if (!v.new_unary_operation (type, arg1, step_ok_p))
2519 : : return NULL_TREE;
2520 : 3141 : unsigned int len = v.encoded_nelts ();
2521 : 21718 : for (unsigned int i = 0; i < len; ++i)
2522 : : {
2523 : 18577 : tree elt = VECTOR_CST_ELT (arg1, i);
2524 : 18577 : tree cvt = fold_convert_const (code, elttype, elt);
2525 : 18577 : if (cvt == NULL_TREE)
2526 : 0 : return NULL_TREE;
2527 : 18577 : v.quick_push (cvt);
2528 : : }
2529 : 3141 : return v.build ();
2530 : 3141 : }
2531 : : }
2532 : : return NULL_TREE;
2533 : : }
2534 : :
2535 : : /* Construct a vector of zero elements of vector type TYPE. */
2536 : :
2537 : : static tree
2538 : 5709 : build_zero_vector (tree type)
2539 : : {
2540 : 5709 : tree t;
2541 : :
2542 : 5709 : t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2543 : 5709 : return build_vector_from_val (type, t);
2544 : : }
2545 : :
2546 : : /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
2547 : :
2548 : : bool
2549 : 171253 : fold_convertible_p (const_tree type, const_tree arg)
2550 : : {
2551 : 171253 : const_tree orig = TREE_TYPE (arg);
2552 : :
2553 : 171253 : if (type == orig)
2554 : : return true;
2555 : :
2556 : 171253 : if (TREE_CODE (arg) == ERROR_MARK
2557 : 171253 : || TREE_CODE (type) == ERROR_MARK
2558 : 171253 : || TREE_CODE (orig) == ERROR_MARK)
2559 : : return false;
2560 : :
2561 : 171253 : if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2562 : : return true;
2563 : :
2564 : 171253 : switch (TREE_CODE (type))
2565 : : {
2566 : 170889 : case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2567 : 170889 : case POINTER_TYPE: case REFERENCE_TYPE:
2568 : 170889 : case OFFSET_TYPE:
2569 : 170889 : return (INTEGRAL_TYPE_P (orig)
2570 : 241 : || (POINTER_TYPE_P (orig)
2571 : 102 : && TYPE_PRECISION (type) <= TYPE_PRECISION (orig))
2572 : 171028 : || TREE_CODE (orig) == OFFSET_TYPE);
2573 : :
2574 : 42 : case REAL_TYPE:
2575 : 42 : case FIXED_POINT_TYPE:
2576 : 42 : case VOID_TYPE:
2577 : 42 : return TREE_CODE (type) == TREE_CODE (orig);
2578 : :
2579 : 201 : case VECTOR_TYPE:
2580 : 201 : return (VECTOR_TYPE_P (orig)
2581 : 306 : && known_eq (TYPE_VECTOR_SUBPARTS (type),
2582 : : TYPE_VECTOR_SUBPARTS (orig))
2583 : 210 : && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2584 : :
2585 : : default:
2586 : : return false;
2587 : : }
2588 : : }
2589 : :
2590 : : /* Convert expression ARG to type TYPE. Used by the middle-end for
2591 : : simple conversions in preference to calling the front-end's convert. */
2592 : :
2593 : : tree
2594 : 1704918849 : fold_convert_loc (location_t loc, tree type, tree arg)
2595 : : {
2596 : 1704918849 : tree orig = TREE_TYPE (arg);
2597 : 1704918849 : tree tem;
2598 : :
2599 : 1704918849 : if (type == orig)
2600 : : return arg;
2601 : :
2602 : 1142880057 : if (TREE_CODE (arg) == ERROR_MARK
2603 : 1142879080 : || TREE_CODE (type) == ERROR_MARK
2604 : 1142879079 : || TREE_CODE (orig) == ERROR_MARK)
2605 : 978 : return error_mark_node;
2606 : :
2607 : 1142879079 : switch (TREE_CODE (type))
2608 : : {
2609 : 47608002 : case POINTER_TYPE:
2610 : 47608002 : case REFERENCE_TYPE:
2611 : : /* Handle conversions between pointers to different address spaces. */
2612 : 47608002 : if (POINTER_TYPE_P (orig)
2613 : 47608002 : && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2614 : 41472261 : != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2615 : 101 : return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2616 : : /* fall through */
2617 : :
2618 : 1117458096 : case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2619 : 1117458096 : case OFFSET_TYPE: case BITINT_TYPE:
2620 : 1117458096 : if (TREE_CODE (arg) == INTEGER_CST)
2621 : : {
2622 : 977827731 : tem = fold_convert_const (NOP_EXPR, type, arg);
2623 : 977827731 : if (tem != NULL_TREE)
2624 : : return tem;
2625 : : }
2626 : 139630365 : if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2627 : 2166 : || TREE_CODE (orig) == OFFSET_TYPE)
2628 : 139630365 : return fold_build1_loc (loc, NOP_EXPR, type, arg);
2629 : 0 : if (TREE_CODE (orig) == COMPLEX_TYPE)
2630 : 0 : return fold_convert_loc (loc, type,
2631 : : fold_build1_loc (loc, REALPART_EXPR,
2632 : 0 : TREE_TYPE (orig), arg));
2633 : 0 : gcc_assert (VECTOR_TYPE_P (orig)
2634 : : && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2635 : 0 : return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2636 : :
2637 : 529808 : case REAL_TYPE:
2638 : 529808 : if (TREE_CODE (arg) == INTEGER_CST)
2639 : : {
2640 : 56963 : tem = fold_convert_const (FLOAT_EXPR, type, arg);
2641 : 56963 : if (tem != NULL_TREE)
2642 : : return tem;
2643 : : }
2644 : 472845 : else if (TREE_CODE (arg) == REAL_CST)
2645 : : {
2646 : 120209 : tem = fold_convert_const (NOP_EXPR, type, arg);
2647 : 120209 : if (tem != NULL_TREE)
2648 : : return tem;
2649 : : }
2650 : 352636 : else if (TREE_CODE (arg) == FIXED_CST)
2651 : : {
2652 : 0 : tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2653 : 0 : if (tem != NULL_TREE)
2654 : : return tem;
2655 : : }
2656 : :
2657 : 352639 : switch (TREE_CODE (orig))
2658 : : {
2659 : 640 : case INTEGER_TYPE: case BITINT_TYPE:
2660 : 640 : case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2661 : 640 : case POINTER_TYPE: case REFERENCE_TYPE:
2662 : 640 : return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2663 : :
2664 : 351999 : case REAL_TYPE:
2665 : 351999 : return fold_build1_loc (loc, NOP_EXPR, type, arg);
2666 : :
2667 : 0 : case FIXED_POINT_TYPE:
2668 : 0 : return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2669 : :
2670 : 0 : case COMPLEX_TYPE:
2671 : 0 : tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2672 : 0 : return fold_convert_loc (loc, type, tem);
2673 : :
2674 : 0 : default:
2675 : 0 : gcc_unreachable ();
2676 : : }
2677 : :
2678 : 0 : case FIXED_POINT_TYPE:
2679 : 0 : if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2680 : 0 : || TREE_CODE (arg) == REAL_CST)
2681 : : {
2682 : 0 : tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2683 : 0 : if (tem != NULL_TREE)
2684 : 0 : goto fold_convert_exit;
2685 : : }
2686 : :
2687 : 0 : switch (TREE_CODE (orig))
2688 : : {
2689 : 0 : case FIXED_POINT_TYPE:
2690 : 0 : case INTEGER_TYPE:
2691 : 0 : case ENUMERAL_TYPE:
2692 : 0 : case BOOLEAN_TYPE:
2693 : 0 : case REAL_TYPE:
2694 : 0 : case BITINT_TYPE:
2695 : 0 : return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2696 : :
2697 : 0 : case COMPLEX_TYPE:
2698 : 0 : tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2699 : 0 : return fold_convert_loc (loc, type, tem);
2700 : :
2701 : 0 : default:
2702 : 0 : gcc_unreachable ();
2703 : : }
2704 : :
2705 : 2223 : case COMPLEX_TYPE:
2706 : 2223 : switch (TREE_CODE (orig))
2707 : : {
2708 : 585 : case INTEGER_TYPE: case BITINT_TYPE:
2709 : 585 : case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2710 : 585 : case POINTER_TYPE: case REFERENCE_TYPE:
2711 : 585 : case REAL_TYPE:
2712 : 585 : case FIXED_POINT_TYPE:
2713 : 1170 : return fold_build2_loc (loc, COMPLEX_EXPR, type,
2714 : 585 : fold_convert_loc (loc, TREE_TYPE (type), arg),
2715 : 585 : fold_convert_loc (loc, TREE_TYPE (type),
2716 : 585 : integer_zero_node));
2717 : 1638 : case COMPLEX_TYPE:
2718 : 1638 : {
2719 : 1638 : tree rpart, ipart;
2720 : :
2721 : 1638 : if (TREE_CODE (arg) == COMPLEX_EXPR)
2722 : : {
2723 : 1534 : rpart = fold_convert_loc (loc, TREE_TYPE (type),
2724 : 1534 : TREE_OPERAND (arg, 0));
2725 : 1534 : ipart = fold_convert_loc (loc, TREE_TYPE (type),
2726 : 1534 : TREE_OPERAND (arg, 1));
2727 : 1534 : return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2728 : : }
2729 : :
2730 : 104 : arg = save_expr (arg);
2731 : 104 : rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2732 : 104 : ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2733 : 104 : rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2734 : 104 : ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2735 : 104 : return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2736 : : }
2737 : :
2738 : 0 : default:
2739 : 0 : gcc_unreachable ();
2740 : : }
2741 : :
2742 : 24777887 : case VECTOR_TYPE:
2743 : 24777887 : if (integer_zerop (arg))
2744 : 5709 : return build_zero_vector (type);
2745 : 24772178 : gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2746 : 24772178 : gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2747 : : || VECTOR_TYPE_P (orig));
2748 : 24772178 : return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2749 : :
2750 : 107816 : case VOID_TYPE:
2751 : 107816 : tem = fold_ignored_result (arg);
2752 : 107816 : return fold_build1_loc (loc, NOP_EXPR, type, tem);
2753 : :
2754 : 3148 : default:
2755 : 3148 : if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2756 : 3148 : return fold_build1_loc (loc, NOP_EXPR, type, arg);
2757 : 0 : gcc_unreachable ();
2758 : : }
2759 : 0 : fold_convert_exit:
2760 : 0 : tem = protected_set_expr_location_unshare (tem, loc);
2761 : 0 : return tem;
2762 : : }
2763 : :
2764 : : /* Return false if expr can be assumed not to be an lvalue, true
2765 : : otherwise. */
2766 : :
2767 : : static bool
2768 : 48879422 : maybe_lvalue_p (const_tree x)
2769 : : {
2770 : : /* We only need to wrap lvalue tree codes. */
2771 : 48879422 : switch (TREE_CODE (x))
2772 : : {
2773 : : case VAR_DECL:
2774 : : case PARM_DECL:
2775 : : case RESULT_DECL:
2776 : : case LABEL_DECL:
2777 : : case FUNCTION_DECL:
2778 : : case SSA_NAME:
2779 : : case COMPOUND_LITERAL_EXPR:
2780 : :
2781 : : case COMPONENT_REF:
2782 : : case MEM_REF:
2783 : : case INDIRECT_REF:
2784 : : case ARRAY_REF:
2785 : : case ARRAY_RANGE_REF:
2786 : : case BIT_FIELD_REF:
2787 : : case OBJ_TYPE_REF:
2788 : :
2789 : : case REALPART_EXPR:
2790 : : case IMAGPART_EXPR:
2791 : : case PREINCREMENT_EXPR:
2792 : : case PREDECREMENT_EXPR:
2793 : : case SAVE_EXPR:
2794 : : case TRY_CATCH_EXPR:
2795 : : case WITH_CLEANUP_EXPR:
2796 : : case COMPOUND_EXPR:
2797 : : case MODIFY_EXPR:
2798 : : case TARGET_EXPR:
2799 : : case COND_EXPR:
2800 : : case BIND_EXPR:
2801 : : case VIEW_CONVERT_EXPR:
2802 : : break;
2803 : :
2804 : 37824690 : default:
2805 : : /* Assume the worst for front-end tree codes. */
2806 : 37824690 : if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2807 : : break;
2808 : : return false;
2809 : : }
2810 : :
2811 : 11128696 : return true;
2812 : : }
2813 : :
2814 : : /* Return an expr equal to X but certainly not valid as an lvalue. */
2815 : :
2816 : : tree
2817 : 34099759 : non_lvalue_loc (location_t loc, tree x)
2818 : : {
2819 : : /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2820 : : us. */
2821 : 34099759 : if (in_gimple_form)
2822 : : return x;
2823 : :
2824 : 8509883 : if (! maybe_lvalue_p (x))
2825 : : return x;
2826 : 1838739 : return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2827 : : }
2828 : :
2829 : : /* Given a tree comparison code, return the code that is the logical inverse.
2830 : : It is generally not safe to do this for floating-point comparisons, except
2831 : : for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2832 : : ERROR_MARK in this case. */
2833 : :
2834 : : enum tree_code
2835 : 99613063 : invert_tree_comparison (enum tree_code code, bool honor_nans)
2836 : : {
2837 : 99613063 : if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2838 : 944029 : && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2839 : : return ERROR_MARK;
2840 : :
2841 : 98919839 : switch (code)
2842 : : {
2843 : : case EQ_EXPR:
2844 : : return NE_EXPR;
2845 : 45086504 : case NE_EXPR:
2846 : 45086504 : return EQ_EXPR;
2847 : 8866355 : case GT_EXPR:
2848 : 8866355 : return honor_nans ? UNLE_EXPR : LE_EXPR;
2849 : 11163115 : case GE_EXPR:
2850 : 11163115 : return honor_nans ? UNLT_EXPR : LT_EXPR;
2851 : 5994928 : case LT_EXPR:
2852 : 5994928 : return honor_nans ? UNGE_EXPR : GE_EXPR;
2853 : 6576874 : case LE_EXPR:
2854 : 6576874 : return honor_nans ? UNGT_EXPR : GT_EXPR;
2855 : 252 : case LTGT_EXPR:
2856 : 252 : return UNEQ_EXPR;
2857 : 289 : case UNEQ_EXPR:
2858 : 289 : return LTGT_EXPR;
2859 : : case UNGT_EXPR:
2860 : : return LE_EXPR;
2861 : : case UNGE_EXPR:
2862 : : return LT_EXPR;
2863 : : case UNLT_EXPR:
2864 : : return GE_EXPR;
2865 : : case UNLE_EXPR:
2866 : : return GT_EXPR;
2867 : 217307 : case ORDERED_EXPR:
2868 : 217307 : return UNORDERED_EXPR;
2869 : 54329 : case UNORDERED_EXPR:
2870 : 54329 : return ORDERED_EXPR;
2871 : 0 : default:
2872 : 0 : gcc_unreachable ();
2873 : : }
2874 : : }
2875 : :
2876 : : /* Similar, but return the comparison that results if the operands are
2877 : : swapped. This is safe for floating-point. */
2878 : :
2879 : : enum tree_code
2880 : 137349552 : swap_tree_comparison (enum tree_code code)
2881 : : {
2882 : 137349552 : switch (code)
2883 : : {
2884 : : case EQ_EXPR:
2885 : : case NE_EXPR:
2886 : : case ORDERED_EXPR:
2887 : : case UNORDERED_EXPR:
2888 : : case LTGT_EXPR:
2889 : : case UNEQ_EXPR:
2890 : : return code;
2891 : 31570806 : case GT_EXPR:
2892 : 31570806 : return LT_EXPR;
2893 : 9625572 : case GE_EXPR:
2894 : 9625572 : return LE_EXPR;
2895 : 19010255 : case LT_EXPR:
2896 : 19010255 : return GT_EXPR;
2897 : 14311087 : case LE_EXPR:
2898 : 14311087 : return GE_EXPR;
2899 : 249190 : case UNGT_EXPR:
2900 : 249190 : return UNLT_EXPR;
2901 : 18643 : case UNGE_EXPR:
2902 : 18643 : return UNLE_EXPR;
2903 : 856847 : case UNLT_EXPR:
2904 : 856847 : return UNGT_EXPR;
2905 : 126775 : case UNLE_EXPR:
2906 : 126775 : return UNGE_EXPR;
2907 : 0 : default:
2908 : 0 : gcc_unreachable ();
2909 : : }
2910 : : }
2911 : :
2912 : :
2913 : : /* Convert a comparison tree code from an enum tree_code representation
2914 : : into a compcode bit-based encoding. This function is the inverse of
2915 : : compcode_to_comparison. */
2916 : :
2917 : : static enum comparison_code
2918 : 44878 : comparison_to_compcode (enum tree_code code)
2919 : : {
2920 : 44878 : switch (code)
2921 : : {
2922 : : case LT_EXPR:
2923 : : return COMPCODE_LT;
2924 : : case EQ_EXPR:
2925 : : return COMPCODE_EQ;
2926 : : case LE_EXPR:
2927 : : return COMPCODE_LE;
2928 : : case GT_EXPR:
2929 : : return COMPCODE_GT;
2930 : : case NE_EXPR:
2931 : : return COMPCODE_NE;
2932 : : case GE_EXPR:
2933 : : return COMPCODE_GE;
2934 : : case ORDERED_EXPR:
2935 : : return COMPCODE_ORD;
2936 : : case UNORDERED_EXPR:
2937 : : return COMPCODE_UNORD;
2938 : : case UNLT_EXPR:
2939 : : return COMPCODE_UNLT;
2940 : : case UNEQ_EXPR:
2941 : : return COMPCODE_UNEQ;
2942 : : case UNLE_EXPR:
2943 : : return COMPCODE_UNLE;
2944 : : case UNGT_EXPR:
2945 : : return COMPCODE_UNGT;
2946 : : case LTGT_EXPR:
2947 : : return COMPCODE_LTGT;
2948 : : case UNGE_EXPR:
2949 : : return COMPCODE_UNGE;
2950 : 0 : default:
2951 : 0 : gcc_unreachable ();
2952 : : }
2953 : : }
2954 : :
2955 : : /* Convert a compcode bit-based encoding of a comparison operator back
2956 : : to GCC's enum tree_code representation. This function is the
2957 : : inverse of comparison_to_compcode. */
2958 : :
2959 : : static enum tree_code
2960 : 11995 : compcode_to_comparison (enum comparison_code code)
2961 : : {
2962 : 11995 : switch (code)
2963 : : {
2964 : : case COMPCODE_LT:
2965 : : return LT_EXPR;
2966 : : case COMPCODE_EQ:
2967 : : return EQ_EXPR;
2968 : : case COMPCODE_LE:
2969 : : return LE_EXPR;
2970 : : case COMPCODE_GT:
2971 : : return GT_EXPR;
2972 : : case COMPCODE_NE:
2973 : : return NE_EXPR;
2974 : : case COMPCODE_GE:
2975 : : return GE_EXPR;
2976 : : case COMPCODE_ORD:
2977 : : return ORDERED_EXPR;
2978 : : case COMPCODE_UNORD:
2979 : : return UNORDERED_EXPR;
2980 : : case COMPCODE_UNLT:
2981 : : return UNLT_EXPR;
2982 : : case COMPCODE_UNEQ:
2983 : : return UNEQ_EXPR;
2984 : : case COMPCODE_UNLE:
2985 : : return UNLE_EXPR;
2986 : : case COMPCODE_UNGT:
2987 : : return UNGT_EXPR;
2988 : : case COMPCODE_LTGT:
2989 : : return LTGT_EXPR;
2990 : : case COMPCODE_UNGE:
2991 : : return UNGE_EXPR;
2992 : 0 : default:
2993 : 0 : gcc_unreachable ();
2994 : : }
2995 : : }
2996 : :
2997 : : /* Return true if COND1 tests the opposite condition of COND2. */
2998 : :
2999 : : bool
3000 : 1163482 : inverse_conditions_p (const_tree cond1, const_tree cond2)
3001 : : {
3002 : 1163482 : return (COMPARISON_CLASS_P (cond1)
3003 : 1092862 : && COMPARISON_CLASS_P (cond2)
3004 : 1089001 : && (invert_tree_comparison
3005 : 1089001 : (TREE_CODE (cond1),
3006 : 2178002 : HONOR_NANS (TREE_OPERAND (cond1, 0))) == TREE_CODE (cond2))
3007 : 68616 : && operand_equal_p (TREE_OPERAND (cond1, 0),
3008 : 68616 : TREE_OPERAND (cond2, 0), 0)
3009 : 1184770 : && operand_equal_p (TREE_OPERAND (cond1, 1),
3010 : 21288 : TREE_OPERAND (cond2, 1), 0));
3011 : : }
3012 : :
3013 : : /* Return a tree for the comparison which is the combination of
3014 : : doing the AND or OR (depending on CODE) of the two operations LCODE
3015 : : and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
3016 : : the possibility of trapping if the mode has NaNs, and return NULL_TREE
3017 : : if this makes the transformation invalid. */
3018 : :
3019 : : tree
3020 : 22439 : combine_comparisons (location_t loc,
3021 : : enum tree_code code, enum tree_code lcode,
3022 : : enum tree_code rcode, tree truth_type,
3023 : : tree ll_arg, tree lr_arg)
3024 : : {
3025 : 22439 : bool honor_nans = HONOR_NANS (ll_arg);
3026 : 22439 : enum comparison_code lcompcode = comparison_to_compcode (lcode);
3027 : 22439 : enum comparison_code rcompcode = comparison_to_compcode (rcode);
3028 : 22439 : int compcode;
3029 : :
3030 : 22439 : switch (code)
3031 : : {
3032 : 14507 : case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
3033 : 14507 : compcode = lcompcode & rcompcode;
3034 : 14507 : break;
3035 : :
3036 : 7932 : case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
3037 : 7932 : compcode = lcompcode | rcompcode;
3038 : 7932 : break;
3039 : :
3040 : : default:
3041 : : return NULL_TREE;
3042 : : }
3043 : :
3044 : 22439 : if (!honor_nans)
3045 : : {
3046 : : /* Eliminate unordered comparisons, as well as LTGT and ORD
3047 : : which are not used unless the mode has NaNs. */
3048 : 17225 : compcode &= ~COMPCODE_UNORD;
3049 : 17225 : if (compcode == COMPCODE_LTGT)
3050 : : compcode = COMPCODE_NE;
3051 : 16671 : else if (compcode == COMPCODE_ORD)
3052 : : compcode = COMPCODE_TRUE;
3053 : : }
3054 : 5214 : else if (flag_trapping_math)
3055 : : {
3056 : : /* Check that the original operation and the optimized ones will trap
3057 : : under the same condition. */
3058 : 8586 : bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
3059 : 3527 : && (lcompcode != COMPCODE_EQ)
3060 : 4293 : && (lcompcode != COMPCODE_ORD);
3061 : 8586 : bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
3062 : 3762 : && (rcompcode != COMPCODE_EQ)
3063 : 4293 : && (rcompcode != COMPCODE_ORD);
3064 : 8586 : bool trap = (compcode & COMPCODE_UNORD) == 0
3065 : 3852 : && (compcode != COMPCODE_EQ)
3066 : 4293 : && (compcode != COMPCODE_ORD);
3067 : :
3068 : : /* In a short-circuited boolean expression the LHS might be
3069 : : such that the RHS, if evaluated, will never trap. For
3070 : : example, in ORD (x, y) && (x < y), we evaluate the RHS only
3071 : : if neither x nor y is NaN. (This is a mixed blessing: for
3072 : : example, the expression above will never trap, hence
3073 : : optimizing it to x < y would be invalid). */
3074 : 4293 : if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
3075 : 3874 : || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
3076 : 4293 : rtrap = false;
3077 : :
3078 : : /* If the comparison was short-circuited, and only the RHS
3079 : : trapped, we may now generate a spurious trap. */
3080 : 4293 : if (rtrap && !ltrap
3081 : 118 : && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3082 : : return NULL_TREE;
3083 : :
3084 : : /* If we changed the conditions that cause a trap, we lose. */
3085 : 4175 : if ((ltrap || rtrap) != trap)
3086 : : return NULL_TREE;
3087 : : }
3088 : :
3089 : 17498 : if (compcode == COMPCODE_TRUE)
3090 : 852 : return constant_boolean_node (true, truth_type);
3091 : 18024 : else if (compcode == COMPCODE_FALSE)
3092 : 6029 : return constant_boolean_node (false, truth_type);
3093 : : else
3094 : : {
3095 : 11995 : enum tree_code tcode;
3096 : :
3097 : 11995 : tcode = compcode_to_comparison ((enum comparison_code) compcode);
3098 : 11995 : return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
3099 : : }
3100 : : }
3101 : :
3102 : : /* Return nonzero if two operands (typically of the same tree node)
3103 : : are necessarily equal. FLAGS modifies behavior as follows:
3104 : :
3105 : : If OEP_ONLY_CONST is set, only return nonzero for constants.
3106 : : This function tests whether the operands are indistinguishable;
3107 : : it does not test whether they are equal using C's == operation.
3108 : : The distinction is important for IEEE floating point, because
3109 : : (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
3110 : : (2) two NaNs may be indistinguishable, but NaN!=NaN.
3111 : :
3112 : : If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
3113 : : even though it may hold multiple values during a function.
3114 : : This is because a GCC tree node guarantees that nothing else is
3115 : : executed between the evaluation of its "operands" (which may often
3116 : : be evaluated in arbitrary order). Hence if the operands themselves
3117 : : don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
3118 : : same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
3119 : : unset means assuming isochronic (or instantaneous) tree equivalence.
3120 : : Unless comparing arbitrary expression trees, such as from different
3121 : : statements, this flag can usually be left unset.
3122 : :
3123 : : If OEP_PURE_SAME is set, then pure functions with identical arguments
3124 : : are considered the same. It is used when the caller has other ways
3125 : : to ensure that global memory is unchanged in between.
3126 : :
3127 : : If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
3128 : : not values of expressions.
3129 : :
3130 : : If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
3131 : : such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.
3132 : :
3133 : : If OEP_BITWISE is set, then require the values to be bitwise identical
3134 : : rather than simply numerically equal. Do not take advantage of things
3135 : : like math-related flags or undefined behavior; only return true for
3136 : : values that are provably bitwise identical in all circumstances.
3137 : :
3138 : : Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
3139 : : any operand with side effect. This is unnecesarily conservative in the
3140 : : case we know that arg0 and arg1 are in disjoint code paths (such as in
3141 : : ?: operator). In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
3142 : : addresses with TREE_CONSTANT flag set so we know that &var == &var
3143 : : even if var is volatile. */
3144 : :
3145 : : bool
3146 : 6001404815 : operand_compare::operand_equal_p (const_tree arg0, const_tree arg1,
3147 : : unsigned int flags)
3148 : : {
3149 : 6001404815 : bool r;
3150 : 6001404815 : if (verify_hash_value (arg0, arg1, flags, &r))
3151 : 2357017723 : return r;
3152 : :
3153 : 3644387092 : STRIP_ANY_LOCATION_WRAPPER (arg0);
3154 : 3644387092 : STRIP_ANY_LOCATION_WRAPPER (arg1);
3155 : :
3156 : : /* If either is ERROR_MARK, they aren't equal. */
3157 : 3644386905 : if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
3158 : 3644386284 : || TREE_TYPE (arg0) == error_mark_node
3159 : 7288773376 : || TREE_TYPE (arg1) == error_mark_node)
3160 : : return false;
3161 : :
3162 : : /* Similar, if either does not have a type (like a template id),
3163 : : they aren't equal. */
3164 : 3644386284 : if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
3165 : : return false;
3166 : :
3167 : : /* Bitwise identity makes no sense if the values have different layouts. */
3168 : 3644384431 : if ((flags & OEP_BITWISE)
3169 : 3644384431 : && !tree_nop_conversion_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3170 : : return false;
3171 : :
3172 : : /* We cannot consider pointers to different address space equal. */
3173 : 6816523541 : if (POINTER_TYPE_P (TREE_TYPE (arg0))
3174 : 486240126 : && POINTER_TYPE_P (TREE_TYPE (arg1))
3175 : 4047393148 : && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
3176 : 403008717 : != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
3177 : : return false;
3178 : :
3179 : : /* Check equality of integer constants before bailing out due to
3180 : : precision differences. */
3181 : 3644384223 : if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
3182 : : {
3183 : : /* Address of INTEGER_CST is not defined; check that we did not forget
3184 : : to drop the OEP_ADDRESS_OF flags. */
3185 : 553183897 : gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3186 : 553183897 : return tree_int_cst_equal (arg0, arg1);
3187 : : }
3188 : :
3189 : 3091200326 : if (!(flags & OEP_ADDRESS_OF))
3190 : : {
3191 : : /* If both types don't have the same signedness, then we can't consider
3192 : : them equal. We must check this before the STRIP_NOPS calls
3193 : : because they may change the signedness of the arguments. As pointers
3194 : : strictly don't have a signedness, require either two pointers or
3195 : : two non-pointers as well. */
3196 : 2771610380 : if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
3197 : 5317195205 : || POINTER_TYPE_P (TREE_TYPE (arg0))
3198 : 2545584825 : != POINTER_TYPE_P (TREE_TYPE (arg1)))
3199 : : return false;
3200 : :
3201 : : /* If both types don't have the same precision, then it is not safe
3202 : : to strip NOPs. */
3203 : 5036383406 : if (element_precision (TREE_TYPE (arg0))
3204 : 2518191703 : != element_precision (TREE_TYPE (arg1)))
3205 : : return false;
3206 : :
3207 : 2386110964 : STRIP_NOPS (arg0);
3208 : 2386110964 : STRIP_NOPS (arg1);
3209 : : }
3210 : : #if 0
3211 : : /* FIXME: Fortran FE currently produce ADDR_EXPR of NOP_EXPR. Enable the
3212 : : sanity check once the issue is solved. */
3213 : : else
3214 : : /* Addresses of conversions and SSA_NAMEs (and many other things)
3215 : : are not defined. Check that we did not forget to drop the
3216 : : OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
3217 : : gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
3218 : : && TREE_CODE (arg0) != SSA_NAME);
3219 : : #endif
3220 : :
3221 : : /* In case both args are comparisons but with different comparison
3222 : : code, try to swap the comparison operands of one arg to produce
3223 : : a match and compare that variant. */
3224 : 2705700910 : if (TREE_CODE (arg0) != TREE_CODE (arg1)
3225 : 862009652 : && COMPARISON_CLASS_P (arg0)
3226 : 5943029 : && COMPARISON_CLASS_P (arg1))
3227 : : {
3228 : 4705209 : enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3229 : :
3230 : 4705209 : if (TREE_CODE (arg0) == swap_code)
3231 : 2446581 : return operand_equal_p (TREE_OPERAND (arg0, 0),
3232 : 2446581 : TREE_OPERAND (arg1, 1), flags)
3233 : 2463137 : && operand_equal_p (TREE_OPERAND (arg0, 1),
3234 : 16556 : TREE_OPERAND (arg1, 0), flags);
3235 : : }
3236 : :
3237 : 2703254329 : if (TREE_CODE (arg0) != TREE_CODE (arg1))
3238 : : {
3239 : : /* NOP_EXPR and CONVERT_EXPR are considered equal. */
3240 : 859563071 : if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
3241 : : ;
3242 : 859539726 : else if (flags & OEP_ADDRESS_OF)
3243 : : {
3244 : : /* If we are interested in comparing addresses ignore
3245 : : MEM_REF wrappings of the base that can appear just for
3246 : : TBAA reasons. */
3247 : 27342470 : if (TREE_CODE (arg0) == MEM_REF
3248 : 5514223 : && DECL_P (arg1)
3249 : 3810924 : && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
3250 : 652294 : && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
3251 : 27681581 : && integer_zerop (TREE_OPERAND (arg0, 1)))
3252 : : return true;
3253 : 27165952 : else if (TREE_CODE (arg1) == MEM_REF
3254 : 13929923 : && DECL_P (arg0)
3255 : 8141349 : && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
3256 : 790392 : && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
3257 : 27455635 : && integer_zerop (TREE_OPERAND (arg1, 1)))
3258 : : return true;
3259 : 27012860 : return false;
3260 : : }
3261 : : else
3262 : : return false;
3263 : : }
3264 : :
3265 : : /* When not checking adddresses, this is needed for conversions and for
3266 : : COMPONENT_REF. Might as well play it safe and always test this. */
3267 : 1843714603 : if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3268 : 1843714603 : || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3269 : 3687429206 : || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
3270 : 19980738 : && !(flags & OEP_ADDRESS_OF)))
3271 : 3262617 : return false;
3272 : :
3273 : : /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3274 : : We don't care about side effects in that case because the SAVE_EXPR
3275 : : takes care of that for us. In all other cases, two expressions are
3276 : : equal if they have no side effects. If we have two identical
3277 : : expressions with side effects that should be treated the same due
3278 : : to the only side effects being identical SAVE_EXPR's, that will
3279 : : be detected in the recursive calls below.
3280 : : If we are taking an invariant address of two identical objects
3281 : : they are necessarily equal as well. */
3282 : 276401575 : if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3283 : 2116853529 : && (TREE_CODE (arg0) == SAVE_EXPR
3284 : 276389010 : || (flags & OEP_MATCH_SIDE_EFFECTS)
3285 : 247683916 : || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3286 : : return true;
3287 : :
3288 : : /* Next handle constant cases, those for which we can return 1 even
3289 : : if ONLY_CONST is set. */
3290 : 1564190143 : if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3291 : 17686053 : switch (TREE_CODE (arg0))
3292 : : {
3293 : 0 : case INTEGER_CST:
3294 : 0 : return tree_int_cst_equal (arg0, arg1);
3295 : :
3296 : 0 : case FIXED_CST:
3297 : 0 : return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3298 : : TREE_FIXED_CST (arg1));
3299 : :
3300 : 3499806 : case REAL_CST:
3301 : 3499806 : if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
3302 : : return true;
3303 : :
3304 : 2509803 : if (!(flags & OEP_BITWISE) && !HONOR_SIGNED_ZEROS (arg0))
3305 : : {
3306 : : /* If we do not distinguish between signed and unsigned zero,
3307 : : consider them equal. */
3308 : 14072 : if (real_zerop (arg0) && real_zerop (arg1))
3309 : : return true;
3310 : : }
3311 : 2509799 : return false;
3312 : :
3313 : 647160 : case VECTOR_CST:
3314 : 647160 : {
3315 : 647160 : if (VECTOR_CST_LOG2_NPATTERNS (arg0)
3316 : 647160 : != VECTOR_CST_LOG2_NPATTERNS (arg1))
3317 : : return false;
3318 : :
3319 : 635905 : if (VECTOR_CST_NELTS_PER_PATTERN (arg0)
3320 : 635905 : != VECTOR_CST_NELTS_PER_PATTERN (arg1))
3321 : : return false;
3322 : :
3323 : 608916 : unsigned int count = vector_cst_encoded_nelts (arg0);
3324 : 806135 : for (unsigned int i = 0; i < count; ++i)
3325 : 1306570 : if (!operand_equal_p (VECTOR_CST_ENCODED_ELT (arg0, i),
3326 : 653285 : VECTOR_CST_ENCODED_ELT (arg1, i), flags))
3327 : : return false;
3328 : : return true;
3329 : : }
3330 : :
3331 : 11538 : case COMPLEX_CST:
3332 : 11538 : return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3333 : : flags)
3334 : 11538 : && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3335 : : flags));
3336 : :
3337 : 1027086 : case STRING_CST:
3338 : 1027086 : return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3339 : 1027086 : && ! memcmp (TREE_STRING_POINTER (arg0),
3340 : 594330 : TREE_STRING_POINTER (arg1),
3341 : 594330 : TREE_STRING_LENGTH (arg0)));
3342 : :
3343 : 11202341 : case ADDR_EXPR:
3344 : 11202341 : gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3345 : 11202341 : return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3346 : : flags | OEP_ADDRESS_OF
3347 : 11202341 : | OEP_MATCH_SIDE_EFFECTS);
3348 : 177430 : case CONSTRUCTOR:
3349 : 177430 : {
3350 : : /* In GIMPLE empty constructors are allowed in initializers of
3351 : : aggregates. */
3352 : 309268540 : if (!CONSTRUCTOR_NELTS (arg0) && !CONSTRUCTOR_NELTS (arg1))
3353 : : return true;
3354 : :
3355 : : /* See sem_variable::equals in ipa-icf for a similar approach. */
3356 : 150921 : tree typ0 = TREE_TYPE (arg0);
3357 : 150921 : tree typ1 = TREE_TYPE (arg1);
3358 : :
3359 : 150921 : if (TREE_CODE (typ0) != TREE_CODE (typ1))
3360 : : return false;
3361 : 150921 : else if (TREE_CODE (typ0) == ARRAY_TYPE)
3362 : : {
3363 : : /* For arrays, check that the sizes all match. */
3364 : 2 : const HOST_WIDE_INT siz0 = int_size_in_bytes (typ0);
3365 : 2 : if (TYPE_MODE (typ0) != TYPE_MODE (typ1)
3366 : 2 : || siz0 < 0
3367 : 4 : || siz0 != int_size_in_bytes (typ1))
3368 : 0 : return false;
3369 : : }
3370 : 150919 : else if (!types_compatible_p (typ0, typ1))
3371 : : return false;
3372 : :
3373 : 150921 : vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3374 : 150921 : vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3375 : 452763 : if (vec_safe_length (v0) != vec_safe_length (v1))
3376 : : return false;
3377 : :
3378 : : /* Address of CONSTRUCTOR is defined in GENERIC to mean the value
3379 : : of the CONSTRUCTOR referenced indirectly. */
3380 : 150921 : flags &= ~OEP_ADDRESS_OF;
3381 : :
3382 : 516830 : for (unsigned idx = 0; idx < vec_safe_length (v0); ++idx)
3383 : : {
3384 : 208866 : constructor_elt *c0 = &(*v0)[idx];
3385 : 208866 : constructor_elt *c1 = &(*v1)[idx];
3386 : :
3387 : : /* Check that the values are the same... */
3388 : 208866 : if (c0->value != c1->value
3389 : 208866 : && !operand_equal_p (c0->value, c1->value, flags))
3390 : : return false;
3391 : :
3392 : : /* ... and that they apply to the same field! */
3393 : 107494 : if (c0->index != c1->index
3394 : 107494 : && (TREE_CODE (typ0) == ARRAY_TYPE
3395 : 0 : ? !operand_equal_p (c0->index, c1->index, flags)
3396 : 0 : : !operand_equal_p (DECL_FIELD_OFFSET (c0->index),
3397 : 0 : DECL_FIELD_OFFSET (c1->index),
3398 : : flags)
3399 : 0 : || !operand_equal_p (DECL_FIELD_BIT_OFFSET (c0->index),
3400 : 0 : DECL_FIELD_BIT_OFFSET (c1->index),
3401 : : flags)))
3402 : 0 : return false;
3403 : : }
3404 : :
3405 : : return true;
3406 : : }
3407 : :
3408 : : default:
3409 : : break;
3410 : : }
3411 : :
3412 : : /* Don't handle more cases for OEP_BITWISE, since we can't guarantee that
3413 : : two instances of undefined behavior will give identical results. */
3414 : 1547624782 : if (flags & (OEP_ONLY_CONST | OEP_BITWISE))
3415 : : return false;
3416 : :
3417 : : /* Define macros to test an operand from arg0 and arg1 for equality and a
3418 : : variant that allows null and views null as being different from any
3419 : : non-null value. In the latter case, if either is null, the both
3420 : : must be; otherwise, do the normal comparison. */
3421 : : #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3422 : : TREE_OPERAND (arg1, N), flags)
3423 : :
3424 : : #define OP_SAME_WITH_NULL(N) \
3425 : : ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3426 : : ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3427 : :
3428 : 1547624782 : switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3429 : : {
3430 : 6844351 : case tcc_unary:
3431 : : /* Two conversions are equal only if signedness and modes match. */
3432 : 6844351 : switch (TREE_CODE (arg0))
3433 : : {
3434 : 6495239 : CASE_CONVERT:
3435 : 6495239 : case FIX_TRUNC_EXPR:
3436 : 6495239 : if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3437 : 6495239 : != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3438 : : return false;
3439 : : break;
3440 : : default:
3441 : : break;
3442 : : }
3443 : :
3444 : 6844330 : return OP_SAME (0);
3445 : :
3446 : :
3447 : 19518813 : case tcc_comparison:
3448 : 19518813 : case tcc_binary:
3449 : 19518813 : if (OP_SAME (0) && OP_SAME (1))
3450 : : return true;
3451 : :
3452 : : /* For commutative ops, allow the other order. */
3453 : 14356982 : return (commutative_tree_code (TREE_CODE (arg0))
3454 : 10479631 : && operand_equal_p (TREE_OPERAND (arg0, 0),
3455 : 10479631 : TREE_OPERAND (arg1, 1), flags)
3456 : 14501361 : && operand_equal_p (TREE_OPERAND (arg0, 1),
3457 : 144379 : TREE_OPERAND (arg1, 0), flags));
3458 : :
3459 : 1070497048 : case tcc_reference:
3460 : : /* If either of the pointer (or reference) expressions we are
3461 : : dereferencing contain a side effect, these cannot be equal,
3462 : : but their addresses can be. */
3463 : 1070497048 : if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
3464 : 1070497048 : && (TREE_SIDE_EFFECTS (arg0)
3465 : 1012654555 : || TREE_SIDE_EFFECTS (arg1)))
3466 : : return false;
3467 : :
3468 : 1070298830 : switch (TREE_CODE (arg0))
3469 : : {
3470 : 3787285 : case INDIRECT_REF:
3471 : 3787285 : if (!(flags & OEP_ADDRESS_OF))
3472 : : {
3473 : 7531100 : if (TYPE_ALIGN (TREE_TYPE (arg0))
3474 : 3765550 : != TYPE_ALIGN (TREE_TYPE (arg1)))
3475 : : return false;
3476 : : /* Verify that the access types are compatible. */
3477 : 3763638 : if (TYPE_MAIN_VARIANT (TREE_TYPE (arg0))
3478 : 3763638 : != TYPE_MAIN_VARIANT (TREE_TYPE (arg1)))
3479 : : return false;
3480 : : }
3481 : 3733972 : flags &= ~OEP_ADDRESS_OF;
3482 : 3733972 : return OP_SAME (0);
3483 : :
3484 : 626469 : case IMAGPART_EXPR:
3485 : : /* Require the same offset. */
3486 : 626469 : if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3487 : 626469 : TYPE_SIZE (TREE_TYPE (arg1)),
3488 : : flags & ~OEP_ADDRESS_OF))
3489 : : return false;
3490 : :
3491 : : /* Fallthru. */
3492 : 2507908 : case REALPART_EXPR:
3493 : 2507908 : case VIEW_CONVERT_EXPR:
3494 : 2507908 : return OP_SAME (0);
3495 : :
3496 : 58061148 : case TARGET_MEM_REF:
3497 : 58061148 : case MEM_REF:
3498 : 58061148 : if (!(flags & OEP_ADDRESS_OF))
3499 : : {
3500 : : /* Require equal access sizes */
3501 : 14373371 : if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
3502 : 14373371 : && (!TYPE_SIZE (TREE_TYPE (arg0))
3503 : 1091607 : || !TYPE_SIZE (TREE_TYPE (arg1))
3504 : 1087107 : || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3505 : 1087107 : TYPE_SIZE (TREE_TYPE (arg1)),
3506 : : flags)))
3507 : 1090981 : return false;
3508 : : /* Verify that access happens in similar types. */
3509 : 13282390 : if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3510 : : return false;
3511 : : /* Verify that accesses are TBAA compatible. */
3512 : 12923348 : if (!alias_ptr_types_compatible_p
3513 : 12923348 : (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3514 : 12923348 : TREE_TYPE (TREE_OPERAND (arg1, 1)))
3515 : 11851797 : || (MR_DEPENDENCE_CLIQUE (arg0)
3516 : 11851797 : != MR_DEPENDENCE_CLIQUE (arg1))
3517 : 22674655 : || (MR_DEPENDENCE_BASE (arg0)
3518 : 9751307 : != MR_DEPENDENCE_BASE (arg1)))
3519 : : return false;
3520 : : /* Verify that alignment is compatible. */
3521 : 18843992 : if (TYPE_ALIGN (TREE_TYPE (arg0))
3522 : 9421996 : != TYPE_ALIGN (TREE_TYPE (arg1)))
3523 : : return false;
3524 : : }
3525 : 52998393 : flags &= ~OEP_ADDRESS_OF;
3526 : 93194465 : return (OP_SAME (0) && OP_SAME (1)
3527 : : /* TARGET_MEM_REF require equal extra operands. */
3528 : 76417128 : && (TREE_CODE (arg0) != TARGET_MEM_REF
3529 : 494590 : || (OP_SAME_WITH_NULL (2)
3530 : 254748 : && OP_SAME_WITH_NULL (3)
3531 : 250845 : && OP_SAME_WITH_NULL (4))));
3532 : :
3533 : 35780242 : case ARRAY_REF:
3534 : 35780242 : case ARRAY_RANGE_REF:
3535 : 35780242 : if (!OP_SAME (0))
3536 : : return false;
3537 : 31287353 : flags &= ~OEP_ADDRESS_OF;
3538 : : /* Compare the array index by value if it is constant first as we
3539 : : may have different types but same value here. */
3540 : 31287353 : return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3541 : 31287353 : TREE_OPERAND (arg1, 1))
3542 : 28595118 : || OP_SAME (1))
3543 : 5408486 : && OP_SAME_WITH_NULL (2)
3544 : 5408214 : && OP_SAME_WITH_NULL (3)
3545 : : /* Compare low bound and element size as with OEP_ADDRESS_OF
3546 : : we have to account for the offset of the ref. */
3547 : 39399810 : && (TREE_TYPE (TREE_OPERAND (arg0, 0))
3548 : 2704107 : == TREE_TYPE (TREE_OPERAND (arg1, 0))
3549 : 3107 : || (operand_equal_p (array_ref_low_bound
3550 : 3107 : (CONST_CAST_TREE (arg0)),
3551 : : array_ref_low_bound
3552 : 3107 : (CONST_CAST_TREE (arg1)), flags)
3553 : 3107 : && operand_equal_p (array_ref_element_size
3554 : 3107 : (CONST_CAST_TREE (arg0)),
3555 : : array_ref_element_size
3556 : 3107 : (CONST_CAST_TREE (arg1)),
3557 : : flags))));
3558 : :
3559 : 969666165 : case COMPONENT_REF:
3560 : : /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3561 : : may be NULL when we're called to compare MEM_EXPRs. */
3562 : 969666165 : if (!OP_SAME_WITH_NULL (0))
3563 : : return false;
3564 : 57608434 : {
3565 : 57608434 : bool compare_address = flags & OEP_ADDRESS_OF;
3566 : :
3567 : : /* Most of time we only need to compare FIELD_DECLs for equality.
3568 : : However when determining address look into actual offsets.
3569 : : These may match for unions and unshared record types. */
3570 : 57608434 : flags &= ~OEP_ADDRESS_OF;
3571 : 57608434 : if (!OP_SAME (1))
3572 : : {
3573 : 32413479 : if (compare_address
3574 : 691687 : && (flags & OEP_ADDRESS_OF_SAME_FIELD) == 0)
3575 : : {
3576 : 691683 : tree field0 = TREE_OPERAND (arg0, 1);
3577 : 691683 : tree field1 = TREE_OPERAND (arg1, 1);
3578 : :
3579 : : /* Non-FIELD_DECL operands can appear in C++ templates. */
3580 : 691683 : if (TREE_CODE (field0) != FIELD_DECL
3581 : 691683 : || TREE_CODE (field1) != FIELD_DECL
3582 : 691683 : || !operand_equal_p (DECL_FIELD_OFFSET (field0),
3583 : 691683 : DECL_FIELD_OFFSET (field1), flags)
3584 : 1141087 : || !operand_equal_p (DECL_FIELD_BIT_OFFSET (field0),
3585 : 224702 : DECL_FIELD_BIT_OFFSET (field1),
3586 : : flags))
3587 : 654908 : return false;
3588 : : }
3589 : : else
3590 : : return false;
3591 : : }
3592 : : }
3593 : 25231730 : return OP_SAME_WITH_NULL (2);
3594 : :
3595 : 496082 : case BIT_FIELD_REF:
3596 : 496082 : if (!OP_SAME (0))
3597 : : return false;
3598 : 369291 : flags &= ~OEP_ADDRESS_OF;
3599 : 369291 : return OP_SAME (1) && OP_SAME (2);
3600 : :
3601 : : default:
3602 : : return false;
3603 : : }
3604 : :
3605 : 30616929 : case tcc_expression:
3606 : 30616929 : switch (TREE_CODE (arg0))
3607 : : {
3608 : 29073920 : case ADDR_EXPR:
3609 : : /* Be sure we pass right ADDRESS_OF flag. */
3610 : 29073920 : gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3611 : 29073920 : return operand_equal_p (TREE_OPERAND (arg0, 0),
3612 : 29073920 : TREE_OPERAND (arg1, 0),
3613 : 29073920 : flags | OEP_ADDRESS_OF);
3614 : :
3615 : 1151723 : case TRUTH_NOT_EXPR:
3616 : 1151723 : return OP_SAME (0);
3617 : :
3618 : 34008 : case TRUTH_ANDIF_EXPR:
3619 : 34008 : case TRUTH_ORIF_EXPR:
3620 : 34008 : return OP_SAME (0) && OP_SAME (1);
3621 : :
3622 : 0 : case WIDEN_MULT_PLUS_EXPR:
3623 : 0 : case WIDEN_MULT_MINUS_EXPR:
3624 : 0 : if (!OP_SAME (2))
3625 : : return false;
3626 : : /* The multiplcation operands are commutative. */
3627 : : /* FALLTHRU */
3628 : :
3629 : 17818 : case TRUTH_AND_EXPR:
3630 : 17818 : case TRUTH_OR_EXPR:
3631 : 17818 : case TRUTH_XOR_EXPR:
3632 : 17818 : if (OP_SAME (0) && OP_SAME (1))
3633 : : return true;
3634 : :
3635 : : /* Otherwise take into account this is a commutative operation. */
3636 : 17800 : return (operand_equal_p (TREE_OPERAND (arg0, 0),
3637 : 17800 : TREE_OPERAND (arg1, 1), flags)
3638 : 17801 : && operand_equal_p (TREE_OPERAND (arg0, 1),
3639 : 1 : TREE_OPERAND (arg1, 0), flags));
3640 : :
3641 : 103790 : case COND_EXPR:
3642 : 103790 : if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
3643 : 47742 : return false;
3644 : 56048 : flags &= ~OEP_ADDRESS_OF;
3645 : 56048 : return OP_SAME (0);
3646 : :
3647 : 0 : case BIT_INSERT_EXPR:
3648 : : /* BIT_INSERT_EXPR has an implict operand as the type precision
3649 : : of op1. Need to check to make sure they are the same. */
3650 : 0 : if (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
3651 : 0 : && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
3652 : 0 : && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 1)))
3653 : 0 : != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 1))))
3654 : : return false;
3655 : : /* FALLTHRU */
3656 : :
3657 : 269 : case VEC_COND_EXPR:
3658 : 269 : case DOT_PROD_EXPR:
3659 : 269 : return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3660 : :
3661 : 16329 : case MODIFY_EXPR:
3662 : 16329 : case INIT_EXPR:
3663 : 16329 : case COMPOUND_EXPR:
3664 : 16329 : case PREDECREMENT_EXPR:
3665 : 16329 : case PREINCREMENT_EXPR:
3666 : 16329 : case POSTDECREMENT_EXPR:
3667 : 16329 : case POSTINCREMENT_EXPR:
3668 : 16329 : if (flags & OEP_LEXICOGRAPHIC)
3669 : 204 : return OP_SAME (0) && OP_SAME (1);
3670 : : return false;
3671 : :
3672 : 81811 : case CLEANUP_POINT_EXPR:
3673 : 81811 : case EXPR_STMT:
3674 : 81811 : case SAVE_EXPR:
3675 : 81811 : if (flags & OEP_LEXICOGRAPHIC)
3676 : 274 : return OP_SAME (0);
3677 : : return false;
3678 : :
3679 : 96524 : case OBJ_TYPE_REF:
3680 : : /* Virtual table reference. */
3681 : 193048 : if (!operand_equal_p (OBJ_TYPE_REF_EXPR (arg0),
3682 : 96524 : OBJ_TYPE_REF_EXPR (arg1), flags))
3683 : : return false;
3684 : 891 : flags &= ~OEP_ADDRESS_OF;
3685 : 891 : if (tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg0))
3686 : 891 : != tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg1)))
3687 : : return false;
3688 : 891 : if (!operand_equal_p (OBJ_TYPE_REF_OBJECT (arg0),
3689 : 891 : OBJ_TYPE_REF_OBJECT (arg1), flags))
3690 : : return false;
3691 : 891 : if (virtual_method_call_p (arg0))
3692 : : {
3693 : 891 : if (!virtual_method_call_p (arg1))
3694 : : return false;
3695 : 891 : return types_same_for_odr (obj_type_ref_class (arg0),
3696 : 1782 : obj_type_ref_class (arg1));
3697 : : }
3698 : : return false;
3699 : :
3700 : : default:
3701 : : return false;
3702 : : }
3703 : :
3704 : 2692421 : case tcc_vl_exp:
3705 : 2692421 : switch (TREE_CODE (arg0))
3706 : : {
3707 : 2692421 : case CALL_EXPR:
3708 : 2692421 : if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3709 : 2692421 : != (CALL_EXPR_FN (arg1) == NULL_TREE))
3710 : : /* If not both CALL_EXPRs are either internal or normal function
3711 : : functions, then they are not equal. */
3712 : : return false;
3713 : 2692421 : else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3714 : : {
3715 : : /* If the CALL_EXPRs call different internal functions, then they
3716 : : are not equal. */
3717 : 3 : if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3718 : : return false;
3719 : : }
3720 : : else
3721 : : {
3722 : : /* If the CALL_EXPRs call different functions, then they are not
3723 : : equal. */
3724 : 2692418 : if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3725 : : flags))
3726 : : return false;
3727 : : }
3728 : :
3729 : : /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3730 : 1815969 : {
3731 : 1815969 : unsigned int cef = call_expr_flags (arg0);
3732 : 1815969 : if (flags & OEP_PURE_SAME)
3733 : 0 : cef &= ECF_CONST | ECF_PURE;
3734 : : else
3735 : 1815969 : cef &= ECF_CONST;
3736 : 1815969 : if (!cef && !(flags & OEP_LEXICOGRAPHIC))
3737 : : return false;
3738 : : }
3739 : :
3740 : : /* Now see if all the arguments are the same. */
3741 : 86243 : {
3742 : 86243 : const_call_expr_arg_iterator iter0, iter1;
3743 : 86243 : const_tree a0, a1;
3744 : 172486 : for (a0 = first_const_call_expr_arg (arg0, &iter0),
3745 : 86243 : a1 = first_const_call_expr_arg (arg1, &iter1);
3746 : 94311 : a0 && a1;
3747 : 8068 : a0 = next_const_call_expr_arg (&iter0),
3748 : 8068 : a1 = next_const_call_expr_arg (&iter1))
3749 : 83858 : if (! operand_equal_p (a0, a1, flags))
3750 : : return false;
3751 : :
3752 : : /* If we get here and both argument lists are exhausted
3753 : : then the CALL_EXPRs are equal. */
3754 : 10453 : return ! (a0 || a1);
3755 : : }
3756 : : default:
3757 : : return false;
3758 : : }
3759 : :
3760 : 143437954 : case tcc_declaration:
3761 : : /* Consider __builtin_sqrt equal to sqrt. */
3762 : 143437954 : if (TREE_CODE (arg0) == FUNCTION_DECL)
3763 : 5800939 : return (fndecl_built_in_p (arg0) && fndecl_built_in_p (arg1)
3764 : 239476 : && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3765 : 5288452 : && (DECL_UNCHECKED_FUNCTION_CODE (arg0)
3766 : 239476 : == DECL_UNCHECKED_FUNCTION_CODE (arg1)));
3767 : :
3768 : 138149502 : if (DECL_P (arg0)
3769 : : && (flags & OEP_DECL_NAME)
3770 : 138149502 : && (flags & OEP_LEXICOGRAPHIC))
3771 : : {
3772 : : /* Consider decls with the same name equal. The caller needs
3773 : : to make sure they refer to the same entity (such as a function
3774 : : formal parameter). */
3775 : 34 : tree a0name = DECL_NAME (arg0);
3776 : 34 : tree a1name = DECL_NAME (arg1);
3777 : 68 : const char *a0ns = a0name ? IDENTIFIER_POINTER (a0name) : NULL;
3778 : 68 : const char *a1ns = a1name ? IDENTIFIER_POINTER (a1name) : NULL;
3779 : 59 : return a0ns && a1ns && strcmp (a0ns, a1ns) == 0;
3780 : : }
3781 : : return false;
3782 : :
3783 : 271958140 : case tcc_exceptional:
3784 : 271958140 : if (TREE_CODE (arg0) == CONSTRUCTOR)
3785 : : {
3786 : 20115 : if (CONSTRUCTOR_NO_CLEARING (arg0) != CONSTRUCTOR_NO_CLEARING (arg1))
3787 : : return false;
3788 : :
3789 : : /* In GIMPLE constructors are used only to build vectors from
3790 : : elements. Individual elements in the constructor must be
3791 : : indexed in increasing order and form an initial sequence.
3792 : :
3793 : : We make no effort to compare nonconstant ones in GENERIC. */
3794 : 20115 : if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3795 : 20115 : || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3796 : : return false;
3797 : :
3798 : : /* Be sure that vectors constructed have the same representation.
3799 : : We only tested element precision and modes to match.
3800 : : Vectors may be BLKmode and thus also check that the number of
3801 : : parts match. */
3802 : 672 : if (maybe_ne (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)),
3803 : 1344 : TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1))))
3804 : : return false;
3805 : :
3806 : 672 : vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3807 : 672 : vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3808 : 672 : unsigned int len = vec_safe_length (v0);
3809 : :
3810 : 1344 : if (len != vec_safe_length (v1))
3811 : : return false;
3812 : :
3813 : 2601 : for (unsigned int i = 0; i < len; i++)
3814 : : {
3815 : 1984 : constructor_elt *c0 = &(*v0)[i];
3816 : 1984 : constructor_elt *c1 = &(*v1)[i];
3817 : :
3818 : 1984 : if (!operand_equal_p (c0->value, c1->value, flags)
3819 : : /* In GIMPLE the indexes can be either NULL or matching i.
3820 : : Double check this so we won't get false
3821 : : positives for GENERIC. */
3822 : 1929 : || (c0->index
3823 : 700 : && (TREE_CODE (c0->index) != INTEGER_CST
3824 : 700 : || compare_tree_int (c0->index, i)))
3825 : 3913 : || (c1->index
3826 : 700 : && (TREE_CODE (c1->index) != INTEGER_CST
3827 : 700 : || compare_tree_int (c1->index, i))))
3828 : 55 : return false;
3829 : : }
3830 : : return true;
3831 : : }
3832 : 271938025 : else if (TREE_CODE (arg0) == STATEMENT_LIST
3833 : 2780 : && (flags & OEP_LEXICOGRAPHIC))
3834 : : {
3835 : : /* Compare the STATEMENT_LISTs. */
3836 : 20 : tree_stmt_iterator tsi1, tsi2;
3837 : 20 : tree body1 = CONST_CAST_TREE (arg0);
3838 : 20 : tree body2 = CONST_CAST_TREE (arg1);
3839 : 70 : for (tsi1 = tsi_start (body1), tsi2 = tsi_start (body2); ;
3840 : 50 : tsi_next (&tsi1), tsi_next (&tsi2))
3841 : : {
3842 : : /* The lists don't have the same number of statements. */
3843 : 70 : if (tsi_end_p (tsi1) ^ tsi_end_p (tsi2))
3844 : : return false;
3845 : 70 : if (tsi_end_p (tsi1) && tsi_end_p (tsi2))
3846 : : return true;
3847 : 50 : if (!operand_equal_p (tsi_stmt (tsi1), tsi_stmt (tsi2),
3848 : : flags & (OEP_LEXICOGRAPHIC
3849 : : | OEP_NO_HASH_CHECK)))
3850 : : return false;
3851 : : }
3852 : : }
3853 : : return false;
3854 : :
3855 : 2058914 : case tcc_statement:
3856 : 2058914 : switch (TREE_CODE (arg0))
3857 : : {
3858 : 65 : case RETURN_EXPR:
3859 : 65 : if (flags & OEP_LEXICOGRAPHIC)
3860 : 65 : return OP_SAME_WITH_NULL (0);
3861 : : return false;
3862 : 5 : case DEBUG_BEGIN_STMT:
3863 : 5 : if (flags & OEP_LEXICOGRAPHIC)
3864 : : return true;
3865 : : return false;
3866 : : default:
3867 : : return false;
3868 : : }
3869 : :
3870 : : default:
3871 : : return false;
3872 : : }
3873 : :
3874 : : #undef OP_SAME
3875 : : #undef OP_SAME_WITH_NULL
3876 : : }
3877 : :
3878 : : /* Generate a hash value for an expression. This can be used iteratively
3879 : : by passing a previous result as the HSTATE argument. */
3880 : :
3881 : : void
3882 : 3827501228 : operand_compare::hash_operand (const_tree t, inchash::hash &hstate,
3883 : : unsigned int flags)
3884 : : {
3885 : 3827501228 : int i;
3886 : 3827501228 : enum tree_code code;
3887 : 3827501228 : enum tree_code_class tclass;
3888 : :
3889 : 3827501228 : if (t == NULL_TREE || t == error_mark_node)
3890 : : {
3891 : 76933642 : hstate.merge_hash (0);
3892 : 76933642 : return;
3893 : : }
3894 : :
3895 : 3750567586 : STRIP_ANY_LOCATION_WRAPPER (t);
3896 : :
3897 : 3750567586 : if (!(flags & OEP_ADDRESS_OF))
3898 : 3526338964 : STRIP_NOPS (t);
3899 : :
3900 : 3750567586 : code = TREE_CODE (t);
3901 : :
3902 : 3750567586 : switch (code)
3903 : : {
3904 : : /* Alas, constants aren't shared, so we can't rely on pointer
3905 : : identity. */
3906 : 246 : case VOID_CST:
3907 : 246 : hstate.merge_hash (0);
3908 : 246 : return;
3909 : 1944885788 : case INTEGER_CST:
3910 : 1944885788 : gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3911 : 3914604411 : for (i = 0; i < TREE_INT_CST_EXT_NUNITS (t); i++)
3912 : 1969718623 : hstate.add_hwi (TREE_INT_CST_ELT (t, i));
3913 : : return;
3914 : 15088559 : case REAL_CST:
3915 : 15088559 : {
3916 : 15088559 : unsigned int val2;
3917 : 15088559 : if (!HONOR_SIGNED_ZEROS (t) && real_zerop (t))
3918 : : val2 = rvc_zero;
3919 : : else
3920 : 14895734 : val2 = real_hash (TREE_REAL_CST_PTR (t));
3921 : 15088559 : hstate.merge_hash (val2);
3922 : 15088559 : return;
3923 : : }
3924 : 0 : case FIXED_CST:
3925 : 0 : {
3926 : 0 : unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
3927 : 0 : hstate.merge_hash (val2);
3928 : 0 : return;
3929 : : }
3930 : 8869090 : case STRING_CST:
3931 : 8869090 : hstate.add ((const void *) TREE_STRING_POINTER (t),
3932 : 8869090 : TREE_STRING_LENGTH (t));
3933 : 8869090 : return;
3934 : 208243 : case COMPLEX_CST:
3935 : 208243 : hash_operand (TREE_REALPART (t), hstate, flags);
3936 : 208243 : hash_operand (TREE_IMAGPART (t), hstate, flags);
3937 : 208243 : return;
3938 : 2389675 : case VECTOR_CST:
3939 : 2389675 : {
3940 : 2389675 : hstate.add_int (VECTOR_CST_NPATTERNS (t));
3941 : 2389675 : hstate.add_int (VECTOR_CST_NELTS_PER_PATTERN (t));
3942 : 2389675 : unsigned int count = vector_cst_encoded_nelts (t);
3943 : 7335590 : for (unsigned int i = 0; i < count; ++i)
3944 : 4945915 : hash_operand (VECTOR_CST_ENCODED_ELT (t, i), hstate, flags);
3945 : : return;
3946 : : }
3947 : 762576182 : case SSA_NAME:
3948 : : /* We can just compare by pointer. */
3949 : 762576182 : hstate.add_hwi (SSA_NAME_VERSION (t));
3950 : 762576182 : return;
3951 : : case PLACEHOLDER_EXPR:
3952 : : /* The node itself doesn't matter. */
3953 : : return;
3954 : : case BLOCK:
3955 : : case OMP_CLAUSE:
3956 : : /* Ignore. */
3957 : : return;
3958 : : case TREE_LIST:
3959 : : /* A list of expressions, for a CALL_EXPR or as the elements of a
3960 : : VECTOR_CST. */
3961 : 325528 : for (; t; t = TREE_CHAIN (t))
3962 : 162764 : hash_operand (TREE_VALUE (t), hstate, flags);
3963 : : return;
3964 : 4898927 : case CONSTRUCTOR:
3965 : 4898927 : {
3966 : 4898927 : unsigned HOST_WIDE_INT idx;
3967 : 4898927 : tree field, value;
3968 : 4898927 : flags &= ~OEP_ADDRESS_OF;
3969 : 4898927 : hstate.add_int (CONSTRUCTOR_NO_CLEARING (t));
3970 : 19710202 : FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
3971 : : {
3972 : : /* In GIMPLE the indexes can be either NULL or matching i. */
3973 : 14811275 : if (field == NULL_TREE)
3974 : 1155915 : field = bitsize_int (idx);
3975 : 14811275 : if (TREE_CODE (field) == FIELD_DECL)
3976 : : {
3977 : 10126381 : hash_operand (DECL_FIELD_OFFSET (field), hstate, flags);
3978 : 10126381 : hash_operand (DECL_FIELD_BIT_OFFSET (field), hstate, flags);
3979 : : }
3980 : : else
3981 : 4684894 : hash_operand (field, hstate, flags);
3982 : 14811275 : hash_operand (value, hstate, flags);
3983 : : }
3984 : : return;
3985 : : }
3986 : 230 : case STATEMENT_LIST:
3987 : 230 : {
3988 : 230 : tree_stmt_iterator i;
3989 : 230 : for (i = tsi_start (CONST_CAST_TREE (t));
3990 : 690 : !tsi_end_p (i); tsi_next (&i))
3991 : 460 : hash_operand (tsi_stmt (i), hstate, flags);
3992 : 230 : return;
3993 : : }
3994 : : case TREE_VEC:
3995 : 32 : for (i = 0; i < TREE_VEC_LENGTH (t); ++i)
3996 : 16 : hash_operand (TREE_VEC_ELT (t, i), hstate, flags);
3997 : : return;
3998 : 5 : case IDENTIFIER_NODE:
3999 : 5 : hstate.add_object (IDENTIFIER_HASH_VALUE (t));
4000 : 5 : return;
4001 : 19170305 : case FUNCTION_DECL:
4002 : : /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
4003 : : Otherwise nodes that compare equal according to operand_equal_p might
4004 : : get different hash codes. However, don't do this for machine specific
4005 : : or front end builtins, since the function code is overloaded in those
4006 : : cases. */
4007 : 19170305 : if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
4008 : 19170305 : && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
4009 : : {
4010 : 5841457 : t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
4011 : 5841457 : code = TREE_CODE (t);
4012 : : }
4013 : : /* FALL THROUGH */
4014 : 1011487713 : default:
4015 : 1011487713 : if (POLY_INT_CST_P (t))
4016 : : {
4017 : : for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
4018 : : hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
4019 : : return;
4020 : : }
4021 : 1011487713 : tclass = TREE_CODE_CLASS (code);
4022 : :
4023 : 1011487713 : if (tclass == tcc_declaration)
4024 : : {
4025 : : /* DECL's have a unique ID */
4026 : 708600649 : hstate.add_hwi (DECL_UID (t));
4027 : : }
4028 : 302887064 : else if (tclass == tcc_comparison && !commutative_tree_code (code))
4029 : : {
4030 : : /* For comparisons that can be swapped, use the lower
4031 : : tree code. */
4032 : 116011 : enum tree_code ccode = swap_tree_comparison (code);
4033 : 116011 : if (code < ccode)
4034 : 47679 : ccode = code;
4035 : 116011 : hstate.add_object (ccode);
4036 : 116011 : hash_operand (TREE_OPERAND (t, ccode != code), hstate, flags);
4037 : 116011 : hash_operand (TREE_OPERAND (t, ccode == code), hstate, flags);
4038 : : }
4039 : 302771053 : else if (CONVERT_EXPR_CODE_P (code))
4040 : : {
4041 : : /* NOP_EXPR and CONVERT_EXPR are considered equal by
4042 : : operand_equal_p. */
4043 : 4665892 : enum tree_code ccode = NOP_EXPR;
4044 : 4665892 : hstate.add_object (ccode);
4045 : :
4046 : : /* Don't hash the type, that can lead to having nodes which
4047 : : compare equal according to operand_equal_p, but which
4048 : : have different hash codes. Make sure to include signedness
4049 : : in the hash computation. */
4050 : 4665892 : hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
4051 : 4665892 : hash_operand (TREE_OPERAND (t, 0), hstate, flags);
4052 : : }
4053 : : /* For OEP_ADDRESS_OF, hash MEM_EXPR[&decl, 0] the same as decl. */
4054 : 298105161 : else if (code == MEM_REF
4055 : 71759577 : && (flags & OEP_ADDRESS_OF) != 0
4056 : 63574179 : && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
4057 : 9901738 : && DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
4058 : 307868583 : && integer_zerop (TREE_OPERAND (t, 1)))
4059 : 4547562 : hash_operand (TREE_OPERAND (TREE_OPERAND (t, 0), 0),
4060 : : hstate, flags);
4061 : : /* Don't ICE on FE specific trees, or their arguments etc.
4062 : : during operand_equal_p hash verification. */
4063 : 293557599 : else if (!IS_EXPR_CODE_CLASS (tclass))
4064 : 154 : gcc_assert (flags & OEP_HASH_CHECK);
4065 : : else
4066 : : {
4067 : 293557445 : unsigned int sflags = flags;
4068 : :
4069 : 293557445 : hstate.add_object (code);
4070 : :
4071 : 293557445 : switch (code)
4072 : : {
4073 : 109491279 : case ADDR_EXPR:
4074 : 109491279 : gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
4075 : 109491279 : flags |= OEP_ADDRESS_OF;
4076 : 109491279 : sflags = flags;
4077 : 109491279 : break;
4078 : :
4079 : 71338166 : case INDIRECT_REF:
4080 : 71338166 : case MEM_REF:
4081 : 71338166 : case TARGET_MEM_REF:
4082 : 71338166 : flags &= ~OEP_ADDRESS_OF;
4083 : 71338166 : sflags = flags;
4084 : 71338166 : break;
4085 : :
4086 : 76879743 : case COMPONENT_REF:
4087 : 76879743 : if (sflags & OEP_ADDRESS_OF)
4088 : : {
4089 : 34891058 : hash_operand (TREE_OPERAND (t, 0), hstate, flags);
4090 : 34891058 : hash_operand (DECL_FIELD_OFFSET (TREE_OPERAND (t, 1)),
4091 : : hstate, flags & ~OEP_ADDRESS_OF);
4092 : 34891058 : hash_operand (DECL_FIELD_BIT_OFFSET (TREE_OPERAND (t, 1)),
4093 : : hstate, flags & ~OEP_ADDRESS_OF);
4094 : 34891058 : return;
4095 : : }
4096 : : break;
4097 : 14043539 : case ARRAY_REF:
4098 : 14043539 : case ARRAY_RANGE_REF:
4099 : 14043539 : case BIT_FIELD_REF:
4100 : 14043539 : sflags &= ~OEP_ADDRESS_OF;
4101 : 14043539 : break;
4102 : :
4103 : 8617 : case COND_EXPR:
4104 : 8617 : flags &= ~OEP_ADDRESS_OF;
4105 : 8617 : break;
4106 : :
4107 : 0 : case WIDEN_MULT_PLUS_EXPR:
4108 : 0 : case WIDEN_MULT_MINUS_EXPR:
4109 : 0 : {
4110 : : /* The multiplication operands are commutative. */
4111 : 0 : inchash::hash one, two;
4112 : 0 : hash_operand (TREE_OPERAND (t, 0), one, flags);
4113 : 0 : hash_operand (TREE_OPERAND (t, 1), two, flags);
4114 : 0 : hstate.add_commutative (one, two);
4115 : 0 : hash_operand (TREE_OPERAND (t, 2), two, flags);
4116 : 0 : return;
4117 : : }
4118 : :
4119 : 57210 : case CALL_EXPR:
4120 : 57210 : if (CALL_EXPR_FN (t) == NULL_TREE)
4121 : 0 : hstate.add_int (CALL_EXPR_IFN (t));
4122 : : break;
4123 : :
4124 : 80 : case TARGET_EXPR:
4125 : : /* For TARGET_EXPR, just hash on the TARGET_EXPR_SLOT.
4126 : : Usually different TARGET_EXPRs just should use
4127 : : different temporaries in their slots. */
4128 : 80 : hash_operand (TARGET_EXPR_SLOT (t), hstate, flags);
4129 : 80 : return;
4130 : :
4131 : 197609 : case OBJ_TYPE_REF:
4132 : : /* Virtual table reference. */
4133 : 197609 : inchash::add_expr (OBJ_TYPE_REF_EXPR (t), hstate, flags);
4134 : 197609 : flags &= ~OEP_ADDRESS_OF;
4135 : 197609 : inchash::add_expr (OBJ_TYPE_REF_TOKEN (t), hstate, flags);
4136 : 197609 : inchash::add_expr (OBJ_TYPE_REF_OBJECT (t), hstate, flags);
4137 : 197609 : if (!virtual_method_call_p (t))
4138 : : return;
4139 : 197594 : if (tree c = obj_type_ref_class (t))
4140 : : {
4141 : 197594 : c = TYPE_NAME (TYPE_MAIN_VARIANT (c));
4142 : : /* We compute mangled names only when free_lang_data is run.
4143 : : In that case we can hash precisely. */
4144 : 197594 : if (TREE_CODE (c) == TYPE_DECL
4145 : 197594 : && DECL_ASSEMBLER_NAME_SET_P (c))
4146 : 667 : hstate.add_object
4147 : 667 : (IDENTIFIER_HASH_VALUE
4148 : : (DECL_ASSEMBLER_NAME (c)));
4149 : : }
4150 : 197594 : return;
4151 : : default:
4152 : : break;
4153 : : }
4154 : :
4155 : : /* Don't hash the type, that can lead to having nodes which
4156 : : compare equal according to operand_equal_p, but which
4157 : : have different hash codes. */
4158 : 258468698 : if (code == NON_LVALUE_EXPR)
4159 : : {
4160 : : /* Make sure to include signness in the hash computation. */
4161 : 0 : hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
4162 : 0 : hash_operand (TREE_OPERAND (t, 0), hstate, flags);
4163 : : }
4164 : :
4165 : 258468698 : else if (commutative_tree_code (code))
4166 : : {
4167 : : /* It's a commutative expression. We want to hash it the same
4168 : : however it appears. We do this by first hashing both operands
4169 : : and then rehashing based on the order of their independent
4170 : : hashes. */
4171 : 15260138 : inchash::hash one, two;
4172 : 15260138 : hash_operand (TREE_OPERAND (t, 0), one, flags);
4173 : 15260138 : hash_operand (TREE_OPERAND (t, 1), two, flags);
4174 : 15260138 : hstate.add_commutative (one, two);
4175 : : }
4176 : : else
4177 : 694837218 : for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
4178 : 660048804 : hash_operand (TREE_OPERAND (t, i), hstate,
4179 : : i == 0 ? flags : sflags);
4180 : : }
4181 : : return;
4182 : : }
4183 : : }
4184 : :
4185 : : bool
4186 : 6005804787 : operand_compare::verify_hash_value (const_tree arg0, const_tree arg1,
4187 : : unsigned int flags, bool *ret)
4188 : : {
4189 : : /* When checking and unless comparing DECL names, verify that if
4190 : : the outermost operand_equal_p call returns non-zero then ARG0
4191 : : and ARG1 have the same hash value. */
4192 : 6005804787 : if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
4193 : : {
4194 : 2358945107 : if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
4195 : : {
4196 : 395647543 : if (arg0 != arg1 && !(flags & OEP_DECL_NAME))
4197 : : {
4198 : 81186545 : inchash::hash hstate0 (0), hstate1 (0);
4199 : 81186545 : hash_operand (arg0, hstate0, flags | OEP_HASH_CHECK);
4200 : 81186545 : hash_operand (arg1, hstate1, flags | OEP_HASH_CHECK);
4201 : 81186545 : hashval_t h0 = hstate0.end ();
4202 : 81186545 : hashval_t h1 = hstate1.end ();
4203 : 81186545 : gcc_assert (h0 == h1);
4204 : : }
4205 : 395647543 : *ret = true;
4206 : : }
4207 : : else
4208 : 1963297564 : *ret = false;
4209 : :
4210 : 2358945107 : return true;
4211 : : }
4212 : :
4213 : : return false;
4214 : : }
4215 : :
4216 : :
4217 : : static operand_compare default_compare_instance;
4218 : :
4219 : : /* Conveinece wrapper around operand_compare class because usually we do
4220 : : not need to play with the valueizer. */
4221 : :
4222 : : bool
4223 : 2357024495 : operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
4224 : : {
4225 : 2357024495 : return default_compare_instance.operand_equal_p (arg0, arg1, flags);
4226 : : }
4227 : :
4228 : : namespace inchash
4229 : : {
4230 : :
4231 : : /* Generate a hash value for an expression. This can be used iteratively
4232 : : by passing a previous result as the HSTATE argument.
4233 : :
4234 : : This function is intended to produce the same hash for expressions which
4235 : : would compare equal using operand_equal_p. */
4236 : : void
4237 : 3021462276 : add_expr (const_tree t, inchash::hash &hstate, unsigned int flags)
4238 : : {
4239 : 3021462276 : default_compare_instance.hash_operand (t, hstate, flags);
4240 : 3021462276 : }
4241 : :
4242 : : }
4243 : :
4244 : : /* Similar to operand_equal_p, but see if ARG0 might be a variant of ARG1
4245 : : with a different signedness or a narrower precision. */
4246 : :
4247 : : static bool
4248 : 15223505 : operand_equal_for_comparison_p (tree arg0, tree arg1)
4249 : : {
4250 : 15223505 : if (operand_equal_p (arg0, arg1, 0))
4251 : : return true;
4252 : :
4253 : 28921076 : if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
4254 : 24338496 : || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
4255 : : return false;
4256 : :
4257 : : /* Discard any conversions that don't change the modes of ARG0 and ARG1
4258 : : and see if the inner values are the same. This removes any
4259 : : signedness comparison, which doesn't matter here. */
4260 : 4187661 : tree op0 = arg0;
4261 : 4187661 : tree op1 = arg1;
4262 : 4187661 : STRIP_NOPS (op0);
4263 : 4187661 : STRIP_NOPS (op1);
4264 : 4187661 : if (operand_equal_p (op0, op1, 0))
4265 : : return true;
4266 : :
4267 : : /* Discard a single widening conversion from ARG1 and see if the inner
4268 : : value is the same as ARG0. */
4269 : 3340520 : if (CONVERT_EXPR_P (arg1)
4270 : 663986 : && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0)))
4271 : 663947 : && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))
4272 : 663947 : < TYPE_PRECISION (TREE_TYPE (arg1))
4273 : 4277488 : && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
4274 : : return true;
4275 : :
4276 : : return false;
4277 : : }
4278 : :
4279 : : /* See if ARG is an expression that is either a comparison or is performing
4280 : : arithmetic on comparisons. The comparisons must only be comparing
4281 : : two different values, which will be stored in *CVAL1 and *CVAL2; if
4282 : : they are nonzero it means that some operands have already been found.
4283 : : No variables may be used anywhere else in the expression except in the
4284 : : comparisons.
4285 : :
4286 : : If this is true, return 1. Otherwise, return zero. */
4287 : :
4288 : : static bool
4289 : 53161436 : twoval_comparison_p (tree arg, tree *cval1, tree *cval2)
4290 : : {
4291 : 57072238 : enum tree_code code = TREE_CODE (arg);
4292 : 57072238 : enum tree_code_class tclass = TREE_CODE_CLASS (code);
4293 : :
4294 : : /* We can handle some of the tcc_expression cases here. */
4295 : 57072238 : if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
4296 : : tclass = tcc_unary;
4297 : 56554705 : else if (tclass == tcc_expression
4298 : 370089 : && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
4299 : 370089 : || code == COMPOUND_EXPR))
4300 : : tclass = tcc_binary;
4301 : :
4302 : 56544107 : switch (tclass)
4303 : : {
4304 : 3910802 : case tcc_unary:
4305 : 3910802 : return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2);
4306 : :
4307 : 4941025 : case tcc_binary:
4308 : 4941025 : return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
4309 : 4941025 : && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2));
4310 : :
4311 : : case tcc_constant:
4312 : : return true;
4313 : :
4314 : 359491 : case tcc_expression:
4315 : 359491 : if (code == COND_EXPR)
4316 : 777 : return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
4317 : 777 : && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2)
4318 : 842 : && twoval_comparison_p (TREE_OPERAND (arg, 2), cval1, cval2));
4319 : : return false;
4320 : :
4321 : 522302 : case tcc_comparison:
4322 : : /* First see if we can handle the first operand, then the second. For
4323 : : the second operand, we know *CVAL1 can't be zero. It must be that
4324 : : one side of the comparison is each of the values; test for the
4325 : : case where this isn't true by failing if the two operands
4326 : : are the same. */
4327 : :
4328 : 522302 : if (operand_equal_p (TREE_OPERAND (arg, 0),
4329 : 522302 : TREE_OPERAND (arg, 1), 0))
4330 : : return false;
4331 : :
4332 : 522302 : if (*cval1 == 0)
4333 : 520303 : *cval1 = TREE_OPERAND (arg, 0);
4334 : 1999 : else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
4335 : : ;
4336 : 1890 : else if (*cval2 == 0)
4337 : 0 : *cval2 = TREE_OPERAND (arg, 0);
4338 : 1890 : else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
4339 : : ;
4340 : : else
4341 : : return false;
4342 : :
4343 : 520412 : if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
4344 : : ;
4345 : 520412 : else if (*cval2 == 0)
4346 : 520303 : *cval2 = TREE_OPERAND (arg, 1);
4347 : 109 : else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
4348 : : ;
4349 : : else
4350 : : return false;
4351 : :
4352 : : return true;
4353 : :
4354 : : default:
4355 : : return false;
4356 : : }
4357 : : }
4358 : :
4359 : : /* ARG is a tree that is known to contain just arithmetic operations and
4360 : : comparisons. Evaluate the operations in the tree substituting NEW0 for
4361 : : any occurrence of OLD0 as an operand of a comparison and likewise for
4362 : : NEW1 and OLD1. */
4363 : :
4364 : : static tree
4365 : 780 : eval_subst (location_t loc, tree arg, tree old0, tree new0,
4366 : : tree old1, tree new1)
4367 : : {
4368 : 780 : tree type = TREE_TYPE (arg);
4369 : 780 : enum tree_code code = TREE_CODE (arg);
4370 : 780 : enum tree_code_class tclass = TREE_CODE_CLASS (code);
4371 : :
4372 : : /* We can handle some of the tcc_expression cases here. */
4373 : 780 : if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
4374 : : tclass = tcc_unary;
4375 : 780 : else if (tclass == tcc_expression
4376 : 21 : && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
4377 : : tclass = tcc_binary;
4378 : :
4379 : 771 : switch (tclass)
4380 : : {
4381 : 186 : case tcc_unary:
4382 : 186 : return fold_build1_loc (loc, code, type,
4383 : 186 : eval_subst (loc, TREE_OPERAND (arg, 0),
4384 : 186 : old0, new0, old1, new1));
4385 : :
4386 : 186 : case tcc_binary:
4387 : 372 : return fold_build2_loc (loc, code, type,
4388 : 186 : eval_subst (loc, TREE_OPERAND (arg, 0),
4389 : : old0, new0, old1, new1),
4390 : 186 : eval_subst (loc, TREE_OPERAND (arg, 1),
4391 : 186 : old0, new0, old1, new1));
4392 : :
4393 : 12 : case tcc_expression:
4394 : 12 : switch (code)
4395 : : {
4396 : 0 : case SAVE_EXPR:
4397 : 0 : return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
4398 : 0 : old1, new1);
4399 : :
4400 : 0 : case COMPOUND_EXPR:
4401 : 0 : return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
4402 : 0 : old1, new1);
4403 : :
4404 : 12 : case COND_EXPR:
4405 : 36 : return fold_build3_loc (loc, code, type,
4406 : 12 : eval_subst (loc, TREE_OPERAND (arg, 0),
4407 : : old0, new0, old1, new1),
4408 : 12 : eval_subst (loc, TREE_OPERAND (arg, 1),
4409 : : old0, new0, old1, new1),
4410 : 12 : eval_subst (loc, TREE_OPERAND (arg, 2),
4411 : 12 : old0, new0, old1, new1));
4412 : : default:
4413 : : break;
4414 : : }
4415 : : /* Fall through - ??? */
4416 : :
4417 : 192 : case tcc_comparison:
4418 : 192 : {
4419 : 192 : tree arg0 = TREE_OPERAND (arg, 0);
4420 : 192 : tree arg1 = TREE_OPERAND (arg, 1);
4421 : :
4422 : : /* We need to check both for exact equality and tree equality. The
4423 : : former will be true if the operand has a side-effect. In that
4424 : : case, we know the operand occurred exactly once. */
4425 : :
4426 : 192 : if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
4427 : : arg0 = new0;
4428 : 0 : else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
4429 : : arg0 = new1;
4430 : :
4431 : 192 : if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
4432 : : arg1 = new0;
4433 : 192 : else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
4434 : : arg1 = new1;
4435 : :
4436 : 192 : return fold_build2_loc (loc, code, type, arg0, arg1);
4437 : : }
4438 : :
4439 : : default:
4440 : : return arg;
4441 : : }
4442 : : }
4443 : :
4444 : : /* Return a tree for the case when the result of an expression is RESULT
4445 : : converted to TYPE and OMITTED was previously an operand of the expression
4446 : : but is now not needed (e.g., we folded OMITTED * 0).
4447 : :
4448 : : If OMITTED has side effects, we must evaluate it. Otherwise, just do
4449 : : the conversion of RESULT to TYPE. */
4450 : :
4451 : : tree
4452 : 258614 : omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
4453 : : {
4454 : 258614 : tree t = fold_convert_loc (loc, type, result);
4455 : :
4456 : : /* If the resulting operand is an empty statement, just return the omitted
4457 : : statement casted to void. */
4458 : 258614 : if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
4459 : 0 : return build1_loc (loc, NOP_EXPR, void_type_node,
4460 : 0 : fold_ignored_result (omitted));
4461 : :
4462 : 258614 : if (TREE_SIDE_EFFECTS (omitted))
4463 : 12530 : return build2_loc (loc, COMPOUND_EXPR, type,
4464 : 12530 : fold_ignored_result (omitted), t);
4465 : :
4466 : 246084 : return non_lvalue_loc (loc, t);
4467 : : }
4468 : :
4469 : : /* Return a tree for the case when the result of an expression is RESULT
4470 : : converted to TYPE and OMITTED1 and OMITTED2 were previously operands
4471 : : of the expression but are now not needed.
4472 : :
4473 : : If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
4474 : : If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
4475 : : evaluated before OMITTED2. Otherwise, if neither has side effects,
4476 : : just do the conversion of RESULT to TYPE. */
4477 : :
4478 : : tree
4479 : 6645 : omit_two_operands_loc (location_t loc, tree type, tree result,
4480 : : tree omitted1, tree omitted2)
4481 : : {
4482 : 6645 : tree t = fold_convert_loc (loc, type, result);
4483 : :
4484 : 6645 : if (TREE_SIDE_EFFECTS (omitted2))
4485 : 68 : t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
4486 : 6645 : if (TREE_SIDE_EFFECTS (omitted1))
4487 : 183 : t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
4488 : :
4489 : 6645 : return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
4490 : : }
4491 : :
4492 : :
4493 : : /* Return a simplified tree node for the truth-negation of ARG. This
4494 : : never alters ARG itself. We assume that ARG is an operation that
4495 : : returns a truth value (0 or 1).
4496 : :
4497 : : FIXME: one would think we would fold the result, but it causes
4498 : : problems with the dominator optimizer. */
4499 : :
4500 : : static tree
4501 : 43265368 : fold_truth_not_expr (location_t loc, tree arg)
4502 : : {
4503 : 43265368 : tree type = TREE_TYPE (arg);
4504 : 43265368 : enum tree_code code = TREE_CODE (arg);
4505 : 43265368 : location_t loc1, loc2;
4506 : :
4507 : : /* If this is a comparison, we can simply invert it, except for
4508 : : floating-point non-equality comparisons, in which case we just
4509 : : enclose a TRUTH_NOT_EXPR around what we have. */
4510 : :
4511 : 43265368 : if (TREE_CODE_CLASS (code) == tcc_comparison)
4512 : : {
4513 : 33015582 : tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
4514 : 27653254 : if (FLOAT_TYPE_P (op_type)
4515 : 5371913 : && flag_trapping_math
4516 : 5342934 : && code != ORDERED_EXPR && code != UNORDERED_EXPR
4517 : 38318506 : && code != NE_EXPR && code != EQ_EXPR)
4518 : : return NULL_TREE;
4519 : :
4520 : 28384169 : code = invert_tree_comparison (code, HONOR_NANS (op_type));
4521 : 28384169 : if (code == ERROR_MARK)
4522 : : return NULL_TREE;
4523 : :
4524 : 28384169 : tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
4525 : 28384169 : TREE_OPERAND (arg, 1));
4526 : 28384169 : copy_warning (ret, arg);
4527 : 28384169 : return ret;
4528 : : }
4529 : :
4530 : 10249786 : switch (code)
4531 : : {
4532 : 0 : case INTEGER_CST:
4533 : 0 : return constant_boolean_node (integer_zerop (arg), type);
4534 : :
4535 : 42916 : case TRUTH_AND_EXPR:
4536 : 42916 : loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4537 : 42916 : loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4538 : 85832 : return build2_loc (loc, TRUTH_OR_EXPR, type,
4539 : 42916 : invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4540 : 85832 : invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4541 : :
4542 : 2187 : case TRUTH_OR_EXPR:
4543 : 2187 : loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4544 : 2187 : loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4545 : 4374 : return build2_loc (loc, TRUTH_AND_EXPR, type,
4546 : 2187 : invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4547 : 4374 : invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4548 : :
4549 : 25667 : case TRUTH_XOR_EXPR:
4550 : : /* Here we can invert either operand. We invert the first operand
4551 : : unless the second operand is a TRUTH_NOT_EXPR in which case our
4552 : : result is the XOR of the first operand with the inside of the
4553 : : negation of the second operand. */
4554 : :
4555 : 25667 : if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
4556 : 9 : return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
4557 : 18 : TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
4558 : : else
4559 : 25658 : return build2_loc (loc, TRUTH_XOR_EXPR, type,
4560 : 25658 : invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
4561 : 51316 : TREE_OPERAND (arg, 1));
4562 : :
4563 : 217558 : case TRUTH_ANDIF_EXPR:
4564 : 217558 : loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4565 : 217558 : loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4566 : 435116 : return build2_loc (loc, TRUTH_ORIF_EXPR, type,
4567 : 217558 : invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4568 : 435116 : invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4569 : :
4570 : 19706 : case TRUTH_ORIF_EXPR:
4571 : 19706 : loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4572 : 19706 : loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4573 : 39412 : return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
4574 : 19706 : invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4575 : 39412 : invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4576 : :
4577 : 738716 : case TRUTH_NOT_EXPR:
4578 : 738716 : return TREE_OPERAND (arg, 0);
4579 : :
4580 : 7961 : case COND_EXPR:
4581 : 7961 : {
4582 : 7961 : tree arg1 = TREE_OPERAND (arg, 1);
4583 : 7961 : tree arg2 = TREE_OPERAND (arg, 2);
4584 : :
4585 : 7961 : loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4586 : 7961 : loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
4587 : :
4588 : : /* A COND_EXPR may have a throw as one operand, which
4589 : : then has void type. Just leave void operands
4590 : : as they are. */
4591 : 7961 : return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
4592 : 7961 : VOID_TYPE_P (TREE_TYPE (arg1))
4593 : 7961 : ? arg1 : invert_truthvalue_loc (loc1, arg1),
4594 : 7961 : VOID_TYPE_P (TREE_TYPE (arg2))
4595 : 15918 : ? arg2 : invert_truthvalue_loc (loc2, arg2));
4596 : : }
4597 : :
4598 : 93 : case COMPOUND_EXPR:
4599 : 93 : loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4600 : 186 : return build2_loc (loc, COMPOUND_EXPR, type,
4601 : 93 : TREE_OPERAND (arg, 0),
4602 : 186 : invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
4603 : :
4604 : 0 : case NON_LVALUE_EXPR:
4605 : 0 : loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4606 : 0 : return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
4607 : :
4608 : 68303 : CASE_CONVERT:
4609 : 68303 : if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
4610 : 68239 : return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
4611 : :
4612 : : /* fall through */
4613 : :
4614 : 64 : case FLOAT_EXPR:
4615 : 64 : loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4616 : 64 : return build1_loc (loc, TREE_CODE (arg), type,
4617 : 128 : invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4618 : :
4619 : 638 : case BIT_AND_EXPR:
4620 : 638 : if (!integer_onep (TREE_OPERAND (arg, 1)))
4621 : : return NULL_TREE;
4622 : 0 : return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
4623 : :
4624 : 2 : case SAVE_EXPR:
4625 : 2 : return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
4626 : :
4627 : 109 : case CLEANUP_POINT_EXPR:
4628 : 109 : loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4629 : 109 : return build1_loc (loc, CLEANUP_POINT_EXPR, type,
4630 : 218 : invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4631 : :
4632 : : default:
4633 : : return NULL_TREE;
4634 : : }
4635 : : }
4636 : :
4637 : : /* Fold the truth-negation of ARG. This never alters ARG itself. We
4638 : : assume that ARG is an operation that returns a truth value (0 or 1
4639 : : for scalars, 0 or -1 for vectors). Return the folded expression if
4640 : : folding is successful. Otherwise, return NULL_TREE. */
4641 : :
4642 : : static tree
4643 : 1501488 : fold_invert_truthvalue (location_t loc, tree arg)
4644 : : {
4645 : 1501488 : tree type = TREE_TYPE (arg);
4646 : 3002969 : return fold_unary_loc (loc, VECTOR_TYPE_P (type)
4647 : : ? BIT_NOT_EXPR
4648 : : : TRUTH_NOT_EXPR,
4649 : 1501488 : type, arg);
4650 : : }
4651 : :
4652 : : /* Return a simplified tree node for the truth-negation of ARG. This
4653 : : never alters ARG itself. We assume that ARG is an operation that
4654 : : returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
4655 : :
4656 : : tree
4657 : 36419338 : invert_truthvalue_loc (location_t loc, tree arg)
4658 : : {
4659 : 36419338 : if (TREE_CODE (arg) == ERROR_MARK)
4660 : : return arg;
4661 : :
4662 : 36419338 : tree type = TREE_TYPE (arg);
4663 : 72838676 : return fold_build1_loc (loc, VECTOR_TYPE_P (type)
4664 : : ? BIT_NOT_EXPR
4665 : : : TRUTH_NOT_EXPR,
4666 : 36419338 : type, arg);
4667 : : }
4668 : :
4669 : : /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
4670 : : starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
4671 : : and uses reverse storage order if REVERSEP is nonzero. ORIG_INNER
4672 : : is the original memory reference used to preserve the alias set of
4673 : : the access. */
4674 : :
4675 : : static tree
4676 : 584985 : make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type,
4677 : : HOST_WIDE_INT bitsize, poly_int64 bitpos,
4678 : : int unsignedp, int reversep)
4679 : : {
4680 : 584985 : tree result, bftype;
4681 : :
4682 : : /* Attempt not to lose the access path if possible. */
4683 : 584985 : if (TREE_CODE (orig_inner) == COMPONENT_REF)
4684 : : {
4685 : 549325 : tree ninner = TREE_OPERAND (orig_inner, 0);
4686 : 549325 : machine_mode nmode;
4687 : 549325 : poly_int64 nbitsize, nbitpos;
4688 : 549325 : tree noffset;
4689 : 549325 : int nunsignedp, nreversep, nvolatilep = 0;
4690 : 549325 : tree base = get_inner_reference (ninner, &nbitsize, &nbitpos,
4691 : : &noffset, &nmode, &nunsignedp,
4692 : : &nreversep, &nvolatilep);
4693 : 549325 : if (base == inner
4694 : 549325 : && noffset == NULL_TREE
4695 : 549325 : && known_subrange_p (bitpos, bitsize, nbitpos, nbitsize)
4696 : 549325 : && !reversep
4697 : 549256 : && !nreversep
4698 : 1098581 : && !nvolatilep)
4699 : : {
4700 : 549256 : inner = ninner;
4701 : 549325 : bitpos -= nbitpos;
4702 : : }
4703 : : }
4704 : :
4705 : 584985 : alias_set_type iset = get_alias_set (orig_inner);
4706 : 584985 : if (iset == 0 && get_alias_set (inner) != iset)
4707 : 212 : inner = fold_build2 (MEM_REF, TREE_TYPE (inner),
4708 : : build_fold_addr_expr (inner),
4709 : : build_int_cst (ptr_type_node, 0));
4710 : :
4711 : 584985 : if (known_eq (bitpos, 0) && !reversep)
4712 : : {
4713 : 302296 : tree size = TYPE_SIZE (TREE_TYPE (inner));
4714 : 604592 : if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
4715 : 301678 : || POINTER_TYPE_P (TREE_TYPE (inner)))
4716 : 624 : && tree_fits_shwi_p (size)
4717 : 302920 : && tree_to_shwi (size) == bitsize)
4718 : 624 : return fold_convert_loc (loc, type, inner);
4719 : : }
4720 : :
4721 : 584361 : bftype = type;
4722 : 584361 : if (TYPE_PRECISION (bftype) != bitsize
4723 : 584361 : || TYPE_UNSIGNED (bftype) == !unsignedp)
4724 : 1007 : bftype = build_nonstandard_integer_type (bitsize, 0);
4725 : :
4726 : 584361 : result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
4727 : : bitsize_int (bitsize), bitsize_int (bitpos));
4728 : 584361 : REF_REVERSE_STORAGE_ORDER (result) = reversep;
4729 : :
4730 : 584361 : if (bftype != type)
4731 : 1007 : result = fold_convert_loc (loc, type, result);
4732 : :
4733 : : return result;
4734 : : }
4735 : :
4736 : : /* Optimize a bit-field compare.
4737 : :
4738 : : There are two cases: First is a compare against a constant and the
4739 : : second is a comparison of two items where the fields are at the same
4740 : : bit position relative to the start of a chunk (byte, halfword, word)
4741 : : large enough to contain it. In these cases we can avoid the shift
4742 : : implicit in bitfield extractions.
4743 : :
4744 : : For constants, we emit a compare of the shifted constant with the
4745 : : BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
4746 : : compared. For two fields at the same position, we do the ANDs with the
4747 : : similar mask and compare the result of the ANDs.
4748 : :
4749 : : CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
4750 : : COMPARE_TYPE is the type of the comparison, and LHS and RHS
4751 : : are the left and right operands of the comparison, respectively.
4752 : :
4753 : : If the optimization described above can be done, we return the resulting
4754 : : tree. Otherwise we return zero. */
4755 : :
4756 : : static tree
4757 : 3924106 : optimize_bit_field_compare (location_t loc, enum tree_code code,
4758 : : tree compare_type, tree lhs, tree rhs)
4759 : : {
4760 : 3924106 : poly_int64 plbitpos, plbitsize, rbitpos, rbitsize;
4761 : 3924106 : HOST_WIDE_INT lbitpos, lbitsize, nbitpos, nbitsize;
4762 : 3924106 : tree type = TREE_TYPE (lhs);
4763 : 3924106 : tree unsigned_type;
4764 : 3924106 : int const_p = TREE_CODE (rhs) == INTEGER_CST;
4765 : 3924106 : machine_mode lmode, rmode;
4766 : 3924106 : scalar_int_mode nmode;
4767 : 3924106 : int lunsignedp, runsignedp;
4768 : 3924106 : int lreversep, rreversep;
4769 : 3924106 : int lvolatilep = 0, rvolatilep = 0;
4770 : 3924106 : tree linner, rinner = NULL_TREE;
4771 : 3924106 : tree mask;
4772 : 3924106 : tree offset;
4773 : :
4774 : : /* Get all the information about the extractions being done. If the bit size
4775 : : is the same as the size of the underlying object, we aren't doing an
4776 : : extraction at all and so can do nothing. We also don't want to
4777 : : do anything if the inner expression is a PLACEHOLDER_EXPR since we
4778 : : then will no longer be able to replace it. */
4779 : 3924106 : linner = get_inner_reference (lhs, &plbitsize, &plbitpos, &offset, &lmode,
4780 : : &lunsignedp, &lreversep, &lvolatilep);
4781 : 3924106 : if (linner == lhs
4782 : 3924106 : || !known_size_p (plbitsize)
4783 : 3924106 : || !plbitsize.is_constant (&lbitsize)
4784 : 3924106 : || !plbitpos.is_constant (&lbitpos)
4785 : 7848212 : || known_eq (lbitsize, GET_MODE_BITSIZE (lmode))
4786 : 552585 : || offset != 0
4787 : 552560 : || TREE_CODE (linner) == PLACEHOLDER_EXPR
4788 : 4476666 : || lvolatilep)
4789 : 3371606 : return 0;
4790 : :
4791 : 552500 : if (const_p)
4792 : 539880 : rreversep = lreversep;
4793 : : else
4794 : : {
4795 : : /* If this is not a constant, we can only do something if bit positions,
4796 : : sizes, signedness and storage order are the same. */
4797 : 12620 : rinner
4798 : 12620 : = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
4799 : : &runsignedp, &rreversep, &rvolatilep);
4800 : :
4801 : 12620 : if (rinner == rhs
4802 : 12456 : || maybe_ne (lbitpos, rbitpos)
4803 : 12422 : || maybe_ne (lbitsize, rbitsize)
4804 : 12422 : || lunsignedp != runsignedp
4805 : 12422 : || lreversep != rreversep
4806 : 12422 : || offset != 0
4807 : 12422 : || TREE_CODE (rinner) == PLACEHOLDER_EXPR
4808 : 25042 : || rvolatilep)
4809 : : return 0;
4810 : : }
4811 : :
4812 : : /* Honor the C++ memory model and mimic what RTL expansion does. */
4813 : 552302 : poly_uint64 bitstart = 0;
4814 : 552302 : poly_uint64 bitend = 0;
4815 : 552302 : if (TREE_CODE (lhs) == COMPONENT_REF)
4816 : : {
4817 : 552166 : get_bit_range (&bitstart, &bitend, lhs, &plbitpos, &offset);
4818 : 552166 : if (!plbitpos.is_constant (&lbitpos) || offset != NULL_TREE)
4819 : : return 0;
4820 : : }
4821 : :
4822 : : /* See if we can find a mode to refer to this field. We should be able to,
4823 : : but fail if we can't. */
4824 : 1104604 : if (!get_best_mode (lbitsize, lbitpos, bitstart, bitend,
4825 : 539880 : const_p ? TYPE_ALIGN (TREE_TYPE (linner))
4826 : 12422 : : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
4827 : : TYPE_ALIGN (TREE_TYPE (rinner))),
4828 : 552302 : BITS_PER_WORD, false, &nmode))
4829 : : return 0;
4830 : :
4831 : : /* Set signed and unsigned types of the precision of this mode for the
4832 : : shifts below. */
4833 : 550564 : unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
4834 : :
4835 : : /* Compute the bit position and size for the new reference and our offset
4836 : : within it. If the new reference is the same size as the original, we
4837 : : won't optimize anything, so return zero. */
4838 : 550564 : nbitsize = GET_MODE_BITSIZE (nmode);
4839 : 550564 : nbitpos = lbitpos & ~ (nbitsize - 1);
4840 : 550564 : lbitpos -= nbitpos;
4841 : 550564 : if (nbitsize == lbitsize)
4842 : : return 0;
4843 : :
4844 : 538876 : if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
4845 : 54 : lbitpos = nbitsize - lbitsize - lbitpos;
4846 : :
4847 : : /* Make the mask to be used against the extracted field. */
4848 : 538876 : mask = build_int_cst_type (unsigned_type, -1);
4849 : 538876 : mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
4850 : 538876 : mask = const_binop (RSHIFT_EXPR, mask,
4851 : 538876 : size_int (nbitsize - lbitsize - lbitpos));
4852 : :
4853 : 538876 : if (! const_p)
4854 : : {
4855 : 9584 : if (nbitpos < 0)
4856 : : return 0;
4857 : :
4858 : : /* If not comparing with constant, just rework the comparison
4859 : : and return. */
4860 : 9584 : tree t1 = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4861 : : nbitsize, nbitpos, 1, lreversep);
4862 : 9584 : t1 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t1, mask);
4863 : 9584 : tree t2 = make_bit_field_ref (loc, rinner, rhs, unsigned_type,
4864 : : nbitsize, nbitpos, 1, rreversep);
4865 : 9584 : t2 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t2, mask);
4866 : 9584 : return fold_build2_loc (loc, code, compare_type, t1, t2);
4867 : : }
4868 : :
4869 : : /* Otherwise, we are handling the constant case. See if the constant is too
4870 : : big for the field. Warn and return a tree for 0 (false) if so. We do
4871 : : this not only for its own sake, but to avoid having to test for this
4872 : : error case below. If we didn't, we might generate wrong code.
4873 : :
4874 : : For unsigned fields, the constant shifted right by the field length should
4875 : : be all zero. For signed fields, the high-order bits should agree with
4876 : : the sign bit. */
4877 : :
4878 : 529292 : if (lunsignedp)
4879 : : {
4880 : 528219 : if (wi::lrshift (wi::to_wide (rhs), lbitsize) != 0)
4881 : : {
4882 : 0 : warning (0, "comparison is always %d due to width of bit-field",
4883 : : code == NE_EXPR);
4884 : 0 : return constant_boolean_node (code == NE_EXPR, compare_type);
4885 : : }
4886 : : }
4887 : : else
4888 : : {
4889 : 1073 : wide_int tem = wi::arshift (wi::to_wide (rhs), lbitsize - 1);
4890 : 1073 : if (tem != 0 && tem != -1)
4891 : : {
4892 : 0 : warning (0, "comparison is always %d due to width of bit-field",
4893 : : code == NE_EXPR);
4894 : 0 : return constant_boolean_node (code == NE_EXPR, compare_type);
4895 : : }
4896 : 1073 : }
4897 : :
4898 : 529292 : if (nbitpos < 0)
4899 : : return 0;
4900 : :
4901 : : /* Single-bit compares should always be against zero. */
4902 : 529292 : if (lbitsize == 1 && ! integer_zerop (rhs))
4903 : : {
4904 : 174 : code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4905 : 174 : rhs = build_int_cst (type, 0);
4906 : : }
4907 : :
4908 : : /* Make a new bitfield reference, shift the constant over the
4909 : : appropriate number of bits and mask it with the computed mask
4910 : : (in case this was a signed field). If we changed it, make a new one. */
4911 : 529292 : lhs = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4912 : : nbitsize, nbitpos, 1, lreversep);
4913 : :
4914 : 529292 : rhs = const_binop (BIT_AND_EXPR,
4915 : : const_binop (LSHIFT_EXPR,
4916 : : fold_convert_loc (loc, unsigned_type, rhs),
4917 : : size_int (lbitpos)),
4918 : : mask);
4919 : :
4920 : 529292 : lhs = build2_loc (loc, code, compare_type,
4921 : : build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
4922 : 529292 : return lhs;
4923 : : }
4924 : :
4925 : : /* Subroutine for fold_truth_andor_1: decode a field reference.
4926 : :
4927 : : If EXP is a comparison reference, we return the innermost reference.
4928 : :
4929 : : *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4930 : : set to the starting bit number.
4931 : :
4932 : : If the innermost field can be completely contained in a mode-sized
4933 : : unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4934 : :
4935 : : *PVOLATILEP is set to 1 if the any expression encountered is volatile;
4936 : : otherwise it is not changed.
4937 : :
4938 : : *PUNSIGNEDP is set to the signedness of the field.
4939 : :
4940 : : *PREVERSEP is set to the storage order of the field.
4941 : :
4942 : : *PMASK is set to the mask used. This is either contained in a
4943 : : BIT_AND_EXPR or derived from the width of the field.
4944 : :
4945 : : *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4946 : :
4947 : : Return 0 if this is not a component reference or is one that we can't
4948 : : do anything with. */
4949 : :
4950 : : static tree
4951 : 11114252 : decode_field_reference (location_t loc, tree *exp_, HOST_WIDE_INT *pbitsize,
4952 : : HOST_WIDE_INT *pbitpos, machine_mode *pmode,
4953 : : int *punsignedp, int *preversep, int *pvolatilep,
4954 : : tree *pmask, tree *pand_mask)
4955 : : {
4956 : 11114252 : tree exp = *exp_;
4957 : 11114252 : tree outer_type = 0;
4958 : 11114252 : tree and_mask = 0;
4959 : 11114252 : tree mask, inner, offset;
4960 : 11114252 : tree unsigned_type;
4961 : 11114252 : unsigned int precision;
4962 : :
4963 : : /* All the optimizations using this function assume integer fields.
4964 : : There are problems with FP fields since the type_for_size call
4965 : : below can fail for, e.g., XFmode. */
4966 : 11114252 : if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4967 : : return NULL_TREE;
4968 : :
4969 : : /* We are interested in the bare arrangement of bits, so strip everything
4970 : : that doesn't affect the machine mode. However, record the type of the
4971 : : outermost expression if it may matter below. */
4972 : 8286590 : if (CONVERT_EXPR_P (exp)
4973 : 8086362 : || TREE_CODE (exp) == NON_LVALUE_EXPR)
4974 : 200527 : outer_type = TREE_TYPE (exp);
4975 : 8286590 : STRIP_NOPS (exp);
4976 : :
4977 : 8286590 : if (TREE_CODE (exp) == BIT_AND_EXPR)
4978 : : {
4979 : 957178 : and_mask = TREE_OPERAND (exp, 1);
4980 : 957178 : exp = TREE_OPERAND (exp, 0);
4981 : 957178 : STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4982 : 957178 : if (TREE_CODE (and_mask) != INTEGER_CST)
4983 : : return NULL_TREE;
4984 : : }
4985 : :
4986 : 8284302 : poly_int64 poly_bitsize, poly_bitpos;
4987 : 8284302 : inner = get_inner_reference (exp, &poly_bitsize, &poly_bitpos, &offset,
4988 : : pmode, punsignedp, preversep, pvolatilep);
4989 : 8284302 : if ((inner == exp && and_mask == 0)
4990 : 1143499 : || !poly_bitsize.is_constant (pbitsize)
4991 : 1143499 : || !poly_bitpos.is_constant (pbitpos)
4992 : 1143499 : || *pbitsize < 0
4993 : 1143499 : || offset != 0
4994 : 1132569 : || TREE_CODE (inner) == PLACEHOLDER_EXPR
4995 : : /* Reject out-of-bound accesses (PR79731). */
4996 : 9416871 : || (! AGGREGATE_TYPE_P (TREE_TYPE (inner))
4997 : 827002 : && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner)),
4998 : 827002 : *pbitpos + *pbitsize) < 0))
4999 : 7151782 : return NULL_TREE;
5000 : :
5001 : 1132520 : unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
5002 : 1132520 : if (unsigned_type == NULL_TREE)
5003 : : return NULL_TREE;
5004 : :
5005 : 1132392 : *exp_ = exp;
5006 : :
5007 : : /* If the number of bits in the reference is the same as the bitsize of
5008 : : the outer type, then the outer type gives the signedness. Otherwise
5009 : : (in case of a small bitfield) the signedness is unchanged. */
5010 : 1132392 : if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
5011 : 1609 : *punsignedp = TYPE_UNSIGNED (outer_type);
5012 : :
5013 : : /* Compute the mask to access the bitfield. */
5014 : 1132392 : precision = TYPE_PRECISION (unsigned_type);
5015 : :
5016 : 1132392 : mask = build_int_cst_type (unsigned_type, -1);
5017 : :
5018 : 1132392 : mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
5019 : 1132392 : mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
5020 : :
5021 : : /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
5022 : 1132392 : if (and_mask != 0)
5023 : 954736 : mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
5024 : : fold_convert_loc (loc, unsigned_type, and_mask), mask);
5025 : :
5026 : 1132392 : *pmask = mask;
5027 : 1132392 : *pand_mask = and_mask;
5028 : 1132392 : return inner;
5029 : : }
5030 : :
5031 : : /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
5032 : : bit positions and MASK is SIGNED. */
5033 : :
5034 : : static bool
5035 : 36525 : all_ones_mask_p (const_tree mask, unsigned int size)
5036 : : {
5037 : 36525 : tree type = TREE_TYPE (mask);
5038 : 36525 : unsigned int precision = TYPE_PRECISION (type);
5039 : :
5040 : : /* If this function returns true when the type of the mask is
5041 : : UNSIGNED, then there will be errors. In particular see
5042 : : gcc.c-torture/execute/990326-1.c. There does not appear to be
5043 : : any documentation paper trail as to why this is so. But the pre
5044 : : wide-int worked with that restriction and it has been preserved
5045 : : here. */
5046 : 36525 : if (size > precision || TYPE_SIGN (type) == UNSIGNED)
5047 : : return false;
5048 : :
5049 : 0 : return wi::mask (size, false, precision) == wi::to_wide (mask);
5050 : : }
5051 : :
5052 : : /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
5053 : : represents the sign bit of EXP's type. If EXP represents a sign
5054 : : or zero extension, also test VAL against the unextended type.
5055 : : The return value is the (sub)expression whose sign bit is VAL,
5056 : : or NULL_TREE otherwise. */
5057 : :
5058 : : tree
5059 : 2078 : sign_bit_p (tree exp, const_tree val)
5060 : : {
5061 : 2078 : int width;
5062 : 2078 : tree t;
5063 : :
5064 : : /* Tree EXP must have an integral type. */
5065 : 2078 : t = TREE_TYPE (exp);
5066 : 2078 : if (! INTEGRAL_TYPE_P (t))
5067 : : return NULL_TREE;
5068 : :
5069 : : /* Tree VAL must be an integer constant. */
5070 : 1815 : if (TREE_CODE (val) != INTEGER_CST
5071 : 1815 : || TREE_OVERFLOW (val))
5072 : : return NULL_TREE;
5073 : :
5074 : 1442 : width = TYPE_PRECISION (t);
5075 : 1442 : if (wi::only_sign_bit_p (wi::to_wide (val), width))
5076 : : return exp;
5077 : :
5078 : : /* Handle extension from a narrower type. */
5079 : 805 : if (TREE_CODE (exp) == NOP_EXPR
5080 : 805 : && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
5081 : 0 : return sign_bit_p (TREE_OPERAND (exp, 0), val);
5082 : :
5083 : : return NULL_TREE;
5084 : : }
5085 : :
5086 : : /* Subroutine for fold_truth_andor_1 and simple_condition_p: determine if an
5087 : : operand is simple enough to be evaluated unconditionally. */
5088 : :
5089 : : static bool
5090 : 55104069 : simple_operand_p (const_tree exp)
5091 : : {
5092 : : /* Strip any conversions that don't change the machine mode. */
5093 : 55104069 : STRIP_NOPS (exp);
5094 : :
5095 : 55104069 : return (CONSTANT_CLASS_P (exp)
5096 : 37976818 : || TREE_CODE (exp) == SSA_NAME
5097 : 67346572 : || (DECL_P (exp)
5098 : : && ! TREE_ADDRESSABLE (exp)
5099 : 4061586 : && ! TREE_THIS_VOLATILE (exp)
5100 : 3980438 : && ! DECL_NONLOCAL (exp)
5101 : : /* Don't regard global variables as simple. They may be
5102 : : allocated in ways unknown to the compiler (shared memory,
5103 : : #pragma weak, etc). */
5104 : 3979104 : && ! TREE_PUBLIC (exp)
5105 : 3958622 : && ! DECL_EXTERNAL (exp)
5106 : : /* Weakrefs are not safe to be read, since they can be NULL.
5107 : : They are !TREE_PUBLIC && !DECL_EXTERNAL but still
5108 : : have DECL_WEAK flag set. */
5109 : 3958622 : && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
5110 : : /* Loading a static variable is unduly expensive, but global
5111 : : registers aren't expensive. */
5112 : 3958622 : && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
5113 : : }
5114 : :
5115 : : /* Determine if an operand is simple enough to be evaluated unconditionally.
5116 : : In addition to simple_operand_p, we assume that comparisons, conversions,
5117 : : and logic-not operations are simple, if their operands are simple, too. */
5118 : :
5119 : : bool
5120 : 5350123 : simple_condition_p (tree exp)
5121 : : {
5122 : 5413836 : enum tree_code code;
5123 : :
5124 : 5413836 : if (TREE_SIDE_EFFECTS (exp) || generic_expr_could_trap_p (exp))
5125 : 3918517 : return false;
5126 : :
5127 : 1503138 : while (CONVERT_EXPR_P (exp))
5128 : 7819 : exp = TREE_OPERAND (exp, 0);
5129 : :
5130 : 1495319 : code = TREE_CODE (exp);
5131 : :
5132 : 1495319 : if (TREE_CODE_CLASS (code) == tcc_comparison)
5133 : 1142154 : return (simple_operand_p (TREE_OPERAND (exp, 0))
5134 : 1142154 : && simple_operand_p (TREE_OPERAND (exp, 1)));
5135 : :
5136 : 353165 : if (code == TRUTH_NOT_EXPR)
5137 : 63713 : return simple_condition_p (TREE_OPERAND (exp, 0));
5138 : :
5139 : 289452 : return simple_operand_p (exp);
5140 : : }
5141 : :
5142 : :
5143 : : /* The following functions are subroutines to fold_range_test and allow it to
5144 : : try to change a logical combination of comparisons into a range test.
5145 : :
5146 : : For example, both
5147 : : X == 2 || X == 3 || X == 4 || X == 5
5148 : : and
5149 : : X >= 2 && X <= 5
5150 : : are converted to
5151 : : (unsigned) (X - 2) <= 3
5152 : :
5153 : : We describe each set of comparisons as being either inside or outside
5154 : : a range, using a variable named like IN_P, and then describe the
5155 : : range with a lower and upper bound. If one of the bounds is omitted,
5156 : : it represents either the highest or lowest value of the type.
5157 : :
5158 : : In the comments below, we represent a range by two numbers in brackets
5159 : : preceded by a "+" to designate being inside that range, or a "-" to
5160 : : designate being outside that range, so the condition can be inverted by
5161 : : flipping the prefix. An omitted bound is represented by a "-". For
5162 : : example, "- [-, 10]" means being outside the range starting at the lowest
5163 : : possible value and ending at 10, in other words, being greater than 10.
5164 : : The range "+ [-, -]" is always true and hence the range "- [-, -]" is
5165 : : always false.
5166 : :
5167 : : We set up things so that the missing bounds are handled in a consistent
5168 : : manner so neither a missing bound nor "true" and "false" need to be
5169 : : handled using a special case. */
5170 : :
5171 : : /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
5172 : : of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
5173 : : and UPPER1_P are nonzero if the respective argument is an upper bound
5174 : : and zero for a lower. TYPE, if nonzero, is the type of the result; it
5175 : : must be specified for a comparison. ARG1 will be converted to ARG0's
5176 : : type if both are specified. */
5177 : :
5178 : : static tree
5179 : 18790366 : range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
5180 : : tree arg1, int upper1_p)
5181 : : {
5182 : 18790366 : tree tem;
5183 : 18790366 : int result;
5184 : 18790366 : int sgn0, sgn1;
5185 : :
5186 : : /* If neither arg represents infinity, do the normal operation.
5187 : : Else, if not a comparison, return infinity. Else handle the special
5188 : : comparison rules. Note that most of the cases below won't occur, but
5189 : : are handled for consistency. */
5190 : :
5191 : 18790366 : if (arg0 != 0 && arg1 != 0)
5192 : : {
5193 : 10853646 : tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
5194 : : arg0, fold_convert (TREE_TYPE (arg0), arg1));
5195 : 10853646 : STRIP_NOPS (tem);
5196 : 10853646 : return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
5197 : : }
5198 : :
5199 : 7936720 : if (TREE_CODE_CLASS (code) != tcc_comparison)
5200 : : return 0;
5201 : :
5202 : : /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
5203 : : for neither. In real maths, we cannot assume open ended ranges are
5204 : : the same. But, this is computer arithmetic, where numbers are finite.
5205 : : We can therefore make the transformation of any unbounded range with
5206 : : the value Z, Z being greater than any representable number. This permits
5207 : : us to treat unbounded ranges as equal. */
5208 : 7929319 : sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
5209 : 7929319 : sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
5210 : 7929319 : switch (code)
5211 : : {
5212 : 3260080 : case EQ_EXPR:
5213 : 3260080 : result = sgn0 == sgn1;
5214 : 3260080 : break;
5215 : 0 : case NE_EXPR:
5216 : 0 : result = sgn0 != sgn1;
5217 : 0 : break;
5218 : 740676 : case LT_EXPR:
5219 : 740676 : result = sgn0 < sgn1;
5220 : 740676 : break;
5221 : 1606920 : case LE_EXPR:
5222 : 1606920 : result = sgn0 <= sgn1;
5223 : 1606920 : break;
5224 : 2321643 : case GT_EXPR:
5225 : 2321643 : result = sgn0 > sgn1;
5226 : 2321643 : break;
5227 : 0 : case GE_EXPR:
5228 : 0 : result = sgn0 >= sgn1;
5229 : 0 : break;
5230 : 0 : default:
5231 : 0 : gcc_unreachable ();
5232 : : }
5233 : :
5234 : 7929319 : return constant_boolean_node (result, type);
5235 : : }
5236 : :
5237 : : /* Helper routine for make_range. Perform one step for it, return
5238 : : new expression if the loop should continue or NULL_TREE if it should
5239 : : stop. */
5240 : :
5241 : : tree
5242 : 50812636 : make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
5243 : : tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
5244 : : bool *strict_overflow_p)
5245 : : {
5246 : 50812636 : tree arg0_type = TREE_TYPE (arg0);
5247 : 50812636 : tree n_low, n_high, low = *p_low, high = *p_high;
5248 : 50812636 : int in_p = *p_in_p, n_in_p;
5249 : :
5250 : 50812636 : switch (code)
5251 : : {
5252 : 1493954 : case TRUTH_NOT_EXPR:
5253 : : /* We can only do something if the range is testing for zero. */
5254 : 1493954 : if (low == NULL_TREE || high == NULL_TREE
5255 : 1493954 : || ! integer_zerop (low) || ! integer_zerop (high))
5256 : 0 : return NULL_TREE;
5257 : 1493954 : *p_in_p = ! in_p;
5258 : 1493954 : return arg0;
5259 : :
5260 : 40528060 : case EQ_EXPR: case NE_EXPR:
5261 : 40528060 : case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
5262 : : /* We can only do something if the range is testing for zero
5263 : : and if the second operand is an integer constant. Note that
5264 : : saying something is "in" the range we make is done by
5265 : : complementing IN_P since it will set in the initial case of
5266 : : being not equal to zero; "out" is leaving it alone. */
5267 : 40528060 : if (low == NULL_TREE || high == NULL_TREE
5268 : 40528060 : || ! integer_zerop (low) || ! integer_zerop (high)
5269 : 81056026 : || TREE_CODE (arg1) != INTEGER_CST)
5270 : 15111152 : return NULL_TREE;
5271 : :
5272 : 25416908 : switch (code)
5273 : : {
5274 : : case NE_EXPR: /* - [c, c] */
5275 : : low = high = arg1;
5276 : : break;
5277 : 6367778 : case EQ_EXPR: /* + [c, c] */
5278 : 6367778 : in_p = ! in_p, low = high = arg1;
5279 : 6367778 : break;
5280 : 2042486 : case GT_EXPR: /* - [-, c] */
5281 : 2042486 : low = 0, high = arg1;
5282 : 2042486 : break;
5283 : 771967 : case GE_EXPR: /* + [c, -] */
5284 : 771967 : in_p = ! in_p, low = arg1, high = 0;
5285 : 771967 : break;
5286 : 5032921 : case LT_EXPR: /* - [c, -] */
5287 : 5032921 : low = arg1, high = 0;
5288 : 5032921 : break;
5289 : 3932638 : case LE_EXPR: /* + [-, c] */
5290 : 3932638 : in_p = ! in_p, low = 0, high = arg1;
5291 : 3932638 : break;
5292 : 0 : default:
5293 : 0 : gcc_unreachable ();
5294 : : }
5295 : :
5296 : : /* If this is an unsigned comparison, we also know that EXP is
5297 : : greater than or equal to zero. We base the range tests we make
5298 : : on that fact, so we record it here so we can parse existing
5299 : : range tests. We test arg0_type since often the return type
5300 : : of, e.g. EQ_EXPR, is boolean. */
5301 : 25416908 : if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
5302 : : {
5303 : 1127359 : if (! merge_ranges (&n_in_p, &n_low, &n_high,
5304 : : in_p, low, high, 1,
5305 : 1127359 : build_int_cst (arg0_type, 0),
5306 : : NULL_TREE))
5307 : : return NULL_TREE;
5308 : :
5309 : 1127346 : in_p = n_in_p, low = n_low, high = n_high;
5310 : :
5311 : : /* If the high bound is missing, but we have a nonzero low
5312 : : bound, reverse the range so it goes from zero to the low bound
5313 : : minus 1. */
5314 : 1127346 : if (high == 0 && low && ! integer_zerop (low))
5315 : : {
5316 : 552976 : in_p = ! in_p;
5317 : 552976 : high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
5318 : 552976 : build_int_cst (TREE_TYPE (low), 1), 0);
5319 : 552976 : low = build_int_cst (arg0_type, 0);
5320 : : }
5321 : : }
5322 : :
5323 : 25416895 : *p_low = low;
5324 : 25416895 : *p_high = high;
5325 : 25416895 : *p_in_p = in_p;
5326 : 25416895 : return arg0;
5327 : :
5328 : 188 : case NEGATE_EXPR:
5329 : : /* If flag_wrapv and ARG0_TYPE is signed, make sure
5330 : : low and high are non-NULL, then normalize will DTRT. */
5331 : 188 : if (!TYPE_UNSIGNED (arg0_type)
5332 : 188 : && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
5333 : : {
5334 : 105 : if (low == NULL_TREE)
5335 : 12 : low = TYPE_MIN_VALUE (arg0_type);
5336 : 105 : if (high == NULL_TREE)
5337 : 47 : high = TYPE_MAX_VALUE (arg0_type);
5338 : : }
5339 : :
5340 : : /* (-x) IN [a,b] -> x in [-b, -a] */
5341 : 188 : n_low = range_binop (MINUS_EXPR, exp_type,
5342 : 188 : build_int_cst (exp_type, 0),
5343 : : 0, high, 1);
5344 : 188 : n_high = range_binop (MINUS_EXPR, exp_type,
5345 : 188 : build_int_cst (exp_type, 0),
5346 : : 0, low, 0);
5347 : 188 : if (n_high != 0 && TREE_OVERFLOW (n_high))
5348 : : return NULL_TREE;
5349 : 176 : goto normalize;
5350 : :
5351 : 0 : case BIT_NOT_EXPR:
5352 : : /* ~ X -> -X - 1 */
5353 : 0 : return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
5354 : 0 : build_int_cst (exp_type, 1));
5355 : :
5356 : 664978 : case PLUS_EXPR:
5357 : 664978 : case MINUS_EXPR:
5358 : 664978 : if (TREE_CODE (arg1) != INTEGER_CST)
5359 : : return NULL_TREE;
5360 : :
5361 : : /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
5362 : : move a constant to the other side. */
5363 : 544911 : if (!TYPE_UNSIGNED (arg0_type)
5364 : 544911 : && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
5365 : : return NULL_TREE;
5366 : :
5367 : : /* If EXP is signed, any overflow in the computation is undefined,
5368 : : so we don't worry about it so long as our computations on
5369 : : the bounds don't overflow. For unsigned, overflow is defined
5370 : : and this is exactly the right thing. */
5371 : 639732 : n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5372 : : arg0_type, low, 0, arg1, 0);
5373 : 320053 : n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5374 : : arg0_type, high, 1, arg1, 0);
5375 : 317250 : if ((n_low != 0 && TREE_OVERFLOW (n_low))
5376 : 637296 : || (n_high != 0 && TREE_OVERFLOW (n_high)))
5377 : : return NULL_TREE;
5378 : :
5379 : 320046 : if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
5380 : 13340 : *strict_overflow_p = true;
5381 : :
5382 : 0 : normalize:
5383 : : /* Check for an unsigned range which has wrapped around the maximum
5384 : : value thus making n_high < n_low, and normalize it. */
5385 : 320222 : if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
5386 : : {
5387 : 135695 : low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
5388 : 135695 : build_int_cst (TREE_TYPE (n_high), 1), 0);
5389 : 135695 : high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
5390 : 135695 : build_int_cst (TREE_TYPE (n_low), 1), 0);
5391 : :
5392 : : /* If the range is of the form +/- [ x+1, x ], we won't
5393 : : be able to normalize it. But then, it represents the
5394 : : whole range or the empty set, so make it
5395 : : +/- [ -, - ]. */
5396 : 135695 : if (tree_int_cst_equal (n_low, low)
5397 : 135695 : && tree_int_cst_equal (n_high, high))
5398 : : low = high = 0;
5399 : : else
5400 : 135695 : in_p = ! in_p;
5401 : : }
5402 : : else
5403 : 184527 : low = n_low, high = n_high;
5404 : :
5405 : 320222 : *p_low = low;
5406 : 320222 : *p_high = high;
5407 : 320222 : *p_in_p = in_p;
5408 : 320222 : return arg0;
5409 : :
5410 : 2168037 : CASE_CONVERT:
5411 : 2168037 : case NON_LVALUE_EXPR:
5412 : 2168037 : if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
5413 : : return NULL_TREE;
5414 : :
5415 : 846530 : if (! INTEGRAL_TYPE_P (arg0_type)
5416 : 818942 : || (low != 0 && ! int_fits_type_p (low, arg0_type))
5417 : 713797 : || (high != 0 && ! int_fits_type_p (high, arg0_type)))
5418 : : return NULL_TREE;
5419 : :
5420 : 696333 : n_low = low, n_high = high;
5421 : :
5422 : 696333 : if (n_low != 0)
5423 : 599450 : n_low = fold_convert_loc (loc, arg0_type, n_low);
5424 : :
5425 : 696333 : if (n_high != 0)
5426 : 609580 : n_high = fold_convert_loc (loc, arg0_type, n_high);
5427 : :
5428 : : /* If we're converting arg0 from an unsigned type, to exp,
5429 : : a signed type, we will be doing the comparison as unsigned.
5430 : : The tests above have already verified that LOW and HIGH
5431 : : are both positive.
5432 : :
5433 : : So we have to ensure that we will handle large unsigned
5434 : : values the same way that the current signed bounds treat
5435 : : negative values. */
5436 : :
5437 : 696333 : if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
5438 : : {
5439 : 209780 : tree high_positive;
5440 : 209780 : tree equiv_type;
5441 : : /* For fixed-point modes, we need to pass the saturating flag
5442 : : as the 2nd parameter. */
5443 : 209780 : if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
5444 : 0 : equiv_type
5445 : 0 : = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
5446 : 0 : TYPE_SATURATING (arg0_type));
5447 : 209780 : else if (TREE_CODE (arg0_type) == BITINT_TYPE)
5448 : : equiv_type = arg0_type;
5449 : : else
5450 : 209772 : equiv_type
5451 : 209772 : = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
5452 : :
5453 : : /* A range without an upper bound is, naturally, unbounded.
5454 : : Since convert would have cropped a very large value, use
5455 : : the max value for the destination type. */
5456 : 209780 : high_positive
5457 : 209780 : = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
5458 : 0 : : TYPE_MAX_VALUE (arg0_type);
5459 : :
5460 : 209780 : if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
5461 : 189168 : high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
5462 : : fold_convert_loc (loc, arg0_type,
5463 : : high_positive),
5464 : 189168 : build_int_cst (arg0_type, 1));
5465 : :
5466 : : /* If the low bound is specified, "and" the range with the
5467 : : range for which the original unsigned value will be
5468 : : positive. */
5469 : 209780 : if (low != 0)
5470 : : {
5471 : 117754 : if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
5472 : : 1, fold_convert_loc (loc, arg0_type,
5473 : : integer_zero_node),
5474 : : high_positive))
5475 : : return NULL_TREE;
5476 : :
5477 : 117754 : in_p = (n_in_p == in_p);
5478 : : }
5479 : : else
5480 : : {
5481 : : /* Otherwise, "or" the range with the range of the input
5482 : : that will be interpreted as negative. */
5483 : 92026 : if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
5484 : : 1, fold_convert_loc (loc, arg0_type,
5485 : : integer_zero_node),
5486 : : high_positive))
5487 : : return NULL_TREE;
5488 : :
5489 : 92026 : in_p = (in_p != n_in_p);
5490 : : }
5491 : : }
5492 : :
5493 : : /* Otherwise, if we are converting arg0 from signed type, to exp,
5494 : : an unsigned type, we will do the comparison as signed. If
5495 : : high is non-NULL, we punt above if it doesn't fit in the signed
5496 : : type, so if we get through here, +[-, high] or +[low, high] are
5497 : : equivalent to +[-, n_high] or +[n_low, n_high]. Similarly,
5498 : : +[-, -] or -[-, -] are equivalent too. But if low is specified and
5499 : : high is not, the +[low, -] range is equivalent to union of
5500 : : +[n_low, -] and +[-, -1] ranges, so +[low, -] is equivalent to
5501 : : -[0, n_low-1] and similarly -[low, -] to +[0, n_low-1], except for
5502 : : low being 0, which should be treated as [-, -]. */
5503 : 486553 : else if (TYPE_UNSIGNED (exp_type)
5504 : 470430 : && !TYPE_UNSIGNED (arg0_type)
5505 : 214686 : && low
5506 : 701239 : && !high)
5507 : : {
5508 : 12 : if (integer_zerop (low))
5509 : 12 : n_low = NULL_TREE;
5510 : : else
5511 : : {
5512 : 0 : n_high = fold_build2_loc (loc, PLUS_EXPR, arg0_type,
5513 : 0 : n_low, build_int_cst (arg0_type, -1));
5514 : 0 : n_low = build_zero_cst (arg0_type);
5515 : 0 : in_p = !in_p;
5516 : : }
5517 : : }
5518 : :
5519 : 696333 : *p_low = n_low;
5520 : 696333 : *p_high = n_high;
5521 : 696333 : *p_in_p = in_p;
5522 : 696333 : return arg0;
5523 : :
5524 : : default:
5525 : : return NULL_TREE;
5526 : : }
5527 : : }
5528 : :
5529 : : /* Given EXP, a logical expression, set the range it is testing into
5530 : : variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
5531 : : actually being tested. *PLOW and *PHIGH will be made of the same
5532 : : type as the returned expression. If EXP is not a comparison, we
5533 : : will most likely not be returning a useful value and range. Set
5534 : : *STRICT_OVERFLOW_P to true if the return value is only valid
5535 : : because signed overflow is undefined; otherwise, do not change
5536 : : *STRICT_OVERFLOW_P. */
5537 : :
5538 : : tree
5539 : 42717380 : make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
5540 : : bool *strict_overflow_p)
5541 : : {
5542 : 42717380 : enum tree_code code;
5543 : 42717380 : tree arg0, arg1 = NULL_TREE;
5544 : 42717380 : tree exp_type, nexp;
5545 : 42717380 : int in_p;
5546 : 42717380 : tree low, high;
5547 : 42717380 : location_t loc = EXPR_LOCATION (exp);
5548 : :
5549 : : /* Start with simply saying "EXP != 0" and then look at the code of EXP
5550 : : and see if we can refine the range. Some of the cases below may not
5551 : : happen, but it doesn't seem worth worrying about this. We "continue"
5552 : : the outer loop when we've changed something; otherwise we "break"
5553 : : the switch, which will "break" the while. */
5554 : :
5555 : 42717380 : in_p = 0;
5556 : 42717380 : low = high = build_int_cst (TREE_TYPE (exp), 0);
5557 : :
5558 : 68026698 : while (1)
5559 : : {
5560 : 68026698 : code = TREE_CODE (exp);
5561 : 68026698 : exp_type = TREE_TYPE (exp);
5562 : 68026698 : arg0 = NULL_TREE;
5563 : :
5564 : 68026698 : if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
5565 : : {
5566 : 46666655 : if (TREE_OPERAND_LENGTH (exp) > 0)
5567 : 46666655 : arg0 = TREE_OPERAND (exp, 0);
5568 : 46666655 : if (TREE_CODE_CLASS (code) == tcc_binary
5569 : 44452530 : || TREE_CODE_CLASS (code) == tcc_comparison
5570 : 54141940 : || (TREE_CODE_CLASS (code) == tcc_expression
5571 : 2424346 : && TREE_OPERAND_LENGTH (exp) > 1))
5572 : 40110165 : arg1 = TREE_OPERAND (exp, 1);
5573 : : }
5574 : 46666655 : if (arg0 == NULL_TREE)
5575 : : break;
5576 : :
5577 : 46666641 : nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
5578 : : &high, &in_p, strict_overflow_p);
5579 : 46666641 : if (nexp == NULL_TREE)
5580 : : break;
5581 : : exp = nexp;
5582 : : }
5583 : :
5584 : : /* If EXP is a constant, we can evaluate whether this is true or false. */
5585 : 42717380 : if (TREE_CODE (exp) == INTEGER_CST)
5586 : : {
5587 : 32653 : in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
5588 : : exp, 0, low, 0))
5589 : 32653 : && integer_onep (range_binop (LE_EXPR, integer_type_node,
5590 : : exp, 1, high, 1)));
5591 : 32653 : low = high = 0;
5592 : 32653 : exp = 0;
5593 : : }
5594 : :
5595 : 42717380 : *pin_p = in_p, *plow = low, *phigh = high;
5596 : 42717380 : return exp;
5597 : : }
5598 : :
5599 : : /* Returns TRUE if [LOW, HIGH] range check can be optimized to
5600 : : a bitwise check i.e. when
5601 : : LOW == 0xXX...X00...0
5602 : : HIGH == 0xXX...X11...1
5603 : : Return corresponding mask in MASK and stem in VALUE. */
5604 : :
5605 : : static bool
5606 : 133 : maskable_range_p (const_tree low, const_tree high, tree type, tree *mask,
5607 : : tree *value)
5608 : : {
5609 : 133 : if (TREE_CODE (low) != INTEGER_CST
5610 : 133 : || TREE_CODE (high) != INTEGER_CST)
5611 : : return false;
5612 : :
5613 : 133 : unsigned prec = TYPE_PRECISION (type);
5614 : 133 : wide_int lo = wi::to_wide (low, prec);
5615 : 133 : wide_int hi = wi::to_wide (high, prec);
5616 : :
5617 : 133 : wide_int end_mask = lo ^ hi;
5618 : 266 : if ((end_mask & (end_mask + 1)) != 0
5619 : 245 : || (lo & end_mask) != 0)
5620 : : return false;
5621 : :
5622 : 88 : wide_int stem_mask = ~end_mask;
5623 : 88 : wide_int stem = lo & stem_mask;
5624 : 88 : if (stem != (hi & stem_mask))
5625 : : return false;
5626 : :
5627 : 88 : *mask = wide_int_to_tree (type, stem_mask);
5628 : 88 : *value = wide_int_to_tree (type, stem);
5629 : :
5630 : 88 : return true;
5631 : 221 : }
5632 : :
5633 : : /* Helper routine for build_range_check and match.pd. Return the type to
5634 : : perform the check or NULL if it shouldn't be optimized. */
5635 : :
5636 : : tree
5637 : 857349 : range_check_type (tree etype)
5638 : : {
5639 : : /* First make sure that arithmetics in this type is valid, then make sure
5640 : : that it wraps around. */
5641 : 857349 : if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
5642 : 58082 : etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype), 1);
5643 : :
5644 : 857349 : if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_UNSIGNED (etype))
5645 : : {
5646 : 740186 : tree utype, minv, maxv;
5647 : :
5648 : : /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
5649 : : for the type in question, as we rely on this here. */
5650 : 740186 : utype = unsigned_type_for (etype);
5651 : 740186 : maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
5652 : 740186 : maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
5653 : 740186 : build_int_cst (TREE_TYPE (maxv), 1), 1);
5654 : 740186 : minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
5655 : :
5656 : 740186 : if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
5657 : : minv, 1, maxv, 1)))
5658 : : etype = utype;
5659 : : else
5660 : 0 : return NULL_TREE;
5661 : : }
5662 : 117163 : else if (POINTER_TYPE_P (etype)
5663 : : || TREE_CODE (etype) == OFFSET_TYPE
5664 : : /* Right now all BITINT_TYPEs satisfy
5665 : : (unsigned) max + 1 == (unsigned) min, so no need to verify
5666 : : that like for INTEGER_TYPEs. */
5667 : : || TREE_CODE (etype) == BITINT_TYPE)
5668 : 1500 : etype = unsigned_type_for (etype);
5669 : : return etype;
5670 : : }
5671 : :
5672 : : /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
5673 : : type, TYPE, return an expression to test if EXP is in (or out of, depending
5674 : : on IN_P) the range. Return 0 if the test couldn't be created. */
5675 : :
5676 : : tree
5677 : 1642835 : build_range_check (location_t loc, tree type, tree exp, int in_p,
5678 : : tree low, tree high)
5679 : : {
5680 : 3271675 : tree etype = TREE_TYPE (exp), mask, value;
5681 : :
5682 : : /* Disable this optimization for function pointer expressions
5683 : : on targets that require function pointer canonicalization. */
5684 : 3271675 : if (targetm.have_canonicalize_funcptr_for_compare ()
5685 : 0 : && POINTER_TYPE_P (etype)
5686 : 3271675 : && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (etype)))
5687 : : return NULL_TREE;
5688 : :
5689 : 3271675 : if (! in_p)
5690 : : {
5691 : 313731 : value = build_range_check (loc, type, exp, 1, low, high);
5692 : 313731 : if (value != 0)
5693 : 313731 : return invert_truthvalue_loc (loc, value);
5694 : :
5695 : : return 0;
5696 : : }
5697 : :
5698 : 2957944 : if (low == 0 && high == 0)
5699 : 127048 : return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
5700 : :
5701 : 2830896 : if (low == 0)
5702 : 944407 : return fold_build2_loc (loc, LE_EXPR, type, exp,
5703 : 944407 : fold_convert_loc (loc, etype, high));
5704 : :
5705 : 1886489 : if (high == 0)
5706 : 64711 : return fold_build2_loc (loc, GE_EXPR, type, exp,
5707 : 64711 : fold_convert_loc (loc, etype, low));
5708 : :
5709 : 1821778 : if (operand_equal_p (low, high, 0))
5710 : 192646 : return fold_build2_loc (loc, EQ_EXPR, type, exp,
5711 : 192646 : fold_convert_loc (loc, etype, low));
5712 : :
5713 : 1629132 : if (TREE_CODE (exp) == BIT_AND_EXPR
5714 : 1629132 : && maskable_range_p (low, high, etype, &mask, &value))
5715 : 88 : return fold_build2_loc (loc, EQ_EXPR, type,
5716 : : fold_build2_loc (loc, BIT_AND_EXPR, etype,
5717 : : exp, mask),
5718 : 88 : value);
5719 : :
5720 : 1629044 : if (integer_zerop (low))
5721 : : {
5722 : 858781 : if (! TYPE_UNSIGNED (etype))
5723 : : {
5724 : 81177 : etype = unsigned_type_for (etype);
5725 : 81177 : high = fold_convert_loc (loc, etype, high);
5726 : 81177 : exp = fold_convert_loc (loc, etype, exp);
5727 : : }
5728 : 858781 : return build_range_check (loc, type, exp, 1, 0, high);
5729 : : }
5730 : :
5731 : : /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
5732 : 770263 : if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
5733 : : {
5734 : 89222 : int prec = TYPE_PRECISION (etype);
5735 : :
5736 : 89222 : if (wi::mask <widest_int> (prec - 1, false) == wi::to_widest (high))
5737 : : {
5738 : 204 : if (TYPE_UNSIGNED (etype))
5739 : : {
5740 : 197 : tree signed_etype = signed_type_for (etype);
5741 : 197 : if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
5742 : 0 : etype
5743 : 0 : = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
5744 : : else
5745 : : etype = signed_etype;
5746 : 197 : exp = fold_convert_loc (loc, etype, exp);
5747 : : }
5748 : 204 : return fold_build2_loc (loc, GT_EXPR, type, exp,
5749 : 204 : build_int_cst (etype, 0));
5750 : : }
5751 : : }
5752 : :
5753 : : /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
5754 : : This requires wrap-around arithmetics for the type of the expression. */
5755 : 770059 : etype = range_check_type (etype);
5756 : 770059 : if (etype == NULL_TREE)
5757 : : return NULL_TREE;
5758 : :
5759 : 770059 : high = fold_convert_loc (loc, etype, high);
5760 : 770059 : low = fold_convert_loc (loc, etype, low);
5761 : 770059 : exp = fold_convert_loc (loc, etype, exp);
5762 : :
5763 : 770059 : value = const_binop (MINUS_EXPR, high, low);
5764 : :
5765 : 770059 : if (value != 0 && !TREE_OVERFLOW (value))
5766 : 770059 : return build_range_check (loc, type,
5767 : : fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
5768 : 770059 : 1, build_int_cst (etype, 0), value);
5769 : :
5770 : : return 0;
5771 : : }
5772 : :
5773 : : /* Return the predecessor of VAL in its type, handling the infinite case. */
5774 : :
5775 : : static tree
5776 : 138424 : range_predecessor (tree val)
5777 : : {
5778 : 138424 : tree type = TREE_TYPE (val);
5779 : :
5780 : 138424 : if (INTEGRAL_TYPE_P (type)
5781 : 138424 : && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
5782 : : return 0;
5783 : : else
5784 : 138424 : return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
5785 : 138424 : build_int_cst (TREE_TYPE (val), 1), 0);
5786 : : }
5787 : :
5788 : : /* Return the successor of VAL in its type, handling the infinite case. */
5789 : :
5790 : : static tree
5791 : 1467688 : range_successor (tree val)
5792 : : {
5793 : 1467688 : tree type = TREE_TYPE (val);
5794 : :
5795 : 1467688 : if (INTEGRAL_TYPE_P (type)
5796 : 1467688 : && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
5797 : : return 0;
5798 : : else
5799 : 1467675 : return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
5800 : 1467675 : build_int_cst (TREE_TYPE (val), 1), 0);
5801 : : }
5802 : :
5803 : : /* Given two ranges, see if we can merge them into one. Return 1 if we
5804 : : can, 0 if we can't. Set the output range into the specified parameters. */
5805 : :
5806 : : bool
5807 : 2795612 : merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
5808 : : tree high0, int in1_p, tree low1, tree high1)
5809 : : {
5810 : 2795612 : bool no_overlap;
5811 : 2795612 : int subset;
5812 : 2795612 : int temp;
5813 : 2795612 : tree tem;
5814 : 2795612 : int in_p;
5815 : 2795612 : tree low, high;
5816 : 2795612 : int lowequal = ((low0 == 0 && low1 == 0)
5817 : 2795612 : || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5818 : 2795612 : low0, 0, low1, 0)));
5819 : 2795612 : int highequal = ((high0 == 0 && high1 == 0)
5820 : 2795612 : || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5821 : 2795612 : high0, 1, high1, 1)));
5822 : :
5823 : : /* Make range 0 be the range that starts first, or ends last if they
5824 : : start at the same value. Swap them if it isn't. */
5825 : 2795612 : if (integer_onep (range_binop (GT_EXPR, integer_type_node,
5826 : : low0, 0, low1, 0))
5827 : 2795612 : || (lowequal
5828 : 859399 : && integer_onep (range_binop (GT_EXPR, integer_type_node,
5829 : : high1, 1, high0, 1))))
5830 : : {
5831 : : temp = in0_p, in0_p = in1_p, in1_p = temp;
5832 : : tem = low0, low0 = low1, low1 = tem;
5833 : : tem = high0, high0 = high1, high1 = tem;
5834 : : }
5835 : :
5836 : : /* If the second range is != high1 where high1 is the type maximum of
5837 : : the type, try first merging with < high1 range. */
5838 : 2795612 : if (low1
5839 : 2795612 : && high1
5840 : 755898 : && TREE_CODE (low1) == INTEGER_CST
5841 : 755898 : && (TREE_CODE (TREE_TYPE (low1)) == INTEGER_TYPE
5842 : 121525 : || (TREE_CODE (TREE_TYPE (low1)) == ENUMERAL_TYPE
5843 : 2542435 : && known_eq (TYPE_PRECISION (TREE_TYPE (low1)),
5844 : : GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low1))))))
5845 : 3512137 : && operand_equal_p (low1, high1, 0))
5846 : : {
5847 : 417481 : if (tree_int_cst_equal (low1, TYPE_MAX_VALUE (TREE_TYPE (low1)))
5848 : 417481 : && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5849 : : !in1_p, NULL_TREE, range_predecessor (low1)))
5850 : : return true;
5851 : : /* Similarly for the second range != low1 where low1 is the type minimum
5852 : : of the type, try first merging with > low1 range. */
5853 : 340025 : if (tree_int_cst_equal (low1, TYPE_MIN_VALUE (TREE_TYPE (low1)))
5854 : 340025 : && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5855 : : !in1_p, range_successor (low1), NULL_TREE))
5856 : : return true;
5857 : : }
5858 : :
5859 : : /* Now flag two cases, whether the ranges are disjoint or whether the
5860 : : second range is totally subsumed in the first. Note that the tests
5861 : : below are simplified by the ones above. */
5862 : 2665502 : no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
5863 : : high0, 1, low1, 0));
5864 : 2665502 : subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
5865 : : high1, 1, high0, 1));
5866 : :
5867 : : /* We now have four cases, depending on whether we are including or
5868 : : excluding the two ranges. */
5869 : 2665502 : if (in0_p && in1_p)
5870 : : {
5871 : : /* If they don't overlap, the result is false. If the second range
5872 : : is a subset it is the result. Otherwise, the range is from the start
5873 : : of the second to the end of the first. */
5874 : 953561 : if (no_overlap)
5875 : : in_p = 0, low = high = 0;
5876 : 951663 : else if (subset)
5877 : : in_p = 1, low = low1, high = high1;
5878 : : else
5879 : 821520 : in_p = 1, low = low1, high = high0;
5880 : : }
5881 : :
5882 : 1711941 : else if (in0_p && ! in1_p)
5883 : : {
5884 : : /* If they don't overlap, the result is the first range. If they are
5885 : : equal, the result is false. If the second range is a subset of the
5886 : : first, and the ranges begin at the same place, we go from just after
5887 : : the end of the second range to the end of the first. If the second
5888 : : range is not a subset of the first, or if it is a subset and both
5889 : : ranges end at the same place, the range starts at the start of the
5890 : : first range and ends just before the second range.
5891 : : Otherwise, we can't describe this as a single range. */
5892 : 631050 : if (no_overlap)
5893 : : in_p = 1, low = low0, high = high0;
5894 : 625950 : else if (lowequal && highequal)
5895 : : in_p = 0, low = high = 0;
5896 : 624911 : else if (subset && lowequal)
5897 : : {
5898 : 551870 : low = range_successor (high1);
5899 : 551870 : high = high0;
5900 : 551870 : in_p = 1;
5901 : 551870 : if (low == 0)
5902 : : {
5903 : : /* We are in the weird situation where high0 > high1 but
5904 : : high1 has no successor. Punt. */
5905 : : return 0;
5906 : : }
5907 : : }
5908 : 73041 : else if (! subset || highequal)
5909 : : {
5910 : 53038 : low = low0;
5911 : 53038 : high = range_predecessor (low1);
5912 : 53038 : in_p = 1;
5913 : 53038 : if (high == 0)
5914 : : {
5915 : : /* low0 < low1 but low1 has no predecessor. Punt. */
5916 : : return 0;
5917 : : }
5918 : : }
5919 : : else
5920 : : return 0;
5921 : : }
5922 : :
5923 : 1080891 : else if (! in0_p && in1_p)
5924 : : {
5925 : : /* If they don't overlap, the result is the second range. If the second
5926 : : is a subset of the first, the result is false. Otherwise,
5927 : : the range starts just after the first range and ends at the
5928 : : end of the second. */
5929 : 794829 : if (no_overlap)
5930 : : in_p = 1, low = low1, high = high1;
5931 : 790562 : else if (subset || highequal)
5932 : : in_p = 0, low = high = 0;
5933 : : else
5934 : : {
5935 : 677441 : low = range_successor (high0);
5936 : 677441 : high = high1;
5937 : 677441 : in_p = 1;
5938 : 677441 : if (low == 0)
5939 : : {
5940 : : /* high1 > high0 but high0 has no successor. Punt. */
5941 : : return 0;
5942 : : }
5943 : : }
5944 : : }
5945 : :
5946 : : else
5947 : : {
5948 : : /* The case where we are excluding both ranges. Here the complex case
5949 : : is if they don't overlap. In that case, the only time we have a
5950 : : range is if they are adjacent. If the second is a subset of the
5951 : : first, the result is the first. Otherwise, the range to exclude
5952 : : starts at the beginning of the first range and ends at the end of the
5953 : : second. */
5954 : 286062 : if (no_overlap)
5955 : : {
5956 : 185018 : if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5957 : : range_successor (high0),
5958 : : 1, low1, 0)))
5959 : : in_p = 0, low = low0, high = high1;
5960 : : else
5961 : : {
5962 : : /* Canonicalize - [min, x] into - [-, x]. */
5963 : 143993 : if (low0 && TREE_CODE (low0) == INTEGER_CST)
5964 : 142846 : switch (TREE_CODE (TREE_TYPE (low0)))
5965 : : {
5966 : 49951 : case ENUMERAL_TYPE:
5967 : 49951 : if (maybe_ne (TYPE_PRECISION (TREE_TYPE (low0)),
5968 : : GET_MODE_BITSIZE
5969 : 149853 : (TYPE_MODE (TREE_TYPE (low0)))))
5970 : : break;
5971 : : /* FALLTHROUGH */
5972 : 142561 : case INTEGER_TYPE:
5973 : 142561 : if (tree_int_cst_equal (low0,
5974 : 142561 : TYPE_MIN_VALUE (TREE_TYPE (low0))))
5975 : 10766 : low0 = 0;
5976 : : break;
5977 : 285 : case POINTER_TYPE:
5978 : 285 : if (TYPE_UNSIGNED (TREE_TYPE (low0))
5979 : 285 : && integer_zerop (low0))
5980 : : low0 = 0;
5981 : : break;
5982 : : default:
5983 : : break;
5984 : : }
5985 : :
5986 : : /* Canonicalize - [x, max] into - [x, -]. */
5987 : 143993 : if (high1 && TREE_CODE (high1) == INTEGER_CST)
5988 : 143792 : switch (TREE_CODE (TREE_TYPE (high1)))
5989 : : {
5990 : 49959 : case ENUMERAL_TYPE:
5991 : 49959 : if (maybe_ne (TYPE_PRECISION (TREE_TYPE (high1)),
5992 : : GET_MODE_BITSIZE
5993 : 149877 : (TYPE_MODE (TREE_TYPE (high1)))))
5994 : : break;
5995 : : /* FALLTHROUGH */
5996 : 143507 : case INTEGER_TYPE:
5997 : 143507 : if (tree_int_cst_equal (high1,
5998 : 143507 : TYPE_MAX_VALUE (TREE_TYPE (high1))))
5999 : 7738 : high1 = 0;
6000 : : break;
6001 : 285 : case POINTER_TYPE:
6002 : 285 : if (TYPE_UNSIGNED (TREE_TYPE (high1))
6003 : 570 : && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
6004 : : high1, 1,
6005 : 297 : build_int_cst (TREE_TYPE (high1), 1),
6006 : : 1)))
6007 : 273 : high1 = 0;
6008 : : break;
6009 : : default:
6010 : : break;
6011 : : }
6012 : :
6013 : : /* The ranges might be also adjacent between the maximum and
6014 : : minimum values of the given type. For
6015 : : - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
6016 : : return + [x + 1, y - 1]. */
6017 : 143993 : if (low0 == 0 && high1 == 0)
6018 : : {
6019 : 705 : low = range_successor (high0);
6020 : 705 : high = range_predecessor (low1);
6021 : 705 : if (low == 0 || high == 0)
6022 : : return 0;
6023 : :
6024 : : in_p = 1;
6025 : : }
6026 : : else
6027 : : return 0;
6028 : : }
6029 : : }
6030 : 101044 : else if (subset)
6031 : : in_p = 0, low = low0, high = high0;
6032 : : else
6033 : 11102 : in_p = 0, low = low0, high = high1;
6034 : : }
6035 : :
6036 : 2502198 : *pin_p = in_p, *plow = low, *phigh = high;
6037 : 2502198 : return 1;
6038 : : }
6039 : :
6040 : :
6041 : : /* Subroutine of fold, looking inside expressions of the form
6042 : : A op B ? A : C, where (ARG00, COMP_CODE, ARG01), ARG1 and ARG2
6043 : : are the three operands of the COND_EXPR. This function is
6044 : : being used also to optimize A op B ? C : A, by reversing the
6045 : : comparison first.
6046 : :
6047 : : Return a folded expression whose code is not a COND_EXPR
6048 : : anymore, or NULL_TREE if no folding opportunity is found. */
6049 : :
6050 : : static tree
6051 : 429024 : fold_cond_expr_with_comparison (location_t loc, tree type,
6052 : : enum tree_code comp_code,
6053 : : tree arg00, tree arg01, tree arg1, tree arg2)
6054 : : {
6055 : 429024 : tree arg1_type = TREE_TYPE (arg1);
6056 : 429024 : tree tem;
6057 : :
6058 : 429024 : STRIP_NOPS (arg1);
6059 : 429024 : STRIP_NOPS (arg2);
6060 : :
6061 : : /* If we have A op 0 ? A : -A, consider applying the following
6062 : : transformations:
6063 : :
6064 : : A == 0? A : -A same as -A
6065 : : A != 0? A : -A same as A
6066 : : A >= 0? A : -A same as abs (A)
6067 : : A > 0? A : -A same as abs (A)
6068 : : A <= 0? A : -A same as -abs (A)
6069 : : A < 0? A : -A same as -abs (A)
6070 : :
6071 : : None of these transformations work for modes with signed
6072 : : zeros. If A is +/-0, the first two transformations will
6073 : : change the sign of the result (from +0 to -0, or vice
6074 : : versa). The last four will fix the sign of the result,
6075 : : even though the original expressions could be positive or
6076 : : negative, depending on the sign of A.
6077 : :
6078 : : Note that all these transformations are correct if A is
6079 : : NaN, since the two alternatives (A and -A) are also NaNs. */
6080 : 429024 : if (!HONOR_SIGNED_ZEROS (type)
6081 : 858060 : && (FLOAT_TYPE_P (TREE_TYPE (arg01))
6082 : 429024 : ? real_zerop (arg01)
6083 : 427988 : : integer_zerop (arg01))
6084 : 1134263 : && ((TREE_CODE (arg2) == NEGATE_EXPR
6085 : 1225 : && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
6086 : : /* In the case that A is of the form X-Y, '-A' (arg2) may
6087 : : have already been folded to Y-X, check for that. */
6088 : 275038 : || (TREE_CODE (arg1) == MINUS_EXPR
6089 : 2427 : && TREE_CODE (arg2) == MINUS_EXPR
6090 : 0 : && operand_equal_p (TREE_OPERAND (arg1, 0),
6091 : 0 : TREE_OPERAND (arg2, 1), 0)
6092 : 0 : && operand_equal_p (TREE_OPERAND (arg1, 1),
6093 : 0 : TREE_OPERAND (arg2, 0), 0))))
6094 : 1177 : switch (comp_code)
6095 : : {
6096 : 0 : case EQ_EXPR:
6097 : 0 : case UNEQ_EXPR:
6098 : 0 : tem = fold_convert_loc (loc, arg1_type, arg1);
6099 : 0 : return fold_convert_loc (loc, type, negate_expr (tem));
6100 : 0 : case NE_EXPR:
6101 : 0 : case LTGT_EXPR:
6102 : 0 : return fold_convert_loc (loc, type, arg1);
6103 : 0 : case UNGE_EXPR:
6104 : 0 : case UNGT_EXPR:
6105 : 0 : if (flag_trapping_math)
6106 : : break;
6107 : : /* Fall through. */
6108 : 1177 : case GE_EXPR:
6109 : 1177 : case GT_EXPR:
6110 : 1177 : if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
6111 : : break;
6112 : 1161 : tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
6113 : 1161 : return fold_convert_loc (loc, type, tem);
6114 : 0 : case UNLE_EXPR:
6115 : 0 : case UNLT_EXPR:
6116 : 0 : if (flag_trapping_math)
6117 : : break;
6118 : : /* FALLTHRU */
6119 : 0 : case LE_EXPR:
6120 : 0 : case LT_EXPR:
6121 : 0 : if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
6122 : : break;
6123 : 0 : if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg1))
6124 : 0 : && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
6125 : : {
6126 : : /* A <= 0 ? A : -A for A INT_MIN is valid, but -abs(INT_MIN)
6127 : : is not, invokes UB both in abs and in the negation of it.
6128 : : So, use ABSU_EXPR instead. */
6129 : 0 : tree utype = unsigned_type_for (TREE_TYPE (arg1));
6130 : 0 : tem = fold_build1_loc (loc, ABSU_EXPR, utype, arg1);
6131 : 0 : tem = negate_expr (tem);
6132 : 0 : return fold_convert_loc (loc, type, tem);
6133 : : }
6134 : : else
6135 : : {
6136 : 0 : tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
6137 : 0 : return negate_expr (fold_convert_loc (loc, type, tem));
6138 : : }
6139 : 0 : default:
6140 : 0 : gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
6141 : : break;
6142 : : }
6143 : :
6144 : : /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
6145 : : A == 0 ? A : 0 is always 0 unless A is -0. Note that
6146 : : both transformations are correct when A is NaN: A != 0
6147 : : is then true, and A == 0 is false. */
6148 : :
6149 : 427863 : if (!HONOR_SIGNED_ZEROS (type)
6150 : 427863 : && integer_zerop (arg01) && integer_zerop (arg2))
6151 : : {
6152 : 232387 : if (comp_code == NE_EXPR)
6153 : 153 : return fold_convert_loc (loc, type, arg1);
6154 : 232234 : else if (comp_code == EQ_EXPR)
6155 : 0 : return build_zero_cst (type);
6156 : : }
6157 : :
6158 : : /* Try some transformations of A op B ? A : B.
6159 : :
6160 : : A == B? A : B same as B
6161 : : A != B? A : B same as A
6162 : : A >= B? A : B same as max (A, B)
6163 : : A > B? A : B same as max (B, A)
6164 : : A <= B? A : B same as min (A, B)
6165 : : A < B? A : B same as min (B, A)
6166 : :
6167 : : As above, these transformations don't work in the presence
6168 : : of signed zeros. For example, if A and B are zeros of
6169 : : opposite sign, the first two transformations will change
6170 : : the sign of the result. In the last four, the original
6171 : : expressions give different results for (A=+0, B=-0) and
6172 : : (A=-0, B=+0), but the transformed expressions do not.
6173 : :
6174 : : The first two transformations are correct if either A or B
6175 : : is a NaN. In the first transformation, the condition will
6176 : : be false, and B will indeed be chosen. In the case of the
6177 : : second transformation, the condition A != B will be true,
6178 : : and A will be chosen.
6179 : :
6180 : : The conversions to max() and min() are not correct if B is
6181 : : a number and A is not. The conditions in the original
6182 : : expressions will be false, so all four give B. The min()
6183 : : and max() versions would give a NaN instead. */
6184 : 427710 : if (!HONOR_SIGNED_ZEROS (type)
6185 : 427710 : && operand_equal_for_comparison_p (arg01, arg2)
6186 : : /* Avoid these transformations if the COND_EXPR may be used
6187 : : as an lvalue in the C++ front-end. PR c++/19199. */
6188 : 668461 : && (in_gimple_form
6189 : 15600 : || VECTOR_TYPE_P (type)
6190 : 15528 : || (! lang_GNU_CXX ()
6191 : 12940 : && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
6192 : 2588 : || ! maybe_lvalue_p (arg1)
6193 : 2566 : || ! maybe_lvalue_p (arg2)))
6194 : : {
6195 : 238919 : tree comp_op0 = arg00;
6196 : 238919 : tree comp_op1 = arg01;
6197 : 238919 : tree comp_type = TREE_TYPE (comp_op0);
6198 : :
6199 : 238919 : switch (comp_code)
6200 : : {
6201 : 0 : case EQ_EXPR:
6202 : 0 : return fold_convert_loc (loc, type, arg2);
6203 : 1 : case NE_EXPR:
6204 : 1 : return fold_convert_loc (loc, type, arg1);
6205 : 5766 : case LE_EXPR:
6206 : 5766 : case LT_EXPR:
6207 : 5766 : case UNLE_EXPR:
6208 : 5766 : case UNLT_EXPR:
6209 : : /* In C++ a ?: expression can be an lvalue, so put the
6210 : : operand which will be used if they are equal first
6211 : : so that we can convert this back to the
6212 : : corresponding COND_EXPR. */
6213 : 5766 : if (!HONOR_NANS (arg1))
6214 : : {
6215 : 5766 : comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
6216 : 5766 : comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
6217 : 11532 : tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
6218 : 5766 : ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
6219 : 4395 : : fold_build2_loc (loc, MIN_EXPR, comp_type,
6220 : : comp_op1, comp_op0);
6221 : 5766 : return fold_convert_loc (loc, type, tem);
6222 : : }
6223 : : break;
6224 : 233152 : case GE_EXPR:
6225 : 233152 : case GT_EXPR:
6226 : 233152 : case UNGE_EXPR:
6227 : 233152 : case UNGT_EXPR:
6228 : 233152 : if (!HONOR_NANS (arg1))
6229 : : {
6230 : 233150 : comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
6231 : 233150 : comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
6232 : 466300 : tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
6233 : 233150 : ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
6234 : 3292 : : fold_build2_loc (loc, MAX_EXPR, comp_type,
6235 : : comp_op1, comp_op0);
6236 : 233150 : return fold_convert_loc (loc, type, tem);
6237 : : }
6238 : : break;
6239 : 0 : case UNEQ_EXPR:
6240 : 0 : if (!HONOR_NANS (arg1))
6241 : 0 : return fold_convert_loc (loc, type, arg2);
6242 : : break;
6243 : 0 : case LTGT_EXPR:
6244 : 0 : if (!HONOR_NANS (arg1))
6245 : 0 : return fold_convert_loc (loc, type, arg1);
6246 : : break;
6247 : 0 : default:
6248 : 0 : gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
6249 : : break;
6250 : : }
6251 : : }
6252 : :
6253 : : return NULL_TREE;
6254 : : }
6255 : :
6256 : :
6257 : :
6258 : : #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
6259 : : #define LOGICAL_OP_NON_SHORT_CIRCUIT \
6260 : : (BRANCH_COST (optimize_function_for_speed_p (cfun), \
6261 : : false) >= 2)
6262 : : #endif
6263 : :
6264 : : /* EXP is some logical combination of boolean tests. See if we can
6265 : : merge it into some range test. Return the new tree if so. */
6266 : :
6267 : : static tree
6268 : 21358061 : fold_range_test (location_t loc, enum tree_code code, tree type,
6269 : : tree op0, tree op1)
6270 : : {
6271 : 21358061 : int or_op = (code == TRUTH_ORIF_EXPR
6272 : 21358061 : || code == TRUTH_OR_EXPR);
6273 : 21358061 : int in0_p, in1_p, in_p;
6274 : 21358061 : tree low0, low1, low, high0, high1, high;
6275 : 21358061 : bool strict_overflow_p = false;
6276 : 21358061 : tree tem, lhs, rhs;
6277 : 21358061 : const char * const warnmsg = G_("assuming signed overflow does not occur "
6278 : : "when simplifying range test");
6279 : :
6280 : 21358061 : if (!INTEGRAL_TYPE_P (type))
6281 : : return 0;
6282 : :
6283 : 21358061 : lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
6284 : : /* If op0 is known true or false and this is a short-circuiting
6285 : : operation we must not merge with op1 since that makes side-effects
6286 : : unconditional. So special-case this. */
6287 : 21358061 : if (!lhs
6288 : 2 : && ((code == TRUTH_ORIF_EXPR && in0_p)
6289 : 1 : || (code == TRUTH_ANDIF_EXPR && !in0_p)))
6290 : : return op0;
6291 : 21358059 : rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
6292 : :
6293 : : /* If this is an OR operation, invert both sides; we will invert
6294 : : again at the end. */
6295 : 21358059 : if (or_op)
6296 : 10599896 : in0_p = ! in0_p, in1_p = ! in1_p;
6297 : :
6298 : : /* If both expressions are the same, if we can merge the ranges, and we
6299 : : can build the range test, return it or it inverted. If one of the
6300 : : ranges is always true or always false, consider it to be the same
6301 : : expression as the other. */
6302 : 21325410 : if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
6303 : 1292555 : && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
6304 : : in1_p, low1, high1)
6305 : 22514031 : && (tem = (build_range_check (loc, type,
6306 : : lhs != 0 ? lhs
6307 : 0 : : rhs != 0 ? rhs : integer_zero_node,
6308 : : in_p, low, high))) != 0)
6309 : : {
6310 : 1155972 : if (strict_overflow_p)
6311 : 259 : fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
6312 : 1155972 : return or_op ? invert_truthvalue_loc (loc, tem) : tem;
6313 : : }
6314 : :
6315 : : /* On machines where the branch cost is expensive, if this is a
6316 : : short-circuited branch and the underlying object on both sides
6317 : : is the same, make a non-short-circuit operation. */
6318 : 20202087 : bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
6319 : 20202087 : if (param_logical_op_non_short_circuit != -1)
6320 : 7590 : logical_op_non_short_circuit
6321 : 7590 : = param_logical_op_non_short_circuit;
6322 : 20202087 : if (logical_op_non_short_circuit
6323 : 20198264 : && !sanitize_coverage_p ()
6324 : 20198261 : && lhs != 0 && rhs != 0
6325 : 20198005 : && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6326 : 24422743 : && operand_equal_p (lhs, rhs, 0))
6327 : : {
6328 : : /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
6329 : : unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
6330 : : which cases we can't do this. */
6331 : 105733 : if (simple_operand_p (lhs))
6332 : 60574 : return build2_loc (loc, code == TRUTH_ANDIF_EXPR
6333 : : ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
6334 : 30799 : type, op0, op1);
6335 : :
6336 : 74934 : else if (!lang_hooks.decls.global_bindings_p ()
6337 : 74934 : && !CONTAINS_PLACEHOLDER_P (lhs))
6338 : : {
6339 : 74270 : tree common = save_expr (lhs);
6340 : :
6341 : 125184 : if ((lhs = build_range_check (loc, type, common,
6342 : 50914 : or_op ? ! in0_p : in0_p,
6343 : : low0, high0)) != 0
6344 : 125184 : && (rhs = build_range_check (loc, type, common,
6345 : 50914 : or_op ? ! in1_p : in1_p,
6346 : : low1, high1)) != 0)
6347 : : {
6348 : 74270 : if (strict_overflow_p)
6349 : 0 : fold_overflow_warning (warnmsg,
6350 : : WARN_STRICT_OVERFLOW_COMPARISON);
6351 : 125184 : return build2_loc (loc, code == TRUTH_ANDIF_EXPR
6352 : : ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
6353 : 74270 : type, lhs, rhs);
6354 : : }
6355 : : }
6356 : : }
6357 : :
6358 : : return 0;
6359 : : }
6360 : :
6361 : : /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
6362 : : bit value. Arrange things so the extra bits will be set to zero if and
6363 : : only if C is signed-extended to its full width. If MASK is nonzero,
6364 : : it is an INTEGER_CST that should be AND'ed with the extra bits. */
6365 : :
6366 : : static tree
6367 : 72298 : unextend (tree c, int p, int unsignedp, tree mask)
6368 : : {
6369 : 72298 : tree type = TREE_TYPE (c);
6370 : 72298 : int modesize = GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (type));
6371 : 72298 : tree temp;
6372 : :
6373 : 72298 : if (p == modesize || unsignedp)
6374 : : return c;
6375 : :
6376 : : /* We work by getting just the sign bit into the low-order bit, then
6377 : : into the high-order bit, then sign-extend. We then XOR that value
6378 : : with C. */
6379 : 1499 : temp = build_int_cst (TREE_TYPE (c),
6380 : 1499 : wi::extract_uhwi (wi::to_wide (c), p - 1, 1));
6381 : :
6382 : : /* We must use a signed type in order to get an arithmetic right shift.
6383 : : However, we must also avoid introducing accidental overflows, so that
6384 : : a subsequent call to integer_zerop will work. Hence we must
6385 : : do the type conversion here. At this point, the constant is either
6386 : : zero or one, and the conversion to a signed type can never overflow.
6387 : : We could get an overflow if this conversion is done anywhere else. */
6388 : 1499 : if (TYPE_UNSIGNED (type))
6389 : 1499 : temp = fold_convert (signed_type_for (type), temp);
6390 : :
6391 : 1499 : temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
6392 : 1499 : temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
6393 : 1499 : if (mask != 0)
6394 : 166 : temp = const_binop (BIT_AND_EXPR, temp,
6395 : 166 : fold_convert (TREE_TYPE (c), mask));
6396 : : /* If necessary, convert the type back to match the type of C. */
6397 : 1499 : if (TYPE_UNSIGNED (type))
6398 : 1499 : temp = fold_convert (type, temp);
6399 : :
6400 : 1499 : return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
6401 : : }
6402 : :
6403 : : /* For an expression that has the form
6404 : : (A && B) || ~B
6405 : : or
6406 : : (A || B) && ~B,
6407 : : we can drop one of the inner expressions and simplify to
6408 : : A || ~B
6409 : : or
6410 : : A && ~B
6411 : : LOC is the location of the resulting expression. OP is the inner
6412 : : logical operation; the left-hand side in the examples above, while CMPOP
6413 : : is the right-hand side. RHS_ONLY is used to prevent us from accidentally
6414 : : removing a condition that guards another, as in
6415 : : (A != NULL && A->...) || A == NULL
6416 : : which we must not transform. If RHS_ONLY is true, only eliminate the
6417 : : right-most operand of the inner logical operation. */
6418 : :
6419 : : static tree
6420 : 130776 : merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
6421 : : bool rhs_only)
6422 : : {
6423 : 130776 : enum tree_code code = TREE_CODE (cmpop);
6424 : 130776 : enum tree_code truthop_code = TREE_CODE (op);
6425 : 130776 : tree lhs = TREE_OPERAND (op, 0);
6426 : 130776 : tree rhs = TREE_OPERAND (op, 1);
6427 : 130776 : tree orig_lhs = lhs, orig_rhs = rhs;
6428 : 130776 : enum tree_code rhs_code = TREE_CODE (rhs);
6429 : 130776 : enum tree_code lhs_code = TREE_CODE (lhs);
6430 : 130776 : enum tree_code inv_code;
6431 : :
6432 : 130776 : if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
6433 : : return NULL_TREE;
6434 : :
6435 : 86138 : if (TREE_CODE_CLASS (code) != tcc_comparison)
6436 : : return NULL_TREE;
6437 : :
6438 : 54044 : tree type = TREE_TYPE (TREE_OPERAND (cmpop, 0));
6439 : :
6440 : 54044 : if (rhs_code == truthop_code)
6441 : : {
6442 : 29 : tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
6443 : 29 : if (newrhs != NULL_TREE)
6444 : : {
6445 : 0 : rhs = newrhs;
6446 : 0 : rhs_code = TREE_CODE (rhs);
6447 : : }
6448 : : }
6449 : 54044 : if (lhs_code == truthop_code && !rhs_only)
6450 : : {
6451 : 458 : tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
6452 : 458 : if (newlhs != NULL_TREE)
6453 : : {
6454 : 0 : lhs = newlhs;
6455 : 0 : lhs_code = TREE_CODE (lhs);
6456 : : }
6457 : : }
6458 : :
6459 : 54044 : inv_code = invert_tree_comparison (code, HONOR_NANS (type));
6460 : 54044 : if (inv_code == rhs_code
6461 : 699 : && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
6462 : 54080 : && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
6463 : : return lhs;
6464 : 54031 : if (!rhs_only && inv_code == lhs_code
6465 : 608 : && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
6466 : 54108 : && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
6467 : : return rhs;
6468 : 53955 : if (rhs != orig_rhs || lhs != orig_lhs)
6469 : 0 : return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
6470 : 0 : lhs, rhs);
6471 : : return NULL_TREE;
6472 : : }
6473 : :
6474 : : /* Find ways of folding logical expressions of LHS and RHS:
6475 : : Try to merge two comparisons to the same innermost item.
6476 : : Look for range tests like "ch >= '0' && ch <= '9'".
6477 : : Look for combinations of simple terms on machines with expensive branches
6478 : : and evaluate the RHS unconditionally.
6479 : :
6480 : : For example, if we have p->a == 2 && p->b == 4 and we can make an
6481 : : object large enough to span both A and B, we can do this with a comparison
6482 : : against the object ANDed with the a mask.
6483 : :
6484 : : If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
6485 : : operations to do this with one comparison.
6486 : :
6487 : : We check for both normal comparisons and the BIT_AND_EXPRs made this by
6488 : : function and the one above.
6489 : :
6490 : : CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
6491 : : TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
6492 : :
6493 : : TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
6494 : : two operands.
6495 : :
6496 : : We return the simplified tree or 0 if no optimization is possible. */
6497 : :
6498 : : static tree
6499 : 20768058 : fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
6500 : : tree lhs, tree rhs)
6501 : : {
6502 : : /* If this is the "or" of two comparisons, we can do something if
6503 : : the comparisons are NE_EXPR. If this is the "and", we can do something
6504 : : if the comparisons are EQ_EXPR. I.e.,
6505 : : (a->b == 2 && a->c == 4) can become (a->new == NEW).
6506 : :
6507 : : WANTED_CODE is this operation code. For single bit fields, we can
6508 : : convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
6509 : : comparison for one-bit fields. */
6510 : :
6511 : 20768058 : enum tree_code wanted_code;
6512 : 20768058 : enum tree_code lcode, rcode;
6513 : 20768058 : tree ll_arg, lr_arg, rl_arg, rr_arg;
6514 : 20768058 : tree ll_inner, lr_inner, rl_inner, rr_inner;
6515 : 20768058 : HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
6516 : 20768058 : HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
6517 : 20768058 : HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
6518 : 20768058 : HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
6519 : 20768058 : int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
6520 : 20768058 : int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
6521 : 20768058 : machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
6522 : 20768058 : scalar_int_mode lnmode, rnmode;
6523 : 20768058 : tree ll_mask, lr_mask, rl_mask, rr_mask;
6524 : 20768058 : tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
6525 : 20768058 : tree l_const, r_const;
6526 : 20768058 : tree lntype, rntype, result;
6527 : 20768058 : HOST_WIDE_INT first_bit, end_bit;
6528 : 20768058 : int volatilep;
6529 : :
6530 : : /* Start by getting the comparison codes. Fail if anything is volatile.
6531 : : If one operand is a BIT_AND_EXPR with the constant one, treat it as if
6532 : : it were surrounded with a NE_EXPR. */
6533 : :
6534 : 20768058 : if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
6535 : : return 0;
6536 : :
6537 : 18679303 : lcode = TREE_CODE (lhs);
6538 : 18679303 : rcode = TREE_CODE (rhs);
6539 : :
6540 : 18679303 : if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
6541 : : {
6542 : 0 : lhs = build2 (NE_EXPR, truth_type, lhs,
6543 : 0 : build_int_cst (TREE_TYPE (lhs), 0));
6544 : 0 : lcode = NE_EXPR;
6545 : : }
6546 : :
6547 : 18679303 : if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
6548 : : {
6549 : 0 : rhs = build2 (NE_EXPR, truth_type, rhs,
6550 : 0 : build_int_cst (TREE_TYPE (rhs), 0));
6551 : 0 : rcode = NE_EXPR;
6552 : : }
6553 : :
6554 : 18679303 : if (TREE_CODE_CLASS (lcode) != tcc_comparison
6555 : 16582936 : || TREE_CODE_CLASS (rcode) != tcc_comparison)
6556 : : return 0;
6557 : :
6558 : 15572386 : ll_arg = TREE_OPERAND (lhs, 0);
6559 : 15572386 : lr_arg = TREE_OPERAND (lhs, 1);
6560 : 15572386 : rl_arg = TREE_OPERAND (rhs, 0);
6561 : 15572386 : rr_arg = TREE_OPERAND (rhs, 1);
6562 : :
6563 : : /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
6564 : 15572386 : if (simple_operand_p (ll_arg)
6565 : 15572386 : && simple_operand_p (lr_arg))
6566 : : {
6567 : 12642155 : if (operand_equal_p (ll_arg, rl_arg, 0)
6568 : 12642155 : && operand_equal_p (lr_arg, rr_arg, 0))
6569 : : {
6570 : 16107 : result = combine_comparisons (loc, code, lcode, rcode,
6571 : : truth_type, ll_arg, lr_arg);
6572 : 16107 : if (result)
6573 : : return result;
6574 : : }
6575 : 12626048 : else if (operand_equal_p (ll_arg, rr_arg, 0)
6576 : 12626048 : && operand_equal_p (lr_arg, rl_arg, 0))
6577 : : {
6578 : 262 : result = combine_comparisons (loc, code, lcode,
6579 : : swap_tree_comparison (rcode),
6580 : : truth_type, ll_arg, lr_arg);
6581 : 262 : if (result)
6582 : : return result;
6583 : : }
6584 : : }
6585 : :
6586 : 31113216 : code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
6587 : 15556608 : ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
6588 : :
6589 : : /* If the RHS can be evaluated unconditionally and its operands are
6590 : : simple, it wins to evaluate the RHS unconditionally on machines
6591 : : with expensive branches. In this case, this isn't a comparison
6592 : : that can be merged. */
6593 : :
6594 : 15556608 : if (BRANCH_COST (optimize_function_for_speed_p (cfun),
6595 : : false) >= 2
6596 : 15556505 : && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
6597 : 14590416 : && simple_operand_p (rl_arg)
6598 : 24254073 : && simple_operand_p (rr_arg))
6599 : : {
6600 : : /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
6601 : 9714531 : if (code == TRUTH_OR_EXPR
6602 : 1280614 : && lcode == NE_EXPR && integer_zerop (lr_arg)
6603 : 510354 : && rcode == NE_EXPR && integer_zerop (rr_arg)
6604 : 22683 : && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
6605 : 9733835 : && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
6606 : 38076 : return build2_loc (loc, NE_EXPR, truth_type,
6607 : 19038 : build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
6608 : : ll_arg, rl_arg),
6609 : 38076 : build_int_cst (TREE_TYPE (ll_arg), 0));
6610 : :
6611 : : /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
6612 : 9695493 : if (code == TRUTH_AND_EXPR
6613 : 1428340 : && lcode == EQ_EXPR && integer_zerop (lr_arg)
6614 : 642946 : && rcode == EQ_EXPR && integer_zerop (rr_arg)
6615 : 5265 : && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
6616 : 9696903 : && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
6617 : 2370 : return build2_loc (loc, EQ_EXPR, truth_type,
6618 : 1185 : build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
6619 : : ll_arg, rl_arg),
6620 : 2370 : build_int_cst (TREE_TYPE (ll_arg), 0));
6621 : : }
6622 : :
6623 : : /* See if the comparisons can be merged. Then get all the parameters for
6624 : : each side. */
6625 : :
6626 : 15536385 : if ((lcode != EQ_EXPR && lcode != NE_EXPR)
6627 : 8060793 : || (rcode != EQ_EXPR && rcode != NE_EXPR))
6628 : : return 0;
6629 : :
6630 : 2778563 : ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
6631 : 2778563 : volatilep = 0;
6632 : 2778563 : ll_inner = decode_field_reference (loc, &ll_arg,
6633 : : &ll_bitsize, &ll_bitpos, &ll_mode,
6634 : : &ll_unsignedp, &ll_reversep, &volatilep,
6635 : : &ll_mask, &ll_and_mask);
6636 : 2778563 : lr_inner = decode_field_reference (loc, &lr_arg,
6637 : : &lr_bitsize, &lr_bitpos, &lr_mode,
6638 : : &lr_unsignedp, &lr_reversep, &volatilep,
6639 : : &lr_mask, &lr_and_mask);
6640 : 2778563 : rl_inner = decode_field_reference (loc, &rl_arg,
6641 : : &rl_bitsize, &rl_bitpos, &rl_mode,
6642 : : &rl_unsignedp, &rl_reversep, &volatilep,
6643 : : &rl_mask, &rl_and_mask);
6644 : 2778563 : rr_inner = decode_field_reference (loc, &rr_arg,
6645 : : &rr_bitsize, &rr_bitpos, &rr_mode,
6646 : : &rr_unsignedp, &rr_reversep, &volatilep,
6647 : : &rr_mask, &rr_and_mask);
6648 : :
6649 : : /* It must be true that the inner operation on the lhs of each
6650 : : comparison must be the same if we are to be able to do anything.
6651 : : Then see if we have constants. If not, the same must be true for
6652 : : the rhs's. */
6653 : 2778563 : if (volatilep
6654 : 2778563 : || ll_reversep != rl_reversep
6655 : 2778543 : || ll_inner == 0 || rl_inner == 0
6656 : 2858134 : || ! operand_equal_p (ll_inner, rl_inner, 0))
6657 : 2719667 : return 0;
6658 : :
6659 : 58896 : if (TREE_CODE (lr_arg) == INTEGER_CST
6660 : 54539 : && TREE_CODE (rr_arg) == INTEGER_CST)
6661 : : {
6662 : 53877 : l_const = lr_arg, r_const = rr_arg;
6663 : 53877 : lr_reversep = ll_reversep;
6664 : : }
6665 : 5019 : else if (lr_reversep != rr_reversep
6666 : 5019 : || lr_inner == 0 || rr_inner == 0
6667 : 7806 : || ! operand_equal_p (lr_inner, rr_inner, 0))
6668 : 2362 : return 0;
6669 : : else
6670 : : l_const = r_const = 0;
6671 : :
6672 : : /* If either comparison code is not correct for our logical operation,
6673 : : fail. However, we can convert a one-bit comparison against zero into
6674 : : the opposite comparison against that bit being set in the field. */
6675 : :
6676 : 56534 : wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
6677 : 56534 : if (lcode != wanted_code)
6678 : : {
6679 : 1553 : if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
6680 : : {
6681 : : /* Make the left operand unsigned, since we are only interested
6682 : : in the value of one bit. Otherwise we are doing the wrong
6683 : : thing below. */
6684 : 574 : ll_unsignedp = 1;
6685 : 574 : l_const = ll_mask;
6686 : : }
6687 : : else
6688 : 979 : return 0;
6689 : : }
6690 : :
6691 : : /* This is analogous to the code for l_const above. */
6692 : 55555 : if (rcode != wanted_code)
6693 : : {
6694 : 1545 : if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
6695 : : {
6696 : 560 : rl_unsignedp = 1;
6697 : 560 : r_const = rl_mask;
6698 : : }
6699 : : else
6700 : 985 : return 0;
6701 : : }
6702 : :
6703 : : /* See if we can find a mode that contains both fields being compared on
6704 : : the left. If we can't, fail. Otherwise, update all constants and masks
6705 : : to be relative to a field of that size. */
6706 : 54570 : first_bit = MIN (ll_bitpos, rl_bitpos);
6707 : 54570 : end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
6708 : 54570 : if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
6709 : 55838 : TYPE_ALIGN (TREE_TYPE (ll_inner)), BITS_PER_WORD,
6710 : : volatilep, &lnmode))
6711 : : return 0;
6712 : :
6713 : 36352 : lnbitsize = GET_MODE_BITSIZE (lnmode);
6714 : 36352 : lnbitpos = first_bit & ~ (lnbitsize - 1);
6715 : 36352 : lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
6716 : 36352 : xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
6717 : :
6718 : 36352 : if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6719 : : {
6720 : 6 : xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
6721 : 6 : xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
6722 : : }
6723 : :
6724 : 36352 : ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
6725 : : size_int (xll_bitpos));
6726 : 36352 : rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
6727 : : size_int (xrl_bitpos));
6728 : 36352 : if (ll_mask == NULL_TREE || rl_mask == NULL_TREE)
6729 : : return 0;
6730 : :
6731 : 36346 : if (l_const)
6732 : : {
6733 : 36149 : l_const = fold_convert_loc (loc, lntype, l_const);
6734 : 36149 : l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
6735 : 36149 : l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
6736 : 36149 : if (l_const == NULL_TREE)
6737 : : return 0;
6738 : 36149 : if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
6739 : : fold_build1_loc (loc, BIT_NOT_EXPR,
6740 : : lntype, ll_mask))))
6741 : : {
6742 : 0 : warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6743 : :
6744 : 0 : return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6745 : : }
6746 : : }
6747 : 36346 : if (r_const)
6748 : : {
6749 : 36149 : r_const = fold_convert_loc (loc, lntype, r_const);
6750 : 36149 : r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
6751 : 36149 : r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
6752 : 36149 : if (r_const == NULL_TREE)
6753 : : return 0;
6754 : 36149 : if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
6755 : : fold_build1_loc (loc, BIT_NOT_EXPR,
6756 : : lntype, rl_mask))))
6757 : : {
6758 : 0 : warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6759 : :
6760 : 0 : return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6761 : : }
6762 : : }
6763 : :
6764 : : /* If the right sides are not constant, do the same for it. Also,
6765 : : disallow this optimization if a size, signedness or storage order
6766 : : mismatch occurs between the left and right sides. */
6767 : 36346 : if (l_const == 0)
6768 : : {
6769 : 197 : if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
6770 : 197 : || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
6771 : 197 : || ll_reversep != lr_reversep
6772 : : /* Make sure the two fields on the right
6773 : : correspond to the left without being swapped. */
6774 : 191 : || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
6775 : : return 0;
6776 : :
6777 : 191 : first_bit = MIN (lr_bitpos, rr_bitpos);
6778 : 191 : end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
6779 : 191 : if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
6780 : 191 : TYPE_ALIGN (TREE_TYPE (lr_inner)), BITS_PER_WORD,
6781 : : volatilep, &rnmode))
6782 : : return 0;
6783 : :
6784 : 191 : rnbitsize = GET_MODE_BITSIZE (rnmode);
6785 : 191 : rnbitpos = first_bit & ~ (rnbitsize - 1);
6786 : 191 : rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
6787 : 191 : xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
6788 : :
6789 : 191 : if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6790 : : {
6791 : 0 : xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
6792 : 0 : xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
6793 : : }
6794 : :
6795 : 191 : lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6796 : : rntype, lr_mask),
6797 : : size_int (xlr_bitpos));
6798 : 191 : rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6799 : : rntype, rr_mask),
6800 : : size_int (xrr_bitpos));
6801 : 191 : if (lr_mask == NULL_TREE || rr_mask == NULL_TREE)
6802 : : return 0;
6803 : :
6804 : : /* Make a mask that corresponds to both fields being compared.
6805 : : Do this for both items being compared. If the operands are the
6806 : : same size and the bits being compared are in the same position
6807 : : then we can do this by masking both and comparing the masked
6808 : : results. */
6809 : 191 : ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6810 : 191 : lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
6811 : 191 : if (lnbitsize == rnbitsize
6812 : 191 : && xll_bitpos == xlr_bitpos
6813 : 155 : && lnbitpos >= 0
6814 : 155 : && rnbitpos >= 0)
6815 : : {
6816 : 155 : lhs = make_bit_field_ref (loc, ll_inner, ll_arg,
6817 : : lntype, lnbitsize, lnbitpos,
6818 : 155 : ll_unsignedp || rl_unsignedp, ll_reversep);
6819 : 155 : if (! all_ones_mask_p (ll_mask, lnbitsize))
6820 : 155 : lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
6821 : :
6822 : 155 : rhs = make_bit_field_ref (loc, lr_inner, lr_arg,
6823 : : rntype, rnbitsize, rnbitpos,
6824 : 155 : lr_unsignedp || rr_unsignedp, lr_reversep);
6825 : 155 : if (! all_ones_mask_p (lr_mask, rnbitsize))
6826 : 155 : rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
6827 : :
6828 : 155 : return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6829 : : }
6830 : :
6831 : : /* There is still another way we can do something: If both pairs of
6832 : : fields being compared are adjacent, we may be able to make a wider
6833 : : field containing them both.
6834 : :
6835 : : Note that we still must mask the lhs/rhs expressions. Furthermore,
6836 : : the mask must be shifted to account for the shift done by
6837 : : make_bit_field_ref. */
6838 : 36 : if (((ll_bitsize + ll_bitpos == rl_bitpos
6839 : 18 : && lr_bitsize + lr_bitpos == rr_bitpos)
6840 : 18 : || (ll_bitpos == rl_bitpos + rl_bitsize
6841 : 18 : && lr_bitpos == rr_bitpos + rr_bitsize))
6842 : 36 : && ll_bitpos >= 0
6843 : 36 : && rl_bitpos >= 0
6844 : 36 : && lr_bitpos >= 0
6845 : 36 : && rr_bitpos >= 0)
6846 : : {
6847 : 36 : tree type;
6848 : :
6849 : 54 : lhs = make_bit_field_ref (loc, ll_inner, ll_arg, lntype,
6850 : : ll_bitsize + rl_bitsize,
6851 : : MIN (ll_bitpos, rl_bitpos),
6852 : : ll_unsignedp, ll_reversep);
6853 : 54 : rhs = make_bit_field_ref (loc, lr_inner, lr_arg, rntype,
6854 : : lr_bitsize + rr_bitsize,
6855 : : MIN (lr_bitpos, rr_bitpos),
6856 : : lr_unsignedp, lr_reversep);
6857 : :
6858 : 36 : ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
6859 : 36 : size_int (MIN (xll_bitpos, xrl_bitpos)));
6860 : 36 : lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
6861 : 36 : size_int (MIN (xlr_bitpos, xrr_bitpos)));
6862 : 36 : if (ll_mask == NULL_TREE || lr_mask == NULL_TREE)
6863 : : return 0;
6864 : :
6865 : : /* Convert to the smaller type before masking out unwanted bits. */
6866 : 36 : type = lntype;
6867 : 36 : if (lntype != rntype)
6868 : : {
6869 : 0 : if (lnbitsize > rnbitsize)
6870 : : {
6871 : 0 : lhs = fold_convert_loc (loc, rntype, lhs);
6872 : 0 : ll_mask = fold_convert_loc (loc, rntype, ll_mask);
6873 : 0 : type = rntype;
6874 : : }
6875 : 0 : else if (lnbitsize < rnbitsize)
6876 : : {
6877 : 0 : rhs = fold_convert_loc (loc, lntype, rhs);
6878 : 0 : lr_mask = fold_convert_loc (loc, lntype, lr_mask);
6879 : 0 : type = lntype;
6880 : : }
6881 : : }
6882 : :
6883 : 36 : if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
6884 : 36 : lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
6885 : :
6886 : 36 : if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
6887 : 36 : rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
6888 : :
6889 : 36 : return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6890 : : }
6891 : :
6892 : : return 0;
6893 : : }
6894 : :
6895 : : /* Handle the case of comparisons with constants. If there is something in
6896 : : common between the masks, those bits of the constants must be the same.
6897 : : If not, the condition is always false. Test for this to avoid generating
6898 : : incorrect code below. */
6899 : 36149 : result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
6900 : 36149 : if (! integer_zerop (result)
6901 : 36161 : && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
6902 : 12 : const_binop (BIT_AND_EXPR, result, r_const)) != 1)
6903 : : {
6904 : 0 : if (wanted_code == NE_EXPR)
6905 : : {
6906 : 0 : warning (0, "%<or%> of unmatched not-equal tests is always 1");
6907 : 0 : return constant_boolean_node (true, truth_type);
6908 : : }
6909 : : else
6910 : : {
6911 : 0 : warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
6912 : 0 : return constant_boolean_node (false, truth_type);
6913 : : }
6914 : : }
6915 : :
6916 : 36149 : if (lnbitpos < 0)
6917 : : return 0;
6918 : :
6919 : : /* Construct the expression we will return. First get the component
6920 : : reference we will make. Unless the mask is all ones the width of
6921 : : that field, perform the mask operation. Then compare with the
6922 : : merged constant. */
6923 : 36143 : result = make_bit_field_ref (loc, ll_inner, ll_arg,
6924 : : lntype, lnbitsize, lnbitpos,
6925 : 36143 : ll_unsignedp || rl_unsignedp, ll_reversep);
6926 : :
6927 : 36143 : ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6928 : 36143 : if (! all_ones_mask_p (ll_mask, lnbitsize))
6929 : 36143 : result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
6930 : :
6931 : 36143 : return build2_loc (loc, wanted_code, truth_type, result,
6932 : 36143 : const_binop (BIT_IOR_EXPR, l_const, r_const));
6933 : : }
6934 : :
6935 : : /* T is an integer expression that is being multiplied, divided, or taken a
6936 : : modulus (CODE says which and what kind of divide or modulus) by a
6937 : : constant C. See if we can eliminate that operation by folding it with
6938 : : other operations already in T. WIDE_TYPE, if non-null, is a type that
6939 : : should be used for the computation if wider than our type.
6940 : :
6941 : : For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6942 : : (X * 2) + (Y * 4). We must, however, be assured that either the original
6943 : : expression would not overflow or that overflow is undefined for the type
6944 : : in the language in question.
6945 : :
6946 : : If we return a non-null expression, it is an equivalent form of the
6947 : : original computation, but need not be in the original type.
6948 : :
6949 : : We set *STRICT_OVERFLOW_P to true if the return values depends on
6950 : : signed overflow being undefined. Otherwise we do not change
6951 : : *STRICT_OVERFLOW_P. */
6952 : :
6953 : : static tree
6954 : 76101332 : extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6955 : : bool *strict_overflow_p)
6956 : : {
6957 : : /* To avoid exponential search depth, refuse to allow recursion past
6958 : : three levels. Beyond that (1) it's highly unlikely that we'll find
6959 : : something interesting and (2) we've probably processed it before
6960 : : when we built the inner expression. */
6961 : :
6962 : 76101332 : static int depth;
6963 : 76101332 : tree ret;
6964 : :
6965 : 76101332 : if (depth > 3)
6966 : : return NULL;
6967 : :
6968 : 73464038 : depth++;
6969 : 73464038 : ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6970 : 73464038 : depth--;
6971 : :
6972 : 73464038 : return ret;
6973 : : }
6974 : :
6975 : : static tree
6976 : 73464038 : extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6977 : : bool *strict_overflow_p)
6978 : : {
6979 : 73464038 : tree type = TREE_TYPE (t);
6980 : 73464038 : enum tree_code tcode = TREE_CODE (t);
6981 : 73464038 : tree ctype = type;
6982 : 73464038 : if (wide_type)
6983 : : {
6984 : 23052564 : if (TREE_CODE (type) == BITINT_TYPE
6985 : 23052454 : || TREE_CODE (wide_type) == BITINT_TYPE)
6986 : : {
6987 : 110 : if (TYPE_PRECISION (wide_type) > TYPE_PRECISION (type))
6988 : 8061667 : ctype = wide_type;
6989 : : }
6990 : 23052454 : else if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (wide_type))
6991 : 46104908 : > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type)))
6992 : 8061667 : ctype = wide_type;
6993 : : }
6994 : 73464038 : tree t1, t2;
6995 : 73464038 : bool same_p = tcode == code;
6996 : 73464038 : tree op0 = NULL_TREE, op1 = NULL_TREE;
6997 : 73464038 : bool sub_strict_overflow_p;
6998 : :
6999 : : /* Don't deal with constants of zero here; they confuse the code below. */
7000 : 73464038 : if (integer_zerop (c))
7001 : : return NULL_TREE;
7002 : :
7003 : 73444554 : if (TREE_CODE_CLASS (tcode) == tcc_unary)
7004 : 28381830 : op0 = TREE_OPERAND (t, 0);
7005 : :
7006 : 73444554 : if (TREE_CODE_CLASS (tcode) == tcc_binary)
7007 : 9475772 : op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
7008 : :
7009 : : /* Note that we need not handle conditional operations here since fold
7010 : : already handles those cases. So just do arithmetic here. */
7011 : 73444554 : switch (tcode)
7012 : : {
7013 : 3384732 : case INTEGER_CST:
7014 : : /* For a constant, we can always simplify if we are a multiply
7015 : : or (for divide and modulus) if it is a multiple of our constant. */
7016 : 3384732 : if (code == MULT_EXPR
7017 : 4307471 : || wi::multiple_of_p (wi::to_wide (t), wi::to_wide (c),
7018 : 922739 : TYPE_SIGN (type)))
7019 : : {
7020 : 2742699 : tree tem = const_binop (code, fold_convert (ctype, t),
7021 : : fold_convert (ctype, c));
7022 : : /* If the multiplication overflowed, we lost information on it.
7023 : : See PR68142 and PR69845. */
7024 : 2742699 : if (TREE_OVERFLOW (tem))
7025 : : return NULL_TREE;
7026 : : return tem;
7027 : : }
7028 : : break;
7029 : :
7030 : 27882231 : CASE_CONVERT: case NON_LVALUE_EXPR:
7031 : 27882231 : if (!INTEGRAL_TYPE_P (TREE_TYPE (op0)))
7032 : : break;
7033 : : /* If op0 is an expression ... */
7034 : 27010193 : if ((COMPARISON_CLASS_P (op0)
7035 : : || UNARY_CLASS_P (op0)
7036 : 27010193 : || BINARY_CLASS_P (op0)
7037 : 24301653 : || VL_EXP_CLASS_P (op0)
7038 : 24266465 : || EXPRESSION_CLASS_P (op0))
7039 : : /* ... and has wrapping overflow, and its type is smaller
7040 : : than ctype, then we cannot pass through as widening. */
7041 : 27118809 : && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
7042 : 918268 : && (TYPE_PRECISION (ctype)
7043 : 918268 : > TYPE_PRECISION (TREE_TYPE (op0))))
7044 : : /* ... or this is a truncation (t is narrower than op0),
7045 : : then we cannot pass through this narrowing. */
7046 : 2329800 : || (TYPE_PRECISION (type)
7047 : 2329800 : < TYPE_PRECISION (TREE_TYPE (op0)))
7048 : : /* ... or signedness changes for division or modulus,
7049 : : then we cannot pass through this conversion. */
7050 : 2294970 : || (code != MULT_EXPR
7051 : 80050 : && (TYPE_UNSIGNED (ctype)
7052 : 80050 : != TYPE_UNSIGNED (TREE_TYPE (op0))))
7053 : : /* ... or has undefined overflow while the converted to
7054 : : type has not, we cannot do the operation in the inner type
7055 : : as that would introduce undefined overflow. */
7056 : 2234561 : || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
7057 : 1831252 : && !TYPE_OVERFLOW_UNDEFINED (type))))
7058 : : break;
7059 : :
7060 : : /* Pass the constant down and see if we can make a simplification. If
7061 : : we can, replace this expression with the inner simplification for
7062 : : possible later conversion to our or some other type. */
7063 : 24646632 : if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
7064 : 24646632 : && TREE_CODE (t2) == INTEGER_CST
7065 : 24646632 : && !TREE_OVERFLOW (t2)
7066 : 49974779 : && (t1 = extract_muldiv (op0, t2, code,
7067 : : code == MULT_EXPR ? ctype : NULL_TREE,
7068 : : strict_overflow_p)) != 0)
7069 : : return t1;
7070 : : break;
7071 : :
7072 : 327 : case ABS_EXPR:
7073 : : /* If widening the type changes it from signed to unsigned, then we
7074 : : must avoid building ABS_EXPR itself as unsigned. */
7075 : 327 : if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
7076 : : {
7077 : 0 : tree cstype = (*signed_type_for) (ctype);
7078 : 0 : if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
7079 : : != 0)
7080 : : {
7081 : 0 : t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
7082 : 0 : return fold_convert (ctype, t1);
7083 : : }
7084 : : break;
7085 : : }
7086 : : /* If the constant is negative, we cannot simplify this. */
7087 : 327 : if (tree_int_cst_sgn (c) == -1)
7088 : : break;
7089 : : /* FALLTHROUGH */
7090 : 15637 : case NEGATE_EXPR:
7091 : : /* For division and modulus, type can't be unsigned, as e.g.
7092 : : (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
7093 : : For signed types, even with wrapping overflow, this is fine. */
7094 : 15637 : if (code != MULT_EXPR && TYPE_UNSIGNED (type))
7095 : : break;
7096 : 14270 : if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
7097 : : != 0)
7098 : 0 : return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
7099 : : break;
7100 : :
7101 : 168 : case MIN_EXPR: case MAX_EXPR:
7102 : : /* If widening the type changes the signedness, then we can't perform
7103 : : this optimization as that changes the result. */
7104 : 168 : if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
7105 : : break;
7106 : :
7107 : : /* Punt for multiplication altogether.
7108 : : MAX (1U + INT_MAX, 1U) * 2U is not equivalent to
7109 : : MAX ((1U + INT_MAX) * 2U, 1U * 2U), the former is
7110 : : 0U, the latter is 2U.
7111 : : MAX (INT_MIN / 2, 0) * -2 is not equivalent to
7112 : : MIN (INT_MIN / 2 * -2, 0 * -2), the former is
7113 : : well defined 0, the latter invokes UB.
7114 : : MAX (INT_MIN / 2, 5) * 5 is not equivalent to
7115 : : MAX (INT_MIN / 2 * 5, 5 * 5), the former is
7116 : : well defined 25, the latter invokes UB. */
7117 : 168 : if (code == MULT_EXPR)
7118 : : break;
7119 : : /* For division/modulo, punt on c being -1 for MAX, as
7120 : : MAX (INT_MIN, 0) / -1 is not equivalent to
7121 : : MIN (INT_MIN / -1, 0 / -1), the former is well defined
7122 : : 0, the latter invokes UB (or for -fwrapv is INT_MIN).
7123 : : MIN (INT_MIN, 0) / -1 already invokes UB, so the
7124 : : transformation won't make it worse. */
7125 : 8 : else if (tcode == MAX_EXPR && integer_minus_onep (c))
7126 : : break;
7127 : :
7128 : : /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
7129 : 8 : sub_strict_overflow_p = false;
7130 : 8 : if ((t1 = extract_muldiv (op0, c, code, wide_type,
7131 : : &sub_strict_overflow_p)) != 0
7132 : 8 : && (t2 = extract_muldiv (op1, c, code, wide_type,
7133 : : &sub_strict_overflow_p)) != 0)
7134 : : {
7135 : 0 : if (tree_int_cst_sgn (c) < 0)
7136 : 0 : tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
7137 : 0 : if (sub_strict_overflow_p)
7138 : 0 : *strict_overflow_p = true;
7139 : 0 : return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
7140 : : fold_convert (ctype, t2));
7141 : : }
7142 : : break;
7143 : :
7144 : 1007 : case LSHIFT_EXPR: case RSHIFT_EXPR:
7145 : : /* If the second operand is constant, this is a multiplication
7146 : : or floor division, by a power of two, so we can treat it that
7147 : : way unless the multiplier or divisor overflows. Signed
7148 : : left-shift overflow is implementation-defined rather than
7149 : : undefined in C90, so do not convert signed left shift into
7150 : : multiplication. */
7151 : 1007 : if (TREE_CODE (op1) == INTEGER_CST
7152 : 990 : && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
7153 : : /* const_binop may not detect overflow correctly,
7154 : : so check for it explicitly here. */
7155 : 875 : && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
7156 : 1016 : wi::to_wide (op1))
7157 : 866 : && (t1 = fold_convert (ctype,
7158 : : const_binop (LSHIFT_EXPR, size_one_node,
7159 : : op1))) != 0
7160 : 1873 : && !TREE_OVERFLOW (t1))
7161 : 1548 : return extract_muldiv (build2 (tcode == LSHIFT_EXPR
7162 : : ? MULT_EXPR : FLOOR_DIV_EXPR,
7163 : : ctype,
7164 : : fold_convert (ctype, op0),
7165 : : t1),
7166 : 866 : c, code, wide_type, strict_overflow_p);
7167 : : break;
7168 : :
7169 : 6235263 : case PLUS_EXPR: case MINUS_EXPR:
7170 : : /* See if we can eliminate the operation on both sides. If we can, we
7171 : : can return a new PLUS or MINUS. If we can't, the only remaining
7172 : : cases where we can do anything are if the second operand is a
7173 : : constant. */
7174 : 6235263 : sub_strict_overflow_p = false;
7175 : 6235263 : t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
7176 : 6235263 : t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
7177 : 709830 : if (t1 != 0 && t2 != 0
7178 : 241642 : && TYPE_OVERFLOW_WRAPS (ctype)
7179 : 6468697 : && (code == MULT_EXPR
7180 : : /* If not multiplication, we can only do this if both operands
7181 : : are divisible by c. */
7182 : 0 : || (multiple_of_p (ctype, op0, c)
7183 : 0 : && multiple_of_p (ctype, op1, c))))
7184 : : {
7185 : 233434 : if (sub_strict_overflow_p)
7186 : 0 : *strict_overflow_p = true;
7187 : 233434 : return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
7188 : : fold_convert (ctype, t2));
7189 : : }
7190 : :
7191 : : /* If this was a subtraction, negate OP1 and set it to be an addition.
7192 : : This simplifies the logic below. */
7193 : 6001829 : if (tcode == MINUS_EXPR)
7194 : : {
7195 : 1415887 : tcode = PLUS_EXPR, op1 = negate_expr (op1);
7196 : : /* If OP1 was not easily negatable, the constant may be OP0. */
7197 : 1415887 : if (TREE_CODE (op0) == INTEGER_CST)
7198 : : {
7199 : 236091 : std::swap (op0, op1);
7200 : 236091 : std::swap (t1, t2);
7201 : : }
7202 : : }
7203 : :
7204 : 6001829 : if (TREE_CODE (op1) != INTEGER_CST)
7205 : : break;
7206 : :
7207 : : /* If either OP1 or C are negative, this optimization is not safe for
7208 : : some of the division and remainder types while for others we need
7209 : : to change the code. */
7210 : 2666384 : if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
7211 : : {
7212 : 160934 : if (code == CEIL_DIV_EXPR)
7213 : : code = FLOOR_DIV_EXPR;
7214 : 160934 : else if (code == FLOOR_DIV_EXPR)
7215 : : code = CEIL_DIV_EXPR;
7216 : 160717 : else if (code != MULT_EXPR
7217 : 160717 : && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
7218 : : break;
7219 : : }
7220 : :
7221 : : /* If it's a multiply or a division/modulus operation of a multiple
7222 : : of our constant, do the operation and verify it doesn't overflow. */
7223 : 2662903 : if (code == MULT_EXPR
7224 : 3582809 : || wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
7225 : 919906 : TYPE_SIGN (type)))
7226 : : {
7227 : 2023184 : op1 = const_binop (code, fold_convert (ctype, op1),
7228 : : fold_convert (ctype, c));
7229 : : /* We allow the constant to overflow with wrapping semantics. */
7230 : 2023184 : if (op1 == 0
7231 : 2023184 : || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
7232 : : break;
7233 : : }
7234 : : else
7235 : : break;
7236 : :
7237 : : /* If we have an unsigned type, we cannot widen the operation since it
7238 : : will change the result if the original computation overflowed. */
7239 : 2020209 : if (TYPE_UNSIGNED (ctype) && ctype != type)
7240 : : break;
7241 : :
7242 : : /* The last case is if we are a multiply. In that case, we can
7243 : : apply the distributive law to commute the multiply and addition
7244 : : if the multiplication of the constants doesn't overflow
7245 : : and overflow is defined. With undefined overflow
7246 : : op0 * c might overflow, while (op0 + orig_op1) * c doesn't.
7247 : : But fold_plusminus_mult_expr would factor back any power-of-two
7248 : : value so do not distribute in the first place in this case. */
7249 : 2020209 : if (code == MULT_EXPR
7250 : 1740559 : && TYPE_OVERFLOW_WRAPS (ctype)
7251 : 3465166 : && !(tree_fits_shwi_p (c) && pow2p_hwi (absu_hwi (tree_to_shwi (c)))))
7252 : 180730 : return fold_build2 (tcode, ctype,
7253 : : fold_build2 (code, ctype,
7254 : : fold_convert (ctype, op0),
7255 : : fold_convert (ctype, c)),
7256 : : op1);
7257 : :
7258 : : break;
7259 : :
7260 : 2174664 : case MULT_EXPR:
7261 : : /* We have a special case here if we are doing something like
7262 : : (C * 8) % 4 since we know that's zero. */
7263 : 2174664 : if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
7264 : 2174664 : || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
7265 : : /* If the multiplication can overflow we cannot optimize this. */
7266 : 9124 : && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
7267 : 346 : && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
7268 : 2183788 : && wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
7269 : 301 : TYPE_SIGN (type)))
7270 : : {
7271 : 8 : *strict_overflow_p = true;
7272 : 8 : return omit_one_operand (type, integer_zero_node, op0);
7273 : : }
7274 : :
7275 : : /* ... fall through ... */
7276 : :
7277 : 2379454 : case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
7278 : 2379454 : case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
7279 : : /* If we can extract our operation from the LHS, do so and return a
7280 : : new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
7281 : : do something only if the second operand is a constant. */
7282 : 2379454 : if (same_p
7283 : 2041638 : && TYPE_OVERFLOW_WRAPS (ctype)
7284 : 4236067 : && (t1 = extract_muldiv (op0, c, code, wide_type,
7285 : : strict_overflow_p)) != 0)
7286 : 44491 : return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
7287 : : fold_convert (ctype, op1));
7288 : 2334963 : else if (tcode == MULT_EXPR && code == MULT_EXPR
7289 : 1995155 : && TYPE_OVERFLOW_WRAPS (ctype)
7290 : 4145141 : && (t1 = extract_muldiv (op1, c, code, wide_type,
7291 : : strict_overflow_p)) != 0)
7292 : 872874 : return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
7293 : : fold_convert (ctype, t1));
7294 : 1462089 : else if (TREE_CODE (op1) != INTEGER_CST)
7295 : : return 0;
7296 : :
7297 : : /* If these are the same operation types, we can associate them
7298 : : assuming no overflow. */
7299 : 549682 : if (tcode == code)
7300 : : {
7301 : 212283 : bool overflow_p = false;
7302 : 212283 : wi::overflow_type overflow_mul;
7303 : 212283 : signop sign = TYPE_SIGN (ctype);
7304 : 212283 : unsigned prec = TYPE_PRECISION (ctype);
7305 : 424566 : wide_int mul = wi::mul (wi::to_wide (op1, prec),
7306 : 424566 : wi::to_wide (c, prec),
7307 : 212283 : sign, &overflow_mul);
7308 : 212283 : overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
7309 : 212283 : if (overflow_mul
7310 : 1050 : && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
7311 : : overflow_p = true;
7312 : 212213 : if (!overflow_p)
7313 : 212213 : return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
7314 : : wide_int_to_tree (ctype, mul));
7315 : 212283 : }
7316 : :
7317 : : /* If these operations "cancel" each other, we have the main
7318 : : optimizations of this pass, which occur when either constant is a
7319 : : multiple of the other, in which case we replace this with either an
7320 : : operation or CODE or TCODE.
7321 : :
7322 : : If we have an unsigned type, we cannot do this since it will change
7323 : : the result if the original computation overflowed. */
7324 : 337469 : if (TYPE_OVERFLOW_UNDEFINED (ctype)
7325 : 56454 : && !TYPE_OVERFLOW_SANITIZED (ctype)
7326 : 393873 : && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
7327 : 25504 : || (tcode == MULT_EXPR
7328 : 25504 : && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
7329 : 871 : && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
7330 : 847 : && code != MULT_EXPR)))
7331 : : {
7332 : 31742 : if (wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
7333 : 31742 : TYPE_SIGN (type)))
7334 : : {
7335 : 106 : *strict_overflow_p = true;
7336 : 106 : return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
7337 : : fold_convert (ctype,
7338 : : const_binop (TRUNC_DIV_EXPR,
7339 : : op1, c)));
7340 : : }
7341 : 31636 : else if (wi::multiple_of_p (wi::to_wide (c), wi::to_wide (op1),
7342 : 31636 : TYPE_SIGN (type)))
7343 : : {
7344 : 30915 : *strict_overflow_p = true;
7345 : 30915 : return fold_build2 (code, ctype, fold_convert (ctype, op0),
7346 : : fold_convert (ctype,
7347 : : const_binop (TRUNC_DIV_EXPR,
7348 : : c, op1)));
7349 : : }
7350 : : }
7351 : : break;
7352 : :
7353 : : default:
7354 : : break;
7355 : : }
7356 : :
7357 : : return 0;
7358 : : }
7359 : :
7360 : : /* Return a node which has the indicated constant VALUE (either 0 or
7361 : : 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
7362 : : and is of the indicated TYPE. */
7363 : :
7364 : : tree
7365 : 74366670 : constant_boolean_node (bool value, tree type)
7366 : : {
7367 : 74366670 : if (type == integer_type_node)
7368 : 16505595 : return value ? integer_one_node : integer_zero_node;
7369 : 57861075 : else if (type == boolean_type_node)
7370 : 54180599 : return value ? boolean_true_node : boolean_false_node;
7371 : 3680476 : else if (VECTOR_TYPE_P (type))
7372 : 390 : return build_vector_from_val (type,
7373 : 390 : build_int_cst (TREE_TYPE (type),
7374 : 599 : value ? -1 : 0));
7375 : : else
7376 : 3680086 : return fold_convert (type, value ? integer_one_node : integer_zero_node);
7377 : : }
7378 : :
7379 : :
7380 : : /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
7381 : : Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
7382 : : CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
7383 : : expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
7384 : : COND is the first argument to CODE; otherwise (as in the example
7385 : : given here), it is the second argument. TYPE is the type of the
7386 : : original expression. Return NULL_TREE if no simplification is
7387 : : possible. */
7388 : :
7389 : : static tree
7390 : 899286 : fold_binary_op_with_conditional_arg (location_t loc,
7391 : : enum tree_code code,
7392 : : tree type, tree op0, tree op1,
7393 : : tree cond, tree arg, int cond_first_p)
7394 : : {
7395 : 899286 : tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
7396 : 899286 : tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
7397 : 899286 : tree test, true_value, false_value;
7398 : 899286 : tree lhs = NULL_TREE;
7399 : 899286 : tree rhs = NULL_TREE;
7400 : 899286 : enum tree_code cond_code = COND_EXPR;
7401 : :
7402 : : /* Do not move possibly trapping operations into the conditional as this
7403 : : pessimizes code and causes gimplification issues when applied late. */
7404 : 918846 : if (operation_could_trap_p (code, FLOAT_TYPE_P (type),
7405 : 199959 : ANY_INTEGRAL_TYPE_P (type)
7406 : 900126 : && TYPE_OVERFLOW_TRAPS (type), op1))
7407 : : return NULL_TREE;
7408 : :
7409 : 879540 : if (TREE_CODE (cond) == COND_EXPR
7410 : 300009 : || TREE_CODE (cond) == VEC_COND_EXPR)
7411 : : {
7412 : 580359 : test = TREE_OPERAND (cond, 0);
7413 : 580359 : true_value = TREE_OPERAND (cond, 1);
7414 : 580359 : false_value = TREE_OPERAND (cond, 2);
7415 : : /* If this operand throws an expression, then it does not make
7416 : : sense to try to perform a logical or arithmetic operation
7417 : : involving it. */
7418 : 580359 : if (VOID_TYPE_P (TREE_TYPE (true_value)))
7419 : 7464 : lhs = true_value;
7420 : 580359 : if (VOID_TYPE_P (TREE_TYPE (false_value)))
7421 : 8 : rhs = false_value;
7422 : : }
7423 : 299181 : else if (!(TREE_CODE (type) != VECTOR_TYPE
7424 : 299181 : && VECTOR_TYPE_P (TREE_TYPE (cond))))
7425 : : {
7426 : 297588 : tree testtype = TREE_TYPE (cond);
7427 : 297588 : test = cond;
7428 : 297588 : true_value = constant_boolean_node (true, testtype);
7429 : 297588 : false_value = constant_boolean_node (false, testtype);
7430 : : }
7431 : : else
7432 : : /* Detect the case of mixing vector and scalar types - bail out. */
7433 : : return NULL_TREE;
7434 : :
7435 : 877947 : if (VECTOR_TYPE_P (TREE_TYPE (test)))
7436 : 828 : cond_code = VEC_COND_EXPR;
7437 : :
7438 : : /* This transformation is only worthwhile if we don't have to wrap ARG
7439 : : in a SAVE_EXPR and the operation can be simplified without recursing
7440 : : on at least one of the branches once its pushed inside the COND_EXPR. */
7441 : 877947 : if (!TREE_CONSTANT (arg)
7442 : 877947 : && (TREE_SIDE_EFFECTS (arg)
7443 : 446522 : || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
7444 : 441958 : || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
7445 : : return NULL_TREE;
7446 : :
7447 : 445170 : arg = fold_convert_loc (loc, arg_type, arg);
7448 : 445170 : if (lhs == 0)
7449 : : {
7450 : 439139 : true_value = fold_convert_loc (loc, cond_type, true_value);
7451 : 439139 : if (cond_first_p)
7452 : 433296 : lhs = fold_build2_loc (loc, code, type, true_value, arg);
7453 : : else
7454 : 5843 : lhs = fold_build2_loc (loc, code, type, arg, true_value);
7455 : : }
7456 : 445170 : if (rhs == 0)
7457 : : {
7458 : 445162 : false_value = fold_convert_loc (loc, cond_type, false_value);
7459 : 445162 : if (cond_first_p)
7460 : 438758 : rhs = fold_build2_loc (loc, code, type, false_value, arg);
7461 : : else
7462 : 6404 : rhs = fold_build2_loc (loc, code, type, arg, false_value);
7463 : : }
7464 : :
7465 : : /* Check that we have simplified at least one of the branches. */
7466 : 445170 : if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
7467 : : return NULL_TREE;
7468 : :
7469 : 427353 : return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
7470 : : }
7471 : :
7472 : :
7473 : : /* Subroutine of fold() that checks for the addition of ARG +/- 0.0.
7474 : :
7475 : : If !NEGATE, return true if ZERO_ARG is +/-0.0 and, for all ARG of
7476 : : type TYPE, ARG + ZERO_ARG is the same as ARG. If NEGATE, return true
7477 : : if ARG - ZERO_ARG is the same as X.
7478 : :
7479 : : If ARG is NULL, check for any value of type TYPE.
7480 : :
7481 : : X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
7482 : : and finite. The problematic cases are when X is zero, and its mode
7483 : : has signed zeros. In the case of rounding towards -infinity,
7484 : : X - 0 is not the same as X because 0 - 0 is -0. In other rounding
7485 : : modes, X + 0 is not the same as X because -0 + 0 is 0. */
7486 : :
7487 : : bool
7488 : 642995 : fold_real_zero_addition_p (const_tree type, const_tree arg,
7489 : : const_tree zero_arg, int negate)
7490 : : {
7491 : 642995 : if (!real_zerop (zero_arg))
7492 : : return false;
7493 : :
7494 : : /* Don't allow the fold with -fsignaling-nans. */
7495 : 642280 : if (arg ? tree_expr_maybe_signaling_nan_p (arg) : HONOR_SNANS (type))
7496 : : return false;
7497 : :
7498 : : /* Allow the fold if zeros aren't signed, or their sign isn't important. */
7499 : 638940 : if (!HONOR_SIGNED_ZEROS (type))
7500 : : return true;
7501 : :
7502 : : /* There is no case that is safe for all rounding modes. */
7503 : 622967 : if (HONOR_SIGN_DEPENDENT_ROUNDING (type))
7504 : : return false;
7505 : :
7506 : : /* In a vector or complex, we would need to check the sign of all zeros. */
7507 : 622311 : if (TREE_CODE (zero_arg) == VECTOR_CST)
7508 : 1062 : zero_arg = uniform_vector_p (zero_arg);
7509 : 622311 : if (!zero_arg || TREE_CODE (zero_arg) != REAL_CST)
7510 : 1279 : return false;
7511 : :
7512 : : /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
7513 : 621032 : if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (zero_arg)))
7514 : 230 : negate = !negate;
7515 : :
7516 : : /* The mode has signed zeros, and we have to honor their sign.
7517 : : In this situation, there are only two cases we can return true for.
7518 : : (i) X - 0 is the same as X with default rounding.
7519 : : (ii) X + 0 is X when X can't possibly be -0.0. */
7520 : 621032 : return negate || (arg && !tree_expr_maybe_real_minus_zero_p (arg));
7521 : : }
7522 : :
7523 : : /* Subroutine of match.pd that optimizes comparisons of a division by
7524 : : a nonzero integer constant against an integer constant, i.e.
7525 : : X/C1 op C2.
7526 : :
7527 : : CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
7528 : : GE_EXPR or LE_EXPR. ARG01 and ARG1 must be a INTEGER_CST. */
7529 : :
7530 : : enum tree_code
7531 : 1514006 : fold_div_compare (enum tree_code code, tree c1, tree c2, tree *lo,
7532 : : tree *hi, bool *neg_overflow)
7533 : : {
7534 : 1514006 : tree prod, tmp, type = TREE_TYPE (c1);
7535 : 1514006 : signop sign = TYPE_SIGN (type);
7536 : 1514006 : wi::overflow_type overflow;
7537 : :
7538 : : /* We have to do this the hard way to detect unsigned overflow.
7539 : : prod = int_const_binop (MULT_EXPR, c1, c2); */
7540 : 1514006 : wide_int val = wi::mul (wi::to_wide (c1), wi::to_wide (c2), sign, &overflow);
7541 : 1514006 : prod = force_fit_type (type, val, -1, overflow);
7542 : 1514006 : *neg_overflow = false;
7543 : :
7544 : 1514006 : if (sign == UNSIGNED)
7545 : : {
7546 : 1488497 : tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7547 : 1488497 : *lo = prod;
7548 : :
7549 : : /* Likewise *hi = int_const_binop (PLUS_EXPR, prod, tmp). */
7550 : 1488497 : val = wi::add (wi::to_wide (prod), wi::to_wide (tmp), sign, &overflow);
7551 : 1488497 : *hi = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (prod));
7552 : : }
7553 : 25509 : else if (tree_int_cst_sgn (c1) >= 0)
7554 : : {
7555 : 24118 : tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7556 : 24118 : switch (tree_int_cst_sgn (c2))
7557 : : {
7558 : 4212 : case -1:
7559 : 4212 : *neg_overflow = true;
7560 : 4212 : *lo = int_const_binop (MINUS_EXPR, prod, tmp);
7561 : 4212 : *hi = prod;
7562 : 4212 : break;
7563 : :
7564 : 12178 : case 0:
7565 : 12178 : *lo = fold_negate_const (tmp, type);
7566 : 12178 : *hi = tmp;
7567 : 12178 : break;
7568 : :
7569 : 7728 : case 1:
7570 : 7728 : *hi = int_const_binop (PLUS_EXPR, prod, tmp);
7571 : 7728 : *lo = prod;
7572 : 7728 : break;
7573 : :
7574 : 0 : default:
7575 : 0 : gcc_unreachable ();
7576 : : }
7577 : : }
7578 : : else
7579 : : {
7580 : : /* A negative divisor reverses the relational operators. */
7581 : 1391 : code = swap_tree_comparison (code);
7582 : :
7583 : 1391 : tmp = int_const_binop (PLUS_EXPR, c1, build_int_cst (type, 1));
7584 : 1391 : switch (tree_int_cst_sgn (c2))
7585 : : {
7586 : 132 : case -1:
7587 : 132 : *hi = int_const_binop (MINUS_EXPR, prod, tmp);
7588 : 132 : *lo = prod;
7589 : 132 : break;
7590 : :
7591 : 161 : case 0:
7592 : 161 : *hi = fold_negate_const (tmp, type);
7593 : 161 : *lo = tmp;
7594 : 161 : break;
7595 : :
7596 : 1098 : case 1:
7597 : 1098 : *neg_overflow = true;
7598 : 1098 : *lo = int_const_binop (PLUS_EXPR, prod, tmp);
7599 : 1098 : *hi = prod;
7600 : 1098 : break;
7601 : :
7602 : 0 : default:
7603 : 0 : gcc_unreachable ();
7604 : : }
7605 : : }
7606 : :
7607 : 1514006 : if (code != EQ_EXPR && code != NE_EXPR)
7608 : : return code;
7609 : :
7610 : 15986 : if (TREE_OVERFLOW (*lo)
7611 : 15986 : || operand_equal_p (*lo, TYPE_MIN_VALUE (type), 0))
7612 : 1347 : *lo = NULL_TREE;
7613 : 15986 : if (TREE_OVERFLOW (*hi)
7614 : 15986 : || operand_equal_p (*hi, TYPE_MAX_VALUE (type), 0))
7615 : 98 : *hi = NULL_TREE;
7616 : :
7617 : : return code;
7618 : 1514006 : }
7619 : :
7620 : : /* Test whether it is preferable to swap two operands, ARG0 and
7621 : : ARG1, for example because ARG0 is an integer constant and ARG1
7622 : : isn't. */
7623 : :
7624 : : bool
7625 : 1259243509 : tree_swap_operands_p (const_tree arg0, const_tree arg1)
7626 : : {
7627 : 1259243509 : if (CONSTANT_CLASS_P (arg1))
7628 : : return false;
7629 : 402168947 : if (CONSTANT_CLASS_P (arg0))
7630 : : return true;
7631 : :
7632 : 370437371 : STRIP_NOPS (arg0);
7633 : 370437371 : STRIP_NOPS (arg1);
7634 : :
7635 : 370437371 : if (TREE_CONSTANT (arg1))
7636 : : return false;
7637 : 359555022 : if (TREE_CONSTANT (arg0))
7638 : : return true;
7639 : :
7640 : : /* It is preferable to swap two SSA_NAME to ensure a canonical form
7641 : : for commutative and comparison operators. Ensuring a canonical
7642 : : form allows the optimizers to find additional redundancies without
7643 : : having to explicitly check for both orderings. */
7644 : 359085045 : if (TREE_CODE (arg0) == SSA_NAME
7645 : 253891194 : && TREE_CODE (arg1) == SSA_NAME
7646 : 606908789 : && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
7647 : : return true;
7648 : :
7649 : : /* Put SSA_NAMEs last. */
7650 : 336405999 : if (TREE_CODE (arg1) == SSA_NAME)
7651 : : return false;
7652 : 91608282 : if (TREE_CODE (arg0) == SSA_NAME)
7653 : : return true;
7654 : :
7655 : : /* Put variables last. */
7656 : 85540832 : if (DECL_P (arg1))
7657 : : return false;
7658 : 45485660 : if (DECL_P (arg0))
7659 : : return true;
7660 : :
7661 : : return false;
7662 : : }
7663 : :
7664 : :
7665 : : /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7666 : : means A >= Y && A != MAX, but in this case we know that
7667 : : A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7668 : :
7669 : : static tree
7670 : 19974388 : fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7671 : : {
7672 : 19974388 : tree a, typea, type = TREE_TYPE (bound), a1, diff, y;
7673 : :
7674 : 19974388 : if (TREE_CODE (bound) == LT_EXPR)
7675 : 4621119 : a = TREE_OPERAND (bound, 0);
7676 : 15353269 : else if (TREE_CODE (bound) == GT_EXPR)
7677 : 2133978 : a = TREE_OPERAND (bound, 1);
7678 : : else
7679 : : return NULL_TREE;
7680 : :
7681 : 6755097 : typea = TREE_TYPE (a);
7682 : 6755097 : if (!INTEGRAL_TYPE_P (typea)
7683 : 322779 : && !POINTER_TYPE_P (typea))
7684 : : return NULL_TREE;
7685 : :
7686 : 6578548 : if (TREE_CODE (ineq) == LT_EXPR)
7687 : : {
7688 : 1356979 : a1 = TREE_OPERAND (ineq, 1);
7689 : 1356979 : y = TREE_OPERAND (ineq, 0);
7690 : : }
7691 : 5221569 : else if (TREE_CODE (ineq) == GT_EXPR)
7692 : : {
7693 : 920744 : a1 = TREE_OPERAND (ineq, 0);
7694 : 920744 : y = TREE_OPERAND (ineq, 1);
7695 : : }
7696 : : else
7697 : : return NULL_TREE;
7698 : :
7699 : 2277723 : if (TREE_TYPE (a1) != typea)
7700 : : return NULL_TREE;
7701 : :
7702 : 1572069 : if (POINTER_TYPE_P (typea))
7703 : : {
7704 : : /* Convert the pointer types into integer before taking the difference. */
7705 : 8102 : tree ta = fold_convert_loc (loc, ssizetype, a);
7706 : 8102 : tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7707 : 8102 : diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7708 : : }
7709 : : else
7710 : 1563967 : diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7711 : :
7712 : 1572069 : if (!diff || !integer_onep (diff))
7713 : 1561712 : return NULL_TREE;
7714 : :
7715 : 10357 : return fold_build2_loc (loc, GE_EXPR, type, a, y);
7716 : : }
7717 : :
7718 : : /* Fold a sum or difference of at least one multiplication.
7719 : : Returns the folded tree or NULL if no simplification could be made. */
7720 : :
7721 : : static tree
7722 : 7613876 : fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7723 : : tree arg0, tree arg1)
7724 : : {
7725 : 7613876 : tree arg00, arg01, arg10, arg11;
7726 : 7613876 : tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7727 : :
7728 : : /* (A * C) +- (B * C) -> (A+-B) * C.
7729 : : (A * C) +- A -> A * (C+-1).
7730 : : We are most concerned about the case where C is a constant,
7731 : : but other combinations show up during loop reduction. Since
7732 : : it is not difficult, try all four possibilities. */
7733 : :
7734 : 7613876 : if (TREE_CODE (arg0) == MULT_EXPR)
7735 : : {
7736 : 6479176 : arg00 = TREE_OPERAND (arg0, 0);
7737 : 6479176 : arg01 = TREE_OPERAND (arg0, 1);
7738 : : }
7739 : 1134700 : else if (TREE_CODE (arg0) == INTEGER_CST)
7740 : : {
7741 : 74110 : arg00 = build_one_cst (type);
7742 : 74110 : arg01 = arg0;
7743 : : }
7744 : : else
7745 : : {
7746 : : /* We cannot generate constant 1 for fract. */
7747 : 1060590 : if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7748 : 0 : return NULL_TREE;
7749 : 1060590 : arg00 = arg0;
7750 : 1060590 : arg01 = build_one_cst (type);
7751 : : }
7752 : 7613876 : if (TREE_CODE (arg1) == MULT_EXPR)
7753 : : {
7754 : 2467347 : arg10 = TREE_OPERAND (arg1, 0);
7755 : 2467347 : arg11 = TREE_OPERAND (arg1, 1);
7756 : : }
7757 : 5146529 : else if (TREE_CODE (arg1) == INTEGER_CST)
7758 : : {
7759 : 3241345 : arg10 = build_one_cst (type);
7760 : : /* As we canonicalize A - 2 to A + -2 get rid of that sign for
7761 : : the purpose of this canonicalization. */
7762 : 6262729 : if (wi::neg_p (wi::to_wide (arg1), TYPE_SIGN (TREE_TYPE (arg1)))
7763 : 222913 : && negate_expr_p (arg1)
7764 : 3461306 : && code == PLUS_EXPR)
7765 : : {
7766 : 219961 : arg11 = negate_expr (arg1);
7767 : 219961 : code = MINUS_EXPR;
7768 : : }
7769 : : else
7770 : : arg11 = arg1;
7771 : : }
7772 : : else
7773 : : {
7774 : : /* We cannot generate constant 1 for fract. */
7775 : 1905184 : if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7776 : 0 : return NULL_TREE;
7777 : 1905184 : arg10 = arg1;
7778 : 1905184 : arg11 = build_one_cst (type);
7779 : : }
7780 : 7613876 : same = NULL_TREE;
7781 : :
7782 : : /* Prefer factoring a common non-constant. */
7783 : 7613876 : if (operand_equal_p (arg00, arg10, 0))
7784 : : same = arg00, alt0 = arg01, alt1 = arg11;
7785 : 7610297 : else if (operand_equal_p (arg01, arg11, 0))
7786 : : same = arg01, alt0 = arg00, alt1 = arg10;
7787 : 7524709 : else if (operand_equal_p (arg00, arg11, 0))
7788 : : same = arg00, alt0 = arg01, alt1 = arg10;
7789 : 7524675 : else if (operand_equal_p (arg01, arg10, 0))
7790 : : same = arg01, alt0 = arg00, alt1 = arg11;
7791 : :
7792 : : /* No identical multiplicands; see if we can find a common
7793 : : power-of-two factor in non-power-of-two multiplies. This
7794 : : can help in multi-dimensional array access. */
7795 : 7523405 : else if (tree_fits_shwi_p (arg01) && tree_fits_shwi_p (arg11))
7796 : : {
7797 : 6201202 : HOST_WIDE_INT int01 = tree_to_shwi (arg01);
7798 : 6201202 : HOST_WIDE_INT int11 = tree_to_shwi (arg11);
7799 : 6201202 : HOST_WIDE_INT tmp;
7800 : 6201202 : bool swap = false;
7801 : 6201202 : tree maybe_same;
7802 : :
7803 : : /* Move min of absolute values to int11. */
7804 : 18603606 : if (absu_hwi (int01) < absu_hwi (int11))
7805 : : {
7806 : : tmp = int01, int01 = int11, int11 = tmp;
7807 : : alt0 = arg00, arg00 = arg10, arg10 = alt0;
7808 : : maybe_same = arg01;
7809 : : swap = true;
7810 : : }
7811 : : else
7812 : 2837635 : maybe_same = arg11;
7813 : :
7814 : 6201202 : const unsigned HOST_WIDE_INT factor = absu_hwi (int11);
7815 : 6201202 : if (factor > 1
7816 : 9599357 : && pow2p_hwi (factor)
7817 : 1985481 : && (int01 & (factor - 1)) == 0
7818 : : /* The remainder should not be a constant, otherwise we
7819 : : end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7820 : : increased the number of multiplications necessary. */
7821 : 7459839 : && TREE_CODE (arg10) != INTEGER_CST)
7822 : : {
7823 : 1105368 : alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7824 : 1105368 : build_int_cst (TREE_TYPE (arg00),
7825 : 1105368 : int01 / int11));
7826 : 1105368 : alt1 = arg10;
7827 : 1105368 : same = maybe_same;
7828 : 1105368 : if (swap)
7829 : 1030497 : maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7830 : : }
7831 : : }
7832 : :
7833 : 7613876 : if (!same)
7834 : : return NULL_TREE;
7835 : :
7836 : 7 : if (! ANY_INTEGRAL_TYPE_P (type)
7837 : 1195839 : || TYPE_OVERFLOW_WRAPS (type)
7838 : : /* We are neither factoring zero nor minus one. */
7839 : 1330945 : || TREE_CODE (same) == INTEGER_CST)
7840 : 1186641 : return fold_build2_loc (loc, MULT_EXPR, type,
7841 : : fold_build2_loc (loc, code, type,
7842 : : fold_convert_loc (loc, type, alt0),
7843 : : fold_convert_loc (loc, type, alt1)),
7844 : 1186641 : fold_convert_loc (loc, type, same));
7845 : :
7846 : : /* Same may be zero and thus the operation 'code' may overflow. Likewise
7847 : : same may be minus one and thus the multiplication may overflow. Perform
7848 : : the sum operation in an unsigned type. */
7849 : 9198 : tree utype = unsigned_type_for (type);
7850 : 9198 : tree tem = fold_build2_loc (loc, code, utype,
7851 : : fold_convert_loc (loc, utype, alt0),
7852 : : fold_convert_loc (loc, utype, alt1));
7853 : : /* If the sum evaluated to a constant that is not -INF the multiplication
7854 : : cannot overflow. */
7855 : 18396 : if (TREE_CODE (tem) == INTEGER_CST
7856 : 14382 : && (wi::to_wide (tem)
7857 : 14382 : != wi::min_value (TYPE_PRECISION (utype), SIGNED)))
7858 : 2577 : return fold_build2_loc (loc, MULT_EXPR, type,
7859 : 2577 : fold_convert (type, tem), same);
7860 : :
7861 : : /* Do not resort to unsigned multiplication because
7862 : : we lose the no-overflow property of the expression. */
7863 : : return NULL_TREE;
7864 : : }
7865 : :
7866 : : /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7867 : : specified by EXPR into the buffer PTR of length LEN bytes.
7868 : : Return the number of bytes placed in the buffer, or zero
7869 : : upon failure. */
7870 : :
7871 : : static int
7872 : 14918539 : native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7873 : : {
7874 : 14918539 : tree type = TREE_TYPE (expr);
7875 : 14918539 : int total_bytes;
7876 : 14918539 : if (TREE_CODE (type) == BITINT_TYPE)
7877 : : {
7878 : 16672 : struct bitint_info info;
7879 : 16672 : bool ok = targetm.c.bitint_type_info (TYPE_PRECISION (type), &info);
7880 : 16672 : gcc_assert (ok);
7881 : 16672 : scalar_int_mode limb_mode = as_a <scalar_int_mode> (info.limb_mode);
7882 : 16672 : if (TYPE_PRECISION (type) > GET_MODE_PRECISION (limb_mode))
7883 : : {
7884 : 16601 : total_bytes = tree_to_uhwi (TYPE_SIZE_UNIT (type));
7885 : : /* More work is needed when adding _BitInt support to PDP endian
7886 : : if limb is smaller than word, or if _BitInt limb ordering doesn't
7887 : : match target endianity here. */
7888 : 16601 : gcc_checking_assert (info.big_endian == WORDS_BIG_ENDIAN
7889 : : && (BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
7890 : : || (GET_MODE_SIZE (limb_mode)
7891 : : >= UNITS_PER_WORD)));
7892 : : }
7893 : : else
7894 : 142 : total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7895 : : }
7896 : : else
7897 : 29803734 : total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7898 : 14918539 : int byte, offset, word, words;
7899 : 14918539 : unsigned char value;
7900 : :
7901 : 14918539 : if ((off == -1 && total_bytes > len) || off >= total_bytes)
7902 : : return 0;
7903 : 14918058 : if (off == -1)
7904 : 14161478 : off = 0;
7905 : :
7906 : 14918058 : if (ptr == NULL)
7907 : : /* Dry run. */
7908 : 2455259 : return MIN (len, total_bytes - off);
7909 : :
7910 : : words = total_bytes / UNITS_PER_WORD;
7911 : :
7912 : 64500933 : for (byte = 0; byte < total_bytes; byte++)
7913 : : {
7914 : 52038134 : int bitpos = byte * BITS_PER_UNIT;
7915 : : /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7916 : : number of bytes. */
7917 : 52038134 : value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7918 : :
7919 : 52038134 : if (total_bytes > UNITS_PER_WORD)
7920 : : {
7921 : 52038134 : word = byte / UNITS_PER_WORD;
7922 : 52038134 : if (WORDS_BIG_ENDIAN)
7923 : : word = (words - 1) - word;
7924 : 52038134 : offset = word * UNITS_PER_WORD;
7925 : 52038134 : if (BYTES_BIG_ENDIAN)
7926 : : offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7927 : : else
7928 : 52038134 : offset += byte % UNITS_PER_WORD;
7929 : : }
7930 : : else
7931 : : offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7932 : 52038134 : if (offset >= off && offset - off < len)
7933 : 50028846 : ptr[offset - off] = value;
7934 : : }
7935 : 12462799 : return MIN (len, total_bytes - off);
7936 : : }
7937 : :
7938 : :
7939 : : /* Subroutine of native_encode_expr. Encode the FIXED_CST
7940 : : specified by EXPR into the buffer PTR of length LEN bytes.
7941 : : Return the number of bytes placed in the buffer, or zero
7942 : : upon failure. */
7943 : :
7944 : : static int
7945 : 0 : native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7946 : : {
7947 : 0 : tree type = TREE_TYPE (expr);
7948 : 0 : scalar_mode mode = SCALAR_TYPE_MODE (type);
7949 : 0 : int total_bytes = GET_MODE_SIZE (mode);
7950 : 0 : FIXED_VALUE_TYPE value;
7951 : 0 : tree i_value, i_type;
7952 : :
7953 : 0 : if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7954 : : return 0;
7955 : :
7956 : 0 : i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7957 : :
7958 : 0 : if (NULL_TREE == i_type || TYPE_PRECISION (i_type) != total_bytes)
7959 : : return 0;
7960 : :
7961 : 0 : value = TREE_FIXED_CST (expr);
7962 : 0 : i_value = double_int_to_tree (i_type, value.data);
7963 : :
7964 : 0 : return native_encode_int (i_value, ptr, len, off);
7965 : : }
7966 : :
7967 : :
7968 : : /* Subroutine of native_encode_expr. Encode the REAL_CST
7969 : : specified by EXPR into the buffer PTR of length LEN bytes.
7970 : : Return the number of bytes placed in the buffer, or zero
7971 : : upon failure. */
7972 : :
7973 : : static int
7974 : 501804 : native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7975 : : {
7976 : 501804 : tree type = TREE_TYPE (expr);
7977 : 501804 : int total_bytes = GET_MODE_SIZE (SCALAR_FLOAT_TYPE_MODE (type));
7978 : 501804 : int byte, offset, word, words, bitpos;
7979 : 501804 : unsigned char value;
7980 : :
7981 : : /* There are always 32 bits in each long, no matter the size of
7982 : : the hosts long. We handle floating point representations with
7983 : : up to 192 bits. */
7984 : 501804 : long tmp[6];
7985 : :
7986 : 501804 : if ((off == -1 && total_bytes > len) || off >= total_bytes)
7987 : : return 0;
7988 : 500052 : if (off == -1)
7989 : 372667 : off = 0;
7990 : :
7991 : 500052 : if (ptr == NULL)
7992 : : /* Dry run. */
7993 : 121715 : return MIN (len, total_bytes - off);
7994 : :
7995 : 378337 : words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7996 : :
7997 : 378337 : real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7998 : :
7999 : 3089739 : for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
8000 : 2711402 : bitpos += BITS_PER_UNIT)
8001 : : {
8002 : 2711402 : byte = (bitpos / BITS_PER_UNIT) & 3;
8003 : 2711402 : value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
8004 : :
8005 : 2711402 : if (UNITS_PER_WORD < 4)
8006 : : {
8007 : : word = byte / UNITS_PER_WORD;
8008 : : if (WORDS_BIG_ENDIAN)
8009 : : word = (words - 1) - word;
8010 : : offset = word * UNITS_PER_WORD;
8011 : : if (BYTES_BIG_ENDIAN)
8012 : : offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
8013 : : else
8014 : : offset += byte % UNITS_PER_WORD;
8015 : : }
8016 : : else
8017 : : {
8018 : 2711402 : offset = byte;
8019 : 2711402 : if (BYTES_BIG_ENDIAN)
8020 : : {
8021 : : /* Reverse bytes within each long, or within the entire float
8022 : : if it's smaller than a long (for HFmode). */
8023 : : offset = MIN (3, total_bytes - 1) - offset;
8024 : : gcc_assert (offset >= 0);
8025 : : }
8026 : : }
8027 : 2711402 : offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
8028 : 2711402 : if (offset >= off
8029 : 2708378 : && offset - off < len)
8030 : 2691174 : ptr[offset - off] = value;
8031 : : }
8032 : 378337 : return MIN (len, total_bytes - off);
8033 : : }
8034 : :
8035 : : /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
8036 : : specified by EXPR into the buffer PTR of length LEN bytes.
8037 : : Return the number of bytes placed in the buffer, or zero
8038 : : upon failure. */
8039 : :
8040 : : static int
8041 : 19584 : native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
8042 : : {
8043 : 19584 : int rsize, isize;
8044 : 19584 : tree part;
8045 : :
8046 : 19584 : part = TREE_REALPART (expr);
8047 : 19584 : rsize = native_encode_expr (part, ptr, len, off);
8048 : 19584 : if (off == -1 && rsize == 0)
8049 : : return 0;
8050 : 19584 : part = TREE_IMAGPART (expr);
8051 : 19584 : if (off != -1)
8052 : 39482 : off = MAX (0, off - GET_MODE_SIZE (SCALAR_TYPE_MODE (TREE_TYPE (part))));
8053 : 19584 : isize = native_encode_expr (part, ptr ? ptr + rsize : NULL,
8054 : : len - rsize, off);
8055 : 19584 : if (off == -1 && isize != rsize)
8056 : : return 0;
8057 : 19584 : return rsize + isize;
8058 : : }
8059 : :
8060 : : /* Like native_encode_vector, but only encode the first COUNT elements.
8061 : : The other arguments are as for native_encode_vector. */
8062 : :
8063 : : static int
8064 : 803381 : native_encode_vector_part (const_tree expr, unsigned char *ptr, int len,
8065 : : int off, unsigned HOST_WIDE_INT count)
8066 : : {
8067 : 803381 : tree itype = TREE_TYPE (TREE_TYPE (expr));
8068 : 1606762 : if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (expr))
8069 : 803671 : && TYPE_PRECISION (itype) <= BITS_PER_UNIT)
8070 : : {
8071 : : /* This is the only case in which elements can be smaller than a byte.
8072 : : Element 0 is always in the lsb of the containing byte. */
8073 : 205 : unsigned int elt_bits = TYPE_PRECISION (itype);
8074 : 205 : int total_bytes = CEIL (elt_bits * count, BITS_PER_UNIT);
8075 : 205 : if ((off == -1 && total_bytes > len) || off >= total_bytes)
8076 : : return 0;
8077 : :
8078 : 205 : if (off == -1)
8079 : 205 : off = 0;
8080 : :
8081 : : /* Zero the buffer and then set bits later where necessary. */
8082 : 205 : int extract_bytes = MIN (len, total_bytes - off);
8083 : 205 : if (ptr)
8084 : 205 : memset (ptr, 0, extract_bytes);
8085 : :
8086 : 205 : unsigned int elts_per_byte = BITS_PER_UNIT / elt_bits;
8087 : 205 : unsigned int first_elt = off * elts_per_byte;
8088 : 205 : unsigned int extract_elts = extract_bytes * elts_per_byte;
8089 : 2541 : for (unsigned int i = 0; i < extract_elts; ++i)
8090 : : {
8091 : 2336 : tree elt = VECTOR_CST_ELT (expr, first_elt + i);
8092 : 2336 : if (TREE_CODE (elt) != INTEGER_CST)
8093 : : return 0;
8094 : :
8095 : 2336 : if (ptr && wi::extract_uhwi (wi::to_wide (elt), 0, 1))
8096 : : {
8097 : 1036 : unsigned int bit = i * elt_bits;
8098 : 1036 : ptr[bit / BITS_PER_UNIT] |= 1 << (bit % BITS_PER_UNIT);
8099 : : }
8100 : : }
8101 : : return extract_bytes;
8102 : : }
8103 : :
8104 : 803176 : int offset = 0;
8105 : 803176 : int size = GET_MODE_SIZE (SCALAR_TYPE_MODE (itype));
8106 : 2547960 : for (unsigned HOST_WIDE_INT i = 0; i < count; i++)
8107 : : {
8108 : 2249459 : if (off >= size)
8109 : : {
8110 : 18845 : off -= size;
8111 : 18845 : continue;
8112 : : }
8113 : 2230614 : tree elem = VECTOR_CST_ELT (expr, i);
8114 : 2230614 : int res = native_encode_expr (elem, ptr ? ptr + offset : NULL,
8115 : : len - offset, off);
8116 : 2230614 : if ((off == -1 && res != size) || res == 0)
8117 : : return 0;
8118 : 2230091 : offset += res;
8119 : 2230091 : if (offset >= len)
8120 : 1008304 : return (off == -1 && i < count - 1) ? 0 : offset;
8121 : 1725939 : if (off != -1)
8122 : 301338 : off = 0;
8123 : : }
8124 : : return offset;
8125 : : }
8126 : :
8127 : : /* Subroutine of native_encode_expr. Encode the VECTOR_CST
8128 : : specified by EXPR into the buffer PTR of length LEN bytes.
8129 : : Return the number of bytes placed in the buffer, or zero
8130 : : upon failure. */
8131 : :
8132 : : static int
8133 : 691286 : native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
8134 : : {
8135 : 691286 : unsigned HOST_WIDE_INT count;
8136 : 691286 : if (!VECTOR_CST_NELTS (expr).is_constant (&count))
8137 : : return 0;
8138 : 691286 : return native_encode_vector_part (expr, ptr, len, off, count);
8139 : : }
8140 : :
8141 : :
8142 : : /* Subroutine of native_encode_expr. Encode the STRING_CST
8143 : : specified by EXPR into the buffer PTR of length LEN bytes.
8144 : : Return the number of bytes placed in the buffer, or zero
8145 : : upon failure. */
8146 : :
8147 : : static int
8148 : 124256 : native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
8149 : : {
8150 : 124256 : tree type = TREE_TYPE (expr);
8151 : :
8152 : : /* Wide-char strings are encoded in target byte-order so native
8153 : : encoding them is trivial. */
8154 : 124256 : if (BITS_PER_UNIT != CHAR_BIT
8155 : 124256 : || TREE_CODE (type) != ARRAY_TYPE
8156 : 124256 : || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
8157 : 248512 : || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
8158 : : return 0;
8159 : :
8160 : 124256 : HOST_WIDE_INT total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
8161 : 124256 : if ((off == -1 && total_bytes > len) || off >= total_bytes)
8162 : : return 0;
8163 : 123337 : if (off == -1)
8164 : 55952 : off = 0;
8165 : 123337 : len = MIN (total_bytes - off, len);
8166 : 123337 : if (ptr == NULL)
8167 : : /* Dry run. */;
8168 : : else
8169 : : {
8170 : 123337 : int written = 0;
8171 : 123337 : if (off < TREE_STRING_LENGTH (expr))
8172 : : {
8173 : 123063 : written = MIN (len, TREE_STRING_LENGTH (expr) - off);
8174 : 123063 : memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
8175 : : }
8176 : 123337 : memset (ptr + written, 0, len - written);
8177 : : }
8178 : : return len;
8179 : : }
8180 : :
8181 : :
8182 : : /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST, REAL_CST,
8183 : : FIXED_CST, COMPLEX_CST, STRING_CST, or VECTOR_CST specified by EXPR into
8184 : : the buffer PTR of size LEN bytes. If PTR is NULL, don't actually store
8185 : : anything, just do a dry run. Fail either if OFF is -1 and LEN isn't
8186 : : sufficient to encode the entire EXPR, or if OFF is out of bounds.
8187 : : Otherwise, start at byte offset OFF and encode at most LEN bytes.
8188 : : Return the number of bytes placed in the buffer, or zero upon failure. */
8189 : :
8190 : : int
8191 : 27700708 : native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
8192 : : {
8193 : : /* We don't support starting at negative offset and -1 is special. */
8194 : 27700708 : if (off < -1)
8195 : : return 0;
8196 : :
8197 : 27700696 : switch (TREE_CODE (expr))
8198 : : {
8199 : 14916453 : case INTEGER_CST:
8200 : 14916453 : return native_encode_int (expr, ptr, len, off);
8201 : :
8202 : 501804 : case REAL_CST:
8203 : 501804 : return native_encode_real (expr, ptr, len, off);
8204 : :
8205 : 0 : case FIXED_CST:
8206 : 0 : return native_encode_fixed (expr, ptr, len, off);
8207 : :
8208 : 19584 : case COMPLEX_CST:
8209 : 19584 : return native_encode_complex (expr, ptr, len, off);
8210 : :
8211 : 691286 : case VECTOR_CST:
8212 : 691286 : return native_encode_vector (expr, ptr, len, off);
8213 : :
8214 : 124256 : case STRING_CST:
8215 : 124256 : return native_encode_string (expr, ptr, len, off);
8216 : :
8217 : : default:
8218 : : return 0;
8219 : : }
8220 : : }
8221 : :
8222 : : /* Try to find a type whose byte size is smaller or equal to LEN bytes larger
8223 : : or equal to FIELDSIZE bytes, with underlying mode precision/size multiple
8224 : : of BITS_PER_UNIT. As native_{interpret,encode}_int works in term of
8225 : : machine modes, we can't just use build_nonstandard_integer_type. */
8226 : :
8227 : : tree
8228 : 521 : find_bitfield_repr_type (int fieldsize, int len)
8229 : : {
8230 : 521 : machine_mode mode;
8231 : 1023 : for (int pass = 0; pass < 2; pass++)
8232 : : {
8233 : 772 : enum mode_class mclass = pass ? MODE_PARTIAL_INT : MODE_INT;
8234 : 4340 : FOR_EACH_MODE_IN_CLASS (mode, mclass)
8235 : 3838 : if (known_ge (GET_MODE_SIZE (mode), fieldsize)
8236 : 7006 : && known_eq (GET_MODE_PRECISION (mode),
8237 : : GET_MODE_BITSIZE (mode))
8238 : 10844 : && known_le (GET_MODE_SIZE (mode), len))
8239 : : {
8240 : 270 : tree ret = lang_hooks.types.type_for_mode (mode, 1);
8241 : 270 : if (ret && TYPE_MODE (ret) == mode)
8242 : 270 : return ret;
8243 : : }
8244 : : }
8245 : :
8246 : 502 : for (int i = 0; i < NUM_INT_N_ENTS; i ++)
8247 : 251 : if (int_n_enabled_p[i]
8248 : 251 : && int_n_data[i].bitsize >= (unsigned) (BITS_PER_UNIT * fieldsize)
8249 : 251 : && int_n_trees[i].unsigned_type)
8250 : : {
8251 : 251 : tree ret = int_n_trees[i].unsigned_type;
8252 : 251 : mode = TYPE_MODE (ret);
8253 : 251 : if (known_ge (GET_MODE_SIZE (mode), fieldsize)
8254 : 502 : && known_eq (GET_MODE_PRECISION (mode),
8255 : : GET_MODE_BITSIZE (mode))
8256 : 753 : && known_le (GET_MODE_SIZE (mode), len))
8257 : 0 : return ret;
8258 : : }
8259 : :
8260 : : return NULL_TREE;
8261 : : }
8262 : :
8263 : : /* Similar to native_encode_expr, but also handle CONSTRUCTORs, VCEs,
8264 : : NON_LVALUE_EXPRs and nops. If MASK is non-NULL (then PTR has
8265 : : to be non-NULL and OFF zero), then in addition to filling the
8266 : : bytes pointed by PTR with the value also clear any bits pointed
8267 : : by MASK that are known to be initialized, keep them as is for
8268 : : e.g. uninitialized padding bits or uninitialized fields. */
8269 : :
8270 : : int
8271 : 6733726 : native_encode_initializer (tree init, unsigned char *ptr, int len,
8272 : : int off, unsigned char *mask)
8273 : : {
8274 : 6733726 : int r;
8275 : :
8276 : : /* We don't support starting at negative offset and -1 is special. */
8277 : 6733726 : if (off < -1 || init == NULL_TREE)
8278 : : return 0;
8279 : :
8280 : 6733726 : gcc_assert (mask == NULL || (off == 0 && ptr));
8281 : :
8282 : 6733726 : STRIP_NOPS (init);
8283 : 6733726 : switch (TREE_CODE (init))
8284 : : {
8285 : 0 : case VIEW_CONVERT_EXPR:
8286 : 0 : case NON_LVALUE_EXPR:
8287 : 0 : return native_encode_initializer (TREE_OPERAND (init, 0), ptr, len, off,
8288 : 0 : mask);
8289 : 5994968 : default:
8290 : 5994968 : r = native_encode_expr (init, ptr, len, off);
8291 : 5994968 : if (mask)
8292 : 1485 : memset (mask, 0, r);
8293 : : return r;
8294 : 738758 : case CONSTRUCTOR:
8295 : 738758 : tree type = TREE_TYPE (init);
8296 : 738758 : HOST_WIDE_INT total_bytes = int_size_in_bytes (type);
8297 : 738758 : if (total_bytes < 0)
8298 : : return 0;
8299 : 738758 : if ((off == -1 && total_bytes > len) || off >= total_bytes)
8300 : : return 0;
8301 : 738755 : int o = off == -1 ? 0 : off;
8302 : 738755 : if (TREE_CODE (type) == ARRAY_TYPE)
8303 : : {
8304 : 250691 : tree min_index;
8305 : 250691 : unsigned HOST_WIDE_INT cnt;
8306 : 250691 : HOST_WIDE_INT curpos = 0, fieldsize, valueinit = -1;
8307 : 250691 : constructor_elt *ce;
8308 : :
8309 : 250691 : if (!TYPE_DOMAIN (type)
8310 : 250691 : || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (type))) != INTEGER_CST)
8311 : : return 0;
8312 : :
8313 : 250691 : fieldsize = int_size_in_bytes (TREE_TYPE (type));
8314 : 250691 : if (fieldsize <= 0)
8315 : : return 0;
8316 : :
8317 : 250691 : min_index = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
8318 : 250691 : if (ptr)
8319 : 250691 : memset (ptr, '\0', MIN (total_bytes - off, len));
8320 : :
8321 : 5938288 : for (cnt = 0; ; cnt++)
8322 : : {
8323 : 6188979 : tree val = NULL_TREE, index = NULL_TREE;
8324 : 6188979 : HOST_WIDE_INT pos = curpos, count = 0;
8325 : 6188979 : bool full = false;
8326 : 6188979 : if (vec_safe_iterate (CONSTRUCTOR_ELTS (init), cnt, &ce))
8327 : : {
8328 : 6151286 : val = ce->value;
8329 : 6151286 : index = ce->index;
8330 : : }
8331 : 37693 : else if (mask == NULL
8332 : 185 : || CONSTRUCTOR_NO_CLEARING (init)
8333 : 37878 : || curpos >= total_bytes)
8334 : : break;
8335 : : else
8336 : : pos = total_bytes;
8337 : :
8338 : 6151294 : if (index && TREE_CODE (index) == RANGE_EXPR)
8339 : : {
8340 : 18 : if (TREE_CODE (TREE_OPERAND (index, 0)) != INTEGER_CST
8341 : 18 : || TREE_CODE (TREE_OPERAND (index, 1)) != INTEGER_CST)
8342 : 0 : return 0;
8343 : 18 : offset_int wpos
8344 : 18 : = wi::sext (wi::to_offset (TREE_OPERAND (index, 0))
8345 : 36 : - wi::to_offset (min_index),
8346 : 18 : TYPE_PRECISION (sizetype));
8347 : 18 : wpos *= fieldsize;
8348 : 18 : if (!wi::fits_shwi_p (pos))
8349 : : return 0;
8350 : 18 : pos = wpos.to_shwi ();
8351 : 18 : offset_int wcount
8352 : 18 : = wi::sext (wi::to_offset (TREE_OPERAND (index, 1))
8353 : 36 : - wi::to_offset (TREE_OPERAND (index, 0)),
8354 : 18 : TYPE_PRECISION (sizetype));
8355 : 18 : if (!wi::fits_shwi_p (wcount))
8356 : : return 0;
8357 : 18 : count = wcount.to_shwi ();
8358 : 18 : }
8359 : 5541897 : else if (index)
8360 : : {
8361 : 5541897 : if (TREE_CODE (index) != INTEGER_CST)
8362 : 0 : return 0;
8363 : 5541897 : offset_int wpos
8364 : 5541897 : = wi::sext (wi::to_offset (index)
8365 : 11083794 : - wi::to_offset (min_index),
8366 : 5541897 : TYPE_PRECISION (sizetype));
8367 : 5541897 : wpos *= fieldsize;
8368 : 5541897 : if (!wi::fits_shwi_p (wpos))
8369 : : return 0;
8370 : 5541897 : pos = wpos.to_shwi ();
8371 : : }
8372 : :
8373 : 6151809 : if (mask && !CONSTRUCTOR_NO_CLEARING (init) && curpos != pos)
8374 : : {
8375 : 8 : if (valueinit == -1)
8376 : : {
8377 : 8 : tree zero = build_zero_cst (TREE_TYPE (type));
8378 : 16 : r = native_encode_initializer (zero, ptr + curpos,
8379 : : fieldsize, 0,
8380 : 8 : mask + curpos);
8381 : 8 : if (TREE_CODE (zero) == CONSTRUCTOR)
8382 : 0 : ggc_free (zero);
8383 : 8 : if (!r)
8384 : : return 0;
8385 : 8 : valueinit = curpos;
8386 : 8 : curpos += fieldsize;
8387 : : }
8388 : 26 : while (curpos != pos)
8389 : : {
8390 : 18 : memcpy (ptr + curpos, ptr + valueinit, fieldsize);
8391 : 18 : memcpy (mask + curpos, mask + valueinit, fieldsize);
8392 : 18 : curpos += fieldsize;
8393 : : }
8394 : : }
8395 : :
8396 : 6151294 : curpos = pos;
8397 : 6151294 : if (val)
8398 : 6229338 : do
8399 : : {
8400 : 6229338 : if (off == -1
8401 : 635283 : || (curpos >= off
8402 : 231061 : && (curpos + fieldsize
8403 : 231061 : <= (HOST_WIDE_INT) off + len)))
8404 : : {
8405 : 5805280 : if (full)
8406 : : {
8407 : 78052 : if (ptr)
8408 : 78052 : memcpy (ptr + (curpos - o), ptr + (pos - o),
8409 : : fieldsize);
8410 : 78052 : if (mask)
8411 : 0 : memcpy (mask + curpos, mask + pos, fieldsize);
8412 : : }
8413 : 11666188 : else if (!native_encode_initializer (val,
8414 : : ptr
8415 : 5727228 : ? ptr + curpos - o
8416 : : : NULL,
8417 : : fieldsize,
8418 : : off == -1 ? -1
8419 : : : 0,
8420 : : mask
8421 : 507 : ? mask + curpos
8422 : : : NULL))
8423 : : return 0;
8424 : : else
8425 : : {
8426 : : full = true;
8427 : : pos = curpos;
8428 : : }
8429 : : }
8430 : 424058 : else if (curpos + fieldsize > off
8431 : 20346 : && curpos < (HOST_WIDE_INT) off + len)
8432 : : {
8433 : : /* Partial overlap. */
8434 : 657 : unsigned char *p = NULL;
8435 : 657 : int no = 0;
8436 : 657 : int l;
8437 : 657 : gcc_assert (mask == NULL);
8438 : 657 : if (curpos >= off)
8439 : : {
8440 : 147 : if (ptr)
8441 : 147 : p = ptr + curpos - off;
8442 : 147 : l = MIN ((HOST_WIDE_INT) off + len - curpos,
8443 : : fieldsize);
8444 : : }
8445 : : else
8446 : : {
8447 : 510 : p = ptr;
8448 : 510 : no = off - curpos;
8449 : 510 : l = len;
8450 : : }
8451 : 657 : if (!native_encode_initializer (val, p, l, no, NULL))
8452 : : return 0;
8453 : : }
8454 : 6016332 : curpos += fieldsize;
8455 : : }
8456 : 6016332 : while (count-- != 0);
8457 : 5938288 : }
8458 : 37685 : return MIN (total_bytes - off, len);
8459 : : }
8460 : 488064 : else if (TREE_CODE (type) == RECORD_TYPE
8461 : 488064 : || TREE_CODE (type) == UNION_TYPE)
8462 : : {
8463 : 488064 : unsigned HOST_WIDE_INT cnt;
8464 : 488064 : constructor_elt *ce;
8465 : 488064 : tree fld_base = TYPE_FIELDS (type);
8466 : 488064 : tree to_free = NULL_TREE;
8467 : :
8468 : 488064 : gcc_assert (TREE_CODE (type) == RECORD_TYPE || mask == NULL);
8469 : 488064 : if (ptr != NULL)
8470 : 488064 : memset (ptr, '\0', MIN (total_bytes - o, len));
8471 : 85286 : for (cnt = 0; ; cnt++)
8472 : : {
8473 : 573350 : tree val = NULL_TREE, field = NULL_TREE;
8474 : 573350 : HOST_WIDE_INT pos = 0, fieldsize;
8475 : 573350 : unsigned HOST_WIDE_INT bpos = 0, epos = 0;
8476 : :
8477 : 573350 : if (to_free)
8478 : : {
8479 : 0 : ggc_free (to_free);
8480 : 0 : to_free = NULL_TREE;
8481 : : }
8482 : :
8483 : 573350 : if (vec_safe_iterate (CONSTRUCTOR_ELTS (init), cnt, &ce))
8484 : : {
8485 : 90144 : val = ce->value;
8486 : 90144 : field = ce->index;
8487 : 90144 : if (field == NULL_TREE)
8488 : : return 0;
8489 : :
8490 : 90144 : pos = int_byte_position (field);
8491 : 90144 : if (off != -1 && (HOST_WIDE_INT) off + len <= pos)
8492 : 1174 : continue;
8493 : : }
8494 : 483206 : else if (mask == NULL
8495 : 483206 : || CONSTRUCTOR_NO_CLEARING (init))
8496 : : break;
8497 : : else
8498 : : pos = total_bytes;
8499 : :
8500 : 90849 : if (mask && !CONSTRUCTOR_NO_CLEARING (init))
8501 : : {
8502 : : tree fld;
8503 : 9099 : for (fld = fld_base; fld; fld = DECL_CHAIN (fld))
8504 : : {
8505 : 8682 : if (TREE_CODE (fld) != FIELD_DECL)
8506 : 7621 : continue;
8507 : 1061 : if (fld == field)
8508 : : break;
8509 : 136 : if (DECL_PADDING_P (fld))
8510 : 87 : continue;
8511 : 49 : if (DECL_SIZE_UNIT (fld) == NULL_TREE
8512 : 49 : || !tree_fits_shwi_p (DECL_SIZE_UNIT (fld)))
8513 : : return 0;
8514 : 49 : if (integer_zerop (DECL_SIZE_UNIT (fld)))
8515 : 2 : continue;
8516 : : break;
8517 : : }
8518 : 1389 : if (fld == NULL_TREE)
8519 : : {
8520 : 417 : if (ce == NULL)
8521 : : break;
8522 : : return 0;
8523 : : }
8524 : 972 : fld_base = DECL_CHAIN (fld);
8525 : 972 : if (fld != field)
8526 : : {
8527 : 47 : cnt--;
8528 : 47 : field = fld;
8529 : 47 : pos = int_byte_position (field);
8530 : 47 : val = build_zero_cst (TREE_TYPE (fld));
8531 : 47 : if (TREE_CODE (val) == CONSTRUCTOR)
8532 : 0 : to_free = val;
8533 : : }
8534 : : }
8535 : :
8536 : 89017 : if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
8537 : 4105 : && TYPE_DOMAIN (TREE_TYPE (field))
8538 : 93122 : && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
8539 : : {
8540 : 81 : if (mask || off != -1)
8541 : : return 0;
8542 : 81 : if (val == NULL_TREE)
8543 : 0 : continue;
8544 : 81 : if (TREE_CODE (TREE_TYPE (val)) != ARRAY_TYPE)
8545 : : return 0;
8546 : 81 : fieldsize = int_size_in_bytes (TREE_TYPE (val));
8547 : 81 : if (fieldsize < 0
8548 : 81 : || (int) fieldsize != fieldsize
8549 : 81 : || (pos + fieldsize) > INT_MAX)
8550 : : return 0;
8551 : 81 : if (pos + fieldsize > total_bytes)
8552 : : {
8553 : 81 : if (ptr != NULL && total_bytes < len)
8554 : 81 : memset (ptr + total_bytes, '\0',
8555 : 81 : MIN (pos + fieldsize, len) - total_bytes);
8556 : : total_bytes = pos + fieldsize;
8557 : : }
8558 : : }
8559 : : else
8560 : : {
8561 : 88936 : if (DECL_SIZE_UNIT (field) == NULL_TREE
8562 : 88936 : || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
8563 : : return 0;
8564 : 88936 : fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
8565 : : }
8566 : 89017 : if (fieldsize == 0)
8567 : 1 : continue;
8568 : :
8569 : : /* Prepare to deal with integral bit-fields and filter out other
8570 : : bit-fields that do not start and end on a byte boundary. */
8571 : 89016 : if (DECL_BIT_FIELD (field))
8572 : : {
8573 : 2529 : if (!tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (field)))
8574 : : return 0;
8575 : 2529 : bpos = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
8576 : 2529 : if (INTEGRAL_TYPE_P (TREE_TYPE (field)))
8577 : : {
8578 : 2529 : bpos %= BITS_PER_UNIT;
8579 : 2529 : fieldsize = TYPE_PRECISION (TREE_TYPE (field)) + bpos;
8580 : 2529 : epos = fieldsize % BITS_PER_UNIT;
8581 : 2529 : fieldsize += BITS_PER_UNIT - 1;
8582 : 2529 : fieldsize /= BITS_PER_UNIT;
8583 : : }
8584 : 0 : else if (bpos % BITS_PER_UNIT
8585 : 0 : || DECL_SIZE (field) == NULL_TREE
8586 : 0 : || !tree_fits_shwi_p (DECL_SIZE (field))
8587 : 0 : || tree_to_shwi (DECL_SIZE (field)) % BITS_PER_UNIT)
8588 : : return 0;
8589 : : }
8590 : :
8591 : 89016 : if (off != -1 && pos + fieldsize <= off)
8592 : 1430 : continue;
8593 : :
8594 : 87586 : if (val == NULL_TREE)
8595 : 0 : continue;
8596 : :
8597 : 87586 : if (DECL_BIT_FIELD (field)
8598 : 87586 : && INTEGRAL_TYPE_P (TREE_TYPE (field)))
8599 : : {
8600 : : /* FIXME: Handle PDP endian. */
8601 : 2337 : if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
8602 : 251 : return 0;
8603 : :
8604 : 2337 : if (TREE_CODE (val) == NON_LVALUE_EXPR)
8605 : 2 : val = TREE_OPERAND (val, 0);
8606 : 2337 : if (TREE_CODE (val) != INTEGER_CST)
8607 : : return 0;
8608 : :
8609 : 2337 : tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
8610 : 2337 : tree repr_type = NULL_TREE;
8611 : 2337 : HOST_WIDE_INT rpos = 0;
8612 : 2337 : if (repr && INTEGRAL_TYPE_P (TREE_TYPE (repr)))
8613 : : {
8614 : 1828 : rpos = int_byte_position (repr);
8615 : 1828 : repr_type = TREE_TYPE (repr);
8616 : : }
8617 : : else
8618 : : {
8619 : 509 : repr_type = find_bitfield_repr_type (fieldsize, len);
8620 : 509 : if (repr_type == NULL_TREE)
8621 : : return 0;
8622 : 258 : HOST_WIDE_INT repr_size = int_size_in_bytes (repr_type);
8623 : 258 : gcc_assert (repr_size > 0 && repr_size <= len);
8624 : 258 : if (pos + repr_size <= o + len)
8625 : : rpos = pos;
8626 : : else
8627 : : {
8628 : 14 : rpos = o + len - repr_size;
8629 : 14 : gcc_assert (rpos <= pos);
8630 : : }
8631 : : }
8632 : :
8633 : 2086 : if (rpos > pos)
8634 : : return 0;
8635 : 2086 : wide_int w = wi::to_wide (val, TYPE_PRECISION (repr_type));
8636 : 2086 : int diff = (TYPE_PRECISION (repr_type)
8637 : 2086 : - TYPE_PRECISION (TREE_TYPE (field)));
8638 : 2086 : HOST_WIDE_INT bitoff = (pos - rpos) * BITS_PER_UNIT + bpos;
8639 : 2086 : if (!BYTES_BIG_ENDIAN)
8640 : 2086 : w = wi::lshift (w, bitoff);
8641 : : else
8642 : : w = wi::lshift (w, diff - bitoff);
8643 : 2086 : val = wide_int_to_tree (repr_type, w);
8644 : :
8645 : 2086 : unsigned char buf[MAX_BITSIZE_MODE_ANY_INT
8646 : : / BITS_PER_UNIT + 1];
8647 : 2086 : int l = native_encode_int (val, buf, sizeof buf, 0);
8648 : 2086 : if (l * BITS_PER_UNIT != TYPE_PRECISION (repr_type))
8649 : 0 : return 0;
8650 : :
8651 : 2086 : if (ptr == NULL)
8652 : 0 : continue;
8653 : :
8654 : : /* If the bitfield does not start at byte boundary, handle
8655 : : the partial byte at the start. */
8656 : 2086 : if (bpos
8657 : 1239 : && (off == -1 || (pos >= off && len >= 1)))
8658 : : {
8659 : 1176 : if (!BYTES_BIG_ENDIAN)
8660 : : {
8661 : 1176 : int msk = (1 << bpos) - 1;
8662 : 1176 : buf[pos - rpos] &= ~msk;
8663 : 1176 : buf[pos - rpos] |= ptr[pos - o] & msk;
8664 : 1176 : if (mask)
8665 : : {
8666 : 127 : if (fieldsize > 1 || epos == 0)
8667 : 109 : mask[pos] &= msk;
8668 : : else
8669 : 18 : mask[pos] &= (msk | ~((1 << epos) - 1));
8670 : : }
8671 : : }
8672 : : else
8673 : : {
8674 : : int msk = (1 << (BITS_PER_UNIT - bpos)) - 1;
8675 : : buf[pos - rpos] &= msk;
8676 : : buf[pos - rpos] |= ptr[pos - o] & ~msk;
8677 : : if (mask)
8678 : : {
8679 : : if (fieldsize > 1 || epos == 0)
8680 : : mask[pos] &= ~msk;
8681 : : else
8682 : : mask[pos] &= (~msk
8683 : : | ((1 << (BITS_PER_UNIT - epos))
8684 : : - 1));
8685 : : }
8686 : : }
8687 : : }
8688 : : /* If the bitfield does not end at byte boundary, handle
8689 : : the partial byte at the end. */
8690 : 2086 : if (epos
8691 : 1574 : && (off == -1
8692 : 989 : || pos + fieldsize <= (HOST_WIDE_INT) off + len))
8693 : : {
8694 : 1483 : if (!BYTES_BIG_ENDIAN)
8695 : : {
8696 : 1483 : int msk = (1 << epos) - 1;
8697 : 1483 : buf[pos - rpos + fieldsize - 1] &= msk;
8698 : 1483 : buf[pos - rpos + fieldsize - 1]
8699 : 1483 : |= ptr[pos + fieldsize - 1 - o] & ~msk;
8700 : 1483 : if (mask && (fieldsize > 1 || bpos == 0))
8701 : 140 : mask[pos + fieldsize - 1] &= ~msk;
8702 : : }
8703 : : else
8704 : : {
8705 : : int msk = (1 << (BITS_PER_UNIT - epos)) - 1;
8706 : : buf[pos - rpos + fieldsize - 1] &= ~msk;
8707 : : buf[pos - rpos + fieldsize - 1]
8708 : : |= ptr[pos + fieldsize - 1 - o] & msk;
8709 : : if (mask && (fieldsize > 1 || bpos == 0))
8710 : : mask[pos + fieldsize - 1] &= msk;
8711 : : }
8712 : : }
8713 : 2086 : if (off == -1
8714 : 1291 : || (pos >= off
8715 : 1214 : && (pos + fieldsize <= (HOST_WIDE_INT) off + len)))
8716 : : {
8717 : 1920 : memcpy (ptr + pos - o, buf + (pos - rpos), fieldsize);
8718 : 1920 : if (mask && (fieldsize > (bpos != 0) + (epos != 0)))
8719 : 61 : memset (mask + pos + (bpos != 0), 0,
8720 : 61 : fieldsize - (bpos != 0) - (epos != 0));
8721 : : }
8722 : : else
8723 : : {
8724 : : /* Partial overlap. */
8725 : 166 : HOST_WIDE_INT fsz = fieldsize;
8726 : 166 : gcc_assert (mask == NULL);
8727 : 166 : if (pos < off)
8728 : : {
8729 : 77 : fsz -= (off - pos);
8730 : 77 : pos = off;
8731 : : }
8732 : 166 : if (pos + fsz > (HOST_WIDE_INT) off + len)
8733 : 91 : fsz = (HOST_WIDE_INT) off + len - pos;
8734 : 166 : memcpy (ptr + pos - off, buf + (pos - rpos), fsz);
8735 : : }
8736 : 2086 : continue;
8737 : 2086 : }
8738 : :
8739 : 85249 : if (off == -1
8740 : 2649 : || (pos >= off
8741 : 2275 : && (pos + fieldsize <= (HOST_WIDE_INT) off + len)))
8742 : : {
8743 : 84376 : int fldsize = fieldsize;
8744 : 1776 : if (off == -1)
8745 : : {
8746 : 82600 : tree fld = DECL_CHAIN (field);
8747 : 1083531 : while (fld)
8748 : : {
8749 : 1007143 : if (TREE_CODE (fld) == FIELD_DECL)
8750 : : break;
8751 : 1000931 : fld = DECL_CHAIN (fld);
8752 : : }
8753 : 82600 : if (fld == NULL_TREE)
8754 : 76388 : fldsize = len - pos;
8755 : : }
8756 : 86858 : r = native_encode_initializer (val, ptr ? ptr + pos - o
8757 : : : NULL,
8758 : : fldsize,
8759 : : off == -1 ? -1 : 0,
8760 : 706 : mask ? mask + pos : NULL);
8761 : 84376 : if (!r)
8762 : : return 0;
8763 : 80055 : if (off == -1
8764 : 78504 : && fldsize != fieldsize
8765 : 84 : && r > fieldsize
8766 : 54 : && pos + r > total_bytes)
8767 : 85286 : total_bytes = pos + r;
8768 : : }
8769 : : else
8770 : : {
8771 : : /* Partial overlap. */
8772 : 873 : unsigned char *p = NULL;
8773 : 873 : int no = 0;
8774 : 873 : int l;
8775 : 873 : gcc_assert (mask == NULL);
8776 : 873 : if (pos >= off)
8777 : : {
8778 : 499 : if (ptr)
8779 : 499 : p = ptr + pos - off;
8780 : 499 : l = MIN ((HOST_WIDE_INT) off + len - pos,
8781 : : fieldsize);
8782 : : }
8783 : : else
8784 : : {
8785 : 374 : p = ptr;
8786 : 374 : no = off - pos;
8787 : 374 : l = len;
8788 : : }
8789 : 873 : if (!native_encode_initializer (val, p, l, no, NULL))
8790 : : return 0;
8791 : : }
8792 : 85286 : }
8793 : 483159 : return MIN (total_bytes - off, len);
8794 : : }
8795 : : return 0;
8796 : : }
8797 : : }
8798 : :
8799 : :
8800 : : /* Subroutine of native_interpret_expr. Interpret the contents of
8801 : : the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
8802 : : If the buffer cannot be interpreted, return NULL_TREE. */
8803 : :
8804 : : static tree
8805 : 3786437 : native_interpret_int (tree type, const unsigned char *ptr, int len)
8806 : : {
8807 : 3786437 : int total_bytes;
8808 : 3786437 : if (TREE_CODE (type) == BITINT_TYPE)
8809 : : {
8810 : 17 : struct bitint_info info;
8811 : 17 : bool ok = targetm.c.bitint_type_info (TYPE_PRECISION (type), &info);
8812 : 17 : gcc_assert (ok);
8813 : 17 : scalar_int_mode limb_mode = as_a <scalar_int_mode> (info.limb_mode);
8814 : 17 : if (TYPE_PRECISION (type) > GET_MODE_PRECISION (limb_mode))
8815 : : {
8816 : 17 : total_bytes = tree_to_uhwi (TYPE_SIZE_UNIT (type));
8817 : : /* More work is needed when adding _BitInt support to PDP endian
8818 : : if limb is smaller than word, or if _BitInt limb ordering doesn't
8819 : : match target endianity here. */
8820 : 17 : gcc_checking_assert (info.big_endian == WORDS_BIG_ENDIAN
8821 : : && (BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
8822 : : || (GET_MODE_SIZE (limb_mode)
8823 : : >= UNITS_PER_WORD)));
8824 : : }
8825 : : else
8826 : 0 : total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
8827 : : }
8828 : : else
8829 : 7572840 : total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
8830 : :
8831 : 3786437 : if (total_bytes > len)
8832 : : return NULL_TREE;
8833 : :
8834 : 3786220 : wide_int result = wi::from_buffer (ptr, total_bytes);
8835 : :
8836 : 3786220 : return wide_int_to_tree (type, result);
8837 : 3786220 : }
8838 : :
8839 : :
8840 : : /* Subroutine of native_interpret_expr. Interpret the contents of
8841 : : the buffer PTR of length LEN as a FIXED_CST of type TYPE.
8842 : : If the buffer cannot be interpreted, return NULL_TREE. */
8843 : :
8844 : : static tree
8845 : 0 : native_interpret_fixed (tree type, const unsigned char *ptr, int len)
8846 : : {
8847 : 0 : scalar_mode mode = SCALAR_TYPE_MODE (type);
8848 : 0 : int total_bytes = GET_MODE_SIZE (mode);
8849 : 0 : double_int result;
8850 : 0 : FIXED_VALUE_TYPE fixed_value;
8851 : :
8852 : 0 : if (total_bytes > len
8853 : 0 : || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
8854 : : return NULL_TREE;
8855 : :
8856 : 0 : result = double_int::from_buffer (ptr, total_bytes);
8857 : 0 : fixed_value = fixed_from_double_int (result, mode);
8858 : :
8859 : 0 : return build_fixed (type, fixed_value);
8860 : : }
8861 : :
8862 : :
8863 : : /* Subroutine of native_interpret_expr. Interpret the contents of
8864 : : the buffer PTR of length LEN as a REAL_CST of type TYPE.
8865 : : If the buffer cannot be interpreted, return NULL_TREE. */
8866 : :
8867 : : tree
8868 : 29249 : native_interpret_real (tree type, const unsigned char *ptr, int len)
8869 : : {
8870 : 29249 : scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
8871 : 29249 : int total_bytes = GET_MODE_SIZE (mode);
8872 : 29249 : unsigned char value;
8873 : : /* There are always 32 bits in each long, no matter the size of
8874 : : the hosts long. We handle floating point representations with
8875 : : up to 192 bits. */
8876 : 29249 : REAL_VALUE_TYPE r;
8877 : 29249 : long tmp[6];
8878 : :
8879 : 29249 : if (total_bytes > len || total_bytes > 24)
8880 : : return NULL_TREE;
8881 : 29188 : int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
8882 : :
8883 : 29188 : memset (tmp, 0, sizeof (tmp));
8884 : 216314 : for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
8885 : 187126 : bitpos += BITS_PER_UNIT)
8886 : : {
8887 : : /* Both OFFSET and BYTE index within a long;
8888 : : bitpos indexes the whole float. */
8889 : 187126 : int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
8890 : 187126 : if (UNITS_PER_WORD < 4)
8891 : : {
8892 : : int word = byte / UNITS_PER_WORD;
8893 : : if (WORDS_BIG_ENDIAN)
8894 : : word = (words - 1) - word;
8895 : : offset = word * UNITS_PER_WORD;
8896 : : if (BYTES_BIG_ENDIAN)
8897 : : offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
8898 : : else
8899 : : offset += byte % UNITS_PER_WORD;
8900 : : }
8901 : : else
8902 : : {
8903 : 187126 : offset = byte;
8904 : 187126 : if (BYTES_BIG_ENDIAN)
8905 : : {
8906 : : /* Reverse bytes within each long, or within the entire float
8907 : : if it's smaller than a long (for HFmode). */
8908 : : offset = MIN (3, total_bytes - 1) - offset;
8909 : : gcc_assert (offset >= 0);
8910 : : }
8911 : : }
8912 : 187126 : value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
8913 : :
8914 : 187126 : tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
8915 : : }
8916 : :
8917 : 29188 : real_from_target (&r, tmp, mode);
8918 : 29188 : return build_real (type, r);
8919 : : }
8920 : :
8921 : :
8922 : : /* Subroutine of native_interpret_expr. Interpret the contents of
8923 : : the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
8924 : : If the buffer cannot be interpreted, return NULL_TREE. */
8925 : :
8926 : : static tree
8927 : 1408 : native_interpret_complex (tree type, const unsigned char *ptr, int len)
8928 : : {
8929 : 1408 : tree etype, rpart, ipart;
8930 : 1408 : int size;
8931 : :
8932 : 1408 : etype = TREE_TYPE (type);
8933 : 1408 : size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
8934 : 1408 : if (size * 2 > len)
8935 : : return NULL_TREE;
8936 : 1373 : rpart = native_interpret_expr (etype, ptr, size);
8937 : 1373 : if (!rpart)
8938 : : return NULL_TREE;
8939 : 1372 : ipart = native_interpret_expr (etype, ptr+size, size);
8940 : 1372 : if (!ipart)
8941 : : return NULL_TREE;
8942 : 1372 : return build_complex (type, rpart, ipart);
8943 : : }
8944 : :
8945 : : /* Read a vector of type TYPE from the target memory image given by BYTES,
8946 : : which contains LEN bytes. The vector is known to be encodable using
8947 : : NPATTERNS interleaved patterns with NELTS_PER_PATTERN elements each.
8948 : :
8949 : : Return the vector on success, otherwise return null. */
8950 : :
8951 : : static tree
8952 : 187829 : native_interpret_vector_part (tree type, const unsigned char *bytes,
8953 : : unsigned int len, unsigned int npatterns,
8954 : : unsigned int nelts_per_pattern)
8955 : : {
8956 : 187829 : tree elt_type = TREE_TYPE (type);
8957 : 187829 : if (VECTOR_BOOLEAN_TYPE_P (type)
8958 : 187832 : && TYPE_PRECISION (elt_type) <= BITS_PER_UNIT)
8959 : : {
8960 : : /* This is the only case in which elements can be smaller than a byte.
8961 : : Element 0 is always in the lsb of the containing byte. */
8962 : 1 : unsigned int elt_bits = TYPE_PRECISION (elt_type);
8963 : 1 : if (elt_bits * npatterns * nelts_per_pattern > len * BITS_PER_UNIT)
8964 : : return NULL_TREE;
8965 : :
8966 : 1 : tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8967 : 17 : for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8968 : : {
8969 : 16 : unsigned int bit_index = i * elt_bits;
8970 : 16 : unsigned int byte_index = bit_index / BITS_PER_UNIT;
8971 : 16 : unsigned int lsb = bit_index % BITS_PER_UNIT;
8972 : 32 : builder.quick_push (bytes[byte_index] & (1 << lsb)
8973 : 17 : ? build_all_ones_cst (elt_type)
8974 : 1 : : build_zero_cst (elt_type));
8975 : : }
8976 : 1 : return builder.build ();
8977 : 1 : }
8978 : :
8979 : 187828 : unsigned int elt_bytes = tree_to_uhwi (TYPE_SIZE_UNIT (elt_type));
8980 : 187828 : if (elt_bytes * npatterns * nelts_per_pattern > len)
8981 : : return NULL_TREE;
8982 : :
8983 : 187828 : tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8984 : 742447 : for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8985 : : {
8986 : 554657 : tree elt = native_interpret_expr (elt_type, bytes, elt_bytes);
8987 : 554657 : if (!elt)
8988 : 38 : return NULL_TREE;
8989 : 554619 : builder.quick_push (elt);
8990 : 554619 : bytes += elt_bytes;
8991 : : }
8992 : 187790 : return builder.build ();
8993 : 187828 : }
8994 : :
8995 : : /* Subroutine of native_interpret_expr. Interpret the contents of
8996 : : the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
8997 : : If the buffer cannot be interpreted, return NULL_TREE. */
8998 : :
8999 : : static tree
9000 : 75734 : native_interpret_vector (tree type, const unsigned char *ptr, unsigned int len)
9001 : : {
9002 : 75734 : unsigned HOST_WIDE_INT size;
9003 : :
9004 : 75734 : if (!tree_to_poly_uint64 (TYPE_SIZE_UNIT (type)).is_constant (&size)
9005 : 75734 : || size > len)
9006 : 0 : return NULL_TREE;
9007 : :
9008 : 75734 : unsigned HOST_WIDE_INT count = TYPE_VECTOR_SUBPARTS (type).to_constant ();
9009 : 75734 : return native_interpret_vector_part (type, ptr, len, count, 1);
9010 : : }
9011 : :
9012 : :
9013 : : /* Subroutine of fold_view_convert_expr. Interpret the contents of
9014 : : the buffer PTR of length LEN as a constant of type TYPE. For
9015 : : INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
9016 : : we return a REAL_CST, etc... If the buffer cannot be interpreted,
9017 : : return NULL_TREE. */
9018 : :
9019 : : tree
9020 : 3935376 : native_interpret_expr (tree type, const unsigned char *ptr, int len)
9021 : : {
9022 : 3935376 : switch (TREE_CODE (type))
9023 : : {
9024 : 3786437 : case INTEGER_TYPE:
9025 : 3786437 : case ENUMERAL_TYPE:
9026 : 3786437 : case BOOLEAN_TYPE:
9027 : 3786437 : case POINTER_TYPE:
9028 : 3786437 : case REFERENCE_TYPE:
9029 : 3786437 : case OFFSET_TYPE:
9030 : 3786437 : case BITINT_TYPE:
9031 : 3786437 : return native_interpret_int (type, ptr, len);
9032 : :
9033 : 28016 : case REAL_TYPE:
9034 : 28016 : if (tree ret = native_interpret_real (type, ptr, len))
9035 : : {
9036 : : /* For floating point values in composite modes, punt if this
9037 : : folding doesn't preserve bit representation. As the mode doesn't
9038 : : have fixed precision while GCC pretends it does, there could be
9039 : : valid values that GCC can't really represent accurately.
9040 : : See PR95450. Even for other modes, e.g. x86 XFmode can have some
9041 : : bit combinationations which GCC doesn't preserve. */
9042 : 27955 : unsigned char buf[24 * 2];
9043 : 27955 : scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
9044 : 27955 : int total_bytes = GET_MODE_SIZE (mode);
9045 : 27955 : memcpy (buf + 24, ptr, total_bytes);
9046 : 27955 : clear_type_padding_in_mask (type, buf + 24);
9047 : 27955 : if (native_encode_expr (ret, buf, total_bytes, 0) != total_bytes
9048 : 27955 : || memcmp (buf + 24, buf, total_bytes) != 0)
9049 : 162 : return NULL_TREE;
9050 : : return ret;
9051 : : }
9052 : : return NULL_TREE;
9053 : :
9054 : 0 : case FIXED_POINT_TYPE:
9055 : 0 : return native_interpret_fixed (type, ptr, len);
9056 : :
9057 : 1408 : case COMPLEX_TYPE:
9058 : 1408 : return native_interpret_complex (type, ptr, len);
9059 : :
9060 : 75734 : case VECTOR_TYPE:
9061 : 75734 : return native_interpret_vector (type, ptr, len);
9062 : :
9063 : : default:
9064 : : return NULL_TREE;
9065 : : }
9066 : : }
9067 : :
9068 : : /* Returns true if we can interpret the contents of a native encoding
9069 : : as TYPE. */
9070 : :
9071 : : bool
9072 : 377360 : can_native_interpret_type_p (tree type)
9073 : : {
9074 : 377360 : switch (TREE_CODE (type))
9075 : : {
9076 : : case INTEGER_TYPE:
9077 : : case ENUMERAL_TYPE:
9078 : : case BOOLEAN_TYPE:
9079 : : case POINTER_TYPE:
9080 : : case REFERENCE_TYPE:
9081 : : case FIXED_POINT_TYPE:
9082 : : case REAL_TYPE:
9083 : : case COMPLEX_TYPE:
9084 : : case VECTOR_TYPE:
9085 : : case OFFSET_TYPE:
9086 : : return true;
9087 : 76803 : default:
9088 : 76803 : return false;
9089 : : }
9090 : : }
9091 : :
9092 : : /* Attempt to interpret aggregate of TYPE from bytes encoded in target
9093 : : byte order at PTR + OFF with LEN bytes. Does not handle unions. */
9094 : :
9095 : : tree
9096 : 617 : native_interpret_aggregate (tree type, const unsigned char *ptr, int off,
9097 : : int len)
9098 : : {
9099 : 617 : vec<constructor_elt, va_gc> *elts = NULL;
9100 : 617 : if (TREE_CODE (type) == ARRAY_TYPE)
9101 : : {
9102 : 193 : HOST_WIDE_INT eltsz = int_size_in_bytes (TREE_TYPE (type));
9103 : 386 : if (eltsz < 0 || eltsz > len || TYPE_DOMAIN (type) == NULL_TREE)
9104 : : return NULL_TREE;
9105 : :
9106 : 193 : HOST_WIDE_INT cnt = 0;
9107 : 193 : if (TYPE_MAX_VALUE (TYPE_DOMAIN (type)))
9108 : : {
9109 : 193 : if (!tree_fits_shwi_p (TYPE_MAX_VALUE (TYPE_DOMAIN (type))))
9110 : : return NULL_TREE;
9111 : 193 : cnt = tree_to_shwi (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) + 1;
9112 : : }
9113 : 193 : if (eltsz == 0)
9114 : 0 : cnt = 0;
9115 : 193 : HOST_WIDE_INT pos = 0;
9116 : 624 : for (HOST_WIDE_INT i = 0; i < cnt; i++, pos += eltsz)
9117 : : {
9118 : 431 : tree v = NULL_TREE;
9119 : 431 : if (pos >= len || pos + eltsz > len)
9120 : 617 : return NULL_TREE;
9121 : 431 : if (can_native_interpret_type_p (TREE_TYPE (type)))
9122 : : {
9123 : 359 : v = native_interpret_expr (TREE_TYPE (type),
9124 : 359 : ptr + off + pos, eltsz);
9125 : 359 : if (v == NULL_TREE)
9126 : : return NULL_TREE;
9127 : : }
9128 : 72 : else if (TREE_CODE (TREE_TYPE (type)) == RECORD_TYPE
9129 : 72 : || TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
9130 : 72 : v = native_interpret_aggregate (TREE_TYPE (type), ptr, off + pos,
9131 : : eltsz);
9132 : 72 : if (v == NULL_TREE)
9133 : 0 : return NULL_TREE;
9134 : 431 : CONSTRUCTOR_APPEND_ELT (elts, size_int (i), v);
9135 : : }
9136 : 193 : return build_constructor (type, elts);
9137 : : }
9138 : 424 : if (TREE_CODE (type) != RECORD_TYPE)
9139 : : return NULL_TREE;
9140 : 6964 : for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
9141 : : {
9142 : 1146 : if (TREE_CODE (field) != FIELD_DECL || DECL_PADDING_P (field)
9143 : 7686 : || is_empty_type (TREE_TYPE (field)))
9144 : 5428 : continue;
9145 : 1112 : tree fld = field;
9146 : 1112 : HOST_WIDE_INT bitoff = 0, pos = 0, sz = 0;
9147 : 1112 : int diff = 0;
9148 : 1112 : tree v = NULL_TREE;
9149 : 1112 : if (DECL_BIT_FIELD (field))
9150 : : {
9151 : 176 : fld = DECL_BIT_FIELD_REPRESENTATIVE (field);
9152 : 176 : if (fld && INTEGRAL_TYPE_P (TREE_TYPE (fld)))
9153 : : {
9154 : 164 : poly_int64 bitoffset;
9155 : 164 : poly_uint64 field_offset, fld_offset;
9156 : 164 : if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
9157 : 328 : && poly_int_tree_p (DECL_FIELD_OFFSET (fld), &fld_offset))
9158 : 164 : bitoffset = (field_offset - fld_offset) * BITS_PER_UNIT;
9159 : : else
9160 : : bitoffset = 0;
9161 : 164 : bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
9162 : 164 : - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)));
9163 : 164 : diff = (TYPE_PRECISION (TREE_TYPE (fld))
9164 : 164 : - TYPE_PRECISION (TREE_TYPE (field)));
9165 : 164 : if (!bitoffset.is_constant (&bitoff)
9166 : 164 : || bitoff < 0
9167 : 164 : || bitoff > diff)
9168 : 0 : return NULL_TREE;
9169 : : }
9170 : : else
9171 : : {
9172 : 12 : if (!tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (field)))
9173 : : return NULL_TREE;
9174 : 12 : int fieldsize = TYPE_PRECISION (TREE_TYPE (field));
9175 : 12 : int bpos = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
9176 : 12 : bpos %= BITS_PER_UNIT;
9177 : 12 : fieldsize += bpos;
9178 : 12 : fieldsize += BITS_PER_UNIT - 1;
9179 : 12 : fieldsize /= BITS_PER_UNIT;
9180 : 12 : tree repr_type = find_bitfield_repr_type (fieldsize, len);
9181 : 12 : if (repr_type == NULL_TREE)
9182 : : return NULL_TREE;
9183 : 12 : sz = int_size_in_bytes (repr_type);
9184 : 12 : if (sz < 0 || sz > len)
9185 : : return NULL_TREE;
9186 : 12 : pos = int_byte_position (field);
9187 : 12 : if (pos < 0 || pos > len || pos + fieldsize > len)
9188 : : return NULL_TREE;
9189 : 12 : HOST_WIDE_INT rpos;
9190 : 12 : if (pos + sz <= len)
9191 : : rpos = pos;
9192 : : else
9193 : : {
9194 : 0 : rpos = len - sz;
9195 : 0 : gcc_assert (rpos <= pos);
9196 : : }
9197 : 12 : bitoff = (HOST_WIDE_INT) (pos - rpos) * BITS_PER_UNIT + bpos;
9198 : 12 : pos = rpos;
9199 : 12 : diff = (TYPE_PRECISION (repr_type)
9200 : 12 : - TYPE_PRECISION (TREE_TYPE (field)));
9201 : 12 : v = native_interpret_expr (repr_type, ptr + off + pos, sz);
9202 : 12 : if (v == NULL_TREE)
9203 : : return NULL_TREE;
9204 : : fld = NULL_TREE;
9205 : : }
9206 : : }
9207 : :
9208 : 1112 : if (fld)
9209 : : {
9210 : 1100 : sz = int_size_in_bytes (TREE_TYPE (fld));
9211 : 1100 : if (sz < 0 || sz > len)
9212 : : return NULL_TREE;
9213 : 1100 : tree byte_pos = byte_position (fld);
9214 : 1100 : if (!tree_fits_shwi_p (byte_pos))
9215 : : return NULL_TREE;
9216 : 1100 : pos = tree_to_shwi (byte_pos);
9217 : 1100 : if (pos < 0 || pos > len || pos + sz > len)
9218 : : return NULL_TREE;
9219 : : }
9220 : 1112 : if (fld == NULL_TREE)
9221 : : /* Already handled above. */;
9222 : 1100 : else if (can_native_interpret_type_p (TREE_TYPE (fld)))
9223 : : {
9224 : 912 : v = native_interpret_expr (TREE_TYPE (fld),
9225 : 912 : ptr + off + pos, sz);
9226 : 912 : if (v == NULL_TREE)
9227 : : return NULL_TREE;
9228 : : }
9229 : 188 : else if (TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE
9230 : 188 : || TREE_CODE (TREE_TYPE (fld)) == ARRAY_TYPE)
9231 : 188 : v = native_interpret_aggregate (TREE_TYPE (fld), ptr, off + pos, sz);
9232 : 200 : if (v == NULL_TREE)
9233 : : return NULL_TREE;
9234 : 1112 : if (fld != field)
9235 : : {
9236 : 176 : if (TREE_CODE (v) != INTEGER_CST)
9237 : : return NULL_TREE;
9238 : :
9239 : : /* FIXME: Figure out how to handle PDP endian bitfields. */
9240 : 176 : if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
9241 : : return NULL_TREE;
9242 : 176 : if (!BYTES_BIG_ENDIAN)
9243 : 176 : v = wide_int_to_tree (TREE_TYPE (field),
9244 : 352 : wi::lrshift (wi::to_wide (v), bitoff));
9245 : : else
9246 : : v = wide_int_to_tree (TREE_TYPE (field),
9247 : : wi::lrshift (wi::to_wide (v),
9248 : : diff - bitoff));
9249 : : }
9250 : 1112 : CONSTRUCTOR_APPEND_ELT (elts, field, v);
9251 : : }
9252 : 424 : return build_constructor (type, elts);
9253 : : }
9254 : :
9255 : : /* Routines for manipulation of native_encode_expr encoded data if the encoded
9256 : : or extracted constant positions and/or sizes aren't byte aligned. */
9257 : :
9258 : : /* Shift left the bytes in PTR of SZ elements by AMNT bits, carrying over the
9259 : : bits between adjacent elements. AMNT should be within
9260 : : [0, BITS_PER_UNIT).
9261 : : Example, AMNT = 2:
9262 : : 00011111|11100000 << 2 = 01111111|10000000
9263 : : PTR[1] | PTR[0] PTR[1] | PTR[0]. */
9264 : :
9265 : : void
9266 : 129645 : shift_bytes_in_array_left (unsigned char *ptr, unsigned int sz,
9267 : : unsigned int amnt)
9268 : : {
9269 : 129645 : if (amnt == 0)
9270 : : return;
9271 : :
9272 : 10266 : unsigned char carry_over = 0U;
9273 : 10266 : unsigned char carry_mask = (~0U) << (unsigned char) (BITS_PER_UNIT - amnt);
9274 : 10266 : unsigned char clear_mask = (~0U) << amnt;
9275 : :
9276 : 58748 : for (unsigned int i = 0; i < sz; i++)
9277 : : {
9278 : 48482 : unsigned prev_carry_over = carry_over;
9279 : 48482 : carry_over = (ptr[i] & carry_mask) >> (BITS_PER_UNIT - amnt);
9280 : :
9281 : 48482 : ptr[i] <<= amnt;
9282 : 48482 : if (i != 0)
9283 : : {
9284 : 38216 : ptr[i] &= clear_mask;
9285 : 38216 : ptr[i] |= prev_carry_over;
9286 : : }
9287 : : }
9288 : : }
9289 : :
9290 : : /* Like shift_bytes_in_array_left but for big-endian.
9291 : : Shift right the bytes in PTR of SZ elements by AMNT bits, carrying over the
9292 : : bits between adjacent elements. AMNT should be within
9293 : : [0, BITS_PER_UNIT).
9294 : : Example, AMNT = 2:
9295 : : 00011111|11100000 >> 2 = 00000111|11111000
9296 : : PTR[0] | PTR[1] PTR[0] | PTR[1]. */
9297 : :
9298 : : void
9299 : 8 : shift_bytes_in_array_right (unsigned char *ptr, unsigned int sz,
9300 : : unsigned int amnt)
9301 : : {
9302 : 8 : if (amnt == 0)
9303 : : return;
9304 : :
9305 : 4 : unsigned char carry_over = 0U;
9306 : 4 : unsigned char carry_mask = ~(~0U << amnt);
9307 : :
9308 : 12 : for (unsigned int i = 0; i < sz; i++)
9309 : : {
9310 : 8 : unsigned prev_carry_over = carry_over;
9311 : 8 : carry_over = ptr[i] & carry_mask;
9312 : :
9313 : 8 : carry_over <<= (unsigned char) BITS_PER_UNIT - amnt;
9314 : 8 : ptr[i] >>= amnt;
9315 : 8 : ptr[i] |= prev_carry_over;
9316 : : }
9317 : : }
9318 : :
9319 : : /* Try to view-convert VECTOR_CST EXPR to VECTOR_TYPE TYPE by operating
9320 : : directly on the VECTOR_CST encoding, in a way that works for variable-
9321 : : length vectors. Return the resulting VECTOR_CST on success or null
9322 : : on failure. */
9323 : :
9324 : : static tree
9325 : 125601 : fold_view_convert_vector_encoding (tree type, tree expr)
9326 : : {
9327 : 125601 : tree expr_type = TREE_TYPE (expr);
9328 : 125601 : poly_uint64 type_bits, expr_bits;
9329 : 125601 : if (!poly_int_tree_p (TYPE_SIZE (type), &type_bits)
9330 : 125601 : || !poly_int_tree_p (TYPE_SIZE (expr_type), &expr_bits))
9331 : 0 : return NULL_TREE;
9332 : :
9333 : 125601 : poly_uint64 type_units = TYPE_VECTOR_SUBPARTS (type);
9334 : 125601 : poly_uint64 expr_units = TYPE_VECTOR_SUBPARTS (expr_type);
9335 : 125601 : unsigned int type_elt_bits = vector_element_size (type_bits, type_units);
9336 : 125601 : unsigned int expr_elt_bits = vector_element_size (expr_bits, expr_units);
9337 : :
9338 : : /* We can only preserve the semantics of a stepped pattern if the new
9339 : : vector element is an integer of the same size. */
9340 : 125601 : if (VECTOR_CST_STEPPED_P (expr)
9341 : 125601 : && (!INTEGRAL_TYPE_P (type) || type_elt_bits != expr_elt_bits))
9342 : : return NULL_TREE;
9343 : :
9344 : : /* The number of bits needed to encode one element from every pattern
9345 : : of the original vector. */
9346 : 112095 : unsigned int expr_sequence_bits
9347 : 112095 : = VECTOR_CST_NPATTERNS (expr) * expr_elt_bits;
9348 : :
9349 : : /* The number of bits needed to encode one element from every pattern
9350 : : of the result. */
9351 : 112095 : unsigned int type_sequence_bits
9352 : 112095 : = least_common_multiple (expr_sequence_bits, type_elt_bits);
9353 : :
9354 : : /* Don't try to read more bytes than are available, which can happen
9355 : : for constant-sized vectors if TYPE has larger elements than EXPR_TYPE.
9356 : : The general VIEW_CONVERT handling can cope with that case, so there's
9357 : : no point complicating things here. */
9358 : 112095 : unsigned int nelts_per_pattern = VECTOR_CST_NELTS_PER_PATTERN (expr);
9359 : 112095 : unsigned int buffer_bytes = CEIL (nelts_per_pattern * type_sequence_bits,
9360 : : BITS_PER_UNIT);
9361 : 112095 : unsigned int buffer_bits = buffer_bytes * BITS_PER_UNIT;
9362 : 112095 : if (known_gt (buffer_bits, expr_bits))
9363 : : return NULL_TREE;
9364 : :
9365 : : /* Get enough bytes of EXPR to form the new encoding. */
9366 : 112095 : auto_vec<unsigned char, 128> buffer (buffer_bytes);
9367 : 112095 : buffer.quick_grow (buffer_bytes);
9368 : 112095 : if (native_encode_vector_part (expr, buffer.address (), buffer_bytes, 0,
9369 : 112095 : buffer_bits / expr_elt_bits)
9370 : : != (int) buffer_bytes)
9371 : : return NULL_TREE;
9372 : :
9373 : : /* Reencode the bytes as TYPE. */
9374 : 112095 : unsigned int type_npatterns = type_sequence_bits / type_elt_bits;
9375 : 224190 : return native_interpret_vector_part (type, &buffer[0], buffer.length (),
9376 : 112095 : type_npatterns, nelts_per_pattern);
9377 : 112095 : }
9378 : :
9379 : : /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
9380 : : TYPE at compile-time. If we're unable to perform the conversion
9381 : : return NULL_TREE. */
9382 : :
9383 : : static tree
9384 : 11602826 : fold_view_convert_expr (tree type, tree expr)
9385 : : {
9386 : 11602826 : unsigned char buffer[128];
9387 : 11602826 : unsigned char *buf;
9388 : 11602826 : int len;
9389 : 11602826 : HOST_WIDE_INT l;
9390 : :
9391 : : /* Check that the host and target are sane. */
9392 : 11602826 : if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
9393 : : return NULL_TREE;
9394 : :
9395 : 11602826 : if (VECTOR_TYPE_P (type) && TREE_CODE (expr) == VECTOR_CST)
9396 : 125601 : if (tree res = fold_view_convert_vector_encoding (type, expr))
9397 : : return res;
9398 : :
9399 : 11490750 : l = int_size_in_bytes (type);
9400 : 11490750 : if (l > (int) sizeof (buffer)
9401 : 11490750 : && l <= WIDE_INT_MAX_PRECISION / BITS_PER_UNIT)
9402 : : {
9403 : 0 : buf = XALLOCAVEC (unsigned char, l);
9404 : 0 : len = l;
9405 : : }
9406 : : else
9407 : : {
9408 : : buf = buffer;
9409 : : len = sizeof (buffer);
9410 : : }
9411 : 11490750 : len = native_encode_expr (expr, buf, len);
9412 : 11490750 : if (len == 0)
9413 : : return NULL_TREE;
9414 : :
9415 : 3047790 : return native_interpret_expr (type, buf, len);
9416 : : }
9417 : :
9418 : : /* Build an expression for the address of T. Folds away INDIRECT_REF
9419 : : to avoid confusing the gimplify process. */
9420 : :
9421 : : tree
9422 : 362646093 : build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
9423 : : {
9424 : : /* The size of the object is not relevant when talking about its address. */
9425 : 362646093 : if (TREE_CODE (t) == WITH_SIZE_EXPR)
9426 : 0 : t = TREE_OPERAND (t, 0);
9427 : :
9428 : 362646093 : if (INDIRECT_REF_P (t))
9429 : : {
9430 : 43716700 : t = TREE_OPERAND (t, 0);
9431 : :
9432 : 43716700 : if (TREE_TYPE (t) != ptrtype)
9433 : 27800212 : t = build1_loc (loc, NOP_EXPR, ptrtype, t);
9434 : : }
9435 : 318929393 : else if (TREE_CODE (t) == MEM_REF
9436 : 318929393 : && integer_zerop (TREE_OPERAND (t, 1)))
9437 : : {
9438 : 1432158 : t = TREE_OPERAND (t, 0);
9439 : :
9440 : 1432158 : if (TREE_TYPE (t) != ptrtype)
9441 : 945842 : t = fold_convert_loc (loc, ptrtype, t);
9442 : : }
9443 : 317497235 : else if (TREE_CODE (t) == MEM_REF
9444 : 317497235 : && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
9445 : 20 : return fold_binary (POINTER_PLUS_EXPR, ptrtype,
9446 : : TREE_OPERAND (t, 0),
9447 : : convert_to_ptrofftype (TREE_OPERAND (t, 1)));
9448 : 317497215 : else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
9449 : : {
9450 : 20631507 : t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
9451 : :
9452 : 20631507 : if (TREE_TYPE (t) != ptrtype)
9453 : 133 : t = fold_convert_loc (loc, ptrtype, t);
9454 : : }
9455 : : else
9456 : 296865708 : t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
9457 : :
9458 : : return t;
9459 : : }
9460 : :
9461 : : /* Build an expression for the address of T. */
9462 : :
9463 : : tree
9464 : 335618110 : build_fold_addr_expr_loc (location_t loc, tree t)
9465 : : {
9466 : 335618110 : tree ptrtype = build_pointer_type (TREE_TYPE (t));
9467 : :
9468 : 335618110 : return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
9469 : : }
9470 : :
9471 : : /* Fold a unary expression of code CODE and type TYPE with operand
9472 : : OP0. Return the folded expression if folding is successful.
9473 : : Otherwise, return NULL_TREE. */
9474 : :
9475 : : tree
9476 : 1516165836 : fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
9477 : : {
9478 : 1516165836 : tree tem;
9479 : 1516165836 : tree arg0;
9480 : 1516165836 : enum tree_code_class kind = TREE_CODE_CLASS (code);
9481 : :
9482 : 1516165836 : gcc_assert (IS_EXPR_CODE_CLASS (kind)
9483 : : && TREE_CODE_LENGTH (code) == 1);
9484 : :
9485 : 1516165836 : arg0 = op0;
9486 : 1516165836 : if (arg0)
9487 : : {
9488 : 1516152787 : if (CONVERT_EXPR_CODE_P (code)
9489 : : || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
9490 : : {
9491 : : /* Don't use STRIP_NOPS, because signedness of argument type
9492 : : matters. */
9493 : 767030427 : STRIP_SIGN_NOPS (arg0);
9494 : : }
9495 : : else
9496 : : {
9497 : : /* Strip any conversions that don't change the mode. This
9498 : : is safe for every expression, except for a comparison
9499 : : expression because its signedness is derived from its
9500 : : operands.
9501 : :
9502 : : Note that this is done as an internal manipulation within
9503 : : the constant folder, in order to find the simplest
9504 : : representation of the arguments so that their form can be
9505 : : studied. In any cases, the appropriate type conversions
9506 : : should be put back in the tree that will get out of the
9507 : : constant folder. */
9508 : 749122360 : STRIP_NOPS (arg0);
9509 : : }
9510 : :
9511 : 1516152787 : if (CONSTANT_CLASS_P (arg0))
9512 : : {
9513 : 222396305 : tree tem = const_unop (code, type, arg0);
9514 : 222396305 : if (tem)
9515 : : {
9516 : 185426951 : if (TREE_TYPE (tem) != type)
9517 : 55033 : tem = fold_convert_loc (loc, type, tem);
9518 : 185426951 : return tem;
9519 : : }
9520 : : }
9521 : : }
9522 : :
9523 : 1330738885 : tem = generic_simplify (loc, code, type, op0);
9524 : 1330738885 : if (tem)
9525 : : return tem;
9526 : :
9527 : 1004401248 : if (TREE_CODE_CLASS (code) == tcc_unary)
9528 : : {
9529 : 529139900 : if (TREE_CODE (arg0) == COMPOUND_EXPR)
9530 : 977485 : return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9531 : : fold_build1_loc (loc, code, type,
9532 : 977485 : fold_convert_loc (loc, TREE_TYPE (op0),
9533 : 1954970 : TREE_OPERAND (arg0, 1))));
9534 : 528162415 : else if (TREE_CODE (arg0) == COND_EXPR)
9535 : : {
9536 : 315331 : tree arg01 = TREE_OPERAND (arg0, 1);
9537 : 315331 : tree arg02 = TREE_OPERAND (arg0, 2);
9538 : 315331 : if (! VOID_TYPE_P (TREE_TYPE (arg01)))
9539 : 311146 : arg01 = fold_build1_loc (loc, code, type,
9540 : : fold_convert_loc (loc,
9541 : 311146 : TREE_TYPE (op0), arg01));
9542 : 315331 : if (! VOID_TYPE_P (TREE_TYPE (arg02)))
9543 : 315328 : arg02 = fold_build1_loc (loc, code, type,
9544 : : fold_convert_loc (loc,
9545 : 315328 : TREE_TYPE (op0), arg02));
9546 : 315331 : tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
9547 : : arg01, arg02);
9548 : :
9549 : : /* If this was a conversion, and all we did was to move into
9550 : : inside the COND_EXPR, bring it back out. But leave it if
9551 : : it is a conversion from integer to integer and the
9552 : : result precision is no wider than a word since such a
9553 : : conversion is cheap and may be optimized away by combine,
9554 : : while it couldn't if it were outside the COND_EXPR. Then return
9555 : : so we don't get into an infinite recursion loop taking the
9556 : : conversion out and then back in. */
9557 : :
9558 : 315331 : if ((CONVERT_EXPR_CODE_P (code)
9559 : 10116 : || code == NON_LVALUE_EXPR)
9560 : 305239 : && TREE_CODE (tem) == COND_EXPR
9561 : 291811 : && TREE_CODE (TREE_OPERAND (tem, 1)) == code
9562 : 269285 : && TREE_CODE (TREE_OPERAND (tem, 2)) == code
9563 : 136582 : && ! VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (tem, 1)))
9564 : 136370 : && ! VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (tem, 2)))
9565 : 136370 : && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
9566 : 136370 : == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
9567 : 456937 : && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
9568 : 6099 : && (INTEGRAL_TYPE_P
9569 : : (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
9570 : 6067 : && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
9571 : 6044 : || flag_syntax_only))
9572 : 129495 : tem = build1_loc (loc, code, type,
9573 : : build3 (COND_EXPR,
9574 : 129495 : TREE_TYPE (TREE_OPERAND
9575 : : (TREE_OPERAND (tem, 1), 0)),
9576 : 129495 : TREE_OPERAND (tem, 0),
9577 : 129495 : TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
9578 : 129495 : TREE_OPERAND (TREE_OPERAND (tem, 2),
9579 : : 0)));
9580 : 315331 : return tem;
9581 : : }
9582 : : }
9583 : :
9584 : 1003108432 : switch (code)
9585 : : {
9586 : 40364385 : case NON_LVALUE_EXPR:
9587 : 40364385 : if (!maybe_lvalue_p (op0))
9588 : 31078826 : return fold_convert_loc (loc, type, op0);
9589 : : return NULL_TREE;
9590 : :
9591 : 479503461 : CASE_CONVERT:
9592 : 479503461 : case FLOAT_EXPR:
9593 : 479503461 : case FIX_TRUNC_EXPR:
9594 : 479503461 : if (COMPARISON_CLASS_P (op0))
9595 : : {
9596 : : /* If we have (type) (a CMP b) and type is an integral type, return
9597 : : new expression involving the new type. Canonicalize
9598 : : (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
9599 : : non-integral type.
9600 : : Do not fold the result as that would not simplify further, also
9601 : : folding again results in recursions. */
9602 : 1282345 : if (TREE_CODE (type) == BOOLEAN_TYPE)
9603 : 65380 : return build2_loc (loc, TREE_CODE (op0), type,
9604 : 65380 : TREE_OPERAND (op0, 0),
9605 : 130760 : TREE_OPERAND (op0, 1));
9606 : 1216965 : else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
9607 : 5930 : && TREE_CODE (type) != VECTOR_TYPE)
9608 : 5930 : return build3_loc (loc, COND_EXPR, type, op0,
9609 : : constant_boolean_node (true, type),
9610 : 5930 : constant_boolean_node (false, type));
9611 : : }
9612 : :
9613 : : /* Handle (T *)&A.B.C for A being of type T and B and C
9614 : : living at offset zero. This occurs frequently in
9615 : : C++ upcasting and then accessing the base. */
9616 : 479432151 : if (TREE_CODE (op0) == ADDR_EXPR
9617 : 93950925 : && POINTER_TYPE_P (type)
9618 : 569563919 : && handled_component_p (TREE_OPERAND (op0, 0)))
9619 : : {
9620 : 21862380 : poly_int64 bitsize, bitpos;
9621 : 21862380 : tree offset;
9622 : 21862380 : machine_mode mode;
9623 : 21862380 : int unsignedp, reversep, volatilep;
9624 : 21862380 : tree base
9625 : 21862380 : = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
9626 : : &offset, &mode, &unsignedp, &reversep,
9627 : : &volatilep);
9628 : : /* If the reference was to a (constant) zero offset, we can use
9629 : : the address of the base if it has the same base type
9630 : : as the result type and the pointer type is unqualified. */
9631 : 21862380 : if (!offset
9632 : 21779306 : && known_eq (bitpos, 0)
9633 : 14039002 : && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
9634 : 14039002 : == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
9635 : 21867722 : && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
9636 : 5154 : return fold_convert_loc (loc, type,
9637 : 5154 : build_fold_addr_expr_loc (loc, base));
9638 : : }
9639 : :
9640 : 479426997 : if (TREE_CODE (op0) == MODIFY_EXPR
9641 : 249642 : && TREE_CONSTANT (TREE_OPERAND (op0, 1))
9642 : : /* Detect assigning a bitfield. */
9643 : 479428542 : && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
9644 : 83 : && DECL_BIT_FIELD
9645 : : (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
9646 : : {
9647 : : /* Don't leave an assignment inside a conversion
9648 : : unless assigning a bitfield. */
9649 : 1505 : tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
9650 : : /* First do the assignment, then return converted constant. */
9651 : 1505 : tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
9652 : 1505 : suppress_warning (tem /* What warning? */);
9653 : 1505 : TREE_USED (tem) = 1;
9654 : 1505 : return tem;
9655 : : }
9656 : :
9657 : : /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
9658 : : constants (if x has signed type, the sign bit cannot be set
9659 : : in c). This folds extension into the BIT_AND_EXPR.
9660 : : ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
9661 : : very likely don't have maximal range for their precision and this
9662 : : transformation effectively doesn't preserve non-maximal ranges. */
9663 : 479425492 : if (TREE_CODE (type) == INTEGER_TYPE
9664 : 215010353 : && TREE_CODE (op0) == BIT_AND_EXPR
9665 : 479871165 : && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
9666 : : {
9667 : 213088 : tree and_expr = op0;
9668 : 213088 : tree and0 = TREE_OPERAND (and_expr, 0);
9669 : 213088 : tree and1 = TREE_OPERAND (and_expr, 1);
9670 : 213088 : int change = 0;
9671 : :
9672 : 213088 : if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
9673 : 213088 : || (TYPE_PRECISION (type)
9674 : 105072 : <= TYPE_PRECISION (TREE_TYPE (and_expr))))
9675 : : change = 1;
9676 : 60667 : else if (TYPE_PRECISION (TREE_TYPE (and1))
9677 : : <= HOST_BITS_PER_WIDE_INT
9678 : 60667 : && tree_fits_uhwi_p (and1))
9679 : : {
9680 : 59573 : unsigned HOST_WIDE_INT cst;
9681 : :
9682 : 59573 : cst = tree_to_uhwi (and1);
9683 : 119146 : cst &= HOST_WIDE_INT_M1U
9684 : 59573 : << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
9685 : 59573 : change = (cst == 0);
9686 : 59573 : if (change
9687 : 59573 : && !flag_syntax_only
9688 : 118291 : && (load_extend_op (TYPE_MODE (TREE_TYPE (and0)))
9689 : : == ZERO_EXTEND))
9690 : : {
9691 : : tree uns = unsigned_type_for (TREE_TYPE (and0));
9692 : : and0 = fold_convert_loc (loc, uns, and0);
9693 : : and1 = fold_convert_loc (loc, uns, and1);
9694 : : }
9695 : : }
9696 : 59573 : if (change)
9697 : : {
9698 : 211994 : tree and1_type = TREE_TYPE (and1);
9699 : 211994 : unsigned prec = MAX (TYPE_PRECISION (and1_type),
9700 : : TYPE_PRECISION (type));
9701 : 211994 : tem = force_fit_type (type,
9702 : 211994 : wide_int::from (wi::to_wide (and1), prec,
9703 : 211994 : TYPE_SIGN (and1_type)),
9704 : 211994 : 0, TREE_OVERFLOW (and1));
9705 : 211994 : return fold_build2_loc (loc, BIT_AND_EXPR, type,
9706 : 211994 : fold_convert_loc (loc, type, and0), tem);
9707 : : }
9708 : : }
9709 : :
9710 : : /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
9711 : : cast (T1)X will fold away. We assume that this happens when X itself
9712 : : is a cast. */
9713 : 479213498 : if (POINTER_TYPE_P (type)
9714 : 232296490 : && TREE_CODE (arg0) == POINTER_PLUS_EXPR
9715 : 483007117 : && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
9716 : : {
9717 : 1373576 : tree arg00 = TREE_OPERAND (arg0, 0);
9718 : 1373576 : tree arg01 = TREE_OPERAND (arg0, 1);
9719 : :
9720 : : /* If -fsanitize=alignment, avoid this optimization in GENERIC
9721 : : when the pointed type needs higher alignment than
9722 : : the p+ first operand's pointed type. */
9723 : 1373576 : if (!in_gimple_form
9724 : 1346160 : && sanitize_flags_p (SANITIZE_ALIGNMENT)
9725 : 1373744 : && (min_align_of_type (TREE_TYPE (type))
9726 : 84 : > min_align_of_type (TREE_TYPE (TREE_TYPE (arg00)))))
9727 : : return NULL_TREE;
9728 : :
9729 : : /* Similarly, avoid this optimization in GENERIC for -fsanitize=null
9730 : : when type is a reference type and arg00's type is not,
9731 : : because arg00 could be validly nullptr and if arg01 doesn't return,
9732 : : we don't want false positive binding of reference to nullptr. */
9733 : 1373551 : if (TREE_CODE (type) == REFERENCE_TYPE
9734 : 902910 : && !in_gimple_form
9735 : 902895 : && sanitize_flags_p (SANITIZE_NULL)
9736 : 1373559 : && TREE_CODE (TREE_TYPE (arg00)) != REFERENCE_TYPE)
9737 : : return NULL_TREE;
9738 : :
9739 : 1373543 : arg00 = fold_convert_loc (loc, type, arg00);
9740 : 1373543 : return fold_build_pointer_plus_loc (loc, arg00, arg01);
9741 : : }
9742 : :
9743 : : /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
9744 : : of the same precision, and X is an integer type not narrower than
9745 : : types T1 or T2, i.e. the cast (T2)X isn't an extension. */
9746 : 477839922 : if (INTEGRAL_TYPE_P (type)
9747 : 218868619 : && TREE_CODE (op0) == BIT_NOT_EXPR
9748 : 492128 : && INTEGRAL_TYPE_P (TREE_TYPE (op0))
9749 : 492128 : && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
9750 : 478147360 : && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
9751 : : {
9752 : 305712 : tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
9753 : 371107 : if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
9754 : 371106 : && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
9755 : 248596 : return fold_build1_loc (loc, BIT_NOT_EXPR, type,
9756 : 248596 : fold_convert_loc (loc, type, tem));
9757 : : }
9758 : :
9759 : : /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
9760 : : type of X and Y (integer types only). */
9761 : 477591326 : if (INTEGRAL_TYPE_P (type)
9762 : 218620023 : && TREE_CODE (op0) == MULT_EXPR
9763 : 7292175 : && INTEGRAL_TYPE_P (TREE_TYPE (op0))
9764 : 7271605 : && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0))
9765 : 477675006 : && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
9766 : 15687 : || !sanitize_flags_p (SANITIZE_SI_OVERFLOW)))
9767 : : {
9768 : : /* Be careful not to introduce new overflows. */
9769 : 83635 : tree mult_type;
9770 : 83635 : if (TYPE_OVERFLOW_WRAPS (type))
9771 : : mult_type = type;
9772 : : else
9773 : 1924 : mult_type = unsigned_type_for (type);
9774 : :
9775 : 83635 : if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
9776 : : {
9777 : 167270 : tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
9778 : : fold_convert_loc (loc, mult_type,
9779 : 83635 : TREE_OPERAND (op0, 0)),
9780 : : fold_convert_loc (loc, mult_type,
9781 : 83635 : TREE_OPERAND (op0, 1)));
9782 : 83635 : return fold_convert_loc (loc, type, tem);
9783 : : }
9784 : : }
9785 : :
9786 : : return NULL_TREE;
9787 : :
9788 : 197886619 : case VIEW_CONVERT_EXPR:
9789 : 197886619 : if (TREE_CODE (op0) == MEM_REF)
9790 : : {
9791 : 3229 : if (TYPE_ALIGN (TREE_TYPE (op0)) != TYPE_ALIGN (type))
9792 : 1673 : type = build_aligned_type (type, TYPE_ALIGN (TREE_TYPE (op0)));
9793 : 3229 : tem = fold_build2_loc (loc, MEM_REF, type,
9794 : 3229 : TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
9795 : 3229 : REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
9796 : 3229 : return tem;
9797 : : }
9798 : :
9799 : : return NULL_TREE;
9800 : :
9801 : 3403185 : case NEGATE_EXPR:
9802 : 3403185 : tem = fold_negate_expr (loc, arg0);
9803 : 3403185 : if (tem)
9804 : 1524 : return fold_convert_loc (loc, type, tem);
9805 : : return NULL_TREE;
9806 : :
9807 : 2267132 : case ABS_EXPR:
9808 : : /* Convert fabs((double)float) into (double)fabsf(float). */
9809 : 2267132 : if (TREE_CODE (arg0) == NOP_EXPR
9810 : 14134 : && TREE_CODE (type) == REAL_TYPE)
9811 : : {
9812 : 14074 : tree targ0 = strip_float_extensions (arg0);
9813 : 14074 : if (targ0 != arg0)
9814 : 13846 : return fold_convert_loc (loc, type,
9815 : : fold_build1_loc (loc, ABS_EXPR,
9816 : 13846 : TREE_TYPE (targ0),
9817 : 13846 : targ0));
9818 : : }
9819 : : return NULL_TREE;
9820 : :
9821 : 2303787 : case BIT_NOT_EXPR:
9822 : : /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
9823 : 2303787 : if (TREE_CODE (arg0) == BIT_XOR_EXPR
9824 : 2304440 : && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
9825 : : fold_convert_loc (loc, type,
9826 : 653 : TREE_OPERAND (arg0, 0)))))
9827 : 8 : return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
9828 : : fold_convert_loc (loc, type,
9829 : 16 : TREE_OPERAND (arg0, 1)));
9830 : 2303779 : else if (TREE_CODE (arg0) == BIT_XOR_EXPR
9831 : 2304424 : && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
9832 : : fold_convert_loc (loc, type,
9833 : 645 : TREE_OPERAND (arg0, 1)))))
9834 : 23 : return fold_build2_loc (loc, BIT_XOR_EXPR, type,
9835 : : fold_convert_loc (loc, type,
9836 : 46 : TREE_OPERAND (arg0, 0)), tem);
9837 : :
9838 : : return NULL_TREE;
9839 : :
9840 : 43265368 : case TRUTH_NOT_EXPR:
9841 : : /* Note that the operand of this must be an int
9842 : : and its values must be 0 or 1.
9843 : : ("true" is a fixed value perhaps depending on the language,
9844 : : but we don't handle values other than 1 correctly yet.) */
9845 : 43265368 : tem = fold_truth_not_expr (loc, arg0);
9846 : 43265368 : if (!tem)
9847 : : return NULL_TREE;
9848 : 29507387 : return fold_convert_loc (loc, type, tem);
9849 : :
9850 : 59006771 : case INDIRECT_REF:
9851 : : /* Fold *&X to X if X is an lvalue. */
9852 : 59006771 : if (TREE_CODE (op0) == ADDR_EXPR)
9853 : : {
9854 : 5916 : tree op00 = TREE_OPERAND (op0, 0);
9855 : 5916 : if ((VAR_P (op00)
9856 : : || TREE_CODE (op00) == PARM_DECL
9857 : : || TREE_CODE (op00) == RESULT_DECL)
9858 : 4746 : && !TREE_READONLY (op00))
9859 : : return op00;
9860 : : }
9861 : : return NULL_TREE;
9862 : :
9863 : : default:
9864 : : return NULL_TREE;
9865 : : } /* switch (code) */
9866 : : }
9867 : :
9868 : :
9869 : : /* If the operation was a conversion do _not_ mark a resulting constant
9870 : : with TREE_OVERFLOW if the original constant was not. These conversions
9871 : : have implementation defined behavior and retaining the TREE_OVERFLOW
9872 : : flag here would confuse later passes such as VRP. */
9873 : : tree
9874 : 0 : fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
9875 : : tree type, tree op0)
9876 : : {
9877 : 0 : tree res = fold_unary_loc (loc, code, type, op0);
9878 : 0 : if (res
9879 : 0 : && TREE_CODE (res) == INTEGER_CST
9880 : 0 : && TREE_CODE (op0) == INTEGER_CST
9881 : 0 : && CONVERT_EXPR_CODE_P (code))
9882 : 0 : TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
9883 : :
9884 : 0 : return res;
9885 : : }
9886 : :
9887 : : /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
9888 : : operands OP0 and OP1. LOC is the location of the resulting expression.
9889 : : ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
9890 : : Return the folded expression if folding is successful. Otherwise,
9891 : : return NULL_TREE. */
9892 : : static tree
9893 : 21578093 : fold_truth_andor (location_t loc, enum tree_code code, tree type,
9894 : : tree arg0, tree arg1, tree op0, tree op1)
9895 : : {
9896 : 21578093 : tree tem;
9897 : :
9898 : : /* We only do these simplifications if we are optimizing. */
9899 : 21578093 : if (!optimize)
9900 : : return NULL_TREE;
9901 : :
9902 : : /* Check for things like (A || B) && (A || C). We can convert this
9903 : : to A || (B && C). Note that either operator can be any of the four
9904 : : truth and/or operations and the transformation will still be
9905 : : valid. Also note that we only care about order for the
9906 : : ANDIF and ORIF operators. If B contains side effects, this
9907 : : might change the truth-value of A. */
9908 : 21358483 : if (TREE_CODE (arg0) == TREE_CODE (arg1)
9909 : 4718254 : && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
9910 : : || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
9911 : : || TREE_CODE (arg0) == TRUTH_AND_EXPR
9912 : 4718254 : || TREE_CODE (arg0) == TRUTH_OR_EXPR)
9913 : 21383027 : && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
9914 : : {
9915 : 24042 : tree a00 = TREE_OPERAND (arg0, 0);
9916 : 24042 : tree a01 = TREE_OPERAND (arg0, 1);
9917 : 24042 : tree a10 = TREE_OPERAND (arg1, 0);
9918 : 24042 : tree a11 = TREE_OPERAND (arg1, 1);
9919 : 48084 : bool commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
9920 : 24042 : || TREE_CODE (arg0) == TRUTH_AND_EXPR)
9921 : 24042 : && (code == TRUTH_AND_EXPR
9922 : 8246 : || code == TRUTH_OR_EXPR));
9923 : :
9924 : 24042 : if (operand_equal_p (a00, a10, 0))
9925 : 379 : return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
9926 : 379 : fold_build2_loc (loc, code, type, a01, a11));
9927 : 23663 : else if (commutative && operand_equal_p (a00, a11, 0))
9928 : 0 : return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
9929 : 0 : fold_build2_loc (loc, code, type, a01, a10));
9930 : 23663 : else if (commutative && operand_equal_p (a01, a10, 0))
9931 : 0 : return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
9932 : 0 : fold_build2_loc (loc, code, type, a00, a11));
9933 : :
9934 : : /* This case if tricky because we must either have commutative
9935 : : operators or else A10 must not have side-effects. */
9936 : :
9937 : 23639 : else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
9938 : 46790 : && operand_equal_p (a01, a11, 0))
9939 : 43 : return fold_build2_loc (loc, TREE_CODE (arg0), type,
9940 : : fold_build2_loc (loc, code, type, a00, a10),
9941 : 43 : a01);
9942 : : }
9943 : :
9944 : : /* See if we can build a range comparison. */
9945 : 21358061 : if ((tem = fold_range_test (loc, code, type, op0, op1)) != 0)
9946 : : return tem;
9947 : :
9948 : 20097018 : if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
9949 : 20095019 : || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
9950 : : {
9951 : 17301 : tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
9952 : 17301 : if (tem)
9953 : 13 : return fold_build2_loc (loc, code, type, tem, arg1);
9954 : : }
9955 : :
9956 : 20097005 : if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
9957 : 20089751 : || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
9958 : : {
9959 : 112988 : tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
9960 : 112988 : if (tem)
9961 : 76 : return fold_build2_loc (loc, code, type, arg0, tem);
9962 : : }
9963 : :
9964 : : /* Check for the possibility of merging component references. If our
9965 : : lhs is another similar operation, try to merge its rhs with our
9966 : : rhs. Then try to merge our lhs and rhs. */
9967 : 20096929 : if (TREE_CODE (arg0) == code
9968 : 20769249 : && (tem = fold_truth_andor_1 (loc, code, type,
9969 : 672320 : TREE_OPERAND (arg0, 1), arg1)) != 0)
9970 : 1191 : return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9971 : :
9972 : 20095738 : if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
9973 : : return tem;
9974 : :
9975 : 20024594 : bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
9976 : 20024594 : if (param_logical_op_non_short_circuit != -1)
9977 : 7507 : logical_op_non_short_circuit
9978 : 7507 : = param_logical_op_non_short_circuit;
9979 : 20024594 : if (logical_op_non_short_circuit
9980 : 20020799 : && !sanitize_coverage_p ()
9981 : 20024594 : && (code == TRUTH_AND_EXPR
9982 : 20020796 : || code == TRUTH_ANDIF_EXPR
9983 : 9570053 : || code == TRUTH_OR_EXPR
9984 : 9570053 : || code == TRUTH_ORIF_EXPR))
9985 : : {
9986 : 20020796 : enum tree_code ncode, icode;
9987 : :
9988 : 50492335 : ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
9989 : 20020796 : ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
9990 : 10450743 : icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
9991 : :
9992 : : /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
9993 : : or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
9994 : : We don't want to pack more than two leafs to a non-IF AND/OR
9995 : : expression.
9996 : : If tree-code of left-hand operand isn't an AND/OR-IF code and not
9997 : : equal to IF-CODE, then we don't want to add right-hand operand.
9998 : : If the inner right-hand side of left-hand operand has
9999 : : side-effects, or isn't simple, then we can't add to it,
10000 : : as otherwise we might destroy if-sequence. */
10001 : 20020796 : if (TREE_CODE (arg0) == icode
10002 : 663819 : && simple_condition_p (arg1)
10003 : : /* Needed for sequence points to handle trappings, and
10004 : : side-effects. */
10005 : 20068544 : && simple_condition_p (TREE_OPERAND (arg0, 1)))
10006 : : {
10007 : 41232 : tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
10008 : : arg1);
10009 : 41232 : return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
10010 : 41232 : tem);
10011 : : }
10012 : : /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
10013 : : or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C). */
10014 : 19979564 : else if (TREE_CODE (arg1) == icode
10015 : 2297 : && simple_condition_p (arg0)
10016 : : /* Needed for sequence points to handle trappings, and
10017 : : side-effects. */
10018 : 19980128 : && simple_condition_p (TREE_OPERAND (arg1, 0)))
10019 : : {
10020 : 21 : tem = fold_build2_loc (loc, ncode, type,
10021 : 21 : arg0, TREE_OPERAND (arg1, 0));
10022 : 21 : return fold_build2_loc (loc, icode, type, tem,
10023 : 42 : TREE_OPERAND (arg1, 1));
10024 : : }
10025 : : /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
10026 : : into (A OR B).
10027 : : For sequence point consistancy, we need to check for trapping,
10028 : : and side-effects. */
10029 : 4034907 : else if (code == icode && simple_condition_p (arg0)
10030 : 20579866 : && simple_condition_p (arg1))
10031 : 334277 : return fold_build2_loc (loc, ncode, type, arg0, arg1);
10032 : : }
10033 : :
10034 : : return NULL_TREE;
10035 : : }
10036 : :
10037 : : /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
10038 : : by changing CODE to reduce the magnitude of constants involved in
10039 : : ARG0 of the comparison.
10040 : : Returns a canonicalized comparison tree if a simplification was
10041 : : possible, otherwise returns NULL_TREE.
10042 : : Set *STRICT_OVERFLOW_P to true if the canonicalization is only
10043 : : valid if signed overflow is undefined. */
10044 : :
10045 : : static tree
10046 : 154532229 : maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
10047 : : tree arg0, tree arg1,
10048 : : bool *strict_overflow_p)
10049 : : {
10050 : 154532229 : enum tree_code code0 = TREE_CODE (arg0);
10051 : 154532229 : tree t, cst0 = NULL_TREE;
10052 : 154532229 : int sgn0;
10053 : :
10054 : : /* Match A +- CST code arg1. We can change this only if overflow
10055 : : is undefined. */
10056 : 154532229 : if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10057 : 117177030 : && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
10058 : : /* In principle pointers also have undefined overflow behavior,
10059 : : but that causes problems elsewhere. */
10060 : 59838110 : && !POINTER_TYPE_P (TREE_TYPE (arg0))
10061 : 59838110 : && (code0 == MINUS_EXPR
10062 : 59838110 : || code0 == PLUS_EXPR)
10063 : 2161407 : && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
10064 : : return NULL_TREE;
10065 : :
10066 : : /* Identify the constant in arg0 and its sign. */
10067 : 1784557 : cst0 = TREE_OPERAND (arg0, 1);
10068 : 1784557 : sgn0 = tree_int_cst_sgn (cst0);
10069 : :
10070 : : /* Overflowed constants and zero will cause problems. */
10071 : 1784557 : if (integer_zerop (cst0)
10072 : 1784557 : || TREE_OVERFLOW (cst0))
10073 : : return NULL_TREE;
10074 : :
10075 : : /* See if we can reduce the magnitude of the constant in
10076 : : arg0 by changing the comparison code. */
10077 : : /* A - CST < arg1 -> A - CST-1 <= arg1. */
10078 : 1784557 : if (code == LT_EXPR
10079 : 998063 : && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
10080 : : code = LE_EXPR;
10081 : : /* A + CST > arg1 -> A + CST-1 >= arg1. */
10082 : 1610016 : else if (code == GT_EXPR
10083 : 486154 : && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
10084 : : code = GE_EXPR;
10085 : : /* A + CST <= arg1 -> A + CST-1 < arg1. */
10086 : 1456085 : else if (code == LE_EXPR
10087 : 557605 : && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
10088 : : code = LT_EXPR;
10089 : : /* A - CST >= arg1 -> A - CST-1 > arg1. */
10090 : 1259247 : else if (code == GE_EXPR
10091 : 395156 : && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
10092 : : code = GT_EXPR;
10093 : : else
10094 : : return NULL_TREE;
10095 : 742708 : *strict_overflow_p = true;
10096 : :
10097 : : /* Now build the constant reduced in magnitude. But not if that
10098 : : would produce one outside of its types range. */
10099 : 1485416 : if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
10100 : 1485416 : && ((sgn0 == 1
10101 : 350634 : && TYPE_MIN_VALUE (TREE_TYPE (cst0))
10102 : 350634 : && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
10103 : 742708 : || (sgn0 == -1
10104 : 392074 : && TYPE_MAX_VALUE (TREE_TYPE (cst0))
10105 : 392074 : && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
10106 : 0 : return NULL_TREE;
10107 : :
10108 : 1093342 : t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
10109 : 742708 : cst0, build_int_cst (TREE_TYPE (cst0), 1));
10110 : 742708 : t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
10111 : 742708 : t = fold_convert (TREE_TYPE (arg1), t);
10112 : :
10113 : 742708 : return fold_build2_loc (loc, code, type, t, arg1);
10114 : : }
10115 : :
10116 : : /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
10117 : : overflow further. Try to decrease the magnitude of constants involved
10118 : : by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
10119 : : and put sole constants at the second argument position.
10120 : : Returns the canonicalized tree if changed, otherwise NULL_TREE. */
10121 : :
10122 : : static tree
10123 : 77616779 : maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
10124 : : tree arg0, tree arg1)
10125 : : {
10126 : 77616779 : tree t;
10127 : 77616779 : bool strict_overflow_p;
10128 : 77616779 : const char * const warnmsg = G_("assuming signed overflow does not occur "
10129 : : "when reducing constant in comparison");
10130 : :
10131 : : /* Try canonicalization by simplifying arg0. */
10132 : 77616779 : strict_overflow_p = false;
10133 : 77616779 : t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
10134 : : &strict_overflow_p);
10135 : 77616779 : if (t)
10136 : : {
10137 : 701329 : if (strict_overflow_p)
10138 : 701329 : fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
10139 : 701329 : return t;
10140 : : }
10141 : :
10142 : : /* Try canonicalization by simplifying arg1 using the swapped
10143 : : comparison. */
10144 : 76915450 : code = swap_tree_comparison (code);
10145 : 76915450 : strict_overflow_p = false;
10146 : 76915450 : t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
10147 : : &strict_overflow_p);
10148 : 76915450 : if (t && strict_overflow_p)
10149 : 41379 : fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
10150 : : return t;
10151 : : }
10152 : :
10153 : : /* Return whether BASE + OFFSET + BITPOS may wrap around the address
10154 : : space. This is used to avoid issuing overflow warnings for
10155 : : expressions like &p->x which cannot wrap. */
10156 : :
10157 : : static bool
10158 : 17160 : pointer_may_wrap_p (tree base, tree offset, poly_int64 bitpos)
10159 : : {
10160 : 17160 : if (!POINTER_TYPE_P (TREE_TYPE (base)))
10161 : : return true;
10162 : :
10163 : 9707 : if (maybe_lt (bitpos, 0))
10164 : : return true;
10165 : :
10166 : : poly_wide_int wi_offset;
10167 : 8915 : int precision = TYPE_PRECISION (TREE_TYPE (base));
10168 : 8915 : if (offset == NULL_TREE)
10169 : 4469 : wi_offset = wi::zero (precision);
10170 : 4446 : else if (!poly_int_tree_p (offset) || TREE_OVERFLOW (offset))
10171 : : return true;
10172 : : else
10173 : 0 : wi_offset = wi::to_poly_wide (offset);
10174 : :
10175 : 4469 : wi::overflow_type overflow;
10176 : 4469 : poly_wide_int units = wi::shwi (bits_to_bytes_round_down (bitpos),
10177 : 4469 : precision);
10178 : 4469 : poly_wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
10179 : 4469 : if (overflow)
10180 : : return true;
10181 : :
10182 : 4469 : poly_uint64 total_hwi, size;
10183 : 4469 : if (!total.to_uhwi (&total_hwi)
10184 : 4469 : || !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (base))),
10185 : : &size)
10186 : 8840 : || known_eq (size, 0U))
10187 : 98 : return true;
10188 : :
10189 : 4371 : if (known_le (total_hwi, size))
10190 : : return false;
10191 : :
10192 : : /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
10193 : : array. */
10194 : 1017 : if (TREE_CODE (base) == ADDR_EXPR
10195 : 0 : && poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (base, 0))),
10196 : : &size)
10197 : 0 : && maybe_ne (size, 0U)
10198 : 1017 : && known_le (total_hwi, size))
10199 : : return false;
10200 : :
10201 : : return true;
10202 : 8915 : }
10203 : :
10204 : : /* Return a positive integer when the symbol DECL is known to have
10205 : : a nonzero address, zero when it's known not to (e.g., it's a weak
10206 : : symbol), and a negative integer when the symbol is not yet in the
10207 : : symbol table and so whether or not its address is zero is unknown.
10208 : : For function local objects always return positive integer. */
10209 : : static int
10210 : 10134787 : maybe_nonzero_address (tree decl)
10211 : : {
10212 : : /* Normally, don't do anything for variables and functions before symtab is
10213 : : built; it is quite possible that DECL will be declared weak later.
10214 : : But if folding_initializer, we need a constant answer now, so create
10215 : : the symtab entry and prevent later weak declaration. */
10216 : 10134787 : if (DECL_P (decl) && decl_in_symtab_p (decl))
10217 : 7361742 : if (struct symtab_node *symbol
10218 : 3680871 : = (folding_initializer
10219 : 3680871 : ? symtab_node::get_create (decl)
10220 : 3680871 : : symtab_node::get (decl)))
10221 : 3661067 : return symbol->nonzero_address ();
10222 : :
10223 : : /* Function local objects are never NULL. */
10224 : 6473720 : if (DECL_P (decl)
10225 : 6473720 : && (DECL_CONTEXT (decl)
10226 : 4108886 : && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
10227 : 4104786 : && auto_var_in_fn_p (decl, DECL_CONTEXT (decl))))
10228 : : return 1;
10229 : :
10230 : : return -1;
10231 : : }
10232 : :
10233 : : /* Subroutine of fold_binary. This routine performs all of the
10234 : : transformations that are common to the equality/inequality
10235 : : operators (EQ_EXPR and NE_EXPR) and the ordering operators
10236 : : (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
10237 : : fold_binary should call fold_binary. Fold a comparison with
10238 : : tree code CODE and type TYPE with operands OP0 and OP1. Return
10239 : : the folded comparison or NULL_TREE. */
10240 : :
10241 : : static tree
10242 : 77671548 : fold_comparison (location_t loc, enum tree_code code, tree type,
10243 : : tree op0, tree op1)
10244 : : {
10245 : 77671548 : const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
10246 : 77671548 : tree arg0, arg1, tem;
10247 : :
10248 : 77671548 : arg0 = op0;
10249 : 77671548 : arg1 = op1;
10250 : :
10251 : 77671548 : STRIP_SIGN_NOPS (arg0);
10252 : 77671548 : STRIP_SIGN_NOPS (arg1);
10253 : :
10254 : : /* For comparisons of pointers we can decompose it to a compile time
10255 : : comparison of the base objects and the offsets into the object.
10256 : : This requires at least one operand being an ADDR_EXPR or a
10257 : : POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
10258 : 144066040 : if (POINTER_TYPE_P (TREE_TYPE (arg0))
10259 : 77887583 : && (TREE_CODE (arg0) == ADDR_EXPR
10260 : 10826893 : || TREE_CODE (arg1) == ADDR_EXPR
10261 : 10181672 : || TREE_CODE (arg0) == POINTER_PLUS_EXPR
10262 : 9696046 : || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
10263 : : {
10264 : 1804658 : tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
10265 : 1804658 : poly_int64 bitsize, bitpos0 = 0, bitpos1 = 0;
10266 : 1804658 : machine_mode mode;
10267 : 1804658 : int volatilep, reversep, unsignedp;
10268 : 1804658 : bool indirect_base0 = false, indirect_base1 = false;
10269 : :
10270 : : /* Get base and offset for the access. Strip ADDR_EXPR for
10271 : : get_inner_reference, but put it back by stripping INDIRECT_REF
10272 : : off the base object if possible. indirect_baseN will be true
10273 : : if baseN is not an address but refers to the object itself. */
10274 : 1804658 : base0 = arg0;
10275 : 1804658 : if (TREE_CODE (arg0) == ADDR_EXPR)
10276 : : {
10277 : 666198 : base0
10278 : 666198 : = get_inner_reference (TREE_OPERAND (arg0, 0),
10279 : : &bitsize, &bitpos0, &offset0, &mode,
10280 : : &unsignedp, &reversep, &volatilep);
10281 : 666198 : if (INDIRECT_REF_P (base0))
10282 : 68836 : base0 = TREE_OPERAND (base0, 0);
10283 : : else
10284 : : indirect_base0 = true;
10285 : : }
10286 : 1138460 : else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10287 : : {
10288 : 518893 : base0 = TREE_OPERAND (arg0, 0);
10289 : 518893 : STRIP_SIGN_NOPS (base0);
10290 : 518893 : if (TREE_CODE (base0) == ADDR_EXPR)
10291 : : {
10292 : 29243 : base0
10293 : 29243 : = get_inner_reference (TREE_OPERAND (base0, 0),
10294 : : &bitsize, &bitpos0, &offset0, &mode,
10295 : : &unsignedp, &reversep, &volatilep);
10296 : 29243 : if (INDIRECT_REF_P (base0))
10297 : 8 : base0 = TREE_OPERAND (base0, 0);
10298 : : else
10299 : : indirect_base0 = true;
10300 : : }
10301 : 518893 : if (offset0 == NULL_TREE || integer_zerop (offset0))
10302 : 518893 : offset0 = TREE_OPERAND (arg0, 1);
10303 : : else
10304 : 0 : offset0 = size_binop (PLUS_EXPR, offset0,
10305 : : TREE_OPERAND (arg0, 1));
10306 : 518893 : if (poly_int_tree_p (offset0))
10307 : : {
10308 : 437398 : poly_offset_int tem = wi::sext (wi::to_poly_offset (offset0),
10309 : 437398 : TYPE_PRECISION (sizetype));
10310 : 437398 : tem <<= LOG2_BITS_PER_UNIT;
10311 : 437398 : tem += bitpos0;
10312 : 437398 : if (tem.to_shwi (&bitpos0))
10313 : 437393 : offset0 = NULL_TREE;
10314 : : }
10315 : : }
10316 : :
10317 : 1804658 : base1 = arg1;
10318 : 1804658 : if (TREE_CODE (arg1) == ADDR_EXPR)
10319 : : {
10320 : 668526 : base1
10321 : 668526 : = get_inner_reference (TREE_OPERAND (arg1, 0),
10322 : : &bitsize, &bitpos1, &offset1, &mode,
10323 : : &unsignedp, &reversep, &volatilep);
10324 : 668526 : if (INDIRECT_REF_P (base1))
10325 : 20722 : base1 = TREE_OPERAND (base1, 0);
10326 : : else
10327 : : indirect_base1 = true;
10328 : : }
10329 : 1136132 : else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10330 : : {
10331 : 68042 : base1 = TREE_OPERAND (arg1, 0);
10332 : 68042 : STRIP_SIGN_NOPS (base1);
10333 : 68042 : if (TREE_CODE (base1) == ADDR_EXPR)
10334 : : {
10335 : 11998 : base1
10336 : 11998 : = get_inner_reference (TREE_OPERAND (base1, 0),
10337 : : &bitsize, &bitpos1, &offset1, &mode,
10338 : : &unsignedp, &reversep, &volatilep);
10339 : 11998 : if (INDIRECT_REF_P (base1))
10340 : 6 : base1 = TREE_OPERAND (base1, 0);
10341 : : else
10342 : : indirect_base1 = true;
10343 : : }
10344 : 68042 : if (offset1 == NULL_TREE || integer_zerop (offset1))
10345 : 68026 : offset1 = TREE_OPERAND (arg1, 1);
10346 : : else
10347 : 16 : offset1 = size_binop (PLUS_EXPR, offset1,
10348 : : TREE_OPERAND (arg1, 1));
10349 : 68042 : if (poly_int_tree_p (offset1))
10350 : : {
10351 : 58827 : poly_offset_int tem = wi::sext (wi::to_poly_offset (offset1),
10352 : 58827 : TYPE_PRECISION (sizetype));
10353 : 58827 : tem <<= LOG2_BITS_PER_UNIT;
10354 : 58827 : tem += bitpos1;
10355 : 58827 : if (tem.to_shwi (&bitpos1))
10356 : 58827 : offset1 = NULL_TREE;
10357 : : }
10358 : : }
10359 : :
10360 : : /* If we have equivalent bases we might be able to simplify. */
10361 : 1804658 : if (indirect_base0 == indirect_base1
10362 : 2355676 : && operand_equal_p (base0, base1,
10363 : : indirect_base0 ? OEP_ADDRESS_OF : 0))
10364 : : {
10365 : : /* We can fold this expression to a constant if the non-constant
10366 : : offset parts are equal. */
10367 : 19392 : if ((offset0 == offset1
10368 : 6251 : || (offset0 && offset1
10369 : 2370 : && operand_equal_p (offset0, offset1, 0)))
10370 : 19392 : && (equality_code
10371 : 10621 : || (indirect_base0
10372 : 7247 : && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
10373 : 3374 : || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
10374 : : {
10375 : 13104 : if (!equality_code
10376 : 10584 : && maybe_ne (bitpos0, bitpos1)
10377 : 23659 : && (pointer_may_wrap_p (base0, offset0, bitpos0)
10378 : 1932 : || pointer_may_wrap_p (base1, offset1, bitpos1)))
10379 : 9133 : fold_overflow_warning (("assuming pointer wraparound does not "
10380 : : "occur when comparing P +- C1 with "
10381 : : "P +- C2"),
10382 : : WARN_STRICT_OVERFLOW_CONDITIONAL);
10383 : :
10384 : 13104 : switch (code)
10385 : : {
10386 : 120 : case EQ_EXPR:
10387 : 120 : if (known_eq (bitpos0, bitpos1))
10388 : 35686 : return constant_boolean_node (true, type);
10389 : 83 : if (known_ne (bitpos0, bitpos1))
10390 : 83 : return constant_boolean_node (false, type);
10391 : : break;
10392 : 2400 : case NE_EXPR:
10393 : 2400 : if (known_ne (bitpos0, bitpos1))
10394 : 2395 : return constant_boolean_node (true, type);
10395 : 5 : if (known_eq (bitpos0, bitpos1))
10396 : 5 : return constant_boolean_node (false, type);
10397 : : break;
10398 : 2613 : case LT_EXPR:
10399 : 2613 : if (known_lt (bitpos0, bitpos1))
10400 : 2401 : return constant_boolean_node (true, type);
10401 : 212 : if (known_ge (bitpos0, bitpos1))
10402 : 212 : return constant_boolean_node (false, type);
10403 : : break;
10404 : 4045 : case LE_EXPR:
10405 : 4045 : if (known_le (bitpos0, bitpos1))
10406 : 1688 : return constant_boolean_node (true, type);
10407 : 2357 : if (known_gt (bitpos0, bitpos1))
10408 : 2357 : return constant_boolean_node (false, type);
10409 : : break;
10410 : 805 : case GE_EXPR:
10411 : 805 : if (known_ge (bitpos0, bitpos1))
10412 : 536 : return constant_boolean_node (true, type);
10413 : 269 : if (known_lt (bitpos0, bitpos1))
10414 : 269 : return constant_boolean_node (false, type);
10415 : : break;
10416 : 3121 : case GT_EXPR:
10417 : 3121 : if (known_gt (bitpos0, bitpos1))
10418 : 1484 : return constant_boolean_node (true, type);
10419 : 1637 : if (known_le (bitpos0, bitpos1))
10420 : 1637 : return constant_boolean_node (false, type);
10421 : : break;
10422 : : default:;
10423 : : }
10424 : : }
10425 : : /* We can simplify the comparison to a comparison of the variable
10426 : : offset parts if the constant offset parts are equal.
10427 : : Be careful to use signed sizetype here because otherwise we
10428 : : mess with array offsets in the wrong way. This is possible
10429 : : because pointer arithmetic is restricted to retain within an
10430 : : object and overflow on pointer differences is undefined as of
10431 : : 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
10432 : 6288 : else if (known_eq (bitpos0, bitpos1)
10433 : 6288 : && (equality_code
10434 : 4673 : || (indirect_base0
10435 : 227 : && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
10436 : 4446 : || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
10437 : : {
10438 : : /* By converting to signed sizetype we cover middle-end pointer
10439 : : arithmetic which operates on unsigned pointer types of size
10440 : : type size and ARRAY_REF offsets which are properly sign or
10441 : : zero extended from their type in case it is narrower than
10442 : : sizetype. */
10443 : 4776 : if (offset0 == NULL_TREE)
10444 : 36 : offset0 = build_int_cst (ssizetype, 0);
10445 : : else
10446 : 4740 : offset0 = fold_convert_loc (loc, ssizetype, offset0);
10447 : 4776 : if (offset1 == NULL_TREE)
10448 : 2410 : offset1 = build_int_cst (ssizetype, 0);
10449 : : else
10450 : 2366 : offset1 = fold_convert_loc (loc, ssizetype, offset1);
10451 : :
10452 : 4776 : if (!equality_code
10453 : 4776 : && (pointer_may_wrap_p (base0, offset0, bitpos0)
10454 : 0 : || pointer_may_wrap_p (base1, offset1, bitpos1)))
10455 : 4673 : fold_overflow_warning (("assuming pointer wraparound does not "
10456 : : "occur when comparing P +- C1 with "
10457 : : "P +- C2"),
10458 : : WARN_STRICT_OVERFLOW_COMPARISON);
10459 : :
10460 : 4776 : return fold_build2_loc (loc, code, type, offset0, offset1);
10461 : : }
10462 : : }
10463 : : /* For equal offsets we can simplify to a comparison of the
10464 : : base addresses. */
10465 : 1785266 : else if (known_eq (bitpos0, bitpos1)
10466 : 95536 : && (indirect_base0
10467 : 746973 : ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
10468 : 9146 : && (indirect_base1
10469 : 171484 : ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
10470 : 1978261 : && ((offset0 == offset1)
10471 : 4066 : || (offset0 && offset1
10472 : 3845 : && operand_equal_p (offset0, offset1, 0))))
10473 : : {
10474 : 17802 : if (indirect_base0)
10475 : 1528 : base0 = build_fold_addr_expr_loc (loc, base0);
10476 : 17802 : if (indirect_base1)
10477 : 2351 : base1 = build_fold_addr_expr_loc (loc, base1);
10478 : 17802 : return fold_build2_loc (loc, code, type, base0, base1);
10479 : : }
10480 : : /* Comparison between an ordinary (non-weak) symbol and a null
10481 : : pointer can be eliminated since such symbols must have a non
10482 : : null address. In C, relational expressions between pointers
10483 : : to objects and null pointers are undefined. The results
10484 : : below follow the C++ rules with the additional property that
10485 : : every object pointer compares greater than a null pointer.
10486 : : */
10487 : 1767464 : else if (((DECL_P (base0)
10488 : 564448 : && maybe_nonzero_address (base0) > 0
10489 : : /* Avoid folding references to struct members at offset 0 to
10490 : : prevent tests like '&ptr->firstmember == 0' from getting
10491 : : eliminated. When ptr is null, although the -> expression
10492 : : is strictly speaking invalid, GCC retains it as a matter
10493 : : of QoI. See PR c/44555. */
10494 : 550249 : && (offset0 == NULL_TREE && known_ne (bitpos0, 0)))
10495 : 1351548 : || CONSTANT_CLASS_P (base0))
10496 : 419600 : && indirect_base0
10497 : : /* The caller guarantees that when one of the arguments is
10498 : : constant (i.e., null in this case) it is second. */
10499 : 2184115 : && integer_zerop (arg1))
10500 : : {
10501 : 4 : switch (code)
10502 : : {
10503 : 2 : case EQ_EXPR:
10504 : 2 : case LE_EXPR:
10505 : 2 : case LT_EXPR:
10506 : 2 : return constant_boolean_node (false, type);
10507 : 2 : case GE_EXPR:
10508 : 2 : case GT_EXPR:
10509 : 2 : case NE_EXPR:
10510 : 2 : return constant_boolean_node (true, type);
10511 : 0 : default:
10512 : 0 : gcc_unreachable ();
10513 : : }
10514 : : }
10515 : : }
10516 : :
10517 : : /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
10518 : : X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
10519 : : the resulting offset is smaller in absolute value than the
10520 : : original one and has the same sign. */
10521 : 152067824 : if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10522 : 117916534 : && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
10523 : 30288806 : && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
10524 : 1958481 : && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10525 : 1628737 : && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
10526 : 1628737 : && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
10527 : 136723938 : && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10528 : 127832 : && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
10529 : : {
10530 : 127832 : tree const1 = TREE_OPERAND (arg0, 1);
10531 : 127832 : tree const2 = TREE_OPERAND (arg1, 1);
10532 : 127832 : tree variable1 = TREE_OPERAND (arg0, 0);
10533 : 127832 : tree variable2 = TREE_OPERAND (arg1, 0);
10534 : 127832 : tree cst;
10535 : 127832 : const char * const warnmsg = G_("assuming signed overflow does not "
10536 : : "occur when combining constants around "
10537 : : "a comparison");
10538 : :
10539 : : /* Put the constant on the side where it doesn't overflow and is
10540 : : of lower absolute value and of same sign than before. */
10541 : 127833 : cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
10542 : : ? MINUS_EXPR : PLUS_EXPR,
10543 : : const2, const1);
10544 : 127832 : if (!TREE_OVERFLOW (cst)
10545 : 127822 : && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
10546 : 146915 : && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
10547 : : {
10548 : 4368 : fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
10549 : 4368 : return fold_build2_loc (loc, code, type,
10550 : : variable1,
10551 : 4368 : fold_build2_loc (loc, TREE_CODE (arg1),
10552 : 4368 : TREE_TYPE (arg1),
10553 : 4368 : variable2, cst));
10554 : : }
10555 : :
10556 : 123465 : cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
10557 : : ? MINUS_EXPR : PLUS_EXPR,
10558 : : const1, const2);
10559 : 123464 : if (!TREE_OVERFLOW (cst)
10560 : 123454 : && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
10561 : 138179 : && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
10562 : : {
10563 : 14715 : fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
10564 : 14715 : return fold_build2_loc (loc, code, type,
10565 : 14715 : fold_build2_loc (loc, TREE_CODE (arg0),
10566 : 14715 : TREE_TYPE (arg0),
10567 : : variable1, cst),
10568 : 14715 : variable2);
10569 : : }
10570 : : }
10571 : :
10572 : 77616779 : tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
10573 : 77616779 : if (tem)
10574 : : return tem;
10575 : :
10576 : : /* If we are comparing an expression that just has comparisons
10577 : : of two integer values, arithmetic expressions of those comparisons,
10578 : : and constants, we can simplify it. There are only three cases
10579 : : to check: the two values can either be equal, the first can be
10580 : : greater, or the second can be greater. Fold the expression for
10581 : : those three values. Since each value must be 0 or 1, we have
10582 : : eight possibilities, each of which corresponds to the constant 0
10583 : : or 1 or one of the six possible comparisons.
10584 : :
10585 : : This handles common cases like (a > b) == 0 but also handles
10586 : : expressions like ((x > y) - (y > x)) > 0, which supposedly
10587 : : occur in macroized code. */
10588 : :
10589 : 76874071 : if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
10590 : : {
10591 : 47741330 : tree cval1 = 0, cval2 = 0;
10592 : :
10593 : 47741330 : if (twoval_comparison_p (arg0, &cval1, &cval2)
10594 : : /* Don't handle degenerate cases here; they should already
10595 : : have been handled anyway. */
10596 : 518275 : && cval1 != 0 && cval2 != 0
10597 : 517433 : && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
10598 : 517433 : && TREE_TYPE (cval1) == TREE_TYPE (cval2)
10599 : 517425 : && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
10600 : 62 : && TYPE_MAX_VALUE (TREE_TYPE (cval1))
10601 : 62 : && TYPE_MAX_VALUE (TREE_TYPE (cval2))
10602 : 47741392 : && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
10603 : 62 : TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
10604 : : {
10605 : 62 : tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
10606 : 62 : tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
10607 : :
10608 : : /* We can't just pass T to eval_subst in case cval1 or cval2
10609 : : was the same as ARG1. */
10610 : :
10611 : 62 : tree high_result
10612 : 62 : = fold_build2_loc (loc, code, type,
10613 : : eval_subst (loc, arg0, cval1, maxval,
10614 : : cval2, minval),
10615 : : arg1);
10616 : 62 : tree equal_result
10617 : 62 : = fold_build2_loc (loc, code, type,
10618 : : eval_subst (loc, arg0, cval1, maxval,
10619 : : cval2, maxval),
10620 : : arg1);
10621 : 62 : tree low_result
10622 : 62 : = fold_build2_loc (loc, code, type,
10623 : : eval_subst (loc, arg0, cval1, minval,
10624 : : cval2, maxval),
10625 : : arg1);
10626 : :
10627 : : /* All three of these results should be 0 or 1. Confirm they are.
10628 : : Then use those values to select the proper code to use. */
10629 : :
10630 : 62 : if (TREE_CODE (high_result) == INTEGER_CST
10631 : 53 : && TREE_CODE (equal_result) == INTEGER_CST
10632 : 42 : && TREE_CODE (low_result) == INTEGER_CST)
10633 : : {
10634 : : /* Make a 3-bit mask with the high-order bit being the
10635 : : value for `>', the next for '=', and the low for '<'. */
10636 : 42 : switch ((integer_onep (high_result) * 4)
10637 : 42 : + (integer_onep (equal_result) * 2)
10638 : 42 : + integer_onep (low_result))
10639 : : {
10640 : 24 : case 0:
10641 : : /* Always false. */
10642 : 42 : return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10643 : : case 1:
10644 : : code = LT_EXPR;
10645 : : break;
10646 : 2 : case 2:
10647 : 2 : code = EQ_EXPR;
10648 : 2 : break;
10649 : 0 : case 3:
10650 : 0 : code = LE_EXPR;
10651 : 0 : break;
10652 : 0 : case 4:
10653 : 0 : code = GT_EXPR;
10654 : 0 : break;
10655 : 1 : case 5:
10656 : 1 : code = NE_EXPR;
10657 : 1 : break;
10658 : 0 : case 6:
10659 : 0 : code = GE_EXPR;
10660 : 0 : break;
10661 : 15 : case 7:
10662 : : /* Always true. */
10663 : 15 : return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10664 : : }
10665 : :
10666 : 3 : return fold_build2_loc (loc, code, type, cval1, cval2);
10667 : : }
10668 : : }
10669 : : }
10670 : :
10671 : : return NULL_TREE;
10672 : : }
10673 : :
10674 : :
10675 : : /* Subroutine of fold_binary. Optimize complex multiplications of the
10676 : : form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
10677 : : argument EXPR represents the expression "z" of type TYPE. */
10678 : :
10679 : : static tree
10680 : 2 : fold_mult_zconjz (location_t loc, tree type, tree expr)
10681 : : {
10682 : 2 : tree itype = TREE_TYPE (type);
10683 : 2 : tree rpart, ipart, tem;
10684 : :
10685 : 2 : if (TREE_CODE (expr) == COMPLEX_EXPR)
10686 : : {
10687 : 0 : rpart = TREE_OPERAND (expr, 0);
10688 : 0 : ipart = TREE_OPERAND (expr, 1);
10689 : : }
10690 : 2 : else if (TREE_CODE (expr) == COMPLEX_CST)
10691 : : {
10692 : 0 : rpart = TREE_REALPART (expr);
10693 : 0 : ipart = TREE_IMAGPART (expr);
10694 : : }
10695 : : else
10696 : : {
10697 : 2 : expr = save_expr (expr);
10698 : 2 : rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
10699 : 2 : ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
10700 : : }
10701 : :
10702 : 2 : rpart = save_expr (rpart);
10703 : 2 : ipart = save_expr (ipart);
10704 : 2 : tem = fold_build2_loc (loc, PLUS_EXPR, itype,
10705 : : fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
10706 : : fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
10707 : 2 : return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
10708 : 2 : build_zero_cst (itype));
10709 : : }
10710 : :
10711 : :
10712 : : /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
10713 : : CONSTRUCTOR ARG into array ELTS, which has NELTS elements, and return
10714 : : true if successful. */
10715 : :
10716 : : static bool
10717 : 10168 : vec_cst_ctor_to_array (tree arg, unsigned int nelts, tree *elts)
10718 : : {
10719 : 10168 : unsigned HOST_WIDE_INT i, nunits;
10720 : :
10721 : 10168 : if (TREE_CODE (arg) == VECTOR_CST
10722 : 10168 : && VECTOR_CST_NELTS (arg).is_constant (&nunits))
10723 : : {
10724 : 828 : for (i = 0; i < nunits; ++i)
10725 : 670 : elts[i] = VECTOR_CST_ELT (arg, i);
10726 : : }
10727 : 10010 : else if (TREE_CODE (arg) == CONSTRUCTOR)
10728 : : {
10729 : : constructor_elt *elt;
10730 : :
10731 : 33218 : FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
10732 : 27903 : if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
10733 : 4695 : return false;
10734 : : else
10735 : 23208 : elts[i] = elt->value;
10736 : : }
10737 : : else
10738 : : return false;
10739 : 5799 : for (; i < nelts; i++)
10740 : 652 : elts[i]
10741 : 326 : = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
10742 : : return true;
10743 : : }
10744 : :
10745 : : /* Helper routine for fold_vec_perm_cst to check if SEL is a suitable
10746 : : mask for VLA vec_perm folding.
10747 : : REASON if specified, will contain the reason why SEL is not suitable.
10748 : : Used only for debugging and unit-testing. */
10749 : :
10750 : : static bool
10751 : 3401 : valid_mask_for_fold_vec_perm_cst_p (tree arg0, tree arg1,
10752 : : const vec_perm_indices &sel,
10753 : : const char **reason = NULL)
10754 : : {
10755 : 3401 : unsigned sel_npatterns = sel.encoding ().npatterns ();
10756 : 3401 : unsigned sel_nelts_per_pattern = sel.encoding ().nelts_per_pattern ();
10757 : :
10758 : 6802 : if (!(pow2p_hwi (sel_npatterns)
10759 : 3401 : && pow2p_hwi (VECTOR_CST_NPATTERNS (arg0))
10760 : 3401 : && pow2p_hwi (VECTOR_CST_NPATTERNS (arg1))))
10761 : : {
10762 : 0 : if (reason)
10763 : 0 : *reason = "npatterns is not power of 2";
10764 : 0 : return false;
10765 : : }
10766 : :
10767 : : /* We want to avoid cases where sel.length is not a multiple of npatterns.
10768 : : For eg: sel.length = 2 + 2x, and sel npatterns = 4. */
10769 : 3401 : poly_uint64 esel;
10770 : 3401 : if (!multiple_p (sel.length (), sel_npatterns, &esel))
10771 : : {
10772 : 0 : if (reason)
10773 : 0 : *reason = "sel.length is not multiple of sel_npatterns";
10774 : 0 : return false;
10775 : : }
10776 : :
10777 : 3401 : if (sel_nelts_per_pattern < 3)
10778 : : return true;
10779 : :
10780 : 3337 : for (unsigned pattern = 0; pattern < sel_npatterns; pattern++)
10781 : : {
10782 : 2610 : poly_uint64 a1 = sel[pattern + sel_npatterns];
10783 : 2610 : poly_uint64 a2 = sel[pattern + 2 * sel_npatterns];
10784 : 2610 : HOST_WIDE_INT step;
10785 : 2610 : if (!poly_int64 (a2 - a1).is_constant (&step))
10786 : : {
10787 : : if (reason)
10788 : : *reason = "step is not constant";
10789 : 749 : return false;
10790 : : }
10791 : : // FIXME: Punt on step < 0 for now, revisit later.
10792 : 2610 : if (step < 0)
10793 : : return false;
10794 : 2552 : if (step == 0)
10795 : 0 : continue;
10796 : :
10797 : 2552 : if (!pow2p_hwi (step))
10798 : : {
10799 : 24 : if (reason)
10800 : 0 : *reason = "step is not power of 2";
10801 : 24 : return false;
10802 : : }
10803 : :
10804 : : /* Ensure that stepped sequence of the pattern selects elements
10805 : : only from the same input vector. */
10806 : 2528 : uint64_t q1, qe;
10807 : 2528 : poly_uint64 r1, re;
10808 : 2528 : poly_uint64 ae = a1 + (esel - 2) * step;
10809 : 2528 : poly_uint64 arg_len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
10810 : :
10811 : 2528 : if (!(can_div_trunc_p (a1, arg_len, &q1, &r1)
10812 : 2528 : && can_div_trunc_p (ae, arg_len, &qe, &re)
10813 : : && q1 == qe))
10814 : : {
10815 : 194 : if (reason)
10816 : 0 : *reason = "crossed input vectors";
10817 : 194 : return false;
10818 : : }
10819 : :
10820 : : /* Ensure that the stepped sequence always selects from the same
10821 : : input pattern. */
10822 : 2334 : tree arg = ((q1 & 1) == 0) ? arg0 : arg1;
10823 : 2334 : unsigned arg_npatterns = VECTOR_CST_NPATTERNS (arg);
10824 : :
10825 : 2334 : if (!multiple_p (step, arg_npatterns))
10826 : : {
10827 : 471 : if (reason)
10828 : 0 : *reason = "step is not multiple of npatterns";
10829 : 471 : return false;
10830 : : }
10831 : :
10832 : : /* If a1 chooses base element from arg, ensure that it's a natural
10833 : : stepped sequence, ie, (arg[2] - arg[1]) == (arg[1] - arg[0])
10834 : : to preserve arg's encoding. */
10835 : :
10836 : 1863 : if (maybe_lt (r1, arg_npatterns))
10837 : : {
10838 : 2 : unsigned HOST_WIDE_INT index;
10839 : 2 : if (!r1.is_constant (&index))
10840 : 2 : return false;
10841 : :
10842 : 2 : tree arg_elem0 = vector_cst_elt (arg, index);
10843 : 2 : tree arg_elem1 = vector_cst_elt (arg, index + arg_npatterns);
10844 : 2 : tree arg_elem2 = vector_cst_elt (arg, index + arg_npatterns * 2);
10845 : :
10846 : 2 : tree step1, step2;
10847 : 2 : if (!(step1 = const_binop (MINUS_EXPR, arg_elem1, arg_elem0))
10848 : 2 : || !(step2 = const_binop (MINUS_EXPR, arg_elem2, arg_elem1))
10849 : 4 : || !operand_equal_p (step1, step2, 0))
10850 : : {
10851 : 2 : if (reason)
10852 : 0 : *reason = "not a natural stepped sequence";
10853 : 2 : return false;
10854 : : }
10855 : : }
10856 : : }
10857 : :
10858 : : return true;
10859 : : }
10860 : :
10861 : : /* Try to fold permutation of ARG0 and ARG1 with SEL selector when
10862 : : the input vectors are VECTOR_CST. Return NULL_TREE otherwise.
10863 : : REASON has same purpose as described in
10864 : : valid_mask_for_fold_vec_perm_cst_p. */
10865 : :
10866 : : static tree
10867 : 3401 : fold_vec_perm_cst (tree type, tree arg0, tree arg1, const vec_perm_indices &sel,
10868 : : const char **reason = NULL)
10869 : : {
10870 : 3401 : unsigned res_npatterns, res_nelts_per_pattern;
10871 : 3401 : unsigned HOST_WIDE_INT res_nelts;
10872 : :
10873 : : /* First try to implement the fold in a VLA-friendly way.
10874 : :
10875 : : (1) If the selector is simply a duplication of N elements, the
10876 : : result is likewise a duplication of N elements.
10877 : :
10878 : : (2) If the selector is N elements followed by a duplication
10879 : : of N elements, the result is too.
10880 : :
10881 : : (3) If the selector is N elements followed by an interleaving
10882 : : of N linear series, the situation is more complex.
10883 : :
10884 : : valid_mask_for_fold_vec_perm_cst_p detects whether we
10885 : : can handle this case. If we can, then each of the N linear
10886 : : series either (a) selects the same element each time or
10887 : : (b) selects a linear series from one of the input patterns.
10888 : :
10889 : : If (b) holds for one of the linear series, the result
10890 : : will contain a linear series, and so the result will have
10891 : : the same shape as the selector. If (a) holds for all of
10892 : : the linear series, the result will be the same as (2) above.
10893 : :
10894 : : (b) can only hold if one of the input patterns has a
10895 : : stepped encoding. */
10896 : :
10897 : 3401 : if (valid_mask_for_fold_vec_perm_cst_p (arg0, arg1, sel, reason))
10898 : : {
10899 : 2652 : res_npatterns = sel.encoding ().npatterns ();
10900 : 2652 : res_nelts_per_pattern = sel.encoding ().nelts_per_pattern ();
10901 : 2652 : if (res_nelts_per_pattern == 3
10902 : 727 : && VECTOR_CST_NELTS_PER_PATTERN (arg0) < 3
10903 : 3132 : && VECTOR_CST_NELTS_PER_PATTERN (arg1) < 3)
10904 : : res_nelts_per_pattern = 2;
10905 : 2652 : res_nelts = res_npatterns * res_nelts_per_pattern;
10906 : : }
10907 : 749 : else if (TYPE_VECTOR_SUBPARTS (type).is_constant (&res_nelts))
10908 : : {
10909 : 749 : res_npatterns = res_nelts;
10910 : 749 : res_nelts_per_pattern = 1;
10911 : : }
10912 : : else
10913 : : return NULL_TREE;
10914 : :
10915 : 3401 : tree_vector_builder out_elts (type, res_npatterns, res_nelts_per_pattern);
10916 : 24164 : for (unsigned i = 0; i < res_nelts; i++)
10917 : : {
10918 : 20763 : poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
10919 : 20763 : uint64_t q;
10920 : 20763 : poly_uint64 r;
10921 : 20763 : unsigned HOST_WIDE_INT index;
10922 : :
10923 : : /* Punt if sel[i] /trunc_div len cannot be determined,
10924 : : because the input vector to be chosen will depend on
10925 : : runtime vector length.
10926 : : For example if len == 4 + 4x, and sel[i] == 4,
10927 : : If len at runtime equals 4, we choose arg1[0].
10928 : : For any other value of len > 4 at runtime, we choose arg0[4].
10929 : : which makes the element choice dependent on runtime vector length. */
10930 : 20763 : if (!can_div_trunc_p (sel[i], len, &q, &r))
10931 : : {
10932 : : if (reason)
10933 : : *reason = "cannot divide selector element by arg len";
10934 : : return NULL_TREE;
10935 : : }
10936 : :
10937 : : /* sel[i] % len will give the index of element in the chosen input
10938 : : vector. For example if sel[i] == 5 + 4x and len == 4 + 4x,
10939 : : we will choose arg1[1] since (5 + 4x) % (4 + 4x) == 1. */
10940 : 20763 : if (!r.is_constant (&index))
10941 : : {
10942 : : if (reason)
10943 : : *reason = "remainder is not constant";
10944 : : return NULL_TREE;
10945 : : }
10946 : :
10947 : 20763 : tree arg = ((q & 1) == 0) ? arg0 : arg1;
10948 : 20763 : tree elem = vector_cst_elt (arg, index);
10949 : 20763 : out_elts.quick_push (elem);
10950 : : }
10951 : :
10952 : 3401 : return out_elts.build ();
10953 : 3401 : }
10954 : :
10955 : : /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
10956 : : selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
10957 : : NULL_TREE otherwise. */
10958 : :
10959 : : tree
10960 : 16009 : fold_vec_perm (tree type, tree arg0, tree arg1, const vec_perm_indices &sel)
10961 : : {
10962 : 16009 : unsigned int i;
10963 : 16009 : unsigned HOST_WIDE_INT nelts;
10964 : :
10965 : 16009 : gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (type), sel.length ())
10966 : : && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)),
10967 : : TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1))));
10968 : :
10969 : 16009 : if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
10970 : 16009 : || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
10971 : : return NULL_TREE;
10972 : :
10973 : 10805 : if (TREE_CODE (arg0) == VECTOR_CST
10974 : 3518 : && TREE_CODE (arg1) == VECTOR_CST)
10975 : 3401 : return fold_vec_perm_cst (type, arg0, arg1, sel);
10976 : :
10977 : : /* For fall back case, we want to ensure we have VLS vectors
10978 : : with equal length. */
10979 : 7404 : if (!sel.length ().is_constant (&nelts))
10980 : : return NULL_TREE;
10981 : :
10982 : 7404 : gcc_assert (known_eq (sel.length (),
10983 : : TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))));
10984 : 7404 : tree *in_elts = XALLOCAVEC (tree, nelts * 2);
10985 : 7404 : if (!vec_cst_ctor_to_array (arg0, nelts, in_elts)
10986 : 7404 : || !vec_cst_ctor_to_array (arg1, nelts, in_elts + nelts))
10987 : 4695 : return NULL_TREE;
10988 : :
10989 : 2709 : vec<constructor_elt, va_gc> *v;
10990 : 2709 : vec_alloc (v, nelts);
10991 : 14591 : for (i = 0; i < nelts; i++)
10992 : : {
10993 : 11882 : HOST_WIDE_INT index;
10994 : 11882 : if (!sel[i].is_constant (&index))
10995 : : return NULL_TREE;
10996 : 11882 : CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, in_elts[index]);
10997 : : }
10998 : 2709 : return build_constructor (type, v);
10999 : : }
11000 : :
11001 : : /* Try to fold a pointer difference of type TYPE two address expressions of
11002 : : array references AREF0 and AREF1 using location LOC. Return a
11003 : : simplified expression for the difference or NULL_TREE. */
11004 : :
11005 : : static tree
11006 : 32 : fold_addr_of_array_ref_difference (location_t loc, tree type,
11007 : : tree aref0, tree aref1,
11008 : : bool use_pointer_diff)
11009 : : {
11010 : 32 : tree base0 = TREE_OPERAND (aref0, 0);
11011 : 32 : tree base1 = TREE_OPERAND (aref1, 0);
11012 : 32 : tree base_offset = build_int_cst (type, 0);
11013 : :
11014 : : /* If the bases are array references as well, recurse. If the bases
11015 : : are pointer indirections compute the difference of the pointers.
11016 : : If the bases are equal, we are set. */
11017 : 32 : if ((TREE_CODE (base0) == ARRAY_REF
11018 : 1 : && TREE_CODE (base1) == ARRAY_REF
11019 : 1 : && (base_offset
11020 : 1 : = fold_addr_of_array_ref_difference (loc, type, base0, base1,
11021 : : use_pointer_diff)))
11022 : 31 : || (INDIRECT_REF_P (base0)
11023 : 7 : && INDIRECT_REF_P (base1)
11024 : 7 : && (base_offset
11025 : 7 : = use_pointer_diff
11026 : 8 : ? fold_binary_loc (loc, POINTER_DIFF_EXPR, type,
11027 : 1 : TREE_OPERAND (base0, 0),
11028 : 1 : TREE_OPERAND (base1, 0))
11029 : 12 : : fold_binary_loc (loc, MINUS_EXPR, type,
11030 : 6 : fold_convert (type,
11031 : : TREE_OPERAND (base0, 0)),
11032 : 6 : fold_convert (type,
11033 : : TREE_OPERAND (base1, 0)))))
11034 : 56 : || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
11035 : : {
11036 : 17 : tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
11037 : 17 : tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
11038 : 17 : tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
11039 : 17 : tree diff = fold_build2_loc (loc, MINUS_EXPR, type, op0, op1);
11040 : 17 : return fold_build2_loc (loc, PLUS_EXPR, type,
11041 : : base_offset,
11042 : : fold_build2_loc (loc, MULT_EXPR, type,
11043 : 17 : diff, esz));
11044 : : }
11045 : : return NULL_TREE;
11046 : : }
11047 : :
11048 : : /* If the real or vector real constant CST of type TYPE has an exact
11049 : : inverse, return it, else return NULL. */
11050 : :
11051 : : tree
11052 : 1086340 : exact_inverse (tree type, tree cst)
11053 : : {
11054 : 1086340 : REAL_VALUE_TYPE r;
11055 : 1086340 : tree unit_type;
11056 : 1086340 : machine_mode mode;
11057 : :
11058 : 1086340 : switch (TREE_CODE (cst))
11059 : : {
11060 : 1085821 : case REAL_CST:
11061 : 1085821 : r = TREE_REAL_CST (cst);
11062 : :
11063 : 1085821 : if (exact_real_inverse (TYPE_MODE (type), &r))
11064 : 309599 : return build_real (type, r);
11065 : :
11066 : : return NULL_TREE;
11067 : :
11068 : 519 : case VECTOR_CST:
11069 : 519 : {
11070 : 519 : unit_type = TREE_TYPE (type);
11071 : 519 : mode = TYPE_MODE (unit_type);
11072 : :
11073 : 519 : tree_vector_builder elts;
11074 : 519 : if (!elts.new_unary_operation (type, cst, false))
11075 : : return NULL_TREE;
11076 : 519 : unsigned int count = elts.encoded_nelts ();
11077 : 576 : for (unsigned int i = 0; i < count; ++i)
11078 : : {
11079 : 519 : r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
11080 : 519 : if (!exact_real_inverse (mode, &r))
11081 : : return NULL_TREE;
11082 : 57 : elts.quick_push (build_real (unit_type, r));
11083 : : }
11084 : :
11085 : 57 : return elts.build ();
11086 : 519 : }
11087 : :
11088 : : default:
11089 : : return NULL_TREE;
11090 : : }
11091 : : }
11092 : :
11093 : : /* Mask out the tz least significant bits of X of type TYPE where
11094 : : tz is the number of trailing zeroes in Y. */
11095 : : static wide_int
11096 : 103218 : mask_with_tz (tree type, const wide_int &x, const wide_int &y)
11097 : : {
11098 : 103218 : int tz = wi::ctz (y);
11099 : 103218 : if (tz > 0)
11100 : 7347 : return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
11101 : 95871 : return x;
11102 : : }
11103 : :
11104 : : /* Return true when T is an address and is known to be nonzero.
11105 : : For floating point we further ensure that T is not denormal.
11106 : : Similar logic is present in nonzero_address in rtlanal.h.
11107 : :
11108 : : If the return value is based on the assumption that signed overflow
11109 : : is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
11110 : : change *STRICT_OVERFLOW_P. */
11111 : :
11112 : : static bool
11113 : 135111879 : tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
11114 : : {
11115 : 135299118 : tree type = TREE_TYPE (t);
11116 : 135299118 : enum tree_code code;
11117 : :
11118 : : /* Doing something useful for floating point would need more work. */
11119 : 135299118 : if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
11120 : : return false;
11121 : :
11122 : 135185411 : code = TREE_CODE (t);
11123 : 135185411 : switch (TREE_CODE_CLASS (code))
11124 : : {
11125 : 900719 : case tcc_unary:
11126 : 900719 : return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
11127 : 900719 : strict_overflow_p);
11128 : 2358943 : case tcc_binary:
11129 : 2358943 : case tcc_comparison:
11130 : 2358943 : return tree_binary_nonzero_warnv_p (code, type,
11131 : 2358943 : TREE_OPERAND (t, 0),
11132 : 2358943 : TREE_OPERAND (t, 1),
11133 : 2358943 : strict_overflow_p);
11134 : 11731661 : case tcc_constant:
11135 : 11731661 : case tcc_declaration:
11136 : 11731661 : case tcc_reference:
11137 : 11731661 : return tree_single_nonzero_warnv_p (t, strict_overflow_p);
11138 : :
11139 : 120194088 : default:
11140 : 120194088 : break;
11141 : : }
11142 : :
11143 : 120194088 : switch (code)
11144 : : {
11145 : 516998 : case TRUTH_NOT_EXPR:
11146 : 516998 : return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
11147 : 516998 : strict_overflow_p);
11148 : :
11149 : 70782 : case TRUTH_AND_EXPR:
11150 : 70782 : case TRUTH_OR_EXPR:
11151 : 70782 : case TRUTH_XOR_EXPR:
11152 : 70782 : return tree_binary_nonzero_warnv_p (code, type,
11153 : 70782 : TREE_OPERAND (t, 0),
11154 : 70782 : TREE_OPERAND (t, 1),
11155 : 70782 : strict_overflow_p);
11156 : :
11157 : 115975654 : case COND_EXPR:
11158 : 115975654 : case CONSTRUCTOR:
11159 : 115975654 : case OBJ_TYPE_REF:
11160 : 115975654 : case ADDR_EXPR:
11161 : 115975654 : case WITH_SIZE_EXPR:
11162 : 115975654 : case SSA_NAME:
11163 : 115975654 : return tree_single_nonzero_warnv_p (t, strict_overflow_p);
11164 : :
11165 : 80352 : case COMPOUND_EXPR:
11166 : 80352 : case MODIFY_EXPR:
11167 : 80352 : case BIND_EXPR:
11168 : 80352 : return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
11169 : 80352 : strict_overflow_p);
11170 : :
11171 : 106887 : case SAVE_EXPR:
11172 : 106887 : return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
11173 : 106887 : strict_overflow_p);
11174 : :
11175 : 3364670 : case CALL_EXPR:
11176 : 3364670 : {
11177 : 3364670 : tree fndecl = get_callee_fndecl (t);
11178 : 3364670 : if (!fndecl) return false;
11179 : 3363369 : if (flag_delete_null_pointer_checks && !flag_check_new
11180 : 3363357 : && DECL_IS_OPERATOR_NEW_P (fndecl)
11181 : 3363761 : && !TREE_NOTHROW (fndecl))
11182 : : return true;
11183 : 3363541 : if (flag_delete_null_pointer_checks
11184 : 6726812 : && lookup_attribute ("returns_nonnull",
11185 : 3363271 : TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
11186 : : return true;
11187 : 3363362 : return alloca_call_p (t);
11188 : : }
11189 : :
11190 : : default:
11191 : : break;
11192 : : }
11193 : : return false;
11194 : : }
11195 : :
11196 : : /* Return true when T is an address and is known to be nonzero.
11197 : : Handle warnings about undefined signed overflow. */
11198 : :
11199 : : bool
11200 : 133938036 : tree_expr_nonzero_p (tree t)
11201 : : {
11202 : 133938036 : bool ret, strict_overflow_p;
11203 : :
11204 : 133938036 : strict_overflow_p = false;
11205 : 133938036 : ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
11206 : 133938036 : if (strict_overflow_p)
11207 : 0 : fold_overflow_warning (("assuming signed overflow does not occur when "
11208 : : "determining that expression is always "
11209 : : "non-zero"),
11210 : : WARN_STRICT_OVERFLOW_MISC);
11211 : 133938036 : return ret;
11212 : : }
11213 : :
11214 : : /* Return true if T is known not to be equal to an integer W. */
11215 : :
11216 : : bool
11217 : 91589299 : expr_not_equal_to (tree t, const wide_int &w)
11218 : : {
11219 : 91589299 : int_range_max vr;
11220 : 91589299 : switch (TREE_CODE (t))
11221 : : {
11222 : 1254944 : case INTEGER_CST:
11223 : 1254944 : return wi::to_wide (t) != w;
11224 : :
11225 : 90333522 : case SSA_NAME:
11226 : 90333522 : if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
11227 : : return false;
11228 : :
11229 : 180667044 : get_range_query (cfun)->range_of_expr (vr, t);
11230 : 90333522 : if (!vr.undefined_p () && !vr.contains_p (w))
11231 : : return true;
11232 : : /* If T has some known zero bits and W has any of those bits set,
11233 : : then T is known not to be equal to W. */
11234 : 90240298 : if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
11235 : 180480204 : TYPE_PRECISION (TREE_TYPE (t))), 0))
11236 : : return true;
11237 : : return false;
11238 : :
11239 : : default:
11240 : : return false;
11241 : : }
11242 : 91589299 : }
11243 : :
11244 : : /* Fold a binary expression of code CODE and type TYPE with operands
11245 : : OP0 and OP1. LOC is the location of the resulting expression.
11246 : : Return the folded expression if folding is successful. Otherwise,
11247 : : return NULL_TREE. */
11248 : :
11249 : : tree
11250 : 699450086 : fold_binary_loc (location_t loc, enum tree_code code, tree type,
11251 : : tree op0, tree op1)
11252 : : {
11253 : 699450086 : enum tree_code_class kind = TREE_CODE_CLASS (code);
11254 : 699450086 : tree arg0, arg1, tem;
11255 : 699450086 : tree t1 = NULL_TREE;
11256 : 699450086 : bool strict_overflow_p;
11257 : 699450086 : unsigned int prec;
11258 : :
11259 : 699450086 : gcc_assert (IS_EXPR_CODE_CLASS (kind)
11260 : : && TREE_CODE_LENGTH (code) == 2
11261 : : && op0 != NULL_TREE
11262 : : && op1 != NULL_TREE);
11263 : :
11264 : 699450086 : arg0 = op0;
11265 : 699450086 : arg1 = op1;
11266 : :
11267 : : /* Strip any conversions that don't change the mode. This is
11268 : : safe for every expression, except for a comparison expression
11269 : : because its signedness is derived from its operands. So, in
11270 : : the latter case, only strip conversions that don't change the
11271 : : signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
11272 : : preserved.
11273 : :
11274 : : Note that this is done as an internal manipulation within the
11275 : : constant folder, in order to find the simplest representation
11276 : : of the arguments so that their form can be studied. In any
11277 : : cases, the appropriate type conversions should be put back in
11278 : : the tree that will get out of the constant folder. */
11279 : :
11280 : 699450086 : if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
11281 : : {
11282 : 157854472 : STRIP_SIGN_NOPS (arg0);
11283 : 157854472 : STRIP_SIGN_NOPS (arg1);
11284 : : }
11285 : : else
11286 : : {
11287 : 541595614 : STRIP_NOPS (arg0);
11288 : 541595614 : STRIP_NOPS (arg1);
11289 : : }
11290 : :
11291 : : /* Note that TREE_CONSTANT isn't enough: static var addresses are
11292 : : constant but we can't do arithmetic on them. */
11293 : 699450086 : if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
11294 : : {
11295 : 177235460 : tem = const_binop (code, type, arg0, arg1);
11296 : 177235460 : if (tem != NULL_TREE)
11297 : : {
11298 : 174810762 : if (TREE_TYPE (tem) != type)
11299 : 1531238 : tem = fold_convert_loc (loc, type, tem);
11300 : 174810762 : return tem;
11301 : : }
11302 : : }
11303 : :
11304 : : /* If this is a commutative operation, and ARG0 is a constant, move it
11305 : : to ARG1 to reduce the number of tests below. */
11306 : 524639324 : if (commutative_tree_code (code)
11307 : 524639324 : && tree_swap_operands_p (arg0, arg1))
11308 : 26277872 : return fold_build2_loc (loc, code, type, op1, op0);
11309 : :
11310 : : /* Likewise if this is a comparison, and ARG0 is a constant, move it
11311 : : to ARG1 to reduce the number of tests below. */
11312 : 498361452 : if (kind == tcc_comparison
11313 : 498361452 : && tree_swap_operands_p (arg0, arg1))
11314 : 6902876 : return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
11315 : :
11316 : 491458576 : tem = generic_simplify (loc, code, type, op0, op1);
11317 : 491458576 : if (tem)
11318 : : return tem;
11319 : :
11320 : : /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
11321 : :
11322 : : First check for cases where an arithmetic operation is applied to a
11323 : : compound, conditional, or comparison operation. Push the arithmetic
11324 : : operation inside the compound or conditional to see if any folding
11325 : : can then be done. Convert comparison to conditional for this purpose.
11326 : : The also optimizes non-constant cases that used to be done in
11327 : : expand_expr.
11328 : :
11329 : : Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
11330 : : one of the operands is a comparison and the other is a comparison, a
11331 : : BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
11332 : : code below would make the expression more complex. Change it to a
11333 : : TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
11334 : : TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
11335 : :
11336 : 419980727 : if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
11337 : : || code == EQ_EXPR || code == NE_EXPR)
11338 : 50541698 : && !VECTOR_TYPE_P (TREE_TYPE (arg0))
11339 : 49981128 : && ((truth_value_p (TREE_CODE (arg0))
11340 : 1046014 : && (truth_value_p (TREE_CODE (arg1))
11341 : 774050 : || (TREE_CODE (arg1) == BIT_AND_EXPR
11342 : 46 : && integer_onep (TREE_OPERAND (arg1, 1)))))
11343 : 49709148 : || (truth_value_p (TREE_CODE (arg1))
11344 : 6364 : && (truth_value_p (TREE_CODE (arg0))
11345 : 6364 : || (TREE_CODE (arg0) == BIT_AND_EXPR
11346 : 171 : && integer_onep (TREE_OPERAND (arg0, 1)))))))
11347 : : {
11348 : 303034 : tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
11349 : 31040 : : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
11350 : : : TRUTH_XOR_EXPR,
11351 : : boolean_type_node,
11352 : : fold_convert_loc (loc, boolean_type_node, arg0),
11353 : : fold_convert_loc (loc, boolean_type_node, arg1));
11354 : :
11355 : 271994 : if (code == EQ_EXPR)
11356 : 25655 : tem = invert_truthvalue_loc (loc, tem);
11357 : :
11358 : 271994 : return fold_convert_loc (loc, type, tem);
11359 : : }
11360 : :
11361 : 419708733 : if (TREE_CODE_CLASS (code) == tcc_binary
11362 : 244482790 : || TREE_CODE_CLASS (code) == tcc_comparison)
11363 : : {
11364 : 258282947 : if (TREE_CODE (arg0) == COMPOUND_EXPR)
11365 : : {
11366 : 78473 : tem = fold_build2_loc (loc, code, type,
11367 : 78473 : fold_convert_loc (loc, TREE_TYPE (op0),
11368 : 78473 : TREE_OPERAND (arg0, 1)), op1);
11369 : 78473 : return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
11370 : 78473 : tem);
11371 : : }
11372 : 258204474 : if (TREE_CODE (arg1) == COMPOUND_EXPR)
11373 : : {
11374 : 3348 : tem = fold_build2_loc (loc, code, type, op0,
11375 : 3348 : fold_convert_loc (loc, TREE_TYPE (op1),
11376 : 3348 : TREE_OPERAND (arg1, 1)));
11377 : 3348 : return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
11378 : 3348 : tem);
11379 : : }
11380 : :
11381 : 258201126 : if (TREE_CODE (arg0) == COND_EXPR
11382 : 257822692 : || TREE_CODE (arg0) == VEC_COND_EXPR
11383 : 257822131 : || COMPARISON_CLASS_P (arg0))
11384 : : {
11385 : 668835 : tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
11386 : : arg0, arg1,
11387 : : /*cond_first_p=*/1);
11388 : 668835 : if (tem != NULL_TREE)
11389 : : return tem;
11390 : : }
11391 : :
11392 : 257777896 : if (TREE_CODE (arg1) == COND_EXPR
11393 : 257557234 : || TREE_CODE (arg1) == VEC_COND_EXPR
11394 : 257556960 : || COMPARISON_CLASS_P (arg1))
11395 : : {
11396 : 230451 : tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
11397 : : arg1, arg0,
11398 : : /*cond_first_p=*/0);
11399 : 230451 : if (tem != NULL_TREE)
11400 : : return tem;
11401 : : }
11402 : : }
11403 : :
11404 : 419199559 : switch (code)
11405 : : {
11406 : 42844848 : case MEM_REF:
11407 : : /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
11408 : 42844848 : if (TREE_CODE (arg0) == ADDR_EXPR
11409 : 42844848 : && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
11410 : : {
11411 : 676469 : tree iref = TREE_OPERAND (arg0, 0);
11412 : 676469 : return fold_build2 (MEM_REF, type,
11413 : : TREE_OPERAND (iref, 0),
11414 : : int_const_binop (PLUS_EXPR, arg1,
11415 : : TREE_OPERAND (iref, 1)));
11416 : : }
11417 : :
11418 : : /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
11419 : 42168379 : if (TREE_CODE (arg0) == ADDR_EXPR
11420 : 42168379 : && handled_component_p (TREE_OPERAND (arg0, 0)))
11421 : : {
11422 : 1940470 : tree base;
11423 : 1940470 : poly_int64 coffset;
11424 : 1940470 : base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
11425 : : &coffset);
11426 : 1940470 : if (!base)
11427 : : return NULL_TREE;
11428 : 1937073 : return fold_build2 (MEM_REF, type,
11429 : : build1 (ADDR_EXPR, TREE_TYPE (arg0), base),
11430 : : int_const_binop (PLUS_EXPR, arg1,
11431 : : size_int (coffset)));
11432 : : }
11433 : :
11434 : : return NULL_TREE;
11435 : :
11436 : 26990027 : case POINTER_PLUS_EXPR:
11437 : : /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
11438 : 53979634 : if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
11439 : 53970310 : && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
11440 : 28438 : return fold_convert_loc (loc, type,
11441 : : fold_build2_loc (loc, PLUS_EXPR, sizetype,
11442 : : fold_convert_loc (loc, sizetype,
11443 : : arg1),
11444 : : fold_convert_loc (loc, sizetype,
11445 : 28438 : arg0)));
11446 : :
11447 : : return NULL_TREE;
11448 : :
11449 : 52230325 : case PLUS_EXPR:
11450 : 52230325 : if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
11451 : : {
11452 : : /* X + (X / CST) * -CST is X % CST. */
11453 : 41004592 : if (TREE_CODE (arg1) == MULT_EXPR
11454 : 2395993 : && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
11455 : 41011993 : && operand_equal_p (arg0,
11456 : 7401 : TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
11457 : : {
11458 : 172 : tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
11459 : 172 : tree cst1 = TREE_OPERAND (arg1, 1);
11460 : 172 : tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
11461 : : cst1, cst0);
11462 : 172 : if (sum && integer_zerop (sum))
11463 : 172 : return fold_convert_loc (loc, type,
11464 : : fold_build2_loc (loc, TRUNC_MOD_EXPR,
11465 : 172 : TREE_TYPE (arg0), arg0,
11466 : 172 : cst0));
11467 : : }
11468 : : }
11469 : :
11470 : : /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
11471 : : one. Make sure the type is not saturating and has the signedness of
11472 : : the stripped operands, as fold_plusminus_mult_expr will re-associate.
11473 : : ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
11474 : 52230153 : if ((TREE_CODE (arg0) == MULT_EXPR
11475 : 42245750 : || TREE_CODE (arg1) == MULT_EXPR)
11476 : 11435771 : && !TYPE_SATURATING (type)
11477 : 11435771 : && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
11478 : 11090038 : && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
11479 : 62623963 : && (!FLOAT_TYPE_P (type) || flag_associative_math))
11480 : : {
11481 : 7281877 : tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
11482 : 7281877 : if (tem)
11483 : : return tem;
11484 : : }
11485 : :
11486 : 51083211 : if (! FLOAT_TYPE_P (type))
11487 : : {
11488 : : /* Reassociate (plus (plus (mult) (foo)) (mult)) as
11489 : : (plus (plus (mult) (mult)) (foo)) so that we can
11490 : : take advantage of the factoring cases below. */
11491 : 271947 : if (ANY_INTEGRAL_TYPE_P (type)
11492 : 39859827 : && TYPE_OVERFLOW_WRAPS (type)
11493 : 39859827 : && (((TREE_CODE (arg0) == PLUS_EXPR
11494 : 24414855 : || TREE_CODE (arg0) == MINUS_EXPR)
11495 : 2588394 : && TREE_CODE (arg1) == MULT_EXPR)
11496 : 23910015 : || ((TREE_CODE (arg1) == PLUS_EXPR
11497 : 23910015 : || TREE_CODE (arg1) == MINUS_EXPR)
11498 : 384187 : && TREE_CODE (arg0) == MULT_EXPR)))
11499 : : {
11500 : 558379 : tree parg0, parg1, parg, marg;
11501 : 558379 : enum tree_code pcode;
11502 : :
11503 : 558379 : if (TREE_CODE (arg1) == MULT_EXPR)
11504 : : parg = arg0, marg = arg1;
11505 : : else
11506 : 53539 : parg = arg1, marg = arg0;
11507 : 558379 : pcode = TREE_CODE (parg);
11508 : 558379 : parg0 = TREE_OPERAND (parg, 0);
11509 : 558379 : parg1 = TREE_OPERAND (parg, 1);
11510 : 558379 : STRIP_NOPS (parg0);
11511 : 558379 : STRIP_NOPS (parg1);
11512 : :
11513 : 558379 : if (TREE_CODE (parg0) == MULT_EXPR
11514 : 289610 : && TREE_CODE (parg1) != MULT_EXPR)
11515 : 262121 : return fold_build2_loc (loc, pcode, type,
11516 : : fold_build2_loc (loc, PLUS_EXPR, type,
11517 : : fold_convert_loc (loc, type,
11518 : : parg0),
11519 : : fold_convert_loc (loc, type,
11520 : : marg)),
11521 : 262121 : fold_convert_loc (loc, type, parg1));
11522 : 296258 : if (TREE_CODE (parg0) != MULT_EXPR
11523 : 268769 : && TREE_CODE (parg1) == MULT_EXPR)
11524 : 104214 : return
11525 : 104214 : fold_build2_loc (loc, PLUS_EXPR, type,
11526 : : fold_convert_loc (loc, type, parg0),
11527 : : fold_build2_loc (loc, pcode, type,
11528 : : fold_convert_loc (loc, type, marg),
11529 : : fold_convert_loc (loc, type,
11530 : 104214 : parg1)));
11531 : : }
11532 : : }
11533 : : else
11534 : : {
11535 : : /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
11536 : : to __complex__ ( x, y ). This is not the same for SNaNs or
11537 : : if signed zeros are involved. */
11538 : 11223384 : if (!HONOR_SNANS (arg0)
11539 : 11222220 : && !HONOR_SIGNED_ZEROS (arg0)
11540 : 11242126 : && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11541 : : {
11542 : 3039 : tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11543 : 3039 : tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
11544 : 3039 : tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
11545 : 3039 : bool arg0rz = false, arg0iz = false;
11546 : 3122 : if ((arg0r && (arg0rz = real_zerop (arg0r)))
11547 : 3122 : || (arg0i && (arg0iz = real_zerop (arg0i))))
11548 : : {
11549 : 83 : tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
11550 : 83 : tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
11551 : 83 : if (arg0rz && arg1i && real_zerop (arg1i))
11552 : : {
11553 : 22 : tree rp = arg1r ? arg1r
11554 : 0 : : build1 (REALPART_EXPR, rtype, arg1);
11555 : 22 : tree ip = arg0i ? arg0i
11556 : 0 : : build1 (IMAGPART_EXPR, rtype, arg0);
11557 : 22 : return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11558 : : }
11559 : 61 : else if (arg0iz && arg1r && real_zerop (arg1r))
11560 : : {
11561 : 53 : tree rp = arg0r ? arg0r
11562 : 0 : : build1 (REALPART_EXPR, rtype, arg0);
11563 : 53 : tree ip = arg1i ? arg1i
11564 : 0 : : build1 (IMAGPART_EXPR, rtype, arg1);
11565 : 53 : return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11566 : : }
11567 : : }
11568 : : }
11569 : :
11570 : : /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
11571 : : We associate floats only if the user has specified
11572 : : -fassociative-math. */
11573 : 11223309 : if (flag_associative_math
11574 : 18642 : && TREE_CODE (arg1) == PLUS_EXPR
11575 : 35 : && TREE_CODE (arg0) != MULT_EXPR)
11576 : : {
11577 : 21 : tree tree10 = TREE_OPERAND (arg1, 0);
11578 : 21 : tree tree11 = TREE_OPERAND (arg1, 1);
11579 : 21 : if (TREE_CODE (tree11) == MULT_EXPR
11580 : 5 : && TREE_CODE (tree10) == MULT_EXPR)
11581 : : {
11582 : 1 : tree tree0;
11583 : 1 : tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
11584 : 1 : return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
11585 : : }
11586 : : }
11587 : : /* Convert (b*c + d*e) + a into b*c + (d*e +a).
11588 : : We associate floats only if the user has specified
11589 : : -fassociative-math. */
11590 : 11223308 : if (flag_associative_math
11591 : 18641 : && TREE_CODE (arg0) == PLUS_EXPR
11592 : 1179 : && TREE_CODE (arg1) != MULT_EXPR)
11593 : : {
11594 : 800 : tree tree00 = TREE_OPERAND (arg0, 0);
11595 : 800 : tree tree01 = TREE_OPERAND (arg0, 1);
11596 : 800 : if (TREE_CODE (tree01) == MULT_EXPR
11597 : 51 : && TREE_CODE (tree00) == MULT_EXPR)
11598 : : {
11599 : 11 : tree tree0;
11600 : 11 : tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
11601 : 11 : return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
11602 : : }
11603 : : }
11604 : : }
11605 : :
11606 : 11222508 : bit_rotate:
11607 : : /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
11608 : : is a rotate of A by C1 bits. */
11609 : : /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
11610 : : is a rotate of A by B bits.
11611 : : Similarly for (A << B) | (A >> (-B & C3)) where C3 is Z-1,
11612 : : though in this case CODE must be | and not + or ^, otherwise
11613 : : it doesn't return A when B is 0. */
11614 : 52976890 : {
11615 : 52976890 : enum tree_code code0, code1;
11616 : 52976890 : tree rtype;
11617 : 52976890 : code0 = TREE_CODE (arg0);
11618 : 52976890 : code1 = TREE_CODE (arg1);
11619 : 51315 : if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
11620 : 52959967 : || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
11621 : 42622 : && operand_equal_p (TREE_OPERAND (arg0, 0),
11622 : 42622 : TREE_OPERAND (arg1, 0), 0)
11623 : 40016 : && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
11624 : 40016 : TYPE_UNSIGNED (rtype))
11625 : : /* Only create rotates in complete modes. Other cases are not
11626 : : expanded properly. */
11627 : 53004134 : && (element_precision (rtype)
11628 : 54488 : == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
11629 : : {
11630 : 27194 : tree tree01, tree11;
11631 : 27194 : tree orig_tree01, orig_tree11;
11632 : 27194 : enum tree_code code01, code11;
11633 : :
11634 : 27194 : tree01 = orig_tree01 = TREE_OPERAND (arg0, 1);
11635 : 27194 : tree11 = orig_tree11 = TREE_OPERAND (arg1, 1);
11636 : 27194 : STRIP_NOPS (tree01);
11637 : 27194 : STRIP_NOPS (tree11);
11638 : 27194 : code01 = TREE_CODE (tree01);
11639 : 27194 : code11 = TREE_CODE (tree11);
11640 : 27194 : if (code11 != MINUS_EXPR
11641 : 26418 : && (code01 == MINUS_EXPR || code01 == BIT_AND_EXPR))
11642 : : {
11643 : 1876 : std::swap (code0, code1);
11644 : 1876 : std::swap (code01, code11);
11645 : 1876 : std::swap (tree01, tree11);
11646 : 1876 : std::swap (orig_tree01, orig_tree11);
11647 : : }
11648 : 54388 : if (code01 == INTEGER_CST
11649 : 3747 : && code11 == INTEGER_CST
11650 : 34686 : && (wi::to_widest (tree01) + wi::to_widest (tree11)
11651 : 34686 : == element_precision (rtype)))
11652 : : {
11653 : 7162 : tem = build2_loc (loc, LROTATE_EXPR,
11654 : 3581 : rtype, TREE_OPERAND (arg0, 0),
11655 : : code0 == LSHIFT_EXPR
11656 : : ? orig_tree01 : orig_tree11);
11657 : 3581 : return fold_convert_loc (loc, type, tem);
11658 : : }
11659 : 23613 : else if (code11 == MINUS_EXPR)
11660 : : {
11661 : 1103 : tree tree110, tree111;
11662 : 1103 : tree110 = TREE_OPERAND (tree11, 0);
11663 : 1103 : tree111 = TREE_OPERAND (tree11, 1);
11664 : 1103 : STRIP_NOPS (tree110);
11665 : 1103 : STRIP_NOPS (tree111);
11666 : 1103 : if (TREE_CODE (tree110) == INTEGER_CST
11667 : 1092 : && compare_tree_int (tree110,
11668 : 1092 : element_precision (rtype)) == 0
11669 : 2179 : && operand_equal_p (tree01, tree111, 0))
11670 : : {
11671 : 957 : tem = build2_loc (loc, (code0 == LSHIFT_EXPR
11672 : : ? LROTATE_EXPR : RROTATE_EXPR),
11673 : 684 : rtype, TREE_OPERAND (arg0, 0),
11674 : : orig_tree01);
11675 : 684 : return fold_convert_loc (loc, type, tem);
11676 : : }
11677 : : }
11678 : 22510 : else if (code == BIT_IOR_EXPR
11679 : 21076 : && code11 == BIT_AND_EXPR
11680 : 43508 : && pow2p_hwi (element_precision (rtype)))
11681 : : {
11682 : 20998 : tree tree110, tree111;
11683 : 20998 : tree110 = TREE_OPERAND (tree11, 0);
11684 : 20998 : tree111 = TREE_OPERAND (tree11, 1);
11685 : 20998 : STRIP_NOPS (tree110);
11686 : 20998 : STRIP_NOPS (tree111);
11687 : 20998 : if (TREE_CODE (tree110) == NEGATE_EXPR
11688 : 20413 : && TREE_CODE (tree111) == INTEGER_CST
11689 : 20413 : && compare_tree_int (tree111,
11690 : 20413 : element_precision (rtype) - 1) == 0
11691 : 41397 : && operand_equal_p (tree01, TREE_OPERAND (tree110, 0), 0))
11692 : : {
11693 : 30433 : tem = build2_loc (loc, (code0 == LSHIFT_EXPR
11694 : : ? LROTATE_EXPR : RROTATE_EXPR),
11695 : 20317 : rtype, TREE_OPERAND (arg0, 0),
11696 : : orig_tree01);
11697 : 20317 : return fold_convert_loc (loc, type, tem);
11698 : : }
11699 : : }
11700 : : }
11701 : : }
11702 : :
11703 : 131759238 : associate:
11704 : : /* In most languages, can't associate operations on floats through
11705 : : parentheses. Rather than remember where the parentheses were, we
11706 : : don't associate floats at all, unless the user has specified
11707 : : -fassociative-math.
11708 : : And, we need to make sure type is not saturating. */
11709 : :
11710 : 131759238 : if ((! FLOAT_TYPE_P (type) || flag_associative_math)
11711 : 92163350 : && !TYPE_SATURATING (type)
11712 : 223922588 : && !TYPE_OVERFLOW_SANITIZED (type))
11713 : : {
11714 : 92134912 : tree var0, minus_var0, con0, minus_con0, lit0, minus_lit0;
11715 : 92134912 : tree var1, minus_var1, con1, minus_con1, lit1, minus_lit1;
11716 : 92134912 : tree atype = type;
11717 : 92134912 : bool ok = true;
11718 : :
11719 : : /* Split both trees into variables, constants, and literals. Then
11720 : : associate each group together, the constants with literals,
11721 : : then the result with variables. This increases the chances of
11722 : : literals being recombined later and of generating relocatable
11723 : : expressions for the sum of a constant and literal. */
11724 : 92134912 : var0 = split_tree (arg0, type, code,
11725 : : &minus_var0, &con0, &minus_con0,
11726 : : &lit0, &minus_lit0, 0);
11727 : 92134912 : var1 = split_tree (arg1, type, code,
11728 : : &minus_var1, &con1, &minus_con1,
11729 : : &lit1, &minus_lit1, code == MINUS_EXPR);
11730 : :
11731 : : /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
11732 : 92134912 : if (code == MINUS_EXPR)
11733 : 9775471 : code = PLUS_EXPR;
11734 : :
11735 : : /* With undefined overflow prefer doing association in a type
11736 : : which wraps on overflow, if that is one of the operand types. */
11737 : 92134749 : if ((POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
11738 : 183080515 : && !TYPE_OVERFLOW_WRAPS (type))
11739 : : {
11740 : 54137098 : if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11741 : 53532336 : && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11742 : 664214 : atype = TREE_TYPE (arg0);
11743 : 52860319 : else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
11744 : 52711306 : && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
11745 : 157636 : atype = TREE_TYPE (arg1);
11746 : 27326541 : gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
11747 : : }
11748 : :
11749 : : /* With undefined overflow we can only associate constants with one
11750 : : variable, and constants whose association doesn't overflow. */
11751 : 92134749 : if ((POINTER_TYPE_P (atype) || INTEGRAL_TYPE_P (atype))
11752 : 183080515 : && !TYPE_OVERFLOW_WRAPS (atype))
11753 : : {
11754 : 26504691 : if ((var0 && var1) || (minus_var0 && minus_var1))
11755 : : {
11756 : : /* ??? If split_tree would handle NEGATE_EXPR we could
11757 : : simply reject these cases and the allowed cases would
11758 : : be the var0/minus_var1 ones. */
11759 : 1279 : tree tmp0 = var0 ? var0 : minus_var0;
11760 : 4798405 : tree tmp1 = var1 ? var1 : minus_var1;
11761 : 4798405 : bool one_neg = false;
11762 : :
11763 : 4798405 : if (TREE_CODE (tmp0) == NEGATE_EXPR)
11764 : : {
11765 : 1628 : tmp0 = TREE_OPERAND (tmp0, 0);
11766 : 1628 : one_neg = !one_neg;
11767 : : }
11768 : 4293144 : if (CONVERT_EXPR_P (tmp0)
11769 : 521439 : && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
11770 : 5319390 : && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
11771 : 520985 : <= TYPE_PRECISION (atype)))
11772 : 506932 : tmp0 = TREE_OPERAND (tmp0, 0);
11773 : 4798405 : if (TREE_CODE (tmp1) == NEGATE_EXPR)
11774 : : {
11775 : 164 : tmp1 = TREE_OPERAND (tmp1, 0);
11776 : 164 : one_neg = !one_neg;
11777 : : }
11778 : 4496338 : if (CONVERT_EXPR_P (tmp1)
11779 : 323429 : && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
11780 : 5121712 : && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
11781 : 323307 : <= TYPE_PRECISION (atype)))
11782 : 311230 : tmp1 = TREE_OPERAND (tmp1, 0);
11783 : : /* The only case we can still associate with two variables
11784 : : is if they cancel out. */
11785 : 4798405 : if (!one_neg
11786 : 4798405 : || !operand_equal_p (tmp0, tmp1, 0))
11787 : : ok = false;
11788 : : }
11789 : 21413903 : else if ((var0 && minus_var1
11790 : 3491011 : && ! operand_equal_p (var0, minus_var1, 0))
11791 : 39629179 : || (minus_var0 && var1
11792 : 4957 : && ! operand_equal_p (minus_var0, var1, 0)))
11793 : : ok = false;
11794 : : }
11795 : :
11796 : : /* Only do something if we found more than two objects. Otherwise,
11797 : : nothing has changed and we risk infinite recursion. */
11798 : : if (ok
11799 : 83840607 : && ((var0 != 0) + (var1 != 0)
11800 : 83840607 : + (minus_var0 != 0) + (minus_var1 != 0)
11801 : 83840607 : + (con0 != 0) + (con1 != 0)
11802 : 83840607 : + (minus_con0 != 0) + (minus_con1 != 0)
11803 : 83840607 : + (lit0 != 0) + (lit1 != 0)
11804 : 83840607 : + (minus_lit0 != 0) + (minus_lit1 != 0)) > 2)
11805 : : {
11806 : 1550268 : int var0_origin = (var0 != 0) + 2 * (var1 != 0);
11807 : 3100536 : int minus_var0_origin
11808 : 1550268 : = (minus_var0 != 0) + 2 * (minus_var1 != 0);
11809 : 1550268 : int con0_origin = (con0 != 0) + 2 * (con1 != 0);
11810 : 3100536 : int minus_con0_origin
11811 : 1550268 : = (minus_con0 != 0) + 2 * (minus_con1 != 0);
11812 : 1550268 : int lit0_origin = (lit0 != 0) + 2 * (lit1 != 0);
11813 : 3100536 : int minus_lit0_origin
11814 : 1550268 : = (minus_lit0 != 0) + 2 * (minus_lit1 != 0);
11815 : 1550268 : var0 = associate_trees (loc, var0, var1, code, atype);
11816 : 1550268 : minus_var0 = associate_trees (loc, minus_var0, minus_var1,
11817 : : code, atype);
11818 : 1550268 : con0 = associate_trees (loc, con0, con1, code, atype);
11819 : 1550268 : minus_con0 = associate_trees (loc, minus_con0, minus_con1,
11820 : : code, atype);
11821 : 1550268 : lit0 = associate_trees (loc, lit0, lit1, code, atype);
11822 : 1550268 : minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
11823 : : code, atype);
11824 : :
11825 : 1550268 : if (minus_var0 && var0)
11826 : : {
11827 : 1011616 : var0_origin |= minus_var0_origin;
11828 : 1011616 : var0 = associate_trees (loc, var0, minus_var0,
11829 : : MINUS_EXPR, atype);
11830 : 1011616 : minus_var0 = 0;
11831 : 1011616 : minus_var0_origin = 0;
11832 : : }
11833 : 1550268 : if (minus_con0 && con0)
11834 : : {
11835 : 6238 : con0_origin |= minus_con0_origin;
11836 : 6238 : con0 = associate_trees (loc, con0, minus_con0,
11837 : : MINUS_EXPR, atype);
11838 : 6238 : minus_con0 = 0;
11839 : 6238 : minus_con0_origin = 0;
11840 : : }
11841 : :
11842 : : /* Preserve the MINUS_EXPR if the negative part of the literal is
11843 : : greater than the positive part. Otherwise, the multiplicative
11844 : : folding code (i.e extract_muldiv) may be fooled in case
11845 : : unsigned constants are subtracted, like in the following
11846 : : example: ((X*2 + 4) - 8U)/2. */
11847 : 1550268 : if (minus_lit0 && lit0)
11848 : : {
11849 : 120750 : if (TREE_CODE (lit0) == INTEGER_CST
11850 : 120750 : && TREE_CODE (minus_lit0) == INTEGER_CST
11851 : 120750 : && tree_int_cst_lt (lit0, minus_lit0)
11852 : : /* But avoid ending up with only negated parts. */
11853 : 153899 : && (var0 || con0))
11854 : : {
11855 : 29218 : minus_lit0_origin |= lit0_origin;
11856 : 29218 : minus_lit0 = associate_trees (loc, minus_lit0, lit0,
11857 : : MINUS_EXPR, atype);
11858 : 29218 : lit0 = 0;
11859 : 29218 : lit0_origin = 0;
11860 : : }
11861 : : else
11862 : : {
11863 : 91532 : lit0_origin |= minus_lit0_origin;
11864 : 91532 : lit0 = associate_trees (loc, lit0, minus_lit0,
11865 : : MINUS_EXPR, atype);
11866 : 91532 : minus_lit0 = 0;
11867 : 91532 : minus_lit0_origin = 0;
11868 : : }
11869 : : }
11870 : :
11871 : : /* Don't introduce overflows through reassociation. */
11872 : 1054925 : if ((lit0 && TREE_OVERFLOW_P (lit0))
11873 : 2605153 : || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0)))
11874 : 1550268 : return NULL_TREE;
11875 : :
11876 : : /* Eliminate lit0 and minus_lit0 to con0 and minus_con0. */
11877 : 1550228 : con0_origin |= lit0_origin;
11878 : 1550228 : con0 = associate_trees (loc, con0, lit0, code, atype);
11879 : 1550228 : minus_con0_origin |= minus_lit0_origin;
11880 : 1550228 : minus_con0 = associate_trees (loc, minus_con0, minus_lit0,
11881 : : code, atype);
11882 : :
11883 : : /* Eliminate minus_con0. */
11884 : 1550228 : if (minus_con0)
11885 : : {
11886 : 496024 : if (con0)
11887 : : {
11888 : 7991 : con0_origin |= minus_con0_origin;
11889 : 7991 : con0 = associate_trees (loc, con0, minus_con0,
11890 : : MINUS_EXPR, atype);
11891 : : }
11892 : 488033 : else if (var0)
11893 : : {
11894 : 488033 : var0_origin |= minus_con0_origin;
11895 : 488033 : var0 = associate_trees (loc, var0, minus_con0,
11896 : : MINUS_EXPR, atype);
11897 : : }
11898 : : else
11899 : 0 : gcc_unreachable ();
11900 : : }
11901 : :
11902 : : /* Eliminate minus_var0. */
11903 : 1550228 : if (minus_var0)
11904 : : {
11905 : 254151 : if (con0)
11906 : : {
11907 : 254151 : con0_origin |= minus_var0_origin;
11908 : 254151 : con0 = associate_trees (loc, con0, minus_var0,
11909 : : MINUS_EXPR, atype);
11910 : : }
11911 : : else
11912 : 0 : gcc_unreachable ();
11913 : : }
11914 : :
11915 : : /* Reassociate only if there has been any actual association
11916 : : between subtrees from op0 and subtrees from op1 in at
11917 : : least one of the operands, otherwise we risk infinite
11918 : : recursion. See PR114084. */
11919 : 1550228 : if (var0_origin != 3 && con0_origin != 3)
11920 : : return NULL_TREE;
11921 : :
11922 : 1549014 : return
11923 : 1549014 : fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
11924 : 1549014 : code, atype));
11925 : : }
11926 : : }
11927 : :
11928 : : return NULL_TREE;
11929 : :
11930 : 19779497 : case POINTER_DIFF_EXPR:
11931 : 19779497 : case MINUS_EXPR:
11932 : : /* Fold &a[i] - &a[j] to i-j. */
11933 : 19779497 : if (TREE_CODE (arg0) == ADDR_EXPR
11934 : 31964 : && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
11935 : 5970 : && TREE_CODE (arg1) == ADDR_EXPR
11936 : 19780056 : && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
11937 : : {
11938 : 31 : tree tem = fold_addr_of_array_ref_difference (loc, type,
11939 : 31 : TREE_OPERAND (arg0, 0),
11940 : 31 : TREE_OPERAND (arg1, 0),
11941 : : code
11942 : : == POINTER_DIFF_EXPR);
11943 : 31 : if (tem)
11944 : : return tem;
11945 : : }
11946 : :
11947 : : /* Further transformations are not for pointers. */
11948 : 19779481 : if (code == POINTER_DIFF_EXPR)
11949 : : return NULL_TREE;
11950 : :
11951 : : /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
11952 : 17431013 : if (TREE_CODE (arg0) == NEGATE_EXPR
11953 : 127193 : && negate_expr_p (op1)
11954 : : /* If arg0 is e.g. unsigned int and type is int, then this could
11955 : : introduce UB, because if A is INT_MIN at runtime, the original
11956 : : expression can be well defined while the latter is not.
11957 : : See PR83269. */
11958 : 17431482 : && !(ANY_INTEGRAL_TYPE_P (type)
11959 : 469 : && TYPE_OVERFLOW_UNDEFINED (type)
11960 : 457 : && ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11961 : 457 : && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
11962 : 462 : return fold_build2_loc (loc, MINUS_EXPR, type, negate_expr (op1),
11963 : : fold_convert_loc (loc, type,
11964 : 924 : TREE_OPERAND (arg0, 0)));
11965 : :
11966 : : /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
11967 : : __complex__ ( x, -y ). This is not the same for SNaNs or if
11968 : : signed zeros are involved. */
11969 : 17430551 : if (!HONOR_SNANS (arg0)
11970 : 17429687 : && !HONOR_SIGNED_ZEROS (arg0)
11971 : 27830682 : && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11972 : : {
11973 : 53 : tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11974 : 53 : tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
11975 : 53 : tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
11976 : 53 : bool arg0rz = false, arg0iz = false;
11977 : 69 : if ((arg0r && (arg0rz = real_zerop (arg0r)))
11978 : 69 : || (arg0i && (arg0iz = real_zerop (arg0i))))
11979 : : {
11980 : 25 : tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
11981 : 25 : tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
11982 : 25 : if (arg0rz && arg1i && real_zerop (arg1i))
11983 : : {
11984 : 9 : tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
11985 : : arg1r ? arg1r
11986 : 0 : : build1 (REALPART_EXPR, rtype, arg1));
11987 : 9 : tree ip = arg0i ? arg0i
11988 : 0 : : build1 (IMAGPART_EXPR, rtype, arg0);
11989 : 9 : return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11990 : : }
11991 : 16 : else if (arg0iz && arg1r && real_zerop (arg1r))
11992 : : {
11993 : 15 : tree rp = arg0r ? arg0r
11994 : 0 : : build1 (REALPART_EXPR, rtype, arg0);
11995 : 15 : tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
11996 : : arg1i ? arg1i
11997 : 0 : : build1 (IMAGPART_EXPR, rtype, arg1));
11998 : 15 : return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11999 : : }
12000 : : }
12001 : : }
12002 : :
12003 : : /* A - B -> A + (-B) if B is easily negatable. */
12004 : 17430527 : if (negate_expr_p (op1)
12005 : 577356 : && ! TYPE_OVERFLOW_SANITIZED (type)
12006 : 18005424 : && ((FLOAT_TYPE_P (type)
12007 : : /* Avoid this transformation if B is a positive REAL_CST. */
12008 : 65 : && (TREE_CODE (op1) != REAL_CST
12009 : 0 : || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
12010 : 574832 : || INTEGRAL_TYPE_P (type)))
12011 : 574695 : return fold_build2_loc (loc, PLUS_EXPR, type,
12012 : : fold_convert_loc (loc, type, arg0),
12013 : 574695 : negate_expr (op1));
12014 : :
12015 : : /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
12016 : : one. Make sure the type is not saturating and has the signedness of
12017 : : the stripped operands, as fold_plusminus_mult_expr will re-associate.
12018 : : ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
12019 : 16855832 : if ((TREE_CODE (arg0) == MULT_EXPR
12020 : 15658874 : || TREE_CODE (arg1) == MULT_EXPR)
12021 : 2536787 : && !TYPE_SATURATING (type)
12022 : 2536787 : && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
12023 : 2438507 : && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
12024 : 19248777 : && (!FLOAT_TYPE_P (type) || flag_associative_math))
12025 : : {
12026 : 331999 : tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
12027 : 331999 : if (tem)
12028 : : return tem;
12029 : : }
12030 : :
12031 : 16813556 : goto associate;
12032 : :
12033 : 56629432 : case MULT_EXPR:
12034 : 56629432 : if (! FLOAT_TYPE_P (type))
12035 : : {
12036 : : /* Transform x * -C into -x * C if x is easily negatable. */
12037 : 35238876 : if (TREE_CODE (op1) == INTEGER_CST
12038 : 32240025 : && tree_int_cst_sgn (op1) == -1
12039 : 229253 : && negate_expr_p (op0)
12040 : 279 : && negate_expr_p (op1)
12041 : 260 : && (tem = negate_expr (op1)) != op1
12042 : 35239136 : && ! TREE_OVERFLOW (tem))
12043 : 260 : return fold_build2_loc (loc, MULT_EXPR, type,
12044 : : fold_convert_loc (loc, type,
12045 : 260 : negate_expr (op0)), tem);
12046 : :
12047 : 35238616 : strict_overflow_p = false;
12048 : 35238616 : if (TREE_CODE (arg1) == INTEGER_CST
12049 : 35238616 : && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12050 : : &strict_overflow_p)) != 0)
12051 : : {
12052 : 228723 : if (strict_overflow_p)
12053 : 30861 : fold_overflow_warning (("assuming signed overflow does not "
12054 : : "occur when simplifying "
12055 : : "multiplication"),
12056 : : WARN_STRICT_OVERFLOW_MISC);
12057 : 228723 : return fold_convert_loc (loc, type, tem);
12058 : : }
12059 : :
12060 : : /* Optimize z * conj(z) for integer complex numbers. */
12061 : 35009893 : if (TREE_CODE (arg0) == CONJ_EXPR
12062 : 35009893 : && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12063 : 1 : return fold_mult_zconjz (loc, type, arg1);
12064 : 35009892 : if (TREE_CODE (arg1) == CONJ_EXPR
12065 : 35009892 : && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12066 : 0 : return fold_mult_zconjz (loc, type, arg0);
12067 : : }
12068 : : else
12069 : : {
12070 : : /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
12071 : : This is not the same for NaNs or if signed zeros are
12072 : : involved. */
12073 : 21390556 : if (!HONOR_NANS (arg0)
12074 : 32717 : && !HONOR_SIGNED_ZEROS (arg0)
12075 : 32429 : && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
12076 : 3580 : && TREE_CODE (arg1) == COMPLEX_CST
12077 : 21390724 : && real_zerop (TREE_REALPART (arg1)))
12078 : : {
12079 : 161 : tree rtype = TREE_TYPE (TREE_TYPE (arg0));
12080 : 161 : if (real_onep (TREE_IMAGPART (arg1)))
12081 : 151 : return
12082 : 151 : fold_build2_loc (loc, COMPLEX_EXPR, type,
12083 : : negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
12084 : : rtype, arg0)),
12085 : 151 : fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
12086 : 10 : else if (real_minus_onep (TREE_IMAGPART (arg1)))
12087 : 10 : return
12088 : 10 : fold_build2_loc (loc, COMPLEX_EXPR, type,
12089 : : fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
12090 : : negate_expr (fold_build1_loc (loc, REALPART_EXPR,
12091 : 10 : rtype, arg0)));
12092 : : }
12093 : :
12094 : : /* Optimize z * conj(z) for floating point complex numbers.
12095 : : Guarded by flag_unsafe_math_optimizations as non-finite
12096 : : imaginary components don't produce scalar results. */
12097 : 21390395 : if (flag_unsafe_math_optimizations
12098 : 32312 : && TREE_CODE (arg0) == CONJ_EXPR
12099 : 21390397 : && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12100 : 1 : return fold_mult_zconjz (loc, type, arg1);
12101 : 21390394 : if (flag_unsafe_math_optimizations
12102 : 32311 : && TREE_CODE (arg1) == CONJ_EXPR
12103 : 21390398 : && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12104 : 0 : return fold_mult_zconjz (loc, type, arg0);
12105 : : }
12106 : 56400286 : goto associate;
12107 : :
12108 : 1622835 : case BIT_IOR_EXPR:
12109 : : /* Canonicalize (X & C1) | C2. */
12110 : 1622835 : if (TREE_CODE (arg0) == BIT_AND_EXPR
12111 : 108387 : && TREE_CODE (arg1) == INTEGER_CST
12112 : 1689749 : && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12113 : : {
12114 : 66906 : int width = TYPE_PRECISION (type), w;
12115 : 66906 : wide_int c1 = wi::to_wide (TREE_OPERAND (arg0, 1));
12116 : 66906 : wide_int c2 = wi::to_wide (arg1);
12117 : :
12118 : : /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
12119 : 66906 : if ((c1 & c2) == c1)
12120 : 0 : return omit_one_operand_loc (loc, type, arg1,
12121 : 0 : TREE_OPERAND (arg0, 0));
12122 : :
12123 : 66906 : wide_int msk = wi::mask (width, false,
12124 : 66906 : TYPE_PRECISION (TREE_TYPE (arg1)));
12125 : :
12126 : : /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
12127 : 66906 : if (wi::bit_and_not (msk, c1 | c2) == 0)
12128 : : {
12129 : 8 : tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12130 : 8 : return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
12131 : : }
12132 : :
12133 : : /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
12134 : : unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
12135 : : mode which allows further optimizations. */
12136 : 66898 : c1 &= msk;
12137 : 66898 : c2 &= msk;
12138 : 66898 : wide_int c3 = wi::bit_and_not (c1, c2);
12139 : 207310 : for (w = BITS_PER_UNIT; w <= width; w <<= 1)
12140 : : {
12141 : 140654 : wide_int mask = wi::mask (w, false,
12142 : 140654 : TYPE_PRECISION (type));
12143 : 281308 : if (((c1 | c2) & mask) == mask
12144 : 281308 : && wi::bit_and_not (c1, mask) == 0)
12145 : : {
12146 : 242 : c3 = mask;
12147 : 242 : break;
12148 : : }
12149 : 140654 : }
12150 : :
12151 : 66898 : if (c3 != c1)
12152 : : {
12153 : 563 : tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12154 : 563 : tem = fold_build2_loc (loc, BIT_AND_EXPR, type, tem,
12155 : : wide_int_to_tree (type, c3));
12156 : 563 : return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
12157 : : }
12158 : 68040 : }
12159 : :
12160 : : /* See if this can be simplified into a rotate first. If that
12161 : : is unsuccessful continue in the association code. */
12162 : 1622264 : goto bit_rotate;
12163 : :
12164 : 637837 : case BIT_XOR_EXPR:
12165 : : /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
12166 : 637837 : if (TREE_CODE (arg0) == BIT_AND_EXPR
12167 : 1911 : && INTEGRAL_TYPE_P (type)
12168 : 1879 : && integer_onep (TREE_OPERAND (arg0, 1))
12169 : 637838 : && integer_onep (arg1))
12170 : 0 : return fold_build2_loc (loc, EQ_EXPR, type, arg0,
12171 : 0 : build_zero_cst (TREE_TYPE (arg0)));
12172 : :
12173 : : /* See if this can be simplified into a rotate first. If that
12174 : : is unsuccessful continue in the association code. */
12175 : 637837 : goto bit_rotate;
12176 : :
12177 : 5216215 : case BIT_AND_EXPR:
12178 : : /* Fold !X & 1 as X == 0. */
12179 : 5216215 : if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12180 : 5216215 : && integer_onep (arg1))
12181 : : {
12182 : 0 : tem = TREE_OPERAND (arg0, 0);
12183 : 0 : return fold_build2_loc (loc, EQ_EXPR, type, tem,
12184 : 0 : build_zero_cst (TREE_TYPE (tem)));
12185 : : }
12186 : :
12187 : : /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
12188 : : multiple of 1 << CST. */
12189 : 5216215 : if (TREE_CODE (arg1) == INTEGER_CST)
12190 : : {
12191 : 3704593 : wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
12192 : 3704593 : wide_int ncst1 = -cst1;
12193 : 3704593 : if ((cst1 & ncst1) == ncst1
12194 : 3843925 : && multiple_of_p (type, arg0,
12195 : 3843925 : wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
12196 : 473 : return fold_convert_loc (loc, type, arg0);
12197 : 3704593 : }
12198 : :
12199 : : /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
12200 : : bits from CST2. */
12201 : 5215742 : if (TREE_CODE (arg1) == INTEGER_CST
12202 : 3704120 : && TREE_CODE (arg0) == MULT_EXPR
12203 : 5318982 : && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12204 : : {
12205 : 103218 : wi::tree_to_wide_ref warg1 = wi::to_wide (arg1);
12206 : 103218 : wide_int masked
12207 : 103218 : = mask_with_tz (type, warg1, wi::to_wide (TREE_OPERAND (arg0, 1)));
12208 : :
12209 : 103218 : if (masked == 0)
12210 : 6213 : return omit_two_operands_loc (loc, type, build_zero_cst (type),
12211 : 6213 : arg0, arg1);
12212 : 97005 : else if (masked != warg1)
12213 : : {
12214 : : /* Avoid the transform if arg1 is a mask of some
12215 : : mode which allows further optimizations. */
12216 : 537 : int pop = wi::popcount (warg1);
12217 : 559 : if (!(pop >= BITS_PER_UNIT
12218 : 46 : && pow2p_hwi (pop)
12219 : 581 : && wi::mask (pop, false, warg1.get_precision ()) == warg1))
12220 : 515 : return fold_build2_loc (loc, code, type, op0,
12221 : : wide_int_to_tree (type, masked));
12222 : : }
12223 : 103218 : }
12224 : :
12225 : : /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
12226 : 3697392 : if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
12227 : 5475725 : && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
12228 : : {
12229 : 133459 : prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
12230 : :
12231 : 133459 : wide_int mask = wide_int::from (wi::to_wide (arg1), prec, UNSIGNED);
12232 : 133459 : if (mask == -1)
12233 : 160 : return
12234 : 160 : fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12235 : 133459 : }
12236 : :
12237 : 5208854 : goto associate;
12238 : :
12239 : 5937866 : case RDIV_EXPR:
12240 : : /* Don't touch a floating-point divide by zero unless the mode
12241 : : of the constant can represent infinity. */
12242 : 5937866 : if (TREE_CODE (arg1) == REAL_CST
12243 : 2830714 : && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
12244 : 5937866 : && real_zerop (arg1))
12245 : 0 : return NULL_TREE;
12246 : :
12247 : : /* (-A) / (-B) -> A / B */
12248 : 5937866 : if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
12249 : 6 : return fold_build2_loc (loc, RDIV_EXPR, type,
12250 : 3 : TREE_OPERAND (arg0, 0),
12251 : 3 : negate_expr (arg1));
12252 : 5937863 : if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
12253 : 0 : return fold_build2_loc (loc, RDIV_EXPR, type,
12254 : : negate_expr (arg0),
12255 : 0 : TREE_OPERAND (arg1, 0));
12256 : : return NULL_TREE;
12257 : :
12258 : 1826746 : case TRUNC_DIV_EXPR:
12259 : : /* Fall through */
12260 : :
12261 : 1826746 : case FLOOR_DIV_EXPR:
12262 : : /* Simplify A / (B << N) where A and B are positive and B is
12263 : : a power of 2, to A >> (N + log2(B)). */
12264 : 1826746 : strict_overflow_p = false;
12265 : 1826746 : if (TREE_CODE (arg1) == LSHIFT_EXPR
12266 : 1826746 : && (TYPE_UNSIGNED (type)
12267 : 8 : || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12268 : : {
12269 : 17 : tree sval = TREE_OPERAND (arg1, 0);
12270 : 17 : if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12271 : : {
12272 : 16 : tree sh_cnt = TREE_OPERAND (arg1, 1);
12273 : 16 : tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
12274 : 16 : wi::exact_log2 (wi::to_wide (sval)));
12275 : :
12276 : 16 : if (strict_overflow_p)
12277 : 0 : fold_overflow_warning (("assuming signed overflow does not "
12278 : : "occur when simplifying A / (B << N)"),
12279 : : WARN_STRICT_OVERFLOW_MISC);
12280 : :
12281 : 16 : sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12282 : : sh_cnt, pow2);
12283 : 16 : return fold_build2_loc (loc, RSHIFT_EXPR, type,
12284 : 16 : fold_convert_loc (loc, type, arg0), sh_cnt);
12285 : : }
12286 : : }
12287 : :
12288 : : /* Fall through */
12289 : :
12290 : 2917509 : case ROUND_DIV_EXPR:
12291 : 2917509 : case CEIL_DIV_EXPR:
12292 : 2917509 : case EXACT_DIV_EXPR:
12293 : 2917509 : if (integer_zerop (arg1))
12294 : : return NULL_TREE;
12295 : :
12296 : : /* Convert -A / -B to A / B when the type is signed and overflow is
12297 : : undefined. */
12298 : 2914692 : if ((!ANY_INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12299 : 826173 : && TREE_CODE (op0) == NEGATE_EXPR
12300 : 2914760 : && negate_expr_p (op1))
12301 : : {
12302 : 36 : if (ANY_INTEGRAL_TYPE_P (type))
12303 : 36 : fold_overflow_warning (("assuming signed overflow does not occur "
12304 : : "when distributing negation across "
12305 : : "division"),
12306 : : WARN_STRICT_OVERFLOW_MISC);
12307 : 72 : return fold_build2_loc (loc, code, type,
12308 : : fold_convert_loc (loc, type,
12309 : 36 : TREE_OPERAND (arg0, 0)),
12310 : 36 : negate_expr (op1));
12311 : : }
12312 : 2914656 : if ((!ANY_INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12313 : 826137 : && TREE_CODE (arg1) == NEGATE_EXPR
12314 : 2914900 : && negate_expr_p (op0))
12315 : : {
12316 : 36 : if (ANY_INTEGRAL_TYPE_P (type))
12317 : 36 : fold_overflow_warning (("assuming signed overflow does not occur "
12318 : : "when distributing negation across "
12319 : : "division"),
12320 : : WARN_STRICT_OVERFLOW_MISC);
12321 : 36 : return fold_build2_loc (loc, code, type,
12322 : : negate_expr (op0),
12323 : : fold_convert_loc (loc, type,
12324 : 72 : TREE_OPERAND (arg1, 0)));
12325 : : }
12326 : :
12327 : : /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12328 : : operation, EXACT_DIV_EXPR.
12329 : :
12330 : : Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12331 : : At one time others generated faster code, it's not clear if they do
12332 : : after the last round to changes to the DIV code in expmed.cc. */
12333 : 2914620 : if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12334 : 2914620 : && multiple_of_p (type, arg0, arg1))
12335 : 0 : return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
12336 : : fold_convert (type, arg0),
12337 : 0 : fold_convert (type, arg1));
12338 : :
12339 : 2914620 : strict_overflow_p = false;
12340 : 2914620 : if (TREE_CODE (arg1) == INTEGER_CST
12341 : 2914620 : && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12342 : : &strict_overflow_p)) != 0)
12343 : : {
12344 : 2099 : if (strict_overflow_p)
12345 : 157 : fold_overflow_warning (("assuming signed overflow does not occur "
12346 : : "when simplifying division"),
12347 : : WARN_STRICT_OVERFLOW_MISC);
12348 : 2099 : return fold_convert_loc (loc, type, tem);
12349 : : }
12350 : :
12351 : : return NULL_TREE;
12352 : :
12353 : 552316 : case CEIL_MOD_EXPR:
12354 : 552316 : case FLOOR_MOD_EXPR:
12355 : 552316 : case ROUND_MOD_EXPR:
12356 : 552316 : case TRUNC_MOD_EXPR:
12357 : 552316 : strict_overflow_p = false;
12358 : 552316 : if (TREE_CODE (arg1) == INTEGER_CST
12359 : 552316 : && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12360 : : &strict_overflow_p)) != 0)
12361 : : {
12362 : 0 : if (strict_overflow_p)
12363 : 0 : fold_overflow_warning (("assuming signed overflow does not occur "
12364 : : "when simplifying modulus"),
12365 : : WARN_STRICT_OVERFLOW_MISC);
12366 : 0 : return fold_convert_loc (loc, type, tem);
12367 : : }
12368 : :
12369 : : return NULL_TREE;
12370 : :
12371 : 1501379 : case LROTATE_EXPR:
12372 : 1501379 : case RROTATE_EXPR:
12373 : 1501379 : case RSHIFT_EXPR:
12374 : 1501379 : case LSHIFT_EXPR:
12375 : : /* Since negative shift count is not well-defined,
12376 : : don't try to compute it in the compiler. */
12377 : 1501379 : if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12378 : : return NULL_TREE;
12379 : :
12380 : 1500270 : prec = element_precision (type);
12381 : :
12382 : : /* If we have a rotate of a bit operation with the rotate count and
12383 : : the second operand of the bit operation both constant,
12384 : : permute the two operations. */
12385 : 2196 : if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12386 : 1786 : && (TREE_CODE (arg0) == BIT_AND_EXPR
12387 : 1786 : || TREE_CODE (arg0) == BIT_IOR_EXPR
12388 : 1786 : || TREE_CODE (arg0) == BIT_XOR_EXPR)
12389 : 1500270 : && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12390 : : {
12391 : 0 : tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12392 : 0 : tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12393 : 0 : return fold_build2_loc (loc, TREE_CODE (arg0), type,
12394 : : fold_build2_loc (loc, code, type,
12395 : : arg00, arg1),
12396 : : fold_build2_loc (loc, code, type,
12397 : 0 : arg01, arg1));
12398 : : }
12399 : :
12400 : : /* Two consecutive rotates adding up to the some integer
12401 : : multiple of the precision of the type can be ignored. */
12402 : 2196 : if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12403 : 1786 : && TREE_CODE (arg0) == RROTATE_EXPR
12404 : 0 : && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12405 : 1502466 : && wi::umod_trunc (wi::to_wide (arg1)
12406 : 1500270 : + wi::to_wide (TREE_OPERAND (arg0, 1)),
12407 : 1500270 : prec) == 0)
12408 : 0 : return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12409 : :
12410 : : return NULL_TREE;
12411 : :
12412 : 384234 : case MIN_EXPR:
12413 : 384234 : case MAX_EXPR:
12414 : 384234 : goto associate;
12415 : :
12416 : 4342407 : case TRUTH_ANDIF_EXPR:
12417 : : /* Note that the operands of this must be ints
12418 : : and their values must be 0 or 1.
12419 : : ("true" is a fixed value perhaps depending on the language.) */
12420 : : /* If first arg is constant zero, return it. */
12421 : 4342407 : if (integer_zerop (arg0))
12422 : 216156 : return fold_convert_loc (loc, type, arg0);
12423 : : /* FALLTHRU */
12424 : 13367905 : case TRUTH_AND_EXPR:
12425 : : /* If either arg is constant true, drop it. */
12426 : 13367905 : if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12427 : 1881527 : return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12428 : 653044 : if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12429 : : /* Preserve sequence points. */
12430 : 12098255 : && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12431 : 586742 : return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12432 : : /* If second arg is constant zero, result is zero, but first arg
12433 : : must be evaluated. */
12434 : 10899636 : if (integer_zerop (arg1))
12435 : 41167 : return omit_one_operand_loc (loc, type, arg1, arg0);
12436 : : /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12437 : : case will be handled here. */
12438 : 10858469 : if (integer_zerop (arg0))
12439 : 0 : return omit_one_operand_loc (loc, type, arg0, arg1);
12440 : :
12441 : : /* !X && X is always false. */
12442 : 10858469 : if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12443 : 10858469 : && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12444 : 0 : return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12445 : : /* X && !X is always false. */
12446 : 10858469 : if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12447 : 10858469 : && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12448 : 0 : return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12449 : :
12450 : : /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12451 : : means A >= Y && A != MAX, but in this case we know that
12452 : : A < X <= MAX. */
12453 : :
12454 : 10858469 : if (!TREE_SIDE_EFFECTS (arg0)
12455 : 10858469 : && !TREE_SIDE_EFFECTS (arg1))
12456 : : {
12457 : 9987450 : tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12458 : 9987450 : if (tem && !operand_equal_p (tem, arg0, 0))
12459 : 512 : return fold_convert (type,
12460 : : fold_build2_loc (loc, code, TREE_TYPE (arg1),
12461 : : tem, arg1));
12462 : :
12463 : 9986938 : tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12464 : 9986938 : if (tem && !operand_equal_p (tem, arg1, 0))
12465 : 9845 : return fold_convert (type,
12466 : : fold_build2_loc (loc, code, TREE_TYPE (arg0),
12467 : : arg0, tem));
12468 : : }
12469 : :
12470 : 10848112 : if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12471 : : != NULL_TREE)
12472 : : return tem;
12473 : :
12474 : : return NULL_TREE;
12475 : :
12476 : 3244791 : case TRUTH_ORIF_EXPR:
12477 : : /* Note that the operands of this must be ints
12478 : : and their values must be 0 or true.
12479 : : ("true" is a fixed value perhaps depending on the language.) */
12480 : : /* If first arg is constant true, return it. */
12481 : 3244791 : if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12482 : 120774 : return fold_convert_loc (loc, type, arg0);
12483 : : /* FALLTHRU */
12484 : 11259717 : case TRUTH_OR_EXPR:
12485 : : /* If either arg is constant zero, drop it. */
12486 : 11259717 : if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12487 : 134131 : return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12488 : 406469 : if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12489 : : /* Preserve sequence points. */
12490 : 11496266 : && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12491 : 359802 : return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12492 : : /* If second arg is constant true, result is true, but we must
12493 : : evaluate first arg. */
12494 : 10765784 : if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12495 : 35789 : return omit_one_operand_loc (loc, type, arg1, arg0);
12496 : : /* Likewise for first arg, but note this only occurs here for
12497 : : TRUTH_OR_EXPR. */
12498 : 10729995 : if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12499 : 0 : return omit_one_operand_loc (loc, type, arg0, arg1);
12500 : :
12501 : : /* !X || X is always true. */
12502 : 10729995 : if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12503 : 10729995 : && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12504 : 0 : return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12505 : : /* X || !X is always true. */
12506 : 10729995 : if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12507 : 10729995 : && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12508 : 1 : return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12509 : :
12510 : : /* (X && !Y) || (!X && Y) is X ^ Y */
12511 : 10729994 : if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12512 : 1577 : && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12513 : : {
12514 : 655 : tree a0, a1, l0, l1, n0, n1;
12515 : :
12516 : 655 : a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12517 : 655 : a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12518 : :
12519 : 655 : l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12520 : 655 : l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12521 : :
12522 : 655 : n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12523 : 655 : n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12524 : :
12525 : 655 : if ((operand_equal_p (n0, a0, 0)
12526 : 18 : && operand_equal_p (n1, a1, 0))
12527 : 663 : || (operand_equal_p (n0, a1, 0)
12528 : 3 : && operand_equal_p (n1, a0, 0)))
12529 : 13 : return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12530 : : }
12531 : :
12532 : 10729981 : if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12533 : : != NULL_TREE)
12534 : : return tem;
12535 : :
12536 : : return NULL_TREE;
12537 : :
12538 : 30548 : case TRUTH_XOR_EXPR:
12539 : : /* If the second arg is constant zero, drop it. */
12540 : 30548 : if (integer_zerop (arg1))
12541 : 0 : return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12542 : : /* If the second arg is constant true, this is a logical inversion. */
12543 : 30548 : if (integer_onep (arg1))
12544 : : {
12545 : 0 : tem = invert_truthvalue_loc (loc, arg0);
12546 : 0 : return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12547 : : }
12548 : : /* Identical arguments cancel to zero. */
12549 : 30548 : if (operand_equal_p (arg0, arg1, 0))
12550 : 0 : return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12551 : :
12552 : : /* !X ^ X is always true. */
12553 : 30548 : if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12554 : 30548 : && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12555 : 0 : return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12556 : :
12557 : : /* X ^ !X is always true. */
12558 : 30548 : if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12559 : 30548 : && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12560 : 0 : return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12561 : :
12562 : : return NULL_TREE;
12563 : :
12564 : 43051647 : case EQ_EXPR:
12565 : 43051647 : case NE_EXPR:
12566 : 43051647 : STRIP_NOPS (arg0);
12567 : 43051647 : STRIP_NOPS (arg1);
12568 : :
12569 : 43051647 : tem = fold_comparison (loc, code, type, op0, op1);
12570 : 43051647 : if (tem != NULL_TREE)
12571 : : return tem;
12572 : :
12573 : : /* bool_var != 1 becomes !bool_var. */
12574 : 44072436 : if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12575 : 43105191 : && code == NE_EXPR)
12576 : 53670 : return fold_convert_loc (loc, type,
12577 : : fold_build1_loc (loc, TRUTH_NOT_EXPR,
12578 : 107340 : TREE_TYPE (arg0), arg0));
12579 : :
12580 : : /* bool_var == 0 becomes !bool_var. */
12581 : 43965096 : if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12582 : 43802189 : && code == EQ_EXPR)
12583 : 198110 : return fold_convert_loc (loc, type,
12584 : : fold_build1_loc (loc, TRUTH_NOT_EXPR,
12585 : 396220 : TREE_TYPE (arg0), arg0));
12586 : :
12587 : : /* !exp != 0 becomes !exp */
12588 : 517555 : if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12589 : 43310634 : && code == NE_EXPR)
12590 : 512681 : return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12591 : :
12592 : : /* If this is an EQ or NE comparison with zero and ARG0 is
12593 : : (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12594 : : two operations, but the latter can be done in one less insn
12595 : : on machines that have only two-operand insns or on which a
12596 : : constant cannot be the first operand. */
12597 : 42281116 : if (TREE_CODE (arg0) == BIT_AND_EXPR
12598 : 42281116 : && integer_zerop (arg1))
12599 : : {
12600 : 1207612 : tree arg00 = TREE_OPERAND (arg0, 0);
12601 : 1207612 : tree arg01 = TREE_OPERAND (arg0, 1);
12602 : 1207612 : if (TREE_CODE (arg00) == LSHIFT_EXPR
12603 : 1207612 : && integer_onep (TREE_OPERAND (arg00, 0)))
12604 : : {
12605 : 4091 : tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12606 : 4091 : arg01, TREE_OPERAND (arg00, 1));
12607 : 4091 : tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12608 : 4091 : build_one_cst (TREE_TYPE (arg0)));
12609 : 4091 : return fold_build2_loc (loc, code, type,
12610 : 4091 : fold_convert_loc (loc, TREE_TYPE (arg1),
12611 : 4091 : tem), arg1);
12612 : : }
12613 : 1203521 : else if (TREE_CODE (arg01) == LSHIFT_EXPR
12614 : 1203521 : && integer_onep (TREE_OPERAND (arg01, 0)))
12615 : : {
12616 : 919 : tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12617 : 919 : arg00, TREE_OPERAND (arg01, 1));
12618 : 919 : tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12619 : 919 : build_one_cst (TREE_TYPE (arg0)));
12620 : 919 : return fold_build2_loc (loc, code, type,
12621 : 919 : fold_convert_loc (loc, TREE_TYPE (arg1),
12622 : 919 : tem), arg1);
12623 : : }
12624 : : }
12625 : :
12626 : : /* If this is a comparison of a field, we may be able to simplify it. */
12627 : 42276106 : if ((TREE_CODE (arg0) == COMPONENT_REF
12628 : 42276106 : || TREE_CODE (arg0) == BIT_FIELD_REF)
12629 : : /* Handle the constant case even without -O
12630 : : to make sure the warnings are given. */
12631 : 4206080 : && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12632 : : {
12633 : 3924106 : t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12634 : 3924106 : if (t1)
12635 : : return t1;
12636 : : }
12637 : :
12638 : : /* Optimize comparisons of strlen vs zero to a compare of the
12639 : : first character of the string vs zero. To wit,
12640 : : strlen(ptr) == 0 => *ptr == 0
12641 : : strlen(ptr) != 0 => *ptr != 0
12642 : : Other cases should reduce to one of these two (or a constant)
12643 : : due to the return value of strlen being unsigned. */
12644 : 41737230 : if (TREE_CODE (arg0) == CALL_EXPR && integer_zerop (arg1))
12645 : : {
12646 : 3328471 : tree fndecl = get_callee_fndecl (arg0);
12647 : :
12648 : 3328471 : if (fndecl
12649 : 3327584 : && fndecl_built_in_p (fndecl, BUILT_IN_STRLEN)
12650 : 538 : && call_expr_nargs (arg0) == 1
12651 : 3329009 : && (TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0)))
12652 : : == POINTER_TYPE))
12653 : : {
12654 : 538 : tree ptrtype
12655 : 538 : = build_pointer_type (build_qualified_type (char_type_node,
12656 : : TYPE_QUAL_CONST));
12657 : 1076 : tree ptr = fold_convert_loc (loc, ptrtype,
12658 : 538 : CALL_EXPR_ARG (arg0, 0));
12659 : 538 : tree iref = build_fold_indirect_ref_loc (loc, ptr);
12660 : 538 : return fold_build2_loc (loc, code, type, iref,
12661 : 538 : build_int_cst (TREE_TYPE (iref), 0));
12662 : : }
12663 : : }
12664 : :
12665 : : /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12666 : : of X. Similarly fold (X >> C) == 0 into X >= 0. */
12667 : 41736692 : if (TREE_CODE (arg0) == RSHIFT_EXPR
12668 : 29746 : && integer_zerop (arg1)
12669 : 41745486 : && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12670 : : {
12671 : 6780 : tree arg00 = TREE_OPERAND (arg0, 0);
12672 : 6780 : tree arg01 = TREE_OPERAND (arg0, 1);
12673 : 6780 : tree itype = TREE_TYPE (arg00);
12674 : 6780 : if (wi::to_wide (arg01) == element_precision (itype) - 1)
12675 : : {
12676 : 572 : if (TYPE_UNSIGNED (itype))
12677 : : {
12678 : 497 : itype = signed_type_for (itype);
12679 : 497 : arg00 = fold_convert_loc (loc, itype, arg00);
12680 : : }
12681 : 1114 : return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12682 : 572 : type, arg00, build_zero_cst (itype));
12683 : : }
12684 : : }
12685 : :
12686 : : /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12687 : : (X & C) == 0 when C is a single bit. */
12688 : 41736120 : if (TREE_CODE (arg0) == BIT_AND_EXPR
12689 : 1363209 : && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12690 : 840 : && integer_zerop (arg1)
12691 : 41736568 : && integer_pow2p (TREE_OPERAND (arg0, 1)))
12692 : : {
12693 : 140 : tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12694 : 140 : TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12695 : 140 : TREE_OPERAND (arg0, 1));
12696 : 280 : return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12697 : : type, tem,
12698 : 140 : fold_convert_loc (loc, TREE_TYPE (arg0),
12699 : 140 : arg1));
12700 : : }
12701 : :
12702 : : /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12703 : : constant C is a power of two, i.e. a single bit. */
12704 : 41735980 : if (TREE_CODE (arg0) == BIT_XOR_EXPR
12705 : 3757 : && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12706 : 0 : && integer_zerop (arg1)
12707 : 0 : && integer_pow2p (TREE_OPERAND (arg0, 1))
12708 : 41735980 : && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12709 : 0 : TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12710 : : {
12711 : 0 : tree arg00 = TREE_OPERAND (arg0, 0);
12712 : 0 : return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12713 : 0 : arg00, build_int_cst (TREE_TYPE (arg00), 0));
12714 : : }
12715 : :
12716 : : /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12717 : : when is C is a power of two, i.e. a single bit. */
12718 : 41735980 : if (TREE_CODE (arg0) == BIT_AND_EXPR
12719 : 1363069 : && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12720 : 9763 : && integer_zerop (arg1)
12721 : 9763 : && integer_pow2p (TREE_OPERAND (arg0, 1))
12722 : 41743491 : && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12723 : 7511 : TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12724 : : {
12725 : 0 : tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12726 : 0 : tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12727 : 0 : arg000, TREE_OPERAND (arg0, 1));
12728 : 0 : return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12729 : 0 : tem, build_int_cst (TREE_TYPE (tem), 0));
12730 : : }
12731 : :
12732 : 41735980 : if (TREE_CODE (arg0) == BIT_XOR_EXPR
12733 : 3757 : && TREE_CODE (arg1) == BIT_XOR_EXPR)
12734 : : {
12735 : 242 : tree arg00 = TREE_OPERAND (arg0, 0);
12736 : 242 : tree arg01 = TREE_OPERAND (arg0, 1);
12737 : 242 : tree arg10 = TREE_OPERAND (arg1, 0);
12738 : 242 : tree arg11 = TREE_OPERAND (arg1, 1);
12739 : 242 : tree itype = TREE_TYPE (arg0);
12740 : :
12741 : : /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12742 : : operand_equal_p guarantees no side-effects so we don't need
12743 : : to use omit_one_operand on Z. */
12744 : 242 : if (operand_equal_p (arg01, arg11, 0))
12745 : 8 : return fold_build2_loc (loc, code, type, arg00,
12746 : 8 : fold_convert_loc (loc, TREE_TYPE (arg00),
12747 : 8 : arg10));
12748 : 234 : if (operand_equal_p (arg01, arg10, 0))
12749 : 0 : return fold_build2_loc (loc, code, type, arg00,
12750 : 0 : fold_convert_loc (loc, TREE_TYPE (arg00),
12751 : 0 : arg11));
12752 : 234 : if (operand_equal_p (arg00, arg11, 0))
12753 : 0 : return fold_build2_loc (loc, code, type, arg01,
12754 : 0 : fold_convert_loc (loc, TREE_TYPE (arg01),
12755 : 0 : arg10));
12756 : 234 : if (operand_equal_p (arg00, arg10, 0))
12757 : 0 : return fold_build2_loc (loc, code, type, arg01,
12758 : 0 : fold_convert_loc (loc, TREE_TYPE (arg01),
12759 : 0 : arg11));
12760 : :
12761 : : /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12762 : 234 : if (TREE_CODE (arg01) == INTEGER_CST
12763 : 8 : && TREE_CODE (arg11) == INTEGER_CST)
12764 : : {
12765 : 8 : tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
12766 : : fold_convert_loc (loc, itype, arg11));
12767 : 8 : tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
12768 : 8 : return fold_build2_loc (loc, code, type, tem,
12769 : 8 : fold_convert_loc (loc, itype, arg10));
12770 : : }
12771 : : }
12772 : :
12773 : : /* Attempt to simplify equality/inequality comparisons of complex
12774 : : values. Only lower the comparison if the result is known or
12775 : : can be simplified to a single scalar comparison. */
12776 : 41735964 : if ((TREE_CODE (arg0) == COMPLEX_EXPR
12777 : 41733476 : || TREE_CODE (arg0) == COMPLEX_CST)
12778 : 2488 : && (TREE_CODE (arg1) == COMPLEX_EXPR
12779 : 2300 : || TREE_CODE (arg1) == COMPLEX_CST))
12780 : : {
12781 : 1709 : tree real0, imag0, real1, imag1;
12782 : 1709 : tree rcond, icond;
12783 : :
12784 : 1709 : if (TREE_CODE (arg0) == COMPLEX_EXPR)
12785 : : {
12786 : 1709 : real0 = TREE_OPERAND (arg0, 0);
12787 : 1709 : imag0 = TREE_OPERAND (arg0, 1);
12788 : : }
12789 : : else
12790 : : {
12791 : 0 : real0 = TREE_REALPART (arg0);
12792 : 0 : imag0 = TREE_IMAGPART (arg0);
12793 : : }
12794 : :
12795 : 1709 : if (TREE_CODE (arg1) == COMPLEX_EXPR)
12796 : : {
12797 : 188 : real1 = TREE_OPERAND (arg1, 0);
12798 : 188 : imag1 = TREE_OPERAND (arg1, 1);
12799 : : }
12800 : : else
12801 : : {
12802 : 1521 : real1 = TREE_REALPART (arg1);
12803 : 1521 : imag1 = TREE_IMAGPART (arg1);
12804 : : }
12805 : :
12806 : 1709 : rcond = fold_binary_loc (loc, code, type, real0, real1);
12807 : 1709 : if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12808 : : {
12809 : 11 : if (integer_zerop (rcond))
12810 : : {
12811 : 11 : if (code == EQ_EXPR)
12812 : 0 : return omit_two_operands_loc (loc, type, boolean_false_node,
12813 : 0 : imag0, imag1);
12814 : 11 : return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12815 : : }
12816 : : else
12817 : : {
12818 : 0 : if (code == NE_EXPR)
12819 : 0 : return omit_two_operands_loc (loc, type, boolean_true_node,
12820 : 0 : imag0, imag1);
12821 : 0 : return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12822 : : }
12823 : : }
12824 : :
12825 : 1698 : icond = fold_binary_loc (loc, code, type, imag0, imag1);
12826 : 1698 : if (icond && TREE_CODE (icond) == INTEGER_CST)
12827 : : {
12828 : 9 : if (integer_zerop (icond))
12829 : : {
12830 : 7 : if (code == EQ_EXPR)
12831 : 1 : return omit_two_operands_loc (loc, type, boolean_false_node,
12832 : 1 : real0, real1);
12833 : 6 : return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12834 : : }
12835 : : else
12836 : : {
12837 : 2 : if (code == NE_EXPR)
12838 : 1 : return omit_two_operands_loc (loc, type, boolean_true_node,
12839 : 1 : real0, real1);
12840 : 1 : return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
12841 : : }
12842 : : }
12843 : : }
12844 : :
12845 : : return NULL_TREE;
12846 : :
12847 : 34619901 : case LT_EXPR:
12848 : 34619901 : case GT_EXPR:
12849 : 34619901 : case LE_EXPR:
12850 : 34619901 : case GE_EXPR:
12851 : 34619901 : tem = fold_comparison (loc, code, type, op0, op1);
12852 : 34619901 : if (tem != NULL_TREE)
12853 : : return tem;
12854 : :
12855 : : /* Transform comparisons of the form X +- C CMP X. */
12856 : 33828452 : if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12857 : 4709277 : && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12858 : 50042 : && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12859 : 33828470 : && !HONOR_SNANS (arg0))
12860 : : {
12861 : 16 : tree arg01 = TREE_OPERAND (arg0, 1);
12862 : 16 : enum tree_code code0 = TREE_CODE (arg0);
12863 : 16 : int is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12864 : :
12865 : : /* (X - c) > X becomes false. */
12866 : 16 : if (code == GT_EXPR
12867 : 7 : && ((code0 == MINUS_EXPR && is_positive >= 0)
12868 : 3 : || (code0 == PLUS_EXPR && is_positive <= 0)))
12869 : 4 : return constant_boolean_node (0, type);
12870 : :
12871 : : /* Likewise (X + c) < X becomes false. */
12872 : 12 : if (code == LT_EXPR
12873 : 2 : && ((code0 == PLUS_EXPR && is_positive >= 0)
12874 : 0 : || (code0 == MINUS_EXPR && is_positive <= 0)))
12875 : 2 : return constant_boolean_node (0, type);
12876 : :
12877 : : /* Convert (X - c) <= X to true. */
12878 : 10 : if (!HONOR_NANS (arg1)
12879 : 6 : && code == LE_EXPR
12880 : 14 : && ((code0 == MINUS_EXPR && is_positive >= 0)
12881 : 0 : || (code0 == PLUS_EXPR && is_positive <= 0)))
12882 : 4 : return constant_boolean_node (1, type);
12883 : :
12884 : : /* Convert (X + c) >= X to true. */
12885 : 6 : if (!HONOR_NANS (arg1)
12886 : 2 : && code == GE_EXPR
12887 : 8 : && ((code0 == PLUS_EXPR && is_positive >= 0)
12888 : 0 : || (code0 == MINUS_EXPR && is_positive <= 0)))
12889 : 2 : return constant_boolean_node (1, type);
12890 : : }
12891 : :
12892 : : /* If we are comparing an ABS_EXPR with a constant, we can
12893 : : convert all the cases into explicit comparisons, but they may
12894 : : well not be faster than doing the ABS and one comparison.
12895 : : But ABS (X) <= C is a range comparison, which becomes a subtraction
12896 : : and a comparison, and is probably faster. */
12897 : 33828440 : if (code == LE_EXPR
12898 : 6750211 : && TREE_CODE (arg1) == INTEGER_CST
12899 : 4860462 : && TREE_CODE (arg0) == ABS_EXPR
12900 : 675 : && ! TREE_SIDE_EFFECTS (arg0)
12901 : 675 : && (tem = negate_expr (arg1)) != 0
12902 : 675 : && TREE_CODE (tem) == INTEGER_CST
12903 : 33829115 : && !TREE_OVERFLOW (tem))
12904 : 1350 : return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
12905 : : build2 (GE_EXPR, type,
12906 : 675 : TREE_OPERAND (arg0, 0), tem),
12907 : : build2 (LE_EXPR, type,
12908 : 1350 : TREE_OPERAND (arg0, 0), arg1));
12909 : :
12910 : : /* Convert ABS_EXPR<x> >= 0 to true. */
12911 : 33827765 : strict_overflow_p = false;
12912 : 33827765 : if (code == GE_EXPR
12913 : 3365853 : && (integer_zerop (arg1)
12914 : 2559356 : || (! HONOR_NANS (arg0)
12915 : 1940825 : && real_zerop (arg1)))
12916 : 34634439 : && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12917 : : {
12918 : 898 : if (strict_overflow_p)
12919 : 6 : fold_overflow_warning (("assuming signed overflow does not occur "
12920 : : "when simplifying comparison of "
12921 : : "absolute value and zero"),
12922 : : WARN_STRICT_OVERFLOW_CONDITIONAL);
12923 : 898 : return omit_one_operand_loc (loc, type,
12924 : : constant_boolean_node (true, type),
12925 : 898 : arg0);
12926 : : }
12927 : :
12928 : : /* Convert ABS_EXPR<x> < 0 to false. */
12929 : 33826867 : strict_overflow_p = false;
12930 : 33826867 : if (code == LT_EXPR
12931 : 11056455 : && (integer_zerop (arg1) || real_zerop (arg1))
12932 : 36353868 : && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12933 : : {
12934 : 3483 : if (strict_overflow_p)
12935 : 288 : fold_overflow_warning (("assuming signed overflow does not occur "
12936 : : "when simplifying comparison of "
12937 : : "absolute value and zero"),
12938 : : WARN_STRICT_OVERFLOW_CONDITIONAL);
12939 : 3483 : return omit_one_operand_loc (loc, type,
12940 : : constant_boolean_node (false, type),
12941 : 3483 : arg0);
12942 : : }
12943 : :
12944 : : /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12945 : : and similarly for >= into !=. */
12946 : 33823384 : if ((code == LT_EXPR || code == GE_EXPR)
12947 : 14417927 : && TYPE_UNSIGNED (TREE_TYPE (arg0))
12948 : 3409242 : && TREE_CODE (arg1) == LSHIFT_EXPR
12949 : 33824611 : && integer_onep (TREE_OPERAND (arg1, 0)))
12950 : 3152 : return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12951 : 1057 : build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12952 : 1057 : TREE_OPERAND (arg1, 1)),
12953 : 2114 : build_zero_cst (TREE_TYPE (arg0)));
12954 : :
12955 : : /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
12956 : : otherwise Y might be >= # of bits in X's type and thus e.g.
12957 : : (unsigned char) (1 << Y) for Y 15 might be 0.
12958 : : If the cast is widening, then 1 << Y should have unsigned type,
12959 : : otherwise if Y is number of bits in the signed shift type minus 1,
12960 : : we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
12961 : : 31 might be 0xffffffff80000000. */
12962 : 33822327 : if ((code == LT_EXPR || code == GE_EXPR)
12963 : 14416870 : && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12964 : 5070909 : || VECTOR_INTEGER_TYPE_P (TREE_TYPE (arg0)))
12965 : 9367438 : && TYPE_UNSIGNED (TREE_TYPE (arg0))
12966 : 2404150 : && CONVERT_EXPR_P (arg1)
12967 : 281791 : && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12968 : 42 : && (element_precision (TREE_TYPE (arg1))
12969 : 21 : >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
12970 : 14 : && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
12971 : 14 : || (element_precision (TREE_TYPE (arg1))
12972 : 7 : == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
12973 : 33822334 : && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12974 : : {
12975 : 7 : tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12976 : 7 : TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
12977 : 21 : return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12978 : 7 : fold_convert_loc (loc, TREE_TYPE (arg0), tem),
12979 : 14 : build_zero_cst (TREE_TYPE (arg0)));
12980 : : }
12981 : :
12982 : : return NULL_TREE;
12983 : :
12984 : 5004542 : case UNORDERED_EXPR:
12985 : 5004542 : case ORDERED_EXPR:
12986 : 5004542 : case UNLT_EXPR:
12987 : 5004542 : case UNLE_EXPR:
12988 : 5004542 : case UNGT_EXPR:
12989 : 5004542 : case UNGE_EXPR:
12990 : 5004542 : case UNEQ_EXPR:
12991 : 5004542 : case LTGT_EXPR:
12992 : : /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
12993 : 5004542 : {
12994 : 5004542 : tree targ0 = strip_float_extensions (arg0);
12995 : 5004542 : tree targ1 = strip_float_extensions (arg1);
12996 : 5004542 : tree newtype = TREE_TYPE (targ0);
12997 : :
12998 : 5004542 : if (element_precision (TREE_TYPE (targ1)) > element_precision (newtype))
12999 : 1223 : newtype = TREE_TYPE (targ1);
13000 : :
13001 : 5004542 : if (element_precision (newtype) < element_precision (TREE_TYPE (arg0))
13002 : 5004542 : && (!VECTOR_TYPE_P (type) || is_truth_type_for (newtype, type)))
13003 : 323 : return fold_build2_loc (loc, code, type,
13004 : : fold_convert_loc (loc, newtype, targ0),
13005 : 323 : fold_convert_loc (loc, newtype, targ1));
13006 : : }
13007 : :
13008 : : return NULL_TREE;
13009 : :
13010 : 5100168 : case COMPOUND_EXPR:
13011 : : /* When pedantic, a compound expression can be neither an lvalue
13012 : : nor an integer constant expression. */
13013 : 5100168 : if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13014 : : return NULL_TREE;
13015 : : /* Don't let (0, 0) be null pointer constant. */
13016 : 454902 : tem = integer_zerop (arg1) ? build1_loc (loc, NOP_EXPR, type, arg1)
13017 : 454902 : : fold_convert_loc (loc, type, arg1);
13018 : : return tem;
13019 : :
13020 : : default:
13021 : : return NULL_TREE;
13022 : : } /* switch (code) */
13023 : : }
13024 : :
13025 : : /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
13026 : : ((A & N) + B) & M -> (A + B) & M
13027 : : Similarly if (N & M) == 0,
13028 : : ((A | N) + B) & M -> (A + B) & M
13029 : : and for - instead of + (or unary - instead of +)
13030 : : and/or ^ instead of |.
13031 : : If B is constant and (B & M) == 0, fold into A & M.
13032 : :
13033 : : This function is a helper for match.pd patterns. Return non-NULL
13034 : : type in which the simplified operation should be performed only
13035 : : if any optimization is possible.
13036 : :
13037 : : ARG1 is M above, ARG00 is left operand of +/-, if CODE00 is BIT_*_EXPR,
13038 : : then ARG00{0,1} are operands of that bitop, otherwise CODE00 is ERROR_MARK.
13039 : : Similarly for ARG01, CODE01 and ARG01{0,1}, just for the right operand of
13040 : : +/-. */
13041 : : tree
13042 : 1012963 : fold_bit_and_mask (tree type, tree arg1, enum tree_code code,
13043 : : tree arg00, enum tree_code code00, tree arg000, tree arg001,
13044 : : tree arg01, enum tree_code code01, tree arg010, tree arg011,
13045 : : tree *pmop)
13046 : : {
13047 : 1012963 : gcc_assert (TREE_CODE (arg1) == INTEGER_CST);
13048 : 1012963 : gcc_assert (code == PLUS_EXPR || code == MINUS_EXPR || code == NEGATE_EXPR);
13049 : 1012963 : wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
13050 : 2025926 : if (~cst1 == 0
13051 : 3035097 : || (cst1 & (cst1 + 1)) != 0
13052 : 816849 : || !INTEGRAL_TYPE_P (type)
13053 : 816849 : || (!TYPE_OVERFLOW_WRAPS (type)
13054 : 36488 : && TREE_CODE (type) != INTEGER_TYPE)
13055 : 3657728 : || (wi::max_value (type) & cst1) != cst1)
13056 : : return NULL_TREE;
13057 : :
13058 : 816849 : enum tree_code codes[2] = { code00, code01 };
13059 : 816849 : tree arg0xx[4] = { arg000, arg001, arg010, arg011 };
13060 : 816849 : int which = 0;
13061 : 816849 : wide_int cst0;
13062 : :
13063 : : /* Now we know that arg0 is (C + D) or (C - D) or -C and
13064 : : arg1 (M) is == (1LL << cst) - 1.
13065 : : Store C into PMOP[0] and D into PMOP[1]. */
13066 : 816849 : pmop[0] = arg00;
13067 : 816849 : pmop[1] = arg01;
13068 : 816849 : which = code != NEGATE_EXPR;
13069 : :
13070 : 2449653 : for (; which >= 0; which--)
13071 : 1632804 : switch (codes[which])
13072 : : {
13073 : 20210 : case BIT_AND_EXPR:
13074 : 20210 : case BIT_IOR_EXPR:
13075 : 20210 : case BIT_XOR_EXPR:
13076 : 20210 : gcc_assert (TREE_CODE (arg0xx[2 * which + 1]) == INTEGER_CST);
13077 : 20210 : cst0 = wi::to_wide (arg0xx[2 * which + 1]) & cst1;
13078 : 20210 : if (codes[which] == BIT_AND_EXPR)
13079 : : {
13080 : 20096 : if (cst0 != cst1)
13081 : : break;
13082 : : }
13083 : 114 : else if (cst0 != 0)
13084 : : break;
13085 : : /* If C or D is of the form (A & N) where
13086 : : (N & M) == M, or of the form (A | N) or
13087 : : (A ^ N) where (N & M) == 0, replace it with A. */
13088 : 18665 : pmop[which] = arg0xx[2 * which];
13089 : 18665 : break;
13090 : 1612594 : case ERROR_MARK:
13091 : 1612594 : if (TREE_CODE (pmop[which]) != INTEGER_CST)
13092 : : break;
13093 : : /* If C or D is a N where (N & M) == 0, it can be
13094 : : omitted (replaced with 0). */
13095 : 675356 : if ((code == PLUS_EXPR
13096 : 147986 : || (code == MINUS_EXPR && which == 0))
13097 : 485664 : && (cst1 & wi::to_wide (pmop[which])) == 0)
13098 : 115971 : pmop[which] = build_int_cst (type, 0);
13099 : : /* Similarly, with C - N where (-N & M) == 0. */
13100 : 675356 : if (code == MINUS_EXPR
13101 : 337678 : && which == 1
13102 : 621892 : && (cst1 & -wi::to_wide (pmop[which])) == 0)
13103 : 135102 : pmop[which] = build_int_cst (type, 0);
13104 : : break;
13105 : 0 : default:
13106 : 0 : gcc_unreachable ();
13107 : : }
13108 : :
13109 : : /* Only build anything new if we optimized one or both arguments above. */
13110 : 816849 : if (pmop[0] == arg00 && pmop[1] == arg01)
13111 : : return NULL_TREE;
13112 : :
13113 : 268272 : if (TYPE_OVERFLOW_WRAPS (type))
13114 : : return type;
13115 : : else
13116 : 2135 : return unsigned_type_for (type);
13117 : 816849 : }
13118 : :
13119 : : /* Used by contains_label_[p1]. */
13120 : :
13121 : : struct contains_label_data
13122 : : {
13123 : : hash_set<tree> *pset;
13124 : : bool inside_switch_p;
13125 : : };
13126 : :
13127 : : /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13128 : : a LABEL_EXPR or CASE_LABEL_EXPR not inside of another SWITCH_EXPR; otherwise
13129 : : return NULL_TREE. Do not check the subtrees of GOTO_EXPR. */
13130 : :
13131 : : static tree
13132 : 2906738 : contains_label_1 (tree *tp, int *walk_subtrees, void *data)
13133 : : {
13134 : 2906738 : contains_label_data *d = (contains_label_data *) data;
13135 : 2906738 : switch (TREE_CODE (*tp))
13136 : : {
13137 : : case LABEL_EXPR:
13138 : : return *tp;
13139 : :
13140 : 0 : case CASE_LABEL_EXPR:
13141 : 0 : if (!d->inside_switch_p)
13142 : : return *tp;
13143 : : return NULL_TREE;
13144 : :
13145 : 0 : case SWITCH_EXPR:
13146 : 0 : if (!d->inside_switch_p)
13147 : : {
13148 : 0 : if (walk_tree (&SWITCH_COND (*tp), contains_label_1, data, d->pset))
13149 : 0 : return *tp;
13150 : 0 : d->inside_switch_p = true;
13151 : 0 : if (walk_tree (&SWITCH_BODY (*tp), contains_label_1, data, d->pset))
13152 : 0 : return *tp;
13153 : 0 : d->inside_switch_p = false;
13154 : 0 : *walk_subtrees = 0;
13155 : : }
13156 : : return NULL_TREE;
13157 : :
13158 : 5543 : case GOTO_EXPR:
13159 : 5543 : *walk_subtrees = 0;
13160 : 5543 : return NULL_TREE;
13161 : :
13162 : : default:
13163 : : return NULL_TREE;
13164 : : }
13165 : : }
13166 : :
13167 : : /* Return whether the sub-tree ST contains a label which is accessible from
13168 : : outside the sub-tree. */
13169 : :
13170 : : static bool
13171 : 195938 : contains_label_p (tree st)
13172 : : {
13173 : 195938 : hash_set<tree> pset;
13174 : 195938 : contains_label_data data = { &pset, false };
13175 : 195938 : return walk_tree (&st, contains_label_1, &data, &pset) != NULL_TREE;
13176 : 195938 : }
13177 : :
13178 : : /* Fold a ternary expression of code CODE and type TYPE with operands
13179 : : OP0, OP1, and OP2. Return the folded expression if folding is
13180 : : successful. Otherwise, return NULL_TREE. */
13181 : :
13182 : : tree
13183 : 26377597 : fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13184 : : tree op0, tree op1, tree op2)
13185 : : {
13186 : 26377597 : tree tem;
13187 : 26377597 : tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13188 : 26377597 : enum tree_code_class kind = TREE_CODE_CLASS (code);
13189 : :
13190 : 26377597 : gcc_assert (IS_EXPR_CODE_CLASS (kind)
13191 : : && TREE_CODE_LENGTH (code) == 3);
13192 : :
13193 : : /* If this is a commutative operation, and OP0 is a constant, move it
13194 : : to OP1 to reduce the number of tests below. */
13195 : 26377597 : if (commutative_ternary_tree_code (code)
13196 : 26377597 : && tree_swap_operands_p (op0, op1))
13197 : 125 : return fold_build3_loc (loc, code, type, op1, op0, op2);
13198 : :
13199 : 26377472 : tem = generic_simplify (loc, code, type, op0, op1, op2);
13200 : 26377472 : if (tem)
13201 : : return tem;
13202 : :
13203 : : /* Strip any conversions that don't change the mode. This is safe
13204 : : for every expression, except for a comparison expression because
13205 : : its signedness is derived from its operands. So, in the latter
13206 : : case, only strip conversions that don't change the signedness.
13207 : :
13208 : : Note that this is done as an internal manipulation within the
13209 : : constant folder, in order to find the simplest representation of
13210 : : the arguments so that their form can be studied. In any cases,
13211 : : the appropriate type conversions should be put back in the tree
13212 : : that will get out of the constant folder. */
13213 : 25528799 : if (op0)
13214 : : {
13215 : 25469436 : arg0 = op0;
13216 : 25469436 : STRIP_NOPS (arg0);
13217 : : }
13218 : :
13219 : 25528799 : if (op1)
13220 : : {
13221 : 25528799 : arg1 = op1;
13222 : 25528799 : STRIP_NOPS (arg1);
13223 : : }
13224 : :
13225 : 25528799 : if (op2)
13226 : : {
13227 : 10982427 : arg2 = op2;
13228 : 10982427 : STRIP_NOPS (arg2);
13229 : : }
13230 : :
13231 : 25528799 : switch (code)
13232 : : {
13233 : 14545946 : case COMPONENT_REF:
13234 : 14545946 : if (TREE_CODE (arg0) == CONSTRUCTOR
13235 : 14545946 : && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13236 : : {
13237 : : unsigned HOST_WIDE_INT idx;
13238 : : tree field, value;
13239 : 855 : FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13240 : 664 : if (field == arg1)
13241 : 376 : return value;
13242 : : }
13243 : : return NULL_TREE;
13244 : :
13245 : 9183077 : case COND_EXPR:
13246 : 9183077 : case VEC_COND_EXPR:
13247 : : /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13248 : : so all simple results must be passed through pedantic_non_lvalue. */
13249 : 9183077 : if (TREE_CODE (arg0) == INTEGER_CST)
13250 : : {
13251 : 318858 : tree unused_op = integer_zerop (arg0) ? op1 : op2;
13252 : 318858 : tem = integer_zerop (arg0) ? op2 : op1;
13253 : : /* Only optimize constant conditions when the selected branch
13254 : : has the same type as the COND_EXPR. This avoids optimizing
13255 : : away "c ? x : throw", where the throw has a void type.
13256 : : Avoid throwing away that operand which contains label. */
13257 : 318858 : if ((!TREE_SIDE_EFFECTS (unused_op)
13258 : 195938 : || !contains_label_p (unused_op))
13259 : 510658 : && (! VOID_TYPE_P (TREE_TYPE (tem))
13260 : 243823 : || VOID_TYPE_P (type)))
13261 : 310541 : return protected_set_expr_location_unshare (tem, loc);
13262 : 8317 : return NULL_TREE;
13263 : : }
13264 : 8864219 : else if (TREE_CODE (arg0) == VECTOR_CST)
13265 : : {
13266 : 823 : unsigned HOST_WIDE_INT nelts;
13267 : 823 : if ((TREE_CODE (arg1) == VECTOR_CST
13268 : 107 : || TREE_CODE (arg1) == CONSTRUCTOR)
13269 : 716 : && (TREE_CODE (arg2) == VECTOR_CST
13270 : 0 : || TREE_CODE (arg2) == CONSTRUCTOR)
13271 : 1646 : && TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts))
13272 : : {
13273 : 716 : vec_perm_builder sel (nelts, nelts, 1);
13274 : 8882 : for (unsigned int i = 0; i < nelts; i++)
13275 : : {
13276 : 8166 : tree val = VECTOR_CST_ELT (arg0, i);
13277 : 8166 : if (integer_all_onesp (val))
13278 : 4520 : sel.quick_push (i);
13279 : 3646 : else if (integer_zerop (val))
13280 : 3646 : sel.quick_push (nelts + i);
13281 : : else /* Currently unreachable. */
13282 : 716 : return NULL_TREE;
13283 : : }
13284 : 716 : vec_perm_indices indices (sel, 2, nelts);
13285 : 716 : tree t = fold_vec_perm (type, arg1, arg2, indices);
13286 : 716 : if (t != NULL_TREE)
13287 : 716 : return t;
13288 : 1432 : }
13289 : : }
13290 : :
13291 : : /* If we have A op B ? A : C, we may be able to convert this to a
13292 : : simpler expression, depending on the operation and the values
13293 : : of B and C. Signed zeros prevent all of these transformations,
13294 : : for reasons given above each one.
13295 : :
13296 : : Also try swapping the arguments and inverting the conditional. */
13297 : 8863503 : if (COMPARISON_CLASS_P (arg0)
13298 : 7401071 : && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op1)
13299 : 8980537 : && !HONOR_SIGNED_ZEROS (op1))
13300 : : {
13301 : 106737 : tem = fold_cond_expr_with_comparison (loc, type, TREE_CODE (arg0),
13302 : 106737 : TREE_OPERAND (arg0, 0),
13303 : 106737 : TREE_OPERAND (arg0, 1),
13304 : : op1, op2);
13305 : 106737 : if (tem)
13306 : : return tem;
13307 : : }
13308 : :
13309 : 8857156 : if (COMPARISON_CLASS_P (arg0)
13310 : 7394724 : && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op2)
13311 : 9266195 : && !HONOR_SIGNED_ZEROS (op2))
13312 : : {
13313 : 322287 : enum tree_code comp_code = TREE_CODE (arg0);
13314 : 322287 : tree arg00 = TREE_OPERAND (arg0, 0);
13315 : 322287 : tree arg01 = TREE_OPERAND (arg0, 1);
13316 : 322287 : comp_code = invert_tree_comparison (comp_code, HONOR_NANS (arg00));
13317 : 322287 : if (comp_code != ERROR_MARK)
13318 : 322287 : tem = fold_cond_expr_with_comparison (loc, type, comp_code,
13319 : : arg00,
13320 : : arg01,
13321 : : op2, op1);
13322 : 322287 : if (tem)
13323 : : return tem;
13324 : : }
13325 : :
13326 : : /* If the second operand is simpler than the third, swap them
13327 : : since that produces better jump optimization results. */
13328 : 8623272 : if (truth_value_p (TREE_CODE (arg0))
13329 : 8623272 : && tree_swap_operands_p (op1, op2))
13330 : : {
13331 : 1471274 : location_t loc0 = expr_location_or (arg0, loc);
13332 : : /* See if this can be inverted. If it can't, possibly because
13333 : : it was a floating-point inequality comparison, don't do
13334 : : anything. */
13335 : 1471274 : tem = fold_invert_truthvalue (loc0, arg0);
13336 : 1471274 : if (tem)
13337 : 953392 : return fold_build3_loc (loc, code, type, tem, op2, op1);
13338 : : }
13339 : :
13340 : : /* Convert A ? 1 : 0 to simply A. */
13341 : 7669880 : if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
13342 : 7385662 : : (integer_onep (op1)
13343 : 329547 : && !VECTOR_TYPE_P (type)))
13344 : 523892 : && integer_zerop (op2)
13345 : : /* If we try to convert OP0 to our type, the
13346 : : call to fold will try to move the conversion inside
13347 : : a COND, which will recurse. In that case, the COND_EXPR
13348 : : is probably the best choice, so leave it alone. */
13349 : 8469644 : && type == TREE_TYPE (arg0))
13350 : 28722 : return protected_set_expr_location_unshare (arg0, loc);
13351 : :
13352 : : /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13353 : : over COND_EXPR in cases such as floating point comparisons. */
13354 : 7641158 : if (integer_zerop (op1)
13355 : 239840 : && code == COND_EXPR
13356 : 238328 : && integer_onep (op2)
13357 : 31616 : && !VECTOR_TYPE_P (type)
13358 : 7672774 : && truth_value_p (TREE_CODE (arg0)))
13359 : 30106 : return fold_convert_loc (loc, type,
13360 : 30106 : invert_truthvalue_loc (loc, arg0));
13361 : :
13362 : : /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13363 : 7611052 : if (TREE_CODE (arg0) == LT_EXPR
13364 : 929020 : && integer_zerop (TREE_OPERAND (arg0, 1))
13365 : 13628 : && integer_zerop (op2)
13366 : 7611896 : && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13367 : : {
13368 : : /* sign_bit_p looks through both zero and sign extensions,
13369 : : but for this optimization only sign extensions are
13370 : : usable. */
13371 : 56 : tree tem2 = TREE_OPERAND (arg0, 0);
13372 : 56 : while (tem != tem2)
13373 : : {
13374 : 0 : if (TREE_CODE (tem2) != NOP_EXPR
13375 : 0 : || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
13376 : : {
13377 : : tem = NULL_TREE;
13378 : : break;
13379 : : }
13380 : 0 : tem2 = TREE_OPERAND (tem2, 0);
13381 : : }
13382 : : /* sign_bit_p only checks ARG1 bits within A's precision.
13383 : : If <sign bit of A> has wider type than A, bits outside
13384 : : of A's precision in <sign bit of A> need to be checked.
13385 : : If they are all 0, this optimization needs to be done
13386 : : in unsigned A's type, if they are all 1 in signed A's type,
13387 : : otherwise this can't be done. */
13388 : 56 : if (tem
13389 : 56 : && TYPE_PRECISION (TREE_TYPE (tem))
13390 : 56 : < TYPE_PRECISION (TREE_TYPE (arg1))
13391 : 112 : && TYPE_PRECISION (TREE_TYPE (tem))
13392 : 56 : < TYPE_PRECISION (type))
13393 : : {
13394 : 56 : int inner_width, outer_width;
13395 : 56 : tree tem_type;
13396 : :
13397 : 56 : inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13398 : 56 : outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13399 : 56 : if (outer_width > TYPE_PRECISION (type))
13400 : 0 : outer_width = TYPE_PRECISION (type);
13401 : :
13402 : 56 : wide_int mask = wi::shifted_mask
13403 : 56 : (inner_width, outer_width - inner_width, false,
13404 : 56 : TYPE_PRECISION (TREE_TYPE (arg1)));
13405 : :
13406 : 56 : wide_int common = mask & wi::to_wide (arg1);
13407 : 56 : if (common == mask)
13408 : : {
13409 : 28 : tem_type = signed_type_for (TREE_TYPE (tem));
13410 : 28 : tem = fold_convert_loc (loc, tem_type, tem);
13411 : : }
13412 : 28 : else if (common == 0)
13413 : : {
13414 : 0 : tem_type = unsigned_type_for (TREE_TYPE (tem));
13415 : 0 : tem = fold_convert_loc (loc, tem_type, tem);
13416 : : }
13417 : : else
13418 : : tem = NULL;
13419 : 56 : }
13420 : :
13421 : 56 : if (tem)
13422 : 28 : return
13423 : 56 : fold_convert_loc (loc, type,
13424 : : fold_build2_loc (loc, BIT_AND_EXPR,
13425 : 28 : TREE_TYPE (tem), tem,
13426 : : fold_convert_loc (loc,
13427 : 28 : TREE_TYPE (tem),
13428 : 28 : arg1)));
13429 : : }
13430 : :
13431 : : /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13432 : : already handled above. */
13433 : 7611024 : if (TREE_CODE (arg0) == BIT_AND_EXPR
13434 : 354 : && integer_onep (TREE_OPERAND (arg0, 1))
13435 : 3 : && integer_zerop (op2)
13436 : 7611024 : && integer_pow2p (arg1))
13437 : : {
13438 : 0 : tree tem = TREE_OPERAND (arg0, 0);
13439 : 0 : STRIP_NOPS (tem);
13440 : 0 : if (TREE_CODE (tem) == RSHIFT_EXPR
13441 : 0 : && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
13442 : 0 : && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
13443 : 0 : == tree_to_uhwi (TREE_OPERAND (tem, 1)))
13444 : 0 : return fold_build2_loc (loc, BIT_AND_EXPR, type,
13445 : : fold_convert_loc (loc, type,
13446 : 0 : TREE_OPERAND (tem, 0)),
13447 : 0 : op1);
13448 : : }
13449 : :
13450 : : /* A & N ? N : 0 is simply A & N if N is a power of two. This
13451 : : is probably obsolete because the first operand should be a
13452 : : truth value (that's why we have the two cases above), but let's
13453 : : leave it in until we can confirm this for all front-ends. */
13454 : 7611024 : if (integer_zerop (op2)
13455 : 1578348 : && TREE_CODE (arg0) == NE_EXPR
13456 : 352334 : && integer_zerop (TREE_OPERAND (arg0, 1))
13457 : 192649 : && integer_pow2p (arg1)
13458 : 28061 : && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13459 : 120 : && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13460 : : arg1, OEP_ONLY_CONST)
13461 : : /* operand_equal_p compares just value, not precision, so e.g.
13462 : : arg1 could be 8-bit -128 and be power of two, but BIT_AND_EXPR
13463 : : second operand 32-bit -128, which is not a power of two (or vice
13464 : : versa. */
13465 : 7611024 : && integer_pow2p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)))
13466 : 0 : return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
13467 : :
13468 : : /* Disable the transformations below for vectors, since
13469 : : fold_binary_op_with_conditional_arg may undo them immediately,
13470 : : yielding an infinite loop. */
13471 : 7611024 : if (code == VEC_COND_EXPR)
13472 : : return NULL_TREE;
13473 : :
13474 : : /* Convert A ? B : 0 into A && B if A and B are truth values. */
13475 : 7326806 : if (integer_zerop (op2)
13476 : 1326424 : && truth_value_p (TREE_CODE (arg0))
13477 : 1213554 : && truth_value_p (TREE_CODE (arg1))
13478 : 7357300 : && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13479 : 30494 : return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
13480 : : : TRUTH_ANDIF_EXPR,
13481 : 30494 : type, fold_convert_loc (loc, type, arg0), op1);
13482 : :
13483 : : /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13484 : 7296312 : if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
13485 : 384012 : && truth_value_p (TREE_CODE (arg0))
13486 : 285232 : && truth_value_p (TREE_CODE (arg1))
13487 : 7326499 : && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13488 : : {
13489 : 30187 : location_t loc0 = expr_location_or (arg0, loc);
13490 : : /* Only perform transformation if ARG0 is easily inverted. */
13491 : 30187 : tem = fold_invert_truthvalue (loc0, arg0);
13492 : 30187 : if (tem)
13493 : 29923 : return fold_build2_loc (loc, code == VEC_COND_EXPR
13494 : : ? BIT_IOR_EXPR
13495 : : : TRUTH_ORIF_EXPR,
13496 : : type, fold_convert_loc (loc, type, tem),
13497 : 29923 : op1);
13498 : : }
13499 : :
13500 : : /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13501 : 7266389 : if (integer_zerop (arg1)
13502 : 208296 : && truth_value_p (TREE_CODE (arg0))
13503 : 45517 : && truth_value_p (TREE_CODE (op2))
13504 : 7266416 : && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13505 : : {
13506 : 27 : location_t loc0 = expr_location_or (arg0, loc);
13507 : : /* Only perform transformation if ARG0 is easily inverted. */
13508 : 27 : tem = fold_invert_truthvalue (loc0, arg0);
13509 : 27 : if (tem)
13510 : 0 : return fold_build2_loc (loc, code == VEC_COND_EXPR
13511 : : ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
13512 : : type, fold_convert_loc (loc, type, tem),
13513 : 0 : op2);
13514 : : }
13515 : :
13516 : : /* Convert A ? 1 : B into A || B if A and B are truth values. */
13517 : 7266389 : if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
13518 : 300825 : && truth_value_p (TREE_CODE (arg0))
13519 : 229229 : && truth_value_p (TREE_CODE (op2))
13520 : 7266575 : && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13521 : 186 : return fold_build2_loc (loc, code == VEC_COND_EXPR
13522 : : ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
13523 : 186 : type, fold_convert_loc (loc, type, arg0), op2);
13524 : :
13525 : : return NULL_TREE;
13526 : :
13527 : 0 : case CALL_EXPR:
13528 : : /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13529 : : of fold_ternary on them. */
13530 : 0 : gcc_unreachable ();
13531 : :
13532 : 637787 : case BIT_FIELD_REF:
13533 : 637787 : if (TREE_CODE (arg0) == VECTOR_CST
13534 : 31007 : && (type == TREE_TYPE (TREE_TYPE (arg0))
13535 : 1840 : || (VECTOR_TYPE_P (type)
13536 : 969 : && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0))))
13537 : 30116 : && tree_fits_uhwi_p (op1)
13538 : 667903 : && tree_fits_uhwi_p (op2))
13539 : : {
13540 : 30116 : tree eltype = TREE_TYPE (TREE_TYPE (arg0));
13541 : 30116 : unsigned HOST_WIDE_INT width
13542 : 30116 : = (TREE_CODE (eltype) == BOOLEAN_TYPE
13543 : 30116 : ? TYPE_PRECISION (eltype) : tree_to_uhwi (TYPE_SIZE (eltype)));
13544 : 30116 : unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
13545 : 30116 : unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
13546 : :
13547 : 30116 : if (n != 0
13548 : 30116 : && (idx % width) == 0
13549 : 30116 : && (n % width) == 0
13550 : 60232 : && known_le ((idx + n) / width,
13551 : : TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))))
13552 : : {
13553 : 30116 : idx = idx / width;
13554 : 30116 : n = n / width;
13555 : :
13556 : 30116 : if (TREE_CODE (arg0) == VECTOR_CST)
13557 : : {
13558 : 30116 : if (n == 1)
13559 : : {
13560 : 29172 : tem = VECTOR_CST_ELT (arg0, idx);
13561 : 29172 : if (VECTOR_TYPE_P (type))
13562 : 5 : tem = fold_build1 (VIEW_CONVERT_EXPR, type, tem);
13563 : 29172 : return tem;
13564 : : }
13565 : :
13566 : 944 : tree_vector_builder vals (type, n, 1);
13567 : 5104 : for (unsigned i = 0; i < n; ++i)
13568 : 4160 : vals.quick_push (VECTOR_CST_ELT (arg0, idx + i));
13569 : 944 : return vals.build ();
13570 : 944 : }
13571 : : }
13572 : : }
13573 : :
13574 : : /* On constants we can use native encode/interpret to constant
13575 : : fold (nearly) all BIT_FIELD_REFs. */
13576 : 607671 : if (CONSTANT_CLASS_P (arg0)
13577 : 1670 : && can_native_interpret_type_p (type)
13578 : : && BITS_PER_UNIT == 8
13579 : 1670 : && tree_fits_uhwi_p (op1)
13580 : 609341 : && tree_fits_uhwi_p (op2))
13581 : : {
13582 : 1670 : unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13583 : 1670 : unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
13584 : : /* Limit us to a reasonable amount of work. To relax the
13585 : : other limitations we need bit-shifting of the buffer
13586 : : and rounding up the size. */
13587 : 1670 : if (bitpos % BITS_PER_UNIT == 0
13588 : 1670 : && bitsize % BITS_PER_UNIT == 0
13589 : 1670 : && bitsize <= MAX_BITSIZE_MODE_ANY_MODE)
13590 : : {
13591 : 1670 : unsigned char b[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
13592 : 1670 : unsigned HOST_WIDE_INT len
13593 : 1670 : = native_encode_expr (arg0, b, bitsize / BITS_PER_UNIT,
13594 : 1670 : bitpos / BITS_PER_UNIT);
13595 : 1670 : if (len > 0
13596 : 1670 : && len * BITS_PER_UNIT >= bitsize)
13597 : : {
13598 : 1670 : tree v = native_interpret_expr (type, b,
13599 : : bitsize / BITS_PER_UNIT);
13600 : 1670 : if (v)
13601 : 1662 : return v;
13602 : : }
13603 : : }
13604 : : }
13605 : :
13606 : : return NULL_TREE;
13607 : :
13608 : 572827 : case VEC_PERM_EXPR:
13609 : : /* Perform constant folding of BIT_INSERT_EXPR. */
13610 : 572827 : if (TREE_CODE (arg2) == VECTOR_CST
13611 : 561696 : && TREE_CODE (op0) == VECTOR_CST
13612 : 11670 : && TREE_CODE (op1) == VECTOR_CST)
13613 : : {
13614 : : /* Build a vector of integers from the tree mask. */
13615 : 2085 : vec_perm_builder builder;
13616 : 2085 : if (!tree_to_vec_perm_builder (&builder, arg2))
13617 : : return NULL_TREE;
13618 : :
13619 : : /* Create a vec_perm_indices for the integer vector. */
13620 : 2085 : poly_uint64 nelts = TYPE_VECTOR_SUBPARTS (type);
13621 : 2085 : bool single_arg = (op0 == op1);
13622 : 4170 : vec_perm_indices sel (builder, single_arg ? 1 : 2, nelts);
13623 : 2085 : return fold_vec_perm (type, op0, op1, sel);
13624 : 4170 : }
13625 : : return NULL_TREE;
13626 : :
13627 : 11692 : case BIT_INSERT_EXPR:
13628 : : /* Perform (partial) constant folding of BIT_INSERT_EXPR. */
13629 : 11692 : if (TREE_CODE (arg0) == INTEGER_CST
13630 : 0 : && TREE_CODE (arg1) == INTEGER_CST)
13631 : : {
13632 : 0 : unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13633 : 0 : unsigned bitsize = TYPE_PRECISION (TREE_TYPE (arg1));
13634 : 0 : wide_int tem = (wi::to_wide (arg0)
13635 : 0 : & wi::shifted_mask (bitpos, bitsize, true,
13636 : 0 : TYPE_PRECISION (type)));
13637 : 0 : wide_int tem2
13638 : 0 : = wi::lshift (wi::zext (wi::to_wide (arg1, TYPE_PRECISION (type)),
13639 : 0 : bitsize), bitpos);
13640 : 0 : return wide_int_to_tree (type, wi::bit_or (tem, tem2));
13641 : 0 : }
13642 : 11692 : else if (TREE_CODE (arg0) == VECTOR_CST
13643 : 690 : && CONSTANT_CLASS_P (arg1)
13644 : 11786 : && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0)),
13645 : 94 : TREE_TYPE (arg1)))
13646 : : {
13647 : 94 : unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13648 : 94 : unsigned HOST_WIDE_INT elsize
13649 : 94 : = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1)));
13650 : 94 : if (bitpos % elsize == 0)
13651 : : {
13652 : 94 : unsigned k = bitpos / elsize;
13653 : 94 : unsigned HOST_WIDE_INT nelts;
13654 : 94 : if (operand_equal_p (VECTOR_CST_ELT (arg0, k), arg1, 0))
13655 : 26377597 : return arg0;
13656 : 85 : else if (VECTOR_CST_NELTS (arg0).is_constant (&nelts))
13657 : : {
13658 : 85 : tree_vector_builder elts (type, nelts, 1);
13659 : 85 : elts.quick_grow (nelts);
13660 : 319 : for (unsigned HOST_WIDE_INT i = 0; i < nelts; ++i)
13661 : 234 : elts[i] = (i == k ? arg1 : VECTOR_CST_ELT (arg0, i));
13662 : 85 : return elts.build ();
13663 : 85 : }
13664 : : }
13665 : : }
13666 : : return NULL_TREE;
13667 : :
13668 : : default:
13669 : : return NULL_TREE;
13670 : : } /* switch (code) */
13671 : : }
13672 : :
13673 : : /* Gets the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
13674 : : of an array (or vector). *CTOR_IDX if non-NULL is updated with the
13675 : : constructor element index of the value returned. If the element is
13676 : : not found NULL_TREE is returned and *CTOR_IDX is updated to
13677 : : the index of the element after the ACCESS_INDEX position (which
13678 : : may be outside of the CTOR array). */
13679 : :
13680 : : tree
13681 : 626326 : get_array_ctor_element_at_index (tree ctor, offset_int access_index,
13682 : : unsigned *ctor_idx)
13683 : : {
13684 : 626326 : tree index_type = NULL_TREE;
13685 : 626326 : signop index_sgn = UNSIGNED;
13686 : 626326 : offset_int low_bound = 0;
13687 : :
13688 : 626326 : if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
13689 : : {
13690 : 626326 : tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
13691 : 626326 : if (domain_type && TYPE_MIN_VALUE (domain_type))
13692 : : {
13693 : : /* Static constructors for variably sized objects makes no sense. */
13694 : 626326 : gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
13695 : 626326 : index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
13696 : : /* ??? When it is obvious that the range is signed, treat it so. */
13697 : 626326 : if (TYPE_UNSIGNED (index_type)
13698 : 346847 : && TYPE_MAX_VALUE (domain_type)
13699 : 973139 : && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type),
13700 : 346813 : TYPE_MIN_VALUE (domain_type)))
13701 : : {
13702 : 0 : index_sgn = SIGNED;
13703 : 0 : low_bound
13704 : 0 : = offset_int::from (wi::to_wide (TYPE_MIN_VALUE (domain_type)),
13705 : : SIGNED);
13706 : : }
13707 : : else
13708 : : {
13709 : 626326 : index_sgn = TYPE_SIGN (index_type);
13710 : 626326 : low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
13711 : : }
13712 : : }
13713 : : }
13714 : :
13715 : 626326 : if (index_type)
13716 : 626326 : access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
13717 : : index_sgn);
13718 : :
13719 : 626326 : offset_int index = low_bound;
13720 : 626326 : if (index_type)
13721 : 626326 : index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
13722 : :
13723 : 626326 : offset_int max_index = index;
13724 : 626326 : unsigned cnt;
13725 : 626326 : tree cfield, cval;
13726 : 626326 : bool first_p = true;
13727 : :
13728 : 11282249 : FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
13729 : : {
13730 : : /* Array constructor might explicitly set index, or specify a range,
13731 : : or leave index NULL meaning that it is next index after previous
13732 : : one. */
13733 : 11280568 : if (cfield)
13734 : : {
13735 : 3160792 : if (TREE_CODE (cfield) == INTEGER_CST)
13736 : 6320440 : max_index = index
13737 : 3160220 : = offset_int::from (wi::to_wide (cfield), index_sgn);
13738 : : else
13739 : : {
13740 : 572 : gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
13741 : 572 : index = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 0)),
13742 : : index_sgn);
13743 : 572 : max_index
13744 : 572 : = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 1)),
13745 : : index_sgn);
13746 : 572 : gcc_checking_assert (wi::le_p (index, max_index, index_sgn));
13747 : : }
13748 : : }
13749 : 8119776 : else if (!first_p)
13750 : : {
13751 : 7869960 : index = max_index + 1;
13752 : 7869960 : if (index_type)
13753 : 7869960 : index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
13754 : 7869960 : gcc_checking_assert (wi::gt_p (index, max_index, index_sgn));
13755 : 7869960 : max_index = index;
13756 : : }
13757 : : else
13758 : : first_p = false;
13759 : :
13760 : : /* Do we have match? */
13761 : 11280568 : if (wi::cmp (access_index, index, index_sgn) >= 0)
13762 : : {
13763 : 11280285 : if (wi::cmp (access_index, max_index, index_sgn) <= 0)
13764 : : {
13765 : 624534 : if (ctor_idx)
13766 : 32658 : *ctor_idx = cnt;
13767 : 624534 : return cval;
13768 : : }
13769 : : }
13770 : 283 : else if (in_gimple_form)
13771 : : /* We're past the element we search for. Note during parsing
13772 : : the elements might not be sorted.
13773 : : ??? We should use a binary search and a flag on the
13774 : : CONSTRUCTOR as to whether elements are sorted in declaration
13775 : : order. */
13776 : : break;
13777 : : }
13778 : 1792 : if (ctor_idx)
13779 : 3 : *ctor_idx = cnt;
13780 : : return NULL_TREE;
13781 : : }
13782 : :
13783 : : /* Perform constant folding and related simplification of EXPR.
13784 : : The related simplifications include x*1 => x, x*0 => 0, etc.,
13785 : : and application of the associative law.
13786 : : NOP_EXPR conversions may be removed freely (as long as we
13787 : : are careful not to change the type of the overall expression).
13788 : : We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13789 : : but we can constant-fold them if they have constant operands. */
13790 : :
13791 : : #ifdef ENABLE_FOLD_CHECKING
13792 : : # define fold(x) fold_1 (x)
13793 : : static tree fold_1 (tree);
13794 : : static
13795 : : #endif
13796 : : tree
13797 : 1186517051 : fold (tree expr)
13798 : : {
13799 : 1186644470 : const tree t = expr;
13800 : 1186644470 : enum tree_code code = TREE_CODE (t);
13801 : 1186644470 : enum tree_code_class kind = TREE_CODE_CLASS (code);
13802 : 1186644470 : tree tem;
13803 : 1186644470 : location_t loc = EXPR_LOCATION (expr);
13804 : :
13805 : : /* Return right away if a constant. */
13806 : 1186644470 : if (kind == tcc_constant)
13807 : : return t;
13808 : :
13809 : : /* CALL_EXPR-like objects with variable numbers of operands are
13810 : : treated specially. */
13811 : 1072993663 : if (kind == tcc_vl_exp)
13812 : : {
13813 : 148854233 : if (code == CALL_EXPR)
13814 : : {
13815 : 148853937 : tem = fold_call_expr (loc, expr, false);
13816 : 295465859 : return tem ? tem : expr;
13817 : : }
13818 : : return expr;
13819 : : }
13820 : :
13821 : 924139430 : if (IS_EXPR_CODE_CLASS (kind))
13822 : : {
13823 : 922135893 : tree type = TREE_TYPE (t);
13824 : 922135893 : tree op0, op1, op2;
13825 : :
13826 : 922135893 : switch (TREE_CODE_LENGTH (code))
13827 : : {
13828 : 833820441 : case 1:
13829 : 833820441 : op0 = TREE_OPERAND (t, 0);
13830 : 833820441 : tem = fold_unary_loc (loc, code, type, op0);
13831 : 1424706685 : return tem ? tem : expr;
13832 : 79877188 : case 2:
13833 : 79877188 : op0 = TREE_OPERAND (t, 0);
13834 : 79877188 : op1 = TREE_OPERAND (t, 1);
13835 : 79877188 : tem = fold_binary_loc (loc, code, type, op0, op1);
13836 : 152126576 : return tem ? tem : expr;
13837 : 4056011 : case 3:
13838 : 4056011 : op0 = TREE_OPERAND (t, 0);
13839 : 4056011 : op1 = TREE_OPERAND (t, 1);
13840 : 4056011 : op2 = TREE_OPERAND (t, 2);
13841 : 4056011 : tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13842 : 7919016 : return tem ? tem : expr;
13843 : : default:
13844 : : break;
13845 : : }
13846 : : }
13847 : :
13848 : 6385790 : switch (code)
13849 : : {
13850 : 4281805 : case ARRAY_REF:
13851 : 4281805 : {
13852 : 4281805 : tree op0 = TREE_OPERAND (t, 0);
13853 : 4281805 : tree op1 = TREE_OPERAND (t, 1);
13854 : :
13855 : 4281805 : if (TREE_CODE (op1) == INTEGER_CST
13856 : 2788640 : && TREE_CODE (op0) == CONSTRUCTOR
13857 : 4281810 : && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13858 : : {
13859 : 10 : tree val = get_array_ctor_element_at_index (op0,
13860 : 5 : wi::to_offset (op1));
13861 : 5 : if (val)
13862 : : return val;
13863 : : }
13864 : :
13865 : : return t;
13866 : : }
13867 : :
13868 : : /* Return a VECTOR_CST if possible. */
13869 : 107148 : case CONSTRUCTOR:
13870 : 107148 : {
13871 : 107148 : tree type = TREE_TYPE (t);
13872 : 107148 : if (TREE_CODE (type) != VECTOR_TYPE)
13873 : : return t;
13874 : :
13875 : : unsigned i;
13876 : : tree val;
13877 : 253007 : FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
13878 : 218450 : if (! CONSTANT_CLASS_P (val))
13879 : : return t;
13880 : :
13881 : 34557 : return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
13882 : : }
13883 : :
13884 : 127419 : case CONST_DECL:
13885 : 127419 : return fold (DECL_INITIAL (t));
13886 : :
13887 : : default:
13888 : : return t;
13889 : : } /* switch (code) */
13890 : : }
13891 : :
13892 : : #ifdef ENABLE_FOLD_CHECKING
13893 : : #undef fold
13894 : :
13895 : : static void fold_checksum_tree (const_tree, struct md5_ctx *,
13896 : : hash_table<nofree_ptr_hash<const tree_node> > *);
13897 : : static void fold_check_failed (const_tree, const_tree);
13898 : : void print_fold_checksum (const_tree);
13899 : :
13900 : : /* When --enable-checking=fold, compute a digest of expr before
13901 : : and after actual fold call to see if fold did not accidentally
13902 : : change original expr. */
13903 : :
13904 : : tree
13905 : : fold (tree expr)
13906 : : {
13907 : : tree ret;
13908 : : struct md5_ctx ctx;
13909 : : unsigned char checksum_before[16], checksum_after[16];
13910 : : hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13911 : :
13912 : : md5_init_ctx (&ctx);
13913 : : fold_checksum_tree (expr, &ctx, &ht);
13914 : : md5_finish_ctx (&ctx, checksum_before);
13915 : : ht.empty ();
13916 : :
13917 : : ret = fold_1 (expr);
13918 : :
13919 : : md5_init_ctx (&ctx);
13920 : : fold_checksum_tree (expr, &ctx, &ht);
13921 : : md5_finish_ctx (&ctx, checksum_after);
13922 : :
13923 : : if (memcmp (checksum_before, checksum_after, 16))
13924 : : fold_check_failed (expr, ret);
13925 : :
13926 : : return ret;
13927 : : }
13928 : :
13929 : : void
13930 : : print_fold_checksum (const_tree expr)
13931 : : {
13932 : : struct md5_ctx ctx;
13933 : : unsigned char checksum[16], cnt;
13934 : : hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13935 : :
13936 : : md5_init_ctx (&ctx);
13937 : : fold_checksum_tree (expr, &ctx, &ht);
13938 : : md5_finish_ctx (&ctx, checksum);
13939 : : for (cnt = 0; cnt < 16; ++cnt)
13940 : : fprintf (stderr, "%02x", checksum[cnt]);
13941 : : putc ('\n', stderr);
13942 : : }
13943 : :
13944 : : static void
13945 : : fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13946 : : {
13947 : : internal_error ("fold check: original tree changed by fold");
13948 : : }
13949 : :
13950 : : static void
13951 : : fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
13952 : : hash_table<nofree_ptr_hash <const tree_node> > *ht)
13953 : : {
13954 : : const tree_node **slot;
13955 : : enum tree_code code;
13956 : : union tree_node *buf;
13957 : : int i, len;
13958 : :
13959 : : recursive_label:
13960 : : if (expr == NULL)
13961 : : return;
13962 : : slot = ht->find_slot (expr, INSERT);
13963 : : if (*slot != NULL)
13964 : : return;
13965 : : *slot = expr;
13966 : : code = TREE_CODE (expr);
13967 : : if (TREE_CODE_CLASS (code) == tcc_declaration
13968 : : && HAS_DECL_ASSEMBLER_NAME_P (expr))
13969 : : {
13970 : : /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
13971 : : size_t sz = tree_size (expr);
13972 : : buf = XALLOCAVAR (union tree_node, sz);
13973 : : memcpy ((char *) buf, expr, sz);
13974 : : SET_DECL_ASSEMBLER_NAME ((tree) buf, NULL);
13975 : : buf->decl_with_vis.symtab_node = NULL;
13976 : : buf->base.nowarning_flag = 0;
13977 : : expr = (tree) buf;
13978 : : }
13979 : : else if (TREE_CODE_CLASS (code) == tcc_type
13980 : : && (TYPE_POINTER_TO (expr)
13981 : : || TYPE_REFERENCE_TO (expr)
13982 : : || TYPE_CACHED_VALUES_P (expr)
13983 : : || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
13984 : : || TYPE_NEXT_VARIANT (expr)
13985 : : || TYPE_ALIAS_SET_KNOWN_P (expr)))
13986 : : {
13987 : : /* Allow these fields to be modified. */
13988 : : tree tmp;
13989 : : size_t sz = tree_size (expr);
13990 : : buf = XALLOCAVAR (union tree_node, sz);
13991 : : memcpy ((char *) buf, expr, sz);
13992 : : expr = tmp = (tree) buf;
13993 : : TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13994 : : TYPE_POINTER_TO (tmp) = NULL;
13995 : : TYPE_REFERENCE_TO (tmp) = NULL;
13996 : : TYPE_NEXT_VARIANT (tmp) = NULL;
13997 : : TYPE_ALIAS_SET (tmp) = -1;
13998 : : if (TYPE_CACHED_VALUES_P (tmp))
13999 : : {
14000 : : TYPE_CACHED_VALUES_P (tmp) = 0;
14001 : : TYPE_CACHED_VALUES (tmp) = NULL;
14002 : : }
14003 : : }
14004 : : else if (warning_suppressed_p (expr) && (DECL_P (expr) || EXPR_P (expr)))
14005 : : {
14006 : : /* Allow the no-warning bit to be set. Perhaps we shouldn't allow
14007 : : that and change builtins.cc etc. instead - see PR89543. */
14008 : : size_t sz = tree_size (expr);
14009 : : buf = XALLOCAVAR (union tree_node, sz);
14010 : : memcpy ((char *) buf, expr, sz);
14011 : : buf->base.nowarning_flag = 0;
14012 : : expr = (tree) buf;
14013 : : }
14014 : : md5_process_bytes (expr, tree_size (expr), ctx);
14015 : : if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14016 : : fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14017 : : if (TREE_CODE_CLASS (code) != tcc_type
14018 : : && TREE_CODE_CLASS (code) != tcc_declaration
14019 : : && code != TREE_LIST
14020 : : && code != SSA_NAME
14021 : : && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14022 : : fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14023 : : switch (TREE_CODE_CLASS (code))
14024 : : {
14025 : : case tcc_constant:
14026 : : switch (code)
14027 : : {
14028 : : case STRING_CST:
14029 : : md5_process_bytes (TREE_STRING_POINTER (expr),
14030 : : TREE_STRING_LENGTH (expr), ctx);
14031 : : break;
14032 : : case COMPLEX_CST:
14033 : : fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14034 : : fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14035 : : break;
14036 : : case VECTOR_CST:
14037 : : len = vector_cst_encoded_nelts (expr);
14038 : : for (i = 0; i < len; ++i)
14039 : : fold_checksum_tree (VECTOR_CST_ENCODED_ELT (expr, i), ctx, ht);
14040 : : break;
14041 : : default:
14042 : : break;
14043 : : }
14044 : : break;
14045 : : case tcc_exceptional:
14046 : : switch (code)
14047 : : {
14048 : : case TREE_LIST:
14049 : : fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14050 : : fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14051 : : expr = TREE_CHAIN (expr);
14052 : : goto recursive_label;
14053 : : break;
14054 : : case TREE_VEC:
14055 : : for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14056 : : fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14057 : : break;
14058 : : default:
14059 : : break;
14060 : : }
14061 : : break;
14062 : : case tcc_expression:
14063 : : case tcc_reference:
14064 : : case tcc_comparison:
14065 : : case tcc_unary:
14066 : : case tcc_binary:
14067 : : case tcc_statement:
14068 : : case tcc_vl_exp:
14069 : : len = TREE_OPERAND_LENGTH (expr);
14070 : : for (i = 0; i < len; ++i)
14071 : : fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14072 : : break;
14073 : : case tcc_declaration:
14074 : : fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14075 : : fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14076 : : if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14077 : : {
14078 : : fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14079 : : fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14080 : : fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14081 : : fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14082 : : fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14083 : : }
14084 : :
14085 : : if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14086 : : {
14087 : : if (TREE_CODE (expr) == FUNCTION_DECL)
14088 : : {
14089 : : fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14090 : : fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
14091 : : }
14092 : : fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14093 : : }
14094 : : break;
14095 : : case tcc_type:
14096 : : if (TREE_CODE (expr) == ENUMERAL_TYPE)
14097 : : fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14098 : : fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14099 : : fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14100 : : fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14101 : : fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14102 : : if (INTEGRAL_TYPE_P (expr)
14103 : : || SCALAR_FLOAT_TYPE_P (expr))
14104 : : {
14105 : : fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14106 : : fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14107 : : }
14108 : : fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14109 : : if (RECORD_OR_UNION_TYPE_P (expr))
14110 : : fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14111 : : fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14112 : : break;
14113 : : default:
14114 : : break;
14115 : : }
14116 : : }
14117 : :
14118 : : /* Helper function for outputting the checksum of a tree T. When
14119 : : debugging with gdb, you can "define mynext" to be "next" followed
14120 : : by "call debug_fold_checksum (op0)", then just trace down till the
14121 : : outputs differ. */
14122 : :
14123 : : DEBUG_FUNCTION void
14124 : : debug_fold_checksum (const_tree t)
14125 : : {
14126 : : int i;
14127 : : unsigned char checksum[16];
14128 : : struct md5_ctx ctx;
14129 : : hash_table<nofree_ptr_hash<const tree_node> > ht (32);
14130 : :
14131 : : md5_init_ctx (&ctx);
14132 : : fold_checksum_tree (t, &ctx, &ht);
14133 : : md5_finish_ctx (&ctx, checksum);
14134 : : ht.empty ();
14135 : :
14136 : : for (i = 0; i < 16; i++)
14137 : : fprintf (stderr, "%d ", checksum[i]);
14138 : :
14139 : : fprintf (stderr, "\n");
14140 : : }
14141 : :
14142 : : #endif
14143 : :
14144 : : /* Fold a unary tree expression with code CODE of type TYPE with an
14145 : : operand OP0. LOC is the location of the resulting expression.
14146 : : Return a folded expression if successful. Otherwise, return a tree
14147 : : expression with code CODE of type TYPE with an operand OP0. */
14148 : :
14149 : : tree
14150 : 658817256 : fold_build1_loc (location_t loc,
14151 : : enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14152 : : {
14153 : 658817256 : tree tem;
14154 : : #ifdef ENABLE_FOLD_CHECKING
14155 : : unsigned char checksum_before[16], checksum_after[16];
14156 : : struct md5_ctx ctx;
14157 : : hash_table<nofree_ptr_hash<const tree_node> > ht (32);
14158 : :
14159 : : md5_init_ctx (&ctx);
14160 : : fold_checksum_tree (op0, &ctx, &ht);
14161 : : md5_finish_ctx (&ctx, checksum_before);
14162 : : ht.empty ();
14163 : : #endif
14164 : :
14165 : 658817256 : tem = fold_unary_loc (loc, code, type, op0);
14166 : 658817256 : if (!tem)
14167 : 346208642 : tem = build1_loc (loc, code, type, op0 PASS_MEM_STAT);
14168 : :
14169 : : #ifdef ENABLE_FOLD_CHECKING
14170 : : md5_init_ctx (&ctx);
14171 : : fold_checksum_tree (op0, &ctx, &ht);
14172 : : md5_finish_ctx (&ctx, checksum_after);
14173 : :
14174 : : if (memcmp (checksum_before, checksum_after, 16))
14175 : : fold_check_failed (op0, tem);
14176 : : #endif
14177 : 658817256 : return tem;
14178 : : }
14179 : :
14180 : : /* Fold a binary tree expression with code CODE of type TYPE with
14181 : : operands OP0 and OP1. LOC is the location of the resulting
14182 : : expression. Return a folded expression if successful. Otherwise,
14183 : : return a tree expression with code CODE of type TYPE with operands
14184 : : OP0 and OP1. */
14185 : :
14186 : : tree
14187 : 493377002 : fold_build2_loc (location_t loc,
14188 : : enum tree_code code, tree type, tree op0, tree op1
14189 : : MEM_STAT_DECL)
14190 : : {
14191 : 493377002 : tree tem;
14192 : : #ifdef ENABLE_FOLD_CHECKING
14193 : : unsigned char checksum_before_op0[16],
14194 : : checksum_before_op1[16],
14195 : : checksum_after_op0[16],
14196 : : checksum_after_op1[16];
14197 : : struct md5_ctx ctx;
14198 : : hash_table<nofree_ptr_hash<const tree_node> > ht (32);
14199 : :
14200 : : md5_init_ctx (&ctx);
14201 : : fold_checksum_tree (op0, &ctx, &ht);
14202 : : md5_finish_ctx (&ctx, checksum_before_op0);
14203 : : ht.empty ();
14204 : :
14205 : : md5_init_ctx (&ctx);
14206 : : fold_checksum_tree (op1, &ctx, &ht);
14207 : : md5_finish_ctx (&ctx, checksum_before_op1);
14208 : : ht.empty ();
14209 : : #endif
14210 : :
14211 : 493377002 : tem = fold_binary_loc (loc, code, type, op0, op1);
14212 : 493377002 : if (!tem)
14213 : 278661328 : tem = build2_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14214 : :
14215 : : #ifdef ENABLE_FOLD_CHECKING
14216 : : md5_init_ctx (&ctx);
14217 : : fold_checksum_tree (op0, &ctx, &ht);
14218 : : md5_finish_ctx (&ctx, checksum_after_op0);
14219 : : ht.empty ();
14220 : :
14221 : : if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14222 : : fold_check_failed (op0, tem);
14223 : :
14224 : : md5_init_ctx (&ctx);
14225 : : fold_checksum_tree (op1, &ctx, &ht);
14226 : : md5_finish_ctx (&ctx, checksum_after_op1);
14227 : :
14228 : : if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14229 : : fold_check_failed (op1, tem);
14230 : : #endif
14231 : 493377002 : return tem;
14232 : : }
14233 : :
14234 : : /* Fold a ternary tree expression with code CODE of type TYPE with
14235 : : operands OP0, OP1, and OP2. Return a folded expression if
14236 : : successful. Otherwise, return a tree expression with code CODE of
14237 : : type TYPE with operands OP0, OP1, and OP2. */
14238 : :
14239 : : tree
14240 : 20934164 : fold_build3_loc (location_t loc, enum tree_code code, tree type,
14241 : : tree op0, tree op1, tree op2 MEM_STAT_DECL)
14242 : : {
14243 : 20934164 : tree tem;
14244 : : #ifdef ENABLE_FOLD_CHECKING
14245 : : unsigned char checksum_before_op0[16],
14246 : : checksum_before_op1[16],
14247 : : checksum_before_op2[16],
14248 : : checksum_after_op0[16],
14249 : : checksum_after_op1[16],
14250 : : checksum_after_op2[16];
14251 : : struct md5_ctx ctx;
14252 : : hash_table<nofree_ptr_hash<const tree_node> > ht (32);
14253 : :
14254 : : md5_init_ctx (&ctx);
14255 : : fold_checksum_tree (op0, &ctx, &ht);
14256 : : md5_finish_ctx (&ctx, checksum_before_op0);
14257 : : ht.empty ();
14258 : :
14259 : : md5_init_ctx (&ctx);
14260 : : fold_checksum_tree (op1, &ctx, &ht);
14261 : : md5_finish_ctx (&ctx, checksum_before_op1);
14262 : : ht.empty ();
14263 : :
14264 : : md5_init_ctx (&ctx);
14265 : : fold_checksum_tree (op2, &ctx, &ht);
14266 : : md5_finish_ctx (&ctx, checksum_before_op2);
14267 : : ht.empty ();
14268 : : #endif
14269 : :
14270 : 20934164 : gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14271 : 20934164 : tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14272 : 20934164 : if (!tem)
14273 : 18667678 : tem = build3_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14274 : :
14275 : : #ifdef ENABLE_FOLD_CHECKING
14276 : : md5_init_ctx (&ctx);
14277 : : fold_checksum_tree (op0, &ctx, &ht);
14278 : : md5_finish_ctx (&ctx, checksum_after_op0);
14279 : : ht.empty ();
14280 : :
14281 : : if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14282 : : fold_check_failed (op0, tem);
14283 : :
14284 : : md5_init_ctx (&ctx);
14285 : : fold_checksum_tree (op1, &ctx, &ht);
14286 : : md5_finish_ctx (&ctx, checksum_after_op1);
14287 : : ht.empty ();
14288 : :
14289 : : if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14290 : : fold_check_failed (op1, tem);
14291 : :
14292 : : md5_init_ctx (&ctx);
14293 : : fold_checksum_tree (op2, &ctx, &ht);
14294 : : md5_finish_ctx (&ctx, checksum_after_op2);
14295 : :
14296 : : if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14297 : : fold_check_failed (op2, tem);
14298 : : #endif
14299 : 20934164 : return tem;
14300 : : }
14301 : :
14302 : : /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14303 : : arguments in ARGARRAY, and a null static chain.
14304 : : Return a folded expression if successful. Otherwise, return a CALL_EXPR
14305 : : of type TYPE from the given operands as constructed by build_call_array. */
14306 : :
14307 : : tree
14308 : 45008030 : fold_build_call_array_loc (location_t loc, tree type, tree fn,
14309 : : int nargs, tree *argarray)
14310 : : {
14311 : 45008030 : tree tem;
14312 : : #ifdef ENABLE_FOLD_CHECKING
14313 : : unsigned char checksum_before_fn[16],
14314 : : checksum_before_arglist[16],
14315 : : checksum_after_fn[16],
14316 : : checksum_after_arglist[16];
14317 : : struct md5_ctx ctx;
14318 : : hash_table<nofree_ptr_hash<const tree_node> > ht (32);
14319 : : int i;
14320 : :
14321 : : md5_init_ctx (&ctx);
14322 : : fold_checksum_tree (fn, &ctx, &ht);
14323 : : md5_finish_ctx (&ctx, checksum_before_fn);
14324 : : ht.empty ();
14325 : :
14326 : : md5_init_ctx (&ctx);
14327 : : for (i = 0; i < nargs; i++)
14328 : : fold_checksum_tree (argarray[i], &ctx, &ht);
14329 : : md5_finish_ctx (&ctx, checksum_before_arglist);
14330 : : ht.empty ();
14331 : : #endif
14332 : :
14333 : 45008030 : tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14334 : 45008030 : if (!tem)
14335 : 43589154 : tem = build_call_array_loc (loc, type, fn, nargs, argarray);
14336 : :
14337 : : #ifdef ENABLE_FOLD_CHECKING
14338 : : md5_init_ctx (&ctx);
14339 : : fold_checksum_tree (fn, &ctx, &ht);
14340 : : md5_finish_ctx (&ctx, checksum_after_fn);
14341 : : ht.empty ();
14342 : :
14343 : : if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14344 : : fold_check_failed (fn, tem);
14345 : :
14346 : : md5_init_ctx (&ctx);
14347 : : for (i = 0; i < nargs; i++)
14348 : : fold_checksum_tree (argarray[i], &ctx, &ht);
14349 : : md5_finish_ctx (&ctx, checksum_after_arglist);
14350 : :
14351 : : if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14352 : : fold_check_failed (NULL_TREE, tem);
14353 : : #endif
14354 : 45008030 : return tem;
14355 : : }
14356 : :
14357 : : /* Perform constant folding and related simplification of initializer
14358 : : expression EXPR. These behave identically to "fold_buildN" but ignore
14359 : : potential run-time traps and exceptions that fold must preserve. */
14360 : :
14361 : : #define START_FOLD_INIT \
14362 : : int saved_signaling_nans = flag_signaling_nans;\
14363 : : int saved_trapping_math = flag_trapping_math;\
14364 : : int saved_rounding_math = flag_rounding_math;\
14365 : : int saved_trapv = flag_trapv;\
14366 : : int saved_folding_initializer = folding_initializer;\
14367 : : flag_signaling_nans = 0;\
14368 : : flag_trapping_math = 0;\
14369 : : flag_rounding_math = 0;\
14370 : : flag_trapv = 0;\
14371 : : folding_initializer = 1;
14372 : :
14373 : : #define END_FOLD_INIT \
14374 : : flag_signaling_nans = saved_signaling_nans;\
14375 : : flag_trapping_math = saved_trapping_math;\
14376 : : flag_rounding_math = saved_rounding_math;\
14377 : : flag_trapv = saved_trapv;\
14378 : : folding_initializer = saved_folding_initializer;
14379 : :
14380 : : tree
14381 : 541974 : fold_init (tree expr)
14382 : : {
14383 : 541974 : tree result;
14384 : 541974 : START_FOLD_INIT;
14385 : :
14386 : 541974 : result = fold (expr);
14387 : :
14388 : 541974 : END_FOLD_INIT;
14389 : 541974 : return result;
14390 : : }
14391 : :
14392 : : tree
14393 : 2987664 : fold_build1_initializer_loc (location_t loc, enum tree_code code,
14394 : : tree type, tree op)
14395 : : {
14396 : 2987664 : tree result;
14397 : 2987664 : START_FOLD_INIT;
14398 : :
14399 : 2987664 : result = fold_build1_loc (loc, code, type, op);
14400 : :
14401 : 2987664 : END_FOLD_INIT;
14402 : 2987664 : return result;
14403 : : }
14404 : :
14405 : : tree
14406 : 50295 : fold_build2_initializer_loc (location_t loc, enum tree_code code,
14407 : : tree type, tree op0, tree op1)
14408 : : {
14409 : 50295 : tree result;
14410 : 50295 : START_FOLD_INIT;
14411 : :
14412 : 50295 : result = fold_build2_loc (loc, code, type, op0, op1);
14413 : :
14414 : 50295 : END_FOLD_INIT;
14415 : 50295 : return result;
14416 : : }
14417 : :
14418 : : tree
14419 : 3484 : fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14420 : : int nargs, tree *argarray)
14421 : : {
14422 : 3484 : tree result;
14423 : 3484 : START_FOLD_INIT;
14424 : :
14425 : 3484 : result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14426 : :
14427 : 3484 : END_FOLD_INIT;
14428 : 3484 : return result;
14429 : : }
14430 : :
14431 : : tree
14432 : 16169029 : fold_binary_initializer_loc (location_t loc, tree_code code, tree type,
14433 : : tree lhs, tree rhs)
14434 : : {
14435 : 16169029 : tree result;
14436 : 16169029 : START_FOLD_INIT;
14437 : :
14438 : 16169029 : result = fold_binary_loc (loc, code, type, lhs, rhs);
14439 : :
14440 : 16169029 : END_FOLD_INIT;
14441 : 16169029 : return result;
14442 : : }
14443 : :
14444 : : #undef START_FOLD_INIT
14445 : : #undef END_FOLD_INIT
14446 : :
14447 : : /* Determine if first argument is a multiple of second argument. Return
14448 : : false if it is not, or we cannot easily determined it to be.
14449 : :
14450 : : An example of the sort of thing we care about (at this point; this routine
14451 : : could surely be made more general, and expanded to do what the *_DIV_EXPR's
14452 : : fold cases do now) is discovering that
14453 : :
14454 : : SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14455 : :
14456 : : is a multiple of
14457 : :
14458 : : SAVE_EXPR (J * 8)
14459 : :
14460 : : when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14461 : :
14462 : : This code also handles discovering that
14463 : :
14464 : : SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14465 : :
14466 : : is a multiple of 8 so we don't have to worry about dealing with a
14467 : : possible remainder.
14468 : :
14469 : : Note that we *look* inside a SAVE_EXPR only to determine how it was
14470 : : calculated; it is not safe for fold to do much of anything else with the
14471 : : internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14472 : : at run time. For example, the latter example above *cannot* be implemented
14473 : : as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14474 : : evaluation time of the original SAVE_EXPR is not necessarily the same at
14475 : : the time the new expression is evaluated. The only optimization of this
14476 : : sort that would be valid is changing
14477 : :
14478 : : SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14479 : :
14480 : : divided by 8 to
14481 : :
14482 : : SAVE_EXPR (I) * SAVE_EXPR (J)
14483 : :
14484 : : (where the same SAVE_EXPR (J) is used in the original and the
14485 : : transformed version).
14486 : :
14487 : : NOWRAP specifies whether all outer operations in TYPE should
14488 : : be considered not wrapping. Any type conversion within TOP acts
14489 : : as a barrier and we will fall back to NOWRAP being false.
14490 : : NOWRAP is mostly used to treat expressions in TYPE_SIZE and friends
14491 : : as not wrapping even though they are generally using unsigned arithmetic. */
14492 : :
14493 : : bool
14494 : 1339119 : multiple_of_p (tree type, const_tree top, const_tree bottom, bool nowrap)
14495 : : {
14496 : 1339119 : gimple *stmt;
14497 : 1339119 : tree op1, op2;
14498 : :
14499 : 1339119 : if (operand_equal_p (top, bottom, 0))
14500 : : return true;
14501 : :
14502 : 919431 : if (TREE_CODE (type) != INTEGER_TYPE)
14503 : : return false;
14504 : :
14505 : 919426 : switch (TREE_CODE (top))
14506 : : {
14507 : 620 : case BIT_AND_EXPR:
14508 : : /* Bitwise and provides a power of two multiple. If the mask is
14509 : : a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14510 : 620 : if (!integer_pow2p (bottom))
14511 : : return false;
14512 : 620 : return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom, nowrap)
14513 : 620 : || multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap));
14514 : :
14515 : 333450 : case MULT_EXPR:
14516 : : /* If the multiplication can wrap we cannot recurse further unless
14517 : : the bottom is a power of two which is where wrapping does not
14518 : : matter. */
14519 : 333450 : if (!nowrap
14520 : 12980 : && !TYPE_OVERFLOW_UNDEFINED (type)
14521 : 338273 : && !integer_pow2p (bottom))
14522 : : return false;
14523 : 333094 : if (TREE_CODE (bottom) == INTEGER_CST)
14524 : : {
14525 : 331336 : op1 = TREE_OPERAND (top, 0);
14526 : 331336 : op2 = TREE_OPERAND (top, 1);
14527 : 331336 : if (TREE_CODE (op1) == INTEGER_CST)
14528 : 0 : std::swap (op1, op2);
14529 : 331336 : if (TREE_CODE (op2) == INTEGER_CST)
14530 : : {
14531 : 322702 : if (multiple_of_p (type, op2, bottom, nowrap))
14532 : : return true;
14533 : : /* Handle multiple_of_p ((x * 2 + 2) * 4, 8). */
14534 : 2964 : if (multiple_of_p (type, bottom, op2, nowrap))
14535 : : {
14536 : 2182 : widest_int w = wi::sdiv_trunc (wi::to_widest (bottom),
14537 : 2182 : wi::to_widest (op2));
14538 : 2182 : if (wi::fits_to_tree_p (w, TREE_TYPE (bottom)))
14539 : : {
14540 : 2182 : op2 = wide_int_to_tree (TREE_TYPE (bottom), w);
14541 : 2182 : return multiple_of_p (type, op1, op2, nowrap);
14542 : : }
14543 : 2182 : }
14544 : 782 : return multiple_of_p (type, op1, bottom, nowrap);
14545 : : }
14546 : : }
14547 : 10392 : return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom, nowrap)
14548 : 10392 : || multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap));
14549 : :
14550 : 279 : case LSHIFT_EXPR:
14551 : : /* Handle X << CST as X * (1 << CST) and only process the constant. */
14552 : 279 : if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14553 : : {
14554 : 279 : op1 = TREE_OPERAND (top, 1);
14555 : 279 : if (wi::to_widest (op1) < TYPE_PRECISION (type))
14556 : : {
14557 : 279 : wide_int mul_op
14558 : 279 : = wi::one (TYPE_PRECISION (type)) << wi::to_wide (op1);
14559 : 558 : return multiple_of_p (type,
14560 : 558 : wide_int_to_tree (type, mul_op), bottom,
14561 : : nowrap);
14562 : 279 : }
14563 : : }
14564 : : return false;
14565 : :
14566 : 177322 : case MINUS_EXPR:
14567 : 177322 : case PLUS_EXPR:
14568 : : /* If the addition or subtraction can wrap we cannot recurse further
14569 : : unless bottom is a power of two which is where wrapping does not
14570 : : matter. */
14571 : 177322 : if (!nowrap
14572 : 139353 : && !TYPE_OVERFLOW_UNDEFINED (type)
14573 : 315220 : && !integer_pow2p (bottom))
14574 : : return false;
14575 : :
14576 : : /* Handle cases like op0 + 0xfffffffd as op0 - 3 if the expression has
14577 : : unsigned type. For example, (X / 3) + 0xfffffffd is multiple of 3,
14578 : : but 0xfffffffd is not. */
14579 : 160166 : op1 = TREE_OPERAND (top, 1);
14580 : 160166 : if (TREE_CODE (top) == PLUS_EXPR
14581 : 155198 : && nowrap
14582 : 33067 : && TYPE_UNSIGNED (type)
14583 : 192791 : && TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sign_bit (op1))
14584 : 28896 : op1 = fold_build1 (NEGATE_EXPR, type, op1);
14585 : :
14586 : : /* It is impossible to prove if op0 +- op1 is multiple of bottom
14587 : : precisely, so be conservative here checking if both op0 and op1
14588 : : are multiple of bottom. Note we check the second operand first
14589 : : since it's usually simpler. */
14590 : 160166 : return (multiple_of_p (type, op1, bottom, nowrap)
14591 : 160166 : && multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap));
14592 : :
14593 : 135148 : CASE_CONVERT:
14594 : : /* Can't handle conversions from non-integral or wider integral type. */
14595 : 135148 : if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14596 : 135148 : || (TYPE_PRECISION (type)
14597 : 44790 : < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14598 : : return false;
14599 : : /* NOWRAP only extends to operations in the outermost type so
14600 : : make sure to strip it off here. */
14601 : 44523 : return multiple_of_p (TREE_TYPE (TREE_OPERAND (top, 0)),
14602 : 89046 : TREE_OPERAND (top, 0), bottom, false);
14603 : :
14604 : 10367 : case SAVE_EXPR:
14605 : 10367 : return multiple_of_p (type, TREE_OPERAND (top, 0), bottom, nowrap);
14606 : :
14607 : 0 : case COND_EXPR:
14608 : 0 : return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom, nowrap)
14609 : 0 : && multiple_of_p (type, TREE_OPERAND (top, 2), bottom, nowrap));
14610 : :
14611 : 104475 : case INTEGER_CST:
14612 : 104475 : if (TREE_CODE (bottom) != INTEGER_CST || integer_zerop (bottom))
14613 : 2284 : return false;
14614 : 102191 : return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
14615 : : SIGNED);
14616 : :
14617 : 49960 : case SSA_NAME:
14618 : 49960 : if (TREE_CODE (bottom) == INTEGER_CST
14619 : 47177 : && (stmt = SSA_NAME_DEF_STMT (top)) != NULL
14620 : 97137 : && gimple_code (stmt) == GIMPLE_ASSIGN)
14621 : : {
14622 : 19354 : enum tree_code code = gimple_assign_rhs_code (stmt);
14623 : :
14624 : : /* Check for special cases to see if top is defined as multiple
14625 : : of bottom:
14626 : :
14627 : : top = (X & ~(bottom - 1) ; bottom is power of 2
14628 : :
14629 : : or
14630 : :
14631 : : Y = X % bottom
14632 : : top = X - Y. */
14633 : 19354 : if (code == BIT_AND_EXPR
14634 : 695 : && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
14635 : 695 : && TREE_CODE (op2) == INTEGER_CST
14636 : 591 : && integer_pow2p (bottom)
14637 : 19945 : && wi::multiple_of_p (wi::to_widest (op2),
14638 : 591 : wi::to_widest (bottom), SIGNED))
14639 : 590 : return true;
14640 : :
14641 : 18764 : op1 = gimple_assign_rhs1 (stmt);
14642 : 18764 : if (code == MINUS_EXPR
14643 : 2194 : && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
14644 : 2194 : && TREE_CODE (op2) == SSA_NAME
14645 : 2194 : && (stmt = SSA_NAME_DEF_STMT (op2)) != NULL
14646 : 2194 : && gimple_code (stmt) == GIMPLE_ASSIGN
14647 : 2158 : && (code = gimple_assign_rhs_code (stmt)) == TRUNC_MOD_EXPR
14648 : 66 : && operand_equal_p (op1, gimple_assign_rhs1 (stmt), 0)
14649 : 18830 : && operand_equal_p (bottom, gimple_assign_rhs2 (stmt), 0))
14650 : : return true;
14651 : : }
14652 : :
14653 : : /* fall through */
14654 : :
14655 : : default:
14656 : : if (POLY_INT_CST_P (top) && poly_int_tree_p (bottom))
14657 : : return multiple_p (wi::to_poly_widest (top),
14658 : : wi::to_poly_widest (bottom));
14659 : :
14660 : : return false;
14661 : : }
14662 : : }
14663 : :
14664 : : /* Return true if expression X cannot be (or contain) a NaN or infinity.
14665 : : This function returns true for integer expressions, and returns
14666 : : false if uncertain. */
14667 : :
14668 : : bool
14669 : 417805 : tree_expr_finite_p (const_tree x)
14670 : : {
14671 : 417830 : machine_mode mode = element_mode (x);
14672 : 417830 : if (!HONOR_NANS (mode) && !HONOR_INFINITIES (mode))
14673 : : return true;
14674 : 417736 : switch (TREE_CODE (x))
14675 : : {
14676 : 571 : case REAL_CST:
14677 : 571 : return real_isfinite (TREE_REAL_CST_PTR (x));
14678 : 0 : case COMPLEX_CST:
14679 : 0 : return tree_expr_finite_p (TREE_REALPART (x))
14680 : 0 : && tree_expr_finite_p (TREE_IMAGPART (x));
14681 : : case FLOAT_EXPR:
14682 : : return true;
14683 : 25 : case ABS_EXPR:
14684 : 25 : case CONVERT_EXPR:
14685 : 25 : case NON_LVALUE_EXPR:
14686 : 25 : case NEGATE_EXPR:
14687 : 25 : case SAVE_EXPR:
14688 : 25 : return tree_expr_finite_p (TREE_OPERAND (x, 0));
14689 : 0 : case MIN_EXPR:
14690 : 0 : case MAX_EXPR:
14691 : 0 : return tree_expr_finite_p (TREE_OPERAND (x, 0))
14692 : 0 : && tree_expr_finite_p (TREE_OPERAND (x, 1));
14693 : 0 : case COND_EXPR:
14694 : 0 : return tree_expr_finite_p (TREE_OPERAND (x, 1))
14695 : 0 : && tree_expr_finite_p (TREE_OPERAND (x, 2));
14696 : 86 : case CALL_EXPR:
14697 : 86 : switch (get_call_combined_fn (x))
14698 : : {
14699 : 0 : CASE_CFN_FABS:
14700 : 0 : CASE_CFN_FABS_FN:
14701 : 0 : return tree_expr_finite_p (CALL_EXPR_ARG (x, 0));
14702 : 0 : CASE_CFN_FMAX:
14703 : 0 : CASE_CFN_FMAX_FN:
14704 : 0 : CASE_CFN_FMIN:
14705 : 0 : CASE_CFN_FMIN_FN:
14706 : 0 : return tree_expr_finite_p (CALL_EXPR_ARG (x, 0))
14707 : 0 : && tree_expr_finite_p (CALL_EXPR_ARG (x, 1));
14708 : : default:
14709 : : return false;
14710 : : }
14711 : :
14712 : : default:
14713 : : return false;
14714 : : }
14715 : : }
14716 : :
14717 : : /* Return true if expression X evaluates to an infinity.
14718 : : This function returns false for integer expressions. */
14719 : :
14720 : : bool
14721 : 647034 : tree_expr_infinite_p (const_tree x)
14722 : : {
14723 : 647510 : if (!HONOR_INFINITIES (x))
14724 : : return false;
14725 : 647423 : switch (TREE_CODE (x))
14726 : : {
14727 : 0 : case REAL_CST:
14728 : 0 : return real_isinf (TREE_REAL_CST_PTR (x));
14729 : 476 : case ABS_EXPR:
14730 : 476 : case NEGATE_EXPR:
14731 : 476 : case NON_LVALUE_EXPR:
14732 : 476 : case SAVE_EXPR:
14733 : 476 : return tree_expr_infinite_p (TREE_OPERAND (x, 0));
14734 : 0 : case COND_EXPR:
14735 : 0 : return tree_expr_infinite_p (TREE_OPERAND (x, 1))
14736 : 0 : && tree_expr_infinite_p (TREE_OPERAND (x, 2));
14737 : : default:
14738 : : return false;
14739 : : }
14740 : : }
14741 : :
14742 : : /* Return true if expression X could evaluate to an infinity.
14743 : : This function returns false for integer expressions, and returns
14744 : : true if uncertain. */
14745 : :
14746 : : bool
14747 : 349340 : tree_expr_maybe_infinite_p (const_tree x)
14748 : : {
14749 : 349348 : if (!HONOR_INFINITIES (x))
14750 : : return false;
14751 : 349070 : switch (TREE_CODE (x))
14752 : : {
14753 : 199 : case REAL_CST:
14754 : 199 : return real_isinf (TREE_REAL_CST_PTR (x));
14755 : : case FLOAT_EXPR:
14756 : : return false;
14757 : 8 : case ABS_EXPR:
14758 : 8 : case NEGATE_EXPR:
14759 : 8 : return tree_expr_maybe_infinite_p (TREE_OPERAND (x, 0));
14760 : 1 : case COND_EXPR:
14761 : 1 : return tree_expr_maybe_infinite_p (TREE_OPERAND (x, 1))
14762 : 1 : || tree_expr_maybe_infinite_p (TREE_OPERAND (x, 2));
14763 : : default:
14764 : : return true;
14765 : : }
14766 : : }
14767 : :
14768 : : /* Return true if expression X evaluates to a signaling NaN.
14769 : : This function returns false for integer expressions. */
14770 : :
14771 : : bool
14772 : 398 : tree_expr_signaling_nan_p (const_tree x)
14773 : : {
14774 : 398 : if (!HONOR_SNANS (x))
14775 : : return false;
14776 : 156 : switch (TREE_CODE (x))
14777 : : {
14778 : 156 : case REAL_CST:
14779 : 156 : return real_issignaling_nan (TREE_REAL_CST_PTR (x));
14780 : 0 : case NON_LVALUE_EXPR:
14781 : 0 : case SAVE_EXPR:
14782 : 0 : return tree_expr_signaling_nan_p (TREE_OPERAND (x, 0));
14783 : 0 : case COND_EXPR:
14784 : 0 : return tree_expr_signaling_nan_p (TREE_OPERAND (x, 1))
14785 : 0 : && tree_expr_signaling_nan_p (TREE_OPERAND (x, 2));
14786 : : default:
14787 : : return false;
14788 : : }
14789 : : }
14790 : :
14791 : : /* Return true if expression X could evaluate to a signaling NaN.
14792 : : This function returns false for integer expressions, and returns
14793 : : true if uncertain. */
14794 : :
14795 : : bool
14796 : 721994 : tree_expr_maybe_signaling_nan_p (const_tree x)
14797 : : {
14798 : 721994 : if (!HONOR_SNANS (x))
14799 : : return false;
14800 : 4984 : switch (TREE_CODE (x))
14801 : : {
14802 : 1438 : case REAL_CST:
14803 : 1438 : return real_issignaling_nan (TREE_REAL_CST_PTR (x));
14804 : : case FLOAT_EXPR:
14805 : : return false;
14806 : 0 : case ABS_EXPR:
14807 : 0 : case CONVERT_EXPR:
14808 : 0 : case NEGATE_EXPR:
14809 : 0 : case NON_LVALUE_EXPR:
14810 : 0 : case SAVE_EXPR:
14811 : 0 : return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 0));
14812 : 0 : case MIN_EXPR:
14813 : 0 : case MAX_EXPR:
14814 : 0 : return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 0))
14815 : 0 : || tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 1));
14816 : 0 : case COND_EXPR:
14817 : 0 : return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 1))
14818 : 0 : || tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 2));
14819 : 0 : case CALL_EXPR:
14820 : 0 : switch (get_call_combined_fn (x))
14821 : : {
14822 : 0 : CASE_CFN_FABS:
14823 : 0 : CASE_CFN_FABS_FN:
14824 : 0 : return tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 0));
14825 : 0 : CASE_CFN_FMAX:
14826 : 0 : CASE_CFN_FMAX_FN:
14827 : 0 : CASE_CFN_FMIN:
14828 : 0 : CASE_CFN_FMIN_FN:
14829 : 0 : return tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 0))
14830 : 0 : || tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 1));
14831 : : default:
14832 : : return true;
14833 : : }
14834 : : default:
14835 : : return true;
14836 : : }
14837 : : }
14838 : :
14839 : : /* Return true if expression X evaluates to a NaN.
14840 : : This function returns false for integer expressions. */
14841 : :
14842 : : bool
14843 : 3700257 : tree_expr_nan_p (const_tree x)
14844 : : {
14845 : 4058620 : if (!HONOR_NANS (x))
14846 : : return false;
14847 : 4058354 : switch (TREE_CODE (x))
14848 : : {
14849 : 2960 : case REAL_CST:
14850 : 2960 : return real_isnan (TREE_REAL_CST_PTR (x));
14851 : 358363 : case NON_LVALUE_EXPR:
14852 : 358363 : case SAVE_EXPR:
14853 : 358363 : return tree_expr_nan_p (TREE_OPERAND (x, 0));
14854 : 900 : case COND_EXPR:
14855 : 900 : return tree_expr_nan_p (TREE_OPERAND (x, 1))
14856 : 900 : && tree_expr_nan_p (TREE_OPERAND (x, 2));
14857 : : default:
14858 : : return false;
14859 : : }
14860 : : }
14861 : :
14862 : : /* Return true if expression X could evaluate to a NaN.
14863 : : This function returns false for integer expressions, and returns
14864 : : true if uncertain. */
14865 : :
14866 : : bool
14867 : 4419626 : tree_expr_maybe_nan_p (const_tree x)
14868 : : {
14869 : 6128936 : if (!HONOR_NANS (x))
14870 : : return false;
14871 : 6038998 : switch (TREE_CODE (x))
14872 : : {
14873 : 3353 : case REAL_CST:
14874 : 3353 : return real_isnan (TREE_REAL_CST_PTR (x));
14875 : : case FLOAT_EXPR:
14876 : : return false;
14877 : 14619 : case PLUS_EXPR:
14878 : 14619 : case MINUS_EXPR:
14879 : 14619 : case MULT_EXPR:
14880 : 14619 : return !tree_expr_finite_p (TREE_OPERAND (x, 0))
14881 : 14619 : || !tree_expr_finite_p (TREE_OPERAND (x, 1));
14882 : 1709310 : case ABS_EXPR:
14883 : 1709310 : case CONVERT_EXPR:
14884 : 1709310 : case NEGATE_EXPR:
14885 : 1709310 : case NON_LVALUE_EXPR:
14886 : 1709310 : case SAVE_EXPR:
14887 : 1709310 : return tree_expr_maybe_nan_p (TREE_OPERAND (x, 0));
14888 : 168 : case MIN_EXPR:
14889 : 168 : case MAX_EXPR:
14890 : 168 : return tree_expr_maybe_nan_p (TREE_OPERAND (x, 0))
14891 : 168 : || tree_expr_maybe_nan_p (TREE_OPERAND (x, 1));
14892 : 557 : case COND_EXPR:
14893 : 557 : return tree_expr_maybe_nan_p (TREE_OPERAND (x, 1))
14894 : 557 : || tree_expr_maybe_nan_p (TREE_OPERAND (x, 2));
14895 : 1064 : case CALL_EXPR:
14896 : 1064 : switch (get_call_combined_fn (x))
14897 : : {
14898 : 0 : CASE_CFN_FABS:
14899 : 0 : CASE_CFN_FABS_FN:
14900 : 0 : return tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 0));
14901 : 108 : CASE_CFN_FMAX:
14902 : 108 : CASE_CFN_FMAX_FN:
14903 : 108 : CASE_CFN_FMIN:
14904 : 108 : CASE_CFN_FMIN_FN:
14905 : 108 : return tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 0))
14906 : 108 : || tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 1));
14907 : : default:
14908 : : return true;
14909 : : }
14910 : : default:
14911 : : return true;
14912 : : }
14913 : : }
14914 : :
14915 : : /* Return true if expression X could evaluate to -0.0.
14916 : : This function returns true if uncertain. */
14917 : :
14918 : : bool
14919 : 600852 : tree_expr_maybe_real_minus_zero_p (const_tree x)
14920 : : {
14921 : 600852 : if (!HONOR_SIGNED_ZEROS (x))
14922 : : return false;
14923 : 600852 : switch (TREE_CODE (x))
14924 : : {
14925 : 0 : case REAL_CST:
14926 : 0 : return REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (x));
14927 : : case INTEGER_CST:
14928 : : case FLOAT_EXPR:
14929 : : case ABS_EXPR:
14930 : : return false;
14931 : 0 : case NON_LVALUE_EXPR:
14932 : 0 : case SAVE_EXPR:
14933 : 0 : return tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x, 0));
14934 : 0 : case COND_EXPR:
14935 : 0 : return tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x, 1))
14936 : 0 : || tree_expr_maybe_real_minus_zero_p (TREE_OPERAND (x, 2));
14937 : 1 : case CALL_EXPR:
14938 : 1 : switch (get_call_combined_fn (x))
14939 : : {
14940 : : CASE_CFN_FABS:
14941 : : CASE_CFN_FABS_FN:
14942 : : return false;
14943 : : default:
14944 : : break;
14945 : : }
14946 : : default:
14947 : : break;
14948 : : }
14949 : : /* Ideally !(tree_expr_nonzero_p (X) || tree_expr_nonnegative_p (X))
14950 : : * but currently those predicates require tree and not const_tree. */
14951 : : return true;
14952 : : }
14953 : :
14954 : : #define tree_expr_nonnegative_warnv_p(X, Y) \
14955 : : _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
14956 : :
14957 : : #define RECURSE(X) \
14958 : : ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
14959 : :
14960 : : /* Return true if CODE or TYPE is known to be non-negative. */
14961 : :
14962 : : static bool
14963 : 30391652 : tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14964 : : {
14965 : 30391652 : if (!VECTOR_TYPE_P (type)
14966 : 30376149 : && (TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14967 : 60767262 : && truth_value_p (code))
14968 : : /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14969 : : have a signed:1 type (where the value is -1 and 0). */
14970 : : return true;
14971 : : return false;
14972 : : }
14973 : :
14974 : : /* Return true if (CODE OP0) is known to be non-negative. If the return
14975 : : value is based on the assumption that signed overflow is undefined,
14976 : : set *STRICT_OVERFLOW_P to true; otherwise, don't change
14977 : : *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14978 : :
14979 : : bool
14980 : 10619513 : tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14981 : : bool *strict_overflow_p, int depth)
14982 : : {
14983 : 10619513 : if (TYPE_UNSIGNED (type))
14984 : : return true;
14985 : :
14986 : 3977832 : switch (code)
14987 : : {
14988 : 22058 : case ABS_EXPR:
14989 : : /* We can't return 1 if flag_wrapv is set because
14990 : : ABS_EXPR<INT_MIN> = INT_MIN. */
14991 : 22058 : if (!ANY_INTEGRAL_TYPE_P (type))
14992 : : return true;
14993 : 19261 : if (TYPE_OVERFLOW_UNDEFINED (type))
14994 : : {
14995 : 18363 : *strict_overflow_p = true;
14996 : 18363 : return true;
14997 : : }
14998 : : break;
14999 : :
15000 : 26248 : case NON_LVALUE_EXPR:
15001 : 26248 : case FLOAT_EXPR:
15002 : 26248 : case FIX_TRUNC_EXPR:
15003 : 26248 : return RECURSE (op0);
15004 : :
15005 : 3839681 : CASE_CONVERT:
15006 : 3839681 : {
15007 : 3839681 : tree inner_type = TREE_TYPE (op0);
15008 : 3839681 : tree outer_type = type;
15009 : :
15010 : 3839681 : if (SCALAR_FLOAT_TYPE_P (outer_type))
15011 : : {
15012 : 178584 : if (SCALAR_FLOAT_TYPE_P (inner_type))
15013 : 178584 : return RECURSE (op0);
15014 : 0 : if (INTEGRAL_TYPE_P (inner_type))
15015 : : {
15016 : 0 : if (TYPE_UNSIGNED (inner_type))
15017 : : return true;
15018 : 0 : return RECURSE (op0);
15019 : : }
15020 : : }
15021 : 3661097 : else if (INTEGRAL_TYPE_P (outer_type))
15022 : : {
15023 : 3661077 : if (SCALAR_FLOAT_TYPE_P (inner_type))
15024 : 0 : return RECURSE (op0);
15025 : 3661077 : if (INTEGRAL_TYPE_P (inner_type))
15026 : 3501486 : return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
15027 : 3501486 : && TYPE_UNSIGNED (inner_type);
15028 : : }
15029 : : }
15030 : : break;
15031 : :
15032 : 89845 : default:
15033 : 89845 : return tree_simple_nonnegative_warnv_p (code, type);
15034 : : }
15035 : :
15036 : : /* We don't know sign of `t', so be conservative and return false. */
15037 : : return false;
15038 : : }
15039 : :
15040 : : /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
15041 : : value is based on the assumption that signed overflow is undefined,
15042 : : set *STRICT_OVERFLOW_P to true; otherwise, don't change
15043 : : *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
15044 : :
15045 : : bool
15046 : 31150447 : tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15047 : : tree op1, bool *strict_overflow_p,
15048 : : int depth)
15049 : : {
15050 : 31150447 : if (TYPE_UNSIGNED (type))
15051 : : return true;
15052 : :
15053 : 10646162 : switch (code)
15054 : : {
15055 : 4573415 : case POINTER_PLUS_EXPR:
15056 : 4573415 : case PLUS_EXPR:
15057 : 4573415 : if (FLOAT_TYPE_P (type))
15058 : 44362 : return RECURSE (op0) && RECURSE (op1);
15059 : :
15060 : : /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
15061 : : both unsigned and at least 2 bits shorter than the result. */
15062 : 4529053 : if (TREE_CODE (type) == INTEGER_TYPE
15063 : 4523501 : && TREE_CODE (op0) == NOP_EXPR
15064 : 2576 : && TREE_CODE (op1) == NOP_EXPR)
15065 : : {
15066 : 192 : tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
15067 : 192 : tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
15068 : 192 : if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
15069 : 279 : && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
15070 : : {
15071 : 79 : unsigned int prec = MAX (TYPE_PRECISION (inner1),
15072 : 79 : TYPE_PRECISION (inner2)) + 1;
15073 : 79 : return prec < TYPE_PRECISION (type);
15074 : : }
15075 : : }
15076 : : break;
15077 : :
15078 : 1105252 : case MULT_EXPR:
15079 : 1105252 : if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
15080 : : {
15081 : : /* x * x is always non-negative for floating point x
15082 : : or without overflow. */
15083 : 1051097 : if (operand_equal_p (op0, op1, 0)
15084 : 1051097 : || (RECURSE (op0) && RECURSE (op1)))
15085 : : {
15086 : 514 : if (ANY_INTEGRAL_TYPE_P (type)
15087 : 10443 : && TYPE_OVERFLOW_UNDEFINED (type))
15088 : 9929 : *strict_overflow_p = true;
15089 : 10435 : return true;
15090 : : }
15091 : : }
15092 : :
15093 : : /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
15094 : : both unsigned and their total bits is shorter than the result. */
15095 : 1094817 : if (TREE_CODE (type) == INTEGER_TYPE
15096 : 1028004 : && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
15097 : 135 : && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
15098 : : {
15099 : 102 : tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
15100 : 102 : ? TREE_TYPE (TREE_OPERAND (op0, 0))
15101 : 0 : : TREE_TYPE (op0);
15102 : 102 : tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
15103 : 157 : ? TREE_TYPE (TREE_OPERAND (op1, 0))
15104 : 55 : : TREE_TYPE (op1);
15105 : :
15106 : 102 : bool unsigned0 = TYPE_UNSIGNED (inner0);
15107 : 102 : bool unsigned1 = TYPE_UNSIGNED (inner1);
15108 : :
15109 : 102 : if (TREE_CODE (op0) == INTEGER_CST)
15110 : 0 : unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
15111 : :
15112 : 102 : if (TREE_CODE (op1) == INTEGER_CST)
15113 : 55 : unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
15114 : :
15115 : 102 : if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
15116 : 7 : && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
15117 : : {
15118 : 0 : unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
15119 : 0 : ? tree_int_cst_min_precision (op0, UNSIGNED)
15120 : 0 : : TYPE_PRECISION (inner0);
15121 : :
15122 : 0 : unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
15123 : 0 : ? tree_int_cst_min_precision (op1, UNSIGNED)
15124 : 0 : : TYPE_PRECISION (inner1);
15125 : :
15126 : 0 : return precision0 + precision1 < TYPE_PRECISION (type);
15127 : : }
15128 : : }
15129 : : return false;
15130 : :
15131 : 81440 : case BIT_AND_EXPR:
15132 : 81440 : return RECURSE (op0) || RECURSE (op1);
15133 : :
15134 : 39455 : case MAX_EXPR:
15135 : : /* Usually RECURSE (op0) || RECURSE (op1) but NaNs complicate
15136 : : things. */
15137 : 39455 : if (tree_expr_maybe_nan_p (op0) || tree_expr_maybe_nan_p (op1))
15138 : 31 : return RECURSE (op0) && RECURSE (op1);
15139 : 39424 : return RECURSE (op0) || RECURSE (op1);
15140 : :
15141 : 645581 : case BIT_IOR_EXPR:
15142 : 645581 : case BIT_XOR_EXPR:
15143 : 645581 : case MIN_EXPR:
15144 : 645581 : case RDIV_EXPR:
15145 : 645581 : case TRUNC_DIV_EXPR:
15146 : 645581 : case CEIL_DIV_EXPR:
15147 : 645581 : case FLOOR_DIV_EXPR:
15148 : 645581 : case ROUND_DIV_EXPR:
15149 : 645581 : return RECURSE (op0) && RECURSE (op1);
15150 : :
15151 : 36276 : case TRUNC_MOD_EXPR:
15152 : 36276 : return RECURSE (op0);
15153 : :
15154 : 159 : case FLOOR_MOD_EXPR:
15155 : 159 : return RECURSE (op1);
15156 : :
15157 : 4164584 : case CEIL_MOD_EXPR:
15158 : 4164584 : case ROUND_MOD_EXPR:
15159 : 4164584 : default:
15160 : 4164584 : return tree_simple_nonnegative_warnv_p (code, type);
15161 : : }
15162 : :
15163 : : /* We don't know sign of `t', so be conservative and return false. */
15164 : : return false;
15165 : : }
15166 : :
15167 : : /* Return true if T is known to be non-negative. If the return
15168 : : value is based on the assumption that signed overflow is undefined,
15169 : : set *STRICT_OVERFLOW_P to true; otherwise, don't change
15170 : : *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
15171 : :
15172 : : bool
15173 : 31796755 : tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
15174 : : {
15175 : 31796755 : if (TYPE_UNSIGNED (TREE_TYPE (t)))
15176 : : return true;
15177 : :
15178 : 21617099 : switch (TREE_CODE (t))
15179 : : {
15180 : 1504120 : case INTEGER_CST:
15181 : 1504120 : return tree_int_cst_sgn (t) >= 0;
15182 : :
15183 : 39378 : case REAL_CST:
15184 : 39378 : return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15185 : :
15186 : 0 : case FIXED_CST:
15187 : 0 : return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15188 : :
15189 : 161 : case COND_EXPR:
15190 : 161 : return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
15191 : :
15192 : 13035078 : case SSA_NAME:
15193 : : /* Limit the depth of recursion to avoid quadratic behavior.
15194 : : This is expected to catch almost all occurrences in practice.
15195 : : If this code misses important cases that unbounded recursion
15196 : : would not, passes that need this information could be revised
15197 : : to provide it through dataflow propagation. */
15198 : 13035078 : return (!name_registered_for_update_p (t)
15199 : 13035077 : && depth < param_max_ssa_name_query_depth
15200 : 24859368 : && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
15201 : : strict_overflow_p, depth));
15202 : :
15203 : 7038362 : default:
15204 : 7038362 : return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
15205 : : }
15206 : : }
15207 : :
15208 : : /* Return true if T is known to be non-negative. If the return
15209 : : value is based on the assumption that signed overflow is undefined,
15210 : : set *STRICT_OVERFLOW_P to true; otherwise, don't change
15211 : : *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
15212 : :
15213 : : bool
15214 : 19231842 : tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
15215 : : bool *strict_overflow_p, int depth)
15216 : : {
15217 : 19231842 : switch (fn)
15218 : : {
15219 : : CASE_CFN_ACOS:
15220 : : CASE_CFN_ACOS_FN:
15221 : : CASE_CFN_ACOSH:
15222 : : CASE_CFN_ACOSH_FN:
15223 : : CASE_CFN_CABS:
15224 : : CASE_CFN_CABS_FN:
15225 : : CASE_CFN_COSH:
15226 : : CASE_CFN_COSH_FN:
15227 : : CASE_CFN_ERFC:
15228 : : CASE_CFN_ERFC_FN:
15229 : : CASE_CFN_EXP:
15230 : : CASE_CFN_EXP_FN:
15231 : : CASE_CFN_EXP10:
15232 : : CASE_CFN_EXP2:
15233 : : CASE_CFN_EXP2_FN:
15234 : : CASE_CFN_FABS:
15235 : : CASE_CFN_FABS_FN:
15236 : : CASE_CFN_FDIM:
15237 : : CASE_CFN_FDIM_FN:
15238 : : CASE_CFN_HYPOT:
15239 : : CASE_CFN_HYPOT_FN:
15240 : : CASE_CFN_POW10:
15241 : : CASE_CFN_FFS:
15242 : : CASE_CFN_PARITY:
15243 : : CASE_CFN_POPCOUNT:
15244 : : CASE_CFN_CLZ:
15245 : : CASE_CFN_CLRSB:
15246 : : case CFN_BUILT_IN_BSWAP16:
15247 : : case CFN_BUILT_IN_BSWAP32:
15248 : : case CFN_BUILT_IN_BSWAP64:
15249 : : case CFN_BUILT_IN_BSWAP128:
15250 : : /* Always true. */
15251 : : return true;
15252 : :
15253 : 937 : CASE_CFN_SQRT:
15254 : 937 : CASE_CFN_SQRT_FN:
15255 : : /* sqrt(-0.0) is -0.0. */
15256 : 937 : if (!HONOR_SIGNED_ZEROS (type))
15257 : : return true;
15258 : 913 : return RECURSE (arg0);
15259 : :
15260 : 89348 : CASE_CFN_ASINH:
15261 : 89348 : CASE_CFN_ASINH_FN:
15262 : 89348 : CASE_CFN_ATAN:
15263 : 89348 : CASE_CFN_ATAN_FN:
15264 : 89348 : CASE_CFN_ATANH:
15265 : 89348 : CASE_CFN_ATANH_FN:
15266 : 89348 : CASE_CFN_CBRT:
15267 : 89348 : CASE_CFN_CBRT_FN:
15268 : 89348 : CASE_CFN_CEIL:
15269 : 89348 : CASE_CFN_CEIL_FN:
15270 : 89348 : CASE_CFN_ERF:
15271 : 89348 : CASE_CFN_ERF_FN:
15272 : 89348 : CASE_CFN_EXPM1:
15273 : 89348 : CASE_CFN_EXPM1_FN:
15274 : 89348 : CASE_CFN_FLOOR:
15275 : 89348 : CASE_CFN_FLOOR_FN:
15276 : 89348 : CASE_CFN_FMOD:
15277 : 89348 : CASE_CFN_FMOD_FN:
15278 : 89348 : CASE_CFN_FREXP:
15279 : 89348 : CASE_CFN_FREXP_FN:
15280 : 89348 : CASE_CFN_ICEIL:
15281 : 89348 : CASE_CFN_IFLOOR:
15282 : 89348 : CASE_CFN_IRINT:
15283 : 89348 : CASE_CFN_IROUND:
15284 : 89348 : CASE_CFN_LCEIL:
15285 : 89348 : CASE_CFN_LDEXP:
15286 : 89348 : CASE_CFN_LFLOOR:
15287 : 89348 : CASE_CFN_LLCEIL:
15288 : 89348 : CASE_CFN_LLFLOOR:
15289 : 89348 : CASE_CFN_LLRINT:
15290 : 89348 : CASE_CFN_LLRINT_FN:
15291 : 89348 : CASE_CFN_LLROUND:
15292 : 89348 : CASE_CFN_LLROUND_FN:
15293 : 89348 : CASE_CFN_LRINT:
15294 : 89348 : CASE_CFN_LRINT_FN:
15295 : 89348 : CASE_CFN_LROUND:
15296 : 89348 : CASE_CFN_LROUND_FN:
15297 : 89348 : CASE_CFN_MODF:
15298 : 89348 : CASE_CFN_MODF_FN:
15299 : 89348 : CASE_CFN_NEARBYINT:
15300 : 89348 : CASE_CFN_NEARBYINT_FN:
15301 : 89348 : CASE_CFN_RINT:
15302 : 89348 : CASE_CFN_RINT_FN:
15303 : 89348 : CASE_CFN_ROUND:
15304 : 89348 : CASE_CFN_ROUND_FN:
15305 : 89348 : CASE_CFN_ROUNDEVEN:
15306 : 89348 : CASE_CFN_ROUNDEVEN_FN:
15307 : 89348 : CASE_CFN_SCALB:
15308 : 89348 : CASE_CFN_SCALBLN:
15309 : 89348 : CASE_CFN_SCALBLN_FN:
15310 : 89348 : CASE_CFN_SCALBN:
15311 : 89348 : CASE_CFN_SCALBN_FN:
15312 : 89348 : CASE_CFN_SIGNBIT:
15313 : 89348 : CASE_CFN_SIGNIFICAND:
15314 : 89348 : CASE_CFN_SINH:
15315 : 89348 : CASE_CFN_SINH_FN:
15316 : 89348 : CASE_CFN_TANH:
15317 : 89348 : CASE_CFN_TANH_FN:
15318 : 89348 : CASE_CFN_TRUNC:
15319 : 89348 : CASE_CFN_TRUNC_FN:
15320 : : /* True if the 1st argument is nonnegative. */
15321 : 89348 : return RECURSE (arg0);
15322 : :
15323 : 1260 : CASE_CFN_FMAX:
15324 : 1260 : CASE_CFN_FMAX_FN:
15325 : : /* Usually RECURSE (arg0) || RECURSE (arg1) but NaNs complicate
15326 : : things. In the presence of sNaNs, we're only guaranteed to be
15327 : : non-negative if both operands are non-negative. In the presence
15328 : : of qNaNs, we're non-negative if either operand is non-negative
15329 : : and can't be a qNaN, or if both operands are non-negative. */
15330 : 2420 : if (tree_expr_maybe_signaling_nan_p (arg0) ||
15331 : 1160 : tree_expr_maybe_signaling_nan_p (arg1))
15332 : 100 : return RECURSE (arg0) && RECURSE (arg1);
15333 : 1160 : return RECURSE (arg0) ? (!tree_expr_maybe_nan_p (arg0)
15334 : 331 : || RECURSE (arg1))
15335 : 829 : : (RECURSE (arg1)
15336 : 829 : && !tree_expr_maybe_nan_p (arg1));
15337 : :
15338 : 872 : CASE_CFN_FMIN:
15339 : 872 : CASE_CFN_FMIN_FN:
15340 : : /* True if the 1st AND 2nd arguments are nonnegative. */
15341 : 872 : return RECURSE (arg0) && RECURSE (arg1);
15342 : :
15343 : 1348 : CASE_CFN_COPYSIGN:
15344 : 1348 : CASE_CFN_COPYSIGN_FN:
15345 : : /* True if the 2nd argument is nonnegative. */
15346 : 1348 : return RECURSE (arg1);
15347 : :
15348 : 2379 : CASE_CFN_POWI:
15349 : : /* True if the 1st argument is nonnegative or the second
15350 : : argument is an even integer. */
15351 : 2379 : if (TREE_CODE (arg1) == INTEGER_CST
15352 : 2379 : && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15353 : : return true;
15354 : 2299 : return RECURSE (arg0);
15355 : :
15356 : 4865 : CASE_CFN_POW:
15357 : 4865 : CASE_CFN_POW_FN:
15358 : : /* True if the 1st argument is nonnegative or the second
15359 : : argument is an even integer valued real. */
15360 : 4865 : if (TREE_CODE (arg1) == REAL_CST)
15361 : : {
15362 : 2133 : REAL_VALUE_TYPE c;
15363 : 2133 : HOST_WIDE_INT n;
15364 : :
15365 : 2133 : c = TREE_REAL_CST (arg1);
15366 : 2133 : n = real_to_integer (&c);
15367 : 2133 : if ((n & 1) == 0)
15368 : : {
15369 : 1490 : REAL_VALUE_TYPE cint;
15370 : 1490 : real_from_integer (&cint, VOIDmode, n, SIGNED);
15371 : 1490 : if (real_identical (&c, &cint))
15372 : 492 : return true;
15373 : : }
15374 : : }
15375 : 4373 : return RECURSE (arg0);
15376 : :
15377 : 19098431 : default:
15378 : 19098431 : break;
15379 : : }
15380 : 19098431 : return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
15381 : : }
15382 : :
15383 : : /* Return true if T is known to be non-negative. If the return
15384 : : value is based on the assumption that signed overflow is undefined,
15385 : : set *STRICT_OVERFLOW_P to true; otherwise, don't change
15386 : : *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
15387 : :
15388 : : static bool
15389 : 53630 : tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
15390 : : {
15391 : 53630 : enum tree_code code = TREE_CODE (t);
15392 : 53630 : if (TYPE_UNSIGNED (TREE_TYPE (t)))
15393 : : return true;
15394 : :
15395 : 53623 : switch (code)
15396 : : {
15397 : 203 : case TARGET_EXPR:
15398 : 203 : {
15399 : 203 : tree temp = TARGET_EXPR_SLOT (t);
15400 : 203 : t = TARGET_EXPR_INITIAL (t);
15401 : :
15402 : : /* If the initializer is non-void, then it's a normal expression
15403 : : that will be assigned to the slot. */
15404 : 203 : if (!VOID_TYPE_P (TREE_TYPE (t)))
15405 : 0 : return RECURSE (t);
15406 : :
15407 : : /* Otherwise, the initializer sets the slot in some way. One common
15408 : : way is an assignment statement at the end of the initializer. */
15409 : 405 : while (1)
15410 : : {
15411 : 405 : if (TREE_CODE (t) == BIND_EXPR)
15412 : 202 : t = expr_last (BIND_EXPR_BODY (t));
15413 : 203 : else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15414 : 203 : || TREE_CODE (t) == TRY_CATCH_EXPR)
15415 : 0 : t = expr_last (TREE_OPERAND (t, 0));
15416 : 203 : else if (TREE_CODE (t) == STATEMENT_LIST)
15417 : 0 : t = expr_last (t);
15418 : : else
15419 : : break;
15420 : : }
15421 : 203 : if (TREE_CODE (t) == MODIFY_EXPR
15422 : 203 : && TREE_OPERAND (t, 0) == temp)
15423 : 202 : return RECURSE (TREE_OPERAND (t, 1));
15424 : :
15425 : : return false;
15426 : : }
15427 : :
15428 : 47304 : case CALL_EXPR:
15429 : 47304 : {
15430 : 47304 : tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15431 : 47304 : tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15432 : :
15433 : 47304 : return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15434 : : get_call_combined_fn (t),
15435 : : arg0,
15436 : : arg1,
15437 : 47304 : strict_overflow_p, depth);
15438 : : }
15439 : 455 : case COMPOUND_EXPR:
15440 : 455 : case MODIFY_EXPR:
15441 : 455 : return RECURSE (TREE_OPERAND (t, 1));
15442 : :
15443 : 12 : case BIND_EXPR:
15444 : 12 : return RECURSE (expr_last (TREE_OPERAND (t, 1)));
15445 : :
15446 : 5219 : case SAVE_EXPR:
15447 : 5219 : return RECURSE (TREE_OPERAND (t, 0));
15448 : :
15449 : 430 : default:
15450 : 430 : return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
15451 : : }
15452 : : }
15453 : :
15454 : : #undef RECURSE
15455 : : #undef tree_expr_nonnegative_warnv_p
15456 : :
15457 : : /* Return true if T is known to be non-negative. If the return
15458 : : value is based on the assumption that signed overflow is undefined,
15459 : : set *STRICT_OVERFLOW_P to true; otherwise, don't change
15460 : : *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
15461 : :
15462 : : bool
15463 : 10649109 : tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
15464 : : {
15465 : 10649109 : enum tree_code code;
15466 : 10649109 : if (t == error_mark_node)
15467 : : return false;
15468 : :
15469 : 10649109 : code = TREE_CODE (t);
15470 : 10649109 : switch (TREE_CODE_CLASS (code))
15471 : : {
15472 : 213048 : case tcc_binary:
15473 : 213048 : case tcc_comparison:
15474 : 213048 : return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15475 : 213048 : TREE_TYPE (t),
15476 : 213048 : TREE_OPERAND (t, 0),
15477 : 213048 : TREE_OPERAND (t, 1),
15478 : 213048 : strict_overflow_p, depth);
15479 : :
15480 : 642952 : case tcc_unary:
15481 : 642952 : return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15482 : 642952 : TREE_TYPE (t),
15483 : 642952 : TREE_OPERAND (t, 0),
15484 : 642952 : strict_overflow_p, depth);
15485 : :
15486 : 4115869 : case tcc_constant:
15487 : 4115869 : case tcc_declaration:
15488 : 4115869 : case tcc_reference:
15489 : 4115869 : return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
15490 : :
15491 : 5677240 : default:
15492 : 5677240 : break;
15493 : : }
15494 : :
15495 : 5677240 : switch (code)
15496 : : {
15497 : 7 : case TRUTH_AND_EXPR:
15498 : 7 : case TRUTH_OR_EXPR:
15499 : 7 : case TRUTH_XOR_EXPR:
15500 : 7 : return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15501 : 7 : TREE_TYPE (t),
15502 : 7 : TREE_OPERAND (t, 0),
15503 : 7 : TREE_OPERAND (t, 1),
15504 : 7 : strict_overflow_p, depth);
15505 : 0 : case TRUTH_NOT_EXPR:
15506 : 0 : return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15507 : 0 : TREE_TYPE (t),
15508 : 0 : TREE_OPERAND (t, 0),
15509 : 0 : strict_overflow_p, depth);
15510 : :
15511 : 5623603 : case COND_EXPR:
15512 : 5623603 : case CONSTRUCTOR:
15513 : 5623603 : case OBJ_TYPE_REF:
15514 : 5623603 : case ADDR_EXPR:
15515 : 5623603 : case WITH_SIZE_EXPR:
15516 : 5623603 : case SSA_NAME:
15517 : 5623603 : return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
15518 : :
15519 : 53630 : default:
15520 : 53630 : return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
15521 : : }
15522 : : }
15523 : :
15524 : : /* Return true if `t' is known to be non-negative. Handle warnings
15525 : : about undefined signed overflow. */
15526 : :
15527 : : bool
15528 : 4723999 : tree_expr_nonnegative_p (tree t)
15529 : : {
15530 : 4723999 : bool ret, strict_overflow_p;
15531 : :
15532 : 4723999 : strict_overflow_p = false;
15533 : 4723999 : ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15534 : 4723999 : if (strict_overflow_p)
15535 : 17738 : fold_overflow_warning (("assuming signed overflow does not occur when "
15536 : : "determining that expression is always "
15537 : : "non-negative"),
15538 : : WARN_STRICT_OVERFLOW_MISC);
15539 : 4723999 : return ret;
15540 : : }
15541 : :
15542 : :
15543 : : /* Return true when (CODE OP0) is an address and is known to be nonzero.
15544 : : For floating point we further ensure that T is not denormal.
15545 : : Similar logic is present in nonzero_address in rtlanal.h.
15546 : :
15547 : : If the return value is based on the assumption that signed overflow
15548 : : is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15549 : : change *STRICT_OVERFLOW_P. */
15550 : :
15551 : : bool
15552 : 1417717 : tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15553 : : bool *strict_overflow_p)
15554 : : {
15555 : 1417717 : switch (code)
15556 : : {
15557 : 1 : case ABS_EXPR:
15558 : 1 : return tree_expr_nonzero_warnv_p (op0,
15559 : 1 : strict_overflow_p);
15560 : :
15561 : 841772 : case NOP_EXPR:
15562 : 841772 : {
15563 : 841772 : tree inner_type = TREE_TYPE (op0);
15564 : 841772 : tree outer_type = type;
15565 : :
15566 : 841772 : return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15567 : 841772 : && tree_expr_nonzero_warnv_p (op0,
15568 : : strict_overflow_p));
15569 : : }
15570 : 28147 : break;
15571 : :
15572 : 28147 : case NON_LVALUE_EXPR:
15573 : 28147 : return tree_expr_nonzero_warnv_p (op0,
15574 : 28147 : strict_overflow_p);
15575 : :
15576 : : default:
15577 : : break;
15578 : : }
15579 : :
15580 : : return false;
15581 : : }
15582 : :
15583 : : /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15584 : : For floating point we further ensure that T is not denormal.
15585 : : Similar logic is present in nonzero_address in rtlanal.h.
15586 : :
15587 : : If the return value is based on the assumption that signed overflow
15588 : : is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15589 : : change *STRICT_OVERFLOW_P. */
15590 : :
15591 : : bool
15592 : 2429725 : tree_binary_nonzero_warnv_p (enum tree_code code,
15593 : : tree type,
15594 : : tree op0,
15595 : : tree op1, bool *strict_overflow_p)
15596 : : {
15597 : 2429725 : bool sub_strict_overflow_p;
15598 : 2429725 : switch (code)
15599 : : {
15600 : 369523 : case POINTER_PLUS_EXPR:
15601 : 369523 : case PLUS_EXPR:
15602 : 369523 : if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
15603 : : {
15604 : : /* With the presence of negative values it is hard
15605 : : to say something. */
15606 : 89877 : sub_strict_overflow_p = false;
15607 : 89877 : if (!tree_expr_nonnegative_warnv_p (op0,
15608 : : &sub_strict_overflow_p)
15609 : 89877 : || !tree_expr_nonnegative_warnv_p (op1,
15610 : : &sub_strict_overflow_p))
15611 : 87582 : return false;
15612 : : /* One of operands must be positive and the other non-negative. */
15613 : : /* We don't set *STRICT_OVERFLOW_P here: even if this value
15614 : : overflows, on a twos-complement machine the sum of two
15615 : : nonnegative numbers can never be zero. */
15616 : 2295 : return (tree_expr_nonzero_warnv_p (op0,
15617 : : strict_overflow_p)
15618 : 2295 : || tree_expr_nonzero_warnv_p (op1,
15619 : : strict_overflow_p));
15620 : : }
15621 : : break;
15622 : :
15623 : 13827 : case MULT_EXPR:
15624 : 13827 : if (TYPE_OVERFLOW_UNDEFINED (type))
15625 : : {
15626 : 416 : if (tree_expr_nonzero_warnv_p (op0,
15627 : : strict_overflow_p)
15628 : 416 : && tree_expr_nonzero_warnv_p (op1,
15629 : : strict_overflow_p))
15630 : : {
15631 : 0 : *strict_overflow_p = true;
15632 : 0 : return true;
15633 : : }
15634 : : }
15635 : : break;
15636 : :
15637 : 12365 : case MIN_EXPR:
15638 : 12365 : sub_strict_overflow_p = false;
15639 : 12365 : if (tree_expr_nonzero_warnv_p (op0,
15640 : : &sub_strict_overflow_p)
15641 : 12365 : && tree_expr_nonzero_warnv_p (op1,
15642 : : &sub_strict_overflow_p))
15643 : : {
15644 : 0 : if (sub_strict_overflow_p)
15645 : 0 : *strict_overflow_p = true;
15646 : : }
15647 : : break;
15648 : :
15649 : 150 : case MAX_EXPR:
15650 : 150 : sub_strict_overflow_p = false;
15651 : 150 : if (tree_expr_nonzero_warnv_p (op0,
15652 : : &sub_strict_overflow_p))
15653 : : {
15654 : 0 : if (sub_strict_overflow_p)
15655 : 0 : *strict_overflow_p = true;
15656 : :
15657 : : /* When both operands are nonzero, then MAX must be too. */
15658 : 0 : if (tree_expr_nonzero_warnv_p (op1,
15659 : : strict_overflow_p))
15660 : : return true;
15661 : :
15662 : : /* MAX where operand 0 is positive is positive. */
15663 : 0 : return tree_expr_nonnegative_warnv_p (op0,
15664 : 0 : strict_overflow_p);
15665 : : }
15666 : : /* MAX where operand 1 is positive is positive. */
15667 : 150 : else if (tree_expr_nonzero_warnv_p (op1,
15668 : : &sub_strict_overflow_p)
15669 : 150 : && tree_expr_nonnegative_warnv_p (op1,
15670 : : &sub_strict_overflow_p))
15671 : : {
15672 : 0 : if (sub_strict_overflow_p)
15673 : 0 : *strict_overflow_p = true;
15674 : 0 : return true;
15675 : : }
15676 : : break;
15677 : :
15678 : 215986 : case BIT_IOR_EXPR:
15679 : 215986 : return (tree_expr_nonzero_warnv_p (op1,
15680 : : strict_overflow_p)
15681 : 215986 : || tree_expr_nonzero_warnv_p (op0,
15682 : : strict_overflow_p));
15683 : :
15684 : : default:
15685 : : break;
15686 : : }
15687 : :
15688 : : return false;
15689 : : }
15690 : :
15691 : : /* Return true when T is an address and is known to be nonzero.
15692 : : For floating point we further ensure that T is not denormal.
15693 : : Similar logic is present in nonzero_address in rtlanal.h.
15694 : :
15695 : : If the return value is based on the assumption that signed overflow
15696 : : is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15697 : : change *STRICT_OVERFLOW_P. */
15698 : :
15699 : : bool
15700 : 136879663 : tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15701 : : {
15702 : 136879663 : bool sub_strict_overflow_p;
15703 : 136879663 : switch (TREE_CODE (t))
15704 : : {
15705 : 1330031 : case INTEGER_CST:
15706 : 1330031 : return !integer_zerop (t);
15707 : :
15708 : 9570339 : case ADDR_EXPR:
15709 : 9570339 : {
15710 : 9570339 : tree base = TREE_OPERAND (t, 0);
15711 : :
15712 : 9570339 : if (!DECL_P (base))
15713 : 4791140 : base = get_base_address (base);
15714 : :
15715 : 9570339 : if (base && TREE_CODE (base) == TARGET_EXPR)
15716 : 445 : base = TARGET_EXPR_SLOT (base);
15717 : :
15718 : 445 : if (!base)
15719 : 0 : return false;
15720 : :
15721 : : /* For objects in symbol table check if we know they are non-zero.
15722 : : Don't do anything for variables and functions before symtab is built;
15723 : : it is quite possible that they will be declared weak later. */
15724 : 9570339 : int nonzero_addr = maybe_nonzero_address (base);
15725 : 9570339 : if (nonzero_addr >= 0)
15726 : 7152688 : return nonzero_addr;
15727 : :
15728 : : /* Constants are never weak. */
15729 : 2417651 : if (CONSTANT_CLASS_P (base))
15730 : : return true;
15731 : :
15732 : : return false;
15733 : : }
15734 : :
15735 : 29877 : case COND_EXPR:
15736 : 29877 : sub_strict_overflow_p = false;
15737 : 29877 : if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15738 : : &sub_strict_overflow_p)
15739 : 29877 : && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15740 : : &sub_strict_overflow_p))
15741 : : {
15742 : 1204 : if (sub_strict_overflow_p)
15743 : 0 : *strict_overflow_p = true;
15744 : 1204 : return true;
15745 : : }
15746 : : break;
15747 : :
15748 : 115511320 : case SSA_NAME:
15749 : 115511320 : if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
15750 : : break;
15751 : 90280555 : return expr_not_equal_to (t, wi::zero (TYPE_PRECISION (TREE_TYPE (t))));
15752 : :
15753 : : default:
15754 : : break;
15755 : : }
15756 : : return false;
15757 : : }
15758 : :
15759 : : #define integer_valued_real_p(X) \
15760 : : _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
15761 : :
15762 : : #define RECURSE(X) \
15763 : : ((integer_valued_real_p) (X, depth + 1))
15764 : :
15765 : : /* Return true if the floating point result of (CODE OP0) has an
15766 : : integer value. We also allow +Inf, -Inf and NaN to be considered
15767 : : integer values. Return false for signaling NaN.
15768 : :
15769 : : DEPTH is the current nesting depth of the query. */
15770 : :
15771 : : bool
15772 : 14623 : integer_valued_real_unary_p (tree_code code, tree op0, int depth)
15773 : : {
15774 : 14623 : switch (code)
15775 : : {
15776 : : case FLOAT_EXPR:
15777 : : return true;
15778 : :
15779 : 1413 : case ABS_EXPR:
15780 : 1413 : return RECURSE (op0);
15781 : :
15782 : 9673 : CASE_CONVERT:
15783 : 9673 : {
15784 : 9673 : tree type = TREE_TYPE (op0);
15785 : 9673 : if (TREE_CODE (type) == INTEGER_TYPE)
15786 : : return true;
15787 : 9673 : if (SCALAR_FLOAT_TYPE_P (type))
15788 : 9673 : return RECURSE (op0);
15789 : : break;
15790 : : }
15791 : :
15792 : : default:
15793 : : break;
15794 : : }
15795 : : return false;
15796 : : }
15797 : :
15798 : : /* Return true if the floating point result of (CODE OP0 OP1) has an
15799 : : integer value. We also allow +Inf, -Inf and NaN to be considered
15800 : : integer values. Return false for signaling NaN.
15801 : :
15802 : : DEPTH is the current nesting depth of the query. */
15803 : :
15804 : : bool
15805 : 13140 : integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
15806 : : {
15807 : 13140 : switch (code)
15808 : : {
15809 : 7083 : case PLUS_EXPR:
15810 : 7083 : case MINUS_EXPR:
15811 : 7083 : case MULT_EXPR:
15812 : 7083 : case MIN_EXPR:
15813 : 7083 : case MAX_EXPR:
15814 : 7083 : return RECURSE (op0) && RECURSE (op1);
15815 : :
15816 : : default:
15817 : : break;
15818 : : }
15819 : : return false;
15820 : : }
15821 : :
15822 : : /* Return true if the floating point result of calling FNDECL with arguments
15823 : : ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and NaN to be
15824 : : considered integer values. Return false for signaling NaN. If FNDECL
15825 : : takes fewer than 2 arguments, the remaining ARGn are null.
15826 : :
15827 : : DEPTH is the current nesting depth of the query. */
15828 : :
15829 : : bool
15830 : 920 : integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
15831 : : {
15832 : 920 : switch (fn)
15833 : : {
15834 : : CASE_CFN_CEIL:
15835 : : CASE_CFN_CEIL_FN:
15836 : : CASE_CFN_FLOOR:
15837 : : CASE_CFN_FLOOR_FN:
15838 : : CASE_CFN_NEARBYINT:
15839 : : CASE_CFN_NEARBYINT_FN:
15840 : : CASE_CFN_RINT:
15841 : : CASE_CFN_RINT_FN:
15842 : : CASE_CFN_ROUND:
15843 : : CASE_CFN_ROUND_FN:
15844 : : CASE_CFN_ROUNDEVEN:
15845 : : CASE_CFN_ROUNDEVEN_FN:
15846 : : CASE_CFN_TRUNC:
15847 : : CASE_CFN_TRUNC_FN:
15848 : : return true;
15849 : :
15850 : 336 : CASE_CFN_FMIN:
15851 : 336 : CASE_CFN_FMIN_FN:
15852 : 336 : CASE_CFN_FMAX:
15853 : 336 : CASE_CFN_FMAX_FN:
15854 : 336 : return RECURSE (arg0) && RECURSE (arg1);
15855 : :
15856 : : default:
15857 : : break;
15858 : : }
15859 : : return false;
15860 : : }
15861 : :
15862 : : /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
15863 : : has an integer value. We also allow +Inf, -Inf and NaN to be
15864 : : considered integer values. Return false for signaling NaN.
15865 : :
15866 : : DEPTH is the current nesting depth of the query. */
15867 : :
15868 : : bool
15869 : 108397 : integer_valued_real_single_p (tree t, int depth)
15870 : : {
15871 : 108397 : switch (TREE_CODE (t))
15872 : : {
15873 : 1721 : case REAL_CST:
15874 : 1721 : return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
15875 : :
15876 : 0 : case COND_EXPR:
15877 : 0 : return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
15878 : :
15879 : 78657 : case SSA_NAME:
15880 : : /* Limit the depth of recursion to avoid quadratic behavior.
15881 : : This is expected to catch almost all occurrences in practice.
15882 : : If this code misses important cases that unbounded recursion
15883 : : would not, passes that need this information could be revised
15884 : : to provide it through dataflow propagation. */
15885 : 78657 : return (!name_registered_for_update_p (t)
15886 : 78657 : && depth < param_max_ssa_name_query_depth
15887 : 156686 : && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
15888 : : depth));
15889 : :
15890 : : default:
15891 : : break;
15892 : : }
15893 : : return false;
15894 : : }
15895 : :
15896 : : /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
15897 : : has an integer value. We also allow +Inf, -Inf and NaN to be
15898 : : considered integer values. Return false for signaling NaN.
15899 : :
15900 : : DEPTH is the current nesting depth of the query. */
15901 : :
15902 : : static bool
15903 : 0 : integer_valued_real_invalid_p (tree t, int depth)
15904 : : {
15905 : 0 : switch (TREE_CODE (t))
15906 : : {
15907 : 0 : case COMPOUND_EXPR:
15908 : 0 : case MODIFY_EXPR:
15909 : 0 : case BIND_EXPR:
15910 : 0 : return RECURSE (TREE_OPERAND (t, 1));
15911 : :
15912 : 0 : case SAVE_EXPR:
15913 : 0 : return RECURSE (TREE_OPERAND (t, 0));
15914 : :
15915 : : default:
15916 : : break;
15917 : : }
15918 : : return false;
15919 : : }
15920 : :
15921 : : #undef RECURSE
15922 : : #undef integer_valued_real_p
15923 : :
15924 : : /* Return true if the floating point expression T has an integer value.
15925 : : We also allow +Inf, -Inf and NaN to be considered integer values.
15926 : : Return false for signaling NaN.
15927 : :
15928 : : DEPTH is the current nesting depth of the query. */
15929 : :
15930 : : bool
15931 : 85674 : integer_valued_real_p (tree t, int depth)
15932 : : {
15933 : 85674 : if (t == error_mark_node)
15934 : : return false;
15935 : :
15936 : 85674 : STRIP_ANY_LOCATION_WRAPPER (t);
15937 : :
15938 : 85674 : tree_code code = TREE_CODE (t);
15939 : 85674 : switch (TREE_CODE_CLASS (code))
15940 : : {
15941 : 0 : case tcc_binary:
15942 : 0 : case tcc_comparison:
15943 : 0 : return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
15944 : 0 : TREE_OPERAND (t, 1), depth);
15945 : :
15946 : 0 : case tcc_unary:
15947 : 0 : return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
15948 : :
15949 : 7988 : case tcc_constant:
15950 : 7988 : case tcc_declaration:
15951 : 7988 : case tcc_reference:
15952 : 7988 : return integer_valued_real_single_p (t, depth);
15953 : :
15954 : 77686 : default:
15955 : 77686 : break;
15956 : : }
15957 : :
15958 : 77686 : switch (code)
15959 : : {
15960 : 77686 : case COND_EXPR:
15961 : 77686 : case SSA_NAME:
15962 : 77686 : return integer_valued_real_single_p (t, depth);
15963 : :
15964 : 0 : case CALL_EXPR:
15965 : 0 : {
15966 : 0 : tree arg0 = (call_expr_nargs (t) > 0
15967 : 0 : ? CALL_EXPR_ARG (t, 0)
15968 : 0 : : NULL_TREE);
15969 : 0 : tree arg1 = (call_expr_nargs (t) > 1
15970 : 0 : ? CALL_EXPR_ARG (t, 1)
15971 : 0 : : NULL_TREE);
15972 : 0 : return integer_valued_real_call_p (get_call_combined_fn (t),
15973 : 0 : arg0, arg1, depth);
15974 : : }
15975 : :
15976 : 0 : default:
15977 : 0 : return integer_valued_real_invalid_p (t, depth);
15978 : : }
15979 : : }
15980 : :
15981 : : /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15982 : : attempt to fold the expression to a constant without modifying TYPE,
15983 : : OP0 or OP1.
15984 : :
15985 : : If the expression could be simplified to a constant, then return
15986 : : the constant. If the expression would not be simplified to a
15987 : : constant, then return NULL_TREE. */
15988 : :
15989 : : tree
15990 : 13587869 : fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15991 : : {
15992 : 13587869 : tree tem = fold_binary (code, type, op0, op1);
15993 : 13587869 : return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15994 : : }
15995 : :
15996 : : /* Given the components of a unary expression CODE, TYPE and OP0,
15997 : : attempt to fold the expression to a constant without modifying
15998 : : TYPE or OP0.
15999 : :
16000 : : If the expression could be simplified to a constant, then return
16001 : : the constant. If the expression would not be simplified to a
16002 : : constant, then return NULL_TREE. */
16003 : :
16004 : : tree
16005 : 0 : fold_unary_to_constant (enum tree_code code, tree type, tree op0)
16006 : : {
16007 : 0 : tree tem = fold_unary (code, type, op0);
16008 : 0 : return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
16009 : : }
16010 : :
16011 : : /* If EXP represents referencing an element in a constant string
16012 : : (either via pointer arithmetic or array indexing), return the
16013 : : tree representing the value accessed, otherwise return NULL. */
16014 : :
16015 : : tree
16016 : 139734386 : fold_read_from_constant_string (tree exp)
16017 : : {
16018 : 139734386 : if ((INDIRECT_REF_P (exp)
16019 : 139734368 : || TREE_CODE (exp) == ARRAY_REF)
16020 : 149422114 : && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
16021 : : {
16022 : 6766706 : tree exp1 = TREE_OPERAND (exp, 0);
16023 : 6766706 : tree index;
16024 : 6766706 : tree string;
16025 : 6766706 : location_t loc = EXPR_LOCATION (exp);
16026 : :
16027 : 6766706 : if (INDIRECT_REF_P (exp))
16028 : 0 : string = string_constant (exp1, &index, NULL, NULL);
16029 : : else
16030 : : {
16031 : 6766706 : tree low_bound = array_ref_low_bound (exp);
16032 : 6766706 : index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
16033 : :
16034 : : /* Optimize the special-case of a zero lower bound.
16035 : :
16036 : : We convert the low_bound to sizetype to avoid some problems
16037 : : with constant folding. (E.g. suppose the lower bound is 1,
16038 : : and its mode is QI. Without the conversion,l (ARRAY
16039 : : +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
16040 : : +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
16041 : 6766706 : if (! integer_zerop (low_bound))
16042 : 132530 : index = size_diffop_loc (loc, index,
16043 : : fold_convert_loc (loc, sizetype, low_bound));
16044 : :
16045 : : string = exp1;
16046 : : }
16047 : :
16048 : 6766706 : scalar_int_mode char_mode;
16049 : 6766706 : if (string
16050 : 6766706 : && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
16051 : 6766706 : && TREE_CODE (string) == STRING_CST
16052 : 54660 : && tree_fits_uhwi_p (index)
16053 : 50112 : && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
16054 : 6816581 : && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))),
16055 : : &char_mode)
16056 : 6815880 : && GET_MODE_SIZE (char_mode) == 1)
16057 : 96946 : return build_int_cst_type (TREE_TYPE (exp),
16058 : 48473 : (TREE_STRING_POINTER (string)
16059 : 48473 : [TREE_INT_CST_LOW (index)]));
16060 : : }
16061 : : return NULL;
16062 : : }
16063 : :
16064 : : /* Folds a read from vector element at IDX of vector ARG. */
16065 : :
16066 : : tree
16067 : 3899 : fold_read_from_vector (tree arg, poly_uint64 idx)
16068 : : {
16069 : 3899 : unsigned HOST_WIDE_INT i;
16070 : 3899 : if (known_lt (idx, TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)))
16071 : 3899 : && known_ge (idx, 0u)
16072 : 3899 : && idx.is_constant (&i))
16073 : : {
16074 : 3899 : if (TREE_CODE (arg) == VECTOR_CST)
16075 : 307 : return VECTOR_CST_ELT (arg, i);
16076 : 3592 : else if (TREE_CODE (arg) == CONSTRUCTOR)
16077 : : {
16078 : 1446 : if (CONSTRUCTOR_NELTS (arg)
16079 : 1419 : && VECTOR_TYPE_P (TREE_TYPE (CONSTRUCTOR_ELT (arg, 0)->value)))
16080 : : return NULL_TREE;
16081 : 2519 : if (i >= CONSTRUCTOR_NELTS (arg))
16082 : 27 : return build_zero_cst (TREE_TYPE (TREE_TYPE (arg)));
16083 : 1246 : return CONSTRUCTOR_ELT (arg, i)->value;
16084 : : }
16085 : : }
16086 : : return NULL_TREE;
16087 : : }
16088 : :
16089 : : /* Return the tree for neg (ARG0) when ARG0 is known to be either
16090 : : an integer constant, real, or fixed-point constant.
16091 : :
16092 : : TYPE is the type of the result. */
16093 : :
16094 : : static tree
16095 : 26827801 : fold_negate_const (tree arg0, tree type)
16096 : : {
16097 : 26827801 : tree t = NULL_TREE;
16098 : :
16099 : 26827801 : switch (TREE_CODE (arg0))
16100 : : {
16101 : 1959576 : case REAL_CST:
16102 : 1959576 : t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16103 : 1959576 : break;
16104 : :
16105 : 0 : case FIXED_CST:
16106 : 0 : {
16107 : 0 : FIXED_VALUE_TYPE f;
16108 : 0 : bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
16109 : 0 : &(TREE_FIXED_CST (arg0)), NULL,
16110 : 0 : TYPE_SATURATING (type));
16111 : 0 : t = build_fixed (type, f);
16112 : : /* Propagate overflow flags. */
16113 : 0 : if (overflow_p | TREE_OVERFLOW (arg0))
16114 : 0 : TREE_OVERFLOW (t) = 1;
16115 : 0 : break;
16116 : : }
16117 : :
16118 : 24868225 : default:
16119 : 24868225 : if (poly_int_tree_p (arg0))
16120 : : {
16121 : 24868225 : wi::overflow_type overflow;
16122 : 24868225 : poly_wide_int res = wi::neg (wi::to_poly_wide (arg0), &overflow);
16123 : 24868225 : t = force_fit_type (type, res, 1,
16124 : 204186 : (overflow && ! TYPE_UNSIGNED (type))
16125 : 25065936 : || TREE_OVERFLOW (arg0));
16126 : 24868225 : break;
16127 : 24868225 : }
16128 : :
16129 : 0 : gcc_unreachable ();
16130 : : }
16131 : :
16132 : 26827801 : return t;
16133 : : }
16134 : :
16135 : : /* Return the tree for abs (ARG0) when ARG0 is known to be either
16136 : : an integer constant or real constant.
16137 : :
16138 : : TYPE is the type of the result. */
16139 : :
16140 : : tree
16141 : 31529 : fold_abs_const (tree arg0, tree type)
16142 : : {
16143 : 31529 : tree t = NULL_TREE;
16144 : :
16145 : 31529 : switch (TREE_CODE (arg0))
16146 : : {
16147 : 5124 : case INTEGER_CST:
16148 : 5124 : {
16149 : : /* If the value is unsigned or non-negative, then the absolute value
16150 : : is the same as the ordinary value. */
16151 : 5124 : wide_int val = wi::to_wide (arg0);
16152 : 5124 : wi::overflow_type overflow = wi::OVF_NONE;
16153 : 5124 : if (!wi::neg_p (val, TYPE_SIGN (TREE_TYPE (arg0))))
16154 : : ;
16155 : :
16156 : : /* If the value is negative, then the absolute value is
16157 : : its negation. */
16158 : : else
16159 : 2167 : val = wi::neg (val, &overflow);
16160 : :
16161 : : /* Force to the destination type, set TREE_OVERFLOW for signed
16162 : : TYPE only. */
16163 : 5124 : t = force_fit_type (type, val, 1, overflow | TREE_OVERFLOW (arg0));
16164 : 5124 : }
16165 : 5124 : break;
16166 : :
16167 : 26405 : case REAL_CST:
16168 : 26405 : if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
16169 : 6989 : t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16170 : : else
16171 : : t = arg0;
16172 : : break;
16173 : :
16174 : 0 : default:
16175 : 0 : gcc_unreachable ();
16176 : : }
16177 : :
16178 : 31529 : return t;
16179 : : }
16180 : :
16181 : : /* Return the tree for not (ARG0) when ARG0 is known to be an integer
16182 : : constant. TYPE is the type of the result. */
16183 : :
16184 : : static tree
16185 : 2174021 : fold_not_const (const_tree arg0, tree type)
16186 : : {
16187 : 2174021 : gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
16188 : :
16189 : 2174021 : return force_fit_type (type, ~wi::to_wide (arg0), 0, TREE_OVERFLOW (arg0));
16190 : : }
16191 : :
16192 : : /* Given CODE, a relational operator, the target type, TYPE and two
16193 : : constant operands OP0 and OP1, return the result of the
16194 : : relational operation. If the result is not a compile time
16195 : : constant, then return NULL_TREE. */
16196 : :
16197 : : static tree
16198 : 50565880 : fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
16199 : : {
16200 : 50565880 : int result, invert;
16201 : :
16202 : : /* From here on, the only cases we handle are when the result is
16203 : : known to be a constant. */
16204 : :
16205 : 50565880 : if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
16206 : : {
16207 : 810001 : const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
16208 : 810001 : const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
16209 : :
16210 : : /* Handle the cases where either operand is a NaN. */
16211 : 810001 : if (real_isnan (c0) || real_isnan (c1))
16212 : : {
16213 : 11958 : switch (code)
16214 : : {
16215 : : case EQ_EXPR:
16216 : : case ORDERED_EXPR:
16217 : : result = 0;
16218 : : break;
16219 : :
16220 : : case NE_EXPR:
16221 : : case UNORDERED_EXPR:
16222 : : case UNLT_EXPR:
16223 : : case UNLE_EXPR:
16224 : : case UNGT_EXPR:
16225 : : case UNGE_EXPR:
16226 : : case UNEQ_EXPR:
16227 : 6423 : result = 1;
16228 : : break;
16229 : :
16230 : 5557 : case LT_EXPR:
16231 : 5557 : case LE_EXPR:
16232 : 5557 : case GT_EXPR:
16233 : 5557 : case GE_EXPR:
16234 : 5557 : case LTGT_EXPR:
16235 : 5557 : if (flag_trapping_math)
16236 : : return NULL_TREE;
16237 : : result = 0;
16238 : : break;
16239 : :
16240 : 0 : default:
16241 : 0 : gcc_unreachable ();
16242 : : }
16243 : :
16244 : 6423 : return constant_boolean_node (result, type);
16245 : : }
16246 : :
16247 : 798043 : return constant_boolean_node (real_compare (code, c0, c1), type);
16248 : : }
16249 : :
16250 : 49755879 : if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
16251 : : {
16252 : 0 : const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
16253 : 0 : const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
16254 : 0 : return constant_boolean_node (fixed_compare (code, c0, c1), type);
16255 : : }
16256 : :
16257 : : /* Handle equality/inequality of complex constants. */
16258 : 49755879 : if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
16259 : : {
16260 : 58048 : tree rcond = fold_relational_const (code, type,
16261 : 29024 : TREE_REALPART (op0),
16262 : 29024 : TREE_REALPART (op1));
16263 : 116096 : tree icond = fold_relational_const (code, type,
16264 : 29024 : TREE_IMAGPART (op0),
16265 : 29024 : TREE_IMAGPART (op1));
16266 : 29024 : if (code == EQ_EXPR)
16267 : 277 : return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
16268 : 28747 : else if (code == NE_EXPR)
16269 : 28747 : return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
16270 : : else
16271 : : return NULL_TREE;
16272 : : }
16273 : :
16274 : 49726855 : if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
16275 : : {
16276 : 5104 : if (!VECTOR_TYPE_P (type))
16277 : : {
16278 : : /* Have vector comparison with scalar boolean result. */
16279 : 999 : gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
16280 : : && known_eq (VECTOR_CST_NELTS (op0),
16281 : : VECTOR_CST_NELTS (op1)));
16282 : 999 : unsigned HOST_WIDE_INT nunits;
16283 : 999 : if (!VECTOR_CST_NELTS (op0).is_constant (&nunits))
16284 : : return NULL_TREE;
16285 : 1966 : for (unsigned i = 0; i < nunits; i++)
16286 : : {
16287 : 1836 : tree elem0 = VECTOR_CST_ELT (op0, i);
16288 : 1836 : tree elem1 = VECTOR_CST_ELT (op1, i);
16289 : 1836 : tree tmp = fold_relational_const (EQ_EXPR, type, elem0, elem1);
16290 : 1836 : if (tmp == NULL_TREE)
16291 : : return NULL_TREE;
16292 : 1836 : if (integer_zerop (tmp))
16293 : 869 : return constant_boolean_node (code == NE_EXPR, type);
16294 : : }
16295 : 130 : return constant_boolean_node (code == EQ_EXPR, type);
16296 : : }
16297 : 4105 : tree_vector_builder elts;
16298 : 4105 : if (!elts.new_binary_operation (type, op0, op1, false))
16299 : : return NULL_TREE;
16300 : 4105 : unsigned int count = elts.encoded_nelts ();
16301 : 16580 : for (unsigned i = 0; i < count; i++)
16302 : : {
16303 : 12475 : tree elem_type = TREE_TYPE (type);
16304 : 12475 : tree elem0 = VECTOR_CST_ELT (op0, i);
16305 : 12475 : tree elem1 = VECTOR_CST_ELT (op1, i);
16306 : :
16307 : 12475 : tree tem = fold_relational_const (code, elem_type,
16308 : : elem0, elem1);
16309 : :
16310 : 12475 : if (tem == NULL_TREE)
16311 : : return NULL_TREE;
16312 : :
16313 : 12475 : elts.quick_push (build_int_cst (elem_type,
16314 : 17416 : integer_zerop (tem) ? 0 : -1));
16315 : : }
16316 : :
16317 : 4105 : return elts.build ();
16318 : 4105 : }
16319 : :
16320 : : /* From here on we only handle LT, LE, GT, GE, EQ and NE.
16321 : :
16322 : : To compute GT, swap the arguments and do LT.
16323 : : To compute GE, do LT and invert the result.
16324 : : To compute LE, swap the arguments, do LT and invert the result.
16325 : : To compute NE, do EQ and invert the result.
16326 : :
16327 : : Therefore, the code below must handle only EQ and LT. */
16328 : :
16329 : 49721751 : if (code == LE_EXPR || code == GT_EXPR)
16330 : : {
16331 : 10462616 : std::swap (op0, op1);
16332 : 10462616 : code = swap_tree_comparison (code);
16333 : : }
16334 : :
16335 : : /* Note that it is safe to invert for real values here because we
16336 : : have already handled the one case that it matters. */
16337 : :
16338 : 49721751 : invert = 0;
16339 : 49721751 : if (code == NE_EXPR || code == GE_EXPR)
16340 : : {
16341 : 24173477 : invert = 1;
16342 : 24173477 : code = invert_tree_comparison (code, false);
16343 : : }
16344 : :
16345 : : /* Compute a result for LT or EQ if args permit;
16346 : : Otherwise return T. */
16347 : 49721751 : if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
16348 : : {
16349 : 49700532 : if (code == EQ_EXPR)
16350 : 25047469 : result = tree_int_cst_equal (op0, op1);
16351 : : else
16352 : 24653063 : result = tree_int_cst_lt (op0, op1);
16353 : : }
16354 : : else
16355 : : return NULL_TREE;
16356 : :
16357 : 49700532 : if (invert)
16358 : 24171302 : result ^= 1;
16359 : 49700532 : return constant_boolean_node (result, type);
16360 : : }
16361 : :
16362 : : /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
16363 : : indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
16364 : : itself. */
16365 : :
16366 : : tree
16367 : 107066366 : fold_build_cleanup_point_expr (tree type, tree expr)
16368 : : {
16369 : : /* If the expression does not have side effects then we don't have to wrap
16370 : : it with a cleanup point expression. */
16371 : 107066366 : if (!TREE_SIDE_EFFECTS (expr))
16372 : : return expr;
16373 : :
16374 : : /* If the expression is a return, check to see if the expression inside the
16375 : : return has no side effects or the right hand side of the modify expression
16376 : : inside the return. If either don't have side effects set we don't need to
16377 : : wrap the expression in a cleanup point expression. Note we don't check the
16378 : : left hand side of the modify because it should always be a return decl. */
16379 : 93260969 : if (TREE_CODE (expr) == RETURN_EXPR)
16380 : : {
16381 : 33260395 : tree op = TREE_OPERAND (expr, 0);
16382 : 33260395 : if (!op || !TREE_SIDE_EFFECTS (op))
16383 : : return expr;
16384 : 32674815 : op = TREE_OPERAND (op, 1);
16385 : 32674815 : if (!TREE_SIDE_EFFECTS (op))
16386 : : return expr;
16387 : : }
16388 : :
16389 : 75735143 : return build1_loc (EXPR_LOCATION (expr), CLEANUP_POINT_EXPR, type, expr);
16390 : : }
16391 : :
16392 : : /* Given a pointer value OP0 and a type TYPE, return a simplified version
16393 : : of an indirection through OP0, or NULL_TREE if no simplification is
16394 : : possible. */
16395 : :
16396 : : tree
16397 : 15894743 : fold_indirect_ref_1 (location_t loc, tree type, tree op0)
16398 : : {
16399 : 15894743 : tree sub = op0;
16400 : 15894743 : tree subtype;
16401 : 15894743 : poly_uint64 const_op01;
16402 : :
16403 : 15894743 : STRIP_NOPS (sub);
16404 : 15894743 : subtype = TREE_TYPE (sub);
16405 : 15894743 : if (!POINTER_TYPE_P (subtype)
16406 : 15894743 : || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0)))
16407 : : return NULL_TREE;
16408 : :
16409 : 15764974 : if (TREE_CODE (sub) == ADDR_EXPR)
16410 : : {
16411 : 2557441 : tree op = TREE_OPERAND (sub, 0);
16412 : 2557441 : tree optype = TREE_TYPE (op);
16413 : :
16414 : : /* *&CONST_DECL -> to the value of the const decl. */
16415 : 2557441 : if (TREE_CODE (op) == CONST_DECL)
16416 : 2176 : return DECL_INITIAL (op);
16417 : : /* *&p => p; make sure to handle *&"str"[cst] here. */
16418 : 2555265 : if (type == optype)
16419 : : {
16420 : 1721339 : tree fop = fold_read_from_constant_string (op);
16421 : 1721339 : if (fop)
16422 : : return fop;
16423 : : else
16424 : 1679867 : return op;
16425 : : }
16426 : : /* *(foo *)&fooarray => fooarray[0] */
16427 : 833926 : else if (TREE_CODE (optype) == ARRAY_TYPE
16428 : 12961 : && type == TREE_TYPE (optype)
16429 : 845939 : && (!in_gimple_form
16430 : 1068 : || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16431 : : {
16432 : 12013 : tree type_domain = TYPE_DOMAIN (optype);
16433 : 12013 : tree min_val = size_zero_node;
16434 : 12013 : if (type_domain && TYPE_MIN_VALUE (type_domain))
16435 : 11965 : min_val = TYPE_MIN_VALUE (type_domain);
16436 : 12013 : if (in_gimple_form
16437 : 1068 : && TREE_CODE (min_val) != INTEGER_CST)
16438 : : return NULL_TREE;
16439 : 12013 : return build4_loc (loc, ARRAY_REF, type, op, min_val,
16440 : 12013 : NULL_TREE, NULL_TREE);
16441 : : }
16442 : : /* *(foo *)&complexfoo => __real__ complexfoo */
16443 : 821913 : else if (TREE_CODE (optype) == COMPLEX_TYPE
16444 : 821913 : && type == TREE_TYPE (optype))
16445 : 0 : return fold_build1_loc (loc, REALPART_EXPR, type, op);
16446 : : /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16447 : 821913 : else if (VECTOR_TYPE_P (optype)
16448 : 821913 : && type == TREE_TYPE (optype))
16449 : : {
16450 : 70 : tree part_width = TYPE_SIZE (type);
16451 : 70 : tree index = bitsize_int (0);
16452 : 70 : return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width,
16453 : 70 : index);
16454 : : }
16455 : : }
16456 : :
16457 : 14029376 : if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16458 : 14029376 : && poly_int_tree_p (TREE_OPERAND (sub, 1), &const_op01))
16459 : : {
16460 : 268233 : tree op00 = TREE_OPERAND (sub, 0);
16461 : 268233 : tree op01 = TREE_OPERAND (sub, 1);
16462 : :
16463 : 268233 : STRIP_NOPS (op00);
16464 : 268233 : if (TREE_CODE (op00) == ADDR_EXPR)
16465 : : {
16466 : 4149 : tree op00type;
16467 : 4149 : op00 = TREE_OPERAND (op00, 0);
16468 : 4149 : op00type = TREE_TYPE (op00);
16469 : :
16470 : : /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16471 : 4149 : if (VECTOR_TYPE_P (op00type)
16472 : 240 : && type == TREE_TYPE (op00type)
16473 : : /* POINTER_PLUS_EXPR second operand is sizetype, unsigned,
16474 : : but we want to treat offsets with MSB set as negative.
16475 : : For the code below negative offsets are invalid and
16476 : : TYPE_SIZE of the element is something unsigned, so
16477 : : check whether op01 fits into poly_int64, which implies
16478 : : it is from 0 to INTTYPE_MAXIMUM (HOST_WIDE_INT), and
16479 : : then just use poly_uint64 because we want to treat the
16480 : : value as unsigned. */
16481 : 4342 : && tree_fits_poly_int64_p (op01))
16482 : : {
16483 : 179 : tree part_width = TYPE_SIZE (type);
16484 : 179 : poly_uint64 max_offset
16485 : 179 : = (tree_to_uhwi (part_width) / BITS_PER_UNIT
16486 : 179 : * TYPE_VECTOR_SUBPARTS (op00type));
16487 : 179 : if (known_lt (const_op01, max_offset))
16488 : : {
16489 : 179 : tree index = bitsize_int (const_op01 * BITS_PER_UNIT);
16490 : 179 : return fold_build3_loc (loc,
16491 : : BIT_FIELD_REF, type, op00,
16492 : 179 : part_width, index);
16493 : : }
16494 : : }
16495 : : /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16496 : 3970 : else if (TREE_CODE (op00type) == COMPLEX_TYPE
16497 : 3970 : && type == TREE_TYPE (op00type))
16498 : : {
16499 : 0 : if (known_eq (wi::to_poly_offset (TYPE_SIZE_UNIT (type)),
16500 : : const_op01))
16501 : 0 : return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16502 : : }
16503 : : /* ((foo *)&fooarray)[1] => fooarray[1] */
16504 : 3970 : else if (TREE_CODE (op00type) == ARRAY_TYPE
16505 : 3970 : && type == TREE_TYPE (op00type))
16506 : : {
16507 : 3068 : tree type_domain = TYPE_DOMAIN (op00type);
16508 : 3068 : tree min_val = size_zero_node;
16509 : 3068 : if (type_domain && TYPE_MIN_VALUE (type_domain))
16510 : 3067 : min_val = TYPE_MIN_VALUE (type_domain);
16511 : 3068 : poly_uint64 type_size, index;
16512 : 3068 : if (poly_int_tree_p (min_val)
16513 : 3068 : && poly_int_tree_p (TYPE_SIZE_UNIT (type), &type_size)
16514 : 3068 : && multiple_p (const_op01, type_size, &index))
16515 : : {
16516 : 3068 : poly_offset_int off = index + wi::to_poly_offset (min_val);
16517 : 3068 : op01 = wide_int_to_tree (sizetype, off);
16518 : 3068 : return build4_loc (loc, ARRAY_REF, type, op00, op01,
16519 : : NULL_TREE, NULL_TREE);
16520 : : }
16521 : : }
16522 : : }
16523 : : }
16524 : :
16525 : : /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16526 : 14026129 : if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16527 : 561960 : && type == TREE_TYPE (TREE_TYPE (subtype))
16528 : 14028867 : && (!in_gimple_form
16529 : 8 : || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16530 : : {
16531 : 2737 : tree type_domain;
16532 : 2737 : tree min_val = size_zero_node;
16533 : 2737 : sub = build_fold_indirect_ref_loc (loc, sub);
16534 : 2737 : type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16535 : 2737 : if (type_domain && TYPE_MIN_VALUE (type_domain))
16536 : 2737 : min_val = TYPE_MIN_VALUE (type_domain);
16537 : 2737 : if (in_gimple_form
16538 : 7 : && TREE_CODE (min_val) != INTEGER_CST)
16539 : : return NULL_TREE;
16540 : 2737 : return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16541 : 2737 : NULL_TREE);
16542 : : }
16543 : :
16544 : : return NULL_TREE;
16545 : : }
16546 : :
16547 : : /* Builds an expression for an indirection through T, simplifying some
16548 : : cases. */
16549 : :
16550 : : tree
16551 : 6163048 : build_fold_indirect_ref_loc (location_t loc, tree t)
16552 : : {
16553 : 6163048 : tree type = TREE_TYPE (TREE_TYPE (t));
16554 : 6163048 : tree sub = fold_indirect_ref_1 (loc, type, t);
16555 : :
16556 : 6163048 : if (sub)
16557 : : return sub;
16558 : :
16559 : 4438872 : return build1_loc (loc, INDIRECT_REF, type, t);
16560 : : }
16561 : :
16562 : : /* Given an INDIRECT_REF T, return either T or a simplified version. */
16563 : :
16564 : : tree
16565 : 9546018 : fold_indirect_ref_loc (location_t loc, tree t)
16566 : : {
16567 : 9546018 : tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16568 : :
16569 : 9546018 : if (sub)
16570 : : return sub;
16571 : : else
16572 : 9529175 : return t;
16573 : : }
16574 : :
16575 : : /* Strip non-trapping, non-side-effecting tree nodes from an expression
16576 : : whose result is ignored. The type of the returned tree need not be
16577 : : the same as the original expression. */
16578 : :
16579 : : tree
16580 : 130852 : fold_ignored_result (tree t)
16581 : : {
16582 : 130852 : if (!TREE_SIDE_EFFECTS (t))
16583 : 18796 : return integer_zero_node;
16584 : :
16585 : 150071 : for (;;)
16586 : 150071 : switch (TREE_CODE_CLASS (TREE_CODE (t)))
16587 : : {
16588 : 4080 : case tcc_unary:
16589 : 4080 : t = TREE_OPERAND (t, 0);
16590 : 4080 : break;
16591 : :
16592 : 5735 : case tcc_binary:
16593 : 5735 : case tcc_comparison:
16594 : 5735 : if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16595 : 3444 : t = TREE_OPERAND (t, 0);
16596 : 2291 : else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16597 : 25 : t = TREE_OPERAND (t, 1);
16598 : : else
16599 : 2266 : return t;
16600 : : break;
16601 : :
16602 : 97866 : case tcc_expression:
16603 : 97866 : switch (TREE_CODE (t))
16604 : : {
16605 : 30411 : case COMPOUND_EXPR:
16606 : 30411 : if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16607 : 240 : return t;
16608 : 30171 : t = TREE_OPERAND (t, 0);
16609 : 30171 : break;
16610 : :
16611 : 382 : case COND_EXPR:
16612 : 382 : if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16613 : 382 : || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16614 : 87 : return t;
16615 : 295 : t = TREE_OPERAND (t, 0);
16616 : 295 : break;
16617 : :
16618 : : default:
16619 : : return t;
16620 : : }
16621 : : break;
16622 : :
16623 : : default:
16624 : : return t;
16625 : : }
16626 : : }
16627 : :
16628 : : /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
16629 : :
16630 : : tree
16631 : 2780746799 : round_up_loc (location_t loc, tree value, unsigned int divisor)
16632 : : {
16633 : 2780746799 : tree div = NULL_TREE;
16634 : :
16635 : 2780746799 : if (divisor == 1)
16636 : : return value;
16637 : :
16638 : : /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16639 : : have to do anything. Only do this when we are not given a const,
16640 : : because in that case, this check is more expensive than just
16641 : : doing it. */
16642 : 1718588060 : if (TREE_CODE (value) != INTEGER_CST)
16643 : : {
16644 : 309649 : div = build_int_cst (TREE_TYPE (value), divisor);
16645 : :
16646 : 309649 : if (multiple_of_p (TREE_TYPE (value), value, div))
16647 : : return value;
16648 : : }
16649 : :
16650 : : /* If divisor is a power of two, simplify this to bit manipulation. */
16651 : 1718279906 : if (pow2_or_zerop (divisor))
16652 : : {
16653 : 1718279906 : if (TREE_CODE (value) == INTEGER_CST)
16654 : : {
16655 : 1718278411 : wide_int val = wi::to_wide (value);
16656 : 1718278411 : bool overflow_p;
16657 : :
16658 : 1718278411 : if ((val & (divisor - 1)) == 0)
16659 : : return value;
16660 : :
16661 : 3406454 : overflow_p = TREE_OVERFLOW (value);
16662 : 3406454 : val += divisor - 1;
16663 : 3406454 : val &= (int) -divisor;
16664 : 3406454 : if (val == 0)
16665 : 0 : overflow_p = true;
16666 : :
16667 : 3406454 : return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
16668 : 1718278411 : }
16669 : : else
16670 : : {
16671 : 1495 : tree t;
16672 : :
16673 : 1495 : t = build_int_cst (TREE_TYPE (value), divisor - 1);
16674 : 1495 : value = size_binop_loc (loc, PLUS_EXPR, value, t);
16675 : 1495 : t = build_int_cst (TREE_TYPE (value), - (int) divisor);
16676 : 1495 : value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16677 : : }
16678 : : }
16679 : : else
16680 : : {
16681 : 0 : if (!div)
16682 : 0 : div = build_int_cst (TREE_TYPE (value), divisor);
16683 : 0 : value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16684 : 0 : value = size_binop_loc (loc, MULT_EXPR, value, div);
16685 : : }
16686 : :
16687 : : return value;
16688 : : }
16689 : :
16690 : : /* Likewise, but round down. */
16691 : :
16692 : : tree
16693 : 16417618 : round_down_loc (location_t loc, tree value, int divisor)
16694 : : {
16695 : 16417618 : tree div = NULL_TREE;
16696 : :
16697 : 16417618 : gcc_assert (divisor > 0);
16698 : 16417618 : if (divisor == 1)
16699 : : return value;
16700 : :
16701 : : /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16702 : : have to do anything. Only do this when we are not given a const,
16703 : : because in that case, this check is more expensive than just
16704 : : doing it. */
16705 : 16417618 : if (TREE_CODE (value) != INTEGER_CST)
16706 : : {
16707 : 0 : div = build_int_cst (TREE_TYPE (value), divisor);
16708 : :
16709 : 0 : if (multiple_of_p (TREE_TYPE (value), value, div))
16710 : : return value;
16711 : : }
16712 : :
16713 : : /* If divisor is a power of two, simplify this to bit manipulation. */
16714 : 16417618 : if (pow2_or_zerop (divisor))
16715 : : {
16716 : 16417618 : tree t;
16717 : :
16718 : 16417618 : t = build_int_cst (TREE_TYPE (value), -divisor);
16719 : 16417618 : value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16720 : : }
16721 : : else
16722 : : {
16723 : 0 : if (!div)
16724 : 0 : div = build_int_cst (TREE_TYPE (value), divisor);
16725 : 0 : value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16726 : 0 : value = size_binop_loc (loc, MULT_EXPR, value, div);
16727 : : }
16728 : :
16729 : : return value;
16730 : : }
16731 : :
16732 : : /* Returns the pointer to the base of the object addressed by EXP and
16733 : : extracts the information about the offset of the access, storing it
16734 : : to PBITPOS and POFFSET. */
16735 : :
16736 : : static tree
16737 : 1191808 : split_address_to_core_and_offset (tree exp,
16738 : : poly_int64 *pbitpos, tree *poffset)
16739 : : {
16740 : 1191808 : tree core;
16741 : 1191808 : machine_mode mode;
16742 : 1191808 : int unsignedp, reversep, volatilep;
16743 : 1191808 : poly_int64 bitsize;
16744 : 1191808 : location_t loc = EXPR_LOCATION (exp);
16745 : :
16746 : 1191808 : if (TREE_CODE (exp) == SSA_NAME)
16747 : 395119 : if (gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (exp)))
16748 : 321175 : if (gimple_assign_rhs_code (def) == ADDR_EXPR)
16749 : 31733 : exp = gimple_assign_rhs1 (def);
16750 : :
16751 : 1191808 : if (TREE_CODE (exp) == ADDR_EXPR)
16752 : : {
16753 : 718048 : core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16754 : : poffset, &mode, &unsignedp, &reversep,
16755 : : &volatilep);
16756 : 718048 : core = build_fold_addr_expr_loc (loc, core);
16757 : : }
16758 : 473760 : else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
16759 : : {
16760 : 30308 : core = TREE_OPERAND (exp, 0);
16761 : 30308 : STRIP_NOPS (core);
16762 : 30308 : *pbitpos = 0;
16763 : 30308 : *poffset = TREE_OPERAND (exp, 1);
16764 : 30308 : if (poly_int_tree_p (*poffset))
16765 : : {
16766 : 30204 : poly_offset_int tem
16767 : 30204 : = wi::sext (wi::to_poly_offset (*poffset),
16768 : 30204 : TYPE_PRECISION (TREE_TYPE (*poffset)));
16769 : 30204 : tem <<= LOG2_BITS_PER_UNIT;
16770 : 30204 : if (tem.to_shwi (pbitpos))
16771 : 30204 : *poffset = NULL_TREE;
16772 : : }
16773 : : }
16774 : : else
16775 : : {
16776 : 443452 : core = exp;
16777 : 443452 : *pbitpos = 0;
16778 : 443452 : *poffset = NULL_TREE;
16779 : : }
16780 : :
16781 : 1191808 : return core;
16782 : : }
16783 : :
16784 : : /* Returns true if addresses of E1 and E2 differ by a constant, false
16785 : : otherwise. If they do, E1 - E2 is stored in *DIFF. */
16786 : :
16787 : : bool
16788 : 595904 : ptr_difference_const (tree e1, tree e2, poly_int64 *diff)
16789 : : {
16790 : 595904 : tree core1, core2;
16791 : 595904 : poly_int64 bitpos1, bitpos2;
16792 : 595904 : tree toffset1, toffset2, tdiff, type;
16793 : :
16794 : 595904 : core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16795 : 595904 : core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16796 : :
16797 : 595904 : poly_int64 bytepos1, bytepos2;
16798 : 595904 : if (!multiple_p (bitpos1, BITS_PER_UNIT, &bytepos1)
16799 : 1039430 : || !multiple_p (bitpos2, BITS_PER_UNIT, &bytepos2)
16800 : 1191808 : || !operand_equal_p (core1, core2, 0))
16801 : 443526 : return false;
16802 : :
16803 : 152378 : if (toffset1 && toffset2)
16804 : : {
16805 : 30 : type = TREE_TYPE (toffset1);
16806 : 30 : if (type != TREE_TYPE (toffset2))
16807 : 0 : toffset2 = fold_convert (type, toffset2);
16808 : :
16809 : 30 : tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16810 : 30 : if (!cst_and_fits_in_hwi (tdiff))
16811 : : return false;
16812 : :
16813 : 12 : *diff = int_cst_value (tdiff);
16814 : : }
16815 : 152348 : else if (toffset1 || toffset2)
16816 : : {
16817 : : /* If only one of the offsets is non-constant, the difference cannot
16818 : : be a constant. */
16819 : : return false;
16820 : : }
16821 : : else
16822 : 134929 : *diff = 0;
16823 : :
16824 : 134941 : *diff += bytepos1 - bytepos2;
16825 : 134941 : return true;
16826 : : }
16827 : :
16828 : : /* Return OFF converted to a pointer offset type suitable as offset for
16829 : : POINTER_PLUS_EXPR. Use location LOC for this conversion. */
16830 : : tree
16831 : 16415110 : convert_to_ptrofftype_loc (location_t loc, tree off)
16832 : : {
16833 : 16415110 : if (ptrofftype_p (TREE_TYPE (off)))
16834 : : return off;
16835 : 1925621 : return fold_convert_loc (loc, sizetype, off);
16836 : : }
16837 : :
16838 : : /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
16839 : : tree
16840 : 14733263 : fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
16841 : : {
16842 : 14733263 : return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
16843 : 14733263 : ptr, convert_to_ptrofftype_loc (loc, off));
16844 : : }
16845 : :
16846 : : /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
16847 : : tree
16848 : 156561 : fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
16849 : : {
16850 : 156561 : return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
16851 : 156561 : ptr, size_int (off));
16852 : : }
16853 : :
16854 : : /* Return a pointer to a NUL-terminated string containing the sequence
16855 : : of bytes corresponding to the representation of the object referred to
16856 : : by SRC (or a subsequence of such bytes within it if SRC is a reference
16857 : : to an initialized constant array plus some constant offset).
16858 : : Set *STRSIZE the number of bytes in the constant sequence including
16859 : : the terminating NUL byte. *STRSIZE is equal to sizeof(A) - OFFSET
16860 : : where A is the array that stores the constant sequence that SRC points
16861 : : to and OFFSET is the byte offset of SRC from the beginning of A. SRC
16862 : : need not point to a string or even an array of characters but may point
16863 : : to an object of any type. */
16864 : :
16865 : : const char *
16866 : 12157364 : getbyterep (tree src, unsigned HOST_WIDE_INT *strsize)
16867 : : {
16868 : : /* The offset into the array A storing the string, and A's byte size. */
16869 : 12157364 : tree offset_node;
16870 : 12157364 : tree mem_size;
16871 : :
16872 : 12157364 : if (strsize)
16873 : 4886213 : *strsize = 0;
16874 : :
16875 : 12157364 : if (strsize)
16876 : 4886213 : src = byte_representation (src, &offset_node, &mem_size, NULL);
16877 : : else
16878 : 7271151 : src = string_constant (src, &offset_node, &mem_size, NULL);
16879 : 12157364 : if (!src)
16880 : : return NULL;
16881 : :
16882 : 2700636 : unsigned HOST_WIDE_INT offset = 0;
16883 : 2700636 : if (offset_node != NULL_TREE)
16884 : : {
16885 : 2700636 : if (!tree_fits_uhwi_p (offset_node))
16886 : : return NULL;
16887 : : else
16888 : 2698683 : offset = tree_to_uhwi (offset_node);
16889 : : }
16890 : :
16891 : 2698683 : if (!tree_fits_uhwi_p (mem_size))
16892 : : return NULL;
16893 : :
16894 : : /* ARRAY_SIZE is the byte size of the array the constant sequence
16895 : : is stored in and equal to sizeof A. INIT_BYTES is the number
16896 : : of bytes in the constant sequence used to initialize the array,
16897 : : including any embedded NULs as well as the terminating NUL (for
16898 : : strings), but not including any trailing zeros/NULs past
16899 : : the terminating one appended implicitly to a string literal to
16900 : : zero out the remainder of the array it's stored in. For example,
16901 : : given:
16902 : : const char a[7] = "abc\0d";
16903 : : n = strlen (a + 1);
16904 : : ARRAY_SIZE is 7, INIT_BYTES is 6, and OFFSET is 1. For a valid
16905 : : (i.e., nul-terminated) string with no embedded nuls, INIT_BYTES
16906 : : is equal to strlen (A) + 1. */
16907 : 2698683 : const unsigned HOST_WIDE_INT array_size = tree_to_uhwi (mem_size);
16908 : 2698683 : unsigned HOST_WIDE_INT init_bytes = TREE_STRING_LENGTH (src);
16909 : 2698683 : const char *string = TREE_STRING_POINTER (src);
16910 : :
16911 : : /* Ideally this would turn into a gcc_checking_assert over time. */
16912 : 2698683 : if (init_bytes > array_size)
16913 : : init_bytes = array_size;
16914 : :
16915 : 2698683 : if (init_bytes == 0 || offset >= array_size)
16916 : : return NULL;
16917 : :
16918 : 2697196 : if (strsize)
16919 : : {
16920 : : /* Compute and store the number of characters from the beginning
16921 : : of the substring at OFFSET to the end, including the terminating
16922 : : nul. Offsets past the initial length refer to null strings. */
16923 : 1595155 : if (offset < init_bytes)
16924 : 1595155 : *strsize = init_bytes - offset;
16925 : : else
16926 : 0 : *strsize = 1;
16927 : : }
16928 : : else
16929 : : {
16930 : 1102041 : tree eltype = TREE_TYPE (TREE_TYPE (src));
16931 : : /* Support only properly NUL-terminated single byte strings. */
16932 : 1102041 : if (tree_to_uhwi (TYPE_SIZE_UNIT (eltype)) != 1)
16933 : : return NULL;
16934 : 1097699 : if (string[init_bytes - 1] != '\0')
16935 : : return NULL;
16936 : : }
16937 : :
16938 : 2670768 : return offset < init_bytes ? string + offset : "";
16939 : : }
16940 : :
16941 : : /* Return a pointer to a NUL-terminated string corresponding to
16942 : : the expression STR referencing a constant string, possibly
16943 : : involving a constant offset. Return null if STR either doesn't
16944 : : reference a constant string or if it involves a nonconstant
16945 : : offset. */
16946 : :
16947 : : const char *
16948 : 7271151 : c_getstr (tree str)
16949 : : {
16950 : 7271151 : return getbyterep (str, NULL);
16951 : : }
16952 : :
16953 : : /* Given a tree T, compute which bits in T may be nonzero. */
16954 : :
16955 : : wide_int
16956 : 204012766 : tree_nonzero_bits (const_tree t)
16957 : : {
16958 : 204012766 : switch (TREE_CODE (t))
16959 : : {
16960 : 8136510 : case INTEGER_CST:
16961 : 8136510 : return wi::to_wide (t);
16962 : 112925632 : case SSA_NAME:
16963 : 112925632 : return get_nonzero_bits (t);
16964 : 230476 : case NON_LVALUE_EXPR:
16965 : 230476 : case SAVE_EXPR:
16966 : 230476 : return tree_nonzero_bits (TREE_OPERAND (t, 0));
16967 : 466574 : case BIT_AND_EXPR:
16968 : 933148 : return wi::bit_and (tree_nonzero_bits (TREE_OPERAND (t, 0)),
16969 : 1399722 : tree_nonzero_bits (TREE_OPERAND (t, 1)));
16970 : 4181 : case BIT_IOR_EXPR:
16971 : 4181 : case BIT_XOR_EXPR:
16972 : 8362 : return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 0)),
16973 : 12543 : tree_nonzero_bits (TREE_OPERAND (t, 1)));
16974 : 70308 : case COND_EXPR:
16975 : 140616 : return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 1)),
16976 : 210924 : tree_nonzero_bits (TREE_OPERAND (t, 2)));
16977 : 41157063 : CASE_CONVERT:
16978 : 41157063 : return wide_int::from (tree_nonzero_bits (TREE_OPERAND (t, 0)),
16979 : 41157063 : TYPE_PRECISION (TREE_TYPE (t)),
16980 : 123471210 : TYPE_SIGN (TREE_TYPE (TREE_OPERAND (t, 0))));
16981 : 12359599 : case PLUS_EXPR:
16982 : 12359599 : if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
16983 : : {
16984 : 12359599 : wide_int nzbits1 = tree_nonzero_bits (TREE_OPERAND (t, 0));
16985 : 12359599 : wide_int nzbits2 = tree_nonzero_bits (TREE_OPERAND (t, 1));
16986 : 12359599 : if (wi::bit_and (nzbits1, nzbits2) == 0)
16987 : 738268 : return wi::bit_or (nzbits1, nzbits2);
16988 : 12359599 : }
16989 : : break;
16990 : 132481 : case LSHIFT_EXPR:
16991 : 132481 : if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
16992 : : {
16993 : 82674 : tree type = TREE_TYPE (t);
16994 : 82674 : wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
16995 : 165348 : wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
16996 : 82674 : TYPE_PRECISION (type));
16997 : 82674 : return wi::neg_p (arg1)
16998 : 165348 : ? wi::rshift (nzbits, -arg1, TYPE_SIGN (type))
16999 : 82674 : : wi::lshift (nzbits, arg1);
17000 : 82674 : }
17001 : : break;
17002 : 55402 : case RSHIFT_EXPR:
17003 : 55402 : if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
17004 : : {
17005 : 53874 : tree type = TREE_TYPE (t);
17006 : 53874 : wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
17007 : 107748 : wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
17008 : 53874 : TYPE_PRECISION (type));
17009 : 53874 : return wi::neg_p (arg1)
17010 : 107748 : ? wi::lshift (nzbits, -arg1)
17011 : 53874 : : wi::rshift (nzbits, arg1, TYPE_SIGN (type));
17012 : 53874 : }
17013 : : break;
17014 : : default:
17015 : : break;
17016 : : }
17017 : :
17018 : 40147206 : return wi::shwi (-1, TYPE_PRECISION (TREE_TYPE (t)));
17019 : : }
17020 : :
17021 : : /* Helper function for address compare simplifications in match.pd.
17022 : : OP0 and OP1 are ADDR_EXPR operands being compared by CODE.
17023 : : TYPE is the type of comparison operands.
17024 : : BASE0, BASE1, OFF0 and OFF1 are set by the function.
17025 : : GENERIC is true if GENERIC folding and false for GIMPLE folding.
17026 : : Returns 0 if OP0 is known to be unequal to OP1 regardless of OFF{0,1},
17027 : : 1 if bases are known to be equal and OP0 cmp OP1 depends on OFF0 cmp OFF1,
17028 : : and 2 if unknown. */
17029 : :
17030 : : int
17031 : 832695 : address_compare (tree_code code, tree type, tree op0, tree op1,
17032 : : tree &base0, tree &base1, poly_int64 &off0, poly_int64 &off1,
17033 : : bool generic)
17034 : : {
17035 : 832695 : if (TREE_CODE (op0) == SSA_NAME)
17036 : 16345 : op0 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (op0));
17037 : 832695 : if (TREE_CODE (op1) == SSA_NAME)
17038 : 7561 : op1 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (op1));
17039 : 832695 : gcc_checking_assert (TREE_CODE (op0) == ADDR_EXPR);
17040 : 832695 : gcc_checking_assert (TREE_CODE (op1) == ADDR_EXPR);
17041 : 832695 : base0 = get_addr_base_and_unit_offset (TREE_OPERAND (op0, 0), &off0);
17042 : 832695 : base1 = get_addr_base_and_unit_offset (TREE_OPERAND (op1, 0), &off1);
17043 : 832695 : if (base0 && TREE_CODE (base0) == MEM_REF)
17044 : : {
17045 : 17628 : off0 += mem_ref_offset (base0).force_shwi ();
17046 : 17628 : base0 = TREE_OPERAND (base0, 0);
17047 : : }
17048 : 832695 : if (base1 && TREE_CODE (base1) == MEM_REF)
17049 : : {
17050 : 4171 : off1 += mem_ref_offset (base1).force_shwi ();
17051 : 4171 : base1 = TREE_OPERAND (base1, 0);
17052 : : }
17053 : 832695 : if (base0 == NULL_TREE || base1 == NULL_TREE)
17054 : : return 2;
17055 : :
17056 : 825699 : int equal = 2;
17057 : : /* Punt in GENERIC on variables with value expressions;
17058 : : the value expressions might point to fields/elements
17059 : : of other vars etc. */
17060 : 825699 : if (generic
17061 : 825699 : && ((VAR_P (base0) && DECL_HAS_VALUE_EXPR_P (base0))
17062 : 729534 : || (VAR_P (base1) && DECL_HAS_VALUE_EXPR_P (base1))))
17063 : : return 2;
17064 : 825133 : else if (decl_in_symtab_p (base0) && decl_in_symtab_p (base1))
17065 : : {
17066 : 83554 : symtab_node *node0 = symtab_node::get_create (base0);
17067 : 83554 : symtab_node *node1 = symtab_node::get_create (base1);
17068 : 83554 : equal = node0->equal_address_to (node1);
17069 : : }
17070 : 741579 : else if ((DECL_P (base0)
17071 : 104468 : || TREE_CODE (base0) == SSA_NAME
17072 : 87747 : || TREE_CODE (base0) == STRING_CST)
17073 : 741200 : && (DECL_P (base1)
17074 : 91726 : || TREE_CODE (base1) == SSA_NAME
17075 : 87781 : || TREE_CODE (base1) == STRING_CST))
17076 : 741174 : equal = (base0 == base1);
17077 : : /* Assume different STRING_CSTs with the same content will be
17078 : : merged. */
17079 : 824728 : if (equal == 0
17080 : 44922 : && TREE_CODE (base0) == STRING_CST
17081 : 17005 : && TREE_CODE (base1) == STRING_CST
17082 : 16976 : && TREE_STRING_LENGTH (base0) == TREE_STRING_LENGTH (base1)
17083 : 824728 : && memcmp (TREE_STRING_POINTER (base0), TREE_STRING_POINTER (base1),
17084 : 5990 : TREE_STRING_LENGTH (base0)) == 0)
17085 : : equal = 1;
17086 : 820781 : if (equal == 1)
17087 : : {
17088 : 760392 : if (code == EQ_EXPR
17089 : 760392 : || code == NE_EXPR
17090 : : /* If the offsets are equal we can ignore overflow. */
17091 : 52254 : || known_eq (off0, off1)
17092 : 104344 : || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
17093 : : /* Or if we compare using pointers to decls or strings. */
17094 : 812564 : || (POINTER_TYPE_P (type)
17095 : 0 : && (DECL_P (base0) || TREE_CODE (base0) == STRING_CST)))
17096 : : return 1;
17097 : : return 2;
17098 : : }
17099 : 64741 : if (equal != 0)
17100 : : return equal;
17101 : 40570 : if (code != EQ_EXPR && code != NE_EXPR)
17102 : : return 2;
17103 : :
17104 : : /* At this point we know (or assume) the two pointers point at
17105 : : different objects. */
17106 : 37270 : HOST_WIDE_INT ioff0 = -1, ioff1 = -1;
17107 : 37270 : off0.is_constant (&ioff0);
17108 : 37270 : off1.is_constant (&ioff1);
17109 : : /* Punt on non-zero offsets from functions. */
17110 : 37270 : if ((TREE_CODE (base0) == FUNCTION_DECL && ioff0)
17111 : 37270 : || (TREE_CODE (base1) == FUNCTION_DECL && ioff1))
17112 : : return 2;
17113 : : /* Or if the bases are neither decls nor string literals. */
17114 : 37270 : if (!DECL_P (base0) && TREE_CODE (base0) != STRING_CST)
17115 : : return 2;
17116 : 21049 : if (!DECL_P (base1) && TREE_CODE (base1) != STRING_CST)
17117 : : return 2;
17118 : : /* For initializers, assume addresses of different functions are
17119 : : different. */
17120 : 19677 : if (folding_initializer
17121 : 488 : && TREE_CODE (base0) == FUNCTION_DECL
17122 : 14 : && TREE_CODE (base1) == FUNCTION_DECL)
17123 : : return 0;
17124 : :
17125 : : /* Compute whether one address points to the start of one
17126 : : object and another one to the end of another one. */
17127 : 19663 : poly_int64 size0 = 0, size1 = 0;
17128 : 19663 : if (TREE_CODE (base0) == STRING_CST)
17129 : : {
17130 : 12629 : if (ioff0 < 0 || ioff0 > TREE_STRING_LENGTH (base0))
17131 : : equal = 2;
17132 : : else
17133 : : size0 = TREE_STRING_LENGTH (base0);
17134 : : }
17135 : 7034 : else if (TREE_CODE (base0) == FUNCTION_DECL)
17136 : : size0 = 1;
17137 : : else
17138 : : {
17139 : 6970 : tree sz0 = DECL_SIZE_UNIT (base0);
17140 : 6970 : if (!tree_fits_poly_int64_p (sz0))
17141 : : equal = 2;
17142 : : else
17143 : 6970 : size0 = tree_to_poly_int64 (sz0);
17144 : : }
17145 : 19663 : if (TREE_CODE (base1) == STRING_CST)
17146 : : {
17147 : 12750 : if (ioff1 < 0 || ioff1 > TREE_STRING_LENGTH (base1))
17148 : : equal = 2;
17149 : : else
17150 : : size1 = TREE_STRING_LENGTH (base1);
17151 : : }
17152 : 6913 : else if (TREE_CODE (base1) == FUNCTION_DECL)
17153 : : size1 = 1;
17154 : : else
17155 : : {
17156 : 6853 : tree sz1 = DECL_SIZE_UNIT (base1);
17157 : 6853 : if (!tree_fits_poly_int64_p (sz1))
17158 : : equal = 2;
17159 : : else
17160 : 6853 : size1 = tree_to_poly_int64 (sz1);
17161 : : }
17162 : 19663 : if (equal == 0)
17163 : : {
17164 : : /* If one offset is pointing (or could be) to the beginning of one
17165 : : object and the other is pointing to one past the last byte of the
17166 : : other object, punt. */
17167 : 19651 : if (maybe_eq (off0, 0) && maybe_eq (off1, size1))
17168 : : equal = 2;
17169 : 19514 : else if (maybe_eq (off1, 0) && maybe_eq (off0, size0))
17170 : : equal = 2;
17171 : : /* If both offsets are the same, there are some cases we know that are
17172 : : ok. Either if we know they aren't zero, or if we know both sizes
17173 : : are no zero. */
17174 : : if (equal == 2
17175 : 270 : && known_eq (off0, off1)
17176 : 22 : && (known_ne (off0, 0)
17177 : 22 : || (known_ne (size0, 0) && known_ne (size1, 0))))
17178 : : equal = 0;
17179 : : }
17180 : :
17181 : : /* At this point, equal is 2 if either one or both pointers are out of
17182 : : bounds of their object, or one points to start of its object and the
17183 : : other points to end of its object. This is unspecified behavior
17184 : : e.g. in C++. Otherwise equal is 0. */
17185 : 19663 : if (folding_cxx_constexpr && equal)
17186 : : return equal;
17187 : :
17188 : : /* When both pointers point to string literals, even when equal is 0,
17189 : : due to tail merging of string literals the pointers might be the same. */
17190 : 19600 : if (TREE_CODE (base0) == STRING_CST && TREE_CODE (base1) == STRING_CST)
17191 : : {
17192 : 12606 : if (ioff0 < 0
17193 : 12606 : || ioff1 < 0
17194 : 12606 : || ioff0 > TREE_STRING_LENGTH (base0)
17195 : 25200 : || ioff1 > TREE_STRING_LENGTH (base1))
17196 : : return 2;
17197 : :
17198 : : /* If the bytes in the string literals starting at the pointers
17199 : : differ, the pointers need to be different. */
17200 : 37782 : if (memcmp (TREE_STRING_POINTER (base0) + ioff0,
17201 : 12594 : TREE_STRING_POINTER (base1) + ioff1,
17202 : 12594 : MIN (TREE_STRING_LENGTH (base0) - ioff0,
17203 : : TREE_STRING_LENGTH (base1) - ioff1)) == 0)
17204 : : {
17205 : 3711 : HOST_WIDE_INT ioffmin = MIN (ioff0, ioff1);
17206 : 3711 : if (memcmp (TREE_STRING_POINTER (base0) + ioff0 - ioffmin,
17207 : 3711 : TREE_STRING_POINTER (base1) + ioff1 - ioffmin,
17208 : : ioffmin) == 0)
17209 : : /* If even the bytes in the string literal before the
17210 : : pointers are the same, the string literals could be
17211 : : tail merged. */
17212 : : return 2;
17213 : : }
17214 : : return 0;
17215 : : }
17216 : :
17217 : 6994 : if (folding_cxx_constexpr)
17218 : : return 0;
17219 : :
17220 : : /* If this is a pointer comparison, ignore for now even
17221 : : valid equalities where one pointer is the offset zero
17222 : : of one object and the other to one past end of another one. */
17223 : 6626 : if (!INTEGRAL_TYPE_P (type))
17224 : : return 0;
17225 : :
17226 : : /* Assume that string literals can't be adjacent to variables
17227 : : (automatic or global). */
17228 : 292 : if (TREE_CODE (base0) == STRING_CST || TREE_CODE (base1) == STRING_CST)
17229 : : return 0;
17230 : :
17231 : : /* Assume that automatic variables can't be adjacent to global
17232 : : variables. */
17233 : 288 : if (is_global_var (base0) != is_global_var (base1))
17234 : : return 0;
17235 : :
17236 : : return equal;
17237 : : }
17238 : :
17239 : : /* Return the single non-zero element of a CONSTRUCTOR or NULL_TREE. */
17240 : : tree
17241 : 49 : ctor_single_nonzero_element (const_tree t)
17242 : : {
17243 : 49 : unsigned HOST_WIDE_INT idx;
17244 : 49 : constructor_elt *ce;
17245 : 49 : tree elt = NULL_TREE;
17246 : :
17247 : 49 : if (TREE_CODE (t) != CONSTRUCTOR)
17248 : : return NULL_TREE;
17249 : 110 : for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (t), idx, &ce); idx++)
17250 : 106 : if (!integer_zerop (ce->value) && !real_zerop (ce->value))
17251 : : {
17252 : 94 : if (elt)
17253 : : return NULL_TREE;
17254 : 49 : elt = ce->value;
17255 : : }
17256 : : return elt;
17257 : : }
17258 : :
17259 : : #if CHECKING_P
17260 : :
17261 : : namespace selftest {
17262 : :
17263 : : /* Helper functions for writing tests of folding trees. */
17264 : :
17265 : : /* Verify that the binary op (LHS CODE RHS) folds to CONSTANT. */
17266 : :
17267 : : static void
17268 : 16 : assert_binop_folds_to_const (tree lhs, enum tree_code code, tree rhs,
17269 : : tree constant)
17270 : : {
17271 : 16 : ASSERT_EQ (constant, fold_build2 (code, TREE_TYPE (lhs), lhs, rhs));
17272 : 16 : }
17273 : :
17274 : : /* Verify that the binary op (LHS CODE RHS) folds to an NON_LVALUE_EXPR
17275 : : wrapping WRAPPED_EXPR. */
17276 : :
17277 : : static void
17278 : 12 : assert_binop_folds_to_nonlvalue (tree lhs, enum tree_code code, tree rhs,
17279 : : tree wrapped_expr)
17280 : : {
17281 : 12 : tree result = fold_build2 (code, TREE_TYPE (lhs), lhs, rhs);
17282 : 12 : ASSERT_NE (wrapped_expr, result);
17283 : 12 : ASSERT_EQ (NON_LVALUE_EXPR, TREE_CODE (result));
17284 : 12 : ASSERT_EQ (wrapped_expr, TREE_OPERAND (result, 0));
17285 : 12 : }
17286 : :
17287 : : /* Verify that various arithmetic binary operations are folded
17288 : : correctly. */
17289 : :
17290 : : static void
17291 : 4 : test_arithmetic_folding ()
17292 : : {
17293 : 4 : tree type = integer_type_node;
17294 : 4 : tree x = create_tmp_var_raw (type, "x");
17295 : 4 : tree zero = build_zero_cst (type);
17296 : 4 : tree one = build_int_cst (type, 1);
17297 : :
17298 : : /* Addition. */
17299 : : /* 1 <-- (0 + 1) */
17300 : 4 : assert_binop_folds_to_const (zero, PLUS_EXPR, one,
17301 : : one);
17302 : 4 : assert_binop_folds_to_const (one, PLUS_EXPR, zero,
17303 : : one);
17304 : :
17305 : : /* (nonlvalue)x <-- (x + 0) */
17306 : 4 : assert_binop_folds_to_nonlvalue (x, PLUS_EXPR, zero,
17307 : : x);
17308 : :
17309 : : /* Subtraction. */
17310 : : /* 0 <-- (x - x) */
17311 : 4 : assert_binop_folds_to_const (x, MINUS_EXPR, x,
17312 : : zero);
17313 : 4 : assert_binop_folds_to_nonlvalue (x, MINUS_EXPR, zero,
17314 : : x);
17315 : :
17316 : : /* Multiplication. */
17317 : : /* 0 <-- (x * 0) */
17318 : 4 : assert_binop_folds_to_const (x, MULT_EXPR, zero,
17319 : : zero);
17320 : :
17321 : : /* (nonlvalue)x <-- (x * 1) */
17322 : 4 : assert_binop_folds_to_nonlvalue (x, MULT_EXPR, one,
17323 : : x);
17324 : 4 : }
17325 : :
17326 : : namespace test_fold_vec_perm_cst {
17327 : :
17328 : : /* Build a VECTOR_CST corresponding to VMODE, and has
17329 : : encoding given by NPATTERNS, NELTS_PER_PATTERN and STEP.
17330 : : Fill it with randomized elements, using rand() % THRESHOLD. */
17331 : :
17332 : : static tree
17333 : 0 : build_vec_cst_rand (machine_mode vmode, unsigned npatterns,
17334 : : unsigned nelts_per_pattern,
17335 : : int step = 0, bool natural_stepped = false,
17336 : : int threshold = 100)
17337 : : {
17338 : 0 : tree inner_type = lang_hooks.types.type_for_mode (GET_MODE_INNER (vmode), 1);
17339 : 0 : tree vectype = build_vector_type_for_mode (inner_type, vmode);
17340 : 0 : tree_vector_builder builder (vectype, npatterns, nelts_per_pattern);
17341 : :
17342 : : // Fill a0 for each pattern
17343 : 0 : for (unsigned i = 0; i < npatterns; i++)
17344 : 0 : builder.quick_push (build_int_cst (inner_type, rand () % threshold));
17345 : :
17346 : 0 : if (nelts_per_pattern == 1)
17347 : 0 : return builder.build ();
17348 : :
17349 : : // Fill a1 for each pattern
17350 : 0 : for (unsigned i = 0; i < npatterns; i++)
17351 : : {
17352 : 0 : tree a1;
17353 : 0 : if (natural_stepped)
17354 : : {
17355 : 0 : tree a0 = builder[i];
17356 : 0 : wide_int a0_val = wi::to_wide (a0);
17357 : 0 : wide_int a1_val = a0_val + step;
17358 : 0 : a1 = wide_int_to_tree (inner_type, a1_val);
17359 : 0 : }
17360 : : else
17361 : 0 : a1 = build_int_cst (inner_type, rand () % threshold);
17362 : 0 : builder.quick_push (a1);
17363 : : }
17364 : 0 : if (nelts_per_pattern == 2)
17365 : 0 : return builder.build ();
17366 : :
17367 : 0 : for (unsigned i = npatterns * 2; i < npatterns * nelts_per_pattern; i++)
17368 : : {
17369 : 0 : tree prev_elem = builder[i - npatterns];
17370 : 0 : wide_int prev_elem_val = wi::to_wide (prev_elem);
17371 : 0 : wide_int val = prev_elem_val + step;
17372 : 0 : builder.quick_push (wide_int_to_tree (inner_type, val));
17373 : 0 : }
17374 : :
17375 : 0 : return builder.build ();
17376 : 0 : }
17377 : :
17378 : : /* Validate result of VEC_PERM_EXPR folding for the unit-tests below,
17379 : : when result is VLA. */
17380 : :
17381 : : static void
17382 : 0 : validate_res (unsigned npatterns, unsigned nelts_per_pattern,
17383 : : tree res, tree *expected_res)
17384 : : {
17385 : : /* Actual npatterns and encoded_elts in res may be less than expected due
17386 : : to canonicalization. */
17387 : 0 : ASSERT_TRUE (res != NULL_TREE);
17388 : 0 : ASSERT_TRUE (VECTOR_CST_NPATTERNS (res) <= npatterns);
17389 : 0 : ASSERT_TRUE (vector_cst_encoded_nelts (res) <= npatterns * nelts_per_pattern);
17390 : :
17391 : 0 : for (unsigned i = 0; i < npatterns * nelts_per_pattern; i++)
17392 : 0 : ASSERT_TRUE (operand_equal_p (VECTOR_CST_ELT (res, i), expected_res[i], 0));
17393 : 0 : }
17394 : :
17395 : : /* Validate result of VEC_PERM_EXPR folding for the unit-tests below,
17396 : : when the result is VLS. */
17397 : :
17398 : : static void
17399 : 0 : validate_res_vls (tree res, tree *expected_res, unsigned expected_nelts)
17400 : : {
17401 : 0 : ASSERT_TRUE (known_eq (VECTOR_CST_NELTS (res), expected_nelts));
17402 : 0 : for (unsigned i = 0; i < expected_nelts; i++)
17403 : 0 : ASSERT_TRUE (operand_equal_p (VECTOR_CST_ELT (res, i), expected_res[i], 0));
17404 : 0 : }
17405 : :
17406 : : /* Helper routine to push multiple elements into BUILDER. */
17407 : : template<unsigned N>
17408 : 0 : static void builder_push_elems (vec_perm_builder& builder,
17409 : : poly_uint64 (&elems)[N])
17410 : : {
17411 : 0 : for (unsigned i = 0; i < N; i++)
17412 : 0 : builder.quick_push (elems[i]);
17413 : 0 : }
17414 : :
17415 : : #define ARG0(index) vector_cst_elt (arg0, index)
17416 : : #define ARG1(index) vector_cst_elt (arg1, index)
17417 : :
17418 : : /* Test cases where result is VNx4SI and input vectors are V4SI. */
17419 : :
17420 : : static void
17421 : 0 : test_vnx4si_v4si (machine_mode vnx4si_mode, machine_mode v4si_mode)
17422 : : {
17423 : 0 : for (int i = 0; i < 10; i++)
17424 : : {
17425 : : /* Case 1:
17426 : : sel = { 0, 4, 1, 5, ... }
17427 : : res = { arg[0], arg1[0], arg0[1], arg1[1], ...} // (4, 1) */
17428 : 0 : {
17429 : 0 : tree arg0 = build_vec_cst_rand (v4si_mode, 4, 1, 0);
17430 : 0 : tree arg1 = build_vec_cst_rand (v4si_mode, 4, 1, 0);
17431 : :
17432 : 0 : tree inner_type
17433 : 0 : = lang_hooks.types.type_for_mode (GET_MODE_INNER (vnx4si_mode), 1);
17434 : 0 : tree res_type = build_vector_type_for_mode (inner_type, vnx4si_mode);
17435 : :
17436 : 0 : poly_uint64 res_len = TYPE_VECTOR_SUBPARTS (res_type);
17437 : 0 : vec_perm_builder builder (res_len, 4, 1);
17438 : 0 : poly_uint64 mask_elems[] = { 0, 4, 1, 5 };
17439 : 0 : builder_push_elems (builder, mask_elems);
17440 : :
17441 : 0 : vec_perm_indices sel (builder, 2, res_len);
17442 : 0 : tree res = fold_vec_perm_cst (res_type, arg0, arg1, sel);
17443 : :
17444 : 0 : tree expected_res[] = { ARG0(0), ARG1(0), ARG0(1), ARG1(1) };
17445 : 0 : validate_res (4, 1, res, expected_res);
17446 : 0 : }
17447 : :
17448 : : /* Case 2: Same as case 1, but contains an out of bounds access which
17449 : : should wrap around.
17450 : : sel = {0, 8, 4, 12, ...} (4, 1)
17451 : : res = { arg0[0], arg0[0], arg1[0], arg1[0], ... } (4, 1). */
17452 : 0 : {
17453 : 0 : tree arg0 = build_vec_cst_rand (v4si_mode, 4, 1, 0);
17454 : 0 : tree arg1 = build_vec_cst_rand (v4si_mode, 4, 1, 0);
17455 : :
17456 : 0 : tree inner_type
17457 : 0 : = lang_hooks.types.type_for_mode (GET_MODE_INNER (vnx4si_mode), 1);
17458 : 0 : tree res_type = build_vector_type_for_mode (inner_type, vnx4si_mode);
17459 : :
17460 : 0 : poly_uint64 res_len = TYPE_VECTOR_SUBPARTS (res_type);
17461 : 0 : vec_perm_builder builder (res_len, 4, 1);
17462 : 0 : poly_uint64 mask_elems[] = { 0, 8, 4, 12 };
17463 : 0 : builder_push_elems (builder, mask_elems);
17464 : :
17465 : 0 : vec_perm_indices sel (builder, 2, res_len);
17466 : 0 : tree res = fold_vec_perm_cst (res_type, arg0, arg1, sel);
17467 : :
17468 : 0 : tree expected_res[] = { ARG0(0), ARG0(0), ARG1(0), ARG1(0) };
17469 : 0 : validate_res (4, 1, res, expected_res);
17470 : 0 : }
17471 : : }
17472 : 0 : }
17473 : :
17474 : : /* Test cases where result is V4SI and input vectors are VNx4SI. */
17475 : :
17476 : : static void
17477 : 0 : test_v4si_vnx4si (machine_mode v4si_mode, machine_mode vnx4si_mode)
17478 : : {
17479 : 0 : for (int i = 0; i < 10; i++)
17480 : : {
17481 : : /* Case 1:
17482 : : sel = { 0, 1, 2, 3}
17483 : : res = { arg0[0], arg0[1], arg0[2], arg0[3] }. */
17484 : 0 : {
17485 : 0 : tree arg0 = build_vec_cst_rand (vnx4si_mode, 4, 1);
17486 : 0 : tree arg1 = build_vec_cst_rand (vnx4si_mode, 4, 1);
17487 : :
17488 : 0 : tree inner_type
17489 : 0 : = lang_hooks.types.type_for_mode (GET_MODE_INNER (v4si_mode), 1);
17490 : 0 : tree res_type = build_vector_type_for_mode (inner_type, v4si_mode);
17491 : :
17492 : 0 : poly_uint64 res_len = TYPE_VECTOR_SUBPARTS (res_type);
17493 : 0 : vec_perm_builder builder (res_len, 4, 1);
17494 : 0 : poly_uint64 mask_elems[] = {0, 1, 2, 3};
17495 : 0 : builder_push_elems (builder, mask_elems);
17496 : :
17497 : 0 : vec_perm_indices sel (builder, 2, res_len);
17498 : 0 : tree res = fold_vec_perm_cst (res_type, arg0, arg1, sel);
17499 : :
17500 : 0 : tree expected_res[] = { ARG0(0), ARG0(1), ARG0(2), ARG0(3) };
17501 : 0 : validate_res_vls (res, expected_res, 4);
17502 : 0 : }
17503 : :
17504 : : /* Case 2: Same as Case 1, but crossing input vector.
17505 : : sel = {0, 2, 4, 6}
17506 : : In this case,the index 4 is ambiguous since len = 4 + 4x.
17507 : : Since we cannot determine, which vector to choose from during
17508 : : compile time, should return NULL_TREE. */
17509 : 0 : {
17510 : 0 : tree arg0 = build_vec_cst_rand (vnx4si_mode, 4, 1);
17511 : 0 : tree arg1 = build_vec_cst_rand (vnx4si_mode, 4, 1);
17512 : :
17513 : 0 : tree inner_type
17514 : 0 : = lang_hooks.types.type_for_mode (GET_MODE_INNER (v4si_mode), 1);
17515 : 0 : tree res_type = build_vector_type_for_mode (inner_type, v4si_mode);
17516 : :
17517 : 0 : poly_uint64 res_len = TYPE_VECTOR_SUBPARTS (res_type);
17518 : 0 : vec_perm_builder builder (res_len, 4, 1);
17519 : 0 : poly_uint64 mask_elems[] = {0, 2, 4, 6};
17520 : 0 : builder_push_elems (builder, mask_elems);
17521 : :
17522 : 0 : vec_perm_indices sel (builder, 2, res_len);
17523 : 0 : const char *reason;
17524 : 0 : tree res = fold_vec_perm_cst (res_type, arg0, arg1, sel, &reason);
17525 : :
17526 : 0 : ASSERT_TRUE (res == NULL_TREE);
17527 : 0 : ASSERT_TRUE (!strcmp (reason, "cannot divide selector element by arg len"));
17528 : 0 : }
17529 : : }
17530 : 0 : }
17531 : :
17532 : : /* Test all input vectors. */
17533 : :
17534 : : static void
17535 : 0 : test_all_nunits (machine_mode vmode)
17536 : : {
17537 : : /* Test with 10 different inputs. */
17538 : 0 : for (int i = 0; i < 10; i++)
17539 : : {
17540 : 0 : tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
17541 : 0 : tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
17542 : 0 : poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17543 : :
17544 : : /* Case 1: mask = {0, ...} // (1, 1)
17545 : : res = { arg0[0], ... } // (1, 1) */
17546 : 0 : {
17547 : 0 : vec_perm_builder builder (len, 1, 1);
17548 : 0 : builder.quick_push (0);
17549 : 0 : vec_perm_indices sel (builder, 2, len);
17550 : 0 : tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17551 : 0 : tree expected_res[] = { ARG0(0) };
17552 : 0 : validate_res (1, 1, res, expected_res);
17553 : 0 : }
17554 : :
17555 : : /* Case 2: mask = {len, ...} // (1, 1)
17556 : : res = { arg1[0], ... } // (1, 1) */
17557 : 0 : {
17558 : 0 : vec_perm_builder builder (len, 1, 1);
17559 : 0 : builder.quick_push (len);
17560 : 0 : vec_perm_indices sel (builder, 2, len);
17561 : 0 : tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17562 : :
17563 : 0 : tree expected_res[] = { ARG1(0) };
17564 : 0 : validate_res (1, 1, res, expected_res);
17565 : 0 : }
17566 : : }
17567 : 0 : }
17568 : :
17569 : : /* Test all vectors which contain at-least 2 elements. */
17570 : :
17571 : : static void
17572 : 0 : test_nunits_min_2 (machine_mode vmode)
17573 : : {
17574 : 0 : for (int i = 0; i < 10; i++)
17575 : : {
17576 : : /* Case 1: mask = { 0, len, ... } // (2, 1)
17577 : : res = { arg0[0], arg1[0], ... } // (2, 1) */
17578 : 0 : {
17579 : 0 : tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
17580 : 0 : tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
17581 : 0 : poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17582 : :
17583 : 0 : vec_perm_builder builder (len, 2, 1);
17584 : 0 : poly_uint64 mask_elems[] = { 0, len };
17585 : 0 : builder_push_elems (builder, mask_elems);
17586 : :
17587 : 0 : vec_perm_indices sel (builder, 2, len);
17588 : 0 : tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17589 : :
17590 : 0 : tree expected_res[] = { ARG0(0), ARG1(0) };
17591 : 0 : validate_res (2, 1, res, expected_res);
17592 : 0 : }
17593 : :
17594 : : /* Case 2: mask = { 0, len, 1, len+1, ... } // (2, 2)
17595 : : res = { arg0[0], arg1[0], arg0[1], arg1[1], ... } // (2, 2) */
17596 : 0 : {
17597 : 0 : tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
17598 : 0 : tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
17599 : 0 : poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17600 : :
17601 : 0 : vec_perm_builder builder (len, 2, 2);
17602 : 0 : poly_uint64 mask_elems[] = { 0, len, 1, len + 1 };
17603 : 0 : builder_push_elems (builder, mask_elems);
17604 : :
17605 : 0 : vec_perm_indices sel (builder, 2, len);
17606 : 0 : tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17607 : :
17608 : 0 : tree expected_res[] = { ARG0(0), ARG1(0), ARG0(1), ARG1(1) };
17609 : 0 : validate_res (2, 2, res, expected_res);
17610 : 0 : }
17611 : :
17612 : : /* Case 4: mask = {0, 0, 1, ...} // (1, 3)
17613 : : Test that the stepped sequence of the pattern selects from
17614 : : same input pattern. Since input vectors have npatterns = 2,
17615 : : and step (a2 - a1) = 1, step is not a multiple of npatterns
17616 : : in input vector. So return NULL_TREE. */
17617 : 0 : {
17618 : 0 : tree arg0 = build_vec_cst_rand (vmode, 2, 3, 1, true);
17619 : 0 : tree arg1 = build_vec_cst_rand (vmode, 2, 3, 1);
17620 : 0 : poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17621 : :
17622 : 0 : vec_perm_builder builder (len, 1, 3);
17623 : 0 : poly_uint64 mask_elems[] = { 0, 0, 1 };
17624 : 0 : builder_push_elems (builder, mask_elems);
17625 : :
17626 : 0 : vec_perm_indices sel (builder, 2, len);
17627 : 0 : const char *reason;
17628 : 0 : tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel,
17629 : : &reason);
17630 : 0 : ASSERT_TRUE (res == NULL_TREE);
17631 : 0 : ASSERT_TRUE (!strcmp (reason, "step is not multiple of npatterns"));
17632 : 0 : }
17633 : :
17634 : : /* Case 5: mask = {len, 0, 1, ...} // (1, 3)
17635 : : Test that stepped sequence of the pattern selects from arg0.
17636 : : res = { arg1[0], arg0[0], arg0[1], ... } // (1, 3) */
17637 : 0 : {
17638 : 0 : tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1, true);
17639 : 0 : tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
17640 : 0 : poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17641 : :
17642 : 0 : vec_perm_builder builder (len, 1, 3);
17643 : 0 : poly_uint64 mask_elems[] = { len, 0, 1 };
17644 : 0 : builder_push_elems (builder, mask_elems);
17645 : :
17646 : 0 : vec_perm_indices sel (builder, 2, len);
17647 : 0 : tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17648 : :
17649 : 0 : tree expected_res[] = { ARG1(0), ARG0(0), ARG0(1) };
17650 : 0 : validate_res (1, 3, res, expected_res);
17651 : 0 : }
17652 : :
17653 : : /* Case 6: PR111648 - a1 chooses base element from input vector arg.
17654 : : In this case ensure that arg has a natural stepped sequence
17655 : : to preserve arg's encoding.
17656 : :
17657 : : As a concrete example, consider:
17658 : : arg0: { -16, -9, -10, ... } // (1, 3)
17659 : : arg1: { -12, -5, -6, ... } // (1, 3)
17660 : : sel = { 0, len, len + 1, ... } // (1, 3)
17661 : :
17662 : : This will create res with following encoding:
17663 : : res = { arg0[0], arg1[0], arg1[1], ... } // (1, 3)
17664 : : = { -16, -12, -5, ... }
17665 : :
17666 : : The step in above encoding would be: (-5) - (-12) = 7
17667 : : And hence res[3] would be computed as -5 + 7 = 2.
17668 : : instead of arg1[2], ie, -6.
17669 : : Ensure that valid_mask_for_fold_vec_perm_cst returns false
17670 : : for this case. */
17671 : 0 : {
17672 : 0 : tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
17673 : 0 : tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
17674 : 0 : poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17675 : :
17676 : 0 : vec_perm_builder builder (len, 1, 3);
17677 : 0 : poly_uint64 mask_elems[] = { 0, len, len+1 };
17678 : 0 : builder_push_elems (builder, mask_elems);
17679 : :
17680 : 0 : vec_perm_indices sel (builder, 2, len);
17681 : 0 : const char *reason;
17682 : : /* FIXME: It may happen that build_vec_cst_rand may build a natural
17683 : : stepped pattern, even if we didn't explicitly tell it to. So folding
17684 : : may not always fail, but if it does, ensure that's because arg1 does
17685 : : not have a natural stepped sequence (and not due to other reason) */
17686 : 0 : tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel, &reason);
17687 : 0 : if (res == NULL_TREE)
17688 : 0 : ASSERT_TRUE (!strcmp (reason, "not a natural stepped sequence"));
17689 : 0 : }
17690 : :
17691 : : /* Case 7: Same as Case 6, except that arg1 contains natural stepped
17692 : : sequence and thus folding should be valid for this case. */
17693 : 0 : {
17694 : 0 : tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
17695 : 0 : tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1, true);
17696 : 0 : poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17697 : :
17698 : 0 : vec_perm_builder builder (len, 1, 3);
17699 : 0 : poly_uint64 mask_elems[] = { 0, len, len+1 };
17700 : 0 : builder_push_elems (builder, mask_elems);
17701 : :
17702 : 0 : vec_perm_indices sel (builder, 2, len);
17703 : 0 : tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17704 : :
17705 : 0 : tree expected_res[] = { ARG0(0), ARG1(0), ARG1(1) };
17706 : 0 : validate_res (1, 3, res, expected_res);
17707 : 0 : }
17708 : :
17709 : : /* Case 8: Same as aarch64/sve/slp_3.c:
17710 : : arg0, arg1 are dup vectors.
17711 : : sel = { 0, len, 1, len+1, 2, len+2, ... } // (2, 3)
17712 : : So res = { arg0[0], arg1[0], ... } // (2, 1)
17713 : :
17714 : : In this case, since the input vectors are dup, only the first two
17715 : : elements per pattern in sel are considered significant. */
17716 : 0 : {
17717 : 0 : tree arg0 = build_vec_cst_rand (vmode, 1, 1);
17718 : 0 : tree arg1 = build_vec_cst_rand (vmode, 1, 1);
17719 : 0 : poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17720 : :
17721 : 0 : vec_perm_builder builder (len, 2, 3);
17722 : 0 : poly_uint64 mask_elems[] = { 0, len, 1, len + 1, 2, len + 2 };
17723 : 0 : builder_push_elems (builder, mask_elems);
17724 : :
17725 : 0 : vec_perm_indices sel (builder, 2, len);
17726 : 0 : tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17727 : :
17728 : 0 : tree expected_res[] = { ARG0(0), ARG1(0) };
17729 : 0 : validate_res (2, 1, res, expected_res);
17730 : 0 : }
17731 : : }
17732 : 0 : }
17733 : :
17734 : : /* Test all vectors which contain at-least 4 elements. */
17735 : :
17736 : : static void
17737 : 0 : test_nunits_min_4 (machine_mode vmode)
17738 : : {
17739 : 0 : for (int i = 0; i < 10; i++)
17740 : : {
17741 : : /* Case 1: mask = { 0, len, 1, len+1, ... } // (4, 1)
17742 : : res: { arg0[0], arg1[0], arg0[1], arg1[1], ... } // (4, 1) */
17743 : 0 : {
17744 : 0 : tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
17745 : 0 : tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
17746 : 0 : poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17747 : :
17748 : 0 : vec_perm_builder builder (len, 4, 1);
17749 : 0 : poly_uint64 mask_elems[] = { 0, len, 1, len + 1 };
17750 : 0 : builder_push_elems (builder, mask_elems);
17751 : :
17752 : 0 : vec_perm_indices sel (builder, 2, len);
17753 : 0 : tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17754 : :
17755 : 0 : tree expected_res[] = { ARG0(0), ARG1(0), ARG0(1), ARG1(1) };
17756 : 0 : validate_res (4, 1, res, expected_res);
17757 : 0 : }
17758 : :
17759 : : /* Case 2: sel = {0, 1, 2, ...} // (1, 3)
17760 : : res: { arg0[0], arg0[1], arg0[2], ... } // (1, 3) */
17761 : 0 : {
17762 : 0 : tree arg0 = build_vec_cst_rand (vmode, 1, 3, 2);
17763 : 0 : tree arg1 = build_vec_cst_rand (vmode, 1, 3, 2);
17764 : 0 : poly_uint64 arg0_len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17765 : :
17766 : 0 : vec_perm_builder builder (arg0_len, 1, 3);
17767 : 0 : poly_uint64 mask_elems[] = {0, 1, 2};
17768 : 0 : builder_push_elems (builder, mask_elems);
17769 : :
17770 : 0 : vec_perm_indices sel (builder, 2, arg0_len);
17771 : 0 : tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17772 : 0 : tree expected_res[] = { ARG0(0), ARG0(1), ARG0(2) };
17773 : 0 : validate_res (1, 3, res, expected_res);
17774 : 0 : }
17775 : :
17776 : : /* Case 3: sel = {len, len+1, len+2, ...} // (1, 3)
17777 : : res: { arg1[0], arg1[1], arg1[2], ... } // (1, 3) */
17778 : 0 : {
17779 : 0 : tree arg0 = build_vec_cst_rand (vmode, 1, 3, 2);
17780 : 0 : tree arg1 = build_vec_cst_rand (vmode, 1, 3, 2);
17781 : 0 : poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17782 : :
17783 : 0 : vec_perm_builder builder (len, 1, 3);
17784 : 0 : poly_uint64 mask_elems[] = {len, len + 1, len + 2};
17785 : 0 : builder_push_elems (builder, mask_elems);
17786 : :
17787 : 0 : vec_perm_indices sel (builder, 2, len);
17788 : 0 : tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17789 : 0 : tree expected_res[] = { ARG1(0), ARG1(1), ARG1(2) };
17790 : 0 : validate_res (1, 3, res, expected_res);
17791 : 0 : }
17792 : :
17793 : : /* Case 4:
17794 : : sel = { len, 0, 2, ... } // (1, 3)
17795 : : This should return NULL because we cross the input vectors.
17796 : : Because,
17797 : : Let's assume len = C + Cx
17798 : : a1 = 0
17799 : : S = 2
17800 : : esel = arg0_len / sel_npatterns = C + Cx
17801 : : ae = 0 + (esel - 2) * S
17802 : : = 0 + (C + Cx - 2) * 2
17803 : : = 2(C-2) + 2Cx
17804 : :
17805 : : For C >= 4:
17806 : : Let q1 = a1 / arg0_len = 0 / (C + Cx) = 0
17807 : : Let qe = ae / arg0_len = (2(C-2) + 2Cx) / (C + Cx) = 1
17808 : : Since q1 != qe, we cross input vectors.
17809 : : So return NULL_TREE. */
17810 : 0 : {
17811 : 0 : tree arg0 = build_vec_cst_rand (vmode, 1, 3, 2);
17812 : 0 : tree arg1 = build_vec_cst_rand (vmode, 1, 3, 2);
17813 : 0 : poly_uint64 arg0_len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17814 : :
17815 : 0 : vec_perm_builder builder (arg0_len, 1, 3);
17816 : 0 : poly_uint64 mask_elems[] = { arg0_len, 0, 2 };
17817 : 0 : builder_push_elems (builder, mask_elems);
17818 : :
17819 : 0 : vec_perm_indices sel (builder, 2, arg0_len);
17820 : 0 : const char *reason;
17821 : 0 : tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel, &reason);
17822 : 0 : ASSERT_TRUE (res == NULL_TREE);
17823 : 0 : ASSERT_TRUE (!strcmp (reason, "crossed input vectors"));
17824 : 0 : }
17825 : :
17826 : : /* Case 5: npatterns(arg0) = 4 > npatterns(sel) = 2
17827 : : mask = { 0, len, 1, len + 1, ...} // (2, 2)
17828 : : res = { arg0[0], arg1[0], arg0[1], arg1[1], ... } // (2, 2)
17829 : :
17830 : : Note that fold_vec_perm_cst will set
17831 : : res_npatterns = max(4, max(4, 2)) = 4
17832 : : However after canonicalizing, we will end up with shape (2, 2). */
17833 : 0 : {
17834 : 0 : tree arg0 = build_vec_cst_rand (vmode, 4, 1);
17835 : 0 : tree arg1 = build_vec_cst_rand (vmode, 4, 1);
17836 : 0 : poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17837 : :
17838 : 0 : vec_perm_builder builder (len, 2, 2);
17839 : 0 : poly_uint64 mask_elems[] = { 0, len, 1, len + 1 };
17840 : 0 : builder_push_elems (builder, mask_elems);
17841 : :
17842 : 0 : vec_perm_indices sel (builder, 2, len);
17843 : 0 : tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17844 : 0 : tree expected_res[] = { ARG0(0), ARG1(0), ARG0(1), ARG1(1) };
17845 : 0 : validate_res (2, 2, res, expected_res);
17846 : 0 : }
17847 : :
17848 : : /* Case 6: Test combination in sel, where one pattern is dup and other
17849 : : is stepped sequence.
17850 : : sel = { 0, 0, 0, 1, 0, 2, ... } // (2, 3)
17851 : : res = { arg0[0], arg0[0], arg0[0],
17852 : : arg0[1], arg0[0], arg0[2], ... } // (2, 3) */
17853 : 0 : {
17854 : 0 : tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
17855 : 0 : tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
17856 : 0 : poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17857 : :
17858 : 0 : vec_perm_builder builder (len, 2, 3);
17859 : 0 : poly_uint64 mask_elems[] = { 0, 0, 0, 1, 0, 2 };
17860 : 0 : builder_push_elems (builder, mask_elems);
17861 : :
17862 : 0 : vec_perm_indices sel (builder, 2, len);
17863 : 0 : tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17864 : :
17865 : 0 : tree expected_res[] = { ARG0(0), ARG0(0), ARG0(0),
17866 : 0 : ARG0(1), ARG0(0), ARG0(2) };
17867 : 0 : validate_res (2, 3, res, expected_res);
17868 : 0 : }
17869 : :
17870 : : /* Case 7: PR111048: Check that we set arg_npatterns correctly,
17871 : : when arg0, arg1 and sel have different number of patterns.
17872 : : arg0 is of shape (1, 1)
17873 : : arg1 is of shape (4, 1)
17874 : : sel is of shape (2, 3) = {1, len, 2, len+1, 3, len+2, ...}
17875 : :
17876 : : In this case the pattern: {len, len+1, len+2, ...} chooses arg1.
17877 : : However,
17878 : : step = (len+2) - (len+1) = 1
17879 : : arg_npatterns = VECTOR_CST_NPATTERNS (arg1) = 4
17880 : : Since step is not a multiple of arg_npatterns,
17881 : : valid_mask_for_fold_vec_perm_cst should return false,
17882 : : and thus fold_vec_perm_cst should return NULL_TREE. */
17883 : 0 : {
17884 : 0 : tree arg0 = build_vec_cst_rand (vmode, 1, 1);
17885 : 0 : tree arg1 = build_vec_cst_rand (vmode, 4, 1);
17886 : 0 : poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17887 : :
17888 : 0 : vec_perm_builder builder (len, 2, 3);
17889 : 0 : poly_uint64 mask_elems[] = { 0, len, 1, len + 1, 2, len + 2 };
17890 : 0 : builder_push_elems (builder, mask_elems);
17891 : :
17892 : 0 : vec_perm_indices sel (builder, 2, len);
17893 : 0 : const char *reason;
17894 : 0 : tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel, &reason);
17895 : :
17896 : 0 : ASSERT_TRUE (res == NULL_TREE);
17897 : 0 : ASSERT_TRUE (!strcmp (reason, "step is not multiple of npatterns"));
17898 : 0 : }
17899 : :
17900 : : /* Case 8: PR111754: When input vector is not a stepped sequence,
17901 : : check that the result is not a stepped sequence either, even
17902 : : if sel has a stepped sequence. */
17903 : 0 : {
17904 : 0 : tree arg0 = build_vec_cst_rand (vmode, 1, 2);
17905 : 0 : poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17906 : :
17907 : 0 : vec_perm_builder builder (len, 1, 3);
17908 : 0 : poly_uint64 mask_elems[] = { 0, 1, 2 };
17909 : 0 : builder_push_elems (builder, mask_elems);
17910 : :
17911 : 0 : vec_perm_indices sel (builder, 1, len);
17912 : 0 : tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg0, sel);
17913 : :
17914 : 0 : tree expected_res[] = { ARG0(0), ARG0(1) };
17915 : 0 : validate_res (sel.encoding ().npatterns (), 2, res, expected_res);
17916 : 0 : }
17917 : :
17918 : : /* Case 9: If sel doesn't contain a stepped sequence,
17919 : : check that the result has same encoding as sel, irrespective
17920 : : of shape of input vectors. */
17921 : 0 : {
17922 : 0 : tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
17923 : 0 : tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
17924 : 0 : poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17925 : :
17926 : 0 : vec_perm_builder builder (len, 1, 2);
17927 : 0 : poly_uint64 mask_elems[] = { 0, len };
17928 : 0 : builder_push_elems (builder, mask_elems);
17929 : :
17930 : 0 : vec_perm_indices sel (builder, 2, len);
17931 : 0 : tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17932 : :
17933 : 0 : tree expected_res[] = { ARG0(0), ARG1(0) };
17934 : 0 : validate_res (sel.encoding ().npatterns (),
17935 : 0 : sel.encoding ().nelts_per_pattern (), res, expected_res);
17936 : 0 : }
17937 : : }
17938 : 0 : }
17939 : :
17940 : : /* Test all vectors which contain at-least 8 elements. */
17941 : :
17942 : : static void
17943 : 0 : test_nunits_min_8 (machine_mode vmode)
17944 : : {
17945 : 0 : for (int i = 0; i < 10; i++)
17946 : : {
17947 : : /* Case 1: sel_npatterns (4) > input npatterns (2)
17948 : : sel: { 0, 0, 1, len, 2, 0, 3, len, 4, 0, 5, len, ...} // (4, 3)
17949 : : res: { arg0[0], arg0[0], arg0[0], arg1[0],
17950 : : arg0[2], arg0[0], arg0[3], arg1[0],
17951 : : arg0[4], arg0[0], arg0[5], arg1[0], ... } // (4, 3) */
17952 : 0 : {
17953 : 0 : tree arg0 = build_vec_cst_rand (vmode, 2, 3, 2);
17954 : 0 : tree arg1 = build_vec_cst_rand (vmode, 2, 3, 2);
17955 : 0 : poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17956 : :
17957 : 0 : vec_perm_builder builder(len, 4, 3);
17958 : 0 : poly_uint64 mask_elems[] = { 0, 0, 1, len, 2, 0, 3, len,
17959 : 0 : 4, 0, 5, len };
17960 : 0 : builder_push_elems (builder, mask_elems);
17961 : :
17962 : 0 : vec_perm_indices sel (builder, 2, len);
17963 : 0 : tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel);
17964 : :
17965 : 0 : tree expected_res[] = { ARG0(0), ARG0(0), ARG0(1), ARG1(0),
17966 : 0 : ARG0(2), ARG0(0), ARG0(3), ARG1(0),
17967 : 0 : ARG0(4), ARG0(0), ARG0(5), ARG1(0) };
17968 : 0 : validate_res (4, 3, res, expected_res);
17969 : 0 : }
17970 : : }
17971 : 0 : }
17972 : :
17973 : : /* Test vectors for which nunits[0] <= 4. */
17974 : :
17975 : : static void
17976 : 0 : test_nunits_max_4 (machine_mode vmode)
17977 : : {
17978 : : /* Case 1: mask = {0, 4, ...} // (1, 2)
17979 : : This should return NULL_TREE because the index 4 may choose
17980 : : from either arg0 or arg1 depending on vector length. */
17981 : 0 : {
17982 : 0 : tree arg0 = build_vec_cst_rand (vmode, 1, 3, 1);
17983 : 0 : tree arg1 = build_vec_cst_rand (vmode, 1, 3, 1);
17984 : 0 : poly_uint64 len = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
17985 : :
17986 : 0 : vec_perm_builder builder (len, 1, 2);
17987 : 0 : poly_uint64 mask_elems[] = {0, 4};
17988 : 0 : builder_push_elems (builder, mask_elems);
17989 : :
17990 : 0 : vec_perm_indices sel (builder, 2, len);
17991 : 0 : const char *reason;
17992 : 0 : tree res = fold_vec_perm_cst (TREE_TYPE (arg0), arg0, arg1, sel, &reason);
17993 : 0 : ASSERT_TRUE (res == NULL_TREE);
17994 : 0 : ASSERT_TRUE (reason != NULL);
17995 : 0 : ASSERT_TRUE (!strcmp (reason, "cannot divide selector element by arg len"));
17996 : 0 : }
17997 : 0 : }
17998 : :
17999 : : #undef ARG0
18000 : : #undef ARG1
18001 : :
18002 : : /* Return true if SIZE is of the form C + Cx and C is power of 2. */
18003 : :
18004 : : static bool
18005 : 156 : is_simple_vla_size (poly_uint64 size)
18006 : : {
18007 : 156 : if (size.is_constant ()
18008 : : || !pow2p_hwi (size.coeffs[0]))
18009 : 156 : return false;
18010 : : for (unsigned i = 1; i < ARRAY_SIZE (size.coeffs); ++i)
18011 : : if (size.coeffs[i] != (i <= 1 ? size.coeffs[0] : 0))
18012 : : return false;
18013 : : return true;
18014 : : }
18015 : :
18016 : : /* Execute fold_vec_perm_cst unit tests. */
18017 : :
18018 : : static void
18019 : 4 : test ()
18020 : : {
18021 : 4 : machine_mode vnx4si_mode = E_VOIDmode;
18022 : 4 : machine_mode v4si_mode = E_VOIDmode;
18023 : :
18024 : 4 : machine_mode vmode;
18025 : 132 : FOR_EACH_MODE_IN_CLASS (vmode, MODE_VECTOR_INT)
18026 : : {
18027 : : /* Obtain modes corresponding to VNx4SI and V4SI,
18028 : : to call mixed mode tests below.
18029 : : FIXME: Is there a better way to do this ? */
18030 : 256 : if (GET_MODE_INNER (vmode) == SImode)
18031 : : {
18032 : 56 : poly_uint64 nunits = GET_MODE_NUNITS (vmode);
18033 : 28 : if (is_simple_vla_size (nunits)
18034 : 28 : && nunits.coeffs[0] == 4)
18035 : : vnx4si_mode = vmode;
18036 : 28 : else if (known_eq (nunits, poly_uint64 (4)))
18037 : 4 : v4si_mode = vmode;
18038 : : }
18039 : :
18040 : 256 : if (!is_simple_vla_size (GET_MODE_NUNITS (vmode))
18041 : 128 : || !targetm.vector_mode_supported_p (vmode))
18042 : 128 : continue;
18043 : :
18044 : 0 : poly_uint64 nunits = GET_MODE_NUNITS (vmode);
18045 : 0 : test_all_nunits (vmode);
18046 : 0 : if (nunits.coeffs[0] >= 2)
18047 : 0 : test_nunits_min_2 (vmode);
18048 : 0 : if (nunits.coeffs[0] >= 4)
18049 : 0 : test_nunits_min_4 (vmode);
18050 : 0 : if (nunits.coeffs[0] >= 8)
18051 : 0 : test_nunits_min_8 (vmode);
18052 : :
18053 : 0 : if (nunits.coeffs[0] <= 4)
18054 : 0 : test_nunits_max_4 (vmode);
18055 : : }
18056 : :
18057 : 4 : if (vnx4si_mode != E_VOIDmode && v4si_mode != E_VOIDmode
18058 : 0 : && targetm.vector_mode_supported_p (vnx4si_mode)
18059 : 4 : && targetm.vector_mode_supported_p (v4si_mode))
18060 : : {
18061 : 0 : test_vnx4si_v4si (vnx4si_mode, v4si_mode);
18062 : 0 : test_v4si_vnx4si (v4si_mode, vnx4si_mode);
18063 : : }
18064 : 4 : }
18065 : : } // end of test_fold_vec_perm_cst namespace
18066 : :
18067 : : /* Verify that various binary operations on vectors are folded
18068 : : correctly. */
18069 : :
18070 : : static void
18071 : 4 : test_vector_folding ()
18072 : : {
18073 : 4 : tree inner_type = integer_type_node;
18074 : 4 : tree type = build_vector_type (inner_type, 4);
18075 : 4 : tree zero = build_zero_cst (type);
18076 : 4 : tree one = build_one_cst (type);
18077 : 4 : tree index = build_index_vector (type, 0, 1);
18078 : :
18079 : : /* Verify equality tests that return a scalar boolean result. */
18080 : 4 : tree res_type = boolean_type_node;
18081 : 4 : ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, one)));
18082 : 4 : ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero)));
18083 : 4 : ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one)));
18084 : 4 : ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
18085 : 4 : ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, index, one)));
18086 : 4 : ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
18087 : : index, one)));
18088 : 4 : ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type,
18089 : : index, index)));
18090 : 4 : ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
18091 : : index, index)));
18092 : 4 : }
18093 : :
18094 : : /* Verify folding of VEC_DUPLICATE_EXPRs. */
18095 : :
18096 : : static void
18097 : 4 : test_vec_duplicate_folding ()
18098 : : {
18099 : 4 : scalar_int_mode int_mode = SCALAR_INT_TYPE_MODE (ssizetype);
18100 : 4 : machine_mode vec_mode = targetm.vectorize.preferred_simd_mode (int_mode);
18101 : : /* This will be 1 if VEC_MODE isn't a vector mode. */
18102 : 8 : poly_uint64 nunits = GET_MODE_NUNITS (vec_mode);
18103 : :
18104 : 4 : tree type = build_vector_type (ssizetype, nunits);
18105 : 4 : tree dup5_expr = fold_unary (VEC_DUPLICATE_EXPR, type, ssize_int (5));
18106 : 4 : tree dup5_cst = build_vector_from_val (type, ssize_int (5));
18107 : 4 : ASSERT_TRUE (operand_equal_p (dup5_expr, dup5_cst, 0));
18108 : 4 : }
18109 : :
18110 : : /* Run all of the selftests within this file. */
18111 : :
18112 : : void
18113 : 4 : fold_const_cc_tests ()
18114 : : {
18115 : 4 : test_arithmetic_folding ();
18116 : 4 : test_vector_folding ();
18117 : 4 : test_vec_duplicate_folding ();
18118 : 4 : test_fold_vec_perm_cst::test ();
18119 : 4 : }
18120 : :
18121 : : } // namespace selftest
18122 : :
18123 : : #endif /* CHECKING_P */
|