Line data Source code
1 : /* Statement simplification on GIMPLE.
2 : Copyright (C) 2010-2026 Free Software Foundation, Inc.
3 : Split out from tree-ssa-ccp.cc.
4 :
5 : This file is part of GCC.
6 :
7 : GCC is free software; you can redistribute it and/or modify it
8 : under the terms of the GNU General Public License as published by the
9 : Free Software Foundation; either version 3, or (at your option) any
10 : later version.
11 :
12 : GCC is distributed in the hope that it will be useful, but WITHOUT
13 : ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 : for more details.
16 :
17 : You should have received a copy of the GNU General Public License
18 : along with GCC; see the file COPYING3. If not see
19 : <http://www.gnu.org/licenses/>. */
20 :
21 : #include "config.h"
22 : #include "system.h"
23 : #include "coretypes.h"
24 : #include "backend.h"
25 : #include "target.h"
26 : #include "rtl.h"
27 : #include "tree.h"
28 : #include "gimple.h"
29 : #include "predict.h"
30 : #include "ssa.h"
31 : #include "cgraph.h"
32 : #include "gimple-pretty-print.h"
33 : #include "gimple-ssa-warn-access.h"
34 : #include "gimple-ssa-warn-restrict.h"
35 : #include "fold-const.h"
36 : #include "stmt.h"
37 : #include "expr.h"
38 : #include "stor-layout.h"
39 : #include "dumpfile.h"
40 : #include "gimple-iterator.h"
41 : #include "tree-pass.h"
42 : #include "gimple-fold.h"
43 : #include "gimplify.h"
44 : #include "tree-into-ssa.h"
45 : #include "tree-dfa.h"
46 : #include "tree-object-size.h"
47 : #include "tree-ssa.h"
48 : #include "tree-ssa-propagate.h"
49 : #include "ipa-utils.h"
50 : #include "tree-ssa-address.h"
51 : #include "langhooks.h"
52 : #include "gimplify-me.h"
53 : #include "dbgcnt.h"
54 : #include "builtins.h"
55 : #include "tree-eh.h"
56 : #include "gimple-match.h"
57 : #include "gomp-constants.h"
58 : #include "optabs-query.h"
59 : #include "omp-general.h"
60 : #include "tree-cfg.h"
61 : #include "fold-const-call.h"
62 : #include "stringpool.h"
63 : #include "attribs.h"
64 : #include "asan.h"
65 : #include "diagnostic-core.h"
66 : #include "intl.h"
67 : #include "calls.h"
68 : #include "tree-vector-builder.h"
69 : #include "tree-ssa-strlen.h"
70 : #include "varasm.h"
71 : #include "internal-fn.h"
72 : #include "gimple-range.h"
73 :
/* The kind of string-length query performed by get_range_strlen.  */

enum strlen_range_kind {
  /* Compute the exact constant string length.  */
  SRK_STRLEN,
  /* Compute the maximum constant string length.  */
  SRK_STRLENMAX,
  /* Compute a range of string lengths bounded by object sizes.  When
     the length of a string cannot be determined, consider as the upper
     bound the size of the enclosing object the string may be a member
     or element of.  Also determine the size of the largest character
     array the string may refer to.  */
  SRK_LENRANGE,
  /* Determine the integer value of the argument (not string length).  */
  SRK_INT_VALUE
};

/* Forward declaration; the definition appears later in this file.  */

static bool
get_range_strlen (tree, bitmap, strlen_range_kind, c_strlen_data *, unsigned);
91 :
/* Return true when DECL can be referenced from current unit.
   FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to reference
	to method that was partitioned elsewhere.
	In this case we have static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred by external vtables that
	we devirtualize only during final compilation stage.
	At this time we already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  /* Abstract (DWARF-only) instances are never emitted.  */
  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || !VAR_OR_FUNCTION_DECL_P (decl))
    return true;

  /* Static objects can be referred only if they are defined and not optimized
     out yet.  */
  if (!TREE_PUBLIC (decl))
    {
      if (DECL_EXTERNAL (decl))
	return false;
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
	return false;
      node = dyn_cast <cgraph_node *> (snode);
      /* An inlined-to node no longer has its own body.  */
      return !node || !node->inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var or var that has been optimized out.  */
  if (!from_decl
      || !VAR_P (from_decl)
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding reference from external vtable.  The vtable may refer
     to a symbol keyed to other compilation unit.  The other compilation
     unit may be in separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When function is public, we always can introduce new reference.
     Exception are the COMDAT functions where introducing a direct
     reference imply need to include function body in the current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have COMDAT.  We are going to check if we still have definition
     or if the definition is going to be output in other partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when corresponding vtable is output.
     This is however not possible - ABI specify that COMDATs are output in
     units where they are used and when the other unit was compiled with LTO
     it is possible that vtable was kept public while the function itself
     was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->inlined_to;
}
194 :
/* CVAL is value taken from DECL_INITIAL of variable.  Try to transform it into
   acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL.
   Returns the canonicalized value, or NULL_TREE when the value cannot be
   legally referenced from the current unit.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  if (CONSTANT_CLASS_P (cval))
    return cval;

  tree orig_cval = cval;
  STRIP_NOPS (cval);
  /* Rewrite &X p+ CST into &MEM[&X, CST] so the result is a valid
     gimple invariant address.  */
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  /* Replace the compound literal with its underlying decl.  */
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      /* Addresses of decls we cannot emit a reference to are not
	 invariants we can use.  */
      if (VAR_OR_FUNCTION_DECL_P (base)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_TYPE (base) == error_mark_node)
	return NULL_TREE;
      if (VAR_P (base))
	/* ??? We should be able to assert that TREE_ADDRESSABLE is set,
	   but since the use can be in a debug stmt we can't.  */
	;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  /* In CONSTRUCTORs we may see unfolded constants like (int (*) ()) 0.  */
  if (TREE_CODE (cval) == INTEGER_CST)
    {
      if (TREE_OVERFLOW_P (cval))
	cval = drop_tree_overflow (cval);
      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  return orig_cval;
}
268 :
269 : /* If SYM is a constant variable with known value, return the value.
270 : NULL_TREE is returned otherwise. */
271 :
272 : tree
273 21036005 : get_symbol_constant_value (tree sym)
274 : {
275 21036005 : tree val = ctor_for_folding (sym);
276 21036005 : if (val != error_mark_node)
277 : {
278 39523 : if (val)
279 : {
280 37254 : val = canonicalize_constructor_val (unshare_expr (val), sym);
281 37254 : if (val
282 37254 : && is_gimple_min_invariant (val)
283 65686 : && useless_type_conversion_p (TREE_TYPE (sym), TREE_TYPE (val)))
284 : return val;
285 : else
286 8928 : return NULL_TREE;
287 : }
288 : /* Variables declared 'const' without an initializer
289 : have zero as the initializer if they may not be
290 : overridden at link or run time. */
291 2269 : if (!val
292 2269 : && is_gimple_reg_type (TREE_TYPE (sym)))
293 1930 : return build_zero_cst (TREE_TYPE (sym));
294 : }
295 :
296 : return NULL_TREE;
297 : }
298 :
299 :
300 :
301 : /* Subroutine of fold_stmt. We perform constant folding of the
302 : memory reference tree EXPR. */
303 :
304 : static tree
305 62935667 : maybe_fold_reference (tree expr)
306 : {
307 62935667 : tree result = NULL_TREE;
308 :
309 62935667 : if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
310 60952496 : || TREE_CODE (expr) == REALPART_EXPR
311 60277761 : || TREE_CODE (expr) == IMAGPART_EXPR)
312 64420976 : && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
313 2901 : result = fold_unary_loc (EXPR_LOCATION (expr),
314 : TREE_CODE (expr),
315 2901 : TREE_TYPE (expr),
316 2901 : TREE_OPERAND (expr, 0));
317 62932766 : else if (TREE_CODE (expr) == BIT_FIELD_REF
318 62932766 : && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
319 21 : result = fold_ternary_loc (EXPR_LOCATION (expr),
320 : TREE_CODE (expr),
321 21 : TREE_TYPE (expr),
322 21 : TREE_OPERAND (expr, 0),
323 21 : TREE_OPERAND (expr, 1),
324 21 : TREE_OPERAND (expr, 2));
325 : else
326 62932745 : result = fold_const_aggregate_ref (expr);
327 :
328 62935667 : if (result && is_gimple_min_invariant (result))
329 : return result;
330 :
331 : return NULL_TREE;
332 : }
333 :
/* Return true if EXPR is an acceptable right-hand-side for a
   GIMPLE assignment.  We validate the entire tree, not just
   the root node, thus catching expressions that embed complex
   operands that are not permitted in GIMPLE.  This function
   is needed because the folding routines in fold-const.cc
   may return such expressions in some cases, e.g., an array
   access with an embedded index addition.  It may make more
   sense to have folding routines that are sensitive to the
   constraints on GIMPLE operands, rather than abandoning
   any attempt to fold if the usual folding turns out to be too
   aggressive.  */

bool
valid_gimple_rhs_p (tree expr)
{
  enum tree_code code = TREE_CODE (expr);

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
      if (!is_gimple_variable (expr))
	return false;
      break;

    case tcc_constant:
      /* All constants are ok.  */
      break;

    case tcc_comparison:
      /* GENERIC allows comparisons with non-boolean types, reject
	 those for GIMPLE.  Let vector-typed comparisons pass - rules
	 for GENERIC and GIMPLE are the same here.  */
      if (!(INTEGRAL_TYPE_P (TREE_TYPE (expr))
	    && (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE
		|| TYPE_PRECISION (TREE_TYPE (expr)) == 1))
	  && ! VECTOR_TYPE_P (TREE_TYPE (expr)))
	return false;

      /* Fallthru.  */
    case tcc_binary:
      /* Both operands must already be gimple values.  */
      if (!is_gimple_val (TREE_OPERAND (expr, 0))
	  || !is_gimple_val (TREE_OPERAND (expr, 1)))
	return false;
      break;

    case tcc_unary:
      if (!is_gimple_val (TREE_OPERAND (expr, 0)))
	return false;
      break;

    case tcc_expression:
      switch (code)
	{
	case ADDR_EXPR:
	  {
	    tree t;
	    if (is_gimple_min_invariant (expr))
	      return true;
	    /* Walk down the handled components checking that the
	       array indices are gimple values.  */
	    t = TREE_OPERAND (expr, 0);
	    while (handled_component_p (t))
	      {
		/* ??? More checks needed, see the GIMPLE verifier.  */
		if ((TREE_CODE (t) == ARRAY_REF
		     || TREE_CODE (t) == ARRAY_RANGE_REF)
		    && !is_gimple_val (TREE_OPERAND (t, 1)))
		  return false;
		t = TREE_OPERAND (t, 0);
	      }
	    if (!is_gimple_id (t))
	      return false;
	  }
	  break;

	default:
	  if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
	    {
	      if (!is_gimple_val (TREE_OPERAND (expr, 0))
		  || !is_gimple_val (TREE_OPERAND (expr, 1))
		  || !is_gimple_val (TREE_OPERAND (expr, 2)))
		return false;
	      break;
	    }
	  return false;
	}
      break;

    case tcc_vl_exp:
      /* Calls are not valid as a plain assignment RHS.  */
      return false;

    case tcc_exceptional:
      if (code == CONSTRUCTOR)
	{
	  /* A constructor is OK if every element is a gimple value.  */
	  unsigned i;
	  tree elt;
	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (expr), i, elt)
	    if (!is_gimple_val (elt))
	      return false;
	  return true;
	}
      if (code != SSA_NAME)
	return false;
      break;

    case tcc_reference:
      if (code == BIT_FIELD_REF)
	return is_gimple_val (TREE_OPERAND (expr, 0));
      return false;

    default:
      return false;
    }

  return true;
}
448 :
449 :
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple *stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	/* Clobbers carry no value to fold.  */
	if (TREE_CLOBBER_P (rhs))
	  return NULL_TREE;

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    else if (flag_devirtualize && virtual_method_call_p (rhs))
	      {
		/* Try to resolve the virtual method address to the
		   unique possible target, if any.  */
		bool final;
		vec <cgraph_node *>targets
		  = possible_polymorphic_call_targets (rhs, stmt, &final);
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      {
			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
					 "resolving virtual function address "
					 "reference to function %s\n",
					 targets.length () == 1
					 ? targets[0]->name ()
					 : "NULL");
		      }
		    if (targets.length () == 1)
		      {
			val = fold_convert (TREE_TYPE (val),
					    build_fold_addr_expr_loc
					      (loc, targets[0]->decl));
			STRIP_USELESS_TYPE_CONVERSION (val);
		      }
		    else
		      /* We cannot use __builtin_unreachable here because it
			 cannot have address taken.  */
		      val = build_int_cst (TREE_TYPE (val), 0);
		    return val;
		  }
	      }
	  }

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    /* &MEM[p, 0] simplifies to p.  */
	    tree ref = TREE_OPERAND (rhs, 0);
	    if (TREE_CODE (ref) == MEM_REF
		&& integer_zerop (TREE_OPERAND (ref, 1)))
	      {
		result = TREE_OPERAND (ref, 0);
		if (!useless_type_conversion_p (TREE_TYPE (rhs),
						TREE_TYPE (result)))
		  result = build1 (NOP_EXPR, TREE_TYPE (rhs), result);
		return result;
	      }
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (! CONSTANT_CLASS_P (val))
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs)
		 && is_gimple_reg_type (TREE_TYPE (rhs)))
	  return get_symbol_constant_value (rhs);
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
				 TREE_TYPE (gimple_assign_lhs (stmt)),
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  /* Only use the folded form if it is valid GIMPLE.  */
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
575 :
576 :
/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts location and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple *laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* A statement needs a VDEF if it is a store or a call with
	 memory side effects.  */
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  /* The last store in the sequence reuses the replaced
	     statement's VDEF; earlier stores get fresh names.  */
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
648 :
649 : /* Helper function for update_gimple_call and
650 : gimplify_and_update_call_from_tree. A GIMPLE_CALL STMT is being replaced
651 : with GIMPLE_CALL NEW_STMT. */
652 :
653 : static void
654 2437 : finish_update_gimple_call (gimple_stmt_iterator *si_p, gimple *new_stmt,
655 : gimple *stmt)
656 : {
657 2437 : tree lhs = gimple_call_lhs (stmt);
658 2437 : gimple_call_set_lhs (new_stmt, lhs);
659 2437 : if (lhs && TREE_CODE (lhs) == SSA_NAME)
660 808 : SSA_NAME_DEF_STMT (lhs) = new_stmt;
661 2437 : gimple_move_vops (new_stmt, stmt);
662 2437 : gimple_set_location (new_stmt, gimple_location (stmt));
663 2437 : if (gimple_block (new_stmt) == NULL_TREE)
664 1 : gimple_set_block (new_stmt, gimple_block (stmt));
665 2437 : gsi_replace (si_p, new_stmt, false);
666 2437 : }
667 :
/* Update a GIMPLE_CALL statement at iterator *SI_P to call to FN
   with number of arguments NARGS, where the arguments in GIMPLE form
   follow NARGS argument.  Always returns true.  */

bool
update_gimple_call (gimple_stmt_iterator *si_p, tree fn, int nargs, ...)
{
  va_list ap;
  gcall *new_stmt, *stmt = as_a <gcall *> (gsi_stmt (*si_p));

  gcc_assert (is_gimple_call (stmt));
  /* Build the replacement call from the vararg list, then move the
     lhs, vops and location over from the old call.  */
  va_start (ap, nargs);
  new_stmt = gimple_build_call_valist (fn, nargs, ap);
  finish_update_gimple_call (si_p, new_stmt, stmt);
  va_end (ap);
  return true;
}
685 :
686 : /* Return true if EXPR is a CALL_EXPR suitable for representation
687 : as a single GIMPLE_CALL statement. If the arguments require
688 : further gimplification, return false. */
689 :
690 : static bool
691 58599 : valid_gimple_call_p (tree expr)
692 : {
693 58599 : unsigned i, nargs;
694 :
695 58599 : if (TREE_CODE (expr) != CALL_EXPR)
696 : return false;
697 :
698 3 : nargs = call_expr_nargs (expr);
699 6 : for (i = 0; i < nargs; i++)
700 : {
701 3 : tree arg = CALL_EXPR_ARG (expr, i);
702 3 : if (is_gimple_reg_type (TREE_TYPE (arg)))
703 : {
704 3 : if (!is_gimple_val (arg))
705 : return false;
706 : }
707 : else
708 0 : if (!is_gimple_lvalue (arg))
709 : return false;
710 : }
711 :
712 : return true;
713 : }
714 :
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL
   is replaced.  If the call is expected to produces a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple *stmt, *new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  if (valid_gimple_call_p (expr))
    {
      /* The call has simplified to another call.  */
      tree fn = CALL_EXPR_FN (expr);
      unsigned i;
      unsigned nargs = call_expr_nargs (expr);
      vec<tree> args = vNULL;
      gcall *new_stmt;

      if (nargs > 0)
	{
	  args.create (nargs);
	  args.safe_grow_cleared (nargs, true);

	  for (i = 0; i < nargs; i++)
	    args[i] = CALL_EXPR_ARG (expr, i);
	}

      new_stmt = gimple_build_call_vec (fn, args);
      finish_update_gimple_call (si_p, new_stmt, stmt);
      args.release ();
      return;
    }

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      /* Result is ignored: gimplify EXPR for its side effects only.  */
      push_gimplify_context (gimple_in_ssa_p (cfun));
      gimplify_and_add (expr, &stmts);
      pop_gimplify_context (NULL);

      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), false);
	  return;
	}
    }
  else
    {
      /* Force EXPR into a gimple operand and assign it to the lhs as
	 the final statement of the sequence.  */
      tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  gsi_replace_with_seq_vops (si_p, stmts);
}
792 :
793 : /* Print a message in the dump file recording transformation of FROM to TO. */
794 :
795 : static void
796 39944 : dump_transformation (gcall *from, gcall *to)
797 : {
798 39944 : if (dump_enabled_p ())
799 11 : dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, from, "simplified %T to %T\n",
800 : gimple_call_fn (from), gimple_call_fn (to));
801 39944 : }
802 :
803 : /* Replace the call at *GSI with the gimple value VAL. */
804 :
805 : void
806 83207 : replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
807 : {
808 83207 : gimple *stmt = gsi_stmt (*gsi);
809 83207 : tree lhs = gimple_call_lhs (stmt);
810 83207 : gimple *repl;
811 83207 : if (lhs)
812 : {
813 78221 : if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
814 2878 : val = fold_convert (TREE_TYPE (lhs), val);
815 78221 : repl = gimple_build_assign (lhs, val);
816 : }
817 : else
818 4986 : repl = gimple_build_nop ();
819 83207 : tree vdef = gimple_vdef (stmt);
820 83207 : if (vdef && TREE_CODE (vdef) == SSA_NAME)
821 : {
822 5557 : unlink_stmt_vdef (stmt);
823 5557 : release_ssa_name (vdef);
824 : }
825 83207 : gsi_replace (gsi, repl, false);
826 83207 : }
827 :
828 : /* Replace the call at *GSI with the new call REPL and fold that
829 : again. */
830 :
831 : static void
832 39944 : replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
833 : {
834 39944 : gimple *stmt = gsi_stmt (*gsi);
835 39944 : dump_transformation (as_a <gcall *> (stmt), as_a <gcall *> (repl));
836 39944 : gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
837 39944 : gimple_set_location (repl, gimple_location (stmt));
838 39944 : gimple_move_vops (repl, stmt);
839 39944 : gsi_replace (gsi, repl, false);
840 39944 : fold_stmt (gsi);
841 39944 : }
842 :
843 : /* Return true if VAR is a VAR_DECL or a component thereof. */
844 :
845 : static bool
846 410947 : var_decl_component_p (tree var)
847 : {
848 410947 : tree inner = var;
849 598892 : while (handled_component_p (inner))
850 187945 : inner = TREE_OPERAND (inner, 0);
851 410947 : return (DECL_P (inner)
852 410947 : || (TREE_CODE (inner) == MEM_REF
853 47582 : && TREE_CODE (TREE_OPERAND (inner, 0)) == ADDR_EXPR));
854 : }
855 :
/* Return TRUE if the SIZE argument, representing the size of an
   object, is in a range of values of which exactly zero is valid.  */

static bool
size_must_be_zero_p (tree size)
{
  if (integer_zerop (size))
    return true;

  /* Only SSA names of integral type have usable range information.  */
  if (TREE_CODE (size) != SSA_NAME || !INTEGRAL_TYPE_P (TREE_TYPE (size)))
    return false;

  tree type = TREE_TYPE (size);
  int prec = TYPE_PRECISION (type);

  /* Compute the value of SSIZE_MAX, the largest positive value that
     can be stored in ssize_t, the signed counterpart of size_t.  */
  wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
  wide_int zero = wi::zero (TYPE_PRECISION (type));
  int_range_max valid_range (type, zero, ssize_max);
  int_range_max vr;
  get_range_query (cfun)->range_of_expr (vr, size);

  if (vr.undefined_p ())
    vr.set_varying (TREE_TYPE (size));
  /* SIZE must be zero exactly when its range intersected with the
     valid [0, SSIZE_MAX] range contains only zero.  */
  vr.intersect (valid_range);
  return vr.zero_p ();
}
884 :
/* Fold function call to builtin mem{{,p}cpy,move}.  Try to detect and
   diagnose (otherwise undefined) overlapping copies without preventing
   folding.  When folded, GCC guarantees that overlapping memcpy has
   the same semantics as memmove.  Call to the library memcpy need not
   provide the same guarantee.  Return false if no simplification can
   be made.

   GSI points at the call statement; DEST and SRC are the pointer
   arguments of the call and CODE identifies which of the builtins
   (memcpy, mempcpy, memmove) is being folded.  */

static bool
gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
			       tree dest, tree src, enum built_in_function code)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree len = gimple_call_arg (stmt, 2);
  location_t loc = gimple_location (stmt);

  /* If the LEN parameter is a constant zero or in range where
     the only valid value is zero, return DEST.  */
  if (size_must_be_zero_p (len))
    {
      gimple *repl;
      if (gimple_call_lhs (stmt))
	repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
      else
	repl = gimple_build_nop ();
      /* The call no longer stores anything, so drop its virtual
	 definition before replacing it.  */
      tree vdef = gimple_vdef (stmt);
      if (vdef && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
      gsi_replace (gsi, repl, false);
      return true;
    }

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    {
      /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
	 It's safe and may even be emitted by GCC itself (see bug
	 32667).  */
      unlink_stmt_vdef (stmt);
      if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	release_ssa_name (gimple_vdef (stmt));
      if (!lhs)
	{
	  gsi_replace (gsi, gimple_build_nop (), false);
	  return true;
	}
      /* A mempcpy/memcpy with a used result still needs the LHS
	 assignment emitted below.  */
      goto done;
    }
  /* In SSA form a call without a virtual definition cannot be turned
     into a store here; leave it alone.  */
  else if (!gimple_vdef (stmt) && gimple_in_ssa_p (cfun))
    return false;
  else
    {
      /* We cannot (easily) change the type of the copy if it is a storage
	 order barrier, i.e. is equivalent to a VIEW_CONVERT_EXPR that can
	 modify the storage order of objects (see storage_order_barrier_p).  */
      tree srctype
	= POINTER_TYPE_P (TREE_TYPE (src))
	  ? TREE_TYPE (TREE_TYPE (src)) : NULL_TREE;
      tree desttype
	= POINTER_TYPE_P (TREE_TYPE (dest))
	  ? TREE_TYPE (TREE_TYPE (dest)) : NULL_TREE;
      tree destvar, srcvar, srcoff;
      unsigned int src_align, dest_align;
      unsigned HOST_WIDE_INT tmp_len;
      const char *tmp_str;

      /* Build accesses at offset zero with a ref-all character type.  */
      tree off0
	= build_int_cst (build_pointer_type_for_mode (char_type_node,
						      ptr_mode, true), 0);

      /* If we can perform the copy efficiently with first doing all loads
	 and then all stores inline it that way.  Currently efficiently
	 means that we can load all the memory into a single integer
	 register which is what MOVE_MAX gives us.  */
      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (tree_fits_uhwi_p (len)
	  && compare_tree_int (len, MOVE_MAX) <= 0
	  /* FIXME: Don't transform copies from strings with known length.
	     Until GCC 9 this prevented a case in gcc.dg/strlenopt-8.c
	     from being handled, and the case was XFAILed for that reason.
	     Now that it is handled and the XFAIL removed, as soon as other
	     strlenopt tests that rely on it for passing are adjusted, this
	     hack can be removed.  */
	  && !c_strlen (src, 1)
	  && !((tmp_str = getbyterep (src, &tmp_len)) != NULL
	       && memchr (tmp_str, 0, tmp_len) == NULL)
	  && !(srctype
	       && AGGREGATE_TYPE_P (srctype)
	       && TYPE_REVERSE_STORAGE_ORDER (srctype))
	  && !(desttype
	       && AGGREGATE_TYPE_P (desttype)
	       && TYPE_REVERSE_STORAGE_ORDER (desttype)))
	{
	  unsigned ilen = tree_to_uhwi (len);
	  if (pow2p_hwi (ilen))
	    {
	      /* Detect out-of-bounds accesses without issuing warnings.
		 Avoid folding out-of-bounds copies but to avoid false
		 positives for unreachable code defer warning until after
		 DCE has worked its magic.
		 -Wrestrict is still diagnosed.  */
	      if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt),
							 dest, src, len, len,
							 false, false))
		if (warning != OPT_Wrestrict)
		  return false;

	      scalar_int_mode imode;
	      machine_mode mode;
	      if (int_mode_for_size (ilen * BITS_PER_UNIT, 0).exists (&imode)
		  && bitwise_mode_for_size (ilen
					    * BITS_PER_UNIT).exists (&mode)
		  && known_eq (GET_MODE_BITSIZE (mode), ilen * BITS_PER_UNIT)
		  /* If the destination pointer is not aligned we must be able
		     to emit an unaligned store.  */
		  && (dest_align >= GET_MODE_ALIGNMENT (mode)
		      || !targetm.slow_unaligned_access (mode, dest_align)
		      || (optab_handler (movmisalign_optab, mode)
			  != CODE_FOR_nothing)))
		{
		  /* These shadow the outer srctype/desttype on purpose:
		     the single-register copy uses the bitwise mode type.  */
		  tree type = bitwise_type_for_mode (mode);
		  tree srctype = type;
		  tree desttype = type;
		  if (src_align < GET_MODE_ALIGNMENT (mode))
		    srctype = build_aligned_type (type, src_align);
		  tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
		  tree tem = fold_const_aggregate_ref (srcmem);
		  if (tem)
		    srcmem = tem;
		  /* Give up if the load would need an unaligned access the
		     target cannot do efficiently or via movmisalign.  */
		  else if (src_align < GET_MODE_ALIGNMENT (mode)
			   && targetm.slow_unaligned_access (mode, src_align)
			   && (optab_handler (movmisalign_optab, mode)
			       == CODE_FOR_nothing))
		    srcmem = NULL_TREE;
		  if (srcmem)
		    {
		      gimple *new_stmt;
		      if (is_gimple_reg_type (TREE_TYPE (srcmem)))
			{
			  /* Load the source into an SSA register first.  */
			  new_stmt = gimple_build_assign (NULL_TREE, srcmem);
			  srcmem
			    = make_ssa_name (TREE_TYPE (srcmem), new_stmt);
			  gimple_assign_set_lhs (new_stmt, srcmem);
			  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
			  gimple_set_location (new_stmt, loc);
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      if (dest_align < GET_MODE_ALIGNMENT (mode))
			desttype = build_aligned_type (type, dest_align);
		      new_stmt
			= gimple_build_assign (fold_build2 (MEM_REF, desttype,
							    dest, off0),
					       srcmem);
		      gimple_move_vops (new_stmt, stmt);
		      if (!lhs)
			{
			  gsi_replace (gsi, new_stmt, false);
			  return true;
			}
		      gimple_set_location (new_stmt, loc);
		      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
		      goto done;
		    }
		}
	    }
	}

      if (code == BUILT_IN_MEMMOVE)
	{
	  /* Both DEST and SRC must be pointer types.
	     ??? This is what old code did.  Is the testing for pointer types
	     really mandatory?

	     If either SRC is readonly or length is 1, we can use memcpy.  */
	  if (!dest_align || !src_align)
	    return false;
	  if (readonly_data_expr (src)
	      || (tree_fits_uhwi_p (len)
		  && (MIN (src_align, dest_align) / BITS_PER_UNIT
		      >= tree_to_uhwi (len))))
	    {
	      tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If *src and *dest can't overlap, optimize into memcpy as well.  */
	  if (TREE_CODE (src) == ADDR_EXPR
	      && TREE_CODE (dest) == ADDR_EXPR)
	    {
	      tree src_base, dest_base, fn;
	      poly_int64 src_offset = 0, dest_offset = 0;
	      poly_uint64 maxsize;

	      srcvar = TREE_OPERAND (src, 0);
	      src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
	      if (src_base == NULL)
		src_base = srcvar;
	      destvar = TREE_OPERAND (dest, 0);
	      dest_base = get_addr_base_and_unit_offset (destvar,
							 &dest_offset);
	      if (dest_base == NULL)
		dest_base = destvar;
	      if (!poly_int_tree_p (len, &maxsize))
		maxsize = -1;
	      if (SSA_VAR_P (src_base)
		  && SSA_VAR_P (dest_base))
		{
		  /* Distinct decls cannot overlap; the same decl may
		     overlap only if the accessed ranges do.  */
		  if (operand_equal_p (src_base, dest_base, 0)
		      && ranges_maybe_overlap_p (src_offset, maxsize,
						 dest_offset, maxsize))
		    return false;
		}
	      else if (TREE_CODE (src_base) == MEM_REF
		       && TREE_CODE (dest_base) == MEM_REF)
		{
		  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
					 TREE_OPERAND (dest_base, 0), 0))
		    return false;
		  poly_offset_int full_src_offset
		    = mem_ref_offset (src_base) + src_offset;
		  poly_offset_int full_dest_offset
		    = mem_ref_offset (dest_base) + dest_offset;
		  if (ranges_maybe_overlap_p (full_src_offset, maxsize,
					      full_dest_offset, maxsize))
		    return false;
		}
	      else
		return false;

	      fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If the destination and source do not alias optimize into
	     memcpy as well.  */
	  if ((is_gimple_min_invariant (dest)
	       || TREE_CODE (dest) == SSA_NAME)
	      && (is_gimple_min_invariant (src)
		  || TREE_CODE (src) == SSA_NAME))
	    {
	      ao_ref destr, srcr;
	      ao_ref_init_from_ptr_and_size (&destr, dest, len);
	      ao_ref_init_from_ptr_and_size (&srcr, src, len);
	      if (!refs_may_alias_p_1 (&destr, &srcr, false))
		{
		  tree fn;
		  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
		  if (!fn)
		    return false;
		  gimple_call_set_fndecl (stmt, fn);
		  gimple_call_set_arg (stmt, 0, dest);
		  gimple_call_set_arg (stmt, 1, src);
		  fold_stmt (gsi);
		  return true;
		}
	    }

	  return false;
	}

      /* From here on we turn the copy into a plain assignment.  */
      if (!tree_fits_shwi_p (len))
	return false;
      if (!srctype
	  || (AGGREGATE_TYPE_P (srctype)
	      && TYPE_REVERSE_STORAGE_ORDER (srctype)))
	return false;
      if (!desttype
	  || (AGGREGATE_TYPE_P (desttype)
	      && TYPE_REVERSE_STORAGE_ORDER (desttype)))
	return false;
      /* In the following try to find a type that is most natural to be
	 used for the memcpy source and destination and that allows
	 the most optimization when memcpy is turned into a plain assignment
	 using that type.  In theory we could always use a char[len] type
	 but that only gains us that the destination and source possibly
	 no longer will have their address taken.  */
      if (TREE_CODE (srctype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	srctype = TREE_TYPE (srctype);
      if (TREE_CODE (desttype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	desttype = TREE_TYPE (desttype);
      if (TREE_ADDRESSABLE (srctype)
	  || TREE_ADDRESSABLE (desttype))
	return false;

      /* Make sure we are not copying using a floating-point mode or
	 a type whose size possibly does not match its precision.  */
      if (FLOAT_MODE_P (TYPE_MODE (desttype))
	  || TREE_CODE (desttype) == BOOLEAN_TYPE
	  || TREE_CODE (desttype) == ENUMERAL_TYPE)
	desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
      if (FLOAT_MODE_P (TYPE_MODE (srctype))
	  || TREE_CODE (srctype) == BOOLEAN_TYPE
	  || TREE_CODE (srctype) == ENUMERAL_TYPE)
	srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
      /* bitwise_type_for_mode may have failed; fall back to the other
	 side's type, and bail out if neither is usable.  */
      if (!srctype)
	srctype = desttype;
      if (!desttype)
	desttype = srctype;
      if (!srctype)
	return false;

      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);

      /* Choose between src and destination type for the access based
	 on alignment, whether the access constitutes a register access
	 and whether it may actually expose a declaration for SSA rewrite
	 or SRA decomposition.  Also try to expose a string constant, we
	 might be able to concatenate several of them later into a single
	 string store.  */
      destvar = NULL_TREE;
      srcvar = NULL_TREE;
      if (TREE_CODE (dest) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (dest, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len)
	  && dest_align >= TYPE_ALIGN (desttype)
	  && (is_gimple_reg_type (desttype)
	      || src_align >= TYPE_ALIGN (desttype)))
	destvar = fold_build2 (MEM_REF, desttype, dest, off0);
      else if (TREE_CODE (src) == ADDR_EXPR
	       && var_decl_component_p (TREE_OPERAND (src, 0))
	       && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
	       && src_align >= TYPE_ALIGN (srctype)
	       && (is_gimple_reg_type (srctype)
		   || dest_align >= TYPE_ALIGN (srctype)))
	srcvar = fold_build2 (MEM_REF, srctype, src, off0);
      /* FIXME: Don't transform copies from strings with known original length.
	 As soon as strlenopt tests that rely on it for passing are adjusted,
	 this hack can be removed.  */
      else if (gimple_call_alloca_for_var_p (stmt)
	       && (srcvar = string_constant (src, &srcoff, NULL, NULL))
	       && integer_zerop (srcoff)
	       && tree_int_cst_equal (TYPE_SIZE_UNIT (TREE_TYPE (srcvar)), len)
	       && dest_align >= TYPE_ALIGN (TREE_TYPE (srcvar)))
	srctype = TREE_TYPE (srcvar);
      else
	return false;

      /* Now that we chose an access type express the other side in
	 terms of it if the target allows that with respect to alignment
	 constraints.  */
      if (srcvar == NULL_TREE)
	{
	  if (src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, desttype, src, off0);
	  else
	    {
	      enum machine_mode mode = TYPE_MODE (desttype);
	      if ((mode == BLKmode && STRICT_ALIGNMENT)
		  || (targetm.slow_unaligned_access (mode, src_align)
		      && (optab_handler (movmisalign_optab, mode)
			  == CODE_FOR_nothing)))
		return false;
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	    }
	}
      else if (destvar == NULL_TREE)
	{
	  if (dest_align >= TYPE_ALIGN (srctype))
	    destvar = fold_build2 (MEM_REF, srctype, dest, off0);
	  else
	    {
	      enum machine_mode mode = TYPE_MODE (srctype);
	      if ((mode == BLKmode && STRICT_ALIGNMENT)
		  || (targetm.slow_unaligned_access (mode, dest_align)
		      && (optab_handler (movmisalign_optab, mode)
			  == CODE_FOR_nothing)))
		return false;
	      desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
					     dest_align);
	      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
	    }
	}

      /* Same as above, detect out-of-bounds accesses without issuing
	 warnings.  Avoid folding out-of-bounds copies but to avoid
	 false positives for unreachable code defer warning until
	 after DCE has worked its magic.
	 -Wrestrict is still diagnosed.  */
      if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt),
						 dest, src, len, len,
						 false, false))
	if (warning != OPT_Wrestrict)
	  return false;

      gimple *new_stmt;
      if (is_gimple_reg_type (TREE_TYPE (srcvar)))
	{
	  tree tem = fold_const_aggregate_ref (srcvar);
	  if (tem)
	    srcvar = tem;
	  if (! is_gimple_min_invariant (srcvar))
	    {
	      /* Load the value into a register before storing it.  */
	      new_stmt = gimple_build_assign (NULL_TREE, srcvar);
	      srcvar = make_ssa_name (TREE_TYPE (srcvar), new_stmt);
	      gimple_assign_set_lhs (new_stmt, srcvar);
	      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
	      gimple_set_location (new_stmt, loc);
	      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
	    }
	  new_stmt = gimple_build_assign (destvar, srcvar);
	  goto set_vop_and_replace;
	}

      /* We get an aggregate copy.  If the source is a STRING_CST, then
	 directly use its type to perform the copy.  */
      if (TREE_CODE (srcvar) == STRING_CST)
	desttype = srctype;

      /* Or else, use an unsigned char[] type to perform the copy in order
	 to preserve padding and to avoid any issues with TREE_ADDRESSABLE
	 types or float modes behavior on copying.  */
      else
	{
	  desttype = build_array_type_nelts (unsigned_char_type_node,
					     tree_to_uhwi (len));
	  srctype = desttype;
	  if (src_align > TYPE_ALIGN (srctype))
	    srctype = build_aligned_type (srctype, src_align);
	  srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	}

      if (dest_align > TYPE_ALIGN (desttype))
	desttype = build_aligned_type (desttype, dest_align);
      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
      new_stmt = gimple_build_assign (destvar, srcvar);

set_vop_and_replace:
      gimple_move_vops (new_stmt, stmt);
      if (!lhs)
	{
	  gsi_replace (gsi, new_stmt, false);
	  return true;
	}
      gimple_set_location (new_stmt, loc);
      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
    }

  /* The copy itself has been emitted (or elided); now materialize the
     result value: DEST for memcpy/memmove, DEST + LEN for mempcpy.  */
done:
  gimple_seq stmts = NULL;
  if (code == BUILT_IN_MEMCPY || code == BUILT_IN_MEMMOVE)
    len = NULL_TREE;
  else if (code == BUILT_IN_MEMPCPY)
    {
      len = gimple_convert_to_ptrofftype (&stmts, loc, len);
      dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
			   TREE_TYPE (dest), dest, len);
    }
  else
    gcc_unreachable ();

  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gimple *repl = gimple_build_assign (lhs, dest);
  gsi_replace (gsi, repl, false);
  return true;
}
1363 :
1364 : /* Transform a call to built-in bcmp(a, b, len) at *GSI into one
1365 : to built-in memcmp (a, b, len). */
1366 :
1367 : static bool
1368 148 : gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
1369 : {
1370 148 : tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);
1371 :
1372 148 : if (!fn)
1373 : return false;
1374 :
1375 : /* Transform bcmp (a, b, len) into memcmp (a, b, len). */
1376 :
1377 148 : gimple *stmt = gsi_stmt (*gsi);
1378 296 : if (!gimple_vuse (stmt) && gimple_in_ssa_p (cfun))
1379 : return false;
1380 148 : tree a = gimple_call_arg (stmt, 0);
1381 148 : tree b = gimple_call_arg (stmt, 1);
1382 148 : tree len = gimple_call_arg (stmt, 2);
1383 :
1384 148 : gimple *repl = gimple_build_call (fn, 3, a, b, len);
1385 148 : replace_call_with_call_and_fold (gsi, repl);
1386 :
1387 148 : return true;
1388 : }
1389 :
1390 : /* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
1391 : to built-in memmove (dest, src, len). */
1392 :
1393 : static bool
1394 367 : gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
1395 : {
1396 367 : tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);
1397 :
1398 367 : if (!fn)
1399 : return false;
1400 :
1401 : /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
1402 : it's quivalent to memmove (not memcpy). Transform bcopy (src, dest,
1403 : len) into memmove (dest, src, len). */
1404 :
1405 367 : gimple *stmt = gsi_stmt (*gsi);
1406 734 : if (!gimple_vdef (stmt) && gimple_in_ssa_p (cfun))
1407 : return false;
1408 367 : tree src = gimple_call_arg (stmt, 0);
1409 367 : tree dest = gimple_call_arg (stmt, 1);
1410 367 : tree len = gimple_call_arg (stmt, 2);
1411 :
1412 367 : gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1413 367 : gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
1414 367 : replace_call_with_call_and_fold (gsi, repl);
1415 :
1416 367 : return true;
1417 : }
1418 :
1419 : /* Transform a call to built-in bzero (dest, len) at *GSI into one
1420 : to built-in memset (dest, 0, len). */
1421 :
1422 : static bool
1423 250 : gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
1424 : {
1425 250 : tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
1426 :
1427 250 : if (!fn)
1428 : return false;
1429 :
1430 : /* Transform bzero (dest, len) into memset (dest, 0, len). */
1431 :
1432 250 : gimple *stmt = gsi_stmt (*gsi);
1433 500 : if (!gimple_vdef (stmt) && gimple_in_ssa_p (cfun))
1434 : return false;
1435 250 : tree dest = gimple_call_arg (stmt, 0);
1436 250 : tree len = gimple_call_arg (stmt, 1);
1437 :
1438 250 : gimple_seq seq = NULL;
1439 250 : gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
1440 250 : gimple_seq_add_stmt_without_update (&seq, repl);
1441 250 : gsi_replace_with_seq_vops (gsi, seq);
1442 250 : fold_stmt (gsi);
1443 :
1444 250 : return true;
1445 : }
1446 :
1447 : /* Fold function call to builtin memset or bzero at *GSI setting the
1448 : memory of size LEN to VAL. Return whether a simplification was made. */
1449 :
1450 : static bool
1451 309668 : gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1452 : {
1453 309668 : gimple *stmt = gsi_stmt (*gsi);
1454 309668 : tree etype;
1455 309668 : unsigned HOST_WIDE_INT length, cval;
1456 :
1457 : /* If the LEN parameter is zero, return DEST. */
1458 309668 : if (integer_zerop (len))
1459 : {
1460 811 : replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1461 811 : return true;
1462 : }
1463 :
1464 924818 : if (!gimple_vdef (stmt) && gimple_in_ssa_p (cfun))
1465 : return false;
1466 :
1467 308857 : if (! tree_fits_uhwi_p (len))
1468 : return false;
1469 :
1470 200362 : if (TREE_CODE (c) != INTEGER_CST)
1471 : return false;
1472 :
1473 194515 : tree dest = gimple_call_arg (stmt, 0);
1474 194515 : tree var = dest;
1475 194515 : if (TREE_CODE (var) != ADDR_EXPR)
1476 : return false;
1477 :
1478 155961 : var = TREE_OPERAND (var, 0);
1479 155961 : if (TREE_THIS_VOLATILE (var))
1480 : return false;
1481 :
1482 155918 : etype = TREE_TYPE (var);
1483 155918 : if (TREE_CODE (etype) == ARRAY_TYPE)
1484 81427 : etype = TREE_TYPE (etype);
1485 :
1486 155918 : if ((!INTEGRAL_TYPE_P (etype)
1487 96132 : && !POINTER_TYPE_P (etype))
1488 60311 : || TREE_CODE (etype) == BITINT_TYPE)
1489 : return false;
1490 :
1491 60274 : if (! var_decl_component_p (var))
1492 : return false;
1493 :
1494 60274 : length = tree_to_uhwi (len);
1495 60274 : if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
1496 1753 : || (GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (etype))
1497 3506 : != GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (etype)))
1498 62027 : || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1499 58521 : return false;
1500 :
1501 1753 : if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1502 : return false;
1503 :
1504 1753 : if (!type_has_mode_precision_p (etype))
1505 7 : etype = lang_hooks.types.type_for_mode (SCALAR_INT_TYPE_MODE (etype),
1506 7 : TYPE_UNSIGNED (etype));
1507 :
1508 1753 : if (integer_zerop (c))
1509 : cval = 0;
1510 : else
1511 : {
1512 337 : if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1513 : return NULL_TREE;
1514 :
1515 337 : cval = TREE_INT_CST_LOW (c);
1516 337 : cval &= 0xff;
1517 337 : cval |= cval << 8;
1518 337 : cval |= cval << 16;
1519 337 : cval |= (cval << 31) << 1;
1520 : }
1521 :
1522 1753 : var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1523 1753 : gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1524 1753 : gimple_move_vops (store, stmt);
1525 1753 : gimple_set_location (store, gimple_location (stmt));
1526 1753 : gsi_insert_before (gsi, store, GSI_SAME_STMT);
1527 1753 : if (gimple_call_lhs (stmt))
1528 : {
1529 2 : gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1530 2 : gsi_replace (gsi, asgn, false);
1531 : }
1532 : else
1533 : {
1534 1751 : gimple_stmt_iterator gsi2 = *gsi;
1535 1751 : gsi_prev (gsi);
1536 1751 : gsi_remove (&gsi2, true);
1537 : }
1538 :
1539 : return true;
1540 : }
1541 :
1542 : /* Helper of get_range_strlen for ARG that is not an SSA_NAME. */
1543 :
1544 : static bool
1545 453311 : get_range_strlen_tree (tree arg, bitmap visited, strlen_range_kind rkind,
1546 : c_strlen_data *pdata, unsigned eltsize)
1547 : {
1548 453311 : gcc_assert (TREE_CODE (arg) != SSA_NAME);
1549 :
1550 : /* The length computed by this invocation of the function. */
1551 453311 : tree val = NULL_TREE;
1552 :
1553 : /* True if VAL is an optimistic (tight) bound determined from
1554 : the size of the character array in which the string may be
1555 : stored. In that case, the computed VAL is used to set
1556 : PDATA->MAXBOUND. */
1557 453311 : bool tight_bound = false;
1558 :
1559 : /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1560 453311 : if (TREE_CODE (arg) == ADDR_EXPR
1561 453311 : && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
1562 : {
1563 28202 : tree op = TREE_OPERAND (arg, 0);
1564 28202 : if (integer_zerop (TREE_OPERAND (op, 1)))
1565 : {
1566 12028 : tree aop0 = TREE_OPERAND (op, 0);
1567 12028 : if (TREE_CODE (aop0) == INDIRECT_REF
1568 12028 : && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
1569 0 : return get_range_strlen (TREE_OPERAND (aop0, 0), visited, rkind,
1570 0 : pdata, eltsize);
1571 : }
1572 16174 : else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF
1573 16174 : && rkind == SRK_LENRANGE)
1574 : {
1575 : /* Fail if an array is the last member of a struct object
1576 : since it could be treated as a (fake) flexible array
1577 : member. */
1578 4785 : tree idx = TREE_OPERAND (op, 1);
1579 :
1580 4785 : arg = TREE_OPERAND (op, 0);
1581 4785 : tree optype = TREE_TYPE (arg);
1582 4785 : if (tree dom = TYPE_DOMAIN (optype))
1583 4785 : if (tree bound = TYPE_MAX_VALUE (dom))
1584 4785 : if (TREE_CODE (bound) == INTEGER_CST
1585 4785 : && TREE_CODE (idx) == INTEGER_CST
1586 8000 : && tree_int_cst_lt (bound, idx))
1587 : return false;
1588 : }
1589 : }
1590 :
1591 453103 : if (rkind == SRK_INT_VALUE)
1592 : {
1593 : /* We are computing the maximum value (not string length). */
1594 25671 : val = arg;
1595 25671 : if (TREE_CODE (val) != INTEGER_CST
1596 25671 : || tree_int_cst_sgn (val) < 0)
1597 2570 : return false;
1598 : }
1599 : else
1600 : {
1601 427432 : c_strlen_data lendata = { };
1602 427432 : val = c_strlen (arg, 1, &lendata, eltsize);
1603 :
1604 427432 : if (!val && lendata.decl)
1605 : {
1606 : /* ARG refers to an unterminated const character array.
1607 : DATA.DECL with size DATA.LEN. */
1608 4193 : val = lendata.minlen;
1609 4193 : pdata->decl = lendata.decl;
1610 : }
1611 : }
1612 :
1613 : /* Set if VAL represents the maximum length based on array size (set
1614 : when exact length cannot be determined). */
1615 450533 : bool maxbound = false;
1616 :
1617 450533 : if (!val && rkind == SRK_LENRANGE)
1618 : {
1619 226449 : if (TREE_CODE (arg) == ADDR_EXPR)
1620 80347 : return get_range_strlen (TREE_OPERAND (arg, 0), visited, rkind,
1621 80347 : pdata, eltsize);
1622 :
1623 146102 : if (TREE_CODE (arg) == ARRAY_REF)
1624 : {
1625 18260 : tree optype = TREE_TYPE (TREE_OPERAND (arg, 0));
1626 :
1627 : /* Determine the "innermost" array type. */
1628 18260 : while (TREE_CODE (optype) == ARRAY_TYPE
1629 25134 : && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
1630 6874 : optype = TREE_TYPE (optype);
1631 :
1632 : /* Avoid arrays of pointers. */
1633 18260 : tree eltype = TREE_TYPE (optype);
1634 18260 : if (TREE_CODE (optype) != ARRAY_TYPE
1635 18260 : || !INTEGRAL_TYPE_P (eltype))
1636 : return false;
1637 :
1638 : /* Fail when the array bound is unknown or zero. */
1639 13482 : val = TYPE_SIZE_UNIT (optype);
1640 13482 : if (!val
1641 13410 : || TREE_CODE (val) != INTEGER_CST
1642 26864 : || integer_zerop (val))
1643 105 : return false;
1644 :
1645 13377 : val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1646 : integer_one_node);
1647 :
1648 : /* Set the minimum size to zero since the string in
1649 : the array could have zero length. */
1650 13377 : pdata->minlen = ssize_int (0);
1651 :
1652 13377 : tight_bound = true;
1653 : }
1654 127842 : else if (TREE_CODE (arg) == COMPONENT_REF
1655 127842 : && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))
1656 : == ARRAY_TYPE))
1657 : {
1658 : /* Use the type of the member array to determine the upper
1659 : bound on the length of the array. This may be overly
1660 : optimistic if the array itself isn't NUL-terminated and
1661 : the caller relies on the subsequent member to contain
1662 : the NUL but that would only be considered valid if
1663 : the array were the last member of a struct. */
1664 :
1665 9580 : tree fld = TREE_OPERAND (arg, 1);
1666 :
1667 9580 : tree optype = TREE_TYPE (fld);
1668 :
1669 : /* Determine the "innermost" array type. */
1670 9580 : while (TREE_CODE (optype) == ARRAY_TYPE
1671 10127 : && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
1672 547 : optype = TREE_TYPE (optype);
1673 :
1674 : /* Fail when the array bound is unknown or zero. */
1675 9580 : val = TYPE_SIZE_UNIT (optype);
1676 9580 : if (!val
1677 9344 : || TREE_CODE (val) != INTEGER_CST
1678 18889 : || integer_zerop (val))
1679 350 : return false;
1680 9230 : val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1681 : integer_one_node);
1682 :
1683 : /* Set the minimum size to zero since the string in
1684 : the array could have zero length. */
1685 9230 : pdata->minlen = ssize_int (0);
1686 :
1687 : /* The array size determined above is an optimistic bound
1688 : on the length. If the array isn't nul-terminated the
1689 : length computed by the library function would be greater.
1690 : Even though using strlen to cross the subobject boundary
1691 : is undefined, avoid drawing conclusions from the member
1692 : type about the length here. */
1693 9230 : tight_bound = true;
1694 : }
1695 118262 : else if (TREE_CODE (arg) == MEM_REF
1696 29307 : && TREE_CODE (TREE_TYPE (arg)) == ARRAY_TYPE
1697 4457 : && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == INTEGER_TYPE
1698 122281 : && TREE_CODE (TREE_OPERAND (arg, 0)) == ADDR_EXPR)
1699 : {
1700 : /* Handle a MEM_REF into a DECL accessing an array of integers,
1701 : being conservative about references to extern structures with
1702 : flexible array members that can be initialized to arbitrary
1703 : numbers of elements as an extension (static structs are okay). */
1704 4019 : tree ref = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
1705 4019 : if ((TREE_CODE (ref) == PARM_DECL || VAR_P (ref))
1706 8025 : && (decl_binds_to_current_def_p (ref)
1707 438 : || !array_ref_flexible_size_p (arg)))
1708 : {
1709 : /* Fail if the offset is out of bounds. Such accesses
1710 : should be diagnosed at some point. */
1711 3893 : val = DECL_SIZE_UNIT (ref);
1712 3893 : if (!val
1713 3721 : || TREE_CODE (val) != INTEGER_CST
1714 7614 : || integer_zerop (val))
1715 371 : return false;
1716 :
1717 3719 : poly_offset_int psiz = wi::to_offset (val);
1718 3719 : poly_offset_int poff = mem_ref_offset (arg);
1719 3719 : if (known_le (psiz, poff))
1720 : return false;
1721 :
1722 3522 : pdata->minlen = ssize_int (0);
1723 :
1724 : /* Subtract the offset and one for the terminating nul. */
1725 3522 : psiz -= poff;
1726 3522 : psiz -= 1;
1727 3522 : val = wide_int_to_tree (TREE_TYPE (val), psiz);
1728 : /* Since VAL reflects the size of a declared object
1729 : rather the type of the access it is not a tight bound. */
1730 : }
1731 : }
1732 114243 : else if (TREE_CODE (arg) == PARM_DECL || VAR_P (arg))
1733 : {
1734 : /* Avoid handling pointers to arrays. GCC might misuse
1735 : a pointer to an array of one bound to point to an array
1736 : object of a greater bound. */
1737 70059 : tree argtype = TREE_TYPE (arg);
1738 70059 : if (TREE_CODE (argtype) == ARRAY_TYPE)
1739 : {
1740 42150 : val = TYPE_SIZE_UNIT (argtype);
1741 42150 : if (!val
1742 41384 : || TREE_CODE (val) != INTEGER_CST
1743 83534 : || integer_zerop (val))
1744 881 : return false;
1745 41269 : val = wide_int_to_tree (TREE_TYPE (val),
1746 41269 : wi::sub (wi::to_wide (val), 1));
1747 :
1748 : /* Set the minimum size to zero since the string in
1749 : the array could have zero length. */
1750 41269 : pdata->minlen = ssize_int (0);
1751 : }
1752 : }
1753 : maxbound = true;
1754 : }
1755 :
1756 363701 : if (!val)
1757 : return false;
1758 :
1759 : /* Adjust the lower bound on the string length as necessary. */
1760 267646 : if (!pdata->minlen
1761 267646 : || (rkind != SRK_STRLEN
1762 72740 : && TREE_CODE (pdata->minlen) == INTEGER_CST
1763 72740 : && TREE_CODE (val) == INTEGER_CST
1764 72735 : && tree_int_cst_lt (val, pdata->minlen)))
1765 194998 : pdata->minlen = val;
1766 :
1767 267646 : if (pdata->maxbound && TREE_CODE (pdata->maxbound) == INTEGER_CST)
1768 : {
1769 : /* Adjust the tighter (more optimistic) string length bound
1770 : if necessary and proceed to adjust the more conservative
1771 : bound. */
1772 1754 : if (TREE_CODE (val) == INTEGER_CST)
1773 : {
1774 1754 : if (tree_int_cst_lt (pdata->maxbound, val))
1775 657 : pdata->maxbound = val;
1776 : }
1777 : else
1778 0 : pdata->maxbound = val;
1779 : }
1780 265892 : else if (pdata->maxbound || maxbound)
1781 : /* Set PDATA->MAXBOUND only if it either isn't INTEGER_CST or
1782 : if VAL corresponds to the maximum length determined based
1783 : on the type of the object. */
1784 70388 : pdata->maxbound = val;
1785 :
1786 267646 : if (tight_bound)
1787 : {
1788 : /* VAL computed above represents an optimistically tight bound
1789 : on the length of the string based on the referenced object's
1790 : or subobject's type. Determine the conservative upper bound
1791 : based on the enclosing object's size if possible. */
1792 22607 : if (rkind == SRK_LENRANGE)
1793 : {
1794 22607 : poly_int64 offset;
1795 22607 : tree base = get_addr_base_and_unit_offset (arg, &offset);
1796 22607 : if (!base)
1797 : {
1798 : /* When the call above fails due to a non-constant offset
1799 : assume the offset is zero and use the size of the whole
1800 : enclosing object instead. */
1801 7837 : base = get_base_address (arg);
1802 7837 : offset = 0;
1803 : }
1804 : /* If the base object is a pointer no upper bound on the length
1805 : can be determined. Otherwise the maximum length is equal to
1806 : the size of the enclosing object minus the offset of
1807 : the referenced subobject minus 1 (for the terminating nul). */
1808 22607 : tree type = TREE_TYPE (base);
1809 22607 : if (POINTER_TYPE_P (type)
1810 22603 : || (TREE_CODE (base) != PARM_DECL && !VAR_P (base))
1811 41010 : || !(val = DECL_SIZE_UNIT (base)))
1812 5451 : val = build_all_ones_cst (size_type_node);
1813 : else
1814 : {
1815 17156 : val = DECL_SIZE_UNIT (base);
1816 17156 : val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1817 : size_int (offset + 1));
1818 : }
1819 : }
1820 : else
1821 : return false;
1822 : }
1823 :
1824 267646 : if (pdata->maxlen)
1825 : {
1826 : /* Adjust the more conservative bound if possible/necessary
1827 : and fail otherwise. */
1828 9070 : if (rkind != SRK_STRLEN)
1829 : {
1830 8139 : if (TREE_CODE (pdata->maxlen) != INTEGER_CST
1831 8139 : || TREE_CODE (val) != INTEGER_CST)
1832 : return false;
1833 :
1834 8134 : if (tree_int_cst_lt (pdata->maxlen, val))
1835 1464 : pdata->maxlen = val;
1836 8134 : return true;
1837 : }
1838 931 : else if (simple_cst_equal (val, pdata->maxlen) != 1)
1839 : {
1840 : /* Fail if the length of this ARG is different from that
1841 : previously determined from another ARG. */
1842 : return false;
1843 : }
1844 : }
1845 :
1846 258700 : pdata->maxlen = val;
1847 258700 : return rkind == SRK_LENRANGE || !integer_all_onesp (val);
1848 : }
1849 :
1850 : /* For an ARG referencing one or more strings, try to obtain the range
1851 :    of their lengths, or the size of the largest array ARG refers to if
1852 : the range of lengths cannot be determined, and store all in *PDATA.
1853 : For an integer ARG (when RKIND == SRK_INT_VALUE), try to determine
1854 : the maximum constant value.
1855 : If ARG is an SSA_NAME, follow its use-def chains. When RKIND ==
1856 : SRK_STRLEN, then if PDATA->MAXLEN is not equal to the determined
1857 : length or if we are unable to determine the length, return false.
1858 : VISITED is a bitmap of visited variables.
1859 : RKIND determines the kind of value or range to obtain (see
1860 : strlen_range_kind).
1861 : Set PDATA->DECL if ARG refers to an unterminated constant array.
1862 : On input, set ELTSIZE to 1 for normal single byte character strings,
1863 :    and either 2 or 4 for wide character strings (the size of wchar_t).
1864 : Return true if *PDATA was successfully populated and false otherwise. */
1865 :
1866             : static bool
1867     1367040 : get_range_strlen (tree arg, bitmap visited,
1868             :                   strlen_range_kind rkind,
1869             :                   c_strlen_data *pdata, unsigned eltsize)
1870             : {
1871             :
1872     1445273 :   if (TREE_CODE (arg) != SSA_NAME)
1873      453311 :     return get_range_strlen_tree (arg, visited, rkind, pdata, eltsize);
1874             :
1875             :   /* If ARG is registered for SSA update we cannot look at its defining
1876             :      statement.  */
1877      991962 :   if (name_registered_for_update_p (arg))
1878             :     return false;
1879             :
1880             :   /* If we were already here, break the infinite cycle.  Returning true
1881             :      here is optimistic: the length of the cyclic argument is constrained
1882             :      by the other, non-cyclic paths that reach the same SSA name.  */
1881      991962 :   if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
1882             :     return true;
1883             :
1884      986573 :   tree var = arg;
1885      986573 :   gimple *def_stmt = SSA_NAME_DEF_STMT (var);
1886             :
1887             :   /* Dispatch on the kind of statement defining ARG; only copies/casts,
1888             :      COND_EXPRs and PHIs can be followed.  */
1887      986573 :   switch (gimple_code (def_stmt))
1888             :     {
1889      122674 :     case GIMPLE_ASSIGN:
1890             :       /* The RHS of the statement defining VAR must either have a
1891             :          constant length or come from another SSA_NAME with a constant
1892             :          length.  */
1893      122674 :       if (gimple_assign_single_p (def_stmt)
1894      122674 :           || gimple_assign_unary_nop_p (def_stmt))
1895             :         {
1896       78233 :           tree rhs = gimple_assign_rhs1 (def_stmt);
1897       78233 :           return get_range_strlen (rhs, visited, rkind, pdata, eltsize);
1898             :         }
1899       44441 :       else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1900             :         {
1901         246 :           tree ops[2] = { gimple_assign_rhs2 (def_stmt),
1902         246 :                           gimple_assign_rhs3 (def_stmt) };
1903             :
1904         738 :           for (unsigned int i = 0; i < 2; i++)
1905         492 :             if (!get_range_strlen (ops[i], visited, rkind, pdata, eltsize))
1906             :               {
1907          28 :                 if (rkind != SRK_LENRANGE)
1908             :                   return false;
1909             :                 /* Set the upper bound to the maximum to prevent
1910             :                    it from being adjusted in the next iteration but
1911             :                    leave MINLEN and the more conservative MAXBOUND
1912             :                    determined so far alone (or leave them null if
1913             :                    they haven't been set yet).  That the MINLEN is
1914             :                    in fact zero can be determined from MAXLEN being
1915             :                    unbounded but the discovered minimum is used for
1916             :                    diagnostics.  */
1917          28 :                 pdata->maxlen = build_all_ones_cst (size_type_node);
1918             :               }
1919             :           return true;
1920             :         }
1921             :       return false;
1922             :
1923             :     case GIMPLE_PHI:
1924             :       /* Unless RKIND == SRK_LENRANGE, all arguments of the PHI node
1925             :          must have a constant length.  */
1926       75523 :       for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++)
1927             :         {
1928       53702 :           tree arg = gimple_phi_arg (def_stmt, i)->def;
1929             :
1930             :           /* If this PHI has itself as an argument, we cannot
1931             :              determine the string length of this argument.  However,
1932             :              if we can find a constant string length for the other
1933             :              PHI args then we can still be sure that this is a
1934             :              constant string length.  So be optimistic and just
1935             :              continue with the next argument.  */
1936       53702 :           if (arg == gimple_phi_result (def_stmt))
1937           0 :             continue;
1938             :
1939       53702 :           if (!get_range_strlen (arg, visited, rkind, pdata, eltsize))
1940             :             {
1941       28421 :               if (rkind != SRK_LENRANGE)
1942             :                 return false;
1943             :               /* Set the upper bound to the maximum to prevent
1944             :                  it from being adjusted in the next iteration but
1945             :                  leave MINLEN and the more conservative MAXBOUND
1946             :                  determined so far alone (or leave them null if
1947             :                  they haven't been set yet).  That the MINLEN is
1948             :                  in fact zero can be determined from MAXLEN being
1949             :                  unbounded but the discovered minimum is used for
1950             :                  diagnostics.  */
1951       26694 :               pdata->maxlen = build_all_ones_cst (size_type_node);
1952             :             }
1953             :         }
1954             :       return true;
1955             :
1956             :     default:
1957             :       return false;
1958             :     }
1959             : }
1960 :
1961 : /* Try to obtain the range of the lengths of the string(s) referenced
1962 : by ARG, or the size of the largest array ARG refers to if the range
1963 : of lengths cannot be determined, and store all in *PDATA which must
1964 : be zero-initialized on input except PDATA->MAXBOUND may be set to
1965 : a non-null tree node other than INTEGER_CST to request to have it
1966 : set to the length of the longest string in a PHI. ELTSIZE is
1967 : the expected size of the string element in bytes: 1 for char and
1968 : some power of 2 for wide characters.
1969 : Return true if the range [PDATA->MINLEN, PDATA->MAXLEN] is suitable
1970 : for optimization. Returning false means that a nonzero PDATA->MINLEN
1971 : doesn't reflect the true lower bound of the range when PDATA->MAXLEN
1972 : is -1 (in that case, the actual range is indeterminate, i.e.,
1973 : [0, PTRDIFF_MAX - 2]. */
1974 :
1975             : bool
1976     1137062 : get_range_strlen (tree arg, c_strlen_data *pdata, unsigned eltsize)
1977             : {
1978     1137062 :   auto_bitmap visited;
1979     1137062 :   tree maxbound = pdata->maxbound;
1980             :
1981             :   /* Public entry point: always query the full length range
1982             :      (SRK_LENRANGE) with a fresh visited-SSA-names bitmap.  */
1981     1137062 :   if (!get_range_strlen (arg, visited, SRK_LENRANGE, pdata, eltsize))
1982             :     {
1983             :       /* On failure extend the length range to an impossible maximum
1984             :          (a valid MAXLEN must be less than PTRDIFF_MAX - 1).  Other
1985             :          members can stay unchanged regardless.  */
1986      914138 :       pdata->minlen = ssize_int (0);
1987      914138 :       pdata->maxlen = build_all_ones_cst (size_type_node);
1988             :     }
1989      222924 :   else if (!pdata->minlen)
1990        8851 :     pdata->minlen = ssize_int (0);
1991             :
1992             :   /* If it's unchanged from its initial non-null value, set the conservative
1993             :      MAXBOUND to SIZE_MAX.  Otherwise leave it null (if it is null).  */
1994     1137062 :   if (maxbound && pdata->maxbound == maxbound)
1995      650556 :     pdata->maxbound = build_all_ones_cst (size_type_node);
1996             :
1997             :   /* All-ones MAXLEN means the range is unbounded and thus unusable.  */
1997     1137062 :   return !integer_all_onesp (pdata->maxlen);
1998     1137062 : }
1999 :
2000 : /* Return the maximum value for ARG given RKIND (see strlen_range_kind).
2001 : For ARG of pointer types, NONSTR indicates if the caller is prepared
2002 : to handle unterminated strings. For integer ARG and when RKIND ==
2003 : SRK_INT_VALUE, NONSTR must be null.
2004 :
2005 : If an unterminated array is discovered and our caller handles
2006 : unterminated arrays, then bubble up the offending DECL and
2007 : return the maximum size. Otherwise return NULL. */
2008 :
2009             : static tree
2010       95437 : get_maxval_strlen (tree arg, strlen_range_kind rkind, tree *nonstr = NULL)
2011             : {
2012             :   /* A non-null NONSTR is meaningless when determining the maximum
2013             :      value of an integer ARG.  */
2014       95437 :   gcc_assert (rkind != SRK_INT_VALUE || nonstr == NULL);
2015             :   /* ARG must have an integral type when RKIND says so.  */
2016       95437 :   gcc_assert (rkind != SRK_INT_VALUE || INTEGRAL_TYPE_P (TREE_TYPE (arg)));
2017             :
2018       95437 :   auto_bitmap visited;
2019             :
2020             :   /* Reset DATA.MAXLEN if the call fails or when DATA.MAXLEN
2021             :      is unbounded.  ELTSIZE of 1 restricts the query to narrow
2022             :      (single-byte character) strings.  */
2022       95437 :   c_strlen_data lendata = { };
2023       95437 :   if (!get_range_strlen (arg, visited, rkind, &lendata, /* eltsize = */1))
2024       49818 :     lendata.maxlen = NULL_TREE;
2025       45619 :   else if (lendata.maxlen && integer_all_onesp (lendata.maxlen))
2026           0 :     lendata.maxlen = NULL_TREE;
2027             :
2028       95437 :   if (nonstr)
2029             :     {
2030             :       /* For callers prepared to handle unterminated arrays set
2031             :          *NONSTR to point to the declaration of the array and return
2032             :          the maximum length/size.  */
2033       23877 :       *nonstr = lendata.decl;
2034       23877 :       return lendata.maxlen;
2035             :     }
2036             :
2037             :   /* Fail if the constant array isn't nul-terminated.  */
2038       71560 :   return lendata.decl ? NULL_TREE : lendata.maxlen;
2039       95437 : }
2040 :
2041 : /* Return true if LEN is known to be less than or equal to (or if STRICT is
2042 : true, strictly less than) the lower bound of SIZE at compile time and false
2043 : otherwise. */
2044 :
2045             : static bool
2046       62890 : known_lower (gimple *stmt, tree len, tree size, bool strict = false)
2047             : {
2048       62890 :   if (len == NULL_TREE)
2049             :     return false;
2050             :
2051      234320 :   wide_int size_range[2];
2052      234320 :   wide_int len_range[2];
2053             :   /* Compare the upper bound of LEN against the lower bound of SIZE
2054             :      so the answer holds for every possible runtime value.  */
2053       46864 :   if (get_range (len, stmt, len_range) && get_range (size, stmt, size_range))
2054             :     {
2055       16443 :       if (strict)
2056        1803 :         return wi::ltu_p (len_range[1], size_range[0]);
2057             :       else
2058       14640 :         return wi::leu_p (len_range[1], size_range[0]);
2059             :     }
2060             :
2061             :   /* Conservatively answer "unknown" when either range is unavailable.  */
2062             :   return false;
2063      281184 : }
2063 :
2064 : /* Fold function call to builtin strcpy with arguments DEST and SRC.
2065 : If LEN is not NULL, it represents the length of the string to be
2066 :    copied.  Return false if no simplification can be made.  */
2067 :
2068             : static bool
2069       25961 : gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
2070             :                             tree dest, tree src)
2071             : {
2072       25961 :   gimple *stmt = gsi_stmt (*gsi);
2073       25961 :   location_t loc = gimple_location (stmt);
2074       25961 :   tree fn;
2075             :
2076             :   /* If SRC and DEST are the same (and not volatile), return DEST.  */
2077       25961 :   if (operand_equal_p (src, dest, 0))
2078             :     {
2079             :       /* Issue -Wrestrict unless the pointers are null (those do
2080             :          not point to objects and so do not indicate an overlap;
2081             :          such calls could be the result of sanitization and jump
2082             :          threading).  */
2083          86 :       if (!integer_zerop (dest) && !warning_suppressed_p (stmt, OPT_Wrestrict))
2084             :         {
2085          51 :           tree func = gimple_call_fndecl (stmt);
2086             :
2087          51 :           warning_at (loc, OPT_Wrestrict,
2088             :                       "%qD source argument is the same as destination",
2089             :                       func);
2090             :         }
2091             :
2092          86 :       replace_call_with_value (gsi, dest);
2093          86 :       return true;
2094             :     }
2095             :
2096             :   /* The strcpy -> memcpy transform below trades size for speed.  */
2096       25875 :   if (optimize_function_for_size_p (cfun))
2097             :     return false;
2098             :
2099       23877 :   fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2100       23877 :   if (!fn)
2101             :     return false;
2102             :
2103             :   /* Set to non-null if ARG refers to an unterminated array.  */
2104       23877 :   tree nonstr = NULL;
2105       23877 :   tree len = get_maxval_strlen (src, SRK_STRLEN, &nonstr);
2106             :
2107       23877 :   if (nonstr)
2108             :     {
2109             :       /* Avoid folding calls with unterminated arrays.  */
2110         531 :       if (!warning_suppressed_p (stmt, OPT_Wstringop_overread))
2111          69 :         warn_string_no_nul (loc, stmt, "strcpy", src, nonstr);
2112         531 :       suppress_warning (stmt, OPT_Wstringop_overread);
2113         531 :       return false;
2114             :     }
2115             :
2116       28702 :   if (!len || (!gimple_vdef (stmt) && gimple_in_ssa_p (cfun)))
2117             :     return false;
2118             :
2119             :   /* Copy LEN + 1 bytes so the terminating nul is included.  */
2119        2827 :   len = fold_convert_loc (loc, size_type_node, len);
2120        2827 :   len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
2121        2827 :   len = force_gimple_operand_gsi (gsi, len, true,
2122             :                                   NULL_TREE, true, GSI_SAME_STMT);
2123        2827 :   gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2124        2827 :   replace_call_with_call_and_fold (gsi, repl);
2125        2827 :   return true;
2126             : }
2127 :
2128 : /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
2129 : If SLEN is not NULL, it represents the length of the source string.
2130 :    Return false if no simplification can be made.  */
2131 :
2132             : static bool
2133       17202 : gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
2134             :                              tree dest, tree src, tree len)
2135             : {
2136       17202 :   gimple *stmt = gsi_stmt (*gsi);
2137       17202 :   location_t loc = gimple_location (stmt);
2138             :   /* True when DEST is declared with attribute nonstring, i.e. the
2139             :      caller does not expect a nul-terminated result.  */
2138       17202 :   bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;
2139             :
2140             :   /* If the LEN parameter is zero, return DEST.  */
2141       17202 :   if (integer_zerop (len))
2142             :     {
2143             :       /* Avoid warning if the destination refers to an array/pointer
2144             :          decorate with attribute nonstring.  */
2145         167 :       if (!nonstring)
2146             :         {
2147         155 :           tree fndecl = gimple_call_fndecl (stmt);
2148             :
2149             :           /* Warn about the lack of nul termination: the result is not
2150             :              a (nul-terminated) string.  */
2151         155 :           tree slen = get_maxval_strlen (src, SRK_STRLEN);
2152         155 :           if (slen && !integer_zerop (slen))
2153          24 :             warning_at (loc, OPT_Wstringop_truncation,
2154             :                         "%qD destination unchanged after copying no bytes "
2155             :                         "from a string of length %E",
2156             :                         fndecl, slen);
2157             :           else
2158         131 :             warning_at (loc, OPT_Wstringop_truncation,
2159             :                         "%qD destination unchanged after copying no bytes",
2160             :                         fndecl);
2161             :         }
2162             :
2163         167 :       replace_call_with_value (gsi, dest);
2164         167 :       return true;
2165             :     }
2166             :
2167             :   /* We can't compare slen with len as constants below if len is not a
2168             :      constant.  */
2169       17035 :   if (TREE_CODE (len) != INTEGER_CST)
2170             :     return false;
2171             :
2172             :   /* Now, we must be passed a constant src ptr parameter.  */
2173       10680 :   tree slen = get_maxval_strlen (src, SRK_STRLEN);
2174       10680 :   if (!slen || TREE_CODE (slen) != INTEGER_CST)
2175             :     return false;
2176             :
2177             :   /* The size of the source string including the terminating nul.  */
2178        1780 :   tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
2179             :
2180             :   /* We do not support simplification of this case, though we do
2181             :      support it when expanding trees into RTL.  */
2182             :   /* FIXME: generate a call to __builtin_memset.  */
2183        1780 :   if (tree_int_cst_lt (ssize, len))
2184             :     return false;
2185             :
2186             :   /* Diagnose truncation that leaves the copy unterminated.  */
2187         695 :   maybe_diag_stxncpy_trunc (*gsi, src, len);
2188             :
2189             :   /* OK transform into builtin memcpy.  */
2190         695 :   tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2191       17730 :   if (!fn || (!gimple_vdef (stmt) && gimple_in_ssa_p (cfun)))
2192             :     return false;
2193             :
2194         695 :   len = fold_convert_loc (loc, size_type_node, len);
2195         695 :   len = force_gimple_operand_gsi (gsi, len, true,
2196             :                                   NULL_TREE, true, GSI_SAME_STMT);
2197         695 :   gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2198         695 :   replace_call_with_call_and_fold (gsi, repl);
2199             :
2200         695 :   return true;
2201             : }
2202 :
2203 : /* Fold function call to builtin strchr or strrchr.
2204 : If both arguments are constant, evaluate and fold the result,
2205 : otherwise simplify str(r)chr (str, 0) into str + strlen (str).
2206 : In general strlen is significantly faster than strchr
2207 : due to being a simpler operation. */
2208             : static bool
2209        5397 : gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
2210             : {
2211        5397 :   gimple *stmt = gsi_stmt (*gsi);
2212        5397 :   tree str = gimple_call_arg (stmt, 0);
2213        5397 :   tree c = gimple_call_arg (stmt, 1);
2214        5397 :   location_t loc = gimple_location (stmt);
2215        5397 :   const char *p;
2216        5397 :   char ch;
2217             :
2218             :   /* Without a use of the result there is nothing to fold into.  */
2218        5397 :   if (!gimple_call_lhs (stmt))
2219             :     return false;
2220             :
2221             :   /* Avoid folding if the first argument is not a nul-terminated array.
2222             :      Defer warning until later.  */
2223        5387 :   if (!check_nul_terminated_array (NULL_TREE, str))
2224             :     return false;
2225             :
2226             :   /* Both arguments constant: evaluate the search at compile time.  */
2226        5303 :   if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
2227             :     {
2228          41 :       const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);
2229             :
2230          41 :       if (p1 == NULL)
2231             :         {
2232           1 :           replace_call_with_value (gsi, integer_zero_node);
2233           1 :           return true;
2234             :         }
2235             :
2236             :       /* Found: fold to STR + constant offset of the match.  */
2236          40 :       tree len = build_int_cst (size_type_node, p1 - p);
2237          40 :       gimple_seq stmts = NULL;
2238          40 :       gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
2239             :                                               POINTER_PLUS_EXPR, str, len);
2240          40 :       gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2241          40 :       gsi_replace_with_seq_vops (gsi, stmts);
2242          40 :       return true;
2243             :     }
2244             :
2245             :   /* Only str(r)chr (s, 0) can be turned into s + strlen (s) below.  */
2245        5344 :   if (!integer_zerop (c) || (!gimple_vuse (stmt) && gimple_in_ssa_p (cfun)))
2246             :     return false;
2247             :
2248             :   /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size.  */
2249          82 :   if (is_strrchr && optimize_function_for_size_p (cfun))
2250             :     {
2251           3 :       tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
2252             :
2253           3 :       if (strchr_fn)
2254             :         {
2255           3 :           gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
2256           3 :           replace_call_with_call_and_fold (gsi, repl);
2257           3 :           return true;
2258             :         }
2259             :
2260             :       return false;
2261             :     }
2262             :
2263          79 :   tree len;
2264        5353 :   tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
2265             :
2266          79 :   if (!strlen_fn)
2267             :     return false;
2268             :
2269             :   /* Create newstr = strlen (str).  */
2270          79 :   gimple_seq stmts = NULL;
2271          79 :   gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
2272          79 :   gimple_set_location (new_stmt, loc);
2273          79 :   len = make_ssa_name (size_type_node);
2274          79 :   gimple_call_set_lhs (new_stmt, len);
2275          79 :   gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2276             :
2277             :   /* Create (str p+ strlen (str)).  */
2278          79 :   new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
2279             :                                   POINTER_PLUS_EXPR, str, len);
2280          79 :   gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2281          79 :   gsi_replace_with_seq_vops (gsi, stmts);
2282             :   /* gsi now points at the assignment to the lhs, get a
2283             :      stmt iterator to the strlen.
2284             :      ???  We can't use gsi_for_stmt as that doesn't work when the
2285             :      CFG isn't built yet.  */
2286          79 :   gimple_stmt_iterator gsi2 = *gsi;
2287          79 :   gsi_prev (&gsi2);
2288          79 :   fold_stmt (&gsi2);
2289          79 :   return true;
2290             : }
2291 :
2292 : /* Fold function call to builtin strstr.
2293 : If both arguments are constant, evaluate and fold the result,
2294 : additionally fold strstr (x, "") into x and strstr (x, "c")
2295 : into strchr (x, 'c'). */
2296             : static bool
2297        4305 : gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
2298             : {
2299        4305 :   gimple *stmt = gsi_stmt (*gsi);
2300             :   /* Without a use of the result there is nothing to fold into.  */
2300        4305 :   if (!gimple_call_lhs (stmt))
2301             :     return false;
2302             :
2303        4302 :   tree haystack = gimple_call_arg (stmt, 0);
2304        4302 :   tree needle = gimple_call_arg (stmt, 1);
2305             :
2306             :   /* Avoid folding if either argument is not a nul-terminated array.
2307             :      Defer warning until later.  */
2308        4302 :   if (!check_nul_terminated_array (NULL_TREE, haystack)
2309        4302 :       || !check_nul_terminated_array (NULL_TREE, needle))
2310          19 :     return false;
2311             :
2312             :   /* All remaining folds require a constant needle.  */
2312        4283 :   const char *q = c_getstr (needle);
2313        4283 :   if (q == NULL)
2314             :     return false;
2315             :
2316             :   /* Both strings constant: evaluate the search at compile time.  */
2316        3125 :   if (const char *p = c_getstr (haystack))
2317             :     {
2318          14 :       const char *r = strstr (p, q);
2319             :
2320          14 :       if (r == NULL)
2321             :         {
2322           1 :           replace_call_with_value (gsi, integer_zero_node);
2323           1 :           return true;
2324             :         }
2325             :
2326          13 :       tree len = build_int_cst (size_type_node, r - p);
2327          13 :       gimple_seq stmts = NULL;
2328          13 :       gimple *new_stmt
2329          13 :         = gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
2330             :                                haystack, len);
2331          13 :       gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2332          13 :       gsi_replace_with_seq_vops (gsi, stmts);
2333          13 :       return true;
2334             :     }
2335             :
2336             :   /* For strstr (x, "") return x.  */
2337        3111 :   if (q[0] == '\0')
2338             :     {
2339           6 :       replace_call_with_value (gsi, haystack);
2340           6 :       return true;
2341             :     }
2342             :
2343       10473 :   if (!gimple_vuse (stmt) && gimple_in_ssa_p (cfun))
2344             :     return false;
2345             :
2346             :   /* Transform strstr (x, "c") into strchr (x, 'c').  */
2347        3105 :   if (q[1] == '\0')
2348             :     {
2349          22 :       tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
2350          22 :       if (strchr_fn)
2351             :         {
2352          22 :           tree c = build_int_cst (integer_type_node, q[0]);
2353          22 :           gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
2354          22 :           replace_call_with_call_and_fold (gsi, repl);
2355          22 :           return true;
2356             :         }
2357             :     }
2358             :
2359             :   return false;
2360             : }
2361 :
2362 : /* Simplify a call to the strcat builtin. DST and SRC are the arguments
2363 : to the call.
2364 :
2365 :    Return false if no simplification was possible, otherwise replace
2366 :    the call in place and return true.
2367 :
2368 : The simplified form may be a constant or other expression which
2369 : computes the same value, but in a more efficient manner (including
2370 : calls to other builtin functions).
2371 :
2372 : The call may contain arguments which need to be evaluated, but
2373 : which are not useful to determine the result of the call. In
2374 : this case we return a chain of COMPOUND_EXPRs. The LHS of each
2375 : COMPOUND_EXPR will be an argument which must be evaluated.
2376 : COMPOUND_EXPRs are chained through their RHS. The RHS of the last
2377 : COMPOUND_EXPR in the chain will contain the tree for the simplified
2378 : form of the builtin function call. */
2379 :
2380             : static bool
2381        7329 : gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
2382             : {
2383        7329 :   gimple *stmt = gsi_stmt (*gsi);
2384        7329 :   location_t loc = gimple_location (stmt);
2385             :
2386        7329 :   const char *p = c_getstr (src);
2387             :
2388             :   /* If the string length is zero, return the dst parameter.  */
2389        7329 :   if (p && *p == '\0')
2390             :     {
2391          72 :       replace_call_with_value (gsi, dst);
2392          72 :       return true;
2393             :     }
2394             :
2395             :   /* The strlen + memcpy expansion below trades size for speed.  */
2395        7257 :   if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
2396             :     return false;
2397             :
2398       19865 :   if (!gimple_vdef (stmt) && gimple_in_ssa_p (cfun))
2399             :     return false;
2400             :
2401             :   /* See if we can store by pieces into (dst + strlen(dst)).  */
2402        6674 :   tree newdst;
2403        6674 :   tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
2404        6674 :   tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2405             :
2406        6674 :   if (!strlen_fn || !memcpy_fn)
2407             :     return false;
2408             :
2409             :   /* If the length of the source string isn't computable don't
2410             :      split strcat into strlen and memcpy.  */
2411        6674 :   tree len = get_maxval_strlen (src, SRK_STRLEN);
2412        6674 :   if (! len)
2413             :     return false;
2414             :
2415             :   /* Create strlen (dst).  */
2416         740 :   gimple_seq stmts = NULL, stmts2;
2417         740 :   gimple *repl = gimple_build_call (strlen_fn, 1, dst);
2418         740 :   gimple_set_location (repl, loc);
2419         740 :   newdst = make_ssa_name (size_type_node);
2420         740 :   gimple_call_set_lhs (repl, newdst);
2421         740 :   gimple_seq_add_stmt_without_update (&stmts, repl);
2422             :
2423             :   /* Create (dst p+ strlen (dst)).  */
2424         740 :   newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
2425         740 :   newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
2426         740 :   gimple_seq_add_seq_without_update (&stmts, stmts2);
2427             :
2428             :   /* Copy LEN + 1 bytes to include the terminating nul.  */
2428         740 :   len = fold_convert_loc (loc, size_type_node, len);
2429         740 :   len = size_binop_loc (loc, PLUS_EXPR, len,
2430             :                         build_int_cst (size_type_node, 1));
2431         740 :   len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
2432         740 :   gimple_seq_add_seq_without_update (&stmts, stmts2);
2433             :
2434         740 :   repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
2435         740 :   gimple_seq_add_stmt_without_update (&stmts, repl);
2436         740 :   if (gimple_call_lhs (stmt))
2437             :     {
2438             :       /* strcat returns DST, so assign it to the original lhs.  */
2438         165 :       repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
2439         165 :       gimple_seq_add_stmt_without_update (&stmts, repl);
2440         165 :       gsi_replace_with_seq_vops (gsi, stmts);
2441             :       /* gsi now points at the assignment to the lhs, get a
2442             :          stmt iterator to the memcpy call.
2443             :          ???  We can't use gsi_for_stmt as that doesn't work when the
2444             :          CFG isn't built yet.  */
2445         165 :       gimple_stmt_iterator gsi2 = *gsi;
2446         165 :       gsi_prev (&gsi2);
2447         165 :       fold_stmt (&gsi2);
2448             :     }
2449             :   else
2450             :     {
2451         575 :       gsi_replace_with_seq_vops (gsi, stmts);
2452         575 :       fold_stmt (gsi);
2453             :     }
2454             :   return true;
2455             : }
2456 :
2457 : /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
2458 : are the arguments to the call. */
2459 :
2460             : static bool
2461        1702 : gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
2462             : {
2463        1702 :   gimple *stmt = gsi_stmt (*gsi);
2464        1702 :   tree dest = gimple_call_arg (stmt, 0);
2465        1702 :   tree src = gimple_call_arg (stmt, 1);
2466        1702 :   tree size = gimple_call_arg (stmt, 2);
2467        1702 :   tree fn;
2468        1702 :   const char *p;
2469             :
2470        1702 :   p = c_getstr (src);
2471             :   /* If the SRC parameter is "", return DEST.  */
2472        1702 :   if (p && *p == '\0')
2473             :     {
2474          60 :       replace_call_with_value (gsi, dest);
2475          60 :       return true;
2476             :     }
2477             :
2478             :   /* Only fold away the object-size check when SIZE is all-ones,
2479             :      i.e. the destination size is unknown/unlimited.  */
2478        1642 :   if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
2479        1560 :     return false;
2480             :
2481        1724 :   if (!gimple_vdef (stmt) && gimple_in_ssa_p (cfun))
2482             :     return false;
2483             :
2484             :   /* If __builtin_strcat_chk is used, assume strcat is available.  */
2485          82 :   fn = builtin_decl_explicit (BUILT_IN_STRCAT);
2486          82 :   if (!fn)
2487             :     return false;
2488             :
2489          82 :   gimple *repl = gimple_build_call (fn, 2, dest, src);
2490          82 :   replace_call_with_call_and_fold (gsi, repl);
2491          82 :   return true;
2492             : }
2493 :
2494 : /* Simplify a call to the strncat builtin. */
2495 :
2496             : static bool
2497        6786 : gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
2498             : {
2499        6786 :   gimple *stmt = gsi_stmt (*gsi);
2500        6786 :   tree dst = gimple_call_arg (stmt, 0);
2501        6786 :   tree src = gimple_call_arg (stmt, 1);
2502        6786 :   tree len = gimple_call_arg (stmt, 2);
2503        6786 :   tree src_len = c_strlen (src, 1);
2504             :
2505             :   /* If the requested length is zero, or the src parameter string
2506             :      length is zero, return the dst parameter.  */
2507        6786 :   if (integer_zerop (len) || (src_len && integer_zerop (src_len)))
2508             :     {
2509         119 :       replace_call_with_value (gsi, dst);
2510         119 :       return true;
2511             :     }
2512             :
2513             :   /* Return early if the requested len is less than the string length.
2514             :      Warnings will be issued elsewhere later.  */
2515        6667 :   if (!src_len || known_lower (stmt, len, src_len, true))
2516        6099 :     return false;
2517             :
2518             :   /* Warn on constant LEN.  */
2519         568 :   if (TREE_CODE (len) == INTEGER_CST)
2520             :     {
2521         131 :       bool nowarn = warning_suppressed_p (stmt, OPT_Wstringop_overflow_);
2522         131 :       tree dstsize;
2523             :
2524         131 :       if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize)
2525         175 :           && TREE_CODE (dstsize) == INTEGER_CST)
2526             :         {
2527          44 :           int cmpdst = tree_int_cst_compare (len, dstsize);
2528             :
2529          44 :           if (cmpdst >= 0)
2530             :             {
2531          19 :               tree fndecl = gimple_call_fndecl (stmt);
2532             :
2533             :               /* Strncat copies (at most) LEN bytes and always appends
2534             :                  the terminating NUL so the specified bound should never
2535             :                  be equal to (or greater than) the size of the destination.
2536             :                  If it is, the copy could overflow.  */
2537          19 :               location_t loc = gimple_location (stmt);
2538          37 :               nowarn = warning_at (loc, OPT_Wstringop_overflow_,
2539             :                                    cmpdst == 0
2540             :                                    ? G_("%qD specified bound %E equals "
2541             :                                         "destination size")
2542             :                                    : G_("%qD specified bound %E exceeds "
2543             :                                         "destination size %E"),
2544             :                                    fndecl, len, dstsize);
2545          19 :               if (nowarn)
2546           0 :                 suppress_warning (stmt, OPT_Wstringop_overflow_);
2547             :             }
2548             :         }
2549             :
2550         131 :       if (!nowarn && TREE_CODE (src_len) == INTEGER_CST
2551         243 :           && tree_int_cst_compare (src_len, len) == 0)
2552             :         {
2553          20 :           tree fndecl = gimple_call_fndecl (stmt);
2554          20 :           location_t loc = gimple_location (stmt);
2555             :
2556             :           /* To avoid possible overflow the specified bound should also
2557             :              not be equal to the length of the source, even when the size
2558             :              of the destination is unknown (it's not an uncommon mistake
2559             :              to specify as the bound to strncpy the length of the source).  */
2560          20 :           if (warning_at (loc, OPT_Wstringop_overflow_,
2561             :                           "%qD specified bound %E equals source length",
2562             :                           fndecl, len))
2563           6 :             suppress_warning (stmt, OPT_Wstringop_overflow_);
2564             :         }
2565             :     }
2566             :
2567             :   /* If LEN >= strlen (SRC) the bound cannot truncate; strcat is
2568             :      equivalent.  */
2567         568 :   if (!known_lower (stmt, src_len, len))
2568             :     return false;
2569             :
2570         136 :   tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
2571             :
2572             :   /* If the replacement _DECL isn't initialized, don't do the
2573             :      transformation.  */
2574        6803 :   if (!fn || (!gimple_vdef (stmt) && gimple_in_ssa_p (cfun)))
2575             :     return false;
2576             :
2577             :   /* Otherwise, emit a call to strcat.  */
2578         136 :   gcall *repl = gimple_build_call (fn, 2, dst, src);
2579         136 :   replace_call_with_call_and_fold (gsi, repl);
2580         136 :   return true;
2581             : }
2582 :
2583 : /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2584 : LEN, and SIZE. */
2585 :
 2586 : static bool
 2587 1143 : gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
 2588 : {
 2589 1143 : gimple *stmt = gsi_stmt (*gsi);
 2590 1143 : tree dest = gimple_call_arg (stmt, 0);
 2591 1143 : tree src = gimple_call_arg (stmt, 1);
 2592 1143 : tree len = gimple_call_arg (stmt, 2);
 2593 1143 : tree size = gimple_call_arg (stmt, 3);
 2594 1143 : tree fn;
 2595 1143 : const char *p;
 2596 :
 2597 1143 : p = c_getstr (src);
 2598 : /* If the SRC parameter is "" or if LEN is 0, return DEST. */
 2599 302 : if ((p && *p == '\0')
 2600 1394 : || integer_zerop (len))
 2601 : {
 2602 78 : replace_call_with_value (gsi, dest);
 2603 78 : return true;
 2604 : }
 2605 :
 : /* In SSA form a call without a virtual definition cannot be safely
 : replaced by another memory-modifying call, so punt. */
 2606 3043 : if (!gimple_vdef (stmt) && gimple_in_ssa_p (cfun))
 2607 : return false;
 2608 :
 : /* A SIZE of all ones (i.e. (size_t)-1) means the object size is
 : unknown; only then may the _chk call degrade to plain strncat. */
 2609 1065 : if (! integer_all_onesp (size))
 2610 : {
 2611 978 : tree src_len = c_strlen (src, 1);
 2612 978 : if (known_lower (stmt, src_len, len))
 2613 : {
 2614 : /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
 2615 65 : fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
 2616 65 : if (!fn)
 2617 : return false;
 2618 :
 2619 65 : gimple *repl = gimple_build_call (fn, 3, dest, src, size);
 2620 65 : replace_call_with_call_and_fold (gsi, repl);
 2621 65 : return true;
 2622 : }
 2623 : return false;
 2624 : }
 2625 :
 2626 : /* If __builtin_strncat_chk is used, assume strncat is available. */
 2627 87 : fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
 2628 87 : if (!fn)
 2629 : return false;
 2630 :
 2631 87 : gimple *repl = gimple_build_call (fn, 3, dest, src, len);
 2632 87 : replace_call_with_call_and_fold (gsi, repl);
 2633 87 : return true;
 2634 : }
2635 :
2636 : /* Build and append gimple statements to STMTS that would load a first
2637 : character of a memory location identified by STR. LOC is location
2638 : of the statement. */
2639 :
2640 : static tree
2641 469 : gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
2642 : {
2643 469 : tree var;
2644 :
2645 469 : tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2646 469 : tree cst_uchar_ptr_node
2647 469 : = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
2648 469 : tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
2649 :
2650 469 : tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
2651 469 : gassign *stmt = gimple_build_assign (NULL_TREE, temp);
2652 469 : var = make_ssa_name (cst_uchar_node, stmt);
2653 :
2654 469 : gimple_assign_set_lhs (stmt, var);
2655 469 : gimple_seq_add_stmt_without_update (stmts, stmt);
2656 :
2657 469 : return var;
2658 : }
2659 :
2660 : /* Fold a call to the str{n}{case}cmp builtin pointed by GSI iterator. */
2661 :
 2662 : static bool
 2663 1250365 : gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
 2664 : {
 2665 1250365 : gimple *stmt = gsi_stmt (*gsi);
 2666 1250365 : tree callee = gimple_call_fndecl (stmt);
 2667 1250365 : enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
 2668 :
 2669 1250365 : tree type = integer_type_node;
 2670 1250365 : tree str1 = gimple_call_arg (stmt, 0);
 2671 1250365 : tree str2 = gimple_call_arg (stmt, 1);
 2672 1250365 : tree lhs = gimple_call_lhs (stmt);
 2673 :
 : /* BOUND stays at HOST_WIDE_INT_M1U as the "no/unknown bound"
 : sentinel for the two-argument (strcmp-family) calls or when the
 : bound is not a constant that fits a uhwi. */
 2674 1250365 : tree bound_node = NULL_TREE;
 2675 1250365 : unsigned HOST_WIDE_INT bound = HOST_WIDE_INT_M1U;
 2676 :
 2677 : /* Handle strncmp and strncasecmp functions. */
 2678 1250365 : if (gimple_call_num_args (stmt) == 3)
 2679 : {
 2680 22862 : bound_node = gimple_call_arg (stmt, 2);
 2681 22862 : if (tree_fits_uhwi_p (bound_node))
 2682 17143 : bound = tree_to_uhwi (bound_node);
 2683 : }
 2684 :
 2685 : /* If the BOUND parameter is zero, return zero. */
 2686 17143 : if (bound == 0)
 2687 : {
 2688 4 : replace_call_with_value (gsi, integer_zero_node);
 2689 4 : return true;
 2690 : }
 2691 :
 2692 : /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
 2693 1250361 : if (operand_equal_p (str1, str2, 0))
 2694 : {
 2695 41 : replace_call_with_value (gsi, integer_zero_node);
 2696 41 : return true;
 2697 : }
 2698 :
 : /* The remaining folds emit memory loads; without a virtual use in
 : SSA form there is nothing to anchor them to. */
 2699 2500640 : if (!gimple_vuse (stmt) && gimple_in_ssa_p (cfun))
 2700 : return false;
 2701 :
 2702 : /* Initially set to the number of characters, including the terminating
 2703 : nul if each array has one. LENx == strnlen (Sx, LENx) implies that
 2704 : the array Sx is not terminated by a nul.
 2705 : For nul-terminated strings then adjusted to their length so that
 2706 : LENx == NULPOSx holds. */
 2707 1250320 : unsigned HOST_WIDE_INT len1 = HOST_WIDE_INT_MAX, len2 = len1;
 2708 1250320 : const char *p1 = getbyterep (str1, &len1);
 2709 1250320 : const char *p2 = getbyterep (str2, &len2);
 2710 :
 2711 : /* The position of the terminating nul character if one exists, otherwise
 2712 : a value greater than LENx. */
 2713 1250320 : unsigned HOST_WIDE_INT nulpos1 = HOST_WIDE_INT_MAX, nulpos2 = nulpos1;
 2714 :
 2715 1250320 : if (p1)
 2716 : {
 2717 42806 : size_t n = strnlen (p1, len1);
 2718 42806 : if (n < len1)
 2719 42699 : len1 = nulpos1 = n;
 2720 : }
 2721 :
 2722 1250320 : if (p2)
 2723 : {
 2724 1219508 : size_t n = strnlen (p2, len2);
 2725 1219508 : if (n < len2)
 2726 1219449 : len2 = nulpos2 = n;
 2727 : }
 2728 :
 2729 : /* For known strings, return an immediate value. */
 2730 1250320 : if (p1 && p2)
 2731 : {
 2732 39244 : int r = 0;
 2733 39244 : bool known_result = false;
 2734 :
 2735 39244 : switch (fcode)
 2736 : {
 2737 38169 : case BUILT_IN_STRCMP:
 2738 38169 : case BUILT_IN_STRCMP_EQ:
 : /* Both arrays must be nul-terminated for host strcmp to be
 : a faithful model of the target call. */
 2739 38169 : if (len1 != nulpos1 || len2 != nulpos2)
 2740 : break;
 2741 :
 2742 38144 : r = strcmp (p1, p2);
 2743 38144 : known_result = true;
 2744 38144 : break;
 2745 :
 2746 1002 : case BUILT_IN_STRNCMP:
 2747 1002 : case BUILT_IN_STRNCMP_EQ:
 2748 1002 : {
 2749 1002 : if (bound == HOST_WIDE_INT_M1U)
 2750 : break;
 2751 :
 2752 : /* Reduce the bound to be no more than the length
 2753 : of the shorter of the two strings, or the sizes
 2754 : of the unterminated arrays. */
 2755 38 : unsigned HOST_WIDE_INT n = bound;
 2756 :
 2757 38 : if (len1 == nulpos1 && len1 < n)
 2758 4 : n = len1 + 1;
 2759 38 : if (len2 == nulpos2 && len2 < n)
 2760 11 : n = len2 + 1;
 2761 :
 : /* Punt if the comparison could run past a nul of either
 : string (reading indeterminate array contents). */
 2762 38 : if (MIN (nulpos1, nulpos2) + 1 < n)
 2763 : break;
 2764 :
 2765 38 : r = strncmp (p1, p2, n);
 2766 38 : known_result = true;
 2767 38 : break;
 2768 : }
 2769 : /* Only handleable situation is where the strings are equal (result 0),
 2770 : which is already handled by operand_equal_p case. */
 2771 : case BUILT_IN_STRCASECMP:
 2772 : break;
 2773 37 : case BUILT_IN_STRNCASECMP:
 2774 37 : {
 2775 37 : if (bound == HOST_WIDE_INT_M1U)
 2776 : break;
 : /* Byte equality under strncmp implies case-insensitive
 : equality; any other result is unknown here. */
 2777 37 : r = strncmp (p1, p2, bound);
 2778 37 : if (r == 0)
 2779 : known_result = true;
 2780 : break;
 2781 : }
 2782 0 : default:
 2783 0 : gcc_unreachable ();
 2784 : }
 2785 :
 2786 38182 : if (known_result)
 2787 : {
 2788 38182 : replace_call_with_value (gsi, build_cmp_result (type, r));
 2789 38182 : return true;
 2790 : }
 2791 : }
 2792 :
 : /* True when at least one character is known to be compared: either
 : a known positive bound, or an unbounded (strcmp-family) call. */
 2793 2424276 : bool nonzero_bound = (bound >= 1 && bound < HOST_WIDE_INT_M1U)
 2794 1195139 : || fcode == BUILT_IN_STRCMP
 2795 1195139 : || fcode == BUILT_IN_STRCMP_EQ
 2796 1218029 : || fcode == BUILT_IN_STRCASECMP;
 2797 :
 2798 1212138 : location_t loc = gimple_location (stmt);
 2799 :
 2800 : /* If the second arg is "", return *(const unsigned char*)arg1. */
 2801 1212138 : if (p2 && *p2 == '\0' && nonzero_bound)
 2802 : {
 2803 150 : gimple_seq stmts = NULL;
 2804 150 : tree var = gimple_load_first_char (loc, str1, &stmts);
 2805 150 : if (lhs)
 2806 : {
 2807 150 : stmt = gimple_build_assign (lhs, NOP_EXPR, var);
 2808 150 : gimple_seq_add_stmt_without_update (&stmts, stmt);
 2809 : }
 2810 :
 2811 150 : gsi_replace_with_seq_vops (gsi, stmts);
 2812 150 : return true;
 2813 : }
 2814 :
 2815 : /* If the first arg is "", return -*(const unsigned char*)arg2. */
 2816 1211988 : if (p1 && *p1 == '\0' && nonzero_bound)
 2817 : {
 2818 99 : gimple_seq stmts = NULL;
 2819 99 : tree var = gimple_load_first_char (loc, str2, &stmts);
 2820 :
 2821 99 : if (lhs)
 2822 : {
 2823 99 : tree c = make_ssa_name (integer_type_node);
 2824 99 : stmt = gimple_build_assign (c, NOP_EXPR, var);
 2825 99 : gimple_seq_add_stmt_without_update (&stmts, stmt);
 2826 :
 2827 99 : stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
 2828 99 : gimple_seq_add_stmt_without_update (&stmts, stmt);
 2829 : }
 2830 :
 2831 99 : gsi_replace_with_seq_vops (gsi, stmts);
 2832 99 : return true;
 2833 : }
 2834 :
 2835 : /* If BOUND is one, return an expression corresponding to
 2836 : (*(const unsigned char*)arg2 - *(const unsigned char*)arg1). */
 2837 1211889 : if (fcode == BUILT_IN_STRNCMP && bound == 1)
 2838 : {
 2839 110 : gimple_seq stmts = NULL;
 2840 110 : tree temp1 = gimple_load_first_char (loc, str1, &stmts);
 2841 110 : tree temp2 = gimple_load_first_char (loc, str2, &stmts);
 2842 :
 2843 110 : if (lhs)
 2844 : {
 2845 107 : tree c1 = make_ssa_name (integer_type_node);
 2846 107 : gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
 2847 107 : gimple_seq_add_stmt_without_update (&stmts, convert1);
 2848 :
 2849 107 : tree c2 = make_ssa_name (integer_type_node);
 2850 107 : gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
 2851 107 : gimple_seq_add_stmt_without_update (&stmts, convert2);
 2852 :
 2853 107 : stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
 2854 107 : gimple_seq_add_stmt_without_update (&stmts, stmt);
 2855 : }
 2856 :
 2857 110 : gsi_replace_with_seq_vops (gsi, stmts);
 2858 110 : return true;
 2859 : }
 2860 :
 2861 : /* If BOUND is greater than the length of one constant string,
 2862 : and the other argument is also a nul-terminated string, replace
 2863 : strncmp with strcmp. */
 2864 1211779 : if (fcode == BUILT_IN_STRNCMP
 2865 17600 : && bound > 0 && bound < HOST_WIDE_INT_M1U
 2866 12009 : && ((p2 && len2 < bound && len2 == nulpos2)
 2867 11779 : || (p1 && len1 < bound && len1 == nulpos1)))
 2868 : {
 2869 1211779 : tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
 2870 308 : if (!fn)
 2871 : return false;
 2872 308 : gimple *repl = gimple_build_call (fn, 2, str1, str2);
 2873 308 : replace_call_with_call_and_fold (gsi, repl);
 2874 308 : return true;
 2875 : }
 2876 :
 2877 : return false;
 2878 : }
2879 :
2880 : /* Fold a call to the memchr pointed by GSI iterator. */
2881 :
 2882 : static bool
 2883 39245 : gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
 2884 : {
 2885 39245 : gimple *stmt = gsi_stmt (*gsi);
 2886 39245 : tree lhs = gimple_call_lhs (stmt);
 2887 39245 : tree arg1 = gimple_call_arg (stmt, 0);
 2888 39245 : tree arg2 = gimple_call_arg (stmt, 1);
 2889 39245 : tree len = gimple_call_arg (stmt, 2);
 2890 :
 2891 : /* If the LEN parameter is zero, return zero. */
 2892 39245 : if (integer_zerop (len))
 2893 : {
 2894 1 : replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
 2895 1 : return true;
 2896 : }
 2897 :
 : /* Fold only when the searched-for byte is a constant representable
 : as a host char and the length is a known constant. */
 2898 39244 : char c;
 2899 39244 : if (TREE_CODE (arg2) != INTEGER_CST
 2900 22868 : || !tree_fits_uhwi_p (len)
 2901 39957 : || !target_char_cst_p (arg2, &c))
 2902 38531 : return false;
 2903 :
 2904 713 : unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
 2905 713 : unsigned HOST_WIDE_INT string_length;
 2906 713 : const char *p1 = getbyterep (arg1, &string_length);
 2907 :
 2908 713 : if (p1)
 2909 : {
 2910 100 : const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
 2911 100 : if (r == NULL)
 2912 : {
 : /* Not found in the known bytes.  That folds to null only if
 : LENGTH stays within the underlying array, i.e. the call
 : could not have scanned bytes we cannot see. */
 2913 14 : tree mem_size, offset_node;
 2914 14 : byte_representation (arg1, &offset_node, &mem_size, NULL);
 2915 14 : unsigned HOST_WIDE_INT offset = (offset_node == NULL_TREE)
 2916 14 : ? 0 : tree_to_uhwi (offset_node);
 2917 : /* MEM_SIZE is the size of the array the string literal
 2918 : is stored in. */
 2919 14 : unsigned HOST_WIDE_INT string_size = tree_to_uhwi (mem_size) - offset;
 2920 14 : gcc_checking_assert (string_length <= string_size);
 2921 14 : if (length <= string_size)
 2922 : {
 2923 4 : replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
 2924 4 : return true;
 2925 : }
 2926 : }
 2927 : else
 2928 : {
 : /* Found: fold to ARG1 + constant offset of the match. */
 2929 86 : unsigned HOST_WIDE_INT offset = r - p1;
 2930 86 : gimple_seq stmts = NULL;
 2931 86 : if (lhs != NULL_TREE)
 2932 : {
 2933 84 : tree offset_cst = build_int_cst (sizetype, offset);
 2934 84 : gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
 2935 : arg1, offset_cst);
 2936 84 : gimple_seq_add_stmt_without_update (&stmts, stmt);
 2937 : }
 2938 : else
 2939 2 : gimple_seq_add_stmt_without_update (&stmts,
 2940 : gimple_build_nop ());
 2941 :
 2942 86 : gsi_replace_with_seq_vops (gsi, stmts);
 2943 86 : return true;
 2944 : }
 2945 : }
 2946 :
 2947 : return false;
 2948 : }
2949 :
 2950 : /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
 2951 : to the call. IGNORE is true if the value returned
 2952 : by the builtin will be ignored. UNLOCKED is true if this is
 2953 : actually a call to fputs_unlocked. If LEN is non-NULL, it represents
 2954 : the known length of the string. Return false if no simplification
 2955 : was possible. */
2956 :
 2957 : static bool
 2958 20674 : gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
 2959 : tree arg0, tree arg1,
 2960 : bool unlocked)
 2961 : {
 2962 20674 : gimple *stmt = gsi_stmt (*gsi);
 2963 :
 2964 : /* If we're using an unlocked function, assume the other unlocked
 2965 : functions exist explicitly. */
 2966 20674 : tree const fn_fputc = (unlocked
 2967 20674 : ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
 2968 20631 : : builtin_decl_implicit (BUILT_IN_FPUTC));
 2969 20631 : tree const fn_fwrite = (unlocked
 2970 43 : ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
 2971 20674 : : builtin_decl_implicit (BUILT_IN_FWRITE));
 2972 :
 2973 : /* If the return value is used, don't do the transformation. */
 2974 20674 : if (gimple_call_lhs (stmt))
 2975 : return false;
 2976 :
 2977 : /* Get the length of the string passed to fputs. If the length
 2978 : can't be determined, punt. */
 2979 20603 : tree len = get_maxval_strlen (arg0, SRK_STRLEN);
 2980 20603 : if (!len || TREE_CODE (len) != INTEGER_CST)
 2981 : return false;
 2982 :
 : /* Dispatch on LEN compared to 1: -1/0/1 for less/equal/greater. */
 2983 16199 : switch (compare_tree_int (len, 1))
 2984 : {
 2985 91 : case -1: /* length is 0, delete the call entirely . */
 2986 91 : replace_call_with_value (gsi, integer_zero_node);
 2987 91 : return true;
 2988 :
 2989 1060 : case 0: /* length is 1, call fputc. */
 2990 1060 : {
 2991 1060 : const char *p = c_getstr (arg0);
 2992 1060 : if (p != NULL)
 2993 : {
 2994 2092 : if (!fn_fputc || (!gimple_vdef (stmt) && gimple_in_ssa_p (cfun)))
 2995 : return false;
 2996 :
 2997 1046 : gimple *repl
 2998 1046 : = gimple_build_call (fn_fputc, 2,
 2999 1046 : build_int_cst (integer_type_node, p[0]),
 3000 : arg1);
 3001 1046 : replace_call_with_call_and_fold (gsi, repl);
 3002 1046 : return true;
 3003 : }
 3004 : }
 : /* Length is 1 but the character is not known; treat like the
 : longer-than-1 case and emit fwrite instead. */
 3005 : /* FALLTHROUGH */
 3006 15062 : case 1: /* length is greater than 1, call fwrite. */
 3007 15062 : {
 3008 : /* If optimizing for size keep fputs. */
 3009 15062 : if (optimize_function_for_size_p (cfun))
 3010 : return false;
 3011 : /* New argument list transforming fputs(string, stream) to
 3012 : fwrite(string, 1, len, stream). */
 3013 27756 : if (!fn_fwrite || (!gimple_vdef (stmt) && gimple_in_ssa_p (cfun)))
 3014 : return false;
 3015 :
 3016 8219 : gimple *repl
 3017 8219 : = gimple_build_call (fn_fwrite, 4, arg0, size_one_node,
 3018 : fold_convert (size_type_node, len), arg1);
 3019 8219 : replace_call_with_call_and_fold (gsi, repl);
 3020 8219 : return true;
 3021 : }
 3022 0 : default:
 3023 0 : gcc_unreachable ();
 3024 : }
 3025 : }
3026 :
3027 : /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
3028 : DEST, SRC, LEN, and SIZE are the arguments to the call.
3029 : IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
3030 : code of the builtin. If MAXLEN is not NULL, it is maximum length
3031 : passed as third argument. */
3032 :
 3033 : static bool
 3034 25602 : gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
 3035 : tree dest, tree src, tree len, tree size,
 3036 : enum built_in_function fcode)
 3037 : {
 3038 25602 : gimple *stmt = gsi_stmt (*gsi);
 3039 25602 : location_t loc = gimple_location (stmt);
 3040 25602 : bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
 3041 25602 : tree fn;
 3042 :
 3043 : /* If SRC and DEST are the same (and not volatile), return DEST
 3044 : (resp. DEST+LEN for __mempcpy_chk). */
 3045 25602 : if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
 3046 : {
 3047 13 : if (fcode != BUILT_IN_MEMPCPY_CHK)
 3048 : {
 3049 7 : replace_call_with_value (gsi, dest);
 3050 7 : return true;
 3051 : }
 3052 : else
 3053 : {
 3054 6 : gimple_seq stmts = NULL;
 3055 6 : len = gimple_convert_to_ptrofftype (&stmts, loc, len);
 3056 6 : tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
 3057 6 : TREE_TYPE (dest), dest, len);
 3058 6 : gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
 3059 6 : replace_call_with_value (gsi, temp);
 3060 6 : return true;
 3061 : }
 3062 : }
 3063 :
 : /* Replacing the call requires a virtual definition in SSA form. */
 3064 68210 : if (!gimple_vdef (stmt) && gimple_in_ssa_p (cfun))
 3065 : return false;
 3066 :
 : /* The _chk check can be dropped when SIZE is unknown (all ones)
 : or LEN (or its known maximum MAXLEN) provably fits in SIZE. */
 3067 25589 : tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
 3068 25589 : if (! integer_all_onesp (size)
 3069 24523 : && !known_lower (stmt, len, size)
 3070 42741 : && !known_lower (stmt, maxlen, size))
 3071 : {
 3072 : /* MAXLEN and LEN both cannot be proved to be less than SIZE, at
 3073 : least try to optimize (void) __mempcpy_chk () into
 3074 : (void) __memcpy_chk () */
 3075 17075 : if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
 3076 : {
 3077 43 : fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
 3078 43 : if (!fn)
 3079 : return false;
 3080 :
 3081 43 : gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
 3082 43 : replace_call_with_call_and_fold (gsi, repl);
 3083 43 : return true;
 3084 : }
 3085 : return false;
 3086 : }
 3087 :
 3088 8514 : fn = NULL_TREE;
 3089 : /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
 3090 : mem{cpy,pcpy,move,set} is available. */
 3091 8514 : switch (fcode)
 3092 : {
 3093 1768 : case BUILT_IN_MEMCPY_CHK:
 3094 1768 : fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
 3095 1768 : break;
 3096 1068 : case BUILT_IN_MEMPCPY_CHK:
 3097 1068 : fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
 3098 1068 : break;
 3099 1657 : case BUILT_IN_MEMMOVE_CHK:
 3100 1657 : fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
 3101 1657 : break;
 3102 4021 : case BUILT_IN_MEMSET_CHK:
 3103 4021 : fn = builtin_decl_explicit (BUILT_IN_MEMSET);
 3104 4021 : break;
 3105 : default:
 3106 : break;
 3107 : }
 3108 :
 3109 8514 : if (!fn)
 3110 : return false;
 3111 :
 3112 8514 : gimple *repl = gimple_build_call (fn, 3, dest, src, len);
 3113 8514 : replace_call_with_call_and_fold (gsi, repl);
 3114 8514 : return true;
 3115 : }
3116 :
3117 : /* Fold a call to the __st[rp]cpy_chk builtin.
3118 : DEST, SRC, and SIZE are the arguments to the call.
3119 : IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
3120 : code of the builtin. If MAXLEN is not NULL, it is maximum length of
3121 : strings passed as second argument. */
3122 :
 3123 : static bool
 3124 2588 : gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
 3125 : tree dest,
 3126 : tree src, tree size,
 3127 : enum built_in_function fcode)
 3128 : {
 3129 2588 : gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
 3130 2588 : location_t loc = gimple_location (stmt);
 3131 2588 : bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
 3132 2588 : tree len, fn;
 3133 :
 3134 : /* If SRC and DEST are the same (and not volatile), return DEST. */
 3135 2588 : if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
 3136 : {
 3137 : /* Issue -Wrestrict unless the pointers are null (those do
 3138 : not point to objects and so do not indicate an overlap;
 3139 : such calls could be the result of sanitization and jump
 3140 : threading). */
 3141 0 : if (!integer_zerop (dest)
 3142 0 : && !warning_suppressed_p (stmt, OPT_Wrestrict))
 3143 : {
 3144 0 : tree func = gimple_call_fndecl (stmt);
 3145 :
 3146 0 : warning_at (loc, OPT_Wrestrict,
 3147 : "%qD source argument is the same as destination",
 3148 : func);
 3149 : }
 3150 :
 3151 0 : replace_call_with_value (gsi, dest);
 3152 0 : return true;
 3153 : }
 3154 :
 : /* Replacing the call requires a virtual definition in SSA form. */
 3155 5176 : if (!gimple_vdef (stmt) && gimple_in_ssa_p (cfun))
 3156 : return false;
 3157 :
 3158 2588 : tree maxlen = get_maxval_strlen (src, SRK_STRLENMAX);
 3159 2588 : if (! integer_all_onesp (size))
 3160 : {
 : /* NOTE(review): known_lower is called with its extra bool
 : argument set, presumably requesting a strict bound so the
 : terminating nul also fits — confirm against known_lower. */
 3161 2519 : len = c_strlen (src, 1);
 3162 2519 : if (!known_lower (stmt, len, size, true)
 3163 2519 : && !known_lower (stmt, maxlen, size, true))
 3164 : {
 3165 2183 : if (fcode == BUILT_IN_STPCPY_CHK)
 3166 : {
 3167 1075 : if (! ignore)
 3168 : return false;
 3169 :
 3170 : /* If return value of __stpcpy_chk is ignored,
 3171 : optimize into __strcpy_chk. */
 3172 35 : fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
 3173 35 : if (!fn)
 3174 : return false;
 3175 :
 3176 35 : gimple *repl = gimple_build_call (fn, 3, dest, src, size);
 3177 35 : replace_call_with_call_and_fold (gsi, repl);
 3178 35 : return true;
 3179 : }
 3180 :
 3181 1108 : if (! len || TREE_SIDE_EFFECTS (len))
 3182 : return false;
 3183 :
 3184 : /* If c_strlen returned something, but not provably less than size,
 3185 : transform __strcpy_chk into __memcpy_chk. */
 3186 106 : fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
 3187 106 : if (!fn)
 3188 : return false;
 3189 :
 3190 106 : gimple_seq stmts = NULL;
 3191 106 : len = force_gimple_operand (len, &stmts, true, NULL_TREE);
 3192 106 : len = gimple_convert (&stmts, loc, size_type_node, len);
 : /* Copy LEN + 1 bytes to include the terminating nul. */
 3193 106 : len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
 3194 : build_int_cst (size_type_node, 1));
 3195 106 : gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
 3196 106 : gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
 3197 106 : replace_call_with_call_and_fold (gsi, repl);
 3198 106 : return true;
 3199 : }
 3200 : }
 3201 :
 3202 : /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
 3203 666 : fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK && !ignore
 3204 : ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
 3205 405 : if (!fn)
 3206 : return false;
 3207 :
 3208 405 : gcall *repl = gimple_build_call (fn, 2, dest, src);
 3209 405 : replace_call_with_call_and_fold (gsi, repl);
 3210 405 : return true;
 3211 : }
3212 :
3213 : /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
3214 : are the arguments to the call. If MAXLEN is not NULL, it is maximum
3215 : length passed as third argument. IGNORE is true if return value can be
3216 : ignored. FCODE is the BUILT_IN_* code of the builtin. */
3217 :
 3218 : static bool
 3219 2721 : gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
 3220 : tree dest, tree src,
 3221 : tree len, tree size,
 3222 : enum built_in_function fcode)
 3223 : {
 3224 2721 : gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
 3225 2721 : bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
 3226 2721 : tree fn;
 3227 :
 : /* The check may be dropped when SIZE is unknown (all ones) or
 : LEN/MAXLEN is provably within SIZE. */
 3228 2721 : tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
 3229 2721 : if (! integer_all_onesp (size)
 3230 2721 : && !known_lower (stmt, len, size) && !known_lower (stmt, maxlen, size))
 3231 : {
 3232 2264 : if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
 3233 : {
 3234 : /* If return value of __stpncpy_chk is ignored,
 3235 : optimize into __strncpy_chk. */
 3236 39 : fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
 3237 39 : if (fn)
 3238 : {
 3239 39 : gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
 3240 39 : replace_call_with_call_and_fold (gsi, repl);
 3241 39 : return true;
 3242 : }
 3243 : }
 3244 : return false;
 3245 : }
 3246 :
 3247 : /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
 3248 717 : fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK && !ignore
 3249 : ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
 3250 3139 : if (!fn || (!gimple_vdef (stmt) && gimple_in_ssa_p (cfun)))
 3251 : return false;
 3252 :
 3253 457 : gcall *repl = gimple_build_call (fn, 3, dest, src, len);
 3254 457 : replace_call_with_call_and_fold (gsi, repl);
 3255 457 : return true;
 3256 : }
3257 :
 3258 : /* Fold function call to builtin stpcpy with arguments DEST and SRC.
 3259 : Return false if no simplification can be made. */
3260 :
 3261 : static bool
 3262 3674 : gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
 3263 : {
 3264 3674 : gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
 3265 3674 : location_t loc = gimple_location (stmt);
 3266 3674 : tree dest = gimple_call_arg (stmt, 0);
 3267 3674 : tree src = gimple_call_arg (stmt, 1);
 3268 3674 : tree fn, lenp1;
 3269 :
 : /* Replacing the call requires a virtual definition in SSA form. */
 3270 7348 : if (!gimple_vdef (stmt) && gimple_in_ssa_p (cfun))
 3271 : return false;
 3272 :
 3273 : /* If the result is unused, replace stpcpy with strcpy. */
 3274 3674 : if (gimple_call_lhs (stmt) == NULL_TREE)
 3275 : {
 3276 29 : tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
 3277 29 : if (!fn)
 3278 : return false;
 3279 29 : gimple_call_set_fndecl (stmt, fn);
 3280 29 : fold_stmt (gsi);
 3281 29 : return true;
 3282 : }
 3283 :
 3284 : /* Set to non-null if ARG refers to an unterminated array. */
 3285 3645 : c_strlen_data data = { };
 3286 : /* The size of the unterminated array if SRC referes to one. */
 3287 3645 : tree size;
 3288 : /* True if the size is exact/constant, false if it's the lower bound
 3289 : of a range. */
 3290 3645 : bool exact;
 3291 3645 : tree len = c_strlen (src, 1, &data, 1);
 3292 3645 : if (!len
 3293 703 : || TREE_CODE (len) != INTEGER_CST)
 3294 : {
 3295 3174 : data.decl = unterminated_array (src, &size, &exact);
 3296 3174 : if (!data.decl)
 3297 : return false;
 3298 : }
 3299 :
 3300 1076 : if (data.decl)
 3301 : {
 3302 : /* Avoid folding calls with unterminated arrays. */
 3303 605 : if (!warning_suppressed_p (stmt, OPT_Wstringop_overread))
 3304 75 : warn_string_no_nul (loc, stmt, "stpcpy", src, data.decl, size,
 3305 : exact);
 : /* Suppress the warning so a later pass does not repeat it. */
 3306 605 : suppress_warning (stmt, OPT_Wstringop_overread);
 3307 605 : return false;
 3308 : }
 3309 :
 3310 471 : if (optimize_function_for_size_p (cfun)
 3311 : /* If length is zero it's small enough. */
 3312 471 : && !integer_zerop (len))
 3313 : return false;
 3314 :
 3315 : /* If the source has a known length replace stpcpy with memcpy. */
 3316 3645 : fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
 3317 287 : if (!fn)
 3318 : return false;
 3319 :
 : /* Emit memcpy (DEST, SRC, LEN + 1) to copy the terminating nul too. */
 3320 287 : gimple_seq stmts = NULL;
 3321 287 : tree tem = gimple_convert (&stmts, loc, size_type_node, len);
 3322 287 : lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
 3323 : tem, build_int_cst (size_type_node, 1));
 3324 287 : gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
 3325 287 : gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
 3326 287 : gimple_move_vops (repl, stmt);
 3327 287 : gsi_insert_before (gsi, repl, GSI_SAME_STMT);
 3328 : /* Replace the result with dest + len. */
 3329 287 : stmts = NULL;
 3330 287 : tem = gimple_convert (&stmts, loc, sizetype, len);
 3331 287 : gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
 3332 287 : gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
 3333 : POINTER_PLUS_EXPR, dest, tem);
 3334 287 : gsi_replace (gsi, ret, false);
 3335 : /* Finally fold the memcpy call. */
 3336 287 : gimple_stmt_iterator gsi2 = *gsi;
 3337 287 : gsi_prev (&gsi2);
 3338 287 : fold_stmt (&gsi2);
 3339 287 : return true;
 3340 : }
3341 :
 3342 : /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
 3343 : false if a normal call should be emitted rather than expanding
 3344 : the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
 3345 : BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
 3346 : passed as second argument. */
3347 :
 3348 : static bool
 3349 2359 : gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
 3350 : enum built_in_function fcode)
 3351 : {
 3352 2359 : gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
 3353 2359 : tree dest, size, len, fn, fmt, flag;
 3354 2359 : const char *fmt_str;
 3355 :
 3356 : /* Verify the required arguments in the original call. */
 3357 2359 : if (gimple_call_num_args (stmt) < 5)
 3358 : return false;
 3359 :
 3360 2359 : dest = gimple_call_arg (stmt, 0);
 3361 2359 : len = gimple_call_arg (stmt, 1);
 3362 2359 : flag = gimple_call_arg (stmt, 2);
 3363 2359 : size = gimple_call_arg (stmt, 3);
 3364 2359 : fmt = gimple_call_arg (stmt, 4);
 3365 :
 : /* Drop the check only when SIZE is unknown (all ones) or LEN/MAXLEN
 : is provably within SIZE. */
 3366 2359 : tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
 3367 2359 : if (! integer_all_onesp (size)
 3368 2359 : && !known_lower (stmt, len, size) && !known_lower (stmt, maxlen, size))
 3369 : return false;
 3370 :
 3371 308 : if (!init_target_chars ())
 3372 : return false;
 3373 :
 3374 : /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
 3375 : or if format doesn't contain % chars or is "%s". */
 3376 308 : if (! integer_zerop (flag))
 3377 : {
 3378 52 : fmt_str = c_getstr (fmt);
 3379 52 : if (fmt_str == NULL)
 3380 : return false;
 3381 52 : if (strchr (fmt_str, target_percent) != NULL
 3382 51 : && strcmp (fmt_str, target_percent_s))
 3383 : return false;
 3384 : }
 3385 :
 3386 : /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
 3387 : available. */
 3388 415 : fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
 3389 : ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
 3390 2618 : if (!fn || (!gimple_vdef (stmt) && gimple_in_ssa_p (cfun)))
 3391 : return false;
 3392 :
 3393 : /* Replace the called function and the first 5 argument by 3 retaining
 3394 : trailing varargs. */
 3395 259 : gimple_call_set_fndecl (stmt, fn);
 3396 259 : gimple_call_set_fntype (stmt, TREE_TYPE (fn));
 3397 259 : gimple_call_set_arg (stmt, 0, dest);
 3398 259 : gimple_call_set_arg (stmt, 1, len);
 3399 259 : gimple_call_set_arg (stmt, 2, fmt);
 : /* Shift the varargs left over the removed FLAG and SIZE slots. */
 3400 546 : for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
 3401 287 : gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
 3402 259 : gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
 3403 259 : fold_stmt (gsi);
 3404 259 : return true;
 3405 : }
3406 :
 3407 : /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
 3408 : Return false if a normal call should be emitted rather than
 3409 : expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
 3410 : or BUILT_IN_VSPRINTF_CHK. */
3411 :
 3412 : static bool
 3413 4471 : gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
 3414 : enum built_in_function fcode)
 3415 : {
 3416 4471 : gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
 3417 4471 : tree dest, size, len, fn, fmt, flag;
 3418 4471 : const char *fmt_str;
 3419 4471 : unsigned nargs = gimple_call_num_args (stmt);
 3420 :
 3421 : /* Verify the required arguments in the original call. */
 3422 4471 : if (nargs < 4)
 3423 : return false;
 3424 4471 : dest = gimple_call_arg (stmt, 0);
 3425 4471 : flag = gimple_call_arg (stmt, 1);
 3426 4471 : size = gimple_call_arg (stmt, 2);
 3427 4471 : fmt = gimple_call_arg (stmt, 3);
 3428 :
 3429 4471 : len = NULL_TREE;
 3430 :
 3431 4471 : if (!init_target_chars ())
 3432 : return false;
 3433 :
 3434 : /* Check whether the format is a literal string constant. */
 3435 4471 : fmt_str = c_getstr (fmt);
 3436 4471 : if (fmt_str != NULL)
 3437 : {
 3438 : /* If the format doesn't contain % args or %%, we know the size. */
 3439 4081 : if (strchr (fmt_str, target_percent) == 0)
 3440 : {
 3441 251 : if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
 3442 251 : len = build_int_cstu (size_type_node, strlen (fmt_str));
 3443 : }
 3444 : /* If the format is "%s" and first ... argument is a string literal,
 3445 : we know the size too. */
 3446 3830 : else if (fcode == BUILT_IN_SPRINTF_CHK
 3447 2962 : && strcmp (fmt_str, target_percent_s) == 0)
 3448 : {
 3449 395 : tree arg;
 3450 :
 3451 395 : if (nargs == 5)
 3452 : {
 3453 395 : arg = gimple_call_arg (stmt, 4);
 3454 395 : if (POINTER_TYPE_P (TREE_TYPE (arg)))
 3455 363 : len = c_strlen (arg, 1);
 3456 : }
 3457 : }
 3458 : }
 3459 :
 : /* Drop the check only when SIZE is unknown (all ones) or the computed
 : output length provably fits in SIZE (strictly, for the nul). */
 3460 4471 : if (! integer_all_onesp (size) && !known_lower (stmt, len, size, true))
 3461 : return false;
 3462 :
 3463 : /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
 3464 : or if format doesn't contain % chars or is "%s". */
 3465 202 : if (! integer_zerop (flag))
 3466 : {
 3467 1 : if (fmt_str == NULL)
 3468 : return false;
 3469 1 : if (strchr (fmt_str, target_percent) != NULL
 3470 0 : && strcmp (fmt_str, target_percent_s))
 3471 : return false;
 3472 : }
 3473 :
 3474 : /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
 3475 347 : fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
 3476 : ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
 3477 4673 : if (!fn || (!gimple_vdef (stmt) && gimple_in_ssa_p (cfun)))
 3478 : return false;
 3479 :
 3480 : /* Replace the called function and the first 4 argument by 2 retaining
 3481 : trailing varargs. */
 3482 202 : gimple_call_set_fndecl (stmt, fn);
 3483 202 : gimple_call_set_fntype (stmt, TREE_TYPE (fn));
 3484 202 : gimple_call_set_arg (stmt, 0, dest);
 3485 202 : gimple_call_set_arg (stmt, 1, fmt);
 : /* Shift the varargs left over the removed FLAG and SIZE slots. */
 3486 400 : for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
 3487 198 : gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
 3488 202 : gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
 3489 202 : fold_stmt (gsi);
 3490 202 : return true;
 3491 : }
3492 :
3493 : /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
3494 : ORIG may be null if this is a 2-argument call. We don't attempt to
3495 : simplify calls with more than 3 arguments.
3496 :
3497 : Return true if simplification was possible, otherwise false. */
3498 :
bool
gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* Verify the required arguments in the original call.  We deal with two
     types of sprintf() calls: 'sprintf (str, fmt)' and
     'sprintf (dest, "%s", orig)'.  */
  if (gimple_call_num_args (stmt) > 3)
    return false;

  tree orig = NULL_TREE;
  if (gimple_call_num_args (stmt) == 3)
    orig = gimple_call_arg (stmt, 2);

  /* Check whether the format is a literal string constant.  */
  tree fmt = gimple_call_arg (stmt, 1);
  const char *fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  tree dest = gimple_call_arg (stmt, 0);

  if (!init_target_chars ())
    return false;

  /* Both transformations below emit a strcpy; punt if it isn't
     available or if the call has no virtual operands to transfer.  */
  tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
  if (!fn || (!gimple_vdef (stmt) && gimple_in_ssa_p (cfun)))
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      /* Don't optimize sprintf (buf, "abc", ptr++).  */
      if (orig)
	return false;

      /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
	 'format' is known to contain no % formats.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);

      /* Propagate the NO_WARNING bit to avoid issuing the same
	 warning more than once.  */
      copy_warning (repl, stmt);

      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  /* sprintf returns the number of characters written, which
	     for a %-free format is simply the format's length.  */
	  repl = gimple_build_assign (lhs, build_int_cst (TREE_TYPE (lhs),
							  strlen (fmt_str)));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the strcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy; if the result is used we also
     need to know the length of ORIG to produce the return value.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      /* Don't crash on sprintf (str1, "%s").  */
      if (!orig)
	return false;

      /* Don't fold calls with source arguments of invalid (nonpointer)
	 types.  */
      if (!POINTER_TYPE_P (TREE_TYPE (orig)))
	return false;

      tree orig_len = NULL_TREE;
      if (gimple_call_lhs (stmt))
	{
	  /* The return value is strlen (orig); only fold when that
	     length is a known constant.  */
	  orig_len = get_maxval_strlen (orig, SRK_STRLEN);
	  if (!orig_len)
	    return false;
	}

      /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);

      /* Propagate the NO_WARNING bit to avoid issuing the same
	 warning more than once.  */
      copy_warning (repl, stmt);

      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (TREE_TYPE (lhs),
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
	  repl = gimple_build_assign (lhs, orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the strcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
3622 :
3623 : /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
3624 : FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
3625 : attempt to simplify calls with more than 4 arguments.
3626 :
3627 : Return true if simplification was possible, otherwise false. */
3628 :
bool
gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest = gimple_call_arg (stmt, 0);
  tree destsize = gimple_call_arg (stmt, 1);
  tree fmt = gimple_call_arg (stmt, 2);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  /* Punt on more than one format argument, or when the call has no
     virtual operands to transfer to the replacement.  */
  if (gimple_call_num_args (stmt) > 4
      || (!gimple_vdef (stmt) && gimple_in_ssa_p (cfun)))
    return false;

  if (gimple_call_num_args (stmt) == 4)
    orig = gimple_call_arg (stmt, 3);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't optimize snprintf (buf, 4, "abc", ptr++).  */
      if (orig)
	return false;

      /* LEN is the number of characters written, excluding the NUL.  */
      tree len = build_int_cstu (TREE_TYPE (destsize), strlen (fmt_str));

      /* We could expand this as
	 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
	 or to
	 memcpy (str, fmt_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      if (!known_lower (stmt, len, destsize, true))
	return false;

      /* Convert snprintf (str, cst, fmt) into strcpy (str, fmt).  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  /* The return value of snprintf is the would-be length LEN.  */
	  repl = gimple_build_assign (lhs,
				      fold_convert (TREE_TYPE (lhs), len));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the strcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy when the copy is known to fit.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't crash on snprintf (str1, cst, "%s").  */
      if (!orig)
	return false;

      tree orig_len = get_maxval_strlen (orig, SRK_STRLEN);

      /* We could expand this as
	 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
	 or to
	 memcpy (str1, str2_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      if (!known_lower (stmt, orig_len, destsize, true))
	return false;

      /* Convert snprintf (str1, cst, "%s", str2) into
	 strcpy (str1, str2) if strlen (str2) < cst.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  /* The return value is the length of the copied string.  */
	  if (!useless_type_conversion_p (TREE_TYPE (lhs),
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
	  repl = gimple_build_assign (lhs, orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the strcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
3755 :
3756 : /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
3757 : FP, FMT, and ARG are the arguments to the call. We don't fold calls with
3758 : more than 3 arguments, and ARG may be null in the 2-argument case.
3759 :
   Return true if the call was simplified in place and false if no
   simplification was possible.  FCODE is the BUILT_IN_*
   code of the function to be simplified.  */
3763 :
static bool
gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
			     tree fp, tree fmt, tree arg,
			     enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_fputc, fn_fputs;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
      fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
    }
  else
    {
      fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
      fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
    }

  if (!init_target_chars ())
    return false;

  /* The replacements need virtual operands to transfer.  */
  if (!gimple_vdef (stmt) && gimple_in_ssa_p (cfun))
    return false;

  /* If the format doesn't contain % args or %%, use fputs.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      /* A trailing argument with a %-free format is suspicious
	 (e.g. fprintf (fp, "abc", ptr++)); leave it alone.  */
      if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
	  && arg)
	return false;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* When "string" doesn't contain %, replace all cases of
	 fprintf (fp, string) with fputs (string, fp).  The fputs
	 builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
    return false;

  /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg
	  || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
	return false;
      if (fn_fputc)
	{
	  gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
3859 :
3860 : /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
3861 : FMT and ARG are the arguments to the call; we don't fold cases with
3862 : more than 2 arguments, and ARG may be null if this is a 1-argument case.
3863 :
   Return true if the call was simplified in place and false if no
   simplification was possible.  FCODE is the BUILT_IN_*
   code of the function to be simplified.  */
3867 :
static bool
gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
			    tree arg, enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_putchar, fn_puts, newarg;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* The replacements need virtual operands to transfer.  */
  if (!gimple_vdef (stmt) && gimple_in_ssa_p (cfun))
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_PRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
      fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
    }
  else
    {
      fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
      fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
    }

  if (!init_target_chars ())
    return false;

  /* If the format is "%s" with a constant argument, or contains no %
     at all, the output string STR is known; fold to puts or putchar.  */
  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
    {
      const char *str;

      if (strcmp (fmt_str, target_percent_s) == 0)
	{
	  if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
	    return false;

	  if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	    return false;

	  str = c_getstr (arg);
	  if (str == NULL)
	    return false;
	}
      else
	{
	  /* The format specifier doesn't contain any '%' characters.  */
	  if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
	      && arg)
	    return false;
	  str = fmt_str;
	}

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')
	{
	  /* Given printf("c"), (where c is any one character,)
	     convert "c"[0] to an int and pass that to the replacement
	     function.  */
	  newarg = build_int_cst (integer_type_node, str[0]);
	  if (fn_putchar)
	    {
	      gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	{
	  /* If the string was "string\n", call puts("string").  */
	  size_t len = strlen (str);
	  if ((unsigned char)str[len - 1] == target_newline
	      && (size_t) (int) len == len
	      && (int) len > 0)
	    {
	      char *newstr;

	      /* Create a NUL-terminated string that's one char shorter
		 than the original, stripping off the trailing '\n'.  */
	      newstr = xstrdup (str);
	      newstr[len - 1] = '\0';
	      newarg = build_string_literal (len, newstr);
	      free (newstr);
	      if (fn_puts)
		{
		  gcall *repl = gimple_build_call (fn_puts, 1, newarg);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	    }
	  else
	    /* We'd like to arrange to call fputs(string,stdout) here,
	       but we need stdout and don't have a way to get it yet.  */
	    return false;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
    return false;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_puts)
	{
	  gcall *repl = gimple_build_call (fn_puts, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || ! useless_type_conversion_p (integer_type_node,
					       TREE_TYPE (arg)))
	return false;
      if (fn_putchar)
	{
	  gcall *repl = gimple_build_call (fn_putchar, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
4014 :
4015 :
4016 :
/* Fold a call to __builtin_strlen: replace it with a constant when the
   length is known exactly, otherwise record a range for the result.  */
4018 :
static bool
gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree arg = gimple_call_arg (stmt, 0);

  /* Bounds on the value strlen (ARG) can take.  */
  wide_int minlen;
  wide_int maxlen;

  c_strlen_data lendata = { };
  if (get_range_strlen (arg, &lendata, /* eltsize = */ 1)
      && !lendata.decl
      && lendata.minlen && TREE_CODE (lendata.minlen) == INTEGER_CST
      && lendata.maxlen && TREE_CODE (lendata.maxlen) == INTEGER_CST)
    {
      /* The range of lengths refers to either a single constant
	 string or to the longest and shortest constant string
	 referenced by the argument of the strlen() call, or to
	 the strings that can possibly be stored in the arrays
	 the argument refers to.  */
      minlen = wi::to_wide (lendata.minlen);
      maxlen = wi::to_wide (lendata.maxlen);
    }
  else
    {
      /* No usable range; fall back to [0, max_object_size () - 2]
	 (the -2 accounts for the terminating NUL and that a string of
	 the maximum object size could not be NUL-terminated).  */
      unsigned prec = TYPE_PRECISION (sizetype);

      minlen = wi::shwi (0, prec);
      maxlen = wi::to_wide (max_object_size (), prec) - 2;
    }

  /* For -fsanitize=address, don't optimize the upper bound of the
     length to be able to diagnose UB on non-zero terminated arrays.  */
  if (sanitize_flags_p (SANITIZE_ADDRESS))
    maxlen = wi::max_value (TYPE_PRECISION (sizetype), UNSIGNED);

  if (minlen == maxlen)
    {
      /* Fold the strlen call to a constant.  Note: equality can only
	 arise on the get_range_strlen path above, so lendata.minlen
	 is necessarily a valid tree here.  */
      tree type = TREE_TYPE (lendata.minlen);
      tree len = force_gimple_operand_gsi (gsi,
					   wide_int_to_tree (type, minlen),
					   true, NULL, true, GSI_SAME_STMT);
      replace_call_with_value (gsi, len);
      return true;
    }

  /* Set the strlen() range to [0, MAXLEN].  */
  if (tree lhs = gimple_call_lhs (stmt))
    set_strlen_range (lhs, minlen, maxlen);

  return false;
}
4072 :
/* Fold omp_is_initial_device () to a constant when the answer is known
   at compile time.  Return true if the call was replaced.  */

static bool
gimple_fold_builtin_omp_is_initial_device (gimple_stmt_iterator *gsi)
{
#if ACCEL_COMPILER
  /* In the offload (device) compiler we are never on the initial
     device, so the answer is always 0.  */
  replace_call_with_value (gsi, integer_zero_node);
  return true;
#else
  /* In the host compiler the answer is 1 once we know no offloading
     is configured, or once expansion has committed to host code.  */
  if (!ENABLE_OFFLOADING || symtab->state == EXPANSION)
    {
      replace_call_with_value (gsi, integer_one_node);
      return true;
    }
#endif
  return false;
}
4088 :
4089 : /* omp_get_initial_device was in OpenMP 5.0/5.1 explicitly and in
4090 : 5.0 implicitly the same as omp_get_num_devices; since 6.0 it is
4091 : unspecified whether -1 or omp_get_num_devices() is returned. For
4092 : better backward compatibility, use omp_get_num_devices() on the
4093 : host - and -1 on the device (where the result is unspecified). */
4094 :
static bool
gimple_fold_builtin_omp_get_initial_device (gimple_stmt_iterator *gsi)
{
#if ACCEL_COMPILER
  /* On the device the result is unspecified (see comment above);
     fold to -1.  */
  replace_call_with_value (gsi, build_int_cst (integer_type_node, -1));
#else
  if (!ENABLE_OFFLOADING)
    /* Without offloading support the host is the only device,
       numbered 0.  */
    replace_call_with_value (gsi, integer_zero_node);
  else
    {
      /* Fold to a call to omp_get_num_devices () for backward
	 compatibility with OpenMP 5.x.  */
      tree fn = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_DEVICES);
      gcall *repl = gimple_build_call (fn, 0);
      replace_call_with_call_and_fold (gsi, repl);
    }
#endif
  /* One of the branches above always replaced the call.  */
  return true;
}
4112 :
4113 : static bool
4114 276 : gimple_fold_builtin_omp_get_num_devices (gimple_stmt_iterator *gsi)
4115 : {
4116 276 : if (!ENABLE_OFFLOADING)
4117 : {
4118 0 : replace_call_with_value (gsi, integer_zero_node);
4119 276 : return true;
4120 : }
4121 : return false;
4122 : }
4123 :
4124 : /* Fold a call to __builtin_acc_on_device. */
4125 :
4126 : static bool
4127 2866 : gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
4128 : {
4129 : /* Defer folding until we know which compiler we're in. */
4130 2866 : if (symtab->state != EXPANSION)
4131 : return false;
4132 :
4133 554 : unsigned val_host = GOMP_DEVICE_HOST;
4134 554 : unsigned val_dev = GOMP_DEVICE_NONE;
4135 :
4136 : #ifdef ACCEL_COMPILER
4137 : val_host = GOMP_DEVICE_NOT_HOST;
4138 : val_dev = ACCEL_COMPILER_acc_device;
4139 : #endif
4140 :
4141 554 : location_t loc = gimple_location (gsi_stmt (*gsi));
4142 :
4143 554 : tree host_eq = make_ssa_name (boolean_type_node);
4144 554 : gimple *host_ass = gimple_build_assign
4145 554 : (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
4146 554 : gimple_set_location (host_ass, loc);
4147 554 : gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
4148 :
4149 554 : tree dev_eq = make_ssa_name (boolean_type_node);
4150 554 : gimple *dev_ass = gimple_build_assign
4151 554 : (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
4152 554 : gimple_set_location (dev_ass, loc);
4153 554 : gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
4154 :
4155 554 : tree result = make_ssa_name (boolean_type_node);
4156 554 : gimple *result_ass = gimple_build_assign
4157 554 : (result, BIT_IOR_EXPR, host_eq, dev_eq);
4158 554 : gimple_set_location (result_ass, loc);
4159 554 : gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
4160 :
4161 554 : replace_call_with_value (gsi, result);
4162 :
4163 554 : return true;
4164 : }
4165 :
4166 : /* Fold realloc (0, n) -> malloc (n). */
4167 :
4168 : static bool
4169 47777 : gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
4170 : {
4171 47777 : gimple *stmt = gsi_stmt (*gsi);
4172 47777 : tree arg = gimple_call_arg (stmt, 0);
4173 47777 : tree size = gimple_call_arg (stmt, 1);
4174 :
4175 141974 : if (!gimple_vdef (stmt) && gimple_in_ssa_p (cfun))
4176 : return false;
4177 :
4178 47777 : if (operand_equal_p (arg, null_pointer_node, 0))
4179 : {
4180 1357 : tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
4181 1357 : if (fn_malloc)
4182 : {
4183 1357 : gcall *repl = gimple_build_call (fn_malloc, 1, size);
4184 1357 : replace_call_with_call_and_fold (gsi, repl);
4185 1357 : return true;
4186 : }
4187 : }
4188 : return false;
4189 : }
4190 :
/* Number of bytes into which any type but aggregate, vector or
   _BitInt types should fit.  */
static constexpr size_t clear_padding_unit
  = MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT;
/* Buffer size on which __builtin_clear_padding folding code works.  */
static const size_t clear_padding_buf_size = 32 * clear_padding_unit;

/* Data passed through __builtin_clear_padding folding.  */
struct clear_padding_struct {
  /* Source location used for any statements emitted.  */
  location_t loc;
  /* 0 during __builtin_clear_padding folding, nonzero during
     clear_type_padding_in_mask.  In that case, instead of clearing the
     non-padding bits in union_ptr array clear the padding bits in there.  */
  bool clear_in_mask;
  /* Base address of the object whose padding is being cleared.  */
  tree base;
  /* Type used for the MEM_REF offset operand of emitted stores
     (presumably a pointer type carrying alias info -- see the flush
     code's build_int_cst (buf->alias_type, ...) uses).  */
  tree alias_type;
  /* Insertion point for emitted statements.  */
  gimple_stmt_iterator *gsi;
  /* Alignment of buf->base + 0.  */
  unsigned align;
  /* Offset from buf->base.  Should be always a multiple of UNITS_PER_WORD.  */
  HOST_WIDE_INT off;
  /* Number of padding bytes before buf->off that don't have padding clear
     code emitted yet.  */
  HOST_WIDE_INT padding_bytes;
  /* The size of the whole object.  Never emit code to touch
     buf->base + buf->sz or following bytes.  */
  HOST_WIDE_INT sz;
  /* Number of bytes recorded in buf->buf.  */
  size_t size;
  /* When inside union, instead of emitting code we and bits inside of
     the union_ptr array.  */
  unsigned char *union_ptr;
  /* Set bits mean padding bits that need to be cleared by the builtin.
     One extra clear_padding_unit beyond clear_padding_buf_size gives
     the flush code slack when it processes less-than-full buffers.  */
  unsigned char buf[clear_padding_buf_size + clear_padding_unit];
};
4226 :
4227 : /* Emit code to clear padding requested in BUF->buf - set bits
4228 : in there stand for padding that should be cleared. FULL is true
4229 : if everything from the buffer should be flushed, otherwise
4230 : it can leave up to 2 * clear_padding_unit bytes for further
4231 : processing. */
4232 :
4233 : static void
4234 33895 : clear_padding_flush (clear_padding_struct *buf, bool full)
4235 : {
4236 33895 : gcc_assert ((clear_padding_unit % UNITS_PER_WORD) == 0);
4237 33895 : if (!full && buf->size < 2 * clear_padding_unit)
4238 33895 : return;
4239 34943 : gcc_assert ((buf->off % UNITS_PER_WORD) == 0);
4240 33853 : size_t end = buf->size;
4241 33853 : if (!full)
4242 42 : end = ((end - clear_padding_unit - 1) / clear_padding_unit
4243 : * clear_padding_unit);
4244 33853 : size_t padding_bytes = buf->padding_bytes;
4245 33853 : if (buf->union_ptr)
4246 : {
4247 33088 : if (buf->clear_in_mask)
4248 : {
4249 : /* During clear_type_padding_in_mask, clear the padding
4250 : bits set in buf->buf in the buf->union_ptr mask. */
4251 224566 : for (size_t i = 0; i < end; i++)
4252 : {
4253 191871 : if (buf->buf[i] == (unsigned char) ~0)
4254 7884 : padding_bytes++;
4255 : else
4256 : {
4257 183987 : memset (&buf->union_ptr[buf->off + i - padding_bytes],
4258 : 0, padding_bytes);
4259 183987 : padding_bytes = 0;
4260 183987 : buf->union_ptr[buf->off + i] &= ~buf->buf[i];
4261 : }
4262 : }
4263 32695 : if (full)
4264 : {
4265 32695 : memset (&buf->union_ptr[buf->off + end - padding_bytes],
4266 : 0, padding_bytes);
4267 32695 : buf->off = 0;
4268 32695 : buf->size = 0;
4269 32695 : buf->padding_bytes = 0;
4270 : }
4271 : else
4272 : {
4273 0 : memmove (buf->buf, buf->buf + end, buf->size - end);
4274 0 : buf->off += end;
4275 0 : buf->size -= end;
4276 0 : buf->padding_bytes = padding_bytes;
4277 : }
4278 32695 : return;
4279 : }
4280 : /* Inside of a union, instead of emitting any code, instead
4281 : clear all bits in the union_ptr buffer that are clear
4282 : in buf. Whole padding bytes don't clear anything. */
4283 3017 : for (size_t i = 0; i < end; i++)
4284 : {
4285 2624 : if (buf->buf[i] == (unsigned char) ~0)
4286 1424 : padding_bytes++;
4287 : else
4288 : {
4289 1200 : padding_bytes = 0;
4290 1200 : buf->union_ptr[buf->off + i] &= buf->buf[i];
4291 : }
4292 : }
4293 393 : if (full)
4294 : {
4295 393 : buf->off = 0;
4296 393 : buf->size = 0;
4297 393 : buf->padding_bytes = 0;
4298 : }
4299 : else
4300 : {
4301 0 : memmove (buf->buf, buf->buf + end, buf->size - end);
4302 0 : buf->off += end;
4303 0 : buf->size -= end;
4304 0 : buf->padding_bytes = padding_bytes;
4305 : }
4306 393 : return;
4307 : }
4308 765 : size_t wordsize = UNITS_PER_WORD;
4309 23494 : for (size_t i = 0; i < end; i += wordsize)
4310 : {
4311 22729 : size_t nonzero_first = wordsize;
4312 22729 : size_t nonzero_last = 0;
4313 22729 : size_t zero_first = wordsize;
4314 22729 : size_t zero_last = 0;
4315 22729 : bool all_ones = true, bytes_only = true;
4316 23009 : if ((unsigned HOST_WIDE_INT) (buf->off + i + wordsize)
4317 22729 : > (unsigned HOST_WIDE_INT) buf->sz)
4318 : {
4319 280 : gcc_assert (wordsize > 1);
4320 280 : wordsize /= 2;
4321 280 : i -= wordsize;
4322 280 : continue;
4323 : }
4324 22449 : size_t endsize = end - i > wordsize ? wordsize : end - i;
4325 200820 : for (size_t j = i; j < i + endsize; j++)
4326 : {
4327 178371 : if (buf->buf[j])
4328 : {
4329 168383 : if (nonzero_first == wordsize)
4330 : {
4331 21509 : nonzero_first = j - i;
4332 21509 : nonzero_last = j - i;
4333 : }
4334 168383 : if (nonzero_last != j - i)
4335 158 : all_ones = false;
4336 168383 : nonzero_last = j + 1 - i;
4337 : }
4338 : else
4339 : {
4340 9988 : if (zero_first == wordsize)
4341 1934 : zero_first = j - i;
4342 9988 : zero_last = j + 1 - i;
4343 : }
4344 178371 : if (buf->buf[j] != 0 && buf->buf[j] != (unsigned char) ~0)
4345 : {
4346 85 : all_ones = false;
4347 85 : bytes_only = false;
4348 : }
4349 : }
4350 22449 : size_t padding_end = i;
4351 22449 : if (padding_bytes)
4352 : {
4353 20848 : if (nonzero_first == 0
4354 20848 : && nonzero_last == endsize
4355 20400 : && all_ones)
4356 : {
4357 : /* All bits are padding and we had some padding
4358 : before too. Just extend it. */
4359 20400 : padding_bytes += endsize;
4360 20400 : continue;
4361 : }
4362 448 : if (all_ones && nonzero_first == 0)
4363 : {
4364 4 : padding_bytes += nonzero_last;
4365 4 : padding_end += nonzero_last;
4366 4 : nonzero_first = wordsize;
4367 4 : nonzero_last = 0;
4368 : }
4369 444 : else if (bytes_only && nonzero_first == 0)
4370 : {
4371 0 : gcc_assert (zero_first && zero_first != wordsize);
4372 0 : padding_bytes += zero_first;
4373 0 : padding_end += zero_first;
4374 : }
4375 448 : tree atype, src;
4376 448 : if (padding_bytes == 1)
4377 : {
4378 33 : atype = char_type_node;
4379 33 : src = build_zero_cst (char_type_node);
4380 : }
4381 : else
4382 : {
4383 415 : atype = build_array_type_nelts (char_type_node, padding_bytes);
4384 415 : src = build_constructor (atype, NULL);
4385 : }
4386 448 : tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
4387 : build_int_cst (buf->alias_type,
4388 448 : buf->off + padding_end
4389 448 : - padding_bytes));
4390 448 : gimple *g = gimple_build_assign (dst, src);
4391 448 : gimple_set_location (g, buf->loc);
4392 448 : gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4393 448 : padding_bytes = 0;
4394 448 : buf->padding_bytes = 0;
4395 : }
4396 2049 : if (nonzero_first == wordsize)
4397 : /* All bits in a word are 0, there are no padding bits. */
4398 944 : continue;
4399 1105 : if (all_ones && nonzero_last == endsize)
4400 : {
4401 : /* All bits between nonzero_first and end of word are padding
4402 : bits, start counting padding_bytes. */
4403 839 : padding_bytes = nonzero_last - nonzero_first;
4404 839 : continue;
4405 : }
4406 266 : if (bytes_only)
4407 : {
4408 : /* If bitfields aren't involved in this word, prefer storing
4409 : individual bytes or groups of them over performing a RMW
4410 : operation on the whole word. */
4411 227 : gcc_assert (i + zero_last <= end);
4412 1125 : for (size_t j = padding_end; j < i + zero_last; j++)
4413 : {
4414 898 : if (buf->buf[j])
4415 : {
4416 : size_t k;
4417 614 : for (k = j; k < i + zero_last; k++)
4418 614 : if (buf->buf[k] == 0)
4419 : break;
4420 259 : HOST_WIDE_INT off = buf->off + j;
4421 259 : tree atype, src;
4422 259 : if (k - j == 1)
4423 : {
4424 211 : atype = char_type_node;
4425 211 : src = build_zero_cst (char_type_node);
4426 : }
4427 : else
4428 : {
4429 48 : atype = build_array_type_nelts (char_type_node, k - j);
4430 48 : src = build_constructor (atype, NULL);
4431 : }
4432 259 : tree dst = build2_loc (buf->loc, MEM_REF, atype,
4433 : buf->base,
4434 259 : build_int_cst (buf->alias_type, off));
4435 259 : gimple *g = gimple_build_assign (dst, src);
4436 259 : gimple_set_location (g, buf->loc);
4437 259 : gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4438 259 : j = k;
4439 : }
4440 : }
4441 227 : if (nonzero_last == endsize)
4442 98 : padding_bytes = nonzero_last - zero_last;
4443 227 : continue;
4444 227 : }
4445 126 : for (size_t eltsz = 1; eltsz <= wordsize; eltsz <<= 1)
4446 : {
4447 126 : if (nonzero_last - nonzero_first <= eltsz
4448 39 : && ((nonzero_first & ~(eltsz - 1))
4449 39 : == ((nonzero_last - 1) & ~(eltsz - 1))))
4450 : {
4451 39 : tree type;
4452 39 : if (eltsz == 1)
4453 2 : type = char_type_node;
4454 : else
4455 37 : type = lang_hooks.types.type_for_size (eltsz * BITS_PER_UNIT,
4456 : 0);
4457 39 : size_t start = nonzero_first & ~(eltsz - 1);
4458 39 : HOST_WIDE_INT off = buf->off + i + start;
4459 39 : tree atype = type;
4460 39 : if (eltsz > 1 && buf->align < TYPE_ALIGN (type))
4461 0 : atype = build_aligned_type (type, buf->align);
4462 39 : tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
4463 39 : build_int_cst (buf->alias_type, off));
4464 39 : tree src;
4465 39 : gimple *g;
4466 39 : if (all_ones
4467 39 : && nonzero_first == start
4468 0 : && nonzero_last == start + eltsz)
4469 0 : src = build_zero_cst (type);
4470 : else
4471 : {
4472 39 : src = make_ssa_name (type);
4473 39 : tree tmp_dst = unshare_expr (dst);
4474 : /* The folding introduces a read from the tmp_dst, we should
4475 : prevent uninitialized warning analysis from issuing warning
4476 : for such fake read. In order to suppress warning only for
4477 : this expr, we should set the location of tmp_dst to
4478 : UNKNOWN_LOCATION first, then suppress_warning will call
4479 : set_no_warning_bit to set the no_warning flag only for
4480 : tmp_dst. */
4481 39 : SET_EXPR_LOCATION (tmp_dst, UNKNOWN_LOCATION);
4482 39 : suppress_warning (tmp_dst, OPT_Wuninitialized);
4483 39 : g = gimple_build_assign (src, tmp_dst);
4484 39 : gimple_set_location (g, buf->loc);
4485 39 : gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4486 78 : tree mask = native_interpret_expr (type,
4487 39 : buf->buf + i + start,
4488 : eltsz);
4489 39 : gcc_assert (mask && TREE_CODE (mask) == INTEGER_CST);
4490 39 : mask = fold_build1 (BIT_NOT_EXPR, type, mask);
4491 39 : tree src_masked = make_ssa_name (type);
4492 39 : g = gimple_build_assign (src_masked, BIT_AND_EXPR,
4493 : src, mask);
4494 39 : gimple_set_location (g, buf->loc);
4495 39 : gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4496 39 : src = src_masked;
4497 : }
4498 39 : g = gimple_build_assign (dst, src);
4499 39 : gimple_set_location (g, buf->loc);
4500 39 : gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4501 39 : break;
4502 : }
4503 : }
4504 : }
4505 765 : if (full)
4506 : {
4507 723 : if (padding_bytes)
4508 : {
4509 489 : tree atype, src;
4510 489 : if (padding_bytes == 1)
4511 : {
4512 108 : atype = char_type_node;
4513 108 : src = build_zero_cst (char_type_node);
4514 : }
4515 : else
4516 : {
4517 381 : atype = build_array_type_nelts (char_type_node, padding_bytes);
4518 381 : src = build_constructor (atype, NULL);
4519 : }
4520 489 : tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
4521 : build_int_cst (buf->alias_type,
4522 489 : buf->off + end
4523 489 : - padding_bytes));
4524 489 : gimple *g = gimple_build_assign (dst, src);
4525 489 : gimple_set_location (g, buf->loc);
4526 489 : gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4527 : }
4528 723 : size_t end_rem = end % UNITS_PER_WORD;
4529 723 : buf->off += end - end_rem;
4530 723 : buf->size = end_rem;
4531 723 : memset (buf->buf, 0, buf->size);
4532 723 : buf->padding_bytes = 0;
4533 : }
4534 : else
4535 : {
4536 42 : memmove (buf->buf, buf->buf + end, buf->size - end);
4537 42 : buf->off += end;
4538 42 : buf->size -= end;
4539 42 : buf->padding_bytes = padding_bytes;
4540 : }
4541 : }
4542 :
4543 : /* Append PADDING_BYTES padding bytes. */
4544 :
4545                 : static void
4546            4907 : clear_padding_add_padding (clear_padding_struct *buf,
4547                 :                            HOST_WIDE_INT padding_bytes)
4548                 : {
4549            4907 :   if (padding_bytes == 0)
4550                 :     return;
                     :   /* Padding is recorded as all-ones bytes in the tracking buffer;
                     :      try a partial flush first if the new bytes wouldn't fit.  */
4551            1677 :   if ((unsigned HOST_WIDE_INT) padding_bytes + buf->size
4552                 :       > (unsigned HOST_WIDE_INT) clear_padding_buf_size)
4553              42 :     clear_padding_flush (buf, false);
4554            1677 :   if ((unsigned HOST_WIDE_INT) padding_bytes + buf->size
4555                 :       > (unsigned HOST_WIDE_INT) clear_padding_buf_size)
4556                 :     {
                     :       /* Still doesn't fit: top up the buffer with ~0 bytes, flush,
                     :          then account the remainder directly via buf->off and
                     :          buf->padding_bytes, keeping only the sub-word tail in the
                     :          buffer.  */
4557              42 :       memset (buf->buf + buf->size, ~0, clear_padding_buf_size - buf->size);
4558              42 :       padding_bytes -= clear_padding_buf_size - buf->size;
4559              42 :       buf->size = clear_padding_buf_size;
4560              42 :       clear_padding_flush (buf, false);
4561              42 :       gcc_assert (buf->padding_bytes);
4562                 :       /* At this point buf->buf[0] through buf->buf[buf->size - 1]
4563                 :          is guaranteed to be all ones.  */
4564              42 :       padding_bytes += buf->size;
4565              42 :       buf->size = padding_bytes % UNITS_PER_WORD;
4566              42 :       memset (buf->buf, ~0, buf->size);
4567              42 :       buf->off += padding_bytes - buf->size;
4568              42 :       buf->padding_bytes += padding_bytes - buf->size;
4569                 :     }
4570                 :   else
4571                 :     {
                     :       /* Common case: just append the all-ones padding bytes.  */
4572            1635 :       memset (buf->buf + buf->size, ~0, padding_bytes);
4573            1635 :       buf->size += padding_bytes;
4574                 :     }
4575                 : }
4576 :
4577 : static void clear_padding_type (clear_padding_struct *, tree,
4578 : HOST_WIDE_INT, bool);
4579 :
4580 : /* Clear padding bits of union type TYPE. */
4581 :
4582                 : static void
4583             128 : clear_padding_union (clear_padding_struct *buf, tree type,
4584                 :                      HOST_WIDE_INT sz, bool for_auto_init)
4585                 : {
4586             128 :   clear_padding_struct *union_buf;
4587             128 :   HOST_WIDE_INT start_off = 0, next_off = 0;
4588             128 :   size_t start_size = 0;
4589             128 :   if (buf->union_ptr)
4590                 :     {
                     :       /* Already inside a union (mask mode): reuse BUF itself,
                     :          word-aligning the start offset first.  */
4591              42 :       start_off = buf->off + buf->size;
4592              42 :       next_off = start_off + sz;
4593              42 :       start_size = start_off % UNITS_PER_WORD;
4594              42 :       start_off -= start_size;
4595              42 :       clear_padding_flush (buf, true);
4596              42 :       union_buf = buf;
4597                 :     }
4598                 :   else
4599                 :     {
                     :       /* Outermost union: build a scratch buffer whose union_ptr mask
                     :          starts as all-ones; each member's pass clears the bits that
                     :          member defines, so only bytes padded in every member remain
                     :          set.  The mask lives in BUF's spare space when it fits,
                     :          otherwise in a heap allocation freed below.  */
4600              86 :       if (sz + buf->size > clear_padding_buf_size)
4601               0 :         clear_padding_flush (buf, false);
4602              86 :       union_buf = XALLOCA (clear_padding_struct);
4603              86 :       union_buf->loc = buf->loc;
4604              86 :       union_buf->clear_in_mask = buf->clear_in_mask;
4605              86 :       union_buf->base = NULL_TREE;
4606              86 :       union_buf->alias_type = NULL_TREE;
4607              86 :       union_buf->gsi = NULL;
4608              86 :       union_buf->align = 0;
4609              86 :       union_buf->off = 0;
4610              86 :       union_buf->padding_bytes = 0;
4611              86 :       union_buf->sz = sz;
4612              86 :       union_buf->size = 0;
4613              86 :       if (sz + buf->size <= clear_padding_buf_size)
4614              86 :         union_buf->union_ptr = buf->buf + buf->size;
4615                 :       else
4616               0 :         union_buf->union_ptr = XNEWVEC (unsigned char, sz);
4617              86 :       memset (union_buf->union_ptr, ~0, sz);
4618                 :     }
4619                 :
                     :   /* Run the padding computation for each named member at the same
                     :      starting offset, padding each member out to the union size.  */
4620            1193 :   for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
4621            1065 :     if (TREE_CODE (field) == FIELD_DECL && !DECL_PADDING_P (field))
4622                 :       {
4623             359 :         if (DECL_SIZE_UNIT (field) == NULL_TREE)
4624                 :           {
4625               8 :             if (TREE_TYPE (field) == error_mark_node)
4626               0 :               continue;
4627               8 :             gcc_assert (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
4628                 :                         && !COMPLETE_TYPE_P (TREE_TYPE (field)));
4629               8 :             if (!buf->clear_in_mask && !for_auto_init)
4630               8 :               error_at (buf->loc, "flexible array member %qD does not have "
4631                 :                                   "well defined padding bits for %qs",
4632                 :                         field, "__builtin_clear_padding");
4633               8 :             continue;
4634                 :           }
4635             351 :         HOST_WIDE_INT fldsz = tree_to_shwi (DECL_SIZE_UNIT (field));
4636             351 :         gcc_assert (union_buf->size == 0);
4637             351 :         union_buf->off = start_off;
4638             351 :         union_buf->size = start_size;
4639             351 :         memset (union_buf->buf, ~0, start_size);
4640             351 :         clear_padding_type (union_buf, TREE_TYPE (field), fldsz, for_auto_init);
4641             351 :         clear_padding_add_padding (union_buf, sz - fldsz);
4642             351 :         clear_padding_flush (union_buf, true);
4643                 :       }
4644                 :
4645             128 :   if (buf == union_buf)
4646                 :     {
                     :       /* Nested-union case: resume BUF just past the union, restoring
                     :          the sub-word tail as all-ones (padding so far).  */
4647              42 :       buf->off = next_off;
4648              42 :       buf->size = next_off % UNITS_PER_WORD;
4649              42 :       buf->off -= buf->size;
4650              42 :       memset (buf->buf, ~0, buf->size);
4651                 :     }
4652              86 :   else if (sz + buf->size <= clear_padding_buf_size)
4653              86 :     buf->size += sz;
4654                 :   else
4655                 :     {
                     :       /* Heap-allocated mask: copy it back into BUF in
                     :          buffer-sized chunks, then release it.  */
4656               0 :       unsigned char *union_ptr = union_buf->union_ptr;
4657               0 :       while (sz)
4658                 :         {
4659               0 :           clear_padding_flush (buf, false);
4660               0 :           HOST_WIDE_INT this_sz
4661               0 :             = MIN ((unsigned HOST_WIDE_INT) sz,
4662                 :                    clear_padding_buf_size - buf->size);
4663               0 :           memcpy (buf->buf + buf->size, union_ptr, this_sz);
4664               0 :           buf->size += this_sz;
4665               0 :           union_ptr += this_sz;
4666               0 :           sz -= this_sz;
4667                 :         }
4668               0 :       XDELETE (union_buf->union_ptr);
4669                 :     }
4670             128 : }
4671 :
4672 : /* The only known floating point formats with padding bits are the
4673 : IEEE extended ones. */
4674 :
4675                 : static bool
4676           34378 : clear_padding_real_needs_padding_p (tree type)
4677                 : {
4678           34378 :   const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
                     :   /* Per the comment above: only IEEE extended formats have padding.
                     :      Sign bit at position 79 or 95 identifies the 80-bit payload
                     :      stored in 10/12/16-byte objects (presumably x86/m68k extended;
                     :      confirm against real.cc format tables).  */
4679           34378 :   return (fmt->b == 2
4680           33919 :           && fmt->signbit_ro == fmt->signbit_rw
4681           68297 :           && (fmt->signbit_ro == 79 || fmt->signbit_ro == 95));
4682                 : }
4683 :
4684 : /* _BitInt has padding bits if it isn't extended in the ABI and has smaller
4685 : precision than bits in limb or corresponding number of limbs. */
4686 :
4687                 : static bool
4688               6 : clear_padding_bitint_needs_padding_p (tree type)
4689                 : {
4690               6 :   struct bitint_info info;
4691               6 :   bool ok = targetm.c.bitint_type_info (TYPE_PRECISION (type), &info);
4692               6 :   gcc_assert (ok);
                     :   /* Extended _BitInt ABIs define all bits; no padding.  */
4693               6 :   if (info.extended)
4694                 :     return false;
4695               6 :   scalar_int_mode limb_mode = as_a <scalar_int_mode> (info.abi_limb_mode);
                     :   /* Smaller than one limb: the limb's excess bits are padding.
                     :      Exactly a multiple of limbs: no padding.  Otherwise the last
                     :      limb is only partially used.  */
4696               6 :   if (TYPE_PRECISION (type) < GET_MODE_PRECISION (limb_mode))
4697                 :     return true;
4698               4 :   else if (TYPE_PRECISION (type) == GET_MODE_PRECISION (limb_mode))
4699                 :     return false;
4700                 :   else
4701               4 :     return (((unsigned) TYPE_PRECISION (type))
4702               4 :             % GET_MODE_PRECISION (limb_mode)) != 0;
4703                 : }
4704 :
4705 : /* Return true if TYPE might contain any padding bits. */
4706 :
4707                 : bool
4708          907894 : clear_padding_type_may_have_padding_p (tree type)
4709                 : {
4710         1041122 :   switch (TREE_CODE (type))
4711                 :     {
                     :     /* Aggregates are conservatively assumed to have padding.  */
4712                 :     case RECORD_TYPE:
4713                 :     case UNION_TYPE:
4714                 :       return true;
                     :     /* Element-wise containers: recurse on the element type.  */
4715          133228 :     case ARRAY_TYPE:
4716          133228 :     case COMPLEX_TYPE:
4717          133228 :     case VECTOR_TYPE:
4718          133228 :       return clear_padding_type_may_have_padding_p (TREE_TYPE (type));
4719            1663 :     case REAL_TYPE:
4720            1663 :       return clear_padding_real_needs_padding_p (type);
4721               6 :     case BITINT_TYPE:
4722               6 :       return clear_padding_bitint_needs_padding_p (type);
4723           38070 :     default:
4724           38070 :       return false;
4725                 :     }
4726                 : }
4727 :
4728 : /* Return true if TYPE has padding bits aside from those in fields,
4729 : elements, etc. */
4730 :
4731                 : bool
4732         1164896 : type_has_padding_at_level_p (tree type)
4733                 : {
4734         1164896 :   switch (TREE_CODE (type))
4735                 :     {
4736         1027912 :     case RECORD_TYPE:
4737         1027912 :       {
                     :         /* Walk fields accumulating the expected next bit position;
                     :            any gap, explicit padding field, or sizeless field means
                     :            padding at this level.  */
4738         1027912 :         tree bitpos = size_zero_node;
4739                 :         /* Expect fields to be sorted by bit position.  */
4740         7446050 :         for (tree f = TYPE_FIELDS (type); f; f = DECL_CHAIN (f))
4741         6422288 :           if (TREE_CODE (f) == FIELD_DECL)
4742                 :             {
4743         2260244 :               if (DECL_PADDING_P (f))
4744                 :                 return true;
4745         2260241 :               tree pos = bit_position (f);
4746         2260241 :               if (simple_cst_equal (bitpos, pos) != 1)
4747                 :                 return true;
4748         2256117 :               if (!DECL_SIZE (f))
4749                 :                 return true;
4750         2256094 :               bitpos = int_const_binop (PLUS_EXPR, pos, DECL_SIZE (f));
4751                 :             }
                     :         /* Trailing gap between last field and the record size.  */
4752         1023762 :         if (simple_cst_equal (bitpos, TYPE_SIZE (type)) != 1)
4753                 :           return true;
4754                 :         return false;
4755                 :       }
4756               3 :     case UNION_TYPE:
4757               3 :     case QUAL_UNION_TYPE:
4758               3 :       bool any_fields;
4759               3 :       any_fields = false;
4760                 :       /* If any of the fields is smaller than the whole, there is padding.  */
4761               6 :       for (tree f = TYPE_FIELDS (type); f; f = DECL_CHAIN (f))
4762               3 :         if (TREE_CODE (f) != FIELD_DECL || TREE_TYPE (f) == error_mark_node)
4763               3 :           continue;
4764               0 :         else if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
4765               0 :                                    TYPE_SIZE (type)) != 1)
4766                 :           return true;
4767                 :         else
4768                 :           any_fields = true;
4769                 :       /* If the union doesn't have any fields and still has non-zero size,
4770                 :          all of it is padding.  */
4771               3 :       if (!any_fields && !integer_zerop (TYPE_SIZE (type)))
4772                 :         return true;
4773                 :       return false;
4774                 :     case ARRAY_TYPE:
4775                 :     case COMPLEX_TYPE:
4776                 :     case VECTOR_TYPE:
4777                 :       /* No recursing here, no padding at this level.  */
4778                 :       return false;
4779               0 :     case REAL_TYPE:
4780               0 :       return clear_padding_real_needs_padding_p (type);
4781               0 :     case BITINT_TYPE:
4782               0 :       return clear_padding_bitint_needs_padding_p (type);
4783                 :     default:
4784                 :       return false;
4785                 :     }
4786                 : }
4787 :
4788 : /* Emit a runtime loop:
4789 : for (; buf.base != end; buf.base += sz)
4790 : __builtin_clear_padding (buf.base); */
4791 :
4792                 : static void
4793             114 : clear_padding_emit_loop (clear_padding_struct *buf, tree type,
4794                 :                          tree end, bool for_auto_init)
4795                 : {
                     :   /* l1 = loop body, l2 = loop test, l3 = exit; the initial goto l2
                     :      makes this a test-first loop, so a zero-iteration range emits
                     :      no body execution.  */
4796             114 :   tree l1 = create_artificial_label (buf->loc);
4797             114 :   tree l2 = create_artificial_label (buf->loc);
4798             114 :   tree l3 = create_artificial_label (buf->loc);
4799             114 :   gimple *g = gimple_build_goto (l2);
4800             114 :   gimple_set_location (g, buf->loc);
4801             114 :   gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4802             114 :   g = gimple_build_label (l1);
4803             114 :   gimple_set_location (g, buf->loc);
4804             114 :   gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
                     :   /* Body: clear one element's padding, then advance buf->base.  */
4805             114 :   clear_padding_type (buf, type, buf->sz, for_auto_init);
4806             114 :   clear_padding_flush (buf, true);
4807             114 :   g = gimple_build_assign (buf->base, POINTER_PLUS_EXPR, buf->base,
4808             114 :                            size_int (buf->sz));
4809             114 :   gimple_set_location (g, buf->loc);
4810             114 :   gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4811             114 :   g = gimple_build_label (l2);
4812             114 :   gimple_set_location (g, buf->loc);
4813             114 :   gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4814             114 :   g = gimple_build_cond (NE_EXPR, buf->base, end, l1, l3);
4815             114 :   gimple_set_location (g, buf->loc);
4816             114 :   gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4817             114 :   g = gimple_build_label (l3);
4818             114 :   gimple_set_location (g, buf->loc);
4819             114 :   gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4820             114 : }
4821 :
4822 : /* Clear padding bits for TYPE. Called recursively from
4823 : gimple_fold_builtin_clear_padding. If FOR_AUTO_INIT is true,
4824 : the __builtin_clear_padding is not called by the end user,
4825 : instead, it's inserted by the compiler to initialize the
4826 : paddings of automatic variable. Therefore, we should not
4827 : emit the error messages for flexible array members to confuse
4828 : the end user. */
4829 :
4830                 : static void
4831           37763 : clear_padding_type (clear_padding_struct *buf, tree type,
4832                 :                     HOST_WIDE_INT sz, bool for_auto_init)
4833                 : {
4834           37763 :   switch (TREE_CODE (type))
4835                 :     {
4836            1224 :     case RECORD_TYPE:
                     :       /* Walk fields in layout order; CUR_POS tracks the byte offset
                     :          covered so far, and gaps between fields are recorded as
                     :          padding via clear_padding_add_padding.  */
4837            1224 :       HOST_WIDE_INT cur_pos;
4838            1224 :       cur_pos = 0;
4839           15677 :       for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
4840           14453 :         if (TREE_CODE (field) == FIELD_DECL && !DECL_PADDING_P (field))
4841                 :           {
4842            3637 :             tree ftype = TREE_TYPE (field);
4843            3637 :             if (DECL_BIT_FIELD (field))
4844                 :               {
                     :                 /* Bit-field: first mark its whole byte span as padding,
                     :                    then clear (zero) the bits the bit-field actually
                     :                    occupies, endian-dependently.  */
4845             260 :                 HOST_WIDE_INT fldsz = TYPE_PRECISION (ftype);
4846             260 :                 if (fldsz == 0)
4847               0 :                   continue;
4848             260 :                 HOST_WIDE_INT pos = int_byte_position (field);
4849             260 :                 if (pos >= sz)
4850               0 :                   continue;
4851             260 :                 HOST_WIDE_INT bpos
4852             260 :                   = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
4853             260 :                 bpos %= BITS_PER_UNIT;
4854             260 :                 HOST_WIDE_INT end
4855             260 :                   = ROUND_UP (bpos + fldsz, BITS_PER_UNIT) / BITS_PER_UNIT;
4856             260 :                 if (pos + end > cur_pos)
4857                 :                   {
4858             199 :                     clear_padding_add_padding (buf, pos + end - cur_pos);
4859             199 :                     cur_pos = pos + end;
4860                 :                   }
4861             260 :                 gcc_assert (cur_pos > pos
4862                 :                             && ((unsigned HOST_WIDE_INT) buf->size
4863                 :                                 >= (unsigned HOST_WIDE_INT) cur_pos - pos));
4864             260 :                 unsigned char *p = buf->buf + buf->size - (cur_pos - pos);
4865             260 :                 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
4866                 :                   sorry_at (buf->loc, "PDP11 bit-field handling unsupported"
4867                 :                                       " in %qs", "__builtin_clear_padding");
4868             260 :                 else if (BYTES_BIG_ENDIAN)
4869                 :                   {
4870                 :                     /* Big endian.  */
4871                 :                     if (bpos + fldsz <= BITS_PER_UNIT)
4872                 :                       *p &= ~(((1 << fldsz) - 1)
4873                 :                               << (BITS_PER_UNIT - bpos - fldsz));
4874                 :                     else
4875                 :                       {
4876                 :                         if (bpos)
4877                 :                           {
4878                 :                             *p &= ~(((1U << BITS_PER_UNIT) - 1) >> bpos);
4879                 :                             p++;
4880                 :                             fldsz -= BITS_PER_UNIT - bpos;
4881                 :                           }
4882                 :                         memset (p, 0, fldsz / BITS_PER_UNIT);
4883                 :                         p += fldsz / BITS_PER_UNIT;
4884                 :                         fldsz %= BITS_PER_UNIT;
4885                 :                         if (fldsz)
4886                 :                           *p &= ((1U << BITS_PER_UNIT) - 1) >> fldsz;
4887                 :                       }
4888                 :                   }
4889                 :                 else
4890                 :                   {
4891                 :                     /* Little endian.  */
4892             260 :                     if (bpos + fldsz <= BITS_PER_UNIT)
4893             159 :                       *p &= ~(((1 << fldsz) - 1) << bpos);
4894                 :                     else
4895                 :                       {
4896             101 :                         if (bpos)
4897                 :                           {
4898              33 :                             *p &= ~(((1 << BITS_PER_UNIT) - 1) << bpos);
4899              33 :                             p++;
4900              33 :                             fldsz -= BITS_PER_UNIT - bpos;
4901                 :                           }
4902             101 :                         memset (p, 0, fldsz / BITS_PER_UNIT);
4903             101 :                         p += fldsz / BITS_PER_UNIT;
4904             101 :                         fldsz %= BITS_PER_UNIT;
4905             101 :                         if (fldsz)
4906              56 :                           *p &= ~((1 << fldsz) - 1);
4907                 :                       }
4908                 :                   }
4909                 :               }
4910            3377 :             else if (DECL_SIZE_UNIT (field) == NULL_TREE)
4911                 :               {
                     :                 /* Flexible array member: padding is not well defined.  */
4912              32 :                 if (ftype == error_mark_node)
4913               0 :                   continue;
4914              32 :                 gcc_assert (TREE_CODE (ftype) == ARRAY_TYPE
4915                 :                             && !COMPLETE_TYPE_P (ftype));
4916              32 :                 if (!buf->clear_in_mask && !for_auto_init)
4917              24 :                   error_at (buf->loc, "flexible array member %qD does not "
4918                 :                                       "have well defined padding bits for %qs",
4919                 :                             field, "__builtin_clear_padding");
4920                 :               }
4921            3345 :             else if (is_empty_type (ftype))
4922             212 :               continue;
4923                 :             else
4924                 :               {
                     :                 /* Ordinary field: pad the gap before it, then recurse.  */
4925            3133 :                 HOST_WIDE_INT pos = int_byte_position (field);
4926            3133 :                 if (pos >= sz)
4927               0 :                   continue;
4928            3133 :                 HOST_WIDE_INT fldsz = tree_to_shwi (DECL_SIZE_UNIT (field));
4929            3133 :                 gcc_assert (pos >= 0 && fldsz >= 0 && pos >= cur_pos);
4930            3133 :                 clear_padding_add_padding (buf, pos - cur_pos);
4931            3133 :                 cur_pos = pos;
4932            3133 :                 if (tree asbase = lang_hooks.types.classtype_as_base (field))
4933             188 :                   ftype = asbase;
4934            3133 :                 clear_padding_type (buf, ftype, fldsz, for_auto_init);
4935            3133 :                 cur_pos += fldsz;
4936                 :               }
4937                 :           }
4938            1224 :       gcc_assert (sz >= cur_pos);
4939            1224 :       clear_padding_add_padding (buf, sz - cur_pos);
4940            1224 :       break;
4941             325 :     case ARRAY_TYPE:
4942             325 :       HOST_WIDE_INT nelts, fldsz;
4943             325 :       fldsz = int_size_in_bytes (TREE_TYPE (type));
4944             325 :       if (fldsz == 0)
4945                 :         break;
4946             311 :       nelts = sz / fldsz;
4947             311 :       if (nelts > 1
4948             304 :           && sz > 8 * UNITS_PER_WORD
4949              78 :           && buf->union_ptr == NULL
4950             389 :           && clear_padding_type_may_have_padding_p (TREE_TYPE (type)))
4951                 :         {
4952                 :           /* For sufficiently large array of more than one elements,
4953                 :              emit a runtime loop to keep code size manageable.  */
4954              66 :           tree base = buf->base;
4955              66 :           unsigned int prev_align = buf->align;
4956              66 :           HOST_WIDE_INT off = buf->off + buf->size;
4957              66 :           HOST_WIDE_INT prev_sz = buf->sz;
4958              66 :           clear_padding_flush (buf, true);
4959              66 :           tree elttype = TREE_TYPE (type);
4960              66 :           buf->base = create_tmp_var (build_pointer_type (elttype));
4961              66 :           tree end = make_ssa_name (TREE_TYPE (buf->base));
4962              66 :           gimple *g = gimple_build_assign (buf->base, POINTER_PLUS_EXPR,
4963              66 :                                            base, size_int (off));
4964              66 :           gimple_set_location (g, buf->loc);
4965              66 :           gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4966              66 :           g = gimple_build_assign (end, POINTER_PLUS_EXPR, buf->base,
4967              66 :                                    size_int (sz));
4968              66 :           gimple_set_location (g, buf->loc);
4969              66 :           gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4970              66 :           buf->sz = fldsz;
4971              66 :           buf->align = TYPE_ALIGN (elttype);
4972              66 :           buf->off = 0;
4973              66 :           buf->size = 0;
4974              66 :           clear_padding_emit_loop (buf, elttype, end, for_auto_init);
                     :           /* Restore BUF's saved state and resume past the array.  */
4975              66 :           off += sz;
4976              66 :           buf->base = base;
4977              66 :           buf->sz = prev_sz;
4978              66 :           buf->align = prev_align;
4979              66 :           buf->size = off % UNITS_PER_WORD;
4980              66 :           buf->off = off - buf->size;
4981              66 :           memset (buf->buf, 0, buf->size);
4982              66 :           break;
4983                 :         }
4984            1163 :       for (HOST_WIDE_INT i = 0; i < nelts; i++)
4985             918 :         clear_padding_type (buf, TREE_TYPE (type), fldsz, for_auto_init);
4986                 :       break;
4987             128 :     case UNION_TYPE:
4988             128 :       clear_padding_union (buf, type, sz, for_auto_init);
4989             128 :       break;
4990           32715 :     case REAL_TYPE:
4991           32715 :       gcc_assert ((size_t) sz <= clear_padding_unit);
4992           32715 :       if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
4993               0 :         clear_padding_flush (buf, false);
4994           32715 :       if (clear_padding_real_needs_padding_p (type))
4995                 :         {
4996                 :           /* Use native_interpret_real + native_encode_expr to figure out
4997                 :              which bits are padding.  */
4998            1395 :           memset (buf->buf + buf->size, ~0, sz);
4999            1395 :           tree cst = native_interpret_real (type, buf->buf + buf->size, sz);
5000            1395 :           gcc_assert (cst && TREE_CODE (cst) == REAL_CST);
5001            1395 :           int len = native_encode_expr (cst, buf->buf + buf->size, sz);
5002            1395 :           gcc_assert (len > 0 && (size_t) len == (size_t) sz);
                     :           /* Invert: bytes that survived the encode round trip as ~0
                     :              are value bits, the rest are padding.  */
5003           23715 :           for (size_t i = 0; i < (size_t) sz; i++)
5004           22320 :             buf->buf[buf->size + i] ^= ~0;
5005                 :         }
5006                 :       else
5007           31320 :         memset (buf->buf + buf->size, 0, sz);
5008           32715 :       buf->size += sz;
5009           32715 :       break;
5010               0 :     case COMPLEX_TYPE:
5011               0 :       fldsz = int_size_in_bytes (TREE_TYPE (type));
5012               0 :       clear_padding_type (buf, TREE_TYPE (type), fldsz, for_auto_init);
5013               0 :       clear_padding_type (buf, TREE_TYPE (type), fldsz, for_auto_init);
5014               0 :       break;
5015               0 :     case VECTOR_TYPE:
5016               0 :       nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
5017               0 :       fldsz = int_size_in_bytes (TREE_TYPE (type));
5018               0 :       for (HOST_WIDE_INT i = 0; i < nelts; i++)
5019               0 :         clear_padding_type (buf, TREE_TYPE (type), fldsz, for_auto_init);
5020                 :       break;
5021               7 :     case NULLPTR_TYPE:
                     :       /* nullptr_t carries no value bits; the whole object is padding.  */
5022               7 :       gcc_assert ((size_t) sz <= clear_padding_unit);
5023               7 :       if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
5024               0 :         clear_padding_flush (buf, false);
5025               7 :       memset (buf->buf + buf->size, ~0, sz);
5026               7 :       buf->size += sz;
5027               7 :       break;
5028               4 :     case BITINT_TYPE:
5029               4 :       {
5030               4 :         struct bitint_info info;
5031               4 :         bool ok = targetm.c.bitint_type_info (TYPE_PRECISION (type), &info);
5032               4 :         gcc_assert (ok);
5033               4 :         scalar_int_mode limb_mode
5034               4 :           = as_a <scalar_int_mode> (info.abi_limb_mode);
5035               4 :         if (TYPE_PRECISION (type) <= GET_MODE_PRECISION (limb_mode))
5036                 :           {
                     :             /* Single-limb _BitInt: bits above the precision are padding
                     :                unless the ABI extends them.  */
5037               2 :             gcc_assert ((size_t) sz <= clear_padding_unit);
5038               2 :             if ((unsigned HOST_WIDE_INT) sz + buf->size
5039                 :                 > clear_padding_buf_size)
5040               0 :               clear_padding_flush (buf, false);
5041               2 :             if (!info.extended
5042               2 :                 && TYPE_PRECISION (type) < GET_MODE_PRECISION (limb_mode))
5043                 :               {
5044               2 :                 int tprec = GET_MODE_PRECISION (limb_mode);
5045               2 :                 int prec = TYPE_PRECISION (type);
5046               2 :                 tree t = build_nonstandard_integer_type (tprec, 1);
5047               2 :                 tree cst = wide_int_to_tree (t, wi::mask (prec, true, tprec));
5048               2 :                 int len = native_encode_expr (cst, buf->buf + buf->size, sz);
5049               2 :                 gcc_assert (len > 0 && (size_t) len == (size_t) sz);
5050                 :               }
5051                 :             else
5052               0 :               memset (buf->buf + buf->size, 0, sz);
5053               2 :             buf->size += sz;
5054               2 :             break;
5055                 :           }
                     :         /* Multi-limb _BitInt: only the most significant limb can hold
                     :            padding bits.  */
5056               2 :         tree limbtype
5057               2 :           = build_nonstandard_integer_type (GET_MODE_PRECISION (limb_mode), 1);
5058               2 :         fldsz = int_size_in_bytes (limbtype);
5059               2 :         nelts = int_size_in_bytes (type) / fldsz;
5060              13 :         for (HOST_WIDE_INT i = 0; i < nelts; i++)
5061                 :           {
5062              11 :             if (!info.extended
5063              11 :                 && i == (info.big_endian ? 0 : nelts - 1)
5064              13 :                 && (((unsigned) TYPE_PRECISION (type))
5065               2 :                     % TYPE_PRECISION (limbtype)) != 0)
5066                 :               {
5067               2 :                 int tprec = GET_MODE_PRECISION (limb_mode);
5068               2 :                 int prec = (((unsigned) TYPE_PRECISION (type)) % tprec);
5069               2 :                 tree cst = wide_int_to_tree (limbtype,
5070               2 :                                              wi::mask (prec, true, tprec));
5071               2 :                 int len = native_encode_expr (cst, buf->buf + buf->size,
5072                 :                                               fldsz);
5073               2 :                 gcc_assert (len > 0 && (size_t) len == (size_t) fldsz);
5074               2 :                 buf->size += fldsz;
5075                 :               }
5076                 :             else
5077               9 :               clear_padding_type (buf, limbtype, fldsz, for_auto_init);
5078                 :           }
5079                 :         break;
5080                 :       }
5081            3360 :     default:
                     :       /* Scalar with no padding: record SZ all-zero (value) bytes.  */
5082            3360 :       gcc_assert ((size_t) sz <= clear_padding_unit);
5083            3360 :       if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
5084               0 :         clear_padding_flush (buf, false);
5085            3360 :       memset (buf->buf + buf->size, 0, sz);
5086            3360 :       buf->size += sz;
5087            3360 :       break;
5088                 :     }
5089           37763 : }
5090 :
5091 : /* Clear padding bits of TYPE in MASK. */
5092 :
5093                 : void
5094           32695 : clear_type_padding_in_mask (tree type, unsigned char *mask)
5095                 : {
                     :   /* Mask-only mode: clear_in_mask set and union_ptr pointing at MASK
                     :      means no GIMPLE statements are emitted; the padding computation
                     :      just updates MASK when flushed.  */
5096           32695 :   clear_padding_struct buf;
5097           32695 :   buf.loc = UNKNOWN_LOCATION;
5098           32695 :   buf.clear_in_mask = true;
5099           32695 :   buf.base = NULL_TREE;
5100           32695 :   buf.alias_type = NULL_TREE;
5101           32695 :   buf.gsi = NULL;
5102           32695 :   buf.align = 0;
5103           32695 :   buf.off = 0;
5104           32695 :   buf.padding_bytes = 0;
5105           32695 :   buf.sz = int_size_in_bytes (type);
5106           32695 :   buf.size = 0;
5107           32695 :   buf.union_ptr = mask;
5108           32695 :   clear_padding_type (&buf, type, buf.sz, false);
5109           32695 :   clear_padding_flush (&buf, true);
5110           32695 : }
5111 :
5112 : /* Fold __builtin_clear_padding builtin. */
5113 :
5114                 : static bool
5115             629 : gimple_fold_builtin_clear_padding (gimple_stmt_iterator *gsi)
5116                 : {
5117             629 :   gimple *stmt = gsi_stmt (*gsi);
5118             629 :   gcc_assert (gimple_call_num_args (stmt) == 2);
5119             629 :   tree ptr = gimple_call_arg (stmt, 0);
5120             629 :   tree typearg = gimple_call_arg (stmt, 1);
5121                 :   /* The 2nd argument of __builtin_clear_padding's value is used to
5122                 :      distinguish whether this call is made by the user or by the compiler
5123                 :      for automatic variable initialization.  */
5124             629 :   bool for_auto_init = (bool) TREE_INT_CST_LOW (typearg);
5125             629 :   tree type = TREE_TYPE (TREE_TYPE (typearg));
5126             629 :   location_t loc = gimple_location (stmt);
5127             629 :   clear_padding_struct buf;
                     :   /* Remember the statement before the call so we can tell below
                     :      whether any statements were actually emitted.  */
5128             629 :   gimple_stmt_iterator gsiprev = *gsi;
5129                 :   /* This should be folded during the lower pass.  */
5130            1258 :   gcc_assert (!gimple_in_ssa_p (cfun) && cfun->cfg == NULL);
5131             629 :   gcc_assert (COMPLETE_TYPE_P (type));
5132             629 :   gsi_prev (&gsiprev);
5133                 :
5134             629 :   buf.loc = loc;
5135             629 :   buf.clear_in_mask = false;
5136             629 :   buf.base = ptr;
5137             629 :   buf.alias_type = NULL_TREE;
5138             629 :   buf.gsi = gsi;
5139             629 :   buf.align = get_pointer_alignment (ptr);
5140             629 :   unsigned int talign = min_align_of_type (type) * BITS_PER_UNIT;
5141             629 :   buf.align = MAX (buf.align, talign);
5142             629 :   buf.off = 0;
5143             629 :   buf.padding_bytes = 0;
5144             629 :   buf.size = 0;
5145             629 :   buf.sz = int_size_in_bytes (type);
5146             629 :   buf.union_ptr = NULL;
5147             629 :   if (buf.sz < 0 && int_size_in_bytes (strip_array_types (type)) < 0)
5148               1 :     sorry_at (loc, "%s not supported for variable length aggregates",
5149                 :               "__builtin_clear_padding");
5150                 :   /* The implementation currently assumes 8-bit host and target
5151                 :      chars which is the case for all currently supported targets
5152                 :      and hosts and is required e.g. for native_{encode,interpret}* APIs.  */
5153             628 :   else if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
5154                 :     sorry_at (loc, "%s not supported on this target",
5155                 :               "__builtin_clear_padding");
5156             628 :   else if (!clear_padding_type_may_have_padding_p (type))
5157                 :     ;
5158             591 :   else if (TREE_CODE (type) == ARRAY_TYPE && buf.sz < 0)
5159                 :     {
                     :       /* Variable-length array of fixed-size elements: emit a runtime
                     :          loop over the elements.  */
5160              48 :       tree sz = TYPE_SIZE_UNIT (type);
5161              48 :       tree elttype = type;
5162                 :       /* Only supports C/C++ VLAs and flattens all the VLA levels.  */
5163              48 :       while (TREE_CODE (elttype) == ARRAY_TYPE
5164             144 :              && int_size_in_bytes (elttype) < 0)
5165              96 :         elttype = TREE_TYPE (elttype);
5166              48 :       HOST_WIDE_INT eltsz = int_size_in_bytes (elttype);
5167              48 :       gcc_assert (eltsz >= 0);
5168              48 :       if (eltsz)
5169                 :         {
5170              48 :           buf.base = create_tmp_var (build_pointer_type (elttype));
5171              48 :           tree end = make_ssa_name (TREE_TYPE (buf.base));
5172              48 :           gimple *g = gimple_build_assign (buf.base, ptr);
5173              48 :           gimple_set_location (g, loc);
5174              48 :           gsi_insert_before (gsi, g, GSI_SAME_STMT);
5175              48 :           g = gimple_build_assign (end, POINTER_PLUS_EXPR, buf.base, sz);
5176              48 :           gimple_set_location (g, loc);
5177              48 :           gsi_insert_before (gsi, g, GSI_SAME_STMT);
5178              48 :           buf.sz = eltsz;
5179              48 :           buf.align = TYPE_ALIGN (elttype);
5180              48 :           buf.alias_type = build_pointer_type (elttype);
5181              48 :           clear_padding_emit_loop (&buf, elttype, end, for_auto_init);
5182                 :         }
5183                 :     }
5184                 :   else
5185                 :     {
                     :       /* Fixed-size case: materialize the base address if needed, then
                     :          emit the padding-clearing stores directly.  */
5186             543 :       if (!is_gimple_mem_ref_addr (buf.base))
5187                 :         {
5188              28 :           buf.base = make_ssa_name (TREE_TYPE (ptr));
5189              28 :           gimple *g = gimple_build_assign (buf.base, ptr);
5190              28 :           gimple_set_location (g, loc);
5191              28 :           gsi_insert_before (gsi, g, GSI_SAME_STMT);
5192                 :         }
5193             543 :       buf.alias_type = build_pointer_type (type);
5194             543 :       clear_padding_type (&buf, type, buf.sz, for_auto_init);
5195             543 :       clear_padding_flush (&buf, true);
5196                 :     }
5197                 :
                     :   /* Replace the call: with a nop if nothing was emitted before it,
                     :      otherwise remove it and leave the iterator on the last emitted
                     :      statement.  */
5198             629 :   gimple_stmt_iterator gsiprev2 = *gsi;
5199             629 :   gsi_prev (&gsiprev2);
5200             629 :   if (gsi_stmt (gsiprev) == gsi_stmt (gsiprev2))
5201             126 :     gsi_replace (gsi, gimple_build_nop (), true);
5202                 :   else
5203                 :     {
5204             503 :       gsi_remove (gsi, true);
5205             503 :       *gsi = gsiprev2;
5206                 :     }
5207             629 :   return true;
5208                 : }
5209 :
5210 : /* Fold __builtin_constant_p builtin. */
5211 :
5212                 : static bool
5213           26233 : gimple_fold_builtin_constant_p (gimple_stmt_iterator *gsi)
5214                 : {
5215           26233 :   gcall *call = as_a<gcall*>(gsi_stmt (*gsi));
5216                 :
                     :   /* __builtin_constant_p takes exactly one argument; leave any
                     :      malformed call alone.  */
5217           26233 :   if (gimple_call_num_args (call) != 1)
5218                 :     return false;
5219                 :
5220           26228 :   tree arg = gimple_call_arg (call, 0);
5221           26228 :   tree result = fold_builtin_constant_p (arg);
5222                 :
5223                 :   /* Resolve __builtin_constant_p.  If it hasn't been
5224                 :      folded to integer_one_node by now, it's fairly
5225                 :      certain that the value simply isn't constant.  */
5226           51996 :   if (!result && fold_before_rtl_expansion_p ())
5227               3 :     result = integer_zero_node;
5228                 :
5229           26228 :   if (!result)
5230                 :     return false;
5231                 :
5232             463 :   gimplify_and_update_call_from_tree (gsi, result);
5233             463 :   return true;
5234                 : }
5235 :
5236 : /* If va_list type is a simple pointer and nothing special is needed,
5237 : optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
5238 : __builtin_va_end (&ap) out as NOP and __builtin_va_copy into a simple
5239 : pointer assignment. Returns true if a change happened. */
5240 :
static bool
gimple_fold_builtin_stdarg (gimple_stmt_iterator *gsi, gcall *call)
{
  /* These shouldn't be folded before pass_stdarg.  */
  if (!fold_before_rtl_expansion_p ())
    return false;

  tree callee, lhs, rhs, cfun_va_list;
  bool va_list_simple_ptr;
  location_t loc = gimple_location (call);
  gimple *nstmt0, *nstmt;
  tree tlhs, oldvdef, newvdef;

  callee = gimple_call_fndecl (call);

  /* The transformations below are only valid when the target's va_list
     is a plain `void *' or `char *' pointer.  */
  cfun_va_list = targetm.fn_abi_va_list (callee);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
		       && (TREE_TYPE (cfun_va_list) == void_type_node
			   || TREE_TYPE (cfun_va_list) == char_type_node);

  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_VA_START:
      /* Only fold when the target has no special va_start expander and
	 __builtin_next_arg is available to substitute for it.  */
      if (!va_list_simple_ptr
	  || targetm.expand_builtin_va_start != NULL
	  || !builtin_decl_explicit_p (BUILT_IN_NEXT_ARG))
	return false;

      if (gimple_call_num_args (call) != 2)
	return false;

      /* The first argument must be a pointer to the va_list object.  */
      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
	  || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
	     != TYPE_MAIN_VARIANT (cfun_va_list))
	return false;
      /* Create `tlhs = __builtin_next_arg(0);`.  */
      tlhs = make_ssa_name (cfun_va_list);
      nstmt0 = gimple_build_call (builtin_decl_explicit (BUILT_IN_NEXT_ARG), 1, integer_zero_node);
      lhs = fold_build2 (MEM_REF, cfun_va_list, lhs, build_zero_cst (TREE_TYPE (lhs)));
      gimple_call_set_lhs (nstmt0, tlhs);
      gimple_set_location (nstmt0, loc);
      gimple_move_vops (nstmt0, call);
      gsi_replace (gsi, nstmt0, false);
      /* The call's vdef moved to nstmt0; give nstmt0 a fresh intermediate
	 vdef and hand the original vdef to the store created below, so the
	 virtual use-def chain after the call remains valid.  */
      oldvdef = gimple_vdef (nstmt0);
      newvdef = make_ssa_name (gimple_vop (cfun), nstmt0);
      gimple_set_vdef (nstmt0, newvdef);

      /* Create `*lhs = tlhs;`.  */
      nstmt = gimple_build_assign (lhs, tlhs);
      gimple_set_location (nstmt, loc);
      gimple_set_vuse (nstmt, newvdef);
      gimple_set_vdef (nstmt, oldvdef);
      SSA_NAME_DEF_STMT (oldvdef) = nstmt;
      gsi_insert_after (gsi, nstmt, GSI_NEW_STMT);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Simplified\n  ");
	  print_gimple_stmt (dump_file, call, 0, dump_flags);
	  fprintf (dump_file, "into\n  ");
	  print_gimple_stmt (dump_file, nstmt0, 0, dump_flags);
	  fprintf (dump_file, "  ");
	  print_gimple_stmt (dump_file, nstmt, 0, dump_flags);
	}
      return true;

    case BUILT_IN_VA_COPY:
      if (!va_list_simple_ptr)
	return false;

      if (gimple_call_num_args (call) != 2)
	return false;

      /* First argument: pointer to the destination va_list.  */
      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
	  || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
	     != TYPE_MAIN_VARIANT (cfun_va_list))
	return false;
      /* Second argument: the source va_list value itself.  */
      rhs = gimple_call_arg (call, 1);
      if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
	  != TYPE_MAIN_VARIANT (cfun_va_list))
	return false;

      /* Replace the call with a plain pointer assignment `*lhs = rhs;`.  */
      lhs = fold_build2 (MEM_REF, cfun_va_list, lhs, build_zero_cst (TREE_TYPE (lhs)));
      nstmt = gimple_build_assign (lhs, rhs);
      gimple_set_location (nstmt, loc);
      gimple_move_vops (nstmt, call);
      gsi_replace (gsi, nstmt, false);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Simplified\n  ");
	  print_gimple_stmt (dump_file, call, 0, dump_flags);
	  fprintf (dump_file, "into\n  ");
	  print_gimple_stmt (dump_file, nstmt, 0, dump_flags);
	}
      return true;

    case BUILT_IN_VA_END:
      /* No effect, so the statement will be deleted.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Removed\n  ");
	  print_gimple_stmt (dump_file, call, 0, dump_flags);
	}
      unlink_stmt_vdef (call);
      release_defs (call);
      gsi_replace (gsi, gimple_build_nop (), true);
      return true;

    default:
      gcc_unreachable ();
    }
}
5356 :
5357 : /* Fold the non-target builtin at *GSI and return whether any simplification
5358 : was made. */
5359 :
static bool
gimple_fold_builtin (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
  tree callee = gimple_call_fndecl (stmt);

  /* Give up for always_inline inline builtins until they are
     inlined.  */
  if (avoid_folding_inline_builtin (callee))
    return false;

  unsigned n = gimple_call_num_args (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
  /* Dispatch to the dedicated folder for each builtin this file knows
     how to simplify; anything unhandled falls through to the generic
     folder at the bottom.  */
  switch (fcode)
    {
    case BUILT_IN_VA_START:
    case BUILT_IN_VA_END:
    case BUILT_IN_VA_COPY:
      return gimple_fold_builtin_stdarg (gsi, stmt);
    case BUILT_IN_BCMP:
      return gimple_fold_builtin_bcmp (gsi);
    case BUILT_IN_BCOPY:
      return gimple_fold_builtin_bcopy (gsi);
    case BUILT_IN_BZERO:
      return gimple_fold_builtin_bzero (gsi);

    case BUILT_IN_MEMSET:
      return gimple_fold_builtin_memset (gsi,
					 gimple_call_arg (stmt, 1),
					 gimple_call_arg (stmt, 2));
    case BUILT_IN_MEMCPY:
    case BUILT_IN_MEMPCPY:
    case BUILT_IN_MEMMOVE:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), fcode);
    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      return gimple_fold_builtin_sprintf_chk (gsi, fcode);
    case BUILT_IN_STRCAT_CHK:
      return gimple_fold_builtin_strcat_chk (gsi);
    case BUILT_IN_STRNCAT_CHK:
      return gimple_fold_builtin_strncat_chk (gsi);
    case BUILT_IN_STRLEN:
      return gimple_fold_builtin_strlen (gsi);
    case BUILT_IN_STRCPY:
      return gimple_fold_builtin_strcpy (gsi,
					 gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCPY:
      return gimple_fold_builtin_strncpy (gsi,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2));
    case BUILT_IN_STRCAT:
      return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCAT:
      return gimple_fold_builtin_strncat (gsi);
    case BUILT_IN_INDEX:
    case BUILT_IN_STRCHR:
      return gimple_fold_builtin_strchr (gsi, false);
    case BUILT_IN_RINDEX:
    case BUILT_IN_STRRCHR:
      /* Second argument true selects the reverse (last occurrence)
	 search.  */
      return gimple_fold_builtin_strchr (gsi, true);
    case BUILT_IN_STRSTR:
      return gimple_fold_builtin_strstr (gsi);
    case BUILT_IN_STRCMP:
    case BUILT_IN_STRCMP_EQ:
    case BUILT_IN_STRCASECMP:
    case BUILT_IN_STRNCMP:
    case BUILT_IN_STRNCMP_EQ:
    case BUILT_IN_STRNCASECMP:
      return gimple_fold_builtin_string_compare (gsi);
    case BUILT_IN_MEMCHR:
      return gimple_fold_builtin_memchr (gsi);
    case BUILT_IN_FPUTS:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), false);
    case BUILT_IN_FPUTS_UNLOCKED:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), true);
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      return gimple_fold_builtin_memory_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     gimple_call_arg (stmt, 3),
					     fcode);
    case BUILT_IN_STPCPY:
      return gimple_fold_builtin_stpcpy (gsi);
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      return gimple_fold_builtin_stxcpy_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     fcode);
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      return gimple_fold_builtin_stxncpy_chk (gsi,
					      gimple_call_arg (stmt, 0),
					      gimple_call_arg (stmt, 1),
					      gimple_call_arg (stmt, 2),
					      gimple_call_arg (stmt, 3),
					      fcode);
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      return gimple_fold_builtin_snprintf_chk (gsi, fcode);

    /* The printf family only folds for the argument counts it knows;
       calls with other arities fall through to the generic folder.  */
    case BUILT_IN_FPRINTF:
    case BUILT_IN_FPRINTF_UNLOCKED:
    case BUILT_IN_VFPRINTF:
      if (n == 2 || n == 3)
	return gimple_fold_builtin_fprintf (gsi,
					    gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1),
					    n == 3
					    ? gimple_call_arg (stmt, 2)
					    : NULL_TREE,
					    fcode);
      break;
    case BUILT_IN_FPRINTF_CHK:
    case BUILT_IN_VFPRINTF_CHK:
      if (n == 3 || n == 4)
	return gimple_fold_builtin_fprintf (gsi,
					    gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 2),
					    n == 4
					    ? gimple_call_arg (stmt, 3)
					    : NULL_TREE,
					    fcode);
      break;
    case BUILT_IN_PRINTF:
    case BUILT_IN_PRINTF_UNLOCKED:
    case BUILT_IN_VPRINTF:
      if (n == 1 || n == 2)
	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
					   n == 2
					   ? gimple_call_arg (stmt, 1)
					   : NULL_TREE, fcode);
      break;
    case BUILT_IN_PRINTF_CHK:
    case BUILT_IN_VPRINTF_CHK:
      if (n == 2 || n == 3)
	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
					   n == 3
					   ? gimple_call_arg (stmt, 2)
					   : NULL_TREE, fcode);
      break;
    case BUILT_IN_ACC_ON_DEVICE:
      return gimple_fold_builtin_acc_on_device (gsi,
						gimple_call_arg (stmt, 0));
    case BUILT_IN_OMP_IS_INITIAL_DEVICE:
      return gimple_fold_builtin_omp_is_initial_device (gsi);

    case BUILT_IN_OMP_GET_INITIAL_DEVICE:
      return gimple_fold_builtin_omp_get_initial_device (gsi);

    case BUILT_IN_OMP_GET_NUM_DEVICES:
      return gimple_fold_builtin_omp_get_num_devices (gsi);

    case BUILT_IN_REALLOC:
      return gimple_fold_builtin_realloc (gsi);

    case BUILT_IN_CLEAR_PADDING:
      return gimple_fold_builtin_clear_padding (gsi);

    case BUILT_IN_CONSTANT_P:
      return gimple_fold_builtin_constant_p (gsi);

    default:;
    }

  /* Try the generic builtin folder.  */
  bool ignore = (gimple_call_lhs (stmt) == NULL);
  tree result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      else
	result = fold_convert (gimple_call_return_type (stmt), result);
      gimplify_and_update_call_from_tree (gsi, result);
      return true;
    }

  return false;
}
5551 :
5552 : /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
5553 : function calls to constants, where possible. */
5554 :
5555 : static tree
5556 20589 : fold_internal_goacc_dim (const gimple *call)
5557 : {
5558 20589 : int axis = oacc_get_ifn_dim_arg (call);
5559 20589 : int size = oacc_get_fn_dim_size (current_function_decl, axis);
5560 20589 : tree result = NULL_TREE;
5561 20589 : tree type = TREE_TYPE (gimple_call_lhs (call));
5562 :
5563 20589 : switch (gimple_call_internal_fn (call))
5564 : {
5565 8915 : case IFN_GOACC_DIM_POS:
5566 : /* If the size is 1, we know the answer. */
5567 8915 : if (size == 1)
5568 8915 : result = build_int_cst (type, 0);
5569 : break;
5570 11674 : case IFN_GOACC_DIM_SIZE:
5571 : /* If the size is not dynamic, we know the answer. */
5572 11674 : if (size)
5573 11674 : result = build_int_cst (type, size);
5574 : break;
5575 : default:
5576 : break;
5577 : }
5578 :
5579 20589 : return result;
5580 : }
5581 :
5582 : /* Return true if stmt is __atomic_compare_exchange_N call which is suitable
5583 : for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
5584 : &var where var is only addressable because of such calls. */
5585 :
5586 : bool
5587 59137325 : optimize_atomic_compare_exchange_p (gimple *stmt)
5588 : {
5589 59137325 : if (gimple_call_num_args (stmt) != 6
5590 1580909 : || !flag_inline_atomics
5591 1580909 : || !optimize
5592 1580909 : || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
5593 1580850 : || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
5594 1043459 : || !gimple_vdef (stmt)
5595 60069512 : || !gimple_vuse (stmt))
5596 58205138 : return false;
5597 :
5598 932187 : tree fndecl = gimple_call_fndecl (stmt);
5599 932187 : switch (DECL_FUNCTION_CODE (fndecl))
5600 : {
5601 51740 : case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
5602 51740 : case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
5603 51740 : case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
5604 51740 : case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
5605 51740 : case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
5606 51740 : break;
5607 : default:
5608 : return false;
5609 : }
5610 :
5611 51740 : tree expected = gimple_call_arg (stmt, 1);
5612 51740 : if (TREE_CODE (expected) != ADDR_EXPR
5613 51740 : || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
5614 : return false;
5615 :
5616 49406 : tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
5617 49406 : if (!is_gimple_reg_type (etype)
5618 48990 : || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
5619 46592 : || TREE_THIS_VOLATILE (etype)
5620 46592 : || VECTOR_TYPE_P (etype)
5621 : || TREE_CODE (etype) == COMPLEX_TYPE
5622 : /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
5623 : might not preserve all the bits. See PR71716. */
5624 : || SCALAR_FLOAT_TYPE_P (etype)
5625 67374 : || maybe_ne (TYPE_PRECISION (etype),
5626 35936 : GET_MODE_BITSIZE (TYPE_MODE (etype))))
5627 37846 : return false;
5628 :
5629 11560 : tree weak = gimple_call_arg (stmt, 3);
5630 11560 : if (!integer_zerop (weak) && !integer_onep (weak))
5631 : return false;
5632 :
5633 11560 : tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
5634 11560 : tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
5635 11560 : machine_mode mode = TYPE_MODE (itype);
5636 :
5637 11560 : if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
5638 : == CODE_FOR_nothing
5639 11560 : && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
5640 : return false;
5641 :
5642 23120 : if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
5643 : return false;
5644 :
5645 : return true;
5646 : }
5647 :
5648 : /* Fold
5649 : r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
5650 : into
5651 : _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
5652 : i = IMAGPART_EXPR <t>;
5653 : r = (_Bool) i;
5654 : e = REALPART_EXPR <t>; */
5655 :
void
fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree fndecl = gimple_call_fndecl (stmt);
  tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  /* ITYPE is the integral value type, taken from the builtin's third
     formal parameter; the internal function returns a complex of it,
     packing the old value and the success flag.  */
  tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
  tree ctype = build_complex_type (itype);
  /* optimize_atomic_compare_exchange_p has verified arg 1 is &var.  */
  tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
  bool throws = false;
  edge e = NULL;
  /* Load the expected variable into an SSA name before the call.  */
  gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
				   expected);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);
  gimple_stmt_iterator gsiret = gsi_for_stmt (g);
  /* Bit-cast it to ITYPE if the types don't match exactly.  */
  if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
    {
      g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
			       build1 (VIEW_CONVERT_EXPR, itype,
				       gimple_assign_lhs (g)));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }
  /* Encode weakness and operand size into the fourth argument:
     w * 256 + N, as documented in the comment above.  */
  int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
	     + int_size_in_bytes (itype);
  g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
				  gimple_call_arg (stmt, 0),
				  gimple_assign_lhs (g),
				  gimple_call_arg (stmt, 2),
				  build_int_cst (integer_type_node, flag),
				  gimple_call_arg (stmt, 4),
				  gimple_call_arg (stmt, 5));
  tree lhs = make_ssa_name (ctype);
  gimple_call_set_lhs (g, lhs);
  gimple_move_vops (g, stmt);
  tree oldlhs = gimple_call_lhs (stmt);
  /* If the original call could throw internally, the result extraction
     must go on the fall-through edge, after the EH edge split.  */
  if (stmt_can_throw_internal (cfun, stmt))
    {
      throws = true;
      e = find_fallthru_edge (gsi_bb (*gsi)->succs);
    }
  gimple_call_set_nothrow (as_a <gcall *> (g),
			   gimple_call_nothrow_p (as_a <gcall *> (stmt)));
  gimple_call_set_lhs (stmt, NULL_TREE);
  gsi_replace (gsi, g, true);
  if (oldlhs)
    {
      /* r = (_Bool) IMAGPART_EXPR <t>; the imaginary part carries the
	 success flag.  */
      g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
			       build1 (IMAGPART_EXPR, itype, lhs));
      if (throws)
	{
	  gsi_insert_on_edge_immediate (e, g);
	  *gsi = gsi_for_stmt (g);
	}
      else
	gsi_insert_after (gsi, g, GSI_NEW_STMT);
      g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }
  /* e = REALPART_EXPR <t>; the real part is the value seen in memory.  */
  g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
			   build1 (REALPART_EXPR, itype, lhs));
  if (throws && oldlhs == NULL_TREE)
    {
      gsi_insert_on_edge_immediate (e, g);
      *gsi = gsi_for_stmt (g);
    }
  else
    gsi_insert_after (gsi, g, GSI_NEW_STMT);
  /* Convert back to the expected variable's type if needed and store
     the observed value into it.  */
  if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
    {
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
			       VIEW_CONVERT_EXPR,
			       build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
				       gimple_assign_lhs (g)));
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }
  g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
  gsi_insert_after (gsi, g, GSI_NEW_STMT);
  /* Leave the iterator at the initial load of the expected value.  */
  *gsi = gsiret;
}
5735 :
5736 : /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
5737 : doesn't fit into TYPE. The test for overflow should be regardless of
5738 : -fwrapv, and even for unsigned types. */
5739 :
5740 : bool
5741 414532 : arith_overflowed_p (enum tree_code code, const_tree type,
5742 : const_tree arg0, const_tree arg1)
5743 : {
5744 414532 : widest2_int warg0 = widest2_int_cst (arg0);
5745 414532 : widest2_int warg1 = widest2_int_cst (arg1);
5746 414532 : widest2_int wres;
5747 414532 : switch (code)
5748 : {
5749 96218 : case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
5750 115416 : case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
5751 204077 : case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
5752 0 : default: gcc_unreachable ();
5753 : }
5754 414532 : signop sign = TYPE_SIGN (type);
5755 414532 : if (sign == UNSIGNED && wi::neg_p (wres))
5756 : return true;
5757 342663 : return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
5758 414544 : }
5759 :
/* Mask state for partial load/store operations (mask and length).  */
enum mask_load_store_state {
  MASK_ALL_INACTIVE,	/* All lanes/elements are inactive (can be elided).  */
  MASK_ALL_ACTIVE,	/* All lanes/elements are active (unconditional).  */
  MASK_UNKNOWN		/* Mixed, or not determinable at compile time.  */
};
5766 :
5767 : /* Check the mask/length state of IFN_{MASK,LEN,MASK_LEN}_LOAD/STORE call CALL.
5768 : Returns whether all elements are active, all inactive, or mixed.
5769 : VECTYPE is the vector type of the operation. */
5770 :
static enum mask_load_store_state
partial_load_store_mask_state (gcall *call, tree vectype)
{
  internal_fn ifn = gimple_call_internal_fn (call);
  /* Either index is -1 when the internal function has no such operand.  */
  int mask_index = internal_fn_mask_index (ifn);
  int len_index = internal_fn_len_index (ifn);

  /* Extract length and mask arguments up front.  */
  tree len = len_index != -1 ? gimple_call_arg (call, len_index) : NULL_TREE;
  /* The bias operand immediately follows the length operand.  */
  tree bias = len ? gimple_call_arg (call, len_index + 1) : NULL_TREE;
  tree mask = mask_index != -1 ? gimple_call_arg (call, mask_index) : NULL_TREE;

  poly_int64 nelts = GET_MODE_NUNITS (TYPE_MODE (vectype));

  /* -1 is a sentinel for "no constant length known"; it is mapped to the
     full element count before the element scan below.  */
  poly_widest_int wlen = -1;
  bool full_length_p = !len;  /* No length means full length.  */

  /* Compute effective length.  */
  if (len && poly_int_tree_p (len))
    {
      gcc_assert (TREE_CODE (bias) == INTEGER_CST);
      wlen = wi::to_poly_widest (len) + wi::to_widest (bias);

      if (known_eq (wlen, 0))
	return MASK_ALL_INACTIVE;

      if (known_eq (wlen, nelts))
	full_length_p = true;
      else
	full_length_p = false;
    }

  /* Check mask for early return cases.  */
  if (mask)
    {
      if (integer_zerop (mask))
	return MASK_ALL_INACTIVE;

      if (full_length_p && integer_all_onesp (mask))
	return MASK_ALL_ACTIVE;
    }
  else if (full_length_p)
    /* No mask and full length means all active.  */
    return MASK_ALL_ACTIVE;

  /* For VLA vectors, we can't do much more.  */
  if (!nelts.is_constant ())
    return MASK_UNKNOWN;

  /* Same for VLS vectors with non-constant mask.  */
  if (mask && TREE_CODE (mask) != VECTOR_CST)
    return MASK_UNKNOWN;

  /* Check VLS vector elements.  */
  gcc_assert (wlen.is_constant ());

  HOST_WIDE_INT active_len = wlen.to_constant ().to_shwi ();
  if (active_len == -1)
    /* Length absent or non-constant: scan the whole vector.  */
    active_len = nelts.to_constant ();

  /* Check if all elements in the active range match the mask.  */
  for (HOST_WIDE_INT i = 0; i < active_len; i++)
    {
      bool elt_active = !mask || !integer_zerop (vector_cst_elt (mask, i));
      if (!elt_active)
	{
	  /* Found an inactive element.  Check if all are inactive.  */
	  for (HOST_WIDE_INT j = 0; j < active_len; j++)
	    if (!mask || !integer_zerop (vector_cst_elt (mask, j)))
	      return MASK_UNKNOWN;  /* Mixed state.  */
	  return MASK_ALL_INACTIVE;
	}
    }

  /* All elements in active range are active.  */
  return full_length_p ? MASK_ALL_ACTIVE : MASK_UNKNOWN;
}
5848 :
5849 :
5850 : /* If IFN_{MASK,LEN,MASK_LEN}_LOAD/STORE call CALL is unconditional
5851 : (all lanes active), return a MEM_REF for the memory it references.
5852 : Otherwise return NULL_TREE. VECTYPE is the type of the memory vector. */
5853 :
5854 : static tree
5855 4016 : gimple_fold_partial_load_store_mem_ref (gcall *call, tree vectype)
5856 : {
5857 : /* Only fold if all lanes are active (unconditional). */
5858 4016 : if (partial_load_store_mask_state (call, vectype) != MASK_ALL_ACTIVE)
5859 : return NULL_TREE;
5860 :
5861 71 : tree ptr = gimple_call_arg (call, 0);
5862 71 : tree alias_align = gimple_call_arg (call, 1);
5863 71 : if (!tree_fits_uhwi_p (alias_align))
5864 : return NULL_TREE;
5865 :
5866 71 : unsigned HOST_WIDE_INT align = tree_to_uhwi (alias_align);
5867 71 : if (TYPE_ALIGN (vectype) != align)
5868 14 : vectype = build_aligned_type (vectype, align);
5869 71 : tree offset = build_zero_cst (TREE_TYPE (alias_align));
5870 71 : return fold_build2 (MEM_REF, vectype, ptr, offset);
5871 : }
5872 :
5873 : /* Try to fold IFN_{MASK,LEN}_LOAD/STORE call CALL. Return true on success. */
5874 :
5875 : static bool
5876 4031 : gimple_fold_partial_load_store (gimple_stmt_iterator *gsi, gcall *call)
5877 : {
5878 4031 : internal_fn ifn = gimple_call_internal_fn (call);
5879 4031 : tree lhs = gimple_call_lhs (call);
5880 4031 : bool is_load = (lhs != NULL_TREE);
5881 4031 : tree vectype;
5882 :
5883 4031 : if (is_load)
5884 1968 : vectype = TREE_TYPE (lhs);
5885 : else
5886 : {
5887 2063 : tree rhs = gimple_call_arg (call, internal_fn_stored_value_index (ifn));
5888 2063 : vectype = TREE_TYPE (rhs);
5889 : }
5890 :
5891 4031 : enum mask_load_store_state state
5892 4031 : = partial_load_store_mask_state (call, vectype);
5893 :
5894 : /* Handle all-inactive case. */
5895 4031 : if (state == MASK_ALL_INACTIVE)
5896 : {
5897 15 : if (is_load)
5898 : {
5899 : /* Replace load with else value. */
5900 15 : int else_index = internal_fn_else_index (ifn);
5901 15 : tree else_value = gimple_call_arg (call, else_index);
5902 15 : if (!is_gimple_reg (lhs))
5903 : {
5904 0 : if (!zerop (else_value))
5905 : return false;
5906 0 : else_value = build_constructor (TREE_TYPE (lhs), NULL);
5907 : }
5908 15 : gassign *new_stmt = gimple_build_assign (lhs, else_value);
5909 15 : gimple_set_location (new_stmt, gimple_location (call));
5910 : /* When the lhs is an array for LANES version, then there is still
5911 : a store, move the vops from the old stmt to the new one. */
5912 15 : if (!is_gimple_reg (lhs))
5913 0 : gimple_move_vops (new_stmt, call);
5914 15 : gsi_replace (gsi, new_stmt, false);
5915 15 : return true;
5916 : }
5917 : else
5918 : {
5919 : /* Remove inactive store altogether. */
5920 0 : unlink_stmt_vdef (call);
5921 0 : release_defs (call);
5922 0 : gsi_replace (gsi, gimple_build_nop (), true);
5923 0 : return true;
5924 : }
5925 : }
5926 :
5927 : /* We cannot simplify a gather/scatter or load/store lanes further. */
5928 4016 : if (internal_gather_scatter_fn_p (ifn)
5929 4016 : || TREE_CODE (vectype) == ARRAY_TYPE)
5930 : return false;
5931 :
5932 : /* Handle all-active case by folding to regular memory operation. */
5933 4016 : if (tree mem_ref = gimple_fold_partial_load_store_mem_ref (call, vectype))
5934 : {
5935 71 : gassign *new_stmt;
5936 71 : if (is_load)
5937 17 : new_stmt = gimple_build_assign (lhs, mem_ref);
5938 : else
5939 : {
5940 54 : tree rhs
5941 54 : = gimple_call_arg (call, internal_fn_stored_value_index (ifn));
5942 54 : new_stmt = gimple_build_assign (mem_ref, rhs);
5943 : }
5944 :
5945 71 : gimple_set_location (new_stmt, gimple_location (call));
5946 71 : gimple_move_vops (new_stmt, call);
5947 71 : gsi_replace (gsi, new_stmt, false);
5948 71 : return true;
5949 : }
5950 : return false;
5951 : }
5952 :
5953 : /* Attempt to fold a call statement referenced by the statement iterator GSI.
5954 : The statement may be replaced by another statement, e.g., if the call
5955 : simplifies to a constant value. Return true if any changes were made.
5956 : It is assumed that the operands have been previously folded. */
5957 :
5958 : static bool
5959 55479877 : gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
5960 : {
5961 55479877 : gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
5962 55479877 : tree callee;
5963 55479877 : bool changed = false;
5964 :
5965 : /* Check for virtual calls that became direct calls. */
5966 55479877 : callee = gimple_call_fn (stmt);
5967 55479877 : if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
5968 : {
5969 472463 : if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
5970 : {
5971 6 : if (dump_file && virtual_method_call_p (callee)
5972 338 : && !possible_polymorphic_call_target_p
5973 6 : (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
5974 6 : (OBJ_TYPE_REF_EXPR (callee)))))
5975 : {
5976 0 : fprintf (dump_file,
5977 : "Type inheritance inconsistent devirtualization of ");
5978 0 : print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
5979 0 : fprintf (dump_file, " to ");
5980 0 : print_generic_expr (dump_file, callee, TDF_SLIM);
5981 0 : fprintf (dump_file, "\n");
5982 : }
5983 :
5984 332 : gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
5985 332 : changed = true;
5986 : }
5987 472131 : else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
5988 : {
5989 467316 : bool final;
5990 467316 : vec <cgraph_node *>targets
5991 467316 : = possible_polymorphic_call_targets (callee, stmt, &final);
5992 469776 : if (final && targets.length () <= 1 && dbg_cnt (devirt))
5993 : {
5994 2033 : tree lhs = gimple_call_lhs (stmt);
5995 2033 : if (dump_enabled_p ())
5996 : {
5997 34 : dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
5998 : "folding virtual function call to %s\n",
5999 34 : targets.length () == 1
6000 17 : ? targets[0]->name ()
6001 : : "__builtin_unreachable");
6002 : }
6003 2033 : if (targets.length () == 1)
6004 : {
6005 1996 : tree fndecl = targets[0]->decl;
6006 1996 : gimple_call_set_fndecl (stmt, fndecl);
6007 1996 : changed = true;
6008 : /* If changing the call to __cxa_pure_virtual
6009 : or similar noreturn function, adjust gimple_call_fntype
6010 : too. */
6011 1996 : if (gimple_call_noreturn_p (stmt)
6012 28 : && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
6013 13 : && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
6014 2009 : && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
6015 13 : == void_type_node))
6016 13 : gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
6017 : /* If the call becomes noreturn, remove the lhs. */
6018 1996 : if (lhs
6019 1668 : && gimple_call_noreturn_p (stmt)
6020 2011 : && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
6021 6 : || should_remove_lhs_p (lhs)))
6022 : {
6023 12 : if (TREE_CODE (lhs) == SSA_NAME)
6024 : {
6025 0 : tree var = create_tmp_var (TREE_TYPE (lhs));
6026 0 : tree def = get_or_create_ssa_default_def (cfun, var);
6027 0 : gimple *new_stmt = gimple_build_assign (lhs, def);
6028 0 : gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
6029 : }
6030 12 : gimple_call_set_lhs (stmt, NULL_TREE);
6031 : }
6032 1996 : maybe_remove_unused_call_args (cfun, stmt);
6033 : }
6034 : else
6035 : {
6036 37 : location_t loc = gimple_location (stmt);
6037 37 : gimple *new_stmt = gimple_build_builtin_unreachable (loc);
6038 37 : gimple_call_set_ctrl_altering (new_stmt, false);
6039 : /* If the call had a SSA name as lhs morph that into
6040 : an uninitialized value. */
6041 37 : if (lhs && TREE_CODE (lhs) == SSA_NAME)
6042 : {
6043 12 : tree var = create_tmp_var (TREE_TYPE (lhs));
6044 12 : SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
6045 12 : SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
6046 12 : set_ssa_default_def (cfun, var, lhs);
6047 : }
6048 37 : gimple_move_vops (new_stmt, stmt);
6049 37 : gsi_replace (gsi, new_stmt, false);
6050 37 : return true;
6051 : }
6052 : }
6053 : }
6054 : }
6055 :
6056 : /* Check for indirect calls that became direct calls, and then
6057 : no longer require a static chain. */
6058 55479840 : if (gimple_call_chain (stmt))
6059 : {
6060 245876 : tree fn = gimple_call_fndecl (stmt);
6061 294294 : if (fn && !DECL_STATIC_CHAIN (fn))
6062 : {
6063 2024 : gimple_call_set_chain (stmt, NULL);
6064 2024 : changed = true;
6065 : }
6066 : }
6067 :
6068 55479840 : if (inplace)
6069 : return changed;
6070 :
6071 : /* Don't constant fold functions which can change the control. */
6072 55477256 : if (gimple_call_ctrl_altering_p (stmt))
6073 : return changed;
6074 :
6075 : /* Check for builtins that CCP can handle using information not
6076 : available in the generic fold routines. */
6077 47941759 : if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
6078 : {
6079 9213066 : if (gimple_fold_builtin (gsi))
6080 205042 : changed = true;
6081 : }
6082 38728693 : else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
6083 : {
6084 1095065 : changed |= targetm.gimple_fold_builtin (gsi);
6085 : }
6086 37633628 : else if (gimple_call_internal_p (stmt))
6087 : {
6088 1718778 : enum tree_code subcode = ERROR_MARK;
6089 1718778 : tree result = NULL_TREE;
6090 1718778 : bool cplx_result = false;
6091 1718778 : bool uaddc_usubc = false;
6092 1718778 : tree overflow = NULL_TREE;
6093 1718778 : switch (gimple_call_internal_fn (stmt))
6094 : {
6095 832 : case IFN_ASSUME:
6096 : /* Remove .ASSUME calls during the last fold since it is no
6097 : longer needed. */
6098 832 : if (fold_before_rtl_expansion_p ())
6099 114 : replace_call_with_value (gsi, NULL_TREE);
6100 : break;
6101 161981 : case IFN_BUILTIN_EXPECT:
6102 161981 : result = fold_builtin_expect (gimple_location (stmt),
6103 : gimple_call_arg (stmt, 0),
6104 : gimple_call_arg (stmt, 1),
6105 : gimple_call_arg (stmt, 2),
6106 : NULL_TREE);
6107 161981 : break;
6108 8660 : case IFN_UBSAN_OBJECT_SIZE:
6109 8660 : {
6110 8660 : tree offset = gimple_call_arg (stmt, 1);
6111 8660 : tree objsize = gimple_call_arg (stmt, 2);
6112 8660 : if (integer_all_onesp (objsize)
6113 8660 : || (TREE_CODE (offset) == INTEGER_CST
6114 4787 : && TREE_CODE (objsize) == INTEGER_CST
6115 1126 : && tree_int_cst_le (offset, objsize)))
6116 : {
6117 1539 : replace_call_with_value (gsi, NULL_TREE);
6118 1539 : return true;
6119 : }
6120 : }
6121 : break;
6122 11349 : case IFN_UBSAN_PTR:
6123 11349 : if (integer_zerop (gimple_call_arg (stmt, 1)))
6124 : {
6125 30 : replace_call_with_value (gsi, NULL_TREE);
6126 30 : return true;
6127 : }
6128 : break;
6129 8051 : case IFN_UBSAN_BOUNDS:
6130 8051 : {
6131 8051 : tree index = gimple_call_arg (stmt, 1);
6132 8051 : tree bound = gimple_call_arg (stmt, 2);
6133 8051 : if (TREE_CODE (index) == INTEGER_CST
6134 4989 : && TREE_CODE (bound) == INTEGER_CST)
6135 : {
6136 4390 : index = fold_convert (TREE_TYPE (bound), index);
6137 4390 : if (TREE_CODE (index) == INTEGER_CST
6138 4390 : && tree_int_cst_lt (index, bound))
6139 : {
6140 288 : replace_call_with_value (gsi, NULL_TREE);
6141 288 : return true;
6142 : }
6143 : }
6144 : }
6145 : break;
6146 20589 : case IFN_GOACC_DIM_SIZE:
6147 20589 : case IFN_GOACC_DIM_POS:
6148 20589 : result = fold_internal_goacc_dim (stmt);
6149 20589 : break;
6150 : case IFN_UBSAN_CHECK_ADD:
6151 : subcode = PLUS_EXPR;
6152 : break;
6153 : case IFN_UBSAN_CHECK_SUB:
6154 : subcode = MINUS_EXPR;
6155 : break;
6156 : case IFN_UBSAN_CHECK_MUL:
6157 : subcode = MULT_EXPR;
6158 : break;
6159 : case IFN_ADD_OVERFLOW:
6160 : subcode = PLUS_EXPR;
6161 : cplx_result = true;
6162 : break;
6163 : case IFN_SUB_OVERFLOW:
6164 : subcode = MINUS_EXPR;
6165 : cplx_result = true;
6166 : break;
6167 : case IFN_MUL_OVERFLOW:
6168 : subcode = MULT_EXPR;
6169 : cplx_result = true;
6170 : break;
6171 : case IFN_UADDC:
6172 : subcode = PLUS_EXPR;
6173 : cplx_result = true;
6174 : uaddc_usubc = true;
6175 : break;
6176 : case IFN_USUBC:
6177 : subcode = MINUS_EXPR;
6178 : cplx_result = true;
6179 : uaddc_usubc = true;
6180 : break;
6181 4031 : case IFN_LEN_LOAD:
6182 4031 : case IFN_MASK_LOAD:
6183 4031 : case IFN_MASK_LEN_LOAD:
6184 4031 : case IFN_MASK_GATHER_LOAD:
6185 4031 : case IFN_MASK_LEN_GATHER_LOAD:
6186 4031 : case IFN_MASK_LOAD_LANES:
6187 4031 : case IFN_MASK_LEN_LOAD_LANES:
6188 4031 : case IFN_LEN_STORE:
6189 4031 : case IFN_MASK_STORE:
6190 4031 : case IFN_MASK_LEN_STORE:
6191 4031 : case IFN_MASK_SCATTER_STORE:
6192 4031 : case IFN_MASK_LEN_SCATTER_STORE:
6193 4031 : case IFN_MASK_STORE_LANES:
6194 4031 : case IFN_MASK_LEN_STORE_LANES:
6195 4031 : changed |= gimple_fold_partial_load_store (gsi, stmt);
6196 4031 : break;
6197 : default:
6198 : break;
6199 : }
6200 186715 : if (subcode != ERROR_MARK)
6201 : {
6202 492634 : tree arg0 = gimple_call_arg (stmt, 0);
6203 492634 : tree arg1 = gimple_call_arg (stmt, 1);
6204 492634 : tree arg2 = NULL_TREE;
6205 492634 : tree type = TREE_TYPE (arg0);
6206 492634 : if (cplx_result)
6207 : {
6208 473556 : tree lhs = gimple_call_lhs (stmt);
6209 473556 : if (lhs == NULL_TREE)
6210 : type = NULL_TREE;
6211 : else
6212 473556 : type = TREE_TYPE (TREE_TYPE (lhs));
6213 473556 : if (uaddc_usubc)
6214 30935 : arg2 = gimple_call_arg (stmt, 2);
6215 : }
6216 492634 : if (type == NULL_TREE)
6217 : ;
6218 492634 : else if (uaddc_usubc)
6219 : {
6220 30935 : if (!integer_zerop (arg2))
6221 : ;
6222 : /* x = y + 0 + 0; x = y - 0 - 0; */
6223 4819 : else if (integer_zerop (arg1))
6224 : result = arg0;
6225 : /* x = 0 + y + 0; */
6226 4195 : else if (subcode != MINUS_EXPR && integer_zerop (arg0))
6227 : result = arg1;
6228 : /* x = y - y - 0; */
6229 4195 : else if (subcode == MINUS_EXPR
6230 4195 : && operand_equal_p (arg0, arg1, 0))
6231 0 : result = integer_zero_node;
6232 : }
6233 : /* x = y + 0; x = y - 0; x = y * 0; */
6234 461699 : else if (integer_zerop (arg1))
6235 10107 : result = subcode == MULT_EXPR ? integer_zero_node : arg0;
6236 : /* x = 0 + y; x = 0 * y; */
6237 451592 : else if (subcode != MINUS_EXPR && integer_zerop (arg0))
6238 0 : result = subcode == MULT_EXPR ? integer_zero_node : arg1;
6239 : /* x = y - y; */
6240 451592 : else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
6241 7 : result = integer_zero_node;
6242 : /* x = y * 1; x = 1 * y; */
6243 451585 : else if (subcode == MULT_EXPR && integer_onep (arg1))
6244 : result = arg0;
6245 446256 : else if (subcode == MULT_EXPR && integer_onep (arg0))
6246 : result = arg1;
6247 492634 : if (result)
6248 : {
6249 16067 : if (result == integer_zero_node)
6250 2140 : result = build_zero_cst (type);
6251 13927 : else if (cplx_result && TREE_TYPE (result) != type)
6252 : {
6253 9606 : if (TREE_CODE (result) == INTEGER_CST)
6254 : {
6255 0 : if (arith_overflowed_p (PLUS_EXPR, type, result,
6256 : integer_zero_node))
6257 0 : overflow = build_one_cst (type);
6258 : }
6259 9606 : else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
6260 6920 : && TYPE_UNSIGNED (type))
6261 9751 : || (TYPE_PRECISION (type)
6262 2831 : < (TYPE_PRECISION (TREE_TYPE (result))
6263 2831 : + (TYPE_UNSIGNED (TREE_TYPE (result))
6264 3155 : && !TYPE_UNSIGNED (type)))))
6265 : result = NULL_TREE;
6266 62 : if (result)
6267 62 : result = fold_convert (type, result);
6268 : }
6269 : }
6270 : }
6271 :
6272 1230810 : if (result)
6273 : {
6274 28910 : if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
6275 0 : result = drop_tree_overflow (result);
6276 28910 : if (cplx_result)
6277 : {
6278 6512 : if (overflow == NULL_TREE)
6279 6512 : overflow = build_zero_cst (TREE_TYPE (result));
6280 6512 : tree ctype = build_complex_type (TREE_TYPE (result));
6281 6512 : if (TREE_CODE (result) == INTEGER_CST
6282 2140 : && TREE_CODE (overflow) == INTEGER_CST)
6283 2140 : result = build_complex (ctype, result, overflow);
6284 : else
6285 4372 : result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
6286 : ctype, result, overflow);
6287 : }
6288 28910 : gimplify_and_update_call_from_tree (gsi, result);
6289 28910 : changed = true;
6290 : }
6291 : }
6292 :
6293 : return changed;
6294 : }
6295 :
6296 :
6297 : /* Return true whether NAME has a use on STMT. Note this can return
6298 : false even though there's a use on STMT if SSA operands are not
6299 : up-to-date. */
6300 :
6301 : static bool
6302 1639 : has_use_on_stmt (tree name, gimple *stmt)
6303 : {
6304 1639 : ssa_op_iter iter;
6305 1639 : tree op;
6306 3303 : FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
6307 1710 : if (op == name)
6308 : return true;
6309 : return false;
6310 : }
6311 :
6312 : /* Add the lhs of each statement of SEQ to DCE_WORKLIST. */
6313 :
6314 : void
6315 4631390 : mark_lhs_in_seq_for_dce (bitmap dce_worklist, gimple_seq seq)
6316 : {
6317 4631390 : if (!dce_worklist)
6318 : return;
6319 :
6320 1619569 : for (gimple_stmt_iterator i = gsi_start (seq);
6321 1887052 : !gsi_end_p (i); gsi_next (&i))
6322 : {
6323 267483 : gimple *stmt = gsi_stmt (i);
6324 267483 : tree name = gimple_get_lhs (stmt);
6325 267483 : if (name && TREE_CODE (name) == SSA_NAME)
6326 267483 : bitmap_set_bit (dce_worklist, SSA_NAME_VERSION (name));
6327 : }
6328 : }
6329 :
/* Worker for fold_stmt_1 dispatch to pattern based folding with
   gimple_simplify.

   Replaces *GSI with the simplification result described by RES_OP
   (operation code plus operands) and the associated preparatory
   statements in *SEQ.  Does the replacement according to INPLACE —
   when INPLACE is true no new statements may be inserted — and
   returns true if the operation succeeded.  DCE_WORKLIST, when
   non-NULL, receives the lhs SSA names of the statements inserted
   from *SEQ (see mark_lhs_in_seq_for_dce).  */

static bool
replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
				  gimple_match_op *res_op,
				  gimple_seq *seq, bool inplace,
				  bitmap dce_worklist)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree *ops = res_op->ops;
  unsigned int num_ops = res_op->num_ops;

  /* Play safe and do not allow abnormals to be mentioned in
     newly created statements.  See also maybe_push_res_to_seq.
     As an exception allow such uses if there was a use of the
     same SSA name on the old stmt.  */
  for (unsigned int i = 0; i < num_ops; ++i)
    if (TREE_CODE (ops[i]) == SSA_NAME
	&& SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i])
	&& !has_use_on_stmt (ops[i], stmt))
      return false;

  /* Apply the same abnormal-SSA-name check to the operands of a
     comparison appearing as the first result operand.  */
  if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
    for (unsigned int i = 0; i < 2; ++i)
      if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i))
	  && !has_use_on_stmt (TREE_OPERAND (ops[0], i), stmt))
	return false;

  /* Don't insert new statements when INPLACE is true, even if we could
     reuse STMT for the final statement.  */
  if (inplace && !gimple_seq_empty_p (*seq))
    return false;

  /* Case 1: STMT is a GIMPLE_COND.  The result must be rewritten into
     the condition, either directly (comparison), as NAME != 0 (SSA
     name), or as a canonical true/false condition (constant).  */
  if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
    {
      gcc_assert (res_op->code.is_tree_code ());
      auto code = tree_code (res_op->code);
      if (TREE_CODE_CLASS (code) == tcc_comparison
	  /* GIMPLE_CONDs condition may not throw.  */
	  && ((cfun
	       && (!flag_exceptions
		   || !cfun->can_throw_non_call_exceptions))
	      || !operation_could_trap_p (code,
					  FLOAT_TYPE_P (TREE_TYPE (ops[0])),
					  false, NULL_TREE)))
	gimple_cond_set_condition (cond_stmt, code, ops[0], ops[1]);
      else if (code == SSA_NAME)
	{
	  /* If setting the gimple cond to the same thing,
	     return false as nothing changed.  */
	  if (gimple_cond_code (cond_stmt) == NE_EXPR
	      && operand_equal_p (gimple_cond_lhs (cond_stmt), ops[0])
	      && integer_zerop (gimple_cond_rhs (cond_stmt)))
	    return false;
	  gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
				     build_zero_cst (TREE_TYPE (ops[0])));
	}
      else if (code == INTEGER_CST)
	{
	  /* Make into the canonical form `1 != 0` and `0 != 0`.
	     If already in the canonical form return false
	     saying nothing has been done.  */
	  if (integer_zerop (ops[0]))
	    {
	      if (gimple_cond_false_canonical_p (cond_stmt))
		return false;
	      gimple_cond_make_false (cond_stmt);
	    }
	  else
	    {
	      if (gimple_cond_true_canonical_p (cond_stmt))
		return false;
	      gimple_cond_make_true (cond_stmt);
	    }
	}
      else if (!inplace)
	{
	  /* For throwing comparisons, see if the GIMPLE_COND is the same as
	     the comparison would be.
	     This can happen due to the match pattern for
	     `(ne (cmp @0 @1) integer_zerop)` which creates a new expression
	     for the comparison.  */
	  if (TREE_CODE_CLASS (code) == tcc_comparison
	      && (!cfun
		  || (flag_exceptions
		      && cfun->can_throw_non_call_exceptions))
	      && operation_could_trap_p (code,
					 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
					 false, NULL_TREE))
	    {
	      tree lhs = gimple_cond_lhs (cond_stmt);
	      if (gimple_cond_code (cond_stmt) == NE_EXPR
		  && TREE_CODE (lhs) == SSA_NAME
		  && INTEGRAL_TYPE_P (TREE_TYPE (lhs))
		  && integer_zerop (gimple_cond_rhs (cond_stmt)))
		{
		  gimple *s = SSA_NAME_DEF_STMT (lhs);
		  if (is_gimple_assign (s)
		      && gimple_assign_rhs_code (s) == code
		      && operand_equal_p (gimple_assign_rhs1 (s), ops[0])
		      && operand_equal_p (gimple_assign_rhs2 (s), ops[1]))
		    return false;
		}
	    }
	  /* Materialize the result as a new SSA name and compare it
	     against zero.  */
	  tree res = maybe_push_res_to_seq (res_op, seq);
	  if (!res)
	    return false;
	  gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
				     build_zero_cst (TREE_TYPE (res)));
	}
      else
	return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi),
			     0, TDF_SLIM);
	}
      // Mark the lhs of the new statements maybe for dce
      mark_lhs_in_seq_for_dce (dce_worklist, *seq);
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  /* Case 2: STMT is an assignment and the result is a tree code —
     replace the rhs in place.  */
  else if (is_gimple_assign (stmt)
	   && res_op->code.is_tree_code ())
    {
      auto code = tree_code (res_op->code);
      if (!inplace
	  || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (code))
	{
	  maybe_build_generic_op (res_op);
	  gimple_assign_set_rhs_with_ops (gsi, code,
					  res_op->op_or_null (0),
					  res_op->op_or_null (1),
					  res_op->op_or_null (2));
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      if (!gimple_seq_empty_p (*seq))
		print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	      print_gimple_stmt (dump_file, gsi_stmt (*gsi),
				 0, TDF_SLIM);
	    }
	  // Mark the lhs of the new statements maybe for dce
	  mark_lhs_in_seq_for_dce (dce_worklist, *seq);
	  gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
	  return true;
	}
    }
  /* Case 3: the result is the same (internal or builtin) function call
     with possibly-simplified arguments — update the arguments only.  */
  else if (res_op->code.is_fn_code ()
	   && gimple_call_combined_fn (stmt) == combined_fn (res_op->code))
    {
      gcc_assert (num_ops == gimple_call_num_args (stmt));
      for (unsigned int i = 0; i < num_ops; ++i)
	gimple_call_set_arg (stmt, i, ops[i]);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
	}
      // Mark the lhs of the new statements maybe for dce
      mark_lhs_in_seq_for_dce (dce_worklist, *seq);
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  /* Case 4: anything else with a lhs — push the result into *SEQ
     computing the lhs and replace STMT with the whole sequence.  */
  else if (!inplace)
    {
      if (gimple_has_lhs (stmt))
	{
	  tree lhs = gimple_get_lhs (stmt);
	  if (!maybe_push_res_to_seq (res_op, seq, lhs))
	    return false;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	    }
	  // Mark the lhs of the new statements maybe for dce
	  mark_lhs_in_seq_for_dce (dce_worklist, *seq);
	  gsi_replace_with_seq_vops (gsi, *seq);
	  return true;
	}
      else
	gcc_unreachable ();
    }

  return false;
}
6528 :
/* Canonicalize MEM_REFs invariant address operand after propagation.
   *T is the reference tree (possibly wrapped in an ADDR_EXPR) to be
   canonicalized in place.  IS_DEBUG is true when *T is the value of a
   debug bind; in that case failing to re-compute a base address is
   tolerated (returning false) instead of being an internal error.
   Returns true if *T was changed.  */

static bool
maybe_canonicalize_mem_ref_addr (tree *t, bool is_debug = false)
{
  bool res = false;
  /* Remember the outermost tree; the ADDR_EXPR canonicalizations near
     the end operate on it rather than on the stripped reference.  */
  tree *orig_t = t;

  if (TREE_CODE (*t) == ADDR_EXPR)
    t = &TREE_OPERAND (*t, 0);

  /* The C and C++ frontends use an ARRAY_REF for indexing with their
     generic vector extension.  The actual vector referenced is
     view-converted to an array type for this purpose.  If the index
     is constant the canonical representation in the middle-end is a
     BIT_FIELD_REF so re-write the former to the latter here.  */
  if (TREE_CODE (*t) == ARRAY_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
      && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
      && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
    {
      tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
      if (VECTOR_TYPE_P (vtype))
	{
	  tree low = array_ref_low_bound (*t);
	  if (TREE_CODE (low) == INTEGER_CST)
	    {
	      if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
		{
		  /* Compute the bit offset of the accessed element and
		     verify the access stays within the vector.  */
		  widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
					    wi::to_widest (low));
		  idx = wi::mul (idx, wi::to_widest
					 (TYPE_SIZE (TREE_TYPE (*t))));
		  widest_int ext
		    = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
		  if (maybe_le (ext, wi::to_poly_widest (TYPE_SIZE (vtype))))
		    {
		      *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
				       TREE_TYPE (*t),
				       TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
				       TYPE_SIZE (TREE_TYPE (*t)),
				       wide_int_to_tree (bitsizetype, idx));
		      res = true;
		    }
		}
	    }
	}
    }

  /* Strip handled components to reach the base of the reference.  */
  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
     of invariant addresses into a SSA name MEM_REF address.  */
  if (TREE_CODE (*t) == MEM_REF
      || TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (*t, 0);
      if (TREE_CODE (addr) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
	      || handled_component_p (TREE_OPERAND (addr, 0))))
	{
	  tree base;
	  poly_int64 coffset;
	  base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						&coffset);
	  if (!base)
	    {
	      /* For debug binds a non-constant base is possible; give up
		 quietly instead of ICEing.  */
	      if (is_debug)
		return false;
	      gcc_unreachable ();
	    }

	  /* Fold the inner offset into the MEM_REF offset operand.  */
	  TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
	  TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
						  TREE_OPERAND (*t, 1),
						  size_int (coffset));
	  res = true;
	}
      gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
			   || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
    }

  /* Canonicalize back MEM_REFs to plain reference trees if the object
     accessed is a decl that has the same access semantics as the MEM_REF.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && MR_DEPENDENCE_CLIQUE (*t) == 0)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
      if (/* Same volatile qualification.  */
	  TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
	  /* Same TBAA behavior with -fstrict-aliasing.  */
	  && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
	  && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
	      == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
	  /* Same alignment.  */
	  && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
	  /* We have to look out here to not drop a required conversion
	     from the rhs to the lhs if *t appears on the lhs or vice-versa
	     if it appears on the rhs.  Thus require strict type
	     compatibility.  */
	  && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
	{
	  *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
	  res = true;
	}
    }

  /* Fold &MEM[constant-address + offset] down to a plain integer
     constant address.  */
  else if (TREE_CODE (*orig_t) == ADDR_EXPR
	   && TREE_CODE (*t) == MEM_REF
	   && TREE_CODE (TREE_OPERAND (*t, 0)) == INTEGER_CST)
    {
      tree base;
      poly_int64 coffset;
      base = get_addr_base_and_unit_offset (TREE_OPERAND (*orig_t, 0),
					    &coffset);
      if (base)
	{
	  gcc_assert (TREE_CODE (base) == MEM_REF);
	  poly_int64 moffset;
	  if (mem_ref_offset (base).to_shwi (&moffset))
	    {
	      coffset += moffset;
	      if (wi::to_poly_wide (TREE_OPERAND (base, 0)).to_shwi (&moffset))
		{
		  coffset += moffset;
		  *orig_t = build_int_cst (TREE_TYPE (*orig_t), coffset);
		  return true;
		}
	    }
	}
    }

  /* Canonicalize TARGET_MEM_REF in particular with respect to
     the indexes becoming constant.  */
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  if (TREE_CODE (*orig_t) == ADDR_EXPR)
	    recompute_tree_invariant_for_addr_expr (*orig_t);
	  res = true;
	}
    }

  return res;
}
6681 :
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  GSI points at the statement to fold;
   VALUEIZE, if non-NULL, maps SSA names to replacement values during
   simplification; DCE_WORKLIST, when non-NULL, collects the lhs SSA
   names of newly emitted statements for later DCE (see
   mark_lhs_in_seq_for_dce).  Returns true if the statement changed.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree),
	     bitmap dce_worklist = nullptr)
{
  bool changed = false;
  gimple *stmt = gsi_stmt (*gsi);
  bool nowarning = warning_suppressed_p (stmt, OPT_Wstrict_overflow);
  unsigned i;
  fold_defer_overflow_warnings ();

  /* First do required canonicalization of [TARGET_]MEM_REF addresses
     after propagation.
     ??? This shouldn't be done in generic folding but in the
     propagation helpers which also know whether an address was
     propagated.
     Also canonicalize operand order.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
	{
	  tree *rhs = gimple_assign_rhs1_ptr (stmt);
	  if ((REFERENCE_CLASS_P (*rhs)
	       || TREE_CODE (*rhs) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (rhs))
	    changed = true;
	  tree *lhs = gimple_assign_lhs_ptr (stmt);
	  if (REFERENCE_CLASS_P (*lhs)
	      && maybe_canonicalize_mem_ref_addr (lhs))
	    changed = true;
	  /* Canonicalize &MEM[ssa_n, CST] to ssa_n p+ CST.
	     This cannot be done in maybe_canonicalize_mem_ref_addr
	     as the gimple now has two operands rather than one.
	     The same reason why this can't be done in
	     maybe_canonicalize_mem_ref_addr is the same reason why
	     this can't be done inplace.  */
	  if (!inplace && TREE_CODE (*rhs) == ADDR_EXPR)
	    {
	      tree inner = TREE_OPERAND (*rhs, 0);
	      if (TREE_CODE (inner) == MEM_REF
		  && TREE_CODE (TREE_OPERAND (inner, 0)) == SSA_NAME
		  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
		{
		  tree ptr = TREE_OPERAND (inner, 0);
		  tree addon = TREE_OPERAND (inner, 1);
		  addon = fold_convert (sizetype, addon);
		  gimple_assign_set_rhs_with_ops (gsi, POINTER_PLUS_EXPR,
						  ptr, addon);
		  changed = true;
		  stmt = gsi_stmt (*gsi);
		}
	    }
	}
      else
	{
	  /* Canonicalize operand order.  */
	  enum tree_code code = gimple_assign_rhs_code (stmt);
	  if (TREE_CODE_CLASS (code) == tcc_comparison
	      || commutative_tree_code (code)
	      || commutative_ternary_tree_code (code))
	    {
	      tree rhs1 = gimple_assign_rhs1 (stmt);
	      tree rhs2 = gimple_assign_rhs2 (stmt);
	      if (tree_swap_operands_p (rhs1, rhs2))
		{
		  gimple_assign_set_rhs1 (stmt, rhs2);
		  gimple_assign_set_rhs2 (stmt, rhs1);
		  /* Swapping comparison operands requires swapping the
		     comparison code as well.  */
		  if (TREE_CODE_CLASS (code) == tcc_comparison)
		    gimple_assign_set_rhs_code (stmt,
						swap_tree_comparison (code));
		  changed = true;
		}
	    }
	}
      break;
    case GIMPLE_CALL:
      {
	gcall *call = as_a<gcall *> (stmt);
	for (i = 0; i < gimple_call_num_args (call); ++i)
	  {
	    tree *arg = gimple_call_arg_ptr (call, i);
	    if (REFERENCE_CLASS_P (*arg)
		&& maybe_canonicalize_mem_ref_addr (arg))
	      changed = true;
	  }
	tree *lhs = gimple_call_lhs_ptr (call);
	if (*lhs
	    && REFERENCE_CLASS_P (*lhs)
	    && maybe_canonicalize_mem_ref_addr (lhs))
	  changed = true;
	if (*lhs)
	  {
	    /* Canonicalize operand order of commutative (internal)
	       function calls.  */
	    combined_fn cfn = gimple_call_combined_fn (call);
	    internal_fn ifn = associated_internal_fn (cfn, TREE_TYPE (*lhs));
	    int opno = first_commutative_argument (ifn);
	    if (opno >= 0)
	      {
		tree arg1 = gimple_call_arg (call, opno);
		tree arg2 = gimple_call_arg (call, opno + 1);
		if (tree_swap_operands_p (arg1, arg2))
		  {
		    gimple_call_set_arg (call, opno, arg2);
		    gimple_call_set_arg (call, opno + 1, arg1);
		    changed = true;
		  }
	      }
	  }
	break;
      }
    case GIMPLE_ASM:
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if (REFERENCE_CLASS_P (op)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if ((REFERENCE_CLASS_P (op)
		 || TREE_CODE (op) == ADDR_EXPR)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
      }
      break;
    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree *val = gimple_debug_bind_get_value_ptr (stmt);
	  if (*val
	      && (REFERENCE_CLASS_P (*val)
		  || TREE_CODE (*val) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (val, true))
	    changed = true;
	}
      break;
    case GIMPLE_COND:
      {
	/* Canonicalize operand order.  */
	tree lhs = gimple_cond_lhs (stmt);
	tree rhs = gimple_cond_rhs (stmt);
	if (tree_swap_operands_p (lhs, rhs))
	  {
	    gcond *gc = as_a <gcond *> (stmt);
	    gimple_cond_set_lhs (gc, rhs);
	    gimple_cond_set_rhs (gc, lhs);
	    gimple_cond_set_code (gc,
				  swap_tree_comparison (gimple_cond_code (gc)));
	    changed = true;
	  }
      }
    default:;
    }

  /* Dispatch to pattern-based folding.  */
  if (!inplace
      || is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_COND)
    {
      gimple_seq seq = NULL;
      gimple_match_op res_op;
      if (gimple_simplify (stmt, &res_op, inplace ? NULL : &seq,
			   valueize, valueize))
	{
	  if (replace_stmt_with_simplification (gsi, &res_op, &seq, inplace,
						dce_worklist))
	    changed = true;
	  else
	    gimple_seq_discard (seq);
	}
    }

  stmt = gsi_stmt (*gsi);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	/* Try to canonicalize for boolean-typed X the comparisons
	   X == 0, X == 1, X != 0, and X != 1.  */
	if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	    || gimple_assign_rhs_code (stmt) == NE_EXPR)
	  {
	    tree lhs = gimple_assign_lhs (stmt);
	    tree op1 = gimple_assign_rhs1 (stmt);
	    tree op2 = gimple_assign_rhs2 (stmt);
	    tree type = TREE_TYPE (op1);

	    /* Check whether the comparison operands are of the same boolean
	       type as the result type is.
	       Check that second operand is an integer-constant with value
	       one or zero.  */
	    if (TREE_CODE (op2) == INTEGER_CST
		&& (integer_zerop (op2) || integer_onep (op2))
		&& useless_type_conversion_p (TREE_TYPE (lhs), type))
	      {
		enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
		bool is_logical_not = false;

		/* X == 0 and X != 1 is a logical-not of X
		   X == 1 and X != 0 is X  */
		if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		    || (cmp_code == NE_EXPR && integer_onep (op2)))
		  is_logical_not = true;

		if (is_logical_not == false)
		  gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
		/* Only for one-bit precision typed X the transformation
		   !X -> ~X is valid.  */
		else if (TYPE_PRECISION (type) == 1)
		  gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
		/* Otherwise we use !X -> X ^ 1.  */
		else
		  gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
						  build_int_cst (type, 1));
		changed = true;
		break;
	      }
	  }

	unsigned old_num_ops = gimple_num_ops (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs = fold_gimple_assign (gsi);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	/* In-place folding may only shrink the statement, never grow
	   its operand count.  */
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    case GIMPLE_RETURN:
      {
	greturn *ret_stmt = as_a<greturn *> (stmt);
	tree ret = gimple_return_retval(ret_stmt);

	/* Propagate the valueized return value if that is a safe
	   copy-propagation.  */
	if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
	  {
	    tree val = valueize (ret);
	    if (val && val != ret
		&& may_propagate_copy (ret, val))
	      {
		gimple_return_set_retval (ret_stmt, val);
		changed = true;
	      }
	  }
      }
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
  return changed;
}
6975 :
/* Valueization callback that ends up not following SSA edges.  */

tree
no_follow_ssa_edges (tree)
{
  /* Returning NULL_TREE tells the gimple matcher not to look through
     the definition of the queried SSA name.  */
  return NULL_TREE;
}
6983 :
6984 : /* Valueization callback that ends up following single-use SSA edges only. */
6985 :
6986 : tree
6987 871473071 : follow_single_use_edges (tree val)
6988 : {
6989 871473071 : if (TREE_CODE (val) == SSA_NAME
6990 871473071 : && !has_single_use (val))
6991 448255265 : return NULL_TREE;
6992 : return val;
6993 : }
6994 :
/* Valueization callback that follows all SSA edges.  */

tree
follow_all_ssa_edges (tree val)
{
  /* Returning the name itself lets the gimple matcher look through its
     definition unconditionally.  */
  return val;
}
7002 :
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi, bitmap dce_bitmap)
{
  /* This overload does not follow SSA use-def edges; only the statement
     itself is considered.  */
  return fold_stmt_1 (gsi, false, no_follow_ssa_edges, dce_bitmap);
}
7015 :
/* Like the fold_stmt overload above, but valueize SSA operands using
   VALUEIZE when following SSA use-def edges during folding.  */

bool
fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree), bitmap dce_bitmap)
{
  return fold_stmt_1 (gsi, false, valueize, dce_bitmap);
}
7021 :
/* Perform the minimal folding on statement *GSI.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement *GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
{
  gimple *stmt = gsi_stmt (*gsi);
  bool changed = fold_stmt_1 (gsi, true, valueize);
  /* In-place folding must never replace the statement itself.  */
  gcc_assert (gsi_stmt (*gsi) == stmt);
  return changed;
}
7038 :
7039 : /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
7040 : if EXPR is null or we don't know how.
7041 : If non-null, the result always has boolean type. */
7042 :
7043 : static tree
7044 209632 : canonicalize_bool (tree expr, bool invert)
7045 : {
7046 209632 : if (!expr)
7047 : return NULL_TREE;
7048 50 : else if (invert)
7049 : {
7050 36 : if (integer_nonzerop (expr))
7051 0 : return boolean_false_node;
7052 36 : else if (integer_zerop (expr))
7053 0 : return boolean_true_node;
7054 36 : else if (TREE_CODE (expr) == SSA_NAME)
7055 0 : return fold_build2 (EQ_EXPR, boolean_type_node, expr,
7056 : build_int_cst (TREE_TYPE (expr), 0));
7057 36 : else if (COMPARISON_CLASS_P (expr))
7058 36 : return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
7059 : boolean_type_node,
7060 : TREE_OPERAND (expr, 0),
7061 : TREE_OPERAND (expr, 1));
7062 : else
7063 : return NULL_TREE;
7064 : }
7065 : else
7066 : {
7067 14 : if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
7068 : return expr;
7069 0 : if (integer_nonzerop (expr))
7070 0 : return boolean_true_node;
7071 0 : else if (integer_zerop (expr))
7072 0 : return boolean_false_node;
7073 0 : else if (TREE_CODE (expr) == SSA_NAME)
7074 0 : return fold_build2 (NE_EXPR, boolean_type_node, expr,
7075 : build_int_cst (TREE_TYPE (expr), 0));
7076 0 : else if (COMPARISON_CLASS_P (expr))
7077 0 : return fold_build2 (TREE_CODE (expr),
7078 : boolean_type_node,
7079 : TREE_OPERAND (expr, 0),
7080 : TREE_OPERAND (expr, 1));
7081 : else
7082 : return NULL_TREE;
7083 : }
7084 : }
7085 :
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple *s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      /* A boolean NAME is equivalent to NAME != 0 and to NAME == 1.  */
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  /* (cmp != 0) and (cmp == 1) are equivalent to cmp itself ...  */
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  /* ... while (cmp == 0) and (cmp != 1) are its inversion.  */
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }
  return false;
}
7143 :
7144 : /* Check to see if two boolean expressions OP1 and OP2 are logically
7145 : equivalent. */
7146 :
7147 : static bool
7148 15 : same_bool_result_p (const_tree op1, const_tree op2)
7149 : {
7150 : /* Simple cases first. */
7151 15 : if (operand_equal_p (op1, op2, 0))
7152 : return true;
7153 :
7154 : /* Check the cases where at least one of the operands is a comparison.
7155 : These are a bit smarter than operand_equal_p in that they apply some
7156 : identifies on SSA_NAMEs. */
7157 8 : if (COMPARISON_CLASS_P (op2)
7158 16 : && same_bool_comparison_p (op1, TREE_CODE (op2),
7159 8 : TREE_OPERAND (op2, 0),
7160 8 : TREE_OPERAND (op2, 1)))
7161 : return true;
7162 8 : if (COMPARISON_CLASS_P (op1)
7163 16 : && same_bool_comparison_p (op2, TREE_CODE (op1),
7164 8 : TREE_OPERAND (op1, 0),
7165 8 : TREE_OPERAND (op1, 1)))
7166 : return true;
7167 :
7168 : /* Default case. */
7169 : return false;
7170 : }
7171 :
7172 : /* Forward declarations for some mutually recursive functions. */
7173 :
7174 : static tree
7175 : and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
7176 : enum tree_code code2, tree op2a, tree op2b, basic_block);
7177 : static tree
7178 : and_var_with_comparison (tree type, tree var, bool invert,
7179 : enum tree_code code2, tree op2a, tree op2b,
7180 : basic_block);
7181 : static tree
7182 : and_var_with_comparison_1 (tree type, gimple *stmt,
7183 : enum tree_code code2, tree op2a, tree op2b,
7184 : basic_block);
7185 : static tree
7186 : or_comparisons_1 (tree, enum tree_code code1, tree op1a, tree op1b,
7187 : enum tree_code code2, tree op2a, tree op2b,
7188 : basic_block);
7189 : static tree
7190 : or_var_with_comparison (tree, tree var, bool invert,
7191 : enum tree_code code2, tree op2a, tree op2b,
7192 : basic_block);
7193 : static tree
7194 : or_var_with_comparison_1 (tree, gimple *stmt,
7195 : enum tree_code code2, tree op2a, tree op2b,
7196 : basic_block);
7197 :
7198 : /* Helper function for and_comparisons_1: try to simplify the AND of the
7199 : ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
7200 : If INVERT is true, invert the value of the VAR before doing the AND.
7201 : Return NULL_EXPR if we can't simplify this to a single expression. */
7202 :
7203 : static tree
7204 171818 : and_var_with_comparison (tree type, tree var, bool invert,
7205 : enum tree_code code2, tree op2a, tree op2b,
7206 : basic_block outer_cond_bb)
7207 : {
7208 171818 : tree t;
7209 171818 : gimple *stmt = SSA_NAME_DEF_STMT (var);
7210 :
7211 : /* We can only deal with variables whose definitions are assignments. */
7212 171818 : if (!is_gimple_assign (stmt))
7213 : return NULL_TREE;
7214 :
7215 : /* If we have an inverted comparison, apply DeMorgan's law and rewrite
7216 : !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
7217 : Then we only have to consider the simpler non-inverted cases. */
7218 171416 : if (invert)
7219 80186 : t = or_var_with_comparison_1 (type, stmt,
7220 : invert_tree_comparison (code2, false),
7221 : op2a, op2b, outer_cond_bb);
7222 : else
7223 91230 : t = and_var_with_comparison_1 (type, stmt, code2, op2a, op2b,
7224 : outer_cond_bb);
7225 171416 : return canonicalize_bool (t, invert);
7226 : }
7227 :
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (tree type, gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b,
			   basic_block outer_cond_bb)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      /* (op2a != 0) and (op2a == 1) test op2a itself ...  */
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      /* ... while (op2a == 0) and (op2a != 1) test its negation.  */
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (type, innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2,
				  op2a,
				  op2b, outer_cond_bb);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      /* Simplification of (inner1 AND/OR (op2a code2 op2b)), saved while
	 the second operand is tried; see the OR handling below.  */
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (type, inner2, false, code2, op2a,
					   op2b, outer_cond_bb));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (type, inner1, false, code2, op2a,
					   op2b, outer_cond_bb));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b)) */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b,
					      outer_cond_bb)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b))) */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b,
					      outer_cond_bb)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t) */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case. where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial) */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
7395 :
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b,
		   basic_block outer_cond_bb)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* (NAME == 0) and (NAME != 1) test the negation of NAME.  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (type, op1a, invert, code2, op2a,
					  op2b, outer_cond_bb);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = and_var_with_comparison (type, arg, invert, code2,
						      op2a, op2b,
						      outer_cond_bb);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
7522 :
7523 : static basic_block fosa_bb;
7524 : static vec<std::pair<tree, flow_sensitive_info_storage> > *fosa_unwind;
7525 : static tree
7526 28228623 : follow_outer_ssa_edges (tree val)
7527 : {
7528 28228623 : if (TREE_CODE (val) == SSA_NAME
7529 28228623 : && !SSA_NAME_IS_DEFAULT_DEF (val))
7530 : {
7531 27760674 : basic_block def_bb = gimple_bb (SSA_NAME_DEF_STMT (val));
7532 27760674 : if (!def_bb
7533 8269024 : || def_bb == fosa_bb
7534 32347843 : || (dom_info_available_p (CDI_DOMINATORS)
7535 4587169 : && (def_bb == fosa_bb
7536 4587169 : || dominated_by_p (CDI_DOMINATORS, fosa_bb, def_bb))))
7537 25523772 : return val;
7538 : /* We cannot temporarily rewrite stmts with undefined overflow
7539 : behavior, so avoid expanding them. */
7540 4453010 : if ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (val))
7541 260078 : || POINTER_TYPE_P (TREE_TYPE (val)))
7542 4349516 : && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (val)))
7543 : return NULL_TREE;
7544 1094176 : flow_sensitive_info_storage storage;
7545 1094176 : storage.save_and_clear (val);
7546 : /* If the definition does not dominate fosa_bb temporarily reset
7547 : flow-sensitive info. */
7548 1094176 : fosa_unwind->safe_push (std::make_pair (val, storage));
7549 1094176 : return val;
7550 : }
7551 : return val;
7552 : }
7553 :
/* Helper function for maybe_fold_and_comparisons and maybe_fold_or_comparisons
   : try to simplify the AND/OR of the ssa variable VAR with the comparison
   specified by (OP2A CODE2 OP2B) from match.pd.  Return NULL_EXPR if we can't
   simplify this to a single expression.  As we are going to lower the cost
   of building SSA names / gimple stmts significantly, we need to allocate
   them on the stack.  This will cause the code to be a bit ugly.  */

static tree
maybe_fold_comparisons_from_match_pd (tree type, enum tree_code code,
				      enum tree_code code1,
				      tree op1a, tree op1b,
				      enum tree_code code2, tree op2a,
				      tree op2b,
				      basic_block outer_cond_bb)
{
  /* Allocate gimple stmt1 on the stack.  */
  gassign *stmt1
    = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
  gimple_init (stmt1, GIMPLE_ASSIGN, 3);
  gimple_assign_set_rhs_code (stmt1, code1);
  gimple_assign_set_rhs1 (stmt1, op1a);
  gimple_assign_set_rhs2 (stmt1, op1b);
  gimple_set_bb (stmt1, NULL);

  /* Allocate gimple stmt2 on the stack.  */
  gassign *stmt2
    = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
  gimple_init (stmt2, GIMPLE_ASSIGN, 3);
  gimple_assign_set_rhs_code (stmt2, code2);
  gimple_assign_set_rhs1 (stmt2, op2a);
  gimple_assign_set_rhs2 (stmt2, op2b);
  gimple_set_bb (stmt2, NULL);

  /* Allocate SSA names(lhs1) on the stack.  */
  alignas (tree_node) unsigned char lhs1buf[sizeof (tree_ssa_name)];
  tree lhs1 = (tree) &lhs1buf[0];
  memset (lhs1, 0, sizeof (tree_ssa_name));
  TREE_SET_CODE (lhs1, SSA_NAME);
  TREE_TYPE (lhs1) = type;
  init_ssa_name_imm_use (lhs1);

  /* Allocate SSA names(lhs2) on the stack.  */
  alignas (tree_node) unsigned char lhs2buf[sizeof (tree_ssa_name)];
  tree lhs2 = (tree) &lhs2buf[0];
  memset (lhs2, 0, sizeof (tree_ssa_name));
  TREE_SET_CODE (lhs2, SSA_NAME);
  TREE_TYPE (lhs2) = type;
  init_ssa_name_imm_use (lhs2);

  gimple_assign_set_lhs (stmt1, lhs1);
  gimple_assign_set_lhs (stmt2, lhs2);

  /* Resimplify (lhs1 CODE lhs2), restricting the lookup of defining
     statements to OUTER_COND_BB (and its dominators) if given.  */
  gimple_match_op op (gimple_match_cond::UNCOND, code,
		      type, gimple_assign_lhs (stmt1),
		      gimple_assign_lhs (stmt2));
  fosa_bb = outer_cond_bb;
  auto_vec<std::pair<tree, flow_sensitive_info_storage>, 8> unwind_stack;
  fosa_unwind = &unwind_stack;
  if (op.resimplify (NULL, (!outer_cond_bb
			    ? follow_all_ssa_edges : follow_outer_ssa_edges)))
    {
      fosa_unwind = NULL;
      /* Restore flow-sensitive info cleared by follow_outer_ssa_edges.  */
      for (auto p : unwind_stack)
	p.second.restore (p.first);
      if (gimple_simplified_result_is_gimple_val (&op))
	{
	  tree res = op.ops[0];
	  /* A result equal to one of the stack-allocated fake LHSs means
	     the whole expression reduced to the corresponding input.  */
	  if (res == lhs1)
	    return build2 (code1, type, op1a, op1b);
	  else if (res == lhs2)
	    return build2 (code2, type, op2a, op2b);
	  else
	    return res;
	}
      else if (op.code.is_tree_code ()
	       && TREE_CODE_CLASS ((tree_code)op.code) == tcc_comparison)
	{
	  tree op0 = op.ops[0];
	  tree op1 = op.ops[1];
	  /* The fake LHSs must not escape into the built tree.  */
	  if (op0 == lhs1 || op0 == lhs2 || op1 == lhs1 || op1 == lhs2)
	    return NULL_TREE;  /* not simple */

	  return build2 ((enum tree_code)op.code, op.type, op0, op1);
	}
    }
  fosa_unwind = NULL;
  /* Restore flow-sensitive info cleared by follow_outer_ssa_edges.  */
  for (auto p : unwind_stack)
    p.second.restore (p.first);

  return NULL_TREE;
}
7645 :
7646 : /* Return TRUE and set op[0] if T, following all SSA edges, is a type
7647 : conversion. Reject loads if LOAD is NULL, otherwise set *LOAD if a
7648 : converting load is found. */
7649 :
7650 : static bool
7651 1472087 : gimple_convert_def_p (tree t, tree op[1], gimple **load = NULL)
7652 : {
7653 1472087 : bool ret = false;
7654 :
7655 1472087 : if (TREE_CODE (t) == SSA_NAME
7656 1472087 : && !SSA_NAME_IS_DEFAULT_DEF (t))
7657 830053 : if (gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (t)))
7658 : {
7659 759573 : bool load_p = gimple_assign_load_p (def);
7660 759573 : if (load_p && !load)
7661 : return false;
7662 575415 : switch (gimple_assign_rhs_code (def))
7663 : {
7664 8973 : CASE_CONVERT:
7665 8973 : op[0] = gimple_assign_rhs1 (def);
7666 8973 : ret = true;
7667 8973 : break;
7668 :
7669 2296 : case VIEW_CONVERT_EXPR:
7670 2296 : op[0] = TREE_OPERAND (gimple_assign_rhs1 (def), 0);
7671 2296 : ret = true;
7672 2296 : break;
7673 :
7674 : default:
7675 : break;
7676 : }
7677 :
7678 11269 : if (ret && load_p)
7679 0 : *load = def;
7680 : }
7681 :
7682 : return ret;
7683 : }
7684 :
7685 : /* Return TRUE and set op[*] if T, following all SSA edges, resolves to a
7686 : binary expression with code CODE. */
7687 :
7688 : static bool
7689 1493893 : gimple_binop_def_p (enum tree_code code, tree t, tree op[2])
7690 : {
7691 1493893 : if (TREE_CODE (t) == SSA_NAME
7692 1493893 : && !SSA_NAME_IS_DEFAULT_DEF (t))
7693 850834 : if (gimple *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (t)))
7694 778604 : if (gimple_assign_rhs_code (def) == code)
7695 : {
7696 38372 : op[0] = gimple_assign_rhs1 (def);
7697 38372 : op[1] = gimple_assign_rhs2 (def);
7698 38372 : return true;
7699 : }
7700 : return false;
7701 : }
7702 : /* Subroutine for fold_truth_andor_1: decode a field reference.
7703 :
7704 : If *PEXP is a comparison reference, we return the innermost reference.
7705 :
7706 : *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
7707 : set to the starting bit number.
7708 :
7709 : *PVOLATILEP is set to 1 if the any expression encountered is volatile;
7710 : otherwise it is not changed.
7711 :
7712 : *PUNSIGNEDP is set to the signedness of the field.
7713 :
7714 : *PREVERSEP is set to the storage order of the field.
7715 :
7716 : *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any. If
7717 : *PAND_MASK is initially set to a mask with nonzero precision, that mask is
7718 : combined with the found mask, or adjusted in precision to match.
7719 :
7720 : *PSIGNBIT is set to TRUE if, before clipping to *PBITSIZE, the mask
7721 : encompassed bits that corresponded to extensions of the sign bit.
7722 :
7723 : *PXORP is to be FALSE if EXP might be a XOR used in a compare, in which
7724 : case, if PXOR_CMP_OP is a zero constant, it will be overridden with *PEXP,
7725 : *PXORP will be set to TRUE, *PXOR_AND_MASK will be copied from *PAND_MASK,
7726 : and the left-hand operand of the XOR will be decoded. If *PXORP is TRUE,
7727 : PXOR_CMP_OP and PXOR_AND_MASK are supposed to be NULL, and then the
7728 : right-hand operand of the XOR will be decoded.
7729 :
7730 : *LOAD is set to the load stmt of the innermost reference, if any,
 7731                 :    and NULL otherwise.
7732 :
7733 : LOC[0..3] are filled in as conversion, masking, shifting and loading
7734 : operations are located.
7735 :
7736 : Return 0 if this is not a component reference or is one that we can't
7737 : do anything with. */
7738 :
    7739             : static tree
    7740      536294 : decode_field_reference (tree *pexp, HOST_WIDE_INT *pbitsize,
    7741             :                         HOST_WIDE_INT *pbitpos,
    7742             :                         bool *punsignedp, bool *preversep, bool *pvolatilep,
    7743             :                         wide_int *pand_mask, bool *psignbit,
    7744             :                         bool *pxorp, tree *pxor_cmp_op, wide_int *pxor_and_mask,
    7745             :                         gimple **pload, location_t loc[4])
    7746             : {
    7747      536294 :   tree exp = *pexp;
    7748      536294 :   tree outer_type = 0;
    7749      536294 :   wide_int and_mask;
    7750      536294 :   tree inner, offset;
    7751      536294 :   int shiftrt = 0;
    7752      536294 :   tree res_ops[2];
    7753      536294 :   machine_mode mode;
    7754      536294 :   bool convert_before_shift = false;
    7755      536294 :   bool signbit = false;
    7756      536294 :   bool xorp = false;
    7757      536294 :   tree xor_cmp_op;
    7758      536294 :   wide_int xor_and_mask;
    7759      536294 :   gimple *load = NULL;
    7760             : 
    7761             :   /* All the optimizations using this function assume integer fields.
    7762             :      There are problems with FP fields since the type_for_size call
    7763             :      below can fail for, e.g., XFmode.  */
    7764      536294 :   if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    7765             :     return NULL_TREE;
    7766             : 
    7767             :   /* Drop casts, saving only the outermost type, effectively used in
    7768             :      the compare.  We can deal with at most one conversion, and it may
    7769             :      appear at various points in the chain of recognized preparation
    7770             :      statements.  Earlier optimizers will often have already dropped
    7771             :      unneeded extensions, but they may survive, as in PR118046.  ???
    7772             :      Can we do better and allow multiple conversions, perhaps taking
    7773             :      note of the narrowest intermediate type, sign extensions and
    7774             :      whatnot?  */
    7775      497965 :   if (!outer_type && gimple_convert_def_p (exp, res_ops))
    7776             :     {
    7777       10545 :       outer_type = TREE_TYPE (exp);
    7778       10545 :       loc[0] = gimple_location (SSA_NAME_DEF_STMT (exp));
    7779       10545 :       exp = res_ops[0];
    7780             :     }
    7781             : 
    7782             :   /* Recognize and save a masking operation.  Combine it with an
    7783             :      incoming mask.  */
    7784      497965 :   if (gimple_binop_def_p (BIT_AND_EXPR, exp, res_ops)
    7785      497965 :       && TREE_CODE (res_ops[1]) == INTEGER_CST)
    7786             :     {
    7787       24134 :       loc[1] = gimple_location (SSA_NAME_DEF_STMT (exp));
    7788       24134 :       exp = res_ops[0];
    7789       24134 :       and_mask = wi::to_wide (res_ops[1]);
                     :       /* The incoming mask (*PAND_MASK) and the newly-found mask may
                     :          have different precisions; widen the narrower one before
                     :          combining them with AND.  */
    7790       24134 :       unsigned prec_in = pand_mask->get_precision ();
    7791       24134 :       if (prec_in)
    7792             :         {
    7793          52 :           unsigned prec_op = and_mask.get_precision ();
    7794          52 :           if (prec_in >= prec_op)
    7795             :             {
    7796          52 :               if (prec_in > prec_op)
    7797           0 :                 and_mask = wide_int::from (and_mask, prec_in, UNSIGNED);
    7798          52 :               and_mask &= *pand_mask;
    7799             :             }
    7800             :           else
    7801           0 :             and_mask &= wide_int::from (*pand_mask, prec_op, UNSIGNED);
    7802             :         }
    7803             :     }
    7804             :   else
    7805      473831 :     and_mask = *pand_mask;
    7806             : 
    7807             :   /* Turn (a ^ b) [!]= 0 into a [!]= b.  */
    7808      497965 :   if (pxorp && gimple_binop_def_p (BIT_XOR_EXPR, exp, res_ops))
    7809             :     {
    7810             :       /* No location recorded for this one, it's entirely subsumed by the
    7811             :          compare.  */
    7812        8250 :       if (*pxorp)
    7813             :         {
                     :           /* Second call on this compare: decode the XOR's right-hand
    	             :              operand instead.  */
    7814        4122 :           exp = res_ops[1];
    7815        4122 :           gcc_checking_assert (!pxor_cmp_op && !pxor_and_mask);
    7816             :         }
    7817        4128 :       else if (!pxor_cmp_op)
    7818             :         /* Not much we can do when xor appears in the right-hand compare
    7819             :            operand.  */
    7820             :         return NULL_TREE;
    7821        4126 :       else if (integer_zerop (*pxor_cmp_op))
    7822             :         {
    7823        4122 :           xorp = true;
    7824        4122 :           exp = res_ops[0];
                     :           /* Save the original left-hand operand, so the caller can use
                     :              it as the value to compare the XOR's other operand with.  */
    7825        4122 :           xor_cmp_op = *pexp;
    7826        4122 :           xor_and_mask = *pand_mask;
    7827             :         }
    7828             :     }
    7829             : 
    7830             :   /* Another chance to drop conversions.  */
    7831      497963 :   if (!outer_type && gimple_convert_def_p (exp, res_ops))
    7832             :     {
    7833         714 :       outer_type = TREE_TYPE (exp);
    7834         714 :       loc[0] = gimple_location (SSA_NAME_DEF_STMT (exp));
    7835         714 :       exp = res_ops[0];
    7836             :     }
    7837             : 
    7838             :   /* Take note of shifts.  */
    7839      497963 :   if (gimple_binop_def_p (RSHIFT_EXPR, exp, res_ops)
    7840      497963 :       && TREE_CODE (res_ops[1]) == INTEGER_CST)
    7841             :     {
    7842         298 :       loc[2] = gimple_location (SSA_NAME_DEF_STMT (exp));
    7843         298 :       exp = res_ops[0];
    7844         298 :       if (!tree_fits_shwi_p (res_ops[1]))
    7845             :         return NULL_TREE;
    7846         298 :       shiftrt = tree_to_shwi (res_ops[1]);
                     :       /* Only strictly positive shift counts are handled.  */
    7847         298 :       if (shiftrt <= 0)
    7848             :         return NULL_TREE;
    7849             :     }
    7850             : 
    7851             :   /* Yet another chance to drop conversions.  This one is allowed to
    7852             :      match a converting load, subsuming the load identification block
    7853             :      below.  */
    7854      497963 :   if (!outer_type && gimple_convert_def_p (exp, res_ops, &load))
    7855             :     {
    7856          10 :       outer_type = TREE_TYPE (exp);
    7857          10 :       loc[0] = gimple_location (SSA_NAME_DEF_STMT (exp));
    7858          10 :       if (load)
    7859           0 :         loc[3] = gimple_location (load);
    7860          10 :       exp = res_ops[0];
    7861             :       /* This looks backwards, but we're going back the def chain, so if we
    7862             :          find the conversion here, after finding a shift, that's because the
    7863             :          convert appears before the shift, and we should thus adjust the bit
    7864             :          pos and size because of the shift after adjusting it due to type
    7865             :          conversion.  */
    7866          10 :       convert_before_shift = true;
    7867             :     }
    7868             : 
    7869             :   /* Identify the load, if there is one.  */
    7870      497963 :   if (!load && TREE_CODE (exp) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (exp))
    7871             :     {
    7872      283025 :       gimple *def = SSA_NAME_DEF_STMT (exp);
    7873      283025 :       if (gimple_assign_load_p (def))
    7874             :         {
    7875      216284 :           loc[3] = gimple_location (def);
    7876      216284 :           load = def;
    7877      216284 :           exp = gimple_assign_rhs1 (def);
    7878             :         }
    7879             :     }
    7880             : 
    7881             :   /* Identify the relevant bits.  */
    7882      497963 :   poly_int64 poly_bitsize, poly_bitpos;
    7883      497963 :   int unsignedp, reversep = *preversep, volatilep = *pvolatilep;
    7884      497963 :   inner = get_inner_reference (exp, &poly_bitsize, &poly_bitpos, &offset,
    7885             :                                &mode, &unsignedp, &reversep, &volatilep);
    7886             : 
    7887      497963 :   HOST_WIDE_INT bs, bp;
    7888      497963 :   if (!poly_bitsize.is_constant (&bs)
    7889      497963 :       || !poly_bitpos.is_constant (&bp)
    7890      497963 :       || bs <= shiftrt
    7891      497963 :       || offset != 0
    7892      496794 :       || TREE_CODE (inner) == PLACEHOLDER_EXPR
    7893             :       /* Reject out-of-bound accesses (PR79731, PR118514).  */
    7894      496794 :       || !access_in_bounds_of_type_p (TREE_TYPE (inner), bs, bp)
    7895      496768 :       || (INTEGRAL_TYPE_P (TREE_TYPE (inner))
    7896      306025 :           && !type_has_mode_precision_p (TREE_TYPE (inner))))
    7897       29113 :     return NULL_TREE;
    7898             : 
    7899             :   /* Adjust shifts...  */
    7900      468850 :   if (convert_before_shift
    7901      468850 :       && outer_type && bs > TYPE_PRECISION (outer_type))
    7902             :     {
    7903           3 :       HOST_WIDE_INT excess = bs - TYPE_PRECISION (outer_type);
    7904           3 :       if (reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
    7905           0 :         bp += excess;
    7906             :       bs -= excess;
    7907             :     }
    7908             : 
    7909      468850 :   if (shiftrt)
    7910             :     {
    7911             :       /* Punt if we're shifting by more than the loaded bitfield (after
    7912             :          adjustment), or if there's a shift after a change of signedness.
    7913             :          When comparing this field with a constant, we'll check that the
    7914             :          constant is a proper sign- or zero-extension (depending on signedness)
    7915             :          of a value that would fit in the selected portion of the bitfield.  A
    7916             :          shift after a change of signedness would make the extension
    7917             :          non-uniform, and we can't deal with that (yet ???).  See
    7918             :          gcc.dg/field-merge-22.c for a test that would go wrong.  */
    7919         298 :       if (bs <= shiftrt
    7920         298 :           || (convert_before_shift
    7921          10 :               && outer_type && unsignedp != TYPE_UNSIGNED (outer_type)))
    7922             :         return NULL_TREE;
    7923         290 :       if (!reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
    7924         290 :         bp += shiftrt;
    7925         290 :       bs -= shiftrt;
    7926             :     }
    7927             : 
    7928             :   /* ... and bit position.  */
    7929      468842 :   if (!convert_before_shift
    7930      468842 :       && outer_type && bs > TYPE_PRECISION (outer_type))
    7931             :     {
    7932        5190 :       HOST_WIDE_INT excess = bs - TYPE_PRECISION (outer_type);
    7933        5190 :       if (reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
    7934           0 :         bp += excess;
    7935             :       bs -= excess;
    7936             :     }
    7937             : 
    7938             :   /* If the number of bits in the reference is the same as the bitsize of
    7939             :      the outer type, then the outer type gives the signedness.  Otherwise
    7940             :      (in case of a small bitfield) the signedness is unchanged.  */
    7941      468842 :   if (outer_type && bs == TYPE_PRECISION (outer_type))
    7942        8772 :     unsignedp = TYPE_UNSIGNED (outer_type);
    7943             : 
    7944             :   /* Make the mask the expected width.  */
    7945      468842 :   if (and_mask.get_precision () != 0)
    7946             :     {
    7947             :       /* If the AND_MASK encompasses bits that would be extensions of
    7948             :          the sign bit, set SIGNBIT.  */
    7949       27795 :       if (!unsignedp
    7950        2624 :           && and_mask.get_precision () > bs
    7951       30461 :           && (and_mask & wi::mask (bs, true, and_mask.get_precision ())) != 0)
    7952             :         signbit = true;
    7953       27795 :       and_mask = wide_int::from (and_mask, bs, UNSIGNED);
    7954             :     }
    7955             : 
                     :   /* All checks passed: commit the decoded results to the output
                     :      parameters.  */
    7956      468842 :   *pexp = exp;
    7957      468842 :   *pload = load;
    7958      468842 :   *pbitsize = bs;
    7959      468842 :   *pbitpos = bp;
    7960      468842 :   *punsignedp = unsignedp;
    7961      468842 :   *preversep = reversep;
    7962      468842 :   *pvolatilep = volatilep;
    7963      468842 :   *psignbit = signbit;
    7964      468842 :   *pand_mask = and_mask;
                     :   /* Only report XOR data if we committed to the a^b == 0 form above.  */
    7965      468842 :   if (xorp)
    7966             :     {
    7967        4122 :       *pxorp = xorp;
    7968        4122 :       *pxor_cmp_op = xor_cmp_op;
    7969        4122 :       *pxor_and_mask = xor_and_mask;
    7970             :     }
    7971             : 
    7972      468842 :   return inner;
    7973      536294 : }
7974 :
7975 : /* Return the one bitpos within bit extents L or R that is at an
7976 : ALIGN-bit alignment boundary, or -1 if there is more than one such
7977 : boundary, if there isn't any, or if there is any such boundary
7978 : between the extents. L and R are given by bitpos and bitsize. If
7979 : it doesn't return -1, there are two consecutive ALIGN-bit words
7980 : that contain both extents, and at least one of the extents
7981 : straddles across the returned alignment boundary. */
7982 :
7983 : static inline HOST_WIDE_INT
7984 28410 : compute_split_boundary_from_align (HOST_WIDE_INT align,
7985 : HOST_WIDE_INT l_bitpos,
7986 : HOST_WIDE_INT l_bitsize,
7987 : HOST_WIDE_INT r_bitpos,
7988 : HOST_WIDE_INT r_bitsize)
7989 : {
7990 28410 : HOST_WIDE_INT amask = ~(align - 1);
7991 :
7992 28410 : HOST_WIDE_INT first_bit = MIN (l_bitpos, r_bitpos);
7993 28410 : HOST_WIDE_INT end_bit = MAX (l_bitpos + l_bitsize, r_bitpos + r_bitsize);
7994 :
7995 28410 : HOST_WIDE_INT boundary = (end_bit - 1) & amask;
7996 :
7997 : /* Make sure we're crossing no more than one alignment boundary.
7998 :
7999 : ??? We don't have logic to recombine loads of two adjacent
8000 : fields that each crosses a different alignment boundary, so
8001 : as to load the middle word only once, if other words can't be
8002 : otherwise recombined. */
8003 28410 : if (boundary - first_bit > align)
8004 : return -1;
8005 :
8006 11155 : HOST_WIDE_INT l_start_word = l_bitpos & amask;
8007 11155 : HOST_WIDE_INT l_end_word = (l_bitpos + l_bitsize - 1) & amask;
8008 :
8009 11155 : HOST_WIDE_INT r_start_word = r_bitpos & amask;
8010 11155 : HOST_WIDE_INT r_end_word = (r_bitpos + r_bitsize - 1) & amask;
8011 :
8012 : /* If neither field straddles across an alignment boundary, it's no
8013 : use to even try to merge them. */
8014 11155 : if (l_start_word == l_end_word && r_start_word == r_end_word)
8015 10848 : return -1;
8016 :
8017 : return boundary;
8018 : }
8019 :
8020 : /* Make a bit_field_ref. If POINT is NULL, return the BIT_FIELD_REF.
8021 : Otherwise, build and insert a load stmt before POINT, and return
8022 : the SSA_NAME. ??? Rewrite LOAD in terms of the bitfield? */
8023 :
    8024             : static tree
    8025        4553 : make_bit_field_load (location_t loc, tree inner, tree orig_inner, tree type,
    8026             :                      HOST_WIDE_INT bitsize, poly_int64 bitpos,
    8027             :                      bool unsignedp, bool reversep, gimple *point)
    8028             : {
                     :   /* If the caller gave no location, inherit the insertion point's.  */
    8029        4553 :   if (point && loc == UNKNOWN_LOCATION)
    8030          18 :     loc = gimple_location (point);
    8031             : 
    8032        4553 :   tree ref = make_bit_field_ref (loc, unshare_expr (inner),
    8033             :                                  unshare_expr (orig_inner),
    8034             :                                  type, bitsize, bitpos,
    8035             :                                  unsignedp, reversep);
    8036        4553 :   if (!point)
    8037             :     return ref;
    8038             : 
    8039             :   /* If we're remaking the same load, reuse the SSA NAME it is already loaded
    8040             :      into.  */
    8041        4402 :   if (gimple_assign_load_p (point)
    8042        4402 :       && operand_equal_p (ref, gimple_assign_rhs1 (point)))
    8043             :     {
    8044        1689 :       gcc_checking_assert (TREE_CODE (gimple_assign_lhs (point)) == SSA_NAME);
    8045             :       return gimple_assign_lhs (point);
    8046             :     }
    8047             : 
                     :   /* Otherwise, gimplify the reference into a fresh load sequence.  */
    8048        2713 :   gimple_seq stmts = NULL;
    8049        2713 :   tree ret = force_gimple_operand (ref, &stmts, true, NULL_TREE);
    8050             : 
    8051             :   /* We know the vuse is supposed to end up being the same as that at the
    8052             :      original load at the insertion point, but if we don't set it, it will be a
    8053             :      generic placeholder that only the global SSA update at the end of the pass
    8054             :      would make equal, too late for us to use in further combinations.  So go
    8055             :      ahead and copy the vuse.  */
    8056             : 
    8057        2713 :   tree reaching_vuse = gimple_vuse (point);
    8058        2713 :   for (gimple_stmt_iterator i = gsi_start (stmts);
    8059        5846 :        !gsi_end_p (i); gsi_next (&i))
    8060             :     {
    8061        3133 :       gimple *new_stmt = gsi_stmt (i);
    8062        6266 :       if (gimple_has_mem_ops (new_stmt))
    8063        3133 :         gimple_set_vuse (new_stmt, reaching_vuse);
    8064             :     }
    8065             : 
    8066        2713 :   gimple_stmt_iterator gsi = gsi_for_stmt (point);
    8067        2713 :   gsi_insert_seq_before (&gsi, stmts, GSI_SAME_STMT);
    8068        2713 :   return ret;
    8069             : }
8070 :
8071 : /* Initialize ln_arg[0] and ln_arg[1] to a pair of newly-created (at
8072 : LOC) loads from INNER (from ORIG_INNER), of modes MODE and MODE2,
8073 : respectively, starting at BIT_POS, using reversed endianness if
8074 : REVERSEP. Also initialize BITPOS (the starting position of each
8075 : part into INNER), BITSIZ (the bit count starting at BITPOS),
8076 : TOSHIFT[1] (the amount by which the part and its mask are to be
8077 : shifted right to bring its least-significant bit to bit zero) and
8078 : SHIFTED (the amount by which the part, by separate loading, has
8079 : already been shifted right, but that the mask needs shifting to
8080 : match). */
8081 :
    8082             : static inline void
    8083         307 : build_split_load (tree /* out */ ln_arg[2],
    8084             :                   HOST_WIDE_INT /* out */ bitpos[2],
    8085             :                   HOST_WIDE_INT /* out */ bitsiz[2],
    8086             :                   HOST_WIDE_INT /* in[0] out[0..1] */ toshift[2],
    8087             :                   HOST_WIDE_INT /* out */ shifted[2],
    8088             :                   location_t loc, tree inner, tree orig_inner,
    8089             :                   scalar_int_mode mode, scalar_int_mode mode2,
    8090             :                   HOST_WIDE_INT bit_pos, bool reversep,
    8091             :                   gimple *point[2])
    8092             : {
    8093         307 :   scalar_int_mode modes[2] = { mode, mode2 };
    8094         307 :   bitsiz[0] = GET_MODE_BITSIZE (mode);
    8095         307 :   bitsiz[1] = GET_MODE_BITSIZE (mode2);
    8096             : 
                     :   /* Emit the two loads back to back, advancing BIT_POS by each
                     :      part's width.  */
    8097         921 :   for (int i = 0; i < 2; i++)
    8098             :     {
    8099         614 :       tree type = lang_hooks.types.type_for_mode (modes[i], 1);
    8100         614 :       if (!type)
    8101             :         {
                     :           /* Fall back to a non-standard integer type if the language
                     :              has no type for this mode.  */
    8102           0 :           type = build_nonstandard_integer_type (bitsiz[0], 1);
    8103           0 :           gcc_assert (type);
    8104             :         }
    8105         614 :       bitpos[i] = bit_pos;
    8106        1228 :       ln_arg[i] = make_bit_field_load (loc, inner, orig_inner,
    8107         614 :                                        type, bitsiz[i],
    8108         614 :                                        bit_pos, 1, reversep, point[i]);
    8109         614 :       bit_pos += bitsiz[i];
    8110             :     }
    8111             : 
                     :   /* Set up the shift bookkeeping; endianness decides which part
                     :      holds the higher-order bits.  */
    8112         307 :   toshift[1] = toshift[0];
    8113         307 :   if (reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
    8114             :     {
    8115           3 :       shifted[0] = bitsiz[1];
    8116           3 :       shifted[1] = 0;
    8117           3 :       toshift[0] = 0;
    8118             :     }
    8119             :   else
    8120             :     {
    8121         304 :       shifted[1] = bitsiz[0];
    8122         304 :       shifted[0] = 0;
    8123         304 :       toshift[1] = 0;
    8124             :     }
    8125         307 : }
8126 :
8127 : /* Make arrangements to split at bit BOUNDARY a single loaded word
8128 : (with REVERSEP bit order) LN_ARG[0], to be shifted right by
8129 : TOSHIFT[0] to bring the field of interest to the least-significant
8130 : bit. The expectation is that the same loaded word will be
8131 : propagated from part 0 to part 1, with just different shifting and
8132 : masking to extract both parts. MASK is not expected to do more
8133 : than masking out the bits that belong to the other part. See
8134 : build_split_load for more information on the other fields. */
8135 :
    8136             : static inline void
    8137          51 : reuse_split_load (tree /* in[0] out[1] */ ln_arg[2],
    8138             :                   HOST_WIDE_INT /* in[0] out[1] */ bitpos[2],
    8139             :                   HOST_WIDE_INT /* in[0] out[1] */ bitsiz[2],
    8140             :                   HOST_WIDE_INT /* in[0] out[0..1] */ toshift[2],
    8141             :                   HOST_WIDE_INT /* out */ shifted[2],
    8142             :                   wide_int /* out */ mask[2],
    8143             :                   HOST_WIDE_INT boundary, bool reversep)
    8144             : {
    8145          51 :   unsigned prec = TYPE_PRECISION (TREE_TYPE (ln_arg[0]));
    8146             : 
                     :   /* Part 1 reuses part 0's load verbatim; neither part has been
                     :      pre-shifted.  */
    8147          51 :   ln_arg[1] = ln_arg[0];
    8148          51 :   bitpos[1] = bitpos[0];
    8149          51 :   bitsiz[1] = bitsiz[0];
    8150          51 :   shifted[1] = shifted[0] = 0;
    8151             : 
                     :   /* Split the word at BOUNDARY: build complementary masks selecting
                     :      each part's bits, with endianness deciding which side of the
                     :      boundary belongs to which part.  */
    8152          51 :   if (reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
    8153             :     {
    8154           3 :       toshift[1] = toshift[0];
    8155           3 :       toshift[0] = bitpos[0] + bitsiz[0] - boundary;
    8156           3 :       mask[0] = wi::mask (toshift[0], true, prec);
    8157           3 :       mask[1] = wi::mask (toshift[0], false, prec);
    8158             :     }
    8159             :   else
    8160             :     {
    8161          48 :       toshift[1] = boundary - bitpos[1];
    8162          48 :       mask[1] = wi::mask (toshift[1], true, prec);
    8163          48 :       mask[0] = wi::mask (toshift[1], false, prec);
    8164             :     }
    8165          51 : }
8166 :
8167 : /* Find ways of folding logical expressions of LHS and RHS:
8168 :
8169 : Try to merge two comparisons to nearby fields.
8170 :
8171 : For example, if we have p->a == 2 && p->b == 4 and we can load both A and B
8172 : at once, we can do this with a comparison against the object ANDed with the
8173 : a mask.
8174 :
8175 : If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
8176 : operations to do this with one comparison, loading both fields from P at
8177 : once, and likewise from Q.
8178 :
8179 : Herein, loading at once means loading from within the same alignment
8180 : boundary for the enclosing object. If (packed) fields cross such alignment
8181 : boundaries, we may still recombine the compares, so that loads do not cross
8182 : the boundaries.
8183 :
8184 : CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
8185 : TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
8186 :
8187 : TRUTH_TYPE is the type of the logical operand.
8188 :
8189 : LHS is denoted as LL_ARG LCODE LR_ARG.
8190 :
8191 : RHS is denoted as RL_ARG RCODE RR_ARG.
8192 :
8193 : LHS is assumed to dominate RHS.
8194 :
8195 : Combined loads are inserted next to preexisting loads, once we determine
8196 : that the combination is viable, and the combined condition references new
8197 : SSA_NAMEs that hold the loaded values. Since the original loads are
8198 : verified to have the same gimple_vuse, the insertion point doesn't matter
8199 : for correctness. ??? The loads may be a lot earlier than the compares, and
8200 : it's conceivable that one or two loads for RHS appear before those for LHS.
8201 : It could be advantageous to try to place the loads optimally, taking
8202 : advantage of knowing whether RHS is accessed before LHS, or that both are
8203 : accessed before both compares, but we don't do that (yet?).
8204 :
8205 : SEPARATEP should be NULL if the combined condition must be returned as a
8206 : single expression, even if it is a compound condition. This must only be
8207 : done if LHS and RHS are adjacent, without intervening conditions, and the
8208 : combined condition is to replace RHS, while LHS is dropped altogether.
8209 :
8210 : Otherwise, SEPARATEP must be a non-NULL pointer to a NULL_TREE, that may be
8211 : replaced by a part of the compound condition that could replace RHS, while
8212 : the returned expression replaces LHS. This works whether or not LHS and RHS
8213 : are adjacent, as long as there aren't VDEFs or other side effects between
8214 : them.
8215 :
8216 : If the "words" accessed by RHS are already accessed by LHS, this won't
8217 : matter, but if RHS accesses "words" that LHS doesn't, then *SEPARATEP will
8218 : be set to the compares that should take RHS's place. By "words" we mean
    8219             :    contiguous bits that do not cross a TYPE_ALIGN boundary of the accessed
8220 : object's type.
8221 :
8222 : We return the simplified tree or 0 if no optimization is possible. */
8223 :
8224 : tree
8225 234670 : fold_truth_andor_for_ifcombine (enum tree_code code, tree truth_type,
8226 : location_t lloc, enum tree_code lcode,
8227 : tree ll_arg, tree lr_arg,
8228 : location_t rloc, enum tree_code rcode,
8229 : tree rl_arg, tree rr_arg,
8230 : tree *separatep)
8231 : {
8232 : /* If this is the "or" of two comparisons, we can do something if
8233 : the comparisons are NE_EXPR. If this is the "and", we can do something
8234 : if the comparisons are EQ_EXPR. I.e.,
8235 : (a->b == 2 && a->c == 4) can become (a->new == NEW).
8236 :
8237 : WANTED_CODE is this operation code. For single bit fields, we can
8238 : convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
8239 : comparison for one-bit fields. */
8240 :
8241 234670 : enum tree_code orig_code = code;
8242 234670 : enum tree_code wanted_code;
8243 234670 : tree ll_inner, lr_inner, rl_inner, rr_inner;
8244 234670 : gimple *ll_load, *lr_load, *rl_load, *rr_load;
8245 234670 : HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
8246 234670 : HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
8247 234670 : HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
8248 234670 : HOST_WIDE_INT lnbitsize, lnbitpos, lnprec;
8249 234670 : HOST_WIDE_INT rnbitsize, rnbitpos, rnprec;
8250 234670 : bool ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
8251 234670 : bool ll_reversep, lr_reversep, rl_reversep, rr_reversep;
8252 234670 : bool ll_signbit, lr_signbit, rl_signbit, rr_signbit;
8253 234670 : scalar_int_mode lnmode, lnmode2, rnmode;
8254 234670 : wide_int ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
8255 234670 : wide_int l_const, r_const;
8256 234670 : tree lntype, rntype, result;
8257 234670 : HOST_WIDE_INT first_bit, end_bit;
8258 234670 : bool volatilep;
8259 234670 : bool l_split_load;
8260 :
8261 : /* These are indexed by: conv, mask, shft, load. */
8262 234670 : location_t ll_loc[4] = { lloc, lloc, lloc, UNKNOWN_LOCATION };
8263 234670 : location_t lr_loc[4] = { lloc, lloc, lloc, UNKNOWN_LOCATION };
8264 234670 : location_t rl_loc[4] = { rloc, rloc, rloc, UNKNOWN_LOCATION };
8265 234670 : location_t rr_loc[4] = { rloc, rloc, rloc, UNKNOWN_LOCATION };
8266 :
8267 234670 : gcc_checking_assert (!separatep || !*separatep);
8268 :
8269 : /* Start by getting the comparison codes. Fail if anything is volatile.
8270 : If one operand is a BIT_AND_EXPR with the constant one, treat it as if
8271 : it were surrounded with a NE_EXPR. */
8272 :
8273 234670 : if (TREE_CODE_CLASS (lcode) != tcc_comparison
8274 234670 : || TREE_CODE_CLASS (rcode) != tcc_comparison)
8275 : return 0;
8276 :
8277 : /* We don't normally find TRUTH_*IF_EXPR in gimple, but these codes may be
8278 : given by our caller to denote conditions from different blocks. */
8279 234670 : switch (code)
8280 : {
8281 : case TRUTH_AND_EXPR:
8282 : case TRUTH_ANDIF_EXPR:
8283 : code = TRUTH_AND_EXPR;
8284 : break;
8285 :
8286 0 : case TRUTH_OR_EXPR:
8287 0 : case TRUTH_ORIF_EXPR:
8288 0 : code = TRUTH_OR_EXPR;
8289 0 : break;
8290 :
8291 : default:
8292 : return 0;
8293 : }
8294 :
8295 : /* Prepare to turn compares of signed quantities with zero into sign-bit
8296 : tests. We need not worry about *_reversep here for these compare
8297 : rewrites: loads will have already been reversed before compares. Save the
8298 : precision, because [lr]l_arg may change and we won't be able to tell how
8299 : wide it was originally. */
8300 234670 : unsigned lsignbit = 0, rsignbit = 0;
8301 234670 : if ((lcode == LT_EXPR || lcode == GE_EXPR)
8302 11108 : && integer_zerop (lr_arg)
8303 3184 : && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg))
8304 237854 : && !TYPE_UNSIGNED (TREE_TYPE (ll_arg)))
8305 : {
8306 3184 : lsignbit = TYPE_PRECISION (TREE_TYPE (ll_arg));
8307 3184 : lcode = (lcode == LT_EXPR ? NE_EXPR : EQ_EXPR);
8308 : }
8309 : /* Turn compares of unsigned quantities with powers of two into
8310 : equality tests of masks. */
8311 231486 : else if ((lcode == LT_EXPR || lcode == GE_EXPR)
8312 7924 : && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg))
8313 7291 : && TYPE_UNSIGNED (TREE_TYPE (ll_arg))
8314 5187 : && TREE_CODE (lr_arg) == INTEGER_CST
8315 231486 : && wi::popcount (wi::to_wide (lr_arg)) == 1)
8316 : {
8317 0 : ll_and_mask = ~(wi::to_wide (lr_arg) - 1);
8318 0 : lcode = (lcode == GE_EXPR ? NE_EXPR : EQ_EXPR);
8319 0 : lr_arg = wide_int_to_tree (TREE_TYPE (ll_arg), ll_and_mask * 0);
8320 : }
8321 : /* Turn compares of unsigned quantities with powers of two minus one
8322 : into equality tests of masks. */
8323 462972 : else if ((lcode == LE_EXPR || lcode == GT_EXPR)
8324 27371 : && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg))
8325 27223 : && TYPE_UNSIGNED (TREE_TYPE (ll_arg))
8326 22388 : && TREE_CODE (lr_arg) == INTEGER_CST
8327 490343 : && wi::popcount (wi::to_wide (lr_arg) + 1) == 1)
8328 : {
8329 3639 : ll_and_mask = ~wi::to_wide (lr_arg);
8330 3639 : lcode = (lcode == GT_EXPR ? NE_EXPR : EQ_EXPR);
8331 3639 : lr_arg = wide_int_to_tree (TREE_TYPE (ll_arg), ll_and_mask * 0);
8332 : }
8333 : /* Likewise for the second compare. */
8334 234670 : if ((rcode == LT_EXPR || rcode == GE_EXPR)
8335 18436 : && integer_zerop (rr_arg)
8336 1717 : && INTEGRAL_TYPE_P (TREE_TYPE (rl_arg))
8337 236387 : && !TYPE_UNSIGNED (TREE_TYPE (rl_arg)))
8338 : {
8339 1717 : rsignbit = TYPE_PRECISION (TREE_TYPE (rl_arg));
8340 1717 : rcode = (rcode == LT_EXPR ? NE_EXPR : EQ_EXPR);
8341 : }
8342 232953 : else if ((rcode == LT_EXPR || rcode == GE_EXPR)
8343 16719 : && INTEGRAL_TYPE_P (TREE_TYPE (rl_arg))
8344 15768 : && TYPE_UNSIGNED (TREE_TYPE (rl_arg))
8345 2784 : && TREE_CODE (rr_arg) == INTEGER_CST
8346 232953 : && wi::popcount (wi::to_wide (rr_arg)) == 1)
8347 : {
8348 0 : rl_and_mask = ~(wi::to_wide (rr_arg) - 1);
8349 0 : rcode = (rcode == GE_EXPR ? NE_EXPR : EQ_EXPR);
8350 0 : rr_arg = wide_int_to_tree (TREE_TYPE (rl_arg), rl_and_mask * 0);
8351 : }
8352 465906 : else if ((rcode == LE_EXPR || rcode == GT_EXPR)
8353 37375 : && INTEGRAL_TYPE_P (TREE_TYPE (rl_arg))
8354 37151 : && TYPE_UNSIGNED (TREE_TYPE (rl_arg))
8355 27553 : && TREE_CODE (rr_arg) == INTEGER_CST
8356 503281 : && wi::popcount (wi::to_wide (rr_arg) + 1) == 1)
8357 : {
8358 4044 : rl_and_mask = ~wi::to_wide (rr_arg);
8359 4044 : rcode = (rcode == GT_EXPR ? NE_EXPR : EQ_EXPR);
8360 4044 : rr_arg = wide_int_to_tree (TREE_TYPE (rl_arg), rl_and_mask * 0);
8361 : }
8362 :
8363 : /* See if the comparisons can be merged. Then get all the parameters for
8364 : each side. */
8365 :
8366 234670 : if ((lcode != EQ_EXPR && lcode != NE_EXPR)
8367 202236 : || (rcode != EQ_EXPR && rcode != NE_EXPR))
8368 : return 0;
8369 :
8370 178260 : ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
8371 178260 : volatilep = 0;
8372 178260 : bool l_xor = false, r_xor = false;
8373 178260 : ll_inner = decode_field_reference (&ll_arg, &ll_bitsize, &ll_bitpos,
8374 : &ll_unsignedp, &ll_reversep, &volatilep,
8375 : &ll_and_mask, &ll_signbit,
8376 : &l_xor, &lr_arg, &lr_and_mask,
8377 : &ll_load, ll_loc);
8378 178260 : if (!ll_inner)
8379 : return 0;
8380 124647 : lr_inner = decode_field_reference (&lr_arg, &lr_bitsize, &lr_bitpos,
8381 : &lr_unsignedp, &lr_reversep, &volatilep,
8382 : &lr_and_mask, &lr_signbit, &l_xor, 0, 0,
8383 : &lr_load, lr_loc);
8384 124647 : if (!lr_inner)
8385 : return 0;
8386 121769 : rl_inner = decode_field_reference (&rl_arg, &rl_bitsize, &rl_bitpos,
8387 : &rl_unsignedp, &rl_reversep, &volatilep,
8388 : &rl_and_mask, &rl_signbit,
8389 : &r_xor, &rr_arg, &rr_and_mask,
8390 : &rl_load, rl_loc);
8391 121769 : if (!rl_inner)
8392 : return 0;
8393 111618 : rr_inner = decode_field_reference (&rr_arg, &rr_bitsize, &rr_bitpos,
8394 : &rr_unsignedp, &rr_reversep, &volatilep,
8395 : &rr_and_mask, &rr_signbit, &r_xor, 0, 0,
8396 : &rr_load, rr_loc);
8397 111618 : if (!rr_inner)
8398 : return 0;
8399 :
8400 : /* It must be true that the inner operation on the lhs of each
8401 : comparison must be the same if we are to be able to do anything.
8402 : Then see if we have constants. If not, the same must be true for
8403 : the rhs's. If one is a load and the other isn't, we have to be
8404 : conservative and avoid the optimization, otherwise we could get
8405 : SRAed fields wrong. */
8406 110808 : if (volatilep)
8407 : return 0;
8408 :
8409 110808 : if (ll_reversep != rl_reversep
8410 110808 : || ! operand_equal_p (ll_inner, rl_inner, 0))
8411 : {
8412 : /* Try swapping the operands. */
8413 75456 : if (ll_reversep != rr_reversep || rsignbit
8414 150456 : || !operand_equal_p (ll_inner, rr_inner, 0))
8415 74306 : return 0;
8416 :
8417 1177 : rcode = swap_tree_comparison (rcode);
8418 1177 : std::swap (rl_arg, rr_arg);
8419 1177 : std::swap (rl_inner, rr_inner);
8420 1177 : std::swap (rl_bitsize, rr_bitsize);
8421 1177 : std::swap (rl_bitpos, rr_bitpos);
8422 1177 : std::swap (rl_unsignedp, rr_unsignedp);
8423 1177 : std::swap (rl_reversep, rr_reversep);
8424 1177 : std::swap (rl_and_mask, rr_and_mask);
8425 1177 : std::swap (rl_signbit, rr_signbit);
8426 1177 : std::swap (rl_load, rr_load);
8427 1177 : std::swap (rl_loc, rr_loc);
8428 : }
8429 :
8430 70878 : if ((ll_load && rl_load)
8431 139630 : ? gimple_vuse (ll_load) != gimple_vuse (rl_load)
8432 2126 : : (!ll_load != !rl_load))
8433 : return 0;
8434 :
8435 : /* ??? Can we do anything with these? */
8436 36130 : if (lr_signbit || rr_signbit)
8437 : return 0;
8438 :
8439 : /* If the mask encompassed extensions of the sign bit before
8440 : clipping, try to include the sign bit in the test. If we're not
8441 : comparing with zero, don't even try to deal with it (for now?).
8442 : If we've already commited to a sign test, the extended (before
8443 : clipping) mask could already be messing with it. */
8444 36130 : if (ll_signbit)
8445 : {
8446 4 : if (!integer_zerop (lr_arg) || lsignbit)
8447 0 : return 0;
8448 4 : wide_int sign = wi::mask (ll_bitsize - 1, true, ll_bitsize);
8449 4 : if (!ll_and_mask.get_precision ())
8450 0 : ll_and_mask = sign;
8451 : else
8452 4 : ll_and_mask |= sign;
8453 4 : }
8454 :
8455 36130 : if (rl_signbit)
8456 : {
8457 4 : if (!integer_zerop (rr_arg) || rsignbit)
8458 1 : return 0;
8459 3 : wide_int sign = wi::mask (rl_bitsize - 1, true, rl_bitsize);
8460 3 : if (!rl_and_mask.get_precision ())
8461 0 : rl_and_mask = sign;
8462 : else
8463 3 : rl_and_mask |= sign;
8464 3 : }
8465 :
8466 36129 : if (TREE_CODE (lr_arg) == INTEGER_CST
8467 29529 : && TREE_CODE (rr_arg) == INTEGER_CST)
8468 : {
8469 29123 : l_const = wi::to_wide (lr_arg);
8470 : /* We don't expect masks on constants, but if there are any, apply
8471 : them now. */
8472 29123 : if (lr_and_mask.get_precision ())
8473 0 : l_const &= wide_int::from (lr_and_mask,
8474 0 : l_const.get_precision (), UNSIGNED);
8475 29123 : r_const = wi::to_wide (rr_arg);
8476 29123 : if (rr_and_mask.get_precision ())
8477 0 : r_const &= wide_int::from (rr_and_mask,
8478 0 : r_const.get_precision (), UNSIGNED);
8479 29123 : lr_reversep = ll_reversep;
8480 : }
8481 7006 : else if (lr_reversep != rr_reversep
8482 7006 : || ! operand_equal_p (lr_inner, rr_inner, 0)
8483 12846 : || ((lr_load && rr_load)
8484 17439 : ? gimple_vuse (lr_load) != gimple_vuse (rr_load)
8485 27 : : (!lr_load != !rr_load)))
8486 1196 : return 0;
8487 :
8488 : /* If we found sign tests, finish turning them into bit tests. */
8489 :
8490 34933 : if (lsignbit)
8491 : {
8492 44 : wide_int sign = wi::mask (ll_bitsize - 1, true, ll_bitsize);
8493 : /* If ll_arg is zero-extended and we're testing the sign bit, we know
8494 : what the result should be. Shifting the sign bit out of sign will get
8495 : us to mask the entire field out, yielding zero, i.e., the sign bit of
8496 : the zero-extended value. We know the masked value is being compared
8497 : with zero, so the compare will get us the result we're looking
8498 : for: TRUE if EQ_EXPR, FALSE if NE_EXPR. */
8499 44 : if (lsignbit > ll_bitsize && ll_unsignedp)
8500 1 : sign <<= 1;
8501 44 : if (!ll_and_mask.get_precision ())
8502 43 : ll_and_mask = sign;
8503 : else
8504 1 : ll_and_mask &= sign;
8505 44 : if (l_xor)
8506 : {
8507 1 : if (ll_bitsize != lr_bitsize)
8508 1 : return 0;
8509 0 : if (!lr_and_mask.get_precision ())
8510 0 : lr_and_mask = sign;
8511 : else
8512 0 : lr_and_mask &= sign;
8513 0 : if (l_const.get_precision ())
8514 0 : l_const &= wide_int::from (lr_and_mask,
8515 0 : l_const.get_precision (), UNSIGNED);
8516 : }
8517 44 : }
8518 :
8519 34932 : if (rsignbit)
8520 : {
8521 170 : wide_int sign = wi::mask (rl_bitsize - 1, true, rl_bitsize);
8522 170 : if (rsignbit > rl_bitsize && rl_unsignedp)
8523 0 : sign <<= 1;
8524 170 : if (!rl_and_mask.get_precision ())
8525 170 : rl_and_mask = sign;
8526 : else
8527 0 : rl_and_mask &= sign;
8528 170 : if (r_xor)
8529 : {
8530 16 : if (rl_bitsize != rr_bitsize)
8531 0 : return 0;
8532 16 : if (!rr_and_mask.get_precision ())
8533 16 : rr_and_mask = sign;
8534 : else
8535 0 : rr_and_mask &= sign;
8536 16 : if (r_const.get_precision ())
8537 24 : r_const &= wide_int::from (rr_and_mask,
8538 12 : r_const.get_precision (), UNSIGNED);
8539 : }
8540 170 : }
8541 :
8542 : /* If either comparison code is not correct for our logical operation,
8543 : fail. However, we can convert a one-bit comparison against zero into
8544 : the opposite comparison against that bit being set in the field. */
8545 :
8546 34932 : wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
8547 34932 : if (lcode != wanted_code)
8548 : {
8549 4529 : if (l_const.get_precision ()
8550 4483 : && l_const == 0
8551 1546 : && ll_and_mask.get_precision ()
8552 4959 : && wi::popcount (ll_and_mask) == 1)
8553 : {
8554 : /* Make the left operand unsigned, since we are only interested
8555 : in the value of one bit. Otherwise we are doing the wrong
8556 : thing below. */
8557 305 : ll_unsignedp = 1;
8558 305 : l_const = ll_and_mask;
8559 : }
8560 : else
8561 4224 : return 0;
8562 : }
8563 :
8564 : /* This is analogous to the code for l_const above. */
8565 30708 : if (rcode != wanted_code)
8566 : {
8567 854 : if (r_const.get_precision ()
8568 854 : && r_const == 0
8569 829 : && rl_and_mask.get_precision ()
8570 1617 : && wi::popcount (rl_and_mask) == 1)
8571 : {
8572 601 : rl_unsignedp = 1;
8573 601 : r_const = rl_and_mask;
8574 : }
8575 : else
8576 253 : return 0;
8577 : }
8578 :
8579 : /* This will be bumped to 2 if any of the field pairs crosses an
8580 : alignment boundary, so the merged compare has to be done in two
8581 : parts. */
8582 91365 : int parts = 1;
8583 : /* Set to true if the second combined compare should come first,
8584 : e.g., because the second original compare accesses a word that
8585 : the first one doesn't, and the combined compares access those in
8586 : cmp[0]. */
8587 91365 : bool first1 = false;
8588 : /* Set to true if the first original compare is not the one being
8589 : split. */
8590 91365 : bool maybe_separate = false;
8591 :
8592 : /* The following 2-dimensional arrays use the first index to
8593 : identify left(0)- vs right(1)-hand compare operands, and the
8594 : second one to identify merged compare parts. */
8595 : /* The memory loads or constants to be compared. */
8596 : tree ld_arg[2][2];
8597 : /* The first bit of the corresponding inner object that the
8598 : corresponding LD_ARG covers. */
8599 : HOST_WIDE_INT bitpos[2][2];
8600 : /* The bit count starting at BITPOS that the corresponding LD_ARG
8601 : covers. */
8602 : HOST_WIDE_INT bitsiz[2][2];
8603 : /* The number of bits by which LD_ARG has already been shifted
8604 : right, WRT mask. */
8605 : HOST_WIDE_INT shifted[2][2];
8606 : /* The number of bits by which both LD_ARG and MASK need shifting to
8607 : bring its least-significant bit to bit zero. */
8608 : HOST_WIDE_INT toshift[2][2];
8609 : /* An additional mask to be applied to LD_ARG, to remove any bits
8610 : that may have been loaded for use in another compare, but that
8611 : don't belong in the corresponding compare. */
8612 365460 : wide_int xmask[2][2] = {};
8613 :
8614 : /* The combined compare or compares. */
8615 30455 : tree cmp[2];
8616 :
8617 : /* Consider we're comparing two non-contiguous fields of packed
8618 : structs, both aligned at 32-bit boundaries:
8619 :
8620 : ll_arg: an 8-bit field at offset 0
8621 : lr_arg: a 16-bit field at offset 2
8622 :
8623 : rl_arg: an 8-bit field at offset 1
8624 : rr_arg: a 16-bit field at offset 3
8625 :
8626 : We'll have r_split_load, because rr_arg straddles across an
8627 : alignment boundary.
8628 :
8629 : We'll want to have:
8630 :
8631 : bitpos = { { 0, 0 }, { 0, 32 } }
8632 : bitsiz = { { 32, 32 }, { 32, 8 } }
8633 :
8634 : And, for little-endian:
8635 :
8636 : shifted = { { 0, 0 }, { 0, 32 } }
8637 : toshift = { { 0, 24 }, { 0, 0 } }
8638 :
8639 : Or, for big-endian:
8640 :
8641 : shifted = { { 0, 0 }, { 8, 0 } }
8642 : toshift = { { 8, 0 }, { 0, 0 } }
8643 : */
8644 :
8645 : /* See if we can find a mode that contains both fields being compared on
8646 : the left. If we can't, fail. Otherwise, update all constants and masks
8647 : to be relative to a field of that size. */
8648 30455 : first_bit = MIN (ll_bitpos, rl_bitpos);
8649 30455 : end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
8650 30455 : HOST_WIDE_INT ll_align = TYPE_ALIGN (TREE_TYPE (ll_inner));
8651 30455 : poly_uint64 ll_end_region = 0;
8652 30455 : if (TYPE_SIZE (TREE_TYPE (ll_inner))
8653 30455 : && tree_fits_poly_uint64_p (TYPE_SIZE (TREE_TYPE (ll_inner))))
8654 30455 : ll_end_region = tree_to_poly_uint64 (TYPE_SIZE (TREE_TYPE (ll_inner)));
8655 30455 : if (get_best_mode (end_bit - first_bit, first_bit, 0, ll_end_region,
8656 30455 : ll_align, BITS_PER_WORD, volatilep, &lnmode))
8657 : l_split_load = false;
8658 : /* ??? If ll and rl share the same load, reuse that?
8659 : See PR 118206 -> gcc.dg/field-merge-18.c */
8660 : else
8661 : {
8662 : /* Consider the possibility of recombining loads if any of the
8663 : fields straddles across an alignment boundary, so that either
8664 : part can be loaded along with the other field. Since we
8665 : limit access modes to BITS_PER_WORD, don't exceed that,
8666 : otherwise on a 32-bit host and a 64-bit-aligned data
8667 : structure, we'll fail the above for a field that straddles
8668 : across two words, and would fail here for not even trying to
8669 : split it at between 32-bit words. */
8670 27026 : HOST_WIDE_INT boundary = compute_split_boundary_from_align
8671 28606 : (MIN (ll_align, BITS_PER_WORD),
8672 : ll_bitpos, ll_bitsize, rl_bitpos, rl_bitsize);
8673 :
8674 27026 : if (boundary < 0
8675 219 : || !get_best_mode (boundary - first_bit, first_bit, 0, ll_end_region,
8676 : ll_align, BITS_PER_WORD, volatilep, &lnmode)
8677 27203 : || !get_best_mode (end_bit - boundary, boundary, 0, ll_end_region,
8678 177 : ll_align, BITS_PER_WORD, volatilep, &lnmode2))
8679 : {
8680 28387 : if (ll_align <= BITS_PER_WORD)
8681 : return 0;
8682 :
8683 : /* As a last resort, try double-word access modes. This
8684 : enables us to deal with misaligned double-word fields
8685 : that straddle across 3 separate words. */
8686 1250 : boundary = compute_split_boundary_from_align
8687 1338 : (MIN (ll_align, 2 * BITS_PER_WORD),
8688 : ll_bitpos, ll_bitsize, rl_bitpos, rl_bitsize);
8689 1250 : if (boundary < 0
8690 0 : || !get_best_mode (boundary - first_bit, first_bit,
8691 : 0, ll_end_region, ll_align, 2 * BITS_PER_WORD,
8692 : volatilep, &lnmode)
8693 1250 : || !get_best_mode (end_bit - boundary, boundary,
8694 0 : 0, ll_end_region, ll_align, 2 * BITS_PER_WORD,
8695 : volatilep, &lnmode2))
8696 1250 : return 0;
8697 : }
8698 :
8699 : /* If we can't have a single load, but can with two, figure out whether
8700 : the two compares can be separated, i.e., whether the entirety of the
8701 : first original compare is encompassed by the entirety of the first
8702 : combined compare. If the first original compare is past the alignment
8703 : boundary, arrange to compare that range first, by setting first1
8704 : (meaning make cmp[1] first, instead of cmp[0]). */
8705 177 : l_split_load = true;
8706 177 : parts = 2;
8707 177 : if (ll_bitpos >= boundary)
8708 : maybe_separate = first1 = true;
8709 132 : else if (ll_bitpos + ll_bitsize <= boundary)
8710 32 : maybe_separate = true;
8711 : }
8712 :
8713 3606 : lnbitsize = GET_MODE_BITSIZE (lnmode);
8714 3606 : lnbitpos = first_bit & ~ (lnbitsize - 1);
8715 : /* Avoid situations that the code below can't handle. */
8716 3606 : if (lnbitpos < 0)
8717 : return 0;
8718 :
8719 : /* Choose the type for the combined compare. Even if we're splitting loads,
8720 : make it wide enough to hold both. */
8721 3606 : if (l_split_load)
8722 354 : lnbitsize += GET_MODE_BITSIZE (lnmode2);
8723 3606 : lntype = build_nonstandard_integer_type (lnbitsize, 1);
8724 3606 : if (!lntype)
8725 : return NULL_TREE;
8726 3606 : lnprec = TYPE_PRECISION (lntype);
8727 3606 : xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
8728 :
8729 : /* Adjust bit ranges for reverse endianness. */
8730 3606 : if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
8731 : {
8732 6 : xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
8733 6 : xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
8734 : }
8735 :
8736 : /* Adjust masks to match the positions in the combined lntype. */
8737 7212 : wide_int ll_mask, rl_mask, r_mask;
8738 3606 : if (ll_and_mask.get_precision ())
8739 4346 : ll_mask = wi::lshift (wide_int::from (ll_and_mask, lnprec, UNSIGNED),
8740 2173 : xll_bitpos);
8741 : else
8742 1433 : ll_mask = wi::shifted_mask (xll_bitpos, ll_bitsize, false, lnprec);
8743 3606 : if (rl_and_mask.get_precision ())
8744 4138 : rl_mask = wi::lshift (wide_int::from (rl_and_mask, lnprec, UNSIGNED),
8745 2069 : xrl_bitpos);
8746 : else
8747 1537 : rl_mask = wi::shifted_mask (xrl_bitpos, rl_bitsize, false, lnprec);
8748 :
8749 : /* When we set l_const, we also set r_const. */
8750 3606 : gcc_checking_assert (!l_const.get_precision () == !r_const.get_precision ());
8751 :
8752 : /* Adjust right-hand constants in both original comparisons to match width
8753 : and bit position. */
8754 3606 : if (l_const.get_precision ())
8755 : {
8756 : /* Before clipping upper bits of the right-hand operand of the compare,
8757 : check that they're sign or zero extensions, depending on how the
8758 : left-hand operand would be extended. If it is unsigned, or if there's
8759 : a mask that zeroes out extension bits, whether because we've checked
8760 : for upper bits in the mask and did not set ll_signbit, or because the
8761 : sign bit itself is masked out, check that the right-hand operand is
8762 : zero-extended. */
8763 1924 : bool l_non_ext_bits = false;
8764 1924 : if (ll_bitsize < lr_bitsize)
8765 : {
8766 40 : wide_int zext = wi::zext (l_const, ll_bitsize);
8767 80 : if ((ll_unsignedp
8768 32 : || (ll_and_mask.get_precision ()
8769 4 : && (!ll_signbit
8770 48 : || ((ll_and_mask & wi::mask (ll_bitsize - 1, true, ll_bitsize))
8771 8 : == 0)))
8772 152 : ? zext : wi::sext (l_const, ll_bitsize)) == l_const)
8773 40 : l_const = zext;
8774 : else
8775 : l_non_ext_bits = true;
8776 40 : }
8777 : /* We're doing bitwise equality tests, so don't bother with sign
8778 : extensions. */
8779 1924 : l_const = wide_int::from (l_const, lnprec, UNSIGNED);
8780 1924 : if (ll_and_mask.get_precision ())
8781 1186 : l_const &= wide_int::from (ll_and_mask, lnprec, UNSIGNED);
8782 1924 : l_const <<= xll_bitpos;
8783 5772 : if (l_non_ext_bits || (l_const & ~ll_mask) != 0)
8784 : {
8785 0 : warning_at (lloc, OPT_Wtautological_compare,
8786 : "comparison is always %d", wanted_code == NE_EXPR);
8787 :
8788 0 : return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
8789 : }
8790 :
8791 : /* Before clipping upper bits of the right-hand operand of the compare,
8792 : check that they're sign or zero extensions, depending on how the
8793 : left-hand operand would be extended. */
8794 1924 : bool r_non_ext_bits = false;
8795 1924 : if (rl_bitsize < rr_bitsize)
8796 : {
8797 18 : wide_int zext = wi::zext (r_const, rl_bitsize);
8798 36 : if ((rl_unsignedp
8799 17 : || (rl_and_mask.get_precision ()
8800 10 : && (!rl_signbit
8801 24 : || ((rl_and_mask & wi::mask (rl_bitsize - 1, true, rl_bitsize))
8802 6 : == 0)))
8803 71 : ? zext : wi::sext (r_const, rl_bitsize)) == r_const)
8804 18 : r_const = zext;
8805 : else
8806 : r_non_ext_bits = true;
8807 18 : }
8808 1924 : r_const = wide_int::from (r_const, lnprec, UNSIGNED);
8809 1924 : if (rl_and_mask.get_precision ())
8810 1126 : r_const &= wide_int::from (rl_and_mask, lnprec, UNSIGNED);
8811 1924 : r_const <<= xrl_bitpos;
8812 5772 : if (r_non_ext_bits || (r_const & ~rl_mask) != 0)
8813 : {
8814 0 : warning_at (rloc, OPT_Wtautological_compare,
8815 : "comparison is always %d", wanted_code == NE_EXPR);
8816 :
8817 0 : return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
8818 : }
8819 :
8820 : /* If there is something in common between the masks, those bits of the
8821 : constants must be the same. If not, the combined condition cannot be
8822 : met, and the result is known. Test for this to avoid generating
8823 : incorrect code below. */
8824 1924 : wide_int mask = ll_mask & rl_mask;
8825 1924 : if (mask != 0
8826 1969 : && (l_const & mask) != (r_const & mask))
8827 : {
8828 0 : if (wanted_code == NE_EXPR)
8829 0 : return constant_boolean_node (true, truth_type);
8830 : else
8831 0 : return constant_boolean_node (false, truth_type);
8832 : }
8833 :
8834 : /* The constants are combined so as to line up with the loaded field, so
8835 : tentatively use the same parameters for the second combined
8836 : compare. */
8837 1924 : ld_arg[1][0] = wide_int_to_tree (lntype, l_const | r_const);
8838 1924 : toshift[1][0] = MIN (xll_bitpos, xrl_bitpos);
8839 1924 : shifted[1][0] = 0;
8840 1924 : bitpos[1][0] = lnbitpos;
8841 1924 : bitsiz[1][0] = lnbitsize;
8842 :
8843 1924 : if (parts > 1)
8844 49 : reuse_split_load (ld_arg[1], bitpos[1], bitsiz[1], toshift[1],
8845 : shifted[1], xmask[1],
8846 49 : lnbitpos + GET_MODE_BITSIZE (lnmode),
8847 : lr_reversep);
8848 :
8849 : /* No masking needed, we know the full constants. */
8850 1924 : r_mask = wi::mask (0, true, lnprec);
8851 :
8852 : /* If the compiler thinks this is used uninitialized below, it's
8853 : because it can't realize that parts can only be 2 when
8854 : comparing with constants if l_split_load is also true. This
8855 : just silences the warning. */
8856 1924 : rnbitpos = 0;
8857 1924 : }
8858 :
8859 : /* Likewise, if the right sides are not constant, align them for the combined
8860 : compare. Also, disallow this optimization if a size, signedness or
8861 : storage order mismatch occurs between the left and right sides. */
8862 : else
8863 : {
8864 1682 : if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
8865 1621 : || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
8866 1621 : || ll_reversep != lr_reversep
8867 : /* Make sure the two fields on the right
8868 : correspond to the left without being swapped. */
8869 1621 : || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
8870 521 : return 0;
8871 :
8872 1165 : bool r_split_load;
8873 1165 : scalar_int_mode rnmode2;
8874 :
8875 : /* Figure out how to load the bits for the right-hand size of the
8876 : combined compare. As in the left-hand size, we may have to split it,
8877 : and then we use two separate compares. */
8878 1165 : first_bit = MIN (lr_bitpos, rr_bitpos);
8879 1165 : end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
8880 1165 : HOST_WIDE_INT lr_align = TYPE_ALIGN (TREE_TYPE (lr_inner));
8881 1165 : poly_uint64 lr_end_region = 0;
8882 1165 : if (TYPE_SIZE (TREE_TYPE (lr_inner))
8883 1165 : && tree_fits_poly_uint64_p (TYPE_SIZE (TREE_TYPE (lr_inner))))
8884 1165 : lr_end_region = tree_to_poly_uint64 (TYPE_SIZE (TREE_TYPE (lr_inner)));
8885 1165 : if (!get_best_mode (end_bit - first_bit, first_bit, 0, lr_end_region,
8886 1165 : lr_align, BITS_PER_WORD, volatilep, &rnmode))
8887 : {
8888 : /* Consider the possibility of recombining loads if any of the
8889 : fields straddles across an alignment boundary, so that either
8890 : part can be loaded along with the other field. */
8891 134 : HOST_WIDE_INT boundary = compute_split_boundary_from_align
8892 134 : (lr_align, lr_bitpos, lr_bitsize, rr_bitpos, rr_bitsize);
8893 :
8894 134 : if (boundary < 0
8895 : /* If we're to split both, make sure the split point is
8896 : the same. */
8897 130 : || (l_split_load
8898 128 : && (boundary - lr_bitpos
8899 128 : != (lnbitpos + GET_MODE_BITSIZE (lnmode)) - ll_bitpos))
8900 130 : || !get_best_mode (boundary - first_bit, first_bit,
8901 : 0, lr_end_region,
8902 130 : lr_align, BITS_PER_WORD, volatilep, &rnmode)
8903 264 : || !get_best_mode (end_bit - boundary, boundary, 0, lr_end_region,
8904 130 : lr_align, BITS_PER_WORD, volatilep, &rnmode2))
8905 4 : return 0;
8906 :
8907 130 : r_split_load = true;
8908 130 : parts = 2;
8909 130 : if (lr_bitpos >= boundary)
8910 : maybe_separate = first1 = true;
8911 88 : else if (lr_bitpos + lr_bitsize <= boundary)
8912 29 : maybe_separate = true;
8913 : }
8914 : else
8915 : r_split_load = false;
8916 :
8917 : /* Find a type that can hold the entire right-hand operand. */
8918 1161 : rnbitsize = GET_MODE_BITSIZE (rnmode);
8919 1161 : rnbitpos = first_bit & ~ (rnbitsize - 1);
8920 1161 : if (r_split_load)
8921 260 : rnbitsize += GET_MODE_BITSIZE (rnmode2);
8922 1161 : rntype = build_nonstandard_integer_type (rnbitsize, 1);
8923 1161 : if (!rntype)
8924 : return 0;
8925 1161 : rnprec = TYPE_PRECISION (rntype);
8926 1161 : xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
8927 :
8928 : /* Adjust for reversed endianness. */
8929 1161 : if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
8930 : {
8931 0 : xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
8932 0 : xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
8933 : }
8934 :
8935 : /* Adjust the masks to match the combined type, and combine them. */
8936 1161 : wide_int lr_mask, rr_mask;
8937 1161 : if (lr_and_mask.get_precision ())
8938 1972 : lr_mask = wi::lshift (wide_int::from (lr_and_mask, rnprec, UNSIGNED),
8939 986 : xlr_bitpos);
8940 : else
8941 175 : lr_mask = wi::shifted_mask (xlr_bitpos, lr_bitsize, false, rnprec);
8942 1161 : if (rr_and_mask.get_precision ())
8943 1884 : rr_mask = wi::lshift (wide_int::from (rr_and_mask, rnprec, UNSIGNED),
8944 942 : xrr_bitpos);
8945 : else
8946 219 : rr_mask = wi::shifted_mask (xrr_bitpos, rr_bitsize, false, rnprec);
8947 1161 : r_mask = lr_mask | rr_mask;
8948 :
8949 : /* Load the right-hand operand of the combined compare. */
8950 1161 : toshift[1][0] = MIN (xlr_bitpos, xrr_bitpos);
8951 1161 : shifted[1][0] = 0;
8952 :
8953 1161 : if (!r_split_load)
8954 : {
8955 1031 : bitpos[1][0] = rnbitpos;
8956 1031 : bitsiz[1][0] = rnbitsize;
8957 1031 : ld_arg[1][0] = make_bit_field_load (ll_loc[3], lr_inner, lr_arg,
8958 1031 : rntype, rnbitsize, rnbitpos,
8959 1031 : lr_unsignedp || rr_unsignedp,
8960 : lr_reversep, lr_load);
8961 : }
8962 :
8963 : /* ... and the second part of the right-hand operand if needed. */
8964 1161 : if (parts > 1)
8965 : {
8966 130 : if (r_split_load)
8967 : {
8968 130 : gimple *point[2];
8969 130 : point[0] = lr_load;
8970 130 : point[1] = rr_load;
8971 130 : build_split_load (ld_arg[1], bitpos[1], bitsiz[1], toshift[1],
8972 : shifted[1], rl_loc[3], lr_inner, lr_arg,
8973 : rnmode, rnmode2, rnbitpos, lr_reversep, point);
8974 : }
8975 : else
8976 0 : reuse_split_load (ld_arg[1], bitpos[1], bitsiz[1], toshift[1],
8977 : shifted[1], xmask[1],
8978 0 : lnbitpos + GET_MODE_BITSIZE (lnmode)
8979 0 : - ll_bitpos + lr_bitpos, lr_reversep);
8980 : }
8981 1161 : }
8982 :
8983 : /* Now issue the loads for the left-hand combined operand/s. */
8984 6170 : wide_int l_mask = ll_mask | rl_mask;
8985 3085 : toshift[0][0] = MIN (xll_bitpos, xrl_bitpos);
8986 3085 : shifted[0][0] = 0;
8987 :
8988 3085 : if (!l_split_load)
8989 : {
8990 2908 : bitpos[0][0] = lnbitpos;
8991 2908 : bitsiz[0][0] = lnbitsize;
8992 2908 : ld_arg[0][0] = make_bit_field_load (ll_loc[3], ll_inner, ll_arg,
8993 2908 : lntype, lnbitsize, lnbitpos,
8994 2908 : ll_unsignedp || rl_unsignedp,
8995 : ll_reversep, ll_load);
8996 : }
8997 :
8998 3085 : if (parts > 1)
8999 : {
9000 179 : if (l_split_load)
9001 : {
9002 177 : gimple *point[2];
9003 177 : point[0] = ll_load;
9004 177 : point[1] = rl_load;
9005 177 : build_split_load (ld_arg[0], bitpos[0], bitsiz[0], toshift[0],
9006 : shifted[0], rl_loc[3], ll_inner, ll_arg,
9007 : lnmode, lnmode2, lnbitpos, ll_reversep, point);
9008 : }
9009 : else
9010 2 : reuse_split_load (ld_arg[0], bitpos[0], bitsiz[0], toshift[0],
9011 : shifted[0], xmask[0],
9012 2 : rnbitpos + GET_MODE_BITSIZE (rnmode)
9013 2 : - lr_bitpos + ll_bitpos, ll_reversep);
9014 : }
9015 :
9016 : /* Compute the compares. */
9017 6349 : for (int i = 0; i < parts; i++)
9018 : {
9019 3264 : tree op[2] = { ld_arg[0][i], ld_arg[1][i] };
9020 9792 : wide_int mask[2] = { l_mask, r_mask };
9021 3264 : location_t *locs[2] = { i ? rl_loc : ll_loc, i ? rr_loc : lr_loc };
9022 :
9023 : /* Figure out the masks, and unshare the original operands. */
9024 9792 : for (int j = 0; j < 2; j++)
9025 : {
9026 6528 : unsigned prec = TYPE_PRECISION (TREE_TYPE (op[j]));
9027 6528 : op[j] = unshare_expr (op[j]);
9028 :
9029 : /* Mask out the bits belonging to the other part. */
9030 6528 : if (xmask[j][i].get_precision ())
9031 102 : mask[j] &= xmask[j][i];
9032 :
9033 6528 : if (shifted[j][i])
9034 : {
9035 307 : wide_int shift = wide_int::from (shifted[j][i], prec, UNSIGNED);
9036 307 : mask[j] = wi::lrshift (mask[j], shift);
9037 307 : }
9038 6528 : mask[j] = wide_int::from (mask[j], prec, UNSIGNED);
9039 : }
9040 :
9041 : /* Line up the operands for a compare. */
9042 3264 : HOST_WIDE_INT shift = (toshift[0][i] - toshift[1][i]);
9043 :
9044 3264 : if (shift)
9045 : {
9046 54 : int j;
9047 54 : if (shift > 0)
9048 : j = 0;
9049 : else
9050 : {
9051 52 : j = 1;
9052 52 : shift = -shift;
9053 : }
9054 :
9055 54 : tree shiftsz = bitsize_int (shift);
9056 54 : op[j] = fold_build2_loc (locs[j][1], RSHIFT_EXPR, TREE_TYPE (op[j]),
9057 : op[j], shiftsz);
9058 54 : mask[j] = wi::lrshift (mask[j], shift);
9059 : }
9060 :
9061 : /* Convert to the smaller type before masking out unwanted
9062 : bits. */
9063 3264 : tree type = TREE_TYPE (op[0]);
9064 3264 : if (type != TREE_TYPE (op[1]))
9065 : {
9066 220 : int j = (TYPE_PRECISION (type)
9067 220 : < TYPE_PRECISION (TREE_TYPE (op[1])));
9068 220 : if (!j)
9069 121 : type = TREE_TYPE (op[1]);
9070 220 : op[j] = fold_convert_loc (locs[j][0], type, op[j]);
9071 220 : mask[j] = wide_int::from (mask[j], TYPE_PRECISION (type), UNSIGNED);
9072 : }
9073 :
9074 : /* Apply masks. */
9075 9792 : for (int j = 0; j < 2; j++)
9076 6528 : if (mask[j] != wi::mask (0, true, mask[j].get_precision ()))
9077 2634 : op[j] = fold_build2_loc (locs[j][2], BIT_AND_EXPR, type,
9078 5268 : op[j], wide_int_to_tree (type, mask[j]));
9079 :
9080 6349 : cmp[i] = fold_build2_loc (i ? rloc : lloc, wanted_code, truth_type,
9081 : op[0], op[1]);
9082 9792 : }
9083 :
9084 : /* Reorder the compares if needed. */
9085 3085 : if (first1)
9086 45 : std::swap (cmp[0], cmp[1]);
9087 :
9088 : /* Prepare to return the resulting compares. Combine two parts if
9089 : needed. */
9090 3085 : if (parts == 1)
9091 2906 : result = cmp[0];
9092 179 : else if (!separatep || !maybe_separate)
9093 : {
9094 : /* Only fold if any of the cmp is known, otherwise we may lose the
9095 : sequence point, and that may prevent further optimizations. */
9096 173 : if (TREE_CODE (cmp[0]) == INTEGER_CST
9097 137 : || TREE_CODE (cmp[1]) == INTEGER_CST)
9098 37 : result = fold_build2_loc (rloc, orig_code, truth_type, cmp[0], cmp[1]);
9099 : else
9100 136 : result = build2_loc (rloc, orig_code, truth_type, cmp[0], cmp[1]);
9101 : }
9102 : else
9103 : {
9104 6 : result = cmp[0];
9105 6 : *separatep = cmp[1];
9106 : }
9107 :
9108 3085 : return result;
9109 234670 : }
9110 :
9111 : /* Try to simplify the AND of two comparisons, specified by
9112 :    (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
9113 : If this can be simplified to a single expression (without requiring
9114 : introducing more SSA variables to hold intermediate values),
9115 : return the resulting tree. Otherwise return NULL_TREE.
9116 : If the result expression is non-null, it has boolean type. */
9117 :
9118 : tree
9119 391006 : maybe_fold_and_comparisons (tree type,
9120 : enum tree_code code1, tree op1a, tree op1b,
9121 : enum tree_code code2, tree op2a, tree op2b,
9122 : basic_block outer_cond_bb)
9123 : {
9124 391006 : if (tree t = and_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b,
9125 : outer_cond_bb))
9126 : return t;
9127 :
9128 390065 : if (tree t = and_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b,
9129 : outer_cond_bb))
9130 : return t;
9131 :
9132 390049 : if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_AND_EXPR, code1,
9133 : op1a, op1b, code2, op2a,
9134 : op2b, outer_cond_bb))
9135 : return t;
9136 :
9137 : return NULL_TREE;
9138 : }
9139 :
9140 : /* Helper function for or_comparisons_1: try to simplify the OR of the
9141 : ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
9142 : If INVERT is true, invert the value of VAR before doing the OR.
9143 :    Return NULL_TREE if we can't simplify this to a single expression.  */
9144 :
9145 : static tree
9146 38336 : or_var_with_comparison (tree type, tree var, bool invert,
9147 : enum tree_code code2, tree op2a, tree op2b,
9148 : basic_block outer_cond_bb)
9149 : {
9150 38336 : tree t;
9151 38336 : gimple *stmt = SSA_NAME_DEF_STMT (var);
9152 :
9153 : /* We can only deal with variables whose definitions are assignments. */
9154 38336 : if (!is_gimple_assign (stmt))
9155 : return NULL_TREE;
9156 :
9157 : /* If we have an inverted comparison, apply DeMorgan's law and rewrite
9158 : !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
9159 : Then we only have to consider the simpler non-inverted cases. */
9160 38216 : if (invert)
9161 19608 : t = and_var_with_comparison_1 (type, stmt,
9162 : invert_tree_comparison (code2, false),
9163 : op2a, op2b, outer_cond_bb);
9164 : else
9165 18608 : t = or_var_with_comparison_1 (type, stmt, code2, op2a, op2b,
9166 : outer_cond_bb);
9167 38216 : return canonicalize_bool (t, invert);
9168 : }
9169 :
9170 : /* Try to simplify the OR of the ssa variable defined by the assignment
9171 : STMT with the comparison specified by (OP2A CODE2 OP2B).
9172 :    Return NULL_TREE if we can't simplify this to a single expression.  */
9173 :
/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   OUTER_COND_BB, when non-NULL, identifies the basic block of the outer
   condition, passed through to the recursive folders.
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison_1 (tree type, gimple *stmt,
			  enum tree_code code2, tree op2a, tree op2b,
			  basic_block outer_cond_bb)
{
  tree var = gimple_assign_lhs (stmt);
  /* If (OP2A CODE2 OP2B) is equivalent to testing OP2A true (resp. false),
     these record OP2A for use in the identities below.  */
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true .  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      /* (op2a != 0) and (op2a == 1) both test op2a being true.  */
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      /* (op2a == 0) and (op2a != 1) both test op2a being false.  */
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    /* var OR !var is always true.  */
	    return boolean_true_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (type, innercode,
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 code2, op2a, op2b, outer_cond_bb);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      /* Holds the folded result for one inner operand while the other
	 is attempted, so the two can be combined at the end.  */
      tree partial = NULL_TREE;
      bool is_or = (innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
	 inner1 OR (inner1 AND inner2) => inner1
	 !inner1 OR (inner1 OR inner2) => true
	 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
      */
      if (inner1 == true_test_var)
	return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (type, inner2, false, code2, op2a,
					  op2b, outer_cond_bb));
      else if (inner2 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (type, inner1, false, code2, op2a,
					  op2b, outer_cond_bb));

      /* Next, redistribute/reassociate the OR across the inner tests.
	 Compute the first partial result, (inner1 OR (op2a code op2b))
	 NB: the assignments to S and T inside the condition rely on
	 short-circuit evaluation of the && chain.  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b,
					     outer_cond_bb)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (t OR inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_or)
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (integer_zerop (t))
		return inner2;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner2 OR (op2a code op2b))) */
	  else if (integer_zerop (t))
	    return boolean_false_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b,
					     outer_cond_bb)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (inner1 OR t)
	     => (t OR partial) */
	  if (is_or)
	    {
	      if (integer_zerop (t))
		return inner1;
	      else if (integer_onep (t))
		return boolean_true_node;
	      /* If both are the same, we can apply the identity
		 (x OR x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner1 OR (op2a code2 op2b)))
	     => (t AND partial) */
	  else
	    {
	      if (integer_zerop (t))
		return boolean_false_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed AND expression.  The
		     interesting case is when at least one is true.
		     Or, if both are the same, we can apply the identity
		     (x AND x) == x.  */
		  if (integer_onep (partial))
		    return t;
		  else if (integer_onep (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  /* No simplification found.  */
  return NULL_TREE;
}
9336 :
9337 : /* Try to simplify the OR of two comparisons defined by
9338 : (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
9339 : If this can be done without constructing an intermediate value,
9340 : return the resulting tree; otherwise NULL_TREE is returned.
9341 : This function is deliberately asymmetric as it recurses on SSA_DEFs
9342 : in the first comparison but not the second.
 : OUTER_COND_BB, when non-NULL, identifies the basic block of the
 : outer condition; it is only forwarded to or_var_with_comparison. */
9343 :
9344 : static tree
9345 960920 : or_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
9346 : enum tree_code code2, tree op2a, tree op2b,
9347 : basic_block outer_cond_bb)
9348 : {
9349 960920 : tree truth_type = truth_type_for (TREE_TYPE (op1a));
9350 :
9351 : /* First check for ((x CODE1 y) OR (x CODE2 y)). */
9352 960920 : if (operand_equal_p (op1a, op2a, 0)
9353 960920 : && operand_equal_p (op1b, op2b, 0))
9354 : {
9355 : /* Result will be either NULL_TREE, or a combined comparison. */
9356 3143 : tree t = combine_comparisons (UNKNOWN_LOCATION,
9357 : TRUTH_ORIF_EXPR, code1, code2,
9358 : truth_type, op1a, op1b);
9359 3143 : if (t)
9360 : return t;
9361 : }
9362 :
9363 : /* Likewise the swapped case of the above. */
9364 957809 : if (operand_equal_p (op1a, op2b, 0)
9365 957809 : && operand_equal_p (op1b, op2a, 0))
9366 : {
9367 : /* Result will be either NULL_TREE, or a combined comparison. */
9368 0 : tree t = combine_comparisons (UNKNOWN_LOCATION,
9369 : TRUTH_ORIF_EXPR, code1,
9370 : swap_tree_comparison (code2),
9371 : truth_type, op1a, op1b);
9372 0 : if (t)
9373 : return t;
9374 : }
9375 :
9376 : /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
9377 : NAME's definition is a truth value. See if there are any simplifications
9378 : that can be done against the NAME's definition. */
9379 957809 : if (TREE_CODE (op1a) == SSA_NAME
9380 957806 : && (code1 == NE_EXPR || code1 == EQ_EXPR)
9381 1227225 : && (integer_zerop (op1b) || integer_onep (op1b)))
9382 : {
 : /* True when the first comparison tests the logical negation of
 : OP1A, i.e. (NAME == 0) or (NAME != 1). */
9383 34576 : bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
9384 70435 : || (code1 == NE_EXPR && integer_onep (op1b)));
9385 66583 : gimple *stmt = SSA_NAME_DEF_STMT (op1a);
9386 66583 : switch (gimple_code (stmt))
9387 : {
9388 38216 : case GIMPLE_ASSIGN:
9389 : /* Try to simplify by copy-propagating the definition. */
9390 38216 : return or_var_with_comparison (type, op1a, invert, code2, op2a,
9391 38216 : op2b, outer_cond_bb);
9392 :
9393 15579 : case GIMPLE_PHI:
9394 : /* If every argument to the PHI produces the same result when
9395 : ORed with the second comparison, we win.
9396 : Do not do this unless the type is bool since we need a bool
9397 : result here anyway. */
9398 15579 : if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
9399 : {
9400 : tree result = NULL_TREE;
9401 : unsigned i;
9402 894 : for (i = 0; i < gimple_phi_num_args (stmt); i++)
9403 : {
9404 894 : tree arg = gimple_phi_arg_def (stmt, i);
9405 :
9406 : /* If this PHI has itself as an argument, ignore it.
9407 : If all the other args produce the same result,
9408 : we're still OK. */
9409 894 : if (arg == gimple_phi_result (stmt))
9410 0 : continue;
9411 894 : else if (TREE_CODE (arg) == INTEGER_CST)
9412 : {
9413 745 : if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
9414 : {
9415 339 : if (!result)
9416 199 : result = boolean_true_node;
9417 140 : else if (!integer_onep (result))
9418 : return NULL_TREE;
9419 : }
9420 406 : else if (!result)
9421 200 : result = fold_build2 (code2, boolean_type_node,
9422 : op2a, op2b);
9423 206 : else if (!same_bool_comparison_p (result,
9424 : code2, op2a, op2b))
9425 : return NULL_TREE;
9426 : }
9427 149 : else if (TREE_CODE (arg) == SSA_NAME
9428 149 : && !SSA_NAME_IS_DEFAULT_DEF (arg))
9429 : {
9430 149 : tree temp;
9431 149 : gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
9432 : /* In simple cases we can look through PHI nodes,
9433 : but we have to be careful with loops.
9434 : See PR49073. */
9435 149 : if (! dom_info_available_p (CDI_DOMINATORS)
9436 149 : || gimple_bb (def_stmt) == gimple_bb (stmt)
9437 298 : || dominated_by_p (CDI_DOMINATORS,
9438 149 : gimple_bb (def_stmt),
9439 149 : gimple_bb (stmt)))
9440 29 : return NULL_TREE;
9441 120 : temp = or_var_with_comparison (type, arg, invert, code2,
9442 : op2a, op2b, outer_cond_bb);
9443 120 : if (!temp)
9444 : return NULL_TREE;
9445 0 : else if (!result)
9446 : result = temp;
9447 0 : else if (!same_bool_result_p (result, temp))
9448 : return NULL_TREE;
9449 : }
9450 : else
9451 : return NULL_TREE;
9452 : }
9453 : return result;
9454 : }
9455 :
9456 : default:
9457 : break;
9458 : }
9459 : }
9460 : return NULL_TREE;
9461 : }
9462 :
9463 : /* Try to simplify the OR of two comparisons, specified by
9464 : (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
9465 : If this can be simplified to a single expression (without requiring
9466 : introducing more SSA variables to hold intermediate values),
9467 : return the resulting tree. Otherwise return NULL_TREE.
9468 : If the result expression is non-null, it has boolean type. */
9469 :
9470 : tree
9471 481566 : maybe_fold_or_comparisons (tree type,
9472 : enum tree_code code1, tree op1a, tree op1b,
9473 : enum tree_code code2, tree op2a, tree op2b,
9474 : basic_block outer_cond_bb)
9475 : {
 : /* or_comparisons_1 only follows SSA defs of its first comparison,
 : so try both operand orders before falling back to the match.pd
 : based machinery. */
9476 481566 : if (tree t = or_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b,
9477 : outer_cond_bb))
9478 : return t;
9479 :
9480 478478 : if (tree t = or_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b,
9481 : outer_cond_bb))
9482 : return t;
9483 :
9484 478473 : if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_IOR_EXPR, code1,
9485 : op1a, op1b, code2, op2a,
9486 : op2b, outer_cond_bb))
9487 : return t;
9488 :
9489 : return NULL_TREE;
9490 : }
9491 :
9492 : /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
9493 :
9494 : Either NULL_TREE, a simplified but non-constant or a constant
9495 : is returned.
9496 :
9497 : ??? This should go into a gimple-fold-inline.h file to be eventually
9498 : privatized with the single valueize function used in the various TUs
9499 : to avoid the indirect function call overhead. */
9500 :
9501 : tree
9502 420544596 : gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
9503 : tree (*gvalueize) (tree))
9504 : {
9505 420544596 : gimple_match_op res_op;
9506 : /* ??? The SSA propagators do not correctly deal with following SSA use-def
9507 : edges if there are intermediate VARYING defs. For this reason
9508 : do not follow SSA edges here even though SCCVN can technically
9509 : just deal fine with that. */
9510 420544596 : if (gimple_simplify (stmt, &res_op, NULL, gvalueize, valueize))
9511 : {
9512 55434703 : tree res = NULL_TREE;
9513 55434703 : if (gimple_simplified_result_is_gimple_val (&res_op))
9514 33705070 : res = res_op.ops[0];
 : /* If the simplified form is not a GIMPLE value, a hook (when
 : installed) may still materialize a result for us. */
9515 21729633 : else if (mprts_hook)
9516 7500497 : res = mprts_hook (&res_op);
9517 41205567 : if (res)
9518 : {
9519 35545351 : if (dump_file && dump_flags & TDF_DETAILS)
9520 : {
9521 9702 : fprintf (dump_file, "Match-and-simplified ");
9522 9702 : print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
9523 9702 : fprintf (dump_file, " to ");
9524 9702 : print_generic_expr (dump_file, res);
9525 9702 : fprintf (dump_file, "\n");
9526 : }
9527 35545351 : return res;
9528 : }
9529 : }
9530 :
9531 384999245 : location_t loc = gimple_location (stmt);
9532 384999245 : switch (gimple_code (stmt))
9533 : {
9534 332908603 : case GIMPLE_ASSIGN:
9535 332908603 : {
9536 332908603 : enum tree_code subcode = gimple_assign_rhs_code (stmt);
9537 :
9538 332908603 : switch (get_gimple_rhs_class (subcode))
9539 : {
9540 120195103 : case GIMPLE_SINGLE_RHS:
9541 120195103 : {
9542 120195103 : tree rhs = gimple_assign_rhs1 (stmt);
9543 120195103 : enum tree_code_class kind = TREE_CODE_CLASS (subcode);
9544 :
9545 120195103 : if (TREE_CODE (rhs) == SSA_NAME)
9546 : {
9547 : /* If the RHS is an SSA_NAME, return its known constant value,
9548 : if any. */
9549 9552584 : return (*valueize) (rhs);
9550 : }
9551 : /* Handle propagating invariant addresses into address
9552 : operations. */
9553 110642519 : else if (TREE_CODE (rhs) == ADDR_EXPR
9554 110642519 : && !is_gimple_min_invariant (rhs))
9555 : {
9556 7550112 : poly_int64 offset = 0;
9557 7550112 : tree base;
9558 7550112 : base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
9559 : &offset,
9560 : valueize);
9561 7550112 : if (base
9562 7550112 : && (CONSTANT_CLASS_P (base)
9563 6842096 : || decl_address_invariant_p (base)))
9564 193942 : return build_invariant_address (TREE_TYPE (rhs),
9565 193942 : base, offset);
9566 : }
 : /* A fully-initialized vector CONSTRUCTOR can be folded to a
 : VECTOR_CST if all elements valueize to constants. */
9567 103092407 : else if (TREE_CODE (rhs) == CONSTRUCTOR
9568 1045934 : && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
9569 104546197 : && known_eq (CONSTRUCTOR_NELTS (rhs),
9570 : TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
9571 : {
9572 396846 : unsigned i, nelts;
9573 396846 : tree val;
9574 :
9575 396846 : nelts = CONSTRUCTOR_NELTS (rhs);
9576 396846 : tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
9577 883954 : FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
9578 : {
9579 476292 : val = (*valueize) (val);
9580 476292 : if (TREE_CODE (val) == INTEGER_CST
9581 406173 : || TREE_CODE (val) == REAL_CST
9582 386030 : || TREE_CODE (val) == FIXED_CST)
9583 90262 : vec.quick_push (val);
9584 : else
9585 : return NULL_TREE;
9586 : }
9587 :
9588 10816 : return vec.build ();
9589 396846 : }
9590 110051731 : if (subcode == OBJ_TYPE_REF)
9591 : {
9592 273230 : tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
9593 : /* If callee is constant, we can fold away the wrapper. */
9594 273230 : if (is_gimple_min_invariant (val))
9595 : return val;
9596 : }
9597 :
9598 110051548 : if (kind == tcc_reference)
9599 : {
9600 73615311 : if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
9601 71456993 : || TREE_CODE (rhs) == REALPART_EXPR
9602 70610044 : || TREE_CODE (rhs) == IMAGPART_EXPR)
9603 75462928 : && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
9604 : {
9605 3085455 : tree val = (*valueize) (TREE_OPERAND (rhs, 0));
9606 3085455 : return fold_unary_loc (EXPR_LOCATION (rhs),
9607 3085455 : TREE_CODE (rhs),
9608 6170910 : TREE_TYPE (rhs), val);
9609 : }
9610 70529856 : else if (TREE_CODE (rhs) == BIT_FIELD_REF
9611 70529856 : && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
9612 : {
9613 488382 : tree val = (*valueize) (TREE_OPERAND (rhs, 0));
9614 488382 : return fold_ternary_loc (EXPR_LOCATION (rhs),
9615 488382 : TREE_CODE (rhs),
9616 488382 : TREE_TYPE (rhs), val,
9617 488382 : TREE_OPERAND (rhs, 1),
9618 976764 : TREE_OPERAND (rhs, 2));
9619 : }
9620 70041474 : else if (TREE_CODE (rhs) == MEM_REF
9621 70041474 : && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
9622 : {
9623 14201026 : tree val = (*valueize) (TREE_OPERAND (rhs, 0));
9624 14201026 : if (TREE_CODE (val) == ADDR_EXPR
9625 14201026 : && is_gimple_min_invariant (val))
9626 : {
9627 936448 : tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
9628 : unshare_expr (val),
9629 : TREE_OPERAND (rhs, 1));
9630 936448 : if (tem)
9631 70041474 : rhs = tem;
9632 : }
9633 : }
9634 70041474 : return fold_const_aggregate_ref_1 (rhs, valueize);
9635 : }
9636 36436237 : else if (kind == tcc_declaration)
9637 8594816 : return get_symbol_constant_value (rhs);
9638 : return rhs;
9639 : }
9640 :
9641 : case GIMPLE_UNARY_RHS:
9642 : return NULL_TREE;
9643 :
9644 159225749 : case GIMPLE_BINARY_RHS:
9645 : /* Translate &x + CST into an invariant form suitable for
9646 : further propagation. */
9647 159225749 : if (subcode == POINTER_PLUS_EXPR)
9648 : {
9649 18929286 : tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
9650 18929286 : tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
9651 18929286 : if (TREE_CODE (op0) == ADDR_EXPR
9652 5329094 : && TREE_CODE (op1) == INTEGER_CST)
9653 : {
9654 544836 : tree off = fold_convert (ptr_type_node, op1);
9655 544836 : return build1_loc
9656 1089672 : (loc, ADDR_EXPR, TREE_TYPE (op0),
9657 544836 : fold_build2 (MEM_REF,
9658 : TREE_TYPE (TREE_TYPE (op0)),
9659 544836 : unshare_expr (op0), off));
9660 : }
9661 : }
9662 : /* Canonicalize bool != 0 and bool == 0 appearing after
9663 : valueization. While gimple_simplify handles this
9664 : it can get confused by the ~X == 1 -> X == 0 transform
9665 : which we can't reduce to a SSA name or a constant
9666 : (and we have no way to tell gimple_simplify to not
9667 : consider those transforms in the first place). */
9668 140296463 : else if (subcode == EQ_EXPR
9669 140296463 : || subcode == NE_EXPR)
9670 : {
9671 3197489 : tree lhs = gimple_assign_lhs (stmt);
9672 3197489 : tree op0 = gimple_assign_rhs1 (stmt);
9673 3197489 : if (useless_type_conversion_p (TREE_TYPE (lhs),
9674 3197489 : TREE_TYPE (op0)))
9675 : {
9676 24395 : tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
9677 24395 : op0 = (*valueize) (op0);
9678 24395 : if (TREE_CODE (op0) == INTEGER_CST)
9679 706 : std::swap (op0, op1);
9680 24395 : if (TREE_CODE (op1) == INTEGER_CST
9681 24395 : && ((subcode == NE_EXPR && integer_zerop (op1))
9682 2320 : || (subcode == EQ_EXPR && integer_onep (op1))))
9683 247 : return op0;
9684 : }
9685 : }
9686 : return NULL_TREE;
9687 :
9688 672758 : case GIMPLE_TERNARY_RHS:
9689 672758 : {
9690 : /* Handle ternary operators that can appear in GIMPLE form. */
9691 672758 : tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
9692 672758 : tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
9693 672758 : tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
9694 672758 : return fold_ternary_loc (loc, subcode,
9695 672758 : TREE_TYPE (gimple_assign_lhs (stmt)),
9696 672758 : op0, op1, op2);
9697 : }
9698 :
9699 0 : default:
9700 0 : gcc_unreachable ();
9701 : }
9702 : }
9703 :
9704 14251916 : case GIMPLE_CALL:
9705 14251916 : {
9706 14251916 : tree fn;
9707 14251916 : gcall *call_stmt = as_a <gcall *> (stmt);
9708 :
9709 14251916 : if (gimple_call_internal_p (stmt))
9710 : {
9711 1283841 : enum tree_code subcode = ERROR_MARK;
9712 1283841 : switch (gimple_call_internal_fn (stmt))
9713 : {
9714 : case IFN_UBSAN_CHECK_ADD:
9715 : subcode = PLUS_EXPR;
9716 : break;
9717 7983 : case IFN_UBSAN_CHECK_SUB:
9718 7983 : subcode = MINUS_EXPR;
9719 7983 : break;
9720 6815 : case IFN_UBSAN_CHECK_MUL:
9721 6815 : subcode = MULT_EXPR;
9722 6815 : break;
9723 141450 : case IFN_BUILTIN_EXPECT:
9724 141450 : {
9725 141450 : tree arg0 = gimple_call_arg (stmt, 0);
9726 141450 : tree op0 = (*valueize) (arg0);
9727 141450 : if (TREE_CODE (op0) == INTEGER_CST)
9728 : return op0;
9729 : return NULL_TREE;
9730 : }
9731 : default:
9732 : return NULL_TREE;
9733 : }
 : /* Fold the UBSAN_CHECK_{ADD,SUB,MUL} as the corresponding
 : plain arithmetic when it provably cannot overflow (the
 : TREE_OVERFLOW check below). */
9734 22924 : tree arg0 = gimple_call_arg (stmt, 0);
9735 22924 : tree arg1 = gimple_call_arg (stmt, 1);
9736 22924 : tree op0 = (*valueize) (arg0);
9737 22924 : tree op1 = (*valueize) (arg1);
9738 :
9739 22924 : if (TREE_CODE (op0) != INTEGER_CST
9740 2499 : || TREE_CODE (op1) != INTEGER_CST)
9741 : {
9742 22402 : switch (subcode)
9743 : {
9744 6715 : case MULT_EXPR:
9745 : /* x * 0 = 0 * x = 0 without overflow. */
9746 6715 : if (integer_zerop (op0) || integer_zerop (op1))
9747 20 : return build_zero_cst (TREE_TYPE (arg0));
9748 : break;
9749 7641 : case MINUS_EXPR:
9750 : /* y - y = 0 without overflow. */
9751 7641 : if (operand_equal_p (op0, op1, 0))
9752 0 : return build_zero_cst (TREE_TYPE (arg0));
9753 : break;
9754 : default:
9755 : break;
9756 : }
9757 : }
9758 22904 : tree res
9759 22904 : = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
9760 22904 : if (res
9761 2870 : && TREE_CODE (res) == INTEGER_CST
9762 23426 : && !TREE_OVERFLOW (res))
9763 : return res;
9764 : return NULL_TREE;
9765 : }
9766 :
9767 12968075 : fn = (*valueize) (gimple_call_fn (stmt));
9768 12968075 : if (TREE_CODE (fn) == ADDR_EXPR
9769 12336558 : && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
9770 12336494 : && fndecl_built_in_p (TREE_OPERAND (fn, 0))
9771 18934402 : && gimple_builtin_call_types_compatible_p (stmt,
9772 5966327 : TREE_OPERAND (fn, 0)))
9773 : {
9774 5862661 : tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
9775 5862661 : tree retval;
9776 5862661 : unsigned i;
9777 18246214 : for (i = 0; i < gimple_call_num_args (stmt); ++i)
9778 12383553 : args[i] = (*valueize) (gimple_call_arg (stmt, i));
9779 5862661 : retval = fold_builtin_call_array (loc,
9780 : gimple_call_return_type (call_stmt),
9781 : fn, gimple_call_num_args (stmt), args);
9782 5862661 : if (retval)
9783 : {
9784 : /* fold_call_expr wraps the result inside a NOP_EXPR. */
9785 47143 : STRIP_NOPS (retval);
9786 47143 : retval = fold_convert (gimple_call_return_type (call_stmt),
9787 : retval);
9788 : }
9789 5862661 : return retval;
9790 : }
9791 : return NULL_TREE;
9792 : }
9793 :
9794 : default:
9795 : return NULL_TREE;
9796 : }
9797 : }
9798 :
9799 : /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
9800 : Returns NULL_TREE if folding to a constant is not possible, otherwise
9801 : returns a constant according to is_gimple_min_invariant. */
9802 :
9803 : tree
9804 4413 : gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
9805 : {
 : /* NOTE(review): the three-argument gimple_fold_stmt_to_constant_1
 : is called with its GVALUEIZE argument defaulted here — confirm
 : the default in the header declaration. */
9806 4413 : tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
 : /* Filter out simplified-but-non-constant results. */
9807 4413 : if (res && is_gimple_min_invariant (res))
9808 : return res;
9809 : return NULL_TREE;
9810 : }
9811 :
9812 :
9813 : /* The following set of functions are supposed to fold references using
9814 : their constant initializers. */
9815 :
9816 : /* See if we can find constructor defining value of BASE.
9817 : When we know the constructor with constant offset (such as
9818 : base is array[40] and we do know constructor of array), then
9819 : BIT_OFFSET is adjusted accordingly.
9820 :
9821 : As a special case, return error_mark_node when constructor
9822 : is not explicitly available, but it is known to be zero
9823 : such as 'static const int a;'. */
9824 : static tree
9825 129145455 : get_base_constructor (tree base, poly_int64 *bit_offset,
9826 : tree (*valueize)(tree))
9827 : {
9828 129238596 : poly_int64 bit_offset2, size, max_size;
9829 129238596 : bool reverse;
9830 :
9831 129238596 : if (TREE_CODE (base) == MEM_REF)
9832 : {
 : /* Fold the MEM_REF's constant offset into *BIT_OFFSET and
 : strip the address operand; give up on non-constant bases. */
9833 139387994 : poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT;
9834 69693997 : if (!boff.to_shwi (bit_offset))
9835 69349671 : return NULL_TREE;
9836 :
9837 69693650 : if (valueize
9838 69693650 : && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
9839 37724764 : base = valueize (TREE_OPERAND (base, 0));
9840 69693650 : if (!base || TREE_CODE (base) != ADDR_EXPR)
9841 : return NULL_TREE;
9842 344326 : base = TREE_OPERAND (base, 0);
9843 : }
9844 59544599 : else if (valueize
9845 31923989 : && TREE_CODE (base) == SSA_NAME)
9846 0 : base = valueize (base);
9847 :
9848 : /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
9849 : DECL_INITIAL. If BASE is a nested reference into another
9850 : ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
9851 : the inner reference. */
9852 59888925 : switch (TREE_CODE (base))
9853 : {
9854 52336297 : case VAR_DECL:
9855 52336297 : case CONST_DECL:
9856 52336297 : {
9857 52336297 : tree init = ctor_for_folding (base);
9858 :
9859 : /* Our semantic is exact opposite of ctor_for_folding;
9860 : NULL means unknown, while error_mark_node is 0. */
9861 52336297 : if (init == error_mark_node)
9862 : return NULL_TREE;
9863 1301752 : if (!init)
9864 1166 : return error_mark_node;
9865 : return init;
9866 : }
9867 :
9868 93141 : case VIEW_CONVERT_EXPR:
9869 93141 : return get_base_constructor (TREE_OPERAND (base, 0),
9870 93141 : bit_offset, valueize);
9871 :
9872 334752 : case ARRAY_REF:
9873 334752 : case COMPONENT_REF:
9874 334752 : base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
9875 : &reverse);
9876 334752 : if (!known_size_p (max_size) || maybe_ne (size, max_size))
9877 : return NULL_TREE;
9878 274108 : *bit_offset += bit_offset2;
9879 274108 : return get_base_constructor (base, bit_offset, valueize);
9880 :
9881 : case CONSTRUCTOR:
9882 : return base;
9883 :
9884 7124735 : default:
9885 7124735 : if (CONSTANT_CLASS_P (base))
9886 : return base;
9887 :
9888 : return NULL_TREE;
9889 : }
9890 : }
9891 :
9892 : /* CTOR is a CONSTRUCTOR of an array or vector type. Fold a reference of SIZE
9893 : bits to the memory at bit OFFSET. If non-null, TYPE is the expected type of
9894 : the reference; otherwise the type of the referenced element is used instead.
9895 : When SIZE is zero, attempt to fold a reference to the entire element OFFSET
9896 : refers to. Increment *SUBOFF by the bit offset of the accessed element. */
9897 :
9898 : static tree
9899 707368 : fold_array_ctor_reference (tree type, tree ctor,
9900 : unsigned HOST_WIDE_INT offset,
9901 : unsigned HOST_WIDE_INT size,
9902 : tree from_decl,
9903 : unsigned HOST_WIDE_INT *suboff)
9904 : {
9905 707368 : offset_int low_bound;
9906 707368 : offset_int elt_size;
9907 707368 : offset_int access_index;
9908 707368 : tree domain_type = NULL_TREE;
9909 707368 : HOST_WIDE_INT inner_offset;
9910 :
9911 : /* Compute low bound and elt size. */
9912 707368 : if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
9913 707368 : domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
9914 707368 : if (domain_type && TYPE_MIN_VALUE (domain_type))
9915 : {
9916 : /* Static constructors for variably sized objects make no sense. */
9917 707368 : if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
9918 : return NULL_TREE;
9919 707368 : low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
9920 : }
9921 : else
9922 0 : low_bound = 0;
9923 : /* Static constructors for variably sized objects make no sense. */
9924 707368 : if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
9925 : return NULL_TREE;
9926 707368 : elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
9927 :
9928 : /* When TYPE is non-null, verify that it specifies a constant-sized
9929 : access of a multiple of the array element size. Avoid division
9930 : by zero below when ELT_SIZE is zero, such as with the result of
9931 : an initializer for a zero-length array or an empty struct. */
9932 707368 : if (elt_size == 0
9933 707368 : || (type
9934 707332 : && (!TYPE_SIZE_UNIT (type)
9935 707332 : || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST)))
9936 36 : return NULL_TREE;
9937 :
9938 : /* Compute the array index we look for. */
9939 707332 : access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
9940 : elt_size);
9941 707332 : access_index += low_bound;
9942 :
9943 : /* And offset within the access. */
9944 707332 : inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
9945 :
9946 707332 : unsigned HOST_WIDE_INT elt_sz = elt_size.to_uhwi ();
 : /* An access wider than one element has to be assembled by
 : native-encoding adjacent constructor elements into a buffer
 : and then reinterpreting the bytes as TYPE. */
9947 707332 : if (size > elt_sz * BITS_PER_UNIT)
9948 : {
9949 : /* native_encode_expr constraints. */
9950 50607 : if (size > MAX_BITSIZE_MODE_ANY_MODE
9951 40537 : || size % BITS_PER_UNIT != 0
9952 40537 : || inner_offset % BITS_PER_UNIT != 0
9953 40537 : || elt_sz > MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT)
9954 : return NULL_TREE;
9955 :
9956 40537 : unsigned ctor_idx;
9957 40537 : tree val = get_array_ctor_element_at_index (ctor, access_index,
9958 : &ctor_idx);
9959 40561 : if (!val && ctor_idx >= CONSTRUCTOR_NELTS (ctor))
9960 23 : return build_zero_cst (type);
9961 :
9962 : /* native-encode adjacent ctor elements. */
9963 40514 : unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
9964 40514 : unsigned bufoff = 0;
9965 40514 : offset_int index = 0;
9966 40514 : offset_int max_index = access_index;
9967 40514 : constructor_elt *elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
9968 40514 : if (!val)
9969 1 : val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
9970 40513 : else if (!CONSTANT_CLASS_P (val))
9971 : return NULL_TREE;
9972 40111 : if (!elt->index)
9973 : ;
9974 33820 : else if (TREE_CODE (elt->index) == RANGE_EXPR)
9975 : {
9976 20 : index = wi::to_offset (TREE_OPERAND (elt->index, 0));
9977 20 : max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
9978 : }
9979 : else
9980 33800 : index = max_index = wi::to_offset (elt->index);
9981 40111 : index = wi::umax (index, access_index);
9982 273603 : do
9983 : {
9984 273603 : if (bufoff + elt_sz > sizeof (buf))
9985 0 : elt_sz = sizeof (buf) - bufoff;
9986 273603 : int len;
9987 273603 : if (TREE_CODE (val) == RAW_DATA_CST)
9988 : {
9989 20 : gcc_assert (inner_offset == 0);
9990 20 : if (!elt->index || TREE_CODE (elt->index) != INTEGER_CST)
9991 : return NULL_TREE;
9992 40 : inner_offset = (access_index
9993 20 : - wi::to_offset (elt->index)).to_uhwi ();
9994 20 : len = MIN (sizeof (buf) - bufoff,
9995 : (unsigned) (RAW_DATA_LENGTH (val) - inner_offset));
9996 20 : memcpy (buf + bufoff, RAW_DATA_POINTER (val) + inner_offset,
9997 : len);
9998 20 : access_index += len - 1;
9999 : }
10000 : else
10001 : {
10002 547166 : len = native_encode_expr (val, buf + bufoff, elt_sz,
10003 273583 : inner_offset / BITS_PER_UNIT);
10004 273583 : if (len != (int) elt_sz - inner_offset / BITS_PER_UNIT)
10005 : return NULL_TREE;
10006 : }
10007 273603 : inner_offset = 0;
10008 273603 : bufoff += len;
10009 :
 : /* Advance to the next constructor element, materializing
 : zeros for gaps and past-the-end elements. */
10010 273603 : access_index += 1;
10011 273603 : if (wi::cmpu (access_index, index) == 0)
10012 2 : val = elt->value;
10013 273601 : else if (wi::cmpu (access_index, max_index) > 0)
10014 : {
10015 273361 : ctor_idx++;
10016 273361 : if (ctor_idx >= CONSTRUCTOR_NELTS (ctor))
10017 : {
10018 38350 : val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
10019 38350 : ++max_index;
10020 : }
10021 : else
10022 : {
10023 235011 : elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
10024 235011 : index = 0;
10025 235011 : max_index = access_index;
10026 235011 : if (!elt->index)
10027 : ;
10028 234203 : else if (TREE_CODE (elt->index) == RANGE_EXPR)
10029 : {
10030 0 : index = wi::to_offset (TREE_OPERAND (elt->index, 0));
10031 0 : max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
10032 : }
10033 : else
10034 234203 : index = max_index = wi::to_offset (elt->index);
10035 235011 : index = wi::umax (index, access_index);
10036 235011 : if (wi::cmpu (access_index, index) == 0)
10037 235006 : val = elt->value;
10038 : else
10039 5 : val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
10040 : }
10041 : }
10042 : }
10043 273603 : while (bufoff < size / BITS_PER_UNIT);
10044 40111 : *suboff += size;
10045 40111 : return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
10046 : }
10047 :
10048 656725 : unsigned ctor_idx;
10049 656725 : if (tree val = get_array_ctor_element_at_index (ctor, access_index,
10050 : &ctor_idx))
10051 : {
10052 655596 : if (TREE_CODE (val) == RAW_DATA_CST)
10053 : {
10054 2591 : if (size != BITS_PER_UNIT || elt_sz != 1 || inner_offset != 0)
10055 : return NULL_TREE;
10056 2583 : constructor_elt *elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
10057 2583 : if (elt->index == NULL_TREE || TREE_CODE (elt->index) != INTEGER_CST)
10058 : return NULL_TREE;
10059 2583 : unsigned o = (access_index - wi::to_offset (elt->index)).to_uhwi ();
10060 2583 : val = build_int_cst (TREE_TYPE (val), RAW_DATA_UCHAR_ELT (val, o));
10061 : }
10062 655588 : if (!size && TREE_CODE (val) != CONSTRUCTOR)
10063 : {
10064 : /* For the final reference to the entire accessed element
10065 : (SIZE is zero), reset INNER_OFFSET, disregard TYPE (which
10066 : may be null) in favor of the type of the element, and set
10067 : SIZE to the size of the accessed element. */
10068 22902 : inner_offset = 0;
10069 22902 : type = TREE_TYPE (val);
10070 22902 : size = elt_sz * BITS_PER_UNIT;
10071 : }
10072 1687867 : else if (size && access_index < CONSTRUCTOR_NELTS (ctor) - 1
10073 460610 : && TREE_CODE (val) == CONSTRUCTOR
10074 16685 : && (elt_sz * BITS_PER_UNIT - inner_offset) < size)
10075 : /* If this isn't the last element in the CTOR and a CTOR itself
10076 : and it does not cover the whole object we are requesting give up
10077 : since we're not set up for combining from multiple CTORs. */
10078 26 : return NULL_TREE;
10079 :
10080 655562 : *suboff += access_index.to_uhwi () * elt_sz * BITS_PER_UNIT;
10081 655562 : return fold_ctor_reference (type, val, inner_offset, size, from_decl,
10082 : suboff);
10083 : }
10084 :
10085 : /* Memory not explicitly mentioned in constructor is 0 (or
10086 : the reference is out of range). */
10087 1129 : return type ? build_zero_cst (type) : NULL_TREE;
10088 : }
10089 :
10090 : /* CTOR is a CONSTRUCTOR of a record or union type. Fold a reference of SIZE
10091 : bits to the memory at bit OFFSET. If non-null, TYPE is the expected type of
10092 : the reference; otherwise the type of the referenced member is used instead.
10093 : When SIZE is zero, attempt to fold a reference to the entire member OFFSET
10094 : refers to. Increment *SUBOFF by the bit offset of the accessed member. */
10095 :
10096 : static tree
10097 75741 : fold_nonarray_ctor_reference (tree type, tree ctor,
10098 : unsigned HOST_WIDE_INT offset,
10099 : unsigned HOST_WIDE_INT size,
10100 : tree from_decl,
10101 : unsigned HOST_WIDE_INT *suboff)
10102 : {
10103 75741 : unsigned HOST_WIDE_INT cnt;
10104 75741 : tree cfield, cval;
10105 :
 : /* Walk the initialized fields looking for the one overlapping
 : the requested bit range. */
10106 115856 : FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
10107 : {
10108 106894 : tree byte_offset = DECL_FIELD_OFFSET (cfield);
10109 106894 : tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
10110 106894 : tree field_size = DECL_SIZE (cfield);
10111 :
10112 106894 : if (!field_size)
10113 : {
10114 : /* Determine the size of the flexible array member from
10115 : the size of the initializer provided for it. */
10116 847 : field_size = TYPE_SIZE (TREE_TYPE (cval));
10117 : }
10118 :
10119 : /* Variable sized objects in static constructors make no sense,
10120 : but field_size can be NULL for flexible array members. */
10121 106894 : gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
10122 : && TREE_CODE (byte_offset) == INTEGER_CST
10123 : && (field_size != NULL_TREE
10124 : ? TREE_CODE (field_size) == INTEGER_CST
10125 : : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
10126 :
10127 : /* Compute bit offset of the field. */
10128 106894 : offset_int bitoffset
10129 106894 : = (wi::to_offset (field_offset)
10130 106894 : + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
10131 : /* Compute bit offset where the field ends. */
10132 106894 : offset_int bitoffset_end;
10133 106894 : if (field_size != NULL_TREE)
10134 106894 : bitoffset_end = bitoffset + wi::to_offset (field_size);
10135 : else
10136 0 : bitoffset_end = 0;
10137 :
10138 : /* Compute the bit offset of the end of the desired access.
10139 : As a special case, if the size of the desired access is
10140 : zero, assume the access is to the entire field (and let
10141 : the caller make any necessary adjustments by storing
10142 : the actual bounds of the field in FIELDBOUNDS).
 : NOTE(review): no FIELDBOUNDS parameter exists here; this
 : presumably refers to *SUBOFF nowadays — confirm against
 : callers. */
10143 106894 : offset_int access_end = offset_int (offset);
10144 106894 : if (size)
10145 65466 : access_end += size;
10146 : else
10147 41428 : access_end = bitoffset_end;
10148 :
10149 : /* Is there any overlap between the desired access at
10150 : [OFFSET, OFFSET+SIZE) and the offset of the field within
10151 : the object at [BITOFFSET, BITOFFSET_END)? */
10152 106894 : if (wi::cmps (access_end, bitoffset) > 0
10153 106894 : && (field_size == NULL_TREE
10154 104563 : || wi::lts_p (offset, bitoffset_end)))
10155 : {
10156 66779 : *suboff += bitoffset.to_uhwi ();
10157 :
10158 66779 : if (!size && TREE_CODE (cval) != CONSTRUCTOR)
10159 : {
10160 : /* For the final reference to the entire accessed member
10161 : (SIZE is zero), reset OFFSET, disregard TYPE (which may
10162 : be null) in favor of the type of the member, and set
10163 : SIZE to the size of the accessed member. */
10164 19305 : offset = bitoffset.to_uhwi ();
10165 19305 : type = TREE_TYPE (cval);
10166 19305 : size = (bitoffset_end - bitoffset).to_uhwi ();
10167 : }
10168 :
10169 : /* We do have overlap. Now see if the field is large enough
10170 : to cover the access. Give up for accesses that extend
10171 : beyond the end of the object or that span multiple fields. */
10172 66779 : if (wi::cmps (access_end, bitoffset_end) > 0)
10173 : return NULL_TREE;
10174 66147 : if (offset < bitoffset)
10175 : return NULL_TREE;
10176 :
10177 66147 : offset_int inner_offset = offset_int (offset) - bitoffset;
10178 :
10179 : /* Integral bit-fields are left-justified on big-endian targets, so
10180 : we must arrange for native_encode_int to start at their MSB. */
10181 66147 : if (DECL_BIT_FIELD (cfield) && INTEGRAL_TYPE_P (TREE_TYPE (cfield)))
10182 : {
10183 66147 : if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
10184 : return NULL_TREE;
10185 66147 : if (BYTES_BIG_ENDIAN)
10186 : {
10187 : tree ctype = TREE_TYPE (cfield);
10188 : unsigned int encoding_size;
10189 : if (TYPE_MODE (ctype) != BLKmode)
10190 : encoding_size
10191 : = GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (ctype));
10192 : else
10193 : encoding_size = TREE_INT_CST_LOW (TYPE_SIZE (ctype));
10194 : inner_offset += encoding_size - wi::to_offset (field_size);
10195 : }
10196 : }
10197 :
 : /* Recurse into the field's own initializer. */
10198 66147 : return fold_ctor_reference (type, cval,
10199 66147 : inner_offset.to_uhwi (), size,
10200 : from_decl, suboff);
10201 : }
10202 : }
10203 :
10204 8962 : if (!type)
10205 : return NULL_TREE;
10206 :
 : /* Memory not explicitly mentioned in the constructor is zero. */
10207 8962 : return build_zero_cst (type);
10208 : }
10209 :
/* CTOR is a value initializing memory.  Fold a reference of TYPE and
   bit size POLY_SIZE to the memory at bit POLY_OFFSET.  When POLY_SIZE
   is zero, attempt to fold a reference to the entire subobject
   which OFFSET refers to.  This is used when folding accesses to
   string members of aggregates.  When non-null, set *SUBOFF to
   the bit offset of the accessed subobject.  Returns the folded
   constant or NULL_TREE if the reference could not be folded.  */

tree
fold_ctor_reference (tree type, tree ctor, const poly_uint64 &poly_offset,
		     const poly_uint64 &poly_size, tree from_decl,
		     unsigned HOST_WIDE_INT *suboff /* = NULL */)
{
  tree ret;

  /* We found the field with exact match.  */
  if (type
      && useless_type_conversion_p (type, TREE_TYPE (ctor))
      && known_eq (poly_offset, 0U))
    return canonicalize_constructor_val (unshare_expr (ctor), from_decl);

  /* The remaining optimizations need a constant size and offset.  */
  unsigned HOST_WIDE_INT size, offset;
  if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
    return NULL_TREE;

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && known_eq (wi::to_poly_widest (TYPE_SIZE (type)), size)
      && known_eq (wi::to_poly_widest (TYPE_SIZE (TREE_TYPE (ctor))), size))
    {
      ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
      if (ret)
	{
	  ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
	  if (ret)
	    STRIP_USELESS_TYPE_CONVERSION (ret);
	}
      return ret;
    }

  /* For constants and byte-aligned/sized reads, try to go through
     native_encode/interpret.  */
  if (CONSTANT_CLASS_P (ctor)
      && BITS_PER_UNIT == 8
      && offset % BITS_PER_UNIT == 0
      && offset / BITS_PER_UNIT <= INT_MAX
      && size % BITS_PER_UNIT == 0
      && size <= MAX_BITSIZE_MODE_ANY_MODE
      && can_native_interpret_type_p (type))
    {
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
				    offset / BITS_PER_UNIT);
      if (len > 0)
	return native_interpret_expr (type, buf, len);
    }

  /* For constructors, try first a recursive local processing, but in any case
     this requires the native storage order.  */
  if (TREE_CODE (ctor) == CONSTRUCTOR
      && !(AGGREGATE_TYPE_P (TREE_TYPE (ctor))
	   && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (ctor))))
    {
      unsigned HOST_WIDE_INT dummy = 0;
      if (!suboff)
	suboff = &dummy;

      tree ret;
      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
	  || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
	ret = fold_array_ctor_reference (type, ctor, offset, size,
					 from_decl, suboff);
      else
	ret = fold_nonarray_ctor_reference (type, ctor, offset, size,
					    from_decl, suboff);

      /* Otherwise fall back to native_encode_initializer.  This may be done
	 only from the outermost fold_ctor_reference call (because it itself
	 recurses into CONSTRUCTORs and doesn't update suboff).
	 SUBOFF == &DUMMY identifies the outermost call: recursive calls
	 always pass a non-null SUBOFF.  */
      if (ret == NULL_TREE
	  && suboff == &dummy
	  && BITS_PER_UNIT == 8
	  && offset % BITS_PER_UNIT == 0
	  && offset / BITS_PER_UNIT <= INT_MAX
	  && size % BITS_PER_UNIT == 0
	  && size <= MAX_BITSIZE_MODE_ANY_MODE
	  && can_native_interpret_type_p (type))
	{
	  unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
	  int len = native_encode_initializer (ctor, buf, size / BITS_PER_UNIT,
					       offset / BITS_PER_UNIT);
	  if (len > 0)
	    return native_interpret_expr (type, buf, len);
	}

      return ret;
    }

  return NULL_TREE;
}
10312 :
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  poly_int64 offset, size, max_size;
  tree tem;
  bool reverse;

  /* Never fold away a volatile access.  */
  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (DECL_P (t))
    return get_symbol_constant_value (t);

  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
	 Only special case variable offsets.
	 FIXME: This code can't handle nested references with variable indexes
	 (they will be handled only by iteration of ccp).  Perhaps we can bring
	 get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
	  && valueize
	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
	  && poly_int_tree_p (idx))
	{
	  tree low_bound, unit_size;

	  /* If the resulting bit-offset is constant, track it.  */
	  if ((low_bound = array_ref_low_bound (t),
	       poly_int_tree_p (low_bound))
	      && (unit_size = array_ref_element_size (t),
		  tree_fits_uhwi_p (unit_size)))
	    {
	      /* Bit offset of the element:
		 (idx - low_bound) * unit_size * BITS_PER_UNIT,
	         computed in the signed offset domain.  */
	      poly_offset_int woffset
		= wi::sext (wi::to_poly_offset (idx)
			    - wi::to_poly_offset (low_bound),
			    TYPE_PRECISION (sizetype));
	      woffset *= tree_to_uhwi (unit_size);
	      woffset *= BITS_PER_UNIT;
	      if (woffset.to_shwi (&offset))
		{
		  base = TREE_OPERAND (t, 0);
		  ctor = get_base_constructor (base, &offset, valueize);
		  /* Empty constructor.  Always fold to 0.  */
		  if (ctor == error_mark_node)
		    return build_zero_cst (TREE_TYPE (t));
		  /* Out of bound array access.  Value is undefined,
		     but don't fold.  */
		  if (maybe_lt (offset, 0))
		    return NULL_TREE;
		  /* We cannot determine ctor.  */
		  if (!ctor)
		    return NULL_TREE;
		  return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
					      tree_to_uhwi (unit_size)
					      * BITS_PER_UNIT,
					      base);
		}
	    }
	}
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
      ctor = get_base_constructor (base, &offset, valueize);

      /* We cannot determine ctor.  */
      if (!ctor)
	return NULL_TREE;
      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise access.  */
      if (!known_size_p (max_size) || maybe_ne (max_size, size))
	return NULL_TREE;
      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (maybe_lt (offset, 0))
	return NULL_TREE;
      /* Access with reverse storage order.  */
      if (reverse)
	return NULL_TREE;

      tem = fold_ctor_reference (TREE_TYPE (t), ctor, offset, size, base);
      if (tem)
	return tem;

      /* For bit field reads try to read the representative and
	 adjust.  */
      if (TREE_CODE (t) == COMPONENT_REF
	  && DECL_BIT_FIELD (TREE_OPERAND (t, 1))
	  && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)))
	{
	  HOST_WIDE_INT csize, coffset;
	  tree field = TREE_OPERAND (t, 1);
	  tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (repr))
	      && size.is_constant (&csize)
	      && offset.is_constant (&coffset)
	      && (coffset % BITS_PER_UNIT != 0
		  || csize % BITS_PER_UNIT != 0)
	      && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN)
	    {
	      /* Bit offset of FIELD within its representative REPR.  */
	      poly_int64 bitoffset;
	      poly_uint64 field_offset, repr_offset;
	      if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
		  && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
		bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
	      else
		bitoffset = 0;
	      bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
			    - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
	      HOST_WIDE_INT bitoff;
	      int diff = (TYPE_PRECISION (TREE_TYPE (repr))
			  - TYPE_PRECISION (TREE_TYPE (field)));
	      if (bitoffset.is_constant (&bitoff)
		  && bitoff >= 0
		  && bitoff <= diff)
		{
		  /* Read the whole representative, then shift the value
		     down to extract FIELD's bits.  */
		  offset -= bitoff;
		  size = tree_to_uhwi (DECL_SIZE (repr));

		  tem = fold_ctor_reference (TREE_TYPE (repr), ctor, offset,
					     size, base);
		  if (tem && TREE_CODE (tem) == INTEGER_CST)
		    {
		      if (!BYTES_BIG_ENDIAN)
			tem = wide_int_to_tree (TREE_TYPE (field),
						wi::lrshift (wi::to_wide (tem),
							     bitoff));
		      else
			tem = wide_int_to_tree (TREE_TYPE (field),
						wi::lrshift (wi::to_wide (tem),
							     diff - bitoff));
		      return tem;
		    }
		}
	    }
	}
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
10483 :
10484 : tree
10485 62994127 : fold_const_aggregate_ref (tree t)
10486 : {
10487 62994127 : return fold_const_aggregate_ref_1 (t, NULL);
10488 : }
10489 :
/* Lookup virtual method with index TOKEN in a virtual table V
   at OFFSET.
   Set CAN_REFER if non-NULL to false if method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by non-C++ produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
				   tree v,
				   unsigned HOST_WIDE_INT offset,
				   bool *can_refer)
{
  tree vtable = v, init, fn;
  unsigned HOST_WIDE_INT size;
  unsigned HOST_WIDE_INT elt_size, access_index;
  tree domain_type;

  if (can_refer)
    *can_refer = true;

  /* First of all double check we have virtual table.  */
  if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }

  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that ctor seem unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  /* Convert OFFSET (bytes into the vtable) plus the TOKEN'th slot
     into a bit offset.  */
  offset *= BITS_PER_UNIT;
  offset += token * size;

  /* Lookup the value in the constructor that is assumed to be array.
     This is equivalent to
     fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
			       offset, size, NULL);
     but in a constant time.  We expect that frontend produced a simple
     array without indexed initializers.  */

  gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
  domain_type = TYPE_DOMAIN (TREE_TYPE (init));
  gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
  elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));

  access_index = offset / BITS_PER_UNIT / elt_size;
  gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);

  /* This code makes an assumption that there are no
     indexed fields produced by C++ FE, so we can directly index the array.  */
  if (access_index < CONSTRUCTOR_NELTS (init))
    {
      fn = CONSTRUCTOR_ELT (init, access_index)->value;
      gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
      STRIP_NOPS (fn);
    }
  else
    fn = NULL;

  /* For type inconsistent program we may end up looking up virtual method
     in virtual table that does not contain TOKEN entries.  We may overrun
     the virtual table and pick up a constant or RTTI info pointer.
     In any case the call is undefined.  */
  if (!fn
      || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
    fn = builtin_decl_unreachable ();
  else
    {
      fn = TREE_OPERAND (fn, 0);

      /* When cgraph node is missing and function is not public, we cannot
	 devirtualize.  This can happen in WHOPR when the actual method
	 ends up in other partition, because we found devirtualization
	 possibility too late.  */
      if (!can_refer_decl_in_current_unit_p (fn, vtable))
	{
	  if (can_refer)
	    {
	      *can_refer = false;
	      return fn;
	    }
	  return NULL_TREE;
	}
    }

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_node::get_create (fn);

  return fn;
}
10598 :
/* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
   is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).
   Set CAN_REFER if non-NULL to false if method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by non-C++ produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
				  bool *can_refer)
{
  unsigned HOST_WIDE_INT offset;
  tree v;

  v = BINFO_VTABLE (known_binfo);
  /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone.  */
  if (!v)
    return NULL_TREE;

  /* Decompose the vtable pointer expression into the vtable VAR_DECL
     and a byte offset into it.  */
  if (!vtable_pointer_value_to_vtable (v, &v, &offset))
    {
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
}
10627 :
/* Given a pointer value T, return a simplified version of an
   indirection through T, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view.  */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  /* Look through no-op conversions of the pointer.  */
  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype)
      || TYPE_REF_CAN_ALIAS_ALL (ptype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
	return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (TREE_CODE (min_val) == INTEGER_CST)
	    return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
	  && tree_fits_uhwi_p (off))
	{
	  unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
	  tree part_width = TYPE_SIZE (type);
	  unsigned HOST_WIDE_INT part_widthi
	    = tree_to_shwi (part_width) / BITS_PER_UNIT;
	  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	  tree index = bitsize_int (indexi);
	  /* Only fold if the access stays inside the vector.  */
	  if (known_lt (offset / part_widthi,
			TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
	    return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
				part_width, index);
	}

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
	{
	  tree size = TYPE_SIZE_UNIT (type);
	  if (tree_int_cst_equal (size, off))
	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
	}

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
	  || DECL_P (TREE_OPERAND (addr, 0)))
	return fold_build2 (MEM_REF, type,
			    addr,
			    wide_int_to_tree (ptype, wi::to_wide (off)));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      /* Recurse to simplify the inner indirection first.  */
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
10748 :
10749 : /* Return true if CODE is an operation that when operating on signed
10750 : integer types involves undefined behavior on overflow and the
10751 : operation can be expressed with unsigned arithmetic. */
10752 :
10753 : bool
10754 505812 : arith_code_with_undefined_signed_overflow (tree_code code)
10755 : {
10756 505812 : switch (code)
10757 : {
10758 : case ABS_EXPR:
10759 : case PLUS_EXPR:
10760 : case MINUS_EXPR:
10761 : case MULT_EXPR:
10762 : case NEGATE_EXPR:
10763 : case POINTER_PLUS_EXPR:
10764 : return true;
10765 188310 : default:
10766 188310 : return false;
10767 : }
10768 : }
10769 :
/* Return true if STMT has an operation that operates on a signed
   integer types involves undefined behavior on overflow and the
   operation can be expressed with unsigned arithmetic.
   Also returns true if STMT is a VCE that needs to be rewritten
   if moved to be executed unconditionally.   */

bool
gimple_needing_rewrite_undefined (gimple *stmt)
{
  if (!is_gimple_assign (stmt))
    return false;
  tree lhs = gimple_assign_lhs (stmt);
  if (!lhs)
    return false;
  tree lhs_type = TREE_TYPE (lhs);
  if (!INTEGRAL_TYPE_P (lhs_type)
      && !POINTER_TYPE_P (lhs_type))
    return false;
  tree rhs = gimple_assign_rhs1 (stmt);
  /* Boolean loads need special handling as they are treated as a full MODE load
     and don't mask off the bits for the precision.  */
  if (gimple_assign_load_p (stmt)
      /* Booleans are the integral type which has this non-masking issue.  */
      && TREE_CODE (lhs_type) == BOOLEAN_TYPE
      /* Only non-mode-precision booleans need the masking.  */
      && !type_has_mode_precision_p (lhs_type)
      /* BIT_FIELD_REF should be the correct thing and just grab
	 the precision.  */
      && TREE_CODE (rhs) != BIT_FIELD_REF
      /* Bit-field loads don't need a rewrite as the masking
	 happens for them.  */
      && (TREE_CODE (rhs) != COMPONENT_REF
	  || !DECL_BIT_FIELD (TREE_OPERAND (rhs, 1))))
    return true;
  /* A VCE from an integral type to an integral type of smaller
     precision needs to be changed into a cast to be well defined.  */
  if (gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR
      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (rhs, 0)))
      && is_gimple_val (TREE_OPERAND (rhs, 0))
      && TYPE_PRECISION (lhs_type)
	  < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (rhs, 0))))
    return true;
  /* Finally, check for arithmetic whose overflow is undefined in the
     result type.  */
  if (!TYPE_OVERFLOW_UNDEFINED (lhs_type))
    return false;
  if (!arith_code_with_undefined_signed_overflow
	 (gimple_assign_rhs_code (stmt)))
    return false;
  return true;
}
10819 :
10820 : /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
10821 : operation that can be transformed to unsigned arithmetic by converting
10822 : its operand, carrying out the operation in the corresponding unsigned
10823 : type and converting the result back to the original type.
10824 :
10825 : If IN_PLACE is true, *GSI points to STMT, adjust the stmt in place and
10826 : return NULL.
10827 : Otherwise returns a sequence of statements that replace STMT and also
10828 : contain a modified form of STMT itself. */
10829 :
10830 : static gimple_seq
10831 66088 : rewrite_to_defined_unconditional (gimple_stmt_iterator *gsi, gimple *stmt,
10832 : bool in_place)
10833 : {
10834 66088 : gcc_assert (gimple_needing_rewrite_undefined (stmt));
10835 66088 : if (dump_file && (dump_flags & TDF_DETAILS))
10836 : {
10837 21 : fprintf (dump_file, "rewriting stmt for being uncondtional defined");
10838 21 : print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
10839 : }
10840 66088 : gimple_seq stmts = NULL;
10841 66088 : tree lhs = gimple_assign_lhs (stmt);
10842 :
10843 : /* Boolean loads need to be rewritten to be a load from the same mode
10844 : and then a cast to the other type so the other bits are masked off
10845 : correctly since the load was done conditionally. It is similar to the VCE
10846 : case below. */
10847 66088 : if (gimple_assign_load_p (stmt)
10848 66088 : && TREE_CODE (TREE_TYPE (lhs)) == BOOLEAN_TYPE)
10849 : {
10850 116 : tree rhs = gimple_assign_rhs1 (stmt);
10851 :
10852 : /* Double check that gimple_needing_rewrite_undefined was called. */
10853 : /* Bit-fields loads will do the masking so don't need the rewriting. */
10854 116 : gcc_assert (TREE_CODE (rhs) != COMPONENT_REF
10855 : || !DECL_BIT_FIELD (TREE_OPERAND (rhs, 1)));
10856 : /* BFR is like a bit field load and will do the correct thing. */
10857 116 : gcc_assert (TREE_CODE (lhs) != BIT_FIELD_REF);
10858 : /* Complex boolean types are not valid so REAL/IMAG part will
10859 : never show up. */
10860 116 : gcc_assert (TREE_CODE (rhs) != REALPART_EXPR
10861 : && TREE_CODE (lhs) != IMAGPART_EXPR);
10862 :
10863 116 : auto bits = GET_MODE_BITSIZE (SCALAR_TYPE_MODE (TREE_TYPE (rhs)));
10864 116 : tree new_type = build_nonstandard_integer_type (bits, true);
10865 116 : location_t loc = gimple_location (stmt);
10866 116 : tree mem_ref = fold_build1_loc (loc, VIEW_CONVERT_EXPR, new_type, rhs);
10867 : /* Replace the original load with a new load and a new lhs. */
10868 116 : tree new_lhs = make_ssa_name (new_type);
10869 116 : gimple_assign_set_rhs1 (stmt, mem_ref);
10870 116 : gimple_assign_set_lhs (stmt, new_lhs);
10871 :
10872 116 : if (in_place)
10873 49 : update_stmt (stmt);
10874 : else
10875 : {
10876 67 : gimple_set_modified (stmt, true);
10877 67 : gimple_seq_add_stmt (&stmts, stmt);
10878 : }
10879 :
10880 : /* Build the conversion statement. */
10881 116 : gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, new_lhs);
10882 116 : if (in_place)
10883 : {
10884 49 : gsi_insert_after (gsi, cvt, GSI_SAME_STMT);
10885 49 : update_stmt (stmt);
10886 : }
10887 : else
10888 67 : gimple_seq_add_stmt (&stmts, cvt);
10889 116 : return stmts;
10890 : }
10891 :
10892 : /* VCE from integral types to another integral types but with
10893 : smaller precisions need to be changed into casts
10894 : to be well defined. */
10895 65972 : if (gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
10896 : {
10897 60 : tree rhs = gimple_assign_rhs1 (stmt);
10898 60 : tree new_rhs = TREE_OPERAND (rhs, 0);
10899 60 : gcc_assert (TYPE_PRECISION (TREE_TYPE (rhs))
10900 : < TYPE_PRECISION (TREE_TYPE (new_rhs)));
10901 60 : gcc_assert (is_gimple_val (new_rhs));
10902 60 : gimple_assign_set_rhs_code (stmt, NOP_EXPR);
10903 60 : gimple_assign_set_rhs1 (stmt, new_rhs);
10904 60 : if (in_place)
10905 51 : update_stmt (stmt);
10906 : else
10907 : {
10908 9 : gimple_set_modified (stmt, true);
10909 9 : gimple_seq_add_stmt (&stmts, stmt);
10910 : }
10911 60 : return stmts;
10912 : }
10913 65912 : tree type = unsigned_type_for (TREE_TYPE (lhs));
10914 65912 : if (gimple_assign_rhs_code (stmt) == ABS_EXPR)
10915 21 : gimple_assign_set_rhs_code (stmt, ABSU_EXPR);
10916 : else
10917 196263 : for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
10918 : {
10919 130372 : tree op = gimple_op (stmt, i);
10920 130372 : op = gimple_convert (&stmts, type, op);
10921 130372 : gimple_set_op (stmt, i, op);
10922 : }
10923 65912 : gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
10924 65912 : if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
10925 10851 : gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
10926 65912 : gimple_set_modified (stmt, true);
10927 65912 : if (in_place)
10928 : {
10929 45564 : if (stmts)
10930 45175 : gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
10931 45564 : stmts = NULL;
10932 : }
10933 : else
10934 20348 : gimple_seq_add_stmt (&stmts, stmt);
10935 65912 : gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
10936 65912 : if (in_place)
10937 : {
10938 45564 : gsi_insert_after (gsi, cvt, GSI_SAME_STMT);
10939 45564 : update_stmt (stmt);
10940 : }
10941 : else
10942 20348 : gimple_seq_add_stmt (&stmts, cvt);
10943 :
10944 65912 : return stmts;
10945 : }
10946 :
10947 : void
10948 45664 : rewrite_to_defined_unconditional (gimple_stmt_iterator *gsi)
10949 : {
10950 45664 : rewrite_to_defined_unconditional (gsi, gsi_stmt (*gsi), true);
10951 45664 : }
10952 :
10953 : gimple_seq
10954 20424 : rewrite_to_defined_unconditional (gimple *stmt)
10955 : {
10956 20424 : return rewrite_to_defined_unconditional (nullptr, stmt, false);
10957 : }
10958 :
10959 : /* The valueization hook we use for the gimple_build API simplification.
10960 : This makes us match fold_buildN behavior by only combining with
10961 : statements in the sequence(s) we are currently building. */
10962 :
10963 : static tree
10964 20111849 : gimple_build_valueize (tree op)
10965 : {
10966 20111849 : if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
10967 4367268 : return op;
10968 : return NULL_TREE;
10969 : }
10970 :
10971 : /* Helper for gimple_build to perform the final insertion of stmts on SEQ. */
10972 :
10973 : static inline void
10974 1324235 : gimple_build_insert_seq (gimple_stmt_iterator *gsi,
10975 : bool before, gsi_iterator_update update,
10976 : gimple_seq seq)
10977 : {
10978 1324235 : if (before)
10979 : {
10980 89944 : if (gsi->bb)
10981 89944 : gsi_insert_seq_before (gsi, seq, update);
10982 : else
10983 0 : gsi_insert_seq_before_without_update (gsi, seq, update);
10984 : }
10985 : else
10986 : {
10987 1234291 : if (gsi->bb)
10988 131 : gsi_insert_seq_after (gsi, seq, update);
10989 : else
10990 1234160 : gsi_insert_seq_after_without_update (gsi, seq, update);
10991 : }
10992 1324235 : }
10993 :
10994 : /* Build the expression CODE OP0 of type TYPE with location LOC,
10995 : simplifying it first if possible. Returns the built
10996 : expression value and inserts statements possibly defining it
10997 : before GSI if BEFORE is true or after GSI if false and advance
10998 : the iterator accordingly.
10999 : If gsi refers to a basic block simplifying is allowed to look
11000 : at all SSA defs while when it does not it is restricted to
11001 : SSA defs that are not associated with a basic block yet,
11002 : indicating they belong to the currently building sequence. */
11003 :
11004 : tree
11005 342988 : gimple_build (gimple_stmt_iterator *gsi,
11006 : bool before, gsi_iterator_update update,
11007 : location_t loc, enum tree_code code, tree type, tree op0)
11008 : {
11009 342988 : gimple_seq seq = NULL;
11010 342988 : tree res
11011 342988 : = gimple_simplify (code, type, op0, &seq,
11012 342988 : gsi->bb ? follow_all_ssa_edges : gimple_build_valueize);
11013 342988 : if (!res)
11014 : {
11015 302187 : res = make_ssa_name (type);
11016 302187 : gimple *stmt;
11017 302187 : if (code == REALPART_EXPR
11018 : || code == IMAGPART_EXPR
11019 302187 : || code == VIEW_CONVERT_EXPR)
11020 15175 : stmt = gimple_build_assign (res, code, build1 (code, type, op0));
11021 : else
11022 287012 : stmt = gimple_build_assign (res, code, op0);
11023 302187 : gimple_set_location (stmt, loc);
11024 302187 : gimple_seq_add_stmt_without_update (&seq, stmt);
11025 : }
11026 342988 : gimple_build_insert_seq (gsi, before, update, seq);
11027 342988 : return res;
11028 : }
11029 :
11030 : /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
11031 : simplifying it first if possible. Returns the built
11032 : expression value inserting any new statements at GSI honoring BEFORE
11033 : and UPDATE. */
11034 :
11035 : tree
11036 772006 : gimple_build (gimple_stmt_iterator *gsi,
11037 : bool before, gsi_iterator_update update,
11038 : location_t loc, enum tree_code code, tree type,
11039 : tree op0, tree op1)
11040 : {
11041 772006 : gimple_seq seq = NULL;
11042 772006 : tree res
11043 772006 : = gimple_simplify (code, type, op0, op1, &seq,
11044 772006 : gsi->bb ? follow_all_ssa_edges : gimple_build_valueize);
11045 772006 : if (!res)
11046 : {
11047 682074 : res = make_ssa_name (type);
11048 682074 : gimple *stmt = gimple_build_assign (res, code, op0, op1);
11049 682074 : gimple_set_location (stmt, loc);
11050 682074 : gimple_seq_add_stmt_without_update (&seq, stmt);
11051 : }
11052 772006 : gimple_build_insert_seq (gsi, before, update, seq);
11053 772006 : return res;
11054 : }
11055 :
11056 : /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
11057 : simplifying it first if possible. Returns the built
11058 : expression value inserting any new statements at GSI honoring BEFORE
11059 : and UPDATE. */
11060 :
11061 : tree
11062 43727 : gimple_build (gimple_stmt_iterator *gsi,
11063 : bool before, gsi_iterator_update update,
11064 : location_t loc, enum tree_code code, tree type,
11065 : tree op0, tree op1, tree op2)
11066 : {
11067 :
11068 43727 : gimple_seq seq = NULL;
11069 43727 : tree res
11070 43727 : = gimple_simplify (code, type, op0, op1, op2, &seq,
11071 43727 : gsi->bb ? follow_all_ssa_edges : gimple_build_valueize);
11072 43727 : if (!res)
11073 : {
11074 31725 : res = make_ssa_name (type);
11075 31725 : gimple *stmt;
11076 31725 : if (code == BIT_FIELD_REF)
11077 24467 : stmt = gimple_build_assign (res, code,
11078 : build3 (code, type, op0, op1, op2));
11079 : else
11080 7258 : stmt = gimple_build_assign (res, code, op0, op1, op2);
11081 31725 : gimple_set_location (stmt, loc);
11082 31725 : gimple_seq_add_stmt_without_update (&seq, stmt);
11083 : }
11084 43727 : gimple_build_insert_seq (gsi, before, update, seq);
11085 43727 : return res;
11086 : }
11087 :
11088 : /* Build the call FN () with a result of type TYPE (or no result if TYPE is
11089 : void) with a location LOC. Returns the built expression value (or NULL_TREE
11090 : if TYPE is void) inserting any new statements at GSI honoring BEFORE
11091 : and UPDATE. */
11092 :
11093 : tree
11094 0 : gimple_build (gimple_stmt_iterator *gsi,
11095 : bool before, gsi_iterator_update update,
11096 : location_t loc, combined_fn fn, tree type)
11097 : {
11098 0 : tree res = NULL_TREE;
11099 0 : gimple_seq seq = NULL;
11100 0 : gcall *stmt;
11101 0 : if (internal_fn_p (fn))
11102 0 : stmt = gimple_build_call_internal (as_internal_fn (fn), 0);
11103 : else
11104 : {
11105 0 : tree decl = builtin_decl_implicit (as_builtin_fn (fn));
11106 0 : stmt = gimple_build_call (decl, 0);
11107 : }
11108 0 : if (!VOID_TYPE_P (type))
11109 : {
11110 0 : res = make_ssa_name (type);
11111 0 : gimple_call_set_lhs (stmt, res);
11112 : }
11113 0 : gimple_set_location (stmt, loc);
11114 0 : gimple_seq_add_stmt_without_update (&seq, stmt);
11115 0 : gimple_build_insert_seq (gsi, before, update, seq);
11116 0 : return res;
11117 : }
11118 :
11119 : /* Build the call FN (ARG0) with a result of type TYPE
11120 : (or no result if TYPE is void) with location LOC,
11121 : simplifying it first if possible. Returns the built
11122 : expression value (or NULL_TREE if TYPE is void) inserting any new
11123 : statements at GSI honoring BEFORE and UPDATE. */
11124 :
11125 : tree
11126 24997 : gimple_build (gimple_stmt_iterator *gsi,
11127 : bool before, gsi_iterator_update update,
11128 : location_t loc, combined_fn fn,
11129 : tree type, tree arg0)
11130 : {
11131 24997 : gimple_seq seq = NULL;
11132 24997 : tree res = gimple_simplify (fn, type, arg0, &seq, gimple_build_valueize);
11133 24997 : if (!res)
11134 : {
11135 24997 : gcall *stmt;
11136 24997 : if (internal_fn_p (fn))
11137 24616 : stmt = gimple_build_call_internal (as_internal_fn (fn), 1, arg0);
11138 : else
11139 : {
11140 381 : tree decl = builtin_decl_implicit (as_builtin_fn (fn));
11141 381 : stmt = gimple_build_call (decl, 1, arg0);
11142 : }
11143 24997 : if (!VOID_TYPE_P (type))
11144 : {
11145 24616 : res = make_ssa_name (type);
11146 24616 : gimple_call_set_lhs (stmt, res);
11147 : }
11148 24997 : gimple_set_location (stmt, loc);
11149 24997 : gimple_seq_add_stmt_without_update (&seq, stmt);
11150 : }
11151 24997 : gimple_build_insert_seq (gsi, before, update, seq);
11152 24997 : return res;
11153 : }
11154 :
11155 : /* Build the call FN (ARG0, ARG1) with a result of type TYPE
11156 : (or no result if TYPE is void) with location LOC,
11157 : simplifying it first if possible. Returns the built
11158 : expression value (or NULL_TREE if TYPE is void) inserting any new
11159 : statements at GSI honoring BEFORE and UPDATE. */
11160 :
11161 : tree
11162 0 : gimple_build (gimple_stmt_iterator *gsi,
11163 : bool before, gsi_iterator_update update,
11164 : location_t loc, combined_fn fn,
11165 : tree type, tree arg0, tree arg1)
11166 : {
11167 0 : gimple_seq seq = NULL;
11168 0 : tree res = gimple_simplify (fn, type, arg0, arg1, &seq,
11169 : gimple_build_valueize);
11170 0 : if (!res)
11171 : {
11172 0 : gcall *stmt;
11173 0 : if (internal_fn_p (fn))
11174 0 : stmt = gimple_build_call_internal (as_internal_fn (fn), 2, arg0, arg1);
11175 : else
11176 : {
11177 0 : tree decl = builtin_decl_implicit (as_builtin_fn (fn));
11178 0 : stmt = gimple_build_call (decl, 2, arg0, arg1);
11179 : }
11180 0 : if (!VOID_TYPE_P (type))
11181 : {
11182 0 : res = make_ssa_name (type);
11183 0 : gimple_call_set_lhs (stmt, res);
11184 : }
11185 0 : gimple_set_location (stmt, loc);
11186 0 : gimple_seq_add_stmt_without_update (&seq, stmt);
11187 : }
11188 0 : gimple_build_insert_seq (gsi, before, update, seq);
11189 0 : return res;
11190 : }
11191 :
11192 : /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
11193 : (or no result if TYPE is void) with location LOC,
11194 : simplifying it first if possible. Returns the built
11195 : expression value (or NULL_TREE if TYPE is void) inserting any new
11196 : statements at GSI honoring BEFORE and UPDATE. */
11197 :
11198 : tree
11199 0 : gimple_build (gimple_stmt_iterator *gsi,
11200 : bool before, gsi_iterator_update update,
11201 : location_t loc, combined_fn fn,
11202 : tree type, tree arg0, tree arg1, tree arg2)
11203 : {
11204 0 : gimple_seq seq = NULL;
11205 0 : tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
11206 : &seq, gimple_build_valueize);
11207 0 : if (!res)
11208 : {
11209 0 : gcall *stmt;
11210 0 : if (internal_fn_p (fn))
11211 0 : stmt = gimple_build_call_internal (as_internal_fn (fn),
11212 : 3, arg0, arg1, arg2);
11213 : else
11214 : {
11215 0 : tree decl = builtin_decl_implicit (as_builtin_fn (fn));
11216 0 : stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
11217 : }
11218 0 : if (!VOID_TYPE_P (type))
11219 : {
11220 0 : res = make_ssa_name (type);
11221 0 : gimple_call_set_lhs (stmt, res);
11222 : }
11223 0 : gimple_set_location (stmt, loc);
11224 0 : gimple_seq_add_stmt_without_update (&seq, stmt);
11225 : }
11226 0 : gimple_build_insert_seq (gsi, before, update, seq);
11227 0 : return res;
11228 : }
11229 :
11230 : /* Build CODE (OP0) with a result of type TYPE (or no result if TYPE is
11231 : void) with location LOC, simplifying it first if possible. Returns the
11232 : built expression value (or NULL_TREE if TYPE is void) inserting any new
11233 : statements at GSI honoring BEFORE and UPDATE. */
11234 :
11235 : tree
11236 21 : gimple_build (gimple_stmt_iterator *gsi,
11237 : bool before, gsi_iterator_update update,
11238 : location_t loc, code_helper code, tree type, tree op0)
11239 : {
11240 21 : if (code.is_tree_code ())
11241 0 : return gimple_build (gsi, before, update, loc, tree_code (code), type, op0);
11242 21 : return gimple_build (gsi, before, update, loc, combined_fn (code), type, op0);
11243 : }
11244 :
11245 : /* Build CODE (OP0, OP1) with a result of type TYPE (or no result if TYPE is
11246 : void) with location LOC, simplifying it first if possible. Returns the
11247 : built expression value (or NULL_TREE if TYPE is void) inserting any new
11248 : statements at GSI honoring BEFORE and UPDATE. */
11249 :
11250 : tree
11251 24078 : gimple_build (gimple_stmt_iterator *gsi,
11252 : bool before, gsi_iterator_update update,
11253 : location_t loc, code_helper code, tree type, tree op0, tree op1)
11254 : {
11255 24078 : if (code.is_tree_code ())
11256 24078 : return gimple_build (gsi, before, update,
11257 24078 : loc, tree_code (code), type, op0, op1);
11258 0 : return gimple_build (gsi, before, update,
11259 0 : loc, combined_fn (code), type, op0, op1);
11260 : }
11261 :
11262 : /* Build CODE (OP0, OP1, OP2) with a result of type TYPE (or no result if TYPE
11263 : is void) with location LOC, simplifying it first if possible. Returns the
11264 : built expression value (or NULL_TREE if TYPE is void) inserting any new
11265 : statements at GSI honoring BEFORE and UPDATE. */
11266 :
11267 : tree
11268 0 : gimple_build (gimple_stmt_iterator *gsi,
11269 : bool before, gsi_iterator_update update,
11270 : location_t loc, code_helper code,
11271 : tree type, tree op0, tree op1, tree op2)
11272 : {
11273 0 : if (code.is_tree_code ())
11274 0 : return gimple_build (gsi, before, update,
11275 0 : loc, tree_code (code), type, op0, op1, op2);
11276 0 : return gimple_build (gsi, before, update,
11277 0 : loc, combined_fn (code), type, op0, op1, op2);
11278 : }
11279 :
11280 : /* Build the conversion (TYPE) OP with a result of type TYPE
11281 : with location LOC if such conversion is neccesary in GIMPLE,
11282 : simplifying it first.
11283 : Returns the built expression inserting any new statements
11284 : at GSI honoring BEFORE and UPDATE. */
11285 :
11286 : tree
11287 2011730 : gimple_convert (gimple_stmt_iterator *gsi,
11288 : bool before, gsi_iterator_update update,
11289 : location_t loc, tree type, tree op)
11290 : {
11291 2011730 : if (useless_type_conversion_p (type, TREE_TYPE (op)))
11292 : return op;
11293 187477 : return gimple_build (gsi, before, update, loc, NOP_EXPR, type, op);
11294 : }
11295 :
11296 : /* Build the conversion (ptrofftype) OP with a result of a type
11297 : compatible with ptrofftype with location LOC if such conversion
11298 : is neccesary in GIMPLE, simplifying it first.
11299 : Returns the built expression value inserting any new statements
11300 : at GSI honoring BEFORE and UPDATE. */
11301 :
11302 : tree
11303 203 : gimple_convert_to_ptrofftype (gimple_stmt_iterator *gsi,
11304 : bool before, gsi_iterator_update update,
11305 : location_t loc, tree op)
11306 : {
11307 203 : if (ptrofftype_p (TREE_TYPE (op)))
11308 : return op;
11309 0 : return gimple_convert (gsi, before, update, loc, sizetype, op);
11310 : }
11311 :
11312 : /* Build a vector of type TYPE in which each element has the value OP.
11313 : Return a gimple value for the result, inserting any new statements
11314 : at GSI honoring BEFORE and UPDATE. */
11315 :
11316 : tree
11317 324575 : gimple_build_vector_from_val (gimple_stmt_iterator *gsi,
11318 : bool before, gsi_iterator_update update,
11319 : location_t loc, tree type, tree op)
11320 : {
11321 324575 : if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
11322 : && !CONSTANT_CLASS_P (op))
11323 : return gimple_build (gsi, before, update,
11324 : loc, VEC_DUPLICATE_EXPR, type, op);
11325 :
11326 324575 : tree res, vec = build_vector_from_val (type, op);
11327 324575 : if (is_gimple_val (vec))
11328 : return vec;
11329 27505 : if (gimple_in_ssa_p (cfun))
11330 27505 : res = make_ssa_name (type);
11331 : else
11332 0 : res = create_tmp_reg (type);
11333 27505 : gimple_seq seq = NULL;
11334 27505 : gimple *stmt = gimple_build_assign (res, vec);
11335 27505 : gimple_set_location (stmt, loc);
11336 27505 : gimple_seq_add_stmt_without_update (&seq, stmt);
11337 27505 : gimple_build_insert_seq (gsi, before, update, seq);
11338 27505 : return res;
11339 : }
11340 :
11341 : /* Build a vector from BUILDER, handling the case in which some elements
11342 : are non-constant. Return a gimple value for the result, inserting
11343 : any new instructions to GSI honoring BEFORE and UPDATE.
11344 :
11345 : BUILDER must not have a stepped encoding on entry. This is because
11346 : the function is not geared up to handle the arithmetic that would
11347 : be needed in the variable case, and any code building a vector that
11348 : is known to be constant should use BUILDER->build () directly. */
11349 :
tree
gimple_build_vector (gimple_stmt_iterator *gsi,
		     bool before, gsi_iterator_update update,
		     location_t loc, tree_vector_builder *builder)
{
  /* Stepped encodings (3 elements per pattern) would require extra
     arithmetic; the function's contract excludes them.  */
  gcc_assert (builder->nelts_per_pattern () <= 2);
  unsigned int encoded_nelts = builder->encoded_nelts ();
  /* Scan only the encoded elements; if all of them are constants the
     full vector is constant and BUILDER can build it directly.  */
  for (unsigned int i = 0; i < encoded_nelts; ++i)
    if (!CONSTANT_CLASS_P ((*builder)[i]))
      {
	gimple_seq seq = NULL;
	tree type = builder->type ();
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
	vec<constructor_elt, va_gc> *v;
	vec_alloc (v, nelts);
	/* Expand every (possibly implicit) element into a CONSTRUCTOR.
	   Note this deliberately reuses the outer index I; the function
	   returns below so the outer loop never resumes.  */
	for (i = 0; i < nelts; ++i)
	  CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));

	/* Assign the constructor to a fresh register and emit it.  */
	tree res;
	if (gimple_in_ssa_p (cfun))
	  res = make_ssa_name (type);
	else
	  res = create_tmp_reg (type);
	gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
	gimple_set_location (stmt, loc);
	gimple_seq_add_stmt_without_update (&seq, stmt);
	gimple_build_insert_seq (gsi, before, update, seq);
	return res;
      }
  /* All encoded elements constant: no statements needed.  */
  return builder->build ();
}
11381 :
11382 : /* Emit gimple statements into &stmts that take a value given in OLD_SIZE
11383 : and generate a value guaranteed to be rounded upwards to ALIGN.
11384 :
11385 : Return the tree node representing this size, it is of TREE_TYPE TYPE. */
11386 :
11387 : tree
11388 0 : gimple_build_round_up (gimple_stmt_iterator *gsi,
11389 : bool before, gsi_iterator_update update,
11390 : location_t loc, tree type,
11391 : tree old_size, unsigned HOST_WIDE_INT align)
11392 : {
11393 0 : unsigned HOST_WIDE_INT tg_mask = align - 1;
11394 : /* tree new_size = (old_size + tg_mask) & ~tg_mask; */
11395 0 : gcc_assert (INTEGRAL_TYPE_P (type));
11396 0 : tree tree_mask = build_int_cst (type, tg_mask);
11397 0 : tree oversize = gimple_build (gsi, before, update,
11398 : loc, PLUS_EXPR, type, old_size, tree_mask);
11399 :
11400 0 : tree mask = build_int_cst (type, -align);
11401 0 : return gimple_build (gsi, before, update,
11402 0 : loc, BIT_AND_EXPR, type, oversize, mask);
11403 : }
11404 :
11405 : /* Return true if the result of assignment STMT is known to be non-negative.
11406 : If the return value is based on the assumption that signed overflow is
11407 : undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
11408 : *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
11409 :
11410 : static bool
11411 59610203 : gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
11412 : int depth)
11413 : {
11414 59610203 : enum tree_code code = gimple_assign_rhs_code (stmt);
11415 59610203 : tree type = TREE_TYPE (gimple_assign_lhs (stmt));
11416 59610203 : switch (get_gimple_rhs_class (code))
11417 : {
11418 12653303 : case GIMPLE_UNARY_RHS:
11419 12653303 : return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
11420 : type,
11421 : gimple_assign_rhs1 (stmt),
11422 12653303 : strict_overflow_p, depth);
11423 41051451 : case GIMPLE_BINARY_RHS:
11424 41051451 : return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
11425 : type,
11426 : gimple_assign_rhs1 (stmt),
11427 : gimple_assign_rhs2 (stmt),
11428 41051451 : strict_overflow_p, depth);
11429 : case GIMPLE_TERNARY_RHS:
11430 : return false;
11431 5845833 : case GIMPLE_SINGLE_RHS:
11432 5845833 : return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
11433 5845833 : strict_overflow_p, depth);
11434 : case GIMPLE_INVALID_RHS:
11435 : break;
11436 : }
11437 0 : gcc_unreachable ();
11438 : }
11439 :
11440 : /* Return true if return value of call STMT is known to be non-negative.
11441 : If the return value is based on the assumption that signed overflow is
11442 : undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
11443 : *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
11444 :
11445 : static bool
11446 21358794 : gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
11447 : int depth)
11448 : {
11449 21358794 : tree arg0
11450 21358794 : = gimple_call_num_args (stmt) > 0 ? gimple_call_arg (stmt, 0) : NULL_TREE;
11451 21358794 : tree arg1
11452 21358794 : = gimple_call_num_args (stmt) > 1 ? gimple_call_arg (stmt, 1) : NULL_TREE;
11453 21358794 : tree lhs = gimple_call_lhs (stmt);
11454 21358794 : return (lhs
11455 21358794 : && tree_call_nonnegative_warnv_p (TREE_TYPE (lhs),
11456 : gimple_call_combined_fn (stmt),
11457 : arg0, arg1,
11458 21358794 : strict_overflow_p, depth));
11459 : }
11460 :
11461 : /* Return true if return value of call STMT is known to be non-negative.
11462 : If the return value is based on the assumption that signed overflow is
11463 : undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
11464 : *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
11465 :
11466 : static bool
11467 12983099 : gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
11468 : int depth)
11469 : {
11470 26439066 : for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
11471 : {
11472 21375829 : tree arg = gimple_phi_arg_def (stmt, i);
11473 21375829 : if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
11474 : return false;
11475 : }
11476 : return true;
11477 : }
11478 :
11479 : /* Return true if STMT is known to compute a non-negative value.
11480 : If the return value is based on the assumption that signed overflow is
11481 : undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
11482 : *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
11483 :
11484 : bool
11485 149126405 : gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
11486 : int depth)
11487 : {
11488 149126405 : tree type = gimple_range_type (stmt);
11489 149126405 : if (type && frange::supports_p (type))
11490 : {
11491 1654958 : frange r;
11492 1654958 : bool sign;
11493 1654958 : if (get_global_range_query ()->range_of_stmt (r, stmt)
11494 1654958 : && r.signbit_p (sign))
11495 31761 : return !sign;
11496 1654958 : }
11497 149094644 : switch (gimple_code (stmt))
11498 : {
11499 59610203 : case GIMPLE_ASSIGN:
11500 59610203 : return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
11501 59610203 : depth);
11502 21358794 : case GIMPLE_CALL:
11503 21358794 : return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
11504 21358794 : depth);
11505 12983099 : case GIMPLE_PHI:
11506 12983099 : return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
11507 12983099 : depth);
11508 : default:
11509 : return false;
11510 : }
11511 : }
11512 :
11513 : /* Return true if the floating-point value computed by assignment STMT
11514 : is known to have an integer value. We also allow +Inf, -Inf and NaN
11515 : to be considered integer values. Return false for signaling NaN.
11516 :
11517 : DEPTH is the current nesting depth of the query. */
11518 :
11519 : static bool
11520 58442 : gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
11521 : {
11522 58442 : enum tree_code code = gimple_assign_rhs_code (stmt);
11523 58442 : switch (get_gimple_rhs_class (code))
11524 : {
11525 14964 : case GIMPLE_UNARY_RHS:
11526 14964 : return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
11527 14964 : gimple_assign_rhs1 (stmt), depth);
11528 12986 : case GIMPLE_BINARY_RHS:
11529 12986 : return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
11530 : gimple_assign_rhs1 (stmt),
11531 12986 : gimple_assign_rhs2 (stmt), depth);
11532 : case GIMPLE_TERNARY_RHS:
11533 : return false;
11534 29490 : case GIMPLE_SINGLE_RHS:
11535 29490 : return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
11536 : case GIMPLE_INVALID_RHS:
11537 : break;
11538 : }
11539 0 : gcc_unreachable ();
11540 : }
11541 :
11542 : /* Return true if the floating-point value computed by call STMT is known
11543 : to have an integer value. We also allow +Inf, -Inf and NaN to be
11544 : considered integer values. Return false for signaling NaN.
11545 :
11546 : DEPTH is the current nesting depth of the query. */
11547 :
11548 : static bool
11549 1212 : gimple_call_integer_valued_real_p (gimple *stmt, int depth)
11550 : {
11551 1212 : tree arg0 = (gimple_call_num_args (stmt) > 0
11552 1212 : ? gimple_call_arg (stmt, 0)
11553 : : NULL_TREE);
11554 1212 : tree arg1 = (gimple_call_num_args (stmt) > 1
11555 1212 : ? gimple_call_arg (stmt, 1)
11556 : : NULL_TREE);
11557 1212 : return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
11558 1212 : arg0, arg1, depth);
11559 : }
11560 :
11561 : /* Return true if the floating-point result of phi STMT is known to have
11562 : an integer value. We also allow +Inf, -Inf and NaN to be considered
11563 : integer values. Return false for signaling NaN.
11564 :
11565 : DEPTH is the current nesting depth of the query. */
11566 :
11567 : static bool
11568 1490 : gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
11569 : {
11570 1655 : for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
11571 : {
11572 1650 : tree arg = gimple_phi_arg_def (stmt, i);
11573 1650 : if (!integer_valued_real_single_p (arg, depth + 1))
11574 : return false;
11575 : }
11576 : return true;
11577 : }
11578 :
11579 : /* Return true if the floating-point value computed by STMT is known
11580 : to have an integer value. We also allow +Inf, -Inf and NaN to be
11581 : considered integer values. Return false for signaling NaN.
11582 :
11583 : DEPTH is the current nesting depth of the query. */
11584 :
11585 : bool
11586 88521 : gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
11587 : {
11588 88521 : switch (gimple_code (stmt))
11589 : {
11590 58442 : case GIMPLE_ASSIGN:
11591 58442 : return gimple_assign_integer_valued_real_p (stmt, depth);
11592 1212 : case GIMPLE_CALL:
11593 1212 : return gimple_call_integer_valued_real_p (stmt, depth);
11594 1490 : case GIMPLE_PHI:
11595 1490 : return gimple_phi_integer_valued_real_p (stmt, depth);
11596 : default:
11597 : return false;
11598 : }
11599 : }
|