Line data Source code
1 : /* Statement simplification on GIMPLE.
2 : Copyright (C) 2010-2026 Free Software Foundation, Inc.
3 : Split out from tree-ssa-ccp.cc.
4 :
5 : This file is part of GCC.
6 :
7 : GCC is free software; you can redistribute it and/or modify it
8 : under the terms of the GNU General Public License as published by the
9 : Free Software Foundation; either version 3, or (at your option) any
10 : later version.
11 :
12 : GCC is distributed in the hope that it will be useful, but WITHOUT
13 : ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 : for more details.
16 :
17 : You should have received a copy of the GNU General Public License
18 : along with GCC; see the file COPYING3. If not see
19 : <http://www.gnu.org/licenses/>. */
20 :
21 : #include "config.h"
22 : #include "system.h"
23 : #include "coretypes.h"
24 : #include "backend.h"
25 : #include "target.h"
26 : #include "rtl.h"
27 : #include "tree.h"
28 : #include "gimple.h"
29 : #include "predict.h"
30 : #include "ssa.h"
31 : #include "cgraph.h"
32 : #include "gimple-pretty-print.h"
33 : #include "gimple-ssa-warn-access.h"
34 : #include "gimple-ssa-warn-restrict.h"
35 : #include "fold-const.h"
36 : #include "stmt.h"
37 : #include "expr.h"
38 : #include "stor-layout.h"
39 : #include "dumpfile.h"
40 : #include "gimple-iterator.h"
41 : #include "tree-pass.h"
42 : #include "gimple-fold.h"
43 : #include "gimplify.h"
44 : #include "tree-into-ssa.h"
45 : #include "tree-dfa.h"
46 : #include "tree-object-size.h"
47 : #include "tree-ssa.h"
48 : #include "tree-ssa-propagate.h"
49 : #include "ipa-utils.h"
50 : #include "tree-ssa-address.h"
51 : #include "langhooks.h"
52 : #include "gimplify-me.h"
53 : #include "dbgcnt.h"
54 : #include "builtins.h"
55 : #include "tree-eh.h"
56 : #include "gimple-match.h"
57 : #include "gomp-constants.h"
58 : #include "optabs-query.h"
59 : #include "omp-general.h"
60 : #include "tree-cfg.h"
61 : #include "fold-const-call.h"
62 : #include "stringpool.h"
63 : #include "attribs.h"
64 : #include "asan.h"
65 : #include "diagnostic-core.h"
66 : #include "intl.h"
67 : #include "calls.h"
68 : #include "tree-vector-builder.h"
69 : #include "tree-ssa-strlen.h"
70 : #include "varasm.h"
71 : #include "internal-fn.h"
72 : #include "gimple-range.h"
73 :
/* The kind of string-length computation requested from
   get_range_strlen and its helpers.  */

enum strlen_range_kind {
  /* Compute the exact constant string length.   */
  SRK_STRLEN,
  /* Compute the maximum constant string length.  */
  SRK_STRLENMAX,
  /* Compute a range of string lengths bounded by object sizes.  When
     the length of a string cannot be determined, consider as the upper
     bound the size of the enclosing object the string may be a member
     or element of.  Also determine the size of the largest character
     array the string may refer to.  */
  SRK_LENRANGE,
  /* Determine the integer value of the argument (not string length).  */
  SRK_INT_VALUE
};

/* Forward declaration; defined later in this file.  */
static bool
get_range_strlen (tree, bitmap, strlen_range_kind, c_strlen_data *, unsigned);
91 :
/* Return true when DECL can be referenced from current unit.
   FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to reference
	to method that was partitioned elsewhere.
	In this case we have static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred by external vtables that
	we devirtualize only during final compilation stage.
	At this time we already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  /* Abstract (origin-only) decls never have a body to refer to.  */
  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || !VAR_OR_FUNCTION_DECL_P (decl))
    return true;

  /* Static objects can be referred only if they are defined and not optimized
     out yet.  */
  if (!TREE_PUBLIC (decl))
    {
      if (DECL_EXTERNAL (decl))
	return false;
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
	return false;
      node = dyn_cast <cgraph_node *> (snode);
      /* An inlined-to function body no longer exists on its own.  */
      return !node || !node->inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var or var that has been optimized out.  */
  if (!from_decl
      || !VAR_P (from_decl)
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding reference from external vtable.  The vtable may refer
     to a symbol keyed to other compilation unit.  The other compilation
     unit may be in separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When function is public, we always can introduce new reference.
     Exception are the COMDAT functions where introducing a direct
     reference imply need to include function body in the current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have COMDAT.  We are going to check if we still have definition
     or if the definition is going to be output in other partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when corresponding vtable is output.
     This is however not possible - ABI specify that COMDATs are output in
     units where they are used and when the other unit was compiled with LTO
     it is possible that vtable was kept public while the function itself
     was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->inlined_to;
}
194 :
/* CVAL is value taken from DECL_INITIAL of variable.  Try to transform it into
   acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL.
   Returns the canonicalized value, the original value when no
   canonicalization applies, or NULL_TREE when the value cannot be
   referenced from the current unit.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  if (CONSTANT_CLASS_P (cval))
    return cval;

  tree orig_cval = cval;
  STRIP_NOPS (cval);
  /* Rewrite PTR p+ CST into the invariant form &MEM[ptr + CST].  */
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  /* Replace the compound literal with its underlying decl.  */
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      /* Addresses of symbols we cannot refer to are not invariants.  */
      if (VAR_OR_FUNCTION_DECL_P (base)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_TYPE (base) == error_mark_node)
	return NULL_TREE;
      if (VAR_P (base))
	/* ??? We should be able to assert that TREE_ADDRESSABLE is set,
	   but since the use can be in a debug stmt we can't.  */
	;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  /* In CONSTRUCTORs we may see unfolded constants like (int (*) ()) 0.  */
  if (TREE_CODE (cval) == INTEGER_CST)
    {
      if (TREE_OVERFLOW_P (cval))
	cval = drop_tree_overflow (cval);
      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  return orig_cval;
}
268 :
269 : /* If SYM is a constant variable with known value, return the value.
270 : NULL_TREE is returned otherwise. */
271 :
272 : tree
273 21062209 : get_symbol_constant_value (tree sym)
274 : {
275 21062209 : tree val = ctor_for_folding (sym);
276 21062209 : if (val != error_mark_node)
277 : {
278 39536 : if (val)
279 : {
280 37262 : val = canonicalize_constructor_val (unshare_expr (val), sym);
281 37262 : if (val
282 37262 : && is_gimple_min_invariant (val)
283 65806 : && useless_type_conversion_p (TREE_TYPE (sym), TREE_TYPE (val)))
284 : return val;
285 : else
286 8824 : return NULL_TREE;
287 : }
288 : /* Variables declared 'const' without an initializer
289 : have zero as the initializer if they may not be
290 : overridden at link or run time. */
291 2274 : if (!val
292 2274 : && is_gimple_reg_type (TREE_TYPE (sym)))
293 1937 : return build_zero_cst (TREE_TYPE (sym));
294 : }
295 :
296 : return NULL_TREE;
297 : }
298 :
299 :
300 :
301 : /* Subroutine of fold_stmt. We perform constant folding of the
302 : memory reference tree EXPR. */
303 :
304 : static tree
305 62516819 : maybe_fold_reference (tree expr)
306 : {
307 62516819 : tree result = NULL_TREE;
308 :
309 62516819 : if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
310 60487921 : || TREE_CODE (expr) == REALPART_EXPR
311 59812866 : || TREE_CODE (expr) == IMAGPART_EXPR)
312 64002512 : && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
313 2995 : result = fold_unary_loc (EXPR_LOCATION (expr),
314 : TREE_CODE (expr),
315 2995 : TREE_TYPE (expr),
316 2995 : TREE_OPERAND (expr, 0));
317 62513824 : else if (TREE_CODE (expr) == BIT_FIELD_REF
318 62513824 : && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
319 27 : result = fold_ternary_loc (EXPR_LOCATION (expr),
320 : TREE_CODE (expr),
321 27 : TREE_TYPE (expr),
322 27 : TREE_OPERAND (expr, 0),
323 27 : TREE_OPERAND (expr, 1),
324 27 : TREE_OPERAND (expr, 2));
325 : else
326 62513797 : result = fold_const_aggregate_ref (expr);
327 :
328 62516819 : if (result && is_gimple_min_invariant (result))
329 : return result;
330 :
331 : return NULL_TREE;
332 : }
333 :
/* Return true if EXPR is an acceptable right-hand-side for a
   GIMPLE assignment.  We validate the entire tree, not just
   the root node, thus catching expressions that embed complex
   operands that are not permitted in GIMPLE.  This function
   is needed because the folding routines in fold-const.cc
   may return such expressions in some cases, e.g., an array
   access with an embedded index addition.  It may make more
   sense to have folding routines that are sensitive to the
   constraints on GIMPLE operands, rather than abandoning
   any attempt to fold if the usual folding turns out to be too
   aggressive.  */

bool
valid_gimple_rhs_p (tree expr)
{
  enum tree_code code = TREE_CODE (expr);

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
      if (!is_gimple_variable (expr))
	return false;
      break;

    case tcc_constant:
      /* All constants are ok.  */
      break;

    case tcc_comparison:
      /* GENERIC allows comparisons with non-boolean types, reject
	 those for GIMPLE.  Let vector-typed comparisons pass - rules
	 for GENERIC and GIMPLE are the same here.  */
      if (!(INTEGRAL_TYPE_P (TREE_TYPE (expr))
	    && (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE
		|| TYPE_PRECISION (TREE_TYPE (expr)) == 1))
	  && ! VECTOR_TYPE_P (TREE_TYPE (expr)))
	return false;

      /* Fallthru.  */
    case tcc_binary:
      /* Both operands of a binary RHS must be GIMPLE values.  */
      if (!is_gimple_val (TREE_OPERAND (expr, 0))
	  || !is_gimple_val (TREE_OPERAND (expr, 1)))
	return false;
      break;

    case tcc_unary:
      if (!is_gimple_val (TREE_OPERAND (expr, 0)))
	return false;
      break;

    case tcc_expression:
      switch (code)
	{
	case ADDR_EXPR:
	  {
	    tree t;
	    if (is_gimple_min_invariant (expr))
	      return true;
	    /* Walk down the handled components; array indices must
	       themselves be GIMPLE values.  */
	    t = TREE_OPERAND (expr, 0);
	    while (handled_component_p (t))
	      {
		/* ??? More checks needed, see the GIMPLE verifier.  */
		if ((TREE_CODE (t) == ARRAY_REF
		     || TREE_CODE (t) == ARRAY_RANGE_REF)
		    && !is_gimple_val (TREE_OPERAND (t, 1)))
		  return false;
		t = TREE_OPERAND (t, 0);
	      }
	    if (!is_gimple_id (t))
	      return false;
	  }
	  break;

	default:
	  if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
	    {
	      if (!is_gimple_val (TREE_OPERAND (expr, 0))
		  || !is_gimple_val (TREE_OPERAND (expr, 1))
		  || !is_gimple_val (TREE_OPERAND (expr, 2)))
		return false;
	      break;
	    }
	  return false;
	}
      break;

    case tcc_vl_exp:
      return false;

    case tcc_exceptional:
      if (code == CONSTRUCTOR)
	{
	  /* A CONSTRUCTOR is valid only if every element is a value.  */
	  unsigned i;
	  tree elt;
	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (expr), i, elt)
	    if (!is_gimple_val (elt))
	      return false;
	  return true;
	}
      if (code != SSA_NAME)
	return false;
      break;

    case tcc_reference:
      if (code == BIT_FIELD_REF)
	return is_gimple_val (TREE_OPERAND (expr, 0));
      return false;

    default:
      return false;
    }

  return true;
}
448 :
449 :
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple *stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	/* Clobbers are not foldable.  */
	if (TREE_CLOBBER_P (rhs))
	  return NULL_TREE;

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    /* Try to devirtualize: resolve the virtual function address
	       when the set of possible targets is final and small.  */
	    else if (flag_devirtualize && virtual_method_call_p (rhs))
	      {
		bool final;
		vec <cgraph_node *>targets
		  = possible_polymorphic_call_targets (rhs, stmt, &final);
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      {
			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
					 "resolving virtual function address "
					 "reference to function %s\n",
					 targets.length () == 1
					 ? targets[0]->name ()
					 : "NULL");
		      }
		    if (targets.length () == 1)
		      {
			val = fold_convert (TREE_TYPE (val),
					    build_fold_addr_expr_loc
					      (loc, targets[0]->decl));
			STRIP_USELESS_TYPE_CONVERSION (val);
		      }
		    else
		      /* We cannot use __builtin_unreachable here because it
			 cannot have address taken.  */
		      val = build_int_cst (TREE_TYPE (val), 0);
		    return val;
		  }
	      }
	  }

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    /* &MEM[p + 0] simplifies to p (modulo a conversion).  */
	    tree ref = TREE_OPERAND (rhs, 0);
	    if (TREE_CODE (ref) == MEM_REF
		&& integer_zerop (TREE_OPERAND (ref, 1)))
	      {
		result = TREE_OPERAND (ref, 0);
		if (!useless_type_conversion_p (TREE_TYPE (rhs),
						TREE_TYPE (result)))
		  result = build1 (NOP_EXPR, TREE_TYPE (rhs), result);
		return result;
	      }
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (! CONSTANT_CLASS_P (val))
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs)
		 && is_gimple_reg_type (TREE_TYPE (rhs)))
	  return get_symbol_constant_value (rhs);
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
				 TREE_TYPE (gimple_assign_lhs (stmt)),
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  /* Only accept the fold if it is still a valid GIMPLE rhs.  */
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
575 :
576 :
/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts location and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple *laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* A statement needs a VDEF if it is a store or a call with
	 side effects on memory.  */
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  /* The last store in the sequence reuses the original
	     statement's VDEF; earlier stores get fresh SSA names.  */
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
649 : /* Helper function for update_gimple_call and
650 : gimplify_and_update_call_from_tree. A GIMPLE_CALL STMT is being replaced
651 : with GIMPLE_CALL NEW_STMT. */
652 :
653 : static void
654 2439 : finish_update_gimple_call (gimple_stmt_iterator *si_p, gimple *new_stmt,
655 : gimple *stmt)
656 : {
657 2439 : tree lhs = gimple_call_lhs (stmt);
658 2439 : gimple_call_set_lhs (new_stmt, lhs);
659 2439 : if (lhs && TREE_CODE (lhs) == SSA_NAME)
660 807 : SSA_NAME_DEF_STMT (lhs) = new_stmt;
661 2439 : gimple_move_vops (new_stmt, stmt);
662 2439 : gimple_set_location (new_stmt, gimple_location (stmt));
663 2439 : if (gimple_block (new_stmt) == NULL_TREE)
664 1 : gimple_set_block (new_stmt, gimple_block (stmt));
665 2439 : gsi_replace (si_p, new_stmt, false);
666 2439 : }
667 :
668 : /* Update a GIMPLE_CALL statement at iterator *SI_P to call to FN
669 : with number of arguments NARGS, where the arguments in GIMPLE form
670 : follow NARGS argument. */
671 :
672 : bool
673 2436 : update_gimple_call (gimple_stmt_iterator *si_p, tree fn, int nargs, ...)
674 : {
675 2436 : va_list ap;
676 2436 : gcall *new_stmt, *stmt = as_a <gcall *> (gsi_stmt (*si_p));
677 :
678 2436 : gcc_assert (is_gimple_call (stmt));
679 2436 : va_start (ap, nargs);
680 2436 : new_stmt = gimple_build_call_valist (fn, nargs, ap);
681 2436 : finish_update_gimple_call (si_p, new_stmt, stmt);
682 2436 : va_end (ap);
683 2436 : return true;
684 : }
685 :
686 : /* Return true if EXPR is a CALL_EXPR suitable for representation
687 : as a single GIMPLE_CALL statement. If the arguments require
688 : further gimplification, return false. */
689 :
690 : static bool
691 59251 : valid_gimple_call_p (tree expr)
692 : {
693 59251 : unsigned i, nargs;
694 :
695 59251 : if (TREE_CODE (expr) != CALL_EXPR)
696 : return false;
697 :
698 3 : nargs = call_expr_nargs (expr);
699 6 : for (i = 0; i < nargs; i++)
700 : {
701 3 : tree arg = CALL_EXPR_ARG (expr, i);
702 3 : if (is_gimple_reg_type (TREE_TYPE (arg)))
703 : {
704 3 : if (!is_gimple_val (arg))
705 : return false;
706 : }
707 : else
708 0 : if (!is_gimple_lvalue (arg))
709 : return false;
710 : }
711 :
712 : return true;
713 : }
714 :
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple *stmt, *new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  if (valid_gimple_call_p (expr))
    {
      /* The call has simplified to another call.  */
      tree fn = CALL_EXPR_FN (expr);
      unsigned i;
      unsigned nargs = call_expr_nargs (expr);
      vec<tree> args = vNULL;
      gcall *new_stmt;

      if (nargs > 0)
	{
	  args.create (nargs);
	  args.safe_grow_cleared (nargs, true);

	  for (i = 0; i < nargs; i++)
	    args[i] = CALL_EXPR_ARG (expr, i);
	}

      new_stmt = gimple_build_call_vec (fn, args);
      finish_update_gimple_call (si_p, new_stmt, stmt);
      args.release ();
      return;
    }

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      /* No result wanted: gimplify EXPR purely for side effects.  */
      push_gimplify_context (gimple_in_ssa_p (cfun));
      gimplify_and_add (expr, &stmts);
      pop_gimplify_context (NULL);

      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), false);
	  return;
	}
    }
  else
    {
      /* The result is used: gimplify EXPR into a value and append an
	 assignment of that value to the original lhs.  */
      tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  gsi_replace_with_seq_vops (si_p, stmts);
}
792 :
793 : /* Print a message in the dump file recording transformation of FROM to TO. */
794 :
795 : static void
796 40367 : dump_transformation (gcall *from, gcall *to)
797 : {
798 40367 : if (dump_enabled_p ())
799 12 : dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, from, "simplified %T to %T\n",
800 : gimple_call_fn (from), gimple_call_fn (to));
801 40367 : }
802 :
803 : /* Replace the call at *GSI with the gimple value VAL. */
804 :
805 : void
806 84346 : replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
807 : {
808 84346 : gimple *stmt = gsi_stmt (*gsi);
809 84346 : tree lhs = gimple_call_lhs (stmt);
810 84346 : gimple *repl;
811 84346 : if (lhs)
812 : {
813 79351 : if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
814 2112 : val = fold_convert (TREE_TYPE (lhs), val);
815 79351 : repl = gimple_build_assign (lhs, val);
816 : }
817 : else
818 4995 : repl = gimple_build_nop ();
819 84346 : tree vdef = gimple_vdef (stmt);
820 84346 : if (vdef && TREE_CODE (vdef) == SSA_NAME)
821 : {
822 5574 : unlink_stmt_vdef (stmt);
823 5574 : release_ssa_name (vdef);
824 : }
825 84346 : gsi_replace (gsi, repl, false);
826 84346 : }
827 :
828 : /* Replace the call at *GSI with the new call REPL and fold that
829 : again. */
830 :
831 : static void
832 40367 : replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
833 : {
834 40367 : gimple *stmt = gsi_stmt (*gsi);
835 40367 : dump_transformation (as_a <gcall *> (stmt), as_a <gcall *> (repl));
836 40367 : gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
837 40367 : gimple_set_location (repl, gimple_location (stmt));
838 40367 : gimple_move_vops (repl, stmt);
839 40367 : gsi_replace (gsi, repl, false);
840 40367 : fold_stmt (gsi);
841 40367 : }
842 :
843 : /* Return true if VAR is a VAR_DECL or a component thereof. */
844 :
845 : static bool
846 419632 : var_decl_component_p (tree var)
847 : {
848 419632 : tree inner = var;
849 611430 : while (handled_component_p (inner))
850 191798 : inner = TREE_OPERAND (inner, 0);
851 419632 : return (DECL_P (inner)
852 419632 : || (TREE_CODE (inner) == MEM_REF
853 50061 : && TREE_CODE (TREE_OPERAND (inner, 0)) == ADDR_EXPR));
854 : }
855 :
/* Return TRUE if the SIZE argument, representing the size of an
   object, is in a range of values of which exactly zero is valid.  */

static bool
size_must_be_zero_p (tree size)
{
  if (integer_zerop (size))
    return true;

  /* Only SSA names of integral type have usable range information.  */
  if (TREE_CODE (size) != SSA_NAME || !INTEGRAL_TYPE_P (TREE_TYPE (size)))
    return false;

  tree type = TREE_TYPE (size);
  int prec = TYPE_PRECISION (type);

  /* Compute the value of SSIZE_MAX, the largest positive value that
     can be stored in ssize_t, the signed counterpart of size_t.  */
  wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
  wide_int zero = wi::zero (TYPE_PRECISION (type));
  int_range_max valid_range (type, zero, ssize_max);
  int_range_max vr;
  get_range_query (cfun)->range_of_expr (vr, size);

  /* Treat an undefined range as varying before clipping it.  */
  if (vr.undefined_p ())
    vr.set_varying (TREE_TYPE (size));
  /* After intersecting with [0, SSIZE_MAX], SIZE must be zero exactly
     when the remaining range contains only zero.  */
  vr.intersect (valid_range);
  return vr.zero_p ();
}
884 :
/* Fold function call to builtin mem{{,p}cpy,move}.  Try to detect and
   diagnose (otherwise undefined) overlapping copies without preventing
   folding.  When folded, GCC guarantees that overlapping memcpy has
   the same semantics as memmove.  Call to the library memcpy need not
   provide the same guarantee.  Return false if no simplification can
   be made.

   GSI points at the call statement; DEST and SRC are the destination
   and source pointer arguments and CODE identifies which of the
   mem{cpy,pcpy,move} builtins is being folded.  On success the call is
   replaced in place (possibly by a plain assignment) and true is
   returned.  */

static bool
gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
			       tree dest, tree src, enum built_in_function code)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree len = gimple_call_arg (stmt, 2);
  location_t loc = gimple_location (stmt);

  /* If the LEN parameter is a constant zero or in range where
     the only valid value is zero, return DEST.  */
  if (size_must_be_zero_p (len))
    {
      gimple *repl;
      if (gimple_call_lhs (stmt))
	repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
      else
	repl = gimple_build_nop ();
      /* The call no longer stores anything; drop its virtual definition.  */
      tree vdef = gimple_vdef (stmt);
      if (vdef && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
      gsi_replace (gsi, repl, false);
      return true;
    }

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    {
      /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
	 It's safe and may even be emitted by GCC itself (see bug
	 32667).  */
      unlink_stmt_vdef (stmt);
      if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	release_ssa_name (gimple_vdef (stmt));
      if (!lhs)
	{
	  gsi_replace (gsi, gimple_build_nop (), false);
	  return true;
	}
      /* A result is wanted: fall through to build LHS = DEST{,+LEN}.  */
      goto done;
    }
  else if (!gimple_vdef (stmt) && gimple_in_ssa_p (cfun))
    /* In SSA form a call without a virtual definition cannot be turned
       into a store; punt.  */
    return false;
  else
    {
      /* Try to turn the copy into an inline load/store pair or a single
	 aggregate assignment.

	 We cannot (easily) change the type of the copy if it is a storage
	 order barrier, i.e. is equivalent to a VIEW_CONVERT_EXPR that can
	 modify the storage order of objects (see storage_order_barrier_p).  */
      tree srctype
	= POINTER_TYPE_P (TREE_TYPE (src))
	  ? TREE_TYPE (TREE_TYPE (src)) : NULL_TREE;
      tree desttype
	= POINTER_TYPE_P (TREE_TYPE (dest))
	  ? TREE_TYPE (TREE_TYPE (dest)) : NULL_TREE;
      tree destvar, srcvar, srcoff;
      unsigned int src_align, dest_align;
      unsigned HOST_WIDE_INT tmp_len;
      const char *tmp_str;

      /* Build accesses at offset zero with a ref-all character type.  */
      tree off0
	= build_int_cst (build_pointer_type_for_mode (char_type_node,
						      ptr_mode, true), 0);

      /* If we can perform the copy efficiently with first doing all loads
	 and then all stores inline it that way.  Currently efficiently
	 means that we can load all the memory into a single integer
	 register which is what MOVE_MAX gives us.  */
      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (tree_fits_uhwi_p (len)
	  && compare_tree_int (len, MOVE_MAX) <= 0
	  /* FIXME: Don't transform copies from strings with known length.
	     Until GCC 9 this prevented a case in gcc.dg/strlenopt-8.c
	     from being handled, and the case was XFAILed for that reason.
	     Now that it is handled and the XFAIL removed, as soon as other
	     strlenopt tests that rely on it for passing are adjusted, this
	     hack can be removed.  */
	  && !c_strlen (src, 1)
	  && !((tmp_str = getbyterep (src, &tmp_len)) != NULL
	       && memchr (tmp_str, 0, tmp_len) == NULL)
	  && !(srctype
	       && AGGREGATE_TYPE_P (srctype)
	       && TYPE_REVERSE_STORAGE_ORDER (srctype))
	  && !(desttype
	       && AGGREGATE_TYPE_P (desttype)
	       && TYPE_REVERSE_STORAGE_ORDER (desttype)))
	{
	  unsigned ilen = tree_to_uhwi (len);
	  if (pow2p_hwi (ilen))
	    {
	      /* Detect out-of-bounds accesses without issuing warnings.
		 Avoid folding out-of-bounds copies but to avoid false
		 positives for unreachable code defer warning until after
		 DCE has worked its magic.
		 -Wrestrict is still diagnosed.  */
	      if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt),
							 dest, src, len, len,
							 false, false))
		if (warning != OPT_Wrestrict)
		  return false;

	      scalar_int_mode imode;
	      machine_mode mode;
	      if (int_mode_for_size (ilen * BITS_PER_UNIT, 0).exists (&imode)
		  && bitwise_mode_for_size (ilen
					    * BITS_PER_UNIT).exists (&mode)
		  && known_eq (GET_MODE_BITSIZE (mode), ilen * BITS_PER_UNIT)
		  /* If the destination pointer is not aligned we must be able
		     to emit an unaligned store.  */
		  && (dest_align >= GET_MODE_ALIGNMENT (mode)
		      || !targetm.slow_unaligned_access (mode, dest_align)
		      || (optab_handler (movmisalign_optab, mode)
			  != CODE_FOR_nothing)))
		{
		  tree type = bitwise_type_for_mode (mode);
		  tree srctype = type;
		  tree desttype = type;
		  if (src_align < GET_MODE_ALIGNMENT (mode))
		    srctype = build_aligned_type (type, src_align);
		  tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
		  tree tem = fold_const_aggregate_ref (srcmem);
		  if (tem)
		    srcmem = tem;
		  /* Give up on the load side if an unaligned access would
		     be slow and the target has no misaligned-move insn.  */
		  else if (src_align < GET_MODE_ALIGNMENT (mode)
			   && targetm.slow_unaligned_access (mode, src_align)
			   && (optab_handler (movmisalign_optab, mode)
			       == CODE_FOR_nothing))
		    srcmem = NULL_TREE;
		  if (srcmem)
		    {
		      gimple *new_stmt;
		      if (is_gimple_reg_type (TREE_TYPE (srcmem)))
			{
			  /* Materialize the load into an SSA name so the
			     store below is a plain register store.  */
			  new_stmt = gimple_build_assign (NULL_TREE, srcmem);
			  srcmem
			    = make_ssa_name (TREE_TYPE (srcmem), new_stmt);
			  gimple_assign_set_lhs (new_stmt, srcmem);
			  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
			  gimple_set_location (new_stmt, loc);
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      if (dest_align < GET_MODE_ALIGNMENT (mode))
			desttype = build_aligned_type (type, dest_align);
		      new_stmt
			= gimple_build_assign (fold_build2 (MEM_REF, desttype,
							    dest, off0),
					       srcmem);
		      gimple_move_vops (new_stmt, stmt);
		      if (!lhs)
			{
			  gsi_replace (gsi, new_stmt, false);
			  return true;
			}
		      gimple_set_location (new_stmt, loc);
		      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
		      goto done;
		    }
		}
	    }
	}

      if (code == BUILT_IN_MEMMOVE)
	{
	  /* Both DEST and SRC must be pointer types.
	     ??? This is what old code did.  Is the testing for pointer types
	     really mandatory?

	     If either SRC is readonly or length is 1, we can use memcpy.  */
	  if (!dest_align || !src_align)
	    return false;
	  if (readonly_data_expr (src)
	      || (tree_fits_uhwi_p (len)
		  && (MIN (src_align, dest_align) / BITS_PER_UNIT
		      >= tree_to_uhwi (len))))
	    {
	      tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If *src and *dest can't overlap, optimize into memcpy as well.  */
	  if (TREE_CODE (src) == ADDR_EXPR
	      && TREE_CODE (dest) == ADDR_EXPR)
	    {
	      tree src_base, dest_base, fn;
	      poly_int64 src_offset = 0, dest_offset = 0;
	      poly_uint64 maxsize;

	      srcvar = TREE_OPERAND (src, 0);
	      src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
	      if (src_base == NULL)
		src_base = srcvar;
	      destvar = TREE_OPERAND (dest, 0);
	      dest_base = get_addr_base_and_unit_offset (destvar,
							 &dest_offset);
	      if (dest_base == NULL)
		dest_base = destvar;
	      if (!poly_int_tree_p (len, &maxsize))
		maxsize = -1;
	      if (SSA_VAR_P (src_base)
		  && SSA_VAR_P (dest_base))
		{
		  /* Distinct decls cannot overlap; the same decl may if
		     the accessed byte ranges intersect.  */
		  if (operand_equal_p (src_base, dest_base, 0)
		      && ranges_maybe_overlap_p (src_offset, maxsize,
						 dest_offset, maxsize))
		    return false;
		}
	      else if (TREE_CODE (src_base) == MEM_REF
		       && TREE_CODE (dest_base) == MEM_REF)
		{
		  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
					 TREE_OPERAND (dest_base, 0), 0))
		    return false;
		  poly_offset_int full_src_offset
		    = mem_ref_offset (src_base) + src_offset;
		  poly_offset_int full_dest_offset
		    = mem_ref_offset (dest_base) + dest_offset;
		  if (ranges_maybe_overlap_p (full_src_offset, maxsize,
					      full_dest_offset, maxsize))
		    return false;
		}
	      else
		return false;

	      fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If the destination and source do not alias optimize into
	     memcpy as well.  */
	  if ((is_gimple_min_invariant (dest)
	       || TREE_CODE (dest) == SSA_NAME)
	      && (is_gimple_min_invariant (src)
		  || TREE_CODE (src) == SSA_NAME))
	    {
	      ao_ref destr, srcr;
	      ao_ref_init_from_ptr_and_size (&destr, dest, len);
	      ao_ref_init_from_ptr_and_size (&srcr, src, len);
	      if (!refs_may_alias_p_1 (&destr, &srcr, false))
		{
		  tree fn;
		  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
		  if (!fn)
		    return false;
		  gimple_call_set_fndecl (stmt, fn);
		  gimple_call_set_arg (stmt, 0, dest);
		  gimple_call_set_arg (stmt, 1, src);
		  fold_stmt (gsi);
		  return true;
		}
	    }

	  return false;
	}

      if (!tree_fits_shwi_p (len))
	return false;
      if (!srctype
	  || (AGGREGATE_TYPE_P (srctype)
	      && TYPE_REVERSE_STORAGE_ORDER (srctype)))
	return false;
      if (!desttype
	  || (AGGREGATE_TYPE_P (desttype)
	      && TYPE_REVERSE_STORAGE_ORDER (desttype)))
	return false;
      /* In the following try to find a type that is most natural to be
	 used for the memcpy source and destination and that allows
	 the most optimization when memcpy is turned into a plain assignment
	 using that type.  In theory we could always use a char[len] type
	 but that only gains us that the destination and source possibly
	 no longer will have their address taken.  */
      if (TREE_CODE (srctype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	srctype = TREE_TYPE (srctype);
      if (TREE_CODE (desttype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	desttype = TREE_TYPE (desttype);
      if (TREE_ADDRESSABLE (srctype)
	  || TREE_ADDRESSABLE (desttype))
	return false;

      /* Make sure we are not copying using a floating-point mode or
	 a type whose size possibly does not match its precision.  */
      if (FLOAT_MODE_P (TYPE_MODE (desttype))
	  || TREE_CODE (desttype) == BOOLEAN_TYPE
	  || TREE_CODE (desttype) == ENUMERAL_TYPE)
	desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
      if (FLOAT_MODE_P (TYPE_MODE (srctype))
	  || TREE_CODE (srctype) == BOOLEAN_TYPE
	  || TREE_CODE (srctype) == ENUMERAL_TYPE)
	srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
      /* bitwise_type_for_mode can fail; fall back to the other side's
	 type, and give up if neither survived.  */
      if (!srctype)
	srctype = desttype;
      if (!desttype)
	desttype = srctype;
      if (!srctype)
	return false;

      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);

      /* Choose between src and destination type for the access based
	 on alignment, whether the access constitutes a register access
	 and whether it may actually expose a declaration for SSA rewrite
	 or SRA decomposition.  Also try to expose a string constant, we
	 might be able to concatenate several of them later into a single
	 string store.  */
      destvar = NULL_TREE;
      srcvar = NULL_TREE;
      if (TREE_CODE (dest) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (dest, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len)
	  && dest_align >= TYPE_ALIGN (desttype)
	  && (is_gimple_reg_type (desttype)
	      || src_align >= TYPE_ALIGN (desttype)))
	destvar = fold_build2 (MEM_REF, desttype, dest, off0);
      else if (TREE_CODE (src) == ADDR_EXPR
	       && var_decl_component_p (TREE_OPERAND (src, 0))
	       && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
	       && src_align >= TYPE_ALIGN (srctype)
	       && (is_gimple_reg_type (srctype)
		   || dest_align >= TYPE_ALIGN (srctype)))
	srcvar = fold_build2 (MEM_REF, srctype, src, off0);
      /* FIXME: Don't transform copies from strings with known original length.
	 As soon as strlenopt tests that rely on it for passing are adjusted,
	 this hack can be removed.  */
      else if (gimple_call_alloca_for_var_p (stmt)
	       && (srcvar = string_constant (src, &srcoff, NULL, NULL))
	       && integer_zerop (srcoff)
	       && tree_int_cst_equal (TYPE_SIZE_UNIT (TREE_TYPE (srcvar)), len)
	       && dest_align >= TYPE_ALIGN (TREE_TYPE (srcvar)))
	srctype = TREE_TYPE (srcvar);
      else
	return false;

      /* Now that we chose an access type express the other side in
	 terms of it if the target allows that with respect to alignment
	 constraints.  */
      if (srcvar == NULL_TREE)
	{
	  if (src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, desttype, src, off0);
	  else
	    {
	      enum machine_mode mode = TYPE_MODE (desttype);
	      if ((mode == BLKmode && STRICT_ALIGNMENT)
		  || (targetm.slow_unaligned_access (mode, src_align)
		      && (optab_handler (movmisalign_optab, mode)
			  == CODE_FOR_nothing)))
		return false;
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	    }
	}
      else if (destvar == NULL_TREE)
	{
	  if (dest_align >= TYPE_ALIGN (srctype))
	    destvar = fold_build2 (MEM_REF, srctype, dest, off0);
	  else
	    {
	      enum machine_mode mode = TYPE_MODE (srctype);
	      if ((mode == BLKmode && STRICT_ALIGNMENT)
		  || (targetm.slow_unaligned_access (mode, dest_align)
		      && (optab_handler (movmisalign_optab, mode)
			  == CODE_FOR_nothing)))
		return false;
	      desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
					     dest_align);
	      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
	    }
	}

      /* Same as above, detect out-of-bounds accesses without issuing
	 warnings.  Avoid folding out-of-bounds copies but to avoid
	 false positives for unreachable code defer warning until
	 after DCE has worked its magic.
	 -Wrestrict is still diagnosed.  */
      if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt),
						 dest, src, len, len,
						 false, false))
	if (warning != OPT_Wrestrict)
	  return false;

      gimple *new_stmt;
      if (is_gimple_reg_type (TREE_TYPE (srcvar)))
	{
	  tree tem = fold_const_aggregate_ref (srcvar);
	  if (tem)
	    srcvar = tem;
	  if (! is_gimple_min_invariant (srcvar))
	    {
	      /* Load the source into an SSA name first so the store is
		 a plain register store.  */
	      new_stmt = gimple_build_assign (NULL_TREE, srcvar);
	      srcvar = make_ssa_name (TREE_TYPE (srcvar), new_stmt);
	      gimple_assign_set_lhs (new_stmt, srcvar);
	      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
	      gimple_set_location (new_stmt, loc);
	      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
	    }
	  new_stmt = gimple_build_assign (destvar, srcvar);
	  goto set_vop_and_replace;
	}

      /* We get an aggregate copy.  If the source is a STRING_CST, then
	 directly use its type to perform the copy.  */
      if (TREE_CODE (srcvar) == STRING_CST)
	desttype = srctype;

      /* Or else, use an unsigned char[] type to perform the copy in order
	 to preserve padding and to avoid any issues with TREE_ADDRESSABLE
	 types or float modes behavior on copying.  */
      else
	{
	  desttype = build_array_type_nelts (unsigned_char_type_node,
					     tree_to_uhwi (len));
	  srctype = desttype;
	  if (src_align > TYPE_ALIGN (srctype))
	    srctype = build_aligned_type (srctype, src_align);
	  srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	}

      if (dest_align > TYPE_ALIGN (desttype))
	desttype = build_aligned_type (desttype, dest_align);
      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
      new_stmt = gimple_build_assign (destvar, srcvar);

    set_vop_and_replace:
      /* Shared tail for the assignment-based foldings: transfer virtual
	 operands and either replace the call or, when a result is
	 wanted, insert the assignment and fall through to DONE.  */
      gimple_move_vops (new_stmt, stmt);
      if (!lhs)
	{
	  gsi_replace (gsi, new_stmt, false);
	  return true;
	}
      gimple_set_location (new_stmt, loc);
      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
    }

 done:
  /* The copy itself has been emitted (or was a no-op); now produce the
     call's result: DEST for memcpy/memmove, DEST + LEN for mempcpy.  */
  gimple_seq stmts = NULL;
  if (code == BUILT_IN_MEMCPY || code == BUILT_IN_MEMMOVE)
    len = NULL_TREE;
  else if (code == BUILT_IN_MEMPCPY)
    {
      len = gimple_convert_to_ptrofftype (&stmts, loc, len);
      dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
			   TREE_TYPE (dest), dest, len);
    }
  else
    gcc_unreachable ();

  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gimple *repl = gimple_build_assign (lhs, dest);
  gsi_replace (gsi, repl, false);
  return true;
}
1363 :
1364 : /* Transform a call to built-in bcmp(a, b, len) at *GSI into one
1365 : to built-in memcmp (a, b, len). */
1366 :
1367 : static bool
1368 148 : gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
1369 : {
1370 148 : tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);
1371 :
1372 148 : if (!fn)
1373 : return false;
1374 :
1375 : /* Transform bcmp (a, b, len) into memcmp (a, b, len). */
1376 :
1377 148 : gimple *stmt = gsi_stmt (*gsi);
1378 296 : if (!gimple_vuse (stmt) && gimple_in_ssa_p (cfun))
1379 : return false;
1380 148 : tree a = gimple_call_arg (stmt, 0);
1381 148 : tree b = gimple_call_arg (stmt, 1);
1382 148 : tree len = gimple_call_arg (stmt, 2);
1383 :
1384 148 : gimple *repl = gimple_build_call (fn, 3, a, b, len);
1385 148 : replace_call_with_call_and_fold (gsi, repl);
1386 :
1387 148 : return true;
1388 : }
1389 :
1390 : /* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
1391 : to built-in memmove (dest, src, len). */
1392 :
1393 : static bool
1394 367 : gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
1395 : {
1396 367 : tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);
1397 :
1398 367 : if (!fn)
1399 : return false;
1400 :
1401 : /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
1402 : it's quivalent to memmove (not memcpy). Transform bcopy (src, dest,
1403 : len) into memmove (dest, src, len). */
1404 :
1405 367 : gimple *stmt = gsi_stmt (*gsi);
1406 734 : if (!gimple_vdef (stmt) && gimple_in_ssa_p (cfun))
1407 : return false;
1408 367 : tree src = gimple_call_arg (stmt, 0);
1409 367 : tree dest = gimple_call_arg (stmt, 1);
1410 367 : tree len = gimple_call_arg (stmt, 2);
1411 :
1412 367 : gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1413 367 : gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
1414 367 : replace_call_with_call_and_fold (gsi, repl);
1415 :
1416 367 : return true;
1417 : }
1418 :
1419 : /* Transform a call to built-in bzero (dest, len) at *GSI into one
1420 : to built-in memset (dest, 0, len). */
1421 :
1422 : static bool
1423 250 : gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
1424 : {
1425 250 : tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
1426 :
1427 250 : if (!fn)
1428 : return false;
1429 :
1430 : /* Transform bzero (dest, len) into memset (dest, 0, len). */
1431 :
1432 250 : gimple *stmt = gsi_stmt (*gsi);
1433 500 : if (!gimple_vdef (stmt) && gimple_in_ssa_p (cfun))
1434 : return false;
1435 250 : tree dest = gimple_call_arg (stmt, 0);
1436 250 : tree len = gimple_call_arg (stmt, 1);
1437 :
1438 250 : gimple_seq seq = NULL;
1439 250 : gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
1440 250 : gimple_seq_add_stmt_without_update (&seq, repl);
1441 250 : gsi_replace_with_seq_vops (gsi, seq);
1442 250 : fold_stmt (gsi);
1443 :
1444 250 : return true;
1445 : }
1446 :
1447 : /* Fold function call to builtin memset or bzero at *GSI setting the
1448 : memory of size LEN to VAL. Return whether a simplification was made. */
1449 :
1450 : static bool
1451 306540 : gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1452 : {
1453 306540 : gimple *stmt = gsi_stmt (*gsi);
1454 306540 : tree etype;
1455 306540 : unsigned HOST_WIDE_INT length, cval;
1456 :
1457 : /* If the LEN parameter is zero, return DEST. */
1458 306540 : if (integer_zerop (len))
1459 : {
1460 811 : replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1461 811 : return true;
1462 : }
1463 :
1464 915434 : if (!gimple_vdef (stmt) && gimple_in_ssa_p (cfun))
1465 : return false;
1466 :
1467 305651 : if (! tree_fits_uhwi_p (len))
1468 : return false;
1469 :
1470 202018 : if (TREE_CODE (c) != INTEGER_CST)
1471 : return false;
1472 :
1473 196119 : tree dest = gimple_call_arg (stmt, 0);
1474 196119 : tree var = dest;
1475 196119 : if (TREE_CODE (var) != ADDR_EXPR)
1476 : return false;
1477 :
1478 157301 : var = TREE_OPERAND (var, 0);
1479 157301 : if (TREE_THIS_VOLATILE (var))
1480 : return false;
1481 :
1482 157258 : etype = TREE_TYPE (var);
1483 157258 : if (TREE_CODE (etype) == ARRAY_TYPE)
1484 81493 : etype = TREE_TYPE (etype);
1485 :
1486 157258 : if ((!INTEGRAL_TYPE_P (etype)
1487 96080 : && !POINTER_TYPE_P (etype))
1488 61703 : || TREE_CODE (etype) == BITINT_TYPE)
1489 : return false;
1490 :
1491 60400 : if (! var_decl_component_p (var))
1492 : return false;
1493 :
1494 60400 : length = tree_to_uhwi (len);
1495 60400 : if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
1496 1753 : || (GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (etype))
1497 3506 : != GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (etype)))
1498 62153 : || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1499 58647 : return false;
1500 :
1501 1753 : if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1502 : return false;
1503 :
1504 1753 : if (!type_has_mode_precision_p (etype))
1505 7 : etype = lang_hooks.types.type_for_mode (SCALAR_INT_TYPE_MODE (etype),
1506 7 : TYPE_UNSIGNED (etype));
1507 :
1508 1753 : if (integer_zerop (c))
1509 : cval = 0;
1510 : else
1511 : {
1512 337 : if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1513 : return NULL_TREE;
1514 :
1515 337 : cval = TREE_INT_CST_LOW (c);
1516 337 : cval &= 0xff;
1517 337 : cval |= cval << 8;
1518 337 : cval |= cval << 16;
1519 337 : cval |= (cval << 31) << 1;
1520 : }
1521 :
1522 1753 : var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1523 1753 : gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1524 1753 : gimple_move_vops (store, stmt);
1525 1753 : gimple_set_location (store, gimple_location (stmt));
1526 1753 : gsi_insert_before (gsi, store, GSI_SAME_STMT);
1527 1753 : if (gimple_call_lhs (stmt))
1528 : {
1529 2 : gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1530 2 : gsi_replace (gsi, asgn, false);
1531 : }
1532 : else
1533 : {
1534 1751 : gimple_stmt_iterator gsi2 = *gsi;
1535 1751 : gsi_prev (gsi);
1536 1751 : gsi_remove (&gsi2, true);
1537 : }
1538 :
1539 : return true;
1540 : }
1541 :
1542 : /* Helper of get_range_strlen for ARG that is not an SSA_NAME. */
1543 :
1544 : static bool
1545 428500 : get_range_strlen_tree (tree arg, bitmap visited, strlen_range_kind rkind,
1546 : c_strlen_data *pdata, unsigned eltsize)
1547 : {
1548 428500 : gcc_assert (TREE_CODE (arg) != SSA_NAME);
1549 :
1550 : /* The length computed by this invocation of the function. */
1551 428500 : tree val = NULL_TREE;
1552 :
1553 : /* True if VAL is an optimistic (tight) bound determined from
1554 : the size of the character array in which the string may be
1555 : stored. In that case, the computed VAL is used to set
1556 : PDATA->MAXBOUND. */
1557 428500 : bool tight_bound = false;
1558 :
1559 : /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1560 428500 : if (TREE_CODE (arg) == ADDR_EXPR
1561 428500 : && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
1562 : {
1563 28219 : tree op = TREE_OPERAND (arg, 0);
1564 28219 : if (integer_zerop (TREE_OPERAND (op, 1)))
1565 : {
1566 12045 : tree aop0 = TREE_OPERAND (op, 0);
1567 12045 : if (TREE_CODE (aop0) == INDIRECT_REF
1568 12045 : && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
1569 0 : return get_range_strlen (TREE_OPERAND (aop0, 0), visited, rkind,
1570 0 : pdata, eltsize);
1571 : }
1572 16174 : else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF
1573 16174 : && rkind == SRK_LENRANGE)
1574 : {
1575 : /* Fail if an array is the last member of a struct object
1576 : since it could be treated as a (fake) flexible array
1577 : member. */
1578 4785 : tree idx = TREE_OPERAND (op, 1);
1579 :
1580 4785 : arg = TREE_OPERAND (op, 0);
1581 4785 : tree optype = TREE_TYPE (arg);
1582 4785 : if (tree dom = TYPE_DOMAIN (optype))
1583 4785 : if (tree bound = TYPE_MAX_VALUE (dom))
1584 4785 : if (TREE_CODE (bound) == INTEGER_CST
1585 4785 : && TREE_CODE (idx) == INTEGER_CST
1586 8000 : && tree_int_cst_lt (bound, idx))
1587 : return false;
1588 : }
1589 : }
1590 :
1591 428292 : if (rkind == SRK_INT_VALUE)
1592 : {
1593 : /* We are computing the maximum value (not string length). */
1594 3083 : val = arg;
1595 3083 : if (TREE_CODE (val) != INTEGER_CST
1596 3083 : || tree_int_cst_sgn (val) < 0)
1597 2573 : return false;
1598 : }
1599 : else
1600 : {
1601 425209 : c_strlen_data lendata = { };
1602 425209 : val = c_strlen (arg, 1, &lendata, eltsize);
1603 :
1604 425209 : if (!val && lendata.decl)
1605 : {
1606 : /* ARG refers to an unterminated const character array.
1607 : DATA.DECL with size DATA.LEN. */
1608 4193 : val = lendata.minlen;
1609 4193 : pdata->decl = lendata.decl;
1610 : }
1611 : }
1612 :
1613 : /* Set if VAL represents the maximum length based on array size (set
1614 : when exact length cannot be determined). */
1615 425719 : bool maxbound = false;
1616 :
1617 425719 : if (!val && rkind == SRK_LENRANGE)
1618 : {
1619 225324 : if (TREE_CODE (arg) == ADDR_EXPR)
1620 79616 : return get_range_strlen (TREE_OPERAND (arg, 0), visited, rkind,
1621 79616 : pdata, eltsize);
1622 :
1623 145708 : if (TREE_CODE (arg) == ARRAY_REF)
1624 : {
1625 18220 : tree optype = TREE_TYPE (TREE_OPERAND (arg, 0));
1626 :
1627 : /* Determine the "innermost" array type. */
1628 18220 : while (TREE_CODE (optype) == ARRAY_TYPE
1629 25094 : && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
1630 6874 : optype = TREE_TYPE (optype);
1631 :
1632 : /* Avoid arrays of pointers. */
1633 18220 : tree eltype = TREE_TYPE (optype);
1634 18220 : if (TREE_CODE (optype) != ARRAY_TYPE
1635 18220 : || !INTEGRAL_TYPE_P (eltype))
1636 : return false;
1637 :
1638 : /* Fail when the array bound is unknown or zero. */
1639 13482 : val = TYPE_SIZE_UNIT (optype);
1640 13482 : if (!val
1641 13410 : || TREE_CODE (val) != INTEGER_CST
1642 26864 : || integer_zerop (val))
1643 105 : return false;
1644 :
1645 13377 : val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1646 : integer_one_node);
1647 :
1648 : /* Set the minimum size to zero since the string in
1649 : the array could have zero length. */
1650 13377 : pdata->minlen = ssize_int (0);
1651 :
1652 13377 : tight_bound = true;
1653 : }
1654 127488 : else if (TREE_CODE (arg) == COMPONENT_REF
1655 127488 : && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))
1656 : == ARRAY_TYPE))
1657 : {
1658 : /* Use the type of the member array to determine the upper
1659 : bound on the length of the array. This may be overly
1660 : optimistic if the array itself isn't NUL-terminated and
1661 : the caller relies on the subsequent member to contain
1662 : the NUL but that would only be considered valid if
1663 : the array were the last member of a struct. */
1664 :
1665 9614 : tree fld = TREE_OPERAND (arg, 1);
1666 :
1667 9614 : tree optype = TREE_TYPE (fld);
1668 :
1669 : /* Determine the "innermost" array type. */
1670 9614 : while (TREE_CODE (optype) == ARRAY_TYPE
1671 10161 : && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
1672 547 : optype = TREE_TYPE (optype);
1673 :
1674 : /* Fail when the array bound is unknown or zero. */
1675 9614 : val = TYPE_SIZE_UNIT (optype);
1676 9614 : if (!val
1677 9378 : || TREE_CODE (val) != INTEGER_CST
1678 18957 : || integer_zerop (val))
1679 350 : return false;
1680 9264 : val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1681 : integer_one_node);
1682 :
1683 : /* Set the minimum size to zero since the string in
1684 : the array could have zero length. */
1685 9264 : pdata->minlen = ssize_int (0);
1686 :
1687 : /* The array size determined above is an optimistic bound
1688 : on the length. If the array isn't nul-terminated the
1689 : length computed by the library function would be greater.
1690 : Even though using strlen to cross the subobject boundary
1691 : is undefined, avoid drawing conclusions from the member
1692 : type about the length here. */
1693 9264 : tight_bound = true;
1694 : }
1695 117874 : else if (TREE_CODE (arg) == MEM_REF
1696 27781 : && TREE_CODE (TREE_TYPE (arg)) == ARRAY_TYPE
1697 3953 : && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == INTEGER_TYPE
1698 121389 : && TREE_CODE (TREE_OPERAND (arg, 0)) == ADDR_EXPR)
1699 : {
1700 : /* Handle a MEM_REF into a DECL accessing an array of integers,
1701 : being conservative about references to extern structures with
1702 : flexible array members that can be initialized to arbitrary
1703 : numbers of elements as an extension (static structs are okay). */
1704 3515 : tree ref = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
1705 3515 : if ((TREE_CODE (ref) == PARM_DECL || VAR_P (ref))
1706 7017 : && (decl_binds_to_current_def_p (ref)
1707 438 : || !array_ref_flexible_size_p (arg)))
1708 : {
1709 : /* Fail if the offset is out of bounds. Such accesses
1710 : should be diagnosed at some point. */
1711 3389 : val = DECL_SIZE_UNIT (ref);
1712 3389 : if (!val
1713 3217 : || TREE_CODE (val) != INTEGER_CST
1714 6606 : || integer_zerop (val))
1715 371 : return false;
1716 :
1717 3215 : poly_offset_int psiz = wi::to_offset (val);
1718 3215 : poly_offset_int poff = mem_ref_offset (arg);
1719 3215 : if (known_le (psiz, poff))
1720 : return false;
1721 :
1722 3018 : pdata->minlen = ssize_int (0);
1723 :
1724 : /* Subtract the offset and one for the terminating nul. */
1725 3018 : psiz -= poff;
1726 3018 : psiz -= 1;
1727 3018 : val = wide_int_to_tree (TREE_TYPE (val), psiz);
1728 : /* Since VAL reflects the size of a declared object
1729 : rather the type of the access it is not a tight bound. */
1730 : }
1731 : }
1732 114359 : else if (TREE_CODE (arg) == PARM_DECL || VAR_P (arg))
1733 : {
1734 : /* Avoid handling pointers to arrays. GCC might misuse
1735 : a pointer to an array of one bound to point to an array
1736 : object of a greater bound. */
1737 69798 : tree argtype = TREE_TYPE (arg);
1738 69798 : if (TREE_CODE (argtype) == ARRAY_TYPE)
1739 : {
1740 41889 : val = TYPE_SIZE_UNIT (argtype);
1741 41889 : if (!val
1742 41123 : || TREE_CODE (val) != INTEGER_CST
1743 83012 : || integer_zerop (val))
1744 881 : return false;
1745 41008 : val = wide_int_to_tree (TREE_TYPE (val),
1746 41008 : wi::sub (wi::to_wide (val), 1));
1747 :
1748 : /* Set the minimum size to zero since the string in
1749 : the array could have zero length. */
1750 41008 : pdata->minlen = ssize_int (0);
1751 : }
1752 : }
1753 : maxbound = true;
1754 : }
1755 :
1756 339658 : if (!val)
1757 : return false;
1758 :
1759 : /* Adjust the lower bound on the string length as necessary. */
1760 243212 : if (!pdata->minlen
1761 243212 : || (rkind != SRK_STRLEN
1762 71143 : && TREE_CODE (pdata->minlen) == INTEGER_CST
1763 71143 : && TREE_CODE (val) == INTEGER_CST
1764 71138 : && tree_int_cst_lt (val, pdata->minlen)))
1765 172161 : pdata->minlen = val;
1766 :
1767 243212 : if (pdata->maxbound && TREE_CODE (pdata->maxbound) == INTEGER_CST)
1768 : {
1769 : /* Adjust the tighter (more optimistic) string length bound
1770 : if necessary and proceed to adjust the more conservative
1771 : bound. */
1772 1514 : if (TREE_CODE (val) == INTEGER_CST)
1773 : {
1774 1514 : if (tree_int_cst_lt (pdata->maxbound, val))
1775 657 : pdata->maxbound = val;
1776 : }
1777 : else
1778 0 : pdata->maxbound = val;
1779 : }
1780 241698 : else if (pdata->maxbound || maxbound)
1781 : /* Set PDATA->MAXBOUND only if it either isn't INTEGER_CST or
1782 : if VAL corresponds to the maximum length determined based
1783 : on the type of the object. */
1784 69897 : pdata->maxbound = val;
1785 :
1786 243212 : if (tight_bound)
1787 : {
1788 : /* VAL computed above represents an optimistically tight bound
1789 : on the length of the string based on the referenced object's
1790 : or subobject's type. Determine the conservative upper bound
1791 : based on the enclosing object's size if possible. */
1792 22641 : if (rkind == SRK_LENRANGE)
1793 : {
1794 22641 : poly_int64 offset;
1795 22641 : tree base = get_addr_base_and_unit_offset (arg, &offset);
1796 22641 : if (!base)
1797 : {
1798 : /* When the call above fails due to a non-constant offset
1799 : assume the offset is zero and use the size of the whole
1800 : enclosing object instead. */
1801 7837 : base = get_base_address (arg);
1802 7837 : offset = 0;
1803 : }
1804 : /* If the base object is a pointer no upper bound on the length
1805 : can be determined. Otherwise the maximum length is equal to
1806 : the size of the enclosing object minus the offset of
1807 : the referenced subobject minus 1 (for the terminating nul). */
1808 22641 : tree type = TREE_TYPE (base);
1809 22641 : if (POINTER_TYPE_P (type)
1810 22637 : || (TREE_CODE (base) != PARM_DECL && !VAR_P (base))
1811 41078 : || !(val = DECL_SIZE_UNIT (base)))
1812 5451 : val = build_all_ones_cst (size_type_node);
1813 : else
1814 : {
1815 17190 : val = DECL_SIZE_UNIT (base);
1816 17190 : val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1817 : size_int (offset + 1));
1818 : }
1819 : }
1820 : else
1821 : return false;
1822 : }
1823 :
1824 243212 : if (pdata->maxlen)
1825 : {
1826 : /* Adjust the more conservative bound if possible/necessary
1827 : and fail otherwise. */
1828 8209 : if (rkind != SRK_STRLEN)
1829 : {
1830 7278 : if (TREE_CODE (pdata->maxlen) != INTEGER_CST
1831 7278 : || TREE_CODE (val) != INTEGER_CST)
1832 : return false;
1833 :
1834 7273 : if (tree_int_cst_lt (pdata->maxlen, val))
1835 1376 : pdata->maxlen = val;
1836 7273 : return true;
1837 : }
1838 931 : else if (simple_cst_equal (val, pdata->maxlen) != 1)
1839 : {
1840 : /* Fail if the length of this ARG is different from that
1841 : previously determined from another ARG. */
1842 : return false;
1843 : }
1844 : }
1845 :
1846 235127 : pdata->maxlen = val;
1847 235127 : return rkind == SRK_LENRANGE || !integer_all_onesp (val);
1848 : }
1849 :
/* For an ARG referencing one or more strings, try to obtain the range
   of their lengths, or the size of the largest array ARG refers to if
   the range of lengths cannot be determined, and store all in *PDATA.
   For an integer ARG (when RKIND == SRK_INT_VALUE), try to determine
   the maximum constant value.
   If ARG is an SSA_NAME, follow its use-def chains.  When RKIND ==
   SRK_STRLEN, then if PDATA->MAXLEN is not equal to the determined
   length or if we are unable to determine the length, return false.
   VISITED is a bitmap of visited variables.
   RKIND determines the kind of value or range to obtain (see
   strlen_range_kind).
   Set PDATA->DECL if ARG refers to an unterminated constant array.
   On input, set ELTSIZE to 1 for normal single byte character strings,
   and either 2 or 4 for wide character strings (the size of wchar_t).
   Return true if *PDATA was successfully populated and false otherwise. */
1865 :
static bool
get_range_strlen (tree arg, bitmap visited,
		  strlen_range_kind rkind,
		  c_strlen_data *pdata, unsigned eltsize)
{

  /* Non-SSA expressions (declarations, constants, memory references)
     are handled by the tree-oriented helper.  */
  if (TREE_CODE (arg) != SSA_NAME)
    return get_range_strlen_tree (arg, visited, rkind, pdata, eltsize);

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.
     bitmap_set_bit returns false when the bit was already set,
     i.e. when ARG has been visited before.  */
  if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
    return true;

  tree var = arg;
  gimple *def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
	 constant length or come from another SSA_NAME with a constant
	 length.  */
      if (gimple_assign_single_p (def_stmt)
	  || gimple_assign_unary_nop_p (def_stmt))
	{
	  /* A plain copy or a no-op conversion: recurse on the RHS.  */
	  tree rhs = gimple_assign_rhs1 (def_stmt);
	  return get_range_strlen (rhs, visited, rkind, pdata, eltsize);
	}
      else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	{
	  /* Combine the results from both arms of the conditional.  */
	  tree ops[2] = { gimple_assign_rhs2 (def_stmt),
			  gimple_assign_rhs3 (def_stmt) };

	  for (unsigned int i = 0; i < 2; i++)
	    if (!get_range_strlen (ops[i], visited, rkind, pdata, eltsize))
	      {
		if (rkind != SRK_LENRANGE)
		  return false;
		/* Set the upper bound to the maximum to prevent
		   it from being adjusted in the next iteration but
		   leave MINLEN and the more conservative MAXBOUND
		   determined so far alone (or leave them null if
		   they haven't been set yet).  That the MINLEN is
		   in fact zero can be determined from MAXLEN being
		   unbounded but the discovered minimum is used for
		   diagnostics.  */
		pdata->maxlen = build_all_ones_cst (size_type_node);
	      }
	  return true;
	}
      return false;

    case GIMPLE_PHI:
      /* Unless RKIND == SRK_LENRANGE, all arguments of the PHI node
	 must have a constant length.  */
      for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++)
	{
	  tree arg = gimple_phi_arg (def_stmt, i)->def;

	  /* If this PHI has itself as an argument, we cannot
	     determine the string length of this argument.  However,
	     if we can find a constant string length for the other
	     PHI args then we can still be sure that this is a
	     constant string length.  So be optimistic and just
	     continue with the next argument.  */
	  if (arg == gimple_phi_result (def_stmt))
	    continue;

	  if (!get_range_strlen (arg, visited, rkind, pdata, eltsize))
	    {
	      if (rkind != SRK_LENRANGE)
		return false;
	      /* Set the upper bound to the maximum to prevent
		 it from being adjusted in the next iteration but
		 leave MINLEN and the more conservative MAXBOUND
		 determined so far alone (or leave them null if
		 they haven't been set yet).  That the MINLEN is
		 in fact zero can be determined from MAXLEN being
		 unbounded but the discovered minimum is used for
		 diagnostics.  */
	      pdata->maxlen = build_all_ones_cst (size_type_node);
	    }
	}
      return true;

    default:
      /* Any other defining statement (calls, non-copy assignments, ...)
	 gives no usable length information.  */
      return false;
    }
}
1960 :
1961 : /* Try to obtain the range of the lengths of the string(s) referenced
1962 : by ARG, or the size of the largest array ARG refers to if the range
1963 : of lengths cannot be determined, and store all in *PDATA which must
1964 : be zero-initialized on input except PDATA->MAXBOUND may be set to
1965 : a non-null tree node other than INTEGER_CST to request to have it
1966 : set to the length of the longest string in a PHI. ELTSIZE is
1967 : the expected size of the string element in bytes: 1 for char and
1968 : some power of 2 for wide characters.
1969 : Return true if the range [PDATA->MINLEN, PDATA->MAXLEN] is suitable
1970 : for optimization. Returning false means that a nonzero PDATA->MINLEN
1971 : doesn't reflect the true lower bound of the range when PDATA->MAXLEN
1972 : is -1 (in that case, the actual range is indeterminate, i.e.,
1973 : [0, PTRDIFF_MAX - 2]. */
1974 :
1975 : bool
1976 1135345 : get_range_strlen (tree arg, c_strlen_data *pdata, unsigned eltsize)
1977 : {
1978 1135345 : auto_bitmap visited;
1979 1135345 : tree maxbound = pdata->maxbound;
1980 :
1981 1135345 : if (!get_range_strlen (arg, visited, SRK_LENRANGE, pdata, eltsize))
1982 : {
1983 : /* On failure extend the length range to an impossible maximum
1984 : (a valid MAXLEN must be less than PTRDIFF_MAX - 1). Other
1985 : members can stay unchanged regardless. */
1986 913228 : pdata->minlen = ssize_int (0);
1987 913228 : pdata->maxlen = build_all_ones_cst (size_type_node);
1988 : }
1989 222117 : else if (!pdata->minlen)
1990 8801 : pdata->minlen = ssize_int (0);
1991 :
1992 : /* If it's unchanged from it initial non-null value, set the conservative
1993 : MAXBOUND to SIZE_MAX. Otherwise leave it null (if it is null). */
1994 1135345 : if (maxbound && pdata->maxbound == maxbound)
1995 650643 : pdata->maxbound = build_all_ones_cst (size_type_node);
1996 :
1997 1135345 : return !integer_all_onesp (pdata->maxlen);
1998 1135345 : }
1999 :
2000 : /* Return the maximum value for ARG given RKIND (see strlen_range_kind).
2001 : For ARG of pointer types, NONSTR indicates if the caller is prepared
2002 : to handle unterminated strings. For integer ARG and when RKIND ==
2003 : SRK_INT_VALUE, NONSTR must be null.
2004 :
2005 : If an unterminated array is discovered and our caller handles
2006 : unterminated arrays, then bubble up the offending DECL and
2007 : return the maximum size. Otherwise return NULL. */
2008 :
2009 : static tree
2010 95432 : get_maxval_strlen (tree arg, strlen_range_kind rkind, tree *nonstr = NULL)
2011 : {
2012 : /* A non-null NONSTR is meaningless when determining the maximum
2013 : value of an integer ARG. */
2014 95432 : gcc_assert (rkind != SRK_INT_VALUE || nonstr == NULL);
2015 :
2016 : // If arg is already a constant, simply return it.
2017 95432 : if (TREE_CODE (arg) == INTEGER_CST && rkind == SRK_INT_VALUE)
2018 : return arg;
2019 :
2020 : /* ARG must have an integral type when RKIND says so. */
2021 72728 : gcc_assert (rkind != SRK_INT_VALUE || INTEGRAL_TYPE_P (TREE_TYPE (arg)));
2022 :
2023 72827 : auto_bitmap visited;
2024 :
2025 : /* Reset DATA.MAXLEN if the call fails or when DATA.MAXLEN
2026 : is unbounded. */
2027 72827 : c_strlen_data lendata = { };
2028 72827 : if (!get_range_strlen (arg, visited, rkind, &lendata, /* eltsize = */1))
2029 49777 : lendata.maxlen = NULL_TREE;
2030 23050 : else if (lendata.maxlen && integer_all_onesp (lendata.maxlen))
2031 0 : lendata.maxlen = NULL_TREE;
2032 :
2033 72827 : if (nonstr)
2034 : {
2035 : /* For callers prepared to handle unterminated arrays set
2036 : *NONSTR to point to the declaration of the array and return
2037 : the maximum length/size. */
2038 23825 : *nonstr = lendata.decl;
2039 23825 : return lendata.maxlen;
2040 : }
2041 :
2042 : /* Fail if the constant array isn't nul-terminated. */
2043 49002 : return lendata.decl ? NULL_TREE : lendata.maxlen;
2044 72827 : }
2045 :
2046 : /* Return true if LEN is known to be less than or equal to (or if STRICT is
2047 : true, strictly less than) the lower bound of SIZE at compile time and false
2048 : otherwise. */
2049 :
2050 : static bool
2051 62921 : known_lower (gimple *stmt, tree len, tree size, bool strict = false)
2052 : {
2053 62921 : if (len == NULL_TREE)
2054 : return false;
2055 :
2056 234435 : wide_int size_range[2];
2057 234435 : wide_int len_range[2];
2058 46887 : if (get_range (len, stmt, len_range) && get_range (size, stmt, size_range))
2059 : {
2060 16437 : if (strict)
2061 1803 : return wi::ltu_p (len_range[1], size_range[0]);
2062 : else
2063 14634 : return wi::leu_p (len_range[1], size_range[0]);
2064 : }
2065 :
2066 : return false;
2067 281322 : }
2068 :
/* Fold a call to the strcpy builtin with arguments DEST and SRC.
   Return true if the call was simplified (replaced with DEST or with
   a call to memcpy) and false if no simplification could be made. */
2072 :
static bool
gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
			    tree dest, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
    {
      /* Issue -Wrestrict unless the pointers are null (those do
	 not point to objects and so do not indicate an overlap;
	 such calls could be the result of sanitization and jump
	 threading).  */
      if (!integer_zerop (dest) && !warning_suppressed_p (stmt, OPT_Wrestrict))
	{
	  tree func = gimple_call_fndecl (stmt);

	  warning_at (loc, OPT_Wrestrict,
		      "%qD source argument is the same as destination",
		      func);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  /* The memcpy transformation below trades code size for speed;
     skip it when optimizing for size.  */
  if (optimize_function_for_size_p (cfun))
    return false;

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  /* Set to non-null if ARG refers to an unterminated array.  */
  tree nonstr = NULL;
  tree len = get_maxval_strlen (src, SRK_STRLEN, &nonstr);

  if (nonstr)
    {
      /* Avoid folding calls with unterminated arrays.  Warn once and
	 suppress further warnings for this statement.  */
      if (!warning_suppressed_p (stmt, OPT_Wstringop_overread))
	warn_string_no_nul (loc, stmt, "strcpy", src, nonstr);
      suppress_warning (stmt, OPT_Wstringop_overread);
      return false;
    }

  /* Give up without a known source length, or when the statement has
     no virtual definition in SSA form (e.g. the call was proved not
     to store).  */
  if (!len || (!gimple_vdef (stmt) && gimple_in_ssa_p (cfun)))
    return false;

  /* Transform into memcpy (dest, src, strlen (src) + 1).  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2132 :
/* Fold a call to the strncpy builtin with arguments DEST, SRC, and LEN.
   Return true if the call was simplified (replaced with DEST or with
   a call to memcpy) and false if no simplification could be made. */
2136 :
static bool
gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
			     tree dest, tree src, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Avoid warning if the destination refers to an array/pointer
	 decorated with attribute nonstring.  */
      if (!nonstring)
	{
	  tree fndecl = gimple_call_fndecl (stmt);

	  /* Warn about the lack of nul termination: the result is not
	     a (nul-terminated) string.  */
	  tree slen = get_maxval_strlen (src, SRK_STRLEN);
	  if (slen && !integer_zerop (slen))
	    warning_at (loc, OPT_Wstringop_truncation,
			"%qD destination unchanged after copying no bytes "
			"from a string of length %E",
			fndecl, slen);
	  else
	    warning_at (loc, OPT_Wstringop_truncation,
			"%qD destination unchanged after copying no bytes",
			fndecl);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (TREE_CODE (len) != INTEGER_CST)
    return false;

  /* Now, we must be passed a constant src ptr parameter.  */
  tree slen = get_maxval_strlen (src, SRK_STRLEN);
  if (!slen || TREE_CODE (slen) != INTEGER_CST)
    return false;

  /* The size of the source string including the terminating nul.  */
  tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (ssize, len))
    return false;

  /* Diagnose truncation that leaves the copy unterminated.  */
  maybe_diag_stxncpy_trunc (*gsi, src, len);

  /* OK transform into builtin memcpy.  Bail out if memcpy is
     unavailable or if the statement has no virtual definition
     in SSA form.  */
  tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn || (!gimple_vdef (stmt) && gimple_in_ssa_p (cfun)))
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}
2207 :
2208 : /* Fold function call to builtin strchr or strrchr.
2209 : If both arguments are constant, evaluate and fold the result,
2210 : otherwise simplify str(r)chr (str, 0) into str + strlen (str).
2211 : In general strlen is significantly faster than strchr
2212 : due to being a simpler operation. */
static bool
gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree str = gimple_call_arg (stmt, 0);
  tree c = gimple_call_arg (stmt, 1);
  location_t loc = gimple_location (stmt);
  const char *p;
  char ch;

  /* The result is unused; there is nothing to simplify into.  */
  if (!gimple_call_lhs (stmt))
    return false;

  /* Avoid folding if the first argument is not a nul-terminated array.
     Defer warning until later.  */
  if (!check_nul_terminated_array (NULL_TREE, str))
    return false;

  /* With a constant string and a constant character, evaluate the
     search at compile time using the host strchr/strrchr.  */
  if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
    {
      const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);

      if (p1 == NULL)
	{
	  /* Character not found: the result is a null pointer.  */
	  replace_call_with_value (gsi, integer_zero_node);
	  return true;
	}

      /* Found: fold to STR + constant offset.  */
      tree len = build_int_cst (size_type_node, p1 - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
					      POINTER_PLUS_EXPR, str, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* The strlen transformation below only applies to str(r)chr (s, 0),
     and requires a virtual use in SSA form.  */
  if (!integer_zerop (c) || (!gimple_vuse (stmt) && gimple_in_ssa_p (cfun)))
    return false;

  /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size.  */
  if (is_strrchr && optimize_function_for_size_p (cfun))
    {
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);

      if (strchr_fn)
	{
	  gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}

      return false;
    }

  /* Otherwise simplify str(r)chr (str, 0) into str + strlen (str):
     strlen is generally faster than strchr.  */
  tree len;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);

  if (!strlen_fn)
    return false;

  /* Create newstr = strlen (str).  */
  gimple_seq stmts = NULL;
  gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
  gimple_set_location (new_stmt, loc);
  len = make_ssa_name (size_type_node);
  gimple_call_set_lhs (new_stmt, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);

  /* Create (str p+ strlen (str)).  */
  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  POINTER_PLUS_EXPR, str, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);
  gsi_replace_with_seq_vops (gsi, stmts);
  /* gsi now points at the assignment to the lhs, get a
     stmt iterator to the strlen.
     ??? We can't use gsi_for_stmt as that doesn't work when the
     CFG isn't built yet.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
2296 :
2297 : /* Fold function call to builtin strstr.
2298 : If both arguments are constant, evaluate and fold the result,
2299 : additionally fold strstr (x, "") into x and strstr (x, "c")
2300 : into strchr (x, 'c'). */
2301 : static bool
2302 4185 : gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
2303 : {
2304 4185 : gimple *stmt = gsi_stmt (*gsi);
2305 4185 : if (!gimple_call_lhs (stmt))
2306 : return false;
2307 :
2308 4182 : tree haystack = gimple_call_arg (stmt, 0);
2309 4182 : tree needle = gimple_call_arg (stmt, 1);
2310 :
2311 : /* Avoid folding if either argument is not a nul-terminated array.
2312 : Defer warning until later. */
2313 4182 : if (!check_nul_terminated_array (NULL_TREE, haystack)
2314 4182 : || !check_nul_terminated_array (NULL_TREE, needle))
2315 19 : return false;
2316 :
2317 4163 : const char *q = c_getstr (needle);
2318 4163 : if (q == NULL)
2319 : return false;
2320 :
2321 3035 : if (const char *p = c_getstr (haystack))
2322 : {
2323 14 : const char *r = strstr (p, q);
2324 :
2325 14 : if (r == NULL)
2326 : {
2327 1 : replace_call_with_value (gsi, integer_zero_node);
2328 1 : return true;
2329 : }
2330 :
2331 13 : tree len = build_int_cst (size_type_node, r - p);
2332 13 : gimple_seq stmts = NULL;
2333 13 : gimple *new_stmt
2334 13 : = gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
2335 : haystack, len);
2336 13 : gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2337 13 : gsi_replace_with_seq_vops (gsi, stmts);
2338 13 : return true;
2339 : }
2340 :
2341 : /* For strstr (x, "") return x. */
2342 3021 : if (q[0] == '\0')
2343 : {
2344 6 : replace_call_with_value (gsi, haystack);
2345 6 : return true;
2346 : }
2347 :
2348 10173 : if (!gimple_vuse (stmt) && gimple_in_ssa_p (cfun))
2349 : return false;
2350 :
2351 : /* Transform strstr (x, "c") into strchr (x, 'c'). */
2352 3015 : if (q[1] == '\0')
2353 : {
2354 22 : tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
2355 22 : if (strchr_fn)
2356 : {
2357 22 : tree c = build_int_cst (integer_type_node, q[0]);
2358 22 : gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
2359 22 : replace_call_with_call_and_fold (gsi, repl);
2360 22 : return true;
2361 : }
2362 : }
2363 :
2364 : return false;
2365 : }
2366 :
/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call.

   Return true if the call was simplified and false otherwise.

   The simplification may replace the call with DST (when SRC is the
   empty string) or, when the source length is known and the block is
   optimized for speed, with the more efficient sequence
   strlen (DST) followed by memcpy into DST + strlen (DST). */
2384 :
static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* The strlen+memcpy expansion below is larger than the call;
     only worthwhile when optimizing this block for speed.  */
  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* In SSA form the call must have a virtual definition to rewrite.  */
  if (!gimple_vdef (stmt) && gimple_in_ssa_p (cfun))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, SRK_STRLEN);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple *repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  newdst = make_ssa_name (size_type_node);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  /* Copy strlen (src) + 1 bytes so the terminating nul comes along.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
			build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      /* strcat returns DST; materialize that for consumers of the lhs.  */
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
	 stmt iterator to the memcpy call.
	 ??? We can't use gsi_for_stmt as that doesn't work when the
	 CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}
2461 :
2462 : /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
2463 : are the arguments to the call. */
2464 :
2465 : static bool
2466 1714 : gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
2467 : {
2468 1714 : gimple *stmt = gsi_stmt (*gsi);
2469 1714 : tree dest = gimple_call_arg (stmt, 0);
2470 1714 : tree src = gimple_call_arg (stmt, 1);
2471 1714 : tree size = gimple_call_arg (stmt, 2);
2472 1714 : tree fn;
2473 1714 : const char *p;
2474 :
2475 1714 : p = c_getstr (src);
2476 : /* If the SRC parameter is "", return DEST. */
2477 1714 : if (p && *p == '\0')
2478 : {
2479 60 : replace_call_with_value (gsi, dest);
2480 60 : return true;
2481 : }
2482 :
2483 1654 : if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
2484 1572 : return false;
2485 :
2486 1736 : if (!gimple_vdef (stmt) && gimple_in_ssa_p (cfun))
2487 : return false;
2488 :
2489 : /* If __builtin_strcat_chk is used, assume strcat is available. */
2490 82 : fn = builtin_decl_explicit (BUILT_IN_STRCAT);
2491 82 : if (!fn)
2492 : return false;
2493 :
2494 82 : gimple *repl = gimple_build_call (fn, 2, dest, src);
2495 82 : replace_call_with_call_and_fold (gsi, repl);
2496 82 : return true;
2497 : }
2498 :
2499 : /* Simplify a call to the strncat builtin. */
2500 :
static bool
gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dst = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);
  tree src_len = c_strlen (src, 1);

  /* If the requested length is zero, or the src parameter string
     length is zero, return the dst parameter.  */
  if (integer_zerop (len) || (src_len && integer_zerop (src_len)))
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* Return early if the requested len is less than the string length.
     Warnings will be issued elsewhere later.  */
  if (!src_len || known_lower (stmt, len, src_len, true))
    return false;

  /* Warn on constant LEN.  */
  if (TREE_CODE (len) == INTEGER_CST)
    {
      bool nowarn = warning_suppressed_p (stmt, OPT_Wstringop_overflow_);
      tree dstsize;

      if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize)
	  && TREE_CODE (dstsize) == INTEGER_CST)
	{
	  int cmpdst = tree_int_cst_compare (len, dstsize);

	  if (cmpdst >= 0)
	    {
	      tree fndecl = gimple_call_fndecl (stmt);

	      /* Strncat copies (at most) LEN bytes and always appends
		 the terminating NUL so the specified bound should never
		 be equal to (or greater than) the size of the destination.
		 If it is, the copy could overflow.  */
	      location_t loc = gimple_location (stmt);
	      nowarn = warning_at (loc, OPT_Wstringop_overflow_,
				   cmpdst == 0
				   ? G_("%qD specified bound %E equals "
					"destination size")
				   : G_("%qD specified bound %E exceeds "
					"destination size %E"),
				   fndecl, len, dstsize);
	      if (nowarn)
		suppress_warning (stmt, OPT_Wstringop_overflow_);
	    }
	}

      if (!nowarn && TREE_CODE (src_len) == INTEGER_CST
	  && tree_int_cst_compare (src_len, len) == 0)
	{
	  tree fndecl = gimple_call_fndecl (stmt);
	  location_t loc = gimple_location (stmt);

	  /* To avoid possible overflow the specified bound should also
	     not be equal to the length of the source, even when the size
	     of the destination is unknown (it's not an uncommon mistake
	     to specify as the bound to strncpy the length of the source).  */
	  if (warning_at (loc, OPT_Wstringop_overflow_,
			  "%qD specified bound %E equals source length",
			  fndecl, len))
	    suppress_warning (stmt, OPT_Wstringop_overflow_);
	}
    }

  /* Fold to strcat only when LEN is known to be at least the source
     length, i.e. the bound cannot change the result.  */
  if (!known_lower (stmt, src_len, len))
    return false;

  tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);

  /* If the replacement _DECL isn't initialized, don't do the
     transformation.  */
  if (!fn || (!gimple_vdef (stmt) && gimple_in_ssa_p (cfun)))
    return false;

  /* Otherwise, emit a call to strcat.  */
  gcall *repl = gimple_build_call (fn, 2, dst, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2587 :
2588 : /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2589 : LEN, and SIZE. */
2590 :
2591 : static bool
2592 1143 : gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
2593 : {
2594 1143 : gimple *stmt = gsi_stmt (*gsi);
2595 1143 : tree dest = gimple_call_arg (stmt, 0);
2596 1143 : tree src = gimple_call_arg (stmt, 1);
2597 1143 : tree len = gimple_call_arg (stmt, 2);
2598 1143 : tree size = gimple_call_arg (stmt, 3);
2599 1143 : tree fn;
2600 1143 : const char *p;
2601 :
2602 1143 : p = c_getstr (src);
2603 : /* If the SRC parameter is "" or if LEN is 0, return DEST. */
2604 302 : if ((p && *p == '\0')
2605 1394 : || integer_zerop (len))
2606 : {
2607 78 : replace_call_with_value (gsi, dest);
2608 78 : return true;
2609 : }
2610 :
2611 3043 : if (!gimple_vdef (stmt) && gimple_in_ssa_p (cfun))
2612 : return false;
2613 :
2614 1065 : if (! integer_all_onesp (size))
2615 : {
2616 978 : tree src_len = c_strlen (src, 1);
2617 978 : if (known_lower (stmt, src_len, len))
2618 : {
2619 : /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
2620 65 : fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
2621 65 : if (!fn)
2622 : return false;
2623 :
2624 65 : gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2625 65 : replace_call_with_call_and_fold (gsi, repl);
2626 65 : return true;
2627 : }
2628 : return false;
2629 : }
2630 :
2631 : /* If __builtin_strncat_chk is used, assume strncat is available. */
2632 87 : fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
2633 87 : if (!fn)
2634 : return false;
2635 :
2636 87 : gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2637 87 : replace_call_with_call_and_fold (gsi, repl);
2638 87 : return true;
2639 : }
2640 :
2641 : /* Build and append gimple statements to STMTS that would load a first
2642 : character of a memory location identified by STR. LOC is location
2643 : of the statement. */
2644 :
2645 : static tree
2646 469 : gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
2647 : {
2648 469 : tree var;
2649 :
2650 469 : tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2651 469 : tree cst_uchar_ptr_node
2652 469 : = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
2653 469 : tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
2654 :
2655 469 : tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
2656 469 : gassign *stmt = gimple_build_assign (NULL_TREE, temp);
2657 469 : var = make_ssa_name (cst_uchar_node, stmt);
2658 :
2659 469 : gimple_assign_set_lhs (stmt, var);
2660 469 : gimple_seq_add_stmt_without_update (stmts, stmt);
2661 :
2662 469 : return var;
2663 : }
2664 :
2665 : /* Fold a call to the str{n}{case}cmp builtin pointed by GSI iterator. */
2666 :
static bool
gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree callee = gimple_call_fndecl (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);

  tree type = integer_type_node;
  tree str1 = gimple_call_arg (stmt, 0);
  tree str2 = gimple_call_arg (stmt, 1);
  tree lhs = gimple_call_lhs (stmt);

  /* BOUND is the third argument of strncmp/strncasecmp if it is a
     known constant, otherwise all-ones (meaning "unknown").  */
  tree bound_node = NULL_TREE;
  unsigned HOST_WIDE_INT bound = HOST_WIDE_INT_M1U;

  /* Handle strncmp and strncasecmp functions.  */
  if (gimple_call_num_args (stmt) == 3)
    {
      bound_node = gimple_call_arg (stmt, 2);
      if (tree_fits_uhwi_p (bound_node))
	bound = tree_to_uhwi (bound_node);
    }

  /* If the BOUND parameter is zero, return zero.  */
  if (bound == 0)
    {
      replace_call_with_value (gsi, integer_zero_node);
      return true;
    }

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (str1, str2, 0))
    {
      replace_call_with_value (gsi, integer_zero_node);
      return true;
    }

  /* In SSA form the call must have a virtual use to be folded into
     loads from the argument strings.  */
  if (!gimple_vuse (stmt) && gimple_in_ssa_p (cfun))
    return false;

  /* Initially set to the number of characters, including the terminating
     nul if each array has one.  LENx == strnlen (Sx, LENx) implies that
     the array Sx is not terminated by a nul.
     For nul-terminated strings then adjusted to their length so that
     LENx == NULPOSx holds.  */
  unsigned HOST_WIDE_INT len1 = HOST_WIDE_INT_MAX, len2 = len1;
  const char *p1 = getbyterep (str1, &len1);
  const char *p2 = getbyterep (str2, &len2);

  /* The position of the terminating nul character if one exists, otherwise
     a value greater than LENx.  */
  unsigned HOST_WIDE_INT nulpos1 = HOST_WIDE_INT_MAX, nulpos2 = nulpos1;

  if (p1)
    {
      size_t n = strnlen (p1, len1);
      if (n < len1)
	len1 = nulpos1 = n;
    }

  if (p2)
    {
      size_t n = strnlen (p2, len2);
      if (n < len2)
	len2 = nulpos2 = n;
    }

  /* For known strings, return an immediate value.  */
  if (p1 && p2)
    {
      int r = 0;
      bool known_result = false;

      switch (fcode)
	{
	case BUILT_IN_STRCMP:
	case BUILT_IN_STRCMP_EQ:
	  /* Only fold when both byte representations are genuinely
	     nul-terminated strings.  */
	  if (len1 != nulpos1 || len2 != nulpos2)
	    break;

	  r = strcmp (p1, p2);
	  known_result = true;
	  break;

	case BUILT_IN_STRNCMP:
	case BUILT_IN_STRNCMP_EQ:
	  {
	    if (bound == HOST_WIDE_INT_M1U)
	      break;

	    /* Reduce the bound to be no more than the length
	       of the shorter of the two strings, or the sizes
	       of the unterminated arrays.  */
	    unsigned HOST_WIDE_INT n = bound;

	    if (len1 == nulpos1 && len1 < n)
	      n = len1 + 1;
	    if (len2 == nulpos2 && len2 < n)
	      n = len2 + 1;

	    /* Give up if the comparison could read past the known
	       bytes of either unterminated array.  */
	    if (MIN (nulpos1, nulpos2) + 1 < n)
	      break;

	    r = strncmp (p1, p2, n);
	    known_result = true;
	    break;
	  }
	/* Only handleable situation is where the strings are equal (result 0),
	   which is already handled by operand_equal_p case.  */
	case BUILT_IN_STRCASECMP:
	  break;
	case BUILT_IN_STRNCASECMP:
	  {
	    if (bound == HOST_WIDE_INT_M1U)
	      break;
	    /* A case-sensitive comparison is used deliberately: if it
	       reports equality, the case-insensitive result is also 0.
	       A nonzero result proves nothing, so it is discarded.  */
	    r = strncmp (p1, p2, bound);
	    if (r == 0)
	      known_result = true;
	    break;
	  }
	default:
	  gcc_unreachable ();
	}

      if (known_result)
	{
	  replace_call_with_value (gsi, build_cmp_result (type, r));
	  return true;
	}
    }

  /* True when at least one character is known to be compared: either
     the bound is a known nonzero constant or the function is unbounded.  */
  bool nonzero_bound = (bound >= 1 && bound < HOST_WIDE_INT_M1U)
		       || fcode == BUILT_IN_STRCMP
		       || fcode == BUILT_IN_STRCMP_EQ
		       || fcode == BUILT_IN_STRCASECMP;

  location_t loc = gimple_location (stmt);

  /* If the second arg is "", return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0' && nonzero_bound)
    {
      gimple_seq stmts = NULL;
      tree var = gimple_load_first_char (loc, str1, &stmts);
      if (lhs)
	{
	  stmt = gimple_build_assign (lhs, NOP_EXPR, var);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If the first arg is "", return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0' && nonzero_bound)
    {
      gimple_seq stmts = NULL;
      tree var = gimple_load_first_char (loc, str2, &stmts);

      if (lhs)
	{
	  tree c = make_ssa_name (integer_type_node);
	  stmt = gimple_build_assign (c, NOP_EXPR, var);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);

	  stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If BOUND is one, return an expression corresponding to
     (*(const unsigned char*)arg2 - *(const unsigned char*)arg1).  */
  if (fcode == BUILT_IN_STRNCMP && bound == 1)
    {
      gimple_seq stmts = NULL;
      tree temp1 = gimple_load_first_char (loc, str1, &stmts);
      tree temp2 = gimple_load_first_char (loc, str2, &stmts);

      if (lhs)
	{
	  tree c1 = make_ssa_name (integer_type_node);
	  gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
	  gimple_seq_add_stmt_without_update (&stmts, convert1);

	  tree c2 = make_ssa_name (integer_type_node);
	  gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
	  gimple_seq_add_stmt_without_update (&stmts, convert2);

	  stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If BOUND is greater than the length of one constant string,
     and the other argument is also a nul-terminated string, replace
     strncmp with strcmp.  */
  if (fcode == BUILT_IN_STRNCMP
      && bound > 0 && bound < HOST_WIDE_INT_M1U
      && ((p2 && len2 < bound && len2 == nulpos2)
	  || (p1 && len1 < bound && len1 == nulpos1)))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
      if (!fn)
	return false;
      gimple *repl = gimple_build_call (fn, 2, str1, str2);
      replace_call_with_call_and_fold (gsi, repl);
      return true;
    }

  return false;
}
2884 :
2885 : /* Fold a call to the memchr pointed by GSI iterator. */
2886 :
2887 : static bool
2888 33740 : gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
2889 : {
2890 33740 : gimple *stmt = gsi_stmt (*gsi);
2891 33740 : tree lhs = gimple_call_lhs (stmt);
2892 33740 : tree arg1 = gimple_call_arg (stmt, 0);
2893 33740 : tree arg2 = gimple_call_arg (stmt, 1);
2894 33740 : tree len = gimple_call_arg (stmt, 2);
2895 :
2896 : /* If the LEN parameter is zero, return zero. */
2897 33740 : if (integer_zerop (len))
2898 : {
2899 1 : replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2900 1 : return true;
2901 : }
2902 :
2903 33739 : char c;
2904 33739 : if (TREE_CODE (arg2) != INTEGER_CST
2905 19196 : || !tree_fits_uhwi_p (len)
2906 34451 : || !target_char_cst_p (arg2, &c))
2907 33027 : return false;
2908 :
2909 712 : unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
2910 712 : unsigned HOST_WIDE_INT string_length;
2911 712 : const char *p1 = getbyterep (arg1, &string_length);
2912 :
2913 712 : if (p1)
2914 : {
2915 94 : const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
2916 94 : if (r == NULL)
2917 : {
2918 14 : tree mem_size, offset_node;
2919 14 : byte_representation (arg1, &offset_node, &mem_size, NULL);
2920 14 : unsigned HOST_WIDE_INT offset = (offset_node == NULL_TREE)
2921 14 : ? 0 : tree_to_uhwi (offset_node);
2922 : /* MEM_SIZE is the size of the array the string literal
2923 : is stored in. */
2924 14 : unsigned HOST_WIDE_INT string_size = tree_to_uhwi (mem_size) - offset;
2925 14 : gcc_checking_assert (string_length <= string_size);
2926 14 : if (length <= string_size)
2927 : {
2928 4 : replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2929 4 : return true;
2930 : }
2931 : }
2932 : else
2933 : {
2934 80 : unsigned HOST_WIDE_INT offset = r - p1;
2935 80 : gimple_seq stmts = NULL;
2936 80 : if (lhs != NULL_TREE)
2937 : {
2938 78 : tree offset_cst = build_int_cst (sizetype, offset);
2939 78 : gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
2940 : arg1, offset_cst);
2941 78 : gimple_seq_add_stmt_without_update (&stmts, stmt);
2942 : }
2943 : else
2944 2 : gimple_seq_add_stmt_without_update (&stmts,
2945 : gimple_build_nop ());
2946 :
2947 80 : gsi_replace_with_seq_vops (gsi, stmts);
2948 80 : return true;
2949 : }
2950 : }
2951 :
2952 : return false;
2953 : }
2954 :
2955 : /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
2956 : to the call. IGNORE is true if the value returned
2957 : by the builtin will be ignored. UNLOCKED is true is true if this
2958 : actually a call to fputs_unlocked. If LEN in non-NULL, it represents
2959 : the known length of the string. Return NULL_TREE if no simplification
2960 : was possible. */
2961 :
static bool
gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
			   tree arg0, tree arg1,
			   bool unlocked)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = (unlocked
			 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
			 : builtin_decl_implicit (BUILT_IN_FPUTC));
  tree const fn_fwrite = (unlocked
			  ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
			  : builtin_decl_implicit (BUILT_IN_FWRITE));

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt))
    return false;

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  tree len = get_maxval_strlen (arg0, SRK_STRLEN);
  if (!len || TREE_CODE (len) != INTEGER_CST)
    return false;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely.  */
      replace_call_with_value (gsi, integer_zero_node);
      return true;

    case 0: /* length is 1, call fputc.  */
      {
	const char *p = c_getstr (arg0);
	if (p != NULL)
	  {
	    /* Replacing the call requires a virtual definition in SSA
	       form, since fputc also stores (to the stream).  */
	    if (!fn_fputc || (!gimple_vdef (stmt) && gimple_in_ssa_p (cfun)))
	      return false;

	    gimple *repl
	      = gimple_build_call (fn_fputc, 2,
				   build_int_cst (integer_type_node, p[0]),
				   arg1);
	    replace_call_with_call_and_fold (gsi, repl);
	    return true;
	  }
      }
      /* Length 1 but the character is not a known constant: fall
	 through and emit fwrite instead.  */
      /* FALLTHROUGH */
    case 1: /* length is greater than 1, call fwrite.  */
      {
	/* If optimizing for size keep fputs.  */
	if (optimize_function_for_size_p (cfun))
	  return false;
	/* New argument list transforming fputs(string, stream) to
	   fwrite(string, 1, len, stream).  */
	if (!fn_fwrite || (!gimple_vdef (stmt) && gimple_in_ssa_p (cfun)))
	  return false;

	gimple *repl
	  = gimple_build_call (fn_fwrite, 4, arg0, size_one_node,
			       fold_convert (size_type_node, len), arg1);
	replace_call_with_call_and_fold (gsi, repl);
	return true;
      }
    default:
      gcc_unreachable ();
    }
}
3031 :
3032 : /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
3033 : DEST, SRC, LEN, and SIZE are the arguments to the call.
3034 : IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
3035 : code of the builtin. If MAXLEN is not NULL, it is maximum length
3036 : passed as third argument. */
3037 :
static bool
gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
				tree dest, tree src, tree len, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	{
	  replace_call_with_value (gsi, dest);
	  return true;
	}
      else
	{
	  /* __mempcpy_chk returns DEST + LEN, so materialize that sum.  */
	  gimple_seq stmts = NULL;
	  len = gimple_convert_to_ptrofftype (&stmts, loc, len);
	  tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
				    TREE_TYPE (dest), dest, len);
	  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	  replace_call_with_value (gsi, temp);
	  return true;
	}
    }

  /* In SSA form a store-performing replacement needs a virtual
     definition on the original call.  */
  if (!gimple_vdef (stmt) && gimple_in_ssa_p (cfun))
    return false;

  /* Try to prove LEN (or its maximal value) fits in SIZE; all-ones
     SIZE means checking was disabled at the call site.  */
  tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
  if (! integer_all_onesp (size)
      && !known_lower (stmt, len, size)
      && !known_lower (stmt, maxlen, size))
    {
      /* MAXLEN and LEN both cannot be proved to be less than SIZE, at
	 least try to optimize (void) __mempcpy_chk () into
	 (void) __memcpy_chk () */
      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
	{
	  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	  if (!fn)
	    return false;

	  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
      return false;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return false;

  /* The bound is proven safe: drop the object-size argument and call
     the unchecked variant.  */
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
3121 :
3122 : /* Fold a call to the __st[rp]cpy_chk builtin.
3123 : DEST, SRC, and SIZE are the arguments to the call.
3124 : IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
3125 : code of the builtin. If MAXLEN is not NULL, it is maximum length of
3126 : strings passed as second argument. */
3127 :
static bool
gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
				tree dest,
				tree src, tree size,
				enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree len, fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    {
      /* Issue -Wrestrict unless the pointers are null (those do
	 not point to objects and so do not indicate an overlap;
	 such calls could be the result of sanitization and jump
	 threading).  */
      if (!integer_zerop (dest)
	  && !warning_suppressed_p (stmt, OPT_Wrestrict))
	{
	  tree func = gimple_call_fndecl (stmt);

	  warning_at (loc, OPT_Wrestrict,
		      "%qD source argument is the same as destination",
		      func);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  /* In SSA form a store-performing replacement needs a virtual
     definition on the original call.  */
  if (!gimple_vdef (stmt) && gimple_in_ssa_p (cfun))
    return false;

  tree maxlen = get_maxval_strlen (src, SRK_STRLENMAX);
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      /* The last argument true requires strict inequality: the copy
	 writes strlen (SRC) + 1 bytes including the nul.  */
      if (!known_lower (stmt, len, size, true)
	  && !known_lower (stmt, maxlen, size, true))
	{
	  if (fcode == BUILT_IN_STPCPY_CHK)
	    {
	      if (! ignore)
		return false;

	      /* If return value of __stpcpy_chk is ignored,
		 optimize into __strcpy_chk.  */
	      fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
	      if (!fn)
		return false;

	      gimple *repl = gimple_build_call (fn, 3, dest, src, size);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }

	  if (! len || TREE_SIDE_EFFECTS (len))
	    return false;

	  /* If c_strlen returned something, but not provably less than size,
	     transform __strcpy_chk into __memcpy_chk.  */
	  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	  if (!fn)
	    return false;

	  /* The memcpy length is strlen (SRC) + 1 to include the nul.  */
	  gimple_seq stmts = NULL;
	  len = force_gimple_operand (len, &stmts, true, NULL_TREE);
	  len = gimple_convert (&stmts, loc, size_type_node, len);
	  len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
			      build_int_cst (size_type_node, 1));
	  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK && !ignore
			      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return false;

  gcall *repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
3217 :
3218 : /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
3219 : are the arguments to the call. If MAXLEN is not NULL, it is maximum
3220 : length passed as third argument. IGNORE is true if return value can be
3221 : ignored. FCODE is the BUILT_IN_* code of the builtin. */
3222 :
3223 : static bool
3224 2721 : gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
3225 : tree dest, tree src,
3226 : tree len, tree size,
3227 : enum built_in_function fcode)
3228 : {
3229 2721 : gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3230 2721 : bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
3231 2721 : tree fn;
3232 :
3233 2721 : tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
3234 2721 : if (! integer_all_onesp (size)
3235 2721 : && !known_lower (stmt, len, size) && !known_lower (stmt, maxlen, size))
3236 : {
3237 2264 : if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
3238 : {
3239 : /* If return value of __stpncpy_chk is ignored,
3240 : optimize into __strncpy_chk. */
3241 39 : fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
3242 39 : if (fn)
3243 : {
3244 39 : gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
3245 39 : replace_call_with_call_and_fold (gsi, repl);
3246 39 : return true;
3247 : }
3248 : }
3249 : return false;
3250 : }
3251 :
3252 : /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
3253 717 : fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK && !ignore
3254 : ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
3255 3139 : if (!fn || (!gimple_vdef (stmt) && gimple_in_ssa_p (cfun)))
3256 : return false;
3257 :
3258 457 : gcall *repl = gimple_build_call (fn, 3, dest, src, len);
3259 457 : replace_call_with_call_and_fold (gsi, repl);
3260 457 : return true;
3261 : }
3262 :
3263 : /* Fold function call to builtin stpcpy with arguments DEST and SRC.
3264 : Return NULL_TREE if no simplification can be made. */
3265 :
static bool
gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  location_t loc = gimple_location (stmt);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree fn, lenp1;

  /* In SSA form a store-performing replacement needs a virtual
     definition on the original call.  */
  if (!gimple_vdef (stmt) && gimple_in_ssa_p (cfun))
    return false;

  /* If the result is unused, replace stpcpy with strcpy.  */
  if (gimple_call_lhs (stmt) == NULL_TREE)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;
      gimple_call_set_fndecl (stmt, fn);
      fold_stmt (gsi);
      return true;
    }

  /* Set to non-null if ARG refers to an unterminated array.  */
  c_strlen_data data = { };
  /* The size of the unterminated array if SRC refers to one.  */
  tree size;
  /* True if the size is exact/constant, false if it's the lower bound
     of a range.  */
  bool exact;
  tree len = c_strlen (src, 1, &data, 1);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    {
      data.decl = unterminated_array (src, &size, &exact);
      if (!data.decl)
	return false;
    }

  if (data.decl)
    {
      /* Avoid folding calls with unterminated arrays.  */
      if (!warning_suppressed_p (stmt, OPT_Wstringop_overread))
	warn_string_no_nul (loc, stmt, "stpcpy", src, data.decl, size,
			    exact);
      /* Suppress the warning so the diagnostic is not repeated on
	 later folding attempts for the same statement.  */
      suppress_warning (stmt, OPT_Wstringop_overread);
      return false;
    }

  if (optimize_function_for_size_p (cfun)
      /* If length is zero it's small enough.  */
      && !integer_zerop (len))
    return false;

  /* If the source has a known length replace stpcpy with memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  /* Emit memcpy (DEST, SRC, LEN + 1) before the call to copy the nul
     as well, moving the virtual operands onto the new call.  */
  gimple_seq stmts = NULL;
  tree tem = gimple_convert (&stmts, loc, size_type_node, len);
  lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
			tem, build_int_cst (size_type_node, 1));
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
  gimple_move_vops (repl, stmt);
  gsi_insert_before (gsi, repl, GSI_SAME_STMT);
  /* Replace the result with dest + len.  */
  stmts = NULL;
  tem = gimple_convert (&stmts, loc, sizetype, len);
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
				      POINTER_PLUS_EXPR, dest, tem);
  gsi_replace (gsi, ret, false);
  /* Finally fold the memcpy call.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
3346 :
3347 : /* Simplify mempcpy call stmt at GSI, returning true if simplified.
3348 : Currently only handling mempcpy -> memcpy when the return value
3349 : is ignored. */
3350 :
3351 : static bool
3352 9536 : gimple_fold_builtin_mempcpy (gimple_stmt_iterator *gsi)
3353 : {
3354 9536 : gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3355 :
3356 9536 : if (gimple_call_lhs (stmt) != NULL_TREE)
3357 : return false;
3358 :
3359 385 : tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
3360 385 : if (!fn)
3361 : return false;
3362 :
3363 385 : tree dest = gimple_call_arg (stmt, 0);
3364 385 : tree src = gimple_call_arg (stmt, 1);
3365 385 : tree n = gimple_call_arg (stmt, 2);
3366 :
3367 385 : gcall *repl = gimple_build_call (fn, 3, dest, src, n);
3368 385 : replace_call_with_call_and_fold (gsi, repl);
3369 :
3370 385 : return true;
3371 : }
3372 :
3373 : /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
3374 : NULL_TREE if a normal call should be emitted rather than expanding
3375 : the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
3376 : BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
3377 : passed as second argument. */
3378 :
static bool
gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
				  enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.  */
  if (gimple_call_num_args (stmt) < 5)
    return false;

  dest = gimple_call_arg (stmt, 0);
  len = gimple_call_arg (stmt, 1);
  flag = gimple_call_arg (stmt, 2);
  size = gimple_call_arg (stmt, 3);
  fmt = gimple_call_arg (stmt, 4);

  /* Require either disabled checking (SIZE all ones) or proof that
     LEN, or its maximal value, does not exceed SIZE.  */
  tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
  if (! integer_all_onesp (size)
      && !known_lower (stmt, len, size) && !known_lower (stmt, maxlen, size))
    return false;

  if (!init_target_chars ())
    return false;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
			      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
  if (!fn || (!gimple_vdef (stmt) && gimple_in_ssa_p (cfun)))
    return false;

  /* Replace the called function and the first 5 arguments by 3 retaining
     trailing varargs: drop the FLAG and SIZE arguments and shift the
     variadic arguments down two slots in place.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, len);
  gimple_call_set_arg (stmt, 2, fmt);
  for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
3437 :
/* Fold a call to __{,v}sprintf_chk at *GSI into a plain {,v}sprintf
   call when the object-size check can be proven to pass.  Return true
   if the call was simplified in place, false if a normal call should
   be emitted instead.  FCODE is either BUILT_IN_SPRINTF_CHK or
   BUILT_IN_VSPRINTF_CHK.  */

static bool
gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
				 enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  unsigned nargs = gimple_call_num_args (stmt);

  /* Verify the required arguments in the original call:
     __sprintf_chk (dest, flag, size, fmt, ...).  */
  if (nargs < 4)
    return false;
  dest = gimple_call_arg (stmt, 0);
  flag = gimple_call_arg (stmt, 1);
  size = gimple_call_arg (stmt, 2);
  fmt = gimple_call_arg (stmt, 3);

  /* LEN is the compile-time known length of the output, if any.  */
  len = NULL_TREE;

  if (!init_target_chars ())
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
	{
	  /* For sprintf_chk only trust this when there are no trailing
	     arguments (nargs == 4); vsprintf_chk never consumes them.  */
	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
	    len = build_int_cstu (size_type_node, strlen (fmt_str));
	}
      /* If the format is "%s" and first ... argument is a string literal,
	 we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
	       && strcmp (fmt_str, target_percent_s) == 0)
	{
	  tree arg;

	  if (nargs == 5)
	    {
	      arg = gimple_call_arg (stmt, 4);
	      if (POINTER_TYPE_P (TREE_TYPE (arg)))
		len = c_strlen (arg, 1);
	    }
	}
    }

  /* An all-ones SIZE means the object size is unknown, in which case the
     _chk variant performs no checking anyway; otherwise require proof
     that the output (LEN) fits in SIZE bytes including the NUL.  */
  if (! integer_all_onesp (size) && !known_lower (stmt, len, size, true))
    return false;

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
			      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
  /* Punt in SSA form when the call has no virtual definition to carry
     over to the replacement store.  */
  if (!fn || (!gimple_vdef (stmt) && gimple_in_ssa_p (cfun)))
    return false;

  /* Replace the called function and the first 4 argument by 2 retaining
     trailing varargs: drop FLAG and SIZE, keep DEST and FMT.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, fmt);
  for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
3523 :
/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
   ORIG may be null if this is a 2-argument call.  We don't attempt to
   simplify calls with more than 3 arguments.

   Return true if simplification was possible, otherwise false.  */

bool
gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* Verify the required arguments in the original call.  We deal with two
     types of sprintf() calls: 'sprintf (str, fmt)' and
     'sprintf (dest, "%s", orig)'.  */
  if (gimple_call_num_args (stmt) > 3)
    return false;

  tree orig = NULL_TREE;
  if (gimple_call_num_args (stmt) == 3)
    orig = gimple_call_arg (stmt, 2);

  /* Check whether the format is a literal string constant.  */
  tree fmt = gimple_call_arg (stmt, 1);
  const char *fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  tree dest = gimple_call_arg (stmt, 0);

  if (!init_target_chars ())
    return false;

  /* Punt if strcpy is unavailable, or in SSA form when the call has no
     virtual definition to carry over to the replacement store.  */
  tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
  if (!fn || (!gimple_vdef (stmt) && gimple_in_ssa_p (cfun)))
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      /* Don't optimize sprintf (buf, "abc", ptr++).  */
      if (orig)
	return false;

      /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
	 'format' is known to contain no % formats.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);

      /* Propagate the NO_WARNING bit to avoid issuing the same
	 warning more than once.  */
      copy_warning (repl, stmt);

      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  /* The sprintf result is the format length, known at
	     compile time here.  */
	  repl = gimple_build_assign (lhs, build_int_cst (TREE_TYPE (lhs),
							  strlen (fmt_str)));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the strcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy; when the result is used its
     value is the length of ORIG, which must then be known.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      /* Don't crash on sprintf (str1, "%s").  */
      if (!orig)
	return false;

      /* Don't fold calls with source arguments of invalid (nonpointer)
	 types.  */
      if (!POINTER_TYPE_P (TREE_TYPE (orig)))
	return false;

      tree orig_len = NULL_TREE;
      if (gimple_call_lhs (stmt))
	{
	  /* The return value is strlen (ORIG); punt unless that is a
	     compile-time constant.  */
	  orig_len = get_maxval_strlen (orig, SRK_STRLEN);
	  if (!orig_len)
	    return false;
	}

      /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);

      /* Propagate the NO_WARNING bit to avoid issuing the same
	 warning more than once.  */
      copy_warning (repl, stmt);

      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (TREE_TYPE (lhs),
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
	  repl = gimple_build_assign (lhs, orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the strcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
3653 :
/* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
   FMT, and ORIG.  ORIG may be null if this is a 3-argument call.  We don't
   attempt to simplify calls with more than 4 arguments.

   Return true if simplification was possible, otherwise false.  */

bool
gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest = gimple_call_arg (stmt, 0);
  tree destsize = gimple_call_arg (stmt, 1);
  tree fmt = gimple_call_arg (stmt, 2);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  /* Punt on extra arguments, and in SSA form when the call has no
     virtual definition to carry over to the replacement store.  */
  if (gimple_call_num_args (stmt) > 4
      || (!gimple_vdef (stmt) && gimple_in_ssa_p (cfun)))
    return false;

  if (gimple_call_num_args (stmt) == 4)
    orig = gimple_call_arg (stmt, 3);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't optimize snprintf (buf, 4, "abc", ptr++).  */
      if (orig)
	return false;

      /* LEN is the known output length (the format itself).  */
      tree len = build_int_cstu (TREE_TYPE (destsize), strlen (fmt_str));

      /* We could expand this as
	 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
	 or to
	 memcpy (str, fmt_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      if (!known_lower (stmt, len, destsize, true))
	return false;

      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  /* The snprintf result is the format length, known at
	     compile time here.  */
	  repl = gimple_build_assign (lhs,
				      fold_convert (TREE_TYPE (lhs), len));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the strcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy; when the result is used its
     value is the length of ORIG.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't crash on snprintf (str1, cst, "%s").  */
      if (!orig)
	return false;

      tree orig_len = get_maxval_strlen (orig, SRK_STRLEN);

      /* We could expand this as
	 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
	 or to
	 memcpy (str1, str2_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      if (!known_lower (stmt, orig_len, destsize, true))
	return false;

      /* Convert snprintf (str1, cst, "%s", str2) into
	 strcpy (str1, str2) if strlen (str2) < cst.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (TREE_TYPE (lhs),
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
	  repl = gimple_build_assign (lhs, orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the strcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
3786 :
/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
   FP, FMT, and ARG are the arguments to the call.  We don't fold calls with
   more than 3 arguments, and ARG may be null in the 2-argument case.

   Return true if the call was simplified (into fputs/fputc or folded
   away), false otherwise.  FCODE is the BUILT_IN_* code of the function
   to be simplified.  */

static bool
gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
			     tree fp, tree fmt, tree arg,
			     enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_fputc, fn_fputs;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
      fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
    }
  else
    {
      fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
      fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
    }

  if (!init_target_chars ())
    return false;

  /* Punt in SSA form when the call has no virtual definition to carry
     over to the replacement.  */
  if (!gimple_vdef (stmt) && gimple_in_ssa_p (cfun))
    return false;

  /* If the format doesn't contain % args or %%, use fputs.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
	  && arg)
	return false;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* When "string" doesn't contain %, replace all cases of
	 fprintf (fp, string) with fputs (string, fp).  The fputs
	 builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
    return false;

  /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg
	  || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
	return false;
      if (fn_fputc)
	{
	  gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
3890 :
/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
   FMT and ARG are the arguments to the call; we don't fold cases with
   more than 2 arguments, and ARG may be null if this is a 1-argument case.

   Return true if the call was simplified (into puts/putchar or folded
   away), false otherwise.  FCODE is the BUILT_IN_* code of the function
   to be simplified.  */

static bool
gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
			    tree arg, enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_putchar, fn_puts, newarg;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Punt in SSA form when the call has no virtual definition to carry
     over to the replacement.  */
  if (!gimple_vdef (stmt) && gimple_in_ssa_p (cfun))
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_PRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
      fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
    }
  else
    {
      fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
      fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
    }

  if (!init_target_chars ())
    return false;

  /* Handle printf ("%s", str) and printf (string-without-%) alike:
     in both cases the output is a known string STR.  */
  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
    {
      const char *str;

      if (strcmp (fmt_str, target_percent_s) == 0)
	{
	  if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
	    return false;

	  if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	    return false;

	  str = c_getstr (arg);
	  if (str == NULL)
	    return false;
	}
      else
	{
	  /* The format specifier doesn't contain any '%' characters.  */
	  if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
	      && arg)
	    return false;
	  str = fmt_str;
	}

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')
	{
	  /* Given printf("c"), (where c is any one character,)
	     convert "c"[0] to an int and pass that to the replacement
	     function.  */
	  newarg = build_int_cst (integer_type_node, str[0]);
	  if (fn_putchar)
	    {
	      gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	{
	  /* If the string was "string\n", call puts("string").  */
	  size_t len = strlen (str);
	  if ((unsigned char)str[len - 1] == target_newline
	      && (size_t) (int) len == len
	      && (int) len > 0)
	    {
	      char *newstr;

	      /* Create a NUL-terminated string that's one char shorter
		 than the original, stripping off the trailing '\n'.  */
	      newstr = xstrdup (str);
	      newstr[len - 1] = '\0';
	      newarg = build_string_literal (len, newstr);
	      free (newstr);
	      if (fn_puts)
		{
		  gcall *repl = gimple_build_call (fn_puts, 1, newarg);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	    }
	  else
	    /* We'd like to arrange to call fputs(string,stdout) here,
	       but we need stdout and don't have a way to get it yet.  */
	    return false;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
    return false;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_puts)
	{
	  gcall *repl = gimple_build_call (fn_puts, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || ! useless_type_conversion_p (integer_type_node,
					       TREE_TYPE (arg)))
	return false;
      if (fn_putchar)
	{
	  gcall *repl = gimple_build_call (fn_putchar, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
4045 :
4046 :
4047 :
/* Fold a call to __builtin_strlen at *GSI.  If the length of the
   argument can be determined to be a single constant, replace the call
   with that constant and return true.  Otherwise record the computed
   [MINLEN, MAXLEN] range on the call's LHS and return false.  */

static bool
gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree arg = gimple_call_arg (stmt, 0);

  wide_int minlen;
  wide_int maxlen;

  c_strlen_data lendata = { };
  if (get_range_strlen (arg, &lendata, /* eltsize = */ 1)
      && !lendata.decl
      && lendata.minlen && TREE_CODE (lendata.minlen) == INTEGER_CST
      && lendata.maxlen && TREE_CODE (lendata.maxlen) == INTEGER_CST)
    {
      /* The range of lengths refers to either a single constant
	 string or to the longest and shortest constant string
	 referenced by the argument of the strlen() call, or to
	 the strings that can possibly be stored in the arrays
	 the argument refers to.  */
      minlen = wi::to_wide (lendata.minlen);
      maxlen = wi::to_wide (lendata.maxlen);
    }
  else
    {
      /* Fallback: only the trivial range [0, max_object_size - 2]
	 is known.  */
      unsigned prec = TYPE_PRECISION (sizetype);

      minlen = wi::shwi (0, prec);
      maxlen = wi::to_wide (max_object_size (), prec) - 2;
    }

  /* For -fsanitize=address, don't optimize the upper bound of the
     length to be able to diagnose UB on non-zero terminated arrays.  */
  if (sanitize_flags_p (SANITIZE_ADDRESS))
    maxlen = wi::max_value (TYPE_PRECISION (sizetype), UNSIGNED);

  if (minlen == maxlen)
    {
      /* Fold the strlen call to a constant.  A singleton range can only
	 come from the get_range_strlen branch above (the fallback range
	 is never a singleton), so lendata.minlen is valid here.  */
      tree type = TREE_TYPE (lendata.minlen);
      tree len = force_gimple_operand_gsi (gsi,
					   wide_int_to_tree (type, minlen),
					   true, NULL, true, GSI_SAME_STMT);
      replace_call_with_value (gsi, len);
      return true;
    }

  /* Set the strlen() range to [0, MAXLEN].  */
  if (tree lhs = gimple_call_lhs (stmt))
    set_strlen_range (lhs, minlen, maxlen);

  return false;
}
4103 :
/* Fold omp_is_initial_device () to a constant when the answer is known
   at compile time: 0 in the offload-device compiler, 1 on the host when
   no offloading is configured or we have reached expansion (by which
   point this is known to be host code).  Return true if folded.  */

static bool
gimple_fold_builtin_omp_is_initial_device (gimple_stmt_iterator *gsi)
{
#if ACCEL_COMPILER
  /* Compiling for an offload device: never the initial device.  */
  replace_call_with_value (gsi, integer_zero_node);
  return true;
#else
  if (!ENABLE_OFFLOADING || symtab->state == EXPANSION)
    {
      replace_call_with_value (gsi, integer_one_node);
      return true;
    }
#endif
  return false;
}
4119 :
/* omp_get_initial_device was in OpenMP 5.0/5.1 explicitly and in
   5.0 implicitly the same as omp_get_num_devices; since 6.0 it is
   unspecified whether -1 or omp_get_num_devices() is returned.  For
   better backward compatibility, use omp_get_num_devices() on the
   host - and -1 on the device (where the result is unspecified).
   Always folds the call; returns true.  */

static bool
gimple_fold_builtin_omp_get_initial_device (gimple_stmt_iterator *gsi)
{
#if ACCEL_COMPILER
  /* On the device the result is unspecified; use -1.  */
  replace_call_with_value (gsi, build_int_cst (integer_type_node, -1));
#else
  if (!ENABLE_OFFLOADING)
    /* Without offloading there are no non-host devices, so
       omp_get_num_devices() is known to be 0.  */
    replace_call_with_value (gsi, integer_zero_node);
  else
    {
      /* Fold to a call to omp_get_num_devices ().  */
      tree fn = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_DEVICES);
      gcall *repl = gimple_build_call (fn, 0);
      replace_call_with_call_and_fold (gsi, repl);
    }
#endif
  return true;
}
4143 :
4144 : static bool
4145 294 : gimple_fold_builtin_omp_get_num_devices (gimple_stmt_iterator *gsi)
4146 : {
4147 294 : if (!ENABLE_OFFLOADING)
4148 : {
4149 0 : replace_call_with_value (gsi, integer_zero_node);
4150 294 : return true;
4151 : }
4152 : return false;
4153 : }
4154 :
4155 : /* Fold a call to __builtin_acc_on_device. */
4156 :
4157 : static bool
4158 2866 : gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
4159 : {
4160 : /* Defer folding until we know which compiler we're in. */
4161 2866 : if (symtab->state != EXPANSION)
4162 : return false;
4163 :
4164 554 : unsigned val_host = GOMP_DEVICE_HOST;
4165 554 : unsigned val_dev = GOMP_DEVICE_NONE;
4166 :
4167 : #ifdef ACCEL_COMPILER
4168 : val_host = GOMP_DEVICE_NOT_HOST;
4169 : val_dev = ACCEL_COMPILER_acc_device;
4170 : #endif
4171 :
4172 554 : location_t loc = gimple_location (gsi_stmt (*gsi));
4173 :
4174 554 : tree host_eq = make_ssa_name (boolean_type_node);
4175 554 : gimple *host_ass = gimple_build_assign
4176 554 : (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
4177 554 : gimple_set_location (host_ass, loc);
4178 554 : gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
4179 :
4180 554 : tree dev_eq = make_ssa_name (boolean_type_node);
4181 554 : gimple *dev_ass = gimple_build_assign
4182 554 : (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
4183 554 : gimple_set_location (dev_ass, loc);
4184 554 : gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
4185 :
4186 554 : tree result = make_ssa_name (boolean_type_node);
4187 554 : gimple *result_ass = gimple_build_assign
4188 554 : (result, BIT_IOR_EXPR, host_eq, dev_eq);
4189 554 : gimple_set_location (result_ass, loc);
4190 554 : gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
4191 :
4192 554 : replace_call_with_value (gsi, result);
4193 :
4194 554 : return true;
4195 : }
4196 :
4197 : /* Fold realloc (0, n) -> malloc (n). */
4198 :
4199 : static bool
4200 48435 : gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
4201 : {
4202 48435 : gimple *stmt = gsi_stmt (*gsi);
4203 48435 : tree arg = gimple_call_arg (stmt, 0);
4204 48435 : tree size = gimple_call_arg (stmt, 1);
4205 :
4206 143947 : if (!gimple_vdef (stmt) && gimple_in_ssa_p (cfun))
4207 : return false;
4208 :
4209 48435 : if (operand_equal_p (arg, null_pointer_node, 0))
4210 : {
4211 1358 : tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
4212 1358 : if (fn_malloc)
4213 : {
4214 1358 : gcall *repl = gimple_build_call (fn_malloc, 1, size);
4215 1358 : replace_call_with_call_and_fold (gsi, repl);
4216 1358 : return true;
4217 : }
4218 : }
4219 : return false;
4220 : }
4221 :
/* Number of bytes into which any type but aggregate, vector or
   _BitInt types should fit.  */
static constexpr size_t clear_padding_unit
  = MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT;
/* Buffer size on which __builtin_clear_padding folding code works.  */
static const size_t clear_padding_buf_size = 32 * clear_padding_unit;

/* Data passed through __builtin_clear_padding folding.  */
struct clear_padding_struct {
  /* Location used for the statements emitted to clear padding.  */
  location_t loc;
  /* 0 during __builtin_clear_padding folding, nonzero during
     clear_type_padding_in_mask.  In that case, instead of clearing the
     non-padding bits in union_ptr array clear the padding bits in there.  */
  bool clear_in_mask;
  /* Base address of the object whose padding is being cleared; the
     emitted MEM_REF stores are relative to it.  */
  tree base;
  /* Alias type used for the offset operand of the emitted MEM_REFs.  */
  tree alias_type;
  /* Iterator before which the clearing statements are inserted.  */
  gimple_stmt_iterator *gsi;
  /* Alignment of buf->base + 0.  */
  unsigned align;
  /* Offset from buf->base.  Should be always a multiple of UNITS_PER_WORD.  */
  HOST_WIDE_INT off;
  /* Number of padding bytes before buf->off that don't have padding clear
     code emitted yet.  */
  HOST_WIDE_INT padding_bytes;
  /* The size of the whole object.  Never emit code to touch
     buf->base + buf->sz or following bytes.  */
  HOST_WIDE_INT sz;
  /* Number of bytes recorded in buf->buf.  */
  size_t size;
  /* When inside union, instead of emitting code we and bits inside of
     the union_ptr array.  */
  unsigned char *union_ptr;
  /* Set bits mean padding bits that need to be cleared by the builtin.  */
  unsigned char buf[clear_padding_buf_size + clear_padding_unit];
};
4257 :
4258 : /* Emit code to clear padding requested in BUF->buf - set bits
4259 : in there stand for padding that should be cleared. FULL is true
4260 : if everything from the buffer should be flushed, otherwise
4261 : it can leave up to 2 * clear_padding_unit bytes for further
4262 : processing. */
4263 :
4264 : static void
4265 34484 : clear_padding_flush (clear_padding_struct *buf, bool full)
4266 : {
4267 34484 : gcc_assert ((clear_padding_unit % UNITS_PER_WORD) == 0);
4268 34484 : if (!full && buf->size < 2 * clear_padding_unit)
4269 34484 : return;
4270 35532 : gcc_assert ((buf->off % UNITS_PER_WORD) == 0);
4271 34442 : size_t end = buf->size;
4272 34442 : if (!full)
4273 42 : end = ((end - clear_padding_unit - 1) / clear_padding_unit
4274 : * clear_padding_unit);
4275 34442 : size_t padding_bytes = buf->padding_bytes;
4276 34442 : if (buf->union_ptr)
4277 : {
4278 33676 : if (buf->clear_in_mask)
4279 : {
4280 : /* During clear_type_padding_in_mask, clear the padding
4281 : bits set in buf->buf in the buf->union_ptr mask. */
4282 228506 : for (size_t i = 0; i < end; i++)
4283 : {
4284 195223 : if (buf->buf[i] == (unsigned char) ~0)
4285 7884 : padding_bytes++;
4286 : else
4287 : {
4288 187339 : memset (&buf->union_ptr[buf->off + i - padding_bytes],
4289 : 0, padding_bytes);
4290 187339 : padding_bytes = 0;
4291 187339 : buf->union_ptr[buf->off + i] &= ~buf->buf[i];
4292 : }
4293 : }
4294 33283 : if (full)
4295 : {
4296 33283 : memset (&buf->union_ptr[buf->off + end - padding_bytes],
4297 : 0, padding_bytes);
4298 33283 : buf->off = 0;
4299 33283 : buf->size = 0;
4300 33283 : buf->padding_bytes = 0;
4301 : }
4302 : else
4303 : {
4304 0 : memmove (buf->buf, buf->buf + end, buf->size - end);
4305 0 : buf->off += end;
4306 0 : buf->size -= end;
4307 0 : buf->padding_bytes = padding_bytes;
4308 : }
4309 33283 : return;
4310 : }
4311 : /* Inside of a union, instead of emitting any code, instead
4312 : clear all bits in the union_ptr buffer that are clear
4313 : in buf. Whole padding bytes don't clear anything. */
4314 3017 : for (size_t i = 0; i < end; i++)
4315 : {
4316 2624 : if (buf->buf[i] == (unsigned char) ~0)
4317 1424 : padding_bytes++;
4318 : else
4319 : {
4320 1200 : padding_bytes = 0;
4321 1200 : buf->union_ptr[buf->off + i] &= buf->buf[i];
4322 : }
4323 : }
4324 393 : if (full)
4325 : {
4326 393 : buf->off = 0;
4327 393 : buf->size = 0;
4328 393 : buf->padding_bytes = 0;
4329 : }
4330 : else
4331 : {
4332 0 : memmove (buf->buf, buf->buf + end, buf->size - end);
4333 0 : buf->off += end;
4334 0 : buf->size -= end;
4335 0 : buf->padding_bytes = padding_bytes;
4336 : }
4337 393 : return;
4338 : }
4339 766 : size_t wordsize = UNITS_PER_WORD;
4340 23505 : for (size_t i = 0; i < end; i += wordsize)
4341 : {
4342 22739 : size_t nonzero_first = wordsize;
4343 22739 : size_t nonzero_last = 0;
4344 22739 : size_t zero_first = wordsize;
4345 22739 : size_t zero_last = 0;
4346 22739 : bool all_ones = true, bytes_only = true;
4347 23025 : if ((unsigned HOST_WIDE_INT) (buf->off + i + wordsize)
4348 22739 : > (unsigned HOST_WIDE_INT) buf->sz)
4349 : {
4350 286 : gcc_assert (wordsize > 1);
4351 286 : wordsize /= 2;
4352 286 : i -= wordsize;
4353 286 : continue;
4354 : }
4355 22453 : size_t endsize = end - i > wordsize ? wordsize : end - i;
4356 200832 : for (size_t j = i; j < i + endsize; j++)
4357 : {
4358 178379 : if (buf->buf[j])
4359 : {
4360 168378 : if (nonzero_first == wordsize)
4361 : {
4362 21511 : nonzero_first = j - i;
4363 21511 : nonzero_last = j - i;
4364 : }
4365 168378 : if (nonzero_last != j - i)
4366 158 : all_ones = false;
4367 168378 : nonzero_last = j + 1 - i;
4368 : }
4369 : else
4370 : {
4371 10001 : if (zero_first == wordsize)
4372 1938 : zero_first = j - i;
4373 10001 : zero_last = j + 1 - i;
4374 : }
4375 178379 : if (buf->buf[j] != 0 && buf->buf[j] != (unsigned char) ~0)
4376 : {
4377 85 : all_ones = false;
4378 85 : bytes_only = false;
4379 : }
4380 : }
4381 22453 : size_t padding_end = i;
4382 22453 : if (padding_bytes)
4383 : {
4384 20849 : if (nonzero_first == 0
4385 20849 : && nonzero_last == endsize
4386 20400 : && all_ones)
4387 : {
4388 : /* All bits are padding and we had some padding
4389 : before too. Just extend it. */
4390 20400 : padding_bytes += endsize;
4391 20400 : continue;
4392 : }
4393 449 : if (all_ones && nonzero_first == 0)
4394 : {
4395 4 : padding_bytes += nonzero_last;
4396 4 : padding_end += nonzero_last;
4397 4 : nonzero_first = wordsize;
4398 4 : nonzero_last = 0;
4399 : }
4400 445 : else if (bytes_only && nonzero_first == 0)
4401 : {
4402 0 : gcc_assert (zero_first && zero_first != wordsize);
4403 0 : padding_bytes += zero_first;
4404 0 : padding_end += zero_first;
4405 : }
4406 449 : tree atype, src;
4407 449 : if (padding_bytes == 1)
4408 : {
4409 33 : atype = char_type_node;
4410 33 : src = build_zero_cst (char_type_node);
4411 : }
4412 : else
4413 : {
4414 416 : atype = build_array_type_nelts (char_type_node, padding_bytes);
4415 416 : src = build_constructor (atype, NULL);
4416 : }
4417 449 : tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
4418 : build_int_cst (buf->alias_type,
4419 449 : buf->off + padding_end
4420 449 : - padding_bytes));
4421 449 : gimple *g = gimple_build_assign (dst, src);
4422 449 : gimple_set_location (g, buf->loc);
4423 449 : gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4424 449 : padding_bytes = 0;
4425 449 : buf->padding_bytes = 0;
4426 : }
4427 2053 : if (nonzero_first == wordsize)
4428 : /* All bits in a word are 0, there are no padding bits. */
4429 946 : continue;
4430 1107 : if (all_ones && nonzero_last == endsize)
4431 : {
4432 : /* All bits between nonzero_first and end of word are padding
4433 : bits, start counting padding_bytes. */
4434 841 : padding_bytes = nonzero_last - nonzero_first;
4435 841 : continue;
4436 : }
4437 266 : if (bytes_only)
4438 : {
4439 : /* If bitfields aren't involved in this word, prefer storing
4440 : individual bytes or groups of them over performing a RMW
4441 : operation on the whole word. */
4442 227 : gcc_assert (i + zero_last <= end);
4443 1117 : for (size_t j = padding_end; j < i + zero_last; j++)
4444 : {
4445 890 : if (buf->buf[j])
4446 : {
4447 : size_t k;
4448 606 : for (k = j; k < i + zero_last; k++)
4449 606 : if (buf->buf[k] == 0)
4450 : break;
4451 259 : HOST_WIDE_INT off = buf->off + j;
4452 259 : tree atype, src;
4453 259 : if (k - j == 1)
4454 : {
4455 215 : atype = char_type_node;
4456 215 : src = build_zero_cst (char_type_node);
4457 : }
4458 : else
4459 : {
4460 44 : atype = build_array_type_nelts (char_type_node, k - j);
4461 44 : src = build_constructor (atype, NULL);
4462 : }
4463 259 : tree dst = build2_loc (buf->loc, MEM_REF, atype,
4464 : buf->base,
4465 259 : build_int_cst (buf->alias_type, off));
4466 259 : gimple *g = gimple_build_assign (dst, src);
4467 259 : gimple_set_location (g, buf->loc);
4468 259 : gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4469 259 : j = k;
4470 : }
4471 : }
4472 227 : if (nonzero_last == endsize)
4473 98 : padding_bytes = nonzero_last - zero_last;
4474 227 : continue;
4475 227 : }
4476 126 : for (size_t eltsz = 1; eltsz <= wordsize; eltsz <<= 1)
4477 : {
4478 126 : if (nonzero_last - nonzero_first <= eltsz
4479 39 : && ((nonzero_first & ~(eltsz - 1))
4480 39 : == ((nonzero_last - 1) & ~(eltsz - 1))))
4481 : {
4482 39 : tree type;
4483 39 : if (eltsz == 1)
4484 2 : type = char_type_node;
4485 : else
4486 37 : type = lang_hooks.types.type_for_size (eltsz * BITS_PER_UNIT,
4487 : 0);
4488 39 : size_t start = nonzero_first & ~(eltsz - 1);
4489 39 : HOST_WIDE_INT off = buf->off + i + start;
4490 39 : tree atype = type;
4491 39 : if (eltsz > 1 && buf->align < TYPE_ALIGN (type))
4492 0 : atype = build_aligned_type (type, buf->align);
4493 39 : tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
4494 39 : build_int_cst (buf->alias_type, off));
4495 39 : tree src;
4496 39 : gimple *g;
4497 39 : if (all_ones
4498 39 : && nonzero_first == start
4499 0 : && nonzero_last == start + eltsz)
4500 0 : src = build_zero_cst (type);
4501 : else
4502 : {
4503 39 : src = make_ssa_name (type);
4504 39 : tree tmp_dst = unshare_expr (dst);
4505 : /* The folding introduces a read from the tmp_dst, we should
4506 : prevent uninitialized warning analysis from issuing warning
4507 : for such fake read. In order to suppress warning only for
4508 : this expr, we should set the location of tmp_dst to
4509 : UNKNOWN_LOCATION first, then suppress_warning will call
4510 : set_no_warning_bit to set the no_warning flag only for
4511 : tmp_dst. */
4512 39 : SET_EXPR_LOCATION (tmp_dst, UNKNOWN_LOCATION);
4513 39 : suppress_warning (tmp_dst, OPT_Wuninitialized);
4514 39 : g = gimple_build_assign (src, tmp_dst);
4515 39 : gimple_set_location (g, buf->loc);
4516 39 : gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4517 78 : tree mask = native_interpret_expr (type,
4518 39 : buf->buf + i + start,
4519 : eltsz);
4520 39 : gcc_assert (mask && TREE_CODE (mask) == INTEGER_CST);
4521 39 : mask = fold_build1 (BIT_NOT_EXPR, type, mask);
4522 39 : tree src_masked = make_ssa_name (type);
4523 39 : g = gimple_build_assign (src_masked, BIT_AND_EXPR,
4524 : src, mask);
4525 39 : gimple_set_location (g, buf->loc);
4526 39 : gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4527 39 : src = src_masked;
4528 : }
4529 39 : g = gimple_build_assign (dst, src);
4530 39 : gimple_set_location (g, buf->loc);
4531 39 : gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4532 39 : break;
4533 : }
4534 : }
4535 : }
4536 766 : if (full)
4537 : {
4538 724 : if (padding_bytes)
4539 : {
4540 490 : tree atype, src;
4541 490 : if (padding_bytes == 1)
4542 : {
4543 110 : atype = char_type_node;
4544 110 : src = build_zero_cst (char_type_node);
4545 : }
4546 : else
4547 : {
4548 380 : atype = build_array_type_nelts (char_type_node, padding_bytes);
4549 380 : src = build_constructor (atype, NULL);
4550 : }
4551 490 : tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
4552 : build_int_cst (buf->alias_type,
4553 490 : buf->off + end
4554 490 : - padding_bytes));
4555 490 : gimple *g = gimple_build_assign (dst, src);
4556 490 : gimple_set_location (g, buf->loc);
4557 490 : gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4558 : }
4559 724 : size_t end_rem = end % UNITS_PER_WORD;
4560 724 : buf->off += end - end_rem;
4561 724 : buf->size = end_rem;
4562 724 : memset (buf->buf, 0, buf->size);
4563 724 : buf->padding_bytes = 0;
4564 : }
4565 : else
4566 : {
4567 42 : memmove (buf->buf, buf->buf + end, buf->size - end);
4568 42 : buf->off += end;
4569 42 : buf->size -= end;
4570 42 : buf->padding_bytes = padding_bytes;
4571 : }
4572 : }
4573 :
/* Append PADDING_BYTES padding bytes to BUF.  In BUF->buf a ~0 byte
   means "this byte is padding to be cleared"; the buffer is flushed
   through clear_padding_flush when it would overflow.  */

static void
clear_padding_add_padding (clear_padding_struct *buf,
			   HOST_WIDE_INT padding_bytes)
{
  if (padding_bytes == 0)
    return;
  /* If the new bytes don't fit, try a partial flush first, which keeps
     the trailing sub-word of the buffer in place.  */
  if ((unsigned HOST_WIDE_INT) padding_bytes + buf->size
      > (unsigned HOST_WIDE_INT) clear_padding_buf_size)
    clear_padding_flush (buf, false);
  if ((unsigned HOST_WIDE_INT) padding_bytes + buf->size
      > (unsigned HOST_WIDE_INT) clear_padding_buf_size)
    {
      /* The run of padding is longer than the whole buffer.  Fill the
	 buffer with ~0, flush, and account for the remainder by bumping
	 buf->off/buf->padding_bytes directly, so only the final partial
	 word is kept in the buffer.  */
      memset (buf->buf + buf->size, ~0, clear_padding_buf_size - buf->size);
      padding_bytes -= clear_padding_buf_size - buf->size;
      buf->size = clear_padding_buf_size;
      clear_padding_flush (buf, false);
      gcc_assert (buf->padding_bytes);
      /* At this point buf->buf[0] through buf->buf[buf->size - 1]
	 is guaranteed to be all ones.  */
      padding_bytes += buf->size;
      buf->size = padding_bytes % UNITS_PER_WORD;
      memset (buf->buf, ~0, buf->size);
      buf->off += padding_bytes - buf->size;
      buf->padding_bytes += padding_bytes - buf->size;
    }
  else
    {
      /* Common case: the padding fits, just mark the bytes.  */
      memset (buf->buf + buf->size, ~0, padding_bytes);
      buf->size += padding_bytes;
    }
}
4607 :
4608 : static void clear_padding_type (clear_padding_struct *, tree,
4609 : HOST_WIDE_INT, bool);
4610 :
/* Clear padding bits of union type TYPE with size SZ bytes.

   A bit of a union is padding only if it is padding in *every* member,
   so each member is processed into an all-ones scratch mask and the
   masks are intersected (clear_padding_flush in mask mode ANDs the
   scratch buffer into union_ptr).  Two modes:
   - BUF->union_ptr already set (we are inside an outer union): reuse
     BUF itself, replaying each member over the same offsets;
   - otherwise: build a temporary mask-mode buffer whose union_ptr
     points into BUF's free space (or heap for oversized unions), and
     afterwards treat the accumulated mask as ordinary BUF content.  */

static void
clear_padding_union (clear_padding_struct *buf, tree type,
		     HOST_WIDE_INT sz, bool for_auto_init)
{
  clear_padding_struct *union_buf;
  HOST_WIDE_INT start_off = 0, next_off = 0;
  size_t start_size = 0;
  if (buf->union_ptr)
    {
      /* Nested union: remember where the union starts (word-aligned
	 down, with START_SIZE bytes of pre-padding replayed for each
	 member) and flush what has been accumulated so far.  */
      start_off = buf->off + buf->size;
      next_off = start_off + sz;
      start_size = start_off % UNITS_PER_WORD;
      start_off -= start_size;
      clear_padding_flush (buf, true);
      union_buf = buf;
    }
  else
    {
      if (sz + buf->size > clear_padding_buf_size)
	clear_padding_flush (buf, false);
      /* Fresh mask-mode buffer; union_ptr starts all-ones, i.e.
	 "everything is padding until a member proves otherwise".  */
      union_buf = XALLOCA (clear_padding_struct);
      union_buf->loc = buf->loc;
      union_buf->clear_in_mask = buf->clear_in_mask;
      union_buf->base = NULL_TREE;
      union_buf->alias_type = NULL_TREE;
      union_buf->gsi = NULL;
      union_buf->align = 0;
      union_buf->off = 0;
      union_buf->padding_bytes = 0;
      union_buf->sz = sz;
      union_buf->size = 0;
      if (sz + buf->size <= clear_padding_buf_size)
	union_buf->union_ptr = buf->buf + buf->size;
      else
	union_buf->union_ptr = XNEWVEC (unsigned char, sz);
      memset (union_buf->union_ptr, ~0, sz);
    }

  /* Intersect the padding masks of all non-padding FIELD_DECLs.  */
  for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL && !DECL_PADDING_P (field))
      {
	if (DECL_SIZE_UNIT (field) == NULL_TREE)
	  {
	    if (TREE_TYPE (field) == error_mark_node)
	      continue;
	    /* Only flexible array members may lack DECL_SIZE_UNIT;
	       their padding is not well defined — diagnose unless this
	       is compiler-inserted auto-init or mask computation.  */
	    gcc_assert (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
			&& !COMPLETE_TYPE_P (TREE_TYPE (field)));
	    if (!buf->clear_in_mask && !for_auto_init)
	      error_at (buf->loc, "flexible array member %qD does not have "
				  "well defined padding bits for %qs",
			field, "__builtin_clear_padding");
	    continue;
	  }
	HOST_WIDE_INT fldsz = tree_to_shwi (DECL_SIZE_UNIT (field));
	gcc_assert (union_buf->size == 0);
	/* Replay each member from the union's start position.  */
	union_buf->off = start_off;
	union_buf->size = start_size;
	memset (union_buf->buf, ~0, start_size);
	clear_padding_type (union_buf, TREE_TYPE (field), fldsz, for_auto_init);
	/* Bytes past the member up to the union's size are padding for
	   this member.  */
	clear_padding_add_padding (union_buf, sz - fldsz);
	clear_padding_flush (union_buf, true);
      }

  if (buf == union_buf)
    {
      /* Nested union: position BUF just past the union, keeping the
	 trailing partial word as all-ones padding.  */
      buf->off = next_off;
      buf->size = next_off % UNITS_PER_WORD;
      buf->off -= buf->size;
      memset (buf->buf, ~0, buf->size);
    }
  else if (sz + buf->size <= clear_padding_buf_size)
    /* Mask was built in place inside BUF's buffer; just claim it.  */
    buf->size += sz;
  else
    {
      /* Heap-allocated mask: stream it into BUF in buffer-sized
	 chunks, flushing between chunks.  */
      unsigned char *union_ptr = union_buf->union_ptr;
      while (sz)
	{
	  clear_padding_flush (buf, false);
	  HOST_WIDE_INT this_sz
	    = MIN ((unsigned HOST_WIDE_INT) sz,
		   clear_padding_buf_size - buf->size);
	  memcpy (buf->buf + buf->size, union_ptr, this_sz);
	  buf->size += this_sz;
	  union_ptr += this_sz;
	  sz -= this_sz;
	}
      XDELETE (union_buf->union_ptr);
    }
}
4702 :
4703 : /* The only known floating point formats with padding bits are the
4704 : IEEE extended ones. */
4705 :
4706 : static bool
4707 35040 : clear_padding_real_needs_padding_p (tree type)
4708 : {
4709 35040 : const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
4710 35040 : return (fmt->b == 2
4711 34581 : && fmt->signbit_ro == fmt->signbit_rw
4712 69621 : && (fmt->signbit_ro == 79 || fmt->signbit_ro == 95));
4713 : }
4714 :
4715 : /* _BitInt has padding bits if it isn't extended in the ABI and has smaller
4716 : precision than bits in limb or corresponding number of limbs. */
4717 :
4718 : static bool
4719 54 : clear_padding_bitint_needs_padding_p (tree type)
4720 : {
4721 54 : struct bitint_info info;
4722 54 : bool ok = targetm.c.bitint_type_info (TYPE_PRECISION (type), &info);
4723 54 : gcc_assert (ok);
4724 54 : if (info.extended)
4725 : return false;
4726 54 : scalar_int_mode limb_mode = as_a <scalar_int_mode> (info.abi_limb_mode);
4727 54 : if (TYPE_PRECISION (type) < GET_MODE_PRECISION (limb_mode))
4728 : return true;
4729 4 : else if (TYPE_PRECISION (type) == GET_MODE_PRECISION (limb_mode))
4730 : return false;
4731 : else
4732 4 : return (((unsigned) TYPE_PRECISION (type))
4733 4 : % GET_MODE_PRECISION (limb_mode)) != 0;
4734 : }
4735 :
4736 : /* Return true if TYPE might contain any padding bits. */
4737 :
4738 : bool
4739 912756 : clear_padding_type_may_have_padding_p (tree type)
4740 : {
4741 1048766 : switch (TREE_CODE (type))
4742 : {
4743 : case RECORD_TYPE:
4744 : case UNION_TYPE:
4745 : return true;
4746 136010 : case ARRAY_TYPE:
4747 136010 : case COMPLEX_TYPE:
4748 136010 : case VECTOR_TYPE:
4749 136010 : return clear_padding_type_may_have_padding_p (TREE_TYPE (type));
4750 1771 : case REAL_TYPE:
4751 1771 : return clear_padding_real_needs_padding_p (type);
4752 54 : case BITINT_TYPE:
4753 54 : return clear_padding_bitint_needs_padding_p (type);
4754 40615 : default:
4755 40615 : return false;
4756 : }
4757 : }
4758 :
/* Return true if TYPE has padding bits aside from those in fields,
   elements, etc., i.e. padding introduced at this level of the type
   itself (gaps between fields, trailing bytes, undersized union
   members).  */

bool
type_has_padding_at_level_p (tree type)
{
  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      {
	/* A record has level-padding unless its fields tile the whole
	   size contiguously: each field must start exactly where the
	   previous one ended and the last must end at TYPE_SIZE.  */
	tree bitpos = size_zero_node;
	/* Expect fields to be sorted by bit position.  */
	for (tree f = TYPE_FIELDS (type); f; f = DECL_CHAIN (f))
	  if (TREE_CODE (f) == FIELD_DECL)
	    {
	      /* An explicit padding field is padding by definition.  */
	      if (DECL_PADDING_P (f))
		return true;
	      tree pos = bit_position (f);
	      /* Gap (or overlap/unordered fields) before this field.
		 simple_cst_equal returns 1 only on a definite match,
		 so any uncertainty counts as padding.  */
	      if (simple_cst_equal (bitpos, pos) != 1)
		return true;
	      /* Field without a size (e.g. flexible array member).  */
	      if (!DECL_SIZE (f))
		return true;
	      bitpos = int_const_binop (PLUS_EXPR, pos, DECL_SIZE (f));
	    }
	/* Trailing padding after the last field.  */
	if (simple_cst_equal (bitpos, TYPE_SIZE (type)) != 1)
	  return true;
	return false;
      }
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      bool any_fields;
      any_fields = false;
      /* If any of the fields is smaller than the whole, there is padding.  */
      for (tree f = TYPE_FIELDS (type); f; f = DECL_CHAIN (f))
	if (TREE_CODE (f) != FIELD_DECL || TREE_TYPE (f) == error_mark_node)
	  continue;
	else if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
				   TYPE_SIZE (type)) != 1)
	  return true;
	else
	  any_fields = true;
      /* If the union doesn't have any fields and still has non-zero size,
	 all of it is padding.  */
      if (!any_fields && !integer_zerop (TYPE_SIZE (type)))
	return true;
      return false;
    case ARRAY_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
      /* No recursing here, no padding at this level.  */
      return false;
    case REAL_TYPE:
      return clear_padding_real_needs_padding_p (type);
    case BITINT_TYPE:
      return clear_padding_bitint_needs_padding_p (type);
    default:
      return false;
    }
}
4818 :
/* Emit a runtime loop:
   for (; buf.base != end; buf.base += sz)
     __builtin_clear_padding (buf.base);

   The loop is built out of explicit labels and a conditional jump
   because this runs before the CFG is built.  BUF->base must be a
   pointer variable positioned at the first element; END is the
   one-past-the-end pointer; BUF->sz is the element size.  */

static void
clear_padding_emit_loop (clear_padding_struct *buf, tree type,
			 tree end, bool for_auto_init)
{
  tree l1 = create_artificial_label (buf->loc);	/* loop body  */
  tree l2 = create_artificial_label (buf->loc);	/* loop test  */
  tree l3 = create_artificial_label (buf->loc);	/* loop exit  */
  /* Jump to the test first so a zero-iteration loop does nothing.  */
  gimple *g = gimple_build_goto (l2);
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
  g = gimple_build_label (l1);
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
  /* Body: clear padding of one element and flush all pending stores.  */
  clear_padding_type (buf, type, buf->sz, for_auto_init);
  clear_padding_flush (buf, true);
  /* buf.base += sz;  */
  g = gimple_build_assign (buf->base, POINTER_PLUS_EXPR, buf->base,
			   size_int (buf->sz));
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
  g = gimple_build_label (l2);
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
  /* if (buf.base != end) goto body; else goto exit;  */
  g = gimple_build_cond (NE_EXPR, buf->base, end, l1, l3);
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
  g = gimple_build_label (l3);
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
}
4852 :
/* Clear padding bits for TYPE.  Called recursively from
   gimple_fold_builtin_clear_padding.  If FOR_AUTO_INIT is true,
   the __builtin_clear_padding is not called by the end user,
   instead, it's inserted by the compiler to initialize the
   paddings of automatic variable.  Therefore, we should not
   emit the error messages for flexible array members to confuse
   the end user.

   SZ is the size of TYPE in bytes.  The function appends SZ bytes to
   BUF's pending buffer: ~0 bytes (or set bits) where padding must be
   cleared, 0 where real data lives.  */

static void
clear_padding_type (clear_padding_struct *buf, tree type,
		    HOST_WIDE_INT sz, bool for_auto_init)
{
  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      /* CUR_POS tracks how many bytes of the record have been emitted
	 into BUF so far; gaps between fields become padding bytes.  */
      HOST_WIDE_INT cur_pos;
      cur_pos = 0;
      for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL && !DECL_PADDING_P (field))
	  {
	    tree ftype = TREE_TYPE (field);
	    if (DECL_BIT_FIELD (field))
	      {
		/* Bit-field: first mark all bytes it touches as padding,
		   then clear the bits actually occupied by the field in
		   the just-emitted bytes.  FLDSZ is in bits here.  */
		HOST_WIDE_INT fldsz = TYPE_PRECISION (ftype);
		if (fldsz == 0)
		  continue;
		HOST_WIDE_INT pos = int_byte_position (field);
		if (pos >= sz)
		  continue;
		/* BPOS is the bit offset within the first byte.  */
		HOST_WIDE_INT bpos
		  = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
		bpos %= BITS_PER_UNIT;
		HOST_WIDE_INT end
		  = ROUND_UP (bpos + fldsz, BITS_PER_UNIT) / BITS_PER_UNIT;
		if (pos + end > cur_pos)
		  {
		    clear_padding_add_padding (buf, pos + end - cur_pos);
		    cur_pos = pos + end;
		  }
		gcc_assert (cur_pos > pos
			    && ((unsigned HOST_WIDE_INT) buf->size
				>= (unsigned HOST_WIDE_INT) cur_pos - pos));
		/* P points at the first buffered byte of the field.  */
		unsigned char *p = buf->buf + buf->size - (cur_pos - pos);
		if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
		  sorry_at (buf->loc, "PDP11 bit-field handling unsupported"
				      " in %qs", "__builtin_clear_padding");
		else if (BYTES_BIG_ENDIAN)
		  {
		    /* Big endian.  */
		    if (bpos + fldsz <= BITS_PER_UNIT)
		      *p &= ~(((1 << fldsz) - 1)
			      << (BITS_PER_UNIT - bpos - fldsz));
		    else
		      {
			/* Clear partial leading byte, whole middle
			   bytes, then partial trailing byte.  */
			if (bpos)
			  {
			    *p &= ~(((1U << BITS_PER_UNIT) - 1) >> bpos);
			    p++;
			    fldsz -= BITS_PER_UNIT - bpos;
			  }
			memset (p, 0, fldsz / BITS_PER_UNIT);
			p += fldsz / BITS_PER_UNIT;
			fldsz %= BITS_PER_UNIT;
			if (fldsz)
			  *p &= ((1U << BITS_PER_UNIT) - 1) >> fldsz;
		      }
		  }
		else
		  {
		    /* Little endian.  */
		    if (bpos + fldsz <= BITS_PER_UNIT)
		      *p &= ~(((1 << fldsz) - 1) << bpos);
		    else
		      {
			if (bpos)
			  {
			    *p &= ~(((1 << BITS_PER_UNIT) - 1) << bpos);
			    p++;
			    fldsz -= BITS_PER_UNIT - bpos;
			  }
			memset (p, 0, fldsz / BITS_PER_UNIT);
			p += fldsz / BITS_PER_UNIT;
			fldsz %= BITS_PER_UNIT;
			if (fldsz)
			  *p &= ~((1 << fldsz) - 1);
		      }
		  }
	      }
	    else if (DECL_SIZE_UNIT (field) == NULL_TREE)
	      {
		/* Flexible array member: padding not well defined;
		   diagnose for user-written calls only.  */
		if (ftype == error_mark_node)
		  continue;
		gcc_assert (TREE_CODE (ftype) == ARRAY_TYPE
			    && !COMPLETE_TYPE_P (ftype));
		if (!buf->clear_in_mask && !for_auto_init)
		  error_at (buf->loc, "flexible array member %qD does not "
				      "have well defined padding bits for %qs",
			    field, "__builtin_clear_padding");
	      }
	    else if (is_empty_type (ftype))
	      continue;
	    else
	      {
		/* Ordinary field: pad the gap before it, then recurse.  */
		HOST_WIDE_INT pos = int_byte_position (field);
		if (pos >= sz)
		  continue;
		HOST_WIDE_INT fldsz = tree_to_shwi (DECL_SIZE_UNIT (field));
		gcc_assert (pos >= 0 && fldsz >= 0 && pos >= cur_pos);
		clear_padding_add_padding (buf, pos - cur_pos);
		cur_pos = pos;
		/* For C++ base subobjects use the as-base layout so
		   tail padding is treated correctly.  */
		if (tree asbase = lang_hooks.types.classtype_as_base (field))
		  ftype = asbase;
		clear_padding_type (buf, ftype, fldsz, for_auto_init);
		cur_pos += fldsz;
	      }
	  }
      gcc_assert (sz >= cur_pos);
      /* Everything past the last field is trailing padding.  */
      clear_padding_add_padding (buf, sz - cur_pos);
      break;
    case ARRAY_TYPE:
      HOST_WIDE_INT nelts, fldsz;
      fldsz = int_size_in_bytes (TREE_TYPE (type));
      if (fldsz == 0)
	break;
      nelts = sz / fldsz;
      if (nelts > 1
	  && sz > 8 * UNITS_PER_WORD
	  && buf->union_ptr == NULL
	  && clear_padding_type_may_have_padding_p (TREE_TYPE (type)))
	{
	  /* For sufficiently large array of more than one elements,
	     emit a runtime loop to keep code size manageable.  */
	  tree base = buf->base;
	  unsigned int prev_align = buf->align;
	  HOST_WIDE_INT off = buf->off + buf->size;
	  HOST_WIDE_INT prev_sz = buf->sz;
	  clear_padding_flush (buf, true);
	  tree elttype = TREE_TYPE (type);
	  /* Retarget BUF at a fresh element pointer for the loop, then
	     restore its previous state afterwards.  */
	  buf->base = create_tmp_var (build_pointer_type (elttype));
	  tree end = make_ssa_name (TREE_TYPE (buf->base));
	  gimple *g = gimple_build_assign (buf->base, POINTER_PLUS_EXPR,
					   base, size_int (off));
	  gimple_set_location (g, buf->loc);
	  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
	  g = gimple_build_assign (end, POINTER_PLUS_EXPR, buf->base,
				   size_int (sz));
	  gimple_set_location (g, buf->loc);
	  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
	  buf->sz = fldsz;
	  buf->align = TYPE_ALIGN (elttype);
	  buf->off = 0;
	  buf->size = 0;
	  clear_padding_emit_loop (buf, elttype, end, for_auto_init);
	  off += sz;
	  buf->base = base;
	  buf->sz = prev_sz;
	  buf->align = prev_align;
	  buf->size = off % UNITS_PER_WORD;
	  buf->off = off - buf->size;
	  memset (buf->buf, 0, buf->size);
	  break;
	}
      /* Small array: just expand each element inline.  */
      for (HOST_WIDE_INT i = 0; i < nelts; i++)
	clear_padding_type (buf, TREE_TYPE (type), fldsz, for_auto_init);
      break;
    case UNION_TYPE:
      clear_padding_union (buf, type, sz, for_auto_init);
      break;
    case REAL_TYPE:
      gcc_assert ((size_t) sz <= clear_padding_unit);
      if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
	clear_padding_flush (buf, false);
      if (clear_padding_real_needs_padding_p (type))
	{
	  /* Use native_interpret_real + native_encode_expr to figure out
	     which bits are padding.  Round-tripping an all-ones pattern
	     leaves value bits set and padding bits clear; XOR inverts
	     that into BUF's "set bit == padding" convention.  */
	  memset (buf->buf + buf->size, ~0, sz);
	  tree cst = native_interpret_real (type, buf->buf + buf->size, sz);
	  gcc_assert (cst && TREE_CODE (cst) == REAL_CST);
	  int len = native_encode_expr (cst, buf->buf + buf->size, sz);
	  gcc_assert (len > 0 && (size_t) len == (size_t) sz);
	  for (size_t i = 0; i < (size_t) sz; i++)
	    buf->buf[buf->size + i] ^= ~0;
	}
      else
	memset (buf->buf + buf->size, 0, sz);
      buf->size += sz;
      break;
    case COMPLEX_TYPE:
      /* Real part followed by imaginary part.  */
      fldsz = int_size_in_bytes (TREE_TYPE (type));
      clear_padding_type (buf, TREE_TYPE (type), fldsz, for_auto_init);
      clear_padding_type (buf, TREE_TYPE (type), fldsz, for_auto_init);
      break;
    case VECTOR_TYPE:
      nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
      fldsz = int_size_in_bytes (TREE_TYPE (type));
      for (HOST_WIDE_INT i = 0; i < nelts; i++)
	clear_padding_type (buf, TREE_TYPE (type), fldsz, for_auto_init);
      break;
    case NULLPTR_TYPE:
      /* std::nullptr_t carries no value bits; the whole object is
	 treated as padding.  */
      gcc_assert ((size_t) sz <= clear_padding_unit);
      if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
	clear_padding_flush (buf, false);
      memset (buf->buf + buf->size, ~0, sz);
      buf->size += sz;
      break;
    case BITINT_TYPE:
      {
	struct bitint_info info;
	bool ok = targetm.c.bitint_type_info (TYPE_PRECISION (type), &info);
	gcc_assert (ok);
	scalar_int_mode limb_mode
	  = as_a <scalar_int_mode> (info.abi_limb_mode);
	if (TYPE_PRECISION (type) <= GET_MODE_PRECISION (limb_mode))
	  {
	    /* Single-limb _BitInt: bits above the precision are padding
	       unless the ABI extends them; encode that mask directly.  */
	    gcc_assert ((size_t) sz <= clear_padding_unit);
	    if ((unsigned HOST_WIDE_INT) sz + buf->size
		> clear_padding_buf_size)
	      clear_padding_flush (buf, false);
	    if (!info.extended
		&& TYPE_PRECISION (type) < GET_MODE_PRECISION (limb_mode))
	      {
		int tprec = GET_MODE_PRECISION (limb_mode);
		int prec = TYPE_PRECISION (type);
		tree t = build_nonstandard_integer_type (tprec, 1);
		tree cst = wide_int_to_tree (t, wi::mask (prec, true, tprec));
		int len = native_encode_expr (cst, buf->buf + buf->size, sz);
		gcc_assert (len > 0 && (size_t) len == (size_t) sz);
	      }
	    else
	      memset (buf->buf + buf->size, 0, sz);
	    buf->size += sz;
	    break;
	  }
	/* Multi-limb _BitInt: all limbs are data except possibly the
	   most significant one, which can have padding above the
	   precision remainder.  */
	tree limbtype
	  = build_nonstandard_integer_type (GET_MODE_PRECISION (limb_mode), 1);
	fldsz = int_size_in_bytes (limbtype);
	nelts = int_size_in_bytes (type) / fldsz;
	for (HOST_WIDE_INT i = 0; i < nelts; i++)
	  {
	    if (!info.extended
		&& i == (info.big_endian ? 0 : nelts - 1)
		&& (((unsigned) TYPE_PRECISION (type))
		    % TYPE_PRECISION (limbtype)) != 0)
	      {
		/* Most significant limb with a partial precision.  */
		int tprec = GET_MODE_PRECISION (limb_mode);
		int prec = (((unsigned) TYPE_PRECISION (type)) % tprec);
		tree cst = wide_int_to_tree (limbtype,
					     wi::mask (prec, true, tprec));
		int len = native_encode_expr (cst, buf->buf + buf->size,
					      fldsz);
		gcc_assert (len > 0 && (size_t) len == (size_t) fldsz);
		buf->size += fldsz;
	      }
	    else
	      clear_padding_type (buf, limbtype, fldsz, for_auto_init);
	  }
	break;
      }
    default:
      /* Any other scalar: no padding, emit SZ zero (data) bytes.  */
      gcc_assert ((size_t) sz <= clear_padding_unit);
      if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
	clear_padding_flush (buf, false);
      memset (buf->buf + buf->size, 0, sz);
      buf->size += sz;
      break;
    }
}
5121 :
5122 : /* Clear padding bits of TYPE in MASK. */
5123 :
5124 : void
5125 33283 : clear_type_padding_in_mask (tree type, unsigned char *mask)
5126 : {
5127 33283 : clear_padding_struct buf;
5128 33283 : buf.loc = UNKNOWN_LOCATION;
5129 33283 : buf.clear_in_mask = true;
5130 33283 : buf.base = NULL_TREE;
5131 33283 : buf.alias_type = NULL_TREE;
5132 33283 : buf.gsi = NULL;
5133 33283 : buf.align = 0;
5134 33283 : buf.off = 0;
5135 33283 : buf.padding_bytes = 0;
5136 33283 : buf.sz = int_size_in_bytes (type);
5137 33283 : buf.size = 0;
5138 33283 : buf.union_ptr = mask;
5139 33283 : clear_padding_type (&buf, type, buf.sz, false);
5140 33283 : clear_padding_flush (&buf, true);
5141 33283 : }
5142 :
/* Fold __builtin_clear_padding builtin.  Expands the call at *GSI into
   explicit stores (and possibly runtime loops) that zero the padding
   bits of the pointed-to object, then removes the call.  Always
   returns true (the call is always folded away).  */

static bool
gimple_fold_builtin_clear_padding (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  gcc_assert (gimple_call_num_args (stmt) == 2);
  tree ptr = gimple_call_arg (stmt, 0);
  tree typearg = gimple_call_arg (stmt, 1);
  /* The 2nd argument of __builtin_clear_padding's value is used to
     distinguish whether this call is made by the user or by the compiler
     for automatic variable initialization.  */
  bool for_auto_init = (bool) TREE_INT_CST_LOW (typearg);
  /* The object type is carried as the pointee of TYPEARG's type.  */
  tree type = TREE_TYPE (TREE_TYPE (typearg));
  location_t loc = gimple_location (stmt);
  clear_padding_struct buf;
  /* Remember the statement before the call so we can tell afterwards
     whether any statements were emitted.  */
  gimple_stmt_iterator gsiprev = *gsi;
  /* This should be folded during the lower pass.  */
  gcc_assert (!gimple_in_ssa_p (cfun) && cfun->cfg == NULL);
  gcc_assert (COMPLETE_TYPE_P (type));
  gsi_prev (&gsiprev);

  buf.loc = loc;
  buf.clear_in_mask = false;
  buf.base = ptr;
  buf.alias_type = NULL_TREE;
  buf.gsi = gsi;
  /* Alignment is the max of the pointer's known alignment and the
     type's minimum alignment.  */
  buf.align = get_pointer_alignment (ptr);
  unsigned int talign = min_align_of_type (type) * BITS_PER_UNIT;
  buf.align = MAX (buf.align, talign);
  buf.off = 0;
  buf.padding_bytes = 0;
  buf.size = 0;
  buf.sz = int_size_in_bytes (type);
  buf.union_ptr = NULL;
  if (buf.sz < 0 && int_size_in_bytes (strip_array_types (type)) < 0)
    sorry_at (loc, "%s not supported for variable length aggregates",
	      "__builtin_clear_padding");
  /* The implementation currently assumes 8-bit host and target
     chars which is the case for all currently supported targets
     and hosts and is required e.g. for native_{encode,interpret}* APIs.  */
  else if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
    sorry_at (loc, "%s not supported on this target",
	      "__builtin_clear_padding");
  else if (!clear_padding_type_may_have_padding_p (type))
    ;	/* Nothing to clear; just delete the call below.  */
  else if (TREE_CODE (type) == ARRAY_TYPE && buf.sz < 0)
    {
      /* VLA of a fixed-size element: emit a runtime loop over the
	 elements.  */
      tree sz = TYPE_SIZE_UNIT (type);
      tree elttype = type;
      /* Only supports C/C++ VLAs and flattens all the VLA levels.  */
      while (TREE_CODE (elttype) == ARRAY_TYPE
	     && int_size_in_bytes (elttype) < 0)
	elttype = TREE_TYPE (elttype);
      HOST_WIDE_INT eltsz = int_size_in_bytes (elttype);
      gcc_assert (eltsz >= 0);
      if (eltsz)
	{
	  buf.base = create_tmp_var (build_pointer_type (elttype));
	  tree end = make_ssa_name (TREE_TYPE (buf.base));
	  gimple *g = gimple_build_assign (buf.base, ptr);
	  gimple_set_location (g, loc);
	  gsi_insert_before (gsi, g, GSI_SAME_STMT);
	  g = gimple_build_assign (end, POINTER_PLUS_EXPR, buf.base, sz);
	  gimple_set_location (g, loc);
	  gsi_insert_before (gsi, g, GSI_SAME_STMT);
	  buf.sz = eltsz;
	  buf.align = TYPE_ALIGN (elttype);
	  buf.alias_type = build_pointer_type (elttype);
	  clear_padding_emit_loop (&buf, elttype, end, for_auto_init);
	}
    }
  else
    {
      /* Fixed-size object: expand stores inline.  Force PTR into a
	 form usable as a MEM_REF base first if needed.  */
      if (!is_gimple_mem_ref_addr (buf.base))
	{
	  buf.base = make_ssa_name (TREE_TYPE (ptr));
	  gimple *g = gimple_build_assign (buf.base, ptr);
	  gimple_set_location (g, loc);
	  gsi_insert_before (gsi, g, GSI_SAME_STMT);
	}
      buf.alias_type = build_pointer_type (type);
      clear_padding_type (&buf, type, buf.sz, for_auto_init);
      clear_padding_flush (&buf, true);
    }

  /* Remove the call.  If no statements were inserted, replace it with
     a nop so the iterator stays valid; otherwise remove it and leave
     the iterator on the last inserted statement.  */
  gimple_stmt_iterator gsiprev2 = *gsi;
  gsi_prev (&gsiprev2);
  if (gsi_stmt (gsiprev) == gsi_stmt (gsiprev2))
    gsi_replace (gsi, gimple_build_nop (), true);
  else
    {
      gsi_remove (gsi, true);
      *gsi = gsiprev2;
    }
  return true;
}
5240 :
5241 : /* Fold __builtin_constant_p builtin. */
5242 :
5243 : static bool
5244 94894 : gimple_fold_builtin_constant_p (gimple_stmt_iterator *gsi)
5245 : {
5246 94894 : gcall *call = as_a<gcall*>(gsi_stmt (*gsi));
5247 :
5248 94894 : if (gimple_call_num_args (call) != 1)
5249 : return false;
5250 :
5251 94889 : tree arg = gimple_call_arg (call, 0);
5252 94889 : tree result = fold_builtin_constant_p (arg);
5253 :
5254 : /* Resolve __builtin_constant_p. If it hasn't been
5255 : folded to integer_one_node by now, it's fairly
5256 : certain that the value simply isn't constant. */
5257 188711 : if (!result && fold_before_rtl_expansion_p ())
5258 3 : result = integer_zero_node;
5259 :
5260 94889 : if (!result)
5261 : return false;
5262 :
5263 1070 : gimplify_and_update_call_from_tree (gsi, result);
5264 1070 : return true;
5265 : }
5266 :
/* If va_list type is a simple pointer and nothing special is needed,
   optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
   __builtin_va_end (&ap) out as NOP and __builtin_va_copy into a simple
   pointer assignment.  Returns true if a change happened. */

static bool
gimple_fold_builtin_stdarg (gimple_stmt_iterator *gsi, gcall *call)
{
  /* These shouldn't be folded before pass_stdarg.  */
  if (!fold_before_rtl_expansion_p ())
    return false;

  tree callee, lhs, rhs, cfun_va_list;
  bool va_list_simple_ptr;
  location_t loc = gimple_location (call);
  gimple *nstmt0, *nstmt;
  tree tlhs, oldvdef, newvdef;

  callee = gimple_call_fndecl (call);

  /* The lowering below is only valid when the ABI va_list is a plain
     void* or char* pointer; anything fancier needs real va_* handling.  */
  cfun_va_list = targetm.fn_abi_va_list (callee);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
		       && (TREE_TYPE (cfun_va_list) == void_type_node
			   || TREE_TYPE (cfun_va_list) == char_type_node);

  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_VA_START:
      /* Only lower va_start when the target has no special expander and
	 __builtin_next_arg is available to compute the start address.  */
      if (!va_list_simple_ptr
	  || targetm.expand_builtin_va_start != NULL
	  || !builtin_decl_explicit_p (BUILT_IN_NEXT_ARG))
	return false;

      if (gimple_call_num_args (call) != 2)
	return false;

      /* The first argument must be a pointer to the va_list object.  */
      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
	  || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
	     != TYPE_MAIN_VARIANT (cfun_va_list))
	return false;
      /* Create `tlhs = __builtin_next_arg(0);`. */
      tlhs = make_ssa_name (cfun_va_list);
      nstmt0 = gimple_build_call (builtin_decl_explicit (BUILT_IN_NEXT_ARG), 1, integer_zero_node);
      lhs = fold_build2 (MEM_REF, cfun_va_list, lhs, build_zero_cst (TREE_TYPE (lhs)));
      gimple_call_set_lhs (nstmt0, tlhs);
      gimple_set_location (nstmt0, loc);
      gimple_move_vops (nstmt0, call);
      gsi_replace (gsi, nstmt0, false);
      /* The old call's vdef must end up produced by the final store below,
	 so that downstream vuses stay valid; give the __builtin_next_arg
	 call a fresh intermediate vdef and thread the old one through.  */
      oldvdef = gimple_vdef (nstmt0);
      newvdef = make_ssa_name (gimple_vop (cfun), nstmt0);
      gimple_set_vdef (nstmt0, newvdef);

      /* Create `*lhs = tlhs;`. */
      nstmt = gimple_build_assign (lhs, tlhs);
      gimple_set_location (nstmt, loc);
      gimple_set_vuse (nstmt, newvdef);
      gimple_set_vdef (nstmt, oldvdef);
      SSA_NAME_DEF_STMT (oldvdef) = nstmt;
      gsi_insert_after (gsi, nstmt, GSI_NEW_STMT);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Simplified\n ");
	  print_gimple_stmt (dump_file, call, 0, dump_flags);
	  fprintf (dump_file, "into\n ");
	  print_gimple_stmt (dump_file, nstmt0, 0, dump_flags);
	  fprintf (dump_file, " ");
	  print_gimple_stmt (dump_file, nstmt, 0, dump_flags);
	}
      return true;

    case BUILT_IN_VA_COPY:
      if (!va_list_simple_ptr)
	return false;

      if (gimple_call_num_args (call) != 2)
	return false;

      /* Destination: pointer to the va_list being written.  */
      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
	  || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
	     != TYPE_MAIN_VARIANT (cfun_va_list))
	return false;
      /* Source: the va_list value itself.  */
      rhs = gimple_call_arg (call, 1);
      if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
	  != TYPE_MAIN_VARIANT (cfun_va_list))
	return false;

      /* Replace the call with a plain pointer store `*lhs = rhs;`.  */
      lhs = fold_build2 (MEM_REF, cfun_va_list, lhs, build_zero_cst (TREE_TYPE (lhs)));
      nstmt = gimple_build_assign (lhs, rhs);
      gimple_set_location (nstmt, loc);
      gimple_move_vops (nstmt, call);
      gsi_replace (gsi, nstmt, false);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Simplified\n ");
	  print_gimple_stmt (dump_file, call, 0, dump_flags);
	  fprintf (dump_file, "into\n ");
	  print_gimple_stmt (dump_file, nstmt, 0, dump_flags);
	}
      return true;

    case BUILT_IN_VA_END:
      /* No effect, so the statement will be deleted. */
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Removed\n ");
	  print_gimple_stmt (dump_file, call, 0, dump_flags);
	}
      unlink_stmt_vdef (call);
      release_defs (call);
      gsi_replace (gsi, gimple_build_nop (), false);
      return true;

    default:
      /* Caller only dispatches the three va_* codes here.  */
      gcc_unreachable ();
    }
}
5387 :
/* Fold the non-target builtin at *GSI and return whether any simplification
   was made.  */

static bool
gimple_fold_builtin (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
  tree callee = gimple_call_fndecl (stmt);

  /* Give up for always_inline inline builtins until they are
     inlined.  */
  if (avoid_folding_inline_builtin (callee))
    return false;

  /* N is only consulted by the variadic printf-family cases below.  */
  unsigned n = gimple_call_num_args (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
  /* Dispatch to the dedicated folder for each builtin; each one either
     performs the replacement itself and returns true, or returns false.  */
  switch (fcode)
    {
    case BUILT_IN_VA_START:
    case BUILT_IN_VA_END:
    case BUILT_IN_VA_COPY:
      return gimple_fold_builtin_stdarg (gsi, stmt);
    case BUILT_IN_BCMP:
      return gimple_fold_builtin_bcmp (gsi);
    case BUILT_IN_BCOPY:
      return gimple_fold_builtin_bcopy (gsi);
    case BUILT_IN_BZERO:
      return gimple_fold_builtin_bzero (gsi);

    case BUILT_IN_MEMSET:
      return gimple_fold_builtin_memset (gsi,
					 gimple_call_arg (stmt, 1),
					 gimple_call_arg (stmt, 2));
    case BUILT_IN_MEMPCPY:
      /* First try the generic memory-op folder; fall back to the
	 mempcpy-specific one if it doesn't apply.  */
      if (gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1), fcode))
	return true;
      return gimple_fold_builtin_mempcpy (gsi);
    case BUILT_IN_MEMCPY:
    case BUILT_IN_MEMMOVE:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), fcode);
    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      return gimple_fold_builtin_sprintf_chk (gsi, fcode);
    case BUILT_IN_STRCAT_CHK:
      return gimple_fold_builtin_strcat_chk (gsi);
    case BUILT_IN_STRNCAT_CHK:
      return gimple_fold_builtin_strncat_chk (gsi);
    case BUILT_IN_STRLEN:
      return gimple_fold_builtin_strlen (gsi);
    case BUILT_IN_STRCPY:
      return gimple_fold_builtin_strcpy (gsi,
					 gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCPY:
      return gimple_fold_builtin_strncpy (gsi,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2));
    case BUILT_IN_STRCAT:
      return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCAT:
      return gimple_fold_builtin_strncat (gsi);
    case BUILT_IN_INDEX:
    case BUILT_IN_STRCHR:
      return gimple_fold_builtin_strchr (gsi, false);
    case BUILT_IN_RINDEX:
    case BUILT_IN_STRRCHR:
      return gimple_fold_builtin_strchr (gsi, true);
    case BUILT_IN_STRSTR:
      return gimple_fold_builtin_strstr (gsi);
    case BUILT_IN_STRCMP:
    case BUILT_IN_STRCMP_EQ:
    case BUILT_IN_STRCASECMP:
    case BUILT_IN_STRNCMP:
    case BUILT_IN_STRNCMP_EQ:
    case BUILT_IN_STRNCASECMP:
      return gimple_fold_builtin_string_compare (gsi);
    case BUILT_IN_MEMCHR:
      return gimple_fold_builtin_memchr (gsi);
    case BUILT_IN_FPUTS:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), false);
    case BUILT_IN_FPUTS_UNLOCKED:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), true);
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      return gimple_fold_builtin_memory_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     gimple_call_arg (stmt, 3),
					     fcode);
    case BUILT_IN_STPCPY:
      return gimple_fold_builtin_stpcpy (gsi);
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      return gimple_fold_builtin_stxcpy_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     fcode);
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      return gimple_fold_builtin_stxncpy_chk (gsi,
					      gimple_call_arg (stmt, 0),
					      gimple_call_arg (stmt, 1),
					      gimple_call_arg (stmt, 2),
					      gimple_call_arg (stmt, 3),
					      fcode);
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      return gimple_fold_builtin_snprintf_chk (gsi, fcode);

    /* The printf family is variadic, so the argument counts must be
       validated here before extracting positional arguments.  */
    case BUILT_IN_FPRINTF:
    case BUILT_IN_FPRINTF_UNLOCKED:
    case BUILT_IN_VFPRINTF:
      if (n == 2 || n == 3)
	return gimple_fold_builtin_fprintf (gsi,
					    gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1),
					    n == 3
					    ? gimple_call_arg (stmt, 2)
					    : NULL_TREE,
					    fcode);
      break;
    case BUILT_IN_FPRINTF_CHK:
    case BUILT_IN_VFPRINTF_CHK:
      if (n == 3 || n == 4)
	return gimple_fold_builtin_fprintf (gsi,
					    gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 2),
					    n == 4
					    ? gimple_call_arg (stmt, 3)
					    : NULL_TREE,
					    fcode);
      break;
    case BUILT_IN_PRINTF:
    case BUILT_IN_PRINTF_UNLOCKED:
    case BUILT_IN_VPRINTF:
      if (n == 1 || n == 2)
	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
					   n == 2
					   ? gimple_call_arg (stmt, 1)
					   : NULL_TREE, fcode);
      break;
    case BUILT_IN_PRINTF_CHK:
    case BUILT_IN_VPRINTF_CHK:
      if (n == 2 || n == 3)
	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
					   n == 3
					   ? gimple_call_arg (stmt, 2)
					   : NULL_TREE, fcode);
      break;
    case BUILT_IN_ACC_ON_DEVICE:
      return gimple_fold_builtin_acc_on_device (gsi,
						gimple_call_arg (stmt, 0));
    case BUILT_IN_OMP_IS_INITIAL_DEVICE:
      return gimple_fold_builtin_omp_is_initial_device (gsi);

    case BUILT_IN_OMP_GET_INITIAL_DEVICE:
      return gimple_fold_builtin_omp_get_initial_device (gsi);

    case BUILT_IN_OMP_GET_NUM_DEVICES:
      return gimple_fold_builtin_omp_get_num_devices (gsi);

    case BUILT_IN_REALLOC:
      return gimple_fold_builtin_realloc (gsi);

    case BUILT_IN_CLEAR_PADDING:
      return gimple_fold_builtin_clear_padding (gsi);

    case BUILT_IN_CONSTANT_P:
      return gimple_fold_builtin_constant_p (gsi);

    default:;
    }

  /* Try the generic builtin folder.  */
  bool ignore = (gimple_call_lhs (stmt) == NULL);
  tree result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      else
	result = fold_convert (gimple_call_return_type (stmt), result);
      gimplify_and_update_call_from_tree (gsi, result);
      return true;
    }

  return false;
}
5586 :
5587 : /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
5588 : function calls to constants, where possible. */
5589 :
5590 : static tree
5591 20589 : fold_internal_goacc_dim (const gimple *call)
5592 : {
5593 20589 : int axis = oacc_get_ifn_dim_arg (call);
5594 20589 : int size = oacc_get_fn_dim_size (current_function_decl, axis);
5595 20589 : tree result = NULL_TREE;
5596 20589 : tree type = TREE_TYPE (gimple_call_lhs (call));
5597 :
5598 20589 : switch (gimple_call_internal_fn (call))
5599 : {
5600 8915 : case IFN_GOACC_DIM_POS:
5601 : /* If the size is 1, we know the answer. */
5602 8915 : if (size == 1)
5603 8915 : result = build_int_cst (type, 0);
5604 : break;
5605 11674 : case IFN_GOACC_DIM_SIZE:
5606 : /* If the size is not dynamic, we know the answer. */
5607 11674 : if (size)
5608 11674 : result = build_int_cst (type, size);
5609 : break;
5610 : default:
5611 : break;
5612 : }
5613 :
5614 20589 : return result;
5615 : }
5616 :
5617 : /* Return true if stmt is __atomic_compare_exchange_N call which is suitable
5618 : for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
5619 : &var where var is only addressable because of such calls. */
5620 :
5621 : bool
5622 59130050 : optimize_atomic_compare_exchange_p (gimple *stmt)
5623 : {
5624 59130050 : if (gimple_call_num_args (stmt) != 6
5625 1592884 : || !flag_inline_atomics
5626 1592884 : || !optimize
5627 1592884 : || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
5628 1592803 : || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
5629 1054126 : || !gimple_vdef (stmt)
5630 60071880 : || !gimple_vuse (stmt))
5631 58188220 : return false;
5632 :
5633 941830 : tree fndecl = gimple_call_fndecl (stmt);
5634 941830 : switch (DECL_FUNCTION_CODE (fndecl))
5635 : {
5636 51767 : case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
5637 51767 : case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
5638 51767 : case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
5639 51767 : case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
5640 51767 : case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
5641 51767 : break;
5642 : default:
5643 : return false;
5644 : }
5645 :
5646 51767 : tree expected = gimple_call_arg (stmt, 1);
5647 51767 : if (TREE_CODE (expected) != ADDR_EXPR
5648 51767 : || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
5649 : return false;
5650 :
5651 49433 : tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
5652 49433 : if (!is_gimple_reg_type (etype)
5653 49006 : || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
5654 46594 : || TREE_THIS_VOLATILE (etype)
5655 46594 : || VECTOR_TYPE_P (etype)
5656 : || TREE_CODE (etype) == COMPLEX_TYPE
5657 : /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
5658 : might not preserve all the bits. See PR71716. */
5659 : || SCALAR_FLOAT_TYPE_P (etype)
5660 67403 : || maybe_ne (TYPE_PRECISION (etype),
5661 35940 : GET_MODE_BITSIZE (TYPE_MODE (etype))))
5662 37871 : return false;
5663 :
5664 11562 : tree weak = gimple_call_arg (stmt, 3);
5665 11562 : if (!integer_zerop (weak) && !integer_onep (weak))
5666 : return false;
5667 :
5668 11562 : tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
5669 11562 : tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
5670 11562 : machine_mode mode = TYPE_MODE (itype);
5671 :
5672 11562 : if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
5673 : == CODE_FOR_nothing
5674 11562 : && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
5675 : return false;
5676 :
5677 23124 : if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
5678 : return false;
5679 :
5680 : return true;
5681 : }
5682 :
/* Fold
   r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
   into
   _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
   i = IMAGPART_EXPR <t>;
   r = (_Bool) i;
   e = REALPART_EXPR <t>;  */

void
fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree fndecl = gimple_call_fndecl (stmt);
  tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  /* ITYPE is the integral type of the atomic access; the internal fn
     returns a _Complex of that type (real = old value, imag = success).  */
  tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
  tree ctype = build_complex_type (itype);
  /* optimize_atomic_compare_exchange_p already verified arg 1 is &VAR.  */
  tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
  bool throws = false;
  edge e = NULL;
  /* Load the current value of EXPECTED into an SSA name.  */
  gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
				   expected);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);
  gimple_stmt_iterator gsiret = gsi_for_stmt (g);
  if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
    {
      /* Reinterpret the expected bits as the integral access type.  */
      g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
			       build1 (VIEW_CONVERT_EXPR, itype,
				       gimple_assign_lhs (g)));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }
  /* Encode weakness in bit 8 and the access size in bytes below it.  */
  int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
	     + int_size_in_bytes (itype);
  g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
				  gimple_call_arg (stmt, 0),
				  gimple_assign_lhs (g),
				  gimple_call_arg (stmt, 2),
				  build_int_cst (integer_type_node, flag),
				  gimple_call_arg (stmt, 4),
				  gimple_call_arg (stmt, 5));
  tree lhs = make_ssa_name (ctype);
  gimple_call_set_lhs (g, lhs);
  gimple_move_vops (g, stmt);
  tree oldlhs = gimple_call_lhs (stmt);
  if (stmt_can_throw_internal (cfun, stmt))
    {
      /* If the call can throw internally, follow-up statements must be
	 inserted on the fallthru edge rather than after the call.  */
      throws = true;
      e = find_fallthru_edge (gsi_bb (*gsi)->succs);
    }
  gimple_call_set_nothrow (as_a <gcall *> (g),
			   gimple_call_nothrow_p (as_a <gcall *> (stmt)));
  gimple_call_set_lhs (stmt, NULL_TREE);
  gsi_replace (gsi, g, true);
  if (oldlhs)
    {
      /* Extract the success flag: r = (_Bool) IMAGPART_EXPR <t>;  */
      g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
			       build1 (IMAGPART_EXPR, itype, lhs));
      if (throws)
	{
	  gsi_insert_on_edge_immediate (e, g);
	  *gsi = gsi_for_stmt (g);
	}
      else
	gsi_insert_after (gsi, g, GSI_NEW_STMT);
      g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }
  /* Extract the old memory value: e = REALPART_EXPR <t>;  */
  g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
			   build1 (REALPART_EXPR, itype, lhs));
  if (throws && oldlhs == NULL_TREE)
    {
      gsi_insert_on_edge_immediate (e, g);
      *gsi = gsi_for_stmt (g);
    }
  else
    gsi_insert_after (gsi, g, GSI_NEW_STMT);
  if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
    {
      /* Convert the old value back to EXPECTED's original type.  */
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
			       VIEW_CONVERT_EXPR,
			       build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
				       gimple_assign_lhs (g)));
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }
  g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
  gsi_insert_after (gsi, g, GSI_NEW_STMT);
  /* Leave the iterator at the statement that loaded EXPECTED.  */
  *gsi = gsiret;
}
5770 :
5771 : /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
5772 : doesn't fit into TYPE. The test for overflow should be regardless of
5773 : -fwrapv, and even for unsigned types. */
5774 :
5775 : bool
5776 407646 : arith_overflowed_p (enum tree_code code, const_tree type,
5777 : const_tree arg0, const_tree arg1)
5778 : {
5779 407646 : widest2_int warg0 = widest2_int_cst (arg0);
5780 407646 : widest2_int warg1 = widest2_int_cst (arg1);
5781 407646 : widest2_int wres;
5782 407646 : switch (code)
5783 : {
5784 96014 : case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
5785 115516 : case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
5786 197511 : case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
5787 0 : default: gcc_unreachable ();
5788 : }
5789 407646 : signop sign = TYPE_SIGN (type);
5790 407646 : if (sign == UNSIGNED && wi::neg_p (wres))
5791 : return true;
5792 335761 : return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
5793 407786 : }
5794 :
/* Mask state for partial load/store operations (mask and length). */
enum mask_load_store_state {
  MASK_ALL_INACTIVE, /* All lanes/elements are inactive (can be elided). */
  MASK_ALL_ACTIVE, /* All lanes/elements are active (unconditional). */
  MASK_UNKNOWN /* Mixed lanes, or not determinable at compile time. */
};
5801 :
/* Check the mask/length state of IFN_{MASK,LEN,MASK_LEN}_LOAD/STORE call CALL.
   Returns whether all elements are active, all inactive, or mixed.
   VECTYPE is the vector type of the operation. */

static enum mask_load_store_state
partial_load_store_mask_state (gcall *call, tree vectype)
{
  internal_fn ifn = gimple_call_internal_fn (call);
  /* Either index is -1 when the internal fn has no such argument.  */
  int mask_index = internal_fn_mask_index (ifn);
  int len_index = internal_fn_len_index (ifn);

  /* Extract length and mask arguments up front. */
  tree len = len_index != -1 ? gimple_call_arg (call, len_index) : NULL_TREE;
  /* The bias argument immediately follows the length argument.  */
  tree bias = len ? gimple_call_arg (call, len_index + 1) : NULL_TREE;
  tree mask = mask_index != -1 ? gimple_call_arg (call, mask_index) : NULL_TREE;

  poly_int64 nelts = GET_MODE_NUNITS (TYPE_MODE (vectype));

  /* -1 acts as a sentinel for "no constant length computed below".  */
  poly_widest_int wlen = -1;
  bool full_length_p = !len; /* No length means full length. */

  /* Compute effective length. */
  if (len && poly_int_tree_p (len))
    {
      gcc_assert (TREE_CODE (bias) == INTEGER_CST);
      /* The effective length is LEN + BIAS.  */
      wlen = wi::to_poly_widest (len) + wi::to_widest (bias);

      if (known_eq (wlen, 0))
	return MASK_ALL_INACTIVE;

      if (known_eq (wlen, nelts))
	full_length_p = true;
      else
	full_length_p = false;
    }

  /* Check mask for early return cases. */
  if (mask)
    {
      if (integer_zerop (mask))
	return MASK_ALL_INACTIVE;

      /* An all-ones mask only means "all active" if the length also
	 covers the whole vector.  */
      if (full_length_p && integer_all_onesp (mask))
	return MASK_ALL_ACTIVE;
    }
  else if (full_length_p)
    /* No mask and full length means all active. */
    return MASK_ALL_ACTIVE;

  /* For VLA vectors, we can't do much more. */
  if (!nelts.is_constant ())
    return MASK_UNKNOWN;

  /* Same for VLS vectors with non-constant mask. */
  if (mask && TREE_CODE (mask) != VECTOR_CST)
    return MASK_UNKNOWN;

  /* Check VLS vector elements. */
  gcc_assert (wlen.is_constant ());

  HOST_WIDE_INT active_len = wlen.to_constant ().to_shwi ();
  /* -1 is the sentinel from above: no constant length seen, so the
     active range is the whole vector.  */
  if (active_len == -1)
    active_len = nelts.to_constant ();

  /* Check if all elements in the active range match the mask. */
  for (HOST_WIDE_INT i = 0; i < active_len; i++)
    {
      bool elt_active = !mask || !integer_zerop (vector_cst_elt (mask, i));
      if (!elt_active)
	{
	  /* Found an inactive element. Check if all are inactive. */
	  for (HOST_WIDE_INT j = 0; j < active_len; j++)
	    if (!mask || !integer_zerop (vector_cst_elt (mask, j)))
	      return MASK_UNKNOWN; /* Mixed state. */
	  return MASK_ALL_INACTIVE;
	}
    }

  /* All elements in active range are active. */
  return full_length_p ? MASK_ALL_ACTIVE : MASK_UNKNOWN;
}
5883 :
5884 :
5885 : /* If IFN_{MASK,LEN,MASK_LEN}_LOAD/STORE call CALL is unconditional
5886 : (all lanes active), return a MEM_REF for the memory it references.
5887 : Otherwise return NULL_TREE. VECTYPE is the type of the memory vector. */
5888 :
5889 : static tree
5890 4039 : gimple_fold_partial_load_store_mem_ref (gcall *call, tree vectype)
5891 : {
5892 : /* Only fold if all lanes are active (unconditional). */
5893 4039 : if (partial_load_store_mask_state (call, vectype) != MASK_ALL_ACTIVE)
5894 : return NULL_TREE;
5895 :
5896 71 : tree ptr = gimple_call_arg (call, 0);
5897 71 : tree alias_align = gimple_call_arg (call, 1);
5898 71 : if (!tree_fits_uhwi_p (alias_align))
5899 : return NULL_TREE;
5900 :
5901 71 : unsigned HOST_WIDE_INT align = tree_to_uhwi (alias_align);
5902 71 : if (TYPE_ALIGN (vectype) != align)
5903 14 : vectype = build_aligned_type (vectype, align);
5904 71 : tree offset = build_zero_cst (TREE_TYPE (alias_align));
5905 71 : return fold_build2 (MEM_REF, vectype, ptr, offset);
5906 : }
5907 :
/* Try to fold IFN_{MASK,LEN}_LOAD/STORE call CALL.  Return true on success. */

static bool
gimple_fold_partial_load_store (gimple_stmt_iterator *gsi, gcall *call)
{
  internal_fn ifn = gimple_call_internal_fn (call);
  tree lhs = gimple_call_lhs (call);
  /* A load produces a value (has a lhs); a store does not.  */
  bool is_load = (lhs != NULL_TREE);
  tree vectype;

  if (is_load)
    vectype = TREE_TYPE (lhs);
  else
    {
      tree rhs = gimple_call_arg (call, internal_fn_stored_value_index (ifn));
      vectype = TREE_TYPE (rhs);
    }

  enum mask_load_store_state state
    = partial_load_store_mask_state (call, vectype);

  /* Handle all-inactive case. */
  if (state == MASK_ALL_INACTIVE)
    {
      if (is_load)
	{
	  /* Replace load with else value. */
	  int else_index = internal_fn_else_index (ifn);
	  tree else_value = gimple_call_arg (call, else_index);
	  if (!is_gimple_reg (lhs))
	    {
	      /* For an aggregate destination only a zero "else" value
		 can be materialized, via an empty CONSTRUCTOR.  */
	      if (!zerop (else_value))
		return false;
	      else_value = build_constructor (TREE_TYPE (lhs), NULL);
	    }
	  gassign *new_stmt = gimple_build_assign (lhs, else_value);
	  gimple_set_location (new_stmt, gimple_location (call));
	  /* When the lhs is an array for LANES version, then there is still
	     a store, move the vops from the old stmt to the new one. */
	  if (!is_gimple_reg (lhs))
	    gimple_move_vops (new_stmt, call);
	  gsi_replace (gsi, new_stmt, false);
	  return true;
	}
      else
	{
	  /* Remove inactive store altogether. */
	  unlink_stmt_vdef (call);
	  release_defs (call);
	  gsi_replace (gsi, gimple_build_nop (), true);
	  return true;
	}
    }

  /* We cannot simplify a gather/scatter or load/store lanes further. */
  if (internal_gather_scatter_fn_p (ifn)
      || TREE_CODE (vectype) == ARRAY_TYPE)
    return false;

  /* Handle all-active case by folding to regular memory operation. */
  if (tree mem_ref = gimple_fold_partial_load_store_mem_ref (call, vectype))
    {
      gassign *new_stmt;
      if (is_load)
	new_stmt = gimple_build_assign (lhs, mem_ref);
      else
	{
	  tree rhs
	    = gimple_call_arg (call, internal_fn_stored_value_index (ifn));
	  new_stmt = gimple_build_assign (mem_ref, rhs);
	}

      gimple_set_location (new_stmt, gimple_location (call));
      gimple_move_vops (new_stmt, call);
      gsi_replace (gsi, new_stmt, false);
      return true;
    }
  return false;
}
5987 :
5988 : /* Attempt to fold a call statement referenced by the statement iterator GSI.
5989 : The statement may be replaced by another statement, e.g., if the call
5990 : simplifies to a constant value. Return true if any changes were made.
5991 : It is assumed that the operands have been previously folded. */
5992 :
5993 : static bool
5994 55489080 : gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
5995 : {
5996 55489080 : gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
5997 55489080 : tree callee;
5998 55489080 : bool changed = false;
5999 :
6000 : /* Check for virtual calls that became direct calls. */
6001 55489080 : callee = gimple_call_fn (stmt);
6002 55489080 : if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
6003 : {
6004 444102 : if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
6005 : {
6006 6 : if (dump_file && virtual_method_call_p (callee)
6007 374 : && !possible_polymorphic_call_target_p
6008 6 : (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
6009 6 : (OBJ_TYPE_REF_EXPR (callee)))))
6010 : {
6011 0 : fprintf (dump_file,
6012 : "Type inheritance inconsistent devirtualization of ");
6013 0 : print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
6014 0 : fprintf (dump_file, " to ");
6015 0 : print_generic_expr (dump_file, callee, TDF_SLIM);
6016 0 : fprintf (dump_file, "\n");
6017 : }
6018 :
6019 368 : gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
6020 368 : changed = true;
6021 : }
6022 443734 : else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
6023 : {
6024 438796 : bool final;
6025 438796 : vec <cgraph_node *>targets
6026 438796 : = possible_polymorphic_call_targets (callee, stmt, &final);
6027 441455 : if (final && targets.length () <= 1 && dbg_cnt (devirt))
6028 : {
6029 2025 : tree lhs = gimple_call_lhs (stmt);
6030 2025 : if (dump_enabled_p ())
6031 : {
6032 34 : dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
6033 : "folding virtual function call to %s\n",
6034 34 : targets.length () == 1
6035 17 : ? targets[0]->name ()
6036 : : "__builtin_unreachable");
6037 : }
6038 2025 : if (targets.length () == 1)
6039 : {
6040 1988 : tree fndecl = targets[0]->decl;
6041 1988 : gimple_call_set_fndecl (stmt, fndecl);
6042 1988 : changed = true;
6043 : /* If changing the call to __cxa_pure_virtual
6044 : or similar noreturn function, adjust gimple_call_fntype
6045 : too. */
6046 1988 : if (gimple_call_noreturn_p (stmt)
6047 19 : && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
6048 13 : && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
6049 2001 : && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
6050 13 : == void_type_node))
6051 13 : gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
6052 : /* If the call becomes noreturn, remove the lhs. */
6053 1988 : if (lhs
6054 1668 : && gimple_call_noreturn_p (stmt)
6055 2003 : && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
6056 6 : || should_remove_lhs_p (lhs)))
6057 : {
6058 12 : if (TREE_CODE (lhs) == SSA_NAME)
6059 : {
6060 0 : tree var = create_tmp_var (TREE_TYPE (lhs));
6061 0 : tree def = get_or_create_ssa_default_def (cfun, var);
6062 0 : gimple *new_stmt = gimple_build_assign (lhs, def);
6063 0 : gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
6064 : }
6065 12 : gimple_call_set_lhs (stmt, NULL_TREE);
6066 : }
6067 1988 : maybe_remove_unused_call_args (cfun, stmt);
6068 : }
6069 : else
6070 : {
6071 37 : location_t loc = gimple_location (stmt);
6072 37 : gimple *new_stmt = gimple_build_builtin_unreachable (loc);
6073 37 : gimple_call_set_ctrl_altering (new_stmt, false);
6074 : /* If the call had a SSA name as lhs morph that into
6075 : an uninitialized value. */
6076 37 : if (lhs && TREE_CODE (lhs) == SSA_NAME)
6077 : {
6078 12 : tree var = create_tmp_var (TREE_TYPE (lhs));
6079 12 : SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
6080 12 : SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
6081 12 : set_ssa_default_def (cfun, var, lhs);
6082 : }
6083 37 : gimple_move_vops (new_stmt, stmt);
6084 37 : gsi_replace (gsi, new_stmt, false);
6085 37 : return true;
6086 : }
6087 : }
6088 : }
6089 : }
6090 :
6091 : /* Check for indirect calls that became direct calls, and then
6092 : no longer require a static chain. */
6093 55489043 : if (gimple_call_chain (stmt))
6094 : {
6095 245881 : tree fn = gimple_call_fndecl (stmt);
6096 294304 : if (fn && !DECL_STATIC_CHAIN (fn))
6097 : {
6098 2024 : gimple_call_set_chain (stmt, NULL);
6099 2024 : changed = true;
6100 : }
6101 : }
6102 :
6103 55489043 : if (inplace)
6104 : return changed;
6105 :
6106 : /* Don't constant fold functions which can change the control. */
6107 55486319 : if (gimple_call_ctrl_altering_p (stmt))
6108 : return changed;
6109 :
6110 : /* Check for builtins that CCP can handle using information not
6111 : available in the generic fold routines. */
6112 47971171 : if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
6113 : {
6114 9294687 : if (gimple_fold_builtin (gsi))
6115 207251 : changed = true;
6116 : }
6117 38676484 : else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
6118 : {
6119 1120891 : changed |= targetm.gimple_fold_builtin (gsi);
6120 : }
6121 37555593 : else if (gimple_call_internal_p (stmt))
6122 : {
6123 1754689 : enum tree_code subcode = ERROR_MARK;
6124 1754689 : tree result = NULL_TREE;
6125 1754689 : bool cplx_result = false;
6126 1754689 : bool uaddc_usubc = false;
6127 1754689 : tree overflow = NULL_TREE;
6128 1754689 : switch (gimple_call_internal_fn (stmt))
6129 : {
6130 832 : case IFN_ASSUME:
6131 : /* Remove .ASSUME calls during the last fold since it is no
6132 : longer needed. */
6133 832 : if (fold_before_rtl_expansion_p ())
6134 114 : replace_call_with_value (gsi, NULL_TREE);
6135 : break;
6136 162368 : case IFN_BUILTIN_EXPECT:
6137 162368 : result = fold_builtin_expect (gimple_location (stmt),
6138 : gimple_call_arg (stmt, 0),
6139 : gimple_call_arg (stmt, 1),
6140 : gimple_call_arg (stmt, 2),
6141 : NULL_TREE);
6142 162368 : break;
6143 8660 : case IFN_UBSAN_OBJECT_SIZE:
6144 8660 : {
6145 8660 : tree offset = gimple_call_arg (stmt, 1);
6146 8660 : tree objsize = gimple_call_arg (stmt, 2);
6147 8660 : if (integer_all_onesp (objsize)
6148 8660 : || (TREE_CODE (offset) == INTEGER_CST
6149 4787 : && TREE_CODE (objsize) == INTEGER_CST
6150 1126 : && tree_int_cst_le (offset, objsize)))
6151 : {
6152 1539 : replace_call_with_value (gsi, NULL_TREE);
6153 1539 : return true;
6154 : }
6155 : }
6156 : break;
6157 11383 : case IFN_UBSAN_PTR:
6158 11383 : if (integer_zerop (gimple_call_arg (stmt, 1)))
6159 : {
6160 30 : replace_call_with_value (gsi, NULL_TREE);
6161 30 : return true;
6162 : }
6163 : break;
6164 8557 : case IFN_UBSAN_BOUNDS:
6165 8557 : {
6166 8557 : tree index = gimple_call_arg (stmt, 1);
6167 8557 : tree bound = gimple_call_arg (stmt, 2);
6168 8557 : if (TREE_CODE (index) == INTEGER_CST
6169 5533 : && TREE_CODE (bound) == INTEGER_CST)
6170 : {
6171 4390 : index = fold_convert (TREE_TYPE (bound), index);
6172 4390 : if (TREE_CODE (index) == INTEGER_CST
6173 4390 : && tree_int_cst_lt (index, bound))
6174 : {
6175 288 : replace_call_with_value (gsi, NULL_TREE);
6176 288 : return true;
6177 : }
6178 : }
6179 : }
6180 : break;
6181 20589 : case IFN_GOACC_DIM_SIZE:
6182 20589 : case IFN_GOACC_DIM_POS:
6183 20589 : result = fold_internal_goacc_dim (stmt);
6184 20589 : break;
6185 : case IFN_UBSAN_CHECK_ADD:
6186 : subcode = PLUS_EXPR;
6187 : break;
6188 : case IFN_UBSAN_CHECK_SUB:
6189 : subcode = MINUS_EXPR;
6190 : break;
6191 : case IFN_UBSAN_CHECK_MUL:
6192 : subcode = MULT_EXPR;
6193 : break;
6194 : case IFN_ADD_OVERFLOW:
6195 : subcode = PLUS_EXPR;
6196 : cplx_result = true;
6197 : break;
6198 : case IFN_SUB_OVERFLOW:
6199 : subcode = MINUS_EXPR;
6200 : cplx_result = true;
6201 : break;
6202 : case IFN_MUL_OVERFLOW:
6203 : subcode = MULT_EXPR;
6204 : cplx_result = true;
6205 : break;
6206 : case IFN_UADDC:
6207 : subcode = PLUS_EXPR;
6208 : cplx_result = true;
6209 : uaddc_usubc = true;
6210 : break;
6211 : case IFN_USUBC:
6212 : subcode = MINUS_EXPR;
6213 : cplx_result = true;
6214 : uaddc_usubc = true;
6215 : break;
6216 4054 : case IFN_LEN_LOAD:
6217 4054 : case IFN_MASK_LOAD:
6218 4054 : case IFN_MASK_LEN_LOAD:
6219 4054 : case IFN_MASK_GATHER_LOAD:
6220 4054 : case IFN_MASK_LEN_GATHER_LOAD:
6221 4054 : case IFN_MASK_LOAD_LANES:
6222 4054 : case IFN_MASK_LEN_LOAD_LANES:
6223 4054 : case IFN_LEN_STORE:
6224 4054 : case IFN_MASK_STORE:
6225 4054 : case IFN_MASK_LEN_STORE:
6226 4054 : case IFN_MASK_SCATTER_STORE:
6227 4054 : case IFN_MASK_LEN_SCATTER_STORE:
6228 4054 : case IFN_MASK_STORE_LANES:
6229 4054 : case IFN_MASK_LEN_STORE_LANES:
6230 4054 : changed |= gimple_fold_partial_load_store (gsi, stmt);
6231 4054 : break;
6232 : default:
6233 : break;
6234 : }
6235 187125 : if (subcode != ERROR_MARK)
6236 : {
6237 493336 : tree arg0 = gimple_call_arg (stmt, 0);
6238 493336 : tree arg1 = gimple_call_arg (stmt, 1);
6239 493336 : tree arg2 = NULL_TREE;
6240 493336 : tree type = TREE_TYPE (arg0);
6241 493336 : if (cplx_result)
6242 : {
6243 474257 : tree lhs = gimple_call_lhs (stmt);
6244 474257 : if (lhs == NULL_TREE)
6245 : type = NULL_TREE;
6246 : else
6247 474257 : type = TREE_TYPE (TREE_TYPE (lhs));
6248 474257 : if (uaddc_usubc)
6249 31845 : arg2 = gimple_call_arg (stmt, 2);
6250 : }
6251 493336 : if (type == NULL_TREE)
6252 : ;
6253 493336 : else if (uaddc_usubc)
6254 : {
6255 31845 : if (!integer_zerop (arg2))
6256 : ;
6257 : /* x = y + 0 + 0; x = y - 0 - 0; */
6258 4825 : else if (integer_zerop (arg1))
6259 : result = arg0;
6260 : /* x = 0 + y + 0; */
6261 4201 : else if (subcode != MINUS_EXPR && integer_zerop (arg0))
6262 : result = arg1;
6263 : /* x = y - y - 0; */
6264 4201 : else if (subcode == MINUS_EXPR
6265 4201 : && operand_equal_p (arg0, arg1, 0))
6266 0 : result = integer_zero_node;
6267 : }
6268 : /* x = y + 0; x = y - 0; x = y * 0; */
6269 461491 : else if (integer_zerop (arg1))
6270 10107 : result = subcode == MULT_EXPR ? integer_zero_node : arg0;
6271 : /* x = 0 + y; x = 0 * y; */
6272 451384 : else if (subcode != MINUS_EXPR && integer_zerop (arg0))
6273 0 : result = subcode == MULT_EXPR ? integer_zero_node : arg1;
6274 : /* x = y - y; */
6275 451384 : else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
6276 7 : result = integer_zero_node;
6277 : /* x = y * 1; x = 1 * y; */
6278 451377 : else if (subcode == MULT_EXPR && integer_onep (arg1))
6279 : result = arg0;
6280 446003 : else if (subcode == MULT_EXPR && integer_onep (arg0))
6281 : result = arg1;
6282 493336 : if (result)
6283 : {
6284 16112 : if (result == integer_zero_node)
6285 2140 : result = build_zero_cst (type);
6286 13972 : else if (cplx_result && TREE_TYPE (result) != type)
6287 : {
6288 9638 : if (TREE_CODE (result) == INTEGER_CST)
6289 : {
6290 0 : if (arith_overflowed_p (PLUS_EXPR, type, result,
6291 : integer_zero_node))
6292 0 : overflow = build_one_cst (type);
6293 : }
6294 9638 : else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
6295 6920 : && TYPE_UNSIGNED (type))
6296 9783 : || (TYPE_PRECISION (type)
6297 2863 : < (TYPE_PRECISION (TREE_TYPE (result))
6298 2863 : + (TYPE_UNSIGNED (TREE_TYPE (result))
6299 3219 : && !TYPE_UNSIGNED (type)))))
6300 : result = NULL_TREE;
6301 62 : if (result)
6302 62 : result = fold_convert (type, result);
6303 : }
6304 : }
6305 : }
6306 :
6307 1266032 : if (result)
6308 : {
6309 29014 : if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
6310 0 : result = drop_tree_overflow (result);
6311 29014 : if (cplx_result)
6312 : {
6313 6525 : if (overflow == NULL_TREE)
6314 6525 : overflow = build_zero_cst (TREE_TYPE (result));
6315 6525 : tree ctype = build_complex_type (TREE_TYPE (result));
6316 6525 : if (TREE_CODE (result) == INTEGER_CST
6317 2140 : && TREE_CODE (overflow) == INTEGER_CST)
6318 2140 : result = build_complex (ctype, result, overflow);
6319 : else
6320 4385 : result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
6321 : ctype, result, overflow);
6322 : }
6323 29014 : gimplify_and_update_call_from_tree (gsi, result);
6324 29014 : changed = true;
6325 : }
6326 : }
6327 :
6328 : return changed;
6329 : }
6330 :
6331 :
6332 : /* Return true whether NAME has a use on STMT. Note this can return
6333 : false even though there's a use on STMT if SSA operands are not
6334 : up-to-date. */
6335 :
6336 : static bool
6337 1639 : has_use_on_stmt (tree name, gimple *stmt)
6338 : {
6339 1639 : ssa_op_iter iter;
6340 1639 : tree op;
6341 3303 : FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
6342 1710 : if (op == name)
6343 : return true;
6344 : return false;
6345 : }
6346 :
6347 : /* Add the lhs of each statement of SEQ to DCE_WORKLIST. */
6348 :
6349 : void
6350 4640741 : mark_lhs_in_seq_for_dce (bitmap dce_worklist, gimple_seq seq)
6351 : {
6352 4640741 : if (!dce_worklist)
6353 : return;
6354 :
6355 1618106 : for (gimple_stmt_iterator i = gsi_start (seq);
6356 1886956 : !gsi_end_p (i); gsi_next (&i))
6357 : {
6358 268850 : gimple *stmt = gsi_stmt (i);
6359 268850 : tree name = gimple_get_lhs (stmt);
6360 268850 : if (name && TREE_CODE (name) == SSA_NAME)
6361 268850 : bitmap_set_bit (dce_worklist, SSA_NAME_VERSION (name));
6362 : }
6363 : }
6364 :
/* Worker for fold_stmt_1 dispatch to pattern based folding with
   gimple_simplify.

   Replaces *GSI with the simplification result in RES_OP (the
   operation code and its operands) and the associated statements in
   *SEQ.  Does the replacement according to INPLACE and returns true
   if the operation succeeded.  If DCE_WORKLIST is non-NULL, SSA
   names defined by statements newly inserted from *SEQ are recorded
   in it for a later DCE pass to reconsider.  */

static bool
replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
				  gimple_match_op *res_op,
				  gimple_seq *seq, bool inplace,
				  bitmap dce_worklist)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree *ops = res_op->ops;
  unsigned int num_ops = res_op->num_ops;

  /* Play safe and do not allow abnormals to be mentioned in
     newly created statements.  See also maybe_push_res_to_seq.
     As an exception allow such uses if there was a use of the
     same SSA name on the old stmt.  */
  for (unsigned int i = 0; i < num_ops; ++i)
    if (TREE_CODE (ops[i]) == SSA_NAME
	&& SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i])
	&& !has_use_on_stmt (ops[i], stmt))
      return false;

  /* Likewise for the operands of a comparison wrapped as the first
     operand of the result.  */
  if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
    for (unsigned int i = 0; i < 2; ++i)
      if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i))
	  && !has_use_on_stmt (TREE_OPERAND (ops[0], i), stmt))
	return false;

  /* Don't insert new statements when INPLACE is true, even if we could
     reuse STMT for the final statement.  */
  if (inplace && !gimple_seq_empty_p (*seq))
    return false;

  /* Replacing a GIMPLE_COND: the simplification is either a new
     comparison, a boolean SSA name, or a constant.  */
  if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
    {
      gcc_assert (res_op->code.is_tree_code ());
      auto code = tree_code (res_op->code);
      if (TREE_CODE_CLASS (code) == tcc_comparison
	  /* GIMPLE_CONDs condition may not throw.  */
	  && ((cfun
	       && (!flag_exceptions
		   || !cfun->can_throw_non_call_exceptions))
	      || !operation_could_trap_p (code,
					  FLOAT_TYPE_P (TREE_TYPE (ops[0])),
					  false, NULL_TREE)))
	gimple_cond_set_condition (cond_stmt, code, ops[0], ops[1]);
      else if (code == SSA_NAME)
	{
	  /* If setting the gimple cond to the same thing,
	     return false as nothing changed.  */
	  if (gimple_cond_code (cond_stmt) == NE_EXPR
	      && operand_equal_p (gimple_cond_lhs (cond_stmt), ops[0])
	      && integer_zerop (gimple_cond_rhs (cond_stmt)))
	    return false;
	  gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
				     build_zero_cst (TREE_TYPE (ops[0])));
	}
      else if (code == INTEGER_CST)
	{
	  /* Make into the canonical form `1 != 0` and `0 != 0`.
	     If already in the canonical form return false
	     saying nothing has been done.  */
	  if (integer_zerop (ops[0]))
	    {
	      if (gimple_cond_false_canonical_p (cond_stmt))
		return false;
	      gimple_cond_make_false (cond_stmt);
	    }
	  else
	    {
	      if (gimple_cond_true_canonical_p (cond_stmt))
		return false;
	      gimple_cond_make_true (cond_stmt);
	    }
	}
      else if (!inplace)
	{
	  /* For throwing comparisons, see if the GIMPLE_COND is the same as
	     the comparison would be.
	     This can happen due to the match pattern for
	     `(ne (cmp @0 @1) integer_zerop)` which creates a new expression
	     for the comparison.  */
	  if (TREE_CODE_CLASS (code) == tcc_comparison
	      && (!cfun
		  || (flag_exceptions
		      && cfun->can_throw_non_call_exceptions))
	      && operation_could_trap_p (code,
					 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
					 false, NULL_TREE))
	    {
	      tree lhs = gimple_cond_lhs (cond_stmt);
	      if (gimple_cond_code (cond_stmt) == NE_EXPR
		  && TREE_CODE (lhs) == SSA_NAME
		  && INTEGRAL_TYPE_P (TREE_TYPE (lhs))
		  && integer_zerop (gimple_cond_rhs (cond_stmt)))
		{
		  gimple *s = SSA_NAME_DEF_STMT (lhs);
		  if (is_gimple_assign (s)
		      && gimple_assign_rhs_code (s) == code
		      && operand_equal_p (gimple_assign_rhs1 (s), ops[0])
		      && operand_equal_p (gimple_assign_rhs2 (s), ops[1]))
		    return false;
		}
	    }
	  /* Materialize the simplification into SEQ and compare its
	     result against zero.  */
	  tree res = maybe_push_res_to_seq (res_op, seq);
	  if (!res)
	    return false;
	  gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
				     build_zero_cst (TREE_TYPE (res)));
	}
      else
	return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi),
			     0, TDF_SLIM);
	}
      // Mark the lhs of the new statements maybe for dce
      mark_lhs_in_seq_for_dce (dce_worklist, *seq);
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (is_gimple_assign (stmt)
	   && res_op->code.is_tree_code ())
    {
      auto code = tree_code (res_op->code);
      /* For INPLACE only allow rewrites that do not need more operand
	 slots than the original statement provides.  */
      if (!inplace
	  || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (code))
	{
	  maybe_build_generic_op (res_op);
	  gimple_assign_set_rhs_with_ops (gsi, code,
					  res_op->op_or_null (0),
					  res_op->op_or_null (1),
					  res_op->op_or_null (2));
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      if (!gimple_seq_empty_p (*seq))
		print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	      print_gimple_stmt (dump_file, gsi_stmt (*gsi),
				 0, TDF_SLIM);
	    }
	  // Mark the lhs of the new statements maybe for dce
	  mark_lhs_in_seq_for_dce (dce_worklist, *seq);
	  gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
	  return true;
	}
    }
  else if (res_op->code.is_fn_code ()
	   && gimple_call_combined_fn (stmt) == combined_fn (res_op->code))
    {
      /* The call simplified to the same combined function with new
	 arguments; update the argument list in place.  */
      gcc_assert (num_ops == gimple_call_num_args (stmt));
      for (unsigned int i = 0; i < num_ops; ++i)
	gimple_call_set_arg (stmt, i, ops[i]);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
	}
      // Mark the lhs of the new statements maybe for dce
      mark_lhs_in_seq_for_dce (dce_worklist, *seq);
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (!inplace)
    {
      /* Otherwise materialize the simplification into SEQ, assigning
	 the original lhs, and replace the whole statement.  */
      if (gimple_has_lhs (stmt))
	{
	  tree lhs = gimple_get_lhs (stmt);
	  if (!maybe_push_res_to_seq (res_op, seq, lhs))
	    return false;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	    }
	  // Mark the lhs of the new statements maybe for dce
	  mark_lhs_in_seq_for_dce (dce_worklist, *seq);
	  gsi_replace_with_seq_vops (gsi, *seq);
	  return true;
	}
      else
	gcc_unreachable ();
    }

  return false;
}
6563 :
/* Canonicalize MEM_REFs invariant address operand after propagation.
   T points to the tree to canonicalize in place, possibly wrapped in
   an ADDR_EXPR.  IS_DEBUG is true when *T is the value of a debug
   bind statement; in that case a failure to recompute an address
   base is tolerated instead of being an internal error.  Returns
   true if *T was changed.  */

static bool
maybe_canonicalize_mem_ref_addr (tree *t, bool is_debug = false)
{
  bool res = false;
  /* Remember the outermost tree; some canonicalizations below need
     to rewrite the wrapping ADDR_EXPR itself.  */
  tree *orig_t = t;

  if (TREE_CODE (*t) == ADDR_EXPR)
    t = &TREE_OPERAND (*t, 0);

  /* The C and C++ frontends use an ARRAY_REF for indexing with their
     generic vector extension.  The actual vector referenced is
     view-converted to an array type for this purpose.  If the index
     is constant the canonical representation in the middle-end is a
     BIT_FIELD_REF so re-write the former to the latter here.  */
  if (TREE_CODE (*t) == ARRAY_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
      && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
      && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
    {
      tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
      if (VECTOR_TYPE_P (vtype))
	{
	  tree low = array_ref_low_bound (*t);
	  if (TREE_CODE (low) == INTEGER_CST)
	    {
	      if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
		{
		  /* Compute the bit offset of the indexed element and
		     only rewrite when the access stays within the
		     bounds of the vector.  */
		  widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
					    wi::to_widest (low));
		  idx = wi::mul (idx, wi::to_widest
					 (TYPE_SIZE (TREE_TYPE (*t))));
		  widest_int ext
		    = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
		  if (maybe_le (ext, wi::to_poly_widest (TYPE_SIZE (vtype))))
		    {
		      *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
				       TREE_TYPE (*t),
				       TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
				       TYPE_SIZE (TREE_TYPE (*t)),
				       wide_int_to_tree (bitsizetype, idx));
		      res = true;
		    }
		}
	    }
	}
    }

  /* Strip handled components down to the base reference.  */
  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
     of invariant addresses into a SSA name MEM_REF address.  */
  if (TREE_CODE (*t) == MEM_REF
      || TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (*t, 0);
      if (TREE_CODE (addr) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
	      || handled_component_p (TREE_OPERAND (addr, 0))))
	{
	  tree base;
	  poly_int64 coffset;
	  base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						&coffset);
	  if (!base)
	    {
	      /* Only debug binds may refer to addresses we cannot
		 decompose; everywhere else this is a bug.  */
	      if (is_debug)
		return false;
	      gcc_unreachable ();
	    }

	  /* Fold the constant offset into the MEM_REF offset operand.  */
	  TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
	  TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
						  TREE_OPERAND (*t, 1),
						  size_int (coffset));
	  res = true;
	}
      gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
			   || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
    }

  /* Canonicalize back MEM_REFs to plain reference trees if the object
     accessed is a decl that has the same access semantics as the MEM_REF.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && MR_DEPENDENCE_CLIQUE (*t) == 0)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
      if (/* Same volatile qualification.  */
	  TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
	  /* Same TBAA behavior with -fstrict-aliasing.  */
	  && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
	  && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
	      == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
	  /* Same alignment.  */
	  && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
	  /* We have to look out here to not drop a required conversion
	     from the rhs to the lhs if *t appears on the lhs or vice-versa
	     if it appears on the rhs.  Thus require strict type
	     compatibility.  */
	  && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
	{
	  *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
	  res = true;
	}
    }

  /* Canonicalize &MEM[CST, off] wrapped in the outer ADDR_EXPR to a
     plain integer constant address.  */
  else if (TREE_CODE (*orig_t) == ADDR_EXPR
	   && TREE_CODE (*t) == MEM_REF
	   && TREE_CODE (TREE_OPERAND (*t, 0)) == INTEGER_CST)
    {
      tree base;
      poly_int64 coffset;
      base = get_addr_base_and_unit_offset (TREE_OPERAND (*orig_t, 0),
					    &coffset);
      if (base)
	{
	  gcc_assert (TREE_CODE (base) == MEM_REF);
	  poly_int64 moffset;
	  if (mem_ref_offset (base).to_shwi (&moffset))
	    {
	      coffset += moffset;
	      if (wi::to_poly_wide (TREE_OPERAND (base, 0)).to_shwi (&moffset))
		{
		  coffset += moffset;
		  *orig_t = build_int_cst (TREE_TYPE (*orig_t), coffset);
		  return true;
		}
	    }
	}
    }

  /* Canonicalize TARGET_MEM_REF in particular with respect to
     the indexes becoming constant.  */
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  if (TREE_CODE (*orig_t) == ADDR_EXPR)
	    recompute_tree_invariant_for_addr_expr (*orig_t);
	  res = true;
	}
    }

  return res;
}
6716 :
6717 : /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
6718 : distinguishes both cases. */
6719 :
6720 : static bool
6721 745227705 : fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree),
6722 : bitmap dce_worklist = nullptr)
6723 : {
6724 745227705 : bool changed = false;
6725 745227705 : gimple *stmt = gsi_stmt (*gsi);
6726 745227705 : bool nowarning = warning_suppressed_p (stmt, OPT_Wstrict_overflow);
6727 745227705 : unsigned i;
6728 745227705 : fold_defer_overflow_warnings ();
6729 :
6730 : /* First do required canonicalization of [TARGET_]MEM_REF addresses
6731 : after propagation.
6732 : ??? This shouldn't be done in generic folding but in the
6733 : propagation helpers which also know whether an address was
6734 : propagated.
6735 : Also canonicalize operand order. */
6736 745227705 : switch (gimple_code (stmt))
6737 : {
6738 250299380 : case GIMPLE_ASSIGN:
6739 250299380 : if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
6740 : {
6741 164810250 : tree *rhs = gimple_assign_rhs1_ptr (stmt);
6742 164810250 : if ((REFERENCE_CLASS_P (*rhs)
6743 104258535 : || TREE_CODE (*rhs) == ADDR_EXPR)
6744 179480508 : && maybe_canonicalize_mem_ref_addr (rhs))
6745 : changed = true;
6746 164810250 : tree *lhs = gimple_assign_lhs_ptr (stmt);
6747 164810250 : if (REFERENCE_CLASS_P (*lhs)
6748 164810250 : && maybe_canonicalize_mem_ref_addr (lhs))
6749 : changed = true;
6750 : /* Canonicalize &MEM[ssa_n, CST] to ssa_n p+ CST.
6751 : This cannot be done in maybe_canonicalize_mem_ref_addr
6752 : as the gimple now has two operands rather than one.
6753 : The same reason why this can't be done in
6754 : maybe_canonicalize_mem_ref_addr is the same reason why
6755 : this can't be done inplace. */
6756 164810250 : if (!inplace && TREE_CODE (*rhs) == ADDR_EXPR)
6757 : {
6758 14467955 : tree inner = TREE_OPERAND (*rhs, 0);
6759 14467955 : if (TREE_CODE (inner) == MEM_REF
6760 999031 : && TREE_CODE (TREE_OPERAND (inner, 0)) == SSA_NAME
6761 14530148 : && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
6762 : {
6763 62193 : tree ptr = TREE_OPERAND (inner, 0);
6764 62193 : tree addon = TREE_OPERAND (inner, 1);
6765 62193 : addon = fold_convert (sizetype, addon);
6766 62193 : gimple_assign_set_rhs_with_ops (gsi, POINTER_PLUS_EXPR,
6767 : ptr, addon);
6768 62193 : changed = true;
6769 62193 : stmt = gsi_stmt (*gsi);
6770 : }
6771 : }
6772 : }
6773 : else
6774 : {
6775 : /* Canonicalize operand order. */
6776 85489130 : enum tree_code code = gimple_assign_rhs_code (stmt);
6777 85489130 : if (TREE_CODE_CLASS (code) == tcc_comparison
6778 79509642 : || commutative_tree_code (code)
6779 128343444 : || commutative_ternary_tree_code (code))
6780 : {
6781 42635770 : tree rhs1 = gimple_assign_rhs1 (stmt);
6782 42635770 : tree rhs2 = gimple_assign_rhs2 (stmt);
6783 42635770 : if (tree_swap_operands_p (rhs1, rhs2))
6784 : {
6785 2562427 : gimple_assign_set_rhs1 (stmt, rhs2);
6786 2562427 : gimple_assign_set_rhs2 (stmt, rhs1);
6787 2562427 : if (TREE_CODE_CLASS (code) == tcc_comparison)
6788 302180 : gimple_assign_set_rhs_code (stmt,
6789 : swap_tree_comparison (code));
6790 : changed = true;
6791 : }
6792 : }
6793 : }
6794 : break;
6795 55543966 : case GIMPLE_CALL:
6796 55543966 : {
6797 55543966 : gcall *call = as_a<gcall *> (stmt);
6798 166414533 : for (i = 0; i < gimple_call_num_args (call); ++i)
6799 : {
6800 110870567 : tree *arg = gimple_call_arg_ptr (call, i);
6801 110870567 : if (REFERENCE_CLASS_P (*arg)
6802 110870567 : && maybe_canonicalize_mem_ref_addr (arg))
6803 : changed = true;
6804 : }
6805 55543966 : tree *lhs = gimple_call_lhs_ptr (call);
6806 55543966 : if (*lhs
6807 22446363 : && REFERENCE_CLASS_P (*lhs)
6808 55659946 : && maybe_canonicalize_mem_ref_addr (lhs))
6809 : changed = true;
6810 55543966 : if (*lhs)
6811 : {
6812 22446363 : combined_fn cfn = gimple_call_combined_fn (call);
6813 22446363 : internal_fn ifn = associated_internal_fn (cfn, TREE_TYPE (*lhs));
6814 22446363 : int opno = first_commutative_argument (ifn);
6815 22446363 : if (opno >= 0)
6816 : {
6817 354162 : tree arg1 = gimple_call_arg (call, opno);
6818 354162 : tree arg2 = gimple_call_arg (call, opno + 1);
6819 354162 : if (tree_swap_operands_p (arg1, arg2))
6820 : {
6821 22265 : gimple_call_set_arg (call, opno, arg2);
6822 22265 : gimple_call_set_arg (call, opno + 1, arg1);
6823 22265 : changed = true;
6824 : }
6825 : }
6826 : }
6827 : break;
6828 : }
6829 585526 : case GIMPLE_ASM:
6830 585526 : {
6831 585526 : gasm *asm_stmt = as_a <gasm *> (stmt);
6832 1231121 : for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
6833 : {
6834 645595 : tree link = gimple_asm_output_op (asm_stmt, i);
6835 645595 : tree op = TREE_VALUE (link);
6836 645595 : if (REFERENCE_CLASS_P (op)
6837 645595 : && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
6838 : changed = true;
6839 : }
6840 985409 : for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
6841 : {
6842 399883 : tree link = gimple_asm_input_op (asm_stmt, i);
6843 399883 : tree op = TREE_VALUE (link);
6844 399883 : if ((REFERENCE_CLASS_P (op)
6845 385620 : || TREE_CODE (op) == ADDR_EXPR)
6846 433380 : && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
6847 : changed = true;
6848 : }
6849 : }
6850 : break;
6851 374431663 : case GIMPLE_DEBUG:
6852 374431663 : if (gimple_debug_bind_p (stmt))
6853 : {
6854 288124844 : tree *val = gimple_debug_bind_get_value_ptr (stmt);
6855 288124844 : if (*val
6856 163951660 : && (REFERENCE_CLASS_P (*val)
6857 161494848 : || TREE_CODE (*val) == ADDR_EXPR)
6858 337187003 : && maybe_canonicalize_mem_ref_addr (val, true))
6859 : changed = true;
6860 : }
6861 : break;
6862 44561893 : case GIMPLE_COND:
6863 44561893 : {
6864 : /* Canonicalize operand order. */
6865 44561893 : tree lhs = gimple_cond_lhs (stmt);
6866 44561893 : tree rhs = gimple_cond_rhs (stmt);
6867 44561893 : if (tree_swap_operands_p (lhs, rhs))
6868 : {
6869 1411713 : gcond *gc = as_a <gcond *> (stmt);
6870 1411713 : gimple_cond_set_lhs (gc, rhs);
6871 1411713 : gimple_cond_set_rhs (gc, lhs);
6872 1411713 : gimple_cond_set_code (gc,
6873 : swap_tree_comparison (gimple_cond_code (gc)));
6874 1411713 : changed = true;
6875 : }
6876 : }
6877 743738988 : default:;
6878 : }
6879 :
6880 : /* Dispatch to pattern-based folding. */
6881 743738988 : if (!inplace
6882 3119096 : || is_gimple_assign (stmt)
6883 744618440 : || gimple_code (stmt) == GIMPLE_COND)
6884 : {
6885 744348253 : gimple_seq seq = NULL;
6886 744348253 : gimple_match_op res_op;
6887 1486456862 : if (gimple_simplify (stmt, &res_op, inplace ? NULL : &seq,
6888 : valueize, valueize)
6889 744348253 : && replace_stmt_with_simplification (gsi, &res_op, &seq, inplace,
6890 : dce_worklist))
6891 : changed = true;
6892 : else
6893 739787504 : gimple_seq_discard (seq);
6894 : }
6895 :
6896 745227705 : stmt = gsi_stmt (*gsi);
6897 :
6898 : /* Fold the main computation performed by the statement. */
6899 745227705 : switch (gimple_code (stmt))
6900 : {
6901 250354266 : case GIMPLE_ASSIGN:
6902 250354266 : {
6903 : /* Try to canonicalize for boolean-typed X the comparisons
6904 : X == 0, X == 1, X != 0, and X != 1. */
6905 250354266 : if (gimple_assign_rhs_code (stmt) == EQ_EXPR
6906 250354266 : || gimple_assign_rhs_code (stmt) == NE_EXPR)
6907 : {
6908 3240720 : tree lhs = gimple_assign_lhs (stmt);
6909 3240720 : tree op1 = gimple_assign_rhs1 (stmt);
6910 3240720 : tree op2 = gimple_assign_rhs2 (stmt);
6911 3240720 : tree type = TREE_TYPE (op1);
6912 :
6913 : /* Check whether the comparison operands are of the same boolean
6914 : type as the result type is.
6915 : Check that second operand is an integer-constant with value
6916 : one or zero. */
6917 3240720 : if (TREE_CODE (op2) == INTEGER_CST
6918 2214793 : && (integer_zerop (op2) || integer_onep (op2))
6919 4909273 : && useless_type_conversion_p (TREE_TYPE (lhs), type))
6920 : {
6921 4696 : enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
6922 4696 : bool is_logical_not = false;
6923 :
	  /* X == 0 and X != 1 is a logical-not of X
6925 : X == 1 and X != 0 is X */
6926 3967 : if ((cmp_code == EQ_EXPR && integer_zerop (op2))
6927 4696 : || (cmp_code == NE_EXPR && integer_onep (op2)))
6928 4654 : is_logical_not = true;
6929 :
6930 4696 : if (is_logical_not == false)
6931 42 : gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
6932 : /* Only for one-bit precision typed X the transformation
	     !X -> ~X is valid.  */
6934 4654 : else if (TYPE_PRECISION (type) == 1)
6935 4654 : gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
6936 : /* Otherwise we use !X -> X ^ 1. */
6937 : else
6938 0 : gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
6939 : build_int_cst (type, 1));
6940 : changed = true;
6941 : break;
6942 : }
6943 : }
6944 :
6945 250349570 : unsigned old_num_ops = gimple_num_ops (stmt);
6946 250349570 : tree lhs = gimple_assign_lhs (stmt);
6947 250349570 : tree new_rhs = fold_gimple_assign (gsi);
6948 250349570 : if (new_rhs
6949 250468891 : && !useless_type_conversion_p (TREE_TYPE (lhs),
6950 119321 : TREE_TYPE (new_rhs)))
6951 0 : new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
6952 250349570 : if (new_rhs
6953 250349570 : && (!inplace
6954 987 : || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
6955 : {
6956 119321 : gimple_assign_set_rhs_from_tree (gsi, new_rhs);
6957 119321 : changed = true;
6958 : }
6959 : break;
6960 : }
6961 :
6962 55489080 : case GIMPLE_CALL:
6963 55489080 : changed |= gimple_fold_call (gsi, inplace);
6964 55489080 : break;
6965 :
6966 374431663 : case GIMPLE_DEBUG:
6967 374431663 : if (gimple_debug_bind_p (stmt))
6968 : {
6969 288124844 : tree val = gimple_debug_bind_get_value (stmt);
6970 288124844 : if (val && REFERENCE_CLASS_P (val))
6971 : {
6972 2454902 : tree tem = maybe_fold_reference (val);
6973 2454902 : if (tem)
6974 : {
6975 1588 : gimple_debug_bind_set_value (stmt, tem);
6976 1588 : changed = true;
6977 : }
6978 : }
6979 : }
6980 : break;
6981 :
6982 10561219 : case GIMPLE_RETURN:
6983 10561219 : {
6984 10561219 : greturn *ret_stmt = as_a<greturn *> (stmt);
6985 10561219 : tree ret = gimple_return_retval(ret_stmt);
6986 :
6987 10561219 : if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
6988 : {
6989 4477538 : tree val = valueize (ret);
6990 4477538 : if (val && val != ret
6991 4477538 : && may_propagate_copy (ret, val))
6992 : {
6993 0 : gimple_return_set_retval (ret_stmt, val);
6994 0 : changed = true;
6995 : }
6996 : }
6997 : }
6998 : break;
6999 :
7000 745227705 : default:;
7001 : }
7002 :
7003 745227705 : stmt = gsi_stmt (*gsi);
7004 :
7005 745227705 : fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
7006 745227705 : return changed;
7007 : }
7008 :
/* Valueization callback that ends up not following SSA edges.  */
7010 :
tree
no_follow_ssa_edges (tree)
{
  /* By refusing to valueize every SSA name, this callback restricts
     gimple_simplify to the statement it was handed — no SSA def chains
     are followed.  */
  return NULL_TREE;
}
7016 :
7017 : /* Valueization callback that ends up following single-use SSA edges only. */
7018 :
7019 : tree
7020 873421770 : follow_single_use_edges (tree val)
7021 : {
7022 873421770 : if (TREE_CODE (val) == SSA_NAME
7023 873421770 : && !has_single_use (val))
7024 449020717 : return NULL_TREE;
7025 : return val;
7026 : }
7027 :
7028 : /* Valueization callback that follows all SSA edges. */
7029 :
tree
follow_all_ssa_edges (tree val)
{
  /* Returning VAL itself tells the simplifier that every SSA definition
     may be looked through.  */
  return val;
}
7035 :
7036 : /* Fold the statement pointed to by GSI. In some cases, this function may
7037 : replace the whole statement with a new one. Returns true iff folding
7038 : makes any changes.
7039 : The statement pointed to by GSI should be in valid gimple form but may
7040 : be in unfolded state as resulting from for example constant propagation
7041 : which can produce *&x = 0. */
7042 :
7043 : bool
7044 147255163 : fold_stmt (gimple_stmt_iterator *gsi, bitmap dce_bitmap)
7045 : {
7046 147255163 : return fold_stmt_1 (gsi, false, no_follow_ssa_edges, dce_bitmap);
7047 : }
7048 :
7049 : bool
7050 594853446 : fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree), bitmap dce_bitmap)
7051 : {
7052 594853446 : return fold_stmt_1 (gsi, false, valueize, dce_bitmap);
7053 : }
7054 :
7055 : /* Perform the minimal folding on statement *GSI. Only operations like
7056 : *&x created by constant propagation are handled. The statement cannot
7057 : be replaced with a new one. Return true if the statement was
7058 : changed, false otherwise.
7059 : The statement *GSI should be in valid gimple form but may
7060 : be in unfolded state as resulting from for example constant propagation
7061 : which can produce *&x = 0. */
7062 :
7063 : bool
7064 3119096 : fold_stmt_inplace (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
7065 : {
7066 3119096 : gimple *stmt = gsi_stmt (*gsi);
7067 3119096 : bool changed = fold_stmt_1 (gsi, true, valueize);
7068 3119096 : gcc_assert (gsi_stmt (*gsi) == stmt);
7069 3119096 : return changed;
7070 : }
7071 :
7072 : /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
7073 : if EXPR is null or we don't know how.
7074 : If non-null, the result always has boolean type. */
7075 :
7076 : static tree
7077 261935 : canonicalize_bool (tree expr, bool invert)
7078 : {
7079 261935 : if (!expr)
7080 : return NULL_TREE;
7081 50 : else if (invert)
7082 : {
7083 36 : if (integer_nonzerop (expr))
7084 0 : return boolean_false_node;
7085 36 : else if (integer_zerop (expr))
7086 0 : return boolean_true_node;
7087 36 : else if (TREE_CODE (expr) == SSA_NAME)
7088 0 : return fold_build2 (EQ_EXPR, boolean_type_node, expr,
7089 : build_int_cst (TREE_TYPE (expr), 0));
7090 36 : else if (COMPARISON_CLASS_P (expr))
7091 36 : return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
7092 : boolean_type_node,
7093 : TREE_OPERAND (expr, 0),
7094 : TREE_OPERAND (expr, 1));
7095 : else
7096 : return NULL_TREE;
7097 : }
7098 : else
7099 : {
7100 14 : if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
7101 : return expr;
7102 0 : if (integer_nonzerop (expr))
7103 0 : return boolean_true_node;
7104 0 : else if (integer_zerop (expr))
7105 0 : return boolean_false_node;
7106 0 : else if (TREE_CODE (expr) == SSA_NAME)
7107 0 : return fold_build2 (NE_EXPR, boolean_type_node, expr,
7108 : build_int_cst (TREE_TYPE (expr), 0));
7109 0 : else if (COMPARISON_CLASS_P (expr))
7110 0 : return fold_build2 (TREE_CODE (expr),
7111 : boolean_type_node,
7112 : TREE_OPERAND (expr, 0),
7113 : TREE_OPERAND (expr, 1));
7114 : else
7115 : return NULL_TREE;
7116 : }
7117 : }
7118 :
7119 : /* Check to see if a boolean expression EXPR is logically equivalent to the
7120 : comparison (OP1 CODE OP2). Check for various identities involving
7121 : SSA_NAMEs. */
7122 :
static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple *s;

  /* The obvious case: EXPR is literally (OP1 CODE OP2).  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      /* A boolean NAME equals (NAME != 0) and (NAME == nonzero).  */
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      /* Otherwise look at EXPR's defining assignment.  */
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  /* (name != 0) / (name == nonzero): same polarity, recurse with
	     name's own comparison.  */
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  /* (name == 0) / (name != nonzero): inverted polarity, recurse
	     with the inverted comparison code.  */
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }
  return false;
}
7176 :
7177 : /* Check to see if two boolean expressions OP1 and OP2 are logically
7178 : equivalent. */
7179 :
7180 : static bool
7181 15 : same_bool_result_p (const_tree op1, const_tree op2)
7182 : {
7183 : /* Simple cases first. */
7184 15 : if (operand_equal_p (op1, op2, 0))
7185 : return true;
7186 :
7187 : /* Check the cases where at least one of the operands is a comparison.
7188 : These are a bit smarter than operand_equal_p in that they apply some
7189 : identifies on SSA_NAMEs. */
7190 8 : if (COMPARISON_CLASS_P (op2)
7191 16 : && same_bool_comparison_p (op1, TREE_CODE (op2),
7192 8 : TREE_OPERAND (op2, 0),
7193 8 : TREE_OPERAND (op2, 1)))
7194 : return true;
7195 8 : if (COMPARISON_CLASS_P (op1)
7196 16 : && same_bool_comparison_p (op2, TREE_CODE (op1),
7197 8 : TREE_OPERAND (op1, 0),
7198 8 : TREE_OPERAND (op1, 1)))
7199 : return true;
7200 :
7201 : /* Default case. */
7202 : return false;
7203 : }
7204 :
7205 : /* Forward declarations for some mutually recursive functions. */
7206 :
7207 : static tree
7208 : and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
7209 : enum tree_code code2, tree op2a, tree op2b, basic_block);
7210 : static tree
7211 : and_var_with_comparison (tree type, tree var, bool invert,
7212 : enum tree_code code2, tree op2a, tree op2b,
7213 : basic_block);
7214 : static tree
7215 : and_var_with_comparison_1 (tree type, gimple *stmt,
7216 : enum tree_code code2, tree op2a, tree op2b,
7217 : basic_block);
7218 : static tree
7219 : or_comparisons_1 (tree, enum tree_code code1, tree op1a, tree op1b,
7220 : enum tree_code code2, tree op2a, tree op2b,
7221 : basic_block);
7222 : static tree
7223 : or_var_with_comparison (tree, tree var, bool invert,
7224 : enum tree_code code2, tree op2a, tree op2b,
7225 : basic_block);
7226 : static tree
7227 : or_var_with_comparison_1 (tree, gimple *stmt,
7228 : enum tree_code code2, tree op2a, tree op2b,
7229 : basic_block);
7230 :
7231 : /* Helper function for and_comparisons_1: try to simplify the AND of the
7232 : ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
7233 : If INVERT is true, invert the value of the VAR before doing the AND.
7234 : Return NULL_EXPR if we can't simplify this to a single expression. */
7235 :
7236 : static tree
7237 223501 : and_var_with_comparison (tree type, tree var, bool invert,
7238 : enum tree_code code2, tree op2a, tree op2b,
7239 : basic_block outer_cond_bb)
7240 : {
7241 223501 : tree t;
7242 223501 : gimple *stmt = SSA_NAME_DEF_STMT (var);
7243 :
7244 : /* We can only deal with variables whose definitions are assignments. */
7245 223501 : if (!is_gimple_assign (stmt))
7246 : return NULL_TREE;
7247 :
7248 : /* If we have an inverted comparison, apply DeMorgan's law and rewrite
7249 : !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
7250 : Then we only have to consider the simpler non-inverted cases. */
7251 223099 : if (invert)
7252 83406 : t = or_var_with_comparison_1 (type, stmt,
7253 : invert_tree_comparison (code2, false),
7254 : op2a, op2b, outer_cond_bb);
7255 : else
7256 139693 : t = and_var_with_comparison_1 (type, stmt, code2, op2a, op2b,
7257 : outer_cond_bb);
7258 223099 : return canonicalize_bool (t, invert);
7259 : }
7260 :
7261 : /* Try to simplify the AND of the ssa variable defined by the assignment
7262 : STMT with the comparison specified by (OP2A CODE2 OP2B).
7263 : Return NULL_EXPR if we can't simplify this to a single expression. */
7264 :
static tree
and_var_with_comparison_1 (tree type, gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b,
			   basic_block outer_cond_bb)
{
  tree var = gimple_assign_lhs (stmt);
  /* If (OP2A CODE2 OP2B) amounts to testing OP2A for truth (resp.
     falsehood), TRUE_TEST_VAR (resp. FALSE_TEST_VAR) is set to OP2A.  */
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  /* (op2a != 0) / (op2a == 1): a truth test of op2a.  */
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  /* (op2a == 0) / (op2a != 1): a falsehood test of op2a.  */
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (type, innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2,
				  op2a,
				  op2b, outer_cond_bb);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (type, inner2, false, code2, op2a,
					   op2b, outer_cond_bb));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (type, inner1, false, code2, op2a,
					   op2b, outer_cond_bb));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b)) */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b,
					      outer_cond_bb)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b))) */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b,
					      outer_cond_bb)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t) */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case. where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial) */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  /* No simplification was found.  */
  return NULL_TREE;
}
7429 : /* Try to simplify the AND of two comparisons defined by
7430 : (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
7431 : If this can be done without constructing an intermediate value,
7432 : return the resulting tree; otherwise NULL_TREE is returned.
7433 : This function is deliberately asymmetric as it recurses on SSA_DEFs
7434 : in the first comparison but not the second. */
7435 :
static tree
and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b,
		   basic_block outer_cond_bb)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* INVERT is true for the falsehood tests (NAME == 0) / (NAME != 1).  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (type, op1a, invert, code2, op2a,
					  op2b, outer_cond_bb);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      /* A false arg makes the AND's value the second
			 comparison; any other constant arg must agree with
			 the result accumulated so far.  */
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
						       code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = and_var_with_comparison (type, arg, invert, code2,
						      op2a, op2b,
						      outer_cond_bb);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }
	  /* Non-boolean PHIs fall through to the default case.  */

	default:
	  break;
	}
    }
  return NULL_TREE;
}
7555 :
/* Basic block holding the outer condition; only definitions in or
   dominating it may be followed by follow_outer_ssa_edges.  */
static basic_block fosa_bb;
/* Stack of SSA names whose flow-sensitive info was temporarily cleared
   by follow_outer_ssa_edges; the caller restores each entry afterwards.  */
static vec<std::pair<tree, flow_sensitive_info_storage> > *fosa_unwind;

/* Valueization callback restricted to FOSA_BB: follow an SSA edge only
   when doing so cannot introduce undefined overflow, clearing (and
   recording for later restore) flow-sensitive info of names whose
   definitions do not dominate FOSA_BB.  */
static tree
follow_outer_ssa_edges (tree val)
{
  if (TREE_CODE (val) == SSA_NAME
      && !SSA_NAME_IS_DEFAULT_DEF (val))
    {
      basic_block def_bb = gimple_bb (SSA_NAME_DEF_STMT (val));
      if (!def_bb
	  || def_bb == fosa_bb
	  || (dom_info_available_p (CDI_DOMINATORS)
	      && (def_bb == fosa_bb
		  || dominated_by_p (CDI_DOMINATORS, fosa_bb, def_bb))))
	return val;
      /* We cannot temporarily rewrite stmts with undefined overflow
	 behavior, so avoid expanding them.  */
      if ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (val))
	   || POINTER_TYPE_P (TREE_TYPE (val)))
	  && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (val)))
	return NULL_TREE;
      flow_sensitive_info_storage storage;
      storage.save_and_clear (val);
      /* If the definition does not dominate fosa_bb temporarily reset
	 flow-sensitive info.  */
      fosa_unwind->safe_push (std::make_pair (val, storage));
      return val;
    }
  return val;
}
7586 :
7587 : /* Helper function for maybe_fold_and_comparisons and maybe_fold_or_comparisons
7588 : : try to simplify the AND/OR of the ssa variable VAR with the comparison
7589 : specified by (OP2A CODE2 OP2B) from match.pd. Return NULL_EXPR if we can't
7590 : simplify this to a single expression. As we are going to lower the cost
7591 : of building SSA names / gimple stmts significantly, we need to allocate
   them on the stack.  This will cause the code to be a bit ugly.  */
7593 :
static tree
maybe_fold_comparisons_from_match_pd (tree type, enum tree_code code,
				      enum tree_code code1,
				      tree op1a, tree op1b,
				      enum tree_code code2, tree op2a,
				      tree op2b,
				      basic_block outer_cond_bb)
{
  /* Allocate gimple stmt1 on the stack.  It is never inserted into the
     IL (its bb stays NULL); it only feeds gimple_simplify via op below.  */
  gassign *stmt1
    = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
  gimple_init (stmt1, GIMPLE_ASSIGN, 3);
  gimple_assign_set_rhs_code (stmt1, code1);
  gimple_assign_set_rhs1 (stmt1, op1a);
  gimple_assign_set_rhs2 (stmt1, op1b);
  gimple_set_bb (stmt1, NULL);

  /* Allocate gimple stmt2 on the stack.  */
  gassign *stmt2
    = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
  gimple_init (stmt2, GIMPLE_ASSIGN, 3);
  gimple_assign_set_rhs_code (stmt2, code2);
  gimple_assign_set_rhs1 (stmt2, op2a);
  gimple_assign_set_rhs2 (stmt2, op2b);
  gimple_set_bb (stmt2, NULL);

  /* Allocate SSA names(lhs1) on the stack.  */
  alignas (tree_node) unsigned char lhs1buf[sizeof (tree_ssa_name)];
  tree lhs1 = (tree) &lhs1buf[0];
  memset (lhs1, 0, sizeof (tree_ssa_name));
  TREE_SET_CODE (lhs1, SSA_NAME);
  TREE_TYPE (lhs1) = type;
  init_ssa_name_imm_use (lhs1);

  /* Allocate SSA names(lhs2) on the stack.  */
  alignas (tree_node) unsigned char lhs2buf[sizeof (tree_ssa_name)];
  tree lhs2 = (tree) &lhs2buf[0];
  memset (lhs2, 0, sizeof (tree_ssa_name));
  TREE_SET_CODE (lhs2, SSA_NAME);
  TREE_TYPE (lhs2) = type;
  init_ssa_name_imm_use (lhs2);

  gimple_assign_set_lhs (stmt1, lhs1);
  gimple_assign_set_lhs (stmt2, lhs2);

  /* Combine the two fake defs as (lhs1 CODE lhs2) and ask match.pd to
     resimplify it.  With an outer condition block, follow SSA edges only
     flow-sensitively via follow_outer_ssa_edges.  */
  gimple_match_op op (gimple_match_cond::UNCOND, code,
		      type, gimple_assign_lhs (stmt1),
		      gimple_assign_lhs (stmt2));
  fosa_bb = outer_cond_bb;
  auto_vec<std::pair<tree, flow_sensitive_info_storage>, 8> unwind_stack;
  fosa_unwind = &unwind_stack;
  if (op.resimplify (NULL, (!outer_cond_bb
			    ? follow_all_ssa_edges : follow_outer_ssa_edges)))
    {
      /* Restore the flow-sensitive info cleared while following edges.  */
      fosa_unwind = NULL;
      for (auto p : unwind_stack)
	p.second.restore (p.first);
      if (gimple_simplified_result_is_gimple_val (&op))
	{
	  tree res = op.ops[0];
	  /* A result equal to one of the stack-allocated fake LHSs means
	     "just that comparison"; rebuild it from the real operands.  */
	  if (res == lhs1)
	    return build2 (code1, type, op1a, op1b);
	  else if (res == lhs2)
	    return build2 (code2, type, op2a, op2b);
	  else
	    return res;
	}
      else if (op.code.is_tree_code ()
	       && TREE_CODE_CLASS ((tree_code)op.code) == tcc_comparison)
	{
	  tree op0 = op.ops[0];
	  tree op1 = op.ops[1];
	  /* The fake LHSs must not escape into the returned tree.  */
	  if (op0 == lhs1 || op0 == lhs2 || op1 == lhs1 || op1 == lhs2)
	    return NULL_TREE; /* not simple */

	  return build2 ((enum tree_code)op.code, op.type, op0, op1);
	}
    }
  fosa_unwind = NULL;
  for (auto p : unwind_stack)
    p.second.restore (p.first);

  return NULL_TREE;
}
7678 :
7679 : /* Return TRUE and set op[0] if T, following all SSA edges, is a type
7680 : conversion. Reject loads if LOAD is NULL, otherwise set *LOAD if a
7681 : converting load is found. */
7682 :
static bool
gimple_convert_def_p (tree t, tree op[1], gimple **load = NULL)
{
  bool ret = false;

  if (TREE_CODE (t) == SSA_NAME
      && !SSA_NAME_IS_DEFAULT_DEF (t))
    if (gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (t)))
      {
	bool load_p = gimple_assign_load_p (def);
	/* Loads are rejected outright unless the caller can accept one
	   through LOAD.  */
	if (load_p && !load)
	  return false;
	switch (gimple_assign_rhs_code (def))
	  {
	  CASE_CONVERT:
	    op[0] = gimple_assign_rhs1 (def);
	    ret = true;
	    break;

	  case VIEW_CONVERT_EXPR:
	    /* For a VIEW_CONVERT_EXPR the converted operand is wrapped
	       inside the rhs1 tree.  */
	    op[0] = TREE_OPERAND (gimple_assign_rhs1 (def), 0);
	    ret = true;
	    break;

	  default:
	    break;
	  }

	/* Report the converting load only when a conversion was found.  */
	if (ret && load_p)
	  *load = def;
      }

  return ret;
}
7717 :
7718 : /* Return TRUE and set op[*] if T, following all SSA edges, resolves to a
7719 : binary expression with code CODE. */
7720 :
7721 : static bool
7722 1799422 : gimple_binop_def_p (enum tree_code code, tree t, tree op[2])
7723 : {
7724 1799422 : if (TREE_CODE (t) == SSA_NAME
7725 1799422 : && !SSA_NAME_IS_DEFAULT_DEF (t))
7726 1004241 : if (gimple *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (t)))
7727 934333 : if (gimple_assign_rhs_code (def) == code)
7728 : {
7729 37175 : op[0] = gimple_assign_rhs1 (def);
7730 37175 : op[1] = gimple_assign_rhs2 (def);
7731 37175 : return true;
7732 : }
7733 : return false;
7734 : }
7735 : /* Subroutine for fold_truth_andor_1: decode a field reference.
7736 :
7737 : If *PEXP is a comparison reference, we return the innermost reference.
7738 :
7739 : *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
7740 : set to the starting bit number.
7741 :
7742 : *PVOLATILEP is set to 1 if the any expression encountered is volatile;
7743 : otherwise it is not changed.
7744 :
7745 : *PUNSIGNEDP is set to the signedness of the field.
7746 :
7747 : *PREVERSEP is set to the storage order of the field.
7748 :
7749 : *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any. If
7750 : *PAND_MASK is initially set to a mask with nonzero precision, that mask is
7751 : combined with the found mask, or adjusted in precision to match.
7752 :
7753 : *PSIGNBIT is set to TRUE if, before clipping to *PBITSIZE, the mask
7754 : encompassed bits that corresponded to extensions of the sign bit.
7755 :
7756 : *PXORP is to be FALSE if EXP might be a XOR used in a compare, in which
7757 : case, if PXOR_CMP_OP is a zero constant, it will be overridden with *PEXP,
7758 : *PXORP will be set to TRUE, *PXOR_AND_MASK will be copied from *PAND_MASK,
7759 : and the left-hand operand of the XOR will be decoded. If *PXORP is TRUE,
7760 : PXOR_CMP_OP and PXOR_AND_MASK are supposed to be NULL, and then the
7761 : right-hand operand of the XOR will be decoded.
7762 :
7763 : *LOAD is set to the load stmt of the innermost reference, if any,
7764 : *and NULL otherwise.
7765 :
7766 : LOC[0..3] are filled in as conversion, masking, shifting and loading
7767 : operations are located.
7768 :
7769 : Return 0 if this is not a component reference or is one that we can't
7770 : do anything with. */
7771 :
7772 : static tree
7773 636998 : decode_field_reference (tree *pexp, HOST_WIDE_INT *pbitsize,
7774 : HOST_WIDE_INT *pbitpos,
7775 : bool *punsignedp, bool *preversep, bool *pvolatilep,
7776 : wide_int *pand_mask, bool *psignbit,
7777 : bool *pxorp, tree *pxor_cmp_op, wide_int *pxor_and_mask,
7778 : gimple **pload, location_t loc[4])
7779 : {
7780 636998 : tree exp = *pexp;
7781 636998 : tree outer_type = 0;
7782 636998 : wide_int and_mask;
7783 636998 : tree inner, offset;
7784 636998 : int shiftrt = 0;
7785 636998 : tree res_ops[2];
7786 636998 : machine_mode mode;
7787 636998 : bool convert_before_shift = false;
7788 636998 : bool signbit = false;
7789 636998 : bool xorp = false;
7790 636998 : tree xor_cmp_op;
7791 636998 : wide_int xor_and_mask;
7792 636998 : gimple *load = NULL;
7793 :
7794 : /* All the optimizations using this function assume integer fields.
7795 : There are problems with FP fields since the type_for_size call
7796 : below can fail for, e.g., XFmode. */
7797 636998 : if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
7798 : return NULL_TREE;
7799 :
7800 : /* Drop casts, saving only the outermost type, effectively used in
7801 : the compare. We can deal with at most one conversion, and it may
7802 : appear at various points in the chain of recognized preparation
7803 : statements. Earlier optimizers will often have already dropped
7804 : unneeded extensions, but they may survive, as in PR118046. ???
7805 : Can we do better and allow multiple conversions, perhaps taking
7806 : note of the narrowest intermediate type, sign extensions and
7807 : whatnot? */
7808 599808 : if (!outer_type && gimple_convert_def_p (exp, res_ops))
7809 : {
7810 10574 : outer_type = TREE_TYPE (exp);
7811 10574 : loc[0] = gimple_location (SSA_NAME_DEF_STMT (exp));
7812 10574 : exp = res_ops[0];
7813 : }
7814 :
7815 : /* Recognize and save a masking operation. Combine it with an
7816 : incoming mask. */
7817 599808 : if (gimple_binop_def_p (BIT_AND_EXPR, exp, res_ops)
7818 599808 : && TREE_CODE (res_ops[1]) == INTEGER_CST)
7819 : {
7820 22935 : loc[1] = gimple_location (SSA_NAME_DEF_STMT (exp));
7821 22935 : exp = res_ops[0];
7822 22935 : and_mask = wi::to_wide (res_ops[1]);
7823 22935 : unsigned prec_in = pand_mask->get_precision ();
7824 22935 : if (prec_in)
7825 : {
7826 52 : unsigned prec_op = and_mask.get_precision ();
7827 52 : if (prec_in >= prec_op)
7828 : {
7829 52 : if (prec_in > prec_op)
7830 0 : and_mask = wide_int::from (and_mask, prec_in, UNSIGNED);
7831 52 : and_mask &= *pand_mask;
7832 : }
7833 : else
7834 0 : and_mask &= wide_int::from (*pand_mask, prec_op, UNSIGNED);
7835 : }
7836 : }
7837 : else
7838 576873 : and_mask = *pand_mask;
7839 :
7840 : /* Turn (a ^ b) [!]= 0 into a [!]= b. */
7841 599808 : if (pxorp && gimple_binop_def_p (BIT_XOR_EXPR, exp, res_ops))
7842 : {
7843 : /* No location recorded for this one, it's entirely subsumed by the
7844 : compare. */
7845 8306 : if (*pxorp)
7846 : {
7847 4150 : exp = res_ops[1];
7848 4150 : gcc_checking_assert (!pxor_cmp_op && !pxor_and_mask);
7849 : }
7850 4156 : else if (!pxor_cmp_op)
7851 : /* Not much we can do when xor appears in the right-hand compare
7852 : operand. */
7853 : return NULL_TREE;
7854 4154 : else if (integer_zerop (*pxor_cmp_op))
7855 : {
7856 4150 : xorp = true;
7857 4150 : exp = res_ops[0];
7858 4150 : xor_cmp_op = *pexp;
7859 4150 : xor_and_mask = *pand_mask;
7860 : }
7861 : }
7862 :
7863 : /* Another chance to drop conversions. */
7864 599806 : if (!outer_type && gimple_convert_def_p (exp, res_ops))
7865 : {
7866 706 : outer_type = TREE_TYPE (exp);
7867 706 : loc[0] = gimple_location (SSA_NAME_DEF_STMT (exp));
7868 706 : exp = res_ops[0];
7869 : }
7870 :
7871 : /* Take note of shifts. */
7872 599806 : if (gimple_binop_def_p (RSHIFT_EXPR, exp, res_ops)
7873 599806 : && TREE_CODE (res_ops[1]) == INTEGER_CST)
7874 : {
7875 258 : loc[2] = gimple_location (SSA_NAME_DEF_STMT (exp));
7876 258 : exp = res_ops[0];
7877 258 : if (!tree_fits_shwi_p (res_ops[1]))
7878 : return NULL_TREE;
7879 258 : shiftrt = tree_to_shwi (res_ops[1]);
7880 258 : if (shiftrt <= 0)
7881 : return NULL_TREE;
7882 : }
7883 :
7884 : /* Yet another chance to drop conversions. This one is allowed to
7885 : match a converting load, subsuming the load identification block
7886 : below. */
7887 599806 : if (!outer_type && gimple_convert_def_p (exp, res_ops, &load))
7888 : {
7889 10 : outer_type = TREE_TYPE (exp);
7890 10 : loc[0] = gimple_location (SSA_NAME_DEF_STMT (exp));
7891 10 : if (load)
7892 0 : loc[3] = gimple_location (load);
7893 10 : exp = res_ops[0];
7894 : /* This looks backwards, but we're going back the def chain, so if we
7895 : find the conversion here, after finding a shift, that's because the
7896 : convert appears before the shift, and we should thus adjust the bit
7897 : pos and size because of the shift after adjusting it due to type
7898 : conversion. */
7899 10 : convert_before_shift = true;
7900 : }
7901 :
7902 : /* Identify the load, if there is one. */
7903 599806 : if (!load && TREE_CODE (exp) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (exp))
7904 : {
7905 334160 : gimple *def = SSA_NAME_DEF_STMT (exp);
7906 334160 : if (gimple_assign_load_p (def))
7907 : {
7908 214538 : loc[3] = gimple_location (def);
7909 214538 : load = def;
7910 214538 : exp = gimple_assign_rhs1 (def);
7911 : }
7912 : }
7913 :
7914 : /* Identify the relevant bits. */
7915 599806 : poly_int64 poly_bitsize, poly_bitpos;
7916 599806 : int unsignedp, reversep = *preversep, volatilep = *pvolatilep;
7917 599806 : inner = get_inner_reference (exp, &poly_bitsize, &poly_bitpos, &offset,
7918 : &mode, &unsignedp, &reversep, &volatilep);
7919 :
7920 599806 : HOST_WIDE_INT bs, bp;
7921 599806 : if (!poly_bitsize.is_constant (&bs)
7922 599806 : || !poly_bitpos.is_constant (&bp)
7923 599806 : || bs <= shiftrt
7924 599806 : || offset != 0
7925 598631 : || TREE_CODE (inner) == PLACEHOLDER_EXPR
7926 : /* Reject out-of-bound accesses (PR79731, PR118514). */
7927 598631 : || !access_in_bounds_of_type_p (TREE_TYPE (inner), bs, bp)
7928 598605 : || (INTEGRAL_TYPE_P (TREE_TYPE (inner))
7929 409683 : && !type_has_mode_precision_p (TREE_TYPE (inner))))
7930 28781 : return NULL_TREE;
7931 :
7932 : /* Adjust shifts... */
7933 571025 : if (convert_before_shift
7934 571025 : && outer_type && bs > TYPE_PRECISION (outer_type))
7935 : {
7936 3 : HOST_WIDE_INT excess = bs - TYPE_PRECISION (outer_type);
7937 3 : if (reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
7938 0 : bp += excess;
7939 : bs -= excess;
7940 : }
7941 :
7942 571025 : if (shiftrt)
7943 : {
7944 : /* Punt if we're shifting by more than the loaded bitfield (after
7945 : adjustment), or if there's a shift after a change of signedness, punt.
7946 : When comparing this field with a constant, we'll check that the
7947 : constant is a proper sign- or zero-extension (depending on signedness)
7948 : of a value that would fit in the selected portion of the bitfield. A
7949 : shift after a change of signedness would make the extension
7950 : non-uniform, and we can't deal with that (yet ???). See
7951 : gcc.dg/field-merge-22.c for a test that would go wrong. */
7952 258 : if (bs <= shiftrt
7953 258 : || (convert_before_shift
7954 10 : && outer_type && unsignedp != TYPE_UNSIGNED (outer_type)))
7955 : return NULL_TREE;
7956 250 : if (!reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
7957 250 : bp += shiftrt;
7958 250 : bs -= shiftrt;
7959 : }
7960 :
7961 : /* ... and bit position. */
7962 571017 : if (!convert_before_shift
7963 571017 : && outer_type && bs > TYPE_PRECISION (outer_type))
7964 : {
7965 5210 : HOST_WIDE_INT excess = bs - TYPE_PRECISION (outer_type);
7966 5210 : if (reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
7967 0 : bp += excess;
7968 : bs -= excess;
7969 : }
7970 :
7971 : /* If the number of bits in the reference is the same as the bitsize of
7972 : the outer type, then the outer type gives the signedness. Otherwise
7973 : (in case of a small bitfield) the signedness is unchanged. */
7974 571017 : if (outer_type && bs == TYPE_PRECISION (outer_type))
7975 8793 : unsignedp = TYPE_UNSIGNED (outer_type);
7976 :
7977 : /* Make the mask the expected width. */
7978 571017 : if (and_mask.get_precision () != 0)
7979 : {
7980 : /* If the AND_MASK encompasses bits that would be extensions of
7981 : the sign bit, set SIGNBIT. */
7982 26648 : if (!unsignedp
7983 2624 : && and_mask.get_precision () > bs
7984 29314 : && (and_mask & wi::mask (bs, true, and_mask.get_precision ())) != 0)
7985 : signbit = true;
7986 26648 : and_mask = wide_int::from (and_mask, bs, UNSIGNED);
7987 : }
7988 :
7989 571017 : *pexp = exp;
7990 571017 : *pload = load;
7991 571017 : *pbitsize = bs;
7992 571017 : *pbitpos = bp;
7993 571017 : *punsignedp = unsignedp;
7994 571017 : *preversep = reversep;
7995 571017 : *pvolatilep = volatilep;
7996 571017 : *psignbit = signbit;
7997 571017 : *pand_mask = and_mask;
7998 571017 : if (xorp)
7999 : {
8000 4150 : *pxorp = xorp;
8001 4150 : *pxor_cmp_op = xor_cmp_op;
8002 4150 : *pxor_and_mask = xor_and_mask;
8003 : }
8004 :
8005 : return inner;
8006 636998 : }
8007 :
8008 : /* Return the one bitpos within bit extents L or R that is at an
8009 : ALIGN-bit alignment boundary, or -1 if there is more than one such
8010 : boundary, if there isn't any, or if there is any such boundary
8011 : between the extents. L and R are given by bitpos and bitsize. If
8012 : it doesn't return -1, there are two consecutive ALIGN-bit words
8013 : that contain both extents, and at least one of the extents
8014 : straddles across the returned alignment boundary. */
8015 :
8016 : static inline HOST_WIDE_INT
8017 28465 : compute_split_boundary_from_align (HOST_WIDE_INT align,
8018 : HOST_WIDE_INT l_bitpos,
8019 : HOST_WIDE_INT l_bitsize,
8020 : HOST_WIDE_INT r_bitpos,
8021 : HOST_WIDE_INT r_bitsize)
8022 : {
8023 28465 : HOST_WIDE_INT amask = ~(align - 1);
8024 :
8025 28465 : HOST_WIDE_INT first_bit = MIN (l_bitpos, r_bitpos);
8026 28465 : HOST_WIDE_INT end_bit = MAX (l_bitpos + l_bitsize, r_bitpos + r_bitsize);
8027 :
8028 28465 : HOST_WIDE_INT boundary = (end_bit - 1) & amask;
8029 :
8030 : /* Make sure we're crossing no more than one alignment boundary.
8031 :
8032 : ??? We don't have logic to recombine loads of two adjacent
8033 : fields that each crosses a different alignment boundary, so
8034 : as to load the middle word only once, if other words can't be
8035 : otherwise recombined. */
8036 28465 : if (boundary - first_bit > align)
8037 : return -1;
8038 :
8039 11184 : HOST_WIDE_INT l_start_word = l_bitpos & amask;
8040 11184 : HOST_WIDE_INT l_end_word = (l_bitpos + l_bitsize - 1) & amask;
8041 :
8042 11184 : HOST_WIDE_INT r_start_word = r_bitpos & amask;
8043 11184 : HOST_WIDE_INT r_end_word = (r_bitpos + r_bitsize - 1) & amask;
8044 :
8045 : /* If neither field straddles across an alignment boundary, it's no
8046 : use to even try to merge them. */
8047 11184 : if (l_start_word == l_end_word && r_start_word == r_end_word)
8048 10877 : return -1;
8049 :
8050 : return boundary;
8051 : }
8052 :
8053 : /* Make a bit_field_ref. If POINT is NULL, return the BIT_FIELD_REF.
8054 : Otherwise, build and insert a load stmt before POINT, and return
8055 : the SSA_NAME. ??? Rewrite LOAD in terms of the bitfield? */
8056 :
static tree
make_bit_field_load (location_t loc, tree inner, tree orig_inner, tree type,
		     HOST_WIDE_INT bitsize, poly_int64 bitpos,
		     bool unsignedp, bool reversep, gimple *point)
{
  /* Default to the insertion point's location if none was supplied.  */
  if (point && loc == UNKNOWN_LOCATION)
    loc = gimple_location (point);

  tree ref = make_bit_field_ref (loc, unshare_expr (inner),
				 unshare_expr (orig_inner),
				 type, bitsize, bitpos,
				 unsignedp, reversep);
  /* Without an insertion point, just hand back the reference tree.  */
  if (!point)
    return ref;

  /* If we're remaking the same load, reuse the SSA NAME it is already loaded
     into.  */
  if (gimple_assign_load_p (point)
      && operand_equal_p (ref, gimple_assign_rhs1 (point)))
    {
      gcc_checking_assert (TREE_CODE (gimple_assign_lhs (point)) == SSA_NAME);
      return gimple_assign_lhs (point);
    }

  /* Gimplify the reference into a sequence of statements ending in an
     SSA_NAME holding the loaded value.  */
  gimple_seq stmts = NULL;
  tree ret = force_gimple_operand (ref, &stmts, true, NULL_TREE);

  /* We know the vuse is supposed to end up being the same as that at the
     original load at the insertion point, but if we don't set it, it will be a
     generic placeholder that only the global SSA update at the end of the pass
     would make equal, too late for us to use in further combinations.  So go
     ahead and copy the vuse.  */

  tree reaching_vuse = gimple_vuse (point);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
    }

  /* Insert the new load(s) right before POINT and return the SSA_NAME
     holding the result.  */
  gimple_stmt_iterator gsi = gsi_for_stmt (point);
  gsi_insert_seq_before (&gsi, stmts, GSI_SAME_STMT);
  return ret;
}
8103 :
8104 : /* Initialize ln_arg[0] and ln_arg[1] to a pair of newly-created (at
8105 : LOC) loads from INNER (from ORIG_INNER), of modes MODE and MODE2,
8106 : respectively, starting at BIT_POS, using reversed endianness if
8107 : REVERSEP. Also initialize BITPOS (the starting position of each
8108 : part into INNER), BITSIZ (the bit count starting at BITPOS),
8109 : TOSHIFT[1] (the amount by which the part and its mask are to be
8110 : shifted right to bring its least-significant bit to bit zero) and
8111 : SHIFTED (the amount by which the part, by separate loading, has
8112 : already been shifted right, but that the mask needs shifting to
8113 : match). */
8114 :
8115 : static inline void
8116 307 : build_split_load (tree /* out */ ln_arg[2],
8117 : HOST_WIDE_INT /* out */ bitpos[2],
8118 : HOST_WIDE_INT /* out */ bitsiz[2],
8119 : HOST_WIDE_INT /* in[0] out[0..1] */ toshift[2],
8120 : HOST_WIDE_INT /* out */ shifted[2],
8121 : location_t loc, tree inner, tree orig_inner,
8122 : scalar_int_mode mode, scalar_int_mode mode2,
8123 : HOST_WIDE_INT bit_pos, bool reversep,
8124 : gimple *point[2])
8125 : {
8126 307 : scalar_int_mode modes[2] = { mode, mode2 };
8127 307 : bitsiz[0] = GET_MODE_BITSIZE (mode);
8128 307 : bitsiz[1] = GET_MODE_BITSIZE (mode2);
8129 :
8130 921 : for (int i = 0; i < 2; i++)
8131 : {
8132 614 : tree type = lang_hooks.types.type_for_mode (modes[i], 1);
8133 614 : if (!type)
8134 : {
8135 0 : type = build_nonstandard_integer_type (bitsiz[0], 1);
8136 0 : gcc_assert (type);
8137 : }
8138 614 : bitpos[i] = bit_pos;
8139 1228 : ln_arg[i] = make_bit_field_load (loc, inner, orig_inner,
8140 614 : type, bitsiz[i],
8141 614 : bit_pos, 1, reversep, point[i]);
8142 614 : bit_pos += bitsiz[i];
8143 : }
8144 :
8145 307 : toshift[1] = toshift[0];
8146 307 : if (reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
8147 : {
8148 3 : shifted[0] = bitsiz[1];
8149 3 : shifted[1] = 0;
8150 3 : toshift[0] = 0;
8151 : }
8152 : else
8153 : {
8154 304 : shifted[1] = bitsiz[0];
8155 304 : shifted[0] = 0;
8156 304 : toshift[1] = 0;
8157 : }
8158 307 : }
8159 :
8160 : /* Make arrangements to split at bit BOUNDARY a single loaded word
8161 : (with REVERSEP bit order) LN_ARG[0], to be shifted right by
8162 : TOSHIFT[0] to bring the field of interest to the least-significant
8163 : bit. The expectation is that the same loaded word will be
8164 : propagated from part 0 to part 1, with just different shifting and
8165 : masking to extract both parts. MASK is not expected to do more
8166 : than masking out the bits that belong to the other part. See
8167 : build_split_load for more information on the other fields. */
8168 :
8169 : static inline void
8170 51 : reuse_split_load (tree /* in[0] out[1] */ ln_arg[2],
8171 : HOST_WIDE_INT /* in[0] out[1] */ bitpos[2],
8172 : HOST_WIDE_INT /* in[0] out[1] */ bitsiz[2],
8173 : HOST_WIDE_INT /* in[0] out[0..1] */ toshift[2],
8174 : HOST_WIDE_INT /* out */ shifted[2],
8175 : wide_int /* out */ mask[2],
8176 : HOST_WIDE_INT boundary, bool reversep)
8177 : {
8178 51 : unsigned prec = TYPE_PRECISION (TREE_TYPE (ln_arg[0]));
8179 :
8180 51 : ln_arg[1] = ln_arg[0];
8181 51 : bitpos[1] = bitpos[0];
8182 51 : bitsiz[1] = bitsiz[0];
8183 51 : shifted[1] = shifted[0] = 0;
8184 :
8185 51 : if (reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
8186 : {
8187 3 : toshift[1] = toshift[0];
8188 3 : toshift[0] = bitpos[0] + bitsiz[0] - boundary;
8189 3 : mask[0] = wi::mask (toshift[0], true, prec);
8190 3 : mask[1] = wi::mask (toshift[0], false, prec);
8191 : }
8192 : else
8193 : {
8194 48 : toshift[1] = boundary - bitpos[1];
8195 48 : mask[1] = wi::mask (toshift[1], true, prec);
8196 48 : mask[0] = wi::mask (toshift[1], false, prec);
8197 : }
8198 51 : }
8199 :
8200 : /* Find ways of folding logical expressions of LHS and RHS:
8201 :
8202 : Try to merge two comparisons to nearby fields.
8203 :
8204 : For example, if we have p->a == 2 && p->b == 4 and we can load both A and B
8205 : at once, we can do this with a comparison against the object ANDed with the
8206 : a mask.
8207 :
8208 : If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
8209 : operations to do this with one comparison, loading both fields from P at
8210 : once, and likewise from Q.
8211 :
8212 : Herein, loading at once means loading from within the same alignment
8213 : boundary for the enclosing object. If (packed) fields cross such alignment
8214 : boundaries, we may still recombine the compares, so that loads do not cross
8215 : the boundaries.
8216 :
8217 : CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
8218 : TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
8219 :
8220 : TRUTH_TYPE is the type of the logical operand.
8221 :
8222 : LHS is denoted as LL_ARG LCODE LR_ARG.
8223 :
8224 : RHS is denoted as RL_ARG RCODE RR_ARG.
8225 :
8226 : LHS is assumed to dominate RHS.
8227 :
8228 : Combined loads are inserted next to preexisting loads, once we determine
8229 : that the combination is viable, and the combined condition references new
8230 : SSA_NAMEs that hold the loaded values. Since the original loads are
8231 : verified to have the same gimple_vuse, the insertion point doesn't matter
8232 : for correctness. ??? The loads may be a lot earlier than the compares, and
8233 : it's conceivable that one or two loads for RHS appear before those for LHS.
8234 : It could be advantageous to try to place the loads optimally, taking
8235 : advantage of knowing whether RHS is accessed before LHS, or that both are
8236 : accessed before both compares, but we don't do that (yet?).
8237 :
8238 : SEPARATEP should be NULL if the combined condition must be returned as a
8239 : single expression, even if it is a compound condition. This must only be
8240 : done if LHS and RHS are adjacent, without intervening conditions, and the
8241 : combined condition is to replace RHS, while LHS is dropped altogether.
8242 :
8243 : Otherwise, SEPARATEP must be a non-NULL pointer to a NULL_TREE, that may be
8244 : replaced by a part of the compound condition that could replace RHS, while
8245 : the returned expression replaces LHS. This works whether or not LHS and RHS
8246 : are adjacent, as long as there aren't VDEFs or other side effects between
8247 : them.
8248 :
8249 : If the "words" accessed by RHS are already accessed by LHS, this won't
8250 : matter, but if RHS accesses "words" that LHS doesn't, then *SEPARATEP will
8251 : be set to the compares that should take RHS's place. By "words" we mean
8252 :    contiguous bits that do not cross a TYPE_ALIGN boundary of the accessed
8253 : object's type.
8254 :
8255 : We return the simplified tree or 0 if no optimization is possible. */
8256 :
8257 : tree
8258 257938 : fold_truth_andor_for_ifcombine (enum tree_code code, tree truth_type,
8259 : location_t lloc, enum tree_code lcode,
8260 : tree ll_arg, tree lr_arg,
8261 : location_t rloc, enum tree_code rcode,
8262 : tree rl_arg, tree rr_arg,
8263 : tree *separatep)
8264 : {
8265 : /* If this is the "or" of two comparisons, we can do something if
8266 : the comparisons are NE_EXPR. If this is the "and", we can do something
8267 : if the comparisons are EQ_EXPR. I.e.,
8268 : (a->b == 2 && a->c == 4) can become (a->new == NEW).
8269 :
8270 : WANTED_CODE is this operation code. For single bit fields, we can
8271 : convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
8272 : comparison for one-bit fields. */
8273 :
8274 257938 : enum tree_code orig_code = code;
8275 257938 : enum tree_code wanted_code;
8276 257938 : tree ll_inner, lr_inner, rl_inner, rr_inner;
8277 257938 : gimple *ll_load, *lr_load, *rl_load, *rr_load;
8278 257938 : HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
8279 257938 : HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
8280 257938 : HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
8281 257938 : HOST_WIDE_INT lnbitsize, lnbitpos, lnprec;
8282 257938 : HOST_WIDE_INT rnbitsize, rnbitpos, rnprec;
8283 257938 : bool ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
8284 257938 : bool ll_reversep, lr_reversep, rl_reversep, rr_reversep;
8285 257938 : bool ll_signbit, lr_signbit, rl_signbit, rr_signbit;
8286 257938 : scalar_int_mode lnmode, lnmode2, rnmode;
8287 257938 : wide_int ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
8288 257938 : wide_int l_const, r_const;
8289 257938 : tree lntype, rntype, result;
8290 257938 : HOST_WIDE_INT first_bit, end_bit;
8291 257938 : bool volatilep;
8292 257938 : bool l_split_load;
8293 :
8294 : /* These are indexed by: conv, mask, shft, load. */
8295 257938 : location_t ll_loc[4] = { lloc, lloc, lloc, UNKNOWN_LOCATION };
8296 257938 : location_t lr_loc[4] = { lloc, lloc, lloc, UNKNOWN_LOCATION };
8297 257938 : location_t rl_loc[4] = { rloc, rloc, rloc, UNKNOWN_LOCATION };
8298 257938 : location_t rr_loc[4] = { rloc, rloc, rloc, UNKNOWN_LOCATION };
8299 :
8300 257938 : gcc_checking_assert (!separatep || !*separatep);
8301 :
8302 : /* Start by getting the comparison codes. Fail if anything is volatile.
8303 : If one operand is a BIT_AND_EXPR with the constant one, treat it as if
8304 : it were surrounded with a NE_EXPR. */
8305 :
8306 257938 : if (TREE_CODE_CLASS (lcode) != tcc_comparison
8307 257938 : || TREE_CODE_CLASS (rcode) != tcc_comparison)
8308 : return 0;
8309 :
8310 : /* We don't normally find TRUTH_*IF_EXPR in gimple, but these codes may be
8311 : given by our caller to denote conditions from different blocks. */
8312 257938 : switch (code)
8313 : {
8314 : case TRUTH_AND_EXPR:
8315 : case TRUTH_ANDIF_EXPR:
8316 : code = TRUTH_AND_EXPR;
8317 : break;
8318 :
8319 0 : case TRUTH_OR_EXPR:
8320 0 : case TRUTH_ORIF_EXPR:
8321 0 : code = TRUTH_OR_EXPR;
8322 0 : break;
8323 :
8324 : default:
8325 : return 0;
8326 : }
8327 :
8328 : /* Prepare to turn compares of signed quantities with zero into sign-bit
8329 : tests. We need not worry about *_reversep here for these compare
8330 : rewrites: loads will have already been reversed before compares. Save the
8331 : precision, because [lr]l_arg may change and we won't be able to tell how
8332 : wide it was originally. */
8333 257938 : unsigned lsignbit = 0, rsignbit = 0;
8334 257938 : if ((lcode == LT_EXPR || lcode == GE_EXPR)
8335 10754 : && integer_zerop (lr_arg)
8336 3208 : && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg))
8337 261146 : && !TYPE_UNSIGNED (TREE_TYPE (ll_arg)))
8338 : {
8339 3208 : lsignbit = TYPE_PRECISION (TREE_TYPE (ll_arg));
8340 3208 : lcode = (lcode == LT_EXPR ? NE_EXPR : EQ_EXPR);
8341 : }
8342 : /* Turn compares of unsigned quantities with powers of two into
8343 : equality tests of masks. */
8344 254730 : else if ((lcode == LT_EXPR || lcode == GE_EXPR)
8345 7546 : && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg))
8346 6919 : && TYPE_UNSIGNED (TREE_TYPE (ll_arg))
8347 4984 : && TREE_CODE (lr_arg) == INTEGER_CST
8348 254730 : && wi::popcount (wi::to_wide (lr_arg)) == 1)
8349 : {
8350 0 : ll_and_mask = ~(wi::to_wide (lr_arg) - 1);
8351 0 : lcode = (lcode == GE_EXPR ? NE_EXPR : EQ_EXPR);
8352 0 : lr_arg = wide_int_to_tree (TREE_TYPE (ll_arg), ll_and_mask * 0);
8353 : }
8354 : /* Turn compares of unsigned quantities with powers of two minus one
8355 : into equality tests of masks. */
8356 509460 : else if ((lcode == LE_EXPR || lcode == GT_EXPR)
8357 27006 : && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg))
8358 26857 : && TYPE_UNSIGNED (TREE_TYPE (ll_arg))
8359 22019 : && TREE_CODE (lr_arg) == INTEGER_CST
8360 536466 : && wi::popcount (wi::to_wide (lr_arg) + 1) == 1)
8361 : {
8362 3289 : ll_and_mask = ~wi::to_wide (lr_arg);
8363 3289 : lcode = (lcode == GT_EXPR ? NE_EXPR : EQ_EXPR);
8364 3289 : lr_arg = wide_int_to_tree (TREE_TYPE (ll_arg), ll_and_mask * 0);
8365 : }
8366 : /* Likewise for the second compare. */
8367 257938 : if ((rcode == LT_EXPR || rcode == GE_EXPR)
8368 18243 : && integer_zerop (rr_arg)
8369 1717 : && INTEGRAL_TYPE_P (TREE_TYPE (rl_arg))
8370 259655 : && !TYPE_UNSIGNED (TREE_TYPE (rl_arg)))
8371 : {
8372 1717 : rsignbit = TYPE_PRECISION (TREE_TYPE (rl_arg));
8373 1717 : rcode = (rcode == LT_EXPR ? NE_EXPR : EQ_EXPR);
8374 : }
8375 256221 : else if ((rcode == LT_EXPR || rcode == GE_EXPR)
8376 16526 : && INTEGRAL_TYPE_P (TREE_TYPE (rl_arg))
8377 15596 : && TYPE_UNSIGNED (TREE_TYPE (rl_arg))
8378 2611 : && TREE_CODE (rr_arg) == INTEGER_CST
8379 256221 : && wi::popcount (wi::to_wide (rr_arg)) == 1)
8380 : {
8381 0 : rl_and_mask = ~(wi::to_wide (rr_arg) - 1);
8382 0 : rcode = (rcode == GE_EXPR ? NE_EXPR : EQ_EXPR);
8383 0 : rr_arg = wide_int_to_tree (TREE_TYPE (rl_arg), rl_and_mask * 0);
8384 : }
8385 512442 : else if ((rcode == LE_EXPR || rcode == GT_EXPR)
8386 36685 : && INTEGRAL_TYPE_P (TREE_TYPE (rl_arg))
8387 36461 : && TYPE_UNSIGNED (TREE_TYPE (rl_arg))
8388 26853 : && TREE_CODE (rr_arg) == INTEGER_CST
8389 549127 : && wi::popcount (wi::to_wide (rr_arg) + 1) == 1)
8390 : {
8391 4023 : rl_and_mask = ~wi::to_wide (rr_arg);
8392 4023 : rcode = (rcode == GT_EXPR ? NE_EXPR : EQ_EXPR);
8393 4023 : rr_arg = wide_int_to_tree (TREE_TYPE (rl_arg), rl_and_mask * 0);
8394 : }
8395 :
8396 : /* See if the comparisons can be merged. Then get all the parameters for
8397 : each side. */
8398 :
8399 257938 : if ((lcode != EQ_EXPR && lcode != NE_EXPR)
8400 225885 : || (rcode != EQ_EXPR && rcode != NE_EXPR))
8401 : return 0;
8402 :
8403 202616 : ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
8404 202616 : volatilep = 0;
8405 202616 : bool l_xor = false, r_xor = false;
8406 202616 : ll_inner = decode_field_reference (&ll_arg, &ll_bitsize, &ll_bitpos,
8407 : &ll_unsignedp, &ll_reversep, &volatilep,
8408 : &ll_and_mask, &ll_signbit,
8409 : &l_xor, &lr_arg, &lr_and_mask,
8410 : &ll_load, ll_loc);
8411 202616 : if (!ll_inner)
8412 : return 0;
8413 149963 : lr_inner = decode_field_reference (&lr_arg, &lr_bitsize, &lr_bitpos,
8414 : &lr_unsignedp, &lr_reversep, &volatilep,
8415 : &lr_and_mask, &lr_signbit, &l_xor, 0, 0,
8416 : &lr_load, lr_loc);
8417 149963 : if (!lr_inner)
8418 : return 0;
8419 147060 : rl_inner = decode_field_reference (&rl_arg, &rl_bitsize, &rl_bitpos,
8420 : &rl_unsignedp, &rl_reversep, &volatilep,
8421 : &rl_and_mask, &rl_signbit,
8422 : &r_xor, &rr_arg, &rr_and_mask,
8423 : &rl_load, rl_loc);
8424 147060 : if (!rl_inner)
8425 : return 0;
8426 137359 : rr_inner = decode_field_reference (&rr_arg, &rr_bitsize, &rr_bitpos,
8427 : &rr_unsignedp, &rr_reversep, &volatilep,
8428 : &rr_and_mask, &rr_signbit, &r_xor, 0, 0,
8429 : &rr_load, rr_loc);
8430 137359 : if (!rr_inner)
8431 : return 0;
8432 :
8433 : /* It must be true that the inner operation on the lhs of each
8434 : comparison must be the same if we are to be able to do anything.
8435 : Then see if we have constants. If not, the same must be true for
8436 : the rhs's. If one is a load and the other isn't, we have to be
8437 : conservative and avoid the optimization, otherwise we could get
8438 : SRAed fields wrong. */
8439 136635 : if (volatilep)
8440 : return 0;
8441 :
8442 136635 : if (ll_reversep != rl_reversep
8443 136635 : || ! operand_equal_p (ll_inner, rl_inner, 0))
8444 : {
8445 : /* Try swapping the operands. */
8446 101222 : if (ll_reversep != rr_reversep || rsignbit
8447 201988 : || !operand_equal_p (ll_inner, rr_inner, 0))
8448 100071 : return 0;
8449 :
8450 1178 : rcode = swap_tree_comparison (rcode);
8451 1178 : std::swap (rl_arg, rr_arg);
8452 1178 : std::swap (rl_inner, rr_inner);
8453 1178 : std::swap (rl_bitsize, rr_bitsize);
8454 1178 : std::swap (rl_bitpos, rr_bitpos);
8455 1178 : std::swap (rl_unsignedp, rr_unsignedp);
8456 1178 : std::swap (rl_reversep, rr_reversep);
8457 1178 : std::swap (rl_and_mask, rr_and_mask);
8458 1178 : std::swap (rl_signbit, rr_signbit);
8459 1178 : std::swap (rl_load, rr_load);
8460 1178 : std::swap (rl_loc, rr_loc);
8461 : }
8462 :
8463 71000 : if ((ll_load && rl_load)
8464 139872 : ? gimple_vuse (ll_load) != gimple_vuse (rl_load)
8465 2128 : : (!ll_load != !rl_load))
8466 : return 0;
8467 :
8468 : /* ??? Can we do anything with these? */
8469 36156 : if (lr_signbit || rr_signbit)
8470 : return 0;
8471 :
8472 : /* If the mask encompassed extensions of the sign bit before
8473 : clipping, try to include the sign bit in the test. If we're not
8474 : comparing with zero, don't even try to deal with it (for now?).
8475 : If we've already commited to a sign test, the extended (before
8476 : clipping) mask could already be messing with it. */
8477 36156 : if (ll_signbit)
8478 : {
8479 4 : if (!integer_zerop (lr_arg) || lsignbit)
8480 0 : return 0;
8481 4 : wide_int sign = wi::mask (ll_bitsize - 1, true, ll_bitsize);
8482 4 : if (!ll_and_mask.get_precision ())
8483 0 : ll_and_mask = sign;
8484 : else
8485 4 : ll_and_mask |= sign;
8486 4 : }
8487 :
8488 36156 : if (rl_signbit)
8489 : {
8490 4 : if (!integer_zerop (rr_arg) || rsignbit)
8491 1 : return 0;
8492 3 : wide_int sign = wi::mask (rl_bitsize - 1, true, rl_bitsize);
8493 3 : if (!rl_and_mask.get_precision ())
8494 0 : rl_and_mask = sign;
8495 : else
8496 3 : rl_and_mask |= sign;
8497 3 : }
8498 :
8499 36155 : if (TREE_CODE (lr_arg) == INTEGER_CST
8500 29551 : && TREE_CODE (rr_arg) == INTEGER_CST)
8501 : {
8502 29145 : l_const = wi::to_wide (lr_arg);
8503 : /* We don't expect masks on constants, but if there are any, apply
8504 : them now. */
8505 29145 : if (lr_and_mask.get_precision ())
8506 0 : l_const &= wide_int::from (lr_and_mask,
8507 0 : l_const.get_precision (), UNSIGNED);
8508 29145 : r_const = wi::to_wide (rr_arg);
8509 29145 : if (rr_and_mask.get_precision ())
8510 0 : r_const &= wide_int::from (rr_and_mask,
8511 0 : r_const.get_precision (), UNSIGNED);
8512 29145 : lr_reversep = ll_reversep;
8513 : }
8514 7010 : else if (lr_reversep != rr_reversep
8515 7010 : || ! operand_equal_p (lr_inner, rr_inner, 0)
8516 12854 : || ((lr_load && rr_load)
8517 17451 : ? gimple_vuse (lr_load) != gimple_vuse (rr_load)
8518 27 : : (!lr_load != !rr_load)))
8519 1196 : return 0;
8520 :
8521 : /* If we found sign tests, finish turning them into bit tests. */
8522 :
8523 34959 : if (lsignbit)
8524 : {
8525 44 : wide_int sign = wi::mask (ll_bitsize - 1, true, ll_bitsize);
8526 : /* If ll_arg is zero-extended and we're testing the sign bit, we know
8527 : what the result should be. Shifting the sign bit out of sign will get
8528 : us to mask the entire field out, yielding zero, i.e., the sign bit of
8529 : the zero-extended value. We know the masked value is being compared
8530 : with zero, so the compare will get us the result we're looking
8531 : for: TRUE if EQ_EXPR, FALSE if NE_EXPR. */
8532 44 : if (lsignbit > ll_bitsize && ll_unsignedp)
8533 1 : sign <<= 1;
8534 44 : if (!ll_and_mask.get_precision ())
8535 43 : ll_and_mask = sign;
8536 : else
8537 1 : ll_and_mask &= sign;
8538 44 : if (l_xor)
8539 : {
8540 1 : if (ll_bitsize != lr_bitsize)
8541 1 : return 0;
8542 0 : if (!lr_and_mask.get_precision ())
8543 0 : lr_and_mask = sign;
8544 : else
8545 0 : lr_and_mask &= sign;
8546 0 : if (l_const.get_precision ())
8547 0 : l_const &= wide_int::from (lr_and_mask,
8548 0 : l_const.get_precision (), UNSIGNED);
8549 : }
8550 44 : }
8551 :
8552 34958 : if (rsignbit)
8553 : {
8554 170 : wide_int sign = wi::mask (rl_bitsize - 1, true, rl_bitsize);
8555 170 : if (rsignbit > rl_bitsize && rl_unsignedp)
8556 0 : sign <<= 1;
8557 170 : if (!rl_and_mask.get_precision ())
8558 170 : rl_and_mask = sign;
8559 : else
8560 0 : rl_and_mask &= sign;
8561 170 : if (r_xor)
8562 : {
8563 16 : if (rl_bitsize != rr_bitsize)
8564 0 : return 0;
8565 16 : if (!rr_and_mask.get_precision ())
8566 16 : rr_and_mask = sign;
8567 : else
8568 0 : rr_and_mask &= sign;
8569 16 : if (r_const.get_precision ())
8570 24 : r_const &= wide_int::from (rr_and_mask,
8571 12 : r_const.get_precision (), UNSIGNED);
8572 : }
8573 170 : }
8574 :
8575 : /* If either comparison code is not correct for our logical operation,
8576 : fail. However, we can convert a one-bit comparison against zero into
8577 : the opposite comparison against that bit being set in the field. */
8578 :
8579 34958 : wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
8580 34958 : if (lcode != wanted_code)
8581 : {
8582 4518 : if (l_const.get_precision ()
8583 4472 : && l_const == 0
8584 1529 : && ll_and_mask.get_precision ()
8585 4930 : && wi::popcount (ll_and_mask) == 1)
8586 : {
8587 : /* Make the left operand unsigned, since we are only interested
8588 : in the value of one bit. Otherwise we are doing the wrong
8589 : thing below. */
8590 289 : ll_unsignedp = 1;
8591 289 : l_const = ll_and_mask;
8592 : }
8593 : else
8594 4229 : return 0;
8595 : }
8596 :
8597 : /* This is analogous to the code for l_const above. */
8598 30729 : if (rcode != wanted_code)
8599 : {
8600 854 : if (r_const.get_precision ()
8601 854 : && r_const == 0
8602 829 : && rl_and_mask.get_precision ()
8603 1617 : && wi::popcount (rl_and_mask) == 1)
8604 : {
8605 601 : rl_unsignedp = 1;
8606 601 : r_const = rl_and_mask;
8607 : }
8608 : else
8609 253 : return 0;
8610 : }
8611 :
8612 : /* This will be bumped to 2 if any of the field pairs crosses an
8613 : alignment boundary, so the merged compare has to be done in two
8614 : parts. */
8615 91428 : int parts = 1;
8616 : /* Set to true if the second combined compare should come first,
8617 : e.g., because the second original compare accesses a word that
8618 : the first one doesn't, and the combined compares access those in
8619 : cmp[0]. */
8620 91428 : bool first1 = false;
8621 : /* Set to true if the first original compare is not the one being
8622 : split. */
8623 91428 : bool maybe_separate = false;
8624 :
8625 : /* The following 2-dimensional arrays use the first index to
8626 : identify left(0)- vs right(1)-hand compare operands, and the
8627 : second one to identify merged compare parts. */
8628 : /* The memory loads or constants to be compared. */
8629 : tree ld_arg[2][2];
8630 : /* The first bit of the corresponding inner object that the
8631 : corresponding LD_ARG covers. */
8632 : HOST_WIDE_INT bitpos[2][2];
8633 : /* The bit count starting at BITPOS that the corresponding LD_ARG
8634 : covers. */
8635 : HOST_WIDE_INT bitsiz[2][2];
8636 : /* The number of bits by which LD_ARG has already been shifted
8637 : right, WRT mask. */
8638 : HOST_WIDE_INT shifted[2][2];
8639 : /* The number of bits by which both LD_ARG and MASK need shifting to
8640 : bring its least-significant bit to bit zero. */
8641 : HOST_WIDE_INT toshift[2][2];
8642 : /* An additional mask to be applied to LD_ARG, to remove any bits
8643 : that may have been loaded for use in another compare, but that
8644 : don't belong in the corresponding compare. */
8645 365712 : wide_int xmask[2][2] = {};
8646 :
8647 : /* The combined compare or compares. */
8648 30476 : tree cmp[2];
8649 :
8650 : /* Consider we're comparing two non-contiguous fields of packed
8651 : structs, both aligned at 32-bit boundaries:
8652 :
8653 : ll_arg: an 8-bit field at offset 0
8654 : lr_arg: a 16-bit field at offset 2
8655 :
8656 : rl_arg: an 8-bit field at offset 1
8657 : rr_arg: a 16-bit field at offset 3
8658 :
8659 : We'll have r_split_load, because rr_arg straddles across an
8660 : alignment boundary.
8661 :
8662 : We'll want to have:
8663 :
8664 : bitpos = { { 0, 0 }, { 0, 32 } }
8665 : bitsiz = { { 32, 32 }, { 32, 8 } }
8666 :
8667 : And, for little-endian:
8668 :
8669 : shifted = { { 0, 0 }, { 0, 32 } }
8670 : toshift = { { 0, 24 }, { 0, 0 } }
8671 :
8672 : Or, for big-endian:
8673 :
8674 : shifted = { { 0, 0 }, { 8, 0 } }
8675 : toshift = { { 8, 0 }, { 0, 0 } }
8676 : */
8677 :
8678 : /* See if we can find a mode that contains both fields being compared on
8679 : the left. If we can't, fail. Otherwise, update all constants and masks
8680 : to be relative to a field of that size. */
8681 30476 : first_bit = MIN (ll_bitpos, rl_bitpos);
8682 30476 : end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
8683 30476 : HOST_WIDE_INT ll_align = TYPE_ALIGN (TREE_TYPE (ll_inner));
8684 30476 : poly_uint64 ll_end_region = 0;
8685 30476 : if (TYPE_SIZE (TREE_TYPE (ll_inner))
8686 30476 : && tree_fits_poly_uint64_p (TYPE_SIZE (TREE_TYPE (ll_inner))))
8687 30476 : ll_end_region = tree_to_poly_uint64 (TYPE_SIZE (TREE_TYPE (ll_inner)));
8688 30476 : if (get_best_mode (end_bit - first_bit, first_bit, 0, ll_end_region,
8689 30476 : ll_align, BITS_PER_WORD, volatilep, &lnmode))
8690 : l_split_load = false;
8691 : /* ??? If ll and rl share the same load, reuse that?
8692 : See PR 118206 -> gcc.dg/field-merge-18.c */
8693 : else
8694 : {
8695 : /* Consider the possibility of recombining loads if any of the
8696 : fields straddles across an alignment boundary, so that either
8697 : part can be loaded along with the other field. Since we
8698 : limit access modes to BITS_PER_WORD, don't exceed that,
8699 : otherwise on a 32-bit host and a 64-bit-aligned data
8700 : structure, we'll fail the above for a field that straddles
8701 : across two words, and would fail here for not even trying to
8702 : split it at between 32-bit words. */
8703 27081 : HOST_WIDE_INT boundary = compute_split_boundary_from_align
8704 28661 : (MIN (ll_align, BITS_PER_WORD),
8705 : ll_bitpos, ll_bitsize, rl_bitpos, rl_bitsize);
8706 :
8707 27081 : if (boundary < 0
8708 219 : || !get_best_mode (boundary - first_bit, first_bit, 0, ll_end_region,
8709 : ll_align, BITS_PER_WORD, volatilep, &lnmode)
8710 27258 : || !get_best_mode (end_bit - boundary, boundary, 0, ll_end_region,
8711 177 : ll_align, BITS_PER_WORD, volatilep, &lnmode2))
8712 : {
8713 28442 : if (ll_align <= BITS_PER_WORD)
8714 : return 0;
8715 :
8716 : /* As a last resort, try double-word access modes. This
8717 : enables us to deal with misaligned double-word fields
8718 : that straddle across 3 separate words. */
8719 1250 : boundary = compute_split_boundary_from_align
8720 1338 : (MIN (ll_align, 2 * BITS_PER_WORD),
8721 : ll_bitpos, ll_bitsize, rl_bitpos, rl_bitsize);
8722 1250 : if (boundary < 0
8723 0 : || !get_best_mode (boundary - first_bit, first_bit,
8724 : 0, ll_end_region, ll_align, 2 * BITS_PER_WORD,
8725 : volatilep, &lnmode)
8726 1250 : || !get_best_mode (end_bit - boundary, boundary,
8727 0 : 0, ll_end_region, ll_align, 2 * BITS_PER_WORD,
8728 : volatilep, &lnmode2))
8729 1250 : return 0;
8730 : }
8731 :
8732 : /* If we can't have a single load, but can with two, figure out whether
8733 : the two compares can be separated, i.e., whether the entirety of the
8734 : first original compare is encompassed by the entirety of the first
8735 : combined compare. If the first original compare is past the alignment
8736 : boundary, arrange to compare that range first, by setting first1
8737 : (meaning make cmp[1] first, instead of cmp[0]). */
8738 177 : l_split_load = true;
8739 177 : parts = 2;
8740 177 : if (ll_bitpos >= boundary)
8741 : maybe_separate = first1 = true;
8742 132 : else if (ll_bitpos + ll_bitsize <= boundary)
8743 32 : maybe_separate = true;
8744 : }
8745 :
8746 3572 : lnbitsize = GET_MODE_BITSIZE (lnmode);
8747 3572 : lnbitpos = first_bit & ~ (lnbitsize - 1);
8748 : /* Avoid situations that the code below can't handle. */
8749 3572 : if (lnbitpos < 0)
8750 : return 0;
8751 :
8752 : /* Choose the type for the combined compare. Even if we're splitting loads,
8753 : make it wide enough to hold both. */
8754 3572 : if (l_split_load)
8755 354 : lnbitsize += GET_MODE_BITSIZE (lnmode2);
8756 3572 : lntype = build_nonstandard_integer_type (lnbitsize, 1);
8757 3572 : if (!lntype)
8758 : return NULL_TREE;
8759 3572 : lnprec = TYPE_PRECISION (lntype);
8760 3572 : xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
8761 :
8762 : /* Adjust bit ranges for reverse endianness. */
8763 3572 : if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
8764 : {
8765 6 : xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
8766 6 : xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
8767 : }
8768 :
8769 : /* Adjust masks to match the positions in the combined lntype. */
8770 7144 : wide_int ll_mask, rl_mask, r_mask;
8771 3572 : if (ll_and_mask.get_precision ())
8772 4278 : ll_mask = wi::lshift (wide_int::from (ll_and_mask, lnprec, UNSIGNED),
8773 2139 : xll_bitpos);
8774 : else
8775 1433 : ll_mask = wi::shifted_mask (xll_bitpos, ll_bitsize, false, lnprec);
8776 3572 : if (rl_and_mask.get_precision ())
8777 4070 : rl_mask = wi::lshift (wide_int::from (rl_and_mask, lnprec, UNSIGNED),
8778 2035 : xrl_bitpos);
8779 : else
8780 1537 : rl_mask = wi::shifted_mask (xrl_bitpos, rl_bitsize, false, lnprec);
8781 :
8782 : /* When we set l_const, we also set r_const. */
8783 3572 : gcc_checking_assert (!l_const.get_precision () == !r_const.get_precision ());
8784 :
8785 : /* Adjust right-hand constants in both original comparisons to match width
8786 : and bit position. */
8787 3572 : if (l_const.get_precision ())
8788 : {
8789 : /* Before clipping upper bits of the right-hand operand of the compare,
8790 : check that they're sign or zero extensions, depending on how the
8791 : left-hand operand would be extended. If it is unsigned, or if there's
8792 : a mask that zeroes out extension bits, whether because we've checked
8793 : for upper bits in the mask and did not set ll_signbit, or because the
8794 : sign bit itself is masked out, check that the right-hand operand is
8795 : zero-extended. */
8796 1890 : bool l_non_ext_bits = false;
8797 1890 : if (ll_bitsize < lr_bitsize)
8798 : {
8799 40 : wide_int zext = wi::zext (l_const, ll_bitsize);
8800 80 : if ((ll_unsignedp
8801 32 : || (ll_and_mask.get_precision ()
8802 4 : && (!ll_signbit
8803 48 : || ((ll_and_mask & wi::mask (ll_bitsize - 1, true, ll_bitsize))
8804 8 : == 0)))
8805 152 : ? zext : wi::sext (l_const, ll_bitsize)) == l_const)
8806 40 : l_const = zext;
8807 : else
8808 : l_non_ext_bits = true;
8809 40 : }
8810 : /* We're doing bitwise equality tests, so don't bother with sign
8811 : extensions. */
8812 1890 : l_const = wide_int::from (l_const, lnprec, UNSIGNED);
8813 1890 : if (ll_and_mask.get_precision ())
8814 1152 : l_const &= wide_int::from (ll_and_mask, lnprec, UNSIGNED);
8815 1890 : l_const <<= xll_bitpos;
8816 5670 : if (l_non_ext_bits || (l_const & ~ll_mask) != 0)
8817 : {
8818 0 : warning_at (lloc, OPT_Wtautological_compare,
8819 : "comparison is always %d", wanted_code == NE_EXPR);
8820 :
8821 0 : return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
8822 : }
8823 :
8824 : /* Before clipping upper bits of the right-hand operand of the compare,
8825 : check that they're sign or zero extensions, depending on how the
8826 : left-hand operand would be extended. */
8827 1890 : bool r_non_ext_bits = false;
8828 1890 : if (rl_bitsize < rr_bitsize)
8829 : {
8830 18 : wide_int zext = wi::zext (r_const, rl_bitsize);
8831 36 : if ((rl_unsignedp
8832 17 : || (rl_and_mask.get_precision ()
8833 10 : && (!rl_signbit
8834 24 : || ((rl_and_mask & wi::mask (rl_bitsize - 1, true, rl_bitsize))
8835 6 : == 0)))
8836 71 : ? zext : wi::sext (r_const, rl_bitsize)) == r_const)
8837 18 : r_const = zext;
8838 : else
8839 : r_non_ext_bits = true;
8840 18 : }
8841 1890 : r_const = wide_int::from (r_const, lnprec, UNSIGNED);
8842 1890 : if (rl_and_mask.get_precision ())
8843 1092 : r_const &= wide_int::from (rl_and_mask, lnprec, UNSIGNED);
8844 1890 : r_const <<= xrl_bitpos;
8845 5670 : if (r_non_ext_bits || (r_const & ~rl_mask) != 0)
8846 : {
8847 0 : warning_at (rloc, OPT_Wtautological_compare,
8848 : "comparison is always %d", wanted_code == NE_EXPR);
8849 :
8850 0 : return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
8851 : }
8852 :
8853 : /* If there is something in common between the masks, those bits of the
8854 : constants must be the same. If not, the combined condition cannot be
8855 : met, and the result is known. Test for this to avoid generating
8856 : incorrect code below. */
8857 1890 : wide_int mask = ll_mask & rl_mask;
8858 1890 : if (mask != 0
8859 1935 : && (l_const & mask) != (r_const & mask))
8860 : {
8861 0 : if (wanted_code == NE_EXPR)
8862 0 : return constant_boolean_node (true, truth_type);
8863 : else
8864 0 : return constant_boolean_node (false, truth_type);
8865 : }
8866 :
8867 : /* The constants are combined so as to line up with the loaded field, so
8868 : tentatively use the same parameters for the second combined
8869 : compare. */
8870 1890 : ld_arg[1][0] = wide_int_to_tree (lntype, l_const | r_const);
8871 1890 : toshift[1][0] = MIN (xll_bitpos, xrl_bitpos);
8872 1890 : shifted[1][0] = 0;
8873 1890 : bitpos[1][0] = lnbitpos;
8874 1890 : bitsiz[1][0] = lnbitsize;
8875 :
8876 1890 : if (parts > 1)
8877 49 : reuse_split_load (ld_arg[1], bitpos[1], bitsiz[1], toshift[1],
8878 : shifted[1], xmask[1],
8879 49 : lnbitpos + GET_MODE_BITSIZE (lnmode),
8880 : lr_reversep);
8881 :
8882 : /* No masking needed, we know the full constants. */
8883 1890 : r_mask = wi::mask (0, true, lnprec);
8884 :
8885 : /* If the compiler thinks this is used uninitialized below, it's
8886 : because it can't realize that parts can only be 2 when
8887 : comparing with constants if l_split_load is also true. This
8888 : just silences the warning. */
8889 1890 : rnbitpos = 0;
8890 1890 : }
8891 :
8892 : /* Likewise, if the right sides are not constant, align them for the combined
8893 : compare. Also, disallow this optimization if a size, signedness or
8894 : storage order mismatch occurs between the left and right sides. */
8895 : else
8896 : {
8897 1682 : if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
8898 1621 : || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
8899 1621 : || ll_reversep != lr_reversep
8900 : /* Make sure the two fields on the right
8901 : correspond to the left without being swapped. */
8902 1621 : || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
8903 521 : return 0;
8904 :
8905 1165 : bool r_split_load;
8906 1165 : scalar_int_mode rnmode2;
8907 :
8908 : /* Figure out how to load the bits for the right-hand size of the
8909 : combined compare. As in the left-hand size, we may have to split it,
8910 : and then we use two separate compares. */
8911 1165 : first_bit = MIN (lr_bitpos, rr_bitpos);
8912 1165 : end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
8913 1165 : HOST_WIDE_INT lr_align = TYPE_ALIGN (TREE_TYPE (lr_inner));
8914 1165 : poly_uint64 lr_end_region = 0;
8915 1165 : if (TYPE_SIZE (TREE_TYPE (lr_inner))
8916 1165 : && tree_fits_poly_uint64_p (TYPE_SIZE (TREE_TYPE (lr_inner))))
8917 1165 : lr_end_region = tree_to_poly_uint64 (TYPE_SIZE (TREE_TYPE (lr_inner)));
8918 1165 : if (!get_best_mode (end_bit - first_bit, first_bit, 0, lr_end_region,
8919 1165 : lr_align, BITS_PER_WORD, volatilep, &rnmode))
8920 : {
8921 : /* Consider the possibility of recombining loads if any of the
8922 : fields straddles across an alignment boundary, so that either
8923 : part can be loaded along with the other field. */
8924 134 : HOST_WIDE_INT boundary = compute_split_boundary_from_align
8925 134 : (lr_align, lr_bitpos, lr_bitsize, rr_bitpos, rr_bitsize);
8926 :
8927 134 : if (boundary < 0
8928 : /* If we're to split both, make sure the split point is
8929 : the same. */
8930 130 : || (l_split_load
8931 128 : && (boundary - lr_bitpos
8932 128 : != (lnbitpos + GET_MODE_BITSIZE (lnmode)) - ll_bitpos))
8933 130 : || !get_best_mode (boundary - first_bit, first_bit,
8934 : 0, lr_end_region,
8935 130 : lr_align, BITS_PER_WORD, volatilep, &rnmode)
8936 264 : || !get_best_mode (end_bit - boundary, boundary, 0, lr_end_region,
8937 130 : lr_align, BITS_PER_WORD, volatilep, &rnmode2))
8938 4 : return 0;
8939 :
8940 130 : r_split_load = true;
8941 130 : parts = 2;
8942 130 : if (lr_bitpos >= boundary)
8943 : maybe_separate = first1 = true;
8944 88 : else if (lr_bitpos + lr_bitsize <= boundary)
8945 29 : maybe_separate = true;
8946 : }
8947 : else
8948 : r_split_load = false;
8949 :
8950 : /* Find a type that can hold the entire right-hand operand. */
8951 1161 : rnbitsize = GET_MODE_BITSIZE (rnmode);
8952 1161 : rnbitpos = first_bit & ~ (rnbitsize - 1);
8953 1161 : if (r_split_load)
8954 260 : rnbitsize += GET_MODE_BITSIZE (rnmode2);
8955 1161 : rntype = build_nonstandard_integer_type (rnbitsize, 1);
8956 1161 : if (!rntype)
8957 : return 0;
8958 1161 : rnprec = TYPE_PRECISION (rntype);
8959 1161 : xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
8960 :
8961 : /* Adjust for reversed endianness. */
8962 1161 : if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
8963 : {
8964 0 : xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
8965 0 : xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
8966 : }
8967 :
8968 : /* Adjust the masks to match the combined type, and combine them. */
8969 1161 : wide_int lr_mask, rr_mask;
8970 1161 : if (lr_and_mask.get_precision ())
8971 1972 : lr_mask = wi::lshift (wide_int::from (lr_and_mask, rnprec, UNSIGNED),
8972 986 : xlr_bitpos);
8973 : else
8974 175 : lr_mask = wi::shifted_mask (xlr_bitpos, lr_bitsize, false, rnprec);
8975 1161 : if (rr_and_mask.get_precision ())
8976 1884 : rr_mask = wi::lshift (wide_int::from (rr_and_mask, rnprec, UNSIGNED),
8977 942 : xrr_bitpos);
8978 : else
8979 219 : rr_mask = wi::shifted_mask (xrr_bitpos, rr_bitsize, false, rnprec);
8980 1161 : r_mask = lr_mask | rr_mask;
8981 :
8982 : /* Load the right-hand operand of the combined compare. */
8983 1161 : toshift[1][0] = MIN (xlr_bitpos, xrr_bitpos);
8984 1161 : shifted[1][0] = 0;
8985 :
8986 1161 : if (!r_split_load)
8987 : {
8988 1031 : bitpos[1][0] = rnbitpos;
8989 1031 : bitsiz[1][0] = rnbitsize;
8990 1031 : ld_arg[1][0] = make_bit_field_load (ll_loc[3], lr_inner, lr_arg,
8991 1031 : rntype, rnbitsize, rnbitpos,
8992 1031 : lr_unsignedp || rr_unsignedp,
8993 : lr_reversep, lr_load);
8994 : }
8995 :
8996 : /* ... and the second part of the right-hand operand if needed. */
8997 1161 : if (parts > 1)
8998 : {
8999 130 : if (r_split_load)
9000 : {
9001 130 : gimple *point[2];
9002 130 : point[0] = lr_load;
9003 130 : point[1] = rr_load;
9004 130 : build_split_load (ld_arg[1], bitpos[1], bitsiz[1], toshift[1],
9005 : shifted[1], rl_loc[3], lr_inner, lr_arg,
9006 : rnmode, rnmode2, rnbitpos, lr_reversep, point);
9007 : }
9008 : else
9009 0 : reuse_split_load (ld_arg[1], bitpos[1], bitsiz[1], toshift[1],
9010 : shifted[1], xmask[1],
9011 0 : lnbitpos + GET_MODE_BITSIZE (lnmode)
9012 0 : - ll_bitpos + lr_bitpos, lr_reversep);
9013 : }
9014 1161 : }
9015 :
9016 : /* Now issue the loads for the left-hand combined operand/s. */
9017 6102 : wide_int l_mask = ll_mask | rl_mask;
9018 3051 : toshift[0][0] = MIN (xll_bitpos, xrl_bitpos);
9019 3051 : shifted[0][0] = 0;
9020 :
9021 3051 : if (!l_split_load)
9022 : {
9023 2874 : bitpos[0][0] = lnbitpos;
9024 2874 : bitsiz[0][0] = lnbitsize;
9025 2874 : ld_arg[0][0] = make_bit_field_load (ll_loc[3], ll_inner, ll_arg,
9026 2874 : lntype, lnbitsize, lnbitpos,
9027 2874 : ll_unsignedp || rl_unsignedp,
9028 : ll_reversep, ll_load);
9029 : }
9030 :
9031 3051 : if (parts > 1)
9032 : {
9033 179 : if (l_split_load)
9034 : {
9035 177 : gimple *point[2];
9036 177 : point[0] = ll_load;
9037 177 : point[1] = rl_load;
9038 177 : build_split_load (ld_arg[0], bitpos[0], bitsiz[0], toshift[0],
9039 : shifted[0], rl_loc[3], ll_inner, ll_arg,
9040 : lnmode, lnmode2, lnbitpos, ll_reversep, point);
9041 : }
9042 : else
9043 2 : reuse_split_load (ld_arg[0], bitpos[0], bitsiz[0], toshift[0],
9044 : shifted[0], xmask[0],
9045 2 : rnbitpos + GET_MODE_BITSIZE (rnmode)
9046 2 : - lr_bitpos + ll_bitpos, ll_reversep);
9047 : }
9048 :
9049 : /* Compute the compares. */
9050 6281 : for (int i = 0; i < parts; i++)
9051 : {
9052 3230 : tree op[2] = { ld_arg[0][i], ld_arg[1][i] };
9053 9690 : wide_int mask[2] = { l_mask, r_mask };
9054 3230 : location_t *locs[2] = { i ? rl_loc : ll_loc, i ? rr_loc : lr_loc };
9055 :
9056 : /* Figure out the masks, and unshare the original operands. */
9057 9690 : for (int j = 0; j < 2; j++)
9058 : {
9059 6460 : unsigned prec = TYPE_PRECISION (TREE_TYPE (op[j]));
9060 6460 : op[j] = unshare_expr (op[j]);
9061 :
9062 : /* Mask out the bits belonging to the other part. */
9063 6460 : if (xmask[j][i].get_precision ())
9064 102 : mask[j] &= xmask[j][i];
9065 :
9066 6460 : if (shifted[j][i])
9067 : {
9068 307 : wide_int shift = wide_int::from (shifted[j][i], prec, UNSIGNED);
9069 307 : mask[j] = wi::lrshift (mask[j], shift);
9070 307 : }
9071 6460 : mask[j] = wide_int::from (mask[j], prec, UNSIGNED);
9072 : }
9073 :
9074 : /* Line up the operands for a compare. */
9075 3230 : HOST_WIDE_INT shift = (toshift[0][i] - toshift[1][i]);
9076 :
9077 3230 : if (shift)
9078 : {
9079 54 : int j;
9080 54 : if (shift > 0)
9081 : j = 0;
9082 : else
9083 : {
9084 52 : j = 1;
9085 52 : shift = -shift;
9086 : }
9087 :
9088 54 : tree shiftsz = bitsize_int (shift);
9089 54 : op[j] = fold_build2_loc (locs[j][1], RSHIFT_EXPR, TREE_TYPE (op[j]),
9090 : op[j], shiftsz);
9091 54 : mask[j] = wi::lrshift (mask[j], shift);
9092 : }
9093 :
9094 : /* Convert to the smaller type before masking out unwanted
9095 : bits. */
9096 3230 : tree type = TREE_TYPE (op[0]);
9097 3230 : if (type != TREE_TYPE (op[1]))
9098 : {
9099 188 : int j = (TYPE_PRECISION (type)
9100 188 : < TYPE_PRECISION (TREE_TYPE (op[1])));
9101 188 : if (!j)
9102 89 : type = TREE_TYPE (op[1]);
9103 188 : op[j] = fold_convert_loc (locs[j][0], type, op[j]);
9104 188 : mask[j] = wide_int::from (mask[j], TYPE_PRECISION (type), UNSIGNED);
9105 : }
9106 :
9107 : /* Apply masks. */
9108 9690 : for (int j = 0; j < 2; j++)
9109 6460 : if (mask[j] != wi::mask (0, true, mask[j].get_precision ()))
9110 2600 : op[j] = fold_build2_loc (locs[j][2], BIT_AND_EXPR, type,
9111 5200 : op[j], wide_int_to_tree (type, mask[j]));
9112 :
9113 6281 : cmp[i] = fold_build2_loc (i ? rloc : lloc, wanted_code, truth_type,
9114 : op[0], op[1]);
9115 9690 : }
9116 :
9117 : /* Reorder the compares if needed. */
9118 3051 : if (first1)
9119 45 : std::swap (cmp[0], cmp[1]);
9120 :
9121 : /* Prepare to return the resulting compares. Combine two parts if
9122 : needed. */
9123 3051 : if (parts == 1)
9124 2872 : result = cmp[0];
9125 179 : else if (!separatep || !maybe_separate)
9126 : {
9127 : /* Only fold if any of the cmp is known, otherwise we may lose the
9128 : sequence point, and that may prevent further optimizations. */
9129 173 : if (TREE_CODE (cmp[0]) == INTEGER_CST
9130 137 : || TREE_CODE (cmp[1]) == INTEGER_CST)
9131 37 : result = fold_build2_loc (rloc, orig_code, truth_type, cmp[0], cmp[1]);
9132 : else
9133 136 : result = build2_loc (rloc, orig_code, truth_type, cmp[0], cmp[1]);
9134 : }
9135 : else
9136 : {
9137 6 : result = cmp[0];
9138 6 : *separatep = cmp[1];
9139 : }
9140 :
9141 3051 : return result;
9142 257938 : }
9143 :
9144 : /* Try to simplify the AND of two comparisons, specified by
9145 : (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
9146 : If this can be simplified to a single expression (without requiring
9147 : introducing more SSA variables to hold intermediate values),
9148 : return the resulting tree. Otherwise return NULL_TREE.
9149 : If the result expression is non-null, it has boolean type. */
9150 :
9151 : tree
9152 411472 : maybe_fold_and_comparisons (tree type,
9153 : enum tree_code code1, tree op1a, tree op1b,
9154 : enum tree_code code2, tree op2a, tree op2b,
9155 : basic_block outer_cond_bb)
9156 : {
 : /* and_comparisons_1 is not symmetric in its operands (it recurses
 : on SSA defs reached through its first comparison only), so try
 : both argument orders before giving up. */
9157 411472 : if (tree t = and_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b,
9158 : outer_cond_bb))
9159 : return t;
9160 :
9161 410186 : if (tree t = and_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b,
9162 : outer_cond_bb))
9163 : return t;
9164 :
 : /* As a last resort, hand the pair to the match.pd-based folders. */
9165 410170 : if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_AND_EXPR, code1,
9166 : op1a, op1b, code2, op2a,
9167 : op2b, outer_cond_bb))
9168 : return t;
9169 :
9170 : return NULL_TREE;
9171 : }
9172 :
9173 : /* Helper function for or_comparisons_1: try to simplify the OR of the
9174 : ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
9175 : If INVERT is true, invert the value of VAR before doing the OR.
9176 : Return NULL_TREE if we can't simplify this to a single expression. */
9177 :
9178 : static tree
9179 38956 : or_var_with_comparison (tree type, tree var, bool invert,
9180 : enum tree_code code2, tree op2a, tree op2b,
9181 : basic_block outer_cond_bb)
9182 : {
9183 38956 : tree t;
9184 38956 : gimple *stmt = SSA_NAME_DEF_STMT (var);
9185 :
9186 : /* We can only deal with variables whose definitions are assignments. */
9187 38956 : if (!is_gimple_assign (stmt))
9188 : return NULL_TREE;
9189 :
9190 : /* If we have an inverted comparison, apply DeMorgan's law and rewrite
9191 : !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
9192 : Then we only have to consider the simpler non-inverted cases. */
9193 38836 : if (invert)
9194 19832 : t = and_var_with_comparison_1 (type, stmt,
9195 : invert_tree_comparison (code2, false),
9196 : op2a, op2b, outer_cond_bb);
9197 : else
9198 19004 : t = or_var_with_comparison_1 (type, stmt, code2, op2a, op2b,
9199 : outer_cond_bb);
 : /* Fold the pending inversion (from the DeMorgan rewrite above) back
 : into the result; INVERT == false leaves T as-is modulo
 : canonicalization. */
9200 38836 : return canonicalize_bool (t, invert);
9201 : }
9202 :
9203 : /* Try to simplify the OR of the ssa variable defined by the assignment
9204 : STMT with the comparison specified by (OP2A CODE2 OP2B).
9205 : Return NULL_TREE if we can't simplify this to a single expression. */
9206 :
9207 : static tree
9208 102410 : or_var_with_comparison_1 (tree type, gimple *stmt,
9209 : enum tree_code code2, tree op2a, tree op2b,
9210 : basic_block outer_cond_bb)
9211 : {
9212 102410 : tree var = gimple_assign_lhs (stmt);
 : /* TRUE_TEST_VAR / FALSE_TEST_VAR record an SSA name whose truth
 : (resp. falsehood) the second comparison is testing, so the boolean
 : identities below can match against it. */
9213 102410 : tree true_test_var = NULL_TREE;
9214 102410 : tree false_test_var = NULL_TREE;
9215 102410 : enum tree_code innercode = gimple_assign_rhs_code (stmt);
9216 :
9217 : /* Check for identities like (var OR (var != 0)) => true . */
9218 102410 : if (TREE_CODE (op2a) == SSA_NAME
9219 102410 : && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
9220 : {
9221 15849 : if ((code2 == NE_EXPR && integer_zerop (op2b))
9222 50797 : || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
9223 : {
9224 14031 : true_test_var = op2a;
9225 14031 : if (var == true_test_var)
9226 : return var;
9227 : }
9228 2989 : else if ((code2 == EQ_EXPR && integer_zerop (op2b))
9229 30380 : || (code2 == NE_EXPR && integer_nonzerop (op2b)))
9230 : {
9231 7279 : false_test_var = op2a;
9232 7279 : if (var == false_test_var)
9233 0 : return boolean_true_node;
9234 : }
9235 : }
9236 :
9237 : /* If the definition is a comparison, recurse on it. */
9238 102410 : if (TREE_CODE_CLASS (innercode) == tcc_comparison)
9239 : {
9240 870 : tree t = or_comparisons_1 (type, innercode,
9241 : gimple_assign_rhs1 (stmt),
9242 : gimple_assign_rhs2 (stmt),
9243 : code2, op2a, op2b, outer_cond_bb);
9244 870 : if (t)
9245 : return t;
9246 : }
9247 :
9248 : /* If the definition is an AND or OR expression, we may be able to
9249 : simplify by reassociating. */
9250 102387 : if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
9251 102387 : && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
9252 : {
9253 39659 : tree inner1 = gimple_assign_rhs1 (stmt);
9254 39659 : tree inner2 = gimple_assign_rhs2 (stmt);
9255 39659 : gimple *s;
9256 39659 : tree t;
9257 39659 : tree partial = NULL_TREE;
9258 39659 : bool is_or = (innercode == BIT_IOR_EXPR);
9259 :
9260 : /* Check for boolean identities that don't require recursive examination
9261 : of inner1/inner2:
9262 : inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
9263 : inner1 OR (inner1 AND inner2) => inner1
9264 : !inner1 OR (inner1 OR inner2) => true
9265 : !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
9266 : */
9267 39659 : if (inner1 == true_test_var)
9268 0 : return (is_or ? var : inner1);
9269 39659 : else if (inner2 == true_test_var)
9270 0 : return (is_or ? var : inner2);
9271 39659 : else if (inner1 == false_test_var)
9272 0 : return (is_or
9273 0 : ? boolean_true_node
9274 0 : : or_var_with_comparison (type, inner2, false, code2, op2a,
9275 0 : op2b, outer_cond_bb));
9276 39659 : else if (inner2 == false_test_var)
9277 0 : return (is_or
9278 0 : ? boolean_true_node
9279 0 : : or_var_with_comparison (type, inner1, false, code2, op2a,
9280 0 : op2b, outer_cond_bb));
9281 :
9282 : /* Next, redistribute/reassociate the OR across the inner tests.
9283 : Compute the first partial result, (inner1 OR (op2a code op2b)) */
9284 39659 : if (TREE_CODE (inner1) == SSA_NAME
9285 39659 : && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
9286 38648 : && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
9287 63806 : && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
9288 : gimple_assign_rhs1 (s),
9289 : gimple_assign_rhs2 (s),
9290 : code2, op2a, op2b,
9291 : outer_cond_bb)))
9292 : {
9293 : /* Handle the OR case, where we are reassociating:
9294 : (inner1 OR inner2) OR (op2a code2 op2b)
9295 : => (t OR inner2)
9296 : If the partial result t is a constant, we win. Otherwise
9297 : continue on to try reassociating with the other inner test. */
9298 738 : if (is_or)
9299 : {
9300 31 : if (integer_onep (t))
9301 0 : return boolean_true_node;
9302 31 : else if (integer_zerop (t))
9303 : return inner2;
9304 : }
9305 :
9306 : /* Handle the AND case, where we are redistributing:
9307 : (inner1 AND inner2) OR (op2a code2 op2b)
9308 : => (t AND (inner2 OR (op2a code op2b))) */
9309 707 : else if (integer_zerop (t))
9310 0 : return boolean_false_node;
9311 :
9312 : /* Save partial result for later. */
9313 : partial = t;
9314 : }
9315 :
9316 : /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
9317 39659 : if (TREE_CODE (inner2) == SSA_NAME
9318 39659 : && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
9319 38944 : && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
9320 76835 : && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
9321 : gimple_assign_rhs1 (s),
9322 : gimple_assign_rhs2 (s),
9323 : code2, op2a, op2b,
9324 : outer_cond_bb)))
9325 : {
9326 : /* Handle the OR case, where we are reassociating:
9327 : (inner1 OR inner2) OR (op2a code2 op2b)
9328 : => (inner1 OR t)
9329 : => (t OR partial) */
9330 495 : if (is_or)
9331 : {
9332 60 : if (integer_zerop (t))
9333 : return inner1;
9334 60 : else if (integer_onep (t))
9335 1 : return boolean_true_node;
9336 : /* If both are the same, we can apply the identity
9337 : (x OR x) == x. */
9338 59 : else if (partial && same_bool_result_p (t, partial))
9339 : return t;
9340 : }
9341 :
9342 : /* Handle the AND case, where we are redistributing:
9343 : (inner1 AND inner2) OR (op2a code2 op2b)
9344 : => (t AND (inner1 OR (op2a code2 op2b)))
9345 : => (t AND partial) */
9346 : else
9347 : {
9348 435 : if (integer_zerop (t))
9349 0 : return boolean_false_node;
9350 435 : else if (partial)
9351 : {
9352 : /* We already got a simplification for the other
9353 : operand to the redistributed AND expression. The
9354 : interesting case is when at least one is true.
9355 : Or, if both are the same, we can apply the identity
9356 : (x AND x) == x. */
9357 14 : if (integer_onep (partial))
9358 : return t;
9359 14 : else if (integer_onep (t))
9360 : return partial;
9361 4 : else if (same_bool_result_p (t, partial))
9362 : return t;
9363 : }
9364 : }
9365 : }
9366 : }
9367 : return NULL_TREE;
9368 : }
9369 :
9370 : /* Try to simplify the OR of two comparisons defined by
9371 : (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
9372 : If this can be done without constructing an intermediate value,
9373 : return the resulting tree; otherwise NULL_TREE is returned.
9374 : This function is deliberately asymmetric as it recurses on SSA_DEFs
9375 : in the first comparison but not the second. */
9376 :
9377 : static tree
9378 961586 : or_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
9379 : enum tree_code code2, tree op2a, tree op2b,
9380 : basic_block outer_cond_bb)
9381 : {
9382 961586 : tree truth_type = truth_type_for (TREE_TYPE (op1a));
9383 :
9384 : /* First check for ((x CODE1 y) OR (x CODE2 y)). */
9385 961586 : if (operand_equal_p (op1a, op2a, 0)
9386 961586 : && operand_equal_p (op1b, op2b, 0))
9387 : {
9388 : /* Result will be either NULL_TREE, or a combined comparison. */
9389 2927 : tree t = combine_comparisons (UNKNOWN_LOCATION,
9390 : TRUTH_ORIF_EXPR, code1, code2,
9391 : truth_type, op1a, op1b);
9392 2927 : if (t)
9393 : return t;
9394 : }
9395 :
9396 : /* Likewise the swapped case of the above. */
9397 958691 : if (operand_equal_p (op1a, op2b, 0)
9398 958691 : && operand_equal_p (op1b, op2a, 0))
9399 : {
9400 : /* Result will be either NULL_TREE, or a combined comparison. */
9401 0 : tree t = combine_comparisons (UNKNOWN_LOCATION,
9402 : TRUTH_ORIF_EXPR, code1,
9403 : swap_tree_comparison (code2),
9404 : truth_type, op1a, op1b);
9405 0 : if (t)
9406 : return t;
9407 : }
9408 :
9409 : /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
9410 : NAME's definition is a truth value. See if there are any simplifications
9411 : that can be done against the NAME's definition. */
9412 958691 : if (TREE_CODE (op1a) == SSA_NAME
9413 958688 : && (code1 == NE_EXPR || code1 == EQ_EXPR)
9414 1228963 : && (integer_zerop (op1b) || integer_onep (op1b)))
9415 : {
 : /* INVERT is true iff the first comparison tests that NAME is
 : false, i.e. it has the form (NAME == 0) or (NAME != 1). */
9416 34708 : bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
9417 71005 : || (code1 == NE_EXPR && integer_onep (op1b)));
9418 67177 : gimple *stmt = SSA_NAME_DEF_STMT (op1a);
9419 67177 : switch (gimple_code (stmt))
9420 : {
9421 38836 : case GIMPLE_ASSIGN:
9422 : /* Try to simplify by copy-propagating the definition. */
9423 38836 : return or_var_with_comparison (type, op1a, invert, code2, op2a,
9424 38836 : op2b, outer_cond_bb);
9425 :
9426 15558 : case GIMPLE_PHI:
9427 : /* If every argument to the PHI produces the same result when
9428 : ORed with the second comparison, we win.
9429 : Do not do this unless the type is bool since we need a bool
9430 : result here anyway. */
9431 15558 : if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
9432 : {
9433 : tree result = NULL_TREE;
9434 : unsigned i;
9435 894 : for (i = 0; i < gimple_phi_num_args (stmt); i++)
9436 : {
9437 894 : tree arg = gimple_phi_arg_def (stmt, i);
9438 :
9439 : /* If this PHI has itself as an argument, ignore it.
9440 : If all the other args produce the same result,
9441 : we're still OK. */
9442 894 : if (arg == gimple_phi_result (stmt))
9443 0 : continue;
9444 894 : else if (TREE_CODE (arg) == INTEGER_CST)
9445 : {
 : /* A constant-true argument forces the OR to true;
 : any other constant leaves (op2a code2 op2b). */
9446 745 : if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
9447 : {
9448 339 : if (!result)
9449 199 : result = boolean_true_node;
9450 140 : else if (!integer_onep (result))
9451 : return NULL_TREE;
9452 : }
9453 406 : else if (!result)
9454 200 : result = fold_build2 (code2, boolean_type_node,
9455 : op2a, op2b);
9456 206 : else if (!same_bool_comparison_p (result,
9457 : code2, op2a, op2b))
9458 : return NULL_TREE;
9459 : }
9460 149 : else if (TREE_CODE (arg) == SSA_NAME
9461 149 : && !SSA_NAME_IS_DEFAULT_DEF (arg))
9462 : {
9463 149 : tree temp;
9464 149 : gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
9465 : /* In simple cases we can look through PHI nodes,
9466 : but we have to be careful with loops.
9467 : See PR49073. */
9468 149 : if (! dom_info_available_p (CDI_DOMINATORS)
9469 149 : || gimple_bb (def_stmt) == gimple_bb (stmt)
9470 298 : || dominated_by_p (CDI_DOMINATORS,
9471 149 : gimple_bb (def_stmt),
9472 149 : gimple_bb (stmt)))
9473 29 : return NULL_TREE;
9474 120 : temp = or_var_with_comparison (type, arg, invert, code2,
9475 : op2a, op2b, outer_cond_bb);
9476 120 : if (!temp)
9477 : return NULL_TREE;
9478 0 : else if (!result)
9479 : result = temp;
9480 0 : else if (!same_bool_result_p (result, temp))
9481 : return NULL_TREE;
9482 : }
9483 : else
9484 : return NULL_TREE;
9485 : }
9486 : return result;
9487 : }
9488 :
9489 : default:
9490 : break;
9491 : }
9492 : }
9493 : return NULL_TREE;
9494 : }
9495 :
9496 : /* Try to simplify the OR of two comparisons, specified by
9497 : (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
9498 : If this can be simplified to a single expression (without requiring
9499 : introducing more SSA variables to hold intermediate values),
9500 : return the resulting tree. Otherwise return NULL_TREE.
9501 : If the result expression is non-null, it has boolean type. */
9502 :
9503 : tree
9504 481794 : maybe_fold_or_comparisons (tree type,
9505 : enum tree_code code1, tree op1a, tree op1b,
9506 : enum tree_code code2, tree op2a, tree op2b,
9507 : basic_block outer_cond_bb)
9508 : {
9509 481794 : if (tree t = or_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b,
9510 : outer_cond_bb))
9511 : return t;
9512 :
9513 478922 : if (tree t = or_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b,
9514 : outer_cond_bb))
9515 : return t;
9516 :
9517 478917 : if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_IOR_EXPR, code1,
9518 : op1a, op1b, code2, op2a,
9519 : op2b, outer_cond_bb))
9520 : return t;
9521 :
9522 : return NULL_TREE;
9523 : }
9524 :
9525 : /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
9526 :
9527 : Either NULL_TREE, a simplified but non-constant or a constant
9528 : is returned.
9529 :
9530 : ??? This should go into a gimple-fold-inline.h file to be eventually
9531 : privatized with the single valueize function used in the various TUs
9532 : to avoid the indirect function call overhead. */
9533 :
9534 : tree
9535 417720065 : gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
9536 : tree (*gvalueize) (tree))
9537 : {
9538 417720065 : gimple_match_op res_op;
9539 : /* ??? The SSA propagators do not correctly deal with following SSA use-def
9540 : edges if there are intermediate VARYING defs. For this reason
9541 : do not follow SSA edges here even though SCCVN can technically
9542 : just deal fine with that. */
9543 417720065 : if (gimple_simplify (stmt, &res_op, NULL, gvalueize, valueize))
9544 : {
9545 55167383 : tree res = NULL_TREE;
9546 55167383 : if (gimple_simplified_result_is_gimple_val (&res_op))
9547 33695950 : res = res_op.ops[0];
9548 21471433 : else if (mprts_hook)
9549 7467889 : res = mprts_hook (&res_op);
9550 41163839 : if (res)
9551 : {
9552 35541376 : if (dump_file && dump_flags & TDF_DETAILS)
9553 : {
9554 9583 : fprintf (dump_file, "Match-and-simplified ");
9555 9583 : print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
9556 9583 : fprintf (dump_file, " to ");
9557 9583 : print_generic_expr (dump_file, res);
9558 9583 : fprintf (dump_file, "\n");
9559 : }
9560 35541376 : return res;
9561 : }
9562 : }
9563 :
 : /* gimple_simplify did not help; fold manually by statement kind,
 : valueizing operands as we go. */
9564 382178689 : location_t loc = gimple_location (stmt);
9565 382178689 : switch (gimple_code (stmt))
9566 : {
9567 330256221 : case GIMPLE_ASSIGN:
9568 330256221 : {
9569 330256221 : enum tree_code subcode = gimple_assign_rhs_code (stmt);
9570 :
9571 330256221 : switch (get_gimple_rhs_class (subcode))
9572 : {
9573 119459104 : case GIMPLE_SINGLE_RHS:
9574 119459104 : {
9575 119459104 : tree rhs = gimple_assign_rhs1 (stmt);
9576 119459104 : enum tree_code_class kind = TREE_CODE_CLASS (subcode);
9577 :
9578 119459104 : if (TREE_CODE (rhs) == SSA_NAME)
9579 : {
9580 : /* If the RHS is an SSA_NAME, return its known constant value,
9581 : if any. */
9582 9535284 : return (*valueize) (rhs);
9583 : }
9584 : /* Handle propagating invariant addresses into address
9585 : operations. */
9586 109923820 : else if (TREE_CODE (rhs) == ADDR_EXPR
9587 109923820 : && !is_gimple_min_invariant (rhs))
9588 : {
9589 7507993 : poly_int64 offset = 0;
9590 7507993 : tree base;
9591 7507993 : base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
9592 : &offset,
9593 : valueize);
9594 7507993 : if (base
9595 7507993 : && (CONSTANT_CLASS_P (base)
9596 6783741 : || decl_address_invariant_p (base)))
9597 198215 : return build_invariant_address (TREE_TYPE (rhs),
9598 198215 : base, offset);
9599 : }
 : /* A fully-populated vector CONSTRUCTOR folds to a VECTOR_CST
 : when all valueized elements are constants. */
9600 102415827 : else if (TREE_CODE (rhs) == CONSTRUCTOR
9601 1054338 : && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
9602 103889996 : && known_eq (CONSTRUCTOR_NELTS (rhs),
9603 : TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
9604 : {
9605 404674 : unsigned i, nelts;
9606 404674 : tree val;
9607 :
9608 404674 : nelts = CONSTRUCTOR_NELTS (rhs);
9609 404674 : tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
9610 901221 : FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
9611 : {
9612 485365 : val = (*valueize) (val);
9613 485365 : if (TREE_CODE (val) == INTEGER_CST
9614 413853 : || TREE_CODE (val) == REAL_CST
9615 393492 : || TREE_CODE (val) == FIXED_CST)
9616 91873 : vec.quick_push (val);
9617 : else
9618 : return NULL_TREE;
9619 : }
9620 :
9621 11182 : return vec.build ();
9622 404674 : }
9623 109320931 : if (subcode == OBJ_TYPE_REF)
9624 : {
9625 282359 : tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
9626 : /* If callee is constant, we can fold away the wrapper. */
9627 282359 : if (is_gimple_min_invariant (val))
9628 : return val;
9629 : }
9630 :
9631 109320748 : if (kind == tcc_reference)
9632 : {
9633 72933069 : if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
9634 70734308 : || TREE_CODE (rhs) == REALPART_EXPR
9635 69886586 : || TREE_CODE (rhs) == IMAGPART_EXPR)
9636 74781943 : && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
9637 : {
9638 3128255 : tree val = (*valueize) (TREE_OPERAND (rhs, 0));
9639 3128255 : return fold_unary_loc (EXPR_LOCATION (rhs),
9640 3128255 : TREE_CODE (rhs),
9641 6256510 : TREE_TYPE (rhs), val);
9642 : }
9643 69804814 : else if (TREE_CODE (rhs) == BIT_FIELD_REF
9644 69804814 : && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
9645 : {
9646 582402 : tree val = (*valueize) (TREE_OPERAND (rhs, 0));
9647 582402 : return fold_ternary_loc (EXPR_LOCATION (rhs),
9648 582402 : TREE_CODE (rhs),
9649 582402 : TREE_TYPE (rhs), val,
9650 582402 : TREE_OPERAND (rhs, 1),
9651 1164804 : TREE_OPERAND (rhs, 2));
9652 : }
9653 69222412 : else if (TREE_CODE (rhs) == MEM_REF
9654 69222412 : && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
9655 : {
9656 14241504 : tree val = (*valueize) (TREE_OPERAND (rhs, 0));
9657 14241504 : if (TREE_CODE (val) == ADDR_EXPR
9658 14241504 : && is_gimple_min_invariant (val))
9659 : {
9660 938648 : tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
9661 : unshare_expr (val),
9662 : TREE_OPERAND (rhs, 1));
9663 938648 : if (tem)
9664 69222412 : rhs = tem;
9665 : }
9666 : }
9667 69222412 : return fold_const_aggregate_ref_1 (rhs, valueize);
9668 : }
9669 36387679 : else if (kind == tcc_declaration)
9670 8601824 : return get_symbol_constant_value (rhs);
9671 : return rhs;
9672 : }
9673 :
9674 : case GIMPLE_UNARY_RHS:
9675 : return NULL_TREE;
9676 :
9677 157682419 : case GIMPLE_BINARY_RHS:
9678 : /* Translate &x + CST into an invariant form suitable for
9679 : further propagation. */
9680 157682419 : if (subcode == POINTER_PLUS_EXPR)
9681 : {
9682 18593961 : tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
9683 18593961 : tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
9684 18593961 : if (TREE_CODE (op0) == ADDR_EXPR
9685 5283820 : && TREE_CODE (op1) == INTEGER_CST)
9686 : {
9687 518768 : tree off = fold_convert (ptr_type_node, op1);
9688 518768 : return build1_loc
9689 1037536 : (loc, ADDR_EXPR, TREE_TYPE (op0),
9690 518768 : fold_build2 (MEM_REF,
9691 : TREE_TYPE (TREE_TYPE (op0)),
9692 518768 : unshare_expr (op0), off));
9693 : }
9694 : }
9695 : /* Canonicalize bool != 0 and bool == 0 appearing after
9696 : valueization. While gimple_simplify handles this
9697 : it can get confused by the ~X == 1 -> X == 0 transform
9698 : which we can't reduce to a SSA name or a constant
9699 : (and we have no way to tell gimple_simplify to not
9700 : consider those transforms in the first place). */
9701 139088458 : else if (subcode == EQ_EXPR
9702 139088458 : || subcode == NE_EXPR)
9703 : {
9704 3184054 : tree lhs = gimple_assign_lhs (stmt);
9705 3184054 : tree op0 = gimple_assign_rhs1 (stmt);
9706 3184054 : if (useless_type_conversion_p (TREE_TYPE (lhs),
9707 3184054 : TREE_TYPE (op0)))
9708 : {
9709 24463 : tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
9710 24463 : op0 = (*valueize) (op0);
9711 24463 : if (TREE_CODE (op0) == INTEGER_CST)
9712 700 : std::swap (op0, op1);
9713 24463 : if (TREE_CODE (op1) == INTEGER_CST
9714 24463 : && ((subcode == NE_EXPR && integer_zerop (op1))
9715 2309 : || (subcode == EQ_EXPR && integer_onep (op1))))
9716 282 : return op0;
9717 : }
9718 : }
9719 : return NULL_TREE;
9720 :
9721 702954 : case GIMPLE_TERNARY_RHS:
9722 702954 : {
9723 : /* Handle ternary operators that can appear in GIMPLE form. */
9724 702954 : tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
9725 702954 : tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
9726 702954 : tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
9727 702954 : return fold_ternary_loc (loc, subcode,
9728 702954 : TREE_TYPE (gimple_assign_lhs (stmt)),
9729 702954 : op0, op1, op2);
9730 : }
9731 :
9732 0 : default:
9733 0 : gcc_unreachable ();
9734 : }
9735 : }
9736 :
9737 14313753 : case GIMPLE_CALL:
9738 14313753 : {
9739 14313753 : tree fn;
9740 14313753 : gcall *call_stmt = as_a <gcall *> (stmt);
9741 :
 : /* Internal functions: fold the UBSAN overflow checks when the
 : result provably does not overflow, and __builtin_expect when
 : its first argument valueizes to a constant. */
9742 14313753 : if (gimple_call_internal_p (stmt))
9743 : {
9744 1293762 : enum tree_code subcode = ERROR_MARK;
9745 1293762 : switch (gimple_call_internal_fn (stmt))
9746 : {
9747 : case IFN_UBSAN_CHECK_ADD:
9748 : subcode = PLUS_EXPR;
9749 : break;
9750 7983 : case IFN_UBSAN_CHECK_SUB:
9751 7983 : subcode = MINUS_EXPR;
9752 7983 : break;
9753 6815 : case IFN_UBSAN_CHECK_MUL:
9754 6815 : subcode = MULT_EXPR;
9755 6815 : break;
9756 142120 : case IFN_BUILTIN_EXPECT:
9757 142120 : {
9758 142120 : tree arg0 = gimple_call_arg (stmt, 0);
9759 142120 : tree op0 = (*valueize) (arg0);
9760 142120 : if (TREE_CODE (op0) == INTEGER_CST)
9761 : return op0;
9762 : return NULL_TREE;
9763 : }
9764 : default:
9765 : return NULL_TREE;
9766 : }
9767 22924 : tree arg0 = gimple_call_arg (stmt, 0);
9768 22924 : tree arg1 = gimple_call_arg (stmt, 1);
9769 22924 : tree op0 = (*valueize) (arg0);
9770 22924 : tree op1 = (*valueize) (arg1);
9771 :
9772 22924 : if (TREE_CODE (op0) != INTEGER_CST
9773 2499 : || TREE_CODE (op1) != INTEGER_CST)
9774 : {
9775 22402 : switch (subcode)
9776 : {
9777 6715 : case MULT_EXPR:
9778 : /* x * 0 = 0 * x = 0 without overflow. */
9779 6715 : if (integer_zerop (op0) || integer_zerop (op1))
9780 20 : return build_zero_cst (TREE_TYPE (arg0));
9781 : break;
9782 7641 : case MINUS_EXPR:
9783 : /* y - y = 0 without overflow. */
9784 7641 : if (operand_equal_p (op0, op1, 0))
9785 0 : return build_zero_cst (TREE_TYPE (arg0));
9786 : break;
9787 : default:
9788 : break;
9789 : }
9790 : }
9791 22904 : tree res
9792 22904 : = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
9793 22904 : if (res
9794 2870 : && TREE_CODE (res) == INTEGER_CST
9795 23426 : && !TREE_OVERFLOW (res))
9796 : return res;
9797 : return NULL_TREE;
9798 : }
9799 :
9800 13019991 : fn = (*valueize) (gimple_call_fn (stmt));
9801 13019991 : if (TREE_CODE (fn) == ADDR_EXPR
9802 12387023 : && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
9803 12386959 : && fndecl_built_in_p (TREE_OPERAND (fn, 0))
9804 19033355 : && gimple_builtin_call_types_compatible_p (stmt,
9805 6013364 : TREE_OPERAND (fn, 0)))
9806 : {
9807 5909891 : tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
9808 5909891 : tree retval;
9809 5909891 : unsigned i;
9810 18337956 : for (i = 0; i < gimple_call_num_args (stmt); ++i)
9811 12428065 : args[i] = (*valueize) (gimple_call_arg (stmt, i));
9812 5909891 : retval = fold_builtin_call_array (loc,
9813 : gimple_call_return_type (call_stmt),
9814 : fn, gimple_call_num_args (stmt), args);
9815 5909891 : if (retval)
9816 : {
9817 : /* fold_call_expr wraps the result inside a NOP_EXPR. */
9818 52312 : STRIP_NOPS (retval);
9819 52312 : retval = fold_convert (gimple_call_return_type (call_stmt),
9820 : retval);
9821 : }
9822 5909891 : return retval;
9823 : }
9824 : return NULL_TREE;
9825 : }
9826 :
9827 : default:
9828 : return NULL_TREE;
9829 : }
9830 : }
9831 :
9832 : /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
9833 : Returns NULL_TREE if folding to a constant is not possible, otherwise
9834 : returns a constant according to is_gimple_min_invariant. */
9835 :
9836 : tree
9837 4415 : gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
9838 : {
9839 4415 : tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
9840 4415 : if (res && is_gimple_min_invariant (res))
9841 : return res;
9842 : return NULL_TREE;
9843 : }
9844 :
9845 :
9846 : /* The following set of functions are supposed to fold references using
9847 : their constant initializers. */
9848 :
9849 : /* See if we can find constructor defining value of BASE.
9850 : When we know the consructor with constant offset (such as
9851 : base is array[40] and we do know constructor of array), then
9852 : BIT_OFFSET is adjusted accordingly.
9853 :
9854 : As a special case, return error_mark_node when constructor
9855 : is not explicitly available, but it is known to be zero
9856 : such as 'static const int a;'. */
9857 : static tree
9858 127856009 : get_base_constructor (tree base, poly_int64 *bit_offset,
9859 : tree (*valueize)(tree))
9860 : {
9861 127951816 : poly_int64 bit_offset2, size, max_size;
9862 127951816 : bool reverse;
9863 :
 : /* For a MEM_REF fold its byte offset into *BIT_OFFSET and look
 : through the (possibly valueized) address to the underlying decl. */
9864 127951816 : if (TREE_CODE (base) == MEM_REF)
9865 : {
9866 137602052 : poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT;
9867 68801026 : if (!boff.to_shwi (bit_offset))
9868 68471650 : return NULL_TREE;
9869 :
9870 68800679 : if (valueize
9871 68800679 : && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
9872 37202913 : base = valueize (TREE_OPERAND (base, 0));
9873 68800679 : if (!base || TREE_CODE (base) != ADDR_EXPR)
9874 : return NULL_TREE;
9875 329376 : base = TREE_OPERAND (base, 0);
9876 : }
9877 59150790 : else if (valueize
9878 31623804 : && TREE_CODE (base) == SSA_NAME)
9879 0 : base = valueize (base);
9880 :
9881 : /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
9882 : DECL_INITIAL. If BASE is a nested reference into another
9883 : ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
9884 : the inner reference. */
9885 59480166 : switch (TREE_CODE (base))
9886 : {
9887 51813202 : case VAR_DECL:
9888 51813202 : case CONST_DECL:
9889 51813202 : {
9890 51813202 : tree init = ctor_for_folding (base);
9891 :
9892 : /* Our semantic is exact opposite of ctor_for_folding;
9893 : NULL means unknown, while error_mark_node is 0. */
9894 51813202 : if (init == error_mark_node)
9895 : return NULL_TREE;
9896 1298589 : if (!init)
9897 1173 : return error_mark_node;
9898 : return init;
9899 : }
9900 :
9901 95807 : case VIEW_CONVERT_EXPR:
9902 95807 : return get_base_constructor (TREE_OPERAND (base, 0),
9903 95807 : bit_offset, valueize);
9904 :
 : /* Recurse into the containing object, accumulating the constant
 : offset of the component; give up on variable-sized accesses. */
9905 329050 : case ARRAY_REF:
9906 329050 : case COMPONENT_REF:
9907 329050 : base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
9908 : &reverse);
9909 329050 : if (!known_size_p (max_size) || maybe_ne (size, max_size))
9910 : return NULL_TREE;
9911 268479 : *bit_offset += bit_offset2;
9912 268479 : return get_base_constructor (base, bit_offset, valueize);
9913 :
9914 : case CONSTRUCTOR:
9915 : return base;
9916 :
9917 7242107 : default:
9918 7242107 : if (CONSTANT_CLASS_P (base))
9919 : return base;
9920 :
9921 : return NULL_TREE;
9922 : }
9923 : }
9924 :
9925 : /* CTOR is a CONSTRUCTOR of an array or vector type. Fold a reference of SIZE
9926 : bits to the memory at bit OFFSET. If non-null, TYPE is the expected type of
9927 : the reference; otherwise the type of the referenced element is used instead.
9928 : When SIZE is zero, attempt to fold a reference to the entire element OFFSET
9929 : refers to. Increment *SUBOFF by the bit offset of the accessed element. */
9930 :
9931 : static tree
9932 707611 : fold_array_ctor_reference (tree type, tree ctor,
9933 : unsigned HOST_WIDE_INT offset,
9934 : unsigned HOST_WIDE_INT size,
9935 : tree from_decl,
9936 : unsigned HOST_WIDE_INT *suboff)
9937 : {
9938 707611 : offset_int low_bound;
9939 707611 : offset_int elt_size;
9940 707611 : offset_int access_index;
9941 707611 : tree domain_type = NULL_TREE;
9942 707611 : HOST_WIDE_INT inner_offset;
9943 :
9944 : /* Compute low bound and elt size. */
9945 707611 : if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
9946 707611 : domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
9947 707611 : if (domain_type && TYPE_MIN_VALUE (domain_type))
9948 : {
9949 : /* Static constructors for variably sized objects make no sense. */
9950 707611 : if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
9951 : return NULL_TREE;
9952 707611 : low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
9953 : }
9954 : else
9955 0 : low_bound = 0;
9956 : /* Static constructors for variably sized objects make no sense. */
9957 707611 : if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
9958 : return NULL_TREE;
9959 707611 : elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
9960 :
9961 : /* When TYPE is non-null, verify that it specifies a constant-sized
9962 : access of a multiple of the array element size. Avoid division
9963 : by zero below when ELT_SIZE is zero, such as with the result of
9964 : an initializer for a zero-length array or an empty struct. */
9965 707611 : if (elt_size == 0
9966 707611 : || (type
9967 707575 : && (!TYPE_SIZE_UNIT (type)
9968 707575 : || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST)))
9969 36 : return NULL_TREE;
9970 :
9971 : /* Compute the array index we look for. */
9972 707575 : access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
9973 : elt_size);
9974 707575 : access_index += low_bound;
9975 :
9976 : /* And offset within the access. */
9977 707575 : inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
9978 :
9979 707575 : unsigned HOST_WIDE_INT elt_sz = elt_size.to_uhwi ();
 : /* The access spans more than one array element: byte-encode the
 : consecutive (possibly implicit-zero) elements into BUF and
 : re-interpret the bytes in TYPE. */
9980 707575 : if (size > elt_sz * BITS_PER_UNIT)
9981 : {
9982 : /* native_encode_expr constraints. */
9983 50619 : if (size > MAX_BITSIZE_MODE_ANY_MODE
9984 40552 : || size % BITS_PER_UNIT != 0
9985 40552 : || inner_offset % BITS_PER_UNIT != 0
9986 40552 : || elt_sz > MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT)
9987 : return NULL_TREE;
9988 :
9989 40552 : unsigned ctor_idx;
9990 40552 : tree val = get_array_ctor_element_at_index (ctor, access_index,
9991 : &ctor_idx);
9992 40576 : if (!val && ctor_idx >= CONSTRUCTOR_NELTS (ctor))
9993 23 : return build_zero_cst (type);
9994 :
9995 : /* native-encode adjacent ctor elements. */
9996 40529 : unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
9997 40529 : unsigned bufoff = 0;
9998 40529 : offset_int index = 0;
9999 40529 : offset_int max_index = access_index;
10000 40529 : constructor_elt *elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
10001 40529 : if (!val)
10002 1 : val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
10003 40528 : else if (!CONSTANT_CLASS_P (val))
10004 : return NULL_TREE;
 : /* [INDEX, MAX_INDEX] is the array-index range covered by ELT;
 : a missing index means the element sits at ACCESS_INDEX. */
10005 40126 : if (!elt->index)
10006 : ;
10007 33835 : else if (TREE_CODE (elt->index) == RANGE_EXPR)
10008 : {
10009 20 : index = wi::to_offset (TREE_OPERAND (elt->index, 0));
10010 20 : max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
10011 : }
10012 : else
10013 33815 : index = max_index = wi::to_offset (elt->index);
10014 40126 : index = wi::umax (index, access_index);
10015 273621 : do
10016 : {
10017 273621 : if (bufoff + elt_sz > sizeof (buf))
10018 0 : elt_sz = sizeof (buf) - bufoff;
10019 273621 : int len;
10020 273621 : if (TREE_CODE (val) == RAW_DATA_CST)
10021 : {
10022 20 : gcc_assert (inner_offset == 0);
10023 20 : if (!elt->index || TREE_CODE (elt->index) != INTEGER_CST)
10024 : return NULL_TREE;
10025 40 : inner_offset = (access_index
10026 20 : - wi::to_offset (elt->index)).to_uhwi ();
10027 20 : len = MIN (sizeof (buf) - bufoff,
10028 : (unsigned) (RAW_DATA_LENGTH (val) - inner_offset));
10029 20 : memcpy (buf + bufoff, RAW_DATA_POINTER (val) + inner_offset,
10030 : len);
10031 20 : access_index += len - 1;
10032 : }
10033 : else
10034 : {
10035 547202 : len = native_encode_expr (val, buf + bufoff, elt_sz,
10036 273601 : inner_offset / BITS_PER_UNIT);
10037 273601 : if (len != (int) elt_sz - inner_offset / BITS_PER_UNIT)
10038 : return NULL_TREE;
10039 : }
10040 273621 : inner_offset = 0;
10041 273621 : bufoff += len;
10042 :
10043 273621 : access_index += 1;
10044 273621 : if (wi::cmpu (access_index, index) == 0)
10045 2 : val = elt->value;
10046 273619 : else if (wi::cmpu (access_index, max_index) > 0)
10047 : {
10048 273379 : ctor_idx++;
10049 273379 : if (ctor_idx >= CONSTRUCTOR_NELTS (ctor))
10050 : {
 : /* Indices past the last initializer are implicit zeros. */
10051 38391 : val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
10052 38391 : ++max_index;
10053 : }
10054 : else
10055 : {
10056 234988 : elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
10057 234988 : index = 0;
10058 234988 : max_index = access_index;
10059 234988 : if (!elt->index)
10060 : ;
10061 234180 : else if (TREE_CODE (elt->index) == RANGE_EXPR)
10062 : {
10063 0 : index = wi::to_offset (TREE_OPERAND (elt->index, 0));
10064 0 : max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
10065 : }
10066 : else
10067 234180 : index = max_index = wi::to_offset (elt->index);
10068 234988 : index = wi::umax (index, access_index);
10069 234988 : if (wi::cmpu (access_index, index) == 0)
10070 234983 : val = elt->value;
10071 : else
10072 5 : val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
10073 : }
10074 : }
10075 : }
10076 273621 : while (bufoff < size / BITS_PER_UNIT);
10077 40126 : *suboff += size;
10078 40126 : return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
10079 : }
10080 :
 : /* Single-element access: look the element up and recurse into it. */
10081 656956 : unsigned ctor_idx;
10082 656956 : if (tree val = get_array_ctor_element_at_index (ctor, access_index,
10083 : &ctor_idx))
10084 : {
10085 655835 : if (TREE_CODE (val) == RAW_DATA_CST)
10086 : {
10087 2618 : if (size != BITS_PER_UNIT || elt_sz != 1 || inner_offset != 0)
10088 : return NULL_TREE;
10089 2610 : constructor_elt *elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
10090 2610 : if (elt->index == NULL_TREE || TREE_CODE (elt->index) != INTEGER_CST)
10091 : return NULL_TREE;
10092 2610 : unsigned o = (access_index - wi::to_offset (elt->index)).to_uhwi ();
10093 2610 : val = build_int_cst (TREE_TYPE (val), RAW_DATA_UCHAR_ELT (val, o));
10094 : }
10095 655827 : if (!size && TREE_CODE (val) != CONSTRUCTOR)
10096 : {
10097 : /* For the final reference to the entire accessed element
10098 : (SIZE is zero), reset INNER_OFFSET, disregard TYPE (which
10099 : may be null) in favor of the type of the element, and set
10100 : SIZE to the size of the accessed element. */
10101 22947 : inner_offset = 0;
10102 22947 : type = TREE_TYPE (val);
10103 22947 : size = elt_sz * BITS_PER_UNIT;
10104 : }
10105 1687943 : else if (size && access_index < CONSTRUCTOR_NELTS (ctor) - 1
10106 460298 : && TREE_CODE (val) == CONSTRUCTOR
10107 16860 : && (elt_sz * BITS_PER_UNIT - inner_offset) < size)
10108 : /* If this isn't the last element in the CTOR and a CTOR itself
10109 : and it does not cover the whole object we are requesting give up
10110 : since we're not set up for combining from multiple CTORs. */
10111 26 : return NULL_TREE;
10112 :
10113 655801 : *suboff += access_index.to_uhwi () * elt_sz * BITS_PER_UNIT;
10114 655801 : return fold_ctor_reference (type, val, inner_offset, size, from_decl,
10115 : suboff);
10116 : }
10117 :
10118 : /* Memory not explicitly mentioned in constructor is 0 (or
10119 : the reference is out of range). */
10120 1121 : return type ? build_zero_cst (type) : NULL_TREE;
10121 : }
10122 :
10123 : /* CTOR is a CONSTRUCTOR of a record or union type. Fold a reference of SIZE
10124 : bits to the memory at bit OFFSET. If non-null, TYPE is the expected type of
10125 : the reference; otherwise the type of the referenced member is used instead.
10126 : When SIZE is zero, attempt to fold a reference to the entire member OFFSET
10127 : refers to. Increment *SUBOFF by the bit offset of the accessed member. */
10128 :
10129 : static tree
10130 76449 : fold_nonarray_ctor_reference (tree type, tree ctor,
10131 : unsigned HOST_WIDE_INT offset,
10132 : unsigned HOST_WIDE_INT size,
10133 : tree from_decl,
10134 : unsigned HOST_WIDE_INT *suboff)
10135 : {
10136 76449 : unsigned HOST_WIDE_INT cnt;
10137 76449 : tree cfield, cval;
10138 :
 : /* Walk the initialized fields; the first field overlapping the
 : access decides the result (spanning fields is not handled). */
10139 116566 : FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
10140 : {
10141 107607 : tree byte_offset = DECL_FIELD_OFFSET (cfield);
10142 107607 : tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
10143 107607 : tree field_size = DECL_SIZE (cfield);
10144 :
10145 107607 : if (!field_size)
10146 : {
10147 : /* Determine the size of the flexible array member from
10148 : the size of the initializer provided for it. */
10149 847 : field_size = TYPE_SIZE (TREE_TYPE (cval));
10150 : }
10151 :
10152 : /* Variable sized objects in static constructors makes no sense,
10153 : but field_size can be NULL for flexible array members. */
10154 107607 : gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
10155 : && TREE_CODE (byte_offset) == INTEGER_CST
10156 : && (field_size != NULL_TREE
10157 : ? TREE_CODE (field_size) == INTEGER_CST
10158 : : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
10159 :
10160 : /* Compute bit offset of the field. */
10161 107607 : offset_int bitoffset
10162 107607 : = (wi::to_offset (field_offset)
10163 107607 : + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
10164 : /* Compute bit offset where the field ends. */
10165 107607 : offset_int bitoffset_end;
10166 107607 : if (field_size != NULL_TREE)
10167 107607 : bitoffset_end = bitoffset + wi::to_offset (field_size);
10168 : else
10169 0 : bitoffset_end = 0;
10170 :
10171 : /* Compute the bit offset of the end of the desired access.
10172 : As a special case, if the size of the desired access is
10173 : zero, assume the access is to the entire field (and let
10174 : the caller make any necessary adjustments by storing
10175 : the actual bounds of the field in FIELDBOUNDS). */
10176 107607 : offset_int access_end = offset_int (offset);
10177 107607 : if (size)
10178 66179 : access_end += size;
10179 : else
10180 41428 : access_end = bitoffset_end;
10181 :
10182 : /* Is there any overlap between the desired access at
10183 : [OFFSET, OFFSET+SIZE) and the offset of the field within
10184 : the object at [BITOFFSET, BITOFFSET_END)? */
10185 107607 : if (wi::cmps (access_end, bitoffset) > 0
10186 107607 : && (field_size == NULL_TREE
10187 105276 : || wi::lts_p (offset, bitoffset_end)))
10188 : {
10189 67490 : *suboff += bitoffset.to_uhwi ();
10190 :
10191 67490 : if (!size && TREE_CODE (cval) != CONSTRUCTOR)
10192 : {
10193 : /* For the final reference to the entire accessed member
10194 : (SIZE is zero), reset OFFSET, disregard TYPE (which may
10195 : be null) in favor of the type of the member, and set
10196 : SIZE to the size of the accessed member. */
10197 19305 : offset = bitoffset.to_uhwi ();
10198 19305 : type = TREE_TYPE (cval);
10199 19305 : size = (bitoffset_end - bitoffset).to_uhwi ();
10200 : }
10201 :
10202 : /* We do have overlap. Now see if the field is large enough
10203 : to cover the access. Give up for accesses that extend
10204 : beyond the end of the object or that span multiple fields. */
10205 67490 : if (wi::cmps (access_end, bitoffset_end) > 0)
10206 : return NULL_TREE;
10207 66858 : if (offset < bitoffset)
10208 : return NULL_TREE;
10209 :
10210 66858 : offset_int inner_offset = offset_int (offset) - bitoffset;
10211 :
10212 : /* Integral bit-fields are left-justified on big-endian targets, so
10213 : we must arrange for native_encode_int to start at their MSB. */
10214 66858 : if (DECL_BIT_FIELD (cfield) && INTEGRAL_TYPE_P (TREE_TYPE (cfield)))
10215 : {
10216 66858 : if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
10217 : return NULL_TREE;
10218 66858 : if (BYTES_BIG_ENDIAN)
10219 : {
10220 : tree ctype = TREE_TYPE (cfield);
10221 : unsigned int encoding_size;
10222 : if (TYPE_MODE (ctype) != BLKmode)
10223 : encoding_size
10224 : = GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (ctype));
10225 : else
10226 : encoding_size = TREE_INT_CST_LOW (TYPE_SIZE (ctype));
10227 : inner_offset += encoding_size - wi::to_offset (field_size);
10228 : }
10229 : }
10230 :
 : /* Recurse into the field's own initializer with the access
 : rebased to the start of the field. */
10231 66858 : return fold_ctor_reference (type, cval,
10232 66858 : inner_offset.to_uhwi (), size,
10233 : from_decl, suboff);
10234 : }
10235 : }
10236 :
 : /* No initialized field overlaps: the memory reads as zero. */
10237 8959 : if (!type)
10238 : return NULL_TREE;
10239 :
10240 8959 : return build_zero_cst (type);
10241 : }
10242 :
10243 : /* CTOR is a value initializing memory. Fold a reference of TYPE and
10244 : bit size POLY_SIZE to the memory at bit POLY_OFFSET. When POLY_SIZE
10245 : is zero, attempt to fold a reference to the entire subobject
10246 : which POLY_OFFSET refers to. This is used when folding accesses to
10247 : string members of aggregates. When non-null, set *SUBOFF to
10248 : the bit offset of the accessed subobject; return NULL_TREE if
10248 : the reference cannot be folded to a constant. */
10249 :
10250 : tree
10251 1623460 : fold_ctor_reference (tree type, tree ctor, const poly_uint64 &poly_offset,
10252 : const poly_uint64 &poly_size, tree from_decl,
10253 : unsigned HOST_WIDE_INT *suboff /* = NULL */)
10254 : {
10255 1623460 : tree ret;
10256 :
10257 : /* We found the field with exact match. */
10258 1623460 : if (type
10259 1623460 : && useless_type_conversion_p (type, TREE_TYPE (ctor))
10260 2296477 : && known_eq (poly_offset, 0U))
10261 671763 : return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
10262 :
10263 : /* The remaining optimizations need a constant size and offset. */
10264 951697 : unsigned HOST_WIDE_INT size, offset;
10265 951697 : if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
10266 : return NULL_TREE;
10267 :
10268 : /* We are at the end of walk, see if we can view convert the
10269 : result. */
10270 951697 : if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
10271 : /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
10272 22802 : && known_eq (wi::to_poly_widest (TYPE_SIZE (type)), size)
10273 22695 : && known_eq (wi::to_poly_widest (TYPE_SIZE (TREE_TYPE (ctor))), size))
10274 : {
10275 14391 : ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
10276 14391 : if (ret)
10277 : {
10278 14391 : ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
10279 14391 : if (ret)
10280 14331 : STRIP_USELESS_TYPE_CONVERSION (ret);
10281 : }
10282 14391 : return ret;
10283 : }
10284 :
10285 : /* For constants and byte-aligned/sized reads, try to go through
10286 : native_encode/interpret. */
10287 937306 : if (CONSTANT_CLASS_P (ctor)
10288 : && BITS_PER_UNIT == 8
10289 143818 : && offset % BITS_PER_UNIT == 0
10290 143814 : && offset / BITS_PER_UNIT <= INT_MAX
10291 143782 : && size % BITS_PER_UNIT == 0
10292 143773 : && size <= MAX_BITSIZE_MODE_ANY_MODE
10293 1080274 : && can_native_interpret_type_p (type))
10294 : {
10295 100481 : unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
10296 200962 : int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
10297 100481 : offset / BITS_PER_UNIT);
10298 100481 : if (len > 0)
10299 99763 : return native_interpret_expr (type, buf, len);
10300 : }
10301 :
10302 : /* For constructors, try first a recursive local processing, but in any case
10303 : this requires the native storage order. */
10304 837543 : if (TREE_CODE (ctor) == CONSTRUCTOR
10305 837543 : && !(AGGREGATE_TYPE_P (TREE_TYPE (ctor))
10306 784290 : && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (ctor))))
10307 : {
10308 784060 : unsigned HOST_WIDE_INT dummy = 0;
10309 784060 : if (!suboff)
10310 658512 : suboff = &dummy;
10311 :
10312 784060 : tree ret;
10313 784060 : if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
10314 784060 : || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
10315 707611 : ret = fold_array_ctor_reference (type, ctor, offset, size,
10316 : from_decl, suboff);
10317 : else
10318 76449 : ret = fold_nonarray_ctor_reference (type, ctor, offset, size,
10319 : from_decl, suboff);
10320 :
10321 : /* Otherwise fall back to native_encode_initializer. This may be done
10322 : only from the outermost fold_ctor_reference call (because it itself
10323 : recurses into CONSTRUCTORs and doesn't update suboff). */
10324 784060 : if (ret == NULL_TREE
10325 250281 : && suboff == &dummy
10326 : && BITS_PER_UNIT == 8
10327 240718 : && offset % BITS_PER_UNIT == 0
10328 240716 : && offset / BITS_PER_UNIT <= INT_MAX
10329 240716 : && size % BITS_PER_UNIT == 0
10330 240707 : && size <= MAX_BITSIZE_MODE_ANY_MODE
10331 1014700 : && can_native_interpret_type_p (type))
10332 : {
10333 193612 : unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
10334 387224 : int len = native_encode_initializer (ctor, buf, size / BITS_PER_UNIT,
10335 193612 : offset / BITS_PER_UNIT);
10336 193612 : if (len > 0)
10337 1302 : return native_interpret_expr (type, buf, len);
10338 : }
10339 :
10340 782758 : return ret;
10341 : }
10342 :
10343 : return NULL_TREE;
10344 : }
10345 :
10346 : /* Return the tree representing the element referenced by T if T is an
10347 : ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
10348 : names using VALUEIZE. Return NULL_TREE otherwise. */
10349 :
10350 : tree
10351 133361374 : fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
10352 : {
10353 133361374 : tree ctor, idx, base;
10354 133361374 : poly_int64 offset, size, max_size;
10355 133361374 : tree tem;
10356 133361374 : bool reverse;
10357 :
10358 133361374 : if (TREE_THIS_VOLATILE (t))
10359 : return NULL_TREE;
10360 :
10361 133042401 : if (DECL_P (t))
10362 260734 : return get_symbol_constant_value (t);
10363 :
10364 132781667 : tem = fold_read_from_constant_string (t);
10365 132781667 : if (tem)
10366 : return tem;
10367 :
10368 132779801 : switch (TREE_CODE (t))
10369 : {
10370 10411557 : case ARRAY_REF:
10371 10411557 : case ARRAY_RANGE_REF:
10372 : /* Constant indexes are handled well by get_base_constructor.
10373 : Only special case variable offsets.
10374 : FIXME: This code can't handle nested references with variable indexes
10375 : (they will be handled only by iteration of ccp). Perhaps we can bring
10376 : get_ref_base_and_extent here and make it use a valueize callback. */
10377 10411557 : if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
10378 6110365 : && valueize
10379 4092194 : && (idx = (*valueize) (TREE_OPERAND (t, 1)))
10380 14503751 : && poly_int_tree_p (idx))
10381 : {
10382 1631022 : tree low_bound, unit_size;
10383 :
10384 : /* If the resulting bit-offset is constant, track it. */
10385 1631022 : if ((low_bound = array_ref_low_bound (t),
10386 1631022 : poly_int_tree_p (low_bound))
10387 1631022 : && (unit_size = array_ref_element_size (t),
10388 1631022 : tree_fits_uhwi_p (unit_size)))
10389 : {
10390 1631022 : poly_offset_int woffset
10391 1631022 : = wi::sext (wi::to_poly_offset (idx)
10392 3262044 : - wi::to_poly_offset (low_bound),
10393 1631022 : TYPE_PRECISION (sizetype));
10394 1631022 : woffset *= tree_to_uhwi (unit_size);
10395 1631022 : woffset *= BITS_PER_UNIT;
10396 1631022 : if (woffset.to_shwi (&offset))
10397 : {
10398 1630891 : base = TREE_OPERAND (t, 0);
10399 1630891 : ctor = get_base_constructor (base, &offset, valueize);
10400 : /* Empty constructor. Always fold to 0. */
10401 1630891 : if (ctor == error_mark_node)
10402 1630891 : return build_zero_cst (TREE_TYPE (t));
10403 : /* Out of bound array access. Value is undefined,
10404 : but don't fold. */
10405 1630813 : if (maybe_lt (offset, 0))
10406 : return NULL_TREE;
10407 : /* We cannot determine ctor. */
10408 1630343 : if (!ctor)
10409 : return NULL_TREE;
10410 169225 : return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
10411 169225 : tree_to_uhwi (unit_size)
10412 338450 : * BITS_PER_UNIT,
10413 : base);
10414 : }
10415 : }
10416 : }
10417 : /* Fallthru. */
10418 :
10419 125956639 : case COMPONENT_REF:
10420 125956639 : case BIT_FIELD_REF:
10421 125956639 : case TARGET_MEM_REF:
10422 125956639 : case MEM_REF:
10423 125956639 : base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
10424 125956639 : ctor = get_base_constructor (base, &offset, valueize);
10425 :
10426 : /* We cannot determine ctor. */
10427 125956639 : if (!ctor)
10428 : return NULL_TREE;
10429 : /* Empty constructor. Always fold to 0. */
10430 1243438 : if (ctor == error_mark_node)
10431 1095 : return build_zero_cst (TREE_TYPE (t));
10432 : /* We do not know precise access. */
10433 1242343 : if (!known_size_p (max_size) || maybe_ne (max_size, size))
10434 : return NULL_TREE;
10435 : /* Out of bound array access. Value is undefined, but don't fold. */
10436 554449 : if (maybe_lt (offset, 0))
10437 : return NULL_TREE;
10438 : /* Access with reverse storage order. */
10439 554068 : if (reverse)
10440 : return NULL_TREE;
10441 :
10442 554068 : tem = fold_ctor_reference (TREE_TYPE (t), ctor, offset, size, base);
10443 554068 : if (tem)
10444 : return tem;
10445 :
10446 : /* For bit field reads try to read the representative and
10447 : adjust. */
10448 242252 : if (TREE_CODE (t) == COMPONENT_REF
10449 6353 : && DECL_BIT_FIELD (TREE_OPERAND (t, 1))
10450 242336 : && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)))
10451 : {
10452 84 : HOST_WIDE_INT csize, coffset;
10453 84 : tree field = TREE_OPERAND (t, 1);
10454 84 : tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
10455 168 : if (INTEGRAL_TYPE_P (TREE_TYPE (repr))
10456 83 : && size.is_constant (&csize)
10457 83 : && offset.is_constant (&coffset)
10458 83 : && (coffset % BITS_PER_UNIT != 0
10459 81 : || csize % BITS_PER_UNIT != 0)
10460 84 : && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN)
10461 : {
10462 10 : poly_int64 bitoffset;
10463 10 : poly_uint64 field_offset, repr_offset;
10464 10 : if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
10465 20 : && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
10466 10 : bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
10467 : else
10468 : bitoffset = 0;
10469 10 : bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
10470 10 : - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
10471 10 : HOST_WIDE_INT bitoff;
10472 10 : int diff = (TYPE_PRECISION (TREE_TYPE (repr))
10473 10 : - TYPE_PRECISION (TREE_TYPE (field)));
10474 10 : if (bitoffset.is_constant (&bitoff)
10475 10 : && bitoff >= 0
10476 10 : && bitoff <= diff)
10477 : {
10478 10 : offset -= bitoff;
10479 10 : size = tree_to_uhwi (DECL_SIZE (repr));
10480 :
10481 10 : tem = fold_ctor_reference (TREE_TYPE (repr), ctor, offset,
10482 10 : size, base);
10483 10 : if (tem && TREE_CODE (tem) == INTEGER_CST)
10484 : {
10485 10 : if (!BYTES_BIG_ENDIAN)
10486 10 : tem = wide_int_to_tree (TREE_TYPE (field),
10487 10 : wi::lrshift (wi::to_wide (tem),
10488 : bitoff));
10489 : else
10490 : tem = wide_int_to_tree (TREE_TYPE (field),
10491 : wi::lrshift (wi::to_wide (tem),
10492 : diff - bitoff));
10493 10 : return tem;
10494 : }
10495 : }
10496 : }
10497 : }
10498 : break;
10499 :
10500 1563022 : case REALPART_EXPR:
10501 1563022 : case IMAGPART_EXPR:
10502 1563022 : {
10503 1563022 : tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
10504 1563022 : if (c && TREE_CODE (c) == COMPLEX_CST)
10505 2710 : return fold_build1_loc (EXPR_LOCATION (t),
10506 5420 : TREE_CODE (t), TREE_TYPE (t), c);
10507 : break;
10508 : }
10509 :
10510 : default:
10511 : break;
10512 : }
10513 :
10514 : return NULL_TREE;
10515 : }
10516 :
 : /* Wrapper around fold_const_aggregate_ref_1 that folds T without
 : valueizing SSA names (no VALUEIZE callback). */
 :
10517 : tree
10518 62575940 : fold_const_aggregate_ref (tree t)
10519 : {
10520 62575940 : return fold_const_aggregate_ref_1 (t, NULL);
10521 : }
10522 :
10523 : /* Lookup virtual method with index TOKEN in a virtual table V
10524 : at OFFSET.
10525 : Set CAN_REFER if non-NULL to false if method
10526 : is not referable or if the virtual table is ill-formed (such as rewritten
10527 : by a non-C++ produced symbol). Otherwise just return NULL in that case. */
10528 :
10529 : tree
10530 282052 : gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
10531 : tree v,
10532 : unsigned HOST_WIDE_INT offset,
10533 : bool *can_refer)
10534 : {
10535 282052 : tree vtable = v, init, fn;
10536 282052 : unsigned HOST_WIDE_INT size;
10537 282052 : unsigned HOST_WIDE_INT elt_size, access_index;
10538 282052 : tree domain_type;
10539 :
10540 282052 : if (can_refer)
10541 282052 : *can_refer = true;
10542 :
10543 : /* First of all double check we have virtual table. */
10544 282052 : if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
10545 : {
10546 : /* Pass down that we lost track of the target. */
10547 0 : if (can_refer)
10548 0 : *can_refer = false;
10549 0 : return NULL_TREE;
10550 : }
10551 :
10552 282052 : init = ctor_for_folding (v);
10553 :
10554 : /* The virtual tables should always be born with constructors
10555 : and we always should assume that they are available for
10556 : folding. At the moment we do not stream them in all cases,
10557 : but it should never happen that the ctor seems unreachable. */
10558 282052 : gcc_assert (init);
10559 282052 : if (init == error_mark_node)
10560 : {
10561 : /* Pass down that we lost track of the target. */
10562 209 : if (can_refer)
10563 209 : *can_refer = false;
10564 209 : return NULL_TREE;
10565 : }
10566 281843 : gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
10567 281843 : size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
10568 281843 : offset *= BITS_PER_UNIT;
10569 281843 : offset += token * size;
10570 :
10571 : /* Lookup the value in the constructor that is assumed to be array.
10572 : This is equivalent to
10573 : fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
10574 : offset, size, NULL);
10575 : but in a constant time. We expect that frontend produced a simple
10576 : array without indexed initializers. */
10577 :
10578 281843 : gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
10579 281843 : domain_type = TYPE_DOMAIN (TREE_TYPE (init));
10580 281843 : gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
10581 281843 : elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
10582 :
10583 281843 : access_index = offset / BITS_PER_UNIT / elt_size;
10584 281843 : gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
10585 :
10586 : /* This code makes an assumption that there are no
10587 : indexed fields produced by C++ FE, so we can directly index the array. */
10588 281843 : if (access_index < CONSTRUCTOR_NELTS (init))
10589 : {
10590 281842 : fn = CONSTRUCTOR_ELT (init, access_index)->value;
10591 281842 : gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
10592 281842 : STRIP_NOPS (fn);
10593 : }
10594 : else
10595 : fn = NULL;
10596 :
10597 : /* For type inconsistent program we may end up looking up virtual method
10598 : in virtual table that does not contain TOKEN entries. We may overrun
10599 : the virtual table and pick up a constant or RTTI info pointer.
10600 : In any case the call is undefined. */
10601 281842 : if (!fn
10602 281842 : || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
10603 557606 : || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
10604 6079 : fn = builtin_decl_unreachable ();
10605 : else
10606 : {
10607 275764 : fn = TREE_OPERAND (fn, 0);
10608 :
10609 : /* When cgraph node is missing and function is not public, we cannot
10610 : devirtualize. This can happen in WHOPR when the actual method
10611 : ends up in other partition, because we found devirtualization
10612 : possibility too late. */
10613 275764 : if (!can_refer_decl_in_current_unit_p (fn, vtable))
10614 : {
10615 36558 : if (can_refer)
10616 : {
10617 36558 : *can_refer = false;
10618 36558 : return fn;
10619 : }
10620 : return NULL_TREE;
10621 : }
10622 : }
10623 :
10624 : /* Make sure we create a cgraph node for functions we'll reference.
10625 : They can be non-existent if the reference comes from an entry
10626 : of an external vtable for example. */
10627 245285 : cgraph_node::get_create (fn);
10628 :
10629 245285 : return fn;
10630 : }
10631 :
10632 : /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
10633 : is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
10634 : KNOWN_BINFO carries the binfo describing the true type of
10635 : OBJ_TYPE_REF_OBJECT(REF).
10636 : Set CAN_REFER if non-NULL to false if method
10637 : is not referable or if the virtual table is ill-formed (such as rewritten
10638 : by a non-C++ produced symbol). Otherwise just return NULL in that case. */
10639 :
10640 : tree
10641 274226 : gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
10642 : bool *can_refer)
10643 : {
10644 274226 : unsigned HOST_WIDE_INT offset;
10645 274226 : tree v;
10646 :
10647 274226 : v = BINFO_VTABLE (known_binfo);
10648 : /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
10649 274226 : if (!v)
10650 : return NULL_TREE;
10651 :
10652 274226 : if (!vtable_pointer_value_to_vtable (v, &v, &offset))
10653 : {
10654 0 : if (can_refer)
10655 0 : *can_refer = false;
10656 0 : return NULL_TREE;
10657 : }
10658 274226 : return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
10659 : }
10660 :
10661 : /* Given a pointer value T, return a simplified version of an
10662 : indirection through T, or NULL_TREE if no simplification is
10663 : possible. Note that the type of the result may differ from the
10664 : type T points to, as long as the two remain compatible from the
10665 : langhooks point of view. */
10666 :
10667 : tree
10668 2355242 : gimple_fold_indirect_ref (tree t)
10669 : {
10670 2355242 : tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
10671 2355242 : tree sub = t;
10672 2355242 : tree subtype;
10673 :
10674 2355242 : STRIP_NOPS (sub);
10675 2355242 : subtype = TREE_TYPE (sub);
10676 2355242 : if (!POINTER_TYPE_P (subtype)
10677 2355242 : || TYPE_REF_CAN_ALIAS_ALL (ptype))
10678 : return NULL_TREE;
10679 :
10680 2353537 : if (TREE_CODE (sub) == ADDR_EXPR)
10681 : {
10682 80842 : tree op = TREE_OPERAND (sub, 0);
10683 80842 : tree optype = TREE_TYPE (op);
10684 : /* *&p => p */
10685 80842 : if (useless_type_conversion_p (type, optype))
10686 : return op;
10687 :
10688 : /* *(foo *)&fooarray => fooarray[0] */
10689 990 : if (TREE_CODE (optype) == ARRAY_TYPE
10690 307 : && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
10691 1297 : && useless_type_conversion_p (type, TREE_TYPE (optype)))
10692 : {
10693 54 : tree type_domain = TYPE_DOMAIN (optype);
10694 54 : tree min_val = size_zero_node;
10695 54 : if (type_domain && TYPE_MIN_VALUE (type_domain))
10696 54 : min_val = TYPE_MIN_VALUE (type_domain);
10697 54 : if (TREE_CODE (min_val) == INTEGER_CST)
10698 54 : return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
10699 : }
10700 : /* *(foo *)&complexfoo => __real__ complexfoo */
10701 936 : else if (TREE_CODE (optype) == COMPLEX_TYPE
10702 936 : && useless_type_conversion_p (type, TREE_TYPE (optype)))
10703 4 : return fold_build1 (REALPART_EXPR, type, op);
10704 : /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
10705 932 : else if (TREE_CODE (optype) == VECTOR_TYPE
10706 932 : && useless_type_conversion_p (type, TREE_TYPE (optype)))
10707 : {
10708 26 : tree part_width = TYPE_SIZE (type);
10709 26 : tree index = bitsize_int (0);
10710 26 : return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
10711 : }
10712 : }
10713 :
10714 : /* *(p + CST) -> ... */
10715 2273601 : if (TREE_CODE (sub) == POINTER_PLUS_EXPR
10716 2273601 : && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
10717 : {
10718 33828 : tree addr = TREE_OPERAND (sub, 0);
10719 33828 : tree off = TREE_OPERAND (sub, 1);
10720 33828 : tree addrtype;
10721 :
10722 33828 : STRIP_NOPS (addr);
10723 33828 : addrtype = TREE_TYPE (addr);
10724 :
10725 : /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
10726 33828 : if (TREE_CODE (addr) == ADDR_EXPR
10727 92 : && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
10728 39 : && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
10729 33856 : && tree_fits_uhwi_p (off))
10730 : {
10731 28 : unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
10732 28 : tree part_width = TYPE_SIZE (type);
10733 28 : unsigned HOST_WIDE_INT part_widthi
10734 28 : = tree_to_shwi (part_width) / BITS_PER_UNIT;
10735 28 : unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
10736 28 : tree index = bitsize_int (indexi);
10737 28 : if (known_lt (offset / part_widthi,
10738 : TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
10739 28 : return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
10740 : part_width, index);
10741 : }
10742 :
10743 : /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
10744 33800 : if (TREE_CODE (addr) == ADDR_EXPR
10745 64 : && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
10746 33801 : && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
10747 : {
10748 1 : tree size = TYPE_SIZE_UNIT (type);
10749 1 : if (tree_int_cst_equal (size, off))
10750 1 : return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
10751 : }
10752 :
10753 : /* *(p + CST) -> MEM_REF <p, CST>. */
10754 33799 : if (TREE_CODE (addr) != ADDR_EXPR
10755 33799 : || DECL_P (TREE_OPERAND (addr, 0)))
10756 33781 : return fold_build2 (MEM_REF, type,
10757 : addr,
10758 : wide_int_to_tree (ptype, wi::to_wide (off)));
10759 : }
10760 :
10761 : /* *(foo *)fooarrptr => (*fooarrptr)[0] */
10762 2239791 : if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
10763 2517 : && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
10764 2242278 : && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
10765 : {
10766 1 : tree type_domain;
10767 1 : tree min_val = size_zero_node;
10768 1 : tree osub = sub;
10769 1 : sub = gimple_fold_indirect_ref (sub);
10770 1 : if (! sub)
10771 1 : sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
10772 1 : type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
10773 1 : if (type_domain && TYPE_MIN_VALUE (type_domain))
10774 1 : min_val = TYPE_MIN_VALUE (type_domain);
10775 1 : if (TREE_CODE (min_val) == INTEGER_CST)
10776 1 : return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
10777 : }
10778 :
10779 : return NULL_TREE;
10780 : }
10781 :
10782 : /* Return true if CODE is an operation that, when operating on signed
10783 : integer types, involves undefined behavior on overflow and the
10784 : operation can be expressed with unsigned arithmetic. */
10785 :
10786 : bool
10787 500978 : arith_code_with_undefined_signed_overflow (tree_code code)
10788 : {
10789 500978 : switch (code)
10790 : {
10791 : case ABS_EXPR:
10792 : case PLUS_EXPR:
10793 : case MINUS_EXPR:
10794 : case MULT_EXPR:
10795 : case NEGATE_EXPR:
10796 : case POINTER_PLUS_EXPR:
10797 : return true;
10798 188514 : default:
10799 188514 : return false;
10800 : }
10801 : }
10802 :
10803 : /* Return true if STMT has an operation that operates on signed
10804 : integer types and involves undefined behavior on overflow, where
10805 : the operation can be expressed with unsigned arithmetic.
10806 : Also returns true if STMT is a VCE that needs to be rewritten
10807 : if moved to be executed unconditionally. */
10808 :
10809 : bool
10810 1201458 : gimple_needing_rewrite_undefined (gimple *stmt)
10811 : {
10812 1201458 : if (!is_gimple_assign (stmt))
10813 : return false;
10814 1046698 : tree lhs = gimple_assign_lhs (stmt);
10815 1046698 : if (!lhs)
10816 : return false;
10817 1046698 : tree lhs_type = TREE_TYPE (lhs);
10818 1046698 : if (!INTEGRAL_TYPE_P (lhs_type)
10819 121004 : && !POINTER_TYPE_P (lhs_type))
10820 : return false;
10821 1009653 : tree rhs = gimple_assign_rhs1 (stmt);
10822 : /* Boolean loads need special handling as they are treated as a full MODE load
10823 : and don't mask off the bits for the precision. */
10824 1009653 : if (gimple_assign_load_p (stmt)
10825 : /* Booleans are the integral type which has this non-masking issue. */
10826 94909 : && TREE_CODE (lhs_type) == BOOLEAN_TYPE
10827 : /* Only non-mode-precision booleans need the masking. */
10828 405 : && !type_has_mode_precision_p (lhs_type)
10829 : /* BFR should be the correct thing and just grab the precision. */
10830 405 : && TREE_CODE (rhs) != BIT_FIELD_REF
10831 : /* Bit-field loads don't need a rewrite as the masking
10832 : happens for them. */
10833 1010058 : && (TREE_CODE (rhs) != COMPONENT_REF
10834 139 : || !DECL_BIT_FIELD (TREE_OPERAND (rhs, 1))))
10835 : return true;
10836 : /* A VCE from an integral type to an integral type with
10837 : smaller precision needs to be changed into a cast
10838 : to be well defined. */
10839 1009248 : if (gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR
10840 196 : && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (rhs, 0)))
10841 169 : && is_gimple_val (TREE_OPERAND (rhs, 0))
10842 1009417 : && TYPE_PRECISION (lhs_type)
10843 169 : < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (rhs, 0))))
10844 : return true;
10845 1009083 : if (!TYPE_OVERFLOW_UNDEFINED (lhs_type))
10846 : return false;
10847 381550 : if (!arith_code_with_undefined_signed_overflow
10848 381550 : (gimple_assign_rhs_code (stmt)))
10849 : return false;
10850 : return true;
10851 : }
10852 :
10853 : /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
10854 : operation that can be transformed to unsigned arithmetic by converting
10855 : its operand, carrying out the operation in the corresponding unsigned
10856 : type and converting the result back to the original type.
10857 :
10858 : If IN_PLACE is true, *GSI points to STMT, adjust the stmt in place and
10859 : return NULL.
10860 : Otherwise returns a sequence of statements that replace STMT and also
10861 : contain a modified form of STMT itself. */
10862 :
10863 : static gimple_seq
10864 64553 : rewrite_to_defined_unconditional (gimple_stmt_iterator *gsi, gimple *stmt,
10865 : bool in_place)
10866 : {
10867 64553 : gcc_assert (gimple_needing_rewrite_undefined (stmt));
10868 64553 : if (dump_file && (dump_flags & TDF_DETAILS))
10869 : {
10870 21 : fprintf (dump_file, "rewriting stmt to be unconditionally defined: ");
10871 21 : print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
10872 : }
10873 64553 : gimple_seq stmts = NULL;
10874 64553 : tree lhs = gimple_assign_lhs (stmt);
10875 :
10876 : /* Boolean loads need to be rewritten to be a load from the same mode
10877 : and then a cast to the other type so the other bits are masked off
10878 : correctly since the load was done conditionally. It is similar to the VCE
10879 : case below. */
10880 64553 : if (gimple_assign_load_p (stmt)
10881 64553 : && TREE_CODE (TREE_TYPE (lhs)) == BOOLEAN_TYPE)
10882 : {
10883 113 : tree rhs = gimple_assign_rhs1 (stmt);
10884 :
10885 : /* Double check that gimple_needing_rewrite_undefined was called. */
10886 : /* Bit-field loads will do the masking so don't need the rewriting. */
10887 113 : gcc_assert (TREE_CODE (rhs) != COMPONENT_REF
10888 : || !DECL_BIT_FIELD (TREE_OPERAND (rhs, 1)));
10889 : /* BFR is like a bit field load and will do the correct thing. */
10890 113 : gcc_assert (TREE_CODE (lhs) != BIT_FIELD_REF);
10891 : /* Complex boolean types are not valid so REAL/IMAG part will
10892 : never show up on the loaded reference. */
10893 113 : gcc_assert (TREE_CODE (rhs) != REALPART_EXPR
10894 : && TREE_CODE (rhs) != IMAGPART_EXPR);
10895 :
10896 113 : auto bits = GET_MODE_BITSIZE (SCALAR_TYPE_MODE (TREE_TYPE (rhs)));
10897 113 : tree new_type = build_nonstandard_integer_type (bits, true);
10898 113 : location_t loc = gimple_location (stmt);
10899 113 : tree mem_ref = fold_build1_loc (loc, VIEW_CONVERT_EXPR, new_type, rhs);
10900 : /* Replace the original load with a new load and a new lhs. */
10901 113 : tree new_lhs = make_ssa_name (new_type);
10902 113 : gimple_assign_set_rhs1 (stmt, mem_ref);
10903 113 : gimple_assign_set_lhs (stmt, new_lhs);
10904 :
10905 113 : if (in_place)
10906 49 : update_stmt (stmt);
10907 : else
10908 : {
10909 64 : gimple_set_modified (stmt, true);
10910 64 : gimple_seq_add_stmt (&stmts, stmt);
10911 : }
10912 :
10913 : /* Build the conversion statement. */
10914 113 : gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, new_lhs);
10915 113 : if (in_place)
10916 : {
10917 49 : gsi_insert_after (gsi, cvt, GSI_SAME_STMT);
10918 49 : update_stmt (stmt);
10919 : }
10920 : else
10921 64 : gimple_seq_add_stmt (&stmts, cvt);
10922 113 : return stmts;
10923 : }
10924 :
10925 : /* A VCE from an integral type to another integral type with
10926 : smaller precision needs to be changed into a cast
10927 : to be well defined. */
10928 64440 : if (gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
10929 : {
10930 60 : tree rhs = gimple_assign_rhs1 (stmt);
10931 60 : tree new_rhs = TREE_OPERAND (rhs, 0);
10932 60 : gcc_assert (TYPE_PRECISION (TREE_TYPE (rhs))
10933 : < TYPE_PRECISION (TREE_TYPE (new_rhs)));
10934 60 : gcc_assert (is_gimple_val (new_rhs));
10935 60 : gimple_assign_set_rhs_code (stmt, NOP_EXPR);
10936 60 : gimple_assign_set_rhs1 (stmt, new_rhs);
10937 60 : if (in_place)
10938 51 : update_stmt (stmt);
10939 : else
10940 : {
10941 9 : gimple_set_modified (stmt, true);
10942 9 : gimple_seq_add_stmt (&stmts, stmt);
10943 : }
10944 60 : return stmts;
10945 : }
10946 64380 : tree type = unsigned_type_for (TREE_TYPE (lhs));
10947 64380 : if (gimple_assign_rhs_code (stmt) == ABS_EXPR)
10948 24 : gimple_assign_set_rhs_code (stmt, ABSU_EXPR);
10949 : else
10950 192547 : for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
10951 : {
10952 128191 : tree op = gimple_op (stmt, i);
10953 128191 : op = gimple_convert (&stmts, type, op);
10954 128191 : gimple_set_op (stmt, i, op);
10955 : }
10956 64380 : gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
10957 64380 : if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
10958 10564 : gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
10959 64380 : gimple_set_modified (stmt, true);
10960 64380 : if (in_place)
10961 : {
10962 44031 : if (stmts)
10963 43639 : gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
10964 44031 : stmts = NULL;
10965 : }
10966 : else
10967 20349 : gimple_seq_add_stmt (&stmts, stmt);
10968 64380 : gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
10969 64380 : if (in_place)
10970 : {
10971 44031 : gsi_insert_after (gsi, cvt, GSI_SAME_STMT);
10972 44031 : update_stmt (stmt);
10973 : }
10974 : else
10975 20349 : gimple_seq_add_stmt (&stmts, cvt);
10976 :
10977 64380 : return stmts;
10978 : }
10979 :
 : /* Overload rewriting the statement that *GSI points to in place,
 : inserting any extra statements around it. */
 :
10980 : void
10981 44131 : rewrite_to_defined_unconditional (gimple_stmt_iterator *gsi)
10982 : {
10983 44131 : rewrite_to_defined_unconditional (gsi, gsi_stmt (*gsi), true);
10984 44131 : }
10985 :
 : /* Overload returning the replacement statements for STMT as a
 : sequence (which contains a modified form of STMT itself). */
 :
10986 : gimple_seq
10987 20422 : rewrite_to_defined_unconditional (gimple *stmt)
10988 : {
10989 20422 : return rewrite_to_defined_unconditional (nullptr, stmt, false);
10990 20422 : }
10991 :
10992 : /* The valueization hook we use for the gimple_build API simplification.
10993 : This makes us match fold_buildN behavior by only combining with
10994 : statements in the sequence(s) we are currently building. */
10995 :
10996 : static tree
10997 19935867 : gimple_build_valueize (tree op)
10998 : {
 : /* A definition without a basic block belongs to the sequence being
 : built, so OP may be combined with; anything else is off limits. */
10999 19935867 : if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
11000 4210488 : return op;
11001 : return NULL_TREE;
11002 : }
11004 : /* Helper for gimple_build to perform the final insertion of stmts on SEQ. */
11005 :
11006 : static inline void
11007 1306440 : gimple_build_insert_seq (gimple_stmt_iterator *gsi,
11008 : bool before, gsi_iterator_update update,
11009 : gimple_seq seq)
11010 : {
11011 1306440 : if (before)
11012 : {
11013 93220 : if (gsi->bb)
11014 93220 : gsi_insert_seq_before (gsi, seq, update);
11015 : else
11016 0 : gsi_insert_seq_before_without_update (gsi, seq, update);
11017 : }
11018 : else
11019 : {
11020 1213220 : if (gsi->bb)
11021 131 : gsi_insert_seq_after (gsi, seq, update);
11022 : else
11023 1213089 : gsi_insert_seq_after_without_update (gsi, seq, update);
11024 : }
11025 1306440 : }
11026 :
11027 : /* Build the expression CODE OP0 of type TYPE with location LOC,
11028 : simplifying it first if possible. Returns the built
11029 : expression value and inserts statements possibly defining it
11030 : before GSI if BEFORE is true or after GSI if false and advance
11031 : the iterator accordingly.
11032 : If gsi refers to a basic block simplifying is allowed to look
11033 : at all SSA defs while when it does not it is restricted to
11034 : SSA defs that are not associated with a basic block yet,
11035 : indicating they belong to the currently building sequence. */
11036 :
11037 : tree
11038 334639 : gimple_build (gimple_stmt_iterator *gsi,
11039 : bool before, gsi_iterator_update update,
11040 : location_t loc, enum tree_code code, tree type, tree op0)
11041 : {
11042 334639 : gimple_seq seq = NULL;
11043 334639 : tree res
11044 334639 : = gimple_simplify (code, type, op0, &seq,
11045 334639 : gsi->bb ? follow_all_ssa_edges : gimple_build_valueize);
11046 334639 : if (!res)
11047 : {
11048 294322 : res = make_ssa_name (type);
11049 294322 : gimple *stmt;
11050 294322 : if (code == REALPART_EXPR
11051 : || code == IMAGPART_EXPR
11052 294322 : || code == VIEW_CONVERT_EXPR)
11053 15103 : stmt = gimple_build_assign (res, code, build1 (code, type, op0));
11054 : else
11055 279219 : stmt = gimple_build_assign (res, code, op0);
11056 294322 : gimple_set_location (stmt, loc);
11057 294322 : gimple_seq_add_stmt_without_update (&seq, stmt);
11058 : }
11059 334639 : gimple_build_insert_seq (gsi, before, update, seq);
11060 334639 : return res;
11061 : }
11062 :
11063 : /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
11064 : simplifying it first if possible. Returns the built
11065 : expression value inserting any new statements at GSI honoring BEFORE
11066 : and UPDATE. */
11067 :
11068 : tree
11069 763332 : gimple_build (gimple_stmt_iterator *gsi,
11070 : bool before, gsi_iterator_update update,
11071 : location_t loc, enum tree_code code, tree type,
11072 : tree op0, tree op1)
11073 : {
11074 763332 : gimple_seq seq = NULL;
11075 763332 : tree res
11076 763332 : = gimple_simplify (code, type, op0, op1, &seq,
11077 763332 : gsi->bb ? follow_all_ssa_edges : gimple_build_valueize);
11078 763332 : if (!res)
11079 : {
11080 673524 : res = make_ssa_name (type);
11081 673524 : gimple *stmt = gimple_build_assign (res, code, op0, op1);
11082 673524 : gimple_set_location (stmt, loc);
11083 673524 : gimple_seq_add_stmt_without_update (&seq, stmt);
11084 : }
11085 763332 : gimple_build_insert_seq (gsi, before, update, seq);
11086 763332 : return res;
11087 : }
11088 :
11089 : /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
11090 : simplifying it first if possible. Returns the built
11091 : expression value inserting any new statements at GSI honoring BEFORE
11092 : and UPDATE. */
11093 :
11094 : tree
11095 44405 : gimple_build (gimple_stmt_iterator *gsi,
11096 : bool before, gsi_iterator_update update,
11097 : location_t loc, enum tree_code code, tree type,
11098 : tree op0, tree op1, tree op2)
11099 : {
11100 :
11101 44405 : gimple_seq seq = NULL;
11102 44405 : tree res
11103 44405 : = gimple_simplify (code, type, op0, op1, op2, &seq,
11104 44405 : gsi->bb ? follow_all_ssa_edges : gimple_build_valueize);
11105 44405 : if (!res)
11106 : {
11107 32262 : res = make_ssa_name (type);
11108 32262 : gimple *stmt;
11109 32262 : if (code == BIT_FIELD_REF)
11110 24856 : stmt = gimple_build_assign (res, code,
11111 : build3 (code, type, op0, op1, op2));
11112 : else
11113 7406 : stmt = gimple_build_assign (res, code, op0, op1, op2);
11114 32262 : gimple_set_location (stmt, loc);
11115 32262 : gimple_seq_add_stmt_without_update (&seq, stmt);
11116 : }
11117 44405 : gimple_build_insert_seq (gsi, before, update, seq);
11118 44405 : return res;
11119 : }
11120 :
11121 : /* Build the call FN () with a result of type TYPE (or no result if TYPE is
11122 : void) with a location LOC. Returns the built expression value (or NULL_TREE
11123 : if TYPE is void) inserting any new statements at GSI honoring BEFORE
11124 : and UPDATE. */
11125 :
11126 : tree
11127 0 : gimple_build (gimple_stmt_iterator *gsi,
11128 : bool before, gsi_iterator_update update,
11129 : location_t loc, combined_fn fn, tree type)
11130 : {
11131 0 : tree res = NULL_TREE;
11132 0 : gimple_seq seq = NULL;
11133 0 : gcall *stmt;
11134 0 : if (internal_fn_p (fn))
11135 0 : stmt = gimple_build_call_internal (as_internal_fn (fn), 0);
11136 : else
11137 : {
11138 0 : tree decl = builtin_decl_implicit (as_builtin_fn (fn));
11139 0 : stmt = gimple_build_call (decl, 0);
11140 : }
11141 0 : if (!VOID_TYPE_P (type))
11142 : {
11143 0 : res = make_ssa_name (type);
11144 0 : gimple_call_set_lhs (stmt, res);
11145 : }
11146 0 : gimple_set_location (stmt, loc);
11147 0 : gimple_seq_add_stmt_without_update (&seq, stmt);
11148 0 : gimple_build_insert_seq (gsi, before, update, seq);
11149 0 : return res;
11150 : }
11151 :
/* Build the call FN (ARG0) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible.  Returns the built
   expression value (or NULL_TREE if TYPE is void) inserting any new
   statements at GSI honoring BEFORE and UPDATE.  */

tree
gimple_build (gimple_stmt_iterator *gsi,
	      bool before, gsi_iterator_update update,
	      location_t loc, combined_fn fn,
	      tree type, tree arg0)
{
  gimple_seq seq = NULL;
  /* NOTE(review): unlike the tree_code overloads, this always valueizes
     with gimple_build_valueize and never follow_all_ssa_edges even when
     GSI refers to a basic block -- confirm whether that is intended.  */
  tree res = gimple_simplify (fn, type, arg0, &seq, gimple_build_valueize);
  if (!res)
    {
      gcall *stmt;
      if (internal_fn_p (fn))
	stmt = gimple_build_call_internal (as_internal_fn (fn), 1, arg0);
      else
	{
	  tree decl = builtin_decl_implicit (as_builtin_fn (fn));
	  stmt = gimple_build_call (decl, 1, arg0);
	}
      /* Only value-producing calls get an SSA lhs; for void TYPE the
	 function returns NULL_TREE.  */
      if (!VOID_TYPE_P (type))
	{
	  res = make_ssa_name (type);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (&seq, stmt);
    }
  gimple_build_insert_seq (gsi, before, update, seq);
  return res;
}
11187 :
11188 : /* Build the call FN (ARG0, ARG1) with a result of type TYPE
11189 : (or no result if TYPE is void) with location LOC,
11190 : simplifying it first if possible. Returns the built
11191 : expression value (or NULL_TREE if TYPE is void) inserting any new
11192 : statements at GSI honoring BEFORE and UPDATE. */
11193 :
11194 : tree
11195 0 : gimple_build (gimple_stmt_iterator *gsi,
11196 : bool before, gsi_iterator_update update,
11197 : location_t loc, combined_fn fn,
11198 : tree type, tree arg0, tree arg1)
11199 : {
11200 0 : gimple_seq seq = NULL;
11201 0 : tree res = gimple_simplify (fn, type, arg0, arg1, &seq,
11202 : gimple_build_valueize);
11203 0 : if (!res)
11204 : {
11205 0 : gcall *stmt;
11206 0 : if (internal_fn_p (fn))
11207 0 : stmt = gimple_build_call_internal (as_internal_fn (fn), 2, arg0, arg1);
11208 : else
11209 : {
11210 0 : tree decl = builtin_decl_implicit (as_builtin_fn (fn));
11211 0 : stmt = gimple_build_call (decl, 2, arg0, arg1);
11212 : }
11213 0 : if (!VOID_TYPE_P (type))
11214 : {
11215 0 : res = make_ssa_name (type);
11216 0 : gimple_call_set_lhs (stmt, res);
11217 : }
11218 0 : gimple_set_location (stmt, loc);
11219 0 : gimple_seq_add_stmt_without_update (&seq, stmt);
11220 : }
11221 0 : gimple_build_insert_seq (gsi, before, update, seq);
11222 0 : return res;
11223 : }
11224 :
/* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible.  Returns the built
   expression value (or NULL_TREE if TYPE is void) inserting any new
   statements at GSI honoring BEFORE and UPDATE.  */

tree
gimple_build (gimple_stmt_iterator *gsi,
	      bool before, gsi_iterator_update update,
	      location_t loc, combined_fn fn,
	      tree type, tree arg0, tree arg1, tree arg2)
{
  gimple_seq seq = NULL;
  /* NOTE(review): like the other combined_fn overloads this always uses
     gimple_build_valueize, never follow_all_ssa_edges -- confirm.  */
  tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
			      &seq, gimple_build_valueize);
  if (!res)
    {
      gcall *stmt;
      if (internal_fn_p (fn))
	stmt = gimple_build_call_internal (as_internal_fn (fn),
					   3, arg0, arg1, arg2);
      else
	{
	  tree decl = builtin_decl_implicit (as_builtin_fn (fn));
	  stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
	}
      /* Void calls produce no value; RES stays NULL_TREE.  */
      if (!VOID_TYPE_P (type))
	{
	  res = make_ssa_name (type);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (&seq, stmt);
    }
  gimple_build_insert_seq (gsi, before, update, seq);
  return res;
}
11262 :
11263 : /* Build CODE (OP0) with a result of type TYPE (or no result if TYPE is
11264 : void) with location LOC, simplifying it first if possible. Returns the
11265 : built expression value (or NULL_TREE if TYPE is void) inserting any new
11266 : statements at GSI honoring BEFORE and UPDATE. */
11267 :
11268 : tree
11269 21 : gimple_build (gimple_stmt_iterator *gsi,
11270 : bool before, gsi_iterator_update update,
11271 : location_t loc, code_helper code, tree type, tree op0)
11272 : {
11273 21 : if (code.is_tree_code ())
11274 0 : return gimple_build (gsi, before, update, loc, tree_code (code), type, op0);
11275 21 : return gimple_build (gsi, before, update, loc, combined_fn (code), type, op0);
11276 : }
11277 :
11278 : /* Build CODE (OP0, OP1) with a result of type TYPE (or no result if TYPE is
11279 : void) with location LOC, simplifying it first if possible. Returns the
11280 : built expression value (or NULL_TREE if TYPE is void) inserting any new
11281 : statements at GSI honoring BEFORE and UPDATE. */
11282 :
11283 : tree
11284 23956 : gimple_build (gimple_stmt_iterator *gsi,
11285 : bool before, gsi_iterator_update update,
11286 : location_t loc, code_helper code, tree type, tree op0, tree op1)
11287 : {
11288 23956 : if (code.is_tree_code ())
11289 23956 : return gimple_build (gsi, before, update,
11290 23956 : loc, tree_code (code), type, op0, op1);
11291 0 : return gimple_build (gsi, before, update,
11292 0 : loc, combined_fn (code), type, op0, op1);
11293 : }
11294 :
11295 : /* Build CODE (OP0, OP1, OP2) with a result of type TYPE (or no result if TYPE
11296 : is void) with location LOC, simplifying it first if possible. Returns the
11297 : built expression value (or NULL_TREE if TYPE is void) inserting any new
11298 : statements at GSI honoring BEFORE and UPDATE. */
11299 :
11300 : tree
11301 0 : gimple_build (gimple_stmt_iterator *gsi,
11302 : bool before, gsi_iterator_update update,
11303 : location_t loc, code_helper code,
11304 : tree type, tree op0, tree op1, tree op2)
11305 : {
11306 0 : if (code.is_tree_code ())
11307 0 : return gimple_build (gsi, before, update,
11308 0 : loc, tree_code (code), type, op0, op1, op2);
11309 0 : return gimple_build (gsi, before, update,
11310 0 : loc, combined_fn (code), type, op0, op1, op2);
11311 : }
11312 :
11313 : /* Build the conversion (TYPE) OP with a result of type TYPE
11314 : with location LOC if such conversion is neccesary in GIMPLE,
11315 : simplifying it first.
11316 : Returns the built expression inserting any new statements
11317 : at GSI honoring BEFORE and UPDATE. */
11318 :
11319 : tree
11320 1978960 : gimple_convert (gimple_stmt_iterator *gsi,
11321 : bool before, gsi_iterator_update update,
11322 : location_t loc, tree type, tree op)
11323 : {
11324 1978960 : if (useless_type_conversion_p (type, TREE_TYPE (op)))
11325 : return op;
11326 181238 : return gimple_build (gsi, before, update, loc, NOP_EXPR, type, op);
11327 : }
11328 :
11329 : /* Build the conversion (ptrofftype) OP with a result of a type
11330 : compatible with ptrofftype with location LOC if such conversion
11331 : is neccesary in GIMPLE, simplifying it first.
11332 : Returns the built expression value inserting any new statements
11333 : at GSI honoring BEFORE and UPDATE. */
11334 :
11335 : tree
11336 208 : gimple_convert_to_ptrofftype (gimple_stmt_iterator *gsi,
11337 : bool before, gsi_iterator_update update,
11338 : location_t loc, tree op)
11339 : {
11340 208 : if (ptrofftype_p (TREE_TYPE (op)))
11341 : return op;
11342 0 : return gimple_convert (gsi, before, update, loc, sizetype, op);
11343 : }
11344 :
/* Build a vector of type TYPE in which each element has the value OP.
   Return a gimple value for the result, inserting any new statements
   at GSI honoring BEFORE and UPDATE.  */

tree
gimple_build_vector_from_val (gimple_stmt_iterator *gsi,
			      bool before, gsi_iterator_update update,
			      location_t loc, tree type, tree op)
{
  /* A variable-length vector of a non-constant element cannot be
     represented as a VECTOR_CST, so emit a VEC_DUPLICATE_EXPR.  */
  if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
      && !CONSTANT_CLASS_P (op))
    return gimple_build (gsi, before, update,
			 loc, VEC_DUPLICATE_EXPR, type, op);

  tree res, vec = build_vector_from_val (type, op);
  /* If the broadcast folded to a valid GIMPLE operand (e.g. a
     VECTOR_CST), no statement is needed.  */
  if (is_gimple_val (vec))
    return vec;
  if (gimple_in_ssa_p (cfun))
    res = make_ssa_name (type);
  else
    res = create_tmp_reg (type);
  /* Otherwise assign the (non-val) vector expression to a temporary.  */
  gimple_seq seq = NULL;
  gimple *stmt = gimple_build_assign (res, vec);
  gimple_set_location (stmt, loc);
  gimple_seq_add_stmt_without_update (&seq, stmt);
  gimple_build_insert_seq (gsi, before, update, seq);
  return res;
}
11373 :
/* Build a vector from BUILDER, handling the case in which some elements
   are non-constant.  Return a gimple value for the result, inserting
   any new instructions to GSI honoring BEFORE and UPDATE.

   BUILDER must not have a stepped encoding on entry.  This is because
   the function is not geared up to handle the arithmetic that would
   be needed in the variable case, and any code building a vector that
   is known to be constant should use BUILDER->build () directly.  */

tree
gimple_build_vector (gimple_stmt_iterator *gsi,
		     bool before, gsi_iterator_update update,
		     location_t loc, tree_vector_builder *builder)
{
  gcc_assert (builder->nelts_per_pattern () <= 2);
  unsigned int encoded_nelts = builder->encoded_nelts ();
  /* Scan the encoded elements; on the first non-constant one, fall into
     the body below and return a CONSTRUCTOR covering ALL elements
     (note the loop index I is deliberately reused for that).  */
  for (unsigned int i = 0; i < encoded_nelts; ++i)
    if (!CONSTANT_CLASS_P ((*builder)[i]))
      {
	gimple_seq seq = NULL;
	tree type = builder->type ();
	/* to_constant is safe here: a non-constant element rules out
	   the variable-length encoding (see the head comment).  */
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
	vec<constructor_elt, va_gc> *v;
	vec_alloc (v, nelts);
	for (i = 0; i < nelts; ++i)
	  CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));

	tree res;
	if (gimple_in_ssa_p (cfun))
	  res = make_ssa_name (type);
	else
	  res = create_tmp_reg (type);
	gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
	gimple_set_location (stmt, loc);
	gimple_seq_add_stmt_without_update (&seq, stmt);
	gimple_build_insert_seq (gsi, before, update, seq);
	return res;
      }
  /* All encoded elements constant: a VECTOR_CST suffices.  */
  return builder->build ();
}
11414 :
11415 : /* Emit gimple statements into &stmts that take a value given in OLD_SIZE
11416 : and generate a value guaranteed to be rounded upwards to ALIGN.
11417 :
11418 : Return the tree node representing this size, it is of TREE_TYPE TYPE. */
11419 :
11420 : tree
11421 0 : gimple_build_round_up (gimple_stmt_iterator *gsi,
11422 : bool before, gsi_iterator_update update,
11423 : location_t loc, tree type,
11424 : tree old_size, unsigned HOST_WIDE_INT align)
11425 : {
11426 0 : unsigned HOST_WIDE_INT tg_mask = align - 1;
11427 : /* tree new_size = (old_size + tg_mask) & ~tg_mask; */
11428 0 : gcc_assert (INTEGRAL_TYPE_P (type));
11429 0 : tree tree_mask = build_int_cst (type, tg_mask);
11430 0 : tree oversize = gimple_build (gsi, before, update,
11431 : loc, PLUS_EXPR, type, old_size, tree_mask);
11432 :
11433 0 : tree mask = build_int_cst (type, -align);
11434 0 : return gimple_build (gsi, before, update,
11435 0 : loc, BIT_AND_EXPR, type, oversize, mask);
11436 : }
11437 :
/* Return true if the result of assignment STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				   int depth)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  tree type = TREE_TYPE (gimple_assign_lhs (stmt));
  /* Dispatch on the rhs class to the matching fold-const helper.  */
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_UNARY_RHS:
      return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
					     type,
					     gimple_assign_rhs1 (stmt),
					     strict_overflow_p, depth);
    case GIMPLE_BINARY_RHS:
      return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
					      type,
					      gimple_assign_rhs1 (stmt),
					      gimple_assign_rhs2 (stmt),
					      strict_overflow_p, depth);
    case GIMPLE_TERNARY_RHS:
      /* No helper for ternary rhs; answer conservatively.  */
      return false;
    case GIMPLE_SINGLE_RHS:
      return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
					      strict_overflow_p, depth);
    case GIMPLE_INVALID_RHS:
      break;
    }
  gcc_unreachable ();
}
11472 :
11473 : /* Return true if return value of call STMT is known to be non-negative.
11474 : If the return value is based on the assumption that signed overflow is
11475 : undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
11476 : *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
11477 :
11478 : static bool
11479 21308593 : gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
11480 : int depth)
11481 : {
11482 21308593 : tree arg0
11483 21308593 : = gimple_call_num_args (stmt) > 0 ? gimple_call_arg (stmt, 0) : NULL_TREE;
11484 21308593 : tree arg1
11485 21308593 : = gimple_call_num_args (stmt) > 1 ? gimple_call_arg (stmt, 1) : NULL_TREE;
11486 21308593 : tree lhs = gimple_call_lhs (stmt);
11487 21308593 : return (lhs
11488 21308593 : && tree_call_nonnegative_warnv_p (TREE_TYPE (lhs),
11489 : gimple_call_combined_fn (stmt),
11490 : arg0, arg1,
11491 21308593 : strict_overflow_p, depth));
11492 : }
11493 :
11494 : /* Return true if return value of call STMT is known to be non-negative.
11495 : If the return value is based on the assumption that signed overflow is
11496 : undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
11497 : *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
11498 :
11499 : static bool
11500 12530947 : gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
11501 : int depth)
11502 : {
11503 25029304 : for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
11504 : {
11505 20365798 : tree arg = gimple_phi_arg_def (stmt, i);
11506 20365798 : if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
11507 : return false;
11508 : }
11509 : return true;
11510 : }
11511 :
/* Return true if STMT is known to compute a non-negative value.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

bool
gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				 int depth)
{
  /* Fast path: for floating-point results consult the global range
     info; a known sign bit answers the query immediately.  */
  tree type = gimple_range_type (stmt);
  if (type && frange::supports_p (type))
    {
      frange r;
      bool sign;
      if (get_global_range_query ()->range_of_stmt (r, stmt)
	  && r.signbit_p (sign))
	return !sign;
      /* Range unknown: fall through to the structural dispatch.  */
    }
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
						depth);
    case GIMPLE_CALL:
      return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
					      depth);
    case GIMPLE_PHI:
      return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
					     depth);
    default:
      return false;
    }
}
11545 :
/* Return true if the floating-point value computed by assignment STMT
   is known to have an integer value.  We also allow +Inf, -Inf and NaN
   to be considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

static bool
gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  /* Dispatch on the rhs class to the matching fold-const helper.  */
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_UNARY_RHS:
      return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
					  gimple_assign_rhs1 (stmt), depth);
    case GIMPLE_BINARY_RHS:
      return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
					   gimple_assign_rhs1 (stmt),
					   gimple_assign_rhs2 (stmt), depth);
    case GIMPLE_TERNARY_RHS:
      /* No helper for ternary rhs; answer conservatively.  */
      return false;
    case GIMPLE_SINGLE_RHS:
      return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
    case GIMPLE_INVALID_RHS:
      break;
    }
  gcc_unreachable ();
}
11574 :
11575 : /* Return true if the floating-point value computed by call STMT is known
11576 : to have an integer value. We also allow +Inf, -Inf and NaN to be
11577 : considered integer values. Return false for signaling NaN.
11578 :
11579 : DEPTH is the current nesting depth of the query. */
11580 :
11581 : static bool
11582 1089 : gimple_call_integer_valued_real_p (gimple *stmt, int depth)
11583 : {
11584 1089 : tree arg0 = (gimple_call_num_args (stmt) > 0
11585 1089 : ? gimple_call_arg (stmt, 0)
11586 : : NULL_TREE);
11587 1089 : tree arg1 = (gimple_call_num_args (stmt) > 1
11588 1089 : ? gimple_call_arg (stmt, 1)
11589 : : NULL_TREE);
11590 1089 : return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
11591 1089 : arg0, arg1, depth);
11592 : }
11593 :
11594 : /* Return true if the floating-point result of phi STMT is known to have
11595 : an integer value. We also allow +Inf, -Inf and NaN to be considered
11596 : integer values. Return false for signaling NaN.
11597 :
11598 : DEPTH is the current nesting depth of the query. */
11599 :
11600 : static bool
11601 1489 : gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
11602 : {
11603 1652 : for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
11604 : {
11605 1643 : tree arg = gimple_phi_arg_def (stmt, i);
11606 1643 : if (!integer_valued_real_single_p (arg, depth + 1))
11607 : return false;
11608 : }
11609 : return true;
11610 : }
11611 :
11612 : /* Return true if the floating-point value computed by STMT is known
11613 : to have an integer value. We also allow +Inf, -Inf and NaN to be
11614 : considered integer values. Return false for signaling NaN.
11615 :
11616 : DEPTH is the current nesting depth of the query. */
11617 :
11618 : bool
11619 88691 : gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
11620 : {
11621 88691 : switch (gimple_code (stmt))
11622 : {
11623 58736 : case GIMPLE_ASSIGN:
11624 58736 : return gimple_assign_integer_valued_real_p (stmt, depth);
11625 1089 : case GIMPLE_CALL:
11626 1089 : return gimple_call_integer_valued_real_p (stmt, depth);
11627 1489 : case GIMPLE_PHI:
11628 1489 : return gimple_phi_integer_valued_real_p (stmt, depth);
11629 : default:
11630 : return false;
11631 : }
11632 : }
|