Line data Source code
1 : /* Forward propagation of expressions for single use variables.
2 : Copyright (C) 2004-2026 Free Software Foundation, Inc.
3 :
4 : This file is part of GCC.
5 :
6 : GCC is free software; you can redistribute it and/or modify
7 : it under the terms of the GNU General Public License as published by
8 : the Free Software Foundation; either version 3, or (at your option)
9 : any later version.
10 :
11 : GCC is distributed in the hope that it will be useful,
12 : but WITHOUT ANY WARRANTY; without even the implied warranty of
13 : MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 : GNU General Public License for more details.
15 :
16 : You should have received a copy of the GNU General Public License
17 : along with GCC; see the file COPYING3. If not see
18 : <http://www.gnu.org/licenses/>. */
19 :
20 : #include "config.h"
21 : #include "system.h"
22 : #include "coretypes.h"
23 : #include "backend.h"
24 : #include "rtl.h"
25 : #include "tree.h"
26 : #include "gimple.h"
27 : #include "cfghooks.h"
28 : #include "tree-pass.h"
29 : #include "ssa.h"
30 : #include "expmed.h"
31 : #include "optabs-query.h"
32 : #include "gimple-pretty-print.h"
33 : #include "fold-const.h"
34 : #include "stor-layout.h"
35 : #include "gimple-iterator.h"
36 : #include "gimple-fold.h"
37 : #include "tree-eh.h"
38 : #include "gimplify.h"
39 : #include "gimplify-me.h"
40 : #include "tree-cfg.h"
41 : #include "expr.h"
42 : #include "tree-dfa.h"
43 : #include "tree-ssa-propagate.h"
44 : #include "tree-ssa-dom.h"
45 : #include "tree-ssa-strlen.h"
46 : #include "builtins.h"
47 : #include "tree-cfgcleanup.h"
48 : #include "cfganal.h"
49 : #include "optabs-tree.h"
50 : #include "insn-config.h"
51 : #include "recog.h"
52 : #include "cfgloop.h"
53 : #include "tree-vectorizer.h"
54 : #include "tree-vector-builder.h"
55 : #include "vec-perm-indices.h"
56 : #include "internal-fn.h"
57 : #include "cgraph.h"
58 : #include "tree-ssa.h"
59 : #include "gimple-range.h"
60 : #include "tree-ssa-dce.h"
61 :
62 : /* This pass propagates the RHS of assignment statements into use
63 : sites of the LHS of the assignment. It's basically a specialized
64 : form of tree combination. It is hoped all of this can disappear
65 : when we have a generalized tree combiner.
66 :
67 : One class of common cases we handle is forward propagating a single use
68 : variable into a COND_EXPR.
69 :
70 : bb0:
71 : x = a COND b;
72 : if (x) goto ... else goto ...
73 :
74 : Will be transformed into:
75 :
76 : bb0:
77 : if (a COND b) goto ... else goto ...
78 :
79 : Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
80 :
81 : Or (assuming c1 and c2 are constants):
82 :
83 : bb0:
84 : x = a + c1;
85 : if (x EQ/NEQ c2) goto ... else goto ...
86 :
87 : Will be transformed into:
88 :
89 : bb0:
90 : if (a EQ/NEQ (c2 - c1)) goto ... else goto ...
91 :
92 : Similarly for x = a - c1.
93 :
94 : Or
95 :
96 : bb0:
97 : x = !a
98 : if (x) goto ... else goto ...
99 :
100 : Will be transformed into:
101 :
102 : bb0:
103 : if (a == 0) goto ... else goto ...
104 :
105 : Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
106 : For these cases, we propagate A into all, possibly more than one,
107 : COND_EXPRs that use X.
108 :
109 : Or
110 :
111 : bb0:
112 : x = (typecast) a
113 : if (x) goto ... else goto ...
114 :
115 : Will be transformed into:
116 :
117 : bb0:
118 : if (a != 0) goto ... else goto ...
119 :
120 : (Assuming a is an integral type and x is a boolean, or x is an
121 : integral type and a is a boolean.)
122 :
123 : Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
124 : For these cases, we propagate A into all, possibly more than one,
125 : COND_EXPRs that use X.
126 :
127 : In addition to eliminating the variable and the statement which assigns
128 : a value to the variable, we may be able to later thread the jump without
129 : adding insane complexity in the dominator optimizer.
130 :
131 : Also note these transformations can cascade. We handle this by having
132 : a worklist of COND_EXPR statements to examine. As we make a change to
133 : a statement, we put it back on the worklist to examine on the next
134 : iteration of the main loop.
135 :
136 : A second class of propagation opportunities arises for ADDR_EXPR
137 : nodes.
138 :
139 : ptr = &x->y->z;
140 : res = *ptr;
141 :
142 : Will get turned into
143 :
144 : res = x->y->z;
145 :
146 : Or
147 : ptr = (type1*)&type2var;
148 : res = *ptr
149 :
150 : Will get turned into (if type1 and type2 are the same size
151 : and neither has volatile on them):
152 : res = VIEW_CONVERT_EXPR<type1>(type2var)
153 :
154 : Or
155 :
156 : ptr = &x[0];
157 : ptr2 = ptr + <constant>;
158 :
159 : Will get turned into
160 :
161 : ptr2 = &x[constant/elementsize];
162 :
163 : Or
164 :
165 : ptr = &x[0];
166 : offset = index * element_size;
167 : offset_p = (pointer) offset;
168 : ptr2 = ptr + offset_p
169 :
170 : Will get turned into:
171 :
172 : ptr2 = &x[index];
173 :
174 : Or
175 : ssa = (int) decl
176 : res = ssa & 1
177 :
178 : Provided that decl has known alignment >= 2, will get turned into
179 :
180 : res = 0
181 :
182 : We also propagate casts into SWITCH_EXPR and COND_EXPR conditions so
183 : that the cast can be removed, and a {NOT_EXPR,NEG_EXPR} into a
184 : subsequent {NOT_EXPR,NEG_EXPR}.
185 :
186 : This will (of course) be extended as other needs arise. */
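
/* As an illustrative (hypothetical) example of the first class of
   transformations above, for C source like

     _Bool t = a < b;
     if (t)
       ...

   the comparison defining the single-use temporary T is forwarded into
   the condition, yielding "if (a < b)", and the now-dead definition of
   T can then be removed.  */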
187 :
188 : /* Data structure that contains simplifiable vectorized permute sequences.
189 : See recognise_vec_perm_simplify_seq () for a description of the sequence. */
190 :
191 : struct _vec_perm_simplify_seq
192 : {
193 : /* Defining stmts of vectors in the sequence. */
194 : gassign *v_1_stmt;
195 : gassign *v_2_stmt;
196 : gassign *v_x_stmt;
197 : gassign *v_y_stmt;
198 : /* Final permute statement. */
199 : gassign *stmt;
200 : /* New selector indices for stmt. */
201 : tree new_sel;
202 : /* Elements of each vector and selector. */
203 : unsigned int nelts;
204 : };
205 : typedef struct _vec_perm_simplify_seq *vec_perm_simplify_seq;
206 :
207 : static bool forward_propagate_addr_expr (tree, tree, bool);
208 :
209 : /* Set to true if we delete dead edges during the optimization. */
210 : static bool cfg_changed;
211 :
212 : static tree rhs_to_tree (tree type, gimple *stmt);
213 :
214 : static bitmap to_purge;
215 :
216 : /* Const-and-copy lattice. */
217 : static vec<tree> lattice;
218 :
219 : /* Set the lattice entry for NAME to VAL. */
220 : static void
221 31902913 : fwprop_set_lattice_val (tree name, tree val)
222 : {
223 31902913 : if (TREE_CODE (name) == SSA_NAME)
224 : {
225 31902913 : if (SSA_NAME_VERSION (name) >= lattice.length ())
226 : {
227 32169 : lattice.reserve (num_ssa_names - lattice.length ());
228 21446 : lattice.quick_grow_cleared (num_ssa_names);
229 : }
230 31902913 : lattice[SSA_NAME_VERSION (name)] = val;
231 : /* As this now constitutes a copy, duplicate points-to
232 : and range info appropriately. */
233 31902913 : if (TREE_CODE (val) == SSA_NAME)
234 31456721 : maybe_duplicate_ssa_info_at_copy (name, val);
235 : }
236 31902913 : }
237 :
238 : /* Invalidate the lattice entry for NAME, done when releasing SSA names. */
239 : static void
240 904807 : fwprop_invalidate_lattice (tree name)
241 : {
242 904807 : if (name
243 902463 : && TREE_CODE (name) == SSA_NAME
244 1807143 : && SSA_NAME_VERSION (name) < lattice.length ())
245 902303 : lattice[SSA_NAME_VERSION (name)] = NULL_TREE;
246 904807 : }
247 :
248 : /* Get the statement we can propagate from into NAME skipping
249 : trivial copies. Returns the statement which defines the
250 : propagation source or NULL if there is no such statement.
251 : If SINGLE_USE_ONLY is set considers only sources which have
252 : a single use chain up to NAME. If SINGLE_USE_P is non-null,
253 : it is set to whether the chain to NAME is a single use chain
254 : or not. SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set. */
255 :
256 : static gimple *
257 27577832 : get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
258 : {
259 27577832 : bool single_use = true;
260 :
261 27578816 : do {
262 27578324 : gimple *def_stmt = SSA_NAME_DEF_STMT (name);
263 :
264 27578324 : if (!has_single_use (name))
265 : {
266 15002017 : single_use = false;
267 15002017 : if (single_use_only)
268 : return NULL;
269 : }
270 :
271 : /* If name is defined by a PHI node or is the default def, bail out. */
272 27576888 : if (!is_gimple_assign (def_stmt))
273 : return NULL;
274 :
275 : /* If def_stmt is a simple copy, continue looking. */
276 19460635 : if (gimple_assign_rhs_code (def_stmt) == SSA_NAME)
277 492 : name = gimple_assign_rhs1 (def_stmt);
278 : else
279 : {
280 19460143 : if (!single_use_only && single_use_p)
281 19158100 : *single_use_p = single_use;
282 :
283 19460143 : return def_stmt;
284 : }
285 492 : } while (1);
286 : }
287 :
288 : /* Checks if the destination ssa name in DEF_STMT can be used as
289 : propagation source. Returns true if so, otherwise false. */
290 :
291 : static bool
292 27326437 : can_propagate_from (gimple *def_stmt)
293 : {
294 27326437 : gcc_assert (is_gimple_assign (def_stmt));
295 :
296 : /* If the rhs has side-effects we cannot propagate from it. */
297 27326437 : if (gimple_has_volatile_ops (def_stmt))
298 : return false;
299 :
300 : /* If the rhs is a load we cannot propagate from it. */
301 26734435 : if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference
302 26734435 : || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration)
303 : return false;
304 :
305 : /* Constants can be always propagated. */
306 13224411 : if (gimple_assign_single_p (def_stmt)
307 13224411 : && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
308 : return true;
309 :
310 : /* We cannot propagate ssa names that occur in abnormal phi nodes. */
311 13224411 : if (stmt_references_abnormal_ssa_name (def_stmt))
312 : return false;
313 :
314 : /* If the definition is a conversion of a pointer to a function type,
315 : then we cannot apply optimizations as some targets require
316 : function pointers to be canonicalized and in this case this
317 : optimization could eliminate a necessary canonicalization. */
318 13223726 : if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
319 : {
320 3206170 : tree rhs = gimple_assign_rhs1 (def_stmt);
321 3206170 : if (FUNCTION_POINTER_TYPE_P (TREE_TYPE (rhs)))
322 : return false;
323 : }
324 :
325 : return true;
326 : }
327 :
328 : /* Remove a chain of dead statements starting at the definition of
329 : NAME. The chain is linked via the first operand of the defining statements.
330 : If NAME was replaced in its only use then this function can be used
331 : to clean up dead stmts. The function handles already released SSA
332 : names gracefully. */
333 :
334 : static void
335 237478 : remove_prop_source_from_use (tree name)
336 : {
337 297175 : gimple_stmt_iterator gsi;
338 297175 : gimple *stmt;
339 :
340 297175 : do {
341 297175 : basic_block bb;
342 :
343 297175 : if (SSA_NAME_IN_FREE_LIST (name)
344 297132 : || SSA_NAME_IS_DEFAULT_DEF (name)
345 592313 : || !has_zero_uses (name))
346 : break;
347 :
348 60166 : stmt = SSA_NAME_DEF_STMT (name);
349 60166 : if (gimple_code (stmt) == GIMPLE_PHI
350 60166 : || gimple_has_side_effects (stmt))
351 : break;
352 :
353 60166 : bb = gimple_bb (stmt);
354 60166 : gsi = gsi_for_stmt (stmt);
355 60166 : unlink_stmt_vdef (stmt);
356 60166 : if (gsi_remove (&gsi, true))
357 6 : bitmap_set_bit (to_purge, bb->index);
358 60166 : fwprop_invalidate_lattice (gimple_get_lhs (stmt));
359 60166 : release_defs (stmt);
360 :
361 60166 : name = is_gimple_assign (stmt) ? gimple_assign_rhs1 (stmt) : NULL_TREE;
362 60166 : } while (name && TREE_CODE (name) == SSA_NAME);
363 :
364 237478 : }
365 :
366 : /* Return the rhs of a gassign *STMT in a form of a single tree,
367 : converted to type TYPE.
368 :
369 : This should disappear, but is needed so we can combine expressions and use
370 : the fold() interfaces. Long term, we need to develop folding and combine
371 : routines that deal with gimple exclusively. */
372 :
373 : static tree
374 7228662 : rhs_to_tree (tree type, gimple *stmt)
375 : {
376 7228662 : location_t loc = gimple_location (stmt);
377 7228662 : enum tree_code code = gimple_assign_rhs_code (stmt);
378 7228662 : switch (get_gimple_rhs_class (code))
379 : {
380 12499 : case GIMPLE_TERNARY_RHS:
381 12499 : return fold_build3_loc (loc, code, type, gimple_assign_rhs1 (stmt),
382 : gimple_assign_rhs2 (stmt),
383 12499 : gimple_assign_rhs3 (stmt));
384 4935865 : case GIMPLE_BINARY_RHS:
385 4935865 : return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt),
386 4935865 : gimple_assign_rhs2 (stmt));
387 2018865 : case GIMPLE_UNARY_RHS:
388 2018865 : return build1 (code, type, gimple_assign_rhs1 (stmt));
389 261433 : case GIMPLE_SINGLE_RHS:
390 261433 : return gimple_assign_rhs1 (stmt);
391 0 : default:
392 0 : gcc_unreachable ();
393 : }
394 : }
395 :
396 : /* Combine OP0 CODE OP1 in the context of a COND_EXPR. Returns
397 : the folded result in a form suitable for COND_EXPR_COND or
398 : NULL_TREE, if there is no suitable simplified form. If
399 : INVARIANT_ONLY is true only gimple_min_invariant results are
400 : considered simplified. */
401 :
402 : static tree
403 8141805 : combine_cond_expr_cond (gimple *stmt, enum tree_code code, tree type,
404 : tree op0, tree op1, bool invariant_only)
405 : {
406 8141805 : tree t;
407 :
408 8141805 : gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);
409 :
410 8141805 : fold_defer_overflow_warnings ();
411 8141805 : t = fold_binary_loc (gimple_location (stmt), code, type, op0, op1);
412 8141805 : if (!t)
413 : {
414 4629448 : fold_undefer_overflow_warnings (false, NULL, 0);
415 4629448 : return NULL_TREE;
416 : }
417 :
418 : /* Require that we got a boolean type out if we put one in. */
419 3512357 : gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));
420 :
421 : /* Canonicalize the combined condition for use in a COND_EXPR. */
422 3512357 : t = canonicalize_cond_expr_cond (t);
423 :
424 : /* Bail out if we required an invariant but didn't get one. */
425 3512357 : if (!t || (invariant_only && !is_gimple_min_invariant (t)))
426 : {
427 3277082 : fold_undefer_overflow_warnings (false, NULL, 0);
428 3277082 : return NULL_TREE;
429 : }
430 :
431 235275 : bool nowarn = warning_suppressed_p (stmt, OPT_Wstrict_overflow);
432 235275 : fold_undefer_overflow_warnings (!nowarn, stmt, 0);
433 :
434 235275 : return t;
435 : }
436 :
437 : /* Combine the comparison OP0 CODE OP1 at LOC with the defining statements
438 : of its operand. Return a new comparison tree or NULL_TREE if there
439 : were no simplifying combines. */
440 :
441 : static tree
442 21685687 : forward_propagate_into_comparison_1 (gimple *stmt,
443 : enum tree_code code, tree type,
444 : tree op0, tree op1)
445 : {
446 21685687 : tree tmp = NULL_TREE;
447 21685687 : tree rhs0 = NULL_TREE, rhs1 = NULL_TREE;
448 21685687 : bool single_use0_p = false, single_use1_p = false;
449 :
450 : /* For comparisons use the first operand, that is likely to
451 : simplify comparisons against constants. */
452 21685687 : if (TREE_CODE (op0) == SSA_NAME)
453 : {
454 21646642 : gimple *def_stmt = get_prop_source_stmt (op0, false, &single_use0_p);
455 21646642 : if (def_stmt && can_propagate_from (def_stmt))
456 : {
457 5483278 : enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
458 5483278 : bool invariant_only_p = !single_use0_p;
459 :
460 5483278 : rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
461 :
462 : /* Always combine comparisons or conversions from booleans. */
463 5483278 : if (TREE_CODE (op1) == INTEGER_CST
464 5483278 : && ((CONVERT_EXPR_CODE_P (def_code)
465 887446 : && TREE_CODE (TREE_TYPE (TREE_OPERAND (rhs0, 0)))
466 : == BOOLEAN_TYPE)
467 3522501 : || TREE_CODE_CLASS (def_code) == tcc_comparison))
468 : invariant_only_p = false;
469 :
470 5483278 : tmp = combine_cond_expr_cond (stmt, code, type,
471 : rhs0, op1, invariant_only_p);
472 5483278 : if (tmp)
473 : return tmp;
474 : }
475 : }
476 :
477 : /* If that wasn't successful, try the second operand. */
478 21458672 : if (TREE_CODE (op1) == SSA_NAME)
479 : {
480 5373556 : gimple *def_stmt = get_prop_source_stmt (op1, false, &single_use1_p);
481 5373556 : if (def_stmt && can_propagate_from (def_stmt))
482 : {
483 1745384 : rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
484 3490768 : tmp = combine_cond_expr_cond (stmt, code, type,
485 1745384 : op0, rhs1, !single_use1_p);
486 1745384 : if (tmp)
487 : return tmp;
488 : }
489 : }
490 :
491 : /* If that wasn't successful either, try both operands. */
492 21452359 : if (rhs0 != NULL_TREE
493 21452359 : && rhs1 != NULL_TREE)
494 913143 : tmp = combine_cond_expr_cond (stmt, code, type,
495 : rhs0, rhs1,
496 913143 : !(single_use0_p && single_use1_p));
497 :
498 : return tmp;
499 : }
500 :
501 : /* Propagate from the ssa name definition statements of the assignment
502 : from a comparison at *GSI into the conditional if that simplifies it.
503 : Returns true if the stmt was modified. */
504 :
505 : static bool
506 2537904 : forward_propagate_into_comparison (gimple_stmt_iterator *gsi)
507 : {
508 2537904 : gimple *stmt = gsi_stmt (*gsi);
509 2537904 : tree tmp;
510 2537904 : tree type = TREE_TYPE (gimple_assign_lhs (stmt));
511 2537904 : tree rhs1 = gimple_assign_rhs1 (stmt);
512 2537904 : tree rhs2 = gimple_assign_rhs2 (stmt);
513 :
514 : /* Combine the comparison with defining statements. */
515 2537904 : tmp = forward_propagate_into_comparison_1 (stmt,
516 : gimple_assign_rhs_code (stmt),
517 : type, rhs1, rhs2);
518 2537904 : if (tmp && useless_type_conversion_p (type, TREE_TYPE (tmp)))
519 : {
520 7111 : if (dump_file)
521 : {
522 0 : fprintf (dump_file, " Replaced '");
523 0 : print_gimple_expr (dump_file, stmt, 0);
524 0 : fprintf (dump_file, "' with '");
525 0 : print_generic_expr (dump_file, tmp);
526 0 : fprintf (dump_file, "'\n");
527 : }
528 7111 : gimple_assign_set_rhs_from_tree (gsi, tmp);
529 7111 : fold_stmt (gsi);
530 7111 : update_stmt (gsi_stmt (*gsi));
531 :
532 7111 : if (TREE_CODE (rhs1) == SSA_NAME)
533 7111 : remove_prop_source_from_use (rhs1);
534 7111 : if (TREE_CODE (rhs2) == SSA_NAME)
535 2946 : remove_prop_source_from_use (rhs2);
536 7111 : return true;
537 : }
538 :
539 : return false;
540 : }
541 :
542 : /* Propagate from the ssa name definition statements of COND_EXPR
543 : in GIMPLE_COND statement STMT into the conditional if that simplifies it.
544 : Returns zero if no statement was changed, one if there were
545 : changes and two if cfg_cleanup needs to run. */
546 :
547 : static int
548 19147783 : forward_propagate_into_gimple_cond (gcond *stmt)
549 : {
550 19147783 : tree tmp;
551 19147783 : enum tree_code code = gimple_cond_code (stmt);
552 19147783 : tree rhs1 = gimple_cond_lhs (stmt);
553 19147783 : tree rhs2 = gimple_cond_rhs (stmt);
554 :
555 : /* GIMPLE_COND will always be a comparison. */
556 19147783 : gcc_assert (TREE_CODE_CLASS (gimple_cond_code (stmt)) == tcc_comparison);
557 :
558 19147783 : tmp = forward_propagate_into_comparison_1 (stmt, code,
559 : boolean_type_node,
560 : rhs1, rhs2);
561 19147783 : if (tmp
562 19147783 : && is_gimple_condexpr_for_cond (tmp))
563 : {
564 221832 : if (dump_file)
565 : {
566 9 : fprintf (dump_file, " Replaced '");
567 9 : print_gimple_expr (dump_file, stmt, 0);
568 9 : fprintf (dump_file, "' with '");
569 9 : print_generic_expr (dump_file, tmp);
570 9 : fprintf (dump_file, "'\n");
571 : }
572 :
573 221832 : gimple_cond_set_condition_from_tree (stmt, unshare_expr (tmp));
574 221832 : update_stmt (stmt);
575 :
576 221832 : if (TREE_CODE (rhs1) == SSA_NAME)
577 221832 : remove_prop_source_from_use (rhs1);
578 221832 : if (TREE_CODE (rhs2) == SSA_NAME)
579 5588 : remove_prop_source_from_use (rhs2);
580 221832 : return is_gimple_min_invariant (tmp) ? 2 : 1;
581 : }
582 :
583 18925951 : if (canonicalize_bool_cond (stmt, gimple_bb (stmt)))
584 : return 1;
585 :
586 : return 0;
587 : }
588 :
589 : /* We've just substituted an ADDR_EXPR into stmt. Update all the
590 : relevant data structures to match. */
591 :
592 : static void
593 1926477 : tidy_after_forward_propagate_addr (gimple *stmt)
594 : {
595 : /* We may have turned a trapping insn into a non-trapping insn. */
596 1926477 : if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
597 131 : bitmap_set_bit (to_purge, gimple_bb (stmt)->index);
598 :
599 1926477 : if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
600 248783 : recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
601 1926477 : }
602 :
603 : /* NAME is a SSA_NAME representing DEF_RHS which is of the form
604 : ADDR_EXPR <whatever>.
605 :
606 : Try to forward propagate the ADDR_EXPR into the use USE_STMT.
607 : Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
608 : node or for recovery of array indexing from pointer arithmetic.
609 :
610 : Return true if the propagation was successful (the propagation can
611 : be not totally successful, yet things may have been changed). */
612 :
613 : static bool
614 2738340 : forward_propagate_addr_expr_1 (tree name, tree def_rhs,
615 : gimple_stmt_iterator *use_stmt_gsi,
616 : bool single_use_p)
617 : {
618 2738340 : tree lhs, rhs, rhs2, array_ref;
619 2738340 : gimple *use_stmt = gsi_stmt (*use_stmt_gsi);
620 2738340 : enum tree_code rhs_code;
621 2738340 : bool res = true;
622 :
623 2738340 : gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);
624 :
625 2738340 : lhs = gimple_assign_lhs (use_stmt);
626 2738340 : rhs_code = gimple_assign_rhs_code (use_stmt);
627 2738340 : rhs = gimple_assign_rhs1 (use_stmt);
628 :
629 : /* Do not perform copy-propagation but recurse through copy chains. */
630 2738340 : if (TREE_CODE (lhs) == SSA_NAME
631 1360838 : && rhs_code == SSA_NAME)
632 6844 : return forward_propagate_addr_expr (lhs, def_rhs, single_use_p);
633 :
634 : /* The use statement could be a conversion. Recurse to the uses of the
635 : lhs as copyprop does not copy through pointer to integer to pointer
636 : conversions and FRE does not catch all cases either.
637 : Treat the case of a single-use name and
638 : a conversion to def_rhs type separately, though. */
639 2731496 : if (TREE_CODE (lhs) == SSA_NAME
640 1353994 : && CONVERT_EXPR_CODE_P (rhs_code))
641 : {
642 : /* If there is a point in a conversion chain where the types match
643 : so we can remove a conversion re-materialize the address here
644 : and stop. */
645 23885 : if (single_use_p
646 23885 : && useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
647 : {
648 1 : gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
649 1 : gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
650 1 : return true;
651 : }
652 :
653 : /* Else recurse if the conversion preserves the address value. */
654 47768 : if ((INTEGRAL_TYPE_P (TREE_TYPE (lhs))
655 2 : || POINTER_TYPE_P (TREE_TYPE (lhs)))
656 47768 : && (TYPE_PRECISION (TREE_TYPE (lhs))
657 23884 : >= TYPE_PRECISION (TREE_TYPE (def_rhs))))
658 23817 : return forward_propagate_addr_expr (lhs, def_rhs, single_use_p);
659 :
660 : return false;
661 : }
662 :
663 : /* If this isn't a conversion chain, then from this point on we can
664 : only propagate into compatible pointer contexts. */
665 2707611 : if (!types_compatible_p (TREE_TYPE (name), TREE_TYPE (def_rhs)))
666 : return false;
667 :
668 : /* Propagate through constant pointer adjustments. */
669 2687067 : if (TREE_CODE (lhs) == SSA_NAME
670 1310738 : && rhs_code == POINTER_PLUS_EXPR
671 1310738 : && rhs == name
672 2849345 : && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
673 : {
674 117499 : tree new_def_rhs;
675 : /* As we come here with non-invariant addresses in def_rhs we need
676 : to make sure we can build a valid constant offsetted address
677 : for further propagation. Simply rely on fold building that
678 : and check after the fact. */
679 117499 : new_def_rhs = fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (rhs)),
680 : def_rhs,
681 : fold_convert (ptr_type_node,
682 : gimple_assign_rhs2 (use_stmt)));
683 117499 : if (TREE_CODE (new_def_rhs) == MEM_REF
684 117499 : && !is_gimple_mem_ref_addr (TREE_OPERAND (new_def_rhs, 0)))
685 : return false;
686 113523 : new_def_rhs = build1 (ADDR_EXPR, TREE_TYPE (rhs), new_def_rhs);
687 :
688 : /* Recurse. If we could propagate into all uses of lhs do not
689 : bother to replace into the current use but just pretend we did. */
690 113523 : if (forward_propagate_addr_expr (lhs, new_def_rhs, single_use_p))
691 : return true;
692 :
693 37565 : if (useless_type_conversion_p (TREE_TYPE (lhs),
694 37565 : TREE_TYPE (new_def_rhs)))
695 37565 : gimple_assign_set_rhs_with_ops (use_stmt_gsi, TREE_CODE (new_def_rhs),
696 : new_def_rhs);
697 0 : else if (is_gimple_min_invariant (new_def_rhs))
698 0 : gimple_assign_set_rhs_with_ops (use_stmt_gsi, NOP_EXPR, new_def_rhs);
699 : else
700 : return false;
701 37565 : gcc_assert (gsi_stmt (*use_stmt_gsi) == use_stmt);
702 37565 : update_stmt (use_stmt);
703 37565 : return true;
704 : }
705 :
706 : /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
707 : ADDR_EXPR will not appear on the LHS. */
708 2569568 : tree *lhsp = gimple_assign_lhs_ptr (use_stmt);
709 3874065 : while (handled_component_p (*lhsp))
710 1304497 : lhsp = &TREE_OPERAND (*lhsp, 0);
711 2569568 : lhs = *lhsp;
712 :
713 : /* Now see if the LHS node is a MEM_REF using NAME. If so,
714 : propagate the ADDR_EXPR into the use of NAME and fold the result. */
715 2569568 : if (TREE_CODE (lhs) == MEM_REF
716 2569568 : && TREE_OPERAND (lhs, 0) == name)
717 : {
718 871585 : tree def_rhs_base;
719 871585 : poly_int64 def_rhs_offset;
720 : /* If the address is invariant we can always fold it. */
721 871585 : if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
722 : &def_rhs_offset)))
723 : {
724 826046 : poly_offset_int off = mem_ref_offset (lhs);
725 826046 : tree new_ptr;
726 826046 : off += def_rhs_offset;
727 826046 : if (TREE_CODE (def_rhs_base) == MEM_REF)
728 : {
729 805424 : off += mem_ref_offset (def_rhs_base);
730 805424 : new_ptr = TREE_OPERAND (def_rhs_base, 0);
731 : }
732 : else
733 20622 : new_ptr = build_fold_addr_expr (def_rhs_base);
734 826046 : TREE_OPERAND (lhs, 0) = new_ptr;
735 826046 : TREE_OPERAND (lhs, 1)
736 826046 : = wide_int_to_tree (TREE_TYPE (TREE_OPERAND (lhs, 1)), off);
737 826046 : tidy_after_forward_propagate_addr (use_stmt);
738 : /* Continue propagating into the RHS if this was not the only use. */
739 826046 : if (single_use_p)
740 223776 : return true;
741 : }
742 : /* If the LHS is a plain dereference and the value type is the same as
743 : that of the pointed-to type of the address we can put the
744 : dereferenced address on the LHS preserving the original alias-type. */
745 45539 : else if (integer_zerop (TREE_OPERAND (lhs, 1))
746 17638 : && ((gimple_assign_lhs (use_stmt) == lhs
747 14080 : && useless_type_conversion_p
748 14080 : (TREE_TYPE (TREE_OPERAND (def_rhs, 0)),
749 14080 : TREE_TYPE (gimple_assign_rhs1 (use_stmt))))
750 13395 : || types_compatible_p (TREE_TYPE (lhs),
751 13395 : TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
752 : /* Don't forward anything into clobber stmts if it would result
753 : in the lhs no longer being a MEM_REF. */
754 52993 : && (!gimple_clobber_p (use_stmt)
755 161 : || TREE_CODE (TREE_OPERAND (def_rhs, 0)) == MEM_REF))
756 : {
757 7293 : tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
758 7293 : tree new_offset, new_base, saved, new_lhs;
759 26183 : while (handled_component_p (*def_rhs_basep))
760 11597 : def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
761 7293 : saved = *def_rhs_basep;
762 7293 : if (TREE_CODE (*def_rhs_basep) == MEM_REF)
763 : {
764 3757 : new_base = TREE_OPERAND (*def_rhs_basep, 0);
765 3757 : new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (lhs, 1)),
766 : TREE_OPERAND (*def_rhs_basep, 1));
767 : }
768 : else
769 : {
770 3536 : new_base = build_fold_addr_expr (*def_rhs_basep);
771 3536 : new_offset = TREE_OPERAND (lhs, 1);
772 : }
773 7293 : *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
774 : new_base, new_offset);
775 7293 : TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (lhs);
776 7293 : TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (lhs);
777 7293 : TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (lhs);
778 7293 : new_lhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
779 7293 : *lhsp = new_lhs;
780 7293 : TREE_THIS_VOLATILE (new_lhs) = TREE_THIS_VOLATILE (lhs);
781 7293 : TREE_SIDE_EFFECTS (new_lhs) = TREE_SIDE_EFFECTS (lhs);
782 7293 : *def_rhs_basep = saved;
783 7293 : tidy_after_forward_propagate_addr (use_stmt);
784 : /* Continue propagating into the RHS if this was not the
785 : only use. */
786 7293 : if (single_use_p)
787 : return true;
788 : }
789 : else
790 : /* We can have a struct assignment dereferencing our name twice.
791 : Note that we didn't propagate into the lhs to not falsely
792 : claim we did when propagating into the rhs. */
793 : res = false;
794 : }
795 :
796 : /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
797 : nodes from the RHS. */
798 2342264 : tree *rhsp = gimple_assign_rhs1_ptr (use_stmt);
799 2342264 : if (TREE_CODE (*rhsp) == ADDR_EXPR)
800 237250 : rhsp = &TREE_OPERAND (*rhsp, 0);
801 3298708 : while (handled_component_p (*rhsp))
802 956444 : rhsp = &TREE_OPERAND (*rhsp, 0);
803 2342264 : rhs = *rhsp;
804 :
805 : /* Now see if the RHS node is a MEM_REF using NAME. If so,
806 : propagate the ADDR_EXPR into the use of NAME and fold the result. */
807 2342264 : if (TREE_CODE (rhs) == MEM_REF
808 2342264 : && TREE_OPERAND (rhs, 0) == name)
809 : {
810 1114532 : tree def_rhs_base;
811 1114532 : poly_int64 def_rhs_offset;
812 1114532 : if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
813 : &def_rhs_offset)))
814 : {
815 1078467 : poly_offset_int off = mem_ref_offset (rhs);
816 1078467 : tree new_ptr;
817 1078467 : off += def_rhs_offset;
818 1078467 : if (TREE_CODE (def_rhs_base) == MEM_REF)
819 : {
820 1053304 : off += mem_ref_offset (def_rhs_base);
821 1053304 : new_ptr = TREE_OPERAND (def_rhs_base, 0);
822 : }
823 : else
824 25163 : new_ptr = build_fold_addr_expr (def_rhs_base);
825 1078467 : TREE_OPERAND (rhs, 0) = new_ptr;
826 1078467 : TREE_OPERAND (rhs, 1)
827 1078467 : = wide_int_to_tree (TREE_TYPE (TREE_OPERAND (rhs, 1)), off);
828 1078467 : fold_stmt_inplace (use_stmt_gsi);
829 1078467 : tidy_after_forward_propagate_addr (use_stmt);
830 1078467 : return res;
831 : }
832 : /* If the RHS is a plain dereference and the value type is the same as
833 : that of the pointed-to type of the address we can put the
834 : dereferenced address on the RHS preserving the original alias-type. */
835 36065 : else if (integer_zerop (TREE_OPERAND (rhs, 1))
836 36065 : && ((gimple_assign_rhs1 (use_stmt) == rhs
837 19667 : && useless_type_conversion_p
838 19667 : (TREE_TYPE (gimple_assign_lhs (use_stmt)),
839 19667 : TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
840 22471 : || types_compatible_p (TREE_TYPE (rhs),
841 22471 : TREE_TYPE (TREE_OPERAND (def_rhs, 0)))))
842 : {
843 14671 : tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
844 14671 : tree new_offset, new_base, saved, new_rhs;
845 51889 : while (handled_component_p (*def_rhs_basep))
846 22547 : def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
847 14671 : saved = *def_rhs_basep;
848 14671 : if (TREE_CODE (*def_rhs_basep) == MEM_REF)
849 : {
850 7044 : new_base = TREE_OPERAND (*def_rhs_basep, 0);
851 7044 : new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (rhs, 1)),
852 : TREE_OPERAND (*def_rhs_basep, 1));
853 : }
854 : else
855 : {
856 7627 : new_base = build_fold_addr_expr (*def_rhs_basep);
857 7627 : new_offset = TREE_OPERAND (rhs, 1);
858 : }
859 14671 : *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
860 : new_base, new_offset);
861 14671 : TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (rhs);
862 14671 : TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (rhs);
863 14671 : TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (rhs);
864 14671 : new_rhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
865 14671 : *rhsp = new_rhs;
866 14671 : TREE_THIS_VOLATILE (new_rhs) = TREE_THIS_VOLATILE (rhs);
867 14671 : TREE_SIDE_EFFECTS (new_rhs) = TREE_SIDE_EFFECTS (rhs);
868 14671 : *def_rhs_basep = saved;
869 14671 : fold_stmt_inplace (use_stmt_gsi);
870 14671 : tidy_after_forward_propagate_addr (use_stmt);
871 14671 : return res;
872 : }
873 : }
874 :
875 : /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
876 : is nothing to do. */
877 1249126 : if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
878 1249126 : || gimple_assign_rhs1 (use_stmt) != name)
879 : return false;
880 :
881 : /* The remaining cases are all for turning pointer arithmetic into
882 : array indexing. They only apply when we have the address of
883 : element zero in an array. If that is not the case then there
884 : is nothing to do. */
885 44779 : array_ref = TREE_OPERAND (def_rhs, 0);
886 44779 : if ((TREE_CODE (array_ref) != ARRAY_REF
887 4553 : || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
888 4553 : || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
889 46257 : && TREE_CODE (TREE_TYPE (array_ref)) != ARRAY_TYPE)
890 : return false;
891 :
892 22456 : rhs2 = gimple_assign_rhs2 (use_stmt);
893 : /* Optimize &x[C1] p+ C2 to &x p+ C3 with C3 = C1 * element_size + C2. */
894 22456 : if (TREE_CODE (rhs2) == INTEGER_CST)
895 : {
896 0 : tree new_rhs = build1_loc (gimple_location (use_stmt),
897 0 : ADDR_EXPR, TREE_TYPE (def_rhs),
898 0 : fold_build2 (MEM_REF,
899 : TREE_TYPE (TREE_TYPE (def_rhs)),
900 : unshare_expr (def_rhs),
901 : fold_convert (ptr_type_node,
902 : rhs2)));
903 0 : gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
904 0 : use_stmt = gsi_stmt (*use_stmt_gsi);
905 0 : update_stmt (use_stmt);
906 0 : tidy_after_forward_propagate_addr (use_stmt);
907 0 : return true;
908 : }
909 :
910 : return false;
911 : }
912 :
913 : /* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.
914 :
915 : Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
916 : Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
917 : node or for recovery of array indexing from pointer arithmetic.
918 :
919 : PARENT_SINGLE_USE_P tells if, when in a recursive invocation, NAME was
920 : the single use in the previous invocation. Pass true when calling
921 : this as toplevel.
922 :
923 : Returns true, if all uses have been propagated into. */
924 :
925 : static bool
926 3172083 : forward_propagate_addr_expr (tree name, tree rhs, bool parent_single_use_p)
927 : {
928 3172083 : bool all = true;
929 3172083 : bool single_use_p = parent_single_use_p && has_single_use (name);
930 :
931 16700329 : for (gimple *use_stmt : gather_imm_use_stmts (name))
932 : {
933 7184080 : bool result;
934 7184080 : tree use_rhs;
935 :
936 : /* If the use is not in a simple assignment statement, then
937 : there is nothing we can do. */
938 7184080 : if (!is_gimple_assign (use_stmt))
939 : {
940 4445740 : if (!is_gimple_debug (use_stmt))
941 1844660 : all = false;
942 4445740 : continue;
943 : }
944 :
945 2738340 : gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
946 2738340 : result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
947 : single_use_p);
948 : /* If the use has moved to a different statement adjust
949 : the update machinery for the old statement too. */
950 2738340 : if (use_stmt != gsi_stmt (gsi))
951 : {
952 0 : update_stmt (use_stmt);
953 0 : use_stmt = gsi_stmt (gsi);
954 : }
955 2738340 : update_stmt (use_stmt);
956 2738340 : all &= result;
957 :
958 : /* Remove intermediate now unused copy and conversion chains. */
959 2738340 : use_rhs = gimple_assign_rhs1 (use_stmt);
960 2738340 : if (result
961 1437667 : && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
962 1198341 : && TREE_CODE (use_rhs) == SSA_NAME
963 2817999 : && has_zero_uses (gimple_assign_lhs (use_stmt)))
964 : {
965 79659 : gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
966 79659 : fwprop_invalidate_lattice (gimple_get_lhs (use_stmt));
967 79659 : release_defs (use_stmt);
968 79659 : gsi_remove (&gsi, true);
969 : }
970 3172083 : }
971 :
972 3172083 : return all && has_zero_uses (name);
973 : }
974 :
975 :
976 : /* Helper function for simplify_gimple_switch. Remove case labels that
977 : have values outside the range of the new type. */
978 :
979 : static void
980 11579 : simplify_gimple_switch_label_vec (gswitch *stmt, tree index_type,
981 : vec<std::pair<int, int> > &edges_to_remove)
982 : {
983 11579 : unsigned int branch_num = gimple_switch_num_labels (stmt);
984 11579 : auto_vec<tree> labels (branch_num);
985 11579 : unsigned int i, len;
986 :
987 : /* Collect the existing case labels in a VEC, and preprocess it as if
988 : we are gimplifying a GENERIC SWITCH_EXPR. */
989 72372 : for (i = 1; i < branch_num; i++)
990 49214 : labels.quick_push (gimple_switch_label (stmt, i));
991 11579 : preprocess_case_label_vec_for_gimple (labels, index_type, NULL);
992 :
993 : /* If any labels were removed, replace the existing case labels
994 : in the GIMPLE_SWITCH statement with the correct ones.
995 : Note that the type updates were done in-place on the case labels,
996 : so we only have to replace the case labels in the GIMPLE_SWITCH
997 : if the number of labels changed. */
998 11579 : len = labels.length ();
999 11579 : if (len < branch_num - 1)
1000 : {
1001 0 : bitmap target_blocks;
1002 0 : edge_iterator ei;
1003 0 : edge e;
1004 :
1005 : /* Corner case: *all* case labels have been removed as being
1006 : out-of-range for INDEX_TYPE. Push one label and let the
1007 : CFG cleanups deal with this further. */
1008 0 : if (len == 0)
1009 : {
1010 0 : tree label, elt;
1011 :
1012 0 : label = CASE_LABEL (gimple_switch_default_label (stmt));
1013 0 : elt = build_case_label (build_int_cst (index_type, 0), NULL, label);
1014 0 : labels.quick_push (elt);
1015 0 : len = 1;
1016 : }
1017 :
1018 0 : for (i = 0; i < labels.length (); i++)
1019 0 : gimple_switch_set_label (stmt, i + 1, labels[i]);
1020 0 : for (i++ ; i < branch_num; i++)
1021 0 : gimple_switch_set_label (stmt, i, NULL_TREE);
1022 0 : gimple_switch_set_num_labels (stmt, len + 1);
1023 :
1024 : /* Cleanup any edges that are now dead. */
1025 0 : target_blocks = BITMAP_ALLOC (NULL);
1026 0 : for (i = 0; i < gimple_switch_num_labels (stmt); i++)
1027 : {
1028 0 : tree elt = gimple_switch_label (stmt, i);
1029 0 : basic_block target = label_to_block (cfun, CASE_LABEL (elt));
1030 0 : bitmap_set_bit (target_blocks, target->index);
1031 : }
1032 0 : for (ei = ei_start (gimple_bb (stmt)->succs); (e = ei_safe_edge (ei)); )
1033 : {
1034 0 : if (! bitmap_bit_p (target_blocks, e->dest->index))
1035 0 : edges_to_remove.safe_push (std::make_pair (e->src->index,
1036 0 : e->dest->index));
1037 : else
1038 0 : ei_next (&ei);
1039 : }
1040 0 : BITMAP_FREE (target_blocks);
1041 : }
1042 11579 : }
1043 :
1044 : /* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
1045 : the condition which we may be able to optimize better. */
1046 :
1047 : static bool
1048 103004 : simplify_gimple_switch (gswitch *stmt,
1049 : vec<std::pair<int, int> > &edges_to_remove,
1050 : bitmap simple_dce_worklist)
1051 : {
1052 : /* The optimization that we really care about is removing unnecessary
1053 : casts. That will let us do much better in propagating the inferred
1054 : constant at the switch target. */
1055 103004 : tree cond = gimple_switch_index (stmt);
1056 103004 : if (TREE_CODE (cond) == SSA_NAME)
1057 : {
1058 103003 : gimple *def_stmt = SSA_NAME_DEF_STMT (cond);
1059 103003 : if (gimple_assign_cast_p (def_stmt))
1060 : {
1061 12061 : tree def = gimple_assign_rhs1 (def_stmt);
1062 12061 : if (TREE_CODE (def) != SSA_NAME)
1063 : return false;
1064 :
1065 : /* If we have an extension or sign-change that preserves the
1066 : values we check against then we can copy the source value into
1067 : the switch. */
1068 12061 : tree ti = TREE_TYPE (def);
1069 12061 : if (INTEGRAL_TYPE_P (ti)
1070 12061 : && TYPE_PRECISION (ti) <= TYPE_PRECISION (TREE_TYPE (cond)))
1071 : {
1072 11816 : size_t n = gimple_switch_num_labels (stmt);
1073 11816 : tree min = NULL_TREE, max = NULL_TREE;
1074 11816 : if (n > 1)
1075 : {
1076 11816 : min = CASE_LOW (gimple_switch_label (stmt, 1));
1077 11816 : if (CASE_HIGH (gimple_switch_label (stmt, n - 1)))
1078 154 : max = CASE_HIGH (gimple_switch_label (stmt, n - 1));
1079 : else
1080 11662 : max = CASE_LOW (gimple_switch_label (stmt, n - 1));
1081 : }
1082 11816 : if ((!min || int_fits_type_p (min, ti))
1083 11812 : && (!max || int_fits_type_p (max, ti)))
1084 : {
1085 11579 : bitmap_set_bit (simple_dce_worklist,
1086 11579 : SSA_NAME_VERSION (cond));
1087 11579 : gimple_switch_set_index (stmt, def);
1088 11579 : simplify_gimple_switch_label_vec (stmt, ti,
1089 : edges_to_remove);
1090 11579 : update_stmt (stmt);
1091 11579 : return true;
1092 : }
1093 : }
1094 : }
1095 : }
1096 :
1097 : return false;
1098 : }
1099 :
1100 : /* For pointers p2 and p1 return p2 - p1 if the
1101 : difference is known and constant, otherwise return NULL. */
1102 :
1103 : static tree
1104 5234 : constant_pointer_difference (tree p1, tree p2)
1105 : {
1106 5234 : int i, j;
1107 : #define CPD_ITERATIONS 5
1108 5234 : tree exps[2][CPD_ITERATIONS];
1109 5234 : tree offs[2][CPD_ITERATIONS];
1110 5234 : int cnt[2];
1111 :
1112 15702 : for (i = 0; i < 2; i++)
1113 : {
1114 10468 : tree p = i ? p1 : p2;
1115 10468 : tree off = size_zero_node;
1116 10468 : gimple *stmt;
1117 10468 : enum tree_code code;
1118 :
1119 : /* For each of p1 and p2 we need to iterate at least
1120 : twice, to handle ADDR_EXPR directly in p1/p2,
1121 : SSA_NAME with ADDR_EXPR or POINTER_PLUS_EXPR etc.
1122 : on definition's stmt RHS. Iterate a few extra times. */
1123 10468 : j = 0;
1124 12412 : do
1125 : {
1126 12412 : if (!POINTER_TYPE_P (TREE_TYPE (p)))
1127 : break;
1128 12406 : if (TREE_CODE (p) == ADDR_EXPR)
1129 : {
1130 8988 : tree q = TREE_OPERAND (p, 0);
1131 8988 : poly_int64 offset;
1132 8988 : tree base = get_addr_base_and_unit_offset (q, &offset);
1133 8988 : if (base)
1134 : {
1135 8168 : q = base;
1136 8168 : if (maybe_ne (offset, 0))
1137 3407 : off = size_binop (PLUS_EXPR, off, size_int (offset));
1138 : }
1139 8988 : if (TREE_CODE (q) == MEM_REF
1140 8988 : && TREE_CODE (TREE_OPERAND (q, 0)) == SSA_NAME)
1141 : {
1142 213 : p = TREE_OPERAND (q, 0);
1143 213 : off = size_binop (PLUS_EXPR, off,
1144 : wide_int_to_tree (sizetype,
1145 : mem_ref_offset (q)));
1146 : }
1147 : else
1148 : {
1149 8775 : exps[i][j] = q;
1150 8775 : offs[i][j++] = off;
1151 8775 : break;
1152 : }
1153 : }
1154 3631 : if (TREE_CODE (p) != SSA_NAME)
1155 : break;
1156 3631 : exps[i][j] = p;
1157 3631 : offs[i][j++] = off;
1158 3631 : if (j == CPD_ITERATIONS)
1159 : break;
1160 3631 : stmt = SSA_NAME_DEF_STMT (p);
1161 3631 : if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != p)
1162 : break;
1163 2852 : code = gimple_assign_rhs_code (stmt);
1164 2852 : if (code == POINTER_PLUS_EXPR)
1165 : {
1166 1486 : if (TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)
1167 : break;
1168 955 : off = size_binop (PLUS_EXPR, off, gimple_assign_rhs2 (stmt));
1169 955 : p = gimple_assign_rhs1 (stmt);
1170 : }
1171 1366 : else if (code == ADDR_EXPR || CONVERT_EXPR_CODE_P (code))
1172 989 : p = gimple_assign_rhs1 (stmt);
1173 : else
1174 : break;
1175 : }
1176 : while (1);
1177 10468 : cnt[i] = j;
1178 : }
1179 :
1180 7316 : for (i = 0; i < cnt[0]; i++)
1181 9755 : for (j = 0; j < cnt[1]; j++)
1182 7673 : if (exps[0][i] == exps[1][j])
1183 4344 : return size_binop (MINUS_EXPR, offs[0][i], offs[1][j]);
1184 :
1185 : return NULL_TREE;
1186 : }
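
/* A small illustrative (hypothetical) example for
   constant_pointer_difference: assuming BUF is a char array at offset 0
   within S, for

     p1_1 = &s.buf[4];
     p2_2 = p1_1 p+ 8;

   the walks over the two pointers meet at a common expression (P1_1, or
   the underlying object S) with accumulated byte offsets differing by 8,
   so 8 (= P2 - P1) is returned.  */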
1187 :
1188 : /* Helper function for optimize_aggr_zeroprop.
1189 : Propagates the zeroing (memset, VAL) that was done in DEST+OFFSET:LEN
1190 : (DEFSTMT) into the STMT. Does nothing if STMT cannot be updated. */
1191 : static void
1192 21967880 : optimize_aggr_zeroprop_1 (gimple *defstmt, gimple *stmt,
1193 : tree dest, poly_int64 offset, tree val,
1194 : poly_offset_int len)
1195 : {
1196 21967880 : tree src2;
1197 21967880 : tree len2 = NULL_TREE;
1198 21967880 : poly_int64 offset2;
1199 :
1200 21967880 : if (gimple_call_builtin_p (stmt, BUILT_IN_MEMCPY)
1201 19199 : && TREE_CODE (gimple_call_arg (stmt, 1)) == ADDR_EXPR
1202 21981557 : && poly_int_tree_p (gimple_call_arg (stmt, 2)))
1203 : {
1204 12659 : src2 = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
1205 12659 : len2 = gimple_call_arg (stmt, 2);
1206 : }
1207 21955221 : else if (gimple_assign_load_p (stmt) && gimple_store_p (stmt))
1208 : {
1209 1864182 : src2 = gimple_assign_rhs1 (stmt);
1210 1864182 : len2 = (TREE_CODE (src2) == COMPONENT_REF
1211 1864182 : ? DECL_SIZE_UNIT (TREE_OPERAND (src2, 1))
1212 1696885 : : TYPE_SIZE_UNIT (TREE_TYPE (src2)));
1213 : /* Can only handle zero memsets. */
1214 1864182 : if (!integer_zerop (val))
1215 21946369 : return;
1216 : }
1217 : else
1218 20091039 : return;
1219 :
1220 1875879 : if (len2 == NULL_TREE
1221 1875879 : || !poly_int_tree_p (len2))
1222 : return;
1223 :
1224 1875879 : src2 = get_addr_base_and_unit_offset (src2, &offset2);
1225 1875879 : if (src2 == NULL_TREE
1226 1875879 : || maybe_lt (offset2, offset))
1227 : return;
1228 :
1229 857255 : if (!operand_equal_p (dest, src2, 0))
1230 : return;
1231 :
1232 : /* [ dest + offset, dest + offset + len - 1 ] is set to val.
1233 : Make sure that
1234 : [ dest + offset2, dest + offset2 + len2 - 1 ] is a subset of that. */
1235 130691 : if (maybe_gt (wi::to_poly_offset (len2) + (offset2 - offset),
1236 : len))
1237 : return;
1238 :
1239 21511 : if (dump_file && (dump_flags & TDF_DETAILS))
1240 : {
1241 32 : fprintf (dump_file, "Simplified\n ");
1242 32 : print_gimple_stmt (dump_file, stmt, 0, dump_flags);
1243 32 : fprintf (dump_file, "after previous\n ");
1244 32 : print_gimple_stmt (dump_file, defstmt, 0, dump_flags);
1245 : }
1246 21511 : gimple *orig_stmt = stmt;
1247 : /* For simplicity, don't change the kind of the stmt,
1248 : turn dest = src; into dest = {}; and memcpy (&dest, &src, len);
1249 : into memset (&dest, val, len);
1250 : In theory we could change dest = src into memset if dest
1251 : is addressable (maybe beneficial if val is not 0), or
1252 : memcpy (&dest, &src, len) into dest = {} if len is the size
1253 : of dest, dest isn't volatile. */
1254 21511 : if (is_gimple_assign (stmt))
1255 : {
1256 21506 : tree ctor_type = TREE_TYPE (gimple_assign_lhs (stmt));
1257 21506 : tree ctor = build_constructor (ctor_type, NULL);
1258 21506 : gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1259 21506 : gimple_assign_set_rhs_from_tree (&gsi, ctor);
1260 21506 : update_stmt (stmt);
1261 21506 : statistics_counter_event (cfun, "copy zeroing propagation of aggregate", 1);
1262 : }
1263 : else /* If stmt is memcpy, transform it into memset. */
1264 : {
1265 5 : gcall *call = as_a <gcall *> (stmt);
1266 5 : tree fndecl = builtin_decl_implicit (BUILT_IN_MEMSET);
1267 5 : gimple_call_set_fndecl (call, fndecl);
1268 5 : gimple_call_set_fntype (call, TREE_TYPE (fndecl));
1269 5 : gimple_call_set_arg (call, 1, val);
1270 5 : update_stmt (stmt);
1271 5 : statistics_counter_event (cfun, "memcpy to memset changed", 1);
1272 : }
1273 :
1274 21511 : if (dump_file && (dump_flags & TDF_DETAILS))
1275 : {
1276 32 : fprintf (dump_file, "into\n ");
1277 32 : print_gimple_stmt (dump_file, stmt, 0, dump_flags);
1278 : }
1279 :
1280 : /* Mark the bb for eh cleanup if needed. */
1281 21511 : if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
1282 6 : bitmap_set_bit (to_purge, gimple_bb (stmt)->index);
1283 : }
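
/* An illustrative (hypothetical) example of what optimize_aggr_zeroprop_1
   achieves: after

     memset (&b, 5, 32);
     memcpy (&a, &b, 32);

   the copy only reads bytes known to hold the value 5, so the memcpy is
   rewritten into memset (&a, 5, 32).  For the aggregate-assignment form
   (a = b;) only a zeroing definition is handled, turning the copy into
   a = {};.  */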
1284 :
1285 : /* Optimize
1286 : a = {}; // DEST = value ;; LEN(nullptr)
1287 : b = a;
1288 : into
1289 : a = {};
1290 : b = {};
1291 : Similarly for memset (&a, ..., sizeof (a)); instead of a = {};
1292 : and/or memcpy (&b, &a, sizeof (a)); instead of b = a; */
1293 :
1294 : static void
1295 30316220 : optimize_aggr_zeroprop (gimple *stmt, bool full_walk)
1296 : {
1297 30316220 : ao_ref read;
1298 60632440 : if (gimple_has_volatile_ops (stmt))
1299 26374878 : return;
1300 :
1301 29391153 : tree dest = NULL_TREE;
1302 29391153 : tree val = integer_zero_node;
1303 29391153 : tree len = NULL_TREE;
1304 29391153 : bool can_use_tbba = true;
1305 :
1306 29391153 : if (gimple_call_builtin_p (stmt, BUILT_IN_MEMSET)
1307 109302 : && TREE_CODE (gimple_call_arg (stmt, 0)) == ADDR_EXPR
1308 55476 : && TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST
1309 29444337 : && poly_int_tree_p (gimple_call_arg (stmt, 2)))
1310 : {
1311 50418 : dest = TREE_OPERAND (gimple_call_arg (stmt, 0), 0);
1312 50418 : len = gimple_call_arg (stmt, 2);
1313 50418 : val = gimple_call_arg (stmt, 1);
1314 50418 : ao_ref_init_from_ptr_and_size (&read, gimple_call_arg (stmt, 0), len);
1315 50418 : can_use_tbba = false;
1316 : }
1317 29340735 : else if (gimple_store_p (stmt)
1318 29281691 : && gimple_assign_single_p (stmt)
1319 58622426 : && TREE_CODE (gimple_assign_rhs1 (stmt)) == STRING_CST)
1320 : {
1321 33501 : tree str = gimple_assign_rhs1 (stmt);
1322 33501 : dest = gimple_assign_lhs (stmt);
1323 33501 : ao_ref_init (&read, dest);
1324 : /* The string must contain all null char's for now. */
1325 38769 : for (int i = 0; i < TREE_STRING_LENGTH (str); i++)
1326 : {
1327 36252 : if (TREE_STRING_POINTER (str)[i] != 0)
1328 : {
1329 : dest = NULL_TREE;
1330 : break;
1331 : }
1332 : }
1333 : }
1334 : /* A store of integer (scalar, vector or complex) zeros is
1335 : a zero store. */
1336 29307234 : else if (gimple_store_p (stmt)
1337 29248190 : && gimple_assign_single_p (stmt)
1338 58555424 : && integer_zerop (gimple_assign_rhs1 (stmt)))
1339 : {
1340 3484275 : tree rhs = gimple_assign_rhs1 (stmt);
1341 3484275 : tree type = TREE_TYPE (rhs);
1342 3484275 : dest = gimple_assign_lhs (stmt);
1343 3484275 : ao_ref_init (&read, dest);
1344 : /* For integral types, the type precision needs to be a multiply of BITS_PER_UNIT. */
1345 3484275 : if (INTEGRAL_TYPE_P (type)
1346 3484275 : && (TYPE_PRECISION (type) % BITS_PER_UNIT) != 0)
1347 : dest = NULL_TREE;
1348 : }
1349 25822959 : else if (gimple_store_p (stmt)
1350 25763915 : && gimple_assign_single_p (stmt)
1351 25763915 : && TREE_CODE (gimple_assign_rhs1 (stmt)) == CONSTRUCTOR
1352 26516097 : && !gimple_clobber_p (stmt))
1353 : {
1354 693138 : dest = gimple_assign_lhs (stmt);
1355 693138 : ao_ref_init (&read, dest);
1356 : }
1357 :
1358 4057968 : if (dest == NULL_TREE)
1359 25364169 : return;
1360 :
1361 4026984 : if (len == NULL_TREE)
1362 3976566 : len = (TREE_CODE (dest) == COMPONENT_REF
1363 3976566 : ? DECL_SIZE_UNIT (TREE_OPERAND (dest, 1))
1364 1731456 : : TYPE_SIZE_UNIT (TREE_TYPE (dest)));
1365 3976566 : if (len == NULL_TREE
1366 4026984 : || !poly_int_tree_p (len))
1367 : return;
1368 :
1369 : /* Sometimes memset can have no vdef due to invalid declaration of memset (const, etc.). */
1370 34428822 : if (!gimple_vdef (stmt))
1371 : return;
1372 :
1373 : /* This store needs to be on the byte boundary and pointing to an object. */
1374 4026960 : poly_int64 offset;
1375 4026960 : tree dest_base = get_addr_base_and_unit_offset (dest, &offset);
1376 4026960 : if (dest_base == NULL_TREE)
1377 : return;
1378 :
1379 : /* Setup the worklist. */
1380 3941342 : auto_vec<std::pair<tree, unsigned>> worklist;
1381 3941342 : unsigned limit = full_walk ? param_sccvn_max_alias_queries_per_access : 0;
1382 7882684 : worklist.safe_push (std::make_pair (gimple_vdef (stmt), limit));
1383 :
1384 27440011 : while (!worklist.is_empty ())
1385 : {
1386 19557327 : std::pair<tree, unsigned> top = worklist.pop ();
1387 19557327 : tree vdef = top.first;
1388 19557327 : limit = top.second;
1389 19557327 : gimple *use_stmt;
1390 19557327 : imm_use_iterator iter;
1391 63047553 : FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
1392 : {
1393 : /* Handling PHI nodes might not be worth it so don't. */
1394 23932899 : if (is_a <gphi*> (use_stmt))
1395 1965019 : continue;
1396 :
1397 : /* If this statement does not clobber add the vdef stmt to the
1398 : worklist.
1399 : After hitting the limit, allow clobbers to able to pass through. */
1400 1990677 : if ((limit != 0 || gimple_clobber_p (use_stmt))
1401 20012136 : && gimple_vdef (use_stmt)
1402 38977500 : && !stmt_may_clobber_ref_p_1 (use_stmt, &read,
1403 : /* tbaa_p = */ can_use_tbba))
1404 : {
1405 15615985 : unsigned new_limit = limit == 0 ? 0 : limit - 1;
1406 31231970 : worklist.safe_push (std::make_pair (gimple_vdef (use_stmt),
1407 : new_limit));
1408 : }
1409 :
1410 21967880 : optimize_aggr_zeroprop_1 (stmt, use_stmt, dest_base, offset,
1411 21967880 : val, wi::to_poly_offset (len));
1412 19557327 : }
1413 : }
1414 :
1415 3941342 : }
1416 :
1417 : /* Returns the pointer to the base of the object of the
1418 : reference EXPR and extracts the information about
1419 : the offset of the access, storing it to PBYTESIZE,
1420 : PBYTEPOS and PREVERSEP.
1421 : If the access is not a byte sized or position is not
1422 : on the byte, return NULL. */
1423 : static tree
1424 5164020 : split_core_and_offset_size (tree expr,
1425 : poly_int64 *pbytesize, poly_int64 *pbytepos,
1426 : tree *poffset, int *preversep)
1427 : {
1428 5164020 : tree core;
1429 5164020 : machine_mode mode;
1430 5164020 : int unsignedp, volatilep;
1431 5164020 : poly_int64 bitsize;
1432 5164020 : poly_int64 bitpos;
1433 5164020 : location_t loc = EXPR_LOCATION (expr);
1434 :
1435 5164020 : core = get_inner_reference (expr, &bitsize, &bitpos,
1436 : poffset, &mode, &unsignedp, preversep,
1437 : &volatilep);
1438 10328040 : if (!multiple_p (bitsize, BITS_PER_UNIT, pbytesize))
1439 : return NULL_TREE;
1440 5164020 : if (!multiple_p (bitpos, BITS_PER_UNIT, pbytepos))
1441 : return NULL_TREE;
1442 : /* If we are left with MEM[a + CST] strip that and add it to the
1443 : pbytepos and return a. */
1444 5164020 : if (TREE_CODE (core) == MEM_REF)
1445 : {
1446 1189133 : poly_offset_int tem;
1447 1189133 : tem = wi::to_poly_offset (TREE_OPERAND (core, 1));
1448 1189133 : tem += *pbytepos;
1449 1189133 : if (tem.to_shwi (pbytepos))
1450 1187207 : return TREE_OPERAND (core, 0);
1451 : }
1452 3976813 : core = build_fold_addr_expr_loc (loc, core);
1453 3976813 : STRIP_NOPS (core);
1454 3976813 : return core;
1455 : }
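
/* A short illustrative (hypothetical) example for
   split_core_and_offset_size: for

     struct S { int a; int b[4]; } s;

   and EXPR being s.b[2] on a target with 4-byte int, the function
   returns &s with *PBYTEPOS = 12 and *PBYTESIZE = 4.  An access whose
   size or position is not a whole number of bytes (e.g. a bit-field
   read) makes it return NULL_TREE instead.  */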
1456 :
1457 : /* Returns a new src based on the
1458 : copy `DEST = SRC` and for the old SRC2.
1459 : Returns null if SRC2 is not related to DEST. */
1460 :
1461 : static tree
1462 1193453 : new_src_based_on_copy (tree src2, tree dest, tree src)
1463 : {
1464 : /* If the second src is not exactly the same as dest,
1465 : try to handle it separately; see if it is address/size equivalent.
1466 : Handles `a` and `a.b` and `MEM<char[N]>(&a)` which all have
1467 : the same size and offsets as address/size equivalent.
1468 : This allows copying over a memcpy and also one for copying
1469 : where one field is the same size as the whole struct. */
1470 1193453 : if (operand_equal_p (dest, src2))
1471 : return src;
1472 : /* if both dest and src2 are decls, then we know these 2
1473 : accesses can't be the same. */
1474 697124 : if (DECL_P (dest) && DECL_P (src2))
1475 : return NULL_TREE;
1476 : /* A VCE can't be used with imag/real or BFR so reject them early. */
1477 366824 : if (TREE_CODE (src) == IMAGPART_EXPR
1478 366824 : || TREE_CODE (src) == REALPART_EXPR
1479 366824 : || TREE_CODE (src) == BIT_FIELD_REF)
1480 : return NULL_TREE;
1481 366824 : tree core1, core2;
1482 366824 : poly_int64 bytepos1, bytepos2;
1483 366824 : poly_int64 bytesize1, bytesize2;
1484 366824 : tree toffset1, toffset2;
1485 366824 : int reversep1 = 0;
1486 366824 : int reversep2 = 0;
1487 366824 : poly_int64 diff = 0;
1488 366824 : core1 = split_core_and_offset_size (dest, &bytesize1, &bytepos1,
1489 : &toffset1, &reversep1);
1490 366824 : core2 = split_core_and_offset_size (src2, &bytesize2, &bytepos2,
1491 : &toffset2, &reversep2);
1492 366824 : if (!core1 || !core2)
1493 : return NULL_TREE;
1494 366824 : if (reversep1 != reversep2)
1495 : return NULL_TREE;
1496 : /* The sizes of the 2 accesses need to be the same. */
1497 366824 : if (!known_eq (bytesize1, bytesize2))
1498 : return NULL_TREE;
1499 159838 : if (!operand_equal_p (core1, core2, 0))
1500 : return NULL_TREE;
1501 :
1502 22264 : if (toffset1 && toffset2)
1503 : {
1504 2 : tree type = TREE_TYPE (toffset1);
1505 2 : if (type != TREE_TYPE (toffset2))
1506 0 : toffset2 = fold_convert (type, toffset2);
1507 :
1508 2 : tree tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
1509 2 : if (!cst_and_fits_in_hwi (tdiff))
1510 : return NULL_TREE;
1511 :
1512 0 : diff = int_cst_value (tdiff);
1513 0 : }
1514 22262 : else if (toffset1 || toffset2)
1515 : {
1516 : /* If only one of the offsets is non-constant, the difference cannot
1517 : be a constant. */
1518 : return NULL_TREE;
1519 : }
1520 22230 : diff += bytepos1 - bytepos2;
1521 : /* The offset between the 2 needs to be 0. */
1522 22230 : if (!known_eq (diff, 0))
1523 : return NULL_TREE;
1524 21519 : return fold_build1 (VIEW_CONVERT_EXPR,TREE_TYPE (src2), src);
1525 : }
1526 :
1527 : /* Returns true if SRC and DEST refer to the same address and size
1528 : such that `DEST = SRC;` is considered a nop. This is more than an
1529 : operand_equal_p check as it needs to be similar to
1530 : new_src_based_on_copy. */
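     : /* E.g. (an illustrative case): for a struct whose only field `f'
     :    spans the whole object, a copy between `a' and `a.f' writes
     :    exactly the bytes it reads, so it is treated as a nop even
     :    though operand_equal_p would not consider `a' and `a.f' equal. */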
1531 :
1532 : static bool
1533 4247089 : same_for_assignment (tree src, tree dest)
1534 : {
1535 4247089 : if (operand_equal_p (dest, src, 0))
1536 : return true;
1537 : /* If both dest and src are decls (they were not equal above), then
1538 : we know these 2 accesses can't be the same. */
1539 4244173 : if (DECL_P (dest) && DECL_P (src))
1540 : return false;
1541 :
1542 2215186 : tree core1, core2;
1543 2215186 : poly_int64 bytepos1, bytepos2;
1544 2215186 : poly_int64 bytesize1, bytesize2;
1545 2215186 : tree toffset1, toffset2;
1546 2215186 : int reversep1 = 0;
1547 2215186 : int reversep2 = 0;
1548 2215186 : poly_int64 diff = 0;
1549 2215186 : core1 = split_core_and_offset_size (dest, &bytesize1, &bytepos1,
1550 : &toffset1, &reversep1);
1551 2215186 : core2 = split_core_and_offset_size (src, &bytesize2, &bytepos2,
1552 : &toffset2, &reversep2);
1553 2215186 : if (!core1 || !core2)
1554 : return false;
1555 2215186 : if (reversep1 != reversep2)
1556 : return false;
1557 : /* The sizes of the 2 accesses need to be the same. */
1558 2215186 : if (!known_eq (bytesize1, bytesize2))
1559 : return false;
1560 2214265 : if (!operand_equal_p (core1, core2, 0))
1561 : return false;
1562 6026 : if (toffset1 && toffset2)
1563 : {
1564 313 : tree type = TREE_TYPE (toffset1);
1565 313 : if (type != TREE_TYPE (toffset2))
1566 0 : toffset2 = fold_convert (type, toffset2);
1567 :
1568 313 : tree tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
1569 313 : if (!cst_and_fits_in_hwi (tdiff))
1570 : return false;
1571 :
1572 0 : diff = int_cst_value (tdiff);
1573 0 : }
1574 5713 : else if (toffset1 || toffset2)
1575 : {
1576 : /* If only one of the offsets is non-constant, the difference cannot
1577 : be a constant. */
1578 : return false;
1579 : }
1580 5713 : diff += bytepos1 - bytepos2;
1581 : /* The offset between the 2 needs to be 0. */
1582 5713 : if (!known_eq (diff, 0))
1583 : return false;
1584 : return true;
1585 : }
1586 :
1587 : /* Helper function for optimize_agr_copyprop.
1588 : For the aggregate copy in USE_STMT, see if its rhs reads DEST
1589 : and if so replace that read with SRC. */
1590 : static void
1591 965811 : optimize_agr_copyprop_1 (gimple *stmt, gimple *use_stmt,
1592 : tree dest, tree src)
1593 : {
1594 965811 : gcc_assert (gimple_assign_load_p (use_stmt)
1595 : && gimple_store_p (use_stmt));
1596 1931622 : if (gimple_has_volatile_ops (use_stmt))
1597 593523 : return;
1598 965810 : tree dest2 = gimple_assign_lhs (use_stmt);
1599 965810 : tree src2 = gimple_assign_rhs1 (use_stmt);
1600 : /* If the new store is `src2 = src2;` skip over it. */
1601 965810 : if (same_for_assignment (src2, dest2))
1602 : return;
1603 965247 : src = new_src_based_on_copy (src2, dest, src);
1604 965247 : if (!src)
1605 : return;
1606 : /* For 2 memory references and using a temporary to do the copy,
1607 : don't remove the temporary as the 2 memory references might overlap.
1608 : Note t does not need to be a decl as it could be a field.
1609 : See PR 22237 for full details.
1610 : E.g.
1611 : t = *a; #DEST = SRC;
1612 : *b = t; #DEST2 = SRC2;
1613 : Cannot be converted into
1614 : t = *a;
1615 : *b = *a;
1616 : Though the following is allowed to be done:
1617 : t = *a;
1618 : *a = t;
1619 : And convert it into:
1620 : t = *a;
1621 : *a = *a;
1622 : */
1623 400395 : if (!operand_equal_p (dest2, src, 0)
1624 400395 : && !DECL_P (dest2) && !DECL_P (src))
1625 : {
1626 : /* If *a and *b have the same base, see if
1627 : the offset between the two is greater than
1628 : or equal to the size of the type. */
1629 31261 : poly_int64 offset1, offset2;
1630 31261 : tree len = TYPE_SIZE_UNIT (TREE_TYPE (src));
1631 31261 : if (len == NULL_TREE
1632 31261 : || !tree_fits_poly_int64_p (len))
1633 28107 : return;
1634 31261 : tree base1 = get_addr_base_and_unit_offset (dest2, &offset1);
1635 31261 : tree base2 = get_addr_base_and_unit_offset (src, &offset2);
1636 31261 : poly_int64 size = tree_to_poly_int64 (len);
1637 : /* If the bases are 2 different decls,
1638 : then there can be no overlapping. */
1639 31261 : if (base1 && base2
1640 30456 : && DECL_P (base1) && DECL_P (base2)
1641 1806 : && base1 != base2)
1642 : ;
1643 : /* If we can't figure out the base or the bases are
1644 : not equal then fall back to an alignment check. */
1645 29679 : else if (!base1
1646 29679 : || !base2
1647 29679 : || !operand_equal_p (base1, base2))
1648 : {
1649 29312 : unsigned int align1 = get_object_alignment (src);
1650 29312 : unsigned int align2 = get_object_alignment (dest2);
1651 29312 : align1 /= BITS_PER_UNIT;
1652 29312 : align2 /= BITS_PER_UNIT;
1653 : /* If the alignment of either object is less
1654 : than the size then there is a possibility
1655 : of overlapping. */
1656 29312 : if (maybe_lt (align1, size)
1657 29312 : || maybe_lt (align2, size))
1658 28107 : return;
1659 : }
1660 : /* Make sure [offset1, offset1 + len - 1] does
1661 : not overlap with [offset2, offset2 + len - 1];
1662 : it is ok if they are at the same location though. */
1663 367 : else if (ranges_maybe_overlap_p (offset1, size, offset2, size)
1664 367 : && !known_eq (offset2, offset1))
1665 : return;
1666 : }
1667 :
1668 372288 : if (dump_file && (dump_flags & TDF_DETAILS))
1669 : {
1670 11 : fprintf (dump_file, "Simplified\n ");
1671 11 : print_gimple_stmt (dump_file, use_stmt, 0, dump_flags);
1672 11 : fprintf (dump_file, "after previous\n ");
1673 11 : print_gimple_stmt (dump_file, stmt, 0, dump_flags);
1674 : }
1675 372288 : gimple *orig_stmt = use_stmt;
1676 372288 : gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
1677 372288 : gimple_assign_set_rhs_from_tree (&gsi, unshare_expr (src));
1678 372288 : update_stmt (use_stmt);
1679 :
1680 372288 : if (dump_file && (dump_flags & TDF_DETAILS))
1681 : {
1682 11 : fprintf (dump_file, "into\n ");
1683 11 : print_gimple_stmt (dump_file, use_stmt, 0, dump_flags);
1684 : }
1685 372288 : if (maybe_clean_or_replace_eh_stmt (orig_stmt, use_stmt))
1686 0 : bitmap_set_bit (to_purge, gimple_bb (stmt)->index);
1687 372288 : statistics_counter_event (cfun, "copy prop for aggregate", 1);
1688 : }
1689 :
1690 : /* Helper function for optimize_agr_copyprop; propagate aggregates
1691 : into the arguments of CALL if an argument matches DEST,
1692 : replacing it with SRC. */
1693 : static void
1694 678890 : optimize_agr_copyprop_arg (gimple *defstmt, gcall *call,
1695 : tree dest, tree src)
1696 : {
1697 678890 : bool changed = false;
1698 2256918 : for (unsigned arg = 0; arg < gimple_call_num_args (call); arg++)
1699 : {
1700 1578028 : tree *argptr = gimple_call_arg_ptr (call, arg);
1701 2973603 : if (TREE_CODE (*argptr) == SSA_NAME
1702 905909 : || is_gimple_min_invariant (*argptr)
1703 1760481 : || TYPE_VOLATILE (TREE_TYPE (*argptr)))
1704 1395575 : continue;
1705 182453 : tree newsrc = new_src_based_on_copy (*argptr, dest, src);
1706 182453 : if (!newsrc)
1707 110674 : continue;
1708 :
1709 71779 : if (dump_file && (dump_flags & TDF_DETAILS))
1710 : {
1711 9 : fprintf (dump_file, "Simplified\n ");
1712 9 : print_gimple_stmt (dump_file, call, 0, dump_flags);
1713 9 : fprintf (dump_file, "after previous\n ");
1714 9 : print_gimple_stmt (dump_file, defstmt, 0, dump_flags);
1715 : }
1716 71779 : *argptr = unshare_expr (newsrc);
1717 71779 : changed = true;
1718 71779 : if (dump_file && (dump_flags & TDF_DETAILS))
1719 : {
1720 9 : fprintf (dump_file, "into\n ");
1721 9 : print_gimple_stmt (dump_file, call, 0, dump_flags);
1722 : }
1723 : }
1724 678890 : if (changed)
1725 71609 : update_stmt (call);
1726 678890 : }
1727 :
1728 : /* Helper function for optimize_agr_copyprop; propagate aggregates
1729 : into the return stmt USE if the operand of the return matches DEST,
1730 : replacing it with SRC. */
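     : /* A sketch of the transform:
     :      d = s;
     :      return d;
     :    becomes
     :      d = s;
     :      return s;
     :    when `s' is a local (non-global) VAR_DECL covering the same
     :    bytes as `d'. */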
1731 : static void
1732 117924 : optimize_agr_copyprop_return (gimple *defstmt, greturn *use,
1733 : tree dest, tree src)
1734 : {
1735 117924 : tree rvalue = gimple_return_retval (use);
1736 117924 : if (!rvalue
1737 75485 : || TREE_CODE (rvalue) == SSA_NAME
1738 67162 : || is_gimple_min_invariant (rvalue)
1739 184691 : || TYPE_VOLATILE (TREE_TYPE (rvalue)))
1740 51158 : return;
1741 :
1742 : /* `return <retval>;` is already the best it could be.
1743 : Likewise `return *<retval>_N(D)`. */
1744 66766 : if (TREE_CODE (rvalue) == RESULT_DECL
1745 66766 : || (TREE_CODE (rvalue) == MEM_REF
1746 0 : && TREE_CODE (TREE_OPERAND (rvalue, 0)) == SSA_NAME
1747 0 : && TREE_CODE (SSA_NAME_VAR (TREE_OPERAND (rvalue, 0)))
1748 : == RESULT_DECL))
1749 : return;
1750 45753 : tree newsrc = new_src_based_on_copy (rvalue, dest, src);
1751 45753 : if (!newsrc)
1752 : /* Currently only non-global vars are supported.
1753 : See PR 124099 on enumtls not supporting expansion for GIMPLE_RETURN.
1754 : FIXME: could support VCEs too? */
1755 : FIXME: could support VCEs too? */
1756 45674 : if (!VAR_P (newsrc) || is_global_var (newsrc))
1757 : return;
1758 20934 : if (dump_file && (dump_flags & TDF_DETAILS))
1759 : {
1760 1 : fprintf (dump_file, "Simplified\n ");
1761 1 : print_gimple_stmt (dump_file, use, 0, dump_flags);
1762 1 : fprintf (dump_file, "after previous\n ");
1763 1 : print_gimple_stmt (dump_file, defstmt, 0, dump_flags);
1764 : }
1765 20934 : gimple_return_set_retval (use, newsrc);
1766 20934 : if (dump_file && (dump_flags & TDF_DETAILS))
1767 : {
1768 1 : fprintf (dump_file, "into\n ");
1769 1 : print_gimple_stmt (dump_file, use, 0, dump_flags);
1770 : }
1771 20934 : update_stmt (use);
1772 : }
1773 :
1774 : /* Optimizes
1775 : DEST = SRC;
1776 : DEST2 = DEST; # DEST2 = SRC2;
1777 : into
1778 : DEST = SRC;
1779 : DEST2 = SRC;
1780 : STMT is the first statement and SRC is the common operand
1781 : between the statements.
1782 :
1783 : Also optimizes:
1784 : DEST = SRC;
1785 : call_func(..., DEST, ...);
1786 : into:
1787 : DEST = SRC;
1788 : call_func(..., SRC, ...);
1789 :
1790 : */
1791 : static void
1792 3690061 : optimize_agr_copyprop (gimple *stmt)
1793 : {
1794 7380122 : if (gimple_has_volatile_ops (stmt))
1795 411378 : return;
1796 :
1797 : /* Can't prop if the statement could throw. */
1798 3688922 : if (stmt_could_throw_p (cfun, stmt))
1799 : return;
1800 :
1801 3281279 : tree dest = gimple_assign_lhs (stmt);
1802 3281279 : tree src = gimple_assign_rhs1 (stmt);
1803 : /* If the statement is `src = src;` then ignore it. */
1804 3281279 : if (same_for_assignment (dest, src))
1805 : return;
1806 :
1807 3278683 : tree vdef = gimple_vdef (stmt);
1808 3278683 : imm_use_iterator iter;
1809 3278683 : gimple *use_stmt;
1810 12761030 : FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
1811 : {
1812 6203664 : if (gimple_assign_load_p (use_stmt)
1813 6203664 : && gimple_store_p (use_stmt))
1814 965811 : optimize_agr_copyprop_1 (stmt, use_stmt, dest, src);
1815 5237853 : else if (is_gimple_call (use_stmt))
1816 678890 : optimize_agr_copyprop_arg (stmt, as_a<gcall*>(use_stmt), dest, src);
1817 4558963 : else if (is_a<greturn*> (use_stmt))
1818 117924 : optimize_agr_copyprop_return (stmt, as_a<greturn*>(use_stmt), dest, src);
1819 3278683 : }
1820 : }
1821 :
1822 : /* Simple DSE of the lhs from a clobber STMT.
1823 : This is used mostly to clean up from optimize_agr_copyprop and
1824 : to remove (exactly one) extra copy that might later on confuse SRA.
1825 : An example is:
1826 : ;; write to a and such.
1827 : b = a; // This statement is to be removed
1828 : b = {CLOBBER};
1829 : SRA would totally scalarize b (and therefore also a) here because of the
1830 : extra copy, which is not desirable. So removing the copy allows
1831 : SRA to postpone the scalarization of a, or avoid it entirely.
1832 : */
1833 : static void
1834 6765836 : do_simple_agr_dse (gassign *stmt, bool full_walk)
1835 : {
1836 : /* Don't do this while in -Og as we want to keep around the copy
1837 : for debuggability. */
1838 6765836 : if (optimize_debug)
1839 4716635 : return;
1840 6762443 : ao_ref read;
1841 6762443 : basic_block bb = gimple_bb (stmt);
1842 6762443 : tree lhs = gimple_assign_lhs (stmt);
1843 : /* Only handle clobbers of a full decl. */
1844 6762443 : if (!DECL_P (lhs))
1845 : return;
1846 6069656 : ao_ref_init (&read, lhs);
1847 6069656 : tree vuse = gimple_vuse (stmt);
1848 6069656 : unsigned limit = full_walk ? param_sccvn_max_alias_queries_per_access : 4;
1849 15456687 : while (limit)
1850 : {
1851 15444102 : gimple *ostmt = SSA_NAME_DEF_STMT (vuse);
1852 : /* Don't handle phis; just consider the walk done. */
1853 15444102 : if (is_a<gphi*>(ostmt) || gimple_nop_p (ostmt))
1854 : break;
1855 13407486 : basic_block obb = gimple_bb (ostmt);
1856 : /* If the defining statement does not dominate the clobber,
1857 : then it is not "simple" to detect whether the definition is fully clobbered. */
1858 13407486 : if (obb != bb && !dominated_by_p (CDI_DOMINATORS, bb, obb))
1859 4020455 : return;
1860 13407486 : gimple *use_stmt;
1861 13407486 : imm_use_iterator iter;
1862 54217890 : FOR_EACH_IMM_USE_STMT (use_stmt, iter, gimple_vdef (ostmt))
1863 : {
1864 15783251 : basic_block ubb = gimple_bb (use_stmt);
1865 15783251 : if (stmt == use_stmt)
1866 4631005 : continue;
1867 : /* If the use is a clobber for lhs,
1868 : then it can be safely skipped; this happens with eh
1869 : and sometimes jump threading. */
1870 11152246 : if (gimple_clobber_p (use_stmt)
1871 11152246 : && lhs == gimple_assign_lhs (use_stmt))
1872 : /* If the use is a phi with a single use, then check if that single use
1873 : is a clobber of the same lhs. */
1874 : is a clobber and lhs is the same. */
1875 10993768 : if (gphi *use_phi = dyn_cast<gphi*>(use_stmt))
1876 : {
1877 320548 : use_operand_p ou;
1878 320548 : gimple *ostmt;
1879 320548 : if (single_imm_use (gimple_phi_result (use_phi), &ou, &ostmt)
1880 273235 : && gimple_clobber_p (ostmt)
1881 537109 : && lhs == gimple_assign_lhs (ostmt))
1882 63851 : continue;
1883 : /* A phi node will never dominate the clobber. */
1884 256697 : return;
1885 : }
1886 : /* The use needs to dominate the clobber. */
1887 1331753 : if ((ubb != bb && !dominated_by_p (CDI_DOMINATORS, bb, ubb))
1888 11340654 : || ref_maybe_used_by_stmt_p (use_stmt, &read, false))
1889 1087771 : return;
1890 : /* Count the above alias lookup towards the limit. */
1891 9585449 : limit--;
1892 9585449 : if (limit == 0)
1893 : return;
1894 1787819 : }
1895 11619667 : vuse = gimple_vuse (ostmt);
1896 : /* This is a call with an assignment to the clobbered decl;
1897 : remove the lhs, or the whole stmt if it was pure/const. */
1898 11619667 : if (is_a <gcall*>(ostmt)
1899 11619667 : && lhs == gimple_call_lhs (ostmt))
1900 : {
1901 : /* Don't remove stores/statements that are needed for non-call
1902 : eh to work. */
1903 3394 : if (stmt_unremovable_because_of_non_call_eh_p (cfun, ostmt))
1904 : return;
1905 : /* If we delete a stmt that could throw, mark the block
1906 : in to_purge to cleanup afterwards. */
1907 3388 : if (stmt_could_throw_p (cfun, ostmt))
1908 953 : bitmap_set_bit (to_purge, obb->index);
1909 3388 : int flags = gimple_call_flags (ostmt);
1910 3388 : if ((flags & (ECF_PURE|ECF_CONST|ECF_NOVOPS))
1911 201 : && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
1912 : {
1913 117 : gimple_stmt_iterator gsi = gsi_for_stmt (ostmt);
1914 117 : if (dump_file && (dump_flags & TDF_DETAILS))
1915 : {
1916 14 : fprintf (dump_file, "Removing dead call store stmt ");
1917 14 : print_gimple_stmt (dump_file, ostmt, 0);
1918 14 : fprintf (dump_file, "\n");
1919 : }
1920 117 : unlink_stmt_vdef (ostmt);
1921 117 : release_defs (ostmt);
1922 117 : gsi_remove (&gsi, true);
1923 117 : statistics_counter_event (cfun, "delete call dead store", 1);
1924 : /* Only remove the first store previous statement. */
1925 117 : return;
1926 : }
1927 : /* Make sure we do not remove a return slot we cannot reconstruct
1928 : later. */
1929 3271 : if (gimple_call_return_slot_opt_p (as_a <gcall *>(ostmt))
1930 3271 : && (TREE_ADDRESSABLE (TREE_TYPE (gimple_call_fntype (ostmt)))
1931 448 : || !poly_int_tree_p
1932 448 : (TYPE_SIZE (TREE_TYPE (gimple_call_fntype (ostmt))))))
1933 : return;
1934 564 : if (dump_file && (dump_flags & TDF_DETAILS))
1935 : {
1936 6 : fprintf (dump_file, "Removing lhs of call stmt ");
1937 6 : print_gimple_stmt (dump_file, ostmt, 0);
1938 6 : fprintf (dump_file, "\n");
1939 : }
1940 564 : gimple_call_set_lhs (ostmt, NULL_TREE);
1941 564 : update_stmt (ostmt);
1942 564 : statistics_counter_event (cfun, "removed lhs call", 1);
1943 564 : return;
1944 : }
1945 : /* If this is an assignment store to the clobbered decl,
1946 : then maybe remove it. */
1947 11616273 : if (is_a <gassign*>(ostmt)
1948 9780666 : && gimple_store_p (ostmt)
1949 9780666 : && !gimple_clobber_p (ostmt)
1950 14499425 : && lhs == gimple_assign_lhs (ostmt))
1951 : {
1952 : /* Don't remove stores/statements that are needed for non-call
1953 : eh to work. */
1954 151313 : if (stmt_unremovable_because_of_non_call_eh_p (cfun, ostmt))
1955 : return;
1956 : /* If we delete a stmt that could throw, mark the block
1957 : in to_purge to cleanup afterwards. */
1958 146229 : if (stmt_could_throw_p (cfun, ostmt))
1959 0 : bitmap_set_bit (to_purge, obb->index);
1960 146229 : gimple_stmt_iterator gsi = gsi_for_stmt (ostmt);
1961 146229 : if (dump_file && (dump_flags & TDF_DETAILS))
1962 : {
1963 12 : fprintf (dump_file, "Removing dead store stmt ");
1964 12 : print_gimple_stmt (dump_file, ostmt, 0);
1965 12 : fprintf (dump_file, "\n");
1966 : }
1967 146229 : unlink_stmt_vdef (ostmt);
1968 146229 : release_defs (ostmt);
1969 146229 : gsi_remove (&gsi, true);
1970 146229 : statistics_counter_event (cfun, "delete dead store", 1);
1971 : /* Only remove the first store previous statement. */
1972 146229 : return;
1973 : }
1974 : /* If the statement uses or maybe writes to the decl,
1975 : then nothing is to be removed. We don't know if the write
1976 : to the decl is a partial write or a full one, so we need
1977 : to stop.
1978 : e.g.
1979 : b.c = a;
1980 : Easier to stop here rather than do a full partial
1981 : dse of this statement.
1982 : b = {CLOBBER}; */
1983 11464960 : if (stmt_may_clobber_ref_p_1 (ostmt, &read, false)
1984 11464960 : || ref_maybe_used_by_stmt_p (ostmt, &read, false))
1985 2077929 : return;
1986 9387031 : limit--;
1987 : }
1988 : }
1989 :
1990 : /* Optimizes builtin memcmps for small constant sizes.
1991 : GSI_P is the GSI for the call. STMT is the call itself.
1992 : */
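     : /* A sketch of the intended transform (assuming a 4-byte int and
     :    sufficiently aligned or cheaply unaligned accesses):
     :      _1 = memcmp (p_2, q_3, 4);
     :      if (_1 != 0) ...
     :    has its call folded into roughly
     :      _4 = MEM <unsigned int> [p_2];
     :      _5 = MEM <unsigned int> [q_3];
     :      _1 = _4 != _5;
     :    When that is not possible, the call may instead be redirected
     :    to __builtin_memcmp_eq so RTL expansion can still pick a
     :    cheaper sequence. */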
1993 :
1994 : static bool
1995 462871 : simplify_builtin_memcmp (gimple_stmt_iterator *gsi_p, gcall *stmt)
1996 : {
1997 : /* Make sure memcmp arguments are the correct type. */
1998 462871 : if (gimple_call_num_args (stmt) != 3)
1999 : return false;
2000 462871 : tree arg1 = gimple_call_arg (stmt, 0);
2001 462871 : tree arg2 = gimple_call_arg (stmt, 1);
2002 462871 : tree len = gimple_call_arg (stmt, 2);
2003 :
2004 462871 : if (!POINTER_TYPE_P (TREE_TYPE (arg1)))
2005 : return false;
2006 462871 : if (!POINTER_TYPE_P (TREE_TYPE (arg2)))
2007 : return false;
2008 462871 : if (!INTEGRAL_TYPE_P (TREE_TYPE (len)))
2009 : return false;
2010 :
2011 : /* The return value of the memcmp has to be used only in an
2012 : equality comparison with zero. */
2013 462871 : tree res = gimple_call_lhs (stmt);
2014 :
2015 462871 : if (!res || !use_in_zero_equality (res))
2016 14069 : return false;
2017 :
2018 448802 : unsigned HOST_WIDE_INT leni;
2019 :
2020 448802 : if (tree_fits_uhwi_p (len)
2021 625302 : && (leni = tree_to_uhwi (len)) <= GET_MODE_SIZE (word_mode)
2022 528977 : && pow2p_hwi (leni))
2023 : {
2024 19039 : leni *= CHAR_TYPE_SIZE;
2025 19039 : unsigned align1 = get_pointer_alignment (arg1);
2026 19039 : unsigned align2 = get_pointer_alignment (arg2);
2027 19039 : unsigned align = MIN (align1, align2);
2028 19039 : scalar_int_mode mode;
2029 19039 : if (int_mode_for_size (leni, 1).exists (&mode)
2030 19039 : && (align >= leni || !targetm.slow_unaligned_access (mode, align)))
2031 : {
2032 19039 : location_t loc = gimple_location (stmt);
2033 19039 : tree type, off;
2034 19039 : type = build_nonstandard_integer_type (leni, 1);
2035 38078 : gcc_assert (known_eq (GET_MODE_BITSIZE (TYPE_MODE (type)), leni));
2036 19039 : tree ptrtype = build_pointer_type_for_mode (char_type_node,
2037 : ptr_mode, true);
2038 19039 : off = build_int_cst (ptrtype, 0);
2039 :
2040 : /* Create unaligned types if needed. */
2041 19039 : tree type1 = type, type2 = type;
2042 19039 : if (TYPE_ALIGN (type1) > align1)
2043 7722 : type1 = build_aligned_type (type1, align1);
2044 19039 : if (TYPE_ALIGN (type2) > align2)
2045 8220 : type2 = build_aligned_type (type2, align2);
2046 :
2047 19039 : arg1 = build2_loc (loc, MEM_REF, type1, arg1, off);
2048 19039 : arg2 = build2_loc (loc, MEM_REF, type2, arg2, off);
2049 19039 : tree tem1 = fold_const_aggregate_ref (arg1);
2050 19039 : if (tem1)
2051 219 : arg1 = tem1;
2052 19039 : tree tem2 = fold_const_aggregate_ref (arg2);
2053 19039 : if (tem2)
2054 7416 : arg2 = tem2;
2055 19039 : res = fold_convert_loc (loc, TREE_TYPE (res),
2056 : fold_build2_loc (loc, NE_EXPR,
2057 : boolean_type_node,
2058 : arg1, arg2));
2059 19039 : gimplify_and_update_call_from_tree (gsi_p, res);
2060 19039 : return true;
2061 : }
2062 : }
2063 :
2064 : /* Replace memcmp with memcmp_eq if the above fails. */
2065 429763 : if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt)) == BUILT_IN_MEMCMP_EQ)
2066 : return false;
2067 340514 : if (!fold_before_rtl_expansion_p ())
2068 : return false;
2069 89249 : gimple_call_set_fndecl (stmt, builtin_decl_explicit (BUILT_IN_MEMCMP_EQ));
2070 89249 : update_stmt (stmt);
2071 89249 : return true;
2072 : }
2073 :
2074 : /* Optimizes builtin memchrs for small constant sizes with a const string.
2075 : GSI_P is the GSI for the call. STMT is the call itself.
2076 : */
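     : /* A sketch of the transform:
     :      _1 = memchr ("abcd", c_2, 2);
     :      if (_1 != 0) ...
     :    has its call folded into roughly
     :      _1 = c_2 == 'a' | c_2 == 'b';
     :    i.e. each constant byte covered by the length is compared
     :    against the searched character and the results are ORed. */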
2077 :
2078 : static bool
2079 13082 : simplify_builtin_memchr (gimple_stmt_iterator *gsi_p, gcall *stmt)
2080 : {
2081 13082 : if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
2082 : return false;
2083 :
2084 13082 : if (gimple_call_num_args (stmt) != 3)
2085 : return false;
2086 :
2087 13082 : tree res = gimple_call_lhs (stmt);
2088 13082 : if (!res || !use_in_zero_equality (res))
2089 11649 : return false;
2090 :
2091 1433 : tree ptr = gimple_call_arg (stmt, 0);
2092 1433 : if (TREE_CODE (ptr) != ADDR_EXPR
2093 1433 : || TREE_CODE (TREE_OPERAND (ptr, 0)) != STRING_CST)
2094 : return false;
2095 :
2096 422 : unsigned HOST_WIDE_INT slen
2097 422 : = TREE_STRING_LENGTH (TREE_OPERAND (ptr, 0));
2098 : /* It must be a non-empty string constant. */
2099 422 : if (slen < 2)
2100 : return false;
2101 :
2102 : /* When not optimizing the block for speed (e.g. -Os), only simplify strings with a single character. */
2103 418 : if (!optimize_bb_for_speed_p (gimple_bb (stmt))
2104 418 : && slen > 2)
2105 : return false;
2106 :
2107 402 : tree size = gimple_call_arg (stmt, 2);
2108 : /* Size must be a constant which is <= UNITS_PER_WORD and
2109 : less than the string length. */
2110 402 : if (!tree_fits_uhwi_p (size))
2111 : return false;
2112 :
2113 402 : unsigned HOST_WIDE_INT sz = tree_to_uhwi (size);
2114 403 : if (sz == 0 || sz > UNITS_PER_WORD || sz >= slen)
2115 : return false;
2116 :
2117 350 : tree ch = gimple_call_arg (stmt, 1);
2118 350 : location_t loc = gimple_location (stmt);
2119 350 : if (!useless_type_conversion_p (char_type_node,
2120 350 : TREE_TYPE (ch)))
2121 350 : ch = fold_convert_loc (loc, char_type_node, ch);
2122 350 : const char *p = TREE_STRING_POINTER (TREE_OPERAND (ptr, 0));
2123 350 : unsigned int isize = sz;
2124 350 : tree *op = XALLOCAVEC (tree, isize);
2125 1257 : for (unsigned int i = 0; i < isize; i++)
2126 : {
2127 907 : op[i] = build_int_cst (char_type_node, p[i]);
2128 907 : op[i] = fold_build2_loc (loc, EQ_EXPR, boolean_type_node,
2129 : op[i], ch);
2130 : }
2131 907 : for (unsigned int i = isize - 1; i >= 1; i--)
2132 557 : op[i - 1] = fold_convert_loc (loc, boolean_type_node,
2133 : fold_build2_loc (loc,
2134 : BIT_IOR_EXPR,
2135 : boolean_type_node,
2136 557 : op[i - 1],
2137 557 : op[i]));
2138 350 : res = fold_convert_loc (loc, TREE_TYPE (res), op[0]);
2139 350 : gimplify_and_update_call_from_tree (gsi_p, res);
2140 350 : return true;
2141 : }
2142 :
2143 : /* *GSI_P is a GIMPLE_CALL to a builtin function.
2144 : Optimize
2145 : memcpy (p, "abcd", 4); // STMT1
2146 : memset (p + 4, ' ', 3); // STMT2
2147 : into
2148 : memcpy (p, "abcd   ", 7);
2149 : call if the latter can be stored by pieces during expansion.
2150 : */
2151 :
2152 : static bool
2153 109462 : simplify_builtin_memcpy_memset (gimple_stmt_iterator *gsi_p, gcall *stmt2)
2154 : {
2155 109462 : if (gimple_call_num_args (stmt2) != 3
2156 109462 : || gimple_call_lhs (stmt2)
2157 : || CHAR_BIT != 8
2158 109462 : || BITS_PER_UNIT != 8)
2159 : return false;
2160 :
2161 209023 : tree vuse = gimple_vuse (stmt2);
2162 101905 : if (vuse == NULL)
2163 : return false;
2164 101891 : gimple *stmt1 = SSA_NAME_DEF_STMT (vuse);
2165 :
2166 101891 : tree callee1;
2167 101891 : tree ptr1, src1, str1, off1, len1, lhs1;
2168 101891 : tree ptr2 = gimple_call_arg (stmt2, 0);
2169 101891 : tree val2 = gimple_call_arg (stmt2, 1);
2170 101891 : tree len2 = gimple_call_arg (stmt2, 2);
2171 101891 : tree diff, vdef, new_str_cst;
2172 101891 : gimple *use_stmt;
2173 101891 : unsigned int ptr1_align;
2174 101891 : unsigned HOST_WIDE_INT src_len;
2175 101891 : char *src_buf;
2176 101891 : use_operand_p use_p;
2177 :
2178 101891 : if (!tree_fits_shwi_p (val2)
2179 97867 : || !tree_fits_uhwi_p (len2)
2180 164587 : || compare_tree_int (len2, 1024) == 1)
2181 44277 : return false;
2182 :
2183 57614 : if (is_gimple_call (stmt1))
2184 : {
2185 : /* If first stmt is a call, it needs to be memcpy
2186 : or mempcpy, with string literal as second argument and
2187 : constant length. */
2188 29953 : callee1 = gimple_call_fndecl (stmt1);
2189 29953 : if (callee1 == NULL_TREE
2190 29837 : || !fndecl_built_in_p (callee1, BUILT_IN_NORMAL)
2191 56289 : || gimple_call_num_args (stmt1) != 3)
2192 : return false;
2193 25042 : if (DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMCPY
2194 25042 : && DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMPCPY)
2195 : return false;
2196 10867 : ptr1 = gimple_call_arg (stmt1, 0);
2197 10867 : src1 = gimple_call_arg (stmt1, 1);
2198 10867 : len1 = gimple_call_arg (stmt1, 2);
2199 10867 : lhs1 = gimple_call_lhs (stmt1);
2200 10867 : if (!tree_fits_uhwi_p (len1))
2201 : return false;
2202 10780 : str1 = string_constant (src1, &off1, NULL, NULL);
2203 10780 : if (str1 == NULL_TREE)
2204 : return false;
2205 4875 : if (!tree_fits_uhwi_p (off1)
2206 4875 : || compare_tree_int (off1, TREE_STRING_LENGTH (str1) - 1) > 0
2207 4875 : || compare_tree_int (len1, TREE_STRING_LENGTH (str1)
2208 4875 : - tree_to_uhwi (off1)) > 0
2209 4875 : || TREE_CODE (TREE_TYPE (str1)) != ARRAY_TYPE
2210 14625 : || TYPE_MODE (TREE_TYPE (TREE_TYPE (str1)))
2211 4875 : != TYPE_MODE (char_type_node))
2212 0 : return false;
2213 : }
2214 27661 : else if (gimple_assign_single_p (stmt1))
2215 : {
2216 : /* Otherwise look for length 1 memcpy optimized into
2217 : assignment. */
2218 16909 : ptr1 = gimple_assign_lhs (stmt1);
2219 16909 : src1 = gimple_assign_rhs1 (stmt1);
2220 16909 : if (TREE_CODE (ptr1) != MEM_REF
2221 3522 : || TYPE_MODE (TREE_TYPE (ptr1)) != TYPE_MODE (char_type_node)
2222 17943 : || !tree_fits_shwi_p (src1))
2223 16557 : return false;
2224 352 : ptr1 = build_fold_addr_expr (ptr1);
2225 352 : STRIP_USELESS_TYPE_CONVERSION (ptr1);
2226 352 : callee1 = NULL_TREE;
2227 352 : len1 = size_one_node;
2228 352 : lhs1 = NULL_TREE;
2229 352 : off1 = size_zero_node;
2230 352 : str1 = NULL_TREE;
2231 : }
2232 : else
2233 : return false;
2234 :
2235 5227 : diff = constant_pointer_difference (ptr1, ptr2);
2236 5227 : if (diff == NULL && lhs1 != NULL)
2237 : {
2238 7 : diff = constant_pointer_difference (lhs1, ptr2);
2239 7 : if (DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
2240 7 : && diff != NULL)
2241 7 : diff = size_binop (PLUS_EXPR, diff,
2242 : fold_convert (sizetype, len1));
2243 : }
2244 : /* If the difference between the second and first destination pointer
2245 : is not constant, or is bigger than memcpy length, bail out. */
2246 5227 : if (diff == NULL
2247 4344 : || !tree_fits_uhwi_p (diff)
2248 4344 : || tree_int_cst_lt (len1, diff)
2249 9327 : || compare_tree_int (diff, 1024) == 1)
2250 1127 : return false;
2251 :
2252 : /* Use the maximum of the difference plus the memset length and the memcpy
2253 : length as the new memcpy length; if it is too big, bail out. */
2254 4100 : src_len = tree_to_uhwi (diff);
2255 4100 : src_len += tree_to_uhwi (len2);
2256 4100 : if (src_len < tree_to_uhwi (len1))
2257 : src_len = tree_to_uhwi (len1);
2258 4100 : if (src_len > 1024)
2259 : return false;
2260 :
2261 : /* If the mempcpy value is used elsewhere, bail out, as mempcpy
2262 : with a bigger length will return a different result. */
2263 4100 : if (lhs1 != NULL_TREE
2264 193 : && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
2265 4107 : && (TREE_CODE (lhs1) != SSA_NAME
2266 7 : || !single_imm_use (lhs1, &use_p, &use_stmt)
2267 7 : || use_stmt != stmt2))
2268 0 : return false;
2269 :
2270 : /* If anything reads memory in between memcpy and memset
2271 : call, the modified memcpy call might change it. */
2272 4100 : vdef = gimple_vdef (stmt1);
2273 4100 : if (vdef != NULL
2274 4100 : && (!single_imm_use (vdef, &use_p, &use_stmt)
2275 3345 : || use_stmt != stmt2))
2276 : return false;
2277 :
2278 3345 : ptr1_align = get_pointer_alignment (ptr1);
2279 : /* Construct the new source string literal. */
2280 3345 : src_buf = XALLOCAVEC (char, src_len + 1);
2281 3345 : if (callee1)
2282 3184 : memcpy (src_buf,
2283 3184 : TREE_STRING_POINTER (str1) + tree_to_uhwi (off1),
2284 : tree_to_uhwi (len1));
2285 : else
2286 161 : src_buf[0] = tree_to_shwi (src1);
2287 3345 : memset (src_buf + tree_to_uhwi (diff),
2288 3345 : tree_to_shwi (val2), tree_to_uhwi (len2));
2289 3345 : src_buf[src_len] = '\0';
2290 : /* Neither builtin_strncpy_read_str nor builtin_memcpy_read_str
2291 : handles embedded '\0's. */
2292 3345 : if (strlen (src_buf) != src_len)
2293 : return false;
2294 3255 : rtl_profile_for_bb (gimple_bb (stmt2));
2295 : /* If the new memcpy wouldn't be emitted by storing the literal
2296 : by pieces, this optimization might enlarge .rodata too much,
2297 : as commonly used string literals couldn't be shared any
2298 : longer. */
2299 3255 : if (!can_store_by_pieces (src_len,
2300 : builtin_strncpy_read_str,
2301 : src_buf, ptr1_align, false))
2302 : return false;
2303 :
2304 2471 : new_str_cst = build_string_literal (src_len, src_buf);
2305 2471 : if (callee1)
2306 : {
2307 : /* If STMT1 is a mem{,p}cpy call, adjust it and remove
2308 : memset call. */
2309 2344 : if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
2310 7 : gimple_call_set_lhs (stmt1, NULL_TREE);
2311 2344 : gimple_call_set_arg (stmt1, 1, new_str_cst);
2312 2344 : gimple_call_set_arg (stmt1, 2,
2313 2344 : build_int_cst (TREE_TYPE (len1), src_len));
2314 2344 : update_stmt (stmt1);
2315 2344 : unlink_stmt_vdef (stmt2);
2316 2344 : gsi_replace (gsi_p, gimple_build_nop (), false);
2317 2344 : fwprop_invalidate_lattice (gimple_get_lhs (stmt2));
2318 2344 : release_defs (stmt2);
2319 2344 : if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
2320 : {
2321 7 : fwprop_invalidate_lattice (lhs1);
2322 7 : release_ssa_name (lhs1);
2323 : }
2324 2344 : return true;
2325 : }
2326 : else
2327 : {
2328 : /* Otherwise, if STMT1 is length 1 memcpy optimized into
2329 : assignment, remove STMT1 and change memset call into
2330 : memcpy call. */
2331 127 : gimple_stmt_iterator gsi = gsi_for_stmt (stmt1);
2332 :
2333 127 : if (!is_gimple_val (ptr1))
2334 12 : ptr1 = force_gimple_operand_gsi (gsi_p, ptr1, true, NULL_TREE,
2335 : true, GSI_SAME_STMT);
2336 127 : tree fndecl = builtin_decl_explicit (BUILT_IN_MEMCPY);
2337 127 : gimple_call_set_fndecl (stmt2, fndecl);
2338 127 : gimple_call_set_fntype (stmt2,
2339 127 : TREE_TYPE (fndecl));
2340 127 : gimple_call_set_arg (stmt2, 0, ptr1);
2341 127 : gimple_call_set_arg (stmt2, 1, new_str_cst);
2342 127 : gimple_call_set_arg (stmt2, 2,
2343 127 : build_int_cst (TREE_TYPE (len2), src_len));
2344 127 : unlink_stmt_vdef (stmt1);
2345 127 : gsi_remove (&gsi, true);
2346 127 : fwprop_invalidate_lattice (gimple_get_lhs (stmt1));
2347 127 : release_defs (stmt1);
2348 127 : update_stmt (stmt2);
2349 127 : return false;
2350 : }
2351 : }
2352 :
2353 :
2354 : /* Try to optimize out __builtin_stack_restore. Optimize it out
2355 : if there is another __builtin_stack_restore in the same basic
2356 : block and no calls or ASM_EXPRs are in between, or if this block's
2357 : only outgoing edge is to EXIT_BLOCK and there are no calls or
2358 : ASM_EXPRs after this __builtin_stack_restore.
2359 : Note that a restore right before a call to a noreturn function is not needed.
2360 : Also skip over cheap calls that will most likely become a single instruction.
2361 : Restoring the stack before a call is important to be able to keep
2362 : stack usage down so that the call does not run out of stack. */
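     : /* A sketch of the simplest case:
     :      _1 = __builtin_stack_save ();
     :      ... VLA or alloca uses ...
     :      __builtin_stack_restore (_1);
     :      return;
     :    With nothing but the return after it, the restore is deleted;
     :    if _1 has no other uses, the matching __builtin_stack_save is
     :    replaced by a constant as well. */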
2363 :
2364 :
2365 : static bool
2366 10412 : optimize_stack_restore (gimple_stmt_iterator *gsi, gimple *call)
2367 : {
2368 10412 : if (!fold_before_rtl_expansion_p ())
2369 : return false;
2370 2539 : tree callee;
2371 2539 : gimple *stmt;
2372 :
2373 2539 : basic_block bb = gsi_bb (*gsi);
2374 :
2375 2539 : if (gimple_call_num_args (call) != 1
2376 2539 : || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
2377 5078 : || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
2378 : return false;
2379 :
2380 2539 : gimple_stmt_iterator i = *gsi;
2381 6370 : for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
2382 : {
2383 4292 : stmt = gsi_stmt (i);
2384 4292 : if (is_a<gasm*> (stmt))
2385 : return false;
2386 4291 : gcall *call = dyn_cast<gcall*>(stmt);
2387 4291 : if (!call)
2388 3622 : continue;
2389 : /* We can remove the restore in front of noreturn
2390 : calls, since the restore will happen either
2391 : via an unwind/longjmp or not at all. */
2392 : via an unwind/longjmp or not at all. */
2393 669 : if (gimple_call_noreturn_p (call))
2394 : break;
2395 :
2396 : /* Internal calls are ok; check for them first
2397 : since their fndecl will be null. */
2398 653 : if (gimple_call_internal_p (call))
2399 1 : continue;
2400 :
2401 652 : callee = gimple_call_fndecl (call);
2402 : /* Non-builtin calls are not ok. */
2403 652 : if (!callee
2404 652 : || !fndecl_built_in_p (callee))
2405 : return false;
2406 :
2407 : /* Do not remove stack updates before strub leave. */
2408 576 : if (fndecl_built_in_p (callee, BUILT_IN___STRUB_LEAVE)
2409 : /* Alloca calls are not ok either. */
2410 576 : || fndecl_builtin_alloc_p (callee))
2411 : return false;
2412 :
2413 364 : if (fndecl_built_in_p (callee, BUILT_IN_STACK_RESTORE))
2414 52 : goto second_stack_restore;
2415 :
2416 : /* If not a simple or inexpensive builtin, then it is not ok either. */
2417 312 : if (!is_simple_builtin (callee)
2418 312 : && !is_inexpensive_builtin (callee))
2419 : return false;
2420 : }
2421 :
2422 : /* Allow a single successor that is the exit block, or zero successors. */
2423 2094 : switch (EDGE_COUNT (bb->succs))
2424 : {
2425 : case 0:
2426 : break;
2427 2007 : case 1:
2428 2007 : if (single_succ_edge (bb)->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
2429 : return false;
2430 : break;
2431 : default:
2432 : return false;
2433 : }
2434 1737 : second_stack_restore:
2435 :
2436 : /* If there's exactly one use, then zap the call to __builtin_stack_save.
2437 : If there are multiple uses, then the last one should remove the call.
2438 : In any case, whether the call to __builtin_stack_save can be removed
2439 : or not is irrelevant to removing the call to __builtin_stack_restore. */
2440 1737 : if (has_single_use (gimple_call_arg (call, 0)))
2441 : {
2442 1559 : gimple *stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
2443 1559 : if (is_gimple_call (stack_save))
2444 : {
2445 1557 : callee = gimple_call_fndecl (stack_save);
2446 1557 : if (callee && fndecl_built_in_p (callee, BUILT_IN_STACK_SAVE))
2447 : {
2448 1557 : gimple_stmt_iterator stack_save_gsi;
2449 1557 : tree rhs;
2450 :
2451 1557 : stack_save_gsi = gsi_for_stmt (stack_save);
2452 1557 : rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
2453 1557 : replace_call_with_value (&stack_save_gsi, rhs);
2454 : }
2455 : }
2456 : }
2457 :
2458 : /* No effect, so the statement will be deleted. */
2459 1737 : replace_call_with_value (gsi, NULL_TREE);
2460 1737 : return true;
2461 : }
2462 :
2463 : /* *GSI_P is a GIMPLE_CALL to a builtin function.
2464 : Optimize
2465 : memcpy (p, "abcd", 4);
2466 : memset (p + 4, ' ', 3);
2467 : into
2468 : memcpy (p, "abcd   ", 7);
2469 : call if the latter can be stored by pieces during expansion.
2470 :
2471 : Optimize
2472 : memchr ("abcd", a, 4) == 0;
2473 : or
2474 : memchr ("abcd", a, 4) != 0;
2475 : to
2476 : (a == 'a' || a == 'b' || a == 'c' || a == 'd') == 0
2477 : or
2478 : (a == 'a' || a == 'b' || a == 'c' || a == 'd') != 0
2479 :
2480 : Also canonicalize __atomic_fetch_op (p, x, y) op x
2481 : to __atomic_op_fetch (p, x, y) or
2482 : __atomic_op_fetch (p, x, y) iop x
2483 : to __atomic_fetch_op (p, x, y) when possible (also __sync). */
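     : /* A sketch of the first canonicalization:
     :      _1 = __atomic_fetch_add_4 (p_2, x_3, 0);
     :      _4 = _1 + x_3;
     :    becomes
     :      _5 = __atomic_add_fetch_4 (p_2, x_3, 0);
     :      _1 = _5 - x_3;   (kept only for debug info, DCEd later)
     :      _4 = _5;
     :    so later passes see the post-operation value directly. */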
2484 :
2485 : static bool
2486 6136585 : simplify_builtin_call (gimple_stmt_iterator *gsi_p, tree callee2, bool full_walk)
2487 : {
2488 6136585 : gimple *stmt2 = gsi_stmt (*gsi_p);
2489 6136585 : enum built_in_function other_atomic = END_BUILTINS;
2490 6136585 : enum tree_code atomic_op = ERROR_MARK;
2491 :
2492 6136585 : switch (DECL_FUNCTION_CODE (callee2))
2493 : {
2494 10412 : case BUILT_IN_STACK_RESTORE:
2495 10412 : return optimize_stack_restore (gsi_p, as_a<gcall*>(stmt2));
2496 462871 : case BUILT_IN_MEMCMP:
2497 462871 : case BUILT_IN_MEMCMP_EQ:
2498 462871 : return simplify_builtin_memcmp (gsi_p, as_a<gcall*>(stmt2));
2499 13082 : case BUILT_IN_MEMCHR:
2500 13082 : return simplify_builtin_memchr (gsi_p, as_a<gcall*>(stmt2));
2501 :
2502 109462 : case BUILT_IN_MEMSET:
2503 109462 : if (gimple_call_num_args (stmt2) == 3)
2504 : {
2505 : /* Try to propagate the zeroing/value of the memset to memcpy
2506 : if the dest is an address and the value is a constant. */
2507 109462 : optimize_aggr_zeroprop (stmt2, full_walk);
2508 : }
2509 109462 : return simplify_builtin_memcpy_memset (gsi_p, as_a<gcall*>(stmt2));
2510 :
2511 : #define CASE_ATOMIC(NAME, OTHER, OP) \
2512 : case BUILT_IN_##NAME##_1: \
2513 : case BUILT_IN_##NAME##_2: \
2514 : case BUILT_IN_##NAME##_4: \
2515 : case BUILT_IN_##NAME##_8: \
2516 : case BUILT_IN_##NAME##_16: \
2517 : atomic_op = OP; \
2518 : other_atomic \
2519 : = (enum built_in_function) (BUILT_IN_##OTHER##_1 \
2520 : + (DECL_FUNCTION_CODE (callee2) \
2521 : - BUILT_IN_##NAME##_1)); \
2522 : goto handle_atomic_fetch_op;
2523 :
2524 48515 : CASE_ATOMIC (ATOMIC_FETCH_ADD, ATOMIC_ADD_FETCH, PLUS_EXPR)
2525 7125 : CASE_ATOMIC (ATOMIC_FETCH_SUB, ATOMIC_SUB_FETCH, MINUS_EXPR)
2526 2876 : CASE_ATOMIC (ATOMIC_FETCH_AND, ATOMIC_AND_FETCH, BIT_AND_EXPR)
2527 2895 : CASE_ATOMIC (ATOMIC_FETCH_XOR, ATOMIC_XOR_FETCH, BIT_XOR_EXPR)
2528 3823 : CASE_ATOMIC (ATOMIC_FETCH_OR, ATOMIC_OR_FETCH, BIT_IOR_EXPR)
2529 :
2530 2365 : CASE_ATOMIC (SYNC_FETCH_AND_ADD, SYNC_ADD_AND_FETCH, PLUS_EXPR)
2531 2004 : CASE_ATOMIC (SYNC_FETCH_AND_SUB, SYNC_SUB_AND_FETCH, MINUS_EXPR)
2532 1876 : CASE_ATOMIC (SYNC_FETCH_AND_AND, SYNC_AND_AND_FETCH, BIT_AND_EXPR)
2533 2144 : CASE_ATOMIC (SYNC_FETCH_AND_XOR, SYNC_XOR_AND_FETCH, BIT_XOR_EXPR)
2534 1987 : CASE_ATOMIC (SYNC_FETCH_AND_OR, SYNC_OR_AND_FETCH, BIT_IOR_EXPR)
2535 :
2536 14351 : CASE_ATOMIC (ATOMIC_ADD_FETCH, ATOMIC_FETCH_ADD, MINUS_EXPR)
2537 8552 : CASE_ATOMIC (ATOMIC_SUB_FETCH, ATOMIC_FETCH_SUB, PLUS_EXPR)
2538 2380 : CASE_ATOMIC (ATOMIC_XOR_FETCH, ATOMIC_FETCH_XOR, BIT_XOR_EXPR)
2539 :
2540 846 : CASE_ATOMIC (SYNC_ADD_AND_FETCH, SYNC_FETCH_AND_ADD, MINUS_EXPR)
2541 732 : CASE_ATOMIC (SYNC_SUB_AND_FETCH, SYNC_FETCH_AND_SUB, PLUS_EXPR)
2542 800 : CASE_ATOMIC (SYNC_XOR_AND_FETCH, SYNC_FETCH_AND_XOR, BIT_XOR_EXPR)
2543 :
2544 : #undef CASE_ATOMIC
2545 :
2546 103271 : handle_atomic_fetch_op:
2547 103271 : if (gimple_call_num_args (stmt2) >= 2 && gimple_call_lhs (stmt2))
2548 : {
2549 59802 : tree lhs2 = gimple_call_lhs (stmt2), lhsc = lhs2;
2550 59802 : tree arg = gimple_call_arg (stmt2, 1);
2551 59802 : gimple *use_stmt, *cast_stmt = NULL;
2552 59802 : use_operand_p use_p;
2553 59802 : tree ndecl = builtin_decl_explicit (other_atomic);
2554 :
2555 59802 : if (ndecl == NULL_TREE || !single_imm_use (lhs2, &use_p, &use_stmt))
2556 : break;
2557 :
2558 58673 : if (gimple_assign_cast_p (use_stmt))
2559 : {
2560 31188 : cast_stmt = use_stmt;
2561 31188 : lhsc = gimple_assign_lhs (cast_stmt);
2562 31188 : if (lhsc == NULL_TREE
2563 31188 : || !INTEGRAL_TYPE_P (TREE_TYPE (lhsc))
2564 30637 : || (TYPE_PRECISION (TREE_TYPE (lhsc))
2565 30637 : != TYPE_PRECISION (TREE_TYPE (lhs2)))
2566 60299 : || !single_imm_use (lhsc, &use_p, &use_stmt))
2567 : {
2568 2605 : use_stmt = cast_stmt;
2569 2605 : cast_stmt = NULL;
2570 2605 : lhsc = lhs2;
2571 : }
2572 : }
2573 :
2574 58673 : bool ok = false;
2575 58673 : tree oarg = NULL_TREE;
2576 58673 : enum tree_code ccode = ERROR_MARK;
2577 58673 : tree crhs1 = NULL_TREE, crhs2 = NULL_TREE;
2578 58673 : if (is_gimple_assign (use_stmt)
2579 58673 : && gimple_assign_rhs_code (use_stmt) == atomic_op)
2580 : {
2581 1416 : if (gimple_assign_rhs1 (use_stmt) == lhsc)
2582 1016 : oarg = gimple_assign_rhs2 (use_stmt);
2583 400 : else if (atomic_op != MINUS_EXPR)
2584 : oarg = gimple_assign_rhs1 (use_stmt);
2585 : }
2586 57257 : else if (atomic_op == MINUS_EXPR
2587 13188 : && is_gimple_assign (use_stmt)
2588 3612 : && gimple_assign_rhs_code (use_stmt) == PLUS_EXPR
2589 199 : && TREE_CODE (arg) == INTEGER_CST
2590 57456 : && (TREE_CODE (gimple_assign_rhs2 (use_stmt))
2591 : == INTEGER_CST))
2592 : {
2593 183 : tree a = fold_convert (TREE_TYPE (lhs2), arg);
2594 183 : tree o = fold_convert (TREE_TYPE (lhs2),
2595 : gimple_assign_rhs2 (use_stmt));
2596 183 : if (wi::to_wide (a) == wi::neg (wi::to_wide (o)))
2597 : ok = true;
2598 : }
2599 57074 : else if (atomic_op == BIT_AND_EXPR || atomic_op == BIT_IOR_EXPR)
2600 : ;
2601 51832 : else if (gimple_code (use_stmt) == GIMPLE_COND)
2602 : {
2603 19457 : ccode = gimple_cond_code (use_stmt);
2604 19457 : crhs1 = gimple_cond_lhs (use_stmt);
2605 19457 : crhs2 = gimple_cond_rhs (use_stmt);
2606 : }
2607 32375 : else if (is_gimple_assign (use_stmt))
2608 : {
2609 9525 : if (gimple_assign_rhs_class (use_stmt) == GIMPLE_BINARY_RHS)
2610 : {
2611 3941 : ccode = gimple_assign_rhs_code (use_stmt);
2612 3941 : crhs1 = gimple_assign_rhs1 (use_stmt);
2613 3941 : crhs2 = gimple_assign_rhs2 (use_stmt);
2614 : }
2615 5584 : else if (gimple_assign_rhs_code (use_stmt) == COND_EXPR)
2616 : {
2617 0 : tree cond = gimple_assign_rhs1 (use_stmt);
2618 0 : if (COMPARISON_CLASS_P (cond))
2619 : {
2620 0 : ccode = TREE_CODE (cond);
2621 0 : crhs1 = TREE_OPERAND (cond, 0);
2622 0 : crhs2 = TREE_OPERAND (cond, 1);
2623 : }
2624 : }
2625 : }
2626 24414 : if (ccode == EQ_EXPR || ccode == NE_EXPR)
2627 : {
2628 : /* Deal with x - y == 0 or x ^ y == 0
2629 : being optimized into x == y and x + cst == 0
2630 : into x == -cst. */
2631 22214 : tree o = NULL_TREE;
2632 22214 : if (crhs1 == lhsc)
2633 : o = crhs2;
2634 133 : else if (crhs2 == lhsc)
2635 133 : o = crhs1;
2636 22214 : if (o && atomic_op != PLUS_EXPR)
2637 : oarg = o;
2638 10057 : else if (o
2639 10057 : && TREE_CODE (o) == INTEGER_CST
2640 10057 : && TREE_CODE (arg) == INTEGER_CST)
2641 : {
2642 9347 : tree a = fold_convert (TREE_TYPE (lhs2), arg);
2643 9347 : o = fold_convert (TREE_TYPE (lhs2), o);
2644 9347 : if (wi::to_wide (a) == wi::neg (wi::to_wide (o)))
2645 58673 : ok = true;
2646 : }
2647 : }
2648 58673 : if (oarg && !ok)
2649 : {
2650 13573 : if (operand_equal_p (arg, oarg, 0))
2651 : ok = true;
2652 12244 : else if (TREE_CODE (arg) == SSA_NAME
2653 2203 : && TREE_CODE (oarg) == SSA_NAME)
2654 : {
2655 745 : tree oarg2 = oarg;
2656 745 : if (gimple_assign_cast_p (SSA_NAME_DEF_STMT (oarg)))
2657 : {
2658 104 : gimple *g = SSA_NAME_DEF_STMT (oarg);
2659 104 : oarg2 = gimple_assign_rhs1 (g);
2660 104 : if (TREE_CODE (oarg2) != SSA_NAME
2661 104 : || !INTEGRAL_TYPE_P (TREE_TYPE (oarg2))
2662 208 : || (TYPE_PRECISION (TREE_TYPE (oarg2))
2663 104 : != TYPE_PRECISION (TREE_TYPE (oarg))))
2664 : oarg2 = oarg;
2665 : }
2666 745 : if (gimple_assign_cast_p (SSA_NAME_DEF_STMT (arg)))
2667 : {
2668 544 : gimple *g = SSA_NAME_DEF_STMT (arg);
2669 544 : tree rhs1 = gimple_assign_rhs1 (g);
2670 : /* Handle e.g.
2671 : x.0_1 = (long unsigned int) x_4(D);
2672 : _2 = __atomic_fetch_add_8 (&vlong, x.0_1, 0);
2673 : _3 = (long int) _2;
2674 : _7 = x_4(D) + _3; */
2675 544 : if (rhs1 == oarg || rhs1 == oarg2)
2676 : ok = true;
2677 : /* Handle e.g.
2678 : x.18_1 = (short unsigned int) x_5(D);
2679 : _2 = (int) x.18_1;
2680 : _3 = __atomic_fetch_xor_2 (&vshort, _2, 0);
2681 : _4 = (short int) _3;
2682 : _8 = x_5(D) ^ _4;
2683 : This happens only for char/short. */
2684 160 : else if (TREE_CODE (rhs1) == SSA_NAME
2685 160 : && INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
2686 320 : && (TYPE_PRECISION (TREE_TYPE (rhs1))
2687 160 : == TYPE_PRECISION (TREE_TYPE (lhs2))))
2688 : {
2689 160 : g = SSA_NAME_DEF_STMT (rhs1);
2690 160 : if (gimple_assign_cast_p (g)
2691 160 : && (gimple_assign_rhs1 (g) == oarg
2692 0 : || gimple_assign_rhs1 (g) == oarg2))
2693 : ok = true;
2694 : }
2695 : }
2696 745 : if (!ok && arg == oarg2)
2697 : /* Handle e.g.
2698 : _1 = __sync_fetch_and_add_4 (&v, x_5(D));
2699 : _2 = (int) _1;
2700 : x.0_3 = (int) x_5(D);
2701 : _7 = _2 + x.0_3; */
2702 : ok = true;
2703 : }
2704 : }
2705 :
2706 57344 : if (ok)
2707 : {
2708 2546 : tree new_lhs = make_ssa_name (TREE_TYPE (lhs2));
2709 2546 : gimple_call_set_lhs (stmt2, new_lhs);
2710 2546 : gimple_call_set_fndecl (stmt2, ndecl);
2711 2546 : gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
2712 2546 : if (ccode == ERROR_MARK)
2713 2000 : gimple_assign_set_rhs_with_ops (&gsi, cast_stmt
2714 : ? NOP_EXPR : SSA_NAME,
2715 : new_lhs);
2716 : else
2717 : {
2718 1323 : crhs1 = new_lhs;
2719 1323 : crhs2 = build_zero_cst (TREE_TYPE (lhs2));
2720 1323 : if (gimple_code (use_stmt) == GIMPLE_COND)
2721 : {
2722 984 : gcond *cond_stmt = as_a <gcond *> (use_stmt);
2723 984 : gimple_cond_set_lhs (cond_stmt, crhs1);
2724 984 : gimple_cond_set_rhs (cond_stmt, crhs2);
2725 : }
2726 339 : else if (gimple_assign_rhs_class (use_stmt)
2727 : == GIMPLE_BINARY_RHS)
2728 : {
2729 339 : gimple_assign_set_rhs1 (use_stmt, crhs1);
2730 339 : gimple_assign_set_rhs2 (use_stmt, crhs2);
2731 : }
2732 : else
2733 : {
2734 0 : gcc_checking_assert (gimple_assign_rhs_code (use_stmt)
2735 : == COND_EXPR);
2736 0 : tree cond = build2 (ccode, boolean_type_node,
2737 : crhs1, crhs2);
2738 0 : gimple_assign_set_rhs1 (use_stmt, cond);
2739 : }
2740 : }
2741 2546 : update_stmt (use_stmt);
2742 2546 : if (atomic_op != BIT_AND_EXPR
2743 2546 : && atomic_op != BIT_IOR_EXPR
2744 2546 : && !stmt_ends_bb_p (stmt2))
2745 : {
2746 : /* For the benefit of debug stmts, emit stmt(s) to set
2747 : lhs2 to the value it had from the new builtin.
2748 : E.g. if it was previously:
2749 : lhs2 = __atomic_fetch_add_8 (ptr, arg, 0);
2750 : emit:
2751 : new_lhs = __atomic_add_fetch_8 (ptr, arg, 0);
2752 : lhs2 = new_lhs - arg;
2753 : We also keep cast_stmt if any in the IL for
2754 : the same reasons.
2755 : These stmts will be DCEd later and proper debug info
2756 : will be emitted.
2757 : This is only possible for reversible operations
2758 : (+/-/^) and without -fnon-call-exceptions. */
2759 2205 : gsi = gsi_for_stmt (stmt2);
2760 2205 : tree type = TREE_TYPE (lhs2);
2761 2205 : if (TREE_CODE (arg) == INTEGER_CST)
2762 1623 : arg = fold_convert (type, arg);
2763 582 : else if (!useless_type_conversion_p (type, TREE_TYPE (arg)))
2764 : {
2765 0 : tree narg = make_ssa_name (type);
2766 0 : gimple *g = gimple_build_assign (narg, NOP_EXPR, arg);
2767 0 : gsi_insert_after (&gsi, g, GSI_NEW_STMT);
2768 0 : arg = narg;
2769 : }
2770 2205 : enum tree_code rcode;
2771 2205 : switch (atomic_op)
2772 : {
2773 : case PLUS_EXPR: rcode = MINUS_EXPR; break;
2774 727 : case MINUS_EXPR: rcode = PLUS_EXPR; break;
2775 492 : case BIT_XOR_EXPR: rcode = atomic_op; break;
2776 0 : default: gcc_unreachable ();
2777 : }
2778 2205 : gimple *g = gimple_build_assign (lhs2, rcode, new_lhs, arg);
2779 2205 : gsi_insert_after (&gsi, g, GSI_NEW_STMT);
2780 2205 : update_stmt (stmt2);
2781 : }
2782 : else
2783 : {
2784 : /* For e.g.
2785 : lhs2 = __atomic_fetch_or_8 (ptr, arg, 0);
2786 : after we change it to
2787 : new_lhs = __atomic_or_fetch_8 (ptr, arg, 0);
2788 : there is no way to find out the lhs2 value (i.e.
2789 : what the atomic memory contained before the operation),
2790 : values of some bits are lost. We have checked earlier
2791 : that we don't have any non-debug users except for what
2792 : we are already changing, so we need to reset the
2793 : debug stmts and remove the cast_stmt if any. */
2794 341 : imm_use_iterator iter;
2795 676 : FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs2)
2796 335 : if (use_stmt != cast_stmt)
2797 : {
2798 168 : gcc_assert (is_gimple_debug (use_stmt));
2799 168 : gimple_debug_bind_reset_value (use_stmt);
2800 168 : update_stmt (use_stmt);
2801 341 : }
2802 341 : if (cast_stmt)
2803 : {
2804 167 : gsi = gsi_for_stmt (cast_stmt);
2805 167 : gsi_remove (&gsi, true);
2806 : }
2807 341 : update_stmt (stmt2);
2808 341 : release_ssa_name (lhs2);
2809 : }
2810 : }
2811 : }
2812 : break;
2813 :
2814 : default:
2815 : break;
2816 : }
2817 : return false;
2818 : }
2819 :
2820 : /* Given an SSA name in NAME, see if it was defined by an assignment and
2821 : set CODE to the defining code, ARG1 to the first operand on the rhs and
2822 : ARG2 to the second operand on the rhs. */
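     : /* For instance, if NAME is _5 defined by  _5 = a_2 << 3;  then
     :    *CODE becomes LSHIFT_EXPR, *ARG1 becomes a_2 and *ARG2 becomes 3.
     :    If NAME has no usable defining assignment (or is not an SSA name),
     :    *CODE is simply TREE_CODE (NAME) and *ARG1 is NAME itself. */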
2823 :
2824 : static inline void
2825 17114705 : defcodefor_name (tree name, enum tree_code *code, tree *arg1, tree *arg2)
2826 : {
2827 17114705 : gimple *def;
2828 17114705 : enum tree_code code1;
2829 17114705 : tree arg11;
2830 17114705 : tree arg21;
2831 17114705 : tree arg31;
2832 17114705 : enum gimple_rhs_class grhs_class;
2833 :
2834 17114705 : code1 = TREE_CODE (name);
2835 17114705 : arg11 = name;
2836 17114705 : arg21 = NULL_TREE;
2837 17114705 : arg31 = NULL_TREE;
2838 17114705 : grhs_class = get_gimple_rhs_class (code1);
2839 :
2840 17114705 : if (code1 == SSA_NAME)
2841 : {
2842 11417315 : def = SSA_NAME_DEF_STMT (name);
2843 :
2844 11417315 : if (def && is_gimple_assign (def)
2845 18498715 : && can_propagate_from (def))
2846 : {
2847 4884744 : code1 = gimple_assign_rhs_code (def);
2848 4884744 : arg11 = gimple_assign_rhs1 (def);
2849 4884744 : arg21 = gimple_assign_rhs2 (def);
2850 4884744 : arg31 = gimple_assign_rhs3 (def);
2851 : }
2852 : }
2853 5697390 : else if (grhs_class != GIMPLE_SINGLE_RHS)
2854 0 : code1 = ERROR_MARK;
2855 :
2856 17114705 : *code = code1;
2857 17114705 : *arg1 = arg11;
2858 17114705 : if (arg2)
2859 17097469 : *arg2 = arg21;
2860 17114705 : if (arg31)
2861 2188 : *code = ERROR_MARK;
2862 17114705 : }
2863 :
2864 :
2865 : /* Recognize rotation patterns. Return true if a transformation
2866 : applied, otherwise return false.
2867 :
2868 : We are looking for X with unsigned type T with bitsize B, OP being
2869 : +, | or ^, some type T2 wider than T. For:
2870 : (X << CNT1) OP (X >> CNT2) iff CNT1 + CNT2 == B
2871 : ((T) ((T2) X << CNT1)) OP ((T) ((T2) X >> CNT2)) iff CNT1 + CNT2 == B
2872 :
2873 : transform these into:
2874 : X r<< CNT1
2875 :
2876 : Or for:
2877 : (X << Y) OP (X >> (B - Y))
2878 : (X << (int) Y) OP (X >> (int) (B - Y))
2879 : ((T) ((T2) X << Y)) OP ((T) ((T2) X >> (B - Y)))
2880 : ((T) ((T2) X << (int) Y)) OP ((T) ((T2) X >> (int) (B - Y)))
2881 : (X << Y) | (X >> ((-Y) & (B - 1)))
2882 : (X << (int) Y) | (X >> (int) ((-Y) & (B - 1)))
2883 : ((T) ((T2) X << Y)) | ((T) ((T2) X >> ((-Y) & (B - 1))))
2884 : ((T) ((T2) X << (int) Y)) | ((T) ((T2) X >> (int) ((-Y) & (B - 1))))
2885 :
2886 : transform these into (last 2 only if ranger can prove Y < B
2887 : or Y = N * B):
2888 : X r<< Y
2889 : or
2890 : X r<< (Y & (B - 1))
2891 : The latter for the forms with T2 wider than T if ranger can't prove Y < B.
2892 :
2893 : Or for:
2894 : (X << (Y & (B - 1))) | (X >> ((-Y) & (B - 1)))
2895 : (X << (int) (Y & (B - 1))) | (X >> (int) ((-Y) & (B - 1)))
2896 : ((T) ((T2) X << (Y & (B - 1)))) | ((T) ((T2) X >> ((-Y) & (B - 1))))
2897 : ((T) ((T2) X << (int) (Y & (B - 1)))) \
2898 : | ((T) ((T2) X >> (int) ((-Y) & (B - 1))))
2899 :
2900 : transform these into:
2901 : X r<< (Y & (B - 1))
2902 :
2903 : Note, in the patterns with T2 type, the type of OP operands
2904 : might be even a signed type, but should have precision B.
2905 : Expressions with & (B - 1) should be recognized only if B is
2906 : a power of 2. */
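     : /* A concrete instance of the first form, for a 32-bit unsigned X:
     :      _1 = x_4 << 3;
     :      _2 = x_4 >> 29;
     :      _3 = _1 | _2;
     :    is turned into
     :      _3 = x_4 r<< 3;
     :    because 3 + 29 == 32 (== B). */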
2907 :
2908 : static bool
2909 10065584 : simplify_rotate (gimple_stmt_iterator *gsi)
2910 : {
2911 10065584 : gimple *stmt = gsi_stmt (*gsi);
2912 10065584 : tree arg[2], rtype, rotcnt = NULL_TREE;
2913 10065584 : tree def_arg1[2], def_arg2[2];
2914 10065584 : enum tree_code def_code[2];
2915 10065584 : tree lhs;
2916 10065584 : int i;
2917 10065584 : bool swapped_p = false;
2918 10065584 : gimple *g;
2919 10065584 : gimple *def_arg_stmt[2] = { NULL, NULL };
2920 10065584 : int wider_prec = 0;
2921 10065584 : bool add_masking = false;
2922 :
2923 10065584 : arg[0] = gimple_assign_rhs1 (stmt);
2924 10065584 : arg[1] = gimple_assign_rhs2 (stmt);
2925 10065584 : rtype = TREE_TYPE (arg[0]);
2926 :
2927 : /* Only create rotates in complete modes. Other cases are not
2928 : expanded properly. */
2929 10065584 : if (!INTEGRAL_TYPE_P (rtype)
2930 10065584 : || !type_has_mode_precision_p (rtype))
2931 1558063 : return false;
2932 :
2933 25522563 : for (i = 0; i < 2; i++)
2934 : {
2935 17015042 : defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);
2936 17015042 : if (TREE_CODE (arg[i]) == SSA_NAME)
2937 11317652 : def_arg_stmt[i] = SSA_NAME_DEF_STMT (arg[i]);
2938 : }
2939 :
2940 : /* Look through narrowing (or same precision) conversions. */
2941 7559222 : if (CONVERT_EXPR_CODE_P (def_code[0])
2942 948299 : && CONVERT_EXPR_CODE_P (def_code[1])
2943 139470 : && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[0]))
2944 116390 : && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[1]))
2945 109398 : && TYPE_PRECISION (TREE_TYPE (def_arg1[0]))
2946 109398 : == TYPE_PRECISION (TREE_TYPE (def_arg1[1]))
2947 63753 : && TYPE_PRECISION (TREE_TYPE (def_arg1[0])) >= TYPE_PRECISION (rtype)
2948 44128 : && has_single_use (arg[0])
2949 8540407 : && has_single_use (arg[1]))
2950 : {
2951 28598 : wider_prec = TYPE_PRECISION (TREE_TYPE (def_arg1[0]));
2952 85794 : for (i = 0; i < 2; i++)
2953 : {
2954 57196 : arg[i] = def_arg1[i];
2955 57196 : defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);
2956 57196 : if (TREE_CODE (arg[i]) == SSA_NAME)
2957 57196 : def_arg_stmt[i] = SSA_NAME_DEF_STMT (arg[i]);
2958 : }
2959 : }
2960 : else
2961 : {
2962 : /* Handle signed rotate; the RSHIFT_EXPR has to be done
2963 : in unsigned type but LSHIFT_EXPR could be signed. */
2964 8478923 : i = (def_code[0] == LSHIFT_EXPR || def_code[0] == RSHIFT_EXPR);
2965 7542399 : if (CONVERT_EXPR_CODE_P (def_code[i])
2966 936524 : && (def_code[1 - i] == LSHIFT_EXPR || def_code[1 - i] == RSHIFT_EXPR)
2967 28111 : && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[i]))
2968 27031 : && TYPE_PRECISION (rtype) == TYPE_PRECISION (TREE_TYPE (def_arg1[i]))
2969 8482241 : && has_single_use (arg[i]))
2970 : {
2971 1995 : arg[i] = def_arg1[i];
2972 1995 : defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);
2973 1995 : if (TREE_CODE (arg[i]) == SSA_NAME)
2974 1995 : def_arg_stmt[i] = SSA_NAME_DEF_STMT (arg[i]);
2975 : }
2976 : }
2977 :
2978 : /* One operand has to be LSHIFT_EXPR and one RSHIFT_EXPR. */
2979 8705103 : for (i = 0; i < 2; i++)
2980 8680459 : if (def_code[i] != LSHIFT_EXPR && def_code[i] != RSHIFT_EXPR)
2981 : return false;
2982 237476 : else if (!has_single_use (arg[i]))
2983 : return false;
2984 24644 : if (def_code[0] == def_code[1])
2985 : return false;
2986 :
2987 : /* If we've looked through narrowing conversions before, look through
2988 : widening conversions from unsigned type with the same precision
2989 : as rtype here. */
2990 20299 : if (TYPE_PRECISION (TREE_TYPE (def_arg1[0])) != TYPE_PRECISION (rtype))
2991 19348 : for (i = 0; i < 2; i++)
2992 : {
2993 12900 : tree tem;
2994 12900 : enum tree_code code;
2995 12900 : defcodefor_name (def_arg1[i], &code, &tem, NULL);
2996 4 : if (!CONVERT_EXPR_CODE_P (code)
2997 12896 : || !INTEGRAL_TYPE_P (TREE_TYPE (tem))
2998 25796 : || TYPE_PRECISION (TREE_TYPE (tem)) != TYPE_PRECISION (rtype))
2999 4 : return false;
3000 12896 : def_arg1[i] = tem;
3001 : }
3002 : /* Both shifts have to use the same first operand. */
3003 20295 : if (!operand_equal_for_phi_arg_p (def_arg1[0], def_arg1[1])
3004 32224 : || !types_compatible_p (TREE_TYPE (def_arg1[0]),
3005 11929 : TREE_TYPE (def_arg1[1])))
3006 : {
3007 8366 : if ((TYPE_PRECISION (TREE_TYPE (def_arg1[0]))
3008 8366 : != TYPE_PRECISION (TREE_TYPE (def_arg1[1])))
3009 8366 : || (TYPE_UNSIGNED (TREE_TYPE (def_arg1[0]))
3010 8366 : == TYPE_UNSIGNED (TREE_TYPE (def_arg1[1]))))
3011 8342 : return false;
3012 :
3013 : /* Handle signed rotate; the RSHIFT_EXPR has to be done
3014 : in an unsigned type but the LSHIFT_EXPR could be signed. */
3015 540 : i = def_code[0] != RSHIFT_EXPR;
3016 540 : if (!TYPE_UNSIGNED (TREE_TYPE (def_arg1[i])))
3017 : return false;
3018 :
3019 507 : tree tem;
3020 507 : enum tree_code code;
3021 507 : defcodefor_name (def_arg1[i], &code, &tem, NULL);
3022 304 : if (!CONVERT_EXPR_CODE_P (code)
3023 203 : || !INTEGRAL_TYPE_P (TREE_TYPE (tem))
3024 710 : || TYPE_PRECISION (TREE_TYPE (tem)) != TYPE_PRECISION (rtype))
3025 : return false;
3026 194 : def_arg1[i] = tem;
3027 194 : if (!operand_equal_for_phi_arg_p (def_arg1[0], def_arg1[1])
3028 218 : || !types_compatible_p (TREE_TYPE (def_arg1[0]),
3029 24 : TREE_TYPE (def_arg1[1])))
3030 170 : return false;
3031 : }
3032 11929 : else if (!TYPE_UNSIGNED (TREE_TYPE (def_arg1[0])))
3033 : return false;
3034 :
3035 : /* CNT1 + CNT2 == B case above. */
3036 10698 : if (tree_fits_uhwi_p (def_arg2[0])
3037 1210 : && tree_fits_uhwi_p (def_arg2[1])
3038 10698 : && tree_to_uhwi (def_arg2[0])
3039 1210 : + tree_to_uhwi (def_arg2[1]) == TYPE_PRECISION (rtype))
3040 : rotcnt = def_arg2[0];
3041 9768 : else if (TREE_CODE (def_arg2[0]) != SSA_NAME
3042 9488 : || TREE_CODE (def_arg2[1]) != SSA_NAME)
3043 : return false;
3044 : else
3045 : {
3046 9488 : tree cdef_arg1[2], cdef_arg2[2], def_arg2_alt[2];
3047 9488 : enum tree_code cdef_code[2];
3048 9488 : gimple *def_arg_alt_stmt[2] = { NULL, NULL };
3049 9488 : int check_range = 0;
3050 9488 : gimple *check_range_stmt = NULL;
3051 : /* Look through conversion of the shift count argument.
3052 : The C/C++ FE casts any shift count argument to integer_type_node.
3053 : The only problem might be if the shift count type's maximum value
3054 : is equal to or smaller than the number of bits in rtype. */
3055 28464 : for (i = 0; i < 2; i++)
3056 : {
3057 18976 : def_arg2_alt[i] = def_arg2[i];
3058 18976 : defcodefor_name (def_arg2[i], &cdef_code[i],
3059 : &cdef_arg1[i], &cdef_arg2[i]);
3060 14716 : if (CONVERT_EXPR_CODE_P (cdef_code[i])
3061 4260 : && INTEGRAL_TYPE_P (TREE_TYPE (cdef_arg1[i]))
3062 4260 : && TYPE_PRECISION (TREE_TYPE (cdef_arg1[i]))
3063 8520 : > floor_log2 (TYPE_PRECISION (rtype))
3064 23236 : && type_has_mode_precision_p (TREE_TYPE (cdef_arg1[i])))
3065 : {
3066 4260 : def_arg2_alt[i] = cdef_arg1[i];
3067 4260 : if (TREE_CODE (def_arg2[i]) == SSA_NAME)
3068 4260 : def_arg_alt_stmt[i] = SSA_NAME_DEF_STMT (def_arg2[i]);
3069 4260 : defcodefor_name (def_arg2_alt[i], &cdef_code[i],
3070 : &cdef_arg1[i], &cdef_arg2[i]);
3071 : }
3072 : else
3073 14716 : def_arg_alt_stmt[i] = def_arg_stmt[i];
3074 : }
3075 25796 : for (i = 0; i < 2; i++)
3076 : /* Check for one shift count being Y and the other B - Y,
3077 : with optional casts. */
3078 18625 : if (cdef_code[i] == MINUS_EXPR
3079 862 : && tree_fits_shwi_p (cdef_arg1[i])
3080 862 : && tree_to_shwi (cdef_arg1[i]) == TYPE_PRECISION (rtype)
3081 19447 : && TREE_CODE (cdef_arg2[i]) == SSA_NAME)
3082 : {
3083 822 : tree tem;
3084 822 : enum tree_code code;
3085 :
3086 822 : if (cdef_arg2[i] == def_arg2[1 - i]
3087 472 : || cdef_arg2[i] == def_arg2_alt[1 - i])
3088 : {
3089 350 : rotcnt = cdef_arg2[i];
3090 350 : check_range = -1;
3091 350 : if (cdef_arg2[i] == def_arg2[1 - i])
3092 350 : check_range_stmt = def_arg_stmt[1 - i];
3093 : else
3094 0 : check_range_stmt = def_arg_alt_stmt[1 - i];
3095 806 : break;
3096 : }
3097 472 : defcodefor_name (cdef_arg2[i], &code, &tem, NULL);
3098 16 : if (CONVERT_EXPR_CODE_P (code)
3099 456 : && INTEGRAL_TYPE_P (TREE_TYPE (tem))
3100 456 : && TYPE_PRECISION (TREE_TYPE (tem))
3101 912 : > floor_log2 (TYPE_PRECISION (rtype))
3102 456 : && type_has_mode_precision_p (TREE_TYPE (tem))
3103 928 : && (tem == def_arg2[1 - i]
3104 288 : || tem == def_arg2_alt[1 - i]))
3105 : {
3106 456 : rotcnt = tem;
3107 456 : check_range = -1;
3108 456 : if (tem == def_arg2[1 - i])
3109 168 : check_range_stmt = def_arg_stmt[1 - i];
3110 : else
3111 288 : check_range_stmt = def_arg_alt_stmt[1 - i];
3112 : break;
3113 : }
3114 : }
3115 : /* The above sequence isn't safe when Y is 0,
3116 : because then one of the shifts triggers undefined behavior.
3117 : This alternative is safe even for a rotation count of 0.
3118 : One shift count is Y and the other (-Y) & (B - 1).
3119 : Or one shift count is Y & (B - 1) and the other (-Y) & (B - 1). */
3120 17803 : else if (cdef_code[i] == BIT_AND_EXPR
3121 28724 : && pow2p_hwi (TYPE_PRECISION (rtype))
3122 12416 : && tree_fits_shwi_p (cdef_arg2[i])
3123 24832 : && tree_to_shwi (cdef_arg2[i])
3124 12416 : == TYPE_PRECISION (rtype) - 1
3125 12356 : && TREE_CODE (cdef_arg1[i]) == SSA_NAME
3126 30159 : && gimple_assign_rhs_code (stmt) == BIT_IOR_EXPR)
3127 : {
3128 2296 : tree tem;
3129 2296 : enum tree_code code;
3130 :
3131 2296 : defcodefor_name (cdef_arg1[i], &code, &tem, NULL);
3132 2099 : if (CONVERT_EXPR_CODE_P (code)
3133 197 : && INTEGRAL_TYPE_P (TREE_TYPE (tem))
3134 197 : && TYPE_PRECISION (TREE_TYPE (tem))
3135 394 : > floor_log2 (TYPE_PRECISION (rtype))
3136 2493 : && type_has_mode_precision_p (TREE_TYPE (tem)))
3137 197 : defcodefor_name (tem, &code, &tem, NULL);
3138 :
3139 2296 : if (code == NEGATE_EXPR)
3140 : {
3141 1525 : if (tem == def_arg2[1 - i] || tem == def_arg2_alt[1 - i])
3142 : {
3143 854 : rotcnt = tem;
3144 854 : check_range = 1;
3145 854 : if (tem == def_arg2[1 - i])
3146 846 : check_range_stmt = def_arg_stmt[1 - i];
3147 : else
3148 8 : check_range_stmt = def_arg_alt_stmt[1 - i];
3149 1511 : break;
3150 : }
3151 671 : tree tem2;
3152 671 : defcodefor_name (tem, &code, &tem2, NULL);
3153 237 : if (CONVERT_EXPR_CODE_P (code)
3154 434 : && INTEGRAL_TYPE_P (TREE_TYPE (tem2))
3155 434 : && TYPE_PRECISION (TREE_TYPE (tem2))
3156 868 : > floor_log2 (TYPE_PRECISION (rtype))
3157 1105 : && type_has_mode_precision_p (TREE_TYPE (tem2)))
3158 : {
3159 434 : if (tem2 == def_arg2[1 - i]
3160 434 : || tem2 == def_arg2_alt[1 - i])
3161 : {
3162 228 : rotcnt = tem2;
3163 228 : check_range = 1;
3164 228 : if (tem2 == def_arg2[1 - i])
3165 0 : check_range_stmt = def_arg_stmt[1 - i];
3166 : else
3167 228 : check_range_stmt = def_arg_alt_stmt[1 - i];
3168 : break;
3169 : }
3170 : }
3171 : else
3172 237 : tem2 = NULL_TREE;
3173 :
3174 443 : if (cdef_code[1 - i] == BIT_AND_EXPR
3175 430 : && tree_fits_shwi_p (cdef_arg2[1 - i])
3176 860 : && tree_to_shwi (cdef_arg2[1 - i])
3177 430 : == TYPE_PRECISION (rtype) - 1
3178 873 : && TREE_CODE (cdef_arg1[1 - i]) == SSA_NAME)
3179 : {
3180 430 : if (tem == cdef_arg1[1 - i]
3181 205 : || tem2 == cdef_arg1[1 - i])
3182 : {
3183 : rotcnt = def_arg2[1 - i];
3184 429 : break;
3185 : }
3186 193 : tree tem3;
3187 193 : defcodefor_name (cdef_arg1[1 - i], &code, &tem3, NULL);
3188 0 : if (CONVERT_EXPR_CODE_P (code)
3189 193 : && INTEGRAL_TYPE_P (TREE_TYPE (tem3))
3190 193 : && TYPE_PRECISION (TREE_TYPE (tem3))
3191 386 : > floor_log2 (TYPE_PRECISION (rtype))
3192 386 : && type_has_mode_precision_p (TREE_TYPE (tem3)))
3193 : {
3194 193 : if (tem == tem3 || tem2 == tem3)
3195 : {
3196 : rotcnt = def_arg2[1 - i];
3197 : break;
3198 : }
3199 : }
3200 : }
3201 : }
3202 : }
3203 2317 : if (check_range && wider_prec > TYPE_PRECISION (rtype))
3204 : {
3205 1533 : if (TREE_CODE (rotcnt) != SSA_NAME)
3206 573 : return false;
3207 1533 : int_range_max r;
3208 1533 : range_query *q = get_range_query (cfun);
3209 1533 : if (q == get_global_range_query ())
3210 1522 : q = enable_ranger (cfun);
3211 1533 : if (!q->range_of_expr (r, rotcnt, check_range_stmt))
3212 : {
3213 0 : if (check_range > 0)
3214 : return false;
3215 0 : r.set_varying (TREE_TYPE (rotcnt));
3216 : }
3217 1533 : int prec = TYPE_PRECISION (TREE_TYPE (rotcnt));
3218 1533 : signop sign = TYPE_SIGN (TREE_TYPE (rotcnt));
3219 1533 : wide_int min = wide_int::from (TYPE_PRECISION (rtype), prec, sign);
3220 1533 : wide_int max = wide_int::from (wider_prec - 1, prec, sign);
3221 1533 : if (check_range < 0)
3222 616 : max = min;
3223 1533 : int_range<1> r2 (TREE_TYPE (rotcnt), min, max);
3224 1533 : r.intersect (r2);
3225 1533 : if (!r.undefined_p ())
3226 : {
3227 1181 : if (check_range > 0)
3228 : {
3229 589 : int_range_max r3;
3230 1844 : for (int i = TYPE_PRECISION (rtype) + 1; i < wider_prec;
3231 1255 : i += TYPE_PRECISION (rtype))
3232 : {
3233 1255 : int j = i + TYPE_PRECISION (rtype) - 2;
3234 1255 : min = wide_int::from (i, prec, sign);
3235 1255 : max = wide_int::from (MIN (j, wider_prec - 1),
3236 1255 : prec, sign);
3237 1255 : int_range<1> r4 (TREE_TYPE (rotcnt), min, max);
3238 1255 : r3.union_ (r4);
3239 1255 : }
3240 589 : r.intersect (r3);
3241 589 : if (!r.undefined_p ())
3242 573 : return false;
3243 589 : }
3244 : add_masking = true;
3245 : }
3246 1533 : }
3247 8915 : if (rotcnt == NULL_TREE)
3248 : return false;
3249 1744 : swapped_p = i != 1;
3250 : }
3251 :
3252 2674 : if (!useless_type_conversion_p (TREE_TYPE (def_arg2[0]),
3253 2674 : TREE_TYPE (rotcnt)))
3254 : {
3255 496 : g = gimple_build_assign (make_ssa_name (TREE_TYPE (def_arg2[0])),
3256 : NOP_EXPR, rotcnt);
3257 496 : gsi_insert_before (gsi, g, GSI_SAME_STMT);
3258 496 : rotcnt = gimple_assign_lhs (g);
3259 : }
3260 2674 : if (add_masking)
3261 : {
3262 608 : g = gimple_build_assign (make_ssa_name (TREE_TYPE (rotcnt)),
3263 : BIT_AND_EXPR, rotcnt,
3264 608 : build_int_cst (TREE_TYPE (rotcnt),
3265 608 : TYPE_PRECISION (rtype) - 1));
3266 608 : gsi_insert_before (gsi, g, GSI_SAME_STMT);
3267 608 : rotcnt = gimple_assign_lhs (g);
3268 : }
3269 2674 : lhs = gimple_assign_lhs (stmt);
3270 2674 : if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
3271 1010 : lhs = make_ssa_name (TREE_TYPE (def_arg1[0]));
3272 2674 : g = gimple_build_assign (lhs,
3273 2674 : ((def_code[0] == LSHIFT_EXPR) ^ swapped_p)
3274 : ? LROTATE_EXPR : RROTATE_EXPR, def_arg1[0], rotcnt);
3275 2674 : if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
3276 : {
3277 1010 : gsi_insert_before (gsi, g, GSI_SAME_STMT);
3278 1010 : g = gimple_build_assign (gimple_assign_lhs (stmt), NOP_EXPR, lhs);
3279 : }
3280 2674 : gsi_replace (gsi, g, false);
3281 2674 : return true;
3282 : }
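/* Illustrative sketch (editor's addition, not part of this pass): two
   source-level spellings of the rotate idiom the code above is meant to
   recognize.  The function names are hypothetical and serve only this
   example.  */

#include <stdint.h>

/* "Y and B - Y" counts: matches the CNT1 + CNT2 == B / MINUS_EXPR cases
   above, but is undefined behavior in the source when n == 0.  */
static inline uint32_t
rotl32_unsafe (uint32_t x, unsigned n)
{
  return (x << n) | (x >> (32 - n));
}

/* Masked counts: matches the "Y & (B - 1)" / "(-Y) & (B - 1)" cases and is
   well defined for every n, including 0.  */
static inline uint32_t
rotl32_safe (uint32_t x, unsigned n)
{
  return (x << (n & 31)) | (x >> ((-n) & 31));
}

/* When the checks above succeed, either form is rewritten by
   simplify_rotate into a single LROTATE_EXPR (x r<< n).  */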
3283 :
3284 :
3285 : /* Check whether an array contains a valid table according to VALIDATE_FN. */
3286 : template<typename ValidateFn>
3287 : static bool
3288 16 : check_table_array (tree ctor, HOST_WIDE_INT &zero_val, unsigned bits,
3289 : ValidateFn validate_fn)
3290 : {
3291 : tree elt, idx;
3292 16 : unsigned HOST_WIDE_INT i, raw_idx = 0;
3293 16 : unsigned matched = 0;
3294 :
3295 16 : zero_val = 0;
3296 :
3297 670 : FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), i, idx, elt)
3298 : {
3299 670 : if (!tree_fits_shwi_p (idx))
3300 : return false;
3301 670 : if (!tree_fits_shwi_p (elt) && TREE_CODE (elt) != RAW_DATA_CST)
3302 : return false;
3303 :
3304 670 : unsigned HOST_WIDE_INT index = tree_to_shwi (idx);
3305 : HOST_WIDE_INT val;
3306 :
3307 670 : if (TREE_CODE (elt) == INTEGER_CST)
3308 606 : val = tree_to_shwi (elt);
3309 : else
3310 : {
3311 64 : if (raw_idx == (unsigned) RAW_DATA_LENGTH (elt))
3312 : {
3313 0 : raw_idx = 0;
3314 0 : continue;
3315 : }
3316 64 : if (TYPE_UNSIGNED (TREE_TYPE (elt)))
3317 0 : val = RAW_DATA_UCHAR_ELT (elt, raw_idx);
3318 : else
3319 64 : val = RAW_DATA_SCHAR_ELT (elt, raw_idx);
3320 64 : index += raw_idx;
3321 64 : raw_idx++;
3322 64 : i--;
3323 : }
3324 :
3325 670 : if (index > bits * 2)
3326 : return false;
3327 :
3328 670 : if (index == 0)
3329 : {
3330 16 : zero_val = val;
3331 16 : matched++;
3332 : }
3333 :
3334 670 : if (val >= 0 && val < bits && validate_fn (val, index))
3335 608 : matched++;
3336 :
3337 670 : if (matched > bits)
3338 : return true;
3339 : }
3340 :
3341 : return false;
3342 : }
3343 :
3344 : /* Check whether a string contains a valid table according to VALIDATE_FN. */
3345 : template<typename ValidateFn>
3346 : static bool
3347 4 : check_table_string (tree string, HOST_WIDE_INT &zero_val, unsigned bits,
3348 : ValidateFn validate_fn)
3349 : {
3350 4 : unsigned HOST_WIDE_INT len = TREE_STRING_LENGTH (string);
3351 4 : unsigned matched = 0;
3352 4 : const unsigned char *p = (const unsigned char *) TREE_STRING_POINTER (string);
3353 :
3354 4 : if (len < bits || len > bits * 2)
3355 : return false;
3356 :
3357 4 : zero_val = p[0];
3358 :
3359 164 : for (unsigned i = 0; i < len; i++)
3360 160 : if (p[i] < bits && validate_fn (p[i], i))
3361 160 : matched++;
3362 :
3363 4 : return matched == bits;
3364 : }
3365 :
3366 : /* Check whether CTOR contains a valid table according to VALIDATE_FN. */
3367 : template<typename ValidateFn>
3368 : static bool
3369 28 : check_table (tree ctor, tree type, HOST_WIDE_INT &zero_val, unsigned bits,
3370 : ValidateFn validate_fn)
3371 : {
3372 28 : if (TREE_CODE (ctor) == CONSTRUCTOR)
3373 16 : return check_table_array (ctor, zero_val, bits, validate_fn);
3374 : else if (TREE_CODE (ctor) == STRING_CST
3375 12 : && TYPE_PRECISION (type) == CHAR_TYPE_SIZE)
3376 4 : return check_table_string (ctor, zero_val, bits, validate_fn);
3377 : return false;
3378 : }
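/* Illustrative sketch (editor's addition, not part of this pass): how a
   caller is expected to drive check_table.  The validator receives a table
   entry's value and its index and returns whether the pair is consistent
   with the idiom being matched; the trivial "identity table" check below
   exists only to show the interface -- the real validators are the CTZ/CLZ
   lambdas in simplify_count_zeroes further down.  */

static bool
table_is_identity_p (tree ctor, tree type, HOST_WIDE_INT &zero_val,
		     unsigned bits)
{
  auto checkfn = [] (unsigned val, unsigned idx) -> bool
    {
      return val == idx;	/* Entry IDX must hold the value IDX.  */
    };
  return check_table (ctor, type, zero_val, bits, checkfn);
}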
3379 :
3380 : /* Match.pd function to match the ctz expression. */
3381 : extern bool gimple_ctz_table_index (tree, tree *, tree (*)(tree));
3382 : extern bool gimple_clz_table_index (tree, tree *, tree (*)(tree));
3383 :
3384 : /* Recognize count leading and trailing zeroes idioms.
3385 : The canonical form is array[((x & -x) * C) >> SHIFT] where C is a magic
3386 : constant which, when multiplied by a power of 2, creates a unique value
3387 : in the top 5 to 7 bits. That value is then used to index a table which
3388 : maps it to the number of trailing zeroes. The table entry for index 0
3389 : (zero_val) is recorded so an appropriate sequence can be emitted
3390 : depending on whether ctz (0) is defined on the target. */
3391 :
3392 : static bool
3393 1952799 : simplify_count_zeroes (gimple_stmt_iterator *gsi)
3394 : {
3395 1952799 : gimple *stmt = gsi_stmt (*gsi);
3396 1952799 : tree array_ref = gimple_assign_rhs1 (stmt);
3397 1952799 : tree res_ops[3];
3398 :
3399 1952799 : gcc_checking_assert (TREE_CODE (array_ref) == ARRAY_REF);
3400 :
3401 1952799 : internal_fn fn = IFN_LAST;
3402 : /* For CTZ we recognize ((x & -x) * C) >> SHIFT where the array data
3403 : represents the number of trailing zeros. */
3404 1952799 : if (gimple_ctz_table_index (TREE_OPERAND (array_ref, 1), &res_ops[0], NULL))
3405 : fn = IFN_CTZ;
3406 : /* For CLZ we recognize
3407 : x |= x >> 1;
3408 : x |= x >> 2;
3409 : x |= x >> 4;
3410 : x |= x >> 8;
3411 : x |= x >> 16;
3412 : (x * C) >> SHIFT
3413 : where 31 minus the array data represents the number of leading zeros. */
3414 1952776 : else if (gimple_clz_table_index (TREE_OPERAND (array_ref, 1), &res_ops[0],
3415 : NULL))
3416 : fn = IFN_CLZ;
3417 : else
3418 : return false;
3419 :
3420 33 : HOST_WIDE_INT zero_val;
3421 33 : tree type = TREE_TYPE (array_ref);
3422 33 : tree array = TREE_OPERAND (array_ref, 0);
3423 33 : tree input_type = TREE_TYPE (res_ops[0]);
3424 33 : unsigned input_bits = tree_to_shwi (TYPE_SIZE (input_type));
3425 :
3426 : /* Check the array element type is not wider than 32 bits and the input is
3427 : an unsigned 32-bit or 64-bit type. */
3428 33 : if (TYPE_PRECISION (type) > 32 || !TYPE_UNSIGNED (input_type))
3429 : return false;
3430 29 : if (input_bits != 32 && input_bits != 64)
3431 : return false;
3432 :
3433 29 : if (!direct_internal_fn_supported_p (fn, input_type, OPTIMIZE_FOR_BOTH))
3434 : return false;
3435 :
3436 : /* Check the lower bound of the array is zero. */
3437 29 : tree low = array_ref_low_bound (array_ref);
3438 29 : if (!low || !integer_zerop (low))
3439 0 : return false;
3440 :
3441 : /* Check the shift extracts the top 5..7 bits. */
3442 29 : unsigned shiftval = tree_to_shwi (res_ops[2]);
3443 29 : if (shiftval < input_bits - 7 || shiftval > input_bits - 5)
3444 : return false;
3445 :
3446 28 : tree ctor = ctor_for_folding (array);
3447 28 : if (!ctor)
3448 : return false;
3449 28 : unsigned HOST_WIDE_INT mulval = tree_to_uhwi (res_ops[1]);
3450 28 : if (fn == IFN_CTZ)
3451 : {
3452 494 : auto checkfn = [&](unsigned data, unsigned i) -> bool
3453 : {
3454 476 : unsigned HOST_WIDE_INT mask
3455 476 : = ((HOST_WIDE_INT_1U << (input_bits - shiftval)) - 1) << shiftval;
3456 476 : return (((mulval << data) & mask) >> shiftval) == i;
3457 18 : };
3458 18 : if (!check_table (ctor, type, zero_val, input_bits, checkfn))
3459 8 : return false;
3460 : }
3461 10 : else if (fn == IFN_CLZ)
3462 : {
3463 362 : auto checkfn = [&](unsigned data, unsigned i) -> bool
3464 : {
3465 352 : unsigned HOST_WIDE_INT mask
3466 352 : = ((HOST_WIDE_INT_1U << (input_bits - shiftval)) - 1) << shiftval;
3467 : /* The OR-cascade produces a value with all bits from 0 to the
3468 : original MSB set. Compute (1 << (data + 1)) - 1 to simulate
3469 : that value. When data + 1 equals HOST_BITS_PER_WIDE_INT
3470 : (i.e. data is the MSB position of a 64-bit input) the shift
3471 : is undefined behavior, so handle that case explicitly using
3472 : all-ones. Without this, any well-formed 64-bit DeBruijn CLZ
3473 : table is rejected because its entry for the all-ones input
3474 : correctly maps to the MSB (e.g. table[...] == 63).
3475 : PR tree-optimization/122569. */
3476 703 : unsigned HOST_WIDE_INT all_bits_below
3477 : = (data + 1 == HOST_BITS_PER_WIDE_INT)
3478 352 : ? HOST_WIDE_INT_M1U
3479 351 : : ((HOST_WIDE_INT_1U << (data + 1)) - 1);
3480 352 : return (((all_bits_below * mulval) & mask) >> shiftval) == i;
3481 10 : };
3482 10 : if (!check_table (ctor, type, zero_val, input_bits, checkfn))
3483 0 : return false;
3484 : }
3485 :
3486 20 : HOST_WIDE_INT ctz_val = -1;
3487 20 : bool zero_ok;
3488 20 : if (fn == IFN_CTZ)
3489 : {
3490 10 : ctz_val = 0;
3491 20 : zero_ok = CTZ_DEFINED_VALUE_AT_ZERO (SCALAR_INT_TYPE_MODE (input_type),
3492 : ctz_val) == 2;
3493 : }
3494 10 : else if (fn == IFN_CLZ)
3495 : {
3496 10 : ctz_val = 32;
3497 10 : zero_ok = CLZ_DEFINED_VALUE_AT_ZERO (SCALAR_INT_TYPE_MODE (input_type),
3498 : ctz_val) == 2;
3499 10 : zero_val = input_bits - 1 - zero_val;
3500 : }
3501 20 : int nargs = 2;
3502 :
3503 : /* If the input value can't be zero, don't special case ctz (0). */
3504 20 : range_query *q = get_range_query (cfun);
3505 20 : if (q == get_global_range_query ())
3506 20 : q = enable_ranger (cfun);
3507 20 : int_range_max vr;
3508 20 : if (q->range_of_expr (vr, res_ops[0], stmt)
3509 20 : && !range_includes_zero_p (vr))
3510 : {
3511 4 : zero_ok = true;
3512 4 : zero_val = 0;
3513 4 : ctz_val = 0;
3514 4 : nargs = 1;
3515 : }
3516 :
3517 20 : gimple_seq seq = NULL;
3518 20 : gimple *g;
3519 20 : gcall *call = gimple_build_call_internal (fn, nargs, res_ops[0],
3520 : nargs == 1 ? NULL_TREE
3521 36 : : build_int_cst (integer_type_node,
3522 16 : ctz_val));
3523 20 : gimple_set_location (call, gimple_location (stmt));
3524 20 : gimple_set_lhs (call, make_ssa_name (integer_type_node));
3525 20 : gimple_seq_add_stmt (&seq, call);
3526 :
3527 20 : tree prev_lhs = gimple_call_lhs (call);
3528 :
3529 20 : if (zero_ok && zero_val == ctz_val)
3530 : ;
3531 : /* Emit ctz (x) & (input_bits - 1) if ctz (0) is input_bits but we need to return 0. */
3532 6 : else if (zero_ok && zero_val == 0 && ctz_val == input_bits)
3533 : {
3534 5 : g = gimple_build_assign (make_ssa_name (integer_type_node),
3535 : BIT_AND_EXPR, prev_lhs,
3536 : build_int_cst (integer_type_node,
3537 5 : input_bits - 1));
3538 5 : gimple_set_location (g, gimple_location (stmt));
3539 5 : gimple_seq_add_stmt (&seq, g);
3540 5 : prev_lhs = gimple_assign_lhs (g);
3541 : }
3542 : /* As fallback emit a conditional move. */
3543 : else
3544 : {
3545 9 : g = gimple_build_assign (make_ssa_name (boolean_type_node), EQ_EXPR,
3546 : res_ops[0], build_zero_cst (input_type));
3547 9 : gimple_set_location (g, gimple_location (stmt));
3548 9 : gimple_seq_add_stmt (&seq, g);
3549 9 : tree cond = gimple_assign_lhs (g);
3550 9 : g = gimple_build_assign (make_ssa_name (integer_type_node),
3551 : COND_EXPR, cond,
3552 9 : build_int_cst (integer_type_node, zero_val),
3553 : prev_lhs);
3554 9 : gimple_set_location (g, gimple_location (stmt));
3555 9 : gimple_seq_add_stmt (&seq, g);
3556 9 : prev_lhs = gimple_assign_lhs (g);
3557 : }
3558 :
3559 20 : if (fn == IFN_CLZ)
3560 : {
3561 10 : g = gimple_build_assign (make_ssa_name (integer_type_node),
3562 : MINUS_EXPR,
3563 : build_int_cst (integer_type_node,
3564 10 : input_bits - 1),
3565 : prev_lhs);
3566 10 : gimple_set_location (g, gimple_location (stmt));
3567 10 : gimple_seq_add_stmt (&seq, g);
3568 10 : prev_lhs = gimple_assign_lhs (g);
3569 : }
3570 :
3571 20 : g = gimple_build_assign (gimple_assign_lhs (stmt), NOP_EXPR, prev_lhs);
3572 20 : gimple_seq_add_stmt (&seq, g);
3573 20 : gsi_replace_with_seq (gsi, seq, true);
3574 20 : return true;
3575 20 : }
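/* Illustrative sketch (editor's addition, not part of this pass): the kind
   of user code simplify_count_zeroes is aimed at -- the classic De Bruijn
   count-trailing-zeros table.  The multiplier and table are the well-known
   32-bit constants from bit-twiddling folklore; the function name is
   hypothetical.  */

#include <stdint.h>

static const unsigned char ctz_table[32] = {
  0, 1, 28, 2, 29, 14, 24, 3, 30, 22, 20, 15, 25, 17, 4, 8,
  31, 27, 13, 23, 21, 19, 16, 7, 26, 12, 18, 6, 11, 5, 10, 9
};

static inline int
my_ctz32 (uint32_t x)
{
  /* x & -x isolates the lowest set bit; the multiplication moves a unique
     5-bit pattern into the top bits, which indexes the table.  */
  return ctz_table[((x & -x) * 0x077cb531u) >> 27];
}

/* When the table contents validate, the array load above is replaced by
   .CTZ (x); ctz_table[0] (zero_val) tells the pass what the idiom returns
   for x == 0 so it can emit a mask or a conditional move if the target's
   CTZ_DEFINED_VALUE_AT_ZERO does not already match.  */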
3576 :
3577 :
3578 : /* Determine whether applying the 2 permutations (mask1 then mask2)
3579 : gives back one of the inputs. */
3580 :
3581 : static int
3582 34 : is_combined_permutation_identity (tree mask1, tree mask2)
3583 : {
3584 34 : tree mask;
3585 34 : unsigned HOST_WIDE_INT nelts, i, j;
3586 34 : bool maybe_identity1 = true;
3587 34 : bool maybe_identity2 = true;
3588 :
3589 34 : gcc_checking_assert (TREE_CODE (mask1) == VECTOR_CST
3590 : && TREE_CODE (mask2) == VECTOR_CST);
3591 :
3592 : /* For VLA masks, check for the following pattern:
3593 : v1 = VEC_PERM_EXPR (v0, ..., mask1)
3594 : v2 = VEC_PERM_EXPR (v1, ..., mask2)
3595 : -->
3596 : v2 = v0
3597 : if mask1 == mask2 == {nelts - 1, nelts - 2, ...}. */
3598 :
3599 34 : if (operand_equal_p (mask1, mask2, 0)
3600 34 : && !VECTOR_CST_NELTS (mask1).is_constant ())
3601 : {
3602 : vec_perm_builder builder;
3603 : if (tree_to_vec_perm_builder (&builder, mask1))
3604 : {
3605 : poly_uint64 nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (mask1));
3606 : vec_perm_indices sel (builder, 1, nelts);
3607 : if (sel.series_p (0, 1, nelts - 1, -1))
3608 : return 1;
3609 : }
3610 : }
3611 :
3612 34 : mask = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (mask1), mask1, mask1, mask2);
3613 34 : if (mask == NULL_TREE || TREE_CODE (mask) != VECTOR_CST)
3614 : return 0;
3615 :
3616 34 : if (!VECTOR_CST_NELTS (mask).is_constant (&nelts))
3617 : return 0;
3618 60 : for (i = 0; i < nelts; i++)
3619 : {
3620 60 : tree val = VECTOR_CST_ELT (mask, i);
3621 60 : gcc_assert (TREE_CODE (val) == INTEGER_CST);
3622 60 : j = TREE_INT_CST_LOW (val) & (2 * nelts - 1);
3623 60 : if (j == i)
3624 : maybe_identity2 = false;
3625 47 : else if (j == i + nelts)
3626 : maybe_identity1 = false;
3627 : else
3628 : return 0;
3629 : }
3630 0 : return maybe_identity1 ? 1 : maybe_identity2 ? 2 : 0;
3631 : }
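/* Illustrative sketch (editor's addition, not part of this pass): a
   concrete case the identity check above accepts, written with GCC's
   __builtin_shuffle.  The function name is hypothetical.  */

typedef int v4si __attribute__ ((vector_size (16)));

v4si
swap_pairs_twice (v4si v0)
{
  const v4si mask = { 2, 3, 0, 1 };
  v4si t = __builtin_shuffle (v0, mask);	/* VEC_PERM_EXPR <v0, v0, mask> */
  return __builtin_shuffle (t, mask);		/* VEC_PERM_EXPR <t, t, mask>  */
}

/* Composing the mask with itself yields { 0, 1, 2, 3 }, so
   is_combined_permutation_identity returns 1 and simplify_permutation below
   replaces the second shuffle with v0.  */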
3632 :
3633 : /* Combine a shuffle with its arguments. Returns true if any changes
3634 : were made. */
3635 :
3636 : static bool
3637 184475 : simplify_permutation (gimple_stmt_iterator *gsi)
3638 : {
3639 184475 : gimple *stmt = gsi_stmt (*gsi);
3640 184475 : gimple *def_stmt = NULL;
3641 184475 : tree op0, op1, op2, op3, arg0, arg1;
3642 184475 : enum tree_code code, code2 = ERROR_MARK;
3643 184475 : bool single_use_op0 = false;
3644 :
3645 184475 : gcc_checking_assert (gimple_assign_rhs_code (stmt) == VEC_PERM_EXPR);
3646 :
3647 184475 : op0 = gimple_assign_rhs1 (stmt);
3648 184475 : op1 = gimple_assign_rhs2 (stmt);
3649 184475 : op2 = gimple_assign_rhs3 (stmt);
3650 :
3651 184475 : if (TREE_CODE (op2) != VECTOR_CST)
3652 : return false;
3653 :
3654 181738 : if (TREE_CODE (op0) == VECTOR_CST)
3655 : {
3656 : code = VECTOR_CST;
3657 : arg0 = op0;
3658 : }
3659 179876 : else if (TREE_CODE (op0) == SSA_NAME)
3660 : {
3661 179876 : def_stmt = get_prop_source_stmt (op0, false, &single_use_op0);
3662 179876 : if (!def_stmt)
3663 : return false;
3664 171814 : code = gimple_assign_rhs_code (def_stmt);
3665 171814 : if (code == VIEW_CONVERT_EXPR)
3666 : {
3667 1433 : tree rhs = gimple_assign_rhs1 (def_stmt);
3668 1433 : tree name = TREE_OPERAND (rhs, 0);
3669 1433 : if (TREE_CODE (name) != SSA_NAME)
3670 : return false;
3671 1433 : if (!has_single_use (name))
3672 216 : single_use_op0 = false;
3673 : /* Here we update the def_stmt through this VIEW_CONVERT_EXPR,
3674 : but still keep the code to indicate it comes from
3675 : VIEW_CONVERT_EXPR. */
3676 1433 : def_stmt = SSA_NAME_DEF_STMT (name);
3677 1433 : if (!def_stmt || !is_gimple_assign (def_stmt))
3678 : return false;
3679 646 : if (gimple_assign_rhs_code (def_stmt) != CONSTRUCTOR)
3680 : return false;
3681 : }
3682 170519 : if (!can_propagate_from (def_stmt))
3683 : return false;
3684 21622 : arg0 = gimple_assign_rhs1 (def_stmt);
3685 : }
3686 : else
3687 : return false;
3688 :
3689 : /* Two consecutive shuffles. */
3690 21622 : if (code == VEC_PERM_EXPR)
3691 : {
3692 6451 : tree orig;
3693 6451 : int ident;
3694 :
3695 6451 : if (op0 != op1)
3696 : return false;
3697 34 : op3 = gimple_assign_rhs3 (def_stmt);
3698 34 : if (TREE_CODE (op3) != VECTOR_CST)
3699 : return false;
3700 34 : ident = is_combined_permutation_identity (op3, op2);
3701 34 : if (!ident)
3702 : return false;
3703 0 : orig = (ident == 1) ? gimple_assign_rhs1 (def_stmt)
3704 0 : : gimple_assign_rhs2 (def_stmt);
3705 0 : gimple_assign_set_rhs1 (stmt, unshare_expr (orig));
3706 0 : gimple_assign_set_rhs_code (stmt, TREE_CODE (orig));
3707 0 : gimple_set_num_ops (stmt, 2);
3708 0 : update_stmt (stmt);
3709 0 : remove_prop_source_from_use (op0);
3710 0 : return true;
3711 : }
3712 17033 : else if (code == CONSTRUCTOR
3713 17033 : || code == VECTOR_CST
3714 : || code == VIEW_CONVERT_EXPR)
3715 : {
3716 3121 : if (op0 != op1)
3717 : {
3718 2949 : if (TREE_CODE (op0) == SSA_NAME && !single_use_op0)
3719 : return false;
3720 :
3721 2746 : if (TREE_CODE (op1) == VECTOR_CST)
3722 : arg1 = op1;
3723 2246 : else if (TREE_CODE (op1) == SSA_NAME)
3724 : {
3725 2246 : gimple *def_stmt2 = get_prop_source_stmt (op1, true, NULL);
3726 2246 : if (!def_stmt2)
3727 : return false;
3728 785 : code2 = gimple_assign_rhs_code (def_stmt2);
3729 785 : if (code2 == VIEW_CONVERT_EXPR)
3730 : {
3731 4 : tree rhs = gimple_assign_rhs1 (def_stmt2);
3732 4 : tree name = TREE_OPERAND (rhs, 0);
3733 4 : if (TREE_CODE (name) != SSA_NAME)
3734 : return false;
3735 4 : if (!has_single_use (name))
3736 : return false;
3737 3 : def_stmt2 = SSA_NAME_DEF_STMT (name);
3738 3 : if (!def_stmt2 || !is_gimple_assign (def_stmt2))
3739 : return false;
3740 0 : if (gimple_assign_rhs_code (def_stmt2) != CONSTRUCTOR)
3741 : return false;
3742 : }
3743 781 : else if (code2 != CONSTRUCTOR && code2 != VECTOR_CST)
3744 : return false;
3745 651 : if (!can_propagate_from (def_stmt2))
3746 : return false;
3747 651 : arg1 = gimple_assign_rhs1 (def_stmt2);
3748 : }
3749 : else
3750 : return false;
3751 : }
3752 : else
3753 : {
3754 : /* Already used twice in this statement. */
3755 172 : if (TREE_CODE (op0) == SSA_NAME && num_imm_uses (op0) > 2)
3756 : return false;
3757 : arg1 = arg0;
3758 : }
3759 :
3760 : /* If there are any VIEW_CONVERT_EXPRs found when finding permutation
3761 : operands source, check whether it's valid to transform and prepare
3762 : the required new operands. */
3763 1249 : if (code == VIEW_CONVERT_EXPR || code2 == VIEW_CONVERT_EXPR)
3764 : {
3765 : /* Figure out the target vector type to which operands should be
3766 : converted. If both are CONSTRUCTORs, the types should be the
3767 : same; otherwise, use the type of the CONSTRUCTOR operand. */
3768 24 : tree tgt_type = NULL_TREE;
3769 24 : if (code == VIEW_CONVERT_EXPR)
3770 : {
3771 24 : gcc_assert (gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR);
3772 24 : code = CONSTRUCTOR;
3773 24 : tgt_type = TREE_TYPE (arg0);
3774 : }
3775 24 : if (code2 == VIEW_CONVERT_EXPR)
3776 : {
3777 0 : tree arg1_type = TREE_TYPE (arg1);
3778 0 : if (tgt_type == NULL_TREE)
3779 : tgt_type = arg1_type;
3780 0 : else if (tgt_type != arg1_type)
3781 23 : return false;
3782 : }
3783 :
3784 24 : if (!VECTOR_TYPE_P (tgt_type))
3785 : return false;
3786 24 : tree op2_type = TREE_TYPE (op2);
3787 :
3788 : /* Figure out the shrink factor. */
3789 24 : poly_uint64 tgt_units = TYPE_VECTOR_SUBPARTS (tgt_type);
3790 24 : poly_uint64 op2_units = TYPE_VECTOR_SUBPARTS (op2_type);
3791 24 : if (maybe_gt (tgt_units, op2_units))
3792 : return false;
3793 24 : unsigned int factor;
3794 47 : if (!constant_multiple_p (op2_units, tgt_units, &factor))
3795 : return false;
3796 :
3797 : /* Build the new permutation control vector as target vector. */
3798 24 : vec_perm_builder builder;
3799 24 : if (!tree_to_vec_perm_builder (&builder, op2))
3800 : return false;
3801 24 : vec_perm_indices indices (builder, 2, op2_units);
3802 24 : vec_perm_indices new_indices;
3803 24 : if (new_indices.new_shrunk_vector (indices, factor))
3804 : {
3805 1 : tree mask_type = tgt_type;
3806 1 : if (!VECTOR_INTEGER_TYPE_P (mask_type))
3807 : {
3808 0 : tree elem_type = TREE_TYPE (mask_type);
3809 0 : unsigned elem_size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));
3810 0 : tree int_type = build_nonstandard_integer_type (elem_size, 0);
3811 0 : mask_type = build_vector_type (int_type, tgt_units);
3812 : }
3813 1 : op2 = vec_perm_indices_to_tree (mask_type, new_indices);
3814 : }
3815 : else
3816 23 : return false;
3817 :
3818 : /* Convert the VECTOR_CST to the appropriate vector type. */
3819 1 : if (tgt_type != TREE_TYPE (arg0))
3820 0 : arg0 = fold_build1 (VIEW_CONVERT_EXPR, tgt_type, arg0);
3821 1 : else if (tgt_type != TREE_TYPE (arg1))
3822 0 : arg1 = fold_build1 (VIEW_CONVERT_EXPR, tgt_type, arg1);
3823 47 : }
3824 :
3825 : /* Any VIEW_CONVERT_EXPR should have been updated to CONSTRUCTOR above. */
3826 1226 : gcc_assert (code == CONSTRUCTOR || code == VECTOR_CST);
3827 :
3828 : /* Shuffle of a constructor. */
3829 1226 : tree res_type
3830 1226 : = build_vector_type (TREE_TYPE (TREE_TYPE (arg0)),
3831 1226 : TYPE_VECTOR_SUBPARTS (TREE_TYPE (op2)));
3832 1226 : tree opt = fold_ternary (VEC_PERM_EXPR, res_type, arg0, arg1, op2);
3833 1226 : if (!opt
3834 280 : || (TREE_CODE (opt) != CONSTRUCTOR && TREE_CODE (opt) != VECTOR_CST))
3835 : return false;
3836 : /* If a VIEW_CONVERT_EXPR was found above, we need one explicit conversion. */
3837 280 : if (res_type != TREE_TYPE (op0))
3838 : {
3839 1 : tree name = make_ssa_name (TREE_TYPE (opt));
3840 1 : gimple *ass_stmt = gimple_build_assign (name, opt);
3841 1 : gsi_insert_before (gsi, ass_stmt, GSI_SAME_STMT);
3842 1 : opt = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op0), name);
3843 : }
3844 280 : gimple_assign_set_rhs_from_tree (gsi, opt);
3845 280 : update_stmt (gsi_stmt (*gsi));
3846 280 : if (TREE_CODE (op0) == SSA_NAME)
3847 1 : remove_prop_source_from_use (op0);
3848 280 : if (op0 != op1 && TREE_CODE (op1) == SSA_NAME)
3849 0 : remove_prop_source_from_use (op1);
3850 280 : return true;
3851 : }
3852 :
3853 : return false;
3854 : }
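/* Illustrative sketch (editor's addition, not part of this pass): the
   shuffle-of-a-CONSTRUCTOR case handled above.  Names are hypothetical.  */

typedef float v4sf __attribute__ ((vector_size (16)));
typedef int v4si_mask __attribute__ ((vector_size (16)));

v4sf
build_and_reverse (float a, float b, float c, float d)
{
  v4sf v = { a, b, c, d };		/* CONSTRUCTOR */
  const v4si_mask mask = { 3, 2, 1, 0 };
  return __builtin_shuffle (v, mask);	/* VEC_PERM_EXPR, constant mask */
}

/* fold_ternary permutes the CONSTRUCTOR elements directly, so the shuffle
   can be replaced by the reordered CONSTRUCTOR { d, c, b, a } and the
   original construction of V becomes dead.  */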
3855 :
3856 : /* Get the BIT_FIELD_REF definition of VAL, if any, looking through
3857 : conversions with code CONV_CODE, updating CONV_CODE if it is still
3858 : ERROR_MARK. Return NULL_TREE if no such matching def was found. */
3859 :
3860 : static tree
3861 401200 : get_bit_field_ref_def (tree val, enum tree_code &conv_code)
3862 : {
3863 401200 : if (TREE_CODE (val) != SSA_NAME)
3864 : return NULL_TREE;
3865 375512 : gimple *def_stmt = get_prop_source_stmt (val, false, NULL);
3866 375512 : if (!def_stmt)
3867 : return NULL_TREE;
3868 301258 : enum tree_code code = gimple_assign_rhs_code (def_stmt);
3869 301258 : if (code == FLOAT_EXPR
3870 301258 : || code == FIX_TRUNC_EXPR
3871 : || CONVERT_EXPR_CODE_P (code))
3872 : {
3873 180109 : tree op1 = gimple_assign_rhs1 (def_stmt);
3874 180109 : if (conv_code == ERROR_MARK)
3875 86209 : conv_code = code;
3876 93900 : else if (conv_code != code)
3877 : return NULL_TREE;
3878 180084 : if (TREE_CODE (op1) != SSA_NAME)
3879 : return NULL_TREE;
3880 74204 : def_stmt = SSA_NAME_DEF_STMT (op1);
3881 74204 : if (! is_gimple_assign (def_stmt))
3882 : return NULL_TREE;
3883 59209 : code = gimple_assign_rhs_code (def_stmt);
3884 : }
3885 180358 : if (code != BIT_FIELD_REF)
3886 : return NULL_TREE;
3887 23202 : return gimple_assign_rhs1 (def_stmt);
3888 : }
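/* Illustrative sketch (editor's addition, not part of this pass): the kind
   of extract-plus-convert chain get_bit_field_ref_def looks through.  Each
   x[i] below becomes a BIT_FIELD_REF and each (float) cast a FLOAT_EXPR;
   the helper returns the BIT_FIELD_REF while recording FLOAT_EXPR in
   CONV_CODE, so simplify_vector_constructor can treat the whole
   CONSTRUCTOR as a converted permutation of X.  Names are hypothetical.  */

typedef int v4si_src __attribute__ ((vector_size (16)));
typedef float v4sf_dst __attribute__ ((vector_size (16)));

v4sf_dst
convert_elements (v4si_src x)
{
  v4sf_dst r = { (float) x[0], (float) x[1], (float) x[2], (float) x[3] };
  return r;
}

/* With maybe_ident true and conv_code == FLOAT_EXPR this becomes a single
   vector int-to-float conversion on targets that support it.  */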
3889 :
3890 : /* Try to rewrite a vector CONSTRUCTOR of element extracts as a VEC_PERM_EXPR or a vector conversion. Returns true if any changes were made. */
3891 :
3892 : static bool
3893 152307 : simplify_vector_constructor (gimple_stmt_iterator *gsi)
3894 : {
3895 152307 : gimple *stmt = gsi_stmt (*gsi);
3896 152307 : tree op, orig[2], type;
3897 152307 : unsigned i;
3898 152307 : unsigned HOST_WIDE_INT nelts;
3899 152307 : unsigned HOST_WIDE_INT refnelts;
3900 152307 : enum tree_code conv_code;
3901 152307 : constructor_elt *elt;
3902 :
3903 152307 : op = gimple_assign_rhs1 (stmt);
3904 152307 : type = TREE_TYPE (op);
3905 152307 : gcc_checking_assert (TREE_CODE (op) == CONSTRUCTOR
3906 : && TREE_CODE (type) == VECTOR_TYPE);
3907 :
3908 152307 : if (!TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts))
3909 : return false;
3910 :
3911 152307 : orig[0] = NULL;
3912 152307 : orig[1] = NULL;
3913 152307 : tree orig_elem_type[2] = {};
3914 152307 : conv_code = ERROR_MARK;
3915 152307 : bool maybe_ident = true;
3916 152307 : bool maybe_blend[2] = { true, true };
3917 152307 : tree one_constant = NULL_TREE;
3918 152307 : tree one_nonconstant = NULL_TREE;
3919 152307 : tree subelt;
3920 152307 : auto_vec<tree> constants;
3921 152307 : constants.safe_grow_cleared (nelts, true);
3922 152307 : auto_vec<std::pair<unsigned, unsigned>, 64> elts;
3923 152307 : unsigned int tsubelts = 0;
3924 433015 : FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (op), i, elt)
3925 : {
3926 401200 : tree ref, op1;
3927 401200 : unsigned int elem, src_elem_size;
3928 401200 : unsigned HOST_WIDE_INT nsubelts = 1;
3929 :
3930 401200 : if (i >= nelts)
3931 152307 : return false;
3932 :
3933 : /* Look for elements extracted and possibly converted from
3934 : another vector. */
3935 401200 : op1 = get_bit_field_ref_def (elt->value, conv_code);
3936 401200 : if (op1
3937 23202 : && TREE_CODE ((ref = TREE_OPERAND (op1, 0))) == SSA_NAME
3938 5176 : && VECTOR_TYPE_P (TREE_TYPE (ref))
3939 5169 : && (tree_nop_conversion_p (TREE_TYPE (op1),
3940 5169 : TREE_TYPE (TREE_TYPE (ref)))
3941 840 : || (VECTOR_TYPE_P (TREE_TYPE (op1))
3942 76 : && tree_nop_conversion_p (TREE_TYPE (TREE_TYPE (op1)),
3943 76 : TREE_TYPE (TREE_TYPE (ref)))
3944 76 : && TYPE_VECTOR_SUBPARTS (TREE_TYPE (op1))
3945 76 : .is_constant (&nsubelts)))
3946 4405 : && constant_multiple_p (bit_field_size (op1), nsubelts,
3947 : &src_elem_size)
3948 405605 : && constant_multiple_p (bit_field_offset (op1), src_elem_size, &elem)
3949 405605 : && TYPE_VECTOR_SUBPARTS (TREE_TYPE (ref)).is_constant (&refnelts))
3950 : {
3951 : unsigned int j;
3952 4702 : for (j = 0; j < 2; ++j)
3953 : {
3954 4683 : if (!orig[j])
3955 : {
3956 2073 : if (j == 0
3957 2275 : || useless_type_conversion_p (TREE_TYPE (orig[0]),
3958 202 : TREE_TYPE (ref)))
3959 : break;
3960 : }
3961 2610 : else if (ref == orig[j])
3962 : break;
3963 : }
3964 : /* Found a suitable vector element. */
3965 4405 : if (j < 2)
3966 : {
3967 4386 : orig[j] = ref;
3968 : /* Track what element type was actually extracted (which may
3969 : differ in signedness from the vector's element type due to
3970 : tree_nop_conversion_p). */
3971 4386 : if (!orig_elem_type[j])
3972 2067 : orig_elem_type[j] = TREE_TYPE (op1);
3973 4386 : if (elem != i || j != 0)
3974 1965 : maybe_ident = false;
3975 4386 : if (elem != i)
3976 1908 : maybe_blend[j] = false;
3977 8907 : for (unsigned int k = 0; k < nsubelts; ++k)
3978 4521 : elts.safe_push (std::make_pair (j, elem + k));
3979 4386 : tsubelts += nsubelts;
3980 4386 : continue;
3981 4386 : }
3982 : /* Else fallthru. */
3983 : }
3984 : /* Handle elements not extracted from a vector.
3985 : 1. constants by permuting with constant vector
3986 : 2. a unique non-constant element by permuting with a splat vector */
3987 396814 : if (orig[1]
3988 245604 : && orig[1] != error_mark_node)
3989 : return false;
3990 396797 : orig[1] = error_mark_node;
3991 396797 : if (VECTOR_TYPE_P (TREE_TYPE (elt->value))
3992 396797 : && !TYPE_VECTOR_SUBPARTS (TREE_TYPE (elt->value))
3993 3122 : .is_constant (&nsubelts))
3994 : return false;
3995 396797 : if (CONSTANT_CLASS_P (elt->value))
3996 : {
3997 25684 : if (one_nonconstant)
3998 : return false;
3999 17361 : if (!one_constant)
4000 8103 : one_constant = TREE_CODE (elt->value) == VECTOR_CST
4001 8103 : ? VECTOR_CST_ELT (elt->value, 0)
4002 : : elt->value;
4003 17361 : if (TREE_CODE (elt->value) == VECTOR_CST)
4004 : {
4005 347 : for (unsigned int k = 0; k < nsubelts; k++)
4006 255 : constants[tsubelts + k] = VECTOR_CST_ELT (elt->value, k);
4007 : }
4008 : else
4009 17269 : constants[tsubelts] = elt->value;
4010 : }
4011 : else
4012 : {
4013 371113 : if (one_constant)
4014 : return false;
4015 363298 : subelt = VECTOR_TYPE_P (TREE_TYPE (elt->value))
4016 363298 : ? ssa_uniform_vector_p (elt->value)
4017 : : elt->value;
4018 363298 : if (!subelt)
4019 : return false;
4020 360370 : if (!one_nonconstant)
4021 : one_nonconstant = subelt;
4022 220191 : else if (!operand_equal_p (one_nonconstant, subelt, 0))
4023 : return false;
4024 : }
4025 552809 : for (unsigned int k = 0; k < nsubelts; ++k)
4026 276487 : elts.safe_push (std::make_pair (1, tsubelts + k));
4027 276322 : tsubelts += nsubelts;
4028 276322 : maybe_ident = false;
4029 : }
4030 :
4031 63630 : if (elts.length () < nelts)
4032 : return false;
4033 :
4034 30567 : if (! orig[0]
4035 30567 : || ! VECTOR_TYPE_P (TREE_TYPE (orig[0])))
4036 : return false;
4037 1369 : refnelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (orig[0])).to_constant ();
4038 : /* We currently do not handle larger destination vectors. */
4039 1369 : if (refnelts < nelts)
4040 : return false;
4041 :
4042 : /* Determine the element type for the conversion source.
4043 : As orig_elem_type keeps track of the original type, check
4044 : if we need to perform a sign swap after permuting.
4045 : We need to be able to construct a vector type from the element
4046 : type, which is not possible for e.g. BitInt or pointers,
4047 : so pun with an integer type if needed. */
4048 1208 : tree perm_eltype = TREE_TYPE (TREE_TYPE (orig[0]));
4049 1208 : bool sign_change_p = false;
4050 1208 : if (conv_code != ERROR_MARK
4051 288 : && orig_elem_type[0]
4052 1496 : && TYPE_SIGN (orig_elem_type[0]) != TYPE_SIGN (perm_eltype))
4053 : {
4054 38 : perm_eltype = signed_or_unsigned_type_for
4055 38 : (TYPE_UNSIGNED (orig_elem_type[0]), perm_eltype);
4056 38 : sign_change_p = true;
4057 : }
4058 1208 : tree conv_src_type = build_vector_type (perm_eltype, nelts);
4059 :
4060 1208 : if (maybe_ident)
4061 : {
4062 : /* When there is no conversion, use the target type directly. */
4063 440 : if (conv_code == ERROR_MARK && nelts != refnelts)
4064 440 : conv_src_type = type;
4065 440 : if (conv_code != ERROR_MARK
4066 440 : && !supportable_convert_operation (conv_code, type, conv_src_type,
4067 : &conv_code))
4068 : {
4069 : /* Only a few targets implement direct conversion patterns, so try
4070 : some simple special cases via VEC_[UN]PACK[_FLOAT]_LO_EXPR. */
4071 46 : optab optab;
4072 46 : insn_code icode;
4073 46 : tree halfvectype, dblvectype;
4074 46 : enum tree_code unpack_op;
4075 :
4076 46 : if (!BYTES_BIG_ENDIAN)
4077 84 : unpack_op = (FLOAT_TYPE_P (TREE_TYPE (type))
4078 46 : ? VEC_UNPACK_FLOAT_LO_EXPR
4079 : : VEC_UNPACK_LO_EXPR);
4080 : else
4081 : unpack_op = (FLOAT_TYPE_P (TREE_TYPE (type))
4082 : ? VEC_UNPACK_FLOAT_HI_EXPR
4083 : : VEC_UNPACK_HI_EXPR);
4084 :
4085 : /* Conversions between DFP and FP have no special tree code
4086 : but we cannot handle those since all relevant vector conversion
4087 : optabs only have a single mode. */
4088 6 : if (CONVERT_EXPR_CODE_P (conv_code)
4089 40 : && FLOAT_TYPE_P (TREE_TYPE (type))
4090 50 : && (DECIMAL_FLOAT_TYPE_P (TREE_TYPE (type))
4091 2 : != DECIMAL_FLOAT_TYPE_P (TREE_TYPE (conv_src_type))))
4092 : return false;
4093 :
4094 6 : if (CONVERT_EXPR_CODE_P (conv_code)
4095 39 : && (2 * TYPE_PRECISION (TREE_TYPE (TREE_TYPE (orig[0])))
4096 39 : == TYPE_PRECISION (TREE_TYPE (type)))
4097 0 : && orig_elem_type[0]
4098 0 : && useless_type_conversion_p (orig_elem_type[0],
4099 0 : TREE_TYPE (TREE_TYPE (orig[0])))
4100 0 : && mode_for_vector (as_a <scalar_mode>
4101 0 : (TYPE_MODE (TREE_TYPE (TREE_TYPE (orig[0])))),
4102 0 : nelts * 2).exists ()
4103 0 : && (dblvectype
4104 0 : = build_vector_type (TREE_TYPE (TREE_TYPE (orig[0])),
4105 0 : nelts * 2))
4106 : /* Only use it for vector modes or for vector booleans
4107 : represented as scalar bitmasks. See PR95528. */
4108 0 : && (VECTOR_MODE_P (TYPE_MODE (dblvectype))
4109 0 : || VECTOR_BOOLEAN_TYPE_P (dblvectype))
4110 0 : && (optab = optab_for_tree_code (unpack_op,
4111 : dblvectype,
4112 : optab_default))
4113 0 : && ((icode = optab_handler (optab, TYPE_MODE (dblvectype)))
4114 : != CODE_FOR_nothing)
4115 45 : && (insn_data[icode].operand[0].mode == TYPE_MODE (type)))
4116 : {
4117 0 : gimple_seq stmts = NULL;
4118 0 : tree dbl;
4119 0 : if (refnelts == nelts)
4120 : {
4121 : /* ??? Paradoxical subregs don't exist, so insert into
4122 : the lower half of a wider zero vector. */
4123 0 : dbl = gimple_build (&stmts, BIT_INSERT_EXPR, dblvectype,
4124 : build_zero_cst (dblvectype), orig[0],
4125 0 : bitsize_zero_node);
4126 : }
4127 0 : else if (refnelts == 2 * nelts)
4128 : dbl = orig[0];
4129 : else
4130 0 : dbl = gimple_build (&stmts, BIT_FIELD_REF, dblvectype,
4131 0 : orig[0], TYPE_SIZE (dblvectype),
4132 0 : bitsize_zero_node);
4133 0 : gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
4134 0 : gimple_assign_set_rhs_with_ops (gsi, unpack_op, dbl);
4135 : }
4136 6 : else if (CONVERT_EXPR_CODE_P (conv_code)
4137 39 : && (TYPE_PRECISION (TREE_TYPE (TREE_TYPE (orig[0])))
4138 39 : == 2 * TYPE_PRECISION (TREE_TYPE (type)))
4139 1 : && orig_elem_type[0]
4140 1 : && useless_type_conversion_p (orig_elem_type[0],
4141 1 : TREE_TYPE (TREE_TYPE (orig[0])))
4142 1 : && mode_for_vector (as_a <scalar_mode>
4143 1 : (TYPE_MODE
4144 : (TREE_TYPE (TREE_TYPE (orig[0])))),
4145 2 : nelts / 2).exists ()
4146 1 : && (halfvectype
4147 1 : = build_vector_type (TREE_TYPE (TREE_TYPE (orig[0])),
4148 1 : nelts / 2))
4149 : /* Only use it for vector modes or for vector booleans
4150 : represented as scalar bitmasks. See PR95528. */
4151 1 : && (VECTOR_MODE_P (TYPE_MODE (halfvectype))
4152 0 : || VECTOR_BOOLEAN_TYPE_P (halfvectype))
4153 1 : && (optab = optab_for_tree_code (VEC_PACK_TRUNC_EXPR,
4154 : halfvectype,
4155 : optab_default))
4156 1 : && ((icode = optab_handler (optab, TYPE_MODE (halfvectype)))
4157 : != CODE_FOR_nothing)
4158 46 : && (insn_data[icode].operand[0].mode == TYPE_MODE (type)))
4159 : {
4160 0 : gimple_seq stmts = NULL;
4161 0 : tree low = gimple_build (&stmts, BIT_FIELD_REF, halfvectype,
4162 0 : orig[0], TYPE_SIZE (halfvectype),
4163 0 : bitsize_zero_node);
4164 0 : tree hig = gimple_build (&stmts, BIT_FIELD_REF, halfvectype,
4165 0 : orig[0], TYPE_SIZE (halfvectype),
4166 0 : TYPE_SIZE (halfvectype));
4167 0 : gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
4168 0 : gimple_assign_set_rhs_with_ops (gsi, VEC_PACK_TRUNC_EXPR,
4169 : low, hig);
4170 : }
4171 : else
4172 45 : return false;
4173 0 : update_stmt (gsi_stmt (*gsi));
4174 0 : return true;
4175 : }
4176 394 : if (nelts != refnelts)
4177 : {
4178 20 : gassign *lowpart
4179 20 : = gimple_build_assign (make_ssa_name (conv_src_type),
4180 : build3 (BIT_FIELD_REF, conv_src_type,
4181 20 : orig[0], TYPE_SIZE (conv_src_type),
4182 : bitsize_zero_node));
4183 20 : gsi_insert_before (gsi, lowpart, GSI_SAME_STMT);
4184 20 : orig[0] = gimple_assign_lhs (lowpart);
4185 : }
4186 374 : else if (sign_change_p)
4187 : {
4188 0 : gassign *conv
4189 0 : = gimple_build_assign (make_ssa_name (conv_src_type),
4190 : build1 (VIEW_CONVERT_EXPR, conv_src_type,
4191 : orig[0]));
4192 0 : gsi_insert_before (gsi, conv, GSI_SAME_STMT);
4193 0 : orig[0] = gimple_assign_lhs (conv);
4194 : }
4195 394 : if (conv_code == ERROR_MARK)
4196 : {
4197 377 : tree src_type = TREE_TYPE (orig[0]);
4198 377 : if (!useless_type_conversion_p (type, src_type))
4199 : {
4200 0 : gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (type),
4201 : TYPE_VECTOR_SUBPARTS (src_type))
4202 : && tree_nop_conversion_p (TREE_TYPE (type),
4203 : TREE_TYPE (src_type)));
4204 0 : tree rhs = build1 (VIEW_CONVERT_EXPR, type, orig[0]);
4205 0 : orig[0] = make_ssa_name (type);
4206 0 : gassign *assign = gimple_build_assign (orig[0], rhs);
4207 0 : gsi_insert_before (gsi, assign, GSI_SAME_STMT);
4208 : }
4209 377 : gimple_assign_set_rhs_from_tree (gsi, orig[0]);
4210 : }
4211 : else
4212 17 : gimple_assign_set_rhs_with_ops (gsi, conv_code, orig[0],
4213 : NULL_TREE, NULL_TREE);
4214 : }
4215 : else
4216 : {
4217 : /* If we combine a vector with a non-vector, avoid cases where
4218 : we would obviously end up with more GIMPLE stmts: namely when we
4219 : will not later fold this to a single insert into the vector and
4220 : the original had a single extract. See PR92819. */
4221 768 : if (nelts == 2
4222 521 : && refnelts > 2
4223 159 : && orig[1] == error_mark_node
4224 34 : && !maybe_blend[0])
4225 228 : return false;
4226 740 : tree mask_type, perm_type;
4227 740 : perm_type = TREE_TYPE (orig[0]);
4228 740 : if (conv_code != ERROR_MARK
4229 740 : && !supportable_convert_operation (conv_code, type, conv_src_type,
4230 : &conv_code))
4231 : return false;
4232 :
4233 : /* Now that we know the number of elements of the source, build the
4234 : permute vector.
4235 : ??? When the second vector has constant values we can shuffle
4236 : it and its source indexes to make the permutation supported.
4237 : For now it mimics a blend. */
4238 581 : vec_perm_builder sel (refnelts, refnelts, 1);
4239 581 : bool all_same_p = true;
4240 5820 : for (i = 0; i < elts.length (); ++i)
4241 : {
4242 2329 : sel.quick_push (elts[i].second + elts[i].first * refnelts);
4243 2329 : all_same_p &= known_eq (sel[i], sel[0]);
4244 : }
4245 : /* And fill the tail with "something". It's really don't care,
4246 : and ideally we'd allow VEC_PERM to have a smaller destination
4247 : vector. As a heuristic:
4248 :
4249 : (a) if what we have so far duplicates a single element, make the
4250 : tail do the same
4251 :
4252 : (b) otherwise preserve a uniform orig[0]. This facilitates
4253 : later pattern-matching of VEC_PERM_EXPR to a BIT_INSERT_EXPR. */
4254 1260 : for (; i < refnelts; ++i)
4255 1358 : sel.quick_push (all_same_p
4256 2037 : ? sel[0]
4257 92 : : (elts[0].second == 0 && elts[0].first == 0
4258 1076 : ? 0 : refnelts) + i);
4259 784 : vec_perm_indices indices (sel, orig[1] ? 2 : 1, refnelts);
4260 581 : machine_mode vmode = TYPE_MODE (perm_type);
4261 581 : if ((cfun->curr_properties & PROP_gimple_lvec)
4262 581 : && !can_vec_perm_const_p (vmode, vmode, indices))
4263 : return false;
4264 540 : mask_type = build_vector_type (ssizetype, refnelts);
4265 540 : tree op2 = vec_perm_indices_to_tree (mask_type, indices);
4266 540 : bool converted_orig1 = false;
4267 540 : gimple_seq stmts = NULL;
4268 540 : if (!orig[1])
4269 190 : orig[1] = orig[0];
4270 350 : else if (orig[1] == error_mark_node
4271 211 : && one_nonconstant)
4272 : {
4273 : /* ??? We can see if we can safely convert to the original
4274 : element type. */
4275 86 : converted_orig1 = conv_code != ERROR_MARK;
4276 86 : tree target_type = converted_orig1 ? type : perm_type;
4277 86 : tree nonconstant_for_splat = one_nonconstant;
4278 : /* If there's a nop conversion between the target element type and
4279 : the nonconstant's type, convert it. */
4280 86 : if (!useless_type_conversion_p (TREE_TYPE (target_type),
4281 86 : TREE_TYPE (one_nonconstant)))
4282 0 : nonconstant_for_splat
4283 0 : = gimple_build (&stmts, NOP_EXPR, TREE_TYPE (target_type),
4284 : one_nonconstant);
4285 86 : orig[1] = gimple_build_vector_from_val (&stmts, UNKNOWN_LOCATION,
4286 : target_type,
4287 : nonconstant_for_splat);
4288 86 : }
4289 264 : else if (orig[1] == error_mark_node)
4290 : {
4291 : /* ??? See if we can convert the vector to the original type. */
4292 125 : converted_orig1 = conv_code != ERROR_MARK;
4293 125 : unsigned n = converted_orig1 ? nelts : refnelts;
4294 108 : tree target_type = converted_orig1 ? type : perm_type;
4295 125 : tree_vector_builder vec (target_type, n, 1);
4296 1005 : for (unsigned i = 0; i < n; ++i)
4297 1684 : if (i < nelts && constants[i])
4298 : {
4299 449 : tree constant = constants[i];
4300 : /* If there's a nop conversion, convert the constant. */
4301 449 : if (!useless_type_conversion_p (TREE_TYPE (target_type),
4302 449 : TREE_TYPE (constant)))
4303 2 : constant = fold_convert (TREE_TYPE (target_type), constant);
4304 449 : vec.quick_push (constant);
4305 : }
4306 : else
4307 : {
4308 : /* ??? Push a don't-care value. */
4309 431 : tree constant = one_constant;
4310 431 : if (!useless_type_conversion_p (TREE_TYPE (target_type),
4311 431 : TREE_TYPE (constant)))
4312 2 : constant = fold_convert (TREE_TYPE (target_type), constant);
4313 431 : vec.quick_push (constant);
4314 : }
4315 125 : orig[1] = vec.build ();
4316 125 : }
4317 401 : tree blend_op2 = NULL_TREE;
4318 401 : if (converted_orig1)
4319 : {
4320 : /* Make sure we can do a blend in the target type. */
4321 19 : vec_perm_builder sel (nelts, nelts, 1);
4322 87 : for (i = 0; i < elts.length (); ++i)
4323 68 : sel.quick_push (elts[i].first
4324 68 : ? elts[i].second + nelts : i);
4325 19 : vec_perm_indices indices (sel, 2, nelts);
4326 19 : machine_mode vmode = TYPE_MODE (type);
4327 19 : if ((cfun->curr_properties & PROP_gimple_lvec)
4328 19 : && !can_vec_perm_const_p (vmode, vmode, indices))
4329 0 : return false;
4330 19 : mask_type = build_vector_type (ssizetype, nelts);
4331 19 : blend_op2 = vec_perm_indices_to_tree (mask_type, indices);
4332 19 : }
4333 :
4334 : /* For a real orig[1] (no splat, constant etc.) we might need to
4335 : nop-convert it. Do so here. */
4336 540 : if (orig[1] && orig[1] != error_mark_node
4337 540 : && !useless_type_conversion_p (perm_type, TREE_TYPE (orig[1]))
4338 559 : && tree_nop_conversion_p (TREE_TYPE (perm_type),
4339 19 : TREE_TYPE (TREE_TYPE (orig[1]))))
4340 0 : orig[1] = gimple_build (&stmts, VIEW_CONVERT_EXPR, perm_type,
4341 : orig[1]);
4342 :
4343 540 : tree orig1_for_perm
4344 540 : = converted_orig1 ? build_zero_cst (perm_type) : orig[1];
4345 540 : tree res = gimple_build (&stmts, VEC_PERM_EXPR, perm_type,
4346 : orig[0], orig1_for_perm, op2);
4347 : /* If we're building a smaller vector, extract the element
4348 : with the proper type. */
4349 540 : if (nelts != refnelts)
4350 298 : res = gimple_build (&stmts, BIT_FIELD_REF,
4351 : conv_code != ERROR_MARK ? conv_src_type : type,
4352 : res,
4353 149 : TYPE_SIZE (conv_code != ERROR_MARK ? conv_src_type
4354 : : type),
4355 149 : bitsize_zero_node);
4356 : /* Otherwise, we can still have an intermediate sign change.
4357 : ??? In that case we have two subsequent conversions.
4358 : We should be able to merge them. */
4359 391 : else if (sign_change_p)
4360 14 : res = gimple_build (&stmts, VIEW_CONVERT_EXPR, conv_src_type, res);
4361 : /* Finally, apply the conversion. */
4362 540 : if (conv_code != ERROR_MARK)
4363 52 : res = gimple_build (&stmts, conv_code, type, res);
4364 488 : else if (!useless_type_conversion_p (type, TREE_TYPE (res)))
4365 : {
4366 3 : gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (type),
4367 : TYPE_VECTOR_SUBPARTS (perm_type))
4368 : && tree_nop_conversion_p (TREE_TYPE (type),
4369 : TREE_TYPE (perm_type)));
4370 3 : res = gimple_build (&stmts, VIEW_CONVERT_EXPR, type, res);
4371 : }
4372 : /* Blend in the actual constant. */
4373 540 : if (converted_orig1)
4374 19 : res = gimple_build (&stmts, VEC_PERM_EXPR, type,
4375 19 : res, orig[1], blend_op2);
4376 540 : gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
4377 540 : gimple_assign_set_rhs_with_ops (gsi, SSA_NAME, res);
4378 581 : }
4379 934 : update_stmt (gsi_stmt (*gsi));
4380 934 : return true;
4381 152307 : }
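/* Illustrative sketch (editor's addition, not part of this pass): a
   CONSTRUCTOR of element extracts that the function above rewrites as a
   VEC_PERM_EXPR.  The function name is hypothetical.  */

typedef int v4si_ctor __attribute__ ((vector_size (16)));

v4si_ctor
reverse_elements (v4si_ctor x)
{
  /* Each x[i] is a BIT_FIELD_REF extract; the CONSTRUCTOR of the four
     extracts is replaced by VEC_PERM_EXPR <x, x, { 3, 2, 1, 0 }>.  */
  v4si_ctor r = { x[3], x[2], x[1], x[0] };
  return r;
}

/* The splat and constant-blend paths above handle the cases where some
   elements are a repeated scalar or literal constants instead of extracts
   from a vector.  */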
4382 :
4383 : /* Prepare a TARGET_MEM_REF ref so that it can be subsetted as
4384 : lvalue. This splits out an address computation stmt before *GSI
4385 : and returns a MEM_REF wrapping the address. */
4386 :
4387 : static tree
4388 1096 : prepare_target_mem_ref_lvalue (tree ref, gimple_stmt_iterator *gsi)
4389 : {
4390 1096 : if (TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR)
4391 215 : mark_addressable (TREE_OPERAND (TREE_OPERAND (ref, 0), 0));
4392 1096 : tree ptrtype = build_pointer_type (TREE_TYPE (ref));
4393 1096 : tree tem = make_ssa_name (ptrtype);
4394 1096 : gimple *new_stmt
4395 1096 : = gimple_build_assign (tem, build1 (ADDR_EXPR, TREE_TYPE (tem),
4396 : unshare_expr (ref)));
4397 1096 : gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
4398 2192 : ref = build2_loc (EXPR_LOCATION (ref),
4399 1096 : MEM_REF, TREE_TYPE (ref), tem,
4400 1096 : build_int_cst (TREE_TYPE (TREE_OPERAND (ref, 1)), 0));
4401 1096 : return ref;
4402 : }
4403 :
4404 : /* Rewrite the vector load at *GSI to component-wise loads if the load
4405 : is only used in BIT_FIELD_REF extractions with eventual intermediate
4406 : widening. */
4407 :
4408 : static void
4409 288669 : optimize_vector_load (gimple_stmt_iterator *gsi)
4410 : {
4411 288669 : gimple *stmt = gsi_stmt (*gsi);
4412 288669 : tree lhs = gimple_assign_lhs (stmt);
4413 288669 : tree rhs = gimple_assign_rhs1 (stmt);
4414 288669 : tree vuse = gimple_vuse (stmt);
4415 :
4416 : /* Gather BIT_FIELD_REFs to rewrite, looking through
4417 : VEC_UNPACK_{LO,HI}_EXPR. */
4418 288669 : use_operand_p use_p;
4419 288669 : imm_use_iterator iter;
4420 288669 : bool rewrite = true;
4421 288669 : bool scalar_use = false;
4422 288669 : bool unpack_use = false;
4423 288669 : auto_vec<gimple *, 8> bf_stmts;
4424 288669 : auto_vec<tree, 8> worklist;
4425 288669 : worklist.quick_push (lhs);
4426 290524 : do
4427 : {
4428 290524 : tree def = worklist.pop ();
4429 290524 : unsigned HOST_WIDE_INT def_eltsize
4430 290524 : = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (TREE_TYPE (def))));
4431 657070 : FOR_EACH_IMM_USE_FAST (use_p, iter, def)
4432 : {
4433 346180 : gimple *use_stmt = USE_STMT (use_p);
4434 346180 : if (is_gimple_debug (use_stmt))
4435 76022 : continue;
4436 345587 : if (!is_gimple_assign (use_stmt))
4437 : {
4438 : rewrite = false;
4439 270158 : break;
4440 : }
4441 310242 : enum tree_code use_code = gimple_assign_rhs_code (use_stmt);
4442 310242 : tree use_rhs = gimple_assign_rhs1 (use_stmt);
4443 381950 : if (use_code == BIT_FIELD_REF
4444 71709 : && TREE_OPERAND (use_rhs, 0) == def
4445 : /* If it is on the VEC_UNPACK_{HI,LO}_EXPR
4446 : def we need to verify it is element aligned. */
4447 381951 : && (def == lhs
4448 85 : || (known_eq (bit_field_size (use_rhs), def_eltsize)
4449 85 : && constant_multiple_p (bit_field_offset (use_rhs),
4450 : def_eltsize)
4451 : /* We can simulate the VEC_UNPACK_{HI,LO}_EXPR
4452 : via a NOP_EXPR only for integral types.
4453 : ??? Support VEC_UNPACK_FLOAT_{HI,LO}_EXPR. */
4454 85 : && INTEGRAL_TYPE_P (TREE_TYPE (use_rhs)))))
4455 : {
4456 71708 : if (!VECTOR_TYPE_P (TREE_TYPE (gimple_assign_lhs (use_stmt))))
4457 69595 : scalar_use = true;
4458 71708 : bf_stmts.safe_push (use_stmt);
4459 71708 : continue;
4460 : }
4461 : /* Walk through one level of VEC_UNPACK_{LO,HI}_EXPR. */
4462 238534 : if (def == lhs
4463 236726 : && (use_code == VEC_UNPACK_HI_EXPR
4464 236726 : || use_code == VEC_UNPACK_LO_EXPR)
4465 3721 : && use_rhs == lhs)
4466 : {
4467 3721 : unpack_use = true;
4468 3721 : worklist.safe_push (gimple_assign_lhs (use_stmt));
4469 3721 : continue;
4470 : }
4471 : rewrite = false;
4472 : break;
4473 290524 : }
4474 290524 : if (!rewrite)
4475 : break;
4476 : }
4477 40732 : while (!worklist.is_empty ());
4478 :
4479 288669 : rewrite = rewrite && (scalar_use
4480 18511 : || unpack_use
4481 587 : || !can_implement_p (mov_optab,
4482 587 : TYPE_MODE (TREE_TYPE (lhs))));
4483 288669 : if (!rewrite)
4484 : {
4485 270327 : gsi_next (gsi);
4486 270327 : return;
4487 : }
4488 : /* We now have all ultimate uses of the load to rewrite in bf_stmts. */
4489 :
4490 : /* Prepare the original ref to be wrapped in adjusted BIT_FIELD_REFs.
4491 : For TARGET_MEM_REFs we have to separate the LEA from the reference. */
4492 18342 : tree load_rhs = rhs;
4493 18342 : if (TREE_CODE (load_rhs) == TARGET_MEM_REF)
4494 1095 : load_rhs = prepare_target_mem_ref_lvalue (load_rhs, gsi);
4495 :
4496 : /* Rewrite the BIT_FIELD_REFs to be actual loads, re-emitting them at
4497 : the place of the original load. */
4498 120956 : for (gimple *use_stmt : bf_stmts)
4499 : {
4500 65930 : tree bfr = gimple_assign_rhs1 (use_stmt);
4501 65930 : tree new_rhs = unshare_expr (load_rhs);
4502 65930 : if (TREE_OPERAND (bfr, 0) != lhs)
4503 : {
4504 : /* When the BIT_FIELD_REF is on the promoted vector we have to
4505 : adjust it and emit a conversion afterwards. */
4506 84 : gimple *def_stmt
4507 84 : = SSA_NAME_DEF_STMT (TREE_OPERAND (bfr, 0));
4508 84 : enum tree_code def_code
4509 84 : = gimple_assign_rhs_code (def_stmt);
4510 :
4511 : /* The adjusted BIT_FIELD_REF is of the promotion source
4512 : vector size and at half of the offset... */
4513 84 : new_rhs = fold_build3 (BIT_FIELD_REF,
4514 : TREE_TYPE (TREE_TYPE (lhs)),
4515 : new_rhs,
4516 : TYPE_SIZE (TREE_TYPE (TREE_TYPE (lhs))),
4517 : size_binop (EXACT_DIV_EXPR,
4518 : TREE_OPERAND (bfr, 2),
4519 : bitsize_int (2)));
4520 : /* ... and offset by half of the vector if VEC_UNPACK_HI_EXPR. */
4521 84 : if (def_code == (!BYTES_BIG_ENDIAN
4522 : ? VEC_UNPACK_HI_EXPR : VEC_UNPACK_LO_EXPR))
4523 42 : TREE_OPERAND (new_rhs, 2)
4524 84 : = size_binop (PLUS_EXPR, TREE_OPERAND (new_rhs, 2),
4525 : size_binop (EXACT_DIV_EXPR,
4526 : TYPE_SIZE (TREE_TYPE (lhs)),
4527 : bitsize_int (2)));
4528 84 : tree tem = make_ssa_name (TREE_TYPE (TREE_TYPE (lhs)));
4529 84 : gimple *new_stmt = gimple_build_assign (tem, new_rhs);
4530 84 : location_t loc = gimple_location (use_stmt);
4531 84 : gimple_set_location (new_stmt, loc);
4532 84 : gimple_set_vuse (new_stmt, vuse);
4533 84 : gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
4534 : /* Perform scalar promotion. */
4535 84 : new_stmt = gimple_build_assign (gimple_assign_lhs (use_stmt),
4536 : NOP_EXPR, tem);
4537 84 : gimple_set_location (new_stmt, loc);
4538 84 : gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
4539 : }
4540 : else
4541 : {
4542 : /* When the BIT_FIELD_REF is on the original load result
4543 : we can just wrap that. */
4544 65846 : tree new_rhs = fold_build3 (BIT_FIELD_REF, TREE_TYPE (bfr),
4545 : unshare_expr (load_rhs),
4546 : TREE_OPERAND (bfr, 1),
4547 : TREE_OPERAND (bfr, 2));
4548 65846 : gimple *new_stmt = gimple_build_assign (gimple_assign_lhs (use_stmt),
4549 : new_rhs);
4550 65846 : location_t loc = gimple_location (use_stmt);
4551 65846 : gimple_set_location (new_stmt, loc);
4552 65846 : gimple_set_vuse (new_stmt, vuse);
4553 65846 : gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
4554 : }
4555 65930 : gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
4556 65930 : unlink_stmt_vdef (use_stmt);
4557 65930 : gsi_remove (&gsi2, true);
4558 : }
4559 :
4560 : /* Finally get rid of the intermediate stmts. */
4561 18342 : gimple *use_stmt;
4562 36984 : FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
4563 : {
4564 300 : if (is_gimple_debug (use_stmt))
4565 : {
4566 272 : if (gimple_debug_bind_p (use_stmt))
4567 : {
4568 272 : gimple_debug_bind_reset_value (use_stmt);
4569 272 : update_stmt (use_stmt);
4570 : }
4571 272 : continue;
4572 : }
4573 28 : gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
4574 28 : unlink_stmt_vdef (use_stmt);
4575 28 : release_defs (use_stmt);
4576 28 : gsi_remove (&gsi2, true);
4577 18342 : }
4578 : /* And the original load. */
4579 18342 : release_defs (stmt);
4580 18342 : gsi_remove (gsi, true);
4581 288669 : }
4582 :
4583 :
4584 : /* Primitive "lattice" function for gimple_simplify. */
4585 :
4586 : static tree
4587 1596140899 : fwprop_ssa_val (tree name)
4588 : {
4589 : /* First valueize NAME. */
4590 1596140899 : if (TREE_CODE (name) == SSA_NAME
4591 1596140899 : && SSA_NAME_VERSION (name) < lattice.length ())
4592 : {
4593 1595295607 : tree val = lattice[SSA_NAME_VERSION (name)];
4594 1595295607 : if (val)
4595 1596140899 : name = val;
4596 : }
4597 : /* We continue matching along SSA use-def edges for SSA names
4598 : that are not single-use. Currently there are no patterns
4599 : that would cause any issues with that. */
4600 1596140899 : return name;
4601 : }
4602 :
4603 : /* Search for opportunities to free half of the lanes in the following pattern:
4604 :
4605 : v_in = {e0, e1, e2, e3}
4606 : v_1 = VEC_PERM <v_in, v_in, {0, 2, 0, 2}>
4607 : // v_1 = {e0, e2, e0, e2}
4608 : v_2 = VEC_PERM <v_in, v_in, {1, 3, 1, 3}>
4609 : // v_2 = {e1, e3, e1, e3}
4610 :
4611 : v_x = v_1 + v_2
4612 : // v_x = {e0+e1, e2+e3, e0+e1, e2+e3}
4613 : v_y = v_1 - v_2
4614 : // v_y = {e0-e1, e2-e3, e0-e1, e2-e3}
4615 :
4616 : v_out = VEC_PERM <v_x, v_y, {0, 1, 6, 7}>
4617 : // v_out = {e0+e1, e2+e3, e0-e1, e2-e3}
4618 :
4619 : The last statement could be simplified to:
4620 : v_out' = VEC_PERM <v_x, v_y, {0, 1, 4, 5}>
4621 : // v_out' = {e0+e1, e2+e3, e0-e1, e2-e3}
4622 :
4623 : Characteristic properties:
4624 : - v_1 and v_2 are created from the same input vector v_in and introduce the
4625 : lane duplication (in the selection operand) that we can eliminate.
4626 : - v_x and v_y are results from lane-preserving operations that use v_1 and
4627 : v_2 as inputs.
4628 : - v_out is created by selecting from duplicated lanes. */
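
/* Editor's illustration (a hedged sketch, not part of this file): scalar C
   code of roughly this shape, once vectorized by SLP, tends to produce the
   sequence described above -- two lane-duplicating VEC_PERMs feeding an add
   and a sub whose results are re-selected by a final VEC_PERM.  The function
   name is an assumption made up for the example:

     void
     pairwise_addsub (float *restrict out, const float *restrict in)
     {
       out[0] = in[0] + in[1];   // e0+e1
       out[1] = in[2] + in[3];   // e2+e3
       out[2] = in[0] - in[1];   // e0-e1
       out[3] = in[2] - in[3];   // e2-e3
     }
*/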
4629 :
4630 : static bool
4631 182417 : recognise_vec_perm_simplify_seq (gassign *stmt, vec_perm_simplify_seq *seq)
4632 : {
4633 182417 : unsigned HOST_WIDE_INT nelts;
4634 :
4635 182417 : gcc_checking_assert (stmt);
4636 182417 : gcc_checking_assert (gimple_assign_rhs_code (stmt) == VEC_PERM_EXPR);
4637 182417 : basic_block bb = gimple_bb (stmt);
4638 :
4639 : /* Decompose the final vec permute statement. */
4640 182417 : tree v_x = gimple_assign_rhs1 (stmt);
4641 182417 : tree v_y = gimple_assign_rhs2 (stmt);
4642 182417 : tree sel = gimple_assign_rhs3 (stmt);
4643 :
4644 182417 : if (TREE_CODE (sel) != VECTOR_CST
4645 179680 : || !VECTOR_CST_NELTS (sel).is_constant (&nelts)
4646 179680 : || TREE_CODE (v_x) != SSA_NAME
4647 177828 : || TREE_CODE (v_y) != SSA_NAME
4648 173794 : || !has_single_use (v_x)
4649 292050 : || !has_single_use (v_y))
4650 74509 : return false;
4651 :
4652 : /* Don't analyse sequences with many lanes. */
4653 107908 : if (nelts > 4)
4654 : return false;
4655 :
4656 : /* Lookup the definition of v_x and v_y. */
4657 105194 : gassign *v_x_stmt = dyn_cast<gassign *> (SSA_NAME_DEF_STMT (v_x));
4658 105194 : gassign *v_y_stmt = dyn_cast<gassign *> (SSA_NAME_DEF_STMT (v_y));
4659 104817 : if (!v_x_stmt || gimple_bb (v_x_stmt) != bb
4660 210011 : || !v_y_stmt || gimple_bb (v_y_stmt) != bb)
4661 : return false;
4662 :
4663 : /* Check the operations that define v_x and v_y. */
4664 104810 : if (TREE_CODE_CLASS (gimple_assign_rhs_code (v_x_stmt)) != tcc_binary
4665 106858 : || TREE_CODE_CLASS (gimple_assign_rhs_code (v_y_stmt)) != tcc_binary)
4666 : return false;
4667 :
4668 2048 : tree v_x_1 = gimple_assign_rhs1 (v_x_stmt);
4669 2048 : tree v_x_2 = gimple_assign_rhs2 (v_x_stmt);
4670 2048 : tree v_y_1 = gimple_assign_rhs1 (v_y_stmt);
4671 2048 : tree v_y_2 = gimple_assign_rhs2 (v_y_stmt);
4672 :
4673 2048 : if (v_x_stmt == v_y_stmt
4674 2048 : || TREE_CODE (v_x_1) != SSA_NAME
4675 2045 : || TREE_CODE (v_x_2) != SSA_NAME
4676 2033 : || num_imm_uses (v_x_1) != 2
4677 3924 : || num_imm_uses (v_x_2) != 2)
4678 : return false;
4679 :
4680 1841 : if (v_x_1 != v_y_1 || v_x_2 != v_y_2)
4681 : {
4682 : /* Allow operands of commutative operators to swap. */
4683 635 : if (commutative_tree_code (gimple_assign_rhs_code (v_x_stmt)))
4684 : {
4685 : /* Keep v_x_1 the first operand for non-commutative operators. */
4686 252 : std::swap (v_x_1, v_x_2);
4687 252 : if (v_x_1 != v_y_1 || v_x_2 != v_y_2)
4688 : return false;
4689 : }
4690 383 : else if (commutative_tree_code (gimple_assign_rhs_code (v_y_stmt)))
4691 : {
4692 383 : if (v_x_1 != v_y_2 || v_x_2 != v_y_1)
4693 : return false;
4694 : }
4695 : else
4696 : return false;
4697 : }
4698 1841 : gassign *v_1_stmt = dyn_cast<gassign *> (SSA_NAME_DEF_STMT (v_x_1));
4699 1841 : gassign *v_2_stmt = dyn_cast<gassign *> (SSA_NAME_DEF_STMT (v_x_2));
4700 1777 : if (!v_1_stmt || gimple_bb (v_1_stmt) != bb
4701 3618 : || !v_2_stmt || gimple_bb (v_2_stmt) != bb)
4702 : return false;
4703 :
4704 1773 : if (gimple_assign_rhs_code (v_1_stmt) != VEC_PERM_EXPR
4705 1895 : || gimple_assign_rhs_code (v_2_stmt) != VEC_PERM_EXPR)
4706 : return false;
4707 :
4708 : /* Decompose initial VEC_PERM_EXPRs. */
4709 108 : tree v_in = gimple_assign_rhs1 (v_1_stmt);
4710 108 : tree v_1_sel = gimple_assign_rhs3 (v_1_stmt);
4711 108 : tree v_2_sel = gimple_assign_rhs3 (v_2_stmt);
4712 108 : if (v_in != gimple_assign_rhs2 (v_1_stmt)
4713 103 : || v_in != gimple_assign_rhs1 (v_2_stmt)
4714 209 : || v_in != gimple_assign_rhs2 (v_2_stmt))
4715 : return false;
4716 :
4717 101 : unsigned HOST_WIDE_INT v_1_nelts, v_2_nelts;
4718 101 : if (TREE_CODE (v_1_sel) != VECTOR_CST
4719 101 : || !VECTOR_CST_NELTS (v_1_sel).is_constant (&v_1_nelts)
4720 101 : || TREE_CODE (v_2_sel) != VECTOR_CST
4721 202 : || !VECTOR_CST_NELTS (v_2_sel).is_constant (&v_2_nelts))
4722 0 : return false;
4723 :
4724 101 : if (nelts != v_1_nelts || nelts != v_2_nelts)
4725 : return false;
4726 :
4727 : /* Create the new selector. */
4728 101 : vec_perm_builder new_sel_perm (nelts, nelts, 1);
4729 101 : auto_vec<bool> lanes (nelts);
4730 101 : lanes.quick_grow_cleared (nelts);
4731 505 : for (unsigned int i = 0; i < nelts; i++)
4732 : {
4733 : /* Extract the i-th value from the selector. */
4734 404 : unsigned int sel_cst = TREE_INT_CST_LOW (VECTOR_CST_ELT (sel, i));
4735 404 : unsigned int lane = sel_cst % nelts;
4736 404 : unsigned int offs = sel_cst / nelts;
4737 :
4738 : /* Check what's in the lane. */
4739 404 : unsigned int e_1 = TREE_INT_CST_LOW (VECTOR_CST_ELT (v_1_sel, lane));
4740 404 : unsigned int e_2 = TREE_INT_CST_LOW (VECTOR_CST_ELT (v_2_sel, lane));
4741 :
4742 : /* Reuse previous lane (if any). */
4743 404 : unsigned int l = 0;
4744 687 : for (; l < lane; l++)
4745 : {
4746 481 : if ((TREE_INT_CST_LOW (VECTOR_CST_ELT (v_1_sel, l)) == e_1)
4747 481 : && (TREE_INT_CST_LOW (VECTOR_CST_ELT (v_2_sel, l)) == e_2))
4748 : break;
4749 : }
4750 :
4751 : /* Add to narrowed selector. */
4752 404 : new_sel_perm.quick_push (l + offs * nelts);
4753 :
4754 : /* Mark lane as used. */
4755 404 : lanes[l] = true;
4756 : }
4757 :
4758 : /* Count how many lanes are needed. */
4759 : unsigned int cnt = 0;
4760 505 : for (unsigned int i = 0; i < nelts; i++)
4761 404 : cnt += lanes[i];
4762 :
4763 : /* If more than (nelts/2) lanes are needed, skip the sequence. */
4764 101 : if (cnt > nelts / 2)
4765 : return false;
4766 :
4767 : /* Check if the resulting permutation is cheap. */
4768 101 : vec_perm_indices new_indices (new_sel_perm, 2, nelts);
4769 101 : tree vectype = TREE_TYPE (gimple_assign_lhs (stmt));
4770 101 : machine_mode vmode = TYPE_MODE (vectype);
4771 101 : if (!can_vec_perm_const_p (vmode, vmode, new_indices, false))
4772 : return false;
4773 :
4774 101 : *seq = XNEW (struct _vec_perm_simplify_seq);
4775 101 : (*seq)->stmt = stmt;
4776 101 : (*seq)->v_1_stmt = v_1_stmt;
4777 101 : (*seq)->v_2_stmt = v_2_stmt;
4778 101 : (*seq)->v_x_stmt = v_x_stmt;
4779 101 : (*seq)->v_y_stmt = v_y_stmt;
4780 101 : (*seq)->nelts = nelts;
4781 101 : (*seq)->new_sel = vect_gen_perm_mask_checked (vectype, new_indices);
4782 :
4783 101 : if (dump_file)
4784 : {
4785 28 : fprintf (dump_file, "Found vec perm simplify sequence ending with:\n\t");
4786 28 : print_gimple_stmt (dump_file, stmt, 0);
4787 :
4788 28 : if (dump_flags & TDF_DETAILS)
4789 : {
4790 28 : fprintf (dump_file, "\tNarrowed vec_perm selector: ");
4791 28 : print_generic_expr (dump_file, (*seq)->new_sel);
4792 28 : fprintf (dump_file, "\n");
4793 : }
4794 : }
4795 :
4796 : return true;
4797 202 : }
4798 :
4799 : /* Reduce the lane consumption of a simplifiable vec perm sequence. */
4800 :
4801 : static void
4802 74 : narrow_vec_perm_simplify_seq (const vec_perm_simplify_seq &seq)
4803 : {
4804 74 : gassign *stmt = seq->stmt;
4805 74 : if (dump_file && (dump_flags & TDF_DETAILS))
4806 : {
4807 22 : fprintf (dump_file, "Updating VEC_PERM statement:\n");
4808 22 : fprintf (dump_file, "Old stmt: ");
4809 22 : print_gimple_stmt (dump_file, stmt, 0);
4810 : }
4811 :
4812 : /* Update the last VEC_PERM statement. */
4813 74 : gimple_assign_set_rhs3 (stmt, seq->new_sel);
4814 74 : update_stmt (stmt);
4815 :
4816 74 : if (dump_file && (dump_flags & TDF_DETAILS))
4817 : {
4818 22 : fprintf (dump_file, "New stmt: ");
4819 22 : print_gimple_stmt (dump_file, stmt, 0);
4820 : }
4821 74 : }
4822 :
4823 : /* Test if we can blend two simplifiable vec permute sequences.
4824 : NEED_SWAP will be set if the sequences must be swapped for blending. */
4825 :
4826 : static bool
4827 47 : can_blend_vec_perm_simplify_seqs_p (vec_perm_simplify_seq seq1,
4828 : vec_perm_simplify_seq seq2,
4829 : bool *need_swap)
4830 : {
4831 47 : unsigned int nelts = seq1->nelts;
4832 47 : basic_block bb = gimple_bb (seq1->stmt);
4833 :
4834 47 : gcc_assert (gimple_bb (seq2->stmt) == bb);
4835 :
4836 : /* BBs and number of elements must be equal. */
4837 47 : if (gimple_bb (seq2->stmt) != bb || seq2->nelts != nelts)
4838 : return false;
4839 :
4840 : /* We need vectors of the same type. */
4841 47 : if (TREE_TYPE (gimple_assign_lhs (seq1->stmt))
4842 47 : != TREE_TYPE (gimple_assign_lhs (seq2->stmt)))
4843 : return false;
4844 :
4845 : /* We require isomorphic operators. */
4846 41 : if (((gimple_assign_rhs_code (seq1->v_x_stmt)
4847 41 : != gimple_assign_rhs_code (seq2->v_x_stmt))
4848 41 : || (gimple_assign_rhs_code (seq1->v_y_stmt)
4849 41 : != gimple_assign_rhs_code (seq2->v_y_stmt))))
4850 : return false;
4851 :
4852 : /* We cannot have any dependencies between the sequences.
4853 :
4854 : For merging, we will reuse seq1->v_1_stmt and seq1->v_2_stmt.
4855 : seq1's v_in is defined before these statements, but we need
4856 : to check if seq2's v_in is defined before them as well.
4857 :
4858 : Further, we will reuse seq2->stmt. We need to ensure that
4859 : seq1->v_x_stmt and seq1->v_y_stmt are before it.
4860 :
4861 : Note that we don't need to check the BBs here, because all
4862 : statements of both sequences have to be in the same BB. */
4863 :
4864 41 : tree seq2_v_in = gimple_assign_rhs1 (seq2->v_1_stmt);
4865 41 : if (TREE_CODE (seq2_v_in) != SSA_NAME)
4866 : return false;
4867 :
4868 41 : gassign *seq2_v_in_stmt = dyn_cast<gassign *> (SSA_NAME_DEF_STMT (seq2_v_in));
4869 41 : if (!seq2_v_in_stmt || gimple_bb (seq2_v_in_stmt) != bb
4870 41 : || (gimple_uid (seq2_v_in_stmt) > gimple_uid (seq1->v_1_stmt))
4871 37 : || (gimple_uid (seq1->v_x_stmt) > gimple_uid (seq2->stmt))
4872 37 : || (gimple_uid (seq1->v_y_stmt) > gimple_uid (seq2->stmt)))
4873 : {
4874 4 : tree seq1_v_in = gimple_assign_rhs1 (seq1->v_1_stmt);
4875 4 : if (TREE_CODE (seq1_v_in) != SSA_NAME)
4876 : return false;
4877 :
4878 4 : gassign *seq1_v_in_stmt
4879 4 : = dyn_cast<gassign *> (SSA_NAME_DEF_STMT (seq1_v_in));
4880 : /* Let's try to see if we succeed when swapping the sequences. */
4881 4 : if (!seq1_v_in_stmt || gimple_bb (seq1_v_in_stmt) != bb
4882 0 : || (gimple_uid (seq1_v_in_stmt) > gimple_uid (seq2->v_1_stmt))
4883 0 : || (gimple_uid (seq2->v_x_stmt) > gimple_uid (seq1->stmt))
4884 0 : || (gimple_uid (seq2->v_y_stmt) > gimple_uid (seq1->stmt)))
4885 : return false;
4886 0 : *need_swap = true;
4887 : }
4888 : else
4889 37 : *need_swap = false;
4890 :
4891 37 : if (dump_file && (dump_flags & TDF_DETAILS))
4892 11 : fprintf (dump_file, "Found vec perm simplify sequence pair.\n");
4893 :
4894 : return true;
4895 : }
4896 :
4897 : /* Calculate the permutations for blending the two given vec permute
4898 : sequences. This may fail if the resulting permutation is not
4899 : supported. */
4900 :
4901 : static bool
4902 37 : calc_perm_vec_perm_simplify_seqs (vec_perm_simplify_seq seq1,
4903 : vec_perm_simplify_seq seq2,
4904 : vec_perm_indices *seq2_stmt_indices,
4905 : vec_perm_indices *seq1_v_1_stmt_indices,
4906 : vec_perm_indices *seq1_v_2_stmt_indices)
4907 : {
4908 37 : unsigned int i;
4909 37 : unsigned int nelts = seq1->nelts;
4910 37 : auto_vec<unsigned int> lane_assignment;
4911 37 : lane_assignment.create (nelts);
4912 :
4913 : /* Mark all lanes as free. */
4914 37 : lane_assignment.quick_grow_cleared (nelts);
4915 :
4916 : /* Allocate lanes for seq1. */
4917 185 : for (i = 0; i < nelts; i++)
4918 : {
4919 148 : unsigned int l = TREE_INT_CST_LOW (VECTOR_CST_ELT (seq1->new_sel, i));
4920 148 : l %= nelts;
4921 148 : lane_assignment[l] = 1;
4922 : }
4923 :
4924 : /* Allocate lanes for seq2 and calculate selector for seq2->stmt. */
4925 37 : vec_perm_builder seq2_stmt_sel_perm (nelts, nelts, 1);
4926 185 : for (i = 0; i < nelts; i++)
4927 : {
4928 148 : unsigned int sel = TREE_INT_CST_LOW (VECTOR_CST_ELT (seq2->new_sel, i));
4929 148 : unsigned int lane = sel % nelts;
4930 148 : unsigned int offs = sel / nelts;
4931 148 : unsigned int new_sel;
4932 :
4933 : /* Check if we already allocated the lane for seq2. */
4934 148 : unsigned int j = 0;
4935 263 : for (; j < i; j++)
4936 : {
4937 189 : unsigned int sel_old;
4938 189 : sel_old = TREE_INT_CST_LOW (VECTOR_CST_ELT (seq2->new_sel, j));
4939 189 : unsigned int lane_old = sel_old % nelts;
4940 189 : if (lane == lane_old)
4941 : {
4942 74 : new_sel = seq2_stmt_sel_perm[j].to_constant ();
4943 74 : new_sel = (new_sel % nelts) + offs * nelts;
4944 74 : break;
4945 : }
4946 : }
4947 :
4948 : /* If the lane is not allocated, we need to do that now. */
4949 148 : if (j == i)
4950 : {
4951 : unsigned int l_orig = lane;
4952 182 : while (lane_assignment[lane] != 0)
4953 : {
4954 108 : lane = (lane + 1) % nelts;
4955 :
4956 : /* This should not happen if both sequences utilize no more than
4957 : half of the lanes. Test anyway to guarantee termination. */
4958 108 : if (lane == l_orig)
4959 0 : return false;
4960 : }
4961 :
4962 : /* Allocate lane. */
4963 74 : lane_assignment[lane] = 2 + l_orig;
4964 74 : new_sel = lane + offs * nelts;
4965 : }
4966 :
4967 148 : seq2_stmt_sel_perm.quick_push (new_sel);
4968 : }
4969 :
4970 : /* Check if the resulting permutation is cheap. */
4971 37 : seq2_stmt_indices->new_vector (seq2_stmt_sel_perm, 2, nelts);
4972 37 : tree vectype = TREE_TYPE (gimple_assign_lhs (seq2->stmt));
4973 37 : machine_mode vmode = TYPE_MODE (vectype);
4974 37 : if (!can_vec_perm_const_p (vmode, vmode, *seq2_stmt_indices, false))
4975 : return false;
4976 :
4977 : /* Calculate selectors for seq1->v_1_stmt and seq1->v_2_stmt. */
4978 37 : vec_perm_builder seq1_v_1_stmt_sel_perm (nelts, nelts, 1);
4979 37 : vec_perm_builder seq1_v_2_stmt_sel_perm (nelts, nelts, 1);
4980 185 : for (i = 0; i < nelts; i++)
4981 : {
4982 148 : bool use_seq1 = lane_assignment[i] < 2;
4983 148 : unsigned int l1, l2;
4984 :
4985 148 : if (use_seq1)
4986 : {
4987 : /* Just reuse the selector indices. */
4988 74 : tree s1 = gimple_assign_rhs3 (seq1->v_1_stmt);
4989 74 : tree s2 = gimple_assign_rhs3 (seq1->v_2_stmt);
4990 74 : l1 = TREE_INT_CST_LOW (VECTOR_CST_ELT (s1, i));
4991 74 : l2 = TREE_INT_CST_LOW (VECTOR_CST_ELT (s2, i));
4992 : }
4993 : else
4994 : {
4995 : /* We moved the lanes for seq2, so we need to adjust for that. */
4996 74 : tree s1 = gimple_assign_rhs3 (seq2->v_1_stmt);
4997 74 : tree s2 = gimple_assign_rhs3 (seq2->v_2_stmt);
4998 74 : l1 = TREE_INT_CST_LOW (VECTOR_CST_ELT (s1, lane_assignment[i] - 2));
4999 74 : l2 = TREE_INT_CST_LOW (VECTOR_CST_ELT (s2, lane_assignment[i] - 2));
5000 : }
5001 :
5002 148 : l1 %= nelts;
5003 148 : l2 %= nelts;
5004 222 : seq1_v_1_stmt_sel_perm.quick_push (l1 + (use_seq1 ? 0 : nelts));
5005 148 : seq1_v_2_stmt_sel_perm.quick_push (l2 + (use_seq1 ? 0 : nelts));
5006 : }
5007 :
5008 37 : seq1_v_1_stmt_indices->new_vector (seq1_v_1_stmt_sel_perm, 2, nelts);
5009 37 : vectype = TREE_TYPE (gimple_assign_lhs (seq1->v_1_stmt));
5010 37 : vmode = TYPE_MODE (vectype);
5011 37 : if (!can_vec_perm_const_p (vmode, vmode, *seq1_v_1_stmt_indices, false))
5012 : return false;
5013 :
5014 37 : seq1_v_2_stmt_indices->new_vector (seq1_v_2_stmt_sel_perm, 2, nelts);
5015 37 : vectype = TREE_TYPE (gimple_assign_lhs (seq1->v_2_stmt));
5016 37 : vmode = TYPE_MODE (vectype);
5017 37 : if (!can_vec_perm_const_p (vmode, vmode, *seq1_v_2_stmt_indices, false))
5018 : return false;
5019 :
5020 : return true;
5021 74 : }
5022 :
5023 : /* Blend the two given simplifiable vec permute sequences using the
5024 : given permutations. */
5025 :
5026 : static void
5027 37 : blend_vec_perm_simplify_seqs (vec_perm_simplify_seq seq1,
5028 : vec_perm_simplify_seq seq2,
5029 : const vec_perm_indices &seq2_stmt_indices,
5030 : const vec_perm_indices &seq1_v_1_stmt_indices,
5031 : const vec_perm_indices &seq1_v_2_stmt_indices)
5032 : {
5033 : /* We don't need to adjust seq1->stmt because its lane consumption
5034 : was already narrowed before entering this function. */
5035 :
5036 : /* Adjust seq2->stmt: copy RHS1/RHS2 from seq1->stmt and set new sel. */
5037 37 : if (dump_file && (dump_flags & TDF_DETAILS))
5038 : {
5039 11 : fprintf (dump_file, "Updating VEC_PERM statement:\n");
5040 11 : fprintf (dump_file, "Old stmt: ");
5041 11 : print_gimple_stmt (dump_file, seq2->stmt, 0);
5042 : }
5043 :
5044 37 : gimple_assign_set_rhs1 (seq2->stmt, gimple_assign_rhs1 (seq1->stmt));
5045 74 : gimple_assign_set_rhs2 (seq2->stmt, gimple_assign_rhs2 (seq1->stmt));
5046 37 : tree vectype = TREE_TYPE (gimple_assign_lhs (seq2->stmt));
5047 37 : tree sel = vect_gen_perm_mask_checked (vectype, seq2_stmt_indices);
5048 37 : gimple_assign_set_rhs3 (seq2->stmt, sel);
5049 37 : update_stmt (seq2->stmt);
5050 :
5051 37 : if (dump_file && (dump_flags & TDF_DETAILS))
5052 : {
5053 11 : fprintf (dump_file, "New stmt: ");
5054 11 : print_gimple_stmt (dump_file, seq2->stmt, 0);
5055 : }
5056 :
5057 : /* Adjust seq1->v_1_stmt: copy RHS2 from seq2->v_1_stmt and set new sel. */
5058 37 : if (dump_file && (dump_flags & TDF_DETAILS))
5059 : {
5060 11 : fprintf (dump_file, "Updating VEC_PERM statement:\n");
5061 11 : fprintf (dump_file, "Old stmt: ");
5062 11 : print_gimple_stmt (dump_file, seq1->v_1_stmt, 0);
5063 : }
5064 :
5065 37 : gimple_assign_set_rhs2 (seq1->v_1_stmt, gimple_assign_rhs1 (seq2->v_1_stmt));
5066 37 : vectype = TREE_TYPE (gimple_assign_lhs (seq1->v_1_stmt));
5067 37 : sel = vect_gen_perm_mask_checked (vectype, seq1_v_1_stmt_indices);
5068 37 : gimple_assign_set_rhs3 (seq1->v_1_stmt, sel);
5069 37 : update_stmt (seq1->v_1_stmt);
5070 :
5071 37 : if (dump_file && (dump_flags & TDF_DETAILS))
5072 : {
5073 11 : fprintf (dump_file, "New stmt: ");
5074 11 : print_gimple_stmt (dump_file, seq1->v_1_stmt, 0);
5075 : }
5076 :
5077 : /* Adjust seq1->v_2_stmt: copy RHS2 from seq2->v_2_stmt and set new sel. */
5078 37 : if (dump_file && (dump_flags & TDF_DETAILS))
5079 : {
5080 11 : fprintf (dump_file, "Updating VEC_PERM statement:\n");
5081 11 : fprintf (dump_file, "Old stmt: ");
5082 11 : print_gimple_stmt (dump_file, seq1->v_2_stmt, 0);
5083 : }
5084 :
5085 37 : gimple_assign_set_rhs2 (seq1->v_2_stmt, gimple_assign_rhs1 (seq2->v_2_stmt));
5086 37 : vectype = TREE_TYPE (gimple_assign_lhs (seq1->v_2_stmt));
5087 37 : sel = vect_gen_perm_mask_checked (vectype, seq1_v_2_stmt_indices);
5088 37 : gimple_assign_set_rhs3 (seq1->v_2_stmt, sel);
5089 37 : update_stmt (seq1->v_2_stmt);
5090 :
5091 37 : if (dump_file && (dump_flags & TDF_DETAILS))
5092 : {
5093 11 : fprintf (dump_file, "New stmt: ");
5094 11 : print_gimple_stmt (dump_file, seq1->v_2_stmt, 0);
5095 : }
5096 :
5097 : /* At this point, we have four unmodified seq2 stmts, which will be
5098 : eliminated by DCE. */
5099 :
5100 37 : if (dump_file)
5101 11 : fprintf (dump_file, "Vec perm simplify sequences have been blended.\n\n");
5102 37 : }
5103 :
5104 : /* Try to blend narrowed vec_perm_simplify_seqs pairwise.
5105 : The provided list will be empty after this call. */
5106 :
5107 : static void
5108 309860248 : process_vec_perm_simplify_seq_list (vec<vec_perm_simplify_seq> *l)
5109 : {
5110 309860248 : unsigned int i, j;
5111 309860248 : vec_perm_simplify_seq seq1, seq2;
5112 :
5113 309860248 : if (l->is_empty ())
5114 309860203 : return;
5115 :
5116 45 : if (dump_file && (dump_flags & TDF_DETAILS))
5117 13 : fprintf (dump_file, "\nProcessing %u vec perm simplify sequences.\n",
5118 : l->length ());
5119 :
5120 109 : FOR_EACH_VEC_ELT (*l, i, seq1)
5121 : {
5122 64 : if (i + 1 < l->length ())
5123 : {
5124 51 : FOR_EACH_VEC_ELT_FROM (*l, j, seq2, i + 1)
5125 : {
5126 47 : bool swap = false;
5127 47 : if (can_blend_vec_perm_simplify_seqs_p (seq1, seq2, &swap))
5128 : {
5129 37 : vec_perm_indices seq2_stmt_indices;
5130 37 : vec_perm_indices seq1_v_1_stmt_indices;
5131 37 : vec_perm_indices seq1_v_2_stmt_indices;
5132 111 : if (calc_perm_vec_perm_simplify_seqs (swap ? seq2 : seq1,
5133 : swap ? seq1 : seq2,
5134 : &seq2_stmt_indices,
5135 : &seq1_v_1_stmt_indices,
5136 : &seq1_v_2_stmt_indices))
5137 : {
5138 : /* Narrow lane usage. */
5139 37 : narrow_vec_perm_simplify_seq (seq1);
5140 37 : narrow_vec_perm_simplify_seq (seq2);
5141 :
5142 : /* Blend sequences. */
5143 37 : blend_vec_perm_simplify_seqs (swap ? seq2 : seq1,
5144 : swap ? seq1 : seq2,
5145 : seq2_stmt_indices,
5146 : seq1_v_1_stmt_indices,
5147 : seq1_v_2_stmt_indices);
5148 :
5149 : /* We can use unordered_remove as we break the loop. */
5150 37 : l->unordered_remove (j);
5151 37 : XDELETE (seq2);
5152 37 : break;
5153 : }
5154 37 : }
5155 : }
5156 : }
5157 :
5158 : /* We don't need to call l->remove for seq1. */
5159 64 : XDELETE (seq1);
5160 : }
5161 :
5162 45 : l->truncate (0);
5163 : }
5164 :
5165 : static void
5166 101 : append_vec_perm_simplify_seq_list (vec<vec_perm_simplify_seq> *l,
5167 : const vec_perm_simplify_seq &seq)
5168 : {
5169 : /* If no space is left on the list, process the list first. */
5170 101 : if (!l->space (1))
5171 0 : process_vec_perm_simplify_seq_list (l);
5172 :
5173 101 : l->quick_push (seq);
5174 101 : }
5175 :
5176 : /* Main entry point for the forward propagation and statement combine
5177 : optimizer. */
5178 :
5179 : namespace {
5180 :
5181 : const pass_data pass_data_forwprop =
5182 : {
5183 : GIMPLE_PASS, /* type */
5184 : "forwprop", /* name */
5185 : OPTGROUP_NONE, /* optinfo_flags */
5186 : TV_TREE_FORWPROP, /* tv_id */
5187 : ( PROP_cfg | PROP_ssa ), /* properties_required */
5188 : 0, /* properties_provided */
5189 : 0, /* properties_destroyed */
5190 : 0, /* todo_flags_start */
5191 : 0, /* todo_flags_finish */
5192 : };
5193 :
5194 : class pass_forwprop : public gimple_opt_pass
5195 : {
5196 : public:
5197 1440235 : pass_forwprop (gcc::context *ctxt)
5198 2880470 : : gimple_opt_pass (pass_data_forwprop, ctxt), last_p (false)
5199 : {}
5200 :
5201 : /* opt_pass methods: */
5202 1152188 : opt_pass * clone () final override { return new pass_forwprop (m_ctxt); }
5203 1728282 : void set_pass_param (unsigned int n, bool param) final override
5204 : {
5205 1728282 : switch (n)
5206 : {
5207 1152188 : case 0:
5208 1152188 : m_full_walk = param;
5209 1152188 : break;
5210 576094 : case 1:
5211 576094 : last_p = param;
5212 576094 : break;
5213 0 : default:
5214 0 : gcc_unreachable();
5215 : }
5216 1728282 : }
5217 5532750 : bool gate (function *) final override { return flag_tree_forwprop; }
5218 : unsigned int execute (function *) final override;
5219 :
5220 : private:
5221 : /* Determines whether the pass instance should set PROP_last_full_fold. */
5222 : bool last_p;
5223 :
5224 : /* True if the aggregate props are doing a full walk or not. */
5225 : bool m_full_walk = false;
5226 : }; // class pass_forwprop
5227 :
5228 : /* Attempt to make the basic block containing __builtin_unreachable unreachable
5229 : by changing the incoming jumps. Return true if at least one jump was changed. */
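
/* Editor's illustration (a hedged sketch, not part of this file): a guard
   that branches straight to __builtin_unreachable ().  Flipping the
   controlling GIMPLE_COND to a constant leaves the unreachable block with no
   incoming executable edge, so CFG cleanup can delete it.  The identifiers
   are assumptions made up for the example:

     extern int limit;

     int
     clamp_index (int i)
     {
       if (i >= limit)
         __builtin_unreachable ();   // block made unreachable by this pass
       return i;
     }
*/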
5230 :
5231 : static bool
5232 2252 : optimize_unreachable (basic_block bb)
5233 : {
5234 2252 : gimple_stmt_iterator gsi;
5235 2252 : gimple *stmt;
5236 2252 : edge_iterator ei;
5237 2252 : edge e;
5238 2252 : bool ret;
5239 :
5240 2252 : ret = false;
5241 5186 : FOR_EACH_EDGE (e, ei, bb->preds)
5242 : {
5243 2934 : gsi = gsi_last_bb (e->src);
5244 2934 : if (gsi_end_p (gsi))
5245 330 : continue;
5246 :
5247 2604 : stmt = gsi_stmt (gsi);
5248 2604 : if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
5249 : {
5250 : /* If the condition is already true/false
5251 : ignore it. This can happen during forwprop's own copy propagation. */
5252 659 : if (gimple_cond_true_p (cond_stmt)
5253 651 : || gimple_cond_false_p (cond_stmt))
5254 8 : continue;
5255 643 : else if (e->flags & EDGE_TRUE_VALUE)
5256 561 : gimple_cond_make_false (cond_stmt);
5257 82 : else if (e->flags & EDGE_FALSE_VALUE)
5258 82 : gimple_cond_make_true (cond_stmt);
5259 : else
5260 0 : gcc_unreachable ();
5261 643 : update_stmt (cond_stmt);
5262 : }
5263 : else
5264 : {
5265 : /* Todo: handle other cases. Note that unreachable switch case
5266 : statements have already been removed. */
5267 1953 : continue;
5268 : }
5269 :
5270 643 : ret = true;
5271 : }
5272 :
5273 2252 : return ret;
5274 : }
5275 :
5276 : unsigned int
5277 5530190 : pass_forwprop::execute (function *fun)
5278 : {
5279 5530190 : unsigned int todoflags = 0;
5280 : /* Handle a full walk only when expensive optimizations are on. */
5281 5530190 : bool full_walk = m_full_walk && flag_expensive_optimizations;
5282 :
5283 5530190 : cfg_changed = false;
5284 5530190 : if (last_p)
5285 1041435 : fun->curr_properties |= PROP_last_full_fold;
5286 :
5287 5530190 : calculate_dominance_info (CDI_DOMINATORS);
5288 :
5289 : /* Combine stmts with the stmts defining their operands. Do that
5290 : in an order that guarantees visiting SSA defs before SSA uses. */
5291 11060380 : lattice.create (num_ssa_names);
5292 11060380 : lattice.quick_grow_cleared (num_ssa_names);
5293 5530190 : int *postorder = XNEWVEC (int, n_basic_blocks_for_fn (fun));
5294 5530190 : int postorder_num = pre_and_rev_post_order_compute_fn (fun, NULL,
5295 : postorder, false);
5296 5530190 : int *bb_to_rpo = XNEWVEC (int, last_basic_block_for_fn (fun));
5297 49755215 : for (int i = 0; i < postorder_num; ++i)
5298 : {
5299 44225025 : bb_to_rpo[postorder[i]] = i;
5300 44225025 : edge_iterator ei;
5301 44225025 : edge e;
5302 106462850 : FOR_EACH_EDGE (e, ei, BASIC_BLOCK_FOR_FN (fun, postorder[i])->succs)
5303 62237825 : e->flags &= ~EDGE_EXECUTABLE;
5304 : }
5305 5530190 : single_succ_edge (BASIC_BLOCK_FOR_FN (fun, ENTRY_BLOCK))->flags
5306 5530190 : |= EDGE_EXECUTABLE;
5307 5530190 : auto_vec<gimple *, 4> to_fixup;
5308 5530190 : auto_vec<gimple *, 32> to_remove;
5309 5530190 : auto_vec<unsigned, 32> to_remove_defs;
5310 5530190 : auto_vec<std::pair<int, int>, 10> edges_to_remove;
5311 5530190 : auto_bitmap simple_dce_worklist;
5312 5530190 : auto_bitmap need_ab_cleanup;
5313 5530190 : to_purge = BITMAP_ALLOC (NULL);
5314 5530190 : auto_vec<vec_perm_simplify_seq, 8> vec_perm_simplify_seq_list;
5315 49755215 : for (int i = 0; i < postorder_num; ++i)
5316 : {
5317 44225025 : gimple_stmt_iterator gsi;
5318 44225025 : basic_block bb = BASIC_BLOCK_FOR_FN (fun, postorder[i]);
5319 44225025 : edge_iterator ei;
5320 44225025 : edge e;
5321 :
5322 : /* Skip processing non-executable blocks. We could improve
5323 : single_use tracking by at least unlinking uses from unreachable
5324 : blocks but since blocks with uses are not processed in a
5325 : meaningful order this is probably not worth it. */
5326 44225025 : bool any = false;
5327 45356660 : FOR_EACH_EDGE (e, ei, bb->preds)
5328 : {
5329 45342422 : if ((e->flags & EDGE_EXECUTABLE)
5330 : /* We can handle backedges in natural loops correctly but
5331 : for irreducible regions we have to take all backedges
5332 : conservatively when we did not visit the source yet. */
5333 45342422 : || (bb_to_rpo[e->src->index] > i
5334 660920 : && !dominated_by_p (CDI_DOMINATORS, e->src, e->dest)))
5335 : {
5336 : any = true;
5337 : break;
5338 : }
5339 : }
5340 44225025 : if (!any)
5341 14849 : continue;
5342 :
5343 : /* Remove conditions that go directly to unreachable when this is the last forwprop. */
5344 44210787 : if (last_p
5345 9662982 : && !(flag_sanitize & SANITIZE_UNREACHABLE))
5346 : {
5347 9658002 : gimple_stmt_iterator gsi;
5348 9658002 : gsi = gsi_start_nondebug_after_labels_bb (bb);
5349 9658613 : if (!gsi_end_p (gsi)
5350 8848333 : && gimple_call_builtin_p (*gsi, BUILT_IN_UNREACHABLE)
5351 9660254 : && optimize_unreachable (bb))
5352 : {
5353 611 : cfg_changed = true;
5354 611 : continue;
5355 : }
5356 : }
5357 :
5358 : /* Record degenerate PHIs in the lattice. */
5359 59835876 : for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (si);
5360 15625700 : gsi_next (&si))
5361 : {
5362 15625700 : gphi *phi = si.phi ();
5363 15625700 : tree res = gimple_phi_result (phi);
5364 31251400 : if (virtual_operand_p (res))
5365 7164235 : continue;
5366 :
5367 8461465 : tree first = NULL_TREE;
5368 8461465 : bool all_same = true;
5369 8461465 : edge_iterator ei;
5370 8461465 : edge e;
5371 17389630 : FOR_EACH_EDGE (e, ei, bb->preds)
5372 : {
5373 : /* Ignore not executable forward edges. */
5374 17188372 : if (!(e->flags & EDGE_EXECUTABLE))
5375 : {
5376 3995962 : if (bb_to_rpo[e->src->index] < i)
5377 5494 : continue;
5378 : /* Avoid equivalences from backedges - while we might
5379 : be able to make irreducible regions reducible and
5380 : thus turning a back into a forward edge we do not
5381 : want to deal with the intermediate SSA issues that
5382 : exposes. */
5383 : all_same = false;
5384 : }
5385 17182878 : tree use = PHI_ARG_DEF_FROM_EDGE (phi, e);
5386 17182878 : if (use == res)
5387 : /* The PHI result can also appear on a backedge; if so
5388 : we can ignore this case for the purpose of determining
5389 : the singular value. */
5390 : ;
5391 17170504 : else if (! first)
5392 : first = use;
5393 8709039 : else if (! operand_equal_p (first, use, 0))
5394 : {
5395 : all_same = false;
5396 : break;
5397 : }
5398 : }
5399 8461465 : if (all_same)
5400 : {
5401 196754 : if (may_propagate_copy (res, first))
5402 196265 : to_remove_defs.safe_push (SSA_NAME_VERSION (res));
5403 196754 : fwprop_set_lattice_val (res, first);
5404 : }
5405 : }
5406 :
5407 : /* Apply forward propagation to all stmts in the basic-block.
5408 : Note we update GSI within the loop as necessary. */
5409 44210176 : unsigned int uid = 1;
5410 419429406 : for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
5411 : {
5412 331009054 : gimple *stmt = gsi_stmt (gsi);
5413 331009054 : tree lhs, rhs;
5414 331009054 : enum tree_code code;
5415 :
5416 331009054 : gimple_set_uid (stmt, uid++);
5417 :
5418 331009054 : if (!is_gimple_assign (stmt))
5419 : {
5420 228055328 : process_vec_perm_simplify_seq_list (&vec_perm_simplify_seq_list);
5421 228055328 : gsi_next (&gsi);
5422 228055328 : continue;
5423 : }
5424 :
5425 102953726 : lhs = gimple_assign_lhs (stmt);
5426 102953726 : rhs = gimple_assign_rhs1 (stmt);
5427 102953726 : code = gimple_assign_rhs_code (stmt);
5428 :
5429 140548470 : if (TREE_CODE (lhs) != SSA_NAME
5430 102953726 : || has_zero_uses (lhs))
5431 : {
5432 37594744 : process_vec_perm_simplify_seq_list (&vec_perm_simplify_seq_list);
5433 37594744 : gsi_next (&gsi);
5434 37594744 : continue;
5435 : }
5436 :
5437 : /* If this statement sets an SSA_NAME to an address,
5438 : try to propagate the address into the uses of the SSA_NAME. */
5439 65358982 : if ((code == ADDR_EXPR
5440 : /* Handle pointer conversions on invariant addresses
5441 : as well, as this is valid gimple. */
5442 63133635 : || (CONVERT_EXPR_CODE_P (code)
5443 8711206 : && TREE_CODE (rhs) == ADDR_EXPR
5444 350262 : && POINTER_TYPE_P (TREE_TYPE (lhs))))
5445 65359206 : && TREE_CODE (TREE_OPERAND (rhs, 0)) != TARGET_MEM_REF)
5446 : {
5447 2224916 : tree base = get_base_address (TREE_OPERAND (rhs, 0));
5448 2224916 : if ((!base
5449 2224916 : || !DECL_P (base)
5450 130714 : || decl_address_invariant_p (base))
5451 2224916 : && !stmt_references_abnormal_ssa_name (stmt)
5452 4449816 : && forward_propagate_addr_expr (lhs, rhs, true))
5453 : {
5454 451751 : fwprop_invalidate_lattice (gimple_get_lhs (stmt));
5455 451751 : release_defs (stmt);
5456 451751 : gsi_remove (&gsi, true);
5457 : }
5458 : else
5459 1773165 : gsi_next (&gsi);
5460 : }
5461 63134066 : else if (code == POINTER_PLUS_EXPR)
5462 : {
5463 3544199 : tree off = gimple_assign_rhs2 (stmt);
5464 3544199 : if (TREE_CODE (off) == INTEGER_CST
5465 1087581 : && can_propagate_from (stmt)
5466 1087228 : && !simple_iv_increment_p (stmt)
5467 : /* ??? Better adjust the interface to that function
5468 : instead of building new trees here. */
5469 4347198 : && forward_propagate_addr_expr
5470 2408997 : (lhs,
5471 : build1_loc (gimple_location (stmt),
5472 802999 : ADDR_EXPR, TREE_TYPE (rhs),
5473 802999 : fold_build2 (MEM_REF,
5474 : TREE_TYPE (TREE_TYPE (rhs)),
5475 : rhs,
5476 : fold_convert (ptr_type_node,
5477 : off))), true))
5478 : {
5479 310753 : fwprop_invalidate_lattice (gimple_get_lhs (stmt));
5480 310753 : release_defs (stmt);
5481 310753 : gsi_remove (&gsi, true);
5482 : }
5483 3233446 : else if (is_gimple_min_invariant (rhs))
5484 : {
5485 : /* Make sure to fold &a[0] + off_1 here. */
5486 401859 : fold_stmt_inplace (&gsi);
5487 401859 : update_stmt (stmt);
5488 401859 : if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
5489 401841 : gsi_next (&gsi);
5490 : }
5491 : else
5492 2831587 : gsi_next (&gsi);
5493 : }
5494 59589867 : else if (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE
5495 211908 : && gimple_assign_load_p (stmt)
5496 134528 : && !gimple_has_volatile_ops (stmt)
5497 40453 : && TREE_CODE (rhs) != TARGET_MEM_REF
5498 40424 : && TREE_CODE (rhs) != BIT_FIELD_REF
5499 59630287 : && !stmt_can_throw_internal (fun, stmt))
5500 : {
5501 : /* Rewrite loads used only in real/imagpart extractions to
5502 : component-wise loads. */
5503 40295 : use_operand_p use_p;
5504 40295 : imm_use_iterator iter;
5505 40295 : tree vuse = gimple_vuse (stmt);
5506 40295 : bool rewrite = true;
5507 85347 : FOR_EACH_IMM_USE_FAST (use_p, iter, lhs)
5508 : {
5509 42967 : gimple *use_stmt = USE_STMT (use_p);
5510 42967 : if (is_gimple_debug (use_stmt))
5511 691 : continue;
5512 42276 : if (!is_gimple_assign (use_stmt)
5513 27606 : || (gimple_assign_rhs_code (use_stmt) != REALPART_EXPR
5514 25558 : && gimple_assign_rhs_code (use_stmt) != IMAGPART_EXPR)
5515 46342 : || TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0) != lhs)
5516 : {
5517 : rewrite = false;
5518 : break;
5519 : }
5520 40295 : }
5521 40295 : if (rewrite)
5522 : {
5523 2085 : gimple *use_stmt;
5524 8661 : FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
5525 : {
5526 4491 : if (is_gimple_debug (use_stmt))
5527 : {
5528 454 : if (gimple_debug_bind_p (use_stmt))
5529 : {
5530 454 : gimple_debug_bind_reset_value (use_stmt);
5531 454 : update_stmt (use_stmt);
5532 : }
5533 454 : continue;
5534 : }
5535 :
5536 8074 : tree new_rhs = build1 (gimple_assign_rhs_code (use_stmt),
5537 4037 : TREE_TYPE (TREE_TYPE (rhs)),
5538 : unshare_expr (rhs));
5539 4037 : gimple *new_stmt
5540 4037 : = gimple_build_assign (gimple_assign_lhs (use_stmt),
5541 : new_rhs);
5542 :
5543 4037 : location_t loc = gimple_location (use_stmt);
5544 4037 : gimple_set_location (new_stmt, loc);
5545 4037 : gimple_set_vuse (new_stmt, vuse);
5546 4037 : gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
5547 4037 : unlink_stmt_vdef (use_stmt);
5548 4037 : gsi_remove (&gsi2, true);
5549 :
5550 4037 : gsi_insert_before (&gsi, new_stmt, GSI_SAME_STMT);
5551 2085 : }
5552 :
5553 2085 : release_defs (stmt);
5554 2085 : gsi_remove (&gsi, true);
5555 : }
5556 : else
5557 38210 : gsi_next (&gsi);
5558 : }
5559 59549572 : else if (TREE_CODE (TREE_TYPE (lhs)) == VECTOR_TYPE
5560 1691508 : && (TYPE_MODE (TREE_TYPE (lhs)) == BLKmode
5561 : /* After vector lowering rewrite all loads, but
5562 : initially do not since this conflicts with
5563 : vector CONSTRUCTOR to shuffle optimization. */
5564 1669517 : || (fun->curr_properties & PROP_gimple_lvec))
5565 900713 : && gimple_assign_load_p (stmt)
5566 303040 : && !gimple_has_volatile_ops (stmt)
5567 289171 : && !stmt_can_throw_internal (fun, stmt)
5568 59838743 : && (!VAR_P (rhs) || !DECL_HARD_REGISTER (rhs)))
5569 288669 : optimize_vector_load (&gsi);
5570 :
5571 59260903 : else if (code == COMPLEX_EXPR)
5572 : {
5573 : /* Rewrite stores of a single-use complex build expression
5574 : to component-wise stores. */
5575 36571 : use_operand_p use_p;
5576 36571 : gimple *use_stmt, *def1, *def2;
5577 36571 : tree rhs2;
5578 36571 : if (single_imm_use (lhs, &use_p, &use_stmt)
5579 34406 : && gimple_store_p (use_stmt)
5580 41038 : && !gimple_has_volatile_ops (use_stmt)
5581 2603 : && is_gimple_assign (use_stmt)
5582 2599 : && (TREE_CODE (TREE_TYPE (gimple_assign_lhs (use_stmt)))
5583 : == COMPLEX_TYPE)
5584 39165 : && (TREE_CODE (gimple_assign_lhs (use_stmt))
5585 : != TARGET_MEM_REF))
5586 : {
5587 2590 : tree use_lhs = gimple_assign_lhs (use_stmt);
5588 2590 : if (auto_var_p (use_lhs))
5589 601 : DECL_NOT_GIMPLE_REG_P (use_lhs) = 1;
5590 5180 : tree new_lhs = build1 (REALPART_EXPR,
5591 2590 : TREE_TYPE (TREE_TYPE (use_lhs)),
5592 : unshare_expr (use_lhs));
5593 2590 : gimple *new_stmt = gimple_build_assign (new_lhs, rhs);
5594 2590 : location_t loc = gimple_location (use_stmt);
5595 2590 : gimple_set_location (new_stmt, loc);
5596 5180 : gimple_set_vuse (new_stmt, gimple_vuse (use_stmt));
5597 2590 : gimple_set_vdef (new_stmt, make_ssa_name (gimple_vop (fun)));
5598 5180 : SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
5599 5180 : gimple_set_vuse (use_stmt, gimple_vdef (new_stmt));
5600 2590 : gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
5601 2590 : gsi_insert_before (&gsi2, new_stmt, GSI_SAME_STMT);
5602 :
5603 5180 : new_lhs = build1 (IMAGPART_EXPR,
5604 2590 : TREE_TYPE (TREE_TYPE (use_lhs)),
5605 : unshare_expr (use_lhs));
5606 2590 : gimple_assign_set_lhs (use_stmt, new_lhs);
5607 2590 : gimple_assign_set_rhs1 (use_stmt, gimple_assign_rhs2 (stmt));
5608 2590 : update_stmt (use_stmt);
5609 :
5610 2590 : release_defs (stmt);
5611 2590 : gsi_remove (&gsi, true);
5612 : }
5613 : /* Rewrite a component-wise load of a complex to a complex
5614 : load if the components are not used separately. */
5615 33981 : else if (TREE_CODE (rhs) == SSA_NAME
5616 33540 : && has_single_use (rhs)
5617 30050 : && ((rhs2 = gimple_assign_rhs2 (stmt)), true)
5618 30050 : && TREE_CODE (rhs2) == SSA_NAME
5619 28305 : && has_single_use (rhs2)
5620 27885 : && (def1 = SSA_NAME_DEF_STMT (rhs),
5621 27885 : gimple_assign_load_p (def1))
5622 1085 : && (def2 = SSA_NAME_DEF_STMT (rhs2),
5623 1085 : gimple_assign_load_p (def2))
5624 1582 : && (gimple_vuse (def1) == gimple_vuse (def2))
5625 788 : && !gimple_has_volatile_ops (def1)
5626 788 : && !gimple_has_volatile_ops (def2)
5627 788 : && !stmt_can_throw_internal (fun, def1)
5628 788 : && !stmt_can_throw_internal (fun, def2)
5629 788 : && gimple_assign_rhs_code (def1) == REALPART_EXPR
5630 542 : && gimple_assign_rhs_code (def2) == IMAGPART_EXPR
5631 34523 : && operand_equal_p (TREE_OPERAND (gimple_assign_rhs1
5632 : (def1), 0),
5633 542 : TREE_OPERAND (gimple_assign_rhs1
5634 : (def2), 0)))
5635 : {
5636 542 : tree cl = TREE_OPERAND (gimple_assign_rhs1 (def1), 0);
5637 542 : gimple_assign_set_rhs_from_tree (&gsi, unshare_expr (cl));
5638 542 : gcc_assert (gsi_stmt (gsi) == stmt);
5639 1084 : gimple_set_vuse (stmt, gimple_vuse (def1));
5640 542 : gimple_set_modified (stmt, true);
5641 542 : gimple_stmt_iterator gsi2 = gsi_for_stmt (def1);
5642 542 : gsi_remove (&gsi, false);
5643 542 : gsi_insert_after (&gsi2, stmt, GSI_SAME_STMT);
5644 : }
5645 : else
5646 33439 : gsi_next (&gsi);
5647 : }
5648 59224332 : else if (code == CONSTRUCTOR
5649 154477 : && VECTOR_TYPE_P (TREE_TYPE (rhs))
5650 154477 : && TYPE_MODE (TREE_TYPE (rhs)) == BLKmode
5651 3436 : && CONSTRUCTOR_NELTS (rhs) > 0
5652 59227768 : && (!VECTOR_TYPE_P (TREE_TYPE (CONSTRUCTOR_ELT (rhs, 0)->value))
5653 1050 : || (TYPE_MODE (TREE_TYPE (CONSTRUCTOR_ELT (rhs, 0)->value))
5654 : != BLKmode)))
5655 : {
5656 : /* Rewrite stores of a single-use vector constructor
5657 : to component-wise stores if the mode isn't supported. */
5658 3341 : use_operand_p use_p;
5659 3341 : gimple *use_stmt;
5660 3341 : if (single_imm_use (lhs, &use_p, &use_stmt)
5661 2903 : && gimple_store_p (use_stmt)
5662 2948 : && !gimple_has_volatile_ops (use_stmt)
5663 1468 : && !stmt_can_throw_internal (fun, use_stmt)
5664 4802 : && is_gimple_assign (use_stmt))
5665 : {
5666 1461 : tree elt_t = TREE_TYPE (CONSTRUCTOR_ELT (rhs, 0)->value);
5667 1461 : unsigned HOST_WIDE_INT elt_w
5668 1461 : = tree_to_uhwi (TYPE_SIZE (elt_t));
5669 1461 : unsigned HOST_WIDE_INT n
5670 1461 : = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (rhs)));
5671 1461 : tree use_lhs = gimple_assign_lhs (use_stmt);
5672 1461 : if (auto_var_p (use_lhs))
5673 554 : DECL_NOT_GIMPLE_REG_P (use_lhs) = 1;
5674 907 : else if (TREE_CODE (use_lhs) == TARGET_MEM_REF)
5675 : {
5676 1 : gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
5677 1 : use_lhs = prepare_target_mem_ref_lvalue (use_lhs, &gsi2);
5678 : }
5679 32835 : for (unsigned HOST_WIDE_INT bi = 0; bi < n; bi += elt_w)
5680 : {
5681 31374 : unsigned HOST_WIDE_INT ci = bi / elt_w;
5682 31374 : tree new_rhs;
5683 31374 : if (ci < CONSTRUCTOR_NELTS (rhs))
5684 30756 : new_rhs = CONSTRUCTOR_ELT (rhs, ci)->value;
5685 : else
5686 618 : new_rhs = build_zero_cst (elt_t);
5687 31374 : tree new_lhs = build3 (BIT_FIELD_REF,
5688 : elt_t,
5689 : unshare_expr (use_lhs),
5690 31374 : bitsize_int (elt_w),
5691 31374 : bitsize_int (bi));
5692 31374 : gimple *new_stmt = gimple_build_assign (new_lhs, new_rhs);
5693 31374 : location_t loc = gimple_location (use_stmt);
5694 31374 : gimple_set_location (new_stmt, loc);
5695 62748 : gimple_set_vuse (new_stmt, gimple_vuse (use_stmt));
5696 31374 : gimple_set_vdef (new_stmt,
5697 : make_ssa_name (gimple_vop (fun)));
5698 62748 : SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
5699 62748 : gimple_set_vuse (use_stmt, gimple_vdef (new_stmt));
5700 31374 : gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
5701 31374 : gsi_insert_before (&gsi2, new_stmt, GSI_SAME_STMT);
5702 : }
5703 1461 : gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
5704 1461 : unlink_stmt_vdef (use_stmt);
5705 1461 : release_defs (use_stmt);
5706 1461 : gsi_remove (&gsi2, true);
5707 1461 : release_defs (stmt);
5708 1461 : gsi_remove (&gsi, true);
5709 : }
5710 : else
5711 1880 : gsi_next (&gsi);
5712 : }
5713 59220991 : else if (code == VEC_PERM_EXPR)
5714 : {
5715 : /* Find vectorized sequences where we can reduce the lane
5716 : utilization. The narrowing will be done later and only
5717 : if we find a pair of sequences that can be blended. */
5718 182417 : gassign *assign = dyn_cast <gassign *> (stmt);
5719 182417 : vec_perm_simplify_seq seq;
5720 182417 : if (recognise_vec_perm_simplify_seq (assign, &seq))
5721 101 : append_vec_perm_simplify_seq_list (&vec_perm_simplify_seq_list,
5722 : seq);
5723 :
5724 182417 : gsi_next (&gsi);
5725 : }
5726 : else
5727 59038574 : gsi_next (&gsi);
5728 : }
5729 :
5730 44210176 : process_vec_perm_simplify_seq_list (&vec_perm_simplify_seq_list);
5731 :
5732 : /* Combine stmts with the stmts defining their operands.
5733 : Note we update GSI within the loop as necessary. */
5734 419086319 : for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5735 : {
5736 330665967 : gimple *stmt = gsi_stmt (gsi);
5737 :
5738 : /* Mark stmt as potentially needing revisiting. */
5739 330665967 : gimple_set_plf (stmt, GF_PLF_1, false);
5740 :
5741 330665967 : bool can_make_abnormal_goto = (is_gimple_call (stmt)
5742 330665967 : && stmt_can_make_abnormal_goto (stmt));
5743 :
5744 : /* Substitute from our lattice. We need to do so only once. */
5745 330665967 : bool substituted_p = false;
5746 330665967 : use_operand_p usep;
5747 330665967 : ssa_op_iter iter;
5748 489344128 : FOR_EACH_SSA_USE_OPERAND (usep, stmt, iter, SSA_OP_USE)
5749 : {
5750 158678161 : tree use = USE_FROM_PTR (usep);
5751 158678161 : tree val = fwprop_ssa_val (use);
5752 158678161 : if (val && val != use)
5753 : {
5754 1822293 : if (!is_gimple_debug (stmt))
5755 1517095 : bitmap_set_bit (simple_dce_worklist, SSA_NAME_VERSION (use));
5756 1822293 : if (may_propagate_copy (use, val))
5757 : {
5758 1819112 : propagate_value (usep, val);
5759 1819112 : substituted_p = true;
5760 : }
5761 : }
5762 : }
5763 330665967 : if (substituted_p)
5764 1766166 : update_stmt (stmt);
5765 1766166 : if (substituted_p
5766 1766166 : && is_gimple_assign (stmt)
5767 1059215 : && gimple_assign_rhs_code (stmt) == ADDR_EXPR)
5768 20091 : recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
5769 330665967 : if (substituted_p
5770 330665967 : && can_make_abnormal_goto
5771 330665967 : && !stmt_can_make_abnormal_goto (stmt))
5772 3 : bitmap_set_bit (need_ab_cleanup, bb->index);
5773 :
5774 333451522 : bool changed;
5775 666903044 : do
5776 : {
5777 333451522 : gimple *orig_stmt = stmt = gsi_stmt (gsi);
5778 333451522 : bool was_call = is_gimple_call (stmt);
5779 333451522 : bool was_noreturn = (was_call
5780 333451522 : && gimple_call_noreturn_p (stmt));
5781 333451522 : changed = false;
5782 :
5783 333451522 : auto_vec<tree, 8> uses;
5784 495094082 : FOR_EACH_SSA_USE_OPERAND (usep, stmt, iter, SSA_OP_USE)
5785 161642560 : if (uses.space (1))
5786 161254700 : uses.quick_push (USE_FROM_PTR (usep));
5787 :
5788 333451522 : if (fold_stmt (&gsi, fwprop_ssa_val, simple_dce_worklist))
5789 : {
5790 2469712 : changed = true;
5791 : /* There is no updating of the address-taken
5792 : information after the last forwprop, so update
5793 : the addresses when a folding happened to a call.
5794 : The va_* builtins can remove taking of the address, and so
5795 : can the sincos->cexpi transformation. See PR 39643 and PR 20983. */
5796 2469712 : if (was_call && last_p)
5797 2469712 : todoflags |= TODO_update_address_taken;
5798 2469712 : stmt = gsi_stmt (gsi);
5799 : /* Cleanup the CFG if we simplified a condition to
5800 : true or false. */
5801 2469712 : if (gcond *cond = dyn_cast <gcond *> (stmt))
5802 980504 : if (gimple_cond_true_p (cond)
5803 980504 : || gimple_cond_false_p (cond))
5804 15330 : cfg_changed = true;
5805 : /* Queue old uses for simple DCE if not debug statement. */
5806 2469712 : if (!is_gimple_debug (stmt))
5807 10447040 : for (tree use : uses)
5808 3058307 : if (TREE_CODE (use) == SSA_NAME
5809 3058307 : && !SSA_NAME_IS_DEFAULT_DEF (use))
5810 2863488 : bitmap_set_bit (simple_dce_worklist,
5811 2863488 : SSA_NAME_VERSION (use));
5812 2469712 : update_stmt (stmt);
5813 : }
5814 :
5815 333451522 : switch (gimple_code (stmt))
5816 : {
5817 103946577 : case GIMPLE_ASSIGN:
5818 103946577 : {
5819 103946577 : tree rhs1 = gimple_assign_rhs1 (stmt);
5820 103946577 : enum tree_code code = gimple_assign_rhs_code (stmt);
5821 103946577 : if (gimple_clobber_p (stmt))
5822 6765836 : do_simple_agr_dse (as_a<gassign*>(stmt), full_walk);
5823 97180741 : else if (gimple_store_p (stmt))
5824 : {
5825 30206758 : optimize_aggr_zeroprop (stmt, full_walk);
5826 30206758 : if (gimple_assign_load_p (stmt))
5827 3690061 : optimize_agr_copyprop (stmt);
5828 : }
5829 66973983 : else if (TREE_CODE_CLASS (code) == tcc_comparison)
5830 2537904 : changed |= forward_propagate_into_comparison (&gsi);
5831 64436079 : else if ((code == PLUS_EXPR
5832 64436079 : || code == BIT_IOR_EXPR
5833 54501544 : || code == BIT_XOR_EXPR)
5834 64567128 : && simplify_rotate (&gsi))
5835 : changed = true;
5836 64433405 : else if (code == VEC_PERM_EXPR)
5837 184475 : changed |= simplify_permutation (&gsi);
5838 64248930 : else if (code == CONSTRUCTOR
5839 64248930 : && TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
5840 152307 : changed |= simplify_vector_constructor (&gsi);
5841 64096623 : else if (code == ARRAY_REF)
5842 1952799 : changed |= simplify_count_zeroes (&gsi);
5843 : break;
5844 : }
5845 :
5846 103004 : case GIMPLE_SWITCH:
5847 103004 : changed |= simplify_gimple_switch (as_a <gswitch *> (stmt),
5848 : edges_to_remove,
5849 : simple_dce_worklist);
5850 103004 : break;
5851 :
5852 19147783 : case GIMPLE_COND:
5853 19147783 : {
5854 19147783 : int did_something = forward_propagate_into_gimple_cond
5855 19147783 : (as_a <gcond *> (stmt));
5856 19147783 : if (did_something == 2)
5857 1691 : cfg_changed = true;
5858 19147783 : changed |= did_something != 0;
5859 19147783 : break;
5860 : }
5861 :
5862 23070302 : case GIMPLE_CALL:
5863 23070302 : {
5864 23070302 : tree callee = gimple_call_fndecl (stmt);
5865 23070302 : if (callee != NULL_TREE
5866 23070302 : && fndecl_built_in_p (callee, BUILT_IN_NORMAL))
5867 6136585 : changed |= simplify_builtin_call (&gsi, callee, full_walk);
5868 : break;
5869 : }
5870 :
5871 333448848 : default:;
5872 : }
5873 :
5874 333448848 : if (changed || substituted_p)
5875 : {
5876 4027114 : substituted_p = false;
5877 4027114 : stmt = gsi_stmt (gsi);
5878 4027114 : if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
5879 70 : bitmap_set_bit (to_purge, bb->index);
5880 4027114 : if (!was_noreturn
5881 4027114 : && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
5882 12 : to_fixup.safe_push (stmt);
5883 : }
5884 4027114 : if (changed)
5885 : {
5886 : /* If the stmt changed then re-visit it and the statements
5887 : inserted before it. */
5888 8719247 : for (; !gsi_end_p (gsi); gsi_prev (&gsi))
5889 5531078 : if (gimple_plf (gsi_stmt (gsi), GF_PLF_1))
5890 : break;
5891 2785555 : if (gsi_end_p (gsi))
5892 442646 : gsi = gsi_start_bb (bb);
5893 : else
5894 2564232 : gsi_next (&gsi);
5895 : }
5896 333451522 : }
5897 : while (changed);
5898 :
5899 : /* Stmt no longer needs to be revisited. */
5900 330665967 : stmt = gsi_stmt (gsi);
5901 330665967 : gcc_checking_assert (!gimple_plf (stmt, GF_PLF_1));
5902 330665967 : gimple_set_plf (stmt, GF_PLF_1, true);
5903 :
5904 : /* Fill up the lattice. */
5905 330665967 : if (gimple_assign_single_p (stmt))
5906 : {
5907 68665844 : tree lhs = gimple_assign_lhs (stmt);
5908 68665844 : tree rhs = gimple_assign_rhs1 (stmt);
5909 68665844 : if (TREE_CODE (lhs) == SSA_NAME)
5910 : {
5911 31706159 : tree val = lhs;
5912 31706159 : if (TREE_CODE (rhs) == SSA_NAME)
5913 781430 : val = fwprop_ssa_val (rhs);
5914 30924729 : else if (is_gimple_min_invariant (rhs))
5915 423588 : val = rhs;
5916 : /* If we can propagate the lattice-value mark the
5917 : stmt for removal. */
5918 31706159 : if (val != lhs
5919 31706159 : && may_propagate_copy (lhs, val))
5920 1201654 : to_remove_defs.safe_push (SSA_NAME_VERSION (lhs));
5921 31706159 : fwprop_set_lattice_val (lhs, val);
5922 : }
5923 : }
5924 262000123 : else if (gimple_nop_p (stmt))
5925 88641 : to_remove.safe_push (stmt);
5926 : }
5927 :
5928 : /* Substitute in destination PHI arguments. */
5929 106437239 : FOR_EACH_EDGE (e, ei, bb->succs)
5930 62227063 : for (gphi_iterator gsi = gsi_start_phis (e->dest);
5931 103451908 : !gsi_end_p (gsi); gsi_next (&gsi))
5932 : {
5933 41224845 : gphi *phi = gsi.phi ();
5934 41224845 : use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
5935 41224845 : tree arg = USE_FROM_PTR (use_p);
5936 68020709 : if (TREE_CODE (arg) != SSA_NAME
5937 41224845 : || virtual_operand_p (arg))
5938 26795864 : continue;
5939 14428981 : tree val = fwprop_ssa_val (arg);
5940 14428981 : if (val != arg
5941 14428981 : && may_propagate_copy (arg, val, !(e->flags & EDGE_ABNORMAL)))
5942 236319 : propagate_value (use_p, val);
5943 : }
5944 :
5945 : /* Mark outgoing exectuable edges. */
5946 44210176 : if (edge e = find_taken_edge (bb, NULL))
5947 : {
5948 18764994 : e->flags |= EDGE_EXECUTABLE;
5949 44231406 : if (EDGE_COUNT (bb->succs) > 1)
5950 21230 : cfg_changed = true;
5951 : }
5952 : else
5953 : {
5954 68886020 : FOR_EACH_EDGE (e, ei, bb->succs)
5955 43440838 : e->flags |= EDGE_EXECUTABLE;
5956 : }
5957 : }
5958 5530190 : free (postorder);
5959 5530190 : free (bb_to_rpo);
5960 5530190 : lattice.release ();
5961 :
5962 : /* First remove chains of stmts where we check no uses remain. */
5963 5530190 : simple_dce_from_worklist (simple_dce_worklist, to_purge);
5964 :
5965 5869551 : auto remove = [](gimple *stmt)
5966 : {
5967 339361 : if (dump_file && (dump_flags & TDF_DETAILS))
5968 : {
5969 1 : fprintf (dump_file, "Removing dead stmt ");
5970 1 : print_gimple_stmt (dump_file, stmt, 0);
5971 1 : fprintf (dump_file, "\n");
5972 : }
5973 339361 : gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
5974 339361 : if (gimple_code (stmt) == GIMPLE_PHI)
5975 84562 : remove_phi_node (&gsi, true);
5976 : else
5977 : {
5978 254799 : unlink_stmt_vdef (stmt);
5979 254799 : gsi_remove (&gsi, true);
5980 254799 : release_defs (stmt);
5981 : }
5982 339361 : };
5983 :
5984 : /* Then remove stmts we know we can remove even though we did not
5985 : substitute in dead code regions, so uses can remain. Do so in reverse
5986 : order to make debug stmt creation possible. */
5987 12458299 : while (!to_remove_defs.is_empty())
5988 : {
5989 1397919 : tree def = ssa_name (to_remove_defs.pop ());
5990 : /* For example remove_prop_source_from_use can remove stmts queued
5991 : for removal. Deal with this gracefully. */
5992 1397919 : if (!def)
5993 1147199 : continue;
5994 250720 : gimple *stmt = SSA_NAME_DEF_STMT (def);
5995 250720 : remove (stmt);
5996 : }
5997 :
5998 : /* Wipe other queued stmts that do not have SSA defs. */
5999 5618831 : while (!to_remove.is_empty())
6000 : {
6001 88641 : gimple *stmt = to_remove.pop ();
6002 88641 : remove (stmt);
6003 : }
6004 :
6005 : /* Fix up stmts that became noreturn calls. This may require splitting
6006 : blocks and thus isn't possible during the walk. Do this
6007 : in reverse order so we don't inadvertently remove a stmt we want to
6008 : fix up by visiting a dominating now-noreturn call first. */
6009 5530202 : while (!to_fixup.is_empty ())
6010 : {
6011 12 : gimple *stmt = to_fixup.pop ();
6012 12 : if (dump_file && dump_flags & TDF_DETAILS)
6013 : {
6014 0 : fprintf (dump_file, "Fixing up noreturn call ");
6015 0 : print_gimple_stmt (dump_file, stmt, 0);
6016 0 : fprintf (dump_file, "\n");
6017 : }
6018 12 : cfg_changed |= fixup_noreturn_call (stmt);
6019 : }
6020 :
6021 5530190 : cfg_changed |= gimple_purge_all_dead_eh_edges (to_purge);
6022 5530190 : cfg_changed |= gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);
6023 5530190 : BITMAP_FREE (to_purge);
6024 :
6025 : /* Remove edges queued from switch stmt simplification. */
6026 16590570 : for (auto ep : edges_to_remove)
6027 : {
6028 0 : basic_block src = BASIC_BLOCK_FOR_FN (fun, ep.first);
6029 0 : basic_block dest = BASIC_BLOCK_FOR_FN (fun, ep.second);
6030 0 : edge e;
6031 0 : if (src && dest && (e = find_edge (src, dest)))
6032 : {
6033 0 : free_dominance_info (CDI_DOMINATORS);
6034 0 : remove_edge (e);
6035 0 : cfg_changed = true;
6036 : }
6037 : }
6038 :
6039 11058838 : if (get_range_query (fun) != get_global_range_query ())
6040 1542 : disable_ranger (fun);
6041 :
6042 5530190 : if (cfg_changed)
6043 9879 : todoflags |= TODO_cleanup_cfg;
6044 :
6045 5530190 : return todoflags;
6046 5530190 : }
6047 :
6048 : } // anon namespace
6049 :
6050 : gimple_opt_pass *
6051 288047 : make_pass_forwprop (gcc::context *ctxt)
6052 : {
6053 288047 : return new pass_forwprop (ctxt);
6054 : }
|