Line data Source code
1 : /* SCC value numbering for trees
2 : Copyright (C) 2006-2026 Free Software Foundation, Inc.
3 : Contributed by Daniel Berlin <dan@dberlin.org>
4 :
5 : This file is part of GCC.
6 :
7 : GCC is free software; you can redistribute it and/or modify
8 : it under the terms of the GNU General Public License as published by
9 : the Free Software Foundation; either version 3, or (at your option)
10 : any later version.
11 :
12 : GCC is distributed in the hope that it will be useful,
13 : but WITHOUT ANY WARRANTY; without even the implied warranty of
14 : MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 : GNU General Public License for more details.
16 :
17 : You should have received a copy of the GNU General Public License
18 : along with GCC; see the file COPYING3. If not see
19 : <http://www.gnu.org/licenses/>. */
20 :
21 : #include "config.h"
22 : #include "system.h"
23 : #include "coretypes.h"
24 : #include "backend.h"
25 : #include "rtl.h"
26 : #include "tree.h"
27 : #include "gimple.h"
28 : #include "ssa.h"
29 : #include "expmed.h"
30 : #include "insn-config.h"
31 : #include "memmodel.h"
32 : #include "emit-rtl.h"
33 : #include "cgraph.h"
34 : #include "gimple-pretty-print.h"
35 : #include "splay-tree-utils.h"
36 : #include "alias.h"
37 : #include "fold-const.h"
38 : #include "stor-layout.h"
39 : #include "cfganal.h"
40 : #include "tree-inline.h"
41 : #include "internal-fn.h"
42 : #include "gimple-iterator.h"
43 : #include "gimple-fold.h"
44 : #include "tree-eh.h"
45 : #include "gimplify.h"
46 : #include "flags.h"
47 : #include "dojump.h"
48 : #include "explow.h"
49 : #include "calls.h"
50 : #include "varasm.h"
51 : #include "stmt.h"
52 : #include "expr.h"
53 : #include "tree-dfa.h"
54 : #include "tree-ssa.h"
55 : #include "dumpfile.h"
56 : #include "cfgloop.h"
57 : #include "tree-ssa-propagate.h"
58 : #include "tree-cfg.h"
59 : #include "domwalk.h"
60 : #include "gimple-match.h"
61 : #include "stringpool.h"
62 : #include "attribs.h"
63 : #include "tree-pass.h"
64 : #include "statistics.h"
65 : #include "langhooks.h"
66 : #include "ipa-utils.h"
67 : #include "dbgcnt.h"
68 : #include "tree-cfgcleanup.h"
69 : #include "tree-ssa-loop.h"
70 : #include "tree-scalar-evolution.h"
71 : #include "tree-ssa-loop-niter.h"
72 : #include "builtins.h"
73 : #include "fold-const-call.h"
74 : #include "ipa-modref-tree.h"
75 : #include "ipa-modref.h"
76 : #include "tree-ssa-sccvn.h"
77 : #include "alloc-pool.h"
78 : #include "symbol-summary.h"
79 : #include "sreal.h"
80 : #include "ipa-cp.h"
81 : #include "ipa-prop.h"
82 : #include "target.h"
83 :
84 : /* This algorithm is based on the SCC algorithm presented by Keith
85 : Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
86 : (http://citeseer.ist.psu.edu/41805.html). In
87 : straight line code, it is equivalent to a regular hash based value
88 : numbering that is performed in reverse postorder.
89 :
90 : For code with cycles, there are two alternatives, both of which
91 : require keeping the hashtables separate from the actual list of
92 : value numbers for SSA names.
93 :
94 : 1. Iterate value numbering in an RPO walk of the blocks, removing
95 : all the entries from the hashtable after each iteration (but
96 : keeping the SSA name->value number mapping between iterations).
97 : Iterate until it does not change.
98 :
99 : 2. Perform value numbering as part of an SCC walk on the SSA graph,
100 : iterating only the cycles in the SSA graph until they do not change
101 : (using a separate, optimistic hashtable for value numbering the SCC
102 : operands).
103 :
104 : The second is not just faster in practice (because most SSA graph
105 : cycles do not involve all the variables in the graph), it also has
106 : some nice properties.
107 :
108 : One of these nice properties is that when we pop an SCC off the
109 : stack, we are guaranteed to have processed all the operands coming from
110 : *outside of that SCC*, so we do not need to do anything special to
111 : ensure they have value numbers.
112 :
113 : Another nice property is that the SCC walk is done as part of a DFS
114 : of the SSA graph, which makes it easy to perform combining and
115 : simplifying operations at the same time.
116 :
117 : The code below is deliberately written in a way that makes it easy
118 : to separate the SCC walk from the other work it does.
119 :
120 : In order to propagate constants through the code, we track which
121 : expressions contain constants, and use those while folding. In
122 : theory, we could also track expressions whose value numbers are
123 : replaced, in case we end up folding based on expression
124 : identities.
125 :
126 : In order to value number memory, we assign value numbers to vuses.
127 : This enables us to note that, for example, stores to the same
128 : address of the same value from the same starting memory states are
129 : equivalent.
130 : TODO:
131 :
132 : 1. We can iterate only the changing portions of the SCC's, but
133 : I have not seen an SCC big enough for this to be a win.
134 : 2. If you differentiate between phi nodes for loops and phi nodes
135 : for if-then-else, you can properly consider phi nodes in different
136 : blocks for equivalence.
137 : 3. We could value number vuses in more cases, particularly, whole
138 : structure copies.
139 : */
140 :
141 : /* There's no BB_EXECUTABLE but we can use BB_VISITED. */
142 : #define BB_EXECUTABLE BB_VISITED
143 :
144 : static vn_lookup_kind default_vn_walk_kind;
145 :
/* vn_nary_op hashtable helpers.  */

struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
{
  typedef vn_nary_op_s *compare_type;
  static inline hashval_t hash (const vn_nary_op_s *);
  static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
};

/* Return the computed hashcode for nary operation VNO1.  */

inline hashval_t
vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
{
  /* The hashcode is precomputed and cached in the entry.  */
  return vno1->hashcode;
}

/* Compare nary operations VNO1 and VNO2 and return true if they are
   equivalent.  */

inline bool
vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
{
  /* Pointer identity is a cheap pre-test before the structural compare.  */
  return vno1 == vno2 || vn_nary_op_eq (vno1, vno2);
}

typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
174 :
175 :
/* vn_phi hashtable helpers.  */

/* Forward declaration; the comparison treats VN_TOP arguments specially,
   see vn_phi_hasher::equal below.  */
static int
vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);

struct vn_phi_hasher : nofree_ptr_hash <vn_phi_s>
{
  static inline hashval_t hash (const vn_phi_s *);
  static inline bool equal (const vn_phi_s *, const vn_phi_s *);
};

/* Return the computed hashcode for phi operation VP1.  */

inline hashval_t
vn_phi_hasher::hash (const vn_phi_s *vp1)
{
  /* The hashcode is precomputed and cached in the entry.  */
  return vp1->hashcode;
}

/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

inline bool
vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
{
  /* Pointer identity is a cheap pre-test before the structural compare.  */
  return vp1 == vp2 || vn_phi_eq (vp1, vp2);
}

typedef hash_table<vn_phi_hasher> vn_phi_table_type;
typedef vn_phi_table_type::iterator vn_phi_iterator_type;
205 :
206 :
207 : /* Compare two reference operands P1 and P2 for equality. Return true if
208 : they are equal, and false otherwise. */
209 :
210 : static int
211 25013983 : vn_reference_op_eq (const void *p1, const void *p2)
212 : {
213 25013983 : const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
214 25013983 : const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
215 :
216 25013983 : return (vro1->opcode == vro2->opcode
217 : /* We do not care for differences in type qualification. */
218 25011970 : && (vro1->type == vro2->type
219 1146574 : || (vro1->type && vro2->type
220 1146574 : && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
221 1146574 : TYPE_MAIN_VARIANT (vro2->type))))
222 24032996 : && expressions_equal_p (vro1->op0, vro2->op0)
223 23997446 : && expressions_equal_p (vro1->op1, vro2->op1)
224 23997446 : && expressions_equal_p (vro1->op2, vro2->op2)
225 49011429 : && (vro1->opcode != CALL_EXPR || vro1->clique == vro2->clique));
226 : }
227 :
/* Free a reference operation structure VR.  */

static inline void
free_reference (vn_reference_s *vr)
{
  /* Release the operand vector; the entry itself is obstack-allocated.  */
  vr->operands.release ();
}
235 :
236 :
/* vn_reference hashtable helpers.  */

struct vn_reference_hasher : nofree_ptr_hash <vn_reference_s>
{
  static inline hashval_t hash (const vn_reference_s *);
  static inline bool equal (const vn_reference_s *, const vn_reference_s *);
};

/* Return the hashcode for a given reference operation VR1.  */

inline hashval_t
vn_reference_hasher::hash (const vn_reference_s *vr1)
{
  /* The hashcode is precomputed and cached in the entry.  */
  return vr1->hashcode;
}

/* Compare two reference entries V and C for equality.  */

inline bool
vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
{
  /* Pointer identity is a cheap pre-test before the structural compare.  */
  return v == c || vn_reference_eq (v, c);
}

typedef hash_table<vn_reference_hasher> vn_reference_table_type;
typedef vn_reference_table_type::iterator vn_reference_iterator_type;
261 :
/* Pretty-print OPS to OUTFILE.  */

void
print_vn_reference_ops (FILE *outfile, const vec<vn_reference_op_s> ops)
{
  vn_reference_op_t vro;
  unsigned int i;
  fprintf (outfile, "{");
  for (i = 0; ops.iterate (i, &vro); i++)
    {
      bool closebrace = false;
      /* SSA names and declarations print as themselves via op0 below;
	 everything else gets its tree code name and an opening '<'.  */
      if (vro->opcode != SSA_NAME
	  && TREE_CODE_CLASS (vro->opcode) != tcc_declaration)
	{
	  fprintf (outfile, "%s", get_tree_code_name (vro->opcode));
	  if (vro->op0 || vro->opcode == CALL_EXPR)
	    {
	      fprintf (outfile, "<");
	      closebrace = true;
	    }
	}
      /* For memory references also show the access alignment.  */
      if (vro->opcode == MEM_REF || vro->opcode == TARGET_MEM_REF)
	fprintf (outfile, "(A%d)", TYPE_ALIGN (vro->type));
      if (vro->op0 || vro->opcode == CALL_EXPR)
	{
	  /* A CALL_EXPR without op0 is an internal function identified
	     by the clique field.  */
	  if (!vro->op0)
	    fprintf (outfile, internal_fn_name ((internal_fn)vro->clique));
	  else
	    {
	      if (vro->opcode == MEM_REF || vro->opcode == TARGET_MEM_REF)
		{
		  fprintf (outfile, "(");
		  print_generic_expr (outfile, TREE_TYPE (vro->op0));
		  fprintf (outfile, ")");
		}
	      print_generic_expr (outfile, vro->op0);
	    }
	  if (vro->op1)
	    {
	      fprintf (outfile, ",");
	      print_generic_expr (outfile, vro->op1);
	    }
	  if (vro->op2)
	    {
	      fprintf (outfile, ",");
	      print_generic_expr (outfile, vro->op2);
	    }
	}
      if (closebrace)
	fprintf (outfile, ">");
      /* Separate operands with commas.  */
      if (i != ops.length () - 1)
	fprintf (outfile, ",");
    }
  fprintf (outfile, "}");
}
317 :
/* Dump OPS to stderr; for use from the debugger.  */

DEBUG_FUNCTION void
debug_vn_reference_ops (const vec<vn_reference_op_s> ops)
{
  print_vn_reference_ops (stderr, ops);
  fputc ('\n', stderr);
}
324 :
/* The set of VN hashtables.  */

typedef struct vn_tables_s
{
  vn_nary_op_table_type *nary;		/* N-ary operations.  */
  vn_phi_table_type *phis;		/* PHI nodes.  */
  vn_reference_table_type *references;	/* Memory references.  */
} *vn_tables_t;
333 :
334 :
/* vn_constant hashtable helpers.  */

struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
{
  static inline hashval_t hash (const vn_constant_s *);
  static inline bool equal (const vn_constant_s *, const vn_constant_s *);
};

/* Hash table hash function for vn_constant_t.  */

inline hashval_t
vn_constant_hasher::hash (const vn_constant_s *vc1)
{
  /* The hashcode is precomputed and cached in the entry.  */
  return vc1->hashcode;
}
350 :
351 : /* Hash table equality function for vn_constant_t. */
352 :
353 : inline bool
354 14568120 : vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
355 : {
356 14568120 : if (vc1->hashcode != vc2->hashcode)
357 : return false;
358 :
359 2184992 : return vn_constant_eq_with_type (vc1->constant, vc2->constant);
360 : }
361 :
362 : static hash_table<vn_constant_hasher> *constant_to_value_id;
363 :
364 :
365 : /* Obstack we allocate the vn-tables elements from. */
366 : static obstack vn_tables_obstack;
367 : /* Special obstack we never unwind. */
368 : static obstack vn_tables_insert_obstack;
369 :
370 : static vn_reference_t last_inserted_ref;
371 : static vn_phi_t last_inserted_phi;
372 : static vn_nary_op_t last_inserted_nary;
373 : static vn_ssa_aux_t last_pushed_avail;
374 :
375 : /* Valid hashtables storing information we have proven to be
376 : correct. */
377 : static vn_tables_t valid_info;
378 :
379 : /* Global RPO state for access from hooks. */
380 : static class eliminate_dom_walker *rpo_avail;
381 : basic_block vn_context_bb;
382 : int *vn_bb_to_rpo;
383 :
384 :
/* Valueization hook for simplify_replace_tree.  Valueize NAME if it is
   an SSA name, otherwise just return it.  */
tree (*vn_valueize) (tree);

/* Wrapper around vn_valueize usable as a simplify_replace_tree callback;
   CONTEXT is unused.  Temporarily switches vn_context_bb to the defining
   block of T so availability is decided there.  */
static tree
vn_valueize_for_srt (tree t, void* context ATTRIBUTE_UNUSED)
{
  basic_block saved_vn_context_bb = vn_context_bb;
  /* Look for sth available at the definition block of the argument.
     This avoids inconsistencies between availability there which
     decides if the stmt can be removed and availability at the
     use site.  The SSA property ensures that things available
     at the definition are also available at uses.  */
  if (!SSA_NAME_IS_DEFAULT_DEF (t))
    vn_context_bb = gimple_bb (SSA_NAME_DEF_STMT (t));
  tree res = vn_valueize (t);
  /* Restore the caller's context block.  */
  vn_context_bb = saved_vn_context_bb;
  return res;
}
403 :
404 :
405 : /* This represents the top of the VN lattice, which is the universal
406 : value. */
407 :
408 : tree VN_TOP;
409 :
410 : /* Unique counter for our value ids. */
411 :
412 : static unsigned int next_value_id;
413 : static int next_constant_value_id;
414 :
415 :
/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the vec.  */

struct vn_ssa_aux_hasher : typed_noop_remove <vn_ssa_aux_t>
{
  typedef vn_ssa_aux_t value_type;
  typedef tree compare_type;
  static inline hashval_t hash (const value_type &);
  static inline bool equal (const value_type &, const compare_type &);
  /* Entries are never removed individually, so deletion is a no-op.  */
  static inline void mark_deleted (value_type &) {}
  static const bool empty_zero_p = true;
  static inline void mark_empty (value_type &e) { e = NULL; }
  static inline bool is_deleted (value_type &) { return false; }
  static inline bool is_empty (value_type &e) { return e == NULL; }
};

/* Hash an ENTRY by the version number of its SSA name.  */

hashval_t
vn_ssa_aux_hasher::hash (const value_type &entry)
{
  return SSA_NAME_VERSION (entry->name);
}

/* Compare an ENTRY against the SSA NAME it is keyed on.  */

bool
vn_ssa_aux_hasher::equal (const value_type &entry, const compare_type &name)
{
  return name == entry->name;
}
445 : static hash_table<vn_ssa_aux_hasher> *vn_ssa_aux_hash;
446 : typedef hash_table<vn_ssa_aux_hasher>::iterator vn_ssa_aux_iterator_type;
447 : static struct obstack vn_ssa_aux_obstack;
448 :
449 : static vn_nary_op_t vn_nary_op_insert_stmt (gimple *, tree);
450 : static vn_nary_op_t vn_nary_op_insert_into (vn_nary_op_t,
451 : vn_nary_op_table_type *);
452 : static void init_vn_nary_op_from_pieces (vn_nary_op_t, unsigned int,
453 : enum tree_code, tree, tree *);
454 : static tree vn_lookup_simplify_result (gimple_match_op *);
455 : static vn_reference_t vn_reference_lookup_or_insert_for_pieces
456 : (tree, alias_set_type, alias_set_type, poly_int64, poly_int64, tree,
457 : vec<vn_reference_op_s, va_heap>, tree);
458 :
459 : /* Return whether there is value numbering information for a given SSA name. */
460 :
461 : bool
462 5098267 : has_VN_INFO (tree name)
463 : {
464 5098267 : return vn_ssa_aux_hash->find_with_hash (name, SSA_NAME_VERSION (name));
465 : }
466 :
/* Return the value-numbering information for NAME, creating and
   initializing it on first use.  Default definitions are initialized
   to something other than VN_TOP right away.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t *res
    = vn_ssa_aux_hash->find_slot_with_hash (name, SSA_NAME_VERSION (name),
					    INSERT);
  if (*res != NULL)
    return *res;

  /* Not present yet; allocate a zeroed entry from the obstack.  */
  vn_ssa_aux_t newinfo = *res = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  newinfo->name = name;
  newinfo->valnum = VN_TOP;
  /* We are using the visited flag to handle uses with defs not within the
     region being value-numbered.  */
  newinfo->visited = false;

  /* Given we create the VN_INFOs on-demand now we have to do initialization
     different than VN_TOP here.  */
  if (SSA_NAME_IS_DEFAULT_DEF (name))
    switch (TREE_CODE (SSA_NAME_VAR (name)))
      {
      case VAR_DECL:
	/* All undefined vars are VARYING.  */
	newinfo->valnum = name;
	newinfo->visited = true;
	break;

      case PARM_DECL:
	/* Parameters are VARYING but we can record a condition
	   if we know it is a non-NULL pointer.  */
	newinfo->visited = true;
	newinfo->valnum = name;
	if (POINTER_TYPE_P (TREE_TYPE (name))
	    && nonnull_arg_p (SSA_NAME_VAR (name)))
	  {
	    tree ops[2];
	    ops[0] = name;
	    ops[1] = build_int_cst (TREE_TYPE (name), 0);
	    vn_nary_op_t nary;
	    /* Allocate from non-unwinding stack.  */
	    nary = alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack);
	    /* Record name != 0 as known true ...  */
	    init_vn_nary_op_from_pieces (nary, 2, NE_EXPR,
					 boolean_type_node, ops);
	    nary->predicated_values = 0;
	    nary->u.result = boolean_true_node;
	    vn_nary_op_insert_into (nary, valid_info->nary);
	    gcc_assert (nary->unwind_to == NULL);
	    /* Also do not link it into the undo chain.  */
	    last_inserted_nary = nary->next;
	    nary->next = (vn_nary_op_t)(void *)-1;
	    nary = alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack);
	    /* ... and name == 0 as known false.  */
	    init_vn_nary_op_from_pieces (nary, 2, EQ_EXPR,
					 boolean_type_node, ops);
	    nary->predicated_values = 0;
	    nary->u.result = boolean_false_node;
	    vn_nary_op_insert_into (nary, valid_info->nary);
	    gcc_assert (nary->unwind_to == NULL);
	    last_inserted_nary = nary->next;
	    nary->next = (vn_nary_op_t)(void *)-1;
	    if (dump_file && (dump_flags & TDF_DETAILS))
	      {
		fprintf (dump_file, "Recording ");
		print_generic_expr (dump_file, name, TDF_SLIM);
		fprintf (dump_file, " != 0\n");
	      }
	  }
	break;

      case RESULT_DECL:
	/* If the result is passed by invisible reference the default
	   def is initialized, otherwise it's uninitialized.  Still
	   undefined is varying.  */
	newinfo->visited = true;
	newinfo->valnum = name;
	break;

      default:
	gcc_unreachable ();
      }
  return newinfo;
}
549 :
/* Return the SSA value of X.  If VISITED is non-NULL, additionally set
   it to whether X has been visited.  Unvisited or unknown names
   valueize to themselves.  */

inline tree
SSA_VAL (tree x, bool *visited = NULL)
{
  vn_ssa_aux_t tem = vn_ssa_aux_hash->find_with_hash (x, SSA_NAME_VERSION (x));
  if (visited)
    *visited = tem && tem->visited;
  return tem && tem->visited ? tem->valnum : x;
}
560 :
/* Return the SSA value of the VUSE x, supporting released VDEFs
   during elimination which will value-number the VDEF to the
   associated VUSE (but not substitute in the whole lattice).  */

static inline tree
vuse_ssa_val (tree x)
{
  if (!x)
    return NULL_TREE;

  /* Iterate since a released name's value may itself be a released
     name (see the comment above).  */
  do
    {
      x = SSA_VAL (x);
      gcc_assert (x != VN_TOP);
    }
  while (SSA_NAME_IN_FREE_LIST (x));

  return x;
}
581 : /* Similar to the above but used as callback for walk_non_aliased_vuses
582 : and thus should stop at unvisited VUSE to not walk across region
583 : boundaries. */
584 :
585 : static tree
586 1065973665 : vuse_valueize (tree vuse)
587 : {
588 1065973665 : do
589 : {
590 1065973665 : bool visited;
591 1065973665 : vuse = SSA_VAL (vuse, &visited);
592 1065973665 : if (!visited)
593 15783177 : return NULL_TREE;
594 1050190488 : gcc_assert (vuse != VN_TOP);
595 : }
596 1050190488 : while (SSA_NAME_IN_FREE_LIST (vuse));
597 : return vuse;
598 : }
599 :
600 :
/* Return the vn_kind the expression computed by the stmt should be
   associated with.  */

enum vn_kind
vn_get_stmt_kind (gimple *stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      return VN_REFERENCE;
    case GIMPLE_PHI:
      return VN_PHI;
    case GIMPLE_ASSIGN:
      {
	enum tree_code code = gimple_assign_rhs_code (stmt);
	tree rhs1 = gimple_assign_rhs1 (stmt);
	switch (get_gimple_rhs_class (code))
	  {
	  case GIMPLE_UNARY_RHS:
	  case GIMPLE_BINARY_RHS:
	  case GIMPLE_TERNARY_RHS:
	    return VN_NARY;
	  case GIMPLE_SINGLE_RHS:
	    switch (TREE_CODE_CLASS (code))
	      {
	      case tcc_reference:
		/* VOP-less references can go through unary case.  */
		if ((code == REALPART_EXPR
		     || code == IMAGPART_EXPR
		     || code == VIEW_CONVERT_EXPR
		     || code == BIT_FIELD_REF)
		    && (TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME
			|| is_gimple_min_invariant (TREE_OPERAND (rhs1, 0))))
		  return VN_NARY;

		/* Fallthrough.  */
	      case tcc_declaration:
		return VN_REFERENCE;

	      case tcc_constant:
		return VN_CONSTANT;

	      default:
		/* Invariant addresses are constants, other addresses
		   are references.  */
		if (code == ADDR_EXPR)
		  return (is_gimple_min_invariant (rhs1)
			  ? VN_CONSTANT : VN_REFERENCE);
		else if (code == CONSTRUCTOR)
		  return VN_NARY;
		return VN_NONE;
	      }
	  default:
	    return VN_NONE;
	  }
      }
    default:
      return VN_NONE;
    }
}
659 :
660 : /* Lookup a value id for CONSTANT and return it. If it does not
661 : exist returns 0. */
662 :
663 : unsigned int
664 0 : get_constant_value_id (tree constant)
665 : {
666 0 : vn_constant_s **slot;
667 0 : struct vn_constant_s vc;
668 :
669 0 : vc.hashcode = vn_hash_constant_with_type (constant);
670 0 : vc.constant = constant;
671 0 : slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
672 0 : if (slot)
673 0 : return (*slot)->value_id;
674 : return 0;
675 : }
676 :
/* Lookup a value id for CONSTANT, and if it does not exist, create a
   new one and return it.  If it does exist, return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;
  vn_constant_t vcp;

  /* If the hashtable isn't initialized we're not running from PRE and thus
     do not need value-ids.  */
  if (!constant_to_value_id)
    return 0;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id->find_slot (&vc, INSERT);
  if (*slot)
    return (*slot)->value_id;

  /* Not present; allocate a fresh entry with a new constant value id.  */
  vcp = XNEW (struct vn_constant_s);
  vcp->hashcode = vc.hashcode;
  vcp->constant = constant;
  vcp->value_id = get_next_constant_value_id ();
  *slot = vcp;
  return vcp->value_id;
}
705 :
/* Compute the hash for a reference operand VRO1, mixing it into HSTATE.  */

static void
vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
{
  hstate.add_int (vro1->opcode);
  /* A CALL_EXPR without op0 is identified by its clique field
     (an internal function code; see print_vn_reference_ops).  */
  if (vro1->opcode == CALL_EXPR && !vro1->op0)
    hstate.add_int (vro1->clique);
  if (vro1->op0)
    inchash::add_expr (vro1->op0, hstate);
  if (vro1->op1)
    inchash::add_expr (vro1->op1, hstate);
  if (vro1->op2)
    inchash::add_expr (vro1->op2, hstate);
}
721 :
/* Compute a hash for the reference operation VR1 and return it.  */

hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  inchash::hash hstate;
  hashval_t result;
  int i;
  vn_reference_op_t vro;
  /* Running sum of known constant offsets; -1 means "no offset known".  */
  poly_offset_int off = -1;
  bool deref = false;

  FOR_EACH_VEC_ELT (vr1->operands, i, vro)
    {
      if (vro->opcode == MEM_REF)
	deref = true;
      else if (vro->opcode != ADDR_EXPR)
	deref = false;
      if (maybe_ne (vro->off, -1))
	{
	  /* Accumulate constant offsets instead of hashing each operand,
	     so different access paths to the same offset hash alike.  */
	  if (known_eq (off, -1))
	    off = 0;
	  off += vro->off;
	}
      else
	{
	  /* Flush the accumulated offset before hashing an operand
	     without a known offset.  */
	  if (maybe_ne (off, -1)
	      && maybe_ne (off, 0))
	    hstate.add_poly_hwi (off.force_shwi ());
	  off = -1;
	  if (deref
	      && vro->opcode == ADDR_EXPR)
	    {
	      /* For a dereferenced address hash the address' base only.  */
	      if (vro->op0)
		{
		  tree op = TREE_OPERAND (vro->op0, 0);
		  hstate.add_int (TREE_CODE (op));
		  inchash::add_expr (op, hstate);
		}
	    }
	  else
	    vn_reference_op_compute_hash (vro, hstate);
	}
    }
  /* Do not hash vr1->offset or vr1->max_size, we want to get collisions
     to be able to identify compatible results.  */
  result = hstate.end ();
  /* ??? We would ICE later if we hash instead of adding that in.  */
  if (vr1->vuse)
    result += SSA_NAME_VERSION (vr1->vuse);

  return result;
}
775 :
/* Return true if reference operations VR1 and VR2 are equivalent.  This
   means they have the same set of operands and vuses.  If LEXICAL
   is true then the full access path has to be the same.  */

bool
vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2,
		 bool lexical)
{
  unsigned i, j;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* The offset/max_size used for the ao_ref during lookup has to be
     the same.  */
  if (maybe_ne (vr1->offset, vr2->offset)
      || maybe_ne (vr1->max_size, vr2->max_size))
    {
      /* But nothing known in the prevailing entry is OK to be used.  */
      if (maybe_ne (vr1->offset, 0) || known_size_p (vr1->max_size))
	return false;
    }

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  /* Now verify the accessed types are compatible enough for the result
     to be exchangeable.  */
  if (!vr1->type || !vr2->type)
    {
      if (vr1->type != vr2->type)
	return false;
    }
  else if (vr1->type == vr2->type)
    ;
  else if (COMPLETE_TYPE_P (vr1->type) != COMPLETE_TYPE_P (vr2->type)
	   || (COMPLETE_TYPE_P (vr1->type)
	       && !expressions_equal_p (TYPE_SIZE (vr1->type),
					TYPE_SIZE (vr2->type))))
    return false;
  else if (vr1->operands[0].opcode == CALL_EXPR
	   && !types_compatible_p (vr1->type, vr2->type))
    return false;
  else if (INTEGRAL_TYPE_P (vr1->type)
	   && INTEGRAL_TYPE_P (vr2->type))
    {
      if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
	return false;
    }
  else if (INTEGRAL_TYPE_P (vr1->type)
	   && (TYPE_PRECISION (vr1->type)
	       != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
    return false;
  else if (INTEGRAL_TYPE_P (vr2->type)
	   && (TYPE_PRECISION (vr2->type)
	       != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
    return false;
  else if (VECTOR_BOOLEAN_TYPE_P (vr1->type)
	   && VECTOR_BOOLEAN_TYPE_P (vr2->type))
    {
      /* Vector boolean types can have padding, verify we are dealing with
	 the same number of elements, aka the precision of the types.
	 For example, in most architectures the precision_size of vbool*_t
	 types are calculated like below:
	 precision_size = type_size * 8

	 Unfortunately, the RISC-V will adjust the precision_size for the
	 vbool*_t in order to align the ISA as below:
	 type_size = [1, 1, 1, 1, 2, 4, 8]
	 precision_size = [1, 2, 4, 8, 16, 32, 64]

	 Then the precision_size of RISC-V vbool*_t will not be the multiple
	 of the type_size.  We take care of this case consolidated here.  */
      if (maybe_ne (TYPE_VECTOR_SUBPARTS (vr1->type),
		    TYPE_VECTOR_SUBPARTS (vr2->type)))
	return false;
    }
  else if (TYPE_MODE (vr1->type) != TYPE_MODE (vr2->type)
	   && (!mode_can_transfer_bits (TYPE_MODE (vr1->type))
	       || !mode_can_transfer_bits (TYPE_MODE (vr2->type))))
    return false;

  /* Walk both operand vectors in parallel, combining runs of operands
     with known constant offsets into a single offset before comparing.  */
  i = 0;
  j = 0;
  do
    {
      poly_offset_int off1 = 0, off2 = 0;
      vn_reference_op_t vro1, vro2;
      vn_reference_op_s tem1, tem2;
      bool deref1 = false, deref2 = false;
      bool reverse1 = false, reverse2 = false;
      for (; vr1->operands.iterate (i, &vro1); i++)
	{
	  if (vro1->opcode == MEM_REF)
	    deref1 = true;
	  /* Do not look through a storage order barrier.  */
	  else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
	    return false;
	  reverse1 |= vro1->reverse;
	  if (lexical || known_eq (vro1->off, -1))
	    break;
	  off1 += vro1->off;
	}
      for (; vr2->operands.iterate (j, &vro2); j++)
	{
	  if (vro2->opcode == MEM_REF)
	    deref2 = true;
	  /* Do not look through a storage order barrier.  */
	  else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
	    return false;
	  reverse2 |= vro2->reverse;
	  if (lexical || known_eq (vro2->off, -1))
	    break;
	  off2 += vro2->off;
	}
      if (maybe_ne (off1, off2) || reverse1 != reverse2)
	return false;
      /* For a dereferenced address compare the address' base instead,
	 matching what the hash function does.  */
      if (deref1 && vro1->opcode == ADDR_EXPR)
	{
	  memset (&tem1, 0, sizeof (tem1));
	  tem1.op0 = TREE_OPERAND (vro1->op0, 0);
	  tem1.type = TREE_TYPE (tem1.op0);
	  tem1.opcode = TREE_CODE (tem1.op0);
	  vro1 = &tem1;
	  deref1 = false;
	}
      if (deref2 && vro2->opcode == ADDR_EXPR)
	{
	  memset (&tem2, 0, sizeof (tem2));
	  tem2.op0 = TREE_OPERAND (vro2->op0, 0);
	  tem2.type = TREE_TYPE (tem2.op0);
	  tem2.opcode = TREE_CODE (tem2.op0);
	  vro2 = &tem2;
	  deref2 = false;
	}
      if (deref1 != deref2)
	return false;
      if (!vn_reference_op_eq (vro1, vro2))
	return false;
      /* Both alignment and alias set are not relevant for the produced
	 value but need to be included when doing lexical comparison.
	 We also need to make sure that the access path ends in an
	 access of the same size as otherwise we might assume an access
	 may not trap while in fact it might.  */
      if (lexical
	  && (vro1->opcode == MEM_REF
	      || vro1->opcode == TARGET_MEM_REF)
	  && (TYPE_ALIGN (vro1->type) != TYPE_ALIGN (vro2->type)
	      || (TYPE_SIZE (vro1->type) != TYPE_SIZE (vro2->type)
		  && (! TYPE_SIZE (vro1->type)
		      || ! TYPE_SIZE (vro2->type)
		      || ! operand_equal_p (TYPE_SIZE (vro1->type),
					    TYPE_SIZE (vro2->type))))
	      || (get_deref_alias_set (vro1->opcode == MEM_REF
				       ? TREE_TYPE (vro1->op0)
				       : TREE_TYPE (vro1->op2))
		  != get_deref_alias_set (vro2->opcode == MEM_REF
					  ? TREE_TYPE (vro2->op0)
					  : TREE_TYPE (vro2->op2)))))
	return false;
      ++j;
      ++i;
    }
  while (vr1->operands.length () != i
	 || vr2->operands.length () != j);

  return true;
}
948 :
/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  REF is walked from the outermost handled component
   down to its base, pushing one operand record per level, so RESULT lists
   the outermost access first and the base object last.  */

void
copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
{
  /* For non-calls, store the information that makes up the address.  */
  tree orig = ref;
  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      /* -1 in OFF means "no known constant byte offset" for this level.  */
      temp.off = -1;

      switch (temp.opcode)
	{
	case MODIFY_EXPR:
	  temp.op0 = TREE_OPERAND (ref, 1);
	  break;
	case WITH_SIZE_EXPR:
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.off = 0;
	  break;
	case MEM_REF:
	  /* The base address gets its own vn_reference_op_s structure.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  if (!mem_ref_offset (ref).to_shwi (&temp.off))
	    temp.off = -1;
	  temp.clique = MR_DEPENDENCE_CLIQUE (ref);
	  temp.base = MR_DEPENDENCE_BASE (ref);
	  temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
	  break;
	case TARGET_MEM_REF:
	  /* The base address gets its own vn_reference_op_s structure.  */
	  temp.op0 = TMR_INDEX (ref);
	  temp.op1 = TMR_STEP (ref);
	  temp.op2 = TMR_OFFSET (ref);
	  temp.clique = MR_DEPENDENCE_CLIQUE (ref);
	  temp.base = MR_DEPENDENCE_BASE (ref);
	  result->safe_push (temp);
	  /* TARGET_MEM_REF needs two records; the second carries TMR_INDEX2
	     under an ERROR_MARK opcode.  */
	  memset (&temp, 0, sizeof (temp));
	  temp.type = NULL_TREE;
	  temp.opcode = ERROR_MARK;
	  temp.op0 = TMR_INDEX2 (ref);
	  temp.off = -1;
	  break;
	case BIT_FIELD_REF:
	  /* Record bits, position and storage order.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  if (!multiple_p (bit_field_offset (ref), BITS_PER_UNIT, &temp.off))
	    temp.off = -1;
	  temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
	  break;
	case COMPONENT_REF:
	  /* The field decl is enough to unambiguously specify the field,
	     so use its type here.  */
	  temp.type = TREE_TYPE (TREE_OPERAND (ref, 1));
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  temp.reverse = (AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (ref, 0)))
			  && TYPE_REVERSE_STORAGE_ORDER
			       (TREE_TYPE (TREE_OPERAND (ref, 0))));
	  {
	    tree this_offset = component_ref_field_offset (ref);
	    if (this_offset
		&& poly_int_tree_p (this_offset))
	      {
		tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
		/* Only byte-aligned fields get a constant OFF recorded.  */
		if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
		  {
		    poly_offset_int off
		      = (wi::to_poly_offset (this_offset)
			 + (wi::to_offset (bit_offset) >> LOG2_BITS_PER_UNIT));
		    /* Prohibit value-numbering zero offset components
		       of addresses the same before the pass folding
		       __builtin_object_size had a chance to run.  Likewise
		       for components of zero size at arbitrary offset.  */
		    if (TREE_CODE (orig) != ADDR_EXPR
			|| (TYPE_SIZE (temp.type)
			    && integer_nonzerop (TYPE_SIZE (temp.type))
			    && maybe_ne (off, 0))
			|| (cfun->curr_properties & PROP_objsz))
		      off.to_shwi (&temp.off);
		  }
	      }
	  }
	  break;
	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  {
	    tree eltype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)));
	    /* Record index as operand.  */
	    temp.op0 = TREE_OPERAND (ref, 1);
	    /* Always record lower bounds and element size.  */
	    temp.op1 = array_ref_low_bound (ref);
	    /* But record element size in units of the type alignment.  */
	    temp.op2 = TREE_OPERAND (ref, 3);
	    temp.align = eltype->type_common.align;
	    if (! temp.op2)
	      temp.op2 = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (eltype),
				     size_int (TYPE_ALIGN_UNIT (eltype)));
	    /* Prohibit value-numbering addresses of one-after-the-last
	       element ARRAY_REFs the same as addresses of other components
	       before the pass folding __builtin_object_size had a chance
	       to run.  */
	    bool avoid_oob = true;
	    if (TREE_CODE (orig) != ADDR_EXPR
		|| cfun->curr_properties & PROP_objsz)
	      avoid_oob = false;
	    else if (poly_int_tree_p (temp.op0))
	      {
		tree ub = array_ref_up_bound (ref);
		if (ub
		    && poly_int_tree_p (ub)
		    /* ??? The C frontend for T[0] uses [0:] and the
		       C++ frontend [0:-1U].  See layout_type for how
		       awkward this is.  */
		    && !integer_minus_onep (ub)
		    && known_le (wi::to_poly_offset (temp.op0),
				 wi::to_poly_offset (ub)))
		  avoid_oob = false;
	      }
	    /* Constant index, lower bound and step allow computing a
	       constant byte offset for this level.  */
	    if (poly_int_tree_p (temp.op0)
		&& poly_int_tree_p (temp.op1)
		&& TREE_CODE (temp.op2) == INTEGER_CST
		&& !avoid_oob)
	      {
		poly_offset_int off = ((wi::to_poly_offset (temp.op0)
					- wi::to_poly_offset (temp.op1))
				       * wi::to_offset (temp.op2)
				       * vn_ref_op_align_unit (&temp));
		off.to_shwi (&temp.off);
	      }
	    temp.reverse = (AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (ref, 0)))
			    && TYPE_REVERSE_STORAGE_ORDER
				 (TREE_TYPE (TREE_OPERAND (ref, 0))));
	  }
	  break;
	case VAR_DECL:
	  if (DECL_HARD_REGISTER (ref))
	    {
	      temp.op0 = ref;
	      break;
	    }
	  /* Fallthru.  */
	case PARM_DECL:
	case CONST_DECL:
	case RESULT_DECL:
	  /* Canonicalize decls to MEM[&decl] which is what we end up with
	     when valueizing MEM[ptr] with ptr = &decl.  */
	  temp.opcode = MEM_REF;
	  temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
	  temp.off = 0;
	  result->safe_push (temp);
	  temp.opcode = ADDR_EXPR;
	  temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
	  temp.type = TREE_TYPE (temp.op0);
	  temp.off = -1;
	  break;
	case STRING_CST:
	case INTEGER_CST:
	case POLY_INT_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case REAL_CST:
	case FIXED_CST:
	case CONSTRUCTOR:
	case SSA_NAME:
	  temp.op0 = ref;
	  break;
	case ADDR_EXPR:
	  /* Only invariant addresses are recorded as an operand; for a
	     non-invariant ADDR_EXPR the walk below descends into it.  */
	  if (is_gimple_min_invariant (ref))
	    {
	      temp.op0 = ref;
	      break;
	    }
	  break;
	/* These are only interesting for their operands, their
	   existence, and their type.  They will never be the last
	   ref in the chain of references (IE they require an
	   operand), so we don't have to put anything
	   for op* as it will be handled by the iteration  */
	case REALPART_EXPR:
	  temp.off = 0;
	  break;
	case VIEW_CONVERT_EXPR:
	  temp.off = 0;
	  temp.reverse = storage_order_barrier_p (ref);
	  break;
	case IMAGPART_EXPR:
	  /* This is only interesting for its constant offset.  */
	  temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
	  break;
	default:
	  gcc_unreachable ();
	}
      result->safe_push (temp);

      /* Continue the walk into the operand for handled components,
	 MODIFY_EXPR/WITH_SIZE_EXPR rhs and non-invariant addresses;
	 anything else terminates the chain.  */
      if (REFERENCE_CLASS_P (ref)
	  || TREE_CODE (ref) == MODIFY_EXPR
	  || TREE_CODE (ref) == WITH_SIZE_EXPR
	  || (TREE_CODE (ref) == ADDR_EXPR
	      && !is_gimple_min_invariant (ref)))
	ref = TREE_OPERAND (ref, 0);
      else
	ref = NULL_TREE;
    }
}
1161 :
/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in OPS, the reference alias set SET, the base alias set BASE_SET
   and the reference type TYPE.  Return true if something useful was
   produced.  On success REF carries a base tree plus bit-granular offset,
   size and max_size (any of which may degrade to -1 for "unknown").  */

bool
ao_ref_init_from_vn_reference (ao_ref *ref,
			       alias_set_type set, alias_set_type base_set,
			       tree type, const vec<vn_reference_op_s> &ops)
{
  unsigned i;
  tree base = NULL_TREE;
  /* OP0_P points at the tree slot the next inner level has to fill;
     it starts at BASE and moves into MEM_REF/TARGET_MEM_REF operands.  */
  tree *op0_p = &base;
  poly_offset_int offset = 0;
  poly_offset_int max_size;
  poly_offset_int size = -1;
  tree size_tree = NULL_TREE;

  /* We don't handle calls.  */
  if (!type)
    return false;

  machine_mode mode = TYPE_MODE (type);
  if (mode == BLKmode)
    size_tree = TYPE_SIZE (type);
  else
    size = GET_MODE_BITSIZE (mode);
  if (size_tree != NULL_TREE
      && poly_int_tree_p (size_tree))
    size = wi::to_poly_offset (size_tree);

  /* Lower the final access size from the outermost expression.  */
  const_vn_reference_op_t cst_op = &ops[0];
  /* Cast away constness for the sake of the const-unsafe
     FOR_EACH_VEC_ELT().  */
  vn_reference_op_t op = const_cast<vn_reference_op_t>(cst_op);
  size_tree = NULL_TREE;
  if (op->opcode == COMPONENT_REF)
    size_tree = DECL_SIZE (op->op0);
  else if (op->opcode == BIT_FIELD_REF)
    size_tree = op->op0;
  if (size_tree != NULL_TREE
      && poly_int_tree_p (size_tree)
      && (!known_size_p (size)
	  || known_lt (wi::to_poly_offset (size_tree), size)))
    size = wi::to_poly_offset (size_tree);

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  FOR_EACH_VEC_ELT (ops, i, op)
    {
      switch (op->opcode)
	{
	case CALL_EXPR:
	  return false;

	/* Record the base objects.  */
	case MEM_REF:
	  *op0_p = build2 (MEM_REF, op->type,
			   NULL_TREE, op->op0);
	  MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
	  MR_DEPENDENCE_BASE (*op0_p) = op->base;
	  op0_p = &TREE_OPERAND (*op0_p, 0);
	  break;

	case TARGET_MEM_REF:
	  /* The second operand record (see copy_reference_ops_from_ref)
	     carries TMR_INDEX2, hence the OPS[I+1] access and ++I.  */
	  *op0_p = build5 (TARGET_MEM_REF, op->type,
			   NULL_TREE, op->op2, op->op0,
			   op->op1, ops[i+1].op0);
	  MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
	  MR_DEPENDENCE_BASE (*op0_p) = op->base;
	  op0_p = &TREE_OPERAND (*op0_p, 0);
	  ++i;
	  break;

	/* Unwrap some of the wrapped decls.  */
	case ADDR_EXPR:
	  /* Apart from ADDR_EXPR arguments to MEM_REF.  */
	  if (base != NULL_TREE
	      && TREE_CODE (base) == MEM_REF
	      && op->op0
	      && DECL_P (TREE_OPERAND (op->op0, 0)))
	    {
	      /* Fold MEM[&decl + off] to the decl itself, accounting the
		 MEM_REF offset (recorded in the previous operand) here.  */
	      const_vn_reference_op_t pop = &ops[i-1];
	      base = TREE_OPERAND (op->op0, 0);
	      if (known_eq (pop->off, -1))
		{
		  max_size = -1;
		  offset = 0;
		}
	      else
		offset += poly_offset_int (pop->off) * BITS_PER_UNIT;
	      op0_p = NULL;
	      break;
	    }
	  /* Fallthru.  */
	case PARM_DECL:
	case CONST_DECL:
	case RESULT_DECL:
	/* ??? We shouldn't see these, but un-canonicalize what
	   copy_reference_ops_from_ref does when visiting MEM_REF.  */
	case VAR_DECL:
	/* ??? And for this only have DECL_HARD_REGISTER.  */
	case STRING_CST:
	/* This can show up in ARRAY_REF bases.  */
	case INTEGER_CST:
	case SSA_NAME:
	  *op0_p = op->op0;
	  op0_p = NULL;
	  break;

	/* And now the usual component-reference style ops.  */
	case BIT_FIELD_REF:
	  offset += wi::to_poly_offset (op->op1);
	  break;

	case COMPONENT_REF:
	  {
	    tree field = op->op0;
	    /* We do not have a complete COMPONENT_REF tree here so we
	       cannot use component_ref_field_offset.  Do the interesting
	       parts manually.  */
	    tree this_offset = DECL_FIELD_OFFSET (field);

	    if (op->op1 || !poly_int_tree_p (this_offset))
	      max_size = -1;
	    else
	      {
		poly_offset_int woffset = (wi::to_poly_offset (this_offset)
					   << LOG2_BITS_PER_UNIT);
		woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
		offset += woffset;
	      }
	    break;
	  }

	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  /* Use the recorded constant offset.  */
	  if (maybe_eq (op->off, -1))
	    max_size = -1;
	  else
	    offset += poly_offset_int (op->off) * BITS_PER_UNIT;
	  break;

	case REALPART_EXPR:
	  break;

	case IMAGPART_EXPR:
	  offset += size;
	  break;

	case VIEW_CONVERT_EXPR:
	  break;

	case POLY_INT_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case REAL_CST:
	case FIXED_CST:
	case CONSTRUCTOR:
	  return false;

	default:
	  return false;
	}
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->ref_alias_set = set;
  ref->base_alias_set = base_set;
  /* We discount volatiles from value-numbering elsewhere.  */
  ref->volatile_p = false;

  /* Degrade gracefully when the polynomial values do not fit a
     HOST_WIDE_INT: unknown size/max_size is still a usable ao_ref.  */
  if (!size.to_shwi (&ref->size) || maybe_lt (ref->size, 0))
    {
      ref->offset = 0;
      ref->size = -1;
      ref->max_size = -1;
      return true;
    }

  if (!offset.to_shwi (&ref->offset))
    {
      ref->offset = 0;
      ref->max_size = -1;
      return true;
    }

  if (!max_size.to_shwi (&ref->max_size) || maybe_lt (ref->max_size, 0))
    ref->max_size = -1;

  return true;
}
1363 :
1364 : /* Copy the operations present in load/store/call REF into RESULT, a vector of
1365 : vn_reference_op_s's. */
1366 :
1367 : static void
1368 9096970 : copy_reference_ops_from_call (gcall *call,
1369 : vec<vn_reference_op_s> *result)
1370 : {
1371 9096970 : vn_reference_op_s temp;
1372 9096970 : unsigned i;
1373 9096970 : tree lhs = gimple_call_lhs (call);
1374 9096970 : int lr;
1375 :
1376 : /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
1377 : different. By adding the lhs here in the vector, we ensure that the
1378 : hashcode is different, guaranteeing a different value number. */
1379 9096970 : if (lhs && TREE_CODE (lhs) != SSA_NAME)
1380 : {
1381 435103 : memset (&temp, 0, sizeof (temp));
1382 435103 : temp.opcode = MODIFY_EXPR;
1383 435103 : temp.type = TREE_TYPE (lhs);
1384 435103 : temp.op0 = lhs;
1385 435103 : temp.off = -1;
1386 435103 : result->safe_push (temp);
1387 : }
1388 :
1389 : /* Copy the type, opcode, function, static chain and EH region, if any. */
1390 9096970 : memset (&temp, 0, sizeof (temp));
1391 9096970 : temp.type = gimple_call_fntype (call);
1392 9096970 : temp.opcode = CALL_EXPR;
1393 9096970 : temp.op0 = gimple_call_fn (call);
1394 9096970 : if (gimple_call_internal_p (call))
1395 527990 : temp.clique = gimple_call_internal_fn (call);
1396 9096970 : temp.op1 = gimple_call_chain (call);
1397 9096970 : if (stmt_could_throw_p (cfun, call) && (lr = lookup_stmt_eh_lp (call)) > 0)
1398 593412 : temp.op2 = size_int (lr);
1399 9096970 : temp.off = -1;
1400 9096970 : result->safe_push (temp);
1401 :
1402 : /* Copy the call arguments. As they can be references as well,
1403 : just chain them together. */
1404 26797616 : for (i = 0; i < gimple_call_num_args (call); ++i)
1405 : {
1406 17700646 : tree callarg = gimple_call_arg (call, i);
1407 17700646 : copy_reference_ops_from_ref (callarg, result);
1408 : }
1409 9096970 : }
1410 :
1411 : /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1412 : *I_P to point to the last element of the replacement. */
1413 : static bool
1414 124352751 : vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
1415 : unsigned int *i_p)
1416 : {
1417 124352751 : unsigned int i = *i_p;
1418 124352751 : vn_reference_op_t op = &(*ops)[i];
1419 124352751 : vn_reference_op_t mem_op = &(*ops)[i - 1];
1420 124352751 : tree addr_base;
1421 124352751 : poly_int64 addr_offset = 0;
1422 :
1423 : /* The only thing we have to do is from &OBJ.foo.bar add the offset
1424 : from .foo.bar to the preceding MEM_REF offset and replace the
1425 : address with &OBJ. */
1426 124352751 : addr_base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (op->op0, 0),
1427 : &addr_offset, vn_valueize);
1428 124352751 : gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
1429 124352751 : if (addr_base != TREE_OPERAND (op->op0, 0))
1430 : {
1431 653374 : poly_offset_int off
1432 653374 : = (poly_offset_int::from (wi::to_poly_wide (mem_op->op0),
1433 : SIGNED)
1434 653374 : + addr_offset);
1435 653374 : mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1436 653374 : op->op0 = build_fold_addr_expr (addr_base);
1437 653374 : if (tree_fits_shwi_p (mem_op->op0))
1438 653307 : mem_op->off = tree_to_shwi (mem_op->op0);
1439 : else
1440 67 : mem_op->off = -1;
1441 653374 : return true;
1442 : }
1443 : return false;
1444 : }
1445 :
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS by
   forward-propagating ADDR_EXPR and POINTER_PLUS_EXPR definitions of the
   SSA address at that position.  Updates *I_P to point to the last
   element of the replacement.  Returns true if anything changed.  */
static bool
vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
				     unsigned int *i_p)
{
  bool changed = false;
  vn_reference_op_t op;

  do
    {
      unsigned int i = *i_p;
      op = &(*ops)[i];
      vn_reference_op_t mem_op = &(*ops)[i - 1];
      gimple *def_stmt;
      enum tree_code code;
      poly_offset_int off;

      def_stmt = SSA_NAME_DEF_STMT (op->op0);
      if (!is_gimple_assign (def_stmt))
	return changed;

      code = gimple_assign_rhs_code (def_stmt);
      if (code != ADDR_EXPR
	  && code != POINTER_PLUS_EXPR)
	return changed;

      off = poly_offset_int::from (wi::to_poly_wide (mem_op->op0), SIGNED);

      /* The only thing we have to do is from &OBJ.foo.bar add the offset
	 from .foo.bar to the preceding MEM_REF offset and replace the
	 address with &OBJ.  */
      if (code == ADDR_EXPR)
	{
	  tree addr, addr_base;
	  poly_int64 addr_offset;

	  addr = gimple_assign_rhs1 (def_stmt);
	  addr_base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (addr, 0),
						       &addr_offset,
						       vn_valueize);
	  /* If that didn't work because the address isn't invariant propagate
	     the reference tree from the address operation in case the current
	     dereference isn't offsetted.  */
	  if (!addr_base
	      && *i_p == ops->length () - 1
	      && known_eq (off, 0)
	      /* This makes us disable this transform for PRE where the
		 reference ops might be also used for code insertion which
		 is invalid.  */
	      && default_vn_walk_kind == VN_WALKREWRITE)
	    {
	      /* Replace the trailing MEM_REF/SSA pair with the operand
		 chain of the referenced object.  */
	      auto_vec<vn_reference_op_s, 32> tem;
	      copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
	      /* Make sure to preserve TBAA info.  The only objects not
		 wrapped in MEM_REFs that can have their address taken are
		 STRING_CSTs.  */
	      if (tem.length () >= 2
		  && tem[tem.length () - 2].opcode == MEM_REF)
		{
		  vn_reference_op_t new_mem_op = &tem[tem.length () - 2];
		  new_mem_op->op0
		    = wide_int_to_tree (TREE_TYPE (mem_op->op0),
					wi::to_poly_wide (new_mem_op->op0));
		}
	      else
		gcc_assert (tem.last ().opcode == STRING_CST);
	      ops->pop ();
	      ops->pop ();
	      ops->safe_splice (tem);
	      --*i_p;
	      return true;
	    }
	  if (!addr_base
	      || TREE_CODE (addr_base) != MEM_REF
	      || (TREE_CODE (TREE_OPERAND (addr_base, 0)) == SSA_NAME
		  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (addr_base,
								    0))))
	    return changed;

	  /* Accumulate both the strip offset and the MEM_REF's own
	     offset into the preceding MEM_REF record.  */
	  off += addr_offset;
	  off += mem_ref_offset (addr_base);
	  op->op0 = TREE_OPERAND (addr_base, 0);
	}
      else
	{
	  tree ptr, ptroff;
	  ptr = gimple_assign_rhs1 (def_stmt);
	  ptroff = gimple_assign_rhs2 (def_stmt);
	  if (TREE_CODE (ptr) != SSA_NAME
	      || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
	      /* Make sure to not endlessly recurse.
		 See gcc.dg/tree-ssa/20040408-1.c for an example.  Can easily
		 happen when we value-number a PHI to its backedge value.  */
	      || SSA_VAL (ptr) == op->op0
	      || !poly_int_tree_p (ptroff))
	    return changed;

	  off += wi::to_poly_offset (ptroff);
	  op->op0 = ptr;
	}

      mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
      if (tree_fits_shwi_p (mem_op->op0))
	mem_op->off = tree_to_shwi (mem_op->op0);
      else
	mem_op->off = -1;
      /* ??? Can end up with endless recursion here!?
	 gcc.c-torture/execute/strcmp-1.c  */
      if (TREE_CODE (op->op0) == SSA_NAME)
	op->op0 = SSA_VAL (op->op0);
      if (TREE_CODE (op->op0) != SSA_NAME)
	op->opcode = TREE_CODE (op->op0);

      changed = true;
    }
  /* Tail-recurse.  */
  while (TREE_CODE (op->op0) == SSA_NAME);

  /* Fold a remaining *&.  */
  if (TREE_CODE (op->op0) == ADDR_EXPR)
    vn_reference_fold_indirect (ops, i_p);

  return changed;
}
1571 :
/* Optimize the reference REF to a constant if possible or return
   NULL_TREE if not.  Two cases are handled: constant-folding a builtin
   call whose arguments are constant, and folding a load from a constant
   object or a constant initializer.  */

tree
fully_constant_vn_reference_p (vn_reference_t ref)
{
  vec<vn_reference_op_s> operands = ref->operands;
  vn_reference_op_t op;

  /* Try to simplify the translated expression if it is
     a call to a builtin function with at most two arguments.  */
  op = &operands[0];
  if (op->opcode == CALL_EXPR
      /* A NULL op0 means an internal function (stored in CLIQUE below).  */
      && (!op->op0
	  || (TREE_CODE (op->op0) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
	      && fndecl_built_in_p (TREE_OPERAND (op->op0, 0),
				    BUILT_IN_NORMAL)))
      && operands.length () >= 2
      && operands.length () <= 3)
    {
      vn_reference_op_t arg0, arg1 = NULL;
      bool anyconst = false;
      arg0 = &operands[1];
      if (operands.length () > 2)
	arg1 = &operands[2];
      if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
	  || (arg0->opcode == ADDR_EXPR
	      && is_gimple_min_invariant (arg0->op0)))
	anyconst = true;
      if (arg1
	  && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
	      || (arg1->opcode == ADDR_EXPR
		  && is_gimple_min_invariant (arg1->op0))))
	anyconst = true;
      /* Folding is only worth attempting with at least one constant
	 argument; fold_const_call verifies the rest.  */
      if (anyconst)
	{
	  combined_fn fn;
	  if (op->op0)
	    fn = as_combined_fn (DECL_FUNCTION_CODE
				   (TREE_OPERAND (op->op0, 0)));
	  else
	    fn = as_combined_fn ((internal_fn) op->clique);
	  tree folded;
	  if (arg1)
	    folded = fold_const_call (fn, ref->type, arg0->op0, arg1->op0);
	  else
	    folded = fold_const_call (fn, ref->type, arg0->op0);
	  if (folded
	      && is_gimple_min_invariant (folded))
	    return folded;
	}
    }

  /* Simplify reads from constants or constant initializers.  */
  else if (BITS_PER_UNIT == 8
	   && ref->type
	   && COMPLETE_TYPE_P (ref->type)
	   && is_gimple_reg_type (ref->type))
    {
      poly_int64 off = 0;
      HOST_WIDE_INT size;
      if (INTEGRAL_TYPE_P (ref->type))
	size = TYPE_PRECISION (ref->type);
      else if (tree_fits_shwi_p (TYPE_SIZE (ref->type)))
	size = tree_to_shwi (TYPE_SIZE (ref->type));
      else
	return NULL_TREE;
      if (size % BITS_PER_UNIT != 0
	  || size > MAX_BITSIZE_MODE_ANY_MODE)
	return NULL_TREE;
      size /= BITS_PER_UNIT;
      unsigned i;
      /* Walk the operand chain accumulating a constant byte offset;
	 stop at the base (a constant or a MEM_REF).  Any unknown offset
	 or reverse storage order access defeats the fold.  */
      for (i = 0; i < operands.length (); ++i)
	{
	  if (TREE_CODE_CLASS (operands[i].opcode) == tcc_constant)
	    {
	      ++i;
	      break;
	    }
	  if (operands[i].reverse)
	    return NULL_TREE;
	  if (known_eq (operands[i].off, -1))
	    return NULL_TREE;
	  off += operands[i].off;
	  if (operands[i].opcode == MEM_REF)
	    {
	      ++i;
	      break;
	    }
	}
      vn_reference_op_t base = &operands[--i];
      tree ctor = error_mark_node;
      tree decl = NULL_TREE;
      if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
	ctor = base->op0;
      /* BASE[1] is the ADDR_EXPR record following the MEM_REF record
	 (see copy_reference_ops_from_ref's decl canonicalization).  */
      else if (base->opcode == MEM_REF
	       && base[1].opcode == ADDR_EXPR
	       && (VAR_P (TREE_OPERAND (base[1].op0, 0))
		   || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL
		   || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == STRING_CST))
	{
	  decl = TREE_OPERAND (base[1].op0, 0);
	  if (TREE_CODE (decl) == STRING_CST)
	    ctor = decl;
	  else
	    ctor = ctor_for_folding (decl);
	}
      /* A NULL constructor means the decl is known zero-initialized.  */
      if (ctor == NULL_TREE)
	return build_zero_cst (ref->type);
      else if (ctor != error_mark_node)
	{
	  HOST_WIDE_INT const_off;
	  if (decl)
	    {
	      tree res = fold_ctor_reference (ref->type, ctor,
					      off * BITS_PER_UNIT,
					      size * BITS_PER_UNIT, decl);
	      if (res)
		{
		  STRIP_USELESS_TYPE_CONVERSION (res);
		  if (is_gimple_min_invariant (res))
		    return res;
		}
	    }
	  else if (off.is_constant (&const_off))
	    {
	      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
	      int len = native_encode_expr (ctor, buf, size, const_off);
	      if (len > 0)
		return native_interpret_expr (ref->type, buf, len);
	    }
	}
    }

  return NULL_TREE;
}
1709 :
1710 : /* Return true if OPS contain a storage order barrier. */
1711 :
1712 : static bool
1713 57983733 : contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
1714 : {
1715 57983733 : vn_reference_op_t op;
1716 57983733 : unsigned i;
1717 :
1718 226923181 : FOR_EACH_VEC_ELT (ops, i, op)
1719 168939448 : if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
1720 : return true;
1721 :
1722 : return false;
1723 : }
1724 :
1725 : /* Return true if OPS represent an access with reverse storage order. */
1726 :
1727 : static bool
1728 57992004 : reverse_storage_order_for_component_p (vec<vn_reference_op_s> ops)
1729 : {
1730 57992004 : unsigned i = 0;
1731 57992004 : if (ops[i].opcode == REALPART_EXPR || ops[i].opcode == IMAGPART_EXPR)
1732 : ++i;
1733 57992004 : switch (ops[i].opcode)
1734 : {
1735 55923260 : case ARRAY_REF:
1736 55923260 : case COMPONENT_REF:
1737 55923260 : case BIT_FIELD_REF:
1738 55923260 : case MEM_REF:
1739 55923260 : return ops[i].reverse;
1740 : default:
1741 : return false;
1742 : }
1743 : }
1744 :
/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  *VALUEIZED_ANYTHING will specify
   whether any operands were valueized.  With WITH_AVAIL operands are
   valueized through vn_valueize, otherwise through SSA_VAL.  */

static void
valueize_refs_1 (vec<vn_reference_op_s> *orig, bool *valueized_anything,
		 bool with_avail = false)
{
  *valueized_anything = false;

  for (unsigned i = 0; i < orig->length (); ++i)
    {
re_valueize:
      vn_reference_op_t vro = &(*orig)[i];
      /* Valueize op0, op1 and op2 of the reference op in turn.  */
      if (vro->opcode == SSA_NAME
	  || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
	{
	  tree tem = with_avail ? vn_valueize (vro->op0) : SSA_VAL (vro->op0);
	  if (tem != vro->op0)
	    {
	      *valueized_anything = true;
	      vro->op0 = tem;
	    }
	  /* If it transforms from an SSA_NAME to a constant, update
	     the opcode.  */
	  if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
	    vro->opcode = TREE_CODE (vro->op0);
	}
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
	{
	  tree tem = with_avail ? vn_valueize (vro->op1) : SSA_VAL (vro->op1);
	  if (tem != vro->op1)
	    {
	      *valueized_anything = true;
	      vro->op1 = tem;
	    }
	}
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
	{
	  tree tem = with_avail ? vn_valueize (vro->op2) : SSA_VAL (vro->op2);
	  if (tem != vro->op2)
	    {
	      *valueized_anything = true;
	      vro->op2 = tem;
	    }
	}
      /* If it transforms from an SSA_NAME to an address, fold with
	 a preceding indirect reference.  Note this may adjust I.  */
      if (i > 0
	  && vro->op0
	  && TREE_CODE (vro->op0) == ADDR_EXPR
	  && (*orig)[i - 1].opcode == MEM_REF)
	{
	  if (vn_reference_fold_indirect (orig, &i))
	    *valueized_anything = true;
	}
      else if (i > 0
	       && vro->opcode == SSA_NAME
	       && (*orig)[i - 1].opcode == MEM_REF)
	{
	  if (vn_reference_maybe_forwprop_address (orig, &i))
	    {
	      *valueized_anything = true;
	      /* Re-valueize the current operand.  */
	      goto re_valueize;
	    }
	}
      /* If it transforms a non-constant ARRAY_REF into a constant
	 one, adjust the constant offset.  */
      else if ((vro->opcode == ARRAY_REF
		|| vro->opcode == ARRAY_RANGE_REF)
	       && known_eq (vro->off, -1)
	       && poly_int_tree_p (vro->op0)
	       && poly_int_tree_p (vro->op1)
	       && TREE_CODE (vro->op2) == INTEGER_CST)
	{
	  /* Prohibit value-numbering addresses of one-after-the-last
	     element ARRAY_REFs the same as addresses of other components
	     before the pass folding __builtin_object_size had a chance
	     to run.  */
	  if (!(cfun->curr_properties & PROP_objsz)
	      && (*orig)[0].opcode == ADDR_EXPR)
	    {
	      tree dom = TYPE_DOMAIN ((*orig)[i + 1].type);
	      if (!dom
		  || !TYPE_MAX_VALUE (dom)
		  || !poly_int_tree_p (TYPE_MAX_VALUE (dom))
		  || integer_minus_onep (TYPE_MAX_VALUE (dom)))
		continue;
	      if (!known_le (wi::to_poly_offset (vro->op0),
			     wi::to_poly_offset (TYPE_MAX_VALUE (dom))))
		continue;
	    }

	  /* off = (index - low-bound) * element-size * align-unit.  */
	  poly_offset_int off = ((wi::to_poly_offset (vro->op0)
				  - wi::to_poly_offset (vro->op1))
				 * wi::to_offset (vro->op2)
				 * vn_ref_op_align_unit (vro));
	  off.to_shwi (&vro->off);
	}
    }
}
1848 :
1849 : static void
1850 12338444 : valueize_refs (vec<vn_reference_op_s> *orig)
1851 : {
1852 12338444 : bool tem;
1853 0 : valueize_refs_1 (orig, &tem);
1854 0 : }
1855 :
/* Scratch vector of reference operands shared by the
   valueize_shared_reference_ops_from_* helpers; each call truncates and
   refills it, so callers must consume the result before the next call.  */
static vec<vn_reference_op_s> shared_lookup_references;
1857 :
1858 : /* Create a vector of vn_reference_op_s structures from REF, a
1859 : REFERENCE_CLASS_P tree. The vector is shared among all callers of
1860 : this function. *VALUEIZED_ANYTHING will specify whether any
1861 : operands were valueized. */
1862 :
1863 : static vec<vn_reference_op_s>
1864 177266647 : valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
1865 : {
1866 177266647 : if (!ref)
1867 0 : return vNULL;
1868 177266647 : shared_lookup_references.truncate (0);
1869 177266647 : copy_reference_ops_from_ref (ref, &shared_lookup_references);
1870 177266647 : valueize_refs_1 (&shared_lookup_references, valueized_anything);
1871 177266647 : return shared_lookup_references;
1872 : }
1873 :
1874 : /* Create a vector of vn_reference_op_s structures from CALL, a
1875 : call statement. The vector is shared among all callers of
1876 : this function. */
1877 :
1878 : static vec<vn_reference_op_s>
1879 9096970 : valueize_shared_reference_ops_from_call (gcall *call)
1880 : {
1881 9096970 : if (!call)
1882 0 : return vNULL;
1883 9096970 : shared_lookup_references.truncate (0);
1884 9096970 : copy_reference_ops_from_call (call, &shared_lookup_references);
1885 9096970 : valueize_refs (&shared_lookup_references);
1886 9096970 : return shared_lookup_references;
1887 : }
1888 :
1889 : /* Lookup a SCCVN reference operation VR in the current hash table.
1890 : Returns the resulting value number if it exists in the hash table,
1891 : NULL_TREE otherwise. VNRESULT will be filled in with the actual
1892 : vn_reference_t stored in the hashtable if something is found. */
1893 :
1894 : static tree
1895 64169112 : vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1896 : {
1897 64169112 : vn_reference_s **slot;
1898 64169112 : hashval_t hash;
1899 :
1900 64169112 : hash = vr->hashcode;
1901 64169112 : slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1902 64169112 : if (slot)
1903 : {
1904 8026718 : if (vnresult)
1905 8026718 : *vnresult = (vn_reference_t)*slot;
1906 8026718 : return ((vn_reference_t)*slot)->result;
1907 : }
1908 :
1909 : return NULL_TREE;
1910 : }
1911 :
1912 :
/* Partial definition tracking support.  */

/* A bit range [offset, offset + size) already covered by partial
   definitions.  Doubles as a node of the splay tree of known ranges
   (see vn_walk_cb_data::known_ranges below).  */

struct pd_range
{
  HOST_WIDE_INT offset;
  HOST_WIDE_INT size;
  /* Splay tree child links; zero-initialized until the node is
     inserted into the tree.  */
  pd_range *m_children[2];
};
1921 :
/* One recorded partial definition: RHS supplies the bits for the range
   [offset, offset + size).  RHS_OFF is the bit position within RHS to
   read from; a negative RHS_OFF is used to chop off leading bits of RHS
   (see push_partial_def).  */

struct pd_data
{
  tree rhs;
  HOST_WIDE_INT rhs_off;
  HOST_WIDE_INT offset;
  HOST_WIDE_INT size;
};
1929 :
/* Context for alias walking.  Carries the reference being looked up,
   walk configuration flags and the partial-definition state that is
   accumulated while walking the VUSE chain.  */

struct vn_walk_cb_data
{
  vn_walk_cb_data (vn_reference_t vr_, tree orig_ref_, tree *last_vuse_ptr_,
		   vn_lookup_kind vn_walk_kind_, bool tbaa_p_, tree mask_,
		   bool redundant_store_removal_p_)
    : vr (vr_), last_vuse_ptr (last_vuse_ptr_), last_vuse (NULL_TREE),
      mask (mask_), masked_result (NULL_TREE), same_val (NULL_TREE),
      vn_walk_kind (vn_walk_kind_),
      tbaa_p (tbaa_p_), redundant_store_removal_p (redundant_store_removal_p_),
      saved_operands (vNULL), first_range (), first_set (-2),
      first_base_set (-2)
  {
    /* If the caller does not care about the last VUSE record it into
       our own member so push_partial_def can reset the recording.  */
    if (!last_vuse_ptr)
      last_vuse_ptr = &last_vuse;
    ao_ref_init (&orig_ref, orig_ref_);
    if (mask)
      {
	wide_int w = wi::to_wide (mask);
	unsigned int pos = 0, prec = w.get_precision ();
	pd_data pd;
	pd.rhs = build_constructor (NULL_TREE, NULL);
	pd.rhs_off = 0;
	/* When bitwise and with a constant is done on a memory load,
	   we don't really need all the bits to be defined or defined
	   to constants, we don't really care what is in the position
	   corresponding to 0 bits in the mask.
	   So, push the ranges of those 0 bits in the mask as artificial
	   zero stores and let the partial def handling code do the
	   rest.  */
	while (pos < prec)
	  {
	    /* TZ is the length of the run of zero bits at POS.  */
	    int tz = wi::ctz (w);
	    if (pos + tz > prec)
	      tz = prec - pos;
	    if (tz)
	      {
		if (BYTES_BIG_ENDIAN)
		  pd.offset = prec - pos - tz;
		else
		  pd.offset = pos;
		pd.size = tz;
		void *r = push_partial_def (pd, 0, 0, 0, prec);
		gcc_assert (r == NULL_TREE);
	      }
	    pos += tz;
	    if (pos == prec)
	      break;
	    /* Skip over the following run of one bits.  */
	    w = wi::lrshift (w, tz);
	    tz = wi::ctz (wi::bit_not (w));
	    if (pos + tz > prec)
	      tz = prec - pos;
	    pos += tz;
	    w = wi::lrshift (w, tz);
	  }
      }
  }
  ~vn_walk_cb_data ();
  void *finish (alias_set_type, alias_set_type, tree);
  void *push_partial_def (pd_data pd,
			  alias_set_type, alias_set_type, HOST_WIDE_INT,
			  HOST_WIDE_INT);

  /* The reference to value-number.  */
  vn_reference_t vr;
  ao_ref orig_ref;
  /* Where to record the last visited VUSE; points to last_vuse below
     when the caller did not supply a location.  */
  tree *last_vuse_ptr;
  tree last_vuse;
  /* When non-NULL, zero bits in MASK mark don't-care positions of the
     loaded value (see the constructor above).  */
  tree mask;
  /* Result recorded by finish () when MASK is active.  */
  tree masked_result;
  /* When non-NULL, finish () fails unless the found value equals this.  */
  tree same_val;
  vn_lookup_kind vn_walk_kind;
  bool tbaa_p;
  bool redundant_store_removal_p;
  /* Operands saved for re-use by finish (); may be unallocated.  */
  vec<vn_reference_op_s> saved_operands;

  /* The VDEFs of partial defs we come along.  */
  auto_vec<pd_data, 2> partial_defs;
  /* The first defs range to avoid splay tree setup in most cases.  */
  pd_range first_range;
  /* Alias sets of the first partial def; -2 means not yet recorded.  */
  alias_set_type first_set;
  alias_set_type first_base_set;
  default_splay_tree<pd_range *> known_ranges;
  /* Backing storage for splay tree nodes; only initialized once
     known_ranges is used (see push_partial_def and the destructor).  */
  obstack ranges_obstack;
  static constexpr HOST_WIDE_INT bufsize = 64;
};
2016 :
2017 60076677 : vn_walk_cb_data::~vn_walk_cb_data ()
2018 : {
2019 60076677 : if (known_ranges)
2020 168568 : obstack_free (&ranges_obstack, NULL);
2021 60076677 : saved_operands.release ();
2022 60076677 : }
2023 :
/* Finish the walk with value VAL for the reference, using alias sets
   SET and BASE_SET unless the first partial def recorded its own.
   Returns (void *) -1 when the walk should stop without recording
   (masked lookup or SAME_VAL mismatch), otherwise the looked-up or
   newly inserted vn_reference_t.  */

void *
vn_walk_cb_data::finish (alias_set_type set, alias_set_type base_set, tree val)
{
  /* Prefer the alias sets remembered from the first partial def.  */
  if (first_set != -2)
    {
      set = first_set;
      base_set = first_base_set;
    }
  /* For masked loads just hand the value back to the caller.  */
  if (mask)
    {
      masked_result = val;
      return (void *) -1;
    }
  if (same_val && !operand_equal_p (val, same_val))
    return (void *) -1;
  vec<vn_reference_op_s> &operands
    = saved_operands.exists () ? saved_operands : vr->operands;
  return vn_reference_lookup_or_insert_for_pieces (last_vuse, set, base_set,
						   vr->offset, vr->max_size,
						   vr->type, operands, val);
}
2045 :
/* Push PD to the vector of partial definitions returning a
   value when we are ready to combine things with VUSE, SET and MAXSIZEI,
   NULL when we want to continue looking for partial defs or -1
   on failure.  */

void *
vn_walk_cb_data::push_partial_def (pd_data pd,
				   alias_set_type set, alias_set_type base_set,
				   HOST_WIDE_INT offseti,
				   HOST_WIDE_INT maxsizei)
{
  /* We're using a fixed buffer for encoding so fail early if the object
     we want to interpret is bigger.  */
  if (maxsizei > bufsize * BITS_PER_UNIT
      || CHAR_BIT != 8
      || BITS_PER_UNIT != 8
      /* Not prepared to handle PDP endian.  */
      || BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
    return (void *)-1;

  /* Turn too large constant stores into non-constant stores.  */
  if (CONSTANT_CLASS_P (pd.rhs) && pd.size > bufsize * BITS_PER_UNIT)
    pd.rhs = error_mark_node;

  /* And for non-constant or CONSTRUCTOR stores shrink them to only keep at
     most a partial byte before and/or after the region.  */
  if (!CONSTANT_CLASS_P (pd.rhs))
    {
      if (pd.offset < offseti)
	{
	  HOST_WIDE_INT o = ROUND_DOWN (offseti - pd.offset, BITS_PER_UNIT);
	  gcc_assert (pd.size > o);
	  pd.size -= o;
	  pd.offset += o;
	}
      if (pd.size > maxsizei)
	pd.size = maxsizei + ((pd.size - maxsizei) % BITS_PER_UNIT);
    }

  /* From here on pd.offset is relative to the start of the access.  */
  pd.offset -= offseti;

  bool pd_constant_p = (TREE_CODE (pd.rhs) == CONSTRUCTOR
			|| CONSTANT_CLASS_P (pd.rhs));
  pd_range *r;
  if (partial_defs.is_empty ())
    {
      /* If we get a clobber upfront, fail.  */
      if (TREE_CLOBBER_P (pd.rhs))
	return (void *)-1;
      if (!pd_constant_p)
	return (void *)-1;
      partial_defs.safe_push (pd);
      first_range.offset = pd.offset;
      first_range.size = pd.size;
      first_set = set;
      first_base_set = base_set;
      /* Stop recording last VUSEs once partial defs are in flight.  */
      last_vuse_ptr = NULL;
      r = &first_range;
      /* Go check if the first partial definition was a full one in case
	 the caller didn't optimize for this.  */
    }
  else
    {
      if (!known_ranges)
	{
	  /* ??? Optimize the case where the 2nd partial def completes
	     things.  */
	  gcc_obstack_init (&ranges_obstack);
	  known_ranges.insert_max_node (&first_range);
	}
      /* Lookup the offset and see if we need to merge.  */
      int comparison = known_ranges.lookup_le
	([&] (pd_range *r) { return pd.offset < r->offset; },
	 [&] (pd_range *r) { return pd.offset > r->offset; });
      r = known_ranges.root ();
      /* The +1 in the size makes adjacent (touching) ranges count as
	 overlapping so they get merged.  */
      if (comparison >= 0
	  && ranges_known_overlap_p (r->offset, r->size + 1,
				     pd.offset, pd.size))
	{
	  /* Ignore partial defs already covered.  Here we also drop shadowed
	     clobbers arriving here at the floor.  */
	  if (known_subrange_p (pd.offset, pd.size, r->offset, r->size))
	    return NULL;
	  r->size = MAX (r->offset + r->size, pd.offset + pd.size) - r->offset;
	}
      else
	{
	  /* pd.offset wasn't covered yet, insert the range.  */
	  void *addr = XOBNEW (&ranges_obstack, pd_range);
	  r = new (addr) pd_range { pd.offset, pd.size, {} };
	  known_ranges.insert_relative (comparison, r);
	}
      /* Merge r which now contains pd's range and is a member of the splay
	 tree with adjacent overlapping ranges.  */
      if (known_ranges.splay_next_node ())
	do
	  {
	    pd_range *rafter = known_ranges.root ();
	    if (!ranges_known_overlap_p (r->offset, r->size + 1,
					 rafter->offset, rafter->size))
	      break;
	    r->size = MAX (r->offset + r->size,
			   rafter->offset + rafter->size) - r->offset;
	  }
	while (known_ranges.remove_root_and_splay_next ());
      /* If we get a clobber, fail.  */
      if (TREE_CLOBBER_P (pd.rhs))
	return (void *)-1;
      /* Non-constants are OK as long as they are shadowed by a constant.  */
      if (!pd_constant_p)
	return (void *)-1;
      partial_defs.safe_push (pd);
    }

  /* Now we have merged pd's range into the range tree.  When we have covered
     [offseti, sizei] then the tree will contain exactly one node which has
     the desired properties and it will be 'r'.  */
  if (!known_subrange_p (0, maxsizei, r->offset, r->size))
    /* Continue looking for partial defs.  */
    return NULL;

  /* Now simply native encode all partial defs in reverse order.  */
  unsigned ndefs = partial_defs.length ();
  /* We support up to 512-bit values (for V8DFmode).  */
  unsigned char buffer[bufsize + 1];
  unsigned char this_buffer[bufsize + 1];
  int len;

  memset (buffer, 0, bufsize + 1);
  unsigned needed_len = ROUND_UP (maxsizei, BITS_PER_UNIT) / BITS_PER_UNIT;
  /* Pop most-recently pushed defs first so earlier pushed (nearer)
     defs overwrite them in the buffer.  */
  while (!partial_defs.is_empty ())
    {
      pd_data pd = partial_defs.pop ();
      unsigned int amnt;
      if (TREE_CODE (pd.rhs) == CONSTRUCTOR)
	{
	  /* Empty CONSTRUCTOR.  */
	  if (pd.size >= needed_len * BITS_PER_UNIT)
	    len = needed_len;
	  else
	    len = ROUND_UP (pd.size, BITS_PER_UNIT) / BITS_PER_UNIT;
	  memset (this_buffer, 0, len);
	}
      else if (pd.rhs_off >= 0)
	{
	  len = native_encode_expr (pd.rhs, this_buffer, bufsize,
				    (MAX (0, -pd.offset)
				     + pd.rhs_off) / BITS_PER_UNIT);
	  if (len <= 0
	      || len < (ROUND_UP (pd.size, BITS_PER_UNIT) / BITS_PER_UNIT
			- MAX (0, -pd.offset) / BITS_PER_UNIT))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file, "Failed to encode %u "
			 "partial definitions\n", ndefs);
	      return (void *)-1;
	    }
	}
      else /* negative pd.rhs_off indicates we want to chop off first bits */
	{
	  if (-pd.rhs_off >= bufsize)
	    return (void *)-1;
	  len = native_encode_expr (pd.rhs,
				    this_buffer + -pd.rhs_off / BITS_PER_UNIT,
				    bufsize - -pd.rhs_off / BITS_PER_UNIT,
				    MAX (0, -pd.offset) / BITS_PER_UNIT);
	  if (len <= 0
	      || len < (ROUND_UP (pd.size, BITS_PER_UNIT) / BITS_PER_UNIT
			- MAX (0, -pd.offset) / BITS_PER_UNIT))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file, "Failed to encode %u "
			 "partial definitions\n", ndefs);
	      return (void *)-1;
	    }
	}

      unsigned char *p = buffer;
      HOST_WIDE_INT size = pd.size;
      if (pd.offset < 0)
	size -= ROUND_DOWN (-pd.offset, BITS_PER_UNIT);
      this_buffer[len] = 0;
      if (BYTES_BIG_ENDIAN)
	{
	  /* LSB of this_buffer[len - 1] byte should be at
	     pd.offset + pd.size - 1 bits in buffer.  */
	  amnt = ((unsigned HOST_WIDE_INT) pd.offset
		  + pd.size) % BITS_PER_UNIT;
	  if (amnt)
	    shift_bytes_in_array_right (this_buffer, len + 1, amnt);
	  unsigned char *q = this_buffer;
	  unsigned int off = 0;
	  if (pd.offset >= 0)
	    {
	      unsigned int msk;
	      off = pd.offset / BITS_PER_UNIT;
	      gcc_assert (off < needed_len);
	      p = buffer + off;
	      if (size <= amnt)
		{
		  msk = ((1 << size) - 1) << (BITS_PER_UNIT - amnt);
		  *p = (*p & ~msk) | (this_buffer[len] & msk);
		  size = 0;
		}
	      else
		{
		  if (TREE_CODE (pd.rhs) != CONSTRUCTOR)
		    q = (this_buffer + len
			 - (ROUND_UP (size - amnt, BITS_PER_UNIT)
			    / BITS_PER_UNIT));
		  if (pd.offset % BITS_PER_UNIT)
		    {
		      msk = -1U << (BITS_PER_UNIT
				    - (pd.offset % BITS_PER_UNIT));
		      *p = (*p & msk) | (*q & ~msk);
		      p++;
		      q++;
		      off++;
		      size -= BITS_PER_UNIT - (pd.offset % BITS_PER_UNIT);
		      gcc_assert (size >= 0);
		    }
		}
	    }
	  else if (TREE_CODE (pd.rhs) != CONSTRUCTOR)
	    {
	      q = (this_buffer + len
		   - (ROUND_UP (size - amnt, BITS_PER_UNIT)
		      / BITS_PER_UNIT));
	      if (pd.offset % BITS_PER_UNIT)
		{
		  q++;
		  size -= BITS_PER_UNIT - ((unsigned HOST_WIDE_INT) pd.offset
					   % BITS_PER_UNIT);
		  gcc_assert (size >= 0);
		}
	    }
	  if ((unsigned HOST_WIDE_INT) size / BITS_PER_UNIT + off
	      > needed_len)
	    size = (needed_len - off) * BITS_PER_UNIT;
	  memcpy (p, q, size / BITS_PER_UNIT);
	  if (size % BITS_PER_UNIT)
	    {
	      unsigned int msk
		= -1U << (BITS_PER_UNIT - (size % BITS_PER_UNIT));
	      p += size / BITS_PER_UNIT;
	      q += size / BITS_PER_UNIT;
	      *p = (*q & msk) | (*p & ~msk);
	    }
	}
      else
	{
	  if (pd.offset >= 0)
	    {
	      /* LSB of this_buffer[0] byte should be at pd.offset bits
		 in buffer.  */
	      unsigned int msk;
	      size = MIN (size, (HOST_WIDE_INT) needed_len * BITS_PER_UNIT);
	      amnt = pd.offset % BITS_PER_UNIT;
	      if (amnt)
		shift_bytes_in_array_left (this_buffer, len + 1, amnt);
	      unsigned int off = pd.offset / BITS_PER_UNIT;
	      gcc_assert (off < needed_len);
	      size = MIN (size,
			  (HOST_WIDE_INT) (needed_len - off) * BITS_PER_UNIT);
	      p = buffer + off;
	      if (amnt + size < BITS_PER_UNIT)
		{
		  /* Low amnt bits come from *p, then size bits
		     from this_buffer[0] and the remaining again from
		     *p.  */
		  msk = ((1 << size) - 1) << amnt;
		  *p = (*p & ~msk) | (this_buffer[0] & msk);
		  size = 0;
		}
	      else if (amnt)
		{
		  msk = -1U << amnt;
		  *p = (*p & ~msk) | (this_buffer[0] & msk);
		  p++;
		  size -= (BITS_PER_UNIT - amnt);
		}
	    }
	  else
	    {
	      amnt = (unsigned HOST_WIDE_INT) pd.offset % BITS_PER_UNIT;
	      if (amnt)
		size -= BITS_PER_UNIT - amnt;
	      size = MIN (size, (HOST_WIDE_INT) needed_len * BITS_PER_UNIT);
	      if (amnt)
		shift_bytes_in_array_left (this_buffer, len + 1, amnt);
	    }
	  memcpy (p, this_buffer + (amnt != 0), size / BITS_PER_UNIT);
	  p += size / BITS_PER_UNIT;
	  if (size % BITS_PER_UNIT)
	    {
	      unsigned int msk = -1U << (size % BITS_PER_UNIT);
	      *p = (this_buffer[(amnt != 0) + size / BITS_PER_UNIT]
		    & ~msk) | (*p & msk);
	    }
	}
    }

  tree type = vr->type;
  /* Make sure to interpret in a type that has a range covering the whole
     access size.  */
  if (INTEGRAL_TYPE_P (vr->type) && maxsizei != TYPE_PRECISION (vr->type))
    {
      if (TREE_CODE (vr->type) == BITINT_TYPE
	  && maxsizei > MAX_FIXED_MODE_SIZE)
	type = build_bitint_type (maxsizei, TYPE_UNSIGNED (type));
      else
	type = build_nonstandard_integer_type (maxsizei, TYPE_UNSIGNED (type));
    }
  tree val;
  if (BYTES_BIG_ENDIAN)
    {
      unsigned sz = needed_len;
      if (maxsizei % BITS_PER_UNIT)
	shift_bytes_in_array_right (buffer, needed_len,
				    BITS_PER_UNIT
				    - (maxsizei % BITS_PER_UNIT));
      if (INTEGRAL_TYPE_P (type))
	{
	  if (TYPE_MODE (type) != BLKmode)
	    sz = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
	  else
	    sz = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (type));
	}
      if (sz > needed_len)
	{
	  memcpy (this_buffer + (sz - needed_len), buffer, needed_len);
	  val = native_interpret_expr (type, this_buffer, sz);
	}
      else
	val = native_interpret_expr (type, buffer, needed_len);
    }
  else
    val = native_interpret_expr (type, buffer, bufsize);
  /* If we chop off bits because the types precision doesn't match the memory
     access size this is ok when optimizing reads but not when called from
     the DSE code during elimination.  */
  if (val && type != vr->type)
    {
      if (! int_fits_type_p (val, vr->type))
	val = NULL_TREE;
      else
	val = fold_convert (vr->type, val);
    }

  if (val)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "Successfully combined %u partial definitions\n", ndefs);
      /* We are using the alias-set of the first store we encounter which
	 should be appropriate here.  */
      return finish (first_set, first_base_set, val);
    }
  else
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "Failed to interpret %u encoded partial definitions\n", ndefs);
      return (void *)-1;
    }
}
2412 :
/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
   with the current VUSE and performs the expression lookup.  */

static void *
vn_reference_lookup_2 (ao_ref *op, tree vuse, void *data_)
{
  vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
  vn_reference_t vr = data->vr;
  vn_reference_s **slot;
  hashval_t hash;

  /* If we have partial definitions recorded we have to go through
     vn_reference_lookup_3.  */
  if (!data->partial_defs.is_empty ())
    return NULL;

  if (data->last_vuse_ptr)
    {
      *data->last_vuse_ptr = vuse;
      data->last_vuse = vuse;
    }

  /* Fixup vuse and hash.  The hash incorporates the VUSE's SSA name
     version, so subtract the old one and add the new.  */
  if (vr->vuse)
    vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
  vr->vuse = vuse_ssa_val (vuse);
  if (vr->vuse)
    vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);

  hash = vr->hashcode;
  slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (slot)
    {
      if ((*slot)->result && data->saved_operands.exists ())
	return data->finish (vr->set, vr->base_set, (*slot)->result);
      return *slot;
    }

  /* At function entry, try known aggregate constants propagated by
     IPA-CP for loads from parameters or by-reference parameters.  */
  if (SSA_NAME_IS_DEFAULT_DEF (vuse))
    {
      HOST_WIDE_INT op_offset, op_size;
      tree v = NULL_TREE;
      tree base = ao_ref_base (op);

      if (base
	  && op->offset.is_constant (&op_offset)
	  && op->size.is_constant (&op_size)
	  && op->max_size_known_p ()
	  && known_eq (op->size, op->max_size))
	{
	  if (TREE_CODE (base) == PARM_DECL)
	    v = ipcp_get_aggregate_const (cfun, base, false, op_offset,
					  op_size);
	  else if (TREE_CODE (base) == MEM_REF
		   && integer_zerop (TREE_OPERAND (base, 1))
		   && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
		   && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0))
		   && (TREE_CODE (SSA_NAME_VAR (TREE_OPERAND (base, 0)))
		       == PARM_DECL))
	    v = ipcp_get_aggregate_const (cfun,
					  SSA_NAME_VAR (TREE_OPERAND (base, 0)),
					  true, op_offset, op_size);
	}
      if (v)
	return data->finish (vr->set, vr->base_set, v);
    }

  return NULL;
}
2482 :
2483 : /* Lookup an existing or insert a new vn_reference entry into the
2484 : value table for the VUSE, SET, TYPE, OPERANDS reference which
2485 : has the value VALUE which is either a constant or an SSA name. */
2486 :
2487 : static vn_reference_t
2488 1442211 : vn_reference_lookup_or_insert_for_pieces (tree vuse,
2489 : alias_set_type set,
2490 : alias_set_type base_set,
2491 : poly_int64 offset,
2492 : poly_int64 max_size,
2493 : tree type,
2494 : vec<vn_reference_op_s,
2495 : va_heap> operands,
2496 : tree value)
2497 : {
2498 1442211 : vn_reference_s vr1;
2499 1442211 : vn_reference_t result;
2500 1442211 : unsigned value_id;
2501 1442211 : vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2502 1442211 : vr1.operands = operands;
2503 1442211 : vr1.type = type;
2504 1442211 : vr1.set = set;
2505 1442211 : vr1.base_set = base_set;
2506 1442211 : vr1.offset = offset;
2507 1442211 : vr1.max_size = max_size;
2508 1442211 : vr1.hashcode = vn_reference_compute_hash (&vr1);
2509 1442211 : if (vn_reference_lookup_1 (&vr1, &result))
2510 8013 : return result;
2511 :
2512 1434198 : if (TREE_CODE (value) == SSA_NAME)
2513 271876 : value_id = VN_INFO (value)->value_id;
2514 : else
2515 1162322 : value_id = get_or_alloc_constant_value_id (value);
2516 1434198 : return vn_reference_insert_pieces (vuse, set, base_set, offset, max_size,
2517 1434198 : type, operands.copy (), value, value_id);
2518 : }
2519 :
/* Return a value-number for RCODE OPS... either by looking up an existing
   value-number for the possibly simplified result or by inserting the
   operation if INSERT is true.  If SIMPLIFY is false, return a value
   number for the unsimplified expression.  */

static tree
vn_nary_build_or_lookup_1 (gimple_match_op *res_op, bool insert,
			   bool simplify)
{
  tree result = NULL_TREE;
  /* We will be creating a value number for
       RCODE (OPS...).
     So first simplify and lookup this expression to see if it
     is already available.  */
  /* For simplification valueize.  */
  unsigned i = 0;
  if (simplify)
    for (i = 0; i < res_op->num_ops; ++i)
      if (TREE_CODE (res_op->ops[i]) == SSA_NAME)
	{
	  tree tem = vn_valueize (res_op->ops[i]);
	  if (!tem)
	    break;
	  res_op->ops[i] = tem;
	}
  /* If valueization of an operand fails (it is not available), skip
     simplification.  */
  bool res = false;
  if (i == res_op->num_ops)
    {
      /* Do not leak not available operands into the simplified expression
	 when called from PRE context.  */
      if (rpo_avail)
	mprts_hook = vn_lookup_simplify_result;
      res = res_op->resimplify (NULL, vn_valueize);
      mprts_hook = NULL;
    }
  gimple *new_stmt = NULL;
  if (res
      && gimple_simplified_result_is_gimple_val (res_op))
    {
      /* The expression is already available.  */
      result = res_op->ops[0];
      /* Valueize it, simplification returns sth in AVAIL only.  */
      if (TREE_CODE (result) == SSA_NAME)
	result = SSA_VAL (result);
    }
  else
    {
      tree val = vn_lookup_simplify_result (res_op);
      /* ??? In weird cases we can end up with internal-fn calls,
	 but this isn't expected so throw the result away.  See
	 PR123040 for an example.  */
      if (!val && insert && res_op->code.is_tree_code ())
	{
	  /* Materialize the expression as a new statement so it can
	     be value-numbered.  */
	  gimple_seq stmts = NULL;
	  result = maybe_push_res_to_seq (res_op, &stmts);
	  if (result)
	    {
	      gcc_assert (gimple_seq_singleton_p (stmts));
	      new_stmt = gimple_seq_first_stmt (stmts);
	    }
	}
      else
	/* The expression is already available.  */
	result = val;
    }
  if (new_stmt)
    {
      /* The expression is not yet available, value-number lhs to
	 the new SSA_NAME we created.  */
      /* Initialize value-number information properly.  */
      vn_ssa_aux_t result_info = VN_INFO (result);
      result_info->valnum = result;
      result_info->value_id = get_next_value_id ();
      result_info->visited = 1;
      gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
					  new_stmt);
      result_info->needs_insertion = true;
      /* ??? PRE phi-translation inserts NARYs without corresponding
	 SSA name result.  Re-use those but set their result according
	 to the stmt we just built.  */
      vn_nary_op_t nary = NULL;
      vn_nary_op_lookup_stmt (new_stmt, &nary);
      if (nary)
	{
	  gcc_assert (! nary->predicated_values && nary->u.result == NULL_TREE);
	  nary->u.result = gimple_assign_lhs (new_stmt);
	}
      /* As all "inserted" statements are singleton SCCs, insert
	 to the valid table.  This is strictly needed to
	 avoid re-generating new value SSA_NAMEs for the same
	 expression during SCC iteration over and over (the
	 optimistic table gets cleared after each iteration).
	 We do not need to insert into the optimistic table, as
	 lookups there will fall back to the valid table.  */
      else
	{
	  unsigned int length = vn_nary_length_from_stmt (new_stmt);
	  vn_nary_op_t vno1
	    = alloc_vn_nary_op_noinit (length, &vn_tables_insert_obstack);
	  vno1->value_id = result_info->value_id;
	  vno1->length = length;
	  vno1->predicated_values = 0;
	  vno1->u.result = result;
	  init_vn_nary_op_from_stmt (vno1, as_a <gassign *> (new_stmt));
	  vn_nary_op_insert_into (vno1, valid_info->nary);
	  /* Also do not link it into the undo chain.  */
	  last_inserted_nary = vno1->next;
	  vno1->next = (vn_nary_op_t)(void *)-1;
	}
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Inserting name ");
	  print_generic_expr (dump_file, result);
	  fprintf (dump_file, " for expression ");
	  print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
	  fprintf (dump_file, "\n");
	}
    }
  return result;
}
2642 :
2643 : /* Return a value-number for RCODE OPS... either by looking up an existing
2644 : value-number for the simplified result or by inserting the operation. */
2645 :
2646 : static tree
2647 179987 : vn_nary_build_or_lookup (gimple_match_op *res_op)
2648 : {
2649 0 : return vn_nary_build_or_lookup_1 (res_op, true, true);
2650 : }
2651 :
2652 : /* Try to simplify the expression RCODE OPS... of type TYPE and return
2653 : its value if present. Update NARY with a simplified expression if
2654 : it fits. */
2655 :
2656 : tree
2657 7357058 : vn_nary_simplify (vn_nary_op_t nary)
2658 : {
2659 7357058 : if (nary->length > gimple_match_op::MAX_NUM_OPS
2660 : /* For CONSTRUCTOR the vn_nary_op_t and gimple_match_op representation
2661 : does not match. */
2662 7356697 : || nary->opcode == CONSTRUCTOR)
2663 : return NULL_TREE;
2664 7355449 : gimple_match_op op (gimple_match_cond::UNCOND, nary->opcode,
2665 7355449 : nary->type, nary->length);
2666 7355449 : memcpy (op.ops, nary->op, sizeof (tree) * nary->length);
2667 7355449 : tree res = vn_nary_build_or_lookup_1 (&op, false, true);
2668 : /* Do not update *NARY with a simplified result that contains abnormals.
2669 : This matches what maybe_push_res_to_seq does when requesting insertion. */
2670 19303814 : for (unsigned i = 0; i < op.num_ops; ++i)
2671 11948454 : if (TREE_CODE (op.ops[i]) == SSA_NAME
2672 11948454 : && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (op.ops[i]))
2673 : return res;
2674 7355360 : if (op.code.is_tree_code ()
2675 7355360 : && op.num_ops <= nary->length
2676 14710017 : && (tree_code) op.code != CONSTRUCTOR)
2677 : {
2678 7354656 : nary->opcode = (tree_code) op.code;
2679 7354656 : nary->length = op.num_ops;
2680 19301602 : for (unsigned i = 0; i < op.num_ops; ++i)
2681 11946946 : nary->op[i] = op.ops[i];
2682 : }
2683 : return res;
2684 : }
2685 :
2686 : /* Elimination engine. */
2687 :
class eliminate_dom_walker : public dom_walker
{
public:
  eliminate_dom_walker (cdi_direction, bitmap);
  ~eliminate_dom_walker ();

  /* dom_walker hooks run when entering resp. leaving a block of the
     dominator walk.  */
  edge before_dom_children (basic_block) final override;
  void after_dom_children (basic_block) final override;

  /* Availability lookup and recording for leaders; virtual so the
     RPO-VN adaptor below can supply its own availability scheme.  */
  virtual tree eliminate_avail (basic_block, tree op);
  virtual void eliminate_push_avail (basic_block, tree op);
  /* Insert an expression computing VAL before *GSI.  */
  tree eliminate_insert (basic_block, gimple_stmt_iterator *gsi, tree val);

  /* Perform elimination on the statement at *GSI.  */
  void eliminate_stmt (basic_block, gimple_stmt_iterator *);

  /* Post-walk cleanup; REGION_P presumably restricts it to a region —
     TODO confirm against the out-of-line definition.  */
  unsigned eliminate_cleanup (bool region_p = false);

  /* Whether we run on behalf of PRE (see inserted_exprs below).  */
  bool do_pre;
  /* TODO_* flags to be returned from the enclosing pass.  */
  unsigned int el_todo;
  /* Statistics counters.  */
  unsigned int eliminations;
  unsigned int insertions;

  /* SSA names that had their defs inserted by PRE if do_pre.  */
  bitmap inserted_exprs;

  /* Blocks with statements that have had their EH properties changed.  */
  bitmap need_eh_cleanup;

  /* Blocks with statements that have had their AB properties changed.  */
  bitmap need_ab_cleanup;

  /* Local state for the eliminate domwalk.  */
  auto_vec<gimple *> to_remove;
  auto_vec<gimple *> to_fixup;
  auto_vec<tree> avail;
  auto_vec<tree> avail_stack;
};
2725 :
2726 : /* Adaptor to the elimination engine using RPO availability. */
2727 :
2728 12215628 : class rpo_elim : public eliminate_dom_walker
2729 : {
2730 : public:
2731 6107814 : rpo_elim(basic_block entry_)
2732 12215628 : : eliminate_dom_walker (CDI_DOMINATORS, NULL), entry (entry_),
2733 12215628 : m_avail_freelist (NULL) {}
2734 :
2735 : tree eliminate_avail (basic_block, tree op) final override;
2736 :
2737 : void eliminate_push_avail (basic_block, tree) final override;
2738 :
2739 : basic_block entry;
2740 : /* Freelist of avail entries which are allocated from the vn_ssa_aux
2741 : obstack. */
2742 : vn_avail *m_avail_freelist;
2743 : };
2744 :
2745 : /* Return true if BASE1 and BASE2 can be adjusted so they have the
2746 : same address and adjust *OFFSET1 and *OFFSET2 accordingly.
2747 : Otherwise return false. */
2748 :
2749 : static bool
2750 6712624 : adjust_offsets_for_equal_base_address (tree base1, poly_int64 *offset1,
2751 : tree base2, poly_int64 *offset2)
2752 : {
2753 6712624 : poly_int64 soff;
2754 6712624 : if (TREE_CODE (base1) == MEM_REF
2755 3049896 : && TREE_CODE (base2) == MEM_REF)
2756 : {
2757 2443350 : if (mem_ref_offset (base1).to_shwi (&soff))
2758 : {
2759 2443350 : base1 = TREE_OPERAND (base1, 0);
2760 2443350 : *offset1 += soff * BITS_PER_UNIT;
2761 : }
2762 2443350 : if (mem_ref_offset (base2).to_shwi (&soff))
2763 : {
2764 2443350 : base2 = TREE_OPERAND (base2, 0);
2765 2443350 : *offset2 += soff * BITS_PER_UNIT;
2766 : }
2767 2443350 : return operand_equal_p (base1, base2, 0);
2768 : }
2769 4269274 : return operand_equal_p (base1, base2, OEP_ADDRESS_OF);
2770 : }
2771 :
/* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
   from the statement defining VUSE and if not successful tries to
   translate *REF and *VR through an aggregate copy at the definition
   of VUSE.  If *DISAMBIGUATE_ONLY is true then do not perform translation
   of *REF and *VR.  If only disambiguation was performed then
   *DISAMBIGUATE_ONLY is set to true.  */
2778 :
2779 : static void *
2780 41784599 : vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
2781 : translate_flags *disambiguate_only)
2782 : {
2783 41784599 : vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
2784 41784599 : vn_reference_t vr = data->vr;
2785 41784599 : gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
2786 41784599 : tree base = ao_ref_base (ref);
2787 41784599 : HOST_WIDE_INT offseti = 0, maxsizei, sizei = 0;
2788 41784599 : static vec<vn_reference_op_s> lhs_ops;
2789 41784599 : ao_ref lhs_ref;
2790 41784599 : bool lhs_ref_ok = false;
2791 41784599 : poly_int64 copy_size;
2792 :
2793 : /* First try to disambiguate after value-replacing in the definitions LHS. */
2794 41784599 : if (is_gimple_assign (def_stmt))
2795 : {
2796 20546969 : tree lhs = gimple_assign_lhs (def_stmt);
2797 20546969 : bool valueized_anything = false;
2798 : /* Avoid re-allocation overhead. */
2799 20546969 : lhs_ops.truncate (0);
2800 20546969 : basic_block saved_rpo_bb = vn_context_bb;
2801 20546969 : vn_context_bb = gimple_bb (def_stmt);
2802 20546969 : if (*disambiguate_only <= TR_VALUEIZE_AND_DISAMBIGUATE)
2803 : {
2804 13344500 : copy_reference_ops_from_ref (lhs, &lhs_ops);
2805 13344500 : valueize_refs_1 (&lhs_ops, &valueized_anything, true);
2806 : }
2807 20546969 : vn_context_bb = saved_rpo_bb;
2808 20546969 : ao_ref_init (&lhs_ref, lhs);
2809 20546969 : lhs_ref_ok = true;
2810 20546969 : if (valueized_anything
2811 2001330 : && ao_ref_init_from_vn_reference
2812 2001330 : (&lhs_ref, ao_ref_alias_set (&lhs_ref),
2813 2001330 : ao_ref_base_alias_set (&lhs_ref), TREE_TYPE (lhs), lhs_ops)
2814 22548299 : && !refs_may_alias_p_1 (ref, &lhs_ref, data->tbaa_p))
2815 : {
2816 1714693 : *disambiguate_only = TR_VALUEIZE_AND_DISAMBIGUATE;
2817 8264490 : return NULL;
2818 : }
2819 :
2820 : /* When the def is a CLOBBER we can optimistically disambiguate
2821 : against it since any overlap it would be undefined behavior.
2822 : Avoid this for obvious must aliases to save compile-time though.
2823 : We also may not do this when the query is used for redundant
2824 : store removal. */
2825 18832276 : if (!data->redundant_store_removal_p
2826 10304130 : && gimple_clobber_p (def_stmt)
2827 19328967 : && !operand_equal_p (ao_ref_base (&lhs_ref), base, OEP_ADDRESS_OF))
2828 : {
2829 470210 : *disambiguate_only = TR_DISAMBIGUATE;
2830 470210 : return NULL;
2831 : }
2832 :
2833 : /* Besides valueizing the LHS we can also use access-path based
2834 : disambiguation on the original non-valueized ref. */
2835 18362066 : if (!ref->ref
2836 : && lhs_ref_ok
2837 2585376 : && data->orig_ref.ref)
2838 : {
2839 : /* We want to use the non-valueized LHS for this, but avoid redundant
2840 : work. */
2841 1798344 : ao_ref *lref = &lhs_ref;
2842 1798344 : ao_ref lref_alt;
2843 1798344 : if (valueized_anything)
2844 : {
2845 113321 : ao_ref_init (&lref_alt, lhs);
2846 113321 : lref = &lref_alt;
2847 : }
2848 1798344 : if (!refs_may_alias_p_1 (&data->orig_ref, lref, data->tbaa_p))
2849 : {
2850 234950 : *disambiguate_only = (valueized_anything
2851 117475 : ? TR_VALUEIZE_AND_DISAMBIGUATE
2852 : : TR_DISAMBIGUATE);
2853 117475 : return NULL;
2854 : }
2855 : }
2856 :
2857 : /* If we reach a clobbering statement try to skip it and see if
2858 : we find a VN result with exactly the same value as the
2859 : possible clobber. In this case we can ignore the clobber
2860 : and return the found value. */
2861 18244591 : if (!gimple_has_volatile_ops (def_stmt)
2862 16905870 : && ((is_gimple_reg_type (TREE_TYPE (lhs))
2863 12487502 : && types_compatible_p (TREE_TYPE (lhs), vr->type)
2864 9738855 : && !storage_order_barrier_p (lhs)
2865 9738855 : && !reverse_storage_order_for_component_p (lhs))
2866 7167019 : || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == CONSTRUCTOR)
2867 10784631 : && (ref->ref || data->orig_ref.ref)
2868 10325309 : && !data->mask
2869 10303411 : && data->partial_defs.is_empty ()
2870 10301207 : && multiple_p (get_object_alignment
2871 : (ref->ref ? ref->ref : data->orig_ref.ref),
2872 : ref->size)
2873 40812391 : && multiple_p (get_object_alignment (lhs), ref->size))
2874 : {
2875 9909349 : HOST_WIDE_INT offset2i, size2i;
2876 9909349 : poly_int64 offset = ref->offset;
2877 9909349 : poly_int64 maxsize = ref->max_size;
2878 :
2879 9909349 : gcc_assert (lhs_ref_ok);
2880 9909349 : tree base2 = ao_ref_base (&lhs_ref);
2881 9909349 : poly_int64 offset2 = lhs_ref.offset;
2882 9909349 : poly_int64 size2 = lhs_ref.size;
2883 9909349 : poly_int64 maxsize2 = lhs_ref.max_size;
2884 :
2885 9909349 : tree rhs = gimple_assign_rhs1 (def_stmt);
2886 9909349 : if (TREE_CODE (rhs) == CONSTRUCTOR)
2887 1018102 : rhs = integer_zero_node;
2888 : /* ??? We may not compare to ahead values which might be from
2889 : a different loop iteration but only to loop invariants. Use
2890 : CONSTANT_CLASS_P (unvalueized!) as conservative approximation.
2891 : The one-hop lookup below doesn't have this issue since there's
2892 : a virtual PHI before we ever reach a backedge to cross.
2893 : We can skip multiple defs as long as they are from the same
2894 : value though. */
2895 9909349 : if (data->same_val
2896 9909349 : && !operand_equal_p (data->same_val, rhs))
2897 : ;
2898 : /* When this is a (partial) must-def, leave it to handling
2899 : below in case we are interested in the value. */
2900 9621676 : else if (!(*disambiguate_only > TR_TRANSLATE)
2901 3297910 : && base2
2902 3297910 : && known_eq (maxsize2, size2)
2903 2299225 : && adjust_offsets_for_equal_base_address (base, &offset,
2904 : base2, &offset2)
2905 1128139 : && offset2.is_constant (&offset2i)
2906 1128139 : && size2.is_constant (&size2i)
2907 1128139 : && maxsize.is_constant (&maxsizei)
2908 1128139 : && offset.is_constant (&offseti)
2909 10749815 : && ranges_known_overlap_p (offseti, maxsizei, offset2i,
2910 : size2i))
2911 : ;
2912 8579591 : else if (CONSTANT_CLASS_P (rhs))
2913 : {
2914 4184897 : if (dump_file && (dump_flags & TDF_DETAILS))
2915 : {
2916 2192 : fprintf (dump_file,
2917 : "Skipping possible redundant definition ");
2918 2192 : print_gimple_stmt (dump_file, def_stmt, 0);
2919 : }
2920 : /* Delay the actual compare of the values to the end of the walk
2921 : but do not update last_vuse from here. */
2922 4184897 : data->last_vuse_ptr = NULL;
2923 4184897 : data->same_val = rhs;
2924 4247419 : return NULL;
2925 : }
2926 : else
2927 : {
2928 4394694 : tree saved_vuse = vr->vuse;
2929 4394694 : hashval_t saved_hashcode = vr->hashcode;
2930 4394694 : if (vr->vuse)
2931 4394694 : vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
2932 8789388 : vr->vuse = vuse_ssa_val (gimple_vuse (def_stmt));
2933 4394694 : if (vr->vuse)
2934 4394694 : vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
2935 4394694 : vn_reference_t vnresult = NULL;
2936 : /* Do not use vn_reference_lookup_2 since that might perform
2937 : expression hashtable insertion but this lookup crosses
2938 : a possible may-alias making such insertion conditionally
2939 : invalid. */
2940 4394694 : vn_reference_lookup_1 (vr, &vnresult);
2941 : /* Need to restore vr->vuse and vr->hashcode. */
2942 4394694 : vr->vuse = saved_vuse;
2943 4394694 : vr->hashcode = saved_hashcode;
2944 4394694 : if (vnresult)
2945 : {
2946 236489 : if (TREE_CODE (rhs) == SSA_NAME)
2947 234970 : rhs = SSA_VAL (rhs);
2948 236489 : if (vnresult->result
2949 236489 : && operand_equal_p (vnresult->result, rhs, 0))
2950 62522 : return vnresult;
2951 : }
2952 : }
2953 : }
2954 : }
2955 21237630 : else if (*disambiguate_only <= TR_VALUEIZE_AND_DISAMBIGUATE
2956 19062370 : && gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
2957 23288974 : && gimple_call_num_args (def_stmt) <= 4)
2958 : {
2959 : /* For builtin calls valueize its arguments and call the
2960 : alias oracle again. Valueization may improve points-to
2961 : info of pointers and constify size and position arguments.
2962 : Originally this was motivated by PR61034 which has
2963 : conditional calls to free falsely clobbering ref because
2964 : of imprecise points-to info of the argument. */
2965 : tree oldargs[4];
2966 : bool valueized_anything = false;
2967 4839310 : for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
2968 : {
2969 3326364 : oldargs[i] = gimple_call_arg (def_stmt, i);
2970 3326364 : tree val = vn_valueize (oldargs[i]);
2971 3326364 : if (val != oldargs[i])
2972 : {
2973 121718 : gimple_call_set_arg (def_stmt, i, val);
2974 121718 : valueized_anything = true;
2975 : }
2976 : }
2977 1512946 : if (valueized_anything)
2978 : {
2979 189808 : bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
2980 94904 : ref, data->tbaa_p);
2981 345141 : for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
2982 250237 : gimple_call_set_arg (def_stmt, i, oldargs[i]);
2983 94904 : if (!res)
2984 : {
2985 30516 : *disambiguate_only = TR_VALUEIZE_AND_DISAMBIGUATE;
2986 30516 : return NULL;
2987 : }
2988 : }
2989 : }
2990 :
2991 35204286 : if (*disambiguate_only > TR_TRANSLATE)
2992 : return (void *)-1;
2993 :
2994 : /* If we cannot constrain the size of the reference we cannot
2995 : test if anything kills it. */
2996 23424733 : if (!ref->max_size_known_p ())
2997 : return (void *)-1;
2998 :
2999 23004655 : poly_int64 offset = ref->offset;
3000 23004655 : poly_int64 maxsize = ref->max_size;
3001 :
3002 : /* def_stmt may-defs *ref. See if we can derive a value for *ref
3003 : from that definition.
3004 : 1) Memset. */
3005 23004655 : if (is_gimple_reg_type (vr->type)
3006 22996714 : && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
3007 22907256 : || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET_CHK))
3008 90000 : && (integer_zerop (gimple_call_arg (def_stmt, 1))
3009 32468 : || ((TREE_CODE (gimple_call_arg (def_stmt, 1)) == INTEGER_CST
3010 9086 : || (INTEGRAL_TYPE_P (vr->type) && known_eq (ref->size, 8)))
3011 : && CHAR_BIT == 8
3012 : && BITS_PER_UNIT == 8
3013 : && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
3014 31194 : && offset.is_constant (&offseti)
3015 31194 : && ref->size.is_constant (&sizei)
3016 31194 : && (offseti % BITS_PER_UNIT == 0
3017 39 : || TREE_CODE (gimple_call_arg (def_stmt, 1)) == INTEGER_CST)))
3018 88726 : && (poly_int_tree_p (gimple_call_arg (def_stmt, 2))
3019 36189 : || (TREE_CODE (gimple_call_arg (def_stmt, 2)) == SSA_NAME
3020 36189 : && poly_int_tree_p (SSA_VAL (gimple_call_arg (def_stmt, 2)))))
3021 23057755 : && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
3022 30397 : || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME))
3023 : {
3024 53059 : tree base2;
3025 53059 : poly_int64 offset2, size2, maxsize2;
3026 53059 : bool reverse;
3027 53059 : tree ref2 = gimple_call_arg (def_stmt, 0);
3028 53059 : if (TREE_CODE (ref2) == SSA_NAME)
3029 : {
3030 30356 : ref2 = SSA_VAL (ref2);
3031 30356 : if (TREE_CODE (ref2) == SSA_NAME
3032 30356 : && (TREE_CODE (base) != MEM_REF
3033 19380 : || TREE_OPERAND (base, 0) != ref2))
3034 : {
3035 24033 : gimple *def_stmt = SSA_NAME_DEF_STMT (ref2);
3036 24033 : if (gimple_assign_single_p (def_stmt)
3037 24033 : && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
3038 802 : ref2 = gimple_assign_rhs1 (def_stmt);
3039 : }
3040 : }
3041 53059 : if (TREE_CODE (ref2) == ADDR_EXPR)
3042 : {
3043 26490 : ref2 = TREE_OPERAND (ref2, 0);
3044 26490 : base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
3045 : &reverse);
3046 26490 : if (!known_size_p (maxsize2)
3047 26450 : || !known_eq (maxsize2, size2)
3048 52866 : || !operand_equal_p (base, base2, OEP_ADDRESS_OF))
3049 56563 : return (void *)-1;
3050 : }
3051 26569 : else if (TREE_CODE (ref2) == SSA_NAME)
3052 : {
3053 26569 : poly_int64 soff;
3054 26569 : if (TREE_CODE (base) != MEM_REF
3055 45366 : || !(mem_ref_offset (base)
3056 37594 : << LOG2_BITS_PER_UNIT).to_shwi (&soff))
3057 22619 : return (void *)-1;
3058 18797 : offset += soff;
3059 18797 : offset2 = 0;
3060 18797 : if (TREE_OPERAND (base, 0) != ref2)
3061 : {
3062 15459 : gimple *def = SSA_NAME_DEF_STMT (ref2);
3063 15459 : if (is_gimple_assign (def)
3064 14153 : && gimple_assign_rhs_code (def) == POINTER_PLUS_EXPR
3065 12179 : && gimple_assign_rhs1 (def) == TREE_OPERAND (base, 0)
3066 16101 : && poly_int_tree_p (gimple_assign_rhs2 (def)))
3067 : {
3068 612 : tree rhs2 = gimple_assign_rhs2 (def);
3069 612 : if (!(poly_offset_int::from (wi::to_poly_wide (rhs2),
3070 : SIGNED)
3071 612 : << LOG2_BITS_PER_UNIT).to_shwi (&offset2))
3072 : return (void *)-1;
3073 612 : ref2 = gimple_assign_rhs1 (def);
3074 612 : if (TREE_CODE (ref2) == SSA_NAME)
3075 612 : ref2 = SSA_VAL (ref2);
3076 : }
3077 : else
3078 : return (void *)-1;
3079 : }
3080 : }
3081 : else
3082 : return (void *)-1;
3083 26550 : tree len = gimple_call_arg (def_stmt, 2);
3084 26550 : HOST_WIDE_INT leni, offset2i;
3085 26550 : if (TREE_CODE (len) == SSA_NAME)
3086 255 : len = SSA_VAL (len);
3087 : /* Sometimes the above trickery is smarter than alias analysis. Take
3088 : advantage of that. */
3089 26550 : if (!ranges_maybe_overlap_p (offset, maxsize, offset2,
3090 53100 : (wi::to_poly_offset (len)
3091 26550 : << LOG2_BITS_PER_UNIT)))
3092 : return NULL;
3093 53043 : if (data->partial_defs.is_empty ()
3094 26493 : && known_subrange_p (offset, maxsize, offset2,
3095 26493 : wi::to_poly_offset (len) << LOG2_BITS_PER_UNIT))
3096 : {
3097 25998 : tree val;
3098 25998 : if (integer_zerop (gimple_call_arg (def_stmt, 1)))
3099 21165 : val = build_zero_cst (vr->type);
3100 4833 : else if (INTEGRAL_TYPE_P (vr->type)
3101 3693 : && known_eq (ref->size, 8)
3102 7771 : && offseti % BITS_PER_UNIT == 0)
3103 : {
3104 2938 : gimple_match_op res_op (gimple_match_cond::UNCOND, NOP_EXPR,
3105 2938 : vr->type, gimple_call_arg (def_stmt, 1));
3106 2938 : val = vn_nary_build_or_lookup (&res_op);
3107 2938 : if (!val
3108 2938 : || (TREE_CODE (val) == SSA_NAME
3109 626 : && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
3110 0 : return (void *)-1;
3111 : }
3112 : else
3113 : {
3114 1895 : unsigned buflen
3115 1895 : = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (vr->type)) + 1;
3116 1895 : if (INTEGRAL_TYPE_P (vr->type)
3117 1895 : && TYPE_MODE (vr->type) != BLKmode)
3118 1508 : buflen = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (vr->type)) + 1;
3119 1895 : unsigned char *buf = XALLOCAVEC (unsigned char, buflen);
3120 1895 : memset (buf, TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 1)),
3121 : buflen);
3122 1895 : if (BYTES_BIG_ENDIAN)
3123 : {
3124 : unsigned int amnt
3125 : = (((unsigned HOST_WIDE_INT) offseti + sizei)
3126 : % BITS_PER_UNIT);
3127 : if (amnt)
3128 : {
3129 : shift_bytes_in_array_right (buf, buflen,
3130 : BITS_PER_UNIT - amnt);
3131 : buf++;
3132 : buflen--;
3133 : }
3134 : }
3135 1895 : else if (offseti % BITS_PER_UNIT != 0)
3136 : {
3137 7 : unsigned int amnt
3138 : = BITS_PER_UNIT - ((unsigned HOST_WIDE_INT) offseti
3139 7 : % BITS_PER_UNIT);
3140 7 : shift_bytes_in_array_left (buf, buflen, amnt);
3141 7 : buf++;
3142 7 : buflen--;
3143 : }
3144 1895 : val = native_interpret_expr (vr->type, buf, buflen);
3145 1895 : if (!val)
3146 : return (void *)-1;
3147 : }
3148 25998 : return data->finish (0, 0, val);
3149 : }
3150 : /* For now handle clearing memory with partial defs. */
3151 552 : else if (known_eq (ref->size, maxsize)
3152 481 : && integer_zerop (gimple_call_arg (def_stmt, 1))
3153 170 : && tree_fits_poly_int64_p (len)
3154 166 : && tree_to_poly_int64 (len).is_constant (&leni)
3155 166 : && leni <= INTTYPE_MAXIMUM (HOST_WIDE_INT) / BITS_PER_UNIT
3156 166 : && offset.is_constant (&offseti)
3157 166 : && offset2.is_constant (&offset2i)
3158 166 : && maxsize.is_constant (&maxsizei)
3159 552 : && ranges_known_overlap_p (offseti, maxsizei, offset2i,
3160 552 : leni << LOG2_BITS_PER_UNIT))
3161 : {
3162 166 : pd_data pd;
3163 166 : pd.rhs = build_constructor (NULL_TREE, NULL);
3164 166 : pd.rhs_off = 0;
3165 166 : pd.offset = offset2i;
3166 166 : pd.size = leni << LOG2_BITS_PER_UNIT;
3167 166 : return data->push_partial_def (pd, 0, 0, offseti, maxsizei);
3168 : }
3169 : }
3170 :
3171 : /* 2) Assignment from an empty CONSTRUCTOR. */
3172 22951596 : else if (is_gimple_reg_type (vr->type)
3173 22943655 : && gimple_assign_single_p (def_stmt)
3174 7557840 : && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
3175 1926293 : && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0
3176 24877889 : && !TREE_THIS_VOLATILE (gimple_assign_lhs (def_stmt)))
3177 : {
3178 1926261 : tree base2;
3179 1926261 : poly_int64 offset2, size2, maxsize2;
3180 1926261 : HOST_WIDE_INT offset2i, size2i;
3181 1926261 : gcc_assert (lhs_ref_ok);
3182 1926261 : base2 = ao_ref_base (&lhs_ref);
3183 1926261 : offset2 = lhs_ref.offset;
3184 1926261 : size2 = lhs_ref.size;
3185 1926261 : maxsize2 = lhs_ref.max_size;
3186 1926261 : if (known_size_p (maxsize2)
3187 1926223 : && known_eq (maxsize2, size2)
3188 3852438 : && adjust_offsets_for_equal_base_address (base, &offset,
3189 : base2, &offset2))
3190 : {
3191 1899462 : if (data->partial_defs.is_empty ()
3192 1895977 : && known_subrange_p (offset, maxsize, offset2, size2))
3193 : {
3194 : /* While technically undefined behavior do not optimize
3195 : a full read from a clobber. */
3196 1895129 : if (gimple_clobber_p (def_stmt))
3197 1899408 : return (void *)-1;
3198 968320 : tree val = build_zero_cst (vr->type);
3199 968320 : return data->finish (ao_ref_alias_set (&lhs_ref),
3200 968320 : ao_ref_base_alias_set (&lhs_ref), val);
3201 : }
3202 4333 : else if (known_eq (ref->size, maxsize)
3203 4279 : && maxsize.is_constant (&maxsizei)
3204 4279 : && offset.is_constant (&offseti)
3205 4279 : && offset2.is_constant (&offset2i)
3206 4279 : && size2.is_constant (&size2i)
3207 4333 : && ranges_known_overlap_p (offseti, maxsizei,
3208 : offset2i, size2i))
3209 : {
3210 : /* Let clobbers be consumed by the partial-def tracker
3211 : which can choose to ignore them if they are shadowed
3212 : by a later def. */
3213 4279 : pd_data pd;
3214 4279 : pd.rhs = gimple_assign_rhs1 (def_stmt);
3215 4279 : pd.rhs_off = 0;
3216 4279 : pd.offset = offset2i;
3217 4279 : pd.size = size2i;
3218 4279 : return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
3219 : ao_ref_base_alias_set (&lhs_ref),
3220 : offseti, maxsizei);
3221 : }
3222 : }
3223 : }
3224 :
3225 : /* 3) Assignment from a constant. We can use folds native encode/interpret
3226 : routines to extract the assigned bits. */
3227 21025335 : else if (known_eq (ref->size, maxsize)
3228 20506067 : && is_gimple_reg_type (vr->type)
3229 20498126 : && !reverse_storage_order_for_component_p (vr->operands)
3230 20495369 : && !contains_storage_order_barrier_p (vr->operands)
3231 20495369 : && gimple_assign_single_p (def_stmt)
3232 5309568 : && !TREE_THIS_VOLATILE (gimple_assign_lhs (def_stmt))
3233 : && CHAR_BIT == 8
3234 : && BITS_PER_UNIT == 8
3235 : && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
3236 : /* native_encode and native_decode operate on arrays of bytes
3237 : and so fundamentally need a compile-time size and offset. */
3238 5306607 : && maxsize.is_constant (&maxsizei)
3239 5306607 : && offset.is_constant (&offseti)
3240 26331942 : && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt))
3241 4483968 : || (TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
3242 1840546 : && is_gimple_min_invariant (SSA_VAL (gimple_assign_rhs1 (def_stmt))))))
3243 : {
3244 839189 : tree lhs = gimple_assign_lhs (def_stmt);
3245 839189 : tree base2;
3246 839189 : poly_int64 offset2, size2, maxsize2;
3247 839189 : HOST_WIDE_INT offset2i, size2i;
3248 839189 : bool reverse;
3249 839189 : gcc_assert (lhs_ref_ok);
3250 839189 : base2 = ao_ref_base (&lhs_ref);
3251 839189 : offset2 = lhs_ref.offset;
3252 839189 : size2 = lhs_ref.size;
3253 839189 : maxsize2 = lhs_ref.max_size;
3254 839189 : reverse = reverse_storage_order_for_component_p (lhs);
3255 839189 : if (base2
3256 839189 : && !reverse
3257 838361 : && !storage_order_barrier_p (lhs)
3258 838361 : && known_eq (maxsize2, size2)
3259 806892 : && adjust_offsets_for_equal_base_address (base, &offset,
3260 : base2, &offset2)
3261 82127 : && offset.is_constant (&offseti)
3262 82127 : && offset2.is_constant (&offset2i)
3263 839189 : && size2.is_constant (&size2i))
3264 : {
3265 82127 : if (data->partial_defs.is_empty ()
3266 65388 : && known_subrange_p (offseti, maxsizei, offset2, size2))
3267 : {
3268 : /* We support up to 512-bit values (for V8DFmode). */
3269 42590 : unsigned char buffer[65];
3270 42590 : int len;
3271 :
3272 42590 : tree rhs = gimple_assign_rhs1 (def_stmt);
3273 42590 : if (TREE_CODE (rhs) == SSA_NAME)
3274 1664 : rhs = SSA_VAL (rhs);
3275 85180 : len = native_encode_expr (rhs,
3276 : buffer, sizeof (buffer) - 1,
3277 42590 : (offseti - offset2i) / BITS_PER_UNIT);
3278 42590 : if (len > 0 && len * BITS_PER_UNIT >= maxsizei)
3279 : {
3280 39568 : tree type = vr->type;
3281 39568 : unsigned char *buf = buffer;
3282 39568 : unsigned int amnt = 0;
3283 : /* Make sure to interpret in a type that has a range
3284 : covering the whole access size. */
3285 39568 : if (INTEGRAL_TYPE_P (vr->type)
3286 39568 : && maxsizei != TYPE_PRECISION (vr->type))
3287 : {
3288 884 : bool uns = TYPE_UNSIGNED (type);
3289 884 : if (TREE_CODE (vr->type) == BITINT_TYPE
3290 885 : && maxsizei > MAX_FIXED_MODE_SIZE)
3291 1 : type = build_bitint_type (maxsizei, uns);
3292 : else
3293 883 : type = build_nonstandard_integer_type (maxsizei, uns);
3294 : }
3295 39568 : if (BYTES_BIG_ENDIAN)
3296 : {
3297 : /* For big-endian native_encode_expr stored the rhs
3298 : such that the LSB of it is the LSB of buffer[len - 1].
3299 : That bit is stored into memory at position
3300 : offset2 + size2 - 1, i.e. in byte
3301 : base + (offset2 + size2 - 1) / BITS_PER_UNIT.
3302 : E.g. for offset2 1 and size2 14, rhs -1 and memory
3303 : previously cleared that is:
3304 : 0 1
3305 : 01111111|11111110
3306 : Now, if we want to extract offset 2 and size 12 from
3307 : it using native_interpret_expr (which actually works
3308 : for integral bitfield types in terms of byte size of
3309 : the mode), the native_encode_expr stored the value
3310 : into buffer as
3311 : XX111111|11111111
3312 : and returned len 2 (the X bits are outside of
3313 : precision).
3314 : Let sz be maxsize / BITS_PER_UNIT if not extracting
3315 : a bitfield, and GET_MODE_SIZE otherwise.
3316 : We need to align the LSB of the value we want to
3317 : extract as the LSB of buf[sz - 1].
3318 : The LSB from memory we need to read is at position
3319 : offset + maxsize - 1. */
3320 : HOST_WIDE_INT sz = maxsizei / BITS_PER_UNIT;
3321 : if (INTEGRAL_TYPE_P (type))
3322 : {
3323 : if (TYPE_MODE (type) != BLKmode)
3324 : sz = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
3325 : else
3326 : sz = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (type));
3327 : }
3328 : amnt = ((unsigned HOST_WIDE_INT) offset2i + size2i
3329 : - offseti - maxsizei) % BITS_PER_UNIT;
3330 : if (amnt)
3331 : shift_bytes_in_array_right (buffer, len, amnt);
3332 : amnt = ((unsigned HOST_WIDE_INT) offset2i + size2i
3333 : - offseti - maxsizei - amnt) / BITS_PER_UNIT;
3334 : if ((unsigned HOST_WIDE_INT) sz + amnt > (unsigned) len)
3335 : len = 0;
3336 : else
3337 : {
3338 : buf = buffer + len - sz - amnt;
3339 : len -= (buf - buffer);
3340 : }
3341 : }
3342 : else
3343 : {
3344 39568 : amnt = ((unsigned HOST_WIDE_INT) offset2i
3345 39568 : - offseti) % BITS_PER_UNIT;
3346 39568 : if (amnt)
3347 : {
3348 315 : buffer[len] = 0;
3349 315 : shift_bytes_in_array_left (buffer, len + 1, amnt);
3350 315 : buf = buffer + 1;
3351 : }
3352 : }
3353 39568 : tree val = native_interpret_expr (type, buf, len);
3354 : /* If we chop off bits because the types precision doesn't
3355 : match the memory access size this is ok when optimizing
3356 : reads but not when called from the DSE code during
3357 : elimination. */
3358 39568 : if (val
3359 39566 : && type != vr->type)
3360 : {
3361 884 : if (! int_fits_type_p (val, vr->type))
3362 : val = NULL_TREE;
3363 : else
3364 884 : val = fold_convert (vr->type, val);
3365 : }
3366 :
3367 39566 : if (val)
3368 39566 : return data->finish (ao_ref_alias_set (&lhs_ref),
3369 39566 : ao_ref_base_alias_set (&lhs_ref), val);
3370 : }
3371 : }
3372 39537 : else if (ranges_known_overlap_p (offseti, maxsizei, offset2i,
3373 : size2i))
3374 : {
3375 39537 : pd_data pd;
3376 39537 : tree rhs = gimple_assign_rhs1 (def_stmt);
3377 39537 : if (TREE_CODE (rhs) == SSA_NAME)
3378 2177 : rhs = SSA_VAL (rhs);
3379 39537 : pd.rhs = rhs;
3380 39537 : pd.rhs_off = 0;
3381 39537 : pd.offset = offset2i;
3382 39537 : pd.size = size2i;
3383 39537 : return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
3384 : ao_ref_base_alias_set (&lhs_ref),
3385 : offseti, maxsizei);
3386 : }
3387 : }
3388 : }
3389 :
3390 : /* 4) Assignment from an SSA name which definition we may be able
3391 : to access pieces from or we can combine to a larger entity. */
3392 20186146 : else if (known_eq (ref->size, maxsize)
3393 19666878 : && is_gimple_reg_type (vr->type)
3394 19658937 : && !reverse_storage_order_for_component_p (vr->operands)
3395 19656180 : && !contains_storage_order_barrier_p (vr->operands)
3396 19656180 : && gimple_assign_single_p (def_stmt)
3397 4470379 : && !TREE_THIS_VOLATILE (gimple_assign_lhs (def_stmt))
3398 24653564 : && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
3399 : {
3400 1823996 : tree lhs = gimple_assign_lhs (def_stmt);
3401 1823996 : tree base2;
3402 1823996 : poly_int64 offset2, size2, maxsize2;
3403 1823996 : HOST_WIDE_INT offset2i, size2i, offseti;
3404 1823996 : bool reverse;
3405 1823996 : gcc_assert (lhs_ref_ok);
3406 1823996 : base2 = ao_ref_base (&lhs_ref);
3407 1823996 : offset2 = lhs_ref.offset;
3408 1823996 : size2 = lhs_ref.size;
3409 1823996 : maxsize2 = lhs_ref.max_size;
3410 1823996 : reverse = reverse_storage_order_for_component_p (lhs);
3411 1823996 : tree def_rhs = gimple_assign_rhs1 (def_stmt);
3412 1823996 : if (!reverse
3413 1823784 : && !storage_order_barrier_p (lhs)
3414 1823784 : && known_size_p (maxsize2)
3415 1799106 : && known_eq (maxsize2, size2)
3416 3504312 : && adjust_offsets_for_equal_base_address (base, &offset,
3417 : base2, &offset2))
3418 : {
3419 80444 : if (data->partial_defs.is_empty ()
3420 74377 : && known_subrange_p (offset, maxsize, offset2, size2)
3421 : /* ??? We can't handle bitfield precision extracts without
3422 : either using an alternate type for the BIT_FIELD_REF and
3423 : then doing a conversion or possibly adjusting the offset
3424 : according to endianness. */
3425 51062 : && (! INTEGRAL_TYPE_P (vr->type)
3426 37298 : || known_eq (ref->size, TYPE_PRECISION (vr->type)))
3427 93389 : && multiple_p (ref->size, BITS_PER_UNIT))
3428 : {
3429 44974 : tree val = NULL_TREE;
3430 89942 : if (! INTEGRAL_TYPE_P (TREE_TYPE (def_rhs))
3431 49529 : || type_has_mode_precision_p (TREE_TYPE (def_rhs)))
3432 : {
3433 43899 : gimple_match_op op (gimple_match_cond::UNCOND,
3434 43899 : BIT_FIELD_REF, vr->type,
3435 : SSA_VAL (def_rhs),
3436 : bitsize_int (ref->size),
3437 43899 : bitsize_int (offset - offset2));
3438 43899 : val = vn_nary_build_or_lookup (&op);
3439 : }
3440 1075 : else if (known_eq (ref->size, size2))
3441 : {
3442 1001 : gimple_match_op op (gimple_match_cond::UNCOND,
3443 1001 : VIEW_CONVERT_EXPR, vr->type,
3444 1001 : SSA_VAL (def_rhs));
3445 1001 : val = vn_nary_build_or_lookup (&op);
3446 : }
3447 44900 : if (val
3448 44900 : && (TREE_CODE (val) != SSA_NAME
3449 44060 : || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
3450 44881 : return data->finish (ao_ref_alias_set (&lhs_ref),
3451 80351 : ao_ref_base_alias_set (&lhs_ref), val);
3452 : }
3453 35470 : else if (maxsize.is_constant (&maxsizei)
3454 35470 : && offset.is_constant (&offseti)
3455 35470 : && offset2.is_constant (&offset2i)
3456 35470 : && size2.is_constant (&size2i)
3457 35470 : && ranges_known_overlap_p (offset, maxsize, offset2, size2))
3458 : {
3459 35470 : pd_data pd;
3460 35470 : pd.rhs = SSA_VAL (def_rhs);
3461 35470 : pd.rhs_off = 0;
3462 35470 : pd.offset = offset2i;
3463 35470 : pd.size = size2i;
3464 35470 : return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
3465 : ao_ref_base_alias_set (&lhs_ref),
3466 : offseti, maxsizei);
3467 : }
3468 : }
3469 : }
3470 :
3471 : /* 4b) Assignment done via one of the vectorizer internal store
3472 : functions where we may be able to access pieces from or we can
3473 : combine to a larger entity. */
3474 18362150 : else if (known_eq (ref->size, maxsize)
3475 17842882 : && is_gimple_reg_type (vr->type)
3476 17834941 : && !reverse_storage_order_for_component_p (vr->operands)
3477 17832184 : && !contains_storage_order_barrier_p (vr->operands)
3478 17832184 : && is_gimple_call (def_stmt)
3479 14395172 : && gimple_call_internal_p (def_stmt)
3480 18615453 : && internal_store_fn_p (gimple_call_internal_fn (def_stmt)))
3481 : {
3482 36 : gcall *call = as_a <gcall *> (def_stmt);
3483 36 : internal_fn fn = gimple_call_internal_fn (call);
3484 :
3485 36 : tree mask = NULL_TREE, len = NULL_TREE, bias = NULL_TREE;
3486 36 : switch (fn)
3487 : {
3488 36 : case IFN_MASK_STORE:
3489 36 : mask = gimple_call_arg (call, internal_fn_mask_index (fn));
3490 36 : mask = vn_valueize (mask);
3491 36 : if (TREE_CODE (mask) != VECTOR_CST)
3492 28 : return (void *)-1;
3493 : break;
3494 0 : case IFN_LEN_STORE:
3495 0 : {
3496 0 : int len_index = internal_fn_len_index (fn);
3497 0 : len = gimple_call_arg (call, len_index);
3498 0 : bias = gimple_call_arg (call, len_index + 1);
3499 0 : if (!tree_fits_uhwi_p (len) || !tree_fits_shwi_p (bias))
3500 : return (void *) -1;
3501 : break;
3502 : }
3503 : default:
3504 : return (void *)-1;
3505 : }
3506 14 : tree def_rhs = gimple_call_arg (call,
3507 14 : internal_fn_stored_value_index (fn));
3508 14 : def_rhs = vn_valueize (def_rhs);
3509 14 : if (TREE_CODE (def_rhs) != VECTOR_CST)
3510 : return (void *)-1;
3511 :
3512 14 : ao_ref_init_from_ptr_and_size (&lhs_ref,
3513 : vn_valueize (gimple_call_arg (call, 0)),
3514 14 : TYPE_SIZE_UNIT (TREE_TYPE (def_rhs)));
3515 14 : tree base2;
3516 14 : poly_int64 offset2, size2, maxsize2;
3517 14 : HOST_WIDE_INT offset2i, size2i, offseti;
3518 14 : base2 = ao_ref_base (&lhs_ref);
3519 14 : offset2 = lhs_ref.offset;
3520 14 : size2 = lhs_ref.size;
3521 14 : maxsize2 = lhs_ref.max_size;
3522 14 : if (known_size_p (maxsize2)
3523 14 : && known_eq (maxsize2, size2)
3524 14 : && adjust_offsets_for_equal_base_address (base, &offset,
3525 : base2, &offset2)
3526 6 : && maxsize.is_constant (&maxsizei)
3527 6 : && offset.is_constant (&offseti)
3528 6 : && offset2.is_constant (&offset2i)
3529 14 : && size2.is_constant (&size2i))
3530 : {
3531 6 : if (!ranges_maybe_overlap_p (offset, maxsize, offset2, size2))
3532 : /* Poor-mans disambiguation. */
3533 : return NULL;
3534 6 : else if (ranges_known_overlap_p (offset, maxsize, offset2, size2))
3535 : {
3536 6 : pd_data pd;
3537 6 : pd.rhs = def_rhs;
3538 6 : tree aa = gimple_call_arg (call, 1);
3539 6 : alias_set_type set = get_deref_alias_set (TREE_TYPE (aa));
3540 6 : tree vectype = TREE_TYPE (def_rhs);
3541 6 : unsigned HOST_WIDE_INT elsz
3542 6 : = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (vectype)));
3543 6 : if (mask)
3544 : {
3545 : HOST_WIDE_INT start = 0, length = 0;
3546 : unsigned mask_idx = 0;
3547 48 : do
3548 : {
3549 48 : if (integer_zerop (VECTOR_CST_ELT (mask, mask_idx)))
3550 : {
3551 24 : if (length != 0)
3552 : {
3553 18 : pd.rhs_off = start;
3554 18 : pd.offset = offset2i + start;
3555 18 : pd.size = length;
3556 18 : if (ranges_known_overlap_p
3557 18 : (offset, maxsize, pd.offset, pd.size))
3558 : {
3559 0 : void *res = data->push_partial_def
3560 0 : (pd, set, set, offseti, maxsizei);
3561 0 : if (res != NULL)
3562 6 : return res;
3563 : }
3564 : }
3565 24 : start = (mask_idx + 1) * elsz;
3566 24 : length = 0;
3567 : }
3568 : else
3569 24 : length += elsz;
3570 48 : mask_idx++;
3571 : }
3572 48 : while (known_lt (mask_idx, TYPE_VECTOR_SUBPARTS (vectype)));
3573 6 : if (length != 0)
3574 : {
3575 6 : pd.rhs_off = start;
3576 6 : pd.offset = offset2i + start;
3577 6 : pd.size = length;
3578 6 : if (ranges_known_overlap_p (offset, maxsize,
3579 : pd.offset, pd.size))
3580 2 : return data->push_partial_def (pd, set, set,
3581 2 : offseti, maxsizei);
3582 : }
3583 : }
3584 0 : else if (fn == IFN_LEN_STORE)
3585 : {
3586 0 : pd.offset = offset2i;
3587 0 : pd.size = (tree_to_uhwi (len)
3588 0 : + -tree_to_shwi (bias)) * BITS_PER_UNIT;
3589 0 : if (BYTES_BIG_ENDIAN)
3590 : pd.rhs_off = pd.size - tree_to_uhwi (TYPE_SIZE (vectype));
3591 : else
3592 0 : pd.rhs_off = 0;
3593 0 : if (ranges_known_overlap_p (offset, maxsize,
3594 : pd.offset, pd.size))
3595 0 : return data->push_partial_def (pd, set, set,
3596 0 : offseti, maxsizei);
3597 : }
3598 : else
3599 0 : gcc_unreachable ();
3600 4 : return NULL;
3601 : }
3602 : }
3603 : }
3604 :
3605 : /* 5) For aggregate copies translate the reference through them if
3606 : the copy kills ref. */
3607 18362114 : else if (data->vn_walk_kind == VN_WALKREWRITE
3608 14818263 : && gimple_assign_single_p (def_stmt)
3609 2440261 : && !gimple_has_volatile_ops (def_stmt)
3610 20800077 : && (DECL_P (gimple_assign_rhs1 (def_stmt))
3611 1969913 : || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
3612 1566926 : || handled_component_p (gimple_assign_rhs1 (def_stmt))))
3613 : {
3614 2228447 : tree base2;
3615 2228447 : int i, j, k;
3616 2228447 : auto_vec<vn_reference_op_s> rhs;
3617 2228447 : vn_reference_op_t vro;
3618 2228447 : ao_ref r;
3619 :
3620 2228447 : gcc_assert (lhs_ref_ok);
3621 :
3622 : /* See if the assignment kills REF. */
3623 2228447 : base2 = ao_ref_base (&lhs_ref);
3624 2228447 : if (!lhs_ref.max_size_known_p ()
3625 2227998 : || (base != base2
3626 82555 : && (TREE_CODE (base) != MEM_REF
3627 68184 : || TREE_CODE (base2) != MEM_REF
3628 52982 : || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
3629 19450 : || !tree_int_cst_equal (TREE_OPERAND (base, 1),
3630 19450 : TREE_OPERAND (base2, 1))))
3631 4391851 : || !stmt_kills_ref_p (def_stmt, ref))
3632 390138 : return (void *)-1;
3633 :
3634 : /* Find the common base of ref and the lhs. lhs_ops already
3635 : contains valueized operands for the lhs. */
3636 1838309 : poly_int64 extra_off = 0;
3637 1838309 : i = vr->operands.length () - 1;
3638 1838309 : j = lhs_ops.length () - 1;
3639 :
3640 : /* The base should be always equal due to the above check. */
3641 1838309 : if (! vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
3642 : return (void *)-1;
3643 1838047 : i--, j--;
3644 :
3645 : /* The 2nd component should always exist and be a MEM_REF. */
3646 1838047 : if (!(i >= 0 && j >= 0))
3647 : ;
3648 1838047 : else if (vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
3649 835718 : i--, j--;
3650 1002329 : else if (vr->operands[i].opcode == MEM_REF
3651 1000747 : && lhs_ops[j].opcode == MEM_REF
3652 1000747 : && known_ne (lhs_ops[j].off, -1)
3653 2003076 : && known_ne (vr->operands[i].off, -1))
3654 : {
3655 1000747 : bool found = false;
3656 : /* When we ge a mismatch at a MEM_REF that is not the sole component
3657 : 	  /* When we get a mismatch at a MEM_REF that is not the sole component
3658 : stripping there. This happens when addresses of components get
3659 : forwarded into dereferences. */
3660 1000747 : if (i > 0)
3661 : {
3662 104373 : int temi = i - 1;
3663 104373 : poly_int64 tem_extra_off = extra_off + vr->operands[i].off;
3664 104373 : while (temi >= 0
3665 227036 : && known_ne (vr->operands[temi].off, -1))
3666 : {
3667 124174 : if (vr->operands[temi].type
3668 124174 : && lhs_ops[j].type
3669 248348 : && (TYPE_MAIN_VARIANT (vr->operands[temi].type)
3670 124174 : == TYPE_MAIN_VARIANT (lhs_ops[j].type)))
3671 : {
3672 1511 : i = temi;
3673 : /* Strip the component that was type matched to
3674 : the MEM_REF. */
3675 1511 : extra_off = (tem_extra_off
3676 1511 : + vr->operands[i].off - lhs_ops[j].off);
3677 1511 : i--, j--;
3678 : /* Strip further equal components. */
3679 1511 : found = true;
3680 1511 : break;
3681 : }
3682 122663 : tem_extra_off += vr->operands[temi].off;
3683 122663 : temi--;
3684 : }
3685 : }
3686 1000747 : if (!found && j > 0)
3687 : {
3688 26349 : int temj = j - 1;
3689 26349 : poly_int64 tem_extra_off = extra_off - lhs_ops[j].off;
3690 26349 : while (temj >= 0
3691 51044 : && known_ne (lhs_ops[temj].off, -1))
3692 : {
3693 28472 : if (vr->operands[i].type
3694 28472 : && lhs_ops[temj].type
3695 56944 : && (TYPE_MAIN_VARIANT (vr->operands[i].type)
3696 28472 : == TYPE_MAIN_VARIANT (lhs_ops[temj].type)))
3697 : {
3698 3777 : j = temj;
3699 : /* Strip the component that was type matched to
3700 : the MEM_REF. */
3701 3777 : extra_off = (tem_extra_off
3702 3777 : + vr->operands[i].off - lhs_ops[j].off);
3703 3777 : i--, j--;
3704 : /* Strip further equal components. */
3705 3777 : found = true;
3706 3777 : break;
3707 : }
3708 24695 : tem_extra_off += -lhs_ops[temj].off;
3709 24695 : temj--;
3710 : }
3711 : }
3712 : /* When we cannot find a common base to reconstruct the full
3713 : reference instead try to reduce the lookup to the new
3714 : base plus a constant offset. */
3715 1000747 : if (!found)
3716 : {
3717 : while (j >= 0
3718 2015275 : && known_ne (lhs_ops[j].off, -1))
3719 : {
3720 1019816 : extra_off += -lhs_ops[j].off;
3721 1019816 : j--;
3722 : }
3723 995459 : if (j != -1)
3724 : return (void *)-1;
3725 : while (i >= 0
3726 2108427 : && known_ne (vr->operands[i].off, -1))
3727 : {
3728 : /* Punt if the additional ops contain a storage order
3729 : barrier. */
3730 1112968 : if (vr->operands[i].opcode == VIEW_CONVERT_EXPR
3731 1112968 : && vr->operands[i].reverse)
3732 : break;
3733 1112968 : extra_off += vr->operands[i].off;
3734 1112968 : i--;
3735 : }
3736 995459 : if (i != -1)
3737 : return (void *)-1;
3738 : found = true;
3739 : }
3740 : /* If we did find a match we'd eventually append a MEM_REF
3741 : as component. Don't. */
3742 : if (!found)
3743 : return (void *)-1;
3744 : }
3745 : else
3746 : return (void *)-1;
3747 :
3748 : /* Strip further common components, attempting to consume lhs_ops
3749 : in full. */
3750 1834617 : while (j >= 0 && i >= 0
3751 1834617 : && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
3752 : {
3753 24604 : i--;
3754 24604 : j--;
3755 : }
3756 :
3757 : /* i now points to the first additional op.
3758 : ??? LHS may not be completely contained in VR, one or more
3759 : VIEW_CONVERT_EXPRs could be in its way. We could at least
3760 : try handling outermost VIEW_CONVERT_EXPRs. */
3761 1810013 : if (j != -1)
3762 : return (void *)-1;
3763 :
3764 : /* Punt if the additional ops contain a storage order barrier. */
3765 2798941 : for (k = i; k >= 0; k--)
3766 : {
3767 991647 : vro = &vr->operands[k];
3768 991647 : if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
3769 : return (void *)-1;
3770 : }
3771 :
3772 : /* Now re-write REF to be based on the rhs of the assignment. */
3773 1807294 : tree rhs1 = gimple_assign_rhs1 (def_stmt);
3774 1807294 : copy_reference_ops_from_ref (rhs1, &rhs);
3775 :
3776 : /* Apply an extra offset to the inner MEM_REF of the RHS. */
3777 1807294 : bool force_no_tbaa = false;
3778 1807294 : if (maybe_ne (extra_off, 0))
3779 : {
3780 721561 : if (rhs.length () < 2)
3781 : return (void *)-1;
3782 721561 : int ix = rhs.length () - 2;
3783 721561 : if (rhs[ix].opcode != MEM_REF
3784 721561 : || known_eq (rhs[ix].off, -1))
3785 : return (void *)-1;
3786 721543 : rhs[ix].off += extra_off;
3787 721543 : rhs[ix].op0 = int_const_binop (PLUS_EXPR, rhs[ix].op0,
3788 721543 : build_int_cst (TREE_TYPE (rhs[ix].op0),
3789 : extra_off));
3790 : /* When we have offsetted the RHS, reading only parts of it,
3791 : we can no longer use the original TBAA type, force alias-set
3792 : zero. */
3793 721543 : force_no_tbaa = true;
3794 : }
3795 :
3796 : /* Save the operands since we need to use the original ones for
3797 : the hash entry we use. */
3798 1807276 : if (!data->saved_operands.exists ())
3799 1708172 : data->saved_operands = vr->operands.copy ();
3800 :
3801 : /* We need to pre-pend vr->operands[0..i] to rhs. */
3802 1807276 : vec<vn_reference_op_s> old = vr->operands;
3803 5421828 : if (i + 1 + rhs.length () > vr->operands.length ())
3804 1149544 : vr->operands.safe_grow (i + 1 + rhs.length (), true);
3805 : else
3806 657732 : vr->operands.truncate (i + 1 + rhs.length ());
3807 6616742 : FOR_EACH_VEC_ELT (rhs, j, vro)
3808 4809466 : vr->operands[i + 1 + j] = *vro;
3809 1807276 : valueize_refs (&vr->operands);
3810 3614552 : if (old == shared_lookup_references)
3811 1807276 : shared_lookup_references = vr->operands;
3812 1807276 : vr->hashcode = vn_reference_compute_hash (vr);
3813 :
3814 : /* Try folding the new reference to a constant. */
3815 1807276 : tree val = fully_constant_vn_reference_p (vr);
3816 1807276 : if (val)
3817 : {
3818 21954 : if (data->partial_defs.is_empty ())
3819 21945 : return data->finish (ao_ref_alias_set (&lhs_ref),
3820 21945 : ao_ref_base_alias_set (&lhs_ref), val);
3821 : /* This is the only interesting case for partial-def handling
3822 : coming from targets that like to gimplify init-ctors as
3823 : aggregate copies from constant data like aarch64 for
3824 : PR83518. */
3825 9 : if (maxsize.is_constant (&maxsizei) && known_eq (ref->size, maxsize))
3826 : {
3827 9 : pd_data pd;
3828 9 : pd.rhs = val;
3829 9 : pd.rhs_off = 0;
3830 9 : pd.offset = 0;
3831 9 : pd.size = maxsizei;
3832 9 : return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
3833 : ao_ref_base_alias_set (&lhs_ref),
3834 : 0, maxsizei);
3835 : }
3836 : }
3837 :
3838 : /* Continuing with partial defs isn't easily possible here, we
3839 : have to find a full def from further lookups from here. Probably
3840 : not worth the special-casing everywhere. */
3841 2212663 : if (!data->partial_defs.is_empty ())
3842 : return (void *)-1;
3843 :
3844 : /* Adjust *ref from the new operands. */
3845 1779152 : ao_ref rhs1_ref;
3846 1779152 : ao_ref_init (&rhs1_ref, rhs1);
3847 2851248 : if (!ao_ref_init_from_vn_reference (&r,
3848 : force_no_tbaa ? 0
3849 1072096 : : ao_ref_alias_set (&rhs1_ref),
3850 : force_no_tbaa ? 0
3851 1072096 : : ao_ref_base_alias_set (&rhs1_ref),
3852 : vr->type, vr->operands))
3853 : return (void *)-1;
3854 : /* This can happen with bitfields. */
3855 1779152 : if (maybe_ne (ref->size, r.size))
3856 : {
3857 : /* If the access lacks some subsetting simply apply that by
3858 : shortening it. That in the end can only be successful
3859 : if we can pun the lookup result which in turn requires
3860 : exact offsets. */
3861 1393 : if (known_eq (r.size, r.max_size)
3862 1393 : && known_lt (ref->size, r.size))
3863 1393 : r.size = r.max_size = ref->size;
3864 : else
3865 : return (void *)-1;
3866 : }
3867 1779152 : *ref = r;
3868 1779152 : vr->offset = r.offset;
3869 1779152 : vr->max_size = r.max_size;
3870 :
3871 : /* Do not update last seen VUSE after translating. */
3872 1779152 : data->last_vuse_ptr = NULL;
3873 : /* Invalidate the original access path since it now contains
3874 : the wrong base. */
3875 1779152 : data->orig_ref.ref = NULL_TREE;
3876 : /* Use the alias-set of this LHS for recording an eventual result. */
3877 1779152 : if (data->first_set == -2)
3878 : {
3879 1681577 : data->first_set = ao_ref_alias_set (&lhs_ref);
3880 1681577 : data->first_base_set = ao_ref_base_alias_set (&lhs_ref);
3881 : }
3882 :
3883 : /* Keep looking for the adjusted *REF / VR pair. */
3884 1779152 : return NULL;
3885 2228447 : }
3886 :
3887 : /* 6) For memcpy copies translate the reference through them if the copy
3888 : kills ref. But we cannot (easily) do this translation if the memcpy is
3889 : a storage order barrier, i.e. is equivalent to a VIEW_CONVERT_EXPR that
3890 : can modify the storage order of objects (see storage_order_barrier_p). */
3891 16133667 : else if (data->vn_walk_kind == VN_WALKREWRITE
3892 12589816 : && is_gimple_reg_type (vr->type)
3893 : /* ??? Handle BCOPY as well. */
3894 12581909 : && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
3895 12515293 : || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY_CHK)
3896 12514870 : || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
3897 12513694 : || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY_CHK)
3898 12513452 : || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE)
3899 12488404 : || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE_CHK))
3900 93833 : && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
3901 83427 : || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
3902 93799 : && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
3903 66551 : || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
3904 93784 : && (poly_int_tree_p (gimple_call_arg (def_stmt, 2), ©_size)
3905 54144 : || (TREE_CODE (gimple_call_arg (def_stmt, 2)) == SSA_NAME
3906 54144 : && poly_int_tree_p (SSA_VAL (gimple_call_arg (def_stmt, 2)),
3907 : ©_size)))
3908 : /* Handling this is more complicated, give up for now. */
3909 16175791 : && data->partial_defs.is_empty ())
3910 : {
3911 41513 : tree lhs, rhs;
3912 41513 : ao_ref r;
3913 41513 : poly_int64 rhs_offset, lhs_offset;
3914 41513 : vn_reference_op_s op;
3915 41513 : poly_uint64 mem_offset;
3916 41513 : poly_int64 at, byte_maxsize;
3917 :
3918 : /* Only handle non-variable, addressable refs. */
3919 41513 : if (maybe_ne (ref->size, maxsize)
3920 41022 : || !multiple_p (offset, BITS_PER_UNIT, &at)
3921 41513 : || !multiple_p (maxsize, BITS_PER_UNIT, &byte_maxsize))
3922 491 : return (void *)-1;
3923 :
3924 : /* Extract a pointer base and an offset for the destination. */
3925 41022 : lhs = gimple_call_arg (def_stmt, 0);
3926 41022 : lhs_offset = 0;
3927 41022 : if (TREE_CODE (lhs) == SSA_NAME)
3928 : {
3929 32195 : lhs = vn_valueize (lhs);
3930 32195 : if (TREE_CODE (lhs) == SSA_NAME)
3931 : {
3932 31875 : gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
3933 31875 : if (gimple_assign_single_p (def_stmt)
3934 31875 : && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
3935 2381 : lhs = gimple_assign_rhs1 (def_stmt);
3936 : }
3937 : }
3938 41022 : if (TREE_CODE (lhs) == ADDR_EXPR)
3939 : {
3940 15787 : if (AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (lhs)))
3941 15490 : && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (TREE_TYPE (lhs))))
3942 : return (void *)-1;
3943 11388 : tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
3944 : &lhs_offset);
3945 11388 : if (!tem)
3946 : return (void *)-1;
3947 10696 : if (TREE_CODE (tem) == MEM_REF
3948 10696 : && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
3949 : {
3950 1689 : lhs = TREE_OPERAND (tem, 0);
3951 1689 : if (TREE_CODE (lhs) == SSA_NAME)
3952 1689 : lhs = vn_valueize (lhs);
3953 1689 : lhs_offset += mem_offset;
3954 : }
3955 9007 : else if (DECL_P (tem))
3956 9007 : lhs = build_fold_addr_expr (tem);
3957 : else
3958 : return (void *)-1;
3959 : }
3960 40190 : if (TREE_CODE (lhs) != SSA_NAME
3961 9008 : && TREE_CODE (lhs) != ADDR_EXPR)
3962 : return (void *)-1;
3963 :
3964 : /* Extract a pointer base and an offset for the source. */
3965 40190 : rhs = gimple_call_arg (def_stmt, 1);
3966 40190 : rhs_offset = 0;
3967 40190 : if (TREE_CODE (rhs) == SSA_NAME)
3968 18505 : rhs = vn_valueize (rhs);
3969 40190 : if (TREE_CODE (rhs) == ADDR_EXPR)
3970 : {
3971 34171 : if (AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (rhs)))
3972 23709 : && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (TREE_TYPE (rhs))))
3973 : return (void *)-1;
3974 23265 : tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
3975 : &rhs_offset);
3976 23265 : if (!tem)
3977 : return (void *)-1;
3978 23265 : if (TREE_CODE (tem) == MEM_REF
3979 23265 : && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
3980 : {
3981 0 : rhs = TREE_OPERAND (tem, 0);
3982 0 : rhs_offset += mem_offset;
3983 : }
3984 23265 : else if (DECL_P (tem)
3985 17327 : || TREE_CODE (tem) == STRING_CST)
3986 23265 : rhs = build_fold_addr_expr (tem);
3987 : else
3988 : return (void *)-1;
3989 : }
3990 40190 : if (TREE_CODE (rhs) == SSA_NAME)
3991 16925 : rhs = SSA_VAL (rhs);
3992 23265 : else if (TREE_CODE (rhs) != ADDR_EXPR)
3993 : return (void *)-1;
3994 :
3995 : /* The bases of the destination and the references have to agree. */
3996 40190 : if (TREE_CODE (base) == MEM_REF)
3997 : {
3998 15506 : if (TREE_OPERAND (base, 0) != lhs
3999 15506 : || !poly_int_tree_p (TREE_OPERAND (base, 1), &mem_offset))
4000 11878 : return (void *) -1;
4001 11437 : at += mem_offset;
4002 : }
4003 24684 : else if (!DECL_P (base)
4004 23772 : || TREE_CODE (lhs) != ADDR_EXPR
4005 32494 : || TREE_OPERAND (lhs, 0) != base)
4006 : return (void *)-1;
4007 :
4008 : /* If the access is completely outside of the memcpy destination
4009 : area there is no aliasing. */
4010 11437 : if (!ranges_maybe_overlap_p (lhs_offset, copy_size, at, byte_maxsize))
4011 : return NULL;
4012 : /* And the access has to be contained within the memcpy destination. */
4013 11404 : if (!known_subrange_p (at, byte_maxsize, lhs_offset, copy_size))
4014 : return (void *)-1;
4015 :
4016 : /* Save the operands since we need to use the original ones for
4017 : the hash entry we use. */
4018 10951 : if (!data->saved_operands.exists ())
4019 10527 : data->saved_operands = vr->operands.copy ();
4020 :
4021 : /* Make room for 2 operands in the new reference. */
4022 10951 : if (vr->operands.length () < 2)
4023 : {
4024 0 : vec<vn_reference_op_s> old = vr->operands;
4025 0 : vr->operands.safe_grow_cleared (2, true);
4026 0 : if (old == shared_lookup_references)
4027 0 : shared_lookup_references = vr->operands;
4028 : }
4029 : else
4030 10951 : vr->operands.truncate (2);
4031 :
4032 : /* The looked-through reference is a simple MEM_REF. */
4033 10951 : memset (&op, 0, sizeof (op));
4034 10951 : op.type = vr->type;
4035 10951 : op.opcode = MEM_REF;
4036 10951 : op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
4037 10951 : op.off = at - lhs_offset + rhs_offset;
4038 10951 : vr->operands[0] = op;
4039 10951 : op.type = TREE_TYPE (rhs);
4040 10951 : op.opcode = TREE_CODE (rhs);
4041 10951 : op.op0 = rhs;
4042 10951 : op.off = -1;
4043 10951 : vr->operands[1] = op;
4044 10951 : vr->hashcode = vn_reference_compute_hash (vr);
4045 :
4046 : /* Try folding the new reference to a constant. */
4047 10951 : tree val = fully_constant_vn_reference_p (vr);
4048 10951 : if (val)
4049 2932 : return data->finish (0, 0, val);
4050 :
4051 : /* Adjust *ref from the new operands. */
4052 8019 : if (!ao_ref_init_from_vn_reference (&r, 0, 0, vr->type, vr->operands))
4053 : return (void *)-1;
4054 : /* This can happen with bitfields. */
4055 8019 : if (maybe_ne (ref->size, r.size))
4056 : return (void *)-1;
4057 8019 : *ref = r;
4058 8019 : vr->offset = r.offset;
4059 8019 : vr->max_size = r.max_size;
4060 :
4061 : /* Do not update last seen VUSE after translating. */
4062 8019 : data->last_vuse_ptr = NULL;
4063 : /* Invalidate the original access path since it now contains
4064 : the wrong base. */
4065 8019 : data->orig_ref.ref = NULL_TREE;
4066 : /* Use the alias-set of this stmt for recording an eventual result. */
4067 8019 : if (data->first_set == -2)
4068 : {
4069 7637 : data->first_set = 0;
4070 7637 : data->first_base_set = 0;
4071 : }
4072 :
4073 : /* Keep looking for the adjusted *REF / VR pair. */
4074 8019 : return NULL;
4075 : }
4076 :
4077 : /* Bail out and stop walking. */
4078 : return (void *)-1;
4079 : }
4080 :
4081 : /* Return true if E is a backedge with respect to our CFG walk order. */
4082 :
4083 : static bool
4084 118851198 : vn_is_backedge (edge e, void *)
4085 : {
4086 : /* During PRE elimination we no longer have access to this info. */
4087 118851198 : return (!vn_bb_to_rpo
4088 118851198 : || vn_bb_to_rpo[e->dest->index] <= vn_bb_to_rpo[e->src->index]);
4089 : }
4090 :
4091 : /* Return a reference op vector from OP that can be used for
4092 : vn_reference_lookup_pieces. The caller is responsible for releasing
4093 : the vector. */
4094 :
4095 : vec<vn_reference_op_s>
4096 4650485 : vn_reference_operands_for_lookup (tree op)
4097 : {
4098 4650485 : bool valueized;
4099 4650485 : return valueize_shared_reference_ops_from_ref (op, &valueized).copy ();
4100 : }
4101 :
4102 : /* Lookup a reference operation by it's parts, in the current hash table.
4103 : Returns the resulting value number if it exists in the hash table,
4104 : NULL_TREE otherwise. VNRESULT will be filled in with the actual
4105 : vn_reference_t stored in the hashtable if something is found. */
4106 :
4107 : tree
4108 7659894 : vn_reference_lookup_pieces (tree vuse, alias_set_type set,
4109 : alias_set_type base_set, tree type,
4110 : vec<vn_reference_op_s> operands,
4111 : vn_reference_t *vnresult, vn_lookup_kind kind)
4112 : {
4113 7659894 : struct vn_reference_s vr1;
4114 7659894 : vn_reference_t tmp;
4115 7659894 : tree cst;
4116 :
4117 7659894 : if (!vnresult)
4118 0 : vnresult = &tmp;
4119 7659894 : *vnresult = NULL;
4120 :
4121 7659894 : vr1.vuse = vuse_ssa_val (vuse);
4122 7659894 : shared_lookup_references.truncate (0);
4123 15319788 : shared_lookup_references.safe_grow (operands.length (), true);
4124 7659894 : memcpy (shared_lookup_references.address (),
4125 7659894 : operands.address (),
4126 : sizeof (vn_reference_op_s)
4127 7659894 : * operands.length ());
4128 7659894 : bool valueized_p;
4129 7659894 : valueize_refs_1 (&shared_lookup_references, &valueized_p);
4130 7659894 : vr1.operands = shared_lookup_references;
4131 7659894 : vr1.type = type;
4132 7659894 : vr1.set = set;
4133 7659894 : vr1.base_set = base_set;
4134 : /* We can pretend there's no extra info fed in since the ao_refs offset
4135 : and max_size are computed only from the VN reference ops. */
4136 7659894 : vr1.offset = 0;
4137 7659894 : vr1.max_size = -1;
4138 7659894 : vr1.hashcode = vn_reference_compute_hash (&vr1);
4139 7659894 : if ((cst = fully_constant_vn_reference_p (&vr1)))
4140 : return cst;
4141 :
4142 7641226 : vn_reference_lookup_1 (&vr1, vnresult);
4143 7641226 : if (!*vnresult
4144 2969838 : && kind != VN_NOWALK
4145 2969838 : && vr1.vuse)
4146 : {
4147 2941962 : ao_ref r;
4148 2941962 : unsigned limit = param_sccvn_max_alias_queries_per_access;
4149 2941962 : vn_walk_cb_data data (&vr1, NULL_TREE, NULL, kind, true, NULL_TREE,
4150 2941962 : false);
4151 2941962 : vec<vn_reference_op_s> ops_for_ref;
4152 2941962 : if (!valueized_p)
4153 2852335 : ops_for_ref = vr1.operands;
4154 : else
4155 : {
4156 : /* For ao_ref_from_mem we have to ensure only available SSA names
4157 : end up in base and the only convenient way to make this work
4158 : for PRE is to re-valueize with that in mind. */
4159 179254 : ops_for_ref.create (operands.length ());
4160 179254 : ops_for_ref.quick_grow (operands.length ());
4161 89627 : memcpy (ops_for_ref.address (),
4162 89627 : operands.address (),
4163 : sizeof (vn_reference_op_s)
4164 89627 : * operands.length ());
4165 89627 : valueize_refs_1 (&ops_for_ref, &valueized_p, true);
4166 : }
4167 2941962 : if (ao_ref_init_from_vn_reference (&r, set, base_set, type,
4168 : ops_for_ref))
4169 2872486 : *vnresult
4170 2872486 : = ((vn_reference_t)
4171 2872486 : walk_non_aliased_vuses (&r, vr1.vuse, true, vn_reference_lookup_2,
4172 : vn_reference_lookup_3, vn_is_backedge,
4173 : vuse_valueize, limit, &data));
4174 5883924 : if (ops_for_ref != shared_lookup_references)
4175 89627 : ops_for_ref.release ();
4176 5883924 : gcc_checking_assert (vr1.operands == shared_lookup_references);
4177 2941962 : if (*vnresult
4178 419873 : && data.same_val
4179 2941962 : && (!(*vnresult)->result
4180 0 : || !operand_equal_p ((*vnresult)->result, data.same_val)))
4181 : {
4182 0 : *vnresult = NULL;
4183 0 : return NULL_TREE;
4184 : }
4185 2941962 : }
4186 :
4187 7641226 : if (*vnresult)
4188 5091261 : return (*vnresult)->result;
4189 :
4190 : return NULL_TREE;
4191 : }
4192 :
4193 : /* When OPERANDS is an ADDR_EXPR that can be possibly expressed as a
4194 : POINTER_PLUS_EXPR return true and fill in its operands in OPS. */
4195 :
4196 : bool
4197 2181941 : vn_pp_nary_for_addr (const vec<vn_reference_op_s>& operands, tree ops[2])
4198 : {
4199 4363882 : gcc_assert (operands[0].opcode == ADDR_EXPR
4200 : && operands.last ().opcode == SSA_NAME);
4201 : poly_int64 off = 0;
4202 : vn_reference_op_t vro;
4203 : unsigned i;
4204 7051379 : for (i = 1; operands.iterate (i, &vro); ++i)
4205 : {
4206 7051379 : if (vro->opcode == SSA_NAME)
4207 : break;
4208 4919148 : else if (known_eq (vro->off, -1))
4209 : break;
4210 4869438 : off += vro->off;
4211 : }
4212 2181941 : if (i == operands.length () - 1
4213 2132231 : && maybe_ne (off, 0)
4214 : /* Make sure we the offset we accumulated in a 64bit int
4215 : fits the address computation carried out in target
4216 : offset precision. */
4217 3582132 : && (off.coeffs[0]
4218 1400191 : == sext_hwi (off.coeffs[0], TYPE_PRECISION (sizetype))))
4219 : {
4220 1399421 : gcc_assert (operands[i-1].opcode == MEM_REF);
4221 1399421 : ops[0] = operands[i].op0;
4222 1399421 : ops[1] = wide_int_to_tree (sizetype, off);
4223 1399421 : return true;
4224 : }
4225 : return false;
4226 : }
4227 :
4228 : /* Lookup OP in the current hash table, and return the resulting value
4229 : number if it exists in the hash table. Return NULL_TREE if it does
4230 : not exist in the hash table or if the result field of the structure
4231 : was NULL.. VNRESULT will be filled in with the vn_reference_t
4232 : stored in the hashtable if one exists. When TBAA_P is false assume
4233 : we are looking up a store and treat it as having alias-set zero.
4234 : *LAST_VUSE_PTR will be updated with the VUSE the value lookup succeeded.
4235 : MASK is either NULL_TREE, or can be an INTEGER_CST if the result of the
4236 : load is bitwise anded with MASK and so we are only interested in a subset
4237 : of the bits and can ignore if the other bits are uninitialized or
4238 : not initialized with constants. When doing redundant store removal
4239 : the caller has to set REDUNDANT_STORE_REMOVAL_P. */
4240 :
4241 : tree
4242 99086757 : vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
4243 : vn_reference_t *vnresult, bool tbaa_p,
4244 : tree *last_vuse_ptr, tree mask,
4245 : bool redundant_store_removal_p)
4246 : {
4247 99086757 : vec<vn_reference_op_s> operands;
4248 99086757 : struct vn_reference_s vr1;
4249 99086757 : bool valueized_anything;
4250 :
4251 99086757 : if (vnresult)
4252 98702554 : *vnresult = NULL;
4253 :
4254 99086757 : vr1.vuse = vuse_ssa_val (vuse);
4255 198173514 : vr1.operands = operands
4256 99086757 : = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
4257 :
4258 : /* Handle &MEM[ptr + 5].b[1].c as POINTER_PLUS_EXPR. Avoid doing
4259 : this before the pass folding __builtin_object_size had a chance to run. */
4260 99086757 : if ((cfun->curr_properties & PROP_objsz)
4261 71838954 : && operands[0].opcode == ADDR_EXPR
4262 100186659 : && operands.last ().opcode == SSA_NAME)
4263 : {
4264 1066311 : tree ops[2];
4265 1066311 : if (vn_pp_nary_for_addr (operands, ops))
4266 : {
4267 684053 : tree res = vn_nary_op_lookup_pieces (2, POINTER_PLUS_EXPR,
4268 684053 : TREE_TYPE (op), ops, NULL);
4269 684053 : if (res)
4270 684053 : return res;
4271 684053 : return NULL_TREE;
4272 : }
4273 : }
4274 :
4275 98402704 : vr1.type = TREE_TYPE (op);
4276 98402704 : ao_ref op_ref;
4277 98402704 : ao_ref_init (&op_ref, op);
4278 98402704 : vr1.set = ao_ref_alias_set (&op_ref);
4279 98402704 : vr1.base_set = ao_ref_base_alias_set (&op_ref);
4280 98402704 : vr1.offset = 0;
4281 98402704 : vr1.max_size = -1;
4282 98402704 : vr1.hashcode = vn_reference_compute_hash (&vr1);
4283 98402704 : if (mask == NULL_TREE)
4284 98101906 : if (tree cst = fully_constant_vn_reference_p (&vr1))
4285 : return cst;
4286 :
4287 98389326 : if (kind != VN_NOWALK && vr1.vuse)
4288 : {
4289 57134715 : vn_reference_t wvnresult;
4290 57134715 : ao_ref r;
4291 57134715 : unsigned limit = param_sccvn_max_alias_queries_per_access;
4292 57134715 : auto_vec<vn_reference_op_s> ops_for_ref;
4293 57134715 : if (valueized_anything)
4294 : {
4295 4480823 : copy_reference_ops_from_ref (op, &ops_for_ref);
4296 4480823 : bool tem;
4297 4480823 : valueize_refs_1 (&ops_for_ref, &tem, true);
4298 : }
4299 : /* Make sure to use a valueized reference if we valueized anything.
4300 : Otherwise preserve the full reference for advanced TBAA. */
4301 57134715 : if (!valueized_anything
4302 57134715 : || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.base_set,
4303 : vr1.type, ops_for_ref))
4304 : {
4305 52653892 : ao_ref_init (&r, op);
4306 : /* Record the extra info we're getting from the full ref. */
4307 52653892 : ao_ref_base (&r);
4308 52653892 : vr1.offset = r.offset;
4309 52653892 : vr1.max_size = r.max_size;
4310 : }
4311 57134715 : vn_walk_cb_data data (&vr1, r.ref ? NULL_TREE : op,
4312 : last_vuse_ptr, kind, tbaa_p, mask,
4313 109788607 : redundant_store_removal_p);
4314 :
4315 57134715 : wvnresult
4316 : = ((vn_reference_t)
4317 57134715 : walk_non_aliased_vuses (&r, vr1.vuse, tbaa_p, vn_reference_lookup_2,
4318 : vn_reference_lookup_3, vn_is_backedge,
4319 : vuse_valueize, limit, &data));
4320 114269430 : gcc_checking_assert (vr1.operands == shared_lookup_references);
4321 57134715 : if (wvnresult)
4322 : {
4323 8363712 : gcc_assert (mask == NULL_TREE);
4324 8363712 : if (data.same_val
4325 8363712 : && (!wvnresult->result
4326 64919 : || !operand_equal_p (wvnresult->result, data.same_val)))
4327 45453 : return NULL_TREE;
4328 8318259 : if (vnresult)
4329 8316732 : *vnresult = wvnresult;
4330 8318259 : return wvnresult->result;
4331 : }
4332 48771003 : else if (mask)
4333 300798 : return data.masked_result;
4334 :
4335 : return NULL_TREE;
4336 57134715 : }
4337 :
4338 41254611 : if (last_vuse_ptr)
4339 1417439 : *last_vuse_ptr = vr1.vuse;
4340 41254611 : if (mask)
4341 : return NULL_TREE;
4342 41254611 : return vn_reference_lookup_1 (&vr1, vnresult);
4343 : }
4344 :
/* Lookup CALL in the current hash table and return the entry in
   *VNRESULT if found.  Populates *VR for the hashtable lookup.  */

void
vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
			  vn_reference_t vr)
{
  if (vnresult)
    *vnresult = NULL;

  tree vuse = gimple_vuse (call);

  /* Value-number the virtual operand and build the operand vector
     from the call's function and arguments.  */
  vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr->operands = valueize_shared_reference_ops_from_call (call);
  tree lhs = gimple_call_lhs (call);
  /* For non-SSA return values the reference ops contain the LHS.  */
  vr->type = ((lhs && TREE_CODE (lhs) == SSA_NAME)
	      ? TREE_TYPE (lhs) : NULL_TREE);
  vr->punned = false;
  /* Calls get alias-set zero and an unknown extent.  */
  vr->set = 0;
  vr->base_set = 0;
  vr->offset = 0;
  vr->max_size = -1;
  vr->hashcode = vn_reference_compute_hash (vr);
  vn_reference_lookup_1 (vr, vnresult);
}
4371 :
/* Insert OP into the current hash table with a value number of RESULT.
   VUSE is the virtual operand of the defining statement and VDEF, if
   non-NULL, the virtual definition it produces.  */

static void
vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
{
  vn_reference_s **slot;
  vn_reference_t vr1;
  bool tem;

  vec<vn_reference_op_s> operands
    = valueize_shared_reference_ops_from_ref (op, &tem);
  /* Handle &MEM[ptr + 5].b[1].c as POINTER_PLUS_EXPR.  Avoid doing this
     before the pass folding __builtin_object_size had a chance to run.  */
  if ((cfun->curr_properties & PROP_objsz)
      && operands[0].opcode == ADDR_EXPR
      && operands.last ().opcode == SSA_NAME)
    {
      tree ops[2];
      if (vn_pp_nary_for_addr (operands, ops))
	{
	  /* Record the address computation as a nary operation instead,
	     mirroring the lookup path in vn_reference_lookup.  */
	  vn_nary_op_insert_pieces (2, POINTER_PLUS_EXPR,
				    TREE_TYPE (op), ops, result,
				    VN_INFO (result)->value_id);
	  return;
	}
    }

  vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
  if (TREE_CODE (result) == SSA_NAME)
    vr1->value_id = VN_INFO (result)->value_id;
  else
    vr1->value_id = get_or_alloc_constant_value_id (result);
  vr1->vuse = vuse_ssa_val (vuse);
  /* The shared operand vector has to be copied for permanent storage.  */
  vr1->operands = operands.copy ();
  vr1->type = TREE_TYPE (op);
  vr1->punned = false;
  ao_ref op_ref;
  ao_ref_init (&op_ref, op);
  vr1->set = ao_ref_alias_set (&op_ref);
  vr1->base_set = ao_ref_base_alias_set (&op_ref);
  /* Specifically use an unknown extent here, we're not doing any lookup
     and assume the caller didn't either (or it went VARYING).  */
  vr1->offset = 0;
  vr1->max_size = -1;
  vr1->hashcode = vn_reference_compute_hash (vr1);
  vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
  vr1->result_vdef = vdef;

  slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
						      INSERT);

  /* Because IL walking on reference lookup can end up visiting
     a def that is only to be visited later in iteration order
     when we are about to make an irreducible region reducible
     the def can be effectively processed and its ref being inserted
     by vn_reference_lookup_3 already.  So we cannot assert (!*slot)
     but save a lookup if we deal with already inserted refs here.  */
  if (*slot)
    {
      /* We cannot assert that we have the same value either because
	 when disentangling an irreducible region we may end up visiting
	 a use before the corresponding def.  That's a missed optimization
	 only though.  See gcc.dg/tree-ssa/pr87126.c for example.  */
      if (dump_file && (dump_flags & TDF_DETAILS)
	  && !operand_equal_p ((*slot)->result, vr1->result, 0))
	{
	  fprintf (dump_file, "Keeping old value ");
	  print_generic_expr (dump_file, (*slot)->result);
	  fprintf (dump_file, " because of collision\n");
	}
      /* Release the just allocated reference; the existing entry wins.  */
      free_reference (vr1);
      obstack_free (&vn_tables_obstack, vr1);
      return;
    }

  *slot = vr1;
  /* Chain the new entry for table unwinding.  */
  vr1->next = last_inserted_ref;
  last_inserted_ref = vr1;
}
4451 :
/* Insert a reference by its pieces into the current hash table with
   a value number of RESULT.  Return the resulting reference
   structure we created.  */

vn_reference_t
vn_reference_insert_pieces (tree vuse, alias_set_type set,
			    alias_set_type base_set,
			    poly_int64 offset, poly_int64 max_size, tree type,
			    vec<vn_reference_op_s> operands,
			    tree result, unsigned int value_id)

{
  vn_reference_s **slot;
  vn_reference_t vr1;

  vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
  vr1->value_id = value_id;
  vr1->vuse = vuse_ssa_val (vuse);
  /* OPERANDS is taken over by the new entry (not copied) and
     valueized in place.  */
  vr1->operands = operands;
  valueize_refs (&vr1->operands);
  vr1->type = type;
  vr1->punned = false;
  vr1->set = set;
  vr1->base_set = base_set;
  vr1->offset = offset;
  vr1->max_size = max_size;
  vr1->hashcode = vn_reference_compute_hash (vr1);
  if (result && TREE_CODE (result) == SSA_NAME)
    result = SSA_VAL (result);
  vr1->result = result;
  vr1->result_vdef = NULL_TREE;

  slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
						      INSERT);

  /* At this point we should have all the things inserted that we have
     seen before, and we should never try inserting something that
     already exists.  */
  gcc_assert (!*slot);

  *slot = vr1;
  /* Chain the new entry for table unwinding.  */
  vr1->next = last_inserted_ref;
  last_inserted_ref = vr1;
  return vr1;
}
4497 :
/* Compute and return the hash value for nary operation VNO1.  Note this
   canonicalizes *VNO1 in place: operands of commutative (binary and
   ternary) operations may be swapped, and comparisons may additionally
   have their code swapped, so equivalent operations hash identically.  */

hashval_t
vn_nary_op_compute_hash (const vn_nary_op_t vno1)
{
  inchash::hash hstate;
  unsigned i;

  /* Put commutative operands into canonical order.  */
  if (((vno1->length == 2
	&& commutative_tree_code (vno1->opcode))
       || (vno1->length == 3
	   && commutative_ternary_tree_code (vno1->opcode)))
      && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
    std::swap (vno1->op[0], vno1->op[1]);
  else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
	   && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
    {
      /* Swapping comparison operands requires swapping the
	 comparison code as well.  */
      std::swap (vno1->op[0], vno1->op[1]);
      vno1->opcode = swap_tree_comparison (vno1->opcode);
    }

  hstate.add_int (vno1->opcode);
  for (i = 0; i < vno1->length; ++i)
    inchash::add_expr (vno1->op[i], hstate);

  return hstate.end ();
}
4525 :
4526 : /* Compare nary operations VNO1 and VNO2 and return true if they are
4527 : equivalent. */
4528 :
4529 : bool
4530 952371752 : vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
4531 : {
4532 952371752 : unsigned i;
4533 :
4534 952371752 : if (vno1->hashcode != vno2->hashcode)
4535 : return false;
4536 :
4537 49668775 : if (vno1->length != vno2->length)
4538 : return false;
4539 :
4540 49668775 : if (vno1->opcode != vno2->opcode
4541 49668775 : || !types_compatible_p (vno1->type, vno2->type))
4542 1137430 : return false;
4543 :
4544 140305463 : for (i = 0; i < vno1->length; ++i)
4545 91872826 : if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
4546 : return false;
4547 :
4548 : /* BIT_INSERT_EXPR has an implict operand as the type precision
4549 : of op1. Need to check to make sure they are the same. */
4550 48432637 : if (vno1->opcode == BIT_INSERT_EXPR
4551 536 : && TREE_CODE (vno1->op[1]) == INTEGER_CST
4552 48432746 : && TYPE_PRECISION (TREE_TYPE (vno1->op[1]))
4553 109 : != TYPE_PRECISION (TREE_TYPE (vno2->op[1])))
4554 : return false;
4555 :
4556 : return true;
4557 : }
4558 :
4559 : /* Initialize VNO from the pieces provided. */
4560 :
4561 : static void
4562 185847460 : init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
4563 : enum tree_code code, tree type, tree *ops)
4564 : {
4565 185847460 : vno->opcode = code;
4566 185847460 : vno->length = length;
4567 185847460 : vno->type = type;
4568 4513672 : memcpy (&vno->op[0], ops, sizeof (tree) * length);
4569 0 : }
4570 :
4571 : /* Return the number of operands for a vn_nary ops structure from STMT. */
4572 :
4573 : unsigned int
4574 107573348 : vn_nary_length_from_stmt (gimple *stmt)
4575 : {
4576 107573348 : switch (gimple_assign_rhs_code (stmt))
4577 : {
4578 : case REALPART_EXPR:
4579 : case IMAGPART_EXPR:
4580 : case VIEW_CONVERT_EXPR:
4581 : return 1;
4582 :
4583 601878 : case BIT_FIELD_REF:
4584 601878 : return 3;
4585 :
4586 505652 : case CONSTRUCTOR:
4587 505652 : return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
4588 :
4589 103143220 : default:
4590 103143220 : return gimple_num_ops (stmt) - 1;
4591 : }
4592 : }
4593 :
/* Initialize VNO from STMT, an assignment whose rhs is a nary
   operation.  The operand count mirrors vn_nary_length_from_stmt.  */

void
init_vn_nary_op_from_stmt (vn_nary_op_t vno, gassign *stmt)
{
  unsigned i;

  vno->opcode = gimple_assign_rhs_code (stmt);
  vno->type = TREE_TYPE (gimple_assign_lhs (stmt));
  switch (vno->opcode)
    {
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      /* The single operand is the base of the wrapping reference.  */
      vno->length = 1;
      vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      break;

    case BIT_FIELD_REF:
      /* Base, bit size and bit position.  */
      vno->length = 3;
      vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
      vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
      break;

    case CONSTRUCTOR:
      /* One operand per constructor element value.  */
      vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
      for (i = 0; i < vno->length; ++i)
	vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
      break;

    default:
      gcc_checking_assert (!gimple_assign_single_p (stmt));
      vno->length = gimple_num_ops (stmt) - 1;
      for (i = 0; i < vno->length; ++i)
	vno->op[i] = gimple_op (stmt, i + 1);
    }
}
4632 :
/* Compute the hashcode for VNO and look for it in the hash table;
   return the resulting value number if it exists in the hash table.
   Return NULL_TREE if it does not exist in the hash table or if the
   result field of the operation is NULL.  VNRESULT will contain the
   vn_nary_op_t from the hashtable if it exists.  */

static tree
vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
{
  vn_nary_op_s **slot;

  if (vnresult)
    *vnresult = NULL;

  /* Valueize the operands first so the lookup is done on value
     numbers rather than original SSA names.  */
  for (unsigned i = 0; i < vno->length; ++i)
    if (TREE_CODE (vno->op[i]) == SSA_NAME)
      vno->op[i] = SSA_VAL (vno->op[i]);

  vno->hashcode = vn_nary_op_compute_hash (vno);
  slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode, NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = *slot;
  /* Predicated values are not unconditionally valid; callers have to
     query them separately via vn_nary_op_get_predicated_value.  */
  return (*slot)->predicated_values ? NULL_TREE : (*slot)->u.result;
}
4659 :
4660 : /* Lookup a n-ary operation by its pieces and return the resulting value
4661 : number if it exists in the hash table. Return NULL_TREE if it does
4662 : not exist in the hash table or if the result field of the operation
4663 : is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
4664 : if it exists. */
4665 :
4666 : tree
4667 73977710 : vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
4668 : tree type, tree *ops, vn_nary_op_t *vnresult)
4669 : {
4670 73977710 : vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
4671 : sizeof_vn_nary_op (length));
4672 73977710 : init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
4673 73977710 : return vn_nary_op_lookup_1 (vno1, vnresult);
4674 : }
4675 :
4676 : /* Lookup the rhs of STMT in the current hash table, and return the resulting
4677 : value number if it exists in the hash table. Return NULL_TREE if
4678 : it does not exist in the hash table. VNRESULT will contain the
4679 : vn_nary_op_t from the hashtable if it exists. */
4680 :
4681 : tree
4682 55680610 : vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
4683 : {
4684 55680610 : vn_nary_op_t vno1
4685 55680610 : = XALLOCAVAR (struct vn_nary_op_s,
4686 : sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
4687 55680610 : init_vn_nary_op_from_stmt (vno1, as_a <gassign *> (stmt));
4688 55680610 : return vn_nary_op_lookup_1 (vno1, vnresult);
4689 : }
4690 :
4691 : /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
4692 :
4693 : vn_nary_op_t
4694 168749361 : alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
4695 : {
4696 168749361 : return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
4697 : }
4698 :
4699 : /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
4700 : obstack. */
4701 :
4702 : static vn_nary_op_t
4703 151825073 : alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
4704 : {
4705 0 : vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length, &vn_tables_obstack);
4706 :
4707 151825073 : vno1->value_id = value_id;
4708 151825073 : vno1->length = length;
4709 151825073 : vno1->predicated_values = 0;
4710 151825073 : vno1->u.result = result;
4711 :
4712 151825073 : return vno1;
4713 : }
4714 :
/* Insert VNO into TABLE.  Handles collisions with existing entries,
   merging predicated values into an existing predicated entry's
   value chain.  Returns the entry that ends up in the table.  */

static vn_nary_op_t
vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table)
{
  vn_nary_op_s **slot;

  /* A predicated VNO arrives with exactly one value valid in one block.  */
  gcc_assert (! vno->predicated_values
	      || (! vno->u.values->next
		  && vno->u.values->n == 1));

  /* Valueize the operands before hashing/inserting.  */
  for (unsigned i = 0; i < vno->length; ++i)
    if (TREE_CODE (vno->op[i]) == SSA_NAME)
      vno->op[i] = SSA_VAL (vno->op[i]);

  vno->hashcode = vn_nary_op_compute_hash (vno);
  slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
  vno->unwind_to = *slot;
  if (*slot)
    {
      /* Prefer non-predicated values.
	 ??? Only if those are constant, otherwise, with constant predicated
	 value, turn them into predicated values with entry-block validity
	 (??? but we always find the first valid result currently).  */
      if ((*slot)->predicated_values
	  && ! vno->predicated_values)
	{
	  /* ??? We cannot remove *slot from the unwind stack list.
	     For the moment we deal with this by skipping not found
	     entries but this isn't ideal ...  */
	  *slot = vno;
	  /* ??? Maintain a stack of states we can unwind in
	     vn_nary_op_s?  But how far do we unwind?  In reality
	     we need to push change records somewhere... Or not
	     unwind vn_nary_op_s and linking them but instead
	     unwind the results "list", linking that, which also
	     doesn't move on hashtable resize.  */
	  /* We can also have a ->unwind_to recording *slot there.
	     That way we can make u.values a fixed size array with
	     recording the number of entries but of course we then
	     have always N copies for each unwind_to-state.  Or we
	     make sure to only ever append and each unwinding will
	     pop off one entry (but how to deal with predicated
	     replaced with non-predicated here?)  */
	  vno->next = last_inserted_nary;
	  last_inserted_nary = vno;
	  return vno;
	}
      else if (vno->predicated_values
	       && ! (*slot)->predicated_values)
	/* The existing non-predicated value wins.  */
	return *slot;
      else if (vno->predicated_values
	       && (*slot)->predicated_values)
	{
	  /* ??? Factor this all into a insert_single_predicated_value
	     routine.  */
	  gcc_assert (!vno->u.values->next && vno->u.values->n == 1);
	  basic_block vno_bb
	    = BASIC_BLOCK_FOR_FN (cfun, vno->u.values->valid_dominated_by_p[0]);
	  vn_pval *nval = vno->u.values;
	  vn_pval **next = &vno->u.values;
	  vn_pval *ins = NULL;
	  vn_pval *ins_at = NULL;
	  /* Find an existing value to append to.  */
	  for (vn_pval *val = (*slot)->u.values; val; val = val->next)
	    {
	      if (expressions_equal_p (val->result, nval->result))
		{
		  /* Limit the number of places we register a predicate
		     as valid.  */
		  if (val->n > 8)
		    return *slot;
		  for (unsigned i = 0; i < val->n; ++i)
		    {
		      basic_block val_bb
			= BASIC_BLOCK_FOR_FN (cfun,
					      val->valid_dominated_by_p[i]);
		      if (dominated_by_p (CDI_DOMINATORS, vno_bb, val_bb))
			/* Value registered with more generic predicate.  */
			return *slot;
		      else if (flag_checking)
			/* Shouldn't happen, we insert in RPO order.  */
			gcc_assert (!dominated_by_p (CDI_DOMINATORS,
						     val_bb, vno_bb));
		    }
		  /* Append the location.  */
		  ins_at = val;
		  ins = (vn_pval *) obstack_alloc (&vn_tables_obstack,
						   sizeof (vn_pval)
						   + val->n * sizeof (int));
		  ins->next = NULL;
		  ins->result = val->result;
		  ins->n = val->n + 1;
		  memcpy (ins->valid_dominated_by_p,
			  val->valid_dominated_by_p,
			  val->n * sizeof (int));
		  ins->valid_dominated_by_p[val->n] = vno_bb->index;
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "Appending predicate to value.\n");
		  break;
		}
	    }
	  /* Copy the rest of the value chain.  */
	  for (vn_pval *val = (*slot)->u.values; val; val = val->next)
	    {
	      if (val == ins_at)
		/* Replace the node we appended to.  */
		*next = ins;
	      else
		{
		  /* Copy other predicated values.  */
		  *next = (vn_pval *) obstack_alloc (&vn_tables_obstack,
						     sizeof (vn_pval)
						     + ((val->n-1)
							* sizeof (int)));
		  memcpy (*next, val,
			  sizeof (vn_pval) + (val->n-1) * sizeof (int));
		  (*next)->next = NULL;
		}
	      next = &(*next)->next;
	    }
	  /* Append the value if we didn't find it.  */
	  if (!ins_at)
	    *next = nval;
	  *slot = vno;
	  vno->next = last_inserted_nary;
	  last_inserted_nary = vno;
	  return vno;
	}

      /* While we do not want to insert things twice it's awkward to
	 avoid it in the case where visit_nary_op pattern-matches stuff
	 and ends up simplifying the replacement to itself.  We then
	 get two inserts, one from visit_nary_op and one from
	 vn_nary_build_or_lookup.
	 So allow inserts with the same value number.  */
      if ((*slot)->u.result == vno->u.result)
	return *slot;
    }

  /* ??? There's also optimistic vs. previous committed state merging
     that is problematic for the case of unwinding.  */

  /* ??? We should return NULL if we do not use 'vno' and have the
     caller release it.  */
  gcc_assert (!*slot);

  *slot = vno;
  vno->next = last_inserted_nary;
  last_inserted_nary = vno;
  return vno;
}
4867 :
4868 : /* Insert a n-ary operation into the current hash table using it's
4869 : pieces. Return the vn_nary_op_t structure we created and put in
4870 : the hashtable. */
4871 :
4872 : vn_nary_op_t
4873 547178 : vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
4874 : tree type, tree *ops,
4875 : tree result, unsigned int value_id)
4876 : {
4877 547178 : vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
4878 547178 : init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
4879 547178 : return vn_nary_op_insert_into (vno1, valid_info->nary);
4880 : }
4881 :
/* Return whether we can track a predicate valid when PRED_E is executed.  */

static bool
can_track_predicate_on_edge (edge pred_e)
{
  /* ??? As we are currently recording the destination basic-block index in
     vn_pval.valid_dominated_by_p and using dominance for the
     validity check we cannot track predicates on all edges.  */
  /* With a single predecessor, executing the destination implies the
     edge was taken, so dominance by the destination is exact.  */
  if (single_pred_p (pred_e->dest))
    return true;
  /* Never record for backedges.  */
  if (pred_e->flags & EDGE_DFS_BACK)
    return false;
  /* When there's more than one predecessor we cannot track
     predicate validity based on the destination block.  The
     exception is when all other incoming edges sources are
     dominated by the destination block.  */
  edge_iterator ei;
  edge e;
  FOR_EACH_EDGE (e, ei, pred_e->dest->preds)
    if (e != pred_e && ! dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
      return false;
  return true;
}
4906 :
/* Insert a predicated n-ary operation into the current hash table from
   its pieces: the operation CODE of TYPE with the LENGTH operands OPS
   has value RESULT (with id VALUE_ID) only when edge PRED_E is
   executed.  Return the inserted (or pre-existing) entry.  */

static vn_nary_op_t
vn_nary_op_insert_pieces_predicated (unsigned int length, enum tree_code code,
				     tree type, tree *ops,
				     tree result, unsigned int value_id,
				     edge pred_e)
{
  if (flag_checking)
    gcc_assert (can_track_predicate_on_edge (pred_e));

  if (dump_file && (dump_flags & TDF_DETAILS)
      /* ??? Fix dumping, but currently we only get comparisons.  */
      && TREE_CODE_CLASS (code) == tcc_comparison)
    {
      fprintf (dump_file, "Recording on edge %d->%d ", pred_e->src->index,
	       pred_e->dest->index);
      print_generic_expr (dump_file, ops[0], TDF_SLIM);
      fprintf (dump_file, " %s ", get_tree_code_name (code));
      print_generic_expr (dump_file, ops[1], TDF_SLIM);
      fprintf (dump_file, " == %s\n",
	       integer_zerop (result) ? "false" : "true");
    }
  /* The entry itself carries no unconditional result; the value is
     recorded in a single-element vn_pval valid in PRED_E's destination.  */
  vn_nary_op_t vno1 = alloc_vn_nary_op (length, NULL_TREE, value_id);
  init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
  vno1->predicated_values = 1;
  vno1->u.values = (vn_pval *) obstack_alloc (&vn_tables_obstack,
					      sizeof (vn_pval));
  vno1->u.values->next = NULL;
  vno1->u.values->result = result;
  vno1->u.values->n = 1;
  vno1->u.values->valid_dominated_by_p[0] = pred_e->dest->index;
  return vn_nary_op_insert_into (vno1, valid_info->nary);
}
4939 :
static bool
dominated_by_p_w_unex (basic_block bb1, basic_block bb2, bool);

/* Return the value of VNO that is valid in BB, or NULL_TREE if no
   recorded predicate location covers BB.  For a non-predicated VNO
   simply return its result.  When E is non-NULL the query is for the
   predicated value on that edge (E->src == BB).  */

static tree
vn_nary_op_get_predicated_value (vn_nary_op_t vno, basic_block bb,
				 edge e = NULL)
{
  if (! vno->predicated_values)
    return vno->u.result;
  /* Scan all recorded values and all blocks each value is valid in.  */
  for (vn_pval *val = vno->u.values; val; val = val->next)
    for (unsigned i = 0; i < val->n; ++i)
      {
	basic_block cand
	  = BASIC_BLOCK_FOR_FN (cfun, val->valid_dominated_by_p[i]);
	/* Do not handle backedge executability optimistically since
	   when figuring out whether to iterate we do not consider
	   changed predication.
	   When asking for predicated values on an edge avoid looking
	   at edge executability for edges forward in our iteration
	   as well.  */
	if (e && (e->flags & EDGE_DFS_BACK))
	  {
	    if (dominated_by_p (CDI_DOMINATORS, bb, cand))
	      return val->result;
	  }
	else if (dominated_by_p_w_unex (bb, cand, false))
	  return val->result;
      }
  return NULL_TREE;
}
4970 :
4971 : static tree
4972 209731 : vn_nary_op_get_predicated_value (vn_nary_op_t vno, edge e)
4973 : {
4974 0 : return vn_nary_op_get_predicated_value (vno, e->src, e);
4975 : }
4976 :
4977 : /* Insert the rhs of STMT into the current hash table with a value number of
4978 : RESULT. */
4979 :
4980 : static vn_nary_op_t
4981 44468995 : vn_nary_op_insert_stmt (gimple *stmt, tree result)
4982 : {
4983 44468995 : vn_nary_op_t vno1
4984 44468995 : = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
4985 44468995 : result, VN_INFO (result)->value_id);
4986 44468995 : init_vn_nary_op_from_stmt (vno1, as_a <gassign *> (stmt));
4987 44468995 : return vn_nary_op_insert_into (vno1, valid_info->nary);
4988 : }
4989 :
/* Compute a hashcode for PHI operation VP1 and return it.  */

static inline hashval_t
vn_phi_compute_hash (vn_phi_t vp1)
{
  inchash::hash hstate;
  tree phi1op;
  tree type;
  edge e;
  edge_iterator ei;

  hstate.add_int (EDGE_COUNT (vp1->block->preds));
  switch (EDGE_COUNT (vp1->block->preds))
    {
    case 1:
      break;
    case 2:
      /* When this is a PHI node subject to CSE for different blocks
	 avoid hashing the block index.  */
      if (vp1->cclhs)
	break;
      /* Fallthru.  */
    default:
      /* Otherwise the block index is part of the identity.  */
      hstate.add_int (vp1->block->index);
    }

  /* If all PHI arguments are constants we need to distinguish
     the PHI node via its type.  */
  type = vp1->type;
  hstate.merge_hash (vn_hash_type (type));

  FOR_EACH_EDGE (e, ei, vp1->block->preds)
    {
      /* Don't hash backedge values they need to be handled as VN_TOP
	 for optimistic value-numbering.  */
      if (e->flags & EDGE_DFS_BACK)
	continue;

      phi1op = vp1->phiargs[e->dest_idx];
      /* VN_TOP arguments are likewise ignored for hashing.  */
      if (phi1op == VN_TOP)
	continue;
      inchash::add_expr (phi1op, hstate);
    }

  return hstate.end ();
}
5037 :
/* Return true if COND1 and COND2 represent the same condition, set
   *INVERTED_P if one needs to be inverted to make it the same as
   the other.  LHS1/RHS1 are used as COND1's operands and LHS2/RHS2
   as COND2's.  */

static bool
cond_stmts_equal_p (gcond *cond1, tree lhs1, tree rhs1,
		    gcond *cond2, tree lhs2, tree rhs2, bool *inverted_p)
{
  enum tree_code code1 = gimple_cond_code (cond1);
  enum tree_code code2 = gimple_cond_code (cond2);

  /* Canonicalize COND2 towards COND1: try matching the codes directly,
     then with swapped operands, then inverted, then both.  */
  *inverted_p = false;
  if (code1 == code2)
    ;
  else if (code1 == swap_tree_comparison (code2))
    std::swap (lhs2, rhs2);
  else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
    *inverted_p = true;
  else if (code1 == invert_tree_comparison
	    (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
    {
      std::swap (lhs2, rhs2);
      *inverted_p = true;
    }
  else
    return false;

  /* With the codes reconciled the operand pairs must match, either
     directly or - for commutative comparisons - crosswise.  */
  return ((expressions_equal_p (lhs1, lhs2)
	   && expressions_equal_p (rhs1, rhs2))
	  || (commutative_tree_code (code1)
	      && expressions_equal_p (lhs1, rhs2)
	      && expressions_equal_p (rhs1, lhs2)));
}
/* Compare two phi entries for equality, ignoring VN_TOP arguments.
   Used as the equality callback of the PHI hash table.  PHIs in
   different blocks can still compare equal in the restricted
   two-predecessor diamond case handled below.  */

static int
vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
{
  /* Cheap reject via the precomputed hash.  */
  if (vp1->hashcode != vp2->hashcode)
    return false;

  if (vp1->block != vp2->block)
    {
      if (EDGE_COUNT (vp1->block->preds) != EDGE_COUNT (vp2->block->preds))
	return false;

      switch (EDGE_COUNT (vp1->block->preds))
	{
	case 1:
	  /* Single-arg PHIs are just copies.  Fall out of the switch and
	     compare the (single) argument below.  */
	  break;

	case 2:
	  {
	    /* Make sure both PHIs are classified as CSEable, i.e. both
	       have the controlling condition operands recorded.  */
	    if (! vp1->cclhs || ! vp2->cclhs)
	      return false;

	    /* Rule out backedges into the PHI.  */
	    gcc_checking_assert
	      (vp1->block->loop_father->header != vp1->block
	       && vp2->block->loop_father->header != vp2->block);

	    /* If the PHI nodes do not have compatible types
	       they are not the same.  */
	    if (!types_compatible_p (vp1->type, vp2->type))
	      return false;

	    /* If the immediate dominator ends in a switch stmt multiple
	       values may end up in the same PHI arg via intermediate
	       CFG merges.  The asserts below check the dominators end
	       in exactly two-way conditions.  */
	    basic_block idom1
	      = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
	    basic_block idom2
	      = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
	    gcc_checking_assert (EDGE_COUNT (idom1->succs) == 2
				 && EDGE_COUNT (idom2->succs) == 2);

	    /* Verify the controlling stmt is the same.  */
	    gcond *last1 = as_a <gcond *> (*gsi_last_bb (idom1));
	    gcond *last2 = as_a <gcond *> (*gsi_last_bb (idom2));
	    bool inverted_p;
	    if (! cond_stmts_equal_p (last1, vp1->cclhs, vp1->ccrhs,
				      last2, vp2->cclhs, vp2->ccrhs,
				      &inverted_p))
	      return false;

	    /* Get at true/false controlled edges into the PHI.  */
	    edge te1, te2, fe1, fe2;
	    if (! extract_true_false_controlled_edges (idom1, vp1->block,
						       &te1, &fe1)
		|| ! extract_true_false_controlled_edges (idom2, vp2->block,
							  &te2, &fe2))
	      return false;

	    /* Swap edges if the second condition is the inverted of the
	       first.  */
	    if (inverted_p)
	      std::swap (te2, fe2);

	    /* Since we do not know which edge will be executed we have
	       to be careful when matching VN_TOP.  Be conservative and
	       only match VN_TOP == VN_TOP for now, we could allow
	       VN_TOP on the not prevailing PHI though.  See for example
	       PR102920.  */
	    if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
				       vp2->phiargs[te2->dest_idx], false)
		|| ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
					  vp2->phiargs[fe2->dest_idx], false))
	      return false;

	    return true;
	  }

	default:
	  /* PHIs with more than two arguments in different blocks are
	     never considered equal.  */
	  return false;
	}
    }

  /* If the PHI nodes do not have compatible types
     they are not the same.  */
  if (!types_compatible_p (vp1->type, vp2->type))
    return false;

  /* Any phi in the same block will have its arguments in the
     same edge order, because of how we store phi nodes.  */
  unsigned nargs = EDGE_COUNT (vp1->block->preds);
  for (unsigned i = 0; i < nargs; ++i)
    {
      tree phi1op = vp1->phiargs[i];
      tree phi2op = vp2->phiargs[i];
      /* Pointer equality is the common fast path.  */
      if (phi1op == phi2op)
	continue;
      /* The 'false' argument means VN_TOP only matches VN_TOP here.  */
      if (!expressions_equal_p (phi1op, phi2op, false))
	return false;
    }

  return true;
}
5178 :
/* Lookup PHI in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  When BACKEDGES_VARYING_P is
   true, PHI arguments on DFS back edges are kept as-is instead of
   being canonicalized to their value numbers.  */

static tree
vn_phi_lookup (gimple *phi, bool backedges_varying_p)
{
  vn_phi_s **slot;
  struct vn_phi_s *vp1;
  edge e;
  edge_iterator ei;

  /* Build a temporary lookup key on the stack; vn_phi_s has a trailing
     one-element phiargs array, hence the num_args - 1 sizing.  */
  vp1 = XALLOCAVAR (struct vn_phi_s,
		    sizeof (struct vn_phi_s)
		    + (gimple_phi_num_args (phi) - 1) * sizeof (tree));

  /* Canonicalize the SSA_NAME's to their value number.  */
  FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
    {
      tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
      if (TREE_CODE (def) == SSA_NAME
	  && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
	{
	  /* Undefined (non-virtual) values are represented as VN_TOP
	     so they can match any value in vn_phi_eq.  */
	  if (!virtual_operand_p (def)
	      && ssa_undefined_value_p (def, false))
	    def = VN_TOP;
	  else
	    def = SSA_VAL (def);
	}
      /* Store by destination index so same-block PHIs have matching
	 argument order.  */
      vp1->phiargs[e->dest_idx] = def;
    }
  vp1->type = TREE_TYPE (gimple_phi_result (phi));
  vp1->block = gimple_bb (phi);
  /* Extract values of the controlling condition.  Only done for
     two-predecessor blocks that are not loop headers; cclhs/ccrhs
     remaining NULL marks the PHI as not CSEable across blocks.  */
  vp1->cclhs = NULL_TREE;
  vp1->ccrhs = NULL_TREE;
  if (EDGE_COUNT (vp1->block->preds) == 2
      && vp1->block->loop_father->header != vp1->block)
    {
      basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
      if (EDGE_COUNT (idom1->succs) == 2)
	if (gcond *last1 = safe_dyn_cast <gcond *> (*gsi_last_bb (idom1)))
	  {
	    /* ???  We want to use SSA_VAL here.  But possibly not
	       allow VN_TOP.  */
	    vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
	    vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
	  }
    }
  vp1->hashcode = vn_phi_compute_hash (vp1);
  /* Look up only; insertion happens via vn_phi_insert.  */
  slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, NO_INSERT);
  if (!slot)
    return NULL_TREE;
  return (*slot)->result;
}
5234 :
/* Insert PHI into the current hash table with a value number of
   RESULT.  When BACKEDGES_VARYING_P is true, PHI arguments on DFS
   back edges are kept as-is instead of being canonicalized to their
   value numbers.  Returns the inserted entry.  */

static vn_phi_t
vn_phi_insert (gimple *phi, tree result, bool backedges_varying_p)
{
  vn_phi_s **slot;
  /* Allocate the entry on the VN obstack so it lives as long as the
     tables; vn_phi_s has a trailing one-element phiargs array, hence
     the num_args - 1 sizing.  */
  vn_phi_t vp1 = (vn_phi_t) obstack_alloc (&vn_tables_obstack,
					   sizeof (vn_phi_s)
					   + ((gimple_phi_num_args (phi) - 1)
					      * sizeof (tree)));
  edge e;
  edge_iterator ei;

  /* Canonicalize the SSA_NAME's to their value number.  This must
     mirror what vn_phi_lookup does so lookups can find the entry.  */
  FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
    {
      tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
      if (TREE_CODE (def) == SSA_NAME
	  && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
	{
	  /* Undefined (non-virtual) values are represented as VN_TOP.  */
	  if (!virtual_operand_p (def)
	      && ssa_undefined_value_p (def, false))
	    def = VN_TOP;
	  else
	    def = SSA_VAL (def);
	}
      vp1->phiargs[e->dest_idx] = def;
    }
  vp1->value_id = VN_INFO (result)->value_id;
  vp1->type = TREE_TYPE (gimple_phi_result (phi));
  vp1->block = gimple_bb (phi);
  /* Extract values of the controlling condition.  Only done for
     two-predecessor blocks that are not loop headers; cclhs/ccrhs
     remaining NULL marks the PHI as not CSEable across blocks.  */
  vp1->cclhs = NULL_TREE;
  vp1->ccrhs = NULL_TREE;
  if (EDGE_COUNT (vp1->block->preds) == 2
      && vp1->block->loop_father->header != vp1->block)
    {
      basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
      if (EDGE_COUNT (idom1->succs) == 2)
	if (gcond *last1 = safe_dyn_cast <gcond *> (*gsi_last_bb (idom1)))
	  {
	    /* ???  We want to use SSA_VAL here.  But possibly not
	       allow VN_TOP.  */
	    vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
	    vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
	  }
    }
  vp1->result = result;
  vp1->hashcode = vn_phi_compute_hash (vp1);

  slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
  /* An equivalent PHI must not already be in the table.  */
  gcc_assert (!*slot);

  *slot = vp1;
  /* Chain the new entry for later unwinding of insertions.  */
  vp1->next = last_inserted_phi;
  last_inserted_phi = vp1;
  return vp1;
}
5294 :
5295 :
/* Return true if BB1 is dominated by BB2 taking into account edges
   that are not executable.  When ALLOW_BACK is false consider not
   executable backedges as executable.  The function conservatively
   answers false; it skips over single executable predecessor or
   successor chains to strengthen the plain dominance query.  */

static bool
dominated_by_p_w_unex (basic_block bb1, basic_block bb2, bool allow_back)
{
  edge_iterator ei;
  edge e;

  /* Plain CFG dominance already implies the result.  */
  if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
    return true;

  /* Before iterating we'd like to know if there exists a
     (executable) path from bb2 to bb1 at all, if not we can
     directly return false.  For now simply iterate once.  */

  /* Iterate to the single executable bb1 predecessor.  Note a
     backedge counts as executable when !ALLOW_BACK even if its
     EDGE_EXECUTABLE flag is not set.  */
  if (EDGE_COUNT (bb1->preds) > 1)
    {
      edge prede = NULL;
      FOR_EACH_EDGE (e, ei, bb1->preds)
	if ((e->flags & EDGE_EXECUTABLE)
	    || (!allow_back && (e->flags & EDGE_DFS_BACK)))
	  {
	    /* More than one executable predecessor - give up.  */
	    if (prede)
	      {
		prede = NULL;
		break;
	      }
	    prede = e;
	  }
      if (prede)
	{
	  bb1 = prede->src;

	  /* Re-do the dominance check with changed bb1.  */
	  if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
	    return true;
	}
    }

  /* Iterate to the single executable bb2 successor.  */
  if (EDGE_COUNT (bb2->succs) > 1)
    {
      edge succe = NULL;
      FOR_EACH_EDGE (e, ei, bb2->succs)
	if ((e->flags & EDGE_EXECUTABLE)
	    || (!allow_back && (e->flags & EDGE_DFS_BACK)))
	  {
	    /* More than one executable successor - give up.  */
	    if (succe)
	      {
		succe = NULL;
		break;
	      }
	    succe = e;
	  }
      if (succe
	  /* Limit the number of edges we check, we should bring in
	     context from the iteration and compute the single
	     executable incoming edge when visiting a block.  */
	  && EDGE_COUNT (succe->dest->preds) < 8)
	{
	  /* Verify the reached block is only reached through succe.
	     If there is only one edge we can spare us the dominator
	     check and iterate directly.  */
	  if (EDGE_COUNT (succe->dest->preds) > 1)
	    {
	      FOR_EACH_EDGE (e, ei, succe->dest->preds)
		if (e != succe
		    && ((e->flags & EDGE_EXECUTABLE)
			|| (!allow_back && (e->flags & EDGE_DFS_BACK))))
		  {
		    /* Another executable edge reaches the block - the
		       destination is not controlled by bb2 alone.  */
		    succe = NULL;
		    break;
		  }
	    }
	  if (succe)
	    {
	      bb2 = succe->dest;

	      /* Re-do the dominance check with changed bb2.  */
	      if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
		return true;
	    }
	}
    }
  /* Iterate to the single successor of bb2 with only a single executable
     incoming edge.  */
  else if (EDGE_COUNT (bb2->succs) == 1
	   && EDGE_COUNT (single_succ (bb2)->preds) > 1
	   /* Limit the number of edges we check, we should bring in
	      context from the iteration and compute the single
	      executable incoming edge when visiting a block.  */
	   && EDGE_COUNT (single_succ (bb2)->preds) < 8)
    {
      edge prede = NULL;
      FOR_EACH_EDGE (e, ei, single_succ (bb2)->preds)
	if ((e->flags & EDGE_EXECUTABLE)
	    || (!allow_back && (e->flags & EDGE_DFS_BACK)))
	  {
	    /* More than one executable incoming edge - give up.  */
	    if (prede)
	      {
		prede = NULL;
		break;
	      }
	    prede = e;
	  }
      /* We might actually get to a query with BB2 not visited yet when
	 we're querying for a predicated value.  */
      if (prede && prede->src == bb2)
	{
	  bb2 = prede->dest;

	  /* Re-do the dominance check with changed bb2.  */
	  if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
	    return true;
	}
    }

  /* We could now iterate updating bb1 / bb2.  */
  return false;
}
5419 :
/* Set the value number of FROM to TO, return true if it has changed
   as a result.  This implements the VN lattice update including the
   transitions that are forced to VARYING (value number FROM itself)
   to guarantee termination of the iteration.  */

static inline bool
set_ssa_val_to (tree from, tree to)
{
  vn_ssa_aux_t from_info = VN_INFO (from);
  tree currval = from_info->valnum; // SSA_VAL (from)
  poly_int64 toff, coff;
  bool curr_undefined = false;
  bool curr_invariant = false;

  /* The only thing we allow as value numbers are ssa_names
     and invariants.  So assert that here.  We don't allow VN_TOP
     as visiting a stmt should produce a value-number other than
     that.
     ???  Still VN_TOP can happen for unreachable code, so force
     it to varying in that case.  Not all code is prepared to
     get VN_TOP on valueization.  */
  if (to == VN_TOP)
    {
      /* ???  When iterating and visiting PHI <undef, backedge-value>
	 for the first time we rightfully get VN_TOP and we need to
	 preserve that to optimize for example gcc.dg/tree-ssa/ssa-sccvn-2.c.
	 With SCCVN we were simply lucky we iterated the other PHI
	 cycles first and thus visited the backedge-value DEF.  */
      if (currval == VN_TOP)
	goto set_and_exit;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Forcing value number to varying on "
		 "receiving VN_TOP\n");
      to = from;
    }

  /* TO must be either FROM itself (varying), an SSA name that is its
     own value number, or an invariant.  */
  gcc_checking_assert (to != NULL_TREE
		       && ((TREE_CODE (to) == SSA_NAME
			    && (to == from || SSA_VAL (to) == to))
			   || is_gimple_min_invariant (to)));

  if (from != to)
    {
      /* Once varying (value number equal to the name itself), a name
	 stays varying.  */
      if (currval == from)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Not changing value number of ");
	      print_generic_expr (dump_file, from);
	      fprintf (dump_file, " from VARYING to ");
	      print_generic_expr (dump_file, to);
	      fprintf (dump_file, "\n");
	    }
	  return false;
	}
      curr_invariant = is_gimple_min_invariant (currval);
      curr_undefined = (TREE_CODE (currval) == SSA_NAME
			&& !virtual_operand_p (currval)
			&& ssa_undefined_value_p (currval, false));
      /* Disallow the transition non-constant -> constant; force
	 VARYING instead to keep the lattice monotonic.  */
      if (currval != VN_TOP
	  && !curr_invariant
	  && !curr_undefined
	  && is_gimple_min_invariant (to))
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Forcing VARYING instead of changing "
		       "value number of ");
	      print_generic_expr (dump_file, from);
	      fprintf (dump_file, " from ");
	      print_generic_expr (dump_file, currval);
	      fprintf (dump_file, " (non-constant) to ");
	      print_generic_expr (dump_file, to);
	      fprintf (dump_file, " (constant)\n");
	    }
	  to = from;
	}
      /* Likewise disallow defined -> undefined.  */
      else if (currval != VN_TOP
	       && !curr_undefined
	       && TREE_CODE (to) == SSA_NAME
	       && !virtual_operand_p (to)
	       && ssa_undefined_value_p (to, false))
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Forcing VARYING instead of changing "
		       "value number of ");
	      print_generic_expr (dump_file, from);
	      fprintf (dump_file, " from ");
	      print_generic_expr (dump_file, currval);
	      fprintf (dump_file, " (non-undefined) to ");
	      print_generic_expr (dump_file, to);
	      fprintf (dump_file, " (undefined)\n");
	    }
	  to = from;
	}
      /* Never value-number to an SSA name occurring in an abnormal
	 PHI.  */
      else if (TREE_CODE (to) == SSA_NAME
	       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
	to = from;
    }

set_and_exit:
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Setting value number of ");
      print_generic_expr (dump_file, from);
      fprintf (dump_file, " to ");
      print_generic_expr (dump_file, to);
    }

  if (currval != to
      && !operand_equal_p (currval, to, 0)
      /* Different undefined SSA names are not actually different.  See
	 PR82320 for a testcase were we'd otherwise not terminate iteration.  */
      && !(curr_undefined
	   && TREE_CODE (to) == SSA_NAME
	   && !virtual_operand_p (to)
	   && ssa_undefined_value_p (to, false))
      /* ???  For addresses involving volatile objects or types operand_equal_p
	 does not reliably detect ADDR_EXPRs as equal.  We know we are only
	 getting invariant gimple addresses here, so can use
	 get_addr_base_and_unit_offset to do this comparison.  */
      && !(TREE_CODE (currval) == ADDR_EXPR
	   && TREE_CODE (to) == ADDR_EXPR
	   && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
	       == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
	   && known_eq (coff, toff)))
    {
      if (to != from
	  && currval != VN_TOP
	  && !curr_undefined
	  /* We do not want to allow lattice transitions from one value
	     to another since that may lead to not terminating iteration
	     (see PR95049).  Since there's no convenient way to check
	     for the allowed transition of VAL -> PHI (loop entry value,
	     same on two PHIs, to same PHI result) we restrict the check
	     to invariants.  */
	  && curr_invariant
	  && is_gimple_min_invariant (to))
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, " forced VARYING");
	  to = from;
	}
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " (changed)\n");
      from_info->valnum = to;
      return true;
    }
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\n");
  return false;
}
5571 :
5572 : /* Set all definitions in STMT to value number to themselves.
5573 : Return true if a value number changed. */
5574 :
5575 : static bool
5576 280819123 : defs_to_varying (gimple *stmt)
5577 : {
5578 280819123 : bool changed = false;
5579 280819123 : ssa_op_iter iter;
5580 280819123 : def_operand_p defp;
5581 :
5582 309913782 : FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
5583 : {
5584 29094659 : tree def = DEF_FROM_PTR (defp);
5585 29094659 : changed |= set_ssa_val_to (def, def);
5586 : }
5587 280819123 : return changed;
5588 : }
5589 :
5590 : /* Visit a copy between LHS and RHS, return true if the value number
5591 : changed. */
5592 :
5593 : static bool
5594 7867931 : visit_copy (tree lhs, tree rhs)
5595 : {
5596 : /* Valueize. */
5597 7867931 : rhs = SSA_VAL (rhs);
5598 :
5599 7867931 : return set_ssa_val_to (lhs, rhs);
5600 : }
5601 :
5602 : /* Lookup a value for OP in type WIDE_TYPE where the value in type of OP
5603 : is the same. */
5604 :
5605 : static tree
5606 2424186 : valueized_wider_op (tree wide_type, tree op, bool allow_truncate)
5607 : {
5608 2424186 : if (TREE_CODE (op) == SSA_NAME)
5609 2130137 : op = vn_valueize (op);
5610 :
5611 : /* Either we have the op widened available. */
5612 2424186 : tree ops[3] = {};
5613 2424186 : ops[0] = op;
5614 2424186 : tree tem = vn_nary_op_lookup_pieces (1, NOP_EXPR,
5615 : wide_type, ops, NULL);
5616 2424186 : if (tem)
5617 : return tem;
5618 :
5619 : /* Or the op is truncated from some existing value. */
5620 2141145 : if (allow_truncate && TREE_CODE (op) == SSA_NAME)
5621 : {
5622 535467 : gimple *def = SSA_NAME_DEF_STMT (op);
5623 535467 : if (is_gimple_assign (def)
5624 535467 : && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
5625 : {
5626 270681 : tem = gimple_assign_rhs1 (def);
5627 270681 : if (useless_type_conversion_p (wide_type, TREE_TYPE (tem)))
5628 : {
5629 179441 : if (TREE_CODE (tem) == SSA_NAME)
5630 179441 : tem = vn_valueize (tem);
5631 179441 : return tem;
5632 : }
5633 : }
5634 : }
5635 :
5636 : /* For constants simply extend it. */
5637 1961704 : if (TREE_CODE (op) == INTEGER_CST)
5638 327169 : return wide_int_to_tree (wide_type, wi::to_widest (op));
5639 :
5640 : return NULL_TREE;
5641 : }
5642 :
/* Visit a nary operator RHS, value number it, and return true if the
   value number of LHS has changed as a result.  First tries a plain
   lookup (including predicated values valid in the current block),
   then several special patterns matching redundancies across types;
   if nothing matches, LHS becomes its own value number and the stmt
   is inserted into the nary table.  */

static bool
visit_nary_op (tree lhs, gassign *stmt)
{
  vn_nary_op_t vnresult;
  tree result = vn_nary_op_lookup_stmt (stmt, &vnresult);
  /* An entry without a result may still carry a value predicated on
     the current block.  */
  if (! result && vnresult)
    result = vn_nary_op_get_predicated_value (vnresult, gimple_bb (stmt));
  if (result)
    return set_ssa_val_to (lhs, result);

  /* Do some special pattern matching for redundancies of operations
     in different types.  */
  enum tree_code code = gimple_assign_rhs_code (stmt);
  tree type = TREE_TYPE (lhs);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  switch (code)
    {
    CASE_CONVERT:
      /* Match arithmetic done in a different type where we can easily
	 substitute the result from some earlier sign-changed or widened
	 operation.  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (rhs1) == SSA_NAME
	  /* We only handle sign-changes, zero-extension -> & mask or
	     sign-extension if we know the inner operation doesn't
	     overflow.  */
	  && (((TYPE_UNSIGNED (TREE_TYPE (rhs1))
		|| (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
		    && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (rhs1))))
	       && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (rhs1)))
	      || TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (rhs1))))
	{
	  gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
	  if (def
	      && (gimple_assign_rhs_code (def) == PLUS_EXPR
		  || gimple_assign_rhs_code (def) == MINUS_EXPR
		  || gimple_assign_rhs_code (def) == MULT_EXPR))
	    {
	      tree ops[3] = {};
	      /* When requiring a sign-extension we cannot model a
		 previous truncation with a single op so don't bother.  */
	      bool allow_truncate = TYPE_UNSIGNED (TREE_TYPE (rhs1));
	      /* Either we have the op widened available.  */
	      ops[0] = valueized_wider_op (type, gimple_assign_rhs1 (def),
					   allow_truncate);
	      if (ops[0])
		ops[1] = valueized_wider_op (type, gimple_assign_rhs2 (def),
					     allow_truncate);
	      if (ops[0] && ops[1])
		{
		  /* Look up the inner operation redone in TYPE with the
		     widened operands.  */
		  ops[0] = vn_nary_op_lookup_pieces
		      (2, gimple_assign_rhs_code (def), type, ops, NULL);
		  /* We have wider operation available.  */
		  if (ops[0]
		      /* If the leader is a wrapping operation we can
			 insert it for code hoisting w/o introducing
			 undefined overflow.  If it is not it has to
			 be available.  See PR86554.  */
		      && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (ops[0]))
			  || (rpo_avail && vn_context_bb
			      && rpo_avail->eliminate_avail (vn_context_bb,
							     ops[0]))))
		    {
		      unsigned lhs_prec = TYPE_PRECISION (type);
		      unsigned rhs_prec = TYPE_PRECISION (TREE_TYPE (rhs1));
		      if (lhs_prec == rhs_prec
			  || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
			      && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (rhs1))))
			{
			  /* Sign-change or known non-overflowing widening:
			     a plain conversion of the leader suffices.  */
			  gimple_match_op match_op (gimple_match_cond::UNCOND,
						    NOP_EXPR, type, ops[0]);
			  result = vn_nary_build_or_lookup (&match_op);
			  if (result)
			    {
			      bool changed = set_ssa_val_to (lhs, result);
			      if (TREE_CODE (result) == SSA_NAME)
				vn_nary_op_insert_stmt (stmt, result);
			      return changed;
			    }
			}
		      else
			{
			  /* Zero-extension: mask the leader down to the
			     narrow precision.  */
			  tree mask = wide_int_to_tree
			      (type, wi::mask (rhs_prec, false, lhs_prec));
			  gimple_match_op match_op (gimple_match_cond::UNCOND,
						    BIT_AND_EXPR,
						    TREE_TYPE (lhs),
						    ops[0], mask);
			  result = vn_nary_build_or_lookup (&match_op);
			  if (result)
			    {
			      bool changed = set_ssa_val_to (lhs, result);
			      if (TREE_CODE (result) == SSA_NAME)
				vn_nary_op_insert_stmt (stmt, result);
			      return changed;
			    }
			}
		    }
		}
	    }
	}
      break;
    case BIT_AND_EXPR:
      /* For load & constant-mask try a masked reference lookup that
	 can look through partially clobbering stores.  Restricted to
	 byte-granular little/big-endian-consistent targets.  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (rhs1) == SSA_NAME
	  && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST
	  && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)
	  && default_vn_walk_kind != VN_NOWALK
	  && CHAR_BIT == 8
	  && BITS_PER_UNIT == 8
	  && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
	  && TYPE_PRECISION (type) <= vn_walk_cb_data::bufsize * BITS_PER_UNIT
	  && !integer_all_onesp (gimple_assign_rhs2 (stmt))
	  && !integer_zerop (gimple_assign_rhs2 (stmt)))
	{
	  gassign *ass = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
	  if (ass
	      && !gimple_has_volatile_ops (ass)
	      && vn_get_stmt_kind (ass) == VN_REFERENCE)
	    {
	      tree last_vuse = gimple_vuse (ass);
	      tree op = gimple_assign_rhs1 (ass);
	      /* Note this shadows the outer RESULT on purpose.  */
	      tree result = vn_reference_lookup (op, gimple_vuse (ass),
						 default_vn_walk_kind,
						 NULL, true, &last_vuse,
						 gimple_assign_rhs2 (stmt));
	      if (result
		  && useless_type_conversion_p (TREE_TYPE (result),
						TREE_TYPE (op)))
		return set_ssa_val_to (lhs, result);
	    }
	}
      break;
    case BIT_FIELD_REF:
      /* For a BIT_FIELD_REF of a load result, lookup a BIT_FIELD_REF
	 of the loaded reference directly.  */
      if (TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
	{
	  tree op0 = TREE_OPERAND (rhs1, 0);
	  gassign *ass = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (op0));
	  if (ass
	      && !gimple_has_volatile_ops (ass)
	      && vn_get_stmt_kind (ass) == VN_REFERENCE)
	    {
	      tree last_vuse = gimple_vuse (ass);
	      tree op = gimple_assign_rhs1 (ass);
	      /* Avoid building invalid and unexpected refs.  */
	      if (TREE_CODE (op) != TARGET_MEM_REF
		  && TREE_CODE (op) != BIT_FIELD_REF
		  && TREE_CODE (op) != REALPART_EXPR
		  && TREE_CODE (op) != IMAGPART_EXPR)
		{
		  /* Note this shadows the outer OP.  */
		  tree op = build3 (BIT_FIELD_REF, TREE_TYPE (rhs1),
				    gimple_assign_rhs1 (ass),
				    TREE_OPERAND (rhs1, 1),
				    TREE_OPERAND (rhs1, 2));
		  tree result = vn_reference_lookup (op, gimple_vuse (ass),
						     default_vn_walk_kind,
						     NULL, true, &last_vuse);
		  if (result
		      && useless_type_conversion_p (type, TREE_TYPE (result)))
		    return set_ssa_val_to (lhs, result);
		  /* A same-size result can still be used via a
		     VIEW_CONVERT_EXPR.  */
		  else if (result
			   && TYPE_SIZE (type)
			   && TYPE_SIZE (TREE_TYPE (result))
			   && operand_equal_p (TYPE_SIZE (type),
					       TYPE_SIZE (TREE_TYPE (result))))
		    {
		      gimple_match_op match_op (gimple_match_cond::UNCOND,
						VIEW_CONVERT_EXPR,
						type, result);
		      result = vn_nary_build_or_lookup (&match_op);
		      if (result)
			{
			  bool changed = set_ssa_val_to (lhs, result);
			  if (TREE_CODE (result) == SSA_NAME)
			    vn_nary_op_insert_stmt (stmt, result);
			  return changed;
			}
		    }
		}
	    }
	}
      break;
    case TRUNC_DIV_EXPR:
      /* The negation trick below is only valid for signed division.  */
      if (TYPE_UNSIGNED (type))
	break;
      /* Fallthru.  */
    case RDIV_EXPR:
    case MULT_EXPR:
      /* Match up ([-]a){/,*}([-])b with v=a{/,*}b, replacing it with -v.  */
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (type))
	{
	  tree rhs[2];
	  rhs[0] = rhs1;
	  rhs[1] = gimple_assign_rhs2 (stmt);
	  /* Try negating either operand in turn.  */
	  for (unsigned i = 0; i <= 1; ++i)
	    {
	      unsigned j = i == 0 ? 1 : 0;
	      tree ops[2];
	      gimple_match_op match_op (gimple_match_cond::UNCOND,
					NEGATE_EXPR, type, rhs[i]);
	      /* Lookup only - do not insert the negation.  */
	      ops[i] = vn_nary_build_or_lookup_1 (&match_op, false, true);
	      ops[j] = rhs[j];
	      if (ops[i]
		  && (ops[0] = vn_nary_op_lookup_pieces (2, code,
							 type, ops, NULL)))
		{
		  /* Found a{/,*}b with one operand negated - the result
		     is its negation.  */
		  gimple_match_op match_op (gimple_match_cond::UNCOND,
					    NEGATE_EXPR, type, ops[0]);
		  result = vn_nary_build_or_lookup_1 (&match_op, true, false);
		  if (result)
		    {
		      bool changed = set_ssa_val_to (lhs, result);
		      if (TREE_CODE (result) == SSA_NAME)
			vn_nary_op_insert_stmt (stmt, result);
		      return changed;
		    }
		}
	    }
	}
      break;
    case LSHIFT_EXPR:
      /* For X << C, use the value number of X * (1 << C).  */
      if (INTEGRAL_TYPE_P (type)
	  && TYPE_OVERFLOW_WRAPS (type)
	  && !TYPE_SATURATING (type))
	{
	  tree rhs2 = gimple_assign_rhs2 (stmt);
	  if (TREE_CODE (rhs2) == INTEGER_CST
	      && tree_fits_uhwi_p (rhs2)
	      && tree_to_uhwi (rhs2) < TYPE_PRECISION (type))
	    {
	      wide_int w = wi::set_bit_in_zero (tree_to_uhwi (rhs2),
						TYPE_PRECISION (type));
	      gimple_match_op match_op (gimple_match_cond::UNCOND,
					MULT_EXPR, type, rhs1,
					wide_int_to_tree (type, w));
	      result = vn_nary_build_or_lookup (&match_op);
	      if (result)
		{
		  bool changed = set_ssa_val_to (lhs, result);
		  if (TREE_CODE (result) == SSA_NAME)
		    vn_nary_op_insert_stmt (stmt, result);
		  return changed;
		}
	    }
	}
      break;
    default:
      break;
    }

  /* Nothing matched - LHS is its own value number; record the stmt so
     later identical stmts can CSE to it.  */
  bool changed = set_ssa_val_to (lhs, lhs);
  vn_nary_op_insert_stmt (stmt, lhs);
  return changed;
}
5901 :
5902 : /* Visit a call STMT storing into LHS. Return true if the value number
5903 : of the LHS has changed as a result. */
5904 :
5905 : static bool
5906 8544388 : visit_reference_op_call (tree lhs, gcall *stmt)
5907 : {
5908 8544388 : bool changed = false;
5909 8544388 : struct vn_reference_s vr1;
5910 8544388 : vn_reference_t vnresult = NULL;
5911 8544388 : tree vdef = gimple_vdef (stmt);
5912 8544388 : modref_summary *summary;
5913 :
5914 : /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
5915 8544388 : if (lhs && TREE_CODE (lhs) != SSA_NAME)
5916 4521134 : lhs = NULL_TREE;
5917 :
5918 8544388 : vn_reference_lookup_call (stmt, &vnresult, &vr1);
5919 :
5920 : /* If the lookup did not succeed for pure functions try to use
5921 : modref info to find a candidate to CSE to. */
5922 8544388 : const unsigned accesses_limit = 8;
5923 8544388 : if (!vnresult
5924 7905788 : && !vdef
5925 7905788 : && lhs
5926 2795728 : && gimple_vuse (stmt)
5927 10093362 : && (((summary = get_modref_function_summary (stmt, NULL))
5928 208582 : && !summary->global_memory_read
5929 81008 : && summary->load_accesses < accesses_limit)
5930 1468100 : || gimple_call_flags (stmt) & ECF_CONST))
5931 : {
5932 : /* First search if we can do someting useful and build a
5933 : vector of all loads we have to check. */
5934 81607 : bool unknown_memory_access = false;
5935 81607 : auto_vec<ao_ref, accesses_limit> accesses;
 : /* SUMMARY may be NULL here when we entered the guard above via
 : the ECF_CONST alternative. */
5936 81607 : unsigned load_accesses = summary ? summary->load_accesses : 0;
5937 81607 : if (!unknown_memory_access)
5938 : /* Add loads done as part of setting up the call arguments.
5939 : That's also necessary for CONST functions which will
5940 : not have a modref summary. */
5941 237147 : for (unsigned i = 0; i < gimple_call_num_args (stmt); ++i)
5942 : {
5943 155548 : tree arg = gimple_call_arg (stmt, i);
5944 155548 : if (TREE_CODE (arg) != SSA_NAME
5945 155548 : && !is_gimple_min_invariant (arg))
5946 : {
 : /* Reserve room for the modref loads added below. */
5947 61770 : if (accesses.length () >= accesses_limit - load_accesses)
5948 : {
5949 : unknown_memory_access = true;
5950 : break;
5951 : }
5952 30877 : accesses.quick_grow (accesses.length () + 1);
5953 30877 : ao_ref_init (&accesses.last (), arg);
5954 : }
5955 : }
5956 81607 : if (summary && !unknown_memory_access)
5957 : {
5958 : /* Add loads as analyzed by IPA modref. */
5959 281249 : for (auto base_node : summary->loads->bases)
5960 70496 : if (unknown_memory_access)
5961 : break;
5962 287235 : else for (auto ref_node : base_node->refs)
5963 76590 : if (unknown_memory_access)
5964 : break;
5965 317185 : else for (auto access_node : ref_node->accesses)
5966 : {
5967 207312 : accesses.quick_grow (accesses.length () + 1);
5968 103656 : ao_ref *r = &accesses.last ();
5969 103656 : if (!access_node.get_ao_ref (stmt, r))
5970 : {
5971 : /* Initialize a ref based on the argument and
5972 : unknown offset if possible. */
5973 16205 : tree arg = access_node.get_call_arg (stmt);
5974 16205 : if (arg && TREE_CODE (arg) == SSA_NAME)
5975 2745 : arg = SSA_VAL (arg);
5976 2745 : if (arg
5977 16195 : && TREE_CODE (arg) == ADDR_EXPR
5978 13456 : && (arg = get_base_address (arg))
5979 16201 : && DECL_P (arg))
5980 : {
5981 0 : ao_ref_init (r, arg);
5982 0 : r->ref = NULL_TREE;
5983 0 : r->base = arg;
5984 : }
5985 : else
5986 : {
 : /* Cannot represent this access; give up on the
 : whole modref-based CSE attempt. */
5987 : unknown_memory_access = true;
5988 : break;
5989 : }
5990 : }
5991 87451 : r->base_alias_set = base_node->base;
5992 87451 : r->ref_alias_set = ref_node->ref;
5993 : }
5994 : }
5995 :
5996 : /* Walk the VUSE->VDEF chain optimistically trying to find an entry
5997 : for the call in the hashtable. */
5998 81607 : unsigned limit = (unknown_memory_access
5999 81607 : ? 0
6000 65394 : : (param_sccvn_max_alias_queries_per_access
6001 65394 : / (accesses.length () + 1)))
 : /* Remember the call's own vuse and hash; they are restored after
 : the walk so the insertion at the end uses them. */;
6002 81607 : tree saved_vuse = vr1.vuse;
6003 81607 : hashval_t saved_hashcode = vr1.hashcode;
6004 421007 : while (limit > 0 && !vnresult && !SSA_NAME_IS_DEFAULT_DEF (vr1.vuse))
6005 : {
 : /* Re-hash incrementally by swapping the vuse contribution. */
6006 361948 : vr1.hashcode = vr1.hashcode - SSA_NAME_VERSION (vr1.vuse);
6007 361948 : gimple *def = SSA_NAME_DEF_STMT (vr1.vuse);
6008 : /* ??? We could use fancy stuff like in walk_non_aliased_vuses, but
6009 : do not bother for now. */
6010 361948 : if (is_a <gphi *> (def))
6011 : break;
6012 678800 : vr1.vuse = vuse_ssa_val (gimple_vuse (def));
6013 339400 : vr1.hashcode = vr1.hashcode + SSA_NAME_VERSION (vr1.vuse);
6014 339400 : vn_reference_lookup_1 (&vr1, &vnresult);
6015 339400 : limit--;
6016 : }
6017 :
6018 : /* If we found a candidate to CSE to verify it is valid. */
6019 81607 : if (vnresult && !accesses.is_empty ())
6020 : {
6021 1909 : tree vuse = vuse_ssa_val (gimple_vuse (stmt));
 : /* Re-walk the chain checking none of the collected loads is
 : clobbered between the candidate and this call. */
6022 7116 : while (vnresult && vuse != vr1.vuse)
6023 : {
6024 3298 : gimple *def = SSA_NAME_DEF_STMT (vuse);
6025 17357 : for (auto &ref : accesses)
6026 : {
6027 : /* ??? stmt_may_clobber_ref_p_1 does per stmt constant
6028 : analysis overhead that we might be able to cache. */
6029 9198 : if (stmt_may_clobber_ref_p_1 (def, &ref, true))
6030 : {
6031 1735 : vnresult = NULL;
6032 1735 : break;
6033 : }
6034 : }
6035 6596 : vuse = vuse_ssa_val (gimple_vuse (def));
6036 : }
6037 : }
6038 81607 : vr1.vuse = saved_vuse;
6039 81607 : vr1.hashcode = saved_hashcode;
6040 81607 : }
6041 :
6042 8544388 : if (vnresult)
6043 : {
6044 638802 : if (vdef)
6045 : {
6046 173059 : if (vnresult->result_vdef)
6047 173059 : changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
6048 0 : else if (!lhs && gimple_call_lhs (stmt))
6049 : /* If stmt has non-SSA_NAME lhs, value number the vdef to itself,
6050 : as the call still acts as a lhs store. */
6051 0 : changed |= set_ssa_val_to (vdef, vdef);
6052 : else
6053 : /* If the call was discovered to be pure or const reflect
6054 : that as far as possible. */
6055 0 : changed |= set_ssa_val_to (vdef,
6056 : vuse_ssa_val (gimple_vuse (stmt)));
6057 : }
6058 :
6059 638802 : if (!vnresult->result && lhs)
6060 0 : vnresult->result = lhs;
6061 :
6062 638802 : if (vnresult->result && lhs)
6063 108082 : changed |= set_ssa_val_to (lhs, vnresult->result);
6064 : }
6065 : else
6066 : {
 : /* No prior entry: value-number defs conservatively and insert a
 : fresh reference entry for this call. */
6067 7905586 : vn_reference_t vr2;
6068 7905586 : vn_reference_s **slot;
6069 7905586 : tree vdef_val = vdef;
6070 7905586 : if (vdef)
6071 : {
6072 : /* If we value numbered an indirect functions function to
6073 : one not clobbering memory value number its VDEF to its
6074 : VUSE. */
6075 4796494 : tree fn = gimple_call_fn (stmt);
6076 4796494 : if (fn && TREE_CODE (fn) == SSA_NAME)
6077 : {
6078 128576 : fn = SSA_VAL (fn);
6079 128576 : if (TREE_CODE (fn) == ADDR_EXPR
6080 1719 : && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
6081 1719 : && (flags_from_decl_or_type (TREE_OPERAND (fn, 0))
6082 1719 : & (ECF_CONST | ECF_PURE))
6083 : /* If stmt has non-SSA_NAME lhs, value number the
6084 : vdef to itself, as the call still acts as a lhs
6085 : store. */
6086 129699 : && (lhs || gimple_call_lhs (stmt) == NULL_TREE))
6087 2114 : vdef_val = vuse_ssa_val (gimple_vuse (stmt));
6088 : }
6089 4796494 : changed |= set_ssa_val_to (vdef, vdef_val);
6090 : }
6091 7905586 : if (lhs)
6092 3915172 : changed |= set_ssa_val_to (lhs, lhs);
6093 7905586 : vr2 = XOBNEW (&vn_tables_obstack, vn_reference_s);
6094 7905586 : vr2->vuse = vr1.vuse;
6095 : /* As we are not walking the virtual operand chain we know the
6096 : shared_lookup_references are still original so we can re-use
6097 : them here. */
6098 7905586 : vr2->operands = vr1.operands.copy ();
6099 7905586 : vr2->type = vr1.type;
6100 7905586 : vr2->punned = vr1.punned;
6101 7905586 : vr2->set = vr1.set;
6102 7905586 : vr2->offset = vr1.offset;
6103 7905586 : vr2->max_size = vr1.max_size;
6104 7905586 : vr2->base_set = vr1.base_set;
6105 7905586 : vr2->hashcode = vr1.hashcode;
6106 7905586 : vr2->result = lhs;
6107 7905586 : vr2->result_vdef = vdef_val;
6108 7905586 : vr2->value_id = 0;
6109 7905586 : slot = valid_info->references->find_slot_with_hash (vr2, vr2->hashcode,
6110 : INSERT);
6111 7905586 : gcc_assert (!*slot);
6112 7905586 : *slot = vr2;
6113 7905586 : vr2->next = last_inserted_ref;
6114 7905586 : last_inserted_ref = vr2;
6115 : }
6116 :
6117 8544388 : return changed;
6118 : }
6119 :
6120 : /* Visit a load from a reference operator RHS, part of STMT, value number it,
6121 : and return true if the value number of the LHS has changed as a result. */
6122 :
6123 : static bool
6124 33932091 : visit_reference_op_load (tree lhs, tree op, gimple *stmt)
6125 : {
6126 33932091 : bool changed = false;
6127 33932091 : tree result;
6128 33932091 : vn_reference_t res;
6129 :
6130 33932091 : tree vuse = gimple_vuse (stmt);
 : /* NOTE(review): the lookup below can update LAST_VUSE; when its value
 : ends up differing from VUSE the reference is inserted under both
 : vuses further down. */
6131 33932091 : tree last_vuse = vuse;
6132 33932091 : result = vn_reference_lookup (op, vuse, default_vn_walk_kind, &res, true, &last_vuse);
6133 :
6134 : /* We handle type-punning through unions by value-numbering based
6135 : on offset and size of the access. Be prepared to handle a
6136 : type-mismatch here via creating a VIEW_CONVERT_EXPR. */
6137 33932091 : if (result
6138 33932091 : && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
6139 : {
6140 18023 : if (CONSTANT_CLASS_P (result))
6141 4165 : result = const_unop (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
6142 : else
6143 : {
6144 : /* We will be setting the value number of lhs to the value number
6145 : of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
6146 : So first simplify and lookup this expression to see if it
6147 : is already available. */
6148 13858 : gimple_match_op res_op (gimple_match_cond::UNCOND,
6149 13858 : VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
6150 13858 : result = vn_nary_build_or_lookup (&res_op);
6151 13858 : if (result
6152 13852 : && TREE_CODE (result) == SSA_NAME
6153 26024 : && VN_INFO (result)->needs_insertion)
6154 : /* Track whether this is the canonical expression for different
6155 : typed loads. We use that as a stopgap measure for code
6156 : hoisting when dealing with floating point loads. */
6157 10938 : res->punned = true;
6158 : }
6159 :
6160 : /* When building the conversion fails avoid inserting the reference
6161 : again. */
6162 18023 : if (!result)
6163 6 : return set_ssa_val_to (lhs, lhs);
6164 : }
6165 :
6166 33914068 : if (result)
6167 5394802 : changed = set_ssa_val_to (lhs, result);
6168 : else
6169 : {
 : /* Nothing found: the load values to itself and is recorded so
 : later identical loads can CSE to it. */
6170 28537283 : changed = set_ssa_val_to (lhs, lhs);
6171 28537283 : vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
6172 28537283 : if (vuse && SSA_VAL (last_vuse) != SSA_VAL (vuse))
6173 : {
6174 8735359 : if (dump_file && (dump_flags & TDF_DETAILS))
6175 : {
6176 23115 : fprintf (dump_file, "Using extra use virtual operand ");
6177 23115 : print_generic_expr (dump_file, last_vuse);
6178 23115 : fprintf (dump_file, "\n");
6179 : }
6180 8735359 : vn_reference_insert (op, lhs, vuse, NULL_TREE);
6181 : }
6182 : }
6183 :
6184 : return changed;
6185 : }
6186 :
6187 :
6188 : /* Visit a store to a reference operator LHS, part of STMT, value number it,
6189 : and return true if the value number of the LHS has changed as a result. */
6190 :
6191 : static bool
6192 32421606 : visit_reference_op_store (tree lhs, tree op, gimple *stmt)
6193 : {
6194 32421606 : bool changed = false;
6195 32421606 : vn_reference_t vnresult = NULL;
6196 32421606 : tree assign;
6197 32421606 : bool resultsame = false;
6198 32421606 : tree vuse = gimple_vuse (stmt);
6199 32421606 : tree vdef = gimple_vdef (stmt);
6200 :
6201 32421606 : if (TREE_CODE (op) == SSA_NAME)
6202 14679240 : op = SSA_VAL (op);
6203 :
6204 : /* First we want to lookup using the *vuses* from the store and see
6205 : if there the last store to this location with the same address
6206 : had the same value.
6207 :
6208 : The vuses represent the memory state before the store. If the
6209 : memory state, address, and value of the store is the same as the
6210 : last store to this location, then this store will produce the
6211 : same memory state as that store.
6212 :
6213 : In this case the vdef versions for this store are value numbered to those
6214 : vuse versions, since they represent the same memory state after
6215 : this store.
6216 :
6217 : Otherwise, the vdefs for the store are used when inserting into
6218 : the table, since the store generates a new memory state. */
6219 :
6220 32421606 : vn_reference_lookup (lhs, vuse, VN_NOWALK, &vnresult, false);
6221 32421606 : if (vnresult
6222 1661760 : && vnresult->result)
6223 : {
6224 1661760 : tree result = vnresult->result;
6225 1661760 : gcc_checking_assert (TREE_CODE (result) != SSA_NAME
6226 : || result == SSA_VAL (result));
6227 1661760 : resultsame = expressions_equal_p (result, op);
6228 1661760 : if (resultsame)
6229 : {
6230 : /* If the TBAA state isn't compatible for downstream reads
6231 : we cannot value-number the VDEFs the same. */
6232 51737 : ao_ref lhs_ref;
6233 51737 : ao_ref_init (&lhs_ref, lhs);
6234 51737 : alias_set_type set = ao_ref_alias_set (&lhs_ref);
6235 51737 : alias_set_type base_set = ao_ref_base_alias_set (&lhs_ref);
6236 51737 : if ((vnresult->set != set
6237 902 : && ! alias_set_subset_of (set, vnresult->set))
6238 52329 : || (vnresult->base_set != base_set
6239 6347 : && ! alias_set_subset_of (base_set, vnresult->base_set)))
6240 841 : resultsame = false;
6241 : }
6242 : }
6243 :
6244 841 : if (!resultsame)
6245 : {
6246 32370710 : if (dump_file && (dump_flags & TDF_DETAILS))
6247 : {
6248 20429 : fprintf (dump_file, "No store match\n");
6249 20429 : fprintf (dump_file, "Value numbering store ");
6250 20429 : print_generic_expr (dump_file, lhs);
6251 20429 : fprintf (dump_file, " to ");
6252 20429 : print_generic_expr (dump_file, op);
6253 20429 : fprintf (dump_file, "\n");
6254 : }
6255 : /* Have to set value numbers before insert, since insert is
6256 : going to valueize the references in-place. */
6257 32370710 : if (vdef)
6258 32370710 : changed |= set_ssa_val_to (vdef, vdef);
6259 :
6260 : /* Do not insert structure copies into the tables. */
6261 32370710 : if (is_gimple_min_invariant (op)
6262 32370710 : || is_gimple_reg (op))
6263 28882042 : vn_reference_insert (lhs, op, vdef, NULL);
6264 :
6265 : /* Only perform the following when being called from PRE
6266 : which embeds tail merging. */
6267 32370710 : if (default_vn_walk_kind == VN_WALK)
6268 : {
6269 7415739 : assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
6270 7415739 : vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
6271 7415739 : if (!vnresult)
6272 7374721 : vn_reference_insert (assign, lhs, vuse, vdef);
6273 : }
6274 : }
6275 : else
6276 : {
6277 : /* We had a match, so value number the vdef to have the value
6278 : number of the vuse it came from. */
6279 :
6280 50896 : if (dump_file && (dump_flags & TDF_DETAILS))
6281 9 : fprintf (dump_file, "Store matched earlier value, "
6282 : "value numbering store vdefs to matching vuses.\n");
6283 :
6284 50896 : changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
6285 : }
6286 :
6287 32421606 : return changed;
6288 : }
6289 :
6290 : /* Visit and value number PHI, return true if the value number
6291 : changed. When BACKEDGES_VARYING_P is true then assume all
6292 : backedge values are varying. When INSERTED is not NULL then
6293 : this is just a ahead query for a possible iteration, set INSERTED
6294 : to true if we'd insert into the hashtable. */
6295 :
6296 : static bool
6297 34008887 : visit_phi (gimple *phi, bool *inserted, bool backedges_varying_p)
6298 : {
6299 34008887 : tree result, sameval = VN_TOP, seen_undef = NULL_TREE;
6300 34008887 : bool seen_undef_visited = false;
6301 34008887 : tree backedge_val = NULL_TREE;
6302 34008887 : bool seen_non_backedge = false;
6303 34008887 : tree sameval_base = NULL_TREE;
6304 34008887 : poly_int64 soff, doff;
6305 34008887 : unsigned n_executable = 0;
6306 34008887 : edge sameval_e = NULL;
6307 :
6308 : /* TODO: We could check for this in initialization, and replace this
6309 : with a gcc_assert. */
6310 34008887 : if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
6311 30018 : return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
6312 :
6313 : /* We track whether a PHI was CSEd to avoid excessive iterations
6314 : that would be necessary only because the PHI changed arguments
6315 : but not value. */
6316 33978869 : if (!inserted)
6317 26470476 : gimple_set_plf (phi, GF_PLF_1, false);
6318 :
6319 33978869 : basic_block bb = gimple_bb (phi);
6320 :
6321 : /* For the equivalence handling below make sure to first process an
6322 : edge with a non-constant. */
6323 33978869 : auto_vec<edge, 2> preds;
6324 67957738 : preds.reserve_exact (EDGE_COUNT (bb->preds));
6325 33978869 : bool seen_nonconstant = false;
6326 112265337 : for (unsigned i = 0; i < EDGE_COUNT (bb->preds); ++i)
6327 : {
6328 78286468 : edge e = EDGE_PRED (bb, i);
6329 78286468 : preds.quick_push (e);
6330 78286468 : if (!seen_nonconstant)
6331 : {
6332 41566556 : tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
6333 41566556 : if (TREE_CODE (def) == SSA_NAME)
6334 : {
 : /* Move the first SSA-name argument's edge to the front. */
6335 32290710 : seen_nonconstant = true;
6336 32290710 : if (i != 0)
6337 5696742 : std::swap (preds[0], preds[i]);
6338 : }
6339 : }
6340 : }
6341 :
6342 : /* See if all non-TOP arguments have the same value. TOP is
6343 : equivalent to everything, so we can ignore it. */
6344 143020847 : for (edge e : preds)
6345 67529252 : if (e->flags & EDGE_EXECUTABLE)
6346 : {
6347 62539860 : tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
6348 :
6349 62539860 : if (def == PHI_RESULT (phi))
6350 317302 : continue;
6351 62244158 : ++n_executable;
6352 62244158 : bool visited = true;
6353 62244158 : if (TREE_CODE (def) == SSA_NAME)
6354 : {
6355 50195143 : tree val = SSA_VAL (def, &visited);
6356 50195143 : if (SSA_NAME_IS_DEFAULT_DEF (def))
6357 2631845 : visited = true;
 : /* Keep the original name for backedge arguments when those
 : are forced varying. */
6358 50195143 : if (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK))
6359 47670982 : def = val;
6360 50195143 : if (e->flags & EDGE_DFS_BACK)
6361 15314193 : backedge_val = def;
6362 : }
6363 62244158 : if (!(e->flags & EDGE_DFS_BACK))
6364 46762661 : seen_non_backedge = true;
6365 62244158 : if (def == VN_TOP)
6366 : ;
6367 : /* Ignore undefined defs for sameval but record one. */
6368 62244158 : else if (TREE_CODE (def) == SSA_NAME
6369 46826696 : && ! virtual_operand_p (def)
6370 86106484 : && ssa_undefined_value_p (def, false))
6371 : {
6372 227176 : if (!seen_undef
6373 : /* Avoid having not visited undefined defs if we also have
6374 : a visited one. */
6375 29847 : || (!seen_undef_visited && visited))
6376 : {
6377 197332 : seen_undef = def;
6378 197332 : seen_undef_visited = visited;
6379 : }
6380 : }
6381 62016982 : else if (sameval == VN_TOP)
6382 : {
 : /* First real value seen; remember the edge it came from. */
6383 : sameval = def;
6384 : sameval_e = e;
6385 : }
6386 28084966 : else if (expressions_equal_p (def, sameval))
6387 : sameval_e = NULL;
6388 44181746 : else if (virtual_operand_p (def))
6389 : {
6390 : sameval = NULL_TREE;
6391 26445012 : break;
6392 : }
6393 : else
6394 : {
6395 : /* We know we're arriving only with invariant addresses here,
6396 : try harder comparing them. We can do some caching here
6397 : which we cannot do in expressions_equal_p. */
6398 16549256 : if (TREE_CODE (def) == ADDR_EXPR
6399 367942 : && TREE_CODE (sameval) == ADDR_EXPR
6400 101906 : && sameval_base != (void *)-1)
6401 : {
6402 101906 : if (!sameval_base)
6403 101904 : sameval_base = get_addr_base_and_unit_offset
6404 101904 : (TREE_OPERAND (sameval, 0), &soff);
6405 101904 : if (!sameval_base)
 : /* (void *)-1 marks a base we failed to compute. */
6406 : sameval_base = (tree)(void *)-1;
6407 101911 : else if ((get_addr_base_and_unit_offset
6408 101906 : (TREE_OPERAND (def, 0), &doff) == sameval_base)
6409 101906 : && known_eq (soff, doff))
6410 5 : continue;
6411 : }
6412 : /* There's also the possibility to use equivalences. */
6413 32011678 : if (!FLOAT_TYPE_P (TREE_TYPE (def))
6414 : /* But only do this if we didn't force any of sameval or
6415 : val to VARYING because of backedge processing rules. */
6416 15358581 : && (TREE_CODE (sameval) != SSA_NAME
6417 12109515 : || SSA_VAL (sameval) == sameval)
6418 31907769 : && (TREE_CODE (def) != SSA_NAME || SSA_VAL (def) == def))
6419 : {
6420 15358504 : vn_nary_op_t vnresult;
6421 15358504 : tree ops[2];
6422 15358504 : ops[0] = def;
6423 15358504 : ops[1] = sameval;
6424 : /* Canonicalize the operands order for eq below. */
6425 15358504 : if (tree_swap_operands_p (ops[0], ops[1]))
6426 9276310 : std::swap (ops[0], ops[1]);
6427 15358504 : tree val = vn_nary_op_lookup_pieces (2, EQ_EXPR,
6428 : boolean_type_node,
6429 : ops, &vnresult);
6430 15358504 : if (! val && vnresult && vnresult->predicated_values)
6431 : {
6432 209731 : val = vn_nary_op_get_predicated_value (vnresult, e);
6433 124598 : if (val && integer_truep (val)
6434 231448 : && !(sameval_e && (sameval_e->flags & EDGE_DFS_BACK)))
6435 : {
6436 21595 : if (dump_file && (dump_flags & TDF_DETAILS))
6437 : {
6438 2 : fprintf (dump_file, "Predication says ");
6439 2 : print_generic_expr (dump_file, def, TDF_NONE);
6440 2 : fprintf (dump_file, " and ");
6441 2 : print_generic_expr (dump_file, sameval, TDF_NONE);
6442 2 : fprintf (dump_file, " are equal on edge %d -> %d\n",
6443 2 : e->src->index, e->dest->index);
6444 : }
6445 21595 : continue;
6446 : }
6447 : }
6448 : }
 : /* Arguments disagree; no common value. */
6449 : sameval = NULL_TREE;
6450 : break;
6451 : }
6452 : }
6453 :
6454 : /* If the value we want to use is flowing over the backedge and we
6455 : should take it as VARYING but it has a non-VARYING value drop to
6456 : VARYING.
6457 : If we value-number a virtual operand never value-number to the
6458 : value from the backedge as that confuses the alias-walking code.
6459 : See gcc.dg/torture/pr87176.c. If the value is the same on a
6460 : non-backedge everything is OK though. */
6461 33978869 : bool visited_p;
6462 33978869 : if ((backedge_val
6463 33978869 : && !seen_non_backedge
6464 2008 : && TREE_CODE (backedge_val) == SSA_NAME
6465 1741 : && sameval == backedge_val
6466 311 : && (SSA_NAME_IS_VIRTUAL_OPERAND (backedge_val)
6467 40 : || SSA_VAL (backedge_val) != backedge_val))
6468 : /* Do not value-number a virtual operand to sth not visited though
6469 : given that allows us to escape a region in alias walking. */
6470 33980606 : || (sameval
6471 7533586 : && TREE_CODE (sameval) == SSA_NAME
6472 4451566 : && !SSA_NAME_IS_DEFAULT_DEF (sameval)
6473 3765334 : && SSA_NAME_IS_VIRTUAL_OPERAND (sameval)
6474 1893294 : && (SSA_VAL (sameval, &visited_p), !visited_p)))
6475 : /* Note this just drops to VARYING without inserting the PHI into
6476 : the hashes. */
6477 293525 : result = PHI_RESULT (phi);
6478 : /* If none of the edges was executable keep the value-number at VN_TOP,
6479 : if only a single edge is exectuable use its value. */
6480 33685344 : else if (n_executable <= 1)
6481 6492779 : result = seen_undef ? seen_undef : sameval;
6482 : /* If we saw only undefined values and VN_TOP use one of the
6483 : undefined values. */
6484 27192565 : else if (sameval == VN_TOP)
6485 7058510 : result = (seen_undef && seen_undef_visited) ? seen_undef : sameval;
6486 : /* First see if it is equivalent to a phi node in this block. We prefer
6487 : this as it allows IV elimination - see PRs 66502 and 67167. */
6488 27187159 : else if ((result = vn_phi_lookup (phi, backedges_varying_p)))
6489 : {
6490 4166778 : if (!inserted
6491 68692 : && TREE_CODE (result) == SSA_NAME
6492 4235470 : && gimple_code (SSA_NAME_DEF_STMT (result)) == GIMPLE_PHI)
6493 : {
6494 68692 : gimple_set_plf (SSA_NAME_DEF_STMT (result), GF_PLF_1, true);
6495 68692 : if (dump_file && (dump_flags & TDF_DETAILS))
6496 : {
6497 6 : fprintf (dump_file, "Marking CSEd to PHI node ");
6498 6 : print_gimple_expr (dump_file, SSA_NAME_DEF_STMT (result),
6499 : 0, TDF_SLIM);
6500 6 : fprintf (dump_file, "\n");
6501 : }
6502 : }
6503 : }
6504 : /* If all values are the same use that, unless we've seen undefined
6505 : values as well and the value isn't constant.
6506 : CCP/copyprop have the same restriction to not remove uninit warnings. */
6507 23020381 : else if (sameval
6508 23020381 : && (! seen_undef || is_gimple_min_invariant (sameval)))
6509 : result = sameval;
6510 : else
6511 : {
6512 22418609 : result = PHI_RESULT (phi);
6513 : /* Only insert PHIs that are varying, for constant value numbers
6514 : we mess up equivalences otherwise as we are only comparing
6515 : the immediate controlling predicates. */
6516 22418609 : vn_phi_insert (phi, result, backedges_varying_p);
6517 22418609 : if (inserted)
6518 3260029 : *inserted = true;
6519 : }
6520 :
6521 33978869 : return set_ssa_val_to (PHI_RESULT (phi), result);
6522 33978869 : }
6523 :
6524 : /* Try to simplify RHS using equivalences and constant folding. */
6525 :
6526 : static tree
6527 125038387 : try_to_simplify (gassign *stmt)
6528 : {
6529 125038387 : enum tree_code code = gimple_assign_rhs_code (stmt);
6530 125038387 : tree tem;
6531 :
6532 : /* For stores we can end up simplifying a SSA_NAME rhs. Just return
6533 : in this case, there is no point in doing extra work. */
6534 125038387 : if (code == SSA_NAME)
6535 : return NULL_TREE;
6536 :
6537 : /* First try constant folding based on our current lattice. */
 : /* NOTE(review): the hook presumably lets the folder re-use VN
 : simplification results; it is cleared right after the fold. */
6538 110358894 : mprts_hook = vn_lookup_simplify_result;
6539 110358894 : tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
6540 110358894 : mprts_hook = NULL;
 : /* Only SSA names and invariants are useful simplification results. */
6541 110358894 : if (tem
6542 110358894 : && (TREE_CODE (tem) == SSA_NAME
6543 24596585 : || is_gimple_min_invariant (tem)))
6544 24686145 : return tem;
6545 :
6546 : return NULL_TREE;
6547 : }
6548 :
6549 : /* Visit and value number STMT, return true if the value number
6550 : changed. */
6551 :
6552 : static bool
6553 445783546 : visit_stmt (gimple *stmt, bool backedges_varying_p = false)
6554 : {
6555 445783546 : bool changed = false;
6556 :
6557 445783546 : if (dump_file && (dump_flags & TDF_DETAILS))
6558 : {
6559 409363 : fprintf (dump_file, "Value numbering stmt = ");
6560 409363 : print_gimple_stmt (dump_file, stmt, 0);
6561 : }
6562 :
 : /* Dispatch on the statement kind: PHIs, volatile stmts, assignments
 : and calls each have their own handling; anything else goes varying. */
6563 445783546 : if (gimple_code (stmt) == GIMPLE_PHI)
6564 26490939 : changed = visit_phi (stmt, NULL, backedges_varying_p);
6565 587989600 : else if (gimple_has_volatile_ops (stmt))
6566 8527948 : changed = defs_to_varying (stmt);
6567 410764659 : else if (gassign *ass = dyn_cast <gassign *> (stmt))
6568 : {
6569 129971833 : enum tree_code code = gimple_assign_rhs_code (ass);
6570 129971833 : tree lhs = gimple_assign_lhs (ass);
6571 129971833 : tree rhs1 = gimple_assign_rhs1 (ass);
6572 129971833 : tree simplified;
6573 :
6574 : /* Shortcut for copies. Simplifying copies is pointless,
6575 : since we copy the expression and value they represent. */
6576 129971833 : if (code == SSA_NAME
6577 19612939 : && TREE_CODE (lhs) == SSA_NAME)
6578 : {
6579 4933446 : changed = visit_copy (lhs, rhs1);
6580 4933446 : goto done;
6581 : }
6582 125038387 : simplified = try_to_simplify (ass);
6583 125038387 : if (simplified)
6584 : {
6585 24686145 : if (dump_file && (dump_flags & TDF_DETAILS))
6586 : {
6587 14426 : fprintf (dump_file, "RHS ");
6588 14426 : print_gimple_expr (dump_file, ass, 0);
6589 14426 : fprintf (dump_file, " simplified to ");
6590 14426 : print_generic_expr (dump_file, simplified);
6591 14426 : fprintf (dump_file, "\n");
6592 : }
6593 : }
6594 : /* Setting value numbers to constants will occasionally
6595 : screw up phi congruence because constants are not
6596 : uniquely associated with a single ssa name that can be
6597 : looked up. */
6598 24686145 : if (simplified
6599 24686145 : && is_gimple_min_invariant (simplified)
6600 21751953 : && TREE_CODE (lhs) == SSA_NAME)
6601 : {
6602 7498235 : changed = set_ssa_val_to (lhs, simplified);
6603 7498235 : goto done;
6604 : }
6605 117540152 : else if (simplified
6606 17187910 : && TREE_CODE (simplified) == SSA_NAME
6607 2934192 : && TREE_CODE (lhs) == SSA_NAME)
6608 : {
6609 2934192 : changed = visit_copy (lhs, simplified);
6610 2934192 : goto done;
6611 : }
6612 :
6613 114605960 : if ((TREE_CODE (lhs) == SSA_NAME
6614 : /* We can substitute SSA_NAMEs that are live over
6615 : abnormal edges with their constant value. */
6616 82184127 : && !(gimple_assign_copy_p (ass)
6617 26 : && is_gimple_min_invariant (rhs1))
6618 82184101 : && !(simplified
6619 0 : && is_gimple_min_invariant (simplified))
6620 82184101 : && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
6621 : /* Stores or copies from SSA_NAMEs that are live over
6622 : abnormal edges are a problem. */
6623 196788777 : || (code == SSA_NAME
6624 14679493 : && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
6625 1537 : changed = defs_to_varying (ass);
6626 114604423 : else if (REFERENCE_CLASS_P (lhs)
6627 114604423 : || DECL_P (lhs))
6628 32421606 : changed = visit_reference_op_store (lhs, rhs1, ass);
6629 82182817 : else if (TREE_CODE (lhs) == SSA_NAME)
6630 : {
6631 82182817 : if ((gimple_assign_copy_p (ass)
6632 26 : && is_gimple_min_invariant (rhs1))
6633 82182843 : || (simplified
6634 0 : && is_gimple_min_invariant (simplified)))
6635 : {
6636 0 : if (simplified)
6637 0 : changed = set_ssa_val_to (lhs, simplified);
6638 : else
6639 0 : changed = set_ssa_val_to (lhs, rhs1);
6640 : }
6641 : else
6642 : {
6643 : /* Visit the original statement. */
6644 82182817 : switch (vn_get_stmt_kind (ass))
6645 : {
6646 48151390 : case VN_NARY:
6647 48151390 : changed = visit_nary_op (lhs, ass);
6648 48151390 : break;
6649 33932091 : case VN_REFERENCE:
6650 33932091 : changed = visit_reference_op_load (lhs, rhs1, ass);
6651 33932091 : break;
6652 99336 : default:
6653 99336 : changed = defs_to_varying (ass);
6654 99336 : break;
6655 : }
6656 : }
6657 : }
6658 : else
6659 0 : changed = defs_to_varying (ass);
6660 : }
6661 280792826 : else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
6662 : {
6663 24543165 : tree lhs = gimple_call_lhs (call_stmt);
6664 24543165 : if (lhs && TREE_CODE (lhs) == SSA_NAME)
6665 : {
6666 : /* Try constant folding based on our current lattice. */
6667 8323105 : tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
6668 : vn_valueize);
6669 8323105 : if (simplified)
6670 : {
6671 64257 : if (dump_file && (dump_flags & TDF_DETAILS))
6672 : {
6673 1 : fprintf (dump_file, "call ");
6674 1 : print_gimple_expr (dump_file, call_stmt, 0);
6675 1 : fprintf (dump_file, " simplified to ");
6676 1 : print_generic_expr (dump_file, simplified);
6677 1 : fprintf (dump_file, "\n");
6678 : }
6679 : }
6680 : /* Setting value numbers to constants will occasionally
6681 : screw up phi congruence because constants are not
6682 : uniquely associated with a single ssa name that can be
6683 : looked up. */
6684 64257 : if (simplified
6685 64257 : && is_gimple_min_invariant (simplified))
6686 : {
6687 57843 : changed = set_ssa_val_to (lhs, simplified);
 : /* A call that folded to a constant has no memory effect;
 : its vdef values to the incoming vuse. */
6688 115686 : if (gimple_vdef (call_stmt))
6689 740 : changed |= set_ssa_val_to (gimple_vdef (call_stmt),
6690 : SSA_VAL (gimple_vuse (call_stmt)));
6691 57843 : goto done;
6692 : }
6693 8265262 : else if (simplified
6694 6414 : && TREE_CODE (simplified) == SSA_NAME)
6695 : {
6696 293 : changed = visit_copy (lhs, simplified);
6697 586 : if (gimple_vdef (call_stmt))
6698 0 : changed |= set_ssa_val_to (gimple_vdef (call_stmt),
6699 : SSA_VAL (gimple_vuse (call_stmt)));
6700 293 : goto done;
6701 : }
6702 8264969 : else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
6703 : {
6704 381 : changed = defs_to_varying (call_stmt);
6705 381 : goto done;
6706 : }
6707 : }
6708 :
6709 : /* Pick up flags from a devirtualization target. */
6710 24484648 : tree fn = gimple_call_fn (stmt);
6711 24484648 : int extra_fnflags = 0;
6712 24484648 : if (fn && TREE_CODE (fn) == SSA_NAME)
6713 : {
6714 534690 : fn = SSA_VAL (fn);
6715 534690 : if (TREE_CODE (fn) == ADDR_EXPR
6716 534690 : && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
6717 4852 : extra_fnflags = flags_from_decl_or_type (TREE_OPERAND (fn, 0));
6718 : }
6719 24484648 : if ((/* Calls to the same function with the same vuse
6720 : and the same operands do not necessarily return the same
6721 : value, unless they're pure or const. */
6722 24484648 : ((gimple_call_flags (call_stmt) | extra_fnflags)
6723 24484648 : & (ECF_PURE | ECF_CONST))
6724 : /* If calls have a vdef, subsequent calls won't have
6725 : the same incoming vuse. So, if 2 calls with vdef have the
6726 : same vuse, we know they're not subsequent.
6727 : We can value number 2 calls to the same function with the
6728 : same vuse and the same operands which are not subsequent
6729 : the same, because there is no code in the program that can
6730 : compare the 2 values... */
6731 20649004 : || (gimple_vdef (call_stmt)
6732 : /* ... unless the call returns a pointer which does
6733 : not alias with anything else. In which case the
6734 : information that the values are distinct are encoded
6735 : in the IL. */
6736 20614071 : && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
6737 : /* Only perform the following when being called from PRE
6738 : which embeds tail merging. */
6739 20056063 : && default_vn_walk_kind == VN_WALK))
6740 : /* Do not process .DEFERRED_INIT since that confuses uninit
6741 : analysis. */
6742 29389973 : && !gimple_call_internal_p (call_stmt, IFN_DEFERRED_INIT))
6743 8544388 : changed = visit_reference_op_call (lhs, call_stmt);
6744 : else
6745 15940260 : changed = defs_to_varying (call_stmt);
6746 : }
6747 : else
6748 256249661 : changed = defs_to_varying (stmt);
6749 445783546 : done:
6750 445783546 : return changed;
6751 : }
6752 :
6753 :
6754 : /* Set up the three hash tables (reference, nary, phi) backing the
6755 :    value number table TABLE, each with SIZE initial buckets.  */
6756 :
6757 : static void
6758 6107814 : allocate_vn_table (vn_tables_t table, unsigned size)
6759 : {
6760 6107814 : table->references = new vn_reference_table_type (size);
6761 6107814 : table->nary = new vn_nary_op_table_type (size);
6762 6107814 : table->phis = new vn_phi_table_type (size);
6763 6107814 : }
6763 :
6764 : /* Free a value number table. */
6765 :
6766 : static void
6767 6107814 : free_vn_table (vn_tables_t table)
6768 : {
6769 : /* Walk over elements and release vectors. */
6770 6107814 : vn_reference_iterator_type hir;
6771 6107814 : vn_reference_t vr;
6772 144368118 : FOR_EACH_HASH_TABLE_ELEMENT (*table->references, vr, vn_reference_t, hir)
6773 69130152 : vr->operands.release ();
6774 6107814 : delete table->phis;
6775 6107814 : table->phis = NULL;
6776 6107814 : delete table->nary;
6777 6107814 : table->nary = NULL;
6778 6107814 : delete table->references;
6779 6107814 : table->references = NULL;
6780 6107814 : }
6781 :
6782 : /* Set *ID according to RESULT. */
6783 :
6784 : static void
6785 34055449 : set_value_id_for_result (tree result, unsigned int *id)
6786 : {
6787 34055449 : if (result && TREE_CODE (result) == SSA_NAME)
6788 21177873 : *id = VN_INFO (result)->value_id;
6789 9651031 : else if (result && is_gimple_min_invariant (result))
6790 3645592 : *id = get_or_alloc_constant_value_id (result);
6791 : else
6792 9231984 : *id = get_next_value_id ();
6793 34055449 : }
6794 :
6795 : /* Set the value ids in the valid hash tables. */
6796 :
6797 : static void
6798 961783 : set_hashtable_value_ids (void)
6799 : {
6800 961783 : vn_nary_op_iterator_type hin;
6801 961783 : vn_phi_iterator_type hip;
6802 961783 : vn_reference_iterator_type hir;
6803 961783 : vn_nary_op_t vno;
6804 961783 : vn_reference_t vr;
6805 961783 : vn_phi_t vp;
6806 :
6807 : /* Now set the value ids of the things we had put in the hash
6808 : table. */
6809 :
6810 48186203 : FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
6811 23612210 : if (! vno->predicated_values)
6812 7637516 : set_value_id_for_result (vno->u.result, &vno->value_id);
6813 :
6814 8861677 : FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
6815 3949947 : set_value_id_for_result (vp->result, &vp->value_id);
6816 :
6817 45897755 : FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
6818 : hir)
6819 22467986 : set_value_id_for_result (vr->result, &vr->value_id);
6820 961783 : }
6821 :
6822 : /* Return the maximum value id we have ever seen. */
6823 :
unsigned int
get_max_value_id (void)
{
  /* NEXT_VALUE_ID is what the next call to get_next_value_id would
     hand out, so it is an exclusive upper bound on all ids in use.  */
  return next_value_id;
}
6829 :
6830 : /* Return the maximum constant value id we have ever seen. */
6831 :
unsigned int
get_max_constant_value_id (void)
{
  /* Constant value ids are handed out downwards from -1 (see
     get_next_constant_value_id); negating the counter yields the
     count of ids allocated so far plus one.  */
  return -next_constant_value_id;
}
6837 :
6838 : /* Return the next unique value id. */
6839 :
6840 : unsigned int
6841 48470532 : get_next_value_id (void)
6842 : {
6843 48470532 : gcc_checking_assert ((int)next_value_id > 0);
6844 48470532 : return next_value_id++;
6845 : }
6846 :
6847 : /* Return the next unique value id for constants. */
6848 :
6849 : unsigned int
6850 2495971 : get_next_constant_value_id (void)
6851 : {
6852 2495971 : gcc_checking_assert (next_constant_value_id < 0);
6853 2495971 : return next_constant_value_id--;
6854 : }
6855 :
6856 :
6857 : /* Compare two expressions E1 and E2 and return true if they are equal.
6858 : If match_vn_top_optimistically is true then VN_TOP is equal to anything,
6859 : otherwise VN_TOP only matches VN_TOP. */
6860 :
6861 : bool
6862 243279220 : expressions_equal_p (tree e1, tree e2, bool match_vn_top_optimistically)
6863 : {
6864 : /* The obvious case. */
6865 243279220 : if (e1 == e2)
6866 : return true;
6867 :
6868 : /* If either one is VN_TOP consider them equal. */
6869 69952753 : if (match_vn_top_optimistically
6870 65106061 : && (e1 == VN_TOP || e2 == VN_TOP))
6871 : return true;
6872 :
6873 : /* If only one of them is null, they cannot be equal. While in general
6874 : this should not happen for operations like TARGET_MEM_REF some
6875 : operands are optional and an identity value we could substitute
6876 : has differing semantics. */
6877 69952753 : if (!e1 || !e2)
6878 : return false;
6879 :
6880 : /* SSA_NAME compare pointer equal. */
6881 69952753 : if (TREE_CODE (e1) == SSA_NAME || TREE_CODE (e2) == SSA_NAME)
6882 : return false;
6883 :
6884 : /* Now perform the actual comparison. */
6885 34713873 : if (TREE_CODE (e1) == TREE_CODE (e2)
6886 34713873 : && operand_equal_p (e1, e2, OEP_PURE_SAME))
6887 : return true;
6888 :
6889 : return false;
6890 : }
6891 :
6892 :
6893 : /* Return true if the nary operation NARY may trap. This is a copy
6894 : of stmt_could_throw_1_p adjusted to the SCCVN IL. */
6895 :
6896 : bool
6897 5621976 : vn_nary_may_trap (vn_nary_op_t nary)
6898 : {
6899 5621976 : tree type;
6900 5621976 : tree rhs2 = NULL_TREE;
6901 5621976 : bool honor_nans = false;
6902 5621976 : bool honor_snans = false;
6903 5621976 : bool fp_operation = false;
6904 5621976 : bool honor_trapv = false;
6905 5621976 : bool handled, ret;
6906 5621976 : unsigned i;
6907 :
6908 5621976 : if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
6909 : || TREE_CODE_CLASS (nary->opcode) == tcc_unary
6910 5621976 : || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
6911 : {
6912 5520533 : type = nary->type;
6913 5520533 : fp_operation = FLOAT_TYPE_P (type);
6914 5520533 : if (fp_operation)
6915 : {
6916 117537 : honor_nans = flag_trapping_math && !flag_finite_math_only;
6917 117537 : honor_snans = flag_signaling_nans != 0;
6918 : }
6919 5402996 : else if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_TRAPS (type))
6920 : honor_trapv = true;
6921 : }
6922 5621976 : if (nary->length >= 2)
6923 2235435 : rhs2 = nary->op[1];
6924 5621976 : ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
6925 : honor_trapv, honor_nans, honor_snans,
6926 : rhs2, &handled);
6927 5621976 : if (handled && ret)
6928 : return true;
6929 :
6930 13170511 : for (i = 0; i < nary->length; ++i)
6931 7664970 : if (tree_could_trap_p (nary->op[i]))
6932 : return true;
6933 :
6934 : return false;
6935 : }
6936 :
6937 : /* Return true if the reference operation REF may trap. */
6938 :
bool
vn_reference_may_trap (vn_reference_t ref)
{
  /* The first operand encodes what kind of reference this is.  */
  switch (ref->operands[0].opcode)
    {
    case MODIFY_EXPR:
    case CALL_EXPR:
      /* We do not handle calls.  */
      return true;
    case ADDR_EXPR:
      /* And toplevel address computations never trap.  */
      return false;
    default:;
    }

  /* Otherwise walk all component operands; any one of them may make
     the whole reference trap.  */
  vn_reference_op_t op;
  unsigned i;
  FOR_EACH_VEC_ELT (ref->operands, i, op)
    {
      switch (op->opcode)
	{
	case WITH_SIZE_EXPR:
	case TARGET_MEM_REF:
	  /* Always variable.  */
	  return true;
	case COMPONENT_REF:
	  /* A variable field offset can address out of bounds.  */
	  if (op->op1 && TREE_CODE (op->op1) == SSA_NAME)
	    return true;
	  break;
	case ARRAY_RANGE_REF:
	  /* A non-constant index may be out of bounds.  */
	  if (TREE_CODE (op->op0) == SSA_NAME)
	    return true;
	  break;
	case ARRAY_REF:
	  {
	    if (TREE_CODE (op->op0) != INTEGER_CST)
	      return true;

	    /* !in_array_bounds */
	    /* The next operand (i+1) is the base the ARRAY_REF applies
	       to; its type carries the array domain.  */
	    tree domain_type = TYPE_DOMAIN (ref->operands[i+1].type);
	    if (!domain_type)
	      return true;

	    /* op1 of an ARRAY_REF operand records the array low bound.  */
	    tree min = op->op1;
	    tree max = TYPE_MAX_VALUE (domain_type);
	    if (!min
		|| !max
		|| TREE_CODE (min) != INTEGER_CST
		|| TREE_CODE (max) != INTEGER_CST)
	      return true;

	    /* A constant index outside [min, max] traps.  */
	    if (tree_int_cst_lt (op->op0, min)
		|| tree_int_cst_lt (max, op->op0))
	      return true;

	    break;
	  }
	case MEM_REF:
	  /* Nothing interesting in itself, the base is separate.  */
	  break;
	/* The following are the address bases.  */
	case SSA_NAME:
	  return true;
	case ADDR_EXPR:
	  if (op->op0)
	    return tree_could_trap_p (TREE_OPERAND (op->op0, 0));
	  return false;
	default:;
	}
    }
  return false;
}
7011 :
/* Set up an elimination domwalk walking in DIRECTION.  A non-NULL
   INSERTED_EXPRS_ bitmap means we run as part of PRE (do_pre).  */
eliminate_dom_walker::eliminate_dom_walker (cdi_direction direction,
					    bitmap inserted_exprs_)
  : dom_walker (direction), do_pre (inserted_exprs_ != NULL),
    el_todo (0), eliminations (0), insertions (0),
    inserted_exprs (inserted_exprs_)
{
  /* Blocks that need EH or abnormal-edge cleanup after elimination
     are collected in these bitmaps.  */
  need_eh_cleanup = BITMAP_ALLOC (NULL);
  need_ab_cleanup = BITMAP_ALLOC (NULL);
}
7021 :
/* Release the cleanup bitmaps allocated by the constructor.  */
eliminate_dom_walker::~eliminate_dom_walker ()
{
  BITMAP_FREE (need_eh_cleanup);
  BITMAP_FREE (need_ab_cleanup);
}
7027 :
7028 : /* Return a leader for OP that is available at the current point of the
7029 : eliminate domwalk. */
7030 :
tree
eliminate_dom_walker::eliminate_avail (basic_block, tree op)
{
  tree valnum = VN_INFO (op)->valnum;
  if (TREE_CODE (valnum) == SSA_NAME)
    {
      /* Default defs are available everywhere.  */
      if (SSA_NAME_IS_DEFAULT_DEF (valnum))
	return valnum;
      /* The avail vector is grown lazily by eliminate_push_avail; an
	 out-of-range version means nothing was made available.  */
      if (avail.length () > SSA_NAME_VERSION (valnum))
	{
	  tree av = avail[SSA_NAME_VERSION (valnum)];
	  /* When PRE discovers a new redundancy there's no way to unite
	     the value classes so it instead inserts a copy old-val = new-val.
	     Look through such copies here, providing one more level of
	     simplification at elimination time.  */
	  gassign *ass;
	  if (av && (ass = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (av))))
	    if (gimple_assign_rhs_class (ass) == GIMPLE_SINGLE_RHS)
	      {
		tree rhs1 = gimple_assign_rhs1 (ass);
		/* Only forward constants and SSA names not occurring in
		   abnormal PHIs (those may not be propagated).  */
		if (CONSTANT_CLASS_P (rhs1)
		    || (TREE_CODE (rhs1) == SSA_NAME
			&& !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
		  av = rhs1;
	      }
	  return av;
	}
    }
  else if (is_gimple_min_invariant (valnum))
    /* Constant value numbers are their own leader.  */
    return valnum;
  return NULL_TREE;
}
7063 :
7064 : /* At the current point of the eliminate domwalk make OP available. */
7065 :
7066 : void
7067 49409483 : eliminate_dom_walker::eliminate_push_avail (basic_block, tree op)
7068 : {
7069 49409483 : tree valnum = VN_INFO (op)->valnum;
7070 49409483 : if (TREE_CODE (valnum) == SSA_NAME)
7071 : {
7072 95487511 : if (avail.length () <= SSA_NAME_VERSION (valnum))
7073 16656749 : avail.safe_grow_cleared (SSA_NAME_VERSION (valnum) + 1, true);
7074 49409483 : tree pushop = op;
7075 49409483 : if (avail[SSA_NAME_VERSION (valnum)])
7076 44141 : pushop = avail[SSA_NAME_VERSION (valnum)];
7077 49409483 : avail_stack.safe_push (pushop);
7078 49409483 : avail[SSA_NAME_VERSION (valnum)] = op;
7079 : }
7080 49409483 : }
7081 :
7082 : /* Insert the expression recorded by SCCVN for VAL at *GSI. Returns
7083 : the leader for the expression if insertion was successful. */
7084 :
tree
eliminate_dom_walker::eliminate_insert (basic_block bb,
					gimple_stmt_iterator *gsi, tree val)
{
  /* We can insert a sequence with a single assignment only.  */
  gimple_seq stmts = VN_INFO (val)->expr;
  if (!gimple_seq_singleton_p (stmts))
    return NULL_TREE;
  gassign *stmt = dyn_cast <gassign *> (gimple_seq_first_stmt (stmts));
  /* Only handle the cheap unary cases (conversions, negation,
     BIT_FIELD_REF) plus masking with a constant.  */
  if (!stmt
      || (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
	  && gimple_assign_rhs_code (stmt) != VIEW_CONVERT_EXPR
	  && gimple_assign_rhs_code (stmt) != NEGATE_EXPR
	  && gimple_assign_rhs_code (stmt) != BIT_FIELD_REF
	  && (gimple_assign_rhs_code (stmt) != BIT_AND_EXPR
	      || TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)))
    return NULL_TREE;

  /* For reference-like RHSs the SSA operand sits one level down.  */
  tree op = gimple_assign_rhs1 (stmt);
  if (gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR
      || gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
    op = TREE_OPERAND (op, 0);
  /* The operand itself needs an available leader at this point.  */
  tree leader = TREE_CODE (op) == SSA_NAME ? eliminate_avail (bb, op) : op;
  if (!leader)
    return NULL_TREE;

  /* Re-build the expression on top of the leader, letting gimple_build
     simplify it.  */
  tree res;
  stmts = NULL;
  if (gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
    res = gimple_build (&stmts, BIT_FIELD_REF,
			TREE_TYPE (val), leader,
			TREE_OPERAND (gimple_assign_rhs1 (stmt), 1),
			TREE_OPERAND (gimple_assign_rhs1 (stmt), 2));
  else if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR)
    res = gimple_build (&stmts, BIT_AND_EXPR,
			TREE_TYPE (val), leader, gimple_assign_rhs2 (stmt));
  else
    res = gimple_build (&stmts, gimple_assign_rhs_code (stmt),
			TREE_TYPE (val), leader);
  /* If simplification yielded something not newly defined in STMTS
     we cannot use it (see below).  */
  if (TREE_CODE (res) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (res)
      || gimple_bb (SSA_NAME_DEF_STMT (res)))
    {
      gimple_seq_discard (stmts);

      /* During propagation we have to treat SSA info conservatively
	 and thus we can end up simplifying the inserted expression
	 at elimination time to sth not defined in stmts.  */
      /* But then this is a redundancy we failed to detect.  Which means
	 res now has two values.  That doesn't play well with how
	 we track availability here, so give up.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  if (TREE_CODE (res) == SSA_NAME)
	    res = eliminate_avail (bb, res);
	  if (res)
	    {
	      fprintf (dump_file, "Failed to insert expression for value ");
	      print_generic_expr (dump_file, val);
	      fprintf (dump_file, " which is really fully redundant to ");
	      print_generic_expr (dump_file, res);
	      fprintf (dump_file, "\n");
	    }
	}

      return NULL_TREE;
    }
  else
    {
      /* Insert before the current statement and value-number the new
	 name to VAL so later lookups see it.  */
      gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
      vn_ssa_aux_t vn_info = VN_INFO (res);
      vn_info->valnum = val;
      vn_info->visited = true;
    }

  insertions++;
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Inserted ");
      print_gimple_stmt (dump_file, SSA_NAME_DEF_STMT (res), 0);
    }

  return res;
}
7169 :
7170 : void
7171 344526197 : eliminate_dom_walker::eliminate_stmt (basic_block b, gimple_stmt_iterator *gsi)
7172 : {
7173 344526197 : tree sprime = NULL_TREE;
7174 344526197 : gimple *stmt = gsi_stmt (*gsi);
7175 344526197 : tree lhs = gimple_get_lhs (stmt);
7176 118066908 : if (lhs && TREE_CODE (lhs) == SSA_NAME
7177 163555158 : && !gimple_has_volatile_ops (stmt)
7178 : /* See PR43491. Do not replace a global register variable when
7179 : it is a the RHS of an assignment. Do replace local register
7180 : variables since gcc does not guarantee a local variable will
7181 : be allocated in register.
7182 : ??? The fix isn't effective here. This should instead
7183 : be ensured by not value-numbering them the same but treating
7184 : them like volatiles? */
7185 425241057 : && !(gimple_assign_single_p (stmt)
7186 34744464 : && (TREE_CODE (gimple_assign_rhs1 (stmt)) == VAR_DECL
7187 2456718 : && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt))
7188 4172 : && is_global_var (gimple_assign_rhs1 (stmt)))))
7189 : {
7190 80714616 : sprime = eliminate_avail (b, lhs);
7191 80714616 : if (!sprime)
7192 : {
7193 : /* If there is no existing usable leader but SCCVN thinks
7194 : it has an expression it wants to use as replacement,
7195 : insert that. */
7196 68015421 : tree val = VN_INFO (lhs)->valnum;
7197 68015421 : vn_ssa_aux_t vn_info;
7198 68015421 : if (val != VN_TOP
7199 68015421 : && TREE_CODE (val) == SSA_NAME
7200 68015421 : && (vn_info = VN_INFO (val), true)
7201 68015421 : && vn_info->needs_insertion
7202 310700 : && vn_info->expr != NULL
7203 68138888 : && (sprime = eliminate_insert (b, gsi, val)) != NULL_TREE)
7204 22652 : eliminate_push_avail (b, sprime);
7205 : }
7206 :
7207 : /* If this now constitutes a copy duplicate points-to
7208 : and range info appropriately. This is especially
7209 : important for inserted code. */
7210 68015421 : if (sprime
7211 12721847 : && TREE_CODE (sprime) == SSA_NAME)
7212 8736159 : maybe_duplicate_ssa_info_at_copy (lhs, sprime);
7213 :
7214 : /* Inhibit the use of an inserted PHI on a loop header when
7215 : the address of the memory reference is a simple induction
7216 : variable. In other cases the vectorizer won't do anything
7217 : anyway (either it's loop invariant or a complicated
7218 : expression). */
7219 8736159 : if (sprime
7220 12721847 : && TREE_CODE (sprime) == SSA_NAME
7221 8736159 : && do_pre
7222 898295 : && (flag_tree_loop_vectorize || flag_tree_parallelize_loops > 1)
7223 879739 : && loop_outer (b->loop_father)
7224 379993 : && has_zero_uses (sprime)
7225 187984 : && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))
7226 187831 : && gimple_assign_load_p (stmt))
7227 : {
7228 102369 : gimple *def_stmt = SSA_NAME_DEF_STMT (sprime);
7229 102369 : basic_block def_bb = gimple_bb (def_stmt);
7230 102369 : if (gimple_code (def_stmt) == GIMPLE_PHI
7231 102369 : && def_bb->loop_father->header == def_bb)
7232 : {
7233 65038 : loop_p loop = def_bb->loop_father;
7234 65038 : ssa_op_iter iter;
7235 65038 : tree op;
7236 65038 : bool found = false;
7237 82450 : FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
7238 : {
7239 61511 : affine_iv iv;
7240 61511 : def_bb = gimple_bb (SSA_NAME_DEF_STMT (op));
7241 61511 : if (def_bb
7242 55721 : && flow_bb_inside_loop_p (loop, def_bb)
7243 112193 : && simple_iv (loop, loop, op, &iv, true))
7244 : {
7245 44099 : found = true;
7246 44099 : break;
7247 : }
7248 : }
7249 20939 : if (found)
7250 : {
7251 44099 : if (dump_file && (dump_flags & TDF_DETAILS))
7252 : {
7253 3 : fprintf (dump_file, "Not replacing ");
7254 3 : print_gimple_expr (dump_file, stmt, 0);
7255 3 : fprintf (dump_file, " with ");
7256 3 : print_generic_expr (dump_file, sprime);
7257 3 : fprintf (dump_file, " which would add a loop"
7258 : " carried dependence to loop %d\n",
7259 : loop->num);
7260 : }
7261 : /* Don't keep sprime available. */
7262 44099 : sprime = NULL_TREE;
7263 : }
7264 : }
7265 : }
7266 :
7267 80714616 : if (sprime)
7268 : {
7269 : /* If we can propagate the value computed for LHS into
7270 : all uses don't bother doing anything with this stmt. */
7271 12677748 : if (may_propagate_copy (lhs, sprime))
7272 : {
7273 : /* Mark it for removal. */
7274 12675841 : to_remove.safe_push (stmt);
7275 :
7276 : /* ??? Don't count copy/constant propagations. */
7277 12675841 : if (gimple_assign_single_p (stmt)
7278 12675841 : && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
7279 4409417 : || gimple_assign_rhs1 (stmt) == sprime))
7280 13498986 : return;
7281 :
7282 7669270 : if (dump_file && (dump_flags & TDF_DETAILS))
7283 : {
7284 18738 : fprintf (dump_file, "Replaced ");
7285 18738 : print_gimple_expr (dump_file, stmt, 0);
7286 18738 : fprintf (dump_file, " with ");
7287 18738 : print_generic_expr (dump_file, sprime);
7288 18738 : fprintf (dump_file, " in all uses of ");
7289 18738 : print_gimple_stmt (dump_file, stmt, 0);
7290 : }
7291 :
7292 7669270 : eliminations++;
7293 7669270 : return;
7294 : }
7295 :
7296 : /* If this is an assignment from our leader (which
7297 : happens in the case the value-number is a constant)
7298 : then there is nothing to do. Likewise if we run into
7299 : inserted code that needed a conversion because of
7300 : our type-agnostic value-numbering of loads. */
7301 1907 : if ((gimple_assign_single_p (stmt)
7302 1 : || (is_gimple_assign (stmt)
7303 1 : && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
7304 0 : || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)))
7305 1908 : && sprime == gimple_assign_rhs1 (stmt))
7306 : return;
7307 :
7308 : /* Else replace its RHS. */
7309 719 : if (dump_file && (dump_flags & TDF_DETAILS))
7310 : {
7311 0 : fprintf (dump_file, "Replaced ");
7312 0 : print_gimple_expr (dump_file, stmt, 0);
7313 0 : fprintf (dump_file, " with ");
7314 0 : print_generic_expr (dump_file, sprime);
7315 0 : fprintf (dump_file, " in ");
7316 0 : print_gimple_stmt (dump_file, stmt, 0);
7317 : }
7318 719 : eliminations++;
7319 :
7320 719 : bool can_make_abnormal_goto = (is_gimple_call (stmt)
7321 719 : && stmt_can_make_abnormal_goto (stmt));
7322 719 : gimple *orig_stmt = stmt;
7323 719 : if (!useless_type_conversion_p (TREE_TYPE (lhs),
7324 719 : TREE_TYPE (sprime)))
7325 : {
7326 : /* We preserve conversions to but not from function or method
7327 : types. This asymmetry makes it necessary to re-instantiate
7328 : conversions here. */
7329 717 : if (POINTER_TYPE_P (TREE_TYPE (lhs))
7330 717 : && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (TREE_TYPE (lhs))))
7331 717 : sprime = fold_convert (TREE_TYPE (lhs), sprime);
7332 : else
7333 0 : gcc_unreachable ();
7334 : }
7335 719 : tree vdef = gimple_vdef (stmt);
7336 719 : tree vuse = gimple_vuse (stmt);
7337 719 : propagate_tree_value_into_stmt (gsi, sprime);
7338 719 : stmt = gsi_stmt (*gsi);
7339 719 : update_stmt (stmt);
7340 : /* In case the VDEF on the original stmt was released, value-number
7341 : it to the VUSE. This is to make vuse_ssa_val able to skip
7342 : released virtual operands. */
7343 1438 : if (vdef != gimple_vdef (stmt))
7344 : {
7345 0 : gcc_assert (SSA_NAME_IN_FREE_LIST (vdef));
7346 0 : VN_INFO (vdef)->valnum = vuse;
7347 : }
7348 :
7349 : /* If we removed EH side-effects from the statement, clean
7350 : its EH information. */
7351 719 : if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
7352 : {
7353 0 : bitmap_set_bit (need_eh_cleanup,
7354 0 : gimple_bb (stmt)->index);
7355 0 : if (dump_file && (dump_flags & TDF_DETAILS))
7356 0 : fprintf (dump_file, " Removed EH side-effects.\n");
7357 : }
7358 :
7359 : /* Likewise for AB side-effects. */
7360 719 : if (can_make_abnormal_goto
7361 719 : && !stmt_can_make_abnormal_goto (stmt))
7362 : {
7363 0 : bitmap_set_bit (need_ab_cleanup,
7364 0 : gimple_bb (stmt)->index);
7365 0 : if (dump_file && (dump_flags & TDF_DETAILS))
7366 0 : fprintf (dump_file, " Removed AB side-effects.\n");
7367 : }
7368 :
7369 719 : return;
7370 : }
7371 : }
7372 :
7373 : /* If the statement is a scalar store, see if the expression
7374 : has the same value number as its rhs. If so, the store is
7375 : dead. */
7376 331848449 : if (gimple_assign_single_p (stmt)
7377 124654684 : && !gimple_has_volatile_ops (stmt)
7378 54408471 : && !is_gimple_reg (gimple_assign_lhs (stmt))
7379 28044273 : && (TREE_CODE (gimple_assign_lhs (stmt)) != VAR_DECL
7380 2699761 : || !DECL_HARD_REGISTER (gimple_assign_lhs (stmt)))
7381 359888713 : && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
7382 16064946 : || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
7383 : {
7384 25062370 : tree rhs = gimple_assign_rhs1 (stmt);
7385 25062370 : vn_reference_t vnresult;
7386 : /* ??? gcc.dg/torture/pr91445.c shows that we lookup a boolean
7387 : typed load of a byte known to be 0x11 as 1 so a store of
7388 : a boolean 1 is detected as redundant. Because of this we
7389 : have to make sure to lookup with a ref where its size
7390 : matches the precision. */
7391 25062370 : tree lookup_lhs = lhs;
7392 49867979 : if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
7393 13107757 : && (TREE_CODE (lhs) != COMPONENT_REF
7394 7952300 : || !DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs, 1)))
7395 37975687 : && !type_has_mode_precision_p (TREE_TYPE (lhs)))
7396 : {
7397 415583 : if (TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
7398 426383 : && TYPE_PRECISION (TREE_TYPE (lhs)) > MAX_FIXED_MODE_SIZE)
7399 : lookup_lhs = NULL_TREE;
7400 408408 : else if (TREE_CODE (lhs) == COMPONENT_REF
7401 408408 : || TREE_CODE (lhs) == MEM_REF)
7402 : {
7403 286331 : tree ltype = build_nonstandard_integer_type
7404 286331 : (TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (lhs))),
7405 286331 : TYPE_UNSIGNED (TREE_TYPE (lhs)));
7406 286331 : if (TREE_CODE (lhs) == COMPONENT_REF)
7407 : {
7408 218743 : tree foff = component_ref_field_offset (lhs);
7409 218743 : tree f = TREE_OPERAND (lhs, 1);
7410 218743 : if (!poly_int_tree_p (foff))
7411 : lookup_lhs = NULL_TREE;
7412 : else
7413 437486 : lookup_lhs = build3 (BIT_FIELD_REF, ltype,
7414 218743 : TREE_OPERAND (lhs, 0),
7415 218743 : TYPE_SIZE (TREE_TYPE (lhs)),
7416 : bit_from_pos
7417 218743 : (foff, DECL_FIELD_BIT_OFFSET (f)));
7418 : }
7419 : else
7420 67588 : lookup_lhs = build2 (MEM_REF, ltype,
7421 67588 : TREE_OPERAND (lhs, 0),
7422 67588 : TREE_OPERAND (lhs, 1));
7423 : }
7424 : else
7425 : lookup_lhs = NULL_TREE;
7426 : }
7427 24933118 : tree val = NULL_TREE, tem;
7428 24933118 : if (lookup_lhs)
7429 49866236 : val = vn_reference_lookup (lookup_lhs, gimple_vuse (stmt),
7430 : VN_WALKREWRITE, &vnresult, false,
7431 : NULL, NULL_TREE, true);
7432 25062370 : if (TREE_CODE (rhs) == SSA_NAME)
7433 11975318 : rhs = VN_INFO (rhs)->valnum;
7434 25062370 : gassign *ass;
7435 25062370 : if (val
7436 25062370 : && (operand_equal_p (val, rhs, 0)
7437 : /* Due to the bitfield lookups above we can get bit
7438 : interpretations of the same RHS as values here. Those
7439 : are redundant as well. */
7440 3066580 : || (TREE_CODE (val) == SSA_NAME
7441 1871114 : && gimple_assign_single_p (SSA_NAME_DEF_STMT (val))
7442 1693165 : && (tem = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (val)))
7443 1693165 : && TREE_CODE (tem) == VIEW_CONVERT_EXPR
7444 3518 : && TREE_OPERAND (tem, 0) == rhs)
7445 3066578 : || (TREE_CODE (rhs) == SSA_NAME
7446 25508671 : && (ass = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs)))
7447 1457806 : && gimple_assign_rhs1 (ass) == val
7448 667668 : && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (ass))
7449 9 : && tree_nop_conversion_p (TREE_TYPE (rhs), TREE_TYPE (val)))))
7450 : {
7451 : /* We can only remove the later store if the former aliases
7452 : at least all accesses the later one does or if the store
7453 : was to readonly memory storing the same value. */
7454 237752 : ao_ref lhs_ref;
7455 237752 : ao_ref_init (&lhs_ref, lhs);
7456 237752 : alias_set_type set = ao_ref_alias_set (&lhs_ref);
7457 237752 : alias_set_type base_set = ao_ref_base_alias_set (&lhs_ref);
7458 237752 : if (! vnresult
7459 237752 : || ((vnresult->set == set
7460 47343 : || alias_set_subset_of (set, vnresult->set))
7461 223652 : && (vnresult->base_set == base_set
7462 20912 : || alias_set_subset_of (base_set, vnresult->base_set))))
7463 : {
7464 221367 : if (dump_file && (dump_flags & TDF_DETAILS))
7465 : {
7466 17 : fprintf (dump_file, "Deleted redundant store ");
7467 17 : print_gimple_stmt (dump_file, stmt, 0);
7468 : }
7469 :
7470 : /* Queue stmt for removal. */
7471 221367 : to_remove.safe_push (stmt);
7472 221367 : return;
7473 : }
7474 : }
7475 : }
7476 :
7477 : /* If this is a control statement value numbering left edges
7478 : unexecuted on force the condition in a way consistent with
7479 : that. */
7480 331627082 : if (gcond *cond = dyn_cast <gcond *> (stmt))
7481 : {
7482 18828673 : if ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE)
7483 18828673 : ^ (EDGE_SUCC (b, 1)->flags & EDGE_EXECUTABLE))
7484 : {
7485 599871 : if (dump_file && (dump_flags & TDF_DETAILS))
7486 : {
7487 15 : fprintf (dump_file, "Removing unexecutable edge from ");
7488 15 : print_gimple_stmt (dump_file, stmt, 0);
7489 : }
7490 599871 : if (((EDGE_SUCC (b, 0)->flags & EDGE_TRUE_VALUE) != 0)
7491 599871 : == ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE) != 0))
7492 231734 : gimple_cond_make_true (cond);
7493 : else
7494 368137 : gimple_cond_make_false (cond);
7495 599871 : update_stmt (cond);
7496 599871 : el_todo |= TODO_cleanup_cfg;
7497 599871 : return;
7498 : }
7499 : }
7500 :
7501 331027211 : bool can_make_abnormal_goto = stmt_can_make_abnormal_goto (stmt);
7502 331027211 : bool was_noreturn = (is_gimple_call (stmt)
7503 331027211 : && gimple_call_noreturn_p (stmt));
7504 331027211 : tree vdef = gimple_vdef (stmt);
7505 331027211 : tree vuse = gimple_vuse (stmt);
7506 :
7507 : /* If we didn't replace the whole stmt (or propagate the result
7508 : into all uses), replace all uses on this stmt with their
7509 : leaders. */
7510 331027211 : bool modified = false;
7511 331027211 : use_operand_p use_p;
7512 331027211 : ssa_op_iter iter;
7513 493091585 : FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
7514 : {
7515 162064374 : tree use = USE_FROM_PTR (use_p);
7516 : /* ??? The call code above leaves stmt operands un-updated. */
7517 162064374 : if (TREE_CODE (use) != SSA_NAME)
7518 0 : continue;
7519 162064374 : tree sprime;
7520 162064374 : if (SSA_NAME_IS_DEFAULT_DEF (use))
7521 : /* ??? For default defs BB shouldn't matter, but we have to
7522 : solve the inconsistency between rpo eliminate and
7523 : dom eliminate avail valueization first. */
7524 26004863 : sprime = eliminate_avail (b, use);
7525 : else
7526 : /* Look for sth available at the definition block of the argument.
7527 : This avoids inconsistencies between availability there which
7528 : decides if the stmt can be removed and availability at the
7529 : use site. The SSA property ensures that things available
7530 : at the definition are also available at uses. */
7531 136059511 : sprime = eliminate_avail (gimple_bb (SSA_NAME_DEF_STMT (use)), use);
7532 162064374 : if (sprime && sprime != use
7533 12397536 : && may_propagate_copy (use, sprime, true)
7534 : /* We substitute into debug stmts to avoid excessive
7535 : debug temporaries created by removed stmts, but we need
7536 : to avoid doing so for inserted sprimes as we never want
7537 : to create debug temporaries for them. */
7538 174461193 : && (!inserted_exprs
7539 1165801 : || TREE_CODE (sprime) != SSA_NAME
7540 1151239 : || !is_gimple_debug (stmt)
7541 370718 : || !bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))))
7542 : {
7543 12053615 : propagate_value (use_p, sprime);
7544 12053615 : modified = true;
7545 : }
7546 : }
7547 :
7548 : /* Fold the stmt if modified, this canonicalizes MEM_REFs we propagated
7549 : into which is a requirement for the IPA devirt machinery. */
7550 331027211 : gimple *old_stmt = stmt;
7551 331027211 : if (modified)
7552 : {
7553 : /* If a formerly non-invariant ADDR_EXPR is turned into an
7554 : invariant one it was on a separate stmt. */
7555 11187021 : if (gimple_assign_single_p (stmt)
7556 11187021 : && TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
7557 233615 : recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
7558 11187021 : gimple_stmt_iterator prev = *gsi;
7559 11187021 : gsi_prev (&prev);
7560 11187021 : if (fold_stmt (gsi, follow_all_ssa_edges))
7561 : {
7562 : /* fold_stmt may have created new stmts inbetween
7563 : the previous stmt and the folded stmt. Mark
7564 : all defs created there as varying to not confuse
7565 : the SCCVN machinery as we're using that even during
7566 : elimination. */
7567 965244 : if (gsi_end_p (prev))
7568 218784 : prev = gsi_start_bb (b);
7569 : else
7570 855852 : gsi_next (&prev);
7571 965244 : if (gsi_stmt (prev) != gsi_stmt (*gsi))
7572 89572 : do
7573 : {
7574 55858 : tree def;
7575 55858 : ssa_op_iter dit;
7576 107028 : FOR_EACH_SSA_TREE_OPERAND (def, gsi_stmt (prev),
7577 : dit, SSA_OP_ALL_DEFS)
7578 : /* As existing DEFs may move between stmts
7579 : only process new ones. */
7580 51170 : if (! has_VN_INFO (def))
7581 : {
7582 33612 : vn_ssa_aux_t vn_info = VN_INFO (def);
7583 33612 : vn_info->valnum = def;
7584 33612 : vn_info->visited = true;
7585 : }
7586 55858 : if (gsi_stmt (prev) == gsi_stmt (*gsi))
7587 : break;
7588 33714 : gsi_next (&prev);
7589 33714 : }
7590 : while (1);
7591 : }
7592 11187021 : stmt = gsi_stmt (*gsi);
7593 : /* In case we folded the stmt away schedule the NOP for removal. */
7594 11187021 : if (gimple_nop_p (stmt))
7595 819 : to_remove.safe_push (stmt);
7596 : }
7597 :
7598 : /* Visit indirect calls and turn them into direct calls if
7599 : possible using the devirtualization machinery. Do this before
7600 : checking for required EH/abnormal/noreturn cleanup as devird
7601 : may expose more of those. */
7602 331027211 : if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
7603 : {
7604 22099655 : tree fn = gimple_call_fn (call_stmt);
7605 22099655 : if (fn
7606 21364736 : && flag_devirtualize
7607 42724479 : && virtual_method_call_p (fn))
7608 : {
7609 179745 : tree otr_type = obj_type_ref_class (fn);
7610 179745 : unsigned HOST_WIDE_INT otr_tok
7611 179745 : = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (fn));
7612 179745 : tree instance;
7613 179745 : ipa_polymorphic_call_context context (current_function_decl,
7614 179745 : fn, stmt, &instance);
7615 179745 : context.get_dynamic_type (instance, OBJ_TYPE_REF_OBJECT (fn),
7616 : otr_type, stmt, NULL);
7617 179745 : bool final;
7618 179745 : vec <cgraph_node *> targets
7619 179745 : = possible_polymorphic_call_targets (obj_type_ref_class (fn),
7620 : otr_tok, context, &final);
7621 179745 : if (dump_file)
7622 22 : dump_possible_polymorphic_call_targets (dump_file,
7623 : obj_type_ref_class (fn),
7624 : otr_tok, context);
7625 180031 : if (final && targets.length () <= 1 && dbg_cnt (devirt))
7626 : {
7627 64 : tree fn;
7628 64 : if (targets.length () == 1)
7629 64 : fn = targets[0]->decl;
7630 : else
7631 0 : fn = builtin_decl_unreachable ();
7632 64 : if (dump_enabled_p ())
7633 : {
7634 9 : dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
7635 : "converting indirect call to "
7636 : "function %s\n",
7637 9 : lang_hooks.decl_printable_name (fn, 2));
7638 : }
7639 64 : gimple_call_set_fndecl (call_stmt, fn);
7640 : /* If changing the call to __builtin_unreachable
7641 : or similar noreturn function, adjust gimple_call_fntype
7642 : too. */
7643 64 : if (gimple_call_noreturn_p (call_stmt)
7644 0 : && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fn)))
7645 0 : && TYPE_ARG_TYPES (TREE_TYPE (fn))
7646 64 : && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fn)))
7647 0 : == void_type_node))
7648 0 : gimple_call_set_fntype (call_stmt, TREE_TYPE (fn));
7649 64 : maybe_remove_unused_call_args (cfun, call_stmt);
7650 64 : modified = true;
7651 : }
7652 : }
7653 : }
7654 :
7655 331027211 : if (modified)
7656 : {
7657 : /* When changing a call into a noreturn call, cfg cleanup
7658 : is needed to fix up the noreturn call. */
7659 11187042 : if (!was_noreturn
7660 11187042 : && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
7661 56 : to_fixup.safe_push (stmt);
7662 : /* When changing a condition or switch into one we know what
7663 : edge will be executed, schedule a cfg cleanup. */
7664 11187042 : if ((gimple_code (stmt) == GIMPLE_COND
7665 1490748 : && (gimple_cond_true_p (as_a <gcond *> (stmt))
7666 1485433 : || gimple_cond_false_p (as_a <gcond *> (stmt))))
7667 12670229 : || (gimple_code (stmt) == GIMPLE_SWITCH
7668 7461 : && TREE_CODE (gimple_switch_index
7669 : (as_a <gswitch *> (stmt))) == INTEGER_CST))
7670 9344 : el_todo |= TODO_cleanup_cfg;
7671 : /* If we removed EH side-effects from the statement, clean
7672 : its EH information. */
7673 11187042 : if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
7674 : {
7675 1637 : bitmap_set_bit (need_eh_cleanup,
7676 1637 : gimple_bb (stmt)->index);
7677 1637 : if (dump_file && (dump_flags & TDF_DETAILS))
7678 0 : fprintf (dump_file, " Removed EH side-effects.\n");
7679 : }
7680 : /* Likewise for AB side-effects. */
7681 11187042 : if (can_make_abnormal_goto
7682 11187042 : && !stmt_can_make_abnormal_goto (stmt))
7683 : {
7684 0 : bitmap_set_bit (need_ab_cleanup,
7685 0 : gimple_bb (stmt)->index);
7686 0 : if (dump_file && (dump_flags & TDF_DETAILS))
7687 0 : fprintf (dump_file, " Removed AB side-effects.\n");
7688 : }
7689 11187042 : update_stmt (stmt);
7690 : /* In case the VDEF on the original stmt was released, value-number
7691 : it to the VUSE. This is to make vuse_ssa_val able to skip
7692 : released virtual operands. */
7693 14269731 : if (vdef && SSA_NAME_IN_FREE_LIST (vdef))
7694 1863 : VN_INFO (vdef)->valnum = vuse;
7695 : }
7696 :
7697 : /* Make new values available - for fully redundant LHS we
7698 : continue with the next stmt above and skip this.
7699 : But avoid picking up dead defs. */
7700 331027211 : tree def;
7701 400386251 : FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_DEF)
7702 69359040 : if (! has_zero_uses (def)
7703 69359040 : || (inserted_exprs
7704 207998 : && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (def))))
7705 67982931 : eliminate_push_avail (b, def);
7706 : }
7707 :
7708 : /* Perform elimination for the basic-block B during the domwalk:
 : replace redundant PHI defs with their available leaders, run
 : eliminate_stmt on every stmt of B and finally propagate leaders
 : into the PHI arguments of executable successor edges.  */
7709 :
7710 : edge
7711 40758933 : eliminate_dom_walker::before_dom_children (basic_block b)
7712 : {
7713 : /* Mark new bb.  The NULL_TREE sentinel is popped again in
 : after_dom_children when unwinding this block's avail entries.  */
7714 40758933 : avail_stack.safe_push (NULL_TREE);
7715 :
7716 : /* Skip unreachable blocks marked unreachable during the SCCVN domwalk. */
7717 40758933 : if (!(b->flags & BB_EXECUTABLE))
7718 : return NULL;
7719 :
7720 36000503 : vn_context_bb = b;
7721 :
7722 47316755 : for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
7723 : {
7724 11316252 : gphi *phi = gsi.phi ();
7725 11316252 : tree res = PHI_RESULT (phi);
7726 :
7727 22632504 : if (virtual_operand_p (res))
7728 : {
7729 5211606 : gsi_next (&gsi);
7730 5211606 : continue;
7731 : }
7732 :
7733 6104646 : tree sprime = eliminate_avail (b, res);
7734 6104646 : if (sprime
7735 6104646 : && sprime != res)
7736 : {
7737 421528 : if (dump_file && (dump_flags & TDF_DETAILS))
7738 : {
7739 20 : fprintf (dump_file, "Replaced redundant PHI node defining ");
7740 20 : print_generic_expr (dump_file, res);
7741 20 : fprintf (dump_file, " with ");
7742 20 : print_generic_expr (dump_file, sprime);
7743 20 : fprintf (dump_file, "\n");
7744 : }
7745 :
7746 : /* If we inserted this PHI node ourself, it's not an elimination. */
7747 421528 : if (! inserted_exprs
7748 534890 : || ! bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
7749 394558 : eliminations++;
7750 :
7751 : /* If we will propagate into all uses don't bother to do
7752 : anything. */
7753 421528 : if (may_propagate_copy (res, sprime))
7754 : {
7755 : /* Mark the PHI for removal. */
7756 421528 : to_remove.safe_push (phi);
7757 421528 : gsi_next (&gsi);
7758 421528 : continue;
7759 : }
7760 :
7761 0 : remove_phi_node (&gsi, false);
7762 :
7763 0 : if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
7764 0 : sprime = fold_convert (TREE_TYPE (res), sprime);
7765 0 : gimple *stmt = gimple_build_assign (res, sprime);
7766 0 : gimple_stmt_iterator gsi2 = gsi_after_labels (b);
7767 0 : gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
7768 0 : continue;
7769 0 : }
7770 :
7771 5683118 : eliminate_push_avail (b, res);
7772 5683118 : gsi_next (&gsi);
7773 : }
7774 : /* Process each stmt of B via eliminate_stmt.  */
7775 72001006 : for (gimple_stmt_iterator gsi = gsi_start_bb (b);
7776 275420757 : !gsi_end_p (gsi);
7777 239420254 : gsi_next (&gsi))
7778 239420254 : eliminate_stmt (b, &gsi);
7779 :
7780 : /* Replace destination PHI arguments with available leaders on all
 : executable outgoing edges.  */
7781 36000503 : edge_iterator ei;
7782 36000503 : edge e;
7783 84989581 : FOR_EACH_EDGE (e, ei, b->succs)
7784 48989078 : if (e->flags & EDGE_EXECUTABLE)
7785 48454201 : for (gphi_iterator gsi = gsi_start_phis (e->dest);
7786 77565913 : !gsi_end_p (gsi);
7787 29111712 : gsi_next (&gsi))
7788 : {
7789 29111712 : gphi *phi = gsi.phi ();
7790 29111712 : use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
7791 29111712 : tree arg = USE_FROM_PTR (use_p);
7792 48199458 : if (TREE_CODE (arg) != SSA_NAME
7793 29111712 : || virtual_operand_p (arg))
7794 19087746 : continue;
7795 10023966 : tree sprime = eliminate_avail (b, arg);
7796 20047932 : if (sprime && may_propagate_copy (arg, sprime,
7797 10023966 : !(e->flags & EDGE_ABNORMAL)))
7798 10011927 : propagate_value (use_p, sprime);
7799 : }
7800 :
7801 36000503 : vn_context_bb = NULL;
7802 :
7803 36000503 : return NULL;
7804 : }
7805 :
7806 : /* Make no longer available leaders no longer available.  Pops the
 : avail-stack entries pushed while walking the just-finished block
 : (up to the NULL_TREE per-block sentinel); each popped ENTRY either
 : clears the leader slot for its value or re-installs ENTRY as that
 : value's leader.  */
7807 :
7808 : void
7809 40758933 : eliminate_dom_walker::after_dom_children (basic_block)
7810 : {
7811 40758933 : tree entry;
7812 90168416 : while ((entry = avail_stack.pop ()) != NULL_TREE)
7813 : {
7814 49409483 : tree valnum = VN_INFO (entry)->valnum;
7815 49409483 : tree old = avail[SSA_NAME_VERSION (valnum)];
7816 49409483 : if (old == entry)
7817 49365342 : avail[SSA_NAME_VERSION (valnum)] = NULL_TREE;
7818 : else
7819 44141 : avail[SSA_NAME_VERSION (valnum)] = entry;
7820 : }
7821 40758933 : }
7822 :
7823 : /* Remove queued stmts and perform delayed cleanups.  REGION_P says
 : whether we value-numbered a region (and thus may have to keep
 : eliminated defs live as copies for out-of-region uses).  Returns
 : the accumulated TODO_* flags.  */
7824 :
7825 : unsigned
7826 6088360 : eliminate_dom_walker::eliminate_cleanup (bool region_p)
7827 : {
7828 6088360 : statistics_counter_event (cfun, "Eliminated", eliminations);
7829 6088360 : statistics_counter_event (cfun, "Insertions", insertions);
7830 :
7831 : /* We cannot remove stmts during BB walk, especially not release SSA
7832 : names there as this confuses the VN machinery. The stmts ending
7833 : up in to_remove are either stores or simple copies.
7834 : Remove stmts in reverse order to make debug stmt creation possible. */
7835 32849568 : while (!to_remove.is_empty ())
7836 : {
7837 14584432 : bool do_release_defs = true;
7838 14584432 : gimple *stmt = to_remove.pop ();
7839 :
7840 : /* When we are value-numbering a region we do not require exit PHIs to
7841 : be present so we have to make sure to deal with uses outside of the
7842 : region of stmts that we thought are eliminated.
7843 : ??? Note we may be confused by uses in dead regions we didn't run
7844 : elimination on. Rather than checking individual uses we accept
7845 : dead copies to be generated here (gcc.c-torture/execute/20060905-1.c
7846 : contains such example). */
7847 14584432 : if (region_p)
7848 : {
7849 1699421 : if (gphi *phi = dyn_cast <gphi *> (stmt))
7850 : {
7851 1093987 : tree lhs = gimple_phi_result (phi);
7852 1093987 : if (!has_zero_uses (lhs))
7853 : {
7854 22769 : if (dump_file && (dump_flags & TDF_DETAILS))
7855 3 : fprintf (dump_file, "Keeping eliminated stmt live "
7856 : "as copy because of out-of-region uses\n");
7857 22769 : tree sprime = eliminate_avail (gimple_bb (stmt), lhs);
7858 22769 : gimple *copy = gimple_build_assign (lhs, sprime);
7859 22769 : gimple_stmt_iterator gsi
7860 22769 : = gsi_after_labels (gimple_bb (stmt));
7861 22769 : gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
7862 22769 : do_release_defs = false;
7863 : }
7864 : }
7865 605434 : else if (tree lhs = gimple_get_lhs (stmt))
7866 605434 : if (TREE_CODE (lhs) == SSA_NAME
7867 605434 : && !has_zero_uses (lhs))
7868 : {
7869 1711 : if (dump_file && (dump_flags & TDF_DETAILS))
7870 0 : fprintf (dump_file, "Keeping eliminated stmt live "
7871 : "as copy because of out-of-region uses\n");
7872 1711 : tree sprime = eliminate_avail (gimple_bb (stmt), lhs);
7873 1711 : gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
7874 1711 : if (is_gimple_assign (stmt))
7875 : {
7876 1711 : gimple_assign_set_rhs_from_tree (&gsi, sprime);
7877 1711 : stmt = gsi_stmt (gsi);
7878 1711 : update_stmt (stmt);
7879 1711 : if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
7880 0 : bitmap_set_bit (need_eh_cleanup, gimple_bb (stmt)->index);
7881 1711 : continue;
7882 : }
7883 : else
7884 : {
7885 0 : gimple *copy = gimple_build_assign (lhs, sprime);
7886 0 : gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
7887 0 : do_release_defs = false;
7888 : }
7889 : }
7890 : }
7891 :
7892 14582721 : if (dump_file && (dump_flags & TDF_DETAILS))
7893 : {
7894 21773 : fprintf (dump_file, "Removing dead stmt ");
7895 21773 : print_gimple_stmt (dump_file, stmt, 0, TDF_NONE);
7896 : }
7897 :
7898 14582721 : gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
7899 14582721 : if (gimple_code (stmt) == GIMPLE_PHI)
7900 1686405 : remove_phi_node (&gsi, do_release_defs);
7901 : else
7902 : {
7903 12896316 : basic_block bb = gimple_bb (stmt);
7904 12896316 : unlink_stmt_vdef (stmt);
7905 12896316 : if (gsi_remove (&gsi, true))
7906 26543 : bitmap_set_bit (need_eh_cleanup, bb->index);
7907 12896316 : if (is_gimple_call (stmt) && stmt_can_make_abnormal_goto (stmt))
7908 2 : bitmap_set_bit (need_ab_cleanup, bb->index);
7909 12896316 : if (do_release_defs)
7910 12896316 : release_defs (stmt);
7911 : }
7912 :
7913 : /* Removing a stmt may expose a forwarder block. */
7914 14582721 : el_todo |= TODO_cleanup_cfg;
7915 : }
7916 :
7917 : /* Fixup stmts that became noreturn calls. This may require splitting
7918 : blocks and thus isn't possible during the dominator walk. Do this
7919 : in reverse order so we don't inadvertently remove a stmt we want to
7920 : fixup by visiting a dominating now noreturn call first. */
7921 6088416 : while (!to_fixup.is_empty ())
7922 : {
7923 56 : gimple *stmt = to_fixup.pop ();
7924 :
7925 56 : if (dump_file && (dump_flags & TDF_DETAILS))
7926 : {
7927 0 : fprintf (dump_file, "Fixing up noreturn call ");
7928 0 : print_gimple_stmt (dump_file, stmt, 0);
7929 : }
7930 :
7931 56 : if (fixup_noreturn_call (stmt))
7932 56 : el_todo |= TODO_cleanup_cfg;
7933 : }
7934 :
7935 6088360 : bool do_eh_cleanup = !bitmap_empty_p (need_eh_cleanup);
7936 6088360 : bool do_ab_cleanup = !bitmap_empty_p (need_ab_cleanup);
7937 :
7938 6088360 : if (do_eh_cleanup)
7939 10628 : gimple_purge_all_dead_eh_edges (need_eh_cleanup);
7940 :
7941 6088360 : if (do_ab_cleanup)
7942 2 : gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);
7943 :
7944 6088360 : if (do_eh_cleanup || do_ab_cleanup)
7945 10630 : el_todo |= TODO_cleanup_cfg;
7946 :
7947 6088360 : return el_todo;
7948 : }
7949 :
7950 : /* Eliminate fully redundant computations.  Walks the dominator tree
 : of the current function with an eliminate_dom_walker temporarily
 : installed as the global rpo_avail and returns the TODO_* flags
 : produced by the delayed cleanup.  */
7951 :
7952 : unsigned
7953 4228838 : eliminate_with_rpo_vn (bitmap inserted_exprs)
7954 : {
7955 4228838 : eliminate_dom_walker walker (CDI_DOMINATORS, inserted_exprs);
7956 :
7957 4228838 : eliminate_dom_walker *saved_rpo_avail = rpo_avail; /* Save/restore around the walk.  */
7958 4228838 : rpo_avail = &walker;
7959 4228838 : walker.walk (cfun->cfg->x_entry_block_ptr);
7960 4228838 : rpo_avail = saved_rpo_avail;
7961 :
7962 4228838 : return walker.eliminate_cleanup ();
7963 4228838 : }
7964 :
7965 : static unsigned
7966 : do_rpo_vn_1 (function *fn, edge entry, bitmap exit_bbs,
7967 : bool iterate, bool eliminate, bool skip_entry_phis,
7968 : vn_lookup_kind kind);
7969 :
: /* Run RPO value numbering over the current function with lookup kind
 : KIND and assign value ids; elimination is not performed here (note
 : the false ELIMINATE argument to do_rpo_vn_1).  */
7970 : void
7971 961783 : run_rpo_vn (vn_lookup_kind kind)
7972 : {
7973 961783 : do_rpo_vn_1 (cfun, NULL, NULL, true, false, false, kind);
7974 :
7975 : /* ??? Prune requirement of these. */
7976 961783 : constant_to_value_id = new hash_table<vn_constant_hasher> (23);
7977 :
7978 : /* Initialize the value ids and prune out remaining VN_TOPs
7979 : from dead code. */
7980 961783 : tree name;
7981 961783 : unsigned i;
7982 46621607 : FOR_EACH_SSA_NAME (i, name, cfun)
7983 : {
7984 33167560 : vn_ssa_aux_t info = VN_INFO (name);
7985 33167560 : if (!info->visited
7986 33091451 : || info->valnum == VN_TOP)
7987 76109 : info->valnum = name;
7988 33167560 : if (info->valnum == name)
7989 32042883 : info->value_id = get_next_value_id ();
7990 1124677 : else if (is_gimple_min_invariant (info->valnum))
7991 38622 : info->value_id = get_or_alloc_constant_value_id (info->valnum);
7992 : }
7993 :
7994 : /* Propagate.  Copy the value id of the value an SSA name maps to.  */
7995 46621607 : FOR_EACH_SSA_NAME (i, name, cfun)
7996 : {
7997 33167560 : vn_ssa_aux_t info = VN_INFO (name);
7998 33167560 : if (TREE_CODE (info->valnum) == SSA_NAME
7999 33128938 : && info->valnum != name
8000 34253615 : && info->value_id != VN_INFO (info->valnum)->value_id)
8001 1086055 : info->value_id = VN_INFO (info->valnum)->value_id;
8002 : }
8003 :
8004 961783 : set_hashtable_value_ids ();
8005 :
8006 961783 : if (dump_file && (dump_flags & TDF_DETAILS))
8007 : {
8008 14 : fprintf (dump_file, "Value numbers:\n");
8009 406 : FOR_EACH_SSA_NAME (i, name, cfun)
8010 : {
8011 307 : if (VN_INFO (name)->visited
8012 307 : && SSA_VAL (name) != name)
8013 : {
8014 33 : print_generic_expr (dump_file, name);
8015 33 : fprintf (dump_file, " = ");
8016 33 : print_generic_expr (dump_file, SSA_VAL (name));
8017 33 : fprintf (dump_file, " (%04d)\n", VN_INFO (name)->value_id);
8018 : }
8019 : }
8020 : }
8021 961783 : }
8022 :
8023 : /* Free VN associated data structures: the valid table, the VN
 : obstacks, the per-SSA-name aux info and the constant-to-value-id
 : map.  */
8024 :
8025 : void
8026 6107814 : free_rpo_vn (void)
8027 : {
8028 6107814 : free_vn_table (valid_info);
8029 6107814 : XDELETE (valid_info);
8030 6107814 : obstack_free (&vn_tables_obstack, NULL);
8031 6107814 : obstack_free (&vn_tables_insert_obstack, NULL);
8032 :
8033 6107814 : vn_ssa_aux_iterator_type it;
8034 6107814 : vn_ssa_aux_t info;
8035 345265558 : FOR_EACH_HASH_TABLE_ELEMENT (*vn_ssa_aux_hash, info, vn_ssa_aux_t, it)
8036 169578872 : if (info->needs_insertion) /* Names flagged needs_insertion were created by VN; release them.  */
8037 4067068 : release_ssa_name (info->name);
8038 6107814 : obstack_free (&vn_ssa_aux_obstack, NULL);
8039 6107814 : delete vn_ssa_aux_hash;
8040 :
8041 6107814 : delete constant_to_value_id;
8042 6107814 : constant_to_value_id = NULL;
8043 6107814 : }
8044 :
8045 : /* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables.
 : Returns the recorded value for RES_OP (an available leader when used
 : from expression simplification), or NULL_TREE when nothing is known.  */
8046 :
8047 : static tree
8048 22780754 : vn_lookup_simplify_result (gimple_match_op *res_op)
8049 : {
8050 22780754 : if (!res_op->code.is_tree_code ())
8051 : return NULL_TREE;
8052 22777594 : tree *ops = res_op->ops;
8053 22777594 : unsigned int length = res_op->num_ops;
8054 22777594 : if (res_op->code == CONSTRUCTOR
8055 : /* ??? We're arriving here with SCCVNs view, decomposed CONSTRUCTOR
8056 : and GIMPLEs / match-and-simplifies, CONSTRUCTOR as GENERIC tree. */
8057 22777594 : && TREE_CODE (res_op->ops[0]) == CONSTRUCTOR)
8058 : {
8059 : /* Decompose the GENERIC CONSTRUCTOR into its element values.  */
8060 1002 : length = CONSTRUCTOR_NELTS (res_op->ops[0]);
8061 1002 : ops = XALLOCAVEC (tree, length);
8062 4574 : for (unsigned i = 0; i < length; ++i)
8063 3572 : ops[i] = CONSTRUCTOR_ELT (res_op->ops[0], i)->value;
8064 : }
8065 22777594 : vn_nary_op_t vnresult = NULL;
8066 22777594 : tree res = vn_nary_op_lookup_pieces (length, (tree_code) res_op->code,
8067 : res_op->type, ops, &vnresult);
8068 : /* If this is used from expression simplification make sure to
8069 : return an available expression. */
8070 22777594 : if (res && TREE_CODE (res) == SSA_NAME && mprts_hook && rpo_avail)
8071 2216028 : res = rpo_avail->eliminate_avail (vn_context_bb, res);
8072 : return res;
8073 : }
8073 :
8074 : /* Return a leader for OP's value that is valid at BB, or NULL_TREE
 : when no recorded availability dominates BB.  */
8075 :
8076 : tree
8077 264259713 : rpo_elim::eliminate_avail (basic_block bb, tree op)
8078 : {
8079 264259713 : bool visited;
8080 264259713 : tree valnum = SSA_VAL (op, &visited);
8081 : /* If we didn't visit OP then it must be defined outside of the
8082 : region we process and also dominate it. So it is available. */
8083 264259713 : if (!visited)
8084 : return op;
8085 262124168 : if (TREE_CODE (valnum) == SSA_NAME)
8086 : {
8087 248213100 : if (SSA_NAME_IS_DEFAULT_DEF (valnum))
8088 : return valnum;
8089 241601139 : vn_ssa_aux_t valnum_info = VN_INFO (valnum);
8090 241601139 : vn_avail *av = valnum_info->avail;
8091 241601139 : if (!av)
8092 : {
8093 : /* See above. But when there's availability info prefer
8094 : what we recorded there for example to preserve LC SSA. */
8095 82606569 : if (!valnum_info->visited)
8096 : return valnum;
8097 : return NULL_TREE;
8098 : }
8099 158994570 : if (av->location == bb->index)
8100 : /* On tramp3d 90% of the cases are here. */
8101 105190208 : return ssa_name (av->leader);
8102 67697965 : do
8103 : {
8104 67697965 : basic_block abb = BASIC_BLOCK_FOR_FN (cfun, av->location);
8105 : /* ??? During elimination we have to use availability at the
8106 : definition site of a use we try to replace. This
8107 : is required to not run into inconsistencies because
8108 : of dominated_by_p_w_unex behavior and removing a definition
8109 : while not replacing all uses.
8110 : ??? We could try to consistently walk dominators
8111 : ignoring non-executable regions. The nearest common
8112 : dominator of bb and abb is where we can stop walking. We
8113 : may also be able to "pre-compute" (bits of) the next immediate
8114 : (non-)dominator during the RPO walk when marking edges as
8115 : executable. */
8116 67697965 : if (dominated_by_p_w_unex (bb, abb, true))
8117 : {
8118 49978216 : tree leader = ssa_name (av->leader);
8119 : /* Prevent eliminations that break loop-closed SSA. */
8120 49978216 : if (loops_state_satisfies_p (LOOP_CLOSED_SSA)
8121 3104728 : && ! SSA_NAME_IS_DEFAULT_DEF (leader)
8122 53082944 : && ! flow_bb_inside_loop_p (gimple_bb (SSA_NAME_DEF_STMT
8123 3104728 : (leader))->loop_father,
8124 : bb))
8125 : return NULL_TREE;
8126 49899415 : if (dump_file && (dump_flags & TDF_DETAILS))
8127 : {
8128 3540 : print_generic_expr (dump_file, leader);
8129 3540 : fprintf (dump_file, " is available for ");
8130 3540 : print_generic_expr (dump_file, valnum);
8131 3540 : fprintf (dump_file, "\n");
8132 : }
8133 : /* On tramp3d 99% of the _remaining_ cases succeed at
8134 : the first entry. */
8135 49899415 : return leader;
8136 : }
8137 : /* ??? Can we somehow skip to the immediate dominator
8138 : RPO index (bb_to_rpo)? Again, maybe not worth, on
8139 : tramp3d the worst number of elements in the vector is 9. */
8140 17719749 : av = av->next;
8141 : }
8142 17719749 : while (av);
8143 : /* While we prefer avail we have to fall back to using the value
8144 : directly if defined outside of the region when none of the
8145 : available defs suit. */
8146 3826146 : if (!valnum_info->visited)
8147 : return valnum;
8148 : }
8149 13911068 : else if (valnum != VN_TOP)
8150 : /* valnum is is_gimple_min_invariant. */
8151 : return valnum;
8152 : return NULL_TREE;
8153 : }
8154 :
8155 : /* Make LEADER a leader for its value at BB by pushing a new (or
 : recycled) vn_avail record onto the value's availability chain.  */
8156 :
8157 : void
8158 95835417 : rpo_elim::eliminate_push_avail (basic_block bb, tree leader)
8159 : {
8160 95835417 : tree valnum = VN_INFO (leader)->valnum;
8161 95835417 : if (valnum == VN_TOP
8162 95835417 : || is_gimple_min_invariant (valnum))
8163 0 : return;
8164 95835417 : if (dump_file && (dump_flags & TDF_DETAILS))
8165 : {
8166 323450 : fprintf (dump_file, "Making available beyond BB%d ", bb->index);
8167 323450 : print_generic_expr (dump_file, leader);
8168 323450 : fprintf (dump_file, " for value ");
8169 323450 : print_generic_expr (dump_file, valnum);
8170 323450 : fprintf (dump_file, "\n");
8171 : }
8172 95835417 : vn_ssa_aux_t value = VN_INFO (valnum);
8173 95835417 : vn_avail *av;
8174 95835417 : if (m_avail_freelist) /* Prefer recycling a freed record.  */
8175 : {
8176 18667328 : av = m_avail_freelist;
8177 18667328 : m_avail_freelist = m_avail_freelist->next;
8178 : }
8179 : else
8180 77168089 : av = XOBNEW (&vn_ssa_aux_obstack, vn_avail);
8181 95835417 : av->location = bb->index;
8182 95835417 : av->leader = SSA_NAME_VERSION (leader);
8183 95835417 : av->next = value->avail;
8184 95835417 : av->next_undo = last_pushed_avail; /* Undo chain, see last_pushed_avail.  */
8185 95835417 : last_pushed_avail = value;
8186 95835417 : value->avail = av;
8187 : }
8188 :
8189 : /* Valueization hook for RPO VN plus required state.  Returns NAME's
 : value when that is a constant or an available SSA leader at the
 : current vn_context_bb, otherwise NAME itself.  */
8190 :
8191 : tree
8192 2034320036 : rpo_vn_valueize (tree name)
8193 : {
8194 2034320036 : if (TREE_CODE (name) == SSA_NAME)
8195 : {
8196 1988801636 : vn_ssa_aux_t val = VN_INFO (name);
8197 1988801636 : if (val)
8198 : {
8199 1988801636 : tree tem = val->valnum;
8200 1988801636 : if (tem != VN_TOP && tem != name)
8201 : {
8202 107056427 : if (TREE_CODE (tem) != SSA_NAME)
8203 : return tem;
8204 : /* For all values we only valueize to an available leader
8205 : which means we can use SSA name info without restriction. */
8206 90237653 : tem = rpo_avail->eliminate_avail (vn_context_bb, tem);
8207 90237653 : if (tem)
8208 : return tem;
8209 : }
8210 : }
8211 : }
8212 : return name;
8213 : }
8214 :
8215 : /* Insert on PRED_E predicates derived from CODE OPS being true besides the
8216 : inverted condition; e.g. a < b also implies a != b, a <= b, and the
 : falsity of a > b and a == b.  */
8217 :
8218 : static void
8219 27193543 : insert_related_predicates_on_edge (enum tree_code code, tree *ops, edge pred_e)
8220 : {
8221 27193543 : switch (code)
8222 : {
8223 1371985 : case LT_EXPR:
8224 : /* a < b -> a {!,<}= b */
8225 1371985 : vn_nary_op_insert_pieces_predicated (2, NE_EXPR, boolean_type_node,
8226 : ops, boolean_true_node, 0, pred_e);
8227 1371985 : vn_nary_op_insert_pieces_predicated (2, LE_EXPR, boolean_type_node,
8228 : ops, boolean_true_node, 0, pred_e);
8229 : /* a < b -> ! a {>,=} b */
8230 1371985 : vn_nary_op_insert_pieces_predicated (2, GT_EXPR, boolean_type_node,
8231 : ops, boolean_false_node, 0, pred_e);
8232 1371985 : vn_nary_op_insert_pieces_predicated (2, EQ_EXPR, boolean_type_node,
8233 : ops, boolean_false_node, 0, pred_e);
8234 1371985 : break;
8235 3413883 : case GT_EXPR:
8236 : /* a > b -> a {!,>}= b */
8237 3413883 : vn_nary_op_insert_pieces_predicated (2, NE_EXPR, boolean_type_node,
8238 : ops, boolean_true_node, 0, pred_e);
8239 3413883 : vn_nary_op_insert_pieces_predicated (2, GE_EXPR, boolean_type_node,
8240 : ops, boolean_true_node, 0, pred_e);
8241 : /* a > b -> ! a {<,=} b */
8242 3413883 : vn_nary_op_insert_pieces_predicated (2, LT_EXPR, boolean_type_node,
8243 : ops, boolean_false_node, 0, pred_e);
8244 3413883 : vn_nary_op_insert_pieces_predicated (2, EQ_EXPR, boolean_type_node,
8245 : ops, boolean_false_node, 0, pred_e);
8246 3413883 : break;
8247 9292416 : case EQ_EXPR:
8248 : /* a == b -> ! a {<,>} b */
8249 9292416 : vn_nary_op_insert_pieces_predicated (2, LT_EXPR, boolean_type_node,
8250 : ops, boolean_false_node, 0, pred_e);
8251 9292416 : vn_nary_op_insert_pieces_predicated (2, GT_EXPR, boolean_type_node,
8252 : ops, boolean_false_node, 0, pred_e);
8253 9292416 : break;
8254 : case LE_EXPR:
8255 : case GE_EXPR:
8256 : case NE_EXPR:
8257 : /* Nothing besides inverted condition. */
8258 : break;
8259 27193543 : default:;
8260 : }
8261 27193543 : }
8262 :
8263 : /* Insert on the TRUE_E true and FALSE_E false predicates
8264 : derived from LHS CODE RHS; either edge may be NULL.  Also records
 : the inverted comparison, related predicates for integral types,
 : and recurses into (A CMP B) ==/!= 0 and (a | b) ==/!= 0 forms.  */
8265 :
8266 : static void
8267 23194886 : insert_predicates_for_cond (tree_code code, tree lhs, tree rhs,
8268 : edge true_e, edge false_e)
8269 : {
8270 : /* If both edges are null, then there is nothing to be done. */
8271 23194886 : if (!true_e && !false_e)
8272 1304212 : return;
8273 :
8274 : /* Canonicalize the comparison if needed, putting
8275 : the constant in the rhs. */
8276 21894169 : if (tree_swap_operands_p (lhs, rhs))
8277 : {
8278 16523 : std::swap (lhs, rhs);
8279 16523 : code = swap_tree_comparison (code);
8280 : }
8281 :
8282 : /* If the lhs is not an SSA name, don't record anything. */
8283 21894169 : if (TREE_CODE (lhs) != SSA_NAME)
8284 : return;
8285 :
8286 21890674 : tree_code icode = invert_tree_comparison (code, HONOR_NANS (lhs));
8287 21890674 : tree ops[2];
8288 21890674 : ops[0] = lhs;
8289 21890674 : ops[1] = rhs;
8290 21890674 : if (true_e)
8291 17864362 : vn_nary_op_insert_pieces_predicated (2, code, boolean_type_node, ops,
8292 : boolean_true_node, 0, true_e);
8293 21890674 : if (false_e)
8294 16850566 : vn_nary_op_insert_pieces_predicated (2, code, boolean_type_node, ops,
8295 : boolean_false_node, 0, false_e);
8296 21890674 : if (icode != ERROR_MARK)
8297 : {
8298 21646587 : if (true_e)
8299 17711827 : vn_nary_op_insert_pieces_predicated (2, icode, boolean_type_node, ops,
8300 : boolean_false_node, 0, true_e);
8301 21646587 : if (false_e)
8302 16653841 : vn_nary_op_insert_pieces_predicated (2, icode, boolean_type_node, ops,
8303 : boolean_true_node, 0, false_e);
8304 : }
8305 : /* Relax for non-integers, inverted condition handled
8306 : above. */
8307 21890674 : if (INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
8308 : {
8309 17140473 : if (true_e)
8310 14057780 : insert_related_predicates_on_edge (code, ops, true_e);
8311 17140473 : if (false_e)
8312 13135763 : insert_related_predicates_on_edge (icode, ops, false_e);
8313 : }
8314 21890674 : if (integer_zerop (rhs)
8315 21890674 : && (code == NE_EXPR || code == EQ_EXPR))
8316 : {
8317 9120242 : gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
8318 : /* (A CMP B) != 0 is the same as (A CMP B).
8319 : (A CMP B) == 0 is just (A CMP B) with the edges swapped. */
8320 9120242 : if (is_gimple_assign (def_stmt)
8321 9120242 : && TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_comparison)
8322 : {
8323 430304 : tree_code nc = gimple_assign_rhs_code (def_stmt);
8324 430304 : tree nlhs = vn_valueize (gimple_assign_rhs1 (def_stmt));
8325 430304 : tree nrhs = vn_valueize (gimple_assign_rhs2 (def_stmt));
8326 430304 : edge nt = true_e;
8327 430304 : edge nf = false_e;
8328 430304 : if (code == EQ_EXPR)
8329 304375 : std::swap (nt, nf);
8330 430304 : if (lhs != nlhs)
8331 430304 : insert_predicates_for_cond (nc, nlhs, nrhs, nt, nf);
8332 : }
8333 : /* (a | b) == 0 ->
8334 : on true edge assert: a == 0 & b == 0. */
8335 : /* (a | b) != 0 ->
8336 : on false edge assert: a == 0 & b == 0. */
8337 9120242 : if (is_gimple_assign (def_stmt)
8338 9120242 : && gimple_assign_rhs_code (def_stmt) == BIT_IOR_EXPR)
8339 : {
8340 255050 : edge e = code == EQ_EXPR ? true_e : false_e;
8341 255050 : tree nlhs;
8342 :
8343 255050 : nlhs = vn_valueize (gimple_assign_rhs1 (def_stmt));
8344 : /* A valueization of the `a` might return the old lhs
8345 : which is already handled above. */
8346 255050 : if (nlhs != lhs)
8347 255050 : insert_predicates_for_cond (EQ_EXPR, nlhs, rhs, e, nullptr);
8348 :
8349 : /* A valueization of the `b` might return the old lhs
8350 : which is already handled above. */
8351 255050 : nlhs = vn_valueize (gimple_assign_rhs2 (def_stmt));
8352 255050 : if (nlhs != lhs)
8353 255050 : insert_predicates_for_cond (EQ_EXPR, nlhs, rhs, e, nullptr);
8354 : }
8355 : }
8356 : }
8357 :
8358 : /* Main stmt worker for RPO VN, process BB. */
8359 :
8360 : static unsigned
8361 60820351 : process_bb (rpo_elim &avail, basic_block bb,
8362 : bool bb_visited, bool iterate_phis, bool iterate, bool eliminate,
8363 : bool do_region, bitmap exit_bbs, bool skip_phis)
8364 : {
8365 60820351 : unsigned todo = 0;
8366 60820351 : edge_iterator ei;
8367 60820351 : edge e;
8368 :
8369 60820351 : vn_context_bb = bb;
8370 :
8371 : /* If we are in loop-closed SSA preserve this state. This is
8372 : relevant when called on regions from outside of FRE/PRE. */
8373 60820351 : bool lc_phi_nodes = false;
8374 60820351 : if (!skip_phis
8375 60820351 : && loops_state_satisfies_p (LOOP_CLOSED_SSA))
8376 3688370 : FOR_EACH_EDGE (e, ei, bb->preds)
8377 2228250 : if (e->src->loop_father != e->dest->loop_father
8378 2228250 : && flow_loop_nested_p (e->dest->loop_father,
8379 : e->src->loop_father))
8380 : {
8381 : lc_phi_nodes = true;
8382 : break;
8383 : }
8384 :
8385 : /* When we visit a loop header substitute into loop info. */
8386 60820351 : if (!iterate && eliminate && bb->loop_father->header == bb)
8387 : {
8388 : /* Keep fields in sync with substitute_in_loop_info. */
8389 941903 : if (bb->loop_father->nb_iterations)
8390 155626 : bb->loop_father->nb_iterations
8391 155626 : = simplify_replace_tree (bb->loop_father->nb_iterations,
8392 : NULL_TREE, NULL_TREE, &vn_valueize_for_srt);
8393 : }
8394 :
8395 : /* Value-number all defs in the basic-block. */
8396 60820351 : if (!skip_phis)
8397 87282196 : for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
8398 26490939 : gsi_next (&gsi))
8399 : {
8400 26490939 : gphi *phi = gsi.phi ();
8401 26490939 : tree res = PHI_RESULT (phi);
8402 26490939 : vn_ssa_aux_t res_info = VN_INFO (res);
8403 26490939 : if (!bb_visited)
8404 : {
8405 18660372 : gcc_assert (!res_info->visited);
8406 18660372 : res_info->valnum = VN_TOP;
8407 18660372 : res_info->visited = true;
8408 : }
8409 :
8410 : /* When not iterating force backedge values to varying. */
8411 26490939 : visit_stmt (phi, !iterate_phis);
8412 52981878 : if (virtual_operand_p (res))
8413 10496276 : continue;
8414 :
8415 : /* Eliminate */
8416 : /* The interesting case is gcc.dg/tree-ssa/pr22230.c for correctness
8417 : how we handle backedges and availability.
8418 : And gcc.dg/tree-ssa/ssa-sccvn-2.c for optimization. */
8419 15994663 : tree val = res_info->valnum;
8420 15994663 : if (res != val && !iterate && eliminate)
8421 : {
8422 1387020 : if (tree leader = avail.eliminate_avail (bb, res))
8423 : {
8424 1265480 : if (leader != res
8425 : /* Preserve loop-closed SSA form. */
8426 1265480 : && (! lc_phi_nodes
8427 6671 : || is_gimple_min_invariant (leader)))
8428 : {
8429 1264877 : if (dump_file && (dump_flags & TDF_DETAILS))
8430 : {
8431 203 : fprintf (dump_file, "Replaced redundant PHI node "
8432 : "defining ");
8433 203 : print_generic_expr (dump_file, res);
8434 203 : fprintf (dump_file, " with ");
8435 203 : print_generic_expr (dump_file, leader);
8436 203 : fprintf (dump_file, "\n");
8437 : }
8438 1264877 : avail.eliminations++;
8439 :
8440 1264877 : if (may_propagate_copy (res, leader))
8441 : {
8442 : /* Schedule for removal. */
8443 1264877 : avail.to_remove.safe_push (phi);
8444 1264877 : continue;
8445 : }
8446 : /* ??? Else generate a copy stmt. */
8447 : }
8448 : }
8449 : }
8450 : /* Only make defs available that not already are. But make
8451 : sure loop-closed SSA PHI node defs are picked up for
8452 : downstream uses. */
8453 14729786 : if (lc_phi_nodes
8454 14729786 : || res == val
8455 14729786 : || ! avail.eliminate_avail (bb, res))
8456 11257447 : avail.eliminate_push_avail (bb, res);
8457 : }
8458 :
8459 : /* For empty BBs mark outgoing edges executable. For non-empty BBs
8460 : we do this when processing the last stmt as we have to do this
8461 : before elimination which otherwise forces GIMPLE_CONDs to
8462 : if (1 != 0) style when seeing non-executable edges. */
8463 121640702 : if (gsi_end_p (gsi_start_bb (bb)))
8464 : {
8465 13947858 : FOR_EACH_EDGE (e, ei, bb->succs)
8466 : {
8467 6973929 : if (!(e->flags & EDGE_EXECUTABLE))
8468 : {
8469 4736479 : if (dump_file && (dump_flags & TDF_DETAILS))
8470 6169 : fprintf (dump_file,
8471 : "marking outgoing edge %d -> %d executable\n",
8472 6169 : e->src->index, e->dest->index);
8473 4736479 : e->flags |= EDGE_EXECUTABLE;
8474 4736479 : e->dest->flags |= BB_EXECUTABLE;
8475 : }
8476 2237450 : else if (!(e->dest->flags & BB_EXECUTABLE))
8477 : {
8478 0 : if (dump_file && (dump_flags & TDF_DETAILS))
8479 0 : fprintf (dump_file,
8480 : "marking destination block %d reachable\n",
8481 : e->dest->index);
8482 0 : e->dest->flags |= BB_EXECUTABLE;
8483 : }
8484 : }
8485 : }
8486 121640702 : for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
8487 480112958 : !gsi_end_p (gsi); gsi_next (&gsi))
8488 : {
8489 419292607 : ssa_op_iter i;
8490 419292607 : tree op;
8491 419292607 : if (!bb_visited)
8492 : {
8493 479691116 : FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
8494 : {
8495 135484707 : vn_ssa_aux_t op_info = VN_INFO (op);
8496 135484707 : gcc_assert (!op_info->visited);
8497 135484707 : op_info->valnum = VN_TOP;
8498 135484707 : op_info->visited = true;
8499 : }
8500 :
8501 : /* We somehow have to deal with uses that are not defined
8502 : in the processed region. Forcing unvisited uses to
8503 : varying here doesn't play well with def-use following during
8504 : expression simplification, so we deal with this by checking
8505 : the visited flag in SSA_VAL. */
8506 : }
8507 :
8508 419292607 : visit_stmt (gsi_stmt (gsi));
8509 :
8510 419292607 : gimple *last = gsi_stmt (gsi);
8511 419292607 : e = NULL;
8512 419292607 : switch (gimple_code (last))
8513 : {
8514 108009 : case GIMPLE_SWITCH:
8515 108009 : e = find_taken_edge (bb, vn_valueize (gimple_switch_index
8516 108009 : (as_a <gswitch *> (last))));
8517 108009 : break;
8518 24451502 : case GIMPLE_COND:
8519 24451502 : {
8520 24451502 : tree lhs = vn_valueize (gimple_cond_lhs (last));
8521 24451502 : tree rhs = vn_valueize (gimple_cond_rhs (last));
8522 24451502 : tree_code cmpcode = gimple_cond_code (last);
8523 : /* Canonicalize the comparison if needed, putting
8524 : the constant in the rhs. */
8525 24451502 : if (tree_swap_operands_p (lhs, rhs))
8526 : {
8527 829091 : std::swap (lhs, rhs);
8528 829091 : cmpcode = swap_tree_comparison (cmpcode);
8529 : }
8530 24451502 : tree val = gimple_simplify (cmpcode,
8531 : boolean_type_node, lhs, rhs,
8532 : NULL, vn_valueize);
8533 : /* If the condition didn't simplify see if we have recorded
8534 : an expression from so far taken edges. */
8535 24451502 : if (! val || TREE_CODE (val) != INTEGER_CST)
8536 : {
8537 22609052 : vn_nary_op_t vnresult;
8538 22609052 : tree ops[2];
8539 22609052 : ops[0] = lhs;
8540 22609052 : ops[1] = rhs;
8541 22609052 : val = vn_nary_op_lookup_pieces (2, cmpcode,
8542 : boolean_type_node, ops,
8543 : &vnresult);
8544 : /* Got back a ssa name, then try looking up `val != 0`
8545 : as it might have been recorded that way. */
8546 22609052 : if (val && TREE_CODE (val) == SSA_NAME)
8547 : {
8548 153682 : ops[0] = val;
8549 153682 : ops[1] = build_zero_cst (TREE_TYPE (val));
8550 153682 : val = vn_nary_op_lookup_pieces (2, NE_EXPR,
8551 : boolean_type_node, ops,
8552 : &vnresult);
8553 : }
8554 : /* Did we get a predicated value? */
8555 22609036 : if (! val && vnresult && vnresult->predicated_values)
8556 : {
8557 1370587 : val = vn_nary_op_get_predicated_value (vnresult, bb);
8558 1370587 : if (val && dump_file && (dump_flags & TDF_DETAILS))
8559 : {
8560 2 : fprintf (dump_file, "Got predicated value ");
8561 2 : print_generic_expr (dump_file, val, TDF_NONE);
8562 2 : fprintf (dump_file, " for ");
8563 2 : print_gimple_stmt (dump_file, last, TDF_SLIM);
8564 : }
8565 : }
8566 : }
8567 22609052 : if (val)
8568 2197020 : e = find_taken_edge (bb, val);
8569 24451502 : if (! e)
8570 : {
8571 : /* If we didn't manage to compute the taken edge then
8572 : push predicated expressions for the condition itself
8573 : and related conditions to the hashtables. This allows
8574 : simplification of redundant conditions which is
8575 : important as early cleanup. */
8576 22254482 : edge true_e, false_e;
8577 22254482 : extract_true_false_edges_from_block (bb, &true_e, &false_e);
8578 538850 : if ((do_region && bitmap_bit_p (exit_bbs, true_e->dest->index))
8579 22483311 : || !can_track_predicate_on_edge (true_e))
8580 4900576 : true_e = NULL;
8581 538850 : if ((do_region && bitmap_bit_p (exit_bbs, false_e->dest->index))
8582 22457439 : || !can_track_predicate_on_edge (false_e))
8583 5796078 : false_e = NULL;
8584 22254482 : insert_predicates_for_cond (cmpcode, lhs, rhs, true_e, false_e);
8585 : }
8586 : break;
8587 : }
8588 1394 : case GIMPLE_GOTO:
8589 1394 : e = find_taken_edge (bb, vn_valueize (gimple_goto_dest (last)));
8590 1394 : break;
8591 : default:
8592 : e = NULL;
8593 : }
8594 419292607 : if (e)
8595 : {
8596 2200610 : todo = TODO_cleanup_cfg;
8597 2200610 : if (!(e->flags & EDGE_EXECUTABLE))
8598 : {
8599 1738648 : if (dump_file && (dump_flags & TDF_DETAILS))
8600 35 : fprintf (dump_file,
8601 : "marking known outgoing %sedge %d -> %d executable\n",
8602 35 : e->flags & EDGE_DFS_BACK ? "back-" : "",
8603 35 : e->src->index, e->dest->index);
8604 1738648 : e->flags |= EDGE_EXECUTABLE;
8605 1738648 : e->dest->flags |= BB_EXECUTABLE;
8606 : }
8607 461962 : else if (!(e->dest->flags & BB_EXECUTABLE))
8608 : {
8609 27235 : if (dump_file && (dump_flags & TDF_DETAILS))
8610 1 : fprintf (dump_file,
8611 : "marking destination block %d reachable\n",
8612 : e->dest->index);
8613 27235 : e->dest->flags |= BB_EXECUTABLE;
8614 : }
8615 : }
8616 834183994 : else if (gsi_one_before_end_p (gsi))
8617 : {
8618 126738934 : FOR_EACH_EDGE (e, ei, bb->succs)
8619 : {
8620 75093122 : if (!(e->flags & EDGE_EXECUTABLE))
8621 : {
8622 55031860 : if (dump_file && (dump_flags & TDF_DETAILS))
8623 18490 : fprintf (dump_file,
8624 : "marking outgoing edge %d -> %d executable\n",
8625 18490 : e->src->index, e->dest->index);
8626 55031860 : e->flags |= EDGE_EXECUTABLE;
8627 55031860 : e->dest->flags |= BB_EXECUTABLE;
8628 : }
8629 20061262 : else if (!(e->dest->flags & BB_EXECUTABLE))
8630 : {
8631 2501703 : if (dump_file && (dump_flags & TDF_DETAILS))
8632 5998 : fprintf (dump_file,
8633 : "marking destination block %d reachable\n",
8634 : e->dest->index);
8635 2501703 : e->dest->flags |= BB_EXECUTABLE;
8636 : }
8637 : }
8638 : }
8639 :
8640 : /* Eliminate. That also pushes to avail. */
8641 419292607 : if (eliminate && ! iterate)
8642 105105943 : avail.eliminate_stmt (bb, &gsi);
8643 : else
8644 : /* If not eliminating, make all not already available defs
8645 : available. But avoid picking up dead defs. */
8646 393274147 : FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_DEF)
8647 79087483 : if (! has_zero_uses (op)
8648 79087483 : && ! avail.eliminate_avail (bb, op))
8649 60298752 : avail.eliminate_push_avail (bb, op);
8650 : }
8651 :
8652 : /* Eliminate in destination PHI arguments. Always substitute in dest
8653 : PHIs, even for non-executable edges. This handles region
8654 : exits PHIs. */
8655 60820351 : if (!iterate && eliminate)
8656 32652391 : FOR_EACH_EDGE (e, ei, bb->succs)
8657 19443257 : for (gphi_iterator gsi = gsi_start_phis (e->dest);
8658 37650206 : !gsi_end_p (gsi); gsi_next (&gsi))
8659 : {
8660 18206949 : gphi *phi = gsi.phi ();
8661 18206949 : use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
8662 18206949 : tree arg = USE_FROM_PTR (use_p);
8663 27684970 : if (TREE_CODE (arg) != SSA_NAME
8664 18206949 : || virtual_operand_p (arg))
8665 9478021 : continue;
8666 8728928 : tree sprime;
8667 8728928 : if (SSA_NAME_IS_DEFAULT_DEF (arg))
8668 : {
8669 116401 : sprime = SSA_VAL (arg);
8670 116401 : gcc_assert (TREE_CODE (sprime) != SSA_NAME
8671 : || SSA_NAME_IS_DEFAULT_DEF (sprime));
8672 : }
8673 : else
8674 : /* Look for sth available at the definition block of the argument.
8675 : This avoids inconsistencies between availability there which
8676 : decides if the stmt can be removed and availability at the
8677 : use site. The SSA property ensures that things available
8678 : at the definition are also available at uses. */
8679 8612527 : sprime = avail.eliminate_avail (gimple_bb (SSA_NAME_DEF_STMT (arg)),
8680 : arg);
8681 8728928 : if (sprime
8682 8728928 : && sprime != arg
8683 8728928 : && may_propagate_copy (arg, sprime, !(e->flags & EDGE_ABNORMAL)))
8684 1497375 : propagate_value (use_p, sprime);
8685 : }
8686 :
8687 60820351 : vn_context_bb = NULL;
8688 60820351 : return todo;
8689 : }
8690 :
8691 : /* Unwind state per basic-block. */
8692 :
8693 : struct unwind_state
8694 : {
8695 : /* Times this block has been visited. */
8696 : unsigned visited;
8697 : /* Whether to handle this as iteration point or whether to treat
8698 : incoming backedge PHI values as varying. */
8699 : bool iterate;
8700 : /* Maximum RPO index this block is reachable from. */
8701 : int max_rpo;
8702 : /* Unwind state. */
     : /* Watermark into vn_tables_obstack; do_unwind frees back to this. */
8703 : void *ob_top;
     : /* Heads of the last-inserted chains of the reference, PHI and nary
     : hashtables at the time this block was first processed.  do_unwind
     : pops newer entries until these heads are reached again. */
8704 : vn_reference_t ref_top;
8705 : vn_phi_t phi_top;
8706 : vn_nary_op_t nary_top;
     : /* Head of the pushed-avail chain at that time (the vn_avail of
     : last_pushed_avail), used to prune availability on unwind. */
8707 : vn_avail *avail_top;
8708 : };
8709 :
8710 : /* Unwind the RPO VN state for iteration. */
     : /* TO is the unwind state recorded for the block we iterate back to;
     : AVAIL is the elimination/availability context whose freelist the
     : pruned vn_avail entries are returned to.  All hashtable entries
     : inserted after TO was recorded are removed (or restored to the
     : entry they shadowed) and the tables obstack is freed back to the
     : recorded watermark. */
8711 :
8712 : static void
8713 1890662 : do_unwind (unwind_state *to, rpo_elim &avail)
8714 : {
8715 1890662 : gcc_assert (to->iterate);
     : /* Pop nary entries inserted after TO, newest first. */
8716 34683939 : for (; last_inserted_nary != to->nary_top;
8717 32793277 : last_inserted_nary = last_inserted_nary->next)
8718 : {
8719 32793277 : vn_nary_op_t *slot;
8720 32793277 : slot = valid_info->nary->find_slot_with_hash
8721 32793277 : (last_inserted_nary, last_inserted_nary->hashcode, NO_INSERT);
8722 : /* Predication causes the need to restore previous state. */
8723 32793277 : if ((*slot)->unwind_to)
8724 6639961 : *slot = (*slot)->unwind_to;
8725 : else
8726 26153316 : valid_info->nary->clear_slot (slot);
8727 : }
     : /* Pop PHI entries inserted after TO. */
8728 7445510 : for (; last_inserted_phi != to->phi_top;
8729 5554848 : last_inserted_phi = last_inserted_phi->next)
8730 : {
8731 5554848 : vn_phi_t *slot;
8732 5554848 : slot = valid_info->phis->find_slot_with_hash
8733 5554848 : (last_inserted_phi, last_inserted_phi->hashcode, NO_INSERT);
8734 5554848 : valid_info->phis->clear_slot (slot);
8735 : }
     : /* Pop reference entries inserted after TO, releasing their
     : operand vectors before clearing the slots. */
8736 15082521 : for (; last_inserted_ref != to->ref_top;
8737 13191859 : last_inserted_ref = last_inserted_ref->next)
8738 : {
8739 13191859 : vn_reference_t *slot;
8740 13191859 : slot = valid_info->references->find_slot_with_hash
8741 13191859 : (last_inserted_ref, last_inserted_ref->hashcode, NO_INSERT);
8742 13191859 : (*slot)->operands.release ();
8743 13191859 : valid_info->references->clear_slot (slot);
8744 : }
     : /* Free everything allocated on the tables obstack since TO. */
8745 1890662 : obstack_free (&vn_tables_obstack, to->ob_top);
8746 :
8747 : /* Prune [rpo_idx, ] from avail. */
     : /* Walk the undo chain, unlinking each pushed vn_avail from its
     : SSA name and recycling it on AVAIL's freelist. */
8748 20557990 : for (; last_pushed_avail && last_pushed_avail->avail != to->avail_top;)
8749 : {
8750 18667328 : vn_ssa_aux_t val = last_pushed_avail;
8751 18667328 : vn_avail *av = val->avail;
8752 18667328 : val->avail = av->next;
8753 18667328 : last_pushed_avail = av->next_undo;
8754 18667328 : av->next = avail.m_avail_freelist;
8755 18667328 : avail.m_avail_freelist = av;
8756 : }
8757 1890662 : }
8758 :
8759 : /* Do VN on a SEME region specified by ENTRY and EXIT_BBS in FN.
8760 : If ITERATE is true then treat backedges optimistically as not
8761 : executed and iterate. If ELIMINATE is true then perform
8762 : elimination, otherwise leave that to the caller. If SKIP_ENTRY_PHIS
8763 : is true then force PHI nodes in ENTRY->dest to VARYING. */
     : /* Returns an accumulated TODO_* flag set (e.g. TODO_cleanup_cfg
     : from process_bb and the elimination phases) for the caller. */
8764 :
8765 : static unsigned
8766 6107814 : do_rpo_vn_1 (function *fn, edge entry, bitmap exit_bbs,
8767 : bool iterate, bool eliminate, bool skip_entry_phis,
8768 : vn_lookup_kind kind)
8769 : {
8770 6107814 : unsigned todo = 0;
8771 6107814 : default_vn_walk_kind = kind;
8772 :
8773 : /* We currently do not support region-based iteration when
8774 : elimination is requested. */
8775 6107814 : gcc_assert (!entry || !iterate || !eliminate);
8776 : /* When iterating we need loop info up-to-date. */
8777 6107814 : gcc_assert (!iterate || !loops_state_satisfies_p (LOOPS_NEED_FIXUP));
8778 :
     : /* Without an explicit entry the region is the whole function. */
8779 6107814 : bool do_region = entry != NULL;
8780 6107814 : if (!do_region)
8781 : {
8782 5425900 : entry = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (fn));
8783 5425900 : exit_bbs = BITMAP_ALLOC (NULL);
8784 5425900 : bitmap_set_bit (exit_bbs, EXIT_BLOCK);
8785 : }
8786 :
8787 : /* Clear EDGE_DFS_BACK on "all" entry edges, RPO order compute will
8788 : re-mark those that are contained in the region. */
8789 6107814 : edge_iterator ei;
8790 6107814 : edge e;
8791 12275401 : FOR_EACH_EDGE (e, ei, entry->dest->preds)
8792 6167587 : e->flags &= ~EDGE_DFS_BACK;
8793 :
     : /* Compute the region's reverse post order; when not iterating also
     : collect toplevel SCC extents used to bound greedy processing. */
8794 6107814 : int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (fn) - NUM_FIXED_BLOCKS);
8795 6107814 : auto_vec<std::pair<int, int> > toplevel_scc_extents;
8796 6107814 : int n = rev_post_order_and_mark_dfs_back_seme
8797 7986790 : (fn, entry, exit_bbs, true, rpo, !iterate ? &toplevel_scc_extents : NULL);
8798 :
8799 6107814 : if (!do_region)
8800 5425900 : BITMAP_FREE (exit_bbs);
8801 :
8802 : /* If there are any non-DFS_BACK edges into entry->dest skip
8803 : processing PHI nodes for that block. This supports
8804 : value-numbering loop bodies w/o the actual loop. */
8805 12275400 : FOR_EACH_EDGE (e, ei, entry->dest->preds)
8806 6167587 : if (e != entry
8807 59773 : && !(e->flags & EDGE_DFS_BACK))
8808 : break;
8809 6107814 : if (e != NULL && dump_file && (dump_flags & TDF_DETAILS))
8810 0 : fprintf (dump_file, "Region does not contain all edges into "
8811 : "the entry block, skipping its PHIs.\n");
8812 6107814 : skip_entry_phis |= e != NULL;
8813 :
     : /* Inverse mapping from block index to RPO position. */
8814 6107814 : int *bb_to_rpo = XNEWVEC (int, last_basic_block_for_fn (fn));
8815 56007709 : for (int i = 0; i < n; ++i)
8816 49899895 : bb_to_rpo[rpo[i]] = i;
8817 6107814 : vn_bb_to_rpo = bb_to_rpo;
8818 :
8819 6107814 : unwind_state *rpo_state = XNEWVEC (unwind_state, n);
8820 :
8821 6107814 : rpo_elim avail (entry->dest);
8822 6107814 : rpo_avail = &avail;
8823 :
8824 : /* Verify we have no extra entries into the region. */
8825 6107814 : if (flag_checking && do_region)
8826 : {
8827 681908 : auto_bb_flag bb_in_region (fn);
8828 2072285 : for (int i = 0; i < n; ++i)
8829 : {
8830 1390377 : basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
8831 1390377 : bb->flags |= bb_in_region;
8832 : }
8833 : /* We can't merge the first two loops because we cannot rely
8834 : on EDGE_DFS_BACK for edges not within the region. But if
8835 : we decide to always have the bb_in_region flag we can
8836 : do the checking during the RPO walk itself (but then it's
8837 : also easy to handle MEME conservatively). */
8838 2072285 : for (int i = 0; i < n; ++i)
8839 : {
8840 1390377 : basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
8841 1390377 : edge e;
8842 1390377 : edge_iterator ei;
8843 3041709 : FOR_EACH_EDGE (e, ei, bb->preds)
8844 1651332 : gcc_assert (e == entry
8845 : || (skip_entry_phis && bb == entry->dest)
8846 : || (e->src->flags & bb_in_region));
8847 : }
8848 2072285 : for (int i = 0; i < n; ++i)
8849 : {
8850 1390377 : basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
8851 1390377 : bb->flags &= ~bb_in_region;
8852 : }
8853 681908 : }
8854 :
8855 : /* Create the VN state. For the initial size of the various hashtables
8856 : use a heuristic based on region size and number of SSA names. */
8857 6107814 : unsigned region_size = (((unsigned HOST_WIDE_INT)n * num_ssa_names)
8858 6107814 : / (n_basic_blocks_for_fn (fn) - NUM_FIXED_BLOCKS));
8859 6107814 : VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
8860 6107814 : next_value_id = 1;
8861 6107814 : next_constant_value_id = -1;
8862 :
8863 6107814 : vn_ssa_aux_hash = new hash_table <vn_ssa_aux_hasher> (region_size * 2);
8864 6107814 : gcc_obstack_init (&vn_ssa_aux_obstack);
8865 :
8866 6107814 : gcc_obstack_init (&vn_tables_obstack);
8867 6107814 : gcc_obstack_init (&vn_tables_insert_obstack);
8868 6107814 : valid_info = XCNEW (struct vn_tables_s);
8869 6107814 : allocate_vn_table (valid_info, region_size);
8870 6107814 : last_inserted_ref = NULL;
8871 6107814 : last_inserted_phi = NULL;
8872 6107814 : last_inserted_nary = NULL;
8873 6107814 : last_pushed_avail = NULL;
8874 :
8875 6107814 : vn_valueize = rpo_vn_valueize;
8876 :
8877 : /* Initialize the unwind state and edge/BB executable state. */
8878 6107814 : unsigned curr_scc = 0;
8879 56007709 : for (int i = 0; i < n; ++i)
8880 : {
8881 49899895 : basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
8882 49899895 : rpo_state[i].visited = 0;
8883 49899895 : rpo_state[i].max_rpo = i;
     : /* In greedy mode widen max_rpo to the containing toplevel SCC
     : so predecessors inside the SCC are not trusted too early. */
8884 58294536 : if (!iterate && curr_scc < toplevel_scc_extents.length ())
8885 : {
8886 7014605 : if (i >= toplevel_scc_extents[curr_scc].first
8887 7014605 : && i <= toplevel_scc_extents[curr_scc].second)
8888 3880074 : rpo_state[i].max_rpo = toplevel_scc_extents[curr_scc].second;
8889 7014605 : if (i == toplevel_scc_extents[curr_scc].second)
8890 728211 : curr_scc++;
8891 : }
8892 49899895 : bb->flags &= ~BB_EXECUTABLE;
8893 49899895 : bool has_backedges = false;
8894 49899895 : edge e;
8895 49899895 : edge_iterator ei;
8896 118405768 : FOR_EACH_EDGE (e, ei, bb->preds)
8897 : {
8898 68505873 : if (e->flags & EDGE_DFS_BACK)
8899 2829668 : has_backedges = true;
8900 68505873 : e->flags &= ~EDGE_EXECUTABLE;
     : /* NOTE(review): this conditional continue is a no-op at the
     : end of the loop body — presumably leftover from removed
     : code; verify against upstream history. */
8901 68505873 : if (iterate || e == entry || (skip_entry_phis && bb == entry->dest))
8902 68505873 : continue;
8903 : }
8904 49899895 : rpo_state[i].iterate = iterate && has_backedges;
8905 : }
8906 6107814 : entry->flags |= EDGE_EXECUTABLE;
8907 6107814 : entry->dest->flags |= BB_EXECUTABLE;
8908 :
8909 : /* As heuristic to improve compile-time we handle only the N innermost
8910 : loops and the outermost one optimistically. */
8911 6107814 : if (iterate)
8912 : {
8913 4228838 : unsigned max_depth = param_rpo_vn_max_loop_depth;
8914 14220581 : for (auto loop : loops_list (cfun, LI_ONLY_INNERMOST))
8915 1536460 : if (loop_depth (loop) > max_depth)
8916 2092 : for (unsigned i = 2;
8917 8970 : i < loop_depth (loop) - max_depth; ++i)
8918 : {
8919 2092 : basic_block header = superloop_at_depth (loop, i)->header;
8920 2092 : bool non_latch_backedge = false;
8921 2092 : edge e;
8922 2092 : edge_iterator ei;
8923 6307 : FOR_EACH_EDGE (e, ei, header->preds)
8924 4215 : if (e->flags & EDGE_DFS_BACK)
8925 : {
8926 : /* There can be a non-latch backedge into the header
8927 : which is part of an outer irreducible region. We
8928 : cannot avoid iterating this block then. */
8929 2123 : if (!dominated_by_p (CDI_DOMINATORS,
8930 2123 : e->src, e->dest))
8931 : {
8932 12 : if (dump_file && (dump_flags & TDF_DETAILS))
8933 0 : fprintf (dump_file, "non-latch backedge %d -> %d "
8934 : "forces iteration of loop %d\n",
8935 0 : e->src->index, e->dest->index, loop->num);
8936 : non_latch_backedge = true;
8937 : }
8938 : else
8939 2111 : e->flags |= EDGE_EXECUTABLE;
8940 : }
8941 2092 : rpo_state[bb_to_rpo[header->index]].iterate = non_latch_backedge;
8942 4228838 : }
8943 : }
8944 :
     : /* Number of blocks actually visited, for statistics below. */
8945 6107814 : uint64_t nblk = 0;
8946 6107814 : int idx = 0;
8947 4228838 : if (iterate)
8948 : /* Go and process all blocks, iterating as necessary. */
8949 48443447 : do
8950 : {
8951 48443447 : basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[idx]);
8952 :
8953 : /* If the block has incoming backedges remember unwind state. This
8954 : is required even for non-executable blocks since in irreducible
8955 : regions we might reach them via the backedge and re-start iterating
8956 : from there.
8957 : Note we can individually mark blocks with incoming backedges to
8958 : not iterate where we then handle PHIs conservatively. We do that
8959 : heuristically to reduce compile-time for degenerate cases. */
8960 48443447 : if (rpo_state[idx].iterate)
8961 : {
8962 4361260 : rpo_state[idx].ob_top = obstack_alloc (&vn_tables_obstack, 0);
8963 4361260 : rpo_state[idx].ref_top = last_inserted_ref;
8964 4361260 : rpo_state[idx].phi_top = last_inserted_phi;
8965 4361260 : rpo_state[idx].nary_top = last_inserted_nary;
8966 4361260 : rpo_state[idx].avail_top
8967 4361260 : = last_pushed_avail ? last_pushed_avail->avail : NULL;
8968 : }
8969 :
8970 48443447 : if (!(bb->flags & BB_EXECUTABLE))
8971 : {
8972 938129 : if (dump_file && (dump_flags & TDF_DETAILS))
8973 2 : fprintf (dump_file, "Block %d: BB%d found not executable\n",
8974 : idx, bb->index);
8975 938129 : idx++;
8976 2828791 : continue;
8977 : }
8978 :
8979 47505318 : if (dump_file && (dump_flags & TDF_DETAILS))
8980 334 : fprintf (dump_file, "Processing block %d: BB%d\n", idx, bb->index);
8981 47505318 : nblk++;
8982 95010636 : todo |= process_bb (avail, bb,
8983 47505318 : rpo_state[idx].visited != 0,
8984 : rpo_state[idx].iterate,
8985 : iterate, eliminate, do_region, exit_bbs, false);
8986 47505318 : rpo_state[idx].visited++;
8987 :
8988 : /* Verify if changed values flow over executable outgoing backedges
8989 : and those change destination PHI values (that's the thing we
8990 : can easily verify). Reduce over all such edges to the farthest
8991 : away PHI. */
8992 47505318 : int iterate_to = -1;
8993 47505318 : edge_iterator ei;
8994 47505318 : edge e;
8995 114414950 : FOR_EACH_EDGE (e, ei, bb->succs)
8996 66909632 : if ((e->flags & (EDGE_DFS_BACK|EDGE_EXECUTABLE))
8997 : == (EDGE_DFS_BACK|EDGE_EXECUTABLE)
8998 4370755 : && rpo_state[bb_to_rpo[e->dest->index]].iterate)
8999 : {
9000 4368021 : int destidx = bb_to_rpo[e->dest->index];
9001 4368021 : if (!rpo_state[destidx].visited)
9002 : {
9003 134 : if (dump_file && (dump_flags & TDF_DETAILS))
9004 0 : fprintf (dump_file, "Unvisited destination %d\n",
9005 : e->dest->index);
9006 134 : if (iterate_to == -1 || destidx < iterate_to)
9007 134 : iterate_to = destidx;
9008 134 : continue;
9009 : }
9010 4367887 : if (dump_file && (dump_flags & TDF_DETAILS))
9011 53 : fprintf (dump_file, "Looking for changed values of backedge"
9012 : " %d->%d destination PHIs\n",
9013 53 : e->src->index, e->dest->index);
9014 4367887 : vn_context_bb = e->dest;
9015 4367887 : gphi_iterator gsi;
9016 4367887 : for (gsi = gsi_start_phis (e->dest);
9017 9995015 : !gsi_end_p (gsi); gsi_next (&gsi))
9018 : {
9019 7517948 : bool inserted = false;
9020 : /* While we'd ideally just iterate on value changes
9021 : we CSE PHIs and do that even across basic-block
9022 : boundaries. So even hashtable state changes can
9023 : be important (which is roughly equivalent to
9024 : PHI argument value changes). To not excessively
9025 : iterate because of that we track whether a PHI
9026 : was CSEd to with GF_PLF_1. */
9027 7517948 : bool phival_changed;
9028 7517948 : if ((phival_changed = visit_phi (gsi.phi (),
9029 : &inserted, false))
9030 8891757 : || (inserted && gimple_plf (gsi.phi (), GF_PLF_1)))
9031 : {
9032 1890820 : if (!phival_changed
9033 1890820 : && dump_file && (dump_flags & TDF_DETAILS))
9034 0 : fprintf (dump_file, "PHI was CSEd and hashtable "
9035 : "state (changed)\n");
9036 1890820 : if (iterate_to == -1 || destidx < iterate_to)
9037 1890735 : iterate_to = destidx;
9038 1890820 : break;
9039 : }
9040 : }
9041 4367887 : vn_context_bb = NULL;
9042 : }
9043 47505318 : if (iterate_to != -1)
9044 : {
     : /* Roll the VN state back to the iteration point and restart
     : the RPO walk from there. */
9045 1890662 : do_unwind (&rpo_state[iterate_to], avail);
9046 1890662 : idx = iterate_to;
9047 1890662 : if (dump_file && (dump_flags & TDF_DETAILS))
9048 20 : fprintf (dump_file, "Iterating to %d BB%d\n",
9049 20 : iterate_to, rpo[iterate_to]);
9050 1890662 : continue;
9051 : }
9052 :
9053 45614656 : idx++;
9054 : }
9055 48443447 : while (idx < n);
9056 :
9057 : else /* !iterate */
9058 : {
9059 : /* Process all blocks greedily with a worklist that enforces RPO
9060 : processing of reachable blocks. */
9061 1878976 : auto_bitmap worklist;
9062 1878976 : bitmap_set_bit (worklist, 0);
9063 17072985 : while (!bitmap_empty_p (worklist))
9064 : {
9065 13315033 : int idx = bitmap_clear_first_set_bit (worklist);
9066 13315033 : basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[idx]);
9067 13315033 : gcc_assert ((bb->flags & BB_EXECUTABLE)
9068 : && !rpo_state[idx].visited);
9069 :
9070 13315033 : if (dump_file && (dump_flags & TDF_DETAILS))
9071 35072 : fprintf (dump_file, "Processing block %d: BB%d\n", idx, bb->index);
9072 :
9073 : /* When we run into predecessor edges where we cannot trust its
9074 : executable state mark them executable so PHI processing will
9075 : be conservative.
9076 : ??? Do we need to force arguments flowing over that edge
9077 : to be varying or will they even always be? */
9078 13315033 : edge_iterator ei;
9079 13315033 : edge e;
9080 32257641 : FOR_EACH_EDGE (e, ei, bb->preds)
9081 18942608 : if (!(e->flags & EDGE_EXECUTABLE)
9082 1017978 : && (bb == entry->dest
9083 961412 : || (!rpo_state[bb_to_rpo[e->src->index]].visited
9084 925057 : && (rpo_state[bb_to_rpo[e->src->index]].max_rpo
9085 : >= (int)idx))))
9086 : {
9087 958187 : if (dump_file && (dump_flags & TDF_DETAILS))
9088 11258 : fprintf (dump_file, "Cannot trust state of predecessor "
9089 : "edge %d -> %d, marking executable\n",
9090 11258 : e->src->index, e->dest->index);
9091 958187 : e->flags |= EDGE_EXECUTABLE;
9092 : }
9093 :
9094 13315033 : nblk++;
9095 13315033 : todo |= process_bb (avail, bb, false, false, false, eliminate,
9096 : do_region, exit_bbs,
9097 13315033 : skip_entry_phis && bb == entry->dest);
9098 13315033 : rpo_state[idx].visited++;
9099 :
     : /* Queue unvisited executable successors inside the region. */
9100 32887613 : FOR_EACH_EDGE (e, ei, bb->succs)
9101 19572580 : if ((e->flags & EDGE_EXECUTABLE)
9102 19495697 : && e->dest->index != EXIT_BLOCK
9103 18328144 : && (!do_region || !bitmap_bit_p (exit_bbs, e->dest->index))
9104 36571027 : && !rpo_state[bb_to_rpo[e->dest->index]].visited)
9105 16045654 : bitmap_set_bit (worklist, bb_to_rpo[e->dest->index]);
9106 : }
9107 1878976 : }
9108 :
9109 : /* If statistics or dump file active. */
9110 6107814 : int nex = 0;
9111 6107814 : unsigned max_visited = 1;
9112 56007709 : for (int i = 0; i < n; ++i)
9113 : {
9114 49899895 : basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
9115 49899895 : if (bb->flags & BB_EXECUTABLE)
9116 49303982 : nex++;
9117 49899895 : statistics_histogram_event (cfun, "RPO block visited times",
9118 49899895 : rpo_state[i].visited);
9119 49899895 : if (rpo_state[i].visited > max_visited)
9120 : max_visited = rpo_state[i].visited;
9121 : }
9122 6107814 : unsigned nvalues = 0, navail = 0;
9123 167057973 : for (hash_table<vn_ssa_aux_hasher>::iterator i = vn_ssa_aux_hash->begin ();
9124 328008132 : i != vn_ssa_aux_hash->end (); ++i)
9125 : {
9126 160950159 : nvalues++;
9127 160950159 : vn_avail *av = (*i)->avail;
9128 238118248 : while (av)
9129 : {
9130 77168089 : navail++;
9131 77168089 : av = av->next;
9132 : }
9133 : }
9134 6107814 : statistics_counter_event (cfun, "RPO blocks", n);
9135 6107814 : statistics_counter_event (cfun, "RPO blocks visited", nblk);
9136 6107814 : statistics_counter_event (cfun, "RPO blocks executable", nex);
9137 6107814 : statistics_histogram_event (cfun, "RPO iterations", 10*nblk / nex);
9138 6107814 : statistics_histogram_event (cfun, "RPO num values", nvalues);
9139 6107814 : statistics_histogram_event (cfun, "RPO num avail", navail);
9140 6107814 : statistics_histogram_event (cfun, "RPO num lattice",
9141 6107814 : vn_ssa_aux_hash->elements ());
9142 6107814 : if (dump_file && (dump_flags & (TDF_DETAILS|TDF_STATS)))
9143 : {
9144 11163 : fprintf (dump_file, "RPO iteration over %d blocks visited %" PRIu64
9145 : " blocks in total discovering %d executable blocks iterating "
9146 : "%d.%d times, a block was visited max. %u times\n",
9147 : n, nblk, nex,
9148 11163 : (int)((10*nblk / nex)/10), (int)((10*nblk / nex)%10),
9149 : max_visited);
9150 11163 : fprintf (dump_file, "RPO tracked %d values available at %d locations "
9151 : "and %" PRIu64 " lattice elements\n",
9152 11163 : nvalues, navail, (uint64_t) vn_ssa_aux_hash->elements ());
9153 : }
9154 :
9155 6107814 : if (eliminate)
9156 : {
9157 : /* When !iterate we already performed elimination during the RPO
9158 : walk. */
9159 5126577 : if (iterate)
9160 : {
9161 : /* Elimination for region-based VN needs to be done within the
9162 : RPO walk. */
9163 3267055 : gcc_assert (! do_region);
9164 : /* Note we can't use avail.walk here because that gets confused
9165 : by the existing availability and it will be less efficient
9166 : as well. */
9167 3267055 : todo |= eliminate_with_rpo_vn (NULL);
9168 : }
9169 : else
9170 1859522 : todo |= avail.eliminate_cleanup (do_region);
9171 : }
9172 :
     : /* Tear down global VN state set up above. */
9173 6107814 : vn_valueize = NULL;
9174 6107814 : rpo_avail = NULL;
9175 6107814 : vn_bb_to_rpo = NULL;
9176 :
9177 6107814 : XDELETEVEC (bb_to_rpo);
9178 6107814 : XDELETEVEC (rpo);
9179 6107814 : XDELETEVEC (rpo_state);
9180 :
9181 6107814 : return todo;
9182 6107814 : }
9183 :
9184 : /* Region-based entry for RPO VN. Performs value-numbering and elimination
9185 : on the SEME region specified by ENTRY and EXIT_BBS. If ENTRY is not
9186 : the only edge into the region at ENTRY->dest PHI nodes in ENTRY->dest
9187 : are not considered.
9188 : If ITERATE is true then treat backedges optimistically as not
9189 : executed and iterate. If ELIMINATE is true then perform
9190 : elimination, otherwise leave that to the caller.
9191 : If SKIP_ENTRY_PHIS is true then force PHI nodes in ENTRY->dest to VARYING.
9192 : KIND specifies the amount of work done for handling memory operations. */
     : /* Thin timed wrapper around do_rpo_vn_1 that also frees the VN
     : state afterwards; returns the accumulated TODO_* flags. */
9193 :
9194 : unsigned
9195 701368 : do_rpo_vn (function *fn, edge entry, bitmap exit_bbs,
9196 : bool iterate, bool eliminate, bool skip_entry_phis,
9197 : vn_lookup_kind kind)
9198 : {
9199 701368 : auto_timevar tv (TV_TREE_RPO_VN);
9200 701368 : unsigned todo = do_rpo_vn_1 (fn, entry, exit_bbs, iterate, eliminate,
9201 : skip_entry_phis, kind);
9202 701368 : free_rpo_vn ();
9203 1402736 : return todo;
9204 701368 : }
9205 :
9206 :
9207 : namespace {
9208 :
9209 : const pass_data pass_data_fre =
9210 : {
9211 : GIMPLE_PASS, /* type */
9212 : "fre", /* name */
9213 : OPTGROUP_NONE, /* optinfo_flags */
9214 : TV_TREE_FRE, /* tv_id */
9215 : ( PROP_cfg | PROP_ssa ), /* properties_required */
9216 : 0, /* properties_provided */
9217 : 0, /* properties_destroyed */
9218 : 0, /* todo_flags_start */
9219 : 0, /* todo_flags_finish */
9220 : };
9221 :
     : /* The FRE (full redundancy elimination) pass, implemented on top of
     : RPO value numbering.  The single pass parameter selects whether
     : the (more expensive) iterating mode may be used. */
9222 : class pass_fre : public gimple_opt_pass
9223 : {
9224 : public:
9225 1440235 : pass_fre (gcc::context *ctxt)
9226 2880470 : : gimple_opt_pass (pass_data_fre, ctxt), may_iterate (true)
9227 : {}
9228 :
9229 : /* opt_pass methods: */
9230 1152188 : opt_pass * clone () final override { return new pass_fre (m_ctxt); }
9231 1440235 : void set_pass_param (unsigned int n, bool param) final override
9232 : {
9233 1440235 : gcc_assert (n == 0);
9234 1440235 : may_iterate = param;
9235 1440235 : }
     : /* Non-iterating FRE instances only run at -O2 and above. */
9236 4523849 : bool gate (function *) final override
9237 : {
9238 4523849 : return flag_tree_fre != 0 && (may_iterate || optimize > 1);
9239 : }
9240 : unsigned int execute (function *) final override;
9241 :
9242 : private:
9243 : bool may_iterate;
9244 : }; // class pass_fre
9245 :
     : /* Run whole-function RPO VN with elimination; iterating mode needs
     : dominators and loop info, which are set up here. */
9246 : unsigned int
9247 4444663 : pass_fre::execute (function *fun)
9248 : {
9249 4444663 : unsigned todo = 0;
9250 :
9251 : /* At -O[1g] use the cheap non-iterating mode. */
9252 4444663 : bool iterate_p = may_iterate && (optimize > 1);
9253 4444663 : calculate_dominance_info (CDI_DOMINATORS);
9254 4444663 : if (iterate_p)
9255 3267055 : loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
9256 :
9257 4444663 : todo = do_rpo_vn_1 (fun, NULL, NULL, iterate_p, true, false, VN_WALKREWRITE);
9258 4444663 : free_rpo_vn ();
9259 :
9260 4444663 : if (iterate_p)
9261 3267055 : loop_optimizer_finalize ();
9262 :
     : /* VN may have changed trip counts etc.; drop cached SCEV data. */
9263 4444663 : if (scev_initialized_p ())
9264 31858 : scev_reset_htab ();
9265 :
9266 : /* For late FRE after IVOPTs and unrolling, see if we can
9267 : remove some TREE_ADDRESSABLE and rewrite stuff into SSA. */
9268 4444663 : if (!may_iterate)
9269 993414 : todo |= TODO_update_address_taken;
9270 :
9271 4444663 : return todo;
9272 : }
9273 :
9274 : } // anon namespace
9275 :
     : /* Pass-manager factory for the FRE pass. */
9276 : gimple_opt_pass *
9277 288047 : make_pass_fre (gcc::context *ctxt)
9278 : {
9279 288047 : return new pass_fre (ctxt);
9280 : }
9281 :
9282 : #undef BB_EXECUTABLE
|