Line data Source code
1 : /* Common subexpression elimination library for GNU compiler.
2 : Copyright (C) 1987-2026 Free Software Foundation, Inc.
3 :
4 : This file is part of GCC.
5 :
6 : GCC is free software; you can redistribute it and/or modify it under
7 : the terms of the GNU General Public License as published by the Free
8 : Software Foundation; either version 3, or (at your option) any later
9 : version.
10 :
11 : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 : WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 : for more details.
15 :
16 : You should have received a copy of the GNU General Public License
17 : along with GCC; see the file COPYING3. If not see
18 : <http://www.gnu.org/licenses/>. */
19 :
20 : #include "config.h"
21 : #include "system.h"
22 : #include "coretypes.h"
23 : #include "backend.h"
24 : #include "target.h"
25 : #include "rtl.h"
26 : #include "tree.h"
27 : #include "df.h"
28 : #include "memmodel.h"
29 : #include "tm_p.h"
30 : #include "regs.h"
31 : #include "emit-rtl.h"
32 : #include "dumpfile.h"
33 : #include "cselib.h"
34 : #include "function-abi.h"
35 : #include "alias.h"
36 : #include "predict.h"
37 :
/* A singly-linked list of cselib_val structures, used e.g. for the
   per-register value chains (REG_VALUES) and for addr_list chains.  */
struct elt_list
{
  struct elt_list *next;	/* Next node in the chain, or NULL.  */
  cselib_val *elt;		/* The value this node refers to.  */
};
44 :
45 : static bool cselib_record_memory;
46 : static bool cselib_preserve_constants;
47 : static bool cselib_any_perm_equivs;
48 : static inline void promote_debug_loc (struct elt_loc_list *l);
49 : static struct elt_list *new_elt_list (struct elt_list *, cselib_val *);
50 : static void new_elt_loc_list (cselib_val *, rtx);
51 : static void unchain_one_value (cselib_val *);
52 : static void unchain_one_elt_list (struct elt_list **);
53 : static void unchain_one_elt_loc_list (struct elt_loc_list **);
54 : static void remove_useless_values (void);
55 : static hashval_t cselib_hash_rtx (rtx, int, machine_mode);
56 : static cselib_val *new_cselib_val (unsigned int, machine_mode, rtx);
57 : static void add_mem_for_addr (cselib_val *, cselib_val *, rtx);
58 : static cselib_val *cselib_lookup_mem (rtx, int);
59 : static void cselib_invalidate_regno (unsigned int, machine_mode);
60 : static void cselib_invalidate_mem (rtx);
61 : static void cselib_record_set (rtx, cselib_val *, cselib_val *);
62 : static void cselib_record_sets (rtx_insn *);
63 : static rtx autoinc_split (rtx, rtx *, machine_mode);
64 :
65 : #define PRESERVED_VALUE_P(RTX) \
66 : (RTL_FLAG_CHECK1 ("PRESERVED_VALUE_P", (RTX), VALUE)->unchanging)
67 :
68 : #define SP_BASED_VALUE_P(RTX) \
69 : (RTL_FLAG_CHECK1 ("SP_BASED_VALUE_P", (RTX), VALUE)->jump)
70 :
71 : #define SP_DERIVED_VALUE_P(RTX) \
72 : (RTL_FLAG_CHECK1 ("SP_DERIVED_VALUE_P", (RTX), VALUE)->call)
73 :
74 : struct expand_value_data
75 : {
76 : bitmap regs_active;
77 : cselib_expand_callback callback;
78 : void *callback_arg;
79 : bool dummy;
80 : };
81 :
82 : static rtx cselib_expand_value_rtx_1 (rtx, struct expand_value_data *, int);
83 :
84 : /* This is a global so we don't have to pass this through every function.
85 : It is used in new_elt_loc_list to set SETTING_INSN. */
86 : static rtx_insn *cselib_current_insn;
87 :
/* There are three ways in which cselib can look up an rtx:
   - for a REG, the reg_values table (which is indexed by regno) is used
   - for a MEM, we recursively look up its address and then follow the
     addr_list of that value
   - for everything else, we compute a hash value and go through the hash
     table.  Since different rtx's can still have the same hash value,
     this involves walking the table entries for a given value and comparing
     the locations of the entries with the rtx we are looking up.  */

struct cselib_hasher : nofree_ptr_hash <cselib_val>
{
  /* Lookup key: the rtx being searched for together with the modes
     needed to disambiguate it.  */
  struct key {
    /* The rtx value and its mode (needed separately for constant
       integers).  */
    machine_mode mode;
    rtx x;
    /* The mode of the containing MEM, if any, otherwise VOIDmode.  */
    machine_mode memmode;
  };
  typedef key *compare_type;
  static inline hashval_t hash (const cselib_val *);
  static inline bool equal (const cselib_val *, const key *);
};
111 :
112 : /* The hash function for our hash table. The value is always computed with
113 : cselib_hash_rtx when adding an element; this function just extracts the
114 : hash value from a cselib_val structure. */
115 :
116 : inline hashval_t
117 100560210 : cselib_hasher::hash (const cselib_val *v)
118 : {
119 100560210 : return v->hash;
120 : }
121 :
122 : /* The equality test for our hash table. The first argument V is a table
123 : element (i.e. a cselib_val), while the second arg X is an rtx. We know
124 : that all callers of htab_find_slot_with_hash will wrap CONST_INTs into a
125 : CONST of an appropriate mode. */
126 :
127 : inline bool
128 571545032 : cselib_hasher::equal (const cselib_val *v, const key *x_arg)
129 : {
130 571545032 : struct elt_loc_list *l;
131 571545032 : rtx x = x_arg->x;
132 571545032 : machine_mode mode = x_arg->mode;
133 571545032 : machine_mode memmode = x_arg->memmode;
134 :
135 571545032 : if (mode != GET_MODE (v->val_rtx))
136 : return false;
137 :
138 460559759 : if (GET_CODE (x) == VALUE)
139 12759759 : return x == v->val_rtx;
140 :
141 457864830 : if (SP_DERIVED_VALUE_P (v->val_rtx) && GET_MODE (x) == Pmode)
142 : {
143 17581041 : rtx xoff = NULL;
144 17581041 : if (autoinc_split (x, &xoff, memmode) == v->val_rtx && xoff == NULL_RTX)
145 7900045 : return true;
146 : }
147 :
148 : /* We don't guarantee that distinct rtx's have different hash values,
149 : so we need to do a comparison. */
150 754586111 : for (l = v->locs; l; l = l->next)
151 490757234 : if (l->setting_insn && DEBUG_INSN_P (l->setting_insn)
152 38336549 : && (!cselib_current_insn || !DEBUG_INSN_P (cselib_current_insn)))
153 : {
154 11045531 : rtx_insn *save_cselib_current_insn = cselib_current_insn;
155 : /* If l is so far a debug only loc, without debug stmts it
156 : would never be compared to x at all, so temporarily pretend
157 : current instruction is that DEBUG_INSN so that we don't
158 : promote other debug locs even for unsuccessful comparison. */
159 11045531 : cselib_current_insn = l->setting_insn;
160 11045531 : bool match = rtx_equal_for_cselib_1 (l->loc, x, memmode, 0);
161 11045531 : cselib_current_insn = save_cselib_current_insn;
162 11045531 : if (match)
163 : {
164 876240 : promote_debug_loc (l);
165 876240 : return true;
166 : }
167 : }
168 479711703 : else if (rtx_equal_for_cselib_1 (l->loc, x, memmode, 0))
169 : return true;
170 :
171 : return false;
172 : }
173 :
174 : /* A table that enables us to look up elts by their value. */
175 : static hash_table<cselib_hasher> *cselib_hash_table;
176 :
177 : /* A table to hold preserved values. */
178 : static hash_table<cselib_hasher> *cselib_preserved_hash_table;
179 :
180 : /* The unique id that the next create value will take. */
181 : static unsigned int next_uid;
182 :
183 : /* The number of registers we had when the varrays were last resized. */
184 : static unsigned int cselib_nregs;
185 :
186 : /* Count values without known locations, or with only locations that
187 : wouldn't have been known except for debug insns. Whenever this
188 : grows too big, we remove these useless values from the table.
189 :
190 : Counting values with only debug values is a bit tricky. We don't
191 : want to increment n_useless_values when we create a value for a
192 : debug insn, for this would get n_useless_values out of sync, but we
193 : want increment it if all locs in the list that were ever referenced
194 : in nondebug insns are removed from the list.
195 :
196 : In the general case, once we do that, we'd have to stop accepting
197 : nondebug expressions in the loc list, to avoid having two values
198 : equivalent that, without debug insns, would have been made into
199 : separate values. However, because debug insns never introduce
200 : equivalences themselves (no assignments), the only means for
201 : growing loc lists is through nondebug assignments. If the locs
202 : also happen to be referenced in debug insns, it will work just fine.
203 :
204 : A consequence of this is that there's at most one debug-only loc in
205 : each loc list. If we keep it in the first entry, testing whether
206 : we have a debug-only loc list takes O(1).
207 :
208 : Furthermore, since any additional entry in a loc list containing a
209 : debug loc would have to come from an assignment (nondebug) that
210 : references both the initial debug loc and the newly-equivalent loc,
211 : the initial debug loc would be promoted to a nondebug loc, and the
212 : loc list would not contain debug locs any more.
213 :
214 : So the only case we have to be careful with in order to keep
215 : n_useless_values in sync between debug and nondebug compilations is
216 : to avoid incrementing n_useless_values when removing the single loc
217 : from a value that turns out to not appear outside debug values. We
218 : increment n_useless_debug_values instead, and leave such values
219 : alone until, for other reasons, we garbage-collect useless
220 : values. */
221 : static int n_useless_values;
222 : static int n_useless_debug_values;
223 :
224 : /* Count values whose locs have been taken exclusively from debug
225 : insns for the entire life of the value. */
226 : static int n_debug_values;
227 :
228 : /* Number of useless values before we remove them from the hash table. */
229 : #define MAX_USELESS_VALUES 32
230 :
231 : /* This table maps from register number to values. It does not
232 : contain pointers to cselib_val structures, but rather elt_lists.
233 : The purpose is to be able to refer to the same register in
234 : different modes. The first element of the list defines the mode in
235 : which the register was set; if the mode is unknown or the value is
236 : no longer valid in that mode, ELT will be NULL for the first
237 : element. */
238 : static struct elt_list **reg_values;
239 : static unsigned int reg_values_size;
240 : #define REG_VALUES(i) reg_values[i]
241 :
242 : /* The largest number of hard regs used by any entry added to the
243 : REG_VALUES table. Cleared on each cselib_clear_table() invocation. */
244 : static unsigned int max_value_regs;
245 :
246 : /* Here the set of indices I with REG_VALUES(I) != 0 is saved. This is used
247 : in cselib_clear_table() for fast emptying. */
248 : static unsigned int *used_regs;
249 : static unsigned int n_used_regs;
250 :
251 : /* We pass this to cselib_invalidate_mem to invalidate all of
252 : memory for a non-const call instruction and memory below stack pointer
253 : for const/pure calls. */
254 : static GTY(()) rtx callmem[2];
255 :
256 : /* Set by discard_useless_locs if it deleted the last location of any
257 : value. */
258 : static int values_became_useless;
259 :
260 : /* Used as stop element of the containing_mem list so we can check
261 : presence in the list by checking the next pointer. */
262 : static cselib_val dummy_val;
263 :
264 : /* If non-NULL, value of the eliminated arg_pointer_rtx or frame_pointer_rtx
265 : that is constant through the whole function and should never be
266 : eliminated. */
267 : static cselib_val *cfa_base_preserved_val;
268 : static unsigned int cfa_base_preserved_regno = INVALID_REGNUM;
269 :
270 : /* Used to list all values that contain memory reference.
271 : May or may not contain the useless values - the list is compacted
272 : each time memory is invalidated. */
273 : static cselib_val *first_containing_mem = &dummy_val;
274 :
275 : static object_allocator<elt_list> elt_list_pool ("elt_list");
276 : static object_allocator<elt_loc_list> elt_loc_list_pool ("elt_loc_list");
277 : static object_allocator<cselib_val> cselib_val_pool ("cselib_val_list");
278 :
279 : static pool_allocator value_pool ("value", RTX_CODE_SIZE (VALUE));
280 :
281 : /* If nonnull, cselib will call this function before freeing useless
282 : VALUEs. A VALUE is deemed useless if its "locs" field is null. */
283 : void (*cselib_discard_hook) (cselib_val *);
284 :
285 : /* If nonnull, cselib will call this function before recording sets or
286 : even clobbering outputs of INSN. All the recorded sets will be
287 : represented in the array sets[n_sets]. new_val_min can be used to
288 : tell whether values present in sets are introduced by this
289 : instruction. */
290 : void (*cselib_record_sets_hook) (rtx_insn *insn, struct cselib_set *sets,
291 : int n_sets);
292 :
293 :
294 :
295 : /* Allocate a struct elt_list and fill in its two elements with the
296 : arguments. */
297 :
298 : static inline struct elt_list *
299 482368127 : new_elt_list (struct elt_list *next, cselib_val *elt)
300 : {
301 964736254 : elt_list *el = elt_list_pool.allocate ();
302 482368127 : el->next = next;
303 482368127 : el->elt = elt;
304 482368127 : return el;
305 : }
306 :
307 : /* Allocate a struct elt_loc_list with LOC and prepend it to VAL's loc
308 : list. */
309 :
310 : static inline void
311 714761746 : new_elt_loc_list (cselib_val *val, rtx loc)
312 : {
313 718646233 : struct elt_loc_list *el, *next = val->locs;
314 :
315 718646233 : gcc_checking_assert (!next || !next->setting_insn
316 : || !DEBUG_INSN_P (next->setting_insn)
317 : || cselib_current_insn == next->setting_insn);
318 :
319 : /* If we're creating the first loc in a debug insn context, we've
320 : just created a debug value. Count it. */
321 420807907 : if (!next && cselib_current_insn && DEBUG_INSN_P (cselib_current_insn))
322 6646725 : n_debug_values++;
323 :
324 718646233 : val = canonical_cselib_val (val);
325 718646233 : next = val->locs;
326 :
327 718646233 : if (GET_CODE (loc) == VALUE)
328 : {
329 7773869 : loc = canonical_cselib_val (CSELIB_VAL_PTR (loc))->val_rtx;
330 :
331 7773869 : gcc_checking_assert (PRESERVED_VALUE_P (loc)
332 : == PRESERVED_VALUE_P (val->val_rtx));
333 :
334 7773869 : if (val->val_rtx == loc)
335 : return;
336 7769164 : else if (val->uid > CSELIB_VAL_PTR (loc)->uid)
337 : {
338 : /* Reverse the insertion. */
339 : new_elt_loc_list (CSELIB_VAL_PTR (loc), val->val_rtx);
340 : return;
341 : }
342 :
343 3884677 : gcc_checking_assert (val->uid < CSELIB_VAL_PTR (loc)->uid);
344 :
345 3884677 : if (CSELIB_VAL_PTR (loc)->locs)
346 : {
347 : /* Bring all locs from LOC to VAL. */
348 2953855 : for (el = CSELIB_VAL_PTR (loc)->locs; el->next; el = el->next)
349 : {
350 : /* Adjust values that have LOC as canonical so that VAL
351 : becomes their canonical. */
352 18 : if (el->loc && GET_CODE (el->loc) == VALUE)
353 : {
354 0 : gcc_checking_assert (CSELIB_VAL_PTR (el->loc)->locs->loc
355 : == loc);
356 0 : CSELIB_VAL_PTR (el->loc)->locs->loc = val->val_rtx;
357 : }
358 : }
359 2953837 : el->next = val->locs;
360 2953837 : next = val->locs = CSELIB_VAL_PTR (loc)->locs;
361 : }
362 :
363 3884677 : if (CSELIB_VAL_PTR (loc)->addr_list)
364 : {
365 : /* Bring in addr_list into canonical node. */
366 : struct elt_list *last = CSELIB_VAL_PTR (loc)->addr_list;
367 2 : while (last->next)
368 : last = last->next;
369 2 : last->next = val->addr_list;
370 2 : val->addr_list = CSELIB_VAL_PTR (loc)->addr_list;
371 2 : CSELIB_VAL_PTR (loc)->addr_list = NULL;
372 : }
373 :
374 3884677 : if (CSELIB_VAL_PTR (loc)->next_containing_mem != NULL
375 0 : && val->next_containing_mem == NULL)
376 : {
377 : /* Add VAL to the containing_mem list after LOC. LOC will
378 : be removed when we notice it doesn't contain any
379 : MEMs. */
380 0 : val->next_containing_mem = CSELIB_VAL_PTR (loc)->next_containing_mem;
381 0 : CSELIB_VAL_PTR (loc)->next_containing_mem = val;
382 : }
383 :
384 : /* Chain LOC back to VAL. */
385 3884677 : el = elt_loc_list_pool.allocate ();
386 3884677 : el->loc = val->val_rtx;
387 3884677 : el->setting_insn = cselib_current_insn;
388 3884677 : el->next = NULL;
389 3884677 : CSELIB_VAL_PTR (loc)->locs = el;
390 : }
391 :
392 714757041 : el = elt_loc_list_pool.allocate ();
393 714757041 : el->loc = loc;
394 714757041 : el->setting_insn = cselib_current_insn;
395 714757041 : el->next = next;
396 714757041 : val->locs = el;
397 : }
398 :
399 : /* Promote loc L to a nondebug cselib_current_insn if L is marked as
400 : originating from a debug insn, maintaining the debug values
401 : count. */
402 :
403 : static inline void
404 1212529537 : promote_debug_loc (struct elt_loc_list *l)
405 : {
406 1212529537 : if (l && l->setting_insn && DEBUG_INSN_P (l->setting_insn)
407 22043791 : && (!cselib_current_insn || !DEBUG_INSN_P (cselib_current_insn)))
408 : {
409 2537541 : n_debug_values--;
410 2537541 : l->setting_insn = cselib_current_insn;
411 2537541 : if (cselib_preserve_constants && l->next)
412 : {
413 19512 : gcc_assert (l->next->setting_insn
414 : && DEBUG_INSN_P (l->next->setting_insn)
415 : && !l->next->next);
416 19512 : l->next->setting_insn = cselib_current_insn;
417 : }
418 : else
419 2518029 : gcc_assert (!l->next);
420 : }
421 1212529537 : }
422 :
423 : /* The elt_list at *PL is no longer needed. Unchain it and free its
424 : storage. */
425 :
426 : static inline void
427 79707051 : unchain_one_elt_list (struct elt_list **pl)
428 : {
429 79707051 : struct elt_list *l = *pl;
430 :
431 79707051 : *pl = l->next;
432 116953203 : elt_list_pool.remove (l);
433 42460899 : }
434 :
435 : /* Likewise for elt_loc_lists. */
436 :
437 : static void
438 221126664 : unchain_one_elt_loc_list (struct elt_loc_list **pl)
439 : {
440 221126664 : struct elt_loc_list *l = *pl;
441 :
442 221126664 : *pl = l->next;
443 0 : elt_loc_list_pool.remove (l);
444 38515609 : }
445 :
446 : /* Likewise for cselib_vals. This also frees the addr_list associated with
447 : V. */
448 :
449 : static void
450 2350627 : unchain_one_value (cselib_val *v)
451 : {
452 2370013 : while (v->addr_list)
453 19386 : unchain_one_elt_list (&v->addr_list);
454 :
455 2350627 : cselib_val_pool.remove (v);
456 2350627 : }
457 :
458 : /* Remove all entries from the hash table. Also used during
459 : initialization. */
460 :
461 : void
462 60557835 : cselib_clear_table (void)
463 : {
464 60557835 : cselib_reset_table (1);
465 60557835 : }
466 :
467 : /* Return TRUE if V is a constant, a function invariant or a VALUE
468 : equivalence; FALSE otherwise. */
469 :
470 : static bool
471 57358982 : invariant_or_equiv_p (cselib_val *v)
472 : {
473 57358982 : struct elt_loc_list *l;
474 :
475 57358982 : if (v == cfa_base_preserved_val)
476 : return true;
477 :
478 : /* Keep VALUE equivalences around. */
479 79642927 : for (l = v->locs; l; l = l->next)
480 38445753 : if (GET_CODE (l->loc) == VALUE)
481 : return true;
482 :
483 41197174 : if (v->locs != NULL
484 22588598 : && v->locs->next == NULL)
485 : {
486 22534763 : if (CONSTANT_P (v->locs->loc)
487 22534763 : && (GET_CODE (v->locs->loc) != CONST
488 173125 : || !references_value_p (v->locs->loc, 0)))
489 3470272 : return true;
490 : /* Although a debug expr may be bound to different expressions,
491 : we can preserve it as if it was constant, to get unification
492 : and proper merging within var-tracking. */
493 19064491 : if (GET_CODE (v->locs->loc) == DEBUG_EXPR
494 17568600 : || GET_CODE (v->locs->loc) == DEBUG_IMPLICIT_PTR
495 17074917 : || GET_CODE (v->locs->loc) == ENTRY_VALUE
496 17074692 : || GET_CODE (v->locs->loc) == DEBUG_PARAMETER_REF)
497 : return true;
498 :
499 : /* (plus (value V) (const_int C)) is invariant iff V is invariant. */
500 17058331 : if (GET_CODE (v->locs->loc) == PLUS
501 10668089 : && CONST_INT_P (XEXP (v->locs->loc, 1))
502 9524793 : && GET_CODE (XEXP (v->locs->loc, 0)) == VALUE
503 25830766 : && invariant_or_equiv_p (CSELIB_VAL_PTR (XEXP (v->locs->loc, 0))))
504 : return true;
505 : }
506 :
507 : return false;
508 : }
509 :
510 : /* Remove from hash table all VALUEs except constants, function
511 : invariants and VALUE equivalences. */
512 :
513 : int
514 44246732 : preserve_constants_and_equivs (cselib_val **x, void *info ATTRIBUTE_UNUSED)
515 : {
516 44246732 : cselib_val *v = *x;
517 :
518 44246732 : if (invariant_or_equiv_p (v))
519 : {
520 17298425 : cselib_hasher::key lookup = {
521 17298425 : GET_MODE (v->val_rtx), v->val_rtx, VOIDmode
522 17298425 : };
523 17298425 : cselib_val **slot
524 34596850 : = cselib_preserved_hash_table->find_slot_with_hash (&lookup,
525 17298425 : v->hash, INSERT);
526 17298425 : gcc_assert (!*slot);
527 17298425 : *slot = v;
528 : }
529 :
530 44246732 : cselib_hash_table->clear_slot (x);
531 :
532 44246732 : return 1;
533 : }
534 :
535 : /* Remove all entries from the hash table, arranging for the next
536 : value to be numbered NUM. */
537 :
538 : void
539 99901598 : cselib_reset_table (unsigned int num)
540 : {
541 99901598 : unsigned int i;
542 :
543 99901598 : max_value_regs = 0;
544 :
545 99901598 : if (cfa_base_preserved_val)
546 : {
547 4339815 : unsigned int regno = cfa_base_preserved_regno;
548 4339815 : unsigned int new_used_regs = 0;
549 30932980 : for (i = 0; i < n_used_regs; i++)
550 26593165 : if (used_regs[i] == regno)
551 : {
552 4339815 : new_used_regs = 1;
553 4339815 : continue;
554 : }
555 : else
556 22253350 : REG_VALUES (used_regs[i]) = 0;
557 4339815 : gcc_assert (new_used_regs == 1);
558 4339815 : n_used_regs = new_used_regs;
559 4339815 : used_regs[0] = regno;
560 4339815 : max_value_regs
561 4339815 : = hard_regno_nregs (regno,
562 4339815 : GET_MODE (cfa_base_preserved_val->locs->loc));
563 :
564 : /* If cfa_base is sp + const_int, need to preserve also the
565 : SP_DERIVED_VALUE_P value. */
566 4339815 : for (struct elt_loc_list *l = cfa_base_preserved_val->locs;
567 8679630 : l; l = l->next)
568 8679630 : if (GET_CODE (l->loc) == PLUS
569 4339815 : && GET_CODE (XEXP (l->loc, 0)) == VALUE
570 4339815 : && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))
571 13019445 : && CONST_INT_P (XEXP (l->loc, 1)))
572 : {
573 4339815 : if (! invariant_or_equiv_p (CSELIB_VAL_PTR (XEXP (l->loc, 0))))
574 : {
575 0 : rtx val = cfa_base_preserved_val->val_rtx;
576 0 : rtx_insn *save_cselib_current_insn = cselib_current_insn;
577 0 : cselib_current_insn = l->setting_insn;
578 0 : new_elt_loc_list (CSELIB_VAL_PTR (XEXP (l->loc, 0)),
579 0 : plus_constant (Pmode, val,
580 0 : -UINTVAL (XEXP (l->loc, 1))));
581 0 : cselib_current_insn = save_cselib_current_insn;
582 : }
583 : break;
584 : }
585 : }
586 : else
587 : {
588 366553546 : for (i = 0; i < n_used_regs; i++)
589 270991763 : REG_VALUES (used_regs[i]) = 0;
590 95561783 : n_used_regs = 0;
591 : }
592 :
593 99901598 : if (cselib_preserve_constants)
594 48586547 : cselib_hash_table->traverse <void *, preserve_constants_and_equivs> (NULL);
595 : else
596 : {
597 95561783 : cselib_hash_table->empty ();
598 95561783 : gcc_checking_assert (!cselib_any_perm_equivs);
599 : }
600 :
601 99901598 : n_useless_values = 0;
602 99901598 : n_useless_debug_values = 0;
603 99901598 : n_debug_values = 0;
604 :
605 99901598 : next_uid = num;
606 :
607 99901598 : first_containing_mem = &dummy_val;
608 99901598 : }
609 :
610 : /* Return the number of the next value that will be generated. */
611 :
612 : unsigned int
613 4835377 : cselib_get_next_uid (void)
614 : {
615 4835377 : return next_uid;
616 : }
617 :
618 : /* Search for X, whose hashcode is HASH, in CSELIB_HASH_TABLE,
619 : INSERTing if requested. When X is part of the address of a MEM,
620 : MEMMODE should specify the mode of the MEM. */
621 :
622 : static cselib_val **
623 710213323 : cselib_find_slot (machine_mode mode, rtx x, hashval_t hash,
624 : enum insert_option insert, machine_mode memmode)
625 : {
626 710213323 : cselib_val **slot = NULL;
627 710213323 : cselib_hasher::key lookup = { mode, x, memmode };
628 710213323 : if (cselib_preserve_constants)
629 159365194 : slot = cselib_preserved_hash_table->find_slot_with_hash (&lookup, hash,
630 : NO_INSERT);
631 159365194 : if (!slot)
632 655771056 : slot = cselib_hash_table->find_slot_with_hash (&lookup, hash, insert);
633 710213323 : return slot;
634 : }
635 :
636 : /* Return true if X contains a VALUE rtx. If ONLY_USELESS is set, we
637 : only return true for values which point to a cselib_val whose value
638 : element has been set to zero, which implies the cselib_val will be
639 : removed. */
640 :
641 : bool
642 1274481486 : references_value_p (const_rtx x, int only_useless)
643 : {
644 1274481486 : const enum rtx_code code = GET_CODE (x);
645 1274481486 : const char *fmt = GET_RTX_FORMAT (code);
646 1274481486 : int i, j;
647 :
648 1274481486 : if (GET_CODE (x) == VALUE
649 1274481486 : && (! only_useless
650 447838308 : || (CSELIB_VAL_PTR (x)->locs == 0 && !PRESERVED_VALUE_P (x))))
651 : return true;
652 :
653 2911440961 : for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
654 : {
655 1639705454 : if (fmt[i] == 'e' && references_value_p (XEXP (x, i), only_useless))
656 : return true;
657 1638319815 : else if (fmt[i] == 'E')
658 45500372 : for (j = 0; j < XVECLEN (x, i); j++)
659 38095668 : if (references_value_p (XVECEXP (x, i, j), only_useless))
660 : return true;
661 : }
662 :
663 : return false;
664 : }
665 :
666 : /* Return true if V is a useless VALUE and can be discarded as such. */
667 :
668 : static bool
669 1202064428 : cselib_useless_value_p (cselib_val *v)
670 : {
671 1202064428 : return (v->locs == 0
672 78834013 : && !PRESERVED_VALUE_P (v->val_rtx)
673 1247677482 : && !SP_DERIVED_VALUE_P (v->val_rtx));
674 : }
675 :
676 : /* For all locations found in X, delete locations that reference useless
677 : values (i.e. values without any location). Called through
678 : htab_traverse. */
679 :
680 : int
681 558258554 : discard_useless_locs (cselib_val **x, void *info ATTRIBUTE_UNUSED)
682 : {
683 558258554 : cselib_val *v = *x;
684 558258554 : struct elt_loc_list **p = &v->locs;
685 558258554 : bool had_locs = v->locs != NULL;
686 558258554 : rtx_insn *setting_insn = v->locs ? v->locs->setting_insn : NULL;
687 :
688 1180301954 : while (*p)
689 : {
690 622043400 : if (references_value_p ((*p)->loc, 1))
691 1269457 : unchain_one_elt_loc_list (p);
692 : else
693 620773943 : p = &(*p)->next;
694 : }
695 :
696 558258554 : if (had_locs && cselib_useless_value_p (v))
697 : {
698 1136943 : if (setting_insn && DEBUG_INSN_P (setting_insn))
699 2 : n_useless_debug_values++;
700 : else
701 1136941 : n_useless_values++;
702 1136943 : values_became_useless = 1;
703 : }
704 558258554 : return 1;
705 : }
706 :
707 : /* If X is a value with no locations, remove it from the hashtable. */
708 :
709 : int
710 48081520 : discard_useless_values (cselib_val **x, void *info ATTRIBUTE_UNUSED)
711 : {
712 48081520 : cselib_val *v = *x;
713 :
714 48081520 : if (v->locs == 0 && cselib_useless_value_p (v))
715 : {
716 2350627 : if (cselib_discard_hook)
717 524276 : cselib_discard_hook (v);
718 :
719 2350627 : CSELIB_VAL_PTR (v->val_rtx) = NULL;
720 2350627 : cselib_hash_table->clear_slot (x);
721 2350627 : unchain_one_value (v);
722 2350627 : n_useless_values--;
723 : }
724 :
725 48081520 : return 1;
726 : }
727 :
728 : /* Clean out useless values (i.e. those which no longer have locations
729 : associated with them) from the hash table. */
730 :
731 : static void
732 4368394 : remove_useless_values (void)
733 : {
734 4422543 : cselib_val **p, *v;
735 :
736 : /* First pass: eliminate locations that reference the value. That in
737 : turn can make more values useless. */
738 4422543 : do
739 : {
740 4422543 : values_became_useless = 0;
741 562681097 : cselib_hash_table->traverse <void *, discard_useless_locs> (NULL);
742 : }
743 4422543 : while (values_became_useless);
744 :
745 : /* Second pass: actually remove the values. */
746 :
747 4368394 : p = &first_containing_mem;
748 4490530 : for (v = *p; v != &dummy_val; v = v->next_containing_mem)
749 122136 : if (v->locs && v == canonical_cselib_val (v))
750 : {
751 118903 : *p = v;
752 118903 : p = &(*p)->next_containing_mem;
753 : }
754 4368394 : *p = &dummy_val;
755 :
756 4368394 : if (cselib_preserve_constants)
757 4339815 : cselib_preserved_hash_table->traverse <void *,
758 4339815 : discard_useless_locs> (NULL);
759 4368394 : gcc_assert (!values_became_useless);
760 :
761 4368394 : n_useless_values += n_useless_debug_values;
762 4368394 : n_debug_values -= n_useless_debug_values;
763 4368394 : n_useless_debug_values = 0;
764 :
765 52449914 : cselib_hash_table->traverse <void *, discard_useless_values> (NULL);
766 :
767 4368394 : gcc_assert (!n_useless_values);
768 4368394 : }
769 :
770 : /* Arrange for a value to not be removed from the hash table even if
771 : it becomes useless. */
772 :
773 : void
774 44876549 : cselib_preserve_value (cselib_val *v)
775 : {
776 44876549 : PRESERVED_VALUE_P (v->val_rtx) = 1;
777 44876549 : }
778 :
779 : /* Test whether a value is preserved. */
780 :
781 : bool
782 260563216 : cselib_preserved_value_p (cselib_val *v)
783 : {
784 260563216 : return PRESERVED_VALUE_P (v->val_rtx);
785 : }
786 :
787 : /* Arrange for a REG value to be assumed constant through the whole function,
788 : never invalidated and preserved across cselib_reset_table calls. */
789 :
790 : void
791 990606 : cselib_preserve_cfa_base_value (cselib_val *v, unsigned int regno)
792 : {
793 990606 : if (cselib_preserve_constants
794 990606 : && v->locs
795 990606 : && REG_P (v->locs->loc))
796 : {
797 495852 : cfa_base_preserved_val = v;
798 495852 : cfa_base_preserved_regno = regno;
799 : }
800 990606 : }
801 :
802 : /* Clean all non-constant expressions in the hash table, but retain
803 : their values. */
804 :
805 : void
806 4339815 : cselib_preserve_only_values (void)
807 : {
808 4339815 : int i;
809 :
810 403602795 : for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
811 399262980 : cselib_invalidate_regno (i, reg_raw_mode[i]);
812 :
813 4339815 : cselib_invalidate_mem (callmem[0]);
814 :
815 4339815 : remove_useless_values ();
816 :
817 4339815 : gcc_assert (first_containing_mem == &dummy_val);
818 4339815 : }
819 :
820 : /* Arrange for a value to be marked as based on stack pointer
821 : for find_base_term purposes. */
822 :
823 : void
824 1911387 : cselib_set_value_sp_based (cselib_val *v)
825 : {
826 1911387 : SP_BASED_VALUE_P (v->val_rtx) = 1;
827 1911387 : }
828 :
829 : /* Test whether a value is based on stack pointer for
830 : find_base_term purposes. */
831 :
832 : bool
833 835454347 : cselib_sp_based_value_p (cselib_val *v)
834 : {
835 835454347 : return SP_BASED_VALUE_P (v->val_rtx);
836 : }
837 :
838 : /* Return the mode in which a register was last set. If X is not a
839 : register, return its mode. If the mode in which the register was
840 : set is not known, or the value was already clobbered, return
841 : VOIDmode. */
842 :
843 : machine_mode
844 112241581 : cselib_reg_set_mode (const_rtx x)
845 : {
846 112241581 : if (!REG_P (x))
847 33986776 : return GET_MODE (x);
848 :
849 78254805 : if (REG_VALUES (REGNO (x)) == NULL
850 78254805 : || REG_VALUES (REGNO (x))->elt == NULL)
851 : return VOIDmode;
852 :
853 23719359 : return GET_MODE (REG_VALUES (REGNO (x))->elt->val_rtx);
854 : }
855 :
856 : /* If x is a PLUS or an autoinc operation, expand the operation,
857 : storing the offset, if any, in *OFF. */
858 :
859 : static rtx
860 1453659896 : autoinc_split (rtx x, rtx *off, machine_mode memmode)
861 : {
862 1453659896 : switch (GET_CODE (x))
863 : {
864 802782262 : case PLUS:
865 802782262 : *off = XEXP (x, 1);
866 802782262 : x = XEXP (x, 0);
867 802782262 : break;
868 :
869 12703610 : case PRE_DEC:
870 12703610 : if (memmode == VOIDmode)
871 : return x;
872 :
873 25407220 : *off = gen_int_mode (-GET_MODE_SIZE (memmode), GET_MODE (x));
874 12703610 : x = XEXP (x, 0);
875 12703610 : break;
876 :
877 0 : case PRE_INC:
878 0 : if (memmode == VOIDmode)
879 : return x;
880 :
881 0 : *off = gen_int_mode (GET_MODE_SIZE (memmode), GET_MODE (x));
882 0 : x = XEXP (x, 0);
883 0 : break;
884 :
885 1639130 : case PRE_MODIFY:
886 1639130 : x = XEXP (x, 1);
887 1639130 : break;
888 :
889 1812696 : case POST_DEC:
890 1812696 : case POST_INC:
891 1812696 : case POST_MODIFY:
892 1812696 : x = XEXP (x, 0);
893 1812696 : break;
894 :
895 : default:
896 : break;
897 : }
898 :
899 1453659896 : if (GET_MODE (x) == Pmode
900 1404691676 : && (REG_P (x) || MEM_P (x) || GET_CODE (x) == VALUE)
901 2258721541 : && (*off == NULL_RTX || CONST_INT_P (*off)))
902 : {
903 800451538 : cselib_val *e;
904 800451538 : if (GET_CODE (x) == VALUE)
905 511801189 : e = CSELIB_VAL_PTR (x);
906 : else
907 288650349 : e = cselib_lookup (x, GET_MODE (x), 0, memmode);
908 800451538 : if (e)
909 : {
910 791235511 : if (SP_DERIVED_VALUE_P (e->val_rtx)
911 791235511 : && (*off == NULL_RTX || *off == const0_rtx))
912 : {
913 66107 : *off = NULL_RTX;
914 66107 : return e->val_rtx;
915 : }
916 1729223141 : for (struct elt_loc_list *l = e->locs; l; l = l->next)
917 1357001716 : if (GET_CODE (l->loc) == PLUS
918 582311277 : && GET_CODE (XEXP (l->loc, 0)) == VALUE
919 581048923 : && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))
920 1775952142 : && CONST_INT_P (XEXP (l->loc, 1)))
921 : {
922 418947979 : if (*off == NULL_RTX)
923 1455978 : *off = XEXP (l->loc, 1);
924 : else
925 417492001 : *off = plus_constant (Pmode, *off,
926 417492001 : INTVAL (XEXP (l->loc, 1)));
927 418947979 : if (*off == const0_rtx)
928 257737629 : *off = NULL_RTX;
929 418947979 : return XEXP (l->loc, 0);
930 : }
931 : }
932 : }
933 : return x;
934 : }
935 :
/* Return true if we can prove that X and Y contain the same value,
   taking our gathered information into account.  MEMMODE holds the
   mode of the enclosing MEM, if any, as required to deal with autoinc
   addressing modes.  If X and Y are not (known to be) part of
   addresses, MEMMODE should be VOIDmode.  DEPTH bounds the recursion
   through VALUE location lists; it is capped at 128.  */

bool
rtx_equal_for_cselib_1 (rtx x, rtx y, machine_mode memmode, int depth)
{
  enum rtx_code code;
  const char *fmt;
  int i;

  /* Canonicalize registers and memory references to their recorded
     VALUEs, if any, before comparing.  */
  if (REG_P (x) || MEM_P (x))
    {
      cselib_val *e = cselib_lookup (x, GET_MODE (x), 0, memmode);

      if (e)
	x = e->val_rtx;
    }

  if (REG_P (y) || MEM_P (y))
    {
      cselib_val *e = cselib_lookup (y, GET_MODE (y), 0, memmode);

      if (e)
	y = e->val_rtx;
    }

  if (x == y)
    return true;

  if (GET_CODE (x) == VALUE)
    {
      cselib_val *e = canonical_cselib_val (CSELIB_VAL_PTR (x));
      struct elt_loc_list *l;

      /* Two VALUEs are equal iff they canonicalize to the same
	 cselib_val.  */
      if (GET_CODE (y) == VALUE)
	return e == canonical_cselib_val (CSELIB_VAL_PTR (y));

      /* For SP-derived values, let autoinc_split canonicalize Y and
	 check whether it reduces to this very VALUE with no offset.  */
      if ((SP_DERIVED_VALUE_P (x)
	   || SP_DERIVED_VALUE_P (e->val_rtx))
	  && GET_MODE (y) == Pmode)
	{
	  rtx yoff = NULL;
	  rtx yr = autoinc_split (y, &yoff, memmode);
	  if ((yr == x || yr == e->val_rtx) && yoff == NULL_RTX)
	    return true;
	}

      /* Cap the recursion through location lists.  */
      if (depth == 128)
	return false;

      for (l = e->locs; l; l = l->next)
	{
	  rtx t = l->loc;

	  /* Avoid infinite recursion.  We know we have the canonical
	     value, so we can just skip any values in the equivalence
	     list.  */
	  if (REG_P (t) || MEM_P (t) || GET_CODE (t) == VALUE)
	    continue;
	  else if (rtx_equal_for_cselib_1 (t, y, memmode, depth + 1))
	    return true;
	}

      return false;
    }
  else if (GET_CODE (y) == VALUE)
    {
      /* Mirror image of the GET_CODE (x) == VALUE case above.  */
      cselib_val *e = canonical_cselib_val (CSELIB_VAL_PTR (y));
      struct elt_loc_list *l;

      if ((SP_DERIVED_VALUE_P (y)
	   || SP_DERIVED_VALUE_P (e->val_rtx))
	  && GET_MODE (x) == Pmode)
	{
	  rtx xoff = NULL;
	  rtx xr = autoinc_split (x, &xoff, memmode);
	  if ((xr == y || xr == e->val_rtx) && xoff == NULL_RTX)
	    return true;
	}

      if (depth == 128)
	return false;

      for (l = e->locs; l; l = l->next)
	{
	  rtx t = l->loc;

	  if (REG_P (t) || MEM_P (t) || GET_CODE (t) == VALUE)
	    continue;
	  else if (rtx_equal_for_cselib_1 (x, t, memmode, depth + 1))
	    return true;
	}

      return false;
    }

  if (GET_MODE (x) != GET_MODE (y))
    return false;

  /* If the codes differ, or both are Pmode PLUS with constant second
     operands, try to canonicalize both sides via autoinc_split and
     compare bases and offsets separately.  */
  if (GET_CODE (x) != GET_CODE (y)
      || (GET_CODE (x) == PLUS
	  && GET_MODE (x) == Pmode
	  && CONST_INT_P (XEXP (x, 1))
	  && CONST_INT_P (XEXP (y, 1))))
    {
      rtx xorig = x, yorig = y;
      rtx xoff = NULL, yoff = NULL;

      x = autoinc_split (x, &xoff, memmode);
      y = autoinc_split (y, &yoff, memmode);

      /* Don't recurse if nothing changed.  */
      if (x != xorig || y != yorig)
	{
	  /* Exactly one side having an offset means inequality.  */
	  if (!xoff != !yoff)
	    return false;

	  if (xoff && !rtx_equal_for_cselib_1 (xoff, yoff, memmode, depth))
	    return false;

	  return rtx_equal_for_cselib_1 (x, y, memmode, depth);
	}

      if (GET_CODE (xorig) != GET_CODE (yorig))
	return false;
    }

  /* These won't be handled correctly by the code below.  */
  switch (GET_CODE (x))
    {
    CASE_CONST_UNIQUE:
    case DEBUG_EXPR:
      /* Unique constants and debug exprs compare by pointer identity,
	 which was already checked by x == y above.  */
      return false;

    case CONST_VECTOR:
      if (!same_vector_encodings_p (x, y))
	return false;
      break;

    case DEBUG_IMPLICIT_PTR:
      return DEBUG_IMPLICIT_PTR_DECL (x)
	     == DEBUG_IMPLICIT_PTR_DECL (y);

    case DEBUG_PARAMETER_REF:
      return DEBUG_PARAMETER_REF_DECL (x)
	     == DEBUG_PARAMETER_REF_DECL (y);

    case ENTRY_VALUE:
      /* ENTRY_VALUEs are function invariant, it is thus undesirable to
	 use rtx_equal_for_cselib_1 to compare the operands.  */
      return rtx_equal_p (ENTRY_VALUE_EXP (x), ENTRY_VALUE_EXP (y));

    case LABEL_REF:
      return label_ref_label (x) == label_ref_label (y);

    case REG:
      return REGNO (x) == REGNO (y);

    case MEM:
      /* We have to compare any autoinc operations in the addresses
	 using this MEM's mode.  */
      return rtx_equal_for_cselib_1 (XEXP (x, 0), XEXP (y, 0), GET_MODE (x),
				     depth);

    default:
      break;
    }

  /* Generic structural comparison driven by the rtx format string.  */
  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int j;

      switch (fmt[i])
	{
	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return false;
	  break;

	case 'n':
	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return false;
	  break;

	case 'L':
	  if (XLOC (x, i) != XLOC (y, i))
	    return false;
	  break;

	case 'p':
	  if (maybe_ne (SUBREG_BYTE (x), SUBREG_BYTE (y)))
	    return false;
	  break;

	case 'V':
	case 'E':
	  /* Two vectors must have the same length.  */
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return false;

	  /* And the corresponding elements must match.  */
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (! rtx_equal_for_cselib_1 (XVECEXP (x, i, j),
					  XVECEXP (y, i, j), memmode, depth))
	      return false;
	  break;

	case 'e':
	  /* For commutative codes, also try the swapped operand
	     order before the straight comparison.  */
	  if (i == 1
	      && targetm.commutative_p (x, UNKNOWN)
	      && rtx_equal_for_cselib_1 (XEXP (x, 1), XEXP (y, 0), memmode,
					 depth)
	      && rtx_equal_for_cselib_1 (XEXP (x, 0), XEXP (y, 1), memmode,
					 depth))
	    return true;
	  if (! rtx_equal_for_cselib_1 (XEXP (x, i), XEXP (y, i), memmode,
					depth))
	    return false;
	  break;

	case 'S':
	case 's':
	  if (strcmp (XSTR (x, i), XSTR (y, i)))
	    return false;
	  break;

	case 'u':
	  /* These are just backpointers, so they don't matter.  */
	  break;

	case '0':
	case 't':
	  break;

	  /* It is believed that rtx's at this level will never
	     contain anything but integers and other rtx's,
	     except for within LABEL_REFs and SYMBOL_REFs.  */
	default:
	  gcc_unreachable ();
	}
    }
  return true;
}
1186 :
/* Wrapper for rtx_equal_for_cselib_p to determine whether a SET is
   truly redundant, taking into account aliasing information.
   Returns true only if removing the SET would neither change any
   value nor (for stores, under -fstrict-aliasing) alter the
   effective alias set seen by later passes.  */
bool
cselib_redundant_set_p (rtx set)
{
  gcc_assert (GET_CODE (set) == SET);
  rtx dest = SET_DEST (set);
  /* If the destination's last-set mode is unknown or differs, the
     set cannot be proven redundant.  */
  if (cselib_reg_set_mode (dest) != GET_MODE (dest))
    return false;

  rtx src = SET_SRC (set);
  /* Volatile loads must be kept; otherwise require value equality.  */
  if ((MEM_P (src) && MEM_VOLATILE_P (src))
      || !rtx_equal_for_cselib_p (dest, src))
    return false;

  /* Strip wrappers to find the underlying destination object.  */
  while (GET_CODE (dest) == SUBREG
	 || GET_CODE (dest) == ZERO_EXTRACT
	 || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  /* Register destinations, or stores without strict aliasing, need
     no TBAA check.  */
  if (!flag_strict_aliasing || !MEM_P (dest))
    return true;

  if (MEM_VOLATILE_P (dest))
    return false;

  /* For a store we need to check that suppressing it will not change
     the effective alias set.  */
  rtx dest_addr = XEXP (dest, 0);

  /* Lookup the equivalents to the original dest (rather than just the
     MEM).  */
  cselib_val *src_val = cselib_lookup (SET_DEST (set),
				       GET_MODE (SET_DEST (set)),
				       0, VOIDmode);

  if (src_val)
    {
      /* Walk the list of source equivalents to find the MEM accessing
	 the same location.  */
      for (elt_loc_list *l = src_val->locs; l; l = l->next)
	{
	  rtx src_equiv = l->loc;
	  while (GET_CODE (src_equiv) == SUBREG
		 || GET_CODE (src_equiv) == ZERO_EXTRACT
		 || GET_CODE (src_equiv) == STRICT_LOW_PART)
	    src_equiv = XEXP (src_equiv, 0);

	  if (MEM_P (src_equiv))
	    {
	      /* Match the MEMs by comparing the addresses.  We can
		 only remove the later store if the earlier aliases at
		 least all the accesses of the later one.  */
	      if (rtx_equal_for_cselib_1 (dest_addr, XEXP (src_equiv, 0),
					  GET_MODE (dest), 0))
		return mems_same_for_tbaa_p (src_equiv, dest);
	    }
	}
    }

  /* We failed to find a recorded value in the cselib history, so try
     the source of this set; this catches cases such as *p = *q when p
     and q have the same value.  */
  while (GET_CODE (src) == SUBREG)
    src = XEXP (src, 0);

  if (MEM_P (src)
      && rtx_equal_for_cselib_1 (dest_addr, XEXP (src, 0), GET_MODE (dest), 0))
    return mems_same_for_tbaa_p (src, dest);

  return false;
}
1259 :
/* Helper function for cselib_hash_rtx.  Arguments like for cselib_hash_rtx,
   except that it hashes (plus:P x c).  Canonicalizes the base towards an
   SP_DERIVED_VALUE_P value so that addresses that differ only in how the
   stack-pointer offset is expressed hash identically.  Returns 0 when X
   has no cselib value and CREATE is zero.  */

static hashval_t
cselib_hash_plus_const_int (rtx x, HOST_WIDE_INT c, int create,
			    machine_mode memmode)
{
  cselib_val *e = cselib_lookup (x, GET_MODE (x), create, memmode);
  if (! e)
    return 0;

  /* If E is itself not SP-derived but one of its locations is
     (plus SP_DERIVED_VALUE const_int), rebase on that value and fold
     its constant into C.  */
  if (! SP_DERIVED_VALUE_P (e->val_rtx))
    for (struct elt_loc_list *l = e->locs; l; l = l->next)
      if (GET_CODE (l->loc) == PLUS
	  && GET_CODE (XEXP (l->loc, 0)) == VALUE
	  && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))
	  && CONST_INT_P (XEXP (l->loc, 1)))
	{
	  e = CSELIB_VAL_PTR (XEXP (l->loc, 0));
	  c = trunc_int_for_mode (c + UINTVAL (XEXP (l->loc, 1)), Pmode);
	  break;
	}
  /* A zero net offset hashes like the base value alone.  */
  if (c == 0)
    return e->hash;

  inchash::hash hash;
  hash.add_int (PLUS);
  hash.add_int (GET_MODE (x));
  hash.merge_hash (e->hash);
  hash.add_hwi (c);

  /* Never return 0: that would signal failure to the caller.  */
  return hash.end () ? hash.end () : 1 + (unsigned int) PLUS;
}
1293 :
/* Hash an rtx.  Return 0 if we couldn't hash the rtx.
   For registers and memory locations, we look up their cselib_val structure
   and return its VALUE element.
   Possible reasons for return 0 are: the object is volatile, or we couldn't
   find a register or memory location in the table and CREATE is zero.  If
   CREATE is nonzero, table elts are created for regs and mem.
   N.B. this hash function returns the same hash value for RTXes that
   differ only in the order of operands, thus it is suitable for comparisons
   that take commutativity into account.
   If we wanted to also support associative rules, we'd have to use a different
   strategy to avoid returning spurious 0, e.g. return ~(~0U >> 1) .
   MEMMODE indicates the mode of an enclosing MEM, and it's only
   used to compute autoinc values.
   We used to have a MODE argument for hashing for CONST_INTs, but that
   didn't make sense, since it caused spurious hash differences between
    (set (reg:SI 1) (const_int))
    (plus:SI (reg:SI 2) (reg:SI 1))
   and
    (plus:SI (reg:SI 2) (const_int))
   If the mode is important in any context, it must be checked specifically
   in a comparison anyway, since relying on hash differences is unsafe.  */

static hashval_t
cselib_hash_rtx (rtx x, int create, machine_mode memmode)
{
  cselib_val *e;
  poly_int64 offset;
  int i, j;
  enum rtx_code code;
  const char *fmt;
  inchash::hash hash;

  code = GET_CODE (x);
  hash.add_int (code);
  hash.add_int (GET_MODE (x));

  switch (code)
    {
    case VALUE:
      /* A VALUE carries its hash in its cselib_val.  */
      e = CSELIB_VAL_PTR (x);
      return e->hash;

    case MEM:
    case REG:
      e = cselib_lookup (x, GET_MODE (x), create, memmode);
      if (! e)
	return 0;

      return e->hash;

    case DEBUG_EXPR:
      hash.add_int (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x)));
      return hash.end () ? hash.end() : (unsigned int) DEBUG_EXPR;

    case DEBUG_IMPLICIT_PTR:
      hash.add_int (DECL_UID (DEBUG_IMPLICIT_PTR_DECL (x)));
      return hash.end () ? hash.end () : (unsigned int) DEBUG_IMPLICIT_PTR;

    case DEBUG_PARAMETER_REF:
      hash.add_int (DECL_UID (DEBUG_PARAMETER_REF_DECL (x)));
      return hash.end () ? hash.end () : (unsigned int) DEBUG_PARAMETER_REF;

    case ENTRY_VALUE:
      /* ENTRY_VALUEs are function invariant, thus try to avoid
	 recursing on argument if ENTRY_VALUE is one of the
	 forms emitted by expand_debug_expr, otherwise
	 ENTRY_VALUE hash would depend on the current value
	 in some register or memory.  */
      if (REG_P (ENTRY_VALUE_EXP (x)))
	hash.add_int ((unsigned int) REG
		      + (unsigned int) GET_MODE (ENTRY_VALUE_EXP (x))
		      + (unsigned int) REGNO (ENTRY_VALUE_EXP (x)));
      else if (MEM_P (ENTRY_VALUE_EXP (x))
	       && REG_P (XEXP (ENTRY_VALUE_EXP (x), 0)))
	hash.add_int ((unsigned int) MEM
		      + (unsigned int) GET_MODE (XEXP (ENTRY_VALUE_EXP (x), 0))
		      + (unsigned int) REGNO (XEXP (ENTRY_VALUE_EXP (x), 0)));
      else
	hash.add_int (cselib_hash_rtx (ENTRY_VALUE_EXP (x), create, memmode));
      return hash.end () ? hash.end () : (unsigned int) ENTRY_VALUE;

    case CONST_INT:
      hash.add_hwi (UINTVAL (x));
      return hash.end () ? hash.end () : (unsigned int) CONST_INT;

    case CONST_WIDE_INT:
      for (i = 0; i < CONST_WIDE_INT_NUNITS (x); i++)
	hash.add_hwi (CONST_WIDE_INT_ELT (x, i));
      return hash.end () ? hash.end () : (unsigned int) CONST_WIDE_INT;

    case CONST_POLY_INT:
      {
	for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
	  hash.add_wide_int (CONST_POLY_INT_COEFFS (x)[i]);
	return hash.end () ? hash.end () : (unsigned int) CONST_POLY_INT;
      }

    case CONST_DOUBLE:
      /* This is like the general case, except that it only counts
	 the integers representing the constant.  */
      if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (x) == VOIDmode)
	{
	  hash.add_hwi (CONST_DOUBLE_LOW (x));
	  hash.add_hwi (CONST_DOUBLE_HIGH (x));
	}
      else
	hash.merge_hash (real_hash (CONST_DOUBLE_REAL_VALUE (x)));
      return hash.end () ? hash.end () : (unsigned int) CONST_DOUBLE;

    case CONST_FIXED:
      hash.merge_hash (fixed_hash (CONST_FIXED_VALUE (x)));
      return hash.end () ? hash.end () : (unsigned int) CONST_FIXED;

    case CONST_VECTOR:
      {
	int units;
	rtx elt;

	/* Hash the encoded elements, not the full expansion.  */
	units = const_vector_encoded_nelts (x);

	for (i = 0; i < units; ++i)
	  {
	    elt = CONST_VECTOR_ENCODED_ELT (x, i);
	    hash.merge_hash (cselib_hash_rtx (elt, 0, memmode));
	  }

	return hash.end () ? hash.end () : (unsigned int) CONST_VECTOR;
      }

      /* Assume there is only one rtx object for any given label.  */
    case LABEL_REF:
      /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
	 differences and differences between each stage's debugging dumps.  */
      hash.add_int (CODE_LABEL_NUMBER (label_ref_label (x)));
      return hash.end () ? hash.end () : (unsigned int) LABEL_REF;

    case SYMBOL_REF:
      {
	/* Don't hash on the symbol's address to avoid bootstrap differences.
	   Different hash values may cause expressions to be recorded in
	   different orders and thus different registers to be used in the
	   final assembler.  This also avoids differences in the dump files
	   between various stages.  */
	const char *p = (const char *) XSTR (x, 0);

	if (*p)
	  hash.add (p, strlen (p));

	return hash.end () ? hash.end () : (unsigned int) SYMBOL_REF;
      }

    case PRE_DEC:
    case PRE_INC:
      {
	/* We can't compute these without knowing the MEM mode.  */
	gcc_assert (memmode != VOIDmode);
	offset = GET_MODE_SIZE (memmode);
	if (code == PRE_DEC)
	  offset = -offset;
	/* Adjust the hash so that (mem:MEMMODE (pre_* (reg))) hashes
	   like (mem:MEMMODE (plus (reg) (const_int I))).  */
	if (GET_MODE (x) == Pmode
	    && (REG_P (XEXP (x, 0))
		|| MEM_P (XEXP (x, 0))
		|| GET_CODE (XEXP (x, 0)) == VALUE))
	  {
	    HOST_WIDE_INT c;
	    if (offset.is_constant (&c))
	      return cselib_hash_plus_const_int (XEXP (x, 0),
						 trunc_int_for_mode (c, Pmode),
						 create, memmode);
	  }

	/* Fallback for non-constant (poly) offsets: hash base and
	   offset separately, like a PLUS.  */
	hashval_t tem_hash = cselib_hash_rtx (XEXP (x, 0), create, memmode);
	if (tem_hash == 0)
	  return 0;
	hash.merge_hash (tem_hash);
	tem_hash = cselib_hash_rtx (gen_int_mode (offset, GET_MODE (x)),
				    create, memmode);
	if (tem_hash == 0)
	  return 0;
	hash.merge_hash (tem_hash);
	return hash.end () ? hash.end () : 1 + (unsigned) PLUS;
      }

    case PRE_MODIFY:
      {
	gcc_assert (memmode != VOIDmode);
	/* Hash the new address, operand 1 of the PRE_MODIFY.  */
	hashval_t tem_hash = cselib_hash_rtx (XEXP (x, 1), create, memmode);
	if (tem_hash == 0)
	  return 0;
	hash.merge_hash (tem_hash);
	return hash.end () ? hash.end () : 1 + (unsigned) PRE_MODIFY;
      }

    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      {
	gcc_assert (memmode != VOIDmode);
	/* Post-modifications hash on the unmodified address.  */
	hashval_t tem_hash = cselib_hash_rtx (XEXP (x, 0), create, memmode);
	if (tem_hash == 0)
	  return 0;
	hash.merge_hash (tem_hash);
	return hash.end () ? hash.end () : 1 + (unsigned) code;
      }

    case PC:
    case CALL:
    case UNSPEC_VOLATILE:
      /* Not hashable: these have side effects or unstable values.  */
      return 0;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
	return 0;

      break;

    case PLUS:
      /* Canonicalize (plus:P base const_int) through
	 cselib_hash_plus_const_int so equal addresses hash equally.  */
      if (GET_MODE (x) == Pmode
	  && (REG_P (XEXP (x, 0))
	      || MEM_P (XEXP (x, 0))
	      || GET_CODE (XEXP (x, 0)) == VALUE)
	  && CONST_INT_P (XEXP (x, 1)))
	return cselib_hash_plus_const_int (XEXP (x, 0), INTVAL (XEXP (x, 1)),
					   create, memmode);
      break;

    default:
      break;
    }

  i = GET_RTX_LENGTH (code) - 1;
  fmt = GET_RTX_FORMAT (code);

  /* Commutative codes hash their two operands order-independently.  */
  if (COMMUTATIVE_P (x))
    {
      gcc_assert (i == 1 && fmt[0] == 'e' && fmt[1] == 'e');
      hashval_t tem1_hash = cselib_hash_rtx (XEXP (x, 1), create, memmode);
      if (tem1_hash == 0)
	return 0;
      hashval_t tem0_hash = cselib_hash_rtx (XEXP (x, 0), create, memmode);
      if (tem0_hash == 0)
	return 0;
      hash.add_commutative (tem0_hash, tem1_hash);
      return hash.end () ? hash.end () : 1 + (unsigned int) GET_CODE (x);
    }

  /* Generic case: fold in each operand per the rtx format string.  */
  for (; i >= 0; i--)
    {
      switch (fmt[i])
	{
	case 'e':
	  {
	    rtx tem = XEXP (x, i);
	    hashval_t tem_hash = cselib_hash_rtx (tem, create, memmode);
	    if (tem_hash == 0)
	      return 0;
	    hash.merge_hash (tem_hash);
	  }
	  break;
	case 'E':
	  for (j = 0; j < XVECLEN (x, i); j++)
	    {
	      hashval_t tem_hash
		= cselib_hash_rtx (XVECEXP (x, i, j), create, memmode);
	      if (tem_hash == 0)
		return 0;
	      hash.merge_hash (tem_hash);
	    }
	  break;

	case 's':
	  {
	    const char *p = (const char *) XSTR (x, i);

	    if (p && *p)
	      hash.add (p, strlen (p));
	    break;
	  }

	case 'i':
	  hash.add_hwi (XINT (x, i));
	  break;

	case 'L':
	  hash.add_hwi (XLOC (x, i));
	  break;

	case 'p':
	  hash.add_int (constant_lower_bound (SUBREG_BYTE (x)));
	  break;

	case '0':
	case 't':
	  /* unused */
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Never return 0 for a successfully hashed rtx.  */
  return hash.end () ? hash.end () : 1 + (unsigned int) GET_CODE (x);
}
1599 :
/* Create a new value structure for VALUE and initialize it.  The mode of the
   value is MODE.  HASH is the value's (nonzero) hash code and X the rtx the
   value is being created for; for integer registers, equivalences to
   already-known narrower-mode values of the same register are recorded.  */

static inline cselib_val *
new_cselib_val (hashval_t hash, machine_mode mode, rtx x)
{
  cselib_val *e = cselib_val_pool.allocate ();

  gcc_assert (hash);
  gcc_assert (next_uid);

  e->hash = hash;
  e->uid = next_uid++;
  /* We use an alloc pool to allocate this RTL construct because it
     accounts for about 8% of the overall memory usage.  We know
     precisely when we can have VALUE RTXen (when cselib is active)
     so we don't need to put them in garbage collected memory.
     ??? Why should a VALUE be an RTX in the first place?  */
  e->val_rtx = (rtx_def*) value_pool.allocate ();
  memset (e->val_rtx, 0, RTX_HDR_SIZE);
  PUT_CODE (e->val_rtx, VALUE);
  PUT_MODE (e->val_rtx, mode);
  CSELIB_VAL_PTR (e->val_rtx) = e;
  e->addr_list = 0;
  e->locs = 0;
  e->next_containing_mem = 0;

  /* For a multi-byte integer register that already has recorded
     values, link any known narrower-mode values of the same register
     to the lowpart subreg of this new value (skipped while processing
     debug insns).  */
  scalar_int_mode int_mode;
  if (REG_P (x) && is_int_mode (mode, &int_mode)
      && GET_MODE_SIZE (int_mode) > 1
      && REG_VALUES (REGNO (x)) != NULL
      && (!cselib_current_insn || !DEBUG_INSN_P (cselib_current_insn)))
    {
      rtx copy = shallow_copy_rtx (x);
      scalar_int_mode narrow_mode_iter;
      FOR_EACH_MODE_UNTIL (narrow_mode_iter, int_mode)
	{
	  PUT_MODE_RAW (copy, narrow_mode_iter);
	  cselib_val *v = cselib_lookup (copy, narrow_mode_iter, 0, VOIDmode);
	  if (v)
	    {
	      rtx sub = lowpart_subreg (narrow_mode_iter, e->val_rtx, int_mode);
	      if (sub)
		new_elt_loc_list (v, sub);
	    }
	}
    }

  if (dump_file && (dump_flags & TDF_CSELIB))
    {
      fprintf (dump_file, "cselib value %u:%u ", e->uid, hash);
      if (flag_dump_noaddr || flag_dump_unnumbered)
	fputs ("# ", dump_file);
      else
	fprintf (dump_file, "%p ", (void*)e);
      print_rtl_single (dump_file, x);
      fputc ('\n', dump_file);
    }

  return e;
}
1661 :
/* ADDR_ELT is a value that is used as address.  MEM_ELT is the value that
   contains the data at this address.  X is a MEM that represents the
   value.  Update the two value structures to represent this situation.
   Also links MEM_ELT into the global first_containing_mem chain if it
   is not there yet.  */

static void
add_mem_for_addr (cselib_val *addr_elt, cselib_val *mem_elt, rtx x)
{
  addr_elt = canonical_cselib_val (addr_elt);
  mem_elt = canonical_cselib_val (mem_elt);

  /* Avoid duplicates.  */
  addr_space_t as = MEM_ADDR_SPACE (x);
  for (elt_loc_list *l = mem_elt->locs; l; l = l->next)
    if (MEM_P (l->loc)
	&& CSELIB_VAL_PTR (XEXP (l->loc, 0)) == addr_elt
	&& MEM_ADDR_SPACE (l->loc) == as)
      {
	/* Already recorded; just refresh its debug status.  */
	promote_debug_loc (l);
	return;
      }

  /* Cross-link: the address value remembers the MEM value, and the
     MEM value gets a location using the address VALUE rtx.  */
  addr_elt->addr_list = new_elt_list (addr_elt->addr_list, mem_elt);
  new_elt_loc_list (mem_elt,
		    replace_equiv_address_nv (x, addr_elt->val_rtx));
  if (mem_elt->next_containing_mem == NULL)
    {
      mem_elt->next_containing_mem = first_containing_mem;
      first_containing_mem = mem_elt;
    }
}
1692 :
/* Subroutine of cselib_lookup.  Return a value for X, which is a MEM rtx.
   If CREATE, make a new one if we haven't seen it before.  Returns 0 for
   volatile, BLKmode, or unrecordable memory, or when the (sub)lookup
   fails and CREATE is zero.  */

static cselib_val *
cselib_lookup_mem (rtx x, int create)
{
  machine_mode mode = GET_MODE (x);
  machine_mode addr_mode;
  cselib_val **slot;
  cselib_val *addr;
  cselib_val *mem_elt;

  /* Refuse memory we cannot track reliably.  */
  if (MEM_VOLATILE_P (x) || mode == BLKmode
      || !cselib_record_memory
      || (FLOAT_MODE_P (mode) && flag_float_store))
    return 0;

  addr_mode = GET_MODE (XEXP (x, 0));
  if (addr_mode == VOIDmode)
    addr_mode = Pmode;

  /* Look up the value for the address.  */
  addr = cselib_lookup (XEXP (x, 0), addr_mode, create, mode);
  if (! addr)
    return 0;
  addr = canonical_cselib_val (addr);

  /* Find a value that describes a value of our mode at that address.  */
  addr_space_t as = MEM_ADDR_SPACE (x);
  for (elt_list *l = addr->addr_list; l; l = l->next)
    if (GET_MODE (l->elt->val_rtx) == mode)
      {
	/* The mode matches; also require a MEM location in the same
	   address space.  */
	for (elt_loc_list *l2 = l->elt->locs; l2; l2 = l2->next)
	  if (MEM_P (l2->loc) && MEM_ADDR_SPACE (l2->loc) == as)
	    {
	      promote_debug_loc (l->elt->locs);
	      return l->elt;
	    }
      }

  if (! create)
    return 0;

  /* Nothing recorded yet: create a fresh value and enter it in the
     hash table.  */
  mem_elt = new_cselib_val (next_uid, mode, x);
  add_mem_for_addr (addr, mem_elt, x);
  slot = cselib_find_slot (mode, x, mem_elt->hash, INSERT, VOIDmode);
  *slot = mem_elt;
  return mem_elt;
}
1742 :
/* Search through the possible substitutions in P.  We prefer a non reg
   substitution because this allows us to expand the tree further.  If
   we find, just a reg, take the lowest regno.  There may be several
   non-reg results, we just take the first one because they will all
   expand to the same place.  EVD carries the expansion state (active
   registers, optional callback); MAX_DEPTH bounds the recursion.  */

static rtx
expand_loc (struct elt_loc_list *p, struct expand_value_data *evd,
	    int max_depth)
{
  rtx reg_result = NULL;
  unsigned int regno = UINT_MAX;
  struct elt_loc_list *p_in = p;

  for (; p; p = p->next)
    {
      /* Return these right away to avoid returning stack pointer based
	 expressions for frame pointer and vice versa, which is something
	 that would confuse DSE.  See the comment in cselib_expand_value_rtx_1
	 for more details.  */
      if (REG_P (p->loc)
	  && (REGNO (p->loc) == STACK_POINTER_REGNUM
	      || REGNO (p->loc) == FRAME_POINTER_REGNUM
	      || REGNO (p->loc) == HARD_FRAME_POINTER_REGNUM
	      || REGNO (p->loc) == cfa_base_preserved_regno))
	return p->loc;
      /* Avoid infinite recursion trying to expand a reg into
	 the same reg.  Remember only the lowest-numbered candidate
	 not currently being expanded.  */
      if ((REG_P (p->loc))
	  && (REGNO (p->loc) < regno)
	  && !bitmap_bit_p (evd->regs_active, REGNO (p->loc)))
	{
	  reg_result = p->loc;
	  regno = REGNO (p->loc);
	}
      /* Avoid infinite recursion and do not try to expand the
	 value.  */
      else if (GET_CODE (p->loc) == VALUE
	       && CSELIB_VAL_PTR (p->loc)->locs == p_in)
	continue;
      else if (!REG_P (p->loc))
	{
	  rtx result, note;
	  if (dump_file && (dump_flags & TDF_CSELIB))
	    {
	      print_inline_rtx (dump_file, p->loc, 0);
	      fprintf (dump_file, "\n");
	    }
	  /* A LO_SUM whose symbol matches a REG_EQUAL note on the
	     setting insn expands directly to the symbol.  */
	  if (GET_CODE (p->loc) == LO_SUM
	      && GET_CODE (XEXP (p->loc, 1)) == SYMBOL_REF
	      && p->setting_insn
	      && (note = find_reg_note (p->setting_insn, REG_EQUAL, NULL_RTX))
	      && XEXP (note, 0) == XEXP (p->loc, 1))
	    return XEXP (p->loc, 1);
	  result = cselib_expand_value_rtx_1 (p->loc, evd, max_depth - 1);
	  if (result)
	    return result;
	}

    }

  /* No non-reg location expanded; fall back to the best register.  */
  if (regno != UINT_MAX)
    {
      rtx result;
      if (dump_file && (dump_flags & TDF_CSELIB))
	fprintf (dump_file, "r%d\n", regno);

      result = cselib_expand_value_rtx_1 (reg_result, evd, max_depth - 1);
      if (result)
	return result;
    }

  if (dump_file && (dump_flags & TDF_CSELIB))
    {
      if (reg_result)
	{
	  print_inline_rtx (dump_file, reg_result, 0);
	  fprintf (dump_file, "\n");
	}
      else
	fprintf (dump_file, "NULL\n");
    }
  return reg_result;
}
1827 :
1828 :
1829 : /* Forward substitute and expand an expression out to its roots.
1830 : This is the opposite of common subexpression elimination. Because local value
1831 : numbering is such a weak optimization, the expanded expression is
1832 : pretty much unique (not from a pointer equals point of view but
1833 : from a tree shape point of view).
1834 :
1835 : This function returns NULL if the expansion fails. The expansion
1836 : will fail if there is no value number for one of the operands or if
1837 : one of the operands has been overwritten between the current insn
1838 : and the beginning of the basic block. For instance x has no
1839 : expansion in:
1840 :
1841 : r1 <- r1 + 3
1842 : x <- r1 + 8
1843 :
1844 : REGS_ACTIVE is a scratch bitmap that should be clear when passing in.
1845 : It is clear on return. */
1846 :
1847 : rtx
1848 37280494 : cselib_expand_value_rtx (rtx orig, bitmap regs_active, int max_depth)
1849 : {
1850 37280494 : struct expand_value_data evd;
1851 :
1852 37280494 : evd.regs_active = regs_active;
1853 37280494 : evd.callback = NULL;
1854 37280494 : evd.callback_arg = NULL;
1855 37280494 : evd.dummy = false;
1856 :
1857 37280494 : return cselib_expand_value_rtx_1 (orig, &evd, max_depth);
1858 : }
1859 :
1860 : /* Same as cselib_expand_value_rtx, but using a callback to try to
1861 : resolve some expressions. The CB function should return ORIG if it
1862 : can't or does not want to deal with a certain RTX. Any other
1863 : return value, including NULL, will be used as the expansion for
1864 : VALUE, without any further changes. */
1865 :
1866 : rtx
1867 88600830 : cselib_expand_value_rtx_cb (rtx orig, bitmap regs_active, int max_depth,
1868 : cselib_expand_callback cb, void *data)
1869 : {
1870 88600830 : struct expand_value_data evd;
1871 :
1872 88600830 : evd.regs_active = regs_active;
1873 88600830 : evd.callback = cb;
1874 88600830 : evd.callback_arg = data;
1875 88600830 : evd.dummy = false;
1876 :
1877 88600830 : return cselib_expand_value_rtx_1 (orig, &evd, max_depth);
1878 : }
1879 :
1880 : /* Similar to cselib_expand_value_rtx_cb, but no rtxs are actually copied
1881 : or simplified. Useful to find out whether cselib_expand_value_rtx_cb
1882 : would return NULL or non-NULL, without allocating new rtx. */
1883 :
1884 : bool
1885 0 : cselib_dummy_expand_value_rtx_cb (rtx orig, bitmap regs_active, int max_depth,
1886 : cselib_expand_callback cb, void *data)
1887 : {
1888 0 : struct expand_value_data evd;
1889 :
1890 0 : evd.regs_active = regs_active;
1891 0 : evd.callback = cb;
1892 0 : evd.callback_arg = data;
1893 0 : evd.dummy = true;
1894 :
1895 0 : return cselib_expand_value_rtx_1 (orig, &evd, max_depth) != NULL;
1896 : }
1897 :
1898 : /* Internal implementation of cselib_expand_value_rtx and
1899 : cselib_expand_value_rtx_cb. */
1900 :
/* Worker for cselib_expand_value_rtx and cselib_expand_value_rtx_cb.
   Recursively expand ORIG, consulting EVD->callback first where present,
   and giving up once MAX_DEPTH recursion levels are exhausted.  Returns
   the expansion, ORIG itself where no substitution is wanted, or NULL
   on failure.  In EVD->dummy mode no new rtl is allocated.  */
static rtx
cselib_expand_value_rtx_1 (rtx orig, struct expand_value_data *evd,
			   int max_depth)
{
  rtx copy, scopy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;
  machine_mode mode;

  code = GET_CODE (orig);

  /* For the context of dse, if we end up expanding into a huge tree, we
     will not have a useful address, so we might as well just give up
     quickly.  */
  if (max_depth <= 0)
    return NULL;

  switch (code)
    {
    case REG:
      {
	struct elt_list *l = REG_VALUES (REGNO (orig));

	/* The first entry of REG_VALUES may be a NULL placeholder; skip it.  */
	if (l && l->elt == NULL)
	  l = l->next;
	for (; l; l = l->next)
	  if (GET_MODE (l->elt->val_rtx) == GET_MODE (orig))
	    {
	      rtx result;
	      unsigned regno = REGNO (orig);

	      /* The only thing that we are not willing to do (this
		 is requirement of dse and if others potential uses
		 need this function we should add a parm to control
		 it) is that we will not substitute the
		 STACK_POINTER_REGNUM, FRAME_POINTER or the
		 HARD_FRAME_POINTER.

		 These expansions confuse the code that notices that
		 stores into the frame go dead at the end of the
		 function and that the frame is not affected by calls
		 to subroutines.  If you allow the
		 STACK_POINTER_REGNUM substitution, then dse will
		 think that parameter pushing also goes dead which is
		 wrong.  If you allow the FRAME_POINTER or the
		 HARD_FRAME_POINTER then you lose the opportunity to
		 make the frame assumptions.  */
	      if (regno == STACK_POINTER_REGNUM
		  || regno == FRAME_POINTER_REGNUM
		  || regno == HARD_FRAME_POINTER_REGNUM
		  || regno == cfa_base_preserved_regno)
		return orig;

	      /* Mark REGNO active so recursive expansion cannot loop back
		 through this same register.  */
	      bitmap_set_bit (evd->regs_active, regno);

	      if (dump_file && (dump_flags & TDF_CSELIB))
		fprintf (dump_file, "expanding: r%d into: ", regno);

	      result = expand_loc (l->elt->locs, evd, max_depth);
	      bitmap_clear_bit (evd->regs_active, regno);

	      if (result)
		return result;
	      else
		return orig;
	    }
	return orig;
      }

    CASE_CONST_ANY:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case SCRATCH:
      /* SCRATCH must be shared because they represent distinct values.  */
      return orig;
    case CLOBBER:
      if (REG_P (XEXP (orig, 0)) && HARD_REGISTER_NUM_P (REGNO (XEXP (orig, 0))))
	return orig;
      break;

    case CONST:
      if (shared_const_p (orig))
	return orig;
      break;

    case SUBREG:
      {
	rtx subreg;

	/* Let the callback try first; anything other than ORIG is taken
	   as the final answer.  */
	if (evd->callback)
	  {
	    subreg = evd->callback (orig, evd->regs_active, max_depth,
				    evd->callback_arg);
	    if (subreg != orig)
	      return subreg;
	  }

	subreg = cselib_expand_value_rtx_1 (SUBREG_REG (orig), evd,
					    max_depth - 1);
	if (!subreg)
	  return NULL;
	scopy = simplify_gen_subreg (GET_MODE (orig), subreg,
				     GET_MODE (SUBREG_REG (orig)),
				     SUBREG_BYTE (orig));
	/* Reject an unsimplified SUBREG of something that is neither a
	   REG nor a MEM.  */
	if (scopy == NULL
	    || (GET_CODE (scopy) == SUBREG
		&& !REG_P (SUBREG_REG (scopy))
		&& !MEM_P (SUBREG_REG (scopy))))
	  return NULL;

	return scopy;
      }

    case VALUE:
      {
	rtx result;

	if (dump_file && (dump_flags & TDF_CSELIB))
	  {
	    fputs ("\nexpanding ", dump_file);
	    print_rtl_single (dump_file, orig);
	    fputs (" into...", dump_file);
	  }

	if (evd->callback)
	  {
	    result = evd->callback (orig, evd->regs_active, max_depth,
				    evd->callback_arg);

	    if (result != orig)
	      return result;
	  }

	result = expand_loc (CSELIB_VAL_PTR (orig)->locs, evd, max_depth);
	return result;
      }

    case DEBUG_EXPR:
      if (evd->callback)
	return evd->callback (orig, evd->regs_active, max_depth,
			      evd->callback_arg);
      return orig;

    default:
      break;
    }

  /* Copy the various flags, fields, and other information.  We assume
     that all fields need copying, and then clear the fields that should
     not be copied.  That is the sensible default behavior, and forces
     us to explicitly document why we are *not* copying a flag.  */
  if (evd->dummy)
    copy = NULL;
  else
    copy = shallow_copy_rtx (orig);

  format_ptr = GET_RTX_FORMAT (code);

  /* Recursively expand each operand according to its format letter.  */
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
	if (XEXP (orig, i) != NULL)
	  {
	    rtx result = cselib_expand_value_rtx_1 (XEXP (orig, i), evd,
						    max_depth - 1);
	    if (!result)
	      return NULL;
	    if (copy)
	      XEXP (copy, i) = result;
	  }
	break;

      case 'E':
      case 'V':
	if (XVEC (orig, i) != NULL)
	  {
	    if (copy)
	      XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
	    for (j = 0; j < XVECLEN (orig, i); j++)
	      {
		rtx result = cselib_expand_value_rtx_1 (XVECEXP (orig, i, j),
							evd, max_depth - 1);
		if (!result)
		  return NULL;
		if (copy)
		  XVECEXP (copy, i, j) = result;
	      }
	  }
	break;

      case 't':
      case 'w':
      case 'i':
      case 'L':
      case 's':
      case 'S':
      case 'T':
      case 'u':
      case 'B':
      case '0':
	/* These are left unchanged.  */
	break;

      default:
	gcc_unreachable ();
      }

  /* In dummy mode we only needed to know that expansion succeeded.  */
  if (evd->dummy)
    return orig;

  mode = GET_MODE (copy);
  /* If an operand has been simplified into CONST_INT, which doesn't
     have a mode and the mode isn't derivable from whole rtx's mode,
     try simplify_*_operation first with mode from original's operand
     and as a fallback wrap CONST_INT into gen_rtx_CONST.  */
  scopy = copy;
  switch (GET_RTX_CLASS (code))
    {
    case RTX_UNARY:
      if (CONST_INT_P (XEXP (copy, 0))
	  && GET_MODE (XEXP (orig, 0)) != VOIDmode)
	{
	  scopy = simplify_unary_operation (code, mode, XEXP (copy, 0),
					    GET_MODE (XEXP (orig, 0)));
	  if (scopy)
	    return scopy;
	}
      break;
    case RTX_COMM_ARITH:
    case RTX_BIN_ARITH:
      /* These expressions can derive operand modes from the whole rtx's mode.  */
      break;
    case RTX_TERNARY:
    case RTX_BITFIELD_OPS:
      if (CONST_INT_P (XEXP (copy, 0))
	  && GET_MODE (XEXP (orig, 0)) != VOIDmode)
	{
	  scopy = simplify_ternary_operation (code, mode,
					      GET_MODE (XEXP (orig, 0)),
					      XEXP (copy, 0), XEXP (copy, 1),
					      XEXP (copy, 2));
	  if (scopy)
	    return scopy;
	}
      break;
    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      if (CONST_INT_P (XEXP (copy, 0))
	  && GET_MODE (XEXP (copy, 1)) == VOIDmode
	  && (GET_MODE (XEXP (orig, 0)) != VOIDmode
	      || GET_MODE (XEXP (orig, 1)) != VOIDmode))
	{
	  scopy = simplify_relational_operation (code, mode,
						 (GET_MODE (XEXP (orig, 0))
						  != VOIDmode)
						 ? GET_MODE (XEXP (orig, 0))
						 : GET_MODE (XEXP (orig, 1)),
						 XEXP (copy, 0),
						 XEXP (copy, 1));
	  if (scopy)
	    return scopy;
	}
      break;
    default:
      break;
    }
  /* Final generic simplification pass; fall back to the plain copy.  */
  scopy = simplify_rtx (copy);
  if (scopy)
    return scopy;
  return copy;
}
2175 :
2176 : /* Walk rtx X and replace all occurrences of REG and MEM subexpressions
2177 : with VALUE expressions. This way, it becomes independent of changes
2178 : to registers and memory.
2179 : X isn't actually modified; if modifications are needed, new rtl is
2180 : allocated. However, the return value can share rtl with X.
2181 : If X is within a MEM, MEMMODE must be the mode of the MEM. */
2182 :
/* Walk rtx X and replace all occurrences of REG and MEM subexpressions
   with VALUE expressions, making the result independent of changes to
   registers and memory.  X is not modified; new rtl is allocated only
   where a substitution happened, so the return value may share rtl
   with X.  MEMMODE must be the mode of the enclosing MEM when X is
   part of an address.  */
rtx
cselib_subst_to_values (rtx x, machine_mode memmode)
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt = GET_RTX_FORMAT (code);
  cselib_val *e;
  struct elt_list *l;
  rtx copy = x;
  int i;
  poly_int64 offset;

  switch (code)
    {
    case REG:
      l = REG_VALUES (REGNO (x));
      /* Skip the NULL placeholder that may head the REG_VALUES list.  */
      if (l && l->elt == NULL)
	l = l->next;
      for (; l; l = l->next)
	if (GET_MODE (l->elt->val_rtx) == GET_MODE (x))
	  return l->elt->val_rtx;

      /* Every REG reaching here must already have a VALUE.  */
      gcc_unreachable ();

    case MEM:
      e = cselib_lookup_mem (x, 0);
      /* This used to happen for autoincrements, but we deal with them
	 properly now.  Remove the if stmt for the next release.  */
      if (! e)
	{
	  /* Assign a value that doesn't match any other.  */
	  e = new_cselib_val (next_uid, GET_MODE (x), x);
	}
      return e->val_rtx;

    case ENTRY_VALUE:
      e = cselib_lookup (x, GET_MODE (x), 0, memmode);
      if (! e)
	break;
      return e->val_rtx;

    CASE_CONST_ANY:
      return x;

    case PRE_DEC:
    case PRE_INC:
      /* Rewrite a pre-modification as an explicit PLUS of the size of
	 the enclosing MEM, then substitute into that.  */
      gcc_assert (memmode != VOIDmode);
      offset = GET_MODE_SIZE (memmode);
      if (code == PRE_DEC)
	offset = -offset;
      return cselib_subst_to_values (plus_constant (GET_MODE (x),
						    XEXP (x, 0), offset),
				     memmode);

    case PRE_MODIFY:
      gcc_assert (memmode != VOIDmode);
      return cselib_subst_to_values (XEXP (x, 1), memmode);

    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      /* Post-modifications use the unmodified address.  */
      gcc_assert (memmode != VOIDmode);
      return cselib_subst_to_values (XEXP (x, 0), memmode);

    case PLUS:
      if (GET_MODE (x) == Pmode && CONST_INT_P (XEXP (x, 1)))
	{
	  rtx t = cselib_subst_to_values (XEXP (x, 0), memmode);
	  if (GET_CODE (t) == VALUE)
	    {
	      if (SP_DERIVED_VALUE_P (t) && XEXP (x, 1) == const0_rtx)
		return t;
	      /* Canonicalize sp-derived addresses to
		 (plus (value sp-derived) (const_int)).  */
	      for (struct elt_loc_list *l = CSELIB_VAL_PTR (t)->locs;
		   l; l = l->next)
		if (GET_CODE (l->loc) == PLUS
		    && GET_CODE (XEXP (l->loc, 0)) == VALUE
		    && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))
		    && CONST_INT_P (XEXP (l->loc, 1)))
		  return plus_constant (Pmode, l->loc, INTVAL (XEXP (x, 1)));
	    }
	  if (t != XEXP (x, 0))
	    {
	      copy = shallow_copy_rtx (x);
	      XEXP (copy, 0) = t;
	    }
	  return copy;
	}
      /* Other PLUS forms fall through to the generic operand walk.  */

    default:
      break;
    }

  /* Generic case: substitute into each operand, copying X lazily only
     once a substitution actually changes something.  */
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  rtx t = cselib_subst_to_values (XEXP (x, i), memmode);

	  if (t != XEXP (x, i))
	    {
	      if (x == copy)
		copy = shallow_copy_rtx (x);
	      XEXP (copy, i) = t;
	    }
	}
      else if (fmt[i] == 'E')
	{
	  int j;

	  for (j = 0; j < XVECLEN (x, i); j++)
	    {
	      rtx t = cselib_subst_to_values (XVECEXP (x, i, j), memmode);

	      if (t != XVECEXP (x, i, j))
		{
		  /* Copy the vector itself the first time an element of
		     it changes.  */
		  if (XVEC (x, i) == XVEC (copy, i))
		    {
		      if (x == copy)
			copy = shallow_copy_rtx (x);
		      XVEC (copy, i) = shallow_copy_rtvec (XVEC (x, i));
		    }
		  XVECEXP (copy, i, j) = t;
		}
	    }
	}
    }

  return copy;
}
2311 :
2312 : /* Wrapper for cselib_subst_to_values, that indicates X is in INSN. */
2313 :
2314 : rtx
2315 1463 : cselib_subst_to_values_from_insn (rtx x, machine_mode memmode, rtx_insn *insn)
2316 : {
2317 1463 : rtx ret;
2318 1463 : gcc_assert (!cselib_current_insn);
2319 1463 : cselib_current_insn = insn;
2320 1463 : ret = cselib_subst_to_values (x, memmode);
2321 1463 : cselib_current_insn = NULL;
2322 1463 : return ret;
2323 : }
2324 :
2325 : /* Look up the rtl expression X in our tables and return the value it
2326 : has. If CREATE is zero, we return NULL if we don't know the value.
2327 : Otherwise, we create a new one if possible, using mode MODE if X
2328 : doesn't have a mode (i.e. because it's a constant). When X is part
2329 : of an address, MEMMODE should be the mode of the enclosing MEM if
2330 : we're tracking autoinc expressions. */
2331 :
/* Look up the rtl expression X in our tables and return the value it
   has.  If CREATE is zero, return NULL if the value is unknown;
   otherwise create a new one, using MODE when X has no mode of its own.
   MEMMODE is the mode of the enclosing MEM when X is part of an
   address.  Worker for cselib_lookup.  */
static cselib_val *
cselib_lookup_1 (rtx x, machine_mode mode,
		 int create, machine_mode memmode)
{
  cselib_val **slot;
  cselib_val *e;

  if (GET_MODE (x) != VOIDmode)
    mode = GET_MODE (x);

  if (GET_CODE (x) == VALUE)
    return CSELIB_VAL_PTR (x);

  if (REG_P (x))
    {
      struct elt_list *l;
      unsigned int i = REGNO (x);

      l = REG_VALUES (i);
      /* Skip the NULL placeholder that may head the list.  */
      if (l && l->elt == NULL)
	l = l->next;
      for (; l; l = l->next)
	if (mode == GET_MODE (l->elt->val_rtx))
	  {
	    promote_debug_loc (l->elt->locs);
	    return l->elt;
	  }

      if (! create)
	return 0;

      /* Remember the widest hard register value seen, so invalidation
	 knows how far below a regno to look for overlaps.  */
      if (i < FIRST_PSEUDO_REGISTER)
	{
	  unsigned int n = hard_regno_nregs (i, mode);

	  if (n > max_value_regs)
	    max_value_regs = n;
	}

      e = new_cselib_val (next_uid, GET_MODE (x), x);
      if (GET_MODE (x) == Pmode && x == stack_pointer_rtx)
	SP_DERIVED_VALUE_P (e->val_rtx) = 1;
      new_elt_loc_list (e, x);

      scalar_int_mode int_mode;
      if (REG_VALUES (i) == 0)
	{
	  /* Maintain the invariant that the first entry of
	     REG_VALUES, if present, must be the value used to set the
	     register, or NULL.  */
	  used_regs[n_used_regs++] = i;
	  REG_VALUES (i) = new_elt_list (REG_VALUES (i), NULL);
	}
      else if (cselib_preserve_constants
	       && is_int_mode (mode, &int_mode))
	{
	  /* During var-tracking, try harder to find equivalences
	     for SUBREGs.  If a setter sets say a DImode register
	     and user uses that register only in SImode, add a lowpart
	     subreg location.  */
	  struct elt_list *lwider = NULL;
	  scalar_int_mode lmode;
	  l = REG_VALUES (i);
	  if (l && l->elt == NULL)
	    l = l->next;
	  /* Find the narrowest wider integer-mode value for this reg
	     that has at least one non-REG location.  */
	  for (; l; l = l->next)
	    if (is_int_mode (GET_MODE (l->elt->val_rtx), &lmode)
		&& GET_MODE_SIZE (lmode) > GET_MODE_SIZE (int_mode)
		&& (lwider == NULL
		    || partial_subreg_p (lmode,
					 GET_MODE (lwider->elt->val_rtx))))
	      {
		struct elt_loc_list *el;
		if (i < FIRST_PSEUDO_REGISTER
		    && hard_regno_nregs (i, lmode) != 1)
		  continue;
		for (el = l->elt->locs; el; el = el->next)
		  if (!REG_P (el->loc))
		    break;
		if (el)
		  lwider = l;
	      }
	  if (lwider)
	    {
	      rtx sub = lowpart_subreg (int_mode, lwider->elt->val_rtx,
					GET_MODE (lwider->elt->val_rtx));
	      if (sub)
		new_elt_loc_list (e, sub);
	    }
	}
      REG_VALUES (i)->next = new_elt_list (REG_VALUES (i)->next, e);
      slot = cselib_find_slot (mode, x, e->hash, INSERT, memmode);
      *slot = e;
      return e;
    }

  if (MEM_P (x))
    return cselib_lookup_mem (x, create);

  hashval_t hashval = cselib_hash_rtx (x, create, memmode);
  /* Can't even create if hashing is not possible.  */
  if (! hashval)
    return 0;

  slot = cselib_find_slot (mode, x, hashval,
			   create ? INSERT : NO_INSERT, memmode);
  if (slot == 0)
    return 0;

  e = (cselib_val *) *slot;
  if (e)
    return e;

  e = new_cselib_val (hashval, mode, x);

  /* We have to fill the slot before calling cselib_subst_to_values:
     the hash table is inconsistent until we do so, and
     cselib_subst_to_values will need to do lookups.  */
  *slot = e;
  rtx v = cselib_subst_to_values (x, memmode);

  /* If cselib_preserve_constants, we might get a SP_DERIVED_VALUE_P
     VALUE that isn't in the hash tables anymore.  */
  if (GET_CODE (v) == VALUE && SP_DERIVED_VALUE_P (v) && PRESERVED_VALUE_P (v))
    PRESERVED_VALUE_P (e->val_rtx) = 1;

  new_elt_loc_list (e, v);
  return e;
}
2461 :
2462 : /* Wrapper for cselib_lookup, that indicates X is in INSN. */
2463 :
2464 : cselib_val *
2465 6202139 : cselib_lookup_from_insn (rtx x, machine_mode mode,
2466 : int create, machine_mode memmode, rtx_insn *insn)
2467 : {
2468 6202139 : cselib_val *ret;
2469 :
2470 6202139 : gcc_assert (!cselib_current_insn);
2471 6202139 : cselib_current_insn = insn;
2472 :
2473 6202139 : ret = cselib_lookup (x, mode, create, memmode);
2474 :
2475 6202139 : cselib_current_insn = NULL;
2476 :
2477 6202139 : return ret;
2478 : }
2479 :
2480 : /* Wrapper for cselib_lookup_1, that logs the lookup result and
2481 : maintains invariants related with debug insns. */
2482 :
2483 : cselib_val *
2484 2395121273 : cselib_lookup (rtx x, machine_mode mode,
2485 : int create, machine_mode memmode)
2486 : {
2487 2395121273 : cselib_val *ret = cselib_lookup_1 (x, mode, create, memmode);
2488 :
2489 : /* ??? Should we return NULL if we're not to create an entry, the
2490 : found loc is a debug loc and cselib_current_insn is not DEBUG?
2491 : If so, we should also avoid converting val to non-DEBUG; probably
2492 : easiest setting cselib_current_insn to NULL before the call
2493 : above. */
2494 :
2495 2395121273 : if (dump_file && (dump_flags & TDF_CSELIB))
2496 : {
2497 0 : fputs ("cselib lookup ", dump_file);
2498 0 : print_inline_rtx (dump_file, x, 2);
2499 0 : fprintf (dump_file, " => %u:%u\n",
2500 : ret ? ret->uid : 0,
2501 : ret ? ret->hash : 0);
2502 : }
2503 :
2504 2395121273 : return ret;
2505 : }
2506 :
2507 : /* Invalidate the value at *L, which is part of REG_VALUES (REGNO). */
2508 :
/* Invalidate the value at *L, which is part of REG_VALUES (REGNO).  */
static void
cselib_invalidate_regno_val (unsigned int regno, struct elt_list **l)
{
  cselib_val *v = (*l)->elt;
  if (*l == REG_VALUES (regno))
    {
      /* Maintain the invariant that the first entry of
	 REG_VALUES, if present, must be the value used to set
	 the register, or NULL.  This is also nice because
	 then we won't push the same regno onto user_regs
	 multiple times.  */
      (*l)->elt = NULL;
      l = &(*l)->next;
    }
  else
    unchain_one_elt_list (l);

  v = canonical_cselib_val (v);

  /* Remember whether V had locations before we remove one, so we can
     tell below whether this invalidation made it useless.  */
  bool had_locs = v->locs != NULL;
  rtx_insn *setting_insn = v->locs ? v->locs->setting_insn : NULL;

  /* Now, we clear the mapping from value to reg.  It must exist, so
     this code will crash intentionally if it doesn't.  */
  for (elt_loc_list **p = &v->locs; ; p = &(*p)->next)
    {
      rtx x = (*p)->loc;

      if (REG_P (x) && REGNO (x) == regno)
	{
	  unchain_one_elt_loc_list (p);
	  break;
	}
    }

  /* Book-keeping for the garbage collection of useless values.  */
  if (had_locs && cselib_useless_value_p (v))
    {
      if (setting_insn && DEBUG_INSN_P (setting_insn))
	n_useless_debug_values++;
      else
	n_useless_values++;
    }
}
2552 :
2553 : /* Invalidate any entries in reg_values that overlap REGNO. This is called
2554 : if REGNO is changing. MODE is the mode of the assignment to REGNO, which
2555 : is used to determine how many hard registers are being changed. If MODE
2556 : is VOIDmode, then only REGNO is being changed; this is used when
2557 : invalidating call clobbered registers across a call. */
2558 :
2559 : static void
2560 794913932 : cselib_invalidate_regno (unsigned int regno, machine_mode mode)
2561 : {
2562 794913932 : unsigned int endregno;
2563 794913932 : unsigned int i;
2564 :
2565 : /* If we see pseudos after reload, something is _wrong_. */
2566 794913932 : gcc_assert (!reload_completed || regno < FIRST_PSEUDO_REGISTER
2567 : || reg_renumber[regno] < 0);
2568 :
2569 : /* Determine the range of registers that must be invalidated. For
2570 : pseudos, only REGNO is affected. For hard regs, we must take MODE
2571 : into account, and we must also invalidate lower register numbers
2572 : if they contain values that overlap REGNO. */
2573 220477426 : if (regno < FIRST_PSEUDO_REGISTER)
2574 : {
2575 691392004 : gcc_assert (mode != VOIDmode);
2576 :
2577 691392004 : if (regno < max_value_regs)
2578 : i = 0;
2579 : else
2580 650520935 : i = regno - max_value_regs;
2581 :
2582 691392004 : endregno = end_hard_regno (mode, regno);
2583 : }
2584 : else
2585 : {
2586 103521928 : i = regno;
2587 103521928 : endregno = regno + 1;
2588 : }
2589 :
2590 2203041317 : for (; i < endregno; i++)
2591 : {
2592 1408127385 : struct elt_list **l = ®_VALUES (i);
2593 :
2594 : /* Go through all known values for this reg; if it overlaps the range
2595 : we're invalidating, remove the value. */
2596 1790810709 : while (*l)
2597 : {
2598 382683324 : cselib_val *v = (*l)->elt;
2599 382683324 : unsigned int this_last = i;
2600 :
2601 382683324 : if (i < FIRST_PSEUDO_REGISTER && v != NULL)
2602 166277302 : this_last = end_hard_regno (GET_MODE (v->val_rtx), i) - 1;
2603 :
2604 382683324 : if (this_last < regno || v == NULL
2605 117586755 : || (v == cfa_base_preserved_val
2606 4341461 : && i == cfa_base_preserved_regno))
2607 : {
2608 269436384 : l = &(*l)->next;
2609 269436384 : continue;
2610 : }
2611 :
2612 : /* We have an overlap. */
2613 113246940 : cselib_invalidate_regno_val (i, l);
2614 : }
2615 : }
2616 794913932 : }
2617 :
2618 : /* Invalidate any locations in the table which are changed because of a
2619 : store to MEM_RTX. If this is called because of a non-const call
2620 : instruction, MEM_RTX is (mem:BLK const0_rtx). */
2621 :
2622 : static void
2623 113975142 : cselib_invalidate_mem (rtx mem_rtx)
2624 : {
2625 113975142 : cselib_val **vp, *v, *next;
2626 113975142 : int num_mems = 0;
2627 113975142 : rtx mem_addr;
2628 :
2629 113975142 : mem_addr = canon_rtx (get_addr (XEXP (mem_rtx, 0)));
2630 113975142 : mem_rtx = canon_rtx (mem_rtx);
2631 :
2632 113975142 : vp = &first_containing_mem;
2633 284836808 : for (v = *vp; v != &dummy_val; v = next)
2634 : {
2635 170861666 : bool has_mem = false;
2636 170861666 : struct elt_loc_list **p = &v->locs;
2637 170861666 : bool had_locs = v->locs != NULL;
2638 170861666 : rtx_insn *setting_insn = v->locs ? v->locs->setting_insn : NULL;
2639 170861666 : rtx sp_base = NULL_RTX;
2640 170861666 : HOST_WIDE_INT sp_off = 0;
2641 :
2642 509860342 : while (*p)
2643 : {
2644 338998676 : rtx x = (*p)->loc;
2645 338998676 : cselib_val *addr;
2646 338998676 : struct elt_list **mem_chain;
2647 :
2648 : /* MEMs may occur in locations only at the top level; below
2649 : that every MEM or REG is substituted by its VALUE. */
2650 338998676 : if (!MEM_P (x))
2651 : {
2652 102779000 : p = &(*p)->next;
2653 102779000 : continue;
2654 : }
2655 :
2656 : /* When invalidating memory below the stack pointer for const/pure
2657 : calls and alloca/VLAs aren't used, attempt to optimize. Values
2658 : stored into area sometimes below the stack pointer shouldn't be
2659 : addressable and should be stored just through stack pointer
2660 : derived expressions, so don't invalidate MEMs not using stack
2661 : derived addresses, or if the MEMs clearly aren't below the stack
2662 : pointer. This isn't a fully conservative approach, the hope is
2663 : that invalidating more MEMs than this isn't actually needed. */
2664 236219676 : if (mem_rtx == callmem[1]
2665 2897143 : && num_mems < param_max_cselib_memory_locations
2666 2897085 : && GET_CODE (XEXP (x, 0)) == VALUE
2667 2897085 : && !cfun->calls_alloca)
2668 : {
2669 2849396 : cselib_val *v2 = CSELIB_VAL_PTR (XEXP (x, 0));
2670 2849396 : rtx x_base = NULL_RTX;
2671 2849396 : HOST_WIDE_INT x_off = 0;
2672 2849396 : if (SP_DERIVED_VALUE_P (v2->val_rtx))
2673 : x_base = v2->val_rtx;
2674 : else
2675 4527698 : for (struct elt_loc_list *l = v2->locs; l; l = l->next)
2676 2820728 : if (GET_CODE (l->loc) == PLUS
2677 1513855 : && GET_CODE (XEXP (l->loc, 0)) == VALUE
2678 1412036 : && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))
2679 3889202 : && CONST_INT_P (XEXP (l->loc, 1)))
2680 : {
2681 1068460 : x_base = XEXP (l->loc, 0);
2682 1068460 : x_off = INTVAL (XEXP (l->loc, 1));
2683 1068460 : break;
2684 : }
2685 : /* If x_base is NULL here, don't invalidate x as its address
2686 : isn't derived from sp such that it could be in outgoing
2687 : argument area of some call in !ACCUMULATE_OUTGOING_ARGS
2688 : function. */
2689 2849396 : if (x_base)
2690 : {
2691 1142426 : if (sp_base == NULL_RTX)
2692 : {
2693 2102016 : if (cselib_val *v3
2694 1054089 : = cselib_lookup_1 (stack_pointer_rtx, Pmode, 0,
2695 : VOIDmode))
2696 : {
2697 1046553 : if (SP_DERIVED_VALUE_P (v3->val_rtx))
2698 : sp_base = v3->val_rtx;
2699 : else
2700 274979 : for (struct elt_loc_list *l = v3->locs;
2701 551690 : l; l = l->next)
2702 551676 : if (GET_CODE (l->loc) == PLUS
2703 274965 : && GET_CODE (XEXP (l->loc, 0)) == VALUE
2704 274965 : && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))
2705 826641 : && CONST_INT_P (XEXP (l->loc, 1)))
2706 : {
2707 274965 : sp_base = XEXP (l->loc, 0);
2708 274965 : sp_off = INTVAL (XEXP (l->loc, 1));
2709 274965 : break;
2710 : }
2711 : }
2712 1051008 : if (sp_base == NULL_RTX)
2713 4469 : sp_base = pc_rtx;
2714 : }
2715 : /* Otherwise, if x_base and sp_base are the same,
2716 : we know that x_base + x_off is the x's address and
2717 : sp_base + sp_off is current value of stack pointer,
2718 : so try to determine if x is certainly not below stack
2719 : pointer. */
2720 1142426 : if (sp_base == x_base)
2721 : {
2722 1135413 : if (STACK_GROWS_DOWNWARD)
2723 : {
2724 1135413 : HOST_WIDE_INT off = sp_off;
2725 : #ifdef STACK_ADDRESS_OFFSET
2726 : /* On SPARC take stack pointer bias into account as
2727 : well. */
2728 : off += (STACK_ADDRESS_OFFSET
2729 : - FIRST_PARM_OFFSET (current_function_decl));
2730 : #endif
2731 1135413 : if (x_off >= off)
2732 : /* x is at or above the current stack pointer,
2733 : no need to invalidate it. */
2734 : x_base = NULL_RTX;
2735 : }
2736 : else
2737 : {
2738 : HOST_WIDE_INT sz;
2739 : enum machine_mode mode = GET_MODE (x);
2740 : if ((MEM_SIZE_KNOWN_P (x)
2741 : && MEM_SIZE (x).is_constant (&sz))
2742 : || (mode != BLKmode
2743 : && GET_MODE_SIZE (mode).is_constant (&sz)))
2744 : if (x_off < sp_off
2745 : && ((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT)
2746 : x_off + sz) <= sp_off))
2747 : /* x's end is below or at the current stack
2748 : pointer in !STACK_GROWS_DOWNWARD target,
2749 : no need to invalidate it. */
2750 : x_base = NULL_RTX;
2751 : }
2752 : }
2753 : }
2754 2841616 : if (x_base == NULL_RTX)
2755 : {
2756 2841616 : has_mem = true;
2757 2841616 : num_mems++;
2758 2841616 : p = &(*p)->next;
2759 2841616 : continue;
2760 : }
2761 : }
2762 :
2763 429509968 : if (num_mems < param_max_cselib_memory_locations
2764 466690791 : && ! canon_anti_dependence (x, false, mem_rtx,
2765 233312731 : GET_MODE (mem_rtx), mem_addr))
2766 : {
2767 196131908 : has_mem = true;
2768 196131908 : num_mems++;
2769 196131908 : p = &(*p)->next;
2770 196131908 : continue;
2771 : }
2772 :
2773 : /* This one overlaps. */
2774 : /* We must have a mapping from this MEM's address to the
2775 : value (E). Remove that, too. */
2776 37246152 : addr = cselib_lookup (XEXP (x, 0), VOIDmode, 0, GET_MODE (x));
2777 37246152 : addr = canonical_cselib_val (addr);
2778 37246152 : gcc_checking_assert (v == canonical_cselib_val (v));
2779 37246152 : mem_chain = &addr->addr_list;
2780 37248212 : for (;;)
2781 : {
2782 37247182 : cselib_val *canon = canonical_cselib_val ((*mem_chain)->elt);
2783 :
2784 37247182 : if (canon == v)
2785 : {
2786 37246152 : unchain_one_elt_list (mem_chain);
2787 37246152 : break;
2788 : }
2789 :
2790 : /* Record canonicalized elt. */
2791 1030 : (*mem_chain)->elt = canon;
2792 :
2793 1030 : mem_chain = &(*mem_chain)->next;
2794 1030 : }
2795 :
2796 37246152 : unchain_one_elt_loc_list (p);
2797 : }
2798 :
2799 170861666 : if (had_locs && cselib_useless_value_p (v))
2800 : {
2801 8628127 : if (setting_insn && DEBUG_INSN_P (setting_insn))
2802 0 : n_useless_debug_values++;
2803 : else
2804 8628127 : n_useless_values++;
2805 : }
2806 :
2807 170861666 : next = v->next_containing_mem;
2808 170861666 : if (has_mem)
2809 : {
2810 138451251 : *vp = v;
2811 138451251 : vp = &(*vp)->next_containing_mem;
2812 : }
2813 : else
2814 32410415 : v->next_containing_mem = NULL;
2815 : }
2816 113975142 : *vp = &dummy_val;
2817 113975142 : }
2818 :
2819 : /* Invalidate DEST. */
2820 :
2821 : void
2822 519794708 : cselib_invalidate_rtx (rtx dest)
2823 : {
2824 519794708 : while (GET_CODE (dest) == SUBREG
2825 519794708 : || GET_CODE (dest) == ZERO_EXTRACT
2826 1039596027 : || GET_CODE (dest) == STRICT_LOW_PART)
2827 6611 : dest = XEXP (dest, 0);
2828 :
2829 519794708 : if (REG_P (dest))
2830 395650952 : cselib_invalidate_regno (REGNO (dest), GET_MODE (dest));
2831 124143756 : else if (MEM_P (dest))
2832 75792591 : cselib_invalidate_mem (dest);
2833 519794708 : }
2834 :
2835 : /* A wrapper for cselib_invalidate_rtx to be called via note_stores. */
2836 :
2837 : static void
2838 503834819 : cselib_invalidate_rtx_note_stores (rtx dest, const_rtx,
2839 : void *data ATTRIBUTE_UNUSED)
2840 : {
2841 503834819 : cselib_invalidate_rtx (dest);
2842 503834819 : }
2843 :
2844 : /* Record the result of a SET instruction. DEST is being set; the source
2845 : contains the value described by SRC_ELT. If DEST is a MEM, DEST_ADDR_ELT
2846 : describes its address. */
2847 :
2848 : static void
2849 361491869 : cselib_record_set (rtx dest, cselib_val *src_elt, cselib_val *dest_addr_elt)
2850 : {
2851 361491869 : if (src_elt == 0 || side_effects_p (dest))
2852 66121141 : return;
2853 :
2854 295370728 : if (REG_P (dest))
2855 : {
2856 271068141 : unsigned int dreg = REGNO (dest);
2857 271068141 : if (dreg < FIRST_PSEUDO_REGISTER)
2858 : {
2859 199030127 : unsigned int n = REG_NREGS (dest);
2860 :
2861 199030127 : if (n > max_value_regs)
2862 25545116 : max_value_regs = n;
2863 : }
2864 :
2865 271068141 : if (REG_VALUES (dreg) == 0)
2866 : {
2867 166566310 : used_regs[n_used_regs++] = dreg;
2868 166566310 : REG_VALUES (dreg) = new_elt_list (REG_VALUES (dreg), src_elt);
2869 : }
2870 : else
2871 : {
2872 : /* The register should have been invalidated. */
2873 104501831 : gcc_assert (REG_VALUES (dreg)->elt == 0);
2874 104501831 : REG_VALUES (dreg)->elt = src_elt;
2875 : }
2876 :
2877 271068141 : if (cselib_useless_value_p (src_elt))
2878 41750 : n_useless_values--;
2879 271068141 : new_elt_loc_list (src_elt, dest);
2880 : }
2881 24302587 : else if (MEM_P (dest) && dest_addr_elt != 0
2882 24302587 : && cselib_record_memory)
2883 : {
2884 24302587 : if (cselib_useless_value_p (src_elt))
2885 46 : n_useless_values--;
2886 24302587 : add_mem_for_addr (dest_addr_elt, src_elt, dest);
2887 : }
2888 : }
2889 :
2890 : /* Make ELT and X's VALUE equivalent to each other at INSN. */
2891 :
2892 : void
2893 3827224 : cselib_add_permanent_equiv (cselib_val *elt, rtx x, rtx_insn *insn)
2894 : {
2895 3827224 : cselib_val *nelt;
2896 3827224 : rtx_insn *save_cselib_current_insn = cselib_current_insn;
2897 :
2898 3827224 : gcc_checking_assert (elt);
2899 3827224 : gcc_checking_assert (PRESERVED_VALUE_P (elt->val_rtx));
2900 3827224 : gcc_checking_assert (!side_effects_p (x));
2901 :
2902 3827224 : cselib_current_insn = insn;
2903 :
2904 3827224 : nelt = cselib_lookup (x, GET_MODE (elt->val_rtx), 1, VOIDmode);
2905 :
2906 3827224 : if (nelt != elt)
2907 : {
2908 2958542 : cselib_any_perm_equivs = true;
2909 :
2910 2958542 : if (!PRESERVED_VALUE_P (nelt->val_rtx))
2911 2953640 : cselib_preserve_value (nelt);
2912 :
2913 2958542 : new_elt_loc_list (nelt, elt->val_rtx);
2914 : }
2915 :
2916 3827224 : cselib_current_insn = save_cselib_current_insn;
2917 3827224 : }
2918 :
2919 : /* Return TRUE if any permanent equivalences have been recorded since
2920 : the table was last initialized. */
2921 : bool
2922 1390091322 : cselib_have_permanent_equivalences (void)
2923 : {
2924 1390091322 : return cselib_any_perm_equivs;
2925 : }
2926 :
2927 : /* Record stack_pointer_rtx to be equal to
2928 : (plus:P cfa_base_preserved_val offset). Used by var-tracking
2929 : at the start of basic blocks for !frame_pointer_needed functions. */
2930 :
2931 : void
2932 3411823 : cselib_record_sp_cfa_base_equiv (HOST_WIDE_INT offset, rtx_insn *insn)
2933 : {
2934 3411823 : rtx sp_derived_value = NULL_RTX;
2935 6823646 : for (struct elt_loc_list *l = cfa_base_preserved_val->locs; l; l = l->next)
2936 6823646 : if (GET_CODE (l->loc) == VALUE
2937 6823646 : && SP_DERIVED_VALUE_P (l->loc))
2938 : {
2939 : sp_derived_value = l->loc;
2940 : break;
2941 : }
2942 6823646 : else if (GET_CODE (l->loc) == PLUS
2943 3411823 : && GET_CODE (XEXP (l->loc, 0)) == VALUE
2944 3411823 : && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))
2945 10235469 : && CONST_INT_P (XEXP (l->loc, 1)))
2946 : {
2947 3411823 : sp_derived_value = XEXP (l->loc, 0);
2948 3411823 : offset = offset + UINTVAL (XEXP (l->loc, 1));
2949 3411823 : break;
2950 : }
2951 3411823 : if (sp_derived_value == NULL_RTX)
2952 : return;
2953 3411823 : cselib_val *val
2954 3411823 : = cselib_lookup_from_insn (plus_constant (Pmode, sp_derived_value, offset),
2955 3411823 : Pmode, 1, VOIDmode, insn);
2956 3411823 : if (val != NULL)
2957 : {
2958 3411823 : PRESERVED_VALUE_P (val->val_rtx) = 1;
2959 3411823 : cselib_record_set (stack_pointer_rtx, val, NULL);
2960 : }
2961 : }
2962 :
2963 : /* Return true if V is SP_DERIVED_VALUE_P (or SP_DERIVED_VALUE_P + CONST_INT)
2964 : that can be expressed using cfa_base_preserved_val + CONST_INT. */
2965 :
2966 : bool
2967 30331341 : cselib_sp_derived_value_p (cselib_val *v)
2968 : {
2969 30331341 : if (!SP_DERIVED_VALUE_P (v->val_rtx))
2970 66821674 : for (struct elt_loc_list *l = v->locs; l; l = l->next)
2971 36999865 : if (GET_CODE (l->loc) == PLUS
2972 6981155 : && GET_CODE (XEXP (l->loc, 0)) == VALUE
2973 6762936 : && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))
2974 41997387 : && CONST_INT_P (XEXP (l->loc, 1)))
2975 4997522 : v = CSELIB_VAL_PTR (XEXP (l->loc, 0));
2976 30331341 : if (!SP_DERIVED_VALUE_P (v->val_rtx))
2977 : return false;
2978 11311967 : for (struct elt_loc_list *l = v->locs; l; l = l->next)
2979 11311967 : if (l->loc == cfa_base_preserved_val->val_rtx)
2980 : return true;
2981 11311967 : else if (GET_CODE (l->loc) == PLUS
2982 5507054 : && XEXP (l->loc, 0) == cfa_base_preserved_val->val_rtx
2983 5507054 : && CONST_INT_P (XEXP (l->loc, 1)))
2984 : return true;
2985 : return false;
2986 : }
2987 :
2988 : /* There is no good way to determine how many elements there can be
2989 : in a PARALLEL. Since it's fairly cheap, use a really large number. */
2990 : #define MAX_SETS (FIRST_PSEUDO_REGISTER * 2)
2991 :
struct cselib_record_autoinc_data
{
  /* Buffer of sets collected for the current insn; points into the
     caller's (cselib_record_sets) local array.  */
  struct cselib_set *sets;
  /* Number of entries of SETS filled in so far; bumped by
     cselib_record_autoinc_cb for each autoinc discovered.  */
  int n_sets;
};
2997 :
2998 : /* Callback for for_each_inc_dec. Records in ARG the SETs implied by
2999 : autoinc RTXs: SRC plus SRCOFF if non-NULL is stored in DEST. */
3000 :
3001 : static int
3002 13401900 : cselib_record_autoinc_cb (rtx mem ATTRIBUTE_UNUSED, rtx op ATTRIBUTE_UNUSED,
3003 : rtx dest, rtx src, rtx srcoff, void *arg)
3004 : {
3005 13401900 : struct cselib_record_autoinc_data *data;
3006 13401900 : data = (struct cselib_record_autoinc_data *)arg;
3007 :
3008 13401900 : data->sets[data->n_sets].dest = dest;
3009 :
3010 13401900 : if (srcoff)
3011 13033535 : data->sets[data->n_sets].src = gen_rtx_PLUS (GET_MODE (src), src, srcoff);
3012 : else
3013 368365 : data->sets[data->n_sets].src = src;
3014 :
3015 13401900 : data->n_sets++;
3016 :
3017 13401900 : return 0;
3018 : }
3019 :
3020 : /* Record the effects of any sets and autoincs in INSN. */
static void
cselib_record_sets (rtx_insn *insn)
{
  int n_sets = 0;
  int i;
  /* Collected sets; MAX_SETS is deliberately generous (see above).  */
  struct cselib_set sets[MAX_SETS];
  /* Condition of a COND_EXEC body, or 0 for unconditional insns.  */
  rtx cond = 0;
  int n_sets_before_autoinc;
  int n_strict_low_parts = 0;
  struct cselib_record_autoinc_data data;

  rtx body = PATTERN (insn);
  if (GET_CODE (body) == COND_EXEC)
    {
      cond = COND_EXEC_TEST (body);
      body = COND_EXEC_CODE (body);
    }

  /* Find all sets.  */
  if (GET_CODE (body) == SET)
    {
      sets[0].src = SET_SRC (body);
      sets[0].dest = SET_DEST (body);
      n_sets = 1;
    }
  else if (GET_CODE (body) == PARALLEL)
    {
      /* Look through the PARALLEL and record the values being
         set, if possible.  Also handle any CLOBBERs.  */
      for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
        {
          rtx x = XVECEXP (body, 0, i);

          if (GET_CODE (x) == SET)
            {
              sets[n_sets].src = SET_SRC (x);
              sets[n_sets].dest = SET_DEST (x);
              n_sets++;
            }
        }
    }

  /* For a single set reading a read-only MEM while memory isn't being
     recorded, prefer a constant REG_EQUAL/REG_EQUIV note as the source
     so the load still yields a useful equivalence.  */
  if (n_sets == 1
      && MEM_P (sets[0].src)
      && !cselib_record_memory
      && MEM_READONLY_P (sets[0].src))
    {
      rtx note = find_reg_equal_equiv_note (insn);

      if (note && CONSTANT_P (XEXP (note, 0)))
        sets[0].src = XEXP (note, 0);
    }

  /* Append the sets implied by autoincrement addressing modes; they are
     recorded after the explicit ones, starting at n_sets_before_autoinc.  */
  data.sets = sets;
  data.n_sets = n_sets_before_autoinc = n_sets;
  for_each_inc_dec (PATTERN (insn), cselib_record_autoinc_cb, &data);
  n_sets = data.n_sets;

  /* Look up the values that are read.  Do this before invalidating the
     locations that are written.  */
  for (i = 0; i < n_sets; i++)
    {
      rtx dest = sets[i].dest;
      rtx orig = dest;

      /* A STRICT_LOW_PART can be ignored; we'll record the equivalence for
         the low part after invalidating any knowledge about larger modes.  */
      if (GET_CODE (sets[i].dest) == STRICT_LOW_PART)
        sets[i].dest = dest = XEXP (dest, 0);

      /* We don't know how to record anything but REG or MEM.  */
      if (REG_P (dest)
          || (MEM_P (dest) && cselib_record_memory))
        {
          rtx src = sets[i].src;
          /* Conditional execution: the destination keeps its old value
             when COND is false, so record an IF_THEN_ELSE source.  */
          if (cond)
            src = gen_rtx_IF_THEN_ELSE (GET_MODE (dest), cond, src, dest);
          sets[i].src_elt = cselib_lookup (src, GET_MODE (dest), 1, VOIDmode);
          if (MEM_P (dest))
            {
              machine_mode address_mode = get_address_mode (dest);

              sets[i].dest_addr_elt = cselib_lookup (XEXP (dest, 0),
                                                     address_mode, 1,
                                                     GET_MODE (dest));
            }
          else
            sets[i].dest_addr_elt = 0;
        }

      /* Improve handling of STRICT_LOW_PART if the current value is known
         to be const0_rtx, then the low bits will be set to dest and higher
         bits will remain zero.  Used in code like:

         {di:SI=0;clobber flags:CC;}
         flags:CCNO=cmp(bx:SI,0)
         strict_low_part(di:QI)=flags:CCNO<=0

         where we can note both that di:QI=flags:CCNO<=0 and
         also that because di:SI is known to be 0 and strict_low_part(di:QI)
         preserves the upper bits that di:SI=zero_extend(flags:CCNO<=0).  */
      scalar_int_mode mode;
      if (dest != orig
          && cselib_record_sets_hook
          && REG_P (dest)
          && HARD_REGISTER_P (dest)
          && sets[i].src_elt
          && is_a <scalar_int_mode> (GET_MODE (dest), &mode)
          && n_sets + n_strict_low_parts < MAX_SETS)
        {
          opt_scalar_int_mode wider_mode_iter;
          FOR_EACH_WIDER_MODE (wider_mode_iter, mode)
            {
              scalar_int_mode wider_mode = wider_mode_iter.require ();
              /* Stop at word size; wider registers aren't handled here.  */
              if (GET_MODE_PRECISION (wider_mode) > BITS_PER_WORD)
                break;

              rtx reg = gen_lowpart (wider_mode, dest);
              if (!REG_P (reg))
                break;

              cselib_val *v = cselib_lookup (reg, wider_mode, 0, VOIDmode);
              if (!v)
                continue;

              /* Only useful if the wider register is known to be zero.  */
              struct elt_loc_list *l;
              for (l = v->locs; l; l = l->next)
                if (l->loc == const0_rtx)
                  break;

              if (!l)
                continue;

              /* Queue an extra set REG = zero_extend(...) behind the
                 regular ones; processed in the final loop below.  */
              sets[n_sets + n_strict_low_parts].dest = reg;
              sets[n_sets + n_strict_low_parts].src = dest;
              sets[n_sets + n_strict_low_parts++].src_elt = sets[i].src_elt;
              break;
            }
        }
    }

  if (cselib_record_sets_hook)
    cselib_record_sets_hook (insn, sets, n_sets);

  /* Invalidate all locations written by this insn.  Note that the elts we
     looked up in the previous loop aren't affected, just some of their
     locations may go away.  */
  note_pattern_stores (body, cselib_invalidate_rtx_note_stores, NULL);

  /* Autoinc destinations are addresses, not stores, so note_pattern_stores
     missed them; invalidate them explicitly.  */
  for (i = n_sets_before_autoinc; i < n_sets; i++)
    cselib_invalidate_rtx (sets[i].dest);

  /* If this is an asm, look for duplicate sets.  This can happen when the
     user uses the same value as an output multiple times.  This is valid
     if the outputs are not actually used thereafter.  Treat this case as
     if the value isn't actually set.  We do this by smashing the destination
     to pc_rtx, so that we won't record the value later.  */
  if (n_sets >= 2 && asm_noperands (body) >= 0)
    {
      for (i = 0; i < n_sets; i++)
        {
          rtx dest = sets[i].dest;
          if (REG_P (dest) || MEM_P (dest))
            {
              int j;
              for (j = i + 1; j < n_sets; j++)
                if (rtx_equal_p (dest, sets[j].dest))
                  {
                    sets[i].dest = pc_rtx;
                    sets[j].dest = pc_rtx;
                  }
            }
        }
    }

  /* Now enter the equivalences in our tables.  */
  for (i = 0; i < n_sets; i++)
    {
      rtx dest = sets[i].dest;
      if (REG_P (dest)
          || (MEM_P (dest) && cselib_record_memory))
        cselib_record_set (dest, sets[i].src_elt, sets[i].dest_addr_elt);
    }

  /* And deal with STRICT_LOW_PART.  */
  for (i = 0; i < n_strict_low_parts; i++)
    {
      if (! PRESERVED_VALUE_P (sets[n_sets + i].src_elt->val_rtx))
        continue;
      machine_mode dest_mode = GET_MODE (sets[n_sets + i].dest);
      cselib_val *v
        = cselib_lookup (sets[n_sets + i].dest, dest_mode, 1, VOIDmode);
      cselib_preserve_value (v);
      rtx r = gen_rtx_ZERO_EXTEND (dest_mode,
                                   sets[n_sets + i].src_elt->val_rtx);
      cselib_add_permanent_equiv (v, r, insn);
    }
}
3219 :
3220 : /* Return true if INSN in the prologue initializes hard_frame_pointer_rtx. */
3221 :
3222 : bool
3223 40797100 : fp_setter_insn (rtx_insn *insn)
3224 : {
3225 40797100 : rtx expr, pat = NULL_RTX;
3226 :
3227 40797100 : if (!RTX_FRAME_RELATED_P (insn))
3228 : return false;
3229 :
3230 624115 : expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
3231 624115 : if (expr)
3232 62 : pat = XEXP (expr, 0);
3233 624115 : if (!modified_in_p (hard_frame_pointer_rtx, pat ? pat : insn))
3234 : return false;
3235 :
3236 : /* Don't return true for frame pointer restores in the epilogue. */
3237 153322 : if (find_reg_note (insn, REG_CFA_RESTORE, hard_frame_pointer_rtx))
3238 : return false;
3239 : return true;
3240 : }
3241 :
3242 : /* V is one of the values in REG_VALUES (REGNO). Return true if it
3243 : would be invalidated by CALLEE_ABI. */
3244 :
3245 : static bool
3246 112028184 : cselib_invalidated_by_call_p (const function_abi &callee_abi,
3247 : unsigned int regno, cselib_val *v)
3248 : {
3249 112028184 : machine_mode mode = GET_MODE (v->val_rtx);
3250 112028184 : if (mode == VOIDmode)
3251 : {
3252 0 : v = REG_VALUES (regno)->elt;
3253 0 : if (!v)
3254 : /* If we don't know what the mode of the constant value is, and we
3255 : don't know what mode the register was set in, conservatively
3256 : assume that the register is clobbered. The value's going to be
3257 : essentially useless in this case anyway. */
3258 : return true;
3259 0 : mode = GET_MODE (v->val_rtx);
3260 : }
3261 112028184 : return callee_abi.clobbers_reg_p (mode, regno);
3262 : }
3263 :
3264 : /* Record the effects of INSN. */
3265 :
void
cselib_process_insn (rtx_insn *insn)
{
  int i;
  rtx x;

  cselib_current_insn = insn;

  /* Forget everything at a CODE_LABEL or a setjmp.  */
  if ((LABEL_P (insn)
       || (CALL_P (insn)
           && find_reg_note (insn, REG_SETJMP, NULL)))
      && !cselib_preserve_constants)
    {
      cselib_reset_table (next_uid);
      cselib_current_insn = NULL;
      return;
    }

  /* Notes, barriers and the like carry no sets to record.  */
  if (! INSN_P (insn))
    {
      cselib_current_insn = NULL;
      return;
    }

  /* If this is a call instruction, forget anything stored in a
     call clobbered register, or, if this is not a const call, in
     memory.  */
  if (CALL_P (insn))
    {
      function_abi callee_abi = insn_callee_abi (insn);
      /* Walk every hard register's value chain and drop the values
         clobbered by the callee's ABI.  */
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
        {
          elt_list **l = &REG_VALUES (i);
          while (*l)
            {
              cselib_val *v = (*l)->elt;
              if (v && cselib_invalidated_by_call_p (callee_abi, i, v))
                cselib_invalidate_regno_val (i, l);
              else
                l = &(*l)->next;
            }
        }

      /* Since it is not clear how cselib is going to be used, be
         conservative here and treat looping pure or const functions
         as if they were regular functions.  */
      if (RTL_LOOPING_CONST_OR_PURE_CALL_P (insn)
          || !(RTL_CONST_OR_PURE_CALL_P (insn)))
        cselib_invalidate_mem (callmem[0]);
      else
        {
          /* For const/pure calls, invalidate any argument slots because
             they are owned by the callee.  */
          for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
            if (GET_CODE (XEXP (x, 0)) == USE
                && MEM_P (XEXP (XEXP (x, 0), 0)))
              cselib_invalidate_mem (XEXP (XEXP (x, 0), 0));
          /* And invalidate memory below the stack (or above for
             !STACK_GROWS_DOWNWARD), as even const/pure call can invalidate
             that.  Do this only if !ACCUMULATE_OUTGOING_ARGS or if
             cfun->calls_alloca, otherwise the stack pointer shouldn't be
             changing in the middle of the function and nothing should be
             stored below the stack pointer.  */
          if (!ACCUMULATE_OUTGOING_ARGS || cfun->calls_alloca)
            cselib_invalidate_mem (callmem[1]);
        }
    }

  cselib_record_sets (insn);

  /* Look for any CLOBBERs in CALL_INSN_FUNCTION_USAGE, but only
     after we have processed the insn.  */
  if (CALL_P (insn))
    {
      for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
        if (GET_CODE (XEXP (x, 0)) == CLOBBER)
          cselib_invalidate_rtx (XEXP (XEXP (x, 0), 0));

      /* Flush everything on setjmp.  */
      if (cselib_preserve_constants
          && find_reg_note (insn, REG_SETJMP, NULL))
        {
          cselib_preserve_only_values ();
          cselib_reset_table (next_uid);
        }
    }

  /* On setter of the hard frame pointer if frame_pointer_needed,
     invalidate stack_pointer_rtx, so that sp and {,h}fp based
     VALUEs are distinct.  */
  if (reload_completed
      && frame_pointer_needed
      && fp_setter_insn (insn))
    cselib_invalidate_rtx (stack_pointer_rtx);

  cselib_current_insn = NULL;

  if (n_useless_values > MAX_USELESS_VALUES
      /* remove_useless_values is linear in the hash table size.  Avoid
         quadratic behavior for very large hashtables with very few
         useless elements.  */
      && ((unsigned int)n_useless_values
          > (cselib_hash_table->elements () - n_debug_values) / 4))
    remove_useless_values ();
}
3372 :
3373 : /* Initialize cselib for one pass. The caller must also call
3374 : init_alias_analysis. */
3375 :
void
cselib_init (int record_what)
{
  /* Decode the RECORD_WHAT flag bits into the file-static configuration.  */
  cselib_record_memory = record_what & CSELIB_RECORD_MEMORY;
  cselib_preserve_constants = record_what & CSELIB_PRESERVE_CONSTANTS;
  cselib_any_perm_equivs = false;

  /* (mem:BLK (scratch)) is a special mechanism to conflict with everything,
     see canon_true_dependence.  This is only created once.  */
  if (! callmem[0])
    callmem[0] = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
  /* Similarly create a MEM representing roughly everything below
     the stack for STACK_GROWS_DOWNWARD targets or everything above
     it otherwise.  Do this only when !ACCUMULATE_OUTGOING_ARGS or
     if cfun->calls_alloca, otherwise the stack pointer shouldn't be
     changing in the middle of the function and nothing should be stored
     below the stack pointer.  */
  if (!callmem[1] && (!ACCUMULATE_OUTGOING_ARGS || cfun->calls_alloca))
    {
      if (STACK_GROWS_DOWNWARD)
        {
          /* Half the address space below the stack pointer.  */
          unsigned HOST_WIDE_INT off = -(GET_MODE_MASK (Pmode) >> 1);
#ifdef STACK_ADDRESS_OFFSET
          /* On SPARC take stack pointer bias into account as well.  */
          off += (STACK_ADDRESS_OFFSET
                  - FIRST_PARM_OFFSET (current_function_decl));
#endif
          callmem[1] = plus_constant (Pmode, stack_pointer_rtx, off);
        }
      else
        callmem[1] = stack_pointer_rtx;
      callmem[1] = gen_rtx_MEM (BLKmode, callmem[1]);
      set_mem_size (callmem[1], GET_MODE_MASK (Pmode) >> 1);
    }

  cselib_nregs = max_reg_num ();

  /* We preserve reg_values to allow expensive clearing of the whole thing.
     Reallocate it however if it happens to be too large.  */
  if (!reg_values || reg_values_size < cselib_nregs
      || (reg_values_size > 10 && reg_values_size > cselib_nregs * 4))
    {
      free (reg_values);
      /* Some space for newly emit instructions so we don't end up
         reallocating in between passes.  */
      reg_values_size = cselib_nregs + (63 + cselib_nregs) / 16;
      reg_values = XCNEWVEC (struct elt_list *, reg_values_size);
    }
  used_regs = XNEWVEC (unsigned int, cselib_nregs);
  n_used_regs = 0;
  /* FIXME: enable sanitization (PR87845) */
  cselib_hash_table
    = new hash_table<cselib_hasher> (31, /* ggc */ false,
                                     /* sanitize_eq_and_hash */ false);
  if (cselib_preserve_constants)
    cselib_preserved_hash_table
      = new hash_table<cselib_hasher> (31, /* ggc */ false,
                                       /* sanitize_eq_and_hash */ false);
  next_uid = 1;
}
3436 :
3437 : /* Called when the current user is done with cselib. */
3438 :
3439 : void
3440 10173101 : cselib_finish (void)
3441 : {
3442 10173101 : bool preserved = cselib_preserve_constants;
3443 10173101 : cselib_discard_hook = NULL;
3444 10173101 : cselib_preserve_constants = false;
3445 10173101 : cselib_any_perm_equivs = false;
3446 10173101 : cfa_base_preserved_val = NULL;
3447 10173101 : cfa_base_preserved_regno = INVALID_REGNUM;
3448 10173101 : elt_list_pool.release ();
3449 10173101 : elt_loc_list_pool.release ();
3450 10173101 : cselib_val_pool.release ();
3451 10173101 : value_pool.release ();
3452 10173101 : cselib_clear_table ();
3453 10173101 : delete cselib_hash_table;
3454 10173101 : cselib_hash_table = NULL;
3455 10173101 : if (preserved)
3456 495852 : delete cselib_preserved_hash_table;
3457 10173101 : cselib_preserved_hash_table = NULL;
3458 10173101 : free (used_regs);
3459 10173101 : used_regs = 0;
3460 10173101 : n_useless_values = 0;
3461 10173101 : n_useless_debug_values = 0;
3462 10173101 : n_debug_values = 0;
3463 10173101 : next_uid = 0;
3464 10173101 : }
3465 :
3466 : /* Dump the cselib_val V to FILE *OUT. */
3467 :
int
dump_cselib_val (cselib_val *v, FILE *out)
{
  /* True while a terminating newline still has to be emitted before the
     next section starts.  */
  bool need_lf = true;

  print_inline_rtx (out, v->val_rtx, 0);

  /* Locations equivalent to this value, each with its setting insn.  */
  if (v->locs)
    {
      struct elt_loc_list *l = v->locs;
      if (need_lf)
        {
          fputc ('\n', out);
          need_lf = false;
        }
      fputs (" locs:", out);
      do
        {
          if (l->setting_insn)
            fprintf (out, "\n from insn %i ",
                     INSN_UID (l->setting_insn));
          else
            fprintf (out, "\n ");
          print_inline_rtx (out, l->loc, 4);
        }
      while ((l = l->next));
      fputc ('\n', out);
    }
  else
    {
      fputs (" no locs", out);
      need_lf = true;
    }

  /* Values of MEMs whose address is this value.  */
  if (v->addr_list)
    {
      struct elt_list *e = v->addr_list;
      if (need_lf)
        {
          fputc ('\n', out);
          need_lf = false;
        }
      fputs (" addr list:", out);
      do
        {
          fputs ("\n ", out);
          print_inline_rtx (out, e->elt->val_rtx, 2);
        }
      while ((e = e->next));
      fputc ('\n', out);
    }
  else
    {
      fputs (" no addrs", out);
      need_lf = true;
    }

  /* Position in the chain of values containing a MEM (dummy_val
     terminates the chain).  */
  if (v->next_containing_mem == &dummy_val)
    fputs (" last mem\n", out);
  else if (v->next_containing_mem)
    {
      fputs (" next mem ", out);
      print_inline_rtx (out, v->next_containing_mem->val_rtx, 2);
      fputc ('\n', out);
    }
  else if (need_lf)
    fputc ('\n', out);

  return 1;
}
3538 :
3539 : /* Dump the cselib_val *X to FILE *OUT. */
3540 :
3541 : static int
3542 93 : dump_cselib_val_ptr (cselib_val **x, FILE *out)
3543 : {
3544 93 : cselib_val *v = *x;
3545 93 : return dump_cselib_val (v, out);
3546 : }
3547 :
3548 : /* Dump to OUT everything in the CSELIB table. */
3549 :
3550 : void
3551 11 : dump_cselib_table (FILE *out)
3552 : {
3553 11 : fprintf (out, "cselib hash table:\n");
3554 104 : cselib_hash_table->traverse <FILE *, dump_cselib_val_ptr> (out);
3555 11 : if (cselib_preserved_hash_table)
3556 : {
3557 11 : fprintf (out, "cselib preserved hash table:\n");
3558 11 : cselib_preserved_hash_table->traverse <FILE *, dump_cselib_val_ptr> (out);
3559 : }
3560 11 : if (first_containing_mem != &dummy_val)
3561 : {
3562 6 : fputs ("first mem ", out);
3563 6 : print_inline_rtx (out, first_containing_mem->val_rtx, 2);
3564 6 : fputc ('\n', out);
3565 : }
3566 11 : fprintf (out, "next uid %i\n", next_uid);
3567 11 : }
3568 :
3569 : #include "gt-cselib.h"
|