Line data Source code
1 : /* Common subexpression elimination library for GNU compiler.
2 : Copyright (C) 1987-2026 Free Software Foundation, Inc.
3 :
4 : This file is part of GCC.
5 :
6 : GCC is free software; you can redistribute it and/or modify it under
7 : the terms of the GNU General Public License as published by the Free
8 : Software Foundation; either version 3, or (at your option) any later
9 : version.
10 :
11 : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 : WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 : for more details.
15 :
16 : You should have received a copy of the GNU General Public License
17 : along with GCC; see the file COPYING3. If not see
18 : <http://www.gnu.org/licenses/>. */
19 :
20 : #include "config.h"
21 : #include "system.h"
22 : #include "coretypes.h"
23 : #include "backend.h"
24 : #include "target.h"
25 : #include "rtl.h"
26 : #include "tree.h"
27 : #include "df.h"
28 : #include "memmodel.h"
29 : #include "tm_p.h"
30 : #include "regs.h"
31 : #include "emit-rtl.h"
32 : #include "dumpfile.h"
33 : #include "cselib.h"
34 : #include "function-abi.h"
35 : #include "alias.h"
36 : #include "predict.h"
37 :
38 : /* A list of cselib_val structures. */
39 : struct elt_list
40 : {
41 : struct elt_list *next;
42 : cselib_val *elt;
43 : };
44 :
45 : static bool cselib_record_memory;
46 : static bool cselib_preserve_constants;
47 : static bool cselib_any_perm_equivs;
48 : static inline void promote_debug_loc (struct elt_loc_list *l);
49 : static struct elt_list *new_elt_list (struct elt_list *, cselib_val *);
50 : static void new_elt_loc_list (cselib_val *, rtx);
51 : static void unchain_one_value (cselib_val *);
52 : static void unchain_one_elt_list (struct elt_list **);
53 : static void unchain_one_elt_loc_list (struct elt_loc_list **);
54 : static void remove_useless_values (void);
55 : static hashval_t cselib_hash_rtx (rtx, int, machine_mode);
56 : static cselib_val *new_cselib_val (unsigned int, machine_mode, rtx);
57 : static void add_mem_for_addr (cselib_val *, cselib_val *, rtx);
58 : static cselib_val *cselib_lookup_mem (rtx, int);
59 : static void cselib_invalidate_regno (unsigned int, machine_mode);
60 : static void cselib_invalidate_mem (rtx);
61 : static void cselib_record_set (rtx, cselib_val *, cselib_val *);
62 : static void cselib_record_sets (rtx_insn *);
63 : static rtx autoinc_split (rtx, rtx *, machine_mode);
64 :
65 : #define PRESERVED_VALUE_P(RTX) \
66 : (RTL_FLAG_CHECK1 ("PRESERVED_VALUE_P", (RTX), VALUE)->unchanging)
67 :
68 : #define SP_BASED_VALUE_P(RTX) \
69 : (RTL_FLAG_CHECK1 ("SP_BASED_VALUE_P", (RTX), VALUE)->jump)
70 :
71 : #define SP_DERIVED_VALUE_P(RTX) \
72 : (RTL_FLAG_CHECK1 ("SP_DERIVED_VALUE_P", (RTX), VALUE)->call)
73 :
74 : struct expand_value_data
75 : {
76 : bitmap regs_active;
77 : cselib_expand_callback callback;
78 : void *callback_arg;
79 : bool dummy;
80 : };
81 :
82 : static rtx cselib_expand_value_rtx_1 (rtx, struct expand_value_data *, int);
83 :
84 : /* This is a global so we don't have to pass this through every function.
85 : It is used in new_elt_loc_list to set SETTING_INSN. */
86 : static rtx_insn *cselib_current_insn;
87 :
88 : /* There are three ways in which cselib can look up an rtx:
89 : - for a REG, the reg_values table (which is indexed by regno) is used
90 : - for a MEM, we recursively look up its address and then follow the
91 : addr_list of that value
92 : - for everything else, we compute a hash value and go through the hash
93 : table. Since different rtx's can still have the same hash value,
94 : this involves walking the table entries for a given value and comparing
95 : the locations of the entries with the rtx we are looking up. */
96 :
97 : struct cselib_hasher : nofree_ptr_hash <cselib_val>
98 : {
99 : struct key {
100 : /* The rtx value and its mode (needed separately for constant
101 : integers). */
102 : machine_mode mode;
103 : rtx x;
104 : /* The mode of the contaning MEM, if any, otherwise VOIDmode. */
105 : machine_mode memmode;
106 : };
107 : typedef key *compare_type;
108 : static inline hashval_t hash (const cselib_val *);
109 : static inline bool equal (const cselib_val *, const key *);
110 : };
111 :
112 : /* The hash function for our hash table. The value is always computed with
113 : cselib_hash_rtx when adding an element; this function just extracts the
114 : hash value from a cselib_val structure. */
115 :
116 : inline hashval_t
117 101054458 : cselib_hasher::hash (const cselib_val *v)
118 : {
119 101054458 : return v->hash;
120 : }
121 :
122 : /* The equality test for our hash table. The first argument V is a table
123 : element (i.e. a cselib_val), while the second arg X is an rtx. We know
124 : that all callers of htab_find_slot_with_hash will wrap CONST_INTs into a
125 : CONST of an appropriate mode. */
126 :
127 : inline bool
128 575249985 : cselib_hasher::equal (const cselib_val *v, const key *x_arg)
129 : {
130 575249985 : struct elt_loc_list *l;
131 575249985 : rtx x = x_arg->x;
132 575249985 : machine_mode mode = x_arg->mode;
133 575249985 : machine_mode memmode = x_arg->memmode;
134 :
135 575249985 : if (mode != GET_MODE (v->val_rtx))
136 : return false;
137 :
138 463160954 : if (GET_CODE (x) == VALUE)
139 12883458 : return x == v->val_rtx;
140 :
141 460316348 : if (SP_DERIVED_VALUE_P (v->val_rtx) && GET_MODE (x) == Pmode)
142 : {
143 17596284 : rtx xoff = NULL;
144 17596284 : if (autoinc_split (x, &xoff, memmode) == v->val_rtx && xoff == NULL_RTX)
145 7895744 : return true;
146 : }
147 :
148 : /* We don't guarantee that distinct rtx's have different hash values,
149 : so we need to do a comparison. */
150 758525088 : for (l = v->locs; l; l = l->next)
151 493660034 : if (l->setting_insn && DEBUG_INSN_P (l->setting_insn)
152 38926665 : && (!cselib_current_insn || !DEBUG_INSN_P (cselib_current_insn)))
153 : {
154 11066948 : rtx_insn *save_cselib_current_insn = cselib_current_insn;
155 : /* If l is so far a debug only loc, without debug stmts it
156 : would never be compared to x at all, so temporarily pretend
157 : current instruction is that DEBUG_INSN so that we don't
158 : promote other debug locs even for unsuccessful comparison. */
159 11066948 : cselib_current_insn = l->setting_insn;
160 11066948 : bool match = rtx_equal_for_cselib_1 (l->loc, x, memmode, 0);
161 11066948 : cselib_current_insn = save_cselib_current_insn;
162 11066948 : if (match)
163 : {
164 909211 : promote_debug_loc (l);
165 909211 : return true;
166 : }
167 : }
168 482593086 : else if (rtx_equal_for_cselib_1 (l->loc, x, memmode, 0))
169 : return true;
170 :
171 : return false;
172 : }
173 :
174 : /* A table that enables us to look up elts by their value. */
175 : static hash_table<cselib_hasher> *cselib_hash_table;
176 :
177 : /* A table to hold preserved values. */
178 : static hash_table<cselib_hasher> *cselib_preserved_hash_table;
179 :
180 : /* The unique id that the next create value will take. */
181 : static unsigned int next_uid;
182 :
183 : /* The number of registers we had when the varrays were last resized. */
184 : static unsigned int cselib_nregs;
185 :
186 : /* Count values without known locations, or with only locations that
187 : wouldn't have been known except for debug insns. Whenever this
188 : grows too big, we remove these useless values from the table.
189 :
190 : Counting values with only debug values is a bit tricky. We don't
191 : want to increment n_useless_values when we create a value for a
192 : debug insn, for this would get n_useless_values out of sync, but we
193 : want increment it if all locs in the list that were ever referenced
194 : in nondebug insns are removed from the list.
195 :
196 : In the general case, once we do that, we'd have to stop accepting
197 : nondebug expressions in the loc list, to avoid having two values
198 : equivalent that, without debug insns, would have been made into
199 : separate values. However, because debug insns never introduce
200 : equivalences themselves (no assignments), the only means for
201 : growing loc lists is through nondebug assignments. If the locs
202 : also happen to be referenced in debug insns, it will work just fine.
203 :
204 : A consequence of this is that there's at most one debug-only loc in
205 : each loc list. If we keep it in the first entry, testing whether
206 : we have a debug-only loc list takes O(1).
207 :
208 : Furthermore, since any additional entry in a loc list containing a
209 : debug loc would have to come from an assignment (nondebug) that
210 : references both the initial debug loc and the newly-equivalent loc,
211 : the initial debug loc would be promoted to a nondebug loc, and the
212 : loc list would not contain debug locs any more.
213 :
214 : So the only case we have to be careful with in order to keep
215 : n_useless_values in sync between debug and nondebug compilations is
216 : to avoid incrementing n_useless_values when removing the single loc
217 : from a value that turns out to not appear outside debug values. We
218 : increment n_useless_debug_values instead, and leave such values
219 : alone until, for other reasons, we garbage-collect useless
220 : values. */
221 : static int n_useless_values;
222 : static int n_useless_debug_values;
223 :
224 : /* Count values whose locs have been taken exclusively from debug
225 : insns for the entire life of the value. */
226 : static int n_debug_values;
227 :
228 : /* Number of useless values before we remove them from the hash table. */
229 : #define MAX_USELESS_VALUES 32
230 :
231 : /* This table maps from register number to values. It does not
232 : contain pointers to cselib_val structures, but rather elt_lists.
233 : The purpose is to be able to refer to the same register in
234 : different modes. The first element of the list defines the mode in
235 : which the register was set; if the mode is unknown or the value is
236 : no longer valid in that mode, ELT will be NULL for the first
237 : element. */
238 : static struct elt_list **reg_values;
239 : static unsigned int reg_values_size;
240 : #define REG_VALUES(i) reg_values[i]
241 :
242 : /* The largest number of hard regs used by any entry added to the
243 : REG_VALUES table. Cleared on each cselib_clear_table() invocation. */
244 : static unsigned int max_value_regs;
245 :
246 : /* Here the set of indices I with REG_VALUES(I) != 0 is saved. This is used
247 : in cselib_clear_table() for fast emptying. */
248 : static unsigned int *used_regs;
249 : static unsigned int n_used_regs;
250 :
251 : /* We pass this to cselib_invalidate_mem to invalidate all of
252 : memory for a non-const call instruction and memory below stack pointer
253 : for const/pure calls. */
254 : static GTY(()) rtx callmem[2];
255 :
256 : /* Set by discard_useless_locs if it deleted the last location of any
257 : value. */
258 : static int values_became_useless;
259 :
260 : /* Used as stop element of the containing_mem list so we can check
261 : presence in the list by checking the next pointer. */
262 : static cselib_val dummy_val;
263 :
264 : /* If non-NULL, value of the eliminated arg_pointer_rtx or frame_pointer_rtx
265 : that is constant through the whole function and should never be
266 : eliminated. */
267 : static cselib_val *cfa_base_preserved_val;
268 : static unsigned int cfa_base_preserved_regno = INVALID_REGNUM;
269 :
270 : /* Used to list all values that contain memory reference.
271 : May or may not contain the useless values - the list is compacted
272 : each time memory is invalidated. */
273 : static cselib_val *first_containing_mem = &dummy_val;
274 :
275 : static object_allocator<elt_list> elt_list_pool ("elt_list");
276 : static object_allocator<elt_loc_list> elt_loc_list_pool ("elt_loc_list");
277 : static object_allocator<cselib_val> cselib_val_pool ("cselib_val_list");
278 :
279 : static pool_allocator value_pool ("value", RTX_CODE_SIZE (VALUE));
280 :
281 : /* If nonnull, cselib will call this function before freeing useless
282 : VALUEs. A VALUE is deemed useless if its "locs" field is null. */
283 : void (*cselib_discard_hook) (cselib_val *);
284 :
285 : /* If nonnull, cselib will call this function before recording sets or
286 : even clobbering outputs of INSN. All the recorded sets will be
287 : represented in the array sets[n_sets]. new_val_min can be used to
288 : tell whether values present in sets are introduced by this
289 : instruction. */
290 : void (*cselib_record_sets_hook) (rtx_insn *insn, struct cselib_set *sets,
291 : int n_sets);
292 :
293 :
294 :
295 : /* Allocate a struct elt_list and fill in its two elements with the
296 : arguments. */
297 :
298 : static inline struct elt_list *
299 488157949 : new_elt_list (struct elt_list *next, cselib_val *elt)
300 : {
301 976315898 : elt_list *el = elt_list_pool.allocate ();
302 488157949 : el->next = next;
303 488157949 : el->elt = elt;
304 488157949 : return el;
305 : }
306 :
307 : /* Allocate a struct elt_loc_list with LOC and prepend it to VAL's loc
308 : list. */
309 :
310 : static inline void
311 721840090 : new_elt_loc_list (cselib_val *val, rtx loc)
312 : {
313 725785790 : struct elt_loc_list *el, *next = val->locs;
314 :
315 725785790 : gcc_checking_assert (!next || !next->setting_insn
316 : || !DEBUG_INSN_P (next->setting_insn)
317 : || cselib_current_insn == next->setting_insn);
318 :
319 : /* If we're creating the first loc in a debug insn context, we've
320 : just created a debug value. Count it. */
321 424946121 : if (!next && cselib_current_insn && DEBUG_INSN_P (cselib_current_insn))
322 6778255 : n_debug_values++;
323 :
324 725785790 : val = canonical_cselib_val (val);
325 725785790 : next = val->locs;
326 :
327 725785790 : if (GET_CODE (loc) == VALUE)
328 : {
329 7896006 : loc = canonical_cselib_val (CSELIB_VAL_PTR (loc))->val_rtx;
330 :
331 7896006 : gcc_checking_assert (PRESERVED_VALUE_P (loc)
332 : == PRESERVED_VALUE_P (val->val_rtx));
333 :
334 7896006 : if (val->val_rtx == loc)
335 : return;
336 7891574 : else if (val->uid > CSELIB_VAL_PTR (loc)->uid)
337 : {
338 : /* Reverse the insertion. */
339 : new_elt_loc_list (CSELIB_VAL_PTR (loc), val->val_rtx);
340 : return;
341 : }
342 :
343 3945874 : gcc_checking_assert (val->uid < CSELIB_VAL_PTR (loc)->uid);
344 :
345 3945874 : if (CSELIB_VAL_PTR (loc)->locs)
346 : {
347 : /* Bring all locs from LOC to VAL. */
348 3012306 : for (el = CSELIB_VAL_PTR (loc)->locs; el->next; el = el->next)
349 : {
350 : /* Adjust values that have LOC as canonical so that VAL
351 : becomes their canonical. */
352 18 : if (el->loc && GET_CODE (el->loc) == VALUE)
353 : {
354 0 : gcc_checking_assert (CSELIB_VAL_PTR (el->loc)->locs->loc
355 : == loc);
356 0 : CSELIB_VAL_PTR (el->loc)->locs->loc = val->val_rtx;
357 : }
358 : }
359 3012288 : el->next = val->locs;
360 3012288 : next = val->locs = CSELIB_VAL_PTR (loc)->locs;
361 : }
362 :
363 3945874 : if (CSELIB_VAL_PTR (loc)->addr_list)
364 : {
365 : /* Bring in addr_list into canonical node. */
366 : struct elt_list *last = CSELIB_VAL_PTR (loc)->addr_list;
367 2 : while (last->next)
368 : last = last->next;
369 2 : last->next = val->addr_list;
370 2 : val->addr_list = CSELIB_VAL_PTR (loc)->addr_list;
371 2 : CSELIB_VAL_PTR (loc)->addr_list = NULL;
372 : }
373 :
374 3945874 : if (CSELIB_VAL_PTR (loc)->next_containing_mem != NULL
375 0 : && val->next_containing_mem == NULL)
376 : {
377 : /* Add VAL to the containing_mem list after LOC. LOC will
378 : be removed when we notice it doesn't contain any
379 : MEMs. */
380 0 : val->next_containing_mem = CSELIB_VAL_PTR (loc)->next_containing_mem;
381 0 : CSELIB_VAL_PTR (loc)->next_containing_mem = val;
382 : }
383 :
384 : /* Chain LOC back to VAL. */
385 3945874 : el = elt_loc_list_pool.allocate ();
386 3945874 : el->loc = val->val_rtx;
387 3945874 : el->setting_insn = cselib_current_insn;
388 3945874 : el->next = NULL;
389 3945874 : CSELIB_VAL_PTR (loc)->locs = el;
390 : }
391 :
392 721835658 : el = elt_loc_list_pool.allocate ();
393 721835658 : el->loc = loc;
394 721835658 : el->setting_insn = cselib_current_insn;
395 721835658 : el->next = next;
396 721835658 : val->locs = el;
397 : }
398 :
399 : /* Promote loc L to a nondebug cselib_current_insn if L is marked as
400 : originating from a debug insn, maintaining the debug values
401 : count. */
402 :
403 : static inline void
404 1222648261 : promote_debug_loc (struct elt_loc_list *l)
405 : {
406 1222648261 : if (l && l->setting_insn && DEBUG_INSN_P (l->setting_insn)
407 22763667 : && (!cselib_current_insn || !DEBUG_INSN_P (cselib_current_insn)))
408 : {
409 2615185 : n_debug_values--;
410 2615185 : l->setting_insn = cselib_current_insn;
411 2615185 : if (cselib_preserve_constants && l->next)
412 : {
413 21196 : gcc_assert (l->next->setting_insn
414 : && DEBUG_INSN_P (l->next->setting_insn)
415 : && !l->next->next);
416 21196 : l->next->setting_insn = cselib_current_insn;
417 : }
418 : else
419 2593989 : gcc_assert (!l->next);
420 : }
421 1222648261 : }
422 :
423 : /* The elt_list at *PL is no longer needed. Unchain it and free its
424 : storage. */
425 :
426 : static inline void
427 80310004 : unchain_one_elt_list (struct elt_list **pl)
428 : {
429 80310004 : struct elt_list *l = *pl;
430 :
431 80310004 : *pl = l->next;
432 117785045 : elt_list_pool.remove (l);
433 42834963 : }
434 :
435 : /* Likewise for elt_loc_lists. */
436 :
437 : static void
438 222715798 : unchain_one_elt_loc_list (struct elt_loc_list **pl)
439 : {
440 222715798 : struct elt_loc_list *l = *pl;
441 :
442 222715798 : *pl = l->next;
443 0 : elt_loc_list_pool.remove (l);
444 38747263 : }
445 :
446 : /* Likewise for cselib_vals. This also frees the addr_list associated with
447 : V. */
448 :
449 : static void
450 2355530 : unchain_one_value (cselib_val *v)
451 : {
452 2374952 : while (v->addr_list)
453 19422 : unchain_one_elt_list (&v->addr_list);
454 :
455 2355530 : cselib_val_pool.remove (v);
456 2355530 : }
457 :
458 : /* Remove all entries from the hash table. Also used during
459 : initialization. */
460 :
461 : void
462 61220320 : cselib_clear_table (void)
463 : {
464 61220320 : cselib_reset_table (1);
465 61220320 : }
466 :
467 : /* Return TRUE if V is a constant, a function invariant or a VALUE
468 : equivalence; FALSE otherwise. */
469 :
470 : static bool
471 58046018 : invariant_or_equiv_p (cselib_val *v)
472 : {
473 58046018 : struct elt_loc_list *l;
474 :
475 58046018 : if (v == cfa_base_preserved_val)
476 : return true;
477 :
478 : /* Keep VALUE equivalences around. */
479 80597525 : for (l = v->locs; l; l = l->next)
480 38942015 : if (GET_CODE (l->loc) == VALUE)
481 : return true;
482 :
483 41655510 : if (v->locs != NULL
484 22851742 : && v->locs->next == NULL)
485 : {
486 22795429 : if (CONSTANT_P (v->locs->loc)
487 22795429 : && (GET_CODE (v->locs->loc) != CONST
488 173867 : || !references_value_p (v->locs->loc, 0)))
489 3492094 : return true;
490 : /* Although a debug expr may be bound to different expressions,
491 : we can preserve it as if it was constant, to get unification
492 : and proper merging within var-tracking. */
493 19303335 : if (GET_CODE (v->locs->loc) == DEBUG_EXPR
494 17780953 : || GET_CODE (v->locs->loc) == DEBUG_IMPLICIT_PTR
495 17280494 : || GET_CODE (v->locs->loc) == ENTRY_VALUE
496 17280267 : || GET_CODE (v->locs->loc) == DEBUG_PARAMETER_REF)
497 : return true;
498 :
499 : /* (plus (value V) (const_int C)) is invariant iff V is invariant. */
500 17263933 : if (GET_CODE (v->locs->loc) == PLUS
501 10745250 : && CONST_INT_P (XEXP (v->locs->loc, 1))
502 9586781 : && GET_CODE (XEXP (v->locs->loc, 0)) == VALUE
503 26096511 : && invariant_or_equiv_p (CSELIB_VAL_PTR (XEXP (v->locs->loc, 0))))
504 : return true;
505 : }
506 :
507 : return false;
508 : }
509 :
510 : /* Remove from hash table all VALUEs except constants, function
511 : invariants and VALUE equivalences. */
512 :
513 : int
514 44803625 : preserve_constants_and_equivs (cselib_val **x, void *info ATTRIBUTE_UNUSED)
515 : {
516 44803625 : cselib_val *v = *x;
517 :
518 44803625 : if (invariant_or_equiv_p (v))
519 : {
520 17512189 : cselib_hasher::key lookup = {
521 17512189 : GET_MODE (v->val_rtx), v->val_rtx, VOIDmode
522 17512189 : };
523 17512189 : cselib_val **slot
524 35024378 : = cselib_preserved_hash_table->find_slot_with_hash (&lookup,
525 17512189 : v->hash, INSERT);
526 17512189 : gcc_assert (!*slot);
527 17512189 : *slot = v;
528 : }
529 :
530 44803625 : cselib_hash_table->clear_slot (x);
531 :
532 44803625 : return 1;
533 : }
534 :
535 : /* Remove all entries from the hash table, arranging for the next
536 : value to be numbered NUM. */
537 :
538 : void
539 101094458 : cselib_reset_table (unsigned int num)
540 : {
541 101094458 : unsigned int i;
542 :
543 101094458 : max_value_regs = 0;
544 :
545 101094458 : if (cfa_base_preserved_val)
546 : {
547 4409815 : unsigned int regno = cfa_base_preserved_regno;
548 4409815 : unsigned int new_used_regs = 0;
549 31466130 : for (i = 0; i < n_used_regs; i++)
550 27056315 : if (used_regs[i] == regno)
551 : {
552 4409815 : new_used_regs = 1;
553 4409815 : continue;
554 : }
555 : else
556 22646500 : REG_VALUES (used_regs[i]) = 0;
557 4409815 : gcc_assert (new_used_regs == 1);
558 4409815 : n_used_regs = new_used_regs;
559 4409815 : used_regs[0] = regno;
560 4409815 : max_value_regs
561 4409815 : = hard_regno_nregs (regno,
562 4409815 : GET_MODE (cfa_base_preserved_val->locs->loc));
563 :
564 : /* If cfa_base is sp + const_int, need to preserve also the
565 : SP_DERIVED_VALUE_P value. */
566 4409815 : for (struct elt_loc_list *l = cfa_base_preserved_val->locs;
567 8819630 : l; l = l->next)
568 8819630 : if (GET_CODE (l->loc) == PLUS
569 4409815 : && GET_CODE (XEXP (l->loc, 0)) == VALUE
570 4409815 : && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))
571 13229445 : && CONST_INT_P (XEXP (l->loc, 1)))
572 : {
573 4409815 : if (! invariant_or_equiv_p (CSELIB_VAL_PTR (XEXP (l->loc, 0))))
574 : {
575 0 : rtx val = cfa_base_preserved_val->val_rtx;
576 0 : rtx_insn *save_cselib_current_insn = cselib_current_insn;
577 0 : cselib_current_insn = l->setting_insn;
578 0 : new_elt_loc_list (CSELIB_VAL_PTR (XEXP (l->loc, 0)),
579 0 : plus_constant (Pmode, val,
580 0 : -UINTVAL (XEXP (l->loc, 1))));
581 0 : cselib_current_insn = save_cselib_current_insn;
582 : }
583 : break;
584 : }
585 : }
586 : else
587 : {
588 371126256 : for (i = 0; i < n_used_regs; i++)
589 274441613 : REG_VALUES (used_regs[i]) = 0;
590 96684643 : n_used_regs = 0;
591 : }
592 :
593 101094458 : if (cselib_preserve_constants)
594 49213440 : cselib_hash_table->traverse <void *, preserve_constants_and_equivs> (NULL);
595 : else
596 : {
597 96684643 : cselib_hash_table->empty ();
598 96684643 : gcc_checking_assert (!cselib_any_perm_equivs);
599 : }
600 :
601 101094458 : n_useless_values = 0;
602 101094458 : n_useless_debug_values = 0;
603 101094458 : n_debug_values = 0;
604 :
605 101094458 : next_uid = num;
606 :
607 101094458 : first_containing_mem = &dummy_val;
608 101094458 : }
609 :
610 : /* Return the number of the next value that will be generated. */
611 :
612 : unsigned int
613 4909575 : cselib_get_next_uid (void)
614 : {
615 4909575 : return next_uid;
616 : }
617 :
618 : /* Search for X, whose hashcode is HASH, in CSELIB_HASH_TABLE,
619 : INSERTing if requested. When X is part of the address of a MEM,
620 : MEMMODE should specify the mode of the MEM. */
621 :
622 : static cselib_val **
623 716390070 : cselib_find_slot (machine_mode mode, rtx x, hashval_t hash,
624 : enum insert_option insert, machine_mode memmode)
625 : {
626 716390070 : cselib_val **slot = NULL;
627 716390070 : cselib_hasher::key lookup = { mode, x, memmode };
628 716390070 : if (cselib_preserve_constants)
629 161182262 : slot = cselib_preserved_hash_table->find_slot_with_hash (&lookup, hash,
630 : NO_INSERT);
631 161182262 : if (!slot)
632 661332638 : slot = cselib_hash_table->find_slot_with_hash (&lookup, hash, insert);
633 716390070 : return slot;
634 : }
635 :
636 : /* Return true if X contains a VALUE rtx. If ONLY_USELESS is set, we
637 : only return true for values which point to a cselib_val whose value
638 : element has been set to zero, which implies the cselib_val will be
639 : removed. */
640 :
641 : bool
642 1276274669 : references_value_p (const_rtx x, int only_useless)
643 : {
644 1276274669 : const enum rtx_code code = GET_CODE (x);
645 1276274669 : const char *fmt = GET_RTX_FORMAT (code);
646 1276274669 : int i, j;
647 :
648 1276274669 : if (GET_CODE (x) == VALUE
649 1276274669 : && (! only_useless
650 449418665 : || (CSELIB_VAL_PTR (x)->locs == 0 && !PRESERVED_VALUE_P (x))))
651 : return true;
652 :
653 2915475755 : for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
654 : {
655 1641955376 : if (fmt[i] == 'e' && references_value_p (XEXP (x, i), only_useless))
656 : return true;
657 1640565763 : else if (fmt[i] == 'E')
658 45341001 : for (j = 0; j < XVECLEN (x, i); j++)
659 38029989 : if (references_value_p (XVECEXP (x, i, j), only_useless))
660 : return true;
661 : }
662 :
663 : return false;
664 : }
665 :
666 : /* Return true if V is a useless VALUE and can be discarded as such. */
667 :
668 : static bool
669 1206070676 : cselib_useless_value_p (cselib_val *v)
670 : {
671 1206070676 : return (v->locs == 0
672 79505448 : && !PRESERVED_VALUE_P (v->val_rtx)
673 1251966616 : && !SP_DERIVED_VALUE_P (v->val_rtx));
674 : }
675 :
676 : /* For all locations found in X, delete locations that reference useless
677 : values (i.e. values without any location). Called through
678 : htab_traverse. */
679 :
680 : int
681 558378685 : discard_useless_locs (cselib_val **x, void *info ATTRIBUTE_UNUSED)
682 : {
683 558378685 : cselib_val *v = *x;
684 558378685 : struct elt_loc_list **p = &v->locs;
685 558378685 : bool had_locs = v->locs != NULL;
686 558378685 : rtx_insn *setting_insn = v->locs ? v->locs->setting_insn : NULL;
687 :
688 1181219066 : while (*p)
689 : {
690 622840381 : if (references_value_p ((*p)->loc, 1))
691 1272222 : unchain_one_elt_loc_list (p);
692 : else
693 621568159 : p = &(*p)->next;
694 : }
695 :
696 558378685 : if (had_locs && cselib_useless_value_p (v))
697 : {
698 1139240 : if (setting_insn && DEBUG_INSN_P (setting_insn))
699 2 : n_useless_debug_values++;
700 : else
701 1139238 : n_useless_values++;
702 1139240 : values_became_useless = 1;
703 : }
704 558378685 : return 1;
705 : }
706 :
707 : /* If X is a value with no locations, remove it from the hashtable. */
708 :
709 : int
710 48646107 : discard_useless_values (cselib_val **x, void *info ATTRIBUTE_UNUSED)
711 : {
712 48646107 : cselib_val *v = *x;
713 :
714 48646107 : if (v->locs == 0 && cselib_useless_value_p (v))
715 : {
716 2355530 : if (cselib_discard_hook)
717 530566 : cselib_discard_hook (v);
718 :
719 2355530 : CSELIB_VAL_PTR (v->val_rtx) = NULL;
720 2355530 : cselib_hash_table->clear_slot (x);
721 2355530 : unchain_one_value (v);
722 2355530 : n_useless_values--;
723 : }
724 :
725 48646107 : return 1;
726 : }
727 :
728 : /* Clean out useless values (i.e. those which no longer have locations
729 : associated with them) from the hash table. */
730 :
731 : static void
732 4438540 : remove_useless_values (void)
733 : {
734 4492900 : cselib_val **p, *v;
735 :
736 : /* First pass: eliminate locations that reference the value. That in
737 : turn can make more values useless. */
738 4492900 : do
739 : {
740 4492900 : values_became_useless = 0;
741 562871585 : cselib_hash_table->traverse <void *, discard_useless_locs> (NULL);
742 : }
743 4492900 : while (values_became_useless);
744 :
745 : /* Second pass: actually remove the values. */
746 :
747 4438540 : p = &first_containing_mem;
748 4560978 : for (v = *p; v != &dummy_val; v = v->next_containing_mem)
749 122438 : if (v->locs && v == canonical_cselib_val (v))
750 : {
751 119155 : *p = v;
752 119155 : p = &(*p)->next_containing_mem;
753 : }
754 4438540 : *p = &dummy_val;
755 :
756 4438540 : if (cselib_preserve_constants)
757 4409815 : cselib_preserved_hash_table->traverse <void *,
758 4409815 : discard_useless_locs> (NULL);
759 4438540 : gcc_assert (!values_became_useless);
760 :
761 4438540 : n_useless_values += n_useless_debug_values;
762 4438540 : n_debug_values -= n_useless_debug_values;
763 4438540 : n_useless_debug_values = 0;
764 :
765 53084647 : cselib_hash_table->traverse <void *, discard_useless_values> (NULL);
766 :
767 4438540 : gcc_assert (!n_useless_values);
768 4438540 : }
769 :
770 : /* Arrange for a value to not be removed from the hash table even if
771 : it becomes useless. */
772 :
773 : void
774 45460164 : cselib_preserve_value (cselib_val *v)
775 : {
776 45460164 : PRESERVED_VALUE_P (v->val_rtx) = 1;
777 45460164 : }
778 :
779 : /* Test whether a value is preserved. */
780 :
781 : bool
782 263993353 : cselib_preserved_value_p (cselib_val *v)
783 : {
784 263993353 : return PRESERVED_VALUE_P (v->val_rtx);
785 : }
786 :
787 : /* Arrange for a REG value to be assumed constant through the whole function,
788 : never invalidated and preserved across cselib_reset_table calls. */
789 :
790 : void
791 999019 : cselib_preserve_cfa_base_value (cselib_val *v, unsigned int regno)
792 : {
793 999019 : if (cselib_preserve_constants
794 999019 : && v->locs
795 999019 : && REG_P (v->locs->loc))
796 : {
797 500050 : cfa_base_preserved_val = v;
798 500050 : cfa_base_preserved_regno = regno;
799 : }
800 999019 : }
801 :
802 : /* Clean all non-constant expressions in the hash table, but retain
803 : their values. */
804 :
805 : void
806 4409815 : cselib_preserve_only_values (void)
807 : {
808 4409815 : int i;
809 :
810 410112795 : for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
811 405702980 : cselib_invalidate_regno (i, reg_raw_mode[i]);
812 :
813 4409815 : cselib_invalidate_mem (callmem[0]);
814 :
815 4409815 : remove_useless_values ();
816 :
817 4409815 : gcc_assert (first_containing_mem == &dummy_val);
818 4409815 : }
819 :
820 : /* Arrange for a value to be marked as based on stack pointer
821 : for find_base_term purposes. */
822 :
823 : void
824 1915863 : cselib_set_value_sp_based (cselib_val *v)
825 : {
826 1915863 : SP_BASED_VALUE_P (v->val_rtx) = 1;
827 1915863 : }
828 :
829 : /* Test whether a value is based on stack pointer for
830 : find_base_term purposes. */
831 :
832 : bool
833 836238435 : cselib_sp_based_value_p (cselib_val *v)
834 : {
835 836238435 : return SP_BASED_VALUE_P (v->val_rtx);
836 : }
837 :
838 : /* Return the mode in which a register was last set. If X is not a
839 : register, return its mode. If the mode in which the register was
840 : set is not known, or the value was already clobbered, return
841 : VOIDmode. */
842 :
843 : machine_mode
844 113219872 : cselib_reg_set_mode (const_rtx x)
845 : {
846 113219872 : if (!REG_P (x))
847 34198096 : return GET_MODE (x);
848 :
849 79021776 : if (REG_VALUES (REGNO (x)) == NULL
850 79021776 : || REG_VALUES (REGNO (x))->elt == NULL)
851 : return VOIDmode;
852 :
853 23916427 : return GET_MODE (REG_VALUES (REGNO (x))->elt->val_rtx);
854 : }
855 :
856 : /* If x is a PLUS or an autoinc operation, expand the operation,
857 : storing the offset, if any, in *OFF. */
858 :
859 : static rtx
860 1463664304 : autoinc_split (rtx x, rtx *off, machine_mode memmode)
861 : {
862 1463664304 : switch (GET_CODE (x))
863 : {
864 807943512 : case PLUS:
865 807943512 : *off = XEXP (x, 1);
866 807943512 : x = XEXP (x, 0);
867 807943512 : break;
868 :
869 12658444 : case PRE_DEC:
870 12658444 : if (memmode == VOIDmode)
871 : return x;
872 :
873 25316888 : *off = gen_int_mode (-GET_MODE_SIZE (memmode), GET_MODE (x));
874 12658444 : x = XEXP (x, 0);
875 12658444 : break;
876 :
877 0 : case PRE_INC:
878 0 : if (memmode == VOIDmode)
879 : return x;
880 :
881 0 : *off = gen_int_mode (GET_MODE_SIZE (memmode), GET_MODE (x));
882 0 : x = XEXP (x, 0);
883 0 : break;
884 :
885 1635618 : case PRE_MODIFY:
886 1635618 : x = XEXP (x, 1);
887 1635618 : break;
888 :
889 1814299 : case POST_DEC:
890 1814299 : case POST_INC:
891 1814299 : case POST_MODIFY:
892 1814299 : x = XEXP (x, 0);
893 1814299 : break;
894 :
895 : default:
896 : break;
897 : }
898 :
899 1463664304 : if (GET_MODE (x) == Pmode
900 1414136658 : && (REG_P (x) || MEM_P (x) || GET_CODE (x) == VALUE)
901 2273892960 : && (*off == NULL_RTX || CONST_INT_P (*off)))
902 : {
903 805567796 : cselib_val *e;
904 805567796 : if (GET_CODE (x) == VALUE)
905 515095938 : e = CSELIB_VAL_PTR (x);
906 : else
907 290471858 : e = cselib_lookup (x, GET_MODE (x), 0, memmode);
908 805567796 : if (e)
909 : {
910 796349299 : if (SP_DERIVED_VALUE_P (e->val_rtx)
911 796349299 : && (*off == NULL_RTX || *off == const0_rtx))
912 : {
913 66109 : *off = NULL_RTX;
914 66109 : return e->val_rtx;
915 : }
916 1742101365 : for (struct elt_loc_list *l = e->locs; l; l = l->next)
917 1367632388 : if (GET_CODE (l->loc) == PLUS
918 586658723 : && GET_CODE (XEXP (l->loc, 0)) == VALUE
919 585469944 : && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))
920 1789449111 : && CONST_INT_P (XEXP (l->loc, 1)))
921 : {
922 421814213 : if (*off == NULL_RTX)
923 1455315 : *off = XEXP (l->loc, 1);
924 : else
925 420358898 : *off = plus_constant (Pmode, *off,
926 420358898 : INTVAL (XEXP (l->loc, 1)));
927 421814213 : if (*off == const0_rtx)
928 259357281 : *off = NULL_RTX;
929 421814213 : return XEXP (l->loc, 0);
930 : }
931 : }
932 : }
933 : return x;
934 : }
935 :
936 : /* Return true if we can prove that X and Y contain the same value,
937 : taking our gathered information into account. MEMMODE holds the
938 : mode of the enclosing MEM, if any, as required to deal with autoinc
939 : addressing modes. If X and Y are not (known to be) part of
940 : addresses, MEMMODE should be VOIDmode. */
941 :
942 : bool
943 1203691322 : rtx_equal_for_cselib_1 (rtx x, rtx y, machine_mode memmode, int depth)
944 : {
945 1583310990 : enum rtx_code code;
946 1583310990 : const char *fmt;
947 1583310990 : int i;
948 :
949 1583310990 : if (REG_P (x) || MEM_P (x))
950 : {
951 148535008 : cselib_val *e = cselib_lookup (x, GET_MODE (x), 0, memmode);
952 :
953 148535008 : if (e)
954 125325122 : x = e->val_rtx;
955 : }
956 :
957 1583310990 : if (REG_P (y) || MEM_P (y))
958 : {
959 141745564 : cselib_val *e = cselib_lookup (y, GET_MODE (y), 0, memmode);
960 :
961 141745564 : if (e)
962 129847915 : y = e->val_rtx;
963 : }
964 :
965 1583310990 : if (x == y)
966 : return true;
967 :
968 1258024866 : if (GET_CODE (x) == VALUE)
969 : {
970 429296460 : cselib_val *e = canonical_cselib_val (CSELIB_VAL_PTR (x));
971 429296460 : struct elt_loc_list *l;
972 :
973 429296460 : if (GET_CODE (y) == VALUE)
974 31181801 : return e == canonical_cselib_val (CSELIB_VAL_PTR (y));
975 :
976 398114659 : if ((SP_DERIVED_VALUE_P (x)
977 149127120 : || SP_DERIVED_VALUE_P (e->val_rtx))
978 441949675 : && GET_MODE (y) == Pmode)
979 : {
980 251302697 : rtx yoff = NULL;
981 251302697 : rtx yr = autoinc_split (y, &yoff, memmode);
982 251302697 : if ((yr == x || yr == e->val_rtx) && yoff == NULL_RTX)
983 53228 : return true;
984 : }
985 :
986 398061431 : if (depth == 128)
987 : return false;
988 :
989 1223991617 : for (l = e->locs; l; l = l->next)
990 : {
991 849008570 : rtx t = l->loc;
992 :
993 : /* Avoid infinite recursion. We know we have the canonical
994 : value, so we can just skip any values in the equivalence
995 : list. */
996 849008570 : if (REG_P (t) || MEM_P (t) || GET_CODE (t) == VALUE)
997 505179004 : continue;
998 343829566 : else if (rtx_equal_for_cselib_1 (t, y, memmode, depth + 1))
999 : return true;
1000 : }
1001 :
1002 : return false;
1003 : }
1004 828728406 : else if (GET_CODE (y) == VALUE)
1005 : {
1006 46134366 : cselib_val *e = canonical_cselib_val (CSELIB_VAL_PTR (y));
1007 46134366 : struct elt_loc_list *l;
1008 :
1009 46134366 : if ((SP_DERIVED_VALUE_P (y)
1010 43920780 : || SP_DERIVED_VALUE_P (e->val_rtx))
1011 46545109 : && GET_MODE (x) == Pmode)
1012 : {
1013 2141171 : rtx xoff = NULL;
1014 2141171 : rtx xr = autoinc_split (x, &xoff, memmode);
1015 2141171 : if ((xr == y || xr == e->val_rtx) && xoff == NULL_RTX)
1016 173 : return true;
1017 : }
1018 :
1019 46134193 : if (depth == 128)
1020 : return false;
1021 :
1022 110968875 : for (l = e->locs; l; l = l->next)
1023 : {
1024 64917158 : rtx t = l->loc;
1025 :
1026 64917158 : if (REG_P (t) || MEM_P (t) || GET_CODE (t) == VALUE)
1027 55746295 : continue;
1028 9170863 : else if (rtx_equal_for_cselib_1 (x, t, memmode, depth + 1))
1029 : return true;
1030 : }
1031 :
1032 : return false;
1033 : }
1034 :
1035 782594040 : if (GET_MODE (x) != GET_MODE (y))
1036 : return false;
1037 :
1038 744178843 : if (GET_CODE (x) != GET_CODE (y)
1039 744178843 : || (GET_CODE (x) == PLUS
1040 342336345 : && GET_MODE (x) == Pmode
1041 244776901 : && CONST_INT_P (XEXP (x, 1))
1042 233860997 : && CONST_INT_P (XEXP (y, 1))))
1043 : {
1044 596312076 : rtx xorig = x, yorig = y;
1045 596312076 : rtx xoff = NULL, yoff = NULL;
1046 :
1047 596312076 : x = autoinc_split (x, &xoff, memmode);
1048 596312076 : y = autoinc_split (y, &yoff, memmode);
1049 :
1050 : /* Don't recurse if nothing changed. */
1051 596312076 : if (x != xorig || y != yorig)
1052 : {
1053 556898875 : if (!xoff != !yoff)
1054 217335792 : return false;
1055 :
1056 477285677 : if (xoff && !rtx_equal_for_cselib_1 (xoff, yoff, memmode, depth))
1057 : return false;
1058 :
1059 378976284 : return rtx_equal_for_cselib_1 (x, y, memmode, depth);
1060 : }
1061 :
1062 39413201 : if (GET_CODE (xorig) != GET_CODE (yorig))
1063 : return false;
1064 : }
1065 :
1066 : /* These won't be handled correctly by the code below. */
1067 147866767 : switch (GET_CODE (x))
1068 : {
1069 : CASE_CONST_UNIQUE:
1070 : case DEBUG_EXPR:
1071 : return false;
1072 :
1073 : case CONST_VECTOR:
1074 : if (!same_vector_encodings_p (x, y))
1075 : return false;
1076 : break;
1077 :
1078 2161731 : case DEBUG_IMPLICIT_PTR:
1079 2161731 : return DEBUG_IMPLICIT_PTR_DECL (x)
1080 2161731 : == DEBUG_IMPLICIT_PTR_DECL (y);
1081 :
1082 5514 : case DEBUG_PARAMETER_REF:
1083 5514 : return DEBUG_PARAMETER_REF_DECL (x)
1084 5514 : == DEBUG_PARAMETER_REF_DECL (y);
1085 :
1086 306382 : case ENTRY_VALUE:
1087 : /* ENTRY_VALUEs are function invariant, it is thus undesirable to
1088 : use rtx_equal_for_cselib_1 to compare the operands. */
1089 306382 : return rtx_equal_p (ENTRY_VALUE_EXP (x), ENTRY_VALUE_EXP (y));
1090 :
1091 258 : case LABEL_REF:
1092 258 : return label_ref_label (x) == label_ref_label (y);
1093 :
1094 164 : case REG:
1095 164 : return REGNO (x) == REGNO (y);
1096 :
1097 643384 : case MEM:
1098 : /* We have to compare any autoinc operations in the addresses
1099 : using this MEM's mode. */
1100 643384 : return rtx_equal_for_cselib_1 (XEXP (x, 0), XEXP (y, 0), GET_MODE (x),
1101 643384 : depth);
1102 :
1103 : default:
1104 : break;
1105 : }
1106 :
1107 39751847 : code = GET_CODE (x);
1108 39751847 : fmt = GET_RTX_FORMAT (code);
1109 :
1110 69259101 : for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1111 : {
1112 57689526 : int j;
1113 :
1114 57689526 : switch (fmt[i])
1115 : {
1116 0 : case 'w':
1117 0 : if (XWINT (x, i) != XWINT (y, i))
1118 : return false;
1119 : break;
1120 :
1121 580764 : case 'n':
1122 580764 : case 'i':
1123 580764 : if (XINT (x, i) != XINT (y, i))
1124 : return false;
1125 : break;
1126 :
1127 5504 : case 'L':
1128 5504 : if (XLOC (x, i) != XLOC (y, i))
1129 : return false;
1130 : break;
1131 :
1132 579831 : case 'p':
1133 579831 : if (maybe_ne (SUBREG_BYTE (x), SUBREG_BYTE (y)))
1134 : return false;
1135 : break;
1136 :
1137 1493479 : case 'V':
1138 1493479 : case 'E':
1139 : /* Two vectors must have the same length. */
1140 1493479 : if (XVECLEN (x, i) != XVECLEN (y, i))
1141 : return false;
1142 :
1143 : /* And the corresponding elements must match. */
1144 3827646 : for (j = 0; j < XVECLEN (x, i); j++)
1145 3205495 : if (! rtx_equal_for_cselib_1 (XVECEXP (x, i, j),
1146 3205495 : XVECEXP (y, i, j), memmode, depth))
1147 : return false;
1148 : break;
1149 :
1150 44540958 : case 'e':
1151 44540958 : if (i == 1
1152 28146862 : && targetm.commutative_p (x, UNKNOWN)
1153 20465028 : && rtx_equal_for_cselib_1 (XEXP (x, 1), XEXP (y, 0), memmode,
1154 : depth)
1155 44945735 : && rtx_equal_for_cselib_1 (XEXP (x, 0), XEXP (y, 1), memmode,
1156 : depth))
1157 : return true;
1158 44233734 : if (! rtx_equal_for_cselib_1 (XEXP (x, i), XEXP (y, i), memmode,
1159 : depth))
1160 : return false;
1161 : break;
1162 :
1163 5347256 : case 'S':
1164 5347256 : case 's':
1165 5347256 : if (strcmp (XSTR (x, i), XSTR (y, i)))
1166 : return false;
1167 : break;
1168 :
1169 : case 'u':
1170 : /* These are just backpointers, so they don't matter. */
1171 : break;
1172 :
1173 : case '0':
1174 : case 't':
1175 : break;
1176 :
1177 : /* It is believed that rtx's at this level will never
1178 : contain anything but integers and other rtx's,
1179 : except for within LABEL_REFs and SYMBOL_REFs. */
1180 0 : default:
1181 0 : gcc_unreachable ();
1182 : }
1183 : }
1184 : return true;
1185 : }
1186 :
1187 : /* Wrapper for rtx_equal_for_cselib_p to determine whether a SET is
1188 : truly redundant, taking into account aliasing information. */
1189 : bool
1190 113219872 : cselib_redundant_set_p (rtx set)
1191 : {
1192 113219872 : gcc_assert (GET_CODE (set) == SET);
1193 113219872 : rtx dest = SET_DEST (set);
1194 113219872 : if (cselib_reg_set_mode (dest) != GET_MODE (dest))
1195 : return false;
1196 :
1197 54141871 : rtx src = SET_SRC (set);
1198 4025466 : if ((MEM_P (src) && MEM_VOLATILE_P (src))
1199 58098384 : || !rtx_equal_for_cselib_p (dest, src))
1200 53724700 : return false;
1201 :
1202 417171 : while (GET_CODE (dest) == SUBREG
1203 417171 : || GET_CODE (dest) == ZERO_EXTRACT
1204 834342 : || GET_CODE (dest) == STRICT_LOW_PART)
1205 0 : dest = XEXP (dest, 0);
1206 :
1207 417171 : if (!flag_strict_aliasing || !MEM_P (dest))
1208 : return true;
1209 :
1210 37325 : if (MEM_VOLATILE_P (dest))
1211 : return false;
1212 :
1213 : /* For a store we need to check that suppressing it will not change
1214 : the effective alias set. */
1215 37325 : rtx dest_addr = XEXP (dest, 0);
1216 :
1217 : /* Lookup the equivalents to the original dest (rather than just the
1218 : MEM). */
1219 74650 : cselib_val *src_val = cselib_lookup (SET_DEST (set),
1220 37325 : GET_MODE (SET_DEST (set)),
1221 : 0, VOIDmode);
1222 :
1223 37325 : if (src_val)
1224 : {
1225 : /* Walk the list of source equivalents to find the MEM accessing
1226 : the same location. */
1227 84695 : for (elt_loc_list *l = src_val->locs; l; l = l->next)
1228 : {
1229 84695 : rtx src_equiv = l->loc;
1230 84695 : while (GET_CODE (src_equiv) == SUBREG
1231 84695 : || GET_CODE (src_equiv) == ZERO_EXTRACT
1232 169396 : || GET_CODE (src_equiv) == STRICT_LOW_PART)
1233 6 : src_equiv = XEXP (src_equiv, 0);
1234 :
1235 84695 : if (MEM_P (src_equiv))
1236 : {
1237 : /* Match the MEMs by comparing the addresses. We can
1238 : only remove the later store if the earlier aliases at
1239 : least all the accesses of the later one. */
1240 45024 : if (rtx_equal_for_cselib_1 (dest_addr, XEXP (src_equiv, 0),
1241 45024 : GET_MODE (dest), 0))
1242 37245 : return mems_same_for_tbaa_p (src_equiv, dest);
1243 : }
1244 : }
1245 : }
1246 :
1247 : /* We failed to find a recorded value in the cselib history, so try
1248 : the source of this set; this catches cases such as *p = *q when p
1249 : and q have the same value. */
1250 80 : while (GET_CODE (src) == SUBREG)
1251 0 : src = XEXP (src, 0);
1252 :
1253 80 : if (MEM_P (src)
1254 80 : && rtx_equal_for_cselib_1 (dest_addr, XEXP (src, 0), GET_MODE (dest), 0))
1255 80 : return mems_same_for_tbaa_p (src, dest);
1256 :
1257 : return false;
1258 : }
1259 :
1260 : /* Helper function for cselib_hash_rtx. Arguments like for cselib_hash_rtx,
1261 : except that it hashes (plus:P x c). */
1262 :
1263 : static hashval_t
1264 284094825 : cselib_hash_plus_const_int (rtx x, HOST_WIDE_INT c, int create,
1265 : machine_mode memmode)
1266 : {
1267 284094825 : cselib_val *e = cselib_lookup (x, GET_MODE (x), create, memmode);
1268 284094825 : if (! e)
1269 : return 0;
1270 :
1271 270952100 : if (! SP_DERIVED_VALUE_P (e->val_rtx))
1272 439334627 : for (struct elt_loc_list *l = e->locs; l; l = l->next)
1273 354909471 : if (GET_CODE (l->loc) == PLUS
1274 130312073 : && GET_CODE (XEXP (l->loc, 0)) == VALUE
1275 129555011 : && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))
1276 477687950 : && CONST_INT_P (XEXP (l->loc, 1)))
1277 : {
1278 122775627 : e = CSELIB_VAL_PTR (XEXP (l->loc, 0));
1279 122775627 : c = trunc_int_for_mode (c + UINTVAL (XEXP (l->loc, 1)), Pmode);
1280 122775627 : break;
1281 : }
1282 270952100 : if (c == 0)
1283 8097539 : return e->hash;
1284 :
1285 262854561 : inchash::hash hash;
1286 262854561 : hash.add_int (PLUS);
1287 262854561 : hash.add_int (GET_MODE (x));
1288 262854561 : hash.merge_hash (e->hash);
1289 262854561 : hash.add_hwi (c);
1290 :
1291 262854561 : return hash.end () ? hash.end () : 1 + (unsigned int) PLUS;
1292 : }
1293 :
1294 : /* Hash an rtx. Return 0 if we couldn't hash the rtx.
1295 : For registers and memory locations, we look up their cselib_val structure
1296 : and return its VALUE element.
1297 : Possible reasons for return 0 are: the object is volatile, or we couldn't
1298 : find a register or memory location in the table and CREATE is zero. If
1299 : CREATE is nonzero, table elts are created for regs and mem.
1300 : N.B. this hash function returns the same hash value for RTXes that
1301 : differ only in the order of operands, thus it is suitable for comparisons
1302 : that take commutativity into account.
1303 : If we wanted to also support associative rules, we'd have to use a different
1304 : strategy to avoid returning spurious 0, e.g. return ~(~0U >> 1) .
1305 : MEMMODE indicates the mode of an enclosing MEM, and it's only
1306 : used to compute autoinc values.
1307 : We used to have a MODE argument for hashing for CONST_INTs, but that
1308 : didn't make sense, since it caused spurious hash differences between
1309 : (set (reg:SI 1) (const_int))
1310 : (plus:SI (reg:SI 2) (reg:SI 1))
1311 : and
1312 : (plus:SI (reg:SI 2) (const_int))
1313 : If the mode is important in any context, it must be checked specifically
1314 : in a comparison anyway, since relying on hash differences is unsafe. */
1315 :
1316 : static hashval_t
1317 991262776 : cselib_hash_rtx (rtx x, int create, machine_mode memmode)
1318 : {
1319 991262776 : cselib_val *e;
1320 991262776 : poly_int64 offset;
1321 991262776 : int i, j;
1322 991262776 : enum rtx_code code;
1323 991262776 : const char *fmt;
1324 991262776 : inchash::hash hash;
1325 :
1326 991262776 : code = GET_CODE (x);
1327 991262776 : hash.add_int (code);
1328 991262776 : hash.add_int (GET_MODE (x));
1329 :
1330 991262776 : switch (code)
1331 : {
1332 446297 : case VALUE:
1333 446297 : e = CSELIB_VAL_PTR (x);
1334 446297 : return e->hash;
1335 :
1336 217453915 : case MEM:
1337 217453915 : case REG:
1338 217453915 : e = cselib_lookup (x, GET_MODE (x), create, memmode);
1339 217453915 : if (! e)
1340 : return 0;
1341 :
1342 193130476 : return e->hash;
1343 :
1344 13147892 : case DEBUG_EXPR:
1345 13147892 : hash.add_int (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x)));
1346 13147892 : return hash.end () ? hash.end() : (unsigned int) DEBUG_EXPR;
1347 :
1348 2831472 : case DEBUG_IMPLICIT_PTR:
1349 2831472 : hash.add_int (DECL_UID (DEBUG_IMPLICIT_PTR_DECL (x)));
1350 2831472 : return hash.end () ? hash.end () : (unsigned int) DEBUG_IMPLICIT_PTR;
1351 :
1352 37660 : case DEBUG_PARAMETER_REF:
1353 37660 : hash.add_int (DECL_UID (DEBUG_PARAMETER_REF_DECL (x)));
1354 37660 : return hash.end () ? hash.end () : (unsigned int) DEBUG_PARAMETER_REF;
1355 :
1356 1394995 : case ENTRY_VALUE:
1357 : /* ENTRY_VALUEs are function invariant, thus try to avoid
1358 : recursing on argument if ENTRY_VALUE is one of the
1359 : forms emitted by expand_debug_expr, otherwise
1360 : ENTRY_VALUE hash would depend on the current value
1361 : in some register or memory. */
1362 1394995 : if (REG_P (ENTRY_VALUE_EXP (x)))
1363 1377686 : hash.add_int ((unsigned int) REG
1364 1377686 : + (unsigned int) GET_MODE (ENTRY_VALUE_EXP (x))
1365 1377686 : + (unsigned int) REGNO (ENTRY_VALUE_EXP (x)));
1366 17309 : else if (MEM_P (ENTRY_VALUE_EXP (x))
1367 17309 : && REG_P (XEXP (ENTRY_VALUE_EXP (x), 0)))
1368 17309 : hash.add_int ((unsigned int) MEM
1369 17309 : + (unsigned int) GET_MODE (XEXP (ENTRY_VALUE_EXP (x), 0))
1370 17309 : + (unsigned int) REGNO (XEXP (ENTRY_VALUE_EXP (x), 0)));
1371 : else
1372 0 : hash.add_int (cselib_hash_rtx (ENTRY_VALUE_EXP (x), create, memmode));
1373 1394995 : return hash.end () ? hash.end () : (unsigned int) ENTRY_VALUE;
1374 :
1375 172969374 : case CONST_INT:
1376 172969374 : hash.add_hwi (UINTVAL (x));
1377 172969374 : return hash.end () ? hash.end () : (unsigned int) CONST_INT;
1378 :
1379 : case CONST_WIDE_INT:
1380 2947791 : for (i = 0; i < CONST_WIDE_INT_NUNITS (x); i++)
1381 1967043 : hash.add_hwi (CONST_WIDE_INT_ELT (x, i));
1382 980748 : return hash.end () ? hash.end () : (unsigned int) CONST_WIDE_INT;
1383 :
1384 : case CONST_POLY_INT:
1385 : {
1386 0 : for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1387 0 : hash.add_wide_int (CONST_POLY_INT_COEFFS (x)[i]);
1388 0 : return hash.end () ? hash.end () : (unsigned int) CONST_POLY_INT;
1389 : }
1390 :
1391 3488407 : case CONST_DOUBLE:
1392 : /* This is like the general case, except that it only counts
1393 : the integers representing the constant. */
1394 3488407 : if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (x) == VOIDmode)
1395 : {
1396 : hash.add_hwi (CONST_DOUBLE_LOW (x));
1397 : hash.add_hwi (CONST_DOUBLE_HIGH (x));
1398 : }
1399 : else
1400 3488407 : hash.merge_hash (real_hash (CONST_DOUBLE_REAL_VALUE (x)));
1401 3488407 : return hash.end () ? hash.end () : (unsigned int) CONST_DOUBLE;
1402 :
1403 0 : case CONST_FIXED:
1404 0 : hash.merge_hash (fixed_hash (CONST_FIXED_VALUE (x)));
1405 0 : return hash.end () ? hash.end () : (unsigned int) CONST_FIXED;
1406 :
1407 3117133 : case CONST_VECTOR:
1408 3117133 : {
1409 3117133 : int units;
1410 3117133 : rtx elt;
1411 :
1412 3117133 : units = const_vector_encoded_nelts (x);
1413 :
1414 8146058 : for (i = 0; i < units; ++i)
1415 : {
1416 5028925 : elt = CONST_VECTOR_ENCODED_ELT (x, i);
1417 5028925 : hash.merge_hash (cselib_hash_rtx (elt, 0, memmode));
1418 : }
1419 :
1420 3117133 : return hash.end () ? hash.end () : (unsigned int) CONST_VECTOR;
1421 : }
1422 :
1423 : /* Assume there is only one rtx object for any given label. */
1424 143168 : case LABEL_REF:
1425 : /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
1426 : differences and differences between each stage's debugging dumps. */
1427 143168 : hash.add_int (CODE_LABEL_NUMBER (label_ref_label (x)));
1428 143168 : return hash.end () ? hash.end () : (unsigned int) LABEL_REF;
1429 :
1430 64435972 : case SYMBOL_REF:
1431 64435972 : {
1432 : /* Don't hash on the symbol's address to avoid bootstrap differences.
1433 : Different hash values may cause expressions to be recorded in
1434 : different orders and thus different registers to be used in the
1435 : final assembler. This also avoids differences in the dump files
1436 : between various stages. */
1437 64435972 : const char *p = (const char *) XSTR (x, 0);
1438 :
1439 64435972 : if (*p)
1440 64435972 : hash.add (p, strlen (p));
1441 :
1442 64435972 : return hash.end () ? hash.end () : (unsigned int) SYMBOL_REF;
1443 : }
1444 :
1445 17514317 : case PRE_DEC:
1446 17514317 : case PRE_INC:
1447 17514317 : {
1448 : /* We can't compute these without knowing the MEM mode. */
1449 17514317 : gcc_assert (memmode != VOIDmode);
1450 35028634 : offset = GET_MODE_SIZE (memmode);
1451 17514317 : if (code == PRE_DEC)
1452 17514317 : offset = -offset;
1453 : /* Adjust the hash so that (mem:MEMMODE (pre_* (reg))) hashes
1454 : like (mem:MEMMODE (plus (reg) (const_int I))). */
1455 17514317 : if (GET_MODE (x) == Pmode
1456 17514317 : && (REG_P (XEXP (x, 0))
1457 : || MEM_P (XEXP (x, 0))
1458 : || GET_CODE (XEXP (x, 0)) == VALUE))
1459 : {
1460 17514317 : HOST_WIDE_INT c;
1461 17514317 : if (offset.is_constant (&c))
1462 17514317 : return cselib_hash_plus_const_int (XEXP (x, 0),
1463 : trunc_int_for_mode (c, Pmode),
1464 : create, memmode);
1465 : }
1466 :
1467 0 : hashval_t tem_hash = cselib_hash_rtx (XEXP (x, 0), create, memmode);
1468 0 : if (tem_hash == 0)
1469 : return 0;
1470 0 : hash.merge_hash (tem_hash);
1471 0 : tem_hash = cselib_hash_rtx (gen_int_mode (offset, GET_MODE (x)),
1472 : create, memmode);
1473 0 : if (tem_hash == 0)
1474 : return 0;
1475 0 : hash.merge_hash (tem_hash);
1476 0 : return hash.end () ? hash.end () : 1 + (unsigned) PLUS;
1477 : }
1478 :
1479 504739 : case PRE_MODIFY:
1480 504739 : {
1481 504739 : gcc_assert (memmode != VOIDmode);
1482 504739 : hashval_t tem_hash = cselib_hash_rtx (XEXP (x, 1), create, memmode);
1483 504739 : if (tem_hash == 0)
1484 : return 0;
1485 498113 : hash.merge_hash (tem_hash);
1486 498113 : return hash.end () ? hash.end () : 1 + (unsigned) PRE_MODIFY;
1487 : }
1488 :
1489 1774571 : case POST_DEC:
1490 1774571 : case POST_INC:
1491 1774571 : case POST_MODIFY:
1492 1774571 : {
1493 1774571 : gcc_assert (memmode != VOIDmode);
1494 1774571 : hashval_t tem_hash = cselib_hash_rtx (XEXP (x, 0), create, memmode);
1495 1774571 : if (tem_hash == 0)
1496 : return 0;
1497 1774571 : hash.merge_hash (tem_hash);
1498 1774571 : return hash.end () ? hash.end () : 1 + (unsigned) code;
1499 : }
1500 :
1501 : case PC:
1502 : case CALL:
1503 : case UNSPEC_VOLATILE:
1504 : return 0;
1505 :
1506 461150 : case ASM_OPERANDS:
1507 461150 : if (MEM_VOLATILE_P (x))
1508 : return 0;
1509 :
1510 : break;
1511 :
1512 321484582 : case PLUS:
1513 321484582 : if (GET_MODE (x) == Pmode
1514 310400357 : && (REG_P (XEXP (x, 0))
1515 : || MEM_P (XEXP (x, 0))
1516 : || GET_CODE (XEXP (x, 0)) == VALUE)
1517 606859566 : && CONST_INT_P (XEXP (x, 1)))
1518 266580508 : return cselib_hash_plus_const_int (XEXP (x, 0), INTVAL (XEXP (x, 1)),
1519 266580508 : create, memmode);
1520 : break;
1521 :
1522 : default:
1523 : break;
1524 : }
1525 :
1526 209368096 : i = GET_RTX_LENGTH (code) - 1;
1527 209368096 : fmt = GET_RTX_FORMAT (code);
1528 :
1529 209368096 : if (COMMUTATIVE_P (x))
1530 : {
1531 79952435 : gcc_assert (i == 1 && fmt[0] == 'e' && fmt[1] == 'e');
1532 79952435 : hashval_t tem1_hash = cselib_hash_rtx (XEXP (x, 1), create, memmode);
1533 79952435 : if (tem1_hash == 0)
1534 : return 0;
1535 74500648 : hashval_t tem0_hash = cselib_hash_rtx (XEXP (x, 0), create, memmode);
1536 74500648 : if (tem0_hash == 0)
1537 : return 0;
1538 70759411 : hash.add_commutative (tem0_hash, tem1_hash);
1539 70759411 : return hash.end () ? hash.end () : 1 + (unsigned int) GET_CODE (x);
1540 : }
1541 :
1542 340681779 : for (; i >= 0; i--)
1543 : {
1544 229315212 : switch (fmt[i])
1545 : {
1546 208446885 : case 'e':
1547 208446885 : {
1548 208446885 : rtx tem = XEXP (x, i);
1549 208446885 : hashval_t tem_hash = cselib_hash_rtx (tem, create, memmode);
1550 208446885 : if (tem_hash == 0)
1551 : return 0;
1552 191332154 : hash.merge_hash (tem_hash);
1553 : }
1554 191332154 : break;
1555 : case 'E':
1556 26652227 : for (j = 0; j < XVECLEN (x, i); j++)
1557 : {
1558 18799861 : hashval_t tem_hash
1559 18799861 : = cselib_hash_rtx (XVECEXP (x, i, j), create, memmode);
1560 18799861 : if (tem_hash == 0)
1561 : return 0;
1562 17865498 : hash.merge_hash (tem_hash);
1563 : }
1564 : break;
1565 :
1566 574678 : case 's':
1567 574678 : {
1568 574678 : const char *p = (const char *) XSTR (x, i);
1569 :
1570 574678 : if (p && *p)
1571 540872 : hash.add (p, strlen (p));
1572 : break;
1573 : }
1574 :
1575 5338246 : case 'i':
1576 5338246 : hash.add_hwi (XINT (x, i));
1577 5338246 : break;
1578 :
1579 244763 : case 'L':
1580 244763 : hash.add_hwi (XLOC (x, i));
1581 244763 : break;
1582 :
1583 5923911 : case 'p':
1584 5923911 : hash.add_int (constant_lower_bound (SUBREG_BYTE (x)));
1585 5923911 : break;
1586 :
1587 : case '0':
1588 : case 't':
1589 : /* unused */
1590 : break;
1591 :
1592 0 : default:
1593 0 : gcc_unreachable ();
1594 : }
1595 : }
1596 :
1597 111366567 : return hash.end () ? hash.end () : 1 + (unsigned int) GET_CODE (x);
1598 : }
1599 :
1600 : /* Create a new value structure for VALUE and initialize it. The mode of the
1601 : value is MODE. */
1602 :
1603 : static inline cselib_val *
1604 419614986 : new_cselib_val (hashval_t hash, machine_mode mode, rtx x)
1605 : {
1606 419614986 : cselib_val *e = cselib_val_pool.allocate ();
1607 :
1608 419614986 : gcc_assert (hash);
1609 419614986 : gcc_assert (next_uid);
1610 :
1611 419614986 : e->hash = hash;
1612 419614986 : e->uid = next_uid++;
1613 : /* We use an alloc pool to allocate this RTL construct because it
1614 : accounts for about 8% of the overall memory usage. We know
1615 : precisely when we can have VALUE RTXen (when cselib is active)
1616 : so we don't need to put them in garbage collected memory.
1617 : ??? Why should a VALUE be an RTX in the first place? */
1618 419614986 : e->val_rtx = (rtx_def*) value_pool.allocate ();
1619 419614986 : memset (e->val_rtx, 0, RTX_HDR_SIZE);
1620 419614986 : PUT_CODE (e->val_rtx, VALUE);
1621 419614986 : PUT_MODE (e->val_rtx, mode);
1622 419614986 : CSELIB_VAL_PTR (e->val_rtx) = e;
1623 419614986 : e->addr_list = 0;
1624 419614986 : e->locs = 0;
1625 419614986 : e->next_containing_mem = 0;
1626 :
1627 419614986 : scalar_int_mode int_mode;
1628 547582117 : if (REG_P (x) && is_int_mode (mode, &int_mode)
1629 127967131 : && GET_MODE_SIZE (int_mode) > 1
1630 121737256 : && REG_VALUES (REGNO (x)) != NULL
1631 426457322 : && (!cselib_current_insn || !DEBUG_INSN_P (cselib_current_insn)))
1632 : {
1633 6695377 : rtx copy = shallow_copy_rtx (x);
1634 6695377 : scalar_int_mode narrow_mode_iter;
1635 24048457 : FOR_EACH_MODE_UNTIL (narrow_mode_iter, int_mode)
1636 : {
1637 17353080 : PUT_MODE_RAW (copy, narrow_mode_iter);
1638 17353080 : cselib_val *v = cselib_lookup (copy, narrow_mode_iter, 0, VOIDmode);
1639 17353080 : if (v)
1640 : {
1641 777325 : rtx sub = lowpart_subreg (narrow_mode_iter, e->val_rtx, int_mode);
1642 777325 : if (sub)
1643 777325 : new_elt_loc_list (v, sub);
1644 : }
1645 : }
1646 : }
1647 :
1648 419614986 : if (dump_file && (dump_flags & TDF_CSELIB))
1649 : {
1650 0 : fprintf (dump_file, "cselib value %u:%u ", e->uid, hash);
1651 0 : if (flag_dump_noaddr || flag_dump_unnumbered)
1652 0 : fputs ("# ", dump_file);
1653 : else
1654 0 : fprintf (dump_file, "%p ", (void*)e);
1655 0 : print_rtl_single (dump_file, x);
1656 0 : fputc ('\n', dump_file);
1657 : }
1658 :
1659 419614986 : return e;
1660 : }
1661 :
1662 : /* ADDR_ELT is a value that is used as address. MEM_ELT is the value that
1663 : contains the data at this address. X is a MEM that represents the
1664 : value. Update the two value structures to represent this situation. */
1665 :
1666 : static void
1667 52941166 : add_mem_for_addr (cselib_val *addr_elt, cselib_val *mem_elt, rtx x)
1668 : {
1669 52941166 : addr_elt = canonical_cselib_val (addr_elt);
1670 52941166 : mem_elt = canonical_cselib_val (mem_elt);
1671 :
1672 : /* Avoid duplicates. */
1673 52941166 : addr_space_t as = MEM_ADDR_SPACE (x);
1674 94968467 : for (elt_loc_list *l = mem_elt->locs; l; l = l->next)
1675 42027301 : if (MEM_P (l->loc)
1676 12609405 : && CSELIB_VAL_PTR (XEXP (l->loc, 0)) == addr_elt
1677 42027301 : && MEM_ADDR_SPACE (l->loc) == as)
1678 : {
1679 0 : promote_debug_loc (l);
1680 0 : return;
1681 : }
1682 :
1683 52941166 : addr_elt->addr_list = new_elt_list (addr_elt->addr_list, mem_elt);
1684 52941166 : new_elt_loc_list (mem_elt,
1685 : replace_equiv_address_nv (x, addr_elt->val_rtx));
1686 52941166 : if (mem_elt->next_containing_mem == NULL)
1687 : {
1688 46393834 : mem_elt->next_containing_mem = first_containing_mem;
1689 46393834 : first_containing_mem = mem_elt;
1690 : }
1691 : }
1692 :
1693 : /* Subroutine of cselib_lookup. Return a value for X, which is a MEM rtx.
1694 : If CREATE, make a new one if we haven't seen it before. */
1695 :
1696 : static cselib_val *
1697 239269134 : cselib_lookup_mem (rtx x, int create)
1698 : {
1699 239269134 : machine_mode mode = GET_MODE (x);
1700 239269134 : machine_mode addr_mode;
1701 239269134 : cselib_val **slot;
1702 239269134 : cselib_val *addr;
1703 239269134 : cselib_val *mem_elt;
1704 :
1705 474788211 : if (MEM_VOLATILE_P (x) || mode == BLKmode
1706 235331860 : || !cselib_record_memory
1707 428518699 : || (FLOAT_MODE_P (mode) && flag_float_store))
1708 : return 0;
1709 :
1710 189236870 : addr_mode = GET_MODE (XEXP (x, 0));
1711 189236870 : if (addr_mode == VOIDmode)
1712 1001894 : addr_mode = Pmode;
1713 :
1714 : /* Look up the value for the address. */
1715 189236870 : addr = cselib_lookup (XEXP (x, 0), addr_mode, create, mode);
1716 189236870 : if (! addr)
1717 : return 0;
1718 116475403 : addr = canonical_cselib_val (addr);
1719 :
1720 : /* Find a value that describes a value of our mode at that address. */
1721 116475403 : addr_space_t as = MEM_ADDR_SPACE (x);
1722 118914380 : for (elt_list *l = addr->addr_list; l; l = l->next)
1723 73380084 : if (GET_MODE (l->elt->val_rtx) == mode)
1724 : {
1725 77241356 : for (elt_loc_list *l2 = l->elt->locs; l2; l2 = l2->next)
1726 81276124 : if (MEM_P (l2->loc) && MEM_ADDR_SPACE (l2->loc) == as)
1727 : {
1728 70941107 : promote_debug_loc (l->elt->locs);
1729 70941107 : return l->elt;
1730 : }
1731 : }
1732 :
1733 45534296 : if (! create)
1734 : return 0;
1735 :
1736 28546364 : mem_elt = new_cselib_val (next_uid, mode, x);
1737 28546364 : add_mem_for_addr (addr, mem_elt, x);
1738 28546364 : slot = cselib_find_slot (mode, x, mem_elt->hash, INSERT, VOIDmode);
1739 28546364 : *slot = mem_elt;
1740 28546364 : return mem_elt;
1741 : }
1742 :
1743 : /* Search through the possible substitutions in P. We prefer a non reg
1744 : substitution because this allows us to expand the tree further. If
1745 : we find, just a reg, take the lowest regno. There may be several
1746 : non-reg results, we just take the first one because they will all
1747 : expand to the same place. */
1748 :
1749 : static rtx
1750 24629968 : expand_loc (struct elt_loc_list *p, struct expand_value_data *evd,
1751 : int max_depth)
1752 : {
1753 24629968 : rtx reg_result = NULL;
1754 24629968 : unsigned int regno = UINT_MAX;
1755 24629968 : struct elt_loc_list *p_in = p;
1756 :
1757 53467109 : for (; p; p = p->next)
1758 : {
1759 : /* Return these right away to avoid returning stack pointer based
1760 : expressions for frame pointer and vice versa, which is something
1761 : that would confuse DSE. See the comment in cselib_expand_value_rtx_1
1762 : for more details. */
1763 32791842 : if (REG_P (p->loc)
1764 32791842 : && (REGNO (p->loc) == STACK_POINTER_REGNUM
1765 : || REGNO (p->loc) == FRAME_POINTER_REGNUM
1766 : || REGNO (p->loc) == HARD_FRAME_POINTER_REGNUM
1767 26027155 : || REGNO (p->loc) == cfa_base_preserved_regno))
1768 : return p->loc;
1769 : /* Avoid infinite recursion trying to expand a reg into a
1770 : the same reg. */
1771 32286695 : if ((REG_P (p->loc))
1772 26020619 : && (REGNO (p->loc) < regno)
1773 58005705 : && !bitmap_bit_p (evd->regs_active, REGNO (p->loc)))
1774 : {
1775 4529288 : reg_result = p->loc;
1776 4529288 : regno = REGNO (p->loc);
1777 : }
1778 : /* Avoid infinite recursion and do not try to expand the
1779 : value. */
1780 27757407 : else if (GET_CODE (p->loc) == VALUE
1781 356431 : && CSELIB_VAL_PTR (p->loc)->locs == p_in)
1782 0 : continue;
1783 27757407 : else if (!REG_P (p->loc))
1784 : {
1785 6266076 : rtx result, note;
1786 6266076 : if (dump_file && (dump_flags & TDF_CSELIB))
1787 : {
1788 0 : print_inline_rtx (dump_file, p->loc, 0);
1789 0 : fprintf (dump_file, "\n");
1790 : }
1791 6266076 : if (GET_CODE (p->loc) == LO_SUM
1792 0 : && GET_CODE (XEXP (p->loc, 1)) == SYMBOL_REF
1793 0 : && p->setting_insn
1794 0 : && (note = find_reg_note (p->setting_insn, REG_EQUAL, NULL_RTX))
1795 6266076 : && XEXP (note, 0) == XEXP (p->loc, 1))
1796 : return XEXP (p->loc, 1);
1797 6266076 : result = cselib_expand_value_rtx_1 (p->loc, evd, max_depth - 1);
1798 6266076 : if (result)
1799 : return result;
1800 : }
1801 :
1802 : }
1803 :
1804 20675267 : if (regno != UINT_MAX)
1805 : {
1806 3772133 : rtx result;
1807 3772133 : if (dump_file && (dump_flags & TDF_CSELIB))
1808 0 : fprintf (dump_file, "r%d\n", regno);
1809 :
1810 3772133 : result = cselib_expand_value_rtx_1 (reg_result, evd, max_depth - 1);
1811 3772133 : if (result)
1812 : return result;
1813 : }
1814 :
1815 17829450 : if (dump_file && (dump_flags & TDF_CSELIB))
1816 : {
1817 0 : if (reg_result)
1818 : {
1819 0 : print_inline_rtx (dump_file, reg_result, 0);
1820 0 : fprintf (dump_file, "\n");
1821 : }
1822 : else
1823 0 : fprintf (dump_file, "NULL\n");
1824 : }
1825 : return reg_result;
1826 : }
1827 :
1828 :
1829 : /* Forward substitute and expand an expression out to its roots.
1830 : This is the opposite of common subexpression. Because local value
1831 : numbering is such a weak optimization, the expanded expression is
1832 : pretty much unique (not from a pointer equals point of view but
1833 : from a tree shape point of view.
1834 :
1835 : This function returns NULL if the expansion fails. The expansion
1836 : will fail if there is no value number for one of the operands or if
1837 : one of the operands has been overwritten between the current insn
1838 : and the beginning of the basic block. For instance x has no
1839 : expansion in:
1840 :
1841 : r1 <- r1 + 3
1842 : x <- r1 + 8
1843 :
1844 : REGS_ACTIVE is a scratch bitmap that should be clear when passing in.
1845 : It is clear on return. */
1846 :
1847 : rtx
1848 37470098 : cselib_expand_value_rtx (rtx orig, bitmap regs_active, int max_depth)
1849 : {
1850 37470098 : struct expand_value_data evd;
1851 :
1852 37470098 : evd.regs_active = regs_active;
1853 37470098 : evd.callback = NULL;
1854 37470098 : evd.callback_arg = NULL;
1855 37470098 : evd.dummy = false;
1856 :
1857 37470098 : return cselib_expand_value_rtx_1 (orig, &evd, max_depth);
1858 : }
1859 :
1860 : /* Same as cselib_expand_value_rtx, but using a callback to try to
1861 : resolve some expressions. The CB function should return ORIG if it
1862 : can't or does not want to deal with a certain RTX. Any other
1863 : return value, including NULL, will be used as the expansion for
1864 : VALUE, without any further changes. */
1865 :
1866 : rtx
1867 90268989 : cselib_expand_value_rtx_cb (rtx orig, bitmap regs_active, int max_depth,
1868 : cselib_expand_callback cb, void *data)
1869 : {
1870 90268989 : struct expand_value_data evd;
1871 :
1872 90268989 : evd.regs_active = regs_active;
1873 90268989 : evd.callback = cb;
1874 90268989 : evd.callback_arg = data;
1875 90268989 : evd.dummy = false;
1876 :
1877 90268989 : return cselib_expand_value_rtx_1 (orig, &evd, max_depth);
1878 : }
1879 :
1880 : /* Similar to cselib_expand_value_rtx_cb, but no rtxs are actually copied
1881 : or simplified. Useful to find out whether cselib_expand_value_rtx_cb
1882 : would return NULL or non-NULL, without allocating new rtx. */
1883 :
1884 : bool
1885 0 : cselib_dummy_expand_value_rtx_cb (rtx orig, bitmap regs_active, int max_depth,
1886 : cselib_expand_callback cb, void *data)
1887 : {
1888 0 : struct expand_value_data evd;
1889 :
1890 0 : evd.regs_active = regs_active;
1891 0 : evd.callback = cb;
1892 0 : evd.callback_arg = data;
1893 0 : evd.dummy = true;
1894 :
1895 0 : return cselib_expand_value_rtx_1 (orig, &evd, max_depth) != NULL;
1896 : }
1897 :
1898 : /* Internal implementation of cselib_expand_value_rtx and
1899 : cselib_expand_value_rtx_cb. */
1900 :
1901 : static rtx
1902 209591156 : cselib_expand_value_rtx_1 (rtx orig, struct expand_value_data *evd,
1903 : int max_depth)
1904 : {
1905 209591156 : rtx copy, scopy;
1906 209591156 : int i, j;
1907 209591156 : RTX_CODE code;
1908 209591156 : const char *format_ptr;
1909 209591156 : machine_mode mode;
1910 :
1911 209591156 : code = GET_CODE (orig);
1912 :
1913 : /* For the context of dse, if we end up expand into a huge tree, we
1914 : will not have a useful address, so we might as well just give up
1915 : quickly. */
1916 209591156 : if (max_depth <= 0)
1917 : return NULL;
1918 :
1919 206756599 : switch (code)
1920 : {
1921 51472181 : case REG:
1922 51472181 : {
1923 51472181 : struct elt_list *l = REG_VALUES (REGNO (orig));
1924 :
1925 51472181 : if (l && l->elt == NULL)
1926 25629535 : l = l->next;
1927 51486912 : for (; l; l = l->next)
1928 38585996 : if (GET_MODE (l->elt->val_rtx) == GET_MODE (orig))
1929 : {
1930 38571265 : rtx result;
1931 38571265 : unsigned regno = REGNO (orig);
1932 :
1933 : /* The only thing that we are not willing to do (this
1934 : is requirement of dse and if others potential uses
1935 : need this function we should add a parm to control
1936 : it) is that we will not substitute the
1937 : STACK_POINTER_REGNUM, FRAME_POINTER or the
1938 : HARD_FRAME_POINTER.
1939 :
1940 : These expansions confuses the code that notices that
1941 : stores into the frame go dead at the end of the
1942 : function and that the frame is not effected by calls
1943 : to subroutines. If you allow the
1944 : STACK_POINTER_REGNUM substitution, then dse will
1945 : think that parameter pushing also goes dead which is
1946 : wrong. If you allow the FRAME_POINTER or the
1947 : HARD_FRAME_POINTER then you lose the opportunity to
1948 : make the frame assumptions. */
1949 38571265 : if (regno == STACK_POINTER_REGNUM
1950 38571265 : || regno == FRAME_POINTER_REGNUM
1951 20897987 : || regno == HARD_FRAME_POINTER_REGNUM
1952 20364634 : || regno == cfa_base_preserved_regno)
1953 : return orig;
1954 :
1955 20062127 : bitmap_set_bit (evd->regs_active, regno);
1956 :
1957 20062127 : if (dump_file && (dump_flags & TDF_CSELIB))
1958 0 : fprintf (dump_file, "expanding: r%d into: ", regno);
1959 :
1960 20062127 : result = expand_loc (l->elt->locs, evd, max_depth);
1961 20062127 : bitmap_clear_bit (evd->regs_active, regno);
1962 :
1963 20062127 : if (result)
1964 : return result;
1965 : else
1966 : return orig;
1967 : }
1968 : return orig;
1969 : }
1970 :
1971 : CASE_CONST_ANY:
1972 : case SYMBOL_REF:
1973 : case CODE_LABEL:
1974 : case PC:
1975 : case SCRATCH:
1976 : /* SCRATCH must be shared because they represent distinct values. */
1977 : return orig;
1978 0 : case CLOBBER:
1979 0 : if (REG_P (XEXP (orig, 0)) && HARD_REGISTER_NUM_P (REGNO (XEXP (orig, 0))))
1980 : return orig;
1981 : break;
1982 :
1983 283395 : case CONST:
1984 283395 : if (shared_const_p (orig))
1985 : return orig;
1986 : break;
1987 :
1988 958535 : case SUBREG:
1989 958535 : {
1990 958535 : rtx subreg;
1991 :
1992 958535 : if (evd->callback)
1993 : {
1994 863355 : subreg = evd->callback (orig, evd->regs_active, max_depth,
1995 : evd->callback_arg);
1996 863355 : if (subreg != orig)
1997 : return subreg;
1998 : }
1999 :
2000 95180 : subreg = cselib_expand_value_rtx_1 (SUBREG_REG (orig), evd,
2001 : max_depth - 1);
2002 95180 : if (!subreg)
2003 : return NULL;
2004 104282 : scopy = simplify_gen_subreg (GET_MODE (orig), subreg,
2005 52141 : GET_MODE (SUBREG_REG (orig)),
2006 52141 : SUBREG_BYTE (orig));
2007 52141 : if (scopy == NULL
2008 51059 : || (GET_CODE (scopy) == SUBREG
2009 47188 : && !REG_P (SUBREG_REG (scopy))
2010 5798 : && !MEM_P (SUBREG_REG (scopy))))
2011 : return NULL;
2012 :
2013 : return scopy;
2014 : }
2015 :
2016 72174260 : case VALUE:
2017 72174260 : {
2018 72174260 : rtx result;
2019 :
2020 72174260 : if (dump_file && (dump_flags & TDF_CSELIB))
2021 : {
2022 0 : fputs ("\nexpanding ", dump_file);
2023 0 : print_rtl_single (dump_file, orig);
2024 0 : fputs (" into...", dump_file);
2025 : }
2026 :
2027 72174260 : if (evd->callback)
2028 : {
2029 67606419 : result = evd->callback (orig, evd->regs_active, max_depth,
2030 : evd->callback_arg);
2031 :
2032 67606419 : if (result != orig)
2033 : return result;
2034 : }
2035 :
2036 4567841 : result = expand_loc (CSELIB_VAL_PTR (orig)->locs, evd, max_depth);
2037 4567841 : return result;
2038 : }
2039 :
2040 6137483 : case DEBUG_EXPR:
2041 6137483 : if (evd->callback)
2042 6137483 : return evd->callback (orig, evd->regs_active, max_depth,
2043 6137483 : evd->callback_arg);
2044 : return orig;
2045 :
2046 : default:
2047 : break;
2048 : }
2049 :
2050 : /* Copy the various flags, fields, and other information. We assume
2051 : that all fields need copying, and then clear the fields that should
2052 : not be copied. That is the sensible default behavior, and forces
2053 : us to explicitly document why we are *not* copying a flag. */
2054 45410840 : if (evd->dummy)
2055 : copy = NULL;
2056 : else
2057 45410840 : copy = shallow_copy_rtx (orig);
2058 :
2059 45410840 : format_ptr = GET_RTX_FORMAT (code);
2060 :
2061 119330109 : for (i = 0; i < GET_RTX_LENGTH (code); i++)
2062 78107264 : switch (*format_ptr++)
2063 : {
2064 71239576 : case 'e':
2065 71239576 : if (XEXP (orig, i) != NULL)
2066 : {
2067 71239576 : rtx result = cselib_expand_value_rtx_1 (XEXP (orig, i), evd,
2068 : max_depth - 1);
2069 71239576 : if (!result)
2070 : return NULL;
2071 67086335 : if (copy)
2072 67086335 : XEXP (copy, i) = result;
2073 : }
2074 : break;
2075 :
2076 301491 : case 'E':
2077 301491 : case 'V':
2078 301491 : if (XVEC (orig, i) != NULL)
2079 : {
2080 301491 : if (copy)
2081 301491 : XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2082 745841 : for (j = 0; j < XVECLEN (orig, i); j++)
2083 : {
2084 479104 : rtx result = cselib_expand_value_rtx_1 (XVECEXP (orig, i, j),
2085 : evd, max_depth - 1);
2086 479104 : if (!result)
2087 : return NULL;
2088 444350 : if (copy)
2089 444350 : XVECEXP (copy, i, j) = result;
2090 : }
2091 : }
2092 : break;
2093 :
2094 : case 't':
2095 : case 'w':
2096 : case 'i':
2097 : case 'L':
2098 : case 's':
2099 : case 'S':
2100 : case 'T':
2101 : case 'u':
2102 : case 'B':
2103 : case '0':
2104 : /* These are left unchanged. */
2105 : break;
2106 :
2107 0 : default:
2108 0 : gcc_unreachable ();
2109 : }
2110 :
2111 41222845 : if (evd->dummy)
2112 : return orig;
2113 :
2114 41222845 : mode = GET_MODE (copy);
2115 : /* If an operand has been simplified into CONST_INT, which doesn't
2116 : have a mode and the mode isn't derivable from whole rtx's mode,
2117 : try simplify_*_operation first with mode from original's operand
2118 : and as a fallback wrap CONST_INT into gen_rtx_CONST. */
2119 41222845 : scopy = copy;
2120 41222845 : switch (GET_RTX_CLASS (code))
2121 : {
2122 748615 : case RTX_UNARY:
2123 748615 : if (CONST_INT_P (XEXP (copy, 0))
2124 50889 : && GET_MODE (XEXP (orig, 0)) != VOIDmode)
2125 : {
2126 50881 : scopy = simplify_unary_operation (code, mode, XEXP (copy, 0),
2127 : GET_MODE (XEXP (orig, 0)));
2128 50881 : if (scopy)
2129 : return scopy;
2130 : }
2131 : break;
2132 : case RTX_COMM_ARITH:
2133 : case RTX_BIN_ARITH:
2134 : /* These expressions can derive operand modes from the whole rtx's mode. */
2135 : break;
2136 59833 : case RTX_TERNARY:
2137 59833 : case RTX_BITFIELD_OPS:
2138 59833 : if (CONST_INT_P (XEXP (copy, 0))
2139 78 : && GET_MODE (XEXP (orig, 0)) != VOIDmode)
2140 : {
2141 1 : scopy = simplify_ternary_operation (code, mode,
2142 : GET_MODE (XEXP (orig, 0)),
2143 : XEXP (copy, 0), XEXP (copy, 1),
2144 : XEXP (copy, 2));
2145 1 : if (scopy)
2146 : return scopy;
2147 : }
2148 : break;
2149 168588 : case RTX_COMPARE:
2150 168588 : case RTX_COMM_COMPARE:
2151 168588 : if (CONST_INT_P (XEXP (copy, 0))
2152 614 : && GET_MODE (XEXP (copy, 1)) == VOIDmode
2153 174 : && (GET_MODE (XEXP (orig, 0)) != VOIDmode
2154 6 : || GET_MODE (XEXP (orig, 1)) != VOIDmode))
2155 : {
2156 174 : scopy = simplify_relational_operation (code, mode,
2157 : (GET_MODE (XEXP (orig, 0))
2158 : != VOIDmode)
2159 : ? GET_MODE (XEXP (orig, 0))
2160 6 : : GET_MODE (XEXP (orig, 1)),
2161 : XEXP (copy, 0),
2162 : XEXP (copy, 1));
2163 174 : if (scopy)
2164 : return scopy;
2165 : }
2166 : break;
2167 : default:
2168 : break;
2169 : }
2170 41171790 : scopy = simplify_rtx (copy);
2171 41171790 : if (scopy)
2172 4920438 : return scopy;
2173 : return copy;
2174 : }
2175 :
2176 : /* Walk rtx X and replace all occurrences of REG and MEM subexpressions
2177 : with VALUE expressions. This way, it becomes independent of changes
2178 : to registers and memory.
2179 : X isn't actually modified; if modifications are needed, new rtl is
2180 : allocated. However, the return value can share rtl with X.
2181 : If X is within a MEM, MEMMODE must be the mode of the MEM. */
2182 :
2183 : rtx
2184 590720535 : cselib_subst_to_values (rtx x, machine_mode memmode)
2185 : {
2186 598386660 : enum rtx_code code = GET_CODE (x);
2187 598386660 : const char *fmt = GET_RTX_FORMAT (code);
2188 598386660 : cselib_val *e;
2189 598386660 : struct elt_list *l;
2190 598386660 : rtx copy = x;
2191 598386660 : int i;
2192 598386660 : poly_int64 offset;
2193 :
2194 598386660 : switch (code)
2195 : {
2196 234119655 : case REG:
2197 234119655 : l = REG_VALUES (REGNO (x));
2198 234119655 : if (l && l->elt == NULL)
2199 134691829 : l = l->next;
2200 236391381 : for (; l; l = l->next)
2201 236391381 : if (GET_MODE (l->elt->val_rtx) == GET_MODE (x))
2202 : return l->elt->val_rtx;
2203 :
2204 0 : gcc_unreachable ();
2205 :
2206 6128420 : case MEM:
2207 6128420 : e = cselib_lookup_mem (x, 0);
2208 : /* This used to happen for autoincrements, but we deal with them
2209 : properly now. Remove the if stmt for the next release. */
2210 6128420 : if (! e)
2211 : {
2212 : /* Assign a value that doesn't match any other. */
2213 0 : e = new_cselib_val (next_uid, GET_MODE (x), x);
2214 : }
2215 6128420 : return e->val_rtx;
2216 :
2217 680254 : case ENTRY_VALUE:
2218 680254 : e = cselib_lookup (x, GET_MODE (x), 0, memmode);
2219 680254 : if (! e)
2220 : break;
2221 1021 : return e->val_rtx;
2222 :
2223 : CASE_CONST_ANY:
2224 : return x;
2225 :
2226 6354152 : case PRE_DEC:
2227 6354152 : case PRE_INC:
2228 6354152 : gcc_assert (memmode != VOIDmode);
2229 12708304 : offset = GET_MODE_SIZE (memmode);
2230 6354152 : if (code == PRE_DEC)
2231 6354152 : offset = -offset;
2232 6354152 : return cselib_subst_to_values (plus_constant (GET_MODE (x),
2233 : XEXP (x, 0), offset),
2234 6354152 : memmode);
2235 :
2236 378575 : case PRE_MODIFY:
2237 378575 : gcc_assert (memmode != VOIDmode);
2238 378575 : return cselib_subst_to_values (XEXP (x, 1), memmode);
2239 :
2240 933398 : case POST_DEC:
2241 933398 : case POST_INC:
2242 933398 : case POST_MODIFY:
2243 933398 : gcc_assert (memmode != VOIDmode);
2244 933398 : return cselib_subst_to_values (XEXP (x, 0), memmode);
2245 :
2246 116379770 : case PLUS:
2247 151720304 : if (GET_MODE (x) == Pmode && CONST_INT_P (XEXP (x, 1)))
2248 : {
2249 94448075 : rtx t = cselib_subst_to_values (XEXP (x, 0), memmode);
2250 94448075 : if (GET_CODE (t) == VALUE)
2251 : {
2252 88528559 : if (SP_DERIVED_VALUE_P (t) && XEXP (x, 1) == const0_rtx)
2253 : return t;
2254 88528559 : for (struct elt_loc_list *l = CSELIB_VAL_PTR (t)->locs;
2255 186897191 : l; l = l->next)
2256 119724881 : if (GET_CODE (l->loc) == PLUS
2257 25161481 : && GET_CODE (XEXP (l->loc, 0)) == VALUE
2258 24947660 : && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))
2259 141082396 : && CONST_INT_P (XEXP (l->loc, 1)))
2260 35181285 : return plus_constant (Pmode, l->loc, INTVAL (XEXP (x, 1)));
2261 : }
2262 73091826 : if (t != XEXP (x, 0))
2263 : {
2264 68772245 : copy = shallow_copy_rtx (x);
2265 68772245 : XEXP (copy, 0) = t;
2266 : }
2267 73091826 : return copy;
2268 : }
2269 :
2270 : default:
2271 : break;
2272 : }
2273 :
2274 477052070 : for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2275 : {
2276 311494646 : if (fmt[i] == 'e')
2277 : {
2278 229733038 : rtx t = cselib_subst_to_values (XEXP (x, i), memmode);
2279 :
2280 229733038 : if (t != XEXP (x, i))
2281 : {
2282 168161001 : if (x == copy)
2283 119326373 : copy = shallow_copy_rtx (x);
2284 168161001 : XEXP (copy, i) = t;
2285 : }
2286 : }
2287 81761608 : else if (fmt[i] == 'E')
2288 : {
2289 : int j;
2290 :
2291 19157983 : for (j = 0; j < XVECLEN (x, i); j++)
2292 : {
2293 13598007 : rtx t = cselib_subst_to_values (XVECEXP (x, i, j), memmode);
2294 :
2295 13598007 : if (t != XVECEXP (x, i, j))
2296 : {
2297 2583476 : if (XVEC (x, i) == XVEC (copy, i))
2298 : {
2299 1925974 : if (x == copy)
2300 1925974 : copy = shallow_copy_rtx (x);
2301 1925974 : XVEC (copy, i) = shallow_copy_rtvec (XVEC (x, i));
2302 : }
2303 2583476 : XVECEXP (copy, i, j) = t;
2304 : }
2305 : }
2306 : }
2307 : }
2308 :
2309 : return copy;
2310 : }
2311 :
2312 : /* Wrapper for cselib_subst_to_values, that indicates X is in INSN. */
2313 :
2314 : rtx
2315 1463 : cselib_subst_to_values_from_insn (rtx x, machine_mode memmode, rtx_insn *insn)
2316 : {
2317 1463 : rtx ret;
2318 1463 : gcc_assert (!cselib_current_insn);
2319 1463 : cselib_current_insn = insn;
2320 1463 : ret = cselib_subst_to_values (x, memmode);
2321 1463 : cselib_current_insn = NULL;
2322 1463 : return ret;
2323 : }
2324 :
2325 : /* Look up the rtl expression X in our tables and return the value it
2326 : has. If CREATE is zero, we return NULL if we don't know the value.
2327 : Otherwise, we create a new one if possible, using mode MODE if X
2328 : doesn't have a mode (i.e. because it's a constant). When X is part
2329 : of an address, MEMMODE should be the mode of the enclosing MEM if
2330 : we're tracking autoinc expressions. */
2331 :
2332 : static cselib_val *
2333 2416089918 : cselib_lookup_1 (rtx x, machine_mode mode,
2334 : int create, machine_mode memmode)
2335 : {
2336 2416089918 : cselib_val **slot;
2337 2416089918 : cselib_val *e;
2338 :
2339 2416089918 : if (GET_MODE (x) != VOIDmode)
2340 2332613999 : mode = GET_MODE (x);
2341 :
2342 2416089918 : if (GET_CODE (x) == VALUE)
2343 64805098 : return CSELIB_VAL_PTR (x);
2344 :
2345 2351284820 : if (REG_P (x))
2346 : {
2347 1515889394 : struct elt_list *l;
2348 1515889394 : unsigned int i = REGNO (x);
2349 :
2350 1515889394 : l = REG_VALUES (i);
2351 1515889394 : if (l && l->elt == NULL)
2352 632596001 : l = l->next;
2353 1536098791 : for (; l; l = l->next)
2354 1171007340 : if (mode == GET_MODE (l->elt->val_rtx))
2355 : {
2356 1150797943 : promote_debug_loc (l->elt->locs);
2357 1150797943 : return l->elt;
2358 : }
2359 :
2360 365091451 : if (! create)
2361 : return 0;
2362 :
2363 138128670 : if (i < FIRST_PSEUDO_REGISTER)
2364 : {
2365 78456230 : unsigned int n = hard_regno_nregs (i, mode);
2366 :
2367 78456230 : if (n > max_value_regs)
2368 27146923 : max_value_regs = n;
2369 : }
2370 :
2371 138128670 : e = new_cselib_val (next_uid, GET_MODE (x), x);
2372 162528030 : if (GET_MODE (x) == Pmode && x == stack_pointer_rtx)
2373 12092236 : SP_DERIVED_VALUE_P (e->val_rtx) = 1;
2374 138128670 : new_elt_loc_list (e, x);
2375 :
2376 138128670 : scalar_int_mode int_mode;
2377 138128670 : if (REG_VALUES (i) == 0)
2378 : {
2379 : /* Maintain the invariant that the first entry of
2380 : REG_VALUES, if present, must be the value used to set the
2381 : register, or NULL. */
2382 128284503 : used_regs[n_used_regs++] = i;
2383 128284503 : REG_VALUES (i) = new_elt_list (REG_VALUES (i), NULL);
2384 : }
2385 9844167 : else if (cselib_preserve_constants
2386 9844167 : && is_int_mode (mode, &int_mode))
2387 : {
2388 : /* During var-tracking, try harder to find equivalences
2389 : for SUBREGs. If a setter sets say a DImode register
2390 : and user uses that register only in SImode, add a lowpart
2391 : subreg location. */
2392 1571806 : struct elt_list *lwider = NULL;
2393 1571806 : scalar_int_mode lmode;
2394 1571806 : l = REG_VALUES (i);
2395 1571806 : if (l && l->elt == NULL)
2396 1016736 : l = l->next;
2397 2370098 : for (; l; l = l->next)
2398 798292 : if (is_int_mode (GET_MODE (l->elt->val_rtx), &lmode)
2399 1136912 : && GET_MODE_SIZE (lmode) > GET_MODE_SIZE (int_mode)
2400 273120 : && (lwider == NULL
2401 4941 : || partial_subreg_p (lmode,
2402 4941 : GET_MODE (lwider->elt->val_rtx))))
2403 : {
2404 272419 : struct elt_loc_list *el;
2405 289309 : if (i < FIRST_PSEUDO_REGISTER
2406 272419 : && hard_regno_nregs (i, lmode) != 1)
2407 16890 : continue;
2408 501872 : for (el = l->elt->locs; el; el = el->next)
2409 470337 : if (!REG_P (el->loc))
2410 : break;
2411 255529 : if (el)
2412 798292 : lwider = l;
2413 : }
2414 1571806 : if (lwider)
2415 : {
2416 439510 : rtx sub = lowpart_subreg (int_mode, lwider->elt->val_rtx,
2417 219755 : GET_MODE (lwider->elt->val_rtx));
2418 219755 : if (sub)
2419 219755 : new_elt_loc_list (e, sub);
2420 : }
2421 : }
2422 138128670 : REG_VALUES (i)->next = new_elt_list (REG_VALUES (i)->next, e);
2423 138128670 : slot = cselib_find_slot (mode, x, e->hash, INSERT, memmode);
2424 138128670 : *slot = e;
2425 138128670 : return e;
2426 : }
2427 :
2428 835395426 : if (MEM_P (x))
2429 233140714 : return cselib_lookup_mem (x, create);
2430 :
2431 602254712 : hashval_t hashval = cselib_hash_rtx (x, create, memmode);
2432 : /* Can't even create if hashing is not possible. */
2433 602254712 : if (! hashval)
2434 : return 0;
2435 :
2436 772252093 : slot = cselib_find_slot (mode, x, hashval,
2437 : create ? INSERT : NO_INSERT, memmode);
2438 549715036 : if (slot == 0)
2439 : return 0;
2440 :
2441 438351645 : e = (cselib_val *) *slot;
2442 438351645 : if (e)
2443 : return e;
2444 :
2445 252939952 : e = new_cselib_val (hashval, mode, x);
2446 :
2447 : /* We have to fill the slot before calling cselib_subst_to_values:
2448 : the hash table is inconsistent until we do so, and
2449 : cselib_subst_to_values will need to do lookups. */
2450 252939952 : *slot = e;
2451 252939952 : rtx v = cselib_subst_to_values (x, memmode);
2452 :
2453 : /* If cselib_preserve_constants, we might get a SP_DERIVED_VALUE_P
2454 : VALUE that isn't in the hash tables anymore. */
2455 252939952 : if (GET_CODE (v) == VALUE && SP_DERIVED_VALUE_P (v) && PRESERVED_VALUE_P (v))
2456 0 : PRESERVED_VALUE_P (e->val_rtx) = 1;
2457 :
2458 252939952 : new_elt_loc_list (e, v);
2459 252939952 : return e;
2460 : }
2461 :
2462 : /* Wrapper for cselib_lookup, that indicates X is in INSN. */
2463 :
2464 : cselib_val *
2465 6296254 : cselib_lookup_from_insn (rtx x, machine_mode mode,
2466 : int create, machine_mode memmode, rtx_insn *insn)
2467 : {
2468 6296254 : cselib_val *ret;
2469 :
2470 6296254 : gcc_assert (!cselib_current_insn);
2471 6296254 : cselib_current_insn = insn;
2472 :
2473 6296254 : ret = cselib_lookup (x, mode, create, memmode);
2474 :
2475 6296254 : cselib_current_insn = NULL;
2476 :
2477 6296254 : return ret;
2478 : }
2479 :
2480 : /* Wrapper for cselib_lookup_1, that logs the lookup result and
2481 : maintains invariants related with debug insns. */
2482 :
2483 : cselib_val *
2484 2415032633 : cselib_lookup (rtx x, machine_mode mode,
2485 : int create, machine_mode memmode)
2486 : {
2487 2415032633 : cselib_val *ret = cselib_lookup_1 (x, mode, create, memmode);
2488 :
2489 : /* ??? Should we return NULL if we're not to create an entry, the
2490 : found loc is a debug loc and cselib_current_insn is not DEBUG?
2491 : If so, we should also avoid converting val to non-DEBUG; probably
2492 : easiest setting cselib_current_insn to NULL before the call
2493 : above. */
2494 :
2495 2415032633 : if (dump_file && (dump_flags & TDF_CSELIB))
2496 : {
2497 0 : fputs ("cselib lookup ", dump_file);
2498 0 : print_inline_rtx (dump_file, x, 2);
2499 0 : fprintf (dump_file, " => %u:%u\n",
2500 : ret ? ret->uid : 0,
2501 : ret ? ret->hash : 0);
2502 : }
2503 :
2504 2415032633 : return ret;
2505 : }
2506 :
2507 : /* Invalidate the value at *L, which is part of REG_VALUES (REGNO). */
2508 :
2509 : static void
2510 183968535 : cselib_invalidate_regno_val (unsigned int regno, struct elt_list **l)
2511 : {
2512 183968535 : cselib_val *v = (*l)->elt;
2513 183968535 : if (*l == REG_VALUES (regno))
2514 : {
2515 : /* Maintain the invariant that the first entry of
2516 : REG_VALUES, if present, must be the value used to set
2517 : the register, or NULL. This is also nice because
2518 : then we won't push the same regno onto user_regs
2519 : multiple times. */
2520 141152994 : (*l)->elt = NULL;
2521 141152994 : l = &(*l)->next;
2522 : }
2523 : else
2524 42815541 : unchain_one_elt_list (l);
2525 :
2526 183968535 : v = canonical_cselib_val (v);
2527 :
2528 183968535 : bool had_locs = v->locs != NULL;
2529 183968535 : rtx_insn *setting_insn = v->locs ? v->locs->setting_insn : NULL;
2530 :
2531 : /* Now, we clear the mapping from value to reg. It must exist, so
2532 : this code will crash intentionally if it doesn't. */
2533 183968535 : for (elt_loc_list **p = &v->locs; ; p = &(*p)->next)
2534 : {
2535 212082861 : rtx x = (*p)->loc;
2536 :
2537 212082861 : if (REG_P (x) && REGNO (x) == regno)
2538 : {
2539 183968535 : unchain_one_elt_loc_list (p);
2540 183968535 : break;
2541 : }
2542 28114326 : }
2543 :
2544 183968535 : if (had_locs && cselib_useless_value_p (v))
2545 : {
2546 21547151 : if (setting_insn && DEBUG_INSN_P (setting_insn))
2547 0 : n_useless_debug_values++;
2548 : else
2549 21547151 : n_useless_values++;
2550 : }
2551 183968535 : }
2552 :
2553 : /* Invalidate any entries in reg_values that overlap REGNO. This is called
2554 : if REGNO is changing. MODE is the mode of the assignment to REGNO, which
2555 : is used to determine how many hard registers are being changed. If MODE
2556 : is VOIDmode, then only REGNO is being changed; this is used when
2557 : invalidating call clobbered registers across a call. */
2558 :
2559 : static void
2560 805229279 : cselib_invalidate_regno (unsigned int regno, machine_mode mode)
2561 : {
2562 805229279 : unsigned int endregno;
2563 805229279 : unsigned int i;
2564 :
2565 : /* If we see pseudos after reload, something is _wrong_. */
2566 805229279 : gcc_assert (!reload_completed || regno < FIRST_PSEUDO_REGISTER
2567 : || reg_renumber[regno] < 0);
2568 :
2569 : /* Determine the range of registers that must be invalidated. For
2570 : pseudos, only REGNO is affected. For hard regs, we must take MODE
2571 : into account, and we must also invalidate lower register numbers
2572 : if they contain values that overlap REGNO. */
2573 222807506 : if (regno < FIRST_PSEUDO_REGISTER)
2574 : {
2575 700432279 : gcc_assert (mode != VOIDmode);
2576 :
2577 700432279 : if (regno < max_value_regs)
2578 : i = 0;
2579 : else
2580 659225750 : i = regno - max_value_regs;
2581 :
2582 700432279 : endregno = end_hard_regno (mode, regno);
2583 : }
2584 : else
2585 : {
2586 104797000 : i = regno;
2587 104797000 : endregno = regno + 1;
2588 : }
2589 :
2590 2231808229 : for (; i < endregno; i++)
2591 : {
2592 1426578950 : struct elt_list **l = ®_VALUES (i);
2593 :
2594 : /* Go through all known values for this reg; if it overlaps the range
2595 : we're invalidating, remove the value. */
2596 1812785745 : while (*l)
2597 : {
2598 386206795 : cselib_val *v = (*l)->elt;
2599 386206795 : unsigned int this_last = i;
2600 :
2601 386206795 : if (i < FIRST_PSEUDO_REGISTER && v != NULL)
2602 167756831 : this_last = end_hard_regno (GET_MODE (v->val_rtx), i) - 1;
2603 :
2604 386206795 : if (this_last < regno || v == NULL
2605 118665422 : || (v == cfa_base_preserved_val
2606 4411450 : && i == cfa_base_preserved_regno))
2607 : {
2608 271951188 : l = &(*l)->next;
2609 271951188 : continue;
2610 : }
2611 :
2612 : /* We have an overlap. */
2613 114255607 : cselib_invalidate_regno_val (i, l);
2614 : }
2615 : }
2616 805229279 : }
2617 :
2618 : /* Invalidate any locations in the table which are changed because of a
2619 : store to MEM_RTX. If this is called because of a non-const call
2620 : instruction, MEM_RTX is (mem:BLK const0_rtx). */
2621 :
2622 : static void
2623 114287754 : cselib_invalidate_mem (rtx mem_rtx)
2624 : {
2625 114287754 : cselib_val **vp, *v, *next;
2626 114287754 : int num_mems = 0;
2627 114287754 : rtx mem_addr;
2628 :
2629 114287754 : mem_addr = canon_rtx (get_addr (XEXP (mem_rtx, 0)));
2630 114287754 : mem_rtx = canon_rtx (mem_rtx);
2631 :
2632 114287754 : vp = &first_containing_mem;
2633 284832542 : for (v = *vp; v != &dummy_val; v = next)
2634 : {
2635 170544788 : bool has_mem = false;
2636 170544788 : struct elt_loc_list **p = &v->locs;
2637 170544788 : bool had_locs = v->locs != NULL;
2638 170544788 : rtx_insn *setting_insn = v->locs ? v->locs->setting_insn : NULL;
2639 170544788 : rtx sp_base = NULL_RTX;
2640 170544788 : HOST_WIDE_INT sp_off = 0;
2641 :
2642 508395373 : while (*p)
2643 : {
2644 337850585 : rtx x = (*p)->loc;
2645 337850585 : cselib_val *addr;
2646 337850585 : struct elt_list **mem_chain;
2647 :
2648 : /* MEMs may occur in locations only at the top level; below
2649 : that every MEM or REG is substituted by its VALUE. */
2650 337850585 : if (!MEM_P (x))
2651 : {
2652 102531104 : p = &(*p)->next;
2653 102531104 : continue;
2654 : }
2655 :
2656 : /* When invalidating memory below the stack pointer for const/pure
2657 : calls and alloca/VLAs aren't used, attempt to optimize. Values
2658 : stored into area sometimes below the stack pointer shouldn't be
2659 : addressable and should be stored just through stack pointer
2660 : derived expressions, so don't invalidate MEMs not using stack
2661 : derived addresses, or if the MEMs clearly aren't below the stack
2662 : pointer. This isn't a fully conservative approach, the hope is
2663 : that invalidating more MEMs than this isn't actually needed. */
2664 235319481 : if (mem_rtx == callmem[1]
2665 2905373 : && num_mems < param_max_cselib_memory_locations
2666 2905315 : && GET_CODE (XEXP (x, 0)) == VALUE
2667 2905315 : && !cfun->calls_alloca)
2668 : {
2669 2857432 : cselib_val *v2 = CSELIB_VAL_PTR (XEXP (x, 0));
2670 2857432 : rtx x_base = NULL_RTX;
2671 2857432 : HOST_WIDE_INT x_off = 0;
2672 2857432 : if (SP_DERIVED_VALUE_P (v2->val_rtx))
2673 : x_base = v2->val_rtx;
2674 : else
2675 4540829 : for (struct elt_loc_list *l = v2->locs; l; l = l->next)
2676 2828471 : if (GET_CODE (l->loc) == PLUS
2677 1517785 : && GET_CODE (XEXP (l->loc, 0)) == VALUE
2678 1415710 : && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))
2679 3897803 : && CONST_INT_P (XEXP (l->loc, 1)))
2680 : {
2681 1069318 : x_base = XEXP (l->loc, 0);
2682 1069318 : x_off = INTVAL (XEXP (l->loc, 1));
2683 1069318 : break;
2684 : }
2685 : /* If x_base is NULL here, don't invalidate x as its address
2686 : isn't derived from sp such that it could be in outgoing
2687 : argument area of some call in !ACCUMULATE_OUTGOING_ARGS
2688 : function. */
2689 2857432 : if (x_base)
2690 : {
2691 1145074 : if (sp_base == NULL_RTX)
2692 : {
2693 2114570 : if (cselib_val *v3
2694 1060365 : = cselib_lookup_1 (stack_pointer_rtx, Pmode, 0,
2695 : VOIDmode))
2696 : {
2697 1052623 : if (SP_DERIVED_VALUE_P (v3->val_rtx))
2698 : sp_base = v3->val_rtx;
2699 : else
2700 277428 : for (struct elt_loc_list *l = v3->locs;
2701 556590 : l; l = l->next)
2702 556576 : if (GET_CODE (l->loc) == PLUS
2703 277414 : && GET_CODE (XEXP (l->loc, 0)) == VALUE
2704 277414 : && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))
2705 833990 : && CONST_INT_P (XEXP (l->loc, 1)))
2706 : {
2707 277414 : sp_base = XEXP (l->loc, 0);
2708 277414 : sp_off = INTVAL (XEXP (l->loc, 1));
2709 277414 : break;
2710 : }
2711 : }
2712 1057285 : if (sp_base == NULL_RTX)
2713 4676 : sp_base = pc_rtx;
2714 : }
2715 : /* Otherwise, if x_base and sp_base are the same,
2716 : we know that x_base + x_off is the x's address and
2717 : sp_base + sp_off is current value of stack pointer,
2718 : so try to determine if x is certainly not below stack
2719 : pointer. */
2720 1145074 : if (sp_base == x_base)
2721 : {
2722 1137971 : if (STACK_GROWS_DOWNWARD)
2723 : {
2724 1137971 : HOST_WIDE_INT off = sp_off;
2725 : #ifdef STACK_ADDRESS_OFFSET
2726 : /* On SPARC take stack pointer bias into account as
2727 : well. */
2728 : off += (STACK_ADDRESS_OFFSET
2729 : - FIRST_PARM_OFFSET (current_function_decl));
2730 : #endif
2731 1137971 : if (x_off >= off)
2732 : /* x is at or above the current stack pointer,
2733 : no need to invalidate it. */
2734 : x_base = NULL_RTX;
2735 : }
2736 : else
2737 : {
2738 : HOST_WIDE_INT sz;
2739 : enum machine_mode mode = GET_MODE (x);
2740 : if ((MEM_SIZE_KNOWN_P (x)
2741 : && MEM_SIZE (x).is_constant (&sz))
2742 : || (mode != BLKmode
2743 : && GET_MODE_SIZE (mode).is_constant (&sz)))
2744 : if (x_off < sp_off
2745 : && ((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT)
2746 : x_off + sz) <= sp_off))
2747 : /* x's end is below or at the current stack
2748 : pointer in !STACK_GROWS_DOWNWARD target,
2749 : no need to invalidate it. */
2750 : x_base = NULL_RTX;
2751 : }
2752 : }
2753 : }
2754 2849566 : if (x_base == NULL_RTX)
2755 : {
2756 2849566 : has_mem = true;
2757 2849566 : num_mems++;
2758 2849566 : p = &(*p)->next;
2759 2849566 : continue;
2760 : }
2761 : }
2762 :
2763 427464789 : if (num_mems < param_max_cselib_memory_locations
2764 464874500 : && ! canon_anti_dependence (x, false, mem_rtx,
2765 232404585 : GET_MODE (mem_rtx), mem_addr))
2766 : {
2767 194994874 : has_mem = true;
2768 194994874 : num_mems++;
2769 194994874 : p = &(*p)->next;
2770 194994874 : continue;
2771 : }
2772 :
2773 : /* This one overlaps. */
2774 : /* We must have a mapping from this MEM's address to the
2775 : value (E). Remove that, too. */
2776 37475041 : addr = cselib_lookup (XEXP (x, 0), VOIDmode, 0, GET_MODE (x));
2777 37475041 : addr = canonical_cselib_val (addr);
2778 37475041 : gcc_checking_assert (v == canonical_cselib_val (v));
2779 37475041 : mem_chain = &addr->addr_list;
2780 37477069 : for (;;)
2781 : {
2782 37476055 : cselib_val *canon = canonical_cselib_val ((*mem_chain)->elt);
2783 :
2784 37476055 : if (canon == v)
2785 : {
2786 37475041 : unchain_one_elt_list (mem_chain);
2787 37475041 : break;
2788 : }
2789 :
2790 : /* Record canonicalized elt. */
2791 1014 : (*mem_chain)->elt = canon;
2792 :
2793 1014 : mem_chain = &(*mem_chain)->next;
2794 1014 : }
2795 :
2796 37475041 : unchain_one_elt_loc_list (p);
2797 : }
2798 :
2799 170544788 : if (had_locs && cselib_useless_value_p (v))
2800 : {
2801 8697864 : if (setting_insn && DEBUG_INSN_P (setting_insn))
2802 0 : n_useless_debug_values++;
2803 : else
2804 8697864 : n_useless_values++;
2805 : }
2806 :
2807 170544788 : next = v->next_containing_mem;
2808 170544788 : if (has_mem)
2809 : {
2810 137931801 : *vp = v;
2811 137931801 : vp = &(*vp)->next_containing_mem;
2812 : }
2813 : else
2814 32612987 : v->next_containing_mem = NULL;
2815 : }
2816 114287754 : *vp = &dummy_val;
2817 114287754 : }
2818 :
2819 : /* Invalidate DEST. */
2820 :
2821 : void
2822 524468110 : cselib_invalidate_rtx (rtx dest)
2823 : {
2824 524468110 : while (GET_CODE (dest) == SUBREG
2825 524468110 : || GET_CODE (dest) == ZERO_EXTRACT
2826 1048942831 : || GET_CODE (dest) == STRICT_LOW_PART)
2827 6611 : dest = XEXP (dest, 0);
2828 :
2829 524468110 : if (REG_P (dest))
2830 399526299 : cselib_invalidate_regno (REGNO (dest), GET_MODE (dest));
2831 124941811 : else if (MEM_P (dest))
2832 75968142 : cselib_invalidate_mem (dest);
2833 524468110 : }
2834 :
2835 : /* A wrapper for cselib_invalidate_rtx to be called via note_stores. */
2836 :
2837 : static void
2838 508507301 : cselib_invalidate_rtx_note_stores (rtx dest, const_rtx,
2839 : void *data ATTRIBUTE_UNUSED)
2840 : {
2841 508507301 : cselib_invalidate_rtx (dest);
2842 508507301 : }
2843 :
2844 : /* Record the result of a SET instruction. DEST is being set; the source
2845 : contains the value described by SRC_ELT. If DEST is a MEM, DEST_ADDR_ELT
2846 : describes its address. */
2847 :
2848 : static void
2849 364701244 : cselib_record_set (rtx dest, cselib_val *src_elt, cselib_val *dest_addr_elt)
2850 : {
2851 364701244 : if (src_elt == 0 || side_effects_p (dest))
2852 66489940 : return;
2853 :
2854 298211304 : if (REG_P (dest))
2855 : {
2856 273816502 : unsigned int dreg = REGNO (dest);
2857 273816502 : if (dreg < FIRST_PSEUDO_REGISTER)
2858 : {
2859 200740738 : unsigned int n = REG_NREGS (dest);
2860 :
2861 200740738 : if (n > max_value_regs)
2862 25871782 : max_value_regs = n;
2863 : }
2864 :
2865 273816502 : if (REG_VALUES (dreg) == 0)
2866 : {
2867 168803610 : used_regs[n_used_regs++] = dreg;
2868 168803610 : REG_VALUES (dreg) = new_elt_list (REG_VALUES (dreg), src_elt);
2869 : }
2870 : else
2871 : {
2872 : /* The register should have been invalidated. */
2873 105012892 : gcc_assert (REG_VALUES (dreg)->elt == 0);
2874 105012892 : REG_VALUES (dreg)->elt = src_elt;
2875 : }
2876 :
2877 273816502 : if (cselib_useless_value_p (src_elt))
2878 41540 : n_useless_values--;
2879 273816502 : new_elt_loc_list (src_elt, dest);
2880 : }
2881 24394802 : else if (MEM_P (dest) && dest_addr_elt != 0
2882 24394802 : && cselib_record_memory)
2883 : {
2884 24394802 : if (cselib_useless_value_p (src_elt))
2885 28 : n_useless_values--;
2886 24394802 : add_mem_for_addr (dest_addr_elt, src_elt, dest);
2887 : }
2888 : }
2889 :
2890 : /* Make ELT and X's VALUE equivalent to each other at INSN. */
2891 :
2892 : void
2893 3883537 : cselib_add_permanent_equiv (cselib_val *elt, rtx x, rtx_insn *insn)
2894 : {
2895 3883537 : cselib_val *nelt;
2896 3883537 : rtx_insn *save_cselib_current_insn = cselib_current_insn;
2897 :
2898 3883537 : gcc_checking_assert (elt);
2899 3883537 : gcc_checking_assert (PRESERVED_VALUE_P (elt->val_rtx));
2900 3883537 : gcc_checking_assert (!side_effects_p (x));
2901 :
2902 3883537 : cselib_current_insn = insn;
2903 :
2904 3883537 : nelt = cselib_lookup (x, GET_MODE (elt->val_rtx), 1, VOIDmode);
2905 :
2906 3883537 : if (nelt != elt)
2907 : {
2908 3016720 : cselib_any_perm_equivs = true;
2909 :
2910 3016720 : if (!PRESERVED_VALUE_P (nelt->val_rtx))
2911 3012108 : cselib_preserve_value (nelt);
2912 :
2913 3016720 : new_elt_loc_list (nelt, elt->val_rtx);
2914 : }
2915 :
2916 3883537 : cselib_current_insn = save_cselib_current_insn;
2917 3883537 : }
2918 :
2919 : /* Return TRUE if any permanent equivalences have been recorded since
2920 : the table was last initialized. */
2921 : bool
2922 1387179896 : cselib_have_permanent_equivalences (void)
2923 : {
2924 1387179896 : return cselib_any_perm_equivs;
2925 : }
2926 :
2927 : /* Record stack_pointer_rtx to be equal to
2928 : (plus:P cfa_base_preserved_val offset). Used by var-tracking
2929 : at the start of basic blocks for !frame_pointer_needed functions. */
2930 :
2931 : void
2932 3480177 : cselib_record_sp_cfa_base_equiv (HOST_WIDE_INT offset, rtx_insn *insn)
2933 : {
2934 3480177 : rtx sp_derived_value = NULL_RTX;
2935 6960354 : for (struct elt_loc_list *l = cfa_base_preserved_val->locs; l; l = l->next)
2936 6960354 : if (GET_CODE (l->loc) == VALUE
2937 6960354 : && SP_DERIVED_VALUE_P (l->loc))
2938 : {
2939 : sp_derived_value = l->loc;
2940 : break;
2941 : }
2942 6960354 : else if (GET_CODE (l->loc) == PLUS
2943 3480177 : && GET_CODE (XEXP (l->loc, 0)) == VALUE
2944 3480177 : && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))
2945 10440531 : && CONST_INT_P (XEXP (l->loc, 1)))
2946 : {
2947 3480177 : sp_derived_value = XEXP (l->loc, 0);
2948 3480177 : offset = offset + UINTVAL (XEXP (l->loc, 1));
2949 3480177 : break;
2950 : }
2951 3480177 : if (sp_derived_value == NULL_RTX)
2952 : return;
2953 3480177 : cselib_val *val
2954 3480177 : = cselib_lookup_from_insn (plus_constant (Pmode, sp_derived_value, offset),
2955 3480177 : Pmode, 1, VOIDmode, insn);
2956 3480177 : if (val != NULL)
2957 : {
2958 3480177 : PRESERVED_VALUE_P (val->val_rtx) = 1;
2959 3480177 : cselib_record_set (stack_pointer_rtx, val, NULL);
2960 : }
2961 : }
2962 :
2963 : /* Return true if V is SP_DERIVED_VALUE_P (or SP_DERIVED_VALUE_P + CONST_INT)
2964 : that can be expressed using cfa_base_preserved_val + CONST_INT. */
2965 :
2966 : bool
2967 30712658 : cselib_sp_derived_value_p (cselib_val *v)
2968 : {
2969 30712658 : if (!SP_DERIVED_VALUE_P (v->val_rtx))
2970 67535746 : for (struct elt_loc_list *l = v->locs; l; l = l->next)
2971 37335885 : if (GET_CODE (l->loc) == PLUS
2972 7042920 : && GET_CODE (XEXP (l->loc, 0)) == VALUE
2973 6820971 : && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))
2974 42358383 : && CONST_INT_P (XEXP (l->loc, 1)))
2975 5022498 : v = CSELIB_VAL_PTR (XEXP (l->loc, 0));
2976 30712658 : if (!SP_DERIVED_VALUE_P (v->val_rtx))
2977 : return false;
2978 11368351 : for (struct elt_loc_list *l = v->locs; l; l = l->next)
2979 11368351 : if (l->loc == cfa_base_preserved_val->val_rtx)
2980 : return true;
2981 11368351 : else if (GET_CODE (l->loc) == PLUS
2982 5535295 : && XEXP (l->loc, 0) == cfa_base_preserved_val->val_rtx
2983 5535295 : && CONST_INT_P (XEXP (l->loc, 1)))
2984 : return true;
2985 : return false;
2986 : }
2987 :
2988 : /* There is no good way to determine how many elements there can be
2989 : in a PARALLEL. Since it's fairly cheap, use a really large number. */
2990 : #define MAX_SETS (FIRST_PSEUDO_REGISTER * 2)
2991 :
2992 : struct cselib_record_autoinc_data
2993 : {
2994 : struct cselib_set *sets;
2995 : int n_sets;
2996 : };
2997 :
2998 : /* Callback for for_each_inc_dec. Records in ARG the SETs implied by
2999 : autoinc RTXs: SRC plus SRCOFF if non-NULL is stored in DEST. */
3000 :
3001 : static int
3002 13378536 : cselib_record_autoinc_cb (rtx mem ATTRIBUTE_UNUSED, rtx op ATTRIBUTE_UNUSED,
3003 : rtx dest, rtx src, rtx srcoff, void *arg)
3004 : {
3005 13378536 : struct cselib_record_autoinc_data *data;
3006 13378536 : data = (struct cselib_record_autoinc_data *)arg;
3007 :
3008 13378536 : data->sets[data->n_sets].dest = dest;
3009 :
3010 13378536 : if (srcoff)
3011 13011285 : data->sets[data->n_sets].src = gen_rtx_PLUS (GET_MODE (src), src, srcoff);
3012 : else
3013 367251 : data->sets[data->n_sets].src = src;
3014 :
3015 13378536 : data->n_sets++;
3016 :
3017 13378536 : return 0;
3018 : }
3019 :
3020 : /* Record the effects of any sets and autoincs in INSN. */
3021 : static void
3022 882016584 : cselib_record_sets (rtx_insn *insn)
3023 : {
3024 882016584 : int n_sets = 0;
3025 882016584 : int i;
3026 882016584 : struct cselib_set sets[MAX_SETS];
3027 882016584 : rtx cond = 0;
3028 882016584 : int n_sets_before_autoinc;
3029 882016584 : int n_strict_low_parts = 0;
3030 882016584 : struct cselib_record_autoinc_data data;
3031 :
3032 882016584 : rtx body = PATTERN (insn);
3033 882016584 : if (GET_CODE (body) == COND_EXEC)
3034 : {
3035 0 : cond = COND_EXEC_TEST (body);
3036 0 : body = COND_EXEC_CODE (body);
3037 : }
3038 :
3039 : /* Find all sets. */
3040 882016584 : if (GET_CODE (body) == SET)
3041 : {
3042 369150528 : sets[0].src = SET_SRC (body);
3043 369150528 : sets[0].dest = SET_DEST (body);
3044 369150528 : n_sets = 1;
3045 : }
3046 512866056 : else if (GET_CODE (body) == PARALLEL)
3047 : {
3048 : /* Look through the PARALLEL and record the values being
3049 : set, if possible. Also handle any CLOBBERs. */
3050 209731283 : for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
3051 : {
3052 141394763 : rtx x = XVECEXP (body, 0, i);
3053 :
3054 141394763 : if (GET_CODE (x) == SET)
3055 : {
3056 73920885 : sets[n_sets].src = SET_SRC (x);
3057 73920885 : sets[n_sets].dest = SET_DEST (x);
3058 73920885 : n_sets++;
3059 : }
3060 : }
3061 : }
3062 :
3063 369150528 : if (n_sets == 1
3064 431771494 : && MEM_P (sets[0].src)
3065 62446259 : && !cselib_record_memory
3066 108390678 : && MEM_READONLY_P (sets[0].src))
3067 : {
3068 3239976 : rtx note = find_reg_equal_equiv_note (insn);
3069 :
3070 3239976 : if (note && CONSTANT_P (XEXP (note, 0)))
3071 2052953 : sets[0].src = XEXP (note, 0);
3072 : }
3073 :
3074 882016584 : data.sets = sets;
3075 882016584 : data.n_sets = n_sets_before_autoinc = n_sets;
3076 882016584 : for_each_inc_dec (PATTERN (insn), cselib_record_autoinc_cb, &data);
3077 882016584 : n_sets = data.n_sets;
3078 :
3079 : /* Look up the values that are read. Do this before invalidating the
3080 : locations that are written. */
3081 1338466533 : for (i = 0; i < n_sets; i++)
3082 : {
3083 456449949 : rtx dest = sets[i].dest;
3084 456449949 : rtx orig = dest;
3085 :
3086 : /* A STRICT_LOW_PART can be ignored; we'll record the equivalence for
3087 : the low part after invalidating any knowledge about larger modes. */
3088 456449949 : if (GET_CODE (sets[i].dest) == STRICT_LOW_PART)
3089 50886 : sets[i].dest = dest = XEXP (dest, 0);
3090 :
3091 : /* We don't know how to record anything but REG or MEM. */
3092 456449949 : if (REG_P (dest)
3093 123765538 : || (MEM_P (dest) && cselib_record_memory))
3094 : {
3095 361221067 : rtx src = sets[i].src;
3096 361221067 : if (cond)
3097 0 : src = gen_rtx_IF_THEN_ELSE (GET_MODE (dest), cond, src, dest);
3098 361221067 : sets[i].src_elt = cselib_lookup (src, GET_MODE (dest), 1, VOIDmode);
3099 361221067 : if (MEM_P (dest))
3100 : {
3101 28536656 : machine_mode address_mode = get_address_mode (dest);
3102 :
3103 28536656 : sets[i].dest_addr_elt = cselib_lookup (XEXP (dest, 0),
3104 : address_mode, 1,
3105 28536656 : GET_MODE (dest));
3106 : }
3107 : else
3108 332684411 : sets[i].dest_addr_elt = 0;
3109 : }
3110 :
3111 : /* Improve handling of STRICT_LOW_PART if the current value is known
3112 : to be const0_rtx, then the low bits will be set to dest and higher
3113 : bits will remain zero. Used in code like:
3114 :
3115 : {di:SI=0;clobber flags:CC;}
3116 : flags:CCNO=cmp(bx:SI,0)
3117 : strict_low_part(di:QI)=flags:CCNO<=0
3118 :
3119 : where we can note both that di:QI=flags:CCNO<=0 and
3120 : also that because di:SI is known to be 0 and strict_low_part(di:QI)
3121 : preserves the upper bits that di:SI=zero_extend(flags:CCNO<=0). */
3122 456449949 : scalar_int_mode mode;
3123 456449949 : if (dest != orig
3124 50886 : && cselib_record_sets_hook
3125 16233 : && REG_P (dest)
3126 16233 : && HARD_REGISTER_P (dest)
3127 16233 : && sets[i].src_elt
3128 456466182 : && is_a <scalar_int_mode> (GET_MODE (dest), &mode)
3129 456466182 : && n_sets + n_strict_low_parts < MAX_SETS)
3130 : {
3131 16233 : opt_scalar_int_mode wider_mode_iter;
3132 40856 : FOR_EACH_WIDER_MODE (wider_mode_iter, mode)
3133 : {
3134 40856 : scalar_int_mode wider_mode = wider_mode_iter.require ();
3135 48355 : if (GET_MODE_PRECISION (wider_mode) > BITS_PER_WORD)
3136 : break;
3137 :
3138 39579 : rtx reg = gen_lowpart (wider_mode, dest);
3139 39579 : if (!REG_P (reg))
3140 : break;
3141 :
3142 39575 : cselib_val *v = cselib_lookup (reg, wider_mode, 0, VOIDmode);
3143 39575 : if (!v)
3144 24623 : continue;
3145 :
3146 15915 : struct elt_loc_list *l;
3147 33696 : for (l = v->locs; l; l = l->next)
3148 32733 : if (l->loc == const0_rtx)
3149 : break;
3150 :
3151 963 : if (!l)
3152 963 : continue;
3153 :
3154 14952 : sets[n_sets + n_strict_low_parts].dest = reg;
3155 14952 : sets[n_sets + n_strict_low_parts].src = dest;
3156 14952 : sets[n_sets + n_strict_low_parts++].src_elt = sets[i].src_elt;
3157 14952 : break;
3158 : }
3159 : }
3160 : }
3161 :
3162 882016584 : if (cselib_record_sets_hook)
3163 79258099 : cselib_record_sets_hook (insn, sets, n_sets);
3164 :
3165 : /* Invalidate all locations written by this insn. Note that the elts we
3166 : looked up in the previous loop aren't affected, just some of their
3167 : locations may go away. */
3168 882016584 : note_pattern_stores (body, cselib_invalidate_rtx_note_stores, NULL);
3169 :
3170 1777411704 : for (i = n_sets_before_autoinc; i < n_sets; i++)
3171 13378536 : cselib_invalidate_rtx (sets[i].dest);
3172 :
3173 : /* If this is an asm, look for duplicate sets. This can happen when the
3174 : user uses the same value as an output multiple times. This is valid
3175 : if the outputs are not actually used thereafter. Treat this case as
3176 : if the value isn't actually set. We do this by smashing the destination
3177 : to pc_rtx, so that we won't record the value later. */
3178 882016584 : if (n_sets >= 2 && asm_noperands (body) >= 0)
3179 : {
3180 483015 : for (i = 0; i < n_sets; i++)
3181 : {
3182 375496 : rtx dest = sets[i].dest;
3183 375496 : if (REG_P (dest) || MEM_P (dest))
3184 : {
3185 375463 : int j;
3186 888246 : for (j = i + 1; j < n_sets; j++)
3187 512783 : if (rtx_equal_p (dest, sets[j].dest))
3188 : {
3189 0 : sets[i].dest = pc_rtx;
3190 0 : sets[j].dest = pc_rtx;
3191 : }
3192 : }
3193 : }
3194 : }
3195 :
3196 : /* Now enter the equivalences in our tables. */
3197 1338466533 : for (i = 0; i < n_sets; i++)
3198 : {
3199 456449949 : rtx dest = sets[i].dest;
3200 456449949 : if (REG_P (dest)
3201 123765538 : || (MEM_P (dest) && cselib_record_memory))
3202 361221067 : cselib_record_set (dest, sets[i].src_elt, sets[i].dest_addr_elt);
3203 : }
3204 :
3205 : /* And deal with STRICT_LOW_PART. */
3206 882031536 : for (i = 0; i < n_strict_low_parts; i++)
3207 : {
3208 14952 : if (! PRESERVED_VALUE_P (sets[n_sets + i].src_elt->val_rtx))
3209 0 : continue;
3210 14952 : machine_mode dest_mode = GET_MODE (sets[n_sets + i].dest);
3211 14952 : cselib_val *v
3212 14952 : = cselib_lookup (sets[n_sets + i].dest, dest_mode, 1, VOIDmode);
3213 14952 : cselib_preserve_value (v);
3214 14952 : rtx r = gen_rtx_ZERO_EXTEND (dest_mode,
3215 : sets[n_sets + i].src_elt->val_rtx);
3216 14952 : cselib_add_permanent_equiv (v, r, insn);
3217 : }
3218 882016584 : }
3219 :
3220 : /* Return true if INSN in the prologue initializes hard_frame_pointer_rtx. */
3221 :
3222 : bool
3223 40609031 : fp_setter_insn (rtx_insn *insn)
3224 : {
3225 40609031 : rtx expr, pat = NULL_RTX;
3226 :
3227 40609031 : if (!RTX_FRAME_RELATED_P (insn))
3228 : return false;
3229 :
3230 625182 : expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
3231 625182 : if (expr)
3232 55 : pat = XEXP (expr, 0);
3233 625182 : if (!modified_in_p (hard_frame_pointer_rtx, pat ? pat : insn))
3234 : return false;
3235 :
3236 : /* Don't return true for frame pointer restores in the epilogue. */
3237 153667 : if (find_reg_note (insn, REG_CFA_RESTORE, hard_frame_pointer_rtx))
3238 : return false;
3239 : return true;
3240 : }
3241 :
3242 : /* V is one of the values in REG_VALUES (REGNO). Return true if it
3243 : would be invalidated by CALLEE_ABI. */
3244 :
3245 : static bool
3246 112411792 : cselib_invalidated_by_call_p (const function_abi &callee_abi,
3247 : unsigned int regno, cselib_val *v)
3248 : {
3249 112411792 : machine_mode mode = GET_MODE (v->val_rtx);
3250 112411792 : if (mode == VOIDmode)
3251 : {
3252 0 : v = REG_VALUES (regno)->elt;
3253 0 : if (!v)
3254 : /* If we don't know what the mode of the constant value is, and we
3255 : don't know what mode the register was set in, conservatively
3256 : assume that the register is clobbered. The value's going to be
3257 : essentially useless in this case anyway. */
3258 : return true;
3259 0 : mode = GET_MODE (v->val_rtx);
3260 : }
3261 112411792 : return callee_abi.clobbers_reg_p (mode, regno);
3262 : }
3263 :
3264 : /* Record the effects of INSN. */
3265 :
3266 : void
3267 1017517273 : cselib_process_insn (rtx_insn *insn)
3268 : {
3269 1017517273 : int i;
3270 1017517273 : rtx x;
3271 :
3272 1017517273 : cselib_current_insn = insn;
3273 :
3274 : /* Forget everything at a CODE_LABEL or a setjmp. */
3275 1017517273 : if ((LABEL_P (insn)
3276 982056936 : || (CALL_P (insn)
3277 33775807 : && find_reg_note (insn, REG_SETJMP, NULL)))
3278 1017521482 : && !cselib_preserve_constants)
3279 : {
3280 35464323 : cselib_reset_table (next_uid);
3281 35464323 : cselib_current_insn = NULL;
3282 35464323 : return;
3283 : }
3284 :
3285 982052950 : if (! INSN_P (insn))
3286 : {
3287 100036366 : cselib_current_insn = NULL;
3288 100036366 : return;
3289 : }
3290 :
3291 : /* If this is a call instruction, forget anything stored in a
3292 : call clobbered register, or, if this is not a const call, in
3293 : memory. */
3294 882016584 : if (CALL_P (insn))
3295 : {
3296 33771821 : function_abi callee_abi = insn_callee_abi (insn);
3297 3140779353 : for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3298 : {
3299 3107007532 : elt_list **l = ®_VALUES (i);
3300 3325702418 : while (*l)
3301 : {
3302 218694886 : cselib_val *v = (*l)->elt;
3303 218694886 : if (v && cselib_invalidated_by_call_p (callee_abi, i, v))
3304 69712928 : cselib_invalidate_regno_val (i, l);
3305 : else
3306 148981958 : l = &(*l)->next;
3307 : }
3308 : }
3309 :
3310 : /* Since it is not clear how cselib is going to be used, be
3311 : conservative here and treat looping pure or const functions
3312 : as if they were regular functions. */
3313 33771821 : if (RTL_LOOPING_CONST_OR_PURE_CALL_P (insn)
3314 33771821 : || !(RTL_CONST_OR_PURE_CALL_P (insn)))
3315 30775297 : cselib_invalidate_mem (callmem[0]);
3316 : else
3317 : {
3318 : /* For const/pure calls, invalidate any argument slots because
3319 : they are owned by the callee. */
3320 8629665 : for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
3321 5633141 : if (GET_CODE (XEXP (x, 0)) == USE
3322 5632865 : && MEM_P (XEXP (XEXP (x, 0), 0)))
3323 138419 : cselib_invalidate_mem (XEXP (XEXP (x, 0), 0));
3324 : /* And invalidate memory below the stack (or above for
3325 : !STACK_GROWS_DOWNWARD), as even const/pure call can invalidate
3326 : that. Do this only if !ACCUMULATE_OUTGOING_ARGS or if
3327 : cfun->calls_alloca, otherwise the stack pointer shouldn't be
3328 : changing in the middle of the function and nothing should be
3329 : stored below the stack pointer. */
3330 2996524 : if (!ACCUMULATE_OUTGOING_ARGS || cfun->calls_alloca)
3331 2996081 : cselib_invalidate_mem (callmem[1]);
3332 : }
3333 : }
3334 :
3335 882016584 : cselib_record_sets (insn);
3336 :
3337 : /* Look for any CLOBBERs in CALL_INSN_FUNCTION_USAGE, but only
3338 : after we have processed the insn. */
3339 882016584 : if (CALL_P (insn))
3340 : {
3341 101680746 : for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
3342 67908925 : if (GET_CODE (XEXP (x, 0)) == CLOBBER)
3343 1858699 : cselib_invalidate_rtx (XEXP (XEXP (x, 0), 0));
3344 :
3345 : /* Flush everything on setjmp. */
3346 33771821 : if (cselib_preserve_constants
3347 33771821 : && find_reg_note (insn, REG_SETJMP, NULL))
3348 : {
3349 223 : cselib_preserve_only_values ();
3350 223 : cselib_reset_table (next_uid);
3351 : }
3352 : }
3353 :
3354 : /* On setter of the hard frame pointer if frame_pointer_needed,
3355 : invalidate stack_pointer_rtx, so that sp and {,h}fp based
3356 : VALUEs are distinct. */
3357 882016584 : if (reload_completed
3358 403464512 : && frame_pointer_needed
3359 922505789 : && fp_setter_insn (insn))
3360 93837 : cselib_invalidate_rtx (stack_pointer_rtx);
3361 :
3362 882016584 : cselib_current_insn = NULL;
3363 :
3364 882016584 : if (n_useless_values > MAX_USELESS_VALUES
3365 : /* remove_useless_values is linear in the hash table size. Avoid
3366 : quadratic behavior for very large hashtables with very few
3367 : useless elements. */
3368 882016584 : && ((unsigned int)n_useless_values
3369 2062650 : > (cselib_hash_table->elements () - n_debug_values) / 4))
3370 28725 : remove_useless_values ();
3371 : }
3372 :
3373 : /* Initialize cselib for one pass. The caller must also call
3374 : init_alias_analysis. */
3375 :
3376 : void
3377 10276164 : cselib_init (int record_what)
3378 : {
3379 10276164 : cselib_record_memory = record_what & CSELIB_RECORD_MEMORY;
3380 10276164 : cselib_preserve_constants = record_what & CSELIB_PRESERVE_CONSTANTS;
3381 10276164 : cselib_any_perm_equivs = false;
3382 :
3383 : /* (mem:BLK (scratch)) is a special mechanism to conflict with everything,
3384 : see canon_true_dependence. This is only created once. */
3385 10276164 : if (! callmem[0])
3386 144338 : callmem[0] = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
3387 : /* Similarly create a MEM representing roughly everything below
3388 : the stack for STACK_GROWS_DOWNWARD targets or everything above
3389 : it otherwise. Do this only when !ACCUMULATE_OUTGOING_ARGS or
3390 : if cfun->calls_alloca, otherwise the stack pointer shouldn't be
3391 : changing in the middle of the function and nothing should be stored
3392 : below the stack pointer. */
3393 10276164 : if (!callmem[1] && (!ACCUMULATE_OUTGOING_ARGS || cfun->calls_alloca))
3394 : {
3395 144083 : if (STACK_GROWS_DOWNWARD)
3396 : {
3397 144083 : unsigned HOST_WIDE_INT off = -(GET_MODE_MASK (Pmode) >> 1);
3398 : #ifdef STACK_ADDRESS_OFFSET
3399 : /* On SPARC take stack pointer bias into account as well. */
3400 : off += (STACK_ADDRESS_OFFSET
3401 : - FIRST_PARM_OFFSET (current_function_decl));
3402 : #endif
3403 144083 : callmem[1] = plus_constant (Pmode, stack_pointer_rtx, off);
3404 : }
3405 : else
3406 : callmem[1] = stack_pointer_rtx;
3407 144083 : callmem[1] = gen_rtx_MEM (BLKmode, callmem[1]);
3408 149502 : set_mem_size (callmem[1], GET_MODE_MASK (Pmode) >> 1);
3409 : }
3410 :
3411 10276164 : cselib_nregs = max_reg_num ();
3412 :
3413 : /* We preserve reg_values to allow expensive clearing of the whole thing.
3414 : Reallocate it however if it happens to be too large. */
3415 10276164 : if (!reg_values || reg_values_size < cselib_nregs
3416 9978422 : || (reg_values_size > 10 && reg_values_size > cselib_nregs * 4))
3417 : {
3418 313011 : free (reg_values);
3419 : /* Some space for newly emit instructions so we don't end up
3420 : reallocating in between passes. */
3421 313011 : reg_values_size = cselib_nregs + (63 + cselib_nregs) / 16;
3422 313011 : reg_values = XCNEWVEC (struct elt_list *, reg_values_size);
3423 : }
3424 10276164 : used_regs = XNEWVEC (unsigned int, cselib_nregs);
3425 10276164 : n_used_regs = 0;
3426 : /* FIXME: enable sanitization (PR87845) */
3427 10276164 : cselib_hash_table
3428 10276164 : = new hash_table<cselib_hasher> (31, /* ggc */ false,
3429 10276164 : /* sanitize_eq_and_hash */ false);
3430 10276164 : if (cselib_preserve_constants)
3431 500050 : cselib_preserved_hash_table
3432 500050 : = new hash_table<cselib_hasher> (31, /* ggc */ false,
3433 500050 : /* sanitize_eq_and_hash */ false);
3434 10276164 : next_uid = 1;
3435 10276164 : }
3436 :
3437 : /* Called when the current user is done with cselib. */
3438 :
3439 : void
3440 10276164 : cselib_finish (void)
3441 : {
3442 10276164 : bool preserved = cselib_preserve_constants;
3443 10276164 : cselib_discard_hook = NULL;
3444 10276164 : cselib_preserve_constants = false;
3445 10276164 : cselib_any_perm_equivs = false;
3446 10276164 : cfa_base_preserved_val = NULL;
3447 10276164 : cfa_base_preserved_regno = INVALID_REGNUM;
3448 10276164 : elt_list_pool.release ();
3449 10276164 : elt_loc_list_pool.release ();
3450 10276164 : cselib_val_pool.release ();
3451 10276164 : value_pool.release ();
3452 10276164 : cselib_clear_table ();
3453 10276164 : delete cselib_hash_table;
3454 10276164 : cselib_hash_table = NULL;
3455 10276164 : if (preserved)
3456 500050 : delete cselib_preserved_hash_table;
3457 10276164 : cselib_preserved_hash_table = NULL;
3458 10276164 : free (used_regs);
3459 10276164 : used_regs = 0;
3460 10276164 : n_useless_values = 0;
3461 10276164 : n_useless_debug_values = 0;
3462 10276164 : n_debug_values = 0;
3463 10276164 : next_uid = 0;
3464 10276164 : }
3465 :
3466 : /* Dump the cselib_val V to FILE *OUT. */
3467 :
3468 : int
3469 93 : dump_cselib_val (cselib_val *v, FILE *out)
3470 : {
3471 93 : bool need_lf = true;
3472 :
3473 93 : print_inline_rtx (out, v->val_rtx, 0);
3474 :
3475 93 : if (v->locs)
3476 : {
3477 86 : struct elt_loc_list *l = v->locs;
3478 86 : if (need_lf)
3479 : {
3480 86 : fputc ('\n', out);
3481 86 : need_lf = false;
3482 : }
3483 86 : fputs (" locs:", out);
3484 149 : do
3485 : {
3486 149 : if (l->setting_insn)
3487 149 : fprintf (out, "\n from insn %i ",
3488 149 : INSN_UID (l->setting_insn));
3489 : else
3490 0 : fprintf (out, "\n ");
3491 149 : print_inline_rtx (out, l->loc, 4);
3492 : }
3493 149 : while ((l = l->next));
3494 86 : fputc ('\n', out);
3495 : }
3496 : else
3497 : {
3498 7 : fputs (" no locs", out);
3499 7 : need_lf = true;
3500 : }
3501 :
3502 93 : if (v->addr_list)
3503 : {
3504 6 : struct elt_list *e = v->addr_list;
3505 6 : if (need_lf)
3506 : {
3507 0 : fputc ('\n', out);
3508 0 : need_lf = false;
3509 : }
3510 6 : fputs (" addr list:", out);
3511 6 : do
3512 : {
3513 6 : fputs ("\n ", out);
3514 6 : print_inline_rtx (out, e->elt->val_rtx, 2);
3515 : }
3516 6 : while ((e = e->next));
3517 6 : fputc ('\n', out);
3518 : }
3519 : else
3520 : {
3521 87 : fputs (" no addrs", out);
3522 87 : need_lf = true;
3523 : }
3524 :
3525 93 : if (v->next_containing_mem == &dummy_val)
3526 6 : fputs (" last mem\n", out);
3527 87 : else if (v->next_containing_mem)
3528 : {
3529 0 : fputs (" next mem ", out);
3530 0 : print_inline_rtx (out, v->next_containing_mem->val_rtx, 2);
3531 0 : fputc ('\n', out);
3532 : }
3533 87 : else if (need_lf)
3534 81 : fputc ('\n', out);
3535 :
3536 93 : return 1;
3537 : }
3538 :
3539 : /* Dump the cselib_val *X to FILE *OUT. */
3540 :
3541 : static int
3542 93 : dump_cselib_val_ptr (cselib_val **x, FILE *out)
3543 : {
3544 93 : cselib_val *v = *x;
3545 93 : return dump_cselib_val (v, out);
3546 : }
3547 :
3548 : /* Dump to OUT everything in the CSELIB table. */
3549 :
3550 : void
3551 11 : dump_cselib_table (FILE *out)
3552 : {
3553 11 : fprintf (out, "cselib hash table:\n");
3554 104 : cselib_hash_table->traverse <FILE *, dump_cselib_val_ptr> (out);
3555 11 : if (cselib_preserved_hash_table)
3556 : {
3557 11 : fprintf (out, "cselib preserved hash table:\n");
3558 11 : cselib_preserved_hash_table->traverse <FILE *, dump_cselib_val_ptr> (out);
3559 : }
3560 11 : if (first_containing_mem != &dummy_val)
3561 : {
3562 6 : fputs ("first mem ", out);
3563 6 : print_inline_rtx (out, first_containing_mem->val_rtx, 2);
3564 6 : fputc ('\n', out);
3565 : }
3566 11 : fprintf (out, "next uid %i\n", next_uid);
3567 11 : }
3568 :
3569 : #include "gt-cselib.h"
|