Line data Source code
1 : /* Common subexpression elimination library for GNU compiler.
2 : Copyright (C) 1987-2026 Free Software Foundation, Inc.
3 :
4 : This file is part of GCC.
5 :
6 : GCC is free software; you can redistribute it and/or modify it under
7 : the terms of the GNU General Public License as published by the Free
8 : Software Foundation; either version 3, or (at your option) any later
9 : version.
10 :
11 : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 : WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 : for more details.
15 :
16 : You should have received a copy of the GNU General Public License
17 : along with GCC; see the file COPYING3. If not see
18 : <http://www.gnu.org/licenses/>. */
19 :
20 : #include "config.h"
21 : #include "system.h"
22 : #include "coretypes.h"
23 : #include "backend.h"
24 : #include "target.h"
25 : #include "rtl.h"
26 : #include "tree.h"
27 : #include "df.h"
28 : #include "memmodel.h"
29 : #include "tm_p.h"
30 : #include "regs.h"
31 : #include "emit-rtl.h"
32 : #include "dumpfile.h"
33 : #include "cselib.h"
34 : #include "function-abi.h"
35 : #include "alias.h"
36 : #include "predict.h"
37 :
38 : /* A list of cselib_val structures. */
39 : struct elt_list
40 : {
: /* Link to the next node; NULL terminates the list. */
41 : struct elt_list *next;
: /* The cselib_val carried by this node. */
42 : cselib_val *elt;
43 : };
44 :
: /* Module-private configuration flags (presumably set when cselib is
: initialized — the init code is outside this view). */
45 : static bool cselib_record_memory;
46 : static bool cselib_preserve_constants;
47 : static bool cselib_any_perm_equivs;
: /* Forward declarations for static helpers defined later in the file. */
48 : static inline void promote_debug_loc (struct elt_loc_list *l);
49 : static struct elt_list *new_elt_list (struct elt_list *, cselib_val *);
50 : static void new_elt_loc_list (cselib_val *, rtx);
51 : static void unchain_one_value (cselib_val *);
52 : static void unchain_one_elt_list (struct elt_list **);
53 : static void unchain_one_elt_loc_list (struct elt_loc_list **);
54 : static void remove_useless_values (void);
55 : static hashval_t cselib_hash_rtx (rtx, int, machine_mode);
56 : static cselib_val *new_cselib_val (unsigned int, machine_mode, rtx);
57 : static void add_mem_for_addr (cselib_val *, cselib_val *, rtx);
58 : static cselib_val *cselib_lookup_mem (rtx, int);
59 : static void cselib_invalidate_regno (unsigned int, machine_mode);
60 : static void cselib_invalidate_mem (rtx);
61 : static void cselib_record_set (rtx, cselib_val *, cselib_val *);
62 : static void cselib_record_sets (rtx_insn *);
63 : static rtx autoinc_split (rtx, rtx *, machine_mode);
64 :
: /* Per-VALUE property bits. These macros repurpose the generic rtx
: flag fields (unchanging, jump, call) on VALUE rtxes, with
: RTL_FLAG_CHECK1 enforcing that RTX really is a VALUE. */
65 : #define PRESERVED_VALUE_P(RTX) \
66 : (RTL_FLAG_CHECK1 ("PRESERVED_VALUE_P", (RTX), VALUE)->unchanging)
67 :
68 : #define SP_BASED_VALUE_P(RTX) \
69 : (RTL_FLAG_CHECK1 ("SP_BASED_VALUE_P", (RTX), VALUE)->jump)
70 :
71 : #define SP_DERIVED_VALUE_P(RTX) \
72 : (RTL_FLAG_CHECK1 ("SP_DERIVED_VALUE_P", (RTX), VALUE)->call)
73 :
: /* State threaded through cselib_expand_value_rtx_1. */
74 : struct expand_value_data
75 : {
: /* Bitmap of registers involved in the current expansion. */
76 : bitmap regs_active;
: /* Optional user callback and its opaque argument. */
77 : cselib_expand_callback callback;
78 : void *callback_arg;
: /* NOTE(review): presumably a dry-run flag for expansion without
: building rtl — confirm against cselib_expand_value_rtx_1. */
79 : bool dummy;
80 : };
81 :
82 : static rtx cselib_expand_value_rtx_1 (rtx, struct expand_value_data *, int);
83 :
84 : /* This is a global so we don't have to pass this through every function.
85 : It is used in new_elt_loc_list to set SETTING_INSN. Also consulted
: (and temporarily overridden) by cselib_hasher::equal and
: promote_debug_loc to distinguish debug from nondebug contexts. */
86 : static rtx_insn *cselib_current_insn;
87 :
88 : /* There are three ways in which cselib can look up an rtx:
89 : - for a REG, the reg_values table (which is indexed by regno) is used
90 : - for a MEM, we recursively look up its address and then follow the
91 : addr_list of that value
92 : - for everything else, we compute a hash value and go through the hash
93 : table. Since different rtx's can still have the same hash value,
94 : this involves walking the table entries for a given value and comparing
95 : the locations of the entries with the rtx we are looking up. */
96 :
97 : struct cselib_hasher : nofree_ptr_hash <cselib_val>
98 : {
: /* Lookup key: the rtx being searched for together with the modes
: needed to interpret it. */
99 : struct key {
100 : /* The rtx value and its mode (needed separately for constant
101 : integers). */
102 : machine_mode mode;
103 : rtx x;
104 : /* The mode of the containing MEM, if any, otherwise VOIDmode. */
105 : machine_mode memmode;
106 : };
107 : typedef key *compare_type;
108 : static inline hashval_t hash (const cselib_val *);
109 : static inline bool equal (const cselib_val *, const key *);
110 : };
111 :
112 : /* The hash function for our hash table. The value is always computed with
113 : cselib_hash_rtx when adding an element; this function just extracts the
114 : hash value from a cselib_val structure. */
115 :
116 : inline hashval_t
117 101505400 : cselib_hasher::hash (const cselib_val *v)
118 : {
: /* The hash was computed by cselib_hash_rtx when V was entered;
: simply return the cached value. */
119 101505400 : return v->hash;
120 : }
121 :
122 : /* The equality test for our hash table. The first argument V is a table
123 : element (i.e. a cselib_val), while the second arg X is an rtx. We know
124 : that all callers of htab_find_slot_with_hash will wrap CONST_INTs into a
125 : CONST of an appropriate mode. */
126 :
127 : inline bool
128 575421407 : cselib_hasher::equal (const cselib_val *v, const key *x_arg)
129 : {
130 575421407 : struct elt_loc_list *l;
131 575421407 : rtx x = x_arg->x;
132 575421407 : machine_mode mode = x_arg->mode;
133 575421407 : machine_mode memmode = x_arg->memmode;
134 :
: /* Modes must match before any structural comparison. */
135 575421407 : if (mode != GET_MODE (v->val_rtx))
136 : return false;
137 :
: /* A VALUE rtx is equal only to its own table entry. */
138 462760531 : if (GET_CODE (x) == VALUE)
139 12847321 : return x == v->val_rtx;
140 :
: /* For a stack-pointer-derived value, X matches if autoinc_split
: reduces it to exactly this VALUE with no leftover offset. */
141 459951702 : if (SP_DERIVED_VALUE_P (v->val_rtx) && GET_MODE (x) == Pmode)
142 : {
143 17598108 : rtx xoff = NULL;
144 17598108 : if (autoinc_split (x, &xoff, memmode) == v->val_rtx && xoff == NULL_RTX)
145 7893094 : return true;
146 : }
147 :
148 : /* We don't guarantee that distinct rtx's have different hash values,
149 : so we need to do a comparison. */
150 758420745 : for (l = v->locs; l; l = l->next)
151 493295696 : if (l->setting_insn && DEBUG_INSN_P (l->setting_insn)
152 38660838 : && (!cselib_current_insn || !DEBUG_INSN_P (cselib_current_insn)))
153 : {
154 11061750 : rtx_insn *save_cselib_current_insn = cselib_current_insn;
155 : /* If l is so far a debug only loc, without debug stmts it
156 : would never be compared to x at all, so temporarily pretend
157 : current instruction is that DEBUG_INSN so that we don't
158 : promote other debug locs even for unsuccessful comparison. */
159 11061750 : cselib_current_insn = l->setting_insn;
160 11061750 : bool match = rtx_equal_for_cselib_1 (l->loc, x, memmode, 0);
161 11061750 : cselib_current_insn = save_cselib_current_insn;
162 11061750 : if (match)
163 : {
: /* A nondebug use of a debug-only loc: upgrade it. */
164 895751 : promote_debug_loc (l);
165 895751 : return true;
166 : }
167 : }
168 482233946 : else if (rtx_equal_for_cselib_1 (l->loc, x, memmode, 0))
169 : return true;
170 :
171 : return false;
172 : }
173 :
174 : /* A table that enables us to look up elts by their value. */
175 : static hash_table<cselib_hasher> *cselib_hash_table;
176 :
177 : /* A table to hold preserved values. */
178 : static hash_table<cselib_hasher> *cselib_preserved_hash_table;
179 :
180 : /* The unique id that the next create value will take. */
181 : static unsigned int next_uid;
182 :
183 : /* The number of registers we had when the varrays were last resized. */
184 : static unsigned int cselib_nregs;
185 :
186 : /* Count values without known locations, or with only locations that
187 : wouldn't have been known except for debug insns. Whenever this
188 : grows too big, we remove these useless values from the table.
189 :
190 : Counting values with only debug values is a bit tricky. We don't
191 : want to increment n_useless_values when we create a value for a
192 : debug insn, for this would get n_useless_values out of sync, but we
193 : want increment it if all locs in the list that were ever referenced
194 : in nondebug insns are removed from the list.
195 :
196 : In the general case, once we do that, we'd have to stop accepting
197 : nondebug expressions in the loc list, to avoid having two values
198 : equivalent that, without debug insns, would have been made into
199 : separate values. However, because debug insns never introduce
200 : equivalences themselves (no assignments), the only means for
201 : growing loc lists is through nondebug assignments. If the locs
202 : also happen to be referenced in debug insns, it will work just fine.
203 :
204 : A consequence of this is that there's at most one debug-only loc in
205 : each loc list. If we keep it in the first entry, testing whether
206 : we have a debug-only loc list takes O(1).
207 :
208 : Furthermore, since any additional entry in a loc list containing a
209 : debug loc would have to come from an assignment (nondebug) that
210 : references both the initial debug loc and the newly-equivalent loc,
211 : the initial debug loc would be promoted to a nondebug loc, and the
212 : loc list would not contain debug locs any more.
213 :
214 : So the only case we have to be careful with in order to keep
215 : n_useless_values in sync between debug and nondebug compilations is
216 : to avoid incrementing n_useless_values when removing the single loc
217 : from a value that turns out to not appear outside debug values. We
218 : increment n_useless_debug_values instead, and leave such values
219 : alone until, for other reasons, we garbage-collect useless
220 : values. */
221 : static int n_useless_values;
222 : static int n_useless_debug_values;
223 :
224 : /* Count values whose locs have been taken exclusively from debug
225 : insns for the entire life of the value. */
226 : static int n_debug_values;
227 :
228 : /* Number of useless values before we remove them from the hash table. */
229 : #define MAX_USELESS_VALUES 32
230 :
231 : /* This table maps from register number to values. It does not
232 : contain pointers to cselib_val structures, but rather elt_lists.
233 : The purpose is to be able to refer to the same register in
234 : different modes. The first element of the list defines the mode in
235 : which the register was set; if the mode is unknown or the value is
236 : no longer valid in that mode, ELT will be NULL for the first
237 : element. */
238 : static struct elt_list **reg_values;
239 : static unsigned int reg_values_size;
240 : #define REG_VALUES(i) reg_values[i]
241 :
242 : /* The largest number of hard regs used by any entry added to the
243 : REG_VALUES table. Cleared on each cselib_clear_table() invocation. */
244 : static unsigned int max_value_regs;
245 :
246 : /* Here the set of indices I with REG_VALUES(I) != 0 is saved. This is used
247 : in cselib_clear_table() for fast emptying. */
248 : static unsigned int *used_regs;
249 : static unsigned int n_used_regs;
250 :
251 : /* We pass this to cselib_invalidate_mem to invalidate all of
252 : memory for a non-const call instruction and memory below stack pointer
253 : for const/pure calls. */
254 : static GTY(()) rtx callmem[2];
255 :
256 : /* Set by discard_useless_locs if it deleted the last location of any
257 : value. */
258 : static int values_became_useless;
259 :
260 : /* Used as stop element of the containing_mem list so we can check
261 : presence in the list by checking the next pointer. */
262 : static cselib_val dummy_val;
263 :
264 : /* If non-NULL, value of the eliminated arg_pointer_rtx or frame_pointer_rtx
265 : that is constant through the whole function and should never be
266 : eliminated. */
267 : static cselib_val *cfa_base_preserved_val;
268 : static unsigned int cfa_base_preserved_regno = INVALID_REGNUM;
269 :
270 : /* Used to list all values that contain memory reference.
271 : May or may not contain the useless values - the list is compacted
272 : each time memory is invalidated. */
273 : static cselib_val *first_containing_mem = &dummy_val;
274 :
: /* Object pools for the small structures allocated throughout. */
275 : static object_allocator<elt_list> elt_list_pool ("elt_list");
276 : static object_allocator<elt_loc_list> elt_loc_list_pool ("elt_loc_list");
277 : static object_allocator<cselib_val> cselib_val_pool ("cselib_val_list");
278 :
: /* Pool for the VALUE rtxes themselves. */
279 : static pool_allocator value_pool ("value", RTX_CODE_SIZE (VALUE));
280 :
281 : /* If nonnull, cselib will call this function before freeing useless
282 : VALUEs. A VALUE is deemed useless if its "locs" field is null. */
283 : void (*cselib_discard_hook) (cselib_val *);
284 :
285 : /* If nonnull, cselib will call this function before recording sets or
286 : even clobbering outputs of INSN. All the recorded sets will be
287 : represented in the array sets[n_sets]. new_val_min can be used to
288 : tell whether values present in sets are introduced by this
289 : instruction. */
290 : void (*cselib_record_sets_hook) (rtx_insn *insn, struct cselib_set *sets,
291 : int n_sets);
292 :
293 :
294 :
295 : /* Allocate a struct elt_list and fill in its two elements with the
296 : arguments. */
297 :
298 : static inline struct elt_list *
299 487381693 : new_elt_list (struct elt_list *next, cselib_val *elt)
300 : {
: /* Pool-allocate a node and prepend it to NEXT. */
301 974763386 : elt_list *el = elt_list_pool.allocate ();
302 487381693 : el->next = next;
303 487381693 : el->elt = elt;
304 487381693 : return el;
305 : }
306 :
307 : /* Allocate a struct elt_loc_list with LOC and prepend it to VAL's loc
308 : list. */
309 :
310 : static inline void
311 721806790 : new_elt_loc_list (cselib_val *val, rtx loc)
312 : {
313 725733190 : struct elt_loc_list *el, *next = val->locs;
314 :
: /* A debug-only loc, if any, must be the sole (first) entry; see the
: invariants documented above n_useless_values. */
315 725733190 : gcc_checking_assert (!next || !next->setting_insn
316 : || !DEBUG_INSN_P (next->setting_insn)
317 : || cselib_current_insn == next->setting_insn);
318 :
319 : /* If we're creating the first loc in a debug insn context, we've
320 : just created a debug value. Count it. */
321 424773311 : if (!next && cselib_current_insn && DEBUG_INSN_P (cselib_current_insn))
322 6737946 : n_debug_values++;
323 :
324 725733190 : val = canonical_cselib_val (val);
325 725733190 : next = val->locs;
326 :
: /* If LOC is itself a VALUE, this records an equivalence between two
: values: merge the classes, keeping the smaller-uid value
: canonical. */
327 725733190 : if (GET_CODE (loc) == VALUE)
328 : {
329 7857550 : loc = canonical_cselib_val (CSELIB_VAL_PTR (loc))->val_rtx;
330 :
331 7857550 : gcc_checking_assert (PRESERVED_VALUE_P (loc)
332 : == PRESERVED_VALUE_P (val->val_rtx));
333 :
: /* Already the same canonical value: nothing to record. */
334 7857550 : if (val->val_rtx == loc)
335 : return;
336 7852974 : else if (val->uid > CSELIB_VAL_PTR (loc)->uid)
337 : {
338 : /* Reverse the insertion. */
339 : new_elt_loc_list (CSELIB_VAL_PTR (loc), val->val_rtx);
340 : return;
341 : }
342 :
343 3926574 : gcc_checking_assert (val->uid < CSELIB_VAL_PTR (loc)->uid);
344 :
345 3926574 : if (CSELIB_VAL_PTR (loc)->locs)
346 : {
347 : /* Bring all locs from LOC to VAL. */
348 2993555 : for (el = CSELIB_VAL_PTR (loc)->locs; el->next; el = el->next)
349 : {
350 : /* Adjust values that have LOC as canonical so that VAL
351 : becomes their canonical. */
352 18 : if (el->loc && GET_CODE (el->loc) == VALUE)
353 : {
354 0 : gcc_checking_assert (CSELIB_VAL_PTR (el->loc)->locs->loc
355 : == loc);
356 0 : CSELIB_VAL_PTR (el->loc)->locs->loc = val->val_rtx;
357 : }
358 : }
: /* Splice LOC's whole loc list onto the front of VAL's. */
359 2993537 : el->next = val->locs;
360 2993537 : next = val->locs = CSELIB_VAL_PTR (loc)->locs;
361 : }
362 :
363 3926574 : if (CSELIB_VAL_PTR (loc)->addr_list)
364 : {
365 : /* Bring in addr_list into canonical node. */
366 : struct elt_list *last = CSELIB_VAL_PTR (loc)->addr_list;
367 2 : while (last->next)
368 : last = last->next;
369 2 : last->next = val->addr_list;
370 2 : val->addr_list = CSELIB_VAL_PTR (loc)->addr_list;
371 2 : CSELIB_VAL_PTR (loc)->addr_list = NULL;
372 : }
373 :
374 3926574 : if (CSELIB_VAL_PTR (loc)->next_containing_mem != NULL
375 0 : && val->next_containing_mem == NULL)
376 : {
377 : /* Add VAL to the containing_mem list after LOC. LOC will
378 : be removed when we notice it doesn't contain any
379 : MEMs. */
380 0 : val->next_containing_mem = CSELIB_VAL_PTR (loc)->next_containing_mem;
381 0 : CSELIB_VAL_PTR (loc)->next_containing_mem = val;
382 : }
383 :
384 : /* Chain LOC back to VAL. */
385 3926574 : el = elt_loc_list_pool.allocate ();
386 3926574 : el->loc = val->val_rtx;
387 3926574 : el->setting_insn = cselib_current_insn;
388 3926574 : el->next = NULL;
389 3926574 : CSELIB_VAL_PTR (loc)->locs = el;
390 : }
391 :
: /* Finally prepend LOC itself to VAL's loc list. */
392 721802214 : el = elt_loc_list_pool.allocate ();
393 721802214 : el->loc = loc;
394 721802214 : el->setting_insn = cselib_current_insn;
395 721802214 : el->next = next;
396 721802214 : val->locs = el;
397 : }
398 :
399 : /* Promote loc L to a nondebug cselib_current_insn if L is marked as
400 : originating from a debug insn, maintaining the debug values
401 : count. */
402 :
403 : static inline void
404 1221515844 : promote_debug_loc (struct elt_loc_list *l)
405 : {
: /* Only promote when L came from a debug insn but the current
: context is nondebug (or there is no current insn). */
406 1221515844 : if (l && l->setting_insn && DEBUG_INSN_P (l->setting_insn)
407 22229912 : && (!cselib_current_insn || !DEBUG_INSN_P (cselib_current_insn)))
408 : {
: /* The value is no longer debug-only; keep the counter in sync. */
409 2578257 : n_debug_values--;
410 2578257 : l->setting_insn = cselib_current_insn;
411 2578257 : if (cselib_preserve_constants && l->next)
412 : {
: /* With preserved constants there may be exactly one more
: debug loc behind L; promote it too. */
413 21969 : gcc_assert (l->next->setting_insn
414 : && DEBUG_INSN_P (l->next->setting_insn)
415 : && !l->next->next)
416 21969 : l->next->setting_insn = cselib_current_insn;
417 : }
418 : else
419 2556288 : gcc_assert (!l->next);
420 : }
421 1221515844 : }
422 :
423 : /* The elt_list at *PL is no longer needed. Unchain it and free its
424 : storage. */
425 :
426 : static inline void
427 80229806 : unchain_one_elt_list (struct elt_list **pl)
428 : {
429 80229806 : struct elt_list *l = *pl;
430 :
: /* Splice L out of the list, then return it to the pool. */
431 80229806 : *pl = l->next;
432 117655442 : elt_list_pool.remove (l);
433 : }
434 :
435 : /* Likewise for elt_loc_lists. */
436 :
437 : static void
438 222716961 : unchain_one_elt_loc_list (struct elt_loc_list **pl)
439 : {
440 222716961 : struct elt_loc_list *l = *pl;
441 :
: /* Splice L out of the list, then return it to the pool. */
442 222716961 : *pl = l->next;
443 0 : elt_loc_list_pool.remove (l);
444 : }
445 :
446 : /* Likewise for cselib_vals. This also frees the addr_list associated with
447 : V. */
448 :
449 : static void
450 2360986 : unchain_one_value (cselib_val *v)
451 : {
: /* Release the whole addr_list before freeing V itself. */
452 2380402 : while (v->addr_list)
453 19416 : unchain_one_elt_list (&v->addr_list);
454 :
455 2360986 : cselib_val_pool.remove (v);
456 : }
457 :
458 : /* Remove all entries from the hash table. Also used during
459 : initialization. */
460 :
461 : void
462 61044219 : cselib_clear_table (void)
463 : {
: /* Restart value numbering from uid 1. */
464 61044219 : cselib_reset_table (1);
465 61044219 : }
466 :
467 : /* Return TRUE if V is a constant, a function invariant or a VALUE
468 : equivalence; FALSE otherwise. */
469 :
470 : static bool
471 57915558 : invariant_or_equiv_p (cselib_val *v)
472 : {
473 57915558 : struct elt_loc_list *l;
474 :
: /* The CFA base value is preserved unconditionally. */
475 57915558 : if (v == cfa_base_preserved_val)
476 : return true;
477 :
478 : /* Keep VALUE equivalences around. */
479 80467463 : for (l = v->locs; l; l = l->next)
480 38876084 : if (GET_CODE (l->loc) == VALUE)
481 : return true;
482 :
: /* The remaining cases all require a single-entry loc list. */
483 41591379 : if (v->locs != NULL
484 22848945 : && v->locs->next == NULL)
485 : {
: /* A constant not referencing any VALUE is invariant. */
486 22792149 : if (CONSTANT_P (v->locs->loc)
487 22792149 : && (GET_CODE (v->locs->loc) != CONST
488 173381 : || !references_value_p (v->locs->loc, 0)))
489 3503842 : return true;
490 : /* Although a debug expr may be bound to different expressions,
491 : we can preserve it as if it was constant, to get unification
492 : and proper merging within var-tracking. */
493 19288307 : if (GET_CODE (v->locs->loc) == DEBUG_EXPR
494 17774558 : || GET_CODE (v->locs->loc) == DEBUG_IMPLICIT_PTR
495 17273944 : || GET_CODE (v->locs->loc) == ENTRY_VALUE
496 17273717 : || GET_CODE (v->locs->loc) == DEBUG_PARAMETER_REF)
497 : return true;
498 :
499 : /* (plus (value V) (const_int C)) is invariant iff V is invariant. */
500 17258310 : if (GET_CODE (v->locs->loc) == PLUS
501 10726291 : && CONST_INT_P (XEXP (v->locs->loc, 1))
502 9565869 : && GET_CODE (XEXP (v->locs->loc, 0)) == VALUE
503 26068911 : && invariant_or_equiv_p (CSELIB_VAL_PTR (XEXP (v->locs->loc, 0))))
504 : return true;
505 : }
506 :
507 : return false;
508 : }
509 :
510 : /* Remove from hash table all VALUEs except constants, function
511 : invariants and VALUE equivalences. */
512 :
513 : int
514 44712952 : preserve_constants_and_equivs (cselib_val **x, void *info ATTRIBUTE_UNUSED)
515 : {
516 44712952 : cselib_val *v = *x;
517 :
: /* Move invariant/equivalence values into the preserved table;
: everything is removed from the main table either way. */
518 44712952 : if (invariant_or_equiv_p (v))
519 : {
520 17466013 : cselib_hasher::key lookup = {
521 17466013 : GET_MODE (v->val_rtx), v->val_rtx, VOIDmode
522 17466013 : };
523 17466013 : cselib_val **slot
524 34932026 : = cselib_preserved_hash_table->find_slot_with_hash (&lookup,
525 17466013 : v->hash, INSERT);
: /* V must not already be in the preserved table. */
526 17466013 : gcc_assert (!*slot);
527 17466013 : *slot = v;
528 : }
529 :
530 44712952 : cselib_hash_table->clear_slot (x);
531 :
: /* Nonzero return keeps traverse going. */
532 44712952 : return 1;
533 : }
534 :
535 : /* Remove all entries from the hash table, arranging for the next
536 : value to be numbered NUM. */
537 :
538 : void
539 100818284 : cselib_reset_table (unsigned int num)
540 : {
541 100818284 : unsigned int i;
542 :
543 100818284 : max_value_regs = 0;
544 :
: /* If a CFA base register is being kept, clear every used register
: except that one; otherwise clear them all. */
545 100818284 : if (cfa_base_preserved_val)
546 : {
547 4392005 : unsigned int regno = cfa_base_preserved_regno;
548 4392005 : unsigned int new_used_regs = 0;
549 31353094 : for (i = 0; i < n_used_regs; i++)
550 26961089 : if (used_regs[i] == regno)
551 : {
552 4392005 : new_used_regs = 1;
553 4392005 : continue;
554 : }
555 : else
556 22569084 : REG_VALUES (used_regs[i]) = 0;
: /* The preserved regno must have been among the used regs. */
557 4392005 : gcc_assert (new_used_regs == 1);
558 4392005 : n_used_regs = new_used_regs;
559 4392005 : used_regs[0] = regno;
560 4392005 : max_value_regs
561 4392005 : = hard_regno_nregs (regno,
562 4392005 : GET_MODE (cfa_base_preserved_val->locs->loc));
563 :
564 : /* If cfa_base is sp + const_int, need to preserve also the
565 : SP_DERIVED_VALUE_P value. */
566 4392005 : for (struct elt_loc_list *l = cfa_base_preserved_val->locs;
567 8784010 : l; l = l->next)
568 8784010 : if (GET_CODE (l->loc) == PLUS
569 4392005 : && GET_CODE (XEXP (l->loc, 0)) == VALUE
570 4392005 : && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))
571 13176015 : && CONST_INT_P (XEXP (l->loc, 1)))
572 : {
573 4392005 : if (! invariant_or_equiv_p (CSELIB_VAL_PTR (XEXP (l->loc, 0))))
574 : {
: /* Record sp_derived = cfa_base - C so the value survives
: preservation; done in the loc's own insn context. */
575 0 : rtx val = cfa_base_preserved_val->val_rtx;
576 0 : rtx_insn *save_cselib_current_insn = cselib_current_insn;
577 0 : cselib_current_insn = l->setting_insn;
578 0 : new_elt_loc_list (CSELIB_VAL_PTR (XEXP (l->loc, 0)),
579 0 : plus_constant (Pmode, val,
580 0 : -UINTVAL (XEXP (l->loc, 1))));
581 0 : cselib_current_insn = save_cselib_current_insn;
582 : }
583 : break;
584 : }
585 : }
586 : else
587 : {
588 370551635 : for (i = 0; i < n_used_regs; i++)
589 274125356 : REG_VALUES (used_regs[i]) = 0;
590 96426279 : n_used_regs = 0;
591 : }
592 :
: /* Either migrate invariants to the preserved table or drop
: everything outright. */
593 100818284 : if (cselib_preserve_constants)
594 49104957 : cselib_hash_table->traverse <void *, preserve_constants_and_equivs> (NULL);
595 : else
596 : {
597 96426279 : cselib_hash_table->empty ();
598 96426279 : gcc_checking_assert (!cselib_any_perm_equivs);
599 : }
600 :
601 100818284 : n_useless_values = 0;
602 100818284 : n_useless_debug_values = 0;
603 100818284 : n_debug_values = 0;
604 :
605 100818284 : next_uid = num;
606 :
607 100818284 : first_containing_mem = &dummy_val;
608 100818284 : }
609 :
610 : /* Return the number of the next value that will be generated. */
611 :
612 : unsigned int
613 4887844 : cselib_get_next_uid (void)
614 : {
: /* Plain accessor for the module-static counter. */
615 4887844 : return next_uid;
616 : }
617 :
618 : /* Search for X, whose hashcode is HASH, in CSELIB_HASH_TABLE,
619 : INSERTing if requested. When X is part of the address of a MEM,
620 : MEMMODE should specify the mode of the MEM. */
621 :
622 : static cselib_val **
623 715730980 : cselib_find_slot (machine_mode mode, rtx x, hashval_t hash,
624 : enum insert_option insert, machine_mode memmode)
625 : {
626 715730980 : cselib_val **slot = NULL;
627 715730980 : cselib_hasher::key lookup = { mode, x, memmode };
: /* Check the preserved table first (lookup only, never insert);
: fall back to the main table honoring INSERT. */
628 715730980 : if (cselib_preserve_constants)
629 160399256 : slot = cselib_preserved_hash_table->find_slot_with_hash (&lookup, hash,
630 : NO_INSERT);
631 160399256 : if (!slot)
632 661016865 : slot = cselib_hash_table->find_slot_with_hash (&lookup, hash, insert);
633 715730980 : return slot;
634 : }
635 :
636 : /* Return true if X contains a VALUE rtx. If ONLY_USELESS is set, we
637 : only return true for values which point to a cselib_val whose value
638 : element has been set to zero, which implies the cselib_val will be
639 : removed. */
640 :
641 : bool
642 1261531278 : references_value_p (const_rtx x, int only_useless)
643 : {
644 1261531278 : const enum rtx_code code = GET_CODE (x);
645 1261531278 : const char *fmt = GET_RTX_FORMAT (code);
646 1261531278 : int i, j;
647 :
: /* A VALUE matches; with ONLY_USELESS, only if it has no locs and
: is not preserved. */
648 1261531278 : if (GET_CODE (x) == VALUE
649 1261531278 : && (! only_useless
650 442506867 : || (CSELIB_VAL_PTR (x)->locs == 0 && !PRESERVED_VALUE_P (x))))
651 : return true;
652 :
: /* Recurse into all rtx operands and operand vectors. */
653 2881962988 : for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
654 : {
655 1623192225 : if (fmt[i] == 'e' && references_value_p (XEXP (x, i), only_useless))
656 : return true;
657 1621799377 : else if (fmt[i] == 'E')
658 45869531 : for (j = 0; j < XVECLEN (x, i); j++)
659 38462120 : if (references_value_p (XVECEXP (x, i, j), only_useless))
660 : return true;
661 : }
662 :
663 : return false;
664 : }
665 :
666 : /* Return true if V is a useless VALUE and can be discarded as such. */
667 :
668 : static bool
669 1200892865 : cselib_useless_value_p (cselib_val *v)
670 : {
: /* Useless = no locations left, and neither preserved nor an
: sp-derived value. */
671 1200892865 : return (v->locs == 0
672 79398044 : && !PRESERVED_VALUE_P (v->val_rtx)
673 1246787533 : && !SP_DERIVED_VALUE_P (v->val_rtx));
674 : }
675 :
676 : /* For all locations found in X, delete locations that reference useless
677 : values (i.e. values without any location). Called through
678 : htab_traverse. */
679 :
680 : int
681 552485492 : discard_useless_locs (cselib_val **x, void *info ATTRIBUTE_UNUSED)
682 : {
683 552485492 : cselib_val *v = *x;
684 552485492 : struct elt_loc_list **p = &v->locs;
685 552485492 : bool had_locs = v->locs != NULL;
686 552485492 : rtx_insn *setting_insn = v->locs ? v->locs->setting_insn : NULL;
687 :
688 1167757969 : while (*p)
689 : {
690 615272477 : if (references_value_p ((*p)->loc, 1))
691 1275202 : unchain_one_elt_loc_list (p);
692 : else
693 613997275 : p = &(*p)->next;
694 : }
695 :
696 552485492 : if (had_locs && cselib_useless_value_p (v))
697 : {
698 1141598 : if (setting_insn && DEBUG_INSN_P (setting_insn))
699 2 : n_useless_debug_values++;
700 : else
701 1141596 : n_useless_values++;
702 1141598 : values_became_useless = 1;
703 : }
704 552485492 : return 1;
705 : }
706 :
707 : /* If X is a value with no locations, remove it from the hashtable. */
708 :
709 : int
710 48565380 : discard_useless_values (cselib_val **x, void *info ATTRIBUTE_UNUSED)
711 : {
712 48565380 : cselib_val *v = *x;
713 :
714 48565380 : if (v->locs == 0 && cselib_useless_value_p (v))
715 : {
: /* Give the client a last look at V before it is freed. */
716 2360986 : if (cselib_discard_hook)
717 533377 : cselib_discard_hook (v);
718 :
: /* Break the VALUE->cselib_val link, drop the table slot and
: free the structure. */
719 2360986 : CSELIB_VAL_PTR (v->val_rtx) = NULL;
720 2360986 : cselib_hash_table->clear_slot (x);
721 2360986 : unchain_one_value (v);
722 2360986 : n_useless_values--;
723 : }
724 :
: /* Nonzero return keeps traverse going. */
725 48565380 : return 1;
726 : }
727 :
728 : /* Clean out useless values (i.e. those which no longer have locations
729 : associated with them) from the hash table. */
730 :
731 : static void
732 4420818 : remove_useless_values (void)
733 : {
734 4475330 : cselib_val **p, *v;
735 :
736 : /* First pass: eliminate locations that reference the value. That in
737 : turn can make more values useless. */
738 4475330 : do
739 : {
740 4475330 : values_became_useless = 0;
741 556960822 : cselib_hash_table->traverse <void *, discard_useless_locs> (NULL);
742 : }
743 4475330 : while (values_became_useless);
744 :
745 : /* Second pass: actually remove the values. */
746 :
: /* Compact the containing_mem list, keeping only live canonical
: values; the list ends at dummy_val. */
747 4420818 : p = &first_containing_mem;
748 4543278 : for (v = *p; v != &dummy_val; v = v->next_containing_mem)
749 122460 : if (v->locs && v == canonical_cselib_val (v))
750 : {
751 119177 : *p = v;
752 119177 : p = &(*p)->next_containing_mem;
753 : }
754 4420818 : *p = &dummy_val;
755 :
756 4420818 : if (cselib_preserve_constants)
757 4392005 : cselib_preserved_hash_table->traverse <void *,
758 4392005 : discard_useless_locs> (NULL);
759 4420818 : gcc_assert (!values_became_useless);
760 :
: /* Fold the debug-only counts back in before the final sweep. */
761 4420818 : n_useless_values += n_useless_debug_values;
762 4420818 : n_debug_values -= n_useless_debug_values;
763 4420818 : n_useless_debug_values = 0;
764 :
765 52986198 : cselib_hash_table->traverse <void *, discard_useless_values> (NULL);
766 :
767 4420818 : gcc_assert (!n_useless_values);
768 4420818 : }
769 :
770 : /* Arrange for a value to not be removed from the hash table even if
771 : it becomes useless. */
772 :
773 : void
774 45355764 : cselib_preserve_value (cselib_val *v)
775 : {
: /* Set the preserved flag on the VALUE rtx itself. */
776 45355764 : PRESERVED_VALUE_P (v->val_rtx) = 1;
777 45355764 : }
778 :
779 : /* Test whether a value is preserved. */
780 :
781 : bool
782 263295122 : cselib_preserved_value_p (cselib_val *v)
783 : {
: /* Plain accessor for the preserved flag. */
784 263295122 : return PRESERVED_VALUE_P (v->val_rtx);
785 : }
786 :
787 : /* Arrange for a REG value to be assumed constant through the whole function,
788 : never invalidated and preserved across cselib_reset_table calls. */
789 :
790 : void
791 991160 : cselib_preserve_cfa_base_value (cselib_val *v, unsigned int regno)
792 : {
: /* Only meaningful when constants are being preserved and V's first
: location really is a register. */
793 991160 : if (cselib_preserve_constants
794 991160 : && v->locs
795 991160 : && REG_P (v->locs->loc))
796 : {
797 496129 : cfa_base_preserved_val = v;
798 496129 : cfa_base_preserved_regno = regno;
799 : }
800 991160 : }
801 :
802 : /* Clean all non-constant expressions in the hash table, but retain
803 : their values. */
804 :
805 : void
806 4392005 : cselib_preserve_only_values (void)
807 : {
808 4392005 : int i;
809 :
: /* Invalidate every hard register... */
810 408456465 : for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
811 404064460 : cselib_invalidate_regno (i, reg_raw_mode[i]);
812 :
: /* ...and all of memory (callmem[0] is the whole-memory rtx). */
813 4392005 : cselib_invalidate_mem (callmem[0]);
814 :
815 4392005 : remove_useless_values ();
816 :
817 4392005 : gcc_assert (first_containing_mem == &dummy_val);
818 4392005 : }
819 :
820 : /* Arrange for a value to be marked as based on stack pointer
821 : for find_base_term purposes. */
822 :
823 : void
824 1912301 : cselib_set_value_sp_based (cselib_val *v)
825 : {
: /* Set the sp-based flag on the VALUE rtx itself. */
826 1912301 : SP_BASED_VALUE_P (v->val_rtx) = 1;
827 1912301 : }
828 :
829 : /* Test whether a value is based on stack pointer for
830 : find_base_term purposes. */
831 :
832 : bool
833 837002937 : cselib_sp_based_value_p (cselib_val *v)
834 : {
: /* Plain accessor for the sp-based flag. */
835 837002937 : return SP_BASED_VALUE_P (v->val_rtx);
836 : }
837 :
838 : /* Return the mode in which a register was last set. If X is not a
839 : register, return its mode. If the mode in which the register was
840 : set is not known, or the value was already clobbered, return
841 : VOIDmode. */
842 :
843 : machine_mode
844 113228629 : cselib_reg_set_mode (const_rtx x)
845 : {
846 113228629 : if (!REG_P (x))
847 34199651 : return GET_MODE (x);
848 :
: /* First REG_VALUES entry carries the mode the register was set in;
: a NULL elt means that mode is unknown/invalidated. */
849 79028978 : if (REG_VALUES (REGNO (x)) == NULL
850 79028978 : || REG_VALUES (REGNO (x))->elt == NULL)
851 : return VOIDmode;
852 :
853 23964670 : return GET_MODE (REG_VALUES (REGNO (x))->elt->val_rtx);
854 : }
855 :
856 : /* If x is a PLUS or an autoinc operation, expand the operation,
857 : storing the offset, if any, in *OFF. */
858 :
859 : static rtx
860 1459468796 : autoinc_split (rtx x, rtx *off, machine_mode memmode)
861 : {
: /* Peel a PLUS or autoincrement wrapper off X, recording the implied
: offset in *OFF. Pre/post-inc/dec offsets come from the size of
: the enclosing MEM, so MEMMODE must be known for those. */
862 1459468796 : switch (GET_CODE (x))
863 : {
864 806317241 : case PLUS:
865 806317241 : *off = XEXP (x, 1);
866 806317241 : x = XEXP (x, 0);
867 806317241 : break;
868 :
869 12659867 : case PRE_DEC:
870 12659867 : if (memmode == VOIDmode)
871 : return x;
872 :
873 25319734 : *off = gen_int_mode (-GET_MODE_SIZE (memmode), GET_MODE (x));
874 12659867 : x = XEXP (x, 0);
875 12659867 : break;
876 :
877 0 : case PRE_INC:
878 0 : if (memmode == VOIDmode)
879 : return x;
880 :
881 0 : *off = gen_int_mode (GET_MODE_SIZE (memmode), GET_MODE (x));
882 0 : x = XEXP (x, 0);
883 0 : break;
884 :
885 1639395 : case PRE_MODIFY:
886 1639395 : x = XEXP (x, 1);
887 1639395 : break;
888 :
889 1816432 : case POST_DEC:
890 1816432 : case POST_INC:
891 1816432 : case POST_MODIFY:
892 1816432 : x = XEXP (x, 0);
893 1816432 : break;
894 :
895 : default:
896 : break;
897 : }
898 :
: /* Canonicalize Pmode base + constant offset against known values,
: folding toward an SP_DERIVED_VALUE_P base when one exists. */
899 1459468796 : if (GET_MODE (x) == Pmode
900 1409829638 : && (REG_P (x) || MEM_P (x) || GET_CODE (x) == VALUE)
901 2268063046 : && (*off == NULL_RTX || CONST_INT_P (*off)))
902 : {
903 803920457 : cselib_val *e;
904 803920457 : if (GET_CODE (x) == VALUE)
905 513561857 : e = CSELIB_VAL_PTR (x);
906 : else
907 290358600 : e = cselib_lookup (x, GET_MODE (x), 0, memmode);
908 803920457 : if (e)
909 : {
: /* X is itself sp-derived with zero offset: return it bare. */
910 794706867 : if (SP_DERIVED_VALUE_P (e->val_rtx)
911 794706867 : && (*off == NULL_RTX || *off == const0_rtx))
912 : {
913 65671 : *off = NULL_RTX;
914 65671 : return e->val_rtx;
915 : }
: /* Otherwise fold X's own (sp_derived + C) loc into *OFF. */
916 1738677611 : for (struct elt_loc_list *l = e->locs; l; l = l->next)
917 1364905416 : if (GET_CODE (l->loc) == PLUS
918 585747577 : && GET_CODE (XEXP (l->loc, 0)) == VALUE
919 584560291 : && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))
920 1785776933 : && CONST_INT_P (XEXP (l->loc, 1)))
921 : {
922 420869001 : if (*off == NULL_RTX)
923 1458315 : *off = XEXP (l->loc, 1);
924 : else
925 419410686 : *off = plus_constant (Pmode, *off,
926 419410686 : INTVAL (XEXP (l->loc, 1)));
: /* Normalize a zero offset to "no offset". */
927 420869001 : if (*off == const0_rtx)
928 258107534 : *off = NULL_RTX;
929 420869001 : return XEXP (l->loc, 0);
930 : }
931 : }
932 : }
933 : return x;
934 : }
935 :
936 : /* Return true if we can prove that X and Y contain the same value,
937 : taking our gathered information into account. MEMMODE holds the
938 : mode of the enclosing MEM, if any, as required to deal with autoinc
939 : addressing modes. If X and Y are not (known to be) part of
940 : addresses, MEMMODE should be VOIDmode. */
941 :
bool
rtx_equal_for_cselib_1 (rtx x, rtx y, machine_mode memmode, int depth)
{
  enum rtx_code code;
  const char *fmt;
  int i;

  /* Replace registers and memory references by their known VALUEs so
     that equivalent expressions compare equal.  */
  if (REG_P (x) || MEM_P (x))
    {
      cselib_val *e = cselib_lookup (x, GET_MODE (x), 0, memmode);

      if (e)
	x = e->val_rtx;
    }

  if (REG_P (y) || MEM_P (y))
    {
      cselib_val *e = cselib_lookup (y, GET_MODE (y), 0, memmode);

      if (e)
	y = e->val_rtx;
    }

  if (x == y)
    return true;

  if (GET_CODE (x) == VALUE)
    {
      cselib_val *e = canonical_cselib_val (CSELIB_VAL_PTR (x));
      struct elt_loc_list *l;

      /* Two VALUEs are equal iff they canonicalize to the same value.  */
      if (GET_CODE (y) == VALUE)
	return e == canonical_cselib_val (CSELIB_VAL_PTR (y));

      /* An sp-derived VALUE may equal a Pmode address whose
	 autoinc_split base is that same VALUE with no offset.  */
      if ((SP_DERIVED_VALUE_P (x)
	   || SP_DERIVED_VALUE_P (e->val_rtx))
	  && GET_MODE (y) == Pmode)
	{
	  rtx yoff = NULL;
	  rtx yr = autoinc_split (y, &yoff, memmode);
	  if ((yr == x || yr == e->val_rtx) && yoff == NULL_RTX)
	    return true;
	}

      /* Bound the recursion depth.  */
      if (depth == 128)
	return false;

      /* Otherwise compare Y against each non-trivial location of X.  */
      for (l = e->locs; l; l = l->next)
	{
	  rtx t = l->loc;

	  /* Avoid infinite recursion.  We know we have the canonical
	     value, so we can just skip any values in the equivalence
	     list.  */
	  if (REG_P (t) || MEM_P (t) || GET_CODE (t) == VALUE)
	    continue;
	  else if (rtx_equal_for_cselib_1 (t, y, memmode, depth + 1))
	    return true;
	}

      return false;
    }
  else if (GET_CODE (y) == VALUE)
    {
      /* Mirror image of the VALUE-on-the-left case above.  */
      cselib_val *e = canonical_cselib_val (CSELIB_VAL_PTR (y));
      struct elt_loc_list *l;

      if ((SP_DERIVED_VALUE_P (y)
	   || SP_DERIVED_VALUE_P (e->val_rtx))
	  && GET_MODE (x) == Pmode)
	{
	  rtx xoff = NULL;
	  rtx xr = autoinc_split (x, &xoff, memmode);
	  if ((xr == y || xr == e->val_rtx) && xoff == NULL_RTX)
	    return true;
	}

      if (depth == 128)
	return false;

      for (l = e->locs; l; l = l->next)
	{
	  rtx t = l->loc;

	  if (REG_P (t) || MEM_P (t) || GET_CODE (t) == VALUE)
	    continue;
	  else if (rtx_equal_for_cselib_1 (x, t, memmode, depth + 1))
	    return true;
	}

      return false;
    }

  if (GET_MODE (x) != GET_MODE (y))
    return false;

  /* For differing codes -- or Pmode PLUSes with constant offsets,
     which may still match after canonicalization -- try comparing the
     autoinc_split bases and offsets separately.  */
  if (GET_CODE (x) != GET_CODE (y)
      || (GET_CODE (x) == PLUS
	  && GET_MODE (x) == Pmode
	  && CONST_INT_P (XEXP (x, 1))
	  && CONST_INT_P (XEXP (y, 1))))
    {
      rtx xorig = x, yorig = y;
      rtx xoff = NULL, yoff = NULL;

      x = autoinc_split (x, &xoff, memmode);
      y = autoinc_split (y, &yoff, memmode);

      /* Don't recurse if nothing changed.  */
      if (x != xorig || y != yorig)
	{
	  /* One side has an offset and the other has none: unequal.  */
	  if (!xoff != !yoff)
	    return false;

	  if (xoff && !rtx_equal_for_cselib_1 (xoff, yoff, memmode, depth))
	    return false;

	  return rtx_equal_for_cselib_1 (x, y, memmode, depth);
	}

      if (GET_CODE (xorig) != GET_CODE (yorig))
	return false;
    }

  /* These won't be handled correctly by the code below.  */
  switch (GET_CODE (x))
    {
    CASE_CONST_UNIQUE:
    case DEBUG_EXPR:
      return false;

    case CONST_VECTOR:
      if (!same_vector_encodings_p (x, y))
	return false;
      break;

    case DEBUG_IMPLICIT_PTR:
      return DEBUG_IMPLICIT_PTR_DECL (x)
	     == DEBUG_IMPLICIT_PTR_DECL (y);

    case DEBUG_PARAMETER_REF:
      return DEBUG_PARAMETER_REF_DECL (x)
	     == DEBUG_PARAMETER_REF_DECL (y);

    case ENTRY_VALUE:
      /* ENTRY_VALUEs are function invariant, it is thus undesirable to
	 use rtx_equal_for_cselib_1 to compare the operands.  */
      return rtx_equal_p (ENTRY_VALUE_EXP (x), ENTRY_VALUE_EXP (y));

    case LABEL_REF:
      return label_ref_label (x) == label_ref_label (y);

    case REG:
      return REGNO (x) == REGNO (y);

    case MEM:
      /* We have to compare any autoinc operations in the addresses
	 using this MEM's mode.  */
      return rtx_equal_for_cselib_1 (XEXP (x, 0), XEXP (y, 0), GET_MODE (x),
				     depth);

    default:
      break;
    }

  /* Generic structural comparison driven by the rtx format string.  */
  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int j;

      switch (fmt[i])
	{
	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return false;
	  break;

	case 'n':
	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return false;
	  break;

	case 'L':
	  if (XLOC (x, i) != XLOC (y, i))
	    return false;
	  break;

	case 'p':
	  if (maybe_ne (SUBREG_BYTE (x), SUBREG_BYTE (y)))
	    return false;
	  break;

	case 'V':
	case 'E':
	  /* Two vectors must have the same length.  */
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return false;

	  /* And the corresponding elements must match.  */
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (! rtx_equal_for_cselib_1 (XVECEXP (x, i, j),
					  XVECEXP (y, i, j), memmode, depth))
	      return false;
	  break;

	case 'e':
	  /* For commutative codes, also try the swapped operand order.  */
	  if (i == 1
	      && targetm.commutative_p (x, UNKNOWN)
	      && rtx_equal_for_cselib_1 (XEXP (x, 1), XEXP (y, 0), memmode,
					 depth)
	      && rtx_equal_for_cselib_1 (XEXP (x, 0), XEXP (y, 1), memmode,
					 depth))
	    return true;
	  if (! rtx_equal_for_cselib_1 (XEXP (x, i), XEXP (y, i), memmode,
					depth))
	    return false;
	  break;

	case 'S':
	case 's':
	  if (strcmp (XSTR (x, i), XSTR (y, i)))
	    return false;
	  break;

	case 'u':
	  /* These are just backpointers, so they don't matter.  */
	  break;

	case '0':
	case 't':
	  break;

	  /* It is believed that rtx's at this level will never
	     contain anything but integers and other rtx's,
	     except for within LABEL_REFs and SYMBOL_REFs.  */
	default:
	  gcc_unreachable ();
	}
    }
  return true;
}
1186 :
1187 : /* Wrapper for rtx_equal_for_cselib_p to determine whether a SET is
1188 : truly redundant, taking into account aliasing information. */
bool
cselib_redundant_set_p (rtx set)
{
  gcc_assert (GET_CODE (set) == SET);
  rtx dest = SET_DEST (set);
  /* If the destination's recorded mode differs from its current mode,
     the previous value does not cover this set.  */
  if (cselib_reg_set_mode (dest) != GET_MODE (dest))
    return false;

  /* A volatile source, or a source with a different value, means the
     set is not redundant.  */
  rtx src = SET_SRC (set);
  if ((MEM_P (src) && MEM_VOLATILE_P (src))
      || !rtx_equal_for_cselib_p (dest, src))
    return false;

  /* Strip wrappers to find the real destination.  */
  while (GET_CODE (dest) == SUBREG
	 || GET_CODE (dest) == ZERO_EXTRACT
	 || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  /* Only stores need the extra TBAA checks below.  */
  if (!flag_strict_aliasing || !MEM_P (dest))
    return true;

  if (MEM_VOLATILE_P (dest))
    return false;

  /* For a store we need to check that suppressing it will not change
     the effective alias set.  */
  rtx dest_addr = XEXP (dest, 0);

  /* Lookup the equivalents to the original dest (rather than just the
     MEM).  */
  cselib_val *src_val = cselib_lookup (SET_DEST (set),
				       GET_MODE (SET_DEST (set)),
				       0, VOIDmode);

  if (src_val)
    {
      /* Walk the list of source equivalents to find the MEM accessing
	 the same location.  */
      for (elt_loc_list *l = src_val->locs; l; l = l->next)
	{
	  rtx src_equiv = l->loc;
	  while (GET_CODE (src_equiv) == SUBREG
		 || GET_CODE (src_equiv) == ZERO_EXTRACT
		 || GET_CODE (src_equiv) == STRICT_LOW_PART)
	    src_equiv = XEXP (src_equiv, 0);

	  if (MEM_P (src_equiv))
	    {
	      /* Match the MEMs by comparing the addresses.  We can
		 only remove the later store if the earlier aliases at
		 least all the accesses of the later one.  */
	      if (rtx_equal_for_cselib_1 (dest_addr, XEXP (src_equiv, 0),
					  GET_MODE (dest), 0))
		return mems_same_for_tbaa_p (src_equiv, dest);
	    }
	}
    }

  /* We failed to find a recorded value in the cselib history, so try
     the source of this set; this catches cases such as *p = *q when p
     and q have the same value.  */
  while (GET_CODE (src) == SUBREG)
    src = XEXP (src, 0);

  if (MEM_P (src)
      && rtx_equal_for_cselib_1 (dest_addr, XEXP (src, 0), GET_MODE (dest), 0))
    return mems_same_for_tbaa_p (src, dest);

  return false;
}
1259 :
1260 : /* Helper function for cselib_hash_rtx. Arguments like for cselib_hash_rtx,
1261 : except that it hashes (plus:P x c). */
1262 :
static hashval_t
cselib_hash_plus_const_int (rtx x, HOST_WIDE_INT c, int create,
			    machine_mode memmode)
{
  cselib_val *e = cselib_lookup (x, GET_MODE (x), create, memmode);
  if (! e)
    return 0;

  /* Canonicalize onto an sp-derived base if the value has a
     (plus (sp-derived VALUE) (const_int)) location, folding that
     constant into C so equivalent addresses hash identically.  */
  if (! SP_DERIVED_VALUE_P (e->val_rtx))
    for (struct elt_loc_list *l = e->locs; l; l = l->next)
      if (GET_CODE (l->loc) == PLUS
	  && GET_CODE (XEXP (l->loc, 0)) == VALUE
	  && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))
	  && CONST_INT_P (XEXP (l->loc, 1)))
	{
	  e = CSELIB_VAL_PTR (XEXP (l->loc, 0));
	  c = trunc_int_for_mode (c + UINTVAL (XEXP (l->loc, 1)), Pmode);
	  break;
	}
  /* (plus x 0) hashes exactly like x itself.  */
  if (c == 0)
    return e->hash;

  inchash::hash hash;
  hash.add_int (PLUS);
  hash.add_int (GET_MODE (x));
  hash.merge_hash (e->hash);
  hash.add_hwi (c);

  /* Never return 0 for a successful hash; see cselib_hash_rtx.  */
  return hash.end () ? hash.end () : 1 + (unsigned int) PLUS;
}
1293 :
1294 : /* Hash an rtx. Return 0 if we couldn't hash the rtx.
1295 : For registers and memory locations, we look up their cselib_val structure
1296 : and return its VALUE element.
1297 : Possible reasons for return 0 are: the object is volatile, or we couldn't
1298 : find a register or memory location in the table and CREATE is zero. If
1299 : CREATE is nonzero, table elts are created for regs and mem.
1300 : N.B. this hash function returns the same hash value for RTXes that
1301 : differ only in the order of operands, thus it is suitable for comparisons
1302 : that take commutativity into account.
1303 : If we wanted to also support associative rules, we'd have to use a different
1304 : strategy to avoid returning spurious 0, e.g. return ~(~0U >> 1) .
1305 : MEMMODE indicates the mode of an enclosing MEM, and it's only
1306 : used to compute autoinc values.
1307 : We used to have a MODE argument for hashing for CONST_INTs, but that
1308 : didn't make sense, since it caused spurious hash differences between
1309 : (set (reg:SI 1) (const_int))
1310 : (plus:SI (reg:SI 2) (reg:SI 1))
1311 : and
1312 : (plus:SI (reg:SI 2) (const_int))
1313 : If the mode is important in any context, it must be checked specifically
1314 : in a comparison anyway, since relying on hash differences is unsafe. */
1315 :
static hashval_t
cselib_hash_rtx (rtx x, int create, machine_mode memmode)
{
  cselib_val *e;
  poly_int64 offset;
  int i, j;
  enum rtx_code code;
  const char *fmt;
  inchash::hash hash;

  code = GET_CODE (x);
  hash.add_int (code);
  hash.add_int (GET_MODE (x));

  switch (code)
    {
    case VALUE:
      /* A VALUE already carries its hash.  */
      e = CSELIB_VAL_PTR (x);
      return e->hash;

    case MEM:
    case REG:
      /* Hash registers and memory through their cselib values.  */
      e = cselib_lookup (x, GET_MODE (x), create, memmode);
      if (! e)
	return 0;

      return e->hash;

    case DEBUG_EXPR:
      hash.add_int (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x)));
      return hash.end () ? hash.end() : (unsigned int) DEBUG_EXPR;

    case DEBUG_IMPLICIT_PTR:
      hash.add_int (DECL_UID (DEBUG_IMPLICIT_PTR_DECL (x)));
      return hash.end () ? hash.end () : (unsigned int) DEBUG_IMPLICIT_PTR;

    case DEBUG_PARAMETER_REF:
      hash.add_int (DECL_UID (DEBUG_PARAMETER_REF_DECL (x)));
      return hash.end () ? hash.end () : (unsigned int) DEBUG_PARAMETER_REF;

    case ENTRY_VALUE:
      /* ENTRY_VALUEs are function invariant, thus try to avoid
	 recursing on argument if ENTRY_VALUE is one of the
	 forms emitted by expand_debug_expr, otherwise
	 ENTRY_VALUE hash would depend on the current value
	 in some register or memory.  */
      if (REG_P (ENTRY_VALUE_EXP (x)))
	hash.add_int ((unsigned int) REG
		      + (unsigned int) GET_MODE (ENTRY_VALUE_EXP (x))
		      + (unsigned int) REGNO (ENTRY_VALUE_EXP (x)));
      else if (MEM_P (ENTRY_VALUE_EXP (x))
	       && REG_P (XEXP (ENTRY_VALUE_EXP (x), 0)))
	hash.add_int ((unsigned int) MEM
		      + (unsigned int) GET_MODE (XEXP (ENTRY_VALUE_EXP (x), 0))
		      + (unsigned int) REGNO (XEXP (ENTRY_VALUE_EXP (x), 0)));
      else
	hash.add_int (cselib_hash_rtx (ENTRY_VALUE_EXP (x), create, memmode));
      return hash.end () ? hash.end () : (unsigned int) ENTRY_VALUE;

    case CONST_INT:
      hash.add_hwi (UINTVAL (x));
      return hash.end () ? hash.end () : (unsigned int) CONST_INT;

    case CONST_WIDE_INT:
      for (i = 0; i < CONST_WIDE_INT_NUNITS (x); i++)
	hash.add_hwi (CONST_WIDE_INT_ELT (x, i));
      return hash.end () ? hash.end () : (unsigned int) CONST_WIDE_INT;

    case CONST_POLY_INT:
      {
	for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
	  hash.add_wide_int (CONST_POLY_INT_COEFFS (x)[i]);
	return hash.end () ? hash.end () : (unsigned int) CONST_POLY_INT;
      }

    case CONST_DOUBLE:
      /* This is like the general case, except that it only counts
	 the integers representing the constant.  */
      if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (x) == VOIDmode)
	{
	  hash.add_hwi (CONST_DOUBLE_LOW (x));
	  hash.add_hwi (CONST_DOUBLE_HIGH (x));
	}
      else
	hash.merge_hash (real_hash (CONST_DOUBLE_REAL_VALUE (x)));
      return hash.end () ? hash.end () : (unsigned int) CONST_DOUBLE;

    case CONST_FIXED:
      hash.merge_hash (fixed_hash (CONST_FIXED_VALUE (x)));
      return hash.end () ? hash.end () : (unsigned int) CONST_FIXED;

    case CONST_VECTOR:
      {
	int units;
	rtx elt;

	/* Hash only the encoded elements of the vector.  */
	units = const_vector_encoded_nelts (x);

	for (i = 0; i < units; ++i)
	  {
	    elt = CONST_VECTOR_ENCODED_ELT (x, i);
	    hash.merge_hash (cselib_hash_rtx (elt, 0, memmode));
	  }

	return hash.end () ? hash.end () : (unsigned int) CONST_VECTOR;
      }

      /* Assume there is only one rtx object for any given label.  */
    case LABEL_REF:
      /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
	 differences and differences between each stage's debugging dumps.  */
      hash.add_int (CODE_LABEL_NUMBER (label_ref_label (x)));
      return hash.end () ? hash.end () : (unsigned int) LABEL_REF;

    case SYMBOL_REF:
      {
	/* Don't hash on the symbol's address to avoid bootstrap differences.
	   Different hash values may cause expressions to be recorded in
	   different orders and thus different registers to be used in the
	   final assembler.  This also avoids differences in the dump files
	   between various stages.  */
	const char *p = (const char *) XSTR (x, 0);

	if (*p)
	  hash.add (p, strlen (p));

	return hash.end () ? hash.end () : (unsigned int) SYMBOL_REF;
      }

    case PRE_DEC:
    case PRE_INC:
      {
	/* We can't compute these without knowing the MEM mode.  */
	gcc_assert (memmode != VOIDmode);
	offset = GET_MODE_SIZE (memmode);
	if (code == PRE_DEC)
	  offset = -offset;
	/* Adjust the hash so that (mem:MEMMODE (pre_* (reg))) hashes
	   like (mem:MEMMODE (plus (reg) (const_int I))).  */
	if (GET_MODE (x) == Pmode
	    && (REG_P (XEXP (x, 0))
		|| MEM_P (XEXP (x, 0))
		|| GET_CODE (XEXP (x, 0)) == VALUE))
	  {
	    HOST_WIDE_INT c;
	    if (offset.is_constant (&c))
	      return cselib_hash_plus_const_int (XEXP (x, 0),
						 trunc_int_for_mode (c, Pmode),
						 create, memmode);
	  }

	/* Fall back to hashing the base and the offset separately.  */
	hashval_t tem_hash = cselib_hash_rtx (XEXP (x, 0), create, memmode);
	if (tem_hash == 0)
	  return 0;
	hash.merge_hash (tem_hash);
	tem_hash = cselib_hash_rtx (gen_int_mode (offset, GET_MODE (x)),
				    create, memmode);
	if (tem_hash == 0)
	  return 0;
	hash.merge_hash (tem_hash);
	return hash.end () ? hash.end () : 1 + (unsigned) PLUS;
      }

    case PRE_MODIFY:
      {
	gcc_assert (memmode != VOIDmode);
	/* Hash the new-address operand only.  */
	hashval_t tem_hash = cselib_hash_rtx (XEXP (x, 1), create, memmode);
	if (tem_hash == 0)
	  return 0;
	hash.merge_hash (tem_hash);
	return hash.end () ? hash.end () : 1 + (unsigned) PRE_MODIFY;
      }

    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      {
	gcc_assert (memmode != VOIDmode);
	/* Post-modification addresses hash as the unmodified base.  */
	hashval_t tem_hash = cselib_hash_rtx (XEXP (x, 0), create, memmode);
	if (tem_hash == 0)
	  return 0;
	hash.merge_hash (tem_hash);
	return hash.end () ? hash.end () : 1 + (unsigned) code;
      }

    case PC:
    case CALL:
    case UNSPEC_VOLATILE:
      /* These are never hashable.  */
      return 0;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
	return 0;

      break;

    case PLUS:
      /* Canonicalize (plus:P base const_int) via the helper so it
	 matches sp-derived forms.  */
      if (GET_MODE (x) == Pmode
	  && (REG_P (XEXP (x, 0))
	      || MEM_P (XEXP (x, 0))
	      || GET_CODE (XEXP (x, 0)) == VALUE)
	  && CONST_INT_P (XEXP (x, 1)))
	return cselib_hash_plus_const_int (XEXP (x, 0), INTVAL (XEXP (x, 1)),
					   create, memmode);
      break;

    default:
      break;
    }

  /* Generic case: hash the operands according to the format string.  */
  i = GET_RTX_LENGTH (code) - 1;
  fmt = GET_RTX_FORMAT (code);

  /* Commutative codes get an order-insensitive combination so that
     (op a b) and (op b a) hash identically.  */
  if (COMMUTATIVE_P (x))
    {
      gcc_assert (i == 1 && fmt[0] == 'e' && fmt[1] == 'e');
      hashval_t tem1_hash = cselib_hash_rtx (XEXP (x, 1), create, memmode);
      if (tem1_hash == 0)
	return 0;
      hashval_t tem0_hash = cselib_hash_rtx (XEXP (x, 0), create, memmode);
      if (tem0_hash == 0)
	return 0;
      hash.add_commutative (tem0_hash, tem1_hash);
      return hash.end () ? hash.end () : 1 + (unsigned int) GET_CODE (x);
    }

  for (; i >= 0; i--)
    {
      switch (fmt[i])
	{
	case 'e':
	  {
	    rtx tem = XEXP (x, i);
	    hashval_t tem_hash = cselib_hash_rtx (tem, create, memmode);
	    if (tem_hash == 0)
	      return 0;
	    hash.merge_hash (tem_hash);
	  }
	  break;
	case 'E':
	  for (j = 0; j < XVECLEN (x, i); j++)
	    {
	      hashval_t tem_hash
		= cselib_hash_rtx (XVECEXP (x, i, j), create, memmode);
	      if (tem_hash == 0)
		return 0;
	      hash.merge_hash (tem_hash);
	    }
	  break;

	case 's':
	  {
	    const char *p = (const char *) XSTR (x, i);

	    if (p && *p)
	      hash.add (p, strlen (p));
	    break;
	  }

	case 'i':
	  hash.add_hwi (XINT (x, i));
	  break;

	case 'L':
	  hash.add_hwi (XLOC (x, i));
	  break;

	case 'p':
	  hash.add_int (constant_lower_bound (SUBREG_BYTE (x)));
	  break;

	case '0':
	case 't':
	  /* unused */
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* 0 means "couldn't hash", so substitute a nonzero fallback.  */
  return hash.end () ? hash.end () : 1 + (unsigned int) GET_CODE (x);
}
1599 :
1600 : /* Create a new value structure for VALUE and initialize it. The mode of the
1601 : value is MODE. */
1602 :
static inline cselib_val *
new_cselib_val (hashval_t hash, machine_mode mode, rtx x)
{
  cselib_val *e = cselib_val_pool.allocate ();

  /* Hash 0 means "unhashable" and uid 0 is reserved.  */
  gcc_assert (hash);
  gcc_assert (next_uid);

  e->hash = hash;
  e->uid = next_uid++;
  /* We use an alloc pool to allocate this RTL construct because it
     accounts for about 8% of the overall memory usage.  We know
     precisely when we can have VALUE RTXen (when cselib is active)
     so we don't need to put them in garbage collected memory.
     ??? Why should a VALUE be an RTX in the first place?  */
  e->val_rtx = (rtx_def*) value_pool.allocate ();
  memset (e->val_rtx, 0, RTX_HDR_SIZE);
  PUT_CODE (e->val_rtx, VALUE);
  PUT_MODE (e->val_rtx, mode);
  CSELIB_VAL_PTR (e->val_rtx) = e;
  e->addr_list = 0;
  e->locs = 0;
  e->next_containing_mem = 0;

  /* For a register with a multi-byte integer mode that already has
     recorded values, link any existing narrower-mode values of the
     same register to the lowpart subreg of this new value.  Skipped
     while processing a debug insn.  */
  scalar_int_mode int_mode;
  if (REG_P (x) && is_int_mode (mode, &int_mode)
      && GET_MODE_SIZE (int_mode) > 1
      && REG_VALUES (REGNO (x)) != NULL
      && (!cselib_current_insn || !DEBUG_INSN_P (cselib_current_insn)))
    {
      rtx copy = shallow_copy_rtx (x);
      scalar_int_mode narrow_mode_iter;
      FOR_EACH_MODE_UNTIL (narrow_mode_iter, int_mode)
	{
	  PUT_MODE_RAW (copy, narrow_mode_iter);
	  cselib_val *v = cselib_lookup (copy, narrow_mode_iter, 0, VOIDmode);
	  if (v)
	    {
	      rtx sub = lowpart_subreg (narrow_mode_iter, e->val_rtx, int_mode);
	      if (sub)
		new_elt_loc_list (v, sub);
	    }
	}
    }

  if (dump_file && (dump_flags & TDF_CSELIB))
    {
      fprintf (dump_file, "cselib value %u:%u ", e->uid, hash);
      if (flag_dump_noaddr || flag_dump_unnumbered)
	fputs ("# ", dump_file);
      else
	fprintf (dump_file, "%p ", (void*)e);
      print_rtl_single (dump_file, x);
      fputc ('\n', dump_file);
    }

  return e;
}
1661 :
1662 : /* ADDR_ELT is a value that is used as address. MEM_ELT is the value that
1663 : contains the data at this address. X is a MEM that represents the
1664 : value. Update the two value structures to represent this situation. */
1665 :
static void
add_mem_for_addr (cselib_val *addr_elt, cselib_val *mem_elt, rtx x)
{
  addr_elt = canonical_cselib_val (addr_elt);
  mem_elt = canonical_cselib_val (mem_elt);

  /* Avoid duplicates.  */
  addr_space_t as = MEM_ADDR_SPACE (x);
  for (elt_loc_list *l = mem_elt->locs; l; l = l->next)
    if (MEM_P (l->loc)
	&& CSELIB_VAL_PTR (XEXP (l->loc, 0)) == addr_elt
	&& MEM_ADDR_SPACE (l->loc) == as)
      {
	promote_debug_loc (l);
	return;
      }

  /* Cross-link: the address value records MEM_ELT in its addr_list,
     and MEM_ELT gains a MEM location whose address is the VALUE.  */
  addr_elt->addr_list = new_elt_list (addr_elt->addr_list, mem_elt);
  new_elt_loc_list (mem_elt,
		    replace_equiv_address_nv (x, addr_elt->val_rtx));
  /* Keep a chain of all values that contain memory references.  */
  if (mem_elt->next_containing_mem == NULL)
    {
      mem_elt->next_containing_mem = first_containing_mem;
      first_containing_mem = mem_elt;
    }
}
1692 :
1693 : /* Subroutine of cselib_lookup. Return a value for X, which is a MEM rtx.
1694 : If CREATE, make a new one if we haven't seen it before. */
1695 :
static cselib_val *
cselib_lookup_mem (rtx x, int create)
{
  machine_mode mode = GET_MODE (x);
  machine_mode addr_mode;
  cselib_val **slot;
  cselib_val *addr;
  cselib_val *mem_elt;

  /* Refuse to record volatile or BLKmode memory, memory when recording
     is disabled, or FP memory under -ffloat-store.  */
  if (MEM_VOLATILE_P (x) || mode == BLKmode
      || !cselib_record_memory
      || (FLOAT_MODE_P (mode) && flag_float_store))
    return 0;

  addr_mode = GET_MODE (XEXP (x, 0));
  if (addr_mode == VOIDmode)
    addr_mode = Pmode;

  /* Look up the value for the address.  */
  addr = cselib_lookup (XEXP (x, 0), addr_mode, create, mode);
  if (! addr)
    return 0;
  addr = canonical_cselib_val (addr);

  /* Find a value that describes a value of our mode at that address.  */
  addr_space_t as = MEM_ADDR_SPACE (x);
  for (elt_list *l = addr->addr_list; l; l = l->next)
    if (GET_MODE (l->elt->val_rtx) == mode)
      {
	/* Require a MEM location in the same address space.  */
	for (elt_loc_list *l2 = l->elt->locs; l2; l2 = l2->next)
	  if (MEM_P (l2->loc) && MEM_ADDR_SPACE (l2->loc) == as)
	    {
	      promote_debug_loc (l->elt->locs);
	      return l->elt;
	    }
      }

  if (! create)
    return 0;

  /* Create a fresh value for this MEM and enter it in the hash table.  */
  mem_elt = new_cselib_val (next_uid, mode, x);
  add_mem_for_addr (addr, mem_elt, x);
  slot = cselib_find_slot (mode, x, mem_elt->hash, INSERT, VOIDmode);
  *slot = mem_elt;
  return mem_elt;
}
1742 :
1743 : /* Search through the possible substitutions in P. We prefer a non reg
1744 : substitution because this allows us to expand the tree further. If
1745 : we find, just a reg, take the lowest regno. There may be several
1746 : non-reg results, we just take the first one because they will all
1747 : expand to the same place. */
1748 :
static rtx
expand_loc (struct elt_loc_list *p, struct expand_value_data *evd,
	    int max_depth)
{
  rtx reg_result = NULL;
  unsigned int regno = UINT_MAX;
  struct elt_loc_list *p_in = p;

  for (; p; p = p->next)
    {
      /* Return these right away to avoid returning stack pointer based
	 expressions for frame pointer and vice versa, which is something
	 that would confuse DSE.  See the comment in cselib_expand_value_rtx_1
	 for more details.  */
      if (REG_P (p->loc)
	  && (REGNO (p->loc) == STACK_POINTER_REGNUM
	      || REGNO (p->loc) == FRAME_POINTER_REGNUM
	      || REGNO (p->loc) == HARD_FRAME_POINTER_REGNUM
	      || REGNO (p->loc) == cfa_base_preserved_regno))
	return p->loc;
      /* Avoid infinite recursion trying to expand a reg into a
	 the same reg.  Among registers, remember only the lowest
	 regno not currently being expanded.  */
      if ((REG_P (p->loc))
	  && (REGNO (p->loc) < regno)
	  && !bitmap_bit_p (evd->regs_active, REGNO (p->loc)))
	{
	  reg_result = p->loc;
	  regno = REGNO (p->loc);
	}
      /* Avoid infinite recursion and do not try to expand the
	 value.  */
      else if (GET_CODE (p->loc) == VALUE
	       && CSELIB_VAL_PTR (p->loc)->locs == p_in)
	continue;
      else if (!REG_P (p->loc))
	{
	  /* A non-reg location: expand it recursively and return the
	     first successful expansion.  */
	  rtx result, note;
	  if (dump_file && (dump_flags & TDF_CSELIB))
	    {
	      print_inline_rtx (dump_file, p->loc, 0);
	      fprintf (dump_file, "\n");
	    }
	  /* (lo_sum … symbol) whose setting insn has a matching
	     REG_EQUAL note expands directly to the symbol.  */
	  if (GET_CODE (p->loc) == LO_SUM
	      && GET_CODE (XEXP (p->loc, 1)) == SYMBOL_REF
	      && p->setting_insn
	      && (note = find_reg_note (p->setting_insn, REG_EQUAL, NULL_RTX))
	      && XEXP (note, 0) == XEXP (p->loc, 1))
	    return XEXP (p->loc, 1);
	  result = cselib_expand_value_rtx_1 (p->loc, evd, max_depth - 1);
	  if (result)
	    return result;
	}

    }

  /* No non-reg location expanded; fall back to the best register.  */
  if (regno != UINT_MAX)
    {
      rtx result;
      if (dump_file && (dump_flags & TDF_CSELIB))
	fprintf (dump_file, "r%d\n", regno);

      result = cselib_expand_value_rtx_1 (reg_result, evd, max_depth - 1);
      if (result)
	return result;
    }

  if (dump_file && (dump_flags & TDF_CSELIB))
    {
      if (reg_result)
	{
	  print_inline_rtx (dump_file, reg_result, 0);
	  fprintf (dump_file, "\n");
	}
      else
	fprintf (dump_file, "NULL\n");
    }
  return reg_result;
}
1828 :
1829 : /* Forward substitute and expand an expression out to its roots.
1830 : This is the opposite of common subexpression. Because local value
1831 : numbering is such a weak optimization, the expanded expression is
 1832 :    pretty much unique (not from a pointer equals point of view but
 1833 :    from a tree shape point of view).
1834 :
1835 : This function returns NULL if the expansion fails. The expansion
1836 : will fail if there is no value number for one of the operands or if
1837 : one of the operands has been overwritten between the current insn
1838 : and the beginning of the basic block. For instance x has no
1839 : expansion in:
1840 :
1841 : r1 <- r1 + 3
1842 : x <- r1 + 8
1843 :
1844 : REGS_ACTIVE is a scratch bitmap that should be clear when passing in.
1845 : It is clear on return. */
1846 :
1847 : rtx
1848 37418914 : cselib_expand_value_rtx (rtx orig, bitmap regs_active, int max_depth)
1849 : {
1850 37418914 : struct expand_value_data evd;
1851 :
1852 37418914 : evd.regs_active = regs_active;
1853 37418914 : evd.callback = NULL;
1854 37418914 : evd.callback_arg = NULL;
1855 37418914 : evd.dummy = false;
1856 :
1857 37418914 : return cselib_expand_value_rtx_1 (orig, &evd, max_depth);
1858 : }
1859 :
1860 : /* Same as cselib_expand_value_rtx, but using a callback to try to
1861 : resolve some expressions. The CB function should return ORIG if it
1862 : can't or does not want to deal with a certain RTX. Any other
1863 : return value, including NULL, will be used as the expansion for
1864 : VALUE, without any further changes. */
1865 :
1866 : rtx
1867 89849439 : cselib_expand_value_rtx_cb (rtx orig, bitmap regs_active, int max_depth,
1868 : cselib_expand_callback cb, void *data)
1869 : {
1870 89849439 : struct expand_value_data evd;
1871 :
1872 89849439 : evd.regs_active = regs_active;
1873 89849439 : evd.callback = cb;
1874 89849439 : evd.callback_arg = data;
1875 89849439 : evd.dummy = false;
1876 :
1877 89849439 : return cselib_expand_value_rtx_1 (orig, &evd, max_depth);
1878 : }
1879 :
1880 : /* Similar to cselib_expand_value_rtx_cb, but no rtxs are actually copied
1881 : or simplified. Useful to find out whether cselib_expand_value_rtx_cb
1882 : would return NULL or non-NULL, without allocating new rtx. */
1883 :
1884 : bool
1885 0 : cselib_dummy_expand_value_rtx_cb (rtx orig, bitmap regs_active, int max_depth,
1886 : cselib_expand_callback cb, void *data)
1887 : {
1888 0 : struct expand_value_data evd;
1889 :
1890 0 : evd.regs_active = regs_active;
1891 0 : evd.callback = cb;
1892 0 : evd.callback_arg = data;
1893 0 : evd.dummy = true;
1894 :
1895 0 : return cselib_expand_value_rtx_1 (orig, &evd, max_depth) != NULL;
1896 : }
1897 :
1898 : /* Internal implementation of cselib_expand_value_rtx and
1899 : cselib_expand_value_rtx_cb. */
1900 :
static rtx
cselib_expand_value_rtx_1 (rtx orig, struct expand_value_data *evd,
			   int max_depth)
{
  /* Recursively rewrite ORIG in terms of its root definitions, using the
     callback and scratch state in EVD.  Returns NULL if no expansion can
     be found within MAX_DEPTH recursion levels.  */
  rtx copy, scopy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;
  machine_mode mode;

  code = GET_CODE (orig);

  /* For the context of dse, if we end up expand into a huge tree, we
     will not have a useful address, so we might as well just give up
     quickly.  */
  if (max_depth <= 0)
    return NULL;

  switch (code)
    {
    case REG:
      {
	struct elt_list *l = REG_VALUES (REGNO (orig));

	/* Skip the leading NULL entry that marks the setting value slot.  */
	if (l && l->elt == NULL)
	  l = l->next;
	for (; l; l = l->next)
	  if (GET_MODE (l->elt->val_rtx) == GET_MODE (orig))
	    {
	      rtx result;
	      unsigned regno = REGNO (orig);

	      /* The only thing that we are not willing to do (this
		 is requirement of dse and if others potential uses
		 need this function we should add a parm to control
		 it) is that we will not substitute the
		 STACK_POINTER_REGNUM, FRAME_POINTER or the
		 HARD_FRAME_POINTER.

		 These expansions confuse the code that notices that
		 stores into the frame go dead at the end of the
		 function and that the frame is not affected by calls
		 to subroutines.  If you allow the
		 STACK_POINTER_REGNUM substitution, then dse will
		 think that parameter pushing also goes dead which is
		 wrong.  If you allow the FRAME_POINTER or the
		 HARD_FRAME_POINTER then you lose the opportunity to
		 make the frame assumptions.  */
	      if (regno == STACK_POINTER_REGNUM
		  || regno == FRAME_POINTER_REGNUM
		  || regno == HARD_FRAME_POINTER_REGNUM
		  || regno == cfa_base_preserved_regno)
		return orig;

	      /* Mark REGNO as in-flight so expand_loc does not pick it
		 again while expanding its own locations.  */
	      bitmap_set_bit (evd->regs_active, regno);

	      if (dump_file && (dump_flags & TDF_CSELIB))
		fprintf (dump_file, "expanding: r%d into: ", regno);

	      result = expand_loc (l->elt->locs, evd, max_depth);
	      bitmap_clear_bit (evd->regs_active, regno);

	      if (result)
		return result;
	      else
		return orig;
	    }
	return orig;
      }

    CASE_CONST_ANY:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case SCRATCH:
      /* SCRATCH must be shared because they represent distinct values.  */
      return orig;
    case CLOBBER:
      if (REG_P (XEXP (orig, 0)) && HARD_REGISTER_NUM_P (REGNO (XEXP (orig, 0))))
	return orig;
      break;

    case CONST:
      if (shared_const_p (orig))
	return orig;
      break;

    case SUBREG:
      {
	rtx subreg;

	/* Give the callback a chance to resolve the SUBREG first.  */
	if (evd->callback)
	  {
	    subreg = evd->callback (orig, evd->regs_active, max_depth,
				    evd->callback_arg);
	    if (subreg != orig)
	      return subreg;
	  }

	subreg = cselib_expand_value_rtx_1 (SUBREG_REG (orig), evd,
					    max_depth - 1);
	if (!subreg)
	  return NULL;
	scopy = simplify_gen_subreg (GET_MODE (orig), subreg,
				     GET_MODE (SUBREG_REG (orig)),
				     SUBREG_BYTE (orig));
	/* Reject expansions that are still a SUBREG of something other
	   than a REG or MEM; such a subreg is not a useful address.  */
	if (scopy == NULL
	    || (GET_CODE (scopy) == SUBREG
		&& !REG_P (SUBREG_REG (scopy))
		&& !MEM_P (SUBREG_REG (scopy))))
	  return NULL;

	return scopy;
      }

    case VALUE:
      {
	rtx result;

	if (dump_file && (dump_flags & TDF_CSELIB))
	  {
	    fputs ("\nexpanding ", dump_file);
	    print_rtl_single (dump_file, orig);
	    fputs (" into...", dump_file);
	  }

	/* Let the callback resolve VALUEs before walking their
	   location lists.  */
	if (evd->callback)
	  {
	    result = evd->callback (orig, evd->regs_active, max_depth,
				    evd->callback_arg);

	    if (result != orig)
	      return result;
	  }

	result = expand_loc (CSELIB_VAL_PTR (orig)->locs, evd, max_depth);
	return result;
      }

    case DEBUG_EXPR:
      if (evd->callback)
	return evd->callback (orig, evd->regs_active, max_depth,
			      evd->callback_arg);
      return orig;

    default:
      break;
    }

  /* Copy the various flags, fields, and other information.  We assume
     that all fields need copying, and then clear the fields that should
     not be copied.  That is the sensible default behavior, and forces
     us to explicitly document why we are *not* copying a flag.  */
  if (evd->dummy)
    copy = NULL;
  else
    copy = shallow_copy_rtx (orig);

  format_ptr = GET_RTX_FORMAT (code);

  /* Expand every rtx ('e') and rtvec ('E'/'V') operand recursively,
     failing the whole expansion if any operand fails.  */
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
	if (XEXP (orig, i) != NULL)
	  {
	    rtx result = cselib_expand_value_rtx_1 (XEXP (orig, i), evd,
						    max_depth - 1);
	    if (!result)
	      return NULL;
	    if (copy)
	      XEXP (copy, i) = result;
	  }
	break;

      case 'E':
      case 'V':
	if (XVEC (orig, i) != NULL)
	  {
	    if (copy)
	      XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
	    for (j = 0; j < XVECLEN (orig, i); j++)
	      {
		rtx result = cselib_expand_value_rtx_1 (XVECEXP (orig, i, j),
							evd, max_depth - 1);
		if (!result)
		  return NULL;
		if (copy)
		  XVECEXP (copy, i, j) = result;
	      }
	  }
	break;

      case 't':
      case 'w':
      case 'i':
      case 'L':
      case 's':
      case 'S':
      case 'T':
      case 'u':
      case 'B':
      case '0':
	/* These are left unchanged.  */
	break;

      default:
	gcc_unreachable ();
      }

  /* In dummy mode nothing was copied; any non-NULL return means
     "an expansion exists".  */
  if (evd->dummy)
    return orig;

  mode = GET_MODE (copy);
  /* If an operand has been simplified into CONST_INT, which doesn't
     have a mode and the mode isn't derivable from whole rtx's mode,
     try simplify_*_operation first with mode from original's operand
     and as a fallback wrap CONST_INT into gen_rtx_CONST.  */
  scopy = copy;
  switch (GET_RTX_CLASS (code))
    {
    case RTX_UNARY:
      if (CONST_INT_P (XEXP (copy, 0))
	  && GET_MODE (XEXP (orig, 0)) != VOIDmode)
	{
	  scopy = simplify_unary_operation (code, mode, XEXP (copy, 0),
					    GET_MODE (XEXP (orig, 0)));
	  if (scopy)
	    return scopy;
	}
      break;
    case RTX_COMM_ARITH:
    case RTX_BIN_ARITH:
      /* These expressions can derive operand modes from the whole rtx's mode.  */
      break;
    case RTX_TERNARY:
    case RTX_BITFIELD_OPS:
      if (CONST_INT_P (XEXP (copy, 0))
	  && GET_MODE (XEXP (orig, 0)) != VOIDmode)
	{
	  scopy = simplify_ternary_operation (code, mode,
					      GET_MODE (XEXP (orig, 0)),
					      XEXP (copy, 0), XEXP (copy, 1),
					      XEXP (copy, 2));
	  if (scopy)
	    return scopy;
	}
      break;
    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      if (CONST_INT_P (XEXP (copy, 0))
	  && GET_MODE (XEXP (copy, 1)) == VOIDmode
	  && (GET_MODE (XEXP (orig, 0)) != VOIDmode
	      || GET_MODE (XEXP (orig, 1)) != VOIDmode))
	{
	  scopy = simplify_relational_operation (code, mode,
						 (GET_MODE (XEXP (orig, 0))
						  != VOIDmode)
						 ? GET_MODE (XEXP (orig, 0))
						 : GET_MODE (XEXP (orig, 1)),
						 XEXP (copy, 0),
						 XEXP (copy, 1));
	  if (scopy)
	    return scopy;
	}
      break;
    default:
      break;
    }
  /* Finally try a generic simplification of the rebuilt rtx.  */
  scopy = simplify_rtx (copy);
  if (scopy)
    return scopy;
  return copy;
}
2175 :
2176 : /* Walk rtx X and replace all occurrences of REG and MEM subexpressions
2177 : with VALUE expressions. This way, it becomes independent of changes
2178 : to registers and memory.
2179 : X isn't actually modified; if modifications are needed, new rtl is
2180 : allocated. However, the return value can share rtl with X.
2181 : If X is within a MEM, MEMMODE must be the mode of the MEM. */
2182 :
rtx
cselib_subst_to_values (rtx x, machine_mode memmode)
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt = GET_RTX_FORMAT (code);
  cselib_val *e;
  struct elt_list *l;
  rtx copy = x;
  int i;
  poly_int64 offset;

  switch (code)
    {
    case REG:
      l = REG_VALUES (REGNO (x));
      /* Skip the leading NULL entry that marks the setting value slot.  */
      if (l && l->elt == NULL)
	l = l->next;
      for (; l; l = l->next)
	if (GET_MODE (l->elt->val_rtx) == GET_MODE (x))
	  return l->elt->val_rtx;

      /* Every register reached here must already have a VALUE.  */
      gcc_unreachable ();

    case MEM:
      e = cselib_lookup_mem (x, 0);
      /* This used to happen for autoincrements, but we deal with them
	 properly now.  Remove the if stmt for the next release.  */
      if (! e)
	{
	  /* Assign a value that doesn't match any other.  */
	  e = new_cselib_val (next_uid, GET_MODE (x), x);
	}
      return e->val_rtx;

    case ENTRY_VALUE:
      e = cselib_lookup (x, GET_MODE (x), 0, memmode);
      if (! e)
	break;
      return e->val_rtx;

    CASE_CONST_ANY:
      return x;

    case PRE_DEC:
    case PRE_INC:
      /* Rewrite pre-modification as the equivalent PLUS of the base
	 and the (possibly negated) access size.  */
      gcc_assert (memmode != VOIDmode);
      offset = GET_MODE_SIZE (memmode);
      if (code == PRE_DEC)
	offset = -offset;
      return cselib_subst_to_values (plus_constant (GET_MODE (x),
						    XEXP (x, 0), offset),
				     memmode);

    case PRE_MODIFY:
      gcc_assert (memmode != VOIDmode);
      return cselib_subst_to_values (XEXP (x, 1), memmode);

    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      /* Post-modification addresses evaluate to the unmodified base.  */
      gcc_assert (memmode != VOIDmode);
      return cselib_subst_to_values (XEXP (x, 0), memmode);

    case PLUS:
      if (GET_MODE (x) == Pmode && CONST_INT_P (XEXP (x, 1)))
	{
	  rtx t = cselib_subst_to_values (XEXP (x, 0), memmode);
	  if (GET_CODE (t) == VALUE)
	    {
	      if (SP_DERIVED_VALUE_P (t) && XEXP (x, 1) == const0_rtx)
		return t;
	      /* Canonicalize sp-derived addresses: if T has a location
		 of the form (plus sp-derived-VALUE const), fold our
		 constant into that form.  */
	      for (struct elt_loc_list *l = CSELIB_VAL_PTR (t)->locs;
		   l; l = l->next)
		if (GET_CODE (l->loc) == PLUS
		    && GET_CODE (XEXP (l->loc, 0)) == VALUE
		    && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))
		    && CONST_INT_P (XEXP (l->loc, 1)))
		  return plus_constant (Pmode, l->loc, INTVAL (XEXP (x, 1)));
	    }
	  if (t != XEXP (x, 0))
	    {
	      copy = shallow_copy_rtx (x);
	      XEXP (copy, 0) = t;
	    }
	  return copy;
	}
      /* Other PLUS forms are handled by the generic walk below.
	 FALLTHRU.  */

    default:
      break;
    }

  /* Generic case: substitute every rtx and rtvec operand, copying X
     lazily only when some operand actually changed.  */
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  rtx t = cselib_subst_to_values (XEXP (x, i), memmode);

	  if (t != XEXP (x, i))
	    {
	      if (x == copy)
		copy = shallow_copy_rtx (x);
	      XEXP (copy, i) = t;
	    }
	}
      else if (fmt[i] == 'E')
	{
	  int j;

	  for (j = 0; j < XVECLEN (x, i); j++)
	    {
	      rtx t = cselib_subst_to_values (XVECEXP (x, i, j), memmode);

	      if (t != XVECEXP (x, i, j))
		{
		  /* Copy the vector itself on first change.  */
		  if (XVEC (x, i) == XVEC (copy, i))
		    {
		      if (x == copy)
			copy = shallow_copy_rtx (x);
		      XVEC (copy, i) = shallow_copy_rtvec (XVEC (x, i));
		    }
		  XVECEXP (copy, i, j) = t;
		}
	    }
	}
    }

  return copy;
}
2311 :
2312 : /* Wrapper for cselib_subst_to_values, that indicates X is in INSN. */
2313 :
2314 : rtx
2315 1463 : cselib_subst_to_values_from_insn (rtx x, machine_mode memmode, rtx_insn *insn)
2316 : {
2317 1463 : rtx ret;
2318 1463 : gcc_assert (!cselib_current_insn);
2319 1463 : cselib_current_insn = insn;
2320 1463 : ret = cselib_subst_to_values (x, memmode);
2321 1463 : cselib_current_insn = NULL;
2322 1463 : return ret;
2323 : }
2324 :
2325 : /* Look up the rtl expression X in our tables and return the value it
2326 : has. If CREATE is zero, we return NULL if we don't know the value.
2327 : Otherwise, we create a new one if possible, using mode MODE if X
2328 : doesn't have a mode (i.e. because it's a constant). When X is part
2329 : of an address, MEMMODE should be the mode of the enclosing MEM if
2330 : we're tracking autoinc expressions. */
2331 :
static cselib_val *
cselib_lookup_1 (rtx x, machine_mode mode,
		 int create, machine_mode memmode)
{
  cselib_val **slot;
  cselib_val *e;

  /* Prefer X's own mode when it has one; MODE is only a fallback for
     mode-less rtxes such as constants.  */
  if (GET_MODE (x) != VOIDmode)
    mode = GET_MODE (x);

  if (GET_CODE (x) == VALUE)
    return CSELIB_VAL_PTR (x);

  if (REG_P (x))
    {
      struct elt_list *l;
      unsigned int i = REGNO (x);

      l = REG_VALUES (i);
      /* Skip the leading NULL entry that marks the setting value slot.  */
      if (l && l->elt == NULL)
	l = l->next;
      for (; l; l = l->next)
	if (mode == GET_MODE (l->elt->val_rtx))
	  {
	    promote_debug_loc (l->elt->locs);
	    return l->elt;
	  }

      if (! create)
	return 0;

      /* Track the widest hard-register footprint ever created so
	 invalidation knows how far below a regno to scan.  */
      if (i < FIRST_PSEUDO_REGISTER)
	{
	  unsigned int n = hard_regno_nregs (i, mode);

	  if (n > max_value_regs)
	    max_value_regs = n;
	}

      e = new_cselib_val (next_uid, GET_MODE (x), x);
      if (GET_MODE (x) == Pmode && x == stack_pointer_rtx)
	SP_DERIVED_VALUE_P (e->val_rtx) = 1;
      new_elt_loc_list (e, x);

      scalar_int_mode int_mode;
      if (REG_VALUES (i) == 0)
	{
	  /* Maintain the invariant that the first entry of
	     REG_VALUES, if present, must be the value used to set the
	     register, or NULL.  */
	  used_regs[n_used_regs++] = i;
	  REG_VALUES (i) = new_elt_list (REG_VALUES (i), NULL);
	}
      else if (cselib_preserve_constants
	       && is_int_mode (mode, &int_mode))
	{
	  /* During var-tracking, try harder to find equivalences
	     for SUBREGs.  If a setter sets say a DImode register
	     and user uses that register only in SImode, add a lowpart
	     subreg location.  */
	  struct elt_list *lwider = NULL;
	  scalar_int_mode lmode;
	  l = REG_VALUES (i);
	  if (l && l->elt == NULL)
	    l = l->next;
	  /* Find the narrowest existing integer value wider than
	     INT_MODE that has a non-register location.  */
	  for (; l; l = l->next)
	    if (is_int_mode (GET_MODE (l->elt->val_rtx), &lmode)
		&& GET_MODE_SIZE (lmode) > GET_MODE_SIZE (int_mode)
		&& (lwider == NULL
		    || partial_subreg_p (lmode,
					 GET_MODE (lwider->elt->val_rtx))))
	      {
		struct elt_loc_list *el;
		/* Multi-register hard regs can't be used for lowparts.  */
		if (i < FIRST_PSEUDO_REGISTER
		    && hard_regno_nregs (i, lmode) != 1)
		  continue;
		for (el = l->elt->locs; el; el = el->next)
		  if (!REG_P (el->loc))
		    break;
		if (el)
		  lwider = l;
	      }
	  if (lwider)
	    {
	      rtx sub = lowpart_subreg (int_mode, lwider->elt->val_rtx,
					GET_MODE (lwider->elt->val_rtx));
	      if (sub)
		new_elt_loc_list (e, sub);
	    }
	}
      REG_VALUES (i)->next = new_elt_list (REG_VALUES (i)->next, e);
      slot = cselib_find_slot (mode, x, e->hash, INSERT, memmode);
      *slot = e;
      return e;
    }

  if (MEM_P (x))
    return cselib_lookup_mem (x, create);

  hashval_t hashval = cselib_hash_rtx (x, create, memmode);
  /* Can't even create if hashing is not possible.  */
  if (! hashval)
    return 0;

  slot = cselib_find_slot (mode, x, hashval,
			   create ? INSERT : NO_INSERT, memmode);
  if (slot == 0)
    return 0;

  e = (cselib_val *) *slot;
  if (e)
    return e;

  e = new_cselib_val (hashval, mode, x);

  /* We have to fill the slot before calling cselib_subst_to_values:
     the hash table is inconsistent until we do so, and
     cselib_subst_to_values will need to do lookups.  */
  *slot = e;
  rtx v = cselib_subst_to_values (x, memmode);

  /* If cselib_preserve_constants, we might get a SP_DERIVED_VALUE_P
     VALUE that isn't in the hash tables anymore.  */
  if (GET_CODE (v) == VALUE && SP_DERIVED_VALUE_P (v) && PRESERVED_VALUE_P (v))
    PRESERVED_VALUE_P (e->val_rtx) = 1;

  new_elt_loc_list (e, v);
  return e;
}
2461 :
2462 : /* Wrapper for cselib_lookup, that indicates X is in INSN. */
2463 :
2464 : cselib_val *
2465 6265013 : cselib_lookup_from_insn (rtx x, machine_mode mode,
2466 : int create, machine_mode memmode, rtx_insn *insn)
2467 : {
2468 6265013 : cselib_val *ret;
2469 :
2470 6265013 : gcc_assert (!cselib_current_insn);
2471 6265013 : cselib_current_insn = insn;
2472 :
2473 6265013 : ret = cselib_lookup (x, mode, create, memmode);
2474 :
2475 6265013 : cselib_current_insn = NULL;
2476 :
2477 6265013 : return ret;
2478 : }
2479 :
2480 : /* Wrapper for cselib_lookup_1, that logs the lookup result and
2481 : maintains invariants related with debug insns. */
2482 :
2483 : cselib_val *
2484 2412342987 : cselib_lookup (rtx x, machine_mode mode,
2485 : int create, machine_mode memmode)
2486 : {
2487 2412342987 : cselib_val *ret = cselib_lookup_1 (x, mode, create, memmode);
2488 :
2489 : /* ??? Should we return NULL if we're not to create an entry, the
2490 : found loc is a debug loc and cselib_current_insn is not DEBUG?
2491 : If so, we should also avoid converting val to non-DEBUG; probably
2492 : easiest setting cselib_current_insn to NULL before the call
2493 : above. */
2494 :
2495 2412342987 : if (dump_file && (dump_flags & TDF_CSELIB))
2496 : {
2497 0 : fputs ("cselib lookup ", dump_file);
2498 0 : print_inline_rtx (dump_file, x, 2);
2499 0 : fprintf (dump_file, " => %u:%u\n",
2500 : ret ? ret->uid : 0,
2501 : ret ? ret->hash : 0);
2502 : }
2503 :
2504 2412342987 : return ret;
2505 : }
2506 :
2507 : /* Invalidate the value at *L, which is part of REG_VALUES (REGNO). */
2508 :
static void
cselib_invalidate_regno_val (unsigned int regno, struct elt_list **l)
{
  cselib_val *v = (*l)->elt;
  if (*l == REG_VALUES (regno))
    {
      /* Maintain the invariant that the first entry of
	 REG_VALUES, if present, must be the value used to set
	 the register, or NULL.  This is also nice because
	 then we won't push the same regno onto user_regs
	 multiple times.  */
      (*l)->elt = NULL;
      l = &(*l)->next;
    }
  else
    unchain_one_elt_list (l);

  v = canonical_cselib_val (v);

  /* Capture state before removing the location so we can decide
     afterwards whether V just became useless.  */
  bool had_locs = v->locs != NULL;
  rtx_insn *setting_insn = v->locs ? v->locs->setting_insn : NULL;

  /* Now, we clear the mapping from value to reg.  It must exist, so
     this code will crash intentionally if it doesn't.  */
  for (elt_loc_list **p = &v->locs; ; p = &(*p)->next)
    {
      rtx x = (*p)->loc;

      if (REG_P (x) && REGNO (x) == regno)
	{
	  unchain_one_elt_loc_list (p);
	  break;
	}
    }

  if (had_locs && cselib_useless_value_p (v))
    {
      if (setting_insn && DEBUG_INSN_P (setting_insn))
	n_useless_debug_values++;
      else
	n_useless_values++;
    }
}
2552 :
2553 : /* Invalidate any entries in reg_values that overlap REGNO. This is called
2554 : if REGNO is changing. MODE is the mode of the assignment to REGNO, which
2555 : is used to determine how many hard registers are being changed. If MODE
2556 : is VOIDmode, then only REGNO is being changed; this is used when
2557 : invalidating call clobbered registers across a call. */
2558 :
static void
cselib_invalidate_regno (unsigned int regno, machine_mode mode)
{
  unsigned int endregno;
  unsigned int i;

  /* If we see pseudos after reload, something is _wrong_.  */
  gcc_assert (!reload_completed || regno < FIRST_PSEUDO_REGISTER
	      || reg_renumber[regno] < 0);

  /* Determine the range of registers that must be invalidated.  For
     pseudos, only REGNO is affected.  For hard regs, we must take MODE
     into account, and we must also invalidate lower register numbers
     if they contain values that overlap REGNO.  */
  if (regno < FIRST_PSEUDO_REGISTER)
    {
      gcc_assert (mode != VOIDmode);

      /* Start max_value_regs below REGNO, since a lower-numbered
	 multi-register value may extend up into REGNO.  */
      if (regno < max_value_regs)
	i = 0;
      else
	i = regno - max_value_regs;

      endregno = end_hard_regno (mode, regno);
    }
  else
    {
      i = regno;
      endregno = regno + 1;
    }

  for (; i < endregno; i++)
    {
      /* NOTE(review): the coverage listing showed this line mangled as
	 "®_VALUES"; restored from the obvious &REG_VALUES intent.  */
      struct elt_list **l = &REG_VALUES (i);

      /* Go through all known values for this reg; if it overlaps the range
	 we're invalidating, remove the value.  */
      while (*l)
	{
	  cselib_val *v = (*l)->elt;
	  unsigned int this_last = i;

	  if (i < FIRST_PSEUDO_REGISTER && v != NULL)
	    this_last = end_hard_regno (GET_MODE (v->val_rtx), i) - 1;

	  /* Keep entries that end below REGNO, the NULL setting-value
	     marker, and the preserved CFA base value.  */
	  if (this_last < regno || v == NULL
	      || (v == cfa_base_preserved_val
		  && i == cfa_base_preserved_regno))
	    {
	      l = &(*l)->next;
	      continue;
	    }

	  /* We have an overlap.  */
	  cselib_invalidate_regno_val (i, l);
	}
    }
}
2617 :
2618 : /* Invalidate any locations in the table which are changed because of a
2619 : store to MEM_RTX. If this is called because of a non-const call
2620 : instruction, MEM_RTX is (mem:BLK const0_rtx). */
2621 :
2622 : static void
2623 114257822 : cselib_invalidate_mem (rtx mem_rtx)
2624 : {
2625 114257822 : cselib_val **vp, *v, *next;
2626 114257822 : int num_mems = 0;
2627 114257822 : rtx mem_addr;
2628 :
2629 114257822 : mem_addr = canon_rtx (get_addr (XEXP (mem_rtx, 0)));
2630 114257822 : mem_rtx = canon_rtx (mem_rtx);
2631 :
2632 114257822 : vp = &first_containing_mem;
2633 285326729 : for (v = *vp; v != &dummy_val; v = next)
2634 : {
2635 171068907 : bool has_mem = false;
2636 171068907 : struct elt_loc_list **p = &v->locs;
2637 171068907 : bool had_locs = v->locs != NULL;
2638 171068907 : rtx_insn *setting_insn = v->locs ? v->locs->setting_insn : NULL;
2639 171068907 : rtx sp_base = NULL_RTX;
2640 171068907 : HOST_WIDE_INT sp_off = 0;
2641 :
2642 510208275 : while (*p)
2643 : {
2644 339139368 : rtx x = (*p)->loc;
2645 339139368 : cselib_val *addr;
2646 339139368 : struct elt_list **mem_chain;
2647 :
2648 : /* MEMs may occur in locations only at the top level; below
2649 : that every MEM or REG is substituted by its VALUE. */
2650 339139368 : if (!MEM_P (x))
2651 : {
2652 103022325 : p = &(*p)->next;
2653 103022325 : continue;
2654 : }
2655 :
2656 : /* When invalidating memory below the stack pointer for const/pure
2657 : calls and alloca/VLAs aren't used, attempt to optimize. Values
2658 : stored into area sometimes below the stack pointer shouldn't be
2659 : addressable and should be stored just through stack pointer
2660 : derived expressions, so don't invalidate MEMs not using stack
2661 : derived addresses, or if the MEMs clearly aren't below the stack
2662 : pointer. This isn't a fully conservative approach, the hope is
2663 : that invalidating more MEMs than this isn't actually needed. */
2664 236117043 : if (mem_rtx == callmem[1]
2665 2908740 : && num_mems < param_max_cselib_memory_locations
2666 2908682 : && GET_CODE (XEXP (x, 0)) == VALUE
2667 2908682 : && !cfun->calls_alloca)
2668 : {
2669 2860937 : cselib_val *v2 = CSELIB_VAL_PTR (XEXP (x, 0));
2670 2860937 : rtx x_base = NULL_RTX;
2671 2860937 : HOST_WIDE_INT x_off = 0;
2672 2860937 : if (SP_DERIVED_VALUE_P (v2->val_rtx))
2673 : x_base = v2->val_rtx;
2674 : else
2675 4532886 : for (struct elt_loc_list *l = v2->locs; l; l = l->next)
2676 2830181 : if (GET_CODE (l->loc) == PLUS
2677 1522801 : && GET_CODE (XEXP (l->loc, 0)) == VALUE
2678 1420725 : && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))
2679 3912297 : && CONST_INT_P (XEXP (l->loc, 1)))
2680 : {
2681 1082102 : x_base = XEXP (l->loc, 0);
2682 1082102 : x_off = INTVAL (XEXP (l->loc, 1));
2683 1082102 : break;
2684 : }
2685 : /* If x_base is NULL here, don't invalidate x as its address
2686 : isn't derived from sp such that it could be in outgoing
2687 : argument area of some call in !ACCUMULATE_OUTGOING_ARGS
2688 : function. */
2689 2860937 : if (x_base)
2690 : {
2691 1158232 : if (sp_base == NULL_RTX)
2692 : {
2693 2133980 : if (cselib_val *v3
2694 1070068 : = cselib_lookup_1 (stack_pointer_rtx, Pmode, 0,
2695 : VOIDmode))
2696 : {
2697 1062327 : if (SP_DERIVED_VALUE_P (v3->val_rtx))
2698 : sp_base = v3->val_rtx;
2699 : else
2700 280683 : for (struct elt_loc_list *l = v3->locs;
2701 563100 : l; l = l->next)
2702 563086 : if (GET_CODE (l->loc) == PLUS
2703 280669 : && GET_CODE (XEXP (l->loc, 0)) == VALUE
2704 280669 : && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))
2705 843755 : && CONST_INT_P (XEXP (l->loc, 1)))
2706 : {
2707 280669 : sp_base = XEXP (l->loc, 0);
2708 280669 : sp_off = INTVAL (XEXP (l->loc, 1));
2709 280669 : break;
2710 : }
2711 : }
2712 1066990 : if (sp_base == NULL_RTX)
2713 4677 : sp_base = pc_rtx;
2714 : }
2715 : /* Otherwise, if x_base and sp_base are the same,
2716 : we know that x_base + x_off is the x's address and
2717 : sp_base + sp_off is current value of stack pointer,
2718 : so try to determine if x is certainly not below stack
2719 : pointer. */
2720 1158232 : if (sp_base == x_base)
2721 : {
2722 1150997 : if (STACK_GROWS_DOWNWARD)
2723 : {
2724 1150997 : HOST_WIDE_INT off = sp_off;
2725 : #ifdef STACK_ADDRESS_OFFSET
2726 : /* On SPARC take stack pointer bias into account as
2727 : well. */
2728 : off += (STACK_ADDRESS_OFFSET
2729 : - FIRST_PARM_OFFSET (current_function_decl));
2730 : #endif
2731 1150997 : if (x_off >= off)
2732 : /* x is at or above the current stack pointer,
2733 : no need to invalidate it. */
2734 : x_base = NULL_RTX;
2735 : }
2736 : else
2737 : {
2738 : HOST_WIDE_INT sz;
2739 : enum machine_mode mode = GET_MODE (x);
2740 : if ((MEM_SIZE_KNOWN_P (x)
2741 : && MEM_SIZE (x).is_constant (&sz))
2742 : || (mode != BLKmode
2743 : && GET_MODE_SIZE (mode).is_constant (&sz)))
2744 : if (x_off < sp_off
2745 : && ((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT)
2746 : x_off + sz) <= sp_off))
2747 : /* x's end is below or at the current stack
2748 : pointer in !STACK_GROWS_DOWNWARD target,
2749 : no need to invalidate it. */
2750 : x_base = NULL_RTX;
2751 : }
2752 : }
2753 : }
2754 2852939 : if (x_base == NULL_RTX)
2755 : {
2756 2852939 : has_mem = true;
2757 2852939 : num_mems++;
2758 2852939 : p = &(*p)->next;
2759 2852939 : continue;
2760 : }
2761 : }
2762 :
2763 429102572 : if (num_mems < param_max_cselib_memory_locations
2764 466462878 : && ! canon_anti_dependence (x, false, mem_rtx,
2765 233198774 : GET_MODE (mem_rtx), mem_addr))
2766 : {
2767 195838468 : has_mem = true;
2768 195838468 : num_mems++;
2769 195838468 : p = &(*p)->next;
2770 195838468 : continue;
2771 : }
2772 :
2773 : /* This one overlaps. */
2774 : /* We must have a mapping from this MEM's address to the
2775 : value (E). Remove that, too. */
2776 37425636 : addr = cselib_lookup (XEXP (x, 0), VOIDmode, 0, GET_MODE (x));
2777 37425636 : addr = canonical_cselib_val (addr);
2778 37425636 : gcc_checking_assert (v == canonical_cselib_val (v));
2779 37425636 : mem_chain = &addr->addr_list;
2780 37427664 : for (;;)
2781 : {
2782 37426650 : cselib_val *canon = canonical_cselib_val ((*mem_chain)->elt);
2783 :
2784 37426650 : if (canon == v)
2785 : {
2786 37425636 : unchain_one_elt_list (mem_chain);
2787 37425636 : break;
2788 : }
2789 :
2790 : /* Record canonicalized elt. */
2791 1014 : (*mem_chain)->elt = canon;
2792 :
2793 1014 : mem_chain = &(*mem_chain)->next;
2794 1014 : }
2795 :
2796 37425636 : unchain_one_elt_loc_list (p);
2797 : }
2798 :
2799 171068907 : if (had_locs && cselib_useless_value_p (v))
2800 : {
2801 8672260 : if (setting_insn && DEBUG_INSN_P (setting_insn))
2802 0 : n_useless_debug_values++;
2803 : else
2804 8672260 : n_useless_values++;
2805 : }
2806 :
2807 171068907 : next = v->next_containing_mem;
2808 171068907 : if (has_mem)
2809 : {
2810 138519667 : *vp = v;
2811 138519667 : vp = &(*vp)->next_containing_mem;
2812 : }
2813 : else
2814 32549240 : v->next_containing_mem = NULL;
2815 : }
2816 114257822 : *vp = &dummy_val;
2817 114257822 : }
2818 :
2819 : /* Invalidate DEST. */
2820 :
2821 : void
2822 524471097 : cselib_invalidate_rtx (rtx dest)
2823 : {
2824 524471097 : while (GET_CODE (dest) == SUBREG
2825 524471097 : || GET_CODE (dest) == ZERO_EXTRACT
2826 1048948805 : || GET_CODE (dest) == STRICT_LOW_PART)
2827 6611 : dest = XEXP (dest, 0);
2828 :
2829 524471097 : if (REG_P (dest))
2830 399551714 : cselib_invalidate_regno (REGNO (dest), GET_MODE (dest));
2831 124919383 : else if (MEM_P (dest))
2832 76063177 : cselib_invalidate_mem (dest);
2833 524471097 : }
2834 :
/* A wrapper for cselib_invalidate_rtx to be called via note_stores.
   Note_stores callbacks receive (dest, setter, data); only DEST is
   needed here, the setter and user data are ignored.  */

static void
cselib_invalidate_rtx_note_stores (rtx dest, const_rtx,
				   void *data ATTRIBUTE_UNUSED)
{
  cselib_invalidate_rtx (dest);
}
2843 :
/* Record the result of a SET instruction.  DEST is being set; the source
   contains the value described by SRC_ELT.  If DEST is a MEM, DEST_ADDR_ELT
   describes its address.  */

static void
cselib_record_set (rtx dest, cselib_val *src_elt, cselib_val *dest_addr_elt)
{
  /* Nothing to record without a source value, and a destination with
     side effects (e.g. auto-modify) cannot be used as a location.  */
  if (src_elt == 0 || side_effects_p (dest))
    return;

  if (REG_P (dest))
    {
      unsigned int dreg = REGNO (dest);
      if (dreg < FIRST_PSEUDO_REGISTER)
	{
	  unsigned int n = REG_NREGS (dest);

	  /* Track the widest hard-register group ever assigned a value,
	     used elsewhere when invalidating overlapping registers.  */
	  if (n > max_value_regs)
	    max_value_regs = n;
	}

      if (REG_VALUES (dreg) == 0)
	{
	  /* First value ever recorded for this register; remember the
	     register so the table can be cleared cheaply later.  */
	  used_regs[n_used_regs++] = dreg;
	  REG_VALUES (dreg) = new_elt_list (REG_VALUES (dreg), src_elt);
	}
      else
	{
	  /* The register should have been invalidated.  */
	  gcc_assert (REG_VALUES (dreg)->elt == 0);
	  REG_VALUES (dreg)->elt = src_elt;
	}

      /* SRC_ELT is about to gain a location, so it is no longer
	 useless; keep the useless-value count consistent.  */
      if (cselib_useless_value_p (src_elt))
	n_useless_values--;
      new_elt_loc_list (src_elt, dest);
    }
  else if (MEM_P (dest) && dest_addr_elt != 0
	   && cselib_record_memory)
    {
      /* Same bookkeeping as above: gaining a MEM location makes the
	 value non-useless.  */
      if (cselib_useless_value_p (src_elt))
	n_useless_values--;
      add_mem_for_addr (dest_addr_elt, src_elt, dest);
    }
}
2889 :
/* Make ELT and X's VALUE equivalent to each other at INSN.  */

void
cselib_add_permanent_equiv (cselib_val *elt, rtx x, rtx_insn *insn)
{
  cselib_val *nelt;
  rtx_insn *save_cselib_current_insn = cselib_current_insn;

  /* ELT must exist and already be preserved; X must be free of side
     effects since it will be recorded as an equivalent location.  */
  gcc_checking_assert (elt);
  gcc_checking_assert (PRESERVED_VALUE_P (elt->val_rtx));
  gcc_checking_assert (!side_effects_p (x));

  /* Attribute the lookup and the new location to INSN; restored below.  */
  cselib_current_insn = insn;

  nelt = cselib_lookup (x, GET_MODE (elt->val_rtx), 1, VOIDmode);

  if (nelt != elt)
    {
      cselib_any_perm_equivs = true;

      /* Permanent equivalences only make sense between preserved
	 values, so preserve X's value too.  */
      if (!PRESERVED_VALUE_P (nelt->val_rtx))
	cselib_preserve_value (nelt);

      /* Record ELT's VALUE rtx as a location of X's value.  */
      new_elt_loc_list (nelt, elt->val_rtx);
    }

  cselib_current_insn = save_cselib_current_insn;
}
2918 :
/* Return TRUE if any permanent equivalences have been recorded since
   the table was last initialized.  The flag is set by
   cselib_add_permanent_equiv and cleared by cselib_init/cselib_finish.  */
bool
cselib_have_permanent_equivalences (void)
{
  return cselib_any_perm_equivs;
}
2926 :
/* Record stack_pointer_rtx to be equal to
   (plus:P cfa_base_preserved_val offset).  Used by var-tracking
   at the start of basic blocks for !frame_pointer_needed functions.  */

void
cselib_record_sp_cfa_base_equiv (HOST_WIDE_INT offset, rtx_insn *insn)
{
  rtx sp_derived_value = NULL_RTX;
  /* Find an SP_DERIVED_VALUE_P location among the locations of
     cfa_base_preserved_val, either directly or as the first operand of
     a (plus VALUE const_int); in the latter case fold the constant
     displacement into OFFSET.  */
  for (struct elt_loc_list *l = cfa_base_preserved_val->locs; l; l = l->next)
    if (GET_CODE (l->loc) == VALUE
	&& SP_DERIVED_VALUE_P (l->loc))
      {
	sp_derived_value = l->loc;
	break;
      }
    else if (GET_CODE (l->loc) == PLUS
	     && GET_CODE (XEXP (l->loc, 0)) == VALUE
	     && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))
	     && CONST_INT_P (XEXP (l->loc, 1)))
      {
	sp_derived_value = XEXP (l->loc, 0);
	offset = offset + UINTVAL (XEXP (l->loc, 1));
	break;
      }
  /* Without an SP-derived base there is nothing useful to record.  */
  if (sp_derived_value == NULL_RTX)
    return;
  /* Look up (or create) the VALUE for sp_derived_value + offset and
     record it as the current contents of the stack pointer.  */
  cselib_val *val
    = cselib_lookup_from_insn (plus_constant (Pmode, sp_derived_value, offset),
			       Pmode, 1, VOIDmode, insn);
  if (val != NULL)
    {
      PRESERVED_VALUE_P (val->val_rtx) = 1;
      cselib_record_set (stack_pointer_rtx, val, NULL);
    }
}
2962 :
/* Return true if V is SP_DERIVED_VALUE_P (or SP_DERIVED_VALUE_P + CONST_INT)
   that can be expressed using cfa_base_preserved_val + CONST_INT.  */

bool
cselib_sp_derived_value_p (cselib_val *v)
{
  /* If V itself is not SP-derived, look through a
     (plus VALUE const_int) location and redirect V to that VALUE.
     Note the loop deliberately keeps scanning V's original location
     list without breaking, so the last matching location wins.  */
  if (!SP_DERIVED_VALUE_P (v->val_rtx))
    for (struct elt_loc_list *l = v->locs; l; l = l->next)
      if (GET_CODE (l->loc) == PLUS
	  && GET_CODE (XEXP (l->loc, 0)) == VALUE
	  && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))
	  && CONST_INT_P (XEXP (l->loc, 1)))
	v = CSELIB_VAL_PTR (XEXP (l->loc, 0));
  if (!SP_DERIVED_VALUE_P (v->val_rtx))
    return false;
  /* Accept V if one of its locations is cfa_base_preserved_val itself
     or cfa_base_preserved_val plus a constant.  */
  for (struct elt_loc_list *l = v->locs; l; l = l->next)
    if (l->loc == cfa_base_preserved_val->val_rtx)
      return true;
    else if (GET_CODE (l->loc) == PLUS
	     && XEXP (l->loc, 0) == cfa_base_preserved_val->val_rtx
	     && CONST_INT_P (XEXP (l->loc, 1)))
      return true;
  return false;
}
2987 :
/* There is no good way to determine how many elements there can be
   in a PARALLEL.  Since it's fairly cheap, use a really large number.  */
#define MAX_SETS (FIRST_PSEUDO_REGISTER * 2)

/* State handed to cselib_record_autoinc_cb through for_each_inc_dec.  */
struct cselib_record_autoinc_data
{
  /* Array of recorded sets (capacity MAX_SETS) to append to.  */
  struct cselib_set *sets;
  /* Number of entries already filled in SETS.  */
  int n_sets;
};
2997 :
2998 : /* Callback for for_each_inc_dec. Records in ARG the SETs implied by
2999 : autoinc RTXs: SRC plus SRCOFF if non-NULL is stored in DEST. */
3000 :
3001 : static int
3002 13379296 : cselib_record_autoinc_cb (rtx mem ATTRIBUTE_UNUSED, rtx op ATTRIBUTE_UNUSED,
3003 : rtx dest, rtx src, rtx srcoff, void *arg)
3004 : {
3005 13379296 : struct cselib_record_autoinc_data *data;
3006 13379296 : data = (struct cselib_record_autoinc_data *)arg;
3007 :
3008 13379296 : data->sets[data->n_sets].dest = dest;
3009 :
3010 13379296 : if (srcoff)
3011 13010840 : data->sets[data->n_sets].src = gen_rtx_PLUS (GET_MODE (src), src, srcoff);
3012 : else
3013 368456 : data->sets[data->n_sets].src = src;
3014 :
3015 13379296 : data->n_sets++;
3016 :
3017 13379296 : return 0;
3018 : }
3019 :
/* Record the effects of any sets and autoincs in INSN.  Collects up to
   MAX_SETS sets from the pattern (plus sets implied by auto-increment
   addressing), looks up the values read, invalidates the locations
   written, and finally enters the new equivalences into the tables.  */
static void
cselib_record_sets (rtx_insn *insn)
{
  int n_sets = 0;
  int i;
  struct cselib_set sets[MAX_SETS];
  rtx cond = 0;
  int n_sets_before_autoinc;
  int n_strict_low_parts = 0;
  struct cselib_record_autoinc_data data;

  rtx body = PATTERN (insn);
  /* For conditionally executed insns, remember the condition so the
     recorded source can be wrapped in IF_THEN_ELSE below.  */
  if (GET_CODE (body) == COND_EXEC)
    {
      cond = COND_EXEC_TEST (body);
      body = COND_EXEC_CODE (body);
    }

  /* Find all sets.  */
  if (GET_CODE (body) == SET)
    {
      sets[0].src = SET_SRC (body);
      sets[0].dest = SET_DEST (body);
      n_sets = 1;
    }
  else if (GET_CODE (body) == PARALLEL)
    {
      /* Look through the PARALLEL and record the values being
	 set, if possible.  Also handle any CLOBBERs.  */
      for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
	{
	  rtx x = XVECEXP (body, 0, i);

	  if (GET_CODE (x) == SET)
	    {
	      sets[n_sets].src = SET_SRC (x);
	      sets[n_sets].dest = SET_DEST (x);
	      n_sets++;
	    }
	}
    }

  /* When not recording memory, a load from a read-only MEM with a known
     constant equivalent can be recorded as that constant instead.  */
  if (n_sets == 1
      && MEM_P (sets[0].src)
      && !cselib_record_memory
      && MEM_READONLY_P (sets[0].src))
    {
      rtx note = find_reg_equal_equiv_note (insn);

      if (note && CONSTANT_P (XEXP (note, 0)))
	sets[0].src = XEXP (note, 0);
    }

  /* Append the sets implied by auto-increment addressing modes.  */
  data.sets = sets;
  data.n_sets = n_sets_before_autoinc = n_sets;
  for_each_inc_dec (PATTERN (insn), cselib_record_autoinc_cb, &data);
  n_sets = data.n_sets;

  /* Look up the values that are read.  Do this before invalidating the
     locations that are written.  */
  for (i = 0; i < n_sets; i++)
    {
      rtx dest = sets[i].dest;
      rtx orig = dest;

      /* A STRICT_LOW_PART can be ignored; we'll record the equivalence for
	 the low part after invalidating any knowledge about larger modes.  */
      if (GET_CODE (sets[i].dest) == STRICT_LOW_PART)
	sets[i].dest = dest = XEXP (dest, 0);

      /* We don't know how to record anything but REG or MEM.  */
      if (REG_P (dest)
	  || (MEM_P (dest) && cselib_record_memory))
	{
	  rtx src = sets[i].src;
	  if (cond)
	    src = gen_rtx_IF_THEN_ELSE (GET_MODE (dest), cond, src, dest);
	  sets[i].src_elt = cselib_lookup (src, GET_MODE (dest), 1, VOIDmode);
	  if (MEM_P (dest))
	    {
	      machine_mode address_mode = get_address_mode (dest);

	      sets[i].dest_addr_elt = cselib_lookup (XEXP (dest, 0),
						     address_mode, 1,
						     GET_MODE (dest));
	    }
	  else
	    sets[i].dest_addr_elt = 0;
	}

      /* Improve handling of STRICT_LOW_PART if the current value is known
	 to be const0_rtx, then the low bits will be set to dest and higher
	 bits will remain zero.  Used in code like:

	 {di:SI=0;clobber flags:CC;}
	 flags:CCNO=cmp(bx:SI,0)
	 strict_low_part(di:QI)=flags:CCNO<=0

	 where we can note both that di:QI=flags:CCNO<=0 and
	 also that because di:SI is known to be 0 and strict_low_part(di:QI)
	 preserves the upper bits that di:SI=zero_extend(flags:CCNO<=0).  */
      scalar_int_mode mode;
      if (dest != orig
	  && cselib_record_sets_hook
	  && REG_P (dest)
	  && HARD_REGISTER_P (dest)
	  && sets[i].src_elt
	  && is_a <scalar_int_mode> (GET_MODE (dest), &mode)
	  && n_sets + n_strict_low_parts < MAX_SETS)
	{
	  opt_scalar_int_mode wider_mode_iter;
	  FOR_EACH_WIDER_MODE (wider_mode_iter, mode)
	    {
	      scalar_int_mode wider_mode = wider_mode_iter.require ();
	      if (GET_MODE_PRECISION (wider_mode) > BITS_PER_WORD)
		break;

	      rtx reg = gen_lowpart (wider_mode, dest);
	      if (!REG_P (reg))
		break;

	      cselib_val *v = cselib_lookup (reg, wider_mode, 0, VOIDmode);
	      if (!v)
		continue;

	      /* Only useful when the wider register is known to be zero.  */
	      struct elt_loc_list *l;
	      for (l = v->locs; l; l = l->next)
		if (l->loc == const0_rtx)
		  break;

	      if (!l)
		continue;

	      /* Queue an extra synthetic set, processed at the end.  */
	      sets[n_sets + n_strict_low_parts].dest = reg;
	      sets[n_sets + n_strict_low_parts].src = dest;
	      sets[n_sets + n_strict_low_parts++].src_elt = sets[i].src_elt;
	      break;
	    }
	}
    }

  if (cselib_record_sets_hook)
    cselib_record_sets_hook (insn, sets, n_sets);

  /* Invalidate all locations written by this insn.  Note that the elts we
     looked up in the previous loop aren't affected, just some of their
     locations may go away.  */
  note_pattern_stores (body, cselib_invalidate_rtx_note_stores, NULL);

  /* Auto-increment destinations are not covered by note_pattern_stores;
     invalidate them explicitly.  */
  for (i = n_sets_before_autoinc; i < n_sets; i++)
    cselib_invalidate_rtx (sets[i].dest);

  /* If this is an asm, look for duplicate sets.  This can happen when the
     user uses the same value as an output multiple times.  This is valid
     if the outputs are not actually used thereafter.  Treat this case as
     if the value isn't actually set.  We do this by smashing the destination
     to pc_rtx, so that we won't record the value later.  */
  if (n_sets >= 2 && asm_noperands (body) >= 0)
    {
      for (i = 0; i < n_sets; i++)
	{
	  rtx dest = sets[i].dest;
	  if (REG_P (dest) || MEM_P (dest))
	    {
	      int j;
	      for (j = i + 1; j < n_sets; j++)
		if (rtx_equal_p (dest, sets[j].dest))
		  {
		    sets[i].dest = pc_rtx;
		    sets[j].dest = pc_rtx;
		  }
	    }
	}
    }

  /* Now enter the equivalences in our tables.  */
  for (i = 0; i < n_sets; i++)
    {
      rtx dest = sets[i].dest;
      if (REG_P (dest)
	  || (MEM_P (dest) && cselib_record_memory))
	cselib_record_set (dest, sets[i].src_elt, sets[i].dest_addr_elt);
    }

  /* And deal with STRICT_LOW_PART.  */
  for (i = 0; i < n_strict_low_parts; i++)
    {
      if (! PRESERVED_VALUE_P (sets[n_sets + i].src_elt->val_rtx))
	continue;
      machine_mode dest_mode = GET_MODE (sets[n_sets + i].dest);
      cselib_val *v
	= cselib_lookup (sets[n_sets + i].dest, dest_mode, 1, VOIDmode);
      cselib_preserve_value (v);
      rtx r = gen_rtx_ZERO_EXTEND (dest_mode,
				   sets[n_sets + i].src_elt->val_rtx);
      cselib_add_permanent_equiv (v, r, insn);
    }
}
3219 :
3220 : /* Return true if INSN in the prologue initializes hard_frame_pointer_rtx. */
3221 :
3222 : bool
3223 40550523 : fp_setter_insn (rtx_insn *insn)
3224 : {
3225 40550523 : rtx expr, pat = NULL_RTX;
3226 :
3227 40550523 : if (!RTX_FRAME_RELATED_P (insn))
3228 : return false;
3229 :
3230 624772 : expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
3231 624772 : if (expr)
3232 62 : pat = XEXP (expr, 0);
3233 624772 : if (!modified_in_p (hard_frame_pointer_rtx, pat ? pat : insn))
3234 : return false;
3235 :
3236 : /* Don't return true for frame pointer restores in the epilogue. */
3237 153476 : if (find_reg_note (insn, REG_CFA_RESTORE, hard_frame_pointer_rtx))
3238 : return false;
3239 : return true;
3240 : }
3241 :
/* V is one of the values in REG_VALUES (REGNO).  Return true if it
   would be invalidated by CALLEE_ABI.  */

static bool
cselib_invalidated_by_call_p (const function_abi &callee_abi,
			      unsigned int regno, cselib_val *v)
{
  machine_mode mode = GET_MODE (v->val_rtx);
  if (mode == VOIDmode)
    {
      /* V has no mode of its own; fall back to the mode of the first
	 value recorded for REGNO.  */
      v = REG_VALUES (regno)->elt;
      if (!v)
	/* If we don't know what the mode of the constant value is, and we
	   don't know what mode the register was set in, conservatively
	   assume that the register is clobbered.  The value's going to be
	   essentially useless in this case anyway.  */
	return true;
      mode = GET_MODE (v->val_rtx);
    }
  return callee_abi.clobbers_reg_p (mode, regno);
}
3263 :
3264 : /* Record the effects of INSN. */
3265 :
3266 : void
3267 1012954914 : cselib_process_insn (rtx_insn *insn)
3268 : {
3269 1012954914 : int i;
3270 1012954914 : rtx x;
3271 :
3272 1012954914 : cselib_current_insn = insn;
3273 :
3274 : /* Forget everything at a CODE_LABEL or a setjmp. */
3275 1012954914 : if ((LABEL_P (insn)
3276 977576840 : || (CALL_P (insn)
3277 33668644 : && find_reg_note (insn, REG_SETJMP, NULL)))
3278 1012959123 : && !cselib_preserve_constants)
3279 : {
3280 35382060 : cselib_reset_table (next_uid);
3281 35382060 : cselib_current_insn = NULL;
3282 35382060 : return;
3283 : }
3284 :
3285 977572854 : if (! INSN_P (insn))
3286 : {
3287 99781326 : cselib_current_insn = NULL;
3288 99781326 : return;
3289 : }
3290 :
3291 : /* If this is a call instruction, forget anything stored in a
3292 : call clobbered register, or, if this is not a const call, in
3293 : memory. */
3294 877791528 : if (CALL_P (insn))
3295 : {
3296 33664658 : function_abi callee_abi = insn_callee_abi (insn);
3297 3130813194 : for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3298 : {
3299 3097148536 : elt_list **l = ®_VALUES (i);
3300 3315762952 : while (*l)
3301 : {
3302 218614416 : cselib_val *v = (*l)->elt;
3303 218614416 : if (v && cselib_invalidated_by_call_p (callee_abi, i, v))
3304 69725580 : cselib_invalidate_regno_val (i, l);
3305 : else
3306 148888836 : l = &(*l)->next;
3307 : }
3308 : }
3309 :
3310 : /* Since it is not clear how cselib is going to be used, be
3311 : conservative here and treat looping pure or const functions
3312 : as if they were regular functions. */
3313 33664658 : if (RTL_LOOPING_CONST_OR_PURE_CALL_P (insn)
3314 33664658 : || !(RTL_CONST_OR_PURE_CALL_P (insn)))
3315 30690038 : cselib_invalidate_mem (callmem[0]);
3316 : else
3317 : {
3318 : /* For const/pure calls, invalidate any argument slots because
3319 : they are owned by the callee. */
3320 8562864 : for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
3321 5588244 : if (GET_CODE (XEXP (x, 0)) == USE
3322 5587968 : && MEM_P (XEXP (XEXP (x, 0), 0)))
3323 138425 : cselib_invalidate_mem (XEXP (XEXP (x, 0), 0));
3324 : /* And invalidate memory below the stack (or above for
3325 : !STACK_GROWS_DOWNWARD), as even const/pure call can invalidate
3326 : that. Do this only if !ACCUMULATE_OUTGOING_ARGS or if
3327 : cfun->calls_alloca, otherwise the stack pointer shouldn't be
3328 : changing in the middle of the function and nothing should be
3329 : stored below the stack pointer. */
3330 2974620 : if (!ACCUMULATE_OUTGOING_ARGS || cfun->calls_alloca)
3331 2974177 : cselib_invalidate_mem (callmem[1]);
3332 : }
3333 : }
3334 :
3335 877791528 : cselib_record_sets (insn);
3336 :
3337 : /* Look for any CLOBBERs in CALL_INSN_FUNCTION_USAGE, but only
3338 : after we have processed the insn. */
3339 877791528 : if (CALL_P (insn))
3340 : {
3341 101527222 : for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
3342 67862564 : if (GET_CODE (XEXP (x, 0)) == CLOBBER)
3343 1858699 : cselib_invalidate_rtx (XEXP (XEXP (x, 0), 0));
3344 :
3345 : /* Flush everything on setjmp. */
3346 33664658 : if (cselib_preserve_constants
3347 33664658 : && find_reg_note (insn, REG_SETJMP, NULL))
3348 : {
3349 223 : cselib_preserve_only_values ();
3350 223 : cselib_reset_table (next_uid);
3351 : }
3352 : }
3353 :
3354 : /* On setter of the hard frame pointer if frame_pointer_needed,
3355 : invalidate stack_pointer_rtx, so that sp and {,h}fp based
3356 : VALUEs are distinct. */
3357 877791528 : if (reload_completed
3358 401702197 : && frame_pointer_needed
3359 918222526 : && fp_setter_insn (insn))
3360 93673 : cselib_invalidate_rtx (stack_pointer_rtx);
3361 :
3362 877791528 : cselib_current_insn = NULL;
3363 :
3364 877791528 : if (n_useless_values > MAX_USELESS_VALUES
3365 : /* remove_useless_values is linear in the hash table size. Avoid
3366 : quadratic behavior for very large hashtables with very few
3367 : useless elements. */
3368 877791528 : && ((unsigned int)n_useless_values
3369 2066192 : > (cselib_hash_table->elements () - n_debug_values) / 4))
3370 28813 : remove_useless_values ();
3371 : }
3372 :
/* Initialize cselib for one pass.  The caller must also call
   init_alias_analysis.  RECORD_WHAT is a mask of CSELIB_RECORD_MEMORY
   and CSELIB_PRESERVE_CONSTANTS.  */

void
cselib_init (int record_what)
{
  cselib_record_memory = record_what & CSELIB_RECORD_MEMORY;
  cselib_preserve_constants = record_what & CSELIB_PRESERVE_CONSTANTS;
  cselib_any_perm_equivs = false;

  /* (mem:BLK (scratch)) is a special mechanism to conflict with everything,
     see canon_true_dependence.  This is only created once.  */
  if (! callmem[0])
    callmem[0] = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
  /* Similarly create a MEM representing roughly everything below
     the stack for STACK_GROWS_DOWNWARD targets or everything above
     it otherwise.  Do this only when !ACCUMULATE_OUTGOING_ARGS or
     if cfun->calls_alloca, otherwise the stack pointer shouldn't be
     changing in the middle of the function and nothing should be stored
     below the stack pointer.  */
  if (!callmem[1] && (!ACCUMULATE_OUTGOING_ARGS || cfun->calls_alloca))
    {
      if (STACK_GROWS_DOWNWARD)
	{
	  /* Base the MEM half the address space below the stack pointer.  */
	  unsigned HOST_WIDE_INT off = -(GET_MODE_MASK (Pmode) >> 1);
#ifdef STACK_ADDRESS_OFFSET
	  /* On SPARC take stack pointer bias into account as well.  */
	  off += (STACK_ADDRESS_OFFSET
		  - FIRST_PARM_OFFSET (current_function_decl));
#endif
	  callmem[1] = plus_constant (Pmode, stack_pointer_rtx, off);
	}
      else
	callmem[1] = stack_pointer_rtx;
      callmem[1] = gen_rtx_MEM (BLKmode, callmem[1]);
      set_mem_size (callmem[1], GET_MODE_MASK (Pmode) >> 1);
    }

  cselib_nregs = max_reg_num ();

  /* We preserve reg_values to allow expensive clearing of the whole thing.
     Reallocate it however if it happens to be too large.  */
  if (!reg_values || reg_values_size < cselib_nregs
      || (reg_values_size > 10 && reg_values_size > cselib_nregs * 4))
    {
      free (reg_values);
      /* Some space for newly emit instructions so we don't end up
	 reallocating in between passes.  */
      reg_values_size = cselib_nregs + (63 + cselib_nregs) / 16;
      reg_values = XCNEWVEC (struct elt_list *, reg_values_size);
    }
  used_regs = XNEWVEC (unsigned int, cselib_nregs);
  n_used_regs = 0;
  /* FIXME: enable sanitization (PR87845) */
  cselib_hash_table
    = new hash_table<cselib_hasher> (31, /* ggc */ false,
				     /* sanitize_eq_and_hash */ false);
  if (cselib_preserve_constants)
    cselib_preserved_hash_table
      = new hash_table<cselib_hasher> (31, /* ggc */ false,
				       /* sanitize_eq_and_hash */ false);
  next_uid = 1;
}
3436 :
/* Called when the current user is done with cselib.  Releases all pools
   and tables; counterpart of cselib_init.  */

void
cselib_finish (void)
{
  /* Remember whether the preserved table was allocated before the
     flag is cleared below.  */
  bool preserved = cselib_preserve_constants;
  cselib_discard_hook = NULL;
  cselib_preserve_constants = false;
  cselib_any_perm_equivs = false;
  cfa_base_preserved_val = NULL;
  cfa_base_preserved_regno = INVALID_REGNUM;
  elt_list_pool.release ();
  elt_loc_list_pool.release ();
  cselib_val_pool.release ();
  value_pool.release ();
  /* Clear per-register/memory state before dropping the hash table.  */
  cselib_clear_table ();
  delete cselib_hash_table;
  cselib_hash_table = NULL;
  if (preserved)
    delete cselib_preserved_hash_table;
  cselib_preserved_hash_table = NULL;
  free (used_regs);
  used_regs = 0;
  n_useless_values = 0;
  n_useless_debug_values = 0;
  n_debug_values = 0;
  next_uid = 0;
}
3465 :
/* Dump the cselib_val V to FILE *OUT.  Prints V's VALUE rtx, its
   location list, its address list and its containing-mem chain link.
   Always returns 1 so it can be used as a hash-table traversal
   callback.  NEED_LF tracks whether a newline is still owed before
   the next section header.  */

int
dump_cselib_val (cselib_val *v, FILE *out)
{
  bool need_lf = true;

  print_inline_rtx (out, v->val_rtx, 0);

  if (v->locs)
    {
      struct elt_loc_list *l = v->locs;
      if (need_lf)
	{
	  fputc ('\n', out);
	  need_lf = false;
	}
      fputs (" locs:", out);
      do
	{
	  if (l->setting_insn)
	    fprintf (out, "\n from insn %i ",
		     INSN_UID (l->setting_insn));
	  else
	    fprintf (out, "\n ");
	  print_inline_rtx (out, l->loc, 4);
	}
      while ((l = l->next));
      fputc ('\n', out);
    }
  else
    {
      fputs (" no locs", out);
      need_lf = true;
    }

  if (v->addr_list)
    {
      struct elt_list *e = v->addr_list;
      if (need_lf)
	{
	  fputc ('\n', out);
	  need_lf = false;
	}
      fputs (" addr list:", out);
      do
	{
	  fputs ("\n ", out);
	  print_inline_rtx (out, e->elt->val_rtx, 2);
	}
      while ((e = e->next));
      fputc ('\n', out);
    }
  else
    {
      fputs (" no addrs", out);
      need_lf = true;
    }

  /* dummy_val terminates the containing-mem chain.  */
  if (v->next_containing_mem == &dummy_val)
    fputs (" last mem\n", out);
  else if (v->next_containing_mem)
    {
      fputs (" next mem ", out);
      print_inline_rtx (out, v->next_containing_mem->val_rtx, 2);
      fputc ('\n', out);
    }
  else if (need_lf)
    fputc ('\n', out);

  return 1;
}
3538 :
3539 : /* Dump the cselib_val *X to FILE *OUT. */
3540 :
3541 : static int
3542 93 : dump_cselib_val_ptr (cselib_val **x, FILE *out)
3543 : {
3544 93 : cselib_val *v = *x;
3545 93 : return dump_cselib_val (v, out);
3546 : }
3547 :
/* Dump to OUT everything in the CSELIB table: the main hash table, the
   preserved hash table (if allocated), the head of the containing-mem
   chain, and the next VALUE uid.  */

void
dump_cselib_table (FILE *out)
{
  fprintf (out, "cselib hash table:\n");
  cselib_hash_table->traverse <FILE *, dump_cselib_val_ptr> (out);
  if (cselib_preserved_hash_table)
    {
      fprintf (out, "cselib preserved hash table:\n");
      cselib_preserved_hash_table->traverse <FILE *, dump_cselib_val_ptr> (out);
    }
  /* dummy_val marks an empty containing-mem chain.  */
  if (first_containing_mem != &dummy_val)
    {
      fputs ("first mem ", out);
      print_inline_rtx (out, first_containing_mem->val_rtx, 2);
      fputc ('\n', out);
    }
  fprintf (out, "next uid %i\n", next_uid);
}
3568 :
3569 : #include "gt-cselib.h"
|