Line data Source code
1 : /* Common subexpression elimination for GNU compiler.
2 : Copyright (C) 1987-2026 Free Software Foundation, Inc.
3 :
4 : This file is part of GCC.
5 :
6 : GCC is free software; you can redistribute it and/or modify it under
7 : the terms of the GNU General Public License as published by the Free
8 : Software Foundation; either version 3, or (at your option) any later
9 : version.
10 :
11 : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 : WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 : for more details.
15 :
16 : You should have received a copy of the GNU General Public License
17 : along with GCC; see the file COPYING3. If not see
18 : <http://www.gnu.org/licenses/>. */
19 :
20 : #include "config.h"
21 : #include "system.h"
22 : #include "coretypes.h"
23 : #include "backend.h"
24 : #include "target.h"
25 : #include "rtl.h"
26 : #include "stmt.h"
27 : #include "tree.h"
28 : #include "cfghooks.h"
29 : #include "df.h"
30 : #include "memmodel.h"
31 : #include "tm_p.h"
32 : #include "insn-config.h"
33 : #include "regs.h"
34 : #include "emit-rtl.h"
35 : #include "recog.h"
36 : #include "cfgrtl.h"
37 : #include "cfganal.h"
38 : #include "cfgcleanup.h"
39 : #include "alias.h"
40 : #include "toplev.h"
41 : #include "rtlhooks-def.h"
42 : #include "tree-pass.h"
43 : #include "dbgcnt.h"
44 : #include "rtl-iter.h"
45 : #include "regs.h"
46 : #include "function-abi.h"
47 : #include "rtlanal.h"
48 : #include "expr.h"
49 :
50 : /* The basic idea of common subexpression elimination is to go
51 : through the code, keeping a record of expressions that would
52 : have the same value at the current scan point, and replacing
53 : expressions encountered with the cheapest equivalent expression.
54 :
55 : It is too complicated to keep track of the different possibilities
56 : when control paths merge in this code; so, at each label, we forget all
57 : that is known and start fresh. This can be described as processing each
58 : extended basic block separately. We have a separate pass to perform
59 : global CSE.
60 :
61 : Note CSE can turn a conditional or computed jump into a nop or
62 : an unconditional jump. When this occurs we arrange to run the jump
63 : optimizer after CSE to delete the unreachable code.
64 :
65 : We use two data structures to record the equivalent expressions:
66 : a hash table for most expressions, and a vector of "quantity
67 : numbers" to record equivalent (pseudo) registers.
68 :
69 : The use of the special data structure for registers is desirable
because it is faster. It is possible because register references
71 : contain a fairly small number, the register number, taken from
72 : a contiguously allocated series, and two register references are
73 : identical if they have the same number. General expressions
74 : do not have any such thing, so the only way to retrieve the
75 : information recorded on an expression other than a register
76 : is to keep it in a hash table.
77 :
78 : Registers and "quantity numbers":
79 :
80 : At the start of each basic block, all of the (hardware and pseudo)
81 : registers used in the function are given distinct quantity
82 : numbers to indicate their contents. During scan, when the code
83 : copies one register into another, we copy the quantity number.
84 : When a register is loaded in any other way, we allocate a new
85 : quantity number to describe the value generated by this operation.
86 : `REG_QTY (N)' records what quantity register N is currently thought
87 : of as containing.
88 :
89 : All real quantity numbers are greater than or equal to zero.
90 : If register N has not been assigned a quantity, `REG_QTY (N)' will
91 : equal -N - 1, which is always negative.
92 :
93 : Quantity numbers below zero do not exist and none of the `qty_table'
94 : entries should be referenced with a negative index.
95 :
96 : We also maintain a bidirectional chain of registers for each
97 : quantity number. The `qty_table` members `first_reg' and `last_reg',
98 : and `reg_eqv_table' members `next' and `prev' hold these chains.
99 :
100 : The first register in a chain is the one whose lifespan is least local.
101 : Among equals, it is the one that was seen first.
102 : We replace any equivalent register with that one.
103 :
104 : If two registers have the same quantity number, it must be true that
105 : REG expressions with qty_table `mode' must be in the hash table for both
106 : registers and must be in the same class.
107 :
108 : The converse is not true. Since hard registers may be referenced in
109 : any mode, two REG expressions might be equivalent in the hash table
110 : but not have the same quantity number if the quantity number of one
111 : of the registers is not the same mode as those expressions.
112 :
113 : Constants and quantity numbers
114 :
115 : When a quantity has a known constant value, that value is stored
116 : in the appropriate qty_table `const_rtx'. This is in addition to
117 : putting the constant in the hash table as is usual for non-regs.
118 :
119 : Whether a reg or a constant is preferred is determined by the configuration
120 : macro CONST_COSTS and will often depend on the constant value. In any
121 : event, expressions containing constants can be simplified, by fold_rtx.
122 :
123 : When a quantity has a known nearly constant value (such as an address
124 : of a stack slot), that value is stored in the appropriate qty_table
125 : `const_rtx'.
126 :
127 : Integer constants don't have a machine mode. However, cse
128 : determines the intended machine mode from the destination
129 : of the instruction that moves the constant. The machine mode
130 : is recorded in the hash table along with the actual RTL
131 : constant expression so that different modes are kept separate.
132 :
133 : Other expressions:
134 :
135 : To record known equivalences among expressions in general
136 : we use a hash table called `table'. It has a fixed number of buckets
137 : that contain chains of `struct table_elt' elements for expressions.
138 : These chains connect the elements whose expressions have the same
139 : hash codes.
140 :
141 : Other chains through the same elements connect the elements which
142 : currently have equivalent values.
143 :
144 : Register references in an expression are canonicalized before hashing
145 : the expression. This is done using `reg_qty' and qty_table `first_reg'.
146 : The hash code of a register reference is computed using the quantity
147 : number, not the register number.
148 :
149 : When the value of an expression changes, it is necessary to remove from the
150 : hash table not just that expression but all expressions whose values
151 : could be different as a result.
152 :
153 : 1. If the value changing is in memory, except in special cases
154 : ANYTHING referring to memory could be changed. That is because
155 : nobody knows where a pointer does not point.
156 : The function `invalidate_memory' removes what is necessary.
157 :
158 : The special cases are when the address is constant or is
159 : a constant plus a fixed register such as the frame pointer
160 : or a static chain pointer. When such addresses are stored in,
161 : we can tell exactly which other such addresses must be invalidated
162 : due to overlap. `invalidate' does this.
163 : All expressions that refer to non-constant
164 : memory addresses are also invalidated. `invalidate_memory' does this.
165 :
166 : 2. If the value changing is a register, all expressions
167 : containing references to that register, and only those,
168 : must be removed.
169 :
170 : Because searching the entire hash table for expressions that contain
171 : a register is very slow, we try to figure out when it isn't necessary.
172 : Precisely, this is necessary only when expressions have been
173 : entered in the hash table using this register, and then the value has
174 : changed, and then another expression wants to be added to refer to
175 : the register's new value. This sequence of circumstances is rare
176 : within any one basic block.
177 :
178 : `REG_TICK' and `REG_IN_TABLE', accessors for members of
179 : cse_reg_info, are used to detect this case. REG_TICK (i) is
180 : incremented whenever a value is stored in register i.
181 : REG_IN_TABLE (i) holds -1 if no references to register i have been
182 : entered in the table; otherwise, it contains the value REG_TICK (i)
183 : had when the references were entered. If we want to enter a
184 : reference and REG_IN_TABLE (i) != REG_TICK (i), we must scan and
185 : remove old references. Until we want to enter a new entry, the
186 : mere fact that the two vectors don't match makes the entries be
187 : ignored if anyone tries to match them.
188 :
189 : Registers themselves are entered in the hash table as well as in
190 : the equivalent-register chains. However, `REG_TICK' and
191 : `REG_IN_TABLE' do not apply to expressions which are simple
192 : register references. These expressions are removed from the table
193 : immediately when they become invalid, and this can be done even if
194 : we do not immediately search for all the expressions that refer to
195 : the register.
196 :
197 : A CLOBBER rtx in an instruction invalidates its operand for further
198 : reuse. A CLOBBER or SET rtx whose operand is a MEM:BLK
199 : invalidates everything that resides in memory.
200 :
201 : Related expressions:
202 :
203 : Constant expressions that differ only by an additive integer
204 : are called related. When a constant expression is put in
205 : the table, the related expression with no constant term
206 : is also entered. These are made to point at each other
207 : so that it is possible to find out if there exists any
208 : register equivalent to an expression related to a given expression. */
209 :
210 : /* Length of qty_table vector. We know in advance we will not need
211 : a quantity number this big. */
212 :
213 : static int max_qty;
214 :
215 : /* Next quantity number to be allocated.
216 : This is 1 + the largest number needed so far. */
217 :
218 : static int next_qty;
219 :
220 : /* Per-qty information tracking.
221 :
222 : `first_reg' and `last_reg' track the head and tail of the
223 : chain of registers which currently contain this quantity.
224 :
225 : `mode' contains the machine mode of this quantity.
226 :
227 : `const_rtx' holds the rtx of the constant value of this
quantity, if known. A summation of the frame/arg pointer
229 : and a constant can also be entered here. When this holds
230 : a known value, `const_insn' is the insn which stored the
231 : constant value.
232 :
233 : `comparison_{code,const,qty}' are used to track when a
234 : comparison between a quantity and some constant or register has
235 : been passed. In such a case, we know the results of the comparison
236 : in case we see it again. These members record a comparison that
237 : is known to be true. `comparison_code' holds the rtx code of such
238 : a comparison, else it is set to UNKNOWN and the other two
239 : comparison members are undefined. `comparison_const' holds
240 : the constant being compared against, or zero if the comparison
241 : is not against a constant. `comparison_qty' holds the quantity
242 : being compared against when the result is known. If the comparison
243 : is not with a register, `comparison_qty' is INT_MIN. */
244 :
245 : struct qty_table_elem
246 : {
247 : rtx const_rtx;
248 : rtx_insn *const_insn;
249 : rtx comparison_const;
250 : int comparison_qty;
251 : unsigned int first_reg, last_reg;
252 : ENUM_BITFIELD(machine_mode) mode : MACHINE_MODE_BITSIZE;
253 : ENUM_BITFIELD(rtx_code) comparison_code : RTX_CODE_BITSIZE;
254 : };
255 :
256 : /* The table of all qtys, indexed by qty number. */
257 : static struct qty_table_elem *qty_table;
258 :
259 : /* Insn being scanned. */
260 :
261 : static rtx_insn *this_insn;
262 : static bool optimize_this_for_speed_p;
263 :
264 : /* Index by register number, gives the number of the next (or
265 : previous) register in the chain of registers sharing the same
266 : value.
267 :
268 : Or -1 if this register is at the end of the chain.
269 :
270 : If REG_QTY (N) == -N - 1, reg_eqv_table[N].next is undefined. */
271 :
272 : /* Per-register equivalence chain. */
273 : struct reg_eqv_elem
274 : {
275 : int next, prev;
276 : };
277 :
278 : /* The table of all register equivalence chains. */
279 : static struct reg_eqv_elem *reg_eqv_table;
280 :
struct cse_reg_info
{
  /* The timestamp at which this register is initialized.  */
  unsigned int timestamp;

  /* The quantity number of the register's current contents.  */
  int reg_qty;

  /* The number of times the register has been altered in the current
     basic block.  */
  int reg_tick;

  /* The REG_TICK value at which rtx's containing this register are
     valid in the hash table.  If this does not equal the current
     reg_tick value, such expressions existing in the hash table are
     invalid.  */
  int reg_in_table;

  /* The SUBREG that was set when REG_TICK was last incremented.  Set
     to -1 if the last store was to the whole register, not a subreg.
     Must be signed: get_cse_reg_info_1 stores the -1 sentinel here,
     which an unsigned field cannot represent as a negative value.  */
  int subreg_ticked;
};
303 :
304 : /* A table of cse_reg_info indexed by register numbers. */
305 : static struct cse_reg_info *cse_reg_info_table;
306 :
307 : /* The size of the above table. */
308 : static unsigned int cse_reg_info_table_size;
309 :
310 : /* The index of the first entry that has not been initialized. */
311 : static unsigned int cse_reg_info_table_first_uninitialized;
312 :
313 : /* The timestamp at the beginning of the current run of
314 : cse_extended_basic_block. We increment this variable at the beginning of
315 : the current run of cse_extended_basic_block. The timestamp field of a
316 : cse_reg_info entry matches the value of this variable if and only
317 : if the entry has been initialized during the current run of
318 : cse_extended_basic_block. */
319 : static unsigned int cse_reg_info_timestamp;
320 :
321 : /* A HARD_REG_SET containing all the hard registers for which there is
322 : currently a REG expression in the hash table. Note the difference
323 : from the above variables, which indicate if the REG is mentioned in some
324 : expression in the table. */
325 :
326 : static HARD_REG_SET hard_regs_in_table;
327 :
328 : /* True if CSE has altered the CFG. */
329 : static bool cse_cfg_altered;
330 :
331 : /* True if CSE has altered conditional jump insns in such a way
332 : that jump optimization should be redone. */
333 : static bool cse_jumps_altered;
334 :
335 : /* True if we put a LABEL_REF into the hash table for an INSN
336 : without a REG_LABEL_OPERAND, we have to rerun jump after CSE
337 : to put in the note. */
338 : static bool recorded_label_ref;
339 :
340 : /* canon_hash stores 1 in do_not_record if it notices a reference to PC or
341 : some other volatile subexpression. */
342 :
343 : static int do_not_record;
344 :
345 : /* canon_hash stores 1 in hash_arg_in_memory
346 : if it notices a reference to memory within the expression being hashed. */
347 :
348 : static int hash_arg_in_memory;
349 :
350 : /* The hash table contains buckets which are chains of `struct table_elt's,
351 : each recording one expression's information.
352 : That expression is in the `exp' field.
353 :
354 : The canon_exp field contains a canonical (from the point of view of
355 : alias analysis) version of the `exp' field.
356 :
357 : Those elements with the same hash code are chained in both directions
358 : through the `next_same_hash' and `prev_same_hash' fields.
359 :
360 : Each set of expressions with equivalent values
361 : are on a two-way chain through the `next_same_value'
362 : and `prev_same_value' fields, and all point with
363 : the `first_same_value' field at the first element in
364 : that chain. The chain is in order of increasing cost.
365 : Each element's cost value is in its `cost' field.
366 :
367 : The `in_memory' field is nonzero for elements that
368 : involve any reference to memory. These elements are removed
369 : whenever a write is done to an unidentified location in memory.
370 : To be safe, we assume that a memory address is unidentified unless
371 : the address is either a symbol constant or a constant plus
372 : the frame pointer or argument pointer.
373 :
374 : The `related_value' field is used to connect related expressions
375 : (that differ by adding an integer).
376 : The related expressions are chained in a circular fashion.
377 : `related_value' is zero for expressions for which this
378 : chain is not useful.
379 :
380 : The `cost' field stores the cost of this element's expression.
381 : The `regcost' field stores the value returned by approx_reg_cost for
382 : this element's expression.
383 :
384 : The `is_const' flag is set if the element is a constant (including
385 : a fixed address).
386 :
387 : The `flag' field is used as a temporary during some search routines.
388 :
389 : The `mode' field is usually the same as GET_MODE (`exp'), but
390 : if `exp' is a CONST_INT and has no machine mode then the `mode'
391 : field is the mode it was being used as. Each constant is
392 : recorded separately for each mode it is used with. */
393 :
394 : struct table_elt
395 : {
396 : rtx exp;
397 : rtx canon_exp;
398 : struct table_elt *next_same_hash;
399 : struct table_elt *prev_same_hash;
400 : struct table_elt *next_same_value;
401 : struct table_elt *prev_same_value;
402 : struct table_elt *first_same_value;
403 : struct table_elt *related_value;
404 : int cost;
405 : int regcost;
406 : ENUM_BITFIELD(machine_mode) mode : MACHINE_MODE_BITSIZE;
407 : char in_memory;
408 : char is_const;
409 : char flag;
410 : };
411 :
412 : /* We don't want a lot of buckets, because we rarely have very many
413 : things stored in the hash table, and a lot of buckets slows
414 : down a lot of loops that happen frequently. */
415 : #define HASH_SHIFT 5
416 : #define HASH_SIZE (1 << HASH_SHIFT)
417 : #define HASH_MASK (HASH_SIZE - 1)
418 :
419 : /* Determine whether register number N is considered a fixed register for the
420 : purpose of approximating register costs.
421 : It is desirable to replace other regs with fixed regs, to reduce need for
422 : non-fixed hard regs.
423 : A reg wins if it is either the frame pointer or designated as fixed. */
424 : #define FIXED_REGNO_P(N) \
425 : ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
426 : || fixed_regs[N] || global_regs[N])
427 :
428 : /* Compute cost of X, as stored in the `cost' field of a table_elt. Fixed
429 : hard registers and pointers into the frame are the cheapest with a cost
430 : of 0. Next come pseudos with a cost of one and other hard registers with
431 : a cost of 2. Aside from these special cases, call `rtx_cost'. */
432 :
433 : #define CHEAP_REGNO(N) \
434 : (REGNO_PTR_FRAME_P (N) \
435 : || (HARD_REGISTER_NUM_P (N) \
436 : && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))
437 :
438 : #define COST(X, MODE) \
439 : (REG_P (X) ? 0 : notreg_cost (X, MODE, SET, 1))
440 : #define COST_IN(X, MODE, OUTER, OPNO) \
441 : (REG_P (X) ? 0 : notreg_cost (X, MODE, OUTER, OPNO))
442 :
443 : /* Get the number of times this register has been updated in this
444 : basic block. */
445 :
446 : #define REG_TICK(N) (get_cse_reg_info (N)->reg_tick)
447 :
448 : /* Get the point at which REG was recorded in the table. */
449 :
450 : #define REG_IN_TABLE(N) (get_cse_reg_info (N)->reg_in_table)
451 :
452 : /* Get the SUBREG set at the last increment to REG_TICK (-1 if not a
453 : SUBREG). */
454 :
455 : #define SUBREG_TICKED(N) (get_cse_reg_info (N)->subreg_ticked)
456 :
457 : /* Get the quantity number for REG. */
458 :
459 : #define REG_QTY(N) (get_cse_reg_info (N)->reg_qty)
460 :
461 : /* Determine if the quantity number for register X represents a valid index
462 : into the qty_table. */
463 :
464 : #define REGNO_QTY_VALID_P(N) (REG_QTY (N) >= 0)
465 :
466 : /* Compare table_elt X and Y and return true iff X is cheaper than Y. */
467 :
468 : #define CHEAPER(X, Y) \
469 : (preferable ((X)->cost, (X)->regcost, (Y)->cost, (Y)->regcost) < 0)
470 :
471 : static struct table_elt *table[HASH_SIZE];
472 :
473 : /* Chain of `struct table_elt's made so far for this function
474 : but currently removed from the table. */
475 :
476 : static struct table_elt *free_element_chain;
477 :
478 : /* Trace a patch through the CFG. */
479 :
480 : struct branch_path
481 : {
482 : /* The basic block for this path entry. */
483 : basic_block bb;
484 : };
485 :
486 : /* This data describes a block that will be processed by
487 : cse_extended_basic_block. */
488 :
489 : struct cse_basic_block_data
490 : {
491 : /* Total number of SETs in block. */
492 : int nsets;
493 : /* Size of current branch path, if any. */
494 : int path_size;
495 : /* Current path, indicating which basic_blocks will be processed. */
496 : struct branch_path *path;
497 : };
498 :
499 :
500 : /* Pointers to the live in/live out bitmaps for the boundaries of the
501 : current EBB. */
502 : static bitmap cse_ebb_live_in, cse_ebb_live_out;
503 :
504 : /* A simple bitmap to track which basic blocks have been visited
505 : already as part of an already processed extended basic block. */
506 : static sbitmap cse_visited_basic_blocks;
507 :
508 : static bool fixed_base_plus_p (rtx x);
509 : static int notreg_cost (rtx, machine_mode, enum rtx_code, int);
510 : static int preferable (int, int, int, int);
511 : static void new_basic_block (void);
512 : static void make_new_qty (unsigned int, machine_mode);
513 : static void make_regs_eqv (unsigned int, unsigned int);
514 : static void delete_reg_equiv (unsigned int);
515 : static bool mention_regs (rtx);
516 : static bool insert_regs (rtx, struct table_elt *, bool);
517 : static void remove_from_table (struct table_elt *, unsigned);
518 : static void remove_pseudo_from_table (rtx, unsigned);
519 : static struct table_elt *lookup (rtx, unsigned, machine_mode);
520 : static struct table_elt *lookup_for_remove (rtx, unsigned, machine_mode);
521 : static rtx lookup_as_function (rtx, enum rtx_code);
522 : static struct table_elt *insert_with_costs (rtx, struct table_elt *, unsigned,
523 : machine_mode, int, int);
524 : static struct table_elt *insert (rtx, struct table_elt *, unsigned,
525 : machine_mode);
526 : static void merge_equiv_classes (struct table_elt *, struct table_elt *);
527 : static void invalidate (rtx, machine_mode);
528 : static void remove_invalid_refs (unsigned int);
529 : static void remove_invalid_subreg_refs (unsigned int, poly_uint64,
530 : machine_mode);
531 : static void rehash_using_reg (rtx);
532 : static void invalidate_memory (void);
533 : static rtx use_related_value (rtx, struct table_elt *);
534 :
535 : static inline unsigned canon_hash (rtx, machine_mode);
536 : static inline unsigned safe_hash (rtx, machine_mode);
537 : static inline unsigned hash_rtx_string (const char *);
538 :
539 : static rtx canon_reg (rtx, rtx_insn *);
540 : static enum rtx_code find_comparison_args (enum rtx_code, rtx *, rtx *,
541 : machine_mode *,
542 : machine_mode *);
543 : static rtx fold_rtx (rtx, rtx_insn *);
544 : static rtx equiv_constant (rtx);
545 : static void record_jump_equiv (rtx_insn *, bool);
546 : static void record_jump_cond (enum rtx_code, machine_mode, rtx, rtx);
547 : static void cse_insn (rtx_insn *);
548 : static void cse_prescan_path (struct cse_basic_block_data *);
549 : static void invalidate_from_clobbers (rtx_insn *);
550 : static void invalidate_from_sets_and_clobbers (rtx_insn *);
551 : static void cse_extended_basic_block (struct cse_basic_block_data *);
552 : extern void dump_class (struct table_elt*);
553 : static void get_cse_reg_info_1 (unsigned int regno);
554 : static struct cse_reg_info * get_cse_reg_info (unsigned int regno);
555 :
556 : static void flush_hash_table (void);
557 : static bool insn_live_p (rtx_insn *, int *);
558 : static bool set_live_p (rtx, int *);
559 : static void cse_change_cc_mode_insn (rtx_insn *, rtx);
560 : static void cse_change_cc_mode_insns (rtx_insn *, rtx_insn *, rtx);
561 : static machine_mode cse_cc_succs (basic_block, basic_block, rtx, rtx,
562 : bool);
563 :
564 :
565 : #undef RTL_HOOKS_GEN_LOWPART
566 : #define RTL_HOOKS_GEN_LOWPART gen_lowpart_if_possible
567 :
568 : static const struct rtl_hooks cse_rtl_hooks = RTL_HOOKS_INITIALIZER;
569 :
570 : /* Compute hash code of X in mode M. Special-case case where X is a pseudo
571 : register (hard registers may require `do_not_record' to be set). */
572 :
573 : static inline unsigned
574 829362579 : HASH (rtx x, machine_mode mode)
575 : {
576 538375982 : unsigned h = (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER
577 1149088318 : ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (x)))
578 829362579 : : canon_hash (x, mode));
579 829362579 : return (h ^ (h >> HASH_SHIFT)) & HASH_MASK;
580 : }
581 :
582 : /* Like HASH, but without side-effects. */
583 :
584 : static inline unsigned
585 227844050 : SAFE_HASH (rtx x, machine_mode mode)
586 : {
587 116513814 : unsigned h = (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER
588 293955100 : ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (x)))
589 227844050 : : safe_hash (x, mode));
590 227844050 : return (h ^ (h >> HASH_SHIFT)) & HASH_MASK;
591 : }
592 :
593 : /* Nonzero if X has the form (PLUS frame-pointer integer). */
594 :
595 : static bool
596 235355993 : fixed_base_plus_p (rtx x)
597 : {
598 267388305 : switch (GET_CODE (x))
599 : {
600 139158706 : case REG:
601 139158706 : if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx)
602 : return true;
603 125128516 : if (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
604 117904 : return true;
605 : return false;
606 :
607 38229037 : case PLUS:
608 38229037 : if (!CONST_INT_P (XEXP (x, 1)))
609 : return false;
610 32032312 : return fixed_base_plus_p (XEXP (x, 0));
611 :
612 : default:
613 : return false;
614 : }
615 : }
616 :
617 : /* Dump the expressions in the equivalence class indicated by CLASSP.
618 : This function is used only for debugging. */
619 : DEBUG_FUNCTION void
620 0 : dump_class (struct table_elt *classp)
621 : {
622 0 : struct table_elt *elt;
623 :
624 0 : fprintf (stderr, "Equivalence chain for ");
625 0 : print_rtl (stderr, classp->exp);
626 0 : fprintf (stderr, ": \n");
627 :
628 0 : for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
629 : {
630 0 : print_rtl (stderr, elt->exp);
631 0 : fprintf (stderr, "\n");
632 : }
633 0 : }
634 :
635 : /* Return an estimate of the cost of the registers used in an rtx.
636 : This is mostly the number of different REG expressions in the rtx;
637 : however for some exceptions like fixed registers we use a cost of
638 : 0. If any other hard register reference occurs, return MAX_COST. */
639 :
640 : static int
641 429927306 : approx_reg_cost (const_rtx x)
642 : {
643 429927306 : int cost = 0;
644 429927306 : subrtx_iterator::array_type array;
645 1356187949 : FOR_EACH_SUBRTX (iter, array, x, NONCONST)
646 : {
647 982052124 : const_rtx x = *iter;
648 982052124 : if (REG_P (x))
649 : {
650 406899405 : unsigned int regno = REGNO (x);
651 406899405 : if (!CHEAP_REGNO (regno))
652 : {
653 55791481 : if (regno < FIRST_PSEUDO_REGISTER)
654 : {
655 55791481 : if (targetm.small_register_classes_for_mode_p (GET_MODE (x)))
656 55791481 : return MAX_COST;
657 0 : cost += 2;
658 : }
659 : else
660 288755065 : cost += 1;
661 : }
662 : }
663 : }
664 374135825 : return cost;
665 429927306 : }
666 :
667 : /* Return a negative value if an rtx A, whose costs are given by COST_A
668 : and REGCOST_A, is more desirable than an rtx B.
669 : Return a positive value if A is less desirable, or 0 if the two are
670 : equally good. */
671 : static int
672 653665144 : preferable (int cost_a, int regcost_a, int cost_b, int regcost_b)
673 : {
674 : /* First, get rid of cases involving expressions that are entirely
675 : unwanted. */
676 653665144 : if (cost_a != cost_b)
677 : {
678 611213687 : if (cost_a == MAX_COST)
679 : return 1;
680 609842929 : if (cost_b == MAX_COST)
681 : return -1;
682 : }
683 :
684 : /* Avoid extending lifetimes of hardregs. */
685 171578606 : if (regcost_a != regcost_b)
686 : {
687 93188493 : if (regcost_a == MAX_COST)
688 : return 1;
689 72038648 : if (regcost_b == MAX_COST)
690 : return -1;
691 : }
692 :
693 : /* Normal operation costs take precedence. */
694 148503438 : if (cost_a != cost_b)
695 106174759 : return cost_a - cost_b;
696 : /* Only if these are identical consider effects on register pressure. */
697 42328679 : if (regcost_a != regcost_b)
698 42328679 : return regcost_a - regcost_b;
699 : return 0;
700 : }
701 :
702 : /* Internal function, to compute cost when X is not a register; called
703 : from COST macro to keep it simple. */
704 :
705 : static int
706 303449976 : notreg_cost (rtx x, machine_mode mode, enum rtx_code outer, int opno)
707 : {
708 303449976 : scalar_int_mode int_mode, inner_mode;
709 303449976 : return ((GET_CODE (x) == SUBREG
710 5219933 : && REG_P (SUBREG_REG (x))
711 305466283 : && is_int_mode (mode, &int_mode)
712 304698811 : && is_int_mode (GET_MODE (SUBREG_REG (x)), &inner_mode)
713 7587058 : && GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (inner_mode)
714 3738102 : && subreg_lowpart_p (x)
715 2544694 : && TRULY_NOOP_TRUNCATION_MODES_P (int_mode, inner_mode))
716 303449976 : ? 0
717 300905282 : : rtx_cost (x, mode, outer, opno, optimize_this_for_speed_p) * 2);
718 : }
719 :
720 :
721 : /* Initialize CSE_REG_INFO_TABLE. */
722 :
723 : static void
724 2292560 : init_cse_reg_info (unsigned int nregs)
725 : {
726 : /* Do we need to grow the table? */
727 2292560 : if (nregs > cse_reg_info_table_size)
728 : {
729 175939 : unsigned int new_size;
730 :
731 175939 : if (cse_reg_info_table_size < 2048)
732 : {
733 : /* Compute a new size that is a power of 2 and no smaller
734 : than the large of NREGS and 64. */
735 31408 : new_size = (cse_reg_info_table_size
736 175609 : ? cse_reg_info_table_size : 64);
737 :
738 389879 : while (new_size < nregs)
739 214270 : new_size *= 2;
740 : }
741 : else
742 : {
743 : /* If we need a big table, allocate just enough to hold
744 : NREGS registers. */
745 : new_size = nregs;
746 : }
747 :
748 : /* Reallocate the table with NEW_SIZE entries. */
749 175939 : free (cse_reg_info_table);
750 175939 : cse_reg_info_table = XNEWVEC (struct cse_reg_info, new_size);
751 175939 : cse_reg_info_table_size = new_size;
752 175939 : cse_reg_info_table_first_uninitialized = 0;
753 : }
754 :
755 : /* Do we have all of the first NREGS entries initialized? */
756 2292560 : if (cse_reg_info_table_first_uninitialized < nregs)
757 : {
758 317420 : unsigned int old_timestamp = cse_reg_info_timestamp - 1;
759 317420 : unsigned int i;
760 :
761 : /* Put the old timestamp on newly allocated entries so that they
762 : will all be considered out of date. We do not touch those
763 : entries beyond the first NREGS entries to be nice to the
764 : virtual memory. */
765 33063069 : for (i = cse_reg_info_table_first_uninitialized; i < nregs; i++)
766 32745649 : cse_reg_info_table[i].timestamp = old_timestamp;
767 :
768 317420 : cse_reg_info_table_first_uninitialized = nregs;
769 : }
770 2292560 : }
771 :
772 : /* Given REGNO, initialize the cse_reg_info entry for REGNO. */
773 :
774 : static void
775 843627014 : get_cse_reg_info_1 (unsigned int regno)
776 : {
777 : /* Set TIMESTAMP field to CSE_REG_INFO_TIMESTAMP so that this
778 : entry will be considered to have been initialized. */
779 843627014 : cse_reg_info_table[regno].timestamp = cse_reg_info_timestamp;
780 :
781 : /* Initialize the rest of the entry. */
782 843627014 : cse_reg_info_table[regno].reg_tick = 1;
783 843627014 : cse_reg_info_table[regno].reg_in_table = -1;
784 843627014 : cse_reg_info_table[regno].subreg_ticked = -1;
785 843627014 : cse_reg_info_table[regno].reg_qty = -regno - 1;
786 843627014 : }
787 :
788 : /* Find a cse_reg_info entry for REGNO. */
789 :
790 : static inline struct cse_reg_info *
791 11046258855 : get_cse_reg_info (unsigned int regno)
792 : {
793 11046258855 : struct cse_reg_info *p = &cse_reg_info_table[regno];
794 :
795 : /* If this entry has not been initialized, go ahead and initialize
796 : it. */
797 11046258855 : if (p->timestamp != cse_reg_info_timestamp)
798 843627014 : get_cse_reg_info_1 (regno);
799 :
800 11046258855 : return p;
801 : }
802 :
803 : /* Clear the hash table and initialize each register with its own quantity,
804 : for a new basic block. */
805 :
806 : static void
807 20467602 : new_basic_block (void)
808 : {
809 20467602 : int i;
810 :
811 20467602 : next_qty = 0;
812 :
813 : /* Invalidate cse_reg_info_table. */
814 20467602 : cse_reg_info_timestamp++;
815 :
816 : /* Clear out hash table state for this pass. */
817 20467602 : CLEAR_HARD_REG_SET (hard_regs_in_table);
818 :
819 : /* The per-quantity values used to be initialized here, but it is
820 : much faster to initialize each as it is made in `make_new_qty'. */
821 :
822 675430866 : for (i = 0; i < HASH_SIZE; i++)
823 : {
824 654963264 : struct table_elt *first;
825 :
826 654963264 : first = table[i];
827 654963264 : if (first != NULL)
828 : {
829 134846041 : struct table_elt *last = first;
830 :
831 134846041 : table[i] = NULL;
832 :
833 190877717 : while (last->next_same_hash != NULL)
834 : last = last->next_same_hash;
835 :
836 : /* Now relink this hash entire chain into
837 : the free element list. */
838 :
839 134846041 : last->next_same_hash = free_element_chain;
840 134846041 : free_element_chain = first;
841 : }
842 : }
843 20467602 : }
844 :
845 : /* Say that register REG contains a quantity in mode MODE not in any
846 : register before and initialize that quantity. */
847 :
848 : static void
849 103016949 : make_new_qty (unsigned int reg, machine_mode mode)
850 : {
851 103016949 : int q;
852 103016949 : struct qty_table_elem *ent;
853 103016949 : struct reg_eqv_elem *eqv;
854 :
855 103016949 : gcc_assert (next_qty < max_qty);
856 :
857 103016949 : q = REG_QTY (reg) = next_qty++;
858 103016949 : ent = &qty_table[q];
859 103016949 : ent->first_reg = reg;
860 103016949 : ent->last_reg = reg;
861 103016949 : ent->mode = mode;
862 103016949 : ent->const_rtx = ent->const_insn = NULL;
863 103016949 : ent->comparison_code = UNKNOWN;
864 :
865 103016949 : eqv = ®_eqv_table[reg];
866 103016949 : eqv->next = eqv->prev = -1;
867 103016949 : }
868 :
869 : /* Make reg NEW equivalent to reg OLD.
870 : OLD is not changing; NEW is. */
871 :
static void
make_regs_eqv (unsigned int new_reg, unsigned int old_reg)
{
  unsigned int lastr, firstr;
  int q = REG_QTY (old_reg);
  struct qty_table_elem *ent;

  ent = &qty_table[q];

  /* Nothing should become eqv until it has a "non-invalid" qty number.  */
  gcc_assert (REGNO_QTY_VALID_P (old_reg));

  /* NEW_REG joins OLD_REG's quantity; the rest of this function only
     decides where in the quantity's register chain it is linked.  */
  REG_QTY (new_reg) = q;
  firstr = ent->first_reg;
  lastr = ent->last_reg;

  /* Prefer fixed hard registers to anything.  Prefer pseudo regs to other
     hard regs.  Among pseudos, if NEW will live longer than any other reg
     of the same qty, and that is beyond the current basic block,
     make it the new canonical replacement for this qty.  */
  if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
      /* Certain fixed registers might be of the class NO_REGS.  This means
	 that not only can they not be allocated by the compiler, but
	 they cannot be used in substitutions or canonicalizations
	 either.  */
      && (new_reg >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new_reg) != NO_REGS)
      && ((new_reg < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new_reg))
	  || (new_reg >= FIRST_PSEUDO_REGISTER
	      && (firstr < FIRST_PSEUDO_REGISTER
		  || (bitmap_bit_p (cse_ebb_live_out, new_reg)
		      && !bitmap_bit_p (cse_ebb_live_out, firstr))
		  || (bitmap_bit_p (cse_ebb_live_in, new_reg)
		      && !bitmap_bit_p (cse_ebb_live_in, firstr))))))
    {
      /* NEW_REG becomes the new head (canonical register) of the
	 quantity's chain.  */
      reg_eqv_table[firstr].prev = new_reg;
      reg_eqv_table[new_reg].next = firstr;
      reg_eqv_table[new_reg].prev = -1;
      ent->first_reg = new_reg;
    }
  else
    {
      /* If NEW is a hard reg (known to be non-fixed), insert at end.
	 Otherwise, insert before any non-fixed hard regs that are at the
	 end.  Registers of class NO_REGS cannot be used as an
	 equivalent for anything.  */
      while (lastr < FIRST_PSEUDO_REGISTER && reg_eqv_table[lastr].prev >= 0
	     && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
	     && new_reg >= FIRST_PSEUDO_REGISTER)
	lastr = reg_eqv_table[lastr].prev;
      /* Link NEW_REG immediately after LASTR, updating the quantity's
	 last_reg if NEW_REG is now at the tail.  */
      reg_eqv_table[new_reg].next = reg_eqv_table[lastr].next;
      if (reg_eqv_table[lastr].next >= 0)
	reg_eqv_table[reg_eqv_table[lastr].next].prev = new_reg;
      else
	qty_table[q].last_reg = new_reg;
      reg_eqv_table[lastr].next = new_reg;
      reg_eqv_table[new_reg].prev = lastr;
    }
}
930 :
931 : /* Remove REG from its equivalence class. */
932 :
933 : static void
934 1470080317 : delete_reg_equiv (unsigned int reg)
935 : {
936 1470080317 : struct qty_table_elem *ent;
937 1470080317 : int q = REG_QTY (reg);
938 1470080317 : int p, n;
939 :
940 : /* If invalid, do nothing. */
941 1470080317 : if (! REGNO_QTY_VALID_P (reg))
942 : return;
943 :
944 18591758 : ent = &qty_table[q];
945 :
946 18591758 : p = reg_eqv_table[reg].prev;
947 18591758 : n = reg_eqv_table[reg].next;
948 :
949 18591758 : if (n != -1)
950 656759 : reg_eqv_table[n].prev = p;
951 : else
952 17934999 : ent->last_reg = p;
953 18591758 : if (p != -1)
954 652946 : reg_eqv_table[p].next = n;
955 : else
956 17938812 : ent->first_reg = n;
957 :
958 18591758 : REG_QTY (reg) = -reg - 1;
959 : }
960 :
961 : /* Remove any invalid expressions from the hash table
962 : that refer to any of the registers contained in expression X.
963 :
964 : Make sure that newly inserted references to those registers
965 : as subexpressions will be considered valid.
966 :
967 : mention_regs is not called when a register itself
968 : is being stored in the table.
969 :
970 : Return true if we have done something that may have changed
971 : the hash code of X. */
972 :
static bool
mention_regs (rtx x)
{
  enum rtx_code code;
  int i, j;
  const char *fmt;
  bool changed = false;

  if (x == 0)
    return false;

  code = GET_CODE (x);
  if (code == REG)
    {
      unsigned int regno = REGNO (x);
      unsigned int endregno = END_REGNO (x);
      unsigned int i;

      /* A multi-word hard register mention covers every word;
	 revalidate each register in the range.  */
      for (i = regno; i < endregno; i++)
	{
	  /* REG_IN_TABLE lagging REG_TICK means the register was
	     invalidated since it was last entered; purge stale
	     references before recording the new mention.  */
	  if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	    remove_invalid_refs (i);

	  REG_IN_TABLE (i) = REG_TICK (i);
	  SUBREG_TICKED (i) = -1;
	}

      return false;
    }

  /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
     pseudo if they don't use overlapping words.  We handle only pseudos
     here for simplicity.  */
  if (code == SUBREG && REG_P (SUBREG_REG (x))
      && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
    {
      unsigned int i = REGNO (SUBREG_REG (x));

      if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	{
	  /* If REG_IN_TABLE (i) differs from REG_TICK (i) by one, and
	     the last store to this register really stored into this
	     subreg, then remove the memory of this subreg.
	     Otherwise, remove any memory of the entire register and
	     all its subregs from the table.  */
	  if (REG_TICK (i) - REG_IN_TABLE (i) > 1
	      || SUBREG_TICKED (i) != REGNO (SUBREG_REG (x)))
	    remove_invalid_refs (i);
	  else
	    remove_invalid_subreg_refs (i, SUBREG_BYTE (x), GET_MODE (x));
	}

      /* Record which pseudo this SUBREG last touched so a later store
	 to the same subreg can be recognized above.  */
      REG_IN_TABLE (i) = REG_TICK (i);
      SUBREG_TICKED (i) = REGNO (SUBREG_REG (x));
      return false;
    }

  /* If X is a comparison or a COMPARE and either operand is a register
     that does not have a quantity, give it one.  This is so that a later
     call to record_jump_equiv won't cause X to be assigned a different
     hash code and not found in the table after that call.

     It is not necessary to do this here, since rehash_using_reg can
     fix up the table later, but doing this here eliminates the need to
     call that expensive function in the most common case where the only
     use of the register is in the comparison.  */

  if (code == COMPARE || COMPARISON_P (x))
    {
      if (REG_P (XEXP (x, 0))
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
	if (insert_regs (XEXP (x, 0), NULL, false))
	  {
	    rehash_using_reg (XEXP (x, 0));
	    changed = true;
	  }

      if (REG_P (XEXP (x, 1))
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
	if (insert_regs (XEXP (x, 1), NULL, false))
	  {
	    rehash_using_reg (XEXP (x, 1));
	    changed = true;
	  }
    }

  /* Recurse into every rtx ('e') and rtx-vector ('E') operand of X,
     accumulating whether any recursion changed a hash code.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
	if (mention_regs (XEXP (x, i)))
	  changed = true;
      }
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	if (mention_regs (XVECEXP (x, i, j)))
	  changed = true;

  return changed;
}
1073 :
1074 : /* Update the register quantities for inserting X into the hash table
1075 : with a value equivalent to CLASSP.
1076 : (If the class does not contain a REG, it is irrelevant.)
1077 : If MODIFIED is true, X is a destination; it is being modified.
1078 : Note that delete_reg_equiv should be called on a register
1079 : before insert_regs is done on that register with MODIFIED != 0.
1080 :
1081 : True value means that elements of reg_qty have changed
1082 : so X's hash code may be different. */
1083 :
static bool
insert_regs (rtx x, struct table_elt *classp, bool modified)
{
  if (REG_P (x))
    {
      unsigned int regno = REGNO (x);
      int qty_valid;

      /* If REGNO is in the equivalence table already but is of the
	 wrong mode for that equivalence, don't do anything here.  */

      qty_valid = REGNO_QTY_VALID_P (regno);
      if (qty_valid)
	{
	  struct qty_table_elem *ent = &qty_table[REG_QTY (regno)];

	  if (ent->mode != GET_MODE (x))
	    return false;
	}

      if (modified || ! qty_valid)
	{
	  /* Look through CLASSP's equivalence class for a register of
	     the same mode; if one exists, join its quantity instead of
	     creating a new one.  */
	  if (classp)
	    for (classp = classp->first_same_value;
		 classp != 0;
		 classp = classp->next_same_value)
	      if (REG_P (classp->exp)
		  && GET_MODE (classp->exp) == GET_MODE (x))
		{
		  unsigned c_regno = REGNO (classp->exp);

		  gcc_assert (REGNO_QTY_VALID_P (c_regno));

		  /* Suppose that 5 is hard reg and 100 and 101 are
		     pseudos.  Consider

		     (set (reg:si 100) (reg:si 5))
		     (set (reg:si 5) (reg:si 100))
		     (set (reg:di 101) (reg:di 5))

		     We would now set REG_QTY (101) = REG_QTY (5), but the
		     entry for 5 is in SImode.  When we use this later in
		     copy propagation, we get the register in wrong mode.  */
		  if (qty_table[REG_QTY (c_regno)].mode != GET_MODE (x))
		    continue;

		  make_regs_eqv (regno, c_regno);
		  return true;
		}

	  /* Mention_regs for a SUBREG checks if REG_TICK is exactly one larger
	     than REG_IN_TABLE to find out if there was only a single preceding
	     invalidation - for the SUBREG - or another one, which would be
	     for the full register.  However, if we find here that REG_TICK
	     indicates that the register is invalid, it means that it has
	     been invalidated in a separate operation.  The SUBREG might be used
	     now (then this is a recursive call), or we might use the full REG
	     now and a SUBREG of it later.  So bump up REG_TICK so that
	     mention_regs will do the right thing.  */
	  if (! modified
	      && REG_IN_TABLE (regno) >= 0
	      && REG_TICK (regno) == REG_IN_TABLE (regno) + 1)
	    REG_TICK (regno)++;
	  make_new_qty (regno, GET_MODE (x));
	  return true;
	}

      /* Register already has a valid quantity and is not being
	 modified; nothing changed.  */
      return false;
    }

  /* If X is a SUBREG, we will likely be inserting the inner register in the
     table.  If that register doesn't have an assigned quantity number at
     this point but does later, the insertion that we will be doing now will
     not be accessible because its hash code will have changed.  So assign
     a quantity number now.  */

  else if (GET_CODE (x) == SUBREG && REG_P (SUBREG_REG (x))
	   && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
    {
      insert_regs (SUBREG_REG (x), NULL, false);
      mention_regs (x);
      return true;
    }
  else
    return mention_regs (x);
}
1170 :
1171 :
1172 : /* Compute upper and lower anchors for CST. Also compute the offset of CST
1173 : from these anchors/bases such that *_BASE + *_OFFS = CST. Return false iff
1174 : CST is equal to an anchor. */
1175 :
1176 : static bool
1177 0 : compute_const_anchors (rtx cst,
1178 : HOST_WIDE_INT *lower_base, HOST_WIDE_INT *lower_offs,
1179 : HOST_WIDE_INT *upper_base, HOST_WIDE_INT *upper_offs)
1180 : {
1181 0 : unsigned HOST_WIDE_INT n = UINTVAL (cst);
1182 :
1183 0 : *lower_base = n & ~(targetm.const_anchor - 1);
1184 0 : if ((unsigned HOST_WIDE_INT) *lower_base == n)
1185 : return false;
1186 :
1187 0 : *upper_base = ((n + (targetm.const_anchor - 1))
1188 0 : & ~(targetm.const_anchor - 1));
1189 0 : *upper_offs = n - *upper_base;
1190 0 : *lower_offs = n - *lower_base;
1191 0 : return true;
1192 : }
1193 :
1194 : /* Insert the equivalence between ANCHOR and (REG + OFF) in mode MODE. */
1195 :
1196 : static void
1197 0 : insert_const_anchor (HOST_WIDE_INT anchor, rtx reg, HOST_WIDE_INT offs,
1198 : machine_mode mode)
1199 : {
1200 0 : struct table_elt *elt;
1201 0 : unsigned hash;
1202 0 : rtx anchor_exp;
1203 0 : rtx exp;
1204 :
1205 0 : anchor_exp = gen_int_mode (anchor, mode);
1206 0 : hash = HASH (anchor_exp, mode);
1207 0 : elt = lookup (anchor_exp, hash, mode);
1208 0 : if (!elt)
1209 0 : elt = insert (anchor_exp, NULL, hash, mode);
1210 :
1211 0 : exp = plus_constant (mode, reg, offs);
1212 : /* REG has just been inserted and the hash codes recomputed. */
1213 0 : mention_regs (exp);
1214 0 : hash = HASH (exp, mode);
1215 :
1216 : /* Use the cost of the register rather than the whole expression. When
1217 : looking up constant anchors we will further offset the corresponding
1218 : expression therefore it does not make sense to prefer REGs over
1219 : reg-immediate additions. Prefer instead the oldest expression. Also
1220 : don't prefer pseudos over hard regs so that we derive constants in
1221 : argument registers from other argument registers rather than from the
1222 : original pseudo that was used to synthesize the constant. */
1223 0 : insert_with_costs (exp, elt, hash, mode, COST (reg, mode), 1);
1224 0 : }
1225 :
1226 : /* The constant CST is equivalent to the register REG. Create
1227 : equivalences between the two anchors of CST and the corresponding
1228 : register-offset expressions using REG. */
1229 :
1230 : static void
1231 0 : insert_const_anchors (rtx reg, rtx cst, machine_mode mode)
1232 : {
1233 0 : HOST_WIDE_INT lower_base, lower_offs, upper_base, upper_offs;
1234 :
1235 0 : if (!compute_const_anchors (cst, &lower_base, &lower_offs,
1236 : &upper_base, &upper_offs))
1237 0 : return;
1238 :
1239 : /* Ignore anchors of value 0. Constants accessible from zero are
1240 : simple. */
1241 0 : if (lower_base != 0)
1242 0 : insert_const_anchor (lower_base, reg, -lower_offs, mode);
1243 :
1244 0 : if (upper_base != 0)
1245 0 : insert_const_anchor (upper_base, reg, -upper_offs, mode);
1246 : }
1247 :
1248 : /* We need to express ANCHOR_ELT->exp + OFFS. Walk the equivalence list of
1249 : ANCHOR_ELT and see if offsetting any of the entries by OFFS would create a
1250 : valid expression. Return the cheapest and oldest of such expressions. In
1251 : *OLD, return how old the resulting expression is compared to the other
1252 : equivalent expressions. */
1253 :
1254 : static rtx
1255 0 : find_reg_offset_for_const (struct table_elt *anchor_elt, HOST_WIDE_INT offs,
1256 : unsigned *old)
1257 : {
1258 0 : struct table_elt *elt;
1259 0 : unsigned idx;
1260 0 : struct table_elt *match_elt;
1261 0 : rtx match;
1262 :
1263 : /* Find the cheapest and *oldest* expression to maximize the chance of
1264 : reusing the same pseudo. */
1265 :
1266 0 : match_elt = NULL;
1267 0 : match = NULL_RTX;
1268 0 : for (elt = anchor_elt->first_same_value, idx = 0;
1269 0 : elt;
1270 0 : elt = elt->next_same_value, idx++)
1271 : {
1272 0 : if (match_elt && CHEAPER (match_elt, elt))
1273 : return match;
1274 :
1275 0 : if (REG_P (elt->exp)
1276 0 : || (GET_CODE (elt->exp) == PLUS
1277 0 : && REG_P (XEXP (elt->exp, 0))
1278 0 : && GET_CODE (XEXP (elt->exp, 1)) == CONST_INT))
1279 : {
1280 0 : rtx x;
1281 :
1282 : /* Ignore expressions that are no longer valid. */
1283 0 : if (!REG_P (elt->exp) && !exp_equiv_p (elt->exp, elt->exp, 1, false))
1284 0 : continue;
1285 :
1286 0 : x = plus_constant (GET_MODE (elt->exp), elt->exp, offs);
1287 0 : if (REG_P (x)
1288 0 : || (GET_CODE (x) == PLUS
1289 0 : && IN_RANGE (INTVAL (XEXP (x, 1)),
1290 : -targetm.const_anchor,
1291 : targetm.const_anchor - 1)))
1292 : {
1293 0 : match = x;
1294 0 : match_elt = elt;
1295 0 : *old = idx;
1296 : }
1297 : }
1298 : }
1299 :
1300 : return match;
1301 : }
1302 :
1303 : /* Try to express the constant SRC_CONST using a register+offset expression
1304 : derived from a constant anchor. Return it if successful or NULL_RTX,
1305 : otherwise. */
1306 :
1307 : static rtx
1308 0 : try_const_anchors (rtx src_const, machine_mode mode)
1309 : {
1310 0 : struct table_elt *lower_elt, *upper_elt;
1311 0 : HOST_WIDE_INT lower_base, lower_offs, upper_base, upper_offs;
1312 0 : rtx lower_anchor_rtx, upper_anchor_rtx;
1313 0 : rtx lower_exp = NULL_RTX, upper_exp = NULL_RTX;
1314 0 : unsigned lower_old, upper_old;
1315 :
1316 : /* CONST_INT may be in various modes, avoid non-scalar-int mode. */
1317 0 : if (!SCALAR_INT_MODE_P (mode))
1318 : return NULL_RTX;
1319 :
1320 0 : if (!compute_const_anchors (src_const, &lower_base, &lower_offs,
1321 : &upper_base, &upper_offs))
1322 : return NULL_RTX;
1323 :
1324 0 : lower_anchor_rtx = GEN_INT (lower_base);
1325 0 : upper_anchor_rtx = GEN_INT (upper_base);
1326 0 : lower_elt = lookup (lower_anchor_rtx, HASH (lower_anchor_rtx, mode), mode);
1327 0 : upper_elt = lookup (upper_anchor_rtx, HASH (upper_anchor_rtx, mode), mode);
1328 :
1329 0 : if (lower_elt)
1330 0 : lower_exp = find_reg_offset_for_const (lower_elt, lower_offs, &lower_old);
1331 0 : if (upper_elt)
1332 0 : upper_exp = find_reg_offset_for_const (upper_elt, upper_offs, &upper_old);
1333 :
1334 0 : if (!lower_exp)
1335 : return upper_exp;
1336 0 : if (!upper_exp)
1337 : return lower_exp;
1338 :
1339 : /* Return the older expression. */
1340 0 : return (upper_old > lower_old ? upper_exp : lower_exp);
1341 : }
1342 :
1343 : /* Look in or update the hash table. */
1344 :
1345 : /* Remove table element ELT from use in the table.
1346 : HASH is its hash code, made using the HASH macro.
1347 : It's an argument because often that is known in advance
1348 : and we save much time not recomputing it. */
1349 :
static void
remove_from_table (struct table_elt *elt, unsigned int hash)
{
  if (elt == 0)
    return;

  /* Mark this element as removed.  See cse_insn.  */
  elt->first_same_value = 0;

  /* Remove the table element from its equivalence class.  */

  {
    struct table_elt *prev = elt->prev_same_value;
    struct table_elt *next = elt->next_same_value;

    if (next)
      next->prev_same_value = prev;

    if (prev)
      prev->next_same_value = next;
    else
      {
	/* ELT was the class head; every remaining member must point
	   at the new head.  */
	struct table_elt *newfirst = next;
	while (next)
	  {
	    next->first_same_value = newfirst;
	    next = next->next_same_value;
	  }
      }
  }

  /* Remove the table element from its hash bucket.  */

  {
    struct table_elt *prev = elt->prev_same_hash;
    struct table_elt *next = elt->next_same_hash;

    if (next)
      next->prev_same_hash = prev;

    if (prev)
      prev->next_same_hash = next;
    else if (table[hash] == elt)
      table[hash] = next;
    else
      {
	/* This entry is not in the proper hash bucket.  This can happen
	   when two classes were merged by `merge_equiv_classes'.  Search
	   for the hash bucket that it heads.  This happens only very
	   rarely, so the cost is acceptable.  */
	for (hash = 0; hash < HASH_SIZE; hash++)
	  if (table[hash] == elt)
	    table[hash] = next;
      }
  }

  /* Remove the table element from its related-value circular chain.  */

  if (elt->related_value != 0 && elt->related_value != elt)
    {
      /* Walk the circular chain to find ELT's predecessor, then splice
	 ELT out; a chain of one collapses to no chain at all.  */
      struct table_elt *p = elt->related_value;

      while (p->related_value != elt)
	p = p->related_value;
      p->related_value = elt->related_value;
      if (p->related_value == p)
	p->related_value = 0;
    }

  /* Now add it to the free element chain.  */
  elt->next_same_hash = free_element_chain;
  free_element_chain = elt;
}
1423 :
1424 : /* Same as above, but X is a pseudo-register. */
1425 :
1426 : static void
1427 90570705 : remove_pseudo_from_table (rtx x, unsigned int hash)
1428 : {
1429 90570705 : struct table_elt *elt;
1430 :
1431 : /* Because a pseudo-register can be referenced in more than one
1432 : mode, we might have to remove more than one table entry. */
1433 94984112 : while ((elt = lookup_for_remove (x, hash, VOIDmode)))
1434 4413407 : remove_from_table (elt, hash);
1435 90570705 : }
1436 :
1437 : /* Look up X in the hash table and return its table element,
1438 : or 0 if X is not in the table.
1439 :
1440 : MODE is the machine-mode of X, or if X is an integer constant
1441 : with VOIDmode then MODE is the mode with which X will be used.
1442 :
1443 : Here we are satisfied to find an expression whose tree structure
1444 : looks like X. */
1445 :
1446 : static struct table_elt *
1447 486830722 : lookup (rtx x, unsigned int hash, machine_mode mode)
1448 : {
1449 486830722 : struct table_elt *p;
1450 :
1451 722606417 : for (p = table[hash]; p; p = p->next_same_hash)
1452 366097362 : if (mode == p->mode && ((x == p->exp && REG_P (x))
1453 143778560 : || exp_equiv_p (x, p->exp, !REG_P (x), false)))
1454 130321667 : return p;
1455 :
1456 : return 0;
1457 : }
1458 :
1459 : /* Like `lookup' but don't care whether the table element uses invalid regs.
1460 : Also ignore discrepancies in the machine mode of a register. */
1461 :
1462 : static struct table_elt *
1463 94984112 : lookup_for_remove (rtx x, unsigned int hash, machine_mode mode)
1464 : {
1465 94984112 : struct table_elt *p;
1466 :
1467 94984112 : if (REG_P (x))
1468 : {
1469 94984112 : unsigned int regno = REGNO (x);
1470 :
1471 : /* Don't check the machine mode when comparing registers;
1472 : invalidating (REG:SI 0) also invalidates (REG:DF 0). */
1473 173280730 : for (p = table[hash]; p; p = p->next_same_hash)
1474 82710025 : if (REG_P (p->exp)
1475 82710025 : && REGNO (p->exp) == regno)
1476 : return p;
1477 : }
1478 : else
1479 : {
1480 0 : for (p = table[hash]; p; p = p->next_same_hash)
1481 0 : if (mode == p->mode
1482 0 : && (x == p->exp || exp_equiv_p (x, p->exp, 0, false)))
1483 0 : return p;
1484 : }
1485 :
1486 : return 0;
1487 : }
1488 :
1489 : /* Look for an expression equivalent to X and with code CODE.
1490 : If one is found, return that expression. */
1491 :
1492 : static rtx
1493 57887856 : lookup_as_function (rtx x, enum rtx_code code)
1494 : {
1495 57887856 : struct table_elt *p
1496 57887856 : = lookup (x, SAFE_HASH (x, VOIDmode), GET_MODE (x));
1497 :
1498 57887856 : if (p == 0)
1499 : return 0;
1500 :
1501 39084498 : for (p = p->first_same_value; p; p = p->next_same_value)
1502 27166432 : if (GET_CODE (p->exp) == code
1503 : /* Make sure this is a valid entry in the table. */
1504 27166432 : && exp_equiv_p (p->exp, p->exp, 1, false))
1505 923871 : return p->exp;
1506 :
1507 : return 0;
1508 : }
1509 :
1510 : /* Insert X in the hash table, assuming HASH is its hash code and
1511 : CLASSP is an element of the class it should go in (or 0 if a new
1512 : class should be made). COST is the code of X and reg_cost is the
1513 : cost of registers in X. It is inserted at the proper position to
1514 : keep the class in the order cheapest first.
1515 :
1516 : MODE is the machine-mode of X, or if X is an integer constant
1517 : with VOIDmode then MODE is the mode with which X will be used.
1518 :
1519 : For elements of equal cheapness, the most recent one
1520 : goes in front, except that the first element in the list
1521 : remains first unless a cheaper element is added. The order of
1522 : pseudo-registers does not matter, as canon_reg will be called to
1523 : find the cheapest when a register is retrieved from the table.
1524 :
1525 : The in_memory field in the hash table element is set to 0.
1526 : The caller must set it nonzero if appropriate.
1527 :
1528 : You should call insert_regs (X, CLASSP, MODIFY) before calling here,
1529 : and if insert_regs returns a nonzero value
1530 : you must then recompute its hash code before calling here.
1531 :
1532 : If necessary, update table showing constant values of quantities. */
1533 :
static struct table_elt *
insert_with_costs (rtx x, struct table_elt *classp, unsigned int hash,
		   machine_mode mode, int cost, int reg_cost)
{
  struct table_elt *elt;

  /* If X is a register and we haven't made a quantity for it,
     something is wrong.  */
  gcc_assert (!REG_P (x) || REGNO_QTY_VALID_P (REGNO (x)));

  /* If X is a hard register, show it is being put in the table.  */
  if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
    add_to_hard_reg_set (&hard_regs_in_table, GET_MODE (x), REGNO (x));

  /* Put an element for X into the right hash bucket.  */

  /* Reuse a previously freed element when one is available.  */
  elt = free_element_chain;
  if (elt)
    free_element_chain = elt->next_same_hash;
  else
    elt = XNEW (struct table_elt);

  elt->exp = x;
  elt->canon_exp = NULL_RTX;
  elt->cost = cost;
  elt->regcost = reg_cost;
  elt->next_same_value = 0;
  elt->prev_same_value = 0;
  elt->next_same_hash = table[hash];
  elt->prev_same_hash = 0;
  elt->related_value = 0;
  elt->in_memory = 0;
  elt->mode = mode;
  elt->is_const = (CONSTANT_P (x) || fixed_base_plus_p (x));

  /* Link ELT at the head of its hash bucket.  */
  if (table[hash])
    table[hash]->prev_same_hash = elt;
  table[hash] = elt;

  /* Put it into the proper value-class.  */
  if (classp)
    {
      classp = classp->first_same_value;
      if (CHEAPER (elt, classp))
	/* Insert at the head of the class.  */
	{
	  struct table_elt *p;
	  elt->next_same_value = classp;
	  classp->prev_same_value = elt;
	  elt->first_same_value = elt;

	  /* Every member's first_same_value must point at the new
	     head.  */
	  for (p = classp; p; p = p->next_same_value)
	    p->first_same_value = elt;
	}
      else
	{
	  /* Insert not at head of the class.  */
	  /* Put it after the last element cheaper than X.  */
	  struct table_elt *p, *next;

	  for (p = classp;
	       (next = p->next_same_value) && CHEAPER (next, elt);
	       p = next)
	    ;

	  /* Put it after P and before NEXT.  */
	  elt->next_same_value = next;
	  if (next)
	    next->prev_same_value = elt;

	  elt->prev_same_value = p;
	  p->next_same_value = elt;
	  elt->first_same_value = classp;
	}
    }
  else
    elt->first_same_value = elt;

  /* If this is a constant being set equivalent to a register or a register
     being set equivalent to a constant, note the constant equivalence.

     If this is a constant, it cannot be equivalent to a different constant,
     and a constant is the only thing that can be cheaper than a register.  So
     we know the register is the head of the class (before the constant was
     inserted).

     If this is a register that is not already known equivalent to a
     constant, we must check the entire class.

     If this is a register that is already known equivalent to an insn,
     update the qtys `const_insn' to show that `this_insn' is the latest
     insn making that quantity equivalent to the constant.  */

  if (elt->is_const && classp && REG_P (classp->exp)
      && !REG_P (x))
    {
      /* A constant was inserted into a class headed by a register:
	 record the constant in that register's quantity.  */
      int exp_q = REG_QTY (REGNO (classp->exp));
      struct qty_table_elem *exp_ent = &qty_table[exp_q];

      exp_ent->const_rtx = gen_lowpart (exp_ent->mode, x);
      exp_ent->const_insn = this_insn;
    }

  else if (REG_P (x)
	   && classp
	   && ! qty_table[REG_QTY (REGNO (x))].const_rtx
	   && ! elt->is_const)
    {
      /* A register joined an existing class: search that class for a
	 constant to record in the register's quantity.  */
      struct table_elt *p;

      for (p = classp; p != 0; p = p->next_same_value)
	{
	  if (p->is_const && !REG_P (p->exp))
	    {
	      int x_q = REG_QTY (REGNO (x));
	      struct qty_table_elem *x_ent = &qty_table[x_q];

	      x_ent->const_rtx
		= gen_lowpart (GET_MODE (x), p->exp);
	      x_ent->const_insn = this_insn;
	      break;
	    }
	}
    }

  else if (REG_P (x)
	   && qty_table[REG_QTY (REGNO (x))].const_rtx
	   && GET_MODE (x) == qty_table[REG_QTY (REGNO (x))].mode)
    qty_table[REG_QTY (REGNO (x))].const_insn = this_insn;

  /* If this is a constant with symbolic value,
     and it has a term with an explicit integer value,
     link it up with related expressions.  */
  if (GET_CODE (x) == CONST)
    {
      rtx subexp = get_related_value (x);
      unsigned subhash;
      struct table_elt *subelt, *subelt_prev;

      if (subexp != 0)
	{
	  /* Get the integer-free subexpression in the hash table.  */
	  subhash = SAFE_HASH (subexp, mode);
	  subelt = lookup (subexp, subhash, mode);
	  if (subelt == 0)
	    subelt = insert (subexp, NULL, subhash, mode);
	  /* Initialize SUBELT's circular chain if it has none.  */
	  if (subelt->related_value == 0)
	    subelt->related_value = subelt;
	  /* Find the element in the circular chain that precedes SUBELT.  */
	  subelt_prev = subelt;
	  while (subelt_prev->related_value != subelt)
	    subelt_prev = subelt_prev->related_value;
	  /* Put new ELT into SUBELT's circular chain just before SUBELT.
	     This way the element that follows SUBELT is the oldest one.  */
	  elt->related_value = subelt_prev->related_value;
	  subelt_prev->related_value = elt;
	}
    }

  return elt;
}
1696 :
1697 : /* Wrap insert_with_costs by passing the default costs. */
1698 :
1699 : static struct table_elt *
1700 260011306 : insert (rtx x, struct table_elt *classp, unsigned int hash,
1701 : machine_mode mode)
1702 : {
1703 520022612 : return insert_with_costs (x, classp, hash, mode,
1704 260011306 : COST (x, mode), approx_reg_cost (x));
1705 : }
1706 :
1707 :
1708 : /* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
1709 : CLASS2 into CLASS1. This is done when we have reached an insn which makes
1710 : the two classes equivalent.
1711 :
1712 : CLASS1 will be the surviving class; CLASS2 should not be used after this
1713 : call.
1714 :
1715 : Any invalid entries in CLASS2 will not be copied. */
1716 :
static void
merge_equiv_classes (struct table_elt *class1, struct table_elt *class2)
{
  struct table_elt *elt, *next, *new_elt;

  /* Ensure we start with the head of the classes.  */
  class1 = class1->first_same_value;
  class2 = class2->first_same_value;

  /* If they were already equal, forget it.  */
  if (class1 == class2)
    return;

  /* Move each member of CLASS2 by deleting it from the table and
     re-inserting it with CLASS1 as its equivalence class.  */
  for (elt = class2; elt; elt = next)
    {
      unsigned int hash;
      rtx exp = elt->exp;
      machine_mode mode = elt->mode;

      /* Capture the successor now, before ELT is unlinked below.  */
      next = elt->next_same_value;

      /* Remove old entry, make a new one in CLASS1's class.
	 Don't do this for invalid entries as we cannot find their
	 hash code (it also isn't necessary).  */
      if (REG_P (exp) || exp_equiv_p (exp, exp, 1, false))
	{
	  bool need_rehash = false;

	  /* HASH sets the global hash_arg_in_memory as a side effect;
	     clear it first so the value copied into the new element
	     below reflects EXP alone.  */
	  hash_arg_in_memory = 0;
	  hash = HASH (exp, mode);

	  if (REG_P (exp))
	    {
	      /* Deleting the register's equivalence changes its quantity,
		 so a rehash of entries mentioning it will be needed.  */
	      need_rehash = REGNO_QTY_VALID_P (REGNO (exp));
	      delete_reg_equiv (REGNO (exp));
	    }

	  /* Pseudo registers are removed by register number; all other
	     expressions by their table element.  */
	  if (REG_P (exp) && REGNO (exp) >= FIRST_PSEUDO_REGISTER)
	    remove_pseudo_from_table (exp, hash);
	  else
	    remove_from_table (elt, hash);

	  if (insert_regs (exp, class1, false) || need_rehash)
	    {
	      /* Quantity numbers changed, so recompute the hash code
		 before re-inserting.  */
	      rehash_using_reg (exp);
	      hash = HASH (exp, mode);
	    }
	  new_elt = insert (exp, class1, hash, mode);
	  new_elt->in_memory = hash_arg_in_memory;
	  /* Preserve the forced maximal cost of ASM_OPERANDS entries.  */
	  if (GET_CODE (exp) == ASM_OPERANDS && elt->cost == MAX_COST)
	    new_elt->cost = MAX_COST;
	}
    }
}
1771 :
1772 : /* Flush the entire hash table. */
1773 :
1774 : static void
1775 7962 : flush_hash_table (void)
1776 : {
1777 7962 : int i;
1778 7962 : struct table_elt *p;
1779 :
1780 262746 : for (i = 0; i < HASH_SIZE; i++)
1781 1066891 : for (p = table[i]; p; p = table[i])
1782 : {
1783 : /* Note that invalidate can remove elements
1784 : after P in the current hash chain. */
1785 812107 : if (REG_P (p->exp))
1786 357515 : invalidate (p->exp, VOIDmode);
1787 : else
1788 454592 : remove_from_table (p, i);
1789 : }
1790 7962 : }
1791 :
1792 : /* Check whether an anti dependence exists between X and EXP. MODE and
1793 : ADDR are as for canon_anti_dependence. */
1794 :
static bool
check_dependence (const_rtx x, rtx exp, machine_mode mode, rtx addr)
{
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, x, NONCONST)
    {
      /* NOTE: this inner X intentionally shadows the parameter; it is
	 the sub-expression currently being visited.  */
      const_rtx x = *iter;
      /* Any MEM inside the expression that the store described by
	 EXP/MODE/ADDR might clobber makes the whole of X dependent.  */
      if (MEM_P (x) && canon_anti_dependence (x, true, exp, mode, addr))
	return true;
    }
  return false;
}
1807 :
1808 : /* Remove from the hash table, or mark as invalid, all expressions whose
1809 : values could be altered by storing in register X. */
1810 :
static void
invalidate_reg (rtx x)
{
  gcc_assert (GET_CODE (x) == REG);

  /* If X is a register, dependencies on its contents are recorded
     through the qty number mechanism.  Just change the qty number of
     the register, mark it as invalid for expressions that refer to it,
     and remove it itself.  */
  unsigned int regno = REGNO (x);
  unsigned int hash = HASH (x, GET_MODE (x));

  /* Remove REGNO from any quantity list it might be on and indicate
     that its value might have changed.  If it is a pseudo, remove its
     entry from the hash table.

     For a hard register, we do the first two actions above for any
     additional hard registers corresponding to X.  Then, if any of these
     registers are in the table, we must remove any REG entries that
     overlap these registers.  */

  delete_reg_equiv (regno);
  REG_TICK (regno)++;
  SUBREG_TICKED (regno) = -1;

  if (regno >= FIRST_PSEUDO_REGISTER)
    remove_pseudo_from_table (x, hash);
  else
    {
      /* Hard register: handle every hard register that X occupies.  */
      HOST_WIDE_INT in_table = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
      unsigned int endregno = END_REGNO (x);
      unsigned int rn;
      struct table_elt *p, *next;

      CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);

      /* Tick and unlink the remaining registers of a multi-register
	 value; REGNO itself was handled above.  */
      for (rn = regno + 1; rn < endregno; rn++)
	{
	  in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, rn);
	  CLEAR_HARD_REG_BIT (hard_regs_in_table, rn);
	  delete_reg_equiv (rn);
	  REG_TICK (rn)++;
	  SUBREG_TICKED (rn) = -1;
	}

      /* Only walk the whole table when some overlapped hard register
	 was actually recorded in it.  */
      if (in_table)
	for (hash = 0; hash < HASH_SIZE; hash++)
	  for (p = table[hash]; p; p = next)
	    {
	      /* Fetch the successor first; removal unlinks P.  */
	      next = p->next_same_hash;

	      if (!REG_P (p->exp) || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
		continue;

	      /* Remove hard-REG entries whose register range overlaps
		 [REGNO, ENDREGNO).  */
	      unsigned int tregno = REGNO (p->exp);
	      unsigned int tendregno = END_REGNO (p->exp);
	      if (tendregno > regno && tregno < endregno)
		remove_from_table (p, hash);
	    }
    }
}
1872 :
1873 : /* Remove from the hash table, or mark as invalid, all expressions whose
1874 : values could be altered by storing in X. X is a register, a subreg, or
1875 : a memory reference with nonvarying address (because, when a memory
1876 : reference with a varying address is stored in, all memory references are
1877 : removed by invalidate_memory so specific invalidation is superfluous).
1878 : FULL_MODE, if not VOIDmode, indicates that this much should be
1879 : invalidated instead of just the amount indicated by the mode of X. This
1880 : is only used for bitfield stores into memory.
1881 :
1882 : A nonvarying address may be just a register or just a symbol reference,
1883 : or it may be either of those plus a numeric offset. */
1884 :
static void
invalidate (rtx x, machine_mode full_mode)
{
  int i;
  struct table_elt *p;
  rtx addr;

  switch (GET_CODE (x))
    {
    case REG:
      invalidate_reg (x);
      return;

    case SUBREG:
      /* A store into a subreg clobbers the whole underlying register.  */
      invalidate (SUBREG_REG (x), VOIDmode);
      return;

    case PARALLEL:
      /* Invalidate every location of the compound destination.  */
      for (i = XVECLEN (x, 0) - 1; i >= 0; --i)
	invalidate (XVECEXP (x, 0, i), VOIDmode);
      return;

    case EXPR_LIST:
      /* This is part of a disjoint return value; extract the location in
	 question ignoring the offset.  */
      invalidate (XEXP (x, 0), VOIDmode);
      return;

    case MEM:
      addr = canon_rtx (get_addr (XEXP (x, 0)));
      /* Calculate the canonical version of X here so that
	 true_dependence doesn't generate new RTL for X on each call.  */
      x = canon_rtx (x);

      /* Remove all hash table elements that refer to overlapping pieces of
	 memory.  */
      if (full_mode == VOIDmode)
	full_mode = GET_MODE (x);

      for (i = 0; i < HASH_SIZE; i++)
	{
	  struct table_elt *next;

	  for (p = table[i]; p; p = next)
	    {
	      /* Fetch the successor first; removal unlinks P.  */
	      next = p->next_same_hash;
	      if (p->in_memory)
		{
		  /* Just canonicalize the expression once;
		     otherwise each time we call invalidate
		     true_dependence will canonicalize the
		     expression again.  */
		  if (!p->canon_exp)
		    p->canon_exp = canon_rtx (p->exp);
		  if (check_dependence (p->canon_exp, x, full_mode, addr))
		    remove_from_table (p, i);
		}
	    }
	}
      return;

    default:
      gcc_unreachable ();
    }
}
1950 :
1951 : /* Invalidate DEST. Used when DEST is not going to be added
1952 : into the hash table for some reason, e.g. do_not_record
1953 : flagged on it. */
1954 :
1955 : static void
1956 54447792 : invalidate_dest (rtx dest)
1957 : {
1958 54447792 : if (REG_P (dest)
1959 26550372 : || GET_CODE (dest) == SUBREG
1960 26550372 : || MEM_P (dest))
1961 34480959 : invalidate (dest, VOIDmode);
1962 19966833 : else if (GET_CODE (dest) == STRICT_LOW_PART
1963 19966833 : || GET_CODE (dest) == ZERO_EXTRACT)
1964 960 : invalidate (XEXP (dest, 0), GET_MODE (dest));
1965 54447792 : }
1966 :
1967 : /* Remove all expressions that refer to register REGNO,
1968 : since they are already invalid, and we are about to
1969 : mark that register valid again and don't want the old
1970 : expressions to reappear as valid. */
1971 :
1972 : static void
1973 13073526 : remove_invalid_refs (unsigned int regno)
1974 : {
1975 13073526 : unsigned int i;
1976 13073526 : struct table_elt *p, *next;
1977 :
1978 431426358 : for (i = 0; i < HASH_SIZE; i++)
1979 662253218 : for (p = table[i]; p; p = next)
1980 : {
1981 243900386 : next = p->next_same_hash;
1982 243900386 : if (!REG_P (p->exp) && refers_to_regno_p (regno, p->exp))
1983 17141493 : remove_from_table (p, i);
1984 : }
1985 13073526 : }
1986 :
1987 : /* Likewise for a subreg with subreg_reg REGNO, subreg_byte OFFSET,
1988 : and mode MODE. */
static void
remove_invalid_subreg_refs (unsigned int regno, poly_uint64 offset,
			    machine_mode mode)
{
  unsigned int i;
  struct table_elt *p, *next;

  for (i = 0; i < HASH_SIZE; i++)
    for (p = table[i]; p; p = next)
      {
	rtx exp = p->exp;
	/* Save the successor before a possible removal unlinks P.  */
	next = p->next_same_hash;

	/* Keep plain REG entries, and keep a SUBREG of REGNO whose byte
	   range cannot overlap the stored range [OFFSET, OFFSET + size
	   of MODE); every other expression mentioning REGNO is removed.  */
	if (!REG_P (exp)
	    && (GET_CODE (exp) != SUBREG
		|| !REG_P (SUBREG_REG (exp))
		|| REGNO (SUBREG_REG (exp)) != regno
		|| ranges_maybe_overlap_p (SUBREG_BYTE (exp),
					   GET_MODE_SIZE (GET_MODE (exp)),
					   offset, GET_MODE_SIZE (mode)))
	    && refers_to_regno_p (regno, p->exp))
	  remove_from_table (p, i);
      }
}
2013 :
2014 : /* Recompute the hash codes of any valid entries in the hash table that
2015 : reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
2016 :
2017 : This is called when we make a jump equivalence. */
2018 :
static void
rehash_using_reg (rtx x)
{
  unsigned int i;
  struct table_elt *p, *next;
  unsigned hash;

  if (GET_CODE (x) == SUBREG)
    x = SUBREG_REG (x);

  /* If X is not a register or if the register is known not to be in any
     valid entries in the table, we have no work to do.  */

  if (!REG_P (x)
      || REG_IN_TABLE (REGNO (x)) < 0
      || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
    return;

  /* Scan all hash chains looking for valid entries that mention X.
     If we find one and it is in the wrong hash chain, move it.  */

  for (i = 0; i < HASH_SIZE; i++)
    for (p = table[i]; p; p = next)
      {
	/* Save the successor first; P may be moved to another chain.  */
	next = p->next_same_hash;
	if (reg_mentioned_p (x, p->exp)
	    && exp_equiv_p (p->exp, p->exp, 1, false)
	    && i != (hash = SAFE_HASH (p->exp, p->mode)))
	  {
	    /* Unlink P from chain I ...  */
	    if (p->next_same_hash)
	      p->next_same_hash->prev_same_hash = p->prev_same_hash;

	    if (p->prev_same_hash)
	      p->prev_same_hash->next_same_hash = p->next_same_hash;
	    else
	      table[i] = p->next_same_hash;

	    /* ... and push it onto the front of its correct chain.  */
	    p->next_same_hash = table[hash];
	    p->prev_same_hash = 0;
	    if (table[hash])
	      table[hash]->prev_same_hash = p;
	    table[hash] = p;
	  }
      }
}
2064 :
2065 : /* Remove from the hash table any expression that is a call-clobbered
2066 : register in INSN. Also update their TICK values. */
2067 :
static void
invalidate_for_call (rtx_insn *insn)
{
  unsigned int regno;
  unsigned hash;
  struct table_elt *p, *next;
  int in_table = 0;
  hard_reg_set_iterator hrsi;

  /* Go through all the hard registers.  For each that might be clobbered
     in call insn INSN, remove the register from quantity chains and update
     reg_tick if defined.  Also see if any of these registers is currently
     in the table.

     ??? We could be more precise for partially-clobbered registers,
     and only invalidate values that actually occupy the clobbered part
     of the registers.  It doesn't seem worth the effort though, since
     we shouldn't see this situation much before RA.  Whatever choice
     we make here has to be consistent with the table walk below,
     so any change to this test will require a change there too.  */
  HARD_REG_SET callee_clobbers
    = insn_callee_abi (insn).full_and_partial_reg_clobbers ();
  EXECUTE_IF_SET_IN_HARD_REG_SET (callee_clobbers, 0, regno, hrsi)
    {
      delete_reg_equiv (regno);
      if (REG_TICK (regno) >= 0)
	{
	  REG_TICK (regno)++;
	  SUBREG_TICKED (regno) = -1;
	}
      in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
    }

  /* In the case where we have no call-clobbered hard registers in the
     table, we are done.  Otherwise, scan the table and remove any
     entry that overlaps a call-clobbered register.  */

  if (in_table)
    for (hash = 0; hash < HASH_SIZE; hash++)
      for (p = table[hash]; p; p = next)
	{
	  /* Save the successor first; removal unlinks P.  */
	  next = p->next_same_hash;

	  /* Only hard-register REG entries are removed here.  */
	  if (!REG_P (p->exp)
	      || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
	    continue;

	  /* This must use the same test as above rather than the
	     more accurate clobbers_reg_p.  */
	  if (overlaps_hard_reg_set_p (callee_clobbers, GET_MODE (p->exp),
				       REGNO (p->exp)))
	    remove_from_table (p, hash);
	}
}
2122 :
2123 : /* Given an expression X of type CONST,
2124 : and ELT which is its table entry (or 0 if it
2125 : is not in the hash table),
2126 : return an alternate expression for X as a register plus integer.
2127 : If none can be found, return 0. */
2128 :
static rtx
use_related_value (rtx x, struct table_elt *elt)
{
  struct table_elt *relt = 0;
  struct table_elt *p, *q;
  HOST_WIDE_INT offset;

  /* First, is there anything related known?
     If we have a table element, we can tell from that.
     Otherwise, must look it up.  */

  if (elt != 0 && elt->related_value != 0)
    relt = elt;
  else if (elt == 0 && GET_CODE (x) == CONST)
    {
      rtx subexp = get_related_value (x);
      if (subexp != 0)
	relt = lookup (subexp,
		       SAFE_HASH (subexp, GET_MODE (subexp)),
		       GET_MODE (subexp));
    }

  if (relt == 0)
    return 0;

  /* Search all related table entries for one that has an
     equivalent register.  Walk the circular related_value ring
     starting at RELT.  */

  p = relt;
  while (1)
    {
      /* This loop is strange in that it is executed in two different cases.
	 The first is when X is already in the table.  Then it is searching
	 the RELATED_VALUE list of X's class (RELT).  The second case is when
	 X is not in the table.  Then RELT points to a class for the related
	 value.

	 Ensure that, whatever case we are in, that we ignore classes that have
	 the same value as X.  */

      if (rtx_equal_p (x, p->exp))
	q = 0;
      else
	for (q = p->first_same_value; q; q = q->next_same_value)
	  if (REG_P (q->exp))
	    break;

      if (q)
	break;

      p = p->related_value;

      /* We went all the way around, so there is nothing to be found.
	 Alternatively, perhaps RELT was in the table for some other reason
	 and it has no related values recorded.  */
      if (p == relt || p == 0)
	break;
    }

  if (q == 0)
    return 0;

  /* Rewrite X as Q's register plus the difference between the two
     expressions' integer terms.  */
  offset = (get_integer_term (x) - get_integer_term (p->exp));
  /* Note: OFFSET may be 0 if P->xexp and X are related by commutativity.  */
  return plus_constant (q->mode, q->exp, offset);
}
2195 :
2196 :
2197 : /* Hash a string. Just add its bytes up. */
static inline unsigned
hash_rtx_string (const char *ps)
{
  /* A null string contributes nothing to the hash.  */
  if (!ps)
    return 0;

  /* Sum the byte values of the string, treated as unsigned chars.  */
  unsigned acc = 0;
  for (const unsigned char *cursor = (const unsigned char *) ps;
       *cursor; ++cursor)
    acc += *cursor;

  return acc;
}
2210 :
2211 : /* Hash an rtx. We are careful to make sure the value is never negative.
2212 : Equivalent registers hash identically.
2213 : MODE is used in hashing for CONST_INTs only;
2214 : otherwise the mode of X is used.
2215 :
2216 : Store 1 in DO_NOT_RECORD_P if any subexpression is volatile.
2217 :
2218 : If HASH_ARG_IN_MEMORY_P is not NULL, store 1 in it if X contains
2219 : a MEM rtx which does not have the MEM_READONLY_P flag set.
2220 :
2221 : Note that cse_insn knows that the hash code of a MEM expression
2222 : is just (int) MEM plus the hash code of the address.
2223 :
2224 : Call CB on each rtx if CB is not NULL.
2225 : When the callback returns true, we continue with the new rtx. */
2226 :
unsigned
hash_rtx (const_rtx x, machine_mode mode,
	  int *do_not_record_p, int *hash_arg_in_memory_p,
	  bool have_reg_qty, hash_rtx_callback_function cb)
{
  int i, j;
  unsigned hash = 0;
  enum rtx_code code;
  const char *fmt;
  machine_mode newmode;
  rtx newx;

  /* Used to turn recursion into iteration.  We can't rely on GCC's
     tail-recursion elimination since we need to keep accumulating values
     in HASH.  */
 repeat:
  if (x == 0)
    return hash;

  /* Invoke the callback first.  */
  if (cb != NULL
      && ((*cb) (x, mode, &newx, &newmode)))
    {
      hash += hash_rtx (newx, newmode, do_not_record_p,
			hash_arg_in_memory_p, have_reg_qty, cb);
      return hash;
    }

  /* Dispatch on the rtx code.  Codes handled completely in this switch
     return directly; the rest break out to the generic operand walk at
     the bottom.  */
  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      {
	unsigned int regno = REGNO (x);

	/* The recordability checks below are skipped entirely when
	   DO_NOT_RECORD_P is null or after reload has completed.  */
	if (do_not_record_p && !reload_completed)
	  {
	    /* On some machines, we can't record any non-fixed hard register,
	       because extending its life will cause reload problems.  We
	       consider ap, fp, sp, gp to be fixed for this purpose.

	       We also consider CCmode registers to be fixed for this purpose;
	       failure to do so leads to failure to simplify 0<100 type of
	       conditionals.

	       On all machines, we can't record any global registers.
	       Nor should we record any register that is in a small
	       class, as defined by TARGET_CLASS_LIKELY_SPILLED_P.  */
	    bool record;

	    if (regno >= FIRST_PSEUDO_REGISTER)
	      record = true;
	    else if (x == frame_pointer_rtx
		     || x == hard_frame_pointer_rtx
		     || x == arg_pointer_rtx
		     || x == stack_pointer_rtx
		     || x == pic_offset_table_rtx)
	      record = true;
	    else if (global_regs[regno])
	      record = false;
	    else if (fixed_regs[regno])
	      record = true;
	    else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_CC)
	      record = true;
	    else if (targetm.small_register_classes_for_mode_p (GET_MODE (x)))
	      record = false;
	    else if (targetm.class_likely_spilled_p (REGNO_REG_CLASS (regno)))
	      record = false;
	    else
	      record = true;

	    if (!record)
	      {
		*do_not_record_p = 1;
		return 0;
	      }
	  }

	/* With HAVE_REG_QTY, equivalent registers hash identically
	   because the quantity number, not the regno, is folded in.  */
	hash += ((unsigned int) REG << 7);
	hash += (have_reg_qty ? (unsigned) REG_QTY (regno) : regno);
	return hash;
      }

    /* We handle SUBREG of a REG specially because the underlying
       reg changes its hash value with every value change; we don't
       want to have to forget unrelated subregs when one subreg changes.  */
    case SUBREG:
      {
	if (REG_P (SUBREG_REG (x)))
	  {
	    hash += (((unsigned int) SUBREG << 7)
		     + REGNO (SUBREG_REG (x))
		     + (constant_lower_bound (SUBREG_BYTE (x))
			/ UNITS_PER_WORD));
	    return hash;
	  }
	/* SUBREG of a non-REG falls through to the generic walk.  */
	break;
      }

    case CONST_INT:
      /* MODE (supplied by the caller) participates in the hash here.  */
      hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode
	       + (unsigned int) INTVAL (x));
      return hash;

    case CONST_WIDE_INT:
      for (i = 0; i < CONST_WIDE_INT_NUNITS (x); i++)
	hash += CONST_WIDE_INT_ELT (x, i);
      return hash;

    case CONST_POLY_INT:
      {
	inchash::hash h;
	h.add_int (hash);
	for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
	  h.add_wide_int (CONST_POLY_INT_COEFFS (x)[i]);
	return h.end ();
      }

    case CONST_DOUBLE:
      /* This is like the general case, except that it only counts
	 the integers representing the constant.  */
      hash += (unsigned int) code + (unsigned int) GET_MODE (x);
      if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (x) == VOIDmode)
	hash += ((unsigned int) CONST_DOUBLE_LOW (x)
		 + (unsigned int) CONST_DOUBLE_HIGH (x));
      else
	hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
      return hash;

    case CONST_FIXED:
      hash += (unsigned int) code + (unsigned int) GET_MODE (x);
      hash += fixed_hash (CONST_FIXED_VALUE (x));
      return hash;

    case CONST_VECTOR:
      {
	int units;
	rtx elt;

	/* Hash only the encoded elements, not the full vector.  */
	units = const_vector_encoded_nelts (x);

	for (i = 0; i < units; ++i)
	  {
	    elt = CONST_VECTOR_ENCODED_ELT (x, i);
	    hash += hash_rtx (elt, GET_MODE (elt),
			      do_not_record_p, hash_arg_in_memory_p,
			      have_reg_qty, cb);
	  }

	return hash;
      }

    /* Assume there is only one rtx object for any given label.  */
    case LABEL_REF:
      /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
	 differences and differences between each stage's debugging dumps.  */
      hash += (((unsigned int) LABEL_REF << 7)
	       + CODE_LABEL_NUMBER (label_ref_label (x)));
      return hash;

    case SYMBOL_REF:
      {
	/* Don't hash on the symbol's address to avoid bootstrap differences.
	   Different hash values may cause expressions to be recorded in
	   different orders and thus different registers to be used in the
	   final assembler.  This also avoids differences in the dump files
	   between various stages.  */
	unsigned int h = 0;
	const unsigned char *p = (const unsigned char *) XSTR (x, 0);

	while (*p)
	  h += (h << 7) + *p++; /* ??? revisit */

	hash += ((unsigned int) SYMBOL_REF << 7) + h;
	return hash;
      }

    case MEM:
      /* We don't record if marked volatile or if BLKmode since we don't
	 know the size of the move.  */
      if (do_not_record_p && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
	{
	  *do_not_record_p = 1;
	  return 0;
	}
      if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
	*hash_arg_in_memory_p = 1;

      /* Now that we have already found this special case,
	 might as well speed it up as much as possible.  */
      hash += (unsigned) MEM;
      x = XEXP (x, 0);
      goto repeat;

    case USE:
      /* A USE that mentions non-volatile memory needs special
	 handling since the MEM may be BLKmode which normally
	 prevents an entry from being made.  Pure calls are
	 marked by a USE which mentions BLKmode memory.
	 See calls.cc:emit_call_1.  */
      if (MEM_P (XEXP (x, 0))
	  && ! MEM_VOLATILE_P (XEXP (x, 0)))
	{
	  hash += (unsigned) USE;
	  x = XEXP (x, 0);

	  if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
	    *hash_arg_in_memory_p = 1;

	  /* Now that we have already found this special case,
	     might as well speed it up as much as possible.  */
	  hash += (unsigned) MEM;
	  x = XEXP (x, 0);
	  goto repeat;
	}
      break;

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
    case PC:
    case CALL:
    case UNSPEC_VOLATILE:
      /* When DO_NOT_RECORD_P is supplied, flag these codes as
	 unrecordable; otherwise just return the hash gathered so far.  */
      if (do_not_record_p) {
	*do_not_record_p = 1;
	return 0;
      }
      else
	return hash;
      break;

    case ASM_OPERANDS:
      if (do_not_record_p && MEM_VOLATILE_P (x))
	{
	  *do_not_record_p = 1;
	  return 0;
	}
      else
	{
	  /* We don't want to take the filename and line into account.  */
	  hash += (unsigned) code + (unsigned) GET_MODE (x)
	    + hash_rtx_string (ASM_OPERANDS_TEMPLATE (x))
	    + hash_rtx_string (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
	    + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);

	  if (ASM_OPERANDS_INPUT_LENGTH (x))
	    {
	      /* Inputs 1..n-1 are hashed recursively; input 0 is handled
		 by iteration through the REPEAT label below.  */
	      for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
		{
		  hash += (hash_rtx (ASM_OPERANDS_INPUT (x, i),
				     GET_MODE (ASM_OPERANDS_INPUT (x, i)),
				     do_not_record_p, hash_arg_in_memory_p,
				     have_reg_qty, cb)
			   + hash_rtx_string
			   (ASM_OPERANDS_INPUT_CONSTRAINT (x, i)));
		}

	      hash += hash_rtx_string (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
	      x = ASM_OPERANDS_INPUT (x, 0);
	      mode = GET_MODE (x);
	      goto repeat;
	    }

	  return hash;
	}
      break;

    default:
      break;
    }

  /* Generic case: combine the code, the mode and every operand as
     directed by the rtx format string.  */
  i = GET_RTX_LENGTH (code) - 1;
  hash += (unsigned) code + (unsigned) GET_MODE (x);
  fmt = GET_RTX_FORMAT (code);
  for (; i >= 0; i--)
    {
      switch (fmt[i])
	{
	case 'e':
	  /* If we are about to do the last recursive call
	     needed at this level, change it into iteration.
	     This function is called enough to be worth it.  */
	  if (i == 0)
	    {
	      x = XEXP (x, i);
	      goto repeat;
	    }

	  hash += hash_rtx (XEXP (x, i), VOIDmode, do_not_record_p,
			    hash_arg_in_memory_p,
			    have_reg_qty, cb);
	  break;

	case 'E':
	  for (j = 0; j < XVECLEN (x, i); j++)
	    hash += hash_rtx (XVECEXP (x, i, j), VOIDmode, do_not_record_p,
			      hash_arg_in_memory_p,
			      have_reg_qty, cb);
	  break;

	case 's':
	  hash += hash_rtx_string (XSTR (x, i));
	  break;

	case 'i':
	  hash += (unsigned int) XINT (x, i);
	  break;

	case 'L':
	  hash += (unsigned int) XLOC (x, i);
	  break;

	case 'p':
	  hash += constant_lower_bound (SUBREG_BYTE (x));
	  break;

	case '0': case 't':
	  /* Unused.  */
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  return hash;
}
2557 :
2558 : /* Hash an rtx X for cse via hash_rtx.
2559 : Stores 1 in do_not_record if any subexpression is volatile.
2560 : Stores 1 in hash_arg_in_memory if X contains a mem rtx which
2561 : does not have the MEM_READONLY_P flag set. */
2562 :
2563 : static inline unsigned
2564 509636840 : canon_hash (rtx x, machine_mode mode)
2565 : {
2566 509636840 : return hash_rtx (x, mode, &do_not_record, &hash_arg_in_memory, true);
2567 : }
2568 :
2569 : /* Like canon_hash but with no side effects, i.e. do_not_record
2570 : and hash_arg_in_memory are not changed. */
2571 :
2572 : static inline unsigned
2573 161733000 : safe_hash (rtx x, machine_mode mode)
2574 : {
2575 161733000 : int dummy_do_not_record;
2576 161733000 : return hash_rtx (x, mode, &dummy_do_not_record, NULL, true);
2577 : }
2578 :
2579 : /* Return true iff X and Y would canonicalize into the same thing,
2580 : without actually constructing the canonicalization of either one.
2581 : If VALIDATE is nonzero,
2582 : we assume X is an expression being processed from the rtl
2583 : and Y was found in the hash table. We check register refs
2584 : in Y for being marked as valid.
2585 :
2586 : If FOR_GCSE is true, we compare X and Y for equivalence for GCSE. */
2587 :
2588 : bool
2589 753309944 : exp_equiv_p (const_rtx x, const_rtx y, int validate, bool for_gcse)
2590 : {
2591 753309944 : int i, j;
2592 753309944 : enum rtx_code code;
2593 753309944 : const char *fmt;
2594 :
2595 : /* Note: it is incorrect to assume an expression is equivalent to itself
2596 : if VALIDATE is nonzero. */
2597 753309944 : if (x == y && !validate)
2598 : return true;
2599 :
2600 730855985 : if (x == 0 || y == 0)
2601 : return x == y;
2602 :
2603 730855985 : code = GET_CODE (x);
2604 730855985 : if (code != GET_CODE (y))
2605 : return false;
2606 :
2607 : /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2608 624406233 : if (GET_MODE (x) != GET_MODE (y))
2609 : return false;
2610 :
2611 : /* MEMs referring to different address space are not equivalent. */
2612 650175563 : if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
2613 : return false;
2614 :
2615 549229925 : switch (code)
2616 : {
2617 : case PC:
2618 : CASE_CONST_UNIQUE:
2619 : return x == y;
2620 :
2621 : case CONST_VECTOR:
2622 : if (!same_vector_encodings_p (x, y))
2623 : return false;
2624 : break;
2625 :
2626 21460 : case LABEL_REF:
2627 21460 : return label_ref_label (x) == label_ref_label (y);
2628 :
2629 17037437 : case SYMBOL_REF:
2630 17037437 : return XSTR (x, 0) == XSTR (y, 0);
2631 :
2632 163246133 : case REG:
2633 163246133 : if (for_gcse)
2634 1536624 : return REGNO (x) == REGNO (y);
2635 : else
2636 : {
2637 161709509 : unsigned int regno = REGNO (y);
2638 161709509 : unsigned int i;
2639 161709509 : unsigned int endregno = END_REGNO (y);
2640 :
2641 : /* If the quantities are not the same, the expressions are not
2642 : equivalent. If there are and we are not to validate, they
2643 : are equivalent. Otherwise, ensure all regs are up-to-date. */
2644 :
2645 161709509 : if (REG_QTY (REGNO (x)) != REG_QTY (regno))
2646 : return false;
2647 :
2648 148350544 : if (! validate)
2649 : return true;
2650 :
2651 274903477 : for (i = regno; i < endregno; i++)
2652 138605411 : if (REG_IN_TABLE (i) != REG_TICK (i))
2653 : return false;
2654 :
2655 : return true;
2656 : }
2657 :
2658 98941144 : case MEM:
2659 98941144 : if (for_gcse)
2660 : {
2661 : /* A volatile mem should not be considered equivalent to any
2662 : other. */
2663 55922904 : if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2664 : return false;
2665 :
2666 : /* Can't merge two expressions in different alias sets, since we
2667 : can decide that the expression is transparent in a block when
2668 : it isn't, due to it being set with the different alias set.
2669 :
2670 : Also, can't merge two expressions with different MEM_ATTRS.
2671 : They could e.g. be two different entities allocated into the
2672 : same space on the stack (see e.g. PR25130). In that case, the
2673 : MEM addresses can be the same, even though the two MEMs are
2674 : absolutely not equivalent.
2675 :
2676 : But because really all MEM attributes should be the same for
2677 : equivalent MEMs, we just use the invariant that MEMs that have
2678 : the same attributes share the same mem_attrs data structure. */
2679 55812905 : if (!mem_attrs_eq_p (MEM_ATTRS (x), MEM_ATTRS (y)))
2680 : return false;
2681 :
2682 : /* If we are handling exceptions, we cannot consider two expressions
2683 : with different trapping status as equivalent, because simple_mem
2684 : might accept one and reject the other. */
2685 9065529 : if (cfun->can_throw_non_call_exceptions
2686 9065529 : && (MEM_NOTRAP_P (x) != MEM_NOTRAP_P (y)))
2687 : return false;
2688 : }
2689 : break;
2690 :
2691 : /* For commutative operations, check both orders. */
2692 69177926 : case PLUS:
2693 69177926 : case MULT:
2694 69177926 : case AND:
2695 69177926 : case IOR:
2696 69177926 : case XOR:
2697 69177926 : case NE:
2698 69177926 : case EQ:
2699 69177926 : return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0),
2700 : validate, for_gcse)
2701 63198519 : && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2702 : validate, for_gcse))
2703 75877639 : || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2704 : validate, for_gcse)
2705 17546 : && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2706 : validate, for_gcse)));
2707 :
2708 12823 : case ASM_OPERANDS:
2709 : /* We don't use the generic code below because we want to
2710 : disregard filename and line numbers. */
2711 :
2712 : /* A volatile asm isn't equivalent to any other. */
2713 12823 : if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2714 : return false;
2715 :
2716 12823 : if (GET_MODE (x) != GET_MODE (y)
2717 12823 : || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
2718 12823 : || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2719 12823 : ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
2720 12813 : || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
2721 12813 : || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
2722 : return false;
2723 :
2724 12813 : if (ASM_OPERANDS_INPUT_LENGTH (x))
2725 : {
2726 17240 : for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
2727 8719 : if (! exp_equiv_p (ASM_OPERANDS_INPUT (x, i),
2728 8719 : ASM_OPERANDS_INPUT (y, i),
2729 : validate, for_gcse)
2730 8719 : || strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
2731 8639 : ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
2732 : return false;
2733 : }
2734 :
2735 : return true;
2736 :
2737 : default:
2738 : break;
2739 : }
2740 :
2741 : /* Compare the elements. If any pair of corresponding elements
2742 : fail to match, return 0 for the whole thing. */
2743 :
2744 118856238 : fmt = GET_RTX_FORMAT (code);
2745 332223671 : for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2746 : {
2747 225593173 : switch (fmt[i])
2748 : {
2749 153326359 : case 'e':
2750 153326359 : if (! exp_equiv_p (XEXP (x, i), XEXP (y, i),
2751 : validate, for_gcse))
2752 : return false;
2753 : break;
2754 :
2755 8984575 : case 'E':
2756 8984575 : if (XVECLEN (x, i) != XVECLEN (y, i))
2757 : return 0;
2758 42217988 : for (j = 0; j < XVECLEN (x, i); j++)
2759 34082574 : if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2760 : validate, for_gcse))
2761 : return false;
2762 : break;
2763 :
2764 0 : case 's':
2765 0 : if (strcmp (XSTR (x, i), XSTR (y, i)))
2766 : return false;
2767 : break;
2768 :
2769 3486165 : case 'i':
2770 3486165 : if (XINT (x, i) != XINT (y, i))
2771 : return false;
2772 : break;
2773 :
2774 0 : case 'L':
2775 0 : if (XLOC (x, i) != XLOC (y, i))
2776 : return false;
2777 : break;
2778 :
2779 0 : case 'w':
2780 0 : if (XWINT (x, i) != XWINT (y, i))
2781 : return false;
2782 : break;
2783 :
2784 7722237 : case 'p':
2785 7722237 : if (maybe_ne (SUBREG_BYTE (x), SUBREG_BYTE (y)))
2786 : return false;
2787 : break;
2788 :
2789 : case '0':
2790 : case 't':
2791 : break;
2792 :
2793 0 : default:
2794 0 : gcc_unreachable ();
2795 : }
2796 : }
2797 :
2798 : return true;
2799 : }
2800 :
2801 : /* Subroutine of canon_reg. Pass *XLOC through canon_reg, and validate
2802 : the result if necessary. INSN is as for canon_reg. */
2803 :
2804 : static void
2805 1011886284 : validate_canon_reg (rtx *xloc, rtx_insn *insn)
2806 : {
2807 1011886284 : if (*xloc)
2808 : {
2809 1011886284 : rtx new_rtx = canon_reg (*xloc, insn);
2810 :
2811 : /* If replacing pseudo with hard reg or vice versa, ensure the
2812 : insn remains valid. Likewise if the insn has MATCH_DUPs. */
2813 1011886284 : gcc_assert (insn && new_rtx);
2814 1011886284 : validate_change (insn, xloc, new_rtx, 1);
2815 : }
2816 1011886284 : }
2817 :
/* Canonicalize an expression:
   replace each register reference inside it
   with the "oldest" equivalent register.

   If INSN is nonzero validate_change is used to ensure that INSN remains valid
   after we make our substitution.  The calls are made with IN_GROUP nonzero
   so apply_change_group must be called upon the outermost return from this
   function (unless INSN is zero).  The result of apply_change_group can
   generally be discarded since the changes we are making are optional.  */

static rtx
canon_reg (rtx x, rtx_insn *insn)
{
  int i;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return x;

  code = GET_CODE (x);
  switch (code)
    {
    case PC:
    case CONST:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      /* These contain no register references to canonicalize.  */
      return x;

    case SUBREG:
      {
	/* Canonicalize the inner rtx first.  If it changed, prefer a
	   simplified form of the whole SUBREG; failing that, substitute
	   the canonicalized inner rtx only when the SUBREG stays valid.  */
	rtx inner = canon_reg (SUBREG_REG (x), insn);
	if (inner != SUBREG_REG (x))
	  {
	    rtx newx = simplify_subreg (GET_MODE (x), inner,
					GET_MODE (SUBREG_REG (x)),
					SUBREG_BYTE (x));
	    if (newx)
	      return newx;

	    if (validate_subreg (GET_MODE (x), GET_MODE (inner),
				 inner, SUBREG_BYTE (x)))
	      validate_change (insn, &SUBREG_REG (x), inner, 1);
	  }
	return x;
      }

    case REG:
      {
	int first;
	int q;
	struct qty_table_elem *ent;

	/* Never replace a hard reg, because hard regs can appear
	   in more than one machine mode, and we must preserve the mode
	   of each occurrence.  Also, some hard regs appear in
	   MEMs that are shared and mustn't be altered.  Don't try to
	   replace any reg that maps to a reg of class NO_REGS.  */
	if (REGNO (x) < FIRST_PSEUDO_REGISTER
	    || ! REGNO_QTY_VALID_P (REGNO (x)))
	  return x;

	/* Map the register to the oldest register of its quantity class:
	   a pseudo is used directly; a hard reg only if its class allows.  */
	q = REG_QTY (REGNO (x));
	ent = &qty_table[q];
	first = ent->first_reg;
	return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
		: REGNO_REG_CLASS (first) == NO_REGS ? x
		: gen_rtx_REG (ent->mode, first));
      }

    default:
      break;
    }

  /* Not a leaf: recurse over every 'e' operand and every element of
     every 'E' vector operand, validating each substitution.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int j;

      if (fmt[i] == 'e')
	validate_canon_reg (&XEXP (x, i), insn);
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  validate_canon_reg (&XVECEXP (x, i, j), insn);
    }

  return x;
}
2909 :
/* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
   operation (EQ, NE, GT, etc.), follow it back through the hash table and
   what values are being compared.

   *PARG1 and *PARG2 are updated to contain the rtx representing the values
   actually being compared.  For example, if *PARG1 was (reg:CC CC_REG) and
   *PARG2 was (const_int 0), *PARG1 and *PARG2 will be set to the objects that
   were compared to produce (reg:CC CC_REG).

   The return value is the comparison operator and is either the code of
   A or the code corresponding to the inverse of the comparison.  */

static enum rtx_code
find_comparison_args (enum rtx_code code, rtx *parg1, rtx *parg2,
		      machine_mode *pmode1, machine_mode *pmode2)
{
  rtx arg1, arg2;
  /* Allocated lazily on the second iteration; records expressions already
     followed so the walk below cannot revisit them and loop forever.  */
  hash_set<rtx> *visited = NULL;
  /* Set nonzero when we find something of interest.  */
  rtx x = NULL;

  arg1 = *parg1, arg2 = *parg2;

  /* If ARG2 is const0_rtx, see what ARG1 is equivalent to.  */

  while (arg2 == CONST0_RTX (GET_MODE (arg1)))
    {
      int reverse_code = 0;
      struct table_elt *p = 0;

      /* Remember state from previous iteration.  */
      if (x)
	{
	  if (!visited)
	    visited = new hash_set<rtx>;
	  visited->add (x);
	  x = 0;
	}

      /* If arg1 is a COMPARE, extract the comparison arguments from it.  */

      if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
	x = arg1;

      /* If ARG1 is a comparison operator and CODE is testing for
	 STORE_FLAG_VALUE, get the inner arguments.  */

      else if (COMPARISON_P (arg1))
	{
#ifdef FLOAT_STORE_FLAG_VALUE
	  REAL_VALUE_TYPE fsfv;
#endif

	  if (code == NE
	      || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
		  && code == LT && STORE_FLAG_VALUE == -1)
#ifdef FLOAT_STORE_FLAG_VALUE
	      || (SCALAR_FLOAT_MODE_P (GET_MODE (arg1))
		  && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
		      REAL_VALUE_NEGATIVE (fsfv)))
#endif
	      )
	    x = arg1;
	  else if (code == EQ
		   || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
		       && code == GE && STORE_FLAG_VALUE == -1)
#ifdef FLOAT_STORE_FLAG_VALUE
		   || (SCALAR_FLOAT_MODE_P (GET_MODE (arg1))
		       && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
			   REAL_VALUE_NEGATIVE (fsfv)))
#endif
		   )
	    x = arg1, reverse_code = 1;
	}

      /* ??? We could also check for

	 (ne (and (eq (...) (const_int 1))) (const_int 0))

	 and related forms, but let's wait until we see them occurring.  */

      if (x == 0)
	/* Look up ARG1 in the hash table and see if it has an equivalence
	   that lets us see what is being compared.  */
	p = lookup (arg1, SAFE_HASH (arg1, GET_MODE (arg1)), GET_MODE (arg1));
      if (p)
	{
	  p = p->first_same_value;

	  /* If what we compare is already known to be constant, that is as
	     good as it gets.
	     We need to break the loop in this case, because otherwise we
	     can have an infinite loop when looking at a reg that is known
	     to be a constant which is the same as a comparison of a reg
	     against zero which appears later in the insn stream, which in
	     turn is constant and the same as the comparison of the first reg
	     against zero...  */
	  if (p->is_const)
	    break;
	}

      /* Walk the equivalence class of ARG1 looking for a COMPARE or a
	 comparison operator that reveals what is actually being tested.  */
      for (; p; p = p->next_same_value)
	{
	  machine_mode inner_mode = GET_MODE (p->exp);
#ifdef FLOAT_STORE_FLAG_VALUE
	  REAL_VALUE_TYPE fsfv;
#endif

	  /* If the entry isn't valid, skip it.  */
	  if (! exp_equiv_p (p->exp, p->exp, 1, false))
	    continue;

	  /* If it's a comparison we've used before, skip it.  */
	  if (visited && visited->contains (p->exp))
	    continue;

	  if (GET_CODE (p->exp) == COMPARE
	      /* Another possibility is that this machine has a compare insn
		 that includes the comparison code.  In that case, ARG1 would
		 be equivalent to a comparison operation that would set ARG1 to
		 either STORE_FLAG_VALUE or zero.  If this is an NE operation,
		 ORIG_CODE is the actual comparison being done; if it is an EQ,
		 we must reverse ORIG_CODE.  On machine with a negative value
		 for STORE_FLAG_VALUE, also look at LT and GE operations.  */
	      || ((code == NE
		   || (code == LT
		       && val_signbit_known_set_p (inner_mode,
						   STORE_FLAG_VALUE))
#ifdef FLOAT_STORE_FLAG_VALUE
		   || (code == LT
		       && SCALAR_FLOAT_MODE_P (inner_mode)
		       && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
			   REAL_VALUE_NEGATIVE (fsfv)))
#endif
		   )
		  && COMPARISON_P (p->exp)))
	    {
	      x = p->exp;
	      break;
	    }
	  else if ((code == EQ
		    || (code == GE
			&& val_signbit_known_set_p (inner_mode,
						    STORE_FLAG_VALUE))
#ifdef FLOAT_STORE_FLAG_VALUE
		    || (code == GE
			&& SCALAR_FLOAT_MODE_P (inner_mode)
			&& (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
			    REAL_VALUE_NEGATIVE (fsfv)))
#endif
		    )
		   && COMPARISON_P (p->exp))
	    {
	      /* The equivalence is the inverse of the test CODE asks for.  */
	      reverse_code = 1;
	      x = p->exp;
	      break;
	    }

	  /* If this non-trapping address, e.g. fp + constant, the
	     equivalent is a better operand since it may let us predict
	     the value of the comparison.  */
	  else if (!rtx_addr_can_trap_p (p->exp))
	    {
	      arg1 = p->exp;
	      continue;
	    }
	}

      /* If we didn't find a useful equivalence for ARG1, we are done.
	 Otherwise, set up for the next iteration.  */
      if (x == 0)
	break;

      /* If we need to reverse the comparison, make sure that is
	 possible -- we can't necessarily infer the value of GE from LT
	 with floating-point operands.  */
      if (reverse_code)
	{
	  enum rtx_code reversed = reversed_comparison_code (x, NULL);
	  if (reversed == UNKNOWN)
	    break;
	  else
	    code = reversed;
	}
      else if (COMPARISON_P (x))
	code = GET_CODE (x);
      arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
    }

  /* Return our results.  Return the modes from before fold_rtx
     because fold_rtx might produce const_int, and then it's too late.  */
  *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
  *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);

  if (visited)
    delete visited;
  return code;
}
3108 :
3109 : /* If X is a nontrivial arithmetic operation on an argument for which
3110 : a constant value can be determined, return the result of operating
3111 : on that value, as a constant. Otherwise, return X, possibly with
3112 : one or more operands changed to a forward-propagated constant.
3113 :
3114 : If X is a register whose contents are known, we do NOT return
3115 : those contents here; equiv_constant is called to perform that task.
3116 : For SUBREGs and MEMs, we do that both here and in equiv_constant.
3117 :
3118 : INSN is the insn that we may be modifying. If it is 0, make a copy
3119 : of X before modifying it. */
3120 :
3121 : static rtx
3122 392651461 : fold_rtx (rtx x, rtx_insn *insn)
3123 : {
3124 392653245 : enum rtx_code code;
3125 392653245 : machine_mode mode;
3126 392653245 : const char *fmt;
3127 392653245 : int i;
3128 392653245 : rtx new_rtx = 0;
3129 392653245 : bool changed = false;
3130 392653245 : poly_int64 xval;
3131 :
3132 : /* Operands of X. */
3133 : /* Workaround -Wmaybe-uninitialized false positive during
3134 : profiledbootstrap by initializing them. */
3135 392653245 : rtx folded_arg0 = NULL_RTX;
3136 392653245 : rtx folded_arg1 = NULL_RTX;
3137 :
3138 : /* Constant equivalents of first three operands of X;
3139 : 0 when no such equivalent is known. */
3140 392653245 : rtx const_arg0;
3141 392653245 : rtx const_arg1;
3142 392653245 : rtx const_arg2;
3143 :
3144 : /* The mode of the first operand of X. We need this for sign and zero
3145 : extends. */
3146 392653245 : machine_mode mode_arg0;
3147 :
3148 392653245 : if (x == 0)
3149 : return x;
3150 :
3151 : /* Try to perform some initial simplifications on X. */
3152 392653245 : code = GET_CODE (x);
3153 392653245 : switch (code)
3154 : {
3155 61260209 : case MEM:
3156 61260209 : case SUBREG:
3157 : /* The first operand of a SIGN/ZERO_EXTRACT has a different meaning
3158 : than it would in other contexts. Basically its mode does not
3159 : signify the size of the object read. That information is carried
3160 : by size operand. If we happen to have a MEM of the appropriate
3161 : mode in our tables with a constant value we could simplify the
3162 : extraction incorrectly if we allowed substitution of that value
3163 : for the MEM. */
3164 61260209 : case ZERO_EXTRACT:
3165 61260209 : case SIGN_EXTRACT:
3166 61260209 : if ((new_rtx = equiv_constant (x)) != NULL_RTX)
3167 : return new_rtx;
3168 : return x;
3169 :
3170 : case CONST:
3171 : CASE_CONST_ANY:
3172 : case SYMBOL_REF:
3173 : case LABEL_REF:
3174 : case REG:
3175 : case PC:
3176 : /* No use simplifying an EXPR_LIST
3177 : since they are used only for lists of args
3178 : in a function call's REG_EQUAL note. */
3179 : case EXPR_LIST:
3180 : return x;
3181 :
3182 199858 : case ASM_OPERANDS:
3183 199858 : if (insn)
3184 : {
3185 0 : for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
3186 0 : validate_change (insn, &ASM_OPERANDS_INPUT (x, i),
3187 0 : fold_rtx (ASM_OPERANDS_INPUT (x, i), insn), 0);
3188 : }
3189 : return x;
3190 :
3191 15298139 : case CALL:
3192 15298139 : if (NO_FUNCTION_CSE && CONSTANT_P (XEXP (XEXP (x, 0), 0)))
3193 : return x;
3194 : break;
3195 897501 : case VEC_SELECT:
3196 897501 : {
3197 897501 : rtx trueop0 = XEXP (x, 0);
3198 897501 : mode = GET_MODE (trueop0);
3199 897501 : rtx trueop1 = XEXP (x, 1);
3200 : /* If we select a low-part subreg, return that. */
3201 897501 : if (vec_series_lowpart_p (GET_MODE (x), mode, trueop1))
3202 : {
3203 219 : rtx new_rtx = lowpart_subreg (GET_MODE (x), trueop0, mode);
3204 219 : if (new_rtx != NULL_RTX)
3205 : return new_rtx;
3206 : }
3207 : }
3208 :
3209 : /* Anything else goes through the loop below. */
3210 : default:
3211 : break;
3212 : }
3213 :
3214 114212610 : mode = GET_MODE (x);
3215 114212610 : const_arg0 = 0;
3216 114212610 : const_arg1 = 0;
3217 114212610 : const_arg2 = 0;
3218 114212610 : mode_arg0 = VOIDmode;
3219 :
3220 : /* Try folding our operands.
3221 : Then see which ones have constant values known. */
3222 :
3223 114212610 : fmt = GET_RTX_FORMAT (code);
3224 356574861 : for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3225 242362251 : if (fmt[i] == 'e')
3226 : {
3227 237915353 : rtx folded_arg = XEXP (x, i), const_arg;
3228 237915353 : machine_mode mode_arg = GET_MODE (folded_arg);
3229 :
3230 237915353 : switch (GET_CODE (folded_arg))
3231 : {
3232 105927494 : case MEM:
3233 105927494 : case REG:
3234 105927494 : case SUBREG:
3235 105927494 : const_arg = equiv_constant (folded_arg);
3236 105927494 : break;
3237 :
3238 : case CONST:
3239 : CASE_CONST_ANY:
3240 : case SYMBOL_REF:
3241 : case LABEL_REF:
3242 : const_arg = folded_arg;
3243 : break;
3244 :
3245 44993729 : default:
3246 44993729 : folded_arg = fold_rtx (folded_arg, insn);
3247 44993729 : const_arg = equiv_constant (folded_arg);
3248 44993729 : break;
3249 : }
3250 :
3251 : /* For the first three operands, see if the operand
3252 : is constant or equivalent to a constant. */
3253 237915353 : switch (i)
3254 : {
3255 111540520 : case 0:
3256 111540520 : folded_arg0 = folded_arg;
3257 111540520 : const_arg0 = const_arg;
3258 111540520 : mode_arg0 = mode_arg;
3259 111540520 : break;
3260 105707198 : case 1:
3261 105707198 : folded_arg1 = folded_arg;
3262 105707198 : const_arg1 = const_arg;
3263 105707198 : break;
3264 20667635 : case 2:
3265 20667635 : const_arg2 = const_arg;
3266 20667635 : break;
3267 : }
3268 :
3269 : /* Pick the least expensive of the argument and an equivalent constant
3270 : argument. */
3271 237915353 : if (const_arg != 0
3272 237915353 : && const_arg != folded_arg
3273 6042722 : && (COST_IN (const_arg, mode_arg, code, i)
3274 3021361 : <= COST_IN (folded_arg, mode_arg, code, i))
3275 :
3276 : /* It's not safe to substitute the operand of a conversion
3277 : operator with a constant, as the conversion's identity
3278 : depends upon the mode of its operand. This optimization
3279 : is handled by the call to simplify_unary_operation. */
3280 239518258 : && (GET_RTX_CLASS (code) != RTX_UNARY
3281 404774 : || GET_MODE (const_arg) == mode_arg0
3282 327423 : || (code != ZERO_EXTEND
3283 : && code != SIGN_EXTEND
3284 327423 : && code != TRUNCATE
3285 327423 : && code != FLOAT_TRUNCATE
3286 256201 : && code != FLOAT_EXTEND
3287 256201 : && code != FLOAT
3288 : && code != FIX
3289 256027 : && code != UNSIGNED_FLOAT
3290 256027 : && code != UNSIGNED_FIX)))
3291 : folded_arg = const_arg;
3292 :
3293 237915353 : if (folded_arg == XEXP (x, i))
3294 235879817 : continue;
3295 :
3296 2035536 : if (insn == NULL_RTX && !changed)
3297 1819737 : x = copy_rtx (x);
3298 2035536 : changed = true;
3299 2035536 : validate_unshare_change (insn, &XEXP (x, i), folded_arg, 1);
3300 : }
3301 :
3302 114212610 : if (changed)
3303 : {
3304 : /* Canonicalize X if necessary, and keep const_argN and folded_argN
3305 : consistent with the order in X. */
3306 1820194 : if (canonicalize_change_group (insn, x))
3307 : {
3308 94513 : std::swap (const_arg0, const_arg1);
3309 94513 : std::swap (folded_arg0, folded_arg1);
3310 : }
3311 :
3312 1820194 : apply_change_group ();
3313 : }
3314 :
3315 : /* If X is an arithmetic operation, see if we can simplify it. */
3316 :
3317 114212610 : switch (GET_RTX_CLASS (code))
3318 : {
3319 5833322 : case RTX_UNARY:
3320 5833322 : {
3321 : /* We can't simplify extension ops unless we know the
3322 : original mode. */
3323 5833322 : if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
3324 4178747 : && mode_arg0 == VOIDmode)
3325 : break;
3326 :
3327 5833322 : new_rtx = simplify_unary_operation (code, mode,
3328 : const_arg0 ? const_arg0 : folded_arg0,
3329 : mode_arg0);
3330 : }
3331 5833322 : break;
3332 :
3333 22002079 : case RTX_COMPARE:
3334 22002079 : case RTX_COMM_COMPARE:
3335 : /* See what items are actually being compared and set FOLDED_ARG[01]
3336 : to those values and CODE to the actual comparison code. If any are
3337 : constant, set CONST_ARG0 and CONST_ARG1 appropriately. We needn't
3338 : do anything if both operands are already known to be constant. */
3339 :
3340 : /* ??? Vector mode comparisons are not supported yet. */
3341 22002079 : if (VECTOR_MODE_P (mode))
3342 : break;
3343 :
3344 21873922 : if (const_arg0 == 0 || const_arg1 == 0)
3345 : {
3346 21872847 : struct table_elt *p0, *p1;
3347 21872847 : rtx true_rtx, false_rtx;
3348 21872847 : machine_mode mode_arg1;
3349 :
3350 21872847 : if (SCALAR_FLOAT_MODE_P (mode))
3351 : {
3352 : #ifdef FLOAT_STORE_FLAG_VALUE
3353 : true_rtx = (const_double_from_real_value
3354 : (FLOAT_STORE_FLAG_VALUE (mode), mode));
3355 : #else
3356 2380 : true_rtx = NULL_RTX;
3357 : #endif
3358 2380 : false_rtx = CONST0_RTX (mode);
3359 : }
3360 : else
3361 : {
3362 21870467 : true_rtx = const_true_rtx;
3363 21870467 : false_rtx = const0_rtx;
3364 : }
3365 :
3366 21872847 : code = find_comparison_args (code, &folded_arg0, &folded_arg1,
3367 : &mode_arg0, &mode_arg1);
3368 :
3369 : /* If the mode is VOIDmode or a MODE_CC mode, we don't know
3370 : what kinds of things are being compared, so we can't do
3371 : anything with this comparison. */
3372 :
3373 21872847 : if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
3374 : break;
3375 :
3376 20557068 : const_arg0 = equiv_constant (folded_arg0);
3377 20557068 : const_arg1 = equiv_constant (folded_arg1);
3378 :
3379 : /* If we do not now have two constants being compared, see
3380 : if we can nevertheless deduce some things about the
3381 : comparison. */
3382 20557068 : if (const_arg0 == 0 || const_arg1 == 0)
3383 : {
3384 20321414 : if (const_arg1 != NULL)
3385 : {
3386 15086701 : rtx cheapest_simplification;
3387 15086701 : int cheapest_cost;
3388 15086701 : rtx simp_result;
3389 15086701 : struct table_elt *p;
3390 :
3391 : /* See if we can find an equivalent of folded_arg0
3392 : that gets us a cheaper expression, possibly a
3393 : constant through simplifications. */
3394 15086701 : p = lookup (folded_arg0, SAFE_HASH (folded_arg0, mode_arg0),
3395 : mode_arg0);
3396 :
3397 15086701 : if (p != NULL)
3398 : {
3399 6387245 : cheapest_simplification = x;
3400 6387245 : cheapest_cost = COST (x, mode);
3401 :
3402 18578330 : for (p = p->first_same_value; p != NULL; p = p->next_same_value)
3403 : {
3404 12191085 : int cost;
3405 :
3406 : /* If the entry isn't valid, skip it. */
3407 12191085 : if (! exp_equiv_p (p->exp, p->exp, 1, false))
3408 501865 : continue;
3409 :
3410 : /* Try to simplify using this equivalence. */
3411 11689220 : simp_result
3412 11689220 : = simplify_relational_operation (code, mode,
3413 : mode_arg0,
3414 : p->exp,
3415 : const_arg1);
3416 :
3417 11689220 : if (simp_result == NULL)
3418 11551910 : continue;
3419 :
3420 137310 : cost = COST (simp_result, mode);
3421 137310 : if (cost < cheapest_cost)
3422 : {
3423 12191085 : cheapest_cost = cost;
3424 12191085 : cheapest_simplification = simp_result;
3425 : }
3426 : }
3427 :
3428 : /* If we have a cheaper expression now, use that
3429 : and try folding it further, from the top. */
3430 6387245 : if (cheapest_simplification != x)
3431 1757 : return fold_rtx (copy_rtx (cheapest_simplification),
3432 11135 : insn);
3433 : }
3434 : }
3435 :
3436 : /* See if the two operands are the same. */
3437 :
3438 20543601 : if ((REG_P (folded_arg0)
3439 17380920 : && REG_P (folded_arg1)
3440 4665936 : && (REG_QTY (REGNO (folded_arg0))
3441 4665936 : == REG_QTY (REGNO (folded_arg1))))
3442 37912801 : || ((p0 = lookup (folded_arg0,
3443 : SAFE_HASH (folded_arg0, mode_arg0),
3444 : mode_arg0))
3445 8961094 : && (p1 = lookup (folded_arg1,
3446 : SAFE_HASH (folded_arg1, mode_arg0),
3447 : mode_arg0))
3448 2648505 : && p0->first_same_value == p1->first_same_value))
3449 12686 : folded_arg1 = folded_arg0;
3450 :
3451 : /* If FOLDED_ARG0 is a register, see if the comparison we are
3452 : doing now is either the same as we did before or the reverse
3453 : (we only check the reverse if not floating-point). */
3454 20530915 : else if (REG_P (folded_arg0))
3455 : {
3456 17368825 : int qty = REG_QTY (REGNO (folded_arg0));
3457 :
3458 17368825 : if (REGNO_QTY_VALID_P (REGNO (folded_arg0)))
3459 : {
3460 17358027 : struct qty_table_elem *ent = &qty_table[qty];
3461 :
3462 17358027 : if ((comparison_dominates_p (ent->comparison_code, code)
3463 16884954 : || (! FLOAT_MODE_P (mode_arg0)
3464 16667113 : && comparison_dominates_p (ent->comparison_code,
3465 : reverse_condition (code))))
3466 17802464 : && (rtx_equal_p (ent->comparison_const, folded_arg1)
3467 909692 : || (const_arg1
3468 753343 : && rtx_equal_p (ent->comparison_const,
3469 : const_arg1))
3470 909692 : || (REG_P (folded_arg1)
3471 144902 : && (REG_QTY (REGNO (folded_arg1)) == ent->comparison_qty))))
3472 : {
3473 9378 : if (comparison_dominates_p (ent->comparison_code, code))
3474 : {
3475 6715 : if (true_rtx)
3476 : return true_rtx;
3477 : else
3478 : break;
3479 : }
3480 : else
3481 : return false_rtx;
3482 : }
3483 : }
3484 : }
3485 : }
3486 : }
3487 :
3488 : /* If we are comparing against zero, see if the first operand is
3489 : equivalent to an IOR with a constant. If so, we may be able to
3490 : determine the result of this comparison. */
3491 20547008 : if (const_arg1 == const0_rtx && !const_arg0)
3492 : {
3493 9819349 : rtx y = lookup_as_function (folded_arg0, IOR);
3494 9819349 : rtx inner_const;
3495 :
3496 9819349 : if (y != 0
3497 68651 : && (inner_const = equiv_constant (XEXP (y, 1))) != 0
3498 78 : && CONST_INT_P (inner_const)
3499 9819427 : && INTVAL (inner_const) != 0)
3500 78 : folded_arg0 = gen_rtx_IOR (mode_arg0, XEXP (y, 0), inner_const);
3501 : }
3502 :
3503 20539816 : {
3504 20539816 : rtx op0 = const_arg0 ? const_arg0 : copy_rtx (folded_arg0);
3505 20547008 : rtx op1 = const_arg1 ? const_arg1 : copy_rtx (folded_arg1);
3506 20547008 : new_rtx = simplify_relational_operation (code, mode, mode_arg0,
3507 : op0, op1);
3508 : }
3509 20547008 : break;
3510 :
3511 62528132 : case RTX_BIN_ARITH:
3512 62528132 : case RTX_COMM_ARITH:
3513 62528132 : switch (code)
3514 : {
3515 26805077 : case PLUS:
3516 : /* If the second operand is a LABEL_REF, see if the first is a MINUS
3517 : with that LABEL_REF as its second operand. If so, the result is
3518 : the first operand of that MINUS. This handles switches with an
3519 : ADDR_DIFF_VEC table. */
3520 26805077 : if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
3521 : {
3522 2891 : rtx y
3523 2891 : = GET_CODE (folded_arg0) == MINUS ? folded_arg0
3524 2891 : : lookup_as_function (folded_arg0, MINUS);
3525 :
3526 0 : if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
3527 2891 : && label_ref_label (XEXP (y, 1)) == label_ref_label (const_arg1))
3528 0 : return XEXP (y, 0);
3529 :
3530 : /* Now try for a CONST of a MINUS like the above. */
3531 2891 : if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
3532 2891 : : lookup_as_function (folded_arg0, CONST))) != 0
3533 0 : && GET_CODE (XEXP (y, 0)) == MINUS
3534 0 : && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
3535 2891 : && label_ref_label (XEXP (XEXP (y, 0), 1)) == label_ref_label (const_arg1))
3536 0 : return XEXP (XEXP (y, 0), 0);
3537 : }
3538 :
3539 : /* Likewise if the operands are in the other order. */
3540 26805077 : if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
3541 : {
3542 23 : rtx y
3543 23 : = GET_CODE (folded_arg1) == MINUS ? folded_arg1
3544 23 : : lookup_as_function (folded_arg1, MINUS);
3545 :
3546 0 : if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
3547 23 : && label_ref_label (XEXP (y, 1)) == label_ref_label (const_arg0))
3548 0 : return XEXP (y, 0);
3549 :
3550 : /* Now try for a CONST of a MINUS like the above. */
3551 23 : if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
3552 23 : : lookup_as_function (folded_arg1, CONST))) != 0
3553 0 : && GET_CODE (XEXP (y, 0)) == MINUS
3554 0 : && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
3555 23 : && label_ref_label (XEXP (XEXP (y, 0), 1)) == label_ref_label (const_arg0))
3556 0 : return XEXP (XEXP (y, 0), 0);
3557 : }
3558 :
3559 : /* If second operand is a register equivalent to a negative
3560 : CONST_INT, see if we can find a register equivalent to the
3561 : positive constant. Make a MINUS if so. Don't do this for
3562 : a non-negative constant since we might then alternate between
3563 : choosing positive and negative constants. Having the positive
3564 : constant previously-used is the more common case. Be sure
3565 : the resulting constant is non-negative; if const_arg1 were
3566 : the smallest negative number this would overflow: depending
3567 : on the mode, this would either just be the same value (and
3568 : hence not save anything) or be incorrect. */
3569 26805077 : if (const_arg1 != 0 && CONST_INT_P (const_arg1)
3570 21508264 : && INTVAL (const_arg1) < 0
3571 : /* This used to test
3572 :
3573 : -INTVAL (const_arg1) >= 0
3574 :
3575 : But The Sun V5.0 compilers mis-compiled that test. So
3576 : instead we test for the problematic value in a more direct
3577 : manner and hope the Sun compilers get it correct. */
3578 12051515 : && INTVAL (const_arg1) !=
3579 : (HOST_WIDE_INT_1 << (HOST_BITS_PER_WIDE_INT - 1))
3580 12032489 : && REG_P (folded_arg1))
3581 : {
3582 32192 : rtx new_const = GEN_INT (-INTVAL (const_arg1));
3583 32192 : struct table_elt *p
3584 32192 : = lookup (new_const, SAFE_HASH (new_const, mode), mode);
3585 :
3586 32192 : if (p)
3587 5027 : for (p = p->first_same_value; p; p = p->next_same_value)
3588 5026 : if (REG_P (p->exp))
3589 2619 : return simplify_gen_binary (MINUS, mode, folded_arg0,
3590 2619 : canon_reg (p->exp, NULL));
3591 : }
3592 26802458 : goto from_plus;
3593 :
3594 2118427 : case MINUS:
3595 : /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
3596 : If so, produce (PLUS Z C2-C). */
3597 2118427 : if (const_arg1 != 0 && poly_int_rtx_p (const_arg1, &xval))
3598 : {
3599 43639 : rtx y = lookup_as_function (XEXP (x, 0), PLUS);
3600 43639 : if (y && poly_int_rtx_p (XEXP (y, 1)))
3601 27 : return fold_rtx (plus_constant (mode, copy_rtx (y), -xval),
3602 27 : NULL);
3603 : }
3604 :
3605 : /* Fall through. */
3606 :
3607 39120625 : from_plus:
3608 39120625 : case SMIN: case SMAX: case UMIN: case UMAX:
3609 39120625 : case IOR: case AND: case XOR:
3610 39120625 : case MULT:
3611 39120625 : case ASHIFT: case LSHIFTRT: case ASHIFTRT:
3612 : /* If we have (<op> <reg> <const_int>) for an associative OP and REG
3613 : is known to be of similar form, we may be able to replace the
3614 : operation with a combined operation. This may eliminate the
3615 : intermediate operation if every use is simplified in this way.
3616 : Note that the similar optimization done by combine.cc only works
3617 : if the intermediate operation's result has only one reference. */
3618 :
3619 39120625 : if (REG_P (folded_arg0)
3620 35850885 : && const_arg1 && CONST_INT_P (const_arg1))
3621 : {
3622 26309794 : int is_shift
3623 26309794 : = (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
3624 : rtx y, inner_const, new_const;
3625 : rtx canon_const_arg1 = const_arg1;
3626 : enum rtx_code associate_code;
3627 :
3628 : if (is_shift
3629 6380800 : && (INTVAL (const_arg1) >= GET_MODE_UNIT_PRECISION (mode)
3630 3190287 : || INTVAL (const_arg1) < 0))
3631 : {
3632 : if (SHIFT_COUNT_TRUNCATED)
3633 : canon_const_arg1 = gen_int_shift_amount
3634 : (mode, (INTVAL (const_arg1)
3635 : & (GET_MODE_UNIT_BITSIZE (mode) - 1)));
3636 : else
3637 : break;
3638 : }
3639 :
3640 26309671 : y = lookup_as_function (folded_arg0, code);
3641 26309671 : if (y == 0)
3642 : break;
3643 :
3644 : /* If we have compiled a statement like
3645 : "if (x == (x & mask1))", and now are looking at
3646 : "x & mask2", we will have a case where the first operand
3647 : of Y is the same as our first operand. Unless we detect
3648 : this case, an infinite loop will result. */
3649 838293 : if (XEXP (y, 0) == folded_arg0)
3650 : break;
3651 :
3652 838022 : inner_const = equiv_constant (fold_rtx (XEXP (y, 1), 0));
3653 838022 : if (!inner_const || !CONST_INT_P (inner_const))
3654 : break;
3655 :
3656 : /* Don't associate these operations if they are a PLUS with the
3657 : same constant and it is a power of two. These might be doable
3658 : with a pre- or post-increment. Similarly for two subtracts of
3659 : identical powers of two with post decrement. */
3660 :
3661 494757 : if (code == PLUS && const_arg1 == inner_const
3662 : && ((HAVE_PRE_INCREMENT
3663 : && pow2p_hwi (INTVAL (const_arg1)))
3664 : || (HAVE_POST_INCREMENT
3665 : && pow2p_hwi (INTVAL (const_arg1)))
3666 : || (HAVE_PRE_DECREMENT
3667 : && pow2p_hwi (- INTVAL (const_arg1)))
3668 : || (HAVE_POST_DECREMENT
3669 : && pow2p_hwi (- INTVAL (const_arg1)))))
3670 : break;
3671 :
3672 : /* ??? Vector mode shifts by scalar
3673 : shift operand are not supported yet. */
3674 494757 : if (is_shift && VECTOR_MODE_P (mode))
3675 : break;
3676 :
3677 4004 : if (is_shift
3678 8008 : && (INTVAL (inner_const) >= GET_MODE_UNIT_PRECISION (mode)
3679 4004 : || INTVAL (inner_const) < 0))
3680 : {
3681 : if (SHIFT_COUNT_TRUNCATED)
3682 : inner_const = gen_int_shift_amount
3683 : (mode, (INTVAL (inner_const)
3684 : & (GET_MODE_UNIT_BITSIZE (mode) - 1)));
3685 : else
3686 : break;
3687 : }
3688 :
3689 : /* Compute the code used to compose the constants. For example,
3690 : A-C1-C2 is A-(C1 + C2), so if CODE == MINUS, we want PLUS. */
3691 :
3692 494492 : associate_code = (is_shift || code == MINUS ? PLUS : code);
3693 :
3694 494492 : new_const = simplify_binary_operation (associate_code, mode,
3695 : canon_const_arg1,
3696 : inner_const);
3697 :
3698 494492 : if (new_const == 0)
3699 : break;
3700 :
3701 : /* If we are associating shift operations, don't let this
3702 : produce a shift of the size of the object or larger.
3703 : This could occur when we follow a sign-extend by a right
3704 : shift on a machine that does a sign-extend as a pair
3705 : of shifts. */
3706 :
3707 494492 : if (is_shift
3708 4004 : && CONST_INT_P (new_const)
3709 502500 : && INTVAL (new_const) >= GET_MODE_UNIT_PRECISION (mode))
3710 : {
3711 : /* As an exception, we can turn an ASHIFTRT of this
3712 : form into a shift of the number of bits - 1. */
3713 1526 : if (code == ASHIFTRT)
3714 1503 : new_const = gen_int_shift_amount
3715 1503 : (mode, GET_MODE_UNIT_BITSIZE (mode) - 1);
3716 23 : else if (!side_effects_p (XEXP (y, 0)))
3717 23 : return CONST0_RTX (mode);
3718 : else
3719 : break;
3720 : }
3721 :
3722 494469 : y = copy_rtx (XEXP (y, 0));
3723 :
3724 : /* If Y contains our first operand (the most common way this
3725 : can happen is if Y is a MEM), we would do into an infinite
3726 : loop if we tried to fold it. So don't in that case. */
3727 :
3728 494469 : if (! reg_mentioned_p (folded_arg0, y))
3729 494469 : y = fold_rtx (y, insn);
3730 :
3731 494469 : return simplify_gen_binary (code, mode, y, new_const);
3732 : }
3733 : break;
3734 :
3735 : case DIV: case UDIV:
3736 : /* ??? The associative optimization performed immediately above is
3737 : also possible for DIV and UDIV using associate_code of MULT.
3738 : However, we would need extra code to verify that the
3739 : multiplication does not overflow, that is, there is no overflow
3740 : in the calculation of new_const. */
3741 : break;
3742 :
3743 : default:
3744 : break;
3745 : }
3746 :
3747 105702008 : new_rtx = simplify_binary_operation (code, mode,
3748 : const_arg0 ? const_arg0 : folded_arg0,
3749 : const_arg1 ? const_arg1 : folded_arg1);
3750 62030994 : break;
3751 :
3752 0 : case RTX_OBJ:
3753 : /* (lo_sum (high X) X) is simply X. */
3754 0 : if (code == LO_SUM && const_arg0 != 0
3755 0 : && GET_CODE (const_arg0) == HIGH
3756 0 : && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
3757 : return const_arg1;
3758 : break;
3759 :
3760 20667635 : case RTX_TERNARY:
3761 20667635 : case RTX_BITFIELD_OPS:
3762 20667635 : new_rtx = simplify_ternary_operation (code, mode, mode_arg0,
3763 : const_arg0 ? const_arg0 : folded_arg0,
3764 : const_arg1 ? const_arg1 : folded_arg1,
3765 : const_arg2 ? const_arg2 : XEXP (x, 2));
3766 20667635 : break;
3767 :
3768 : default:
3769 : break;
3770 : }
3771 :
3772 110394738 : return new_rtx ? new_rtx : x;
3773 : }
3774 :
3775 : /* Return a constant value currently equivalent to X.
3776 : Return 0 if we don't know one. */
3777 :
3778 : static rtx
3779 266737049 : equiv_constant (rtx x)
3780 : {
3781 266737049 : if (REG_P (x)
3782 266737049 : && REGNO_QTY_VALID_P (REGNO (x)))
3783 : {
3784 85082491 : int x_q = REG_QTY (REGNO (x));
3785 85082491 : struct qty_table_elem *x_ent = &qty_table[x_q];
3786 :
3787 85082491 : if (x_ent->const_rtx)
3788 4423781 : x = gen_lowpart (GET_MODE (x), x_ent->const_rtx);
3789 : }
3790 :
3791 266737049 : if (x == 0 || CONSTANT_P (x))
3792 26198757 : return x;
3793 :
3794 240538292 : if (GET_CODE (x) == SUBREG)
3795 : {
3796 5430057 : machine_mode mode = GET_MODE (x);
3797 5430057 : machine_mode imode = GET_MODE (SUBREG_REG (x));
3798 5430057 : rtx new_rtx;
3799 :
3800 : /* See if we previously assigned a constant value to this SUBREG. */
3801 5430057 : if ((new_rtx = lookup_as_function (x, CONST_INT)) != 0
3802 5419886 : || (new_rtx = lookup_as_function (x, CONST_WIDE_INT)) != 0
3803 5419886 : || (NUM_POLY_INT_COEFFS > 1
3804 : && (new_rtx = lookup_as_function (x, CONST_POLY_INT)) != 0)
3805 5413970 : || (new_rtx = lookup_as_function (x, CONST_DOUBLE)) != 0
3806 10843839 : || (new_rtx = lookup_as_function (x, CONST_FIXED)) != 0)
3807 16275 : return new_rtx;
3808 :
3809 : /* If we didn't and if doing so makes sense, see if we previously
3810 : assigned a constant value to the enclosing word mode SUBREG. */
3811 11660435 : if (known_lt (GET_MODE_SIZE (mode), UNITS_PER_WORD)
3812 8351959 : && known_lt (UNITS_PER_WORD, GET_MODE_SIZE (imode)))
3813 : {
3814 31674 : poly_int64 byte = (SUBREG_BYTE (x)
3815 31674 : - subreg_lowpart_offset (mode, word_mode));
3816 63348 : if (known_ge (byte, 0) && multiple_p (byte, UNITS_PER_WORD))
3817 : {
3818 31674 : rtx y = gen_rtx_SUBREG (word_mode, SUBREG_REG (x), byte);
3819 31674 : new_rtx = lookup_as_function (y, CONST_INT);
3820 31674 : if (new_rtx)
3821 0 : return gen_lowpart (mode, new_rtx);
3822 : }
3823 : }
3824 :
3825 : /* Otherwise see if we already have a constant for the inner REG,
3826 : and if that is enough to calculate an equivalent constant for
3827 : the subreg. Note that the upper bits of paradoxical subregs
3828 : are undefined, so they cannot be said to equal anything. */
3829 5413782 : if (REG_P (SUBREG_REG (x))
3830 5404862 : && !paradoxical_subreg_p (x)
3831 10631124 : && (new_rtx = equiv_constant (SUBREG_REG (x))) != 0)
3832 85792 : return simplify_subreg (mode, new_rtx, imode, SUBREG_BYTE (x));
3833 :
3834 5327990 : return 0;
3835 : }
3836 :
3837 : /* If X is a MEM, see if it is a constant-pool reference, or look it up in
3838 : the hash table in case its value was seen before. */
3839 :
3840 235108235 : if (MEM_P (x))
3841 : {
3842 67797998 : struct table_elt *elt;
3843 :
3844 67797998 : x = avoid_constant_pool_reference (x);
3845 67797998 : if (CONSTANT_P (x))
3846 : return x;
3847 :
3848 65708614 : elt = lookup (x, SAFE_HASH (x, GET_MODE (x)), GET_MODE (x));
3849 65708614 : if (elt == 0)
3850 : return 0;
3851 :
3852 5440913 : for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
3853 3855534 : if (elt->is_const && CONSTANT_P (elt->exp))
3854 : return elt->exp;
3855 : }
3856 :
3857 : return 0;
3858 : }
3859 :
3860 : /* Given INSN, a jump insn, TAKEN indicates if we are following the
3861 : "taken" branch.
3862 :
3863 : In certain cases, this can cause us to add an equivalence. For example,
3864 : if we are following the taken case of
3865 : if (i == 2)
3866 : we can add the fact that `i' and '2' are now equivalent.
3867 :
3868 : In any case, we can record that this comparison was passed. If the same
3869 : comparison is seen later, we will know its value. */
3870 :
3871 : static void
3872 14623740 : record_jump_equiv (rtx_insn *insn, bool taken)
3873 : {
3874 14623740 : int cond_known_true;
3875 14623740 : rtx op0, op1;
3876 14623740 : rtx set;
3877 14623740 : machine_mode mode, mode0, mode1;
3878 14623740 : enum rtx_code code;
3879 :
3880 : /* Ensure this is the right kind of insn. */
3881 14623740 : gcc_assert (any_condjump_p (insn));
3882 :
3883 14623740 : set = pc_set (insn);
3884 :
3885 : /* See if this jump condition is known true or false. */
3886 14623740 : if (taken)
3887 5868628 : cond_known_true = (XEXP (SET_SRC (set), 2) == pc_rtx);
3888 : else
3889 8755112 : cond_known_true = (XEXP (SET_SRC (set), 1) == pc_rtx);
3890 :
3891 : /* Get the type of comparison being done and the operands being compared.
3892 : If we had to reverse a non-equality condition, record that fact so we
3893 : know that it isn't valid for floating-point. */
3894 14623740 : code = GET_CODE (XEXP (SET_SRC (set), 0));
3895 14623740 : op0 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 0), insn);
3896 14623740 : op1 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 1), insn);
3897 :
3898 : /* If fold_rtx returns NULL_RTX, there's nothing to record. */
3899 14623740 : if (op0 == NULL_RTX || op1 == NULL_RTX)
3900 84204 : return;
3901 :
3902 14623740 : code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
3903 14623740 : if (! cond_known_true)
3904 : {
3905 8755112 : code = reversed_comparison_code_parts (code, op0, op1, insn);
3906 :
3907 : /* Don't remember if we can't find the inverse. */
3908 8755112 : if (code == UNKNOWN)
3909 : return;
3910 : }
3911 :
3912 : /* The mode is the mode of the non-constant. */
3913 14539536 : mode = mode0;
3914 14539536 : if (mode1 != VOIDmode)
3915 3747528 : mode = mode1;
3916 :
3917 14539536 : record_jump_cond (code, mode, op0, op1);
3918 : }
3919 :
3920 : /* Yet another form of subreg creation. In this case, we want something in
3921 : MODE, and we should assume OP has MODE iff it is naturally modeless. */
3922 :
3923 : static rtx
3924 67992 : record_jump_cond_subreg (machine_mode mode, rtx op)
3925 : {
3926 67992 : machine_mode op_mode = GET_MODE (op);
3927 67992 : if (op_mode == mode || op_mode == VOIDmode)
3928 : return op;
3929 6972 : return lowpart_subreg (mode, op, op_mode);
3930 : }
3931 :
3932 : /* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
3933 : Make any useful entries we can with that information. Called from
3934 : above function and called recursively. */
3935 :
static void
record_jump_cond (enum rtx_code code, machine_mode mode, rtx op0, rtx op1)
{
  unsigned op0_hash, op1_hash;
  int op0_in_memory, op1_in_memory;
  struct table_elt *op0_elt, *op1_elt;

  /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
     we know that they are also equal in the smaller mode (this is also
     true for all smaller modes whether or not there is a SUBREG, but
     is not worth testing for with no SUBREG).  */

  /* Note that GET_MODE (op0) may not equal MODE.  */
  if (code == EQ && paradoxical_subreg_p (op0))
    {
      machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
      rtx tem = record_jump_cond_subreg (inner_mode, op1);
      if (tem)
	record_jump_cond (code, mode, SUBREG_REG (op0), tem);
    }

  if (code == EQ && paradoxical_subreg_p (op1))
    {
      machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
      rtx tem = record_jump_cond_subreg (inner_mode, op0);
      if (tem)
	record_jump_cond (code, mode, SUBREG_REG (op1), tem);
    }

  /* Similarly, if this is an NE comparison, and either is a SUBREG
     making a smaller mode, we know the whole thing is also NE.  */

  /* Note that GET_MODE (op0) may not equal MODE;
     if we test MODE instead, we can get an infinite recursion
     alternating between two modes each wider than MODE.  */

  if (code == NE
      && partial_subreg_p (op0)
      && subreg_lowpart_p (op0))
    {
      machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
      rtx tem = record_jump_cond_subreg (inner_mode, op1);
      if (tem)
	record_jump_cond (code, mode, SUBREG_REG (op0), tem);
    }

  if (code == NE
      && partial_subreg_p (op1)
      && subreg_lowpart_p (op1))
    {
      machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
      rtx tem = record_jump_cond_subreg (inner_mode, op0);
      if (tem)
	record_jump_cond (code, mode, SUBREG_REG (op1), tem);
    }

  /* Hash both operands.  HASH communicates through the globals
     do_not_record (operand cannot be meaningfully hashed, e.g. it is
     volatile) and hash_arg_in_memory, so both must be reset before
     each call and checked afterwards.  */

  do_not_record = 0;
  hash_arg_in_memory = 0;
  op0_hash = HASH (op0, mode);
  op0_in_memory = hash_arg_in_memory;

  if (do_not_record)
    return;

  do_not_record = 0;
  hash_arg_in_memory = 0;
  op1_hash = HASH (op1, mode);
  op1_in_memory = hash_arg_in_memory;

  if (do_not_record)
    return;

  /* Look up both operands.  */
  op0_elt = lookup (op0, op0_hash, mode);
  op1_elt = lookup (op1, op1_hash, mode);

  /* If both operands are already equivalent or if they are not in the
     table but are identical, do nothing.  */
  if ((op0_elt != 0 && op1_elt != 0
       && op0_elt->first_same_value == op1_elt->first_same_value)
      || op0 == op1 || rtx_equal_p (op0, op1))
    return;

  /* If we aren't setting two things equal all we can do is save this
     comparison.   Similarly if this is floating-point.  In the latter
     case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
     If we record the equality, we might inadvertently delete code
     whose intent was to change -0 to +0.  */

  if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
    {
      struct qty_table_elem *ent;
      int qty;

      /* If OP0 is not a register, or if OP1 is neither a register
	 or constant, we can't do anything.  */

      if (!REG_P (op1))
	op1 = equiv_constant (op1);

      if (!REG_P (op0) || op1 == 0)
	return;

      /* Put OP0 in the hash table if it isn't already.  This gives it a
	 new quantity number.  */
      if (op0_elt == 0)
	{
	  if (insert_regs (op0, NULL, false))
	    {
	      rehash_using_reg (op0);
	      op0_hash = HASH (op0, mode);

	      /* If OP0 is contained in OP1, this changes its hash code
		 as well.  Faster to rehash than to check, except
		 for the simple case of a constant.  */
	      if (! CONSTANT_P (op1))
		op1_hash = HASH (op1,mode);
	    }

	  op0_elt = insert (op0, NULL, op0_hash, mode);
	  op0_elt->in_memory = op0_in_memory;
	}

      /* Record the comparison in OP0's quantity entry so a later
	 identical (or dominated) comparison can be resolved.  */
      qty = REG_QTY (REGNO (op0));
      ent = &qty_table[qty];

      ent->comparison_code = code;
      if (REG_P (op1))
	{
	  /* Look it up again--in case op0 and op1 are the same.  */
	  op1_elt = lookup (op1, op1_hash, mode);

	  /* Put OP1 in the hash table so it gets a new quantity number.  */
	  if (op1_elt == 0)
	    {
	      if (insert_regs (op1, NULL, false))
		{
		  rehash_using_reg (op1);
		  op1_hash = HASH (op1, mode);
		}

	      op1_elt = insert (op1, NULL, op1_hash, mode);
	      op1_elt->in_memory = op1_in_memory;
	    }

	  ent->comparison_const = NULL_RTX;
	  ent->comparison_qty = REG_QTY (REGNO (op1));
	}
      else
	{
	  /* OP1 is a constant.  INT_MIN can never match a real quantity
	     number, so it marks comparison_qty as unused.  */
	  ent->comparison_const = op1;
	  ent->comparison_qty = INT_MIN;
	}

      return;
    }

  /* If either side is still missing an equivalence, make it now,
     then merge the equivalences.  */

  if (op0_elt == 0)
    {
      if (insert_regs (op0, NULL, false))
	{
	  rehash_using_reg (op0);
	  op0_hash = HASH (op0, mode);
	}

      op0_elt = insert (op0, NULL, op0_hash, mode);
      op0_elt->in_memory = op0_in_memory;
    }

  if (op1_elt == 0)
    {
      if (insert_regs (op1, NULL, false))
	{
	  rehash_using_reg (op1);
	  op1_hash = HASH (op1, mode);
	}

      op1_elt = insert (op1, NULL, op1_hash, mode);
      op1_elt->in_memory = op1_in_memory;
    }

  merge_equiv_classes (op0_elt, op1_elt);
}
4124 :
4125 : /* CSE processing for one instruction.
4126 :
4127 : Most "true" common subexpressions are mostly optimized away in GIMPLE,
4128 : but the few that "leak through" are cleaned up by cse_insn, and complex
4129 : addressing modes are often formed here.
4130 :
4131 : The main function is cse_insn, and between here and that function
4132             :    a couple of helper functions are defined to keep the size of cse_insn
4133 : within reasonable proportions.
4134 :
4135 : Data is shared between the main and helper functions via STRUCT SET,
4136 : that contains all data related for every set in the instruction that
4137 : is being processed.
4138 :
4139 : Note that cse_main processes all sets in the instruction. Most
4140 : passes in GCC only process simple SET insns or single_set insns, but
4141 : CSE processes insns with multiple sets as well. */
4142 :
4143 : /* Data on one SET contained in the instruction. */
4144 :
struct set
{
  /* The SET rtx itself.  */
  rtx rtl;
  /* The SET_SRC of the rtx (the original value, if it is changing).  */
  rtx src;
  /* The hash-table element for the SET_SRC of the SET.  */
  struct table_elt *src_elt;
  /* Hash value for the SET_SRC.  */
  unsigned src_hash;
  /* Hash value for the SET_DEST.  */
  unsigned dest_hash;
  /* The SET_DEST, with SUBREG, etc., stripped.  */
  rtx inner_dest;
  /* Original machine mode, in case it becomes a CONST_INT.  */
  ENUM_BITFIELD(machine_mode) mode : MACHINE_MODE_BITSIZE;
  /* Nonzero if the SET_SRC is in memory.  */
  unsigned int src_in_memory : 1;
  /* Nonzero if the SET_SRC contains something
     whose value cannot be predicted and understood.  */
  unsigned int src_volatile : 1;
  /* Nonzero if RTL is an artificial set that has been created to describe
     part of an insn's effect.  Zero means that RTL appears directly in
     the insn pattern.  */
  unsigned int is_fake_set : 1;
  /* Hash value of constant equivalent for SET_SRC.  */
  unsigned src_const_hash;
  /* A constant equivalent for SET_SRC, if any.  */
  rtx src_const;
  /* Table entry for constant equivalent for SET_SRC, if any.  */
  struct table_elt *src_const_elt;
  /* Table entry for the destination address.  */
  struct table_elt *dest_addr_elt;
};
4179 :
4180 : /* Special handling for (set REG0 REG1) where REG0 is the
4181 : "cheapest", cheaper than REG1. After cse, REG1 will probably not
4182 : be used in the sequel, so (if easily done) change this insn to
4183 : (set REG1 REG0) and replace REG1 with REG0 in the previous insn
4184 : that computed their value. Then REG1 will become a dead store
4185 : and won't cloud the situation for later optimizations.
4186 :
4187 : Do not make this change if REG1 is a hard register, because it will
4188 : then be used in the sequel and we may be changing a two-operand insn
4189 : into a three-operand insn.
4190 :
4191 : This is the last transformation that cse_insn will try to do. */
4192 :
static void
try_back_substitute_reg (rtx set, rtx_insn *insn)
{
  rtx dest = SET_DEST (set);
  rtx src = SET_SRC (set);

  if (REG_P (dest)
      && REG_P (src) && ! HARD_REGISTER_P (src)
      && REGNO_QTY_VALID_P (REGNO (src)))
    {
      int src_q = REG_QTY (REGNO (src));
      struct qty_table_elem *src_ent = &qty_table[src_q];

      /* Only act when DEST is the first (cheapest) register of SRC's
	 equivalence class.  */
      if (src_ent->first_reg == REGNO (dest))
	{
	  /* Scan for the previous nonnote insn, but stop at a basic
	     block boundary.  */
	  rtx_insn *prev = insn;
	  rtx_insn *bb_head = BB_HEAD (BLOCK_FOR_INSN (insn));
	  do
	    {
	      prev = PREV_INSN (prev);
	    }
	  while (prev != bb_head && (NOTE_P (prev) || DEBUG_INSN_P (prev)));

	  /* Do not swap the registers around if the previous instruction
	     attaches a REG_EQUIV note to REG1.

	     ??? It's not entirely clear whether we can transfer a REG_EQUIV
	     from the pseudo that originally shadowed an incoming argument
	     to another register.  Some uses of REG_EQUIV might rely on it
	     being attached to REG1 rather than REG2.

	     This section previously turned the REG_EQUIV into a REG_EQUAL
	     note.  We cannot do that because REG_EQUIV may provide an
	     uninitialized stack slot when REG_PARM_STACK_SPACE is used.  */
	  if (NONJUMP_INSN_P (prev)
	      && GET_CODE (PATTERN (prev)) == SET
	      && SET_DEST (PATTERN (prev)) == src
	      && ! find_reg_note (prev, REG_EQUIV, NULL_RTX))
	    {
	      rtx note;

	      /* Queue all three replacements as one group so they are
		 committed (or rejected) atomically by
		 apply_change_group.  */
	      validate_change (prev, &SET_DEST (PATTERN (prev)), dest, 1);
	      validate_change (insn, &SET_DEST (set), src, 1);
	      validate_change (insn, &SET_SRC (set), dest, 1);
	      apply_change_group ();

	      /* If INSN has a REG_EQUAL note, and this note mentions
		 REG0, then we must delete it, because the value in
		 REG0 has changed.  If the note's value is REG1, we must
		 also delete it because that is now this insn's dest.  */
	      note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
	      if (note != 0
		  && (reg_mentioned_p (dest, XEXP (note, 0))
		      || rtx_equal_p (src, XEXP (note, 0))))
		remove_note (insn, note);

	      /* If INSN has a REG_ARGS_SIZE note, move it to PREV.  */
	      note = find_reg_note (insn, REG_ARGS_SIZE, NULL_RTX);
	      if (note != 0)
		{
		  remove_note (insn, note);
		  gcc_assert (!find_reg_note (prev, REG_ARGS_SIZE, NULL_RTX));
		  set_unique_reg_note (prev, REG_ARGS_SIZE, XEXP (note, 0));
		}
	    }
	}
    }
}
4263 :
4264 : /* Add an entry containing RTL X into SETS. IS_FAKE_SET is true if X is
4265             :    an artificial set that has been created to describe part of an insn's
4266 : effect. */
4267 : static inline void
4268 191986845 : add_to_set (vec<struct set> *sets, rtx x, bool is_fake_set)
4269 : {
4270 191986845 : struct set entry = {};
4271 191986845 : entry.rtl = x;
4272 191986845 : entry.is_fake_set = is_fake_set;
4273 191986845 : sets->safe_push (entry);
4274 191986845 : }
4275 :
4276 : /* Record all the SETs in this instruction into SETS_PTR,
4277 : and return the number of recorded sets. */
4278 : static int
4279 388320734 : find_sets_in_insn (rtx_insn *insn, vec<struct set> *psets)
4280 : {
4281 388320734 : rtx x = PATTERN (insn);
4282 :
4283 388320734 : if (GET_CODE (x) == SET)
4284 : {
4285 : /* Ignore SETs that are unconditional jumps.
4286 : They never need cse processing, so this does not hurt.
4287 : The reason is not efficiency but rather
4288 : so that we can test at the end for instructions
4289 : that have been simplified to unconditional jumps
4290 : and not be misled by unchanged instructions
4291 : that were unconditional jumps to begin with. */
4292 167199993 : if (SET_DEST (x) == pc_rtx
4293 19970976 : && GET_CODE (SET_SRC (x)) == LABEL_REF)
4294 : ;
4295 : /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
4296 : The hard function value register is used only once, to copy to
4297 : someplace else, so it isn't worth cse'ing. */
4298 167199690 : else if (GET_CODE (SET_SRC (x)) == CALL)
4299 : ;
4300 160111571 : else if (GET_CODE (SET_SRC (x)) == CONST_VECTOR
4301 639578 : && GET_MODE_CLASS (GET_MODE (SET_SRC (x))) != MODE_VECTOR_BOOL
4302 : /* Prevent duplicates from being generated if the type is a V1
4303 : type and a subreg. Folding this will result in the same
4304 : element as folding x itself. */
4305 160751149 : && !(SUBREG_P (SET_DEST (x))
4306 68 : && known_eq (GET_MODE_NUNITS (GET_MODE (SET_SRC (x))), 1)))
4307 : {
4308 : /* First register the vector itself. */
4309 639578 : add_to_set (psets, x, false);
4310 639578 : rtx src = SET_SRC (x);
4311 : /* Go over the constants of the CONST_VECTOR in forward order, to
4312 : put them in the same order in the SETS array. */
4313 1279306 : for (unsigned i = 0; i < const_vector_encoded_nelts (src) ; i++)
4314 : {
4315 : /* These are templates and don't actually get emitted but are
4316 : used to tell CSE how to get to a particular constant. */
4317 639728 : rtx y = simplify_gen_vec_select (SET_DEST (x), i);
4318 639728 : gcc_assert (y);
4319 639728 : if (!REG_P (y))
4320 : {
4321 638855 : rtx set = gen_rtx_SET (y, CONST_VECTOR_ELT (src, i));
4322 638855 : add_to_set (psets, set, true);
4323 : }
4324 : }
4325 : }
4326 : else
4327 159471993 : add_to_set (psets, x, false);
4328 : }
4329 221120741 : else if (GET_CODE (x) == PARALLEL)
4330 : {
4331 30500061 : int i, lim = XVECLEN (x, 0);
4332 :
4333 : /* Go over the expressions of the PARALLEL in forward order, to
4334 : put them in the same order in the SETS array. */
4335 92697688 : for (i = 0; i < lim; i++)
4336 : {
4337 62197627 : rtx y = XVECEXP (x, 0, i);
4338 62197627 : if (GET_CODE (y) == SET)
4339 : {
4340 : /* As above, we ignore unconditional jumps and call-insns and
4341 : ignore the result of apply_change_group. */
4342 31246620 : if (SET_DEST (y) == pc_rtx
4343 17307 : && GET_CODE (SET_SRC (y)) == LABEL_REF)
4344 : ;
4345 31246620 : else if (GET_CODE (SET_SRC (y)) == CALL)
4346 : ;
4347 : else
4348 31236419 : add_to_set (psets, y, false);
4349 : }
4350 : }
4351 : }
4352 :
4353 388320734 : return psets->length ();
4354 : }
4355 :
4356 : /* Subroutine of canonicalize_insn. X is an ASM_OPERANDS in INSN. */
4357 :
4358 : static void
4359 99549 : canon_asm_operands (rtx x, rtx_insn *insn)
4360 : {
4361 128868 : for (int i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
4362 : {
4363 29319 : rtx input = ASM_OPERANDS_INPUT (x, i);
4364 29319 : if (!(REG_P (input) && HARD_REGISTER_P (input)))
4365 : {
4366 28933 : input = canon_reg (input, insn);
4367 28933 : validate_change (insn, &ASM_OPERANDS_INPUT (x, i), input, 1);
4368 : }
4369 : }
4370 99549 : }
4371 :
4372 : /* Where possible, substitute every register reference in the N_SETS
4373 : number of SETS in INSN with the canonical register.
4374 :
   Register canonicalization propagates the earliest register (i.e.
4376 : one that is set before INSN) with the same value. This is a very
4377 : useful, simple form of CSE, to clean up warts from expanding GIMPLE
4378 : to RTL. For instance, a CONST for an address is usually expanded
4379 : multiple times to loads into different registers, thus creating many
4380 : subexpressions of the form:
4381 :
4382 : (set (reg1) (some_const))
4383 : (set (mem (... reg1 ...) (thing)))
4384 : (set (reg2) (some_const))
4385 : (set (mem (... reg2 ...) (thing)))
4386 :
4387 : After canonicalizing, the code takes the following form:
4388 :
4389 : (set (reg1) (some_const))
4390 : (set (mem (... reg1 ...) (thing)))
4391 : (set (reg2) (some_const))
4392 : (set (mem (... reg1 ...) (thing)))
4393 :
4394 : The set to reg2 is now trivially dead, and the memory reference (or
4395 : address, or whatever) may be a candidate for further CSEing.
4396 :
4397 : In this function, the result of apply_change_group can be ignored;
4398 : see canon_reg. */
4399 :
static void
canonicalize_insn (rtx_insn *insn, vec<struct set> *psets)
{
  vec<struct set> sets = *psets;
  int n_sets = sets.length ();
  rtx tem;
  rtx x = PATTERN (insn);
  int i;

  /* For calls, canonicalize the registers and memory locations listed
     in CALL_INSN_FUNCTION_USAGE.  SETs in that list are left alone.  */
  if (CALL_P (insn))
    {
      for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
	if (GET_CODE (XEXP (tem, 0)) != SET)
	  XEXP (tem, 0) = canon_reg (XEXP (tem, 0), insn);
    }

  /* Dispatch on the shape of the insn pattern.  Note that the CALL
     cases apply their change group immediately and then fold, rather
     than queueing the changes for the final apply_change_group below.  */
  if (GET_CODE (x) == SET && GET_CODE (SET_SRC (x)) == CALL)
    {
      canon_reg (SET_SRC (x), insn);
      apply_change_group ();
      fold_rtx (SET_SRC (x), insn);
    }
  else if (GET_CODE (x) == CLOBBER)
    {
      /* If we clobber memory, canon the address.
	 This does nothing when a register is clobbered
	 because we have already invalidated the reg.  */
      if (MEM_P (XEXP (x, 0)))
	canon_reg (XEXP (x, 0), insn);
    }
  else if (GET_CODE (x) == USE
	   && ! (REG_P (XEXP (x, 0))
		 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
    /* Canonicalize a USE of a pseudo register or memory location.
       USEs of hard registers are skipped.  */
    canon_reg (x, insn);
  else if (GET_CODE (x) == ASM_OPERANDS)
    canon_asm_operands (x, insn);
  else if (GET_CODE (x) == CALL)
    {
      canon_reg (x, insn);
      apply_change_group ();
      fold_rtx (x, insn);
    }
  else if (DEBUG_INSN_P (insn))
    canon_reg (PATTERN (insn), insn);
  else if (GET_CODE (x) == PARALLEL)
    {
      /* For a PARALLEL, handle each element with the same per-shape
	 rules as the top-level cases above.  SETs whose source is not
	 a CALL are deferred to the sets loop at the end.  */
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
	{
	  rtx y = XVECEXP (x, 0, i);
	  if (GET_CODE (y) == SET && GET_CODE (SET_SRC (y)) == CALL)
	    {
	      canon_reg (SET_SRC (y), insn);
	      apply_change_group ();
	      fold_rtx (SET_SRC (y), insn);
	    }
	  else if (GET_CODE (y) == CLOBBER)
	    {
	      /* As above: only a clobbered memory address needs
		 canonicalizing.  */
	      if (MEM_P (XEXP (y, 0)))
		canon_reg (XEXP (y, 0), insn);
	    }
	  else if (GET_CODE (y) == USE
		   && ! (REG_P (XEXP (y, 0))
			 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
	    canon_reg (y, insn);
	  else if (GET_CODE (y) == ASM_OPERANDS)
	    canon_asm_operands (y, insn);
	  else if (GET_CODE (y) == CALL)
	    {
	      canon_reg (y, insn);
	      apply_change_group ();
	      fold_rtx (y, insn);
	    }
	}
    }

  /* Canonicalize and fold a REG_EQUAL note on a single-set insn, or
     drop the note when it is redundant with the SET_SRC.  */
  if (n_sets == 1 && REG_NOTES (insn) != 0
      && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0)
    {
      /* We potentially will process this insn many times.  Therefore,
	 drop the REG_EQUAL note if it is equal to the SET_SRC of the
	 unique set in INSN.

	 Do not do so if the REG_EQUAL note is for a STRICT_LOW_PART,
	 because cse_insn handles those specially.  */
      if (GET_CODE (SET_DEST (sets[0].rtl)) != STRICT_LOW_PART
	  && rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl)))
	remove_note (insn, tem);
      else
	{
	  canon_reg (XEXP (tem, 0), insn);
	  apply_change_group ();
	  XEXP (tem, 0) = fold_rtx (XEXP (tem, 0), insn);
	  /* The note contents changed in place; tell DF to rescan.  */
	  df_notes_rescan (insn);
	}
    }

  /* Canonicalize sources and addresses of destinations.
     We do this in a separate pass to avoid problems when a MATCH_DUP is
     present in the insn pattern.  In that case, we want to ensure that
     we don't break the duplicate nature of the pattern.  So we will replace
     both operands at the same time.  Otherwise, we would fail to find an
     equivalent substitution in the loop calling validate_change below.

     We used to suppress canonicalization of DEST if it appears in SRC,
     but we don't do this any more.  */

  for (i = 0; i < n_sets; i++)
    {
      rtx dest = SET_DEST (sets[i].rtl);
      rtx src = SET_SRC (sets[i].rtl);
      rtx new_rtx = canon_reg (src, insn);

      validate_change (insn, &SET_SRC (sets[i].rtl), new_rtx, 1);

      /* For a ZERO_EXTRACT destination, the position and width operands
	 (operands 1 and 2) may themselves reference registers.  */
      if (GET_CODE (dest) == ZERO_EXTRACT)
	{
	  validate_change (insn, &XEXP (dest, 1),
			   canon_reg (XEXP (dest, 1), insn), 1);
	  validate_change (insn, &XEXP (dest, 2),
			   canon_reg (XEXP (dest, 2), insn), 1);
	}

      /* Strip wrappers to find the real destination; only a memory
	 destination has an address to canonicalize.  */
      while (GET_CODE (dest) == SUBREG
	     || GET_CODE (dest) == ZERO_EXTRACT
	     || GET_CODE (dest) == STRICT_LOW_PART)
	dest = XEXP (dest, 0);

      if (MEM_P (dest))
	canon_reg (dest, insn);
    }

  /* Now that we have done all the replacements, we can apply the change
     group and see if they all work.  Note that this will cause some
     canonicalizations that would have worked individually not to be applied
     because some other canonicalization didn't work, but this should not
     occur often.

     The result of apply_change_group can be ignored; see canon_reg.  */

  apply_change_group ();
}
4542 :
4543 : /* Main function of CSE.
4544 : First simplify sources and addresses of all assignments
4545 : in the instruction, using previously-computed equivalents values.
4546 : Then install the new sources and destinations in the table
4547 : of available values. */
4548 :
4549 : static void
4550 388320734 : cse_insn (rtx_insn *insn)
4551 : {
4552 388320734 : rtx x = PATTERN (insn);
4553 388320734 : int i;
4554 388320734 : rtx tem;
4555 388320734 : int n_sets = 0;
4556 :
4557 388320734 : rtx src_eqv = 0;
4558 388320734 : struct table_elt *src_eqv_elt = 0;
4559 388320734 : int src_eqv_volatile = 0;
4560 388320734 : int src_eqv_in_memory = 0;
4561 388320734 : unsigned src_eqv_hash = 0;
4562 :
4563 388320734 : this_insn = insn;
4564 :
4565 : /* Find all regs explicitly clobbered in this insn,
4566 : to ensure they are not replaced with any other regs
4567 : elsewhere in this insn. */
4568 388320734 : invalidate_from_sets_and_clobbers (insn);
4569 :
4570 : /* Record all the SETs in this instruction. */
4571 388320734 : auto_vec<struct set, 8> sets;
4572 388320734 : n_sets = find_sets_in_insn (insn, (vec<struct set>*)&sets);
4573 :
4574 : /* Substitute the canonical register where possible. */
4575 388320734 : canonicalize_insn (insn, (vec<struct set>*)&sets);
4576 :
4577 : /* If this insn has a REG_EQUAL note, store the equivalent value in SRC_EQV,
4578 : if different, or if the DEST is a STRICT_LOW_PART/ZERO_EXTRACT. The
4579 : latter condition is necessary because SRC_EQV is handled specially for
4580 : this case, and if it isn't set, then there will be no equivalence
4581 : for the destination. */
4582 189223405 : if (n_sets == 1 && REG_NOTES (insn) != 0
4583 510391974 : && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0)
4584 : {
4585 :
4586 8346614 : if (GET_CODE (SET_DEST (sets[0].rtl)) != ZERO_EXTRACT
4587 8346614 : && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
4588 16603 : || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
4589 8330011 : src_eqv = copy_rtx (XEXP (tem, 0));
4590 : /* If DEST is of the form ZERO_EXTACT, as in:
4591 : (set (zero_extract:SI (reg:SI 119)
4592 : (const_int 16 [0x10])
4593 : (const_int 16 [0x10]))
4594 : (const_int 51154 [0xc7d2]))
4595 : REG_EQUAL note will specify the value of register (reg:SI 119) at this
4596 : point. Note that this is different from SRC_EQV. We can however
4597 : calculate SRC_EQV with the position and width of ZERO_EXTRACT. */
4598 16603 : else if (GET_CODE (SET_DEST (sets[0].rtl)) == ZERO_EXTRACT
4599 0 : && CONST_INT_P (XEXP (tem, 0))
4600 0 : && CONST_INT_P (XEXP (SET_DEST (sets[0].rtl), 1))
4601 16603 : && CONST_INT_P (XEXP (SET_DEST (sets[0].rtl), 2)))
4602 : {
4603 0 : rtx dest_reg = XEXP (SET_DEST (sets[0].rtl), 0);
4604 : /* This is the mode of XEXP (tem, 0) as well. */
4605 0 : scalar_int_mode dest_mode
4606 0 : = as_a <scalar_int_mode> (GET_MODE (dest_reg));
4607 0 : rtx width = XEXP (SET_DEST (sets[0].rtl), 1);
4608 0 : rtx pos = XEXP (SET_DEST (sets[0].rtl), 2);
4609 0 : HOST_WIDE_INT val = INTVAL (XEXP (tem, 0));
4610 0 : HOST_WIDE_INT mask;
4611 0 : unsigned int shift;
4612 0 : if (BITS_BIG_ENDIAN)
4613 : shift = (GET_MODE_PRECISION (dest_mode)
4614 : - INTVAL (pos) - INTVAL (width));
4615 : else
4616 0 : shift = INTVAL (pos);
4617 0 : if (INTVAL (width) == HOST_BITS_PER_WIDE_INT)
4618 : mask = HOST_WIDE_INT_M1;
4619 : else
4620 0 : mask = (HOST_WIDE_INT_1 << INTVAL (width)) - 1;
4621 0 : val = (val >> shift) & mask;
4622 0 : src_eqv = GEN_INT (val);
4623 : }
4624 : }
4625 :
4626 : /* Set sets[i].src_elt to the class each source belongs to.
4627 : Detect assignments from or to volatile things
4628 : and set set[i] to zero so they will be ignored
4629 : in the rest of this function.
4630 :
4631 : Nothing in this loop changes the hash table or the register chains. */
4632 :
4633 580307587 : for (i = 0; i < n_sets; i++)
4634 : {
4635 191986853 : bool repeat = false;
4636 191986853 : bool noop_insn = false;
4637 191986853 : rtx src, dest;
4638 191986853 : rtx src_folded;
4639 191986853 : struct table_elt *elt = 0, *p;
4640 191986853 : machine_mode mode;
4641 191986853 : rtx src_eqv_here;
4642 191986853 : rtx src_const = 0;
4643 191986853 : rtx src_related = 0;
4644 191986853 : rtx dest_related = 0;
4645 191986853 : bool src_related_is_const_anchor = false;
4646 191986853 : struct table_elt *src_const_elt = 0;
4647 191986853 : int src_cost = MAX_COST;
4648 191986853 : int src_eqv_cost = MAX_COST;
4649 191986853 : int src_folded_cost = MAX_COST;
4650 191986853 : int src_related_cost = MAX_COST;
4651 191986853 : int src_elt_cost = MAX_COST;
4652 191986853 : int src_regcost = MAX_COST;
4653 191986853 : int src_eqv_regcost = MAX_COST;
4654 191986853 : int src_folded_regcost = MAX_COST;
4655 191986853 : int src_related_regcost = MAX_COST;
4656 191986853 : int src_elt_regcost = MAX_COST;
4657 191986853 : scalar_int_mode int_mode;
4658 191986853 : bool is_fake_set = sets[i].is_fake_set;
4659 :
4660 191986853 : dest = SET_DEST (sets[i].rtl);
4661 191986853 : src = SET_SRC (sets[i].rtl);
4662 :
4663 : /* If SRC is a constant that has no machine mode,
4664 : hash it with the destination's machine mode.
4665 : This way we can keep different modes separate. */
4666 :
4667 191986853 : mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
4668 191986853 : sets[i].mode = mode;
4669 :
4670 191986853 : if (!is_fake_set && src_eqv)
4671 : {
4672 8330011 : machine_mode eqvmode = mode;
4673 8330011 : if (GET_CODE (dest) == STRICT_LOW_PART)
4674 0 : eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
4675 8330011 : do_not_record = 0;
4676 8330011 : hash_arg_in_memory = 0;
4677 8330011 : src_eqv_hash = HASH (src_eqv, eqvmode);
4678 :
4679 : /* Find the equivalence class for the equivalent expression. */
4680 :
4681 8330011 : if (!do_not_record)
4682 8327809 : src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
4683 :
4684 8330011 : src_eqv_volatile = do_not_record;
4685 8330011 : src_eqv_in_memory = hash_arg_in_memory;
4686 : }
4687 :
4688 : /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
4689 : value of the INNER register, not the destination. So it is not
4690 : a valid substitution for the source. But save it for later. */
4691 191986853 : if (is_fake_set || GET_CODE (dest) == STRICT_LOW_PART)
4692 : src_eqv_here = 0;
4693 : else
4694 191986853 : src_eqv_here = src_eqv;
4695 :
4696 : /* Simplify and foldable subexpressions in SRC. Then get the fully-
4697 : simplified result, which may not necessarily be valid. */
4698 191986853 : src_folded = fold_rtx (src, NULL);
4699 :
4700 : #if 0
4701 : /* ??? This caused bad code to be generated for the m68k port with -O2.
4702 : Suppose src is (CONST_INT -1), and that after truncation src_folded
4703 : is (CONST_INT 3). Suppose src_folded is then used for src_const.
4704 : At the end we will add src and src_const to the same equivalence
4705 : class. We now have 3 and -1 on the same equivalence class. This
4706 : causes later instructions to be mis-optimized. */
4707 : /* If storing a constant in a bitfield, pre-truncate the constant
4708 : so we will be able to record it later. */
4709 : if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT)
4710 : {
4711 : rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
4712 :
4713 : if (CONST_INT_P (src)
4714 : && CONST_INT_P (width)
4715 : && INTVAL (width) < HOST_BITS_PER_WIDE_INT
4716 : && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
4717 : src_folded
4718 : = GEN_INT (INTVAL (src) & ((HOST_WIDE_INT_1
4719 : << INTVAL (width)) - 1));
4720 : }
4721 : #endif
4722 :
4723 : /* Compute SRC's hash code, and also notice if it
4724 : should not be recorded at all. In that case,
4725 : prevent any further processing of this assignment.
4726 :
4727 : We set DO_NOT_RECORD if the destination has a REG_UNUSED note.
4728 : This avoids getting the source register into the tables, where it
4729 : may be invalidated later (via REG_QTY), then trigger an ICE upon
4730 : re-insertion.
4731 :
4732 : This is only a problem in multi-set insns. If it were a single
4733 : set the dead copy would have been removed. If the RHS were anything
4734 : but a simple REG, then we won't call insert_regs and thus there's
4735 : no potential for triggering the ICE. */
4736 383973706 : do_not_record = (REG_P (dest)
4737 142000540 : && REG_P (src)
4738 225650855 : && find_reg_note (insn, REG_UNUSED, dest));
4739 191986853 : hash_arg_in_memory = 0;
4740 :
4741 191986853 : sets[i].src = src;
4742 191986853 : sets[i].src_hash = HASH (src, mode);
4743 191986853 : sets[i].src_volatile = do_not_record;
4744 191986853 : sets[i].src_in_memory = hash_arg_in_memory;
4745 :
4746 : /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
4747 : a pseudo, do not record SRC. Using SRC as a replacement for
4748 : anything else will be incorrect in that situation. Note that
4749 : this usually occurs only for stack slots, in which case all the
4750 : RTL would be referring to SRC, so we don't lose any optimization
4751 : opportunities by not having SRC in the hash table. */
4752 :
4753 191986853 : if (MEM_P (src)
4754 24852650 : && find_reg_note (insn, REG_EQUIV, NULL_RTX) != 0
4755 920945 : && REG_P (dest)
4756 192907798 : && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
4757 920945 : sets[i].src_volatile = 1;
4758 :
4759 191065908 : else if (GET_CODE (src) == ASM_OPERANDS
4760 199858 : && GET_CODE (x) == PARALLEL)
4761 : {
4762 : /* Do not record result of a non-volatile inline asm with
4763 : more than one result. */
4764 199834 : if (n_sets > 1)
4765 156793 : sets[i].src_volatile = 1;
4766 :
4767 199834 : int j, lim = XVECLEN (x, 0);
4768 1023990 : for (j = 0; j < lim; j++)
4769 : {
4770 825942 : rtx y = XVECEXP (x, 0, j);
4771 : /* And do not record result of a non-volatile inline asm
4772 : with "memory" clobber. */
4773 825942 : if (GET_CODE (y) == CLOBBER && MEM_P (XEXP (y, 0)))
4774 : {
4775 1786 : sets[i].src_volatile = 1;
4776 1786 : break;
4777 : }
4778 : }
4779 : }
4780 :
4781 : #if 0
4782 : /* It is no longer clear why we used to do this, but it doesn't
4783 : appear to still be needed. So let's try without it since this
4784 : code hurts cse'ing widened ops. */
4785 : /* If source is a paradoxical subreg (such as QI treated as an SI),
4786 : treat it as volatile. It may do the work of an SI in one context
4787 : where the extra bits are not being used, but cannot replace an SI
4788 : in general. */
4789 : if (paradoxical_subreg_p (src))
4790 : sets[i].src_volatile = 1;
4791 : #endif
4792 :
4793 : /* Locate all possible equivalent forms for SRC. Try to replace
4794 : SRC in the insn with each cheaper equivalent.
4795 :
4796 : We have the following types of equivalents: SRC itself, a folded
4797 : version, a value given in a REG_EQUAL note, or a value related
4798 : to a constant.
4799 :
4800 : Each of these equivalents may be part of an additional class
4801 : of equivalents (if more than one is in the table, they must be in
4802 : the same class; we check for this).
4803 :
4804 : If the source is volatile, we don't do any table lookups.
4805 :
4806 : We note any constant equivalent for possible later use in a
4807 : REG_NOTE. */
4808 :
4809 191986853 : if (!sets[i].src_volatile)
4810 158084895 : elt = lookup (src, sets[i].src_hash, mode);
4811 :
4812 191986853 : sets[i].src_elt = elt;
4813 :
4814 191986853 : if (elt && src_eqv_here && src_eqv_elt)
4815 : {
4816 2815353 : if (elt->first_same_value != src_eqv_elt->first_same_value)
4817 : {
4818 : /* The REG_EQUAL is indicating that two formerly distinct
4819 : classes are now equivalent. So merge them. */
4820 9595 : merge_equiv_classes (elt, src_eqv_elt);
4821 9595 : src_eqv_hash = HASH (src_eqv, elt->mode);
4822 9595 : src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
4823 : }
4824 :
4825 9595 : src_eqv_here = 0;
4826 : }
4827 :
4828 188973393 : else if (src_eqv_elt)
4829 : elt = src_eqv_elt;
4830 :
4831 : /* Try to find a constant somewhere and record it in `src_const'.
4832 : Record its table element, if any, in `src_const_elt'. Look in
4833 : any known equivalences first. (If the constant is not in the
4834 : table, also set `sets[i].src_const_hash'). */
4835 188811978 : if (elt)
4836 90728456 : for (p = elt->first_same_value; p; p = p->next_same_value)
4837 72698552 : if (p->is_const)
4838 : {
4839 15739075 : src_const = p->exp;
4840 15739075 : src_const_elt = elt;
4841 15739075 : break;
4842 : }
4843 :
4844 33768979 : if (src_const == 0
4845 176247778 : && (CONSTANT_P (src_folded)
4846 : /* Consider (minus (label_ref L1) (label_ref L2)) as
4847 : "constant" here so we will record it. This allows us
4848 : to fold switch statements when an ADDR_DIFF_VEC is used. */
4849 150376853 : || (GET_CODE (src_folded) == MINUS
4850 1730217 : && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
4851 95 : && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
4852 : src_const = src_folded, src_const_elt = elt;
4853 166115842 : else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
4854 413002 : src_const = src_eqv_here, src_const_elt = src_eqv_elt;
4855 :
4856 : /* If we don't know if the constant is in the table, get its
4857 : hash code and look it up. */
4858 191986853 : if (src_const && src_const_elt == 0)
4859 : {
4860 26283801 : sets[i].src_const_hash = HASH (src_const, mode);
4861 26283801 : src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
4862 : }
4863 :
4864 191986853 : sets[i].src_const = src_const;
4865 191986853 : sets[i].src_const_elt = src_const_elt;
4866 :
4867 : /* If the constant and our source are both in the table, mark them as
4868 : equivalent. Otherwise, if a constant is in the table but the source
4869 : isn't, set ELT to it. */
4870 191986853 : if (src_const_elt && elt
4871 15739287 : && src_const_elt->first_same_value != elt->first_same_value)
4872 0 : merge_equiv_classes (elt, src_const_elt);
4873 191986853 : else if (src_const_elt && elt == 0)
4874 191986853 : elt = src_const_elt;
4875 :
4876 : /* See if there is a register linearly related to a constant
4877 : equivalent of SRC. */
4878 191986853 : if (src_const
4879 42023088 : && (GET_CODE (src_const) == CONST
4880 41392464 : || (src_const_elt && src_const_elt->related_value != 0)))
4881 : {
4882 725622 : src_related = use_related_value (src_const, src_const_elt);
4883 725622 : if (src_related)
4884 : {
4885 232675 : struct table_elt *src_related_elt
4886 232675 : = lookup (src_related, HASH (src_related, mode), mode);
4887 232675 : if (src_related_elt && elt)
4888 : {
4889 1633 : if (elt->first_same_value
4890 1633 : != src_related_elt->first_same_value)
4891 : /* This can occur when we previously saw a CONST
4892 : involving a SYMBOL_REF and then see the SYMBOL_REF
4893 : twice. Merge the involved classes. */
4894 857 : merge_equiv_classes (elt, src_related_elt);
4895 :
4896 : src_related = 0;
4897 191986853 : src_related_elt = 0;
4898 : }
4899 231042 : else if (src_related_elt && elt == 0)
4900 7308 : elt = src_related_elt;
4901 : }
4902 : }
4903 :
4904 : /* See if we have a CONST_INT that is already in a register in a
4905 : wider mode. */
4906 :
4907 41792046 : if (src_const && src_related == 0 && CONST_INT_P (src_const)
4908 18209460 : && is_int_mode (mode, &int_mode)
4909 212200146 : && GET_MODE_PRECISION (int_mode) < BITS_PER_WORD)
4910 : {
4911 7725049 : opt_scalar_int_mode wider_mode_iter;
4912 19753824 : FOR_EACH_WIDER_MODE (wider_mode_iter, int_mode)
4913 : {
4914 19753824 : scalar_int_mode wider_mode = wider_mode_iter.require ();
4915 20496101 : if (GET_MODE_PRECISION (wider_mode) > BITS_PER_WORD)
4916 : break;
4917 :
4918 12253655 : struct table_elt *const_elt
4919 12253655 : = lookup (src_const, HASH (src_const, wider_mode), wider_mode);
4920 :
4921 12253655 : if (const_elt == 0)
4922 11599456 : continue;
4923 :
4924 654199 : for (const_elt = const_elt->first_same_value;
4925 2020304 : const_elt; const_elt = const_elt->next_same_value)
4926 1590985 : if (REG_P (const_elt->exp))
4927 : {
4928 224880 : src_related = gen_lowpart (int_mode, const_elt->exp);
4929 224880 : break;
4930 : }
4931 :
4932 654199 : if (src_related != 0)
4933 : break;
4934 : }
4935 : }
4936 :
4937 : /* Another possibility is that we have an AND with a constant in
4938 : a mode narrower than a word. If so, it might have been generated
4939 : as part of an "if" which would narrow the AND. If we already
4940 : have done the AND in a wider mode, we can use a SUBREG of that
4941 : value. */
4942 :
4943 187769364 : if (flag_expensive_optimizations && ! src_related
4944 320209264 : && is_a <scalar_int_mode> (mode, &int_mode)
4945 128222411 : && GET_CODE (src) == AND && CONST_INT_P (XEXP (src, 1))
4946 193478636 : && GET_MODE_SIZE (int_mode) < UNITS_PER_WORD)
4947 : {
4948 979040 : opt_scalar_int_mode tmode_iter;
4949 979040 : rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
4950 :
4951 2613088 : FOR_EACH_WIDER_MODE (tmode_iter, int_mode)
4952 : {
4953 2613088 : scalar_int_mode tmode = tmode_iter.require ();
4954 5376735 : if (GET_MODE_SIZE (tmode) > UNITS_PER_WORD)
4955 : break;
4956 :
4957 1634105 : rtx inner = gen_lowpart (tmode, XEXP (src, 0));
4958 1634105 : struct table_elt *larger_elt;
4959 :
4960 1634105 : if (inner)
4961 : {
4962 1630451 : PUT_MODE (new_and, tmode);
4963 1630451 : XEXP (new_and, 0) = inner;
4964 1630451 : larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
4965 1630451 : if (larger_elt == 0)
4966 1630394 : continue;
4967 :
4968 57 : for (larger_elt = larger_elt->first_same_value;
4969 57 : larger_elt; larger_elt = larger_elt->next_same_value)
4970 57 : if (REG_P (larger_elt->exp))
4971 : {
4972 57 : src_related
4973 57 : = gen_lowpart (int_mode, larger_elt->exp);
4974 57 : break;
4975 : }
4976 :
4977 57 : if (src_related)
4978 : break;
4979 : }
4980 : }
4981 : }
4982 :
4983 : /* If SRC_EQV is a CONST_INT, try looking up some related
4984 : constants (logical and arithmetic negation). Those may
4985 : ultimately be cheaper to re-use. */
4986 191986853 : if (GET_CODE (src) != CONST_INT
4987 : && GET_CODE (src) != REG
4988 : && GET_CODE (src) != SUBREG
4989 119304611 : && src_const
4990 15821387 : && GET_CODE (src_const) == CONST_INT)
4991 : {
4992 72915 : rtx trial_rtx = GEN_INT (~UINTVAL (src_const));
4993 72915 : struct table_elt *tmp = lookup (trial_rtx, HASH (trial_rtx, mode), mode);
4994 72915 : rtx_code code = NOT;
4995 72915 : if (!tmp)
4996 : {
4997 71740 : trial_rtx = GEN_INT (-UINTVAL (src_const));
4998 71740 : tmp = lookup (trial_rtx, HASH (trial_rtx, mode), mode);
4999 71740 : code = NEG;
5000 : }
5001 :
5002 71740 : if (tmp)
5003 : {
5004 13982 : src_related = gen_rtx_fmt_e (code, mode, tmp->first_same_value->exp);
5005 13982 : src_eqv_here = src_related;
5006 13982 : src_related_is_const_anchor = true;
5007 : }
5008 :
5009 : }
5010 :
5011 : /* See if a MEM has already been loaded with a widening operation;
5012 : if it has, we can use a subreg of that. Many CISC machines
5013 : also have such operations, but this is only likely to be
5014 : beneficial on these machines. */
5015 :
5016 191986853 : rtx_code extend_op;
5017 191986853 : if (flag_expensive_optimizations && src_related == 0
5018 : && MEM_P (src) && ! do_not_record
5019 : && is_a <scalar_int_mode> (mode, &int_mode)
5020 : && (extend_op = load_extend_op (int_mode)) != UNKNOWN)
5021 : {
5022 : #if GCC_VERSION >= 5000
5023 : struct rtx_def memory_extend_buf;
5024 : rtx memory_extend_rtx = &memory_extend_buf;
5025 : #else
5026 : /* Workaround GCC < 5 bug, fixed in r5-3834 as part of PR63362
5027 : fix. */
5028 : alignas (rtx_def) unsigned char memory_extended_buf[sizeof (rtx_def)];
5029 : rtx memory_extend_rtx = (rtx) &memory_extended_buf[0];
5030 : #endif
5031 :
5032 : /* Set what we are trying to extend and the operation it might
5033 : have been extended with. */
5034 : memset (memory_extend_rtx, 0, sizeof (*memory_extend_rtx));
5035 : PUT_CODE (memory_extend_rtx, extend_op);
5036 : XEXP (memory_extend_rtx, 0) = src;
5037 :
5038 : opt_scalar_int_mode tmode_iter;
5039 : FOR_EACH_WIDER_MODE (tmode_iter, int_mode)
5040 : {
5041 : struct table_elt *larger_elt;
5042 :
5043 : scalar_int_mode tmode = tmode_iter.require ();
5044 : if (GET_MODE_SIZE (tmode) > UNITS_PER_WORD)
5045 : break;
5046 :
5047 : PUT_MODE (memory_extend_rtx, tmode);
5048 : larger_elt = lookup (memory_extend_rtx,
5049 : HASH (memory_extend_rtx, tmode), tmode);
5050 : if (larger_elt == 0)
5051 : continue;
5052 :
5053 : for (larger_elt = larger_elt->first_same_value;
5054 : larger_elt; larger_elt = larger_elt->next_same_value)
5055 : if (REG_P (larger_elt->exp))
5056 : {
5057 : src_related = gen_lowpart (int_mode, larger_elt->exp);
5058 : break;
5059 : }
5060 :
5061 : if (src_related)
5062 : break;
5063 : }
5064 : }
5065 :
5066 : /* Try to express the constant using a register+offset expression
5067 : derived from a constant anchor. */
5068 :
5069 191986853 : if (targetm.const_anchor
5070 0 : && !src_related
5071 0 : && src_const
5072 0 : && GET_CODE (src_const) == CONST_INT)
5073 : {
5074 0 : src_related = try_const_anchors (src_const, mode);
5075 0 : src_related_is_const_anchor = src_related != NULL_RTX;
5076 : }
5077 :
5078 : /* Try to re-materialize a vec_dup with an existing constant. */
5079 191986853 : rtx src_elt;
5080 5528600 : if ((!src_eqv_here || CONSTANT_P (src_eqv_here))
5081 191986853 : && const_vec_duplicate_p (src, &src_elt))
5082 : {
5083 643498 : machine_mode const_mode = GET_MODE_INNER (GET_MODE (src));
5084 643498 : struct table_elt *related_elt
5085 643498 : = lookup (src_elt, HASH (src_elt, const_mode), const_mode);
5086 643498 : if (related_elt)
5087 : {
5088 266639 : for (related_elt = related_elt->first_same_value;
5089 1755387 : related_elt; related_elt = related_elt->next_same_value)
5090 1523592 : if (REG_P (related_elt->exp))
5091 : {
5092 : /* We don't need to compare costs with an existing (constant)
5093 : src_eqv_here, since any such src_eqv_here should already be
5094 : available in src_const. */
5095 34844 : src_eqv_here
5096 34844 : = gen_rtx_VEC_DUPLICATE (GET_MODE (src),
5097 : related_elt->exp);
5098 34844 : break;
5099 : }
5100 : }
5101 : }
5102 :
5103 191986853 : if (src == src_folded)
5104 187728339 : src_folded = 0;
5105 :
5106 : /* At this point, ELT, if nonzero, points to a class of expressions
5107 : equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
5108 : and SRC_RELATED, if nonzero, each contain additional equivalent
5109 : expressions. Prune these latter expressions by deleting expressions
5110 : already in the equivalence class.
5111 :
5112 : Check for an equivalent identical to the destination. If found,
5113 : this is the preferred equivalent since it will likely lead to
5114 : elimination of the insn. Indicate this by placing it in
5115 : `src_related'. */
5116 :
5117 191986853 : if (elt)
5118 33836881 : elt = elt->first_same_value;
5119 289269542 : for (p = elt; p; p = p->next_same_value)
5120 : {
5121 97282689 : enum rtx_code code = GET_CODE (p->exp);
5122 :
5123 : /* If the expression is not valid, ignore it. Then we do not
5124 : have to check for validity below. In most cases, we can use
5125 : `rtx_equal_p', since canonicalization has already been done. */
5126 97282689 : if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, false))
5127 4037 : continue;
5128 :
5129 : /* Also skip paradoxical subregs, unless that's what we're
5130 : looking for. */
5131 97278652 : if (paradoxical_subreg_p (p->exp)
5132 2344466 : && ! (src != 0
5133 3328 : && GET_CODE (src) == SUBREG
5134 3328 : && GET_MODE (src) == GET_MODE (p->exp)
5135 3328 : && partial_subreg_p (GET_MODE (SUBREG_REG (src)),
5136 : GET_MODE (SUBREG_REG (p->exp)))))
5137 3632 : continue;
5138 :
5139 97275020 : if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
5140 : src = 0;
5141 2163246 : else if (src_folded && GET_CODE (src_folded) == code
5142 64427585 : && rtx_equal_p (src_folded, p->exp))
5143 : src_folded = 0;
5144 784509 : else if (src_eqv_here && GET_CODE (src_eqv_here) == code
5145 63639569 : && rtx_equal_p (src_eqv_here, p->exp))
5146 : src_eqv_here = 0;
5147 981113 : else if (src_related && GET_CODE (src_related) == code
5148 63104703 : && rtx_equal_p (src_related, p->exp))
5149 : src_related = 0;
5150 :
5151 : /* This is the same as the destination of the insns, we want
5152 : to prefer it. The code below will then give it a negative
5153 : cost. */
5154 97275020 : if (!dest_related
5155 97275020 : && GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
5156 209525 : dest_related = p->exp;
5157 : }
5158 :
5159 : /* Find the cheapest valid equivalent, trying all the available
5160 : possibilities. Prefer items not in the hash table to ones
5161 : that are when they are equal cost. Note that we can never
5162 : worsen an insn as the current contents will also succeed.
5163 : If we find an equivalent identical to the destination, use it as best,
5164 : since this insn will probably be eliminated in that case. */
5165 191986853 : if (src)
5166 : {
5167 158598298 : if (rtx_equal_p (src, dest))
5168 : src_cost = src_regcost = -1;
5169 : else
5170 : {
5171 158598292 : src_cost = COST (src, mode);
5172 158598292 : src_regcost = approx_reg_cost (src);
5173 : }
5174 : }
5175 :
5176 191986853 : if (src_eqv_here)
5177 : {
5178 5270192 : if (rtx_equal_p (src_eqv_here, dest))
5179 : src_eqv_cost = src_eqv_regcost = -1;
5180 : else
5181 : {
5182 5270192 : src_eqv_cost = COST (src_eqv_here, mode);
5183 5270192 : src_eqv_regcost = approx_reg_cost (src_eqv_here);
5184 : }
5185 : }
5186 :
5187 191986853 : if (src_folded)
5188 : {
5189 3718023 : if (rtx_equal_p (src_folded, dest))
5190 : src_folded_cost = src_folded_regcost = -1;
5191 : else
5192 : {
5193 3704894 : src_folded_cost = COST (src_folded, mode);
5194 3704894 : src_folded_regcost = approx_reg_cost (src_folded);
5195 : }
5196 : }
5197 :
5198 191986853 : if (dest_related)
5199 : {
5200 : src_related_cost = src_related_regcost = -1;
5201 : /* Handle it as src_related. */
5202 : src_related = dest_related;
5203 : }
5204 191777328 : else if (src_related)
5205 : {
5206 467618 : src_related_cost = COST (src_related, mode);
5207 467618 : src_related_regcost = approx_reg_cost (src_related);
5208 :
5209 : /* If a const-anchor is used to synthesize a constant that
5210 : normally requires multiple instructions then slightly prefer
5211 : it over the original sequence. These instructions are likely
5212 : to become redundant now. We can't compare against the cost
5213 : of src_eqv_here because, on MIPS for example, multi-insn
5214 : constants have zero cost; they are assumed to be hoisted from
5215 : loops. */
5216 467618 : if (src_related_is_const_anchor
5217 467618 : && src_related_cost == src_cost
5218 8475 : && src_eqv_here)
5219 8473 : src_related_cost--;
5220 : }
5221 :
5222 : /* If this was an indirect jump insn, a known label will really be
5223 : cheaper even though it looks more expensive. */
5224 191986853 : if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
5225 191986853 : src_folded = src_const, src_folded_cost = src_folded_regcost = -1;
5226 :
5227 : /* Terminate loop when replacement made. This must terminate since
5228 : the current contents will be tested and will always be valid. */
5229 197128302 : while (!is_fake_set)
5230 : {
5231 : rtx trial;
5232 :
5233 : /* Skip invalid entries. */
5234 34771439 : while (elt && !REG_P (elt->exp)
5235 205065781 : && ! exp_equiv_p (elt->exp, elt->exp, 1, false))
5236 20 : elt = elt->next_same_value;
5237 :
5238 : /* A paradoxical subreg would be bad here: it'll be the right
5239 : size, but later may be adjusted so that the upper bits aren't
5240 : what we want. So reject it. */
5241 196490847 : if (elt != 0
5242 34771419 : && paradoxical_subreg_p (elt->exp)
5243 : /* It is okay, though, if the rtx we're trying to match
5244 : will ignore any of the bits we can't predict. */
5245 196492247 : && ! (src != 0
5246 1400 : && GET_CODE (src) == SUBREG
5247 1400 : && GET_MODE (src) == GET_MODE (elt->exp)
5248 1400 : && partial_subreg_p (GET_MODE (SUBREG_REG (src)),
5249 : GET_MODE (SUBREG_REG (elt->exp)))))
5250 : {
5251 1400 : elt = elt->next_same_value;
5252 1400 : continue;
5253 : }
5254 :
5255 196488047 : if (elt)
5256 : {
5257 34770019 : src_elt_cost = elt->cost;
5258 34770019 : src_elt_regcost = elt->regcost;
5259 : }
5260 :
5261 : /* Find cheapest and skip it for the next time. For items
5262 : of equal cost, use this order:
5263 : src_folded, src, src_eqv, src_related and hash table entry. */
5264 196488047 : if (src_folded
5265 7812681 : && preferable (src_folded_cost, src_folded_regcost,
5266 : src_cost, src_regcost) <= 0
5267 5980329 : && preferable (src_folded_cost, src_folded_regcost,
5268 : src_eqv_cost, src_eqv_regcost) <= 0
5269 5132286 : && preferable (src_folded_cost, src_folded_regcost,
5270 : src_related_cost, src_related_regcost) <= 0
5271 201617420 : && preferable (src_folded_cost, src_folded_regcost,
5272 : src_elt_cost, src_elt_regcost) <= 0)
5273 : trial = src_folded, src_folded_cost = MAX_COST;
5274 191813823 : else if (src
5275 157528474 : && preferable (src_cost, src_regcost,
5276 : src_eqv_cost, src_eqv_regcost) <= 0
5277 156026329 : && preferable (src_cost, src_regcost,
5278 : src_related_cost, src_related_regcost) <= 0
5279 347813829 : && preferable (src_cost, src_regcost,
5280 : src_elt_cost, src_elt_regcost) <= 0)
5281 : trial = src, src_cost = MAX_COST;
5282 35874094 : else if (src_eqv_here
5283 1711074 : && preferable (src_eqv_cost, src_eqv_regcost,
5284 : src_related_cost, src_related_regcost) <= 0
5285 37576424 : && preferable (src_eqv_cost, src_eqv_regcost,
5286 : src_elt_cost, src_elt_regcost) <= 0)
5287 : trial = src_eqv_here, src_eqv_cost = MAX_COST;
5288 34384803 : else if (src_related
5289 34384803 : && preferable (src_related_cost, src_related_regcost,
5290 : src_elt_cost, src_elt_regcost) <= 0)
5291 : trial = src_related, src_related_cost = MAX_COST;
5292 : else
5293 : {
5294 34156817 : trial = elt->exp;
5295 34156817 : elt = elt->next_same_value;
5296 34156817 : src_elt_cost = MAX_COST;
5297 : }
5298 :
5299 : /* Try to optimize
5300 : (set (reg:M N) (const_int A))
5301 : (set (reg:M2 O) (const_int B))
5302 : (set (zero_extract:M2 (reg:M N) (const_int C) (const_int D))
5303 : (reg:M2 O)). */
5304 196488047 : if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
5305 3885 : && CONST_INT_P (trial)
5306 720 : && CONST_INT_P (XEXP (SET_DEST (sets[i].rtl), 1))
5307 720 : && CONST_INT_P (XEXP (SET_DEST (sets[i].rtl), 2))
5308 569 : && REG_P (XEXP (SET_DEST (sets[i].rtl), 0))
5309 105 : && (known_ge
5310 : (GET_MODE_PRECISION (GET_MODE (SET_DEST (sets[i].rtl))),
5311 : INTVAL (XEXP (SET_DEST (sets[i].rtl), 1))))
5312 196488047 : && ((unsigned) INTVAL (XEXP (SET_DEST (sets[i].rtl), 1))
5313 105 : + (unsigned) INTVAL (XEXP (SET_DEST (sets[i].rtl), 2))
5314 : <= HOST_BITS_PER_WIDE_INT))
5315 : {
5316 105 : rtx dest_reg = XEXP (SET_DEST (sets[i].rtl), 0);
5317 105 : rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5318 105 : rtx pos = XEXP (SET_DEST (sets[i].rtl), 2);
5319 105 : unsigned int dest_hash = HASH (dest_reg, GET_MODE (dest_reg));
5320 105 : struct table_elt *dest_elt
5321 105 : = lookup (dest_reg, dest_hash, GET_MODE (dest_reg));
5322 105 : rtx dest_cst = NULL;
5323 :
5324 105 : if (dest_elt)
5325 153 : for (p = dest_elt->first_same_value; p; p = p->next_same_value)
5326 104 : if (p->is_const && CONST_INT_P (p->exp))
5327 : {
5328 : dest_cst = p->exp;
5329 : break;
5330 : }
5331 57 : if (dest_cst)
5332 : {
5333 8 : HOST_WIDE_INT val = INTVAL (dest_cst);
5334 8 : HOST_WIDE_INT mask;
5335 8 : unsigned int shift;
5336 : /* This is the mode of DEST_CST as well. */
5337 8 : scalar_int_mode dest_mode
5338 8 : = as_a <scalar_int_mode> (GET_MODE (dest_reg));
5339 8 : if (BITS_BIG_ENDIAN)
5340 : shift = GET_MODE_PRECISION (dest_mode)
5341 : - INTVAL (pos) - INTVAL (width);
5342 : else
5343 8 : shift = INTVAL (pos);
5344 8 : if (INTVAL (width) == HOST_BITS_PER_WIDE_INT)
5345 : mask = HOST_WIDE_INT_M1;
5346 : else
5347 8 : mask = (HOST_WIDE_INT_1 << INTVAL (width)) - 1;
5348 8 : val &= ~(mask << shift);
5349 8 : val |= (INTVAL (trial) & mask) << shift;
5350 8 : val = trunc_int_for_mode (val, dest_mode);
5351 8 : validate_unshare_change (insn, &SET_DEST (sets[i].rtl),
5352 : dest_reg, 1);
5353 8 : validate_unshare_change (insn, &SET_SRC (sets[i].rtl),
5354 : GEN_INT (val), 1);
5355 8 : if (apply_change_group ())
5356 : {
5357 8 : rtx note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
5358 8 : if (note)
5359 : {
5360 0 : remove_note (insn, note);
5361 0 : df_notes_rescan (insn);
5362 : }
5363 8 : src_eqv = NULL_RTX;
5364 8 : src_eqv_elt = NULL;
5365 8 : src_eqv_volatile = 0;
5366 8 : src_eqv_in_memory = 0;
5367 8 : src_eqv_hash = 0;
5368 8 : repeat = true;
5369 8 : break;
5370 : }
5371 : }
5372 : }
5373 :
5374 : /* We don't normally have an insn matching (set (pc) (pc)), so
5375 : check for this separately here. We will delete such an
5376 : insn below.
5377 :
5378 : For other cases such as a table jump or conditional jump
5379 : where we know the ultimate target, go ahead and replace the
5380 : operand. While that may not make a valid insn, we will
5381 : reemit the jump below (and also insert any necessary
5382 : barriers). */
5383 193956607 : if (n_sets == 1 && dest == pc_rtx
5384 216487676 : && (trial == pc_rtx
5385 19986699 : || (GET_CODE (trial) == LABEL_REF
5386 10838 : && ! condjump_p (insn))))
5387 : {
5388 : /* Don't substitute non-local labels, this confuses CFG. */
5389 15555 : if (GET_CODE (trial) == LABEL_REF
5390 14247 : && LABEL_REF_NONLOCAL_P (trial))
5391 1308 : continue;
5392 :
5393 12939 : SET_SRC (sets[i].rtl) = trial;
5394 12939 : cse_jumps_altered = true;
5395 12939 : break;
5396 : }
5397 :
5398 : /* Similarly, lots of targets don't allow no-op
5399 : (set (mem x) (mem x)) moves. Even (set (reg x) (reg x))
5400 : might be impossible for certain registers (like CC registers). */
5401 196473792 : else if (n_sets == 1
5402 193942360 : && !CALL_P (insn)
5403 193452649 : && (MEM_P (trial) || REG_P (trial))
5404 78532388 : && rtx_equal_p (trial, dest)
5405 200109 : && !side_effects_p (dest)
5406 200105 : && (cfun->can_delete_dead_exceptions
5407 46369 : || insn_nothrow_p (insn))
5408 : /* We can only remove the later store if the earlier aliases
5409 : at least all accesses the later one. */
5410 196663384 : && (!MEM_P (trial)
5411 22025 : || ((MEM_ALIAS_SET (dest) == MEM_ALIAS_SET (trial)
5412 8541 : || alias_set_subset_of (MEM_ALIAS_SET (dest),
5413 8541 : MEM_ALIAS_SET (trial)))
5414 13820 : && (!MEM_EXPR (trial)
5415 12919 : || refs_same_for_tbaa_p (MEM_EXPR (trial),
5416 12919 : MEM_EXPR (dest))))))
5417 : {
5418 180213 : SET_SRC (sets[i].rtl) = trial;
5419 180213 : noop_insn = true;
5420 180213 : break;
5421 : }
5422 :
5423 : /* Reject certain invalid forms of CONST that we create. */
5424 196293579 : else if (CONSTANT_P (trial)
5425 32098252 : && GET_CODE (trial) == CONST
5426 : /* Reject cases that will cause decode_rtx_const to
5427 : die. On the alpha when simplifying a switch, we
5428 : get (const (truncate (minus (label_ref)
5429 : (label_ref)))). */
5430 565029 : && (GET_CODE (XEXP (trial, 0)) == TRUNCATE
5431 : /* Likewise on IA-64, except without the
5432 : truncate. */
5433 565029 : || (GET_CODE (XEXP (trial, 0)) == MINUS
5434 0 : && GET_CODE (XEXP (XEXP (trial, 0), 0)) == LABEL_REF
5435 0 : && GET_CODE (XEXP (XEXP (trial, 0), 1)) == LABEL_REF)))
5436 : /* Do nothing for this case. */
5437 : ;
5438 :
5439 : /* Do not replace anything with a MEM, except the replacement
5440 : is a no-op. This allows this loop to terminate. */
5441 196293579 : else if (MEM_P (trial) && !rtx_equal_p (trial, SET_SRC(sets[i].rtl)))
5442 : /* Do nothing for this case. */
5443 : ;
5444 :
5445 : /* Look for a substitution that makes a valid insn. */
5446 196195318 : else if (validate_unshare_change (insn, &SET_SRC (sets[i].rtl),
5447 : trial, 0))
5448 : {
5449 191154838 : rtx new_rtx = canon_reg (SET_SRC (sets[i].rtl), insn);
5450 :
5451 : /* The result of apply_change_group can be ignored; see
5452 : canon_reg. */
5453 :
5454 191154838 : validate_change (insn, &SET_SRC (sets[i].rtl), new_rtx, 1);
5455 191154838 : apply_change_group ();
5456 :
5457 191154838 : break;
5458 : }
5459 :
5460 : /* If the current function uses a constant pool and this is a
5461 : constant, try making a pool entry. Put it in src_folded
5462 : unless we already have done this since that is where it
5463 : likely came from. */
5464 :
5465 5040480 : else if (crtl->uses_const_pool
5466 3701570 : && CONSTANT_P (trial)
5467 2730753 : && !CONST_INT_P (trial)
5468 2712331 : && (src_folded == 0 || !MEM_P (src_folded))
5469 1875651 : && GET_MODE_CLASS (mode) != MODE_CC
5470 1875651 : && mode != VOIDmode)
5471 : {
5472 1875651 : src_folded = force_const_mem (mode, trial);
5473 1875651 : if (src_folded)
5474 : {
5475 1875004 : src_folded_cost = COST (src_folded, mode);
5476 1875004 : src_folded_regcost = approx_reg_cost (src_folded);
5477 : }
5478 : }
5479 : }
5480 :
5481 : /* If we changed the insn too much, handle this set from scratch. */
5482 191347990 : if (repeat)
5483 : {
5484 8 : i--;
5485 8 : continue;
5486 : }
5487 :
5488 191986845 : src = SET_SRC (sets[i].rtl);
5489 :
5490 : /* In general, it is good to have a SET with SET_SRC == SET_DEST.
5491 : However, there is an important exception: If both are registers
5492 : that are not the head of their equivalence class, replace SET_SRC
5493 : with the head of the class. If we do not do this, we will have
5494 : both registers live over a portion of the basic block. This way,
5495 : their lifetimes will likely abut instead of overlapping. */
5496 191986845 : if (!is_fake_set
5497 191347990 : && REG_P (dest)
5498 333987385 : && REGNO_QTY_VALID_P (REGNO (dest)))
5499 : {
5500 7928301 : int dest_q = REG_QTY (REGNO (dest));
5501 7928301 : struct qty_table_elem *dest_ent = &qty_table[dest_q];
5502 :
5503 7928301 : if (dest_ent->mode == GET_MODE (dest)
5504 6119739 : && dest_ent->first_reg != REGNO (dest)
5505 113282 : && REG_P (src) && REGNO (src) == REGNO (dest)
5506 : /* Don't do this if the original insn had a hard reg as
5507 : SET_SRC or SET_DEST. */
5508 5485 : && (!REG_P (sets[i].src)
5509 4069 : || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER)
5510 7933774 : && (!REG_P (dest) || REGNO (dest) >= FIRST_PSEUDO_REGISTER))
5511 : /* We can't call canon_reg here because it won't do anything if
5512 : SRC is a hard register. */
5513 : {
5514 5473 : int src_q = REG_QTY (REGNO (src));
5515 5473 : struct qty_table_elem *src_ent = &qty_table[src_q];
5516 5473 : int first = src_ent->first_reg;
5517 5473 : rtx new_src
5518 : = (first >= FIRST_PSEUDO_REGISTER
5519 5473 : ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));
5520 :
5521 : /* We must use validate-change even for this, because this
5522 : might be a special no-op instruction, suitable only to
5523 : tag notes onto. */
5524 5473 : if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
5525 : {
5526 5473 : src = new_src;
5527 : /* If we had a constant that is cheaper than what we are now
5528 : setting SRC to, use that constant. We ignored it when we
5529 : thought we could make this into a no-op. */
5530 1954 : if (src_const && COST (src_const, mode) < COST (src, mode)
5531 5473 : && validate_change (insn, &SET_SRC (sets[i].rtl),
5532 : src_const, 0))
5533 : src = src_const;
5534 : }
5535 : }
5536 : }
5537 :
5538 : /* If we made a change, recompute SRC values. */
5539 191986845 : if (src != sets[i].src)
5540 : {
5541 3183901 : do_not_record = 0;
5542 3183901 : hash_arg_in_memory = 0;
5543 3183901 : sets[i].src = src;
5544 3183901 : sets[i].src_hash = HASH (src, mode);
5545 3183901 : sets[i].src_volatile = do_not_record;
5546 3183901 : sets[i].src_in_memory = hash_arg_in_memory;
5547 3183901 : sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
5548 : }
5549 :
5550 : /* If this is a single SET, we are setting a register, and we have an
5551 : equivalent constant, we want to add a REG_EQUAL note if the constant
5552 : is different from the source. We don't want to do it for a constant
5553 : pseudo since verifying that this pseudo hasn't been eliminated is a
5554 : pain; moreover such a note won't help anything.
5555 :
5556 : Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
5557 : which can be created for a reference to a compile time computable
5558 : entry in a jump table. */
5559 191986845 : if (n_sets == 1
5560 189223405 : && REG_P (dest)
5561 140168649 : && src_const
5562 28465555 : && !REG_P (src_const)
5563 28441202 : && !(GET_CODE (src_const) == SUBREG
5564 0 : && REG_P (SUBREG_REG (src_const)))
5565 28441202 : && !(GET_CODE (src_const) == CONST
5566 377221 : && GET_CODE (XEXP (src_const, 0)) == MINUS
5567 0 : && GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF
5568 0 : && GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF)
5569 220428047 : && !rtx_equal_p (src, src_const))
5570 : {
5571 : /* Make sure that the rtx is not shared. */
5572 7318311 : src_const = copy_rtx (src_const);
5573 :
5574 : /* Record the actual constant value in a REG_EQUAL note,
5575 : making a new one if one does not already exist. */
5576 7318311 : set_unique_reg_note (insn, REG_EQUAL, src_const);
5577 7318311 : df_notes_rescan (insn);
5578 : }
5579 :
5580 : /* Now deal with the destination. */
5581 191986845 : do_not_record = 0;
5582 :
5583 : /* Look within any ZERO_EXTRACT to the MEM or REG within it. */
5584 191986845 : while (GET_CODE (dest) == SUBREG
5585 192008361 : || GET_CODE (dest) == ZERO_EXTRACT
5586 385556169 : || GET_CODE (dest) == STRICT_LOW_PART)
5587 1564840 : dest = XEXP (dest, 0);
5588 :
5589 191986845 : sets[i].inner_dest = dest;
5590 :
5591 191986845 : if (MEM_P (dest))
5592 : {
5593 : #ifdef PUSH_ROUNDING
5594 : /* Stack pushes invalidate the stack pointer. */
5595 28452981 : rtx addr = XEXP (dest, 0);
5596 28452981 : if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC
5597 5519983 : && XEXP (addr, 0) == stack_pointer_rtx)
5598 5519983 : invalidate (stack_pointer_rtx, VOIDmode);
5599 : #endif
5600 28452981 : dest = fold_rtx (dest, insn);
5601 : }
5602 :
5603 : /* Compute the hash code of the destination now,
5604 : before the effects of this instruction are recorded,
5605 : since the register values used in the address computation
5606 : are those before this instruction. */
5607 191986845 : sets[i].dest_hash = HASH (dest, mode);
5608 :
5609 : /* Don't enter a bit-field in the hash table
5610 : because the value in it after the store
5611 : may not equal what was stored, due to truncation. */
5612 :
5613 191986845 : if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT)
5614 : {
5615 3877 : rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5616 :
5617 3877 : if (src_const != 0 && CONST_INT_P (src_const)
5618 712 : && CONST_INT_P (width)
5619 712 : && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5620 712 : && ! (INTVAL (src_const)
5621 712 : & (HOST_WIDE_INT_M1U << INTVAL (width))))
5622 : /* Exception: if the value is constant,
5623 : and it won't be truncated, record it. */
5624 : ;
5625 : else
5626 : {
5627 : /* This is chosen so that the destination will be invalidated
5628 : but no new value will be recorded.
5629 : We must invalidate because sometimes constant
5630 : values can be recorded for bitfields. */
5631 3166 : sets[i].src_elt = 0;
5632 3166 : sets[i].src_volatile = 1;
5633 3166 : src_eqv = 0;
5634 3166 : src_eqv_elt = 0;
5635 : }
5636 : }
5637 :
5638 : /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
5639 : the insn. */
5640 191982968 : else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
5641 : {
5642 : /* One less use of the label this insn used to jump to. */
5643 12938 : cse_cfg_altered |= delete_insn_and_edges (insn);
5644 12938 : cse_jumps_altered = true;
5645 : /* No more processing for this set. */
5646 12938 : sets[i].rtl = 0;
5647 : }
5648 :
5649 : /* Similarly for no-op moves. */
5650 191970030 : else if (noop_insn)
5651 : {
5652 180213 : if (cfun->can_throw_non_call_exceptions && can_throw_internal (insn))
5653 0 : cse_cfg_altered = true;
5654 180213 : cse_cfg_altered |= delete_insn_and_edges (insn);
5655 : /* No more processing for this set. */
5656 180213 : sets[i].rtl = 0;
5657 : }
5658 :
5659 : /* If this SET is now setting PC to a label, we know it used to
5660 : be a conditional or computed branch. */
5661 19975042 : else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF
5662 191799347 : && !LABEL_REF_NONLOCAL_P (src))
5663 : {
5664 : /* We reemit the jump in as many cases as possible just in
5665 : case the form of an unconditional jump is significantly
5666 : different than a computed jump or conditional jump.
5667 :
5668 : If this insn has multiple sets, then reemitting the
5669 : jump is nontrivial. So instead we just force rerecognition
5670 : and hope for the best. */
5671 9530 : if (n_sets == 1)
5672 : {
5673 9530 : rtx_jump_insn *new_rtx;
5674 9530 : rtx note;
5675 :
5676 9530 : rtx_insn *seq = targetm.gen_jump (XEXP (src, 0));
5677 9530 : new_rtx = emit_jump_insn_before (seq, insn);
5678 9530 : JUMP_LABEL (new_rtx) = XEXP (src, 0);
5679 9530 : LABEL_NUSES (XEXP (src, 0))++;
5680 :
5681 : /* Make sure to copy over REG_NON_LOCAL_GOTO. */
5682 9530 : note = find_reg_note (insn, REG_NON_LOCAL_GOTO, 0);
5683 9530 : if (note)
5684 : {
5685 0 : XEXP (note, 1) = NULL_RTX;
5686 0 : REG_NOTES (new_rtx) = note;
5687 : }
5688 :
5689 9530 : cse_cfg_altered |= delete_insn_and_edges (insn);
5690 9530 : insn = new_rtx;
5691 : }
5692 : else
5693 0 : INSN_CODE (insn) = -1;
5694 :
5695 : /* Do not bother deleting any unreachable code, let jump do it. */
5696 9530 : cse_jumps_altered = true;
5697 9530 : sets[i].rtl = 0;
5698 : }
5699 :
5700 : /* If destination is volatile, invalidate it and then do no further
5701 : processing for this assignment. */
5702 :
5703 191780287 : else if (do_not_record)
5704 : {
5705 54446813 : invalidate_dest (dest);
5706 54446813 : sets[i].rtl = 0;
5707 : }
5708 :
5709 191986845 : if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
5710 : {
5711 1626687 : do_not_record = 0;
5712 1626687 : sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
5713 1626687 : if (do_not_record)
5714 : {
5715 979 : invalidate_dest (SET_DEST (sets[i].rtl));
5716 979 : sets[i].rtl = 0;
5717 : }
5718 : }
5719 : }
5720 :
5721 : /* Now enter all non-volatile source expressions in the hash table
5722 : if they are not already present.
5723 : Record their equivalence classes in src_elt.
5724 : This way we can insert the corresponding destinations into
5725 : the same classes even if the actual sources are no longer in them
5726 : (having been invalidated). */
5727 :
5728 5145541 : if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
5729 392560031 : && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
5730 : {
5731 4239297 : struct table_elt *elt;
5732 4239297 : struct table_elt *classp = sets[0].src_elt;
5733 4239297 : rtx dest = SET_DEST (sets[0].rtl);
5734 4239297 : machine_mode eqvmode = GET_MODE (dest);
5735 :
5736 4239297 : if (GET_CODE (dest) == STRICT_LOW_PART)
5737 : {
5738 0 : eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5739 0 : classp = 0;
5740 : }
5741 4239297 : if (insert_regs (src_eqv, classp, false))
5742 : {
5743 157254 : rehash_using_reg (src_eqv);
5744 157254 : src_eqv_hash = HASH (src_eqv, eqvmode);
5745 : }
5746 4239297 : elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
5747 4239297 : elt->in_memory = src_eqv_in_memory;
5748 4239297 : src_eqv_elt = elt;
5749 :
5750 : /* Check to see if src_eqv_elt is the same as a set source which
5751 : does not yet have an elt, and if so set the elt of the set source
5752 : to src_eqv_elt. */
5753 8478594 : for (i = 0; i < n_sets; i++)
5754 8478594 : if (sets[i].rtl && sets[i].src_elt == 0
5755 8346132 : && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
5756 97076 : sets[i].src_elt = src_eqv_elt;
5757 : }
5758 :
5759 580307579 : for (i = 0; i < n_sets; i++)
5760 329323217 : if (sets[i].rtl && ! sets[i].src_volatile
5761 315886326 : && ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
5762 : {
5763 123894441 : if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
5764 : {
5765 : /* REG_EQUAL in setting a STRICT_LOW_PART
5766 : gives an equivalent for the entire destination register,
5767 : not just for the subreg being stored in now.
5768 : This is a more interesting equivalence, so we arrange later
5769 : to treat the entire reg as the destination. */
5770 17639 : sets[i].src_elt = src_eqv_elt;
5771 17639 : sets[i].src_hash = src_eqv_hash;
5772 : }
5773 : else
5774 : {
5775 : /* Insert source and constant equivalent into hash table, if not
5776 : already present. */
5777 123876802 : struct table_elt *classp = src_eqv_elt;
5778 123876802 : rtx src = sets[i].src;
5779 123876802 : rtx dest = SET_DEST (sets[i].rtl);
5780 258725443 : machine_mode mode
5781 123876802 : = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
5782 :
5783 : /* It's possible that we have a source value known to be
5784 : constant but don't have a REG_EQUAL note on the insn.
5785 : Lack of a note will mean src_eqv_elt will be NULL. This
5786 : can happen where we've generated a SUBREG to access a
5787 : CONST_INT that is already in a register in a wider mode.
5788 : Ensure that the source expression is put in the proper
5789 : constant class. */
5790 123876802 : if (!classp)
5791 118719653 : classp = sets[i].src_const_elt;
5792 :
5793 123876802 : if (sets[i].src_elt == 0)
5794 : {
5795 102200177 : struct table_elt *elt;
5796 :
5797 : /* Note that these insert_regs calls cannot remove
5798 : any of the src_elt's, because they would have failed to
5799 : match if not still valid. */
5800 102200177 : if (insert_regs (src, classp, false))
5801 : {
5802 17044981 : rehash_using_reg (src);
5803 17044981 : sets[i].src_hash = HASH (src, mode);
5804 : }
5805 102200177 : elt = insert (src, classp, sets[i].src_hash, mode);
5806 102200177 : elt->in_memory = sets[i].src_in_memory;
5807 : /* If inline asm has any clobbers, ensure we only reuse
5808 : existing inline asms and never try to put the ASM_OPERANDS
5809 : into an insn that isn't inline asm. */
5810 102200177 : if (GET_CODE (src) == ASM_OPERANDS
5811 20448 : && GET_CODE (x) == PARALLEL)
5812 20430 : elt->cost = MAX_COST;
5813 102200177 : sets[i].src_elt = classp = elt;
5814 : }
5815 147996886 : if (sets[i].src_const && sets[i].src_const_elt == 0
5816 14324469 : && src != sets[i].src_const
5817 126030452 : && ! rtx_equal_p (sets[i].src_const, src))
5818 2153648 : sets[i].src_elt = insert (sets[i].src_const, classp,
5819 2153648 : sets[i].src_const_hash, mode);
5820 : }
5821 : }
5822 68092404 : else if (sets[i].src_elt == 0)
5823 : /* If we did not insert the source into the hash table (e.g., it was
5824 : volatile), note the equivalence class for the REG_EQUAL value, if any,
5825 : so that the destination goes into that class. */
5826 56057744 : sets[i].src_elt = src_eqv_elt;
5827 :
5828 : /* Record destination addresses in the hash table. This allows us to
5829 : check if they are invalidated by other sets. */
5830 580307579 : for (i = 0; i < n_sets; i++)
5831 : {
5832 191986845 : if (sets[i].rtl)
5833 : {
5834 137336372 : rtx x = sets[i].inner_dest;
5835 137336372 : struct table_elt *elt;
5836 137336372 : machine_mode mode;
5837 137336372 : unsigned hash;
5838 :
5839 137336372 : if (MEM_P (x))
5840 : {
5841 21856057 : x = XEXP (x, 0);
5842 21856057 : mode = GET_MODE (x);
5843 21856057 : hash = HASH (x, mode);
5844 21856057 : elt = lookup (x, hash, mode);
5845 21856057 : if (!elt)
5846 : {
5847 19111692 : if (insert_regs (x, NULL, false))
5848 : {
5849 2183173 : rtx dest = SET_DEST (sets[i].rtl);
5850 :
5851 2183173 : rehash_using_reg (x);
5852 2183173 : hash = HASH (x, mode);
5853 2183173 : sets[i].dest_hash = HASH (dest, GET_MODE (dest));
5854 : }
5855 19111692 : elt = insert (x, NULL, hash, mode);
5856 : }
5857 :
5858 21856057 : sets[i].dest_addr_elt = elt;
5859 : }
5860 : else
5861 115480315 : sets[i].dest_addr_elt = NULL;
5862 : }
5863 : }
5864 :
5865 388320734 : invalidate_from_clobbers (insn);
5866 :
5867 : /* Some registers are invalidated by subroutine calls. Memory is
5868 : invalidated by non-constant calls. */
5869 :
5870 388320734 : if (CALL_P (insn))
5871 : {
5872 15298139 : if (!(RTL_CONST_OR_PURE_CALL_P (insn)))
5873 13161198 : invalidate_memory ();
5874 : else
5875 : /* For const/pure calls, invalidate any argument slots, because
5876 : those are owned by the callee. */
5877 6227320 : for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
5878 4090379 : if (GET_CODE (XEXP (tem, 0)) == USE
5879 4090240 : && MEM_P (XEXP (XEXP (tem, 0), 0)))
5880 69776 : invalidate (XEXP (XEXP (tem, 0), 0), VOIDmode);
5881 15298139 : invalidate_for_call (insn);
5882 : }
5883 :
5884 : /* Now invalidate everything set by this instruction.
5885 : If a SUBREG or other funny destination is being set,
5886 : sets[i].rtl is still nonzero, so here we invalidate the reg
5887 : a part of which is being set. */
5888 :
5889 580307579 : for (i = 0; i < n_sets; i++)
5890 191986845 : if (sets[i].rtl)
5891 : {
5892 : /* We can't use the inner dest, because the mode associated with
5893 : a ZERO_EXTRACT is significant. */
5894 137336372 : rtx dest = SET_DEST (sets[i].rtl);
5895 :
5896 : /* Needed for registers to remove the register from its
5897 : previous quantity's chain.
5898 : Needed for memory if this is a nonvarying address, unless
5899 : we have just done an invalidate_memory that covers even those. */
5900 137336372 : if (REG_P (dest) || GET_CODE (dest) == SUBREG)
5901 115459637 : invalidate (dest, VOIDmode);
5902 21876735 : else if (MEM_P (dest))
5903 21856057 : invalidate (dest, VOIDmode);
5904 20678 : else if (GET_CODE (dest) == STRICT_LOW_PART
5905 3039 : || GET_CODE (dest) == ZERO_EXTRACT)
5906 20556 : invalidate (XEXP (dest, 0), GET_MODE (dest));
5907 : }
5908 :
5909 : /* Don't cse over a call to setjmp; on some machines (eg VAX)
5910 : the regs restored by the longjmp come from a later time
5911 : than the setjmp. */
5912 388320734 : if (CALL_P (insn) && find_reg_note (insn, REG_SETJMP, NULL))
5913 : {
5914 2146 : flush_hash_table ();
5915 2146 : goto done;
5916 : }
5917 :
5918 : /* Make sure registers mentioned in destinations
5919 : are safe for use in an expression to be inserted.
5920 : This removes from the hash table
5921 : any invalid entry that refers to one of these registers.
5922 :
5923 : We don't care about the return value from mention_regs because
5924 : we are going to hash the SET_DEST values unconditionally. */
5925 :
5926 580305433 : for (i = 0; i < n_sets; i++)
5927 : {
5928 191986845 : if (sets[i].rtl)
5929 : {
5930 137336372 : rtx x = SET_DEST (sets[i].rtl);
5931 :
5932 137336372 : if (!REG_P (x))
5933 23401032 : mention_regs (x);
5934 : else
5935 : {
5936 : /* We used to rely on all references to a register becoming
5937 : inaccessible when a register changes to a new quantity,
5938 : since that changes the hash code. However, that is not
5939 : safe, since after HASH_SIZE new quantities we get a
5940 : hash 'collision' of a register with its own invalid
5941 : entries. And since SUBREGs have been changed not to
5942 : change their hash code with the hash code of the register,
5943 : it wouldn't work any longer at all. So we have to check
5944 : for any invalid references lying around now.
5945 : This code is similar to the REG case in mention_regs,
5946 : but it knows that reg_tick has been incremented, and
5947 : it leaves reg_in_table as -1 . */
5948 113935340 : unsigned int regno = REGNO (x);
5949 113935340 : unsigned int endregno = END_REGNO (x);
5950 113935340 : unsigned int i;
5951 :
5952 227870680 : for (i = regno; i < endregno; i++)
5953 : {
5954 113935340 : if (REG_IN_TABLE (i) >= 0)
5955 : {
5956 12572431 : remove_invalid_refs (i);
5957 12572431 : REG_IN_TABLE (i) = -1;
5958 : }
5959 : }
5960 : }
5961 : }
5962 : }
5963 :
5964 : /* We may have just removed some of the src_elt's from the hash table.
5965 : So replace each one with the current head of the same class.
5966 : Also check if destination addresses have been removed. */
5967 :
5968 580305433 : for (i = 0; i < n_sets; i++)
5969 191986845 : if (sets[i].rtl)
5970 : {
5971 137336372 : if (sets[i].dest_addr_elt
5972 137336372 : && sets[i].dest_addr_elt->first_same_value == 0)
5973 : {
5974 : /* The elt was removed, which means this destination is not
5975 : valid after this instruction. */
5976 0 : sets[i].rtl = NULL_RTX;
5977 : }
5978 137336372 : else if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
5979 : /* If elt was removed, find current head of same class,
5980 : or 0 if nothing remains of that class. */
5981 : {
5982 10609191 : struct table_elt *elt = sets[i].src_elt;
5983 :
5984 10609191 : while (elt && elt->prev_same_value)
5985 : elt = elt->prev_same_value;
5986 :
5987 21135034 : while (elt && elt->first_same_value == 0)
5988 10562594 : elt = elt->next_same_value;
5989 10572440 : sets[i].src_elt = elt ? elt->first_same_value : 0;
5990 : }
5991 : }
5992 :
5993 : /* Now insert the destinations into their equivalence classes. */
5994 :
5995 580305433 : for (i = 0; i < n_sets; i++)
5996 191986845 : if (sets[i].rtl)
5997 : {
5998 137336372 : rtx dest = SET_DEST (sets[i].rtl);
5999 137336372 : struct table_elt *elt;
6000 :
6001 : /* Don't record value if we are not supposed to risk allocating
6002 : floating-point values in registers that might be wider than
6003 : memory. */
6004 161311781 : if ((flag_float_store
6005 12303 : && MEM_P (dest)
6006 4216 : && FLOAT_MODE_P (GET_MODE (dest)))
6007 : /* Don't record BLKmode values, because we don't know the
6008 : size of it, and can't be sure that other BLKmode values
6009 : have the same or smaller size. */
6010 137333855 : || GET_MODE (dest) == BLKmode
6011 : /* If we didn't put a REG_EQUAL value or a source into the hash
6012 : table, there is no point is recording DEST. */
6013 274670227 : || sets[i].src_elt == 0)
6014 23975409 : continue;
6015 :
6016 : /* STRICT_LOW_PART isn't part of the value BEING set,
6017 : and neither is the SUBREG inside it.
6018 : Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT. */
6019 113360963 : if (GET_CODE (dest) == STRICT_LOW_PART)
6020 0 : dest = SUBREG_REG (XEXP (dest, 0));
6021 :
6022 113360963 : if (REG_P (dest) || GET_CODE (dest) == SUBREG)
6023 : /* Registers must also be inserted into chains for quantities. */
6024 91849355 : if (insert_regs (dest, sets[i].src_elt, true))
6025 : {
6026 : /* If `insert_regs' changes something, the hash code must be
6027 : recalculated. */
6028 91289601 : rehash_using_reg (dest);
6029 91289601 : sets[i].dest_hash = HASH (dest, GET_MODE (dest));
6030 : }
6031 :
6032 : /* If DEST is a paradoxical SUBREG, don't record DEST since the bits
6033 : outside the mode of GET_MODE (SUBREG_REG (dest)) are undefined. */
6034 113360963 : if (paradoxical_subreg_p (dest))
6035 63286 : continue;
6036 :
6037 339893031 : elt = insert (dest, sets[i].src_elt,
6038 113297677 : sets[i].dest_hash, GET_MODE (dest));
6039 :
6040 : /* If this is a constant, insert the constant anchors with the
6041 : equivalent register-offset expressions using register DEST. */
6042 113297677 : if (targetm.const_anchor
6043 0 : && REG_P (dest)
6044 0 : && SCALAR_INT_MODE_P (GET_MODE (dest))
6045 113297677 : && GET_CODE (sets[i].src_elt->exp) == CONST_INT)
6046 0 : insert_const_anchors (dest, sets[i].src_elt->exp, GET_MODE (dest));
6047 :
6048 113297677 : elt->in_memory = (MEM_P (sets[i].inner_dest)
6049 113297677 : && !MEM_READONLY_P (sets[i].inner_dest));
6050 :
6051 : /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
6052 : narrower than M2, and both M1 and M2 are the same number of words,
6053 : we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
6054 : make that equivalence as well.
6055 :
6056 : However, BAR may have equivalences for which gen_lowpart
6057 : will produce a simpler value than gen_lowpart applied to
6058 : BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
6059 : BAR's equivalences. If we don't get a simplified form, make
6060 : the SUBREG. It will not be used in an equivalence, but will
6061 : cause two similar assignments to be detected.
6062 :
6063 : Note the loop below will find SUBREG_REG (DEST) since we have
6064 : already entered SRC and DEST of the SET in the table. */
6065 :
6066 113297677 : if (GET_CODE (dest) == SUBREG
6067 : && (known_equal_after_align_down
6068 193454531 : (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1,
6069 2868238 : GET_MODE_SIZE (GET_MODE (dest)) - 1,
6070 1434119 : UNITS_PER_WORD))
6071 83891 : && !partial_subreg_p (dest)
6072 113331244 : && sets[i].src_elt != 0)
6073 : {
6074 33567 : machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
6075 33567 : struct table_elt *elt, *classp = 0;
6076 :
6077 150681 : for (elt = sets[i].src_elt->first_same_value; elt;
6078 117114 : elt = elt->next_same_value)
6079 : {
6080 117114 : rtx new_src = 0;
6081 117114 : unsigned src_hash;
6082 117114 : struct table_elt *src_elt;
6083 :
6084 : /* Ignore invalid entries. */
6085 117114 : if (!REG_P (elt->exp)
6086 117114 : && ! exp_equiv_p (elt->exp, elt->exp, 1, false))
6087 0 : continue;
6088 :
6089 : /* We may have already been playing subreg games. If the
6090 : mode is already correct for the destination, use it. */
6091 117114 : if (GET_MODE (elt->exp) == new_mode)
6092 : new_src = elt->exp;
6093 : else
6094 : {
6095 117114 : poly_uint64 byte
6096 117114 : = subreg_lowpart_offset (new_mode, GET_MODE (dest));
6097 117114 : new_src = simplify_gen_subreg (new_mode, elt->exp,
6098 117114 : GET_MODE (dest), byte);
6099 : }
6100 :
6101 : /* The call to simplify_gen_subreg fails if the value
6102 : is VOIDmode, yet we can't do any simplification, e.g.
6103 : for EXPR_LISTs denoting function call results.
6104 : It is invalid to construct a SUBREG with a VOIDmode
6105 : SUBREG_REG, hence a zero new_src means we can't do
6106 : this substitution. */
6107 117114 : if (! new_src)
6108 6 : continue;
6109 :
6110 117108 : src_hash = HASH (new_src, new_mode);
6111 117108 : src_elt = lookup (new_src, src_hash, new_mode);
6112 :
6113 : /* Put the new source in the hash table if it isn't
6114 : already. */
6115 117108 : if (src_elt == 0)
6116 : {
6117 40973 : if (insert_regs (new_src, classp, false))
6118 : {
6119 0 : rehash_using_reg (new_src);
6120 0 : src_hash = HASH (new_src, new_mode);
6121 : }
6122 40973 : src_elt = insert (new_src, classp, src_hash, new_mode);
6123 40973 : src_elt->in_memory = elt->in_memory;
6124 40973 : if (GET_CODE (new_src) == ASM_OPERANDS
6125 0 : && elt->cost == MAX_COST)
6126 0 : src_elt->cost = MAX_COST;
6127 : }
6128 76135 : else if (classp && classp != src_elt->first_same_value)
6129 : /* Show that two things that we've seen before are
6130 : actually the same. */
6131 113 : merge_equiv_classes (src_elt, classp);
6132 :
6133 117108 : classp = src_elt->first_same_value;
6134 : /* Ignore invalid entries. */
6135 117108 : while (classp
6136 117108 : && !REG_P (classp->exp)
6137 196952 : && ! exp_equiv_p (classp->exp, classp->exp, 1, false))
6138 0 : classp = classp->next_same_value;
6139 : }
6140 : }
6141 : }
6142 :
6143 : /* Special handling for (set REG0 REG1) where REG0 is the
6144 : "cheapest", cheaper than REG1. After cse, REG1 will probably not
6145 : be used in the sequel, so (if easily done) change this insn to
6146 : (set REG1 REG0) and replace REG1 with REG0 in the previous insn
6147 : that computed their value. Then REG1 will become a dead store
6148 : and won't cloud the situation for later optimizations.
6149 :
6150 : Do not make this change if REG1 is a hard register, because it will
6151 : then be used in the sequel and we may be changing a two-operand insn
6152 : into a three-operand insn.
6153 :
6154 : Also do not do this if we are operating on a copy of INSN. */
6155 :
6156 577541993 : if (n_sets == 1 && sets[0].rtl)
6157 134789668 : try_back_substitute_reg (sets[0].rtl, insn);
6158 :
6159 388320734 : done:;
6160 388320734 : }
6161 :
6162 : /* Remove from the hash table all expressions that reference memory. */
6163 :
6164 : static void
6165 13161198 : invalidate_memory (void)
6166 : {
6167 13161198 : int i;
6168 13161198 : struct table_elt *p, *next;
6169 :
6170 434319534 : for (i = 0; i < HASH_SIZE; i++)
6171 606916487 : for (p = table[i]; p; p = next)
6172 : {
6173 185758151 : next = p->next_same_hash;
6174 185758151 : if (p->in_memory)
6175 19241750 : remove_from_table (p, i);
6176 : }
6177 13161198 : }
6178 :
6179 : /* Perform invalidation on the basis of everything about INSN,
6180 : except for invalidating the actual places that are SET in it.
6181 : This includes the places CLOBBERed, and anything that might
6182 : alias with something that is SET or CLOBBERed. */
6183 :
6184 : static void
6185 388320734 : invalidate_from_clobbers (rtx_insn *insn)
6186 : {
6187 388320734 : rtx x = PATTERN (insn);
6188 :
6189 388320734 : if (GET_CODE (x) == CLOBBER)
6190 : {
6191 65705 : rtx ref = XEXP (x, 0);
6192 65705 : if (ref)
6193 : {
6194 65705 : if (REG_P (ref) || GET_CODE (ref) == SUBREG
6195 12970 : || MEM_P (ref))
6196 65705 : invalidate (ref, VOIDmode);
6197 0 : else if (GET_CODE (ref) == STRICT_LOW_PART
6198 0 : || GET_CODE (ref) == ZERO_EXTRACT)
6199 0 : invalidate (XEXP (ref, 0), GET_MODE (ref));
6200 : }
6201 : }
6202 388255029 : else if (GET_CODE (x) == PARALLEL)
6203 : {
6204 29800430 : int i;
6205 90593898 : for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
6206 : {
6207 60793468 : rtx y = XVECEXP (x, 0, i);
6208 60793468 : if (GET_CODE (y) == CLOBBER)
6209 : {
6210 29418534 : rtx ref = XEXP (y, 0);
6211 29418534 : if (REG_P (ref) || GET_CODE (ref) == SUBREG
6212 243981 : || MEM_P (ref))
6213 29236857 : invalidate (ref, VOIDmode);
6214 181677 : else if (GET_CODE (ref) == STRICT_LOW_PART
6215 181677 : || GET_CODE (ref) == ZERO_EXTRACT)
6216 0 : invalidate (XEXP (ref, 0), GET_MODE (ref));
6217 : }
6218 : }
6219 : }
6220 388320734 : }
6221 :
/* Perform invalidation on the basis of everything about INSN.
   This includes the places CLOBBERed, and anything that might
   alias with something that is SET or CLOBBERed.  */

static void
invalidate_from_sets_and_clobbers (rtx_insn *insn)
{
  rtx tem;
  rtx x = PATTERN (insn);

  /* For calls, also invalidate everything the target recorded as
     clobbered in the call's FUNCTION_USAGE list.  */
  if (CALL_P (insn))
    {
      for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
	{
	  rtx temx = XEXP (tem, 0);
	  if (GET_CODE (temx) == CLOBBER)
	    invalidate (SET_DEST (temx), VOIDmode);
	}
    }

  /* Ensure we invalidate the destination register of a CALL insn.
     This is necessary for machines where this register is a fixed_reg,
     because no other code would invalidate it.  */
  if (GET_CODE (x) == SET && GET_CODE (SET_SRC (x)) == CALL)
    invalidate (SET_DEST (x), VOIDmode);

  else if (GET_CODE (x) == PARALLEL)
    {
      int i;

      /* Invalidate each CLOBBERed place, and the destination of any
	 CALL-valued SET buried inside the PARALLEL.  */
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
	{
	  rtx y = XVECEXP (x, 0, i);
	  if (GET_CODE (y) == CLOBBER)
	    {
	      rtx clobbered = XEXP (y, 0);

	      if (REG_P (clobbered)
		  || GET_CODE (clobbered) == SUBREG)
		invalidate (clobbered, VOIDmode);
	      else if (GET_CODE (clobbered) == STRICT_LOW_PART
		       || GET_CODE (clobbered) == ZERO_EXTRACT)
		invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
	    }
	  else if (GET_CODE (y) == SET && GET_CODE (SET_SRC (y)) == CALL)
	    invalidate (SET_DEST (y), VOIDmode);
	}
    }

  /* Any single register constraint may introduce a conflict, if the associated
     hard register is live.  For example:

     r100=%1
     r101=42
     r102=exp(r101)

     If the first operand r101 of exp is constrained to hard register %1, then
     r100 cannot be trivially substituted by %1 in the following since %1 got
     clobbered.  Such conflicts may stem from single register classes as well
     as hard register constraints.  Since prior RA we do not know which
     alternative will be chosen, be conservative and consider any such hard
     register from any alternative as a potential clobber.  */
  extract_insn (insn);
  for (int nop = recog_data.n_operands - 1; nop >= 0; --nop)
    {
      int c;
      const char *p = recog_data.constraints[nop];
      /* Scan the constraint string; each step advances by the full
	 length of the current constraint (some are multi-character).  */
      for (; (c = *p); p += CONSTRAINT_LEN (c, p))
	if (c == ',')
	  /* Alternative separator: nothing to invalidate.  */
	  ;
	else if (c == '{')
	  {
	    /* '{regname}' is an explicit hard register constraint;
	       conservatively invalidate that hard register.  */
	    int regno = decode_hard_reg_constraint (p);
	    machine_mode mode = recog_data.operand_mode[nop];
	    invalidate_reg (gen_rtx_REG (mode, regno));
	  }
    }
}
6300 :
6301 : static rtx cse_process_note (rtx);
6302 :
6303 : /* A simplify_replace_fn_rtx callback for cse_process_note. Process X,
6304 : part of the REG_NOTES of an insn. Replace any registers with either
6305 : an equivalent constant or the canonical form of the register.
6306 : Only replace addresses if the containing MEM remains valid.
6307 :
6308 : Return the replacement for X, or null if it should be simplified
6309 : recursively. */
6310 :
6311 : static rtx
6312 27574382 : cse_process_note_1 (rtx x, const_rtx, void *)
6313 : {
6314 27574382 : if (MEM_P (x))
6315 : {
6316 1875508 : validate_change (x, &XEXP (x, 0), cse_process_note (XEXP (x, 0)), false);
6317 937754 : return x;
6318 : }
6319 :
6320 26636628 : if (REG_P (x))
6321 : {
6322 5826568 : int i = REG_QTY (REGNO (x));
6323 :
6324 : /* Return a constant or a constant register. */
6325 5826568 : if (REGNO_QTY_VALID_P (REGNO (x)))
6326 : {
6327 1556996 : struct qty_table_elem *ent = &qty_table[i];
6328 :
6329 1556996 : if (ent->const_rtx != NULL_RTX
6330 23918 : && (CONSTANT_P (ent->const_rtx)
6331 18834 : || REG_P (ent->const_rtx)))
6332 : {
6333 5084 : rtx new_rtx = gen_lowpart (GET_MODE (x), ent->const_rtx);
6334 5084 : if (new_rtx)
6335 5084 : return copy_rtx (new_rtx);
6336 : }
6337 : }
6338 :
6339 : /* Otherwise, canonicalize this register. */
6340 5821484 : return canon_reg (x, NULL);
6341 : }
6342 :
6343 : return NULL_RTX;
6344 : }
6345 :
6346 : /* Process X, part of the REG_NOTES of an insn. Replace any registers in it
6347 : with either an equivalent constant or the canonical form of the register.
6348 : Only replace addresses if the containing MEM remains valid. */
6349 :
6350 : static rtx
6351 9466169 : cse_process_note (rtx x)
6352 : {
6353 937754 : return simplify_replace_fn_rtx (x, NULL_RTX, cse_process_note_1, NULL);
6354 : }
6355 :
6356 :
/* Find a path in the CFG, starting with FIRST_BB to perform CSE on.

   DATA is a pointer to a struct cse_basic_block_data, that is used to
   describe the path.
   It is filled with a queue of basic blocks, starting with FIRST_BB
   and following a trace through the CFG.

   If all paths starting at FIRST_BB have been followed, or no new path
   starting at FIRST_BB can be constructed, this function returns FALSE.
   Otherwise, DATA->path is filled and the function returns TRUE indicating
   that a path to follow was found.

   If FOLLOW_JUMPS is false, the maximum path length is 1 and the only
   block in the path will be FIRST_BB.  */

static bool
cse_find_path (basic_block first_bb, struct cse_basic_block_data *data,
	       bool follow_jumps)
{
  basic_block bb;
  edge e;
  int path_size;

  bitmap_set_bit (cse_visited_basic_blocks, first_bb->index);

  /* See if there is a previous path.  */
  path_size = data->path_size;

  /* There is a previous path.  Make sure it started with FIRST_BB.  */
  if (path_size)
    gcc_assert (data->path[0].bb == first_bb);

  /* There was only one basic block in the last path.  Clear the path and
     return, so that paths starting at another basic block can be tried.
     A zero PATH_SIZE at "done" makes us return false.  */
  if (path_size == 1)
    {
      path_size = 0;
      goto done;
    }

  /* If the path was empty from the beginning, construct a new path.  */
  if (path_size == 0)
    data->path[path_size++].bb = first_bb;
  else
    {
      /* Otherwise, path_size must be equal to or greater than 2, because
	 a previous path exists that is at least two basic blocks long.

	 Update the previous branch path, if any.  If the last branch was
	 previously along the branch edge, take the fallthrough edge now.  */
      while (path_size >= 2)
	{
	  basic_block last_bb_in_path, previous_bb_in_path;
	  edge e;

	  --path_size;
	  last_bb_in_path = data->path[path_size].bb;
	  previous_bb_in_path = data->path[path_size - 1].bb;

	  /* If we previously followed a path along the branch edge, try
	     the fallthru edge now.  */
	  if (EDGE_COUNT (previous_bb_in_path->succs) == 2
	      && any_condjump_p (BB_END (previous_bb_in_path))
	      && (e = find_edge (previous_bb_in_path, last_bb_in_path))
	      && e == BRANCH_EDGE (previous_bb_in_path))
	    {
	      bb = FALLTHRU_EDGE (previous_bb_in_path)->dest;
	      if (bb != EXIT_BLOCK_PTR_FOR_FN (cfun)
		  && single_pred_p (bb)
		  /* We used to assert here that we would only see blocks
		     that we have not visited yet.  But we may end up
		     visiting basic blocks twice if the CFG has changed
		     in this run of cse_main, because when the CFG changes
		     the topological sort of the CFG also changes.  A basic
		     blocks that previously had more than two predecessors
		     may now have a single predecessor, and become part of
		     a path that starts at another basic block.

		     We still want to visit each basic block only once, so
		     halt the path here if we have already visited BB.  */
		  && !bitmap_bit_p (cse_visited_basic_blocks, bb->index))
		{
		  bitmap_set_bit (cse_visited_basic_blocks, bb->index);
		  data->path[path_size++].bb = bb;
		  break;
		}
	    }

	  /* Pop the abandoned tail block off the path.  */
	  data->path[path_size].bb = NULL;
	}

      /* If only one block remains in the path, bail.  */
      if (path_size == 1)
	{
	  path_size = 0;
	  goto done;
	}
    }

  /* Extend the path if possible.  */
  if (follow_jumps)
    {
      bb = data->path[path_size - 1].bb;
      while (bb && path_size < param_max_cse_path_length)
	{
	  if (single_succ_p (bb))
	    e = single_succ_edge (bb);
	  else if (EDGE_COUNT (bb->succs) == 2
		   && any_condjump_p (BB_END (bb)))
	    {
	      /* First try to follow the branch.  If that doesn't lead
		 to a useful path, follow the fallthru edge.  */
	      e = BRANCH_EDGE (bb);
	      if (!single_pred_p (e->dest))
		e = FALLTHRU_EDGE (bb);
	    }
	  else
	    e = NULL;

	  if (e
	      && !((e->flags & EDGE_ABNORMAL_CALL) && cfun->has_nonlocal_label)
	      && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
	      && single_pred_p (e->dest)
	      /* Avoid visiting basic blocks twice.  The large comment
		 above explains why this can happen.  */
	      && !bitmap_bit_p (cse_visited_basic_blocks, e->dest->index))
	    {
	      basic_block bb2 = e->dest;
	      bitmap_set_bit (cse_visited_basic_blocks, bb2->index);
	      data->path[path_size++].bb = bb2;
	      bb = bb2;
	    }
	  else
	    bb = NULL;
	}
    }

  /* Publish the path length; an empty path tells the caller that no
     further path starts at FIRST_BB.  */
 done:
  data->path_size = path_size;
  return path_size != 0;
}
6498 :
6499 : /* Dump the path in DATA to file F. NSETS is the number of sets
6500 : in the path. */
6501 :
6502 : static void
6503 317 : cse_dump_path (struct cse_basic_block_data *data, int nsets, FILE *f)
6504 : {
6505 317 : int path_entry;
6506 :
6507 317 : fprintf (f, ";; Following path with %d sets: ", nsets);
6508 1119 : for (path_entry = 0; path_entry < data->path_size; path_entry++)
6509 485 : fprintf (f, "%d ", (data->path[path_entry].bb)->index);
6510 317 : fputc ('\n', f);
6511 317 : fflush (f);
6512 317 : }
6513 :
6514 :
6515 : /* Return true if BB has exception handling successor edges. */
6516 :
6517 : static bool
6518 9074846 : have_eh_succ_edges (basic_block bb)
6519 : {
6520 9074846 : edge e;
6521 9074846 : edge_iterator ei;
6522 :
6523 21367981 : FOR_EACH_EDGE (e, ei, bb->succs)
6524 13291791 : if (e->flags & EDGE_EH)
6525 : return true;
6526 :
6527 : return false;
6528 : }
6529 :
6530 :
6531 : /* Scan to the end of the path described by DATA. Return an estimate of
6532 : the total number of SETs of all insns in the path. */
6533 :
6534 : static void
6535 20988422 : cse_prescan_path (struct cse_basic_block_data *data)
6536 : {
6537 20988422 : int nsets = 0;
6538 20988422 : int path_size = data->path_size;
6539 20988422 : int path_entry;
6540 :
6541 : /* Scan to end of each basic block in the path. */
6542 56933545 : for (path_entry = 0; path_entry < path_size; path_entry++)
6543 : {
6544 35945123 : basic_block bb;
6545 35945123 : rtx_insn *insn;
6546 :
6547 35945123 : bb = data->path[path_entry].bb;
6548 :
6549 481700591 : FOR_BB_INSNS (bb, insn)
6550 : {
6551 445755468 : if (!INSN_P (insn))
6552 57419967 : continue;
6553 :
6554 : /* A PARALLEL can have lots of SETs in it,
6555 : especially if it is really an ASM_OPERANDS. */
6556 388335501 : if (GET_CODE (PATTERN (insn)) == PARALLEL)
6557 30504700 : nsets += XVECLEN (PATTERN (insn), 0);
6558 : else
6559 357830801 : nsets += 1;
6560 : }
6561 : }
6562 :
6563 20988422 : data->nsets = nsets;
6564 20988422 : }
6565 :
6566 : /* Return true if the pattern of INSN uses a LABEL_REF for which
6567 : there isn't a REG_LABEL_OPERAND note. */
6568 :
6569 : static bool
6570 388152724 : check_for_label_ref (rtx_insn *insn)
6571 : {
6572 : /* If this insn uses a LABEL_REF and there isn't a REG_LABEL_OPERAND
6573 : note for it, we must rerun jump since it needs to place the note. If
6574 : this is a LABEL_REF for a CODE_LABEL that isn't in the insn chain,
6575 : don't do this since no REG_LABEL_OPERAND will be added. */
6576 388152724 : subrtx_iterator::array_type array;
6577 1944871514 : FOR_EACH_SUBRTX (iter, array, PATTERN (insn), ALL)
6578 : {
6579 1556720085 : const_rtx x = *iter;
6580 1556720085 : if (GET_CODE (x) == LABEL_REF
6581 20008102 : && !LABEL_REF_NONLOCAL_P (x)
6582 20007275 : && (!JUMP_P (insn)
6583 19969060 : || !label_is_jump_target_p (label_ref_label (x), insn))
6584 38216 : && LABEL_P (label_ref_label (x))
6585 37859 : && INSN_UID (label_ref_label (x)) != 0
6586 1556757944 : && !find_reg_note (insn, REG_LABEL_OPERAND, label_ref_label (x)))
6587 1295 : return true;
6588 : }
6589 388151429 : return false;
6590 388152724 : }
6591 :
6592 : /* Process a single extended basic block described by EBB_DATA. */
6593 :
6594 : static void
6595 20467602 : cse_extended_basic_block (struct cse_basic_block_data *ebb_data)
6596 : {
6597 20467602 : int path_size = ebb_data->path_size;
6598 20467602 : int path_entry;
6599 20467602 : int num_insns = 0;
6600 :
6601 : /* Allocate the space needed by qty_table. */
6602 20467602 : qty_table = XNEWVEC (struct qty_table_elem, max_qty);
6603 :
6604 20467602 : new_basic_block ();
6605 20467602 : cse_ebb_live_in = df_get_live_in (ebb_data->path[0].bb);
6606 20467602 : cse_ebb_live_out = df_get_live_out (ebb_data->path[path_size - 1].bb);
6607 55888383 : for (path_entry = 0; path_entry < path_size; path_entry++)
6608 : {
6609 35420781 : basic_block bb;
6610 35420781 : rtx_insn *insn;
6611 :
6612 35420781 : bb = ebb_data->path[path_entry].bb;
6613 :
6614 : /* Invalidate recorded information for eh regs if there is an EH
6615 : edge pointing to that bb. */
6616 35420781 : if (bb_has_eh_pred (bb))
6617 : {
6618 502115 : df_ref def;
6619 :
6620 2008460 : FOR_EACH_ARTIFICIAL_DEF (def, bb->index)
6621 1004230 : if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
6622 1004230 : invalidate (DF_REF_REG (def), GET_MODE (DF_REF_REG (def)));
6623 : }
6624 :
6625 35420781 : optimize_this_for_speed_p = optimize_bb_for_speed_p (bb);
6626 480244431 : FOR_BB_INSNS (bb, insn)
6627 : {
6628 : /* If we have processed 1,000 insns, flush the hash table to
6629 : avoid extreme quadratic behavior. We must not include NOTEs
6630 : in the count since there may be more of them when generating
6631 : debugging information. If we clear the table at different
6632 : times, code generated with -g -O might be different than code
6633 : generated with -O but not -g.
6634 :
6635 : FIXME: This is a real kludge and needs to be done some other
6636 : way. */
6637 444823650 : if (NONDEBUG_INSN_P (insn)
6638 444823650 : && num_insns++ > param_max_cse_insns)
6639 : {
6640 5816 : flush_hash_table ();
6641 5816 : num_insns = 0;
6642 : }
6643 :
6644 444823650 : if (INSN_P (insn))
6645 : {
6646 : /* Process notes first so we have all notes in canonical forms
6647 : when looking for duplicate operations. */
6648 388320734 : bool changed = false;
6649 619082992 : for (rtx note = REG_NOTES (insn); note; note = XEXP (note, 1))
6650 230762258 : if (REG_NOTE_KIND (note) == REG_EQUAL)
6651 : {
6652 8528415 : rtx newval = cse_process_note (XEXP (note, 0));
6653 8528415 : if (newval != XEXP (note, 0))
6654 : {
6655 37013 : XEXP (note, 0) = newval;
6656 37013 : changed = true;
6657 : }
6658 : }
6659 388320734 : if (changed)
6660 37013 : df_notes_rescan (insn);
6661 :
6662 388320734 : cse_insn (insn);
6663 :
6664 : /* If we haven't already found an insn where we added a LABEL_REF,
6665 : check this one. */
6666 388320734 : if (INSN_P (insn) && !recorded_label_ref
6667 776473458 : && check_for_label_ref (insn))
6668 1295 : recorded_label_ref = true;
6669 : }
6670 : }
6671 :
6672 : /* With non-call exceptions, we are not always able to update
6673 : the CFG properly inside cse_insn. So clean up possibly
6674 : redundant EH edges here. */
6675 35420781 : if (cfun->can_throw_non_call_exceptions && have_eh_succ_edges (bb))
6676 998656 : cse_cfg_altered |= purge_dead_edges (bb);
6677 :
6678 : /* If we changed a conditional jump, we may have terminated
6679 : the path we are following. Check that by verifying that
6680 : the edge we would take still exists. If the edge does
6681 : not exist anymore, purge the remainder of the path.
6682 : Note that this will cause us to return to the caller. */
6683 35420781 : if (path_entry < path_size - 1)
6684 : {
6685 14956137 : basic_block next_bb = ebb_data->path[path_entry + 1].bb;
6686 14956137 : if (!find_edge (bb, next_bb))
6687 : {
6688 3397 : do
6689 : {
6690 3397 : path_size--;
6691 :
6692 : /* If we truncate the path, we must also reset the
6693 : visited bit on the remaining blocks in the path,
6694 : or we will never visit them at all. */
6695 3397 : bitmap_clear_bit (cse_visited_basic_blocks,
6696 3397 : ebb_data->path[path_size].bb->index);
6697 3397 : ebb_data->path[path_size].bb = NULL;
6698 : }
6699 3397 : while (path_size - 1 != path_entry);
6700 2958 : ebb_data->path_size = path_size;
6701 : }
6702 : }
6703 :
6704 : /* If this is a conditional jump insn, record any known
6705 : equivalences due to the condition being tested. */
6706 35420781 : insn = BB_END (bb);
6707 35420781 : if (path_entry < path_size - 1
6708 50044521 : && EDGE_COUNT (bb->succs) == 2
6709 14623740 : && JUMP_P (insn)
6710 14623740 : && single_set (insn)
6711 14623740 : && any_condjump_p (insn)
6712 : /* single_set may return non-NULL even for multiple sets
6713 : if there are REG_UNUSED notes. record_jump_equiv only
6714 : looks at pc_set and doesn't consider other sets that
6715 : could affect the value, and the recorded equivalence
6716 : can extend the lifetime of the compared REG, so use
6717 : also !multiple_sets check to verify it is exactly one
6718 : set. */
6719 50044521 : && !multiple_sets (insn))
6720 : {
6721 14623740 : basic_block next_bb = ebb_data->path[path_entry + 1].bb;
6722 14623740 : bool taken = (next_bb == BRANCH_EDGE (bb)->dest);
6723 14623740 : record_jump_equiv (insn, taken);
6724 : }
6725 : }
6726 :
6727 20467602 : gcc_assert (next_qty <= max_qty);
6728 :
6729 20467602 : free (qty_table);
6730 20467602 : }
6731 :
6732 :
/* Perform cse on the instructions of a function.
   F is the first instruction.
   NREGS is one plus the highest pseudo-reg number used in the instruction.

   Return 2 if jump optimizations should be redone due to simplifications
   in conditional jump instructions.
   Return 1 if the CFG should be cleaned up because it has been modified.
   Return 0 otherwise.  */

static int
cse_main (rtx_insn *f ATTRIBUTE_UNUSED, int nregs)
{
  struct cse_basic_block_data ebb_data;
  basic_block bb;
  int *rc_order = XNEWVEC (int, last_basic_block_for_fn (cfun));
  int i, n_blocks;

  /* CSE doesn't use dominance info but can invalidate it in different ways.
     For simplicity free dominance info here.  */
  free_dominance_info (CDI_DOMINATORS);

  df_set_flags (DF_LR_RUN_DCE);
  df_note_add_problem ();
  df_analyze ();
  df_set_flags (DF_DEFER_INSN_RESCAN);

  reg_scan (get_insns (), max_reg_num ());
  init_cse_reg_info (nregs);

  ebb_data.path = XNEWVEC (struct branch_path,
			   param_max_cse_path_length);

  cse_cfg_altered = false;
  cse_jumps_altered = false;
  recorded_label_ref = false;
  ebb_data.path_size = 0;
  ebb_data.nsets = 0;
  /* Use CSE's own RTL hooks while the pass runs; restored below.  */
  rtl_hooks = cse_rtl_hooks;

  init_recog ();
  init_alias_analysis ();

  reg_eqv_table = XNEWVEC (struct reg_eqv_elem, nregs);

  /* Set up the table of already visited basic blocks.  */
  cse_visited_basic_blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
  bitmap_clear (cse_visited_basic_blocks);

  /* Loop over basic blocks in reverse completion order (RPO),
     excluding the ENTRY and EXIT blocks.  */
  n_blocks = pre_and_rev_post_order_compute (NULL, rc_order, false);
  i = 0;
  while (i < n_blocks)
    {
      /* Find the first block in the RPO queue that we have not yet
	 processed before.  */
      do
	{
	  bb = BASIC_BLOCK_FOR_FN (cfun, rc_order[i++]);
	}
      while (bitmap_bit_p (cse_visited_basic_blocks, bb->index)
	     && i < n_blocks);

      /* Find all paths starting with BB, and process them.  */
      while (cse_find_path (bb, &ebb_data, flag_cse_follow_jumps))
	{
	  /* Pre-scan the path.  */
	  cse_prescan_path (&ebb_data);

	  /* If this basic block has no sets, skip it.  */
	  if (ebb_data.nsets == 0)
	    continue;

	  /* Get a reasonable estimate for the maximum number of qty's
	     needed for this path.  For this, we take the number of sets
	     and multiply that by MAX_RECOG_OPERANDS.  */
	  max_qty = ebb_data.nsets * MAX_RECOG_OPERANDS;

	  /* Dump the path we're about to process.  */
	  if (dump_file)
	    cse_dump_path (&ebb_data, ebb_data.nsets, dump_file);

	  cse_extended_basic_block (&ebb_data);
	}
    }

  /* Clean up.  */
  end_alias_analysis ();
  free (reg_eqv_table);
  free (ebb_data.path);
  sbitmap_free (cse_visited_basic_blocks);
  free (rc_order);
  rtl_hooks = general_rtl_hooks;

  if (cse_jumps_altered || recorded_label_ref)
    return 2;
  else if (cse_cfg_altered)
    return 1;
  else
    return 0;
}
6834 :
/* Count the number of times registers are used (not set) in X.
   COUNTS is an array in which we accumulate the count, INCR is how much
   we count each register usage.

   Don't count a usage of DEST, which is the SET_DEST of a SET which
   contains X in its SET_SRC.  This is because such a SET does not
   modify the liveness of DEST.
   DEST is set to pc_rtx for a trapping insn, or for an insn with side effects.
   We must then count uses of a SET_DEST regardless, because the insn can't be
   deleted here.
   Also count uses of a SET_DEST if it has been used by an earlier insn,
   but in that case only when incrementing and not when decrementing, effectively
   making setters of such a pseudo non-eliminable.  This is for cases like
   (set (reg x) (expr))
   ...
   (set (reg y) (expr (reg (x))))
   ...
   (set (reg x) (expr (reg (x))))
   where we can't eliminate the last insn because x is still used, if y
   is unused we can eliminate the middle insn and when considering the first insn
   we used to eliminate it despite it being used in the last insn.  */

static void
count_reg_usage (rtx x, int *counts, rtx dest, int incr)
{
  enum rtx_code code;
  rtx note;
  const char *fmt;
  int i, j;

  if (x == 0)
    return;

  switch (code = GET_CODE (x))
    {
    case REG:
      /* Skip the use only when it is DEST itself; a DEST that already
	 has uses recorded is counted anyway when incrementing (see the
	 head comment for why).  */
      if (x != dest || (incr > 0 && counts[REGNO (x)]))
	counts[REGNO (x)] += incr;
      return;

    /* Leaf rtxes that cannot contain register uses.  */
    case PC:
    case CONST:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
      return;

    case CLOBBER:
      /* If we are clobbering a MEM, mark any registers inside the address
	 as being used.  */
      if (MEM_P (XEXP (x, 0)))
	count_reg_usage (XEXP (XEXP (x, 0), 0), counts, NULL_RTX, incr);
      return;

    case SET:
      /* Unless we are setting a REG, count everything in SET_DEST.  */
      if (!REG_P (SET_DEST (x)))
	count_reg_usage (SET_DEST (x), counts, NULL_RTX, incr);
      /* Keep an outer DEST (from a trapping/side-effect insn) if one was
	 passed in; otherwise suppress counting of this SET's own DEST.  */
      count_reg_usage (SET_SRC (x), counts,
		       dest ? dest : SET_DEST (x),
		       incr);
      return;

    case DEBUG_INSN:
      /* Uses inside debug insns are tracked separately by the caller.  */
      return;

    case CALL_INSN:
    case INSN:
    case JUMP_INSN:
      /* We expect dest to be NULL_RTX here.  If the insn may throw,
	 or if it cannot be deleted due to side-effects, mark this fact
	 by setting DEST to pc_rtx.  */
      if ((!cfun->can_delete_dead_exceptions && !insn_nothrow_p (x))
	  || side_effects_p (PATTERN (x)))
	dest = pc_rtx;
      if (code == CALL_INSN)
	count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, dest, incr);
      count_reg_usage (PATTERN (x), counts, dest, incr);

      /* Things used in a REG_EQUAL note aren't dead since loop may try to
	 use them.  */

      note = find_reg_equal_equiv_note (x);
      if (note)
	{
	  rtx eqv = XEXP (note, 0);

	  if (GET_CODE (eqv) == EXPR_LIST)
	  /* This REG_EQUAL note describes the result of a function call.
	     Process all the arguments.  */
	    do
	      {
		count_reg_usage (XEXP (eqv, 0), counts, dest, incr);
		eqv = XEXP (eqv, 1);
	      }
	    while (eqv && GET_CODE (eqv) == EXPR_LIST);
	  else
	    count_reg_usage (eqv, counts, dest, incr);
	}
      return;

    case EXPR_LIST:
      if (REG_NOTE_KIND (x) == REG_EQUAL
	  || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x,0)) == USE)
	  /* FUNCTION_USAGE expression lists may include (CLOBBER (mem /u)),
	     involving registers in the address.  */
	  || GET_CODE (XEXP (x, 0)) == CLOBBER)
	count_reg_usage (XEXP (x, 0), counts, NULL_RTX, incr);

      /* Recurse over the rest of the list.  */
      count_reg_usage (XEXP (x, 1), counts, NULL_RTX, incr);
      return;

    case ASM_OPERANDS:
      /* Iterate over just the inputs, not the constraints as well.  */
      for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
	count_reg_usage (ASM_OPERANDS_INPUT (x, i), counts, dest, incr);
      return;

    case INSN_LIST:
    case INT_LIST:
      gcc_unreachable ();

    default:
      break;
    }

  /* Generic recursion for all other rtx codes: walk every 'e' operand
     and every element of every 'E' vector operand.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	count_reg_usage (XEXP (x, i), counts, dest, incr);
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  count_reg_usage (XVECEXP (x, i, j), counts, dest, incr);
    }
}
6971 :
6972 : /* Return true if X is a dead register. */
6973 :
6974 : static inline bool
6975 778996717 : is_dead_reg (const_rtx x, int *counts)
6976 : {
6977 778996717 : return (REG_P (x)
6978 344106219 : && REGNO (x) >= FIRST_PSEUDO_REGISTER
6979 214942524 : && counts[REGNO (x)] == 0);
6980 : }
6981 :
6982 : /* Return true if set is live. */
6983 : static bool
6984 369957686 : set_live_p (rtx set, int *counts)
6985 : {
6986 369957686 : if (set_noop_p (set))
6987 : return false;
6988 :
6989 369937172 : if (!is_dead_reg (SET_DEST (set), counts)
6990 8402495 : || side_effects_p (SET_SRC (set)))
6991 361597941 : return true;
6992 :
6993 : return false;
6994 : }
6995 :
/* Return true if insn is live.  */

static bool
insn_live_p (rtx_insn *insn, int *counts)
{
  int i;
  /* An insn that may throw (when dead exceptions may not be deleted)
     must be kept regardless of its sets.  */
  if (!cfun->can_delete_dead_exceptions && !insn_nothrow_p (insn))
    return true;
  else if (GET_CODE (PATTERN (insn)) == SET)
    return set_live_p (PATTERN (insn), counts);
  else if (GET_CODE (PATTERN (insn)) == PARALLEL)
    {
      /* A PARALLEL is live if any contained SET is live, or if it
	 contains anything other than SETs, CLOBBERs and USEs.  */
      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
	{
	  rtx elt = XVECEXP (PATTERN (insn), 0, i);

	  if (GET_CODE (elt) == SET)
	    {
	      if (set_live_p (elt, counts))
		return true;
	    }
	  else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
	    return true;
	}
      return false;
    }
  else if (DEBUG_INSN_P (insn))
    {
      /* Debug markers carry no binding and are always kept.  */
      if (DEBUG_MARKER_INSN_P (insn))
	return true;

      /* A debug bind whose decl is already shadowed by a later bind
	 (tracked via TREE_VISITED by the caller) is dead.  */
      if (DEBUG_BIND_INSN_P (insn)
	  && TREE_VISITED (INSN_VAR_LOCATION_DECL (insn)))
	return false;

      return true;
    }
  else
    return true;
}
7036 :
7037 : /* Count the number of stores into pseudo. Callback for note_stores. */
7038 :
7039 : static void
7040 254451957 : count_stores (rtx x, const_rtx set ATTRIBUTE_UNUSED, void *data)
7041 : {
7042 254451957 : int *counts = (int *) data;
7043 254451957 : if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER)
7044 95081845 : counts[REGNO (x)]++;
7045 254451957 : }
7046 :
/* Return true if DEBUG_INSN pattern PAT needs to be reset because some dead
   pseudo doesn't have a replacement.  COUNTS[X] is zero if register X
   is dead and REPLACEMENTS[X] is null if it has no replacement.
   Set *SEEN_REPL to true if we see a dead register that does have
   a replacement.  */

static bool
is_dead_debug_insn (const_rtx pat, int *counts, rtx *replacements,
		    bool *seen_repl)
{
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, pat, NONCONST)
    {
      const_rtx x = *iter;
      if (is_dead_reg (x, counts))
	{
	  /* A dead pseudo with a DEBUG_EXPR replacement is fine; one
	     without forces the whole location to be reset.  */
	  if (replacements && replacements[REGNO (x)] != NULL_RTX)
	    *seen_repl = true;
	  else
	    return true;
	}
    }
  return false;
}
7071 :
7072 : /* Replace a dead pseudo in a DEBUG_INSN with replacement DEBUG_EXPR.
7073 : Callback for simplify_replace_fn_rtx. */
7074 :
7075 : static rtx
7076 40700 : replace_dead_reg (rtx x, const_rtx old_rtx ATTRIBUTE_UNUSED, void *data)
7077 : {
7078 40700 : rtx *replacements = (rtx *) data;
7079 :
7080 40700 : if (REG_P (x)
7081 26304 : && REGNO (x) >= FIRST_PSEUDO_REGISTER
7082 66982 : && replacements[REGNO (x)] != NULL_RTX)
7083 : {
7084 25516 : if (GET_MODE (x) == GET_MODE (replacements[REGNO (x)]))
7085 : return replacements[REGNO (x)];
7086 0 : return lowpart_subreg (GET_MODE (x), replacements[REGNO (x)],
7087 0 : GET_MODE (replacements[REGNO (x)]));
7088 : }
7089 : return NULL_RTX;
7090 : }
7091 :
/* Scan all the insns and delete any that are dead; i.e., they store a register
   that is never used or they copy a register to itself.

   This is used to remove insns made obviously dead by cse, loop or other
   optimizations.  It improves the heuristics in loop since it won't try to
   move dead invariants out of loops or make givs for dead quantities.  The
   remaining passes of the compilation are also sped up.

   INSNS is the insn chain to scan; NREG is the number of registers.
   Returns the number of dead insns deleted.  */

int
delete_trivially_dead_insns (rtx_insn *insns, int nreg)
{
  int *counts;
  rtx_insn *insn, *prev;
  rtx *replacements = NULL;
  int ndead = 0;

  timevar_push (TV_DELETE_TRIVIALLY_DEAD);
  /* First count the number of times each register is used.  */
  if (MAY_HAVE_DEBUG_BIND_INSNS)
    {
      counts = XCNEWVEC (int, nreg * 3);
      for (insn = insns; insn; insn = NEXT_INSN (insn))
	if (DEBUG_BIND_INSN_P (insn))
	  {
	    count_reg_usage (INSN_VAR_LOCATION_LOC (insn), counts + nreg,
			     NULL_RTX, 1);
	    /* Clear TREE_VISITED so the backward walk below can use it
	       to track later binds of the same decl.  */
	    TREE_VISITED (INSN_VAR_LOCATION_DECL (insn)) = 0;
	  }
	else if (INSN_P (insn))
	  {
	    count_reg_usage (insn, counts, NULL_RTX, 1);
	    note_stores (insn, count_stores, counts + nreg * 2);
	  }
      /* If there can be debug insns, COUNTS are 3 consecutive arrays.
	 First one counts how many times each pseudo is used outside
	 of debug insns, second counts how many times each pseudo is
	 used in debug insns and third counts how many times a pseudo
	 is stored.  */
    }
  else
    {
      counts = XCNEWVEC (int, nreg);
      for (insn = insns; insn; insn = NEXT_INSN (insn))
	if (INSN_P (insn))
	  count_reg_usage (insn, counts, NULL_RTX, 1);
      /* If no debug insns can be present, COUNTS is just an array
	 which counts how many times each pseudo is used.  */
    }
  /* Pseudo PIC register should be considered as used due to possible
     new usages generated.  */
  if (!reload_completed
      && pic_offset_table_rtx
      && REGNO (pic_offset_table_rtx) >= FIRST_PSEUDO_REGISTER)
    counts[REGNO (pic_offset_table_rtx)]++;
  /* Go from the last insn to the first and delete insns that only set unused
     registers or copy a register to itself.  As we delete an insn, remove
     usage counts for registers it uses.

     The first jump optimization pass may leave a real insn as the last
     insn in the function.   We must not skip that insn or we may end
     up deleting code that is not really dead.

     If some otherwise unused register is only used in DEBUG_INSNs,
     try to create a DEBUG_EXPR temporary and emit a DEBUG_INSN before
     the setter.  Then go through DEBUG_INSNs and if a DEBUG_EXPR
     has been created for the unused register, replace it with
     the DEBUG_EXPR, otherwise reset the DEBUG_INSN.  */
  auto_vec<tree, 32> later_debug_set_vars;
  for (insn = get_last_insn (); insn; insn = prev)
    {
      int live_insn = 0;

      /* Remember PREV now; deleting INSN invalidates its links.  */
      prev = PREV_INSN (insn);
      if (!INSN_P (insn))
	continue;

      live_insn = insn_live_p (insn, counts);

      /* If this is a dead insn, delete it and show registers in it aren't
	 being used.  */

      if (! live_insn && dbg_cnt (delete_trivial_dead))
	{
	  if (DEBUG_INSN_P (insn))
	    {
	      if (DEBUG_BIND_INSN_P (insn))
		count_reg_usage (INSN_VAR_LOCATION_LOC (insn), counts + nreg,
				 NULL_RTX, -1);
	    }
	  else
	    {
	      rtx set;
	      /* Before deleting a real insn, see if its single dead
		 destination is still referenced by debug insns; if so,
		 preserve its value in a DEBUG_EXPR.  */
	      if (MAY_HAVE_DEBUG_BIND_INSNS
		  && (set = single_set (insn)) != NULL_RTX
		  && is_dead_reg (SET_DEST (set), counts)
		  /* Used at least once in some DEBUG_INSN.  */
		  && counts[REGNO (SET_DEST (set)) + nreg] > 0
		  /* And set exactly once.  */
		  && counts[REGNO (SET_DEST (set)) + nreg * 2] == 1
		  && !side_effects_p (SET_SRC (set))
		  && asm_noperands (PATTERN (insn)) < 0)
		{
		  rtx dval, bind_var_loc;
		  rtx_insn *bind;

		  /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL).  */
		  dval = make_debug_expr_from_rtl (SET_DEST (set));

		  /* Emit a debug bind insn before the insn in which
		     reg dies.  */
		  bind_var_loc =
		    gen_rtx_VAR_LOCATION (GET_MODE (SET_DEST (set)),
					  DEBUG_EXPR_TREE_DECL (dval),
					  SET_SRC (set),
					  VAR_INIT_STATUS_INITIALIZED);
		  count_reg_usage (bind_var_loc, counts + nreg, NULL_RTX, 1);

		  bind = emit_debug_insn_before (bind_var_loc, insn);
		  df_insn_rescan (bind);

		  if (replacements == NULL)
		    replacements = XCNEWVEC (rtx, nreg);
		  replacements[REGNO (SET_DEST (set))] = dval;
		}

	      count_reg_usage (insn, counts, NULL_RTX, -1);
	      ndead++;
	    }
	  cse_cfg_altered |= delete_insn_and_edges (insn);
	}
      else
	{
	  /* A live non-debug insn (or a marker) ends the run of
	     consecutive debug binds: forget the shadowing decls.  */
	  if (!DEBUG_INSN_P (insn) || DEBUG_MARKER_INSN_P (insn))
	    {
	      for (tree var : later_debug_set_vars)
		TREE_VISITED (var) = 0;
	      later_debug_set_vars.truncate (0);
	    }
	  else if (DEBUG_BIND_INSN_P (insn)
		   && !TREE_VISITED (INSN_VAR_LOCATION_DECL (insn)))
	    {
	      /* Record that this decl is bound later, so earlier binds
		 in the same run can be considered dead.  */
	      later_debug_set_vars.safe_push (INSN_VAR_LOCATION_DECL (insn));
	      TREE_VISITED (INSN_VAR_LOCATION_DECL (insn)) = 1;
	    }
	}
    }

  if (MAY_HAVE_DEBUG_BIND_INSNS)
    {
      for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
	if (DEBUG_BIND_INSN_P (insn))
	  {
	    /* If this debug insn references a dead register that wasn't replaced
	       with an DEBUG_EXPR, reset the DEBUG_INSN.  */
	    bool seen_repl = false;
	    if (is_dead_debug_insn (INSN_VAR_LOCATION_LOC (insn),
				    counts, replacements, &seen_repl))
	      {
		INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
		df_insn_rescan (insn);
	      }
	    else if (seen_repl)
	      {
		INSN_VAR_LOCATION_LOC (insn)
		  = simplify_replace_fn_rtx (INSN_VAR_LOCATION_LOC (insn),
					     NULL_RTX, replace_dead_reg,
					     replacements);
		df_insn_rescan (insn);
	      }
	  }
      free (replacements);
    }

  if (dump_file && ndead)
    fprintf (dump_file, "Deleted %i trivially dead insns\n",
	     ndead);
  /* Clean up.  */
  free (counts);
  timevar_pop (TV_DELETE_TRIVIALLY_DEAD);
  return ndead;
}
7273 :
/* If LOC contains references to NEWREG in a different mode, change them
   to use NEWREG instead.  ARRAY is scratch storage for the sub-rtx
   iterator; INSN is the insn the changes are queued against.  */

static void
cse_change_cc_mode (subrtx_ptr_iterator::array_type &array,
		    rtx *loc, rtx_insn *insn, rtx newreg)
{
  FOR_EACH_SUBRTX_PTR (iter, array, loc, NONCONST)
    {
      rtx *loc = *iter;
      rtx x = *loc;
      if (x
	  && REG_P (x)
	  && REGNO (x) == REGNO (newreg)
	  && GET_MODE (x) != GET_MODE (newreg))
	{
	  /* Queue the replacement (in-group mode); the caller commits
	     everything at once with apply_change_group.  */
	  validate_change (insn, loc, newreg, 1);
	  /* The whole rtx was replaced, so don't descend into it.  */
	  iter.skip_subrtxes ();
	}
    }
}
7295 :
7296 : /* Change the mode of any reference to the register REGNO (NEWREG) to
7297 : GET_MODE (NEWREG) in INSN. */
7298 :
7299 : static void
7300 27849 : cse_change_cc_mode_insn (rtx_insn *insn, rtx newreg)
7301 : {
7302 27849 : int success;
7303 :
7304 27849 : if (!INSN_P (insn))
7305 0 : return;
7306 :
7307 27849 : subrtx_ptr_iterator::array_type array;
7308 27849 : cse_change_cc_mode (array, &PATTERN (insn), insn, newreg);
7309 27849 : cse_change_cc_mode (array, ®_NOTES (insn), insn, newreg);
7310 :
7311 : /* If the following assertion was triggered, there is most probably
7312 : something wrong with the cc_modes_compatible back end function.
7313 : CC modes only can be considered compatible if the insn - with the mode
7314 : replaced by any of the compatible modes - can still be recognized. */
7315 27849 : success = apply_change_group ();
7316 27849 : gcc_assert (success);
7317 27849 : }
7318 :
7319 : /* Change the mode of any reference to the register REGNO (NEWREG) to
7320 : GET_MODE (NEWREG), starting at START. Stop before END. Stop at
7321 : any instruction which modifies NEWREG. */
7322 :
7323 : static void
7324 19148 : cse_change_cc_mode_insns (rtx_insn *start, rtx_insn *end, rtx newreg)
7325 : {
7326 19148 : rtx_insn *insn;
7327 :
7328 38700 : for (insn = start; insn != end; insn = NEXT_INSN (insn))
7329 : {
7330 21026 : if (! INSN_P (insn))
7331 0 : continue;
7332 :
7333 21026 : if (reg_set_p (newreg, insn))
7334 : return;
7335 :
7336 19552 : cse_change_cc_mode_insn (insn, newreg);
7337 : }
7338 : }
7339 :
/* BB is a basic block which finishes with CC_REG as a condition code
   register which is set to CC_SRC.  Look through the successors of BB
   to find blocks which have a single predecessor (i.e., this one),
   and look through those blocks for an assignment to CC_REG which is
   equivalent to CC_SRC.  CAN_CHANGE_MODE indicates whether we are
   permitted to change the mode of CC_SRC to a compatible mode.  This
   returns VOIDmode if no equivalent assignments were found.
   Otherwise it returns the mode which CC_SRC should wind up with.
   ORIG_BB should be the same as BB in the outermost cse_cc_succs call,
   but is passed unmodified down to recursive calls in order to prevent
   endless recursion.

   The main complexity in this function is handling the mode issues.
   We may have more than one duplicate which we can eliminate, and we
   try to find a mode which will work for multiple duplicates.  */

static machine_mode
cse_cc_succs (basic_block bb, basic_block orig_bb, rtx cc_reg, rtx cc_src,
	      bool can_change_mode)
{
  bool found_equiv;
  machine_mode mode;
  unsigned int insn_count;
  edge e;
  rtx_insn *insns[2];
  machine_mode modes[2];
  rtx_insn *last_insns[2];
  unsigned int i;
  rtx newreg;
  edge_iterator ei;

  /* We expect to have two successors.  Look at both before picking
     the final mode for the comparison.  If we have more successors
     (i.e., some sort of table jump, although that seems unlikely),
     then we require all beyond the first two to use the same
     mode.  */

  found_equiv = false;
  mode = GET_MODE (cc_src);
  insn_count = 0;
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      rtx_insn *insn;
      rtx_insn *end;

      /* Skip abnormal/EH edges.  */
      if (e->flags & EDGE_COMPLEX)
	continue;

      /* Only look at successors dominated solely by BB.  */
      if (EDGE_COUNT (e->dest->preds) != 1
	  || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
	  /* Avoid endless recursion on unreachable blocks.  */
	  || e->dest == orig_bb)
	continue;

      end = NEXT_INSN (BB_END (e->dest));
      for (insn = BB_HEAD (e->dest); insn != end; insn = NEXT_INSN (insn))
	{
	  rtx set;

	  if (! INSN_P (insn))
	    continue;

	  /* If CC_SRC is modified, we have to stop looking for
	     something which uses it.  */
	  if (modified_in_p (cc_src, insn))
	    break;

	  /* Check whether INSN sets CC_REG to CC_SRC.  */
	  set = single_set (insn);
	  if (set
	      && REG_P (SET_DEST (set))
	      && REGNO (SET_DEST (set)) == REGNO (cc_reg))
	    {
	      bool found;
	      machine_mode set_mode;
	      machine_mode comp_mode;

	      found = false;
	      set_mode = GET_MODE (SET_SRC (set));
	      comp_mode = set_mode;
	      if (rtx_equal_p (cc_src, SET_SRC (set)))
		found = true;
	      else if (GET_CODE (cc_src) == COMPARE
		       && GET_CODE (SET_SRC (set)) == COMPARE
		       && mode != set_mode
		       && rtx_equal_p (XEXP (cc_src, 0),
				       XEXP (SET_SRC (set), 0))
		       && rtx_equal_p (XEXP (cc_src, 1),
				       XEXP (SET_SRC (set), 1)))

		{
		  /* Same comparison, different CC mode: ask the target
		     whether one mode subsumes both.  */
		  comp_mode = targetm.cc_modes_compatible (mode, set_mode);
		  if (comp_mode != VOIDmode
		      && (can_change_mode || comp_mode == mode))
		    found = true;
		}

	      if (found)
		{
		  found_equiv = true;
		  if (insn_count < ARRAY_SIZE (insns))
		    {
		      insns[insn_count] = insn;
		      modes[insn_count] = set_mode;
		      last_insns[insn_count] = end;
		      ++insn_count;

		      if (mode != comp_mode)
			{
			  gcc_assert (can_change_mode);
			  mode = comp_mode;

			  /* The modified insn will be re-recognized later.  */
			  PUT_MODE (cc_src, mode);
			}
		    }
		  else
		    {
		      if (set_mode != mode)
			{
			  /* We found a matching expression in the
			     wrong mode, but we don't have room to
			     store it in the array.  Punt.  This case
			     should be rare.  */
			  break;
			}
		      /* INSN sets CC_REG to a value equal to CC_SRC
			 with the right mode.  We can simply delete
			 it.  */
		      delete_insn (insn);
		    }

		  /* We found an instruction to delete.  Keep looking,
		     in the hopes of finding a three-way jump.  */
		  continue;
		}

	      /* We found an instruction which sets the condition
		 code, so don't look any farther.  */
	      break;
	    }

	  /* If INSN sets CC_REG in some other way, don't look any
	     farther.  */
	  if (reg_set_p (cc_reg, insn))
	    break;
	}

      /* If we fell off the bottom of the block, we can keep looking
	 through successors.  We pass CAN_CHANGE_MODE as false because
	 we aren't prepared to handle compatibility between the
	 further blocks and this block.  */
      if (insn == end)
	{
	  machine_mode submode;

	  submode = cse_cc_succs (e->dest, orig_bb, cc_reg, cc_src, false);
	  if (submode != VOIDmode)
	    {
	      gcc_assert (submode == mode);
	      found_equiv = true;
	      can_change_mode = false;
	    }
	}
    }

  if (! found_equiv)
    return VOIDmode;

  /* Now INSN_COUNT is the number of instructions we found which set
     CC_REG to a value equivalent to CC_SRC.  The instructions are in
     INSNS.  The modes used by those instructions are in MODES.  */

  newreg = NULL_RTX;
  for (i = 0; i < insn_count; ++i)
    {
      if (modes[i] != mode)
	{
	  /* We need to change the mode of CC_REG in INSNS[i] and
	     subsequent instructions.  */
	  if (! newreg)
	    {
	      if (GET_MODE (cc_reg) == mode)
		newreg = cc_reg;
	      else
		newreg = gen_rtx_REG (mode, REGNO (cc_reg));
	    }
	  cse_change_cc_mode_insns (NEXT_INSN (insns[i]), last_insns[i],
				    newreg);
	}

      cse_cfg_altered |= delete_insn_and_edges (insns[i]);
    }

  return mode;
}
7536 :
/* If we have a fixed condition code register (or two), walk through
   the instructions and try to eliminate duplicate assignments.  */

static void
cse_condition_code_reg (void)
{
  unsigned int cc_regno_1;
  unsigned int cc_regno_2;
  rtx cc_reg_1;
  rtx cc_reg_2;
  basic_block bb;

  /* Targets without fixed CC registers have nothing for us to do.  */
  if (! targetm.fixed_condition_code_regs (&cc_regno_1, &cc_regno_2))
    return;

  cc_reg_1 = gen_rtx_REG (CCmode, cc_regno_1);
  if (cc_regno_2 != INVALID_REGNUM)
    cc_reg_2 = gen_rtx_REG (CCmode, cc_regno_2);
  else
    cc_reg_2 = NULL_RTX;

  FOR_EACH_BB_FN (bb, cfun)
    {
      rtx_insn *last_insn;
      rtx cc_reg;
      rtx_insn *insn;
      rtx_insn *cc_src_insn;
      rtx cc_src;
      machine_mode mode;
      machine_mode orig_mode;

      /* Look for blocks which end with a conditional jump based on a
	 condition code register.  Then look for the instruction which
	 sets the condition code register.  Then look through the
	 successor blocks for instructions which set the condition
	 code register to the same value.  There are other possible
	 uses of the condition code register, but these are by far the
	 most common and the ones which we are most likely to be able
	 to optimize.  */

      last_insn = BB_END (bb);
      if (!JUMP_P (last_insn))
	continue;

      if (reg_referenced_p (cc_reg_1, PATTERN (last_insn)))
	cc_reg = cc_reg_1;
      else if (cc_reg_2 && reg_referenced_p (cc_reg_2, PATTERN (last_insn)))
	cc_reg = cc_reg_2;
      else
	continue;

      /* Scan backwards from the jump for the single_set that writes
	 CC_REG; stop if CC_REG is set some other way first.  */
      cc_src_insn = NULL;
      cc_src = NULL_RTX;
      for (insn = PREV_INSN (last_insn);
	   insn && insn != PREV_INSN (BB_HEAD (bb));
	   insn = PREV_INSN (insn))
	{
	  rtx set;

	  if (! INSN_P (insn))
	    continue;
	  set = single_set (insn);
	  if (set
	      && REG_P (SET_DEST (set))
	      && REGNO (SET_DEST (set)) == REGNO (cc_reg))
	    {
	      cc_src_insn = insn;
	      cc_src = SET_SRC (set);
	      break;
	    }
	  else if (reg_set_p (cc_reg, insn))
	    break;
	}

      if (! cc_src_insn)
	continue;

      /* The equivalence only holds if CC_SRC's operands are unchanged
	 between the setter and the end of the block.  */
      if (modified_between_p (cc_src, cc_src_insn, NEXT_INSN (last_insn)))
	continue;

      /* Now CC_REG is a condition code register used for a
	 conditional jump at the end of the block, and CC_SRC, in
	 CC_SRC_INSN, is the value to which that condition code
	 register is set, and CC_SRC is still meaningful at the end of
	 the basic block.  */

      orig_mode = GET_MODE (cc_src);
      mode = cse_cc_succs (bb, bb, cc_reg, cc_src, true);
      if (mode != VOIDmode)
	{
	  gcc_assert (mode == GET_MODE (cc_src));
	  if (mode != orig_mode)
	    {
	      /* cse_cc_succs settled on a different but compatible CC
		 mode; rewrite the setter and all following users.  */
	      rtx newreg = gen_rtx_REG (mode, REGNO (cc_reg));

	      cse_change_cc_mode_insn (cc_src_insn, newreg);

	      /* Do the same in the following insns that use the
		 current value of CC_REG within BB.  */
	      cse_change_cc_mode_insns (NEXT_INSN (cc_src_insn),
					NEXT_INSN (last_insn),
					newreg);
	    }
	}
    }
}
7643 :
7644 :
7645 : /* Perform common subexpression elimination. Nonzero value from
7646 : `cse_main' means that jumps were simplified and some code may now
7647 : be unreachable, so do jump optimization again. */
7648 : static unsigned int
7649 1041491 : rest_of_handle_cse (void)
7650 : {
7651 1041491 : int tem;
7652 :
7653 1041491 : if (dump_file)
7654 32 : dump_flow_info (dump_file, dump_flags);
7655 :
7656 1041491 : tem = cse_main (get_insns (), max_reg_num ());
7657 :
7658 : /* If we are not running more CSE passes, then we are no longer
7659 : expecting CSE to be run. But always rerun it in a cheap mode. */
7660 1041491 : cse_not_expected = !flag_rerun_cse_after_loop && !flag_gcse;
7661 :
7662 1041491 : if (tem == 2)
7663 : {
7664 5389 : timevar_push (TV_JUMP);
7665 5389 : rebuild_jump_labels (get_insns ());
7666 5389 : cse_cfg_altered |= cleanup_cfg (CLEANUP_CFG_CHANGED);
7667 5389 : timevar_pop (TV_JUMP);
7668 : }
7669 1036102 : else if (tem == 1 || optimize > 1)
7670 956508 : cse_cfg_altered |= cleanup_cfg (0);
7671 :
7672 1041491 : return 0;
7673 : }
7674 :
namespace {

/* Pass metadata for the first CSE pass, dumped as "cse1".  */
const pass_data pass_data_cse =
{
  RTL_PASS, /* type */
  "cse1", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CSE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

/* Pass-manager wrapper running rest_of_handle_cse at -O1 and above.  */
class pass_cse : public rtl_opt_pass
{
public:
  pass_cse (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_cse, ctxt)
  {}

  /* opt_pass methods: */
  bool gate (function *) final override { return optimize > 0; }
  unsigned int execute (function *) final override
  {
    return rest_of_handle_cse ();
  }

}; // class pass_cse

} // anon namespace
7707 :
/* Factory used by the pass manager to instantiate the first CSE pass.  */

rtl_opt_pass *
make_pass_cse (gcc::context *ctxt)
{
  return new pass_cse (ctxt);
}
7713 :
7714 :
7715 : /* Run second CSE pass after loop optimizations. */
7716 : static unsigned int
7717 961545 : rest_of_handle_cse2 (void)
7718 : {
7719 961545 : int tem;
7720 :
7721 961545 : if (dump_file)
7722 22 : dump_flow_info (dump_file, dump_flags);
7723 :
7724 961545 : tem = cse_main (get_insns (), max_reg_num ());
7725 :
7726 : /* Run a pass to eliminate duplicated assignments to condition code
7727 : registers. We have to run this after bypass_jumps, because it
7728 : makes it harder for that pass to determine whether a jump can be
7729 : bypassed safely. */
7730 961545 : cse_condition_code_reg ();
7731 :
7732 961545 : delete_trivially_dead_insns (get_insns (), max_reg_num ());
7733 :
7734 961545 : if (tem == 2)
7735 : {
7736 2223 : timevar_push (TV_JUMP);
7737 2223 : rebuild_jump_labels (get_insns ());
7738 2223 : cse_cfg_altered |= cleanup_cfg (CLEANUP_CFG_CHANGED);
7739 2223 : timevar_pop (TV_JUMP);
7740 : }
7741 959322 : else if (tem == 1 || cse_cfg_altered)
7742 108 : cse_cfg_altered |= cleanup_cfg (0);
7743 :
7744 961545 : cse_not_expected = 1;
7745 961545 : return 0;
7746 : }
7747 :
7748 :
7749 : namespace {
7750 :
7751 : const pass_data pass_data_cse2 =
7752 : {
7753 : RTL_PASS, /* type */
7754 : "cse2", /* name */
7755 : OPTGROUP_NONE, /* optinfo_flags */
7756 : TV_CSE2, /* tv_id */
7757 : 0, /* properties_required */
7758 : 0, /* properties_provided */
7759 : 0, /* properties_destroyed */
7760 : 0, /* todo_flags_start */
7761 : TODO_df_finish, /* todo_flags_finish */
7762 : };
7763 :
7764 : class pass_cse2 : public rtl_opt_pass
7765 : {
7766 : public:
7767 288047 : pass_cse2 (gcc::context *ctxt)
7768 576094 : : rtl_opt_pass (pass_data_cse2, ctxt)
7769 : {}
7770 :
7771 : /* opt_pass methods: */
7772 1474422 : bool gate (function *) final override
7773 : {
7774 1474422 : return optimize > 0 && flag_rerun_cse_after_loop;
7775 : }
7776 :
7777 961545 : unsigned int execute (function *) final override
7778 : {
7779 961545 : return rest_of_handle_cse2 ();
7780 : }
7781 :
7782 : }; // class pass_cse2
7783 :
7784 : } // anon namespace
7785 :
7786 : rtl_opt_pass *
7787 288047 : make_pass_cse2 (gcc::context *ctxt)
7788 : {
7789 288047 : return new pass_cse2 (ctxt);
7790 : }
7791 :
7792 : /* Run second CSE pass after loop optimizations. */
7793 : static unsigned int
7794 289524 : rest_of_handle_cse_after_global_opts (void)
7795 : {
7796 289524 : int save_cfj;
7797 289524 : int tem;
7798 :
7799 : /* We only want to do local CSE, so don't follow jumps. */
7800 289524 : save_cfj = flag_cse_follow_jumps;
7801 289524 : flag_cse_follow_jumps = 0;
7802 :
7803 289524 : rebuild_jump_labels (get_insns ());
7804 289524 : tem = cse_main (get_insns (), max_reg_num ());
7805 289524 : cse_cfg_altered |= purge_all_dead_edges ();
7806 289524 : delete_trivially_dead_insns (get_insns (), max_reg_num ());
7807 :
7808 289524 : cse_not_expected = !flag_rerun_cse_after_loop;
7809 :
7810 : /* If cse altered any jumps, rerun jump opts to clean things up. */
7811 289524 : if (tem == 2)
7812 : {
7813 298 : timevar_push (TV_JUMP);
7814 298 : rebuild_jump_labels (get_insns ());
7815 298 : cse_cfg_altered |= cleanup_cfg (CLEANUP_CFG_CHANGED);
7816 298 : timevar_pop (TV_JUMP);
7817 : }
7818 289226 : else if (tem == 1 || cse_cfg_altered)
7819 4785 : cse_cfg_altered |= cleanup_cfg (0);
7820 :
7821 289524 : flag_cse_follow_jumps = save_cfj;
7822 289524 : return 0;
7823 : }
7824 :
7825 : namespace {
7826 :
7827 : const pass_data pass_data_cse_after_global_opts =
7828 : {
7829 : RTL_PASS, /* type */
7830 : "cse_local", /* name */
7831 : OPTGROUP_NONE, /* optinfo_flags */
7832 : TV_CSE, /* tv_id */
7833 : 0, /* properties_required */
7834 : 0, /* properties_provided */
7835 : 0, /* properties_destroyed */
7836 : 0, /* todo_flags_start */
7837 : TODO_df_finish, /* todo_flags_finish */
7838 : };
7839 :
7840 : class pass_cse_after_global_opts : public rtl_opt_pass
7841 : {
7842 : public:
7843 288047 : pass_cse_after_global_opts (gcc::context *ctxt)
7844 576094 : : rtl_opt_pass (pass_data_cse_after_global_opts, ctxt)
7845 : {}
7846 :
7847 : /* opt_pass methods: */
7848 1474422 : bool gate (function *) final override
7849 : {
7850 1474422 : return optimize > 0 && flag_rerun_cse_after_global_opts;
7851 : }
7852 :
7853 289524 : unsigned int execute (function *) final override
7854 : {
7855 289524 : return rest_of_handle_cse_after_global_opts ();
7856 : }
7857 :
7858 : }; // class pass_cse_after_global_opts
7859 :
7860 : } // anon namespace
7861 :
7862 : rtl_opt_pass *
7863 288047 : make_pass_cse_after_global_opts (gcc::context *ctxt)
7864 : {
7865 288047 : return new pass_cse_after_global_opts (ctxt);
7866 : }
|