Line data Source code
1 : /* Code for RTL transformations to satisfy insn constraints.
2 : Copyright (C) 2010-2026 Free Software Foundation, Inc.
3 : Contributed by Vladimir Makarov <vmakarov@redhat.com>.
4 :
5 : This file is part of GCC.
6 :
7 : GCC is free software; you can redistribute it and/or modify it under
8 : the terms of the GNU General Public License as published by the Free
9 : Software Foundation; either version 3, or (at your option) any later
10 : version.
11 :
12 : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 : WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 : for more details.
16 :
17 : You should have received a copy of the GNU General Public License
18 : along with GCC; see the file COPYING3. If not see
19 : <http://www.gnu.org/licenses/>. */
20 :
21 :
22 : /* This file contains code for 3 passes: constraint pass,
23 : inheritance/split pass, and pass for undoing failed inheritance and
24 : split.
25 :
26 : The major goal of constraint pass is to transform RTL to satisfy
27 : insn and address constraints by:
28 : o choosing insn alternatives;
29 : o generating *reload insns* (or reloads in brief) and *reload
30 : pseudos* which will get necessary hard registers later;
31 : o substituting pseudos with equivalent values and removing the
32 : instructions that initialized those pseudos.
33 :
   The constraint pass has the biggest and most complicated code in LRA.
35 : There are a lot of important details like:
36 : o reuse of input reload pseudos to simplify reload pseudo
37 : allocations;
38 : o some heuristics to choose insn alternative to improve the
39 : inheritance;
40 : o early clobbers etc.
41 :
42 : The pass is mimicking former reload pass in alternative choosing
43 : because the reload pass is oriented to current machine description
44 : model. It might be changed if the machine description model is
45 : changed.
46 :
47 : There is special code for preventing all LRA and this pass cycling
48 : in case of bugs.
49 :
50 : On the first iteration of the pass we process every instruction and
51 : choose an alternative for each one. On subsequent iterations we try
52 : to avoid reprocessing instructions if we can be sure that the old
53 : choice is still valid.
54 :
   The inheritance/split pass is to transform code to achieve
   inheritance and live range splitting.  It is done on backward
57 : traversal of EBBs.
58 :
59 : The inheritance optimization goal is to reuse values in hard
60 : registers. There is analogous optimization in old reload pass. The
61 : inheritance is achieved by following transformation:
62 :
63 : reload_p1 <- p reload_p1 <- p
64 : ... new_p <- reload_p1
65 : ... => ...
66 : reload_p2 <- p reload_p2 <- new_p
67 :
68 : where p is spilled and not changed between the insns. Reload_p1 is
69 : also called *original pseudo* and new_p is called *inheritance
70 : pseudo*.
71 :
72 : The subsequent assignment pass will try to assign the same (or
73 : another if it is not possible) hard register to new_p as to
74 : reload_p1 or reload_p2.
75 :
76 : If the assignment pass fails to assign a hard register to new_p,
77 : this file will undo the inheritance and restore the original code.
78 : This is because implementing the above sequence with a spilled
79 : new_p would make the code much worse. The inheritance is done in
80 : EBB scope. The above is just a simplified example to get an idea
81 : of the inheritance as the inheritance is also done for non-reload
82 : insns.
83 :
84 : Splitting (transformation) is also done in EBB scope on the same
85 : pass as the inheritance:
86 :
87 : r <- ... or ... <- r r <- ... or ... <- r
88 : ... s <- r (new insn -- save)
89 : ... =>
90 : ... r <- s (new insn -- restore)
91 : ... <- r ... <- r
92 :
93 : The *split pseudo* s is assigned to the hard register of the
94 : original pseudo or hard register r.
95 :
96 : Splitting is done:
97 : o In EBBs with high register pressure for global pseudos (living
       in at least 2 BBs) and assigned to hard registers when there
       is more than one reload needing the hard registers;
100 : o for pseudos needing save/restore code around calls.
101 :
102 : If the split pseudo still has the same hard register as the
103 : original pseudo after the subsequent assignment pass or the
104 : original pseudo was split, the opposite transformation is done on
105 : the same pass for undoing inheritance. */
106 :
107 : #undef REG_OK_STRICT
108 :
109 : #include "config.h"
110 : #include "system.h"
111 : #include "coretypes.h"
112 : #include "backend.h"
113 : #include "hooks.h"
114 : #include "target.h"
115 : #include "rtl.h"
116 : #include "tree.h"
117 : #include "stmt.h"
118 : #include "predict.h"
119 : #include "df.h"
120 : #include "memmodel.h"
121 : #include "tm_p.h"
122 : #include "expmed.h"
123 : #include "optabs.h"
124 : #include "regs.h"
125 : #include "ira.h"
126 : #include "recog.h"
127 : #include "output.h"
128 : #include "addresses.h"
129 : #include "expr.h"
130 : #include "cfgrtl.h"
131 : #include "rtl-error.h"
132 : #include "lra.h"
133 : #include "lra-int.h"
134 : #include "print-rtl.h"
135 : #include "function-abi.h"
136 : #include "rtl-iter.h"
137 : #include "hash-set.h"
138 :
/* Value of LRA_CURR_RELOAD_NUM at the beginning of BB of the current
   insn.  Remember that LRA_CURR_RELOAD_NUM is the number of emitted
   reload insns.  */
static int bb_reload_num;

/* The current insn being processed and its corresponding single set
   (NULL otherwise), its data (basic block, the insn data, the insn
   static data, and the mode of each operand).  */
static rtx_insn *curr_insn;
static rtx curr_insn_set;
static basic_block curr_bb;
static lra_insn_recog_data_t curr_id;
static struct lra_static_insn_data *curr_static_id;
static machine_mode curr_operand_mode[MAX_RECOG_OPERANDS];
/* Mode of the register substituted by its equivalence with VOIDmode
   (e.g. constant) and whose subreg is given operand of the current
   insn.  VOIDmode in all other cases.  */
static machine_mode original_subreg_reg_mode[MAX_RECOG_OPERANDS];
/* The first call insn after curr_insn within the EBB during inherit_in_ebb
   or NULL outside of that function.  */
static rtx_insn *first_call_insn;
160 :
161 :
162 :
/* Start numbers for new registers and insns at the current constraints
   pass start.  Pseudos with numbers >= NEW_REGNO_START are the reload
   pseudos created by the current pass (see e.g. in_class_p and
   get_reg_class below).  */
static int new_regno_start;
static int new_insn_uid_start;
167 :
168 : /* If LOC is nonnull, strip any outer subreg from it. */
169 : static inline rtx *
170 228786108 : strip_subreg (rtx *loc)
171 : {
172 101290671 : return loc && GET_CODE (*loc) == SUBREG ? &SUBREG_REG (*loc) : loc;
173 : }
174 :
/* Return hard regno of REGNO or if it was not assigned to a hard
   register, use a hard register from its allocno class.  */
static int
get_try_hard_regno (int regno)
{
  int hard_regno;
  enum reg_class rclass;

  if ((hard_regno = regno) >= FIRST_PSEUDO_REGISTER)
    hard_regno = lra_get_regno_hard_regno (regno);
  if (hard_regno >= 0)
    return hard_regno;
  rclass = lra_get_allocno_class (regno);
  if (rclass == NO_REGS)
    return -1;
  /* Fall back on the first (most preferred) hard register of the
     allocno class.  */
  return ira_class_hard_regs[rclass][0];
}
192 :
/* Return the hard regno of X after removing its subreg.  If X is not a
   register or a subreg of a register, return -1.  If X is a pseudo, use its
   assignment.  If X is a hard regno, return the final hard regno which will be
   after elimination.  */
static int
get_hard_regno (rtx x)
{
  rtx reg;
  int hard_regno;

  reg = x;
  if (SUBREG_P (x))
    reg = SUBREG_REG (x);
  if (! REG_P (reg))
    return -1;
  int regno = REGNO (reg);
  if (HARD_REGISTER_NUM_P (regno))
    hard_regno = lra_get_elimination_hard_regno (regno);
  else
    hard_regno = lra_get_regno_hard_regno (regno);
  /* A negative result means the pseudo has no hard register assigned
     (yet).  */
  if (hard_regno < 0)
    return -1;
  if (SUBREG_P (x))
    /* Adjust for the position of the subreg within the full value.  */
    hard_regno += subreg_regno_offset (hard_regno, GET_MODE (reg),
				       SUBREG_BYTE (x), GET_MODE (x));
  return hard_regno;
}
220 :
221 : /* If REGNO is a hard register or has been allocated a hard register,
222 : return the class of that register. If REGNO is a reload pseudo
223 : created by the current constraints pass, return its allocno class.
224 : Return NO_REGS otherwise. */
225 : static enum reg_class
226 513014634 : get_reg_class (int regno)
227 : {
228 513014634 : int hard_regno;
229 :
230 513014634 : if (HARD_REGISTER_NUM_P (regno))
231 64921232 : hard_regno = lra_get_elimination_hard_regno (regno);
232 : else
233 448093402 : hard_regno = lra_get_regno_hard_regno (regno);
234 513014634 : if (hard_regno >= 0)
235 321110326 : return REGNO_REG_CLASS (hard_regno);
236 191904308 : if (regno >= new_regno_start)
237 62005092 : return lra_get_allocno_class (regno);
238 : return NO_REGS;
239 : }
240 :
/* Return true if REG_CLASS has enough allocatable hard regs to keep value of
   REG_MODE.  */
static bool
enough_allocatable_hard_regs_p (enum reg_class reg_class,
				enum machine_mode reg_mode)
{
  int i, j, hard_regno, class_size, nregs;

  /* A class wholly contained in the non-allocatable set can never
     provide a register.  */
  if (hard_reg_set_subset_p (reg_class_contents[reg_class], lra_no_alloc_regs))
    return false;
  class_size = ira_class_hard_regs_num[reg_class];
  for (i = 0; i < class_size; i++)
    {
      hard_regno = ira_class_hard_regs[reg_class][i];
      nregs = hard_regno_nregs (hard_regno, reg_mode);
      /* A single-register value fits by construction.  */
      if (nregs == 1)
	return true;
      /* A multi-register value must fit entirely within allocatable
	 registers of the class.  */
      for (j = 0; j < nregs; j++)
	if (TEST_HARD_REG_BIT (lra_no_alloc_regs, hard_regno + j)
	    || ! TEST_HARD_REG_BIT (reg_class_contents[reg_class],
				    hard_regno + j))
	  break;
      if (j >= nregs)
	return true;
    }
  return false;
}
268 :
/* True if C is a non-empty register class that has too few registers
   to be safely used as a reload target class.  Either the class has a
   single register, or the target says it is likely to be spilled.  */
#define SMALL_REGISTER_CLASS_P(C)		\
  (ira_class_hard_regs_num [(C)] == 1		\
   || (ira_class_hard_regs_num [(C)] >= 1	\
       && targetm.class_likely_spilled_p (C)))
275 :
/* Return true if REG satisfies (or will satisfy) reg class constraint
   CL.  Use elimination first if REG is a hard register.  If REG is a
   reload pseudo created by this constraints pass, assume that it will
   be allocated a hard register from its allocno class, but allow that
   class to be narrowed to CL if it is currently a superset of CL and
   if either:

   - ALLOW_ALL_RELOAD_CLASS_CHANGES_P is true or
   - the instruction we're processing is not a reload move.

   If NEW_CLASS is nonnull, set *NEW_CLASS to the new allocno class of
   REGNO (reg), or NO_REGS if no change in its class was needed.  */
static bool
in_class_p (rtx reg, enum reg_class cl, enum reg_class *new_class,
	    bool allow_all_reload_class_changes_p = false)
{
  enum reg_class rclass, common_class;
  machine_mode reg_mode;
  rtx src;
  int regno = REGNO (reg);

  if (new_class != NULL)
    *new_class = NO_REGS;
  if (regno < FIRST_PSEUDO_REGISTER)
    {
      /* A hard register: test membership of its eliminated form.  */
      rtx final_reg = reg;
      rtx *final_loc = &final_reg;

      lra_eliminate_reg_if_possible (final_loc);
      return TEST_HARD_REG_BIT (reg_class_contents[cl], REGNO (*final_loc));
    }
  reg_mode = GET_MODE (reg);
  rclass = get_reg_class (regno);
  src = curr_insn_set != NULL ? SET_SRC (curr_insn_set) : NULL;
  if (regno < new_regno_start
      /* Do not allow the constraints for reload instructions to
	 influence the classes of new pseudos.  These reloads are
	 typically moves that have many alternatives, and restricting
	 reload pseudos for one alternative may lead to situations
	 where other reload pseudos are no longer allocatable.  */
      || (!allow_all_reload_class_changes_p
	  && INSN_UID (curr_insn) >= new_insn_uid_start
	  && src != NULL
	  && ((REG_P (src) || MEM_P (src))
	      || (GET_CODE (src) == SUBREG
		  && (REG_P (SUBREG_REG (src)) || MEM_P (SUBREG_REG (src)))))))
    /* When we don't know what class will be used finally for reload
       pseudos, we use ALL_REGS.  */
    return ((regno >= new_regno_start && rclass == ALL_REGS)
	    || (rclass != NO_REGS && ira_class_subset_p[rclass][cl]
		&& ! hard_reg_set_subset_p (reg_class_contents[cl],
					    lra_no_alloc_regs)));
  else
    {
      /* Narrow the allocno class to the intersection with CL.  */
      common_class = ira_reg_class_subset[rclass][cl];
      if (new_class != NULL)
	*new_class = common_class;
      return (enough_allocatable_hard_regs_p (common_class, reg_mode)
	      /* Do not permit reload insn operand matching (new_class == NULL
		 case) if the new class is too small.  */
	      && (new_class != NULL || common_class == rclass
		  || !SMALL_REGISTER_CLASS_P (common_class)));
    }
}
340 :
341 : /* Return true if REGNO satisfies a memory constraint. */
342 : static bool
343 63614301 : in_mem_p (int regno)
344 : {
345 0 : return get_reg_class (regno) == NO_REGS;
346 : }
347 :
/* Return true if ADDR is a valid memory address for mode MODE in address
   space AS, and check that each pseudo has the proper kind of hard
   reg.  */
static bool
valid_address_p (machine_mode mode ATTRIBUTE_UNUSED,
		 rtx addr, addr_space_t as)
{
#ifdef GO_IF_LEGITIMATE_ADDRESS
  /* Old-style targets only support the generic address space.  */
  lra_assert (ADDR_SPACE_GENERIC_P (as));
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return false;

 win:
  return true;
#else
  return targetm.addr_space.legitimate_address_p (mode, addr, 0, as,
						  ERROR_MARK);
#endif
}
367 :
namespace {
/* Temporarily eliminates registers in an address (for the lifetime of
   the object).  The constructor saves the base and index terms and
   replaces them with their eliminated forms; the destructor restores
   the original terms.  */
class address_eliminator {
public:
  address_eliminator (struct address_info *ad);
  ~address_eliminator ();

private:
  /* The decomposed address being processed.  */
  struct address_info *m_ad;
  /* Location of the (subreg-stripped) base term and its original
     value, used to undo the elimination on destruction.  */
  rtx *m_base_loc;
  rtx m_base_reg;
  /* Likewise for the index term.  */
  rtx *m_index_loc;
  rtx m_index_reg;
};
}
384 :
/* Save the base and index registers of AD and replace them in place by
   their eliminated forms; the destructor restores the originals.  */
address_eliminator::address_eliminator (struct address_info *ad)
  : m_ad (ad),
    m_base_loc (strip_subreg (ad->base_term)),
    m_base_reg (NULL_RTX),
    m_index_loc (strip_subreg (ad->index_term)),
    m_index_reg (NULL_RTX)
{
  if (m_base_loc != NULL)
    {
      m_base_reg = *m_base_loc;
      /* If we have non-legitimate address which is decomposed not in
	 the way we expected, don't do elimination here.  In such case
	 the address will be reloaded and elimination will be done in
	 reload insn finally.  */
      if (REG_P (m_base_reg))
	lra_eliminate_reg_if_possible (m_base_loc);
      /* Keep the second base term (if any) in sync with the first.  */
      if (m_ad->base_term2 != NULL)
	*m_ad->base_term2 = *m_ad->base_term;
    }
  if (m_index_loc != NULL)
    {
      m_index_reg = *m_index_loc;
      if (REG_P (m_index_reg))
	lra_eliminate_reg_if_possible (m_index_loc);
    }
}
411 :
/* Restore the base and index registers that the constructor replaced
   by their eliminated forms.  */
address_eliminator::~address_eliminator ()
{
  if (m_base_loc && *m_base_loc != m_base_reg)
    {
      *m_base_loc = m_base_reg;
      /* Keep the second base term (if any) in sync with the first.  */
      if (m_ad->base_term2 != NULL)
	*m_ad->base_term2 = *m_ad->base_term;
    }
  if (m_index_loc && *m_index_loc != m_index_reg)
    *m_index_loc = m_index_reg;
}
423 :
424 : /* Return true if the eliminated form of AD is a legitimate target address.
425 : If OP is a MEM, AD is the address within OP, otherwise OP should be
426 : ignored. CONSTRAINT is one constraint that the operand may need
427 : to meet. */
428 : static bool
429 35622679 : valid_address_p (rtx op, struct address_info *ad,
430 : enum constraint_num constraint)
431 : {
432 35622679 : address_eliminator eliminator (ad);
433 :
434 : /* Allow a memory OP if it matches CONSTRAINT, even if CONSTRAINT is more
435 : forgiving than "m".
436 : Need to extract memory from op for special memory constraint,
437 : i.e. bcst_mem_operand in i386 backend. */
438 35622679 : if (MEM_P (extract_mem_from_operand (op))
439 : && insn_extra_relaxed_memory_constraint (constraint)
440 : && constraint_satisfied_p (op, constraint))
441 : return true;
442 :
443 35622679 : return valid_address_p (ad->mode, *ad->outer, ad->as);
444 35622679 : }
445 :
446 : /* For special_memory_operand, it could be false for MEM_P (op),
447 : i.e. bcst_mem_operand in i386 backend.
448 : Extract and return real memory operand or op. */
449 : rtx
450 623070287 : extract_mem_from_operand (rtx op)
451 : {
452 624741007 : for (rtx x = op;; x = XEXP (x, 0))
453 : {
454 624741007 : if (MEM_P (x))
455 : return x;
456 443494158 : if (GET_RTX_LENGTH (GET_CODE (x)) != 1
457 363460475 : || GET_RTX_FORMAT (GET_CODE (x))[0] != 'e')
458 : break;
459 : }
460 : return op;
461 : }
462 :
/* Return true if the eliminated form of memory reference OP satisfies
   extra (special) memory constraint CONSTRAINT.  */
static bool
satisfies_memory_constraint_p (rtx op, enum constraint_num constraint)
{
  struct address_info ad;
  /* OP itself need not be a MEM, e.g. bcst_mem_operand on i386.  */
  rtx mem = extract_mem_from_operand (op);
  if (!MEM_P (mem))
    return false;

  decompose_mem_address (&ad, mem);
  /* Registers in AD stay eliminated while the constraint is tested.  */
  address_eliminator eliminator (&ad);
  return constraint_satisfied_p (op, constraint);
}
478 : /* Return true if the eliminated form of address AD satisfies extra
479 : address constraint CONSTRAINT. */
480 : static bool
481 3412619 : satisfies_address_constraint_p (struct address_info *ad,
482 : enum constraint_num constraint)
483 : {
484 3412619 : address_eliminator eliminator (ad);
485 3412619 : return constraint_satisfied_p (*ad->outer, constraint);
486 3412619 : }
487 :
488 : /* Return true if the eliminated form of address OP satisfies extra
489 : address constraint CONSTRAINT. */
490 : static bool
491 1673965 : satisfies_address_constraint_p (rtx op, enum constraint_num constraint)
492 : {
493 1673965 : struct address_info ad;
494 :
495 1673965 : decompose_lea_address (&ad, &op);
496 1673965 : return satisfies_address_constraint_p (&ad, constraint);
497 : }
498 :
499 : /* Set of equivalences whose original targets have set up pointer flag. */
500 : static hash_set <rtx> *pointer_equiv_set;
501 :
502 : /* Add x to pointer_equiv_set. */
503 : void
504 1917546 : lra_pointer_equiv_set_add (rtx x)
505 : {
506 1917546 : pointer_equiv_set->add (x);
507 1917546 : }
508 :
509 : /* Return true if x is in pointer_equiv_set. */
510 : bool
511 9724651 : lra_pointer_equiv_set_in (rtx x)
512 : {
513 9724651 : return pointer_equiv_set->contains (x);
514 : }
515 :
516 : /* Initiate equivalences for LRA. As we keep original equivalences
517 : before any elimination, we need to make copies otherwise any change
518 : in insns might change the equivalences. */
519 : void
520 1480117 : lra_init_equiv (void)
521 : {
522 1480117 : ira_expand_reg_equiv ();
523 69653357 : for (int i = FIRST_PSEUDO_REGISTER; i < max_reg_num (); i++)
524 : {
525 68173240 : rtx res;
526 :
527 68173240 : if ((res = ira_reg_equiv[i].memory) != NULL_RTX)
528 3055329 : ira_reg_equiv[i].memory = copy_rtx (res);
529 68173240 : if ((res = ira_reg_equiv[i].invariant) != NULL_RTX)
530 874370 : ira_reg_equiv[i].invariant = copy_rtx (res);
531 : }
532 1480117 : pointer_equiv_set = new hash_set <rtx>;
533 1480117 : }
534 :
/* Finish equivalence data for LRA: free the pointer equivalence set
   allocated in lra_init_equiv.  */
void
lra_finish_equiv (void)
{
  delete pointer_equiv_set;
}
541 :
542 : static rtx loc_equivalence_callback (rtx, const_rtx, void *);
543 :
/* Update equivalence for REGNO.  We need to do this as the equivalence
   might contain other pseudos which are changed by their
   equivalences.  */
static void
update_equiv (int regno)
{
  rtx x;

  if ((x = ira_reg_equiv[regno].memory) != NULL_RTX)
    ira_reg_equiv[regno].memory
      = simplify_replace_fn_rtx (x, NULL_RTX, loc_equivalence_callback,
				 NULL_RTX);
  if ((x = ira_reg_equiv[regno].invariant) != NULL_RTX)
    ira_reg_equiv[regno].invariant
      = simplify_replace_fn_rtx (x, NULL_RTX, loc_equivalence_callback,
				 NULL_RTX);
}
561 :
/* If we have decided to substitute X with another value, return that
   value, otherwise return X.  */
static rtx
get_equiv (rtx x)
{
  int regno;
  rtx res;

  /* Only unassigned pseudos with a defined, profitable equivalence are
     substituted.  */
  if (! REG_P (x) || (regno = REGNO (x)) < FIRST_PSEUDO_REGISTER
      || regno >= ira_reg_equiv_len
      || ! ira_reg_equiv[regno].defined_p
      || ! ira_reg_equiv[regno].profitable_p
      || lra_get_regno_hard_regno (regno) >= 0)
    return x;
  if ((res = ira_reg_equiv[regno].memory) != NULL_RTX)
    {
      /* The target may veto substitution of a particular memory
	 equivalence.  */
      if (targetm.cannot_substitute_mem_equiv_p (res))
	return x;
      return res;
    }
  if ((res = ira_reg_equiv[regno].constant) != NULL_RTX)
    return res;
  if ((res = ira_reg_equiv[regno].invariant) != NULL_RTX)
    return res;
  /* A defined equivalence must be one of memory, constant or
     invariant.  */
  gcc_unreachable ();
}
588 :
/* If we have decided to substitute X with the equivalent value, return that
   value after elimination for INSN, otherwise return X.  Add the result to
   pointer_equiv_set if X has set up pointer flag.  */
static rtx
get_equiv_with_elimination (rtx x, rtx_insn *insn)
{
  rtx res = get_equiv (x);

  /* Constants need no elimination.  */
  if (x == res || CONSTANT_P (res))
    return res;
  res = lra_eliminate_regs_1 (insn, res, GET_MODE (res),
			      false, false, 0, true);
  if (REG_POINTER (x))
    lra_pointer_equiv_set_add (res);
  return res;
}
605 :
606 : /* Set up curr_operand_mode. */
607 : static void
608 105848858 : init_curr_operand_mode (void)
609 : {
610 105848858 : int nop = curr_static_id->n_operands;
611 329877523 : for (int i = 0; i < nop; i++)
612 : {
613 224028665 : machine_mode mode = GET_MODE (*curr_id->operand_loc[i]);
614 224028665 : if (mode == VOIDmode)
615 : {
616 : /* The .md mode for address operands is the mode of the
617 : addressed value rather than the mode of the address itself. */
618 43345572 : if (curr_id->icode >= 0 && curr_static_id->operand[i].is_address)
619 95 : mode = Pmode;
620 : else
621 43345477 : mode = curr_static_id->operand[i].mode;
622 : }
623 224028665 : curr_operand_mode[i] = mode;
624 : }
625 105848858 : }
626 :
627 :
628 :
629 : /* The page contains code to reuse input reloads. */
630 :
/* Structure describes input reload of the current insn.  */
struct input_reload
{
  /* True for input reload of matched operands.  */
  bool match_p;
  /* True for input reload of inout earlyclobber operand.  */
  bool early_clobber_p;
  /* Reloaded value.  */
  rtx input;
  /* Reload pseudo used.  */
  rtx reg;
};
643 :
/* The number of elements currently used in the following array.  */
static int curr_insn_input_reloads_num;
/* Array containing info about input reloads.  It is used to find the
   same input reload and reuse the reload pseudo in this case.  */
static struct input_reload curr_insn_input_reloads[LRA_MAX_INSN_RELOADS];
649 :
/* Initiate data concerning reuse of input reloads for the current
   insn.  */
static void
init_curr_insn_input_reloads (void)
{
  curr_insn_input_reloads_num = 0;
}
657 :
/* The canonical form of an rtx inside a MEM is not necessarily the same as the
   canonical form of the rtx outside the MEM.  Fix this up in the case that
   we're reloading an address (and therefore pulling it outside a MEM).
   Note that ADDR is modified in place and also returned.  */
static rtx
canonicalize_reload_addr (rtx addr)
{
  subrtx_var_iterator::array_type array;
  FOR_EACH_SUBRTX_VAR (iter, array, addr, NONCONST)
    {
      rtx x = *iter;
      if (GET_CODE (x) == MULT && CONST_INT_P (XEXP (x, 1)))
	{
	  const HOST_WIDE_INT ci = INTVAL (XEXP (x, 1));
	  const int pwr2 = exact_log2 (ci);
	  if (pwr2 > 0)
	    {
	      /* Rewrite this to use a shift instead, which is canonical when
		 outside of a MEM.  */
	      PUT_CODE (x, ASHIFT);
	      XEXP (x, 1) = GEN_INT (pwr2);
	    }
	}
    }

  return addr;
}
684 :
/* Return rtx accessing reload REG of RCLASS matching another reload reg in
   MODE.  */
static rtx
get_matching_reload_reg_subreg (machine_mode mode, rtx reg,
				enum reg_class rclass)
{
  /* Use the first hard register of RCLASS as a representative to
     compute subreg offsets.  */
  int hard_regno = ira_class_hard_regs[rclass][0];
  if (subreg_regno_offset (hard_regno,
			   GET_MODE (reg),
			   subreg_lowpart_offset (mode, GET_MODE (reg)),
			   mode) == 0)
    /* For matching scalar int modes generate the right subreg byte offset for
       BE targets -- see call of reload.cc:operands_match_p in
       recog.cc:constrain_operands.  */
    return lowpart_subreg (mode, reg, GET_MODE (reg));
  int offset = (lra_constraint_offset (hard_regno, GET_MODE (reg))
		- lra_constraint_offset (hard_regno, mode)) * UNITS_PER_WORD;
  lra_assert (offset >= 0);
  return gen_rtx_SUBREG (mode, reg, offset);
}
705 :
/* Create a new pseudo using MODE, RCLASS, EXCLUDE_START_HARD_REGS, ORIGINAL or
   reuse an existing reload pseudo.  Don't reuse an existing reload pseudo if
   IN_SUBREG_P is true and the reused pseudo should be wrapped up in a SUBREG.
   EARLY_CLOBBER_P is true for input reload of inout early clobber operand.
   The result pseudo is returned through RESULT_REG.  Return TRUE if we created
   a new pseudo, FALSE if we reused an existing reload pseudo.  Use TITLE to
   describe new registers for debug purposes.  */
static bool
get_reload_reg (enum op_type type, machine_mode mode, rtx original,
		enum reg_class rclass, HARD_REG_SET *exclude_start_hard_regs,
		bool in_subreg_p, bool early_clobber_p,
		const char *title, rtx *result_reg)
{
  int i, regno;
  enum reg_class new_class;

  if (type == OP_OUT)
    {
      /* Output reload registers tend to start out with a conservative
	 choice of register class.  Usually this is ALL_REGS, although
	 a target might narrow it (for performance reasons) through
	 targetm.preferred_reload_class.  It's therefore quite common
	 for a reload instruction to require a more restrictive class
	 than the class that was originally assigned to the reload register.

	 In these situations, it's more efficient to refine the choice
	 of register class rather than create a second reload register.
	 This also helps to avoid cycling for registers that are only
	 used by reload instructions.  */
      if (REG_P (original)
	  && (int) REGNO (original) >= new_regno_start
	  && (INSN_UID (curr_insn) >= new_insn_uid_start
	      || ira_former_scratch_p (REGNO (original)))
	  && in_class_p (original, rclass, &new_class, true)
	  && (exclude_start_hard_regs == nullptr
	      || hard_reg_set_intersect_p (
		   ~lra_reg_info[REGNO (original)].exclude_start_hard_regs,
		   ~*exclude_start_hard_regs)))
	{
	  unsigned int regno = REGNO (original);
	  if (lra_dump_file != NULL)
	    {
	      fprintf (lra_dump_file, "	 Reuse r%d for output ", regno);
	      dump_value_slim (lra_dump_file, original, 1);
	    }
	  if (new_class != lra_get_allocno_class (regno))
	    lra_change_class (regno, new_class, ", change to", false);
	  if (lra_dump_file != NULL)
	    fprintf (lra_dump_file, "\n");
	  /* Accumulate the new exclusions into the reused pseudo.  */
	  if (exclude_start_hard_regs)
	    lra_reg_info[regno].exclude_start_hard_regs
	      |= *exclude_start_hard_regs;
	  *result_reg = original;
	  return false;
	}
      *result_reg
	= lra_create_new_reg_with_unique_value (mode, original, rclass,
						exclude_start_hard_regs, title);
      return true;
    }

  /* Input reload: try to reuse a reload pseudo already created for the
     same input value in this insn.  */
  bool unique_p = early_clobber_p;
  /* Prevent reuse value of expression with side effects,
     e.g. volatile memory.  */
  if (! side_effects_p (original))
    for (i = 0; i < curr_insn_input_reloads_num; i++)
      {
	if (! curr_insn_input_reloads[i].match_p
	    && ! curr_insn_input_reloads[i].early_clobber_p
	    && rtx_equal_p (curr_insn_input_reloads[i].input, original)
	    && in_class_p (curr_insn_input_reloads[i].reg, rclass, &new_class))
	  {
	    rtx reg = curr_insn_input_reloads[i].reg;
	    regno = REGNO (reg);
	    /* If input is equal to original and both are VOIDmode,
	       GET_MODE (reg) might be still different from mode.
	       Ensure we don't return *result_reg with wrong mode.  */
	    if (GET_MODE (reg) != mode)
	      {
		if (in_subreg_p)
		  continue;
		if (maybe_lt (GET_MODE_SIZE (GET_MODE (reg)),
			      GET_MODE_SIZE (mode)))
		  continue;
		reg = get_matching_reload_reg_subreg (mode, reg, new_class);
		if (reg == NULL_RTX || GET_CODE (reg) != SUBREG)
		  continue;
	      }
	    /* If the existing reload and this have no start hard register in
	       common, then skip.  Otherwise update exclude_start_hard_regs.  */
	    if (exclude_start_hard_regs
		&& ! hard_reg_set_empty_p (*exclude_start_hard_regs))
	      {
		HARD_REG_SET r = lra_reg_info[regno].exclude_start_hard_regs
				 | *exclude_start_hard_regs;
		if (hard_reg_set_empty_p (~r))
		  continue;
		else
		  lra_reg_info[regno].exclude_start_hard_regs = r;
	      }
	    *result_reg = reg;
	    if (lra_dump_file != NULL)
	      {
		fprintf (lra_dump_file, "	 Reuse r%d for reload ", regno);
		dump_value_slim (lra_dump_file, original, 1);
	      }
	    if (new_class != lra_get_allocno_class (regno))
	      lra_change_class (regno, new_class, ", change to", false);
	    if (lra_dump_file != NULL)
	      fprintf (lra_dump_file, "\n");
	    return false;
	  }
	/* If we have an input reload with a different mode, make sure it
	   will get a different hard reg.  */
	else if (REG_P (original)
		 && REG_P (curr_insn_input_reloads[i].input)
		 && REGNO (original) == REGNO (curr_insn_input_reloads[i].input)
		 && (GET_MODE (original)
		     != GET_MODE (curr_insn_input_reloads[i].input)))
	  unique_p = true;
      }
  /* No reusable reload pseudo: create a new one and record it for
     potential reuse by later input reloads of this insn.  */
  *result_reg = (unique_p
		 ? lra_create_new_reg_with_unique_value
		 : lra_create_new_reg) (mode, original, rclass,
					exclude_start_hard_regs, title);
  lra_assert (curr_insn_input_reloads_num < LRA_MAX_INSN_RELOADS);
  curr_insn_input_reloads[curr_insn_input_reloads_num].input = original;
  curr_insn_input_reloads[curr_insn_input_reloads_num].match_p = false;
  curr_insn_input_reloads[curr_insn_input_reloads_num].early_clobber_p
    = early_clobber_p;
  curr_insn_input_reloads[curr_insn_input_reloads_num++].reg = *result_reg;
  return true;
}
839 :
840 :
841 : /* The page contains major code to choose the current insn alternative
842 : and generate reloads for it. */
843 :
844 : /* Return the offset from REGNO of the least significant register
845 : in (reg:MODE REGNO).
846 :
847 : This function is used to tell whether two registers satisfy
848 : a matching constraint. (reg:MODE1 REGNO1) matches (reg:MODE2 REGNO2) if:
849 :
850 : REGNO1 + lra_constraint_offset (REGNO1, MODE1)
851 : == REGNO2 + lra_constraint_offset (REGNO2, MODE2) */
852 : int
853 43223708 : lra_constraint_offset (int regno, machine_mode mode)
854 : {
855 43223708 : lra_assert (regno < FIRST_PSEUDO_REGISTER);
856 :
857 43223708 : scalar_int_mode int_mode;
858 43223708 : if (WORDS_BIG_ENDIAN
859 : && is_a <scalar_int_mode> (mode, &int_mode)
860 : && GET_MODE_SIZE (int_mode) > UNITS_PER_WORD)
861 : return hard_regno_nregs (regno, mode) - 1;
862 43223708 : return 0;
863 : }
864 :
/* Like rtx_equal_p except that it allows a REG and a SUBREG to match
   if they are the same hard reg, and has special hacks for
   auto-increment and auto-decrement.  This is specifically intended for
   process_alt_operands to use in determining whether two operands
   match.  X is the operand whose number is the lower of the two.

   It is supposed that X is the output operand and Y is the input
   operand.  Y_HARD_REGNO is the final hard regno of register Y or
   register in subreg Y as we know it now.  Otherwise, it is a
   negative value.  */
static bool
operands_match_p (rtx x, rtx y, int y_hard_regno)
{
  int i;
  RTX_CODE code = GET_CODE (x);
  const char *fmt;

  /* Identical rtxes trivially match.  */
  if (x == y)
    return true;
  if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
      && (REG_P (y) || (GET_CODE (y) == SUBREG && REG_P (SUBREG_REG (y)))))
    {
      int j;

      /* Both operands are (subregs of) registers: compare the hard
	 register numbers, adjusted by lra_constraint_offset so that
	 multi-word values are compared by their least significant
	 registers.  Fall through to the slow path if either hard
	 regno is not known yet.  */
      i = get_hard_regno (x);
      if (i < 0)
	goto slow;

      if ((j = y_hard_regno) < 0)
	goto slow;

      i += lra_constraint_offset (i, GET_MODE (x));
      j += lra_constraint_offset (j, GET_MODE (y));

      return i == j;
    }

  /* If two operands must match, because they are really a single
     operand of an assembler insn, then two post-increments are invalid
     because the assembler insn would increment only once.  On the
     other hand, a post-increment matches ordinary indexing if the
     post-increment is the output operand.  */
  if (code == POST_DEC || code == POST_INC || code == POST_MODIFY)
    return operands_match_p (XEXP (x, 0), y, y_hard_regno);

  /* Two pre-increments are invalid because the assembler insn would
     increment only once.  On the other hand, a pre-increment matches
     ordinary indexing if the pre-increment is the input operand.  */
  if (GET_CODE (y) == PRE_DEC || GET_CODE (y) == PRE_INC
      || GET_CODE (y) == PRE_MODIFY)
    return operands_match_p (x, XEXP (y, 0), -1);

 slow:

  if (code == REG && REG_P (y))
    return REGNO (x) == REGNO (y);

  /* A register matches a subreg of the same register (either way
     round).  */
  if (code == REG && GET_CODE (y) == SUBREG && REG_P (SUBREG_REG (y))
      && x == SUBREG_REG (y))
    return true;
  if (GET_CODE (y) == REG && code == SUBREG && REG_P (SUBREG_REG (x))
      && SUBREG_REG (x) == y)
    return true;

  /* Now we have disposed of all the cases in which different rtx
     codes can match.  */
  if (code != GET_CODE (y))
    return false;

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
  if (GET_MODE (x) != GET_MODE (y))
    return false;

  switch (code)
    {
    CASE_CONST_UNIQUE:
      /* Unique constants are shared, so the pointer equality check at
	 the top already covered equal ones.  */
      return false;

    case CONST_VECTOR:
      if (!same_vector_encodings_p (x, y))
	return false;
      break;

    case LABEL_REF:
      return label_ref_label (x) == label_ref_label (y);
    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    default:
      break;
    }

  /* Compare the elements.  If any pair of corresponding elements fail
     to match, return false for the whole thing.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int val, j;
      switch (fmt[i])
	{
	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return false;
	  break;

	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return false;
	  break;

	case 'L':
	  if (XLOC (x, i) != XLOC (y, i))
	    return false;
	  break;

	case 'p':
	  if (maybe_ne (SUBREG_BYTE (x), SUBREG_BYTE (y)))
	    return false;
	  break;

	case 'e':
	  val = operands_match_p (XEXP (x, i), XEXP (y, i), -1);
	  if (val == 0)
	    return false;
	  break;

	case '0':
	  break;

	case 'E':
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return false;
	  for (j = XVECLEN (x, i) - 1; j >= 0; --j)
	    {
	      val = operands_match_p (XVECEXP (x, i, j), XVECEXP (y, i, j), -1);
	      if (val == 0)
		return false;
	    }
	  break;

	/* It is believed that rtx's at this level will never
	   contain anything but integers and other rtx's, except for
	   within LABEL_REFs and SYMBOL_REFs.  */
	default:
	  gcc_unreachable ();
	}
    }
  return true;
}
1015 :
/* True if X is a constant that can be forced into the constant pool.
   MODE is the mode of the operand, or VOIDmode if not known.  HIGH
   parts and constants of non-constant size can never be spilled to
   the pool, and the target may veto specific constants.  */
#define CONST_POOL_OK_P(MODE, X)		\
  ((MODE) != VOIDmode				\
   && CONSTANT_P (X)				\
   && GET_CODE (X) != HIGH			\
   && GET_MODE_SIZE (MODE).is_constant ()	\
   && !targetm.cannot_force_const_mem (MODE, X))
1024 :
/* If REG is a reload pseudo, try to make its class satisfying CL.  */
static void
narrow_reload_pseudo_class (rtx reg, enum reg_class cl)
{
  enum reg_class rclass;

  /* Do not make more accurate class from reloads generated.  They are
     mostly moves with a lot of constraints.  Making a more accurate
     class may result in a very narrow class and impossibility of
     finding registers for several reloads of one insn.  */
  if (INSN_UID (curr_insn) >= new_insn_uid_start)
    return;
  if (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);
  /* Only reload pseudos (regnos >= new_regno_start) are candidates for
     narrowing; original pseudos keep their class.  */
  if (! REG_P (reg) || (int) REGNO (reg) < new_regno_start)
    return;
  if (in_class_p (reg, cl, &rclass) && rclass != cl)
    lra_change_class (REGNO (reg), rclass, " Change to", true);
}
1044 :
1045 : /* Searches X for any reference to a reg with the same value as REGNO,
1046 : returning the rtx of the reference found if any. Otherwise,
1047 : returns NULL_RTX. */
1048 : static rtx
1049 525283 : regno_val_use_in (unsigned int regno, rtx x)
1050 : {
1051 525283 : const char *fmt;
1052 525283 : int i, j;
1053 525283 : rtx tem;
1054 :
1055 525283 : if (REG_P (x) && lra_reg_info[REGNO (x)].val == lra_reg_info[regno].val)
1056 : return x;
1057 :
1058 524957 : fmt = GET_RTX_FORMAT (GET_CODE (x));
1059 1056297 : for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
1060 : {
1061 531340 : if (fmt[i] == 'e')
1062 : {
1063 7664 : if ((tem = regno_val_use_in (regno, XEXP (x, i))))
1064 : return tem;
1065 : }
1066 523676 : else if (fmt[i] == 'E')
1067 0 : for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1068 0 : if ((tem = regno_val_use_in (regno , XVECEXP (x, i, j))))
1069 : return tem;
1070 : }
1071 :
1072 : return NULL_RTX;
1073 : }
1074 :
1075 : /* Return true if all current insn non-output operands except INS (it
1076 : has a negaitve end marker) do not use pseudos with the same value
1077 : as REGNO. */
1078 : static bool
1079 2 : check_conflict_input_operands (int regno, signed char *ins)
1080 : {
1081 2 : int in;
1082 2 : int n_operands = curr_static_id->n_operands;
1083 :
1084 8 : for (int nop = 0; nop < n_operands; nop++)
1085 7 : if (! curr_static_id->operand[nop].is_operator
1086 7 : && curr_static_id->operand[nop].type != OP_OUT)
1087 : {
1088 5 : for (int i = 0; (in = ins[i]) >= 0; i++)
1089 4 : if (in == nop)
1090 : break;
1091 3 : if (in < 0
1092 3 : && regno_val_use_in (regno, *curr_id->operand_loc[nop]) != NULL_RTX)
1093 : return false;
1094 : }
1095 : return true;
1096 : }
1097 :
/* Generate reloads for matching OUT and INS (array of input operand numbers
   with end marker -1) with reg class GOAL_CLASS and EXCLUDE_START_HARD_REGS,
   considering output operands OUTS (similar array to INS) needing to be in
   different registers.  Add input and output reloads correspondingly to the
   lists *BEFORE and *AFTER.  OUT might be negative.  In this case we generate
   input reloads for matched input operands INS.  EARLY_CLOBBER_P is a flag
   that the output operand is early clobbered for chosen alternative.  */
static void
match_reload (signed char out, signed char *ins, signed char *outs,
	      enum reg_class goal_class, HARD_REG_SET *exclude_start_hard_regs,
	      rtx_insn **before, rtx_insn **after, bool early_clobber_p)
{
  bool out_conflict;
  int i, in;
  rtx new_in_reg, new_out_reg, reg;
  machine_mode inmode, outmode;
  rtx in_rtx = *curr_id->operand_loc[ins[0]];
  rtx out_rtx = out < 0 ? in_rtx : *curr_id->operand_loc[out];

  inmode = curr_operand_mode[ins[0]];
  outmode = out < 0 ? inmode : curr_operand_mode[out];
  push_to_sequence (*before);
  if (inmode != outmode)
    {
      /* process_alt_operands has already checked that the mode sizes
	 are ordered.  */
      if (partial_subreg_p (outmode, inmode))
	{
	  /* The input is wider: create the reload pseudo in INMODE and
	     refer to the output through a subreg of it.  */
	  bool asm_p = asm_noperands (PATTERN (curr_insn)) >= 0;
	  int hr;
	  HARD_REG_SET temp_hard_reg_set;

	  if (asm_p && (hr = get_hard_regno (out_rtx)) >= 0
	      && hard_regno_nregs (hr, inmode) > 1)
	    {
	      /* See gcc.c-torture/execute/20030222-1.c.
		 Consider the code for 32-bit (e.g. BE) target:
		 int i, v; long x; x = v; asm ("" : "=r" (i) : "0" (x));
		 We generate the following RTL with reload insns:
		 1. subreg:si(x:di, 0) = 0;
		 2. subreg:si(x:di, 4) = v:si;
		 3. t:di = x:di, dead x;
		 4. asm ("" : "=r" (subreg:si(t:di,4)) : "0" (t:di))
		 5. i:si = subreg:si(t:di,4);
		 If we assign hard reg of x to t, dead code elimination
		 will remove insn #2 and we will use uninitialized hard reg.
		 So exclude the hard reg of x for t.  We could ignore this
		 problem for non-empty asm using all x value but it is hard to
		 check that the asm are expanded into insn really using x
		 and setting r.  */
	      CLEAR_HARD_REG_SET (temp_hard_reg_set);
	      if (exclude_start_hard_regs != NULL)
		temp_hard_reg_set = *exclude_start_hard_regs;
	      SET_HARD_REG_BIT (temp_hard_reg_set, hr);
	      exclude_start_hard_regs = &temp_hard_reg_set;
	    }
	  reg = new_in_reg
	    = lra_create_new_reg_with_unique_value (inmode, in_rtx, goal_class,
						    exclude_start_hard_regs,
						    "");
	  new_out_reg = get_matching_reload_reg_subreg (outmode, reg, goal_class);
	  LRA_SUBREG_P (new_out_reg) = 1;
	  /* If the input reg is dying here, we can use the same hard
	     register for REG and IN_RTX.  We do it only for original
	     pseudos as reload pseudos can die although original
	     pseudos still live where reload pseudos dies.  */
	  if (REG_P (in_rtx) && (int) REGNO (in_rtx) < lra_new_regno_start
	      && find_regno_note (curr_insn, REG_DEAD, REGNO (in_rtx))
	      && (!early_clobber_p
		  || check_conflict_input_operands(REGNO (in_rtx), ins)))
	    lra_assign_reg_val (REGNO (in_rtx), REGNO (reg));
	}
      else
	{
	  /* The output is wider: create the reload pseudo in OUTMODE
	     and refer to the input through a subreg of it.  */
	  reg = new_out_reg
	    = lra_create_new_reg_with_unique_value (outmode, out_rtx,
						    goal_class,
						    exclude_start_hard_regs,
						    "");
	  new_in_reg = get_matching_reload_reg_subreg (inmode, reg, goal_class);
	  /* NEW_IN_REG is non-paradoxical subreg.  We don't want
	     NEW_OUT_REG living above.  We add clobber clause for
	     this.  This is just a temporary clobber.  We can remove
	     it at the end of LRA work.  */
	  rtx_insn *clobber = emit_clobber (new_out_reg);
	  LRA_TEMP_CLOBBER_P (PATTERN (clobber)) = 1;
	  LRA_SUBREG_P (new_in_reg) = 1;
	  if (GET_CODE (in_rtx) == SUBREG)
	    {
	      rtx subreg_reg = SUBREG_REG (in_rtx);

	      /* If SUBREG_REG is dying here and sub-registers IN_RTX
		 and NEW_IN_REG are similar, we can use the same hard
		 register for REG and SUBREG_REG.  */
	      if (REG_P (subreg_reg)
		  && (int) REGNO (subreg_reg) < lra_new_regno_start
		  && GET_MODE (subreg_reg) == outmode
		  && known_eq (SUBREG_BYTE (in_rtx), SUBREG_BYTE (new_in_reg))
		  && find_regno_note (curr_insn, REG_DEAD, REGNO (subreg_reg))
		  && (! early_clobber_p
		      || check_conflict_input_operands (REGNO (subreg_reg),
							ins)))
		lra_assign_reg_val (REGNO (subreg_reg), REGNO (reg));
	    }
	}
    }
  else
    {
      /* Pseudos have values -- see comments for lra_reg_info.
	 Different pseudos with the same value do not conflict even if
	 they live in the same place.  When we create a pseudo we
	 assign value of original pseudo (if any) from which we
	 created the new pseudo.  If we create the pseudo from the
	 input pseudo, the new pseudo will have no conflict with the
	 input pseudo which is wrong when the input pseudo lives after
	 the insn and as the new pseudo value is changed by the insn
	 output.  Therefore we create the new pseudo from the output
	 except the case when we have single matched dying input
	 pseudo.

	 We cannot reuse the current output register because we might
	 have a situation like "a <- a op b", where the constraints
	 force the second input operand ("b") to match the output
	 operand ("a").  "b" must then be copied into a new register
	 so that it doesn't clobber the current value of "a".

	 We cannot use the same value if the output pseudo is
	 early clobbered or the input pseudo is mentioned in the
	 output, e.g. as an address part in memory, because
	 output reload will actually extend the pseudo liveness.
	 We don't care about eliminable hard regs here as we are
	 interested only in pseudos.  */

      /* Matching input's register value is the same as one of the other
	 output operand.  Output operands in a parallel insn must be in
	 different registers.  */
      out_conflict = false;
      if (REG_P (in_rtx))
	{
	  for (i = 0; outs[i] >= 0; i++)
	    {
	      rtx other_out_rtx = *curr_id->operand_loc[outs[i]];
	      if (outs[i] != out && REG_P (other_out_rtx)
		  && (regno_val_use_in (REGNO (in_rtx), other_out_rtx)
		      != NULL_RTX))
		{
		  out_conflict = true;
		  break;
		}
	    }
	}

      /* Reuse the input pseudo's value only for a single, dying,
	 original, non-conflicting matched input; otherwise create a
	 fresh value from the output.  */
      new_in_reg = new_out_reg
	= (! early_clobber_p && ins[1] < 0 && REG_P (in_rtx)
	   && (int) REGNO (in_rtx) < lra_new_regno_start
	   && find_regno_note (curr_insn, REG_DEAD, REGNO (in_rtx))
	   && (! early_clobber_p
	       || check_conflict_input_operands (REGNO (in_rtx), ins))
	   && (out < 0
	       || regno_val_use_in (REGNO (in_rtx), out_rtx) == NULL_RTX)
	   && !out_conflict
	   ? lra_create_new_reg (inmode, in_rtx, goal_class,
				 exclude_start_hard_regs, "")
	   : lra_create_new_reg_with_unique_value (outmode, out_rtx, goal_class,
						   exclude_start_hard_regs,
						   ""));
    }
  /* In operand can be got from transformations before processing insn
     constraints.  One example of such transformations is subreg
     reloading (see function simplify_operand_subreg).  The new
     pseudos created by the transformations might have inaccurate
     class (ALL_REGS) and we should make their classes more
     accurate.  */
  narrow_reload_pseudo_class (in_rtx, goal_class);
  lra_emit_move (copy_rtx (new_in_reg), in_rtx);
  *before = end_sequence ();
  /* Add the new pseudo to consider values of subsequent input reload
     pseudos.  */
  lra_assert (curr_insn_input_reloads_num < LRA_MAX_INSN_RELOADS);
  curr_insn_input_reloads[curr_insn_input_reloads_num].input = in_rtx;
  curr_insn_input_reloads[curr_insn_input_reloads_num].match_p = true;
  curr_insn_input_reloads[curr_insn_input_reloads_num].early_clobber_p = false;
  curr_insn_input_reloads[curr_insn_input_reloads_num++].reg = new_in_reg;
  /* Substitute the reload pseudo for every matched input operand,
     using the output-mode pseudo where the operand's mode requires.  */
  for (i = 0; (in = ins[i]) >= 0; i++)
    if (GET_MODE (*curr_id->operand_loc[in]) == VOIDmode
	|| GET_MODE (new_in_reg) == GET_MODE (*curr_id->operand_loc[in]))
      *curr_id->operand_loc[in] = new_in_reg;
    else
      {
	lra_assert
	  (GET_MODE (new_out_reg) == GET_MODE (*curr_id->operand_loc[in]));
	*curr_id->operand_loc[in] = new_out_reg;
      }
  lra_update_dups (curr_id, ins);
  if (out < 0)
    return;
  /* See a comment for the input operand above.  */
  narrow_reload_pseudo_class (out_rtx, goal_class);
  reg = SUBREG_P (out_rtx) ? SUBREG_REG (out_rtx) : out_rtx;
  /* Emit the output reload move unless the output is unused or a
     former scratch.  */
  if (find_reg_note (curr_insn, REG_UNUSED, reg) == NULL_RTX
      && (!REG_P (reg) || !ira_former_scratch_p (REGNO (reg))))
    {
      start_sequence ();
      /* If we had strict_low_part, use it also in reload to keep other
	 parts unchanged but do it only for regs as strict_low_part
	 has no sense for memory and probably there is no insn pattern
	 to match the reload insn in memory case.  */
      if (out >= 0 && curr_static_id->operand[out].strict_low && REG_P (reg))
	out_rtx = gen_rtx_STRICT_LOW_PART (VOIDmode, out_rtx);
      lra_emit_move (out_rtx, copy_rtx (new_out_reg));
      emit_insn (*after);
      *after = end_sequence ();
    }
  *curr_id->operand_loc[out] = new_out_reg;
  lra_update_dup (curr_id, out);
}
1313 : }
1314 :
/* Return register class which is union of all reg classes in insn
   constraint alternative string starting with P.  Scanning stops at
   the end of the alternative ('#', ',' or the terminating NUL).  */
static enum reg_class
reg_class_from_constraints (const char *p)
{
  int c, len;
  enum reg_class op_class = NO_REGS;

  do
    switch ((c = *p, len = CONSTRAINT_LEN (c, p)), c)
      {
      case '#':
      case ',':
	return op_class;

      case 'g':
	op_class = reg_class_subunion[op_class][GENERAL_REGS];
	break;

      default:
	enum constraint_num cn = lookup_constraint (p);
	enum reg_class cl = reg_class_for_constraint (cn);
	if (cl == NO_REGS)
	  {
	    /* An address constraint contributes the class of registers
	       usable as a base in a generic address; other non-register
	       constraints contribute nothing.  */
	    if (insn_extra_address_constraint (cn))
	      op_class
		= (reg_class_subunion
		   [op_class][base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
					      ADDRESS, SCRATCH)]);
	    break;
	  }

	op_class = reg_class_subunion[op_class][cl];
	break;
      }
  while ((p += len), c);
  return op_class;
}
1353 :
1354 : /* If OP is a register, return the class of the register as per
1355 : get_reg_class, otherwise return NO_REGS. */
1356 : static inline enum reg_class
1357 162555044 : get_op_class (rtx op)
1358 : {
1359 134915383 : return REG_P (op) ? get_reg_class (REGNO (op)) : NO_REGS;
1360 : }
1361 :
1362 : /* Return generated insn mem_pseudo:=val if TO_P or val:=mem_pseudo
1363 : otherwise. If modes of MEM_PSEUDO and VAL are different, use
1364 : SUBREG for VAL to make them equal. */
1365 : static rtx_insn *
1366 1321809 : emit_spill_move (bool to_p, rtx mem_pseudo, rtx val)
1367 : {
1368 1321809 : if (GET_MODE (mem_pseudo) != GET_MODE (val))
1369 : {
1370 : /* Usually size of mem_pseudo is greater than val size but in
1371 : rare cases it can be less as it can be defined by target
1372 : dependent macro HARD_REGNO_CALLER_SAVE_MODE. */
1373 3010 : if (! MEM_P (val))
1374 : {
1375 3010 : val = gen_lowpart_SUBREG (GET_MODE (mem_pseudo),
1376 : GET_CODE (val) == SUBREG
1377 : ? SUBREG_REG (val) : val);
1378 3010 : LRA_SUBREG_P (val) = 1;
1379 : }
1380 : else
1381 : {
1382 0 : mem_pseudo = gen_lowpart_SUBREG (GET_MODE (val), mem_pseudo);
1383 0 : LRA_SUBREG_P (mem_pseudo) = 1;
1384 : }
1385 : }
1386 1321809 : return to_p ? gen_move_insn (mem_pseudo, val)
1387 667484 : : gen_move_insn (val, mem_pseudo);
1388 : }
1389 :
/* Process a special case insn (register move), return true if we
   don't need to process it anymore.  INSN should be a single set
   insn.  Set up that RTL was changed through CHANGE_P and that hook
   TARGET_SECONDARY_MEMORY_NEEDED says to use secondary memory through
   SEC_MEM_P.  */
static bool
check_and_process_move (bool *change_p, bool *sec_mem_p ATTRIBUTE_UNUSED)
{
  int sregno, dregno;
  rtx dest, src, dreg, sreg, new_reg, scratch_reg;
  rtx_insn *before;
  enum reg_class dclass, sclass, secondary_class;
  secondary_reload_info sri;

  lra_assert (curr_insn_set != NULL_RTX);
  dreg = dest = SET_DEST (curr_insn_set);
  sreg = src = SET_SRC (curr_insn_set);
  if (GET_CODE (dest) == SUBREG)
    dreg = SUBREG_REG (dest);
  if (GET_CODE (src) == SUBREG)
    sreg = SUBREG_REG (src);
  /* This fast path handles only reg/mem <- reg/mem moves.  */
  if (! (REG_P (dreg) || MEM_P (dreg)) || ! (REG_P (sreg) || MEM_P (sreg)))
    return false;
  sclass = dclass = NO_REGS;
  if (REG_P (dreg))
    dclass = get_reg_class (REGNO (dreg));
  gcc_assert (dclass < LIM_REG_CLASSES && dclass >= NO_REGS);
  if (dclass == ALL_REGS)
    /* ALL_REGS is used for new pseudos created by transformations
       like reload of SUBREG_REG (see function
       simplify_operand_subreg).  We don't know their class yet.  We
       should figure out the class from processing the insn
       constraints not in this fast path function.  Even if ALL_REGS
       were a right class for the pseudo, secondary_... hooks usually
       are not defined for ALL_REGS.  */
    return false;
  if (REG_P (sreg))
    sclass = get_reg_class (REGNO (sreg));
  gcc_assert (sclass < LIM_REG_CLASSES && sclass >= NO_REGS);
  if (sclass == ALL_REGS)
    /* See comments above.  */
    return false;
  if (sclass == NO_REGS && dclass == NO_REGS)
    return false;
  /* Does the move have to go through secondary memory?  */
  if (targetm.secondary_memory_needed (GET_MODE (src), sclass, dclass)
      && ((sclass != NO_REGS && dclass != NO_REGS)
	  || (GET_MODE (src)
	      != targetm.secondary_memory_needed_mode (GET_MODE (src)))))
    {
      *sec_mem_p = true;
      return false;
    }
  if (! REG_P (dreg) || ! REG_P (sreg))
    return false;
  sri.prev_sri = NULL;
  sri.icode = CODE_FOR_nothing;
  sri.extra_cost = 0;
  secondary_class = NO_REGS;
  /* Set up hard register for a reload pseudo for hook
     secondary_reload because some targets just ignore unassigned
     pseudos in the hook.  */
  if (dclass != NO_REGS && lra_get_regno_hard_regno (REGNO (dreg)) < 0)
    {
      dregno = REGNO (dreg);
      reg_renumber[dregno] = ira_class_hard_regs[dclass][0];
    }
  else
    dregno = -1;
  if (sclass != NO_REGS && lra_get_regno_hard_regno (REGNO (sreg)) < 0)
    {
      sregno = REGNO (sreg);
      reg_renumber[sregno] = ira_class_hard_regs[sclass][0];
    }
  else
    sregno = -1;
  /* Ask the target about an input secondary reload for the source.  */
  if (sclass != NO_REGS)
    secondary_class
      = (enum reg_class) targetm.secondary_reload (false, dest,
						   (reg_class_t) sclass,
						   GET_MODE (src), &sri);
  /* And about an output secondary reload for the destination.  */
  if (sclass == NO_REGS
      || ((secondary_class != NO_REGS || sri.icode != CODE_FOR_nothing)
	  && dclass != NO_REGS))
    {
      enum reg_class old_sclass = secondary_class;
      secondary_reload_info old_sri = sri;

      sri.prev_sri = NULL;
      sri.icode = CODE_FOR_nothing;
      sri.extra_cost = 0;
      secondary_class
	= (enum reg_class) targetm.secondary_reload (true, src,
						     (reg_class_t) dclass,
						     GET_MODE (src), &sri);
      /* Check the target hook consistency.  */
      lra_assert
	((secondary_class == NO_REGS && sri.icode == CODE_FOR_nothing)
	 || (old_sclass == NO_REGS && old_sri.icode == CODE_FOR_nothing)
	 || (secondary_class == old_sclass && sri.icode == old_sri.icode));
    }
  /* Undo the temporary hard register assignments made above.  */
  if (sregno >= 0)
    reg_renumber [sregno] = -1;
  if (dregno >= 0)
    reg_renumber [dregno] = -1;
  if (secondary_class == NO_REGS && sri.icode == CODE_FOR_nothing)
    return false;
  *change_p = true;
  new_reg = NULL_RTX;
  if (secondary_class != NO_REGS)
    new_reg = lra_create_new_reg_with_unique_value (GET_MODE (src), NULL_RTX,
						    secondary_class, NULL,
						    "secondary");
  start_sequence ();
  if (sri.icode == CODE_FOR_nothing)
    lra_emit_move (new_reg, src);
  else
    {
      /* The target wants an intermediate insn with a scratch operand;
	 create the scratch from the insn's operand 2 constraint.  */
      enum reg_class scratch_class;

      scratch_class = (reg_class_from_constraints
		       (insn_data[sri.icode].operand[2].constraint));
      scratch_reg = (lra_create_new_reg_with_unique_value
		     (insn_data[sri.icode].operand[2].mode, NULL_RTX,
		      scratch_class, NULL, "scratch"));
      emit_insn (GEN_FCN (sri.icode) (new_reg != NULL_RTX ? new_reg : dest,
				      src, scratch_reg));
    }
  before = end_sequence ();
  lra_process_new_insns (curr_insn, before, NULL, "Inserting the move");
  if (new_reg != NULL_RTX)
    SET_SRC (curr_insn_set) = new_reg;
  else
    {
      if (lra_dump_file != NULL)
	{
	  fprintf (lra_dump_file, "Deleting move %u\n", INSN_UID (curr_insn));
	  dump_insn_slim (lra_dump_file, curr_insn);
	}
      lra_set_insn_deleted (curr_insn);
      return true;
    }
  return false;
}
1533 :
1534 : /* The following data describe the result of process_alt_operands.
1535 : The data are used in curr_insn_transform to generate reloads. */
1536 :
1537 : /* The chosen reg classes which should be used for the corresponding
1538 : operands. */
1539 : static enum reg_class goal_alt[MAX_RECOG_OPERANDS];
1540 : /* Hard registers which cannot be a start hard register for the corresponding
1541 : operands. */
1542 : static HARD_REG_SET goal_alt_exclude_start_hard_regs[MAX_RECOG_OPERANDS];
1543 : /* True if the operand should be the same as another operand and that
1544 : other operand does not need a reload. */
1545 : static bool goal_alt_match_win[MAX_RECOG_OPERANDS];
1546 : /* True if the operand does not need a reload. */
1547 : static bool goal_alt_win[MAX_RECOG_OPERANDS];
1548 : /* True if the operand can be offsetable memory. */
1549 : static bool goal_alt_offmemok[MAX_RECOG_OPERANDS];
1550 : /* The number of an operand to which given operand can be matched to. */
1551 : static int goal_alt_matches[MAX_RECOG_OPERANDS];
1552 : /* The number of elements in the following array. */
1553 : static int goal_alt_dont_inherit_ops_num;
1554 : /* Numbers of operands whose reload pseudos should not be inherited. */
1555 : static int goal_alt_dont_inherit_ops[MAX_RECOG_OPERANDS];
1556 : /* True if we should try only this alternative for the next constraint sub-pass
1557 : to speed up the sub-pass. */
1558 : static bool goal_reuse_alt_p;
1559 : /* True if the insn commutative operands should be swapped. */
1560 : static bool goal_alt_swapped;
1561 : /* The chosen insn alternative. */
1562 : static int goal_alt_number;
1563 : /* True if output reload of the stack pointer should be generated. */
1564 : static bool goal_alt_out_sp_reload_p;
1565 :
1566 : /* True if the corresponding operand is the result of an equivalence
1567 : substitution. */
1568 : static bool equiv_substition_p[MAX_RECOG_OPERANDS];
1569 :
/* The following four variables are used to choose the best insn
   alternative.  They reflect final characteristics of the best
   alternative.  */
1573 :
1574 : /* Number of necessary reloads and overall cost reflecting the
1575 : previous value and other unpleasantness of the best alternative. */
1576 : static int best_losers, best_overall;
/* Overall number of hard registers used for reloads.  For example, on
   some targets we need 2 general registers to reload DFmode and only
   one floating point register.  */
1580 : static int best_reload_nregs;
/* Overall number reflecting distances of previous reloads of the same
   value.  The distances are counted from the current BB start.  It is
   used to improve inheritance chances.  */
1584 : static int best_reload_sum;
1585 :
1586 : /* True if the current insn should have no correspondingly input or
1587 : output reloads. */
1588 : static bool no_input_reloads_p, no_output_reloads_p;
1589 :
1590 : /* True if we swapped the commutative operands in the current
1591 : insn. */
1592 : static int curr_swapped;
1593 :
1594 : /* if CHECK_ONLY_P is false, arrange for address element *LOC to be a
1595 : register of class CL. Add any input reloads to list BEFORE. AFTER
1596 : is nonnull if *LOC is an automodified value; handle that case by
1597 : adding the required output reloads to list AFTER. Return true if
1598 : the RTL was changed.
1599 :
1600 : if CHECK_ONLY_P is true, check that the *LOC is a correct address
1601 : register. Return false if the address register is correct. */
1602 : static bool
1603 35140353 : process_addr_reg (rtx *loc, bool check_only_p, rtx_insn **before, rtx_insn **after,
1604 : enum reg_class cl)
1605 : {
1606 35140353 : int regno;
1607 35140353 : enum reg_class rclass, new_class;
1608 35140353 : rtx reg;
1609 35140353 : rtx new_reg;
1610 35140353 : machine_mode mode;
1611 35140353 : bool subreg_p, before_p = false;
1612 :
1613 35140353 : subreg_p = GET_CODE (*loc) == SUBREG;
1614 35140353 : if (subreg_p)
1615 : {
1616 14180 : reg = SUBREG_REG (*loc);
1617 14180 : mode = GET_MODE (reg);
1618 :
1619 : /* For mode with size bigger than ptr_mode, there unlikely to be "mov"
1620 : between two registers with different classes, but there normally will
1621 : be "mov" which transfers element of vector register into the general
1622 : register, and this normally will be a subreg which should be reloaded
1623 : as a whole. This is particularly likely to be triggered when
1624 : -fno-split-wide-types specified. */
1625 14180 : if (!REG_P (reg)
1626 14180 : || in_class_p (reg, cl, &new_class)
1627 16358 : || known_le (GET_MODE_SIZE (mode), GET_MODE_SIZE (ptr_mode)))
1628 14180 : loc = &SUBREG_REG (*loc);
1629 : }
1630 :
1631 35140353 : reg = *loc;
1632 35140353 : mode = GET_MODE (reg);
1633 35140353 : if (! REG_P (reg))
1634 : {
1635 0 : if (check_only_p)
1636 : return true;
1637 : /* Always reload memory in an address even if the target supports
1638 : such addresses. */
1639 0 : new_reg = lra_create_new_reg_with_unique_value (mode, reg, cl, NULL,
1640 : "address");
1641 0 : before_p = true;
1642 : }
1643 : else
1644 : {
1645 35140353 : regno = REGNO (reg);
1646 35140353 : rclass = get_reg_class (regno);
1647 35140353 : if (! check_only_p
1648 35140353 : && (*loc = get_equiv_with_elimination (reg, curr_insn)) != reg)
1649 : {
1650 111795 : if (lra_dump_file != NULL)
1651 : {
1652 0 : fprintf (lra_dump_file,
1653 : "Changing pseudo %d in address of insn %u on equiv ",
1654 0 : REGNO (reg), INSN_UID (curr_insn));
1655 0 : dump_value_slim (lra_dump_file, *loc, 1);
1656 0 : fprintf (lra_dump_file, "\n");
1657 : }
1658 111795 : rtx new_equiv = copy_rtx (*loc);
1659 111795 : if (lra_pointer_equiv_set_in (*loc))
1660 106657 : lra_pointer_equiv_set_add (new_equiv);
1661 111795 : *loc = new_equiv;
1662 : }
1663 35140353 : if (*loc != reg || ! in_class_p (reg, cl, &new_class))
1664 : {
1665 475717 : if (check_only_p)
1666 : return true;
1667 475717 : reg = *loc;
1668 475717 : if (get_reload_reg (after == NULL ? OP_IN : OP_INOUT,
1669 : mode, reg, cl, NULL,
1670 : subreg_p, false, "address", &new_reg))
1671 : before_p = true;
1672 : }
1673 34664636 : else if (new_class != NO_REGS && rclass != new_class)
1674 : {
1675 464682 : if (check_only_p)
1676 : return true;
1677 464682 : lra_change_class (regno, new_class, " Change to", true);
1678 464682 : return false;
1679 : }
1680 : else
1681 : return false;
1682 : }
1683 0 : if (before_p)
1684 : {
1685 468399 : push_to_sequence (*before);
1686 468399 : lra_emit_move (new_reg, reg);
1687 468399 : *before = end_sequence ();
1688 : }
1689 475717 : *loc = new_reg;
1690 475717 : if (after != NULL)
1691 : {
1692 0 : start_sequence ();
1693 0 : lra_emit_move (before_p ? copy_rtx (reg) : reg, new_reg);
1694 0 : emit_insn (*after);
1695 0 : *after = end_sequence ();
1696 : }
1697 : return true;
1698 : }
1699 :
1700 : /* Insert move insn in simplify_operand_subreg. BEFORE returns
1701 : the insn to be inserted before curr insn. AFTER returns the
1702 : the insn to be inserted after curr insn. ORIGREG and NEWREG
1703 : are the original reg and new reg for reload. */
1704 : static void
1705 458 : insert_move_for_subreg (rtx_insn **before, rtx_insn **after, rtx origreg,
1706 : rtx newreg)
1707 : {
1708 458 : if (before)
1709 : {
1710 458 : push_to_sequence (*before);
1711 458 : lra_emit_move (newreg, origreg);
1712 458 : *before = end_sequence ();
1713 : }
1714 458 : if (after)
1715 : {
1716 0 : start_sequence ();
1717 0 : lra_emit_move (origreg, newreg);
1718 0 : emit_insn (*after);
1719 0 : *after = end_sequence ();
1720 : }
1721 458 : }
1722 :
1723 : static bool valid_address_p (machine_mode mode, rtx addr, addr_space_t as);
1724 : static bool process_address (int, bool, rtx_insn **, rtx_insn **);
1725 :
1726 : /* Make reloads for subreg in operand NOP with internal subreg mode
1727 : REG_MODE, add new reloads for further processing. Return true if
1728 : any change was done. */
1729 : static bool
1730 175122045 : simplify_operand_subreg (int nop, machine_mode reg_mode)
1731 : {
1732 175122045 : int hard_regno, inner_hard_regno;
1733 175122045 : rtx_insn *before, *after;
1734 175122045 : machine_mode mode, innermode;
1735 175122045 : rtx reg, new_reg;
1736 175122045 : rtx operand = *curr_id->operand_loc[nop];
1737 175122045 : enum reg_class regclass;
1738 175122045 : enum op_type type;
1739 :
1740 175122045 : before = after = NULL;
1741 :
1742 175122045 : if (GET_CODE (operand) != SUBREG)
1743 : return false;
1744 :
1745 3611035 : mode = GET_MODE (operand);
1746 3611035 : reg = SUBREG_REG (operand);
1747 3611035 : innermode = GET_MODE (reg);
1748 3611035 : type = curr_static_id->operand[nop].type;
1749 3611035 : if (MEM_P (reg))
1750 : {
1751 11250 : const bool addr_was_valid
1752 11250 : = valid_address_p (innermode, XEXP (reg, 0), MEM_ADDR_SPACE (reg));
1753 11250 : alter_subreg (curr_id->operand_loc[nop], false);
1754 11250 : rtx subst = *curr_id->operand_loc[nop];
1755 11250 : lra_assert (MEM_P (subst));
1756 11250 : const bool addr_is_valid = valid_address_p (GET_MODE (subst),
1757 : XEXP (subst, 0),
1758 11250 : MEM_ADDR_SPACE (subst));
1759 11250 : if (!addr_was_valid
1760 11250 : || addr_is_valid
1761 11250 : || ((get_constraint_type (lookup_constraint
1762 0 : (curr_static_id->operand[nop].constraint))
1763 : != CT_SPECIAL_MEMORY)
1764 : /* We still can reload address and if the address is
1765 : valid, we can remove subreg without reloading its
1766 : inner memory. */
1767 0 : && valid_address_p (GET_MODE (subst),
1768 0 : regno_reg_rtx
1769 : [ira_class_hard_regs
1770 0 : [base_reg_class (GET_MODE (subst),
1771 0 : MEM_ADDR_SPACE (subst),
1772 0 : ADDRESS, SCRATCH)][0]],
1773 0 : MEM_ADDR_SPACE (subst))))
1774 : {
1775 : /* If we change the address for a paradoxical subreg of memory, the
1776 : new address might violate the necessary alignment or the access
1777 : might be slow; take this into consideration. We need not worry
1778 : about accesses beyond allocated memory for paradoxical memory
1779 : subregs as we don't substitute such equiv memory (see processing
1780 : equivalences in function lra_constraints) and because for spilled
1781 : pseudos we allocate stack memory enough for the biggest
1782 : corresponding paradoxical subreg.
1783 :
1784 : However, do not blindly simplify a (subreg (mem ...)) for
1785 : WORD_REGISTER_OPERATIONS targets as this may lead to loading junk
1786 : data into a register when the inner is narrower than outer or
1787 : missing important data from memory when the inner is wider than
1788 : outer. This rule only applies to modes that are no wider than
1789 : a word.
1790 :
1791 : If valid memory becomes invalid after subreg elimination
1792 : and address might be different we still have to reload
1793 : memory.
1794 : */
1795 11250 : if ((! addr_was_valid
1796 : || addr_is_valid
1797 0 : || known_eq (GET_MODE_SIZE (mode), GET_MODE_SIZE (innermode)))
1798 11250 : && !(maybe_ne (GET_MODE_PRECISION (mode),
1799 11250 : GET_MODE_PRECISION (innermode))
1800 13747 : && known_le (GET_MODE_SIZE (mode), UNITS_PER_WORD)
1801 19793 : && known_le (GET_MODE_SIZE (innermode), UNITS_PER_WORD)
1802 : && WORD_REGISTER_OPERATIONS)
1803 23630 : && (!(MEM_ALIGN (subst) < GET_MODE_ALIGNMENT (mode)
1804 1130 : && targetm.slow_unaligned_access (mode, MEM_ALIGN (subst)))
1805 0 : || (MEM_ALIGN (reg) < GET_MODE_ALIGNMENT (innermode)
1806 0 : && targetm.slow_unaligned_access (innermode,
1807 0 : MEM_ALIGN (reg)))))
1808 11250 : return true;
1809 :
1810 0 : *curr_id->operand_loc[nop] = operand;
1811 :
1812 : /* But if the address was not valid, we cannot reload the MEM without
1813 : reloading the address first. */
1814 0 : if (!addr_was_valid)
1815 0 : process_address (nop, false, &before, &after);
1816 :
1817 : /* INNERMODE is fast, MODE slow. Reload the mem in INNERMODE. */
1818 0 : enum reg_class rclass
1819 0 : = (enum reg_class) targetm.preferred_reload_class (reg, ALL_REGS);
1820 0 : if (get_reload_reg (curr_static_id->operand[nop].type, innermode,
1821 : reg, rclass, NULL,
1822 : true, false, "slow/invalid mem", &new_reg))
1823 : {
1824 0 : bool insert_before, insert_after;
1825 0 : bitmap_set_bit (&lra_subreg_reload_pseudos, REGNO (new_reg));
1826 :
1827 0 : insert_before = (type != OP_OUT
1828 0 : || partial_subreg_p (mode, innermode));
1829 0 : insert_after = type != OP_IN;
1830 0 : insert_move_for_subreg (insert_before ? &before : NULL,
1831 : insert_after ? &after : NULL,
1832 : reg, new_reg);
1833 : }
1834 0 : SUBREG_REG (operand) = new_reg;
1835 :
1836 : /* Convert to MODE. */
1837 0 : reg = operand;
1838 0 : rclass
1839 0 : = (enum reg_class) targetm.preferred_reload_class (reg, ALL_REGS);
1840 0 : if (get_reload_reg (curr_static_id->operand[nop].type, mode, reg,
1841 : rclass, NULL,
1842 : true, false, "slow/invalid mem", &new_reg))
1843 : {
1844 0 : bool insert_before, insert_after;
1845 0 : bitmap_set_bit (&lra_subreg_reload_pseudos, REGNO (new_reg));
1846 :
1847 0 : insert_before = type != OP_OUT;
1848 0 : insert_after = type != OP_IN;
1849 0 : insert_move_for_subreg (insert_before ? &before : NULL,
1850 : insert_after ? &after : NULL,
1851 : reg, new_reg);
1852 : }
1853 0 : *curr_id->operand_loc[nop] = new_reg;
1854 0 : lra_process_new_insns (curr_insn, before, after,
1855 : "Inserting slow/invalid mem reload");
1856 0 : return true;
1857 : }
1858 :
1859 : /* If the address was valid and became invalid, prefer to reload
1860 : the memory. Typical case is when the index scale should
1861 : correspond the memory. */
1862 0 : *curr_id->operand_loc[nop] = operand;
1863 : /* Do not return false here as the MEM_P (reg) will be processed
1864 : later in this function. */
1865 : }
1866 3599785 : else if (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER)
1867 : {
1868 67 : alter_subreg (curr_id->operand_loc[nop], false);
1869 67 : return true;
1870 : }
1871 3599718 : else if (CONSTANT_P (reg))
1872 : {
1873 : /* Try to simplify subreg of constant. It is usually result of
1874 : equivalence substitution. */
1875 43782 : if (innermode == VOIDmode
1876 43782 : && (innermode = original_subreg_reg_mode[nop]) == VOIDmode)
1877 0 : innermode = curr_static_id->operand[nop].mode;
1878 43782 : if ((new_reg = simplify_subreg (mode, reg, innermode,
1879 43782 : SUBREG_BYTE (operand))) != NULL_RTX)
1880 : {
1881 43365 : *curr_id->operand_loc[nop] = new_reg;
1882 43365 : return true;
1883 : }
1884 : }
1885 : /* Put constant into memory when we have mixed modes. It generates
1886 : a better code in most cases as it does not need a secondary
1887 : reload memory. It also prevents LRA looping when LRA is using
1888 : secondary reload memory again and again. */
1889 834 : if (CONSTANT_P (reg) && CONST_POOL_OK_P (reg_mode, reg)
1890 3556770 : && SCALAR_INT_MODE_P (reg_mode) != SCALAR_INT_MODE_P (mode))
1891 : {
1892 8 : SUBREG_REG (operand) = force_const_mem (reg_mode, reg);
1893 8 : alter_subreg (curr_id->operand_loc[nop], false);
1894 8 : return true;
1895 : }
1896 3556345 : auto fp_subreg_can_be_simplified_after_reload_p = [] (machine_mode innermode,
1897 : poly_uint64 offset,
1898 : machine_mode mode) {
1899 0 : reload_completed = 1;
1900 0 : bool res = simplify_subreg_regno (FRAME_POINTER_REGNUM,
1901 : innermode,
1902 0 : offset, mode) >= 0;
1903 0 : reload_completed = 0;
1904 0 : return res;
1905 : };
1906 : /* Force a reload of the SUBREG_REG if this is a constant or PLUS or
1907 : if there may be a problem accessing OPERAND in the outer
1908 : mode. */
1909 3556345 : if ((REG_P (reg)
1910 3555887 : && REGNO (reg) >= FIRST_PSEUDO_REGISTER
1911 3555887 : && (hard_regno = lra_get_regno_hard_regno (REGNO (reg))) >= 0
1912 : /* Don't reload paradoxical subregs because we could be looping
1913 : having repeatedly final regno out of hard regs range. */
1914 2999712 : && (hard_regno_nregs (hard_regno, innermode)
1915 2999712 : >= hard_regno_nregs (hard_regno, mode))
1916 2994584 : && simplify_subreg_regno (hard_regno, innermode,
1917 2994584 : SUBREG_BYTE (operand), mode) < 0
1918 : /* Exclude reloading of frame pointer in subreg if frame pointer can not
1919 : be simplified here only because the reload is not finished yet. */
1920 845 : && (hard_regno != FRAME_POINTER_REGNUM
1921 0 : || !fp_subreg_can_be_simplified_after_reload_p (innermode,
1922 0 : SUBREG_BYTE (operand),
1923 : mode))
1924 : /* Don't reload subreg for matching reload. It is actually
1925 : valid subreg in LRA. */
1926 845 : && ! LRA_SUBREG_P (operand))
1927 7112232 : || CONSTANT_P (reg) || GET_CODE (reg) == PLUS || MEM_P (reg))
1928 : {
1929 458 : enum reg_class rclass;
1930 :
1931 458 : if (REG_P (reg))
1932 : /* There is a big probability that we will get the same class
1933 : for the new pseudo and we will get the same insn which
1934 : means infinite looping. So spill the new pseudo. */
1935 : rclass = NO_REGS;
1936 : else
1937 : /* The class will be defined later in curr_insn_transform. */
1938 458 : rclass
1939 458 : = (enum reg_class) targetm.preferred_reload_class (reg, ALL_REGS);
1940 :
1941 458 : if (get_reload_reg (curr_static_id->operand[nop].type, reg_mode, reg,
1942 : rclass, NULL,
1943 : true, false, "subreg reg", &new_reg))
1944 : {
1945 458 : bool insert_before, insert_after;
1946 458 : bitmap_set_bit (&lra_subreg_reload_pseudos, REGNO (new_reg));
1947 :
1948 916 : insert_before = (type != OP_OUT
1949 458 : || read_modify_subreg_p (operand));
1950 458 : insert_after = (type != OP_IN);
1951 916 : insert_move_for_subreg (insert_before ? &before : NULL,
1952 : insert_after ? &after : NULL,
1953 : reg, new_reg);
1954 : }
1955 458 : SUBREG_REG (operand) = new_reg;
1956 458 : lra_process_new_insns (curr_insn, before, after,
1957 : "Inserting subreg reload");
1958 458 : return true;
1959 : }
1960 : /* Force a reload for a paradoxical subreg. For paradoxical subreg,
1961 : IRA allocates hardreg to the inner pseudo reg according to its mode
1962 : instead of the outermode, so the size of the hardreg may not be enough
1963 : to contain the outermode operand, in that case we may need to insert
1964 : reload for the reg. For the following two types of paradoxical subreg,
1965 : we need to insert reload:
1966 : 1. If the op_type is OP_IN, and the hardreg could not be paired with
1967 : other hardreg to contain the outermode operand
1968 : (checked by in_hard_reg_set_p), we need to insert the reload.
1969 : 2. If the op_type is OP_OUT or OP_INOUT.
1970 :
1971 : Here is a paradoxical subreg example showing how the reload is generated:
1972 :
1973 : (insn 5 4 7 2 (set (reg:TI 106 [ __comp ])
1974 : (subreg:TI (reg:DI 107 [ __comp ]) 0)) {*movti_internal_rex64}
1975 :
1976 : In IRA, reg107 is allocated to a DImode hardreg. We use x86-64 as example
1977 : here, if reg107 is assigned to hardreg R15, because R15 is the last
1978 : hardreg, compiler cannot find another hardreg to pair with R15 to
1979 : contain TImode data. So we insert a TImode reload reg180 for it.
1980 : After reload is inserted:
1981 :
1982 : (insn 283 0 0 (set (subreg:DI (reg:TI 180 [orig:107 __comp ] [107]) 0)
1983 : (reg:DI 107 [ __comp ])) -1
1984 : (insn 5 4 7 2 (set (reg:TI 106 [ __comp ])
1985 : (subreg:TI (reg:TI 180 [orig:107 __comp ] [107]) 0)) {*movti_internal_rex64}
1986 :
1987 : Two reload hard registers will be allocated to reg180 to save TImode data
1988 : in LRA_assign.
1989 :
1990 : For LRA pseudos this should normally be handled by the biggest_mode
1991 : mechanism. However, it's possible for new uses of an LRA pseudo
1992 : to be introduced after we've allocated it, such as when undoing
1993 : inheritance, and the allocated register might not then be appropriate
1994 : for the new uses. */
1995 3555887 : else if (REG_P (reg)
1996 3555887 : && REGNO (reg) >= FIRST_PSEUDO_REGISTER
1997 3555887 : && paradoxical_subreg_p (operand)
1998 1043629 : && (inner_hard_regno = lra_get_regno_hard_regno (REGNO (reg))) >= 0
1999 953072 : && hard_regno_nregs (inner_hard_regno, mode) > 1
2000 3555887 : && ((hard_regno
2001 3561015 : = simplify_subreg_regno (inner_hard_regno, innermode,
2002 5128 : SUBREG_BYTE (operand), mode)) < 0
2003 5128 : || ((hard_regno_nregs (inner_hard_regno, innermode)
2004 5128 : < hard_regno_nregs (hard_regno, mode))
2005 10256 : && (regclass = lra_get_allocno_class (REGNO (reg)))
2006 5128 : && (type != OP_IN
2007 5128 : || !in_hard_reg_set_p (reg_class_contents[regclass],
2008 : mode, hard_regno)
2009 5128 : || overlaps_hard_reg_set_p (lra_no_alloc_regs,
2010 : mode, hard_regno)))))
2011 : {
2012 : /* The class will be defined later in curr_insn_transform. */
2013 0 : enum reg_class rclass
2014 0 : = (enum reg_class) targetm.preferred_reload_class (reg, ALL_REGS);
2015 :
2016 0 : if (get_reload_reg (curr_static_id->operand[nop].type, mode, reg,
2017 : rclass, NULL,
2018 : true, false, "paradoxical subreg", &new_reg))
2019 : {
2020 0 : rtx subreg;
2021 0 : bool insert_before, insert_after;
2022 :
2023 0 : PUT_MODE (new_reg, mode);
2024 0 : subreg = gen_lowpart_SUBREG (innermode, new_reg);
2025 0 : bitmap_set_bit (&lra_subreg_reload_pseudos, REGNO (new_reg));
2026 :
2027 0 : insert_before = (type != OP_OUT);
2028 0 : insert_after = (type != OP_IN);
2029 0 : insert_move_for_subreg (insert_before ? &before : NULL,
2030 : insert_after ? &after : NULL,
2031 : reg, subreg);
2032 : }
2033 0 : SUBREG_REG (operand) = new_reg;
2034 0 : lra_process_new_insns (curr_insn, before, after,
2035 : "Inserting paradoxical subreg reload");
2036 0 : return true;
2037 : }
2038 : return false;
2039 : }
2040 :
2041 : /* Return TRUE if X refers for a hard register from SET. */
2042 : static bool
2043 407349 : uses_hard_regs_p (rtx x, HARD_REG_SET set)
2044 : {
2045 407349 : int i, j, x_hard_regno;
2046 407349 : machine_mode mode;
2047 407349 : const char *fmt;
2048 407349 : enum rtx_code code;
2049 :
2050 407349 : if (x == NULL_RTX)
2051 : return false;
2052 407349 : code = GET_CODE (x);
2053 407349 : mode = GET_MODE (x);
2054 :
2055 407349 : if (code == SUBREG)
2056 : {
2057 : /* For all SUBREGs we want to check whether the full multi-register
2058 : overlaps the set. For normal SUBREGs this means 'get_hard_regno' of
2059 : the inner register, for paradoxical SUBREGs this means the
2060 : 'get_hard_regno' of the full SUBREG and for complete SUBREGs either is
2061 : fine. Use the wider mode for all cases. */
2062 2717 : rtx subreg = SUBREG_REG (x);
2063 2717 : mode = wider_subreg_mode (x);
2064 2717 : if (mode == GET_MODE (subreg))
2065 : {
2066 1681 : x = subreg;
2067 1681 : code = GET_CODE (x);
2068 : }
2069 : }
2070 :
2071 407349 : if (REG_P (x) || SUBREG_P (x))
2072 : {
2073 265847 : x_hard_regno = get_hard_regno (x);
2074 265847 : return (x_hard_regno >= 0
2075 265847 : && overlaps_hard_reg_set_p (set, mode, x_hard_regno));
2076 : }
2077 141502 : fmt = GET_RTX_FORMAT (code);
2078 367576 : for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2079 : {
2080 228076 : if (fmt[i] == 'e')
2081 : {
2082 110926 : if (uses_hard_regs_p (XEXP (x, i), set))
2083 : return true;
2084 : }
2085 117150 : else if (fmt[i] == 'E')
2086 : {
2087 4398 : for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2088 3980 : if (uses_hard_regs_p (XVECEXP (x, i, j), set))
2089 : return true;
2090 : }
2091 : }
2092 : return false;
2093 : }
2094 :
2095 : /* Return true if OP is a spilled pseudo. */
2096 : static inline bool
2097 80562941 : spilled_pseudo_p (rtx op)
2098 : {
2099 80562941 : return (REG_P (op)
2100 80562941 : && REGNO (op) >= FIRST_PSEUDO_REGISTER && in_mem_p (REGNO (op)));
2101 : }
2102 :
2103 : /* Return true if X is a general constant. */
2104 : static inline bool
2105 7815862 : general_constant_p (rtx x)
2106 : {
2107 7815862 : return CONSTANT_P (x) && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (x));
2108 : }
2109 :
2110 : static bool
2111 24717302 : reg_in_class_p (rtx reg, enum reg_class cl)
2112 : {
2113 24717302 : if (cl == NO_REGS)
2114 1105977 : return get_reg_class (REGNO (reg)) == NO_REGS;
2115 23611325 : return in_class_p (reg, cl, NULL);
2116 : }
2117 :
2118 : /* Return true if SET of RCLASS contains no hard regs which can be
2119 : used in MODE. */
2120 : static bool
2121 3836295 : prohibited_class_reg_set_mode_p (enum reg_class rclass,
2122 : HARD_REG_SET &set,
2123 : machine_mode mode)
2124 : {
2125 3836295 : HARD_REG_SET temp;
2126 :
2127 7672590 : lra_assert (hard_reg_set_subset_p (reg_class_contents[rclass], set));
2128 3836295 : temp = set & ~lra_no_alloc_regs;
2129 3836295 : return (hard_reg_set_subset_p
2130 3836295 : (temp, ira_prohibited_class_mode_regs[rclass][mode]));
2131 : }
2132 :
2133 :
2134 : /* Used to check validity info about small class input operands. It
2135 : should be incremented at start of processing an insn
2136 : alternative. */
2137 : static unsigned int curr_small_class_check = 0;
2138 :
2139 : /* Update number of used inputs of class OP_CLASS for operand NOP
2140 : of alternative NALT. Return true if we have more such class operands
2141 : than the number of available regs. */
2142 : static bool
2143 392885845 : update_and_check_small_class_inputs (int nop, int nalt,
2144 : enum reg_class op_class)
2145 : {
2146 392885845 : static unsigned int small_class_check[LIM_REG_CLASSES];
2147 392885845 : static int small_class_input_nums[LIM_REG_CLASSES];
2148 :
2149 390026135 : if (SMALL_REGISTER_CLASS_P (op_class)
2150 : /* We are interesting in classes became small because of fixing
2151 : some hard regs, e.g. by an user through GCC options. */
2152 2967139 : && hard_reg_set_intersect_p (reg_class_contents[op_class],
2153 2967139 : ira_no_alloc_regs)
2154 392885884 : && (curr_static_id->operand[nop].type != OP_OUT
2155 33 : || TEST_BIT (curr_static_id->operand[nop].early_clobber_alts, nalt)))
2156 : {
2157 6 : if (small_class_check[op_class] == curr_small_class_check)
2158 0 : small_class_input_nums[op_class]++;
2159 : else
2160 : {
2161 6 : small_class_check[op_class] = curr_small_class_check;
2162 6 : small_class_input_nums[op_class] = 1;
2163 : }
2164 6 : if (small_class_input_nums[op_class] > ira_class_hard_regs_num[op_class])
2165 : return true;
2166 : }
2167 : return false;
2168 : }
2169 :
2170 : /* Print operand constraints for alternative ALT_NUMBER of the current
2171 : insn. */
2172 : static void
2173 4590 : print_curr_insn_alt (int alt_number)
2174 : {
2175 15917 : for (int i = 0; i < curr_static_id->n_operands; i++)
2176 : {
2177 11327 : const char *p = (curr_static_id->operand_alternative
2178 11327 : [alt_number * curr_static_id->n_operands + i].constraint);
2179 11327 : if (*p == '\0')
2180 220 : continue;
2181 11107 : fprintf (lra_dump_file, " (%d) ", i);
2182 39435 : for (; *p != '\0' && *p != ',' && *p != '#'; p++)
2183 17221 : fputc (*p, lra_dump_file);
2184 : }
2185 4590 : }
2186 :
2187 : /* Major function to choose the current insn alternative and what
2188 : operands should be reloaded and how. If ONLY_ALTERNATIVE is not
2189 : negative we should consider only this alternative. Return false if
2190 : we cannot choose the alternative or find how to reload the
2191 : operands. */
2192 : static bool
2193 89627109 : process_alt_operands (int only_alternative)
2194 : {
2195 89627109 : bool ok_p = false;
2196 89627109 : int nop, overall, nalt;
2197 89627109 : int n_alternatives = curr_static_id->n_alternatives;
2198 89627109 : int n_operands = curr_static_id->n_operands;
2199 : /* LOSERS counts the operands that don't fit this alternative and
2200 : would require loading. */
2201 89627109 : int losers;
2202 89627109 : int addr_losers;
2203 : /* REJECT is a count of how undesirable this alternative says it is
2204 : if any reloading is required. If the alternative matches exactly
2205 : then REJECT is ignored, but otherwise it gets this much counted
2206 : against it in addition to the reloading needed. */
2207 89627109 : int reject;
2208 : /* This is defined by '!' or '?' alternative constraint and added to
2209 : reject. But in some cases it can be ignored. */
2210 89627109 : int static_reject;
2211 89627109 : int op_reject;
2212 : /* The number of elements in the following array. */
2213 89627109 : int early_clobbered_regs_num;
2214 : /* Numbers of operands which are early clobber registers. */
2215 89627109 : int early_clobbered_nops[MAX_RECOG_OPERANDS];
2216 89627109 : enum reg_class curr_alt[MAX_RECOG_OPERANDS];
2217 89627109 : enum reg_class all_this_alternative;
2218 89627109 : int all_used_nregs, all_reload_nregs;
2219 89627109 : HARD_REG_SET curr_alt_set[MAX_RECOG_OPERANDS];
2220 89627109 : HARD_REG_SET curr_alt_exclude_start_hard_regs[MAX_RECOG_OPERANDS];
2221 89627109 : bool curr_alt_match_win[MAX_RECOG_OPERANDS];
2222 89627109 : bool curr_alt_win[MAX_RECOG_OPERANDS];
2223 89627109 : bool curr_alt_offmemok[MAX_RECOG_OPERANDS];
2224 89627109 : int curr_alt_matches[MAX_RECOG_OPERANDS];
2225 : /* The number of elements in the following array. */
2226 89627109 : int curr_alt_dont_inherit_ops_num;
2227 : /* Numbers of operands whose reload pseudos should not be inherited. */
2228 89627109 : int curr_alt_dont_inherit_ops[MAX_RECOG_OPERANDS];
2229 89627109 : bool curr_reuse_alt_p;
2230 : /* True if output stack pointer reload should be generated for the current
2231 : alternative. */
2232 89627109 : bool curr_alt_out_sp_reload_p;
2233 89627109 : bool curr_alt_class_change_p;
2234 89627109 : rtx op;
2235 : /* The register when the operand is a subreg of register, otherwise the
2236 : operand itself. */
2237 89627109 : rtx no_subreg_reg_operand[MAX_RECOG_OPERANDS];
2238 : /* The register if the operand is a register or subreg of register,
2239 : otherwise NULL. */
2240 89627109 : rtx operand_reg[MAX_RECOG_OPERANDS];
2241 89627109 : int hard_regno[MAX_RECOG_OPERANDS];
2242 89627109 : machine_mode biggest_mode[MAX_RECOG_OPERANDS];
2243 89627109 : int reload_nregs, reload_sum;
2244 89627109 : bool costly_p;
2245 89627109 : enum reg_class cl;
2246 89627109 : const HARD_REG_SET *cl_filter;
2247 89627109 : HARD_REG_SET hard_reg_constraint;
2248 :
2249 : /* Calculate some data common for all alternatives to speed up the
2250 : function. */
2251 297917586 : for (nop = 0; nop < n_operands; nop++)
2252 : {
2253 208290477 : rtx reg;
2254 :
2255 208290477 : op = no_subreg_reg_operand[nop] = *curr_id->operand_loc[nop];
2256 : /* The real hard regno of the operand after the allocation. */
2257 208290477 : hard_regno[nop] = get_hard_regno (op);
2258 :
2259 208290477 : operand_reg[nop] = reg = op;
2260 208290477 : biggest_mode[nop] = GET_MODE (op);
2261 208290477 : if (GET_CODE (op) == SUBREG)
2262 : {
2263 4082381 : biggest_mode[nop] = wider_subreg_mode (op);
2264 4082381 : operand_reg[nop] = reg = SUBREG_REG (op);
2265 : }
2266 208290477 : if (! REG_P (reg))
2267 88184745 : operand_reg[nop] = NULL_RTX;
2268 120105732 : else if (REGNO (reg) >= FIRST_PSEUDO_REGISTER
2269 141077876 : || ((int) REGNO (reg)
2270 20972144 : == lra_get_elimination_hard_regno (REGNO (reg))))
2271 117173337 : no_subreg_reg_operand[nop] = reg;
2272 : else
2273 2932395 : operand_reg[nop] = no_subreg_reg_operand[nop]
2274 : /* Just use natural mode for elimination result. It should
2275 : be enough for extra constraints hooks. */
2276 2932395 : = regno_reg_rtx[hard_regno[nop]];
2277 : }
2278 :
2279 : /* The constraints are made of several alternatives. Each operand's
2280 : constraint looks like foo,bar,... with commas separating the
2281 : alternatives. The first alternatives for all operands go
2282 : together, the second alternatives go together, etc.
2283 :
2284 : First loop over alternatives. */
2285 89627109 : alternative_mask preferred = curr_id->preferred_alternatives;
2286 89627109 : if (only_alternative >= 0)
2287 975134 : preferred &= ALTERNATIVE_BIT (only_alternative);
2288 :
2289 89627109 : bool prefer_memory_p = false;
2290 89627211 : repeat:
2291 368509881 : for (nalt = 0; nalt < n_alternatives; nalt++)
2292 : {
2293 : /* Loop over operands for one constraint alternative. */
2294 354805795 : if (!TEST_BIT (preferred, nalt))
2295 98277248 : continue;
2296 :
2297 256528547 : if (lra_dump_file != NULL)
2298 : {
2299 3403 : fprintf (lra_dump_file, " Considering alt=%d of insn %d: ",
2300 3403 : nalt, INSN_UID (curr_insn));
2301 3403 : print_curr_insn_alt (nalt);
2302 3403 : fprintf (lra_dump_file, "\n");
2303 : }
2304 :
2305 256528547 : bool matching_early_clobber[MAX_RECOG_OPERANDS];
2306 256528547 : curr_small_class_check++;
2307 256528547 : overall = losers = addr_losers = 0;
2308 256528547 : static_reject = reject = reload_nregs = reload_sum = 0;
2309 850691262 : for (nop = 0; nop < n_operands; nop++)
2310 : {
2311 594162715 : int inc = (curr_static_id
2312 594162715 : ->operand_alternative[nalt * n_operands + nop].reject);
2313 594162715 : if (lra_dump_file != NULL && inc != 0)
2314 53 : fprintf (lra_dump_file,
2315 : " Staticly defined alt reject+=%d\n", inc);
2316 594162715 : static_reject += inc;
2317 594162715 : matching_early_clobber[nop] = 0;
2318 : }
2319 : reject += static_reject;
2320 : early_clobbered_regs_num = 0;
2321 : curr_alt_out_sp_reload_p = false;
2322 : curr_reuse_alt_p = true;
2323 : curr_alt_class_change_p = false;
2324 : all_this_alternative = NO_REGS;
2325 : all_used_nregs = all_reload_nregs = 0;
2326 664031527 : for (nop = 0; nop < n_operands; nop++)
2327 : {
2328 529627001 : const char *p;
2329 529627001 : char *end;
2330 529627001 : int len, c, m, i, opalt_num, this_alternative_matches;
2331 529627001 : bool win, did_match, offmemok, early_clobber_p;
2332 : /* false => this operand can be reloaded somehow for this
2333 : alternative. */
2334 529627001 : bool badop;
2335 : /* true => this operand can be reloaded if the alternative
2336 : allows regs. */
2337 529627001 : bool winreg;
2338 : /* True if a constant forced into memory would be OK for
2339 : this operand. */
2340 529627001 : bool constmemok;
2341 529627001 : enum reg_class this_alternative, this_costly_alternative;
2342 529627001 : HARD_REG_SET this_alternative_set, this_costly_alternative_set;
2343 529627001 : HARD_REG_SET this_alternative_exclude_start_hard_regs;
2344 529627001 : bool this_alternative_match_win, this_alternative_win;
2345 529627001 : bool this_alternative_offmemok;
2346 529627001 : bool scratch_p;
2347 529627001 : machine_mode mode;
2348 529627001 : enum constraint_num cn;
2349 529627001 : bool class_change_p = false;
2350 :
2351 529627001 : opalt_num = nalt * n_operands + nop;
2352 529627001 : if (curr_static_id->operand_alternative[opalt_num].anything_ok)
2353 : {
2354 : /* Fast track for no constraints at all. */
2355 14617135 : curr_alt[nop] = NO_REGS;
2356 14617135 : CLEAR_HARD_REG_SET (curr_alt_set[nop]);
2357 14617135 : curr_alt_win[nop] = true;
2358 14617135 : curr_alt_match_win[nop] = false;
2359 14617135 : curr_alt_offmemok[nop] = false;
2360 14617135 : curr_alt_matches[nop] = -1;
2361 14617135 : continue;
2362 : }
2363 :
2364 515009866 : op = no_subreg_reg_operand[nop];
2365 515009866 : mode = curr_operand_mode[nop];
2366 :
2367 515009866 : win = did_match = winreg = offmemok = constmemok = false;
2368 515009866 : badop = true;
2369 :
2370 515009866 : early_clobber_p = false;
2371 515009866 : p = curr_static_id->operand_alternative[opalt_num].constraint;
2372 :
2373 515009866 : this_costly_alternative = this_alternative = NO_REGS;
2374 : /* We update set of possible hard regs besides its class
2375 : because reg class might be inaccurate. For example,
2376 : union of LO_REGS (l), HI_REGS(h), and STACK_REG(k) in ARM
2377 : is translated in HI_REGS because classes are merged by
2378 : pairs and there is no accurate intermediate class. */
2379 2060039464 : CLEAR_HARD_REG_SET (this_alternative_set);
2380 1545029598 : CLEAR_HARD_REG_SET (this_costly_alternative_set);
2381 515009866 : CLEAR_HARD_REG_SET (this_alternative_exclude_start_hard_regs);
2382 515009866 : this_alternative_win = false;
2383 515009866 : this_alternative_match_win = false;
2384 515009866 : this_alternative_offmemok = false;
2385 515009866 : this_alternative_matches = -1;
2386 :
2387 : /* An empty constraint should be excluded by the fast
2388 : track. */
2389 515009866 : lra_assert (*p != 0 && *p != ',');
2390 :
2391 : op_reject = 0;
2392 : /* Scan this alternative's specs for this operand; set WIN
2393 : if the operand fits any letter in this alternative.
2394 : Otherwise, clear BADOP if this operand could fit some
2395 : letter after reloads, or set WINREG if this operand could
2396 : fit after reloads provided the constraint allows some
2397 : registers. */
2398 : costly_p = false;
2399 1308032208 : do
2400 : {
2401 1308032208 : switch ((c = *p, len = CONSTRAINT_LEN (c, p)), c)
2402 : {
2403 : case '\0':
2404 : len = 0;
2405 : break;
2406 492253789 : case ',':
2407 492253789 : c = '\0';
2408 492253789 : break;
2409 :
2410 175991 : case '&':
2411 175991 : early_clobber_p = true;
2412 175991 : break;
2413 :
2414 19299 : case '$':
2415 19299 : op_reject += LRA_MAX_REJECT;
2416 19299 : break;
2417 0 : case '^':
2418 0 : op_reject += LRA_LOSER_COST_FACTOR;
2419 0 : break;
2420 :
2421 0 : case '#':
2422 : /* Ignore rest of this alternative. */
2423 0 : c = '\0';
2424 0 : break;
2425 :
2426 56103959 : case '0': case '1': case '2': case '3': case '4':
2427 56103959 : case '5': case '6': case '7': case '8': case '9':
2428 56103959 : {
2429 56103959 : int m_hregno;
2430 56103959 : bool match_p;
2431 :
2432 56103959 : m = strtoul (p, &end, 10);
2433 56103959 : p = end;
2434 56103959 : len = 0;
2435 56103959 : lra_assert (nop > m);
2436 :
2437 : /* Reject matches if we don't know which operand is
2438 : bigger. This situation would arguably be a bug in
2439 : an .md pattern, but could also occur in a user asm. */
2440 168311877 : if (!ordered_p (GET_MODE_SIZE (biggest_mode[m]),
2441 56103959 : GET_MODE_SIZE (biggest_mode[nop])))
2442 : break;
2443 :
2444 : /* Don't match wrong asm insn operands for proper
2445 : diagnostic later. */
2446 56103959 : if (INSN_CODE (curr_insn) < 0
2447 33235 : && (curr_operand_mode[m] == BLKmode
2448 33234 : || curr_operand_mode[nop] == BLKmode)
2449 1 : && curr_operand_mode[m] != curr_operand_mode[nop])
2450 : break;
2451 :
2452 56103958 : m_hregno = get_hard_regno (*curr_id->operand_loc[m]);
2453 : /* We are supposed to match a previous operand.
2454 : If we do, we win if that one did. If we do
2455 : not, count both of the operands as losers.
2456 : (This is too conservative, since most of the
2457 : time only a single reload insn will be needed
2458 : to make the two operands win. As a result,
2459 : this alternative may be rejected when it is
2460 : actually desirable.) */
2461 56103958 : match_p = false;
2462 56103958 : if (operands_match_p (*curr_id->operand_loc[nop],
2463 56103958 : *curr_id->operand_loc[m], m_hregno))
2464 : {
2465 : /* We should reject matching of an early
2466 : clobber operand if the matching operand is
2467 : not dying in the insn. */
2468 14795371 : if (!TEST_BIT (curr_static_id->operand[m]
2469 : .early_clobber_alts, nalt)
2470 18241 : || operand_reg[nop] == NULL_RTX
2471 14813612 : || (find_regno_note (curr_insn, REG_DEAD,
2472 : REGNO (op))
2473 4271 : || REGNO (op) == REGNO (operand_reg[m])))
2474 14795371 : match_p = true;
2475 : }
2476 14795371 : if (match_p)
2477 : {
2478 : /* If we are matching a non-offsettable
2479 : address where an offsettable address was
2480 : expected, then we must reject this
2481 : combination, because we can't reload
2482 : it. */
2483 14795371 : if (curr_alt_offmemok[m]
2484 1493 : && MEM_P (*curr_id->operand_loc[m])
2485 0 : && curr_alt[m] == NO_REGS && ! curr_alt_win[m])
2486 0 : continue;
2487 : }
2488 : else
2489 : {
2490 : /* If the operands do not match and one
2491 : operand is INOUT, we can not match them.
2492 : Try other possibilities, e.g. other
2493 : alternatives or commutative operand
2494 : exchange. */
2495 41308587 : if (curr_static_id->operand[nop].type == OP_INOUT
2496 41308587 : || curr_static_id->operand[m].type == OP_INOUT)
2497 : break;
2498 : /* Operands don't match. For asm if the operands
2499 : are different user defined explicit hard
2500 : registers, then we cannot make them match
2501 : when one is early clobber operand. */
2502 41308155 : if ((REG_P (*curr_id->operand_loc[nop])
2503 25955294 : || SUBREG_P (*curr_id->operand_loc[nop]))
2504 15940430 : && (REG_P (*curr_id->operand_loc[m])
2505 200374 : || SUBREG_P (*curr_id->operand_loc[m]))
2506 15848870 : && INSN_CODE (curr_insn) < 0)
2507 : {
2508 590 : rtx nop_reg = *curr_id->operand_loc[nop];
2509 590 : if (SUBREG_P (nop_reg))
2510 0 : nop_reg = SUBREG_REG (nop_reg);
2511 590 : rtx m_reg = *curr_id->operand_loc[m];
2512 590 : if (SUBREG_P (m_reg))
2513 0 : m_reg = SUBREG_REG (m_reg);
2514 :
2515 590 : if (REG_P (nop_reg)
2516 590 : && HARD_REGISTER_P (nop_reg)
2517 0 : && REG_USERVAR_P (nop_reg)
2518 0 : && REG_P (m_reg)
2519 0 : && HARD_REGISTER_P (m_reg)
2520 590 : && REG_USERVAR_P (m_reg))
2521 : {
2522 : int i;
2523 :
2524 0 : for (i = 0; i < early_clobbered_regs_num; i++)
2525 0 : if (m == early_clobbered_nops[i])
2526 : break;
2527 0 : if (i < early_clobbered_regs_num
2528 0 : || early_clobber_p)
2529 : break;
2530 : }
2531 : }
2532 : /* Both operands must allow a reload register,
2533 : otherwise we cannot make them match. */
2534 41308155 : if (curr_alt[m] == NO_REGS)
2535 : break;
2536 : /* Retroactively mark the operand we had to
2537 : match as a loser, if it wasn't already and
2538 : it wasn't matched to a register constraint
2539 : (e.g it might be matched by memory). */
2540 41283789 : if (curr_alt_win[m]
2541 40452579 : && (operand_reg[m] == NULL_RTX
2542 39949545 : || hard_regno[m] < 0))
2543 : {
2544 1260012 : if (lra_dump_file != NULL)
2545 9 : fprintf
2546 9 : (lra_dump_file,
2547 : " %d Matched operand reload: "
2548 : "losers++\n", m);
2549 1260012 : losers++;
2550 1260012 : reload_nregs
2551 1260012 : += (ira_reg_class_max_nregs[curr_alt[m]]
2552 1260012 : [GET_MODE (*curr_id->operand_loc[m])]);
2553 : }
2554 :
2555 : /* Prefer matching earlyclobber alternative as
2556 : it results in less hard regs required for
2557 : the insn than a non-matching earlyclobber
2558 : alternative. */
2559 41283789 : if (TEST_BIT (curr_static_id->operand[m]
2560 : .early_clobber_alts, nalt))
2561 : {
2562 17790 : if (lra_dump_file != NULL)
2563 0 : fprintf
2564 0 : (lra_dump_file,
2565 : " %d Matching earlyclobber alt:"
2566 : " reject--\n",
2567 : nop);
2568 17790 : if (!matching_early_clobber[m])
2569 : {
2570 17790 : reject--;
2571 17790 : matching_early_clobber[m] = 1;
2572 : }
2573 : }
2574 : /* Otherwise we prefer no matching
2575 : alternatives because it gives more freedom
2576 : in RA. */
2577 41265999 : else if (operand_reg[nop] == NULL_RTX
2578 41265999 : || (find_regno_note (curr_insn, REG_DEAD,
2579 15915526 : REGNO (operand_reg[nop]))
2580 : == NULL_RTX))
2581 : {
2582 36364670 : if (lra_dump_file != NULL)
2583 912 : fprintf
2584 912 : (lra_dump_file,
2585 : " %d Matching alt: reject+=2\n",
2586 : nop);
2587 36364670 : reject += 2;
2588 : }
2589 : }
2590 : /* If we have to reload this operand and some
2591 : previous operand also had to match the same
2592 : thing as this operand, we don't know how to do
2593 : that. */
2594 56079160 : if (!match_p || !curr_alt_win[m])
2595 : {
2596 86716176 : for (i = 0; i < nop; i++)
2597 45326004 : if (curr_alt_matches[i] == m)
2598 : break;
2599 41390173 : if (i < nop)
2600 : break;
2601 : }
2602 : else
2603 : did_match = true;
2604 :
2605 56079159 : this_alternative_matches = m;
2606 : /* This can be fixed with reloads if the operand
2607 : we are supposed to match can be fixed with
2608 : reloads. */
2609 56079159 : badop = false;
2610 56079159 : this_alternative = curr_alt[m];
2611 56079159 : this_alternative_set = curr_alt_set[m];
2612 56079159 : this_alternative_exclude_start_hard_regs
2613 56079159 : = curr_alt_exclude_start_hard_regs[m];
2614 56079159 : winreg = this_alternative != NO_REGS;
2615 56079159 : break;
2616 : }
2617 :
2618 11679485 : case 'g':
2619 11679485 : if (MEM_P (op)
2620 7815862 : || general_constant_p (op)
2621 16240858 : || spilled_pseudo_p (op))
2622 : win = true;
2623 11679485 : if (REG_P (op) && prefer_memory_p)
2624 : {
2625 11679485 : badop = false;
2626 11679485 : offmemok = true;
2627 : }
2628 11679485 : cl = GENERAL_REGS;
2629 11679485 : cl_filter = nullptr;
2630 11679485 : goto reg;
2631 :
2632 1140 : case '{':
2633 1140 : {
2634 1140 : int regno = decode_hard_reg_constraint (p);
2635 1140 : gcc_assert (regno >= 0);
2636 1140 : cl = NO_REGS;
2637 1140 : int nregs = hard_regno_nregs (regno, mode);
2638 2280 : for (int i = 0; i < nregs; ++i)
2639 1140 : cl = reg_class_superunion[cl][REGNO_REG_CLASS (regno + i)];
2640 1140 : CLEAR_HARD_REG_SET (hard_reg_constraint);
2641 1140 : SET_HARD_REG_BIT (hard_reg_constraint, regno);
2642 1140 : cl_filter = &hard_reg_constraint;
2643 1140 : goto reg;
2644 : }
2645 :
2646 725042468 : default:
2647 725042468 : cn = lookup_constraint (p);
2648 725042468 : switch (get_constraint_type (cn))
2649 : {
2650 479461815 : case CT_REGISTER:
2651 479461815 : cl = reg_class_for_constraint (cn);
2652 352782724 : if (cl != NO_REGS)
2653 : {
2654 343358592 : cl_filter = get_register_filter (cn);
2655 343358592 : goto reg;
2656 : }
2657 : break;
2658 :
2659 2059733 : case CT_CONST_INT:
2660 2059733 : if (CONST_INT_P (op)
2661 2059733 : && insn_const_int_ok_for_constraint (INTVAL (op), cn))
2662 : win = true;
2663 : break;
2664 :
2665 110565917 : case CT_MEMORY:
2666 110565917 : case CT_RELAXED_MEMORY:
2667 110565917 : if (MEM_P (op)
2668 110565917 : && satisfies_memory_constraint_p (op, cn))
2669 : win = true;
2670 74960969 : else if (spilled_pseudo_p (op))
2671 45036061 : win = true;
2672 :
2673 : /* If we didn't already win, we can reload constants
2674 : via force_const_mem or put the pseudo value into
2675 : memory, or make other memory by reloading the
2676 : address like for 'o'. */
2677 115646087 : if (CONST_POOL_OK_P (mode, op)
2678 105485589 : || MEM_P (op) || REG_P (op)
2679 : /* We can restore the equiv insn by a
2680 : reload. */
2681 111137005 : || equiv_substition_p[nop])
2682 110531123 : badop = false;
2683 : constmemok = true;
2684 : offmemok = true;
2685 : break;
2686 :
2687 1673984 : case CT_ADDRESS:
2688 : /* An asm operand with an address constraint
2689 : that doesn't satisfy address_operand has
2690 : is_address cleared, so that we don't try to
2691 : make a non-address fit. */
2692 1673984 : if (!curr_static_id->operand[nop].is_address)
2693 : break;
2694 : /* If we didn't already win, we can reload the address
2695 : into a base register. */
2696 1673965 : if (satisfies_address_constraint_p (op, cn))
2697 1673965 : win = true;
2698 1673965 : cl = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
2699 : ADDRESS, SCRATCH);
2700 1673965 : cl_filter = nullptr;
2701 1673965 : badop = false;
2702 1673965 : goto reg;
2703 :
2704 130067968 : case CT_FIXED_FORM:
2705 130067968 : if (constraint_satisfied_p (op, cn))
2706 1308032208 : win = true;
2707 : break;
2708 :
2709 1213051 : case CT_SPECIAL_MEMORY:
2710 1213051 : if (satisfies_memory_constraint_p (op, cn))
2711 : win = true;
2712 1040599 : else if (spilled_pseudo_p (op))
2713 : {
2714 1308032208 : curr_reuse_alt_p = false;
2715 1308032208 : win = true;
2716 : }
2717 : break;
2718 : }
2719 : break;
2720 :
2721 356713182 : reg:
2722 356713182 : if (mode == BLKmode)
2723 : break;
2724 356713164 : this_alternative = reg_class_subunion[this_alternative][cl];
2725 356713164 : if (hard_reg_set_subset_p (this_alternative_set,
2726 356713164 : reg_class_contents[cl]))
2727 356709539 : this_alternative_exclude_start_hard_regs
2728 356709539 : = ira_exclude_class_mode_regs[cl][mode];
2729 3625 : else if (!hard_reg_set_subset_p (reg_class_contents[cl],
2730 : this_alternative_set))
2731 3624 : this_alternative_exclude_start_hard_regs
2732 1070143116 : |= ira_exclude_class_mode_regs[cl][mode];
2733 356713164 : this_alternative_set |= reg_class_contents[cl];
2734 356713164 : if (cl_filter)
2735 2280 : this_alternative_exclude_start_hard_regs |= ~*cl_filter;
2736 356713164 : if (costly_p)
2737 : {
2738 21165129 : this_costly_alternative
2739 21165129 : = reg_class_subunion[this_costly_alternative][cl];
2740 21165129 : this_costly_alternative_set |= reg_class_contents[cl];
2741 : }
2742 356713164 : winreg = true;
2743 356713164 : if (REG_P (op))
2744 : {
2745 227080743 : rtx orig_op = *curr_id->operand_loc[nop];
2746 6530935 : if (GET_CODE (orig_op) == SUBREG && HARD_REGISTER_P (op)
2747 227080833 : && !targetm.hard_regno_mode_ok (REGNO (op),
2748 90 : GET_MODE(orig_op)))
2749 : break;
2750 :
2751 227080743 : tree decl;
2752 :
2753 227080743 : if (hard_regno[nop] >= 0
2754 191501599 : && in_hard_reg_set_p (this_alternative_set,
2755 : mode, hard_regno[nop])
2756 173545908 : && (!cl_filter
2757 590 : || TEST_HARD_REG_BIT (*cl_filter,
2758 : hard_regno[nop]))
2759 400626645 : && ((REG_ATTRS (op) && (decl = REG_EXPR (op)) != NULL
2760 96664005 : && VAR_P (decl) && DECL_HARD_REGISTER (decl))
2761 173542645 : || !(TEST_HARD_REG_BIT
2762 173542645 : (this_alternative_exclude_start_hard_regs,
2763 : hard_regno[nop]))))
2764 : win = true;
2765 53534871 : else if (hard_regno[nop] < 0 && !prefer_memory_p)
2766 : {
2767 35579008 : if (in_class_p (op, this_alternative, NULL))
2768 : win = true;
2769 26717477 : else if (in_class_p (op, this_alternative, NULL, true))
2770 : {
2771 1308032208 : class_change_p = true;
2772 1308032208 : win = true;
2773 : }
2774 : }
2775 : }
2776 : break;
2777 : }
2778 1308032208 : if (c != ' ' && c != '\t')
2779 1308032208 : costly_p = c == '*';
2780 : }
2781 1308032208 : while ((p += len), c);
2782 :
2783 1030019732 : scratch_p = (operand_reg[nop] != NULL_RTX
2784 515009866 : && ira_former_scratch_p (REGNO (operand_reg[nop])));
2785 : /* Record which operands fit this alternative. */
2786 515009866 : if (win)
2787 : {
2788 277525997 : if (early_clobber_p
2789 277383711 : || curr_static_id->operand[nop].type != OP_OUT)
2790 : {
2791 121408923 : if (winreg)
2792 100376508 : all_used_nregs
2793 100376508 : += ira_reg_class_min_nregs[this_alternative][mode];
2794 121408923 : all_this_alternative
2795 121408923 : = (reg_class_subunion
2796 121408923 : [all_this_alternative][this_alternative]);
2797 : }
2798 277525997 : this_alternative_win = true;
2799 277525997 : if (class_change_p)
2800 : {
2801 249677 : curr_alt_class_change_p = true;
2802 249677 : if (lra_dump_file != NULL)
2803 10 : fprintf (lra_dump_file,
2804 : " %d Narrowing class: reject+=3\n",
2805 : nop);
2806 249677 : reject += 3;
2807 : }
2808 277525997 : if (operand_reg[nop] != NULL_RTX)
2809 : {
2810 193508189 : if (hard_regno[nop] >= 0)
2811 : {
2812 173489605 : if (in_hard_reg_set_p (this_costly_alternative_set,
2813 : mode, hard_regno[nop]))
2814 : {
2815 773000 : if (lra_dump_file != NULL)
2816 21 : fprintf (lra_dump_file,
2817 : " %d Costly set: reject++\n",
2818 : nop);
2819 773000 : reject++;
2820 : }
2821 : }
2822 : else
2823 : {
2824 : /* Prefer won reg to spilled pseudo under other
2825                 :                              equal conditions for possible inheritance.  */
2826 20018584 : if (! scratch_p)
2827 : {
2828 20013988 : if (lra_dump_file != NULL)
2829 59 : fprintf
2830 59 : (lra_dump_file,
2831 : " %d Non pseudo reload: reject++\n",
2832 : nop);
2833 20013988 : reject++;
2834 : }
2835 20018584 : if (in_class_p (operand_reg[nop],
2836 : this_costly_alternative, NULL, true))
2837 : {
2838 134139 : if (lra_dump_file != NULL)
2839 0 : fprintf
2840 0 : (lra_dump_file,
2841 : " %d Non pseudo costly reload:"
2842 : " reject++\n",
2843 : nop);
2844 134139 : reject++;
2845 : }
2846 : }
2847 : /* We simulate the behavior of old reload here.
2848 : Although scratches need hard registers and it
2849 : might result in spilling other pseudos, no reload
2850 : insns are generated for the scratches. So it
2851 : might cost something but probably less than old
2852 : reload pass believes. */
2853 193508189 : if (scratch_p)
2854 : {
2855 115382 : if (lra_dump_file != NULL)
2856 6 : fprintf (lra_dump_file,
2857 : " %d Scratch win: reject+=2\n",
2858 : nop);
2859 115382 : reject += 2;
2860 : }
2861 : }
2862 : }
2863 237483869 : else if (did_match)
2864 : this_alternative_match_win = true;
2865 : else
2866 : {
2867 222794882 : if (prefer_memory_p && offmemok)
2868 : {
2869 0 : winreg = false;
2870 0 : this_alternative = NO_REGS;
2871 : }
2872 :
2873 222794882 : int const_to_mem = 0;
2874 222794882 : bool no_regs_p;
2875 :
2876 222794882 : reject += op_reject;
2877 : /* Mark output reload of the stack pointer. */
2878 222794882 : if (op == stack_pointer_rtx
2879 56649 : && curr_static_id->operand[nop].type != OP_IN)
2880 222794882 : curr_alt_out_sp_reload_p = true;
2881 :
2882 : /* If this alternative asks for a specific reg class, see if there
2883 : is at least one allocatable register in that class. */
2884 222794882 : no_regs_p
2885 388367866 : = (this_alternative == NO_REGS
2886 222794882 : || (hard_reg_set_subset_p
2887 331145990 : (reg_class_contents[this_alternative],
2888 : lra_no_alloc_regs)));
2889 :
2890 : /* For asms, verify that the class for this alternative is possible
2891 : for the mode that is specified. */
2892 165572984 : if (!no_regs_p && INSN_CODE (curr_insn) < 0)
2893 : {
2894 : int i;
2895 69685 : for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2896 69683 : if (targetm.hard_regno_mode_ok (i, mode)
2897 69683 : && in_hard_reg_set_p (reg_class_contents[this_alternative],
2898 : mode, i))
2899 : break;
2900 20343 : if (i == FIRST_PSEUDO_REGISTER)
2901 222794882 : winreg = false;
2902 : }
2903 :
2904 : /* If this operand accepts a register, and if the
2905 : register class has at least one allocatable register,
2906 : then this operand can be reloaded. */
2907 222794882 : if (winreg && !no_regs_p)
2908 : badop = false;
2909 :
2910 57221900 : if (badop)
2911 : {
2912 47939532 : if (lra_dump_file != NULL)
2913 606 : fprintf (lra_dump_file,
2914 : " Bad operand -- refuse\n");
2915 122124021 : goto fail;
2916 : }
2917 :
2918 174855350 : if (this_alternative != NO_REGS)
2919 : {
2920 165572983 : HARD_REG_SET available_regs
2921 165572983 : = (reg_class_contents[this_alternative]
2922 165572983 : & ~((ira_prohibited_class_mode_regs
2923 165572983 : [this_alternative][mode])
2924 165572983 : | lra_no_alloc_regs));
2925 331145966 : if (!hard_reg_set_empty_p (available_regs))
2926 : {
2927 165571540 : if (early_clobber_p
2928 165537835 : || curr_static_id->operand[nop].type != OP_OUT)
2929 : {
2930 86751485 : all_reload_nregs
2931 86751485 : += ira_reg_class_min_nregs[this_alternative][mode];
2932 86751485 : all_this_alternative
2933 86751485 : = (reg_class_subunion
2934 86751485 : [all_this_alternative][this_alternative]);
2935 : }
2936 : }
2937 : else
2938 : {
2939 : /* There are no hard regs holding a value of given
2940 : mode. */
2941 1443 : if (offmemok)
2942 : {
2943 171 : this_alternative = NO_REGS;
2944 171 : if (lra_dump_file != NULL)
2945 0 : fprintf (lra_dump_file,
2946 : " %d Using memory because of"
2947 : " a bad mode: reject+=2\n",
2948 : nop);
2949 171 : reject += 2;
2950 : }
2951 : else
2952 : {
2953 1272 : if (lra_dump_file != NULL)
2954 0 : fprintf (lra_dump_file,
2955 : " Wrong mode -- refuse\n");
2956 1272 : goto fail;
2957 : }
2958 : }
2959 : }
2960 :
2961                 :               /* If not assigned pseudo has a class which is a subset of
2962 : required reg class, it is a less costly alternative
2963 : as the pseudo still can get a hard reg of necessary
2964 : class. */
2965 165571711 : if (! no_regs_p && REG_P (op) && hard_regno[nop] < 0
2966 21352672 : && (cl = get_reg_class (REGNO (op))) != NO_REGS
2967 177933954 : && ira_class_subset_p[this_alternative][cl])
2968 : {
2969 1065 : if (lra_dump_file != NULL)
2970 0 : fprintf
2971 0 : (lra_dump_file,
2972 : " %d Super set class reg: reject-=3\n", nop);
2973 1065 : reject -= 3;
2974 : }
2975 :
2976 174854078 : this_alternative_offmemok = offmemok;
2977 174854078 : if (this_costly_alternative != NO_REGS)
2978 : {
2979 19006499 : if (lra_dump_file != NULL)
2980 25 : fprintf (lra_dump_file,
2981 : " %d Costly loser: reject++\n", nop);
2982 19006499 : reject++;
2983 : }
2984 : /* If the operand is dying, has a matching constraint,
2985 : and satisfies constraints of the matched operand
2986 : which failed to satisfy the own constraints, most probably
2987 : the reload for this operand will be gone. */
2988 174854078 : if (this_alternative_matches >= 0
2989 41372818 : && !curr_alt_win[this_alternative_matches]
2990 937122 : && REG_P (op)
2991 685698 : && find_regno_note (curr_insn, REG_DEAD, REGNO (op))
2992 175555334 : && (hard_regno[nop] >= 0
2993 368680 : ? in_hard_reg_set_p (this_alternative_set,
2994 : mode, hard_regno[nop])
2995 36104 : : in_class_p (op, this_alternative, NULL)))
2996 : {
2997 220295 : if (lra_dump_file != NULL)
2998 1 : fprintf
2999 1 : (lra_dump_file,
3000 : " %d Dying matched operand reload: reject++\n",
3001 : nop);
3002 220295 : reject++;
3003 : }
3004 : else
3005 : {
3006 : /* Strict_low_part requires to reload the register
3007 : not the sub-register. In this case we should
3008 : check that a final reload hard reg can hold the
3009 : value mode. */
3010 174633783 : if (curr_static_id->operand[nop].strict_low
3011 117 : && REG_P (op)
3012 110 : && hard_regno[nop] < 0
3013 84 : && GET_CODE (*curr_id->operand_loc[nop]) == SUBREG
3014 84 : && ira_class_hard_regs_num[this_alternative] > 0
3015 174633867 : && (!targetm.hard_regno_mode_ok
3016 84 : (ira_class_hard_regs[this_alternative][0],
3017 84 : GET_MODE (*curr_id->operand_loc[nop]))))
3018 : {
3019 0 : if (lra_dump_file != NULL)
3020 0 : fprintf
3021 0 : (lra_dump_file,
3022 : " Strict low subreg reload -- refuse\n");
3023 0 : goto fail;
3024 : }
3025 174633783 : if (lra_dump_file != NULL)
3026 2177 : fprintf
3027 2177 : (lra_dump_file,
3028 : " %d Operand reload: losers++\n", nop);
3029 174633783 : losers++;
3030 : }
3031 174854078 : if (operand_reg[nop] != NULL_RTX
3032 : /* Output operands and matched input operands are
3033 : not inherited. The following conditions do not
3034 : exactly describe the previous statement but they
3035 : are pretty close. */
3036 62594858 : && curr_static_id->operand[nop].type != OP_OUT
3037 27605624 : && (this_alternative_matches < 0
3038 16006532 : || curr_static_id->operand[nop].type != OP_IN))
3039 : {
3040 11599092 : int last_reload = (lra_reg_info[ORIGINAL_REGNO
3041 11599092 : (operand_reg[nop])]
3042 11599092 : .last_reload);
3043 :
3044 : /* The value of reload_sum has sense only if we
3045 : process insns in their order. It happens only on
3046 : the first constraints sub-pass when we do most of
3047 : reload work. */
3048 11599092 : if (lra_constraint_iter == 1 && last_reload > bb_reload_num)
3049 2522811 : reload_sum += last_reload - bb_reload_num;
3050 : }
3051 : /* If this is a constant that is reloaded into the
3052 : desired class by copying it to memory first, count
3053 : that as another reload. This is consistent with
3054 : other code and is required to avoid choosing another
3055 : alternative when the constant is moved into memory.
3056 : Note that the test here is precisely the same as in
3057 : the code below that calls force_const_mem. */
3058 224806568 : if (CONST_POOL_OK_P (mode, op)
3059 224806633 : && ((targetm.preferred_reload_class
3060 49952555 : (op, this_alternative) == NO_REGS)
3061 48436092 : || no_input_reloads_p))
3062 : {
3063 1516463 : const_to_mem = 1;
3064 1516463 : if (! no_regs_p)
3065 : {
3066 711169 : if (lra_dump_file != NULL)
3067 0 : fprintf
3068 0 : (lra_dump_file,
3069 : " %d Constant reload through memory: "
3070 : "losers++\n", nop);
3071 711169 : losers++;
3072 : }
3073 : }
3074 :
3075 : /* Alternative loses if it requires a type of reload not
3076 : permitted for this insn. We can always reload
3077 : objects with a REG_UNUSED note. */
3078 174854078 : if ((curr_static_id->operand[nop].type != OP_IN
3079 84940232 : && no_output_reloads_p
3080 0 : && ! find_reg_note (curr_insn, REG_UNUSED, op)
3081 0 : && ! scratch_p)
3082 174854078 : || (curr_static_id->operand[nop].type != OP_OUT
3083 89914059 : && no_input_reloads_p && ! const_to_mem)
3084 349708156 : || (this_alternative_matches >= 0
3085 41372818 : && (no_input_reloads_p
3086 41372818 : || (no_output_reloads_p
3087 0 : && (curr_static_id->operand
3088 0 : [this_alternative_matches].type != OP_IN)
3089 0 : && ! find_reg_note (curr_insn, REG_UNUSED,
3090 : no_subreg_reg_operand
3091 0 : [this_alternative_matches])
3092 0 : && ! scratch_p))))
3093 : {
3094 0 : if (lra_dump_file != NULL)
3095 0 : fprintf
3096 0 : (lra_dump_file,
3097 : " No input/output reload -- refuse\n");
3098 0 : goto fail;
3099 : }
3100 :
3101 : /* Alternative loses if it required class pseudo cannot
3102                 :               /* Alternative loses if its required class pseudo cannot
3103 : described by insn definitions with mode iterators. */
3104 174854078 : if (GET_MODE (*curr_id->operand_loc[nop]) != VOIDmode
3105 126299016 : && ! hard_reg_set_empty_p (this_alternative_set)
3106 : /* It is common practice for constraints to use a
3107                 :                  class which does not actually have enough regs to
3108                 :                  hold the value (e.g. x86 AREG for mode requiring
3109                 :                  more than one general reg).  Therefore we have 2
3110 : conditions to check that the reload pseudo cannot
3111 : hold the mode value. */
3112 117722019 : && (!targetm.hard_regno_mode_ok
3113 117722019 : (ira_class_hard_regs[this_alternative][0],
3114 : GET_MODE (*curr_id->operand_loc[nop])))
3115 : /* The above condition is not enough as the first
3116 : reg in ira_class_hard_regs can be not aligned for
3117 : multi-words mode values. */
3118 174854078 : && (prohibited_class_reg_set_mode_p
3119 0 : (this_alternative, this_alternative_set,
3120 0 : GET_MODE (*curr_id->operand_loc[nop]))))
3121 : {
3122 0 : if (lra_dump_file != NULL)
3123 0 : fprintf (lra_dump_file,
3124 : " reload pseudo for op %d "
3125 : "cannot hold the mode value -- refuse\n",
3126 : nop);
3127 0 : goto fail;
3128 : }
3129 :
3130 : /* Check strong discouragement of reload of non-constant
3131 : into class THIS_ALTERNATIVE. */
3132 124901523 : if (! CONSTANT_P (op) && ! no_regs_p
3133 291278528 : && (targetm.preferred_reload_class
3134 116424450 : (op, this_alternative) == NO_REGS
3135 107972058 : || (curr_static_id->operand[nop].type == OP_OUT
3136 74413340 : && (targetm.preferred_output_reload_class
3137 74413340 : (op, this_alternative) == NO_REGS))))
3138 : {
3139 12981068 : if (offmemok && REG_P (op))
3140 : {
3141 792047 : if (lra_dump_file != NULL)
3142 0 : fprintf
3143 0 : (lra_dump_file,
3144 : " %d Spill pseudo into memory: reject+=3\n",
3145 : nop);
3146 792047 : reject += 3;
3147 : }
3148 : else
3149 : {
3150 12189021 : if (lra_dump_file != NULL)
3151 0 : fprintf
3152 0 : (lra_dump_file,
3153 : " %d Non-prefered reload: reject+=%d\n",
3154 : nop, LRA_MAX_REJECT);
3155 12189021 : reject += LRA_MAX_REJECT;
3156 : }
3157 : }
3158 :
3159 174854078 : if (! (MEM_P (op) && offmemok)
3160 174854006 : && ! (const_to_mem && constmemok))
3161 : {
3162 : /* We prefer to reload pseudos over reloading other
3163 : things, since such reloads may be able to be
3164 : eliminated later. So bump REJECT in other cases.
3165 : Don't do this in the case where we are forcing a
3166 : constant into memory and it will then win since
3167 : we don't want to have a different alternative
3168 : match then. */
3169 173932167 : if (! (REG_P (op) && REGNO (op) >= FIRST_PSEUDO_REGISTER))
3170 : {
3171 124360685 : if (lra_dump_file != NULL)
3172 1648 : fprintf
3173 1648 : (lra_dump_file,
3174 : " %d Non-pseudo reload: reject+=2\n",
3175 : nop);
3176 124360685 : reject += 2;
3177 : }
3178 :
3179 173932167 : if (! no_regs_p)
3180 165455138 : reload_nregs
3181 165455138 : += ira_reg_class_max_nregs[this_alternative][mode];
3182 :
3183 173932167 : if (SMALL_REGISTER_CLASS_P (this_alternative))
3184 : {
3185 857327 : if (lra_dump_file != NULL)
3186 45 : fprintf
3187 45 : (lra_dump_file,
3188 : " %d Small class reload: reject+=%d\n",
3189 : nop, LRA_LOSER_COST_FACTOR / 2);
3190 857327 : reject += LRA_LOSER_COST_FACTOR / 2;
3191 : }
3192 : }
3193 :
3194 : /* We are trying to spill pseudo into memory. It is
3195 : usually more costly than moving to a hard register
3196                 :              although it might take the same number of
3197 : reloads.
3198 :
3199 : Non-pseudo spill may happen also. Suppose a target allows both
3200 : register and memory in the operand constraint alternatives,
3201                 :              then it's typical that an eliminable register has a substitution
3202 : of "base + offset" which can either be reloaded by a simple
3203 : "new_reg <= base + offset" which will match the register
3204 : constraint, or a similar reg addition followed by further spill
3205 : to and reload from memory which will match the memory
3206 : constraint, but this memory spill will be much more costly
3207 : usually.
3208 :
3209 : Code below increases the reject for both pseudo and non-pseudo
3210 : spill. */
3211 174854078 : if (no_regs_p
3212 9282367 : && !(MEM_P (op) && offmemok)
3213 9282323 : && !(REG_P (op) && hard_regno[nop] < 0))
3214 : {
3215 8170557 : if (lra_dump_file != NULL)
3216 13 : fprintf
3217 20 : (lra_dump_file,
3218 : " %d Spill %spseudo into memory: reject+=3\n",
3219 : nop, REG_P (op) ? "" : "Non-");
3220 8170557 : reject += 3;
3221 8170557 : if (VECTOR_MODE_P (mode))
3222 : {
3223 : /* Spilling vectors into memory is usually more
3224 : costly as they contain big values. */
3225 365924 : if (lra_dump_file != NULL)
3226 0 : fprintf
3227 0 : (lra_dump_file,
3228 : " %d Spill vector pseudo: reject+=2\n",
3229 : nop);
3230 365924 : reject += 2;
3231 : }
3232 : }
3233 :
3234 : /* When we use an operand requiring memory in given
3235 : alternative, the insn should write *and* read the
3236 : value to/from memory it is costly in comparison with
3237 : an insn alternative which does not use memory
3238 : (e.g. register or immediate operand). We exclude
3239 : memory operand for such case as we can satisfy the
3240 : memory constraints by reloading address. */
3241 9282367 : if (no_regs_p && offmemok && !MEM_P (op))
3242 : {
3243 9282171 : if (lra_dump_file != NULL)
3244 27 : fprintf
3245 27 : (lra_dump_file,
3246 : " Using memory insn operand %d: reject+=3\n",
3247 : nop);
3248 9282171 : reject += 3;
3249 : }
3250 :
3251 : /* If reload requires moving value through secondary
3252 : memory, it will need one more insn at least. */
3253 174854078 : if (this_alternative != NO_REGS
3254 165571540 : && REG_P (op) && (cl = get_reg_class (REGNO (op))) != NO_REGS
3255 210704584 : && ((curr_static_id->operand[nop].type != OP_OUT
3256 20326382 : && targetm.secondary_memory_needed (mode, cl,
3257 : this_alternative))
3258 32598212 : || (curr_static_id->operand[nop].type != OP_IN
3259 15524241 : && (targetm.secondary_memory_needed
3260 15524241 : (mode, this_alternative, cl)))))
3261 : {
3262 10784436 : if (lra_dump_file != NULL)
3263 16 : fprintf
3264 16 : (lra_dump_file,
3265 : " %d Secondary memory reload needed: "
3266 : "losers++\n", nop);
3267 10784436 : losers++;
3268 : }
3269 :
3270 174854078 : if (MEM_P (op) && offmemok)
3271 72 : addr_losers++;
3272 : else
3273 : {
3274 : /* Input reloads can be inherited more often than
3275 : output reloads can be removed, so penalize output
3276 : reloads. */
3277 174854006 : if (!REG_P (op) || curr_static_id->operand[nop].type != OP_IN)
3278 : {
3279 147248588 : if (lra_dump_file != NULL)
3280 1722 : fprintf
3281 1722 : (lra_dump_file,
3282 : " %d Non input pseudo reload: reject++\n",
3283 : nop);
3284 147248588 : reject++;
3285 : }
3286 :
3287 174854006 : if (curr_static_id->operand[nop].type == OP_INOUT)
3288 : {
3289 213 : if (lra_dump_file != NULL)
3290 0 : fprintf
3291 0 : (lra_dump_file,
3292 : " %d Input/Output reload: reject+=%d\n",
3293 : nop, LRA_LOSER_COST_FACTOR);
3294 213 : reject += LRA_LOSER_COST_FACTOR;
3295 : }
3296 : }
3297 : }
3298 :
3299 467069062 : if (early_clobber_p && ! scratch_p)
3300 : {
3301 164706 : if (lra_dump_file != NULL)
3302 4 : fprintf (lra_dump_file,
3303 : " %d Early clobber: reject++\n", nop);
3304 164706 : reject++;
3305 : }
3306 : /* ??? We check early clobbers after processing all operands
3307 : (see loop below) and there we update the costs more.
3308 : Should we update the cost (may be approximately) here
3309 : because of early clobber register reloads or it is a rare
3310 : or non-important thing to be worth to do it. */
3311 934138124 : overall = (losers * LRA_LOSER_COST_FACTOR + reject
3312 467069062 : - (addr_losers == losers ? static_reject : 0));
3313 467069062 : if ((best_losers == 0 || losers != 0) && best_overall < overall)
3314 : {
3315 74183217 : if (lra_dump_file != NULL)
3316 1036 : fprintf (lra_dump_file,
3317 : " overall=%d,losers=%d -- refuse\n",
3318 : overall, losers);
3319 74183217 : goto fail;
3320 : }
3321 :
3322 392885845 : if (update_and_check_small_class_inputs (nop, nalt,
3323 : this_alternative))
3324 : {
3325 0 : if (lra_dump_file != NULL)
3326 0 : fprintf (lra_dump_file,
3327 : " not enough small class regs -- refuse\n");
3328 0 : goto fail;
3329 : }
3330 392885845 : curr_alt[nop] = this_alternative;
3331 392885845 : curr_alt_set[nop] = this_alternative_set;
3332 392885845 : curr_alt_exclude_start_hard_regs[nop]
3333 392885845 : = this_alternative_exclude_start_hard_regs;
3334 392885845 : curr_alt_win[nop] = this_alternative_win;
3335 392885845 : curr_alt_match_win[nop] = this_alternative_match_win;
3336 392885845 : curr_alt_offmemok[nop] = this_alternative_offmemok;
3337 392885845 : curr_alt_matches[nop] = this_alternative_matches;
3338 :
3339 392885845 : if (this_alternative_matches >= 0
3340 392885845 : && !did_match && !this_alternative_win)
3341 13131536 : curr_alt_win[this_alternative_matches] = false;
3342 :
3343 392885845 : if (early_clobber_p && operand_reg[nop] != NULL_RTX)
3344 169033 : early_clobbered_nops[early_clobbered_regs_num++] = nop;
3345 : }
3346 :
3347 134404526 : if (curr_insn_set != NULL_RTX
3348 : /* Allow just two operands or three operands where the third
3349 : is a clobber. */
3350 130536340 : && (n_operands == 2
3351 28643740 : || (n_operands == 3
3352 26530482 : && GET_CODE (PATTERN (curr_insn)) == PARALLEL
3353 22492695 : && XVECLEN (PATTERN (curr_insn), 0) == 2
3354 22439553 : && GET_CODE (XVECEXP (PATTERN (curr_insn), 0, 1))
3355 : == CLOBBER))
3356 : /* Prevent processing non-move insns. */
3357 124249489 : && (GET_CODE (SET_SRC (curr_insn_set)) == SUBREG
3358 122482383 : || SET_SRC (curr_insn_set) == no_subreg_reg_operand[1])
3359 225315932 : && ((! curr_alt_win[0] && ! curr_alt_win[1]
3360 5868243 : && REG_P (no_subreg_reg_operand[0])
3361 2851972 : && REG_P (no_subreg_reg_operand[1])
3362 1213015 : && (reg_in_class_p (no_subreg_reg_operand[0], curr_alt[1])
3363 1007965 : || reg_in_class_p (no_subreg_reg_operand[1], curr_alt[0])))
3364 90387823 : || (! curr_alt_win[0] && curr_alt_win[1]
3365 27105946 : && REG_P (no_subreg_reg_operand[1])
3366 : /* Check that we reload memory not the memory
3367 : address. */
3368 15543880 : && ! (curr_alt_offmemok[0]
3369 386328 : && MEM_P (no_subreg_reg_operand[0]))
3370 15543880 : && reg_in_class_p (no_subreg_reg_operand[1], curr_alt[0]))
3371 76234295 : || (curr_alt_win[0] && ! curr_alt_win[1]
3372 9429132 : && REG_P (no_subreg_reg_operand[0])
3373 : /* Check that we reload memory not the memory
3374 : address. */
3375 6952444 : && ! (curr_alt_offmemok[1]
3376 1015567 : && MEM_P (no_subreg_reg_operand[1]))
3377 6952442 : && reg_in_class_p (no_subreg_reg_operand[0], curr_alt[1])
3378 6090427 : && (! CONST_POOL_OK_P (curr_operand_mode[1],
3379 : no_subreg_reg_operand[1])
3380 2229255 : || (targetm.preferred_reload_class
3381 2229255 : (no_subreg_reg_operand[1],
3382 : (enum reg_class) curr_alt[1]) != NO_REGS))
3383 : /* If it is a result of recent elimination in move
3384 : insn we can transform it into an add still by
3385 : using this alternative. */
3386 6047588 : && GET_CODE (no_subreg_reg_operand[1]) != PLUS
3387 : /* Likewise if the source has been replaced with an
3388 : equivalent value. This only happens once -- the reload
3389 : will use the equivalent value instead of the register it
3390 : replaces -- so there should be no danger of cycling. */
3391 5571001 : && !equiv_substition_p[1])))
3392 : {
3393 : /* We have a move insn and a new reload insn will be similar
3394 : to the current insn. We should avoid such situation as
3395 : it results in LRA cycling. */
3396 20218644 : if (lra_dump_file != NULL)
3397 239 : fprintf (lra_dump_file,
3398 : " Cycle danger: overall += LRA_MAX_REJECT\n");
3399 20218644 : overall += LRA_MAX_REJECT;
3400 : }
3401 134404526 : if (all_this_alternative != NO_REGS
3402 115070172 : && !SMALL_REGISTER_CLASS_P (all_this_alternative)
3403 114221952 : && all_used_nregs != 0 && all_reload_nregs != 0
3404 134404526 : && (all_used_nregs + all_reload_nregs + 1
3405 3966886 : >= ira_class_hard_regs_num[all_this_alternative]))
3406 : {
3407 366 : if (lra_dump_file != NULL)
3408 0 : fprintf
3409 0 : (lra_dump_file,
3410 : " Register starvation: overall += LRA_MAX_REJECT"
3411 : "(class=%s,avail=%d,used=%d,reload=%d)\n",
3412 : reg_class_names[all_this_alternative],
3413 : ira_class_hard_regs_num[all_this_alternative],
3414 : all_used_nregs, all_reload_nregs);
3415 366 : overall += LRA_MAX_REJECT;
3416 366 : if (!prefer_memory_p && INSN_CODE (curr_insn) < 0)
3417 : {
3418 : /* asm can permit memory and reg and can be not enough regs for
3419 : asm -- try now memory: */
3420 102 : prefer_memory_p = true;
3421 102 : if (lra_dump_file != NULL)
3422 0 : fprintf
3423 0 : (lra_dump_file,
3424 : " Trying now memory for operands\n");
3425 102 : goto repeat;
3426 : }
3427 : }
3428 134569958 : ok_p = true;
3429 : curr_alt_dont_inherit_ops_num = 0;
3430 134569958 : for (nop = 0; nop < early_clobbered_regs_num; nop++)
3431 : {
3432 165535 : int i, j, clobbered_hard_regno, first_conflict_j, last_conflict_j;
3433 165535 : HARD_REG_SET temp_set;
3434 :
3435 165535 : i = early_clobbered_nops[nop];
3436 165535 : if ((! curr_alt_win[i] && ! curr_alt_match_win[i])
3437 124620 : || hard_regno[i] < 0)
3438 164864 : continue;
3439 122785 : lra_assert (operand_reg[i] != NULL_RTX);
3440 : clobbered_hard_regno = hard_regno[i];
3441 122785 : CLEAR_HARD_REG_SET (temp_set);
3442 122785 : add_to_hard_reg_set (&temp_set, GET_MODE (*curr_id->operand_loc[i]),
3443 : clobbered_hard_regno);
3444 122785 : first_conflict_j = last_conflict_j = -1;
3445 620595 : for (j = 0; j < n_operands; j++)
3446 497811 : if (j == i
3447 : /* We don't want process insides of match_operator and
3448 : match_parallel because otherwise we would process
3449 : their operands once again generating a wrong
3450 : code. */
3451 375026 : || curr_static_id->operand[j].is_operator)
3452 124926 : continue;
3453 372885 : else if ((curr_alt_matches[j] == i && curr_alt_match_win[j])
3454 355024 : || (curr_alt_matches[i] == j && curr_alt_match_win[i]))
3455 17861 : continue;
3456 : /* If we don't reload j-th operand, check conflicts. */
3457 123814 : else if ((curr_alt_win[j] || curr_alt_match_win[j])
3458 416257 : && uses_hard_regs_p (*curr_id->operand_loc[j], temp_set))
3459 : {
3460 1145 : if (first_conflict_j < 0)
3461 671 : first_conflict_j = j;
3462 1145 : last_conflict_j = j;
3463 : /* Both the earlyclobber operand and conflicting operand
3464 : cannot both be user defined hard registers for asm.
3465 : Let curr_insn_transform diagnose it. */
3466 1145 : if (HARD_REGISTER_P (operand_reg[i])
3467 1 : && REG_USERVAR_P (operand_reg[i])
3468 1 : && operand_reg[j] != NULL_RTX
3469 1 : && HARD_REGISTER_P (operand_reg[j])
3470 1 : && REG_USERVAR_P (operand_reg[j])
3471 1146 : && INSN_CODE (curr_insn) < 0)
3472 1 : return false;
3473 : }
3474 122784 : if (last_conflict_j < 0)
3475 122114 : continue;
3476 :
3477 : /* If an earlyclobber operand conflicts with another non-matching
3478 : operand (ie, they have been assigned the same hard register),
3479 : then it is better to reload the other operand, as there may
3480 : exist yet another operand with a matching constraint associated
3481 : with the earlyclobber operand. However, if one of the operands
3482 : is an explicit use of a hard register, then we must reload the
3483 : other non-hard register operand. */
3484 670 : if (HARD_REGISTER_P (operand_reg[i])
3485 670 : || (first_conflict_j == last_conflict_j
3486 196 : && operand_reg[last_conflict_j] != NULL_RTX
3487 60 : && !curr_alt_match_win[last_conflict_j]
3488 60 : && !HARD_REGISTER_P (operand_reg[last_conflict_j])))
3489 : {
3490 60 : curr_alt_win[last_conflict_j] = false;
3491 60 : curr_alt_dont_inherit_ops[curr_alt_dont_inherit_ops_num++]
3492 60 : = last_conflict_j;
3493 60 : losers++;
3494 60 : if (lra_dump_file != NULL)
3495 0 : fprintf
3496 0 : (lra_dump_file,
3497 : " %d Conflict early clobber reload: losers++\n",
3498 : i);
3499 : }
3500 : else
3501 : {
3502 : /* We need to reload early clobbered register and the
3503 : matched registers. */
3504 3044 : for (j = 0; j < n_operands; j++)
3505 2434 : if (curr_alt_matches[j] == i)
3506 : {
3507 2 : curr_alt_match_win[j] = false;
3508 2 : losers++;
3509 2 : if (lra_dump_file != NULL)
3510 0 : fprintf
3511 0 : (lra_dump_file,
3512 : " %d Matching conflict early clobber "
3513 : "reloads: losers++\n",
3514 : j);
3515 2 : overall += LRA_LOSER_COST_FACTOR;
3516 : }
3517 610 : if (! curr_alt_match_win[i])
3518 610 : curr_alt_dont_inherit_ops[curr_alt_dont_inherit_ops_num++] = i;
3519 : else
3520 : {
3521 : /* Remember pseudos used for match reloads are never
3522 : inherited. */
3523 0 : lra_assert (curr_alt_matches[i] >= 0);
3524 0 : curr_alt_win[curr_alt_matches[i]] = false;
3525 : }
3526 610 : curr_alt_win[i] = curr_alt_match_win[i] = false;
3527 610 : losers++;
3528 610 : if (lra_dump_file != NULL)
3529 0 : fprintf
3530 0 : (lra_dump_file,
3531 : " %d Matched conflict early clobber reloads: "
3532 : "losers++\n",
3533 : i);
3534 : }
3535 : /* Early clobber was already reflected in REJECT. */
3536 670 : if (!matching_early_clobber[i])
3537 : {
3538 670 : lra_assert (reject > 0);
3539 670 : reject--;
3540 670 : matching_early_clobber[i] = 1;
3541 : }
3542 670 : overall += LRA_LOSER_COST_FACTOR - 1;
3543 : }
3544 134404423 : if (lra_dump_file != NULL)
3545 1761 : fprintf (lra_dump_file, " overall=%d,losers=%d,rld_nregs=%d\n",
3546 : overall, losers, reload_nregs);
3547 :
3548 : /* If this alternative can be made to work by reloading, and it
3549 : needs less reloading than the others checked so far, record
3550 : it as the chosen goal for reloading. */
3551 134404423 : if ((best_losers != 0 && losers == 0)
3552 59460697 : || (((best_losers == 0 && losers == 0)
3553 58469328 : || (best_losers != 0 && losers != 0))
3554 59460697 : && (best_overall > overall
3555 15309689 : || (best_overall == overall
3556 : /* If the cost of the reloads is the same,
3557 : prefer alternative which requires minimal
3558 : number of reload regs. */
3559 11416163 : && (reload_nregs < best_reload_nregs
3560 11313196 : || (reload_nregs == best_reload_nregs
3561 11271033 : && (best_reload_sum < reload_sum
3562 11249964 : || (best_reload_sum == reload_sum
3563 11226193 : && nalt < goal_alt_number))))))))
3564 : {
3565 387880510 : for (nop = 0; nop < n_operands; nop++)
3566 : {
3567 268417869 : goal_alt_win[nop] = curr_alt_win[nop];
3568 268417869 : goal_alt_match_win[nop] = curr_alt_match_win[nop];
3569 268417869 : goal_alt_matches[nop] = curr_alt_matches[nop];
3570 268417869 : goal_alt[nop] = curr_alt[nop];
3571 268417869 : goal_alt_exclude_start_hard_regs[nop]
3572 268417869 : = curr_alt_exclude_start_hard_regs[nop];
3573 268417869 : goal_alt_offmemok[nop] = curr_alt_offmemok[nop];
3574 : }
3575 119462641 : goal_alt_dont_inherit_ops_num = curr_alt_dont_inherit_ops_num;
3576 119462641 : goal_reuse_alt_p = curr_reuse_alt_p;
3577 119463298 : for (nop = 0; nop < curr_alt_dont_inherit_ops_num; nop++)
3578 657 : goal_alt_dont_inherit_ops[nop] = curr_alt_dont_inherit_ops[nop];
3579 119462641 : goal_alt_swapped = curr_swapped;
3580 119462641 : goal_alt_out_sp_reload_p = curr_alt_out_sp_reload_p;
3581 119462641 : best_overall = overall;
3582 119462641 : best_losers = losers;
3583 119462641 : best_reload_nregs = reload_nregs;
3584 119462641 : best_reload_sum = reload_sum;
3585 119462641 : goal_alt_number = nalt;
3586 : }
3587 134404423 : if (losers == 0 && !curr_alt_class_change_p)
3588 : /* Everything is satisfied. Do not process alternatives
3589 : anymore. */
3590 : break;
3591 58481401 : fail:
3592 180605422 : ;
3593 : }
3594 : return ok_p;
3595 : }
3596 :
3597 : /* Make reload base reg from address AD. */
3598 : static rtx
3599 0 : base_to_reg (struct address_info *ad)
3600 : {
3601 0 : enum reg_class cl;
3602 0 : int code = -1;
3603 0 : rtx new_inner = NULL_RTX;
3604 0 : rtx new_reg = NULL_RTX;
3605 0 : rtx_insn *insn;
3606 0 : rtx_insn *last_insn = get_last_insn();
3607 :
3608 0 : lra_assert (ad->disp == ad->disp_term);
3609 0 : cl = base_reg_class (ad->mode, ad->as, ad->base_outer_code,
3610 : get_index_code (ad));
3611 0 : new_reg = lra_create_new_reg (GET_MODE (*ad->base), NULL_RTX, cl, NULL,
3612 : "base");
3613 0 : new_inner = simplify_gen_binary (PLUS, GET_MODE (new_reg), new_reg,
3614 0 : ad->disp_term == NULL
3615 : ? const0_rtx
3616 : : *ad->disp_term);
3617 0 : if (!valid_address_p (ad->mode, new_inner, ad->as))
3618 : return NULL_RTX;
3619 0 : insn = emit_insn (gen_rtx_SET (new_reg, *ad->base));
3620 0 : code = recog_memoized (insn);
3621 0 : if (code < 0)
3622 : {
3623 0 : delete_insns_since (last_insn);
3624 0 : return NULL_RTX;
3625 : }
3626 :
3627 : return new_inner;
3628 : }
3629 :
3630 : /* Make reload base reg + DISP from address AD. Return the new pseudo. */
3631 : static rtx
3632 39 : base_plus_disp_to_reg (struct address_info *ad, rtx disp)
3633 : {
3634 39 : enum reg_class cl;
3635 39 : rtx new_reg;
3636 :
3637 39 : lra_assert (ad->base == ad->base_term);
3638 39 : cl = base_reg_class (ad->mode, ad->as, ad->base_outer_code,
3639 : get_index_code (ad));
3640 39 : new_reg = lra_create_new_reg (GET_MODE (*ad->base_term), NULL_RTX, cl, NULL,
3641 : "base + disp");
3642 39 : lra_emit_add (new_reg, *ad->base_term, disp);
3643 39 : return new_reg;
3644 : }
3645 :
3646 : /* Make reload of index part of address AD. Return the new
3647 : pseudo. */
3648 : static rtx
3649 0 : index_part_to_reg (struct address_info *ad, enum reg_class index_class)
3650 : {
3651 0 : rtx new_reg;
3652 :
3653 0 : new_reg = lra_create_new_reg (GET_MODE (*ad->index), NULL_RTX,
3654 : index_class, NULL, "index term");
3655 0 : expand_mult (GET_MODE (*ad->index), *ad->index_term,
3656 : GEN_INT (get_index_scale (ad)), new_reg, 1);
3657 0 : return new_reg;
3658 : }
3659 :
3660 : /* Return true if we can add a displacement to address AD, even if that
3661 : makes the address invalid. The fix-up code requires any new address
3662 : to be the sum of the BASE_TERM, INDEX and DISP_TERM fields. */
3663 : static bool
3664 20013 : can_add_disp_p (struct address_info *ad)
3665 : {
3666 20013 : return (!ad->autoinc_p
3667 20013 : && ad->segment == NULL
3668 20013 : && ad->base == ad->base_term
3669 40026 : && ad->disp == ad->disp_term);
3670 : }
3671 :
/* Make equiv substitution in address AD.  Substitute the (possibly
   eliminated) equivalent values for the base and index registers,
   folding any constant parts of the equivalences into the
   displacement.  Return true if a substitution was made.  */
static bool
equiv_address_substitution (struct address_info *ad)
{
  rtx base_reg, new_base_reg, index_reg, new_index_reg, *base_term, *index_term;
  poly_int64 disp;
  HOST_WIDE_INT scale;
  bool change_p;

  /* Look up the equivalent (after elimination) of the base register,
     looking through any wrapping subreg.  */
  base_term = strip_subreg (ad->base_term);
  if (base_term == NULL)
    base_reg = new_base_reg = NULL_RTX;
  else
    {
      base_reg = *base_term;
      new_base_reg = get_equiv_with_elimination (base_reg, curr_insn);
    }
  /* Likewise for the index register.  */
  index_term = strip_subreg (ad->index_term);
  if (index_term == NULL)
    index_reg = new_index_reg = NULL_RTX;
  else
    {
      index_reg = *index_term;
      new_index_reg = get_equiv_with_elimination (index_reg, curr_insn);
    }
  /* Nothing to substitute -- fast exit before any dumping.  */
  if (base_reg == new_base_reg && index_reg == new_index_reg)
    return false;
  /* Accumulated constant displacement extracted from the
     equivalences; applied to the address at the end.  */
  disp = 0;
  change_p = false;
  if (lra_dump_file != NULL)
    {
      fprintf (lra_dump_file, "Changing address in insn %d ",
	       INSN_UID (curr_insn));
      dump_value_slim (lra_dump_file, *ad->outer, 1);
    }
  if (base_reg != new_base_reg)
    {
      poly_int64 offset;
      if (REG_P (new_base_reg))
	{
	  /* Plain register equivalence: substitute directly.  */
	  *base_term = new_base_reg;
	  change_p = true;
	}
      else if (GET_CODE (new_base_reg) == PLUS
	       && REG_P (XEXP (new_base_reg, 0))
	       && poly_int_rtx_p (XEXP (new_base_reg, 1), &offset)
	       && can_add_disp_p (ad))
	{
	  /* reg + const equivalence: use the register as the new base
	     and fold the constant into the displacement.  */
	  disp += offset;
	  *base_term = XEXP (new_base_reg, 0);
	  change_p = true;
	}
      /* Keep a duplicated base term (e.g. in pre/post-modify
	 addresses) in sync.  */
      if (ad->base_term2 != NULL)
	*ad->base_term2 = *ad->base_term;
    }
  if (index_reg != new_index_reg)
    {
      poly_int64 offset;
      if (REG_P (new_index_reg))
	{
	  *index_term = new_index_reg;
	  change_p = true;
	}
      /* Note the intentional assignment in the last condition: the
	 folded constant must be multiplied by the index scale, and a
	 zero scale rejects the transformation.  */
      else if (GET_CODE (new_index_reg) == PLUS
	       && REG_P (XEXP (new_index_reg, 0))
	       && poly_int_rtx_p (XEXP (new_index_reg, 1), &offset)
	       && can_add_disp_p (ad)
	       && (scale = get_index_scale (ad)))
	{
	  disp += offset * scale;
	  *index_term = XEXP (new_index_reg, 0);
	  change_p = true;
	}
    }
  /* Apply the accumulated displacement, either to the existing
     displacement term or by wrapping the whole inner address in a
     PLUS and re-decomposing it.  */
  if (maybe_ne (disp, 0))
    {
      if (ad->disp != NULL)
	*ad->disp = plus_constant (GET_MODE (*ad->inner), *ad->disp, disp);
      else
	{
	  *ad->inner = plus_constant (GET_MODE (*ad->inner), *ad->inner, disp);
	  update_address (ad);
	}
      change_p = true;
    }
  if (lra_dump_file != NULL)
    {
      if (! change_p)
	fprintf (lra_dump_file, " -- no change\n");
      else
	{
	  fprintf (lra_dump_file, " on equiv ");
	  dump_value_slim (lra_dump_file, *ad->outer, 1);
	  fprintf (lra_dump_file, "\n");
	}
    }
  return change_p;
}
3771 :
/* Skip all modifiers and whitespaces in constraint STR and return the
   result.  */
static const char *
skip_constraint_modifiers (const char *str)
{
  /* Advance past every modifier character ('+', '&', '=', '*', '$',
     '^', '%', '?', '!') and any blanks; stop at the first character
     that can start a real constraint (or at the terminator).  */
  while (*str == '+' || *str == '&' || *str == '=' || *str == '*'
	 || *str == ' ' || *str == '\t' || *str == '$' || *str == '^'
	 || *str == '%' || *str == '?' || *str == '!')
    str++;
  return str;
}
3786 :
/* Takes a string of 0 or more comma-separated constraints.  When more
   than one constraint is present, evaluate whether they all correspond
   to a single, repeated constraint (e.g. "r,r") or whether we have
   more than one distinct constraints (e.g. "r,m").  Return true in the
   former case, false in the latter (and for any unknown constraint).  */
static bool
constraint_unique (const char *cstr)
{
  enum constraint_num ca, cb;
  /* CA holds the constraint seen first; CONSTRAINT__UNKNOWN marks
     "nothing seen yet" since a real unknown constraint exits early.  */
  ca = CONSTRAINT__UNKNOWN;
  for (;;)
    {
      cstr = skip_constraint_modifiers (cstr);
      /* An empty alternative counts as the all-accepting "X".  */
      if (*cstr == '\0' || *cstr == ',')
	cb = CONSTRAINT_X;
      else
	{
	  cb = lookup_constraint (cstr);
	  if (cb == CONSTRAINT__UNKNOWN)
	    return false;
	  /* Constraints may be multi-character; advance by the full
	     length of this one.  */
	  cstr += CONSTRAINT_LEN (cstr[0], cstr);
	}
      /* Handle the first iteration of the loop.  */
      if (ca == CONSTRAINT__UNKNOWN)
	ca = cb;
      /* Handle the general case of comparing ca with subsequent
	 constraints.  */
      else if (ca != cb)
	return false;
      if (*cstr == '\0')
	return true;
      if (*cstr == ',')
	cstr += 1;
    }
}
3821 :
3822 : /* Major function to make reloads for an address in operand NOP or
3823 : check its correctness (If CHECK_ONLY_P is true). The supported
3824 : cases are:
3825 :
3826 : 1) an address that existed before LRA started, at which point it
3827 : must have been valid. These addresses are subject to elimination
3828 : and may have become invalid due to the elimination offset being out
3829 : of range.
3830 :
3831 : 2) an address created by forcing a constant to memory
3832 : (force_const_to_mem). The initial form of these addresses might
3833 : not be valid, and it is this function's job to make them valid.
3834 :
3835 : 3) a frame address formed from a register and a (possibly zero)
3836 : constant offset. As above, these addresses might not be valid and
3837 : this function must make them so.
3838 :
3839 : Add reloads to the lists *BEFORE and *AFTER. We might need to add
3840 : reloads to *AFTER because of inc/dec, {pre, post} modify in the
3841 : address. Return true for any RTL change.
3842 :
3843 : The function is a helper function which does not produce all
3844 : transformations (when CHECK_ONLY_P is false) which can be
3845 : necessary. It does just basic steps. To do all necessary
3846 : transformations use function process_address. */
3847 : static bool
3848 175794774 : process_address_1 (int nop, bool check_only_p,
3849 : rtx_insn **before, rtx_insn **after)
3850 : {
3851 175794774 : struct address_info ad;
3852 175794774 : rtx new_reg;
3853 175794774 : HOST_WIDE_INT scale;
3854 175794774 : rtx op = *curr_id->operand_loc[nop];
3855 175794774 : rtx mem = extract_mem_from_operand (op);
3856 175794774 : const char *constraint;
3857 175794774 : enum constraint_num cn;
3858 175794774 : bool change_p = false;
3859 :
3860 175794774 : if (MEM_P (mem)
3861 37714053 : && GET_MODE (mem) == BLKmode
3862 25515 : && GET_CODE (XEXP (mem, 0)) == SCRATCH)
3863 : return false;
3864 :
3865 175794774 : constraint
3866 175794774 : = skip_constraint_modifiers (curr_static_id->operand[nop].constraint);
3867 175794774 : if (IN_RANGE (constraint[0], '0', '9'))
3868 : {
3869 13646095 : char *end;
3870 13646095 : unsigned long dup = strtoul (constraint, &end, 10);
3871 13646095 : constraint
3872 13646095 : = skip_constraint_modifiers (curr_static_id->operand[dup].constraint);
3873 : }
3874 187649669 : cn = lookup_constraint (*constraint == '\0' ? "X" : constraint);
3875 : /* If we have several alternatives or/and several constraints in an
3876 : alternative and we can not say at this stage what constraint will be used,
3877 : use unknown constraint. The exception is an address constraint. If
3878 : operand has one address constraint, probably all others constraints are
3879 : address ones. */
3880 163939879 : if (constraint[0] != '\0' && get_constraint_type (cn) != CT_ADDRESS
3881 337995981 : && !constraint_unique (constraint))
3882 : cn = CONSTRAINT__UNKNOWN;
3883 20984353 : if (insn_extra_address_constraint (cn)
3884 : /* When we find an asm operand with an address constraint that
3885 : doesn't satisfy address_operand to begin with, we clear
3886 : is_address, so that we don't try to make a non-address fit.
3887 : If the asm statement got this far, it's because other
3888 : constraints are available, and we'll use them, disregarding
3889 : the unsatisfiable address ones. */
3890 20984353 : && curr_static_id->operand[nop].is_address)
3891 1738653 : decompose_lea_address (&ad, curr_id->operand_loc[nop]);
3892 : /* Do not attempt to decompose arbitrary addresses generated by combine
3893 : for asm operands with loose constraints, e.g 'X'.
3894 : Need to extract memory from op for special memory constraint,
3895 : i.e. bcst_mem_operand in i386 backend. */
3896 174056121 : else if (MEM_P (mem)
3897 174056301 : && !(INSN_CODE (curr_insn) < 0
3898 19351 : && get_constraint_type (cn) == CT_FIXED_FORM
3899 180 : && constraint_satisfied_p (op, cn)))
3900 37713873 : decompose_mem_address (&ad, mem);
3901 136342248 : else if (GET_CODE (op) == SUBREG
3902 3555965 : && MEM_P (SUBREG_REG (op)))
3903 0 : decompose_mem_address (&ad, SUBREG_REG (op));
3904 : else
3905 : return false;
3906 : /* If INDEX_REG_CLASS is assigned to base_term already and isn't to
3907 : index_term, swap them so to avoid assigning INDEX_REG_CLASS to both
3908 : when INDEX_REG_CLASS is a single register class. */
3909 39452526 : enum reg_class index_cl = index_reg_class (curr_insn);
3910 39452526 : if (ad.base_term != NULL
3911 33241475 : && ad.index_term != NULL
3912 1529989 : && ira_class_hard_regs_num[index_cl] == 1
3913 0 : && REG_P (*ad.base_term)
3914 0 : && REG_P (*ad.index_term)
3915 0 : && in_class_p (*ad.base_term, index_cl, NULL)
3916 39452526 : && ! in_class_p (*ad.index_term, index_cl, NULL))
3917 : {
3918 0 : std::swap (ad.base, ad.index);
3919 0 : std::swap (ad.base_term, ad.index_term);
3920 : }
3921 39452526 : if (! check_only_p)
3922 39445593 : change_p = equiv_address_substitution (&ad);
3923 39452526 : if (ad.base_term != NULL
3924 72694001 : && (process_addr_reg
3925 66482950 : (ad.base_term, check_only_p, before,
3926 33241475 : (ad.autoinc_p
3927 4173988 : && !(REG_P (*ad.base_term)
3928 2086994 : && find_regno_note (curr_insn, REG_DEAD,
3929 : REGNO (*ad.base_term)) != NULL_RTX)
3930 : ? after : NULL),
3931 33241475 : base_reg_class (ad.mode, ad.as, ad.base_outer_code,
3932 : get_index_code (&ad), curr_insn))))
3933 : {
3934 434667 : change_p = true;
3935 434667 : if (ad.base_term2 != NULL)
3936 0 : *ad.base_term2 = *ad.base_term;
3937 : }
3938 39452526 : if (ad.index_term != NULL
3939 39452526 : && process_addr_reg (ad.index_term, check_only_p,
3940 : before, NULL, index_cl))
3941 : change_p = true;
3942 :
3943 : /* Target hooks sometimes don't treat extra-constraint addresses as
3944 : legitimate address_operands, so handle them specially. */
3945 39452526 : if (insn_extra_address_constraint (cn)
3946 39452526 : && satisfies_address_constraint_p (&ad, cn))
3947 : return change_p;
3948 :
3949 37713880 : if (check_only_p)
3950 : return change_p;
3951 :
3952 : /* There are three cases where the shape of *AD.INNER may now be invalid:
3953 :
3954 : 1) the original address was valid, but either elimination or
3955 : equiv_address_substitution was applied and that made
3956 : the address invalid.
3957 :
3958 : 2) the address is an invalid symbolic address created by
3959 : force_const_to_mem.
3960 :
3961 : 3) the address is a frame address with an invalid offset.
3962 :
3963 : 4) the address is a frame address with an invalid base.
3964 :
3965 : All these cases involve a non-autoinc address, so there is no
3966 : point revalidating other types. */
3967 37707558 : if (ad.autoinc_p || valid_address_p (op, &ad, cn))
3968 37707114 : return change_p;
3969 :
3970 : /* Any index existed before LRA started, so we can assume that the
3971 : presence and shape of the index is valid. */
3972 444 : push_to_sequence (*before);
3973 444 : lra_assert (ad.disp == ad.disp_term);
3974 444 : if (ad.base == NULL)
3975 : {
3976 352 : if (ad.index == NULL)
3977 : {
3978 352 : rtx_insn *insn;
3979 352 : rtx_insn *last = get_last_insn ();
3980 352 : int code = -1;
3981 352 : enum reg_class cl = base_reg_class (ad.mode, ad.as,
3982 : SCRATCH, SCRATCH,
3983 : curr_insn);
3984 352 : rtx addr = *ad.inner;
3985 :
3986 695 : new_reg = lra_create_new_reg (Pmode, NULL_RTX, cl, NULL, "addr");
3987 352 : if (HAVE_lo_sum)
3988 : {
3989 : /* addr => lo_sum (new_base, addr), case (2) above. */
3990 : insn = emit_insn (gen_rtx_SET
3991 : (new_reg,
3992 : gen_rtx_HIGH (Pmode, copy_rtx (addr))));
3993 : code = recog_memoized (insn);
3994 : if (code >= 0)
3995 : {
3996 : *ad.inner = gen_rtx_LO_SUM (Pmode, new_reg, addr);
3997 : if (!valid_address_p (op, &ad, cn))
3998 : {
3999 : /* Try to put lo_sum into register. */
4000 : insn = emit_insn (gen_rtx_SET
4001 : (new_reg,
4002 : gen_rtx_LO_SUM (Pmode, new_reg, addr)));
4003 : code = recog_memoized (insn);
4004 : if (code >= 0)
4005 : {
4006 : *ad.inner = new_reg;
4007 : if (!valid_address_p (op, &ad, cn))
4008 : {
4009 : *ad.inner = addr;
4010 : code = -1;
4011 : }
4012 : }
4013 :
4014 : }
4015 : }
4016 : if (code < 0)
4017 : delete_insns_since (last);
4018 : }
4019 :
4020 352 : if (code < 0)
4021 : {
4022 : /* addr => new_base, case (2) above. */
4023 352 : lra_emit_move (new_reg, addr);
4024 :
4025 704 : for (insn = last == NULL_RTX ? get_insns () : NEXT_INSN (last);
4026 704 : insn != NULL_RTX;
4027 352 : insn = NEXT_INSN (insn))
4028 352 : if (recog_memoized (insn) < 0)
4029 : break;
4030 352 : if (insn != NULL_RTX)
4031 : {
4032 : /* Do nothing if we cannot generate right insns.
4033 : This is analogous to reload pass behavior. */
4034 0 : delete_insns_since (last);
4035 0 : end_sequence ();
4036 0 : return false;
4037 : }
4038 352 : *ad.inner = new_reg;
4039 : }
4040 : }
4041 : else
4042 : {
4043 : /* index * scale + disp => new base + index * scale,
4044 : case (1) above. */
4045 0 : enum reg_class cl = base_reg_class (ad.mode, ad.as, PLUS,
4046 0 : GET_CODE (*ad.index),
4047 : curr_insn);
4048 :
4049 0 : lra_assert (index_cl != NO_REGS);
4050 0 : new_reg = lra_create_new_reg (Pmode, NULL_RTX, cl, NULL, "disp");
4051 0 : lra_emit_move (new_reg, *ad.disp);
4052 0 : *ad.inner = simplify_gen_binary (PLUS, GET_MODE (new_reg),
4053 0 : new_reg, *ad.index);
4054 : }
4055 : }
4056 92 : else if (ad.index == NULL)
4057 : {
4058 53 : int regno;
4059 53 : enum reg_class cl;
4060 53 : rtx set;
4061 53 : rtx_insn *insns, *last_insn;
4062 :
4063 53 : cl = base_reg_class (ad.mode, ad.as, ad.base_outer_code,
4064 : get_index_code (&ad), curr_insn);
4065 :
4066 53 : if (REG_P (*ad.base_term)
4067 53 : && ira_class_subset_p[get_reg_class (REGNO (*ad.base_term))][cl])
4068 : /* It seems base reg is already in the base reg class and changing it
4069 : does not make a progress. So reload the whole inner address. */
4070 53 : goto reload_inner_addr;
4071 :
4072 : /* Try to reload base into register only if the base is invalid
4073 : for the address but with valid offset, case (4) above. */
4074 0 : start_sequence ();
4075 0 : new_reg = base_to_reg (&ad);
4076 :
4077 : /* base + disp => new base, cases (1) and (3) above. */
4078 : /* Another option would be to reload the displacement into an
4079 : index register. However, postreload has code to optimize
4080 : address reloads that have the same base and different
4081 : displacements, so reloading into an index register would
4082 : not necessarily be a win. */
4083 0 : if (new_reg == NULL_RTX)
4084 : {
4085 : /* See if the target can split the displacement into a
4086 : legitimate new displacement from a local anchor. */
4087 0 : gcc_assert (ad.disp == ad.disp_term);
4088 0 : poly_int64 orig_offset;
4089 0 : rtx offset1, offset2;
4090 0 : if (poly_int_rtx_p (*ad.disp, &orig_offset)
4091 0 : && targetm.legitimize_address_displacement (&offset1, &offset2,
4092 : orig_offset,
4093 : ad.mode))
4094 : {
4095 0 : new_reg = base_plus_disp_to_reg (&ad, offset1);
4096 0 : new_reg = gen_rtx_PLUS (GET_MODE (new_reg), new_reg, offset2);
4097 : }
4098 : else
4099 0 : new_reg = base_plus_disp_to_reg (&ad, *ad.disp);
4100 : }
4101 0 : insns = get_insns ();
4102 0 : last_insn = get_last_insn ();
4103 : /* If we generated at least two insns, try last insn source as
4104 : an address. If we succeed, we generate one less insn. */
4105 0 : if (REG_P (new_reg)
4106 0 : && last_insn != insns
4107 0 : && (set = single_set (last_insn)) != NULL_RTX
4108 0 : && GET_CODE (SET_SRC (set)) == PLUS
4109 0 : && REG_P (XEXP (SET_SRC (set), 0))
4110 0 : && CONSTANT_P (XEXP (SET_SRC (set), 1)))
4111 : {
4112 0 : *ad.inner = SET_SRC (set);
4113 0 : if (valid_address_p (op, &ad, cn))
4114 : {
4115 0 : *ad.base_term = XEXP (SET_SRC (set), 0);
4116 0 : *ad.disp_term = XEXP (SET_SRC (set), 1);
4117 0 : regno = REGNO (*ad.base_term);
4118 0 : if (regno >= FIRST_PSEUDO_REGISTER
4119 0 : && cl != lra_get_allocno_class (regno))
4120 0 : lra_change_class (regno, cl, " Change to", true);
4121 0 : new_reg = SET_SRC (set);
4122 0 : delete_insns_since (PREV_INSN (last_insn));
4123 : }
4124 : }
4125 0 : end_sequence ();
4126 0 : emit_insn (insns);
4127 0 : *ad.inner = new_reg;
4128 : }
4129 39 : else if (ad.disp_term != NULL)
4130 : {
4131 : /* base + scale * index + disp => new base + scale * index,
4132 : case (1) above. */
4133 39 : gcc_assert (ad.disp == ad.disp_term);
4134 39 : new_reg = base_plus_disp_to_reg (&ad, *ad.disp);
4135 39 : *ad.inner = simplify_gen_binary (PLUS, GET_MODE (new_reg),
4136 39 : new_reg, *ad.index);
4137 : }
4138 0 : else if ((scale = get_index_scale (&ad)) == 1)
4139 : {
4140 : /* The last transformation to one reg will be made in
4141 : curr_insn_transform function. */
4142 0 : end_sequence ();
4143 0 : return false;
4144 : }
4145 0 : else if (scale != 0)
4146 : {
4147 : /* base + scale * index => base + new_reg,
4148 : case (1) above.
4149 : Index part of address may become invalid. For example, we
4150 : changed pseudo on the equivalent memory and a subreg of the
4151 : pseudo onto the memory of different mode for which the scale is
 4152 : 	 prohibited.  */
4153 0 : new_reg = index_part_to_reg (&ad, index_cl);
4154 0 : *ad.inner = simplify_gen_binary (PLUS, GET_MODE (new_reg),
4155 0 : *ad.base_term, new_reg);
4156 : }
4157 : else
4158 : {
4159 53 : enum reg_class cl;
4160 53 : rtx addr;
4161 0 : reload_inner_addr:
4162 53 : cl = base_reg_class (ad.mode, ad.as, SCRATCH, SCRATCH, curr_insn);
4163 53 : addr = *ad.inner;
4164 53 : new_reg = lra_create_new_reg (Pmode, NULL_RTX, cl, NULL, "addr");
4165 : /* addr => new_base. */
4166 53 : lra_emit_move (new_reg, addr);
4167 53 : *ad.inner = new_reg;
4168 : }
4169 444 : *before = end_sequence ();
4170 444 : return true;
4171 : }
4172 :
4173 : /* If CHECK_ONLY_P is false, do address reloads until it is necessary.
4174 : Use process_address_1 as a helper function. Return true for any
4175 : RTL changes.
4176 :
4177 : If CHECK_ONLY_P is true, just check address correctness. Return
4178 : false if the address correct. */
4179 : static bool
4180 175303648 : process_address (int nop, bool check_only_p,
4181 : rtx_insn **before, rtx_insn **after)
4182 : {
4183 175303648 : bool res = false;
4184 : /* Use enough iterations to process all address parts: */
4185 175794774 : for (int i = 0; i < 10; i++)
4186 : {
4187 175794774 : if (!process_address_1 (nop, check_only_p, before, after))
4188 : {
4189 : return res;
4190 : }
4191 : else
4192 : {
4193 491126 : if (check_only_p)
4194 : return true;
4195 491126 : res = true;
4196 : }
4197 : }
4198 0 : fatal_insn ("unable to reload address in ", curr_insn);
4199 : }
4200 :
4201 : /* Override the generic address_reload_context in order to
4202 : control the creation of reload pseudos. */
4203 : class lra_autoinc_reload_context : public address_reload_context
4204 : {
4205 : machine_mode mode;
4206 : enum reg_class rclass;
4207 :
4208 : public:
4209 0 : lra_autoinc_reload_context (machine_mode mode, enum reg_class new_rclass)
4210 0 : : mode (mode), rclass (new_rclass) {}
4211 :
4212 0 : rtx get_reload_reg () const override final
4213 : {
4214 0 : return lra_create_new_reg (mode, NULL_RTX, rclass, NULL, "INC/DEC result");
4215 : }
4216 : };
4217 :
4218 : /* Emit insns to reload VALUE into a new register. VALUE is an
4219 : auto-increment or auto-decrement RTX whose operand is a register or
4220 : memory location; so reloading involves incrementing that location.
4221 :
4222 : INC_AMOUNT is the number to increment or decrement by (always
4223 : positive and ignored for POST_MODIFY/PRE_MODIFY).
4224 :
4225 : Return a pseudo containing the result. */
4226 : static rtx
4227 0 : emit_inc (enum reg_class new_rclass, rtx value, poly_int64 inc_amount)
4228 : {
4229 0 : lra_autoinc_reload_context context (GET_MODE (value), new_rclass);
4230 0 : return context.emit_autoinc (value, inc_amount);
4231 : }
4232 :
4233 : /* Return true if the current move insn does not need processing as we
4234 : already know that it satisfies its constraints. */
4235 : static bool
4236 100507428 : simple_move_p (void)
4237 : {
4238 100507428 : rtx dest, src;
4239 100507428 : enum reg_class dclass, sclass;
4240 :
4241 100507428 : lra_assert (curr_insn_set != NULL_RTX);
4242 100507428 : dest = SET_DEST (curr_insn_set);
4243 100507428 : src = SET_SRC (curr_insn_set);
4244 :
4245 : /* If the instruction has multiple sets we need to process it even if it
4246 : is single_set. This can happen if one or more of the SETs are dead.
4247 : See PR73650. */
4248 100507428 : if (multiple_sets (curr_insn))
4249 : return false;
4250 :
4251 100319776 : return ((dclass = get_op_class (dest)) != NO_REGS
4252 21078809 : && (sclass = get_op_class (src)) != NO_REGS
4253 : /* The backend guarantees that register moves of cost 2
4254 : never need reloads. */
4255 89403696 : && targetm.register_move_cost (GET_MODE (src), sclass, dclass) == 2);
4256 : }
4257 :
4258 : /* Swap operands NOP and NOP + 1. */
4259 : static inline void
4260 21428269 : swap_operands (int nop)
4261 : {
4262 21428269 : std::swap (curr_operand_mode[nop], curr_operand_mode[nop + 1]);
4263 21428269 : std::swap (original_subreg_reg_mode[nop], original_subreg_reg_mode[nop + 1]);
4264 21428269 : std::swap (*curr_id->operand_loc[nop], *curr_id->operand_loc[nop + 1]);
4265 21428269 : std::swap (equiv_substition_p[nop], equiv_substition_p[nop + 1]);
4266 : /* Swap the duplicates too. */
4267 21428269 : lra_update_dup (curr_id, nop);
4268 21428269 : lra_update_dup (curr_id, nop + 1);
4269 21428269 : }
4270 :
4271 : /* Return TRUE if X is a (subreg of) reg and there are no hard regs of X class
4272 : which can contain value of MODE. */
4273 38 : static bool invalid_mode_reg_p (enum machine_mode mode, rtx x)
4274 : {
4275 38 : if (SUBREG_P (x))
4276 3 : x = SUBREG_REG (x);
4277 38 : if (! REG_P (x))
4278 : return false;
4279 38 : enum reg_class rclass = get_reg_class (REGNO (x));
4280 38 : return (!hard_reg_set_empty_p (reg_class_contents[rclass])
4281 38 : && hard_reg_set_subset_p
4282 38 : (reg_class_contents[rclass],
4283 38 : ira_prohibited_class_mode_regs[rclass][mode]));
4284 : }
4285 :
4286 : /* Return TRUE if regno is referenced in more than one non-debug insn. */
4287 : static bool
4288 2871314 : multiple_insn_refs_p (int regno)
4289 : {
4290 2871314 : unsigned int uid;
4291 2871314 : bitmap_iterator bi;
4292 2871314 : int nrefs = 0;
4293 6925714 : EXECUTE_IF_SET_IN_BITMAP (&lra_reg_info[regno].insn_bitmap, 0, uid, bi)
4294 : {
4295 6920133 : if (!NONDEBUG_INSN_P (lra_insn_recog_data[uid]->insn))
4296 1183086 : continue;
4297 5737047 : if (nrefs == 1)
4298 : return true;
4299 2871314 : nrefs++;
4300 : }
4301 : return false;
4302 : }
4303 :
4304 : /* Mark insns starting with FIRST as postponed for processing their
4305 : constraints. See comments for lra_postponed_insns. */
4306 : static void
4307 92450 : postpone_insns (rtx_insn *first)
4308 : {
4309 105131 : for (auto insn = first; insn != NULL_RTX; insn = NEXT_INSN (insn))
4310 : {
4311 12681 : bitmap_set_bit (&lra_postponed_insns, INSN_UID (insn));
4312 12681 : if (lra_dump_file != NULL)
4313 : {
4314 7 : fprintf (lra_dump_file, " Postponing constraint processing: ");
4315 7 : dump_insn_slim (lra_dump_file, insn);
4316 : }
4317 : }
4318 92450 : }
4319 :
4320 : /* Main entry point of the constraint code: search the body of the
4321 : current insn to choose the best alternative. It is mimicking insn
4322 : alternative cost calculation model of former reload pass. That is
4323 : because machine descriptions were written to use this model. This
4324 : model can be changed in future. Make commutative operand exchange
4325 : if it is chosen.
4326 :
4327 : if CHECK_ONLY_P is false, do RTL changes to satisfy the
4328 : constraints. Return true if any change happened during function
4329 : call.
4330 :
4331 : If CHECK_ONLY_P is true then don't do any transformation. Just
4332 : check that the insn satisfies all constraints. If the insn does
4333 : not satisfy any constraint, return true. */
4334 : static bool
4335 105868632 : curr_insn_transform (bool check_only_p)
4336 : {
4337 105868632 : int i, j, k;
4338 105868632 : int n_operands;
4339 105868632 : int n_alternatives;
4340 105868632 : int n_outputs;
4341 105868632 : int commutative;
4342 105868632 : signed char goal_alt_matched[MAX_RECOG_OPERANDS][MAX_RECOG_OPERANDS];
4343 105868632 : signed char match_inputs[MAX_RECOG_OPERANDS + 1];
4344 105868632 : signed char outputs[MAX_RECOG_OPERANDS + 1];
4345 105868632 : rtx_insn *before, *after;
4346 105868632 : bool alt_p = false;
4347 : /* Flag that the insn has been changed through a transformation. */
4348 105868632 : bool change_p;
4349 105868632 : bool sec_mem_p;
4350 105868632 : bool use_sec_mem_p;
4351 105868632 : int max_regno_before;
4352 105868632 : int reused_alternative_num;
4353 :
4354 105868632 : curr_insn_set = single_set (curr_insn);
4355 105868632 : if (curr_insn_set != NULL_RTX && simple_move_p ())
4356 : {
4357 : /* We assume that the corresponding insn alternative has no
4358 : earlier clobbers. If it is not the case, don't define move
4359 : cost equal to 2 for the corresponding register classes. */
4360 16190167 : lra_set_used_insn_alternative (curr_insn, LRA_NON_CLOBBERED_ALT);
4361 16190167 : return false;
4362 : }
4363 :
4364 89678465 : no_input_reloads_p = no_output_reloads_p = false;
4365 89678465 : goal_alt_number = -1;
4366 89678465 : change_p = sec_mem_p = false;
4367 :
4368 : /* CALL_INSNs are not allowed to have any output reloads. */
4369 89678465 : if (CALL_P (curr_insn))
4370 5989698 : no_output_reloads_p = true;
4371 :
4372 89678465 : n_operands = curr_static_id->n_operands;
4373 89678465 : n_alternatives = curr_static_id->n_alternatives;
4374 :
4375 : /* Just return "no reloads" if insn has no operands with
4376 : constraints. */
4377 89678465 : if (n_operands == 0 || n_alternatives == 0)
4378 : return false;
4379 :
4380 79222605 : max_regno_before = max_reg_num ();
4381 :
4382 335016267 : for (i = 0; i < n_operands; i++)
4383 : {
4384 176571057 : goal_alt_matched[i][0] = -1;
4385 176571057 : goal_alt_matches[i] = -1;
4386 : }
4387 :
4388 79222605 : commutative = curr_static_id->commutative;
4389 :
4390 : /* Now see what we need for pseudos that didn't get hard regs or got
4391 : the wrong kind of hard reg. For this, we must consider all the
4392 : operands together against the register constraints. */
4393 :
4394 79222605 : best_losers = best_overall = INT_MAX;
4395 79222605 : best_reload_sum = 0;
4396 :
4397 79222605 : curr_swapped = false;
4398 79222605 : goal_alt_swapped = false;
4399 :
4400 79222605 : if (! check_only_p)
4401 : /* Make equivalence substitution and memory subreg elimination
4402 : before address processing because an address legitimacy can
4403 : depend on memory mode. */
4404 255723605 : for (i = 0; i < n_operands; i++)
4405 : {
4406 176520574 : rtx op, subst, old;
4407 176520574 : bool op_change_p = false;
4408 :
4409 176520574 : if (curr_static_id->operand[i].is_operator)
4410 1398529 : continue;
4411 :
4412 175122045 : old = op = *curr_id->operand_loc[i];
4413 175122045 : if (GET_CODE (old) == SUBREG)
4414 3611035 : old = SUBREG_REG (old);
4415 175122045 : subst = get_equiv_with_elimination (old, curr_insn);
4416 175122045 : original_subreg_reg_mode[i] = VOIDmode;
4417 175122045 : equiv_substition_p[i] = false;
4418 :
4419 175122045 : if (subst != old
4420 : /* We don't want to change an out operand by constant or invariant
4421 : which will require additional reloads, e.g. by putting a constant
4422 : into memory. */
4423 1518595 : && (curr_static_id->operand[i].type == OP_IN || MEM_P (subst)
4424 0 : || (GET_CODE (subst) == SUBREG && MEM_P (SUBREG_REG (subst)))))
4425 : {
4426 1518595 : equiv_substition_p[i] = true;
4427 1518595 : rtx new_subst = copy_rtx (subst);
4428 1518595 : if (lra_pointer_equiv_set_in (subst))
4429 774489 : lra_pointer_equiv_set_add (new_subst);
4430 1518595 : subst = new_subst;
4431 1518595 : lra_assert (REG_P (old));
4432 1518595 : if (GET_CODE (op) != SUBREG)
4433 1463974 : *curr_id->operand_loc[i] = subst;
4434 : else
4435 : {
4436 54621 : SUBREG_REG (op) = subst;
4437 54621 : if (GET_MODE (subst) == VOIDmode)
4438 90 : original_subreg_reg_mode[i] = GET_MODE (old);
4439 : }
4440 1518595 : if (lra_dump_file != NULL)
4441 : {
4442 3 : fprintf (lra_dump_file,
4443 : "Changing pseudo %d in operand %i of insn %u on equiv ",
4444 3 : REGNO (old), i, INSN_UID (curr_insn));
4445 3 : dump_value_slim (lra_dump_file, subst, 1);
4446 3 : fprintf (lra_dump_file, "\n");
4447 : }
4448 1518595 : op_change_p = change_p = true;
4449 : }
4450 175122045 : if (simplify_operand_subreg (i, GET_MODE (old)) || op_change_p)
4451 : {
4452 1519122 : change_p = true;
4453 1519122 : lra_update_dup (curr_id, i);
4454 : }
4455 : }
4456 :
4457 : /* We process equivalences before ignoring postponed insns on the current
4458 : constraint sub-pass but before any reload insn generation for the
4459 : postponed insn. */
4460 79203031 : if (! check_only_p
4461 79203031 : && bitmap_bit_p (&lra_postponed_insns, INSN_UID (curr_insn)))
4462 : return true;
4463 :
4464 : /* Reload address registers and displacements. We do it before
4465 : finding an alternative because of memory constraints. */
4466 79215904 : before = after = NULL;
4467 255773559 : for (i = 0; i < n_operands; i++)
4468 176557655 : if (! curr_static_id->operand[i].is_operator
4469 176557655 : && process_address (i, check_only_p, &before, &after))
4470 : {
4471 491123 : if (check_only_p)
4472 : return true;
4473 491123 : change_p = true;
4474 491123 : lra_update_dup (curr_id, i);
4475 : }
4476 :
4477 79215904 : if (change_p)
4478 : /* If we've changed the instruction then any alternative that
4479 : we chose previously may no longer be valid. */
4480 1962974 : lra_set_used_insn_alternative (curr_insn, LRA_UNKNOWN_ALT);
4481 :
4482 79196330 : if (! check_only_p && curr_insn_set != NULL_RTX
4483 154602073 : && check_and_process_move (&change_p, &sec_mem_p))
4484 0 : return change_p;
4485 :
4486 79215904 : try_swapped:
4487 :
4488 89627109 : reused_alternative_num = check_only_p ? LRA_UNKNOWN_ALT : curr_id->used_insn_alternative;
4489 89627109 : if (lra_dump_file != NULL && reused_alternative_num >= 0)
4490 0 : fprintf (lra_dump_file, "Reusing alternative %d for insn #%u\n",
4491 0 : reused_alternative_num, INSN_UID (curr_insn));
4492 :
4493 89627109 : if (process_alt_operands (reused_alternative_num))
4494 80978036 : alt_p = true;
4495 :
4496 89627109 : if (check_only_p)
4497 33149 : return ! alt_p || best_losers != 0;
4498 :
4499 : /* If insn is commutative (it's safe to exchange a certain pair of
4500 : operands) then we need to try each alternative twice, the second
4501 : time matching those two operands as if we had exchanged them. To
4502 : do this, really exchange them in operands.
4503 :
4504 : If we have just tried the alternatives the second time, return
4505 : operands to normal and drop through. */
4506 :
4507 89607535 : if (reused_alternative_num < 0 && commutative >= 0)
4508 : {
4509 20822410 : curr_swapped = !curr_swapped;
4510 20822410 : if (curr_swapped)
4511 : {
4512 10411205 : swap_operands (commutative);
4513 10411205 : goto try_swapped;
4514 : }
4515 : else
4516 10411205 : swap_operands (commutative);
4517 : }
4518 :
4519 79196330 : if (! alt_p && ! sec_mem_p)
4520 : {
4521 : /* No alternative works with reloads?? */
4522 6 : if (INSN_CODE (curr_insn) >= 0)
4523 0 : fatal_insn ("unable to generate reloads for:", curr_insn);
4524 6 : error_for_asm (curr_insn,
4525 : "inconsistent operand constraints in an %<asm%>");
4526 6 : lra_asm_error_p = true;
4527 6 : if (! JUMP_P (curr_insn))
4528 : {
4529 : /* Avoid further trouble with this insn. Don't generate use
4530 : pattern here as we could use the insn SP offset. */
4531 6 : lra_set_insn_deleted (curr_insn);
4532 : }
4533 : else
4534 : {
4535 0 : lra_invalidate_insn_data (curr_insn);
4536 0 : ira_nullify_asm_goto (curr_insn);
4537 0 : lra_update_insn_regno_info (curr_insn);
4538 : }
4539 6 : return true;
4540 : }
4541 :
4542 : /* If the best alternative is with operands 1 and 2 swapped, swap
4543 : them. Update the operand numbers of any reloads already
4544 : pushed. */
4545 :
4546 79196324 : if (goal_alt_swapped)
4547 : {
4548 600999 : if (lra_dump_file != NULL)
4549 18 : fprintf (lra_dump_file, " Commutative operand exchange in insn %u\n",
4550 18 : INSN_UID (curr_insn));
4551 :
4552 : /* Swap the duplicates too. */
4553 600999 : swap_operands (commutative);
4554 600999 : change_p = true;
4555 : }
4556 :
4557 : /* Some targets' TARGET_SECONDARY_MEMORY_NEEDED (e.g. x86) are defined
4558 : too conservatively. So we use the secondary memory only if there
4559 : is no any alternative without reloads. */
4560 79196324 : use_sec_mem_p = false;
4561 79196324 : if (! alt_p)
4562 : use_sec_mem_p = true;
4563 79196324 : else if (sec_mem_p)
4564 : {
4565 15022 : for (i = 0; i < n_operands; i++)
4566 14851 : if (! goal_alt_win[i] && ! goal_alt_match_win[i])
4567 : break;
4568 13349 : use_sec_mem_p = i < n_operands;
4569 : }
4570 :
4571 13349 : if (use_sec_mem_p)
4572 : {
4573 13178 : int in = -1, out = -1;
4574 13178 : rtx new_reg, src, dest, rld;
4575 13178 : machine_mode sec_mode, rld_mode;
4576 :
4577 13178 : lra_assert (curr_insn_set != NULL_RTX && sec_mem_p);
4578 13178 : dest = SET_DEST (curr_insn_set);
4579 13178 : src = SET_SRC (curr_insn_set);
4580 39534 : for (i = 0; i < n_operands; i++)
4581 26356 : if (*curr_id->operand_loc[i] == dest)
4582 : out = i;
4583 13178 : else if (*curr_id->operand_loc[i] == src)
4584 13178 : in = i;
4585 13178 : for (i = 0; i < curr_static_id->n_dups; i++)
4586 0 : if (out < 0 && *curr_id->dup_loc[i] == dest)
4587 0 : out = curr_static_id->dup_num[i];
4588 0 : else if (in < 0 && *curr_id->dup_loc[i] == src)
4589 0 : in = curr_static_id->dup_num[i];
4590 13178 : lra_assert (out >= 0 && in >= 0
4591 : && curr_static_id->operand[out].type == OP_OUT
4592 : && curr_static_id->operand[in].type == OP_IN);
4593 13178 : rld = partial_subreg_p (GET_MODE (src), GET_MODE (dest)) ? src : dest;
4594 13178 : rld_mode = GET_MODE (rld);
4595 13178 : sec_mode = targetm.secondary_memory_needed_mode (rld_mode);
4596 13178 : if (rld_mode != sec_mode
4597 13178 : && (invalid_mode_reg_p (sec_mode, dest)
4598 19 : || invalid_mode_reg_p (sec_mode, src)))
4599 : sec_mode = rld_mode;
4600 13178 : new_reg = lra_create_new_reg (sec_mode, NULL_RTX, NO_REGS, NULL,
4601 : "secondary");
4602 : /* If the mode is changed, it should be wider. */
4603 13178 : lra_assert (!partial_subreg_p (sec_mode, rld_mode));
4604 13178 : if (sec_mode != rld_mode)
4605 : {
4606 : /* If the target says specifically to use another mode for
4607 : secondary memory moves we cannot reuse the original
4608 : insn. */
4609 19 : after = emit_spill_move (false, new_reg, dest);
4610 19 : lra_process_new_insns (curr_insn, NULL, after,
4611 : "Inserting the sec. move");
4612 : /* We may have non null BEFORE here (e.g. after address
4613 : processing. */
4614 19 : push_to_sequence (before);
4615 19 : before = emit_spill_move (true, new_reg, src);
4616 19 : emit_insn (before);
4617 19 : before = end_sequence ();
4618 19 : lra_process_new_insns (curr_insn, before, NULL, "Changing on");
4619 19 : lra_set_insn_deleted (curr_insn);
4620 : }
4621 13159 : else if (dest == rld)
4622 : {
4623 13159 : *curr_id->operand_loc[out] = new_reg;
4624 13159 : lra_update_dup (curr_id, out);
4625 13159 : after = emit_spill_move (false, new_reg, dest);
4626 13159 : lra_process_new_insns (curr_insn, NULL, after,
4627 : "Inserting the sec. move");
4628 : }
4629 : else
4630 : {
4631 0 : *curr_id->operand_loc[in] = new_reg;
4632 0 : lra_update_dup (curr_id, in);
4633 : /* See comments above. */
4634 0 : push_to_sequence (before);
4635 0 : before = emit_spill_move (true, new_reg, src);
4636 0 : emit_insn (before);
4637 0 : before = end_sequence ();
4638 0 : lra_process_new_insns (curr_insn, before, NULL,
4639 : "Inserting the sec. move");
4640 : }
4641 13178 : lra_update_insn_regno_info (curr_insn);
4642 13178 : return true;
4643 : }
4644 :
4645 79183146 : lra_assert (goal_alt_number >= 0);
4646 158271312 : lra_set_used_insn_alternative (curr_insn, goal_reuse_alt_p
4647 : ? goal_alt_number : LRA_UNKNOWN_ALT);
4648 :
4649 79183146 : if (lra_dump_file != NULL)
4650 : {
4651 1187 : const char *p;
4652 :
4653 1187 : fprintf (lra_dump_file, " Choosing alt %d in insn %u:",
4654 1187 : goal_alt_number, INSN_UID (curr_insn));
4655 1187 : print_curr_insn_alt (goal_alt_number);
4656 1187 : if (INSN_CODE (curr_insn) >= 0
4657 1187 : && (p = get_insn_name (INSN_CODE (curr_insn))) != NULL)
4658 1180 : fprintf (lra_dump_file, " {%s}", p);
4659 1187 : if (maybe_ne (curr_id->sp_offset, 0))
4660 : {
4661 0 : fprintf (lra_dump_file, " (sp_off=");
4662 0 : print_dec (curr_id->sp_offset, lra_dump_file);
4663 0 : fprintf (lra_dump_file, ")");
4664 : }
4665 1187 : fprintf (lra_dump_file, "\n");
4666 : }
4667 :
4668 : /* Right now, for any pair of operands I and J that are required to
4669 : match, with J < I, goal_alt_matches[I] is J. Add I to
4670 : goal_alt_matched[J]. */
4671 :
4672 255663948 : for (i = 0; i < n_operands; i++)
4673 176480802 : if ((j = goal_alt_matches[i]) >= 0)
4674 : {
4675 10426666 : for (k = 0; goal_alt_matched[j][k] >= 0; k++)
4676 : ;
4677 : /* We allow matching one output operand and several input
4678 : operands. */
4679 10426665 : lra_assert (k == 0
4680 : || (curr_static_id->operand[j].type == OP_OUT
4681 : && curr_static_id->operand[i].type == OP_IN
4682 : && (curr_static_id->operand
4683 : [goal_alt_matched[j][0]].type == OP_IN)));
4684 10426665 : goal_alt_matched[j][k] = i;
4685 10426665 : goal_alt_matched[j][k + 1] = -1;
4686 : }
4687 :
4688 255663948 : for (i = 0; i < n_operands; i++)
4689 176480802 : goal_alt_win[i] |= goal_alt_match_win[i];
4690 :
4691 : /* Any constants that aren't allowed and can't be reloaded into
4692 : registers are here changed into memory references. */
4693 255663948 : for (i = 0; i < n_operands; i++)
4694 176480802 : if (goal_alt_win[i])
4695 : {
4696 170450873 : int regno;
4697 170450873 : enum reg_class new_class;
4698 170450873 : rtx reg = *curr_id->operand_loc[i];
4699 :
4700 170450873 : if (GET_CODE (reg) == SUBREG)
4701 3306125 : reg = SUBREG_REG (reg);
4702 :
4703 170450873 : if (REG_P (reg) && (regno = REGNO (reg)) >= FIRST_PSEUDO_REGISTER)
4704 : {
4705 77272453 : bool ok_p = in_class_p (reg, goal_alt[i], &new_class, true);
4706 :
4707 77272453 : if (new_class != NO_REGS && get_reg_class (regno) != new_class)
4708 : {
4709 3479607 : lra_assert (ok_p);
4710 3479607 : lra_change_class (regno, new_class, " Change to", true);
4711 : }
4712 : }
4713 : }
4714 : else
4715 : {
4716 6029929 : const char *constraint;
4717 6029929 : char c;
4718 6029929 : rtx op = *curr_id->operand_loc[i];
4719 6029929 : rtx subreg = NULL_RTX;
4720 6029929 : machine_mode mode = curr_operand_mode[i];
4721 :
4722 6029929 : if (GET_CODE (op) == SUBREG)
4723 : {
4724 240191 : subreg = op;
4725 240191 : op = SUBREG_REG (op);
4726 240191 : mode = GET_MODE (op);
4727 : }
4728 :
4729 6246309 : if (CONST_POOL_OK_P (mode, op)
4730 6246309 : && ((targetm.preferred_reload_class
4731 216380 : (op, (enum reg_class) goal_alt[i]) == NO_REGS)
4732 71337 : || no_input_reloads_p))
4733 : {
4734 145043 : rtx tem = force_const_mem (mode, op);
4735 :
4736 145043 : change_p = true;
4737 145043 : if (subreg != NULL_RTX)
4738 0 : tem = gen_rtx_SUBREG (mode, tem, SUBREG_BYTE (subreg));
4739 :
4740 145043 : *curr_id->operand_loc[i] = tem;
4741 145043 : lra_update_dup (curr_id, i);
4742 145043 : process_address (i, false, &before, &after);
4743 :
4744 : /* If the alternative accepts constant pool refs directly
4745 : there will be no reload needed at all. */
4746 145043 : if (subreg != NULL_RTX)
4747 0 : continue;
4748 : /* Skip alternatives before the one requested. */
4749 145043 : constraint = (curr_static_id->operand_alternative
4750 145043 : [goal_alt_number * n_operands + i].constraint);
4751 145043 : for (;
4752 245736 : (c = *constraint) && c != ',' && c != '#';
4753 100693 : constraint += CONSTRAINT_LEN (c, constraint))
4754 : {
4755 198994 : enum constraint_num cn = lookup_constraint (constraint);
4756 198994 : if ((insn_extra_memory_constraint (cn)
4757 100838 : || insn_extra_special_memory_constraint (cn)
4758 : || insn_extra_relaxed_memory_constraint (cn))
4759 199139 : && satisfies_memory_constraint_p (tem, cn))
4760 : break;
4761 : }
4762 145043 : if (c == '\0' || c == ',' || c == '#')
4763 46742 : continue;
4764 :
4765 98301 : goal_alt_win[i] = true;
4766 : }
4767 : }
4768 :
4769 : n_outputs = 0;
4770 255663948 : for (i = 0; i < n_operands; i++)
4771 176480802 : if (curr_static_id->operand[i].type == OP_OUT)
4772 68726896 : outputs[n_outputs++] = i;
4773 79183146 : outputs[n_outputs] = -1;
4774 255663948 : for (i = 0; i < n_operands; i++)
4775 : {
4776 176480802 : int regno;
4777 176480802 : bool optional_p = false;
4778 176480802 : rtx old, new_reg;
4779 176480802 : rtx op = *curr_id->operand_loc[i];
4780 :
4781 176480802 : if (goal_alt_win[i])
4782 : {
4783 170549174 : if (goal_alt[i] == NO_REGS
4784 46527153 : && REG_P (op)
4785 5352913 : && (regno = REGNO (op)) >= FIRST_PSEUDO_REGISTER
4786 : /* We assigned a hard register to the pseudo in the past but now
4787 : decided to spill it for the insn. If the pseudo is used only
4788 : in this insn, it is better to spill it here as we free hard
4789 : registers for other pseudos referenced in the insn. The most
4790 : common case of this is a scratch register which will be
4791 : transformed to scratch back at the end of LRA. */
4792 173420488 : && !multiple_insn_refs_p (regno))
4793 : {
4794 11162 : if (lra_get_allocno_class (regno) != NO_REGS)
4795 5301 : lra_change_class (regno, NO_REGS, " Change to", true);
4796 5581 : reg_renumber[regno] = -1;
4797 : }
4798 : /* We can do an optional reload. If the pseudo got a hard
4799 : reg, we might improve the code through inheritance. If
4800 : it does not get a hard register we coalesce memory/memory
4801 : moves later. Ignore move insns to avoid cycling. */
4802 170549174 : if (! lra_simple_p
4803 170004496 : && lra_undo_inheritance_iter < LRA_MAX_INHERITANCE_PASSES
4804 157563892 : && goal_alt[i] != NO_REGS && REG_P (op)
4805 78234211 : && (regno = REGNO (op)) >= FIRST_PSEUDO_REGISTER
4806 65439792 : && regno < new_regno_start
4807 60728550 : && ! ira_former_scratch_p (regno)
4808 60674351 : && reg_renumber[regno] < 0
4809 : /* Check that the optional reload pseudo will be able to
4810 : hold given mode value. */
4811 3836221 : && ! (prohibited_class_reg_set_mode_p
4812 3836221 : (goal_alt[i], reg_class_contents[goal_alt[i]],
4813 3836221 : PSEUDO_REGNO_MODE (regno)))
4814 174385385 : && (curr_insn_set == NULL_RTX
4815 3829083 : || !((REG_P (SET_SRC (curr_insn_set))
4816 : || MEM_P (SET_SRC (curr_insn_set))
4817 : || GET_CODE (SET_SRC (curr_insn_set)) == SUBREG)
4818 3194610 : && (REG_P (SET_DEST (curr_insn_set))
4819 : || MEM_P (SET_DEST (curr_insn_set))
4820 : || GET_CODE (SET_DEST (curr_insn_set)) == SUBREG))))
4821 : optional_p = true;
4822 169907534 : else if (goal_alt_matched[i][0] != -1
4823 8758240 : && curr_static_id->operand[i].type == OP_OUT
4824 8757107 : && (curr_static_id->operand_alternative
4825 8757107 : [goal_alt_number * n_operands + i].earlyclobber)
4826 18454 : && REG_P (op))
4827 : {
4828 23091 : for (j = 0; goal_alt_matched[i][j] != -1; j++)
4829 : {
4830 18401 : rtx op2 = *curr_id->operand_loc[goal_alt_matched[i][j]];
4831 :
4832 18401 : if (REG_P (op2) && REGNO (op) != REGNO (op2))
4833 : break;
4834 : }
4835 18401 : if (goal_alt_matched[i][j] != -1)
4836 : {
4837 : /* Generate reloads for different output and matched
4838 : input registers. This is the easiest way to avoid
4839 : creation of non-existing register conflicts in
4840 : lra-lives.cc. */
4841 13711 : match_reload (i, goal_alt_matched[i], outputs, goal_alt[i],
4842 : &goal_alt_exclude_start_hard_regs[i], &before,
4843 : &after, true);
4844 : }
4845 171480114 : continue;
4846 18401 : }
4847 : else
4848 : {
4849 169889133 : enum reg_class rclass, common_class;
4850 :
4851 88995009 : if (REG_P (op) && goal_alt[i] != NO_REGS
4852 83642096 : && (regno = REGNO (op)) >= new_regno_start
4853 4720290 : && (rclass = get_reg_class (regno)) == ALL_REGS
4854 0 : && ((common_class = ira_reg_class_subset[rclass][goal_alt[i]])
4855 : != NO_REGS)
4856 0 : && common_class != ALL_REGS
4857 169889133 : && enough_allocatable_hard_regs_p (common_class,
4858 0 : GET_MODE (op)))
4859 : /* Refine reload pseudo class from chosen alternative
4860 : constraint. */
4861 0 : lra_change_class (regno, common_class, " Change to", true);
4862 169889133 : continue;
4863 169889133 : }
4864 : }
4865 :
4866 : /* Operands that match previous ones have already been handled. */
4867 6573268 : if (goal_alt_matches[i] >= 0)
4868 1572580 : continue;
4869 :
4870 : /* We should not have an operand with a non-offsettable address
4871 : appearing where an offsettable address will do. It also may
4872 : be a case when the address should be special in other words
4873 : not a general one (e.g. it needs no index reg). */
4874 5000688 : if (goal_alt_matched[i][0] == -1 && goal_alt_offmemok[i] && MEM_P (op))
4875 : {
4876 72 : enum reg_class rclass;
4877 72 : rtx *loc = &XEXP (op, 0);
4878 72 : enum rtx_code code = GET_CODE (*loc);
4879 :
4880 72 : push_to_sequence (before);
4881 72 : rclass = base_reg_class (GET_MODE (op), MEM_ADDR_SPACE (op),
4882 : MEM, SCRATCH, curr_insn);
4883 72 : if (GET_RTX_CLASS (code) == RTX_AUTOINC)
4884 0 : new_reg = emit_inc (rclass, *loc,
4885 : /* This value does not matter for MODIFY. */
4886 0 : GET_MODE_SIZE (GET_MODE (op)));
4887 86 : else if (get_reload_reg (OP_IN, Pmode, *loc, rclass,
4888 : NULL, false, false,
4889 : "offsetable address", &new_reg))
4890 : {
4891 72 : rtx addr = *loc;
4892 72 : enum rtx_code code = GET_CODE (addr);
4893 72 : bool align_p = false;
4894 :
4895 72 : if (code == AND && CONST_INT_P (XEXP (addr, 1)))
4896 : {
4897 : /* (and ... (const_int -X)) is used to align to X bytes. */
4898 0 : align_p = true;
4899 0 : addr = XEXP (*loc, 0);
4900 : }
4901 : else
4902 72 : addr = canonicalize_reload_addr (addr);
4903 :
4904 72 : lra_emit_move (new_reg, addr);
4905 72 : if (align_p)
4906 0 : emit_move_insn (new_reg, gen_rtx_AND (GET_MODE (new_reg), new_reg, XEXP (*loc, 1)));
4907 : }
4908 72 : before = end_sequence ();
4909 72 : *loc = new_reg;
4910 72 : lra_update_dup (curr_id, i);
4911 72 : }
4912 5000616 : else if (goal_alt_matched[i][0] == -1)
4913 : {
4914 3332192 : machine_mode mode;
4915 3332192 : rtx reg, *loc;
4916 3332192 : int hard_regno;
4917 3332192 : enum op_type type = curr_static_id->operand[i].type;
4918 :
4919 3332192 : loc = curr_id->operand_loc[i];
4920 3332192 : mode = curr_operand_mode[i];
4921 3332192 : if (GET_CODE (*loc) == SUBREG)
4922 : {
4923 75487 : reg = SUBREG_REG (*loc);
4924 75487 : poly_int64 byte = SUBREG_BYTE (*loc);
4925 75487 : if (REG_P (reg)
4926 : /* Strict_low_part requires reloading the register and not
4927 : just the subreg. Likewise for a strict subreg no wider
4928 : than a word for WORD_REGISTER_OPERATIONS targets. */
4929 75487 : && (curr_static_id->operand[i].strict_low
4930 75413 : || (!paradoxical_subreg_p (mode, GET_MODE (reg))
4931 72801 : && (hard_regno
4932 72801 : = get_try_hard_regno (REGNO (reg))) >= 0
4933 71239 : && (simplify_subreg_regno
4934 146726 : (hard_regno,
4935 71239 : GET_MODE (reg), byte, mode) < 0)
4936 0 : && (goal_alt[i] == NO_REGS
4937 0 : || (simplify_subreg_regno
4938 75487 : (ira_class_hard_regs[goal_alt[i]][0],
4939 0 : GET_MODE (reg), byte, mode) >= 0)))
4940 75413 : || (partial_subreg_p (mode, GET_MODE (reg))
4941 75413 : && known_le (GET_MODE_SIZE (GET_MODE (reg)),
4942 : UNITS_PER_WORD)
4943 : && WORD_REGISTER_OPERATIONS))
4944 : /* Avoid the situation when there are no available hard regs
4945 : for the pseudo mode but there are ones for the subreg
4946 : mode: */
4947 75561 : && !(goal_alt[i] != NO_REGS
4948 74 : && REGNO (reg) >= FIRST_PSEUDO_REGISTER
4949 74 : && (prohibited_class_reg_set_mode_p
4950 74 : (goal_alt[i], reg_class_contents[goal_alt[i]],
4951 74 : GET_MODE (reg)))
4952 : && !(prohibited_class_reg_set_mode_p
4953 0 : (goal_alt[i], reg_class_contents[goal_alt[i]],
4954 : mode))))
4955 : {
4956 : /* An OP_INOUT is required when reloading a subreg of a
4957 : mode wider than a word to ensure that data beyond the
4958 : word being reloaded is preserved. Also automatically
4959 : ensure that strict_low_part reloads are made into
4960 : OP_INOUT which should already be true from the backend
4961 : constraints. */
4962 74 : if (type == OP_OUT
4963 74 : && (curr_static_id->operand[i].strict_low
4964 0 : || read_modify_subreg_p (*loc)))
4965 : type = OP_INOUT;
4966 74 : loc = &SUBREG_REG (*loc);
4967 74 : mode = GET_MODE (*loc);
4968 : }
4969 : }
4970 3332192 : old = *loc;
4971 3332192 : if (get_reload_reg (type, mode, old, goal_alt[i],
4972 : &goal_alt_exclude_start_hard_regs[i],
4973 3332192 : loc != curr_id->operand_loc[i],
4974 3332192 : curr_static_id->operand_alternative
4975 3332192 : [goal_alt_number * n_operands + i].earlyclobber,
4976 : "", &new_reg)
4977 3332192 : && type != OP_OUT)
4978 : {
4979 2362680 : push_to_sequence (before);
4980 2362680 : lra_emit_move (new_reg, old);
4981 2362680 : before = end_sequence ();
4982 : }
4983 3332192 : *loc = new_reg;
4984 3332192 : if (type != OP_IN
4985 968428 : && find_reg_note (curr_insn, REG_UNUSED, old) == NULL_RTX
4986 : /* OLD can be an equivalent constant here. */
4987 943586 : && !CONSTANT_P (old)
4988 : /* No need to write back anything for a scratch. */
4989 943586 : && GET_CODE (old) != SCRATCH
4990 4275778 : && (!REG_P(old) || !ira_former_scratch_p (REGNO (old))))
4991 : {
4992 943586 : start_sequence ();
4993 943586 : lra_emit_move (type == OP_INOUT ? copy_rtx (old) : old, new_reg);
4994 943586 : emit_insn (after);
4995 943586 : after = end_sequence ();
4996 943586 : *loc = new_reg;
4997 : }
4998 3332192 : for (j = 0; j < goal_alt_dont_inherit_ops_num; j++)
4999 619 : if (goal_alt_dont_inherit_ops[j] == i)
5000 : {
5001 619 : lra_set_regno_unique_value (REGNO (new_reg));
5002 619 : break;
5003 : }
5004 3332192 : lra_update_dup (curr_id, i);
5005 : }
5006 1668424 : else if (curr_static_id->operand[i].type == OP_IN
5007 1668424 : && (curr_static_id->operand[goal_alt_matched[i][0]].type
5008 : == OP_OUT
5009 0 : || (curr_static_id->operand[goal_alt_matched[i][0]].type
5010 : == OP_INOUT
5011 0 : && (operands_match_p
5012 0 : (*curr_id->operand_loc[i],
5013 0 : *curr_id->operand_loc[goal_alt_matched[i][0]],
5014 : -1)))))
5015 : {
5016 : /* generate reloads for input and matched outputs. */
5017 14754 : match_inputs[0] = i;
5018 14754 : match_inputs[1] = -1;
5019 14754 : match_reload (goal_alt_matched[i][0], match_inputs, outputs,
5020 : goal_alt[i], &goal_alt_exclude_start_hard_regs[i],
5021 : &before, &after,
5022 14754 : curr_static_id->operand_alternative
5023 14754 : [goal_alt_number * n_operands + goal_alt_matched[i][0]]
5024 14754 : .earlyclobber);
5025 : }
5026 1653670 : else if ((curr_static_id->operand[i].type == OP_OUT
5027 0 : || (curr_static_id->operand[i].type == OP_INOUT
5028 0 : && (operands_match_p
5029 0 : (*curr_id->operand_loc[i],
5030 0 : *curr_id->operand_loc[goal_alt_matched[i][0]],
5031 : -1))))
5032 1653670 : && (curr_static_id->operand[goal_alt_matched[i][0]].type
5033 : == OP_IN))
5034 : /* Generate reloads for output and matched inputs. */
5035 1653670 : match_reload (i, goal_alt_matched[i], outputs, goal_alt[i],
5036 : &goal_alt_exclude_start_hard_regs[i], &before, &after,
5037 1653670 : curr_static_id->operand_alternative
5038 1653670 : [goal_alt_number * n_operands + i].earlyclobber);
5039 0 : else if (curr_static_id->operand[i].type == OP_IN
5040 0 : && (curr_static_id->operand[goal_alt_matched[i][0]].type
5041 : == OP_IN))
5042 : {
5043 : /* Generate reloads for matched inputs. */
5044 0 : match_inputs[0] = i;
5045 0 : for (j = 0; (k = goal_alt_matched[i][j]) >= 0; j++)
5046 0 : match_inputs[j + 1] = k;
5047 0 : match_inputs[j + 1] = -1;
5048 0 : match_reload (-1, match_inputs, outputs, goal_alt[i],
5049 : &goal_alt_exclude_start_hard_regs[i],
5050 : &before, &after, false);
5051 : }
5052 : else
5053 : /* We must generate code in any case when function
5054 : process_alt_operands decides that it is possible. */
5055 0 : gcc_unreachable ();
5056 :
5057 5000688 : if (optional_p)
5058 : {
5059 641640 : rtx reg = op;
5060 :
5061 641640 : lra_assert (REG_P (reg));
5062 641640 : regno = REGNO (reg);
5063 641640 : op = *curr_id->operand_loc[i]; /* Substitution. */
5064 641640 : if (GET_CODE (op) == SUBREG)
5065 0 : op = SUBREG_REG (op);
5066 641640 : gcc_assert (REG_P (op) && (int) REGNO (op) >= new_regno_start);
5067 641640 : bitmap_set_bit (&lra_optional_reload_pseudos, REGNO (op));
5068 641640 : lra_reg_info[REGNO (op)].restore_rtx = reg;
5069 641640 : if (lra_dump_file != NULL)
5070 3 : fprintf (lra_dump_file,
5071 : " Making reload reg %d for reg %d optional\n",
5072 : REGNO (op), regno);
5073 : }
5074 : }
5075 74915206 : if (before != NULL_RTX || after != NULL_RTX
5076 153289244 : || max_regno_before != max_reg_num ())
5077 5102355 : change_p = true;
5078 79183146 : if (change_p)
5079 : {
5080 6055680 : lra_update_operator_dups (curr_id);
5081 : /* Something changes -- process the insn. */
5082 6055680 : lra_update_insn_regno_info (curr_insn);
5083 6055680 : if (asm_noperands (PATTERN (curr_insn)) >= 0
5084 6055680 : && ++curr_id->asm_reloads_num >= FIRST_PSEUDO_REGISTER)
5085 : /* Most probably there are no enough registers to satisfy asm insn: */
5086 : {
5087 11 : lra_asm_insn_error (curr_insn);
5088 11 : return change_p;
5089 : }
5090 : }
5091 79183135 : if (goal_alt_out_sp_reload_p)
5092 : {
5093 : /* We have an output stack pointer reload -- update sp offset: */
5094 0 : rtx set;
5095 0 : bool done_p = false;
5096 0 : poly_int64 sp_offset = curr_id->sp_offset;
5097 0 : for (rtx_insn *insn = after; insn != NULL_RTX; insn = NEXT_INSN (insn))
5098 0 : if ((set = single_set (insn)) != NULL_RTX
5099 0 : && SET_DEST (set) == stack_pointer_rtx)
5100 : {
5101 0 : lra_assert (!done_p);
5102 0 : done_p = true;
5103 0 : curr_id->sp_offset = 0;
5104 0 : lra_insn_recog_data_t id = lra_get_insn_recog_data (insn);
5105 0 : id->sp_offset = sp_offset;
5106 0 : if (lra_dump_file != NULL)
5107 0 : fprintf (lra_dump_file,
5108 : " Moving sp offset from insn %u to %u\n",
5109 0 : INSN_UID (curr_insn), INSN_UID (insn));
5110 : }
5111 0 : lra_assert (done_p);
5112 : }
5113 79183135 : int const_regno = -1;
5114 79183135 : rtx set;
5115 79183135 : rtx_insn *prev, *const_insn = NULL;
5116 4267935 : if (before != NULL_RTX && (prev = PREV_INSN (curr_insn)) != NULL_RTX
5117 83451070 : && (set = single_set (prev)) != NULL_RTX && CONSTANT_P (SET_SRC (set)))
5118 : {
5119 324008 : rtx reg = SET_DEST (set);
5120 324008 : if (GET_CODE (reg) == SUBREG)
5121 8828 : reg = SUBREG_REG (reg);
5122 : /* Consider only reload insns as we don't want to change the order
5123 : created by previous optimizations. */
5124 235189 : if (REG_P (reg) && (int) REGNO (reg) >= lra_new_regno_start
5125 324812 : && bitmap_bit_p (&lra_reg_info[REGNO (reg)].insn_bitmap,
5126 804 : INSN_UID (curr_insn)))
5127 : {
5128 320 : const_regno = REGNO (reg);
5129 320 : const_insn = prev;
5130 : }
5131 : }
5132 79183135 : if (asm_noperands (PATTERN (curr_insn)) >= 0)
5133 : {
5134 : /* Asm can have a lot of operands. To guarantee their assignment,
5135 : postpone processing the reload insns until the reload pseudos are
5136 : assigned. */
5137 46225 : postpone_insns (before);
5138 46225 : postpone_insns (after);
5139 : }
5140 79183135 : lra_process_new_insns (curr_insn, before, after,
5141 : "Inserting insn reload", true);
5142 79183135 : if (const_regno >= 0) {
5143 640 : bool move_p = true;
5144 640 : for (rtx_insn *insn = before; insn != curr_insn; insn = NEXT_INSN (insn))
5145 320 : if (bitmap_bit_p (&lra_reg_info[const_regno].insn_bitmap,
5146 320 : INSN_UID (insn)))
5147 : {
5148 : move_p = false;
5149 : break;
5150 : }
5151 320 : if (move_p)
5152 : {
5153 320 : reorder_insns_nobb (const_insn, const_insn, PREV_INSN (curr_insn));
5154 320 : if (lra_dump_file != NULL)
5155 : {
5156 0 : dump_insn_slim (lra_dump_file, const_insn);
5157 0 : fprintf (lra_dump_file,
5158 : " to decrease reg pressure, it is moved before:\n");
5159 0 : dump_insn_slim (lra_dump_file, curr_insn);
5160 : }
5161 : }
5162 : }
5163 : return change_p;
5164 : }
5165 :
5166 : /* Return true if INSN satisfies all constraints. In other words, no
5167 : reload insns are needed. */
5168 : bool
5169 3412 : lra_constrain_insn (rtx_insn *insn)
5170 : {
5171 3412 : int saved_new_regno_start = new_regno_start;
5172 3412 : int saved_new_insn_uid_start = new_insn_uid_start;
5173 3412 : bool change_p;
5174 :
5175 3412 : curr_insn = insn;
5176 3412 : curr_id = lra_get_insn_recog_data (curr_insn);
5177 3412 : curr_static_id = curr_id->insn_static_data;
5178 3412 : new_insn_uid_start = get_max_uid ();
5179 3412 : new_regno_start = max_reg_num ();
5180 3412 : change_p = curr_insn_transform (true);
5181 3412 : new_regno_start = saved_new_regno_start;
5182 3412 : new_insn_uid_start = saved_new_insn_uid_start;
5183 3412 : return ! change_p;
5184 : }
5185 :
5186 : /* Return true if X is in LIST. */
5187 : static bool
5188 1318008 : in_list_p (rtx x, rtx list)
5189 : {
5190 2253713 : for (; list != NULL_RTX; list = XEXP (list, 1))
5191 1238108 : if (XEXP (list, 0) == x)
5192 : return true;
5193 : return false;
5194 : }
5195 :
/* Return true if X contains an allocatable hard register (if
   HARD_REG_P) or a (spilled if SPILLED_P) pseudo.

   The expression X is walked recursively.  With HARD_REG_P, a pseudo
   counts too when it has been assigned an allocatable hard register.
   SPILLED_P is only consulted when HARD_REG_P is false.  */
static bool
contains_reg_p (rtx x, bool hard_reg_p, bool spilled_p)
{
  int i, j;
  const char *fmt;
  enum rtx_code code;

  code = GET_CODE (x);
  if (REG_P (x))
    {
      int regno = REGNO (x);
      HARD_REG_SET alloc_regs;

      if (hard_reg_p)
	{
	  /* Map a pseudo to its assigned hard register, if any.  */
	  if (regno >= FIRST_PSEUDO_REGISTER)
	    regno = lra_get_regno_hard_regno (regno);
	  if (regno < 0)
	    return false;
	  /* The register qualifies only if it overlaps the set of
	     registers available for allocation.  */
	  alloc_regs = ~lra_no_alloc_regs;
	  return overlaps_hard_reg_set_p (alloc_regs, GET_MODE (x), regno);
	}
      else
	{
	  if (regno < FIRST_PSEUDO_REGISTER)
	    return false;
	  if (! spilled_p)
	    return true;
	  /* A pseudo without a hard register is (or will be) spilled.  */
	  return lra_get_regno_hard_regno (regno) < 0;
	}
    }
  /* Recurse into sub-expressions ('e') and vectors of
     sub-expressions ('E').  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  if (contains_reg_p (XEXP (x, i), hard_reg_p, spilled_p))
	    return true;
	}
      else if (fmt[i] == 'E')
	{
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    if (contains_reg_p (XVECEXP (x, i, j), hard_reg_p, spilled_p))
	      return true;
	}
    }
  return false;
}
5246 :
/* Process all regs in location *LOC and change them on equivalent
   substitution.  Return true if any change was done.

   Substitution is destructive: *LOC (or a sub-location) is rewritten
   in place when an equivalence is found.  */
static bool
loc_equivalence_change_p (rtx *loc)
{
  rtx subst, reg, x = *loc;
  bool result = false;
  enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i, j;

  if (code == SUBREG)
    {
      reg = SUBREG_REG (x);
      /* VOIDmode equivalence means a constant here.  */
      if ((subst = get_equiv_with_elimination (reg, curr_insn)) != reg
	  && GET_MODE (subst) == VOIDmode)
	{
	  /* We cannot reload debug location.  Simplify subreg here
	     while we know the inner mode.  */
	  *loc = simplify_gen_subreg (GET_MODE (x), subst,
				      GET_MODE (reg), SUBREG_BYTE (x));
	  return true;
	}
    }
  if (code == REG && (subst = get_equiv_with_elimination (x, curr_insn)) != x)
    {
      *loc = subst;
      return true;
    }

  /* Scan all the operand sub-expressions.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      /* Note: the recursive call is deliberately placed before
	 "|| result" so that || short-circuiting never skips the
	 substitution in a sub-expression.  */
      if (fmt[i] == 'e')
	result = loc_equivalence_change_p (&XEXP (x, i)) || result;
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  result
	    = loc_equivalence_change_p (&XVECEXP (x, i, j)) || result;
    }
  return result;
}
5290 :
5291 : /* Similar to loc_equivalence_change_p, but for use as
5292 : simplify_replace_fn_rtx callback. DATA is insn for which the
5293 : elimination is done. If it null we don't do the elimination. */
5294 : static rtx
5295 42596258 : loc_equivalence_callback (rtx loc, const_rtx, void *data)
5296 : {
5297 42596258 : if (!REG_P (loc))
5298 : return NULL_RTX;
5299 :
5300 10985443 : rtx subst = (data == NULL
5301 10985443 : ? get_equiv (loc) : get_equiv_with_elimination (loc, (rtx_insn *) data));
5302 10985443 : if (subst != loc)
5303 : return subst;
5304 :
5305 : return NULL_RTX;
5306 : }
5307 :
/* Maximum number of generated reload insns per an insn.  It is for
   preventing this pass cycling in a buggy case: exceeding the limit
   is reported as an internal error rather than looping forever.  */
#define MAX_RELOAD_INSNS_NUMBER LRA_MAX_INSN_RELOADS

/* The current iteration number of this LRA pass.  */
int lra_constraint_iter;

/* True if we should during assignment sub-pass check assignment
   correctness for all pseudos and spill some of them to correct
   conflicts.  It can be necessary when we substitute equiv which
   needs checking register allocation correctness because the
   equivalent value contains allocatable hard registers, or when we
   restore multi-register pseudo, or when we change the insn code and
   its operand became INOUT operand when it was IN one before.  */
bool check_and_force_assignment_correctness_p;
5323 :
5324 : /* Return true if REGNO is referenced in more than one block. */
5325 : static bool
5326 146416 : multi_block_pseudo_p (int regno)
5327 : {
5328 146416 : basic_block bb = NULL;
5329 146416 : unsigned int uid;
5330 146416 : bitmap_iterator bi;
5331 :
5332 146416 : if (regno < FIRST_PSEUDO_REGISTER)
5333 : return false;
5334 :
5335 448150 : EXECUTE_IF_SET_IN_BITMAP (&lra_reg_info[regno].insn_bitmap, 0, uid, bi)
5336 306541 : if (bb == NULL)
5337 146416 : bb = BLOCK_FOR_INSN (lra_insn_recog_data[uid]->insn);
5338 160125 : else if (BLOCK_FOR_INSN (lra_insn_recog_data[uid]->insn) != bb)
5339 : return true;
5340 : return false;
5341 : }
5342 :
5343 : /* Return true if LIST contains a deleted insn. */
5344 : static bool
5345 712116 : contains_deleted_insn_p (rtx_insn_list *list)
5346 : {
5347 1359451 : for (; list != NULL_RTX; list = list->next ())
5348 647335 : if (NOTE_P (list->insn ())
5349 647335 : && NOTE_KIND (list->insn ()) == NOTE_INSN_DELETED)
5350 : return true;
5351 : return false;
5352 : }
5353 :
5354 : /* Return true if X contains a pseudo dying in INSN. */
5355 : static bool
5356 2219218 : dead_pseudo_p (rtx x, rtx_insn *insn)
5357 : {
5358 2219218 : int i, j;
5359 2219218 : const char *fmt;
5360 2219218 : enum rtx_code code;
5361 :
5362 2219218 : if (REG_P (x))
5363 494180 : return (insn != NULL_RTX
5364 494180 : && find_regno_note (insn, REG_DEAD, REGNO (x)) != NULL_RTX);
5365 1725038 : code = GET_CODE (x);
5366 1725038 : fmt = GET_RTX_FORMAT (code);
5367 4411482 : for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5368 : {
5369 2691924 : if (fmt[i] == 'e')
5370 : {
5371 1318788 : if (dead_pseudo_p (XEXP (x, i), insn))
5372 : return true;
5373 : }
5374 1373136 : else if (fmt[i] == 'E')
5375 : {
5376 278513 : for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5377 254489 : if (dead_pseudo_p (XVECEXP (x, i, j), insn))
5378 : return true;
5379 : }
5380 : }
5381 : return false;
5382 : }
5383 :
5384 : /* Return true if INSN contains a dying pseudo in INSN right hand
5385 : side. */
5386 : static bool
5387 645941 : insn_rhs_dead_pseudo_p (rtx_insn *insn)
5388 : {
5389 645941 : rtx set = single_set (insn);
5390 :
5391 645941 : gcc_assert (set != NULL);
5392 645941 : return dead_pseudo_p (SET_SRC (set), insn);
5393 : }
5394 :
5395 : /* Return true if any init insn of REGNO contains a dying pseudo in
5396 : insn right hand side. */
5397 : static bool
5398 710722 : init_insn_rhs_dead_pseudo_p (int regno)
5399 : {
5400 710722 : rtx_insn_list *insns = ira_reg_equiv[regno].init_insns;
5401 :
5402 710722 : if (insns == NULL)
5403 : return false;
5404 1286356 : for (; insns != NULL_RTX; insns = insns->next ())
5405 645941 : if (insn_rhs_dead_pseudo_p (insns->insn ()))
5406 : return true;
5407 : return false;
5408 : }
5409 :
5410 : /* Return TRUE if REGNO has a reverse equivalence. The equivalence is
5411 : reverse only if we have one init insn with given REGNO as a
5412 : source. */
5413 : static bool
5414 712116 : reverse_equiv_p (int regno)
5415 : {
5416 712116 : rtx_insn_list *insns = ira_reg_equiv[regno].init_insns;
5417 712116 : rtx set;
5418 :
5419 712116 : if (insns == NULL)
5420 : return false;
5421 647335 : if (! INSN_P (insns->insn ())
5422 1294670 : || insns->next () != NULL)
5423 : return false;
5424 647335 : if ((set = single_set (insns->insn ())) == NULL_RTX)
5425 : return false;
5426 647335 : return REG_P (SET_SRC (set)) && (int) REGNO (SET_SRC (set)) == regno;
5427 : }
5428 :
5429 : /* Return TRUE if REGNO was reloaded in an equivalence init insn. We
5430 : call this function only for non-reverse equivalence. */
5431 : static bool
5432 705196 : contains_reloaded_insn_p (int regno)
5433 : {
5434 705196 : rtx set;
5435 705196 : rtx_insn_list *list = ira_reg_equiv[regno].init_insns;
5436 :
5437 1345611 : for (; list != NULL; list = list->next ())
5438 640415 : if ((set = single_set (list->insn ())) == NULL_RTX
5439 640415 : || ! REG_P (SET_DEST (set))
5440 1280830 : || (int) REGNO (SET_DEST (set)) != regno)
5441 : return true;
5442 : return false;
5443 : }
5444 :
/* Try combine secondary memory reload insn FROM for insn TO into TO insn.
   FROM should be a load insn (usually a secondary memory reload insn).  Return
   TRUE in case of success.

   The combination is attempted by substituting FROM's destination
   register with its source inside TO's inputs, then checking that TO
   still recognizes and needs no reloads.  On failure every change is
   rolled back and FALSE is returned.  */
static bool
combine_reload_insn (rtx_insn *from, rtx_insn *to)
{
  bool ok_p;
  rtx_insn *saved_insn;
  rtx set, from_reg, to_reg, op;
  enum reg_class to_class, from_class;
  int n, nop;
  /* Operand numbers changed by the substitution, terminated by -1.  */
  signed char changed_nops[MAX_RECOG_OPERANDS + 1];

  /* Check conditions for second memory reload and original insn: */
  if ((targetm.secondary_memory_needed
       == hook_bool_mode_reg_class_t_reg_class_t_false)
      || NEXT_INSN (from) != to
      || !NONDEBUG_INSN_P (to)
      || CALL_P (to))
    return false;

  lra_insn_recog_data_t id = lra_get_insn_recog_data (to);
  struct lra_static_insn_data *static_id = id->insn_static_data;

  if (id->used_insn_alternative == LRA_UNKNOWN_ALT
      || (set = single_set (from)) == NULL_RTX)
    return false;
  from_reg = SET_DEST (set);
  to_reg = SET_SRC (set);
  /* Ignore optional reloads: */
  if (! REG_P (from_reg) || ! REG_P (to_reg)
      || bitmap_bit_p (&lra_optional_reload_pseudos, REGNO (from_reg)))
    return false;
  to_class = lra_get_allocno_class (REGNO (to_reg));
  from_class = lra_get_allocno_class (REGNO (from_reg));
  /* Check that reload insn is a load: */
  if (to_class != NO_REGS || from_class == NO_REGS)
    return false;
  /* Substitute TO_REG for every input occurrence of FROM_REG in TO,
     remembering which operands were changed for a possible rollback.  */
  for (n = nop = 0; nop < static_id->n_operands; nop++)
    {
      if (static_id->operand[nop].type != OP_IN)
	continue;
      op = *id->operand_loc[nop];
      if (!REG_P (op) || REGNO (op) != REGNO (from_reg))
	continue;
      *id->operand_loc[nop] = to_reg;
      changed_nops[n++] = nop;
    }
  changed_nops[n] = -1;
  lra_update_dups (id, changed_nops);
  lra_update_insn_regno_info (to);
  ok_p = recog_memoized (to) >= 0;
  if (ok_p)
    {
      /* Check that combined insn does not need any reloads: */
      saved_insn = curr_insn;
      curr_insn = to;
      curr_id = lra_get_insn_recog_data (curr_insn);
      curr_static_id = curr_id->insn_static_data;
      /* Try the insn as-is, and if it has a commutative operand pair
	 also try it with the pair swapped (at most one swap and one
	 swap back).  */
      for (bool swapped_p = false;;)
	{
	  ok_p = !curr_insn_transform (true);
	  if (ok_p || curr_static_id->commutative < 0)
	    break;
	  swap_operands (curr_static_id->commutative);
	  if (lra_dump_file != NULL)
	    {
	      fprintf (lra_dump_file,
		       " Swapping %scombined insn operands:\n",
		       swapped_p ? "back " : "");
	      dump_insn_slim (lra_dump_file, to);
	    }
	  if (swapped_p)
	    break;
	  swapped_p = true;
	}
      curr_insn = saved_insn;
      curr_id = lra_get_insn_recog_data (curr_insn);
      curr_static_id = curr_id->insn_static_data;
    }
  if (ok_p)
    {
      /* Success: force re-selection of the alternative and requeue the
	 combined insn.  */
      id->used_insn_alternative = -1;
      lra_push_insn_and_update_insn_regno_info (to);
      if (lra_dump_file != NULL)
	{
	  fprintf (lra_dump_file, " Use combined insn:\n");
	  dump_insn_slim (lra_dump_file, to);
	}
      return true;
    }
  if (lra_dump_file != NULL)
    {
      fprintf (lra_dump_file, " Failed combined insn:\n");
      dump_insn_slim (lra_dump_file, to);
    }
  /* Roll back the operand substitution made above.  */
  for (int i = 0; i < n; i++)
    {
      nop = changed_nops[i];
      *id->operand_loc[nop] = from_reg;
    }
  lra_update_dups (id, changed_nops);
  lra_update_insn_regno_info (to);
  if (lra_dump_file != NULL)
    {
      fprintf (lra_dump_file, " Restoring insn after failed combining:\n");
      dump_insn_slim (lra_dump_file, to);
    }
  return false;
}
5555 :
5556 : /* Entry function of LRA constraint pass. Return true if the
5557 : constraint pass did change the code. */
5558 : bool
5559 3223398 : lra_constraints (bool first_p)
5560 : {
5561 3223398 : bool changed_p;
5562 3223398 : int i, hard_regno, new_insns_num;
5563 3223398 : unsigned int min_len, new_min_len, uid;
5564 3223398 : rtx set, x, reg, nosubreg_dest;
5565 3223398 : rtx_insn *original_insn;
5566 3223398 : basic_block last_bb;
5567 3223398 : bitmap_iterator bi;
5568 :
5569 3223398 : lra_constraint_iter++;
5570 3223398 : if (lra_dump_file != NULL)
5571 194 : fprintf (lra_dump_file, "\n********** Local #%d: **********\n\n",
5572 : lra_constraint_iter);
5573 3223398 : changed_p = false;
5574 3223398 : if (pic_offset_table_rtx
5575 3223398 : && REGNO (pic_offset_table_rtx) >= FIRST_PSEUDO_REGISTER)
5576 104451 : check_and_force_assignment_correctness_p = true;
5577 3118947 : else if (first_p)
5578 : /* On the first iteration we should check IRA assignment
5579 : correctness. In rare cases, the assignments can be wrong as
5580 : early clobbers operands are ignored in IRA or usages of
5581 : paradoxical sub-registers are not taken into account by
5582 : IRA. */
5583 1439679 : check_and_force_assignment_correctness_p = true;
5584 3223398 : new_insn_uid_start = get_max_uid ();
5585 3223398 : new_regno_start = first_p ? lra_constraint_new_regno_start : max_reg_num ();
5586 : /* Mark used hard regs for target stack size calulations. */
5587 205716015 : for (i = FIRST_PSEUDO_REGISTER; i < new_regno_start; i++)
5588 202492617 : if (lra_reg_info[i].nrefs != 0
5589 299027493 : && (hard_regno = lra_get_regno_hard_regno (i)) >= 0)
5590 : {
5591 92642129 : int j, nregs;
5592 :
5593 92642129 : nregs = hard_regno_nregs (hard_regno, lra_reg_info[i].biggest_mode);
5594 188315055 : for (j = 0; j < nregs; j++)
5595 95672926 : df_set_regs_ever_live (hard_regno + j, true);
5596 : }
5597 : /* Do elimination before the equivalence processing as we can spill
5598 : some pseudos during elimination. */
5599 3223398 : lra_eliminate (false, first_p);
5600 3223398 : auto_bitmap equiv_insn_bitmap (®_obstack);
5601 :
5602 : /* Register elimination can create new pseudos via the addptr pattern,
5603 : so make sure the equivalency tables are resized appropriately. */
5604 3223398 : ira_expand_reg_equiv ();
5605 205716015 : for (i = FIRST_PSEUDO_REGISTER; i < new_regno_start; i++)
5606 202492617 : if (lra_reg_info[i].nrefs != 0)
5607 : {
5608 96534876 : ira_reg_equiv[i].profitable_p = true;
5609 96534876 : reg = regno_reg_rtx[i];
5610 96534876 : if (lra_get_regno_hard_regno (i) < 0 && (x = get_equiv (reg)) != reg)
5611 : {
5612 723335 : bool pseudo_p = contains_reg_p (x, false, false);
5613 :
5614 : /* After RTL transformation, we cannot guarantee that
5615 : pseudo in the substitution was not reloaded which might
5616 : make equivalence invalid. For example, in reverse
5617 : equiv of p0
5618 :
5619 : p0 <- ...
5620 : ...
5621 : equiv_mem <- p0
5622 :
5623 : the memory address register was reloaded before the 2nd
5624 : insn. */
5625 723335 : if ((! first_p && pseudo_p)
5626 : /* We don't use DF for compilation speed sake. So it
5627 : is problematic to update live info when we use an
5628 : equivalence containing pseudos in more than one
5629 : BB. */
5630 716923 : || (pseudo_p && multi_block_pseudo_p (i))
5631 : /* If an init insn was deleted for some reason, cancel
5632 : the equiv. We could update the equiv insns after
5633 : transformations including an equiv insn deletion
5634 : but it is not worthy as such cases are extremely
5635 : rare. */
5636 712116 : || contains_deleted_insn_p (ira_reg_equiv[i].init_insns)
5637 : /* If it is not a reverse equivalence, we check that a
5638 : pseudo in rhs of the init insn is not dying in the
5639 : insn. Otherwise, the live info at the beginning of
5640 : the corresponding BB might be wrong after we
5641 : removed the insn. When the equiv can be a
5642 : constant, the right hand side of the init insn can
5643 : be a pseudo. */
5644 712116 : || (! reverse_equiv_p (i)
5645 710722 : && (init_insn_rhs_dead_pseudo_p (i)
5646 : /* If we reloaded the pseudo in an equivalence
5647 : init insn, we cannot remove the equiv init
5648 : insns and the init insns might write into
5649 : const memory in this case. */
5650 705196 : || contains_reloaded_insn_p (i)))
5651 : /* Prevent access beyond equivalent memory for
5652 : paradoxical subregs. */
5653 706590 : || (MEM_P (x)
5654 1132845 : && maybe_gt (GET_MODE_SIZE (lra_reg_info[i].biggest_mode),
5655 : GET_MODE_SIZE (GET_MODE (x))))
5656 1429209 : || (pic_offset_table_rtx
5657 53142 : && ((CONST_POOL_OK_P (PSEUDO_REGNO_MODE (i), x)
5658 8114 : && (targetm.preferred_reload_class
5659 4057 : (x, lra_get_allocno_class (i)) == NO_REGS))
5660 51476 : || contains_symbol_ref_p (x))))
5661 19965 : ira_reg_equiv[i].defined_p
5662 19965 : = ira_reg_equiv[i].caller_save_p = false;
5663 723335 : if (contains_reg_p (x, false, true))
5664 9305 : ira_reg_equiv[i].profitable_p = false;
5665 723335 : if (get_equiv (reg) != reg)
5666 698368 : bitmap_ior_into (equiv_insn_bitmap, &lra_reg_info[i].insn_bitmap);
5667 : }
5668 : }
5669 205716015 : for (i = FIRST_PSEUDO_REGISTER; i < new_regno_start; i++)
5670 202492617 : update_equiv (i);
5671 : /* We should add all insns containing pseudos which should be
5672 : substituted by their equivalences. */
5673 5557116 : EXECUTE_IF_SET_IN_BITMAP (equiv_insn_bitmap, 0, uid, bi)
5674 2333718 : lra_push_insn_by_uid (uid);
5675 3223398 : min_len = lra_insn_stack_length ();
5676 3223398 : new_insns_num = 0;
5677 3223398 : last_bb = NULL;
5678 3223398 : changed_p = false;
5679 3223398 : original_insn = NULL;
5680 166396918 : while ((new_min_len = lra_insn_stack_length ()) != 0)
5681 : {
5682 159950122 : curr_insn = lra_pop_insn ();
5683 159950122 : --new_min_len;
5684 159950122 : curr_bb = BLOCK_FOR_INSN (curr_insn);
5685 159950122 : if (curr_bb != last_bb)
5686 : {
5687 20548739 : last_bb = curr_bb;
5688 20548739 : bb_reload_num = lra_curr_reload_num;
5689 : }
5690 159950122 : if (min_len > new_min_len)
5691 : {
5692 : min_len = new_min_len;
5693 : new_insns_num = 0;
5694 : original_insn = curr_insn;
5695 : }
5696 7197696 : else if (combine_reload_insn (curr_insn, original_insn))
5697 : {
5698 3549 : continue;
5699 : }
5700 7194147 : if (new_insns_num > MAX_RELOAD_INSNS_NUMBER)
5701 0 : internal_error
5702 0 : ("maximum number of generated reload insns per insn achieved (%d)",
5703 : MAX_RELOAD_INSNS_NUMBER);
5704 159946573 : new_insns_num++;
5705 159946573 : if (DEBUG_INSN_P (curr_insn))
5706 : {
5707 : /* We need to check equivalence in debug insn and change
5708 : pseudo to the equivalent value if necessary. */
5709 52357763 : curr_id = lra_get_insn_recog_data (curr_insn);
5710 52357763 : if (bitmap_bit_p (equiv_insn_bitmap, INSN_UID (curr_insn)))
5711 : {
5712 29714 : rtx old = *curr_id->operand_loc[0];
5713 29714 : *curr_id->operand_loc[0]
5714 29714 : = simplify_replace_fn_rtx (old, NULL_RTX,
5715 : loc_equivalence_callback, curr_insn);
5716 29714 : if (old != *curr_id->operand_loc[0])
5717 : {
5718 : /* If we substitute pseudo by shared equivalence, we can fail
5719 : to update LRA reg info and this can result in many
5720 : unexpected consequences. So keep rtl unshared: */
5721 29714 : *curr_id->operand_loc[0]
5722 29714 : = copy_rtx (*curr_id->operand_loc[0]);
5723 29714 : lra_update_insn_regno_info (curr_insn);
5724 29714 : changed_p = true;
5725 : }
5726 : }
5727 : }
5728 107588810 : else if (INSN_P (curr_insn))
5729 : {
5730 106533616 : if ((set = single_set (curr_insn)) != NULL_RTX)
5731 : {
5732 101172822 : nosubreg_dest = SET_DEST (set);
5733 : /* The equivalence pseudo could be set up as SUBREG in a
5734 : case when it is a call restore insn in a mode
5735 : different from the pseudo mode. */
5736 101172822 : if (GET_CODE (nosubreg_dest) == SUBREG)
5737 1153518 : nosubreg_dest = SUBREG_REG (nosubreg_dest);
5738 101857580 : if ((REG_P (nosubreg_dest)
5739 74674417 : && (x = get_equiv (nosubreg_dest)) != nosubreg_dest
5740 : /* Remove insns which set up a pseudo whose value
5741 : cannot be changed. Such insns might be not in
5742 : init_insns because we don't update equiv data
5743 : during insn transformations.
5744 :
5745 : As an example, let suppose that a pseudo got
5746 : hard register and on the 1st pass was not
5747 : changed to equivalent constant. We generate an
5748 : additional insn setting up the pseudo because of
5749 : secondary memory movement. Then the pseudo is
5750 : spilled and we use the equiv constant. In this
5751 : case we should remove the additional insn and
5752 : this insn is not init_insns list. */
5753 702848 : && (! MEM_P (x) || MEM_READONLY_P (x)
5754 : /* Check that this is actually an insn setting
5755 : up the equivalence. */
5756 320493 : || in_list_p (curr_insn,
5757 320493 : ira_reg_equiv
5758 320493 : [REGNO (nosubreg_dest)].init_insns)))
5759 175163808 : || (((x = get_equiv (SET_SRC (set))) != SET_SRC (set))
5760 1995030 : && in_list_p (curr_insn,
5761 997515 : ira_reg_equiv
5762 997515 : [REGNO (SET_SRC (set))].init_insns)
5763 : /* This is a reverse equivalence to memory (see ira.cc)
5764 : in store insn. We can reload all the destination and
5765 : have an output reload which is a store to memory. If
5766 : we just remove the insn, we will have the output
5767 : reload storing an undefined value to the memory.
5768 : Check that we did not reload the memory to prevent a
5769 : wrong code generation. We could implement using the
5770 : equivalence still in such case but doing this is not
5771 : worth the efforts as such case is very rare. */
5772 1327 : && MEM_P (nosubreg_dest)))
5773 : {
5774 : /* This is equiv init insn of pseudo which did not get a
5775 : hard register -- remove the insn. */
5776 684758 : if (lra_dump_file != NULL)
5777 : {
5778 9 : fprintf (lra_dump_file,
5779 : " Removing equiv init insn %i (freq=%d)\n",
5780 3 : INSN_UID (curr_insn),
5781 6 : REG_FREQ_FROM_BB (BLOCK_FOR_INSN (curr_insn)));
5782 3 : dump_insn_slim (lra_dump_file, curr_insn);
5783 : }
5784 684758 : if (contains_reg_p (x, true, false))
5785 136442 : check_and_force_assignment_correctness_p = true;
5786 684758 : lra_set_insn_deleted (curr_insn);
5787 684758 : continue;
5788 : }
5789 : }
5790 105848858 : curr_id = lra_get_insn_recog_data (curr_insn);
5791 105848858 : curr_static_id = curr_id->insn_static_data;
5792 105848858 : init_curr_insn_input_reloads ();
5793 105848858 : init_curr_operand_mode ();
5794 105848858 : if (curr_insn_transform (false))
5795 : changed_p = true;
5796 : /* Check non-transformed insns too for equiv change as USE
5797 : or CLOBBER don't need reloads but can contain pseudos
5798 : being changed on their equivalences. */
5799 99773293 : else if (bitmap_bit_p (equiv_insn_bitmap, INSN_UID (curr_insn))
5800 99773293 : && loc_equivalence_change_p (&PATTERN (curr_insn)))
5801 : {
5802 8 : lra_update_insn_regno_info (curr_insn);
5803 8 : lra_push_insn_by_uid (INSN_UID (curr_insn));
5804 8 : changed_p = true;
5805 : }
5806 : }
5807 : }
5808 :
5809 : /* If we used a new hard regno, changed_p should be true because the
5810 : hard reg is assigned to a new pseudo. */
5811 3223398 : if (flag_checking && !changed_p)
5812 : {
5813 132471290 : for (i = FIRST_PSEUDO_REGISTER; i < new_regno_start; i++)
5814 129859058 : if (lra_reg_info[i].nrefs != 0
5815 189776356 : && (hard_regno = lra_get_regno_hard_regno (i)) >= 0)
5816 : {
5817 58474644 : int j, nregs = hard_regno_nregs (hard_regno,
5818 58474644 : PSEUDO_REGNO_MODE (i));
5819 :
5820 118963108 : for (j = 0; j < nregs; j++)
5821 60488464 : lra_assert (df_regs_ever_live_p (hard_regno + j));
5822 : }
5823 : }
5824 2612272 : if (changed_p)
5825 611129 : lra_dump_insns_if_possible ("changed func after local");
5826 3223398 : return changed_p;
5827 3223398 : }
5828 :
5829 : static void initiate_invariants (void);
5830 : static void finish_invariants (void);
5831 :
/* Initiate the LRA constraint pass.  It is done once per
   function.  */
void
lra_constraints_init (void)
{
  /* Set up the table, pool, and vector used to record inheritance
     invariants (see initiate_invariants below).  */
  initiate_invariants ();
}
5839 :
/* Finalize the LRA constraint pass.  It is done once per
   function.  */
void
lra_constraints_finish (void)
{
  /* Free the invariant table, pool, and vector created by
     lra_constraints_init.  */
  finish_invariants ();
}
5847 :
5848 :
5849 :
/* Structure describing invariants for inheritance.  */
struct lra_invariant
{
  /* The order number of the invariant.  */
  int num;
  /* The invariant RTX.  */
  rtx invariant_rtx;
  /* The origin insn of the invariant.  */
  rtx_insn *insn;
};
5860 :
typedef lra_invariant invariant_t;
typedef invariant_t *invariant_ptr_t;
typedef const invariant_t *const_invariant_ptr_t;

/* Vector of pointers to all inheritance invariants seen so far.  */
static vec<invariant_ptr_t> invariants;

/* Allocation pool from which lra_invariant objects are allocated.  */
static object_allocator<lra_invariant> *invariants_pool;

/* Hash table mapping an invariant RTX to its lra_invariant record
   (keyed by lra_rtx_hash / rtx_equal_p; see invariant_hash and
   invariant_eq_p).  */
static htab_t invariant_table;
5873 :
5874 : /* Hash function for INVARIANT. */
5875 : static hashval_t
5876 175353 : invariant_hash (const void *invariant)
5877 : {
5878 175353 : rtx inv = ((const_invariant_ptr_t) invariant)->invariant_rtx;
5879 175353 : return lra_rtx_hash (inv);
5880 : }
5881 :
5882 : /* Equal function for invariants INVARIANT1 and INVARIANT2. */
5883 : static int
5884 56917 : invariant_eq_p (const void *invariant1, const void *invariant2)
5885 : {
5886 56917 : rtx inv1 = ((const_invariant_ptr_t) invariant1)->invariant_rtx;
5887 56917 : rtx inv2 = ((const_invariant_ptr_t) invariant2)->invariant_rtx;
5888 :
5889 56917 : return rtx_equal_p (inv1, inv2);
5890 : }
5891 :
5892 : /* Insert INVARIANT_RTX into the table if it is not there yet. Return
5893 : invariant which is in the table. */
5894 : static invariant_ptr_t
5895 175161 : insert_invariant (rtx invariant_rtx)
5896 : {
5897 175161 : void **entry_ptr;
5898 175161 : invariant_t invariant;
5899 175161 : invariant_ptr_t invariant_ptr;
5900 :
5901 175161 : invariant.invariant_rtx = invariant_rtx;
5902 175161 : entry_ptr = htab_find_slot (invariant_table, &invariant, INSERT);
5903 175161 : if (*entry_ptr == NULL)
5904 : {
5905 152058 : invariant_ptr = invariants_pool->allocate ();
5906 152058 : invariant_ptr->invariant_rtx = invariant_rtx;
5907 152058 : invariant_ptr->insn = NULL;
5908 152058 : invariants.safe_push (invariant_ptr);
5909 152058 : *entry_ptr = (void *) invariant_ptr;
5910 : }
5911 175161 : return (invariant_ptr_t) *entry_ptr;
5912 : }
5913 :
5914 : /* Initiate the invariant table. */
5915 : static void
5916 1480117 : initiate_invariants (void)
5917 : {
5918 1480117 : invariants.create (100);
5919 1480117 : invariants_pool
5920 1480117 : = new object_allocator<lra_invariant> ("Inheritance invariants");
5921 1480117 : invariant_table = htab_create (100, invariant_hash, invariant_eq_p, NULL);
5922 1480117 : }
5923 :
5924 : /* Finish the invariant table. */
5925 : static void
5926 1480117 : finish_invariants (void)
5927 : {
5928 1480117 : htab_delete (invariant_table);
5929 2960234 : delete invariants_pool;
5930 1480117 : invariants.release ();
5931 1480117 : }
5932 :
5933 : /* Make the invariant table empty. */
5934 : static void
5935 12672159 : clear_invariants (void)
5936 : {
5937 12672159 : htab_empty (invariant_table);
5938 12672159 : invariants_pool->release ();
5939 12672159 : invariants.truncate (0);
5940 12672159 : }
5941 :
5942 :
5943 :
5944 : /* This page contains code to do inheritance/split
5945 : transformations. */
5946 :
/* Number of reloads passed so far in current EBB.  */
static int reloads_num;

/* Number of calls passed so far in current EBB.  */
static int calls_num;

/* Index ID is the CALLS_NUM associated with the last call we saw with
   ABI identifier ID.  */
static int last_call_for_abi[NUM_ABI_IDS];

/* Which registers have been fully or partially clobbered by a call
   since they were last used.  */
static HARD_REG_SET full_and_partial_call_clobbers;

/* Current reload pseudo check for validity of elements in
   USAGE_INSNS.  */
static int curr_usage_insns_check;

/* Info about last usage of registers in EBB to do inheritance/split
   transformation.  Inheritance transformation is done from a spilled
   pseudo and split transformations from a hard register or a pseudo
   assigned to a hard register.  */
struct usage_insns
{
  /* If the value is equal to CURR_USAGE_INSNS_CHECK, then the member
     value INSNS is valid.  The insns is chain of optional debug insns
     and a finishing non-debug insn using the corresponding reg.  The
     value is also used to mark the registers which are set up in the
     current insn.  The negated insn uid is used for this.  */
  int check;
  /* Value of global reloads_num at the last insn in INSNS.  */
  int reloads_num;
  /* Value of global calls_num at the last insn in INSNS.  */
  int calls_num;
  /* It can be true only for splitting.  And it means that the restore
     insn should be put after insn given by the following member.  */
  bool after_p;
  /* Next insns in the current EBB which use the original reg and the
     original reg value is not changed between the current insn and
     the next insns.  In other words, e.g. for inheritance, if we need
     to use the original reg value again in the next insns we can try
     to use the value in a hard register from a reload insn of the
     current insn.  */
  rtx insns;
};

/* Map: regno -> corresponding pseudo usage insns.  */
static struct usage_insns *usage_insns;
5995 :
5996 : static void
5997 246468084 : setup_next_usage_insn (int regno, rtx insn, int reloads_num, bool after_p)
5998 : {
5999 246468084 : usage_insns[regno].check = curr_usage_insns_check;
6000 246468084 : usage_insns[regno].insns = insn;
6001 246468084 : usage_insns[regno].reloads_num = reloads_num;
6002 246468084 : usage_insns[regno].calls_num = calls_num;
6003 246468084 : usage_insns[regno].after_p = after_p;
6004 246468084 : if (regno >= FIRST_PSEUDO_REGISTER && reg_renumber[regno] >= 0)
6005 111296661 : remove_from_hard_reg_set (&full_and_partial_call_clobbers,
6006 111296661 : PSEUDO_REGNO_MODE (regno),
6007 : reg_renumber[regno]);
6008 246468084 : }
6009 :
6010 : /* The function is used to form list REGNO usages which consists of
6011 : optional debug insns finished by a non-debug insn using REGNO.
6012 : RELOADS_NUM is current number of reload insns processed so far. */
6013 : static void
6014 139687917 : add_next_usage_insn (int regno, rtx_insn *insn, int reloads_num)
6015 : {
6016 139687917 : rtx next_usage_insns;
6017 :
6018 139687917 : if (usage_insns[regno].check == curr_usage_insns_check
6019 73125858 : && (next_usage_insns = usage_insns[regno].insns) != NULL_RTX
6020 212813775 : && DEBUG_INSN_P (insn))
6021 : {
6022 : /* Check that we did not add the debug insn yet. */
6023 13938967 : if (next_usage_insns != insn
6024 13938967 : && (GET_CODE (next_usage_insns) != INSN_LIST
6025 6292968 : || XEXP (next_usage_insns, 0) != insn))
6026 13938953 : usage_insns[regno].insns = gen_rtx_INSN_LIST (VOIDmode, insn,
6027 : next_usage_insns);
6028 : }
6029 125748950 : else if (NONDEBUG_INSN_P (insn))
6030 125244174 : setup_next_usage_insn (regno, insn, reloads_num, false);
6031 : else
6032 504776 : usage_insns[regno].check = 0;
6033 139687917 : }
6034 :
6035 : /* Return first non-debug insn in list USAGE_INSNS. */
6036 : static rtx_insn *
6037 1166494 : skip_usage_debug_insns (rtx usage_insns)
6038 : {
6039 1166494 : rtx insn;
6040 :
6041 : /* Skip debug insns. */
6042 1166494 : for (insn = usage_insns;
6043 1447993 : insn != NULL_RTX && GET_CODE (insn) == INSN_LIST;
6044 281499 : insn = XEXP (insn, 1))
6045 : ;
6046 1166494 : return safe_as_a <rtx_insn *> (insn);
6047 : }
6048 :
6049 : /* Return true if we need secondary memory moves for insn in
6050 : USAGE_INSNS after inserting inherited pseudo of class INHER_CL
6051 : into the insn. */
6052 : static bool
6053 1166501 : check_secondary_memory_needed_p (enum reg_class inher_cl ATTRIBUTE_UNUSED,
6054 : rtx usage_insns ATTRIBUTE_UNUSED)
6055 : {
6056 1166501 : rtx_insn *insn;
6057 1166501 : rtx set, dest;
6058 1166501 : enum reg_class cl;
6059 :
6060 1166501 : if (inher_cl == ALL_REGS
6061 1166501 : || (insn = skip_usage_debug_insns (usage_insns)) == NULL_RTX)
6062 : return false;
6063 1166494 : lra_assert (INSN_P (insn));
6064 1166494 : if ((set = single_set (insn)) == NULL_RTX || ! REG_P (SET_DEST (set)))
6065 : return false;
6066 1128482 : dest = SET_DEST (set);
6067 1128482 : if (! REG_P (dest))
6068 : return false;
6069 1128482 : lra_assert (inher_cl != NO_REGS);
6070 1128482 : cl = get_reg_class (REGNO (dest));
6071 1128482 : return (cl != NO_REGS && cl != ALL_REGS
6072 1128482 : && targetm.secondary_memory_needed (GET_MODE (dest), inher_cl, cl));
6073 : }
6074 :
6075 : /* Registers involved in inheritance/split in the current EBB
6076 : (inheritance/split pseudos and original registers). */
6077 : static bitmap_head check_only_regs;
6078 :
6079 : /* Reload pseudos cannot be involded in invariant inheritance in the
6080 : current EBB. */
6081 : static bitmap_head invalid_invariant_regs;
6082 :
6083 : /* Do inheritance transformations for insn INSN, which defines (if
6084 : DEF_P) or uses ORIGINAL_REGNO. NEXT_USAGE_INSNS specifies which
6085 : instruction in the EBB next uses ORIGINAL_REGNO; it has the same
6086 : form as the "insns" field of usage_insns. Return true if we
6087 : succeed in such transformation.
6088 :
6089 : The transformations look like:
6090 :
6091 : p <- ... i <- ...
6092 : ... p <- i (new insn)
6093 : ... =>
6094 : <- ... p ... <- ... i ...
6095 : or
6096 : ... i <- p (new insn)
6097 : <- ... p ... <- ... i ...
6098 : ... =>
6099 : <- ... p ... <- ... i ...
6100 : where p is a spilled original pseudo and i is a new inheritance pseudo.
6101 :
6102 :
6103 : The inheritance pseudo has the smallest class of two classes CL and
6104 : class of ORIGINAL REGNO. */
static bool
inherit_reload_reg (bool def_p, int original_regno,
		    enum reg_class cl, rtx_insn *insn, rtx next_usage_insns)
{
  /* Inheritance adds register moves; that pays off only when
     optimizing for speed.  */
  if (optimize_function_for_size_p (cfun))
    return false;

  enum reg_class rclass = lra_get_allocno_class (original_regno);
  rtx original_reg = regno_reg_rtx[original_regno];
  rtx new_reg, usage_insn;
  rtx_insn *new_insns;

  lra_assert (! usage_insns[original_regno].after_p);
  if (lra_dump_file != NULL)
    fprintf (lra_dump_file,
	     "    <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<\n");
  /* The inheritance pseudo must be usable both for the reload (class
     CL) and at the next usage (allocno class RCLASS).  */
  if (! ira_reg_classes_intersect_p[cl][rclass])
    {
      if (lra_dump_file != NULL)
	{
	  fprintf (lra_dump_file,
		   "    Rejecting inheritance for %d "
		   "because of disjoint classes %s and %s\n",
		   original_regno, reg_class_names[cl],
		   reg_class_names[rclass]);
	  fprintf (lra_dump_file,
		   "    >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\n");
	}
      return false;
    }
  if ((ira_class_subset_p[cl][rclass] && cl != rclass)
      /* We don't use a subset of two classes because it can be
	 NO_REGS.  This transformation is still profitable in most
	 cases even if the classes are not intersected as register
	 move is probably cheaper than a memory load.  */
      || ira_class_hard_regs_num[cl] < ira_class_hard_regs_num[rclass])
    {
      if (lra_dump_file != NULL)
	fprintf (lra_dump_file, "    Use smallest class of %s and %s\n",
		 reg_class_names[cl], reg_class_names[rclass]);

      rclass = cl;
    }
  if (check_secondary_memory_needed_p (rclass, next_usage_insns))
    {
      /* Reject inheritance resulting in secondary memory moves.
	 Otherwise, there is a danger in LRA cycling.  Also such
	 transformation will be unprofitable.  */
      if (lra_dump_file != NULL)
	{
	  rtx_insn *insn = skip_usage_debug_insns (next_usage_insns);
	  rtx set = single_set (insn);

	  lra_assert (set != NULL_RTX);

	  rtx dest = SET_DEST (set);

	  lra_assert (REG_P (dest));
	  fprintf (lra_dump_file,
		   "    Rejecting inheritance for insn %d(%s)<-%d(%s) "
		   "as secondary mem is needed\n",
		   REGNO (dest), reg_class_names[get_reg_class (REGNO (dest))],
		   original_regno, reg_class_names[rclass]);
	  fprintf (lra_dump_file,
		   "    >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\n");
	}
      return false;
    }
  /* A class whose min and max register counts for this mode differ
     cannot reliably hold the value; reject.  */
  if (ira_reg_class_min_nregs[rclass][GET_MODE (original_reg)]
      != ira_reg_class_max_nregs[rclass][GET_MODE (original_reg)])
    {
      if (lra_dump_file != NULL)
	{
	  fprintf (lra_dump_file,
		   "    Rejecting inheritance for %d "
		   "because of requiring non-uniform class %s\n",
		   original_regno, reg_class_names[rclass]);
	  fprintf (lra_dump_file,
		   "    >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\n");
	}
      return false;
    }
  /* Create the inheritance pseudo and the connecting move (direction
     depends on whether INSN defines or uses the original pseudo).  */
  new_reg = lra_create_new_reg (GET_MODE (original_reg), original_reg,
				rclass, NULL, "inheritance");
  start_sequence ();
  if (def_p)
    lra_emit_move (original_reg, new_reg);
  else
    lra_emit_move (new_reg, original_reg);
  new_insns = end_sequence ();
  if (NEXT_INSN (new_insns) != NULL_RTX)
    {
      if (lra_dump_file != NULL)
	{
	  fprintf (lra_dump_file,
		   "    Rejecting inheritance %d->%d "
		   "as it results in 2 or more insns:\n",
		   original_regno, REGNO (new_reg));
	  dump_rtl_slim (lra_dump_file, new_insns, NULL, -1, 0);
	  fprintf (lra_dump_file,
		   "    >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\n");
	}
      return false;
    }
  lra_substitute_pseudo_within_insn (insn, original_regno, new_reg, false);
  lra_update_insn_regno_info (insn);
  if (! def_p)
    /* We now have a new usage insn for original regno.  */
    setup_next_usage_insn (original_regno, new_insns, reloads_num, false);
  if (lra_dump_file != NULL)
    fprintf (lra_dump_file, "    Original reg change %d->%d (bb%d):\n",
	     original_regno, REGNO (new_reg), BLOCK_FOR_INSN (insn)->index);
  /* Remember how to undo this inheritance and mark both registers for
     the undo pass.  */
  lra_reg_info[REGNO (new_reg)].restore_rtx = regno_reg_rtx[original_regno];
  bitmap_set_bit (&check_only_regs, REGNO (new_reg));
  bitmap_set_bit (&check_only_regs, original_regno);
  bitmap_set_bit (&lra_inheritance_pseudos, REGNO (new_reg));
  if (def_p)
    lra_process_new_insns (insn, NULL, new_insns,
			   "Add original<-inheritance");
  else
    lra_process_new_insns (insn, new_insns, NULL,
			   "Add inheritance<-original");
  /* Replace the original pseudo by the inheritance pseudo in all the
     chained next-usage insns (debug insns are INSN_LIST nodes, the
     final non-debug insn terminates the chain).  */
  while (next_usage_insns != NULL_RTX)
    {
      if (GET_CODE (next_usage_insns) != INSN_LIST)
	{
	  usage_insn = next_usage_insns;
	  lra_assert (NONDEBUG_INSN_P (usage_insn));
	  next_usage_insns = NULL;
	}
      else
	{
	  usage_insn = XEXP (next_usage_insns, 0);
	  lra_assert (DEBUG_INSN_P (usage_insn));
	  next_usage_insns = XEXP (next_usage_insns, 1);
	}
      lra_substitute_pseudo (&usage_insn, original_regno, new_reg, false,
			     DEBUG_INSN_P (usage_insn));
      lra_update_insn_regno_info (as_a <rtx_insn *> (usage_insn));
      if (lra_dump_file != NULL)
	{
	  basic_block bb = BLOCK_FOR_INSN (usage_insn);
	  fprintf (lra_dump_file,
		   "    Inheritance reuse change %d->%d (bb%d):\n",
		   original_regno, REGNO (new_reg),
		   bb ? bb->index : -1);
	  dump_insn_slim (lra_dump_file, as_a <rtx_insn *> (usage_insn));
	}
    }
  if (lra_dump_file != NULL)
    fprintf (lra_dump_file,
	     "    >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\n");
  return true;
}
6259 :
6260 : /* Return true if we need a caller save/restore for pseudo REGNO which
6261 : was assigned to a hard register. */
6262 : static inline bool
6263 113917211 : need_for_call_save_p (int regno)
6264 : {
6265 113917211 : lra_assert (regno >= FIRST_PSEUDO_REGISTER && reg_renumber[regno] >= 0);
6266 113917211 : if (usage_insns[regno].calls_num < calls_num)
6267 : {
6268 : unsigned int abis = 0;
6269 117884884 : for (unsigned int i = 0; i < NUM_ABI_IDS; ++i)
6270 108816816 : if (last_call_for_abi[i] > usage_insns[regno].calls_num)
6271 9068068 : abis |= 1 << i;
6272 9068068 : gcc_assert (abis);
6273 9068068 : if (call_clobbered_in_region_p (abis, full_and_partial_call_clobbers,
6274 9068068 : PSEUDO_REGNO_MODE (regno),
6275 : reg_renumber[regno]))
6276 : return true;
6277 : }
6278 : return false;
6279 : }
6280 :
/* Global registers occurring in the current EBB.  */
static bitmap_head ebb_global_regs;

/* Return true if we need a split for hard register REGNO or pseudo
   REGNO which was assigned to a hard register.
   POTENTIAL_RELOAD_HARD_REGS contains hard registers which might be
   used for reloads since the EBB end.  It is an approximation of the
   used hard registers in the split range.  The exact value would
   require expensive calculations.  If we were aggressive with
   splitting because of the approximation, the split pseudo will save
   the same hard register assignment and will be removed in the undo
   pass.  We still need the approximation because too aggressive
   splitting would result in too inaccurate cost calculation in the
   assignment pass because of too many generated moves which will be
   probably removed in the undo pass.  */
static inline bool
need_for_split_p (HARD_REG_SET potential_reload_hard_regs, int regno)
{
  int hard_regno = regno < FIRST_PSEUDO_REGISTER ? regno : reg_renumber[regno];

  lra_assert (hard_regno >= 0);
  return ((TEST_HARD_REG_BIT (potential_reload_hard_regs, hard_regno)
	   /* Don't split eliminable hard registers, otherwise we can
	      split hard registers like hard frame pointer, which
	      lives on BB start/end according to DF-infrastructure,
	      when there is a pseudo assigned to the register and
	      living in the same BB.  */
	   && (regno >= FIRST_PSEUDO_REGISTER
	       || ! TEST_HARD_REG_BIT (eliminable_regset, hard_regno))
	   && ! TEST_HARD_REG_BIT (lra_no_alloc_regs, hard_regno)
	   /* Don't split call clobbered hard regs living through
	      calls, otherwise we might have a check problem in the
	      assign sub-pass as in the most cases (exception is a
	      situation when check_and_force_assignment_correctness_p value is
	      true) the assign pass assumes that all pseudos living
	      through calls are assigned to call saved hard regs.  */
	   && (regno >= FIRST_PSEUDO_REGISTER
	       || !TEST_HARD_REG_BIT (full_and_partial_call_clobbers, regno))
	   /* We need at least 2 reloads to make pseudo splitting
	      profitable.  We should provide hard regno splitting in
	      any case to solve 1st insn scheduling problem when
	      moving hard register definition up might result in
	      impossibility to find hard register for reload pseudo of
	      small register class.  */
	   && (usage_insns[regno].reloads_num
	       + (regno < FIRST_PSEUDO_REGISTER ? 0 : 3) < reloads_num)
	   && (regno < FIRST_PSEUDO_REGISTER
	       /* For short living pseudos, spilling + inheritance can
		  be considered a substitution for splitting.
		  Therefore we do not do splitting for local pseudos.
		  It also decreases aggressiveness of splitting.  The
		  minimal number of references is chosen taking into
		  account that for 2 references splitting has no sense
		  as we can just spill the pseudo.  */
	       || (regno >= FIRST_PSEUDO_REGISTER
		   && lra_reg_info[regno].nrefs > 3
		   && bitmap_bit_p (&ebb_global_regs, regno))))
	  || (regno >= FIRST_PSEUDO_REGISTER && need_for_call_save_p (regno)));
}
6340 :
6341 : /* Return class for the split pseudo created from original pseudo with
6342 : ALLOCNO_CLASS and MODE which got a hard register HARD_REGNO. We
6343 : choose subclass of ALLOCNO_CLASS which contains HARD_REGNO and
6344 : results in no secondary memory movements. */
6345 : static enum reg_class
6346 1396 : choose_split_class (enum reg_class allocno_class,
6347 : int hard_regno ATTRIBUTE_UNUSED,
6348 : machine_mode mode ATTRIBUTE_UNUSED)
6349 : {
6350 1396 : int i;
6351 1396 : enum reg_class cl, best_cl = NO_REGS;
6352 1396 : enum reg_class hard_reg_class ATTRIBUTE_UNUSED
6353 : = REGNO_REG_CLASS (hard_regno);
6354 :
6355 1396 : if (! targetm.secondary_memory_needed (mode, allocno_class, allocno_class)
6356 1396 : && TEST_HARD_REG_BIT (reg_class_contents[allocno_class], hard_regno))
6357 : return allocno_class;
6358 0 : for (i = 0;
6359 0 : (cl = reg_class_subclasses[allocno_class][i]) != LIM_REG_CLASSES;
6360 : i++)
6361 0 : if (! targetm.secondary_memory_needed (mode, cl, hard_reg_class)
6362 0 : && ! targetm.secondary_memory_needed (mode, hard_reg_class, cl)
6363 0 : && TEST_HARD_REG_BIT (reg_class_contents[cl], hard_regno)
6364 0 : && (best_cl == NO_REGS
6365 0 : || ira_class_hard_regs_num[best_cl] < ira_class_hard_regs_num[cl]))
6366 : best_cl = cl;
6367 : return best_cl;
6368 : }
6369 :
6370 : /* Copy any equivalence information from ORIGINAL_REGNO to NEW_REGNO. It only
6371 : makes sense to call this function if NEW_REGNO is always equal to
6372 : ORIGINAL_REGNO. Set up defined_p flag when caller_save_p flag is set up and
6373 : CALL_SAVE_P is true. */
6374 :
6375 : static void
6376 652614 : lra_copy_reg_equiv (unsigned int new_regno, unsigned int original_regno,
6377 : bool call_save_p)
6378 : {
6379 652614 : if (!ira_reg_equiv[original_regno].defined_p
6380 588877 : && !(call_save_p && ira_reg_equiv[original_regno].caller_save_p))
6381 : return;
6382 :
6383 63908 : ira_expand_reg_equiv ();
6384 63908 : ira_reg_equiv[new_regno].defined_p = true;
6385 63908 : if (ira_reg_equiv[original_regno].memory)
6386 30122 : ira_reg_equiv[new_regno].memory
6387 30122 : = copy_rtx (ira_reg_equiv[original_regno].memory);
6388 63908 : if (ira_reg_equiv[original_regno].constant)
6389 27558 : ira_reg_equiv[new_regno].constant
6390 27558 : = copy_rtx (ira_reg_equiv[original_regno].constant);
6391 63908 : if (ira_reg_equiv[original_regno].invariant)
6392 6228 : ira_reg_equiv[new_regno].invariant
6393 6228 : = copy_rtx (ira_reg_equiv[original_regno].invariant);
6394 : }
6395 :
6396 : /* Do split transformations for insn INSN, which defines or uses
6397 : ORIGINAL_REGNO. NEXT_USAGE_INSNS specifies which instruction in
6398 : the EBB next uses ORIGINAL_REGNO; it has the same form as the
6399 : "insns" field of usage_insns. If TO is not NULL, we don't use
6400 : usage_insns, we put restore insns after TO insn. It is a case when
6401 : we call it from lra_split_hard_reg_for, outside the inheritance
6402 : pass.
6403 :
6404 : The transformations look like:
6405 :
6406 : p <- ... p <- ...
6407 : ... s <- p (new insn -- save)
6408 : ... =>
6409 : ... p <- s (new insn -- restore)
6410 : <- ... p ... <- ... p ...
6411 : or
6412 : <- ... p ... <- ... p ...
6413 : ... s <- p (new insn -- save)
6414 : ... =>
6415 : ... p <- s (new insn -- restore)
6416 : <- ... p ... <- ... p ...
6417 :
6418 : where p is an original pseudo got a hard register or a hard
6419 : register and s is a new split pseudo. The save is put before INSN
6420 : if BEFORE_P is true. Return true if we succeed in such
6421 : transformation. */
6422 : static bool
6423 654306 : split_reg (bool before_p, int original_regno, rtx_insn *insn,
6424 : rtx next_usage_insns, rtx_insn *to)
6425 : {
6426 654306 : enum reg_class rclass;
6427 654306 : rtx original_reg;
6428 654306 : int hard_regno, nregs;
6429 654306 : rtx new_reg, usage_insn;
6430 654306 : rtx_insn *restore, *save;
6431 654306 : bool after_p;
6432 654306 : bool call_save_p;
6433 654306 : machine_mode mode;
6434 :
6435 654306 : if (original_regno < FIRST_PSEUDO_REGISTER)
6436 : {
6437 206 : rclass = ira_allocno_class_translate[REGNO_REG_CLASS (original_regno)];
6438 206 : hard_regno = original_regno;
6439 206 : call_save_p = false;
6440 206 : nregs = 1;
6441 206 : mode = lra_reg_info[hard_regno].biggest_mode;
6442 206 : machine_mode reg_rtx_mode = GET_MODE (regno_reg_rtx[hard_regno]);
6443 : /* A reg can have a biggest_mode of VOIDmode if it was only ever seen as
6444 : part of a multi-word register. In that case, just use the reg_rtx
6445 : mode. Do the same also if the biggest mode was larger than a register
6446 : or we can not compare the modes. Otherwise, limit the size to that of
6447 : the biggest access in the function or to the natural mode at least. */
6448 206 : if (mode == VOIDmode
6449 206 : || !ordered_p (GET_MODE_PRECISION (mode),
6450 206 : GET_MODE_PRECISION (reg_rtx_mode))
6451 206 : || paradoxical_subreg_p (mode, reg_rtx_mode)
6452 411 : || maybe_gt (GET_MODE_PRECISION (reg_rtx_mode), GET_MODE_PRECISION (mode)))
6453 : {
6454 654306 : original_reg = regno_reg_rtx[hard_regno];
6455 654306 : mode = reg_rtx_mode;
6456 : }
6457 : else
6458 189 : original_reg = gen_rtx_REG (mode, hard_regno);
6459 : }
6460 : else
6461 : {
6462 654100 : mode = PSEUDO_REGNO_MODE (original_regno);
6463 654100 : hard_regno = reg_renumber[original_regno];
6464 654100 : nregs = hard_regno_nregs (hard_regno, mode);
6465 654100 : rclass = lra_get_allocno_class (original_regno);
6466 654100 : original_reg = regno_reg_rtx[original_regno];
6467 654100 : call_save_p = need_for_call_save_p (original_regno);
6468 : }
6469 654306 : lra_assert (hard_regno >= 0);
6470 654306 : if (lra_dump_file != NULL)
6471 0 : fprintf (lra_dump_file,
6472 : " ((((((((((((((((((((((((((((((((((((((((((((((((\n");
6473 :
6474 654306 : if (call_save_p)
6475 : {
6476 652910 : mode = HARD_REGNO_CALLER_SAVE_MODE (hard_regno,
6477 : hard_regno_nregs (hard_regno, mode),
6478 : mode);
6479 652910 : new_reg = lra_create_new_reg (mode, NULL_RTX, NO_REGS, NULL, "save");
6480 : }
6481 : else
6482 : {
6483 1396 : rclass = choose_split_class (rclass, hard_regno, mode);
6484 1396 : if (rclass == NO_REGS)
6485 : {
6486 0 : if (lra_dump_file != NULL)
6487 : {
6488 0 : fprintf (lra_dump_file,
6489 : " Rejecting split of %d(%s): "
6490 : "no good reg class for %d(%s)\n",
6491 : original_regno,
6492 0 : reg_class_names[lra_get_allocno_class (original_regno)],
6493 : hard_regno,
6494 0 : reg_class_names[REGNO_REG_CLASS (hard_regno)]);
6495 0 : fprintf
6496 0 : (lra_dump_file,
6497 : " ))))))))))))))))))))))))))))))))))))))))))))))))\n");
6498 : }
6499 0 : return false;
6500 : }
6501 : /* Split_if_necessary can split hard registers used as part of a
6502 : multi-register mode but splits each register individually. The
6503 : mode used for each independent register may not be supported
6504 : so reject the split. Splitting the wider mode should theoretically
6505 : be possible but is not implemented. */
6506 1396 : if (!targetm.hard_regno_mode_ok (hard_regno, mode))
6507 : {
6508 0 : if (lra_dump_file != NULL)
6509 : {
6510 0 : fprintf (lra_dump_file,
6511 : " Rejecting split of %d(%s): unsuitable mode %s\n",
6512 : original_regno,
6513 0 : reg_class_names[lra_get_allocno_class (original_regno)],
6514 0 : GET_MODE_NAME (mode));
6515 0 : fprintf
6516 0 : (lra_dump_file,
6517 : " ))))))))))))))))))))))))))))))))))))))))))))))))\n");
6518 : }
6519 0 : return false;
6520 : }
6521 1396 : new_reg = lra_create_new_reg (mode, original_reg, rclass, NULL, "split");
6522 1396 : reg_renumber[REGNO (new_reg)] = hard_regno;
6523 : }
6524 654306 : int new_regno = REGNO (new_reg);
6525 654306 : save = emit_spill_move (true, new_reg, original_reg);
6526 654306 : if (NEXT_INSN (save) != NULL_RTX && !call_save_p)
6527 : {
6528 0 : if (lra_dump_file != NULL)
6529 : {
6530 0 : fprintf
6531 0 : (lra_dump_file,
6532 : " Rejecting split %d->%d resulting in > 2 save insns:\n",
6533 : original_regno, new_regno);
6534 0 : dump_rtl_slim (lra_dump_file, save, NULL, -1, 0);
6535 0 : fprintf (lra_dump_file,
6536 : " ))))))))))))))))))))))))))))))))))))))))))))))))\n");
6537 : }
6538 0 : return false;
6539 : }
6540 654306 : restore = emit_spill_move (false, new_reg, original_reg);
6541 654306 : if (NEXT_INSN (restore) != NULL_RTX && !call_save_p)
6542 : {
6543 0 : if (lra_dump_file != NULL)
6544 : {
6545 0 : fprintf (lra_dump_file,
6546 : " Rejecting split %d->%d "
6547 : "resulting in > 2 restore insns:\n",
6548 : original_regno, new_regno);
6549 0 : dump_rtl_slim (lra_dump_file, restore, NULL, -1, 0);
6550 0 : fprintf (lra_dump_file,
6551 : " ))))))))))))))))))))))))))))))))))))))))))))))))\n");
6552 : }
6553 0 : return false;
6554 : }
6555 : /* Transfer equivalence information to the spill register, so that
6556 : if we fail to allocate the spill register, we have the option of
6557 : rematerializing the original value instead of spilling to the stack. */
6558 654306 : if (!HARD_REGISTER_NUM_P (original_regno)
6559 654100 : && mode == PSEUDO_REGNO_MODE (original_regno))
6560 652614 : lra_copy_reg_equiv (new_regno, original_regno, call_save_p);
6561 654306 : lra_reg_info[new_regno].restore_rtx = regno_reg_rtx[original_regno];
6562 654306 : bitmap_set_bit (&lra_split_regs, new_regno);
6563 654306 : if (to != NULL)
6564 : {
6565 159 : lra_assert (next_usage_insns == NULL);
6566 159 : usage_insn = to;
6567 159 : after_p = true;
6568 : }
6569 : else
6570 : {
6571 : /* We need check_only_regs only inside the inheritance pass. */
6572 654147 : bitmap_set_bit (&check_only_regs, new_regno);
6573 654147 : bitmap_set_bit (&check_only_regs, original_regno);
6574 654147 : after_p = usage_insns[original_regno].after_p;
6575 761458 : for (;;)
6576 : {
6577 761458 : if (GET_CODE (next_usage_insns) != INSN_LIST)
6578 : {
6579 654147 : usage_insn = next_usage_insns;
6580 654147 : break;
6581 : }
6582 107311 : usage_insn = XEXP (next_usage_insns, 0);
6583 107311 : lra_assert (DEBUG_INSN_P (usage_insn));
6584 107311 : next_usage_insns = XEXP (next_usage_insns, 1);
6585 107311 : lra_substitute_pseudo (&usage_insn, original_regno, new_reg, false,
6586 : true);
6587 107311 : lra_update_insn_regno_info (as_a <rtx_insn *> (usage_insn));
6588 107311 : if (lra_dump_file != NULL)
6589 : {
6590 0 : fprintf (lra_dump_file, " Split reuse change %d->%d:\n",
6591 : original_regno, new_regno);
6592 0 : dump_insn_slim (lra_dump_file, as_a <rtx_insn *> (usage_insn));
6593 : }
6594 : }
6595 : }
6596 654306 : lra_assert (NOTE_P (usage_insn) || NONDEBUG_INSN_P (usage_insn));
6597 654306 : lra_assert (usage_insn != insn || (after_p && before_p));
6598 1108158 : lra_process_new_insns (as_a <rtx_insn *> (usage_insn),
6599 : after_p ? NULL : restore,
6600 : after_p ? restore : NULL,
6601 : call_save_p ? "Add reg<-save" : "Add reg<-split");
6602 654306 : if (call_save_p
6603 652910 : && first_call_insn != NULL
6604 1307216 : && BLOCK_FOR_INSN (first_call_insn) != BLOCK_FOR_INSN (insn))
6605 : /* PR116028: If original_regno is a pseudo that has been assigned a
6606 : callee-saved hard register, then emit the spill insn before the call
6607 : insn 'first_call_insn' instead of adjacent to 'insn'. If 'insn'
6608 : and 'first_call_insn' belong to the same EBB but to two separate
6609 : BBs, and if 'insn' is present in the entry BB, then generating the
6610 : spill insn in the entry BB can prevent shrink wrap from happening.
6611 : This is because the spill insn references the stack pointer and
6612 : hence the prolog gets generated in the entry BB itself. It is
6613 : also more efficient to generate the spill before
6614 : 'first_call_insn' as the spill now occurs only in the path
6615 : containing the call. */
6616 26496 : lra_process_new_insns (first_call_insn, save, NULL, "Add save<-reg");
6617 : else
6618 1256375 : lra_process_new_insns (insn, before_p ? save : NULL,
6619 : before_p ? NULL : save,
6620 : call_save_p ? "Add save<-reg" : "Add split<-reg");
6621 654306 : if (nregs > 1 || original_regno < FIRST_PSEUDO_REGISTER)
6622 : /* If we are trying to split multi-register. We should check
6623 : conflicts on the next assignment sub-pass. IRA can allocate on
6624 : sub-register levels, LRA do this on pseudos level right now and
6625 : this discrepancy may create allocation conflicts after
6626 : splitting.
6627 :
6628 : If we are trying to split hard register we should also check conflicts
6629 : as such splitting can create artificial conflict of the hard register
6630 : with another pseudo because of simplified conflict calculation in
6631 : LRA. */
6632 9822 : check_and_force_assignment_correctness_p = true;
6633 654306 : if (lra_dump_file != NULL)
6634 0 : fprintf (lra_dump_file,
6635 : " ))))))))))))))))))))))))))))))))))))))))))))))))\n");
6636 : return true;
6637 : }
6638 :
6639 : /* Split a hard reg for reload pseudo REGNO having RCLASS and living
6640 : in the range [FROM, TO]. Return true if did a split. Otherwise,
6641 : return false. */
6642 : bool
6643 1570 : spill_hard_reg_in_range (int regno, enum reg_class rclass, rtx_insn *from, rtx_insn *to)
6644 : {
6645 1570 : int i, hard_regno;
6646 1570 : int rclass_size;
6647 1570 : rtx_insn *insn;
6648 1570 : unsigned int uid;
6649 1570 : bitmap_iterator bi;
6650 1570 : HARD_REG_SET ignore;
6651 :
6652 1570 : lra_assert (from != NULL && to != NULL);
6653 1570 : ignore = lra_no_alloc_regs;
6654 4523 : EXECUTE_IF_SET_IN_BITMAP (&lra_reg_info[regno].insn_bitmap, 0, uid, bi)
6655 : {
6656 2953 : lra_insn_recog_data_t id = lra_insn_recog_data[uid];
6657 2953 : struct lra_static_insn_data *static_id = id->insn_static_data;
6658 2953 : struct lra_insn_reg *reg;
6659 :
6660 9848 : for (reg = id->regs; reg != NULL; reg = reg->next)
6661 6895 : if (reg->regno < FIRST_PSEUDO_REGISTER)
6662 157 : SET_HARD_REG_BIT (ignore, reg->regno);
6663 4609 : for (reg = static_id->hard_regs; reg != NULL; reg = reg->next)
6664 1656 : SET_HARD_REG_BIT (ignore, reg->regno);
6665 : }
6666 1570 : rclass_size = ira_class_hard_regs_num[rclass];
6667 4212 : for (i = 0; i < rclass_size; i++)
6668 : {
6669 2801 : hard_regno = ira_class_hard_regs[rclass][i];
6670 2801 : if (! TEST_HARD_REG_BIT (lra_reg_info[regno].conflict_hard_regs, hard_regno)
6671 2801 : || TEST_HARD_REG_BIT (ignore, hard_regno))
6672 2636 : continue;
6673 476 : for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
6674 : {
6675 317 : struct lra_static_insn_data *static_id;
6676 317 : struct lra_insn_reg *reg;
6677 :
6678 317 : if (!INSN_P (insn))
6679 0 : continue;
6680 317 : if (bitmap_bit_p (&lra_reg_info[hard_regno].insn_bitmap,
6681 317 : INSN_UID (insn)))
6682 : break;
6683 311 : static_id = lra_get_insn_recog_data (insn)->insn_static_data;
6684 365 : for (reg = static_id->hard_regs; reg != NULL; reg = reg->next)
6685 54 : if (reg->regno == hard_regno)
6686 : break;
6687 : if (reg != NULL)
6688 : break;
6689 : }
6690 165 : if (insn != NEXT_INSN (to))
6691 6 : continue;
6692 159 : if (split_reg (true, hard_regno, from, NULL, to))
6693 : return true;
6694 : }
6695 : return false;
6696 : }
6697 :
/* Recognize that we need a split transformation for insn INSN, which
   defines or uses REGNO in its insn biggest MODE (we use it only if
   REGNO is a hard register).  POTENTIAL_RELOAD_HARD_REGS contains
   hard registers which might be used for reloads since the EBB end.
   Put the save before INSN if BEFORE_P is true.  MAX_UID is the
   maximal insn uid existing before starting INSN processing.  Return
   true if we succeed in such transformation.  */
static bool
split_if_necessary (int regno, machine_mode mode,
		    HARD_REG_SET potential_reload_hard_regs,
		    bool before_p, rtx_insn *insn, int max_uid)
{
  bool res = false;
  int i, nregs = 1;
  rtx next_usage_insns;

  /* A multi-register hard reg is considered register by register.  */
  if (regno < FIRST_PSEUDO_REGISTER)
    nregs = hard_regno_nregs (regno, mode);
  for (i = 0; i < nregs; i++)
    if (usage_insns[regno + i].check == curr_usage_insns_check
	&& (next_usage_insns = usage_insns[regno + i].insns) != NULL_RTX
	/* To avoid processing the register twice or more.  */
	&& ((GET_CODE (next_usage_insns) != INSN_LIST
	     && INSN_UID (next_usage_insns) < max_uid)
	    || (GET_CODE (next_usage_insns) == INSN_LIST
		&& (INSN_UID (XEXP (next_usage_insns, 0)) < max_uid)))
	&& need_for_split_p (potential_reload_hard_regs, regno + i)
	&& split_reg (before_p, regno + i, insn, next_usage_insns, NULL))
    res = true;
  return res;
}
6729 :
6730 : /* Return TRUE if rtx X is considered as an invariant for
6731 : inheritance. */
6732 : static bool
6733 11472794 : invariant_p (const_rtx x)
6734 : {
6735 11472794 : machine_mode mode;
6736 11472794 : const char *fmt;
6737 11472794 : enum rtx_code code;
6738 11472794 : int i, j;
6739 :
6740 11472794 : if (side_effects_p (x))
6741 : return false;
6742 :
6743 11446797 : code = GET_CODE (x);
6744 11446797 : mode = GET_MODE (x);
6745 11446797 : if (code == SUBREG)
6746 : {
6747 464197 : x = SUBREG_REG (x);
6748 464197 : code = GET_CODE (x);
6749 464197 : mode = wider_subreg_mode (mode, GET_MODE (x));
6750 : }
6751 :
6752 11446797 : if (MEM_P (x))
6753 : return false;
6754 :
6755 9702546 : if (REG_P (x))
6756 : {
6757 3453693 : int i, nregs, regno = REGNO (x);
6758 :
6759 3453693 : if (regno >= FIRST_PSEUDO_REGISTER || regno == STACK_POINTER_REGNUM
6760 898934 : || TEST_HARD_REG_BIT (eliminable_regset, regno)
6761 3470904 : || GET_MODE_CLASS (GET_MODE (x)) == MODE_CC)
6762 : return false;
6763 2 : nregs = hard_regno_nregs (regno, mode);
6764 2 : for (i = 0; i < nregs; i++)
6765 2 : if (! fixed_regs[regno + i]
6766 : /* A hard register may be clobbered in the current insn
6767 : but we can ignore this case because if the hard
6768 : register is used it should be set somewhere after the
6769 : clobber. */
6770 2 : || bitmap_bit_p (&invalid_invariant_regs, regno + i))
6771 2 : return false;
6772 : }
6773 6248853 : fmt = GET_RTX_FORMAT (code);
6774 10995494 : for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6775 : {
6776 8296858 : if (fmt[i] == 'e')
6777 : {
6778 5436677 : if (! invariant_p (XEXP (x, i)))
6779 : return false;
6780 : }
6781 2860181 : else if (fmt[i] == 'E')
6782 : {
6783 646397 : for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6784 535867 : if (! invariant_p (XVECEXP (x, i, j)))
6785 : return false;
6786 : }
6787 : }
6788 : return true;
6789 : }
6790 :
/* We have 'dest_reg <- invariant'.  Let us try to make an invariant
   inheritance transformation (using dest_reg instead of the invariant
   in a subsequent insn).  */
static bool
process_invariant_for_inheritance (rtx dst_reg, rtx invariant_rtx)
{
  invariant_ptr_t invariant_ptr;
  rtx_insn *insn, *new_insns;
  rtx insn_set, insn_reg, new_reg;
  int insn_regno;
  bool succ_p = false;
  int dst_regno = REGNO (dst_reg);
  machine_mode dst_mode = GET_MODE (dst_reg);
  enum reg_class cl = lra_get_allocno_class (dst_regno), insn_reg_cl;

  /* Enter the invariant into the table.  If an insn computing the same
     invariant was recorded before, it is found in invariant_ptr->insn.  */
  invariant_ptr = insert_invariant (invariant_rtx);
  if ((insn = invariant_ptr->insn) != NULL_RTX)
    {
      /* We have a subsequent insn using the invariant.  */
      insn_set = single_set (insn);
      lra_assert (insn_set != NULL);
      insn_reg = SET_DEST (insn_set);
      lra_assert (REG_P (insn_reg));
      insn_regno = REGNO (insn_reg);
      insn_reg_cl = lra_get_allocno_class (insn_regno);

      if (dst_mode == GET_MODE (insn_reg)
	  /* We should consider only result move reg insns which are
	     cheap.  */
	  && targetm.register_move_cost (dst_mode, cl, insn_reg_cl) == 2
	  && targetm.register_move_cost (dst_mode, cl, cl) == 2)
	{
	  if (lra_dump_file != NULL)
	    fprintf (lra_dump_file,
		     "    [[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[\n");
	  /* Create an inheritance pseudo holding the invariant value
	     and record the invariant-computing pattern so the
	     transformation can be undone if the pseudo gets no hard
	     reg.  */
	  new_reg = lra_create_new_reg (dst_mode, dst_reg, cl, NULL,
					"invariant inheritance");
	  bitmap_set_bit (&lra_inheritance_pseudos, REGNO (new_reg));
	  bitmap_set_bit (&check_only_regs, REGNO (new_reg));
	  lra_reg_info[REGNO (new_reg)].restore_rtx = PATTERN (insn);
	  /* Copy the invariant into the new pseudo right after the
	     current insn ...  */
	  start_sequence ();
	  lra_emit_move (new_reg, dst_reg);
	  new_insns = end_sequence ();
	  lra_process_new_insns (curr_insn, NULL, new_insns,
				 "Add invariant inheritance<-original");
	  /* ... and replace the subsequent recomputation of the
	     invariant by a move from the new pseudo.  */
	  start_sequence ();
	  lra_emit_move (SET_DEST (insn_set), new_reg);
	  new_insns = end_sequence ();
	  lra_process_new_insns (insn, NULL, new_insns,
				 "Changing reload<-inheritance");
	  lra_set_insn_deleted (insn);
	  succ_p = true;
	  if (lra_dump_file != NULL)
	    {
	      fprintf (lra_dump_file,
		       "    Invariant inheritance reuse change %d (bb%d):\n",
		       REGNO (new_reg), BLOCK_FOR_INSN (insn)->index);
	      dump_insn_slim (lra_dump_file, insn);
	      fprintf (lra_dump_file,
		       "	  ]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]\n");
	    }
	}
    }
  /* Remember the current insn as the latest one computing the
     invariant.  */
  invariant_ptr->insn = curr_insn;
  return succ_p;
}
6857 :
/* Subset of check_only_regs live at the current program point in the
   current EBB (maintained by update_ebb_live_info).  */
static bitmap_head live_regs;
6861 :
/* Update live info in EBB given by its HEAD and TAIL insns after
   inheritance/split transformation.  Only registers in
   check_only_regs are updated; the EBB is scanned backwards, keeping
   the current live set in live_regs and refreshing the df live-in and
   live-out sets of each BB.  The function removes dead moves too.  */
static void
update_ebb_live_info (rtx_insn *head, rtx_insn *tail)
{
  unsigned int j;
  int i, regno;
  bool live_p;
  rtx_insn *prev_insn;
  rtx set;
  bool remove_p;
  basic_block last_bb, prev_bb, curr_bb;
  bitmap_iterator bi;
  struct lra_insn_reg *reg;
  edge e;
  edge_iterator ei;

  last_bb = BLOCK_FOR_INSN (tail);
  prev_bb = NULL;
  for (curr_insn = tail;
       curr_insn != PREV_INSN (head);
       curr_insn = prev_insn)
    {
      prev_insn = PREV_INSN (curr_insn);
      /* We need to process empty blocks too.  They contain
	 NOTE_INSN_BASIC_BLOCK referring to the basic block.  */
      if (NOTE_P (curr_insn) && NOTE_KIND (curr_insn) != NOTE_INSN_BASIC_BLOCK)
	continue;
      curr_bb = BLOCK_FOR_INSN (curr_insn);
      if (curr_bb != prev_bb)
	{
	  /* We crossed a BB boundary (scanning backwards).  */
	  if (prev_bb != NULL)
	    {
	      /* Update df_get_live_in (prev_bb):  */
	      EXECUTE_IF_SET_IN_BITMAP (&check_only_regs, 0, j, bi)
		if (bitmap_bit_p (&live_regs, j))
		  bitmap_set_bit (df_get_live_in (prev_bb), j);
		else
		  bitmap_clear_bit (df_get_live_in (prev_bb), j);
	    }
	  if (curr_bb != last_bb)
	    {
	      /* Update df_get_live_out (curr_bb): a tracked reg is
		 live out if it is live in some successor.  */
	      EXECUTE_IF_SET_IN_BITMAP (&check_only_regs, 0, j, bi)
		{
		  live_p = bitmap_bit_p (&live_regs, j);
		  if (! live_p)
		    FOR_EACH_EDGE (e, ei, curr_bb->succs)
		      if (bitmap_bit_p (df_get_live_in (e->dest), j))
			{
			  live_p = true;
			  break;
			}
		  if (live_p)
		    bitmap_set_bit (df_get_live_out (curr_bb), j);
		  else
		    bitmap_clear_bit (df_get_live_out (curr_bb), j);
		}
	    }
	  prev_bb = curr_bb;
	  bitmap_and (&live_regs, &check_only_regs, df_get_live_out (curr_bb));
	}
      if (! NONDEBUG_INSN_P (curr_insn))
	continue;
      curr_id = lra_get_insn_recog_data (curr_insn);
      curr_static_id = curr_id->insn_static_data;
      remove_p = false;
      /* A single set of a tracked pseudo which is dead after the insn
	 is a candidate for removal as a dead store.  */
      if ((set = single_set (curr_insn)) != NULL_RTX
	  && REG_P (SET_DEST (set))
	  && (regno = REGNO (SET_DEST (set))) >= FIRST_PSEUDO_REGISTER
	  && SET_DEST (set) != pic_offset_table_rtx
	  && bitmap_bit_p (&check_only_regs, regno)
	  && ! bitmap_bit_p (&live_regs, regno))
	remove_p = true;
      /* See which defined values die here.  */
      for (reg = curr_id->regs; reg != NULL; reg = reg->next)
	if (reg->type == OP_OUT && ! reg->subreg_p)
	  bitmap_clear_bit (&live_regs, reg->regno);
      for (reg = curr_static_id->hard_regs; reg != NULL; reg = reg->next)
	if (reg->type == OP_OUT && ! reg->subreg_p)
	  bitmap_clear_bit (&live_regs, reg->regno);
      if (curr_id->arg_hard_regs != NULL)
	/* Make clobbered argument hard registers die.  */
	for (i = 0; (regno = curr_id->arg_hard_regs[i]) >= 0; i++)
	  if (regno >= FIRST_PSEUDO_REGISTER)
	    bitmap_clear_bit (&live_regs, regno - FIRST_PSEUDO_REGISTER);
      /* Mark each used value as live.  */
      for (reg = curr_id->regs; reg != NULL; reg = reg->next)
	if (reg->type != OP_OUT
	    && bitmap_bit_p (&check_only_regs, reg->regno))
	  bitmap_set_bit (&live_regs, reg->regno);
      for (reg = curr_static_id->hard_regs; reg != NULL; reg = reg->next)
	if (reg->type != OP_OUT
	    && bitmap_bit_p (&check_only_regs, reg->regno))
	  bitmap_set_bit (&live_regs, reg->regno);
      if (curr_id->arg_hard_regs != NULL)
	/* Make used argument hard registers live.  */
	for (i = 0; (regno = curr_id->arg_hard_regs[i]) >= 0; i++)
	  if (regno < FIRST_PSEUDO_REGISTER
	      && bitmap_bit_p (&check_only_regs, regno))
	    bitmap_set_bit (&live_regs, regno);
      /* It is quite important to remove dead move insns because it
	 means removing dead store.  We don't need to process them for
	 constraints.  */
      if (remove_p)
	{
	  if (lra_dump_file != NULL)
	    {
	      fprintf (lra_dump_file, "	    Removing dead insn:\n ");
	      dump_insn_slim (lra_dump_file, curr_insn);
	    }
	  lra_set_insn_deleted (curr_insn);
	}
    }
}
6978 :
/* The structure describes info to do an inheritance for the current
   insn.  We need to collect such info first before doing the
   transformations because the transformations change the insn
   internal representation.  */
struct to_inherit
{
  /* Original regno.  */
  int regno;
  /* Subsequent insns which can inherit original reg value.  */
  rtx insns;
};

/* Array containing all info for doing inheritance from the current
   insn.  */
static struct to_inherit to_inherit[LRA_MAX_INSN_RELOADS];

/* Number of elements in the previous array.  */
static int to_inherit_num;
6997 :
6998 : /* Add inheritance info REGNO and INSNS. Their meaning is described in
6999 : structure to_inherit. */
7000 : static void
7001 312184 : add_to_inherit (int regno, rtx insns)
7002 : {
7003 312184 : int i;
7004 :
7005 312266 : for (i = 0; i < to_inherit_num; i++)
7006 82 : if (to_inherit[i].regno == regno)
7007 : return;
7008 312184 : lra_assert (to_inherit_num < LRA_MAX_INSN_RELOADS);
7009 312184 : to_inherit[to_inherit_num].regno = regno;
7010 312184 : to_inherit[to_inherit_num++].insns = insns;
7011 : }
7012 :
7013 : /* Return the last non-debug insn in basic block BB, or the block begin
7014 : note if none. */
7015 : static rtx_insn *
7016 29951957 : get_last_insertion_point (basic_block bb)
7017 : {
7018 29951957 : rtx_insn *insn;
7019 :
7020 32413188 : FOR_BB_INSNS_REVERSE (bb, insn)
7021 32413188 : if (NONDEBUG_INSN_P (insn) || NOTE_INSN_BASIC_BLOCK_P (insn))
7022 29951957 : return insn;
7023 0 : gcc_unreachable ();
7024 : }
7025 :
/* Set up RES by registers living on edges FROM except the edge (FROM,
   TO) or by registers set up in a jump insn in BB FROM.  Note: as a
   side effect curr_id is set to the recog data of FROM's final jump
   insn, if any.  */
static void
get_live_on_other_edges (basic_block from, basic_block to, bitmap res)
{
  rtx_insn *last;
  struct lra_insn_reg *reg;
  edge e;
  edge_iterator ei;

  lra_assert (to != NULL);
  bitmap_clear (res);
  /* Collect regs live at the start of every successor of FROM other
     than TO.  */
  FOR_EACH_EDGE (e, ei, from->succs)
    if (e->dest != to)
      bitmap_ior_into (res, df_get_live_in (e->dest));
  last = get_last_insertion_point (from);
  if (! JUMP_P (last))
    return;
  /* Regs set by the final jump insn of FROM are live on the out-edges
     too.  */
  curr_id = lra_get_insn_recog_data (last);
  for (reg = curr_id->regs; reg != NULL; reg = reg->next)
    if (reg->type != OP_IN)
      bitmap_set_bit (res, reg->regno);
}
7049 :
/* Used as temporary results of some bitmap calculations.  */
static bitmap_head temp_bitmap;

/* We split for reloads of small class of hard regs.  The following
   defines how many hard regs the class should have to be qualified as
   small.  The code is mostly oriented to x86/x86-64 architecture
   where some insns need to use only specific register or pair of
   registers and these registers can live in RTL explicitly, e.g. for
   parameter passing.  */
static const int max_small_class_regs_num = 2;
7060 :
7061 : /* Do inheritance/split transformations in EBB starting with HEAD and
7062 : finishing on TAIL. We process EBB insns in the reverse order.
7063 : Return true if we did any inheritance/split transformation in the
7064 : EBB.
7065 :
7066 : We should avoid excessive splitting which results in worse code
7067 : because of inaccurate cost calculations for spilling new split
7068 : pseudos in such case. To achieve this we do splitting only if
7069 : register pressure is high in given basic block and there are reload
7070 : pseudos requiring hard registers. We could do more register
7071 : pressure calculations at any given program point to avoid necessary
7072 : splitting even more but it is to expensive and the current approach
7073 : works well enough. */
7074 : static bool
7075 12672159 : inherit_in_ebb (rtx_insn *head, rtx_insn *tail)
7076 : {
7077 12672159 : int i, src_regno, dst_regno, nregs;
7078 12672159 : bool change_p, succ_p, update_reloads_num_p;
7079 12672159 : rtx_insn *prev_insn, *last_insn;
7080 12672159 : rtx next_usage_insns, curr_set;
7081 12672159 : enum reg_class cl;
7082 12672159 : struct lra_insn_reg *reg;
7083 12672159 : basic_block last_processed_bb, curr_bb = NULL;
7084 12672159 : HARD_REG_SET potential_reload_hard_regs, live_hard_regs;
7085 12672159 : bitmap to_process;
7086 12672159 : unsigned int j;
7087 12672159 : bitmap_iterator bi;
7088 12672159 : bool head_p, after_p;
7089 :
7090 12672159 : change_p = false;
7091 12672159 : curr_usage_insns_check++;
7092 12672159 : clear_invariants ();
7093 12672159 : reloads_num = calls_num = 0;
7094 164738067 : for (unsigned int i = 0; i < NUM_ABI_IDS; ++i)
7095 152065908 : last_call_for_abi[i] = 0;
7096 12672159 : CLEAR_HARD_REG_SET (full_and_partial_call_clobbers);
7097 12672159 : bitmap_clear (&check_only_regs);
7098 12672159 : bitmap_clear (&invalid_invariant_regs);
7099 12672159 : last_processed_bb = NULL;
7100 12672159 : CLEAR_HARD_REG_SET (potential_reload_hard_regs);
7101 12672159 : live_hard_regs = eliminable_regset | lra_no_alloc_regs;
7102 : /* We don't process new insns generated in the loop. */
7103 235837732 : for (curr_insn = tail; curr_insn != PREV_INSN (head); curr_insn = prev_insn)
7104 : {
7105 223165573 : prev_insn = PREV_INSN (curr_insn);
7106 223165573 : if (BLOCK_FOR_INSN (curr_insn) != NULL)
7107 223165337 : curr_bb = BLOCK_FOR_INSN (curr_insn);
7108 223165573 : if (last_processed_bb != curr_bb)
7109 : {
7110 : /* We are at the end of BB. Add qualified living
7111 : pseudos for potential splitting. */
7112 18432013 : to_process = df_get_live_out (curr_bb);
7113 18432013 : if (last_processed_bb != NULL)
7114 : {
7115 : /* We are somewhere in the middle of EBB. */
7116 5759854 : get_live_on_other_edges (curr_bb, last_processed_bb,
7117 : &temp_bitmap);
7118 5759854 : to_process = &temp_bitmap;
7119 : }
7120 18432013 : last_processed_bb = curr_bb;
7121 18432013 : last_insn = get_last_insertion_point (curr_bb);
7122 36864026 : after_p = (! JUMP_P (last_insn)
7123 18432013 : && (! CALL_P (last_insn)
7124 2296180 : || (find_reg_note (last_insn,
7125 : REG_NORETURN, NULL_RTX) == NULL_RTX
7126 1371747 : && ! SIBLING_CALL_P (last_insn))));
7127 18432013 : CLEAR_HARD_REG_SET (potential_reload_hard_regs);
7128 200566026 : EXECUTE_IF_SET_IN_BITMAP (to_process, 0, j, bi)
7129 : {
7130 182134019 : if ((int) j >= lra_constraint_new_regno_start)
7131 : break;
7132 182134013 : if (j < FIRST_PSEUDO_REGISTER || reg_renumber[j] >= 0)
7133 : {
7134 118430516 : if (j < FIRST_PSEUDO_REGISTER)
7135 69300022 : SET_HARD_REG_BIT (live_hard_regs, j);
7136 : else
7137 49130494 : add_to_hard_reg_set (&live_hard_regs,
7138 49130494 : PSEUDO_REGNO_MODE (j),
7139 49130494 : reg_renumber[j]);
7140 118430516 : setup_next_usage_insn (j, last_insn, reloads_num, after_p);
7141 : }
7142 : }
7143 : }
7144 223165573 : src_regno = dst_regno = -1;
7145 223165573 : curr_set = single_set (curr_insn);
7146 223165573 : if (curr_set != NULL_RTX && REG_P (SET_DEST (curr_set)))
7147 83264677 : dst_regno = REGNO (SET_DEST (curr_set));
7148 113124115 : if (curr_set != NULL_RTX && REG_P (SET_SRC (curr_set)))
7149 39227544 : src_regno = REGNO (SET_SRC (curr_set));
7150 223165573 : update_reloads_num_p = true;
7151 223165573 : if (src_regno < lra_constraint_new_regno_start
7152 216834519 : && src_regno >= FIRST_PSEUDO_REGISTER
7153 27674923 : && reg_renumber[src_regno] < 0
7154 3679373 : && dst_regno >= lra_constraint_new_regno_start
7155 225775578 : && (cl = lra_get_allocno_class (dst_regno)) != NO_REGS)
7156 : {
7157 : /* 'reload_pseudo <- original_pseudo'. */
7158 2610005 : if (ira_class_hard_regs_num[cl] <= max_small_class_regs_num)
7159 22219 : reloads_num++;
7160 2610005 : update_reloads_num_p = false;
7161 2610005 : succ_p = false;
7162 2610005 : if (usage_insns[src_regno].check == curr_usage_insns_check
7163 2610005 : && (next_usage_insns = usage_insns[src_regno].insns) != NULL_RTX)
7164 475274 : succ_p = inherit_reload_reg (false, src_regno, cl,
7165 : curr_insn, next_usage_insns);
7166 475274 : if (succ_p)
7167 : change_p = true;
7168 : else
7169 2158020 : setup_next_usage_insn (src_regno, curr_insn, reloads_num, false);
7170 5220010 : if (hard_reg_set_subset_p (reg_class_contents[cl], live_hard_regs))
7171 627644091 : potential_reload_hard_regs |= reg_class_contents[cl];
7172 : }
7173 220555568 : else if (src_regno < 0
7174 183938029 : && dst_regno >= lra_constraint_new_regno_start
7175 5500250 : && invariant_p (SET_SRC (curr_set))
7176 276309 : && (cl = lra_get_allocno_class (dst_regno)) != NO_REGS
7177 275767 : && ! bitmap_bit_p (&invalid_invariant_regs, dst_regno)
7178 220792632 : && ! bitmap_bit_p (&invalid_invariant_regs,
7179 237064 : ORIGINAL_REGNO(regno_reg_rtx[dst_regno])))
7180 : {
7181 : /* 'reload_pseudo <- invariant'. */
7182 175161 : if (ira_class_hard_regs_num[cl] <= max_small_class_regs_num)
7183 8348 : reloads_num++;
7184 175161 : update_reloads_num_p = false;
7185 175161 : if (process_invariant_for_inheritance (SET_DEST (curr_set), SET_SRC (curr_set)))
7186 22435 : change_p = true;
7187 350322 : if (hard_reg_set_subset_p (reg_class_contents[cl], live_hard_regs))
7188 627644091 : potential_reload_hard_regs |= reg_class_contents[cl];
7189 : }
7190 220380407 : else if (src_regno >= lra_constraint_new_regno_start
7191 6331054 : && dst_regno < lra_constraint_new_regno_start
7192 5524231 : && dst_regno >= FIRST_PSEUDO_REGISTER
7193 3692093 : && reg_renumber[dst_regno] < 0
7194 1448157 : && (cl = lra_get_allocno_class (src_regno)) != NO_REGS
7195 1448157 : && usage_insns[dst_regno].check == curr_usage_insns_check
7196 220380407 : && (next_usage_insns
7197 475024 : = usage_insns[dst_regno].insns) != NULL_RTX)
7198 : {
7199 475024 : if (ira_class_hard_regs_num[cl] <= max_small_class_regs_num)
7200 8004 : reloads_num++;
7201 475024 : update_reloads_num_p = false;
7202 : /* 'original_pseudo <- reload_pseudo'. */
7203 475024 : if (! JUMP_P (curr_insn)
7204 475024 : && inherit_reload_reg (true, dst_regno, cl,
7205 : curr_insn, next_usage_insns))
7206 : change_p = true;
7207 : /* Invalidate. */
7208 475024 : usage_insns[dst_regno].check = 0;
7209 950048 : if (hard_reg_set_subset_p (reg_class_contents[cl], live_hard_regs))
7210 627644091 : potential_reload_hard_regs |= reg_class_contents[cl];
7211 : }
7212 219905383 : else if (INSN_P (curr_insn))
7213 : {
7214 184500005 : int iter;
7215 184500005 : int max_uid = get_max_uid ();
7216 :
7217 184500005 : curr_id = lra_get_insn_recog_data (curr_insn);
7218 184500005 : curr_static_id = curr_id->insn_static_data;
7219 184500005 : to_inherit_num = 0;
7220 : /* Process insn definitions. */
7221 553500015 : for (iter = 0; iter < 2; iter++)
7222 369000010 : for (reg = iter == 0 ? curr_id->regs : curr_static_id->hard_regs;
7223 595643071 : reg != NULL;
7224 226643061 : reg = reg->next)
7225 226643061 : if (reg->type != OP_IN
7226 226643061 : && (dst_regno = reg->regno) < lra_constraint_new_regno_start)
7227 : {
7228 45545765 : if (dst_regno >= FIRST_PSEUDO_REGISTER && reg->type == OP_OUT
7229 43438667 : && reg_renumber[dst_regno] < 0 && ! reg->subreg_p
7230 1759841 : && usage_insns[dst_regno].check == curr_usage_insns_check
7231 90428072 : && (next_usage_insns
7232 128795 : = usage_insns[dst_regno].insns) != NULL_RTX)
7233 : {
7234 128795 : struct lra_insn_reg *r;
7235 :
7236 383939 : for (r = curr_id->regs; r != NULL; r = r->next)
7237 255144 : if (r->type != OP_OUT && r->regno == dst_regno)
7238 : break;
7239 : /* Don't do inheritance if the pseudo is also
7240 : used in the insn. */
7241 128795 : if (r == NULL)
7242 : /* We cannot do inheritance right now
7243 : because the current insn reg info (chain
7244 : regs) can change after that. */
7245 128795 : add_to_inherit (dst_regno, next_usage_insns);
7246 : }
7247 : /* We cannot process one reg twice here because of
7248 : usage_insns invalidation. */
7249 90428072 : if ((dst_regno < FIRST_PSEUDO_REGISTER
7250 45545765 : || reg_renumber[dst_regno] >= 0)
7251 88531935 : && ! reg->subreg_p && reg->type != OP_IN)
7252 : {
7253 88252846 : HARD_REG_SET s;
7254 :
7255 88252846 : if (split_if_necessary (dst_regno, reg->biggest_mode,
7256 : potential_reload_hard_regs,
7257 : false, curr_insn, max_uid))
7258 57998 : change_p = true;
7259 88252846 : CLEAR_HARD_REG_SET (s);
7260 88252846 : if (dst_regno < FIRST_PSEUDO_REGISTER)
7261 44882307 : add_to_hard_reg_set (&s, reg->biggest_mode, dst_regno);
7262 : else
7263 43370539 : add_to_hard_reg_set (&s, PSEUDO_REGNO_MODE (dst_regno),
7264 43370539 : reg_renumber[dst_regno]);
7265 88252846 : live_hard_regs &= ~s;
7266 176505692 : potential_reload_hard_regs &= ~s;
7267 : }
7268 : /* We should invalidate potential inheritance or
7269 : splitting for the current insn usages to the next
7270 : usage insns (see code below) as the output pseudo
7271 : prevents this. */
7272 90428072 : if ((dst_regno >= FIRST_PSEUDO_REGISTER
7273 45545765 : && reg_renumber[dst_regno] < 0)
7274 88531935 : || (reg->type == OP_OUT && ! reg->subreg_p
7275 80554438 : && (dst_regno < FIRST_PSEUDO_REGISTER
7276 41271999 : || reg_renumber[dst_regno] >= 0)))
7277 : {
7278 : /* Invalidate and mark definitions. */
7279 43168136 : if (dst_regno >= FIRST_PSEUDO_REGISTER)
7280 43168136 : usage_insns[dst_regno].check = -(int) INSN_UID (curr_insn);
7281 : else
7282 : {
7283 39282439 : nregs = hard_regno_nregs (dst_regno,
7284 39282439 : reg->biggest_mode);
7285 78818561 : for (i = 0; i < nregs; i++)
7286 79072244 : usage_insns[dst_regno + i].check
7287 39536122 : = -(int) INSN_UID (curr_insn);
7288 : }
7289 : }
7290 : }
7291 : /* Process clobbered call regs. */
7292 184500005 : if (curr_id->arg_hard_regs != NULL)
7293 19676207 : for (i = 0; (dst_regno = curr_id->arg_hard_regs[i]) >= 0; i++)
7294 14012345 : if (dst_regno >= FIRST_PSEUDO_REGISTER)
7295 1611174 : usage_insns[dst_regno - FIRST_PSEUDO_REGISTER].check
7296 805587 : = -(int) INSN_UID (curr_insn);
7297 184500005 : if (! JUMP_P (curr_insn))
7298 173082634 : for (i = 0; i < to_inherit_num; i++)
7299 128795 : if (inherit_reload_reg (true, to_inherit[i].regno,
7300 : ALL_REGS, curr_insn,
7301 : to_inherit[i].insns))
7302 103711 : change_p = true;
7303 184500005 : if (CALL_P (curr_insn))
7304 : {
7305 7268914 : rtx cheap, pat, dest;
7306 7268914 : rtx_insn *restore;
7307 7268914 : int regno, hard_regno;
7308 :
7309 7268914 : calls_num++;
7310 7268914 : function_abi callee_abi = insn_callee_abi (curr_insn);
7311 7268914 : last_call_for_abi[callee_abi.id ()] = calls_num;
7312 7268914 : full_and_partial_call_clobbers
7313 7268914 : |= callee_abi.full_and_partial_reg_clobbers ();
7314 7268914 : first_call_insn = curr_insn;
7315 7268914 : if ((cheap = find_reg_note (curr_insn,
7316 : REG_RETURNED, NULL_RTX)) != NULL_RTX
7317 43809 : && ((cheap = XEXP (cheap, 0)), true)
7318 43809 : && (regno = REGNO (cheap)) >= FIRST_PSEUDO_REGISTER
7319 43809 : && (hard_regno = reg_renumber[regno]) >= 0
7320 32524 : && usage_insns[regno].check == curr_usage_insns_check
7321 : /* If there are pending saves/restores, the
7322 : optimization is not worth. */
7323 28352 : && usage_insns[regno].calls_num == calls_num - 1
7324 7295415 : && callee_abi.clobbers_reg_p (GET_MODE (cheap), hard_regno))
7325 : {
7326 : /* Restore the pseudo from the call result as
7327 : REG_RETURNED note says that the pseudo value is
7328 : in the call result and the pseudo is an argument
7329 : of the call. */
7330 10968 : pat = PATTERN (curr_insn);
7331 10968 : if (GET_CODE (pat) == PARALLEL)
7332 0 : pat = XVECEXP (pat, 0, 0);
7333 10968 : dest = SET_DEST (pat);
7334 : /* For multiple return values dest is PARALLEL.
7335 : Currently we handle only single return value case. */
7336 10968 : if (REG_P (dest))
7337 : {
7338 10968 : start_sequence ();
7339 10968 : emit_move_insn (cheap, copy_rtx (dest));
7340 10968 : restore = end_sequence ();
7341 10968 : lra_process_new_insns (curr_insn, NULL, restore,
7342 : "Inserting call parameter restore");
7343 : /* We don't need to save/restore of the pseudo from
7344 : this call. */
7345 10968 : usage_insns[regno].calls_num = calls_num;
7346 10968 : remove_from_hard_reg_set
7347 10968 : (&full_and_partial_call_clobbers,
7348 10968 : GET_MODE (cheap), hard_regno);
7349 10968 : bitmap_set_bit (&check_only_regs, regno);
7350 : }
7351 : }
7352 : }
7353 184500005 : to_inherit_num = 0;
7354 : /* Process insn usages. */
7355 553500015 : for (iter = 0; iter < 2; iter++)
7356 369000010 : for (reg = iter == 0 ? curr_id->regs : curr_static_id->hard_regs;
7357 595643071 : reg != NULL;
7358 226643061 : reg = reg->next)
7359 226643061 : if ((reg->type != OP_OUT
7360 89133921 : || (reg->type == OP_OUT && reg->subreg_p))
7361 227184594 : && (src_regno = reg->regno) < lra_constraint_new_regno_start)
7362 : {
7363 126704097 : if (src_regno >= FIRST_PSEUDO_REGISTER
7364 73483905 : && reg_renumber[src_regno] < 0 && reg->type == OP_IN)
7365 : {
7366 2443686 : if (usage_insns[src_regno].check == curr_usage_insns_check
7367 783486 : && (next_usage_insns
7368 783486 : = usage_insns[src_regno].insns) != NULL_RTX
7369 3227172 : && NONDEBUG_INSN_P (curr_insn))
7370 183389 : add_to_inherit (src_regno, next_usage_insns);
7371 4520594 : else if (usage_insns[src_regno].check
7372 2260297 : != -(int) INSN_UID (curr_insn))
7373 : /* Add usages but only if the reg is not set up
7374 : in the same insn. */
7375 2260297 : add_next_usage_insn (src_regno, curr_insn, reloads_num);
7376 : }
7377 71040219 : else if (src_regno < FIRST_PSEUDO_REGISTER
7378 71040219 : || reg_renumber[src_regno] >= 0)
7379 : {
7380 124124115 : bool before_p;
7381 124124115 : rtx_insn *use_insn = curr_insn;
7382 124124115 : rtx_insn *prev_insn = PREV_INSN (curr_insn);
7383 :
7384 248248230 : before_p = (JUMP_P (curr_insn)
7385 124124115 : || (CALL_P (curr_insn) && reg->type == OP_IN));
7386 124124115 : if (NONDEBUG_INSN_P (curr_insn)
7387 110783218 : && (! JUMP_P (curr_insn) || reg->type == OP_IN)
7388 234907228 : && split_if_necessary (src_regno, reg->biggest_mode,
7389 : potential_reload_hard_regs,
7390 : before_p, curr_insn, max_uid))
7391 : {
7392 215095 : if (reg->subreg_p)
7393 3151 : check_and_force_assignment_correctness_p = true;
7394 215095 : change_p = true;
7395 : /* Invalidate. */
7396 215095 : usage_insns[src_regno].check = 0;
7397 215095 : if (before_p && PREV_INSN (curr_insn) != prev_insn)
7398 : use_insn = PREV_INSN (curr_insn);
7399 : }
7400 124124115 : if (NONDEBUG_INSN_P (curr_insn))
7401 : {
7402 110783218 : if (src_regno < FIRST_PSEUDO_REGISTER)
7403 48617051 : add_to_hard_reg_set (&live_hard_regs,
7404 48617051 : reg->biggest_mode, src_regno);
7405 : else
7406 62166167 : add_to_hard_reg_set (&live_hard_regs,
7407 62166167 : PSEUDO_REGNO_MODE (src_regno),
7408 62166167 : reg_renumber[src_regno]);
7409 : }
7410 124124115 : if (src_regno >= FIRST_PSEUDO_REGISTER)
7411 70903923 : add_next_usage_insn (src_regno, use_insn, reloads_num);
7412 : else
7413 : {
7414 106537131 : for (i = 0; i < hard_regno_nregs (src_regno, reg->biggest_mode); i++)
7415 53316939 : add_next_usage_insn (src_regno + i, use_insn, reloads_num);
7416 : }
7417 : }
7418 : }
7419 : /* Process used call regs. */
7420 184500005 : if (curr_id->arg_hard_regs != NULL)
7421 19676207 : for (i = 0; (src_regno = curr_id->arg_hard_regs[i]) >= 0; i++)
7422 14012345 : if (src_regno < FIRST_PSEUDO_REGISTER)
7423 : {
7424 13206758 : SET_HARD_REG_BIT (live_hard_regs, src_regno);
7425 13206758 : add_next_usage_insn (src_regno, curr_insn, reloads_num);
7426 : }
7427 184683394 : for (i = 0; i < to_inherit_num; i++)
7428 : {
7429 183389 : src_regno = to_inherit[i].regno;
7430 183389 : if (inherit_reload_reg (false, src_regno, ALL_REGS,
7431 : curr_insn, to_inherit[i].insns))
7432 : change_p = true;
7433 : else
7434 23235 : setup_next_usage_insn (src_regno, curr_insn, reloads_num, false);
7435 : }
7436 : }
7437 184573135 : if (update_reloads_num_p
7438 219905383 : && NONDEBUG_INSN_P (curr_insn) && curr_set != NULL_RTX)
7439 : {
7440 109863925 : int regno = -1;
7441 109863925 : if ((REG_P (SET_DEST (curr_set))
7442 80004487 : && (regno = REGNO (SET_DEST (curr_set))) >= lra_constraint_new_regno_start
7443 8103442 : && reg_renumber[regno] < 0
7444 5177586 : && (cl = lra_get_allocno_class (regno)) != NO_REGS)
7445 184970904 : || (REG_P (SET_SRC (curr_set))
7446 34894232 : && (regno = REGNO (SET_SRC (curr_set))) >= lra_constraint_new_regno_start
7447 5925530 : && reg_renumber[regno] < 0
7448 3474811 : && (cl = lra_get_allocno_class (regno)) != NO_REGS))
7449 : {
7450 8104588 : if (ira_class_hard_regs_num[cl] <= max_small_class_regs_num)
7451 213999 : reloads_num++;
7452 16209176 : if (hard_reg_set_subset_p (reg_class_contents[cl], live_hard_regs))
7453 223165573 : potential_reload_hard_regs |= reg_class_contents[cl];
7454 : }
7455 : }
7456 223165573 : if (NONDEBUG_INSN_P (curr_insn))
7457 : {
7458 119431318 : int regno;
7459 :
7460 : /* Invalidate invariants with changed regs. */
7461 119431318 : curr_id = lra_get_insn_recog_data (curr_insn);
7462 306402719 : for (reg = curr_id->regs; reg != NULL; reg = reg->next)
7463 186971401 : if (reg->type != OP_IN)
7464 : {
7465 79828385 : bitmap_set_bit (&invalid_invariant_regs, reg->regno);
7466 159656770 : bitmap_set_bit (&invalid_invariant_regs,
7467 79828385 : ORIGINAL_REGNO (regno_reg_rtx[reg->regno]));
7468 : }
7469 119431318 : curr_static_id = curr_id->insn_static_data;
7470 150868142 : for (reg = curr_static_id->hard_regs; reg != NULL; reg = reg->next)
7471 31436824 : if (reg->type != OP_IN)
7472 22488486 : bitmap_set_bit (&invalid_invariant_regs, reg->regno);
7473 119431318 : if (curr_id->arg_hard_regs != NULL)
7474 19676207 : for (i = 0; (regno = curr_id->arg_hard_regs[i]) >= 0; i++)
7475 14012345 : if (regno >= FIRST_PSEUDO_REGISTER)
7476 805587 : bitmap_set_bit (&invalid_invariant_regs,
7477 : regno - FIRST_PSEUDO_REGISTER);
7478 : }
7479 : /* We reached the start of the current basic block. */
7480 223165565 : if (prev_insn == NULL_RTX || prev_insn == PREV_INSN (head)
7481 433658987 : || BLOCK_FOR_INSN (prev_insn) != curr_bb)
7482 : {
7483 : /* We reached the beginning of the current block -- do
7484 : rest of spliting in the current BB. */
7485 18432249 : to_process = df_get_live_in (curr_bb);
7486 18432249 : if (BLOCK_FOR_INSN (head) != curr_bb)
7487 : {
7488 : /* We are somewhere in the middle of EBB. */
7489 5760090 : get_live_on_other_edges (EDGE_PRED (curr_bb, 0)->src,
7490 : curr_bb, &temp_bitmap);
7491 5760090 : to_process = &temp_bitmap;
7492 : }
7493 18432249 : head_p = true;
7494 193830236 : EXECUTE_IF_SET_IN_BITMAP (to_process, 0, j, bi)
7495 : {
7496 175397994 : if ((int) j >= lra_constraint_new_regno_start)
7497 : break;
7498 111906787 : if (((int) j < FIRST_PSEUDO_REGISTER || reg_renumber[j] >= 0)
7499 110590878 : && usage_insns[j].check == curr_usage_insns_check
7500 284111022 : && (next_usage_insns = usage_insns[j].insns) != NULL_RTX)
7501 : {
7502 108713035 : if (need_for_split_p (potential_reload_hard_regs, j))
7503 : {
7504 381054 : if (lra_dump_file != NULL && head_p)
7505 : {
7506 0 : fprintf (lra_dump_file,
7507 : " ----------------------------------\n");
7508 0 : head_p = false;
7509 : }
7510 381054 : if (split_reg (false, j, bb_note (curr_bb),
7511 : next_usage_insns, NULL))
7512 381054 : change_p = true;
7513 : }
7514 108713035 : usage_insns[j].check = 0;
7515 : }
7516 : }
7517 : }
7518 : }
7519 12672159 : first_call_insn = NULL;
7520 12672159 : return change_p;
7521 : }
7522 :
7523 : /* This value affects EBB forming. If probability of edge from EBB to
7524 : a BB is not greater than the following value, we don't add the BB
7525 : to EBB. */
7526 : #define EBB_PROBABILITY_CUTOFF \
7527 : ((REG_BR_PROB_BASE * param_lra_inheritance_ebb_probability_cutoff) / 100)
7528 :
7529 : /* Current number of inheritance/split iteration. */
7530 : int lra_inheritance_iter;
7531 :
7532 : /* Entry function for the inheritance/split pass.  Form extended
 : basic blocks (EBBs) from fall-through chains whose edges have
 : probability not less than EBB_PROBABILITY_CUTOFF, and do
 : inheritance and splitting inside each EBB (see inherit_in_ebb).
 : The pass becomes a no-op after LRA_MAX_INHERITANCE_PASSES
 : iterations, which guarantees termination.  */
7533 : void
7534 1546132 : lra_inheritance (void)
7535 : {
7536 1546132 : int i;
7537 1546132 : basic_block bb, start_bb;
7538 1546132 : edge e;
7539 :
7540 1546132 : lra_inheritance_iter++;
7541 1546132 : if (lra_inheritance_iter > LRA_MAX_INHERITANCE_PASSES)
7542 : return;
7543 1543001 : timevar_push (TV_LRA_INHERITANCE);
7544 1543001 : if (lra_dump_file != NULL)
7545 97 : fprintf (lra_dump_file, "\n********** Inheritance #%d: **********\n\n",
7546 : lra_inheritance_iter);
 : /* Reset the per-regno usage chains used by inherit_in_ebb.  */
7547 1543001 : curr_usage_insns_check = 0;
7548 1543001 : usage_insns = XNEWVEC (struct usage_insns, lra_constraint_new_regno_start);
7549 228686212 : for (i = 0; i < lra_constraint_new_regno_start; i++)
7550 227143211 : usage_insns[i].check = 0;
7551 1543001 : bitmap_initialize (&check_only_regs, &reg_obstack);
7552 1543001 : bitmap_initialize (&invalid_invariant_regs, &reg_obstack);
7553 1543001 : bitmap_initialize (&live_regs, &reg_obstack);
7554 1543001 : bitmap_initialize (&temp_bitmap, &reg_obstack);
7555 1543001 : bitmap_initialize (&ebb_global_regs, &reg_obstack);
7556 14215160 : FOR_EACH_BB_FN (bb, cfun)
7557 : {
7558 12672159 : start_bb = bb;
7559 12672159 : if (lra_dump_file != NULL)
7560 347 : fprintf (lra_dump_file, "EBB");
7561 : /* Form a EBB starting with BB.  The EBB is extended while the
 : next block is a non-label fall-through successor whose edge
 : is probable enough.  */
7562 12672159 : bitmap_clear (&ebb_global_regs);
7563 12672159 : bitmap_ior_into (&ebb_global_regs, df_get_live_in (bb));
7564 18432013 : for (;;)
7565 : {
7566 18432013 : if (lra_dump_file != NULL)
7567 477 : fprintf (lra_dump_file, " %d", bb->index);
7568 18432013 : if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
7569 16889012 : || LABEL_P (BB_HEAD (bb->next_bb)))
7570 : break;
7571 8178090 : e = find_fallthru_edge (bb->succs);
7572 8178090 : if (! e)
7573 : break;
7574 8178090 : if (e->probability.initialized_p ()
7575 8178090 : && e->probability.to_reg_br_prob_base () < EBB_PROBABILITY_CUTOFF)
7576 : break;
7577 : bb = bb->next_bb;
7578 : }
7579 12672159 : bitmap_ior_into (&ebb_global_regs, df_get_live_out (bb));
7580 12672159 : if (lra_dump_file != NULL)
7581 347 : fprintf (lra_dump_file, "\n");
7582 12672159 : if (inherit_in_ebb (BB_HEAD (start_bb), BB_END (bb)))
7583 : /* Remember that the EBB head and tail can change in
7584 : inherit_in_ebb.  */
7585 741652 : update_ebb_live_info (BB_HEAD (start_bb), BB_END (bb));
7586 : }
7587 1543001 : bitmap_release (&ebb_global_regs);
7588 1543001 : bitmap_release (&temp_bitmap);
7589 1543001 : bitmap_release (&live_regs);
7590 1543001 : bitmap_release (&invalid_invariant_regs);
7591 1543001 : bitmap_release (&check_only_regs);
7592 1543001 : free (usage_insns);
7593 1543001 : lra_dump_insns_if_possible ("func after inheritance");
7594 1543001 : timevar_pop (TV_LRA_INHERITANCE);
7595 : }
7596 :
7597 :
7598 :
7599 : /* This page contains code to undo failed inheritance/split
7600 : transformations. */
7601 :
7602 : /* Current number of iteration undoing inheritance/split. */
7603 : int lra_undo_inheritance_iter;
7604 :
7605 : /* Fix BB live info LIVE after removing pseudos created on pass doing
7606 : inheritance/split which are REMOVED_PSEUDOS. */
7607 : static void
7608 36864026 : fix_bb_live_info (bitmap live, bitmap removed_pseudos)
7609 : {
7610 36864026 : unsigned int regno;
7611 36864026 : bitmap_iterator bi;
7612 :
7613 210289194 : EXECUTE_IF_SET_IN_BITMAP (removed_pseudos, 0, regno, bi)
7614 173425168 : if (bitmap_clear_bit (live, regno)
7615 173425168 : && REG_P (lra_reg_info[regno].restore_rtx))
7616 1207898 : bitmap_set_bit (live, REGNO (lra_reg_info[regno].restore_rtx));
7617 36864026 : }
7618 :
7619 : /* Return regno of the (subreg of) REG. Otherwise, return a negative
7620 : number. */
7621 : static int
7622 66923546 : get_regno (rtx reg)
7623 : {
7624 1078534 : if (GET_CODE (reg) == SUBREG)
7625 1023788 : reg = SUBREG_REG (reg);
7626 66923546 : if (REG_P (reg))
7627 43281622 : return REGNO (reg);
7628 : return -1;
7629 : }
7630 :
7631 : /* Delete a move INSN with destination reg DREGNO and a previous
7632 : clobber insn with the same regno. The inheritance/split code can
7633 : generate moves with preceding clobber and when we delete such moves
7634 : we should delete the clobber insn too to keep the correct life
7635 : info. */
7636 : static void
7637 742668 : delete_move_and_clobber (rtx_insn *insn, int dregno)
7638 : {
7639 742668 : rtx_insn *prev_insn = PREV_INSN (insn);
7640 :
7641 742668 : lra_set_insn_deleted (insn);
7642 742668 : lra_assert (dregno >= 0);
7643 742668 : if (prev_insn != NULL && NONDEBUG_INSN_P (prev_insn)
7644 317994 : && GET_CODE (PATTERN (prev_insn)) == CLOBBER
7645 743018 : && dregno == get_regno (XEXP (PATTERN (prev_insn), 0)))
7646 0 : lra_set_insn_deleted (prev_insn);
7647 742668 : }
7648 :
7649 : /* Remove inheritance/split pseudos which are in REMOVE_PSEUDOS and
7650 : return true if we did any change. The undo transformations for
7651 : inheritance looks like
7652 : i <- i2
7653 : p <- i => p <- i2
7654 : or removing
7655 : p <- i, i <- p, and i <- i3
7656 : where p is original pseudo from which inheritance pseudo i was
7657 : created, i and i3 are removed inheritance pseudos, i2 is another
7658 : not removed inheritance pseudo. All split pseudos or other
7659 : occurrences of removed inheritance pseudos are changed on the
7660 : corresponding original pseudos.
7661 :
7662 : The function also schedules insns changed and created during
7663 : inheritance/split pass for processing by the subsequent constraint
7664 : pass. */
7665 : static bool
7666 1543001 : remove_inheritance_pseudos (bitmap remove_pseudos)
7667 : {
7668 1543001 : basic_block bb;
7669 1543001 : int regno, sregno, prev_sregno, dregno;
7670 1543001 : rtx restore_rtx;
7671 1543001 : rtx set, prev_set;
7672 1543001 : rtx_insn *prev_insn;
7673 1543001 : bool change_p, done_p;
7674 :
7675 1543001 : change_p = ! bitmap_empty_p (remove_pseudos);
7676 : /* We cannot finish the function right away if CHANGE_P is true
7677 : because we need to marks insns affected by previous
7678 : inheritance/split pass for processing by the subsequent
7679 : constraint pass. */
 : /* Walk every block, scanning insns last-to-first.  */
7680 19975014 : FOR_EACH_BB_FN (bb, cfun)
7681 : {
7682 18432013 : fix_bb_live_info (df_get_live_in (bb), remove_pseudos);
7683 18432013 : fix_bb_live_info (df_get_live_out (bb), remove_pseudos);
7684 244115076 : FOR_BB_INSNS_REVERSE (bb, curr_insn)
7685 : {
7686 225683063 : if (! INSN_P (curr_insn))
7687 35750560 : continue;
7688 189932503 : done_p = false;
7689 189932503 : sregno = dregno = -1;
7690 45879590 : if (change_p && NONDEBUG_INSN_P (curr_insn)
7691 221559010 : && (set = single_set (curr_insn)) != NULL_RTX)
7692 : {
7693 30626626 : dregno = get_regno (SET_DEST (set));
7694 61253252 : sregno = get_regno (SET_SRC (set));
7695 : }
7696 :
 : /* CURR_INSN is a single set between two (subregs of) regs:
 : try the special undo patterns described above.  */
7697 189932503 : if (sregno >= 0 && dregno >= 0)
7698 : {
7699 10753578 : if (bitmap_bit_p (remove_pseudos, dregno)
7700 10753578 : && ! REG_P (lra_reg_info[dregno].restore_rtx))
7701 : {
7702 : /* invariant inheritance pseudo <- original pseudo */
7703 7119 : if (lra_dump_file != NULL)
7704 : {
7705 0 : fprintf (lra_dump_file, " Removing invariant inheritance:\n");
7706 0 : dump_insn_slim (lra_dump_file, curr_insn);
7707 0 : fprintf (lra_dump_file, "\n");
7708 : }
7709 7119 : delete_move_and_clobber (curr_insn, dregno);
7710 7119 : done_p = true;
7711 : }
7712 10746459 : else if (bitmap_bit_p (remove_pseudos, sregno)
7713 10746459 : && ! REG_P (lra_reg_info[sregno].restore_rtx))
7714 : {
7715 : /* reload pseudo <- invariant inheritance pseudo */
7716 7119 : start_sequence ();
7717 : /* We cannot just change the source. It might be
7718 : an insn different from the move. */
7719 7119 : emit_insn (lra_reg_info[sregno].restore_rtx);
7720 7119 : rtx_insn *new_insns = end_sequence ();
7721 7119 : lra_assert (single_set (new_insns) != NULL
7722 : && SET_DEST (set) == SET_DEST (single_set (new_insns)));
7723 7119 : lra_process_new_insns (curr_insn, NULL, new_insns,
7724 : "Changing reload<-invariant inheritance");
7725 7119 : delete_move_and_clobber (curr_insn, dregno);
7726 7119 : done_p = true;
7727 : }
7728 10739340 : else if ((bitmap_bit_p (remove_pseudos, sregno)
7729 1213898 : && (get_regno (lra_reg_info[sregno].restore_rtx) == dregno
7730 571750 : || (bitmap_bit_p (remove_pseudos, dregno)
7731 188362 : && get_regno (lra_reg_info[sregno].restore_rtx) >= 0
7732 188362 : && (get_regno (lra_reg_info[sregno].restore_rtx)
7733 188362 : == get_regno (lra_reg_info[dregno].restore_rtx)))))
7734 11216909 : || (bitmap_bit_p (remove_pseudos, dregno)
7735 649382 : && get_regno (lra_reg_info[dregno].restore_rtx) == sregno))
7736 : /* One of the following cases:
7737 : original <- removed inheritance pseudo
7738 : removed inherit pseudo <- another removed inherit pseudo
7739 : removed inherit pseudo <- original pseudo
7740 : Or
7741 : removed_split_pseudo <- original_reg
7742 : original_reg <- removed_split_pseudo */
7743 : {
7744 178465 : if (lra_dump_file != NULL)
7745 : {
7746 0 : fprintf (lra_dump_file, " Removing %s:\n",
7747 0 : bitmap_bit_p (&lra_split_regs, sregno)
7748 0 : || bitmap_bit_p (&lra_split_regs, dregno)
7749 : ? "split" : "inheritance");
7750 0 : dump_insn_slim (lra_dump_file, curr_insn);
7751 : }
7752 178465 : delete_move_and_clobber (curr_insn, dregno);
7753 178465 : done_p = true;
7754 : }
7755 10560875 : else if (bitmap_bit_p (remove_pseudos, sregno)
7756 10560875 : && bitmap_bit_p (&lra_inheritance_pseudos, sregno))
7757 : {
7758 : /* Search the following pattern:
7759 : inherit_or_split_pseudo1 <- inherit_or_split_pseudo2
7760 : original_pseudo <- inherit_or_split_pseudo1
7761 : where the 2nd insn is the current insn and
7762 : inherit_or_split_pseudo2 is not removed. If it is found,
7763 : change the current insn onto:
7764 : original_pseudo <- inherit_or_split_pseudo2. */
 : /* Skip debug insns and notes to find the previous real insn.  */
7765 728184 : for (prev_insn = PREV_INSN (curr_insn);
7766 728184 : prev_insn != NULL_RTX && ! NONDEBUG_INSN_P (prev_insn);
7767 250615 : prev_insn = PREV_INSN (prev_insn))
7768 : ;
7769 477569 : if (prev_insn != NULL_RTX && BLOCK_FOR_INSN (prev_insn) == bb
7770 464747 : && (prev_set = single_set (prev_insn)) != NULL_RTX
7771 : /* There should be no subregs in insn we are
7772 : searching because only the original reg might
7773 : be in subreg when we changed the mode of
7774 : load/store for splitting. */
7775 458860 : && REG_P (SET_DEST (prev_set))
7776 352572 : && REG_P (SET_SRC (prev_set))
7777 271380 : && (int) REGNO (SET_DEST (prev_set)) == sregno
7778 185219 : && ((prev_sregno = REGNO (SET_SRC (prev_set)))
7779 : >= FIRST_PSEUDO_REGISTER)
7780 185219 : && (lra_reg_info[prev_sregno].restore_rtx == NULL_RTX
7781 143623 : ||
7782 : /* As we consider chain of inheritance or
7783 : splitting described in above comment we should
7784 : check that sregno and prev_sregno were
7785 : inheritance/split pseudos created from the
7786 : same original regno. */
7787 287246 : (get_regno (lra_reg_info[sregno].restore_rtx) >= 0
7788 287246 : && (get_regno (lra_reg_info[sregno].restore_rtx)
7789 287246 : == get_regno (lra_reg_info[prev_sregno].restore_rtx))))
7790 662788 : && ! bitmap_bit_p (remove_pseudos, prev_sregno))
7791 : {
7792 102670 : int restore_regno = get_regno (lra_reg_info[sregno].restore_rtx);
7793 102670 : if (restore_regno < 0)
7794 0 : restore_regno = prev_sregno;
7795 102670 : lra_assert (GET_MODE (SET_SRC (prev_set))
7796 : == GET_MODE (regno_reg_rtx[restore_regno]));
7797 : /* Although we have a single set, the insn can
7798 : contain more one sregno register occurrence
7799 : as a source. Change all occurrences. */
7800 102670 : lra_substitute_pseudo_within_insn (curr_insn, sregno,
7801 : regno_reg_rtx[restore_regno],
7802 : false);
7803 : /* As we are finishing with processing the insn
7804 : here, check the destination too as it might
7805 : inheritance pseudo for another pseudo. */
7806 102670 : if (bitmap_bit_p (remove_pseudos, dregno)
7807 0 : && bitmap_bit_p (&lra_inheritance_pseudos, dregno)
7808 102670 : && (restore_rtx
7809 0 : = lra_reg_info[dregno].restore_rtx) != NULL_RTX)
7810 : {
7811 0 : if (GET_CODE (SET_DEST (set)) == SUBREG)
7812 0 : SUBREG_REG (SET_DEST (set)) = restore_rtx;
7813 : else
7814 0 : SET_DEST (set) = restore_rtx;
7815 : }
7816 102670 : lra_push_insn_and_update_insn_regno_info (curr_insn);
7817 102670 : lra_set_used_insn_alternative_by_uid
7818 102670 : (INSN_UID (curr_insn), LRA_UNKNOWN_ALT);
7819 102670 : done_p = true;
7820 102670 : if (lra_dump_file != NULL)
7821 : {
7822 0 : fprintf (lra_dump_file, " Change reload insn:\n");
7823 0 : dump_insn_slim (lra_dump_file, curr_insn);
7824 : }
7825 : }
7826 : }
7827 : }
 : /* No special pattern matched: substitute each removed pseudo
 : occurring in the insn by its original (restore_rtx) and
 : reschedule the insn for the next constraint pass if it still
 : references surviving inheritance/split pseudos.  */
7828 192703 : if (! done_p)
7829 : {
7830 189637130 : struct lra_insn_reg *reg;
7831 189637130 : bool restored_regs_p = false;
7832 189637130 : bool kept_regs_p = false;
7833 :
7834 189637130 : curr_id = lra_get_insn_recog_data (curr_insn);
7835 394983420 : for (reg = curr_id->regs; reg != NULL; reg = reg->next)
7836 : {
7837 205346290 : regno = reg->regno;
7838 205346290 : restore_rtx = lra_reg_info[regno].restore_rtx;
7839 205346290 : if (restore_rtx != NULL_RTX)
7840 : {
7841 5875738 : if (change_p && bitmap_bit_p (remove_pseudos, regno))
7842 : {
7843 827127 : lra_substitute_pseudo_within_insn
7844 827127 : (curr_insn, regno, restore_rtx, false);
7845 827127 : restored_regs_p = true;
7846 : }
7847 : else
7848 : kept_regs_p = true;
7849 : }
7850 : }
7851 189637130 : if (NONDEBUG_INSN_P (curr_insn) && kept_regs_p)
7852 : {
7853 : /* The instruction has changed since the previous
7854 : constraints pass. */
7855 4428216 : lra_push_insn_and_update_insn_regno_info (curr_insn);
7856 4428216 : lra_set_used_insn_alternative_by_uid
7857 4428216 : (INSN_UID (curr_insn), LRA_UNKNOWN_ALT);
7858 : }
7859 185208914 : else if (restored_regs_p)
7860 : /* The instruction has been restored to the form that
7861 : it had during the previous constraints pass. */
7862 664631 : lra_update_insn_regno_info (curr_insn);
7863 5092847 : if (restored_regs_p && lra_dump_file != NULL)
7864 : {
7865 0 : fprintf (lra_dump_file, " Insn after restoring regs:\n");
7866 0 : dump_insn_slim (lra_dump_file, curr_insn);
7867 : }
7868 : }
7869 : }
7870 : }
7871 1543001 : return change_p;
7872 : }
7873 :
7874 : /* If optional reload pseudos failed to get a hard register or was not
7875 : inherited, it is better to remove optional reloads. We do this
7876 : transformation after undoing inheritance to figure out necessity to
7877 : remove optional reloads easier. Return true if we do any
7878 : change. */
7879 : static bool
7880 1543001 : undo_optional_reloads (void)
7881 : {
7882 1543001 : bool change_p, keep_p;
7883 1543001 : unsigned int regno, uid;
7884 1543001 : bitmap_iterator bi, bi2;
7885 1543001 : rtx_insn *insn;
7886 1543001 : rtx set, src, dest;
7887 1543001 : auto_bitmap removed_optional_reload_pseudos (&reg_obstack);
7888 :
 : /* First decide which optional reload pseudos must be kept; all
 : others stay in REMOVED_OPTIONAL_RELOAD_PSEUDOS.  */
7889 1543001 : bitmap_copy (removed_optional_reload_pseudos, &lra_optional_reload_pseudos);
7890 2537738 : EXECUTE_IF_SET_IN_BITMAP (&lra_optional_reload_pseudos, 0, regno, bi)
7891 : {
7892 994737 : keep_p = false;
7893 : /* Keep optional reloads from previous subpasses. */
7894 994737 : if (lra_reg_info[regno].restore_rtx == NULL_RTX
7895 : /* If the original pseudo changed its allocation, just
7896 : removing the optional pseudo is dangerous as the original
7897 : pseudo will have longer live range. */
7898 994737 : || reg_renumber[REGNO (lra_reg_info[regno].restore_rtx)] >= 0)
7899 : keep_p = true;
7900 615742 : else if (reg_renumber[regno] >= 0)
7901 1763349 : EXECUTE_IF_SET_IN_BITMAP (&lra_reg_info[regno].insn_bitmap, 0, uid, bi2)
7902 : {
7903 1286168 : insn = lra_insn_recog_data[uid]->insn;
7904 1286168 : if ((set = single_set (insn)) == NULL_RTX)
7905 6647 : continue;
7906 1279521 : src = SET_SRC (set);
7907 1279521 : dest = SET_DEST (set);
7908 1279521 : if ((! REG_P (src) && ! SUBREG_P (src))
7909 685796 : || (! REG_P (dest) && ! SUBREG_P (dest)))
7910 593754 : continue;
7911 685767 : if (get_regno (dest) == (int) regno
7912 : /* Ignore insn for optional reloads itself. */
7913 1153714 : && (get_regno (lra_reg_info[regno].restore_rtx)
7914 576857 : != get_regno (src))
7915 : /* Check only inheritance on last inheritance pass. */
7916 123220 : && get_regno (src) >= new_regno_start
7917 : /* Check that the optional reload was inherited. */
7918 808987 : && bitmap_bit_p (&lra_inheritance_pseudos, get_regno (src)))
7919 : {
7920 : keep_p = true;
7921 : break;
7922 : }
7923 : }
7924 979396 : if (keep_p)
7925 : {
7926 502215 : bitmap_clear_bit (removed_optional_reload_pseudos, regno);
7927 502215 : if (lra_dump_file != NULL)
7928 3 : fprintf (lra_dump_file, "Keep optional reload reg %d\n", regno);
7929 : }
7930 : }
7931 1543001 : change_p = ! bitmap_empty_p (removed_optional_reload_pseudos);
7932 1543001 : auto_bitmap insn_bitmap (&reg_obstack);
 : /* Now undo the insns of the removable optional reloads: delete the
 : reload moves and substitute the original pseudo elsewhere.  */
7933 2035523 : EXECUTE_IF_SET_IN_BITMAP (removed_optional_reload_pseudos, 0, regno, bi)
7934 : {
7935 492522 : if (lra_dump_file != NULL)
7936 2 : fprintf (lra_dump_file, "Remove optional reload reg %d\n", regno);
7937 492522 : bitmap_copy (insn_bitmap, &lra_reg_info[regno].insn_bitmap);
7938 1565239 : EXECUTE_IF_SET_IN_BITMAP (insn_bitmap, 0, uid, bi2)
7939 : {
7940 : /* We may have already removed a clobber. */
7941 1072717 : if (!lra_insn_recog_data[uid])
7942 0 : continue;
7943 1072717 : insn = lra_insn_recog_data[uid]->insn;
7944 1072717 : if ((set = single_set (insn)) != NULL_RTX)
7945 : {
7946 1066794 : src = SET_SRC (set);
7947 1066794 : dest = SET_DEST (set);
7948 486580 : if ((REG_P (src) || SUBREG_P (src))
7949 580226 : && (REG_P (dest) || SUBREG_P (dest))
7950 1646989 : && ((get_regno (src) == (int) regno
7951 224052 : && (get_regno (lra_reg_info[regno].restore_rtx)
7952 112026 : == get_regno (dest)))
7953 498339 : || (get_regno (dest) == (int) regno
7954 468169 : && (get_regno (lra_reg_info[regno].restore_rtx)
7955 468169 : == get_regno (src)))))
7956 : {
7957 549965 : if (lra_dump_file != NULL)
7958 : {
7959 0 : fprintf (lra_dump_file, " Deleting move %u\n",
7960 0 : INSN_UID (insn));
7961 0 : dump_insn_slim (lra_dump_file, insn);
7962 : }
7963 1099930 : delete_move_and_clobber (insn, get_regno (dest));
7964 549965 : continue;
7965 : }
7966 : /* We should not worry about generation memory-memory
7967 : moves here as if the corresponding inheritance did
7968 : not work (inheritance pseudo did not get a hard reg),
7969 : we remove the inheritance pseudo and the optional
7970 : reload. */
7971 : }
7972 522752 : if (GET_CODE (PATTERN (insn)) == CLOBBER
7973 0 : && REG_P (SET_DEST (insn))
7974 522752 : && get_regno (SET_DEST (insn)) == (int) regno)
7975 : /* Refuse to remap clobbers to preexisting pseudos. */
7976 0 : gcc_unreachable ();
7977 522752 : lra_substitute_pseudo_within_insn
7978 522752 : (insn, regno, lra_reg_info[regno].restore_rtx, false);
7979 522752 : lra_update_insn_regno_info (insn);
7980 522752 : if (lra_dump_file != NULL)
7981 : {
7982 4 : fprintf (lra_dump_file,
7983 : " Restoring original insn:\n");
7984 4 : dump_insn_slim (lra_dump_file, insn);
7985 : }
7986 : }
7987 : }
7988 : /* Clear restore_regnos. */
7989 2537738 : EXECUTE_IF_SET_IN_BITMAP (&lra_optional_reload_pseudos, 0, regno, bi)
7990 994737 : lra_reg_info[regno].restore_rtx = NULL_RTX;
7991 1543001 : return change_p;
7992 1543001 : }
7993 :
7994 : /* Entry function for undoing inheritance/split transformation. Return true
7995 : if we did any RTL change in this pass. */
7996 : bool
7997 1546132 : lra_undo_inheritance (void)
7998 : {
7999 1546132 : unsigned int regno;
8000 1546132 : int hard_regno;
8001 1546132 : int n_all_inherit, n_inherit, n_all_split, n_split;
8002 1546132 : rtx restore_rtx;
8003 1546132 : bitmap_iterator bi;
8004 1546132 : bool change_p;
8005 :
8006 1546132 : lra_undo_inheritance_iter++;
8007 1546132 : if (lra_undo_inheritance_iter > LRA_MAX_INHERITANCE_PASSES)
8008 : return false;
8009 1543001 : if (lra_dump_file != NULL)
8010 97 : fprintf (lra_dump_file,
8011 : "\n********** Undoing inheritance #%d: **********\n\n",
8012 : lra_undo_inheritance_iter);
8013 1543001 : auto_bitmap remove_pseudos (&reg_obstack);
 : /* Collect inheritance pseudos that failed to get a hard register
 : (and whose original reg also got none) for removal.  */
8014 1543001 : n_inherit = n_all_inherit = 0;
8015 3403618 : EXECUTE_IF_SET_IN_BITMAP (&lra_inheritance_pseudos, 0, regno, bi)
8016 1860617 : if (lra_reg_info[regno].restore_rtx != NULL_RTX)
8017 : {
8018 1176029 : n_all_inherit++;
8019 1176029 : if (reg_renumber[regno] < 0
8020 : /* If the original pseudo changed its allocation, just
8021 : removing inheritance is dangerous as for changing
8022 : allocation we used shorter live-ranges. */
8023 1176029 : && (! REG_P (lra_reg_info[regno].restore_rtx)
8024 420058 : || reg_renumber[REGNO (lra_reg_info[regno].restore_rtx)] < 0))
8025 427177 : bitmap_set_bit (remove_pseudos, regno);
8026 : else
8027 748852 : n_inherit++;
8028 : }
8029 1543001 : if (lra_dump_file != NULL && n_all_inherit != 0)
8030 2 : fprintf (lra_dump_file, "Inherit %d out of %d (%.2f%%)\n",
8031 : n_inherit, n_all_inherit,
8032 2 : (double) n_inherit / n_all_inherit * 100);
 : /* Collect useless split pseudos: those with no hard register or
 : with the same hard register as the original reg.  */
8033 1543001 : n_split = n_all_split = 0;
8034 2495655 : EXECUTE_IF_SET_IN_BITMAP (&lra_split_regs, 0, regno, bi)
8035 952654 : if ((restore_rtx = lra_reg_info[regno].restore_rtx) != NULL_RTX)
8036 : {
8037 654182 : int restore_regno = REGNO (restore_rtx);
8038 :
8039 654182 : n_all_split++;
8040 1308282 : hard_regno = (restore_regno >= FIRST_PSEUDO_REGISTER
8041 654182 : ? reg_renumber[restore_regno] : restore_regno);
8042 654182 : if (hard_regno < 0 || reg_renumber[regno] == hard_regno)
8043 2215 : bitmap_set_bit (remove_pseudos, regno);
8044 : else
8045 : {
8046 651967 : n_split++;
8047 651967 : if (lra_dump_file != NULL)
8048 0 : fprintf (lra_dump_file, " Keep split r%d (orig=r%d)\n",
8049 : regno, restore_regno);
8050 : }
8051 : }
8052 1543001 : if (lra_dump_file != NULL && n_all_split != 0)
8053 0 : fprintf (lra_dump_file, "Split %d out of %d (%.2f%%)\n",
8054 : n_split, n_all_split,
8055 0 : (double) n_split / n_all_split * 100);
8056 1543001 : change_p = remove_inheritance_pseudos (remove_pseudos);
8057 : /* Clear restore_regnos. */
8058 3403618 : EXECUTE_IF_SET_IN_BITMAP (&lra_inheritance_pseudos, 0, regno, bi)
8059 1860617 : lra_reg_info[regno].restore_rtx = NULL_RTX;
8060 2495655 : EXECUTE_IF_SET_IN_BITMAP (&lra_split_regs, 0, regno, bi)
8061 952654 : lra_reg_info[regno].restore_rtx = NULL_RTX;
8062 1543001 : change_p = undo_optional_reloads () || change_p;
8063 : if (change_p)
8064 109480 : lra_dump_insns_if_possible ("changed func after undoing inheritance");
8065 1543001 : return change_p;
8066 1543001 : }
|